Schema (column, dtype, observed min .. max):

    hexsha                                      stringlengths   40 .. 40
    size                                        int64           6 .. 14.9M
    ext                                         stringclasses   1 value
    lang                                        stringclasses   1 value
    max_stars_repo_path                         stringlengths   6 .. 260
    max_stars_repo_name                         stringlengths   6 .. 119
    max_stars_repo_head_hexsha                  stringlengths   40 .. 41
    max_stars_repo_licenses                     list
    max_stars_count                             int64           1 .. 191k
    max_stars_repo_stars_event_min_datetime     stringlengths   24 .. 24
    max_stars_repo_stars_event_max_datetime     stringlengths   24 .. 24
    max_issues_repo_path                        stringlengths   6 .. 260
    max_issues_repo_name                        stringlengths   6 .. 119
    max_issues_repo_head_hexsha                 stringlengths   40 .. 41
    max_issues_repo_licenses                    list
    max_issues_count                            int64           1 .. 67k
    max_issues_repo_issues_event_min_datetime   stringlengths   24 .. 24
    max_issues_repo_issues_event_max_datetime   stringlengths   24 .. 24
    max_forks_repo_path                         stringlengths   6 .. 260
    max_forks_repo_name                         stringlengths   6 .. 119
    max_forks_repo_head_hexsha                  stringlengths   40 .. 41
    max_forks_repo_licenses                     list
    max_forks_count                             int64           1 .. 105k
    max_forks_repo_forks_event_min_datetime     stringlengths   24 .. 24
    max_forks_repo_forks_event_max_datetime     stringlengths   24 .. 24
    avg_line_length                             float64         2 .. 1.04M
    max_line_length                             int64           2 .. 11.2M
    alphanum_fraction                           float64         0 .. 1
    cells                                       list
    cell_types                                  list
    cell_type_groups                            list
Sample row:

    hexsha:                                     cb53f4f1f05deb19400c4e9bb853f4293f94ec19
    size:                                       122,010
    ext:                                        ipynb
    lang:                                       Jupyter Notebook
    max_stars_repo_path:                        Week_9/cleaning data by re.ipynb
    max_stars_repo_name:                        Wabinab/NLP_GroupProject_DG
    max_stars_repo_head_hexsha:                 5ed5eb5cacc59dbac1d208fd6df83744c89f3bbc
    max_stars_repo_licenses:                    [ "Apache-2.0" ]
    max_stars_count:                            null
    max_stars_repo_stars_event_min_datetime:    null
    max_stars_repo_stars_event_max_datetime:    null
    max_issues_repo_path:                       Week_9/cleaning data by re.ipynb
    max_issues_repo_name:                       Wabinab/NLP_GroupProject_DG
    max_issues_repo_head_hexsha:                5ed5eb5cacc59dbac1d208fd6df83744c89f3bbc
    max_issues_repo_licenses:                   [ "Apache-2.0" ]
    max_issues_count:                           null
    max_issues_repo_issues_event_min_datetime:  null
    max_issues_repo_issues_event_max_datetime:  null
    max_forks_repo_path:                        Week_9/cleaning data by re.ipynb
    max_forks_repo_name:                        Wabinab/NLP_GroupProject_DG
    max_forks_repo_head_hexsha:                 5ed5eb5cacc59dbac1d208fd6df83744c89f3bbc
    max_forks_repo_licenses:                    [ "Apache-2.0" ]
    max_forks_count:                            null
    max_forks_repo_forks_event_min_datetime:    null
    max_forks_repo_forks_event_max_datetime:    null
    avg_line_length:                            59.808824
    max_line_length:                            5,875
    alphanum_fraction:                          0.642119
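The three statistics columns can in principle be recomputed from the raw notebook text. A minimal sketch follows, assuming the usual definitions (per-line character counts and the alphanumeric share of all characters); the dataset's exact computation is not documented in this dump.

    def text_stats(text):
        # Assumed definitions -- not taken from the dataset's own pipeline.
        lines = text.splitlines() or [""]
        avg_line_length = sum(len(line) for line in lines) / len(lines)
        max_line_length = max(len(line) for line in lines)
        alphanum_fraction = sum(ch.isalnum() for ch in text) / max(len(text), 1)
        return avg_line_length, max_line_length, alphanum_fraction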
[ [ [ "import os\nimport sys\nimport pandas as pd\nimport re\n# pd.set_option('display.max_colwidth', -1)", "<ipython-input-365-74384648d893>:5: FutureWarning: Passing a negative integer is deprecated in version 1.0 and will not be supported in future version. Instead, use None to not limit the column width.\n pd.set_option('display.max_colwidth', -1)\n" ] ], [ [ "Read data and Spilit into texts and Labels", "_____no_output_____" ] ], [ [ "BASE_DIR = ''\nGLOVE_DIR = os.path.join(BASE_DIR, 'glove.6B')\nTEXT_DATA_DIR = os.path.join(BASE_DIR, '20_newsgroups')", "_____no_output_____" ], [ "# This code from http#s://www.kaggle.com/mansijharia with some edit\n# May take a few time \n\ntexts = []\nlabels_index = {}\nlabels = []\n\nfor name in sorted(os.listdir((BASE_DIR+'20_newsgroups'))):\n path = os.path.join(BASE_DIR,'20_newsgroups', name)\n if os.path.isdir(path):\n label_id = len(labels_index)\n labels_index[name] = label_id\n for fname in sorted(os.listdir(path)):\n if fname.isdigit():\n fpath = os.path.join(path, fname)\n args = {} if sys.version_info < (3,) else {'encoding': 'latin-1'}\n with open(fpath, **args) as f:\n t = f.read()\n #Skip the matadata at 1st pragraph.\n i = t.find('\\n\\n')\n if 0 < i:\n t = t[i:]\n texts.append(t)\n labels.append(label_id)", "_____no_output_____" ] ], [ [ "print('Found %s texts.' % len(texts))\nprint('Found %s label.' % len(labels))", "_____no_output_____" ], [ "dict(labels_index.items())", "_____no_output_____" ], [ "- First remove the matadata by remove the frisr pragraph\n- Some matadata contian two pragraphs we will with the expration \n ", "_____no_output_____" ] ], [ [ "# for i in range(0,len(texts)):\n# match = re.search(r'([\\w\\.-]+)@([\\w\\.-]+)', texts[i])\n# if texts[i]!= None:\n# print(match)# just to show result\n# #so long output\n#no need ", "_____no_output_____" ], [ "for i in range(0,len(texts)):\n texts[i]= texts[i].strip() #To remove spaces from the beginning and the end of a string", "_____no_output_____" ] ], [ [ "# all these step will oreder and put it on the clwaning mathod on the process_data.py file.", "_____no_output_____" ], [ "#### trye the i in (0,5,7644,5432, 567)", "_____no_output_____" ] ], [ [ "#just one sample to show the work\n# you can change the value of i \ni=0\nprint('before:',texts[i])\nprint('the length is',len(texts[i]))", "before: Archive-name: atheism/resources\nAlt-atheism-archive-name: resources\nLast-modified: 11 December 1992\nVersion: 1.0\n\n Atheist Resources\n\n Addresses of Atheist Organizations\n\n USA\n\nFREEDOM FROM RELIGION FOUNDATION\n\nDarwin fish bumper stickers and assorted other atheist paraphernalia are\navailable from the Freedom From Religion Foundation in the US.\n\nWrite to: FFRF, P.O. Box 750, Madison, WI 53701.\nTelephone: (608) 256-8900\n\nEVOLUTION DESIGNS\n\nEvolution Designs sell the \"Darwin fish\". It's a fish symbol, like the ones\nChristians stick on their cars, but with feet and the word \"Darwin\" written\ninside. The deluxe moulded 3D plastic fish is $4.95 postpaid in the US.\n\nWrite to: Evolution Designs, 7119 Laurel Canyon #4, North Hollywood,\n CA 91605.\n\nPeople in the San Francisco Bay area can get Darwin Fish from Lynn Gold --\ntry mailing <[email protected]>. For net people who go to Lynn directly, the\nprice is $4.95 per fish.\n\nAMERICAN ATHEIST PRESS\n\nAAP publish various atheist books -- critiques of the Bible, lists of\nBiblical contradictions, and so on. One such book is:\n\n\"The Bible Handbook\" by W.P. Ball and G.W. Foote. American Atheist Press.\n372 pp. 
ISBN 0-910309-26-4, 2nd edition, 1986. Bible contradictions,\nabsurdities, atrocities, immoralities... contains Ball, Foote: \"The Bible\nContradicts Itself\", AAP. Based on the King James version of the Bible.\n\nWrite to: American Atheist Press, P.O. Box 140195, Austin, TX 78714-0195.\n or: 7215 Cameron Road, Austin, TX 78752-2973.\nTelephone: (512) 458-1244\nFax: (512) 467-9525\n\nPROMETHEUS BOOKS\n\nSell books including Haught's \"Holy Horrors\" (see below).\n\nWrite to: 700 East Amherst Street, Buffalo, New York 14215.\nTelephone: (716) 837-2475.\n\nAn alternate address (which may be newer or older) is:\nPrometheus Books, 59 Glenn Drive, Buffalo, NY 14228-2197.\n\nAFRICAN-AMERICANS FOR HUMANISM\n\nAn organization promoting black secular humanism and uncovering the history of\nblack freethought. They publish a quarterly newsletter, AAH EXAMINER.\n\nWrite to: Norm R. Allen, Jr., African Americans for Humanism, P.O. Box 664,\n Buffalo, NY 14226.\n\n United Kingdom\n\nRationalist Press Association National Secular Society\n88 Islington High Street 702 Holloway Road\nLondon N1 8EW London N19 3NL\n071 226 7251 071 272 1266\n\nBritish Humanist Association South Place Ethical Society\n14 Lamb's Conduit Passage Conway Hall\nLondon WC1R 4RH Red Lion Square\n071 430 0908 London WC1R 4RL\nfax 071 430 1271 071 831 7723\n\nThe National Secular Society publish \"The Freethinker\", a monthly magazine\nfounded in 1881.\n\n Germany\n\nIBKA e.V.\nInternationaler Bund der Konfessionslosen und Atheisten\nPostfach 880, D-1000 Berlin 41. Germany.\n\nIBKA publish a journal:\nMIZ. (Materialien und Informationen zur Zeit. Politisches\nJournal der Konfessionslosesn und Atheisten. Hrsg. IBKA e.V.)\nMIZ-Vertrieb, Postfach 880, D-1000 Berlin 41. Germany.\n\nFor atheist books, write to:\n\nIBDK, Internationaler B\"ucherdienst der Konfessionslosen\nPostfach 3005, D-3000 Hannover 1. Germany.\nTelephone: 0511/211216\n\n\n Books -- Fiction\n\nTHOMAS M. DISCH\n\n\"The Santa Claus Compromise\"\nShort story. The ultimate proof that Santa exists. All characters and \nevents are fictitious. Any similarity to living or dead gods -- uh, well...\n\nWALTER M. MILLER, JR\n\n\"A Canticle for Leibowitz\"\nOne gem in this post atomic doomsday novel is the monks who spent their lives\ncopying blueprints from \"Saint Leibowitz\", filling the sheets of paper with\nink and leaving white lines and letters.\n\nEDGAR PANGBORN\n\n\"Davy\"\nPost atomic doomsday novel set in clerical states. The church, for example,\nforbids that anyone \"produce, describe or use any substance containing...\natoms\". \n\nPHILIP K. DICK\n\nPhilip K. Dick Dick wrote many philosophical and thought-provoking short \nstories and novels. His stories are bizarre at times, but very approachable.\nHe wrote mainly SF, but he wrote about people, truth and religion rather than\ntechnology. Although he often believed that he had met some sort of God, he\nremained sceptical. Amongst his novels, the following are of some relevance:\n\n\"Galactic Pot-Healer\"\nA fallible alien deity summons a group of Earth craftsmen and women to a\nremote planet to raise a giant cathedral from beneath the oceans. When the\ndeity begins to demand faith from the earthers, pot-healer Joe Fernwright is\nunable to comply. 
A polished, ironic and amusing novel.\n\n\"A Maze of Death\"\nNoteworthy for its description of a technology-based religion.\n\n\"VALIS\"\nThe schizophrenic hero searches for the hidden mysteries of Gnostic\nChristianity after reality is fired into his brain by a pink laser beam of\nunknown but possibly divine origin. He is accompanied by his dogmatic and\ndismissively atheist friend and assorted other odd characters.\n\n\"The Divine Invasion\"\nGod invades Earth by making a young woman pregnant as she returns from\nanother star system. Unfortunately she is terminally ill, and must be\nassisted by a dead man whose brain is wired to 24-hour easy listening music.\n\nMARGARET ATWOOD\n\n\"The Handmaid's Tale\"\nA story based on the premise that the US Congress is mysteriously\nassassinated, and fundamentalists quickly take charge of the nation to set it\n\"right\" again. The book is the diary of a woman's life as she tries to live\nunder the new Christian theocracy. Women's right to own property is revoked,\nand their bank accounts are closed; sinful luxuries are outlawed, and the\nradio is only used for readings from the Bible. Crimes are punished\nretroactively: doctors who performed legal abortions in the \"old world\" are\nhunted down and hanged. Atwood's writing style is difficult to get used to\nat first, but the tale grows more and more chilling as it goes on.\n\nVARIOUS AUTHORS\n\n\"The Bible\"\nThis somewhat dull and rambling work has often been criticized. However, it\nis probably worth reading, if only so that you'll know what all the fuss is\nabout. It exists in many different versions, so make sure you get the one\ntrue version.\n\n Books -- Non-fiction\n\nPETER DE ROSA\n\n\"Vicars of Christ\", Bantam Press, 1988\nAlthough de Rosa seems to be Christian or even Catholic this is a very\nenlighting history of papal immoralities, adulteries, fallacies etc.\n(German translation: \"Gottes erste Diener. Die dunkle Seite des Papsttums\",\nDroemer-Knaur, 1989)\n\nMICHAEL MARTIN\n\n\"Atheism: A Philosophical Justification\", Temple University Press,\n Philadelphia, USA.\nA detailed and scholarly justification of atheism. Contains an outstanding\nappendix defining terminology and usage in this (necessarily) tendentious\narea. Argues both for \"negative atheism\" (i.e. the \"non-belief in the\nexistence of god(s)\") and also for \"positive atheism\" (\"the belief in the\nnon-existence of god(s)\"). Includes great refutations of the most\nchallenging arguments for god; particular attention is paid to refuting\ncontempory theists such as Platinga and Swinburne.\n541 pages. ISBN 0-87722-642-3 (hardcover; paperback also available)\n\n\"The Case Against Christianity\", Temple University Press\nA comprehensive critique of Christianity, in which he considers\nthe best contemporary defences of Christianity and (ultimately)\ndemonstrates that they are unsupportable and/or incoherent.\n273 pages. ISBN 0-87722-767-5\n\nJAMES TURNER\n\n\"Without God, Without Creed\", The Johns Hopkins University Press, Baltimore,\n MD, USA\nSubtitled \"The Origins of Unbelief in America\". Examines the way in which\nunbelief (whether agnostic or atheistic) became a mainstream alternative\nworld-view. Focusses on the period 1770-1900, and while considering France\nand Britain the emphasis is on American, and particularly New England\ndevelopments. 
\"Neither a religious history of secularization or atheism,\nWithout God, Without Creed is, rather, the intellectual history of the fate\nof a single idea, the belief that God exists.\" \n316 pages. ISBN (hardcover) 0-8018-2494-X (paper) 0-8018-3407-4\n\nGEORGE SELDES (Editor)\n\n\"The great thoughts\", Ballantine Books, New York, USA\nA \"dictionary of quotations\" of a different kind, concentrating on statements\nand writings which, explicitly or implicitly, present the person's philosophy\nand world-view. Includes obscure (and often suppressed) opinions from many\npeople. For some popular observations, traces the way in which various\npeople expressed and twisted the idea over the centuries. Quite a number of\nthe quotations are derived from Cardiff's \"What Great Men Think of Religion\"\nand Noyes' \"Views of Religion\".\n490 pages. ISBN (paper) 0-345-29887-X.\n\nRICHARD SWINBURNE\n\n\"The Existence of God (Revised Edition)\", Clarendon Paperbacks, Oxford\nThis book is the second volume in a trilogy that began with \"The Coherence of\nTheism\" (1977) and was concluded with \"Faith and Reason\" (1981). In this\nwork, Swinburne attempts to construct a series of inductive arguments for the\nexistence of God. His arguments, which are somewhat tendentious and rely\nupon the imputation of late 20th century western Christian values and\naesthetics to a God which is supposedly as simple as can be conceived, were\ndecisively rejected in Mackie's \"The Miracle of Theism\". In the revised\nedition of \"The Existence of God\", Swinburne includes an Appendix in which he\nmakes a somewhat incoherent attempt to rebut Mackie.\n\nJ. L. MACKIE\n\n\"The Miracle of Theism\", Oxford\nThis (posthumous) volume contains a comprehensive review of the principal\narguments for and against the existence of God. It ranges from the classical\nphilosophical positions of Descartes, Anselm, Berkeley, Hume et al, through\nthe moral arguments of Newman, Kant and Sidgwick, to the recent restatements\nof the classical theses by Plantinga and Swinburne. It also addresses those\npositions which push the concept of God beyond the realm of the rational,\nsuch as those of Kierkegaard, Kung and Philips, as well as \"replacements for\nGod\" such as Lelie's axiarchism. The book is a delight to read - less\nformalistic and better written than Martin's works, and refreshingly direct\nwhen compared with the hand-waving of Swinburne.\n\nJAMES A. HAUGHT\n\n\"Holy Horrors: An Illustrated History of Religious Murder and Madness\",\n Prometheus Books\nLooks at religious persecution from ancient times to the present day -- and\nnot only by Christians.\nLibrary of Congress Catalog Card Number 89-64079. 1990.\n\nNORM R. ALLEN, JR.\n\n\"African American Humanism: an Anthology\"\nSee the listing for African Americans for Humanism above.\n\nGORDON STEIN\n\n\"An Anthology of Atheism and Rationalism\", Prometheus Books\nAn anthology covering a wide range of subjects, including 'The Devil, Evil\nand Morality' and 'The History of Freethought'. Comprehensive bibliography.\n\nEDMUND D. COHEN\n\n\"The Mind of The Bible-Believer\", Prometheus Books\nA study of why people become Christian fundamentalists, and what effect it\nhas on them.\n\n Net Resources\n\nThere's a small mail-based archive server at mantis.co.uk which carries\narchives of old alt.atheism.moderated articles and assorted other files. 
For\nmore information, send mail to [email protected] saying\n\n help\n send atheism/index\n\nand it will mail back a reply.\n\n\nmathew\nÿ\nthe length is 11518\n" ], [ "texts[i]= texts[i].strip() #To remove spaces from the beginning and the end \nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "_____no_output_____" ], [ "texts[i] =re.sub(r'\\=+','', texts[i])#To remove any == characters\nprint('after:',texts[i])\nprint('the length is',len(texts[i]))\n", "after: Dean J. Falcione (posting from [email protected]) writes:\n[I wrote:]\n\n>>When the Pens got Mario, granted there was big publicity, etc, etc,\n>>and interest was immediately generated. Gretzky did the same thing for LA. \n>>However, imnsho, neither team would have seen a marked improvement in\n>>attendance if the team record did not improve. In the year before Lemieux\n>>came, Pittsburgh finished with 38 points. Following his arrival, the Pens\n>>finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n ^^\n>>Stanley Cups thrown in.\n \n>It was at this point the Pens attendance was near capacity (34 out of 40 \n>sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n>a 6th place team breaking attendance records when they haven't been to the\n>playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n>You could make a case that the *expectation* of an improving team that\n>would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n>But I think the reason is Lemieux\n>had a 168 point season and was the first non-Gretzky to win the Hart and\n>Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. \nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winning/competitive/improving/butt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n>Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n>They made the transaction to try and build a winner around Mario, that is \n>true. But the improvement in attendance came before they started doing\n>this (Coffey late in 1987) and before they even had a playoff bound team.\n>A doubling of attendance occured in 1984-85 from the previous year. An\n>increase from 38 points to 53 points is not going to do that. The arrival\n>of Mario Lemieux is what did it. \n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n> Similar thing happened in L.A. Before\n>Gretzky's arrival, about 12000 per game. After, constant sellouts. 
They\n>are STILL selling out every game despite showing little or no improvement\n>since Gretzky's first year there. How do you explain it? People are going\n>to see Gretzky. they certainly aren't going to see a winner, they haven't\n>GOT a winner. They've had MUCH better teams in their past history than\n>they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n>I think in the case of a Lemieux or Gretzky, the player can transcend\n>winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n>But winning sure as hell helps. ;-)\n\nWell, at least we are in full agreement here!\n\n>This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? <couldn't resist...>\n\n>getting a HUGE jump in productivity, yet they ARE getting a huge\n>jump in attendance. This is due to the emergence of Teemu Selanne.\n>They have the 17th best record in hockey, it sure as hell isn't because\n>they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5868\n" ], [ "texts[i] =re.sub(r'\\|+','', texts[i])#To remove any | characters\nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from [email protected]) writes:\n[I wrote:]\n\n>>When the Pens got Mario, granted there was big publicity, etc, etc,\n>>and interest was immediately generated. Gretzky did the same thing for LA. \n>>However, imnsho, neither team would have seen a marked improvement in\n>>attendance if the team record did not improve. In the year before Lemieux\n>>came, Pittsburgh finished with 38 points. 
Following his arrival, the Pens\n>>finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n ^^\n>>Stanley Cups thrown in.\n \n>It was at this point the Pens attendance was near capacity (34 out of 40 \n>sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n>a 6th place team breaking attendance records when they haven't been to the\n>playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n>You could make a case that the *expectation* of an improving team that\n>would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n>But I think the reason is Lemieux\n>had a 168 point season and was the first non-Gretzky to win the Hart and\n>Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. \nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winning/competitive/improving/butt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n>Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n>They made the transaction to try and build a winner around Mario, that is \n>true. But the improvement in attendance came before they started doing\n>this (Coffey late in 1987) and before they even had a playoff bound team.\n>A doubling of attendance occured in 1984-85 from the previous year. An\n>increase from 38 points to 53 points is not going to do that. The arrival\n>of Mario Lemieux is what did it. \n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n> Similar thing happened in L.A. Before\n>Gretzky's arrival, about 12000 per game. After, constant sellouts. They\n>are STILL selling out every game despite showing little or no improvement\n>since Gretzky's first year there. How do you explain it? People are going\n>to see Gretzky. they certainly aren't going to see a winner, they haven't\n>GOT a winner. They've had MUCH better teams in their past history than\n>they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. 
I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n>I think in the case of a Lemieux or Gretzky, the player can transcend\n>winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n>But winning sure as hell helps. ;-)\n\nWell, at least we are in full agreement here!\n\n>This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? <couldn't resist...>\n\n>getting a HUGE jump in productivity, yet they ARE getting a huge\n>jump in attendance. This is due to the emergence of Teemu Selanne.\n>They have the 17th best record in hockey, it sure as hell isn't because\n>they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5868\n" ], [ "texts[i] =re.sub(r'\\(\\)+','', texts[i])#To remove any () empty parentheses\nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from [email protected]) writes:\n[I wrote:]\n\n>>When the Pens got Mario, granted there was big publicity, etc, etc,\n>>and interest was immediately generated. Gretzky did the same thing for LA. \n>>However, imnsho, neither team would have seen a marked improvement in\n>>attendance if the team record did not improve. In the year before Lemieux\n>>came, Pittsburgh finished with 38 points. Following his arrival, the Pens\n>>finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n ^^\n>>Stanley Cups thrown in.\n \n>It was at this point the Pens attendance was near capacity (34 out of 40 \n>sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n>a 6th place team breaking attendance records when they haven't been to the\n>playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n>You could make a case that the *expectation* of an improving team that\n>would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n>But I think the reason is Lemieux\n>had a 168 point season and was the first non-Gretzky to win the Hart and\n>Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. 
\nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winning/competitive/improving/butt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n>Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n>They made the transaction to try and build a winner around Mario, that is \n>true. But the improvement in attendance came before they started doing\n>this (Coffey late in 1987) and before they even had a playoff bound team.\n>A doubling of attendance occured in 1984-85 from the previous year. An\n>increase from 38 points to 53 points is not going to do that. The arrival\n>of Mario Lemieux is what did it. \n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n> Similar thing happened in L.A. Before\n>Gretzky's arrival, about 12000 per game. After, constant sellouts. They\n>are STILL selling out every game despite showing little or no improvement\n>since Gretzky's first year there. How do you explain it? People are going\n>to see Gretzky. they certainly aren't going to see a winner, they haven't\n>GOT a winner. They've had MUCH better teams in their past history than\n>they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n>I think in the case of a Lemieux or Gretzky, the player can transcend\n>winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n>But winning sure as hell helps. 
;-)\n\nWell, at least we are in full agreement here!\n\n>This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? <couldn't resist...>\n\n>getting a HUGE jump in productivity, yet they ARE getting a huge\n>jump in attendance. This is due to the emergence of Teemu Selanne.\n>They have the 17th best record in hockey, it sure as hell isn't because\n>they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5868\n" ], [ "texts[i] =re.sub(r'\\[\\]+','', texts[i])#To remove any [] characters\nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from [email protected]) writes:\n[I wrote:]\n\n>>When the Pens got Mario, granted there was big publicity, etc, etc,\n>>and interest was immediately generated. Gretzky did the same thing for LA. \n>>However, imnsho, neither team would have seen a marked improvement in\n>>attendance if the team record did not improve. In the year before Lemieux\n>>came, Pittsburgh finished with 38 points. Following his arrival, the Pens\n>>finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n ^^\n>>Stanley Cups thrown in.\n \n>It was at this point the Pens attendance was near capacity (34 out of 40 \n>sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n>a 6th place team breaking attendance records when they haven't been to the\n>playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n>You could make a case that the *expectation* of an improving team that\n>would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n>But I think the reason is Lemieux\n>had a 168 point season and was the first non-Gretzky to win the Hart and\n>Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. \nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winning/competitive/improving/butt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n>Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n>They made the transaction to try and build a winner around Mario, that is \n>true. But the improvement in attendance came before they started doing\n>this (Coffey late in 1987) and before they even had a playoff bound team.\n>A doubling of attendance occured in 1984-85 from the previous year. An\n>increase from 38 points to 53 points is not going to do that. The arrival\n>of Mario Lemieux is what did it. 
\n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n> Similar thing happened in L.A. Before\n>Gretzky's arrival, about 12000 per game. After, constant sellouts. They\n>are STILL selling out every game despite showing little or no improvement\n>since Gretzky's first year there. How do you explain it? People are going\n>to see Gretzky. they certainly aren't going to see a winner, they haven't\n>GOT a winner. They've had MUCH better teams in their past history than\n>they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n>I think in the case of a Lemieux or Gretzky, the player can transcend\n>winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n>But winning sure as hell helps. ;-)\n\nWell, at least we are in full agreement here!\n\n>This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? <couldn't resist...>\n\n>getting a HUGE jump in productivity, yet they ARE getting a huge\n>jump in attendance. This is due to the emergence of Teemu Selanne.\n>They have the 17th best record in hockey, it sure as hell isn't because\n>they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? 
I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5868\n" ], [ "texts[i] = re.sub(\"[<>]\", \" \",texts[i])#To remove < and > characters\nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from [email protected]) writes:\n[I wrote:]\n\n When the Pens got Mario, granted there was big publicity, etc, etc,\n and interest was immediately generated. Gretzky did the same thing for LA. \n However, imnsho, neither team would have seen a marked improvement in\n attendance if the team record did not improve. In the year before Lemieux\n came, Pittsburgh finished with 38 points. Following his arrival, the Pens\n finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n ^^\n Stanley Cups thrown in.\n \n It was at this point the Pens attendance was near capacity (34 out of 40 \n sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n a 6th place team breaking attendance records when they haven't been to the\n playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n You could make a case that the *expectation* of an improving team that\n would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n But I think the reason is Lemieux\n had a 168 point season and was the first non-Gretzky to win the Hart and\n Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. \nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winning/competitive/improving/butt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n They made the transaction to try and build a winner around Mario, that is \n true. But the improvement in attendance came before they started doing\n this (Coffey late in 1987) and before they even had a playoff bound team.\n A doubling of attendance occured in 1984-85 from the previous year. An\n increase from 38 points to 53 points is not going to do that. The arrival\n of Mario Lemieux is what did it. \n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n Similar thing happened in L.A. Before\n Gretzky's arrival, about 12000 per game. After, constant sellouts. 
They\n are STILL selling out every game despite showing little or no improvement\n since Gretzky's first year there. How do you explain it? People are going\n to see Gretzky. they certainly aren't going to see a winner, they haven't\n GOT a winner. They've had MUCH better teams in their past history than\n they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n I think in the case of a Lemieux or Gretzky, the player can transcend\n winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n But winning sure as hell helps. ;-)\n\nWell, at least we are in full agreement here!\n\n This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? couldn't resist... \n\n getting a HUGE jump in productivity, yet they ARE getting a huge\n jump in attendance. This is due to the emergence of Teemu Selanne.\n They have the 17th best record in hockey, it sure as hell isn't because\n they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5868\n" ], [ "texts[i] = re.sub('([\\w\\.-]+)@([\\w\\.-]+)','', texts[i])#To remove any emails \nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from jrmst+) writes:\n[I wrote:]\n\n When the Pens got Mario, granted there was big publicity, etc, etc,\n and interest was immediately generated. Gretzky did the same thing for LA. \n However, imnsho, neither team would have seen a marked improvement in\n attendance if the team record did not improve. In the year before Lemieux\n came, Pittsburgh finished with 38 points. 
Following his arrival, the Pens\n finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n ^^\n Stanley Cups thrown in.\n \n It was at this point the Pens attendance was near capacity (34 out of 40 \n sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n a 6th place team breaking attendance records when they haven't been to the\n playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n You could make a case that the *expectation* of an improving team that\n would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n But I think the reason is Lemieux\n had a 168 point season and was the first non-Gretzky to win the Hart and\n Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. \nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winning/competitive/improving/butt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n They made the transaction to try and build a winner around Mario, that is \n true. But the improvement in attendance came before they started doing\n this (Coffey late in 1987) and before they even had a playoff bound team.\n A doubling of attendance occured in 1984-85 from the previous year. An\n increase from 38 points to 53 points is not going to do that. The arrival\n of Mario Lemieux is what did it. \n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n Similar thing happened in L.A. Before\n Gretzky's arrival, about 12000 per game. After, constant sellouts. They\n are STILL selling out every game despite showing little or no improvement\n since Gretzky's first year there. How do you explain it? People are going\n to see Gretzky. they certainly aren't going to see a winner, they haven't\n GOT a winner. They've had MUCH better teams in their past history than\n they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. 
I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n I think in the case of a Lemieux or Gretzky, the player can transcend\n winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n But winning sure as hell helps. ;-)\n\nWell, at least we are in full agreement here!\n\n This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? couldn't resist... \n\n getting a HUGE jump in productivity, yet they ARE getting a huge\n jump in attendance. This is due to the emergence of Teemu Selanne.\n They have the 17th best record in hockey, it sure as hell isn't because\n they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5858\n" ], [ "texts[i] = re.sub(r\"/*\\\\*/*\",'', texts[i])#To remove \\/ characters\nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from jrmst+) writes:\n[I wrote:]\n\n When the Pens got Mario, granted there was big publicity, etc, etc,\n and interest was immediately generated. Gretzky did the same thing for LA. \n However, imnsho, neither team would have seen a marked improvement in\n attendance if the team record did not improve. In the year before Lemieux\n came, Pittsburgh finished with 38 points. Following his arrival, the Pens\n finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n ^^\n Stanley Cups thrown in.\n \n It was at this point the Pens attendance was near capacity (34 out of 40 \n sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n a 6th place team breaking attendance records when they haven't been to the\n playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n You could make a case that the *expectation* of an improving team that\n would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n But I think the reason is Lemieux\n had a 168 point season and was the first non-Gretzky to win the Hart and\n Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. 
\nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winningcompetitiveimprovingbutt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n They made the transaction to try and build a winner around Mario, that is \n true. But the improvement in attendance came before they started doing\n this (Coffey late in 1987) and before they even had a playoff bound team.\n A doubling of attendance occured in 1984-85 from the previous year. An\n increase from 38 points to 53 points is not going to do that. The arrival\n of Mario Lemieux is what did it. \n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n Similar thing happened in L.A. Before\n Gretzky's arrival, about 12000 per game. After, constant sellouts. They\n are STILL selling out every game despite showing little or no improvement\n since Gretzky's first year there. How do you explain it? People are going\n to see Gretzky. they certainly aren't going to see a winner, they haven't\n GOT a winner. They've had MUCH better teams in their past history than\n they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n I think in the case of a Lemieux or Gretzky, the player can transcend\n winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n But winning sure as hell helps. 
;-)\n\nWell, at least we are in full agreement here!\n\n This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? couldn't resist... \n\n getting a HUGE jump in productivity, yet they ARE getting a huge\n jump in attendance. This is due to the emergence of Teemu Selanne.\n They have the 17th best record in hockey, it sure as hell isn't because\n they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5855\n" ], [ "texts[i] = re.sub('\\^+','', texts[i])#To remove ^ characters \nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from jrmst+) writes:\n[I wrote:]\n\n When the Pens got Mario, granted there was big publicity, etc, etc,\n and interest was immediately generated. Gretzky did the same thing for LA. \n However, imnsho, neither team would have seen a marked improvement in\n attendance if the team record did not improve. In the year before Lemieux\n came, Pittsburgh finished with 38 points. Following his arrival, the Pens\n finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n \n Stanley Cups thrown in.\n \n It was at this point the Pens attendance was near capacity (34 out of 40 \n sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n a 6th place team breaking attendance records when they haven't been to the\n playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n You could make a case that the *expectation* of an improving team that\n would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n But I think the reason is Lemieux\n had a 168 point season and was the first non-Gretzky to win the Hart and\n Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. \nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winningcompetitiveimprovingbutt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n They made the transaction to try and build a winner around Mario, that is \n true. But the improvement in attendance came before they started doing\n this (Coffey late in 1987) and before they even had a playoff bound team.\n A doubling of attendance occured in 1984-85 from the previous year. An\n increase from 38 points to 53 points is not going to do that. The arrival\n of Mario Lemieux is what did it. 
\n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n Similar thing happened in L.A. Before\n Gretzky's arrival, about 12000 per game. After, constant sellouts. They\n are STILL selling out every game despite showing little or no improvement\n since Gretzky's first year there. How do you explain it? People are going\n to see Gretzky. they certainly aren't going to see a winner, they haven't\n GOT a winner. They've had MUCH better teams in their past history than\n they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n I think in the case of a Lemieux or Gretzky, the player can transcend\n winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n But winning sure as hell helps. ;-)\n\nWell, at least we are in full agreement here!\n\n This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? couldn't resist... \n\n getting a HUGE jump in productivity, yet they ARE getting a huge\n jump in attendance. This is due to the emergence of Teemu Selanne.\n They have the 17th best record in hockey, it sure as hell isn't because\n they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? 
I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5853\n" ], [ "texts[i] = re.sub(\"[__]+\", \" \", texts[i])#To remove lines\nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from jrmst+) writes:\n[I wrote:]\n\n When the Pens got Mario, granted there was big publicity, etc, etc,\n and interest was immediately generated. Gretzky did the same thing for LA. \n However, imnsho, neither team would have seen a marked improvement in\n attendance if the team record did not improve. In the year before Lemieux\n came, Pittsburgh finished with 38 points. Following his arrival, the Pens\n finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n \n Stanley Cups thrown in.\n \n It was at this point the Pens attendance was near capacity (34 out of 40 \n sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n a 6th place team breaking attendance records when they haven't been to the\n playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n You could make a case that the *expectation* of an improving team that\n would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n But I think the reason is Lemieux\n had a 168 point season and was the first non-Gretzky to win the Hart and\n Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. \nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winningcompetitiveimprovingbutt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n They made the transaction to try and build a winner around Mario, that is \n true. But the improvement in attendance came before they started doing\n this (Coffey late in 1987) and before they even had a playoff bound team.\n A doubling of attendance occured in 1984-85 from the previous year. An\n increase from 38 points to 53 points is not going to do that. The arrival\n of Mario Lemieux is what did it. \n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n Similar thing happened in L.A. Before\n Gretzky's arrival, about 12000 per game. After, constant sellouts. 
They\n are STILL selling out every game despite showing little or no improvement\n since Gretzky's first year there. How do you explain it? People are going\n to see Gretzky. they certainly aren't going to see a winner, they haven't\n GOT a winner. They've had MUCH better teams in their past history than\n they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n I think in the case of a Lemieux or Gretzky, the player can transcend\n winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n But winning sure as hell helps. ;-)\n\nWell, at least we are in full agreement here!\n\n This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? couldn't resist... \n\n getting a HUGE jump in productivity, yet they ARE getting a huge\n jump in attendance. This is due to the emergence of Teemu Selanne.\n They have the 17th best record in hockey, it sure as hell isn't because\n they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5853\n" ], [ "texts[i] =re.sub('--+', ' ',texts[i])#To remove multiple spaces \nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from jrmst+) writes:\n[I wrote:]\n\n When the Pens got Mario, granted there was big publicity, etc, etc,\n and interest was immediately generated. Gretzky did the same thing for LA. \n However, imnsho, neither team would have seen a marked improvement in\n attendance if the team record did not improve. In the year before Lemieux\n came, Pittsburgh finished with 38 points. 
Following his arrival, the Pens\n finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n \n Stanley Cups thrown in.\n \n It was at this point the Pens attendance was near capacity (34 out of 40 \n sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n a 6th place team breaking attendance records when they haven't been to the\n playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n You could make a case that the *expectation* of an improving team that\n would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n But I think the reason is Lemieux\n had a 168 point season and was the first non-Gretzky to win the Hart and\n Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. \nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winningcompetitiveimprovingbutt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n They made the transaction to try and build a winner around Mario, that is \n true. But the improvement in attendance came before they started doing\n this (Coffey late in 1987) and before they even had a playoff bound team.\n A doubling of attendance occured in 1984-85 from the previous year. An\n increase from 38 points to 53 points is not going to do that. The arrival\n of Mario Lemieux is what did it. \n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n Similar thing happened in L.A. Before\n Gretzky's arrival, about 12000 per game. After, constant sellouts. They\n are STILL selling out every game despite showing little or no improvement\n since Gretzky's first year there. How do you explain it? People are going\n to see Gretzky. they certainly aren't going to see a winner, they haven't\n GOT a winner. They've had MUCH better teams in their past history than\n they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. 
I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n I think in the case of a Lemieux or Gretzky, the player can transcend\n winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n But winning sure as hell helps. ;-)\n\nWell, at least we are in full agreement here!\n\n This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? couldn't resist... \n\n getting a HUGE jump in productivity, yet they ARE getting a huge\n jump in attendance. This is due to the emergence of Teemu Selanne.\n They have the 17th best record in hockey, it sure as hell isn't because\n they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5853\n" ], [ "texts[i] =re.sub(r'\\~\\~+','', texts[i])#To remove any == characters\nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from jrmst+) writes:\n[I wrote:]\n\n When the Pens got Mario, granted there was big publicity, etc, etc,\n and interest was immediately generated. Gretzky did the same thing for LA. \n However, imnsho, neither team would have seen a marked improvement in\n attendance if the team record did not improve. In the year before Lemieux\n came, Pittsburgh finished with 38 points. Following his arrival, the Pens\n finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of\n \n Stanley Cups thrown in.\n \n It was at this point the Pens attendance was near capacity (34 out of 40 \n sellouts) yet they hadn't made the playoffs since 1982. How do you explain\n a 6th place team breaking attendance records when they haven't been to the\n playoffs in 7 years? Mario Lemieux is the explanation, IMHO. \n\n You could make a case that the *expectation* of an improving team that\n would make the playoffs is the reason. \n\nFunny you should mention it...this is exactly the case I was going to make.\n\n But I think the reason is Lemieux\n had a 168 point season and was the first non-Gretzky to win the Hart and\n Ross since 1980. People turned out to watch him play. \n\nI will grant that a star like Mario will draw fans, even if the team sucks. 
\nBut this is short term only; I still do not think the attendance increase \nwill last, unless the team is a winningcompetitiveimprovingbutt-kicking\none. Pittsburgh was still getting better, so people continued to support\nthem. If they suddenly dropped to, say, 50 points, you'd have knee surgery\nfor some of the people jumping off the bandwagon. \n\n Also, the following year (88-89) the Pens had 89 points not 87. \n\nOk. My numbers came from the NHL Guide and Record Book. \n\n They made the transaction to try and build a winner around Mario, that is \n true. But the improvement in attendance came before they started doing\n this (Coffey late in 1987) and before they even had a playoff bound team.\n A doubling of attendance occured in 1984-85 from the previous year. An\n increase from 38 points to 53 points is not going to do that. The arrival\n of Mario Lemieux is what did it. \n\nYou can give the credit to Mario since he deserves it. But my point is that\nit wasn't Mario himself, but it was the *expectation* of things to come (i.e.\na winning team) that he created by being the next great hockey superstar. And\nbefore anybody jumps in and says I'm nit-picking and mincing words, go back\nand read from where this thread started...\n\nIt might help to think about what would go through a fan's mind who suddenly\nfound an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is\namazing, I'll go watch him play\", or was it \"gee, now we've got a *kick*\n*ass* guy on *our* side, I'll go watch him play\". I think it was the latter.\n\n Similar thing happened in L.A. Before\n Gretzky's arrival, about 12000 per game. After, constant sellouts. They\n are STILL selling out every game despite showing little or no improvement\n since Gretzky's first year there. How do you explain it? People are going\n to see Gretzky. they certainly aren't going to see a winner, they haven't\n GOT a winner. They've had MUCH better teams in their past history than\n they currently have, yet they didn't draw as well then.\n\nI don't think this is accurate. The *tickets* sell, but people don't go to\nthe games. I think this thread has already been discussed...season ticket\nholders in LA don't always use their tickets. So in effect, after the Kings\ninitial success following Gretzky's arrival (68 to 91 points, same source)\nand corresponding attendance jump, there has been an effective drop in\nattendance even though ticket sales may not have changed much. \n\nWhether or not the Kings are a 'winner' is debatable. I claim that since\nGretzky's arrival they have at the very least been competitive...I also claim\nthat McNall has made a stupid move in trying to reassemble the Oiler\ndynasty...but that's another story and included only because I don't like\nMcNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and\nthat undoubtedly was also responsible for the attendance and merchandising\nsales, etc. But as I said, when the Kings have been in there little\ntailspins over the past couple of years there have been empty seats at the\nForum even if the tickets were sold.\n\n I think in the case of a Lemieux or Gretzky, the player can transcend\n winning as the major drawing power. \n\nFor the short term, IMO. Although I think that it's inevitable that the team\nwill improve with a player such as Lemieux or Gretzky, simply because they\nmake people around them better.\n\n But winning sure as hell helps. 
;-)\n\nWell, at least we are in full agreement here!\n\n This does not make Roger's point any more valid, but the Jets aren't\n\nSo are you saying Roger has ever had a valid point? couldn't resist... \n\n getting a HUGE jump in productivity, yet they ARE getting a huge\n jump in attendance. This is due to the emergence of Teemu Selanne.\n They have the 17th best record in hockey, it sure as hell isn't because\n they are winning.\n\nYes, but they are doing no worse than last year. I think the same type of\nreasoning I applied to a new Pittsburgh fan applies to all the extra people\nshowing up at Winnipeg games. It's difficult to predict, but do you think\nthat if the Jets miss the playoffs next season that in the year after they\nwill maintain their attendance levels? I seriously doubt it, because in that\ncase the expectation of an improving team would be gone, with or without\nSelanne.\n\nI did provide the example of Rocket Ismail and the Toronto Argonauts of the \nCFL...did you leave it out because you don't know much about the CFL? If \nthat's the case then fair enough, but if it isn't the case then I'm curious\nto hear your explanation.\nthe length is 5853\n" ], [ "texts[i] = re.sub(\"\\n\", \" \", texts[i])#To remove lines\nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from jrmst+) writes: [I wrote:] When the Pens got Mario, granted there was big publicity, etc, etc, and interest was immediately generated. Gretzky did the same thing for LA. However, imnsho, neither team would have seen a marked improvement in attendance if the team record did not improve. In the year before Lemieux came, Pittsburgh finished with 38 points. Following his arrival, the Pens finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of Stanley Cups thrown in. It was at this point the Pens attendance was near capacity (34 out of 40 sellouts) yet they hadn't made the playoffs since 1982. How do you explain a 6th place team breaking attendance records when they haven't been to the playoffs in 7 years? Mario Lemieux is the explanation, IMHO. You could make a case that the *expectation* of an improving team that would make the playoffs is the reason. Funny you should mention it...this is exactly the case I was going to make. But I think the reason is Lemieux had a 168 point season and was the first non-Gretzky to win the Hart and Ross since 1980. People turned out to watch him play. I will grant that a star like Mario will draw fans, even if the team sucks. But this is short term only; I still do not think the attendance increase will last, unless the team is a winningcompetitiveimprovingbutt-kicking one. Pittsburgh was still getting better, so people continued to support them. If they suddenly dropped to, say, 50 points, you'd have knee surgery for some of the people jumping off the bandwagon. Also, the following year (88-89) the Pens had 89 points not 87. Ok. My numbers came from the NHL Guide and Record Book. They made the transaction to try and build a winner around Mario, that is true. But the improvement in attendance came before they started doing this (Coffey late in 1987) and before they even had a playoff bound team. A doubling of attendance occured in 1984-85 from the previous year. An increase from 38 points to 53 points is not going to do that. The arrival of Mario Lemieux is what did it. You can give the credit to Mario since he deserves it. 
But my point is that it wasn't Mario himself, but it was the *expectation* of things to come (i.e. a winning team) that he created by being the next great hockey superstar. And before anybody jumps in and says I'm nit-picking and mincing words, go back and read from where this thread started... It might help to think about what would go through a fan's mind who suddenly found an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is amazing, I'll go watch him play\", or was it \"gee, now we've got a *kick* *ass* guy on *our* side, I'll go watch him play\". I think it was the latter. Similar thing happened in L.A. Before Gretzky's arrival, about 12000 per game. After, constant sellouts. They are STILL selling out every game despite showing little or no improvement since Gretzky's first year there. How do you explain it? People are going to see Gretzky. they certainly aren't going to see a winner, they haven't GOT a winner. They've had MUCH better teams in their past history than they currently have, yet they didn't draw as well then. I don't think this is accurate. The *tickets* sell, but people don't go to the games. I think this thread has already been discussed...season ticket holders in LA don't always use their tickets. So in effect, after the Kings initial success following Gretzky's arrival (68 to 91 points, same source) and corresponding attendance jump, there has been an effective drop in attendance even though ticket sales may not have changed much. Whether or not the Kings are a 'winner' is debatable. I claim that since Gretzky's arrival they have at the very least been competitive...I also claim that McNall has made a stupid move in trying to reassemble the Oiler dynasty...but that's another story and included only because I don't like McNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and that undoubtedly was also responsible for the attendance and merchandising sales, etc. But as I said, when the Kings have been in there little tailspins over the past couple of years there have been empty seats at the Forum even if the tickets were sold. I think in the case of a Lemieux or Gretzky, the player can transcend winning as the major drawing power. For the short term, IMO. Although I think that it's inevitable that the team will improve with a player such as Lemieux or Gretzky, simply because they make people around them better. But winning sure as hell helps. ;-) Well, at least we are in full agreement here! This does not make Roger's point any more valid, but the Jets aren't So are you saying Roger has ever had a valid point? couldn't resist... getting a HUGE jump in productivity, yet they ARE getting a huge jump in attendance. This is due to the emergence of Teemu Selanne. They have the 17th best record in hockey, it sure as hell isn't because they are winning. Yes, but they are doing no worse than last year. I think the same type of reasoning I applied to a new Pittsburgh fan applies to all the extra people showing up at Winnipeg games. It's difficult to predict, but do you think that if the Jets miss the playoffs next season that in the year after they will maintain their attendance levels? I seriously doubt it, because in that case the expectation of an improving team would be gone, with or without Selanne. I did provide the example of Rocket Ismail and the Toronto Argonauts of the CFL...did you leave it out because you don't know much about the CFL? 
If that's the case then fair enough, but if it isn't the case then I'm curious to hear your explanation.\nthe length is 5853\n" ], [ "texts[i] =re.sub('\\t', ' ',texts[i])#To remove multiple spaces \nprint('after:',texts[i])\nprint('the length is',len(texts[i]))\n", "after: Dean J. Falcione (posting from jrmst+) writes: [I wrote:] When the Pens got Mario, granted there was big publicity, etc, etc, and interest was immediately generated. Gretzky did the same thing for LA. However, imnsho, neither team would have seen a marked improvement in attendance if the team record did not improve. In the year before Lemieux came, Pittsburgh finished with 38 points. Following his arrival, the Pens finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of Stanley Cups thrown in. It was at this point the Pens attendance was near capacity (34 out of 40 sellouts) yet they hadn't made the playoffs since 1982. How do you explain a 6th place team breaking attendance records when they haven't been to the playoffs in 7 years? Mario Lemieux is the explanation, IMHO. You could make a case that the *expectation* of an improving team that would make the playoffs is the reason. Funny you should mention it...this is exactly the case I was going to make. But I think the reason is Lemieux had a 168 point season and was the first non-Gretzky to win the Hart and Ross since 1980. People turned out to watch him play. I will grant that a star like Mario will draw fans, even if the team sucks. But this is short term only; I still do not think the attendance increase will last, unless the team is a winningcompetitiveimprovingbutt-kicking one. Pittsburgh was still getting better, so people continued to support them. If they suddenly dropped to, say, 50 points, you'd have knee surgery for some of the people jumping off the bandwagon. Also, the following year (88-89) the Pens had 89 points not 87. Ok. My numbers came from the NHL Guide and Record Book. They made the transaction to try and build a winner around Mario, that is true. But the improvement in attendance came before they started doing this (Coffey late in 1987) and before they even had a playoff bound team. A doubling of attendance occured in 1984-85 from the previous year. An increase from 38 points to 53 points is not going to do that. The arrival of Mario Lemieux is what did it. You can give the credit to Mario since he deserves it. But my point is that it wasn't Mario himself, but it was the *expectation* of things to come (i.e. a winning team) that he created by being the next great hockey superstar. And before anybody jumps in and says I'm nit-picking and mincing words, go back and read from where this thread started... It might help to think about what would go through a fan's mind who suddenly found an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is amazing, I'll go watch him play\", or was it \"gee, now we've got a *kick* *ass* guy on *our* side, I'll go watch him play\". I think it was the latter. Similar thing happened in L.A. Before Gretzky's arrival, about 12000 per game. After, constant sellouts. They are STILL selling out every game despite showing little or no improvement since Gretzky's first year there. How do you explain it? People are going to see Gretzky. they certainly aren't going to see a winner, they haven't GOT a winner. They've had MUCH better teams in their past history than they currently have, yet they didn't draw as well then. I don't think this is accurate. 
The *tickets* sell, but people don't go to the games. I think this thread has already been discussed...season ticket holders in LA don't always use their tickets. So in effect, after the Kings initial success following Gretzky's arrival (68 to 91 points, same source) and corresponding attendance jump, there has been an effective drop in attendance even though ticket sales may not have changed much. Whether or not the Kings are a 'winner' is debatable. I claim that since Gretzky's arrival they have at the very least been competitive...I also claim that McNall has made a stupid move in trying to reassemble the Oiler dynasty...but that's another story and included only because I don't like McNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and that undoubtedly was also responsible for the attendance and merchandising sales, etc. But as I said, when the Kings have been in there little tailspins over the past couple of years there have been empty seats at the Forum even if the tickets were sold. I think in the case of a Lemieux or Gretzky, the player can transcend winning as the major drawing power. For the short term, IMO. Although I think that it's inevitable that the team will improve with a player such as Lemieux or Gretzky, simply because they make people around them better. But winning sure as hell helps. ;-) Well, at least we are in full agreement here! This does not make Roger's point any more valid, but the Jets aren't So are you saying Roger has ever had a valid point? couldn't resist... getting a HUGE jump in productivity, yet they ARE getting a huge jump in attendance. This is due to the emergence of Teemu Selanne. They have the 17th best record in hockey, it sure as hell isn't because they are winning. Yes, but they are doing no worse than last year. I think the same type of reasoning I applied to a new Pittsburgh fan applies to all the extra people showing up at Winnipeg games. It's difficult to predict, but do you think that if the Jets miss the playoffs next season that in the year after they will maintain their attendance levels? I seriously doubt it, because in that case the expectation of an improving team would be gone, with or without Selanne. I did provide the example of Rocket Ismail and the Toronto Argonauts of the CFL...did you leave it out because you don't know much about the CFL? If that's the case then fair enough, but if it isn't the case then I'm curious to hear your explanation.\nthe length is 5853\n" ], [ "texts[i] =re.sub(' +', ' ',texts[i])#To remove multiple spaces \nprint('after:',texts[i])\nprint('the length is',len(texts[i]))", "after: Dean J. Falcione (posting from jrmst+) writes: [I wrote:] When the Pens got Mario, granted there was big publicity, etc, etc, and interest was immediately generated. Gretzky did the same thing for LA. However, imnsho, neither team would have seen a marked improvement in attendance if the team record did not improve. In the year before Lemieux came, Pittsburgh finished with 38 points. Following his arrival, the Pens finished with 53, 76, 72, 81, 87, 72, 88, and 87 points, with a couple of Stanley Cups thrown in. It was at this point the Pens attendance was near capacity (34 out of 40 sellouts) yet they hadn't made the playoffs since 1982. How do you explain a 6th place team breaking attendance records when they haven't been to the playoffs in 7 years? Mario Lemieux is the explanation, IMHO. You could make a case that the *expectation* of an improving team that would make the playoffs is the reason. 
Funny you should mention it...this is exactly the case I was going to make. But I think the reason is Lemieux had a 168 point season and was the first non-Gretzky to win the Hart and Ross since 1980. People turned out to watch him play. I will grant that a star like Mario will draw fans, even if the team sucks. But this is short term only; I still do not think the attendance increase will last, unless the team is a winningcompetitiveimprovingbutt-kicking one. Pittsburgh was still getting better, so people continued to support them. If they suddenly dropped to, say, 50 points, you'd have knee surgery for some of the people jumping off the bandwagon. Also, the following year (88-89) the Pens had 89 points not 87. Ok. My numbers came from the NHL Guide and Record Book. They made the transaction to try and build a winner around Mario, that is true. But the improvement in attendance came before they started doing this (Coffey late in 1987) and before they even had a playoff bound team. A doubling of attendance occured in 1984-85 from the previous year. An increase from 38 points to 53 points is not going to do that. The arrival of Mario Lemieux is what did it. You can give the credit to Mario since he deserves it. But my point is that it wasn't Mario himself, but it was the *expectation* of things to come (i.e. a winning team) that he created by being the next great hockey superstar. And before anybody jumps in and says I'm nit-picking and mincing words, go back and read from where this thread started... It might help to think about what would go through a fan's mind who suddenly found an interest in Mario and the Pens. Was it \"gee, Mario Lemieux is amazing, I'll go watch him play\", or was it \"gee, now we've got a *kick* *ass* guy on *our* side, I'll go watch him play\". I think it was the latter. Similar thing happened in L.A. Before Gretzky's arrival, about 12000 per game. After, constant sellouts. They are STILL selling out every game despite showing little or no improvement since Gretzky's first year there. How do you explain it? People are going to see Gretzky. they certainly aren't going to see a winner, they haven't GOT a winner. They've had MUCH better teams in their past history than they currently have, yet they didn't draw as well then. I don't think this is accurate. The *tickets* sell, but people don't go to the games. I think this thread has already been discussed...season ticket holders in LA don't always use their tickets. So in effect, after the Kings initial success following Gretzky's arrival (68 to 91 points, same source) and corresponding attendance jump, there has been an effective drop in attendance even though ticket sales may not have changed much. Whether or not the Kings are a 'winner' is debatable. I claim that since Gretzky's arrival they have at the very least been competitive...I also claim that McNall has made a stupid move in trying to reassemble the Oiler dynasty...but that's another story and included only because I don't like McNall:-). Anyway, McNall did do some heavy marketing around Gretzky, and that undoubtedly was also responsible for the attendance and merchandising sales, etc. But as I said, when the Kings have been in there little tailspins over the past couple of years there have been empty seats at the Forum even if the tickets were sold. I think in the case of a Lemieux or Gretzky, the player can transcend winning as the major drawing power. For the short term, IMO. 
Although I think that it's inevitable that the team will improve with a player such as Lemieux or Gretzky, simply because they make people around them better. But winning sure as hell helps. ;-) Well, at least we are in full agreement here! This does not make Roger's point any more valid, but the Jets aren't So are you saying Roger has ever had a valid point? couldn't resist... getting a HUGE jump in productivity, yet they ARE getting a huge jump in attendance. This is due to the emergence of Teemu Selanne. They have the 17th best record in hockey, it sure as hell isn't because they are winning. Yes, but they are doing no worse than last year. I think the same type of reasoning I applied to a new Pittsburgh fan applies to all the extra people showing up at Winnipeg games. It's difficult to predict, but do you think that if the Jets miss the playoffs next season that in the year after they will maintain their attendance levels? I seriously doubt it, because in that case the expectation of an improving team would be gone, with or without Selanne. I did provide the example of Rocket Ismail and the Toronto Argonauts of the CFL...did you leave it out because you don't know much about the CFL? If that's the case then fair enough, but if it isn't the case then I'm curious to hear your explanation.\nthe length is 5692\n" ] ], [ [ "## Note: changing the order of these steps will change the result", "_____no_output_____" ], [ "# We still need to remove overly long words and stray special characters, but that step belongs in the tokenization process", "_____no_output_____" ] ], [ [ "####################### Doesn't work #######################\n# Note: re.match only matches at the start of a string, and the slice below\n# operates on the texts list rather than on a single document string,\n# so this cell is kept commented out.\n\n# match=re.match('Version: ', texts[1])\n# if match:\n#     index = match.start()\n# #     print(texts[0:index])\n#     texts[i]=texts[0:index]\n#     ", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ] ]
cb53fa7f0e565ef4bd336e59ad715aceca9a9aab
12,517
ipynb
Jupyter Notebook
Stock_Algorithms/30_Regression_Models.ipynb
sharavsambuu/Deep-Learning-Machine-Learning-Stock
85e8a66cc203d3e1bdaedf40034d567224275d30
[ "MIT" ]
569
2019-02-06T16:35:19.000Z
2022-03-31T03:45:28.000Z
Stock_Algorithms/30_Regression_Models.ipynb
crazyguitar/Deep-Learning-Machine-Learning-Stock
99b4f30c3315806e8098327544d3d8cccfea8d65
[ "MIT" ]
5
2021-02-27T07:03:58.000Z
2022-03-31T14:09:41.000Z
Stock_Algorithms/30_Regression_Models.ipynb
ysdede/Deep-Learning-Machine-Learning-Stock
2e3794efab3276b6bc389c8b38615540d4e2b144
[ "MIT" ]
174
2019-05-23T11:46:54.000Z
2022-03-31T04:44:38.000Z
46.359259
1,614
0.405449
[ [ [ "# 30 Regression Models", "_____no_output_____" ] ], [ [ "from lazypredict.Supervised import LazyRegressor\nfrom pandas.plotting import scatter_matrix# Scikit-learn packages\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn.tree import DecisionTreeRegressor\nfrom sklearn.ensemble import ExtraTreesRegressor\nfrom sklearn import metrics\nfrom sklearn.metrics import mean_squared_error# Hide warnings\nfrom sklearn.model_selection import train_test_split \n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nimport yfinance as yf\nyf.pdr_override()", "C:\\Users\\Tin Hang\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:143: FutureWarning: The sklearn.utils.testing module is deprecated in version 0.22 and will be removed in version 0.24. The corresponding classes / functions should instead be imported from sklearn.utils. Anything that cannot be imported from sklearn.utils is now part of the private API.\n warnings.warn(message, FutureWarning)\n" ], [ "# input\nsymbol = 'AMD'\nstart = '2014-01-01'\nend = '2018-08-27'\n\n# Read data \ndataset = yf.download(symbol,start,end)\n\n# Only keep close columns \ndataset.head()", "[*********************100%***********************] 1 of 1 completed\n" ], [ "# Creating train test split\nX = dataset.drop(columns=['Adj Close'])\ny = dataset['Adj Close']\n\noffset = int(X.shape[0] * 0.9)\n\nX_train, y_train = X[:offset], y[:offset]\nX_test, y_test = X[offset:], y[offset:]", "_____no_output_____" ], [ "reg = LazyRegressor(verbose=0, ignore_warnings=False, custom_metric=None)\nmodels, predictions = reg.fit(X_train, X_test, y_train, y_test)\nprint(models)", "100%|██████████| 42/42 [00:02<00:00, 17.49it/s]\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code" ] ]
cb53fb5a7b1116788df7790f29de00a7e4440ea4
6,441
ipynb
Jupyter Notebook
01-Iterators and Generators Homework.ipynb
ahmednourelden/Assignment7
9f305bf97827b2ddf8d229428a9b1e73f1cd96ba
[ "MIT" ]
null
null
null
01-Iterators and Generators Homework.ipynb
ahmednourelden/Assignment7
9f305bf97827b2ddf8d229428a9b1e73f1cd96ba
[ "MIT" ]
null
null
null
01-Iterators and Generators Homework.ipynb
ahmednourelden/Assignment7
9f305bf97827b2ddf8d229428a9b1e73f1cd96ba
[ "MIT" ]
null
null
null
17.842105
166
0.433628
[ [ [ "# Generators Homework \n\n### Problem 1\n\nCreate a generator that generates the squares of numbers up to some number N.", "_____no_output_____" ] ], [ [ "def gensquares(N): \n a = 0\n while a < N:\n yield a**2\n a += 1\n\n ", "_____no_output_____" ], [ "for x in gensquares(10):\n print(x)", "0\n1\n4\n9\n16\n25\n36\n49\n64\n81\n" ] ], [ [ "### Problem 2\n\nCreate a generator that yields \"n\" random numbers between a low and high number (that are inputs). <br>Note: Use the random library. For example:", "_____no_output_____" ] ], [ [ "import random\n\nrandom.randint(1,10)", "_____no_output_____" ], [ "def rand_num(low,high,n):\n a = 1\n while a <= n:\n yield random.randint(low,high)\n a += 1\n\n", "_____no_output_____" ], [ "for num in rand_num(1,10,12):\n print(num)", "3\n8\n6\n1\n2\n7\n5\n10\n4\n4\n9\n5\n" ] ], [ [ "### Problem 3\nExplain a use case for a generator using a yield statement where you would not want to use a normal function with a return statement.<br><br><br><br><br><br>\n\n\n", "_____no_output_____" ] ], [ [ "#Frist USE ::We can us it to create list easliy by any sequence or equation \ndef create_list1(n):\n for x in range(n+1):\n yield x \n \ndef create_list2(n): \n for x in range(n+1):\n yield x*2 ", "_____no_output_____" ], [ "# Create list increment by 1\nlist (create_list1(10))", "_____no_output_____" ], [ "# Create list multiple by 2\nlist (create_list2(10))", "_____no_output_____" ], [ "# Second use :: We need to use __next()__ method without implement \n\ndef steps_func(n): \n a=0\n while a < n:\n print (f'Step number {a}')\n yield a\n a+=1\n while True :\n yield print (\"IT is End , No More Steps\") ", "_____no_output_____" ], [ "step=steps_func(3)\n ", "_____no_output_____" ], [ "step.__next__()\n", "Step number 0\n" ], [ "next(step)\n", "Step number 1\n" ], [ "next(step)\n", "Step number 2\n" ], [ "step.__next__()\n", "IT is End , No More Steps\n" ] ], [ [ "# Great Job!", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
cb53ff3df6189a954071a479ef6bda39ba1acbe8
2,626
ipynb
Jupyter Notebook
scripts/SIR.ipynb
RuihanWei/covid
db80f87d7c762349ca1faeb2471cd2fd0c53c3ab
[ "MIT" ]
null
null
null
scripts/SIR.ipynb
RuihanWei/covid
db80f87d7c762349ca1faeb2471cd2fd0c53c3ab
[ "MIT" ]
null
null
null
scripts/SIR.ipynb
RuihanWei/covid
db80f87d7c762349ca1faeb2471cd2fd0c53c3ab
[ "MIT" ]
null
null
null
23.872727
83
0.493526
[ [ [ "import matplotlib\nimport matplotlib.pyplot as plt\n\nimport jax\nimport jax.numpy as np\nfrom jax.random import PRNGKey\n\nimport numpyro\nimport numpyro.distributions as dist\nfrom numpyro.infer import MCMC, NUTS, Predictive\n\nimport pandas as pd\n\nimport covid\nimport covid.util as util\nfrom covid.models.SEIRD import SEIRD_stochastic", "_____no_output_____" ] ], [ [ "# Run Inference", "_____no_output_____" ] ], [ [ "data = util.load_state_data()\n\n#places = state_data.keys()\n#places = ['Italy', 'US', 'WA', 'NY', 'MA']\n#places = ['NY', 'MA', 'WA', 'AK', 'WV']\n#places = ['US']\n#places = sorted(['NY', 'WA', 'MA', 'MI', 'AL', 'AR', 'KY'])\nplaces = ['SD']\n\nsave = True\nfor place in places:\n util.run_place(data, place, save=save, start='2020-03-15', \n num_warmup=1000, num_samples=1000, num_prior_samples=1000)", "_____no_output_____" ], [ "data = util.load_state_data()\n\nstart = '2020-03-15'\n#places = ['Italy', 'US', 'WA', 'NY', 'MA']\n#places = ['ID']\n#places = ['NY']\n#places = state_data.keys()\n#places = ['AK']\n#places = ['US']\n#places = ['LA', 'NY']\n#places = ['NY', 'MA', 'WA', 'AK', 'WV']\n#places = ['CO']\n#places = ['NY', 'WA', 'MA']\n#places = sorted(['NY', 'WA', 'MA', 'MI', 'AL', 'AR', 'KY'])\nplaces = ['SD']\n\nsave = True\n\n\n# Inspect and Save Results \nfor place in places:\n util.gen_forecasts(data, place, start=start, save=save, daily=False)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code" ] ]
cb5412df8b878f1de168b6e5443c3487114de50f
45,005
ipynb
Jupyter Notebook
2-custom_data.ipynb
jinyiabc/zipline-sandbox
b85e5dd2e62b90e408375fd412c129a0e5684453
[ "MIT" ]
null
null
null
2-custom_data.ipynb
jinyiabc/zipline-sandbox
b85e5dd2e62b90e408375fd412c129a0e5684453
[ "MIT" ]
null
null
null
2-custom_data.ipynb
jinyiabc/zipline-sandbox
b85e5dd2e62b90e408375fd412c129a0e5684453
[ "MIT" ]
null
null
null
238.121693
27,120
0.91632
[ [ [ "import pandas as pd\nfrom collections import OrderedDict\nimport pytz\n\nfull_file_path = \"SPY.csv\"\ndata = OrderedDict()\ndata['SPY'] = pd.read_csv(full_file_path, index_col=0, parse_dates=['date'])\ndata['SPY'] = data['SPY'][[\"open\",\"high\",\"low\",\"close\",\"volume\"]]\ndata['SPY'] = data['SPY'].resample(\"1d\").mean()\ndata['SPY'].fillna(method=\"ffill\", inplace=True)\nprint(data['SPY'].head())", " open high low close volume\ndate \n1993-01-29 43.9687 43.9687 43.7500 43.9375 1003200.0\n1993-01-30 43.9687 43.9687 43.7500 43.9375 1003200.0\n1993-01-31 43.9687 43.9687 43.7500 43.9375 1003200.0\n1993-02-01 43.9687 44.2500 43.9687 44.2500 480500.0\n1993-02-02 44.2187 44.3750 44.1250 44.3437 201300.0\n" ] ], [ [ "Whenever you have all of your dataframes stored in this dictionary, you can then convert it to a panel, like so:", "_____no_output_____" ] ], [ [ "panel = pd.Panel(data)\npanel.minor_axis = [\"open\",\"high\",\"low\",\"close\",\"volume\"]\npanel.major_axis = panel.major_axis.tz_localize(pytz.utc)\nprint(panel)", "<class 'pandas.core.panel.Panel'>\nDimensions: 1 (items) x 9178 (major_axis) x 5 (minor_axis)\nItems axis: SPY to SPY\nMajor_axis axis: 1993-01-29 00:00:00+00:00 to 2018-03-16 00:00:00+00:00\nMinor_axis axis: open to volume\n" ] ], [ [ "With this panel now, we can actually pass this as our \"data\" to our backtest, like this:", "_____no_output_____" ] ], [ [ "from zipline.api import order, record, symbol, set_benchmark\nimport zipline\nimport matplotlib.pyplot as plt\nfrom datetime import datetime\n\n\ndef initialize(context):\n set_benchmark(symbol(\"SPY\"))\n\n\ndef handle_data(context, data):\n order(symbol(\"SPY\"), 10)\n record(SPY=data.current(symbol('SPY'), 'price'))\n\nperf = zipline.run_algorithm(start=datetime(2017, 1, 5, 0, 0, 0, 0, pytz.utc),\n end=datetime(2018, 3, 1, 0, 0, 0, 0, pytz.utc),\n initialize=initialize,\n capital_base=100000,\n handle_data=handle_data,\n data=panel)", "_____no_output_____" ], [ "import matplotlib.pyplot as plt\nfrom matplotlib import style\n\n\nstyle.use(\"ggplot\")\n\nperf.portfolio_value.pct_change().fillna(0).add(1).cumprod().sub(1).plot(label='portfolio')\nperf.SPY.pct_change().fillna(0).add(1).cumprod().sub(1).plot(label='benchmark')\nplt.legend(loc=2)\n\nplt.show()", "_____no_output_____" ], [ "perf.max_leverage.plot()\nplt.show()", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
cb5421b1c9632d7cbcfa7a9e5b7b89f5406b4959
9,410
ipynb
Jupyter Notebook
notebooks/data-processing.ipynb
raspstephan/nwp-downscale
dbb6d560126a8d13a2748bc088f382e68fa7c0b3
[ "MIT" ]
7
2020-12-14T02:57:39.000Z
2022-03-29T08:32:36.000Z
notebooks/data-processing.ipynb
raspstephan/nwp-downscale
dbb6d560126a8d13a2748bc088f382e68fa7c0b3
[ "MIT" ]
39
2020-12-03T11:39:11.000Z
2021-09-28T09:57:33.000Z
notebooks/data-processing.ipynb
raspstephan/nwp-downscale
dbb6d560126a8d13a2748bc088f382e68fa7c0b3
[ "MIT" ]
1
2021-09-21T23:57:36.000Z
2021-09-21T23:57:36.000Z
27.434402
751
0.559405
[ [ [ "import torch\nfrom torch.nn import functional as F\nfrom torch import nn\nfrom pytorch_lightning.core.lightning import LightningModule\nimport pytorch_lightning as pl\n\nimport torch.optim as optim\nimport torchvision\nimport torchvision.datasets as datasets\nimport torchvision.transforms as transforms\nfrom torch.utils.data import DataLoader\n\nfrom src.models import *\nfrom src.dataloader import *\nfrom src.utils import *\n\nimport xarray as xr\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nimport pickle\nimport json", "_____no_output_____" ] ], [ [ "## Train and Val", "_____no_output_____" ] ], [ [ "data_dir = '/home/jupyter/data/'", "_____no_output_____" ], [ "args = {'tigge_dir': data_dir + 'tigge/32km/',\n        'tigge_vars': ['total_precipitation_ens10', 'total_column_water', '2m_temperature', 'convective_available_potential_energy', 'convective_inhibition'],\n        'mrms_dir': data_dir + 'mrms/4km/RadarOnly_QPE_06H/',\n        'rq_fn': data_dir + 'mrms/4km/RadarQuality.nc',\n        # 'const_fn': data_dir + 'tigge/32km/constants.nc',\n        # 'const_vars': ['orog', 'lsm'],\n        'data_period': ('2018-01', '2019-12'),\n        'val_days': 1,\n        'split': 'train',\n        # 'pure_sr_ratio': 8,\n        'tp_log': 0.01,\n        'scale': True,\n        'ensemble_mode': 'stack_by_variable',\n        'pad_tigge': 15,\n        'pad_tigge_channel': True,\n        'idx_stride': 8\n        }\n\nsave_dir = '/home/jupyter/data/data_patches/'\ndataset_name = 'ensemble_tp_x10_added_vars_TCW-T-CAPE-CIN_log_trans_padded_15_channel'  # used in the pickle file names below", "_____no_output_____" ], [ "ds_train = TiggeMRMSDataset(**args)  # TiggeMRMSDataset is imported from src.dataloader", "setting nans in convective_inhibition to 0\n" ], [ "# pickle.dump(args, open(save_dir + 'train/configs/dataset_args.pkl', 'wb'))", "_____no_output_____" ], [ "# save_images(ds_train, save_dir, 'train')", "_____no_output_____" ], [ "pickle.dump(ds_train, open(data_dir + f\"saved_datasets/traindataset_{dataset_name}.pkl\", \"wb\"))\npickle.dump(args, open(data_dir + f\"saved_datasets/traindataset_{dataset_name}_args.pkl\", \"wb\"))", "_____no_output_____" ], [ "val_args = dict(args)  # copy, so the training args are not mutated in place\nval_args['maxs'] = ds_train.maxs  # reuse the training normalization for validation\nval_args['mins'] = ds_train.mins\nval_args['split'] = 'valid'\n\nds_valid = TiggeMRMSDataset(**val_args)", "_____no_output_____" ], [ "pickle.dump(val_args, open(save_dir + 'valid/configs/dataset_args.pkl', 'wb'))\nlen(ds_valid)", "_____no_output_____" ], [ "save_images(ds_valid, save_dir, 'valid')", "_____no_output_____" ], [ "# pickle.dump(ds_valid, open(data_dir + f\"saved_datasets/validdataset_{dataset_name}.pkl\", \"wb\"))\n# pickle.dump(val_args, open(data_dir + f\"saved_datasets/validdataset_{dataset_name}_args.pkl\", \"wb\"))", "_____no_output_____" ], [ "# reload the saved validation args (e.g., when resuming in a fresh session)\nval_args = pickle.load(open('/home/jupyter/data/data_patches/valid/configs/dataset_args.pkl', 'rb'))\n\ntest_args = dict(args)\ntest_args['href_dir'] = data_dir + 'hrefv2/4km/total_precipitation/2020*.nc'\ntest_args['maxs'] = val_args['maxs']  # the test split uses the same normalization as train/valid\ntest_args['mins'] = val_args['mins']\ntest_args.pop('val_days')\ntest_args.pop('split')\ntest_args['first_days'] = 5\ntest_args['data_period'] = ('2020-01', '2020-12')\ntest_dataset_name = dataset_name + f\"_first_days_{test_args['first_days']}\"\n\nds_test = TiggeMRMSHREFDataset(**test_args)", "_____no_output_____" ], [ "save_images(ds_test, save_dir, 'test')", "_____no_output_____" ], [ "pickle.dump(test_args, open(save_dir + 'test/configs/dataset_args.pkl', 'wb'))\nlen(ds_test)", "_____no_output_____" ], [ "pickle.dump(ds_test, open(data_dir + f\"saved_datasets/testdataset_{test_dataset_name}.pkl\", \"wb\"))\npickle.dump(test_args, open(data_dir + f\"saved_datasets/testdataset_{test_dataset_name}_args.pkl\", \"wb\"))", "_____no_output_____" ], [ "print(\"check\")", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb542ec860e96612a5d20ecefd0e558aeeda12cc
551,637
ipynb
Jupyter Notebook
07_ensemble_learning_and_random_forests.ipynb
BStudent/handson-ml2
1dc4f2fc684e4d14d71f9bdc67d7b03f76b715d6
[ "Apache-2.0" ]
14
2020-03-27T22:29:10.000Z
2022-01-20T18:10:49.000Z
07_ensemble_learning_and_random_forests.ipynb
AmitHasanShuvo/handson-ml2
9643e7ea3a40d43f71cef4dcf116c43140d3a4d0
[ "Apache-2.0" ]
9
2020-02-01T09:41:53.000Z
2022-03-12T00:12:00.000Z
07_ensemble_learning_and_random_forests.ipynb
AmitHasanShuvo/handson-ml2
9643e7ea3a40d43f71cef4dcf116c43140d3a4d0
[ "Apache-2.0" ]
18
2020-03-30T04:52:34.000Z
2021-06-17T10:22:44.000Z
204.461453
107,348
0.892808
[ [ [ "**Chapter 7 – Ensemble Learning and Random Forests**", "_____no_output_____" ], [ "_This notebook contains all the sample code and solutions to the exercises in chapter 7._", "_____no_output_____" ], [ "<table align=\"left\">\n <td>\n <a target=\"_blank\" href=\"https://colab.research.google.com/github/ageron/handson-ml2/blob/master/07_ensemble_learning_and_random_forests.ipynb\"><img src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" />Run in Google Colab</a>\n </td>\n</table>", "_____no_output_____" ], [ "# Setup", "_____no_output_____" ], [ "First, let's import a few common modules, ensure MatplotLib plots figures inline and prepare a function to save the figures. We also check that Python 3.5 or later is installed (although Python 2.x may work, it is deprecated so we strongly recommend you use Python 3 instead), as well as Scikit-Learn ≥0.20.", "_____no_output_____" ] ], [ [ "# Python ≥3.5 is required\nimport sys\nassert sys.version_info >= (3, 5)\n\n# Scikit-Learn ≥0.20 is required\nimport sklearn\nassert sklearn.__version__ >= \"0.20\"\n\n# Common imports\nimport numpy as np\nimport os\n\n# to make this notebook's output stable across runs\nnp.random.seed(42)\n\n# To plot pretty figures\n%matplotlib inline\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\nmpl.rc('axes', labelsize=14)\nmpl.rc('xtick', labelsize=12)\nmpl.rc('ytick', labelsize=12)\n\n# Where to save the figures\nPROJECT_ROOT_DIR = \".\"\nCHAPTER_ID = \"ensembles\"\nIMAGES_PATH = os.path.join(PROJECT_ROOT_DIR, \"images\", CHAPTER_ID)\nos.makedirs(IMAGES_PATH, exist_ok=True)\n\ndef save_fig(fig_id, tight_layout=True, fig_extension=\"png\", resolution=300):\n path = os.path.join(IMAGES_PATH, fig_id + \".\" + fig_extension)\n print(\"Saving figure\", fig_id)\n if tight_layout:\n plt.tight_layout()\n plt.savefig(path, format=fig_extension, dpi=resolution)", "_____no_output_____" ] ], [ [ "# Voting classifiers", "_____no_output_____" ] ], [ [ "heads_proba = 0.51\ncoin_tosses = (np.random.rand(10000, 10) < heads_proba).astype(np.int32)\ncumulative_heads_ratio = np.cumsum(coin_tosses, axis=0) / np.arange(1, 10001).reshape(-1, 1)", "_____no_output_____" ], [ "plt.figure(figsize=(8,3.5))\nplt.plot(cumulative_heads_ratio)\nplt.plot([0, 10000], [0.51, 0.51], \"k--\", linewidth=2, label=\"51%\")\nplt.plot([0, 10000], [0.5, 0.5], \"k-\", label=\"50%\")\nplt.xlabel(\"Number of coin tosses\")\nplt.ylabel(\"Heads ratio\")\nplt.legend(loc=\"lower right\")\nplt.axis([0, 10000, 0.42, 0.58])\nsave_fig(\"law_of_large_numbers_plot\")\nplt.show()", "Saving figure law_of_large_numbers_plot\n" ], [ "from sklearn.model_selection import train_test_split\nfrom sklearn.datasets import make_moons\n\nX, y = make_moons(n_samples=500, noise=0.30, random_state=42)\nX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)", "_____no_output_____" ] ], [ [ "**Note**: to be future-proof, we set `solver=\"lbfgs\"`, `n_estimators=100`, and `gamma=\"scale\"` since these will be the default values in upcoming Scikit-Learn versions.", "_____no_output_____" ] ], [ [ "from sklearn.ensemble import RandomForestClassifier\nfrom sklearn.ensemble import VotingClassifier\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.svm import SVC\n\nlog_clf = LogisticRegression(solver=\"lbfgs\", random_state=42)\nrnd_clf = RandomForestClassifier(n_estimators=100, random_state=42)\nsvm_clf = SVC(gamma=\"scale\", random_state=42)\n\nvoting_clf = VotingClassifier(\n estimators=[('lr', log_clf), ('rf', rnd_clf), ('svc', 
svm_clf)],\n voting='hard')", "_____no_output_____" ], [ "voting_clf.fit(X_train, y_train)", "_____no_output_____" ], [ "from sklearn.metrics import accuracy_score\n\nfor clf in (log_clf, rnd_clf, svm_clf, voting_clf):\n clf.fit(X_train, y_train)\n y_pred = clf.predict(X_test)\n print(clf.__class__.__name__, accuracy_score(y_test, y_pred))", "LogisticRegression 0.864\nRandomForestClassifier 0.896\nSVC 0.896\nVotingClassifier 0.912\n" ] ], [ [ "Soft voting:", "_____no_output_____" ] ], [ [ "log_clf = LogisticRegression(solver=\"lbfgs\", random_state=42)\nrnd_clf = RandomForestClassifier(n_estimators=100, random_state=42)\nsvm_clf = SVC(gamma=\"scale\", probability=True, random_state=42)\n\nvoting_clf = VotingClassifier(\n estimators=[('lr', log_clf), ('rf', rnd_clf), ('svc', svm_clf)],\n voting='soft')\nvoting_clf.fit(X_train, y_train)", "_____no_output_____" ], [ "from sklearn.metrics import accuracy_score\n\nfor clf in (log_clf, rnd_clf, svm_clf, voting_clf):\n clf.fit(X_train, y_train)\n y_pred = clf.predict(X_test)\n print(clf.__class__.__name__, accuracy_score(y_test, y_pred))", "LogisticRegression 0.864\nRandomForestClassifier 0.896\nSVC 0.896\nVotingClassifier 0.92\n" ] ], [ [ "# Bagging ensembles", "_____no_output_____" ] ], [ [ "from sklearn.ensemble import BaggingClassifier\nfrom sklearn.tree import DecisionTreeClassifier\n\nbag_clf = BaggingClassifier(\n DecisionTreeClassifier(random_state=42), n_estimators=500,\n max_samples=100, bootstrap=True, random_state=42)\nbag_clf.fit(X_train, y_train)\ny_pred = bag_clf.predict(X_test)", "_____no_output_____" ], [ "from sklearn.metrics import accuracy_score\nprint(accuracy_score(y_test, y_pred))", "0.904\n" ], [ "tree_clf = DecisionTreeClassifier(random_state=42)\ntree_clf.fit(X_train, y_train)\ny_pred_tree = tree_clf.predict(X_test)\nprint(accuracy_score(y_test, y_pred_tree))", "0.856\n" ], [ "from matplotlib.colors import ListedColormap\n\ndef plot_decision_boundary(clf, X, y, axes=[-1.5, 2.45, -1, 1.5], alpha=0.5, contour=True):\n x1s = np.linspace(axes[0], axes[1], 100)\n x2s = np.linspace(axes[2], axes[3], 100)\n x1, x2 = np.meshgrid(x1s, x2s)\n X_new = np.c_[x1.ravel(), x2.ravel()]\n y_pred = clf.predict(X_new).reshape(x1.shape)\n custom_cmap = ListedColormap(['#fafab0','#9898ff','#a0faa0'])\n plt.contourf(x1, x2, y_pred, alpha=0.3, cmap=custom_cmap)\n if contour:\n custom_cmap2 = ListedColormap(['#7d7d58','#4c4c7f','#507d50'])\n plt.contour(x1, x2, y_pred, cmap=custom_cmap2, alpha=0.8)\n plt.plot(X[:, 0][y==0], X[:, 1][y==0], \"yo\", alpha=alpha)\n plt.plot(X[:, 0][y==1], X[:, 1][y==1], \"bs\", alpha=alpha)\n plt.axis(axes)\n plt.xlabel(r\"$x_1$\", fontsize=18)\n plt.ylabel(r\"$x_2$\", fontsize=18, rotation=0)", "_____no_output_____" ], [ "fix, axes = plt.subplots(ncols=2, figsize=(10,4), sharey=True)\nplt.sca(axes[0])\nplot_decision_boundary(tree_clf, X, y)\nplt.title(\"Decision Tree\", fontsize=14)\nplt.sca(axes[1])\nplot_decision_boundary(bag_clf, X, y)\nplt.title(\"Decision Trees with Bagging\", fontsize=14)\nplt.ylabel(\"\")\nsave_fig(\"decision_tree_without_and_with_bagging_plot\")\nplt.show()", "Saving figure decision_tree_without_and_with_bagging_plot\n" ] ], [ [ "# Random Forests", "_____no_output_____" ] ], [ [ "bag_clf = BaggingClassifier(\n DecisionTreeClassifier(splitter=\"random\", max_leaf_nodes=16, random_state=42),\n n_estimators=500, max_samples=1.0, bootstrap=True, random_state=42)", "_____no_output_____" ], [ "bag_clf.fit(X_train, y_train)\ny_pred = bag_clf.predict(X_test)", "_____no_output_____" ], [ "from 
sklearn.ensemble import RandomForestClassifier\n\nrnd_clf = RandomForestClassifier(n_estimators=500, max_leaf_nodes=16, random_state=42)\nrnd_clf.fit(X_train, y_train)\n\ny_pred_rf = rnd_clf.predict(X_test)", "_____no_output_____" ], [ "np.sum(y_pred == y_pred_rf) / len(y_pred) # almost identical predictions", "_____no_output_____" ], [ "from sklearn.datasets import load_iris\niris = load_iris()\nrnd_clf = RandomForestClassifier(n_estimators=500, random_state=42)\nrnd_clf.fit(iris[\"data\"], iris[\"target\"])\nfor name, score in zip(iris[\"feature_names\"], rnd_clf.feature_importances_):\n print(name, score)", "sepal length (cm) 0.11249225099876374\nsepal width (cm) 0.023119288282510326\npetal length (cm) 0.44103046436395765\npetal width (cm) 0.4233579963547681\n" ], [ "rnd_clf.feature_importances_", "_____no_output_____" ], [ "plt.figure(figsize=(6, 4))\n\nfor i in range(15):\n tree_clf = DecisionTreeClassifier(max_leaf_nodes=16, random_state=42 + i)\n indices_with_replacement = np.random.randint(0, len(X_train), len(X_train))\n tree_clf.fit(X[indices_with_replacement], y[indices_with_replacement])\n plot_decision_boundary(tree_clf, X, y, axes=[-1.5, 2.45, -1, 1.5], alpha=0.02, contour=False)\n\nplt.show()", "_____no_output_____" ] ], [ [ "## Out-of-Bag evaluation", "_____no_output_____" ] ], [ [ "bag_clf = BaggingClassifier(\n DecisionTreeClassifier(random_state=42), n_estimators=500,\n bootstrap=True, oob_score=True, random_state=40)\nbag_clf.fit(X_train, y_train)\nbag_clf.oob_score_", "_____no_output_____" ], [ "bag_clf.oob_decision_function_", "_____no_output_____" ], [ "from sklearn.metrics import accuracy_score\ny_pred = bag_clf.predict(X_test)\naccuracy_score(y_test, y_pred)", "_____no_output_____" ] ], [ [ "## Feature importance", "_____no_output_____" ] ], [ [ "from sklearn.datasets import fetch_openml\n\nmnist = fetch_openml('mnist_784', version=1)\nmnist.target = mnist.target.astype(np.uint8)", "_____no_output_____" ], [ "rnd_clf = RandomForestClassifier(n_estimators=100, random_state=42)\nrnd_clf.fit(mnist[\"data\"], mnist[\"target\"])", "_____no_output_____" ], [ "def plot_digit(data):\n image = data.reshape(28, 28)\n plt.imshow(image, cmap = mpl.cm.hot,\n interpolation=\"nearest\")\n plt.axis(\"off\")", "_____no_output_____" ], [ "plot_digit(rnd_clf.feature_importances_)\n\ncbar = plt.colorbar(ticks=[rnd_clf.feature_importances_.min(), rnd_clf.feature_importances_.max()])\ncbar.ax.set_yticklabels(['Not important', 'Very important'])\n\nsave_fig(\"mnist_feature_importance_plot\")\nplt.show()", "Saving figure mnist_feature_importance_plot\n" ] ], [ [ "# AdaBoost", "_____no_output_____" ] ], [ [ "from sklearn.ensemble import AdaBoostClassifier\n\nada_clf = AdaBoostClassifier(\n DecisionTreeClassifier(max_depth=1), n_estimators=200,\n algorithm=\"SAMME.R\", learning_rate=0.5, random_state=42)\nada_clf.fit(X_train, y_train)", "_____no_output_____" ], [ "plot_decision_boundary(ada_clf, X, y)", "_____no_output_____" ], [ "m = len(X_train)\n\nfix, axes = plt.subplots(ncols=2, figsize=(10,4), sharey=True)\nfor subplot, learning_rate in ((0, 1), (1, 0.5)):\n sample_weights = np.ones(m)\n plt.sca(axes[subplot])\n for i in range(5):\n svm_clf = SVC(kernel=\"rbf\", C=0.05, gamma=\"scale\", random_state=42)\n svm_clf.fit(X_train, y_train, sample_weight=sample_weights)\n y_pred = svm_clf.predict(X_train)\n sample_weights[y_pred != y_train] *= (1 + learning_rate)\n plot_decision_boundary(svm_clf, X, y, alpha=0.2)\n plt.title(\"learning_rate = {}\".format(learning_rate), fontsize=16)\n if 
subplot == 0:\n plt.text(-0.7, -0.65, \"1\", fontsize=14)\n plt.text(-0.6, -0.10, \"2\", fontsize=14)\n plt.text(-0.5, 0.10, \"3\", fontsize=14)\n plt.text(-0.4, 0.55, \"4\", fontsize=14)\n plt.text(-0.3, 0.90, \"5\", fontsize=14)\n else:\n plt.ylabel(\"\")\n\nsave_fig(\"boosting_plot\")\nplt.show()", "Saving figure boosting_plot\n" ], [ "list(m for m in dir(ada_clf) if not m.startswith(\"_\") and m.endswith(\"_\"))", "_____no_output_____" ] ], [ [ "# Gradient Boosting", "_____no_output_____" ] ], [ [ "np.random.seed(42)\nX = np.random.rand(100, 1) - 0.5\ny = 3*X[:, 0]**2 + 0.05 * np.random.randn(100)", "_____no_output_____" ], [ "from sklearn.tree import DecisionTreeRegressor\n\ntree_reg1 = DecisionTreeRegressor(max_depth=2, random_state=42)\ntree_reg1.fit(X, y)", "_____no_output_____" ], [ "y2 = y - tree_reg1.predict(X)\ntree_reg2 = DecisionTreeRegressor(max_depth=2, random_state=42)\ntree_reg2.fit(X, y2)", "_____no_output_____" ], [ "y3 = y2 - tree_reg2.predict(X)\ntree_reg3 = DecisionTreeRegressor(max_depth=2, random_state=42)\ntree_reg3.fit(X, y3)", "_____no_output_____" ], [ "X_new = np.array([[0.8]])", "_____no_output_____" ], [ "y_pred = sum(tree.predict(X_new) for tree in (tree_reg1, tree_reg2, tree_reg3))", "_____no_output_____" ], [ "y_pred", "_____no_output_____" ], [ "def plot_predictions(regressors, X, y, axes, label=None, style=\"r-\", data_style=\"b.\", data_label=None):\n x1 = np.linspace(axes[0], axes[1], 500)\n y_pred = sum(regressor.predict(x1.reshape(-1, 1)) for regressor in regressors)\n plt.plot(X[:, 0], y, data_style, label=data_label)\n plt.plot(x1, y_pred, style, linewidth=2, label=label)\n if label or data_label:\n plt.legend(loc=\"upper center\", fontsize=16)\n plt.axis(axes)", "_____no_output_____" ], [ "plt.figure(figsize=(11,11))\n\nplt.subplot(321)\nplot_predictions([tree_reg1], X, y, axes=[-0.5, 0.5, -0.1, 0.8], label=\"$h_1(x_1)$\", style=\"g-\", data_label=\"Training set\")\nplt.ylabel(\"$y$\", fontsize=16, rotation=0)\nplt.title(\"Residuals and tree predictions\", fontsize=16)\n\nplt.subplot(322)\nplot_predictions([tree_reg1], X, y, axes=[-0.5, 0.5, -0.1, 0.8], label=\"$h(x_1) = h_1(x_1)$\", data_label=\"Training set\")\nplt.ylabel(\"$y$\", fontsize=16, rotation=0)\nplt.title(\"Ensemble predictions\", fontsize=16)\n\nplt.subplot(323)\nplot_predictions([tree_reg2], X, y2, axes=[-0.5, 0.5, -0.5, 0.5], label=\"$h_2(x_1)$\", style=\"g-\", data_style=\"k+\", data_label=\"Residuals\")\nplt.ylabel(\"$y - h_1(x_1)$\", fontsize=16)\n\nplt.subplot(324)\nplot_predictions([tree_reg1, tree_reg2], X, y, axes=[-0.5, 0.5, -0.1, 0.8], label=\"$h(x_1) = h_1(x_1) + h_2(x_1)$\")\nplt.ylabel(\"$y$\", fontsize=16, rotation=0)\n\nplt.subplot(325)\nplot_predictions([tree_reg3], X, y3, axes=[-0.5, 0.5, -0.5, 0.5], label=\"$h_3(x_1)$\", style=\"g-\", data_style=\"k+\")\nplt.ylabel(\"$y - h_1(x_1) - h_2(x_1)$\", fontsize=16)\nplt.xlabel(\"$x_1$\", fontsize=16)\n\nplt.subplot(326)\nplot_predictions([tree_reg1, tree_reg2, tree_reg3], X, y, axes=[-0.5, 0.5, -0.1, 0.8], label=\"$h(x_1) = h_1(x_1) + h_2(x_1) + h_3(x_1)$\")\nplt.xlabel(\"$x_1$\", fontsize=16)\nplt.ylabel(\"$y$\", fontsize=16, rotation=0)\n\nsave_fig(\"gradient_boosting_plot\")\nplt.show()", "Saving figure gradient_boosting_plot\n" ], [ "from sklearn.ensemble import GradientBoostingRegressor\n\ngbrt = GradientBoostingRegressor(max_depth=2, n_estimators=3, learning_rate=1.0, random_state=42)\ngbrt.fit(X, y)", "_____no_output_____" ], [ "gbrt_slow = GradientBoostingRegressor(max_depth=2, n_estimators=200, 
learning_rate=0.1, random_state=42)\ngbrt_slow.fit(X, y)", "_____no_output_____" ], [ "fix, axes = plt.subplots(ncols=2, figsize=(10,4), sharey=True)\n\nplt.sca(axes[0])\nplot_predictions([gbrt], X, y, axes=[-0.5, 0.5, -0.1, 0.8], label=\"Ensemble predictions\")\nplt.title(\"learning_rate={}, n_estimators={}\".format(gbrt.learning_rate, gbrt.n_estimators), fontsize=14)\nplt.xlabel(\"$x_1$\", fontsize=16)\nplt.ylabel(\"$y$\", fontsize=16, rotation=0)\n\nplt.sca(axes[1])\nplot_predictions([gbrt_slow], X, y, axes=[-0.5, 0.5, -0.1, 0.8])\nplt.title(\"learning_rate={}, n_estimators={}\".format(gbrt_slow.learning_rate, gbrt_slow.n_estimators), fontsize=14)\nplt.xlabel(\"$x_1$\", fontsize=16)\n\nsave_fig(\"gbrt_learning_rate_plot\")\nplt.show()", "Saving figure gbrt_learning_rate_plot\n" ] ], [ [ "## Gradient Boosting with Early stopping", "_____no_output_____" ] ], [ [ "import numpy as np\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import mean_squared_error\n\nX_train, X_val, y_train, y_val = train_test_split(X, y, random_state=49)\n\ngbrt = GradientBoostingRegressor(max_depth=2, n_estimators=120, random_state=42)\ngbrt.fit(X_train, y_train)\n\nerrors = [mean_squared_error(y_val, y_pred)\n for y_pred in gbrt.staged_predict(X_val)]\nbst_n_estimators = np.argmin(errors) + 1\n\ngbrt_best = GradientBoostingRegressor(max_depth=2, n_estimators=bst_n_estimators, random_state=42)\ngbrt_best.fit(X_train, y_train)", "_____no_output_____" ], [ "min_error = np.min(errors)", "_____no_output_____" ], [ "plt.figure(figsize=(10, 4))\n\nplt.subplot(121)\nplt.plot(errors, \"b.-\")\nplt.plot([bst_n_estimators, bst_n_estimators], [0, min_error], \"k--\")\nplt.plot([0, 120], [min_error, min_error], \"k--\")\nplt.plot(bst_n_estimators, min_error, \"ko\")\nplt.text(bst_n_estimators, min_error*1.2, \"Minimum\", ha=\"center\", fontsize=14)\nplt.axis([0, 120, 0, 0.01])\nplt.xlabel(\"Number of trees\")\nplt.ylabel(\"Error\", fontsize=16)\nplt.title(\"Validation error\", fontsize=14)\n\nplt.subplot(122)\nplot_predictions([gbrt_best], X, y, axes=[-0.5, 0.5, -0.1, 0.8])\nplt.title(\"Best model (%d trees)\" % bst_n_estimators, fontsize=14)\nplt.ylabel(\"$y$\", fontsize=16, rotation=0)\nplt.xlabel(\"$x_1$\", fontsize=16)\n\nsave_fig(\"early_stopping_gbrt_plot\")\nplt.show()", "Saving figure early_stopping_gbrt_plot\n" ], [ "gbrt = GradientBoostingRegressor(max_depth=2, warm_start=True, random_state=42)\n\nmin_val_error = float(\"inf\")\nerror_going_up = 0\nfor n_estimators in range(1, 120):\n gbrt.n_estimators = n_estimators\n gbrt.fit(X_train, y_train)\n y_pred = gbrt.predict(X_val)\n val_error = mean_squared_error(y_val, y_pred)\n if val_error < min_val_error:\n min_val_error = val_error\n error_going_up = 0\n else:\n error_going_up += 1\n if error_going_up == 5:\n break # early stopping", "_____no_output_____" ], [ "print(gbrt.n_estimators)", "61\n" ], [ "print(\"Minimum validation MSE:\", min_val_error)", "Minimum validation MSE: 0.002712853325235463\n" ] ], [ [ "## Using XGBoost", "_____no_output_____" ] ], [ [ "try:\n import xgboost\nexcept ImportError as ex:\n print(\"Error: the xgboost library is not installed.\")\n xgboost = None", "_____no_output_____" ], [ "if xgboost is not None: # not shown in the book\n xgb_reg = xgboost.XGBRegressor(random_state=42)\n xgb_reg.fit(X_train, y_train)\n y_pred = xgb_reg.predict(X_val)\n val_error = mean_squared_error(y_val, y_pred) # Not shown\n print(\"Validation MSE:\", val_error) # Not shown", "[16:33:46] WARNING: 
src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\nValidation MSE: 0.0028512559726563943\n" ], [ "if xgboost is not None: # not shown in the book\n xgb_reg.fit(X_train, y_train,\n eval_set=[(X_val, y_val)], early_stopping_rounds=2)\n y_pred = xgb_reg.predict(X_val)\n val_error = mean_squared_error(y_val, y_pred) # Not shown\n print(\"Validation MSE:\", val_error) # Not shown", "[16:33:46] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[0]\tvalidation_0-rmse:0.286719\nWill train until validation_0-rmse hasn't improved in 2 rounds.\n[1]\tvalidation_0-rmse:0.258221\n[2]\tvalidation_0-rmse:0.232634\n[3]\tvalidation_0-rmse:0.210526\n[4]\tvalidation_0-rmse:0.190232\n[5]\tvalidation_0-rmse:0.172196\n[6]\tvalidation_0-rmse:0.156394\n[7]\tvalidation_0-rmse:0.142241\n[8]\tvalidation_0-rmse:0.129789\n[9]\tvalidation_0-rmse:0.118752\n[10]\tvalidation_0-rmse:0.108388\n[11]\tvalidation_0-rmse:0.100155\n[12]\tvalidation_0-rmse:0.09208\n[13]\tvalidation_0-rmse:0.084791\n[14]\tvalidation_0-rmse:0.078699\n[15]\tvalidation_0-rmse:0.073248\n[16]\tvalidation_0-rmse:0.069391\n[17]\tvalidation_0-rmse:0.066277\n[18]\tvalidation_0-rmse:0.063458\n[19]\tvalidation_0-rmse:0.060326\n[20]\tvalidation_0-rmse:0.0578\n[21]\tvalidation_0-rmse:0.055643\n[22]\tvalidation_0-rmse:0.053943\n[23]\tvalidation_0-rmse:0.053138\n[24]\tvalidation_0-rmse:0.052415\n[25]\tvalidation_0-rmse:0.051821\n[26]\tvalidation_0-rmse:0.051226\n[27]\tvalidation_0-rmse:0.051135\n[28]\tvalidation_0-rmse:0.05091\n[29]\tvalidation_0-rmse:0.050893\n[30]\tvalidation_0-rmse:0.050725\n[31]\tvalidation_0-rmse:0.050471\n[32]\tvalidation_0-rmse:0.050285\n[33]\tvalidation_0-rmse:0.050492\n[34]\tvalidation_0-rmse:0.050348\nStopping. 
Best iteration:\n[32]\tvalidation_0-rmse:0.050285\n\nValidation MSE: 0.002528626115371327\n" ], [ "%timeit xgboost.XGBRegressor().fit(X_train, y_train) if xgboost is not None else None", "[16:33:46] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[... the same one-line deprecation warning is repeated for every XGBRegressor fit in the %timeit loop; roughly 800 duplicate lines omitted ...]\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of 
reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n[16:33:50] WARNING: src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror.\n4.29 ms ± 46.2 µs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n" ], [ "%timeit GradientBoostingRegressor().fit(X_train, y_train)", "12.9 ms ± 827 µs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n" ] ], [ [ "# Exercise solutions", "_____no_output_____" ], [ "## 1. to 7.", "_____no_output_____" ], [ "See Appendix A.", "_____no_output_____" ], [ "## 8. Voting Classifier", "_____no_output_____" ], [ "Exercise: _Load the MNIST data and split it into a training set, a validation set, and a test set (e.g., use 50,000 instances for training, 10,000 for validation, and 10,000 for testing)._", "_____no_output_____" ], [ "The MNIST dataset was loaded earlier.", "_____no_output_____" ] ], [ [ "from sklearn.model_selection import train_test_split", "_____no_output_____" ], [ "X_train_val, X_test, y_train_val, y_test = train_test_split(\n mnist.data, mnist.target, test_size=10000, random_state=42)\nX_train, X_val, y_train, y_val = train_test_split(\n X_train_val, y_train_val, test_size=10000, random_state=42)", "_____no_output_____" ] ], [ [ "Exercise: _Then train various classifiers, such as a Random Forest classifier, an Extra-Trees classifier, and an SVM._", "_____no_output_____" ] ], [ [ "from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier\nfrom sklearn.svm import LinearSVC\nfrom sklearn.neural_network import MLPClassifier", "_____no_output_____" ], [ "random_forest_clf = RandomForestClassifier(n_estimators=100, random_state=42)\nextra_trees_clf = ExtraTreesClassifier(n_estimators=100, random_state=42)\nsvm_clf = LinearSVC(random_state=42)\nmlp_clf = MLPClassifier(random_state=42)", "_____no_output_____" ], [ "estimators = [random_forest_clf, extra_trees_clf, svm_clf, mlp_clf]\nfor estimator in estimators:\n print(\"Training the\", estimator)\n estimator.fit(X_train, y_train)", "Training the RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n min_impurity_decrease=0.0, min_impurity_split=None,\n min_samples_leaf=1, min_samples_split=2,\n min_weight_fraction_leaf=0.0, n_estimators=100, n_jobs=None,\n oob_score=False, random_state=42, verbose=0, warm_start=False)\nTraining the ExtraTreesClassifier(bootstrap=False, class_weight=None, criterion='gini',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n 
min_impurity_decrease=0.0, min_impurity_split=None,\n min_samples_leaf=1, min_samples_split=2,\n min_weight_fraction_leaf=0.0, n_estimators=100, n_jobs=None,\n oob_score=False, random_state=42, verbose=0, warm_start=False)\nTraining the LinearSVC(C=1.0, class_weight=None, dual=True, fit_intercept=True,\n intercept_scaling=1, loss='squared_hinge', max_iter=1000,\n multi_class='ovr', penalty='l2', random_state=42, tol=0.0001,\n verbose=0)\n" ], [ "[estimator.score(X_val, y_val) for estimator in estimators]", "_____no_output_____" ] ], [ [ "The linear SVM is far outperformed by the other classifiers. However, let's keep it for now since it may improve the voting classifier's performance.", "_____no_output_____" ], [ "Exercise: _Next, try to combine them into an ensemble that outperforms them all on the validation set, using a soft or hard voting classifier._", "_____no_output_____" ] ], [ [ "from sklearn.ensemble import VotingClassifier", "_____no_output_____" ], [ "named_estimators = [\n (\"random_forest_clf\", random_forest_clf),\n (\"extra_trees_clf\", extra_trees_clf),\n (\"svm_clf\", svm_clf),\n (\"mlp_clf\", mlp_clf),\n]", "_____no_output_____" ], [ "voting_clf = VotingClassifier(named_estimators)", "_____no_output_____" ], [ "voting_clf.fit(X_train, y_train)", "/Users/ageron/miniconda3/envs/tf2b/lib/python3.7/site-packages/sklearn/svm/base.py:931: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n \"the number of iterations.\", ConvergenceWarning)\n" ], [ "voting_clf.score(X_val, y_val)", "_____no_output_____" ], [ "[estimator.score(X_val, y_val) for estimator in voting_clf.estimators_]", "_____no_output_____" ] ], [ [ "Let's remove the SVM to see if performance improves. It is possible to remove an estimator by setting it to `None` using `set_params()` like this:", "_____no_output_____" ] ], [ [ "voting_clf.set_params(svm_clf=None)", "_____no_output_____" ] ], [ [ "This updated the list of estimators:", "_____no_output_____" ] ], [ [ "voting_clf.estimators", "_____no_output_____" ] ], [ [ "However, it did not update the list of _trained_ estimators:", "_____no_output_____" ] ], [ [ "voting_clf.estimators_", "_____no_output_____" ] ], [ [ "So we can either fit the `VotingClassifier` again, or just remove the SVM from the list of trained estimators:", "_____no_output_____" ] ], [ [ "del voting_clf.estimators_[2]", "_____no_output_____" ] ], [ [ "Now let's evaluate the `VotingClassifier` again:", "_____no_output_____" ] ], [ [ "voting_clf.score(X_val, y_val)", "_____no_output_____" ] ], [ [ "A bit better! The SVM was hurting performance. Now let's try using a soft voting classifier. We do not actually need to retrain the classifier, we can just set `voting` to `\"soft\"`:", "_____no_output_____" ] ], [ [ "voting_clf.voting = \"soft\"", "_____no_output_____" ], [ "voting_clf.score(X_val, y_val)", "_____no_output_____" ] ], [ [ "Nope, hard voting wins in this case.", "_____no_output_____" ], [ "_Once you have found one, try it on the test set. How much better does it perform compared to the individual classifiers?_", "_____no_output_____" ] ], [ [ "voting_clf.voting = \"hard\"\nvoting_clf.score(X_test, y_test)", "_____no_output_____" ], [ "[estimator.score(X_test, y_test) for estimator in voting_clf.estimators_]", "_____no_output_____" ] ], [ [ "The voting classifier only very slightly reduced the error rate of the best model in this case.", "_____no_output_____" ], [ "## 9. 
Stacking Ensemble", "_____no_output_____" ], [ "Exercise: _Run the individual classifiers from the previous exercise to make predictions on the validation set, and create a new training set with the resulting predictions: each training instance is a vector containing the set of predictions from all your classifiers for an image, and the target is the image's class. Train a classifier on this new training set._", "_____no_output_____" ] ], [ [ "X_val_predictions = np.empty((len(X_val), len(estimators)), dtype=np.float32)\n\nfor index, estimator in enumerate(estimators):\n X_val_predictions[:, index] = estimator.predict(X_val)", "_____no_output_____" ], [ "X_val_predictions", "_____no_output_____" ], [ "rnd_forest_blender = RandomForestClassifier(n_estimators=200, oob_score=True, random_state=42)\nrnd_forest_blender.fit(X_val_predictions, y_val)", "_____no_output_____" ], [ "rnd_forest_blender.oob_score_", "_____no_output_____" ] ], [ [ "You could fine-tune this blender or try other types of blenders (e.g., an `MLPClassifier`), then select the best one using cross-validation, as always.", "_____no_output_____" ], [ "Exercise: _Congratulations, you have just trained a blender, and together with the classifiers they form a stacking ensemble! Now let's evaluate the ensemble on the test set. For each image in the test set, make predictions with all your classifiers, then feed the predictions to the blender to get the ensemble's predictions. How does it compare to the voting classifier you trained earlier?_", "_____no_output_____" ] ], [ [ "X_test_predictions = np.empty((len(X_test), len(estimators)), dtype=np.float32)\n\nfor index, estimator in enumerate(estimators):\n X_test_predictions[:, index] = estimator.predict(X_test)", "_____no_output_____" ], [ "y_pred = rnd_forest_blender.predict(X_test_predictions)", "_____no_output_____" ], [ "from sklearn.metrics import accuracy_score", "_____no_output_____" ], [ "accuracy_score(y_test, y_pred)", "_____no_output_____" ] ], [ [ "This stacking ensemble does not perform as well as the voting classifier we trained earlier, it's not quite as good as the best individual classifier.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ] ]
cb54343171eff7d62b0d8df55c042604a1486da3
176,966
ipynb
Jupyter Notebook
siddharth_day_1.ipynb
drudgery/lxmls-toolkit
c76c2b96f2c43a07bca75b5863762f0e0cd63fd6
[ "MIT" ]
null
null
null
siddharth_day_1.ipynb
drudgery/lxmls-toolkit
c76c2b96f2c43a07bca75b5863762f0e0cd63fd6
[ "MIT" ]
null
null
null
siddharth_day_1.ipynb
drudgery/lxmls-toolkit
c76c2b96f2c43a07bca75b5863762f0e0cd63fd6
[ "MIT" ]
null
null
null
276.942097
44,225
0.920092
[ [ [ "## Exercise 1.1\nIn this exercise we will use the Amazon sentiment analysis data (Blitzer et al., 2007), where the goal is to classify text documents as expressing a positive or negative sentiment (i.e., a classification problem with two classes). We are going to focus on book reviews. To load the data, type:", "_____no_output_____" ] ], [ [ "import lxmls.readers.sentiment_reader as srs\nscr = srs.SentimentCorpus(\"books\")", "_____no_output_____" ], [ "import lxmls.classifiers.multinomial_naive_bayes as mnbb\nmnb = mnbb.MultinomialNaiveBayes()\nparams_nb_sc = mnb.train(scr.train_X,scr.train_y)\ny_pred_train = mnb.test(scr.train_X,params_nb_sc)\nacc_train = mnb.evaluate(scr.train_y, y_pred_train)\ny_pred_test = mnb.test(scr.test_X,params_nb_sc)\nacc_test = mnb.evaluate(scr.test_y, y_pred_test)\nprint(\"Multinomial Naive Bayes Amazon Sentiment Accuracy train: %f test: %f\"%(acc_train,acc_test))", "Multinomial Naive Bayes Amazon Sentiment Accuracy train: 0.987500 test: 0.635000\n" ] ], [ [ "## Exercise 1.2\nWe provide an implementation of the perceptron algorithm in the class Perceptron (file perceptron.py).", "_____no_output_____" ] ], [ [ "# 1. Run the following commands to generate a simple dataset\nimport lxmls.readers.simple_data_set as sds\nsd = sds.SimpleDataSet(nr_examples=100, g1 = [[-1,-1],1], g2 = [[1,1],1], balance=0.5, split=[0.5,0,0.5])", "_____no_output_____" ], [ "# 2. Run the perceptron algorithm on the simple dataset previously generated and report its train and test set accuracy:\nimport lxmls.classifiers.perceptron as percc\nperc = percc.Perceptron()\nparams_perc_sd = perc.train(sd.train_X,sd.train_y)\ny_pred_train = perc.test(sd.train_X,params_perc_sd)\nacc_train = perc.evaluate(sd.train_y, y_pred_train)\ny_pred_test = perc.test(sd.test_X,params_perc_sd)\nacc_test = perc.evaluate(sd.test_y, y_pred_test)\nprint(\"Perceptron Simple Dataset Accuracy train: %f test: %f\"%(acc_train, acc_test))", "Rounds: 0 Accuracy: 0.900000\nRounds: 1 Accuracy: 1.000000\nRounds: 2 Accuracy: 1.000000\nRounds: 3 Accuracy: 1.000000\nRounds: 4 Accuracy: 1.000000\nRounds: 5 Accuracy: 1.000000\nRounds: 6 Accuracy: 1.000000\nRounds: 7 Accuracy: 1.000000\nRounds: 8 Accuracy: 1.000000\nRounds: 9 Accuracy: 1.000000\nPerceptron Simple Dataset Accuracy train: 0.960000 test: 0.960000\n" ], [ "# 3. Plot the decision boundary found:\nfig,axis = sd.plot_data()\nfig,axis = sd.add_line(fig,axis,params_perc_sd,\"Perceptron\",\"blue\")", "[[-1.69314718 -1.69314718]\n [-1. 1. ]\n [-1. 1. ]]\n" ], [ "# 4. Run the perceptron algorithm on the Amazon dataset.\nimport lxmls.classifiers.perceptron as percc\nperc = percc.Perceptron()\nparams_perc_sc = perc.train(scr.train_X,scr.train_y)\ny_pred_train = perc.test(scr.train_X,params_perc_sc)\nacc_train = perc.evaluate(scr.train_y, y_pred_train)\ny_pred_test = perc.test(scr.test_X,params_perc_sc)\nacc_test = perc.evaluate(scr.test_y, y_pred_test)\nprint(\"Perceptron Amazon Sentiment Accuracy train: %f test: %f\"%(acc_train,acc_test))", "Rounds: 0 Accuracy: 0.870000\nRounds: 1 Accuracy: 0.940000\nRounds: 2 Accuracy: 0.979375\nRounds: 3 Accuracy: 0.965625\nRounds: 4 Accuracy: 0.989375\nRounds: 5 Accuracy: 0.996250\nRounds: 6 Accuracy: 0.995000\nRounds: 7 Accuracy: 0.999375\nRounds: 8 Accuracy: 0.996250\nRounds: 9 Accuracy: 0.998125\nPerceptron Amazon Sentiment Accuracy train: 0.998750 test: 0.825000\n" ] ], [ [ "## Exercise 1.3\nWe provide an implementation of the MIRA algorithm. Compare it with the perceptron for various values of λ.", "_____no_output_____" ] ], [ [ "import lxmls.classifiers.mira as mirac\nmira = mirac.Mira()\nmira.regularizer = 1.0 # This is lambda\nparams_mira_sd = mira.train(sd.train_X,sd.train_y)\ny_pred_train = mira.test(sd.train_X,params_mira_sd)\nacc_train = mira.evaluate(sd.train_y, y_pred_train)\ny_pred_test = mira.test(sd.test_X,params_mira_sd)\nacc_test = mira.evaluate(sd.test_y, y_pred_test)\nprint(\"Mira Simple Dataset Accuracy train: %f test: %f\"%(acc_train, acc_test))", "Rounds: 0 Accuracy: 0.980000\nRounds: 1 Accuracy: 0.940000\nRounds: 2 Accuracy: 0.940000\nRounds: 3 Accuracy: 0.980000\nRounds: 4 Accuracy: 0.980000\nRounds: 5 Accuracy: 0.940000\nRounds: 6 Accuracy: 0.920000\nRounds: 7 Accuracy: 0.940000\nRounds: 8 Accuracy: 1.000000\nRounds: 9 Accuracy: 1.000000\nMira Simple Dataset Accuracy train: 1.000000 test: 0.960000\n" ], [ "fig, axis = sd.add_line(fig, axis, params_mira_sd, \"Mira\",\"green\")\nfig", "/home/siddharth/Projects/MSc_CL/Sem_2/CLI177_LR/Homework_3/lxmls-toolkit/lxmls/readers/simple_data_set.py:96: UserWarning: color is redundantly defined by the 'color' keyword argument and the fmt string \"g--\" (-> color='g'). The keyword argument will take precedence.\n  axis.plot(x, y_star, 'g--', c=colour, label=name, linewidth=2)\n" ], [ "import lxmls.classifiers.mira as mirac\nmira = mirac.Mira()\nmira.regularizer = 1.0 # This is lambda\nparams_mira_sc = mira.train(scr.train_X,scr.train_y)\ny_pred_train = mira.test(scr.train_X,params_mira_sc)\nacc_train = mira.evaluate(scr.train_y, y_pred_train)\ny_pred_test = mira.test(scr.test_X,params_mira_sc)\nacc_test = mira.evaluate(scr.test_y, y_pred_test)\nprint(\"Mira Amazon Sentiment Accuracy train: %f test: %f\"%(acc_train,acc_test))", "/home/siddharth/.local/lib/python3.8/site-packages/numpy/core/fromnumeric.py:87: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray.\n  return ufunc.reduce(obj, axis, dtype, out, **passkwargs)\n" ] ], [ [ "## Exercise 1.4\nWe provide an implementation of the L-BFGS algorithm for training maximum entropy models in the class MaxEntBatch, as well as an implementation of the SGD algorithm in the class MaxEntOnline.", "_____no_output_____" ] ], [ [ "# 1. Train a maximum entropy model using L-BFGS on the Simple data set (try different values of λ).\n# Compare the results with the previous methods.\n# Plot the decision boundary.\nimport lxmls.classifiers.max_ent_batch as mebc\n\nme_lbfgs = mebc.MaxEntBatch()\nme_lbfgs.regularizer = 1.0\nparams_meb_sd = me_lbfgs.train(sd.train_X,sd.train_y)\ny_pred_train = me_lbfgs.test(sd.train_X,params_meb_sd)\nacc_train = me_lbfgs.evaluate(sd.train_y, y_pred_train)\ny_pred_test = me_lbfgs.test(sd.test_X,params_meb_sd)\nacc_test = me_lbfgs.evaluate(sd.test_y, y_pred_test)\nprint(\n    \"Max-Ent batch Simple Dataset Accuracy train: %f test: %f\" %\n    (acc_train,acc_test)\n)\n\nfig, axis = sd.add_line(fig, axis, params_meb_sd, \"Max-Ent-Batch\",\"orange\")\nfig", "Objective = 0.6931471805599453\nObjective = 0.6975612499276493\nObjective = 0.45557543775964016\nObjective = 0.45544739525546596\nObjective = 0.45542384019232723\nObjective = 0.4554237592429567\nObjective = 0.4554237572145117\nMax-Ent batch Simple Dataset Accuracy train: 0.980000 test: 0.960000\n" ], [ "# 2. Train a maximum entropy model using L-BFGS\n# on the Amazon dataset (try different values of λ) and report training and test set accuracy.\n# What do you observe?\nparams_meb_sc = me_lbfgs.train(scr.train_X,scr.train_y)\ny_pred_train = me_lbfgs.test(scr.train_X,params_meb_sc)\nacc_train = me_lbfgs.evaluate(scr.train_y, y_pred_train)\ny_pred_test = me_lbfgs.test(scr.test_X,params_meb_sc)\nacc_test = me_lbfgs.evaluate(scr.test_y, y_pred_test)\nprint(\n    \"Max-Ent Batch Amazon Sentiment Accuracy train: %f test: %f\" %\n    (acc_train, acc_test)\n)", "Objective = 0.6931471805599452\nObjective = 2.356993591885034\nObjective = 0.684446278347706\nObjective = 0.6624653397269713\nObjective = 0.6369050882978825\nObjective = 0.6279555562071627\nObjective = 0.6242854266098836\nObjective = 0.6226210165612387\nObjective = 0.6225118183216435\nObjective = 0.6224325438719657\nObjective = 0.6224098226931887\nObjective = 0.6224035006188007\nObjective = 0.622402924504082\nObjective = 0.6224027961317902\nObjective = 0.6224027875941004\nObjective = 0.6224027809592892\nObjective = 0.6224027960617738\nObjective = 0.6224027798482331\nMax-Ent Batch Amazon Sentiment Accuracy train: 0.858125 test: 0.790000\n" ], [ "# 3. Now, fix λ=1.0 and train with SGD (you might try to adjust the initial step).\n# Compare the objective values obtained during training with those obtained with L-BFGS.\n# What do you observe?\nimport lxmls.classifiers.max_ent_online as meoc\nme_sgd = meoc.MaxEntOnline()\nme_sgd.regularizer = 1.0\nparams_meo_sc = me_sgd.train(scr.train_X,scr.train_y)\ny_pred_train = me_sgd.test(scr.train_X,params_meo_sc)\nacc_train = me_sgd.evaluate(scr.train_y, y_pred_train)\ny_pred_test = me_sgd.test(scr.test_X,params_meo_sc)\nacc_test = me_sgd.evaluate(scr.test_y, y_pred_test)\nprint(\n    \"Max-Ent Online Amazon Sentiment Accuracy train: %f test: %f\" %\n    (acc_train, acc_test)\n)", "Epochs: 0 Objective: 1.449590\nEpochs: 0 Accuracy: 0.846875\nEpochs: 1 Objective: 0.643229\nEpochs: 1 Accuracy: 0.858750\nEpochs: 2 Objective: 0.633205\nEpochs: 2 Accuracy: 0.859375\nEpochs: 3 Objective: 0.629873\nEpochs: 3 Accuracy: 0.860000\nEpochs: 4 Objective: 0.628213\nEpochs: 4 Accuracy: 0.860000\nEpochs: 5 Objective: 0.627210\nEpochs: 5 Accuracy: 0.860000\nEpochs: 6 Objective: 0.626526\nEpochs: 6 Accuracy: 0.859375\nEpochs: 7 Objective: 0.626022\nEpochs: 7 Accuracy: 0.858750\nEpochs: 8 Objective: 0.625632\nEpochs: 8 Accuracy: 0.859375\nEpochs: 9 Objective: 0.625319\nEpochs: 9 Accuracy: 0.860000\nMax-Ent Online Amazon Sentiment Accuracy train: 0.860000 test: 0.795000\n" ] ], [ [ "## Exercise 1.5\nRun the SVM primal algorithm. Then, repeat the MaxEnt exercise now using SVMs, for several values of λ:", "_____no_output_____" ] ], [ [ "import lxmls.classifiers.svm as svmc\nsvm = svmc.SVM()\nsvm.regularizer = 1.0 # This is lambda\nparams_svm_sd = svm.train(sd.train_X,sd.train_y)\ny_pred_train = svm.test(sd.train_X,params_svm_sd)\nacc_train = svm.evaluate(sd.train_y, y_pred_train)\ny_pred_test = svm.test(sd.test_X,params_svm_sd)\nacc_test = svm.evaluate(sd.test_y, y_pred_test)\nprint(\"SVM Online Simple Dataset Accuracy train: {} test: {}\".format(acc_train,acc_test))", "Epochs: 0 Objective: 0.545698\nEpochs: 0 Accuracy: 0.940000\nEpochs: 1 Objective: 0.392799\nEpochs: 1 Accuracy: 0.980000\nEpochs: 2 Objective: 0.379019\nEpochs: 2 Accuracy: 0.980000\nEpochs: 3 Objective: 0.374794\nEpochs: 3 Accuracy: 0.960000\nEpochs: 4 Objective: 0.372746\nEpochs: 4 Accuracy: 0.940000\nEpochs: 5 Objective: 0.371416\nEpochs: 5 Accuracy: 0.940000\nEpochs: 6 Objective: 0.370676\nEpochs: 6 Accuracy: 0.940000\nEpochs: 7 Objective: 0.370046\nEpochs: 7 Accuracy: 0.940000\nEpochs: 8 Objective: 0.369570\nEpochs: 8 Accuracy: 0.940000\nEpochs: 9 Objective: 0.369198\nEpochs: 9 Accuracy: 0.940000\nSVM Online Simple Dataset Accuracy train: 0.94 test: 0.96\n" ], [ "fig, axis = sd.add_line(fig, axis, params_svm_sd, \"SVM\", \"yellow\")\nfig", "/home/siddharth/Projects/MSc_CL/Sem_2/CLI177_LR/Homework_3/lxmls-toolkit/lxmls/readers/simple_data_set.py:96: UserWarning: color is redundantly defined by the 'color' keyword argument and the fmt string \"g--\" (-> color='g'). The keyword argument will take precedence.\n  axis.plot(x, y_star, 'g--', c=colour, label=name, linewidth=2)\n" ], [ "params_svm_sc = svm.train(scr.train_X,scr.train_y)\ny_pred_train = svm.test(scr.train_X,params_svm_sc)\nacc_train = svm.evaluate(scr.train_y, y_pred_train)\ny_pred_test = svm.test(scr.test_X,params_svm_sc)\nacc_test = svm.evaluate(scr.test_y, y_pred_test)\nprint(\"SVM Online Amazon Sentiment Accuracy train: {} test: {}\".format(acc_train,acc_test))", "Epochs: 0 Objective: 1.512689\nEpochs: 0 Accuracy: 0.825000\nEpochs: 1 Objective: 0.800713\nEpochs: 1 Accuracy: 0.861250\nEpochs: 2 Objective: 0.765247\nEpochs: 2 Accuracy: 0.870625\nEpochs: 3 Objective: 0.748420\nEpochs: 3 Accuracy: 0.873750\nEpochs: 4 Objective: 0.742675\nEpochs: 4 Accuracy: 0.880000\nEpochs: 5 Objective: 0.736711\nEpochs: 5 Accuracy: 0.883125\nEpochs: 6 Objective: 0.733872\nEpochs: 6 Accuracy: 0.878750\nEpochs: 7 Objective: 0.730902\nEpochs: 7 Accuracy: 0.876875\nEpochs: 8 Objective: 0.729642\nEpochs: 8 Accuracy: 0.880625\nEpochs: 9 Objective: 0.728029\nEpochs: 9 Accuracy: 0.881875\nSVM Online Amazon Sentiment Accuracy train: 0.881875 test: 0.81\n" ] ], [ [ "## Exercise 1.6\nUsing the simple dataset, run the different algorithms while varying some characteristics of the data, such as the number of points, the variance (hence separability), and the class balance. Use the function run_all_classifiers in the file lxmls/run_all_classifiers.py, which receives a dataset and plots all decision boundaries and accuracies. What can you say about the methods when the amount of data increases? What about when the classes become too unbalanced?", "_____no_output_____" ] ], [ [ "from lxmls.run_all_classifiers import run_all_classifiers\nrun_all_classifiers(sd)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
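The lxmls notebook above uses `Perceptron.train` as a black box that prints per-round accuracies. Conceptually it is the classic mistake-driven update; here is a self-contained NumPy sketch of that rule for binary labels in {0, 1} (a paraphrase for illustration, not the toolkit's actual implementation):

```python
import numpy as np

def perceptron_train(X, y, n_rounds=10):
    """Classic perceptron: sweep the data, update the weights only on mistakes."""
    w = np.zeros(X.shape[1])
    b = 0.0
    for _ in range(n_rounds):
        for x_i, y_i in zip(X, y):
            y_hat = 1 if np.dot(w, x_i) + b >= 0 else 0
            if y_hat != y_i:            # mistake-driven update
                sign = 1 if y_i == 1 else -1
                w += sign * x_i
                b += sign
    return w, b
```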
cb5436a350091163578aecf0d9dce5df689ae783
14,495
ipynb
Jupyter Notebook
examples/jupyter-tutorials/API.ipynb
kashif/dynet
95145a3808c5dd54b17eb9ed109c5815142a9b6c
[ "Apache-2.0" ]
null
null
null
examples/jupyter-tutorials/API.ipynb
kashif/dynet
95145a3808c5dd54b17eb9ed109c5815142a9b6c
[ "Apache-2.0" ]
1
2020-07-01T18:31:27.000Z
2020-07-01T18:31:27.000Z
examples/jupyter-tutorials/API.ipynb
kashif/dynet
95145a3808c5dd54b17eb9ed109c5815142a9b6c
[ "Apache-2.0" ]
1
2018-12-26T19:04:47.000Z
2018-12-26T19:04:47.000Z
30.135135
128
0.494515
[ [ [ "## API tutorial", "_____no_output_____" ], [ "### Expression building\n\n(note: may have old API in some cases)", "_____no_output_____" ] ], [ [ "import dynet as dy\n\n## ==== Create a new computation graph\n# (it is a singleton, we have one at each stage.\n# dy.renew_cg() clears the current one and starts anew)\ndy.renew_cg()\n\n## ==== Creating Expressions from user input / constants.\nx = dy.scalarInput(value)\n\nv = dy.vecInput(dimension)\nv.set([1,2,3])\n\nz = dy.matInput(dim1, dim2)\n\n# for example:\nz1 = dy.matInput(2, 2)\nz1.set([1,2,3,4]) # Column major\n\n# Or directly from a numpy array\nz1 = inputTensor([[1,2],[3,4]]) # Row major\n\n## ==== We can take the value of an expression. \n# For complex expressions, this will run forward propagation.\nprint z.value() \nprint z.npvalue() # as numpy array\nprint v.vec_value() # as vector, if vector\nprint x.scalar_value() # as scalar, if scalar\nprint x.value() # choose the correct one\n\n## ==== Parameters\n# Parameters are things we tune during training.\n# Usually a matrix or a vector.\n\n# First we create a parameter collection and add the parameters to it.\nm = ParameterCollection() \npW = m.add_parameters((8,8)) # an 8x8 matrix\npb = m.add_parameters(8)\n\n# then we create an Expression out of the parameter collection's parameters\nW = dy.parameter(pW)\nb = dy.parameter(pb)\n\n## ===== Lookup parameters\n# Similar to parameters, but are representing a \"lookup table\"\n# that maps numbers to vectors.\n# These are used for embedding matrices.\n# for example, this will have VOCAB_SIZE rows, each of DIM dimensions.\nlp = m.add_lookup_parameters((VOCAB_SIZE, DIM))\n\n# lookup parameters can be initialized from an existing array, i.e:\n# m[\"lookup\"].init_from_array(wv)\n\ne5 = dy.lookup(lp, 5) # create an Expression from row 5.\ne5 = lp[5] # same\ne5c = dy.lookup(lp, 5, update=False) # as before, but don't update when optimizing.\n\ne5 = dy.lookup_batch(lp, [4, 5]) # create a batched Expression from rows 4 and 5.\ne5 = lp.batch([4, 5]) # same\n\ne5.set(9) # now the e5 expression contains row 9\ne5c.set(9) # ditto\n\n\n## ===== Combine expression into complex expressions.\n\n# Math \ne = e1 + e2 \ne = e1 * e2 # for vectors/matrices: matrix multiplication (like e1.dot(e2) in numpy)\ne = e1 - e2 \ne = -e1 \n\ne = dy.dot_product(e1, e2)\ne = dy.cmult(e1, e2) # component-wise multiply (like e1*e2 in numpy)\ne = dy.cdiv(e1, e2) # component-wise divide\ne = dy.colwise_add(e1, e2) # column-wise addition\n\n# Matrix Shapes\ne = dy.reshape(e1, new_dimension)\ne = dy.transpose(e1)\n\n# Per-element unary functions.\ne = dy.tanh(e1) \ne = dy.exp(e1)\ne = dy.log(e1)\ne = dy.logistic(e1) # Sigmoid(x)\ne = dy.rectify(e1) # Relu (= max(x,0))\ne = dy.softsign(e1) # x/(1+|x|)\n\n# softmaxes\ne = dy.softmax(e1)\ne = dy.log_softmax(e1, restrict=[]) # restrict is a set of indices. \n # if not empty, only entries in restrict are part \n # of softmax computation, others get 0.\n\n\ne = dy.sum_cols(e1)\n\n\n# Picking values from vector expressions\ne = dy.pick(e1, k) # k is unsigned integer, e1 is vector. return e1[k]\ne = e1[k] # same\n\ne = dy.pickrange(e1, k, v) # like python's e1[k:v] for lists. e1 is an Expression, k,v integers.\ne = e1[k:v] # same\n\ne = dy.pickneglogsoftmax(e1, k) # k is unsigned integer. 
equiv to: (pick(-log(dy.softmax(e1)), k))\n \n\n# Neural net stuff\ndy.noise(e1, stddev) # add a noise to each element from a gausian with standard-dev = stddev\ndy.dropout(e1, p) # apply dropout with probability p \n\n# functions over lists of expressions\ne = dy.esum([e1, e2, ...]) # sum\ne = dy.average([e1, e2, ...]) # average\ne = dy.concatenate_cols([e1, e2, ...]) # e1, e2,.. are column vectors. return a matrix. (sim to np.hstack([e1,e2,...])\ne = dy.concatenate([e1, e2, ...]) # concatenate\n\ne = dy.affine_transform([e0,e1,e2, ...]) # e = e0 + ((e1*e2) + (e3*e4) ...) \n\n## Loss functions\ne = dy.squared_distance(e1, e2)\ne = dy.l1_distance(e1, e2)\ne = dy.huber_distance(e1, e2, c=1.345)\n\n# e1 must be a scalar that is a value between 0 and 1\n# e2 (ty) must be a scalar that is a value between 0 and 1\n# e = ty * log(e1) + (1 - ty) * log(1 - e1)\ne = dy.binary_log_loss(e1, e2)\n\n# e1 is row vector or scalar\n# e2 is row vector or scalar\n# m is number\n# e = max(0, m - (e1 - e2))\ne = dy.pairwise_rank_loss(e1, e2, m=1.0) \n\n# Convolutions\n# e1 \\in R^{d x s} (input)\n# e2 \\in R^{d x m} (filter)\ne = dy.conv1d_narrow(e1, e2) # e = e1 *conv e2\ne = dy.conv1d_wide(e1, e2) # e = e1 *conv e2\ne = dy.filter1d_narrow(e1, e2) # e = e1 *filter e2\n\ne = dy.kmax_pooling(e1, k) # kmax-pooling operation (Kalchbrenner et al 2014)\ne = dy.kmh_ngram(e1, k) # \ne = dy.fold_rows(e1, nrows=2) #\n\n\n\n\n", "_____no_output_____" ] ], [ [ "### Recipe", "_____no_output_____" ] ], [ [ "import dynet as dy\n\n# create parameter collection\nm = dy.ParameterCollection()\n\n# add parameters to parameter collection\npW = m.add_parameters((10,30))\npB = m.add_parameters(10)\nlookup = m.add_lookup_parameters((500, 10))\nprint \"added\"\n\n# create trainer \ntrainer = dy.SimpleSGDTrainer(m)\n\n# Regularization is set via the --dynet-l2 commandline flag.\n# Learning rate parameters can be passed to the trainer:\n# alpha = 0.1 # learning rate\n# trainer = dy.SimpleSGDTrainer(m, e0=alpha)\n\n# function for graph creation\ndef create_network_return_loss(inputs, expected_output):\n \"\"\"\n inputs is a list of numbers\n \"\"\"\n dy.renew_cg()\n W = dy.parameter(pW) # from parameters to expressions\n b = dy.parameter(pB)\n emb_vectors = [lookup[i] for i in inputs]\n net_input = dy.concatenate(emb_vectors)\n net_output = dy.softmax( (W*net_input) + b)\n loss = -dy.log(dy.pick(net_output, expected_output))\n return loss\n\n# function for prediction\ndef create_network_return_best(inputs):\n \"\"\"\n inputs is a list of numbers\n \"\"\"\n dy.renew_cg()\n W = dy.parameter(pW)\n b = dy.parameter(pB)\n emb_vectors = [lookup[i] for i in inputs]\n net_input = dy.concatenate(emb_vectors)\n net_output = dy.softmax( (W*net_input) + b)\n return np.argmax(net_output.npvalue())\n\n\n# train network\nfor epoch in xrange(5):\n for inp,lbl in ( ([1,2,3],1), ([3,2,4],2) ):\n print inp, lbl\n loss = create_network_return_loss(inp, lbl)\n print loss.value() # need to run loss.value() for the forward prop\n loss.backward()\n trainer.update()\n\nprint create_network_return_best([1,2,3])\n", "added\n[1, 2, 3] 1\n2.71492385864\n[3, 2, 4] 2\n2.48228144646\n[1, 2, 3] 1\n2.00279903412\n[3, 2, 4] 2\n1.82602763176\n[1, 2, 3] 1\n1.44809651375\n[3, 2, 4] 2\n1.34181213379\n[1, 2, 3] 1\n1.03570735455\n[3, 2, 4] 2\n0.988352060318\n[1, 2, 3] 1\n0.744616270065\n[3, 2, 4] 2\n0.732948303223\n1\n" ] ], [ [ "### Recipe (using classes)", "_____no_output_____" ] ], [ [ "import dynet as dy\n# create parameter collection\nm = dy.ParameterCollection()\n\n# 
create a class encapsulating the network\nclass OurNetwork(object):\n # The init method adds parameters to the parameter collection.\n def __init__(self, pc):\n self.pW = pc.add_parameters((10,30))\n self.pB = pc.add_parameters(10)\n self.lookup = pc.add_lookup_parameters((500,10))\n \n # the __call__ method applies the network to an input\n def __call__(self, inputs):\n W = dy.parameter(self.pW)\n b = dy.parameter(self.pB)\n lookup = self.lookup\n emb_vectors = [lookup[i] for i in inputs]\n net_input = dy.concatenate(emb_vectors)\n net_output = dy.softmax( (W*net_input) + b)\n return net_output\n \n def create_network_return_loss(self, inputs, expected_output):\n dy.renew_cg()\n out = self(inputs)\n loss = -dy.log(dy.pick(out, expected_output))\n return loss\n \n def create_network_return_best(self, inputs):\n dy.renew_cg()\n out = self(inputs)\n return np.argmax(out.npvalue())\n \n \n# create network\nnetwork = OurNetwork(m)\n\n# create trainer \ntrainer = dy.SimpleSGDTrainer(m)\n \n# train network\nfor epoch in xrange(5):\n for inp,lbl in ( ([1,2,3],1), ([3,2,4],2) ):\n print inp, lbl\n loss = network.create_network_return_loss(inp, lbl)\n print loss.value() # need to run loss.value() for the forward prop\n loss.backward()\n trainer.update()\n\nprint\nprint network.create_network_return_best([1,2,3])\n", "[1, 2, 3] 1\n2.5900914669\n[3, 2, 4] 2\n2.00347089767\n[1, 2, 3] 1\n1.98409461975\n[3, 2, 4] 2\n1.50869822502\n[1, 2, 3] 1\n1.50195622444\n[3, 2, 4] 2\n1.12316584587\n[1, 2, 3] 1\n1.12293696404\n[3, 2, 4] 2\n0.831095397472\n[1, 2, 3] 1\n0.833912611008\n[3, 2, 4] 2\n0.61754822731\n\n1\n" ] ], [ [ "### or, alternatively, have the training outside of the network class", "_____no_output_____" ] ], [ [ "# create network\nnetwork = OurNetwork(m)\n\n# create trainer \ntrainer = dy.SimpleSGDTrainer(m)\n \n# train network\nfor epoch in xrange(5):\n for inp,lbl in ( ([1,2,3],1), ([3,2,4],2) ):\n print inp, lbl\n dy.renew_cg()\n out = network(inp)\n loss = -dy.log(dy.pick(out, lbl))\n print loss.value() # need to run loss.value() for the forward prop\n loss.backward()\n trainer.update()\n\nprint\nprint np.argmax(network([1,2,3]).npvalue())", "[1, 2, 3] 1\n3.63615298271\n[3, 2, 4] 2\n3.29473733902\n[1, 2, 3] 1\n2.81605744362\n[3, 2, 4] 2\n2.46070289612\n[1, 2, 3] 1\n2.13946056366\n[3, 2, 4] 2\n1.77259361744\n[1, 2, 3] 1\n1.57904195786\n[3, 2, 4] 2\n1.2269589901\n[1, 2, 3] 1\n1.13014268875\n[3, 2, 4] 2\n0.830479979515\n\n1\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
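The recipe cells in the notebook above are written for Python 2 (bare `print` statements, `xrange`). A minimal Python 3 sketch of the same toy training loop, using only the dynet calls already shown in the tutorial (parameter shapes, the (inputs, label) pairs, and the epoch count are kept from the original; everything else is illustrative, not part of the source notebook):

```python
import numpy as np
import dynet as dy

m = dy.ParameterCollection()
pW = m.add_parameters((10, 30))               # 10x30 weight matrix
pB = m.add_parameters(10)                     # bias vector
lookup = m.add_lookup_parameters((500, 10))   # embedding table
trainer = dy.SimpleSGDTrainer(m)

def network(inputs):
    """Build the graph for one example; inputs is a list of row indices."""
    W = dy.parameter(pW)                      # from parameters to expressions
    b = dy.parameter(pB)
    emb_vectors = [lookup[i] for i in inputs]
    net_input = dy.concatenate(emb_vectors)   # three 10-dim vectors -> 30-dim
    return dy.softmax((W * net_input) + b)

for epoch in range(5):                        # Python 3: range, print()
    for inp, lbl in (([1, 2, 3], 1), ([3, 2, 4], 2)):
        dy.renew_cg()                         # fresh graph per example
        loss = -dy.log(dy.pick(network(inp), lbl))
        print(inp, lbl, loss.value())         # loss.value() runs forward prop
        loss.backward()
        trainer.update()

dy.renew_cg()
print(np.argmax(network([1, 2, 3]).npvalue()))
```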
cb5440ec317439dd6dc21ca3a8d94bf7471255e9
189,948
ipynb
Jupyter Notebook
Examples/Classification/Classification.ipynb
sdrees/pyvtreat
fed9a653b2524ba04b1e92b1087e58bead25f99a
[ "BSD-3-Clause" ]
104
2019-07-21T06:15:02.000Z
2022-02-23T19:41:58.000Z
Examples/Classification/Classification.ipynb
arita37/pyvtreat
c32e7ce6db11a2ccdd63e545b25028cbec03a3ff
[ "BSD-3-Clause" ]
15
2019-08-12T09:59:40.000Z
2021-12-09T00:38:47.000Z
Examples/Classification/Classification.ipynb
arita37/pyvtreat
c32e7ce6db11a2ccdd63e545b25028cbec03a3ff
[ "BSD-3-Clause" ]
9
2019-08-15T13:29:15.000Z
2021-03-08T18:04:08.000Z
92.342246
25,004
0.781366
[ [ [ "# Using [vtreat](https://github.com/WinVector/pyvtreat) with Classification Problems\n\nNina Zumel and John Mount\nNovember 2019\n\nNote: this is a description of the [`Python` version of `vtreat`](https://github.com/WinVector/pyvtreat), the same example for the [`R` version of `vtreat`](https://github.com/WinVector/vtreat) can be found [here](https://github.com/WinVector/vtreat/blob/master/Examples/Classification/Classification.md).\n", "_____no_output_____" ], [ "## Preliminaries", "_____no_output_____" ], [ "Load modules/packages.", "_____no_output_____" ] ], [ [ "import pkg_resources\nimport pandas\nimport numpy\nimport numpy.random\nimport seaborn\nimport matplotlib.pyplot as plt\nimport vtreat\nimport vtreat.util\nimport wvpy.util\n\nnumpy.random.seed(2019)", "_____no_output_____" ] ], [ [ "Generate example data. \n\n* `y` is a noisy sinusoidal function of the variable `x`\n* `yc` is the output to be predicted: : whether `y` is > 0.5. \n* Input `xc` is a categorical variable that represents a discretization of `y`, along some `NaN`s\n* Input `x2` is a pure noise variable with no relationship to the output", "_____no_output_____" ] ], [ [ "def make_data(nrows):\n d = pandas.DataFrame({'x': 5*numpy.random.normal(size=nrows)})\n d['y'] = numpy.sin(d['x']) + 0.1*numpy.random.normal(size=nrows)\n d.loc[numpy.arange(3, 10), 'x'] = numpy.nan # introduce a nan level\n d['xc'] = ['level_' + str(5*numpy.round(yi/5, 1)) for yi in d['y']]\n d['x2'] = numpy.random.normal(size=nrows)\n d.loc[d['xc']=='level_-1.0', 'xc'] = numpy.nan # introduce a nan level\n d['yc'] = d['y']>0.5\n return d\n\nd = make_data(500)\n\nd.head()", "_____no_output_____" ], [ "outcome_name = 'yc' # outcome variable / column\noutcome_target = True # value we consider positive", "_____no_output_____" ] ], [ [ "### Some quick data exploration", "_____no_output_____" ], [ "Check how many levels `xc` has, and their distribution (including `NaN`)", "_____no_output_____" ] ], [ [ "d['xc'].unique()", "_____no_output_____" ], [ "d['xc'].value_counts(dropna=False)", "_____no_output_____" ] ], [ [ "Find the prevalence of `yc == True` (our chosen notion of \"positive\").", "_____no_output_____" ] ], [ [ "numpy.mean(d[outcome_name] == outcome_target)", "_____no_output_____" ] ], [ [ "Plot of `yc` versus `x`.", "_____no_output_____" ] ], [ [ "seaborn.lineplot(x='x', y='yc', data=d)", "_____no_output_____" ] ], [ [ "## Build a transform appropriate for classification problems.\n\nNow that we have the data, we want to treat it prior to modeling: we want training data where all the input variables are numeric and have no missing values or `NaN`s.\n\nFirst create the data treatment transform object, in this case a treatment for a binomial classification problem.", "_____no_output_____" ] ], [ [ "transform = vtreat.BinomialOutcomeTreatment(\n outcome_name=outcome_name, # outcome variable\n outcome_target=outcome_target, # outcome of interest\n cols_to_copy=['y'], # columns to \"carry along\" but not treat as input variables\n) ", "_____no_output_____" ] ], [ [ "Use the training data `d` to fit the transform and the return a treated training set: completely numeric, with no missing values.\nNote that for the training data `d`: `transform.fit_transform()` is **not** the same as `transform.fit().transform()`; the second call can lead to nested model bias in some situations, and is **not** recommended. 
\nFor other, later data, not seen during transform design `transform.transform(o)` is an appropriate step.", "_____no_output_____" ] ], [ [ "d_prepared = transform.fit_transform(d, d['yc'])", "_____no_output_____" ] ], [ [ "Now examine the score frame, which gives information about each new variable, including its type, which original variable it is derived from, its (cross-validated) correlation with the outcome, and its (cross-validated) significance as a one-variable linear model for the outcome. ", "_____no_output_____" ] ], [ [ "transform.score_frame_", "_____no_output_____" ] ], [ [ "Note that the variable `xc` has been converted to multiple variables: \n\n* an indicator variable for each possible level (`xc_lev_level_*`)\n* the value of a (cross-validated) one-variable model for `yc` as a function of `xc` (`xc_logit_code`)\n* a variable that returns how prevalent this particular value of `xc` is in the training data (`xc_prevalence_code`)\n* a variable indicating when `xc` was `NaN` in the original data (`xc_is_bad`, `x_is_bad`)\n\nAny or all of these new variables are available for downstream modeling. `x` doesn't show as exciting a significance as `xc`, as we are only checking linear relations, and `x` is related to `y` in a very non-linear way.\n\nThe `recommended` column indicates which variables are non constant (`has_range` == True) and have a significance value smaller than `default_threshold`. See the section *Deriving the Default Thresholds* below for the reasoning behind the default thresholds. Recommended columns are intended as advice about which variables appear to be most likely to be useful in a downstream model. This advice attempts to be conservative, to reduce the possibility of mistakenly eliminating variables that may in fact be useful (although, obviously, it can still mistakenly eliminate variables that have a real but non-linear relationship to the output, as is the case with `x`, in our example).", "_____no_output_____" ], [ "Let's look at the variables that are and are not recommended:", "_____no_output_____" ] ], [ [ "# recommended variables\ntransform.score_frame_.loc[transform.score_frame_['recommended'], ['variable']]", "_____no_output_____" ], [ "# not recommended variables\ntransform.score_frame_.loc[~transform.score_frame_['recommended'], ['variable']]", "_____no_output_____" ] ], [ [ "Notice that `d_prepared` only includes recommended variables (along with `y` and `yc`):", "_____no_output_____" ] ], [ [ "d_prepared.head()", "_____no_output_____" ] ], [ [ "This is `vtreat`s default behavior; to include all variables in the prepared data, set the parameter `filter_to_recommended` to False, as we show later, in the *Parameters for `BinomialOutcomeTreatment`* section below.", "_____no_output_____" ], [ "## A Closer Look at `logit_code` variables\n\nVariables of type `logit_code` are the outputs of a one-variable hierarchical logistic regression of a categorical variable (in our example, `xc`) against the centered output on the (cross-validated) treated training data. \n\nLet's see whether `xc_logit_code` makes a good one-variable model for `yc`. It has a large AUC:", "_____no_output_____" ] ], [ [ "wvpy.util.plot_roc(prediction=d_prepared['xc_logit_code'], \n istrue=d_prepared['yc'],\n title = 'performance of xc_logit_code variable')", "_____no_output_____" ] ], [ [ "This indicates that `xc_logit_code` is strongly predictive of the outcome. 
Negative values of `xc_logit_code` correspond strongly to negative outcomes, and positive values correspond strongly to positive outcomes.", "_____no_output_____" ] ], [ [ "wvpy.util.dual_density_plot(probs=d_prepared['xc_logit_code'], \n istrue=d_prepared['yc'])", "_____no_output_____" ] ], [ [ "The values of `xc_logit_code` are in \"link space\". We can often visualize the relationship a little better by converting the logistic score to a probability.", "_____no_output_____" ] ], [ [ "from scipy.special import expit # sigmoid\nfrom scipy.special import logit\n\noffset = logit(numpy.mean(d_prepared.yc))\nwvpy.util.dual_density_plot(probs=expit(d_prepared['xc_logit_code'] + offset),\n istrue=d_prepared['yc']) ", "_____no_output_____" ] ], [ [ "Variables of type `logit_code` are useful when dealing with categorical variables with a very large number of possible levels. For example, a categorical variable with 10,000 possible values potentially converts to 10,000 indicator variables, which may be unwieldy for some modeling methods. Using a single numerical variable of type `logit_code` may be a preferable alternative.", "_____no_output_____" ], [ "## Using the Prepared Data in a Model\n\nOf course, what we really want to do with the prepared training data is to fit a model jointly with all the (recommended) variables. \nLet's try fitting a logistic regression model to `d_prepared`.", "_____no_output_____" ] ], [ [ "import sklearn.linear_model\nimport seaborn\n\nnot_variables = ['y', 'yc', 'prediction']\nmodel_vars = [v for v in d_prepared.columns if v not in set(not_variables)]\n\nfitter = sklearn.linear_model.LogisticRegression()\nfitter.fit(d_prepared[model_vars], d_prepared['yc'])\n\n# now predict\nd_prepared['prediction'] = fitter.predict_proba(d_prepared[model_vars])[:, 1]\n\n# look at the ROC curve (on the training data)\nwvpy.util.plot_roc(prediction=d_prepared['prediction'], \n istrue=d_prepared['yc'],\n title = 'Performance of logistic regression model on training data')", "/Users/johnmount/opt/anaconda3/envs/ai_academy_3_7/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:432: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n FutureWarning)\n" ] ], [ [ "Now apply the model to new data.", "_____no_output_____" ] ], [ [ "# create the new data\ndtest = make_data(450)\n\n# prepare the new data with vtreat\ndtest_prepared = transform.transform(dtest)\n\n# apply the model to the prepared data\ndtest_prepared['prediction'] = fitter.predict_proba(dtest_prepared[model_vars])[:, 1]\n\nwvpy.util.plot_roc(prediction=dtest_prepared['prediction'], \n istrue=dtest_prepared['yc'],\n title = 'Performance of logistic regression model on test data')", "_____no_output_____" ] ], [ [ "## Parameters for `BinomialOutcomeTreatment`\n\nWe've tried to set the defaults for all parameters so that `vtreat` is usable out of the box for most applications.\n", "_____no_output_____" ] ], [ [ "vtreat.vtreat_parameters()", "_____no_output_____" ] ], [ [ "**use_hierarchical_estimate:**: When True, uses hierarchical smoothing when estimating `logit_code` variables; when False, uses unsmoothed logistic regression.\n\n**coders**: The types of synthetic variables that `vtreat` will (potentially) produce. See *Types of prepared variables* below.\n\n**filter_to_recommended**: When True, prepared data only includes variables marked as \"recommended\" in score frame. When False, prepared data includes all variables. 
See the Example below.\n\n**indicator_min_fraction**: For categorical variables, indicator variables (type `indicator_code`) are only produced for levels that are present at least `indicator_min_fraction` of the time. A consequence of this is that 1/`indicator_min_fraction` is the maximum number of indicators that will be produced for a given categorical variable. To make sure that *all* possible indicator variables are produced, set `indicator_min_fraction = 0`\n\n**cross_validation_plan**: The cross validation method used by `vtreat`. Most people won't have to change this.\n\n**cross_validation_k**: The number of folds to use for cross-validation\n\n**user_transforms**: For passing in user-defined transforms for custom data preparation. Won't be needed in most situations, but see [here](https://github.com/WinVector/pyvtreat/blob/master/Examples/UserCoders/UserCoders.ipynb) for an example of applying a GAM transform to input variables.\n\n**sparse_indicators**: When True, use a (Pandas) sparse representation for indicator variables. This representation is compatible with `sklearn`; however, it may not be compatible with other modeling packages. When False, use a dense representation.\n\n**missingness_imputation** The function or value that `vtreat` uses to impute or \"fill in\" missing numerical values. The default is `numpy.mean()`. To change the imputation function or use different functions/values for different columns, see the [Imputation example](https://github.com/WinVector/pyvtreat/blob/master/Examples/Imputation/Imputation.ipynb).\n\n### Example: Use all variables to model, not just recommended", "_____no_output_____" ] ], [ [ "transform_all = vtreat.BinomialOutcomeTreatment(\n outcome_name='yc', # outcome variable\n outcome_target=True, # outcome of interest\n cols_to_copy=['y'], # columns to \"carry along\" but not treat as input variables\n params = vtreat.vtreat_parameters({\n 'filter_to_recommended': False\n })\n) \n\ntransform_all.fit_transform(d, d['yc']).columns", "_____no_output_____" ], [ "transform_all.score_frame_", "_____no_output_____" ] ], [ [ "Note that the prepared data produced by `fit_transform()` includes all the variables, including those that were not marked as \"recommended\". 
\n\n## Types of prepared variables\n\n**clean_copy**: Produced from numerical variables: a clean numerical variable with no `NaNs` or missing values\n\n**indicator_code**: Produced from categorical variables, one for each (common) level: for each level of the variable, indicates if that level was \"on\"\n\n**prevalence_code**: Produced from categorical variables: indicates how often each level of the variable was \"on\"\n\n**logit_code**: Produced from categorical variables: score from a one-dimensional model of the centered output as a function of the variable\n\n**missing_indicator**: Produced for both numerical and categorical variables: an indicator variable that marks when the original variable was missing or `NaN`\n\n**deviation_code**: not used by `BinomialOutcomeTreatment`\n\n**impact_code**: not used by `BinomialOutcomeTreatment`\n\n### Example: Produce only a subset of variable types\n\nIn this example, suppose you only want to use indicators and continuous variables in your model; \nin other words, you only want to use variables of types (`clean_copy`, `missing_indicator`, and `indicator_code`), and no `logit_code` or `prevalence_code` variables.", "_____no_output_____" ] ], [ [ "transform_thin = vtreat.BinomialOutcomeTreatment(\n outcome_name='yc', # outcome variable\n outcome_target=True, # outcome of interest\n cols_to_copy=['y'], # columns to \"carry along\" but not treat as input variables\n params = vtreat.vtreat_parameters({\n 'filter_to_recommended': False,\n 'coders': {'clean_copy',\n 'missing_indicator',\n 'indicator_code',\n }\n })\n)\n\ntransform_thin.fit_transform(d, d['yc']).head()", "_____no_output_____" ], [ "transform_thin.score_frame_", "_____no_output_____" ] ], [ [ "## Deriving the Default Thresholds\n\nWhile machine learning algorithms are generally tolerant to a reasonable number of irrelevant or noise variables, too many irrelevant variables can lead to serious overfit; see [this article](http://www.win-vector.com/blog/2014/02/bad-bayes-an-example-of-why-you-need-hold-out-testing/) for an extreme example, one we call \"Bad Bayes\". The default threshold is an attempt to eliminate obviously irrelevant variables early.\n\nImagine that you have a pure noise dataset, where none of the *n* inputs are related to the output. If you treat each variable as a one-variable model for the output, and look at the significances of each model, these significance-values will be uniformly distributed in the range [0:1]. You want to pick a weakest possible significance threshold that eliminates as many noise variables as possible. A moment's thought should convince you that a threshold of *1/n* allows only one variable through, in expectation. \n\nThis leads to the general-case heuristic that a significance threshold of *1/n* on your variables should allow only one irrelevant variable through, in expectation (along with all the relevant variables). Hence, *1/n* used to be our recommended threshold, when we developed the R version of `vtreat`.\n\nWe noticed, however, that this biases the filtering against numerical variables, since there are at most two derived variables (of types *clean_copy* and *missing_indicator* for every numerical variable in the original data. Categorical variables, on the other hand, are expanded to many derived variables: several indicators (one for every common level), plus a *logit_code* and a *prevalence_code*. So we now reweight the thresholds. 
\n\nSuppose you have a (treated) data set with *ntreat* different types of `vtreat` variables (`clean_copy`, `indicator_code`, etc).\nThere are *nT* variables of type *T*. Then the default threshold for all the variables of type *T* is *1/(ntreat nT)*. This reweighting helps to reduce the bias against any particular type of variable. The heuristic is still that the set of recommended variables will allow at most one noise variable into the set of candidate variables.\n\nAs noted above, because `vtreat` estimates variable significances using linear methods by default, some variables with a non-linear relationship to the output may fail to pass the threshold. Setting the `filter_to_recommended` parameter to False will keep all derived variables in the treated frame, for the data scientist to filter (or not) as they will.\n\n", "_____no_output_____" ], [ "## Conclusion\n\nIn all cases (classification, regression, unsupervised, and multinomial classification) the intent is that `vtreat` transforms are essentially one liners.\n\nThe preparation commands are organized as follows:\n\n\n * **Regression**: [`Python` regression example](https://github.com/WinVector/pyvtreat/blob/master/Examples/Regression/Regression.md), [`R` regression example, fit/prepare interface](https://github.com/WinVector/vtreat/blob/master/Examples/Regression/Regression_FP.md), [`R` regression example, design/prepare/experiment interface](https://github.com/WinVector/vtreat/blob/master/Examples/Regression/Regression.md).\n * **Classification**: [`Python` classification example](https://github.com/WinVector/pyvtreat/blob/master/Examples/Classification/Classification.md), [`R` classification example, fit/prepare interface](https://github.com/WinVector/vtreat/blob/master/Examples/Classification/Classification_FP.md), [`R` classification example, design/prepare/experiment interface](https://github.com/WinVector/vtreat/blob/master/Examples/Classification/Classification.md).\n * **Unsupervised tasks**: [`Python` unsupervised example](https://github.com/WinVector/pyvtreat/blob/master/Examples/Unsupervised/Unsupervised.md), [`R` unsupervised example, fit/prepare interface](https://github.com/WinVector/vtreat/blob/master/Examples/Unsupervised/Unsupervised_FP.md), [`R` unsupervised example, design/prepare/experiment interface](https://github.com/WinVector/vtreat/blob/master/Examples/Unsupervised/Unsupervised.md).\n * **Multinomial classification**: [`Python` multinomial classification example](https://github.com/WinVector/pyvtreat/blob/master/Examples/Multinomial/MultinomialExample.md), [`R` multinomial classification example, fit/prepare interface](https://github.com/WinVector/vtreat/blob/master/Examples/Multinomial/MultinomialExample_FP.md), [`R` multinomial classification example, design/prepare/experiment interface](https://github.com/WinVector/vtreat/blob/master/Examples/Multinomial/MultinomialExample.md).\n\n\nSome `vtreat` common capabilities are documented here:\n\n * **Score Frame** [score_frame_](https://github.com/WinVector/pyvtreat/blob/master/Examples/ScoreFrame/ScoreFrame.md), using the `score_frame_` information.\n * **Cross Validation** [Customized Cross Plans](https://github.com/WinVector/pyvtreat/blob/master/Examples/CustomizedCrossPlan/CustomizedCrossPlan.md), controlling the cross validation plan.\n\nThese current revisions of the examples are designed to be small, yet complete. So as a set they have some overlap, but the user can rely mostly on a single example for a single task type.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ] ]
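The 1/(ntreat * nT) threshold heuristic described in the notebook above is easy to check numerically. A small sketch; the per-type variable counts below are hypothetical, chosen only to illustrate the formula (only the formula itself comes from the notebook):

```python
# Hypothetical counts of derived variables by vtreat type (illustrative only)
counts = {'clean_copy': 2, 'missing_indicator': 2,
          'indicator_code': 8, 'logit_code': 1, 'prevalence_code': 1}

ntreat = len(counts)  # number of distinct variable types present
thresholds = {t: 1.0 / (ntreat * nT) for t, nT in counts.items()}
for t, thr in thresholds.items():
    print(f"{t}: significance threshold = {thr:.4f}")

# Sanity check: summed over all variables, the expected number of
# pure-noise variables that pass is sum(nT * 1/(ntreat*nT)) = 1.
expected_noise = sum(nT * thresholds[t] for t, nT in counts.items())
assert abs(expected_noise - 1.0) < 1e-9
```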
cb5462216b1defcac82342e5ee88a3ed83269bd4
12,771
ipynb
Jupyter Notebook
Random Forest Pipeline.ipynb
Alberto-Moreno/alzheimer_meg
0dca81909bbe84445b4efa6115df68ce428681d4
[ "MIT" ]
null
null
null
Random Forest Pipeline.ipynb
Alberto-Moreno/alzheimer_meg
0dca81909bbe84445b4efa6115df68ce428681d4
[ "MIT" ]
null
null
null
Random Forest Pipeline.ipynb
Alberto-Moreno/alzheimer_meg
0dca81909bbe84445b4efa6115df68ce428681d4
[ "MIT" ]
null
null
null
29.908665
369
0.51523
[ [ [ "This notebook loads the Mean MEG Data and uses a Random Forest to classify between MCI and Control patients. It is a simple benchmark to measure the performance of other strategies. I have selected the random forest as the benchmarking model because it's simple to set up and doesn't have many hyperparameters to optimize (mtry being the most important one).\n\nAlso, it will be useful for getting comfortable with the MLR package's syntax, since until now I've always used caret.", "_____no_output_____" ] ], [ [ "library(tidyverse)\nlibrary(mlr)", "── Attaching packages ─────────────────────────────────────── tidyverse 1.2.1 ──\n✔ ggplot2 2.2.1 ✔ purrr 0.2.5\n✔ tibble 1.4.2 ✔ dplyr 0.7.6\n✔ tidyr 0.8.1 ✔ stringr 1.2.0\n✔ readr 1.1.1 ✔ forcats 0.3.0\n── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──\n✖ dplyr::filter() masks stats::filter()\n✖ dplyr::lag() masks stats::lag()\nLoading required package: ParamHelpers\nWarning message:\n“replacing previous import ‘BBmisc::isFALSE’ by ‘backports::isFALSE’ when loading ‘ParamHelpers’”" ], [ "mean_data <- readRDS(\"/home/rstudio/data/mean.rds\")\nmean_data$class <- factor(mean_data$class)", "_____no_output_____" ], [ "mean_pca <- as.data.frame(predict(prcomp(select(mean_data, -id, -class), scale = TRUE)))", "_____no_output_____" ], [ "mean_pca$class <- mean_data$class", "_____no_output_____" ], [ "task = makeClassifTask(id = \"rf_mean_meg_pca\", data = mean_data, target = \"class\")", "Warning message in makeTask(type = type, data = data, weights = weights, blocking = blocking, :\n“Provided data is not a pure data.frame but from class tbl_df, hence it will be converted.”" ] ], [ [ "Here, I create a randomForest learner and set the resampling strategy as Leave-One-Out cross-validation. 
", "_____no_output_____" ] ], [ [ "lrn = makeLearner(\"classif.randomForest\")\nrdesc = makeResampleDesc(method = \"LOO\")", "_____no_output_____" ] ], [ [ "Train the model", "_____no_output_____" ] ], [ [ "r = resample(learner = lrn, task = task, resampling = rdesc)", "Resampling: LOO\nMeasures: mmce \n[Resample] iter 1: 0.0000000 \n[Resample] iter 2: 1.0000000 \n[Resample] iter 3: 1.0000000 \n[Resample] iter 4: 0.0000000 \n[Resample] iter 5: 1.0000000 \n[Resample] iter 6: 1.0000000 \n[Resample] iter 7: 0.0000000 \n[Resample] iter 8: 0.0000000 \n[Resample] iter 9: 0.0000000 \n[Resample] iter 10: 0.0000000 \n[Resample] iter 11: 0.0000000 \n[Resample] iter 12: 1.0000000 \n[Resample] iter 13: 0.0000000 \n[Resample] iter 14: 0.0000000 \n[Resample] iter 15: 0.0000000 \n[Resample] iter 16: 0.0000000 \n[Resample] iter 17: 1.0000000 \n[Resample] iter 18: 0.0000000 \n[Resample] iter 19: 0.0000000 \n[Resample] iter 20: 0.0000000 \n[Resample] iter 21: 0.0000000 \n[Resample] iter 22: 1.0000000 \n[Resample] iter 23: 0.0000000 \n[Resample] iter 24: 0.0000000 \n[Resample] iter 25: 1.0000000 \n[Resample] iter 26: 0.0000000 \n[Resample] iter 27: 0.0000000 \n[Resample] iter 28: 0.0000000 \n[Resample] iter 29: 1.0000000 \n[Resample] iter 30: 0.0000000 \n[Resample] iter 31: 0.0000000 \n[Resample] iter 32: 1.0000000 \n[Resample] iter 33: 1.0000000 \n[Resample] iter 34: 1.0000000 \n[Resample] iter 35: 1.0000000 \n[Resample] iter 36: 0.0000000 \n[Resample] iter 37: 1.0000000 \n[Resample] iter 38: 0.0000000 \n[Resample] iter 39: 0.0000000 \n[Resample] iter 40: 0.0000000 \n[Resample] iter 41: 0.0000000 \n[Resample] iter 42: 0.0000000 \n[Resample] iter 43: 0.0000000 \n[Resample] iter 44: 0.0000000 \n[Resample] iter 45: 0.0000000 \n[Resample] iter 46: 0.0000000 \n[Resample] iter 47: 0.0000000 \n[Resample] iter 48: 0.0000000 \n[Resample] iter 49: 1.0000000 \n[Resample] iter 50: 0.0000000 \n[Resample] iter 51: 1.0000000 \n[Resample] iter 52: 1.0000000 \n[Resample] iter 53: 1.0000000 \n[Resample] iter 54: 0.0000000 \n[Resample] iter 55: 0.0000000 \n[Resample] iter 56: 1.0000000 \n[Resample] iter 57: 0.0000000 \n[Resample] iter 58: 0.0000000 \n[Resample] iter 59: 1.0000000 \n[Resample] iter 60: 0.0000000 \n[Resample] iter 61: 0.0000000 \n[Resample] iter 62: 0.0000000 \n[Resample] iter 63: 0.0000000 \n[Resample] iter 64: 0.0000000 \n[Resample] iter 65: 0.0000000 \n[Resample] iter 66: 0.0000000 \n[Resample] iter 67: 0.0000000 \n[Resample] iter 68: 0.0000000 \n[Resample] iter 69: 0.0000000 \n[Resample] iter 70: 1.0000000 \n[Resample] iter 71: 1.0000000 \n[Resample] iter 72: 0.0000000 \n[Resample] iter 73: 0.0000000 \n[Resample] iter 74: 0.0000000 \n[Resample] iter 75: 0.0000000 \n[Resample] iter 76: 1.0000000 \n[Resample] iter 77: 0.0000000 \n[Resample] iter 78: 0.0000000 \n[Resample] iter 79: 0.0000000 \n[Resample] iter 80: 0.0000000 \n[Resample] iter 81: 1.0000000 \n[Resample] iter 82: 1.0000000 \n[Resample] iter 83: 0.0000000 \n[Resample] iter 84: 1.0000000 \n[Resample] iter 85: 0.0000000 \n[Resample] iter 86: 0.0000000 \n[Resample] iter 87: 1.0000000 \n[Resample] iter 88: 1.0000000 \n[Resample] iter 89: 1.0000000 \n[Resample] iter 90: 0.0000000 \n[Resample] iter 91: 1.0000000 \n[Resample] iter 92: 0.0000000 \n[Resample] iter 93: 0.0000000 \n[Resample] iter 94: 1.0000000 \n[Resample] iter 95: 0.0000000 \n[Resample] iter 96: 1.0000000 \n[Resample] iter 97: 0.0000000 \n[Resample] iter 98: 0.0000000 \n[Resample] iter 99: 1.0000000 \n[Resample] iter 100: 0.0000000 \n[Resample] iter 101: 1.0000000 \n[Resample] iter 102: 
1.0000000 \n[Resample] iter 103: 1.0000000 \n[Resample] iter 104: 0.0000000 \n[Resample] iter 105: 0.0000000 \n[Resample] iter 106: 0.0000000 \n[Resample] iter 107: 0.0000000 \n[Resample] iter 108: 1.0000000 \n[Resample] iter 109: 0.0000000 \n[Resample] iter 110: 1.0000000 \n[Resample] iter 111: 0.0000000 \n[Resample] iter 112: 0.0000000 \n[Resample] iter 113: 0.0000000 \n[Resample] iter 114: 0.0000000 \n[Resample] iter 115: 1.0000000 \n[Resample] iter 116: 1.0000000 \n[Resample] iter 117: 0.0000000 \n[Resample] iter 118: 1.0000000 \n[Resample] iter 119: 0.0000000 \n[Resample] iter 120: 1.0000000 \n[Resample] iter 121: 0.0000000 \n[Resample] iter 122: 0.0000000 \n[Resample] iter 123: 1.0000000 \n[Resample] iter 124: 1.0000000 \n[Resample] iter 125: 0.0000000 \n[Resample] iter 126: 1.0000000 \n[Resample] iter 127: 0.0000000 \n[Resample] iter 128: 0.0000000 \n[Resample] iter 129: 0.0000000 \n[Resample] iter 130: 0.0000000 \n[Resample] iter 131: 1.0000000 \n[Resample] iter 132: 1.0000000 \n\n\nAggregated Result: mmce.test.mean=0.3560606\n\n\n" ] ], [ [ "The mean MMCE is 0.3560606. Now, we will look at the Confusion Matrix (class 2 is MCI):", "_____no_output_____" ] ], [ [ "print(\"Confusion matrix:\")\nconfusion_matrix <- calculateConfusionMatrix(r$pred)\nprint(confusion_matrix)\nprint(\"Accuracy:\")\nsum(r$pred$data$truth == r$pred$data$response)/length(r$pred$data$truth)\nt(confusion_matrix$result)", "[1] \"Confusion matrix:\"\n predicted\ntrue 1 2 -err.-\n 1 25 29 29\n 2 18 60 18\n -err.- 18 29 47\n" ], [ "TP <- confusion_matrix$result[2,2]\nFP <- confusion_matrix$result[1,2]\nTN <- confusion_matrix$result[1,1]\nFN <- confusion_matrix$result[2,1]\n\nprecision <- TP / (TP + FP)\nrecall <- TP / (TP + FN)\nf1 <- 2 * precision * recall / (precision + recall)\nf1\nprecision\nrecall", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
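The precision/recall/F1 arithmetic in the last cell above can be checked by hand from the printed LOO confusion matrix (positive class 2 = MCI, so TP = 60, FP = 29, TN = 25, FN = 18, all read directly off the notebook's output). A quick sketch of the same calculation in Python:

```python
# Values read off the LOO confusion matrix above (positive class = 2, MCI)
TP, FP, TN, FN = 60, 29, 25, 18

accuracy  = (TP + TN) / (TP + TN + FP + FN)         # 85/132 ~ 0.644 = 1 - MMCE
precision = TP / (TP + FP)                          # 60/89  ~ 0.674
recall    = TP / (TP + FN)                          # 60/78  ~ 0.769
f1 = 2 * precision * recall / (precision + recall)  # ~ 0.719

print(f"accuracy={accuracy:.4f} precision={precision:.4f} "
      f"recall={recall:.4f} f1={f1:.4f}")
```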
cb547db272258d705d66362a37f52e91eb1588aa
58,577
ipynb
Jupyter Notebook
workshops/Advanced_Convolutional_Neural_Networks/Idiomatic Programmer - handbook 1 - Codelab 4.ipynb
Acidburn0zzz/keras-idiomatic-programmer
715c6f16c458ab2129033b5c9a0e394c5cc540e2
[ "Apache-2.0" ]
833
2019-03-13T16:05:54.000Z
2022-03-29T07:27:19.000Z
workshops/Advanced_Convolutional_Neural_Networks/Idiomatic Programmer - handbook 1 - Codelab 4.ipynb
jimypeter/keras-idiomatic-programmer
07f45374074afbc7d06854cbda030ff431e6c0bb
[ "Apache-2.0" ]
7
2019-06-07T17:03:23.000Z
2020-12-07T02:37:00.000Z
workshops/Advanced_Convolutional_Neural_Networks/Idiomatic Programmer - handbook 1 - Codelab 4.ipynb
jimypeter/keras-idiomatic-programmer
07f45374074afbc7d06854cbda030ff431e6c0bb
[ "Apache-2.0" ]
290
2019-04-28T19:33:52.000Z
2022-02-22T13:08:05.000Z
64.941242
254
0.615327
[ [ [ "# Copyright 2019 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.", "_____no_output_____" ] ], [ [ "<a target=\"_blank\" href=\"https://colab.research.google.com/github/GoogleCloudPlatform/keras-idiomatic-programmer/blob/master/workshops/Advanced_Convolutional_Neural_Networks/Idiomatic%20Programmer%20-%20handbook%201%20-%20Codelab%204.ipynb\">\n<img src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" />Run in Google Colab</a>", "_____no_output_____" ], [ "# Idiomatic Programmer Code Labs\n\n## Code Labs #4 - Get Familiar with Advanced CNN Designs\n\n## Prerequisites:\n\n 1. Familiar with Python\n 2. Completed Handbook 1/Part 4: Advanced Convolutional Neural Networks\n\n## Objectives:\n\n 1. Architecture Changes - Pre-stems\n 2. Dense connections across sublayers in DenseNet\n 3. Xception Redesigned Macro-Architecture for CNN", "_____no_output_____" ], [ "## Pre-Stem Groups for Handling Different Input Sizes\n\nLet's create a pre-stem to handle an input size different from what the neural network was designed for.\n\nWe will use these approaches:\n\n 1. Calculate the difference in size between the expected input and the actual size of\n the input (in our case we are assuming actual size less than expected size).\n A. Expected = (230, 230, 3)\n B. Actual = (224, 224, 3)\n 2. Pad the inputs to fit into the expected size.\n \nYou fill in the blanks (replace the ??), make sure it passes the Python interpreter, and then verify its correctness with the summary output.\n\nYou will need to:\n\n 1. Set the padding of the image prior to the first convolution.", "_____no_output_____" ] ], [ [ "from keras import layers, Input\n\n# Not the input shape expected by the stem (which is (230, 230, 3))\ninputs = Input(shape=(224, 224, 3))\n\n# Add a pre-stem and pad (224, 224, 3) to (230, 230, 3)\n# HINT: Since the pad is on both sides (left/right, top/bottom) you want to divide the\n# difference by two (half goes to the left, half goes to the right, etc)\ninputs = layers.ZeroPadding2D(??)(inputs)\n\n# This stem's expected shape is (230, 230, 3)\nx = layers.Conv2D(64, (7, 7), strides=(2,2))(inputs)\nx = layers.BatchNormalization()(x)\nx = layers.ReLU()(x)", "_____no_output_____" ] ], [ [ "## Verify that actual is padded to expected:\n\nYou should get the following output for the shapes of the inputs and outputs:\n\n```\ninputs (?, 230, 230, 3)\noutputs (?, 112, 112, 64)\n```", "_____no_output_____" ] ], [ [ "# this will output: (?, 230, 230, 3)\nprint(\"inputs\", inputs.shape)\n\n# this will output: (?, 112, 112, 64)\nprint(\"outputs\", x.shape)", "_____no_output_____" ] ], [ [ "## DenseNet as Functional API\n\nLet's create a DenseNet-121:\n\nWe will use these approaches:\n\n 1. Add a pre-stem step of padding by 1 pixel so a 230x230x3 input results in 7x7 \n feature maps at the global average (bottleneck) layer.\n 2. Use average pooling (subsampling) in transition blocks.\n 3. 
Accumulate feature maps through residual blocks by concatenating the input to the \n output, and making that the new output.\n 4. Use compression to reduce feature map sizes between dense blocks.\n \nYou will need to:\n\n 1. Set the padding in the stem group.\n 2. Concatenate the input and output at each residual block.\n 3. Set the compression (reduction) of filters in the transition block.\n 4. Use average pooling in the transition blocks.", "_____no_output_____" ] ], [ [ "from keras import layers, Input, Model\n\ndef stem(inputs):\n    \"\"\" The Stem Convolution Group\n        inputs : input tensor\n    \"\"\"\n    # First large convolution for abstract features for input 230 x 230 and output\n    # 112 x 112\n    x = layers.Conv2D(64, (7, 7), strides=2)(inputs)\n    x = layers.BatchNormalization()(x)\n    x = layers.ReLU()(x)\n    # Add padding so when downsampling we fit shape 56 x 56\n    # Hint: we want to pad one pixel all around.\n    x = layers.ZeroPadding2D(padding=(??, ??))(x)\n    x = layers.MaxPooling2D((3, 3), strides=2)(x)\n    return x\n\ndef dense_block(x, nblocks, nb_filters):\n    \"\"\" Construct a Dense Block\n        x         : input layer\n        nblocks   : number of residual blocks in dense block\n        nb_filters: number of filters in convolution layer in residual block\n    \"\"\"\n    # Construct a group of residual blocks\n    for _ in range(nblocks):\n        x = residual_block(x, nb_filters)\n    return x\n\ndef residual_block(x, nb_filters):\n    \"\"\" Construct Residual Block\n        x         : input layer\n        nb_filters: number of filters in convolution layer in residual block\n    \"\"\"\n    shortcut = x # remember input tensor into residual block\n\n    # Bottleneck convolution, expand filters by 4 (DenseNet-B)\n    x = layers.Conv2D(4 * nb_filters, (1, 1), strides=(1, 1))(x)\n    x = layers.BatchNormalization()(x)\n    x = layers.ReLU()(x)\n\n    # 3 x 3 convolution with padding=same to preserve same shape of feature maps\n    x = layers.Conv2D(nb_filters, (3, 3), strides=(1, 1), padding='same')(x)\n    x = layers.BatchNormalization()(x)\n    x = layers.ReLU()(x)\n\n    # Concatenate the input (identity) with the output of the residual block\n    # Concatenation (vs. merging) provides Feature Reuse between layers\n    # HINT: Use a list which includes the remembered input and the output from the residual block - which becomes the new output\n    x = layers.concatenate([??])\n    return x\n\ndef trans_block(x, reduce_by):\n    \"\"\" Construct a Transition Block\n        x        : input layer\n        reduce_by: percentage of reduction of feature maps\n    \"\"\"\n    # Reduce (compression) the number of feature maps (DenseNet-C)\n    # shape[n] returns a class object. We use int() to cast it into the dimension\n    # size\n    # HINT: the compression is a percentage (~0.5) that was passed as a parameter to this function\n    nb_filters = int( int(x.shape[3]) * ?? 
)\n\n # Bottleneck convolution\n x = layers.Conv2D(nb_filters, (1, 1), strides=(1, 1))(x)\n x = layers.BatchNormalization()(x)\n x = layers.ReLU()(x)\n\n # Use mean value (average) instead of max value sampling when pooling\n # reduce by 75%\n # HINT: instead of Max Pooling (downsampling) we use Average Pooling (subsampling) \n x = layers.??Pooling2D((2, 2), strides=(2, 2))(x)\n return x\n\ninputs = Input(shape=(230, 230, 3))\n\n# Create the Stem Convolution Group\nx = stem(inputs)\n\n# number of residual blocks in each dense block\nblocks = [6, 12, 24, 16]\n\n# pop off the list the last dense block\nlast = blocks.pop()\n\n# amount to reduce feature maps by (compression) during transition blocks\nreduce_by = 0.5\n\n# number of filters in a convolution block within a residual block\nnb_filters = 32\n\n# Create the dense blocks and interceding transition blocks\nfor nblocks in blocks:\n x = dense_block(x, nblocks, nb_filters)\n x = trans_block(x, reduce_by)\n\n# Add the last dense block w/o a following transition block\nx = dense_block(x, last, nb_filters)\n\n# Classifier\n# Global Average Pooling will flatten the 7x7 feature maps into 1D feature maps\nx = layers.GlobalAveragePooling2D()(x)\n# Fully connected output layer (classification)\noutputs = x = layers.Dense(1000, activation='softmax')(x)\n\nmodel = Model(inputs, outputs)", "_____no_output_____" ] ], [ [ "### Verify the model architecture using summary method\n\nIt should look like below:\n\n```\nLayer (type) Output Shape Param # Connected to \n==================================================================================================\ninput_3 (InputLayer) (None, 230, 230, 3) 0 \n__________________________________________________________________________________________________\nconv2d_241 (Conv2D) (None, 112, 112, 64) 9472 input_3[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_241 (BatchN (None, 112, 112, 64) 256 conv2d_241[0][0] \n__________________________________________________________________________________________________\nre_lu_241 (ReLU) (None, 112, 112, 64) 0 batch_normalization_241[0][0] \n__________________________________________________________________________________________________\nzero_padding2d_2 (ZeroPadding2D (None, 114, 114, 64) 0 re_lu_241[0][0] \n__________________________________________________________________________________________________\nmax_pooling2d_3 (MaxPooling2D) (None, 56, 56, 64) 0 zero_padding2d_2[0][0] \n__________________________________________________________________________________________________\nconv2d_242 (Conv2D) (None, 56, 56, 128) 8320 max_pooling2d_3[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_242 (BatchN (None, 56, 56, 128) 512 conv2d_242[0][0] \n__________________________________________________________________________________________________\nre_lu_242 (ReLU) (None, 56, 56, 128) 0 batch_normalization_242[0][0] \n__________________________________________________________________________________________________\nconv2d_243 (Conv2D) (None, 56, 56, 32) 36896 re_lu_242[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_243 (BatchN (None, 56, 56, 32) 128 conv2d_243[0][0] \n__________________________________________________________________________________________________\nre_lu_243 (ReLU) (None, 56, 56, 32) 0 batch_normalization_243[0][0] 
\n__________________________________________________________________________________________________\nconcatenate_117 (Concatenate) (None, 56, 56, 96) 0 max_pooling2d_3[0][0] \n re_lu_243[0][0] \n__________________________________________________________________________________________________\nconv2d_244 (Conv2D) (None, 56, 56, 128) 12416 concatenate_117[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_244 (BatchN (None, 56, 56, 128) 512 conv2d_244[0][0] \n__________________________________________________________________________________________________\nre_lu_244 (ReLU) (None, 56, 56, 128) 0 batch_normalization_244[0][0] \n__________________________________________________________________________________________________\nconv2d_245 (Conv2D) (None, 56, 56, 32) 36896 re_lu_244[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_245 (BatchN (None, 56, 56, 32) 128 conv2d_245[0][0] \n__________________________________________________________________________________________________\nre_lu_245 (ReLU) (None, 56, 56, 32) 0 batch_normalization_245[0][0] \n__________________________________________________________________________________________________\nconcatenate_118 (Concatenate) (None, 56, 56, 128) 0 concatenate_117[0][0] \n re_lu_245[0][0] \n__________________________________________________________________________________________________\nconv2d_246 (Conv2D) (None, 56, 56, 128) 16512 concatenate_118[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_246 (BatchN (None, 56, 56, 128) 512 conv2d_246[0][0] \n__________________________________________________________________________________________________\nre_lu_246 (ReLU) (None, 56, 56, 128) 0 batch_normalization_246[0][0] \n__________________________________________________________________________________________________\nconv2d_247 (Conv2D) (None, 56, 56, 32) 36896 re_lu_246[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_247 (BatchN (None, 56, 56, 32) 128 conv2d_247[0][0] \n__________________________________________________________________________________________________\nre_lu_247 (ReLU) (None, 56, 56, 32) 0 batch_normalization_247[0][0] \n__________________________________________________________________________________________________\nconcatenate_119 (Concatenate) (None, 56, 56, 160) 0 concatenate_118[0][0] \n re_lu_247[0][0] \n__________________________________________________________________________________________________\nconv2d_248 (Conv2D) (None, 56, 56, 128) 20608 concatenate_119[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_248 (BatchN (None, 56, 56, 128) 512 conv2d_248[0][0] \n__________________________________________________________________________________________________\nre_lu_248 (ReLU) (None, 56, 56, 128) 0 batch_normalization_248[0][0] \n__________________________________________________________________________________________________\nconv2d_249 (Conv2D) (None, 56, 56, 32) 36896 re_lu_248[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_249 (BatchN (None, 56, 56, 32) 128 conv2d_249[0][0] 
\n__________________________________________________________________________________________________\nre_lu_249 (ReLU) (None, 56, 56, 32) 0 batch_normalization_249[0][0] \n__________________________________________________________________________________________________\nconcatenate_120 (Concatenate) (None, 56, 56, 192) 0 concatenate_119[0][0] \n re_lu_249[0][0] \n__________________________________________________________________________________________________\nconv2d_250 (Conv2D) (None, 56, 56, 128) 24704 concatenate_120[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_250 (BatchN (None, 56, 56, 128) 512 conv2d_250[0][0] \n__________________________________________________________________________________________________\nre_lu_250 (ReLU) (None, 56, 56, 128) 0 batch_normalization_250[0][0] \n__________________________________________________________________________________________________\nconv2d_251 (Conv2D) (None, 56, 56, 32) 36896 re_lu_250[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_251 (BatchN (None, 56, 56, 32) 128 conv2d_251[0][0] \n__________________________________________________________________________________________________\nre_lu_251 (ReLU) (None, 56, 56, 32) 0 batch_normalization_251[0][0] \n__________________________________________________________________________________________________\nconcatenate_121 (Concatenate) (None, 56, 56, 224) 0 concatenate_120[0][0] \n re_lu_251[0][0] \n__________________________________________________________________________________________________\nconv2d_252 (Conv2D) (None, 56, 56, 128) 28800 concatenate_121[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_252 (BatchN (None, 56, 56, 128) 512 conv2d_252[0][0] \n__________________________________________________________________________________________________\nre_lu_252 (ReLU) (None, 56, 56, 128) 0 batch_normalization_252[0][0] \n__________________________________________________________________________________________________\nconv2d_253 (Conv2D) (None, 56, 56, 32) 36896 re_lu_252[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_253 (BatchN (None, 56, 56, 32) 128 conv2d_253[0][0] \n__________________________________________________________________________________________________\nre_lu_253 (ReLU) (None, 56, 56, 32) 0 batch_normalization_253[0][0] \n__________________________________________________________________________________________________\nconcatenate_122 (Concatenate) (None, 56, 56, 256) 0 concatenate_121[0][0] \n re_lu_253[0][0] \n__________________________________________________________________________________________________\nconv2d_254 (Conv2D) (None, 56, 56, 128) 32896 concatenate_122[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_254 (BatchN (None, 56, 56, 128) 512 conv2d_254[0][0] \n__________________________________________________________________________________________________\nre_lu_254 (ReLU) (None, 56, 56, 128) 0 batch_normalization_254[0][0] \n\nREMOVED for BREVITY ...\n__________________________________________________________________________________________________\naverage_pooling2d_9 (AveragePoo (None, 7, 7, 512) 0 re_lu_328[0][0] 
\n__________________________________________________________________________________________________\nconv2d_329 (Conv2D) (None, 7, 7, 128) 65664 average_pooling2d_9[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_329 (BatchN (None, 7, 7, 128) 512 conv2d_329[0][0] \n__________________________________________________________________________________________________\nre_lu_329 (ReLU) (None, 7, 7, 128) 0 batch_normalization_329[0][0] \n__________________________________________________________________________________________________\nconv2d_330 (Conv2D) (None, 7, 7, 32) 36896 re_lu_329[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_330 (BatchN (None, 7, 7, 32) 128 conv2d_330[0][0] \n__________________________________________________________________________________________________\nre_lu_330 (ReLU) (None, 7, 7, 32) 0 batch_normalization_330[0][0] \n__________________________________________________________________________________________________\nconcatenate_159 (Concatenate) (None, 7, 7, 544) 0 average_pooling2d_9[0][0] \n re_lu_330[0][0] \n__________________________________________________________________________________________________\nconv2d_331 (Conv2D) (None, 7, 7, 128) 69760 concatenate_159[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_331 (BatchN (None, 7, 7, 128) 512 conv2d_331[0][0] \n__________________________________________________________________________________________________\nre_lu_331 (ReLU) (None, 7, 7, 128) 0 batch_normalization_331[0][0] \n__________________________________________________________________________________________________\nconv2d_332 (Conv2D) (None, 7, 7, 32) 36896 re_lu_331[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_332 (BatchN (None, 7, 7, 32) 128 conv2d_332[0][0] \n__________________________________________________________________________________________________\nre_lu_332 (ReLU) (None, 7, 7, 32) 0 batch_normalization_332[0][0] \n__________________________________________________________________________________________________\nconcatenate_160 (Concatenate) (None, 7, 7, 576) 0 concatenate_159[0][0] \n re_lu_332[0][0] \n__________________________________________________________________________________________________\nconv2d_333 (Conv2D) (None, 7, 7, 128) 73856 concatenate_160[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_333 (BatchN (None, 7, 7, 128) 512 conv2d_333[0][0] \n__________________________________________________________________________________________________\nre_lu_333 (ReLU) (None, 7, 7, 128) 0 batch_normalization_333[0][0] \n__________________________________________________________________________________________________\nconv2d_334 (Conv2D) (None, 7, 7, 32) 36896 re_lu_333[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_334 (BatchN (None, 7, 7, 32) 128 conv2d_334[0][0] \n__________________________________________________________________________________________________\nre_lu_334 (ReLU) (None, 7, 7, 32) 0 batch_normalization_334[0][0] \n__________________________________________________________________________________________________\nconcatenate_161 
(Concatenate) (None, 7, 7, 608) 0 concatenate_160[0][0] \n re_lu_334[0][0] \n__________________________________________________________________________________________________\nconv2d_335 (Conv2D) (None, 7, 7, 128) 77952 concatenate_161[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_335 (BatchN (None, 7, 7, 128) 512 conv2d_335[0][0] \n__________________________________________________________________________________________________\nre_lu_335 (ReLU) (None, 7, 7, 128) 0 batch_normalization_335[0][0] \n__________________________________________________________________________________________________\nconv2d_336 (Conv2D) (None, 7, 7, 32) 36896 re_lu_335[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_336 (BatchN (None, 7, 7, 32) 128 conv2d_336[0][0] \n__________________________________________________________________________________________________\nre_lu_336 (ReLU) (None, 7, 7, 32) 0 batch_normalization_336[0][0] \n__________________________________________________________________________________________________\nconcatenate_162 (Concatenate) (None, 7, 7, 640) 0 concatenate_161[0][0] \n re_lu_336[0][0] \n__________________________________________________________________________________________________\nconv2d_337 (Conv2D) (None, 7, 7, 128) 82048 concatenate_162[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_337 (BatchN (None, 7, 7, 128) 512 conv2d_337[0][0] \n__________________________________________________________________________________________________\nre_lu_337 (ReLU) (None, 7, 7, 128) 0 batch_normalization_337[0][0] \n__________________________________________________________________________________________________\nconv2d_338 (Conv2D) (None, 7, 7, 32) 36896 re_lu_337[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_338 (BatchN (None, 7, 7, 32) 128 conv2d_338[0][0] \n__________________________________________________________________________________________________\nre_lu_338 (ReLU) (None, 7, 7, 32) 0 batch_normalization_338[0][0] \n__________________________________________________________________________________________________\nconcatenate_163 (Concatenate) (None, 7, 7, 672) 0 concatenate_162[0][0] \n re_lu_338[0][0] \n__________________________________________________________________________________________________\nconv2d_339 (Conv2D) (None, 7, 7, 128) 86144 concatenate_163[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_339 (BatchN (None, 7, 7, 128) 512 conv2d_339[0][0] \n__________________________________________________________________________________________________\nre_lu_339 (ReLU) (None, 7, 7, 128) 0 batch_normalization_339[0][0] \n__________________________________________________________________________________________________\nconv2d_340 (Conv2D) (None, 7, 7, 32) 36896 re_lu_339[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_340 (BatchN (None, 7, 7, 32) 128 conv2d_340[0][0] \n__________________________________________________________________________________________________\nre_lu_340 (ReLU) (None, 7, 7, 32) 0 batch_normalization_340[0][0] 
\n__________________________________________________________________________________________________\nconcatenate_164 (Concatenate) (None, 7, 7, 704) 0 concatenate_163[0][0] \n re_lu_340[0][0] \n__________________________________________________________________________________________________\nconv2d_341 (Conv2D) (None, 7, 7, 128) 90240 concatenate_164[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_341 (BatchN (None, 7, 7, 128) 512 conv2d_341[0][0] \n__________________________________________________________________________________________________\nre_lu_341 (ReLU) (None, 7, 7, 128) 0 batch_normalization_341[0][0] \n__________________________________________________________________________________________________\nconv2d_342 (Conv2D) (None, 7, 7, 32) 36896 re_lu_341[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_342 (BatchN (None, 7, 7, 32) 128 conv2d_342[0][0] \n__________________________________________________________________________________________________\nre_lu_342 (ReLU) (None, 7, 7, 32) 0 batch_normalization_342[0][0] \n__________________________________________________________________________________________________\nconcatenate_165 (Concatenate) (None, 7, 7, 736) 0 concatenate_164[0][0] \n re_lu_342[0][0] \n__________________________________________________________________________________________________\nconv2d_343 (Conv2D) (None, 7, 7, 128) 94336 concatenate_165[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_343 (BatchN (None, 7, 7, 128) 512 conv2d_343[0][0] \n__________________________________________________________________________________________________\nre_lu_343 (ReLU) (None, 7, 7, 128) 0 batch_normalization_343[0][0] \n__________________________________________________________________________________________________\nconv2d_344 (Conv2D) (None, 7, 7, 32) 36896 re_lu_343[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_344 (BatchN (None, 7, 7, 32) 128 conv2d_344[0][0] \n__________________________________________________________________________________________________\nre_lu_344 (ReLU) (None, 7, 7, 32) 0 batch_normalization_344[0][0] \n__________________________________________________________________________________________________\nconcatenate_166 (Concatenate) (None, 7, 7, 768) 0 concatenate_165[0][0] \n re_lu_344[0][0] \n__________________________________________________________________________________________________\nconv2d_345 (Conv2D) (None, 7, 7, 128) 98432 concatenate_166[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_345 (BatchN (None, 7, 7, 128) 512 conv2d_345[0][0] \n__________________________________________________________________________________________________\nre_lu_345 (ReLU) (None, 7, 7, 128) 0 batch_normalization_345[0][0] \n__________________________________________________________________________________________________\nconv2d_346 (Conv2D) (None, 7, 7, 32) 36896 re_lu_345[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_346 (BatchN (None, 7, 7, 32) 128 conv2d_346[0][0] 
\n__________________________________________________________________________________________________\nre_lu_346 (ReLU) (None, 7, 7, 32) 0 batch_normalization_346[0][0] \n__________________________________________________________________________________________________\nconcatenate_167 (Concatenate) (None, 7, 7, 800) 0 concatenate_166[0][0] \n re_lu_346[0][0] \n__________________________________________________________________________________________________\nconv2d_347 (Conv2D) (None, 7, 7, 128) 102528 concatenate_167[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_347 (BatchN (None, 7, 7, 128) 512 conv2d_347[0][0] \n__________________________________________________________________________________________________\nre_lu_347 (ReLU) (None, 7, 7, 128) 0 batch_normalization_347[0][0] \n__________________________________________________________________________________________________\nconv2d_348 (Conv2D) (None, 7, 7, 32) 36896 re_lu_347[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_348 (BatchN (None, 7, 7, 32) 128 conv2d_348[0][0] \n__________________________________________________________________________________________________\nre_lu_348 (ReLU) (None, 7, 7, 32) 0 batch_normalization_348[0][0] \n__________________________________________________________________________________________________\nconcatenate_168 (Concatenate) (None, 7, 7, 832) 0 concatenate_167[0][0] \n re_lu_348[0][0] \n__________________________________________________________________________________________________\nconv2d_349 (Conv2D) (None, 7, 7, 128) 106624 concatenate_168[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_349 (BatchN (None, 7, 7, 128) 512 conv2d_349[0][0] \n__________________________________________________________________________________________________\nre_lu_349 (ReLU) (None, 7, 7, 128) 0 batch_normalization_349[0][0] \n__________________________________________________________________________________________________\nconv2d_350 (Conv2D) (None, 7, 7, 32) 36896 re_lu_349[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_350 (BatchN (None, 7, 7, 32) 128 conv2d_350[0][0] \n__________________________________________________________________________________________________\nre_lu_350 (ReLU) (None, 7, 7, 32) 0 batch_normalization_350[0][0] \n__________________________________________________________________________________________________\nconcatenate_169 (Concatenate) (None, 7, 7, 864) 0 concatenate_168[0][0] \n re_lu_350[0][0] \n__________________________________________________________________________________________________\nconv2d_351 (Conv2D) (None, 7, 7, 128) 110720 concatenate_169[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_351 (BatchN (None, 7, 7, 128) 512 conv2d_351[0][0] \n__________________________________________________________________________________________________\nre_lu_351 (ReLU) (None, 7, 7, 128) 0 batch_normalization_351[0][0] \n__________________________________________________________________________________________________\nconv2d_352 (Conv2D) (None, 7, 7, 32) 36896 re_lu_351[0][0] 
\n__________________________________________________________________________________________________\nbatch_normalization_352 (BatchN (None, 7, 7, 32) 128 conv2d_352[0][0] \n__________________________________________________________________________________________________\nre_lu_352 (ReLU) (None, 7, 7, 32) 0 batch_normalization_352[0][0] \n__________________________________________________________________________________________________\nconcatenate_170 (Concatenate) (None, 7, 7, 896) 0 concatenate_169[0][0] \n re_lu_352[0][0] \n__________________________________________________________________________________________________\nconv2d_353 (Conv2D) (None, 7, 7, 128) 114816 concatenate_170[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_353 (BatchN (None, 7, 7, 128) 512 conv2d_353[0][0] \n__________________________________________________________________________________________________\nre_lu_353 (ReLU) (None, 7, 7, 128) 0 batch_normalization_353[0][0] \n__________________________________________________________________________________________________\nconv2d_354 (Conv2D) (None, 7, 7, 32) 36896 re_lu_353[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_354 (BatchN (None, 7, 7, 32) 128 conv2d_354[0][0] \n__________________________________________________________________________________________________\nre_lu_354 (ReLU) (None, 7, 7, 32) 0 batch_normalization_354[0][0] \n__________________________________________________________________________________________________\nconcatenate_171 (Concatenate) (None, 7, 7, 928) 0 concatenate_170[0][0] \n re_lu_354[0][0] \n__________________________________________________________________________________________________\nconv2d_355 (Conv2D) (None, 7, 7, 128) 118912 concatenate_171[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_355 (BatchN (None, 7, 7, 128) 512 conv2d_355[0][0] \n__________________________________________________________________________________________________\nre_lu_355 (ReLU) (None, 7, 7, 128) 0 batch_normalization_355[0][0] \n__________________________________________________________________________________________________\nconv2d_356 (Conv2D) (None, 7, 7, 32) 36896 re_lu_355[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_356 (BatchN (None, 7, 7, 32) 128 conv2d_356[0][0] \n__________________________________________________________________________________________________\nre_lu_356 (ReLU) (None, 7, 7, 32) 0 batch_normalization_356[0][0] \n__________________________________________________________________________________________________\nconcatenate_172 (Concatenate) (None, 7, 7, 960) 0 concatenate_171[0][0] \n re_lu_356[0][0] \n__________________________________________________________________________________________________\nconv2d_357 (Conv2D) (None, 7, 7, 128) 123008 concatenate_172[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_357 (BatchN (None, 7, 7, 128) 512 conv2d_357[0][0] \n__________________________________________________________________________________________________\nre_lu_357 (ReLU) (None, 7, 7, 128) 0 batch_normalization_357[0][0] 
\n__________________________________________________________________________________________________\nconv2d_358 (Conv2D) (None, 7, 7, 32) 36896 re_lu_357[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_358 (BatchN (None, 7, 7, 32) 128 conv2d_358[0][0] \n__________________________________________________________________________________________________\nre_lu_358 (ReLU) (None, 7, 7, 32) 0 batch_normalization_358[0][0] \n__________________________________________________________________________________________________\nconcatenate_173 (Concatenate) (None, 7, 7, 992) 0 concatenate_172[0][0] \n re_lu_358[0][0] \n__________________________________________________________________________________________________\nconv2d_359 (Conv2D) (None, 7, 7, 128) 127104 concatenate_173[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_359 (BatchN (None, 7, 7, 128) 512 conv2d_359[0][0] \n__________________________________________________________________________________________________\nre_lu_359 (ReLU) (None, 7, 7, 128) 0 batch_normalization_359[0][0] \n__________________________________________________________________________________________________\nconv2d_360 (Conv2D) (None, 7, 7, 32) 36896 re_lu_359[0][0] \n__________________________________________________________________________________________________\nbatch_normalization_360 (BatchN (None, 7, 7, 32) 128 conv2d_360[0][0] \n__________________________________________________________________________________________________\nre_lu_360 (ReLU) (None, 7, 7, 32) 0 batch_normalization_360[0][0] \n__________________________________________________________________________________________________\nconcatenate_174 (Concatenate) (None, 7, 7, 1024) 0 concatenate_173[0][0] \n re_lu_360[0][0] \n__________________________________________________________________________________________________\nglobal_average_pooling2d_3 (Glo (None, 1024) 0 concatenate_174[0][0] \n__________________________________________________________________________________________________\ndense_3 (Dense) (None, 1000) 1025000 global_average_pooling2d_3[0][0] \n==================================================================================================\nTotal params: 7,946,408\nTrainable params: 7,925,928\nNon-trainable params: 20,480\n__________________________________________________________________________________________________\n```", "_____no_output_____" ] ], [ [ "model.summary()", "_____no_output_____" ] ], [ [ "## Xception Architecture using Functional API\n\nLet's layout a CNN using the Xception architecture pattern.\n\nWe will use these approaches:\n\n 1. Decompose into a stem, entrance, middle and exit module.\n 2. Stem does the initial sequential convolutional layers for the input.\n 3. Entrance does the coarse filter learning.\n 4. Middle does the detail filter learning.\n 5. Exit does the classification.\n \nWe won't build a full Xception, just a mini-example to practice the layout.\n\nYou will need to:\n\n 1. Use a strided convolution in the stem group.\n 2. Set the number of residual blocks in the residual groups in the middle flow.\n 3. Use global averaging in the classifier.\n 4. Set the input to the project link in the residual blocks in the entry flow.\n 5. 
Remember the input to the residual blocks in the middle flow.", "_____no_output_____" ] ], [ [ "from keras import layers, Input, Model\n\ndef entryFlow(inputs):\n    \"\"\" Create the entry flow section\n        inputs : input tensor to neural network\n    \"\"\"\n\n    def stem(inputs):\n        \"\"\" Create the stem entry into the neural network\n            inputs : input tensor to neural network\n        \"\"\"\n        # The stem uses two 3x3 convolutions.\n        # The first one downsamples and the second one doubles the number of filters\n        \n        # First convolution\n        x = layers.Conv2D(32, (3, 3), strides=(2, 2))(inputs)\n        x = layers.BatchNormalization()(x)\n        x = layers.ReLU()(x)\n\n        # Second convolution, double the number of filters (no downsampling).\n        # Note that it chains on x, the output of the first convolution, not on inputs.\n        # HINT: when stride > 1 you are downsampling (also known as strided convolution)\n        x = layers.Conv2D(??, (3, 3), strides=??)(x)\n        x = layers.BatchNormalization()(x)\n        x = layers.ReLU()(x)\n        return x\n    \n    # Create the stem to the neural network\n    x = stem(inputs)\n\n    # Create three residual blocks\n    for nb_filters in [128, 256, 728]:\n        x = residual_block_entry(x, nb_filters)\n\n    return x\n\ndef middleFlow(x):\n    \"\"\" Create the middle flow section\n        x : input tensor into section\n    \"\"\"\n    # Create 8 residual blocks, each with 728 filters\n    for _ in range(8):\n        x = residual_block_middle(x, ??)\n    return x\n\ndef exitFlow(x):\n    \"\"\" Create the exit flow section\n        x : input tensor into section\n    \"\"\"\n    def classifier(x):\n        \"\"\" The output classifier\n            x : input tensor\n        \"\"\"\n        # Global Average Pooling will flatten the 10x10 feature maps into 1D\n        # feature maps\n        x = layers.??()(x)\n        # Fully connected output layer (classification)\n        x = layers.Dense(1000, activation='softmax')(x)\n        return x\n\n    shortcut = x\n\n    # First Depthwise Separable Convolution\n    x = layers.SeparableConv2D(728, (3, 3), padding='same')(x)\n    x = layers.BatchNormalization()(x)\n\n    # Second Depthwise Separable Convolution\n    x = layers.SeparableConv2D(1024, (3, 3), padding='same')(x)\n    x = layers.BatchNormalization()(x)\n    x = layers.ReLU()(x)\n\n    # Create pooled feature maps, reduce size by 75%\n    x = layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)\n\n    # Add strided convolution to identity link to increase the number of filters to\n    # match output of residual block for the add operation\n    shortcut = layers.Conv2D(1024, (1, 1), strides=(2, 2),\n                             padding='same')(shortcut)\n    shortcut = layers.BatchNormalization()(shortcut)\n\n    x = layers.add([x, shortcut])\n\n    # Third Depthwise Separable Convolution\n    x = layers.SeparableConv2D(1556, (3, 3), padding='same')(x)\n    x = layers.BatchNormalization()(x)\n    x = layers.ReLU()(x)\n\n    # Fourth Depthwise Separable Convolution\n    x = layers.SeparableConv2D(2048, (3, 3), padding='same')(x)\n    x = layers.BatchNormalization()(x)\n    x = layers.ReLU()(x)\n\n    # Create classifier section\n    x = classifier(x)\n\n    return x\n\ndef residual_block_entry(x, nb_filters):\n    \"\"\" Create a residual block using Depthwise Separable Convolutions\n        x         : input into residual block\n        nb_filters: number of filters\n    \"\"\"\n    shortcut = x\n\n    # First Depthwise Separable Convolution\n    x = layers.SeparableConv2D(nb_filters, (3, 3), padding='same')(x)\n    x = layers.BatchNormalization()(x)\n    x = layers.ReLU()(x)\n\n    # Second Depthwise Separable Convolution\n    x = layers.SeparableConv2D(nb_filters, (3, 3), padding='same')(x)\n    x = layers.BatchNormalization()(x)\n    x = layers.ReLU()(x)\n\n    # Create pooled feature maps, reduce size by 75%\n    x = layers.MaxPooling2D((3, 3), strides=(2, 2), 
padding='same')(x)\n\n # Add strided convolution to identity link to double number of filters to\n # match output of residual block for the add operation\n # HINT: this is the identity branch, so what should be the input?\n shortcut = layers.Conv2D(nb_filters, (1, 1), strides=(2, 2),\n padding='same')(??)\n shortcut = layers.BatchNormalization()(shortcut)\n\n x = layers.add([x, shortcut])\n\n return x\n\ndef residual_block_middle(x, nb_filters):\n \"\"\" Create a residual block using Depthwise Separable Convolutions\n x : input into residual block\n nb_filters: number of filters\n \"\"\"\n # Remember to save the input for the identity link\n # HINT: it's in the params!\n shortcut = ??\n\n # First Depthwise Separable Convolution\n x = layers.SeparableConv2D(nb_filters, (3, 3), padding='same')(x)\n x = layers.BatchNormalization()(x)\n x = layers.ReLU()(x)\n\n # Second depthwise Separable Convolution\n x = layers.SeparableConv2D(nb_filters, (3, 3), padding='same')(x)\n x = layers.BatchNormalization()(x)\n x = layers.ReLU()(x)\n\n # Third depthwise Separable Convolution\n x = layers.SeparableConv2D(nb_filters, (3, 3), padding='same')(x)\n x = layers.BatchNormalization()(x)\n x = layers.ReLU()(x) \n \n x = layers.add([x, shortcut])\n return x\n\ninputs = Input(shape=(299, 299, 3))\n\n# Create entry section\nx = entryFlow(inputs)\n# Create the middle section\nx = middleFlow(x)\n# Create the exit section\noutputs = exitFlow(x)\n\nmodel = Model(inputs, outputs)", "_____no_output_____" ] ], [ [ "### Verify the model architecture using summary method\n\nIt should look (end) like below:\n\n```\nglobal_average_pooling2d_1 (Glo (None, 2048) 0 re_lu_37[0][0] \n__________________________________________________________________________________________________\ndense_1 (Dense) (None, 1000) 2049000 global_average_pooling2d_1[0][0] \n==================================================================================================\nTotal params: 22,981,736\nTrainable params: 22,927,232\nNon-trainable params: 54,504\n```", "_____no_output_____" ] ], [ [ "model.summary()", "_____no_output_____" ] ], [ [ "## End of Code Lab", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cb5489b2396f05b30407a2183b34bf2fac29476d
8,802
ipynb
Jupyter Notebook
data_preparation.ipynb
kurosouza/learnware
7f19206fefa130ba0d454f67a26c2b0de73ccb4f
[ "MIT" ]
1
2019-11-10T03:54:48.000Z
2019-11-10T03:54:48.000Z
data_preparation.ipynb
kurosouza/learnware
7f19206fefa130ba0d454f67a26c2b0de73ccb4f
[ "MIT" ]
null
null
null
data_preparation.ipynb
kurosouza/learnware
7f19206fefa130ba0d454f67a26c2b0de73ccb4f
[ "MIT" ]
1
2019-10-15T01:13:25.000Z
2019-10-15T01:13:25.000Z
27.85443
98
0.388434
[ [ [ "import matplotlib.pyplot as plt\nimport pandas as pd\nimport seaborn as sns", "_____no_output_____" ], [ "import random", "_____no_output_____" ], [ "filename = 'data/unpacked/studentVle.csv'", "_____no_output_____" ], [ "dframe = pd.read_csv(filename, header=0, skiprows=lambda i: i > 0 and random.random() > 0.1)", "_____no_output_____" ], [ "dframe.describe()", "_____no_output_____" ], [ "dframe.head()", "_____no_output_____" ], [ "dframe.info()", "<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 1066498 entries, 0 to 1066497\nData columns (total 6 columns):\ncode_module 1066498 non-null object\ncode_presentation 1066498 non-null object\nid_student 1066498 non-null int64\nid_site 1066498 non-null int64\ndate 1066498 non-null int64\nsum_click 1066498 non-null int64\ndtypes: int64(4), object(2)\nmemory usage: 48.8+ MB\n" ], [ "dframe.to_csv('data/unpacked/studentVle2.csv')", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb548fa5904017cb3968ec23a33b67954c0cee7d
102,282
ipynb
Jupyter Notebook
sphinx/r-intro/source/hypothesis.ipynb
oneoffcoder/books
84619477294a3e37e0d7538adf819113c9e8dcb8
[ "CC-BY-4.0" ]
26
2020-05-05T08:07:43.000Z
2022-02-12T03:28:15.000Z
sphinx/r-intro/source/hypothesis.ipynb
oneoffcoder/books
84619477294a3e37e0d7538adf819113c9e8dcb8
[ "CC-BY-4.0" ]
19
2021-03-10T00:33:51.000Z
2022-03-02T13:04:32.000Z
sphinx/r-intro/source/hypothesis.ipynb
oneoffcoder/books
84619477294a3e37e0d7538adf819113c9e8dcb8
[ "CC-BY-4.0" ]
2
2022-01-09T16:48:21.000Z
2022-02-19T17:06:50.000Z
103.315152
14,722
0.847119
[ [ [ "# Hypothesis Testing", "_____no_output_____" ] ], [ [ "set.seed(37)", "_____no_output_____" ] ], [ [ "## Student's t-test\n\nThe `Student's t-test` compares the means of two samples to see if they are different. Here is a `two-sided` Student's t-test.", "_____no_output_____" ] ], [ [ "x <- rnorm(1000, mean=0, sd=1)\ny <- rnorm(1000, mean=1, sd=1)\n\nr <- t.test(x, y, alternative='two.sided')\nprint(r)", "\n\tWelch Two Sample t-test\n\ndata: x and y\nt = -23.159, df = 1998, p-value < 2.2e-16\nalternative hypothesis: true difference in means is not equal to 0\n95 percent confidence interval:\n -1.1425178 -0.9641235\nsample estimates:\n mean of x mean of y \n-0.01839959 1.03492108 \n\n" ] ], [ [ "Here is a directional Student's t-test to see if the mean of `x` is greater than the mean of `y`.", "_____no_output_____" ] ], [ [ "x <- rnorm(1000, mean=0, sd=1)\ny <- rnorm(1000, mean=1, sd=1)\n\nr <- t.test(x, y, alternative='greater')\nprint(r)", "\n\tWelch Two Sample t-test\n\ndata: x and y\nt = -22.576, df = 1991.2, p-value = 1\nalternative hypothesis: true difference in means is greater than 0\n95 percent confidence interval:\n -1.118479 Inf\nsample estimates:\n mean of x mean of y \n0.01325957 1.05574987 \n\n" ] ], [ [ "Here is a directional Student's t-test to see if the mean of `x` is less than the mean of `y`.", "_____no_output_____" ] ], [ [ "x <- rnorm(1000, mean=0, sd=1)\ny <- rnorm(1000, mean=1, sd=1)\n\nr <- t.test(x, y, alternative='less')\nprint(r)", "\n\tWelch Two Sample t-test\n\ndata: x and y\nt = -22.097, df = 1996.7, p-value < 2.2e-16\nalternative hypothesis: true difference in means is less than 0\n95 percent confidence interval:\n -Inf -0.9224035\nsample estimates:\n mean of x mean of y \n0.01069279 1.00731729 \n\n" ] ], [ [ "We may also perform a `one-sample` Student's t-test.", "_____no_output_____" ] ], [ [ "x <- rnorm(1000, mean=0, sd=1)\n\nr <- t.test(x, mu=5)\nprint(r)", "\n\tOne Sample t-test\n\ndata: x\nt = -159.87, df = 999, p-value < 2.2e-16\nalternative hypothesis: true mean is not equal to 5\n95 percent confidence interval:\n -0.13452024 -0.01000024\nsample estimates:\n mean of x \n-0.07226024 \n\n" ] ], [ [ "If your data is in long format, you may use a formula to perform a Student's t-test.", "_____no_output_____" ] ], [ [ "data <- data.frame(\n score = c(90, 89, 70, 99, 100, 77, 80, 67, 70),\n gender = c(rep('girl', 5), rep('boy', 4))\n)\n\nr <- t.test(score ~ gender, data=data)\nprint(r)", "\n\tWelch Two Sample t-test\n\ndata: score by gender\nt = -2.6069, df = 6.0971, p-value = 0.0397\nalternative hypothesis: true difference in means is not equal to 0\n95 percent confidence interval:\n -31.15404 -1.04596\nsample estimates:\n mean in group boy mean in group girl \n 73.5 89.6 \n\n" ] ], [ [ "## Wilcoxon U-Test\n\nThe `Wilcoxon U-Test` is non-parametric test used to compare two samples. 
The function `wilcox.test` behaves the same way as the `t.test` function.", "_____no_output_____" ] ], [ [ "x <- rnorm(1000, mean=0, sd=1)\ny <- rnorm(1000, mean=0.5, sd=1)\n\nr <- wilcox.test(x, y)\nprint(r)", "\n\tWilcoxon rank sum test with continuity correction\n\ndata: x and y\nW = 339274, p-value < 2.2e-16\nalternative hypothesis: true location shift is not equal to 0\n\n" ] ], [ [ "## Correlation\n\nWe may also compute the correlation between two variables and test it as well.", "_____no_output_____" ] ], [ [ "x <- seq(1, 1000)\ny <- x * 2 + rnorm(1000, mean=5, sd=5)\n\nc <- cor(x, y)\nprint(c)", "[1] 0.9999633\n" ] ], [ [ "We compute the covariance with the `cov` function.", "_____no_output_____" ] ], [ [ "x <- seq(1, 1000)\ny <- x * 2 + rnorm(1000, mean=5, sd=5)\n\nc <- cov(x, y)\nprint(c)", "[1] 166818.4\n" ] ], [ [ "We compute the significance with `cor.test`.", "_____no_output_____" ] ], [ [ "x <- seq(1, 1000)\ny <- x * 2 + rnorm(1000, mean=5, sd=5)\n\nr <- cor.test(x, y)\nprint(r)", "\n\tPearson's product-moment correlation\n\ndata: x and y\nt = 3806.6, df = 998, p-value < 2.2e-16\nalternative hypothesis: true correlation is not equal to 0\n95 percent confidence interval:\n 0.9999610 0.9999696\nsample estimates:\n cor \n0.9999656 \n\n" ] ], [ [ "## Chi-squared test\n\nA `Chi-squared` test is used to test for association with contingency tables.", "_____no_output_____" ] ], [ [ "df <- data.frame(\n    rural = c(10, 15, 12),\n    urban = c(20, 30, 25),\n    row.names=c('DC', 'MD', 'VA')\n)\n\nr <- chisq.test(df)\nprint(r)", "\n\tPearson's Chi-squared test\n\ndata: df\nX-squared = 0.0090902, df = 2, p-value = 0.9955\n\n" ] ], [ [ "A `goodness of fit` test using the `Chi-squared test` is performed as follows.", "_____no_output_____" ] ], [ [ "df <- data.frame(\n    rural = c(10, 15, 12),\n    urban = c(20, 30, 25),\n    row.names=c('DC', 'MD', 'VA')\n)\n\nr <- chisq.test(df$rural, p=df$urban, rescale.p=TRUE)\nprint(r)", "\n\tChi-squared test for given probabilities\n\ndata: df$rural\nX-squared = 0.013514, df = 2, p-value = 0.9933\n\n" ] ], [ [ "## Analysis of variance\n\n### One-way analysis of variance\nA one-way `analysis of variance` (`AOV`) may be conducted as follows.", "_____no_output_____" ] ], [ [ "library(tidyr)\n\ndf <- data.frame(\n    city = c('A', 'B', 'C', 'D', 'E'),\n    urban = c(20, 25, 22, 24, 21),\n    rural = c(10, 15, 12, 14, 11),\n    suburb = c(15, 18, 19, 20, 17)\n)\n\ndf <- df %>% pivot_longer(-city, names_to='location', values_to='expense')\nr <- aov(expense ~ location, data=df)\nprint(r)\nprint('-- summary below --')\nprint(summary(r))", "Call:\n aov(formula = expense ~ location, data = df)\n\nTerms:\n location Residuals\nSum of Squares 250.5333 49.2000\nDeg. of Freedom 2 12\n\nResidual standard error: 2.024846\nEstimated effects may be unbalanced\n[1] \"-- summary below --\"\n Df Sum Sq Mean Sq F value Pr(>F) \nlocation 2 250.5 125.3 30.55 1.96e-05 ***\nResiduals 12 49.2 4.1 \n---\nSignif. 
codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1\n" ] ], [ [ "#### Post-hoc test\n\nWe apply `Tukey's Honestly Significant Difference` (`HSD`) test to see which pairs differ.", "_____no_output_____" ] ], [ [ "t <- TukeyHSD(r)\nprint(t)", " Tukey multiple comparisons of means\n 95% family-wise confidence level\n\nFit: aov(formula = expense ~ location, data = df)\n\n$location\n diff lwr upr p adj\nsuburb-rural 5.4 1.983468 8.816532 0.0031673\nurban-rural 10.0 6.583468 13.416532 0.0000133\nurban-suburb 4.6 1.183468 8.016532 0.0095794\n\n" ] ], [ [ "#### Obtaining the effects", "_____no_output_____" ] ], [ [ "e <- model.tables(r, type='effects')\nprint(e)", "Tables of effects\n\n location \nlocation\n rural suburb urban \n-5.133 0.267 4.867 \n" ] ], [ [ "#### Obtaining the means", "_____no_output_____" ] ], [ [ "m <- model.tables(r, type='means')\nprint(m)", "Tables of means\nGrand mean\n \n17.53333 \n\n location \nlocation\n rural suburb urban \n 12.4 17.8 22.4 \n" ] ], [ [ "#### Visualizing the means", "_____no_output_____" ] ], [ [ "options(repr.plot.width=4, repr.plot.height=4)\n\nboxplot(expense ~ location, data=df)", "_____no_output_____" ] ], [ [ "#### Visualizing the differences", "_____no_output_____" ] ], [ [ "options(repr.plot.width=5, repr.plot.height=3)\n\nop = par(mar = c(5, 8, 4, 2))\nplot(t, cex=0.2, las=1)\npar(op)", "_____no_output_____" ] ], [ [ "### Two-way ANOVA", "_____no_output_____" ] ], [ [ "suppressMessages({\n library('dplyr')\n})\n\nN = 5\na <- 5 + 20 * rnorm(N, mean=20, sd=1) + 4 * rnorm(N, mean=4, sd=1) # urban-high\nb <- 5 + 18 * rnorm(N, mean=18, sd=1) + 2 * rnorm(N, mean=2, sd=1) # urban-low\nc <- 5 + 10 * rnorm(N, mean=10, sd=1) + 4 * rnorm(N, mean=4, sd=1) # suburban-high\nd <- 5 + 8 * rnorm(N, mean=8, sd=1) + 2 * rnorm(N, mean=2, sd=1) # suburban-low\ne <- 5 + 5 * rnorm(N, mean=5, sd=1) + 4 * rnorm(N, mean=4, sd=1) # rural-high\nf <- 5 + 3 * rnorm(N, mean=3, sd=1) + 2 * rnorm(N, mean=2, sd=1) # rural-low\n\ndf <- data.frame(\n expense=c(a, b, c, d, e, f), \n location=c(rep('urban', 2*N), rep('suburban', 2*N), rep('rural', 2*N)), \n income=c(rep('high', N), rep('low', N), rep('high', N), rep('low', N), rep('high', N), rep('low', N)),\n stringsAsFactors=TRUE\n)", "_____no_output_____" ], [ "r <- aov(expense ~ location * income, data=df)\n\nprint(r)\nprint('-- summary below --')\nprint(summary(r))", "Call:\n aov(formula = expense ~ location * income, data = df)\n\nTerms:\n location income location:income Residuals\nSum of Squares 687822.6 24346.4 4833.1 7098.1\nDeg. of Freedom 2 1 2 24\n\nResidual standard error: 17.19753\nEstimated effects may be unbalanced\n[1] \"-- summary below --\"\n Df Sum Sq Mean Sq F value Pr(>F) \nlocation 2 687823 343911 1162.825 < 2e-16 ***\nincome 1 24346 24346 82.319 3.17e-09 ***\nlocation:income 2 4833 2417 8.171 0.00197 ** \nResiduals 24 7098 296 \n---\nSignif. 
codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1\n" ] ], [ [ "#### Two-Way ANOVA post-hoc", "_____no_output_____" ] ], [ [ "t <- TukeyHSD(r)\nprint(t)", " Tukey multiple comparisons of means\n 95% family-wise confidence level\n\nFit: aov(formula = expense ~ location * income, data = df)\n\n$location\n diff lwr upr p adj\nsuburban-rural 60.83863 41.63207 80.04519 1e-07\nurban-rural 347.27478 328.06822 366.48134 0e+00\nurban-suburban 286.43615 267.22959 305.64271 0e+00\n\n$income\n diff lwr upr p adj\nlow-high -56.97529 -69.93585 -44.01473 0\n\n$`location:income`\n diff lwr upr p adj\nsuburban:high-rural:high 69.64827 36.01835 103.278196 0.0000173\nurban:high-rural:high 377.50133 343.87140 411.131254 0.0000000\nrural:low-rural:high -30.95116 -64.58109 2.678762 0.0837968\nsuburban:low-rural:high 21.07783 -12.55210 54.707751 0.4049037\nurban:low-rural:high 286.09707 252.46714 319.726994 0.0000000\nurban:high-suburban:high 307.85306 274.22313 341.482983 0.0000000\nrural:low-suburban:high -100.59943 -134.22936 -66.969509 0.0000000\nsuburban:low-suburban:high -48.57045 -82.20037 -14.940521 0.0019940\nurban:low-suburban:high 216.44880 182.81887 250.078723 0.0000000\nrural:low-urban:high -408.45249 -442.08242 -374.822567 0.0000000\nsuburban:low-urban:high -356.42350 -390.05343 -322.793579 0.0000000\nurban:low-urban:high -91.40426 -125.03418 -57.774335 0.0000002\nsuburban:low-rural:low 52.02899 18.39906 85.658913 0.0009103\nurban:low-rural:low 317.04823 283.41831 350.678157 0.0000000\nurban:low-suburban:low 265.01924 231.38932 298.649168 0.0000000\n\n" ] ], [ [ "#### Two-Way ANOVA effects", "_____no_output_____" ] ], [ [ "e <- model.tables(r, type='effects')\nprint(e)", "Tables of effects\n\n location \nlocation\n rural suburban urban \n -136.04 -75.20 211.24 \n\n income \nincome\n high low \n 28.488 -28.488 \n\n location:income \n income\nlocation high low \n rural -13.012 13.012\n suburban -4.202 4.202\n urban 17.214 -17.214\n" ] ], [ [ "#### Two-Way ANOVA means", "_____no_output_____" ] ], [ [ "m <- model.tables(r, type='means')\nprint(m)", "Tables of means\nGrand mean\n \n168.0042 \n\n location \nlocation\n rural suburban urban \n 32.0 92.8 379.2 \n\n income \nincome\n high low \n196.49 139.52 \n\n location:income \n income\nlocation high low \n rural 47.4 16.5\n suburban 117.1 68.5\n urban 424.9 333.5\n" ] ], [ [ "#### Two-Way ANOVA means visualization", "_____no_output_____" ] ], [ [ "options(repr.plot.width=5, repr.plot.height=5)\n\nop = par(mar = c(8, 4, 4, 2))\nboxplot(expense ~ location * income, data = df, cex.axis = 0.9, las=2, xlab='')\npar(op)", "_____no_output_____" ] ], [ [ "#### Two-Way ANOVA differences visualization", "_____no_output_____" ] ], [ [ "options(repr.plot.width=5, repr.plot.height=3)\n\nop = par(mar = c(5, 14, 4, 2))\nplot(t, cex=0.2, las=1)\npar(op)", "_____no_output_____" ] ], [ [ "#### Two-Way ANOVA interaction plot", "_____no_output_____" ] ], [ [ "options(repr.plot.width=5, repr.plot.height=5)\n\nattach(df)\ninteraction.plot(location, income, expense)\ndetach(df)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cb549256182121417e42a60efe7d3c4ea7227ee7
8,412
ipynb
Jupyter Notebook
.ipynb_checkpoints/ER_test_local_self-checkpoint.ipynb
danhtaihoang/expectation-reflection
ae89c77da1e47ffc0ea09fb2e919d29308998b95
[ "MIT" ]
null
null
null
.ipynb_checkpoints/ER_test_local_self-checkpoint.ipynb
danhtaihoang/expectation-reflection
ae89c77da1e47ffc0ea09fb2e919d29308998b95
[ "MIT" ]
null
null
null
.ipynb_checkpoints/ER_test_local_self-checkpoint.ipynb
danhtaihoang/expectation-reflection
ae89c77da1e47ffc0ea09fb2e919d29308998b95
[ "MIT" ]
null
null
null
29.515789
94
0.417261
[ [ [ "## Expectation Reflection\n\nTest ER package.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport pandas as pd\n\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.utils import shuffle\nfrom sklearn.metrics import accuracy_score,precision_score,recall_score,roc_curve,auc\nfrom sklearn.preprocessing import MinMaxScaler\n\n# pypi:\n#from expectation_reflection import classification as ER\n#from expectation_reflection import function\n\n# local:\nfrom expectation_reflection_local import classification as ER\nfrom expectation_reflection_local import function", "_____no_output_____" ], [ "np.random.seed(1)", "_____no_output_____" ], [ "Xy = np.loadtxt('kidney.dat') \nX = Xy[:,:-1]\ny = Xy[:,-1]", "_____no_output_____" ], [ "print(y[:4])", "[-1. -1. -1. -1.]\n" ], [ "y1 = np.full(len(y),'Yes')\n\ny1[y== -1] = 'No'", "_____no_output_____" ], [ "y = y1", "_____no_output_____" ], [ "# 2020.07.15: convert y from {-1,+1} to {0,1}:\n#y = (Xy[:,-1]+1)/2. ", "_____no_output_____" ], [ "X,y = function.make_data_balance(X,y)\nprint(np.unique(y,return_counts=True))\n\nX, y = shuffle(X, y, random_state=1)\n\nX_train,X_test,y_train,y_test = train_test_split(X,y,test_size=0.5,random_state = 1)\n\nsc = MinMaxScaler()\nX_train = sc.fit_transform(X_train)\nX_test = sc.transform(X_test)", "(array(['No', 'Yes'], dtype='<U3'), array([149, 149]))\n" ], [ "y", "_____no_output_____" ], [ "model = ER.model(max_iter=200,regu=0.1,random_state=2)\nmodel.fit(X_train, y_train)", "_____no_output_____" ], [ "# performance:\ny_test_pred = model.predict(X_test)\nacc = accuracy_score(y_test,y_test_pred)\nprint(acc)", "0.9395973154362416\n" ], [ "y_test_pred", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb549268880af4b27da73c8fd2f2ae85d467c9f9
4,279
ipynb
Jupyter Notebook
chapter10/section10.6_portmanteau_test_demo.ipynb
yestolife/python_quant
f85f3c8c25fbddc1b2dfcad8ac75642175d319b8
[ "MIT" ]
1
2022-02-26T11:49:30.000Z
2022-02-26T11:49:30.000Z
chapter10/section10.6_portmanteau_test_demo.ipynb
yestolife/python_quant
f85f3c8c25fbddc1b2dfcad8ac75642175d319b8
[ "MIT" ]
null
null
null
chapter10/section10.6_portmanteau_test_demo.ipynb
yestolife/python_quant
f85f3c8c25fbddc1b2dfcad8ac75642175d319b8
[ "MIT" ]
2
2021-05-18T02:17:42.000Z
2021-05-22T09:37:38.000Z
49.183908
1,769
0.49591
[ [ [ "import pandas as pd \nimport numpy as np \nfrom scipy import stats \nimport statsmodels.api as sm \nimport tushare as ts \n\ndf = ts.get_k_data('399300', index=True, start = '2016-01-01', end = '2016-12-31')\n\ndf['rtn'] = np.log(df['close']) - np.log(df['close'].shift(1))\ndf = df.dropna()\n\nm = 10\nacf, q, p = sm.tsa.acf(df['rtn'], nlags = m, qstat=True)\nout = np.c_[range(1, 11), acf[1:], q, p]\noutput = pd.DataFrame(out, columns=['lag', 'AC', 'Q', 'P-value'])\noutput = output.set_index('lag')\noutput\n", "本接口即将停止更新,请尽快使用Pro版接口:https://waditu.com/document/2\nD:\\Users\\Andy\\anaconda3\\lib\\site-packages\\statsmodels\\tsa\\stattools.py:662: FutureWarning: fft=True will become the default after the release of the 0.12 release of statsmodels. To suppress this warning, explicitly set fft=False.\n warnings.warn(\n" ] ] ]
[ "code" ]
[ [ "code" ] ]
cb549a083b56b7af7bb994a6f7b8e31239874d8c
171,477
ipynb
Jupyter Notebook
ProjektSi_2021.ipynb
michalwilk123/nlp-transformer-app
fd10e0b3120607085b1e46062bcf2c8e08d4c9d8
[ "MIT" ]
null
null
null
ProjektSi_2021.ipynb
michalwilk123/nlp-transformer-app
fd10e0b3120607085b1e46062bcf2c8e08d4c9d8
[ "MIT" ]
null
null
null
ProjektSi_2021.ipynb
michalwilk123/nlp-transformer-app
fd10e0b3120607085b1e46062bcf2c8e08d4c9d8
[ "MIT" ]
null
null
null
38.080613
1,217
0.517865
[ [ [ "<a href=\"https://colab.research.google.com/github/michalwilk123/nlp-transformer-app-pl/blob/master/ProjektSi_2021.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "# **Transformacja liniowa nastroju skończonego tekstu**\n## Projekt: Sztuczna Inteligencja 2021\n---\n\n</br>\n\n#### Michał Wilk 180333\n#### Radosław Baziak 180197", "_____no_output_____" ], [ "# __Spis Treści__\n---\n\n<br/>\n\n#### __Teoria:__\n* [Treść zadania](#TrescZadania)\n* [Modele Natural Language Processing](#ModeleNaturalLanguageProcessing)\n* [Model Transformer](#ModelTransformer)\n* [Wykorzystane biblioteki / narzędzia](#WykorzystaneBiblioteki)\n<br/>\n\n#### __Praktyka:__\n* [__Aplikacja__](#Aplikacja)\n<br/>\n\n#### __Podsumowanie:__\n* [Ocena aplikacji](#OcenaAplikacji)\n* [Ocena projektu](#OcenaProjektu)\n* [Przyszłość modeli transformer](#PrzyszloscModeliTransformer)\n* [Bibliografia](#Bibliografia)", "_____no_output_____" ], [ "# Teoria", "_____no_output_____" ], [ "## <a name=\"TrescZadania\"></a>Treść zadania\nPochylamy się nad takimi zagadnieniami jak:\n* rozpoznawanie tekstu\n* określenie polaryzacji tekstu (Analiza nastroju tekstu/Sentiment analysis)\n* generowanie tekstu na podstawie jego kontekstu\n\n<br/>\nRezultatem naszej pracy jest stworzenie metody(aplikacji), która będzie w stanie zmienić nastrój tekstu (czyli jego polaryzację) w sposób liniowy. W ten sposób sprawdzimy, czy najnowsze ogólnie dostępne modele przetwarzania języka są w stanie rozwiązać wydawałoby się bardzo złożony problem.", "_____no_output_____" ], [ "## <a name=\"ModeleNaturalLanguageProcessing\"></a>__Modele Natural Language Processing__\n\n\n", "_____no_output_____" ], [ "#### **O dziedzinie Natural Language Processing**\n---\n</br>\n\nPrzetwarzanie języka naturalnego jest bardzo dynamicznie rozwijającą\nsię dziedziną. W ciągu ostatnich kilkudziesięciu lat jesteśmy świadkami \nznacznego rozwoju tej gałęzi nauki.\n\nTypową problematyką tego działu sztucznej inteligencji jest próba nauczenia maszyny interpretacji tekstu. \n\nPraktycznymi zastosowaniami tej gałęzi sztucznej inteligencji są m.in. [[12]](https://arxiv.org/abs/1908.09203):\n* tłumaczenie tekstu\n* generowania tekstu podobnego do innego tekstu\n* chatboty\n* określanie prawdziwości tekstu\n* określanie nastroju tekstu\n* podpowiedzi wyrazów w programach do pisania oprogramowania (IDE), np. IntelliSense", "_____no_output_____" ], [ "Często te modele opierają swoje działania na podobnych zasadach, a ich funkcjonowanie można zredukować do kilku etapów:\n\n1) __Tokenizowanie tekstu__ - czyli zamiana tekstu w formie tekstowej, np: \"Informatyka\" na formę dyskretną np. ciąg bajtowy: _0101_\n\n2) __Encoding__- enkodowanie ciągu tokenów na obiekt dyskretny, np. można zdanie w formie tokenów zamienić na graf czy drzewo zależności w tekście\n\n3) __Decoding__ - kiedy jesteśmy w posiadaniu struktury zdania możemy (zazwyczaj!) 
[ [ "#### __Examples of language-processing models:__\n---\n</br>\n\n__Hidden Markov models__ - a statistical Markov model that contains a Markov chain in which part of the data is hidden. The prediction of the next state is based solely on the current state. A model created this way \"remembers\" one word back. Adding further words to remember significantly increases the memory complexity of this algorithm.\n\n___n___ - _number of tokens in the vocabulary_\n\n___m___ - _memory of the model, i.e. how many words back are remembered_\n\n___ZP___ - _memory complexity_\n\n$$ ZP(n, m) = n^m $$\n\n> Despite its advanced age, the model is still widely used. Compared to the other models listed here, it generates new text very efficiently. One of its applications is suggesting the next word on some mobile phones. A toy implementation is sketched after this cell.\n\n__Long short-term memory__ - a recurrent data-analysis model which, when determining the current state, has information about the previous states. An LSTM is built from gates through which data from earlier outputs and the new input pass. Their purpose is to decide whether a given piece of information should be forgotten, updated or kept. Only information filtered in this way goes on to further analysis. This design allows greater weight to be attached to more frequently occurring data and irregular variations to be forgotten quickly, as a result of which the system copes very well with noise in the data.\n\n</br>\n\n__Transformer__ - a model based on the attention mechanism; unlike the previous models, transformers allow non-sequential analysis of the input data, since they focus on the overall context. When returning data, the model keeps it as context data in order to obtain better results while generating the remaining output.", "_____no_output_____" ] ],
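[ [ "A minimal sketch (our own illustration, with a made-up corpus) of a first-order Markov chain over words — the kind of model described above, which only remembers the previous word:\n\n```python\nimport random\nfrom collections import defaultdict\n\ntext = 'the movie was good and the acting was good too'.split()\n\n# count observed transitions word -> next word (memory m = 1)\nchain = defaultdict(list)\nfor prev_word, next_word in zip(text, text[1:]):\n    chain[prev_word].append(next_word)\n\n# generate: condition only on the current word\nword = 'the'\nout = [word]\nfor _ in range(6):\n    candidates = chain[word]\n    if not candidates:  # dead end: no observed continuation\n        break\n    word = random.choice(candidates)\n    out.append(word)\nprint(' '.join(out))\n```", "_____no_output_____" ] ],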
[ [ "#### **Development of ways of representing words in computer memory**\n\nAs we mentioned above, in earlier works words were often represented as atomic units.\n\nNowadays [[2]](https://arxiv.org/abs/1301.3781), information about words is often represented as a vector of certain features determined by another model.\n\nThanks to this, the computer is able to determine relationships between words. For example, in this way the computer will be able to determine that the word \"Paryż\" (Paris) has more in common with the word \"Marsylia\" (Marseille) than with the word \"ryż\" (rice), despite the similar spelling. A toy numerical illustration follows this cell.", "_____no_output_____" ] ],
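[ [ "A hedged sketch of why vector representations help — cosine similarity between hand-made toy vectors (the numbers below are invented purely for illustration):\n\n```python\nimport numpy as np\n\n# toy 3-dimensional 'embeddings' (made up for this example)\nvec = {\n    'paryz': np.array([0.9, 0.8, 0.1]),\n    'marsylia': np.array([0.8, 0.9, 0.2]),\n    'ryz': np.array([0.1, 0.2, 0.9]),\n}\n\ndef cos_sim(a, b):\n    # cosine of the angle between two feature vectors\n    return float(a @ b / (np.linalg.norm(a) * np.linalg.norm(b)))\n\nprint(cos_sim(vec['paryz'], vec['marsylia']))  # high: related cities\nprint(cos_sim(vec['paryz'], vec['ryz']))       # low: unrelated word\n```", "_____no_output_____" ] ],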
[ [ "## <a name=\"ModelTransformer\"></a>__The Transformer model__", "_____no_output_____" ] ], [ [ "#### __Structure of the transformer__\nThe transformer consists of two main parts, an encoder and a decoder; by default there are six copies of each. The encoders take data from the input and pass it through a self-attention layer, which pays attention to the whole input and on its basis determines the weights of the words and the relationships between them. This information is then passed to a feed-forward neural network, which makes a prediction of the output based on the supplied data. The decoder also contains these two layers, but between them it has an encoder-decoder attention layer, which helps the decoder focus on the more important parts of the input. More about the workings of the transformer can be found in the paper [[1]](https://arxiv.org/abs/1706.03762).", "_____no_output_____" ] ], [ [ "#### Development of __Transformer__ models\n---\n__Models grouped by the tasks they perform:__\n* __Text translation:__\n * T5\n* __Chatbots:__\n * LaMDA\n * Meena\n* __Text summarization:__\n * XSUM\n * BigBird\n* __Models with many applications:__\n * GPT\n * BERT", "_____no_output_____" ] ], [ [ "### __The BERT model__\n__BERT__ - (Bidirectional Encoder Representations from Transformers) a model created by Google. It introduced a different way of analysing text. Unlike its predecessors, which analysed text in one direction (left->right or right->left), the BERT model analyses text both from left to right and from right to left.\nThis is a significant difference compared to other models.\n\nBERT copes decidedly better with understanding the context of the whole text.\nFor example, the research paper [[3]](https://arxiv.org/abs/1810.04805) gives the example of the problem of answering a given question. Very often, the text in the question may refer to words appearing later in the sentence, so such 'bidirectional' understanding can give much better results.\n\nA drawback of this type of model is that BERT can operate only on input data of a predetermined length, because the model is trained on a fixed range of words (for example, our models were trained with a limit of 512 tokens). For text that does not use all the token slots, special [PAD] tokens, which denote an unused word position, are added at the end. An illustration of the masked-word prediction such models are trained on follows this cell.", "_____no_output_____" ] ],
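[ [ "A hedged sketch of the masked-word prediction objective mentioned above, using the generic `fill-mask` pipeline from the transformers library (the checkpoint name is just an example):\n\n```python\nfrom transformers import pipeline\n\nfill = pipeline('fill-mask', model='distilbert-base-uncased')\n\n# the model proposes words for the [MASK] slot, with scores\nfor cand in fill('This movie was absolutely [MASK].'):\n    print(cand['token_str'], round(cand['score'], 3))\n```", "_____no_output_____" ] ],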
[ [ "### __The GPT-2 model__\n**GPT** [[11]](https://d4mucfpksywv.cloudfront.net/better-language-models/language_models_are_unsupervised_multitask_learners.pdf)\n[[12]](https://arxiv.org/abs/1908.09203) - (Generative Pre-trained Transformer) a model created by OpenAI and described more broadly in the cited papers; unlike BERT, GPT uses the transformer's decoders rather than its encoders. GPT, like many other traditional models, outputs one word at a time, sequentially. The operation of this model includes a process of so-called auto-regression: after each generated word, that word also joins the input data used to generate the following words. During the decoder's work, attention is paid only to the words lying to the left of the token, not to its entire surroundings. The remaining aspects of how GPT works are similar to the BERT model.\n<br>\nThe GPT-2 model has 1.5 billion parameters and was pre-trained on 40 GB of text data from web pages. The main tasks of GPT-2 were predicting the next word in a sentence and zero-shot learning, i.e. expecting the model to complete a sentence based on an instruction; e.g. for the given input \"To jest polskie zdanie. ENGLISH: ___\" the model was supposed to understand the task by itself and translate the text. The auto-regression loop is sketched after this cell.", "_____no_output_____" ] ],
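[ [ "A hedged sketch (ours) of the auto-regression loop described above — each greedily chosen token is appended to the input before the next step (the 'gpt2' checkpoint is an example of any causal language model):\n\n```python\nimport torch\nfrom transformers import AutoTokenizer, AutoModelForCausalLM\n\ntok = AutoTokenizer.from_pretrained('gpt2')\nlm = AutoModelForCausalLM.from_pretrained('gpt2')\n\nids = tok('The movie was', return_tensors='pt').input_ids\nfor _ in range(5):\n    logits = lm(ids).logits[0, -1]  # scores for the next token only\n    next_id = torch.argmax(logits)  # greedy choice\n    ids = torch.cat([ids, next_id.view(1, 1)], dim=1)  # feed it back in\nprint(tok.decode(ids[0]))\n```", "_____no_output_____" ] ],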
[ [ "### __The laMDA model__\n**laMDA** [[7]](https://arxiv.org/abs/2102.08602v1) - (Language Model for Dialogue Applications) a model created by Google, one of the newer ones (18 May 2021) to use the transformer architecture, which works on a principle similar to BERT or GPT-2. It does, however, introduce a very big difference in the self-attention layer. Because this model is intended for conducting long conversations, which often change topic, a solution had to be found for the problem of recognizing the input and the context of sentences. To optimize the model's work, a new 'lambda' layer was introduced, replacing the self-attention layer. The input data is passed to this layer, which analyses a given word together with its context and on this basis formulates a linear lambda function and applies it to every input. As a result, the content of the word, its context and its position are all taken into account. This solution works well both in the analysis of full texts and of those containing empty mask slots, with low memory usage even for long texts. Unlike other models, because of its purpose laMDA was trained almost exclusively on a dataset consisting of dialogues.", "_____no_output_____" ] ], [ [ "### **Modifications of the BERT model**\nBERT dominated the NLP market at a very fast pace thanks to its efficiency and the quality of its results. Nevertheless, models have appeared that improve it in various respects, such as data-processing speed or correctness of results.\n\n\n**RoBERTa** [[8]](https://arxiv.org/abs/1907.11692) - a model presented by Facebook that is a version of BERT with a different approach to its training. RoBERTa does not perform next sentence prediction and instead introduces dynamic mask tokens, which change during training. Apart from that, a ten times larger collection of data was used for training. Despite the much greater computing power used, training lasts 4-5 times longer, but thanks to this, results 2-20% more accurate than those of the traditional BERT were achieved.\n\n\n**ALBERT** [[9]](https://arxiv.org/abs/1909.11942) - a model released by Google that has as much as 89% fewer parameters than the standard BERT while keeping a negligible difference in the correctness of results. ALBERT uses two techniques to optimize its work: factorization of the data and sharing parameters between layers. Because of the way the data is processed, the words converted into the vectors representing them had to have appropriately matched dimensions; factorization made it possible to build smaller vectors and scale the result. Sharing parameters between layers causes the largest drop in the model's correctness, but makes it possible to reduce the number of parameters in the model almost eightfold.\n\n\n**DistilBERT** [[5]](https://arxiv.org/abs/1910.01108) - a model presented by HuggingFace which was meant to minimize BERT's size and improve its efficiency. It has only half of the original layers, since it takes a different approach, called distillation [[4]](https://arxiv.org/abs/1503.02531v1), which aims to approximate BERT's result. The general idea is that once one neural network (BERT) has been trained, its results can be more or less predicted by another, smaller network. One of the optimization functions of the DistilBERT model is the Kullback-Leibler divergence, which measures the discrepancy between two probability distributions; a small sketch of this loss follows this cell. Thanks to its limited complexity, this model trains four times faster while keeping 95% of the correctness of its results compared with the original BERT.\n\n---\n\n### **Trivia**\n\n**MegatronBERT** [[10]](https://arxiv.org/abs/1909.08053) - a very impractical model designed by NVIDIA. It has 3.9 billion parameters and was trained in parallel on 512 GPUs, sustaining 15.1 petaFLOPS. To use the model you need a supercomputer, while the correctness of the results increases by only a few percentage points.\n\n**herBERT** \n[[13]](https://arxiv.org/abs/2005.00630) - BERT pre-trained on Polish text", "_____no_output_____" ] ],
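[ [ "A minimal PyTorch sketch (ours, for illustration only — not DistilBERT's exact training objective) of the distillation idea: the student is nudged to match the teacher's output distribution via the Kullback-Leibler divergence, with the temperature-softening trick commonly used in distillation:\n\n```python\nimport torch\nimport torch.nn.functional as F\n\nteacher_logits = torch.randn(1, 5)  # stand-in for the teacher's (BERT's) output\nstudent_logits = torch.randn(1, 5, requires_grad=True)\n\nT = 2.0  # softening temperature\nloss = F.kl_div(\n    F.log_softmax(student_logits / T, dim=-1),\n    F.softmax(teacher_logits / T, dim=-1),\n    reduction='batchmean',\n) * T * T\nloss.backward()  # gradients pull the student towards the teacher\nprint(float(loss))\n```", "_____no_output_____" ] ],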
\"bardzo\"\n \"PDT\", # inny przymiotnik\n \"RP\", # partykuła\n \"VB\", # czasownik\n \"VBD\", # czasownik czas przeszły\n \"VBG\", # czasownik czas present participle\n \"VBN\", # czasownik czas past participle\n \"VBP\", # czasownik inny\n \"NNS\", # rzeczownik\n \"NNPS\", # rzeczownik plural\n \"VBZ\", # czasownik inny\n \"CC\", # spójnik\n \"PRP\", # zaimek osobowy\n \"IN\",\n]\n\n# w jaki sposób modyfikujemy zdanie\nTRANSFORM_TYPE = Enum(\"TRANSFORM_TYPE\", [\"POSITIVE\", \"NEGATIVE\"])\n\ndef negate_transform(en:Enum):\n return TRANSFORM_TYPE.POSITIVE if en is TRANSFORM_TYPE.NEGATIVE else \\\n TRANSFORM_TYPE.POSITIVE\n\n# reprezentacja wyrazu\n@dataclass\nclass WordStructure:\n idx:int\n word:str\n pos_token:str\n transform_type:Enum\n\n# daj słowa które będą podmieniane, sortując wg ich ważności\ndef get_relevant_tokens(sentence:str, t_type:Enum):\n # podział zdania na kolejne słowa i przydzielenie tagów POS\n tokens = nltk.pos_tag(tokenizer.tokenize(sentence))\n # filtruj z wyrazów które nie mają znaczenia\n tokens = list(filter(\n lambda tt: tt.pos_token in token_order,\n [WordStructure(i, *el, t_type) for i, el in enumerate(tokens)]\n ))\n\n # wewnętrzna struktura wyrazu:\n # (index, słowo, pos_token, czy_zamaskowany)\n # sortujemy wg istotnosci tokenu\n tokens.sort(key=lambda el: token_order.index(el.pos_token))\n\n return tokens", "_____no_output_____" ] ], [ [ "### **Wykorzystanie modeli przetrenowanych w aplikacji**\n---\n</br>\n\n**Zmieniamy kolejne wyrazy w tekście dzięki wymyślonej przez nas heurystyce.**\n\n* Na początku wypisujemy jakie słowa w zdaniu będą miały znaczenie w decydowaniu o nastawieniu zdania (_patrz powyższa lista token_order_). Z podaną kolejnością zmieniamy kolejne wyrazy w zdaniu.\n</br>\n\n* Deklarujemy 100 najlepszych kandydatów na zajęcie znaczących słów.\nCzęsto jednak model dobierający tych kandydatów może się mylić. \n\n Jak zauważyliśmy model BERT bardziej zwraca uwagę na to, aby wybrane słowo było możliwie najlepiej dopasowane do kontekstu, a nie na to aby dopasować najbardziej odpowiednie słowo ze swojego zbioru danych. Istnieją również inne powody tego zachowania, jak na przykład niewystarczająca ilość danych do treningu, bądź nieprawidłowe dane wejściowe.\n\n Aby ten problem zmniejszyć, dodajemy 3 model: **klasyfikator nastroju**\n</br>\n\n* Trzeci model sprawdza, czy dodane słowo poprawnie zmieni nam wartość nastroju zdania. \n\n * Jeżeli tak, to zwracamy i wyświetlamy takie zdanie.\n * Jeżeli nie, testujemy kolejnych kandydatów, czy ci nie dadzą lepszego rezultatu.\n\nOczywiście metodę możnaby jeszcze usprawnić na wiele różnych sposobów, lecz na ten moment obecny rezultat jest naszym zdaniem akceptowalny.", "_____no_output_____" ] ], [ [ "from torch.nn import functional as F\n\ndef get_current_polarity(sentence):\n # określanie obecnego nastroju zdania\n outputs_n = classif_model(**tokenizer(sentence, return_tensors = \"pt\"))\n sof = round(F.softmax(outputs_n['logits'], dim=-1).tolist()[0][1],4)\n print(\"Polarity: \", sof)\n return sof\n\n\ndef get_model_predictions(word_list:str, t_type:Enum) -> None:\n \"\"\"\n Dostaje liste z wyrazami oraz maskami. 
", "_____no_output_____" ] ], [ [ "from torch.nn import functional as F\n\ndef get_current_polarity(sentence):\n    # determine the sentence's current sentiment\n    outputs_n = classif_model(**tokenizer(sentence, return_tensors = \"pt\"))\n    sof = round(F.softmax(outputs_n['logits'], dim=-1).tolist()[0][1],4)\n    print(\"Polarity: \", sof)\n    return sof\n\n\ndef get_model_predictions(word_list:list, t_type:Enum) -> None:\n    \"\"\"\n    Takes a list of words and masks. The sentence is modified by \nfilling in its masked words according to the given transform type\n    \"\"\"\n    # prepare the data to feed to our model\n    inputs = tokenizer(tokenizer.convert_tokens_to_string(word_list), return_tensors = \"pt\").to(device)\n    # find the indices of the masked tokens in the sentence\n    masked_idxs = torch.where(inputs[\"input_ids\"][0] == tokenizer.mask_token_id)[0].to(device)\n    ori_list = list(\n        map(\n            lambda el: el[0], \n            filter(\n                lambda x: x[1] == tokenizer.mask_token,\n                enumerate(word_list)\n            )\n        )\n    )\n\n    # pick the appropriate model\n    if t_type is TRANSFORM_TYPE.POSITIVE:\n        outputs = positive_model(**inputs)\n    elif t_type is TRANSFORM_TYPE.NEGATIVE:\n        outputs = negative_model(**inputs)\n    else: assert False\n    \n    # determine which words are most likely\n    # to appear in each empty slot\n    for pred_idx, ori_idx in zip(masked_idxs, ori_list):\n        end_layer = outputs.logits[0, pred_idx, :]\n        assert word_list[ori_idx] == tokenizer.mask_token, \"masking not masked word!!!\"\n        # build a list of 100 candidate words and assign them POS tags\n        candidates = nltk.pos_tag(\n            tokenizer.convert_ids_to_tokens(torch.topk(end_layer, 100, dim = -1)[1])\n        )\n        current_polarity = None\n        \n        for cand_word in filter(lambda el: el[1] in token_order, candidates):\n            word_list[ori_idx] = cand_word[0]\n            new_polar = get_current_polarity(\n                tokenizer.convert_tokens_to_string(word_list)\n            )\n            if current_polarity is None:\n                current_polarity = new_polar\n                continue\n            # stop substituting once the score has improved;\n            # if the score is already so good that we are unlikely\n            # to improve it further, stop with the current result\n            if t_type is TRANSFORM_TYPE.POSITIVE:\n                if new_polar > current_polarity or new_polar > 0.99:\n                    break\n            elif t_type is TRANSFORM_TYPE.NEGATIVE:\n                if new_polar < current_polarity or new_polar < 0.01:\n                    break\n    \n\ntest_sentence = (\n    \"I absolutely [MASK] this movie! I do think it is [MASK].\"\n    \" Watching this film was a [MASK] experience for me and my \"\n    \"friends on this rainy afternoon. The acting was also very [MASK] made.\"\n)\ntest_sentence_tok = tokenizer.tokenize(test_sentence)\n\nget_model_predictions(test_sentence_tok, TRANSFORM_TYPE.NEGATIVE)\ntokenizer.convert_tokens_to_string(test_sentence_tok)", "Polarity:  0.9987\nPolarity:  0.9991\nPolarity:  0.9986\nPolarity:  0.9984\nPolarity:  0.9986\nPolarity:  0.9975\nPolarity:  0.997\nPolarity:  0.9973\nPolarity:  0.997\nPolarity:  0.9968\nPolarity:  0.9953\nPolarity:  0.9843\n" ], [ "# one step of changing the text's sentiment. This function is called when a button is clicked\ndef transform_polarity(splited_sentence, relevant_tokens, t_type:Enum):\n    # pick n tokens whose current transform type differs from t_type\n    number_of_samples = 1 # round(len(relevant_tokens) / 6 + 0.5)\n    samples = list(filter(lambda el: el.transform_type is not t_type, relevant_tokens))[:number_of_samples]\n\n    # assign a mask to every newly removed word\n    for s in samples:\n        s.transform_type = t_type\n        splited_sentence[s.idx] = tokenizer.mask_token\n    \n    get_model_predictions(splited_sentence, t_type)\n\n\nclass SentenceStructure:\n    \"\"\"\n    Describes the structure of a sentence. 
From the sentence we build a list of tokens\n    \"\"\"\n    def __init__(self, sentence:str):\n        self.sentence = sentence\n        self.split_sentence = tokenizer.tokenize(sentence)\n\n        self.start_polar = get_current_polarity(self.__repr__())\n        # current polarity\n        self.sentiment = TRANSFORM_TYPE.POSITIVE if self.start_polar > 0.5 \\\n            else TRANSFORM_TYPE.NEGATIVE\n\n        self.relevant_tokens = get_relevant_tokens(sentence, self.sentiment)\n    \n    def __repr__(self):\n        return tokenizer.convert_tokens_to_string(self.split_sentence)", "_____no_output_____" ] ], [ [ "### Interactive part\n\nBelow is a place to type in the sentence whose sentiment we want to transform.", "_____no_output_____" ] ], [ [ "from ipywidgets import interact, IntSlider, Textarea, Button, Output\n\n# example sentence\nsentence = \"I absolutely love this movie! I do think it is great. Watching this film was a great experience for me and my friends on this rainy afternoon. The acting was also very well made.\"\nout = Output()\n\n@interact(\n    inner=Textarea(\n        value=sentence,\n        placeholder='',\n        description='Text:',\n        disabled=False,\n    )\n)\ndef choose_sentence(inner:str):\n    global sentence\n    sentence = inner\n\n\nconfirm_button = Button(description=\"Confirm sentence\")\nsentence_obj = None\ncurrent_sentence = None\n\ndef on_button_clicked(b):\n    global current_sentence, sentence_obj\n    with out:\n        out.clear_output()\n        sentence_obj = SentenceStructure(sentence)\n\nconfirm_button.on_click(on_button_clicked)\n\ndisplay(confirm_button)\ndisplay(out)", "_____no_output_____" ], [ "from functools import lru_cache\n\npolarity_score = 50\nsentence_out = Output()\nt_type = None\n\npositive_button = Button(description=\"More positive\")\nnegative_button = Button(description=\"More negative\")\n\n@lru_cache()\ndef get_morphed_sentence(polarity_score):\n    transform_polarity(sentence_obj.split_sentence, sentence_obj.relevant_tokens, t_type)\n    return sentence_obj.__repr__()\n\ndef polarize_up(btn):\n    global sentence_obj, polarity_score, t_type\n    if polarity_score >= 100:\n        return\n    \n    polarity_score += 1\n    with sentence_out:\n        sentence_out.clear_output()\n        t_type = TRANSFORM_TYPE.POSITIVE\n        print(\n            f\"Score: {polarity_score}: {TRANSFORM_TYPE.POSITIVE}\\n\",\n            get_morphed_sentence(polarity_score)\n        )\n\ndef polarize_down(btn):\n    global sentence_obj, polarity_score, t_type\n    if polarity_score <= 0:\n        return\n    \n    polarity_score -= 1\n    with sentence_out:\n        sentence_out.clear_output()\n        t_type = TRANSFORM_TYPE.NEGATIVE\n        print(\n            f\"Score: {polarity_score} {TRANSFORM_TYPE.NEGATIVE}\\n\",\n            get_morphed_sentence(polarity_score)\n        )\n\npositive_button.on_click(polarize_up)\nnegative_button.on_click(polarize_down)\n\nwith sentence_out:\n    print(sentence_obj)\n\ndisplay(positive_button)\ndisplay(negative_button)\ndisplay(sentence_out)", "_____no_output_____" ] ], [ [ "# Summary", "_____no_output_____" ], [ "## <a name=\"OcenaAplikacji\"></a>Evaluation of the application\nThe implemented models are trained on a very limited amount of data, which clearly shows in the results they return and in how quickly the link between the words they propose and the target polarity value decays. 
\n\nThe classifier model has a clear tendency to give extreme scores, which is why the largest visible difference when shifting a sentence's polarity occurs in the score range from 30 to 50. Outside that range, the sentences proposed by the positive and the negative model quickly converge to a maximally positively or negatively polarized form, so changing the score factor no longer affects the sentence. \n\nThe part-of-speech classifier is also an imperfect solution, since many English words, despite identical spelling, can change their function in a sentence depending on the context, not to mention possible occurrences of expressions consisting of more than one word.\n\nThe application works satisfactorily by the standards of a student project, but it certainly leaves plenty of room for optimization and additional training.", "_____no_output_____" ], [ "## <a name=\"OcenaProjektu\"></a>Evaluation of the project\n\nOur application was an example that such models can be genuinely useful.\nResponsive control over the sentiment of arbitrary text would once have been a very difficult (if not impossible) task.\n\nToday's progress in technology and in the infrastructure around the ML industry means that, as students, we were able to write such a developed application in limited time and at zero personal cost.\n\nLanguage processing models have many applications, and today's pace of technological progress gives us a lot of hope that the next generation of students will be able to write an application solving an even more complex problem with even better tools.", "_____no_output_____" ], [ "## <a name=\"PrzyszloscModeliTransformer\"></a>The future of Transformer models\nThe first transformer models, introduced in 2017, revolutionized the NLP community with their performance and their way of analysing data. Since then they have stood at the very centre of natural language processing research. New ideas appear practically every month, trying in various ways to refine and improve the system.\n\nBreakthrough solutions are also presented at short intervals, very often by large corporations such as Google or Facebook, which only underlines how practical this branch of artificial intelligence is.\n\nLooking at the pace of development so far, we can risk the claim that transformers will stay with us for some time yet, still evolving and changing their form.\n\nAt the same time, we cannot rule out that this method will soon be suddenly displaced by the next innovation in the field.", "_____no_output_____" ], [ "## <a name=\"Bibliografia\"></a>__Bibliography__\n\n[[1] *Attention Is All You Need*](https://arxiv.org/abs/1706.03762)\n\n[[2] *Efficient Estimation of Word Representations in Vector Space*](https://arxiv.org/abs/1301.3781)\n\n[[3] *BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding*](https://arxiv.org/abs/1810.04805)\n\n[[4] *Distilling the Knowledge in a Neural Network*](https://arxiv.org/abs/1503.02531v1)\n\n[[5] *DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter*](https://arxiv.org/abs/1910.01108)\n\n[[6] *What Does BERT Look At? 
An Analysis of BERT's Attention*](https://arxiv.org/abs/1906.04341)\n\n[[7] *LambdaNetworks: Modeling Long-Range Interactions Without Attention*](https://arxiv.org/abs/2102.08602v1)\n\n[[8] *RoBERTa: A Robustly Optimized BERT Pretraining Approach*](https://arxiv.org/abs/1907.11692)\n\n[[9] *ALBERT: A Lite BERT for Self-supervised Learning of Language Representations*](https://arxiv.org/abs/1909.11942)\n\n[[10] *Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism*](https://arxiv.org/abs/1909.08053)\n\n[[11] *Language Models are Unsupervised Multitask Learners*](https://d4mucfpksywv.cloudfront.net/better-language-models/language_models_are_unsupervised_multitask_learners.pdf)\n\n[[12] *Release Strategies and the Social Impacts of Language Models*](https://arxiv.org/abs/1908.09203)\n\n[[13] *KLEJ: Comprehensive Benchmark for Polish Language Understanding*](https://arxiv.org/abs/2005.00630)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
cb549e0802980952cf88c2e3b2cfc00c065668d7
8,785
ipynb
Jupyter Notebook
sss_robotic-arm/notebooks/datagen1.ipynb
quantiaconsulting/continuous-analytics-examples
c10e29b457536cc989317b2c9af14ec3924685ca
[ "Apache-2.0" ]
13
2021-01-13T17:21:53.000Z
2022-02-04T10:20:47.000Z
sss_robotic-arm/notebooks/datagen1.ipynb
quantiaconsulting/continuous-analytics-examples
c10e29b457536cc989317b2c9af14ec3924685ca
[ "Apache-2.0" ]
17
2021-03-04T19:49:14.000Z
2022-03-30T09:47:28.000Z
sss_robotic-arm/notebooks/datagen1.ipynb
quantiaconsulting/continuous-analytics-examples
c10e29b457536cc989317b2c9af14ec3924685ca
[ "Apache-2.0" ]
8
2021-01-14T13:26:09.000Z
2022-01-26T09:12:51.000Z
40.298165
2,380
0.552191
[ [ [ "!pip install confluent-kafka==1.7.0", "Collecting confluent-kafka==1.7.0\n Downloading confluent_kafka-1.7.0-cp38-cp38-manylinux2010_x86_64.whl (2.7 MB)\n\u001b[K |████████████████████████████████| 2.7 MB 37.7 MB/s eta 0:00:01\n\u001b[?25hInstalling collected packages: confluent-kafka\nSuccessfully installed confluent-kafka-1.7.0\n" ], [ "from confluent_kafka.admin import AdminClient, NewTopic, NewPartitions\nfrom confluent_kafka import KafkaException\nimport sys\nfrom uuid import uuid4", "_____no_output_____" ], [ "bootstrap_server = \"kafka:9092\" # Brokers act as cluster entripoints", "_____no_output_____" ], [ "conf = {'bootstrap.servers': bootstrap_server}", "_____no_output_____" ], [ "a = AdminClient(conf)", "_____no_output_____" ], [ "md = a.list_topics(timeout=10)\nprint(\" {} topics:\".format(len(md.topics)))\nfor t in iter(md.topics.values()):\n if t.error is not None:\n errstr = \": {}\".format(t.error)\n else:\n errstr = \"\"\n print(\" \\\"{}\\\" with {} partition(s){}\".format(t, len(t.partitions), errstr))", " 1 topics:\n \"_schemas\" with 1 partition(s)\n" ], [ "from confluent_kafka import SerializingProducer\nfrom confluent_kafka.serialization import *\n\nimport time\n\ntopic = \"RoboticArm\"\n\ndef delivery_report(err, msg):\n if err is not None:\n print(\"Failed to deliver message: {}\".format(err))\n else:\n print(\"Produced record to topic {} partition [{}] @ offset {}\"\n .format(msg.topic(), msg.partition(), msg.offset()))", "_____no_output_____" ], [ "producer_conf = {\n 'bootstrap.servers': bootstrap_server,\n 'key.serializer': StringSerializer('utf_8'),\n 'value.serializer': StringSerializer('utf_8')\n}\n\nproducer = SerializingProducer(producer_conf)", "_____no_output_____" ] ], [ [ "## run the following cell to loop across the data\n\nthey are the same data as those in the EPL example only the time flows at half of the speed", "_____no_output_____" ] ], [ [ "import json\nfrom IPython.display import clear_output\n\ndef send(value):\n key = None\n producer.produce(topic=topic, value=json.dumps(value), key=key, on_delivery=delivery_report)\n print(value)\n producer.poll(1)\n clear_output(wait=True)\n\nwhile True:\n send({\"id\":\"1\", \"status\":\"ready\", \"stressLevel\": 0, \"ts\": int(time.time())})\n time.sleep(2)\n send({\"id\":\"1\", \"status\": \"goodGrasped\", \"stressLevel\": 1, \"ts\": int(time.time())}) \n time.sleep(2)\n ts = int(time.time())\n send({\"id\":\"1\", \"status\":\"movingGood\", \"stressLevel\": 7, \"ts\": ts})\n send({\"id\":\"2\", \"status\":\"ready\", \"stressLevel\": 0, \"ts\": ts })\n time.sleep(2)\n send({\"id\":\"2\", \"status\":\"goodGrasped\", \"stressLevel\": 5, \"ts\": int(time.time()) })\n time.sleep(1)\n send({\"id\":\"2\", \"status\":\"movingGood\", \"stressLevel\": 9, \"ts\": int(time.time()) })\n time.sleep(10)\n ts = int(time.time())\n send({\"id\":\"1\", \"status\":\"placingGood\", \"stressLevel\": 3, \"ts\": ts})\n send({\"id\":\"2\", \"status\":\"placingGood\", \"stressLevel\": 3, \"ts\": ts }) \n time.sleep(8)\n ts = int(time.time())\n send({\"id\":\"1\", \"status\":\"moving\", \"stressLevel\": 2, \"ts\": ts})\n send({\"id\":\"2\", \"status\":\"moving\", \"stressLevel\": 1, \"ts\": ts }) \n time.sleep(6)\n ts = int(time.time())\n send({\"id\":\"1\", \"status\":\"ready\", \"stressLevel\": 0, \"ts\": ts})\n send({\"id\":\"2\", \"status\":\"ready\", \"stressLevel\": 0, \"ts\": ts }) \n time.sleep(2)", "_____no_output_____" ] ], [ [ "to interrupt the execution of the cell, prese the square icon in the bar or choose 
*interrupt kernel* from the *kernel* dropdown menu", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cb54d09c85edaf5eb36e8e5b848c95f08fa41cbd
20,261
ipynb
Jupyter Notebook
nbs/03_dispatch.ipynb
lgvaz/fastcore
fa39696dee5929e9dede5a69e4614ac9f0257ba2
[ "Apache-2.0" ]
1
2020-04-02T22:34:55.000Z
2020-04-02T22:34:55.000Z
nbs/03_dispatch.ipynb
GenevieveBuckley/fastcore
8e331b8a26a42e0f57c8692f2d3e1a3ce2542e3a
[ "Apache-2.0" ]
null
null
null
nbs/03_dispatch.ipynb
GenevieveBuckley/fastcore
8e331b8a26a42e0f57c8692f2d3e1a3ce2542e3a
[ "Apache-2.0" ]
null
null
null
26.589239
232
0.506046
[ [ [ "#default_exp dispatch", "_____no_output_____" ], [ "#export\nfrom fastcore.imports import *\nfrom fastcore.foundation import *\nfrom fastcore.utils import *", "_____no_output_____" ], [ "from nbdev.showdoc import *\nfrom fastcore.test import *", "_____no_output_____" ] ], [ [ "# Type dispatch\n\n> Basic single and dual parameter dispatch", "_____no_output_____" ], [ "## Helpers", "_____no_output_____" ] ], [ [ "#exports\ndef type_hints(f):\n \"Same as `typing.get_type_hints` but returns `{}` if not allowed type\"\n return typing.get_type_hints(f) if isinstance(f, typing._allowed_types) else {}", "_____no_output_____" ], [ "#export\ndef anno_ret(func):\n \"Get the return annotation of `func`\"\n if not func: return None\n ann = type_hints(func)\n if not ann: return None\n return ann.get('return')", "_____no_output_____" ], [ "#hide\ndef f(x) -> float: return x\ntest_eq(anno_ret(f), float)\ndef f(x) -> typing.Tuple[float,float]: return x\ntest_eq(anno_ret(f), typing.Tuple[float,float])\ndef f(x) -> None: return x\ntest_eq(anno_ret(f), NoneType)\ndef f(x): return x\ntest_eq(anno_ret(f), None)\ntest_eq(anno_ret(None), None)", "_____no_output_____" ], [ "#export\ncmp_instance = functools.cmp_to_key(lambda a,b: 0 if a==b else 1 if issubclass(a,b) else -1)", "_____no_output_____" ], [ "td = {int:1, numbers.Number:2, numbers.Integral:3}\ntest_eq(sorted(td, key=cmp_instance), [numbers.Number, numbers.Integral, int])", "_____no_output_____" ], [ "#export\ndef _p2_anno(f):\n \"Get the 1st 2 annotations of `f`, defaulting to `object`\"\n hints = type_hints(f)\n ann = [o for n,o in hints.items() if n!='return']\n while len(ann)<2: ann.append(object)\n return ann[:2]", "_____no_output_____" ], [ "def _f(a): pass\ntest_eq(_p2_anno(_f), (object,object))\ndef _f(a, b): pass\ntest_eq(_p2_anno(_f), (object,object))\ndef _f(a:None, b)->str: pass\ntest_eq(_p2_anno(_f), (NoneType,object))\ndef _f(a:str, b)->float: pass\ntest_eq(_p2_anno(_f), (str,object))\ndef _f(a:None, b:str)->float: pass\ntest_eq(_p2_anno(_f), (NoneType,str))\ndef _f(a:int, b:int)->float: pass\ntest_eq(_p2_anno(_f), (int,int))\ndef _f(self, a:int, b:int): pass\ntest_eq(_p2_anno(_f), (int,int))\ndef _f(a:int, b:str)->float: pass\ntest_eq(_p2_anno(_f), (int,str))\ntest_eq(_p2_anno(attrgetter('foo')), (object,object))", "_____no_output_____" ] ], [ [ "## TypeDispatch -", "_____no_output_____" ], [ "The following class is the basis that allows us to do type dipatch with type annotations. 
", "_____no_output_____" ] ], [ [ "#export\nclass _TypeDict:\n    def __init__(self): self.d,self.cache = {},{}\n\n    def _reset(self):\n        self.d = {k:self.d[k] for k in sorted(self.d, key=cmp_instance, reverse=True)}\n        self.cache = {}\n\n    def add(self, t, f):\n        \"Add type `t` and function `f`\"\n        if not isinstance(t,tuple): t=tuple(L(t))\n        for t_ in t: self.d[t_] = f\n        self._reset()\n\n    def all_matches(self, k):\n        \"Find first matching type that is a super-class of `k`\"\n        if k not in self.cache:\n            types = [f for f in self.d if k==f or (isinstance(k,type) and issubclass(k,f))]\n            self.cache[k] = [self.d[o] for o in types]\n        return self.cache[k]\n\n    def __getitem__(self, k):\n        \"Find first matching type that is a super-class of `k`\"\n        res = self.all_matches(k)\n        return res[0] if len(res) else None\n\n    def __repr__(self): return self.d.__repr__()\n    def first(self): return first(self.d.values())", "_____no_output_____" ], [ "#export\nclass TypeDispatch:\n    \"Dictionary-like object; `__getitem__` matches keys of types using `issubclass`\"\n    def __init__(self, funcs=(), bases=()):\n        self.funcs,self.bases = _TypeDict(),L(bases).filter(is_not(None))\n        for o in L(funcs): self.add(o)\n        self.inst = None\n\n    def add(self, f):\n        \"Add type `t` and function `f`\"\n        a0,a1 = _p2_anno(f)\n        t = self.funcs.d.get(a0)\n        if t is None:\n            t = _TypeDict()\n            self.funcs.add(a0, t)\n        t.add(a1, f)\n\n    def first(self): return self.funcs.first().first()\n    def returns(self, x): return anno_ret(self[type(x)])\n    def returns_none(self, x):\n        r = anno_ret(self[type(x)])\n        return r if r == NoneType else None\n\n    def _attname(self,k): return getattr(k,'__name__',str(k))\n    def __repr__(self):\n        r = [f'({self._attname(k)},{self._attname(l)}) -> {getattr(v, \"__name__\", v.__class__.__name__)}'\n             for k in self.funcs.d for l,v in self.funcs[k].d.items()]\n        return '\\n'.join(r)\n\n    def __call__(self, *args, **kwargs):\n        ts = L(args).map(type)[:2]\n        f = self[tuple(ts)]\n        if not f: return args[0]\n        if self.inst is not None: f = MethodType(f, self.inst)\n        return f(*args, **kwargs)\n\n    def __get__(self, inst, owner):\n        self.inst = inst\n        return self\n\n    def __getitem__(self, k):\n        \"Find first matching type that is a super-class of `k`\"\n        k = L(k)\n        while len(k)<2: k.append(object)\n        r = self.funcs.all_matches(k[0])\n        for t in r:\n            o = t[k[1]]\n            if o is not None: return o\n        for base in self.bases:\n            res = base[k]\n            if res is not None: return res\n        return None", "_____no_output_____" ], [ "def f_col(x:typing.Collection): return x\ndef f_nin(x:numbers.Integral)->int: return x+1\ndef f_ni2(x:int): return x\ndef f_bll(x:(bool,list)): return x\ndef f_num(x:numbers.Number): return x\nt = TypeDispatch([f_nin,f_ni2,f_num,f_bll,None])\n\nt.add(f_ni2) #Should work even if we add the same function twice.\ntest_eq(t[int], f_ni2)\ntest_eq(t[np.int32], f_nin)\ntest_eq(t[str], None)\ntest_eq(t[float], f_num)\ntest_eq(t[bool], f_bll)\ntest_eq(t[list], f_bll)\nt.add(f_col)\ntest_eq(t[str], f_col)\ntest_eq(t[np.int32], f_nin)\no = np.int32(1)\ntest_eq(t(o), 2)\ntest_eq(t.returns(o), int)\nassert t.first() is not None\nt", "_____no_output_____" ] ], [ [ "If `bases` is set to a collection of `TypeDispatch` objects, then they are searched for matching functions if no match is found in this object.", "_____no_output_____" ] ], [ [ "def f_str(x:str): return x+'1'\n\nt2 = TypeDispatch(f_str, bases=t)\ntest_eq(t2[int], 
f_ni2)\ntest_eq(t2[np.int32], f_nin)\ntest_eq(t2[float], f_num)\ntest_eq(t2[bool], f_bll)\ntest_eq(t2[str], f_str)\ntest_eq(t2('a'), 'a1')\ntest_eq(t2[np.int32], f_nin)\ntest_eq(t2(o), 2)\ntest_eq(t2.returns(o), int)", "_____no_output_____" ], [ "def m_nin(self, x:(str,numbers.Integral)): return str(x)+'1'\ndef m_bll(self, x:bool): self.foo='a'\ndef m_num(self, x:numbers.Number): return x\n\nt = TypeDispatch([m_nin,m_num,m_bll])\nclass A: f = t\na = A()\ntest_eq(a.f(1), '11')\ntest_eq(a.f(1.), 1.)\ntest_is(a.f.inst, a)\na.f(False)\ntest_eq(a.foo, 'a')\ntest_eq(a.f(()), ())", "_____no_output_____" ], [ "def m_tup(self, x:tuple): return x+(1,)\nt2 = TypeDispatch(m_tup, t)\nclass A2: f = t2\na2 = A2()\ntest_eq(a2.f(1), '11')\ntest_eq(a2.f(1.), 1.)\ntest_is(a2.f.inst, a2)\na2.f(False)\ntest_eq(a2.foo, 'a')\ntest_eq(a2.f(()), (1,))", "_____no_output_____" ], [ "def f1(x:numbers.Integral, y): return x+1\ndef f2(x:int, y:float): return x+y\nt = TypeDispatch([f1,f2])\n\ntest_eq(t[int], f1)\ntest_eq(t[int,int], f1)\ntest_eq(t[int,float], f2)\ntest_eq(t[float,float], None)\ntest_eq(t[np.int32,float], f1)\ntest_eq(t(3,2.0), 5)\ntest_eq(t(3,2), 4)\ntest_eq(t('a'), 'a')\nt", "_____no_output_____" ] ], [ [ "## typedispatch Decorator", "_____no_output_____" ] ], [ [ "#export\nclass DispatchReg:\n \"A global registry for `TypeDispatch` objects keyed by function name\"\n def __init__(self): self.d = defaultdict(TypeDispatch)\n def __call__(self, f):\n nm = f'{f.__qualname__}'\n self.d[nm].add(f)\n return self.d[nm]\n\ntypedispatch = DispatchReg()", "_____no_output_____" ], [ "@typedispatch\ndef f_td_test(x, y): return f'{x}{y}'\n@typedispatch\ndef f_td_test(x:numbers.Integral, y): return x+1\n@typedispatch\ndef f_td_test(x:int, y:float): return x+y\n\ntest_eq(f_td_test(3,2.0), 5)\ntest_eq(f_td_test(3,2), 4)\ntest_eq(f_td_test('a','b'), 'ab')", "_____no_output_____" ] ], [ [ "## Casting", "_____no_output_____" ], [ "Now that we can dispatch on types, let's make it easier to cast objects to a different type.", "_____no_output_____" ] ], [ [ "#export\n_all_=['cast']", "_____no_output_____" ], [ "#export\ndef retain_meta(x, res):\n \"Call `res.set_meta(x)`, if it exists\"\n if hasattr(res,'set_meta'): res.set_meta(x)\n return res", "_____no_output_____" ], [ "#export\ndef default_set_meta(self, x):\n \"Copy over `_meta` from `x` to `res`, if it's missing\"\n if hasattr(x, '_meta') and not hasattr(self, '_meta'): self._meta = x._meta\n return self", "_____no_output_____" ], [ "#export\n@typedispatch\ndef cast(x, typ):\n \"cast `x` to type `typ` (may also change `x` inplace)\"\n res = typ._before_cast(x) if hasattr(typ, '_before_cast') else x\n if isinstance(res, ndarray): res = res.view(typ)\n elif hasattr(res, 'as_subclass'): res = res.as_subclass(typ)\n else:\n try: res.__class__ = typ\n except: res = typ(res)\n return retain_meta(x, res)", "_____no_output_____" ] ], [ [ "This works both for plain python classes:...", "_____no_output_____" ] ], [ [ "mk_class('_T1', 'a')\nclass _T2(_T1): pass\n\nt = _T1(a=1)\nt2 = cast(t, _T2)\ntest_eq_type(_T2(a=1), t2)", "_____no_output_____" ] ], [ [ "...as well as for arrays and tensors.", "_____no_output_____" ] ], [ [ "class _T1(ndarray): pass\n\nt = array([1])\nt2 = cast(t, _T1)\ntest_eq(array([1]), t2)\ntest_eq(_T1, type(t2))", "_____no_output_____" ] ], [ [ "To customize casting for other types, define a separate `cast` function with `typedispatch` for your type.", "_____no_output_____" ] ], [ [ "#export\ndef retain_type(new, old=None, typ=None):\n \"Cast `new` to 
type of `old` or `typ` if it's a superclass\"\n    # e.g. old is TensorImage, new is Tensor - if not subclass then do nothing\n    if new is None: return\n    assert old is not None or typ is not None\n    if typ is None:\n        if not isinstance(old, type(new)): return new\n        typ = old if isinstance(old,type) else type(old)\n    # Do nothing if `new` is already an instance of the requested type (i.e. same type)\n    if typ==NoneType or isinstance(new, typ): return new\n    return retain_meta(old, cast(new, typ))", "_____no_output_____" ], [ "class _T(tuple): pass\na = _T((1,2))\nb = tuple((1,2))\ntest_eq_type(retain_type(b, typ=_T), a)", "_____no_output_____" ] ], [ [ "If `old` has a `_meta` attribute, its content is passed when casting `new` to the type of `old`.", "_____no_output_____" ] ], [ [ "class _A():\n    set_meta = default_set_meta\n    def __init__(self, t): self.t=t\n\nclass _B1(_A):\n    def __init__(self, t, a=1):\n        super().__init__(t)\n        self._meta = {'a':a}\n    \nx = _B1(1, a=2)\nb = _A(1)\ntest_eq(retain_type(b, old=x)._meta, {'a': 2})", "_____no_output_____" ], [ "a = {L: [int, tuple]}\nfirst(a.keys())", "_____no_output_____" ], [ "#export\ndef retain_types(new, old=None, typs=None):\n    \"Cast each item of `new` to type of matching item in `old` if it's a superclass\"\n    if not is_listy(new): return retain_type(new, old, typs)\n    if typs is not None:\n        if isinstance(typs, dict):\n            t = first(typs.keys())\n            typs = typs[t]\n        else: t,typs = typs,None\n    else: t = type(old) if old is not None and isinstance(old,type(new)) else type(new)\n    return t(L(new, old, typs).map_zip(retain_types, cycled=True))", "_____no_output_____" ], [ "class T(tuple): pass\n\nt1,t2 = retain_types((1,(1,(1,1))), (2,T((2,T((3,4))))))\ntest_eq_type(t1, 1)\ntest_eq_type(t2, T((1,T((1,1)))))\n\nt1,t2 = retain_types((1,(1,(1,1))), typs = {tuple: [int, {T: [int, {T: [int,int]}]}]})\ntest_eq_type(t1, 1)\ntest_eq_type(t2, T((1,T((1,1)))))", "_____no_output_____" ], [ "#export\ndef explode_types(o):\n    \"Return the type of `o`, potentially in nested dictionaries for things that are listy\"\n    if not is_listy(o): return type(o)\n    return {type(o): [explode_types(o_) for o_ in o]}", "_____no_output_____" ], [ "test_eq(explode_types((2,T((2,T((3,4)))))), {tuple: [int, {T: [int, {T: [int,int]}]}]})", "_____no_output_____" ] ], [ [ "## Export -", "_____no_output_____" ] ], [ [ "#hide\nfrom nbdev.export import notebook2script\nnotebook2script()", "Converted 00_test.ipynb.\nConverted 01_foundation.ipynb.\nConverted 02_utils.ipynb.\nConverted 03_dispatch.ipynb.\nConverted 04_transform.ipynb.\nConverted index.ipynb.\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
cb54e154724dfbd4bf3767192faf3924d7cdb497
479
ipynb
Jupyter Notebook
pset_challenging_ext/exercises/nb/p90.ipynb
mottaquikarim/pydev-psets
9749e0d216ee0a5c586d0d3013ef481cc21dee27
[ "MIT" ]
5
2019-04-08T20:05:37.000Z
2019-12-04T20:48:45.000Z
pset_challenging_ext/exercises/nb/p90.ipynb
mottaquikarim/pydev-psets
9749e0d216ee0a5c586d0d3013ef481cc21dee27
[ "MIT" ]
8
2019-04-15T15:16:05.000Z
2022-02-12T10:33:32.000Z
pset_challenging_ext/exercises/nb/p90.ipynb
mottaquikarim/pydev-psets
9749e0d216ee0a5c586d0d3013ef481cc21dee27
[ "MIT" ]
2
2019-04-10T00:14:42.000Z
2020-02-26T20:35:21.000Z
17.740741
111
0.473904
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
cb54e4b2481b98c99585e37c7af248594bfaadfb
53,683
ipynb
Jupyter Notebook
tf_linsolve_workspace.ipynb
tyler-a-cox/linsolve
324945adc71caf576e3f7c0a216ba85e8235ae56
[ "MIT" ]
null
null
null
tf_linsolve_workspace.ipynb
tyler-a-cox/linsolve
324945adc71caf576e3f7c0a216ba85e8235ae56
[ "MIT" ]
null
null
null
tf_linsolve_workspace.ipynb
tyler-a-cox/linsolve
324945adc71caf576e3f7c0a216ba85e8235ae56
[ "MIT" ]
null
null
null
46.119416
28,544
0.739396
[ [ [ "import linsolve\nimport tf_linsolve\nimport tensorflow as tf\nimport scipy\nimport numpy as np\nimport pylab as plt\n%load_ext line_profiler", "_____no_output_____" ], [ "from hera_cal.io import HERAData", "_____no_output_____" ], [ "hd = HERAData('zen.2458098.27465.sum.corrupt.uvh5')", "Telescope RIMEz calculation is not in known_telescopes.\n" ], [ "data, flags, _ = hd.read(polarizations=['nn'])", "Telescope RIMEz calculation is not in known_telescopes.\n" ], [ "from hera_cal.redcal import predict_noise_variance_from_autos, SEC_PER_DAY, split_pol, join_pol", "_____no_output_____" ], [ "data_wgts = {\n bl: predict_noise_variance_from_autos(\n bl, data\n )\n ** -1\n for bl in data.keys() if bl[0] != bl[1]\n}\n\ndata = {bl: data[bl] for bl in data.keys() if bl[0] != bl[1]}", "_____no_output_____" ], [ "np.savez('zen.2458098.27465.sum.corrupt.npz', antpos=hd.antpos, data=data, wgts=data_wgts, freqs=hd.freqs)", "_____no_output_____" ], [ "!ls -alh zen.2458098.27465.sum.corrupt.npz", "-rw-r--r-- 1 tyler staff 743M Nov 11 14:25 zen.2458098.27465.sum.corrupt.npz\r\n" ] ], [ [ "## Methods", "_____no_output_____" ], [ "Here, I'll develop some methods to replace the solvers in linsolve assuming that the inputs are tensors", "_____no_output_____" ], [ "### Solvers Dense", "_____no_output_____" ] ], [ [ "# This could help with repeated calls, but increases the runtime for single usage\n#@tf.function\ndef _invert_lsqr(A, y, rcond=0, sparse=False):\n \"\"\"\n \n rcond: \n rcond must be set to 0 to work for complex datasets\n \"\"\"\n dtype = y.dtype\n assert not (\n dtype in [np.complex128, np.complex64, complex] and rcond > 0\n ), \"If using complex data, rcond must be equal to 0 for performance reasons\"\n\n x = tf.linalg.lstsq(\n tf.transpose(A, perm=[2, 0, 1]),\n tf.transpose(y)[..., None],\n l2_regularizer=rcond,\n )[..., 0]\n return x\n\ndef _invert_lsqr_sparse(xs_ys_vals, y, rcond):\n \"\"\"\n \"\"\"\n A = _get_A_sparse(xs_ys_vals)\n return _invert_lsqr(A, y, rcond, sparse=True)\n\n# This could help with repeated calls, but increases the runtime for single usage\n#@tf.function\ndef _invert_pinv(A, y, rcond, sparse=False):\n \"\"\"\n \"\"\"\n dtype = y.dtype\n A = tf.transpose(A, perm=[2, 0, 1])\n AtA = tf.matmul(A, A, adjoint_a=True, a_is_sparse=sparse, b_is_sparse=sparse)\n\n if dtype in [complex, np.complex64, np.complex128]:\n # tensorflow does not allow for complex psuedo-inverses. 
\n        R = tf.math.real(AtA)\n        C = tf.math.imag(AtA)\n        r0 = tf.matmul(tf.linalg.pinv(R), C)\n        y11 = tf.linalg.pinv(tf.matmul(C, r0) + R)\n        y10 = tf.matmul(-r0, y11)\n        AtAi = tf.cast(tf.complex(y11, y10), dtype=AtA.dtype)\n\n    else:\n        AtAi = tf.linalg.pinv(AtA, rcond=rcond)\n\n    return tf.einsum(\n        \"nij,njk,kn->ni\", AtAi, tf.transpose(A, perm=[0, 2, 1], conjugate=True), y\n    )\n\n\ndef _invert_pinv_sparse(xs_ys_vals, y, rcond):\n    \"\"\"\n    \"\"\"\n    A = _get_A_sparse(xs_ys_vals)\n    return _invert_pinv(A, y, rcond, sparse=True)\n\n# This could help with repeated calls, but increases the runtime for single usage\n#@tf.function\ndef _invert_solve(A, y, rcond, sparse=False):\n    \"\"\"\n    \"\"\"\n    A = tf.transpose(A, perm=[2, 0, 1])\n    AtA = tf.matmul(A, A, adjoint_a=True, a_is_sparse=sparse, b_is_sparse=sparse)\n    Aty = tf.matmul(\n        tf.transpose(A, perm=[0, 2, 1], conjugate=True),\n        tf.transpose(y)[..., None],\n        a_is_sparse=sparse,\n    )\n    return tf.linalg.solve(AtA, Aty)[..., 0]\n\ndef _invert_solve_sparse(xs_ys_vals, y, rcond):\n    \"\"\"\n    \"\"\"\n    A = _get_A_sparse(xs_ys_vals)\n    return _invert_solve(A, y, rcond, sparse=True)\n\n# This could help with repeated calls, but increases the runtime for single usage\n#@tf.function\ndef _invert_pinv_shared(A, y, rcond, sparse=False):\n    \"\"\"\n    \"\"\"\n    AtA = tf.matmul(A, A, adjoint_a=True, a_is_sparse=sparse, b_is_sparse=sparse)\n    dtype = AtA.dtype\n\n    if dtype in [complex, np.complex64, np.complex128]:\n        # tensorflow does not allow for complex pseudo-inverses. Compute the value manually\n        R = tf.math.real(AtA)\n        C = tf.math.imag(AtA)\n        r0 = tf.matmul(tf.linalg.pinv(R), C)\n        y11 = tf.linalg.pinv(tf.matmul(C, r0) + R)\n        y10 = tf.matmul(-r0, y11)\n        AtAi = tf.cast(tf.complex(y11, y10), dtype=AtA.dtype)\n\n    else:\n        AtAi = tf.linalg.pinv(AtA, rcond=rcond)\n    \n    return tf.transpose(tf.matmul(AtAi, tf.matmul(A, y, adjoint_a=True, a_is_sparse=sparse)))\n\ndef _invert_pinv_shared_sparse(xs_ys_vals, y, rcond):\n    \"\"\"\n    \"\"\"\n    A = _get_A_sparse(xs_ys_vals)\n    return _invert_pinv_shared(A, y, rcond, sparse=True)", "_____no_output_____" ], [ "tf.convert_to_tensor?", "_____no_output_____" ] ], [ [ "### Helper Methods", "_____no_output_____" ] ], [ [ "def _get_AtA_Aty_sparse(xs_ys_vals, y):\n    \"\"\"\n    \"\"\"\n    pass", "_____no_output_____" ] ], [ [ "## Standard Linsolve", "_____no_output_____" ], [ "### Standard Linsolve Case", "_____no_output_____" ] ], [ [ "x = np.linspace(0, 2 * np.pi, 1000)\ng = np.cos(x) + 1j * np.sin(x)\nh = np.sin(x) + 1j * np.cos(x)\ni = x + 1j * x\ndata = {'g + h': g + h, 'g + i': g + i, 'i + h': i + h, 'i + g + h': i + g + h}\nwgts = {k: np.random.uniform(0.9, 1.1, v.shape[0]) for k, v in data.items()}", "_____no_output_____" ], [ "ls = linsolve.LinearSolver(data)", "_____no_output_____" ], [ "A = ls.get_A()[..., 0]\ny = ls.get_weighted_data()", "_____no_output_____" ], [ "ATF = tf.convert_to_tensor(A)\nyTF = tf.constant(y)", "_____no_output_____" ], [ "%time ma = _invert_pinv_shared(ATF, yTF, rcond=None)", "CPU times: user 2.84 ms, sys: 1.46 ms, total: 4.3 ms\nWall time: 2.94 ms\n" ], [ "%time s = ls.solve()", "CPU times: user 2.9 ms, sys: 2.92 ms, total: 5.82 ms\nWall time: 1.75 ms\n" ] ], [ [ "#### Profile", "_____no_output_____" ] ], [ [ "%lprun -f _invert_pinv_shared _invert_pinv_shared(ATF, yTF, rcond=None)", "_____no_output_____" ] ], [ [ "### Least-Squares Case", "_____no_output_____" ] ], [ [ "ls = linsolve.LinearSolver(data, wgts=wgts)\nATF = tf.convert_to_tensor(ls.get_A())\nyTF = 
tf.convert_to_tensor(ls.get_weighted_data())", "_____no_output_____" ], [ "%time sol = _invert_lsqr(ATF, yTF, rcond=0)", "CPU times: user 3.62 ms, sys: 1.27 ms, total: 4.89 ms\nWall time: 2.43 ms\n" ], [ "%time solution = ls.solve(mode='lsqr')", "CPU times: user 40.5 ms, sys: 5.64 ms, total: 46.1 ms\nWall time: 41.3 ms\n" ], [ "plt.figure(figsize=(10, 6))\nplt.plot(np.abs(solution['g'] - sol[..., 0]))\nplt.plot(np.abs(solution['h'] - sol[..., 1]))\nplt.plot(np.abs(solution['i'] - sol[..., 2]))\nplt.show()", "_____no_output_____" ] ], [ [ "#### Profiling", "_____no_output_____" ] ], [ [ "%lprun -f _invert_lsqr _invert_lsqr(ATF, yTF, rcond=0)", "_____no_output_____" ] ], [ [ "### Pseudo-inverse", "_____no_output_____" ] ], [ [ "%time solution = _invert_pinv(ATF, yTF, rcond=None)", "CPU times: user 139 ms, sys: 19 ms, total: 158 ms\nWall time: 25.1 ms\n" ], [ "%time sol = ls.solve()", "CPU times: user 196 ms, sys: 34.7 ms, total: 231 ms\nWall time: 74.8 ms\n" ] ], [ [ "#### Profiling", "_____no_output_____" ] ], [ [ "%lprun -f _invert_pinv _invert_pinv(ATF, yTF, rcond=None)", "_____no_output_____" ] ], [ [ "### Solve", "_____no_output_____" ] ], [ [ "%time _ = _invert_solve(ATF, yTF, 0)", "CPU times: user 37.6 ms, sys: 2 ms, total: 39.6 ms\nWall time: 7.94 ms\n" ], [ "%time sol = ls.solve(mode='solve')", "CPU times: user 72.1 ms, sys: 5.21 ms, total: 77.3 ms\nWall time: 76.3 ms\n" ] ], [ [ "#### Profiling", "_____no_output_____" ] ], [ [ "%lprun -f _invert_solve _invert_solve(ATF, yTF, rcond=None)", "_____no_output_____" ] ], [ [ "# Tensorflow Operations", "_____no_output_____" ] ], [ [ "A = np.random.uniform(0, 1, size=(200, 100, 300)) + 1j * np.random.uniform(0, 1, size=(200, 100, 300))\nATF = tf.complex(tf.random.uniform((200, 100, 300)), tf.random.uniform((200, 100, 300)))", "_____no_output_____" ] ], [ [ "## Conjugation", "_____no_output_____" ] ], [ [ "%%time\n_ = A.conj()", "CPU times: user 18.3 ms, sys: 4.04 ms, total: 22.3 ms\nWall time: 20.9 ms\n" ], [ "%%time\n_ = tf.math.conj(ATF)", "CPU times: user 54 ms, sys: 7.93 ms, total: 62 ms\nWall time: 12.6 ms\n" ] ], [ [ "## Transpose", "_____no_output_____" ] ], [ [ "%%time\n_ = tf.einsum(\"ijk...->ikj...\", ATF)", "CPU times: user 65.5 ms, sys: 943 µs, total: 66.4 ms\nWall time: 6.99 ms\n" ], [ "%%time\n_ = tf.transpose(ATF, perm=[0, 2, 1])", "CPU times: user 66.2 ms, sys: 5.13 ms, total: 71.3 ms\nWall time: 11.6 ms\n" ], [ "%%time\n_ = tf.reshape(ATF, (200, 300, 100))", "CPU times: user 258 µs, sys: 3.44 ms, total: 3.69 ms\nWall time: 3.7 ms\n" ], [ "%%time\nG = tf.linalg.matrix_transpose(ATF)", "CPU times: user 69.2 ms, sys: 6.17 ms, total: 75.4 ms\nWall time: 11.5 ms\n" ], [ "G = tf.constant(G)", "_____no_output_____" ], [ "%%time\n_ = tf.einsum('ijk,ijl->ij', G, G)", "CPU times: user 39.2 ms, sys: 1.8 ms, total: 41 ms\nWall time: 8.54 ms\n" ], [ "%%time\n_ = tf.einsum('ijk,ijl->ji', G, G)", "CPU times: user 38.5 ms, sys: 6.51 ms, total: 45 ms\nWall time: 8.57 ms\n" ], [ "tf.linalg.solve?", "_____no_output_____" ], [ "A = tf.random.uniform((1000, 100, 100))\ny = tf.random.uniform((1000, 100, 20))", "_____no_output_____" ], [ "%%timeit\n_ = tf.linalg.solve(A, y)", "18.7 ms ± 680 µs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n" ], [ "%%timeit\n_ = tf.transpose(tf.linalg.solve(A, y))", "24.2 ms ± 3.56 ms per loop (mean ± std. dev. 
of 7 runs, 10 loops each)\n" ], [ "@tf.function\ndef function(A):\n \"\"\"\n \"\"\"\n return result(A)\n\[email protected]\ndef function_graph(A):\n \"\"\"\n \"\"\"\n return result_graph(A)\n\ndef function_no_opt(A):\n \"\"\"\n \"\"\"\n return result_no_opt(A)\n\ndef result(A):\n \"\"\"\n \"\"\"\n return tf.matmul(A, A, transpose_a=True)\n\[email protected]\ndef result_graph(A):\n \"\"\"\n \"\"\"\n return tf.matmul(A, A, transpose_a=True)\n\ndef result_no_opt(A):\n \"\"\"\n \"\"\"\n return tf.matmul(A, A, transpose_a=True)", "_____no_output_____" ], [ "A = tf.random.uniform((5000, 1000))", "_____no_output_____" ], [ "%timeit _ = function(A)", "20.3 ms ± 1.36 ms per loop (mean ± std. dev. of 7 runs, 100 loops each)\n" ], [ "%timeit _ = function_graph(A)", "20.3 ms ± 224 µs per loop (mean ± std. dev. of 7 runs, 10 loops each)\n" ], [ "%timeit _ = function_no_opt(A)", "21.4 ms ± 1.04 ms per loop (mean ± std. dev. of 7 runs, 10 loops each)\n" ], [ "from uvtools.dspec import dpss_operator", "_____no_output_____" ], [ "ugrid = np.arange(-40, 40, 0.499)\nfreqs = np.linspace(50e6, 250e6, 1024)\nr, _ = dpss_operator(ugrid, filter_centers=[0], filter_half_widths=[1], eigenval_cutoff=[1e-10])\nf, _ = dpss_operator(freqs, filter_centers=[0], filter_half_widths=[10e-9], eigenval_cutoff=[1e-10])", "_____no_output_____" ], [ "f.shape, r.shape", "_____no_output_____" ], [ "r.shape[1] ** 2 * f.shape[1] / 2", "_____no_output_____" ], [ "ugrid = np.arange(-40, 0, 0.499)\nfreqs = np.linspace(50e6, 250e6, 1024)\nr, _ = dpss_operator(ugrid, filter_centers=[0], filter_half_widths=[1], eigenval_cutoff=[1e-10])\nf, _ = dpss_operator(freqs, filter_centers=[0], filter_half_widths=[10e-9], eigenval_cutoff=[1e-10])", "_____no_output_____" ], [ "x1 = np.random.uniform(0, 1, (10, 100))\nx2 = np.random.uniform(0, 1, (5, 10, 100))", "_____no_output_____" ], [ "i1 = np.argmax(x1, axis=-1)\ni2 = np.argmax(x2, axis=-1)", "_____no_output_____" ], [ "ind1 = np.indices(i1.shape)\nind2 = np.indices(i2.shape)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb54ee6e7e06d88b96e4f63564c0a2d4dcc74690
45,176
ipynb
Jupyter Notebook
notebook/S03A_Scalars_Annotated.ipynb
mtldatascience/sta-663-2019
ce565879c3c22618db8d28e72daf08023b915a94
[ "BSD-3-Clause" ]
68
2019-01-09T21:53:55.000Z
2022-02-16T17:14:22.000Z
notebook/S03A_Scalars_Annotated.ipynb
mtldatascience/sta-663-2019
ce565879c3c22618db8d28e72daf08023b915a94
[ "BSD-3-Clause" ]
null
null
null
notebook/S03A_Scalars_Annotated.ipynb
mtldatascience/sta-663-2019
ce565879c3c22618db8d28e72daf08023b915a94
[ "BSD-3-Clause" ]
62
2019-01-09T21:43:48.000Z
2021-11-15T04:26:25.000Z
38.44766
22,708
0.721622
[ [ [ "# Scalars", "_____no_output_____" ] ], [ [ "%matplotlib inline", "_____no_output_____" ], [ "import numpy as np\nimport matplotlib.pyplot as plt", "_____no_output_____" ] ], [ [ "## Integers", "_____no_output_____" ], [ "### Binary representation of integers", "_____no_output_____" ] ], [ [ "format(16, '032b')", "_____no_output_____" ] ], [ [ "### Bit shifting", "_____no_output_____" ] ], [ [ "format(16 >> 2, '032b')", "_____no_output_____" ], [ "16 >> 2", "_____no_output_____" ], [ "format(16 << 2, '032b')", "_____no_output_____" ], [ "16 << 2", "_____no_output_____" ] ], [ [ "### Overflow\n\nIn general, the computer representation of integers has a limited range, and may overflow. The range depends on whether the integer is signed or unsigned.\n\nFor example, with 8 bits, we can represent at most $2^8 = 256$ integers.\n\n- 0 to 255 unsigned\n- -128 ti 127 signed", "_____no_output_____" ], [ "Signed integers", "_____no_output_____" ] ], [ [ "np.arange(130, dtype=np.int8)[-5:]", "_____no_output_____" ] ], [ [ "Unsigned integers", "_____no_output_____" ] ], [ [ "np.arange(130, dtype=np.uint8)[-5:]", "_____no_output_____" ], [ "np.arange(260, dtype=np.uint8)[-5:]", "_____no_output_____" ] ], [ [ "### Integer division\n\nIn Python 2 or other languages such as C/C++, be very careful when dividing as the division operator `/` performs integer division when both numerator and denominator are integers. This is rarely what you want. In Python 3 the `/` always performs floating point division, and you use `//` for integer division, removing a common source of bugs in numerical calculations.", "_____no_output_____" ] ], [ [ "%%python2\n\nimport numpy as np\n\nx = np.arange(10)\nprint(x/10)", "[0 0 0 0 0 0 0 0 0 0]\n" ] ], [ [ "Python 3 does the \"right\" thing.", "_____no_output_____" ] ], [ [ "x = np.arange(10)\nx/10", "_____no_output_____" ] ], [ [ "## Real numbers\n\nReal numbers are represented as **floating point** numbers. A floating point number is stored in 3 pieces (sign bit, exponent, mantissa) so that every float is represetned as get +/- mantissa ^ exponent. Because of this, the interval between consecutive numbers is smallest (high precison) for numebrs close to 0 and largest for numbers close to the lower and upper bounds.\n\nBecause exponents have to be singed to represent both small and large numbers, but it is more convenint to use unsigned numbers here, the exponnent has an offset (also knwnn as the exponentn bias). For example, if the expoennt is an unsigned 8-bit number, it can rerpesent the range (0, 255). By using an offset of 128, it will now represent the range (-127, 128).\n\n![float1](http://www.dspguide.com/graphics/F_4_2.gif)\n\n**Note**: Intervals between consecutive floating point numbers are not constant. In particular, the precision for small numbers is much larger than for large numbers. 
", "_____no_output_____" ], [ "#### IEEE 754 32-bit floating point representation\n\n![img](https://upload.wikimedia.org/wikipedia/commons/thumb/d/d2/Float_example.svg/590px-Float_example.svg.png)\n\nSee [Wikipedia](https://en.wikipedia.org/wiki/Single-precision_floating-point_format) for how this binary number is evaluated to 0.15625.", "_____no_output_____" ] ], [ [ "from ctypes import c_int, c_float", "_____no_output_____" ], [ "s = c_int.from_buffer(c_float(0.15625)).value", "_____no_output_____" ], [ "s = format(s, '032b')\ns", "_____no_output_____" ], [ "rep = {\n    'sign': s[:1], \n    'exponent' : s[1:9:], \n    'fraction' : s[9:]\n}\nrep", "_____no_output_____" ] ], [ [ "### Most base 10 real numbers are approximations\n\nThis is simply because numbers are stored in finite-precision binary format.", "_____no_output_____" ] ], [ [ "'%.20f' % (0.1 * 0.1 * 100)", "_____no_output_____" ] ], [ [ "### Never check for equality of floating point numbers", "_____no_output_____" ] ], [ [ "i = 0\nloops = 0\nwhile i != 1:\n    i += 0.1 * 0.1\n    loops += 1\n    if loops == 1000000:\n        break\ni", "_____no_output_____" ], [ "i = 0\nloops = 0\nwhile np.abs(1 - i) > 1e-6:\n    i += 0.1 * 0.1\n    loops += 1\n    if loops == 1000000:\n        break\ni", "_____no_output_____" ] ], [ [ "### Associative law does not necessarily hold", "_____no_output_____" ] ], [ [ "6.022e23 - 6.022e23 + 1", "_____no_output_____" ], [ "1 + 6.022e23 - 6.022e23", "_____no_output_____" ] ], [ [ "### Distributive law does not hold", "_____no_output_____" ] ], [ [ "a = np.exp(1)\nb = np.pi\nc = np.sin(1)", "_____no_output_____" ], [ "a*(b+c)", "_____no_output_____" ], [ "a*b + a*c", "_____no_output_____" ] ], [ [ "### Catastrophic cancellation", "_____no_output_____" ], [ "Consider calculating sample variance\n\n$$\ns^2 = \\frac{1}{n(n-1)}\\left(n\\sum_{i=1}^n x_i^2 - \\left(\\sum_{i=1}^n x_i\\right)^2\\right)\n$$\n\nBe careful whenever you calculate the difference of potentially big numbers.
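\n\nFor reference, a numerically stable alternative (a sketch added for illustration, not part of the original notebook) is Welford's online algorithm, which never subtracts two large sums:\n\n```python\ndef welford_var(xs):\n    \"\"\"Numerically stable sample variance via Welford's online algorithm.\"\"\"\n    n, mean, m2 = 0, 0.0, 0.0\n    for x in xs:\n        n += 1\n        delta = x - mean\n        mean += delta / n\n        m2 += delta * (x - mean)\n    return m2 / (n - 1)\n```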
arrays.", "_____no_output_____" ] ], [ [ "import statistics\n\nstatistics.variance(x)", "_____no_output_____" ] ], [ [ "Note that `numpy` uses does not use the asymptotically unbiased estimator by default. If you want the unbiased variance, set `ddof` to 1.", "_____no_output_____" ] ], [ [ "np.var([1,2,3,4], ddof=1)", "_____no_output_____" ], [ "statistics.variance([1,2,3,4])", "_____no_output_____" ] ], [ [ "### Useful numerically stable functions ", "_____no_output_____" ], [ "Let's calculate\n\n$$\n\\log(e^{1000} + e^{1000})\n$$\n\nUsing basic algebra, we get the solution $\\log(2) + 1000$.\n\n\\begin{align}\n\\log(e^{1000} + e^{1000}) &= \\log(e^{0}e^{1000} + e^{0}e^{1000}) \\\\\n&= \\log(e^{100}(e^{0} + e^{0})) \\\\\n&= \\log(e^{1000}) + \\log(e^{0} + e^{0}) \\\\\n&= 1000 + \\log(2)\n\\end{align}", "_____no_output_____" ], [ "**logaddexp**", "_____no_output_____" ] ], [ [ "x = np.array([1000, 1000])\nnp.log(np.sum(np.exp(x)))", "_____no_output_____" ], [ "np.logaddexp(*x)", "_____no_output_____" ] ], [ [ "**logsumexp**\n\nThis function generalizes `logaddexp` to an arbitrary number of addends and is useful in a variety of statistical contexts.", "_____no_output_____" ], [ "Suppose we need to calculate a probability distribution $\\pi$ parameterized by a vector $x$\n\n$$\n\\pi_i = \\frac{e^{x_i}}{\\sum_{j=1}^n e^{x_j}}\n$$\n\nTaking logs, we get\n\n$$\n\\log(\\pi_i) = x_i - \\log{\\sum_{j=1}^n e^{x_j}}\n$$", "_____no_output_____" ] ], [ [ "x = 1e6*np.random.random(100)", "_____no_output_____" ], [ "np.log(np.sum(np.exp(x))) ", "_____no_output_____" ], [ "from scipy.special import logsumexp", "_____no_output_____" ], [ "logsumexp(x)", "_____no_output_____" ] ], [ [ "**logp1 and expm1**", "_____no_output_____" ] ], [ [ "np.exp(np.log(1 + 1e-6)) - 1", "_____no_output_____" ], [ "np.expm1(np.log1p(1e-6))", "_____no_output_____" ] ], [ [ "**sinc**", "_____no_output_____" ] ], [ [ "x = 1", "_____no_output_____" ], [ "np.sin(x)/x", "_____no_output_____" ], [ "np.sinc(x)", "_____no_output_____" ], [ "x = np.linspace(0.01, 2*np.pi, 100)", "_____no_output_____" ], [ "plt.plot(x, np.sinc(x), label='Library function')\nplt.plot(x, np.sin(x)/x, label='DIY function')\nplt.legend()\npass", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cb54f23555682b3704c668e7454d8a4848a83e7d
409,078
ipynb
Jupyter Notebook
Lab1/crvargasmlecture1.ipynb
crvargasm/MetNumUN2021I
e8b56337482c51eb0ca397ebd173360979e9013c
[ "MIT" ]
1
2021-07-27T05:34:50.000Z
2021-07-27T05:34:50.000Z
Lab1/crvargasmlecture1.ipynb
crvargasm/MetNumUN2021I
e8b56337482c51eb0ca397ebd173360979e9013c
[ "MIT" ]
null
null
null
Lab1/crvargasmlecture1.ipynb
crvargasm/MetNumUN2021I
e8b56337482c51eb0ca397ebd173360979e9013c
[ "MIT" ]
null
null
null
198.774538
50,286
0.88655
[ [ [ "#Introduction to the Research Environment\n\nThe research environment is powered by IPython notebooks, which allow one to perform a great deal of data analysis and statistical validation. We'll demonstrate a few simple techniques here.", "_____no_output_____" ], [ "##Code Cells vs. Text Cells\n\nAs you can see, each cell can be either code or text. To select between them, choose from the 'Cell Type' dropdown menu on the top left.", "_____no_output_____" ], [ "###This is a test\r\nOh, so amazing\r\n\r\nIncluso se puede usar ${{LaTeX}}:$\r\n$$x=\\frac{-b \\pm \\sqrt{b^2 -4(a)(c)}}{2(a)}$$\r\n$$\\text{Incluso podemos escribir}$$", "_____no_output_____" ], [ "##Executing a Command\n\nA code cell will be evaluated when you press play, or when you press the shortcut, shift-enter. Evaluating a cell evaluates each line of code in sequence, and prints the results of the last line below the cell.", "_____no_output_____" ] ], [ [ "2 + 2", "_____no_output_____" ], [ "6 + 6", "_____no_output_____" ] ], [ [ "Sometimes there is no result to be printed, as is the case with assignment.", "_____no_output_____" ] ], [ [ "X = 2", "_____no_output_____" ], [ "W = 10", "_____no_output_____" ] ], [ [ "Remember that only the result from the last line is printed.", "_____no_output_____" ] ], [ [ "2 + 2\n3 + 3", "_____no_output_____" ], [ "6 + 6\r\n7 + 7", "_____no_output_____" ] ], [ [ "However, you can print whichever lines you want using the `print` statement.", "_____no_output_____" ] ], [ [ "print (2 + 2)\n3 + 3", "4\n" ], [ "print (4 + 4)\r\n5 + 5", "8\n" ] ], [ [ "##Knowing When a Cell is Running\n\nWhile a cell is running, a `[*]` will display on the left. When a cell has yet to be executed, `[ ]` will display. When it has been run, a number will display indicating the order in which it was run during the execution of the notebook `[5]`. Try on this cell and note it happening.", "_____no_output_____" ] ], [ [ "#Take some time to run something\nc = 0\nfor i in range(10000000):\n c = c + i\nc", "_____no_output_____" ], [ "c = 1\r\nfor i in range(10):\r\n c = c * (i+1)\r\nc", "_____no_output_____" ] ], [ [ "##Importing Libraries\n\nThe vast majority of the time, you'll want to use functions from pre-built libraries. You can't import every library on Quantopian due to security issues, but you can import most of the common scientific ones. Here I import numpy and pandas, the two most common and useful libraries in quant finance. I recommend copying this import statement to every new notebook.\n\nNotice that you can rename libraries to whatever you want after importing. The `as` statement allows this. Here we use `np` and `pd` as aliases for `numpy` and `pandas`. This is a very common aliasing and will be found in most code snippets around the web. The point behind this is to allow you to type fewer characters when you are frequently accessing these libraries.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport pandas as pd\n\n# This is a plotting library for pretty pictures.\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "import cython as cy\r\nimport pandas_datareader as pdr\r\nimport datetime\r\n\r\nimport xarray as xa", "_____no_output_____" ] ], [ [ "##Tab Autocomplete\n\nPressing tab will give you a list of IPython's best guesses for what you might want to type next. This is incredibly valuable and will save you a lot of time. If there is only one possible option for what you could type next, IPython will fill that in for you. 
Try pressing tab very frequently, it will seldom fill in anything you don't want, as if there is ambiguity a list will be shown. This is a great way to see what functions are available in a library.\n\nTry placing your cursor after the `.` and pressing tab.", "_____no_output_____" ] ], [ [ "np.random.normal ", "_____no_output_____" ], [ "np.random.binomial", "_____no_output_____" ] ], [ [ "##Getting Documentation Help\n\nPlacing a question mark after a function and executing that line of code will give you the documentation IPython has for that function. It's often best to do this in a new cell, as you avoid re-executing other code and running into bugs.", "_____no_output_____" ] ], [ [ "np.random.normal?", "_____no_output_____" ], [ "np.test?", "_____no_output_____" ] ], [ [ "##Sampling\n\nWe'll sample some random data using a function from `numpy`.", "_____no_output_____" ] ], [ [ "# Sample 100 points with a mean of 0 and an std of 1. This is a standard normal distribution.\nX = np.random.normal(0, 1, 100)\nprint(X)", "[ 1.33333797e-01 -1.19845820e+00 -5.31631881e-01 1.51438133e+00\n -3.00894118e-01 5.84090076e-01 1.48353538e-01 2.61105042e+00\n 4.94789391e-01 -7.62425174e-01 -5.96282324e-01 1.04560276e+00\n -2.96136243e-01 -1.04534993e+00 -4.57417445e-01 5.48735421e-01\n -1.63911700e+00 1.46149835e+00 1.53159855e+00 -1.78678816e-01\n -2.17217055e+00 9.20558096e-01 4.61187556e-01 2.02840890e-01\n 1.59240741e+00 -1.02979302e+00 -1.40578496e+00 3.66345173e-01\n 3.02499135e-01 -7.95855176e-01 -5.97303331e-03 -9.50036574e-01\n -6.55130376e-01 1.18962256e-01 1.35661874e+00 -1.74870862e+00\n 1.50627240e+00 1.77060758e-03 4.27289598e-01 1.79560265e+00\n -8.91127958e-01 -1.77928386e+00 -1.66588741e+00 -8.93963737e-01\n 1.06811187e+00 2.03375531e+00 -4.90987566e-01 1.81750004e-01\n -1.40729118e+00 4.89688758e-01 4.46563579e-01 3.59325689e-01\n -8.97762814e-01 -2.04190517e+00 8.32537410e-01 5.36486170e-01\n 4.45258647e-02 1.21034538e+00 -7.06366172e-01 -1.83979353e+00\n -1.36792573e+00 -1.23350388e+00 -1.67230350e-01 -2.38098935e+00\n -1.37111144e+00 1.58764264e+00 2.10710677e-01 1.04085043e+00\n -8.04840642e-01 -1.10012655e+00 4.25685755e-01 3.92136435e-01\n -7.26983422e-01 -1.13883388e+00 -2.24492205e+00 5.41081084e-02\n 2.26412905e-01 -9.85999165e-01 1.00534971e-01 2.05983520e+00\n -1.55180048e-01 -1.13302751e+00 1.28053462e+00 1.47828638e+00\n -2.27211068e-01 3.53311919e-01 4.40705783e-01 2.30809705e+00\n -1.59169693e-01 -6.36378648e-01 9.83194246e-01 -5.44847663e-01\n 2.57722658e-01 -2.62555910e+00 7.24949010e-01 -1.53128607e-01\n -8.44145363e-01 1.85652520e-01 -1.92323405e+00 -1.06202165e+00]\n" ], [ "W = np.random.lognormal(0,1,100)\r\nprint(W)", "[ 0.70979893 0.55469766 2.91417851 0.66848996 0.40893314 0.71667271\n 12.1521411 0.57140386 0.71406844 0.41033167 5.74758427 0.21459655\n 1.23436635 0.86430036 0.55094359 0.62821802 1.05735867 1.22938236\n 0.25978262 0.31778232 1.05416502 0.06377617 1.04146083 2.25078844\n 4.25660615 3.12769001 1.23029643 0.81750613 1.39944232 1.66525892\n 1.40931605 0.73918134 3.68986084 3.37496663 0.50075401 0.91808259\n 1.04213062 0.59117045 0.55703281 0.08060356 1.6429251 0.29399515\n 2.46217598 0.34301399 0.65711022 0.7782767 3.25354523 1.41735538\n 1.38530251 0.51640037 0.66131761 1.2975024 2.86202607 0.49724817\n 0.97907875 1.73350516 0.56260989 1.28722266 4.20935383 3.13482762\n 0.93764837 0.95141163 4.57956434 0.93020868 0.14746197 2.12226569\n 0.32111301 0.66386897 0.26540208 0.49450312 1.80782984 2.29327413\n 1.62502591 0.85471284 0.82583693 0.99511292 
1.54233442 0.19137111\n 2.34798178 1.60106035 1.39783754 3.30882516 1.55657934 1.12419599\n 1.30094629 0.20036997 0.16365757 0.14624653 0.34125579 0.26272789\n 0.19258821 0.51012751 1.71993136 1.28942225 1.08951373 0.34408303\n 1.08697926 1.63253743 2.59927298 0.82355338]\n" ] ], [ [ "##Plotting\n\nWe can use the plotting library we imported as follows.", "_____no_output_____" ] ], [ [ "plt.plot(X)", "_____no_output_____" ], [ "plt.plot(W)", "_____no_output_____" ] ], [ [ "###Squelching Line Output\n\nYou might have noticed the annoying line of the form `[<matplotlib.lines.Line2D at 0x7f72fdbc1710>]` before the plots. This is because the `.plot` function actually produces output. Sometimes we wish not to display output, we can accomplish this with the semi-colon as follows.", "_____no_output_____" ] ], [ [ "plt.plot(X);", "_____no_output_____" ], [ "plt.plot(W);", "_____no_output_____" ] ], [ [ "###Adding Axis Labels\n\nNo self-respecting quant leaves a graph without labeled axes. Here are some commands to help with that.", "_____no_output_____" ] ], [ [ "X = np.random.normal(0, 1, 100)\nX2 = np.random.normal(0, 1, 100)\n\nplt.plot(X);\nplt.plot(X2);\nplt.xlabel('Time') # The data we generated is unitless, but don't forget units in general.\nplt.ylabel('Returns')\nplt.legend(['X', 'X2']);", "_____no_output_____" ], [ "W = np.random.lognormal(0, 1, 100)\r\nW2 = np.random.lognormal(0, 1, 100)\r\n\r\nplt.plot(W);\r\nplt.plot(W2);\r\nplt.xlabel('Time') # The data we generated is unitless, but don't forget units in general.\r\nplt.ylabel('Returns')\r\nplt.legend(['W', 'W2']);", "_____no_output_____" ] ], [ [ "##Generating Statistics\n\nLet's use `numpy` to take some simple statistics.", "_____no_output_____" ] ], [ [ "np.mean(X)", "_____no_output_____" ], [ "np.std(X)", "_____no_output_____" ], [ "np.mean(W)", "_____no_output_____" ], [ "np.std(W)", "_____no_output_____" ] ], [ [ "##Getting Real Pricing Data\n\nRandomly sampled data can be great for testing ideas, but let's get some real data. We can use `get_pricing` to do that. You can use the `?` syntax as discussed above to get more information on `get_pricing`'s arguments.", "_____no_output_____" ] ], [ [ "#No Funciona :c\r\n#get_pricing?\r\n#data = get_pricing('MSFT', start_date='2012-1-1', end_date='2015-6-1')", "_____no_output_____" ], [ "pdr.get_data_yahoo?\r\ndata = pdr.get_data_yahoo('MSFT', start=datetime.datetime(2020, 1, 1), \r\n end=datetime.datetime(2021,1,1))", "_____no_output_____" ], [ "pdr.get_data_yahoo?\r\nmi_ejemplo = pdr.get_data_yahoo('LNVGY', start=datetime.datetime(2020, 1, 1), \r\n end=datetime.datetime(2021,1,1))", "_____no_output_____" ] ], [ [ "Our data is now a dataframe. You can see the datetime index and the colums with different pricing data.", "_____no_output_____" ] ], [ [ "data", "_____no_output_____" ], [ "mi_ejemplo", "_____no_output_____" ] ], [ [ "This is a pandas dataframe, so we can index in to just get price like this. For more info on pandas, please [click here](http://pandas.pydata.org/pandas-docs/stable/10min.html).", "_____no_output_____" ] ], [ [ "X = data['Close']", "_____no_output_____" ], [ "Y= mi_ejemplo['Close']", "_____no_output_____" ] ], [ [ "Because there is now also date information in our data, we provide two series to `.plot`. `X.index` gives us the datetime index, and `X.values` gives us the pricing values. 
These are used as the X and Y coordinates to make a graph.", "_____no_output_____" ] ], [ [ "plt.plot(X.index, X.values)\nplt.ylabel('Price')\nplt.legend(['MSFT']);", "_____no_output_____" ], [ "plt.plot(Y.index, Y.values)\r\nplt.ylabel('Price')\r\nplt.legend(['LNVGY']);", "_____no_output_____" ] ], [ [ "We can get statistics again on real data.", "_____no_output_____" ] ], [ [ "np.mean(X)", "_____no_output_____" ], [ "np.mean(Y)", "_____no_output_____" ], [ "np.std(X)", "_____no_output_____" ], [ "np.std(Y)", "_____no_output_____" ] ], [ [ "##Getting Returns from Prices\n\nWe can use the `pct_change` function to get returns. Notice how we drop the first element after doing this, as it will be `NaN` (nothing -> something results in a NaN percent change).", "_____no_output_____" ] ], [ [ "R = X.pct_change()[1:]", "_____no_output_____" ], [ "T = Y.pct_change()[1:]", "_____no_output_____" ] ], [ [ "We can plot the returns distribution as a histogram.", "_____no_output_____" ] ], [ [ "plt.hist(R, bins=20)\nplt.xlabel('Return')\nplt.ylabel('Frequency')\nplt.legend(['MSFT Returns']);", "_____no_output_____" ], [ "plt.hist(T, bins=20)\r\nplt.xlabel('Return')\r\nplt.ylabel('Frequency')\r\nplt.legend(['LNVGY Returns']);", "_____no_output_____" ] ], [ [ "Get statistics again.", "_____no_output_____" ] ], [ [ "np.mean(R)", "_____no_output_____" ], [ "np.mean(T)", "_____no_output_____" ], [ "np.std(R)", "_____no_output_____" ], [ "np.std(T)", "_____no_output_____" ] ], [ [ "Now let's go backwards and generate data out of a normal distribution using the statistics we estimated from Microsoft's returns. We'll see that we have good reason to suspect Microsoft's returns may not be normal, as the resulting normal distribution looks far different.", "_____no_output_____" ] ], [ [ "plt.hist(np.random.normal(np.mean(R), np.std(R), 10000), bins=20)\nplt.xlabel('Return')\nplt.ylabel('Frequency')\nplt.legend(['Normally Distributed Returns']);", "_____no_output_____" ], [ "plt.hist(np.random.normal(np.mean(T), np.std(T), 10000), bins=20)\r\nplt.xlabel('Return')\r\nplt.ylabel('Frequency')\r\nplt.legend(['Normally Distributed Returns']);", "_____no_output_____" ] ], [ [ "##Generating a Moving Average\n\n`pandas` has some nice tools to allow us to generate rolling statistics. Here's an example. Notice how there's no moving average for the first 60 days, as we don't have 60 days of data on which to generate the statistic.", "_____no_output_____" ] ], [ [ "##Rolling_mean is no longer available!!!!\n\n# Take the average of the last 60 days at each timepoint.\n#MAVG = pd.rolling_mean(X, window=60)\n#plt.plot(X.index, X.values)\n#plt.plot(MAVG.index, MAVG.values)\n#plt.ylabel('Price')\n#plt.legend(['MSFT', '60-day MAVG']);", "_____no_output_____" ], [ "MAVG = X.rolling(60).mean()\r\nplt.plot(X.index, X.values)\r\nplt.plot(MAVG.index, MAVG.values)\r\nplt.ylabel('Price')\r\nplt.legend(['MSFT', '60-day MAVG']);", "_____no_output_____" ], [ "SPRT = Y.rolling(60).mean()\r\nplt.plot(Y.index, Y.values)\r\nplt.plot(SPRT.index, SPRT.values)\r\nplt.ylabel('Price')\r\nplt.legend(['LNVGY', '60-day SPRT']);", "_____no_output_____" ] ], [ [ "This presentation is for informational purposes only and does not constitute an offer to sell, a solicitation to buy, or a recommendation for any security; nor does it constitute an offer to provide investment advisory or other services by Quantopian, Inc. (\"Quantopian\"). 
Nothing contained herein constitutes investment advice or offers any opinion with respect to the suitability of any security, and any views expressed herein should not be taken as advice to buy, sell, or hold any security or as an endorsement of any security or company. In preparing the information contained herein, Quantopian, Inc. has not taken into account the investment needs, objectives, and financial circumstances of any particular investor. Any views expressed and data illustrated herein were prepared based upon information, believed to be reliable, available to Quantopian, Inc. at the time of publication. Quantopian makes no guarantees as to their accuracy or completeness. All information is subject to change and may quickly become unreliable for various reasons, including changes in market conditions or economic circumstances.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ] ]
cb54fbf6a7ff086ee9fffc276c50fb73d0fe8807
106,447
ipynb
Jupyter Notebook
docs/tutorials/t1.ipynb
blakejohnson/qiskit-experiments
2ecffa8f7d6aa6e8e6c1fc0a1c30f7776c49c493
[ "Apache-2.0" ]
null
null
null
docs/tutorials/t1.ipynb
blakejohnson/qiskit-experiments
2ecffa8f7d6aa6e8e6c1fc0a1c30f7776c49c493
[ "Apache-2.0" ]
1
2021-07-21T19:53:15.000Z
2021-07-21T19:53:15.000Z
docs/tutorials/t1.ipynb
nkanazawa1989/qiskit-experiments
510b55541c1018e6411ed22f5268b7fcc1467514
[ "Apache-2.0" ]
null
null
null
417.439216
25,069
0.93734
[ [ [ "# Running T<sub>1</sub> Experiments with Qiskit", "_____no_output_____" ], [ "In a T<sub>1</sub> experiment, we measure an excited qubit after a delay. Due to decoherence processes (e.g. amplitude damping channel), it is possible that, at the time of measurement, after the delay, the qubit will not be excited anymore. The larger the delay time is, the more likely is the qubit to fall to the ground state. The goal of the experiment is to characterize the decay rate of the qubit towards the ground state.\n\nWe start by fixing a delay time $t$ and a number of shots $s$. Then, by repeating $s$ times the procedure of exciting the qubit, waiting, and measuring, we estimate the probability to measure $|1\\rangle$ after the delay. We repeat this process for a set of delay times, resulting in a set of probability estimates.\n\nIn the absence of state preparation and measurement errors, the probablity to measure |1> after time $t$ is $e^{-t/T_1}$, for a constant $T_1$ (the coherence time), which is our target number. Since state preparation and measurement errors do exist, the qubit's decay towards the ground state assumes the form $Ae^{-t/T_1} + B$, for parameters $A, T_1$, and $B$, which we deduce form the probability estimates. To this end, the T<sub>1</sub> experiment internally calls the `curve_fit` method of `scipy.optimize`.\n\nThe following code demonstrates a basic run of a T<sub>1</sub> experiment for qubit 0.", "_____no_output_____" ] ], [ [ "from qiskit_experiments.framework import ParallelExperiment\nfrom qiskit_experiments.library import T1\n\n# A T1 simulator\nfrom qiskit_experiments.test.t1_backend import T1Backend\n\n# Simulate T1 of 25 microseconds\nt1 = 25\nbackend = T1Backend(t1=[t1*1e-6])\n\n# Time intervals to wait before measurement\ndelays = list(range(1, 40, 3))\n\n# Create an experiment for qubit 0,\n# setting the unit to microseconds,\n# with the specified time intervals\nexp = T1(qubit=0, \n delays=delays,\n unit=\"us\")\n\n# Run the experiment circuits with 1000 shots each,\n# and analyze the result\nexp_data = exp.run(backend=backend,\n shots=1000)\n\n# Print the result\nres = exp_data.analysis_result(0)\nres", "_____no_output_____" ] ], [ [ "It is possible to override the default analysis options. In particular, be aware of the `t1_guess` and `t1_bounds` options. In the following snippet, we instruct to look for T<sub>1</sub> in the range between 3 to 10. Since T<sub>1</sub> is outside this range (equals 25 in the example), the analysis will fail.", "_____no_output_____" ] ], [ [ "exp.set_analysis_options(t1_bounds=[3, 10])\nfail_fit = exp.run(backend=backend,\n shots=1000)\n\nprint(fail_fit.analysis_result(0))", "\n- success: False\n- error_message: 'scipy.optimize.curve_fit failed with error: `x0` is infeasible.'\n" ], [ "# Return the default analysis option\nexp.set_analysis_options(t1_bounds=exp._default_analysis_options().get(\"t1_bounds\"))", "_____no_output_____" ] ], [ [ "You can combine a new experiment with an old one. This way, the T<sub>1</sub> estimate will be based on the data of both experiments, hence will be more accurate. 
This is done by setting the `experiment_data` parameter of `run` with the returned value of an earlier call to `run`:", "_____no_output_____" ] ], [ [ "# Run again and combine with an earlier run.\ncombined = exp.run(backend=backend,\n shots=1000,\n experiment_data=exp_data)\n\n# `combined` consists now of two analysis results:\n# - The result from the first execution of the experiment\n# - The result of the two first executions together\ncombined_analysis_result = combined.analysis_result(1)\nprint(\"T1:\", combined_analysis_result[\"value\"])\nprint(\"Error bar:\", combined_analysis_result[\"stderr\"])\n\n# Compare with the previous error bar:\nprint(\"Previous error bar:\", res[\"stderr\"])", "T1: 2.460747728506449e-05\nError bar: 1.3694285787020714e-06\nPrevious error bar: 1.803242292430317e-06\n" ] ], [ [ "To measure T1 of multiple qubits in the same experiment, we create a parallel experiment:", "_____no_output_____" ] ], [ [ "# A simulator where qubits 0 and 1 have T1 of 25 microseconds\nbackend = T1Backend(t1=[t1*1e-6, t1*1e-6])\n\n# An experiment for qubit 1\nexp_q1 = T1(qubit=1, \n delays=delays,\n unit=\"us\")\n\n# A parallel experiment\nparallel_exp = ParallelExperiment([exp, exp_q1])\nparallel_data = parallel_exp.run(backend=backend)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cb55029a4d35aa8134692d9ee32d828d10fcbd8a
27,269
ipynb
Jupyter Notebook
CityBike2020.ipynb
Zone6Mars/tableau_city_bike_analysis
494d2765afeac22854f43f400058a4e8f1744e21
[ "ADSL" ]
null
null
null
CityBike2020.ipynb
Zone6Mars/tableau_city_bike_analysis
494d2765afeac22854f43f400058a4e8f1744e21
[ "ADSL" ]
null
null
null
CityBike2020.ipynb
Zone6Mars/tableau_city_bike_analysis
494d2765afeac22854f43f400058a4e8f1744e21
[ "ADSL" ]
null
null
null
35.231266
195
0.405369
[ [ [ "import pandas as pd\nimport numpy as np\nimport datetime as dt", "_____no_output_____" ], [ "# Files to load\njan2020 = \"Data/2020/JC-202001-citibike-tripdata.csv\"\nfeb2020 = \"Data/2020/JC-202002-citibike-tripdata.csv\"\nmar2020 = \"Data/2020/JC-202003-citibike-tripdata.csv\"\napr2020 = \"Data/2020/JC-202004-citibike-tripdata.csv\"\nmay2020 = \"Data/2020/JC-202005-citibike-tripdata.csv\"\njun2020 = \"Data/2020/JC-202006-citibike-tripdata.csv\"\njul2020 = \"Data/2020/JC-202007-citibike-tripdata.csv\"\naug2020 = \"Data/2020/JC-202008-citibike-tripdata.csv\"\nsep2020 = \"Data/2020/JC-202009-citibike-tripdata.csv\"\noct2020 = \"Data/2020/JC-202010-citibike-tripdata.csv\"\nnov2020 = \"Data/2020/JC-202011-citibike-tripdata.csv\"\ndec2020 = \"Data/2020/JC-202012-citibike-tripdata.csv\"", "_____no_output_____" ], [ "# read csv files\n\njan2020_df = pd.read_csv(jan2020)\nfeb2020_df = pd.read_csv(feb2020)\nmar2020_df = pd.read_csv(mar2020)\napr2020_df = pd.read_csv(apr2020)\nmay2020_df = pd.read_csv(may2020)\njun2020_df = pd.read_csv(jun2020)\njul2020_df = pd.read_csv(jul2020)\naug2020_df = pd.read_csv(aug2020)\nsep2020_df = pd.read_csv(sep2020)\noct2020_df = pd.read_csv(oct2020)\nnov2020_df = pd.read_csv(nov2020)\ndec2020_df = pd.read_csv(dec2020)\n", "_____no_output_____" ], [ "# combine 2020 dataframes into a single datafram \n\njanthrudec_df = jan2020_df.append([feb2020_df, mar2020_df, apr2020_df, may2020_df, jun2020_df, jul2020_df, aug2020_df, sep2020_df, oct2020_df, nov2020_df, dec2020_df], ignore_index=True)\njanthrudec_df.head() ", "_____no_output_____" ], [ "janthrudec_df.count()", "_____no_output_____" ], [ "BikeData_2020_df = janthrudec_df\nBikeData_2020_df.head()", "_____no_output_____" ], [ "# Convert All DateTimes to \"%Y-%m-%d %H:%M:%S\" Format\nBikeData_2020_df[\"starttime\"] = pd.to_datetime(BikeData_2020_df[\"starttime\"])\nBikeData_2020_df[\"stoptime\"] = pd.to_datetime(BikeData_2020_df[\"stoptime\"])", "_____no_output_____" ], [ "BikeData_2020_df.head()", "_____no_output_____" ], [ "# Dropped these columns because they were not included in the most recent data in 2021\n\nClean_BikeData_2020_df = BikeData_2020_df.drop(columns=['birth year', 'gender', 'bikeid','start station id', 'end station id'])", "_____no_output_____" ], [ "# made all usertypes the same binary options because the data changd in 2021 removing some fields\n# and changing the values of some existing fields\n\nClean_BikeData_2020_df['usertype'].replace('Subscriber','member',inplace = True)", "_____no_output_____" ], [ "Clean_BikeData_2020_df['usertype'].replace('Customer','casual',inplace = True)", "_____no_output_____" ], [ "Clean_BikeData_2020_df.count()", "_____no_output_____" ], [ "# Export dataframe to csv for analysisClean_BikeData_2020_df['tripduration'].max()\n\nClean_BikeData_2020_df.to_csv(\"Source/Clean_BikeData_2020_df.csv\", index=False, header=True)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb55153daf3c53b9146817e1f7cf189b4d98d642
53,329
ipynb
Jupyter Notebook
pose_estimatition_updated.ipynb
rmulton/dl_project
755925164b96124155071c6211127cc93d8095f7
[ "MIT" ]
null
null
null
pose_estimatition_updated.ipynb
rmulton/dl_project
755925164b96124155071c6211127cc93d8095f7
[ "MIT" ]
null
null
null
pose_estimatition_updated.ipynb
rmulton/dl_project
755925164b96124155071c6211127cc93d8095f7
[ "MIT" ]
null
null
null
57.036364
2,109
0.58306
[ [ [ "import os\nfrom pycocotools.coco import COCO\nimport numpy as np\nimport torch.utils.data as data\nimport torch\nfrom heatmap import heatmaps_from_keypoints\nfrom imageio import imread\nfrom skimage.transform import resize\nimport numpy as np\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.utils.model_zoo as model_zoo\nfrom torch.nn import init\nfrom torch.autograd.variable import Variable\nimport matplotlib.pyplot as plt\nimport pickle", "_____no_output_____" ], [ "MAIN_FOLDER = \"/Volumes/TOSHIBA EXT/data/\"\nIMAGES_FOLDER = os.path.join(MAIN_FOLDER, \"train2017\")\nIMAGES_FOLDER_TEST = os.path.join(MAIN_FOLDER, \"val2017\")\nANNOTATION_FILE = os.path.join(MAIN_FOLDER, \"annotations/person_keypoints_train2017.json\")\nANNOTATION_FILE_TEST = os.path.join(MAIN_FOLDER, \"annotations/person_keypoints_val2017.json\")\nCHECKPOINTS_FOLDER = \"./cktp/\"", "_____no_output_____" ] ], [ [ "### Heatmap", "_____no_output_____" ] ], [ [ "def gaussian_heatmap(shape, keypoint_coordinates, std = 1.5):\n \"\"\"\n Computes a square gaussian kernel\n\n :param shape: Shape of the output heatmap\n :param keypoint_coordinates: Location of the keypoint\n :param std: Standard deviation\n\n :return: Heatmap of shape (1,shape,shape)\n \"\"\"\n \n # Get the coordinates\n x = keypoint_coordinates[0]\n y = keypoint_coordinates[1]\n \n a = np.arange(0, shape, 1, float)\n b = a[:,np.newaxis]\n\n # Generate the heatmap\n heatmap_raw = np.exp(-(((a-x)**2)/(2*std**2) + ((b-y)**2)/(2*std**2)))\n \n # Normalize\n heatmap_max = np.amax(heatmap_raw)\n heatmap_normalized = heatmap_raw/heatmap_max\n \n # Get it in the accurate format\n heatmap = np.expand_dims(heatmap_raw, axis=0)\n return heatmap\n\ndef gaussian_heatmaps(xs, ys, vs, shape=32, image_height=512, image_width=640, std=1.):\n \"\"\"\n Computes heatmaps from the keypoints\n :param xs: Array of x coordinates for the keypoints\n :param ys: Array of y coordinates for the keypoints\n :param shape: shape of the heatmaps\n :param image_height: Height of the images the keypoints are for\n :param image_width: Width of the images the keypoints are for\n :param std: Standard deviation of the gaussion function used\n \n :return: Heatmaps as numpy arrays of shape (shape, shape, n_keypoints)\n \"\"\"\n \n # Rescale keypoints coordinates to the heatmaps scale\n # ys\n height_scale = shape/image_height\n ys = ys*height_scale\n # xs\n width_scale = shape/image_width\n xs = xs*width_scale\n \n \n # Render a heatmap for each joint\n heatmaps = gaussian_heatmap(shape, (xs[0],ys[0]))\n for i, v in enumerate(vs):\n if i!=0:\n # If the joint is visible, generate a heatmaps\n if v!=0:\n new_heatmap = gaussian_heatmap(shape, (xs[i],ys[i]))\n # Otherwise the heatmaps is composed of zeros\n else:\n new_heatmap = np.zeros((1, shape, shape))\n heatmaps = np.append(heatmaps, new_heatmap, axis=0)\n\n return heatmaps\n\ndef keypoints_from_heatmap(heatmap):\n \"\"\"Get the coordinates of the max value heatmap - it is the keypoint\"\"\"\n max_heatmap = np.amax(heatmap)\n keypoints = np.where(heatmap == max_heatmap)\n if len(keypoints) == 2:\n return keypoints[1][0], keypoints[0][0], max_heatmap\n \n elif len(keypoints) == 3:\n return keypoints[2][0], keypoints[1][0], max_heatmap\n\ndef keypoints_from_heatmaps(heatmaps, shape=32, image_height=512, image_width=640):\n \"\"\"Get the coordinates of the keypoints from the 17 heatmaps\"\"\"\n keypoints = []\n for i, heatmap in enumerate(heatmaps):\n x, y, max_heatmap = keypoints_from_heatmap(heatmap)\n if max_heatmap == 0:\n 
keypoints += [0,0,0]\n else:\n x = x*image_width/shape\n y = y*image_height/shape\n keypoints += [x,y,2]\n return keypoints\n\ndef get_xs_ys_vs(keypoints):\n \"\"\" Splits MSCOCO keypoints notations from [x0, y0, v0, ...] to [x0, ...], [y0, ...] and [v0, ...] \"\"\"\n keypoints_array = np.asarray(keypoints)\n xs = np.take(keypoints_array, [3*i for i in range(17)])\n ys = np.take(keypoints_array, [3*i+1 for i in range(17)])\n vs = np.take(keypoints_array, [3*i+2 for i in range(17)])\n return xs, ys, vs\n\ndef heatmaps_from_keypoints(keypoints):\n xs, ys, vs = get_xs_ys_vs(keypoints)\n heatmaps = gaussian_heatmaps(xs, ys, vs)\n return heatmaps", "_____no_output_____" ] ], [ [ "### Dataset", "_____no_output_____" ] ], [ [ "class MSCOCO(data.Dataset):\n \"\"\" Represents a MSCOCO Keypoints dataset \"\"\"\n \n def __init__(self, images_folder, annotations_json, train=False, evalu=False, input_type=0):\n \"\"\" Instantiate a MSCOCO dataset \"\"\"\n super().__init__()\n \n self.images_folder = images_folder\n #Input type indicates if the input is the original image or a combination of original image with filtered image\n #O : original image\n #1 : original image + skin filtered \n #2 : original image + edge filter \n #3 : original image + clustering filter \n #4 : orignal image + skin filter + edge filter\n #5 : orignal image + skin filter + clustering filter\n self.input_type = input_type\n \n # Load the annotations\n self.annotations = COCO(annotations_json)\n imgs_id = self.annotations.getImgIds()\n if train:\n self.img_ids = imgs_id[:int(len(imgs_id)*2/3)]\n \n elif evalu:\n self.img_ids = imgs_id[int(len(imgs_id)*2/3)+1:]\n \n else:\n self.img_ids = imgs_id \n \n def __len__(self):\n return len(self.img_ids)\n \n def __getitem__(self, index):\n \"\"\" Returns the index-th image with keypoints annotations, both as tensors \"\"\"\n \n try:\n #L is the list of the input's path for a single image\n L = []\n input_imgs = []\n\n # Get the image informations\n img_id = self.img_ids[index]\n img = self.annotations.loadImgs(img_id)[0]\n \n # Load the image from the file\n img_path = os.path.join(self.images_folder, img['file_name'])\n L.append(img_path)\n \n #Need to adapt it depending on the path of the filtered image\n if self.input_type == 1 or self.input_type == 4 or self.input_type == 5:\n L.append(img_path) #Need to change with skin filtered image\n if self.input_type == 2 or self.input_type == 4:\n L.append(img_path) #Need to change with edge filtered image\n if self.input_type == 3 or self.input_type == 5:\n L.append(img_path) #Need to change with clustering filtered image\n \n for image in L:\n img_array = load_image(image)\n img_array = MSCOCO.transformGreyImage(img_array)\n img_tensor = torch.from_numpy(img_array)\n img_tensor = img_tensor.float() # Pytorch needs a float tensor\n input_imgs.append(img_tensor)\n \n # Get the keypoints\n annIds = self.annotations.getAnnIds(imgIds=img['id'])\n anns = self.annotations.loadAnns(annIds)\n # Some images do not contain any coco object, so anns = []\n if len(anns)>0:\n keypoints = anns[0]['keypoints'] # anns is a list with only one element\n else:\n # keypoints are not visible so \n keypoints = [0 for i in range(3*17)]\n \n # Check to avoid errors\n if len(keypoints)!=3*17:\n print('Warning: Keypoints list for image {} has length {} instead of 17'.format(img_id, len(keypoints)))\n \n # Generate the heatmaps\n heatmaps_array = heatmaps_from_keypoints(keypoints)\n \n #img_tensor_input = torch.cat((img_tensor,img_tensor_filtered),0)\n keypoints_tensor 
= torch.from_numpy(heatmaps_array).float() # Pytorch needs a float tensor\n img_tensor = torch.cat(input_imgs,0)\n \n return img_tensor, keypoints_tensor\n\n except:\n #L is the list of the input's path for a single image\n L = []\n input_imgs = []\n\n # Get the image informations\n img_id = 391895\n img = self.annotations.loadImgs(img_id)[0]\n \n # Load the image from the file\n img_path = os.path.join(self.images_folder, img['file_name'])\n L.append(img_path)\n \n #Need to adapt it depending on the path of the filtered image\n if self.input_type == 1 or self.input_type == 4 or self.input_type == 5:\n L.append(img_path) #Need to change with skin filtered image\n if self.input_type == 2 or self.input_type == 4:\n L.append(img_path) #Need to change with edge filtered image\n if self.input_type == 3 or self.input_type == 5:\n L.append(img_path) #Need to change with clustering filtered image\n \n for image in L:\n img_array = load_image(image)\n img_array = MSCOCO.transformGreyImage(img_array)\n img_tensor = torch.from_numpy(img_array)\n img_tensor = img_tensor.float() # Pytorch needs a float tensor\n input_imgs.append(img_tensor)\n \n # Get the keypoints\n annIds = self.annotations.getAnnIds(imgIds=img['id'])\n anns = self.annotations.loadAnns(annIds)\n # Some images do not contain any coco object, so anns = []\n if len(anns)>0:\n keypoints = anns[0]['keypoints'] # anns is a list with only one element\n else:\n # keypoints are not visible so \n keypoints = [0 for i in range(3*17)]\n \n # Check to avoid errors\n if len(keypoints)!=3*17:\n print('Warning: Keypoints list for image {} has length {} instead of 17'.format(img_id, len(keypoints)))\n \n # Generate the heatmaps\n heatmaps_array = heatmaps_from_keypoints(keypoints)\n \n #img_tensor_input = torch.cat((img_tensor,img_tensor_filtered),0)\n keypoints_tensor = torch.from_numpy(heatmaps_array).float() # Pytorch needs a float tensor\n img_tensor = torch.cat(input_imgs,0)\n \n return img_tensor, keypoints_tensor \n\n @staticmethod\n def transformGreyImage(img_array):\n # Black and white images\n if len(img_array.shape)==2:\n # Add a channel axis\n img_array = np.expand_dims(img_array, axis=2)\n # Fill all the axes with the black&white image\n img_array = np.concatenate((img_array, img_array, img_array), axis=2)\n img_array = np.transpose(img_array, (2,1,0))\n return img_array\n\n\n# Homemade image loader\ndef load_image(image_path):\n image = imread(image_path)\n image = resize(image, (256, 256))\n return image", "_____no_output_____" ] ], [ [ "### Model", "_____no_output_____" ] ], [ [ "class ConvRelu(nn.Module):\n def __init__(self, in_channels, out_channels, kernel_size, training=True, padding=1, stride=1):\n super().__init__()\n self.conv = nn.Conv2d(in_channels,\n out_channels,\n kernel_size,\n padding=padding,\n stride=stride)\n\n self.relu = nn.ReLU()\n self.batch_norm = nn.BatchNorm2d(out_channels)\n self.training = training\n\n def forward(self, x):\n x = self.relu(self.conv(x))\n if self.training:\n x = self.batch_norm(x)\n return x\n\n\nclass Model(nn.Module):\n def __init__(self, input_type=0):\n super().__init__()\n self.pool = nn.MaxPool2d(2)\n \n #1 image\n if input_type == 0:\n input_size = 3\n \n #2 images\n elif input_type == 1 or input_type == 2 or input_type == 3:\n input_size = 6\n \n #3 images\n elif input_type == 4 or input_type == 5:\n input_size = 9\n self.feature_extraction = nn.Sequential(\n ConvRelu(input_size, 64, 3),\n ConvRelu(64, 64, 3),\n self.pool,\n ConvRelu(64, 128, 3),\n #ConvRelu(128, 128, 3),\n 
self.pool,\n ConvRelu(128, 128, 3),\n #ConvRelu(128, 128, 3),\n self.pool,\n ConvRelu(128, 512, 3),\n #ConvRelu(512, 512, 3),\n )\n \n self.features_to_heatmaps = nn.Conv2d(512, 17, 1) # 17 kind of joints, 17 heatmaps\n\n def forward(self, x):\n x = self.feature_extraction(x)\n heatmaps = self.features_to_heatmaps(x)\n return heatmaps\n\ndef plotKeypointsOverOutputModel(index,dataset,model,img_folder):\n \"\"\"Forward a img to the model and display the output keypoints over the image.\n It enables us to see the loss evolution over the model visually over the image\n index is the index of the img in the dataset argument\"\"\"\n # Get an image\n imgId = dataset.img_ids[index]\n img, keypoints = dataset[index]\n\n # Transform into a pytorch model input and Forward pass \n y = model(Variable(img.unsqueeze(0)))\n\n #Get the coordinates of the keypoints\n keypoints = keypoints_from_heatmaps(y[0].data.numpy())\n\n # Plot the image\n img_anno = dataset.annotations.loadImgs(imgId)[0]\n img_path = os.path.join(img_folder, img_anno['file_name'])\n img_array = load_image(img_path)\n img_array_resized = resize(img_array, (512, 640))\n plt.figure()\n plt.title('Original image')\n plt.imshow(img_array_resized)\n xs,ys,vs = get_xs_ys_vs(keypoints)\n plt.plot(xs,ys,'ro',color='c')\n plt.show()", "_____no_output_____" ] ], [ [ "### Configuration of the training", "_____no_output_____" ] ], [ [ "def conf_training(resuming=False, input_type=0, *args):\n \"\"\"Function that initiates the configuration of the model depending if a last model\n is loaded or if it's the beginning of a new model\"\"\"\n \n #Data\n trainset = MSCOCO(IMAGES_FOLDER, ANNOTATION_FILE, train=True, input_type=input_type)\n evalset = MSCOCO(IMAGES_FOLDER, ANNOTATION_FILE, evalu=True, input_type=input_type)\n\n # Loss\n criterion = nn.MSELoss()\n #criterion = nn.CrossEntropyLoss()\n \n # Number of epochs\n epochs = 10\n\n # Batch sizes\n batch_size_train = 1\n batch_size_val = 1\n \n if not resuming:\n # Model\n net = Model(input_type=input_type)\n\n # Optimizer\n optimizer = torch.optim.Adam(net.parameters())\n \n #First epoch\n current_epoch = -1\n \n else:\n #Load the last saved model with its configurations\n checkpoint = torch.load(os.path.join(MAIN_FOLDER,\"model_\"+args[0]))\n \n #Model\n net = Model(input_type=input_type)\n net.load_state_dict(checkpoint['state_dict'])\n \n #Current_epoch\n current_epoch = checkpoint['epoch']\n \n #Optimizer\n optimizer = torch.optim.Adam(net.parameters())\n \n #Data loaders\n trainloader = torch.utils.data.DataLoader(trainset,\n batch_size=batch_size_train,\n shuffle=True,\n num_workers=4\n )\n\n evaloader = torch.utils.data.DataLoader(evalset,\n batch_size=batch_size_val,\n shuffle=True,\n num_workers=4\n )\n \n evalset_length = len(evalset)\n \n return epochs, trainloader, evaloader, optimizer, net, current_epoch, criterion, evalset_length, evalset", "_____no_output_____" ] ], [ [ "### Running the model", "_____no_output_____" ] ], [ [ "def training(epochs, trainloader, evaloader, optimizer, net, current_epoch, criterion, evalset_length, evalset):\n plt.ion()\n if current_epoch == -1:\n #If not resuming a model, creating the loss file\n lossFile = open(os.path.join(MAIN_FOLDER,\"loss\"),'wb')\n pickle.dump({\"loss_train\":{}, \"loss_val\":{}},lossFile)\n lossFile.close()\n \n start_epoch = current_epoch + 1\n for epoch in range(start_epoch, epochs): # loop over the dataset multiple times\n print(\"Epoch number {}\".format(epoch))\n 
#plotKeypointsOverOutputModel(0,evalset,net,IMAGES_FOLDER)#Displaying the result over the first element of the evalset\n running_loss = 0.0\n\n #For each epoch, we keep the loss under a dictionnary with epoch_nb as key and list of loss as value\n lossFile = open(os.path.join(MAIN_FOLDER,\"loss\"),'rb')\n loss_dic = pickle.load(lossFile)\n lossFile.close()\n lossFile = open(os.path.join(MAIN_FOLDER,\"loss\"),'wb')\n loss_dic['loss_train'][epoch] = []\n loss_dic['loss_val'][epoch] = []\n pickle.dump(loss_dic,lossFile)\n lossFile.close()\n\n for i, data in enumerate(trainloader, 0):\n print(\"Batch number {}\".format(i))\n # get the inputs\n inputs, labels = data\n\n # wrap them in Variable\n inputs, labels = Variable(inputs), Variable(labels)\n\n # zero the parameter gradients\n optimizer.zero_grad()\n\n # forward + backward + optimize\n outputs = net(inputs)\n loss = criterion(outputs, labels)\n loss.backward()\n optimizer.step()\n\n # print statistics\n running_loss += loss.data[0]\n if i % 2000 == 1999: # print every 2000 mini-batches\n print('Trainset loss[%d, %5d] loss: %.3f' %\n (epoch + 1, i + 1, running_loss / 2000))\n running_loss = 0.0\n \n #Save the loss_train in disk for each batch\n lossFile = open(os.path.join(MAIN_FOLDER,\"loss\"),'rb') \n loss_dic = pickle.load(lossFile)\n lossFile.close()\n lossFile = open(os.path.join(MAIN_FOLDER,\"loss\"),'wb')\n loss_dic['loss_train'][epoch] += [loss.data[0]]\n pickle.dump(loss_dic,lossFile)\n lossFile.close()\n \n #Save the model\n #net.cpu()\n state = {\n 'epoch': epoch,\n 'state_dict': net.state_dict()\n }\n torch.save(state, os.path.join(MAIN_FOLDER,\"model_\"+str(epoch))) #Save the torch model after each epoch\n \n #net.cuda()\n running_loss_eval = 0.0\n print(\"Starting Eval for Epoch {}\".format(epoch))\n for i, data in enumerate(evaloader, 0):\n # get the inputs\n inputs, labels = data\n\n # wrap them in Variable\n inputs, labels = Variable(inputs), Variable(labels)\n\n # forward \n outputs = net(inputs)\n loss = criterion(outputs, labels)\n\n # print statistics\n running_loss_eval += loss.data[0]\n\n #Save the loss_val in disk for each batch\n lossFile = open(os.path.join(MAIN_FOLDER,\"loss\"),'rb') \n loss_dic = pickle.load(lossFile)\n lossFile.close()\n lossFile = open(os.path.join(MAIN_FOLDER,\"loss\"),'wb') \n loss_dic['loss_val'][epoch] += [loss.data[0]]\n pickle.dump(loss_dic,lossFile)\n lossFile.close()\n\n print(\"Evalset Loss for Epoch {0} : {1}\".format(epoch,running_loss_eval/evalset_length))\n #loss_val[epoch] += [running_loss_eval/evalset_length] #Stock the loss on evalset for each epoch\n \n \n\n print('Finished Training')\n\ndef launch_training(resuming=False, input_type=0, *args):\n \"\"\"Function that configurates the model from init or a last model ; and then it trains the model\"\"\"\n epochs, trainloader, evaloader, optimizer, net, current_epoch, criterion, evalset_length, evalset = conf_training(resuming=resuming,input_type=input_type, *args)\n training(epochs, trainloader, evaloader, optimizer, net, current_epoch, criterion, evalset_length, evalset)\n\ndef launch_testing(model_epoch, input_type=0):\n \"\"\"Function that launches a model over the test dataset\"\"\"\n testset = MSCOCO(IMAGES_FOLDER_TEST, ANNOTATION_FILE_TEST,input_type=input_type)\n\n #Load the training model\n checkpoint = torch.load(os.path.join(MAIN_FOLDER, model_epoch))\n net = Model(input_type=input_type)\n net.load_state_dict(checkpoint['state_dict'])\n\n # Loss\n criterion = nn.MSELoss()\n\n # Batch sizes\n batch_size_test = 1\n\n 
#TestLoader\n evaloader = torch.utils.data.DataLoader(testset,\n batch_size=batch_size_test,\n shuffle=True,\n num_workers=4\n )\n\n loss_test = 0.0\n for i, data in enumerate(evaloader):\n inputs, labels = data[0], data[1]\n inputs, labels = Variable(inputs), Variable(labels)\n outputs = net(inputs)\n loss = criterion(y, outputs)\n loss_test += loss.data[0]\n if i % 500 ==0:\n print(\"Current loss over the test dataset: {0} after {1}ème iteration\".format(loss_test/(i+1),i+1))\n\n loss_test = loss_test/len(testset)\n print(\"Average loss over the test dataset: {}\".format(loss_test))", "_____no_output_____" ], [ "#Launch a training over a new model with inputSize = 0\nlaunch_training(False,0)", "loading annotations into memory...\nDone (t=21.31s)\ncreating index...\nindex created!\nloading annotations into memory...\nDone (t=38.47s)\ncreating index...\nindex created!\nEpoch number 0\n" ], [ "#Launch a training over a model currently trained with inputSize = 0\n#launch_training(True,0,path_model)", "_____no_output_____" ], [ "#Launch a trained model over the test dataset, with inputSize = 0\n#launch_testing(path_model,0)", "_____no_output_____" ], [ "%cd cocoapi\n!ls", "/Users/alexandresioufi/Documents/Projets infos/deeplearning/dl_project/cocoapi\n\u001b[34mLuaAPI\u001b[m\u001b[m \u001b[34mPythonAPI\u001b[m\u001b[m \u001b[34mcommon\u001b[m\u001b[m \u001b[34mresults\u001b[m\u001b[m\r\n\u001b[34mMatlabAPI\u001b[m\u001b[m README.txt license.txt\r\n" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cb5515b70d41c374c42284826b4d302a43bd8534
52,166
ipynb
Jupyter Notebook
5/rode.ipynb
exucutional/study_comp_math
c73b6f00e86b2d19c4cb81c377ca4706d70b2831
[ "MIT" ]
null
null
null
5/rode.ipynb
exucutional/study_comp_math
c73b6f00e86b2d19c4cb81c377ca4706d70b2831
[ "MIT" ]
null
null
null
5/rode.ipynb
exucutional/study_comp_math
c73b6f00e86b2d19c4cb81c377ca4706d70b2831
[ "MIT" ]
null
null
null
199.10687
15,348
0.891845
[ [ [ "# Lab 05\n\n## Solving a rigid system of differential equations\n\n### Konks Eric, Б01-818\n\nX.9.7", "_____no_output_____" ], [ "$$y_1'=-0.04y_1+10^4y_2y_3$$", "_____no_output_____" ], [ "$$y_2'=0.04y_1-10^4y_2y_3-3*10^7y_2^2$$", "_____no_output_____" ], [ "$$y_3'=3*10^7y_2^2$$", "_____no_output_____" ], [ "$$y_1(0)=1,\\ y_2(0)=0,\\ y_3(0)=0$$", "_____no_output_____" ] ], [ [ "import unittest\nimport logging\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "#logging.basicConfig(level=logging.DEBUG)", "_____no_output_____" ], [ "class RODE:\n def __init__(self):\n self.log = logging.getLogger(\"RODE\")\n \n def k_calc_stop(self, k_cur, k_next, delta):\n if id(k_cur) == id(k_next):\n return False\n \n if np.abs(np.linalg.norm(np.matrix(k_cur)) - np.linalg.norm(np.matrix(k_next))) < delta:\n return True\n \n return False\n \n def k_calc(self, stages, c_vec, b_vec, a, f_vec, u_res, h, t_res, delta):\n k_next = [[0 for _ in range(stages)] for _ in range(len(f_vec))]\n k_cur = k_next\n \n itr = 0\n while not self.k_calc_stop(k_cur, k_next, delta):\n k_tmp = k_next\n k_next = [k_cur[i][:] for i in range(len(k_cur))]\n k_cur = k_tmp\n for s in range(stages):\n u_k = [u_res[-1][j]+h*sum(a[s][m]*k_cur[j][m] for m in range(s)) for j in range(len(f_vec))]\n self.log.debug(f\"Iter[{itr}]|S[{s}]: u_k: {u_k}\")\n for i in range(len(f_vec)):\n k_next[i][s] = f_vec[i](t_res[-1]+c_vec[s]*h, u_k)\n\n self.log.debug(f\"Iter[{itr}]]: k: {k_next}\")\n \n itr = itr + 1\n\n return k_next\n \n def solve(self, stages, c_vec, b_vec, a, f_vec, u_init, h, t_range, delta):\n u_res = [u_init,]\n t_res = [t_range[0],]\n while t_res[-1] < t_range[1]:\n u_cur = [0 for _ in range(len(f_vec))]\n k = self.k_calc(stages, c_vec, b_vec, a, f_vec, u_res, h, t_res, delta)\n for i in range(len(f_vec)):\n u_cur[i] = u_res[-1][i]+h*sum(b_vec[s]*k[i][s] for s in range(stages))\n \n self.log.debug(f\"T[{t_res[-1]}]: k: {k}\")\n self.log.debug(f\"T[{t_res[-1]}]: u: {u_cur}\")\n u_res.append(u_cur)\n t_res.append(t_res[-1]+h)\n \n return (t_res, u_res)", "_____no_output_____" ], [ "log = logging.getLogger()\nc_vec = [1/2-np.sqrt(15)/10, 1/2, 1/2+np.sqrt(15)/10]\nb_vec = [5/18, 4/9, 5/18]\na = [[5/36,2/9-np.sqrt(15)/15,5/36-np.sqrt(15)/30],\n [5/36+np.sqrt(15)/24,2/9,5/36-np.sqrt(15)/24],\n [5/36+np.sqrt(15)/30,2/9+np.sqrt(15)/15,5/36]]\n#c_vec = [1/3, 1]\n#b_vec = [3/4, 1/4]\n#a = [[5/12, -1/12], [3/4, 1/4]]\nlog.debug(f\"c={c_vec}\")\nlog.debug(f\"b={b_vec}\")\nlog.debug(f\"a={a}\")\nu_init = [1, 0, 0]\nt_range = (0, 40)\ndelta = 10e-6\nh = 0.001\nf1 = lambda t, u_vec: -0.04*u_vec[0]+10**4*u_vec[1]*u_vec[2]\nf2 = lambda t, u_vec: 0.04*u_vec[0]-10**4*u_vec[1]*u_vec[2]-3*10**7*u_vec[1]**2\nf3 = lambda t, u_vec: 3*10**7*u_vec[1]**2\nf_vec = [f1, f2, f3]\nrode = RODE()\nres = rode.solve(len(c_vec), c_vec, b_vec, a, f_vec, u_init, h, t_range, delta)\ndf = pd.DataFrame({\"t\": res[0], \"(y1, y2, y3)\": res[1]})\nprint(df)", " t (y1, y2, y3)\n0 0.000 [1, 0, 0]\n1 0.001 [0.9999600007070238, 3.160911290458521e-05, 8....\n2 0.002 [0.9999200096569564, 3.635339873378606e-05, 4....\n3 0.003 [0.9998800342501171, 3.649800066650599e-05, 8....\n4 0.004 [0.999840075037782, 3.649288365169819e-05, 0.0...\n... ... 
...\n39997 39.997 [0.7158335537211122, 9.185178117035123e-06, 0....\n39998 39.998 [0.7158310227510469, 9.184753283527227e-06, 0....\n39999 39.999 [0.715828491981854, 9.184153341901414e-06, 0.2...\n40000 40.000 [0.715825961492197, 9.183284351392223e-06, 0.2...\n40001 40.001 [0.7158234314029306, 9.182001972429047e-06, 0....\n\n[40002 rows x 2 columns]\n" ], [ "def mplot(x, y, xlabel, ylabel):\n plt.plot(x, y, label=f\"{ylabel}({xlabel})\")\n plt.grid(True)\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.legend()\n plt.show()\n\nmplot(res[0], [j[0] for j in res[1]], 't', 'y1')\nmplot(res[0], [j[1] for j in res[1]], 't', 'y2')\nmplot(res[0], [j[2] for j in res[1]], 't', 'y3')", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cb5521f4f65e8c38e956cd806fc038bdaeaf7d7d
18,859
ipynb
Jupyter Notebook
markdown_generator/publications.ipynb
phani-vadrevu/phani-vadrevu.github.io
1386d215d61e50c55e1e1ea4ee810e262d1c24e8
[ "MIT" ]
null
null
null
markdown_generator/publications.ipynb
phani-vadrevu/phani-vadrevu.github.io
1386d215d61e50c55e1e1ea4ee810e262d1c24e8
[ "MIT" ]
null
null
null
markdown_generator/publications.ipynb
phani-vadrevu/phani-vadrevu.github.io
1386d215d61e50c55e1e1ea4ee810e262d1c24e8
[ "MIT" ]
null
null
null
41.087146
448
0.525638
[ [ [ "# Publications markdown generator for academicpages\n\nTakes a TSV of publications with metadata and converts them for use with [academicpages.github.io](academicpages.github.io). This is an interactive Jupyter notebook ([see more info here](http://jupyter-notebook-beginner-guide.readthedocs.io/en/latest/what_is_jupyter.html)). The core python code is also in `publications.py`. Run either from the `markdown_generator` folder after replacing `publications.tsv` with one containing your data.\n\nTODO: Make this work with BibTex and other databases of citations, rather than Stuart's non-standard TSV format and citation style.\n", "_____no_output_____" ], [ "## Data format\n\nThe TSV needs to have the following columns: pub_date, title, venue, excerpt, citation, site_url, and paper_url, with a header at the top. \n\n- `excerpt` and `paper_url` can be blank, but the others must have values. \n- `pub_date` must be formatted as YYYY-MM-DD.\n- `url_slug` will be the descriptive part of the .md file and the permalink URL for the page about the paper. The .md file will be `YYYY-MM-DD-[url_slug].md` and the permalink will be `https://[yourdomain]/publications/YYYY-MM-DD-[url_slug]`\n\nThis is how the raw file looks (it doesn't look pretty, use a spreadsheet or other program to edit and create).", "_____no_output_____" ] ], [ [ "!cat publications.tsv", "authors\tpub_year\ttitle\tconference\tlocation\tpaper_url\taccepted\tsubmitted\tvideo_url\r\nPhani Vadrevu, Babak Rahbarinia, Roberto Perdisci, Kang Li, Manos Antonakakis\t2013\tMeasuring and detecting malware downloads in live network traffic\tEuropean Symposium on Research in Computer Security (ESORICS)\tEgham, U.K.\t/files/papers/amico.pdf\t43\t242\t-\r\nPhani Vadrevu, Roberto Perdisci\t2016\tMAXS: Scaling malware execution with sequential multi-hypothesis testing\t11th ACM on Asia Conference on Computer and Communications Security (AsiaCCS)\tXi'an, China\t/files/papers/maxs.pdf\t73\t350\t-\r\nPhani Vadrevu, Jienan Liu, Bo Li, Babak Rahbarinia, Kyu Hyung Lee, Roberto Perdisci\t2017\tEnabling Reconstruction of Attacks on Users via Efficient Browsing Snapshots\t24th Annual Network and Distributed System Security Symposium (NDSS)\tSan Diego, U.S.A.\t/files/papers/chromepic.pdf\t68\t423\thttps://www.youtube.com/watch?v=iIgTjHr1w0o\r\nBo Li, Phani Vadrevu, Kyu Hyung Lee, Roberto Perdisci\t2018\tJSgraph: Enabling Reconstruction of Web Attacks via Efficient Tracking of Live In-Browser JavaScript Executions\t25th Annual Network and Distributed System Security Symposium (NDSS)\tSan Diego, U.S.A.\t/files/papers/jsgraph.pdf\t71\t331\thttps://www.youtube.com/watch?v=pZU1RIxTMUs\r\nPhani Vadrevu, Roberto Perdisci\t2019\tWhat You See is NOT What You Get: Discovering and Tracking Ad-Driven Social Engineering Attack Campaigns\t19th ACM Internet Measurement Conference (IMC)\tAmsterdam, Netherlands\t/files/papers/seacma.pdf\t38\t197\thttps://vimeo.com/showcase/6531379/video/369121670#t=3018s\r\nKarthika Subramani, Xingzi Yuan, Omid Setayeshfar, Phani Vadrevu, Kyu Hyung Lee, Roberto Perdisci\t2020\tWhen Push Comes to Ads: Measuring the Rise of (Malicious) Push Advertising\t20th ACM Internet Measurement Conference (IMC)\tVirtual\t/files/papers/pushads.pdf\t53\t216\thttps://dl.acm.org/action/downloadSupplement?doi=10.1145%2F3419394.3423631&file=imc2020-107-long.mp4\r\nBhupendra Acharya, Phani Vadrevu\t2021\tPhishPrint: Evading Phishing Detection Crawlers by Prior Profiling\tVirtual 2021 30th USENIX Security Symposium (USENIX 
Security)\t/files/papers/phishprint.pdf\t-\t-\thttps://www.usenix.org/conference/usenixsecurity21/presentation/acharya\r\n" ] ], [ [ "## Import pandas\n\nWe are using the very handy pandas library for dataframes.", "_____no_output_____" ] ], [ [ "import pandas as pd", "_____no_output_____" ] ], [ [ "## Import TSV\n\nPandas makes this easy with the read_csv function. We are using a TSV, so we specify the separator as a tab, or `\\t`.\n\nI found it important to put this data in a tab-separated values format, because there are a lot of commas in this kind of data and comma-separated values can get messed up. However, you can modify the import statement, as pandas also has read_excel(), read_json(), and others.", "_____no_output_____" ] ], [ [ "publications = pd.read_csv(\"publications.tsv\", sep=\"\\t\", header=0)\npublications\n", "_____no_output_____" ], [ "publications.columns", "_____no_output_____" ] ], [ [ "## Escape special characters\n\nYAML is very picky about how it takes a valid string, so we are replacing single and double quotes (and ampersands) with their HTML encoded equivalents. This makes them look not so readable in raw format, but they are parsed and rendered nicely.", "_____no_output_____" ] ], [ [ "html_escape_table = {\n    \"&\": \"&amp;\",\n    '\"': \"&quot;\",\n    \"'\": \"&apos;\"\n    }\n\ndef html_escape(text):\n    \"\"\"Produce entities within text.\"\"\"\n    return \"\".join(html_escape_table.get(c,c) for c in text)", "_____no_output_____" ] ], [ [ "## Creating the markdown files\n\nThis is where the heavy lifting is done. This loops through all the rows in the TSV dataframe, then starts to concatenate a big string (```md```) that contains the markdown for each type. It does the YAML metadata first, then does the description for the individual page.", "_____no_output_____" ] ], [ [ "import os\nfor row, item in publications.iterrows():\n    \n    paper_name = item.paper_url.rsplit('/', 1)[1].split('.')[0]\n    md_filename = str(item.pub_year) + \"-\" + paper_name + \".md\"\n    html_filename = str(item.pub_year) + \"-\" + paper_name\n    ## YAML variables\n    \n    md = \"---\\ntitle: \\\"\" + item.title + '\"\\n'\n    \n    md += \"\"\"collection: publications\"\"\"\n    \n    md += \"\"\"\\npermalink: /publication/\"\"\" + html_filename\n    \n    md += \"\\nyear: \" + str(item.pub_year) \n    \n    md += \"\\nconference: '\" + html_escape(item.conference) + \"'\"\n    \n    md += \"\\nauthors: \" + \"[\" + \", \".join([\"'\" + a + \"'\" for a in item.authors.split(', ')]) + \"]\"\n\n    md += \"\\nlocation: '\" + html_escape(item.location) + \"'\"\n\n    md += \"\\naccepted: '\" + str(item.accepted) + \"'\"\n    \n    md += \"\\nsubmitted: '\" + str(item.submitted) + \"'\"\n    \n    if len(str(item.paper_url)) > 5:\n        md += \"\\npaper_url: '\" + item.paper_url + \"'\"\n    \n    if item.video_url != '-':\n        md += \"\\nvideo_url: '\" + item.video_url + \"'\"\n    \n    md += \"\\n---\"\n    \n    ## Markdown description for individual page\n    \n    #if len(str(item.paper_url)) > 5:\n    #    md += \"\\n[Download paper here](\" + item.paper_url + \")\\n\" \n    \n    md_filename = os.path.basename(md_filename)\n    \n    with open(\"../_publications/\" + md_filename, 'w') as f:\n        f.write(md)", "_____no_output_____" ] ], [ [ "These files are in the publications directory, one directory below where we're working from.", "_____no_output_____" ] ], [ [ "!ls ../_publications/", "2013-amico.md 2017-chromepic.md 2019-seacma.md 2021-phishprint.md\r\n2016-maxs.md  2018-jsgraph.md   2020-pushads.md\r\n" ], [ "!cat ../_publications/2019-seacma.md", "---\r\ntitle: \"What You See is NOT What You 
Get: Discovering and Tracking Ad-Driven Social Engineering Attack Campaigns\"\r\ncollection: publications\r\npermalink: /publication/2019-seacma\r\nyear: 2019\r\nconference: '19th ACM Internet Measurement Conference (IMC)'\r\nauthors: ['Phani Vadrevu', 'Roberto Perdisci']\r\nlocation: 'Amsterdam, Netherlands'\r\naccepted: '38'\r\nsubmitted: '197'\r\npaper_url: '/files/papers/seacma.pdf'\r\nvideo_url: 'https://vimeo.com/showcase/6531379/video/369121670#t=3018s'\r\n---" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
cb5524a071873cb14861463f1fa7d452a2d8d311
2,605
ipynb
Jupyter Notebook
dense_correspondence/experiments/mugs/mugs_qualitative_plots.ipynb
peteflorence/pytorch-dense-correspondence
9bdfd08f49ec7aa601ea2675919574c5d37713c2
[ "BSD-3-Clause" ]
null
null
null
dense_correspondence/experiments/mugs/mugs_qualitative_plots.ipynb
peteflorence/pytorch-dense-correspondence
9bdfd08f49ec7aa601ea2675919574c5d37713c2
[ "BSD-3-Clause" ]
null
null
null
dense_correspondence/experiments/mugs/mugs_qualitative_plots.ipynb
peteflorence/pytorch-dense-correspondence
9bdfd08f49ec7aa601ea2675919574c5d37713c2
[ "BSD-3-Clause" ]
null
null
null
28.010753
106
0.63071
[ [ [ "# Mugs Qualitative Plots\nEval class consistent shoes", "_____no_output_____" ] ], [ [ "%matplotlib inline\n\nimport random\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport os\nimport cv2\nimport torch\nimport dense_correspondence_manipulation.utils.utils as utils\nutils.add_dense_correspondence_to_python_path()\n\nimport dense_correspondence\nfrom dense_correspondence.evaluation.evaluation import *\nimport dense_correspondence.correspondence_tools.correspondence_plotter as correspondence_plotter\nfrom dense_correspondence.dataset.dense_correspondence_dataset_masked import ImageType", "_____no_output_____" ], [ "config_filename = os.path.join(utils.getDenseCorrespondenceSourceDir(), 'config', \n 'dense_correspondence', 'evaluation', 'lucas_evaluation.yaml')\nconfig = utils.getDictFromYamlFilename(config_filename)\ndefault_config = utils.get_defaults_config()\n\n\nutils.set_cuda_visible_devices([0])\n\ndce = DenseCorrespondenceEvaluation(config)\n\nDCE = DenseCorrespondenceEvaluation\n\n\n\nnetwork_name = \"mugs_consistent_M_background_1.000_3\"\n# dcn = dce.load_network_from_config(network_name)\n# dataset = dcn.load_training_dataset()\n\n# network_name = \"baymax_starbot_caterpillar_only_multi_3\"\ndcn = dce.load_network_from_config(network_name)\ndataset = dcn.load_training_dataset()\n\n\nDenseCorrespondenceEvaluation.evaluate_network_qualitative(dcn, dataset=dataset, randomize=True)\n\n# DenseCorrespondenceEvaluation.evaluate_network_qualitative_cross_scene(dcn, dataset=dataset)\n", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ] ]
cb552d557aeca80119439d7e84d4b57de9ff9a8c
16,678
ipynb
Jupyter Notebook
lessons/thw-python/data-structures/data_structures.ipynb
karthik/bc
56efbcef5552d411ffe63f7d84da9131beef1dcb
[ "CC-BY-3.0" ]
null
null
null
lessons/thw-python/data-structures/data_structures.ipynb
karthik/bc
56efbcef5552d411ffe63f7d84da9131beef1dcb
[ "CC-BY-3.0" ]
1
2017-09-22T03:42:51.000Z
2017-09-22T03:42:51.000Z
lessons/thw-python/data-structures/data_structures.ipynb
karthik/bc
56efbcef5552d411ffe63f7d84da9131beef1dcb
[ "CC-BY-3.0" ]
null
null
null
22.972452
327
0.469661
[ [ [ "empty" ] ] ]
[ "empty" ]
[ [ "empty" ] ]
cb552fd116efce527a2ade731a31e1e8618e5c9b
383,089
ipynb
Jupyter Notebook
examples/regression_diff_op_1d.ipynb
atzberg/gmls-nets
d78e5b513b7dda8491f68e11dab730f106f86385
[ "BSD-3-Clause" ]
18
2019-09-17T18:58:26.000Z
2021-08-05T06:02:16.000Z
examples/regression_diff_op_1d.ipynb
atzberg/gmls-nets
d78e5b513b7dda8491f68e11dab730f106f86385
[ "BSD-3-Clause" ]
null
null
null
examples/regression_diff_op_1d.ipynb
atzberg/gmls-nets
d78e5b513b7dda8491f68e11dab730f106f86385
[ "BSD-3-Clause" ]
4
2019-11-16T04:02:07.000Z
2021-03-06T11:43:04.000Z
375.577451
180,784
0.927495
[ [ [ "## GMLS-Nets: 1D Regression of Linear and Non-linear Operators $L[u]$.\n\n__Ben J. Gross__, __Paul J. Atzberger__ <br>\nhttp://atzberger.org/\n\nExamples showing how GMLS-Nets can be used to perform regression for some basic linear and non-linear differential operators in 1D. \n\n__Parameters:__</span> <br>\nThe key parameter terms to adjust are:<br> \n``op_type``: The operator type.<br>\n``flag_mlp_case``: The type of mapping unit to use.<br>\n\n__Examples of Non-linear Operators ($u{u_x},u_x^2,u{u_{xx}},u_{xx}^2$) :__<br>\nTo run training for a non-linear operator like ``u*ux`` using MLP for the non-linear GMLS mapping unit, you can use:<br> \n``op_type='u*ux';`` <br>\n``flag_mlp_case = 'NonLinear1';`` <br>\nYou can obtain different performance by adjusting the mapping architecture and hyperparameters of the network.\n\n__Examples of linear Operators ($u_x,u_{xx}$):__<br>\nTo run training for a linear operator like the 1d Laplacian ``uxx`` with a linear mapping unit, you can use<br> \n``op_type='uxx';``<br>\n``flag_mlp_case = 'Linear1';``<br>\n\nThese are organized for different combinations of these settings allowing for exploring the methods. The codes are easy to modify and adjust to also experiment with other operators. For example, see the dataset classes.\n", "_____no_output_____" ], [ "### Imports", "_____no_output_____" ] ], [ [ "import sys;\n\n# setup path to location of gmlsnets_pytorch (if not install system-wide)\npath_gmlsnets_pytorch = '../../';\nsys.path.append(path_gmlsnets_pytorch);\n\nimport torch;\nimport torch.nn as nn;\n\nimport numpy as np;\nimport pickle;\n\nimport matplotlib.pyplot as plt;\n\nimport pdb\nimport time\n\nimport os\n\n# setup gmlsnets package\nimport gmlsnets_pytorch as gmlsnets;\nimport gmlsnets_pytorch.nn;\nimport gmlsnets_pytorch.vis;\nimport gmlsnets_pytorch.dataset;\n\n# dereference a few common items\nMapToPoly_Function = gmlsnets.nn.MapToPoly_Function;\nget_num_polys = MapToPoly_Function.get_num_polys;\nweight_one_minus_r = MapToPoly_Function.weight_one_minus_r;\neval_poly = MapToPoly_Function.eval_poly;\n\nprint(\"Packages:\");\nprint(\"torch.__version__ = \" + str(torch.__version__));\nprint(\"numpy.__version__ = \" + str(np.__version__));\nprint(\"gmlsnets.__version__ = \" + str(gmlsnets.__version__));", "Packages:\ntorch.__version__ = 1.2.0\nnumpy.__version__ = 1.16.4\ngmlsnets.__version__ = 1.0.0\n" ] ], [ [ "### Parameters and basic setup", "_____no_output_____" ] ], [ [ "# Setup the parameters\nbatch_size = int(1e2);\nflag_extend_periodic = False; # periodic boundaries\nflag_dataset = 'diffOp1';\nrun_name = '%s_Test1'%flag_dataset;\nbase_dir = './output/regression_diff_op_1d/%s'%run_name;\nflag_print_model = False;\n\nprint(\"Settings:\");\nprint(\"flag_dataset = \" + flag_dataset);\nprint(\"run_name = \" + run_name);\nprint(\"base_dir = \" + base_dir); \n\nif not os.path.exists(base_dir):\n os.makedirs(base_dir);\n\n# Configure devices\nif torch.cuda.is_available(): \n num_gpus = torch.cuda.device_count();\n print(\"num_gpus = \" + str(num_gpus));\n if num_gpus >= 4:\n device = torch.device('cuda:3');\n else:\n device = torch.device('cuda:0');\nelse: \n device = torch.device('cpu');\nprint(\"device = \" + str(device)); \n", "Settings:\nflag_dataset = diffOp1\nrun_name = diffOp1_Test1\nbase_dir = ./output/regression_diff_op_1d/diffOp1_Test1\nnum_gpus = 4\ndevice = cuda:3\n" ] ], [ [ "### Setup GMLS-Net for regressing differential operator", "_____no_output_____" ] ], [ [ "class gmlsNetRegressionDiffOp1(nn.Module):\n \"\"\"Sets up 
a GMLS-Net for regression differential operator in 1D.\"\"\" \n\n def __init__(self,\n flag_GMLS_type=None, \n porder1=None,Nc=None,\n pts_x1=None,layer1_epsilon=None, \n weight_func1=None,weight_func1_params=None, \n mlp_q1=None,pts_x2=None,\n device=None,flag_verbose=0,\n **extra_params):\n\n super(gmlsNetRegressionDiffOp1, self).__init__();\n\n self.layer_types = [];\n\n if device is None:\n device = torch.device('cpu'); # default \n\n # --\n Ncp1 = mlp_q1.channels_out; # number of channels out of the MLP-Pointwise layer\n\n num_features1 = mlp_q1.channels_out; # number of channels out (16 typical)\n\n GMLS_Layer = gmlsnets.nn.GMLS_Layer;\n ExtractFromTuple = gmlsnets.nn.ExtractFromTuple;\n PermuteLayer = gmlsnets.nn.PermuteLayer; \n PdbSetTraceLayer = gmlsnets.nn.PdbSetTraceLayer;\n\n # --- Layer 1\n #flag_layer1 = 'standard_conv1';\n flag_layer1 = 'gmls1d_1';\n self.layer_types.append(flag_layer1);\n if flag_layer1 == 'standard_conv1':\n self.layer1 = nn.Sequential(\n nn.Conv1d(in_channels=Nc,out_channels=num_features1,\n kernel_size=5,stride=1,padding=2,bias=False), \n ).to(device);\n elif flag_layer1 == 'gmls1d_1':\n self.layer1 = nn.Sequential( \n GMLS_Layer(flag_GMLS_type, porder1, \n pts_x1, layer1_epsilon, \n weight_func1, weight_func1_params, \n mlp_q=mlp_q1, pts_x2=pts_x2, device=device, \n flag_verbose=flag_verbose), \n #PdbSetTraceLayer(),\n ExtractFromTuple(index=0), # just get the forward output associated with the mapping and not pts_x2 \n #PdbSetTraceLayer(),\n PermuteLayer((0,2,1))\n ).to(device);\n\n else:\n raise Exception('flag_layer1 type not recognized.');\n \n def forward(self, x): \n out = self.layer1(x);\n return out;\n", "_____no_output_____" ] ], [ [ "### Setup the Model: Neural Network", "_____no_output_____" ] ], [ [ "# setup sample point locations\nxj = torch.linspace(0,1.0,steps=101,device=device).unsqueeze(1);\nxi = torch.linspace(0,1.0,steps=101,device=device).unsqueeze(1);\n\n# make a numpy copy for plotting and some other routines\nnp_xj = xj.cpu().numpy(); np_xi = xi.cpu().numpy();\n\n# setup parameters\nNc = 1; # scalar field\nNx = xj.shape[0]; num_dim = xj.shape[1];\nporder = 2; num_polys = get_num_polys(porder,num_dim);\n\nweight_func1 = MapToPoly_Function.weight_one_minus_r;\ntarg_kernel_width = 11.5; layer1_epsilon = 0.4*0.5*np.sqrt(2)*targ_kernel_width/Nx; \n#targ_kernel_width = 21.5; layer1_epsilon = 0.4*0.5*np.sqrt(2)*targ_kernel_width/Nx; \nweight_func1_params = {'epsilon': layer1_epsilon,'p':4};\n\ncolor_input = (0.05,0.44,0.69);\ncolor_output = (0.44,0.30,0.60);\ncolor_predict = (0.05,0.40,0.5);\ncolor_target = (221/255,103/255,103/255);\n\n# print the current settings\nprint(\"GMLS Parameters:\")\nprint(\"porder = \" + str(porder));\nprint(\"num_dim = \" + str(num_dim));\nprint(\"num_polys = \" + str(num_polys));\nprint(\"layer1_epsilon = %.3e\"%layer1_epsilon);\nprint(\"weight_func1 = \" + str(weight_func1));\nprint(\"weight_func1_params = \" + str(weight_func1_params));\nprint(\"xj.shape = \" + str(xj.shape));\nprint(\"xi.shape = \" + str(xi.shape));", "GMLS Parameters:\nporder = 2\nnum_dim = 1\nnum_polys = 3\nlayer1_epsilon = 3.220e-02\nweight_func1 = <function MapToPoly_Function.weight_one_minus_r at 0x7fe5b0bc2290>\nweight_func1_params = {'epsilon': 0.03220486330156554, 'p': 4}\nxj.shape = torch.Size([101, 1])\nxi.shape = torch.Size([101, 1])\n" ], [ "# create an MLP for training the non-linear part of the GMLS Net\n#flag_mlp_case = 'Linear1';flag_mlp_case = 'Nonlinear1'\nflag_mlp_case = 'Nonlinear1';\nif (flag_mlp_case == 
'Linear1'):\n layer_sizes = [];\n\n num_depth = 0; # number of internal layers\n num_hidden = -1; # number of hidden per layer\n\n channels_in = Nc; # number of poly channels (matches input u channel size)\n channels_out = 1; # number of output filters\n \n layer_sizes.append(num_polys); # input\n layer_sizes.append(1); # output, single channel always, for vectors, we use channels_out separate units.\n\n mlp_q1 = gmlsnets.nn.MLP_Pointwise(layer_sizes,channels_in=channels_in,channels_out=channels_out,\n flag_bias=False).to(device);\nelif (flag_mlp_case == 'Nonlinear1'):\n layer_sizes = [];\n num_input = Nc*num_polys; # number of channels*num_polys, allows for cross-channel coupling\n num_depth = 4; # number of internal layers\n num_hidden = 100; # number of hidden per layer\n num_out_channels = 16; # number of output filters\n layer_sizes.append(num_polys);\n for k in range(num_depth):\n layer_sizes.append(num_hidden);\n layer_sizes.append(1); # output, single channel always, for vectors, we use channels_out separate units.\n \n mlp_q1 = gmlsnets.nn.MLP_Pointwise(layer_sizes,channels_out=num_out_channels,\n flag_bias=True).to(device);\n \nif flag_print_model:\n print(\"mlp_q1:\");\n print(mlp_q1);", "_____no_output_____" ], [ "# Setup the Neural Network for Regression\nflag_verbose = 0;\nflag_case = 'standard';\n\n# Setup the model\nxi = xi.float();\nxj = xj.float();\nmodel = gmlsNetRegressionDiffOp1(flag_case,porder,Nc,xj,layer1_epsilon,\n weight_func1,weight_func1_params,\n mlp_q1=mlp_q1,pts_x2=xi,\n device=device, \n flag_verbose=flag_verbose);\n\nif flag_print_model:\n print(\"model:\");\n print(model);", "_____no_output_____" ] ], [ [ "## Setup the training and test data", "_____no_output_____" ] ], [ [ "### Generate Dataset\n\nif flag_dataset == 'diffOp1':\n # Use the FFT to represent differential operators for training data sets.\n #\n # Setup a data set of the following:\n # To start let's do regression for the Laplacian (not inverse, just action of it, like finding FD)\n #\n\n #op_type = 'u*ux';op_type = 'ux*ux';op_type = 'uxx';op_type = 'u*uxx';op_type = 'uxx*uxx';\n op_type = 'u*ux';\n flag_verbose = 1;\n\n num_training_samples = int(5e4);\n nchannels = 1;\n nx = np_xj.shape[0];\n #alpha1 = 0.05;\n alpha1 = 0.1;\n scale_factor = 1e2;\n train_dataset = gmlsnets.dataset.diffOp1(op_type=op_type,op_params=None,\n gen_mode='exp1',gen_params={'alpha1':alpha1},\n num_samples=num_training_samples,\n nchannels=nchannels,nx=nx,\n noise_factor=0,scale_factor=scale_factor,\n flag_verbose=flag_verbose);\n \n train_dataset = train_dataset.to(device);\n if flag_verbose > 0:\n print(\"done.\");\n\n num_test_samples = int(1e4);\n scale_factor = 1e2;\n test_dataset = gmlsnets.dataset.diffOp1(op_type=op_type,op_params=None,\n gen_mode='exp1',gen_params={'alpha1':alpha1},\n num_samples=num_test_samples,\n nchannels=nchannels,nx=nx,\n noise_factor=0,scale_factor=scale_factor,\n flag_verbose=flag_verbose);\n test_dataset = test_dataset.to(device);\n \n if flag_verbose > 0:\n print(\"done.\");\n\n # Put the data into the \n #train_dataset and test_dataset structures for processing\n\nelse:\n msg = \"flag_dataset not recognized.\";\n msg += \"flag_data_set = \" + str(flag_data_set);\n raise Exception(msg);\n\n# Data loader\ntrain_loader = torch.utils.data.DataLoader(dataset=train_dataset,batch_size=batch_size,shuffle=True);\ntest_loader = torch.utils.data.DataLoader(dataset=test_dataset,batch_size=batch_size,shuffle=False);\n\n", "Generating the data samples which can take some time.\nnum_samples = 
50000\ndone.\nGenerating the data samples which can take some time.\nnum_samples = 10000\ndone.\n" ], [ "%matplotlib inline\n\n# plot sample of the training data\ngmlsnets.vis.plot_dataset_diffOp1(train_dataset,np_xj,np_xi,rows=4,cols=6,\n title=\"Data Samples: u, f=L[u], L = %s\"%op_type);", "_____no_output_____" ] ], [ [ "## Train the Model", "_____no_output_____" ], [ "### Custom Functions", "_____no_output_____" ] ], [ [ "def custom_loss_least_squares(val1,val2): \n r\"\"\"Computes the Mean-Square-Error (MSE) over the entire batch.\"\"\"\n diff_flat = (val1 - val2).flatten();\n N = diff_flat.shape[0];\n loss = torch.sum(torch.pow(diff_flat,2),-1)/N;\n return loss;\n\ndef domain_periodic_repeat(Z):\n r\"\"\"Extends the input periodically.\"\"\" \n Z_periodic = torch.cat((Z, Z, Z), 2);\n return Z_periodic;\n \ndef domain_periodic_extract(Z_periodic):\n r\"\"\"Extracts the middle unit cell portion of the extended data.\"\"\" \n nn = int(Z_periodic.shape[2]/3);\n Z = Z_periodic[:,:,nn:2*nn];\n return Z;", "_____no_output_____" ] ], [ [ "### Initialize", "_____no_output_____" ] ], [ [ "loss_list = np.empty(0); loss_step_list = np.empty(0);\nsave_skip = 1; step_count = 0;", "_____no_output_____" ] ], [ [ "### Train the network.", "_____no_output_____" ] ], [ [ "num_epochs = int(3e0); #int(1e4);\nlearning_rate = 1e-2;\n\nprint(\"Training the network with:\");\nprint(\"\");\nprint(\"model:\");\nprint(\"model.layer_types = \" + str(model.layer_types));\nprint(\"\");\n\n# setup the optimization method and loss function\noptimizer = torch.optim.Adam(model.parameters(), lr=learning_rate);\n\n#loss_func = nn.CrossEntropyLoss();\n#loss_func = nn.MSELoss();\nloss_func = custom_loss_least_squares;\n\nprint(\"num_epochs = %d\"%num_epochs);\nprint(\"batch_size = %d\"%batch_size);\nprint(\" \");\n\n# Train the model\nflag_time_it = True;\nif flag_time_it:\n time_1 = time.time();\nprint(\"-\"*80);\nnum_steps = len(train_loader);\nfor epoch in range(num_epochs):\n for i, (input,target) in enumerate(train_loader): \n input = input.to(device);\n target = target.to(device);\n \n if flag_extend_periodic:\n # Extend input periodically\n input_periodic = domain_periodic_repeat(input);\n\n # Forward pass\n output_periodic = model(input_periodic);\n output = domain_periodic_extract(output_periodic);\n else:\n output = model(input);\n\n # Compute loss\n loss = loss_func(output,target);\n\n # Display\n if step_count % save_skip == 0:\n np_loss = loss.cpu().detach().numpy();\n loss_list = np.append(loss_list,np_loss);\n loss_step_list = np.append(loss_step_list,step_count);\n \n # Back-propagation for gradients and use to optimize\n optimizer.zero_grad(); \n loss.backward();\n \n optimizer.step();\n \n step_count += 1;\n \n if ((i + 1) % 100) == 0 or i == 0: \n msg = 'epoch: [%d/%d]; '%(epoch+1,num_epochs);\n msg += 'batch_step = [%d/%d]; '%(i + 1,num_steps);\n msg += 'loss_MSE: %.3e.'%(loss.item());\n print(msg);\n \n if flag_time_it and i > 0:\n msg = 'elapsed_time = %.4e secs \\n'%(time.time() - time_1); \n print(msg);\n time_1 = time.time();\n\n \nprint(\"done training.\")\nprint(\"-\"*80);", "Training the network with:\n\nmodel:\nmodel.layer_types = ['gmls1d_1']\n\nnum_epochs = 3\nbatch_size = 100\n \n--------------------------------------------------------------------------------\nepoch: [1/3]; batch_step = [1/500]; loss_MSE: 1.642e+03.\nepoch: [1/3]; batch_step = [100/500]; loss_MSE: 2.088e+02.\nelapsed_time = 4.1750e+00 secs \n\nepoch: [1/3]; batch_step = [200/500]; loss_MSE: 1.286e+02.\nelapsed_time = 
4.2026e+00 secs \n\nepoch: [1/3]; batch_step = [300/500]; loss_MSE: 6.603e+01.\nelapsed_time = 4.4263e+00 secs \n\nepoch: [1/3]; batch_step = [400/500]; loss_MSE: 7.932e+01.\nelapsed_time = 4.4641e+00 secs \n\nepoch: [1/3]; batch_step = [500/500]; loss_MSE: 2.718e+01.\nelapsed_time = 4.4179e+00 secs \n\nepoch: [2/3]; batch_step = [1/500]; loss_MSE: 2.126e+01.\nepoch: [2/3]; batch_step = [100/500]; loss_MSE: 9.035e+00.\nelapsed_time = 4.6116e+00 secs \n\nepoch: [2/3]; batch_step = [200/500]; loss_MSE: 8.711e+00.\nelapsed_time = 5.0313e+00 secs \n\nepoch: [2/3]; batch_step = [300/500]; loss_MSE: 8.170e+00.\nelapsed_time = 5.5461e+00 secs \n\nepoch: [2/3]; batch_step = [400/500]; loss_MSE: 1.447e+01.\nelapsed_time = 5.0642e+00 secs \n\nepoch: [2/3]; batch_step = [500/500]; loss_MSE: 4.620e+00.\nelapsed_time = 5.0649e+00 secs \n\nepoch: [3/3]; batch_step = [1/500]; loss_MSE: 7.441e+00.\nepoch: [3/3]; batch_step = [100/500]; loss_MSE: 2.331e+01.\nelapsed_time = 4.8299e+00 secs \n\nepoch: [3/3]; batch_step = [200/500]; loss_MSE: 3.698e+00.\nelapsed_time = 5.0481e+00 secs \n\nepoch: [3/3]; batch_step = [300/500]; loss_MSE: 4.575e+00.\nelapsed_time = 5.0761e+00 secs \n\nepoch: [3/3]; batch_step = [400/500]; loss_MSE: 1.884e+01.\nelapsed_time = 5.0468e+00 secs \n\nepoch: [3/3]; batch_step = [500/500]; loss_MSE: 4.437e+00.\nelapsed_time = 5.0546e+00 secs \n\ndone training.\n--------------------------------------------------------------------------------\n" ] ], [ [ "### Plot Loss", "_____no_output_____" ] ], [ [ "%matplotlib inline\n\nplt.figure(figsize=(8,6));\n\nplt.plot(loss_step_list,loss_list,'b-');\nplt.yscale('log');\nplt.xlabel('step');\nplt.ylabel('loss');\n\nplt.title('Loss');", "_____no_output_____" ] ], [ [ "### Test the Neural Network Predictions ", "_____no_output_____" ] ], [ [ "print(\"Testing predictions of the neural network:\");\n\nflag_save_tests = True;\nif flag_save_tests:\n test_data = {};\n\n# Save the first few to show as examples of labeling\nsaved_test_input = [];\nsaved_test_target = [];\nsaved_test_output_pred = [];\n\ncount_batch = 0;\nwith torch.no_grad(): \n total = 0; II = 0;\n avg_error = 0;\n for input,target in test_loader: # loads data in batches and then sums up\n\n if (II >= 1000):\n print(\"tested on %d samples\"%total);\n II = 0;\n\n input = input.to(device); target = target.to(device); \n \n # Compute model\n flag_extend_periodic = False;\n if flag_extend_periodic:\n # Extend input periodically\n input_periodic = domain_periodic_repeat(input);\n\n # Forward pass\n output_periodic = model(input_periodic);\n output = domain_periodic_extract(output_periodic);\n else:\n output = model(input); \n\n # Compute loss\n loss = loss_func(output,target);\n\n # Record the results\n avg_error += loss; \n\n total += output.shape[0]; \n II += output.shape[0];\n count_batch += 1;\n\n NN = output.shape[0];\n for k in range(min(NN,20)): # save first 10 images of each batch \n saved_test_input.append(input[k]); \n saved_test_target.append(target[k]);\n saved_test_output_pred.append(output[k]);\n \n print(\"\");\n print(\"Tested on a total of %d samples.\"%total); \n print(\"\");\n \n # Compute RMSD error\n test_accuracy = avg_error.cpu()/count_batch;\n test_accuracy = np.sqrt(test_accuracy); \n\n print(\"The neural network has RMSD error %.2e on the %d test samples.\"%(test_accuracy,total)); \n print(\"\");\n", "Testing predictions of the neural network:\ntested on 1000 samples\ntested on 2000 samples\ntested on 3000 samples\ntested on 4000 samples\ntested on 5000 
samples\ntested on 6000 samples\ntested on 7000 samples\ntested on 8000 samples\ntested on 9000 samples\n\nTested on a total of 10000 samples.\n\nThe neural network has RMSD error 2.38e+00 on the 10000 test samples.\n\n" ] ], [ [ "### Show a Sample of the Predictions ", "_____no_output_____" ] ], [ [ "# collect a subset of the data to show and attach named labels\n%matplotlib inline\n\nnum_prediction_samples = len(saved_test_input);\nprint(\"num_prediction_samples = \" + str(num_prediction_samples));\n\n#II = np.random.permutation(num_samples); # compute random collection of indices @optimize\nII = np.arange(num_prediction_samples);\n\nif flag_dataset == 'name-here' or 0 == 0: \n u_list = []; f_list = []; f_pred_list = []; \n for I in np.arange(0,min(num_prediction_samples,16)): \n u_list.append(saved_test_input[II[I]].cpu());\n f_list.append(saved_test_target[II[I]].cpu());\n f_pred_list.append(saved_test_output_pred[II[I]].cpu());\n\n# plot predictions against test data\ngmlsnets.vis.plot_samples_u_f_fp_1d(u_list,f_list,f_pred_list,np_xj,np_xi,rows=4,cols=6,\n title=\"Test Samples and Predictions: u, f=L[u], L = %s\"%op_type);", "num_prediction_samples = 20\n" ] ], [ [ "### Save Model", "_____no_output_____" ] ], [ [ "model_filename = '%s/model.ckpt'%base_dir;\nprint(\"model_filename = \" + model_filename);\ntorch.save(model.state_dict(), model_filename);\n\nmodel_filename = \"%s/model_state.pickle\"%base_dir;\nprint(\"model_filename = \" + model_filename);\nf = open(model_filename,'wb');\npickle.dump(model.state_dict(),f);\nf.close();\n", "model_filename = ./output/regression_diff_op_1d/diffOp1_Test1/model.ckpt\nmodel_filename = ./output/regression_diff_op_1d/diffOp1_Test1/model_state.pickle\n" ] ], [ [ "### Display the GMLS-Nets Learned Parameters", "_____no_output_____" ] ], [ [ "flag_run_cell = flag_print_model;\n\nif flag_run_cell:\n print(\"-\"*80) \n print(\"model.parameters():\");\n ll = model.parameters();\n for l in ll:\n print(l); \n \nif flag_run_cell:\n print(\"-\"*80) \n print(\"model.state_dict():\"); \n print(model.state_dict());\n print(\"-\"*80) ", "_____no_output_____" ] ], [ [ "### Done", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
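The record above trains a GMLS-Net to regress nonlinear differential operators such as L[u] = u*u_x from sample pairs generated with FFT-based spectral differentiation. A minimal sketch of that data-generation idea, assuming only numpy, follows; the grid size, the exp(-alpha*m^2) spectral damping, and the operator choice are illustrative stand-ins, since the actual gmlsnets.dataset.diffOp1 generator may differ in its details.

import numpy as np

def make_sample(nx=101, alpha=0.1, seed=0):
    rng = np.random.default_rng(seed)
    m = np.fft.fftfreq(nx) * nx            # integer Fourier mode numbers
    k = 2.0 * np.pi * m                    # angular wavenumbers on domain [0, 1)
    # Random smooth periodic field: spectral noise damped at high modes
    # (the exp(-alpha*m^2) filter is an assumed stand-in for gen_mode='exp1').
    u_hat = (rng.normal(size=nx) + 1j * rng.normal(size=nx)) * np.exp(-alpha * m**2)
    u = np.real(np.fft.ifft(u_hat))
    ux = np.real(np.fft.ifft(1j * k * np.fft.fft(u)))  # spectral derivative
    return u, u * ux                       # (input u, target f = L[u] = u*ux)

u, f = make_sample()
print(u.shape, f.shape)                    # (101,) (101,)

On a periodic domain, multiplication by i*k in Fourier space is an exact derivative for band-limited fields, which is why spectral generation is a convenient way to build (u, L[u]) training pairs.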
cb55323af59e4d2184dd1835c6b1fa832f4f1b4e
33,494
ipynb
Jupyter Notebook
Text_Sentiment_Analysis/TextVectorization_layer.ipynb
ee2110/Natural_Language_Processing-NLP-TensorFlow
e4697d28a7cfd694f6a231598cd939a9362a4e94
[ "MIT" ]
null
null
null
Text_Sentiment_Analysis/TextVectorization_layer.ipynb
ee2110/Natural_Language_Processing-NLP-TensorFlow
e4697d28a7cfd694f6a231598cd939a9362a4e94
[ "MIT" ]
null
null
null
Text_Sentiment_Analysis/TextVectorization_layer.ipynb
ee2110/Natural_Language_Processing-NLP-TensorFlow
e4697d28a7cfd694f6a231598cd939a9362a4e94
[ "MIT" ]
null
null
null
41.659204
2,440
0.580522
[ [ [ "**General Work Process**\n1. Import dataset and preprocess\n2. Train model\n3. Test model", "_____no_output_____" ] ], [ [ "import io\nimport os\nimport re\nimport shutil\nimport string\nimport numpy as np\nimport pandas as pd\nimport tensorflow as tf\n\nfrom tensorflow.keras import Sequential, layers, losses\nfrom tensorflow.keras.layers import Dense, Embedding, GlobalAveragePooling1D\nfrom tensorflow.keras.layers import TextVectorization", "_____no_output_____" ], [ "url = \"https://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz\"\ndataset = tf.keras.utils.get_file(\"aclImdb_v1.tar.gz\", url,\n untar=True, cache_dir='.',\n cache_subdir='')", "Downloading data from https://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz\n84131840/84125825 [==============================] - 258s 3us/step\n84140032/84125825 [==============================] - 258s 3us/step\n" ], [ "dataset_dir = os.path.join(os.path.dirname(dataset), 'aclImdb')\nos.listdir(dataset_dir)", "_____no_output_____" ], [ "# view train data files\ntrain_dir = os.path.join(dataset_dir, 'train')\nos.listdir(train_dir)", "_____no_output_____" ], [ "# clean unnecessary empty folder\nremove_dir = os.path.join(train_dir, 'unsup')\nshutil.rmtree(remove_dir)", "_____no_output_____" ], [ "batch_size = 1024\nseed = 10\n\ntrain_data = tf.keras.preprocessing.text_dataset_from_directory(\n 'aclImdb/train',\n batch_size=batch_size, \n validation_split=0.2,\n subset='training', \n seed=seed)\n\nval_data = tf.keras.preprocessing.text_dataset_from_directory(\n 'aclImdb/train', \n batch_size=batch_size, \n validation_split=0.2,\n subset='validation', \n seed=seed)", "Found 25000 files belonging to 2 classes.\nUsing 20000 files for training.\nFound 25000 files belonging to 2 classes.\nUsing 5000 files for validation.\n" ], [ "# sample batch from train data\nfor text_batch, label_batch in train_data.take(1):\n \n # view the first 5 samples\n for i in range(5):\n print(label_batch[i].numpy(), text_batch.numpy()[i])", "1 b\"This film is more about how children make sense of the world around them, and how they (and we) use myth to make sense of it all. I think it's been misperceived, everyone going in expecting a stalkfest won't enjoy it but if you want a deeper story, it's here.......\"\n0 b'God, I was bored out of my head as I watched this pilot. I had been expecting a lot from it, as I\\'m a huge fan of James Cameron (and not just since \"Titanic\", I might add), and his name in the credits I thought would be a guarantee of quality (Then again, he also wrote the leaden Strange Days..). But the thing failed miserably at grabbing my attention at any point of its almost two hours of duration. In all that time, it barely went beyond its two line synopsis, and I would be very hard pressed to try to figure out any kind of coherent plot out of all the mess of strands that went nowhere. On top of that, I don\\'t think the acrobatics outdid even those of any regular \"A-Team\" episode. As for Alba, yes, she is gorgeous, of course, but the fact that she only displays one single facial expression the entire movie (pouty and surly), makes me also get bored of her \"gal wit an attitude\" schtick pretty soon. You can count me out of this one, Mr. 
Cameron!'\n0 b'me, my boyfriend, and our friend watched this \"movie\" if thats what u wanna call it, and we agree with the last person, but we were stupid and bought the damn thing, we thought it really was about diablo so we bought it.<br /><br />we hate it Really SUXZ!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! so beware: DO NOT BUY THIS THING THEY CALL A MOVIE!!!!!!!!!!!!!!!!!!!!!!!<br /><br />we would return it, but don\\'t no if anybody would want this stupid movie.<br /><br />oh and another thing, the shouldn\\'t call it \"The Legend of Diablo\" they should of called it \"Legend of Azar\".<br /><br />and this movie is rated R????? this should not of even been not rated.<br /><br />we think that diablo would be crying his eyes out laughing at this stupid movie.<br /><br />this is a movie that would have been done by a Church.<br /><br />theses \"actors\" are never gonna become nothing because this movie.'\n0 b\"SPOILERS THROUGHOUT: <br /><br />The Gettaway is mostly an action movie. And what action there is to!! Shootouts, chases, dumpsters and much much more. It stars Kim Bassenger and Alec Baldwin as the Mc Coy's.<br /><br />This is a remake and I have not seen the original but really didn't care for this one at all although Bassenger and Baldwin have some nice screen chemistry. But the movie itself didn't do it for me.<br /><br />The Gettaway became really tiresome really quickly. The plot is overshadowed by one fight/chase after another and as the violence keeps piling up, Bassenger and Baldwin retain their great looks no matter what perils they maybe in. In fact, by the end of the movie they almost look BETTER then in the beginning. I don't think Bassenger's eye makeup moves once during the whole picture.<br /><br />This isn't the worst movie I've ever seen, certainly not, but it isn't very good and unless one is an action movie purist I can't see really enjoying this movie because there's just not a lot here. The Gettaway isn't terribly original either, and goes every way from unnecessarily brutal to rather dull. It really could have been better I think.<br /><br />Bassenger and Baldwin give OK performances but they don't have a lot to do except get chased and run for their lives. Sometimes less is more, after seeing the same thing over and over again it gets stale. Didn't enjoy this one to much.\"\n0 b'This was a \"cute\" movie at first, then then got too sappy and featured mediocre songs, at best.<br /><br />There is too much King James English spoken with is not only annoying in today\\'s world but not always easy to interpret. Can you imagine young people of today trying to listen to this film? Forget it.<br /><br />Bing Crosby has some good lines in here and is likable as \"Hank Martin.\" Rhonda Fleming (\"Alisande La Carteloise\") was, too, in addition to her good looks and beautiful, long red hair. <br /><br />It\\'s a nice movie with a feel-good ending, and I can\\'t knock that. 
Maybe this is worthy of a rental, for historical sake or if you\\'re a big Crosby fan but, overall, it\\'s not that much.'\n" ], [ "AUTOTUNE = tf.data.AUTOTUNE\n\ntrain_ds = train_data.cache().prefetch(buffer_size=AUTOTUNE)\nval_ds = val_data.cache().prefetch(buffer_size=AUTOTUNE)", "_____no_output_____" ], [ "# Create a custom standardization function to strip HTML break tags '<br />'.\ndef custom_standardization(input_data):\n lowercase = tf.strings.lower(input_data)\n stripped_html = tf.strings.regex_replace(lowercase, '<br />', ' ')\n return tf.strings.regex_replace(stripped_html,\n '[%s]' % re.escape(string.punctuation), '')\n\n\n# Vocabulary size and number of words in a sequence.\nvocab_size = 10000\nsequence_length = 100\n\n# Use the text vectorization layer to normalize, split, and map strings to\n# integers. Note that the layer uses the custom standardization defined above.\n# Set maximum_sequence length as all samples are not of the same length.\nvectorize_layer = TextVectorization(\n standardize=custom_standardization,\n max_tokens=vocab_size,\n output_mode='int',\n output_sequence_length=sequence_length)\n\n# Make a text-only dataset (no labels) and call adapt to build the vocabulary.\ntext_ds = train_ds.map(lambda x, y: x)\nvectorize_layer.adapt(text_ds)", "_____no_output_____" ], [ "embedding_dim=16\n\nmodel = Sequential([\n vectorize_layer,\n Embedding(vocab_size, embedding_dim, name=\"embedding\"),\n GlobalAveragePooling1D(),\n Dense(32, activation='relu'),\n Dense(1)\n])\n\ntensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=\"logs\")\n\nmodel.compile(optimizer='adam',\n loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),\n metrics=['accuracy'])", "_____no_output_____" ], [ "model.fit(\n train_ds,\n validation_data=val_ds,\n epochs=20,\n callbacks=[tensorboard_callback])", "Epoch 1/20\n20/20 [==============================] - 47s 2s/step - loss: 0.6920 - accuracy: 0.5003 - val_loss: 0.6898 - val_accuracy: 0.4986\nEpoch 2/20\n20/20 [==============================] - 4s 200ms/step - loss: 0.6863 - accuracy: 0.5003 - val_loss: 0.6818 - val_accuracy: 0.4986\nEpoch 3/20\n20/20 [==============================] - 4s 198ms/step - loss: 0.6749 - accuracy: 0.5004 - val_loss: 0.6677 - val_accuracy: 0.4986\nEpoch 4/20\n20/20 [==============================] - 4s 195ms/step - loss: 0.6559 - accuracy: 0.5052 - val_loss: 0.6462 - val_accuracy: 0.5212\nEpoch 5/20\n20/20 [==============================] - 4s 198ms/step - loss: 0.6286 - accuracy: 0.5525 - val_loss: 0.6175 - val_accuracy: 0.5982\nEpoch 6/20\n20/20 [==============================] - 4s 197ms/step - loss: 0.5940 - accuracy: 0.6482 - val_loss: 0.5839 - val_accuracy: 0.6822\nEpoch 7/20\n20/20 [==============================] - 4s 185ms/step - loss: 0.5548 - accuracy: 0.7211 - val_loss: 0.5487 - val_accuracy: 0.7316\nEpoch 8/20\n20/20 [==============================] - 4s 188ms/step - loss: 0.5145 - accuracy: 0.7621 - val_loss: 0.5152 - val_accuracy: 0.7544\nEpoch 9/20\n20/20 [==============================] - 4s 186ms/step - loss: 0.4762 - accuracy: 0.7897 - val_loss: 0.4857 - val_accuracy: 0.7698\nEpoch 10/20\n20/20 [==============================] - 4s 193ms/step - loss: 0.4418 - accuracy: 0.8087 - val_loss: 0.4611 - val_accuracy: 0.7836\nEpoch 11/20\n20/20 [==============================] - 4s 195ms/step - loss: 0.4115 - accuracy: 0.8239 - val_loss: 0.4411 - val_accuracy: 0.7928\nEpoch 12/20\n20/20 [==============================] - 4s 196ms/step - loss: 0.3853 - accuracy: 0.8367 - val_loss: 0.4250 - 
val_accuracy: 0.7992\nEpoch 13/20\n20/20 [==============================] - 4s 204ms/step - loss: 0.3624 - accuracy: 0.8468 - val_loss: 0.4120 - val_accuracy: 0.8046\nEpoch 14/20\n20/20 [==============================] - 4s 201ms/step - loss: 0.3422 - accuracy: 0.8565 - val_loss: 0.4018 - val_accuracy: 0.8096\nEpoch 15/20\n20/20 [==============================] - 4s 194ms/step - loss: 0.3244 - accuracy: 0.8640 - val_loss: 0.3938 - val_accuracy: 0.8144\nEpoch 16/20\n20/20 [==============================] - 4s 194ms/step - loss: 0.3086 - accuracy: 0.8712 - val_loss: 0.3877 - val_accuracy: 0.8166\nEpoch 17/20\n20/20 [==============================] - 4s 194ms/step - loss: 0.2945 - accuracy: 0.8773 - val_loss: 0.3832 - val_accuracy: 0.8194\nEpoch 18/20\n20/20 [==============================] - 4s 195ms/step - loss: 0.2817 - accuracy: 0.8824 - val_loss: 0.3800 - val_accuracy: 0.8228\nEpoch 19/20\n20/20 [==============================] - 4s 194ms/step - loss: 0.2701 - accuracy: 0.8877 - val_loss: 0.3779 - val_accuracy: 0.8252\nEpoch 20/20\n20/20 [==============================] - 4s 196ms/step - loss: 0.2595 - accuracy: 0.8931 - val_loss: 0.3767 - val_accuracy: 0.8262\n" ], [ "%load_ext tensorboard\n%tensorboard --logdir logs", "_____no_output_____" ], [ "# get the trained word embeddings\nweights = model.get_layer('embedding').get_weights()[0]\nvocab = vectorize_layer.get_vocabulary()", "_____no_output_____" ], [ "vocab[:10]", "_____no_output_____" ], [ "out_v = io.open('vectors.tsv', 'w', encoding='utf-8')\nout_m = io.open('metadata.tsv', 'w', encoding='utf-8')\n\nfor index, word in enumerate(vocab):\n if index == 0:\n continue # skip 0, it's padding.\n vec = weights[index]\n out_v.write('\\t'.join([str(x) for x in vec]) + \"\\n\")\n out_m.write(word + \"\\n\")\nout_v.close()\nout_m.close()", "_____no_output_____" ] ], [ [ "## Test model", "_____no_output_____" ] ], [ [ "# view test data files\ntest_dir = os.path.join(dataset_dir, 'test')\nos.listdir(test_dir)", "_____no_output_____" ], [ "test_data = tf.keras.preprocessing.text_dataset_from_directory(\n 'aclImdb/test')", "Found 25000 files belonging to 2 classes.\n" ], [ "def vectorize_text(text, label):\n text = tf.expand_dims(text, -1)\n return vectorize_layer(text), label", "_____no_output_____" ], [ "# sample batch from test data\nfor test_text_batch, test_label_batch in test_data.take(1):\n \n # view the first 5 samples\n for i in range(5):\n print(test_label_batch[i].numpy(), test_text_batch.numpy()[i])", "0 b\"An insult to both poker and cinema, this movie manages to make the most dynamic, brilliant, and fascinating figure in poker history into an utter bore. Still a fun film to make jokes about, from the lame gangster movie clich\\xc3\\xa9s of the first half to the incomprehensible nonsense of that second hour. Hilariously, Stu Ungar wins all three of his World Series titles without playing a single hand on screen. His infamous dealer abuse? 1 scene. His coke habit? 1 scene. His incredible memory? 0 scenes. They couldn't even get any real poker players. What did they cover? A lot of high angle shots from inside a house in the suburbs. Oh, and a montage of Stu waking up every day and shopping for meat which doesn't come anywhere close to making sense. Why do I care so much about this little Sopranos summer camp trying to cash in on the poker craze? Because I think there's still a great film to be made about Stu Ungar waiting for someone willing to do it right.\"\n0 b'(SMALL SPOILERS) I just bought the DVD of this movie yesterday. 
I saw it with my friends and I couldn\\'t believe what had happened.<br /><br />In the first 3 movies, the critters at least had a sense of humor (especially the 3rd movie), but not only did the critters barely ever make an appearance, they weren\\'t funny! They never made me laugh. I must admit that the story did start off nicely. After an hour had gone by I remembered that the Critters movies were always very short. So I thought to myself, \"Where the $^%#$ are the critters?!?!\" They were barely in this movie! If that didn\\'t make me mad enough, the boy named Ethan was sitting on his bed after Charlie had \"murdered the ship\" and he knew that the critters were still on board! In the first movie the Brown family was scared out of their minds. But here, Ethan didn\\'t even care! It was as if the critters weren\\'t even a threat!<br /><br />Now what I\\'m about to say next may ruin the ending, but I\\'m going to say it anyways. In the first movie, at the end, they had to face the giant critter for a final battle. In the second one, there was the great ball of critter. In the third movie, the critter with his fave burned did a spindash (from Sonic the Hedgehog) and was going to attack the little kid. But at the end of the fourth one (which is what made me the angriest) the bald critter charges toward Ethan, and Ethan kills it as if it were nothing.<br /><br />Now something that I really don\\'t understand was what happened to Ug. He was one of my favorite characters in the first two. Then after 50 years, he\\'s evil. That was very disappointing. Not only that, but wasn\\'t he a faceless bounty hunter? Why was he still \"Johnny Steele?\" Plus he seemed to have a different personality. He seemed much smarter and not as monotone like in the first two.<br /><br />Being someone who actually enjoyed the first two critters movies, and loved the third one, I give Critters 4 a 2/10'\n0 b\"Very disappointing 7th chapter of this slowly dying series. Very evident that the budget was extremely low. This movie was made for one reason and one reason alone. To sell Puppet Master Toys! Fans, such as myself of the series have decided, from what I have read and heard that the only one in the series worse than this is Curse of the Puppetmaster. In turn, turning us away from the series. <br /><br />Opting to make this a PG-13 film, for whatever reason, did not work in the films favor. The plot seemed almost to be there, but was easily lost in the steady stream of nonsense. <br /><br />The only film in the series worth watching, also directed by Decoteau is part 3 - Toulon's Revenge.<br /><br />Granted, I do favor the scenery in the film. <br /><br />Yuck!\"\n0 b'Stay away from this movie! It is terrible in every way. Bad acting, a thin recycled plot and the worst ending in film history. Seldom do I watch a movie that makes my adrenaline pump from irritation, in fact the only other movie that immediately springs to mind is another \"people in an aircraft in trouble\" movie (Airspeed). Please, please don\\'t watch this one as it is utterly and totally pathetic from beginning to end. Helge Iversen'\n0 b\"This film is BORING, BORING, BORING, BORING, and BORING!!! It's not the worse film I ever saw, on the contrary, but.......how shall I put this.......IT'S BORING! There is some very nice scenery and some clever dry wit but that's about it. 
If it was advertised as a travelogue I would rate it a 7 but it's supposed to be a film with a plot, some drama, and for god's sake a point or a satisfying conclusion.<br /><br />I read some of the comments on this board about this films and I wondered if they saw the same movie as I did.<br /><br />See this film (yawn) at your own risk........one thing for sure- it really is rated correctly= G RATING! (Which most stand for GOD AWFUL BORING!)\"\n" ], [ "text_batch, label_batch = next(iter(test_data))\nfirst_review, first_label = text_batch[0], label_batch[0]\nprint(\"Review\", first_review)\nprint(\"Label\", test_data.class_names[first_label])\nprint(\"Vectorized review\", vectorize_text(first_review, first_label))", "Review tf.Tensor(b'This film biography of early rock and roll star Buddy Holly (1936-1959) is a tour de force for Gary Busey. The movie\\'s highlights are Busey\\'s stage performances where he plays guitar and sings Holly songs. He brings such energy to the performances that Holly\\'s own filmed performances almost pale in comparison. Busey\\'s infectious toothy grin lights up the screen, he creates a totally believable and winning personality and his Oscar nomination for best actor was well deserved.<br /><br />The film follows Holly\\'s career from growing up in Lubbock, Texas, to stardom and New York and his untimely death in a plane crash. One thing I found interesting, if true, was Buddy\\'s driving ambition--he had great plans to go beyond recording and performance to producing. As young as he was he was already establishing himself as a shrewd businessman and definitely wanted to take things to a higher level. We will never know if he would have ultimately catapulted his early success into a business brand like The Rolling Stones.<br /><br />The lyrics of many of Holly\\'s songs are pretty adolescent; read the lyrics for \"Peggy Sue\" or \"Oh Boy!\" and you will see what I mean. Maybe to a great extent this explains his popularity with adolescent audiences, but his instrumentation and stage performances surely account for his influence on groups to follow--both The Rolling Stones and The Beatles have acknowledged his importance.<br /><br />Clearly some liberties were taken for dramatic effect. For example, I doubt that Holly ever punched out a producer in Nashville or that the audience at New York\\'s Apollo theater was so immediately responsive as to be wildly dancing in the aisles. If you are interested in getting closer to the truth, see the documentary \"The Real Buddy Holly Story\" (1985) that is produced and hosted by a very relaxed and engaging Paul McCartney. This contains interviews with Holly\\'s family, friends, and band-mates (Holly\\'s musical brothers are not even mentioned in \"The Buddy Holly Story\"). Members of other bands like Keith Richards and Don Everly also offer opinions and stories and there is footage of old Holly performances. 
The McCartney production can stand on its own, but it makes an excellent companion piece to \"The Buddy Holly Story\" and perhaps should be required viewing for anyone who watches the fictionalized story.', shape=(), dtype=string)\nLabel pos\nVectorized review (<tf.Tensor: shape=(1, 100), dtype=int64, numpy=\narray([[ 11, 19, 4980, 5, 410, 860, 4, 2072, 355, 1752, 3086,\n 1, 7, 3, 2918, 1017, 1079, 16, 1864, 5468, 2, 91,\n 3255, 23, 1, 1025, 367, 116, 28, 295, 4303, 4, 3209,\n 3086, 761, 28, 969, 137, 1668, 6, 2, 367, 12, 1,\n 197, 704, 367, 208, 4786, 8, 1716, 1, 9627, 1, 9236,\n 2363, 55, 2, 270, 28, 2181, 3, 423, 785, 4, 2238,\n 1556, 4, 24, 980, 4788, 16, 117, 299, 13, 70, 1875,\n 2, 19, 1039, 1, 640, 35, 1928, 55, 8, 1, 1709,\n 6, 6158, 4, 172, 962, 4, 24, 1, 316, 8, 3,\n 1373]], dtype=int64)>, <tf.Tensor: shape=(), dtype=int32, numpy=1>)\n" ], [ "# the vectorize function is not required to process the test data\n# if the vectorize layer included in model\n\n# test_ds = test_data.map(vectorize_text)\n\n# # sample batch from test data\n# for test_text_batch, test_label_batch in test_ds.take(1):\n# for i in range(1):\n# print(test_label_batch[i].numpy(), test_text_batch.numpy()[i])", "_____no_output_____" ], [ "loss, accuracy = model.evaluate(test_data)\n\nprint(\"Loss: \", loss)\nprint(\"Accuracy: \", accuracy)", "782/782 [==============================] - 26s 34ms/step - loss: 0.4029 - accuracy: 0.8025\nLoss: 0.40294232964515686\nAccuracy: 0.8024799823760986\n" ], [ "export_model = tf.keras.Sequential([\n model,\n layers.Activation('sigmoid')\n])\n\nexport_model.compile(\n loss=losses.BinaryCrossentropy(from_logits=False), optimizer=\"adam\", metrics=['accuracy']\n)\n\n# Test it with `raw_test_ds`, which yields raw strings\nloss, accuracy = export_model.evaluate(test_data)\nprint(accuracy)", "782/782 [==============================] - 20s 24ms/step - loss: 0.9002 - accuracy: 0.5179\n0.5178800225257874\n" ], [ "text_batch, label_batch = next(iter(test_data))\nfirst_review, first_label = text_batch[0], label_batch[0]", "_____no_output_____" ], [ "pred_label = export_model.predict(test_data)", "_____no_output_____" ], [ "pred_label", "_____no_output_____" ], [ "pred_label.shape", "_____no_output_____" ], [ "pred_y = []\n\nfor i in range(len(pred_label)):\n pred_y.append(round(pred_label[i][0]))", "_____no_output_____" ], [ "len(pred_y)", "_____no_output_____" ], [ "actual_y = []\nfor tt, ll in test_data:\n for l in ll:\n actual_y.append(l.numpy())", "_____no_output_____" ], [ "correct = 0\nfor i in range(len(pred_y)):\n if pred_y[i] == actual_y[i]:\n correct+=1", "_____no_output_____" ], [ "correct/len(pred_y)*100", "_____no_output_____" ] ], [ [ "**Analyze my own review**", "_____no_output_____" ] ], [ [ "my_reviews =[\"The new movie is popular and awesome\",\n \"The background music is annoying and too loud\",\n \"We are very enjoy the movie\",\n \"Negative comment in internent is hurt people\",\n \"The smile is very sweat and cute!\",\n \"The view is so beautiful and attrative\",\n ]", "_____no_output_____" ], [ "export_model.predict(my_reviews)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
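The record above standardizes raw strings, adapts a TextVectorization layer to build a vocabulary, and then keeps that layer inside the model so the exported model can accept raw text directly. A self-contained sketch of the adapt-then-call workflow on toy strings, assuming TensorFlow 2.6+ where TextVectorization lives directly under tf.keras.layers (older releases keep it in tf.keras.layers.experimental.preprocessing):

import tensorflow as tf

corpus = tf.constant(["the movie was great", "the plot was boring"])
vectorizer = tf.keras.layers.TextVectorization(
    max_tokens=20, output_mode="int", output_sequence_length=6)
vectorizer.adapt(corpus)             # build the vocabulary from raw strings
print(vectorizer.get_vocabulary())   # ['', '[UNK]', 'the', 'was', ...]
print(vectorizer(corpus).numpy())    # integer ids, padded to shape (2, 6)

Index 0 is reserved for padding and index 1 for the out-of-vocabulary token, which is why real words start at id 2 in the record's vectorized review.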
cb55412d420ab4ac49e7b6dac64bc04543412bcf
193,074
ipynb
Jupyter Notebook
Densenet_depth_model/DepthImageVisualize.ipynb
varun-affinsys/Monocular-Depth-Estimation-with-Transfer-Learning-pretrained-MobileNetV2
9b20c5b3d7a9f90e1dc6f40e17ee31d9b3dee684
[ "MIT" ]
70
2020-02-01T06:30:58.000Z
2022-03-29T03:46:06.000Z
Densenet_depth_model/DepthImageVisualize.ipynb
varun-affinsys/Monocular-Depth-Estimation-with-Transfer-Learning-pretrained-MobileNetV2
9b20c5b3d7a9f90e1dc6f40e17ee31d9b3dee684
[ "MIT" ]
4
2020-06-24T18:10:34.000Z
2021-11-25T19:21:08.000Z
Densenet_depth_model/DepthImageVisualize.ipynb
varun-affinsys/Monocular-Depth-Estimation-with-Transfer-Learning-pretrained-MobileNetV2
9b20c5b3d7a9f90e1dc6f40e17ee31d9b3dee684
[ "MIT" ]
16
2020-01-28T12:11:03.000Z
2021-11-02T11:50:54.000Z
1,804.429907
157,944
0.963366
[ [ [ "import pandas as pd\nimport os\nfrom sklearn.utils import shuffle\nfrom skimage import io, transform\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "#for testing image\ntraincsv=pd.read_csv('/workspace/data/nyu2_train.csv')\ntraincsv = traincsv.rename(columns={'data/nyu2_train/living_room_0038_out/37.jpg': 'image', 'data/nyu2_train/living_room_0038_out/37.png': 'depth'})\ntraincsv = shuffle(traincsv, random_state=1)\nroot_dir='/workspace/'\nimg_name = os.path.join(root_dir,traincsv.iloc[2,0])\nimage = io.imread(img_name)\ndepth_name = os.path.join(root_dir,traincsv.iloc[2,1])\ndepth_image = io.imread((depth_name))\nfrom skimage import img_as_float\ndepth_float = img_as_float(depth_image)\nprint(depth_float.max())\nprint(depth_float.min())\n\nplt.imshow(image)\nplt.figure()\nplt.imshow(depth_image)", "0.5843137254901961\n0.14901960784313725\n" ] ] ]
[ "code" ]
[ [ "code", "code" ] ]
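The record's printed max of 0.5843137254901961 and min of 0.14901960784313725 are exactly 149/255 and 38/255: skimage's img_as_float rescales integer images into [0, 1] by the maximum value of the input dtype. A minimal sketch of that behavior on synthetic arrays:

import numpy as np
from skimage import img_as_float

depth_u8 = np.array([[38, 149]], dtype=np.uint8)
print(img_as_float(depth_u8))    # [[0.1490... 0.5843...]], i.e. value / 255
depth_u16 = np.array([[1000, 65535]], dtype=np.uint16)
print(img_as_float(depth_u16))   # divided by 65535 for 16-bit images

These are display-normalized values rather than metric depths; converting to meters would require the dataset's own depth scale, which the record does not state.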
cb55413c1d7ad1693993f7d1dbed415de6d267c0
4,340
ipynb
Jupyter Notebook
AutoYoutubeChannel/Untitled5.ipynb
FuckBrains/AutoYoutubeChannel
8898b4489a6daf954080ecaac4d221f27ddb8a43
[ "Apache-2.0" ]
2
2020-09-14T18:03:46.000Z
2020-11-05T21:08:10.000Z
AutoYoutubeChannel/Untitled5.ipynb
bamby97/AutoYoutubeChannel
8898b4489a6daf954080ecaac4d221f27ddb8a43
[ "Apache-2.0" ]
null
null
null
AutoYoutubeChannel/Untitled5.ipynb
bamby97/AutoYoutubeChannel
8898b4489a6daf954080ecaac4d221f27ddb8a43
[ "Apache-2.0" ]
2
2020-09-14T18:03:48.000Z
2021-04-30T19:13:44.000Z
33.90625
125
0.564977
[ [ [ "from moviepy.editor import *\n\npostedByFontSize=25\nreplyFontSize=35\ntitleFontSize=100\ncortinilla= VideoFileClip('assets for Channel/assets for video/transicion.mp4')\nclip = ImageClip('assets for Channel/assets for video/background assets/fondo_preguntas.jpg').on_color((1920, 1080))\nfinal= VideoFileClip('assets for Channel/assets for video/transicion.mp4')", "_____no_output_____" ], [ "def generate_video_of_reply(author,replyLines,replyaudio):\n videoComponents=[]\n textReply= []\n postedBy = TextClip('Posted by /'+author, fontsize=postedByFontSize, color='white')\n postedBy=postedBy.set_pos((162, 124))\n index=0\n yAxis=184\n for replyLine in replyLines:\n print('line '+str(index)+replyLine)\n try:\n replyline=TextClip(replyLine, fontsize=postedByFontSize, color='white')\n replyline=replyline.set_pos((162,yAxis))\n textReply.append(replyline)\n except:\n print('null line')\n print(yAxis)\n yAxis+=25\n index+=1\n videoComponents.append(clip)\n videoComponents.append(postedBy)\n videoComponents.extend(textReply)\n replyVideo = CompositeVideoClip(videoComponents)\n replyVideo = replyVideo.set_duration(replyaudio.duration)\n replyVideo = replyVideo.set_audio(replyaudio)\n return replyVideo", "_____no_output_____" ], [ "def generate_final_video(title,replies):\n videoClips=[]\n videoClips.append(generate_title(title))\n index=0\n for reply in replies:\n audio=AudioFileClip('comment'+str(index)+'.mp3')\n videoClips.append(generate_video_of_reply(reply['author'],reply['replyLines'],audio))\n videoClips.append(cortinilla)\n index+=1\n videoClips.append(final)\n finalVideo=concatenate_videoclips(videoClips)\n finalVideo.fx(vfx.speedx, factor=1.3)\n finalVideo.write_videofile(\"text.mp4\", fps=24)", "_____no_output_____" ], [ "def generate_title(title): \n videoComponents=[]\n yAxisJumpInLine=80\n maxCharsInLine=38\n titleaudio=AudioFileClip('title.mp3')\n titleline=TextClip(title, fontsize=titleFontSize, color='white')\n titleline=titleline.set_pos((202,94))\n #if(len(titleline)>38):\n # sublines=[line[i:i+maxCharsInLine] for i in range(0, len(line), maxCharsInLine)]\n # sublinesSize=len(sublines)\n # for x in range(sublinesSize):\n # index = len(sublines[x]) # calculate length of string and save in index\n # while index > 0: \n # if(sublines[x][ index - 1 ]==' '): # save the value of str[index-1] in reverseString\n # index = index - 1\n #if(' ' in sublines[x+1]):\n \n videoComponents.append(clip)\n videoComponents.append(titleline)\n titleVideo = CompositeVideoClip(videoComponents)\n titleVideo = titleVideo.set_duration(titleaudio.duration)\n titleVideo = titleVideo.set_audio(titleaudio)\n return titleVideo\n ", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code" ] ]
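The commented-out block inside generate_title above appears to be working toward word-boundary wrapping of long titles into lines of at most maxCharsInLine (38) characters. The standard library's textwrap already implements this; a minimal sketch, independent of moviepy:

import textwrap

def split_title(title, max_chars=38):
    # Break only at whitespace so no word is split across lines.
    return textwrap.wrap(title, width=max_chars)

title = "An example Reddit thread title that easily runs past a single line"
for line in split_title(title):
    print(line)

Each wrapped line could then be rendered as its own TextClip with an increasing y offset, mirroring how generate_video_of_reply steps yAxis by 25 for each reply line.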
cb5542674d11e617d859554a6515b435135b2752
151,554
ipynb
Jupyter Notebook
Regression/Linear Models/PoissonRegressor_StandardScaler_PowerTransformer.ipynb
shreepad-nade/ds-seed
93ddd3b73541f436b6832b94ca09f50872dfaf10
[ "Apache-2.0" ]
53
2021-08-28T07:41:49.000Z
2022-03-09T02:20:17.000Z
Regression/Linear Models/PoissonRegressor_StandardScaler_PowerTransformer.ipynb
shreepad-nade/ds-seed
93ddd3b73541f436b6832b94ca09f50872dfaf10
[ "Apache-2.0" ]
142
2021-07-27T07:23:10.000Z
2021-08-25T14:57:24.000Z
Regression/Linear Models/PoissonRegressor_StandardScaler_PowerTransformer.ipynb
shreepad-nade/ds-seed
93ddd3b73541f436b6832b94ca09f50872dfaf10
[ "Apache-2.0" ]
38
2021-07-27T04:54:08.000Z
2021-08-23T02:27:20.000Z
212.558205
68,980
0.892269
[ [ [ "# PoissonRegressor with StandardScaler & Power Transformer", "_____no_output_____" ], [ "This Code template is for the regression analysis using Poisson Regressor, StandardScaler as feature rescaling technique and Power Transformer as transformer in a pipeline. This is a generalized Linear Model with a Poisson distribution.", "_____no_output_____" ], [ "### Required Packages", "_____no_output_____" ] ], [ [ "import warnings\nimport numpy as np \nimport pandas as pd \nimport matplotlib.pyplot as plt \nimport seaborn as se \nfrom sklearn.linear_model import PoissonRegressor\nfrom sklearn.model_selection import train_test_split \nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.preprocessing import StandardScaler, PowerTransformer\nfrom sklearn.metrics import r2_score, mean_absolute_error, mean_squared_error \nwarnings.filterwarnings('ignore')", "_____no_output_____" ] ], [ [ "### Initialization\n\nFilepath of CSV file", "_____no_output_____" ] ], [ [ "#filepath\nfile_path= \"\"", "_____no_output_____" ] ], [ [ "List of features which are required for model training .", "_____no_output_____" ] ], [ [ "#x_values\nfeatures=[]", "_____no_output_____" ] ], [ [ "Target feature for prediction.", "_____no_output_____" ] ], [ [ "#y_value\ntarget=''", "_____no_output_____" ] ], [ [ "### Data Fetching\n\nPandas is an open-source, BSD-licensed library providing high-performance, easy-to-use data manipulation and data analysis tools.\n\nWe will use panda's library to read the CSV file using its storage path.And we use the head function to display the initial row or entry.", "_____no_output_____" ] ], [ [ "df=pd.read_csv(file_path)\ndf.head()", "_____no_output_____" ] ], [ [ "### Feature Selections\n\nIt is the process of reducing the number of input variables when developing a predictive model. Used to reduce the number of input variables to both reduce the computational cost of modelling and, in some cases, to improve the performance of the model.\n\nWe will assign all the required input features to X and target/outcome to Y.", "_____no_output_____" ] ], [ [ "X=df[features]\nY=df[target]", "_____no_output_____" ] ], [ [ "### Data Preprocessing\n\nSince the majority of the machine learning models in the Sklearn library doesn't handle string category data and Null value, we have to explicitly remove or replace null values. The below snippet have functions, which removes the null value if any exists. And convert the string classes data in the datasets by encoding them to integer classes.\n", "_____no_output_____" ] ], [ [ "def NullClearner(df):\n if(isinstance(df, pd.Series) and (df.dtype in [\"float64\",\"int64\"])):\n df.fillna(df.mean(),inplace=True)\n return df\n elif(isinstance(df, pd.Series)):\n df.fillna(df.mode()[0],inplace=True)\n return df\n else:return df\ndef EncodeX(df):\n return pd.get_dummies(df)", "_____no_output_____" ] ], [ [ "Calling preprocessing functions on the feature and target set.\n", "_____no_output_____" ] ], [ [ "x=X.columns.to_list()\nfor i in x:\n X[i]=NullClearner(X[i])\nX=EncodeX(X)\nY=NullClearner(Y)\nX.head()", "_____no_output_____" ] ], [ [ "#### Correlation Map\n\nIn order to check the correlation between the features, we will plot a correlation matrix. 
It is effective in summarizing a large amount of data where the goal is to see patterns.", "_____no_output_____" ] ], [ [ "f,ax = plt.subplots(figsize=(18, 18))\nmatrix = np.triu(X.corr())\nse.heatmap(X.corr(), annot=True, linewidths=.5, fmt= '.1f',ax=ax, mask=matrix)\nplt.show()", "_____no_output_____" ] ], [ [ "### Data Splitting\n\nThe train-test split is a procedure for evaluating the performance of an algorithm. The procedure involves taking a dataset and dividing it into two subsets. The first subset is utilized to fit/train the model. The second subset is used for prediction. The main motive is to estimate the performance of the model on new data.", "_____no_output_____" ] ], [ [ "x_train,x_test,y_train,y_test=train_test_split(X,Y,test_size=0.2,random_state=123)", "_____no_output_____" ] ], [ [ "### Model\n\nPoisson regression is a generalized linear model form of regression used to model count data and contingency tables. It assumes the response variable or target variable Y has a Poisson distribution, and assumes the logarithm of its expected value can be modeled by a linear combination of unknown parameters. It is sometimes known as a log-linear model, especially when used to model contingency tables.\n\n#### Model Tuning Parameters\n> **alpha** -> Constant that multiplies the penalty term and thus determines the regularization strength. alpha = 0 is equivalent to unpenalized GLMs.\n\n> **tol** -> Stopping criterion.\n\n> **max_iter** -> The maximal number of iterations for the solver.\n\nFeature Transformation\n\nPower Transformers are a family of parametric, monotonic transformations that are applied to make data more Gaussian-like. This is useful for modeling issues related to heteroscedasticity (non-constant variance), or other situations where normality is desired.\n\nCurrently, <Code>PowerTransformer</Code> supports the Box-Cox transform and the Yeo-Johnson transform. The optimal parameter for stabilizing variance and minimizing skewness is estimated through maximum likelihood.\n\n\nRefer [API](https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.PowerTransformer.html) for the parameters", "_____no_output_____" ] ], [ [ "model=make_pipeline(StandardScaler(),PowerTransformer(),PoissonRegressor())\nmodel.fit(x_train,y_train)", "_____no_output_____" ] ], [ [ "#### Model Accuracy\n\nWe will use the trained model to make a prediction on the test set.Then use the predicted value for measuring the accuracy of our model.\n\n> **score**: The **score** function returns the coefficient of determination <code>R<sup>2</sup></code> of the prediction.", "_____no_output_____" ] ], [ [ "print(\"Accuracy score {:.2f} %\\n\".format(model.score(x_test,y_test)*100))", "Accuracy score 44.87 %\n\n" ] ], [ [ "> **r2_score**: The **r2_score** function computes the percentage variablility explained by our model, either the fraction or the count of correct predictions. \n\n> **mae**: The **mean abosolute error** function calculates the amount of total error(absolute average distance between the real data and the predicted data) by our model. \n\n> **mse**: The **mean squared error** function squares the error(penalizes the model for large errors) by our model. 
", "_____no_output_____" ] ], [ [ "y_pred=model.predict(x_test)\nprint(\"R2 Score: {:.2f} %\".format(r2_score(y_test,y_pred)*100))\nprint(\"Mean Absolute Error {:.2f}\".format(mean_absolute_error(y_test,y_pred)))\nprint(\"Mean Squared Error {:.2f}\".format(mean_squared_error(y_test,y_pred)))", "R2 Score: 42.73 %\nMean Absolute Error 347.32\nMean Squared Error 509806.26\n" ] ], [ [ "#### Prediction Plot\n\nFirst, we make use of a plot to plot the actual observations, with x_train on the x-axis and y_train on the y-axis.\nFor the regression line, we will use x_train on the x-axis and then the predictions of the x_train observations on the y-axis.", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(14,10))\nplt.plot(range(20),y_test[0:20], color = \"green\")\nplt.plot(range(20),model.predict(x_test[0:20]), color = \"red\")\nplt.legend([\"Actual\",\"prediction\"]) \nplt.title(\"Predicted vs True Value\")\nplt.xlabel(\"Record number\")\nplt.ylabel(target)\nplt.show()", "_____no_output_____" ] ], [ [ "#### Creator: Viraj Jayant , Github: [Profile](https://github.com/Viraj-Jayant)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cb55484e57329de74c5ba7e8f76cd8f5bf2c7274
94,792
ipynb
Jupyter Notebook
Google_ColabNB/ML_SelfStudy_LogReg.ipynb
Pradyumna1312/ML_SelfStudy
0827d6c23bfbfdca7064536c639be120cd2e76db
[ "MIT" ]
null
null
null
Google_ColabNB/ML_SelfStudy_LogReg.ipynb
Pradyumna1312/ML_SelfStudy
0827d6c23bfbfdca7064536c639be120cd2e76db
[ "MIT" ]
null
null
null
Google_ColabNB/ML_SelfStudy_LogReg.ipynb
Pradyumna1312/ML_SelfStudy
0827d6c23bfbfdca7064536c639be120cd2e76db
[ "MIT" ]
null
null
null
219.425926
63,618
0.895951
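As a minimal sketch of the pipeline walked through in the notebook above, the same StandardScaler -> PowerTransformer -> PoissonRegressor chain can be checked with cross-validation instead of a single train/test split. The synthetic features and Poisson-distributed counts below are assumptions standing in for the notebook's blank file_path, features, and target; they are not part of the original record.

# Illustrative sketch (assumed data, not from the notebook above): 5-fold
# cross-validation of the StandardScaler -> PowerTransformer -> PoissonRegressor
# pipeline on synthetic count data.
import numpy as np
from sklearn.linear_model import PoissonRegressor
from sklearn.model_selection import cross_val_score
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler, PowerTransformer

rng = np.random.default_rng(0)
X = rng.normal(size=(500, 3))                      # assumed feature matrix
y = rng.poisson(lam=np.exp(0.3 * X[:, 0] + 0.1))   # assumed count target

pipe = make_pipeline(StandardScaler(), PowerTransformer(), PoissonRegressor())
print(cross_val_score(pipe, X, y, cv=5).mean())    # scores via the regressor's own D^2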
[ [ [ "<a href=\"https://colab.research.google.com/github/Pradyumna1312/ML_SelfStudy/blob/main/ML_SelfStudy_LogReg.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "#Logistic regression\nIt is a statistical technique for modelling the probability of a specific class or occurrence.\n\nSocial Network Ads is a categorical dataset describes information about a product being purchased through an advertisement on social media.\n\nImplementing Logistic regression model in Python to predict whether the product is purchased or not by a person using any one of the three attributes given in the dataset.\n\nFollow the following steps:\n\n1. Import Libraries", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nfrom math import exp\nimport matplotlib.pyplot as plt\nfrom scipy.stats import pearsonr", "_____no_output_____" ] ], [ [ "2. Load the dataset", "_____no_output_____" ] ], [ [ "df = pd.read_csv(\"https://raw.githubusercontent.com/Pradyumna1312/ML_SelfStudy/main/Datasets/Social_Network_Ads.csv\")\nX = df\nY = df.iloc[:,-1].values\nX = df[df.columns[[1,2,3]]]\nprint(df)", " User ID Gender Age EstimatedSalary Purchased\n0 15624510 Male 19 19000 0\n1 15810944 Male 35 20000 0\n2 15668575 Female 26 43000 0\n3 15603246 Female 27 57000 0\n4 15804002 Male 19 76000 0\n.. ... ... ... ... ...\n395 15691863 Female 46 41000 1\n396 15706071 Male 51 23000 1\n397 15654296 Female 50 20000 1\n398 15755018 Male 36 33000 0\n399 15594041 Female 49 36000 1\n\n[400 rows x 5 columns]\n" ] ], [ [ "3. Consider any one highly related input attribute with the output variable and\ndisplay the scatter plot", "_____no_output_____" ] ], [ [ "AY = np.cov(X['Age'],Y)\nESY = np.cov(X['EstimatedSalary'], Y)\nprint(\"Covariance of Age with Output\\n\", AY,'\\n\\n',\"Covariance of Estimated Salary with Output\\n\", ESY,'\\n')\n\ncorrAY, _ = pearsonr(X['Age'],Y)\ncorrESY, _ = pearsonr(X['EstimatedSalary'], Y)\nprint(\"Correlation of Age with Output\\n\", corrAY,'\\n\\n',\"Correlation of Estimated Salary with Output\\n\",corrESY)\n\n# Therefore Age is highly related to output.\n\nplt.scatter(X['Age'],Y)\nplt.title(\"Scatter plot of Highly related feature\")\nplt.show()", "Covariance of Age with Output\n [[109.89070175 3.13116541]\n [ 3.13116541 0.23026942]] \n\n Covariance of Estimated Salary with Output\n [[1.16260270e+09 5.92436717e+03]\n [5.92436717e+03 2.30269424e-01]] \n\nCorrelation of Age with Output\n 0.6224541988845291 \n\n Correlation of Estimated Salary with Output\n 0.3620830258046779\n" ], [ "from sklearn.model_selection import train_test_split\nx_train, x_test, y_train, y_test= train_test_split(X,Y,test_size= 0.41, random_state= 0)\n\nprint(x_train)", " Gender Age EstimatedSalary\n162 Female 37 33000\n34 Male 27 90000\n231 Male 39 42000\n97 Male 28 123000\n85 Female 31 118000\n.. ... ... ...\n323 Female 48 30000\n192 Male 29 43000\n117 Male 36 52000\n47 Female 27 54000\n172 Female 26 118000\n\n[236 rows x 3 columns]\n" ] ], [ [ "4. 
Use the stochastic gradient descent method to train the model; use 300 epochs\nand initialize the weights = 0, learning rate = 0.001, and threshold value = 0.5.\n", "_____no_output_____" ] ], [ [ "def normalize(X):\n    return X - X.mean()\n\n# Method to make predictions\ndef predict(X, b0, b1):\n    return np.array([1 / (1 + exp(-1*b0 + -1*b1*x)) for x in X])\n\n# Method to train the model\ndef logistic_regression(X, Y, epochs):\n\n    X = normalize(X)\n\n    # Initializing variables\n    b0 = 0\n    b1 = 0\n    L = 0.001\n\n    for epoch in range(epochs):\n        y_pred = predict(X, b0, b1)\n        D_b0 = -2 * sum((Y - y_pred) * y_pred * (1 - y_pred)) # Derivative of loss wrt b0\n        D_b1 = -2 * sum(X * (Y - y_pred) * y_pred * (1 - y_pred)) # Derivative of loss wrt b1\n        # Update b0 and b1\n        b0 = b0 - L * D_b0\n        b1 = b1 - L * D_b1\n    \n    return b0, b1", "_____no_output_____" ], [ "def sqr_err(y_true, y_pred):\n\n    return np.array([(y_pred[i]-y_true[i])**2 for i in range(len(y_true))])\n", "_____no_output_____" ] ], [ [ "\n6. Predict the MSE and accuracy of the trained model after 300 epochs", "_____no_output_____" ] ], [ [ "b0, b1 = logistic_regression(X['Age'],Y,300)\n\n# Making predictions\nX_test_norm = normalize(x_test['Age'])\ny_pred = predict(X_test_norm, b0, b1)\ny_pred = [1 if p >= 0.5 else 0 for p in y_pred]\n\nplt.clf()\nplt.scatter(x_test['Age'], y_test)\nplt.scatter(x_test['Age'], y_pred, c=\"red\")\nplt.show()\n\n# The accuracy\naccuracy = 0\nfor i in range(len(y_pred)):\n    if y_pred[i] == y_test[i]:\n        accuracy += 1\nprint(f\"Accuracy = {accuracy / len(y_pred)}\")\n\n# The MSE\n\nmse = ((y_test - y_pred) ** 2).mean()\nprint(\"MSE =\", mse)", "_____no_output_____" ] ], [ [ "5. Plot the MSE for 300 epochs", "_____no_output_____" ] ], [ [ "y_pred = predict(X_test_norm, b0, b1)\nSquared_error = sqr_err(y_test, y_pred)\nplt.figure()\nplt.plot(y_pred, Squared_error)\nplt.xlabel('Predicted Values')\nplt.ylabel('Squared Error')\nplt.show()", "_____no_output_____" ] ], [ [ "7. Validate the classification model for any 2 unseen values.", "_____no_output_____" ] ], [ [ "valid=np.array([20,40])\nvalid = normalize(valid)\ny_valid = predict(valid,b0,b1)\ny_valid = [1 if p >= 0.5 else 0 for p in y_valid]\nprint('The outputs for Ages 20, 40 are as follows:', y_valid)", "The outputs for Ages 20, 40 are as follows: [0, 1]\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cb554f1b786113d44696774413dd4bbab936379b
32,917
ipynb
Jupyter Notebook
vision/workshop/day1/training.ipynb
LeeSeunghwanSeungLee/mldl-sandbox
86a63e4c0c199a46c8463d2179b55f89aeca4c21
[ "MIT" ]
null
null
null
vision/workshop/day1/training.ipynb
LeeSeunghwanSeungLee/mldl-sandbox
86a63e4c0c199a46c8463d2179b55f89aeca4c21
[ "MIT" ]
null
null
null
vision/workshop/day1/training.ipynb
LeeSeunghwanSeungLee/mldl-sandbox
86a63e4c0c199a46c8463d2179b55f89aeca4c21
[ "MIT" ]
null
null
null
33.657464
274
0.554941
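The notebook above fits the single-feature logistic model by hand with gradient descent. For comparison, a minimal sketch of the same one-feature fit using scikit-learn's built-in LogisticRegression follows; the small age/purchase arrays are made-up stand-ins for the Social_Network_Ads CSV, not data from the record above.

# Illustrative sketch: the same one-feature logistic fit via scikit-learn.
# The ages/bought arrays are assumed data, not the notebook's CSV.
import numpy as np
from sklearn.linear_model import LogisticRegression

ages = np.array([19, 25, 31, 35, 42, 46, 50, 58]).reshape(-1, 1)  # assumed
bought = np.array([0, 0, 0, 0, 1, 1, 1, 1])                       # assumed

clf = LogisticRegression().fit(ages, bought)
print(clf.predict([[20], [40]]))        # hard labels at the 0.5 threshold
print(clf.predict_proba([[20], [40]]))  # fitted class probabilities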
[ [ [ "# Transformers installation\n! pip install transformers datasets\n# To install from source instead of the last release, comment the command above and uncomment the following one.\n# ! pip install git+https://github.com/huggingface/transformers.git", "_____no_output_____" ] ], [ [ "# Fine-tuning a pretrained model", "_____no_output_____" ], [ "In this tutorial, we will show you how to fine-tune a pretrained model from the Transformers library. In TensorFlow,\nmodels can be directly trained using Keras and the `fit` method. In PyTorch, there is no generic training loop so\nthe 🤗 Transformers library provides an API with the class [Trainer](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.Trainer) to let you fine-tune or train\na model from scratch easily. Then we will show you how to alternatively write the whole training loop in PyTorch.\n\nBefore we can fine-tune a model, we need a dataset. In this tutorial, we will show you how to fine-tune BERT on the\n[IMDB dataset](https://www.imdb.com/interfaces/): the task is to classify whether movie reviews are positive or\nnegative. For examples of other tasks, refer to the [additional-resources](#additional-resources) section!\n\n<a id='data-processing'></a>", "_____no_output_____" ], [ "## Preparing the datasets", "_____no_output_____" ] ], [ [ "#@title\nfrom IPython.display import HTML\n\nHTML('<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/_BZearw7f0w?rel=0&amp;controls=0&amp;showinfo=0\" frameborder=\"0\" allowfullscreen></iframe>')", "_____no_output_____" ] ], [ [ "We will use the [🤗 Datasets](https://github.com/huggingface/datasets/) library to download and preprocess the IMDB\ndatasets. We will go over this part pretty quickly. Since the focus of this tutorial is on training, you should refer\nto the 🤗 Datasets [documentation](https://huggingface.co/docs/datasets/) or the [preprocessing](https://huggingface.co/docs/transformers/master/en/preprocessing) tutorial for\nmore information.\n\nFirst, we can use the `load_dataset` function to download and cache the dataset:", "_____no_output_____" ] ], [ [ "from datasets import load_dataset\n\nraw_datasets = load_dataset(\"imdb\")", "_____no_output_____" ] ], [ [ "This works like the `from_pretrained` method we saw for the models and tokenizers (except the cache directory is\n_~/.cache/huggingface/dataset_ by default).\n\nThe `raw_datasets` object is a dictionary with three keys: `\"train\"`, `\"test\"` and `\"unsupervised\"`\n(which correspond to the three splits of that dataset). 
We will use the `\"train\"` split for training and the\n`\"test\"` split for validation.\n\nTo preprocess our data, we will need a tokenizer:", "_____no_output_____" ] ], [ [ "from transformers import AutoTokenizer\n\ntokenizer = AutoTokenizer.from_pretrained(\"bert-base-cased\")", "_____no_output_____" ] ], [ [ "As we saw in [preprocessing](https://huggingface.co/docs/transformers/master/en/preprocessing), we can prepare the text inputs for the model with the following command (this is an\nexample, not a command you can execute):", "_____no_output_____" ] ], [ [ "inputs = tokenizer(sentences, padding=\"max_length\", truncation=True)", "_____no_output_____" ] ], [ [ "This will make all the samples have the maximum length the model can accept (here 512), either by padding or truncating\nthem.\n\nHowever, we can instead apply these preprocessing steps to all the splits of our dataset at once by using the\n`map` method:", "_____no_output_____" ] ], [ [ "def tokenize_function(examples):\n return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\n\n\ntokenized_datasets = raw_datasets.map(tokenize_function, batched=True)", "_____no_output_____" ] ], [ [ "You can learn more about the map method or the other ways to preprocess the data in the 🤗 Datasets [documentation](https://huggingface.co/docs/datasets/).\n\nNext we will generate a small subset of the training and validation set, to enable faster training:", "_____no_output_____" ] ], [ [ "small_train_dataset = tokenized_datasets[\"train\"].shuffle(seed=42).select(range(1000))\nsmall_eval_dataset = tokenized_datasets[\"test\"].shuffle(seed=42).select(range(1000))\nfull_train_dataset = tokenized_datasets[\"train\"]\nfull_eval_dataset = tokenized_datasets[\"test\"]", "_____no_output_____" ] ], [ [ "In all the examples below, we will always use `small_train_dataset` and `small_eval_dataset`. Just replace\nthem by their _full_ equivalent to train or evaluate on the full dataset.\n\n<a id='trainer'></a>", "_____no_output_____" ], [ "## Fine-tuning in PyTorch with the Trainer API", "_____no_output_____" ] ], [ [ "#@title\nfrom IPython.display import HTML\n\nHTML('<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/nvBXf7s7vTI?rel=0&amp;controls=0&amp;showinfo=0\" frameborder=\"0\" allowfullscreen></iframe>')", "_____no_output_____" ] ], [ [ "Since PyTorch does not provide a training loop, the 🤗 Transformers library provides a [Trainer](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.Trainer)\nAPI that is optimized for 🤗 Transformers models, with a wide range of training options and with built-in features like\nlogging, gradient accumulation, and mixed precision.\n\nFirst, let's define our model:", "_____no_output_____" ] ], [ [ "from transformers import AutoModelForSequenceClassification\n\nmodel = AutoModelForSequenceClassification.from_pretrained(\"bert-base-cased\", num_labels=2)", "_____no_output_____" ] ], [ [ "This will issue a warning about some of the pretrained weights not being used and some weights being randomly\ninitialized. That's because we are throwing away the pretraining head of the BERT model to replace it with a\nclassification head which is randomly initialized. 
We will fine-tune this model on our task, transferring the knowledge\nof the pretrained model to it (which is why doing this is called transfer learning).\n\nThen, to define our [Trainer](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.Trainer), we will need to instantiate a\n[TrainingArguments](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.TrainingArguments). This class contains all the hyperparameters we can tune for the\n[Trainer](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.Trainer) or the flags to activate the different training options it supports. Let's begin by\nusing all the defaults; the only thing we then have to provide is a directory in which the checkpoints will be saved:", "_____no_output_____" ] ], [ [ "from transformers import TrainingArguments\n\ntraining_args = TrainingArguments(\"test_trainer\")", "_____no_output_____" ] ], [ [ "Then we can instantiate a [Trainer](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.Trainer) like this:", "_____no_output_____" ] ], [ [ "from transformers import Trainer\n\ntrainer = Trainer(model=model, args=training_args, train_dataset=small_train_dataset, eval_dataset=small_eval_dataset)", "_____no_output_____" ] ], [ [ "To fine-tune our model, we just need to call", "_____no_output_____" ] ], [ [ "trainer.train()", "_____no_output_____" ] ], [ [ "which will start a training that you can follow with a progress bar, which should take a couple of minutes to complete\n(as long as you have access to a GPU). It won't actually tell you anything useful about how well (or badly) your model\nis performing, however, as by default there is no evaluation during training, and we didn't tell the\n[Trainer](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.Trainer) to compute any metrics. Let's have a look at how to do that now!\n\nTo have the [Trainer](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.Trainer) compute and report metrics, we need to give it a `compute_metrics`\nfunction that takes predictions and labels (grouped in a namedtuple called [EvalPrediction](https://huggingface.co/docs/transformers/master/en/internal/trainer_utils#transformers.EvalPrediction)) and\nreturns a dictionary with string items (the metric names) and float values (the metric values).\n\nThe 🤗 Datasets library provides an easy way to get the common metrics used in NLP with the `load_metric` function.\nHere we simply use accuracy. Then we define the `compute_metrics` function that just converts logits to predictions\n(remember that all 🤗 Transformers models return the logits) and feeds them to the `compute` method of this metric.", "_____no_output_____" ] ], [ [ "import numpy as np\nfrom datasets import load_metric\n\nmetric = load_metric(\"accuracy\")\n\n\ndef compute_metrics(eval_pred):\n    logits, labels = eval_pred\n    predictions = np.argmax(logits, axis=-1)\n    return metric.compute(predictions=predictions, references=labels)", "_____no_output_____" ] ], [ [ "The compute function needs to receive a tuple (with logits and labels) and has to return a dictionary with string keys\n(the name of the metric) and float values. 
It will be called at the end of each evaluation phase on the whole arrays of\npredictions/labels.\n\nTo check if this works in practice, let's create a new [Trainer](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.Trainer) with our fine-tuned model:", "_____no_output_____" ] ], [ [ "trainer = Trainer(\n    model=model,\n    args=training_args,\n    train_dataset=small_train_dataset,\n    eval_dataset=small_eval_dataset,\n    compute_metrics=compute_metrics,\n)\ntrainer.evaluate()", "_____no_output_____" ] ], [ [ "which showed an accuracy of 87.5% in our case.\n\nIf you want to fine-tune your model and regularly report the evaluation metrics (for instance at the end of each\nepoch), here is how you should define your training arguments:", "_____no_output_____" ] ], [ [ "from transformers import TrainingArguments\n\ntraining_args = TrainingArguments(\"test_trainer\", evaluation_strategy=\"epoch\")", "_____no_output_____" ] ], [ [ "See the documentation of [TrainingArguments](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.TrainingArguments) for more options.\n\n\n<a id='keras'></a>", "_____no_output_____" ], [ "## Fine-tuning with Keras", "_____no_output_____" ] ], [ [ "#@title\nfrom IPython.display import HTML\n\nHTML('<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/rnTGBy2ax1c?rel=0&amp;controls=0&amp;showinfo=0\" frameborder=\"0\" allowfullscreen></iframe>')", "_____no_output_____" ] ], [ [ "Models can also be trained natively in TensorFlow using the Keras API. First, let's define our model:", "_____no_output_____" ] ], [ [ "import tensorflow as tf\nfrom transformers import TFAutoModelForSequenceClassification\n\nmodel = TFAutoModelForSequenceClassification.from_pretrained(\"bert-base-cased\", num_labels=2)", "_____no_output_____" ] ], [ [ "Then we will need to convert our datasets from before into standard `tf.data.Dataset` objects. Since we have fixed shapes,\nit can easily be done like this. 
First we remove the _\"text\"_ column from our datasets and set them in TensorFlow\nformat:", "_____no_output_____" ] ], [ [ "tf_train_dataset = small_train_dataset.remove_columns([\"text\"]).with_format(\"tensorflow\")\ntf_eval_dataset = small_eval_dataset.remove_columns([\"text\"]).with_format(\"tensorflow\")", "_____no_output_____" ] ], [ [ "Then we convert everything in big tensors and use the `tf.data.Dataset.from_tensor_slices` method:", "_____no_output_____" ] ], [ [ "train_features = {x: tf_train_dataset[x] for x in tokenizer.model_input_names}\ntrain_tf_dataset = tf.data.Dataset.from_tensor_slices((train_features, tf_train_dataset[\"label\"]))\ntrain_tf_dataset = train_tf_dataset.shuffle(len(tf_train_dataset)).batch(8)\n\neval_features = {x: tf_eval_dataset[x] for x in tokenizer.model_input_names}\neval_tf_dataset = tf.data.Dataset.from_tensor_slices((eval_features, tf_eval_dataset[\"label\"]))\neval_tf_dataset = eval_tf_dataset.batch(8)", "_____no_output_____" ] ], [ [ "With this done, the model can then be compiled and trained as any Keras model:", "_____no_output_____" ] ], [ [ "model.compile(\n optimizer=tf.keras.optimizers.Adam(learning_rate=5e-5),\n loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),\n metrics=tf.metrics.SparseCategoricalAccuracy(),\n)\n\nmodel.fit(train_tf_dataset, validation_data=eval_tf_dataset, epochs=3)", "_____no_output_____" ] ], [ [ "With the tight interoperability between TensorFlow and PyTorch models, you can even save the model and then reload it\nas a PyTorch model (or vice-versa):", "_____no_output_____" ] ], [ [ "from transformers import AutoModelForSequenceClassification\n\nmodel.save_pretrained(\"my_imdb_model\")\npytorch_model = AutoModelForSequenceClassification.from_pretrained(\"my_imdb_model\", from_tf=True)", "_____no_output_____" ] ], [ [ "<a id='pytorch_native'></a>", "_____no_output_____" ], [ "## Fine-tuning in native PyTorch", "_____no_output_____" ] ], [ [ "#@title\nfrom IPython.display import HTML\n\nHTML('<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/Dh9CL8fyG80?rel=0&amp;controls=0&amp;showinfo=0\" frameborder=\"0\" allowfullscreen></iframe>')", "_____no_output_____" ] ], [ [ "You might need to restart your notebook at this stage to free some memory, or execute the following code:", "_____no_output_____" ] ], [ [ "del model\ndel pytorch_model\ndel trainer\ntorch.cuda.empty_cache()", "_____no_output_____" ] ], [ [ "Let's now see how to achieve the same results as in [trainer section](#trainer) in PyTorch. First we need to\ndefine the dataloaders, which we will use to iterate over batches. 
We just need to apply a bit of post-processing to\nour `tokenized_datasets` before doing that to:\n\n- remove the columns corresponding to values the model does not expect (here the `\"text\"` column)\n- rename the column `\"label\"` to `\"labels\"` (because the model expects the argument to be named `labels`)\n- set the format of the datasets so they return PyTorch Tensors instead of lists.\n\nOur _tokenized_datasets_ has one method for each of those steps:", "_____no_output_____" ] ], [ [ "tokenized_datasets = tokenized_datasets.remove_columns([\"text\"])\ntokenized_datasets = tokenized_datasets.rename_column(\"label\", \"labels\")\ntokenized_datasets.set_format(\"torch\")\n\nsmall_train_dataset = tokenized_datasets[\"train\"].shuffle(seed=42).select(range(1000))\nsmall_eval_dataset = tokenized_datasets[\"test\"].shuffle(seed=42).select(range(1000))", "_____no_output_____" ] ], [ [ "Now that this is done, we can easily define our dataloaders:", "_____no_output_____" ] ], [ [ "from torch.utils.data import DataLoader\n\ntrain_dataloader = DataLoader(small_train_dataset, shuffle=True, batch_size=8)\neval_dataloader = DataLoader(small_eval_dataset, batch_size=8)", "_____no_output_____" ] ], [ [ "Next, we define our model:", "_____no_output_____" ] ], [ [ "from transformers import AutoModelForSequenceClassification\n\nmodel = AutoModelForSequenceClassification.from_pretrained(\"bert-base-cased\", num_labels=2)", "_____no_output_____" ] ], [ [ "We are almost ready to write our training loop; the only two things missing are an optimizer and a learning rate\nscheduler. The default optimizer used by the [Trainer](https://huggingface.co/docs/transformers/master/en/main_classes/trainer#transformers.Trainer) is [AdamW](https://huggingface.co/docs/transformers/master/en/main_classes/optimizer_schedules#transformers.AdamW):", "_____no_output_____" ] ], [ [ "from transformers import AdamW\n\noptimizer = AdamW(model.parameters(), lr=5e-5)", "_____no_output_____" ] ], [ [ "Finally, the learning rate scheduler used by default is just a linear decay from the maximum value (5e-5 here) to 0:", "_____no_output_____" ] ], [ [ "from transformers import get_scheduler\n\nnum_epochs = 3\nnum_training_steps = num_epochs * len(train_dataloader)\nlr_scheduler = get_scheduler(\"linear\", optimizer=optimizer, num_warmup_steps=0, num_training_steps=num_training_steps)", "_____no_output_____" ] ], [ [ "One last thing: we will want to use the GPU if we have access to one (otherwise training might take several hours\ninstead of a couple of minutes). To do this, we define a `device` that we will put our model and our batches on.", "_____no_output_____" ] ], [ [ "import torch\n\ndevice = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\nmodel.to(device)", "_____no_output_____" ] ], [ [ "We are now ready to train! 
To get some sense of when it will be finished, we add a progress bar over our number of\ntraining steps, using the _tqdm_ library.", "_____no_output_____" ] ], [ [ "from tqdm.auto import tqdm\n\nprogress_bar = tqdm(range(num_training_steps))\n\nmodel.train()\nfor epoch in range(num_epochs):\n    for batch in train_dataloader:\n        batch = {k: v.to(device) for k, v in batch.items()}\n        outputs = model(**batch)\n        loss = outputs.loss\n        loss.backward()\n\n        optimizer.step()\n        lr_scheduler.step()\n        optimizer.zero_grad()\n        progress_bar.update(1)", "_____no_output_____" ] ], [ [ "Note that if you are used to freezing the body of your pretrained model (like in computer vision) the above may seem a\nbit strange, as we are directly fine-tuning the whole model without taking any precaution. It actually works better\nthis way for Transformers models (so this is not an oversight on our side). If you're not familiar with what \"freezing\nthe body\" of the model means, forget you read this paragraph.\n\nNow to check the results, we need to write the evaluation loop. Like in the [trainer section](#trainer), we will\nuse a metric from the datasets library. Here we accumulate the predictions at each batch before computing the final\nresult when the loop is finished.", "_____no_output_____" ] ], [ [ "metric = load_metric(\"accuracy\")\nmodel.eval()\nfor batch in eval_dataloader:\n    batch = {k: v.to(device) for k, v in batch.items()}\n    with torch.no_grad():\n        outputs = model(**batch)\n\n    logits = outputs.logits\n    predictions = torch.argmax(logits, dim=-1)\n    metric.add_batch(predictions=predictions, references=batch[\"labels\"])\n\nmetric.compute()", "_____no_output_____" ] ], [ [ "<a id='additional-resources'></a>", "_____no_output_____" ], [ "## Additional resources", "_____no_output_____" ], [ "To look at more fine-tuning examples you can refer to:\n\n- [🤗 Transformers Examples](https://github.com/huggingface/transformers/tree/master/examples) which includes scripts\n  to train on all common NLP tasks in PyTorch and TensorFlow.\n\n- [🤗 Transformers Notebooks](https://huggingface.co/docs/transformers/master/en/notebooks) which contains various notebooks and in particular one per task (look for\n  the _how to finetune a model on xxx_).", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ] ]
cb55502d7a2c03788cb371cc3be511b78052142c
201,100
ipynb
Jupyter Notebook
week-3/week-3-1-class-empty.ipynb
xeophin/lede-algorithms
2d74d59ed55dd81de6b7fd18fc9aae3430766cc2
[ "CC-BY-3.0" ]
null
null
null
week-3/week-3-1-class-empty.ipynb
xeophin/lede-algorithms
2d74d59ed55dd81de6b7fd18fc9aae3430766cc2
[ "CC-BY-3.0" ]
null
null
null
week-3/week-3-1-class-empty.ipynb
xeophin/lede-algorithms
2d74d59ed55dd81de6b7fd18fc9aae3430766cc2
[ "CC-BY-3.0" ]
null
null
null
83.652246
63,100
0.774654
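The `compute_metrics` function in the notebook above reports accuracy only. A minimal sketch of combining several metrics with the same `load_metric` API it already uses follows; pairing F1 with accuracy is an illustrative choice, not something the notebook prescribes.

# Illustrative sketch: accuracy and F1 combined in one compute_metrics,
# following the load_metric API used in the notebook above.
import numpy as np
from datasets import load_metric

accuracy_metric = load_metric("accuracy")
f1_metric = load_metric("f1")

def compute_metrics(eval_pred):
    logits, labels = eval_pred
    predictions = np.argmax(logits, axis=-1)
    # Merge the two metric dictionaries into a single report
    return {
        **accuracy_metric.compute(predictions=predictions, references=labels),
        **f1_metric.compute(predictions=predictions, references=labels),
    }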
[ [ [ "## ## Week 3-1 - Linear Regression - class notebook\n\nThis notebook gives three examples of regression, that is, fitting a linear model to our data to find trends. For the finale, we're going to duplicate the analysis behind the Washington Post story \n", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.linear_model import LinearRegression\n%matplotlib inline", "_____no_output_____" ] ], [ [ "## Part 1 - Single variable regression\nWe'll start with some simple data on height and weight.", "_____no_output_____" ] ], [ [ "hw = pd.read_csv(\"week-3/height-weight.csv\")\nhw", "_____no_output_____" ] ], [ [ "Let's look at the distribution of each of these variables.", "_____no_output_____" ] ], [ [ "hw.height.hist()", "_____no_output_____" ], [ "hw.weight.hist()", "_____no_output_____" ] ], [ [ "Really, the interesting thing is to look at them together. For this we use a scatter plot.", "_____no_output_____" ] ], [ [ "hw.plot(kind='scatter', x='height', y='weight')", "_____no_output_____" ] ], [ [ "Clearly there's a trend that relates the two. One measure of the strength of that trend is called \"correlation\". We can compute the correlation between every pair of columns with `corr()`, though in this case it's really only between one pair.\n", "_____no_output_____" ] ], [ [ "# Show the correlations! OMG\nhw.corr()\n\n# the closer to 1 the correlation is, the closer to a line are the values", "_____no_output_____" ] ], [ [ "If you want to get better at knowing what sort of graph a correlation coefficient corresponds to, play the remarkable 8-bit game [Guess the Correlation](http://guessthecorrelation.com/)\n\nSo far so good. Now suppose we want to know what weight we should guess if we know someone is 60\" tall. We don't have anyone of that height in our data, and even id we did, they could be above or below average height. We need to build some sort of *model* which captures the trend, and guesses the average weight at each height.\n\n*ENTER THE REGRESSION*.", "_____no_output_____" ] ], [ [ "# convert pandas dataframe to a numpy array, which can be understood by sklearn\n\nx = hw[['height']].values\ny = hw[['weight']].values", "_____no_output_____" ], [ "lm = LinearRegression()\nlm.fit(x,y)", "/Users/km/.pyenv/versions/3.6.5/lib/python3.6/site-packages/sklearn/linear_model/base.py:509: RuntimeWarning: internal gelsd driver lwork query error, required iwork dimension not returned. This is likely the result of LAPACK bug 0038, fixed in LAPACK 3.2.2 (released July 21, 2010). Falling back to 'gelss' driver.\n linalg.lstsq(X, y)\n" ] ], [ [ "Ok, now we've got a \"linear regression.\" What is it? 
It's just a line `y=mx+b`, which we can recover like this:", "_____no_output_____" ] ], [ [ "m = lm.coef_[0]\nm", "_____no_output_____" ], [ "b = lm.intercept_\nb", "_____no_output_____" ] ], [ [ "We can plot this line `y=mx+b` on top of the scatterplot to see it.", "_____no_output_____" ] ], [ [ "hw.plot(kind='scatter', x='height', y='weight')\nplt.plot(hw.height, m*hw.height+b, '--')", "_____no_output_____" ] ], [ [ "So if we want to figure out the average weight of someone who is 60\" tall, we can compute", "_____no_output_____" ] ], [ [ "m*60+b", "_____no_output_____" ] ], [ [ "There's a shortcut for this, which will come in handy when we add variables", "_____no_output_____" ] ], [ [ "lm.predict(60)", "_____no_output_____" ] ], [ [ "## Part 2 - Multi-variable regression \n\nWe can do essentially the same trick with one more independent variable. Then our regression equation is `y = m1*x1 + m2*x2 + b`. We'll use one of the built-in `sklearn` datasets as demonstration data.", "_____no_output_____" ] ], [ [ "from sklearn import datasets\nfrom mpl_toolkits.mplot3d import Axes3D\ndiabetes = datasets.load_diabetes()\n\nprint(diabetes.DESCR)", "Diabetes dataset\n================\n\nNotes\n-----\n\nTen baseline variables, age, sex, body mass index, average blood\npressure, and six blood serum measurements were obtained for each of n =\n442 diabetes patients, as well as the response of interest, a\nquantitative measure of disease progression one year after baseline.\n\nData Set Characteristics:\n\n :Number of Instances: 442\n\n :Number of Attributes: First 10 columns are numeric predictive values\n\n :Target: Column 11 is a quantitative measure of disease progression one year after baseline\n\n :Attributes:\n :Age:\n :Sex:\n :Body mass index:\n :Average blood pressure:\n :S1:\n :S2:\n :S3:\n :S4:\n :S5:\n :S6:\n\nNote: Each of these 10 feature variables have been mean centered and scaled by the standard deviation times `n_samples` (i.e. the sum of squares of each column totals 1).\n\nSource URL:\nhttp://www4.stat.ncsu.edu/~boos/var.select/diabetes.html\n\nFor more information see:\nBradley Efron, Trevor Hastie, Iain Johnstone and Robert Tibshirani (2004) \"Least Angle Regression,\" Annals of Statistics (with discussion), 407-499.\n(http://web.stanford.edu/~hastie/Papers/LARS/LeastAngle_2002.pdf)\n\n" ], [ "# take a look at the predictive (independent) variables\n# The variables to be used for prediction\ndf = pd.DataFrame(diabetes.data,\n                  columns=['age', 'sex', 'bmi', 'bp', 's1', 's2', 's3', 's4', 's5', 's6'])\ndf.hist()", "_____no_output_____" ], [ "# take a look at the \"target\" (dependent) variable\n", "/Users/km/.pyenv/versions/3.6.5/lib/python3.6/site-packages/sklearn/linear_model/base.py:509: RuntimeWarning: internal gelsd driver lwork query error, required iwork dimension not returned. This is likely the result of LAPACK bug 0038, fixed in LAPACK 3.2.2 (released July 21, 2010). Falling back to 'gelss' driver.\n linalg.lstsq(X, y)\n" ], [ "# fit a regression\n# Which columns do we want to use to try to predict? I’m choosing age and BMI here\n# (BMI is “body mass index”, it’s a measure of weight compared to height)\nindices = (0, 2)\n\nx = diabetes.data[:, indices]\ny = diabetes.target\n\nlm2 = LinearRegression()\nlm2.fit(x, y)", "_____no_output_____" ] ], [ [ "Ok awesome, we've fit a regression with multiple variables. What did we get? Let's check the coefficients", "_____no_output_____" ] ], [ [ "lm2.coef_", "_____no_output_____" ] ], [ [ "Now we have *two* coefficients. 
They're both positive, which means that both age and BMI are associated with increased disease progression. We have an intercept too, the predicted value of the target variable when both age and BMI are zero (which never happens, but that's the way the math works).", "_____no_output_____" ] ], [ [ "lm2.intercept_", "_____no_output_____" ] ], [ [ "To really see what's going on here, we're going to plot the whole thing in beautiful 3D. Now instead of a regression line, we have a regression *plane.* Are you ready for this?", "_____no_output_____" ] ], [ [ "# Helpful function that we'll use later for making more 3D regression plots\ndef plot_regression_3d(x, y, z, model, elev=30, azim=30, xlab=None, ylab=None):\n    fig = plt.figure()\n    ax = Axes3D(fig, elev=elev, azim=azim)\n\n    # This looks gnarly, but we're just taking four points at the corners of the plot, \n    # and using predict() to determine their vertical position\n    xmin = x.min()\n    xmax = x.max()\n    ymin = y.min()\n    ymax = y.max()\n    corners_x = np.array([[xmin, xmin], [xmax, xmax]])\n    corners_y = np.array([[ymin, ymax], [ymin, ymax]])\n    corners_z = model.predict(np.array([[xmin, xmin, xmax, xmax], [ymin, ymax, ymin, ymax]]).T).reshape((2, 2))\n    ax.plot_surface(corners_x, corners_y, corners_z, alpha=0.5)\n\n    ax.scatter(x, y, z, alpha=0.3)\n\n    ax.set_xlabel(xlab)\n    ax.set_ylabel(ylab)\n\n", "_____no_output_____" ], [ "# Now plot our diabetes data\nplot_regression_3d(x[:, 0], x[:, 1], y, lm2, elev=20, azim=0, xlab='age',\n                   ylab='BMI')\n", "_____no_output_____" ] ], [ [ "## Part 3 - Analysis of 2016 voters\n\nAside from prediction, we can use regression to attempt explanations. The coefficient `m` in the above encodes a guess about the existence and strength of the relationship between `x` and `y`. If it's zero, we guess that they're unrelated. Otherwise, it tells us how they are likely to vary together.\n\nIn this section we're going to try to understand what motivated people to vote for Trump by looking at the relationship between vote and other variables in the [2016 American National Election Study data](http://electionstudies.org/project/2016-time-series-study/). \n\nThere were quite a few statistical analyses of this \"why did Trump win?\" kind after the election, by journalists and researchers. \n\n- [Racism motivated Trump voters more than authoritarianism](https://www.washingtonpost.com/news/monkey-cage/wp/2017/04/17/racism-motivated-trump-voters-more-than-authoritarianism-or-income-inequality) - Washington Post\n- [The Rise of American Authoritarianism](https://www.vox.com/2016/3/1/11127424/trump-authoritarianism) - Vox\n- [Education, Not Income, Predicted Who Would Vote For Trump](https://fivethirtyeight.com/features/education-not-income-predicted-who-would-vote-for-trump/) - 538\n- [Why White Americans Voted for Trump – A Research Psychologist’s Analysis](https://techonomy.com/2018/02/white-americans-voted-trump-research-psychologists-analysis/) - Techonomy\n- [Status threat, not economic hardship, explains the 2016 presidential vote](http://www.pnas.org/content/early/2018/04/18/1718155115) - Diana C. Mutz, PNAS\n- [Trump thrives in areas that lack traditional news outlets](https://www.politico.com/story/2018/04/08/news-subscriptions-decline-donald-trump-voters-505605) - Politico\n- [The Five Types of Trump Voters](https://www.voterstudygroup.org/publications/2016-elections/the-five-types-trump-voters) - Voter Study Group\n\nMany of these used regression, but some did not. 
My favorite is the Voter Study Group analysis which used clustering -- just like we learned last week. It has a good discussion of the problems with using a regression to answer this question. \n\nWe're going to use regression anyway, along the lines of the [Washington Post piece](https://www.washingtonpost.com/news/monkey-cage/wp/2017/04/17/racism-motivated-trump-voters-more-than-authoritarianism-or-income-inequality/?utm_term=.01d9d3764f2c) which also uses ANES data. In particular, a regression on variables representing attitudes about authoritarianism and minorities.\n", "_____no_output_____" ] ], [ [ "# read 'anes_timeseries_2016_rawdata.csv'\nanes = pd.read_csv('week-3/anes_timeseries_2016_rawdata.csv')\nprint(anes.shape)\nanes.head()", "/Users/km/.pyenv/versions/3.6.5/lib/python3.6/site-packages/IPython/core/interactiveshell.py:2705: DtypeWarning: Columns (790,1129,1131) have mixed types. Specify dtype option on import or set low_memory=False.\n interactivity=interactivity, compiler=compiler, result=result)\n" ] ], [ [ "The first thing we need to do is construct indices of \"authoritarianism\" and \"racism\" from answers to the survey questions. We're following exactly what the Washington Post did here. Are \"authoritarianism\" and \"racism\" accurate and/or useful words for indices constructed of these questions? Our choice of words will hugely shape the impression that readers come away with -- even if we do the exact same calculations.\n\nWe start by dropping everything we don't need: we keep only white voters, only people who voted, and just the cols we want", "_____no_output_____" ] ], [ [ "# drop non-white voters\nwhite_col = 'V161310a'\nanes = anes[anes[white_col] == 1]\nanes.shape", "_____no_output_____" ], [ "# keep only Trump, Clinton voters\nvoted_col = 'V162034a' # 1=Clinton, 2=Trump, 3=Johnson, 4=Stein, negative numbers = didn't vote or won't say\nanes = anes[(anes[voted_col] == 1) | (anes[voted_col] == 2)]\nanes.shape", "_____no_output_____" ], [ "# keep only columns on authoritarian, racial scales\nauthoritarian_cols = ['V162239', 'V162240', 'V162241', 'V162242']\nracial_cols = ['V162211', 'V162212', 'V162213', 'V162214']\nanes = anes[[voted_col] + authoritarian_cols + racial_cols]\nanes.head()", "_____no_output_____" ] ], [ [ "Now we have to decode these values.\n\nFor the child-rearing questions, the code book tells us that 1 means the first option and 2 means the second. But 3 means both and then there are all sorts of codes that mean the question wasn't answered, in different ways. And then there's the issue that the questions have different directions: Option 1 might mean either \"more\" or \"less\" authoritarian. So we have a custom translation dictionary for each column. This is the stuff that dreams are made of, people.", "_____no_output_____" ] ], [ [ "# recode the authoritarian variables\n# These variables are proxies for authoritarian attitudes. Why are these questions about children? \n# Because that's the only way to get honest answers! It's a long story. 
\n# See https://www.vox.com/2016/3/1/11127424/trump-authoritarianism\n\n# All authoritarian traits are coded 1 for first option and 2 for second\n# We turn this into +1/0/-1 where +1 is the more authoritarian option, and 0 means no data\n\n# Child trait more important: independence or respect\nanes['V162239'].replace({1: -1, 2: 1, 3: 0, -6: 0, -7: 0, -8: 0, -9: 0},\n inplace=True)\n\n# Child trait more important: curiosity or good manners\nanes['V162240'].replace({1: -1, 2: 1, 3: 0, -6: 0, -7: 0, -8: 0, -9: 0},\n inplace=True)\n\n# Child trait more important: obedience or self-reliance\nanes['V162241'].replace({1: 1, 2: -1, 3: 0, -6: 0, -7: 0, -8: 0, -9: 0},\n inplace=True)\n\n# Child trait more important: considerate or well-behaved\nanes['V162242'].replace({1: -1, 2: 1, 3: 0, -6: 0, -7: 0, -8: 0, -9: 0},\n inplace=True)", "_____no_output_____" ], [ "# recode the racial variables\n# All racial questions are coded on a five point scale, 1=agree strongy, 5=disagree strongly\n# We recode so that least tolerant = +2 and most tolerant =-2\n\n# Agree/disagree: blacks shd work way up w/o special favors\nanes['V162211'].replace(\n {1: 2, 2: 1, 3: 0, 4: -1, 5: -2, -6: 0, -7: 0, -8: 0, -9: 0}, inplace=True)\n\n# Agree/disagree: past slavery make more diff for blacks\nanes['V162212'].replace(\n {1: -2, 2: -1, 3: 0, 4: 1, 5: 2, -6: 0, -7: 0, -8: 0, -9: 0}, inplace=True)\n\n# Agree/disagree: blacks have gotten less than deserve\nanes['V162213'].replace(\n {1: -2, 2: -1, 3: 0, 4: 1, 5: 2, -6: 0, -7: 0, -8: 0, -9: 0}, inplace=True)\n\nanes['V162214'].replace(\n {1: 2, 2: 1, 3: 0, 4: -1, 5: -2, -6: 0, -7: 0, -8: 0, -9: 0}, inplace=True)", "_____no_output_____" ], [ "# check the results\nanes.head()", "_____no_output_____" ] ], [ [ "Finally, add the authority and racial columns together to form the composite indexes.", "_____no_output_____" ] ], [ [ "# sum each group of columns. End up with vote, authority, racial columns\nanes['authority'] = anes[authoritarian_cols].sum(axis=1)\nanes['racial'] = anes[racial_cols].sum(axis=1)\nanes['vote'] = anes[voted_col]\nanes = anes[['vote', 'authority', 'racial']]\nanes.head(10)", "_____no_output_____" ] ], [ [ "Data prepared at last! Let's first look at the scatter plots", "_____no_output_____" ] ], [ [ "anes.plot(kind='scatter', x='authority', y='vote')", "_____no_output_____" ] ], [ [ "Er, right... all this says is that we've got votes for both candidates at all levels of authoritarianism. To get a sense of how many dots in each point, we can add some jitter and make the points a bit transparent.", "_____no_output_____" ] ], [ [ "# function to add noise to the values in the array\n\n# add a noise to the values in the array\ndef jitter(arr):\n # pick a standard deviation for the jitter of 3% of the data range\n stdev = .02 * (max(arr) - min(arr))\n return arr + np.random.randn(len(arr)) * stdev", "_____no_output_____" ], [ "# plot vote vs authoritarian variables with jitter\nplt.scatter(x=jitter(anes.authority), y=jitter(anes.vote), alpha=0.05)", "_____no_output_____" ] ], [ [ "Note that, generally, as you move to the right (more authoritarian) there are more Trump voters. We can do this same plot with the racial axis.", "_____no_output_____" ] ], [ [ "# plot vote vs racial variables with jitter\n\n# ... oh fuck it, this is just copy-pasting stuff from the class notebook, \n# I'd rather just keep listening to the lecture instead of wasting brainpower\n# on copy-pasting", "_____no_output_____" ] ], [ [ "Similar deal. 
The axis is smoother because we are summing numbers from a five point agree/disagree scale, rather than just the two-option questions of the authoritarianism subplot. \n\nNow in glorious 3D.", "_____no_output_____" ] ], [ [ "# 3D plot of both sets of vars", "_____no_output_____" ] ], [ [ "Same problem: everything is on top of each other. Same solution.", "_____no_output_____" ] ], [ [ "# jittered 3D plot", "_____no_output_____" ] ], [ [ "You can definitely see the change along both axes. But which factor matters more? Let's get quantitative by fitting a linear model. Regression to the rescue!", "_____no_output_____" ] ], [ [ "# This is some drudgery to convert the dataframe into the format that sklearn needs: \n", "_____no_output_____" ], [ "# This does the actual regression\n", "_____no_output_____" ], [ "# call plot_regression_3d", "_____no_output_____" ] ], [ [ "Well that looks cool but doesn't really clear it up for me. Let's look at the coefficients.\n", "_____no_output_____" ], [ "Looks like the coefficient on `racial` is higher. But wait, we chose the numbers that we turned each response into! We could have coded `racial` on a +/-1 scale instead of a +/-2 scale, or a +/-10 scale. So... we could get any number we want just by changing how we convert the data.\n\nTo fix this, we're going to standardize the values (both dependent and independent) to have mean 0 and standard deviation 1. This gives us [standardized coefficients](https://en.wikipedia.org/wiki/Standardized_coefficient).", "_____no_output_____" ] ], [ [ "# normalize the columns and take a look", "_____no_output_____" ], [ "# fit another regression", "_____no_output_____" ] ], [ [ "What we have now is the same data, just scaled in each direction", "_____no_output_____" ] ], [ [ "# call plot_regression_3d", "_____no_output_____" ] ], [ [ "Finally, we can compare the coefficients directly. It doesn't matter what range we used to code the survey answers, because we divided it out during normalization.\n", "_____no_output_____" ], [ "So there we have it. For white voters in the 2016 election, the standardized regression coefficient on racial factors is quite a bit bigger than the standardized coefficient on authoritarianism. But what does this actually mean?", "_____no_output_____" ] ], [ [ "# what's the new intercept?", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ] ]
cb555274500be0769303b3ae2cfcc1ddb1128297
100,156
ipynb
Jupyter Notebook
Pattern Memorization.ipynb
jongukim/notebooks-deep-learning
d702133f9f04121798dacaa493fb3abc3033e18f
[ "MIT" ]
null
null
null
Pattern Memorization.ipynb
jongukim/notebooks-deep-learning
d702133f9f04121798dacaa493fb3abc3033e18f
[ "MIT" ]
null
null
null
Pattern Memorization.ipynb
jongukim/notebooks-deep-learning
d702133f9f04121798dacaa493fb3abc3033e18f
[ "MIT" ]
null
null
null
138.720222
14,620
0.788959
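Several closing cells of the notebook above are deliberately left empty for the class to fill in. A minimal sketch of the standardize-and-refit step they describe is below; it assumes the `anes` frame with `vote`, `authority`, and `racial` columns built earlier in that notebook, and the variable names introduced here are otherwise made up.

# Illustrative sketch: standardized coefficients for vote ~ authority + racial,
# assuming the `anes` dataframe prepared in the cells above.
from sklearn.linear_model import LinearRegression

z = (anes - anes.mean()) / anes.std()   # bring every column to mean 0, std 1
X = z[['authority', 'racial']].values
y = z['vote'].values

lm_std = LinearRegression().fit(X, y)
print(lm_std.coef_)       # directly comparable once the coding scales are divided out
print(lm_std.intercept_)  # close to 0 after centering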
[ [ [ "import numpy as np\nimport tensorflow as tf\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "mnist = tf.contrib.learn.datasets.load_dataset(\"mnist\")\ntrain_data = mnist.train.images # Returns np.array\ntrain_labels = np.asarray(mnist.train.labels, dtype=np.int32)\neval_data = mnist.test.images # Returns np.array\neval_labels = np.asarray(mnist.test.labels, dtype=np.int32)", "Extracting MNIST-data\\train-images-idx3-ubyte.gz\nExtracting MNIST-data\\train-labels-idx1-ubyte.gz\nExtracting MNIST-data\\t10k-images-idx3-ubyte.gz\nExtracting MNIST-data\\t10k-labels-idx1-ubyte.gz\n" ], [ "BATCH_SIZE = 512\nRNN_HIDDEN_SIZE = 128", "_____no_output_____" ], [ "def model_fn(features, labels, mode):\n # input_layer = tf.reshape(features[\"x\"], [-1, 784, 1])\n\n # rnn_cell = tf.nn.rnn_cell.LSTMCell(RNN_HIDDEN_SIZE)\n # initial_state = rnn_cell.zero_state(batch_size=BATCH_SIZE, dtype=tf.float32)\n # _, state = tf.nn.dynamic_rnn(rnn_cell, input_layer, initial_state=initial_state, dtype=tf.float32)\n \n # dense1 = tf.layers.dense(inputs=tf.reshape(state, [-1, RNN_HIDDEN_SIZE * 2]), units=512, activation=tf.nn.relu)\n # dense2 = tf.layers.dense(inputs=dense1, units=1024, activation=tf.nn.relu)\n \n input_layer = tf.reshape(features['x'], [-1, 28, 28, 1])\n conv1 = tf.layers.conv2d(inputs=input_layer,\n filters=32,\n kernel_size=[5, 5],\n padding=\"same\",\n activation=tf.nn.relu)\n pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2)\n\n conv2 = tf.layers.conv2d(inputs=pool1,\n filters=64,\n kernel_size=[5, 5],\n padding=\"same\",\n activation=tf.nn.relu)\n pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2)\n\n pool2_flat = tf.reshape(pool2, [-1, 7 * 7 * 64])\n dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu)\n \n predictions = tf.layers.dense(inputs=dense, units=784)\n \n if mode == tf.estimator.ModeKeys.PREDICT:\n return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)\n\n loss = tf.losses.mean_squared_error(labels=labels, predictions=predictions)\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n optimizer = tf.train.AdamOptimizer(learning_rate=0.001)\n train_op = optimizer.minimize(loss=loss, global_step=tf.train.get_global_step())\n return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op)\n\n eval_metric_ops = {'distance': tf.metrics.mean_squared_error(labels=labels, predictions=predictions)}\n return tf.estimator.EstimatorSpec(mode=mode, loss=loss, eval_metric_ops=eval_metric_ops)", "_____no_output_____" ], [ "est = tf.estimator.Estimator(model_fn=model_fn, model_dir='pattern_memorization_model')", "INFO:tensorflow:Using default config.\nINFO:tensorflow:Using config: {'_model_dir': 'pattern_memorization_model', '_tf_random_seed': None, '_save_summary_steps': 100, '_save_checkpoints_steps': None, '_save_checkpoints_secs': 600, '_session_config': None, '_keep_checkpoint_max': 5, '_keep_checkpoint_every_n_hours': 10000, '_log_step_count_steps': 100, '_service': None, '_cluster_spec': <tensorflow.python.training.server_lib.ClusterSpec object at 0x000001ECCB871FD0>, '_task_type': 'worker', '_task_id': 0, '_global_id_in_cluster': 0, '_master': '', '_evaluation_master': '', '_is_chief': True, '_num_ps_replicas': 0, '_num_worker_replicas': 1}\n" ], [ "logging_hook = tf.train.LoggingTensorHook(tensors={}, at_end=True)\n\ntrain_input_fn = tf.estimator.inputs.numpy_input_fn(x={\"x\": train_data},\n y=train_data,\n batch_size=BATCH_SIZE,\n num_epochs=None,\n 
shuffle=True)\nest.train(input_fn=train_input_fn, steps=2000, hooks=[logging_hook])", "_____no_output_____" ], [ "eval_input_fn = tf.estimator.inputs.numpy_input_fn(x={\"x\": eval_data},\n y=eval_data,\n num_epochs=1,\n shuffle=False)\nest.evaluate(input_fn=eval_input_fn)", "INFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Starting evaluation at 2018-03-08-07:09:59\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from pattern_memorization_model\\model.ckpt-2001\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\nINFO:tensorflow:Finished evaluation at 2018-03-08-07:10:00\nINFO:tensorflow:Saving dict for global step 2001: distance = 0.0041406923, global_step = 2001, loss = 0.0041421107\n" ], [ "test_images = eval_data[np.random.choice(mnist.test.num_examples, 3)]\ninput_fn = tf.estimator.inputs.numpy_input_fn(x={'x': test_images}, shuffle=False)\npreds = list(est.predict(input_fn))", "INFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from pattern_memorization_model\\model.ckpt-2001\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\n" ], [ "plt.rcParams[\"figure.figsize\"] = [13, 6]\n\nfor i in range(3):\n plt.subplot(1, 2, 1)\n plt.imshow(np.reshape(test_images[i], [28, 28]), cmap='gray')\n plt.subplot(1, 2, 2)\n plt.imshow(np.reshape(preds[i], [28, 28]), cmap='gray')\n plt.show()", "_____no_output_____" ], [ "test_image = np.random.randn(1, 28, 28).astype(np.float32)\ntest_image[test_image < 0] = 0\ntest_image[test_image > 1] = 1.0\nfor i in range(10, 20):\n for j in range(28):\n test_image[0][i][j] = 1.0\ntest_image[0]", "_____no_output_____" ], [ "input_fn = tf.estimator.inputs.numpy_input_fn(x={'x': test_image}, shuffle=False)\npred = list(est.predict(input_fn))\n\nplt.subplot(1, 2, 1)\nplt.imshow(np.reshape(test_image, [28, 28]), cmap='gray')\nplt.subplot(1, 2, 2)\nplt.imshow(np.reshape(pred[0], [28, 28]), cmap='gray')\nplt.show()", "INFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from pattern_memorization_model\\model.ckpt-2001\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\n" ], [ "est.evaluate(input_fn=tf.estimator.inputs.numpy_input_fn(x={\"x\": test_image},\n y=test_image.reshape(1, 784),\n num_epochs=1,\n shuffle=False))", "INFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Starting evaluation at 2018-03-08-08:04:11\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from pattern_memorization_model\\model.ckpt-2001\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\nINFO:tensorflow:Finished evaluation at 2018-03-08-08:04:11\nINFO:tensorflow:Saving dict for global step 2001: distance = 0.2170231, global_step = 2001, loss = 0.2170231\n" ], [ "test_image = np.random.randn(1, 28, 28).astype(np.float32)\ntest_image[test_image < 0] = 0\ntest_image[test_image > 0] = 0.25\nfor i in list(range(0, 4)) + list(range(24, 28)):\n for j in range(0, 28):\n test_image[0][i][j] = 1.0\n test_image[0][j][i] = 1.0\ntest_image[0]", "_____no_output_____" ], [ "input_fn = tf.estimator.inputs.numpy_input_fn(x={'x': test_image}, shuffle=False)\npred = list(est.predict(input_fn))\n\nplt.subplot(1, 2, 1)\nplt.imshow(np.reshape(test_image, 
[28, 28]), cmap='gray')\nplt.subplot(1, 2, 2)\nplt.imshow(np.reshape(pred[0], [28, 28]), cmap='gray')\nplt.show()", "INFO:tensorflow:Calling model_fn.\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from pattern_memorization_model\\model.ckpt-2001\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\n" ] ] ], [ [ "# End", "_____no_output_____" ] ] ]
[ "code", "markdown" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
cb55534c4ae78508ef5172ce9ae6378685fadaf0
18,064
ipynb
Jupyter Notebook
.ipynb_checkpoints/exp_python1-checkpoint.ipynb
yyamnk/numerical-methods-py3
c2ba7d516b81348cb57744c78d9238f11e3104ce
[ "MIT" ]
null
null
null
.ipynb_checkpoints/exp_python1-checkpoint.ipynb
yyamnk/numerical-methods-py3
c2ba7d516b81348cb57744c78d9238f11e3104ce
[ "MIT" ]
null
null
null
.ipynb_checkpoints/exp_python1-checkpoint.ipynb
yyamnk/numerical-methods-py3
c2ba7d516b81348cb57744c78d9238f11e3104ce
[ "MIT" ]
1
2020-07-01T06:31:13.000Z
2020-07-01T06:31:13.000Z
20.955916
137
0.514172
[ [ [ "# はじめに\n\n本実験では,PythonとGoogle Colaboratory(以下,Colab)を使用して,力学系の数値解析手法を学ぶ.PythonとColabの特徴は以下のとおり.\n\n- Pythonとは\n - プログラミング言語の1つで,現在,広く利用されている.\n- Google Colaboratory(Colab)とは\n - ブラウザ上で Python を記述して実行できるツール.\n - 具体的には,まずブラウザで表示されるノートブック(今開いているこのページが1つのノートブックである)を作成し,そこにPythonコードの記述と実行を行う.\n - Pythonコードの他に,テキストも入力できる\n - 連続使用が12時間を超えるか,ノートブックを90分程度操作しないと,自動的に切断される.\n - 上記の制約のため,ノートブックを細かく保存すること(保存方法は次に説明する)\n\n\nColabの概要について説明している,[Google Colaboratory の開始方法 (日本語字幕あり)](https://www.youtube.com/watch?v=inN8seMm7UI)を視聴すること.", "_____no_output_____" ], [ "# 実験の進め方\n\n本実験では,Colabのノートブックを使って進めていく.\nノートブックは,複数のセルから構成されている.\nセルには,文字を入力するための`テキストセル`,Pythonのコードを入力するための`コードセル`がある.\nセルに書かれた説明を読み,コードを実行していくことで,内容の理解を深めてほしい.\n特に,\n<font color=\"red\">\n(TODO)\n</font>\nと書かれた指示は必ず実行すること.\nノートブックの内容を順に理解していけば,最後のレポート課題が解けるはずだ.\n\nノートブックには,各自がコードの追加や,実行ができる.プログラムを学ぶためにその動作を確認することは重要なので,積極的にコードを書き,実行してみること.\n\nその試行錯誤の過程でノートブックを壊滅的に壊してしまった場合でも,この初期状態から再開できる.その場合は,\n[実験のTopページ](https://github.com/yyamnk/numerical-methods-py3/blob/master/uu_experiment.md)\nからやり直すこと.\n", "_____no_output_____" ], [ "次に,ノートブックの保存方法を説明する.\n現在,開いているノートブックは教科書用であり,編集や実行ができない状態である.そこで,次の手順で実験を進めること.\n\n1. [実験のTopページ](https://github.com/yyamnk/numerical-methods-py3/blob/master/uu_experiment.md)から,教科書用ノートブックを開く(今ここ)\n2. 下図のように,ブラウザの左上から`ファイル` -> `ドライブにコピーを保存`をクリックし,ノートブックを各自のGoogle Driveへ保存する.\n3. コピーしたノートブックが開かれたら,それを編集・実行しながら学習を進める.\n\n![図1 ノートブックの保存](https://docs.google.com/uc?export=download&id=1_0LtxmcJs4FmNjKKKNvr41qCNKNG4aoE)\n\n保存したコピーが開けたら,実験を始めよう.", "_____no_output_____" ], [ "# 四則演算\n\nまず最初に,Pythonによる四則演算を学ぶ.\nPythonコードの入力と実行は,次の手順で行う.\n\n1. ブラウザ左上にある「+ コード」をクリックして`コードセル`を追加\n2. 追加されたセルをクリックし,プログラムを記述\n3. セルの左端にある「▷(再生ボタン)」をクリックして,セルを実行する\n - [方法がわからない場合は,ここを視聴せよ](https://youtu.be/inN8seMm7UI?t=27)\n\nここでは,例として,既にPythonコードを入力したセルを用意した.このセルを実行してみよう.", "_____no_output_____" ] ], [ [ "# ここはコメント.Pythonは#記号以降の文字を無視する.\n\n1 + 5 # 和,このセルを実行すると,計算結果が出力される(初回の実行では多少の時間がかかる)", "_____no_output_____" ] ], [ [ "セルを実行すると,結果が出力されたはずだ.", "_____no_output_____" ], [ "<font color=\"red\">\n(TODO)適当な2つの数の,差・積・商を計算するコードを作成し,実行せよ.ここでは,1つのセルに1つの四則演算のみとすること.また,手計算と比較して動作を確かめよ.\n</font>", "_____no_output_____" ] ], [ [ "# 差を計算するコード\n", "_____no_output_____" ], [ "# 積を計算するコード\n", "_____no_output_____" ], [ "# 商を計算するコード\n", "_____no_output_____" ] ], [ [ "# 累乗\n\nPythonでは,累乗を`**`で表す.\n\n<font color=\"red\">\n(TODO)次のコードを実行して動作を確かめよ.\n</font>", "_____no_output_____" ] ], [ [ "2 ** 3 # 2の3乗(2 ^ 3)", "_____no_output_____" ], [ "2 ** 0.5 # 2の平方根", "_____no_output_____" ] ], [ [ "# 複素数\n\nPythonでは,複素数の演算もサポートされている.虚数には`j`をつけること.\n\n<font color=\"red\">\n(TODO)以下のセルを実行し,動作を確認せよ.\n</font>", "_____no_output_____" ] ], [ [ "(1 + 2j) + (3 + 4j)", "_____no_output_____" ] ], [ [ "# 変数\n\nPythonでは,数値等を変数に代入できる.\nここでは変数の定義・代入と,変数を使った計算をやってみよう.\n\n<font color=\"red\">\n(TODO)以下のセルを実行し,動作を確認せよ.\n</font>", "_____no_output_____" ] ], [ [ "x = 10 # `x`という変数を定義し,10を代入する.\n# 代入のみの場合,このセルを実行しても出力は無いが,内部で処理はされている.", "_____no_output_____" ] ], [ [ "このように定義した変数は,同一のnotebookでいつでも参照できる.", "_____no_output_____" ] ], [ [ "x # 定義した変数の参照方法1: 変数のみを書く", "_____no_output_____" ], [ "print(x) # # 定義した変数の参照方法2: print()を用いる", "_____no_output_____" ] ], [ [ "<font color=\"red\">\n(TODO)次のセルを実行し,変数を用いた四則演算ができることを確かめよ\n</font>", "_____no_output_____" ] ], [ [ "r = 5 # 新たな変数を定義\npi = 3.14 # 新たな変数を定義, 変数名には複数の文字を使っても良い\n2 * r * pi # 変数を使って計算する.", "_____no_output_____" ] ], [ [ "# プログラム例:2次方程式の解の公式\n\nここまでの知識で,2次方程式の解を計算するプログラムを考えてみよう.\n\n$$\na x^2 + b x + c = 0\n$$\nの解は,解の公式より\n$$\nx = 
\\frac{-b \\pm \\sqrt{b^2 - 4 a c}}{2a}\n$$\nである.\n\n<font color=\"red\">\n(TODO)次のセルを実行し,解が計算できることを確かめよ\n</font>", "_____no_output_____" ] ], [ [ "a = 1\nb = -6\nc = 13\n\n(-b + (b ** 2 - 4 * a * c) ** 0.5) / (2 * a) # 1個めの解", "_____no_output_____" ], [ "(-b - (b ** 2 - 4 * a * c) ** 0.5) / (2 * a) # 2個めの解", "_____no_output_____" ] ], [ [ "# Numpy 配列\n\nプログラミングでは,複数の要素(値)の集合を管理したいことがある.これを実現するデータ構造を配列と呼ぶ.Pythonには,配列を実現する方法はいくつかあるが,本実験では,`Numpy 配列`を用いる.基本的な使い方は以下の通り.\n\n<font color=\"red\">\n(TODO)以下のセルを実行し,動作を確認せよ\n</font>", "_____no_output_____" ] ], [ [ "# Numpyを用いるためにライブラリを読み込む.\nimport numpy as np # これにより,以降のセルでは`np`と書くことでnumpyの機能が使える.", "_____no_output_____" ], [ "# 次に,配列を定義する.\nxs = np.array([1, 2, 3, 3, 5]) # 要素は[]で指定する\nxs # 確認のため,xsを出力する", "_____no_output_____" ], [ "# 全ての要素がゼロの配列も定義できる.ここでは要素が5個の配列を定義する.\nxs = np.zeros(5)\nxs # 確認のため,xsを出力する", "_____no_output_____" ], [ "# 定義したNumpy配列へ値を代入する\n# 代入するときは,0からはじまる配列の要素番号を指定し,`=`で値を代入する\nxs[0] = 10 # 配列の先頭に代入\nxs[1] = 20 # 2番目の要素に代入\nxs[2] = 30 # 3番目の要素に代入\nxs # 確認のため,xsを出力する", "_____no_output_____" ], [ "# 配列の要素を呼び出したい場合は,`配列名[インデックス番号]`とする\nxs[2]", "_____no_output_____" ] ], [ [ "数値計算でよく用いるのは,初期値,値の刻み幅,最終値から配列を作成することだ.これは,次のように記述できる.", "_____no_output_____" ] ], [ [ "ts = np.arange(start=10, stop=15, step=0.5) # 初期値10, 終了値15, 刻み幅0.5の数列を定義する\nts", "_____no_output_____" ] ], [ [ "# 関数\n\nプログラムでは,頻繁に実行する手続き・処理がある.このような処理を,いつでも使えるように部品のような形にまとめたものを`関数`という.\n\nPythonには便利な関数が予め実装されている.このような関数を組み込み関数と呼ぶ.\n組み込み関数の例として,\n- 配列の長さを返す`len()`\n- Numpy配列の要素の平均を返す`np.mean()`\n- Numpy配列の絶対値を返す`np.abs()`\n- Numpy配列の$\\cos, \\sin$を返す`np.cos()`, `np.sin()`\n\nなどがある.\n`()`の中には変数を書く.これを引数と呼ぶ.また,関数の出力は返り値と呼ばれる.\n\n<font color=\"red\">\n(TODO)以下のセルを実行し,動作を確認せよ\n</font>", "_____no_output_____" ] ], [ [ "len(xs) # 配列の長さを出力する(引数:配列xs, 返り値:xsの要素数)", "_____no_output_____" ], [ "np.mean(xs) # 配列の各要素の平均を出力する(引数:配列xs, 返り値:xsの要素の平均)", "_____no_output_____" ], [ "np.abs(np.array([-1, 2, -3, 4])) # 配列の各要素の絶対値を出力する", "_____no_output_____" ] ], [ [ "\nPythonでは,組み込み関数だけれはなく,ユーザが独自定義する関数も利用することができる.ここでは関数の定義と実行を行ってみよう.\n\n変数$x$から次の$y$を算出する関数を考える.\n$$\ny = ax + b\n$$\nここで,$a=10$, $b=3$とすると,Pythonコードは次のようになる.\n\n<font color=\"red\">\n(TODO)以下のセルを実行し,動作を確認せよ\n</font>", "_____no_output_____" ] ], [ [ "# 関数の定義\ndef myfunc(x): # 関数の定義は,def 関数名(入力変数): で行う.\n # 関数内のコードは,インデント(行頭に半角スペース2個を入れること)する.\n a = 5 # 変数を定義(この変数は関数の中でのみ有効)\n b = 3\n y = a * x + b\n return y # 返り値はreturnで書く.", "_____no_output_____" ], [ "# 関数の実行,組み込み関数と同様に`関数名(引数)`とする\nmyfunc(5)", "_____no_output_____" ] ], [ [ "<font color=\"red\">\n(TODO)以下のセルに,`myfunc`の引数を`10`と`20`としたコードを書き,出力を確認せよ.\n</font>\n", "_____no_output_____" ] ], [ [ "# 引数が10のコード\n", "_____no_output_____" ], [ "# 引数が20のコード\n", "_____no_output_____" ] ], [ [ "# for文\n\nある処理を繰り返すとき,for文を用いる.\n\n例えば,「0から`n`まで,1刻みの数字を表示する」という処理を考えよう.\nこのとき,\n```\nprint(0)\nprint(1)\n...(略)...\nprint(n)\n```\nと書くのは大変だが,for文を使えば以下のように書ける.\n\n<font color=\"red\">\n(TODO)以下のセルを実行し,動作を確認せよ\n</font>", "_____no_output_____" ] ], [ [ "# 0からnまでの数字を表示するプログラム\nn = 10\nfor i in range(0, n + 1): # 変数`i`には,0からnまでの数字が逐次代入される.\n print(i) # iの値を画面出力する.for文内のコードはインデントする\n\nprint('終了') # この行はインデントされていないので,for文には含まれない.", "_____no_output_____" ] ], [ [ "上記のコードでは,`print(n)`の行がインデント(行の先頭に半角スペースが2個あること)されていることがわかる.for文は,直後のインデントされたコードのみを繰り返し実行する.そのため,最後の`print(終了)`は1度のみ実行されている.", "_____no_output_____" ], [ "# グラフの作成\n\n数値計算の結果をグラフに描写することができる.\n\n<font color=\"red\">\n(TODO)以下のセルを実行し,動作を確認せよ\n</font>", "_____no_output_____" ] ], [ [ "from matplotlib import pyplot as plt # 
グラフ描写に用いるpyplotというライブラリを読み込む\n# 以降,`plt`と書くことで利用できる.", "_____no_output_____" ], [ "xs = np.array([0, 1, 2, 3, 4]) # x軸となるnumpy配列\nys = np.array([-1, 1, -1, 1, -1]) # y軸となるnumpy配列\nplt.plot(xs, ys) # plt.plot()は,最初の引数がx軸,2番目の引数がy軸となるようにグラフを作成する.", "_____no_output_____" ], [ "# どの変数をx軸,y軸に割り当てるかは,変数を書く順番による.\n# x軸にys, y軸にxsをplotするには,次のようにする.\nplt.plot(ys, xs)", "_____no_output_____" ], [ "# 複数のグラフを同時に描写する\nplt.plot(xs, ys, 'r-') # '-r'はオプションで,plot xs and ys using red line を意味する.\nplt.plot(xs, 2 * ys, 'g:') # plot using green dot line\nplt.plot(xs, 3 * ys, 'b.') # plot using blue dots", "_____no_output_____" ] ], [ [ "# プログラム例による理解度の確認\n\nここまで学習してきた内容を用いて,次の課題をやってみよう.\n\n<font color=\"red\">\n(TODO)$y=x^3$の概形をグラフに出力するプログラムを書け.ただし,$x$の範囲は[-3, 3]とし,刻みは0.2とする.\n</font>\n\n> わからない場合のヒント\n>\n> 1. $y=x^3$を計算する関数`f`を定義する.\n> 2. $x$の点列をnumpy配列`xs`で定義する.\n> 3. $y$の点列を,要素が全てゼロのnumpy配列`ys`として定義する.\n> 4. `ys[i]`(`i`はインデックス)に,`f(xs[i])`の返り値を代入する.\n> 5. 4を全ての`i`で処理するようfor文に入れる.\n> 6. `xs`, `ys`をplotする\n\nわからない場合でも,考えることが重要です.しばらく悩んでみてください.\nできたら,[ここ](https://colab.research.google.com/github/yyamnk/numerical-methods-py3/blob/master/exp_python1_ans.ipynb)から答え合わせしてください.)\n", "_____no_output_____" ], [ "# ここまでのまとめ\n\nこのノートブックでは,PythonとColabの基本的な使い方を学んだ.これらは本実験をこなすための最低限の内容であり,機能のごく一部にしかすぎない.詳細は入門書等を参照すること.\n\nこのノートブックの内容を一通り理解したら,[実験のTopページ](https://github.com/yyamnk/numerical-methods-py3/blob/master/uu_experiment.md) に戻り,次のノートブックに進むこと.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ] ]
cb5556c884277d1337c8501ab9241b262de995ed
44,467
ipynb
Jupyter Notebook
site/en/r1/tutorials/estimators/cnn.ipynb
Harirai/docs
c61b885bd5761f5283c1073b866a5ccfd56d7abf
[ "Apache-2.0" ]
1
2020-09-02T07:40:29.000Z
2020-09-02T07:40:29.000Z
site/en/r1/tutorials/estimators/cnn.ipynb
Harirai/docs
c61b885bd5761f5283c1073b866a5ccfd56d7abf
[ "Apache-2.0" ]
null
null
null
site/en/r1/tutorials/estimators/cnn.ipynb
Harirai/docs
c61b885bd5761f5283c1073b866a5ccfd56d7abf
[ "Apache-2.0" ]
null
null
null
43.382439
344
0.568062
[ [ [ "##### Copyright 2018 The TensorFlow Authors.\n", "_____no_output_____" ] ], [ [ "#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.", "_____no_output_____" ] ], [ [ "# Build a Convolutional Neural Network using Estimators\n\n<table class=\"tfo-notebook-buttons\" align=\"left\">\n <td>\n <a target=\"_blank\" href=\"https://colab.research.google.com/github/tensorflow/docs/blob/master/site/en/r1/tutorials/estimators/cnn.ipynb\"><img src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" />Run in Google Colab</a>\n </td>\n <td>\n <a target=\"_blank\" href=\"https://github.com/tensorflow/docs/blob/master/site/en/r1/tutorials/estimators/cnn.ipynb\"><img src=\"https://www.tensorflow.org/images/GitHub-Mark-32px.png\" />View source on GitHub</a>\n </td>\n</table>", "_____no_output_____" ], [ "> Note: This is an archived TF1 notebook. These are configured\nto run in TF2's \n[compatbility mode](https://www.tensorflow.org/guide/migrate)\nbut will run in TF1 as well. To use TF1 in Colab, use the\n[%tensorflow_version 1.x](https://colab.research.google.com/notebooks/tensorflow_version.ipynb)\nmagic.", "_____no_output_____" ], [ "The `tf.layers` module provides a high-level API that makes\nit easy to construct a neural network. It provides methods that facilitate the\ncreation of dense (fully connected) layers and convolutional layers, adding\nactivation functions, and applying dropout regularization. In this tutorial,\nyou'll learn how to use `layers` to build a convolutional neural network model\nto recognize the handwritten digits in the MNIST data set.\n\n![handwritten digits 0–9 from the MNIST data set](https://www.tensorflow.org/images/mnist_0-9.png)\n\nThe [MNIST dataset](http://yann.lecun.com/exdb/mnist/) comprises 60,000\ntraining examples and 10,000 test examples of the handwritten digits 0–9,\nformatted as 28x28-pixel monochrome images.", "_____no_output_____" ], [ "## Get Started\n\nLet's set up the imports for our TensorFlow program:", "_____no_output_____" ] ], [ [ "import tensorflow.compat.v1 as tf\n\nimport numpy as np\n\ntf.logging.set_verbosity(tf.logging.INFO)", "_____no_output_____" ] ], [ [ "## Intro to Convolutional Neural Networks\n\nConvolutional neural networks (CNNs) are the current state-of-the-art model\narchitecture for image classification tasks. CNNs apply a series of filters to\nthe raw pixel data of an image to extract and learn higher-level features, which\nthe model can then use for classification. CNNs contains three components:\n\n* **Convolutional layers**, which apply a specified number of convolution\n filters to the image. 
For each subregion, the layer performs a set of\n mathematical operations to produce a single value in the output feature map.\n Convolutional layers then typically apply a\n [ReLU activation function](https://en.wikipedia.org/wiki/Rectifier_\\(neural_networks\\)) to\n the output to introduce nonlinearities into the model.\n\n* **Pooling layers**, which\n [downsample the image data](https://en.wikipedia.org/wiki/Convolutional_neural_network#Pooling_layer)\n extracted by the convolutional layers to reduce the dimensionality of the\n feature map in order to decrease processing time. A commonly used pooling\n algorithm is max pooling, which extracts subregions of the feature map\n (e.g., 2x2-pixel tiles), keeps their maximum value, and discards all other\n values.\n\n* **Dense (fully connected) layers**, which perform classification on the\n features extracted by the convolutional layers and downsampled by the\n pooling layers. In a dense layer, every node in the layer is connected to\n every node in the preceding layer.\n\nTypically, a CNN is composed of a stack of convolutional modules that perform\nfeature extraction. Each module consists of a convolutional layer followed by a\npooling layer. The last convolutional module is followed by one or more dense\nlayers that perform classification. The final dense layer in a CNN contains a\nsingle node for each target class in the model (all the possible classes the\nmodel may predict), with a\n[softmax](https://en.wikipedia.org/wiki/Softmax_function) activation function to\ngenerate a value between 0–1 for each node (the sum of all these softmax values\nis equal to 1). We can interpret the softmax values for a given image as\nrelative measurements of how likely it is that the image falls into each target\nclass.\n\nNote: For a more comprehensive walkthrough of CNN architecture, see Stanford University's [Convolutional Neural Networks for Visual Recognition course material](https://cs231n.github.io/convolutional-networks/).", "_____no_output_____" ], [ "## Building the CNN MNIST Classifier\n\nLet's build a model to classify the images in the MNIST dataset using the\nfollowing CNN architecture:\n\n1. **Convolutional Layer #1**: Applies 32 5x5 filters (extracting 5x5-pixel\n subregions), with ReLU activation function\n2. **Pooling Layer #1**: Performs max pooling with a 2x2 filter and stride of 2\n (which specifies that pooled regions do not overlap)\n3. **Convolutional Layer #2**: Applies 64 5x5 filters, with ReLU activation\n function\n4. **Pooling Layer #2**: Again, performs max pooling with a 2x2 filter and\n stride of 2\n5. **Dense Layer #1**: 1,024 neurons, with dropout regularization rate of 0.4\n (probability of 0.4 that any given element will be dropped during training)\n6. **Dense Layer #2 (Logits Layer)**: 10 neurons, one for each digit target\n class (0–9).\n\nThe `tf.layers` module contains methods to create each of the three layer types\nabove:\n\n* `conv2d()`. Constructs a two-dimensional convolutional layer. Takes number\n of filters, filter kernel size, padding, and activation function as\n arguments.\n* `max_pooling2d()`. Constructs a two-dimensional pooling layer using the\n max-pooling algorithm. Takes pooling filter size and stride as arguments.\n* `dense()`. Constructs a dense layer. Takes number of neurons and activation\n function as arguments.\n\nEach of these methods accepts a tensor as input and returns a transformed tensor\nas output. 
This makes it easy to connect one layer to another: just take the\noutput from one layer-creation method and supply it as input to another.\n\nAdd the following `cnn_model_fn` function, which\nconforms to the interface expected by TensorFlow's Estimator API (more on this\nlater in [Create the Estimator](#create-the-estimator)). This function takes\nMNIST feature data, labels, and mode (from\n`tf.estimator.ModeKeys`: `TRAIN`, `EVAL`, `PREDICT`) as arguments;\nconfigures the CNN; and returns predictions, loss, and a training operation:", "_____no_output_____" ] ], [ [ "def cnn_model_fn(features, labels, mode):\n \"\"\"Model function for CNN.\"\"\"\n # Input Layer\n input_layer = tf.reshape(features[\"x\"], [-1, 28, 28, 1])\n\n # Convolutional Layer #1\n conv1 = tf.layers.conv2d(\n inputs=input_layer,\n filters=32,\n kernel_size=[5, 5],\n padding=\"same\",\n activation=tf.nn.relu)\n\n # Pooling Layer #1\n pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2)\n\n # Convolutional Layer #2 and Pooling Layer #2\n conv2 = tf.layers.conv2d(\n inputs=pool1,\n filters=64,\n kernel_size=[5, 5],\n padding=\"same\",\n activation=tf.nn.relu)\n pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2)\n\n # Dense Layer\n pool2_flat = tf.reshape(pool2, [-1, 7 * 7 * 64])\n dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu)\n dropout = tf.layers.dropout(\n inputs=dense, rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN)\n\n # Logits Layer\n logits = tf.layers.dense(inputs=dropout, units=10)\n\n predictions = {\n # Generate predictions (for PREDICT and EVAL mode)\n \"classes\": tf.argmax(input=logits, axis=1),\n # Add `softmax_tensor` to the graph. It is used for PREDICT and by the\n # `logging_hook`.\n \"probabilities\": tf.nn.softmax(logits, name=\"softmax_tensor\")\n }\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)\n\n # Calculate Loss (for both TRAIN and EVAL modes)\n loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)\n\n # Configure the Training Op (for TRAIN mode)\n if mode == tf.estimator.ModeKeys.TRAIN:\n optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001)\n train_op = optimizer.minimize(\n loss=loss,\n global_step=tf.train.get_global_step())\n return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op)\n\n # Add evaluation metrics (for EVAL mode)\n eval_metric_ops = {\n \"accuracy\": tf.metrics.accuracy(\n labels=labels, predictions=predictions[\"classes\"])\n }\n return tf.estimator.EstimatorSpec(\n mode=mode, loss=loss, eval_metric_ops=eval_metric_ops)", "_____no_output_____" ] ], [ [ "The following sections (with headings corresponding to each code block above)\ndive deeper into the `tf.layers` code used to create each layer, as well as how\nto calculate loss, configure the training op, and generate predictions. If\nyou're already experienced with CNNs and [TensorFlow `Estimator`s](../../guide/custom_estimators.md),\nand find the above code intuitive, you may want to skim these sections or just\nskip ahead to [\"Training and Evaluating the CNN MNIST Classifier\"](#train_eval_mnist).", "_____no_output_____" ], [ "### Input Layer\n\nThe methods in the `layers` module for creating convolutional and pooling layers\nfor two-dimensional image data expect input tensors to have a shape of\n<code>[<em>batch_size</em>, <em>image_height</em>, <em>image_width</em>,\n<em>channels</em>]</code> by default. 
This behavior can be changed using the\n<code><em>data_format</em></code> parameter, defined as follows:\n\n*   `batch_size` —Size of the subset of examples to use when performing\n    gradient descent during training.\n*   `image_height` —Height of the example images.\n*   `image_width` —Width of the example images.\n*   `channels` —Number of color channels in the example images. For color\n    images, the number of channels is 3 (red, green, blue). For monochrome\n    images, there is just 1 channel (black).\n*   `data_format` —A string, one of `channels_last` (default) or `channels_first`.\n      `channels_last` corresponds to inputs with shape\n      `(batch, ..., channels)` while `channels_first` corresponds to\n      inputs with shape `(batch, channels, ...)`.\n\nHere, our MNIST dataset is composed of monochrome 28x28 pixel images, so the\ndesired shape for our input layer is <code>[<em>batch_size</em>, 28, 28,\n1]</code>.\n\nTo convert our input feature map (`features`) to this shape, we can perform the\nfollowing `reshape` operation:\n\n```\ninput_layer = tf.reshape(features[\"x\"], [-1, 28, 28, 1])\n```\n\nNote that we've indicated `-1` for batch size, which specifies that this\ndimension should be dynamically computed based on the number of input values in\n`features[\"x\"]`, holding the size of all other dimensions constant. This allows\nus to treat `batch_size` as a hyperparameter that we can tune. For example, if\nwe feed examples into our model in batches of 5, `features[\"x\"]` will contain\n3,920 values (one value for each pixel in each image), and `input_layer` will\nhave a shape of `[5, 28, 28, 1]`. Similarly, if we feed examples in batches of\n100, `features[\"x\"]` will contain 78,400 values, and `input_layer` will have a\nshape of `[100, 28, 28, 1]`.", "_____no_output_____" ], [ "### Convolutional Layer #1\n\nIn our first convolutional layer, we want to apply 32 5x5 filters to the input\nlayer, with a ReLU activation function. We can use the `conv2d()` method in the\n`layers` module to create this layer as follows:\n\n```\nconv1 = tf.layers.conv2d(\n    inputs=input_layer,\n    filters=32,\n    kernel_size=[5, 5],\n    padding=\"same\",\n    activation=tf.nn.relu)\n```\n\nThe `inputs` argument specifies our input tensor, which must have the shape\n<code>[<em>batch_size</em>, <em>image_height</em>, <em>image_width</em>,\n<em>channels</em>]</code>. Here, we're connecting our first convolutional layer\nto `input_layer`, which has the shape <code>[<em>batch_size</em>, 28, 28,\n1]</code>.\n\nNote: `conv2d()` will instead accept a shape of <code>[<em>batch_size</em>, <em>channels</em>, <em>image_height</em>, <em>image_width</em>]</code> when passed the argument `data_format=channels_first`.\n\nThe `filters` argument specifies the number of filters to apply (here, 32), and\n`kernel_size` specifies the dimensions of the filters as <code>[<em>height</em>,\n<em>width</em>]</code> (here, <code>[5, 5]</code>).\n\n<p class=\"tip\"><b>TIP:</b> If filter height and width have the same value, you can instead specify a\nsingle integer for <code>kernel_size</code>—e.g., <code>kernel_size=5</code>.</p>\n\nThe `padding` argument specifies one of two enumerated values\n(case-insensitive): `valid` (default value) or `same`. To specify that the\noutput tensor should have the same height and width values as the input tensor,\nwe set `padding=same` here, which instructs TensorFlow to add 0 values to the\nedges of the input tensor to preserve height and width of 28. 
(Without padding,\na 5x5 convolution over a 28x28 tensor will produce a 24x24 tensor, as there are\n24x24 locations to extract a 5x5 tile from a 28x28 grid.)\n\nThe `activation` argument specifies the activation function to apply to the\noutput of the convolution. Here, we specify ReLU activation with\n`tf.nn.relu`.\n\nOur output tensor produced by `conv2d()` has a shape of\n<code>[<em>batch_size</em>, 28, 28, 32]</code>: the same height and width\ndimensions as the input, but now with 32 channels holding the output from each\nof the filters.", "_____no_output_____" ], [ "### Pooling Layer #1\n\nNext, we connect our first pooling layer to the convolutional layer we just\ncreated. We can use the `max_pooling2d()` method in `layers` to construct a\nlayer that performs max pooling with a 2x2 filter and stride of 2:\n\n```\npool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2)\n```\n\nAgain, `inputs` specifies the input tensor, with a shape of\n<code>[<em>batch_size</em>, <em>image_height</em>, <em>image_width</em>,\n<em>channels</em>]</code>. Here, our input tensor is `conv1`, the output from\nthe first convolutional layer, which has a shape of <code>[<em>batch_size</em>,\n28, 28, 32]</code>.\n\nNote: As with <code>conv2d()</code>, <code>max_pooling2d()</code> will instead\naccept a shape of <code>[<em>batch_size</em>, <em>channels</em>,\n<em>image_height</em>, <em>image_width</em>]</code> when passed the argument\n<code>data_format=channels_first</code>.\n\nThe `pool_size` argument specifies the size of the max pooling filter as\n<code>[<em>height</em>, <em>width</em>]</code> (here, `[2, 2]`). If both\ndimensions have the same value, you can instead specify a single integer (e.g.,\n`pool_size=2`).\n\nThe `strides` argument specifies the size of the stride. Here, we set a stride\nof 2, which indicates that the subregions extracted by the filter should be\nseparated by 2 pixels in both the height and width dimensions (for a 2x2 filter,\nthis means that none of the regions extracted will overlap). If you want to set\ndifferent stride values for height and width, you can instead specify a tuple or\nlist (e.g., `stride=[3, 6]`).\n\nOur output tensor produced by `max_pooling2d()` (`pool1`) has a shape of\n<code>[<em>batch_size</em>, 14, 14, 32]</code>: the 2x2 filter reduces height and width by 50% each.", "_____no_output_____" ], [ "### Convolutional Layer #2 and Pooling Layer #2\n\nWe can connect a second convolutional and pooling layer to our CNN using\n`conv2d()` and `max_pooling2d()` as before. For convolutional layer #2, we\nconfigure 64 5x5 filters with ReLU activation, and for pooling layer #2, we use\nthe same specs as pooling layer #1 (a 2x2 max pooling filter with stride of 2):\n\n```\nconv2 = tf.layers.conv2d(\n inputs=pool1,\n filters=64,\n kernel_size=[5, 5],\n padding=\"same\",\n activation=tf.nn.relu)\n\npool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2)\n```\n\nNote that convolutional layer #2 takes the output tensor of our first pooling\nlayer (`pool1`) as input, and produces the tensor `conv2` as output. `conv2`\nhas a shape of <code>[<em>batch_size</em>, 14, 14, 64]</code>, the same height and width as `pool1` (due to `padding=\"same\"`), and 64 channels for the 64\nfilters applied.\n\nPooling layer #2 takes `conv2` as input, producing `pool2` as output. 
`pool2`\nhas shape <code>[<em>batch_size</em>, 7, 7, 64]</code> (50% reduction of height and width from `conv2`).", "_____no_output_____" ], [ "### Dense Layer\n\nNext, we want to add a dense layer (with 1,024 neurons and ReLU activation) to\nour CNN to perform classification on the features extracted by the\nconvolution/pooling layers. Before we connect the layer, however, we'll flatten\nour feature map (`pool2`) to shape <code>[<em>batch_size</em>,\n<em>features</em>]</code>, so that our tensor has only two dimensions:\n\n```\npool2_flat = tf.reshape(pool2, [-1, 7 * 7 * 64])\n```\n\nIn the `reshape()` operation above, the `-1` signifies that the *`batch_size`*\ndimension will be dynamically calculated based on the number of examples in our\ninput data. Each example has 7 (`pool2` height) * 7 (`pool2` width) * 64\n(`pool2` channels) features, so we want the `features` dimension to have a value\nof 7 * 7 * 64 (3136 in total). The output tensor, `pool2_flat`, has shape\n<code>[<em>batch_size</em>, 3136]</code>.\n\nNow, we can use the `dense()` method in `layers` to connect our dense layer as\nfollows:\n\n```\ndense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu)\n```\n\nThe `inputs` argument specifies the input tensor: our flattened feature map,\n`pool2_flat`. The `units` argument specifies the number of neurons in the dense\nlayer (1,024). The `activation` argument takes the activation function; again,\nwe'll use `tf.nn.relu` to add ReLU activation.\n\nTo help improve the results of our model, we also apply dropout regularization\nto our dense layer, using the `dropout` method in `layers`:\n\n```\ndropout = tf.layers.dropout(\n inputs=dense, rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN)\n```\n\nAgain, `inputs` specifies the input tensor, which is the output tensor from our\ndense layer (`dense`).\n\nThe `rate` argument specifies the dropout rate; here, we use `0.4`, which means\n40% of the elements will be randomly dropped out during training.\n\nThe `training` argument takes a boolean specifying whether or not the model is\ncurrently being run in training mode; dropout will only be performed if\n`training` is `True`. Here, we check if the `mode` passed to our model function\n`cnn_model_fn` is `TRAIN` mode.\n\nOur output tensor `dropout` has shape <code>[<em>batch_size</em>, 1024]</code>.", "_____no_output_____" ], [ "### Logits Layer\n\nThe final layer in our neural network is the logits layer, which will return the\nraw values for our predictions. We create a dense layer with 10 neurons (one for\neach target class 0–9), with linear activation (the default):\n\n```\nlogits = tf.layers.dense(inputs=dropout, units=10)\n```\n\nOur final output tensor of the CNN, `logits`, has shape `[batch_size, 10]`.", "_____no_output_____" ], [ "### Generate Predictions {#generate_predictions}\n\nThe logits layer of our model returns our predictions as raw values in a\n<code>[<em>batch_size</em>, 10]</code>-dimensional tensor. Let's convert these\nraw values into two different formats that our model function can return:\n\n* The **predicted class** for each example: a digit from 0–9.\n* The **probabilities** for each possible target class for each example: the\n probability that the example is a 0, is a 1, is a 2, etc.\n\nFor a given example, our predicted class is the element in the corresponding row\nof the logits tensor with the highest raw value. 
We can find the index of this\nelement using the `tf.argmax`\nfunction:\n\n```\ntf.argmax(input=logits, axis=1)\n```\n\nThe `input` argument specifies the tensor from which to extract maximum\nvalues—here `logits`. The `axis` argument specifies the axis of the `input`\ntensor along which to find the greatest value. Here, we want to find the largest\nvalue along the dimension with index of 1, which corresponds to our predictions\n(recall that our logits tensor has shape <code>[<em>batch_size</em>,\n10]</code>).\n\nWe can derive probabilities from our logits layer by applying softmax activation\nusing `tf.nn.softmax`:\n\n```\ntf.nn.softmax(logits, name=\"softmax_tensor\")\n```\n\nNote: We use the `name` argument to explicitly name this operation `softmax_tensor`, so we can reference it later. (We'll set up logging for the softmax values in [\"Set Up a Logging Hook\"](#set-up-a-logging-hook)).\n\nWe compile our predictions in a dict, and return an `EstimatorSpec` object:\n\n```\npredictions = {\n    \"classes\": tf.argmax(input=logits, axis=1),\n    \"probabilities\": tf.nn.softmax(logits, name=\"softmax_tensor\")\n}\nif mode == tf.estimator.ModeKeys.PREDICT:\n  return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)\n```", "_____no_output_____" ], [ "### Calculate Loss {#calculating-loss}\n\nFor both training and evaluation, we need to define a\n[loss function](https://en.wikipedia.org/wiki/Loss_function)\nthat measures how closely the model's predictions match the target classes. For\nmulticlass classification problems like MNIST,\n[cross entropy](https://en.wikipedia.org/wiki/Cross_entropy) is typically used\nas the loss metric. The following code calculates cross entropy when the model\nruns in either `TRAIN` or `EVAL` mode:\n\n```\nloss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)\n```\n\nLet's take a closer look at what's happening above.\n\nOur `labels` tensor contains a list of prediction indices for our examples, e.g. `[1,\n9, ...]`. `logits` contains the linear outputs of our last layer.\n\n`tf.losses.sparse_softmax_cross_entropy` calculates the softmax crossentropy\n(aka: categorical crossentropy, negative log-likelihood) from these two inputs\nin an efficient, numerically stable way.", "_____no_output_____" ], [ "### Configure the Training Op\n\nIn the previous section, we defined loss for our CNN as the softmax\ncross-entropy of the logits layer and our labels. Let's configure our model to\noptimize this loss value during training. 
We'll use a learning rate of 0.001 and\n[stochastic gradient descent](https://en.wikipedia.org/wiki/Stochastic_gradient_descent)\nas the optimization algorithm:\n\n```\nif mode == tf.estimator.ModeKeys.TRAIN:\n  optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001)\n  train_op = optimizer.minimize(\n      loss=loss,\n      global_step=tf.train.get_global_step())\n  return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op)\n```", "_____no_output_____" ], [ "Note: For a more in-depth look at configuring training ops for Estimator model functions, see [\"Defining the training op for the model\"](../../guide/custom_estimators.md#defining-the-training-op-for-the-model) in the [\"Creating Estimators in tf.estimator\"](../../guide/custom_estimators.md) tutorial.", "_____no_output_____" ], [ "### Add evaluation metrics\n\nTo add an accuracy metric to our model, we define the `eval_metric_ops` dict in EVAL\nmode as follows:\n\n```\neval_metric_ops = {\n    \"accuracy\": tf.metrics.accuracy(\n        labels=labels, predictions=predictions[\"classes\"])\n}\nreturn tf.estimator.EstimatorSpec(\n    mode=mode, loss=loss, eval_metric_ops=eval_metric_ops)\n```", "_____no_output_____" ], [ "<a id=\"train_eval_mnist\"></a>\n## Training and Evaluating the CNN MNIST Classifier\n\nWe've coded our MNIST CNN model function; now we're ready to train and evaluate\nit.", "_____no_output_____" ], [ "### Load Training and Test Data\n\nFirst, let's load our training and test data with the following code:", "_____no_output_____" ] ], [ [ "# Load training and eval data\n((train_data, train_labels),\n (eval_data, eval_labels)) = tf.keras.datasets.mnist.load_data()\n\ntrain_data = train_data/np.float32(255)\ntrain_labels = train_labels.astype(np.int32)  # not required\n\neval_data = eval_data/np.float32(255)\neval_labels = eval_labels.astype(np.int32)  # not required", "_____no_output_____" ] ], [ [ "We store the training feature data (the raw pixel values for 60,000 images of\nhand-drawn digits) and training labels (the corresponding value from 0–9 for\neach image) as [numpy\narrays](https://docs.scipy.org/doc/numpy/reference/generated/numpy.array.html)\nin `train_data` and `train_labels`, respectively. Similarly, we store the\nevaluation feature data (10,000 images) and evaluation labels in `eval_data`\nand `eval_labels`, respectively.", "_____no_output_____" ], [ "### Create the Estimator {#create-the-estimator}\n\nNext, let's create an `Estimator` (a TensorFlow class for performing high-level\nmodel training, evaluation, and inference) for our model. 
Add the following code\nto `main()`:", "_____no_output_____" ] ], [ [ "# Create the Estimator\nmnist_classifier = tf.estimator.Estimator(\n model_fn=cnn_model_fn, model_dir=\"/tmp/mnist_convnet_model\")", "_____no_output_____" ] ], [ [ "The `model_fn` argument specifies the model function to use for training,\nevaluation, and prediction; we pass it the `cnn_model_fn` we created in\n[\"Building the CNN MNIST Classifier.\"](#building-the-cnn-mnist-classifier) The\n`model_dir` argument specifies the directory where model data (checkpoints) will\nbe saved (here, we specify the temp directory `/tmp/mnist_convnet_model`, but\nfeel free to change to another directory of your choice).\n\nNote: For an in-depth walkthrough of the TensorFlow `Estimator` API, see the tutorial [Creating Estimators in tf.estimator](../../guide/custom_estimators.md).", "_____no_output_____" ], [ "### Set Up a Logging Hook {#set_up_a_logging_hook}\n\nSince CNNs can take a while to train, let's set up some logging so we can track\nprogress during training. We can use TensorFlow's `tf.train.SessionRunHook` to create a\n`tf.train.LoggingTensorHook`\nthat will log the probability values from the softmax layer of our CNN. Add the\nfollowing to `main()`:", "_____no_output_____" ] ], [ [ "# Set up logging for predictions\ntensors_to_log = {\"probabilities\": \"softmax_tensor\"}\n\nlogging_hook = tf.train.LoggingTensorHook(\n tensors=tensors_to_log, every_n_iter=50)", "_____no_output_____" ] ], [ [ "We store a dict of the tensors we want to log in `tensors_to_log`. Each key is a\nlabel of our choice that will be printed in the log output, and the\ncorresponding label is the name of a `Tensor` in the TensorFlow graph. Here, our\n`probabilities` can be found in `softmax_tensor`, the name we gave our softmax\noperation earlier when we generated the probabilities in `cnn_model_fn`.\n\nNote: If you don't explicitly assign a name to an operation via the `name` argument, TensorFlow will assign a default name. A couple easy ways to discover the names applied to operations are to visualize your graph on [TensorBoard](../../guide/graph_viz.md)) or to enable the [TensorFlow Debugger (tfdbg)](../../guide/debugger.md).\n\nNext, we create the `LoggingTensorHook`, passing `tensors_to_log` to the\n`tensors` argument. We set `every_n_iter=50`, which specifies that probabilities\nshould be logged after every 50 steps of training.", "_____no_output_____" ], [ "### Train the Model\n\nNow we're ready to train our model, which we can do by creating `train_input_fn`\nand calling `train()` on `mnist_classifier`. In the `numpy_input_fn` call, we pass the training feature data and labels to\n`x` (as a dict) and `y`, respectively. We set a `batch_size` of `100` (which\nmeans that the model will train on minibatches of 100 examples at each step).\n`num_epochs=None` means that the model will train until the specified number of\nsteps is reached. We also set `shuffle=True` to shuffle the training data. Then train the model a single step and log the output:", "_____no_output_____" ] ], [ [ "# Train the model\ntrain_input_fn = tf.estimator.inputs.numpy_input_fn(\n x={\"x\": train_data},\n y=train_labels,\n batch_size=100,\n num_epochs=None,\n shuffle=True)\n\n# train one step and display the probabilties\nmnist_classifier.train(\n input_fn=train_input_fn,\n steps=1,\n hooks=[logging_hook])", "_____no_output_____" ] ], [ [ "Now—without logging each step—set `steps=1000` to train the model longer, but in a reasonable time to run this example. 
Training CNNs is computationally intensive. To increase the accuracy of your model, increase the number of `steps` passed to `train()`, like 20,000 steps.", "_____no_output_____" ] ], [ [ "mnist_classifier.train(input_fn=train_input_fn, steps=1000)", "_____no_output_____" ] ], [ [ "### Evaluate the Model\n\nOnce training is complete, we want to evaluate our model to determine its\naccuracy on the MNIST test set. We call the `evaluate` method, which evaluates\nthe metrics we specified in `eval_metric_ops` argument in the `model_fn`.\nAdd the following to `main()`:", "_____no_output_____" ] ], [ [ "eval_input_fn = tf.estimator.inputs.numpy_input_fn(\n x={\"x\": eval_data},\n y=eval_labels,\n num_epochs=1,\n shuffle=False)\n\neval_results = mnist_classifier.evaluate(input_fn=eval_input_fn)\nprint(eval_results)", "_____no_output_____" ] ], [ [ "To create `eval_input_fn`, we set `num_epochs=1`, so that the model evaluates\nthe metrics over one epoch of data and returns the result. We also set\n`shuffle=False` to iterate through the data sequentially.", "_____no_output_____" ], [ "## Additional Resources\n\nTo learn more about TensorFlow Estimators and CNNs in TensorFlow, see the\nfollowing resources:\n\n* [Creating Estimators in tf.estimator](../../guide/custom_estimators.md)\n provides an introduction to the TensorFlow Estimator API. It walks through\n configuring an Estimator, writing a model function, calculating loss, and\n defining a training op.\n* [Advanced Convolutional Neural Networks](../../tutorials/images/deep_cnn.md) walks through how to build a MNIST CNN classification model\n *without estimators* using lower-level TensorFlow operations.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ] ]
cb5557a3e7a2596e6f80c837bb589c00a44833dc
104,504
ipynb
Jupyter Notebook
misc/runfastai.ipynb
ygCoconut/MRIreconUNET
4f03cea30590f676f4e63fd5817e06396983acb7
[ "Apache-2.0" ]
8
2019-11-08T07:49:40.000Z
2022-02-25T08:11:02.000Z
misc/runfastai.ipynb
ygCoconut/MRIreconUNET
4f03cea30590f676f4e63fd5817e06396983acb7
[ "Apache-2.0" ]
1
2020-07-12T16:58:09.000Z
2020-07-23T14:53:20.000Z
misc/runfastai.ipynb
ygCoconut/MRIreconUNET
4f03cea30590f676f4e63fd5817e06396983acb7
[ "Apache-2.0" ]
2
2019-11-08T07:49:48.000Z
2021-07-22T05:33:30.000Z
121.657742
32,044
0.848121
[ [ [ "# This notebook helps you to do several things:\n1) Find your optimal learning rate\nhttps://docs.fast.ai/callbacks.html#LRFinder\n2) ", "_____no_output_____" ] ], [ [ "%reload_ext autoreload\n%autoreload 2\n\nimport fastai\nfrom fastai.callbacks import *\n\nfrom torch.utils.data import Dataset, DataLoader\nfrom models import UNet2d_assembled\nimport numpy as np\nimport torch\nfrom fastai.vision import *\n\ntorch.backends.cudnn.benchmark = True", "_____no_output_____" ], [ "DEVICE = 'cuda'\nOS = 'Windows'\n\n", "_____no_output_____" ], [ "# GET DATASET\nclass CMRIreconDataset(Dataset):\n \"\"\"CMRIrecon dataset.\"\"\"\n def __init__(self, input_file_path, target_file_path):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.inputs = np.load(input_file_path)\n self.targets = np.load(target_file_path)\n\n def __len__(self):\n# print(\"print length of inputs\",len(self.inputs))\n# print(\"print shape of inputs\",np.shape(self.inputs))\n return len(self.inputs)\n\n def __getitem__(self, idx):\n\n# sample = {'input': self.inputs[idx], 'target': self.targets[idx]}\n X = self.inputs[idx].astype(np.float32)\n Y = self.targets[idx].astype(np.float32)\n return X, Y\n\n", "_____no_output_____" ], [ "\n\nif OS == 'Linux':\n CMRIdataset = CMRIreconDataset(\n input_file_path = \\\n '/home/nw92/reconproject_data/input_data.npy', \\\n target_file_path = \\\n '/home/nw92/reconproject_data/target_data.npy')\n\nelif OS == 'Windows':\n CMRIdataset = CMRIreconDataset(\n input_file_path = \\\n 'C:/Users/littl/Documents/PythonScripts/reconproject_data/input_data.npy', \\\n target_file_path = \\\n 'C:/Users/littl/Documents/PythonScripts/reconproject_data/target_data.npy')\n\nelse:\n print(\"Please use valid COMPUTER.\\nOptions:\\t\\'Windows\\'\\t\\'Linux\\'\")\n\n\n# SPLIT DATASET INTO TRAIN, VAL AND TEST #####################################\n# CMRIdataset = train_dataset + test_dataset\nprint(\"\\nSplit dataset into train data (80%) and test data (20%)...\\n\")\ntrain_size = int(0.8 * len(CMRIdataset))\ntest_size = len(CMRIdataset) - train_size\ntrain_dataset, test_dataset = torch.utils.data.random_split(CMRIdataset, [train_size, test_size])\n\n# train_dataset = train_dataset + val_dataset\nprint(\"\\nSplit train data into train data (80%) and val data (20%)...\\n\")\ntrain_size = int(0.8 * len(train_dataset))\nval_size = len(train_dataset) - train_size\ntrain_dataset, val_dataset = torch.utils.data.random_split(train_dataset, [train_size, val_size])\n\n\nprint(\"Load train_dl, val_dl and test_dl...\")\n# load train set\ntrain_dl = DataLoader(train_dataset, batch_size=16,\n shuffle=True, num_workers=0)\n# load validation set\nvalid_dl = DataLoader(val_dataset, batch_size=16,\n shuffle=True, num_workers=0)\n# load test set\ntest_dl = DataLoader(test_dataset, batch_size=16,\n shuffle=True, num_workers=0)\n\nprint(\"train_dl, val_dl and test_dl loaded!\")\n\n\n# # DEFINE DATABUNCH TO FEED THE MODEL\ndata = DataBunch(train_dl,\n valid_dl,\n test_dl,\n device=DEVICE,\n # dl_tfms:Optional[Collection[Callable]]=None,\n # path:PathOrStr='.',\n # collate_fn:Callable='data_collate',\n # no_check:bool=False\n )\n\n\n# data.show_batch(rows=4)", "\nSplit dataset into train data (80%) and test data (20%)...\n\n\nSplit train data into train data (80%) and val data (20%)...\n\nLoad train_dl, val_dl and test_dl...\ntrain_dl, val_dl and test_dl 
loaded!\n" ], [ "# DEFINE LEARNER\nloss_func = nn.MSELoss()\nmetrics = mean_absolute_error\nmodel = UNet2d_assembled.UNet2D(20) #20 channels\n\nlearn = Learner(data = data,\n                model = model,\n                # opt_func:Callable='Adam',\n                loss_func = loss_func,\n                metrics = metrics,\n#                 callback_fns=[CSVLogger],\n                # true_wd:bool=True,\n                # bn_wd:bool=True,\n                # wd:Floats=0.01,\n                # train_bn:bool=True,\n                # path:str=None,\n                # model_dir:PathOrStr='models',\n                # callback_fns:Collection[Callable]=None,\n                # callbacks:Collection[Callback]=<factory>,\n                # layer_groups:ModuleList=None,\n                # add_time:bool=True,\n                # silent:bool=None\n                )\n\n# learn.summary()\nlearn.lr_find(start_lr=1e-07, end_lr=10)\n# learn = cnn_learner(data, models.resnet18, metrics=accuracy)\n# learn.fit(1)", "_____no_output_____" ], [ "learn.recorder.plot()\n", "_____no_output_____" ], [ "learn.recorder.plot()", "_____no_output_____" ], [ "lr = 1.5e-2", "_____no_output_____" ], [ "learn.fit_one_cycle(3, lr)", "_____no_output_____" ], [ "learn.recorder.plot_lr(show_moms=True)", "_____no_output_____" ], [ "learn = Learner(data = data,\n                model = model,\n                # opt_func:Callable='Adam',\n                loss_func = loss_func,\n                metrics = metrics,\n                callback_fns=[CSVLogger],\n                # true_wd:bool=True,\n                # bn_wd:bool=True,\n                # wd:Floats=0.01,\n                # train_bn:bool=True,\n                # path:str=None,\n                # model_dir:PathOrStr='models',\n                # callback_fns:Collection[Callable]=None,\n                # callbacks:Collection[Callback]=<factory>,\n                # layer_groups:ModuleList=None,\n                # add_time:bool=True,\n                # silent:bool=None\n                )\n", "_____no_output_____" ], [ "learn.fit(3)", "_____no_output_____" ], [ "learn.fit(3)", "_____no_output_____" ], [ "learn.fit(3, 1e-1)", "_____no_output_____" ], [ "learn.csv_logger.read_logged_file()", "_____no_output_____" ], [ "def fit_odd_schedule(learn, lr):\n    n = len(learn.data.train_dl)\n    phases = [TrainingPhase(n).schedule_hp('lr', lr, anneal=annealing_cos), \n              TrainingPhase(n*2).schedule_hp('lr', lr, anneal=annealing_poly(2))]\n    sched = GeneralScheduler(learn, phases)\n    learn.callbacks.append(sched)\n    total_epochs = 3\n    learn.fit(total_epochs)", "_____no_output_____" ], [ "learn = Learner(data = data,\n                model = model,\n                # opt_func:Callable='Adam',\n                loss_func = loss_func,\n                metrics = metrics,\n#                 callback_fns=[CSVLogger],\n                # true_wd:bool=True,\n                # bn_wd:bool=True,\n                # wd:Floats=0.01,\n                # train_bn:bool=True,\n                # path:str=None,\n                # model_dir:PathOrStr='models',\n                # callback_fns:Collection[Callable]=None,\n                # callbacks:Collection[Callback]=<factory>,\n                # layer_groups:ModuleList=None,\n                # add_time:bool=True,\n                # silent:bool=None\n                )\nfit_odd_schedule(learn, lr)", "_____no_output_____" ], [ "learn.recorder.plot_lr()", "_____no_output_____" ], [ "learn = Learner(data = data,\n                model = model,\n                # opt_func:Callable='Adam',\n                loss_func = loss_func,\n                metrics = metrics,\n#                 callback_fns=[CSVLogger,\n#                               SaveModelCallback(learn,\n#                                                 every='epoch',\n#                                                 monitor='valid_loss')],\n                \n                # true_wd:bool=True,\n                # bn_wd:bool=True,\n                # wd:Floats=0.01,\n                # train_bn:bool=True,\n                # path:str=None,\n                # model_dir:PathOrStr='models',\n                # callback_fns:Collection[Callable]=None,\n                # callbacks:Collection[Callback]=<factory>,\n                # layer_groups:ModuleList=None,\n                # add_time:bool=True,\n                # silent:bool=None\n                )", "_____no_output_____" ], [ "learn.fit_one_cycle(3, lr,\n                    callbacks=[fastai.callbacks.SaveModelCallback(learn, every='epoch', monitor='valid_loss')])", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb5565a3aea28984ec09c03c5327e0e88a7aa09d
13,512
ipynb
Jupyter Notebook
old_versions_tentatives/Definitivo con plot.ipynb
Dorianeve/python_assignment
6ad408b4f65c5dde96960e251930e3f83850acfc
[ "MIT" ]
null
null
null
old_versions_tentatives/Definitivo con plot.ipynb
Dorianeve/python_assignment
6ad408b4f65c5dde96960e251930e3f83850acfc
[ "MIT" ]
null
null
null
old_versions_tentatives/Definitivo con plot.ipynb
Dorianeve/python_assignment
6ad408b4f65c5dde96960e251930e3f83850acfc
[ "MIT" ]
null
null
null
26.339181
170
0.548105
[ [ [ "## import data manipulation packages for data cleaning and distance calculation\nimport pandas as pd\nimport numpy as np\nfrom sklearn.neighbors import DistanceMetric\nfrom math import radians", "_____no_output_____" ], [ "## DATA CLEANING AND PREPARATION\n## import dataset as variable 'city' and drop NaN\ncities = pd.read_excel('worldcities.xlsx')\nct = cities.dropna(axis = 'rows', how = 'any')", "_____no_output_____" ], [ "## add london starting point as 'London_st' slights on the right (to facilitate the assignment resolution)\nLondon_st = ct.loc[(ct['city'] == 'London') & (ct['iso3'] == 'GBR')]\nLondon_st['city']='London_st' \nLondon_st['lng'] = London_st['lng'] + 0.2\nct = ct.append(London_st)", "_____no_output_____" ], [ "## resetting index after append\nct = ct.reset_index()", "_____no_output_____" ], [ "## concatenate iso2 and city to get unique id\nct['ID'] = ct['city'].map(str) + ct['iso2'].map(str)", "_____no_output_____" ], [ "## drop not usable columns\nct = ct.drop(['city_ascii', 'country', 'iso2', 'admin_name', 'capital'], axis = 1)\nct = ct.drop('index', axis = 1)", "_____no_output_____" ], [ "## identifying location of 'London_st' to be used later as 'source'\nsource = ct.loc[(ct['city'] == 'London_st')]", "_____no_output_____" ], [ "## identifying location of 'London' to be used later as 'target'\ntarget = ct.loc[(ct['city'] == 'London') & (ct['iso3'] == 'GBR')]", "_____no_output_____" ], [ "## GETTING WEIGHTS - part I\n## population weights '+2', where population > 200000\npop = np.where(ct['population'] < 200000 , 0, 2)", "_____no_output_____" ], [ "## same state weights '+2', where 'iso3' is different\ni = ct['iso3'].to_numpy()\nst = (i[:, None ] != i) * 2", "_____no_output_____" ], [ "## GETTING DIRECTION - getting an array comparing longitudes (0 if a city is west the other, 1 if a city is east)\n## to get all positive longitudes we need to rescale from -180/+180 to 0/360 scale, where London is approx 0\ndr_x = np.where(ct['lng']>= 0 , ct['lng'] , (ct['lng'] + 180) + 180)\nx = dr_x\ndr = (x[:, None] < x) * 1", "_____no_output_____" ], [ "## computing big distances (>60 degrees) as a '0' (no go area) to get the final matrix less 'heavy' to be handled\nrang = (x[: , None] < x + 60 ) * 1", "_____no_output_____" ], [ "## QUESTO NON SERVE GIUSTO?\n## dir_test = pd.DataFrame(dr*rang.T, columns = ct['ID'], index = ct['ID'])\n## dir_test", "_____no_output_____" ], [ "## creating 3 dataframes with direction, same state and population weights\ndirection = pd.DataFrame(dr*rang.T, columns = ct['ID'], index = ct['ID'])\nsame_state = pd.DataFrame(st, columns = ct['ID'], index = ct['ID'])\npopulation = pd.DataFrame(pop , index = ct['ID'])", "_____no_output_____" ], [ "## DISTANCE COMPUTATION - 'Harvesine'\n## the earth is spheric, so a specific calculation ('Harvesine distance') is required to get the distance from places\nct['lat'] = np.radians(ct['lat'])\nct['lng'] = np.radians(ct['lng'])", "_____no_output_____" ], [ "## retrieve the 'harvesine' metric from scipy\ndist = DistanceMetric.get_metric('haversine')", "_____no_output_____" ], [ "## calculating the pairwise distance between cities multiplying *6373 to get kms\n## get a smaller size object by getting distance only if direction is 'east' (value 1 in 'direction' dataframe)\nD = np.where(direction > 0, dist.pairwise(ct [['lat','lng']].to_numpy())*6373 , 0)\n## create the distance matrix with cities in the indexes\ndistance = pd.DataFrame(D.T, columns = ct['ID'], index = ct['ID'])", "_____no_output_____" ], [ "## view 
matrix of distance", "_____no_output_____" ], [ "## QUESTO NON SERVE GIUSTO?\n## distance.loc['London_stGB'].sum()", "_____no_output_____" ], [ "## secondo me questo è già risolto con import pandas as pd no?\n## from pandas import DataFrame", "_____no_output_____" ], [ "## GETTING WEIGHTS - part II\n## utilising the matrix of distance called 'distance' (which contains already directions)\n## populate 'dis' with weights: '+2' if closest, '4' if second closest, '8' if third closest\n## the rest of distances as '0', meaning 'no go'\ndis = distance.T.replace(0, 0)\ndis = dis.replace(dis.apply(lambda x: x[x > 0].min(axis=0)), 2)\ndis = dis.replace(dis.apply(lambda x: x[x > 2].min(axis=0)), 4)\ndis = dis.replace(dis.apply(lambda x: x[x > 4].min(axis=0)), 8)\ndis = dis.where((dis <= 8), 0) \ndis", "_____no_output_____" ], [ "## SUMMING THE TOTAL WEIGHTS\n## sum of dataframes: 'dis', 'same_state' and 'population' to get final weights\ngraph =((dis + same_state + pop.T) * dis / dis)\ngraph = graph.where((graph > 1), 0) \ngraph", "_____no_output_____" ], [ "## preparation of final dataframe as array for 'NetworkX' \ngr_array = np.array(graph)\ngr_array", "_____no_output_____" ], [ "## SHORTEST PATH ALGORITHM aka Dijkstra's algorithm\n## import NetworkX\nimport networkx as nx", "_____no_output_____" ], [ "## convert the numpy array to GRAPH data structure, with has nodes (cities) and edges (weights between nodes)\n## zeros are not taken into account, so the direction is taken into account in the built array\nGR = nx.from_numpy_array(gr_array)", "_____no_output_____" ], [ "## edges visualization (optional)\nGR.edges(data=True)", "_____no_output_____" ], [ "## nodes visualization (optional)\nGR.nodes()", "_____no_output_____" ], [ "## retrieve location of 'London_st' as source and 'London' as origin\nprint(source)\nprint(target)", "_____no_output_____" ], [ "## using networkx.single_source_dijkstra()\n## the command computes shortest paths and lengths in a weighted graph G\n## it returns a tuple containing the 'length' of the shortest path, and the 'path' itself\nlength, path = nx.single_source_dijkstra(GR, 6622, 31)\nprint(length, path)", "_____no_output_____" ], [ "## get the names of the 'path' retrieving from 'ct' original object\nct.loc[path, 'city']", "_____no_output_____" ], [ "## quanti giorni per fare il giro del mondo?\ndays_to_london = length * 0.041667\ndays_to_london", "_____no_output_____" ], [ "## draw the graph (drop if too long to compute)\n##nx.draw(GR)", "_____no_output_____" ], [ "##carica i dati del percorso ottimo in un dataframe\npercorso=ct.loc[path]", "_____no_output_____" ], [ "##ricavo lista di \"id\" per filtrare il dataframe orignale con le città (per i dati di lon e lat)\nfiltro = percorso['id'].tolist()", "_____no_output_____" ], [ "##crea dataframe con i dati origari di \"cities\" per le città che compongono il percorso ottimo \ncittà= cities[cities['id'].isin(filtro)]", "_____no_output_____" ], [ "##imposta la colonna \"id\" come indice\ncittà = città.set_index('id')", "_____no_output_____" ], [ "##ordina per gli \"id\" del filtro (quelli del percorso ottimo in ordine)\ncittà_def=città.loc[filtro]", "_____no_output_____" ], [ "##sostituisce in \"città_def\" il nome della città di partenza con \"London_st\"\ncittà_def.iloc[0,0]='London_St' ", "_____no_output_____" ], [ "##sostituisce la coordinata di longitudine della città di partenza con quella leggermente spostata per far partire il percorso\ncittà_def.iloc[0,3]='0.0725'", "_____no_output_____" ], [ "#import delle 
librerie per i grafici\nimport matplotlib.pyplot as plt\nimport plotly.graph_objects as go", "_____no_output_____" ], [ "##crea il primo grafico con le traiettorie tra le città sulla base della mappa mondiale\nfig = go.Figure(data=go.Scattergeo(\n lat = città_def['lat'],\n lon =città_def['lng'],\n mode = 'lines',\n line = dict(width = 1, color = 'blue'),))", "_____no_output_____" ], [ "##aggiorna il grafico aggiungendo i marker per le città visitate con nome della città se selezionate con mouse, titolo e varia la tipologia di mappa sullo sfondo\nfig.add_trace(go.Scattergeo(\n locationmode = 'country names',\n lon = città_def['lng'],\n lat = città_def['lat'],\n hoverinfo = 'text',\n text = città_def['city'],\n name = \"Cities\",\n mode = 'markers',\n marker = dict(\n size = 4,\n color = 'rgb(102,102,102)',\n line = dict(\n width = 4,\n color = 'rgba(68, 68, 68, 0)'\n )\n )))\nfig.update_geos(projection_type=\"natural earth\")\nfig.update_layout(title_text='Shortest Path Around the World')\nfig.show()", "_____no_output_____" ] ] ]
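[ "## quick sanity check of the haversine metric: recompute one great-circle distance with the\n## explicit formula and compare it to scikit-learn's result (a minimal sketch, assuming the 'dist'\n## metric and numpy from the cells above; the London/Paris coordinates are made-up demo values)\nlat1, lng1 = np.radians([51.5072, -0.1276])  # London, degrees converted to radians\nlat2, lng2 = np.radians([48.8566, 2.3522])  # Paris\na = np.sin((lat2 - lat1) / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin((lng2 - lng1) / 2) ** 2\nmanual_km = 2 * 6373 * np.arcsin(np.sqrt(a))  # same 6373 km Earth radius used above\nsklearn_km = dist.pairwise(np.array([[lat1, lng1], [lat2, lng2]]))[0, 1] * 6373\nprint(manual_km, sklearn_km)  # both values should agree (roughly 344 km)", "_____no_output_____" ] ] ]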
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb5576761e464f9b21124723a49e8135f5c00107
5,293
ipynb
Jupyter Notebook
DCRI/ipynb/05_random_forest.ipynb
Kao-PMP/Pilot_Project
79451e75b64d832644a4967f828bf5d685cd5a2d
[ "Apache-2.0" ]
null
null
null
DCRI/ipynb/05_random_forest.ipynb
Kao-PMP/Pilot_Project
79451e75b64d832644a4967f828bf5d685cd5a2d
[ "Apache-2.0" ]
null
null
null
DCRI/ipynb/05_random_forest.ipynb
Kao-PMP/Pilot_Project
79451e75b64d832644a4967f828bf5d685cd5a2d
[ "Apache-2.0" ]
null
null
null
28.005291
124
0.453996
[ [ [ "Purpose: Random Forest for survival \nProgrammer:Hillary Mulder \nDate: October 2019 \nModification History: \n", "_____no_output_____" ] ], [ [ "#library(tidyverse)\nlibrary(ranger)\nlibrary(survival)\nlibrary(dplyr)\nlibrary(caret)", "_____no_output_____" ], [ "hyp=read.csv(\"Data/analysis_ds.csv\")\nhyp$allhat=ifelse(hyp$study.1=='ALLHAT', 1, 0)\nhyp$aimhigh=ifelse(hyp$study.1=='AIMHIGH', 1, 0)\nhyp$accord=ifelse(hyp$study.1=='ACCORD', 1, 0)\n\ntrain=hyp[which(hyp$train==1),]\ntest=hyp[which(hyp$test==1),]\n\n#train2=train[complete.cases(train[, c(1:3, 5, 6, 9:19, 20:22, 50:53)]), c(1:3, 5, 6, 9:19, 20:22, 50:53)]\n#colnames(train2)\ntrain2=train[complete.cases(train[, c(2, 3, 5, 6, 9:19, 20:22, 56:58)]), c(2, 3, 5, 6, 9:19, 20:22, 56:58)]\ncolnames(train2)\ntest2=test[complete.cases(test[, c(2, 3, 5, 6, 9:19, 20:22, 56:58)]), c(2, 3, 5, 6, 9:19, 20:22, 56:58)]\nppr=preProcess(train2, method=c('center', 'scale'))\ntrainx=predict(ppr, newdata=train2)\ntrainx=data.frame(trainx, train2[,52:53])\n\ntestx=predict(ppr, newdata=test2)\ntestx=data.frame(testx, test2[,52:53])", "_____no_output_____" ], [ "r0 = ranger(Surv(tdeath, death)~age+Sex+Race2+BMI+Toba+Htn+HxDM+HxMIStr+revasc+BP.s+BP.d+\n LDL+HDL+TChol+Trig+HMG+asprin+antihyp+study.1,\n data=train2, seed=354, replace=F, verbose=T, num.threads=10, num.trees=100, importance=\"none\")", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code" ] ]
cb557a33959a130089e5f047b65f345e364c37f3
94,442
ipynb
Jupyter Notebook
Python/Pandas/01-Installation/Pandas Demo.ipynb
marflejs/code_snippets
2e928ed7f5eb3a21cccc3915cf797ab6a8d30e2b
[ "MIT" ]
9,588
2017-03-21T16:07:40.000Z
2022-03-31T08:43:39.000Z
Python/Pandas/01-Installation/Pandas Demo.ipynb
marflejs/code_snippets
2e928ed7f5eb3a21cccc3915cf797ab6a8d30e2b
[ "MIT" ]
135
2017-04-29T15:28:11.000Z
2022-03-27T19:20:49.000Z
Python/Pandas/01-Installation/Pandas Demo.ipynb
marflejs/code_snippets
2e928ed7f5eb3a21cccc3915cf797ab6a8d30e2b
[ "MIT" ]
20,939
2017-03-27T14:42:56.000Z
2022-03-31T16:41:14.000Z
44.485163
113
0.35066
[ [ [ "import pandas as pd", "_____no_output_____" ], [ "df = pd.read_csv('data/survey_results_public.csv')", "_____no_output_____" ], [ "df.tail(10)", "_____no_output_____" ], [ "df.shape", "_____no_output_____" ], [ "pd.set_option('display.max_columns', 85)\npd.set_option('display.max_rows', 85)", "_____no_output_____" ], [ "schema_df = pd.read_csv('data/survey_results_schema.csv')", "_____no_output_____" ], [ "schema_df", "_____no_output_____" ], [ "pd", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb557a576037b98f72893e330082f465f3b59772
126,926
ipynb
Jupyter Notebook
5PandasFundations/.ipynb_checkpoints/3Pandas_Fundations_sample_duplicate_and_merging-checkpoint.ipynb
micolsi/DataSciencePythonCourse
8f02b4dd6b3302b18fecefb489725d4d397f0bea
[ "MIT" ]
5
2020-09-12T17:16:12.000Z
2021-02-03T01:37:02.000Z
5PandasFundations/.ipynb_checkpoints/3Pandas_Fundations_sample_duplicate_and_merging-checkpoint.ipynb
micolsi/DataSciencePythonCourse
8f02b4dd6b3302b18fecefb489725d4d397f0bea
[ "MIT" ]
null
null
null
5PandasFundations/.ipynb_checkpoints/3Pandas_Fundations_sample_duplicate_and_merging-checkpoint.ipynb
micolsi/DataSciencePythonCourse
8f02b4dd6b3302b18fecefb489725d4d397f0bea
[ "MIT" ]
4
2020-05-22T12:57:49.000Z
2021-02-03T01:37:07.000Z
31.132205
468
0.357964
[ [ [ "## Sampling\n\nYou can get a randomly rows of the dataset. It is very usefull in training machine learning models.\nWe will use the dataset about movie reviewers obtained of [here](http://grouplens.org/datasets/movielens/100k/).", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "# read a dataset of movie reviewers into a DataFrame\nuser_cols = ['user_id', 'age', 'gender', 'occupation', 'zip_code']\nusers = pd.read_csv('./dataset/u.user', sep='|', header=None, names=user_cols, index_col='user_id')\nusers.head()", "_____no_output_____" ], [ "# sample 3 rows from the DataFrame without replacement (new in pandas 0.16.1)\nusers.sample(n=3)", "_____no_output_____" ], [ "#use the 'random_state' parameter for reproducibility\nusers.sample(n=3, random_state=42)", "_____no_output_____" ], [ "# sample 75% of the DataFrame's rows without replacement\ntrain = users.sample(frac=0.75, random_state=99)", "_____no_output_____" ], [ "# store the remaining 25% of the rows in another DataFrame\ntest = users.loc[~users.index.isin(train.index), :]", "_____no_output_____" ], [ "train.head()", "_____no_output_____" ], [ "test.head()", "_____no_output_____" ], [ "# detect duplicate zip codes: True if an item is identical to a previous item\nusers.zip_code.duplicated().tail()", "_____no_output_____" ], [ "# count the duplicate items (True becomes 1, False becomes 0)\nusers.zip_code.duplicated().sum()", "_____no_output_____" ], [ "# detect duplicate DataFrame rows: True if an entire row is identical to a previous row\nusers.duplicated().tail()", "_____no_output_____" ] ], [ [ "### Logic for duplicated:\n\n+ keep='first' (default): Mark duplicates as True except for the first occurrence.\n+ keep='last': Mark duplicates as True except for the last occurrence.\n+ keep=False: Mark all duplicates as True.", "_____no_output_____" ] ], [ [ "# examine the duplicate rows (ignoring the first occurrence)\nusers.loc[users.duplicated(keep='first'), :]", "_____no_output_____" ], [ "# examine the duplicate rows (ignoring the last occurrence)\nusers.loc[users.duplicated(keep='last'), :]", "_____no_output_____" ], [ "# examine the duplicate rows (including all duplicates)\nusers.loc[users.duplicated(keep=False), :]", "_____no_output_____" ], [ "# only consider a subset of columns when identifying duplicates\nusers.duplicated(subset=['age', 'zip_code']).sum()", "_____no_output_____" ], [ "# drop the duplicate rows (inplace=False by default)\nusers.drop_duplicates(keep='first').shape", "_____no_output_____" ], [ "users.drop_duplicates(keep='last').shape", "_____no_output_____" ], [ "users.drop_duplicates(keep=False).shape", "_____no_output_____" ] ], [ [ "## Appending pandas Series", "_____no_output_____" ] ], [ [ "# Load 'sales-jan-2015.csv' into a DataFrame: jan\njan = pd.read_csv('./dataset/sales-jan-2015.csv', parse_dates=True, index_col='Date')\n\n# Load 'sales-feb-2015.csv' into a DataFrame: feb\nfeb = pd.read_csv('./dataset/sales-feb-2015.csv', parse_dates=True, index_col='Date')\n\n# Load 'sales-mar-2015.csv' into a DataFrame: mar\nmar = pd.read_csv('./dataset/sales-mar-2015.csv', parse_dates=True, index_col='Date')\n\n# Extract the 'Units' column from jan: jan_units\njan_units = pd.DataFrame(jan['Units'])\n\n# Extract the 'Units' column from feb: feb_units\nfeb_units = pd.DataFrame(feb['Units'])\n\n# Extract the 'Units' column from mar: mar_units\nmar_units = pd.DataFrame(mar['Units'])\n\n# Append feb_units and then mar_units to 
jan_units: quarter1\nquarter1 = jan_units.append(feb_units).append(mar_units)\n\n# Print the first slice from quarter1\nprint(quarter1.loc['jan 27, 2015':'feb 2, 2015'])\n\n# Print the second slice from quarter1\nprint(quarter1.loc['feb 26, 2015':'mar 7, 2015'])\n\n# Compute & print total sales in quarter1\nprint(quarter1.sum())", " Units\nDate \n2015-01-27 07:11:55 18\n2015-02-02 08:33:01 3\n2015-02-02 20:54:49 9\n Units\nDate \n2015-02-26 08:57:45 4\n2015-02-26 08:58:51 1\n2015-03-06 10:11:45 17\n2015-03-06 02:03:56 17\nUnits 642\ndtype: int64\n" ], [ "df_quarter= pd.DataFrame(quarter1, columns = ['Units'])", "_____no_output_____" ], [ "df_quarter", "_____no_output_____" ], [ "jan_units.reset_index(inplace = True)\nfeb_units.reset_index(inplace = True)\nmar_units.reset_index(inplace = True)\nquarter_columns = pd.concat([jan_units, feb_units, mar_units], axis= 1, ignore_index=False)", "_____no_output_____" ], [ "df_quarter_columns= pd.DataFrame(quarter_columns)", "_____no_output_____" ], [ "df_quarter_columns", "_____no_output_____" ] ], [ [ "## Reading multiple files to build a DataFrame\n\nIt is often convenient to build a large DataFrame by parsing many files as DataFrames and concatenating them all at once. You'll do this here with three files, but, in principle, this approach can be used to combine data from dozens or hundreds of files.\n\nHere, you'll work with DataFrames compiled from The Guardian's Olympic medal dataset.", "_____no_output_____" ] ], [ [ "medals=[]\nmedal_types = ['gold','silver','bronze']\nfor medal in medal_types:\n\n # Create the file name: file_name\n file_name = \"./dataset/olympic-medals/%s_top5.csv\" % medal\n \n # Create list of column names: columns\n columns = ['Country', medal]\n \n # Read file_name into a DataFrame: df\n medal_df = pd.read_csv(file_name, header=0, index_col='Country', names=columns)\n\n # Append medal_df to medals\n medals.append(medal_df)\n\n# Concatenate medals horizontally: medals\nmedals = pd.concat(medals, axis='columns', sort = True)\n\n# Print medals\npd.DataFrame(medals)", "_____no_output_____" ] ], [ [ "## Concatenating vertically to get MultiIndexed rows\n\nWhen stacking a sequence of DataFrames vertically, it is sometimes desirable to construct a MultiIndex to indicate the DataFrame from which each row originated. This can be done by specifying the keys parameter in the call to pd.concat(), which generates a hierarchical index with the labels from keys as the outermost index label. So you don't have to rename the columns of each DataFrame as you load it. 
Instead, only the Index column needs to be specified.\n\n", "_____no_output_____" ] ], [ [ "medals=[]\nfor medal in medal_types:\n\n file_name = \"./dataset/olympic-medals/%s_top5.csv\" % medal\n\n # Read file_name into a DataFrame: medal_df\n medal_df = pd.read_csv(file_name, index_col='Country')\n \n # Append medal_df to medals\n medals.append(medal_df)\n\n# Concatenate medals: medals\nmedals = pd.concat(medals, keys=['bronze', 'silver', 'gold'])\n\n# Print medals\npd.DataFrame(medals)", "_____no_output_____" ] ], [ [ "## Concatenating DataFrames with inner join", "_____no_output_____" ] ], [ [ "medals=[]\nfor medal in medal_types:\n\n file_name = \"./dataset/olympic-medals/%s_top5.csv\" % medal\n\n # Read file_name into a DataFrame: medal_df\n medal_df = pd.read_csv(file_name, index_col='Country')\n \n # Append medal_df to medals\n medals.append(medal_df)\n\n# Concatenate medal_list horizontally using an inner join: medals\nmedals = pd.concat(medals, keys=['bronze', 'silver', 'gold'], axis=1, join='inner')\n\n# Print medals\npd.DataFrame(medals)\n", "_____no_output_____" ] ], [ [ "## Slicing MultiIndexed DataFrames\n", "_____no_output_____" ] ], [ [ "# Sort the entries of medals\nmedals_sorted = medals.sort_index(level=0)\n\n# Print the number of Bronze medals won by Germany\nprint(medals_sorted.loc[('bronze','Germany')])\n\n# Print data about silver medals\nprint(medals_sorted.loc['silver'])\n\n# Create alias for pd.IndexSlice: idx\nidx = pd.IndexSlice\n\n# Print all the data on medals won by the United Kingdom\nprint(medals_sorted.loc[idx[:,'United Kingdom'], :])\n\n", "Total 407.0\nName: (bronze, Germany), dtype: float64\n Total\nCountry \nFrance 461.0\nItaly 394.0\nSoviet Union 627.0\nUnited Kingdom 591.0\nUnited States 1195.0\n Total\n Country \nbronze United Kingdom 498.0\ngold United Kingdom 505.0\nsilver United Kingdom 591.0\n" ] ], [ [ "## Merging", "_____no_output_____" ] ], [ [ "user_usage = pd.read_csv(\"./dataset/merge/user_usage.csv\")\nuser_device = pd.read_csv(\"./dataset/merge/user_device.csv\")\ndevices = pd.read_csv(\"./dataset/merge/android_devices.csv\")", "_____no_output_____" ], [ "user_usage.head()", "_____no_output_____" ], [ "user_device.head()", "_____no_output_____" ], [ "devices.head()", "_____no_output_____" ], [ "devices.rename(columns={\"Retail Branding\": \"manufacturer\"}, inplace=True)", "_____no_output_____" ], [ "devices.head()", "_____no_output_____" ] ], [ [ "## First merge\n\nWe're trying to get the average usage figures for different types of devices. So we need to get the user's device code from user_usage as a column on user_usage, and then get the device's manufacturer from devices as a column on the result.\n\nFirst, we merge user_usage with user_device with \"use_id\" as our common column", "_____no_output_____" ] ], [ [ "result = pd.merge(user_usage,\n user_device[['use_id', 'platform', 'device']],\n on='use_id')\nresult.head()", "_____no_output_____" ] ], [ [ "An inner merge, (or inner join) keeps only the common values in both the left and right dataframes for the result. In our example above, only the rows that contain use_id values that are common between user_usage and user_device remain in the result dataset. 
We can validate this by looking at how many values are common:", "_____no_output_____" ] ], [ [ "print(\"user_usage dimensions: {}\".format(user_usage.shape))\nprint(\"user_device dimensions: {}\".format(user_device[['use_id', 'platform', 'device']].shape))\nprint(\"Result dimensions : {}\".format(result.shape))", "user_usage dimensions: (240, 4)\nuser_device dimensions: (272, 3)\nResult dimensions : (159, 6)\n" ] ], [ [ "## Left merge example\n\nA left merge, or left join, between two dataframes keeps all of the rows and values from the left dataframe, in this case \"user_usage\". Rows from the right dataframe will be kept in the result only where there is a match in the merge variable in the right dataframe, and NaN values will be in the result where not.", "_____no_output_____" ] ], [ [ "result = pd.merge(user_usage,\n                 user_device[['use_id', 'platform', 'device']],\n                 on='use_id', how='left')\nprint(\"user_usage dimensions: {}\".format(user_usage.shape))\nprint(\"result dimensions: {}\".format(result.shape))\nprint(\"There are {} missing values in the result.\".format(\n    result['device'].isnull().sum()))", "user_usage dimensions: (240, 4)\nresult dimensions: (240, 6)\nThere are 81 missing values in the result.\n" ], [ "result.head()", "_____no_output_____" ] ], [ [ "## Right merge example\n\nA right merge, or right join, between two dataframes keeps all of the rows and values from the right dataframe, in this case \"user_device\". Rows from the left dataframe will be kept where there is a match in the merge variable, and NaN values will be in the result where not.", "_____no_output_____" ] ], [ [ "result = pd.merge(user_usage,\n                 user_device[['use_id', 'platform', 'device']],\n                 on='use_id', how='right')\nprint(\"user_device dimensions: {}\".format(user_device.shape))\nprint(\"result dimensions: {}\".format(result.shape))\nprint(\"There are {} missing values in the 'monthly_mb' column in the result.\".format(\n    result['monthly_mb'].isnull().sum()))\nprint(\"There are {} missing values in the 'platform' column in the result.\".format(\n    result['platform'].isnull().sum()))", "user_device dimensions: (272, 6)\nresult dimensions: (272, 6)\nThere are 113 missing values in the 'monthly_mb' column in the result.\nThere are 0 missing values in the 'platform' column in the result.\n" ] ], [ [ "## Outer merge example\n\nA full outer join, or outer merge, keeps all rows from the left and right dataframe in the result. Rows will be aligned where there are shared join values between the left and right, and rows with NaN values in either the left-originating or right-originating columns will be left in the result where there is no shared join value.\n\nIn the final result, a subset of rows should have no missing values. These rows are the rows where there was a match between the merge column in the left and right dataframes. 
These rows are the same values as found by our inner merge result before.", "_____no_output_____" ] ], [ [ "print(\"There are {} unique values of use_id in our dataframes.\".format(\n    pd.concat([user_usage['use_id'], user_device['use_id']]).unique().shape[0]))\nresult = pd.merge(user_usage,\n                 user_device[['use_id', 'platform', 'device']],\n                 on='use_id', how='outer', indicator=True)\n\nprint(\"Outer merge result has {} rows.\".format(result.shape))\n\nprint(\"There are {} rows with no missing values.\".format(\n    (result.apply(lambda x: x.isnull().sum(), axis=1) == 0).sum()))", "There are 353 unique values of use_id in our dataframes.\nOuter merge result has (353, 7) rows.\nThere are 159 rows with no missing values.\n" ], [ "result.iloc[[0, 1, 200,201, 350,351]]", "_____no_output_____" ], [ "# First, add the platform and device to the user usage.\nresult = pd.merge(user_usage,\n                 user_device[['use_id', 'platform', 'device']],\n                 on='use_id',\n                 how='left')\n\n# Now, based on the \"device\" column in result, match the \"Model\" column in devices.\ndevices.rename(columns={\"Retail Branding\": \"manufacturer\"}, inplace=True)\nresult = pd.merge(result, \n                 devices[['manufacturer', 'Model']],\n                 left_on='device',\n                 right_on='Model',\n                 how='left')\n\nresult.head()", "_____no_output_____" ], [ "devices[devices.Device.str.startswith('GT')]", "_____no_output_____" ] ], [ [ "## Calculating statistics on final result\n\nWith merges complete, we can simply calculate statistics for users grouped by the manufacturer of their device.", "_____no_output_____" ] ], [ [ "result.groupby(\"manufacturer\").agg({\n    \"outgoing_mins_per_month\": \"mean\",\n    \"outgoing_sms_per_month\": \"mean\",\n    \"monthly_mb\": \"mean\",\n    \"use_id\": \"count\"\n    })", "_____no_output_____" ],
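[ "## a tiny, self-contained illustration of the four merge types discussed above\n## (a minimal sketch on made-up toy dataframes, not part of the user_usage analysis)\nleft = pd.DataFrame({'key': [1, 2, 3], 'l_val': ['a', 'b', 'c']})\nright = pd.DataFrame({'key': [2, 3, 4], 'r_val': ['x', 'y', 'z']})\nfor how in ['inner', 'left', 'right', 'outer']:\n    merged = pd.merge(left, right, on='key', how=how)\n    print(how, '->', len(merged), 'rows')  # inner: 2, left: 3, right: 3, outer: 4", "_____no_output_____" ] ] ]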
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
cb5587551660ed4803536f0b3ae73af35f666c02
948,116
ipynb
Jupyter Notebook
homework8/.ipynb_checkpoints/HW8-checkpoint.ipynb
yan6pz/AML-Projects
56eaf40ece472a3865cac95752e53702de3352da
[ "MIT" ]
null
null
null
homework8/.ipynb_checkpoints/HW8-checkpoint.ipynb
yan6pz/AML-Projects
56eaf40ece472a3865cac95752e53702de3352da
[ "MIT" ]
null
null
null
homework8/.ipynb_checkpoints/HW8-checkpoint.ipynb
yan6pz/AML-Projects
56eaf40ece472a3865cac95752e53702de3352da
[ "MIT" ]
null
null
null
810.355556
849,560
0.944755
[ [ [ "# Homework 8 - Artificial Neural Networks with PyTorch", "_____no_output_____" ], [ "## About ", "_____no_output_____" ], [ "### In this homework, you will get your feet wet with deep learning using the PyTorch deep learning platform. This will involve:\n* Preparing data \n* Learning about the components of a deep learning pipeline\n* Setting up a model, a loss function, and an optimizer\n* Setting up training and testing loops\n* Using a visualizer like tensorboard to monitor logged data\n\n*This homework is due __April 15th 2019__. Training neural networks takes some time, particularly on CPUs so start early.* \n", "_____no_output_____" ], [ "## Dev Environment\n### Working on Google Colab\nYou may choose to work locally or on Google Colaboratory. You have access to free compute through this service. \n1. Visit https://colab.research.google.com/drive \n2. Navigate to the **`Upload`** tab, and upload your `HW8.ipynb`\n3. Now on the top right corner, under the `Comment` and `Share` options, you should see a `Connect` option. Once you are connected, you will have access to a VM with 12GB RAM, 50 GB disk space and a single GPU. The dropdown menu will allow you to connect to a local runtime as well.\n\n**Notes:** \n* **If you do not have a working setup for Python 3, this is your best bet. It will also save you from heavy installations like `tensorflow` if you don't want to deal with those.**\n* ***There is a downside*. You can only use this instance for a single 12-hour stretch, after which your data will be deleted, and you would have redownload all your datasets, any libraries not already on the VM, and regenerate your logs**.\n\n\n### Installing PyTorch and Dependencies\n\nThe instructions for installing and setting up PyTorch can be found at https://pytorch.org/get-started/locally/. Make sure you follow the instructions for your machine. For any of the remaining libraries used in this assignment:\n* We have provided a `hw8_requirements.txt` file on the homework web page. \n* Download this file, and in the same directory you can run `pip3 install -r hw8_requirements.txt`\n\nCheck that PyTorch installed correctly by running the following:", "_____no_output_____" ] ], [ [ "import torch\ntorch.rand(5, 3)", "_____no_output_____" ] ], [ [ "The output should look something like\n\n```python\ntensor([[0.3380, 0.3845, 0.3217],\n [0.8337, 0.9050, 0.2650],\n [0.2979, 0.7141, 0.9069],\n [0.1449, 0.1132, 0.1375],\n [0.4675, 0.3947, 0.1426]])\n```\n\n### Let's get started with the assignment.", "_____no_output_____" ], [ "## Instructions\n### Part 1 - Datasets and Dataloaders (10 points)\n\nIn this section we will download the MNIST dataset using PyTorch's own API.\n\nHelpful Resources:\n* https://pytorch.org/docs/stable/torchvision/datasets.html#mnist\n* https://pytorch.org/docs/stable/torchvision/transforms.html\n* https://pytorch.org/tutorials/beginner/data_loading_tutorial.html\n\n\nThe `torchvision` package consists of popular datasets, model architectures, and common image transformations for computer vision. We are particularly concerned with `torchvision.datasets` and `torchvision.transforms`. 
Check out the API for these modules in the links provided above.\n\n**Create a directory named `hw8_data` with the following command**.", "_____no_output_____" ] ], [ [ "!mkdir hw8_data", "mkdir: hw8_data: File exists\r\n" ] ], [ [ "\n**Now use `torch.datasets.MNIST` to load the Train and Test data into `hw8_data`.** \n* ** Use the directory you created above as the `root` directory for your datasets**\n* ** Populate the `transformations` variable with any transformations you would like to perform on your data.** (Hint: You will need to do at least one)\n* **Pass your `transformations` variable to `torch.datasets.MNIST`. This allows you to perform arbitrary transformations to your data at loading time.**", "_____no_output_____" ] ], [ [ "from torchvision import datasets, transforms\n\ntransformations = transforms.Compose([\n transforms.ToTensor(),\n transforms.Normalize(\n (0.1000,), (0.3000,))\n ])\n\nmnist_train = datasets.MNIST(root='./hw8_data', train=True, download=True, transform=transformations) \nmnist_test = datasets.MNIST(root='./hw8_data', train=False, download=True, transform=transformations) ", "\r0.00B [00:00, ?B/s]" ] ], [ [ "Check that your torch datasets have been successfully downloaded into your data directory by running the next two cells. \n\n* Each will output some metadata about your dataset. \n* Check that the training set has 60000 datapoints and a `Root Location: hw8_data`\n* Check that the testing (__also validation in our case__) set has 10000 datapoints and `Root Location: hw8_data`", "_____no_output_____" ], [ "Notice that these datasets implement the python `__len__` and `__getitem__` functions. Each element in the dataset should be a 2-tuple. What does yours look like?", "_____no_output_____" ] ], [ [ "print(len(mnist_train))\nprint(len(mnist_train[0]))\nmnist_train", "60000\n2\n" ], [ "print(len(mnist_test))\nprint(len(mnist_test[0]))\nmnist_test", "10000\n2\n" ] ], [ [ "**Any file in our dataset will now be read at runtime, and the specified transformations we need on it will be applied when we need it.**. \n\nWe could iterate through these directly using a loop, but this is not idiomatic. PyTorch provides us with this abstraction in the form of `DataLoaders`. The module of interest is `torch.utils.data.DataLoader`. \n\n`DataLoader` allows us to do lots of useful things\n* Group our data into batches\n* Shuffle our data\n* Load the data in parallel using `multiprocessing` workers\n\n**Use `DataLoader` to create a loader for the training set and one for the testing set**\n* **Use a `batch_size` of 32 to start, you may change it if you wish.**\n* **Set the `shuffle` parameter to `True`.** \n", "_____no_output_____" ] ], [ [ "from torch.utils.data import DataLoader\n\ntrain_loader = torch.utils.data.DataLoader(dataset=mnist_train,\n batch_size=32, \n shuffle=True)\ntest_loader = torch.utils.data.DataLoader(dataset=mnist_test,\n batch_size=len(mnist_test), \n shuffle=False)\n\nrandom_seed = 1\ntorch.backends.cudnn.enabled = False\ntorch.manual_seed(random_seed)", "_____no_output_____" ] ], [ [ "The following function is adapted from `show_landmarks_batch` at \nhttps://pytorch.org/tutorials/beginner/data_loading_tutorial.html#iterating-through-the-dataset . 
\n\nRun the following cell to see that your loader provides a random `batch_size` number of data points.", "_____no_output_____" ] ], [ [ "import matplotlib.pyplot as plt\nfrom torchvision import utils\n%matplotlib inline\n\ndef show_mnist_batch(sample_batched):\n \"\"\"Show images for a batch of samples.\"\"\"\n images_batch = sample_batched[0]\n batch_size = len(images_batch)\n im_size = images_batch.size(2)\n\n grid = utils.make_grid(images_batch)\n plt.imshow(grid.numpy().transpose((1, 2, 0)))\n plt.title('Batch from DataLoader')\n \n# Displays the first batch of images\nfor i, batch in enumerate(train_loader):\n if i==1:\n break\n show_mnist_batch(batch)", "WARNING:matplotlib.image:Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers).\n" ] ], [ [ "### Part 2 - Models, Loss Functions and Optimizers (10 points)\n\nIn this section, we will do the following:\n* Learn about how to build your deep learning model and define its parameters\n* Choose a loss function to optimize\n* Choose an optimization method to maximize/minimize the loss\n\nWe'll first start with a single layer neural network to do handwritten digit classification. The math may ring some bells from homework 7.\n\n`torch.nn` is the module we will be using here. You can find the API at https://pytorch.org/docs/stable/nn.html. There is also a quick summary at https://pytorch.org/tutorials/beginner/nn_tutorial.html#closing_thoughts.\n\n#### Models\n\nWe will use the following python modules in building our one layer model.\n\n* `torch.nn.Module`: Your model will be abstracted as a python class. Your python class must subclass `torch.nn.Module`. It is the base class for all neural network modules in PyTorch (Do not confuse python modules with PyTorch Modules). These implement the `forward()` function which defines how your model handles input and produces an output. Your model class can also have `torch.nn.Module`s as members, allowing nested tree like structures, and it is leveraging this that you are able to build neural networks in PyTorch. \n\n* `torch.nn.Linear`: A unit of computation in neural networks are *Layers* and PyTorch provides abstractions for layers as `nn.Modules`. These come in many forms including *Convolutional*, *Recurrent*, and *Linear*. You can find the API for linear layers here https://pytorch.org/docs/stable/nn.html#linear-layers.\n\n**Now use the information provided to define the `OneLayerModel` class below. The superclass constructor has been called for you, and this allows your subclass to access superclass methods and members.**\n* **Finish the `__init__()` function.**\n* **Finish the `forward()` function.** (Hint: Use that fact that layer modules implement their own `forward()` function)\n\n\n\n", "_____no_output_____" ] ], [ [ "from torch import nn\nclass OneLayerModel(nn.Module):\n def __init__(self, input_dim, output_dim):\n super(OneLayerModel, self).__init__()\n self.flin = nn.Linear(input_dim, output_dim)\n \n def forward(self, x):\n x = self.flin(x)\n return x", "_____no_output_____" ] ], [ [ "#### Loss Functions and Optimizers\n\nYou've defined your model but now what? It's just a black box that takes an input and spits out some numbers. You haven't yet defined what it means to be a good or bad model. \n\nA ***Loss Function*** takes what your model outputs and compares it to what it *should* have put out. It returns some meaningful value used to update your model parameters, and so train your model. 
Check out Section 21.2.1 of the textbook for more details about types of loss functions. The Loss function represents the overall goal of building this model, and the choice of loss function is very important. \n\nWe must examine our model parameters and our problem instance to see how to choose a loss function.\n* We take in a 784-dimensional vector and output 10 real values, giving our model 784 x 10 parameters. \n* It is natural given that our problem is an instance of *multi-class classification* that we would want each of our output values to model `P(y==i|x)`.\n* If we go this route, we get an added constraint that the sum of all 10 of our output values should be 1 (forming a probability distribution).\n\nTurns out there is a very convenient loss function for just our use case known as ***cross-entropy loss***. Check out this reference https://ml-cheatsheet.readthedocs.io/en/latest/loss_functions.html#cross-entropy for a little more intuition on this.\n\nOnce again, PyTorch has abstractions built in for us in the `torch.nn` module, namely `torch.nn.CrossEntropyLoss`. The API can be found at https://pytorch.org/docs/stable/nn.html#crossentropyloss. \n\nWe're still not ready to train our model because while we have some parameters, and we have some measure of how good or bad our predictions are, we have no notion of how to go about updating our parameters in order to improve our loss. \n\nThis is where ***Optimizers*** come in. In general, we have one main way of minimizing loss functions (training our models), and that is through *Stochastic Gradient Descent* https://en.wikipedia.org/wiki/Stochastic_gradient_descent. There are many variants and optimizations of this method, however, and the `torch.optim` package gives us abstractions for these. The API can be found at https://pytorch.org/docs/stable/optim.html#.", "_____no_output_____" ] ], [ [ "from torch import optim", "_____no_output_____" ] ], [ [ "### Part 3 - Training and Validation (45 points)\n\nIn this section we will learn how to use the concepts we've learned about so far to train the model we built, and validate how well it does. We also want to monitor how well our training is going while it is happening. \n\nFor this we can use a package called `tensorboardX`. You will need to install this package using `pip` or `Anaconda`, based on your dev environment. Additionally, we'll want to use a logging module called `tensorboardX.SummaryWriter`. You can consult the API here https://tensorboardx.readthedocs.io/en/latest/tutorial.html. Run the next cell to ensure that all is working well.", "_____no_output_____" ] ], [ [ "\"\"\" Try uncommenting these commands if you're facing issues here\n!pip3 install -U protobuf\n!pip3 install -U tensorflow\n!pip3 install -U tensorboardX\n\"\"\"\n%load_ext tensorboard.notebook\nfrom tensorboardX import SummaryWriter", "The tensorboard.notebook extension is already loaded. To reload it, use:\n  %reload_ext tensorboard.notebook\n" ] ], [ [ "We have provided the code to use `tensorboard` just before calling your `train` function. 
You don't have to change the top-level log directory, but you can create multiple runs (different parameters or versions of your code) just by creating subdirectories for these within your top-level directory.\n\n**Now use the information provided above to do the following:**\n* ** Instantiate a `OneLayerModel` with the appropriate input/output parameters.**\n* ** Define a cross-entropy loss function.**\n* ** Define a stochastic gradient descent optimizer for your model's parameters. Start with a learning rate of 0.001, and adjust as necessary. You can start with the vanilla `optim.SGD` optimizer, and change it if you wish.** \n* **Create a `SummaryWriter` object that will be responsible for logging our training progress into a directory called `logs/expt1` (Or whatever you wish your top-level directory to be called).**", "_____no_output_____" ] ], [ [ "model = OneLayerModel(1*28*28, 10)\n\n# Loss and optimizer\nloss = nn.CrossEntropyLoss()\nlearning_rate = 0.01\noptimizer = optim.SGD(model.parameters(), lr=learning_rate, momentum = 0.5)\nwriter = SummaryWriter('logs/expt1')", "_____no_output_____" ] ], [ [ "We've finally come to the point where we need to write our training setup. We're going to use both our training and testing (validation) sets for this. Note that traditionally, you would separate part of your training data into validation data in order to get an unbiased estimate of how your model performs, but here we'll just pretend that our testing data is our validation data. \n\n**Training a model with batches of data broadly involves the following steps:**\n1. **One `epoch` is defined as a full pass of your dataset through your model. We choose the number of epochs we wish to train our model for.**\n2. **In each epoch, set your model to train mode.** \n3. **You feed your model `batch_size` examples at a time, and receive `batch_size` number of outputs until you've gotten through your entire dataset.**\n4. **Calculate the loss function for those outputs given the labels for that batch.**\n5. **Now calculate the gradients for each model parameter.** (Hint: Your loss function object can do this for you)\n6. **Update your model parameters** (Hint: The optimizer comes in here)\n7. **Set the gradients in your model to zero for the next batch.**\n8. **After each epoch, set your model to evaluation mode.**\n9. **Now evaluate your model on the validation data. Log the total loss and accuracy over the validation data.** (Note: PyTorch does automatic gradient calculations in the background through its `Autograd` mechanism https://pytorch.org/docs/stable/notes/autograd.html. Make sure to do evaluation in a context where this is turned off!)\n\n**Complete the `train()` function below. Try to make it as general as possible, so that it can be used for improved versions of your model. 
Feel free to define as many helper functions as needed.**\n**Make sure that you do the following: **\n* **Log the *training loss* and *training accuracy* on each batch for every epoch, such that it will show up on `tensorboard`.**\n* **Log the loss on the validation set and the accuracy on the validation set every epoch**\n\n**You will need to produce the plots for these.**\n\nYou may also want to add some print statements in your training function to report progress in this notebook.", "_____no_output_____" ] ], [ [ "\ndef train(model, train_loader, val_loader, loss_func, optimizer,num_epochs=10, writer=None):\n test(model, val_loader, loss_func, 0, writer)\n for epoch in range(1, num_epochs + 1):\n train_internal(model, train_loader, loss_func, optimizer, writer, epoch)\n test(model, val_loader, loss_func, epoch, writer)\n\n\nlog_interval = 500\n\ndef train_internal(model, train_loader, loss_func, optimizer, writer, epoch):\n model.train()\n for batch_id, (data, target) in enumerate(train_loader):\n data=data.reshape(len(data),-1)\n loss = loss_func(model(data), target)\n loss.backward()\n optimizer.step()\n optimizer.zero_grad()\n loss_item = loss.item()\n \n with torch.no_grad():\n output = model(data)\n predicted = torch.argmax(output, dim=1)\n train_accuracy = predicted.eq(target.data.view_as(predicted)).float().mean()\n writer.add_scalars('Training', {'loss':loss_item,\n 'accuracy': train_accuracy.item()}, batch_id)\n \n if batch_id % log_interval == 0:\n print('Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f} Accuracy: {:.6f}'.format(\n epoch, batch_id * len(data), len(train_loader.dataset),\n 100. * batch_id / len(train_loader), loss_item, train_accuracy))\n \n\ndef test(model, val_loader, loss_func, epoch_num, writer):\n model.eval()\n loss_item = 0\n correct = 0\n with torch.no_grad():\n for data, target in val_loader:\n data=data.reshape(len(data),-1)\n output = model(data)\n loss_item += loss_func(output, target)\n pred = torch.argmax(output, dim=1)\n correct += pred.eq(target.data.view_as(pred)).sum()\n \n accuracy = 100. * correct.item()/ len(val_loader.dataset)\n loss = loss_item.item()/len(val_loader)\n writer.add_scalar('Validation set loss', loss, epoch_num)\n writer.add_scalar('Validation set accuracy', accuracy, epoch_num)\n \n print('\\nTest set: Avg. loss: {:.4f}, Accuracy: {}/{} ({:.2f}%)\\n'.format(\n loss, correct, len(val_loader.dataset),accuracy))\n ", "_____no_output_____" ] ], [ [ "Finally call `train` with the relevant parameters. Run the tensorboard command on your top-level logs directory to monitor training. If there is logging data from a previous run, just delete the directory for the run, and reinstantiate the `SummaryWriter` for that run. (You may want to reinstantiate the model itself if you want to clear the model parameters too).\n\nNote : This function may take a while to complete if you're training for many epochs on a cpu. This is where it comes in handy to be running on Google Colab, or just have a GPU on hand. ", "_____no_output_____" ] ], [ [ "#%tensorboard --logdir=logs\n\ntrain(model, train_loader, test_loader, loss, optimizer, 15, writer)", "\nTest set: Avg. loss: 2.5893, Accuracy: 959/10000 (9.59%)\n\nTrain Epoch: 1 [0/60000 (0%)]\tLoss: 2.285258 Accuracy: 0.375000\nTrain Epoch: 1 [16000/60000 (27%)]\tLoss: 0.135970 Accuracy: 1.000000\nTrain Epoch: 1 [32000/60000 (53%)]\tLoss: 0.265094 Accuracy: 0.906250\nTrain Epoch: 1 [48000/60000 (80%)]\tLoss: 0.315391 Accuracy: 0.875000\n\nTest set: Avg. 
loss: 0.3014, Accuracy: 9131/10000 (91.31%)\n\nTrain Epoch: 2 [0/60000 (0%)]\tLoss: 0.416031 Accuracy: 0.875000\nTrain Epoch: 2 [16000/60000 (27%)]\tLoss: 0.183989 Accuracy: 0.968750\nTrain Epoch: 2 [32000/60000 (53%)]\tLoss: 0.204093 Accuracy: 0.937500\nTrain Epoch: 2 [48000/60000 (80%)]\tLoss: 0.068926 Accuracy: 0.968750\n\nTest set: Avg. loss: 0.2847, Accuracy: 9180/10000 (91.80%)\n\nTrain Epoch: 3 [0/60000 (0%)]\tLoss: 0.323833 Accuracy: 0.906250\nTrain Epoch: 3 [16000/60000 (27%)]\tLoss: 0.198829 Accuracy: 0.906250\nTrain Epoch: 3 [32000/60000 (53%)]\tLoss: 0.441675 Accuracy: 0.843750\nTrain Epoch: 3 [48000/60000 (80%)]\tLoss: 0.250088 Accuracy: 0.875000\n\nTest set: Avg. loss: 0.2859, Accuracy: 9175/10000 (91.75%)\n\nTrain Epoch: 4 [0/60000 (0%)]\tLoss: 0.193627 Accuracy: 0.937500\nTrain Epoch: 4 [16000/60000 (27%)]\tLoss: 0.153793 Accuracy: 0.968750\nTrain Epoch: 4 [32000/60000 (53%)]\tLoss: 0.503042 Accuracy: 0.875000\nTrain Epoch: 4 [48000/60000 (80%)]\tLoss: 0.048945 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.2764, Accuracy: 9247/10000 (92.47%)\n\nTrain Epoch: 5 [0/60000 (0%)]\tLoss: 0.469417 Accuracy: 0.812500\nTrain Epoch: 5 [16000/60000 (27%)]\tLoss: 0.185711 Accuracy: 0.937500\nTrain Epoch: 5 [32000/60000 (53%)]\tLoss: 0.159629 Accuracy: 0.968750\nTrain Epoch: 5 [48000/60000 (80%)]\tLoss: 0.090220 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.2796, Accuracy: 9214/10000 (92.14%)\n\nTrain Epoch: 6 [0/60000 (0%)]\tLoss: 0.422258 Accuracy: 0.812500\nTrain Epoch: 6 [16000/60000 (27%)]\tLoss: 0.350122 Accuracy: 0.906250\nTrain Epoch: 6 [32000/60000 (53%)]\tLoss: 0.430337 Accuracy: 0.906250\nTrain Epoch: 6 [48000/60000 (80%)]\tLoss: 0.329624 Accuracy: 0.843750\n\nTest set: Avg. loss: 0.2768, Accuracy: 9215/10000 (92.15%)\n\nTrain Epoch: 7 [0/60000 (0%)]\tLoss: 0.088976 Accuracy: 1.000000\nTrain Epoch: 7 [16000/60000 (27%)]\tLoss: 0.454884 Accuracy: 0.906250\nTrain Epoch: 7 [32000/60000 (53%)]\tLoss: 0.196702 Accuracy: 0.937500\nTrain Epoch: 7 [48000/60000 (80%)]\tLoss: 0.236154 Accuracy: 0.937500\n\nTest set: Avg. loss: 0.2782, Accuracy: 9236/10000 (92.36%)\n\nTrain Epoch: 8 [0/60000 (0%)]\tLoss: 0.077039 Accuracy: 1.000000\nTrain Epoch: 8 [16000/60000 (27%)]\tLoss: 0.253117 Accuracy: 0.968750\nTrain Epoch: 8 [32000/60000 (53%)]\tLoss: 0.240023 Accuracy: 0.906250\nTrain Epoch: 8 [48000/60000 (80%)]\tLoss: 0.332529 Accuracy: 0.906250\n\nTest set: Avg. loss: 0.2839, Accuracy: 9218/10000 (92.18%)\n\nTrain Epoch: 9 [0/60000 (0%)]\tLoss: 0.261160 Accuracy: 0.937500\nTrain Epoch: 9 [16000/60000 (27%)]\tLoss: 1.006131 Accuracy: 0.937500\nTrain Epoch: 9 [32000/60000 (53%)]\tLoss: 0.236034 Accuracy: 0.937500\nTrain Epoch: 9 [48000/60000 (80%)]\tLoss: 0.535593 Accuracy: 0.906250\n\nTest set: Avg. loss: 0.2779, Accuracy: 9251/10000 (92.51%)\n\nTrain Epoch: 10 [0/60000 (0%)]\tLoss: 0.152766 Accuracy: 0.968750\nTrain Epoch: 10 [16000/60000 (27%)]\tLoss: 0.099334 Accuracy: 0.968750\nTrain Epoch: 10 [32000/60000 (53%)]\tLoss: 0.509017 Accuracy: 0.906250\nTrain Epoch: 10 [48000/60000 (80%)]\tLoss: 0.325277 Accuracy: 0.968750\n\nTest set: Avg. loss: 0.2810, Accuracy: 9224/10000 (92.24%)\n\nTrain Epoch: 11 [0/60000 (0%)]\tLoss: 0.263349 Accuracy: 0.937500\nTrain Epoch: 11 [16000/60000 (27%)]\tLoss: 0.102887 Accuracy: 1.000000\nTrain Epoch: 11 [32000/60000 (53%)]\tLoss: 0.251061 Accuracy: 0.906250\nTrain Epoch: 11 [48000/60000 (80%)]\tLoss: 0.053319 Accuracy: 1.000000\n\nTest set: Avg. 
loss: 0.2750, Accuracy: 9222/10000 (92.22%)\n\nTrain Epoch: 12 [0/60000 (0%)]\tLoss: 0.357726 Accuracy: 0.937500\nTrain Epoch: 12 [16000/60000 (27%)]\tLoss: 0.262770 Accuracy: 0.937500\nTrain Epoch: 12 [32000/60000 (53%)]\tLoss: 0.366191 Accuracy: 0.875000\nTrain Epoch: 12 [48000/60000 (80%)]\tLoss: 0.122046 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.2748, Accuracy: 9242/10000 (92.42%)\n\nTrain Epoch: 13 [0/60000 (0%)]\tLoss: 0.288103 Accuracy: 0.906250\nTrain Epoch: 13 [16000/60000 (27%)]\tLoss: 0.359828 Accuracy: 0.906250\nTrain Epoch: 13 [32000/60000 (53%)]\tLoss: 0.854552 Accuracy: 0.875000\nTrain Epoch: 13 [48000/60000 (80%)]\tLoss: 0.222035 Accuracy: 0.968750\n\nTest set: Avg. loss: 0.2726, Accuracy: 9241/10000 (92.41%)\n\nTrain Epoch: 14 [0/60000 (0%)]\tLoss: 0.178735 Accuracy: 0.968750\nTrain Epoch: 14 [16000/60000 (27%)]\tLoss: 0.099271 Accuracy: 1.000000\nTrain Epoch: 14 [32000/60000 (53%)]\tLoss: 0.134073 Accuracy: 0.937500\nTrain Epoch: 14 [48000/60000 (80%)]\tLoss: 0.392958 Accuracy: 0.906250\n\nTest set: Avg. loss: 0.2805, Accuracy: 9218/10000 (92.18%)\n\nTrain Epoch: 15 [0/60000 (0%)]\tLoss: 0.539425 Accuracy: 0.875000\nTrain Epoch: 15 [16000/60000 (27%)]\tLoss: 0.107347 Accuracy: 0.968750\nTrain Epoch: 15 [32000/60000 (53%)]\tLoss: 0.251062 Accuracy: 0.906250\nTrain Epoch: 15 [48000/60000 (80%)]\tLoss: 0.066185 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.2768, Accuracy: 9230/10000 (92.30%)\n\n" ] ], [ [ "__Final Validation Loss:__ *0.2722*\n\n__Final Validation Accuracy:__ *92.53%*", "_____no_output_____" ], [ "#### What is familiar about a 1-layer neural network with cross-entropy loss? Have you seen this before?", "_____no_output_____" ], [ "Answer: A linear SVM; it has a similar linear function and is also trained with SGD.", "_____no_output_____" ], [ "### Part 4 - Two Layer Neural Net (20 points)\n\nThe thing that makes neural networks really powerful is that they are able to do complex function approximation. As we saw earlier, we can organize the computation done in neural networks into units called *layers*. In a general neural network, there is an *input layer*, and an *output layer*. These may be the same layer as they were in our previous example. When they are not the same, there are intermediate layers known as _hidden layers_. These layers receive input from other layers and send their output to other layers. \n\nWe have been dealing with a certain type of neural network known as a __fully connected__ network. For our purposes, this just means that the output of the layer is just the dot product of its input `x` and its weights `w`, plus a bias term `b`, all wrapped in a non-linear *activation function* `F`. \n\n`y = F(w^T x + b)`.\n\nThese non-linear activation functions are very important but where in our last neural network did we apply such a function? Implicitly we applied what's known as a __softmax activation__ in order to compute cross-entropy loss https://en.wikipedia.org/wiki/Softmax_function.\n\nWe'll now try to create a neural network with one hidden layer. This means that we have to come up with an activation function for the output of that hidden layer. A famous, simple but powerful activation function is the __Rectified Linear Unit (ReLU)__ function defined as `ReLU(x) = max(x,0)`. We will use this on the output of the hidden layer.\n\n`torch.nn` has a module known as `nn.Sequential` that allows us to chain together other modules. This module implements a `forward()` function that automatically handles input-output connections etc. 
Check out the API at https://pytorch.org/docs/stable/nn.html#sequential. \n\n**Just like you did with the single layer model, define a class `TwoLayerModel`, a neural network with ReLU activation for the hidden layer. `nn.Sequential` may come in handy.**", "_____no_output_____" ] ], [ [ "import torch.nn.functional as F\n\nclass TwoLayerModel(nn.Module):\n    def __init__(self, input_dim, output_dim, hidden_layers):\n        super(TwoLayerModel, self).__init__()\n        self.relu = nn.ReLU(inplace=True)\n        #self.conv1 = nn.Conv2d(1, 20, kernel_size=5)\n        # the hidden layer size comes from the constructor argument (256 when instantiated below)\n        self.flin1 = nn.Linear(input_dim, hidden_layers)\n        self.flin2 = nn.Linear(hidden_layers, output_dim)\n    \n    def forward(self, x):\n        #x = self.relu(F.max_pool2d(self.conv1(x), 2))\n        x = self.relu(self.flin1(x))\n        x = self.flin2(x)\n        return x", "_____no_output_____" ] ], [ [ "**Once again use the information provided above to do the following:**\n* ** Instantiate a `TwoLayerModel` with the appropriate input/output/hidden layer parameters.**\n* ** Define a cross-entropy loss function again.**\n* ** Define a stochastic gradient descent optimizer for your model's parameters. Start with a learning rate of 0.001, and adjust as necessary. You can start with the vanilla `optim.SGD` optimizer, and change it if you wish.** \n* **Create a `SummaryWriter` object that will be responsible for logging our training progress into a directory called `logs/expt2` (Or whatever you wish your top-level directory to be called, just make sure the subdirectory is different from your previous SummaryWriter).**", "_____no_output_____" ] ], [ [ "model2 = TwoLayerModel(1*28*28, 10, 256)\nlearning_rate=0.01\n# Loss and optimizer\nloss2 = nn.CrossEntropyLoss()\noptimizer2 = optim.SGD(model2.parameters(), lr=learning_rate, momentum = 0.5)\nwriter2 = SummaryWriter('logs/expt2')", "_____no_output_____" ] ], [ [ "Call `train` on your two layer neural network.", "_____no_output_____" ] ], [ [ "#%tensorboard --logdir=logs\n\ntrain(model2, train_loader, test_loader, loss2, optimizer2, 15, writer2)", "\nTest set: Avg. loss: 2.3277, Accuracy: 1320/10000 (13.20%)\n\nTrain Epoch: 1 [0/60000 (0%)]\tLoss: 2.335348 Accuracy: 0.187500\nTrain Epoch: 1 [16000/60000 (27%)]\tLoss: 0.285264 Accuracy: 0.937500\nTrain Epoch: 1 [32000/60000 (53%)]\tLoss: 0.128189 Accuracy: 0.968750\nTrain Epoch: 1 [48000/60000 (80%)]\tLoss: 0.067269 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.1963, Accuracy: 9420/10000 (94.20%)\n\nTrain Epoch: 2 [0/60000 (0%)]\tLoss: 0.539982 Accuracy: 0.906250\nTrain Epoch: 2 [16000/60000 (27%)]\tLoss: 0.050927 Accuracy: 1.000000\nTrain Epoch: 2 [32000/60000 (53%)]\tLoss: 0.128570 Accuracy: 0.968750\nTrain Epoch: 2 [48000/60000 (80%)]\tLoss: 0.135588 Accuracy: 0.968750\n\nTest set: Avg. loss: 0.1367, Accuracy: 9608/10000 (96.08%)\n\nTrain Epoch: 3 [0/60000 (0%)]\tLoss: 0.270527 Accuracy: 0.906250\nTrain Epoch: 3 [16000/60000 (27%)]\tLoss: 0.037929 Accuracy: 1.000000\nTrain Epoch: 3 [32000/60000 (53%)]\tLoss: 0.106803 Accuracy: 0.968750\nTrain Epoch: 3 [48000/60000 (80%)]\tLoss: 0.109574 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.1108, Accuracy: 9674/10000 (96.74%)\n\nTrain Epoch: 4 [0/60000 (0%)]\tLoss: 0.135188 Accuracy: 0.937500\nTrain Epoch: 4 [16000/60000 (27%)]\tLoss: 0.116851 Accuracy: 0.968750\nTrain Epoch: 4 [32000/60000 (53%)]\tLoss: 0.086674 Accuracy: 1.000000\nTrain Epoch: 4 [48000/60000 (80%)]\tLoss: 0.150417 Accuracy: 0.937500\n\nTest set: Avg. 
loss: 0.0960, Accuracy: 9723/10000 (97.23%)\n\nTrain Epoch: 5 [0/60000 (0%)]\tLoss: 0.124979 Accuracy: 0.968750\nTrain Epoch: 5 [16000/60000 (27%)]\tLoss: 0.129281 Accuracy: 0.968750\nTrain Epoch: 5 [32000/60000 (53%)]\tLoss: 0.326966 Accuracy: 0.937500\nTrain Epoch: 5 [48000/60000 (80%)]\tLoss: 0.029757 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0872, Accuracy: 9747/10000 (97.47%)\n\nTrain Epoch: 6 [0/60000 (0%)]\tLoss: 0.033068 Accuracy: 1.000000\nTrain Epoch: 6 [16000/60000 (27%)]\tLoss: 0.126767 Accuracy: 0.968750\nTrain Epoch: 6 [32000/60000 (53%)]\tLoss: 0.043683 Accuracy: 1.000000\nTrain Epoch: 6 [48000/60000 (80%)]\tLoss: 0.033442 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0802, Accuracy: 9759/10000 (97.59%)\n\nTrain Epoch: 7 [0/60000 (0%)]\tLoss: 0.063860 Accuracy: 1.000000\nTrain Epoch: 7 [16000/60000 (27%)]\tLoss: 0.064125 Accuracy: 1.000000\nTrain Epoch: 7 [32000/60000 (53%)]\tLoss: 0.031584 Accuracy: 1.000000\nTrain Epoch: 7 [48000/60000 (80%)]\tLoss: 0.034758 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0768, Accuracy: 9762/10000 (97.62%)\n\nTrain Epoch: 8 [0/60000 (0%)]\tLoss: 0.050293 Accuracy: 1.000000\nTrain Epoch: 8 [16000/60000 (27%)]\tLoss: 0.029434 Accuracy: 1.000000\nTrain Epoch: 8 [32000/60000 (53%)]\tLoss: 0.016755 Accuracy: 1.000000\nTrain Epoch: 8 [48000/60000 (80%)]\tLoss: 0.013657 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0728, Accuracy: 9781/10000 (97.81%)\n\nTrain Epoch: 9 [0/60000 (0%)]\tLoss: 0.012535 Accuracy: 1.000000\nTrain Epoch: 9 [16000/60000 (27%)]\tLoss: 0.063505 Accuracy: 1.000000\nTrain Epoch: 9 [32000/60000 (53%)]\tLoss: 0.051221 Accuracy: 1.000000\nTrain Epoch: 9 [48000/60000 (80%)]\tLoss: 0.018655 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0718, Accuracy: 9770/10000 (97.70%)\n\nTrain Epoch: 10 [0/60000 (0%)]\tLoss: 0.014394 Accuracy: 1.000000\nTrain Epoch: 10 [16000/60000 (27%)]\tLoss: 0.100838 Accuracy: 0.968750\nTrain Epoch: 10 [32000/60000 (53%)]\tLoss: 0.006110 Accuracy: 1.000000\nTrain Epoch: 10 [48000/60000 (80%)]\tLoss: 0.024305 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0664, Accuracy: 9787/10000 (97.87%)\n\nTrain Epoch: 11 [0/60000 (0%)]\tLoss: 0.019937 Accuracy: 1.000000\nTrain Epoch: 11 [16000/60000 (27%)]\tLoss: 0.009565 Accuracy: 1.000000\nTrain Epoch: 11 [32000/60000 (53%)]\tLoss: 0.014097 Accuracy: 1.000000\nTrain Epoch: 11 [48000/60000 (80%)]\tLoss: 0.024803 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0659, Accuracy: 9798/10000 (97.98%)\n\nTrain Epoch: 12 [0/60000 (0%)]\tLoss: 0.015090 Accuracy: 1.000000\nTrain Epoch: 12 [16000/60000 (27%)]\tLoss: 0.013423 Accuracy: 1.000000\nTrain Epoch: 12 [32000/60000 (53%)]\tLoss: 0.028191 Accuracy: 1.000000\nTrain Epoch: 12 [48000/60000 (80%)]\tLoss: 0.015992 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0648, Accuracy: 9802/10000 (98.02%)\n\nTrain Epoch: 13 [0/60000 (0%)]\tLoss: 0.014123 Accuracy: 1.000000\nTrain Epoch: 13 [16000/60000 (27%)]\tLoss: 0.009030 Accuracy: 1.000000\nTrain Epoch: 13 [32000/60000 (53%)]\tLoss: 0.047013 Accuracy: 1.000000\nTrain Epoch: 13 [48000/60000 (80%)]\tLoss: 0.002832 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0637, Accuracy: 9798/10000 (97.98%)\n\nTrain Epoch: 14 [0/60000 (0%)]\tLoss: 0.044640 Accuracy: 1.000000\nTrain Epoch: 14 [16000/60000 (27%)]\tLoss: 0.003624 Accuracy: 1.000000\nTrain Epoch: 14 [32000/60000 (53%)]\tLoss: 0.004764 Accuracy: 1.000000\nTrain Epoch: 14 [48000/60000 (80%)]\tLoss: 0.006835 Accuracy: 1.000000\n\nTest set: Avg. 
loss: 0.0619, Accuracy: 9811/10000 (98.11%)\n\nTrain Epoch: 15 [0/60000 (0%)]\tLoss: 0.013615 Accuracy: 1.000000\nTrain Epoch: 15 [16000/60000 (27%)]\tLoss: 0.008699 Accuracy: 1.000000\nTrain Epoch: 15 [32000/60000 (53%)]\tLoss: 0.019869 Accuracy: 1.000000\nTrain Epoch: 15 [48000/60000 (80%)]\tLoss: 0.021455 Accuracy: 1.000000\n\nTest set: Avg. loss: 0.0625, Accuracy: 9806/10000 (98.06%)\n\n" ] ], [ [ "__Final Validation Loss:__ *0.0618*\n\n__Final Validation Accuracy:__ *98.11%*", "_____no_output_____" ], [ "#### Did your accuracy on the validation set improve with multiple layers? Why do you think this is?\n\nAnswer: The problem itself is not linear; most of the digits' features are not linearly separable. That is why there is a roughly 6% accuracy increase when using two linear layers with a ReLU in between; adding a third layer with convolution and max pooling similarly brings accuracy to about 98% (about 6% more than with a single layer).", "_____no_output_____" ], [ "### Part 5 - What is being learned at each layer? (10 points)\n\nSo what exactly are these weights that our network is learning at each layer? By conveniently picking our layer dimensions as perfect square numbers, we can try to visualize the weights learned at each layer as square images. Use the following function to do so for *all interesting layers* across your models. Feel free to modify the function as you wish. \n\n**At the very least, you must generate:**\n1. **The ten 28x28 weight images learned by your one layer model.**\n2. **The 256 28x28 weight images learned by the hidden layer in your two-layer model.**", "_____no_output_____" ] ], [ [ "def visualize_layer_weights(model, layer_idx, num_images, image_dim, title):\n    # Find number of rows and columns based on number of images\n    for d in range(1,num_images):\n        f = num_images/d\n        if int(f)==f:\n            dim1 = int(min(f,d))\n            dim2 = int(max(f,d))\n        if d > f:\n            break \n    # Plot weights as square images\n    fig, ax = plt.subplots(dim1, dim2)\n    \n    # At least 1 inch by 1 inch images\n    fig.set_size_inches(dim2, dim1)\n    weights = (list(model.parameters())[layer_idx])\n    fig.suptitle(title)\n    for i in range(dim1):\n        for j in range(dim2):\n            item = weights[dim2*i+j]\n            ax[i][j].imshow(item.reshape(image_dim,image_dim).detach().numpy(), cmap='gray')\n    \nvisualize_layer_weights(model, 0,10,28,'One layer NN')", "_____no_output_____" ], [ "visualize_layer_weights(model2, 0,256,28,'Two layer NN')\n", "_____no_output_____" ],
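[ "## a small, self-contained check that nn.CrossEntropyLoss matches the softmax + negative\n## log-likelihood described in Part 2 (a minimal sketch, assuming the torch and nn imports\n## from the cells above; the logits and labels here are made-up values)\nlogits = torch.tensor([[2.0, 0.5, -1.0], [0.1, 1.5, 0.3]])\nlabels = torch.tensor([0, 1])\nmanual = -torch.log_softmax(logits, dim=1)[torch.arange(2), labels].mean()\nbuiltin = nn.CrossEntropyLoss()(logits, labels)\nprint(manual.item(), builtin.item())  # the two values should agree", "_____no_output_____" ] ] ]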
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ] ]
cb5588ea8fc030175b5390fd6250cc4ad03069a0
287,953
ipynb
Jupyter Notebook
plot_folded.ipynb
adammcmaster/pirate-photometry
49d68fffa5f37df991ec1d11b0c1366dc0588fc7
[ "BSD-2-Clause" ]
null
null
null
plot_folded.ipynb
adammcmaster/pirate-photometry
49d68fffa5f37df991ec1d11b0c1366dc0588fc7
[ "BSD-2-Clause" ]
null
null
null
plot_folded.ipynb
adammcmaster/pirate-photometry
49d68fffa5f37df991ec1d11b0c1366dc0588fc7
[ "BSD-2-Clause" ]
null
null
null
1,147.223108
76,502
0.954454
[ [ [ "from astropy.table import Table\nfrom astropy.timeseries import TimeSeries\nfrom astropy import units as u\n\nfrom matplotlib import pyplot\nimport numpy\nimport seaborn\n\nfrom pathlib import Path\n\nfrom targets import TARGETS", "_____no_output_____" ], [ "for table in Path('data').glob('target_observations/*.ecsv'):\n if table.stem not in TARGETS:\n continue\n target_config = TARGETS[table.stem]\n if 'expected_periods' not in target_config:\n continue\n print(table.stem)\n timeseries = TimeSeries.read(table, time_column='timestamp', time_format='jd')\n timeseries = timeseries[timeseries['FLAGS'] == 0]\n for period in target_config['expected_periods']:\n fig = pyplot.figure()\n fig.suptitle(f'{table.stem}@{period}s')\n folded = timeseries.fold(period=period * u.second)\n\n mag = seaborn.relplot(\n x=folded.time.jd,\n y=folded['calibrated magnitude'],\n style=folded['telescope'],\n hue=folded['exposure'],\n row=folded['band'],\n row_order=('B', 'V', 'R', 'I'),\n facet_kws={'sharey': False},\n height=2.5,\n aspect=2,\n )\n for band, ax in mag.axes_dict.items():\n band_data = folded[folded['band'] == band]\n band_data['calibrated magnitude'].fill_value = numpy.nan\n ax.errorbar(\n band_data.time.jd,\n band_data['calibrated magnitude'],\n yerr=band_data['calibrated magnitude err'],\n ls='none',\n zorder=-1,\n alpha=0.5,\n elinewidth=1,\n )\n ax.invert_yaxis()\n mag.fig.suptitle(f'{table.stem}@{period}s')\n pyplot.subplots_adjust(top=0.94)\n pyplot.show()", "1SWASPJ002552.75+454445.3\n" ] ] ]
[ "code" ]
[ [ "code", "code" ] ]
cb5589f7991e85a53061ce0dd5a8fed162f94038
1,803
ipynb
Jupyter Notebook
algorithms/bubble_sort_descending/Bubble_sort_descending.ipynb
rishabhkumar112/algorithms_with_git
e47521e0dc0ff69e9518ec71e34b5f6df65bea92
[ "MIT" ]
19
2018-11-27T14:45:24.000Z
2022-03-29T10:36:31.000Z
algorithms/bubble_sort_descending/Bubble_sort_descending.ipynb
rishabhkumar112/algorithms_with_git
e47521e0dc0ff69e9518ec71e34b5f6df65bea92
[ "MIT" ]
83
2018-12-01T19:44:08.000Z
2019-01-08T19:20:32.000Z
algorithms/bubble_sort_descending/Bubble_sort_descending.ipynb
rishabhkumar112/algorithms_with_git
e47521e0dc0ff69e9518ec71e34b5f6df65bea92
[ "MIT" ]
93
2018-11-22T18:46:33.000Z
2020-10-28T11:09:44.000Z
24.04
102
0.404881
[ [ [ "# BUBBLE SORT IN DESCENDINGG ORDER\n#--------------------------------------------------------------------------------------------\nclass prac(object):\n def __init__(self):\n self.l=[]\n self.n=int(input(\"Enter for no of elements: \"))\n\n def work(self):\n for i in range(self.n):\n self.l+=[int(input(\"Enter value: \"))]\n\n for k in range(self.n):\n for j in range(self.n -1-k):\n if self.l[j]<self.l[j+1]:\n temp=self.l[j+1]\n self.l[j+1]=self.l[j]\n self.l[j]=temp\n\n print (\"The values in descending order is: \")\n \n for i in (self.l):\n print (i, end=\" \")\n\nobj=prac()\nobj.work()", "The values in ascending order is: \n89 54 23 12 6 " ] ] ]
[ "code" ]
[ [ "code" ] ]
cb558f116c8cece3319964b5cd0139ada827df31
54,567
ipynb
Jupyter Notebook
intro-to-pytorch/Part 8 - Transfer Learning (Exercises).ipynb
ArtaFarahmand/deep-learning-v2-pytorch-master
4602e0862fbe47e615814cf6f595dfea27b5b2e2
[ "MIT" ]
null
null
null
intro-to-pytorch/Part 8 - Transfer Learning (Exercises).ipynb
ArtaFarahmand/deep-learning-v2-pytorch-master
4602e0862fbe47e615814cf6f595dfea27b5b2e2
[ "MIT" ]
null
null
null
intro-to-pytorch/Part 8 - Transfer Learning (Exercises).ipynb
ArtaFarahmand/deep-learning-v2-pytorch-master
4602e0862fbe47e615814cf6f595dfea27b5b2e2
[ "MIT" ]
null
null
null
61.173767
662
0.54478
[ [ [ "# Transfer Learning\n\nIn this notebook, you'll learn how to use pre-trained networks to solved challenging problems in computer vision. Specifically, you'll use networks trained on [ImageNet](http://www.image-net.org/) [available from torchvision](http://pytorch.org/docs/0.3.0/torchvision/models.html). \n\nImageNet is a massive dataset with over 1 million labeled images in 1000 categories. It's used to train deep neural networks using an architecture called convolutional layers. I'm not going to get into the details of convolutional networks here, but if you want to learn more about them, please [watch this](https://www.youtube.com/watch?v=2-Ol7ZB0MmU).\n\nOnce trained, these models work astonishingly well as feature detectors for images they weren't trained on. Using a pre-trained network on images not in the training set is called transfer learning. Here we'll use transfer learning to train a network that can classify our cat and dog photos with near perfect accuracy.\n\nWith `torchvision.models` you can download these pre-trained networks and use them in your applications. We'll include `models` in our imports now.", "_____no_output_____" ] ], [ [ "%matplotlib inline\n%config InlineBackend.figure_format = 'retina'\n\nimport matplotlib.pyplot as plt\n\nimport torch\nfrom torch import nn\nfrom torch import optim\nimport torch.nn.functional as F\nfrom torchvision import datasets, transforms, models", "_____no_output_____" ] ], [ [ "Most of the pretrained models require the input to be 224x224 images. Also, we'll need to match the normalization used when the models were trained. Each color channel was normalized separately, the means are `[0.485, 0.456, 0.406]` and the standard deviations are `[0.229, 0.224, 0.225]`.", "_____no_output_____" ] ], [ [ "data_dir = 'assets/Cat_Dog_data'\n\n# TODO: Define transforms for the training data and testing data\ntrain_transforms = transforms.Compose([transforms.RandomRotation(30),\n transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456,0.406],\n [0.229, 0.224,0.225])])\n\ntest_transforms = transforms.Compose([transforms.Resize(255),\n transforms.CenterCrop(224),\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456,0.406],\n [0.229, 0.224,0.225])])\n\n# Pass transforms in here, then run the next cell to see how the transforms look\ntrain_data = datasets.ImageFolder(data_dir + '/train', transform=train_transforms)\ntest_data = datasets.ImageFolder(data_dir + '/test', transform=test_transforms)\n\ntrainloader = torch.utils.data.DataLoader(train_data, batch_size=64, shuffle=True)\ntestloader = torch.utils.data.DataLoader(test_data, batch_size=64)", "_____no_output_____" ] ], [ [ "We can load in a model such as [DenseNet](http://pytorch.org/docs/0.3.0/torchvision/models.html#id5). Let's print out the model architecture so we can see what's going on.", "_____no_output_____" ] ], [ [ "model = models.densenet121(pretrained=True)\nmodel", "_____no_output_____" ] ], [ [ "This model is built out of two main parts, the features and the classifier. The features part is a stack of convolutional layers and overall works as a feature detector that can be fed into a classifier. The classifier part is a single fully-connected layer `(classifier): Linear(in_features=1024, out_features=1000)`. This layer was trained on the ImageNet dataset, so it won't work for our specific problem. 
That means we need to replace the classifier, but the features will work perfectly on their own. In general, I think about pre-trained networks as amazingly good feature detectors that can be used as the input for simple feed-forward classifiers.", "_____no_output_____" ] ], [ [ "# Freeze parameters so we don't backprop through them\nfor param in model.parameters():\n param.requires_grad = False\n\nfrom collections import OrderedDict\nclassifier = nn.Sequential(OrderedDict([\n ('fc1', nn.Linear(1024, 500)),\n ('relu', nn.ReLU()),\n ('fc2', nn.Linear(500, 2)),\n ('output', nn.LogSoftmax(dim=1))\n ]))\n \nmodel.classifier = classifier", "_____no_output_____" ] ], [ [ "With our model built, we need to train the classifier. However, now we're using a **really deep** neural network. If you try to train this on a CPU like normal, it will take a long, long time. Instead, we're going to use the GPU to do the calculations. The linear algebra computations are done in parallel on the GPU leading to 100x increased training speeds. It's also possible to train on multiple GPUs, further decreasing training time.\n\nPyTorch, along with pretty much every other deep learning framework, uses [CUDA](https://developer.nvidia.com/cuda-zone) to efficiently compute the forward and backwards passes on the GPU. In PyTorch, you move your model parameters and other tensors to the GPU memory using `model.to('cuda')`. You can move them back from the GPU with `model.to('cpu')` which you'll commonly do when you need to operate on the network output outside of PyTorch. As a demonstration of the increased speed, I'll compare how long it takes to perform a forward and backward pass with and without a GPU.", "_____no_output_____" ] ], [ [ "import time", "_____no_output_____" ], [ "for device in ['cpu', 'cuda']:\n\n criterion = nn.NLLLoss()\n # Only train the classifier parameters, feature parameters are frozen\n optimizer = optim.Adam(model.classifier.parameters(), lr=0.001)\n\n model.to(device)\n\n for ii, (inputs, labels) in enumerate(trainloader):\n\n # Move input and label tensors to the GPU\n inputs, labels = inputs.to(device), labels.to(device)\n\n start = time.time()\n\n outputs = model.forward(inputs)\n loss = criterion(outputs, labels)\n\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n\n if ii==3:\n break\n \n print(f\"Device = {device}; Time per batch: {(time.time() - start)/3:.3f} seconds\")", "_____no_output_____" ] ], [ [ "You can write device agnostic code which will automatically use CUDA if it's enabled like so:\n```python\n# at beginning of the script\ndevice = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n\n...\n\n# then whenever you get a new Tensor or Module\n# this won't copy if they are already on the desired device\ninput = data.to(device)\nmodel = MyModule(...).to(device)\n```\n\nFrom here, I'll let you finish training the model. The process is the same as before except now your model is much more powerful. You should get better than 95% accuracy easily.\n\n>**Exercise:** Train a pretrained models to classify the cat and dog images. Continue with the DenseNet model, or try ResNet, it's also a good model to try out first. 
Make sure you are only training the classifier and the parameters for the features part are frozen.", "_____no_output_____" ] ], [ [ "## TODO: Use a pretrained model to classify the cat and dog images\n\ndevice = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\nmodel = models.densenet121(pretrained=True)\n\n# Freeze parameters so we don't backprop through them\nfor param in model.parameters():\n param.requires_grad = False\n \nmodel.classifier = nn.Sequential(nn.Linear(1024, 256),\n nn.ReLU(),\n nn.Dropout(0.2),\n nn.Linear(256, 2),\n nn.LogSoftmax(dim=1))\n\ncriterion = nn.NLLLoss()\n\n# Only train the classifier parameters, feature parameters are frozen\noptimizer = optim.Adam(model.classifier.parameters(), lr=0.003)\n\nmodel.to(device);", "_____no_output_____" ], [ "epochs = 1\nsteps = 0\nrunning_loss = 0\nprint_every = 5\nfor epoch in range(epochs):\n for inputs, labels in trainloader:\n steps += 1\n # Move input and label tensors to the default device\n inputs, labels = inputs.to(device), labels.to(device)\n \n logps = model.forward(inputs)\n loss = criterion(logps, labels)\n \n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n\n running_loss += loss.item()\n \n if steps % print_every == 0:\n test_loss = 0\n accuracy = 0\n model.eval()\n with torch.no_grad():\n for inputs, labels in testloader:\n inputs, labels = inputs.to(device), labels.to(device)\n logps = model.forward(inputs)\n batch_loss = criterion(logps, labels)\n \n test_loss += batch_loss.item()\n \n # Calculate accuracy\n ps = torch.exp(logps)\n top_p, top_class = ps.topk(1, dim=1)\n equals = top_class == labels.view(*top_class.shape)\n accuracy += torch.mean(equals.type(torch.FloatTensor)).item()\n \n print(f\"Epoch {epoch+1}/{epochs}.. \"\n f\"Train loss: {running_loss/print_every:.3f}.. \"\n f\"Test loss: {test_loss/len(testloader):.3f}.. \"\n f\"Test accuracy: {accuracy/len(testloader):.3f}\")\n running_loss = 0\n model.train()", "Epoch 1/1.. Train loss: 0.816.. Test loss: 0.322.. Test accuracy: 0.877\nEpoch 1/1.. Train loss: 0.336.. Test loss: 0.124.. Test accuracy: 0.968\nEpoch 1/1.. Train loss: 0.233.. Test loss: 0.094.. Test accuracy: 0.969\nEpoch 1/1.. Train loss: 0.184.. Test loss: 0.082.. Test accuracy: 0.969\nEpoch 1/1.. Train loss: 0.257.. Test loss: 0.097.. Test accuracy: 0.964\nEpoch 1/1.. Train loss: 0.240.. Test loss: 0.093.. Test accuracy: 0.963\nEpoch 1/1.. Train loss: 0.167.. Test loss: 0.056.. Test accuracy: 0.982\nEpoch 1/1.. Train loss: 0.236.. Test loss: 0.131.. Test accuracy: 0.946\nEpoch 1/1.. Train loss: 0.271.. Test loss: 0.054.. Test accuracy: 0.981\nEpoch 1/1.. Train loss: 0.220.. Test loss: 0.054.. Test accuracy: 0.983\nEpoch 1/1.. Train loss: 0.201.. Test loss: 0.058.. Test accuracy: 0.980\nEpoch 1/1.. Train loss: 0.144.. Test loss: 0.060.. Test accuracy: 0.978\nEpoch 1/1.. Train loss: 0.185.. Test loss: 0.048.. Test accuracy: 0.982\nEpoch 1/1.. Train loss: 0.168.. Test loss: 0.046.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.135.. Test loss: 0.044.. Test accuracy: 0.983\nEpoch 1/1.. Train loss: 0.182.. Test loss: 0.053.. Test accuracy: 0.980\nEpoch 1/1.. Train loss: 0.155.. Test loss: 0.090.. Test accuracy: 0.964\nEpoch 1/1.. Train loss: 0.301.. Test loss: 0.118.. Test accuracy: 0.957\nEpoch 1/1.. Train loss: 0.243.. Test loss: 0.062.. Test accuracy: 0.979\nEpoch 1/1.. Train loss: 0.172.. Test loss: 0.076.. Test accuracy: 0.971\nEpoch 1/1.. Train loss: 0.162.. Test loss: 0.063.. Test accuracy: 0.979\nEpoch 1/1.. Train loss: 0.210.. Test loss: 0.068.. 
Test accuracy: 0.975\nEpoch 1/1.. Train loss: 0.246.. Test loss: 0.049.. Test accuracy: 0.983\nEpoch 1/1.. Train loss: 0.169.. Test loss: 0.049.. Test accuracy: 0.985\nEpoch 1/1.. Train loss: 0.179.. Test loss: 0.059.. Test accuracy: 0.979\nEpoch 1/1.. Train loss: 0.200.. Test loss: 0.044.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.141.. Test loss: 0.051.. Test accuracy: 0.981\nEpoch 1/1.. Train loss: 0.143.. Test loss: 0.043.. Test accuracy: 0.983\nEpoch 1/1.. Train loss: 0.128.. Test loss: 0.049.. Test accuracy: 0.982\nEpoch 1/1.. Train loss: 0.133.. Test loss: 0.066.. Test accuracy: 0.975\nEpoch 1/1.. Train loss: 0.192.. Test loss: 0.043.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.155.. Test loss: 0.043.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.210.. Test loss: 0.046.. Test accuracy: 0.982\nEpoch 1/1.. Train loss: 0.138.. Test loss: 0.042.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.188.. Test loss: 0.043.. Test accuracy: 0.982\nEpoch 1/1.. Train loss: 0.155.. Test loss: 0.042.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.138.. Test loss: 0.040.. Test accuracy: 0.982\nEpoch 1/1.. Train loss: 0.274.. Test loss: 0.040.. Test accuracy: 0.986\nEpoch 1/1.. Train loss: 0.148.. Test loss: 0.043.. Test accuracy: 0.985\nEpoch 1/1.. Train loss: 0.132.. Test loss: 0.044.. Test accuracy: 0.982\nEpoch 1/1.. Train loss: 0.128.. Test loss: 0.040.. Test accuracy: 0.986\nEpoch 1/1.. Train loss: 0.150.. Test loss: 0.043.. Test accuracy: 0.982\nEpoch 1/1.. Train loss: 0.142.. Test loss: 0.037.. Test accuracy: 0.985\nEpoch 1/1.. Train loss: 0.356.. Test loss: 0.056.. Test accuracy: 0.981\nEpoch 1/1.. Train loss: 0.214.. Test loss: 0.103.. Test accuracy: 0.955\nEpoch 1/1.. Train loss: 0.184.. Test loss: 0.051.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.148.. Test loss: 0.043.. Test accuracy: 0.983\nEpoch 1/1.. Train loss: 0.196.. Test loss: 0.047.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.125.. Test loss: 0.043.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.164.. Test loss: 0.039.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.180.. Test loss: 0.039.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.149.. Test loss: 0.038.. Test accuracy: 0.986\nEpoch 1/1.. Train loss: 0.123.. Test loss: 0.039.. Test accuracy: 0.985\nEpoch 1/1.. Train loss: 0.129.. Test loss: 0.040.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.101.. Test loss: 0.043.. Test accuracy: 0.981\nEpoch 1/1.. Train loss: 0.148.. Test loss: 0.040.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.182.. Test loss: 0.037.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.130.. Test loss: 0.039.. Test accuracy: 0.985\nEpoch 1/1.. Train loss: 0.235.. Test loss: 0.041.. Test accuracy: 0.986\nEpoch 1/1.. Train loss: 0.200.. Test loss: 0.050.. Test accuracy: 0.980\nEpoch 1/1.. Train loss: 0.150.. Test loss: 0.044.. Test accuracy: 0.983\nEpoch 1/1.. Train loss: 0.150.. Test loss: 0.041.. Test accuracy: 0.985\nEpoch 1/1.. Train loss: 0.114.. Test loss: 0.037.. Test accuracy: 0.986\nEpoch 1/1.. Train loss: 0.107.. Test loss: 0.038.. Test accuracy: 0.985\nEpoch 1/1.. Train loss: 0.116.. Test loss: 0.040.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.166.. Test loss: 0.036.. Test accuracy: 0.986\nEpoch 1/1.. Train loss: 0.174.. Test loss: 0.036.. Test accuracy: 0.986\nEpoch 1/1.. Train loss: 0.120.. Test loss: 0.048.. Test accuracy: 0.982\nEpoch 1/1.. Train loss: 0.157.. Test loss: 0.044.. Test accuracy: 0.984\nEpoch 1/1.. Train loss: 0.156.. Test loss: 0.042.. Test accuracy: 0.986\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
cb5590924e01a6ba4eb64cd77c2b9e3b9db76d78
11,048
ipynb
Jupyter Notebook
dev/_downloads/ee17e3e8df43ce4f0119faeeeccc374f/plot_sensors_time_frequency.ipynb
massich/mne-tools.github.io
95650593ba0eca4ff8257ebcbdf05731038d8d4e
[ "BSD-3-Clause" ]
null
null
null
dev/_downloads/ee17e3e8df43ce4f0119faeeeccc374f/plot_sensors_time_frequency.ipynb
massich/mne-tools.github.io
95650593ba0eca4ff8257ebcbdf05731038d8d4e
[ "BSD-3-Clause" ]
null
null
null
dev/_downloads/ee17e3e8df43ce4f0119faeeeccc374f/plot_sensors_time_frequency.ipynb
massich/mne-tools.github.io
95650593ba0eca4ff8257ebcbdf05731038d8d4e
[ "BSD-3-Clause" ]
null
null
null
43.32549
1,012
0.579381
[ [ [ "%matplotlib inline", "_____no_output_____" ] ], [ [ "\n\n# Frequency and time-frequency sensors analysis\n\n\nThe objective is to show you how to explore the spectral content\nof your data (frequency and time-frequency). Here we'll work on Epochs.\n\nWe will use this dataset: `somato-dataset`. It contains so-called event\nrelated synchronizations (ERS) / desynchronizations (ERD) in the beta band.\n", "_____no_output_____" ] ], [ [ "# Authors: Alexandre Gramfort <[email protected]>\n# Stefan Appelhoff <[email protected]>\n# Richard Höchenberger <[email protected]>\n#\n# License: BSD (3-clause)\nimport os.path as op\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nimport mne\nfrom mne.time_frequency import tfr_morlet, psd_multitaper, psd_welch\nfrom mne.datasets import somato", "_____no_output_____" ] ], [ [ "Set parameters\n\n", "_____no_output_____" ] ], [ [ "data_path = somato.data_path()\nsubject = '01'\ntask = 'somato'\nraw_fname = op.join(data_path, 'sub-{}'.format(subject), 'meg',\n 'sub-{}_task-{}_meg.fif'.format(subject, task))\n\n# Setup for reading the raw data\nraw = mne.io.read_raw_fif(raw_fname)\nevents = mne.find_events(raw, stim_channel='STI 014')\n\n# picks MEG gradiometers\npicks = mne.pick_types(raw.info, meg='grad', eeg=False, eog=True, stim=False)\n\n# Construct Epochs\nevent_id, tmin, tmax = 1, -1., 3.\nbaseline = (None, 0)\nepochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks,\n baseline=baseline, reject=dict(grad=4000e-13, eog=350e-6),\n preload=True)\n\nepochs.resample(200., npad='auto') # resample to reduce computation time", "_____no_output_____" ] ], [ [ "Frequency analysis\n------------------\n\nWe start by exploring the frequence content of our epochs.\n\n", "_____no_output_____" ], [ "Let's first check out all channel types by averaging across epochs.\n\n", "_____no_output_____" ] ], [ [ "epochs.plot_psd(fmin=2., fmax=40., average=True, spatial_colors=False)", "_____no_output_____" ] ], [ [ "Now let's take a look at the spatial distributions of the PSD.\n\n", "_____no_output_____" ] ], [ [ "epochs.plot_psd_topomap(ch_type='grad', normalize=True)", "_____no_output_____" ] ], [ [ "Alternatively, you can also create PSDs from Epochs objects with functions\nthat start with ``psd_`` such as\n:func:`mne.time_frequency.psd_multitaper` and\n:func:`mne.time_frequency.psd_welch`.\n\n", "_____no_output_____" ] ], [ [ "f, ax = plt.subplots()\npsds, freqs = psd_multitaper(epochs, fmin=2, fmax=40, n_jobs=1)\npsds = 10. * np.log10(psds)\npsds_mean = psds.mean(0).mean(0)\npsds_std = psds.mean(0).std(0)\n\nax.plot(freqs, psds_mean, color='k')\nax.fill_between(freqs, psds_mean - psds_std, psds_mean + psds_std,\n color='k', alpha=.5)\nax.set(title='Multitaper PSD (gradiometers)', xlabel='Frequency (Hz)',\n ylabel='Power Spectral Density (dB)')\nplt.show()", "_____no_output_____" ] ], [ [ "Notably, :func:`mne.time_frequency.psd_welch` supports the keyword argument\n``average``, which specifies how to estimate the PSD based on the individual\nwindowed segments. The default is ``average='mean'``, which simply calculates\nthe arithmetic mean across segments. 
Specifying ``average='median'``, in\ncontrast, returns the PSD based on the median of the segments (corrected for\nbias relative to the mean), which is a more robust measure.\n\n", "_____no_output_____" ] ], [ [ "# Estimate PSDs based on \"mean\" and \"median\" averaging for comparison.\nkwargs = dict(fmin=2, fmax=40, n_jobs=1)\npsds_welch_mean, freqs_mean = psd_welch(epochs, average='mean', **kwargs)\npsds_welch_median, freqs_median = psd_welch(epochs, average='median', **kwargs)\n\n# Convert power to dB scale.\npsds_welch_mean = 10 * np.log10(psds_welch_mean)\npsds_welch_median = 10 * np.log10(psds_welch_median)\n\n# We will only plot the PSD for a single sensor in the first epoch.\nch_name = 'MEG 0122'\nch_idx = epochs.info['ch_names'].index(ch_name)\nepo_idx = 0\n\n_, ax = plt.subplots()\nax.plot(freqs_mean, psds_welch_mean[epo_idx, ch_idx, :], color='k',\n ls='-', label='mean of segments')\nax.plot(freqs_median, psds_welch_median[epo_idx, ch_idx, :], color='k',\n ls='--', label='median of segments')\n\nax.set(title='Welch PSD ({}, Epoch {})'.format(ch_name, epo_idx),\n xlabel='Frequency (Hz)', ylabel='Power Spectral Density (dB)')\nax.legend(loc='upper right')\nplt.show()", "_____no_output_____" ] ], [ [ "Lastly, we can also retrieve the unaggregated segments by passing\n``average=None`` to :func:`mne.time_frequency.psd_welch`. The dimensions of\nthe returned array are ``(n_epochs, n_sensors, n_freqs, n_segments)``.\n\n", "_____no_output_____" ] ], [ [ "psds_welch_unagg, freqs_unagg = psd_welch(epochs, average=None, **kwargs)\nprint(psds_welch_unagg.shape)", "_____no_output_____" ] ], [ [ "\nTime-frequency analysis: power and inter-trial coherence\n--------------------------------------------------------\n\nWe now compute time-frequency representations (TFRs) from our Epochs.\nWe'll look at power and inter-trial coherence (ITC).\n\nTo this we'll use the function :func:`mne.time_frequency.tfr_morlet`\nbut you can also use :func:`mne.time_frequency.tfr_multitaper`\nor :func:`mne.time_frequency.tfr_stockwell`.\n\n", "_____no_output_____" ] ], [ [ "# define frequencies of interest (log-spaced)\nfreqs = np.logspace(*np.log10([6, 35]), num=8)\nn_cycles = freqs / 2. # different number of cycle per frequency\npower, itc = tfr_morlet(epochs, freqs=freqs, n_cycles=n_cycles, use_fft=True,\n return_itc=True, decim=3, n_jobs=1)", "_____no_output_____" ] ], [ [ "Inspect power\n-------------\n\n<div class=\"alert alert-info\"><h4>Note</h4><p>The generated figures are interactive. 
In the topo you can click\n on an image to visualize the data for one sensor.\n You can also select a portion in the time-frequency plane to\n obtain a topomap for a certain time-frequency region.</p></div>\n\n", "_____no_output_____" ] ], [ [ "power.plot_topo(baseline=(-0.5, 0), mode='logratio', title='Average power')\npower.plot([82], baseline=(-0.5, 0), mode='logratio', title=power.ch_names[82])\n\nfig, axis = plt.subplots(1, 2, figsize=(7, 4))\npower.plot_topomap(ch_type='grad', tmin=0.5, tmax=1.5, fmin=8, fmax=12,\n baseline=(-0.5, 0), mode='logratio', axes=axis[0],\n title='Alpha', show=False)\npower.plot_topomap(ch_type='grad', tmin=0.5, tmax=1.5, fmin=13, fmax=25,\n baseline=(-0.5, 0), mode='logratio', axes=axis[1],\n title='Beta', show=False)\nmne.viz.tight_layout()\nplt.show()", "_____no_output_____" ] ], [ [ "Joint Plot\n----------\nYou can also create a joint plot showing both the aggregated TFR\nacross channels and topomaps at specific times and frequencies to obtain\na quick overview regarding oscillatory effects across time and space.\n\n", "_____no_output_____" ] ], [ [ "power.plot_joint(baseline=(-0.5, 0), mode='mean', tmin=-.5, tmax=2,\n timefreqs=[(.5, 10), (1.3, 8)])", "_____no_output_____" ] ], [ [ "Inspect ITC\n-----------\n\n", "_____no_output_____" ] ], [ [ "itc.plot_topo(title='Inter-Trial coherence', vmin=0., vmax=1., cmap='Reds')", "_____no_output_____" ] ], [ [ "<div class=\"alert alert-info\"><h4>Note</h4><p>Baseline correction can be applied to power or done in plots.\n To illustrate the baseline correction in plots, the next line is\n commented power.apply_baseline(baseline=(-0.5, 0), mode='logratio')</p></div>\n\n", "_____no_output_____" ], [ "Exercise\n--------\n\n - Visualize the inter-trial coherence values as topomaps as done with\n power.\n\n", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ] ]
cb55a636bbc941c47d0b486728d93b7ccfcddda0
719,425
ipynb
Jupyter Notebook
Distance/DistanceComparison.ipynb
OceanParcels/Tides_GlobalOceanPlastic
639ec84462357d121770635b5247aca77d51be7b
[ "MIT" ]
3
2020-06-15T23:27:42.000Z
2022-03-18T02:54:35.000Z
Distance/DistanceComparison.ipynb
OceanParcels/Tides_GlobalOceanPlastic
639ec84462357d121770635b5247aca77d51be7b
[ "MIT" ]
null
null
null
Distance/DistanceComparison.ipynb
OceanParcels/Tides_GlobalOceanPlastic
639ec84462357d121770635b5247aca77d51be7b
[ "MIT" ]
null
null
null
3,615.201005
712,132
0.963094
[ [ [ "# Plots of the total distance covered by the particles as a function of their initial position", "_____no_output_____" ], [ "*Author: Miriam Sterl*", "_____no_output_____" ], [ "We plot the total distances covered by the particles during the simulation, as a function of their initial position. We do this for the FES, the GC and the GC+FES run.", "_____no_output_____" ] ], [ [ "from netCDF4 import Dataset\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.colors as colors\nimport cartopy.crs as ccrs\nimport cartopy.feature as cfeature\nimport cartopy.mpl.ticker as cticker", "_____no_output_____" ], [ "File1 = '/science/projects/oceanparcels/output_data/data_Miriam/Results_TrackingFES.nc'\ndataset1 = Dataset(File1)\nlat1 = dataset1.variables['lat'][:]\nlon1 = dataset1.variables['lon'][:]\ntime1 = dataset1.variables['time'][:]\ndist1 = dataset1.variables['distance'][:]\nlon1[lon1>180]-=360\nlon1[lon1<-180]+=360\n\nFile2 = '/science/projects/oceanparcels/output_data/data_Miriam/Results_TrackingGC.nc'\ndataset2 = Dataset(File2)\nlat2 = dataset2.variables['lat'][:]\nlon2 = dataset2.variables['lon'][:]\ntime2 = dataset2.variables['time'][:]\ndist2 = dataset2.variables['distance'][:]\nlon2[lon2>180]-=360\nlon2[lon2<-180]+=360\n\nFile3 = '/science/projects/oceanparcels/output_data/data_Miriam/Results_TrackingGCFES.nc'\ndataset3 = Dataset(File3)\nlat3 = dataset3.variables['lat'][:]\nlon3 = dataset3.variables['lon'][:]\ntime3 = dataset3.variables['time'][:]\ndist3 = dataset3.variables['distance'][:]\nlon3[lon3>180]-=360\nlon3[lon3<-180]+=360", "_____no_output_____" ], [ "# Initial longitudes and latitudes (on 2002-01-01)\nstartLons = lon1[:,0]\nstartLats = lat1[:,0]\n\n# Distance travelled by the particles between 2002-01-01 and 2015-01-01\nfinalDist = [dist1[:,-1], dist2[:,-1], dist3[:,-1]]\n\ntitles = ['(a) FES run', '(b) GC run', '(c) GC+FES run']", "_____no_output_____" ], [ "def DistancePlot(lons, lats, dist, fig, ax, vmin, vmax, titlenr, titlesize, labelnr, labelsize, colormap):\n \"\"\"\n Function that plots the total distance covered by particles during a certain period as a function of their initial position\n \"\"\" \n minLat = np.min(np.round(lats)) # the minimal (rounded) latitude\n maxLat = np.max(np.round(lats)) # the maximal (rounded) latitude\n minLon = np.min(np.round(lons)) # the minimal (rounded) longitude\n maxLon = np.max(np.round(lons)) # the maximal (rounded) longitude\n\n allLats = np.arange(minLat, maxLat+1) # the latitudinal grid\n allLons = np.arange(minLon, maxLon+1) # the longitudinal grid\n \n distances = np.zeros((len(allLons), len(allLats)))\n for i in range(len(dist)):\n distances[int(np.round(lons[i]-minLon)), int(np.round(lats[i]-minLat))] = dist[i]\n # shift by minLon, minLat to get positive indices\n maskedDist = np.ma.masked_where(distances==0.0, distances) # mask land points\n \n Lat, Lon = np.meshgrid(allLats, allLons)\n distplot = ax.pcolormesh(Lon, Lat, maskedDist/1e4, cmap = colormap, vmin=vmin, vmax=vmax)\n ax.set_title(titles[titlenr], fontsize=titlesize,fontweight='bold')\n ax.coastlines()\n ax.add_feature(cfeature.LAND, zorder=0, edgecolor='black', facecolor=(0.6,0.6,0.6))\n ax.set_xticks([-180, -150, -120, -90, -60, -30, 0, 30, 60, 90, 120, 150, 180], crs=ccrs.PlateCarree())\n ax.set_xticklabels([-180, -150, -120, -90, -60, -30, 0, 30, 60, 90, 120, 150, 180], fontsize=labelsize)\n ax.set_yticks([-90, -60, - 30, 0, 30, 60, 90], crs=ccrs.PlateCarree())\n ax.set_yticklabels([-90, -60, - 30, 0, 30, 60, 90], fontsize=labelsize)\n 
lon_formatter = cticker.LongitudeFormatter()\n lat_formatter = cticker.LatitudeFormatter()\n ax.xaxis.set_major_formatter(lon_formatter)\n ax.yaxis.set_major_formatter(lat_formatter)\n ax.grid(linewidth=2, color='black', alpha=0.25, linestyle=':')\n \n return distplot", "_____no_output_____" ], [ "# Compare the three different runs after 13 years\n\nfig, axes = plt.subplots(nrows=3, ncols=1, figsize=(28,16), subplot_kw={'projection': ccrs.PlateCarree()})\ni=0\nfor ax in axes.flat:\n distance = DistancePlot(startLons, startLats, finalDist[i], fig, ax, \n vmin=1, vmax=10, titlenr = i, titlesize=18, labelnr = 0, labelsize=15, colormap='YlOrRd')\n i = i+1\ncbar = fig.colorbar(distance, ax=axes.ravel().tolist(), shrink=0.53, extend='both', anchor=(2.2,0.5))\ncbar.set_label(\"Distance ($10^{4}$ km)\", rotation=90, fontsize=15)\ncbar.ax.tick_params(labelsize=12)\nfig.suptitle('Total distance covered', x=0.835, y=1.02, fontsize=21, fontweight='bold')\nplt.tight_layout()\n#plt.savefig('DistanceComparison', bbox_inches='tight')", "C:\\Users\\miria\\Anaconda3\\envs\\py2_parcels_M\\lib\\site-packages\\ipykernel_launcher.py:15: UserWarning: Warning: converting a masked element to nan.\n from ipykernel import kernelapp as app\nC:\\Users\\miria\\Anaconda3\\envs\\py2_parcels_M\\lib\\site-packages\\matplotlib\\figure.py:1743: UserWarning: This figure includes Axes that are not compatible with tight_layout, so its results might be incorrect.\n warnings.warn(\"This figure includes Axes that are not \"\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cb55a690b455bc71a595e5bae757220aa470c5b3
12,096
ipynb
Jupyter Notebook
dsc8_just_the_code.ipynb
datasittersclub/dsc8
161f94ca6fe07fa998bdafd581e159bb8af376ae
[ "Apache-2.0" ]
2
2020-10-22T19:22:14.000Z
2020-10-23T12:13:43.000Z
dsc8_just_the_code.ipynb
datasittersclub/dsc8
161f94ca6fe07fa998bdafd581e159bb8af376ae
[ "Apache-2.0" ]
null
null
null
dsc8_just_the_code.ipynb
datasittersclub/dsc8
161f94ca6fe07fa998bdafd581e159bb8af376ae
[ "Apache-2.0" ]
null
null
null
30.545455
302
0.616567
[ [ [ "# Data-Sitters Club 8: Just the Code\n\nThis notebook contains just the code (and a little bit of text) from the portions of *[DSC 8: Text-Comparison-Algorithm-Crazy-Quinn](https://datasittersclub.github.io/site/dsc8/)* for using Euclidean and cosine distance with word counts and word frequencies, and running TF-IDF for your texts.\n\nThis code assumes you've actually read the Data-Sitters Club book already. There's lots of pitfalls if you just try to apply the code without understanding what it's doing, or the effect caused by the various different options. Read first, then try!", "_____no_output_____" ], [ "## Load modules", "_____no_output_____" ] ], [ [ "#Installs seaborn\n#You only need to run this cell the first time you run this notebook\nimport sys\n!{sys.executable} -m pip install seaborn", "_____no_output_____" ], [ "#Imports the count vectorizer from Scikit-learn along with \nfrom sklearn.feature_extraction.text import CountVectorizer\n#Glob is used for finding path names\nimport glob\n#We need these to format the data correctly\nfrom scipy.spatial.distance import pdist, squareform\n#In case you're starting to run the code just at this point, we'll need os again\nimport os\nimport numpy as np\n#In case you're starting to run the code just at this point, we'll need pandas again\nimport pandas as pd\n#Import matplotlib\nimport matplotlib.pyplot as plt\n#Import seaborn\nimport seaborn as sns", "_____no_output_____" ] ], [ [ "## Set the file directory for your corpus", "_____no_output_____" ] ], [ [ "filedir = '/Users/qad/Documents/dsc_corpus_clean'\nos.chdir(filedir)", "_____no_output_____" ] ], [ [ "# Word count vectorizer\nThis looks at just the top 1000 words, and doesn't use `max_df` to remove words that occur across all your texts. You can add it in between the input and the `max_features` parameters, separated by a comma (e.g. 
`input=\"filename\", max_df=.7, max_features=1000`).", "_____no_output_____" ] ], [ [ "# Use the glob library to create a list of file names, sorted alphabetically\n# Alphabetical sorting will get us the books in numerical order\nfilenames = sorted(glob.glob(\"*.txt\"))\n# Parse those filenames to create a list of file keys (ID numbers)\n# You'll use these later on.\nfilekeys = [f.split('/')[-1].split('.')[0] for f in filenames]\n\n# Create a CountVectorizer instance with the parameters you need\nwordcountvectorizer = CountVectorizer(input=\"filename\", max_features=1000)\n# Run the vectorizer on your list of filenames to create your wordcounts\n# Use the toarray() function so that SciPy will accept the results\nwordcounts = wordcountvectorizer.fit_transform(filenames)", "_____no_output_____" ] ], [ [ "### Bonus: word count toy\nThe code below will display all the words that were included in the word count vectorizer, based on the parameters you've set.", "_____no_output_____" ] ], [ [ "sum_words = wordcounts.sum(axis=0)\nwords_freq = [(word, sum_words[0, idx]) for word, idx in wordcountvectorizer.vocabulary_.items()]\nsorted(words_freq, key = lambda x: x[1], reverse=True)", "_____no_output_____" ] ], [ [ "## Euclidean distance for word count vectorizer", "_____no_output_____" ] ], [ [ "#Runs the Euclidean distance calculation, prints the output, and saves it as a CSV\neuclidean_distances = pd.DataFrame(squareform(pdist(wordcounts)), index=filekeys, columns=filekeys)\neuclidean_distances", "_____no_output_____" ] ], [ [ "### Euclidean distance visualization", "_____no_output_____" ] ], [ [ "#Defines the size of the image\nplt.figure(figsize=(100, 100))\n#Increases the label size so it's more legible\nsns.set(font_scale=3)\n#Generates the visualization using the data in the dataframe\nax = sns.heatmap(euclidean_distances)\n#Displays the image\nplt.show()", "_____no_output_____" ] ], [ [ "## Cosine distance for word count vectorizer", "_____no_output_____" ] ], [ [ "cosine_distances = pd.DataFrame(squareform(pdist(wordcounts, metric='cosine')), index=filekeys, columns=filekeys)\ncosine_distances", "_____no_output_____" ] ], [ [ "### Cosine distance visualization", "_____no_output_____" ] ], [ [ "#Defines the size of the image\nplt.figure(figsize=(100, 100))\n#Increases the label size so it's more legible\nsns.set(font_scale=3)\n#Generates the visualization using the data in the dataframe\nax = sns.heatmap(cosine_distances)\n#Displays the image\nplt.show()", "_____no_output_____" ] ], [ [ "# Term frequency vectorizer", "_____no_output_____" ] ], [ [ "from sklearn.feature_extraction.text import TfidfVectorizer\n\n# Use the glob library to create a list of file names, sorted alphabetically\n# Alphabetical sorting will get us the books in numerical order\nfilenames = sorted(glob.glob(\"*.txt\"))\n# Parse those filenames to create a list of file keys (ID numbers)\n# You'll use these later on.\nfilekeys = [f.split('/')[-1].split('.')[0] for f in filenames]\n\n# Create a CountVectorizer instance with the parameters you need\nfreqvectorizer = TfidfVectorizer(input=\"filename\", stop_words=None, use_idf=False, norm='l1', max_features=1000)\n# Run the vectorizer on your list of filenames to create your wordcounts\n# Use the toarray() function so that SciPy will accept the results\nwordfreqs = freqvectorizer.fit_transform(filenames).toarray()", "_____no_output_____" ] ], [ [ "## Euclidean distance for term frequency vectorizer", "_____no_output_____" ] ], [ [ "euclidean_distances_freq = 
pd.DataFrame(squareform(pdist(wordfreqs, metric='euclidean')), index=filekeys, columns=filekeys)\neuclidean_distances_freq", "_____no_output_____" ] ], [ [ "### Euclidean distance visualization", "_____no_output_____" ] ], [ [ "#Defines the size of the image\nplt.figure(figsize=(100, 100))\n#Increases the label size so it's more legible\nsns.set(font_scale=3)\n#Generates the visualization using the data in the dataframe\nax = sns.heatmap(euclidean_distances_freq)\n#Displays the image\nplt.show()", "_____no_output_____" ] ], [ [ "## Cosine distance for word count vectorizer", "_____no_output_____" ] ], [ [ "cosine_distances_freq = pd.DataFrame(squareform(pdist(wordfreqs, metric='cosine')), index=filekeys, columns=filekeys)\ncosine_distances_freq", "_____no_output_____" ] ], [ [ "### Cosine distance visualization", "_____no_output_____" ] ], [ [ "#Defines the size of the image\nplt.figure(figsize=(100, 100))\n#Increases the label size so it's more legible\nsns.set(font_scale=3)\n#Generates the visualization using the data in the dataframe\nax = sns.heatmap(cosine_distances_freq)\n#Displays the image\nplt.show()", "_____no_output_____" ] ], [ [ "## TF-IDF", "_____no_output_____" ] ], [ [ "# Use the glob library to create a list of file names, sorted alphabetically\n# Alphabetical sorting will get us the books in numerical order\nfilenames = sorted(glob.glob(\"*.txt\"))\n# Parse those filenames to create a list of file keys (ID numbers)\n# You'll use these later on.\nfilekeys = [f.split('/')[-1].split('.')[0] for f in filenames]\n\n# Create a CountVectorizer instance with the parameters you need\nvectorizer = TfidfVectorizer(input=\"filename\", stop_words=None, use_idf=True, norm=None, max_features=1000, max_df=.95)\n# Run the vectorizer on your list of filenames to create your wordcounts\n# Use the toarray() function so that SciPy will accept the results\ntransformed_documents = vectorizer.fit_transform(filenames)\ntransformed_documents_as_array = transformed_documents.toarray()", "_____no_output_____" ] ], [ [ "Create a CSV per text file with most distinctive terms.", "_____no_output_____" ] ], [ [ "# construct a list of output file paths using the previous list of text files the relative path for tf_idf_output\noutput_filenames = [str(txt_file).replace(\".txt\", \".csv\") for txt_file in filenames]\n\n# loop each item in transformed_documents_as_array, using enumerate to keep track of the current position\nfor counter, doc in enumerate(transformed_documents_as_array):\n # construct a dataframe\n tf_idf_tuples = list(zip(vectorizer.get_feature_names(), doc))\n one_doc_as_df = pd.DataFrame.from_records(tf_idf_tuples, columns=['term', 'score']).sort_values(by='score', ascending=False).reset_index(drop=True)\n\n # output to a csv using the enumerated value for the filename\n one_doc_as_df.to_csv(output_filenames[counter])", "_____no_output_____" ] ], [ [ "## Suggested Citation\n\nDombrowski, Quinn. “DSC #8: Just the Code.” Jupyter Notebook. *The Data-Sitters Club*, October 21, 2020. https://github.com/datasittersclub/dsc8.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cb55c39ed78afa07de0e22c34ae8757122566105
5,119
ipynb
Jupyter Notebook
python/affiliations/facebook.ipynb
gesiscss/smm_demo
dfd722278551f17b42f226ab5a196b2b4c58e298
[ "MIT" ]
1
2019-12-11T23:57:28.000Z
2019-12-11T23:57:28.000Z
python/affiliations/facebook.ipynb
gesiscss/smm_demo
dfd722278551f17b42f226ab5a196b2b4c58e298
[ "MIT" ]
null
null
null
python/affiliations/facebook.ipynb
gesiscss/smm_demo
dfd722278551f17b42f226ab5a196b2b4c58e298
[ "MIT" ]
null
null
null
35.548611
260
0.599922
[ [ [ "# Politician Activity on Facebook by Political Affiliation\n\nThe parameters in the cell below can be adjusted to explore other political affiliations and time frames.\n\n### How to explore other political affiliation?\nThe ***affiliation*** parameter can be use to aggregate politicians by their political affiliations. The column `affiliation` in this [this other notebook](../politicians.ipynb?autorun=true) show the politicians that belong each political affiliation.\n\n***Alternatively***, you can direcly use the [politicians API](http://mediamonitoring.gesis.org/api/politicians/swagger/), or access it with the [SMM Wrapper](https://pypi.org/project/smm-wrapper/).\n\n## A. Set Up parameters", "_____no_output_____" ] ], [ [ "# Parameters: \naffiliation = 'Grüne'\nfrom_date = '2017-09-01'\nto_date = '2018-12-31'\naggregation = 'week'", "_____no_output_____" ] ], [ [ "## B. Using the SMM Politician API", "_____no_output_____" ] ], [ [ "import pandas as pd\n\n# Create an instance to the smm wrapper\nfrom smm_wrapper import SMMPoliticians\nsmm = SMMPoliticians()\n\n#using the api to get the data\ndf = smm.dv.get_politicians()\n\n# Filter the accounts by party, and valid ones (the ones that contain fb_ids)\nparty_df = df[(df['affiliation']==affiliation) & (df['fb_ids'].notnull())] \n\n# query the Social Media Monitoring API\nposts_by = pd.concat(smm.dv.posts_by(_id=organization_id, from_date=from_date, to_date=to_date, aggregate_by=aggregation) \n for organization_id in party_df.index)\ncomments_by = pd.concat(smm.dv.comments_by(_id=organization_id, from_date=from_date, to_date=to_date, aggregate_by=aggregation) \n for organization_id in party_df.index)\n\n# aggregate posts and comments\ntotal_posts_by = posts_by.groupby('date')[\n 'posts', 'replies', 'shares', 'reactions', 'likes'].sum()\ntotal_comments_by = comments_by.groupby('date')[\n 'comments', 'replies', 'likes'].sum() ", "_____no_output_____" ] ], [ [ "## C. 
Plotting\n### C.1 Plot Facebook Post Activity", "_____no_output_____" ] ], [ [ "import plotly\nfrom plotly import graph_objs as go\nplotly.offline.init_notebook_mode(connected=True)\n\n#plot for facebook posts activity\nplotly.offline.iplot({\n \"data\": [go.Scatter(x=total_posts_by.index.tolist(), y=total_posts_by['posts'], name='Posts', line_shape='spline'), \n go.Scatter(x=total_posts_by.index.tolist(), y=total_posts_by['replies'], name='Replies',line_shape='spline'),\n go.Scatter(x=total_posts_by.index.tolist(), y=total_posts_by['shares'], name='Shares', line_shape='spline'),\n go.Scatter(x=total_posts_by.index.tolist(), y=total_posts_by['reactions'], name='Reactions', line_shape='spline'),\n go.Scatter(x=total_posts_by.index.tolist(), y=total_posts_by['likes'], name='Likes', line_shape='spline')], \n \"layout\": go.Layout(title='Facebook posts for {}'.format(affiliation), xaxis={'title':''}, yaxis={'title':'N'})\n})", "_____no_output_____" ] ], [ [ "### C.2 Plot Facebook Comment Activity", "_____no_output_____" ] ], [ [ "# plot for facebook comments activity\nplotly.offline.iplot({\n \"data\": [go.Scatter(x=total_comments_by.index.tolist(), y=total_comments_by['comments'], name='Comments', line_shape='spline'), \n go.Scatter(x=total_comments_by.index.tolist(), y=total_comments_by['replies'], name='Replies', line_shape='spline'),\n go.Scatter(x=total_comments_by.index.tolist(), y=total_comments_by['likes'], name='Likes', line_shape='spline')], \n \"layout\": go.Layout(title='Facebook comments for {}'.format(affiliation), xaxis={'title':''}, yaxis={'title':'N'})\n})", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cb55e5337749007ef4294555e01b5e1df2dfc3fa
62,481
ipynb
Jupyter Notebook
tutorials/PythonTutorial.ipynb
chi-hung/PythonTutorial
611c29655b0d30f9d36cbe5ab449c3162ab5ed79
[ "MIT" ]
20
2017-04-17T11:48:45.000Z
2017-09-11T02:02:02.000Z
tutorials/PythonTutorial.ipynb
chi-hung/PythonTutorial
611c29655b0d30f9d36cbe5ab449c3162ab5ed79
[ "MIT" ]
null
null
null
tutorials/PythonTutorial.ipynb
chi-hung/PythonTutorial
611c29655b0d30f9d36cbe5ab449c3162ab5ed79
[ "MIT" ]
18
2017-03-20T08:29:27.000Z
2017-09-11T02:02:16.000Z
18.403829
1,278
0.452009
[ [ [ "# 目的:了解Python基本語法", "_____no_output_____" ], [ "1. [資料型別](#01)\n2. [for-loop](#02)\n3. [while-loop](#03)\n4. [清單(list)](#04)\n5. [tuple是什麼?](#05)\n6. [Python特殊的清單處理方式](#06)\n7. [if的用法](#07)\n8. [以if控制迴圈的break和continue](#08)\n9. [函數:將計算結果直接於函數內印出或回傳(return)出函數外](#09)\n10. [匿名函數](#10)\n11. [物件導向範例](#11)\n12. [NumPy (Python中用於處理numerical array的套件)](#12)\n13. [一維序列](#13)\n14. [二維矩陣](#14)", "_____no_output_____" ], [ "# 練習\n\n* [運用range(5),for以及append()建立一清單,其內容為\\[0,1,4,9,16\\] ](#ex01)\n* [運用range(5), if以及for建立一清單,其內容為\\[0,4,16\\] ](#ex02)\n* [試輸出99乘法表](#ex1)\n* [試輸出99乘法表(以清單表示)](#ex2)\n* [寫一個函數factorial(n)。](#ex3)\n* [建立一函數 f。輸入: 一個 2 維矩陣,輸出: 該2維矩陣內的所有數值加總。](#ex4)\n", "_____no_output_____" ], [ "---", "_____no_output_____" ], [ "## <a id=\"01\"/>資料型別", "_____no_output_____" ], [ "### 整數(int)", "_____no_output_____" ] ], [ [ "a=1", "_____no_output_____" ], [ "type(a)", "_____no_output_____" ], [ "b=3", "_____no_output_____" ], [ "type(b)", "_____no_output_____" ] ], [ [ "兩整數相除,輸出結果為浮點數(float)。(備註:Python 3開始)", "_____no_output_____" ] ], [ [ "a/b", "_____no_output_____" ], [ "type(a/b)", "_____no_output_____" ] ], [ [ "在Python3,兩整數相除,需以//運算子來相除,方能真正用整數儲存該結果。", "_____no_output_____" ] ], [ [ "a//b", "_____no_output_____" ], [ "type(a//b)", "_____no_output_____" ] ], [ [ "兩整數相加,其輸出仍然為整數。", "_____no_output_____" ] ], [ [ "a+b", "_____no_output_____" ], [ "type(a+b)", "_____no_output_____" ] ], [ [ "### 浮點數(float)", "_____no_output_____" ], [ "Python不需宣告型別。一個數字將會被判別為整數(int)或浮點數(float),需看該數是否有小數點存在。", "_____no_output_____" ] ], [ [ "type(1)", "_____no_output_____" ], [ "type(1.)", "_____no_output_____" ], [ "type(1.E-5)", "_____no_output_____" ] ], [ [ "### 字串(str)", "_____no_output_____" ] ], [ [ "mystr='Hello World!'", "_____no_output_____" ], [ "type(mystr)", "_____no_output_____" ] ], [ [ "將該字串所有字變成大寫", "_____no_output_____" ] ], [ [ "mystr.upper()", "_____no_output_____" ] ], [ [ "將該字串所有字變成小寫", "_____no_output_____" ] ], [ [ "mystr.upper().lower()", "_____no_output_____" ] ], [ [ "取出該字串前三個字", "_____no_output_____" ] ], [ [ "mystr[0:3]", "_____no_output_____" ] ], [ [ "檢查某字串片段是否存在於該字串", "_____no_output_____" ] ], [ [ "'Wor' in mystr", "_____no_output_____" ], [ "'WOR' in mystr", "_____no_output_____" ], [ "'WOR' in mystr.upper()", "_____no_output_____" ] ], [ [ "以len()看字串長度", "_____no_output_____" ] ], [ [ "len(mystr)", "_____no_output_____" ], [ "mystr=' hi '", "_____no_output_____" ] ], [ [ "清除左右空白", "_____no_output_____" ] ], [ [ "mystr.strip()", "_____no_output_____" ] ], [ [ "清除左空白", "_____no_output_____" ] ], [ [ "mystr.lstrip()", "_____no_output_____" ] ], [ [ "清除右空白", "_____no_output_____" ] ], [ [ "mystr.rstrip()", "_____no_output_____" ] ], [ [ "置換字串內的h成f", "_____no_output_____" ] ], [ [ "mystr.replace('h','f')", "_____no_output_____" ] ], [ [ "### 布林(Boolean)", "_____no_output_____" ] ], [ [ "t=True #真\nf=False #假", "_____no_output_____" ], [ "t==f #真等於假?", "_____no_output_____" ], [ "t==t #真等於真?", "_____no_output_____" ], [ "t!=f #真不等於假?", "_____no_output_____" ], [ "t==f or t!=f #真等於假 或是 真不等於假?", "_____no_output_____" ], [ "t==f and t!=f #真等於假 和 真不等於假?", "_____no_output_____" ], [ "not t #非真?", "_____no_output_____" ] ], [ [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"02\"/>for-loop", "_____no_output_____" ] ], [ [ "for j in range(5):\n print(j)", "0\n1\n2\n3\n4\n" ] ], [ [ "以上,我們使用了range()這個內建函數,它到底是什麼?", "_____no_output_____" ] ], [ [ "r=range(5)\nprint( type(r) )", "<class 'range'>\n" ] ], [ [ "用type()檢查變數r的型別,我們發現了r=range(5)是屬於'range'這個類別的一個物件。", 
"_____no_output_____" ], [ "接下來,我們以內建函數hasattr()去檢查range(5)這個物件是不是可疊代(iterable):", "_____no_output_____" ], [ "首先以help()函數檢查一下hasattr()的用法:", "_____no_output_____" ] ], [ [ "help(hasattr)", "Help on built-in function hasattr in module builtins:\n\nhasattr(obj, name, /)\n Return whether the object has an attribute with the given name.\n \n This is done by calling getattr(obj, name) and catching AttributeError.\n\n" ], [ "hasattr(range(5), '__iter__')", "_____no_output_____" ], [ "r=range(5).__iter__() # 取得range(5)的疊代器\nprint( r.__next__() ) # 進行疊代並印出\nprint( r.__next__() ) # 進行疊代並印出\nprint( r.__next__() ) # 進行疊代並印出\nprint( r.__next__() ) # 進行疊代並印出\nprint( r.__next__() ) # 進行疊代並印出\nprint( r.__next__() ) # 進行疊代並印出", "0\n1\n2\n3\n4\n" ] ], [ [ "### 小結\n\n1. 若物件(object)為可疊代(iterable):\n\t* 表示我們可用\\_\\_iter\\_\\_()以及\\_\\_next\\_\\_()來操控該物件,一個一個的去取得物件裡面的元素。\n\t* 物件裡面的元素亦可簡單的以for迴圈來取得。\n2. 複習以下函數的意義:hasattr(),\\_\\_iter\\_\\_(),\\_\\_next\\_\\_(),range()", "_____no_output_____" ], [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "-----", "_____no_output_____" ], [ "## <a id=\"03\"/> while-loop", "_____no_output_____" ] ], [ [ "i=0\nwhile(i<5):\n print(i)\n i+=1 # i=i+1的簡寫", "0\n1\n2\n3\n4\n" ] ], [ [ "常用於不確定要跑幾次,要跑到直到條件滿足才跳出迴圈的情形。例如:嘗試擷取某網頁,直到失敗次數太多或是擷取成功為止。", "_____no_output_____" ], [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"04\"/>清單(list)\n\n定義:包含元素的一個集合。清單內的元素可重複,且每個元素都有一個索引(index)。", "_____no_output_____" ] ], [ [ "array=[1,2,2,3,4,5] #建立一個清單\nprint(array)\nprint(array[0]) #印出清單內的第一個元素\nprint(array[-1]) #印出清單內最後一個元素", "[1, 2, 2, 3, 4, 5]\n1\n5\n" ], [ "type([1,2,2,3,4,5]) #以type查看清單型別,確定清單(list)的型別就是list。", "_____no_output_____" ], [ "hasattr([1,2,3,4,5],'__iter__') # 若是[1,2,3,4,5]為可疊代物件,那我們就可以用迴圈來疊代出清單內的所有元素。", "_____no_output_____" ], [ "for j in [1,2,3,4,5]:\n print(j,j**2)", "1 1\n2 4\n3 9\n4 16\n5 25\n" ], [ "for j in [1,2.,'字串',3,range(10),5,[1,1,1,2,2,2]]:\n print(j,'\\t',type(j),'\\t',hasattr(j,'__iter__'))", "1 \t <class 'int'> \t False\n2.0 \t <class 'float'> \t False\n字串 \t <class 'str'> \t True\n3 \t <class 'int'> \t False\nrange(0, 10) \t <class 'range'> \t True\n5 \t <class 'int'> \t False\n[1, 1, 1, 2, 2, 2] \t <class 'list'> \t True\n" ] ], [ [ "從以上得知:\n1. 清單裡的元素可以有不同的型別(type)。\n2. 
字串(str)和清單(list)一樣,是可以疊代的物件。因此,他們可以用for迴圈來進行內容的提取,例如:", "_____no_output_____" ] ], [ [ "for j in 'Python':\n print(j)", "P\ny\nt\nh\no\nn\n" ] ], [ [ "使用append()添加新元素至清單內", "_____no_output_____" ] ], [ [ "array=[1,2,3]\narray.append(4)\nprint(array)", "[1, 2, 3, 4]\n" ] ], [ [ "使用del 刪除清單內元素", "_____no_output_____" ] ], [ [ "print(array)\ndel array[2] #刪除清單內的第二個元素\nprint(array)", "[1, 2, 3, 4]\n[1, 2, 4]\n" ] ], [ [ "我們可使用len()去得知清單的長度", "_____no_output_____" ] ], [ [ "array=[10,20,30,40]\nprint(len(array))", "4\n" ] ], [ [ "使用enumerate()去列舉清單", "_____no_output_____" ] ], [ [ "enumerate(array)", "_____no_output_____" ], [ "type(enumerate(array))", "_____no_output_____" ], [ "hasattr(enumerate,'__iter__')", "_____no_output_____" ], [ "for j in enumerate(array):\n print(j)", "(0, 10)\n(1, 20)\n(2, 30)\n(3, 40)\n" ], [ "print( type( (0,10) ) )", "<class 'tuple'>\n" ] ], [ [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"05\"/>tuple是什麼?", "_____no_output_____" ] ], [ [ "array=(1,2,3,\"abc\")\nprint(array)", "(1, 2, 3, 'abc')\n" ], [ "del array[1]", "_____no_output_____" ], [ "array.append(5)", "_____no_output_____" ], [ "array[2]=0", "_____no_output_____" ] ], [ [ "結論:不可新增刪除覆蓋tuple內的元素,因此tuple可以被看做是唯讀的list。", "_____no_output_____" ], [ "list可以被取set()。\n\nset的定義:集合內元素不允許重複,且集合內的元素無索引。", "_____no_output_____" ] ], [ [ "set([1,1,2,3,3,4,1,2,'alpha','beta'])", "_____no_output_____" ], [ "type( {1, 2, 3, 4, 'beta', 'alpha'} )", "_____no_output_____" ], [ "st={1,1,2,3,3,4,1,2,'alpha','beta'}\nprint(st)\nprint(hasattr(st,'__iter__'))", "{1, 2, 3, 4, 'beta', 'alpha'}\nTrue\n" ], [ "for j in st:\n print(j)", "1\n2\n3\n4\nbeta\nalpha\n" ], [ "print(st[0])", "_____no_output_____" ] ], [ [ "也就是先前說的,set內的元素並無索引。", "_____no_output_____" ], [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"06\"/>Python特殊的清單處理方式", "_____no_output_____" ], [ "將range(5)裡面的東西抓出來,放到一清單叫做lst,可有各種寫法:", "_____no_output_____" ], [ "第一種", "_____no_output_____" ] ], [ [ "lst=[]\nfor j in range(5):\n lst.append(j)\nprint(lst)", "[0, 1, 2, 3, 4]\n" ] ], [ [ "第二種", "_____no_output_____" ] ], [ [ "lst=[j for j in range(5)] #此是非常Python的寫法(Pythonic way of coding)\nprint(lst)", "[0, 1, 2, 3, 4]\n" ] ], [ [ "第三種", "_____no_output_____" ] ], [ [ "lst=list(range(5))\nprint(lst)", "[0, 1, 2, 3, 4]\n" ] ], [ [ "第四種", "_____no_output_____" ] ], [ [ "lst=[*range(5)]\nprint(lst)", "[0, 1, 2, 3, 4]\n" ] ], [ [ "## <a id=\"ex01\" style='color:purple'/> 練習0-1. 運用range(5),for以及append()建立一清單,其內容為[0,1,4,9,16]", "_____no_output_____" ] ], [ [ "#法一:\nlst=[]\nfor j in range(5):\n #完成接下來的部分", "_____no_output_____" ], [ "#法二:\n#提示: lst=[.....]", "_____no_output_____" ] ], [ [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"ex02\" style='color:purple'/> 練習0-2. 
運用range(5), if以及for建立一清單,其內容為[0,4,16]", "_____no_output_____" ] ], [ [ "# 法一:\nlst=[]\nfor j in range(5):\n #完成接下來的部分", "_____no_output_____" ], [ "#法二:\n#提示: lst=[.....]", "_____no_output_____" ] ], [ [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"07\"/>if的用法", "_____no_output_____" ], [ "### if...elif..else的使用 :", "_____no_output_____" ] ], [ [ "x=5\n\nif(x==1):\n print('x is 1')\nelif(x==2):\n print('x is 2')\nelse:\n print('x is neither 1 nor 2.')", "x is neither 1 nor 2.\n" ] ], [ [ "### 例:取range(10)內的偶數並印出:", "_____no_output_____" ], [ "法一", "_____no_output_____" ] ], [ [ "for j in range(10):\n if(j%2==0):\n print(j)", "0\n2\n4\n6\n8\n" ] ], [ [ "法二", "_____no_output_____" ] ], [ [ "[j for j in range(10) if j%2==0]", "_____no_output_____" ] ], [ [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"08\"/>以if控制迴圈的break和continue", "_____no_output_____" ] ], [ [ "for j in range(5):\n print(j)\n if(j==2):\n break #中斷,跳出迴圈", "0\n1\n2\n" ], [ "for j in range(5):\n if(j==2):\n continue #略過以下程式碼,並繼續疊代至下一個元素\n print(j)", "0\n1\n3\n4\n" ] ], [ [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"ex1\" style='color:purple'/> 練習1. 試著輸出以下內容", "_____no_output_____" ] ], [ [ "1 1 1\n1 2 2\n1 3 3\n1 4 4\n1 5 5\n\n2 1 2\n2 2 4\n2 3 6\n2 4 8\n2 5 10\n\n3 1 3\n3 2 6\n3 3 9\n3 4 12\n3 5 15", "_____no_output_____" ] ], [ [ "#提示:使用for, range(),print()\n\nfor i in range(1,4):\n #完成接下來的部分", "_____no_output_____" ] ], [ [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"ex2\" style='color:purple'/> 練習2. 試著輸出以下內容", "_____no_output_____" ] ], [ [ "[1, 2, 3, 4, 5, 6, 7, 8, 9]\n[2, 4, 6, 8, 10, 12, 14, 16, 18]\n[3, 6, 9, 12, 15, 18, 21, 24, 27]\n[4, 8, 12, 16, 20, 24, 28, 32, 36]\n[5, 10, 15, 20, 25, 30, 35, 40, 45]\n[6, 12, 18, 24, 30, 36, 42, 48, 54]\n[7, 14, 21, 28, 35, 42, 49, 56, 63]\n[8, 16, 24, 32, 40, 48, 56, 64, 72]\n[9, 18, 27, 36, 45, 54, 63, 72, 81]", "_____no_output_____" ] ], [ [ "#提示:使用for, range(),print(),以及建立一個清單(list)\n#完成接下來的部分", "_____no_output_____" ] ], [ [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"09\"/>函數:將計算結果直接於函數內印出或回傳(return)出函數外", "_____no_output_____" ], [ "### 例一", "_____no_output_____" ] ], [ [ "def square(x):\n print(x*x)", "_____no_output_____" ], [ "def square_return(x):\n return(x**2)", "_____no_output_____" ] ], [ [ "square(x)將只會印出x, 而square_return(x)將會回傳x。", "_____no_output_____" ] ], [ [ "square(2)", "4\n" ], [ "square_return(2)", "_____no_output_____" ] ], [ [ "可另一變數res接收square_return(x)回傳的值。", "_____no_output_____" ] ], [ [ "res=square_return(2)", "_____no_output_____" ], [ "print(res)", "4\n" ] ], [ [ "需注意的是,square(x)並不會回傳值,因此res將接收到None(無)。", "_____no_output_____" ] ], [ [ "res=square(2)", "4\n" ], [ "print(res)", "None\n" ] ], [ [ "### 例二: 寫一個函數add(a, b)。其輸入為 a和b,輸出為 a+b。", "_____no_output_____" ] ], [ [ "def add(a,b):\n return a+b", "_____no_output_____" ], [ "addResult=add(5,7)\nprint(addResult)", "12\n" ] ], [ [ "### 複習:Java函數寫法(輸入x,回傳x平方)", "_____no_output_____" ] ], [ [ "%%file testSquare.java\npublic class testSquare{\n public static void main(String args[]){\n int y=square(2);\n System.out.println(y);\n }\n \n static int square(int x){\n return x*x;\n }\n}", "Writing testSquare.java\n" ], [ "!javac testSquare.java\n!java testSquare", "4\r\n" ] ], [ [ "[回索引](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"ex3\" style='color:purple'/> 練習3:寫一個函數factorial(n)。\n\n其作用為:\n\n輸入:$n$,輸出:$1*2*3*....*n$", "_____no_output_____" ] ], [ [ "# 修改以下程式碼,以完成函數factorial(n)\ndef 
factorial(n):\n    if(n==0):\n        return ???\n    if(n!=0):\n        return ???", "_____no_output_____" ] ], [ [ "[Back to index](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"10\"/>Anonymous functions", "_____no_output_____" ], [ "Ordinary function definition", "_____no_output_____" ] ], [ [ "def f(x,y):\n    return x+y", "_____no_output_____" ], [ "f(1,2)", "_____no_output_____" ] ], [ [ "Use an anonymous function and give it the name f. The function obtained this way is equivalent to the ordinary function definition above.", "_____no_output_____" ] ], [ [ "f=lambda x,y:x+y", "_____no_output_____" ], [ "f(1,2)", "_____no_output_____" ] ], [ [ "Use an anonymous function directly without giving it a name; it is thrown away after use.", "_____no_output_____" ] ], [ [ "(lambda x,y:x+y)(1,2) # 1+2=3", "_____no_output_____" ], [ "(lambda x:x*x)(7) # 7X7=49", "_____no_output_____" ] ], [ [ "[Back to index](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"11\"/> An object-oriented example", "_____no_output_____" ], [ "Example: a cash machine", "_____no_output_____" ] ], [ [ "class Customer(object):\n    def __init__(self, name, balance=0.0):\n        self.name=name # when a new object is created, the name and balance attributes are initialised\n        self.balance=balance\n\n    def withdraw(self, amount): # withdraw money\n        if amount > self.balance: # raise an error message if the amount requested exceeds the account balance\n            raise RuntimeError('Amount greater than available balance.')\n        self.balance -= amount\n        return self.balance\n    \n    def deposit(self, amount): # deposit money\n        self.balance += amount\n        return self.balance", "_____no_output_____" ] ], [ [ "* Line 1: every Python 3 class is a subclass of the class object.\n* Line 2: when an object is created, the initialiser __init__() (the equivalent of a constructor in Java) initialises some attributes belonging to the object. In this example, the object's two attributes, the customer's name and the account balance, are created.\n* Every method must receive the object itself as its first parameter. By convention, the object itself is called self.", "_____no_output_____" ] ], [ [ "a=Customer(\"Bill\",100)\na.withdraw(70)", "_____no_output_____" ], [ "a.deposit(60)", "_____no_output_____" ], [ "a.withdraw(100)", "_____no_output_____" ] ], [ [ "[Back to index](#目的:了解Python基本語法)", "_____no_output_____" ], [ "---", "_____no_output_____" ], [ "## <a id=\"12\"/>NumPy (the Python package for handling numerical arrays)", "_____no_output_____" ], [ "This package is used to create numerical arrays and perform numerical computations.", "_____no_output_____" ], [ "https://docs.scipy.org/doc/numpy/reference/index.html", "_____no_output_____" ] ], [ [ "import numpy as np", "_____no_output_____" ] ], [ [ "The built-in constant $\\pi$", "_____no_output_____" ] ], [ [ "np.pi", "_____no_output_____" ] ], [ [ "Compute the square root of $\\pi$", "_____no_output_____" ] ], [ [ "np.sqrt(np.pi)", "_____no_output_____" ] ], [ [ "[Back to index](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"13\"/>One-dimensional sequences", "_____no_output_____" ], [ "Use np.arange(n) to create a sequence whose content is [0 1 2 .....n-1]", "_____no_output_____" ] ], [ [ "np.arange(10)", "_____no_output_____" ] ], [ [ "Use np.linspace(0,2.*np.pi,10) to create a one-dimensional linear space, starting at 0 and ending at $2\\pi$, with 10 points in total.", "_____no_output_____" ] ], [ [ "np.linspace(0,2.*np.pi,10)", "_____no_output_____" ] ], [ [ "Add 100 to every value in the sequence", "_____no_output_____" ] ], [ [ "np.arange(10)+100", "_____no_output_____" ] ], [ [ "Square every value in the sequence", "_____no_output_____" ] ], [ [ "np.arange(10)**2", "_____no_output_____" ] ], [ [ "Use np.mean() to compute the arithmetic mean", "_____no_output_____" ] ], [ [ "np.mean( np.arange(10) )", "_____no_output_____" ] ], [ [ "Use np.std() to compute the standard deviation", "_____no_output_____" ] ], [ [ "np.std( np.arange(10) )", "_____no_output_____" ] ], [ [ "Compare the performance of a NumPy array and a Python list", "_____no_output_____" ] ], [ [ "a=np.random.normal(0,1,100000) # 100000 normally distributed random numbers\nb=np.random.normal(0,1,100000) # 100000 normally distributed random numbers\n\nlist_a=list(a)\nlist_b=list(b)", "_____no_output_____" ], [ "%%timeit\nres=a+b", "51.8 µs ± 51.1 ns per loop (mean ± std. dev. of 7 runs, 10000 loops each)\n" ], [ "%%timeit\n\nres=[] \nfor j in range(len(list_a)):\n    res.append(list_a[j]+list_b[j])", "14.2 ms ± 20 µs per loop (mean ± std. dev. 
of 7 runs, 100 loops each)\n" ] ], [ [ "NumPy is faster because\n* it is vectorized (data can be fed to several arithmetic logic units at once, which speeds up the computation)\n* the data in an array all share the same type, so no element-by-element type checks are needed when adding.", "_____no_output_____" ], [ "[Back to index](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"14\"/>Two-dimensional matrices", "_____no_output_____" ], [ "Create a matrix", "_____no_output_____" ] ], [ [ "A=np.array([[1,2,3],[4,5,6],[7,8,9]])", "_____no_output_____" ], [ "A", "_____no_output_____" ] ], [ [ "Transpose $A$ ($A^{T}$)", "_____no_output_____" ] ], [ [ "A.T", "_____no_output_____" ] ], [ [ "$A\\cdot A^{T}$", "_____no_output_____" ] ], [ [ "A.dot(A.T)", "_____no_output_____" ] ], [ [ "Slicing, list style: use A[index0][index1] to extract part of the 2-D array $A$.", "_____no_output_____" ] ], [ [ "A[0]", "_____no_output_____" ], [ "A[1:3]", "_____no_output_____" ], [ "A[1:3]", "_____no_output_____" ], [ "A[:][1:3]", "_____no_output_____" ] ], [ [ "Slicing, matrix style: use A[index0,index1] to extract part of the 2-D array $A$. (index0 direction: vertical; index1 direction: horizontal)", "_____no_output_____" ] ], [ [ "A", "_____no_output_____" ], [ "A[1:3,:]", "_____no_output_____" ], [ "A[:,1:3]", "_____no_output_____" ] ], [ [ "Check the shape of A", "_____no_output_____" ] ], [ [ "A.shape", "_____no_output_____" ] ], [ [ "Use a condition to find the values in A that satisfy it", "_____no_output_____" ] ], [ [ "A>5", "_____no_output_____" ], [ "A[A>5]", "_____no_output_____" ] ], [ [ "[Back to index](#目的:了解Python基本語法)", "_____no_output_____" ], [ "## <a id=\"ex4\" style='color:purple'/>Exercise 4: create a function f. Input: a 2-D matrix; output: the sum of all the values in that 2-D matrix.", "_____no_output_____" ] ], [ [ "A=np.array([[1,2,3],[4,5,6],[7,8,9]])", "_____no_output_____" ], [ "def f(A):\n    # complete this function\n    return ???", "_____no_output_____" ] ], [ [ "[Back to index](#目的:了解Python基本語法)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "raw", "code", "markdown", "raw", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "raw" ], [ "code" ], [ "markdown", "markdown" ], [ "raw" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ] ]
cb55fa706ae29dd6af108f50c17c2c82aeabc1e8
147,262
ipynb
Jupyter Notebook
WebVisualizations/Untitled.ipynb
Damola-A/Web-Design-Challenge
c11b79729b8539ddfd5b17aa6913fe7ad516e677
[ "ADSL" ]
null
null
null
WebVisualizations/Untitled.ipynb
Damola-A/Web-Design-Challenge
c11b79729b8539ddfd5b17aa6913fe7ad516e677
[ "ADSL" ]
null
null
null
WebVisualizations/Untitled.ipynb
Damola-A/Web-Design-Challenge
c11b79729b8539ddfd5b17aa6913fe7ad516e677
[ "ADSL" ]
null
null
null
423.166667
137,394
0.387846
[ [ [ "import pandas as pd", "_____no_output_____" ], [ "df = pd.read_csv('cities.csv')\ndf.head()", "_____no_output_____" ], [ "data = df.set_index(\"City_ID\")\ndata.head()", "_____no_output_____" ], [ "html_table = data.to_html()\nhtml_table", "_____no_output_____" ], [ "data.to_html('Data.html')", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code" ] ]
cb56051c3d392a02bff853ac64c24ca74d2f1a10
16,478
ipynb
Jupyter Notebook
CourseworkAssignment_Student.ipynb
AmCholadawan/ILAS_intropython
86ec46a3ac7321d7310da2467eacfba5b890b7f6
[ "MIT" ]
null
null
null
CourseworkAssignment_Student.ipynb
AmCholadawan/ILAS_intropython
86ec46a3ac7321d7310da2467eacfba5b890b7f6
[ "MIT" ]
null
null
null
CourseworkAssignment_Student.ipynb
AmCholadawan/ILAS_intropython
86ec46a3ac7321d7310da2467eacfba5b890b7f6
[ "MIT" ]
null
null
null
35.284797
324
0.592001
[ [ [ "### ILAS: Introduction to Programming 2017/18\n# Coursework Assignment: Plant-life Report\n\n__Complete exercises A to E.__\n<br>__The exercises should be completed using Python programming skills we have covered in class. The questions are focussed on an imaginary case study:__ \n\n>It is though that the acidification of an area of protected land is having a destructive effect on plant populations. \n<br>Experts are particularly worried about the demise of a species of shrub called *winter heath*, that supports the area's insect populations, and the spread of an acid-loving poisonous weed called *darley heath* . <br>Chemical waste from local industries are thought to be reposonsible for the soil acidification.\n<br>Your job is to process data collected over a number of years to present as part of a report.\n<br>The report will be used as evidence to try and impose restrictions disposal of industrial waste within the area.\n\n<img src=\"img/map2.png\" alt=\"Drawing\" style=\"width: 500px;\"/>", "_____no_output_____" ], [ "### Input data\nData collectd by a plant survey over the past 20 years is given in the folder `environmental_survey` in the `sample_data` folder of the ILAS_python repository.\n\nThe survey was conducted once a year. \n\nThe locations and characteristics of plants and trees were recorded.\n\nSoil pH was also recorded at different locations.", "_____no_output_____" ], [ "### Setting up\n\nCreate a new folder in which to store your project.\n\nCopy the `environmental_survey` folder into the project folder.", "_____no_output_____" ], [ "### Part A: Assembling a Data Set\n\n__Aim: Import plant data from .csv files and manipulate data to convert units and remove unecessary values.__\n\n__(1.) Input and Output: Data Frames \n<br>*(5 marks)*__\n<br>Write a Python program that imports the data from the file `plants2017` and stores it as a __`pandas DataFrame`__. \n\nThe data set should contain only the data for shrub plants. \n<br>Remove the rows with \"tree\" in the plants column to leave only information about shrubs in your data set.\n\n(Hint: After removing data from a DataFrame use `df.reset_index(drop=True)` (where 'df' is the DataFrame name) to re-assign index numbers). ", "_____no_output_____" ], [ "__(2.) Functions__\n<br>__*(5 marks)*__\n<br>The GPS location information for each plant is in units of decimal degrees. \n<br>To make them more \"human readable\", the values should be converted to represent each data point on a 2D grid, with units of metres (or kilometres). \n\n<img src=\"img/lat_long.png\" alt=\"Drawing\" style=\"width: 400px;\"/>\n\nThe following equations can be used to approximate:\n - the vertical distance from the *equator* from `GPS_lat`\n - the horizontal distance from the *meridian* from `GPS_lon`\n \nThe latitude in m from the equator:\n\n$lat = \\frac{40,008,000 \\times GPS_{lat}}{360} $\n\n\nThe longitude in m from the meridian:\n\n$lon = \\frac{40,075,160 \\times GPS_{lon}}{360} \\times \\cos(GPS_{lat})$\n\n<img src=\"img/ParametricCircle.png\" alt=\"Drawing\" style=\"width: 200px;\"/>\n\nWrite code to convert GPS_lat and GPS_lon in decimal degrees to units of m or km, using the equation above. 
\n<br>__*Hint: `GPS_lat` and `GPS_lat` are given in degrees, `numpy.cos` automatically applies to angles given in radians.*__\n\nEncapsulate your code in a function so that it can be applied to any data frame.\n(Hint: your function should take the columns of data frame to be converted as its arguments).\n\nShow your function works by applying it to your data frame.\n\n(You can also want to *rename* your column heading as they are no longer GPS coordinates.)", "_____no_output_____" ], [ "__(3.) Functions and Data Structures: Boolean Indexing__ \n<br>__*(5 marks)*__\n<br>When fully grown, the four main shrubs that grow in the area can be identified by distinct features.\n\nTo include *only fully grown* plants in your data set:\n- Write a function that selects only plants above a height of 50cm.\n- Apply the function to your data set.\n- Edit your function so that the same function may be used to:\n - remove plants below 50cm by default\n - remove plants below a height set by the user\n \n", "_____no_output_____" ], [ "### Part B: Refining the Data Set and Mapping pH\n\n__Aim: Split the area over which the survey was taken into a grid of equally sized cells. Sort the pH samples by grid cell to show how pH varies across the area.__\n\n__(1.) Input and Output__ \n<br>__*(2 marks)*__\n<br>In the same Python file you wrote in __Part A__, import the data from the file `pH_2017` and store it as a new __`pandas DataFrame`__ called `pH`.\n<br>\n\n__(2.) Functions__\n<br>__*(2 marks)*__\n<br>Use the function that you wrote in __Part A (2.)__ to convert the the columns GPS_lat and GPS_lon in `pH` to units of m or km. ", "_____no_output_____" ] ], [ [ "')", "_____no_output_____" ] ], [ [ "The sampled area measures approximately 3445m x 3950m.\n<br>An orthoganol grid of 15 x 15 cells (3000m x 3000m) can be used to represent the sampled area:\n - the grid is chosen to be slightly smaller than the sampled area so that no unsampled regions are included.\n - the origin is chosen to be at \n - $x = x_{min} + \\frac{3445-3000}{2}$\n - $y = y_{min} + \\frac{3950-3000}{2}$\n \n<img src=\"img/map.png\" alt=\"Drawing\" style=\"width: 500px;\"/>\n\nThe following equation can be used to map a point, $P$, in range A to range B.\n\n$P_B=\\frac{P_A-A_{min}}{A_{max}-A_{min}} \\times (B_{max}-B_{min}) + B_{min}$\n\n__(3.) Functions and mathematical operators.__ \n<br>__*(5 marks)*__\n\nWrite a function called `scale` to map points in the range (origin, origin+3000) to the range (0, 3000).\n\nBy floor dividing (seminar 2) points in the range 0 to 3000 by 200, each point can be assigned an integer value in the range 0 to 14. Create an additional step in your function that uses floor division to assign an x and y grid reference to each data point. \n\nNote:\n- some grid references may be outside of the range 0 to 14.\n- multiple cells will blong to the same grid reference.\n\nAdd two new columns to your DataFrame to store the x and y grid reference for each data point \n\nStore your function that assigns a grid index as function so that it can be applied to any data set collected in the same area.", "_____no_output_____" ], [ "__(3.) `numpy` multi-dimensional arrays.__ \n<br>__*(2 marks)*__\n<br>_Find the mean of the pH readings taken in each grid cell.\n<br>Use a 2D numpy array to store each mean reading at each 2D grid location. \n\n", "_____no_output_____" ], [ "__(4.) Plotting__\n<br>__*(3 marks)*__\n<br>Plot the mean pH for each grid cell as a colour map of the gridded area. 
\n<br>You may use a *2D colour map* or a *3D plot*. \n<br>Save your figure as a .png file in your project folder.\n", "_____no_output_____" ], [ "### Part C: Classifying Plants Using Simple Mathematical Operations\n\n__Aim: Sort the plant samples species. Produce a total count of each species in each grid cell.__\n\n<br>The shrub plants in your DataFrame from __Part A__ can be catagorsied as one of four species.\n\nThe *average* physical characteristics of each *plant species* are shown in the table below:\n\n|Shrub |Height (m)|Leaf length (cm)|Leaf aspect ratio|Bud length (cm)|\n|------------|----------|----------------|-----------------|---------------|\n|Winter heath| 1.2| 3.5| 2.0| 2.3| \n|Bell heather| 1.8| 1.5| 1.2| 2.3|\n|Brush bush | 0.7| 2.1| 10.2| 1.5|\n|Darley heath| 0.7| 2.2| 3.1| 1.7|\n\n<br>The *vector quantisation algorithm* is a simple algorithm used for catagorisation.\n\nIt determines which catagory a data point should belong to from its closest proximity to a set of values representing possible catagories.\n<br>Each value represents the *average* of the corresponding catagory.\n\nThe *closeness* of the characteristics of a point $(c_1, c_2, c_3, ... c_n)$ to the average value of a catagory $(ca_1, ca_2, ca_3, ... ca_n)$ can be determined by the magnitude:\n\n<br>$d = \\sqrt{(ca_1-c_1)^2 + (ca_2-c_2)^2 + (ca_3-c_3)^2 + ... + (ca_n-c_n)^2}$ <br>\n\nIf $d$ is evaluated for each catagory, the catagory with the *minimium* value of $d$ represents the closest fit.\n\nThe vector quantisation algorithm can be applied to each data point using a for loop or numpy broadcasting.\n\n\n\n__(1.) Mathematical compuation with Numpy__ \n<br>__*(5 marks)*__\n<br>Use the vector quantisation algorithm to determine the species of each plant.\n<br>Hint: Use a for loop or use broadcasting. \n<br>Add a column to your DataFrame called \"species\" with the species of each plant that most closely fits the plant characteristics. ", "_____no_output_____" ], [ "__(2.) Functions__ \n<br>__*(1 mark)*__\n<br>Use the function that you wrote for __Part B: (2.)__ to assign a grid reference to each data point. <br>Save the grid refernce x and y value as two columns in your Data Frame.\n", "_____no_output_____" ], [ "__(3.) Data Structures: Lists__ \n<br>__*(5 marks)*__\n\nCreate a list for each of the following fields.\n1. x grid index \n1. y grid index \n1. average pH reading\n1. total count of *Winter heath* plant\n1. total count of *Bell heather* plant\n1. total count of *Brush bush* plant\n1. total count of *Darley heath* plant\n\nLoop through each grid cell and store a computed value for each field. \n\nStore the lists as a list of lists (nested lists).\n\n\n", "_____no_output_____" ], [ "### Part D: Using Multiple Files to Produce Time-Series Data\n\n__Aim: Run all the steps that you coded in Part A-C for every envioronmental survey collected between the years 1997-2017 to produce time series data of the plant count and average pH.__\n\n__(1.) 
", "_____no_output_____" ], [ "### Part D: Using Multiple Files to Produce Time-Series Data\n\n__Aim: Run all the steps that you coded in Parts A-C for every environmental survey collected between the years 1997-2017 to produce time-series data of the plant count and average pH.__\n\n__(1.) Control Flow__ \n<br>__*(5 marks)*__\n<br>Use a for loop to store a list of lists like you created in __Part C: (3.)__ for each year of the environmental survey (1997-2017).\n\nHint: You can loop through each plant survey using:\n>```Python \nannual_data=[]\nfor year in range(1997, 2018):\n    df = pd.read_csv(\"environmental_survey/plants\" + str(year) + \".csv\") \n```\n\nHint: Append the list of lists created in __Part C: (3.)__ to the list `annual_data` each time the code loops.\n\n__(2.) Plotting and Curve Fitting__\n<br>__*(5 marks)*__\n\n<br>The two closest industrial sites to the area of land are:\n<br>__Sketchy inc.__, established 1995, GPS coordinates lon = 136.7647, lat = 35.7336\n<br>__Philamore co.__, established 1990, GPS coordinates lon = 136.8262, lat = 35.7498\n\n<br>Choose one grid cell that is close to an industrial site and one grid cell that is far from the industrial sites.\n<br>Plot a scatter graph of the average pH and plant count for each species (y axis) against time (x axis).\n<br>Fit a trendline to each data series.\n<br>Show the equation of the trendline and the proximity to an industrial site as labels. ", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ] ]
cb56230b2965cb105fa859a4e522fd22529d4689
1,593
ipynb
Jupyter Notebook
crawler.ipynb
maxmilian/twse_bshtm_captcha
b2d6764f1cde50aed085c9990dda81e6ecf72435
[ "Apache-2.0" ]
7
2019-09-20T01:41:09.000Z
2022-02-13T05:45:39.000Z
crawler.ipynb
maxmilian/twse_bshtm_captcha
b2d6764f1cde50aed085c9990dda81e6ecf72435
[ "Apache-2.0" ]
null
null
null
crawler.ipynb
maxmilian/twse_bshtm_captcha
b2d6764f1cde50aed085c9990dda81e6ecf72435
[ "Apache-2.0" ]
4
2020-10-16T04:10:21.000Z
2021-11-25T09:43:17.000Z
24.507692
87
0.497803
[ [ [ "import requests, shutil, time\nfrom bs4 import BeautifulSoup\n\nFOLDER = \"captcha/\"\n\ndef getCatchaSrc():\n resp = requests.get(\"https://bsr.twse.com.tw/bshtm/bsMenu.aspx\")\n soup = BeautifulSoup(resp.text, 'html.parser')\n img_tags = soup.select(\"#Panel_bshtm img\")\n return img_tags[0].get('src')\n\ni = 4480\nwhile i < 4500:\n src = getCatchaSrc()\n resp = requests.get(\"https://bsr.twse.com.tw/bshtm/\" + src, stream=True)\n if resp.status_code == 200:\n i += 1\n with open(FOLDER + str(i) + \".jpg\", 'wb') as f:\n resp.raw.decode_content = True\n shutil.copyfileobj(resp.raw, f)\n print(\"i: \" + str(i))\n time.sleep(1)\n \nprint(\"completed\")", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code" ] ]
cb562472979b87875c82c956ab6aa759b282d719
35,849
ipynb
Jupyter Notebook
aws_sagemaker_studio/sagemaker_studio_image_build/xgboost_bring_your_own/Batch_Transform_BYO_XGB.ipynb
Amirosimani/amazon-sagemaker-examples
bc35e7a9da9e2258e77f98098254c2a8e308041a
[ "Apache-2.0" ]
2
2021-08-30T20:02:12.000Z
2021-11-16T11:16:19.000Z
aws_sagemaker_studio/sagemaker_studio_image_build/xgboost_bring_your_own/Batch_Transform_BYO_XGB.ipynb
Amirosimani/amazon-sagemaker-examples
bc35e7a9da9e2258e77f98098254c2a8e308041a
[ "Apache-2.0" ]
1
2021-09-17T10:36:36.000Z
2021-09-17T10:36:36.000Z
aws_sagemaker_studio/sagemaker_studio_image_build/xgboost_bring_your_own/Batch_Transform_BYO_XGB.ipynb
Amirosimani/amazon-sagemaker-examples
bc35e7a9da9e2258e77f98098254c2a8e308041a
[ "Apache-2.0" ]
2
2021-06-24T11:49:58.000Z
2021-06-24T11:54:01.000Z
35.599801
602
0.577422
[ [ [ "# SageMaker Batch Transform using an XgBoost Bring Your Own Container (BYOC)\n\nIn this notebook, we will walk through an end to end data science workflow demonstrating how to build your own custom XGBoost Container using Amazon SageMaker Studio. We will first process the data using SageMaker Processing, push an XGB algorithm container to ECR, train the model, and use Batch Transform to generate inferences from your model in batch or offline mode. Finally we will use SageMaker Experiments to capture the metadata and lineage associated with the trained model. This is a key differentiator of SageMaker Studio as the metadata captured is visible in the Experiments UI. \n\n\n## The example\n\nIn this example we show how to package a custom XGBoost container with Amazon SageMaker studio with a Python example which works with the UCI Credit Card dataset. To use a different algorithm or a different dataset, you can easily change the Docker container and the xgboost folder attached with this code.\n\nIn this example, we use a single image to support training and hosting. This simplifies the procedure because we only need to manage one image for both tasks. Sometimes you may want separate images for training and hosting because they have different requirements. In this case, separate the parts discussed below into separate Dockerfiles and build two images. Choosing whether to use a single image or two images is a matter of what is most convenient for you to develop and manage.\n\nIf you're only using Amazon SageMaker for training or hosting, but not both, only the functionality used needs to be built into your container.\n\n## The workflow\n\nThis notebook is divided into three parts: *exploring your data and feature engineering*, *building your contianer* and *using your container to train a model and generate inferences*", "_____no_output_____" ], [ "### The Dockerfile\n\nThe Dockerfile describes the image that we want to build. You can think of it as describing the complete operating system installation of the system that you want to run. A Docker container running is quite a bit lighter than a full operating system, however, because it takes advantage of Linux on the host machine for the basic operations. \n\nFor the Python science stack, we start from an official TensorFlow docker image and run the normal tools to install TensorFlow Serving. Then we add the code that implements our specific algorithm to the container and set up the right environment for it to run under.\n\nFor details on how BYOC works with SageMaker Notebook instances, see this example: https://github.com/awslabs/amazon-sagemaker-examples/blob/master/advanced_functionality/scikit_bring_your_own/scikit_bring_your_own.ipynb. Unlike SageMaker notebook instances, in SageMaker studio as we will see below, you will not need the build_and_push.sh script anymore. The studio-build CLI will handle pushing the container to ECR for you. 
\n\nLet's look at the Dockerfile for this example.", "_____no_output_____" ] ], [ [ "!cat Dockerfile", "_____no_output_____" ] ], [ [ "### Step 1: Pre-requisites: Download the necessary libraries", "_____no_output_____" ] ], [ [ "import sys\n\n#!{sys.executable} -m pip install \"sagemaker-experiments\"\n#!{sys.executable} -m pip install \"sagemaker-studio-image-build\"", "_____no_output_____" ] ], [ [ "### Step 2: Ensure IAM Role has access to necessary services\n\nThe SageMaker Studio Image Build CLI uses Amazon Elastic Container Registry and AWS CodeBuild, so we need to ensure that the role we provide as input to our CLI commands has the necessary policies and permissions attached. \n\nTwo scenarios are supported: \n\n * **Add IAM Permissions to SageMaker Execution Role** \n\n   This scenario includes updating the Execution Role attached to this notebook instance with the required permissions. In this scenario, you need to get the current execution role and ensure the trust policy and additional permissions are associated with the role. \n   \n * **Create/Utilize a secondary role with appropriate permissions attached** \n\n   This scenario includes using a secondary role set up with the permissions below and identified in the --role argument when invoking the CLI (Example: *sm-docker build . --role build-cli-role*)\n   \n\n**Ensure the role that will be used has the following**\n\n1) Trust policy with CodeBuild\n\n    {\n      \"Version\": \"2012-10-17\",\n      \"Statement\": [\n        {\n          \"Effect\": \"Allow\",\n          \"Principal\": {\n            \"Service\": [\n              \"codebuild.amazonaws.com\"\n            ]\n          },\n          \"Action\": \"sts:AssumeRole\"\n        }\n      ]\n    }\n    \n2) Permissions attached to the execution role to execute a build in AWS CodeBuild, create an ECR repository and push images to ECR \n\n    {\n    \"Version\": \"2012-10-17\",\n    \"Statement\": [\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": [\n                \"codebuild:DeleteProject\",\n                \"codebuild:CreateProject\",\n                \"codebuild:BatchGetBuilds\",\n                \"codebuild:StartBuild\"\n            ],\n            \"Resource\": \"arn:aws:codebuild:*:*:project/sagemaker-studio*\"\n        },\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": \"logs:CreateLogStream\",\n            \"Resource\": \"arn:aws:logs:*:*:log-group:/aws/codebuild/sagemaker-studio*\"\n        },\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": [\n                \"logs:GetLogEvents\",\n                \"logs:PutLogEvents\"\n            ],\n            \"Resource\": \"arn:aws:logs:*:*:log-group:/aws/codebuild/sagemaker-studio*:log-stream:*\"\n        },\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": \"logs:CreateLogGroup\",\n            \"Resource\": \"*\"\n        },\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": [\n                \"ecr:CreateRepository\",\n                \"ecr:BatchGetImage\",\n                \"ecr:CompleteLayerUpload\",\n                \"ecr:DescribeImages\",\n                \"ecr:DescribeRepositories\",\n                \"ecr:UploadLayerPart\",\n                \"ecr:ListImages\",\n                \"ecr:InitiateLayerUpload\",\n                \"ecr:BatchCheckLayerAvailability\",\n                \"ecr:PutImage\"\n            ],\n            \"Resource\": \"arn:aws:ecr:*:*:repository/sagemaker-studio*\"\n        },\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": \"ecr:GetAuthorizationToken\",\n            \"Resource\": \"*\"\n        },\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": [\n                \"s3:GetObject\",\n                \"s3:DeleteObject\",\n                \"s3:PutObject\"\n            ],\n            \"Resource\": \"arn:aws:s3:::sagemaker-*/*\"\n        },\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": [\n                \"s3:CreateBucket\"\n            ],\n            \"Resource\": \"arn:aws:s3:::sagemaker*\"\n        },\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": [\n                \"iam:GetRole\",\n                \"iam:ListRoles\"\n            ],\n            \"Resource\": \"*\"\n        },\n        {\n            \"Effect\": \"Allow\",\n            \"Action\": \"iam:PassRole\",\n            \"Resource\": \"arn:aws:iam::*:role/*\",\n            \"Condition\": {\n                
\"StringLikeIfExists\": {\n \"iam:PassedToService\": \"codebuild.amazonaws.com\"\n }\n }\n }\n ]\n}", "_____no_output_____" ], [ "### Restart Kernel\n\nOnce the libraries are installed, restart the kernel by clicking Kernel --> Restart and Running all the cells below.", "_____no_output_____" ] ], [ [ "# Let's inspect the role we have created for our notebook here:\nimport boto3\nimport sagemaker\nfrom sagemaker import get_execution_role\n\nrole = get_execution_role()\nsess = sagemaker.Session()\nregion = boto3.session.Session().region_name\nprint(\"Region = {}\".format(region))\nsm = boto3.Session().client(\"sagemaker\")", "_____no_output_____" ] ], [ [ "### Complete Setup: Import libraries and set global definitions.\n\nAll needed libraries will come pre-installed with this notebook with the Lifecycle configuration scripts.", "_____no_output_____" ] ], [ [ "import matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport os\nfrom time import sleep, gmtime, strftime\nimport json\nimport time", "_____no_output_____" ], [ "# Import SageMaker Experiments\n\nfrom sagemaker.analytics import ExperimentAnalytics\nfrom smexperiments.experiment import Experiment\nfrom smexperiments.trial import Trial\nfrom smexperiments.trial_component import TrialComponent\nfrom smexperiments.tracker import Tracker", "_____no_output_____" ] ], [ [ "### Specify buckets for storing data", "_____no_output_____" ] ], [ [ "# Use our custom bucket here.\nrawbucket = sess.default_bucket()\nprefix = \"sagemaker-modelmonitor\" # use this prefix to store all files pertaining to this workshop.\n\ndataprefix = prefix + \"/data\"\ntraindataprefix = prefix + \"/train_data\"\ntestdataprefix = prefix + \"/test_data\"\ntestdatanolabelprefix = prefix + \"/test_data_no_label\"\ntrainheaderprefix = prefix + \"/train_headers\"", "_____no_output_____" ] ], [ [ "### Step 3: Data Exploration", "_____no_output_____" ], [ "A key part of the data science lifecyle is data exploration, pre-processing and feature engineering. We will demonstrate how to use SM notebooks for data exploration and SM Processing for feature engineering and pre-processing data", "_____no_output_____" ], [ "### Download and Import the data", "_____no_output_____" ], [ "We will use the UCI Machine Learning Archive dataset on payment default for this example [https://archive.ics.uci.edu/ml/datasets/default+of+credit+card+client]. Here we have a number of common features such as payment histories from prior months, payments, bills etc to predict a binary outcome -- whether or not a user will default on their payment in the following month.", "_____no_output_____" ] ], [ [ "data = pd.read_excel(\"data.xls\", header=1)\ndata = data.drop(columns=[\"ID\"])\ndata.head()", "_____no_output_____" ], [ "data.rename(columns={\"default payment next month\": \"Label\"}, inplace=True)\nlbl = data.Label\ndata = pd.concat([lbl, data.drop(columns=[\"Label\"])], axis=1)\ndata.head()", "_____no_output_____" ], [ "COLS = data.columns", "_____no_output_____" ] ], [ [ "### Data Exploration\n\nOnce you have downloaded the dataset, the next step in the data science lifecycle is to explore the dataset. 
A correlation plot can indicate whether the features are correlated with one another and with the label itself.", "_____no_output_____" ] ], [ [ "## Corr plot\nf = plt.figure(figsize=(19, 15))\nplt.matshow(data.corr(), fignum=f.number)\nplt.xticks(range(data.shape[1]), data.columns, fontsize=14, rotation=45)\nplt.yticks(range(data.shape[1]), data.columns, fontsize=14)\ncb = plt.colorbar()\ncb.ax.tick_params(labelsize=14)\nplt.title(\"Correlation Matrix\", fontsize=16);", "_____no_output_____" ], [ "from pandas.plotting import scatter_matrix\n\nSCAT_COLUMNS = [\"BILL_AMT1\", \"BILL_AMT2\", \"PAY_AMT1\", \"PAY_AMT2\"]\nscatter_matrix(data[SCAT_COLUMNS], figsize=(10, 10), diagonal=\"kde\")\nplt.show()", "_____no_output_____" ] ], [ [ "### Step 4: Secure Feature Processing pipeline using SageMaker Processing\n\nWhile you can pre-process small amounts of data directly in a notebook, SageMaker Processing offloads the heavy lifting of pre-processing larger datasets by provisioning the underlying infrastructure, downloading the data from an S3 location to the processing container, running the processing scripts, storing the processed data in an output directory in Amazon S3 and deleting the underlying transient resources needed to run the processing job. Once the processing job is complete, the infrastructure used to run the job is wiped, and any temporary data stored on it is deleted.", "_____no_output_____" ] ], [ [ "if not os.path.exists('rawdata/rawdata.csv'):\n    !mkdir rawdata\n    data.to_csv('rawdata/rawdata.csv', index=None)", "_____no_output_____" ], [ "# Upload the raw dataset\nraw_data_location = sess.upload_data(\"rawdata\", bucket=rawbucket, key_prefix=dataprefix)\nprint(raw_data_location)", "_____no_output_____" ], [ "## Use SageMaker Processing with scikit-learn. 
# combine data into train and test at this stage if possible.\nfrom sagemaker.sklearn.processing import SKLearnProcessor\n\nsklearn_processor = SKLearnProcessor(\n    framework_version=\"0.20.0\", role=role, instance_type=\"ml.c4.xlarge\", instance_count=1\n)", "_____no_output_____" ] ], [ [ "### Write a preprocessing script (same as above)", "_____no_output_____" ] ], [ [ "%%writefile preprocessing.py\n\nimport argparse\nimport os\nimport warnings\n\nimport pandas as pd\nimport numpy as np\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.preprocessing import StandardScaler, MinMaxScaler\nfrom sklearn.exceptions import DataConversionWarning\nfrom sklearn.compose import make_column_transformer\n\nwarnings.filterwarnings(action=\"ignore\", category=DataConversionWarning)\n\nif __name__ == \"__main__\":\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"--train-test-split-ratio\", type=float, default=0.3)\n    parser.add_argument(\"--random-split\", type=int, default=0)\n    args, _ = parser.parse_known_args()\n\n    print(\"Received arguments {}\".format(args))\n\n    input_data_path = os.path.join(\"/opt/ml/processing/input\", \"rawdata.csv\")\n\n    print(\"Reading input data from {}\".format(input_data_path))\n    df = pd.read_csv(input_data_path)\n    df = df.sample(frac=1)  # shuffle the rows\n\n    COLS = df.columns\n    newcolorder = (\n        [\"PAY_AMT1\", \"BILL_AMT1\"]\n        + list(COLS[1:])[:11]\n        + list(COLS[1:])[12:17]\n        + list(COLS[1:])[18:]\n    )\n\n    split_ratio = args.train_test_split_ratio\n    random_state = args.random_split\n\n    X_train, X_test, y_train, y_test = train_test_split(\n        df.drop(\"Label\", axis=1), df[\"Label\"], test_size=split_ratio, random_state=random_state\n    )\n\n    preprocess = make_column_transformer(\n        ([\"PAY_AMT1\"], StandardScaler()), ([\"BILL_AMT1\"], MinMaxScaler()), remainder=\"passthrough\"\n    )\n\n    print(\"Running preprocessing and feature engineering transformations\")\n    train_features = pd.DataFrame(preprocess.fit_transform(X_train), columns=newcolorder)\n    test_features = pd.DataFrame(preprocess.transform(X_test), columns=newcolorder)\n\n    # concat to ensure Label column is the first column in dataframe\n    train_full = pd.concat(\n        [pd.DataFrame(y_train.values, columns=[\"Label\"]), train_features], axis=1\n    )\n    test_full = pd.concat([pd.DataFrame(y_test.values, columns=[\"Label\"]), test_features], axis=1)\n\n    print(\"Train data shape after preprocessing: {}\".format(train_features.shape))\n    print(\"Test data shape after preprocessing: {}\".format(test_features.shape))\n\n    train_features_headers_output_path = os.path.join(\n        \"/opt/ml/processing/train_headers\", \"train_data_with_headers.csv\"\n    )\n\n    train_features_output_path = os.path.join(\"/opt/ml/processing/train\", \"train_data.csv\")\n\n    test_features_output_path = os.path.join(\"/opt/ml/processing/test\", \"test_data.csv\")\n\n    print(\"Saving training features to {}\".format(train_features_output_path))\n    train_full.to_csv(train_features_output_path, header=False, index=False)\n    print(\"Complete\")\n\n    print(\"Saving training data with headers to {}\".format(train_features_headers_output_path))\n    train_full.to_csv(train_features_headers_output_path, index=False)\n\n    print(\"Saving test features to {}\".format(test_features_output_path))\n    test_full.to_csv(test_features_output_path, header=False, index=False)\n    print(\"Complete\")", "_____no_output_____" ], [ "# Copy the preprocessing code over to the s3 bucket\ncodeprefix = prefix + \"/code\"\ncodeupload = sess.upload_data(\"preprocessing.py\", bucket=rawbucket, 
key_prefix=codeprefix)\nprint(codeupload)", "_____no_output_____" ], [ "train_data_location = rawbucket + \"/\" + traindataprefix\ntest_data_location = rawbucket + \"/\" + testdataprefix\nprint(\"Training data location = {}\".format(train_data_location))\nprint(\"Test data location = {}\".format(test_data_location))", "_____no_output_____" ] ], [ [ "Next we will execute the script above using the managed scikit-learn preprocessing container. This step may take a few minutes to execute.", "_____no_output_____" ] ], [ [ "from sagemaker.processing import ProcessingInput, ProcessingOutput\n\nsklearn_processor.run(\n    code=codeupload,\n    inputs=[ProcessingInput(source=raw_data_location, destination=\"/opt/ml/processing/input\")],\n    outputs=[\n        ProcessingOutput(\n            output_name=\"train_data\",\n            source=\"/opt/ml/processing/train\",\n            destination=\"s3://\" + train_data_location,\n        ),\n        ProcessingOutput(\n            output_name=\"test_data\",\n            source=\"/opt/ml/processing/test\",\n            destination=\"s3://\" + test_data_location,\n        ),\n        ProcessingOutput(\n            output_name=\"train_data_headers\",\n            source=\"/opt/ml/processing/train_headers\",\n            destination=\"s3://\" + rawbucket + \"/\" + prefix + \"/train_headers\",\n        ),\n    ],\n    arguments=[\"--train-test-split-ratio\", \"0.2\"],\n)\n\npreprocessing_job_description = sklearn_processor.jobs[-1].describe()\n\noutput_config = preprocessing_job_description[\"ProcessingOutputConfig\"]\nfor output in output_config[\"Outputs\"]:\n    if output[\"OutputName\"] == \"train_data\":\n        preprocessed_training_data = output[\"S3Output\"][\"S3Uri\"]\n    if output[\"OutputName\"] == \"test_data\":\n        preprocessed_test_data = output[\"S3Output\"][\"S3Uri\"]", "_____no_output_____" ] ], [ [ "# Part 2: Building the Container and Training the model\n\n\n### Step 5: Set up SageMaker Experiments", "_____no_output_____" ], [ "In this notebook, we first build the Docker image by providing the Dockerfile discussed before and train a model using that Dockerfile.\n\nWe use SageMaker Experiments so that data scientists can track the lineage of the model from the raw data source to the preprocessing steps and the model training pipeline. With SageMaker Experiments, data scientists can compare, track and manage multiple different model training jobs, data processing jobs and hyperparameter tuning jobs, and retain a lineage from the source data to the training job artifacts to the model hyperparameters and any custom metrics that they may want to monitor as part of the model training.\n", "_____no_output_____" ] ], [ [ "# Create a SageMaker Experiment\ncc_experiment = Experiment.create(\n    experiment_name=f\"CreditCardDefault-{int(time.time())}\",\n    description=\"Predict credit card default from payments data\",\n    sagemaker_boto_client=sm,\n)\nprint(cc_experiment)", "_____no_output_____" ] ], [ [ "In addition to training, we want to track the lineage of the entire machine learning pipeline, including the processing job above. 
", "_____no_output_____" ] ], [ [ "# Start Tracking parameters used in the Pre-processing pipeline.\nwith Tracker.create(display_name=\"Preprocessing\", sagemaker_boto_client=sm) as tracker:\n tracker.log_parameters({\"train_test_split_ratio\": 0.2, \"random_state\": 0})\n # we can log the s3 uri to the dataset we just uploaded\n tracker.log_input(name=\"ccdefault-raw-dataset\", media_type=\"s3/uri\", value=raw_data_location)\n tracker.log_input(\n name=\"ccdefault-train-dataset\", media_type=\"s3/uri\", value=train_data_location\n )\n tracker.log_input(name=\"ccdefault-test-dataset\", media_type=\"s3/uri\", value=test_data_location)", "_____no_output_____" ] ], [ [ "### Step 6: Build XgBoost container for training\n\nThe code for the XGB container is already supplied with this notebook. We simply need to build this container and push it to ECR. The single line of code below will do it.", "_____no_output_____" ] ], [ [ "!sm-docker build .", "_____no_output_____" ] ], [ [ "### Step 7: Train the Model\n\nThe same security postures we applied previously during SM Processing apply to training jobs. We will also have SageMaker experiments track the training job and store metadata such as model artifact location, training/validation data location, model hyperparameters etc.\n\nAs shown above, your image URI has the following form:\nImage URI: {account-id}.dkr.ecr.{region}.amazonaws.com/sagemaker-studio-{studioID}:{username}", "_____no_output_____" ] ], [ [ "account = sess.boto_session.client(\"sts\").get_caller_identity()[\"Account\"]\necr = boto3.client(\"ecr\")\ndomain_id = \"sagemaker-studio-{}\".format(sm.list_apps()[\"Apps\"][0][\"DomainId\"])\nimage_tag = ecr.list_images(repositoryName=domain_id, filter={\"tagStatus\": \"TAGGED\"})[\"imageIds\"][\n 0\n][\"imageTag\"]\nimage = \"{}.dkr.ecr.{}.amazonaws.com/{}:{}\".format(account, region, domain_id, image_tag)\npreprocessing_trial_component = tracker.trial_component\n\ntrial_name = f\"cc-fraud-training-job-{int(time.time())}\"\ncc_trial = Trial.create(\n trial_name=trial_name, experiment_name=cc_experiment.experiment_name, sagemaker_boto_client=sm\n)\n\ncc_trial.add_trial_component(preprocessing_trial_component)\ncc_training_job_name = \"cc-training-job-{}\".format(int(time.time()))", "_____no_output_____" ], [ "xgb = sagemaker.estimator.Estimator(\n image,\n role,\n instance_count=1,\n instance_type=\"ml.m4.xlarge\",\n max_run=86400,\n output_path=\"s3://{}/{}/models\".format(rawbucket, prefix),\n sagemaker_session=sess,\n) # set to true for distributed training\n\nxgb.set_hyperparameters(\n max_depth=5,\n eta=0.2,\n gamma=4,\n min_child_weight=6,\n subsample=0.8,\n verbosity=0,\n objective=\"binary:logistic\",\n num_round=100,\n)\n\nxgb.fit(\n inputs={\"training\": \"s3://\" + train_data_location},\n job_name=cc_training_job_name,\n experiment_config={\n \"TrialName\": cc_trial.trial_name, # log training job in Trials for lineage\n \"TrialComponentDisplayName\": \"Training\",\n },\n wait=True,\n)\ntime.sleep(2)", "_____no_output_____" ] ], [ [ "Having used SageMaker Experiments to track the training runs, we can now extract model metadata to get the entire lineage of the model from the source data to the model artifacts and the hyperparameters.\n\nTo do this, simply call the **describe_trial_component** API.", "_____no_output_____" ] ], [ [ "# Present the Model Lineage as a dataframe\nfrom sagemaker.session import Session\n\nsession = boto3.Session()\nlineage_table = ExperimentAnalytics(\n sagemaker_session=Session(session, sm),\n 
search_expression={\n        \"Filters\": [{\"Name\": \"Parents.TrialName\", \"Operator\": \"Equals\", \"Value\": trial_name}]\n    },\n    sort_by=\"CreationTime\",\n    sort_order=\"Ascending\",\n)\nlineagedf = lineage_table.dataframe()\n\nlineagedf", "_____no_output_____" ], [ "# get detailed information about a particular trial\nsm.describe_trial_component(TrialComponentName=lineagedf.TrialComponentName[1])", "_____no_output_____" ] ], [ [ "# Part 3: Using the trained model for inference\n\n### Step 8: Inference using Batch Transform\n\nLet's first use Batch Transform to generate inferences for the test dataset you pre-processed before. \n", "_____no_output_____" ] ], [ [ "s3 = boto3.client(\"s3\")\ns3.download_file(rawbucket, testdataprefix + \"/test_data.csv\", \"test_data.csv\")", "_____no_output_____" ], [ "newcolorder = (\n    [\"PAY_AMT1\", \"BILL_AMT1\"] + list(COLS[1:])[:11] + list(COLS[1:])[12:17] + list(COLS[1:])[18:]\n)\ntest_full = pd.read_csv(\"test_data.csv\", names=[\"Label\"] + newcolorder)\ntest_full.head()", "_____no_output_____" ], [ "test_data_no_label = test_full.drop(columns=[\"Label\"], axis=1)\nlabel = test_full[\"Label\"]\ntest_data_no_label.to_csv(\"test_data_no_label.csv\", index=False, header=False)\ntest_data_no_label.shape", "_____no_output_____" ], [ "sess = sagemaker.Session()\ntest_data_nohead_location = sess.upload_data(\n    \"test_data_no_label.csv\", bucket=rawbucket, key_prefix=testdatanolabelprefix\n)", "_____no_output_____" ], [ "%%time\n\nsm_transformer = xgb.transformer(1, \"ml.m5.xlarge\", accept=\"text/csv\")\n\n# start a transform job\nsm_transformer.transform(test_data_nohead_location, split_type=\"Line\", content_type=\"text/csv\")\nsm_transformer.wait()", "_____no_output_____" ], [ "import json\nimport io\nfrom urllib.parse import urlparse\n\n\ndef get_csv_output_from_s3(s3uri, file_name):\n    # read a single output file produced by the transform job back from S3\n    parsed_url = urlparse(s3uri)\n    bucket_name = parsed_url.netloc\n    prefix = parsed_url.path[1:]\n    s3 = boto3.resource(\"s3\")\n    obj = s3.Object(bucket_name, \"{}/{}\".format(prefix, file_name))\n    return obj.get()[\"Body\"].read().decode(\"utf-8\")", "_____no_output_____" ], [ "output = get_csv_output_from_s3(sm_transformer.output_path, \"test_data_no_label.csv.out\")\noutput_df = pd.read_csv(io.StringIO(output), sep=\",\", header=None)\noutput_df.head(8)", "_____no_output_____" ], [ "from sklearn.metrics import accuracy_score", "_____no_output_____" ], [ "1 - np.unique(data[\"Label\"], return_counts=True)[1][1] / (len(data[\"Label\"]))", "_____no_output_____" ], [ "print(\n    \"Baseline Accuracy = {}\".format(\n        1 - np.unique(data[\"Label\"], return_counts=True)[1][1] / (len(data[\"Label\"]))\n    )\n)\nprint(\"Accuracy Score = {}\".format(accuracy_score(label, output_df)))", "_____no_output_____" ], [ "output_df[\"Predicted\"] = output_df.values\noutput_df[\"Label\"] = label\n# rows are the model's predictions, columns are the actual labels\ncm = pd.crosstab(\n    output_df[\"Predicted\"],\n    output_df[\"Label\"],\n    rownames=[\"Predicted\"],\n    colnames=[\"Actual\"],\n    margins=True,\n)\ncm", "_____no_output_____" ] ], [ [ "### Step 9: Conclusions", "_____no_output_____" ], [ "In this notebook we demonstrated an end-to-end cycle of data exploration, data processing using SageMaker Processing, model development using an XGBoost Bring Your Own Container (which we pushed to ECR), model training, and offline inference using Batch Transform. 
Finally, we logged our training metadata using SageMaker Experiments.\n\nYou can use this notebook to experiment with end-to-end data science workflows using SageMaker Studio. \n\n\nRemember to delete your datasets in the Amazon S3 bucket you used for this notebook.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ] ]
cb56364f71dfbe4157bbbef0dde416bd94e55714
5,592
ipynb
Jupyter Notebook
100days/day 61 - hanoi tower II.ipynb
gopala-kr/ds-notebooks
bc35430ecdd851f2ceab8f2437eec4d77cb59423
[ "MIT" ]
13
2021-03-11T00:25:22.000Z
2022-03-19T00:19:23.000Z
100days/day 61 - hanoi tower II.ipynb
gopala-kr/ds-notebooks
bc35430ecdd851f2ceab8f2437eec4d77cb59423
[ "MIT" ]
160
2021-04-26T19:04:15.000Z
2022-03-26T20:18:37.000Z
100days/day 61 - hanoi tower II.ipynb
gopala-kr/ds-notebooks
bc35430ecdd851f2ceab8f2437eec4d77cb59423
[ "MIT" ]
12
2021-04-26T19:43:01.000Z
2022-01-31T08:36:29.000Z
24
76
0.381617
[ [ [ "## algorithm", "_____no_output_____" ] ], [ [ "def get_rods(move, towers, left, middle, right):\n if towers:\n if (move << 1) & (1 << towers):\n right.append(towers)\n get_rods(move, towers - 1, middle, left, right)\n else:\n left.append(towers)\n get_rods(move, towers - 1, left, right, middle)", "_____no_output_____" ], [ "def get_move(towers, left, middle, right):\n if not towers:\n return 0\n if not left or right and left[0] < right[0]:\n move = 1 << (towers - 1)\n return move + get_move(towers - 1, middle, left, right[1:])\n else:\n return get_move(towers - 1, left[1:], right, middle)", "_____no_output_____" ], [ "def hanoi(towers):\n for i in range(2 ** towers):\n rods = [], [], []\n get_rods(i, towers, *rods)\n move = get_move(towers, *rods)\n print('{:2} moves -- {} {} {}'.format(move, *rods))", "_____no_output_____" ] ], [ [ "## run", "_____no_output_____" ] ], [ [ "hanoi(2)", " 0 moves -- [2, 1] [] []\n 1 moves -- [2] [1] []\n 2 moves -- [] [1] [2]\n 3 moves -- [] [] [2, 1]\n" ], [ "hanoi(3)", " 0 moves -- [3, 2, 1] [] []\n 1 moves -- [3, 2] [] [1]\n 2 moves -- [3] [2] [1]\n 3 moves -- [3] [2, 1] []\n 4 moves -- [] [2, 1] [3]\n 5 moves -- [1] [2] [3]\n 6 moves -- [1] [] [3, 2]\n 7 moves -- [] [] [3, 2, 1]\n" ], [ "hanoi(4)", " 0 moves -- [4, 3, 2, 1] [] []\n 1 moves -- [4, 3, 2] [1] []\n 2 moves -- [4, 3] [1] [2]\n 3 moves -- [4, 3] [] [2, 1]\n 4 moves -- [4] [3] [2, 1]\n 5 moves -- [4, 1] [3] [2]\n 6 moves -- [4, 1] [3, 2] []\n 7 moves -- [4] [3, 2, 1] []\n 8 moves -- [] [3, 2, 1] [4]\n 9 moves -- [] [3, 2] [4, 1]\n10 moves -- [2] [3] [4, 1]\n11 moves -- [2, 1] [3] [4]\n12 moves -- [2, 1] [] [4, 3]\n13 moves -- [2] [1] [4, 3]\n14 moves -- [] [1] [4, 3, 2]\n15 moves -- [] [] [4, 3, 2, 1]\n" ], [ "hanoi(5)", " 0 moves -- [5, 4, 3, 2, 1] [] []\n 1 moves -- [5, 4, 3, 2] [] [1]\n 2 moves -- [5, 4, 3] [2] [1]\n 3 moves -- [5, 4, 3] [2, 1] []\n 4 moves -- [5, 4] [2, 1] [3]\n 5 moves -- [5, 4, 1] [2] [3]\n 6 moves -- [5, 4, 1] [] [3, 2]\n 7 moves -- [5, 4] [] [3, 2, 1]\n 8 moves -- [5] [4] [3, 2, 1]\n 9 moves -- [5] [4, 1] [3, 2]\n10 moves -- [5, 2] [4, 1] [3]\n11 moves -- [5, 2, 1] [4] [3]\n12 moves -- [5, 2, 1] [4, 3] []\n13 moves -- [5, 2] [4, 3] [1]\n14 moves -- [5] [4, 3, 2] [1]\n15 moves -- [5] [4, 3, 2, 1] []\n16 moves -- [] [4, 3, 2, 1] [5]\n17 moves -- [1] [4, 3, 2] [5]\n18 moves -- [1] [4, 3] [5, 2]\n19 moves -- [] [4, 3] [5, 2, 1]\n20 moves -- [3] [4] [5, 2, 1]\n21 moves -- [3] [4, 1] [5, 2]\n22 moves -- [3, 2] [4, 1] [5]\n23 moves -- [3, 2, 1] [4] [5]\n24 moves -- [3, 2, 1] [] [5, 4]\n25 moves -- [3, 2] [] [5, 4, 1]\n26 moves -- [3] [2] [5, 4, 1]\n27 moves -- [3] [2, 1] [5, 4]\n28 moves -- [] [2, 1] [5, 4, 3]\n29 moves -- [1] [2] [5, 4, 3]\n30 moves -- [1] [] [5, 4, 3, 2]\n31 moves -- [] [] [5, 4, 3, 2, 1]\n" ] ] ]
[ "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
cb5649c41d4a54145efda40025c0f82206ba56fb
6,352
ipynb
Jupyter Notebook
Interactive_Control.ipynb
razaulmustafa852/dashframework
e168fc989ea8b92deb5c47e7ba583352f98b1595
[ "MIT" ]
null
null
null
Interactive_Control.ipynb
razaulmustafa852/dashframework
e168fc989ea8b92deb5c47e7ba583352f98b1595
[ "MIT" ]
null
null
null
Interactive_Control.ipynb
razaulmustafa852/dashframework
e168fc989ea8b92deb5c47e7ba583352f98b1595
[ "MIT" ]
null
null
null
31.29064
211
0.496222
[ [ [ "import numpy as np\nimport pandas as pd\nimport seaborn as sns\nfrom scipy import stats\nimport matplotlib.pyplot as plt\nfrom ipywidgets import *\nimport warnings\nwarnings.simplefilter(action='ignore', category=Warning)\n%matplotlib inline\nfrom google.colab import drive", "_____no_output_____" ], [ "df = pd.read_csv(\"DEMOFINAL - Sheet1.csv\")\ndf = df.rename(columns={'rtt_qos': 'persegment_RTT', 'tp_qos': 'Throughput', 'p_qos': 'Packets'})", "_____no_output_____" ], [ "df.columns", "_____no_output_____" ], [ "def interactive_contol(Mobility, Column, Total_users, User_no , Algorithm, Target):\n t= Mobility\n c= Column\n u= User_no\n a=Algorithm\n tr= Target\n tu= Total_users\n \n if a=='Rate Based':\n case1= df[(df['algorithm_used']=='conventional')]\n case2= df[(df['algorithm_used']=='exponential')]\n a1='Conventional'\n a2='Exponential'\n elif a=='Buffer Based':\n case1= df[(df['algorithm_used']=='bba')]\n case2= df[(df['algorithm_used']=='logistic')]\n a1='BBA'\n a2='Logistic'\n else:\n case1= df[(df['algorithm_used']=='arbiter')]\n case2= df[(df['algorithm_used']=='elastic')]\n a1='Arbiter +'\n a2='Elastic'\n \n case1_final = case1[( case1['column']==c) & ( case1['type']==t) & ( case1['user_no']==u) & ( case1['total_users']==tu)]\n case2_final = case2[( case2['column']==c) & ( case2['type']==t) & ( case2['user_no']==u) & ( case2['total_users']==tu)]\n \n if c==8 and t=='driving':\n title = '0.5 - 3 Mbps';\n elif c==10 and t=='driving':\n title = '6 - 14 Mbps';\n elif c==1 and t=='driving':\n title = '38.26 - 10.33 Mbps';\n elif c==2 and t=='driving':\n title = '29.33 - 10.55 Mbps';\n elif c==4 and t=='static':\n title = '72.42 - 9 Mbps';\n elif c==5 and t=='static':\n title = '70 - 20 Mbps'; \n elif c==7 and t=='static':\n title = '4 - 7.6 Mbps';\n elif c==9 and t=='static':\n title = '0.5 - 6 Mbps';\n elif c==11 and t=='static':\n title = '8 - 57 Mbps'; \n else:\n title='Unknown Case'\n \n plt.style.use('classic')\n fig = plt.figure(figsize=(10,5))\n with plt.style.context('Solarize_Light2'):\n fig.set_facecolor('white')\n plt.rcParams['axes.facecolor'] = 'white'\n plt.plot(case1_final['intSeg'], case1_final[tr], label=a1)\n plt.plot(case2_final['intSeg'], case2_final[tr], label=a2, linestyle='--', color='orange')\n plt.title(title, fontsize=12)\n plt.xlabel('Segments (2 sec)', fontsize=12, color='black')\n plt.ylabel(tr, fontsize=12, color='black')\n plt.legend(loc='best',frameon=False)\n plt.grid(axis='y', c='#D3D3D3')\n plt.grid(axis='x', c='#D3D3D3')\n plt.tick_params(axis='x', colors='black')\n plt.tick_params(axis='y', colors='black')\n plt.show()\n \n\n", "_____no_output_____" ], [ "interact(interactive_contol, Mobility=['driving','static'], Column=[1,2,4,5,7,8,9,10,11],Total_users=[2,3], User_no=[1,2,3], Algorithm=['Rate Based','Hybrid', 'Buffer Based' ], Target=['Clae', 'Duanmu',\n 'Yin', 'Yu','P1203', 'persegment_RTT', 'Throughput', 'Packets','intArr','intDel', 'intSta', 'intDelRate',\n 'intActRate', 'intByteSize', 'floatBuf'])\n", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code" ] ]
cb565dc6e7d445368d6a6db47a2d701f437739e5
3,802
ipynb
Jupyter Notebook
demo.ipynb
datvo06/3ddfav2_modified
d9128c5ec7c2e3c7f517b5c7b61689a44d62c028
[ "MIT" ]
1
2021-02-24T08:14:59.000Z
2021-02-24T08:14:59.000Z
demo.ipynb
datvo06/3ddfav2_modified
d9128c5ec7c2e3c7f517b5c7b61689a44d62c028
[ "MIT" ]
null
null
null
demo.ipynb
datvo06/3ddfav2_modified
d9128c5ec7c2e3c7f517b5c7b61689a44d62c028
[ "MIT" ]
null
null
null
22.497041
86
0.565755
[ [ [ "## A simple demostration of how to run", "_____no_output_____" ] ], [ [ "# before import, make sure FaceBoxes and Sim3DR are built successfully, e.g.,\n# sh build.sh\n\nimport cv2\nimport yaml\n\nfrom FaceBoxes import FaceBoxes\nfrom TDDFA import TDDFA\nfrom utils.functions import draw_landmarks\nfrom utils.render import render\nfrom utils.depth import depth\n\nimport matplotlib.pyplot as plt", "_____no_output_____" ] ], [ [ "### Load configs", "_____no_output_____" ] ], [ [ "# load config\ncfg = yaml.load(open('configs/mb1_120x120.yml'), Loader=yaml.SafeLoader)\ntddfa = TDDFA(gpu_mode=False, **cfg)\n\n# Initialize FaceBoxes\nface_boxes = FaceBoxes()", "_____no_output_____" ], [ "# given an image path\nimg_fp = 'examples/inputs/emma.jpg'\nimg = cv2.imread(img_fp)\nplt.imshow(img[..., ::-1])", "_____no_output_____" ] ], [ [ "### Detect faces using FaceBoxes", "_____no_output_____" ] ], [ [ "# face detection\nboxes = face_boxes(img)\nprint(f'Detect {len(boxes)} faces')\nprint(boxes)", "_____no_output_____" ] ], [ [ "### Regressing 3DMM parameters, reconstruction and visualization", "_____no_output_____" ] ], [ [ "# regress 3DMM params\nparam_lst, roi_box_lst = tddfa(img, boxes)", "_____no_output_____" ], [ "# reconstruct vertices and visualizing sparse landmarks\ndense_flag = False\nver_lst = tddfa.recon_vers(param_lst, roi_box_lst, dense_flag=dense_flag)\ndraw_landmarks(img, ver_lst, dense_flag=dense_flag)", "_____no_output_____" ], [ "# reconstruct vertices and visualizing dense landmarks\ndense_flag = True\nver_lst = tddfa.recon_vers(param_lst, roi_box_lst, dense_flag=dense_flag)\ndraw_landmarks(img, ver_lst, dense_flag=dense_flag)", "_____no_output_____" ], [ "# reconstruct vertices and render\nver_lst = tddfa.recon_vers(param_lst, roi_box_lst, dense_flag=dense_flag)\nrender(img, ver_lst, alpha=0.6, show_flag=True);", "_____no_output_____" ], [ "# reconstruct vertices and render depth\nver_lst = tddfa.recon_vers(param_lst, roi_box_lst, dense_flag=dense_flag)\ndepth(img, ver_lst, show_flag=True);", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cb5670276c272cf733f511c3b08cc1570426638f
32,506
ipynb
Jupyter Notebook
deeplearning/Operations+on+word+vectors+-+v2.ipynb
donutloop/machine_learning_examples
46192a57e2dd194925ae76d6bfb169cd2af142dd
[ "MIT" ]
1
2018-10-08T18:24:40.000Z
2018-10-08T18:24:40.000Z
deeplearning/Operations+on+word+vectors+-+v2.ipynb
donutloop/machine_learning_examples
46192a57e2dd194925ae76d6bfb169cd2af142dd
[ "MIT" ]
null
null
null
deeplearning/Operations+on+word+vectors+-+v2.ipynb
donutloop/machine_learning_examples
46192a57e2dd194925ae76d6bfb169cd2af142dd
[ "MIT" ]
1
2018-10-09T06:50:48.000Z
2018-10-09T06:50:48.000Z
39.401212
608
0.558881
[ [ [ "# Operations on word vectors\n\nWelcome to your first assignment of this week! \n\nBecause word embeddings are very computionally expensive to train, most ML practitioners will load a pre-trained set of embeddings. \n\n**After this assignment you will be able to:**\n\n- Load pre-trained word vectors, and measure similarity using cosine similarity\n- Use word embeddings to solve word analogy problems such as Man is to Woman as King is to ______. \n- Modify word embeddings to reduce their gender bias \n\nLet's get started! Run the following cell to load the packages you will need.", "_____no_output_____" ] ], [ [ "import numpy as np\nfrom w2v_utils import *", "Using TensorFlow backend.\n" ] ], [ [ "Next, lets load the word vectors. For this assignment, we will use 50-dimensional GloVe vectors to represent words. Run the following cell to load the `word_to_vec_map`. ", "_____no_output_____" ] ], [ [ "words, word_to_vec_map = read_glove_vecs('data/glove.6B.50d.txt')", "_____no_output_____" ] ], [ [ "You've loaded:\n- `words`: set of words in the vocabulary.\n- `word_to_vec_map`: dictionary mapping words to their GloVe vector representation.\n\nYou've seen that one-hot vectors do not do a good job cpaturing what words are similar. GloVe vectors provide much more useful information about the meaning of individual words. Lets now see how you can use GloVe vectors to decide how similar two words are. \n\n", "_____no_output_____" ], [ "# 1 - Cosine similarity\n\nTo measure how similar two words are, we need a way to measure the degree of similarity between two embedding vectors for the two words. Given two vectors $u$ and $v$, cosine similarity is defined as follows: \n\n$$\\text{CosineSimilarity(u, v)} = \\frac {u . v} {||u||_2 ||v||_2} = cos(\\theta) \\tag{1}$$\n\nwhere $u.v$ is the dot product (or inner product) of two vectors, $||u||_2$ is the norm (or length) of the vector $u$, and $\\theta$ is the angle between $u$ and $v$. This similarity depends on the angle between $u$ and $v$. If $u$ and $v$ are very similar, their cosine similarity will be close to 1; if they are dissimilar, the cosine similarity will take a smaller value. 
\n\n<img src=\"images/cosine_sim.png\" style=\"width:800px;height:250px;\">\n<caption><center> **Figure 1**: The cosine of the angle between two vectors is a measure of how similar they are</center></caption>\n\n**Exercise**: Implement the function `cosine_similarity()` to evaluate similarity between word vectors.\n\n**Reminder**: The norm of $u$ is defined as $ ||u||_2 = \\sqrt{\\sum_{i=1}^{n} u_i^2}$", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: cosine_similarity\n\ndef cosine_similarity(u, v):\n \"\"\"\n Cosine similarity reflects the degree of similariy between u and v\n \n Arguments:\n u -- a word vector of shape (n,) \n v -- a word vector of shape (n,)\n\n Returns:\n cosine_similarity -- the cosine similarity between u and v defined by the formula above.\n \"\"\"\n \n distance = 0.0\n \n ### START CODE HERE ###\n # Compute the dot product between u and v (≈1 line)\n dot = np.dot(u,v)\n # Compute the L2 norm of u (≈1 line)\n norm_u = np.linalg.norm(u)\n \n # Compute the L2 norm of v (≈1 line)\n norm_v = np.linalg.norm(v)\n # Compute the cosine similarity defined by formula (1) (≈1 line)\n cosine_similarity = dot / (norm_u * norm_v)\n ### END CODE HERE ###\n \n return cosine_similarity", "_____no_output_____" ], [ "father = word_to_vec_map[\"father\"]\nmother = word_to_vec_map[\"mother\"]\nball = word_to_vec_map[\"ball\"]\ncrocodile = word_to_vec_map[\"crocodile\"]\nfrance = word_to_vec_map[\"france\"]\nitaly = word_to_vec_map[\"italy\"]\nparis = word_to_vec_map[\"paris\"]\nrome = word_to_vec_map[\"rome\"]\n\nprint(\"cosine_similarity(father, mother) = \", cosine_similarity(father, mother))\nprint(\"cosine_similarity(ball, crocodile) = \",cosine_similarity(ball, crocodile))\nprint(\"cosine_similarity(france - paris, rome - italy) = \",cosine_similarity(france - paris, rome - italy))", "cosine_similarity(father, mother) = 0.890903844289\ncosine_similarity(ball, crocodile) = 0.274392462614\ncosine_similarity(france - paris, rome - italy) = -0.675147930817\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **cosine_similarity(father, mother)** =\n </td>\n <td>\n 0.890903844289\n </td>\n </tr>\n <tr>\n <td>\n **cosine_similarity(ball, crocodile)** =\n </td>\n <td>\n 0.274392462614\n </td>\n </tr>\n <tr>\n <td>\n **cosine_similarity(france - paris, rome - italy)** =\n </td>\n <td>\n -0.675147930817\n </td>\n </tr>\n</table>", "_____no_output_____" ], [ "After you get the correct expected output, please feel free to modify the inputs and measure the cosine similarity between other pairs of words! Playing around the cosine similarity of other inputs will give you a better sense of how word vectors behave. ", "_____no_output_____" ], [ "## 2 - Word analogy task\n\nIn the word analogy task, we complete the sentence <font color='brown'>\"*a* is to *b* as *c* is to **____**\"</font>. An example is <font color='brown'> '*man* is to *woman* as *king* is to *queen*' </font>. In detail, we are trying to find a word *d*, such that the associated word vectors $e_a, e_b, e_c, e_d$ are related in the following manner: $e_b - e_a \\approx e_d - e_c$. We will measure the similarity between $e_b - e_a$ and $e_d - e_c$ using cosine similarity. \n\n**Exercise**: Complete the code below to be able to perform word analogies!", "_____no_output_____" ] ], [ [ "# GRADED FUNCTION: complete_analogy\n\ndef complete_analogy(word_a, word_b, word_c, word_to_vec_map):\n \"\"\"\n Performs the word analogy task as explained above: a is to b as c is to ____. 
\n \n Arguments:\n word_a -- a word, string\n word_b -- a word, string\n word_c -- a word, string\n word_to_vec_map -- dictionary that maps words to their corresponding vectors. \n \n Returns:\n best_word -- the word such that v_b - v_a is close to v_best_word - v_c, as measured by cosine similarity\n \"\"\"\n \n # convert words to lower case\n word_a, word_b, word_c = word_a.lower(), word_b.lower(), word_c.lower()\n \n ### START CODE HERE ###\n # Get the word embeddings v_a, v_b and v_c (≈1-3 lines)\n e_a, e_b, e_c = word_to_vec_map[word_a], word_to_vec_map[word_b], word_to_vec_map[word_c]\n ### END CODE HERE ###\n \n words = word_to_vec_map.keys()\n max_cosine_sim = -100 # Initialize max_cosine_sim to a large negative number\n best_word = None # Initialize best_word with None, it will help keep track of the word to output\n\n # loop over the whole word vector set\n for w in words: \n # to avoid best_word being one of the input words, pass on them.\n if w in [word_a, word_b, word_c] :\n continue\n \n ### START CODE HERE ###\n # Compute cosine similarity between the vector (e_b - e_a) and the vector ((w's vector representation) - e_c) (≈1 line)\n cosine_sim = cosine_similarity(e_b-e_a, word_to_vec_map[w]-e_c)\n \n # If the cosine_sim is more than the max_cosine_sim seen so far,\n # then: set the new max_cosine_sim to the current cosine_sim and the best_word to the current word (≈3 lines)\n if cosine_sim > max_cosine_sim:\n max_cosine_sim = cosine_sim\n best_word = w\n ### END CODE HERE ###\n \n return best_word", "_____no_output_____" ] ], [ [ "Run the cell below to test your code, this may take 1-2 minutes.", "_____no_output_____" ] ], [ [ "triads_to_try = [('italy', 'italian', 'spain'), ('india', 'delhi', 'japan'), ('man', 'woman', 'boy'), ('small', 'smaller', 'large')]\nfor triad in triads_to_try:\n print ('{} -> {} :: {} -> {}'.format( *triad, complete_analogy(*triad,word_to_vec_map)))", "italy -> italian :: spain -> spanish\nindia -> delhi :: japan -> tokyo\nman -> woman :: boy -> girl\nsmall -> smaller :: large -> larger\n" ] ], [ [ "**Expected Output**:\n\n<table>\n <tr>\n <td>\n **italy -> italian** ::\n </td>\n <td>\n spain -> spanish\n </td>\n </tr>\n <tr>\n <td>\n **india -> delhi** ::\n </td>\n <td>\n japan -> tokyo\n </td>\n </tr>\n <tr>\n <td>\n **man -> woman ** ::\n </td>\n <td>\n boy -> girl\n </td>\n </tr>\n <tr>\n <td>\n **small -> smaller ** ::\n </td>\n <td>\n large -> larger\n </td>\n </tr>\n</table>", "_____no_output_____" ], [ "Once you get the correct expected output, please feel free to modify the input cells above to test your own analogies. Try to find some other analogy pairs that do work, but also find some where the algorithm doesn't give the right answer: For example, you can try small->smaller as big->?. ", "_____no_output_____" ], [ "### Congratulations!\n\nYou've come to the end of this assignment. Here are the main points you should remember:\n\n- Cosine similarity a good way to compare similarity between pairs of word vectors. (Though L2 distance works too.) \n- For NLP applications, using a pre-trained set of word vectors from the internet is often a good way to get started. \n\nEven though you have finished the graded portions, we recommend you take a look too at the rest of this notebook. \n\nCongratulations on finishing the graded portions of this notebook! 
\n", "_____no_output_____" ], [ "## 3 - Debiasing word vectors (OPTIONAL/UNGRADED) ", "_____no_output_____" ], [ "In the following exercise, you will examine gender biases that can be reflected in a word embedding, and explore algorithms for reducing the bias. In addition to learning about the topic of debiasing, this exercise will also help hone your intuition about what word vectors are doing. This section involves a bit of linear algebra, though you can probably complete it even without being expert in linear algebra, and we encourage you to give it a shot. This portion of the notebook is optional and is not graded. \n\nLets first see how the GloVe word embeddings relate to gender. You will first compute a vector $g = e_{woman}-e_{man}$, where $e_{woman}$ represents the word vector corresponding to the word *woman*, and $e_{man}$ corresponds to the word vector corresponding to the word *man*. The resulting vector $g$ roughly encodes the concept of \"gender\". (You might get a more accurate representation if you compute $g_1 = e_{mother}-e_{father}$, $g_2 = e_{girl}-e_{boy}$, etc. and average over them. But just using $e_{woman}-e_{man}$ will give good enough results for now.) \n", "_____no_output_____" ] ], [ [ "g = word_to_vec_map['woman'] - word_to_vec_map['man']\nprint(g)", "[-0.087144 0.2182 -0.40986 -0.03922 -0.1032 0.94165\n -0.06042 0.32988 0.46144 -0.35962 0.31102 -0.86824\n 0.96006 0.01073 0.24337 0.08193 -1.02722 -0.21122\n 0.695044 -0.00222 0.29106 0.5053 -0.099454 0.40445\n 0.30181 0.1355 -0.0606 -0.07131 -0.19245 -0.06115\n -0.3204 0.07165 -0.13337 -0.25068714 -0.14293 -0.224957\n -0.149 0.048882 0.12191 -0.27362 -0.165476 -0.20426\n 0.54376 -0.271425 -0.10245 -0.32108 0.2516 -0.33455\n -0.04371 0.01258 ]\n" ] ], [ [ "Now, you will consider the cosine similarity of different words with $g$. Consider what a positive value of similarity means vs a negative cosine similarity. ", "_____no_output_____" ] ], [ [ "print ('List of names and their similarities with constructed vector:')\n\n# girls and boys name\nname_list = ['john', 'marie', 'sophie', 'ronaldo', 'priya', 'rahul', 'danielle', 'reza', 'katy', 'yasmin']\n\nfor w in name_list:\n print (w, cosine_similarity(word_to_vec_map[w], g))", "List of names and their similarities with constructed vector:\njohn -0.23163356146\nmarie 0.315597935396\nsophie 0.318687898594\nronaldo -0.312447968503\npriya 0.17632041839\nrahul -0.169154710392\ndanielle 0.243932992163\nreza -0.079304296722\nkaty 0.283106865957\nyasmin 0.233138577679\n" ] ], [ [ "As you can see, female first names tend to have a positive cosine similarity with our constructed vector $g$, while male first names tend to have a negative cosine similarity. This is not suprising, and the result seems acceptable. 
\n\nBut let's try with some other words.", "_____no_output_____" ] ], [ [ "print('Other words and their similarities:')\nword_list = ['lipstick', 'guns', 'science', 'arts', 'literature', 'warrior','doctor', 'tree', 'receptionist', \n 'technology', 'fashion', 'teacher', 'engineer', 'pilot', 'computer', 'singer']\nfor w in word_list:\n print (w, cosine_similarity(word_to_vec_map[w], g))", "Other words and their similarities:\nlipstick 0.276919162564\nguns -0.18884855679\nscience -0.0608290654093\narts 0.00818931238588\nliterature 0.0647250443346\nwarrior -0.209201646411\ndoctor 0.118952894109\ntree -0.0708939917548\nreceptionist 0.330779417506\ntechnology -0.131937324476\nfashion 0.0356389462577\nteacher 0.179209234318\nengineer -0.0803928049452\npilot 0.00107644989919\ncomputer -0.103303588739\nsinger 0.185005181365\n" ] ], [ [ "Do you notice anything surprising? It is astonishing how these results reflect certain unhealthy gender stereotypes. For example, \"computer\" is closer to \"man\" while \"literature\" is closer to \"woman\". Ouch! \n\nWe'll see below how to reduce the bias of these vectors, using an algorithm due to [Boliukbasi et al., 2016](https://arxiv.org/abs/1607.06520). Note that some word pairs such as \"actor\"/\"actress\" or \"grandmother\"/\"grandfather\" should remain gender specific, while other words such as \"receptionist\" or \"technology\" should be neutralized, i.e. not be gender-related. You will have to treat these two type of words differently when debiasing.\n\n### 3.1 - Neutralize bias for non-gender specific words \n\nThe figure below should help you visualize what neutralizing does. If you're using a 50-dimensional word embedding, the 50 dimensional space can be split into two parts: The bias-direction $g$, and the remaining 49 dimensions, which we'll call $g_{\\perp}$. In linear algebra, we say that the 49 dimensional $g_{\\perp}$ is perpendicular (or \"othogonal\") to $g$, meaning it is at 90 degrees to $g$. The neutralization step takes a vector such as $e_{receptionist}$ and zeros out the component in the direction of $g$, giving us $e_{receptionist}^{debiased}$. \n\nEven though $g_{\\perp}$ is 49 dimensional, given the limitations of what we can draw on a screen, we illustrate it using a 1 dimensional axis below. \n\n<img src=\"images/neutral.png\" style=\"width:800px;height:300px;\">\n<caption><center> **Figure 2**: The word vector for \"receptionist\" represented before and after applying the neutralize operation. </center></caption>\n\n**Exercise**: Implement `neutralize()` to remove the bias of words such as \"receptionist\" or \"scientist\". Given an input embedding $e$, you can use the following formulas to compute $e^{debiased}$: \n\n$$e^{bias\\_component} = \\frac{e \\cdot g}{||g||_2^2} * g\\tag{2}$$\n$$e^{debiased} = e - e^{bias\\_component}\\tag{3}$$\n\nIf you are an expert in linear algebra, you may recognize $e^{bias\\_component}$ as the projection of $e$ onto the direction $g$. If you're not an expert in linear algebra, don't worry about this.\n\n<!-- \n**Reminder**: a vector $u$ can be split into two parts: its projection over a vector-axis $v_B$ and its projection over the axis orthogonal to $v$:\n$$u = u_B + u_{\\perp}$$\nwhere : $u_B = $ and $ u_{\\perp} = u - u_B $\n!--> ", "_____no_output_____" ] ], [ [ "def neutralize(word, g, word_to_vec_map):\n \"\"\"\n Removes the bias of \"word\" by projecting it on the space orthogonal to the bias axis. 
\n This function ensures that gender neutral words are zero in the gender subspace.\n \n Arguments:\n word -- string indicating the word to debias\n g -- numpy-array of shape (50,), corresponding to the bias axis (such as gender)\n word_to_vec_map -- dictionary mapping words to their corresponding vectors.\n \n Returns:\n e_debiased -- neutralized word vector representation of the input \"word\"\n \"\"\"\n \n ### START CODE HERE ###\n # Select word vector representation of \"word\". Use word_to_vec_map. (≈ 1 line)\n e = word_to_vec_map[word]\n \n # Compute e_biascomponent using the formula give above. (≈ 1 line)\n e_biascomponent = np.multiply((np.dot(e, g) / np.linalg.norm(g)**2), g) \n \n # Neutralize e by substracting e_biascomponent from it \n # e_debiased should be equal to its orthogonal projection. (≈ 1 line)\n e_debiased = e - e_biascomponent\n ### END CODE HERE ###\n \n return e_debiased", "_____no_output_____" ], [ "e = \"receptionist\"\nprint(\"cosine similarity between \" + e + \" and g, before neutralizing: \", cosine_similarity(word_to_vec_map[\"receptionist\"], g))\n\ne_debiased = neutralize(\"receptionist\", g, word_to_vec_map)\nprint(\"cosine similarity between \" + e + \" and g, after neutralizing: \", cosine_similarity(e_debiased, g))", "cosine similarity between receptionist and g, before neutralizing: 0.330779417506\ncosine similarity between receptionist and g, after neutralizing: -3.26732746085e-17\n" ] ], [ [ "**Expected Output**: The second result is essentially 0, up to numerical roundof (on the order of $10^{-17}$).\n\n\n<table>\n <tr>\n <td>\n **cosine similarity between receptionist and g, before neutralizing:** :\n </td>\n <td>\n 0.330779417506\n </td>\n </tr>\n <tr>\n <td>\n **cosine similarity between receptionist and g, after neutralizing:** :\n </td>\n <td>\n -3.26732746085e-17\n </tr>\n</table>", "_____no_output_____" ], [ "### 3.2 - Equalization algorithm for gender-specific words\n\nNext, lets see how debiasing can also be applied to word pairs such as \"actress\" and \"actor.\" Equalization is applied to pairs of words that you might want to have differ only through the gender property. As a concrete example, suppose that \"actress\" is closer to \"babysit\" than \"actor.\" By applying neutralizing to \"babysit\" we can reduce the gender-stereotype associated with babysitting. But this still does not guarantee that \"actor\" and \"actress\" are equidistant from \"babysit.\" The equalization algorithm takes care of this. \n\nThe key idea behind equalization is to make sure that a particular pair of words are equi-distant from the 49-dimensional $g_\\perp$. The equalization step also ensures that the two equalized steps are now the same distance from $e_{receptionist}^{debiased}$, or from any other work that has been neutralized. In pictures, this is how equalization works: \n\n<img src=\"images/equalize10.png\" style=\"width:800px;height:400px;\">\n\n\nThe derivation of the linear algebra to do this is a bit more complex. (See Bolukbasi et al., 2016 for details.) 
But the key equations are: \n\n$$ \\mu = \\frac{e_{w1} + e_{w2}}{2}\\tag{4}$$ \n\n$$ \\mu_{B} = \\frac {\\mu \\cdot \\text{bias_axis}}{||\\text{bias_axis}||_2^2} *\\text{bias_axis}\n\\tag{5}$$ \n\n$$\\mu_{\\perp} = \\mu - \\mu_{B} \\tag{6}$$\n\n$$ e_{w1B} = \\frac {e_{w1} \\cdot \\text{bias_axis}}{||\\text{bias_axis}||_2^2} *\\text{bias_axis}\n\\tag{7}$$ \n$$ e_{w2B} = \\frac {e_{w2} \\cdot \\text{bias_axis}}{||\\text{bias_axis}||_2^2} *\\text{bias_axis}\n\\tag{8}$$\n\n\n$$e_{w1B}^{corrected} = \\sqrt{ |{1 - ||\\mu_{\\perp} ||^2_2} |} * \\frac{e_{\\text{w1B}} - \\mu_B} {|(e_{w1} - \\mu_{\\perp}) - \\mu_B)|} \\tag{9}$$\n\n\n$$e_{w2B}^{corrected} = \\sqrt{ |{1 - ||\\mu_{\\perp} ||^2_2} |} * \\frac{e_{\\text{w2B}} - \\mu_B} {|(e_{w2} - \\mu_{\\perp}) - \\mu_B)|} \\tag{10}$$\n\n$$e_1 = e_{w1B}^{corrected} + \\mu_{\\perp} \\tag{11}$$\n$$e_2 = e_{w2B}^{corrected} + \\mu_{\\perp} \\tag{12}$$\n\n\n**Exercise**: Implement the function below. Use the equations above to get the final equalized version of the pair of words. Good luck!", "_____no_output_____" ] ], [ [ "def equalize(pair, bias_axis, word_to_vec_map):\n \"\"\"\n Debias gender specific words by following the equalize method described in the figure above.\n \n Arguments:\n pair -- pair of strings of gender specific words to debias, e.g. (\"actress\", \"actor\") \n bias_axis -- numpy-array of shape (50,), vector corresponding to the bias axis, e.g. gender\n word_to_vec_map -- dictionary mapping words to their corresponding vectors\n \n Returns\n e_1 -- word vector corresponding to the first word\n e_2 -- word vector corresponding to the second word\n \"\"\"\n \n ### START CODE HERE ###\n # Step 1: Select word vector representation of \"word\". Use word_to_vec_map. (≈ 2 lines)\n w1, w2 = None\n e_w1, e_w2 = None\n \n # Step 2: Compute the mean of e_w1 and e_w2 (≈ 1 line)\n mu = None\n\n # Step 3: Compute the projections of mu over the bias axis and the orthogonal axis (≈ 2 lines)\n mu_B = None\n mu_orth = None\n\n # Step 4: Use equations (7) and (8) to compute e_w1B and e_w2B (≈2 lines)\n e_w1B = None\n e_w2B = None\n \n # Step 5: Adjust the Bias part of e_w1B and e_w2B using the formulas (9) and (10) given above (≈2 lines)\n corrected_e_w1B = None\n corrected_e_w2B = None\n\n # Step 6: Debias by equalizing e1 and e2 to the sum of their corrected projections (≈2 lines)\n e1 = None\n e2 = None\n \n ### END CODE HERE ###\n \n return e1, e2", "_____no_output_____" ], [ "print(\"cosine similarities before equalizing:\")\nprint(\"cosine_similarity(word_to_vec_map[\\\"man\\\"], gender) = \", cosine_similarity(word_to_vec_map[\"man\"], g))\nprint(\"cosine_similarity(word_to_vec_map[\\\"woman\\\"], gender) = \", cosine_similarity(word_to_vec_map[\"woman\"], g))\nprint()\ne1, e2 = equalize((\"man\", \"woman\"), g, word_to_vec_map)\nprint(\"cosine similarities after equalizing:\")\nprint(\"cosine_similarity(e1, gender) = \", cosine_similarity(e1, g))\nprint(\"cosine_similarity(e2, gender) = \", cosine_similarity(e2, g))", "_____no_output_____" ] ], [ [ "**Expected Output**:\n\ncosine similarities before equalizing:\n<table>\n <tr>\n <td>\n **cosine_similarity(word_to_vec_map[\"man\"], gender)** =\n </td>\n <td>\n -0.117110957653\n </td>\n </tr>\n <tr>\n <td>\n **cosine_similarity(word_to_vec_map[\"woman\"], gender)** =\n </td>\n <td>\n 0.356666188463\n </td>\n </tr>\n</table>\n\ncosine similarities after equalizing:\n<table>\n <tr>\n <td>\n **cosine_similarity(u1, gender)** =\n </td>\n <td>\n -0.700436428931\n </td>\n </tr>\n <tr>\n <td>\n 
**cosine_similarity(u2, gender)** =\n </td>\n <td>\n 0.700436428931\n </td>\n </tr>\n</table>", "_____no_output_____" ], [ "Please feel free to play with the input words in the cell above, to apply equalization to other pairs of words. \n\nThese debiasing algorithms are very helpful for reducing bias, but are not perfect and do not eliminate all traces of bias. For example, one weakness of this implementation was that the bias direction $g$ was defined using only the pair of words _woman_ and _man_. As discussed earlier, if $g$ were defined by computing $g_1 = e_{woman} - e_{man}$; $g_2 = e_{mother} - e_{father}$; $g_3 = e_{girl} - e_{boy}$; and so on and averaging over them, you would obtain a better estimate of the \"gender\" dimension in the 50 dimensional word embedding space. Feel free to play with such variants as well. \n ", "_____no_output_____" ], [ "### Congratulations\n\nYou have come to the end of this notebook, and have seen a lot of the ways that word vectors can be used as well as modified. \n\nCongratulations on finishing this notebook! \n", "_____no_output_____" ], [ "**References**:\n- The debiasing algorithm is from Bolukbasi et al., 2016, [Man is to Computer Programmer as Woman is to\nHomemaker? Debiasing Word Embeddings](https://papers.nips.cc/paper/6228-man-is-to-computer-programmer-as-woman-is-to-homemaker-debiasing-word-embeddings.pdf)\n- The GloVe word embeddings were due to Jeffrey Pennington, Richard Socher, and Christopher D. Manning. (https://nlp.stanford.edu/projects/glove/)\n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ] ]
cb5674b77f13a18984262eabb7294621f68c187d
1,349
ipynb
Jupyter Notebook
Lesson2/Exercise17.ipynb
TrainingByPackt/Introduction-to-Natural-Language-Processing
7b3197f979c1c3def5553d70afb72844b468d7f9
[ "MIT" ]
35
2019-02-16T17:05:07.000Z
2022-03-20T20:26:41.000Z
Lesson2/Exercise17.ipynb
TrainingByPackt/Introduction-to-Natural-Language-Processing
7b3197f979c1c3def5553d70afb72844b468d7f9
[ "MIT" ]
4
2019-11-20T05:39:40.000Z
2020-12-23T07:05:10.000Z
Lesson2/Exercise17.ipynb
TrainingByPackt/Introduction-to-Natural-Language-Processing
7b3197f979c1c3def5553d70afb72844b468d7f9
[ "MIT" ]
44
2019-03-04T07:15:28.000Z
2022-03-06T02:13:45.000Z
21.078125
95
0.547813
[ [ [ "# Exercise 6 : Porter Stemmer", "_____no_output_____" ], [ "Apply Porter stemmer on the following sentence:\n“Before eating, it would be nice to sanitize your hands with a sanitizer.”", "_____no_output_____" ] ], [ [ "sentence = \"Before eating, it would be nice to sanitize your hands with a sanitizer\"\nfrom nltk.stem.porter import *\nps_stemmer = PorterStemmer()\n' '.join([ps_stemmer.stem(wd) for wd in sentence.split()])", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ] ]
cb567b8a3794cd9a9213b5178f51712d3531fa1e
468,282
ipynb
Jupyter Notebook
causal-inference-for-the-brave-and-true/16-Regression-Discontinuity-Design.ipynb
keesterbrugge/python-causality-handbook
4075476ee99422ed04ef3b2f8cabc982698f96b5
[ "MIT" ]
1
2021-12-21T12:59:17.000Z
2021-12-21T12:59:17.000Z
causal-inference-for-the-brave-and-true/16-Regression-Discontinuity-Design.ipynb
HAlicia/python-causality-handbook
d2614cb1fbf8ae621d08be0e71df39b7a0d9e524
[ "MIT" ]
null
null
null
causal-inference-for-the-brave-and-true/16-Regression-Discontinuity-Design.ipynb
HAlicia/python-causality-handbook
d2614cb1fbf8ae621d08be0e71df39b7a0d9e524
[ "MIT" ]
null
null
null
488.812109
78,064
0.930768
[ [ [ "# 16 - Regression Discontinuity Design\n\n\nWe don't stop to think about it much, but it is impressive how smooth nature is. You can't grow a tree without first getting a bud, you can't teleport from one place to another, a wound takes its time to heal. Even in the social realm, smoothness seems to be the norm. You can't grow a business in one day, consistency and hard work are required to build wealth and it takes years before you learn how linear regression works. Under normal circumstances, nature is very cohesive and doesn't jump around much.\n\n\n> When the intelligent and animal souls are held together in one embrace, they can be kept from separating.\n\n\\- Tao Te Ching, Lao Tzu.\n\nWhich means that **when we do see jumps and spikes, they are probably artificial** and often man-made situations. These events are usually accompanied by counterfactuals to the normal way of things: if a weird thing happens, this gives us some insight into what would have happened if nature was to work in a different way. Exploring these artificial jumps is at the core of Regression Discontinuity Design.\n\n![img](./data/img/rdd/smooth.png)\n\nThe basic setup goes like this. Imagine that you have a treatment variable $T$ and potential outcomes $Y_0$ and $Y_1$. The treatment T is a discontinuous function of an observed running variable $R$ such that\n\n$\nD_i = \\mathcal{1}\\{R_i>c\\}\n$\n\nIn other words, this is saying that treatment is zero when $R$ is below a threshold $c$ and one otherwise. This means that we get to observe $Y_1$ when $R>c$ and $Y_0$ when $R<c$. To wrap our head around this, think about the potential outcomes as 2 functions that we can't observe entirely. Both $Y_0(R)$ and $Y_1(R)$ are there, we just can't see that. The threshold acts as a switch that allows us to see one or the other of those function, but never both, much like in the image below:\n\n![img](./data/img/rdd/rdd.png)\n\nThe idea of regression discontinuity is to compare the outcome just above and just below the threshold to identify the treatment effect at the threshold. This is called a **sharp RD** design, since the probability of getting the treatment jumps from 0 to 1 at the threshold, but we could also think about a **fuzzy RD** design, where the probability also jumps, but is a less dramatic manner.\n\n## Is Alcohol Killing You?\n\nA very relevant public policy question is what should be the minimal drinking age. Most countries, Brazil included, set it to be 18 year, but in the US (most states) it is currently 21. So, is it the case that the US is being overly prudent and that they should lower their minimal drinking age? Or is it the case that other countries should make their legal drinking age higher? \n\nOne way to look at this question is from a [mortality rate perspective (Carpenter and Dobkin, 2009)](https://www.aeaweb.org/articles?id=10.1257/app.1.1.164). From the public policy standpoint, one could argue that we should lower the mortality rate as much as possible. If alcohol consumption increases the mortality rate by a lot, we should avoid lowering the minimum drinking age. This would be consistent with the objective of lowering deaths caused by alcohol consumption.\n\nTo estimate the impacts of alcohol on death, we could use the fact that legal drinking age imposes a discontinuity on nature. In the US, those just under 21 years don't drink (or drink much less) while those just older than 21 do drink. 
This means that the probability of drinking jumps at 21 years and that is something we can explore with an RDD.", "_____no_output_____" ] ], [ [ "import warnings\nwarnings.filterwarnings('ignore')\n\nimport pandas as pd\nimport numpy as np\nfrom matplotlib import style\nfrom matplotlib import pyplot as plt\nimport seaborn as sns\nimport statsmodels.formula.api as smf\n\n%matplotlib inline\n\nstyle.use(\"fivethirtyeight\")", "_____no_output_____" ] ], [ [ "To do so we can grab some mortality data aggregated by age. Each row is the average age of a group of people and the average mortality by all causes (`all`), by moving vehicle accident (`mva`) and by suicide (`suicide`). ", "_____no_output_____" ] ], [ [ "drinking = pd.read_csv(\"./data/drinking.csv\")\ndrinking.head()[[\"agecell\", \"all\", \"mva\", \"suicide\"]]", "_____no_output_____" ] ], [ [ "Just to aid visibility (and for another important reason we will see later) we will centralize the running variable `agecell` at the threshold 21.", "_____no_output_____" ] ], [ [ "drinking[\"agecell\"] -= 21", "_____no_output_____" ] ], [ [ "If we plot the multiple outcome variables (`all`, `mva`, `suicide`) with the runing variable on the x axis, we get some visual cue about some sort of jump in mortality as we cross the legal drinking age.", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(8,8))\nax = plt.subplot(3,1,1)\ndrinking.plot.scatter(x=\"agecell\", y=\"all\", ax=ax)\nplt.title(\"Death Cause by Age (Centered at 0)\")\n\nax = plt.subplot(3,1,2, sharex=ax)\ndrinking.plot.scatter(x=\"agecell\", y=\"mva\", ax=ax)\n\nax = plt.subplot(3,1,3, sharex=ax)\ndrinking.plot.scatter(x=\"agecell\", y=\"suicide\", ax=ax);\n", "_____no_output_____" ] ], [ [ "There are some cues, but we need more than that. What exactly is the effect of drinking on mortality at the threshold? And what is the standard error on that estimate?\n\n## RDD Estimation\n\nThe key assumption that RDD relies on is the smoothness of the potential outcome at the threshold. Formally, the limits of the potential outcomes as the running variable approaches the threshold from the right and from the left should be the same.\n\n$$\n\\lim_{r \\to c^-} E[Y_{ti}|R_i=r] = \\lim_{r \\to c^+} E[Y_{ti}|R_i=r]\n$$\n\nIf this holds true, we can find the causal effect at the threshold\n\n$$\n\\begin{align}\n\\lim_{r \\to c^+} E[Y_{ti}|R_i=r] - \\lim_{r \\to c^-} E[Y_{ti}|R_i=r]=&\\lim_{r \\to c^+} E[Y_{1i}|R_i=r] - \\lim_{r \\to c^-} E[Y_{0i}|R_i=r] \\\\\n=& E[Y_{1i}|R_i=r] - E[Y_{0i}|R_i=r] \\\\\n=& E[Y_{1i} - Y_{0i}|R_i=r]\n\\end{align}\n$$\n\nThis is, in its own way, a sort of Local Average Treatment Effect (LATE), since we can only know it at the threshold. In this setting, we can think of RDD as a local randomized trial. For those at the threshold, the treatment could have gone either way and, by chance, some people fell below the threshold, and some people fell above. In our example, at the same point in time, some people are just above 21 years and some people are just below 21. What determines this is if someone was born some days later or not, which is pretty random. For this reason, RDD provides a very compelling causal story. It is not the golden standard of RCT, but it is close. \n\nNow, to estimate the treatment effect at the threshold, all we need to do is estimate both of the limits in the formula above and compare them. 
The simplest way to do that is by running a linear regression\n\n![img](./data/img/rdd/ols.png)\n\nTo make it work, we interact a dummy for being above the threshold with the running variable\n\n$\ny_i = \\beta_0 + \\beta_1 r_i + \\beta_2 \\mathcal{1}\\{r_i>c\\} + \\beta_3 \\mathcal{1}\\{r_i>c\\} r_i\n$\n\nEssentially, this is the same as fitting a linear regression above the threshold and another below it. The parameter $\\beta_0$ is the intercept of the regression below the threshold and $\\beta_0+\\beta_2$ is the intercept for the regression above the threshold.\n\nHere is where the trick of centering the running variable at the threshold comes into play. After this pre-processing step, the threshold becomes zero. This causes the intercept $\\beta_0$ to be the predicted value at the threshold, for the regression below it. In other words, $\\beta_0=\\lim_{r \\to c^-} E[Y_{ti}|R_i=r]$. By the same reasoning, $\\beta_0+\\beta_2$ is the limit of the outcome from above. Wich means, that\n\n$\n\\lim_{r \\to c^+} E[Y_{ti}|R_i=r] - \\lim_{r \\to c^-} E[Y_{ti}|R_i=r]=\\beta_2=E[ATE|R=c]\n$\n\nHere is what this looks like in code for the case where we want to estimate the effect of alcohol consumption on death by all causes at 21 years.", "_____no_output_____" ] ], [ [ "rdd_df = drinking.assign(threshold=(drinking[\"agecell\"] > 0).astype(int))\n\nmodel = smf.wls(\"all~agecell*threshold\", rdd_df).fit()\n\nmodel.summary().tables[1]", "_____no_output_____" ] ], [ [ "This model is telling us that mortality increases by 7.6627 points with the consumption of alcohol. Another way of putting this is that alcohol increases the chance of death by all causes by 8% ((7.6627+93.6184)/93.6184). Notice that this also gives us standard errors for our causal effect estimate. In this case, the effect is statistically significant, since the p-value is below 0.01.\n\nIf we want to verify this model visually, we can show the predicted values on the data that we have. You can see that it is as though we had 2 regression models: one for those above the threshold and one for below it.", "_____no_output_____" ] ], [ [ "ax = drinking.plot.scatter(x=\"agecell\", y=\"all\", color=\"C0\")\ndrinking.assign(predictions=model.fittedvalues).plot(x=\"agecell\", y=\"predictions\", ax=ax, color=\"C1\")\nplt.title(\"Regression Discontinuity\");", "_____no_output_____" ] ], [ [ "If we do the same for the other causes, this is what we get.", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(8,8))\n\nfor p, cause in enumerate([\"all\", \"mva\", \"suicide\"], 1):\n ax = plt.subplot(3,1,p)\n drinking.plot.scatter(x=\"agecell\", y=cause, ax=ax)\n m = smf.wls(f\"{cause}~agecell*threshold\", rdd_df).fit()\n ate_pct = 100*((m.params[\"threshold\"] + m.params[\"Intercept\"])/m.params[\"Intercept\"] - 1)\n drinking.assign(predictions=m.fittedvalues).plot(x=\"agecell\", y=\"predictions\", ax=ax, color=\"C1\")\n plt.title(f\"Impact of Alcohol on Death: {np.round(ate_pct, 2)}%\")\n\nplt.tight_layout()", "_____no_output_____" ] ], [ [ "RDD is telling us that alcohol increases the chance of death by suicide and car accidents by 15%, which is a pretty significant amount. These results are compelling arguments to not lower the drinking age, if we want to minimize mortality rates.\n\n### Kernel Weighting\n\nRegression Discontinuity relies heavily on the extrapolations properties of linear regression. Since we are looking at the values at the beginning and end of 2 regression lines, we better get those limits right. 
What can happen is that regression might focus too much on fitting the other data points at the cost of a poor fit at the threshold. If this happens, we might get the wrong measure of the treatment effect.\n\nOne way to solve this is to give higher weights for the points that are closer to the threshold. There are many ways to do this, but a popular one is to reweight the samples with the **triangular kernel**\n\n$\nK(R, c, h) = \\mathcal{1}\\{|R-c| \\leq h\\} * \\bigg(1-\\frac{|R-c|}{h}\\bigg)\n$\n\nThe first part of this kernel is an indicator function to whether we are close to the threshold. How close? This is determined by a bandwidth parameter $h$. The second part of this kernel is a weighting function. As we move away from the threshold, the weights get smaller and smaller. These weights are divided by the bandwidth. If the bandwidth is large, the weights get smaller at a slower rate. If the bandwidth is small, the weights quickly go to zero. \n\nTo make it easier to understand, here is what the weights look like for this kernel applied to our problem. I've set the bandwidth to be 1 here, meaning we will only consider data from people that are no older than 22 years and no younger than 20 years.", "_____no_output_____" ] ], [ [ "def kernel(R, c, h):\n indicator = (np.abs(R-c) <= h).astype(float)\n return indicator * (1 - np.abs(R-c)/h)", "_____no_output_____" ], [ "plt.plot(drinking[\"agecell\"], kernel(drinking[\"agecell\"], c=0, h=1))\nplt.xlabel(\"agecell\")\nplt.ylabel(\"Weight\")\nplt.title(\"Kernel Weight by Age\");", "_____no_output_____" ] ], [ [ "If we apply these weights to our original problem, the impact of alcohol gets bigger, at least for all causes. It jumps from 7.6627 to 9.7004. The result remains very significant. Also, notice that I'm using `wls` instead of `ols`", "_____no_output_____" ] ], [ [ "model = smf.wls(\"all~agecell*threshold\", rdd_df,\n weights=kernel(drinking[\"agecell\"], c=0, h=1)).fit()\n\nmodel.summary().tables[1]", "_____no_output_____" ], [ "ax = drinking.plot.scatter(x=\"agecell\", y=\"all\", color=\"C0\")\ndrinking.assign(predictions=model.fittedvalues).plot(x=\"agecell\", y=\"predictions\", ax=ax, color=\"C1\")\nplt.title(\"Regression Discontinuity (Local Regression)\");", "_____no_output_____" ] ], [ [ "And here is what it looks like for the other causes of death. Notice how the regression on the right is more negatively sloped since it disconsiders the right most points. ", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(8,8))\nweights = kernel(drinking[\"agecell\"], c=0, h=1)\n\nfor p, cause in enumerate([\"all\", \"mva\", \"suicide\"], 1):\n ax = plt.subplot(3,1,p)\n drinking.plot.scatter(x=\"agecell\", y=cause, ax=ax)\n m = smf.wls(f\"{cause}~agecell*threshold\", rdd_df, weights=weights).fit()\n ate_pct = 100*((m.params[\"threshold\"] + m.params[\"Intercept\"])/m.params[\"Intercept\"] - 1)\n drinking.assign(predictions=m.fittedvalues).plot(x=\"agecell\", y=\"predictions\", ax=ax, color=\"C1\")\n plt.title(f\"Impact of Alcohol on Death: {np.round(ate_pct, 2)}%\")\n\nplt.tight_layout()", "_____no_output_____" ] ], [ [ "With the exception of suicide, it looks like adding the kernel weight made the negative impact on alcohol bigger. Once again, if we want to minimize the death rate, we should NOT recommend lowering the legal drinking age, since there is a clear impact of alcohol on the death rates.\n\nThis simple case covers what happens when regression discontinuity design works perfectly. 
Next, we will see some diagnostics that we should run in order to check how much we can trust RDD and talk about a topic that is very dear to our heart: the effect of education on earnings.\n\n## Sheepskin Effect and Fuzzy RDD\n\nWhen it comes to the effect of education on earnings, there are two major views in economics. The first one is the widely known argument that education increases human capital, increasing productivity and thus, earnings. In this view, education actually changes you for the better. Another view is that education is simply a signaling mechanism. It just puts you through all these hard tests and academic tasks. If you can make it, it signals to the market that you are a good employee. In this way, education doesn't make you more productive. It only tells the market how productive you have always been. What matters here is the diploma. If you have it, you will be paid more. We refer to this as the **sheepskin effect**, since diplomas were printed in sheepskin in the past.\n\nTo test this hypothesis, [Clark and Martorell](https://faculty.smu.edu/millimet/classes/eco7321/papers/clark%20martorell%202014.pdf) used regression discontinuity to measure the effect of graduating 12th grade on earnings. In order to do that, they had to think about some running variable where students that fall above it graduate and those who fall below it, don't. They found such data in the Texas education system.\n\nIn order to graduate in Texas, one has to pass an exam. Testing starts at 10th grade and students can do it multiple times, but eventually, they face a last chance exam at the end of 12th grade. The idea was to get data from students who took those last chance exams and compare those that had barely failed it to those that barely passed it. These students will have very similar human capital, but different signaling credentials. Namely, those that barely passed it, will receive a diploma. ", "_____no_output_____" ] ], [ [ "sheepskin = pd.read_csv(\"./data/sheepskin.csv\")[[\"avgearnings\", \"minscore\", \"receivehsd\", \"n\"]]\nsheepskin.head()", "_____no_output_____" ] ], [ [ "Once again, this data is grouped by the running variable. It contains not only the running variable (minscore, already centered at zero) and the outcome (avgearnings), but it also has the probability of receiving a diploma in that score cell and the size of the call (n). So, for example, out of the 12 students in the cell -30 below the score threshold, only 5 were able to get the diploma (12 * 0,416). \n\nThis means that there is some slippage in the treatment assignment. Some students that are below the passing threshold managed to get the diploma anyway. Here, the regression discontinuity is **fuzzy**, rather than sharp. Notice how the probability of getting the diploma doesn't jump from zero to one at the threshold. But it does jump from something like 50% to 90%.", "_____no_output_____" ] ], [ [ "sheepskin.plot.scatter(x=\"minscore\", y=\"receivehsd\", figsize=(10,5))\nplt.xlabel(\"Test Scores Relative to Cut off\")\nplt.ylabel(\"Fraction Receiving Diplomas\")\nplt.title(\"Last-chance Exams\");", "_____no_output_____" ] ], [ [ "We can think of fuzzy RD as a sort of non compliance. Passing the threshold should make everyone receive the diploma, but some students, the never takers, don’t get it. Likewise, being below the threshold should prevent you from getting a diploma, but some students, the always takers, manage to get it anyway. 
\n\nJust like when we have the potential outcome, we have the potential treatment status in this situation. $T_1$ is the treatment everyone would have received had they been above the threshold. $T_0$ is the treatment everyone would have received had they been below the threshold. As you've might have noticed, we can think of the **threshold as an Instrumental Variable**. Just as in IV, if we naively estimate the treatment effect, it will be biased towards zero. \n\n![img](./data/img/rdd/rdd_fuzzy.png)\n\nThe probability of treatment being less than one, even above the threshold, makes the outcome we observe less than the true potential outcome $Y_1$. By the same token, the outcome we observe below the threshold is higher than the true potential outcome $Y_0$. This makes it look like the treatment effect at the threshold is smaller than it actually is and we will have to use IV techniques to correct for that.\n\nJust like when we've assumed smoothness on the potential outcome, we now assume it for the potential treatment. Also, we need to assume monotonicity, just like in IV. In case you don't remember, it states that $T_{i1}>T_{i0} \\ \\forall i$. This means that crossing the threshold from the left to the right only increases your chance of getting a diploma (or that there are no defiers). With these 2 assumptions, we have a Wald Estimator for LATE.\n\n$$\n\\dfrac{\\lim_{r \\to c^+} E[Y_i|R_i=r] - \\lim_{r \\to c^-} E[Y_i|R_i=r]}{\\lim_{r \\to c^+} E[T_i|R_i=r] - \\lim_{r \\to c^-} E[T_i|R_i=r]} = E[Y_{1i} - Y_{0i} | T_{1i} > T_{0i}, R_i=c]\n$$\n\nNotice how this is a local estimate in two senses. First, it is local because it only gives the treatment effect at the threshold $c$. This is the RD locality. Second, it is local because it only estimates the treatment effect for the compliers. This is the IV locality.\n\nTo estimate this, we will use 2 linear regression. The numerator can be estimated just like we've done before. To get the denominator, we simply replace the outcome with the treatment. But first, let's talk about a sanity check we need to run to make sure we can trust our RDD estimates.\n\n### The McCrary Test\n\nOne thing that could break our RDD argument is if people can manipulate where they stand at the threshold. In the sheepskin example this could happen if students just below the threshold found a way around the system to increase their test score by just a bit. Another example is when you need to be below a certain income level to get a government benefit. Some families might lower their income on purpose, just to be just eligible for the program.\n\nIn these sorts of situations, we tend to see a phenomenon called bunching on the density of the running variable. This means that we will have a lot of entities just above or just below the threshold. To check for that, we can plot the density function of the running variable and see if there are any spikes around the threshold. For our case, the density is given by the `n` column in our data.", "_____no_output_____" ] ], [ [ "plt.figure(figsize=(8,8))\n\nax = plt.subplot(2,1,1)\nsheepskin.plot.bar(x=\"minscore\", y=\"n\", ax=ax)\nplt.title(\"McCrary Test\")\nplt.ylabel(\"Smoothness at the Threshold\")\n\nax = plt.subplot(2,1,2, sharex=ax)\nsheepskin.replace({1877:1977, 1874:2277}).plot.bar(x=\"minscore\", y=\"n\", ax=ax)\nplt.xlabel(\"Test Scores Relative to Cut off\")\nplt.ylabel(\"Spike at the Threshold\");", "_____no_output_____" ] ], [ [ "The first plot shows how our data density looks like. 
As we can see, there are no spikes around the threshold, meaning there is no bunching. Students are not manipulating where they fall on the threshold. Just for illustrative purposes, the second plot shows what bunching would look like if students could manipulate where they fall on the threshold. We would see a spike in the density for the cells just above the threshold, since many students would be on that cell, barely passing the exam. \n\nGetting this out of the way, we can go back to estimate the sheepskin effect. As I've said before, the numerator of the Wald estimator can be estimated just like we did in the Sharp RD. Here, we will use as weight the kernel with a bandwidth of 15. Since we also have the cell size, we will multiply the kernel by the sample size to get a final weight for the cell. ", "_____no_output_____" ] ], [ [ "sheepsking_rdd = sheepskin.assign(threshold=(sheepskin[\"minscore\"]>0).astype(int))\nmodel = smf.wls(\"avgearnings~minscore*threshold\",\n sheepsking_rdd,\n weights=kernel(sheepsking_rdd[\"minscore\"], c=0, h=15)*sheepsking_rdd[\"n\"]).fit()\n\nmodel.summary().tables[1]", "_____no_output_____" ] ], [ [ "This is telling us that the effect of a diploma is -97.7571, but this is not statistically significant (P-value of 0.5). If we plot these results, we get a very continuous line at the threshold. More educated people indeed make more money, but there isn't a jump at the point where they receive the 12th grade diploma. This is an argument in favor of the view that says that education increases earnings by making people more productive, rather than being just a signal to the marker. In other words, there is no sheepskin effect. ", "_____no_output_____" ] ], [ [ "ax = sheepskin.plot.scatter(x=\"minscore\", y=\"avgearnings\", color=\"C0\")\nsheepskin.assign(predictions=model.fittedvalues).plot(x=\"minscore\", y=\"predictions\", ax=ax, color=\"C1\", figsize=(8,5))\nplt.xlabel(\"Test Scores Relative to Cutoff\")\nplt.ylabel(\"Average Earnings\")\nplt.title(\"Last-chance Exams\");", "_____no_output_____" ] ], [ [ "However, as we know from the way non compliance bias works, this result is biased towards zero. To correct for that, we need to scale it by the first stage and get the Wald estimator. Unfortunately, there isn't a good Python implementation for this, so we will have to do it manually and use bootstrap to get the standard errors.\n\nThe code below runs the numerator of the Wald estimator just like we did before and also constructs the denominator by replacing the target variable with the treatment variable `receivehsd`. The final step just divides the numerator by the denominator. 
", "_____no_output_____" ] ], [ [ "def wald_rdd(data):\n weights=kernel(data[\"minscore\"], c=0, h=15)*data[\"n\"]\n denominator = smf.wls(\"receivehsd~minscore*threshold\", data, weights=weights).fit()\n numerator = smf.wls(\"avgearnings~minscore*threshold\", data, weights=weights).fit()\n return numerator.params[\"threshold\"]/denominator.params[\"threshold\"]", "_____no_output_____" ], [ "from joblib import Parallel, delayed \n\nnp.random.seed(45)\nbootstrap_sample = 1000\nates = Parallel(n_jobs=4)(delayed(wald_rdd)(sheepsking_rdd.sample(frac=1, replace=True))\n for _ in range(bootstrap_sample))\nates = np.array(ates)", "_____no_output_____" ] ], [ [ "With the bootstrap samples, we can plot the distribution of ATEs and see where the 95% confidence interval is.", "_____no_output_____" ] ], [ [ "sns.distplot(ates, kde=False)\nplt.vlines(np.percentile(ates, 2.5), 0, 100, linestyles=\"dotted\")\nplt.vlines(np.percentile(ates, 97.5), 0, 100, linestyles=\"dotted\", label=\"95% CI\")\nplt.title(\"ATE Bootstrap Distribution\")\nplt.xlim([-10000, 10000])\nplt.legend();", "_____no_output_____" ] ], [ [ "As you can see, even when we scale the effect by the first stage, it is still not statistically different from zero. This means that education doesn't increase earnings by a simple sheepskin effect, but rather by increasing one's productivity.\n\n## Key Ideas\n\nWe learned how to take advantage of artificial discontinuities to estimate causal effects. The idea is that we will have some artificial threshold that makes the probability of treatment jump. One example that we saw was how age makes the probability of drinking jump at 21 years. We could use that to estimate the impact of drinking on mortality rate. We use the fact that very close to the threshold, we have something close to a randomized trial. Entities very close to the threshold could have gone either way and what determines where they've landed is essentially random. With this, we can compare those just above and just below to get the treatment effect. We saw how we could do that with weighted linear regression using a kernel and how this even gave us, for free, standard errors for our ATE.\n\nThen, we look at what would happen in the fuzzy RD design, where we have non compliance. We saw how we could approach the situation much like we did with IV.\n\n\n## References\n\nI like to think of this entire book as a tribute to Joshua Angrist, Alberto Abadie and Christopher Walters for their amazing Econometrics class. Most of the ideas here are taken from their classes at the American Economic Association. Watching them is what is keeping me sane during this tough year of 2020.\n* [Cross-Section Econometrics](https://www.aeaweb.org/conference/cont-ed/2017-webcasts)\n* [Mastering Mostly Harmless Econometrics](https://www.aeaweb.org/conference/cont-ed/2020-webcasts)\n\nI'll also like to reference the amazing books from Angrist. They have shown me that Econometrics, or 'Metrics as they call it, is not only extremely useful but also profoundly fun.\n\n* [Mostly Harmless Econometrics](https://www.mostlyharmlesseconometrics.com/)\n* [Mastering 'Metrics](https://www.masteringmetrics.com/)\n\nOther important reference is Miguel Hernan and Jamie Robins' book. 
It has been my trustworthy companion in the most thorny causal questions I had to answer.\n\n* [Causal Inference Book](https://www.hsph.harvard.edu/miguel-hernan/causal-inference-book/)\n\n![img](./data/img/poetry.png)\n\n## Contribute\n\nCausal Inference for the Brave and True is an open-source material on causal inference, the statistics of science. It uses only free software, based in Python. Its goal is to be accessible monetarily and intellectually.\nIf you found this book valuable and you want to support it, please go to [Patreon](https://www.patreon.com/causal_inference_for_the_brave_and_true). If you are not ready to contribute financially, you can also help by fixing typos, suggesting edits or giving feedback on passages you didn't understand. Just go to the book's repository and [open an issue](https://github.com/matheusfacure/python-causality-handbook/issues). Finally, if you liked this content, please share it with others who might find it useful and give it a [star on GitHub](https://github.com/matheusfacure/python-causality-handbook/stargazers).", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cb56853c1dee4b6e569c5875704fa870009b3745
3,343
ipynb
Jupyter Notebook
models/ODE_Mimi.ipynb
lwt852/MangroveConservation
bf303f7e7bb7972e858ff6fa2b6fdd87c7c63569
[ "MIT" ]
null
null
null
models/ODE_Mimi.ipynb
lwt852/MangroveConservation
bf303f7e7bb7972e858ff6fa2b6fdd87c7c63569
[ "MIT" ]
1
2020-04-08T15:15:45.000Z
2020-04-08T15:15:45.000Z
models/ODE_Mimi.ipynb
lwt852/MangroveConservation
bf303f7e7bb7972e858ff6fa2b6fdd87c7c63569
[ "MIT" ]
6
2020-04-06T12:59:34.000Z
2020-04-20T19:49:42.000Z
49.161765
514
0.690099
[ [ [ "# <center>Using Ordinary Differential Equations (ODEs) in development studies</center>\n\n<center>by Mimi Gong</center>", "_____no_output_____" ], [ "---\n## Definition\n\nAn ordinary differential equation is an expression which relates function to the ordinary derivatives.\nOne the most common differential equations used in physical application is Newton’s law: F = ma. Acceleration is the second derivative of a displacement function x(t).\n\n## Applications\n\nPopulation model is a common application of ordinary differential equations in my field:conservation studies, and has been widely studied in ecology to model the population growth of many species including species in mangrove forests. More broadly, population models has been widely adopted in development studies, which is to depict development changes over time by ODEs. \nIn a dynamic system, the 'dynamics' is characterized by constant change of progress.These developments can happen in individual, between two individuals, or among a group of people(such as a family system). Moreover, the development can be measured on short or long time scales, depending on the phenomenon of interest. They can occurs over long spans of time(decades), short time spans(seconds or less) or time scales in between.\n\nTo quantitatively measure the 'dynamics', we need to be specific on how the system changes and how these interrelationships are defined. Therefore, mathematicaly form to the nature of the changes, such as ODEs can be assigned to achieve the goal.Theoretically, we conceptualize that developmental changes occur in a lawful form, and are initiated, modelrated or regulated by forces within and outside of an individual. This is where and how differential equations are applied to dynamical system theory. \n\nIn brief, a differential equation is a function to describe how a variable changes over a period of time relative to itself and/or other parameters. This is in contrast to traditional growth modeling, where the growth function describes the overall shape(or functional form) of the growth curve. ", "_____no_output_____" ], [ "---\n# References\n\n1. Price, G. J., Louys, J., Faith, J. T., Lorenzen, E., & Westaway, M. C. (2018). Big data little help in megafauna mysteries. Nature, 558(7708), 23–25. https://doi.org/10.1038/d41586-018-05330-7\n2. Introductory ODEs | Quantdev. (n.d.). Retrieved March 29, 2020, from https://quantdev.ssri.psu.edu/tutorials/introductory-odes\n3.Luo, H. (n.d.). Population Modeling by Differential Equations. 31.\n", "_____no_output_____" ] ] ]
[ "markdown" ]
[ [ "markdown", "markdown", "markdown" ] ]
cb56886ce0a43063c2be977dc35ca59ec26f1d90
44,555
ipynb
Jupyter Notebook
FluModel.ipynb
ToJestKrzysio/AgentFlu
ad1d2d247a5d2ff25bf3237d420f38eb9d189718
[ "MIT" ]
null
null
null
FluModel.ipynb
ToJestKrzysio/AgentFlu
ad1d2d247a5d2ff25bf3237d420f38eb9d189718
[ "MIT" ]
null
null
null
FluModel.ipynb
ToJestKrzysio/AgentFlu
ad1d2d247a5d2ff25bf3237d420f38eb9d189718
[ "MIT" ]
null
null
null
138.369565
32,719
0.853238
[ [ [ "import numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib.animation as animation\nimport random\nimport math\nfrom itertools import combinations", "_____no_output_____" ], [ "POPULATION_SIZE = 1000\nINITIAL_SICK = 1\nINITIAL_HEALTHY = POPULATION_SIZE - INITIAL_SICK\n\nSICK_COLOR = (1, 0, 0)\nHEALTHY_COLOR = (0, 1, 0)\nRECOVERED_COLOR = (0.7, 0, 0.7)", "_____no_output_____" ], [ "class Person:\n x: float\n y: float\n sick: bool\n recovered: bool\n susceptibility: float\n color: tuple[int, int, int]\n HEALTHY_COLOR = (0, 1, 0)\n SICK_COLOR = (1, 0, 0)\n RECOVERED_COLOR = (0.7, 0, 0.7)\n\n def __init__(self, **kwargs):\n self.x = random.random()\n self.y = random.random()\n self.sick = False\n self.recovered = False\n self.color = HEALTHY_COLOR\n self.mobility = random.random()\n self.susceptibility = random.random()\n self.recovered_susceptibility = 0\n for key, value in kwargs.items():\n setattr(self, key, value)\n\n def get_sick(self):\n \"\"\" Become sick, update corresponding fields. \"\"\"\n self.sick = True\n self.color = SICK_COLOR\n self.susceptibility = 0\n\n def get_color(self):\n \"\"\" Get representation of a person health as a corresponding color. \"\"\"\n return self.color\n\n def get_position(self) -> tuple[float, float]:\n \"\"\" Return current person location. \"\"\"\n return self.x, self.y\n\n def recover(self):\n \"\"\" Recover from sickness, update corresponding fields. \"\"\"\n self.sick = False\n self.recovered = True\n self.color = RECOVERED_COLOR\n self.susceptibility = self.recovered_susceptibility\n\n def move(self):\n \"\"\" Move from previous position to a new one. \"\"\"\n move_x, move_y = self.get_move_values()\n self.x += move_x\n self.y += move_y\n self.apply_boundary_conditions()\n\n def apply_boundary_conditions(self):\n \"\"\" Check if person did not leave the space of the simulation, if so modifies its position. \"\"\"\n if self.x > 1:\n self.x -= 1\n if self.x < 0:\n self.x += 1\n if self.y > 1:\n self.y -= 1\n if self.y < 0:\n self.y += 1\n\n def get_distance_to_travel(self) -> float:\n \"\"\" Get distance person will move at the given time step. \"\"\"\n return random.random() * self.mobility\n\n @staticmethod\n def get_move_coefficients():\n \"\"\" Generate direction in which person will be moved at the given time step. \"\"\"\n angle = math.radians(random.random() * 360)\n return math.cos(angle), math.sin(angle)\n\n def get_move_values(self):\n distance_to_move = self.get_distance_to_travel()\n x_coefficient, y_coefficient = self.get_move_coefficients()\n return distance_to_move * x_coefficient, distance_to_move * y_coefficient\n \n def update(self):\n \"\"\" Update status related to disease development. \"\"\"\n pass\n \n def can_get_infected(self):\n \"\"\" Returns information if given agent can get infected. \"\"\"\n return not self.sick\n \n def can_infect(self):\n \"\"\" Returns information if given agent can infect others. 
\"\"\"\n return self.sick\n \n def get_infected(self):\n if self.susceptibility >= random.random():\n self.get_sick()", "_____no_output_____" ], [ "class Simulation:\n color = tuple[float, float, float]\n population_time_step = tuple[float, float, color]\n\n population_size: int\n initial_sick: int\n population: list[Person]\n frames: list[population_time_step]\n\n fig: plt.Figure\n ax: plt.Axes\n animation: animation\n\n def __init__(self, population_size: int, initial_sick: int = 1, number_of_frames: int = 30, person_kwargs: dict = {}):\n self.frames = []\n self.initial_sick = initial_sick\n self.population_size = population_size\n self.population = [Person(**person_kwargs) for x in range(population_size)]\n self.contact_radious = 0.2\n self.squared_contanct_radious = self.contact_radious**2\n for idx in range(initial_sick):\n self.population[idx].get_sick()\n self.generate_frames(number_of_frames)\n \n def find_all_interactions(self):\n \"\"\" Finds all interactions between 2 agents, ignores order in which agents appear. \"\"\"\n contacts = set()\n for person_1, person_2 in combinations(self.population, 2):\n distance = self.calcaulate_squared_euclidean_distance(person_1.get_position(), person_2.get_position())\n if distance <= self.squared_contanct_radious:\n contacts.add((person_1, person_2))\n return contacts\n \n @staticmethod\n def find_possible_infections(contacts: set[Person, Person]):\n \"\"\" Finds all interactions in which one Person is sick. \"\"\"\n # TODO introdcution of personal protection for sick (if prob > value yield else pass) saved by individual protection case\n for person_1, person_2 in contacts:\n if person_1.can_get_infected() and person_2.can_infect():\n yield person_1\n elif person_1.can_infect() and person_2.can_get_infected():\n yield person_2\n \n @staticmethod\n def calcaulate_squared_euclidean_distance(first: tuple[float, float], second: tuple[float, float]) -> float:\n return (first[0] - second[0]) **2 + (first[1] - second[1])**2\n \n def generate_frames(self, number_of_frames: int) -> None:\n \"\"\" Generates given number of frames of the simulation. \"\"\"\n self.save_frame(*self.get_population_position())\n for frame in range(number_of_frames):\n self.update_population()\n self.save_frame(*self.get_population_position()) \n \n def update_population(self) -> None:\n \"\"\" Updates position and healt status for each person in the population. \"\"\"\n for person in self.population:\n person.move()\n interactions = self.find_all_interactions()\n possible_infections = set(self.find_possible_infections(interactions))\n for idx, person in enumerate(possible_infections):\n person.get_infected()\n \n def get_population_position(self) -> population_time_step:\n \"\"\" Get current x, y coordinates of each person and appropriate color depending on the health status. \"\"\"\n population_description = ((*person.get_position(), person.get_color()) for person in\n self.population)\n return tuple(zip(*population_description))\n\n def save_frame(self, x: list[float], y: list[float], c: list[color]) -> None:\n \"\"\" Adds a single frame representing current state of the simulation to the record. \"\"\"\n self.frames.append((x, y, c))\n\n def get_frame(self, frame_index: int = -1) -> population_time_step:\n \"\"\" Get selected frame of the simulation. 
\"\"\"\n if frame_index not in range(len(self.frames)):\n frame_index = -1\n return self.frames[frame_index]\n\n def __iter__(self):\n return iter(self.frames)\n ", "_____no_output_____" ], [ "simulation = Simulation(100, number_of_frames=50)", "_____no_output_____" ], [ "%matplotlib widget\n\nfig = plt.figure(figsize=(6,6))\nax = fig.add_axes([0, 0, 1, 1])\n\nx, y, c = simulation.frames[0]\nscatter = ax.scatter(x=x, y=y, c=c)\n\n\ndef update(frame):\n x, y, c = frame\n scatter.set_offsets(list(zip(x,y)))\n # scatter.set_array(np.array(c)) # Z nieznanego powodu ustawianie koloru tym cudem znika punkty xddd\n \n\nanim = animation.FuncAnimation(fig, update, iter(simulation), interval=200)\nplt.show()\n ", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code" ] ]
cb568cc351b2dd4d4bd393bdb4fabb6099b99425
188,283
ipynb
Jupyter Notebook
machine_learning/gan/vanilla_gan/tf_vanilla_gan/tf_vanilla_gan_local.ipynb
ryangillard/artificial_intelligence
f7c21af221f366b075d351deeeb00a1b266ac3e3
[ "Apache-2.0" ]
4
2019-07-04T05:15:59.000Z
2020-06-29T19:34:33.000Z
machine_learning/gan/vanilla_gan/tf_vanilla_gan/tf_vanilla_gan_local.ipynb
ryangillard/artificial_intelligence
f7c21af221f366b075d351deeeb00a1b266ac3e3
[ "Apache-2.0" ]
null
null
null
machine_learning/gan/vanilla_gan/tf_vanilla_gan/tf_vanilla_gan_local.ipynb
ryangillard/artificial_intelligence
f7c21af221f366b075d351deeeb00a1b266ac3e3
[ "Apache-2.0" ]
1
2019-05-23T16:06:51.000Z
2019-05-23T16:06:51.000Z
60.599614
12,328
0.641423
[ [ [ "# Import libraries and modules\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport os\nimport tensorflow as tf\nprint(np.__version__)\nprint(tf.__version__)\nnp.set_printoptions(threshold=np.inf)", "1.18.5\n2.2.0-dlenv\n" ] ], [ [ "# Local Development", "_____no_output_____" ], [ "## Arguments", "_____no_output_____" ] ], [ [ "arguments = {}\n# File arguments.\narguments[\"train_file_pattern\"] = \"gs://machine-learning-1234-bucket/gan/data/mnist/train*.tfrecord\"\narguments[\"eval_file_pattern\"] = \"gs://machine-learning-1234-bucket/gan/data/mnist/test*.tfrecord\"\narguments[\"output_dir\"] = \"gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model\"\n\n# Training parameters.\narguments[\"train_batch_size\"] = 32\narguments[\"train_steps\"] = 56250\narguments[\"save_summary_steps\"] = 100\narguments[\"save_checkpoints_steps\"] = 10000\narguments[\"keep_checkpoint_max\"] = 10\narguments[\"input_fn_autotune\"] = False\n\n# Eval parameters.\narguments[\"eval_batch_size\"] = 32\narguments[\"eval_steps\"] = 100\narguments[\"start_delay_secs\"] = 60000\narguments[\"throttle_secs\"] = 60000\n\n# Image parameters.\narguments[\"height\"] = 28\narguments[\"width\"] = 28\narguments[\"depth\"] = 1\n\n# Generator parameters.\narguments[\"latent_size\"] = 512\narguments[\"generator_hidden_units\"] = [256, 512, 1024]\narguments[\"generator_leaky_relu_alpha\"] = 0.2\narguments[\"generator_final_activation\"] = \"tanh\"\narguments[\"generator_l1_regularization_scale\"] = 0.\narguments[\"generator_l2_regularization_scale\"] = 0.\narguments[\"generator_optimizer\"] = \"Adam\"\narguments[\"generator_learning_rate\"] = 0.0002\narguments[\"generator_adam_beta1\"] = 0.5\narguments[\"generator_adam_beta2\"] = 0.999\narguments[\"generator_adam_epsilon\"] = 1e-8\narguments[\"generator_clip_gradients\"] = None\narguments[\"generator_train_steps\"] = 1\n\n# Discriminator hyperparameters.\narguments[\"discriminator_hidden_units\"] = [1024, 512, 256]\narguments[\"discriminator_leaky_relu_alpha\"] = 0.2\narguments[\"discriminator_l1_regularization_scale\"] = 0.\narguments[\"discriminator_l2_regularization_scale\"] = 0.\narguments[\"discriminator_optimizer\"] = \"Adam\"\narguments[\"discriminator_learning_rate\"] = 0.0002\narguments[\"discriminator_adam_beta1\"] = 0.5\narguments[\"discriminator_adam_beta2\"] = 0.999\narguments[\"discriminator_adam_epsilon\"] = 1e-8\narguments[\"discriminator_clip_gradients\"] = None\narguments[\"discriminator_train_steps\"] = 1\narguments[\"label_smoothing\"] = 0.9\n", "_____no_output_____" ] ], [ [ "## print_object.py", "_____no_output_____" ] ], [ [ "def print_obj(function_name, object_name, object_value):\n \"\"\"Prints enclosing function, object name, and object value.\n\n Args:\n function_name: str, name of function.\n object_name: str, name of object.\n object_value: object, value of passed object.\n \"\"\"\n# pass\n print(\"{}: {} = {}\".format(function_name, object_name, object_value))\n", "_____no_output_____" ] ], [ [ "## input.py", "_____no_output_____" ] ], [ [ "def preprocess_image(image):\n \"\"\"Preprocess image tensor.\n\n Args:\n image: tensor, input image with shape\n [cur_batch_size, height, width, depth].\n\n Returns:\n Preprocessed image tensor with shape\n [cur_batch_size, height, width, depth].\n \"\"\"\n func_name = \"preprocess_image\"\n # Convert from [0, 255] -> [-1.0, 1.0] floats.\n image = tf.cast(x=image, dtype=tf.float32) * (2. 
/ 255) - 1.0\n    print_obj(func_name, \"image\", image)\n\n    return image\n\n\ndef decode_example(protos, params):\n    \"\"\"Decodes TFRecord file into tensors.\n\n    Given protobufs, decode into image and label tensors.\n\n    Args:\n        protos: protobufs from TFRecord file.\n        params: dict, user passed parameters.\n\n    Returns:\n        Image and label tensors.\n    \"\"\"\n    func_name = \"decode_example\"\n    # Create feature schema map for protos.\n    features = {\n        \"image_raw\": tf.io.FixedLenFeature(shape=[], dtype=tf.string),\n        \"label\": tf.io.FixedLenFeature(shape=[], dtype=tf.int64)\n    }\n\n    # Parse features from tf.Example.\n    parsed_features = tf.io.parse_single_example(\n        serialized=protos, features=features\n    )\n    print_obj(\"\\n\" + func_name, \"features\", features)\n\n    # Convert from a scalar string tensor (whose single string has\n    # length height * width * depth) to a uint8 tensor with shape\n    # [height * width * depth].\n    image = tf.io.decode_raw(\n        input_bytes=parsed_features[\"image_raw\"], out_type=tf.uint8\n    )\n    print_obj(func_name, \"image\", image)\n\n    # Reshape flattened image back into normal dimensions.\n    image = tf.reshape(\n        tensor=image,\n        shape=[params[\"height\"], params[\"width\"], params[\"depth\"]]\n    )\n    print_obj(func_name, \"image\", image)\n\n    # Preprocess image.\n    image = preprocess_image(image=image)\n    print_obj(func_name, \"image\", image)\n\n    # Convert label from a scalar uint8 tensor to an int32 scalar.\n    label = tf.cast(x=parsed_features[\"label\"], dtype=tf.int32)\n    print_obj(func_name, \"label\", label)\n\n    return {\"image\": image}, label\n\n\ndef read_dataset(filename, mode, batch_size, params):\n    \"\"\"Reads TF Record data using tf.data, doing necessary preprocessing.\n\n    Given filename, mode, batch size, and other parameters, read TF Record\n    dataset using Dataset API, apply necessary preprocessing, and return an\n    input function to the Estimator API.\n\n    Args:\n        filename: str, file pattern to read into our tf.data dataset.\n        mode: The estimator ModeKeys.
Can be TRAIN or EVAL.\n        batch_size: int, number of examples per batch.\n        params: dict, dictionary of user passed parameters.\n\n    Returns:\n        An input function.\n    \"\"\"\n    def _input_fn():\n        \"\"\"Wrapper input function used by Estimator API to get data tensors.\n\n        Returns:\n            Batched dataset object of dictionary of feature tensors and label\n            tensor.\n        \"\"\"\n        # Create list of files that match pattern.\n        file_list = tf.data.Dataset.list_files(file_pattern=filename)\n\n        # Create dataset from file list.\n        if params[\"input_fn_autotune\"]:\n            dataset = tf.data.TFRecordDataset(\n                filenames=file_list,\n                num_parallel_reads=tf.data.experimental.AUTOTUNE\n            )\n        else:\n            dataset = tf.data.TFRecordDataset(filenames=file_list)\n\n        # Shuffle and repeat if training with fused op.\n        if mode == tf.estimator.ModeKeys.TRAIN:\n            dataset = dataset.apply(\n                tf.data.experimental.shuffle_and_repeat(\n                    buffer_size=50 * batch_size,\n                    count=None # indefinitely\n                )\n            )\n\n        # Decode serialized tf.Example protos into a features dictionary of tensors, then batch.\n        if params[\"input_fn_autotune\"]:\n            dataset = dataset.apply(\n                tf.data.experimental.map_and_batch(\n                    map_func=lambda x: decode_example(\n                        protos=x,\n                        params=params\n                    ),\n                    batch_size=batch_size,\n                    num_parallel_calls=tf.data.experimental.AUTOTUNE\n                )\n            )\n        else:\n            dataset = dataset.apply(\n                tf.data.experimental.map_and_batch(\n                    map_func=lambda x: decode_example(\n                        protos=x,\n                        params=params\n                    ),\n                    batch_size=batch_size\n                )\n            )\n\n        # Prefetch data to improve latency.\n        if params[\"input_fn_autotune\"]:\n            dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)\n        else:\n            dataset = dataset.prefetch(buffer_size=1)\n\n        return dataset\n    return _input_fn\n", "_____no_output_____" ] ], [ [ "## generator.py", "_____no_output_____" ] ], [ [ "class Generator(object):\n    \"\"\"Generator that takes latent vector input and outputs image.\n    Fields:\n        name: str, name of `Generator`.\n        kernel_regularizer: `l1_l2_regularizer` object, regularizer for kernel\n            variables.\n        bias_regularizer: `l1_l2_regularizer` object, regularizer for bias\n            variables.\n    \"\"\"\n    def __init__(self, kernel_regularizer, bias_regularizer, name):\n        \"\"\"Instantiates and builds generator network.\n        Args:\n            kernel_regularizer: `l1_l2_regularizer` object, regularizer for\n                kernel variables.\n            bias_regularizer: `l1_l2_regularizer` object, regularizer for bias\n                variables.\n            name: str, name of generator.\n        \"\"\"\n        # Set name of generator.\n        self.name = name\n\n        # Regularizer for kernel weights.\n        self.kernel_regularizer = kernel_regularizer\n\n        # Regularizer for bias weights.\n        self.bias_regularizer = bias_regularizer\n\n    def get_fake_images(self, Z, params):\n        \"\"\"Creates generator network and returns generated images.\n\n        Args:\n            Z: tensor, latent vectors of shape [cur_batch_size, latent_size].\n            params: dict, user passed parameters.\n\n        Returns:\n            Generated image tensor of shape\n                [cur_batch_size, height * width * depth].\n        \"\"\"\n        func_name = \"get_fake_images\"\n        # Create the input layer to our DNN.\n        # shape = (cur_batch_size, latent_size)\n        network = Z\n        print_obj(\"\\n\" + func_name, \"network\", network)\n\n        # Dictionary containing possible final activations.\n        final_activation_dict = {\n            \"sigmoid\": tf.nn.sigmoid, \"relu\": tf.nn.relu, \"tanh\": tf.nn.tanh\n        }\n\n        with tf.compat.v1.variable_scope(\"generator\", reuse=tf.compat.v1.AUTO_REUSE):\n            # Add hidden layers with given number of units/neurons per layer.\n            for i, units in enumerate(params[\"generator_hidden_units\"]):\n                # shape = (cur_batch_size,
generator_hidden_units[i])\n                network = tf.compat.v1.layers.dense(\n                    inputs=network,\n                    units=units,\n                    activation=None,\n                    kernel_regularizer=self.kernel_regularizer,\n                    bias_regularizer=self.bias_regularizer,\n                    name=\"layers_dense_{}\".format(i)\n                )\n                print_obj(func_name, \"network\", network)\n\n                network = tf.nn.leaky_relu(\n                    features=network,\n                    alpha=params[\"generator_leaky_relu_alpha\"],\n                    name=\"leaky_relu_{}\".format(i)\n                )\n                print_obj(func_name, \"network\", network)\n\n            # Final linear layer for outputs.\n            # shape = (cur_batch_size, height * width * depth)\n            generated_outputs = tf.compat.v1.layers.dense(\n                inputs=network,\n                units=params[\"height\"] * params[\"width\"] * params[\"depth\"],\n                activation=final_activation_dict.get(\n                    params[\"generator_final_activation\"].lower(), None\n                ),\n                kernel_regularizer=self.kernel_regularizer,\n                bias_regularizer=self.bias_regularizer,\n                name=\"layers_dense_generated_outputs\"\n            )\n            print_obj(func_name, \"generated_outputs\", generated_outputs)\n\n        return generated_outputs\n\n    def get_generator_loss(self, fake_logits):\n        \"\"\"Gets generator loss.\n\n        Args:\n            fake_logits: tensor, shape of\n                [cur_batch_size, 1].\n\n        Returns:\n            Tensor of generator's total loss of shape [].\n        \"\"\"\n        func_name = \"get_generator_loss\"\n        # Calculate base generator loss.\n        generator_loss = tf.reduce_mean(\n            input_tensor=tf.nn.sigmoid_cross_entropy_with_logits(\n                logits=fake_logits,\n                labels=tf.ones_like(input=fake_logits)\n            ),\n            name=\"generator_loss\"\n        )\n        print_obj(\"\\n\" + func_name, \"generator_loss\", generator_loss)\n\n        # Get regularization losses.\n        generator_reg_loss = tf.compat.v1.losses.get_regularization_loss(\n            scope=\"generator\",\n            name=\"generator_regularization_loss\"\n        )\n        print_obj(func_name, \"generator_reg_loss\", generator_reg_loss)\n\n        # Combine losses for total losses.\n        generator_total_loss = tf.math.add(\n            x=generator_loss,\n            y=generator_reg_loss,\n            name=\"generator_total_loss\"\n        )\n        print_obj(func_name, \"generator_total_loss\", generator_total_loss)\n\n#         # Add summaries for TensorBoard.\n#         tf.summary.scalar(\n#             name=\"generator_loss\", tensor=generator_loss, family=\"losses\"\n#         )\n#         tf.summary.scalar(\n#             name=\"generator_reg_loss\",\n#             tensor=generator_reg_loss,\n#             family=\"losses\"\n#         )\n#         tf.summary.scalar(\n#             name=\"generator_total_loss\",\n#             tensor=generator_total_loss,\n#             family=\"total_losses\"\n#         )\n\n        return generator_total_loss\n", "_____no_output_____" ] ], [ [ "## discriminator.py", "_____no_output_____" ] ], [ [ "class Discriminator(object):\n    \"\"\"Discriminator that takes image input and outputs logits.\n    Fields:\n        name: str, name of `Discriminator`.\n        kernel_regularizer: `l1_l2_regularizer` object, regularizer for kernel\n            variables.\n        bias_regularizer: `l1_l2_regularizer` object, regularizer for bias\n            variables.\n    \"\"\"\n    def __init__(self, kernel_regularizer, bias_regularizer, name):\n        \"\"\"Instantiates and builds discriminator network.\n        Args:\n            kernel_regularizer: `l1_l2_regularizer` object, regularizer for\n                kernel variables.\n            bias_regularizer: `l1_l2_regularizer` object, regularizer for bias\n                variables.\n            name: str, name of discriminator.\n        \"\"\"\n        # Set name of discriminator.\n        self.name = name\n\n        # Regularizer for kernel weights.\n        self.kernel_regularizer = kernel_regularizer\n\n        # Regularizer for bias weights.\n        self.bias_regularizer = bias_regularizer\n\n    def get_discriminator_logits(self, X, params):\n        \"\"\"Creates discriminator network and returns logits.\n\n        Args:\n            X: tensor, image tensors of shape\n            
[cur_batch_size, height * width * depth].\n params: dict, user passed parameters.\n\n Returns:\n Logits tensor of shape [cur_batch_size, 1].\n \"\"\"\n func_name = \"get_discriminator_logits\"\n # Create the input layer to our DNN.\n # shape = (cur_batch_size, height * width * depth)\n network = X\n print_obj(\"\\n\" + func_name, \"network\", network)\n\n with tf.compat.v1.variable_scope(\"discriminator\", reuse=tf.compat.v1.AUTO_REUSE):\n # Add hidden layers with given number of units/neurons per layer.\n for i, units in enumerate(params[\"discriminator_hidden_units\"]):\n # shape = (cur_batch_size, discriminator_hidden_units[i])\n network = tf.compat.v1.layers.dense(\n inputs=network,\n units=units,\n activation=None,\n kernel_regularizer=self.kernel_regularizer,\n bias_regularizer=self.bias_regularizer,\n name=\"layers_dense_{}\".format(i)\n )\n print_obj(func_name, \"network\", network)\n\n network = tf.nn.leaky_relu(\n features=network,\n alpha=params[\"discriminator_leaky_relu_alpha\"],\n name=\"leaky_relu_{}\".format(i)\n )\n print_obj(func_name, \"network\", network)\n\n # Final linear layer for logits.\n # shape = (cur_batch_size, 1)\n logits = tf.compat.v1.layers.dense(\n inputs=network,\n units=1,\n activation=None,\n kernel_regularizer=self.kernel_regularizer,\n bias_regularizer=self.bias_regularizer,\n name=\"layers_dense_logits\"\n )\n print_obj(func_name, \"logits\", logits)\n\n return logits\n\n def get_discriminator_loss(self, fake_logits, real_logits, params):\n \"\"\"Gets discriminator loss.\n\n Args:\n fake_logits: tensor, shape of\n [cur_batch_size, 1].\n real_logits: tensor, shape of\n [cur_batch_size, 1].\n params: dict, user passed parameters.\n\n Returns:\n Tensor of discriminator's total loss of shape [].\n \"\"\"\n func_name = \"get_discriminator_loss\"\n # Calculate base discriminator loss.\n discriminator_real_loss = tf.reduce_mean(\n input_tensor=tf.nn.sigmoid_cross_entropy_with_logits(\n logits=real_logits,\n labels=tf.multiply(\n x=tf.ones_like(input=real_logits),\n y=params[\"label_smoothing\"]\n )\n ),\n name=\"discriminator_real_loss\"\n )\n print_obj(\n \"\\n\" + func_name,\n \"discriminator_real_loss\",\n discriminator_real_loss\n )\n\n discriminator_fake_loss = tf.reduce_mean(\n input_tensor=tf.nn.sigmoid_cross_entropy_with_logits(\n logits=fake_logits,\n labels=tf.zeros_like(input=fake_logits)\n ),\n name=\"discriminator_fake_loss\"\n )\n print_obj(\n func_name, \"discriminator_fake_loss\", discriminator_fake_loss\n )\n\n discriminator_loss = tf.add(\n x=discriminator_real_loss,\n y=discriminator_fake_loss,\n name=\"discriminator_loss\"\n )\n print_obj(func_name, \"discriminator_loss\", discriminator_loss)\n\n # Get regularization losses.\n discriminator_reg_loss = tf.compat.v1.losses.get_regularization_loss(\n scope=\"discriminator\",\n name=\"discriminator_reg_loss\"\n )\n print_obj(func_name, \"discriminator_reg_loss\", discriminator_reg_loss)\n\n # Combine losses for total losses.\n discriminator_total_loss = tf.math.add(\n x=discriminator_loss,\n y=discriminator_reg_loss,\n name=\"discriminator_total_loss\"\n )\n print_obj(\n func_name, \"discriminator_total_loss\", discriminator_total_loss\n )\n\n# # Add summaries for TensorBoard.\n# tf.summary.scalar(\n# name=\"discriminator_real_loss\",\n# tensor=discriminator_real_loss,\n# family=\"losses\"\n# )\n# tf.summary.scalar(\n# name=\"discriminator_fake_loss\",\n# tensor=discriminator_fake_loss,\n# family=\"losses\"\n# )\n# tf.summary.scalar(\n# name=\"discriminator_loss\",\n# 
tensor=discriminator_loss,\n# family=\"losses\"\n# )\n# tf.summary.scalar(\n# name=\"discriminator_reg_loss\",\n# tensor=discriminator_reg_loss,\n# family=\"losses\"\n# )\n# tf.summary.scalar(\n# name=\"discriminator_total_loss\",\n# tensor=discriminator_total_loss,\n# family=\"total_losses\"\n# )\n\n return discriminator_total_loss\n", "_____no_output_____" ] ], [ [ "## train_and_eval.py", "_____no_output_____" ] ], [ [ "def get_logits_and_losses(features, generator, discriminator, params):\n \"\"\"Gets logits and losses for both train and eval modes.\n\n Args:\n features: dict, feature tensors from input function.\n generator: instance of generator.`Generator`.\n discriminator: instance of discriminator.`Discriminator`.\n params: dict, user passed parameters.\n\n Returns:\n Real and fake logits and generator and discriminator losses.\n \"\"\"\n func_name = \"get_logits_and_losses\"\n # Extract real images from features dictionary.\n real_images = tf.reshape(\n tensor=features[\"image\"],\n shape=[-1, params[\"height\"] * params[\"width\"] * params[\"depth\"]]\n )\n print_obj(\"\\n\" + func_name, \"real_images\", real_images)\n\n # Get dynamic batch size in case of partial batch.\n cur_batch_size = tf.shape(\n input=real_images,\n out_type=tf.int32,\n name=\"{}_cur_batch_size\".format(func_name)\n )[0]\n\n # Create random noise latent vector for each batch example.\n Z = tf.random.normal(\n shape=[cur_batch_size, params[\"latent_size\"]],\n mean=0.0,\n stddev=1.0,\n dtype=tf.float32\n )\n print_obj(func_name, \"Z\", Z)\n\n # Get generated image from generator network from gaussian noise.\n print(\"\\nCall generator with Z = {}.\".format(Z))\n fake_images = generator.get_fake_images(Z=Z, params=params)\n\n# # Add summaries for TensorBoard.\n# tf.summary.image(\n# name=\"fake_images\",\n# tensor=tf.reshape(\n# tensor=fake_images,\n# shape=[-1, params[\"height\"], params[\"width\"], params[\"depth\"]]\n# ),\n# max_outputs=5\n# )\n\n # Get fake logits from discriminator using generator's output image.\n print(\"\\nCall discriminator with fake_images = {}.\".format(fake_images))\n fake_logits = discriminator.get_discriminator_logits(\n X=fake_images, params=params\n )\n\n # Get real logits from discriminator using real image.\n print(\n \"\\nCall discriminator with real_images = {}.\".format(real_images)\n )\n real_logits = discriminator.get_discriminator_logits(\n X=real_images, params=params\n )\n\n # Get generator total loss.\n generator_total_loss = generator.get_generator_loss(\n fake_logits=fake_logits\n )\n\n # Get discriminator total loss.\n discriminator_total_loss = discriminator.get_discriminator_loss(\n fake_logits=fake_logits, real_logits=real_logits, params=params\n )\n\n return (real_logits,\n fake_logits,\n generator_total_loss,\n discriminator_total_loss)\n", "_____no_output_____" ] ], [ [ "## train.py", "_____no_output_____" ] ], [ [ "def get_variables_and_gradients(loss, scope):\n \"\"\"Gets variables and their gradients wrt. 
loss.\n    Args:\n        loss: tensor, shape of [].\n        scope: str, the network's name to find its variables to train.\n    Returns:\n        Lists of variables and their gradients.\n    \"\"\"\n    func_name = \"get_variables_and_gradients\"\n    # Get trainable variables.\n    variables = tf.compat.v1.trainable_variables(scope=scope)\n    print_obj(\"\\n{}_{}\".format(func_name, scope), \"variables\", variables)\n\n    # Get gradients.\n    gradients = tf.gradients(\n        ys=loss,\n        xs=variables,\n        name=\"{}_gradients\".format(scope)\n    )\n    print_obj(\"\\n{}_{}\".format(func_name, scope), \"gradients\", gradients)\n\n    # Add variable names back in for identification.\n    gradients = [\n        tf.identity(\n            input=g,\n            name=\"{}_{}_gradients\".format(func_name, v.name[:-2])\n        )\n        if tf.is_tensor(x=g) else g\n        for g, v in zip(gradients, variables)\n    ]\n    print_obj(\"\\n{}_{}\".format(func_name, scope), \"gradients\", gradients)\n\n    return variables, gradients\n\n\ndef create_variable_and_gradient_histogram_summaries(loss_dict, params):\n    \"\"\"Creates variable and gradient histogram summaries.\n    Args:\n        loss_dict: dict, keys are scopes and values are scalar loss tensors\n            for each network kind.\n        params: dict, user passed parameters.\n    \"\"\"\n    pass\n#     for scope, loss in loss_dict.items():\n#         # Get variables and their gradients wrt. loss.\n#         variables, gradients = get_variables_and_gradients(loss, scope)\n\n#         # Add summaries for TensorBoard.\n#         for g, v in zip(gradients, variables):\n#             tf.summary.histogram(\n#                 name=\"{}\".format(v.name[:-2]),\n#                 values=v,\n#                 family=\"{}_variables\".format(scope)\n#             )\n#             if tf.is_tensor(x=g):\n#                 tf.summary.histogram(\n#                     name=\"{}\".format(v.name[:-2]),\n#                     values=g,\n#                     family=\"{}_gradients\".format(scope)\n#                 )\n\n\ndef train_network(loss, global_step, params, scope):\n    \"\"\"Trains network and returns loss and train op.\n\n    Args:\n        loss: tensor, shape of [].\n        global_step: tensor, the current training step or batch in the\n            training loop.\n        params: dict, user passed parameters.\n        scope: str, the scope of the variables to train.\n\n    Returns:\n        Loss tensor and training op.\n    \"\"\"\n    func_name = \"train_network\"\n    print_obj(\"\\n\" + func_name, \"scope\", scope)\n    # Create optimizer map.\n    optimizers = {\n        \"Adam\": tf.compat.v1.train.AdamOptimizer,\n        \"Adadelta\": tf.compat.v1.train.AdadeltaOptimizer,\n        \"AdagradDA\": tf.compat.v1.train.AdagradDAOptimizer,\n        \"Adagrad\": tf.compat.v1.train.AdagradOptimizer,\n        \"Ftrl\": tf.compat.v1.train.FtrlOptimizer,\n        \"GradientDescent\": tf.compat.v1.train.GradientDescentOptimizer,\n        \"Momentum\": tf.compat.v1.train.MomentumOptimizer,\n        \"ProximalAdagrad\": tf.compat.v1.train.ProximalAdagradOptimizer,\n        \"ProximalGradientDescent\": tf.compat.v1.train.ProximalGradientDescentOptimizer,\n        \"RMSProp\": tf.compat.v1.train.RMSPropOptimizer\n    }\n\n    # Get optimizer and instantiate it.\n    if params[\"{}_optimizer\".format(scope)] == \"Adam\":\n        optimizer = optimizers[params[\"{}_optimizer\".format(scope)]](\n            learning_rate=params[\"{}_learning_rate\".format(scope)],\n            beta1=params[\"{}_adam_beta1\".format(scope)],\n            beta2=params[\"{}_adam_beta2\".format(scope)],\n            epsilon=params[\"{}_adam_epsilon\".format(scope)],\n            name=\"{}_{}_optimizer\".format(\n                scope, params[\"{}_optimizer\".format(scope)].lower()\n            )\n        )\n    else:\n        optimizer = optimizers[params[\"{}_optimizer\".format(scope)]](\n            learning_rate=params[\"{}_learning_rate\".format(scope)],\n            name=\"{}_{}_optimizer\".format(\n                scope, params[\"{}_optimizer\".format(scope)].lower()\n            )\n        )\n    print_obj(\"{}_{}\".format(func_name, scope),
\"optimizer\", optimizer)\n\n # Get gradients.\n gradients = tf.gradients(\n ys=loss,\n xs=tf.compat.v1.trainable_variables(scope=scope),\n name=\"{}_gradients\".format(scope)\n )\n print_obj(\"\\n{}_{}\".format(func_name, scope), \"gradients\", gradients)\n\n # Clip gradients.\n if params[\"{}_clip_gradients\".format(scope)]:\n gradients, _ = tf.clip_by_global_norm(\n t_list=gradients,\n clip_norm=params[\"{}_clip_gradients\".format(scope)],\n name=\"{}_clip_by_global_norm_gradients\".format(scope)\n )\n print_obj(\"\\n{}_{}\".format(func_name, scope), \"gradients\", gradients)\n\n # Zip back together gradients and variables.\n grads_and_vars = zip(gradients, tf.compat.v1.trainable_variables(scope=scope))\n print_obj(\n \"{}_{}\".format(func_name, scope), \"grads_and_vars\", grads_and_vars\n )\n\n # Create train op by applying gradients to variables and incrementing\n # global step.\n train_op = optimizer.apply_gradients(\n grads_and_vars=grads_and_vars,\n global_step=global_step,\n name=\"{}_apply_gradients\".format(scope)\n )\n\n return loss, train_op\n\n\ndef get_loss_and_train_op(\n generator_total_loss, discriminator_total_loss, params):\n \"\"\"Gets loss and train op for train mode.\n Args:\n generator_total_loss: tensor, scalar total loss of generator.\n discriminator_total_loss: tensor, scalar total loss of discriminator.\n params: dict, user passed parameters.\n Returns:\n Loss scalar tensor and train_op to be used by the EstimatorSpec.\n \"\"\"\n func_name = \"get_loss_and_train_op\"\n # Get global step.\n global_step = tf.compat.v1.train.get_or_create_global_step()\n\n # Determine if it is time to train generator or discriminator.\n cycle_step = tf.math.mod(\n x=global_step,\n y=tf.cast(\n x=tf.add(\n x=params[\"discriminator_train_steps\"],\n y=params[\"generator_train_steps\"]\n ),\n dtype=tf.int64\n ),\n name=\"{}_cycle_step\".format(func_name)\n )\n\n # Create choose discriminator condition.\n condition = tf.less(\n x=cycle_step, y=params[\"discriminator_train_steps\"]\n )\n\n # Conditionally choose to train generator or discriminator subgraph.\n loss, train_op = tf.cond(\n pred=condition,\n true_fn=lambda: train_network(\n loss=discriminator_total_loss,\n global_step=global_step,\n params=params,\n scope=\"discriminator\"\n ),\n false_fn=lambda: train_network(\n loss=generator_total_loss,\n global_step=global_step,\n params=params,\n scope=\"generator\"\n )\n )\n\n return loss, train_op\n", "_____no_output_____" ] ], [ [ "## eval_metrics.py", "_____no_output_____" ] ], [ [ "def get_eval_metric_ops(fake_logits, real_logits, params):\n \"\"\"Gets eval metric ops.\n\n Args:\n fake_logits: tensor, shape of [cur_batch_size, 1] that came from\n discriminator having processed generator's output image.\n real_logits: tensor, shape of [cur_batch_size, 1] that came from\n discriminator having processed real image.\n params: dict, user passed parameters.\n\n Returns:\n Dictionary of eval metric ops.\n \"\"\"\n func_name = \"get_eval_metric_ops\"\n # Concatenate discriminator logits and labels.\n discriminator_logits = tf.concat(\n values=[real_logits, fake_logits],\n axis=0,\n name=\"discriminator_concat_logits\"\n )\n print_obj(\"\\n\" + func_name, \"discriminator_logits\", discriminator_logits)\n\n discriminator_labels = tf.concat(\n values=[\n tf.ones_like(input=real_logits) * params[\"label_smoothing\"],\n tf.zeros_like(input=fake_logits)\n ],\n axis=0,\n name=\"discriminator_concat_labels\"\n )\n print_obj(func_name, \"discriminator_labels\", discriminator_labels)\n\n # 
Calculate discriminator probabilities.\n discriminator_probabilities = tf.nn.sigmoid(\n x=discriminator_logits, name=\"discriminator_probabilities\"\n )\n print_obj(\n func_name, \"discriminator_probabilities\", discriminator_probabilities\n )\n\n # Create eval metric ops dictionary.\n eval_metric_ops = {\n \"accuracy\": tf.compat.v1.metrics.accuracy(\n labels=discriminator_labels,\n predictions=discriminator_probabilities,\n name=\"discriminator_accuracy\"\n ),\n \"precision\": tf.compat.v1.metrics.precision(\n labels=discriminator_labels,\n predictions=discriminator_probabilities,\n name=\"discriminator_precision\"\n ),\n \"recall\": tf.compat.v1.metrics.recall(\n labels=discriminator_labels,\n predictions=discriminator_probabilities,\n name=\"discriminator_recall\"\n ),\n \"auc_roc\": tf.compat.v1.metrics.auc(\n labels=discriminator_labels,\n predictions=discriminator_probabilities,\n num_thresholds=200,\n curve=\"ROC\",\n name=\"discriminator_auc_roc\"\n ),\n \"auc_pr\": tf.compat.v1.metrics.auc(\n labels=discriminator_labels,\n predictions=discriminator_probabilities,\n num_thresholds=200,\n curve=\"PR\",\n name=\"discriminator_auc_pr\"\n )\n }\n print_obj(func_name, \"eval_metric_ops\", eval_metric_ops)\n\n return eval_metric_ops\n", "_____no_output_____" ] ], [ [ "## predict.py", "_____no_output_____" ] ], [ [ "def get_predictions_and_export_outputs(features, generator, params):\n \"\"\"Gets predictions and serving export outputs.\n\n Args:\n features: dict, feature tensors from serving input function.\n generator: instance of `Generator`.\n params: dict, user passed parameters.\n\n Returns:\n Predictions dictionary and export outputs dictionary.\n \"\"\"\n func_name = \"get_predictions_and_export_outputs\"\n\n # Extract given latent vectors from features dictionary.\n Z = features[\"Z\"]\n print_obj(\"\\n\" + func_name, \"Z\", Z)\n\n # Establish generator network subgraph.\n fake_images = generator.get_fake_images(Z=Z, params=params)\n print_obj(func_name, \"fake_images\", fake_images)\n\n # Reshape into a rank 4 image.\n generated_images = tf.reshape(\n tensor=fake_images,\n shape=[-1, params[\"height\"], params[\"width\"], params[\"depth\"]]\n )\n print_obj(func_name, \"generated_images\", generated_images)\n\n # Create predictions dictionary.\n predictions_dict = {\n \"generated_images\": generated_images\n }\n print_obj(func_name, \"predictions_dict\", predictions_dict)\n\n # Create export outputs.\n export_outputs = {\n \"predict_export_outputs\": tf.estimator.export.PredictOutput(\n outputs=predictions_dict)\n }\n print_obj(func_name, \"export_outputs\", export_outputs)\n\n return predictions_dict, export_outputs\n", "_____no_output_____" ] ], [ [ "## vanilla_gan.py", "_____no_output_____" ] ], [ [ "def vanilla_gan_model(features, labels, mode, params):\n \"\"\"Vanilla GAN custom Estimator model function.\n\n Args:\n features: dict, keys are feature names and values are feature tensors.\n labels: tensor, label data.\n mode: tf.estimator.ModeKeys with values of either TRAIN, EVAL, or\n PREDICT.\n params: dict, user passed parameters.\n\n Returns:\n Instance of `tf.estimator.EstimatorSpec` class.\n \"\"\"\n func_name = \"vanilla_gan_model\"\n print_obj(\"\\n\" + func_name, \"features\", features)\n print_obj(func_name, \"labels\", labels)\n print_obj(func_name, \"mode\", mode)\n print_obj(func_name, \"params\", params)\n\n # Loss function, training/eval ops, etc.\n predictions_dict = None\n loss = None\n train_op = None\n eval_metric_ops = None\n export_outputs = None\n\n # 
Instantiate generator.\n vanilla_generator = Generator(\n kernel_regularizer=None,\n# tf.contrib.layers.l1_l2_regularizer(\n# scale_l1=params[\"generator_l1_regularization_scale\"],\n# scale_l2=params[\"generator_l2_regularization_scale\"]\n# ),\n bias_regularizer=None,\n name=\"generator\"\n )\n\n # Instantiate discriminator.\n vanilla_discriminator = Discriminator(\n kernel_regularizer=None,\n# tf.contrib.layers.l1_l2_regularizer(\n# scale_l1=params[\"discriminator_l1_regularization_scale\"],\n# scale_l2=params[\"discriminator_l2_regularization_scale\"]\n# ),\n bias_regularizer=None,\n name=\"discriminator\"\n )\n\n if mode == tf.estimator.ModeKeys.PREDICT:\n # Get predictions and export outputs.\n (predictions_dict,\n export_outputs) = get_predictions_and_export_outputs(\n features=features, generator=vanilla_generator, params=params\n )\n else:\n # Get logits and losses from networks for train and eval modes.\n (real_logits,\n fake_logits,\n generator_total_loss,\n discriminator_total_loss) = get_logits_and_losses(\n features=features,\n generator=vanilla_generator,\n discriminator=vanilla_discriminator,\n params=params\n )\n\n if mode == tf.estimator.ModeKeys.TRAIN:\n # Create variable and gradient histogram summaries.\n create_variable_and_gradient_histogram_summaries(\n loss_dict={\n \"generator\": generator_total_loss,\n \"discriminator\": discriminator_total_loss\n },\n params=params\n )\n\n # Get loss and train op for EstimatorSpec.\n loss, train_op = get_loss_and_train_op(\n generator_total_loss=generator_total_loss,\n discriminator_total_loss=discriminator_total_loss,\n params=params\n )\n else:\n # Set eval loss.\n loss = discriminator_total_loss\n\n # Get eval metrics.\n eval_metric_ops = get_eval_metric_ops(\n real_logits=real_logits,\n fake_logits=fake_logits,\n params=params\n )\n\n # Return EstimatorSpec\n return tf.estimator.EstimatorSpec(\n mode=mode,\n predictions=predictions_dict,\n loss=loss,\n train_op=train_op,\n eval_metric_ops=eval_metric_ops,\n export_outputs=export_outputs\n )\n", "_____no_output_____" ] ], [ [ "## serving.py", "_____no_output_____" ] ], [ [ "def serving_input_fn(params):\n \"\"\"Serving input function.\n\n Args:\n params: dict, user passed parameters.\n\n Returns:\n ServingInputReceiver object containing features and receiver tensors.\n \"\"\"\n func_name = \"serving_input_fn\"\n # Create placeholders to accept data sent to the model at serving time.\n # shape = (batch_size,)\n feature_placeholders = {\n \"Z\": tf.compat.v1.placeholder(\n dtype=tf.float32,\n shape=[None, params[\"latent_size\"]],\n name=\"serving_input_placeholder_Z\"\n )\n }\n print_obj(\"\\n\" + func_name, \"feature_placeholders\", feature_placeholders)\n\n # Create clones of the feature placeholder tensors so that the SavedModel\n # SignatureDef will point to the placeholder.\n features = {\n key: tf.identity(\n input=value,\n name=\"{}_identity_placeholder_{}\".format(func_name, key)\n )\n for key, value in feature_placeholders.items()\n }\n print_obj(func_name, \"features\", features)\n\n return tf.estimator.export.ServingInputReceiver(\n features=features, receiver_tensors=feature_placeholders\n )\n", "_____no_output_____" ] ], [ [ "## model.py", "_____no_output_____" ] ], [ [ "def train_and_evaluate(args):\n \"\"\"Trains and evaluates custom Estimator model.\n\n Args:\n args: dict, user passed parameters.\n\n Returns:\n `Estimator` object.\n \"\"\"\n func_name = \"train_and_evaluate\"\n print_obj(\"\\n\" + func_name, \"args\", args)\n # Ensure filewriter cache is clear 
for TensorBoard events file.\n# tf.summary.FileWriterCache.clear()\n\n # Set logging to be level of INFO.\n# tf.logging.set_verbosity(tf.logging.INFO)\n\n # Create a RunConfig for Estimator.\n config = tf.estimator.RunConfig(\n model_dir=args[\"output_dir\"],\n save_summary_steps=args[\"save_summary_steps\"],\n save_checkpoints_steps=args[\"save_checkpoints_steps\"],\n keep_checkpoint_max=args[\"keep_checkpoint_max\"]\n )\n\n # Create our custom estimator using our model function.\n estimator = tf.estimator.Estimator(\n model_fn=vanilla_gan_model,\n model_dir=args[\"output_dir\"],\n config=config,\n params=args\n )\n\n # Create train spec to read in our training data.\n train_spec = tf.estimator.TrainSpec(\n input_fn=read_dataset(\n filename=args[\"train_file_pattern\"],\n mode=tf.estimator.ModeKeys.TRAIN,\n batch_size=args[\"train_batch_size\"],\n params=args\n ),\n max_steps=args[\"train_steps\"]\n )\n\n # Create exporter to save out the complete model to disk.\n exporter = tf.estimator.LatestExporter(\n name=\"exporter\",\n serving_input_receiver_fn=lambda: serving_input_fn(args)\n )\n\n # Create eval spec to read in our validation data and export our model.\n eval_spec = tf.estimator.EvalSpec(\n input_fn=read_dataset(\n filename=args[\"eval_file_pattern\"],\n mode=tf.estimator.ModeKeys.EVAL,\n batch_size=args[\"eval_batch_size\"],\n params=args\n ),\n steps=args[\"eval_steps\"],\n start_delay_secs=args[\"start_delay_secs\"],\n throttle_secs=args[\"throttle_secs\"],\n exporters=exporter\n )\n\n # Create train and evaluate loop to train and evaluate our estimator.\n tf.estimator.train_and_evaluate(\n estimator=estimator, train_spec=train_spec, eval_spec=eval_spec)\n\n return estimator\n", "_____no_output_____" ] ], [ [ "## Run model", "_____no_output_____" ] ], [ [ "os.environ[\"OUTPUT_DIR\"] = arguments[\"output_dir\"]", "_____no_output_____" ], [ "%%bash\ngsutil -m rm -rf ${OUTPUT_DIR}", "Removing gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/checkpoint#1595549336880429...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/eval/#1595549340160019...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/eval/events.out.tfevents.1595549340.tensorflow-2-2-20200707-090436#1595549340908390...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/events.out.tfevents.1595549209.tensorflow-2-2-20200707-090436#1595549330068079...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/export/#1595549341665539...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/export/exporter/#1595549341897739...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/export/exporter/temp-1595549340/#1595549342115797...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/graph.pbtxt#1595549302077652...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-0.data-00000-of-00001#1595549304127644...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-0.index#1595549304365451...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-0.meta#1595549305315409...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-10000.data-00000-of-00001#1595549336239502...\nRemoving gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-10000.index#1595549336407397...\nRemoving 
gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-10000.meta#1595549337325631...\n/ [14/14 objects] 100% Done \nOperation completed over 14 objects. \n" ], [ "estimator = train_and_evaluate(arguments)", "\ntrain_and_evaluate: args = {'train_file_pattern': 'gs://machine-learning-1234-bucket/gan/data/mnist/train*.tfrecord', 'eval_file_pattern': 'gs://machine-learning-1234-bucket/gan/data/mnist/test*.tfrecord', 'output_dir': 'gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model', 'train_batch_size': 32, 'train_steps': 56250, 'save_summary_steps': 100, 'save_checkpoints_steps': 10000, 'keep_checkpoint_max': 10, 'input_fn_autotune': False, 'eval_batch_size': 32, 'eval_steps': 100, 'start_delay_secs': 60000, 'throttle_secs': 60000, 'height': 28, 'width': 28, 'depth': 1, 'latent_size': 512, 'generator_hidden_units': [256, 512, 1024], 'generator_leaky_relu_alpha': 0.2, 'generator_final_activation': 'tanh', 'generator_l1_regularization_scale': 0.0, 'generator_l2_regularization_scale': 0.0, 'generator_optimizer': 'Adam', 'generator_learning_rate': 0.0002, 'generator_adam_beta1': 0.5, 'generator_adam_beta2': 0.999, 'generator_adam_epsilon': 1e-08, 'generator_clip_gradients': None, 'generator_train_steps': 1, 'discriminator_hidden_units': [1024, 512, 256], 'discriminator_leaky_relu_alpha': 0.2, 'discriminator_l1_regularization_scale': 0.0, 'discriminator_l2_regularization_scale': 0.0, 'discriminator_optimizer': 'Adam', 'discriminator_learning_rate': 0.0002, 'discriminator_adam_beta1': 0.5, 'discriminator_adam_beta2': 0.999, 'discriminator_adam_epsilon': 1e-08, 'discriminator_clip_gradients': None, 'discriminator_train_steps': 1, 'label_smoothing': 0.9}\nINFO:tensorflow:Using config: {'_model_dir': 'gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model', '_tf_random_seed': None, '_save_summary_steps': 100, '_save_checkpoints_steps': 10000, '_save_checkpoints_secs': None, '_session_config': allow_soft_placement: true\ngraph_options {\n rewrite_options {\n meta_optimizer_iterations: ONE\n }\n}\n, '_keep_checkpoint_max': 10, '_keep_checkpoint_every_n_hours': 10000, '_log_step_count_steps': 100, '_train_distribute': None, '_device_fn': None, '_protocol': None, '_eval_distribute': None, '_experimental_distribute': None, '_experimental_max_worker_delay_secs': None, '_session_creation_timeout_secs': 7200, '_service': None, '_cluster_spec': ClusterSpec({}), '_task_type': 'worker', '_task_id': 0, '_global_id_in_cluster': 0, '_master': '', '_evaluation_master': '', '_is_chief': True, '_num_ps_replicas': 0, '_num_worker_replicas': 1}\nINFO:tensorflow:Not using Distribute Coordinator.\nINFO:tensorflow:Running training and evaluation locally (non-distributed).\nINFO:tensorflow:Start train and evaluate loop. The evaluate will happen after every checkpoint. 
Checkpoint frequency is determined based on RunConfig arguments: save_checkpoints_steps 10000 or save_checkpoints_secs None.
[... TF1-to-TF2 deprecation warnings elided (Variable.initialized_value, shuffle_and_repeat, map_and_batch, tf.layers.dense, Layer.apply); they are informational and do not affect the run ...]

decode_example: features = {'image_raw': FixedLenFeature(shape=[], dtype=tf.string, default_value=None), 'label': FixedLenFeature(shape=[], dtype=tf.int64, default_value=None)}
decode_example: image = Tensor("DecodeRaw:0", shape=(None,), dtype=uint8)
decode_example: image = Tensor("Reshape:0", shape=(28, 28, 1), dtype=uint8)
preprocess_image: image = Tensor("sub:0", shape=(28, 28, 1), dtype=float32)
decode_example: label = Tensor("Cast_1:0", shape=(), dtype=int32)
INFO:tensorflow:Calling model_fn.

vanilla_gan_model: features = {'image': <tf.Tensor 'IteratorGetNext:0' shape=(None, 28, 28, 1) dtype=float32>}
vanilla_gan_model: labels = Tensor("IteratorGetNext:1", shape=(None,), dtype=int32)
vanilla_gan_model: mode = train
vanilla_gan_model: params = {
  'train_file_pattern': 'gs://machine-learning-1234-bucket/gan/data/mnist/train*.tfrecord',
  'eval_file_pattern': 'gs://machine-learning-1234-bucket/gan/data/mnist/test*.tfrecord',
  'output_dir': 'gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model',
  'train_batch_size': 32, 'train_steps': 56250,
  'save_summary_steps': 100, 'save_checkpoints_steps': 10000, 'keep_checkpoint_max': 10,
  'input_fn_autotune': False, 'eval_batch_size': 32, 'eval_steps': 100,
  'start_delay_secs': 60000, 'throttle_secs': 60000,
  'height': 28, 'width': 28, 'depth': 1, 'latent_size': 512,
  'generator_hidden_units': [256, 512, 1024], 'generator_leaky_relu_alpha': 0.2,
  'generator_final_activation': 'tanh',
  'generator_l1_regularization_scale': 0.0, 'generator_l2_regularization_scale': 0.0,
  'generator_optimizer': 'Adam', 'generator_learning_rate': 0.0002,
  'generator_adam_beta1': 0.5, 'generator_adam_beta2': 0.999, 'generator_adam_epsilon': 1e-08,
  'generator_clip_gradients': None, 'generator_train_steps': 1,
  'discriminator_hidden_units': [1024, 512, 256], 'discriminator_leaky_relu_alpha': 0.2,
  'discriminator_l1_regularization_scale': 0.0, 'discriminator_l2_regularization_scale': 0.0,
  'discriminator_optimizer': 'Adam', 'discriminator_learning_rate': 0.0002,
  'discriminator_adam_beta1': 0.5, 'discriminator_adam_beta2': 0.999, 'discriminator_adam_epsilon': 1e-08,
  'discriminator_clip_gradients': None, 'discriminator_train_steps': 1,
  'label_smoothing': 0.9}

get_logits_and_losses: real_images = Tensor("Reshape:0", shape=(None, 784), dtype=float32)
get_logits_and_losses: Z = Tensor("random_normal:0", shape=(None, 512), dtype=float32)

Call generator with Z = Tensor("random_normal:0", shape=(None, 512), dtype=float32).

get_fake_images: network = Tensor("random_normal:0", shape=(None, 512), dtype=float32)
get_fake_images: network = Tensor("generator/leaky_relu_0:0", shape=(None, 256), dtype=float32)
get_fake_images: network = Tensor("generator/leaky_relu_1:0", shape=(None, 512), dtype=float32)
get_fake_images: network = Tensor("generator/leaky_relu_2:0", shape=(None, 1024), dtype=float32)
get_fake_images: generated_outputs = Tensor("generator/layers_dense_generated_outputs/Tanh:0", shape=(None, 784), dtype=float32)

Call discriminator with fake_images = Tensor("generator/layers_dense_generated_outputs/Tanh:0", shape=(None, 784), dtype=float32).

get_discriminator_logits: network = Tensor("discriminator/leaky_relu_0:0", shape=(None, 1024), dtype=float32)
get_discriminator_logits: network = Tensor("discriminator/leaky_relu_1:0", shape=(None, 512), dtype=float32)
get_discriminator_logits: network = Tensor("discriminator/leaky_relu_2:0", shape=(None, 256), dtype=float32)
get_discriminator_logits: logits = Tensor("discriminator/layers_dense_logits/BiasAdd:0", shape=(None, 1), dtype=float32)

Call discriminator with real_images = Tensor("Reshape:0", shape=(None, 784), dtype=float32).
[... the same stack is applied to the real images under scope "discriminator_1", again ending in logits of shape (None, 1) ...]

get_generator_loss: generator_loss = Tensor("generator_loss:0", shape=(), dtype=float32)
get_generator_loss: generator_reg_loss = Tensor("Const_1:0", shape=(), dtype=float32)
get_generator_loss: generator_total_loss = Tensor("generator_total_loss:0", shape=(), dtype=float32)

get_discriminator_loss: discriminator_real_loss = Tensor("discriminator_real_loss:0", shape=(), dtype=float32)
get_discriminator_loss: discriminator_fake_loss = Tensor("discriminator_fake_loss:0", shape=(), dtype=float32)
get_discriminator_loss: discriminator_loss = Tensor("discriminator_loss:0", shape=(), dtype=float32)
get_discriminator_loss: discriminator_reg_loss = Tensor("Const_4:0", shape=(), dtype=float32)
get_discriminator_loss: discriminator_total_loss = Tensor("discriminator_total_loss:0", shape=(), dtype=float32)

train_network: scope = discriminator
train_network_discriminator: optimizer = <tensorflow.python.training.adam.AdamOptimizer>
train_network_discriminator: gradients for the 8 discriminator variables, shapes (784, 1024), (1024,), (1024, 512), (512,), (512, 256), (256,), (256, 1), (1,)
train_network_discriminator: grads_and_vars = <zip object>

train_network: scope = generator
train_network_generator: optimizer = <tensorflow.python.training.adam.AdamOptimizer>
train_network_generator: gradients for the 8 generator variables, shapes (512, 256), (256,), (256, 512), (512,), (512, 1024), (1024,), (1024, 784), (784,)
train_network_generator: grads_and_vars = <zip object>
INFO:tensorflow:Done calling model_fn.
INFO:tensorflow:Create CheckpointSaverHook.
INFO:tensorflow:Graph was finalized.
INFO:tensorflow:Running local_init_op.
INFO:tensorflow:Done running local_init_op.
INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 0...
INFO:tensorflow:Saving checkpoints for 0 into gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt.
INFO:tensorflow:Calling checkpoint listeners after saving checkpoint 0...
INFO:tensorflow:loss = 1.6783597, step = 0
INFO:tensorflow:loss = 1.4146522, step = 100 (0.533 sec)
INFO:tensorflow:loss = 1.1655922, step = 200 (0.303 sec)
[... per-100-step loss logging elided for steps 300-9800; throughput holds at roughly 330-350 global_step/sec and the loss oscillates between about 0.64 and 1.32 ...]
INFO:tensorflow:loss = 0.8844881, step = 9900 (0.292 sec)
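The scalar loss printed every 100 steps is the value the model function returns during the alternating generator/discriminator updates. The exact loss code is not shown in this excerpt; a plausible sketch, assuming the standard sigmoid cross-entropy GAN losses with one-sided label smoothing (label_smoothing = 0.9 in params, applied to the real targets), is:

import tensorflow.compat.v1 as tf

def gan_losses(real_logits, fake_logits, label_smoothing=0.9):
    # Generator: push D(fake) toward "real".
    generator_loss = tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(
            labels=tf.ones_like(fake_logits), logits=fake_logits),
        name="generator_loss")
    # Discriminator: real targets smoothed to 0.9, fake targets 0.
    discriminator_real_loss = tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(
            labels=tf.ones_like(real_logits) * label_smoothing,
            logits=real_logits),
        name="discriminator_real_loss")
    discriminator_fake_loss = tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(
            labels=tf.zeros_like(fake_logits), logits=fake_logits),
        name="discriminator_fake_loss")
    discriminator_loss = tf.add(
        discriminator_real_loss, discriminator_fake_loss,
        name="discriminator_loss")
    return generator_loss, discriminator_loss

Under this formulation the discriminator's loss is the sum of its real and fake terms, which matches the discriminator_real_loss / discriminator_fake_loss / discriminator_loss tensor names printed during graph construction.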
INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 10000...
INFO:tensorflow:Saving checkpoints for 10000 into gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt.

decode_example / preprocess_image: [same feature parsing and scaling as above, now reading the eval file pattern test*.tfrecord]
INFO:tensorflow:Calling model_fn.

vanilla_gan_model: features = {'image': <tf.Tensor 'IteratorGetNext:0' shape=(None, 28, 28, 1) dtype=float32>}
vanilla_gan_model: labels = Tensor("IteratorGetNext:1", shape=(None,), dtype=int32)
vanilla_gan_model: mode = eval
vanilla_gan_model: params = [identical to the train-mode params above]

get_logits_and_losses: real_images = Tensor("Reshape:0", shape=(None, 784), dtype=float32)
get_logits_and_losses: Z = Tensor("random_normal:0", shape=(None, 512), dtype=float32)
[... the generator, both discriminator passes, and the loss tensors are rebuilt for eval with the same names and shapes as in train mode ...]

get_eval_metric_ops: discriminator_logits = Tensor("discriminator_concat_logits:0", shape=(None, 1), dtype=float32)
get_eval_metric_ops: discriminator_labels = Tensor("discriminator_concat_labels:0", shape=(None, 1), dtype=float32)
get_eval_metric_ops: discriminator_probabilities = Tensor("discriminator_probabilities:0", shape=(None, 1), dtype=float32)
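From the concatenated logits, labels, and probabilities above, the discriminator's eval metrics are assembled. A sketch consistent with the (value, update_op) pairs reported below, assuming the TF1 tf.metrics API that the following deprecation warning points at:

import tensorflow.compat.v1 as tf

def get_eval_metric_ops(labels, probabilities):
    # Threshold at 0.5 for the classification metrics (an assumption; the log
    # only confirms that probabilities are derived from the logits).
    predictions = tf.cast(probabilities > 0.5, tf.float32)
    return {
        "accuracy": tf.metrics.accuracy(labels, predictions, name="discriminator_accuracy"),
        "precision": tf.metrics.precision(labels, predictions, name="discriminator_precision"),
        "recall": tf.metrics.recall(labels, predictions, name="discriminator_recall"),
        "auc_roc": tf.metrics.auc(labels, probabilities, curve="ROC", name="discriminator_auc_roc"),
        "auc_pr": tf.metrics.auc(labels, probabilities, curve="PR", name="discriminator_auc_pr"),
    }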
WARNING:tensorflow:tf.metrics.auc is deprecated (use tf.keras.metrics.AUC instead), and the trapezoidal rule is known to produce incorrect PR-AUCs; "careful_interpolation" is recommended.
get_eval_metric_ops: eval_metric_ops = {'accuracy', 'precision', 'recall', 'auc_roc', 'auc_pr'}, each a (value, update_op) tensor pair under the corresponding discriminator_* name scope
INFO:tensorflow:Done calling model_fn.
INFO:tensorflow:Starting evaluation at 2020-07-24T00:11:29Z
INFO:tensorflow:Restoring parameters from gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-10000
INFO:tensorflow:Evaluation [10/100] ... [100/100]
INFO:tensorflow:Inference Time : 1.40707s
INFO:tensorflow:Finished evaluation at 2020-07-24-00:11:31
INFO:tensorflow:Saving dict for global step 10000: accuracy = 0.0, auc_pr = 0.8769913, auc_roc = 0.8615721, global_step = 10000, loss = 1.0742536, precision = 0.5, recall = 1.0
INFO:tensorflow:Saving 'checkpoint_path' summary for global step 10000: gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-10000

serving_input_fn: feature_placeholders = {'Z': <tf.Tensor 'serving_input_placeholder_Z:0' shape=(None, 512) dtype=float32>}
serving_input_fn: features = {'Z': <tf.Tensor 'serving_input_fn_identity_placeholder_Z:0' shape=(None, 512) dtype=float32>}
INFO:tensorflow:Calling model_fn.

vanilla_gan_model: features = {'Z': <tf.Tensor 'serving_input_fn_identity_placeholder_Z:0' shape=(None, 512) dtype=float32>}
vanilla_gan_model: labels = None
vanilla_gan_model: mode = infer
vanilla_gan_model: params = [identical to the train-mode params above]

get_predictions_and_export_outputs: Z = Tensor("serving_input_fn_identity_placeholder_Z:0", shape=(None, 512), dtype=float32)
[... the generator stack is rebuilt from the serving placeholder, again ending in a (None, 784) tanh output ...]
get_predictions_and_export_outputs: fake_images = Tensor("generator/layers_dense_generated_outputs/Tanh:0", shape=(None, 784), dtype=float32)
get_predictions_and_export_outputs: generated_images = Tensor("Reshape:0", shape=(None, 28, 28, 1), dtype=float32)
get_predictions_and_export_outputs: predictions_dict = {'generated_images': <tf.Tensor 'Reshape:0' shape=(None, 28, 28, 1) dtype=float32>}
get_predictions_and_export_outputs: export_outputs = {'predict_export_outputs': <PredictOutput>}
INFO:tensorflow:Done calling model_fn.
INFO:tensorflow:Signatures INCLUDED in export for Predict: ['predict_export_outputs', 'serving_default']; no Classify, Regress, Train, or Eval signatures.
INFO:tensorflow:Restoring parameters from gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-10000
INFO:tensorflow:Assets added to graph.
INFO:tensorflow:No assets to write.
INFO:tensorflow:SavedModel written to: gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/export/exporter/temp-1595549492/saved_model.pb
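The exported signature accepts only the latent vector Z. A minimal sketch of a serving input function that would produce exactly the placeholder and identity tensors named above (the ServingInputReceiver wiring is an assumption; only the Z shape is confirmed by the log):

import tensorflow.compat.v1 as tf

def serving_input_fn(latent_size=512):
    feature_placeholders = {
        "Z": tf.placeholder(
            dtype=tf.float32, shape=[None, latent_size],
            name="serving_input_placeholder_Z")
    }
    # Pass-through identity, mirroring the two tensors printed above.
    features = {key: tf.identity(value) for key, value in feature_placeholders.items()}
    return tf.estimator.export.ServingInputReceiver(
        features=features, receiver_tensors=feature_placeholders)

After export, generated images can be obtained by feeding a (batch, 512) array of noise, drawn the same way as the training Z (tf.random_normal), to the 'serving_default' signature.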
INFO:tensorflow:global_step/sec: 8.15184
INFO:tensorflow:loss = 1.0479531, step = 10000 (12.267 sec)
[... per-100-step loss logging elided for steps 10100-19900; throughput returns to roughly 330-350 global_step/sec and the loss stays in about the 0.88-1.30 range ...]
INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 20000...
INFO:tensorflow:Saving checkpoints for 20000 into gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt.
INFO:tensorflow:Skip the current checkpoint eval due to throttle secs (60000 secs).
INFO:tensorflow:loss = 1.2095861, step = 20000 (3.346 sec)
[... per-100-step loss logging elided for steps 20100-29900 ...]
INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 30000...
INFO:tensorflow:Saving checkpoints for 30000 into gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt.
INFO:tensorflow:Skip the current checkpoint eval due to throttle secs (60000 secs).
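The "Skip the current checkpoint eval" messages follow from the eval schedule in params: with throttle_secs = 60000, tf.estimator.train_and_evaluate re-evaluates a new checkpoint at most once per 60000 seconds, so the checkpoints at 20000 and 30000 (and 40000 below) are written without an accompanying evaluation. A sketch of the corresponding EvalSpec, where the input_fn and exporter arguments are assumed:

import tensorflow.compat.v1 as tf

def make_eval_spec(eval_input_fn, exporter):
    # throttle_secs/start_delay_secs come straight from params; checkpoints
    # written inside the 60000 s window are skipped for evaluation.
    return tf.estimator.EvalSpec(
        input_fn=eval_input_fn,   # assumed: the notebook's eval input_fn
        steps=100,                # eval_steps
        exporters=exporter,       # assumed: the exporter writing export/exporter
        start_delay_secs=60000,
        throttle_secs=60000)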
INFO:tensorflow:global_step/sec: 27.9768
INFO:tensorflow:loss = 1.1193485, step = 30000 (3.575 sec)
[... per-100-step loss logging elided for steps 30100-39900; loss remains in about the 0.89-1.36 range ...]
INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 40000...
INFO:tensorflow:Saving checkpoints for 40000 into gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt.
INFO:tensorflow:Skip the current checkpoint eval due to throttle secs (60000 secs).
INFO:tensorflow:loss = 1.088998, step = 40000 (4.296 sec)
[... per-100-step loss logging elided for steps 40100-46800 ...]
INFO:tensorflow:loss = 0.96910524, step = 46900 (0.826 sec)
[... output truncated here in the source ...]
1.1614258, step = 47000 (0.309 sec)\nINFO:tensorflow:global_step/sec: 312.575\nINFO:tensorflow:loss = 1.0673203, step = 47100 (0.320 sec)\nINFO:tensorflow:global_step/sec: 308.945\nINFO:tensorflow:loss = 1.1182046, step = 47200 (0.323 sec)\nINFO:tensorflow:global_step/sec: 313.884\nINFO:tensorflow:loss = 1.0694213, step = 47300 (0.319 sec)\nINFO:tensorflow:global_step/sec: 316.938\nINFO:tensorflow:loss = 0.99380255, step = 47400 (0.315 sec)\nINFO:tensorflow:global_step/sec: 316.238\nINFO:tensorflow:loss = 1.2808595, step = 47500 (0.316 sec)\nINFO:tensorflow:global_step/sec: 318.946\nINFO:tensorflow:loss = 1.222106, step = 47600 (0.313 sec)\nINFO:tensorflow:global_step/sec: 321.783\nINFO:tensorflow:loss = 1.0585419, step = 47700 (0.311 sec)\nINFO:tensorflow:global_step/sec: 316.088\nINFO:tensorflow:loss = 1.1118407, step = 47800 (0.316 sec)\nINFO:tensorflow:global_step/sec: 314.58\nINFO:tensorflow:loss = 1.0781112, step = 47900 (0.318 sec)\nINFO:tensorflow:global_step/sec: 332.051\nINFO:tensorflow:loss = 1.3312261, step = 48000 (0.301 sec)\nINFO:tensorflow:global_step/sec: 346.645\nINFO:tensorflow:loss = 1.0130258, step = 48100 (0.289 sec)\nINFO:tensorflow:global_step/sec: 347.616\nINFO:tensorflow:loss = 1.0572872, step = 48200 (0.288 sec)\nINFO:tensorflow:global_step/sec: 346.103\nINFO:tensorflow:loss = 1.1172012, step = 48300 (0.289 sec)\nINFO:tensorflow:global_step/sec: 345.525\nINFO:tensorflow:loss = 1.2224332, step = 48400 (0.289 sec)\nINFO:tensorflow:global_step/sec: 344.002\nINFO:tensorflow:loss = 1.2341423, step = 48500 (0.291 sec)\nINFO:tensorflow:global_step/sec: 341.705\nINFO:tensorflow:loss = 1.0560459, step = 48600 (0.293 sec)\nINFO:tensorflow:global_step/sec: 144.349\nINFO:tensorflow:loss = 0.9799346, step = 48700 (0.693 sec)\nINFO:tensorflow:global_step/sec: 330.964\nINFO:tensorflow:loss = 1.0709302, step = 48800 (0.302 sec)\nINFO:tensorflow:global_step/sec: 328.099\nINFO:tensorflow:loss = 1.0980372, step = 48900 (0.305 sec)\nINFO:tensorflow:global_step/sec: 344.418\nINFO:tensorflow:loss = 1.161141, step = 49000 (0.290 sec)\nINFO:tensorflow:global_step/sec: 343.133\nINFO:tensorflow:loss = 1.1133852, step = 49100 (0.291 sec)\nINFO:tensorflow:global_step/sec: 350.749\nINFO:tensorflow:loss = 1.0951934, step = 49200 (0.285 sec)\nINFO:tensorflow:global_step/sec: 345.31\nINFO:tensorflow:loss = 1.0972655, step = 49300 (0.290 sec)\nINFO:tensorflow:global_step/sec: 347.432\nINFO:tensorflow:loss = 1.2974272, step = 49400 (0.288 sec)\nINFO:tensorflow:global_step/sec: 349.966\nINFO:tensorflow:loss = 1.2389125, step = 49500 (0.286 sec)\nINFO:tensorflow:global_step/sec: 352.067\nINFO:tensorflow:loss = 1.1128131, step = 49600 (0.284 sec)\nINFO:tensorflow:global_step/sec: 345.567\nINFO:tensorflow:loss = 1.0854824, step = 49700 (0.289 sec)\nINFO:tensorflow:global_step/sec: 353.579\nINFO:tensorflow:loss = 1.0695616, step = 49800 (0.283 sec)\nINFO:tensorflow:global_step/sec: 353.352\nINFO:tensorflow:loss = 1.1879729, step = 49900 (0.283 sec)\nINFO:tensorflow:Calling checkpoint listeners before saving checkpoint 50000...\nINFO:tensorflow:Saving checkpoints for 50000 into gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt.\nINFO:tensorflow:Calling checkpoint listeners after saving checkpoint 50000...\nINFO:tensorflow:Skip the current checkpoint eval due to throttle secs (60000 secs).\nINFO:tensorflow:global_step/sec: 28.8348\nINFO:tensorflow:loss = 0.99197626, step = 50000 (3.468 sec)\nINFO:tensorflow:global_step/sec: 332.439\nINFO:tensorflow:loss = 0.97236145, step = 
50100 (0.301 sec)\nINFO:tensorflow:global_step/sec: 344.74\nINFO:tensorflow:loss = 1.0635287, step = 50200 (0.290 sec)\nINFO:tensorflow:global_step/sec: 340.45\nINFO:tensorflow:loss = 1.1402745, step = 50300 (0.294 sec)\nINFO:tensorflow:global_step/sec: 346.961\nINFO:tensorflow:loss = 1.1059217, step = 50400 (0.288 sec)\nINFO:tensorflow:global_step/sec: 347.599\nINFO:tensorflow:loss = 1.1500279, step = 50500 (0.288 sec)\nINFO:tensorflow:global_step/sec: 157.87\nINFO:tensorflow:loss = 0.87957287, step = 50600 (0.633 sec)\nINFO:tensorflow:global_step/sec: 347.23\nINFO:tensorflow:loss = 1.0019858, step = 50700 (0.288 sec)\nINFO:tensorflow:global_step/sec: 340.162\nINFO:tensorflow:loss = 1.2761388, step = 50800 (0.294 sec)\nINFO:tensorflow:global_step/sec: 341.285\nINFO:tensorflow:loss = 1.1495982, step = 50900 (0.293 sec)\nINFO:tensorflow:global_step/sec: 343.284\nINFO:tensorflow:loss = 1.1036322, step = 51000 (0.291 sec)\nINFO:tensorflow:global_step/sec: 346.256\nINFO:tensorflow:loss = 1.0683929, step = 51100 (0.289 sec)\nINFO:tensorflow:global_step/sec: 336.716\nINFO:tensorflow:loss = 1.1423435, step = 51200 (0.297 sec)\nINFO:tensorflow:global_step/sec: 334.993\nINFO:tensorflow:loss = 1.190402, step = 51300 (0.298 sec)\nINFO:tensorflow:global_step/sec: 340.38\nINFO:tensorflow:loss = 1.1283497, step = 51400 (0.293 sec)\nINFO:tensorflow:global_step/sec: 348.967\nINFO:tensorflow:loss = 1.1106353, step = 51500 (0.287 sec)\nINFO:tensorflow:global_step/sec: 346.548\nINFO:tensorflow:loss = 1.0037339, step = 51600 (0.288 sec)\nINFO:tensorflow:global_step/sec: 348.007\nINFO:tensorflow:loss = 1.2215939, step = 51700 (0.287 sec)\nINFO:tensorflow:global_step/sec: 350.963\nINFO:tensorflow:loss = 1.2761807, step = 51800 (0.285 sec)\nINFO:tensorflow:global_step/sec: 346.035\nINFO:tensorflow:loss = 0.9973612, step = 51900 (0.289 sec)\nINFO:tensorflow:global_step/sec: 342.686\nINFO:tensorflow:loss = 1.0269008, step = 52000 (0.292 sec)\nINFO:tensorflow:global_step/sec: 342.164\nINFO:tensorflow:loss = 1.0359094, step = 52100 (0.292 sec)\nINFO:tensorflow:global_step/sec: 341.583\nINFO:tensorflow:loss = 1.110756, step = 52200 (0.293 sec)\nINFO:tensorflow:global_step/sec: 346.269\nINFO:tensorflow:loss = 1.1946311, step = 52300 (0.289 sec)\nINFO:tensorflow:global_step/sec: 348.294\nINFO:tensorflow:loss = 1.0684041, step = 52400 (0.287 sec)\nINFO:tensorflow:global_step/sec: 165.895\nINFO:tensorflow:loss = 1.389905, step = 52500 (0.603 sec)\nINFO:tensorflow:global_step/sec: 342.571\nINFO:tensorflow:loss = 1.2885227, step = 52600 (0.292 sec)\nINFO:tensorflow:global_step/sec: 340.96\nINFO:tensorflow:loss = 1.0094182, step = 52700 (0.293 sec)\nINFO:tensorflow:global_step/sec: 350.071\nINFO:tensorflow:loss = 1.2153224, step = 52800 (0.286 sec)\nINFO:tensorflow:global_step/sec: 338.427\nINFO:tensorflow:loss = 1.1167386, step = 52900 (0.296 sec)\nINFO:tensorflow:global_step/sec: 348.204\nINFO:tensorflow:loss = 1.1928575, step = 53000 (0.287 sec)\nINFO:tensorflow:global_step/sec: 336.445\nINFO:tensorflow:loss = 0.9940459, step = 53100 (0.297 sec)\nINFO:tensorflow:global_step/sec: 339.227\nINFO:tensorflow:loss = 1.0345839, step = 53200 (0.295 sec)\nINFO:tensorflow:global_step/sec: 341.823\nINFO:tensorflow:loss = 1.1056504, step = 53300 (0.292 sec)\nINFO:tensorflow:global_step/sec: 347.187\nINFO:tensorflow:loss = 1.1044167, step = 53400 (0.288 sec)\nINFO:tensorflow:global_step/sec: 347.865\nINFO:tensorflow:loss = 1.1544813, step = 53500 (0.287 sec)\nINFO:tensorflow:global_step/sec: 346.669\nINFO:tensorflow:loss = 1.1924697, 
step = 53600 (0.288 sec)\nINFO:tensorflow:global_step/sec: 347.968\nINFO:tensorflow:loss = 1.0132308, step = 53700 (0.287 sec)\nINFO:tensorflow:global_step/sec: 348.072\nINFO:tensorflow:loss = 1.1149609, step = 53800 (0.287 sec)\nINFO:tensorflow:global_step/sec: 346.813\nINFO:tensorflow:loss = 1.2164471, step = 53900 (0.288 sec)\nINFO:tensorflow:global_step/sec: 350.045\nINFO:tensorflow:loss = 1.0571082, step = 54000 (0.286 sec)\nINFO:tensorflow:global_step/sec: 346.075\nINFO:tensorflow:loss = 1.1205993, step = 54100 (0.289 sec)\nINFO:tensorflow:global_step/sec: 351.17\nINFO:tensorflow:loss = 1.1484734, step = 54200 (0.285 sec)\nINFO:tensorflow:global_step/sec: 339.571\nINFO:tensorflow:loss = 1.0630224, step = 54300 (0.295 sec)\nINFO:tensorflow:global_step/sec: 164.775\nINFO:tensorflow:loss = 1.0974889, step = 54400 (0.607 sec)\nINFO:tensorflow:global_step/sec: 339.798\nINFO:tensorflow:loss = 1.0769842, step = 54500 (0.294 sec)\nINFO:tensorflow:global_step/sec: 345.517\nINFO:tensorflow:loss = 1.0278537, step = 54600 (0.289 sec)\nINFO:tensorflow:global_step/sec: 342.435\nINFO:tensorflow:loss = 1.3574514, step = 54700 (0.292 sec)\nINFO:tensorflow:global_step/sec: 336.435\nINFO:tensorflow:loss = 1.1019528, step = 54800 (0.297 sec)\nINFO:tensorflow:global_step/sec: 342.882\nINFO:tensorflow:loss = 1.0225238, step = 54900 (0.292 sec)\nINFO:tensorflow:global_step/sec: 349.561\nINFO:tensorflow:loss = 0.94367826, step = 55000 (0.286 sec)\nINFO:tensorflow:global_step/sec: 350.431\nINFO:tensorflow:loss = 1.2015088, step = 55100 (0.285 sec)\nINFO:tensorflow:global_step/sec: 344.151\nINFO:tensorflow:loss = 1.3151857, step = 55200 (0.291 sec)\nINFO:tensorflow:global_step/sec: 352.357\nINFO:tensorflow:loss = 0.97069144, step = 55300 (0.284 sec)\nINFO:tensorflow:global_step/sec: 346.662\nINFO:tensorflow:loss = 1.168324, step = 55400 (0.288 sec)\nINFO:tensorflow:global_step/sec: 347.637\nINFO:tensorflow:loss = 1.1230863, step = 55500 (0.288 sec)\nINFO:tensorflow:global_step/sec: 344.304\nINFO:tensorflow:loss = 1.0397905, step = 55600 (0.290 sec)\nINFO:tensorflow:global_step/sec: 345.123\nINFO:tensorflow:loss = 1.037957, step = 55700 (0.290 sec)\nINFO:tensorflow:global_step/sec: 343.454\nINFO:tensorflow:loss = 1.060916, step = 55800 (0.291 sec)\nINFO:tensorflow:global_step/sec: 345.834\nINFO:tensorflow:loss = 1.1651084, step = 55900 (0.289 sec)\nINFO:tensorflow:global_step/sec: 344.214\nINFO:tensorflow:loss = 1.0948739, step = 56000 (0.290 sec)\nINFO:tensorflow:global_step/sec: 349.573\nINFO:tensorflow:loss = 1.2449062, step = 56100 (0.286 sec)\nINFO:tensorflow:global_step/sec: 162.847\nINFO:tensorflow:loss = 1.0496985, step = 56200 (0.614 sec)\nINFO:tensorflow:Calling checkpoint listeners before saving checkpoint 56250...\nINFO:tensorflow:Saving checkpoints for 56250 into gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt.\nINFO:tensorflow:Calling checkpoint listeners after saving checkpoint 56250...\nINFO:tensorflow:Skip the current checkpoint eval due to throttle secs (60000 secs).\n\ndecode_example: features = {'image_raw': FixedLenFeature(shape=[], dtype=tf.string, default_value=None), 'label': FixedLenFeature(shape=[], dtype=tf.int64, default_value=None)}\ndecode_example: image = Tensor(\"DecodeRaw:0\", shape=(None,), dtype=uint8)\ndecode_example: image = Tensor(\"Reshape:0\", shape=(28, 28, 1), dtype=uint8)\npreprocess_image: image = Tensor(\"sub:0\", shape=(28, 28, 1), dtype=float32)\ndecode_example: image = Tensor(\"sub:0\", shape=(28, 28, 1), dtype=float32)\ndecode_example: 
label = Tensor(\"Cast_1:0\", shape=(), dtype=int32)\nINFO:tensorflow:Calling model_fn.\n\nvanilla_gan_model: features = {'image': <tf.Tensor 'IteratorGetNext:0' shape=(None, 28, 28, 1) dtype=float32>}\nvanilla_gan_model: labels = Tensor(\"IteratorGetNext:1\", shape=(None,), dtype=int32)\nvanilla_gan_model: mode = eval\nvanilla_gan_model: params = {'train_file_pattern': 'gs://machine-learning-1234-bucket/gan/data/mnist/train*.tfrecord', 'eval_file_pattern': 'gs://machine-learning-1234-bucket/gan/data/mnist/test*.tfrecord', 'output_dir': 'gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model', 'train_batch_size': 32, 'train_steps': 56250, 'save_summary_steps': 100, 'save_checkpoints_steps': 10000, 'keep_checkpoint_max': 10, 'input_fn_autotune': False, 'eval_batch_size': 32, 'eval_steps': 100, 'start_delay_secs': 60000, 'throttle_secs': 60000, 'height': 28, 'width': 28, 'depth': 1, 'latent_size': 512, 'generator_hidden_units': [256, 512, 1024], 'generator_leaky_relu_alpha': 0.2, 'generator_final_activation': 'tanh', 'generator_l1_regularization_scale': 0.0, 'generator_l2_regularization_scale': 0.0, 'generator_optimizer': 'Adam', 'generator_learning_rate': 0.0002, 'generator_adam_beta1': 0.5, 'generator_adam_beta2': 0.999, 'generator_adam_epsilon': 1e-08, 'generator_clip_gradients': None, 'generator_train_steps': 1, 'discriminator_hidden_units': [1024, 512, 256], 'discriminator_leaky_relu_alpha': 0.2, 'discriminator_l1_regularization_scale': 0.0, 'discriminator_l2_regularization_scale': 0.0, 'discriminator_optimizer': 'Adam', 'discriminator_learning_rate': 0.0002, 'discriminator_adam_beta1': 0.5, 'discriminator_adam_beta2': 0.999, 'discriminator_adam_epsilon': 1e-08, 'discriminator_clip_gradients': None, 'discriminator_train_steps': 1, 'label_smoothing': 0.9}\n\nget_logits_and_losses: real_images = Tensor(\"Reshape:0\", shape=(None, 784), dtype=float32)\nget_logits_and_losses: Z = Tensor(\"random_normal:0\", shape=(None, 512), dtype=float32)\n\nCall generator with Z = Tensor(\"random_normal:0\", shape=(None, 512), dtype=float32).\n\nget_fake_images: network = Tensor(\"random_normal:0\", shape=(None, 512), dtype=float32)\nget_fake_images: network = Tensor(\"generator/layers_dense_0/BiasAdd:0\", shape=(None, 256), dtype=float32)\nget_fake_images: network = Tensor(\"generator/leaky_relu_0:0\", shape=(None, 256), dtype=float32)\nget_fake_images: network = Tensor(\"generator/layers_dense_1/BiasAdd:0\", shape=(None, 512), dtype=float32)\nget_fake_images: network = Tensor(\"generator/leaky_relu_1:0\", shape=(None, 512), dtype=float32)\nget_fake_images: network = Tensor(\"generator/layers_dense_2/BiasAdd:0\", shape=(None, 1024), dtype=float32)\nget_fake_images: network = Tensor(\"generator/leaky_relu_2:0\", shape=(None, 1024), dtype=float32)\nget_fake_images: generated_outputs = Tensor(\"generator/layers_dense_generated_outputs/Tanh:0\", shape=(None, 784), dtype=float32)\n\nCall discriminator with fake_images = Tensor(\"generator/layers_dense_generated_outputs/Tanh:0\", shape=(None, 784), dtype=float32).\n\nget_discriminator_logits: network = Tensor(\"generator/layers_dense_generated_outputs/Tanh:0\", shape=(None, 784), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator/layers_dense_0/BiasAdd:0\", shape=(None, 1024), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator/leaky_relu_0:0\", shape=(None, 1024), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator/layers_dense_1/BiasAdd:0\", shape=(None, 512), 
dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator/leaky_relu_1:0\", shape=(None, 512), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator/layers_dense_2/BiasAdd:0\", shape=(None, 256), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator/leaky_relu_2:0\", shape=(None, 256), dtype=float32)\nget_discriminator_logits: logits = Tensor(\"discriminator/layers_dense_logits/BiasAdd:0\", shape=(None, 1), dtype=float32)\n\nCall discriminator with real_images = Tensor(\"Reshape:0\", shape=(None, 784), dtype=float32).\n\nget_discriminator_logits: network = Tensor(\"Reshape:0\", shape=(None, 784), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator_1/layers_dense_0/BiasAdd:0\", shape=(None, 1024), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator_1/leaky_relu_0:0\", shape=(None, 1024), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator_1/layers_dense_1/BiasAdd:0\", shape=(None, 512), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator_1/leaky_relu_1:0\", shape=(None, 512), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator_1/layers_dense_2/BiasAdd:0\", shape=(None, 256), dtype=float32)\nget_discriminator_logits: network = Tensor(\"discriminator_1/leaky_relu_2:0\", shape=(None, 256), dtype=float32)\nget_discriminator_logits: logits = Tensor(\"discriminator_1/layers_dense_logits/BiasAdd:0\", shape=(None, 1), dtype=float32)\n\nget_generator_loss: generator_loss = Tensor(\"generator_loss:0\", shape=(), dtype=float32)\nget_generator_loss: generator_reg_loss = Tensor(\"Const_1:0\", shape=(), dtype=float32)\nget_generator_loss: generator_total_loss = Tensor(\"generator_total_loss:0\", shape=(), dtype=float32)\n\nget_discriminator_loss: discriminator_real_loss = Tensor(\"discriminator_real_loss:0\", shape=(), dtype=float32)\nget_discriminator_loss: discriminator_fake_loss = Tensor(\"discriminator_fake_loss:0\", shape=(), dtype=float32)\nget_discriminator_loss: discriminator_loss = Tensor(\"discriminator_loss:0\", shape=(), dtype=float32)\nget_discriminator_loss: discriminator_reg_loss = Tensor(\"Const_4:0\", shape=(), dtype=float32)\nget_discriminator_loss: discriminator_total_loss = Tensor(\"discriminator_total_loss:0\", shape=(), dtype=float32)\n\nget_eval_metric_ops: discriminator_logits = Tensor(\"discriminator_concat_logits:0\", shape=(None, 1), dtype=float32)\nget_eval_metric_ops: discriminator_labels = Tensor(\"discriminator_concat_labels:0\", shape=(None, 1), dtype=float32)\nget_eval_metric_ops: discriminator_probabilities = Tensor(\"discriminator_probabilities:0\", shape=(None, 1), dtype=float32)\nWARNING:tensorflow:Trapezoidal rule is known to produce incorrect PR-AUCs; please switch to \"careful_interpolation\" instead.\nWARNING:tensorflow:Trapezoidal rule is known to produce incorrect PR-AUCs; please switch to \"careful_interpolation\" instead.\nget_eval_metric_ops: eval_metric_ops = {'accuracy': (<tf.Tensor 'discriminator_accuracy/value:0' shape=() dtype=float32>, <tf.Tensor 'discriminator_accuracy/update_op:0' shape=() dtype=float32>), 'precision': (<tf.Tensor 'discriminator_precision/value:0' shape=() dtype=float32>, <tf.Tensor 'discriminator_precision/update_op:0' shape=() dtype=float32>), 'recall': (<tf.Tensor 'discriminator_recall/value:0' shape=() dtype=float32>, <tf.Tensor 'discriminator_recall/update_op:0' shape=() dtype=float32>), 'auc_roc': (<tf.Tensor 'discriminator_auc_roc/value:0' shape=() 
dtype=float32>, <tf.Tensor 'discriminator_auc_roc/update_op:0' shape=() dtype=float32>), 'auc_pr': (<tf.Tensor 'discriminator_auc_pr/value:0' shape=() dtype=float32>, <tf.Tensor 'discriminator_auc_pr/update_op:0' shape=() dtype=float32>)}\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Starting evaluation at 2020-07-24T00:14:20Z\nINFO:tensorflow:Graph was finalized.\nINFO:tensorflow:Restoring parameters from gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-56250\nINFO:tensorflow:Running local_init_op.\nINFO:tensorflow:Done running local_init_op.\nINFO:tensorflow:Evaluation [10/100]\nINFO:tensorflow:Evaluation [20/100]\nINFO:tensorflow:Evaluation [30/100]\nINFO:tensorflow:Evaluation [40/100]\nINFO:tensorflow:Evaluation [50/100]\nINFO:tensorflow:Evaluation [60/100]\nINFO:tensorflow:Evaluation [70/100]\nINFO:tensorflow:Evaluation [80/100]\nINFO:tensorflow:Evaluation [90/100]\nINFO:tensorflow:Evaluation [100/100]\nINFO:tensorflow:Inference Time : 1.36762s\nINFO:tensorflow:Finished evaluation at 2020-07-24-00:14:21\nINFO:tensorflow:Saving dict for global step 56250: accuracy = 0.0, auc_pr = 0.70085496, auc_roc = 0.6861256, global_step = 56250, loss = 1.3609688, precision = 0.5, recall = 1.0\nINFO:tensorflow:Saving 'checkpoint_path' summary for global step 56250: gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-56250\n\nserving_input_fn: feature_placeholders = {'Z': <tf.Tensor 'serving_input_placeholder_Z:0' shape=(None, 512) dtype=float32>}\nserving_input_fn: features = {'Z': <tf.Tensor 'serving_input_fn_identity_placeholder_Z:0' shape=(None, 512) dtype=float32>}\nINFO:tensorflow:Calling model_fn.\n\nvanilla_gan_model: features = {'Z': <tf.Tensor 'serving_input_fn_identity_placeholder_Z:0' shape=(None, 512) dtype=float32>}\nvanilla_gan_model: labels = None\nvanilla_gan_model: mode = infer\nvanilla_gan_model: params = {'train_file_pattern': 'gs://machine-learning-1234-bucket/gan/data/mnist/train*.tfrecord', 'eval_file_pattern': 'gs://machine-learning-1234-bucket/gan/data/mnist/test*.tfrecord', 'output_dir': 'gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model', 'train_batch_size': 32, 'train_steps': 56250, 'save_summary_steps': 100, 'save_checkpoints_steps': 10000, 'keep_checkpoint_max': 10, 'input_fn_autotune': False, 'eval_batch_size': 32, 'eval_steps': 100, 'start_delay_secs': 60000, 'throttle_secs': 60000, 'height': 28, 'width': 28, 'depth': 1, 'latent_size': 512, 'generator_hidden_units': [256, 512, 1024], 'generator_leaky_relu_alpha': 0.2, 'generator_final_activation': 'tanh', 'generator_l1_regularization_scale': 0.0, 'generator_l2_regularization_scale': 0.0, 'generator_optimizer': 'Adam', 'generator_learning_rate': 0.0002, 'generator_adam_beta1': 0.5, 'generator_adam_beta2': 0.999, 'generator_adam_epsilon': 1e-08, 'generator_clip_gradients': None, 'generator_train_steps': 1, 'discriminator_hidden_units': [1024, 512, 256], 'discriminator_leaky_relu_alpha': 0.2, 'discriminator_l1_regularization_scale': 0.0, 'discriminator_l2_regularization_scale': 0.0, 'discriminator_optimizer': 'Adam', 'discriminator_learning_rate': 0.0002, 'discriminator_adam_beta1': 0.5, 'discriminator_adam_beta2': 0.999, 'discriminator_adam_epsilon': 1e-08, 'discriminator_clip_gradients': None, 'discriminator_train_steps': 1, 'label_smoothing': 0.9}\n\nget_predictions_and_export_outputs: Z = Tensor(\"serving_input_fn_identity_placeholder_Z:0\", shape=(None, 512), dtype=float32)\n\nget_fake_images: network = 
Tensor(\"serving_input_fn_identity_placeholder_Z:0\", shape=(None, 512), dtype=float32)\nget_fake_images: network = Tensor(\"generator/layers_dense_0/BiasAdd:0\", shape=(None, 256), dtype=float32)\nget_fake_images: network = Tensor(\"generator/leaky_relu_0:0\", shape=(None, 256), dtype=float32)\nget_fake_images: network = Tensor(\"generator/layers_dense_1/BiasAdd:0\", shape=(None, 512), dtype=float32)\nget_fake_images: network = Tensor(\"generator/leaky_relu_1:0\", shape=(None, 512), dtype=float32)\nget_fake_images: network = Tensor(\"generator/layers_dense_2/BiasAdd:0\", shape=(None, 1024), dtype=float32)\nget_fake_images: network = Tensor(\"generator/leaky_relu_2:0\", shape=(None, 1024), dtype=float32)\nget_fake_images: generated_outputs = Tensor(\"generator/layers_dense_generated_outputs/Tanh:0\", shape=(None, 784), dtype=float32)\nget_predictions_and_export_outputs: fake_images = Tensor(\"generator/layers_dense_generated_outputs/Tanh:0\", shape=(None, 784), dtype=float32)\nget_predictions_and_export_outputs: generated_images = Tensor(\"Reshape:0\", shape=(None, 28, 28, 1), dtype=float32)\nget_predictions_and_export_outputs: predictions_dict = {'generated_images': <tf.Tensor 'Reshape:0' shape=(None, 28, 28, 1) dtype=float32>}\nget_predictions_and_export_outputs: export_outputs = {'predict_export_outputs': <tensorflow.python.saved_model.model_utils.export_output.PredictOutput object at 0x7f077447ec50>}\nINFO:tensorflow:Done calling model_fn.\nINFO:tensorflow:Signatures INCLUDED in export for Classify: None\nINFO:tensorflow:Signatures INCLUDED in export for Regress: None\nINFO:tensorflow:Signatures INCLUDED in export for Predict: ['predict_export_outputs', 'serving_default']\nINFO:tensorflow:Signatures INCLUDED in export for Train: None\nINFO:tensorflow:Signatures INCLUDED in export for Eval: None\nINFO:tensorflow:Restoring parameters from gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/model.ckpt-56250\nINFO:tensorflow:Assets added to graph.\nINFO:tensorflow:No assets to write.\nINFO:tensorflow:SavedModel written to: gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/export/exporter/temp-1595549661/saved_model.pb\nINFO:tensorflow:Loss for final step: 1.4244734.\n" ] ], [ [ "## Prediction", "_____no_output_____" ] ], [ [ "!gsutil ls gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/export/exporter", "gs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/export/exporter/\ngs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/export/exporter/1595549492/\ngs://machine-learning-1234-bucket/gan/vanilla_gan/trained_model/export/exporter/1595549661/\n" ], [ "loaded = tf.saved_model.load(\n export_dir=os.path.join(\n arguments[\"output_dir\"], \"export\", \"exporter\", \"1595549661\"\n )\n)\nprint(list(loaded.signatures.keys()))", "['serving_default', 'predict_export_outputs']\n" ], [ "infer = loaded.signatures[\"serving_default\"]\nprint(infer.structured_outputs)", "{'generated_images': <tf.Tensor 'Reshape:0' shape=(None, 28, 28, 1) dtype=float32>}\n" ], [ "Z = tf.random.normal(shape=(10, 512))\npredictions = infer(Z)", "_____no_output_____" ] ], [ [ "Convert image back to the original scale.", "_____no_output_____" ] ], [ [ "generated_images = np.clip(\n a=tf.cast(\n x=((tf.reshape(\n tensor=predictions[\"generated_images\"],\n shape=[\n -1,\n arguments[\"height\"],\n arguments[\"width\"],\n arguments[\"depth\"]\n ]\n ) + 1.0) * (255. 
/ 2)),\n dtype=tf.int32\n ),\n a_min=0,\n a_max=255\n)", "_____no_output_____" ], [ "print(generated_images.shape)", "(10, 28, 28, 1)\n" ], [ "def plot_images(images):\n \"\"\"Plots images.\n\n Args:\n images: np.array, array of images of\n [num_images, image_size, image_size, num_channels].\n \"\"\"\n num_images = len(images)\n\n plt.figure(figsize=(20, 20))\n for i in range(num_images):\n image = images[i]\n plt.subplot(1, num_images, i + 1)\n plt.xticks([])\n plt.yticks([])\n plt.grid(False)\n plt.imshow(\n tf.reshape(image, image.shape[:-1]),\n cmap=\"gray_r\"\n )\n plt.show()", "_____no_output_____" ], [ "plot_images(generated_images)", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
cb5690a8421aca4c03ca5aab4e416739ba826f5d
119,863
ipynb
Jupyter Notebook
notebooks/uPVT. Oil viscosity.ipynb
Shabonasar/unifloc
1f12d6b4110a9ff0e10817560ad99d55c9133954
[ "MIT" ]
null
null
null
notebooks/uPVT. Oil viscosity.ipynb
Shabonasar/unifloc
1f12d6b4110a9ff0e10817560ad99d55c9133954
[ "MIT" ]
null
null
null
notebooks/uPVT. Oil viscosity.ipynb
Shabonasar/unifloc
1f12d6b4110a9ff0e10817560ad99d55c9133954
[ "MIT" ]
null
null
null
409.088737
29,244
0.936135
[ [ [ "Водопьян А.О. Кобзарь О.С. Хабибуллин Р.А. 2019 г.\n\n# Вязкость нефти\n\nИсточники:\n1. Beggs, H.D. and Robinson, J.R. “Estimating the Viscosity of Crude Oil Systems.”\n Journal of Petroleum Technology. Vol. 27, No. 9 (1975)\n2. Vazquez M. et al. Correlations for fluid physical property prediction //SPE Annual Fall Technical Conference and Exhibition. – Society of Petroleum Engineers, 1977.\n\n## Общие принципы \n\nКорреляции для вязкостей были получены с помощью анализа большого объема лабораторных исследований. Для получения готовых формул соблюдалось два взаимо противоположных стремления: охватить корреляцией наиболее большое количество разных нефтей и при этом получить приемлемую точность корреляции.\n\nВязкость нефти условно делится на 3 типа:\n\n1. Вязкость дегазированной нефти - dead oil viscosity.\n2. Вязкость нефти при давлении меньшем, чем давление насыщения - saturated oil viscosity \n3. Вязкость нефти при давлении большем, чем давление насыщения - undersaturated oil viscosity\n\nДля каждой вязкости своя корреляция, причем обычно следующий тип вязкости основывается на предыдущем при увеличении давлении от атмосферного.\n\nUndersaturated oil viscosity, в инностранной литературе, \"недонасыщенная нефть\". Дело в том, что при давлении большем, чем давление насыщения, дополнительное количество газа может растворится в нефти, однако весь доступный газ уже растворился при давлении насыщения. \n\n## Вязкость дегазированной нефти [1] \n$$ \\mu_{OD} = 10^X - 1 $$\n\nгде:\n\n$$ X = yT^{-1.163} $$\n\n$$ y = 10 ^ Z $$\n\n$$ Z = 3.0324 - 0.02023 \\gamma_o $$\n\n\n## Вязкость нефти, содержащей растворенный газ ($P \\leq P_b$) [1]\n\n$$\\mu = A \\mu_{OD}^B$$\n\nгде:\n\n$$A = 10.715(R_s + 100)^{-0.515}$$\n\n$$B = 5.44(R_s + 150)^{-0.338}$$\n\n\n### Номенклатура:\n\n$R_s$ - газосодержание, $scf/STB$\n\n$T$ - температура, $^{\\circ} F$\n\n$\\mu_{OD}$ - вязкость дегазированной нефти при данной $T$, сПуаз\n\n$\\mu$ - вязкость газонасыщенной нефти при данной $T$, сПуаз\n\n$\\gamma_o $ - плотность нефти, $^{\\circ} API$\n\n\n## Вязкость нефти, содержащей растворенный газ ($P > P_b$) [2]\n\n$$\\mu_o = \\mu_{ob}(p/p_b)^m$$\nгде: \n$$ m = C_1p^{C_2} exp(C_3 + C_4 p ) $$\nа коэффициенты равны:\n\n$C_1 = 2.6$\n\n$C_2 = 1.178$\n\n$C_3 = -11.513$\n\n$C_4 = -8.98 \\times 10^{-5}$\n\n\n", "_____no_output_____" ] ], [ [ "import sys\nsys.path.append('../')\nimport uniflocpy.uPVT.PVT_fluids as PVT\nimport matplotlib.pyplot as plt\nimport matplotlib as mpl\nimport numpy as np\nimport pandas as pd\nimport pylab\nimport uniflocpy.uPVT.PVT_correlations as PVTcorr\nimport uniflocpy.uPVT.PVT_fluids as PVT_fluids\n%matplotlib inline", "_____no_output_____" ], [ "def show_example(legend, title, xlabel, ylabel):\n plt.grid(True)\n plt.title(title, color='black')\n plt.ylabel(ylabel, color='black')\n plt.xlabel(xlabel, color='black')\n plt.legend(legend)\n plt.show()", "_____no_output_____" ], [ "list_t_k = np.arange(278.15,400,5)\nlist_t_c = list_t_k - 273.15\nlist_gamma_oil = [0.6, 0.65, 0.7, 0.75]\n\nfor sensivity_parametr in list_gamma_oil:\n mu_do_cp = PVTcorr.unf_deadoilviscosity_Beggs_cP(sensivity_parametr, list_t_k)\n plt.plot(list_t_c, mu_do_cp, linewidth=3)\n\nshow_example(list_gamma_oil,'Вязкость дегазированной нефти в зависимости от относительной плотности', \n 'Температура, C', '$\\mu_{DO}$, сПуаз' )", "_____no_output_____" ], [ "list_rs_m3m3 = np.arange(0, 500, 10)\nrs_m3m3 = 50 \nfor sensivity_parametr in list_gamma_oil:\n mu_do_cp = PVTcorr.unf_deadoilviscosity_Beggs_cP(sensivity_parametr, 
list_t_k)\n mu_cp = PVTcorr.unf_saturatedoilviscosity_Beggs_cP(mu_do_cp, rs_m3m3)\n plt.plot(list_t_c, mu_cp, linewidth=3)\n\nshow_example(list_gamma_oil,'Вязкость нефти при $P \\leq P_b$ в зависимости от относительной плотности', \n 'Температура, C', '$\\mu_{DO}$, сПуаз' )", "_____no_output_____" ], [ "list_rs_m3m3 = np.arange(0, 500, 10)\nrs_m3m3 = 50 \np_MPaa = 10\npb_MPaa = 8 \nfor sensivity_parametr in list_gamma_oil:\n mu_do_cp = PVTcorr.unf_deadoilviscosity_Beggs_cP(sensivity_parametr, list_t_k)\n mu_cp = PVTcorr.unf_saturatedoilviscosity_Beggs_cP(mu_do_cp, rs_m3m3)\n mu_cp_p = PVTcorr.unf_undersaturatedoilviscosity_VB_cP(p_MPaa, pb_MPaa, mu_cp)\n plt.plot(list_t_c, mu_cp_p, linewidth=3)\n\nshow_example(list_gamma_oil,'Вязкость нефти при $P > P_b$ в зависимости от относительной плотности', \n 'Температура, C', '$\\mu_{DO}$, сПуаз' )", "_____no_output_____" ], [ "rsb_labels = ('400', '200', '50')\nfluid_Standing_1 = PVT_fluids.FluidStanding(rsb_m3m3 = 400)\nfluid_Standing_2 = PVT_fluids.FluidStanding(rsb_m3m3 = 200)\nfluid_Standing_3 = PVT_fluids.FluidStanding(rsb_m3m3 = 50)\np_bar = range(1,700)\nt_c = 80\nmu_oil_1 = []\nmu_oil_2 = []\nmu_oil_3 = []\nfor i in p_bar:\n fluid_Standing_1.calc(i, t_c)\n fluid_Standing_2.calc(i, t_c)\n fluid_Standing_3.calc(i, t_c)\n mu_oil_1.append(fluid_Standing_1.mu_oil_cP)\n mu_oil_2.append(fluid_Standing_2.mu_oil_cP)\n mu_oil_3.append(fluid_Standing_3.mu_oil_cP)\n ", "_____no_output_____" ], [ "plt.plot(p_bar, mu_oil_1, linewidth=3)\nplt.plot(p_bar, mu_oil_2, linewidth=3)\nplt.plot(p_bar, mu_oil_3, linewidth=3)\nshow_example(rsb_labels,'Вязкость нефти в зависимости от ГФ', \n 'Давление, бар', '$\\mu_{DO}$, сПуаз' )", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ] ]
cb569c39191ea23bc538d8f6bc3d66dfa6ece7a1
169,222
ipynb
Jupyter Notebook
lab11.ipynb
carlsondorman/IA241
377a0e4b085905e1c35fc3c15caa33ba3af2cdfb
[ "MIT" ]
null
null
null
lab11.ipynb
carlsondorman/IA241
377a0e4b085905e1c35fc3c15caa33ba3af2cdfb
[ "MIT" ]
null
null
null
lab11.ipynb
carlsondorman/IA241
377a0e4b085905e1c35fc3c15caa33ba3af2cdfb
[ "MIT" ]
null
null
null
216.396419
43,008
0.884607
[ [ [ "import pandas\n\ndf = pandas.read_excel(\"s3://lab11---2019/house_price (1).xls\")\n\ndf[:10]", "_____no_output_____" ], [ "df.describe()", "_____no_output_____" ], [ "df.hist(figsize=(20,20))\n", "_____no_output_____" ], [ "df.groupby('house_type').mean()", "_____no_output_____" ], [ "df[:10]", "_____no_output_____" ], [ "!pip install mglearn\nimport sklearn\nfrom sklearn.model_selection import train_test_split\nfrom matplotlib import pyplot as plt\n%matplotlib inline\nimport pandas\nimport numpy as np\nimport mglearn\nfrom collections import Counter\nfrom sklearn.metrics import cohen_kappa_score\nfrom sklearn import preprocessing\n", "Collecting mglearn\n\u001b[?25l Downloading https://files.pythonhosted.org/packages/fb/01/8d3630ecc767c9de96a9c46e055f2a3a5f9e14a47d3d0348a36a5005fe67/mglearn-0.1.7.tar.gz (540kB)\n\u001b[K 100% |████████████████████████████████| 542kB 20.5MB/s ta 0:00:01\n\u001b[?25hRequirement already satisfied: numpy in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from mglearn) (1.15.4)\nRequirement already satisfied: matplotlib in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from mglearn) (2.2.2)\nRequirement already satisfied: scikit-learn in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from mglearn) (0.19.1)\nRequirement already satisfied: pandas in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from mglearn) (0.22.0)\nRequirement already satisfied: pillow in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from mglearn) (5.2.0)\nRequirement already satisfied: cycler in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from mglearn) (0.10.0)\nRequirement already satisfied: imageio in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from mglearn) (2.3.0)\nRequirement already satisfied: pytz in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from matplotlib->mglearn) (2018.4)\nRequirement already satisfied: kiwisolver>=1.0.1 in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from matplotlib->mglearn) (1.0.1)\nRequirement already satisfied: six>=1.10 in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from matplotlib->mglearn) (1.11.0)\nRequirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from matplotlib->mglearn) (2.2.0)\nRequirement already satisfied: python-dateutil>=2.1 in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from matplotlib->mglearn) (2.7.3)\nRequirement already satisfied: setuptools in /home/ec2-user/anaconda3/envs/python3/lib/python3.6/site-packages (from kiwisolver>=1.0.1->matplotlib->mglearn) (39.1.0)\nBuilding wheels for collected packages: mglearn\n Running setup.py bdist_wheel for mglearn ... 
\u001b[?25ldone\n\u001b[?25h Stored in directory: /home/ec2-user/.cache/pip/wheels/74/cf/8d/04f4932d15854a36726c6210763c7127e62de28f5c8ddfcf3b\nSuccessfully built mglearn\nInstalling collected packages: mglearn\nSuccessfully installed mglearn-0.1.7\n\u001b[33mYou are using pip version 10.0.1, however version 19.0.3 is available.\nYou should consider upgrading via the 'pip install --upgrade pip' command.\u001b[0m\n" ], [ "df = pandas.read_excel('s3://lab11---2019/house_price (1).xls')\n# combine multipl columns into a 2D array\n# also convert the integer data to float data\nX = np.column_stack((df.built_in.astype(float),df.price.astype(float))) \nX = preprocessing.scale(X) # scale the data before training the model\ny = df.house_type\nX_train, X_test, y_train, y_test = train_test_split(X, y,test_size =0.3,stratify = y, random_state=0) \n\n# for classification, make sure a stratify splitting method is selected\nmglearn.discrete_scatter(X[:,0],X[:,1],y) # use mglearn to visualize data\n\nplt.legend(y,loc='best')\nplt.xlabel('build_in')\nplt.ylabel('house price')\nplt.show()", "_____no_output_____" ], [ "\nfrom sklearn.neural_network import MLPClassifier\n\nmlp = MLPClassifier(solver='lbfgs', hidden_layer_sizes=(20,20,20), random_state=0).fit(X_train, y_train)\nmglearn.discrete_scatter(X_train[:, 0], X_train[:, 1],mlp.predict(X_train))\nplt.legend(y,loc='best')\nplt.xlabel('build_in')\nplt.ylabel('house price')\nplt.show()\n\nprint(\"Training set accuracy: {:.2f}\".format(mlp.score(X_train, y_train)))\nprint (\"Training Kappa: {:.3f}\".format(cohen_kappa_score(y_train,mlp.predict(X_train))))\nprint(\"Test set accuracy: {:.2f}\".format(mlp.score(X_test, y_test)))\nprint (\"Test Kappa: {:.3f}\".format(cohen_kappa_score(y_test,mlp.predict(X_test))))", "_____no_output_____" ], [ "fig, axes = plt.subplots(2, 4, figsize=(20, 8))\nfor axx, n_hidden_nodes in zip(axes, [10, 20]):\n for ax, alpha in zip(axx, [0.0001, 0.01, 0.1, 1]):\n mlp = MLPClassifier(solver='lbfgs', random_state=0,\n hidden_layer_sizes=[n_hidden_nodes, n_hidden_nodes],\n alpha=alpha)\n\n mlp.fit(X_train, y_train)\n mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], mlp.predict(X_train), ax=ax)\n ax.set_title(\"n_hidden=[{}, {}]\\nalpha={:.4f}\\nkapa={:.4f}\".format(\n n_hidden_nodes, n_hidden_nodes, alpha,cohen_kappa_score(y_train,mlp.predict(X_train))))\n \nplt.subplots_adjust(hspace=0.5)", "_____no_output_____" ], [ "\nmlp = MLPClassifier(solver='lbfgs', hidden_layer_sizes=(20,20), random_state=0).fit(X_train, y_train)\nfig, axes = plt.subplots(1, 3, figsize=(20, 8))\nfor i , ax in zip(range(3),axes):\n\n img = ax.imshow(mlp.coefs_[i], interpolation='none', cmap='viridis')\n\n ax.set_title(\" No.{} layer\".format(i))\n ax.set_xlabel(\"Columns in weight matrix\")\n ax.set_ylabel(\"Input feature\")\n fig.colorbar(img, ax = ax)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb56aeeb16e092e7be30f33bbb01f91f6d13e3ab
8,682
ipynb
Jupyter Notebook
pytorch-Learning/firstTry.ipynb
GalileoSama/MachineLearning
2b0a88884a4b0426f9396fe5275beb9778411378
[ "MIT" ]
null
null
null
pytorch-Learning/firstTry.ipynb
GalileoSama/MachineLearning
2b0a88884a4b0426f9396fe5275beb9778411378
[ "MIT" ]
null
null
null
pytorch-Learning/firstTry.ipynb
GalileoSama/MachineLearning
2b0a88884a4b0426f9396fe5275beb9778411378
[ "MIT" ]
null
null
null
22.376289
428
0.470974
[ [ [ "#快速入门学习", "_____no_output_____" ] ], [ [ "import torch.nn as nn\nimport torch.nn.functional as F\nimport torch", "_____no_output_____" ], [ "class Net(nn.Module):\n def __init__(self):\n super(Net, self).__init__()\n self.conv1 = nn.Conv2d(1, 6, 5)\n self.conv2 = nn.Conv2d(6, 16, 5)\n self.fc1 = nn.Linear(400, 120)\n self.fc2 = nn.Linear(120, 84)\n self.fc3 = nn.Linear(84, 10)\n \n def forward(self, x):\n x = F.max_pool2d(F.relu(self.conv1(x)),(2, 2))\n x = F.max_pool2d(F.relu(self.conv2(x)), 2)\n x = x.view(x.size()[0], -1)\n x = F.relu(self.fc1(x))\n x = F.relu(self.fc2(x))\n x = self.fc3(x)\n return x", "_____no_output_____" ], [ "net = Net()\nnet", "_____no_output_____" ], [ "params = list(net.parameters())\nlen(params)", "_____no_output_____" ], [ "for name,parameters in net.named_parameters():\n print(name,':',parameters.size())", "conv1.weight : torch.Size([6, 1, 5, 5])\nconv1.bias : torch.Size([6])\nconv2.weight : torch.Size([16, 6, 5, 5])\nconv2.bias : torch.Size([16])\nfc1.weight : torch.Size([120, 400])\nfc1.bias : torch.Size([120])\nfc2.weight : torch.Size([84, 120])\nfc2.bias : torch.Size([84])\nfc3.weight : torch.Size([10, 84])\nfc3.bias : torch.Size([10])\n" ], [ "input = torch.rand(1, 1, 32, 32)\ninput", "_____no_output_____" ], [ "out = net.forward(input)\nout", "_____no_output_____" ] ], [ [ "#tensor学习\n", "_____no_output_____" ] ], [ [ "# arrange 与 linspace 的区别如下\na = torch.arange(1, 5, 0.5)\nb = torch.linspace(1, 5, 6)\nprint(\"a:\", a)\nprint(\"b:\", b)", "a: tensor([1.0000, 1.5000, 2.0000, 2.5000, 3.0000, 3.5000, 4.0000, 4.5000])\nb: tensor([1.0000, 1.8000, 2.6000, 3.4000, 4.2000, 5.0000])\n" ], [ "# 创建tensor 以list\na = [[1,2,3],[3,4,5]]\na = torch.Tensor(a)\nprint(a.tolist()) # list 与 tensor相互转换", "[[1.0, 2.0, 3.0], [3.0, 4.0, 5.0]]\n" ], [ "# 以成员元素具体值来创建Tensor\n# () [] 创建结果相同\nc = torch.Tensor(((1, 3), (2, 3)))\nd = torch.Tensor([[1, 3],[2, 3]])\n\nprint(c.equal(d))", "True\n" ], [ "# 区别:分布不同,randn是标准正态,rand是均匀分布\ne = torch.randn(5)\nf = torch.rand(5)\nprint(e)\nprint(f)", "tensor([-0.5737, -1.1302, -0.4247, -0.3141, 0.4999])\ntensor([0.3303, 0.1534, 0.5441, 0.7393, 0.7936])\n" ], [ "# tensor 与 Tensor\n# tensor 的操作基本类似于 numpy\nscalar = torch.tensor(1)\nh = torch.Tensor(1)\nprint(scalar.shape)\nprint(h.shape)\n\nvector = torch.tensor([1, 3]) # 不可以torch.tensor(1, 3) \ntensor = torch.Tensor(1, 3)\nprint(vector.shape)\nprint(tensor.shape)", "torch.Size([])\ntorch.Size([1])\ntorch.Size([2])\ntorch.Size([1, 3])\n" ], [ "# tensor 对比 Tensor\nempty_tensor = t.tensor([])\nempty_Tensor = t.Tensor([])\nempty_tensor.shape == empty_Tensor.shape", "_____no_output_____" ] ], [ [ "##tensor维度操作:view、squeeze、unsqueeze、resize(可超过、少于本tensor的维度) \n##非inplace操作,对应的 xxx_()为inplace操作", "_____no_output_____" ] ], [ [ "a = torch.arange(0, 4)\nb = a.view(-1, 2) # view、unsqueeze非inplace操作\nb.shape", "_____no_output_____" ], [ "b.shape\nc = b.unsqueeze(2)\nc.shape # 在位置1处增加了一维(0/1/2)", "_____no_output_____" ], [ "b = a.unsqueeze(0).shape\n", "torch.Size([1, 4])\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
cb56b3c82da9c8dc4f1fc653be936cbd33fb2f2a
224,177
ipynb
Jupyter Notebook
some-ml-examples/PyDataSeattle-master/notebooks/5. Tidy Data.ipynb
kryvokhyzha/examples-and-courses
477e82ee24e6abba8a6b6d92555f2ed549ca682c
[ "MIT" ]
1
2021-12-13T15:41:48.000Z
2021-12-13T15:41:48.000Z
some-ml-examples/PyDataSeattle-master/notebooks/5. Tidy Data.ipynb
kryvokhyzha/examples-and-courses
477e82ee24e6abba8a6b6d92555f2ed549ca682c
[ "MIT" ]
15
2021-09-12T15:06:13.000Z
2022-03-31T19:02:08.000Z
some-ml-examples/PyDataSeattle-master/notebooks/5. Tidy Data.ipynb
kryvokhyzha/examples-and-courses
477e82ee24e6abba8a6b6d92555f2ed549ca682c
[ "MIT" ]
1
2022-01-29T00:37:52.000Z
2022-01-29T00:37:52.000Z
100.392745
83,404
0.757424
[ [ [ "# Tidy Data\n\n> Structuring datasets to facilitate analysis [(Wickham 2014)](http://www.jstatsoft.org/v59/i10/paper)\n\nIf there's one maxim I can impart it's that your tools shouldn't get in the way of your analysis. Your problem is already difficult enough, don't let the data or your tools make it any harder.\n\n## The Rules\n\nIn a tidy dataset...\n\n1. Each variable forms a column\n2. Each observation forms a row\n3. Each type of observational unit forms a table\n\nWe'll cover a few methods that help you get there.", "_____no_output_____" ], [ "Based on [this](http://stackoverflow.com/questions/22695680/python-pandas-timedelta-specific-rows) StackOverflow question.", "_____no_output_____" ] ], [ [ "import numpy as np\nimport pandas as pd\nimport seaborn as sns\nimport matplotlib.pyplot as plt\n\npd.options.display.max_rows = 10\n%matplotlib inline", "_____no_output_____" ] ], [ [ "Earlier, I fetched some data\n\n```python\ntables = pd.read_html(\"http://www.basketball-reference.com/leagues/NBA_2015_games.html\")\ngames = tables[0]\ngames.to_csv('data/games.csv', index=False)\n```", "_____no_output_____" ] ], [ [ "pd.read_html?", "_____no_output_____" ], [ "!head -n 2 data/games.csv", "Date,Unnamed: 1,Visitor/Neutral,PTS,Home/Neutral,PTS.1,Unnamed: 6,Notes\r\n\"Tue, Oct 28, 2014\",Box Score,Houston Rockets,108,Los Angeles Lakers,90,,\r\n" ] ], [ [ "The Question:\n> **How many days of rest did each team get between each game?**\n\nWhether or not your dataset is tidy depends on your question. Given our question, what is an observation?", "_____no_output_____" ] ], [ [ "column_names = ['date', '_', 'away_team', 'away_points', 'home_team',\n 'home_points', 'n_ot', 'notes']\ngames = (pd.read_csv('data/games.csv', names=column_names, parse_dates=['date'],\n skiprows=1)\n .drop(['_', 'notes', 'n_ot'], axis='columns')\n .set_index('date', append=True))\ngames.index.names = ['game_id', 'date']\ngames.head()", "_____no_output_____" ] ], [ [ "Is `games` a tidy dataset, given our question? No, we have multiple observations (teams) per row. We'll use `pd.melt` to fix that.", "_____no_output_____" ] ], [ [ "tidy = pd.melt(games.sort_index().reset_index(),\n id_vars=['game_id', 'date'], value_vars=['away_team', 'home_team'],\n value_name='team')", "_____no_output_____" ], [ "tidy.head()", "_____no_output_____" ] ], [ [ "Now the translation from question to operation is direct:", "_____no_output_____" ] ], [ [ "# For each team... get number of dates between games\ntidy.groupby('team')['date'].diff().dt.days - 1", "_____no_output_____" ], [ "tidy['rest'] = tidy.sort('date').groupby('team').date.diff().dt.days - 1\ntidy.dropna().head()", "_____no_output_____" ], [ "un = pd.pivot_table(tidy, values='rest',\n index=['game_id', 'date'],\n columns='variable').rename(\n columns={'away_team': 'away_rest', 'home_team': 'home_rest'}\n)\nun.columns.name = None", "_____no_output_____" ], [ "un.dropna().head()", "_____no_output_____" ], [ "df = pd.concat([games, un], axis=1)\ndf", "_____no_output_____" ], [ "g = sns.FacetGrid(data=tidy.dropna(), col='team', col_wrap=5, hue='team')\ng.map(sns.barplot, \"variable\", \"rest\");", "_____no_output_____" ], [ "delta = (un.home_rest - un.away_rest).dropna().astype(int)\n(delta.value_counts()\n .reindex(np.arange(delta.min(), delta.max() + 1), fill_value=0)\n .sort_index().plot(kind='bar', color='k', width=.9, rot=0, figsize=(12, 6)))", "_____no_output_____" ] ], [ [ "# Stack / Unstack", "_____no_output_____" ], [ "An \"observation\" depends on the question. 
Home team advantage?", "_____no_output_____" ] ], [ [ "home_adv = games.home_points - games.away_points\nax = (home_adv).plot(kind='hist', bins=80, color='k', figsize=(10, 5))\nax.set_xlim(-40, 40)\nmu = home_adv.mean()\nax.vlines(mu, *ax.get_ylim(), color='steelblue', linewidth=3)\nprint('Home win percent:', (home_adv > 0).mean())", "Home win percent: 0.574796747967\n" ] ], [ [ "# Team Strength", "_____no_output_____" ], [ "# Mini Project: Home Court Advantage?\n\nWhat's the effect (in terms of probability to win) of being\nthe home team.", "_____no_output_____" ], [ "### Step 1: Calculate Win %", "_____no_output_____" ], [ "We need to create an indicator for whether the home team won.\nAdd it as a column called `home_win` in `games`.", "_____no_output_____" ] ], [ [ "games['home_win'] = ... # fill this in", "_____no_output_____" ], [ "#%load -r 1:4 solutions_tidy.py", "_____no_output_____" ] ], [ [ "### Step 2: Find the win percent for each team\n\nTeams are split across two columns. It's easiest to calculate the number of wins and\nnumber of games as away, and the number of wins and number of games as home. Then\ncombine those two results to get the win percent.", "_____no_output_____" ] ], [ [ "wins_as_home = games.groupby('').agg([])\n# hint: use `~` to flip an array of booleans\nwins_as_away = ...\n\nwins_as_home.columns = ['n_wins', 'n_games']\nwins_as_away.columns = ['n_wins', 'n_games']", "_____no_output_____" ], [ "%load -r 5:13 solutions_tidy.py", "_____no_output_____" ] ], [ [ "Now add `wins_as_home` and `wins_as_away` to get a DataFrame with\ntwo columsn, `n_wins`, and `n_games` and one row per team.\n\nFinally, calculate the win percent.", "_____no_output_____" ] ], [ [ "%load -r 14:20 solutions_tidy.py", "_____no_output_____" ], [ "strength.order().plot(kind='barh', figsize=(5, 12))", "_____no_output_____" ] ], [ [ "Bring the `strength` valuess in for each team, for each game.", "_____no_output_____" ] ], [ [ "games.head()", "_____no_output_____" ] ], [ [ "For SQL people\n\n```sql\nSELECT *\nFROM games NATURAL JOIN strength\n```\n\nWe just need to get the names worked out.", "_____no_output_____" ] ], [ [ "strength.head().reset_index().rename(columns=lambda x: 'away_' + x)", "_____no_output_____" ], [ "(pd.merge(games.reset_index(), strength.reset_index().add_prefix('away_'))\n .pipe(pd.merge, strength.reset_index().add_prefix('home_'))\n .set_index(['game_id', 'date']))", "_____no_output_____" ] ], [ [ "For python people", "_____no_output_____" ] ], [ [ "games = games.assign(away_strength=games.away_team.map(strength),\n home_strength=games.home_team.map(strength))\ngames.head()", "_____no_output_____" ], [ "X = pd.concat([games, un], axis=1).set_index(['away_team', 'home_team'], append=True).dropna()", "_____no_output_____" ], [ "X.head()", "_____no_output_____" ], [ "X['home_win'] = X.home_win.astype(int) # for statsmodels", "_____no_output_____" ], [ "import statsmodels.api as sm", "_____no_output_____" ], [ "mod = sm.Logit.from_formula('home_win ~ home_strength + away_strength + home_rest + away_rest', X)\nres = mod.fit()\nres.summary()", "Optimization terminated successfully.\n Current function value: 0.564564\n Iterations 6\n" ], [ "mod = sm.Logit.from_formula('home_win ~ rest_difference',\n X.assign(rest_difference=lambda df: df.home_rest - df.away_rest))\nres = mod.fit()\nres.summary()", "Optimization terminated successfully.\n Current function value: 0.682125\n Iterations 4\n" ], [ "mod = sm.OLS.from_formula('spread ~ home_strength + away_strength + 
rest_difference',\n X.assign(rest_difference=lambda df: df.home_rest - df.away_rest,\n spread=lambda df: df.home_points - df.away_points))\nres = mod.fit()\nres.summary()", "_____no_output_____" ] ], [ [ "# Recap\n\n- Tidy data: one row per observation\n - melt / stack: wide to long\n - pivot_table / unstack: long to wide", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ] ]
cb56bdfc32feffe88774ff991adcd272a4708215
3,868
ipynb
Jupyter Notebook
Duke University-Statistics with R/Introduction to Probability and Data with R/Quiz solutions/Week 1/.ipynb_checkpoints/Week 1-checkpoint.ipynb
FTiniNadhirah/Coursera-courses-answers
d59311917b740a6ce8b8361e9ac79657b103bb75
[ "Apache-2.0" ]
73
2020-08-26T03:03:03.000Z
2022-03-13T17:35:47.000Z
Duke University-Statistics with R/Introduction to Probability and Data with R/Quiz solutions/Week 1/.ipynb_checkpoints/Week 1-checkpoint.ipynb
FTiniNadhirah/Coursera-courses-answers
d59311917b740a6ce8b8361e9ac79657b103bb75
[ "Apache-2.0" ]
null
null
null
Duke University-Statistics with R/Introduction to Probability and Data with R/Quiz solutions/Week 1/.ipynb_checkpoints/Week 1-checkpoint.ipynb
FTiniNadhirah/Coursera-courses-answers
d59311917b740a6ce8b8361e9ac79657b103bb75
[ "Apache-2.0" ]
44
2020-09-19T09:28:14.000Z
2022-03-29T18:07:19.000Z
50.233766
652
0.688987
[ [ [ "# Week 1\n\n1.Question 1\nConsider the table below describing a data set of individuals who have registered to volunteer at a public school. Which of the choices below lists categorical variables?\n\n**Answer:phone number and name**\n\n2.Question 2\nA study is designed to test the effect of type of light on exam performance of students. 180 students are randomly assigned to three classrooms: one that is dimly lit, another with yellow lighting, and a third with white fluorescent lighting, and given the same exam. Which of the following correctly identifies the variables used in the study as explanatory and response?\n\n**Answer: explanatory: type of light (categorical with 3 levels)\nresponse: exam performance**\n\n\n3.Question 3\nIn a study published in 2011 in The Proceedings of the National Academy of Sciences, researchers randomly assigned 120 elderly men and women who volunteered to be a part of this study (average age mid-60s) to one of two exercise groups. One group walked around a track three times a week; the other did a variety of less aerobic exercises, including yoga and resistance training with bands. After a year, brain scans showed that among the walkers, the hippocampus (part of the brain responsible for forming memories) had increased in volume by about 2% on average; in the others, it had declined by about 1.4%. Which of the following is false?\n\n**Answer:The results of this study can be generalized to all elderly.**\n\n4.Question 4\nA school district is considering whether it will no longer allow students to park at school after two recent accidents where students were severely injured. As a first step, they survey parents of high school students by mail, asking them whether or not the parents would object to this policy change. Of 5,799 surveys that go out, 1,209 are returned. Of these 1,209 surveys that were completed, 926 agreed with the policy change and 283 disagreed. Which of the following statements is the most plausible?\n\n**Answer:It is possible that 80% of the parents of high school students disagree with the policy change.**\n\n5.Question 5\nFor your political science class, you’d like to take a survey from a sample of all the Catholic Church members in your town. Your town is divided into 17 neighborhoods, each with similar socio-economic status distribution and ethnic diversity, and each contains a Catholic Church. Rather than trying to obtain a list of all members of all these churches, you decide to pick 3 churches at random. For these churches, you’ll ask to get a list of all current members and contact 100 members at random. What kind of design have you used?\n\n**Answer:stratified sampling**\n\n6.Question 6\nIn an experiment, what purpose does blocking serve?\n\n**Answer:Control for variables that might influence the response.**\n\n7.Question 7\nWhich of the following is one of the four principles of experimental design?\n\n**Answer:randomize**", "_____no_output_____" ] ] ]
[ "markdown" ]
[ [ "markdown" ] ]
cb56d745c2468a27bd581ce38c6a5ce32219f79a
18,562
ipynb
Jupyter Notebook
Colab/KNN/LIAR3_KNN_remove_speaker.ipynb
pccisme/Fake-News-Detection
1b7af5463af830933231824eb4ac7adf9982ec20
[ "MIT" ]
null
null
null
Colab/KNN/LIAR3_KNN_remove_speaker.ipynb
pccisme/Fake-News-Detection
1b7af5463af830933231824eb4ac7adf9982ec20
[ "MIT" ]
null
null
null
Colab/KNN/LIAR3_KNN_remove_speaker.ipynb
pccisme/Fake-News-Detection
1b7af5463af830933231824eb4ac7adf9982ec20
[ "MIT" ]
null
null
null
18,562
18,562
0.668031
[ [ [ "# import packages\nimport csv\nimport numpy as np\nimport warnings\nimport matplotlib.pyplot as plt\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.feature_extraction.text import TfidfTransformer\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nfrom sklearn.neighbors import KNeighborsClassifier\n\nwarnings.filterwarnings('ignore')", "_____no_output_____" ], [ "# read .tsv files\ncolumns = {0:'ID', 1:'label', 2:'statement', 3:'subject', 4:'speaker', 5:'job_title',\n 6:'state', 7:'party', 8:'barely_true', 9:'false', 10:'half_true', 11:'mostly_true',\n 12:'pants_on_fire', 13:'context'}\n\ndef readTsvFile(file_name):\n tsv_file = open(file_name)\n read_tsv = csv.reader(tsv_file, delimiter='\\t')\n dataset = []\n for row in read_tsv:\n dataset.append(row)\n\n # print('examples:', len(dataset))\n # print('features:', len(dataset[0]))\n # print('row1_example:', dataset[0])\n # print('\\n')\n\n return dataset", "_____no_output_____" ], [ "# plotting bar charts\ndef plottingData(column_name, arr_X, arr_y):\n fig = plt.figure()\n ax = fig.add_axes([1, 1, 2, 2])\n ax.bar(arr_X, arr_y)\n ax.set_xlabel(column_name)\n ax.set_ylabel('count')\n plt.xticks(rotation=90)\n plt.show()", "_____no_output_____" ], [ "def removeSpace(string):\n if string[-1] != ' ':\n return string\n space_count = 0\n pos = -1\n while string[pos] == ' ':\n pos -= 1\n \n return string[:pos+1]", "_____no_output_____" ], [ "#caculate numbers of each category in each columns\ndef categoryChecker(dataset, column_name, n): \n category = {}\n\n for row in dataset:\n if len(row) > n:\n if row[n] == '' or row[n] == 'N/A':\n row[n] = 'None' #missing data will rename as 'None'\n cleaned_string = removeSpace(row[n])\n if cleaned_string in category: \n category[cleaned_string] += 1 \n if cleaned_string not in category:\n category[cleaned_string] = 1\n # else:\n # print('Suspicious case:', row[0]) #len(row) <= n?\n \n category = {k: v for k, v in sorted(category.items(), key=lambda x: x[1],\n reverse=True)} #sorting dictionary\n\n count = 0\n arr_X = []\n arr_y = []\n for k, v in category.items():\n count += v\n arr_X.append(k)\n arr_y.append(v)\n\n # print(column_name, ':', category) \n # print('The total number of examples:', count)\n # print('The number of categories:', len(arr_X))\n plottingData(column_name, arr_X, arr_y) #call plotting function", "_____no_output_____" ], [ "def dataVisualization(dataset):\n for i in [1, 6, 7]:\n categoryChecker(dataset, columns[i], i)", "_____no_output_____" ], [ "def getStatement(dataset, column_num):\n statement = []\n\n for row in dataset:\n if len(row) < column_num:\n statement.append('')\n # print(row[0])\n else:\n statement.append(row[column_num])\n \n return statement", "_____no_output_____" ], [ "def trainRunVectorizer(dataset_words): \n cv = CountVectorizer(stop_words='english')\n doc = np.array([dataset_words])\n dataset_cv = cv.fit_transform(doc.ravel())\n\n # print(cv.vocabulary_)\n # print(dataset_cv.toarray())\n # print(dataset_cv.shape)\n return dataset_cv, cv", "_____no_output_____" ], [ "def runVectorizer(dataset_words, cv): \n doc = np.array([dataset_words])\n dataset_cv = cv.transform(doc.ravel())\n\n # print(cv.vocabulary_)\n # print(dataset_cv.toarray())\n # print(dataset_cv.shape)\n return dataset_cv", "_____no_output_____" ], [ "def runTfidfTransformer(vectorized_statement):\n tfidf = TfidfTransformer(use_idf=True, norm='l2', smooth_idf=True)\n np.set_printoptions(precision=2)\n tfidf_transformed = 
tfidf.fit_transform(vectorized_statement)\n tfidf_transformed_array = tfidf_transformed.toarray()\n\n return tfidf_transformed_array", "_____no_output_____" ], [ "def categorizedDataset(dataset, column_nums=[3, 5, 6, 7]):\n pre_categorized_dataset = []\n for column_num in column_nums:\n seen = {}\n categorized_row = []\n counter = 0\n for row in dataset:\n if len(row) > column_num:\n data = row[column_num]\n else:\n data = ''\n \n if data in seen:\n categorized_row.append(seen[data])\n if data not in seen:\n seen[data] = counter\n categorized_row.append(seen[data])\n counter += 1\n\n pre_categorized_dataset.append(categorized_row)\n \n categorized_dataset = np.array(pre_categorized_dataset).transpose()\n\n return categorized_dataset", "_____no_output_____" ], [ "def creditHistory(dataset, column_nums=[8, 9, 10, 11, 12]):\n pre_credit_history_dataset = []\n for row in dataset:\n credit_row = []\n for column_num in column_nums:\n if len(row) > column_num:\n data = row[column_num]\n else:\n data = 0\n credit_row.append(data)\n\n pre_credit_history_dataset.append(credit_row)\n \n credit_history_dataset = np.array(pre_credit_history_dataset)\n\n return credit_history_dataset", "_____no_output_____" ], [ "def getTargetDataset(dataset, column_num = 1):\n pre_target_dataset = []\n seen = {}\n counter = 0\n for row in dataset:\n if row[column_num] in seen:\n pre_target_dataset.append(seen[row[column_num]])\n if row[column_num] not in seen:\n seen[row[column_num]] = counter\n pre_target_dataset.append(seen[row[column_num]])\n counter += 1\n\n target_dataset = np.array(pre_target_dataset).transpose()\n\n return target_dataset", "_____no_output_____" ], [ "def runTrainDataset():\n train_dataset = readTsvFile('train.tsv')\n # dataVisualization(train_dataset)\n\n y_train = getTargetDataset(train_dataset)\n\n train_dataset_statement = getStatement(train_dataset, 2)\n train_dataset_context = getStatement(train_dataset, 13)\n\n categorized_train_dataset = categorizedDataset(train_dataset)\n credit_history_train_dataset = creditHistory(train_dataset)\n del train_dataset\n\n train_vectorized_statement, cv_statement = trainRunVectorizer(train_dataset_statement)\n train_vectorized_context, cv_context = trainRunVectorizer(train_dataset_context)\n del train_dataset_statement\n del train_dataset_context\n\n train_tfidfed_statement = runTfidfTransformer(train_vectorized_statement)\n train_tfidfed_context = runTfidfTransformer(train_vectorized_context)\n del train_vectorized_statement\n del train_vectorized_context\n\n train_vectrized_features = np.column_stack((train_tfidfed_statement, train_tfidfed_context))\n del train_tfidfed_statement\n del train_tfidfed_context\n\n X_train = np.column_stack((train_vectrized_features, categorized_train_dataset))\n del train_vectrized_features\n del categorized_train_dataset\n\n X_train = np.column_stack((X_train, credit_history_train_dataset))\n del credit_history_train_dataset\n\n return X_train, y_train, cv_statement, cv_context", "_____no_output_____" ], [ "def runValDataset(cv_statement, cv_context): \n val_dataset = readTsvFile('valid.tsv')\n # dataVisualization(val_dataset)\n\n y_val = getTargetDataset(val_dataset)\n\n val_dataset_statement = getStatement(val_dataset, 2)\n val_dataset_context = getStatement(val_dataset, 13)\n\n categorized_val_dataset = categorizedDataset(val_dataset)\n credit_history_val_dataset = creditHistory(val_dataset)\n val_dataset = None\n\n val_vectorized_statement = runVectorizer(val_dataset_statement, cv_statement)\n val_vectorized_context = 
runVectorizer(val_dataset_context, cv_context)\n val_dataset_statement = val_dataset_context = None\n\n val_tfidfed_statement = runTfidfTransformer(val_vectorized_statement)\n val_tfidfed_context = runTfidfTransformer(val_vectorized_context)\n val_vectorized_statement = val_vectorized_context = None\n\n val_vectrized_features = np.column_stack((val_tfidfed_statement, val_tfidfed_context))\n val_tfidfed_statement = val_tfidfed_context = None\n\n\n X_val = np.column_stack((val_vectrized_features, categorized_val_dataset))\n val_vectrized_features = categorized_val_dataset = None\n\n X_val = np.column_stack((X_val, credit_history_val_dataset))\n credit_history_val_dataset = None\n\n return X_val, y_val", "_____no_output_____" ], [ "def runTestDataset(cv_statement, cv_context):\n test_dataset = readTsvFile('test.tsv')\n # dataVisualization(test_dataset)\n\n y_test = getTargetDataset(test_dataset)\n\n test_dataset_statement = getStatement(test_dataset, 2)\n test_dataset_context = getStatement(test_dataset, 13)\n\n categorized_test_dataset = categorizedDataset(test_dataset)\n credit_history_test_dataset = creditHistory(test_dataset)\n test_dataset = None\n\n test_vectorized_statement = runVectorizer(test_dataset_statement, cv_statement)\n test_vectorized_context = runVectorizer(test_dataset_context, cv_context)\n test_dataset_statement = test_dataset_context = None\n\n test_tfidfed_statement = runTfidfTransformer(test_vectorized_statement)\n test_tfidfed_context = runTfidfTransformer(test_vectorized_context)\n test_vectorized_statement = test_vectorized_context = None\n\n test_vectrized_features = np.column_stack((test_tfidfed_statement, test_tfidfed_context))\n test_tfidfed_statement = test_tfidfed_context = None\n\n X_test = np.column_stack((test_vectrized_features, categorized_test_dataset))\n test_vectrized_features = categorized_test_dataset = None\n\n X_test = np.column_stack((X_test, credit_history_test_dataset))\n credit_history_test_dataset = None\n\n return X_test, y_test", "_____no_output_____" ], [ "X_train, y_train, cv_statement, cv_context = runTrainDataset()", "_____no_output_____" ], [ "X_val, y_val = runValDataset(cv_statement, cv_context)", "_____no_output_____" ], [ "def trainKNN(X_train, X_val, y_train, y_val, num_neighbor, KNN_type, weight):\n n_neighbor = num_neighbor\n p_value = KNN_type\n best_KNN_val_acc = 0\n best_n_neighbor = []\n best_p_value = []\n best_weight = []\n accuracies = 0\n counter = 0\n\n for neighbor in n_neighbor:\n for pv in p_value:\n for w in weight:\n knn = KNeighborsClassifier(n_neighbors = neighbor, p = pv, weights = w)\n knn.fit(X_train, y_train)\n\n KNN_val_acc = knn.score(X_val, y_val)\n \n accuracies += KNN_val_acc\n counter += 1\n\n if KNN_val_acc > best_KNN_val_acc:\n best_KNN_val_acc = KNN_val_acc\n best_n_neighbor = [neighbor]\n best_p_value = [pv]\n best_weight = [w]\n elif KNN_val_acc == best_KNN_val_acc:\n best_n_neighbor.append(neighbor)\n best_p_value.append(pv)\n best_weight.append(w)\n \n print('Accuracy:', KNN_val_acc, ',', 'n:', neighbor, ',',\n 'm:', pv, ',', 'w:', w)\n \n mean_accuracy = accuracies/counter\n print('Best Accuracy:', best_KNN_val_acc)\n print('num of neighbors:', best_n_neighbor)\n print('KNN type:', best_p_value)\n print('Weight:', best_weight)\n print('Mean Accuracy:', mean_accuracy)\n\n return best_n_neighbor, best_p_value, best_weight", "_____no_output_____" ], [ "best_n_neighbor, best_p_value, best_weight = trainKNN(X_train.astype(np.float32), \n X_val.astype(np.float32), \n y_train.astype(np.float32), \n y_val.astype(np.float32), \n range(3, 16, 3), [1,2], \n ['uniform', 'distance'])", "Accuracy: 0.15965732087227413 , n: 3 , m: 1 , w: uniform\nAccuracy: 0.16043613707165108 , n: 3 , m: 1 , w: distance\nAccuracy: 0.17289719626168223 , n: 3 , m: 2 , w: uniform\nAccuracy: 0.16043613707165108 , n: 3 , m: 2 , w: distance\nAccuracy: 0.15031152647975077 , n: 6 , m: 1 , w: uniform\nAccuracy: 0.15498442367601245 , n: 6 , m: 1 , w: distance\nAccuracy: 0.15654205607476634 , n: 6 , m: 2 , w: uniform\nAccuracy: 0.16822429906542055 , n: 6 , m: 2 , w: distance\nAccuracy: 0.15809968847352024 , n: 9 , m: 1 , w: uniform\nAccuracy: 0.15498442367601245 , n: 9 , m: 1 , w: distance\nAccuracy: 0.15031152647975077 , n: 9 , m: 2 , w: uniform\nAccuracy: 0.14953271028037382 , n: 9 , m: 2 , w: distance\nAccuracy: 0.14174454828660435 , n: 12 , m: 1 , w: uniform\nAccuracy: 0.14797507788161993 , n: 12 , m: 1 , w: distance\nAccuracy: 0.15654205607476634 , n: 12 , m: 2 , w: uniform\nAccuracy: 0.14953271028037382 , n: 12 , m: 2 , w: distance\nAccuracy: 0.14641744548286603 , n: 15 , m: 1 , w: uniform\nAccuracy: 0.14875389408099687 , n: 15 , m: 1 , w: distance\nAccuracy: 0.15342679127725856 , n: 15 , m: 2 , w: uniform\nAccuracy: 0.14797507788161993 , n: 15 , m: 2 , w: distance\nBest Accuracy: 0.17289719626168223\nnum of neighbors: [3]\nKNN type: [2]\nWeight: ['uniform']\nMean Accuracy: 0.15443925233644862\n" ], [ "del X_val\ndel y_val", "_____no_output_____" ], [ "X_test, y_test = runTestDataset(cv_statement, cv_context)\ndel cv_statement\ndel cv_context", "_____no_output_____" ], [ "def testKNN(X_train, X_val, y_train, y_val, num_neighbor, KNN_type, best_weight):\n n_neighbor = num_neighbor[0]\n p_value = KNN_type[0]\n w = best_weight[0]\n\n knn = KNeighborsClassifier(n_neighbors = n_neighbor, p = p_value, weights = w)\n knn.fit(X_train, y_train)\n # score on the evaluation split passed in, rather than module-level globals\n KNN_test_acc = knn.score(X_val, y_val)\n print('KNN test accuracy:', KNN_test_acc)\n \n return KNN_test_acc", "_____no_output_____" ], [ "testKNN(X_train.astype(np.float32), X_test.astype(np.float32), \n y_train.astype(np.float32), y_test.astype(np.float32), best_n_neighbor, \n best_p_value, best_weight)", "KNN test accuracy: 0.1712707182320442\n" ], [ "", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb56def2ddb17e3f7b0e7976372e147b4172904f
21,240
ipynb
Jupyter Notebook
BreastCancerKM_Sambbhav.ipynb
sambbhavgarg/Machine-Learning
c14715419ba7e18d09cc295fb9a2026344079534
[ "MIT" ]
null
null
null
BreastCancerKM_Sambbhav.ipynb
sambbhavgarg/Machine-Learning
c14715419ba7e18d09cc295fb9a2026344079534
[ "MIT" ]
null
null
null
BreastCancerKM_Sambbhav.ipynb
sambbhavgarg/Machine-Learning
c14715419ba7e18d09cc295fb9a2026344079534
[ "MIT" ]
null
null
null
82.96875
14,328
0.808145
[ [ [ "import numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib import style\nimport warnings\nfrom math import sqrt\nfrom collections import Counter\nfrom collections import defaultdict\nstyle.use('fivethirtyeight')\nimport pandas as pd\nimport random", "_____no_output_____" ], [ "df = pd.read_csv('Dataset.csv')\noriginal_df = pd.DataFrame.copy(df)\ndf.replace('?',-9999, inplace=True)\ndf.drop(['id'], 1, inplace=True)\ndf.drop(['label'], 1, inplace=True)\nfull_data = df.astype(float).values.tolist()", "_____no_output_____" ], [ "test_size=0.2\ntrain_data = full_data[:-int(test_size*len(full_data))]\ntest_data = full_data[-int(test_size*len(full_data)):]\n", "_____no_output_____" ], [ "class K_means:\n \n def __init__(self, k=3, tol=0.001, max_iter=300):\n self.k = k\n self.tol = tol\n self.max_iter = max_iter\n \n \n def fit(self,data):\n #centroid dict\n self.centroids = {}\n # since k=2 we will select first two points from the data and we will declare that as a centroid\n \n self.track={}\n for i in range(self.k):\n self.track[i]=[]\n \n for i in range(self.k):\n self.centroids[i] = data[i]\n self.track[i].append(data[i])\n \n # we will run this loop for 300 times (300 iteration)\n for i in range(self.max_iter):\n self.classifications = {} #{2: [], 4: []}\n\n for i in range(self.k):\n self.classifications[i] = []\n\n for featureset in data: #finding distance from centroid , finding mini value , putting them in classification\n distances = [np.linalg.norm(featureset - self.centroids[centroid]) for centroid in \n self.centroids]\n classification = distances.index(min(distances)) #find the index of the min distance\n\n self.classifications[classification].append(featureset)\n \n\n prev_centroids = dict(self.centroids)\n \n for classification in self.classifications:\n\n self.centroids[classification] = np.average(self.classifications[classification],axis=0)\n self.track[classification].append(np.average(self.classifications[classification],axis=0))\n #print(self.centroids)\n optimized = True\n \n for c in self.centroids:\n original_centroid = prev_centroids[c]\n current_centroid = self.centroids[c]\n if np.sum((current_centroid-original_centroid)/original_centroid*100.0) > self.tol:\n optimized = False\n \n if optimized:\n break\n \n \n def predict(self,data):\n distances = [np.linalg.norm(data-self.centroids[centroid]) for centroid in self.centroids]\n #print(distances)\n classification = distances.index(min(distances))\n return classification", "_____no_output_____" ], [ "clf=K_means()", "_____no_output_____" ], [ "clf.fit(np.array(train_data))", "_____no_output_____" ], [ "clf.predict(np.array(test_data))", "_____no_output_____" ], [ "labels = original_df['label'].tolist()[:int(0.2*len(full_data))]\n#takes testing data corresponding to original data\ntest_set = []\nfor i in labels:\n if i == 2:\n test_set.append(0)\n else:\n test_set.append(1)", "_____no_output_____" ], [ "acc=[]\nfor i in range(1,4):\n clf = K_means(k=i)\n clf.fit(np.array(train_data))\n correct = 0\n total = 0\n for j in range(len(test_data)):\n if(clf.predict(test_data[j]) == test_set[j]):\n correct+=1\n total += 1\n print(\"Acc:\",i,\" \",(correct/total)*100,\"%\")\n acc.append(correct/total)", "Acc: 1 55.39568345323741 %\nAcc: 2 43.884892086330936 %\nAcc: 3 8.633093525179856 %\n" ], [ "plt.plot([1,2,3],acc)\nplt.show()", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb56df5c7c4c9c456cd9309330a75c0aad33b29f
6,763
ipynb
Jupyter Notebook
Google IT Automation with Python/Google - Crash Course on Python/Week 3/Module 3 Graded Assessment.ipynb
FTiniNadhirah/Coursera-courses-answers
d59311917b740a6ce8b8361e9ac79657b103bb75
[ "Apache-2.0" ]
73
2020-08-26T03:03:03.000Z
2022-03-13T17:35:47.000Z
Google IT Automation with Python/Google - Crash Course on Python/Week 3/Module 3 Graded Assessment.ipynb
FTiniNadhirah/Coursera-courses-answers
d59311917b740a6ce8b8361e9ac79657b103bb75
[ "Apache-2.0" ]
null
null
null
Google IT Automation with Python/Google - Crash Course on Python/Week 3/Module 3 Graded Assessment.ipynb
FTiniNadhirah/Coursera-courses-answers
d59311917b740a6ce8b8361e9ac79657b103bb75
[ "Apache-2.0" ]
44
2020-09-19T09:28:14.000Z
2022-03-29T18:07:19.000Z
26.315175
251
0.519
[ [ [ "# Module 3 Graded Assessment\n", "_____no_output_____" ] ], [ [ "\"\"\"\n1.Question 1\nFill in the blanks of this code to print out the numbers 1 through 7.\n\n\"\"\"\nnumber = 1\nwhile number <= 7:\n\tprint(number, end=\" \")\n\tnumber +=1", "Automating with Python is fun!\n" ], [ "\"\"\"\n2.Question 2\nThe show_letters function should print out each letter of a word on a separate line. \nFill in the blanks to make that happen.\n\n\"\"\"\ndef show_letters(word):\n\tfor letter in word:\n\t\tprint(letter)\n\nshow_letters(\"Hello\")\n# Should print one line per letter", "Yellow is the color of sunshine\n" ], [ "\"\"\"\n3.Question 3\nComplete the function digits(n) that returns how many digits the number has. \nFor example: 25 has 2 digits and 144 has 3 digits. Tip: you can figure out the digits of a number by dividing \nit by 10 once per digit until there are no digits left.\n\n\"\"\"\n\ndef digits(n):\n count = str(n)\n return len(count)\n \t\nprint(digits(25)) # Should print 2\nprint(digits(144)) # Should print 3\nprint(digits(1000)) # Should print 4\nprint(digits(0)) # Should print 1", "False\nTrue\n" ], [ "\"\"\"\n4.Question 4\nThis function prints out a multiplication table (where each number is the result of multiplying the first number of its row by the number at the top of its column). Fill in the blanks so that calling multiplication_table(1, 3) will print out:\n\n1 2 3\n\n2 4 6\n\n3 6 9\n\n\"\"\"\n\ndef multiplication_table(start, stop):\n\tfor x in range(start,stop+1):\n\t\tfor y in range(start,stop+1):\n\t\t\tprint(str(x*y), end=\" \")\n\t\tprint()\n\nmultiplication_table(1, 3)\n# Should print the multiplication table shown above", "1 2 3 \n2 4 6 \n3 6 9 \n" ], [ "\"\"\"\n5.Question 5\nThe counter function counts down from start to stop when start is bigger than stop, \nand counts up from start to stop otherwise. \nFill in the blanks to make this work correctly.\n\"\"\"\ndef counter(start, stop):\n\tx = start\n\tif x>stop:\n\t\treturn_string = \"Counting down: \"\n\t\twhile x >= stop:\n\t\t\treturn_string += str(x)\n\t\t\tif x>stop:\n\t\t\t\treturn_string += \",\"\n\t\t\tx = x-1\n\telse:\n\t\treturn_string = \"Counting up: \"\n\t\twhile x <= stop:\n\t\t\treturn_string += str(x)\n\t\t\tif x<stop:\n\t\t\t\treturn_string += \",\"\n\t\t\tx = x+1\n\treturn return_string\n\nprint(counter(1, 10)) # Should be \"Counting up: 1,2,3,4,5,6,7,8,9,10\"\nprint(counter(2, 1)) # Should be \"Counting down: 2,1\"\nprint(counter(5, 5)) # Should be \"Counting up: 5\"", "_____no_output_____" ], [ "\"\"\"\n6.Question 6\nThe loop function is similar to range(), but handles the parameters somewhat differently: it takes in 3 parameters: \nthe starting point, the stopping point, and the increment step. When the starting point is greater \nthan the stopping point, it forces the steps to be negative. When, instead, the starting point is less \nthan the stopping point, it forces the step to be positive. Also, if the step is 0, it changes to 1 or -1. \nThe result is returned as a one-line, space-separated string of numbers. For example, loop(11,2,3) \nshould return 11 8 5 and loop(1,5,0) should return 1 2 3 4. 
Fill in the missing parts to make that happen.\n\"\"\"\n\ndef loop(start, stop, step):\n\treturn_string = \"\"\n\tif step == 0:\n\t step=1\n\tif start>stop:\n\t\tstep = abs(step) * -1\n\telse:\n\t\tstep = abs(step)\n\tfor count in range(start, stop, step):\n\t\treturn_string += str(count) + \" \"\n\treturn return_string.strip()\n\nprint(loop(11,2,3)) # Should be 11 8 5\nprint(loop(1,5,0)) # Should be 1 2 3 4\nprint(loop(-1,-2,0)) # Should be -1\nprint(loop(10,25,-2)) # Should be 10 12 14 16 18 20 22 24 \nprint(loop(1,1,1)) # Should be empty", "_____no_output_____" ], [ "#8.Question 8\n#What is the value of x at the end of the following code?\nfor x in range(1, 10, 3):\n print(x)\n \n#7 \n", "_____no_output_____" ], [ "#9.Question 9\n#What is the value of y at the end of the following code?\n\nfor x in range(10):\n for y in range(x):\n print(y)\n \n#8 \n", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb56e6d418e5b591862d4f6673591ac7bdf8d8c8
2,422
ipynb
Jupyter Notebook
moving_means.ipynb
Bmcgarry194/moving_means
945fd1bdf3c3e95c4f0b0be7019e6838c4617468
[ "MIT" ]
null
null
null
moving_means.ipynb
Bmcgarry194/moving_means
945fd1bdf3c3e95c4f0b0be7019e6838c4617468
[ "MIT" ]
2
2019-08-08T17:22:38.000Z
2019-08-08T18:19:47.000Z
moving_means.ipynb
Bmcgarry194/moving_means
945fd1bdf3c3e95c4f0b0be7019e6838c4617468
[ "MIT" ]
1
2019-08-08T17:23:33.000Z
2019-08-08T17:23:33.000Z
21.625
72
0.469034
[ [ [ "# Calculating Moving Averages", "_____no_output_____" ] ], [ [ "from typing import List, Dict\n\nVector = List[int]", "_____no_output_____" ], [ "# load necessary data\nJ = [4, 4, 4, 9, 10, 11, 12]\np = 3", "_____no_output_____" ], [ "def create_sequences(J: Vector, p: int) -> List[List[int]]:\n \"\"\"Splitting J into sequences of length p\"\"\"\n\n output = []\n for i, number in enumerate(J):\n sequence = J[i:i + p]\n if len(sequence) == p:\n output.append(sequence)\n else:\n break\n return output", "_____no_output_____" ], [ "def min_max_moving_avg(J: Vector, p: int) -> Dict[str, float]: \n \"\"\"\n Calculates the min and max average from a list of lists\n \n params:\n *x = A list of lists of ints\n \n returns:\n Dictionary of the min and max moving averages\n \"\"\"\n sequences = create_sequences(J, p)\n \n means = []\n for seq in sequences:\n means.append(sum(seq) / len(seq))\n\n min_max = {}\n min_max['min'] = min(means)\n min_max['max'] = max(means)\n \n return min_max", "_____no_output_____" ], [ "min_max_moving_avg(J, p)", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code" ] ]
cb56f7d999b6fd360c7fc7b33f4e29eaeb36cd0e
132,067
ipynb
Jupyter Notebook
COVID_19_Confirmed_curation.ipynb
iam-abbas/Coronavirus-Data-Visualization
96ace352318c7d198299df0c798647c5ffc78d4c
[ "MIT" ]
3
2020-02-02T23:47:19.000Z
2020-12-16T03:42:31.000Z
COVID_19_Confirmed_curation.ipynb
iam-abbas/Coronavirus-Data-Visualization
96ace352318c7d198299df0c798647c5ffc78d4c
[ "MIT" ]
2
2020-10-01T03:43:37.000Z
2020-10-01T03:46:05.000Z
COVID_19_Confirmed_curation.ipynb
iam-abbas/Coronavirus-Data-Visualization
96ace352318c7d198299df0c798647c5ffc78d4c
[ "MIT" ]
null
null
null
37.087054
266
0.219593
[ [ [ "<a href=\"https://colab.research.google.com/github/iam-abbas/Coronavirus-Data-Visualization/blob/master/COVID_19_Confirmed_curation.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np", "_____no_output_____" ], [ "data = pd.read_csv('covid_19_data.csv')\ndata.head()", "_____no_output_____" ], [ "data['ObservationDate'] = data['ObservationDate'].apply(pd.to_datetime())\n# data['Country'] = data['Country/Region'].replace('Mainland China', 'China')\ndata['Country'] = data['Country/Region']\ndata = data.drop(columns=['SNo', 'Province/State', 'Last Update', 'Deaths', 'Country/Region', 'Recovered'])\ndata.head()", "_____no_output_____" ], [ "data = data.groupby(['ObservationDate', 'Country']).sum().reset_index()\ndata.tail(25)", "_____no_output_____" ], [ "dates = [str(x)[:10] for x in data['ObservationDate'].unique()]\ncountries = [x for x in data['Country'].unique()]\n", "_____no_output_____" ], [ "cases = pd.DataFrame(columns=dates)\ncases.insert(0, 'Country', countries)", "_____no_output_____" ], [ "cases.head()", "_____no_output_____" ], [ "cases = cases.set_index(\"Country\")\ncases.head()", "_____no_output_____" ], [ "for country in countries:\n for date in dates:\n try:\n case = int(data['Confirmed'].loc[(data['ObservationDate'] == date) & (data['Country'] == country)])\n except:\n case = 0\n \n cases.at[country, date] = case", "_____no_output_____" ], [ "cases.head(30)", "_____no_output_____" ], [ "cases.to_csv(\"data_final.csv\", encoding='utf-8', index=True)", "_____no_output_____" ], [ "cases_without_china = cases.drop(\"Mainland China\")", "_____no_output_____" ], [ "cases_without_china.to_csv(\"data_final_non_china.csv\", encoding='utf-8', index=True)", "_____no_output_____" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb56f8206072fa1dc27462972109c280b04279c2
20,166
ipynb
Jupyter Notebook
Exercises/Supervised_Learning_Random_Forest/1 - Guided Example.ipynb
yscyang1/Thinkful_Data_Science
14f134c6b94b694cbfd2ca1683253890a17beeb1
[ "MIT" ]
1
2020-04-30T13:55:00.000Z
2020-04-30T13:55:00.000Z
Exercises/Supervised_Learning_Random_Forest/1 - Guided Example.ipynb
yscyang1/Thinkful_Data_Science
14f134c6b94b694cbfd2ca1683253890a17beeb1
[ "MIT" ]
null
null
null
Exercises/Supervised_Learning_Random_Forest/1 - Guided Example.ipynb
yscyang1/Thinkful_Data_Science
14f134c6b94b694cbfd2ca1683253890a17beeb1
[ "MIT" ]
null
null
null
31.60815
477
0.464693
[ [ [ "### Instructions\nThe lecture uses random forest to predict the state of the loan with data taken from Lending Club (2015). With minimal feature engineering, they were able to get an accuracy of 98% with cross validation. However, the accuracies had a lot of variance, ranging from 98% to 86%, indicating there are lots of useless features. \n\nI am tasked with 1) removing as many features as possible without dropping the average below 90% accuracy in a 10 fold cross validation and 2) if the first task is possible without using anything related to payment amount or outstanding principal. ", "_____no_output_____" ], [ "### 1 - Import Data\nIn this dataset, there are 420k+ rows and 110 features and the target variable (loan status). ", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\n%matplotlib inline\nfrom sklearn import ensemble\nfrom sklearn.model_selection import cross_val_score\n\nimport warnings\nwarnings.filterwarnings('ignore')", "C:\\Users\\Crystal\\AppData\\Local\\Continuum\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\fixes.py:313: FutureWarning: numpy not_equal will not check object identity in the future. The comparison did not return the same result as suggested by the identity (`is`)) and will change.\n _nan_object_mask = _nan_object_array != _nan_object_array\n" ], [ "df = pd.read_csv('LoanStats3d.csv', skipinitialspace=True, header=1)", "_____no_output_____" ], [ "df.info()", "<class 'pandas.core.frame.DataFrame'>\nRangeIndex: 421097 entries, 0 to 421096\nColumns: 111 entries, id to total_il_high_credit_limit\ndtypes: float64(85), object(26)\nmemory usage: 356.6+ MB\n" ] ], [ [ "The the last two rows of the dataset holds no data, so these rows will be deleted.", "_____no_output_____" ] ], [ [ "df.tail()", "_____no_output_____" ], [ "df = df[:-2]", "_____no_output_____" ] ], [ [ "### 2 - Removing Features\nIn the lecture, they removed any columns with missing values. I'm not sure this is the best method, as there could be valuable information in the missing values. Instead, the method I employ is to identify the categorical features. If there are less than 30 unique values, then I create dummy variables out of them. If there are more than 30 unique values, I use panda's ability to map each unique value to a numeric value, allowing me to retain all columns and rows.", "_____no_output_____" ] ], [ [ "cat_col = [col for col in df.columns if df[col].dtype == 'object']\nnum_col = [col for col in df.columns if df[col].dtype != 'object']\ncat_col.remove('loan_status')", "_____no_output_____" ], [ "dummy_df = pd.DataFrame()\nfor col in cat_col:\n if df[col].nunique() < 30:\n dummy_df = pd.concat([dummy_df, pd.get_dummies(df[col], prefix = col, drop_first=True)], axis = 1)\n cat_col.remove(col)", "_____no_output_____" ] ], [ [ "For whatever reason, the id and interest rates are labeled as 'objects'. The following is to convert them into numeric features.", "_____no_output_____" ] ], [ [ "df['id'] = pd.to_numeric(df['id'], errors='coerce')\ndf['int_rate'] = pd.to_numeric(df['int_rate'].str.strip('%'), errors='coerce')\n\ncat_col.remove('id')\ncat_col.remove('int_rate')", "_____no_output_____" ] ], [ [ "Using Panda's codes function is as simple as converting the objects into categorical dtypes (instead of objects). Then add one to the codes as null values are given a value of -1, which random forest will not take. 
", "_____no_output_____" ] ], [ [ "for col in cat_col + ['loan_status']:\n df[col] = df[col].astype('category')\n df[col] = df[col].cat.codes+1", "_____no_output_____" ], [ "df_combined = pd.concat([df[cat_col+num_col], df['loan_status'], dummy_df], axis = 1)", "_____no_output_____" ], [ "combined_cols_lst = list(df_combined.columns)\ncombined_cols_lst.remove('loan_status')", "_____no_output_____" ] ], [ [ "At this point, I have 136 features. How do we remove the features that do not help predict the loan status? One way is to find the features that are highly correlated with the loan status. Below I've found 9 features that have a correlation of at least 0.15. ", "_____no_output_____" ] ], [ [ "print('There are {} features.'.format(len(combined_cols_lst)))", "There are 136 features.\n" ], [ "important_cols = [col for col in combined_cols_lst if df_combined[[col, 'loan_status']].corr().abs()['loan_status'][0] > 0.15]", "_____no_output_____" ], [ "important_cols", "_____no_output_____" ] ], [ [ "### 3 - Random Forest Classifier\nI'm finally ready to apply the data to a random forest classifier. I will be using a 10 fold cross validation, the same as the lecture for comparison. Recall that in the lecture, the average accuracy was ~97%, but it had a range of ~11%. **On the other hand, this model with only 9 features has an accuracy of ~97%, but a range of ~2.5%. **", "_____no_output_____" ] ], [ [ "rfc = ensemble.RandomForestClassifier()\nX = df_combined[important_cols]\nY = df_combined['loan_status']\n\ncv = cross_val_score(rfc, X, Y, cv = 10)", "_____no_output_____" ], [ "print('The cross validation score has a range of {:0.3f} and mean of {:0.3f}'.format(cv.max() - cv.min(), cv.mean()))", "The cross validation score has a range of 0.025 and mean of 0.972\n" ] ], [ [ "#### 3.1 - Removing Payment Amount and Outstanding Principal\nThe second question to answer is if is is possible to have an accuracy above 90% without using features related to payment amounts or outstanding principals. Looking at the features deemed 'important', there are only three that are not related to payment amount or principals. Of these three features, two of them have very low correlations. My guess is it will be pretty difficult to achieve 90% accuracy. ", "_____no_output_____" ] ], [ [ "for col in important_cols:\n print(col, df_combined[[col, 'loan_status']].corr().abs()['loan_status'][0])", "last_pymnt_d 0.317289555141\nout_prncp 0.218553748244\nout_prncp_inv 0.218595214349\ntotal_pymnt 0.346931943687\ntotal_pymnt_inv 0.346914103008\ntotal_rec_prncp 0.411788669787\nrecoveries 0.162988104241\ncollection_recovery_fee 0.163651718919\nlast_pymnt_amnt 0.492987539961\n" ], [ "important_cols_2 = ['total_rec_prncp',\n 'recoveries',\n 'collection_recovery_fee']", "_____no_output_____" ] ], [ [ "As expected, the average accuracy is ~86% and is not able to meet the target accuracy. ", "_____no_output_____" ] ], [ [ "rfc2 = ensemble.RandomForestClassifier()\nX2 = df_combined[important_cols_2]\nY2 = df_combined['loan_status']\n\ncv2 = cross_val_score(rfc2, X2, Y2, cv = 10)", "_____no_output_____" ], [ "print('The cross validation score has a range of {:0.3f} and mean of {:0.3f}'.format(cv2.max() - cv2.min(), cv2.mean()))", "The cross validation score has a range of 0.068 and mean of 0.864\n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ] ]
cb570c4f4aaa172f76b150f31eb4afc90dbf1683
8,483
ipynb
Jupyter Notebook
openfl-tutorials/Federated_Keras_MNIST_Tutorial.ipynb
karol-brejna-i/openfl
f8114a724bdbcc027b824f68825ac71cd6ce78a1
[ "Apache-2.0" ]
null
null
null
openfl-tutorials/Federated_Keras_MNIST_Tutorial.ipynb
karol-brejna-i/openfl
f8114a724bdbcc027b824f68825ac71cd6ce78a1
[ "Apache-2.0" ]
null
null
null
openfl-tutorials/Federated_Keras_MNIST_Tutorial.ipynb
karol-brejna-i/openfl
f8114a724bdbcc027b824f68825ac71cd6ce78a1
[ "Apache-2.0" ]
null
null
null
30.188612
337
0.604857
[ [ [ "# Federated Keras MNIST Tutorial", "_____no_output_____" ] ], [ [ "#Install Tensorflow and MNIST dataset if not installed\n!pip install tensorflow==2.3.1\n\n#Alternatively you could use the intel-tensorflow build\n# !pip install intel-tensorflow==2.3.0", "_____no_output_____" ], [ "import numpy as np\nimport tensorflow as tf\nimport tensorflow.keras as keras\nfrom tensorflow.keras import backend as K\nfrom tensorflow.keras import Sequential\nfrom tensorflow.keras.layers import Conv2D, Flatten, Dense\nfrom tensorflow.keras.utils import to_categorical\nfrom tensorflow.keras.datasets import mnist\n\nimport openfl.native as fx\nfrom openfl.federated import FederatedModel,FederatedDataSet", "_____no_output_____" ], [ "def test_intel_tensorflow():\n \"\"\"\n Check if Intel version of TensorFlow is installed\n \"\"\"\n import tensorflow as tf\n\n print(\"We are using Tensorflow version {}\".format(tf.__version__))\n\n major_version = int(tf.__version__.split(\".\")[0])\n if major_version >= 2:\n from tensorflow.python import _pywrap_util_port\n print(\"Intel-optimizations (DNNL) enabled:\",\n _pywrap_util_port.IsMklEnabled())\n else:\n print(\"Intel-optimizations (DNNL) enabled:\")\n\ntest_intel_tensorflow()", "_____no_output_____" ] ], [ [ "After importing the required packages, the next step is setting up our openfl workspace. To do this, simply run the `fx.init()` command as follows:", "_____no_output_____" ] ], [ [ "#Setup default workspace, logging, etc.\nfx.init('keras_cnn_mnist')", "_____no_output_____" ] ], [ [ "Now we are ready to define our dataset and model to perform federated learning on. The dataset should be composed of a numpy arrayWe start with a simple fully connected model that is trained on the MNIST dataset. ", "_____no_output_____" ] ], [ [ "#Import and process training, validation, and test images/labels\n\n# Set the ratio of validation imgs, can't be 0.0\nVALID_PERCENT = 0.3\n\n(X_train, y_train), (X_test, y_test) = mnist.load_data()\nsplit_on = int((1 - VALID_PERCENT) * len(X_train))\n\ntrain_images = X_train[0:split_on,:,:]\ntrain_labels = to_categorical(y_train)[0:split_on,:]\n\nvalid_images = X_train[split_on:,:,:]\nvalid_labels = to_categorical(y_train)[split_on:,:]\n\ntest_images = X_test\ntest_labels = to_categorical(y_test)\n\ndef preprocess(images):\n #Normalize\n images = (images / 255) - 0.5\n #Flatten\n images = images.reshape((-1, 784))\n return images\n\n# Preprocess the images.\ntrain_images = preprocess(train_images)\nvalid_images = preprocess(valid_images)\ntest_images = preprocess(test_images)\n\nfeature_shape = train_images.shape[1]\nclasses = 10\n\nfl_data = FederatedDataSet(train_images,train_labels,valid_images,valid_labels,batch_size=32,num_classes=classes)\n\ndef build_model(feature_shape,classes):\n #Defines the MNIST model\n model = Sequential()\n model.add(Dense(64, input_shape=feature_shape, activation='relu'))\n model.add(Dense(64, activation='relu'))\n model.add(Dense(classes, activation='softmax'))\n \n model.compile(optimizer='adam',loss='categorical_crossentropy',metrics=['accuracy'],)\n return model", "_____no_output_____" ], [ "#Create a federated model using the build model function and dataset\nfl_model = FederatedModel(build_model,data_loader=fl_data)", "_____no_output_____" ] ], [ [ "The `FederatedModel` object is a wrapper around your Keras, Tensorflow or PyTorch model that makes it compatible with openfl. It provides built in federated training and validation functions that we will see used below. 
Using its `setup` function, collaborator models and datasets can be automatically defined for the experiment. ", "_____no_output_____" ] ], [ [ "collaborator_models = fl_model.setup(num_collaborators=2)\ncollaborators = {'one':collaborator_models[0],'two':collaborator_models[1]}#, 'three':collaborator_models[2]}", "_____no_output_____" ], [ "#Original MNIST dataset\nprint(f'Original training data size: {len(train_images)}')\nprint(f'Original validation data size: {len(valid_images)}\\n')\n\n#Collaborator one's data\nprint(f'Collaborator one\\'s training data size: {len(collaborator_models[0].data_loader.X_train)}')\nprint(f'Collaborator one\\'s validation data size: {len(collaborator_models[0].data_loader.X_valid)}\\n')\n\n#Collaborator two's data\nprint(f'Collaborator two\\'s training data size: {len(collaborator_models[1].data_loader.X_train)}')\nprint(f'Collaborator two\\'s validation data size: {len(collaborator_models[1].data_loader.X_valid)}\\n')\n\n#Collaborator three's data\n#print(f'Collaborator three\\'s training data size: {len(collaborator_models[2].data_loader.X_train)}')\n#print(f'Collaborator three\\'s validation data size: {len(collaborator_models[2].data_loader.X_valid)}')", "_____no_output_____" ] ], [ [ "We can see the current plan values by running the `fx.get_plan()` function", "_____no_output_____" ] ], [ [ "#Get the current values of the plan. Each of these can be overridden\nprint(fx.get_plan())", "_____no_output_____" ] ], [ [ "Now we are ready to run our experiment. If we want to pass in custom plan settings, we can easily do that with the `override_config` parameter", "_____no_output_____" ] ], [ [ "#Run experiment, return trained FederatedModel\nfinal_fl_model = fx.run_experiment(collaborators,override_config={'aggregator.settings.rounds_to_train':5})", "_____no_output_____" ], [ "#Save final model and load into keras\nfinal_fl_model.save_native('final_model')\nmodel = tf.keras.models.load_model('./final_model')", "_____no_output_____" ], [ "#Test the final model on our test set\nmodel.evaluate(test_images,test_labels)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
cb571323f6c5dace7f6d8a85079a400d303c0f19
21,425
ipynb
Jupyter Notebook
deep-learning/Tensorflow-2.x/Browser-Based-Models/TensorFlow Deployment/Course 2 - TensorFlow Lite/Week 3/Exercise/TFLite_Week3_Exercise.ipynb
AadityaGupta/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials
352dd6d9a785e22fde0ce53a6b0c2e56f4964950
[ "Apache-2.0" ]
3,266
2017-08-06T16:51:46.000Z
2022-03-30T07:34:24.000Z
deep-learning/Tensorflow-2.x/Browser-Based-Models/TensorFlow Deployment/Course 2 - TensorFlow Lite/Week 3/Exercise/TFLite_Week3_Exercise.ipynb
AadityaGupta/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials
352dd6d9a785e22fde0ce53a6b0c2e56f4964950
[ "Apache-2.0" ]
150
2017-08-28T14:59:36.000Z
2022-03-11T23:21:35.000Z
deep-learning/Tensorflow-2.x/Browser-Based-Models/TensorFlow Deployment/Course 2 - TensorFlow Lite/Week 3/Exercise/TFLite_Week3_Exercise.ipynb
AadityaGupta/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials
352dd6d9a785e22fde0ce53a6b0c2e56f4964950
[ "Apache-2.0" ]
1,449
2017-08-06T17:40:59.000Z
2022-03-31T12:03:24.000Z
24.826188
221
0.542544
[ [ [ "##### Copyright 2018 The TensorFlow Authors.", "_____no_output_____" ] ], [ [ "#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.", "_____no_output_____" ] ], [ [ "# Rock, Paper & Scissors with TensorFlow Hub - TFLite", "_____no_output_____" ], [ "<table class=\"tfo-notebook-buttons\" align=\"left\">\n <td>\n <a target=\"_blank\" href=\"https://colab.research.google.com/github/lmoroney/dlaicourse/blob/master/TensorFlow%20Deployment/Course%202%20-%20TensorFlow%20Lite/Week%203/Exercise/TFLite_Week3_Exercise.ipynb\">\n <img src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" />\n Run in Google Colab</a>\n </td>\n <td>\n <a target=\"_blank\" href=\"https://github.com/lmoroney/dlaicourse/blob/master/TensorFlow%20Deployment/Course%202%20-%20TensorFlow%20Lite/Week%203/Exercise/TFLite_Week3_Exercise.ipynb\">\n <img src=\"https://www.tensorflow.org/images/GitHub-Mark-32px.png\" />\n View source on GitHub</a>\n </td>\n</table>", "_____no_output_____" ], [ "## Setup", "_____no_output_____" ] ], [ [ "try:\n %tensorflow_version 2.x\nexcept:\n pass", "_____no_output_____" ], [ "import numpy as np\nimport matplotlib.pylab as plt\n\nimport tensorflow as tf\nimport tensorflow_hub as hub\n\nfrom tqdm import tqdm\n\nprint(\"\\u2022 Using TensorFlow Version:\", tf.__version__)\nprint(\"\\u2022 Using TensorFlow Hub Version: \", hub.__version__)\nprint('\\u2022 GPU Device Found.' if tf.test.is_gpu_available() else '\\u2022 GPU Device Not Found. Running on CPU')", "_____no_output_____" ] ], [ [ "## Select the Hub/TF2 Module to Use\n\nHub modules for TF 1.x won't work here, please use one of the selections provided.", "_____no_output_____" ] ], [ [ "module_selection = (\"mobilenet_v2\", 224, 1280) #@param [\"(\\\"mobilenet_v2\\\", 224, 1280)\", \"(\\\"inception_v3\\\", 299, 2048)\"] {type:\"raw\", allow-input: true}\nhandle_base, pixels, FV_SIZE = module_selection\nMODULE_HANDLE =\"https://tfhub.dev/google/tf2-preview/{}/feature_vector/4\".format(handle_base)\nIMAGE_SIZE = (pixels, pixels)\nprint(\"Using {} with input size {} and output dimension {}\".format(MODULE_HANDLE, IMAGE_SIZE, FV_SIZE))", "_____no_output_____" ] ], [ [ "## Data Preprocessing", "_____no_output_____" ], [ "Use [TensorFlow Datasets](http://tensorflow.org/datasets) to load the cats and dogs dataset.\n\nThis `tfds` package is the easiest way to load pre-defined data. If you have your own data, and are interested in importing using it with TensorFlow see [loading image data](../load_data/images.ipynb)\n", "_____no_output_____" ] ], [ [ "import tensorflow_datasets as tfds\ntfds.disable_progress_bar()", "_____no_output_____" ] ], [ [ "The `tfds.load` method downloads and caches the data, and returns a `tf.data.Dataset` object. 
These objects provide powerful, efficient methods for manipulating data and piping it into your model.\n\nSince we need our own (train, validation, test) splits, use the subsplit feature to divide the data into 80%, 10%, 10% respectively.", "_____no_output_____" ] ], [ [ "splits = tfds.Split.ALL.subsplit(weighted=(80, 10, 10))\n\n# Go to the TensorFlow Dataset's website and search for the Rock, Paper, Scissors dataset and load it here\nsplits, info = tfds.load( # YOUR CODE HERE )\n\n(train_examples, validation_examples, test_examples) = splits\n\nnum_examples = info.splits['train'].num_examples\nnum_classes = info.features['label'].num_classes", "_____no_output_____" ] ], [ [ "### Format the Data\n\nUse the `tf.image` module to format the images for the task.\n\nResize the images to a fixed input size, and rescale the input channels.", "_____no_output_____" ] ], [ [ "def format_image(image, label):\n image = tf.image.resize(image, IMAGE_SIZE) / 255.0\n return image, label", "_____no_output_____" ] ], [ [ "Now shuffle and batch the data\n", "_____no_output_____" ] ], [ [ "BATCH_SIZE = 32 #@param {type:\"integer\"}", "_____no_output_____" ], [ "# Prepare the examples by preprocessing them and then batching them (and optionally prefetching them)\n\n# If you wish you can shuffle train set here\ntrain_batches = # YOUR CODE HERE\n\nvalidation_batches = # YOUR CODE HERE\n\ntest_batches = # YOUR CODE HERE", "_____no_output_____" ] ], [ [ "Inspect a batch", "_____no_output_____" ] ], [ [ "for image_batch, label_batch in train_batches.take(1):\n pass\n\nimage_batch.shape", "_____no_output_____" ] ], [ [ "\n## Defining the Model\n\nAll it takes is to put a linear classifier on top of the `feature_extractor_layer` with the Hub module.\n\nFor speed, we start out with a non-trainable `feature_extractor_layer`, but you can also enable fine-tuning for greater accuracy.", "_____no_output_____" ] ], [ [ "do_fine_tuning = False #@param {type:\"boolean\"}", "_____no_output_____" ], [ "feature_extractor = hub.KerasLayer(MODULE_HANDLE,\n input_shape=IMAGE_SIZE + (3,), \n output_shape=[FV_SIZE],\n trainable=do_fine_tuning)", "_____no_output_____" ], [ "print(\"Building model with\", MODULE_HANDLE)\n\nmodel = tf.keras.Sequential([\n feature_extractor,\n tf.keras.layers.Dense(num_classes, activation='softmax')\n])\n\nmodel.summary()", "_____no_output_____" ], [ "#@title (Optional) Unfreeze some layers\nNUM_LAYERS = 10 #@param {type:\"slider\", min:1, max:50, step:1}\n \nif do_fine_tuning:\n feature_extractor.trainable = True\n \n for layer in model.layers[-NUM_LAYERS:]:\n layer.trainable = True\n\nelse:\n feature_extractor.trainable = False", "_____no_output_____" ] ], [ [ "## Training the Model", "_____no_output_____" ] ], [ [ "if do_fine_tuning:\n model.compile(optimizer=tf.keras.optimizers.SGD(lr=0.002, momentum=0.9),\n loss=tf.keras.losses.SparseCategoricalCrossentropy(),\n metrics=['accuracy'])\nelse:\n model.compile(optimizer='adam',\n loss='sparse_categorical_crossentropy',\n metrics=['accuracy'])", "_____no_output_____" ], [ "EPOCHS = 5\n\nhist = model.fit(train_batches,\n epochs=EPOCHS,\n validation_data=validation_batches)", "_____no_output_____" ] ], [ [ "## Export the Model", "_____no_output_____" ] ], [ [ "RPS_SAVED_MODEL = \"rps_saved_model\"", "_____no_output_____" ] ], [ [ "Export the SavedModel", "_____no_output_____" ] ], [ [ "# Use TensorFlow's SavedModel API to export the SavedModel from the trained Keras model\n\n# YOUR CODE HERE", "_____no_output_____" 
], [ "%%bash -s $RPS_SAVED_MODEL\nsaved_model_cli show --dir $1 --tag_set serve --signature_def serving_default", "_____no_output_____" ], [ "loaded = tf.saved_model.load(RPS_SAVED_MODEL)", "_____no_output_____" ], [ "print(list(loaded.signatures.keys()))\ninfer = loaded.signatures[\"serving_default\"]\nprint(infer.structured_input_signature)\nprint(infer.structured_outputs)", "_____no_output_____" ] ], [ [ "## Convert Using TFLite's Converter", "_____no_output_____" ] ], [ [ "# Intialize the TFLite converter to load the SavedModel\nconverter = # YOUR CODE HERE\n\n# Set the optimization strategy for 'size' in the converter \nconverter.optimizations = [# YOUR CODE HERE]\n\n# Use the tool to finally convert the model\ntflite_model = # YOUR CODE HERE", "_____no_output_____" ], [ "tflite_model_file = 'converted_model.tflite'\n\nwith open(tflite_model_file, \"wb\") as f:\n f.write(tflite_model)", "_____no_output_____" ] ], [ [ "## Test the TFLite Model Using the Python Interpreter", "_____no_output_____" ] ], [ [ "# Load TFLite model and allocate tensors.\nwith open(tflite_model_file, 'rb') as fid:\n tflite_model = fid.read()\n \ninterpreter = tf.lite.Interpreter(model_content=tflite_model)\ninterpreter.allocate_tensors()\n\ninput_index = interpreter.get_input_details()[0][\"index\"]\noutput_index = interpreter.get_output_details()[0][\"index\"]", "_____no_output_____" ], [ "# Gather results for the randomly sampled test images\npredictions = []\n\ntest_labels, test_imgs = [], []\nfor img, label in tqdm(test_batches.take(10)):\n interpreter.set_tensor(input_index, img)\n interpreter.invoke()\n predictions.append(interpreter.get_tensor(output_index))\n \n test_labels.append(label.numpy()[0])\n test_imgs.append(img)", "_____no_output_____" ], [ "#@title Utility functions for plotting\n# Utilities for plotting\n\nclass_names = ['rock', 'paper', 'scissors']\n\ndef plot_image(i, predictions_array, true_label, img):\n predictions_array, true_label, img = predictions_array[i], true_label[i], img[i]\n plt.grid(False)\n plt.xticks([])\n plt.yticks([])\n \n img = np.squeeze(img)\n \n plt.imshow(img, cmap=plt.cm.binary)\n \n predicted_label = np.argmax(predictions_array)\n \n print(type(predicted_label), type(true_label))\n \n if predicted_label == true_label:\n color = 'green'\n else:\n color = 'red'\n \n plt.xlabel(\"{} {:2.0f}% ({})\".format(class_names[predicted_label],\n 100*np.max(predictions_array),\n class_names[true_label]), color=color)", "_____no_output_____" ], [ "#@title Visualize the outputs { run: \"auto\" }\nindex = 0 #@param {type:\"slider\", min:0, max:9, step:1}\nplt.figure(figsize=(6,3))\nplt.subplot(1,2,1)\nplot_image(index, predictions, test_labels, test_imgs)\nplt.show()", "_____no_output_____" ] ], [ [ "Create a file to save the labels.", "_____no_output_____" ] ], [ [ "with open('labels.txt', 'w') as f:\n f.write('\\n'.join(class_names))", "_____no_output_____" ] ], [ [ "If you are running this notebook in a Colab, you can run the cell below to download the model and labels to your local disk.\n\n**Note**: If the files do not download when you run the cell, try running the cell a second time. Your browser might prompt you to allow multiple files to be downloaded. 
", "_____no_output_____" ] ], [ [ "try:\n from google.colab import files\n files.download('converted_model.tflite')\n files.download('labels.txt')\nexcept:\n pass", "_____no_output_____" ] ], [ [ "# Prepare the Test Images for Download (Optional)", "_____no_output_____" ], [ "This part involves downloading additional test images for the Mobile Apps only in case you need to try out more samples", "_____no_output_____" ] ], [ [ "!mkdir -p test_images", "_____no_output_____" ], [ "from PIL import Image\n\nfor index, (image, label) in enumerate(test_batches.take(50)):\n image = tf.cast(image * 255.0, tf.uint8)\n image = tf.squeeze(image).numpy()\n pil_image = Image.fromarray(image)\n pil_image.save('test_images/{}_{}.jpg'.format(class_names[label[0]], index))", "_____no_output_____" ], [ "!ls test_images", "_____no_output_____" ], [ "!zip -qq rps_test_images.zip -r test_images/", "_____no_output_____" ] ], [ [ "If you are running this notebook in a Colab, you can run the cell below to download the Zip file with the images to your local disk. \n\n**Note**: If the Zip file does not download when you run the cell, try running the cell a second time.", "_____no_output_____" ] ], [ [ "try:\n files.download('rps_test_images.zip')\nexcept:\n pass", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown" ], [ "code" ] ]
cb57244cde8c32b674083bc067474058dad8eee1
9,922
ipynb
Jupyter Notebook
Lama_demo.ipynb
kaz12tech/ai_demos
6925d1885815a61d6f17eb6af53172c7ed1d99b1
[ "MIT" ]
2
2022-02-15T00:54:54.000Z
2022-03-21T14:12:58.000Z
Lama_demo.ipynb
kaz12tech/ai_demos
6925d1885815a61d6f17eb6af53172c7ed1d99b1
[ "MIT" ]
null
null
null
Lama_demo.ipynb
kaz12tech/ai_demos
6925d1885815a61d6f17eb6af53172c7ed1d99b1
[ "MIT" ]
1
2022-02-15T00:55:03.000Z
2022-02-15T00:55:03.000Z
28.511494
225
0.472586
[ [ [ "論文<br>\nhttps://arxiv.org/abs/2109.07161<br>\n<br>\nGitHub<br>\nhttps://github.com/saic-mdal/lama<br>\n<br>\n<a href=\"https://colab.research.google.com/github/kaz12tech/ai_demos/blob/master/Lama_demo.ipynb\" target=\"_blank\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "# 環境セットアップ", "_____no_output_____" ], [ "## GitHubからソースコードを取得\n## ライブラリをインストール", "_____no_output_____" ] ], [ [ "%cd /content\n!git clone https://github.com/saic-mdal/lama.git\n\n!pip install -r lama/requirements.txt --quiet\n!pip install wget --quiet", "_____no_output_____" ], [ "!pip install --upgrade webdataset==0.1.103\n\n!pip uninstall opencv-python-headless -y --quiet\n!pip install opencv-python-headless==4.1.2.30 --quiet\n\n# torchtext 0.8.0をインストール\n!pip uninstall torch torchvision torchaudio torchtext -y\n!pip install torch==1.7.1+cu110 torchvision==0.8.2+cu110 torchaudio==0.7.2 torchtext -f https://download.pytorch.org/whl/torch_stable.html", "_____no_output_____" ], [ "# avoid AttributeError: 'builtin_function_or_method' object has no attribute 'rfftn'\n!sed -E -i \"15i import torch.fft\" /content/lama/saicinpainting/training/modules/ffc.py", "_____no_output_____" ] ], [ [ "## 学習済みモデルのセットアップ", "_____no_output_____" ] ], [ [ "% cd /content/lama\n\n!curl -L $(yadisk-direct https://disk.yandex.ru/d/ouP6l8VJ0HpMZg) -o big-lama.zip\n!unzip big-lama.zip", "_____no_output_____" ] ], [ [ "## ライブラリをインポート", "_____no_output_____" ] ], [ [ "import base64, os\nfrom IPython.display import HTML, Image\nfrom google.colab.output import eval_js\nfrom base64 import b64decode\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport wget\nfrom shutil import copyfile\nimport shutil", "_____no_output_____" ] ], [ [ "# Canvasのセットアップ", "_____no_output_____" ] ], [ [ "\ncanvas_html = \"\"\"\n<style>\n.button {\n background-color: #4CAF50;\n border: none;\n color: white;\n padding: 15px 32px;\n text-align: center;\n text-decoration: none;\n display: inline-block;\n font-size: 16px;\n margin: 4px 2px;\n cursor: pointer;\n}\n</style>\n<canvas1 width=%d height=%d>\n</canvas1>\n<canvas width=%d height=%d>\n</canvas>\n\n<button class=\"button\">Finish</button>\n<script>\nvar canvas = document.querySelector('canvas')\nvar ctx = canvas.getContext('2d')\n\nvar canvas1 = document.querySelector('canvas1')\nvar ctx1 = canvas.getContext('2d')\n\n\nctx.strokeStyle = 'red';\n\nvar img = new Image();\nimg.src = \"data:image/%s;charset=utf-8;base64,%s\";\nconsole.log(img)\nimg.onload = function() {\n ctx1.drawImage(img, 0, 0);\n};\nimg.crossOrigin = 'Anonymous';\n\nctx.clearRect(0, 0, canvas.width, canvas.height);\n\nctx.lineWidth = %d\nvar button = document.querySelector('button')\nvar mouse = {x: 0, y: 0}\n\ncanvas.addEventListener('mousemove', function(e) {\n mouse.x = e.pageX - this.offsetLeft\n mouse.y = e.pageY - this.offsetTop\n})\ncanvas.onmousedown = ()=>{\n ctx.beginPath()\n ctx.moveTo(mouse.x, mouse.y)\n canvas.addEventListener('mousemove', onPaint)\n}\ncanvas.onmouseup = ()=>{\n canvas.removeEventListener('mousemove', onPaint)\n}\nvar onPaint = ()=>{\n ctx.lineTo(mouse.x, mouse.y)\n ctx.stroke()\n}\n\nvar data = new Promise(resolve=>{\n button.onclick = ()=>{\n resolve(canvas.toDataURL('image/png'))\n }\n})\n</script>\n\"\"\"", "_____no_output_____" ], [ "def draw(imgm, filename='drawing.png', w=400, h=200, line_width=1):\n display(HTML(canvas_html % (w, h, w,h, filename.split('.')[-1], imgm, line_width)))\n data = eval_js(\"data\")\n binary = 
b64decode(data.split(',')[1])\n  with open(filename, 'wb') as f:\n    f.write(binary)", "_____no_output_____" ] ], [ [ "# Image setup\n[Sample image 1](https://www.pakutaso.com/shared/img/thumb/PAK85_oyakudachisimasu20140830_TP_V.jpg)<br>\n[Sample image 2](https://www.pakutaso.com/shared/img/thumb/TSU88_awaitoykyo_TP_V.jpg)<br>\n[Sample image 3](https://www.pakutaso.com/20211208341post-37933.html)", "_____no_output_____" ] ], [ [ "%cd /content/lama\n\nfrom google.colab import files\nfiles = files.upload()\nfname = list(files.keys())[0]\n\nshutil.rmtree('./data_for_prediction', ignore_errors=True)\n! mkdir data_for_prediction\n\ncopyfile(fname, f'./data_for_prediction/{fname}')\nos.remove(fname)\nfname = f'./data_for_prediction/{fname}'\n\nimage64 = base64.b64encode(open(fname, 'rb').read())\nimage64 = image64.decode('utf-8')\n\nprint(f'Will use {fname} for inpainting')\nimg = np.array(plt.imread(f'{fname}')[:,:,:3])", "_____no_output_____" ] ], [ [ "# Inpainting", "_____no_output_____" ] ], [ [ "mask_path = f\".{fname.split('.')[1]}_mask.png\"\ndraw(image64, filename=mask_path, w=img.shape[1], h=img.shape[0], line_width=0.04*img.shape[1])\n\nwith_mask = np.array(plt.imread(mask_path)[:,:,:3])\nmask = (with_mask[:,:,0]==1)*(with_mask[:,:,1]==0)*(with_mask[:,:,2]==0)\nplt.imsave(mask_path,mask, cmap='gray')", "_____no_output_____" ], [ "%cd /content/lama\n\n!mkdir output/\ncopyfile(mask_path,os.path.join(\"./output/\", os.path.basename(mask_path)))\n\n!PYTHONPATH=. TORCH_HOME=$(pwd) python3 bin/predict.py \\\n    model.path=$(pwd)/big-lama \\\n    indir=$(pwd)/data_for_prediction \\\n    outdir=/content/lama/output \\\n    dataset.img_suffix={suffix}\n\nplt.rcParams['figure.dpi'] = 200\nplt.imshow(plt.imread(f\"/content/lama/output/{fname.split('.')[1].split('/')[2]}_mask.png\"))\n_=plt.axis('off')\n_=plt.title('inpainting result')\nplt.show()\nfname = None", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ] ]
cb57299725f173a290e0e705847e7296baae6507
155,722
ipynb
Jupyter Notebook
06_Stats/Wind_Stats/Exercises_with_solutions.ipynb
mtodisco10/pandas_exercises
7f7b2b1a773d22a219b38dcb19e3ec2ed1675c9b
[ "BSD-3-Clause" ]
null
null
null
06_Stats/Wind_Stats/Exercises_with_solutions.ipynb
mtodisco10/pandas_exercises
7f7b2b1a773d22a219b38dcb19e3ec2ed1675c9b
[ "BSD-3-Clause" ]
null
null
null
06_Stats/Wind_Stats/Exercises_with_solutions.ipynb
mtodisco10/pandas_exercises
7f7b2b1a773d22a219b38dcb19e3ec2ed1675c9b
[ "BSD-3-Clause" ]
null
null
null
37.478219
346
0.382297
[ [ [ "# Wind Statistics", "_____no_output_____" ], [ "### Introduction:\n\nThe data have been modified to contain some missing values, identified by NaN. \nUsing pandas should make this exercise\neasier, in particular for the bonus question.\n\nYou should be able to perform all of these operations without using\na for loop or other looping construct.\n\n\n1. The data in 'wind.data' has the following format:", "_____no_output_____" ] ], [ [ "\"\"\"\nYr Mo Dy RPT VAL ROS KIL SHA BIR DUB CLA MUL CLO BEL MAL\n61 1 1 15.04 14.96 13.17 9.29 NaN 9.87 13.67 10.25 10.83 12.58 18.50 15.04\n61 1 2 14.71 NaN 10.83 6.50 12.62 7.67 11.50 10.04 9.79 9.67 17.54 13.83\n61 1 3 18.50 16.88 12.33 10.13 11.17 6.17 11.25 NaN 8.50 7.67 12.75 12.71\n\"\"\"", "_____no_output_____" ] ], [ [ " The first three columns are year, month and day. The\n remaining 12 columns are average windspeeds in knots at 12\n locations in Ireland on that day. \n\n More information about the dataset go [here](wind.desc).", "_____no_output_____" ], [ "### Step 1. Import the necessary libraries", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport datetime", "_____no_output_____" ] ], [ [ "### Step 2. Import the dataset from this [address](https://github.com/guipsamora/pandas_exercises/blob/master/06_Stats/Wind_Stats/wind.data)", "_____no_output_____" ], [ "### Step 3. Assign it to a variable called data and replace the first 3 columns by a proper datetime index.", "_____no_output_____" ] ], [ [ "# parse_dates gets 0, 1, 2 columns and parses them as the index\ndata_url = 'https://raw.githubusercontent.com/guipsamora/pandas_exercises/master/06_Stats/Wind_Stats/wind.data'\ndata = pd.read_csv(data_url, sep = \"\\s+\", parse_dates = [[0,1,2]]) \ndata.head()", "_____no_output_____" ] ], [ [ "### Step 4. Year 2061? Do we really have data from this year? Create a function to fix it and apply it.", "_____no_output_____" ] ], [ [ "# The problem is that the dates are 2061 and so on...\n\n# function that uses datetime\ndef fix_century(x):\n year = x.year - 100 if x.year > 1989 else x.year\n return datetime.date(year, x.month, x.day)\n\n# apply the function fix_century on the column and replace the values to the right ones\ndata['Yr_Mo_Dy'] = data['Yr_Mo_Dy'].apply(fix_century)\n\n# data.info()\ndata.head()", "_____no_output_____" ] ], [ [ "### Step 5. Set the right dates as the index. Pay attention at the data type, it should be datetime64[ns].", "_____no_output_____" ] ], [ [ "# transform Yr_Mo_Dy it to date type datetime64\ndata[\"Yr_Mo_Dy\"] = pd.to_datetime(data[\"Yr_Mo_Dy\"])\n\n# set 'Yr_Mo_Dy' as the index\ndata = data.set_index('Yr_Mo_Dy')\n\ndata.head()\n# data.info()", "_____no_output_____" ] ], [ [ "### Step 6. Compute how many values are missing for each location over the entire record. \n#### They should be ignored in all calculations below. ", "_____no_output_____" ] ], [ [ "# \"Number of non-missing values for each location: \"\ndata.isnull().sum()", "_____no_output_____" ] ], [ [ "### Step 7. Compute how many non-missing values there are in total.", "_____no_output_____" ] ], [ [ "#number of columns minus the number of missing values for each location\ndata.shape[0] - data.isnull().sum()\n\n#or\n\ndata.notnull().sum()", "_____no_output_____" ] ], [ [ "### Step 8. Calculate the mean windspeeds of the windspeeds over all the locations and all the times.\n#### A single number for the entire dataset.", "_____no_output_____" ] ], [ [ "data.sum().sum() / data.notna().sum().sum()", "_____no_output_____" ] ], [ [ "### Step 9. 
Create a DataFrame called loc_stats and calculate the min, max and mean windspeeds and standard deviations of the windspeeds at each location over all the days. \n\n#### A different set of numbers for each location.", "_____no_output_____" ] ], [ [ "data.describe(percentiles=[])", "_____no_output_____" ] ], [ [ "### Step 10. Create a DataFrame called day_stats and calculate the min, max and mean windspeed and standard deviations of the windspeeds across all the locations at each day.\n\n#### A different set of numbers for each day.", "_____no_output_____" ] ], [ [ "# create the dataframe\nday_stats = pd.DataFrame()\n\n# this time we set axis=1 so the statistics are computed across each row\nday_stats['min'] = data.min(axis = 1) # min\nday_stats['max'] = data.max(axis = 1) # max \nday_stats['mean'] = data.mean(axis = 1) # mean\nday_stats['std'] = data.std(axis = 1) # standard deviations\n\nday_stats.head()", "_____no_output_____" ] ], [ [ "### Step 11. Find the average windspeed in January for each location.  \n#### Treat January 1961 and January 1962 both as January.", "_____no_output_____" ] ], [ [ "data.loc[data.index.month == 1].mean()", "_____no_output_____" ] ], [ [ "### Step 12. Downsample the record to a yearly frequency for each location.", "_____no_output_____" ] ], [ [ "data.groupby(data.index.to_period('A')).mean()", "_____no_output_____" ] ], [ [ "### Step 13. Downsample the record to a monthly frequency for each location.", "_____no_output_____" ] ], [ [ "data.groupby(data.index.to_period('M')).mean()", "_____no_output_____" ] ], [ [ "### Step 14. Downsample the record to a weekly frequency for each location.", "_____no_output_____" ] ], [ [ "data.groupby(data.index.to_period('W')).mean()", "_____no_output_____" ] ], [ [ "### Step 15. Calculate the min, max and mean windspeeds and standard deviations of the windspeeds across all locations for each week (assume that the first week starts on January 2 1961) for the first 52 weeks.", "_____no_output_____" ] ], [ [ "# resample the data to weekly frequency ('W') and aggregate with the four functions\nweekly = data.resample('W').agg(['min','max','mean','std'])\n\n# slice it for the first 52 weeks and all locations\nweekly.loc[weekly.index[1:53], \"RPT\":\"MAL\"].head(10)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cb572fb5b2382c2af9f73790371ef0771f30b0ee
76,328
ipynb
Jupyter Notebook
APS5_alunos.ipynb
gabrielhpbc/CD
1a46c0774f0c2c49d4266faf007b622b33f3cbd1
[ "MIT" ]
null
null
null
APS5_alunos.ipynb
gabrielhpbc/CD
1a46c0774f0c2c49d4266faf007b622b33f3cbd1
[ "MIT" ]
null
null
null
APS5_alunos.ipynb
gabrielhpbc/CD
1a46c0774f0c2c49d4266faf007b622b33f3cbd1
[ "MIT" ]
null
null
null
73.181208
17,560
0.737868
[ [ [ "# APS 5 - Questões com auxílio do Pandas", "_____no_output_____" ], [ "** Nome: ** <font color=blue> Gabriel Heusi Pereira Bueno de Camargo </font>\n", "_____no_output_____" ], [ "APS **INDIVIDUAL**\n\nData de Entrega: 26/Set até às 23h59 via GitHub.\n\nVamos trabalhar com dados do USGS (United States Geological Survey) para tentar determinar se os abalos detectados no hemisfério Norte têm grande probabilidade de serem testes nucleares.", "_____no_output_____" ] ], [ [ "%matplotlib inline\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy.stats import expon\nfrom numpy import arange\nimport scipy.stats as stats", "_____no_output_____" ], [ "#Abrir o arquivo\ndf = pd.read_csv('earthquake.csv')\n#listar colunas\nprint(list(df))", "['Date', 'Time', 'Latitude', 'Longitude', 'Type', 'Depth', 'Depth Error', 'Depth Seismic Stations', 'Magnitude', 'Magnitude Type', 'Magnitude Error', 'Magnitude Seismic Stations', 'Azimuthal Gap', 'Horizontal Distance', 'Horizontal Error', 'Root Mean Square', 'ID', 'Source', 'Location Source', 'Magnitude Source', 'Status']\n" ] ], [ [ "## Liste as primeiras linhas do DataFrame", "_____no_output_____" ] ], [ [ "df.head()", "_____no_output_____" ] ], [ [ "## Q1 - Manipulando o DataFrame\n\nCrie uma coluna chamada `Hemisfério` baseada na Latitude\n\nA regra de formação é a seguinte:\n\nValor | Critério\n---|---\nNorte | Latitude positiva\nSul | Latitude negativa", "_____no_output_____" ] ], [ [ "df.loc[(df.Latitude >=0), \"Hemisfério\"] = \"Norte\"\ndf.loc[(df.Latitude <0), \"Hemisfério\"] = \"Sul\"\ndf.head()", "_____no_output_____" ], [ "df.Magnitude.describe()", "_____no_output_____" ] ], [ [ "## Q2 - Fit e Histograma\n\nFaça o Histograma da Magnitude. Interprete.", "_____no_output_____" ] ], [ [ "f = plt.figure(figsize=(11,5))\nfaixas = arange(5,9,0.65)\nplot = df.Magnitude.plot.hist(bins=faixas , title=\"Histograma de Magnitude\",normed=1,alpha = 0.9,color=\"g\")\nplt.xlabel(\"Magnitude\")\nplt.ylabel(\"Densidade\")\nplt.show()", "_____no_output_____" ] ], [ [ "Faça o fit de uma distribuição exponencial sobre os dados da Magnitude, achando os valores de **loc** e **scale**. Interprete loc e scale no caso da exponencial.\nDocumentação: https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.expon.html", "_____no_output_____" ], [ "Refaça o Histograma plotando a fdp (função densidade de probabilidade) da exponencial com os parâmetros achados no fit em cima. Cuidado com o domínio utilizado. 
Interprete.", "_____no_output_____" ] ], [ [ "mu = df.Magnitude.mean()\ndp = df.Magnitude.std()\nfig = plt.figure(figsize=(11, 5))\nplot= df.Magnitude.plot.hist(bins = faixas, title='HISTOGRAMA Magnitude ', normed=1, alpha=0.9,color = 'r')\na = sorted(df.Magnitude)\nplt.plot(a, stats.norm.pdf(a, loc = mu, scale = dp))\nplt.title('Histograma X Pdf')", "_____no_output_____" ] ], [ [ "## Q3 - Tabela cruzada\n\nFaça uma tabela de cruzamento das variáveis `Hemisfério` e `Type`\n\nSua tabela deve ser <font color=red> normalizada</font>", "_____no_output_____" ] ], [ [ "ct = pd.crosstab(df.Hemisfério,df.Type,margins=True,normalize = True)\nct", "_____no_output_____" ] ], [ [ "### Q3.1 - Qual a probabilidade de ocorrer um terremoto no hemisfério norte?", "_____no_output_____" ], [ "Adicione na célula abaixo o cálculo:", "_____no_output_____" ] ], [ [ "probNorte = ct.Earthquake.Norte/ct.Earthquake.All\nprint(probNorte)", "0.450628443526\n" ] ], [ [ "Explique o seu raciocínio", "_____no_output_____" ], [ "O cálculo da probabilidade nesse caso se baseia na análise dos casos que ocorrem no Norte em comparação com os casos totais de terremoto. Portanto para saber a probabilidade de ocorrer um terremoto no hemisfério Norte basta dividir esse valor, apresentado no crosstab, pela probabilidade total.", "_____no_output_____" ], [ "### Q3.2 - Dado que aconteceu no Norte, qual a probabilidade de ele ter sido `Nuclear Explosion`? ", "_____no_output_____" ], [ "Calcule a resposta abaixo, ou explique como a encontrou", "_____no_output_____" ], [ "Se for cálculo preencha a célula a seguir:", "_____no_output_____" ] ], [ [ "probNuclear = ct[\"Nuclear Explosion\"][\"Norte\"]/ct.All.Norte\nprint(probNuclear)", "0.0138444151441\n" ] ], [ [ "Se conseguir obter a resposta sem calcular, insira a resposta abaixo:\n\n* A probabilidade de ter sido `Nuclear Explosion` é ...", "_____no_output_____" ], [ "## Q4 - Análise bivariada\n\nFaça o *plot* de dispersão (*scatter plot*) entre as variáveis `Magnitude Error` e `Depth`", "_____no_output_____" ] ], [ [ "plt.scatter(x = df['Magnitude Error'],\n y = df['Depth'])\nplt.show()", "_____no_output_____" ] ], [ [ "Calcule a correlação entre as variáveis `Magnitude Error` e `Depth`", "_____no_output_____" ] ], [ [ "df[\"Depth\"].corr(df[\"Magnitude Error\"])", "_____no_output_____" ] ], [ [ "Explique o que significa o valor da correlação calculada acima? ", "_____no_output_____" ], [ "A correlação apresentada acima mostra uma espécie de dependência entre as duas variáveis, no caso Magnitude Error e Depth, observando o gráfico mostrado acima os valores são bem distantes, mas é justamente isso e o valor da correlação mostrado, que é baixo, que mostra uma alta dependência entre as duas variáveis, não há grande discrepância entre os valores. O fato de ser negativo justificaria uma reta descrescente.", "_____no_output_____" ], [ "## Q5 - Describe e boxplot\n\nFaça o `describe` e o *boxplot* da `Latitude` e da `Longitude`. 
Explique os valores", "_____no_output_____" ] ], [ [ "Lat = df[\"Latitude\"].describe()\nLong = df[\"Longitude\"].describe()\nprint(Lat,Long)\n\n\ndf.boxplot(column = [\"Latitude\",\"Longitude\"])\nplt.show()", "count 23412.000000\nmean 1.679033\nstd 30.113183\nmin -77.080000\n25% -18.653000\n50% -3.568500\n75% 26.190750\nmax 86.005000\nName: Latitude, dtype: float64 count 23412.000000\nmean 39.639961\nstd 125.511959\nmin -179.997000\n25% -76.349750\n50% 103.982000\n75% 145.026250\nmax 179.998000\nName: Longitude, dtype: float64\n" ] ], [ [ "## Q6 - Tirando conclusões com base nos dados\n\nEm um certo lugar já ocorreram abalos com *Magnitude Type* `MB` e *Type* `Nuclear Explosion`. \n \nResponda: \n * É mais provável que tenha sido no norte ou no sul? \n \nAssuma que os Magnitude Type e Type são independentes\n", "_____no_output_____" ] ], [ [ "df.loc[(df.Type==\"Nuclear Explosion\")&(df[\"Magnitude Type\"]==\"MB\")&(df[\"Hemisfério\"]==\"Sul\"),\"Hemis\"]=\"Sul\"\ndf.loc[(df.Type==\"Nuclear Explosion\")&(df[\"Magnitude Type\"]==\"MB\")&(df[\"Hemisfério\"]==\"Norte\"),\"Hemis\"]=\"Norte\"\nsul=df[\"Hemis\"].value_counts(\"Sul\")\nsul", "_____no_output_____" ] ], [ [ "Observando os valores mostrados acima pode-se concluir que a probabilidade de se ocorrer um terremoto é maior no hemisfério Norte em comparação com o Sul. Mais precisamente afirma-se que o Norte tem uma probabilidade de 82,82% de se ocorrer um terremoto, enquanto o Sul apenas 17,17%.", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cb5740eb42acc2296f26acd3e3ed691b11d2b3b3
33,814
ipynb
Jupyter Notebook
Example_building_a_coarse_grained_force_field.ipynb
luwei0917/CG_openMM
e9a1d11fd0a7d9872d71d62f7d933821d9b74967
[ "MIT" ]
2
2021-05-05T13:15:03.000Z
2021-05-05T13:21:08.000Z
Example_building_a_coarse_grained_force_field.ipynb
luwei0917/CG_openMM
e9a1d11fd0a7d9872d71d62f7d933821d9b74967
[ "MIT" ]
null
null
null
Example_building_a_coarse_grained_force_field.ipynb
luwei0917/CG_openMM
e9a1d11fd0a7d9872d71d62f7d933821d9b74967
[ "MIT" ]
null
null
null
44.259162
11,708
0.687703
[ [ [ "# assume you have openmm, pdbfixer and mdtraj installed.\n# if not, you can follow the gudie here https://github.com/npschafer/openawsem", "_____no_output_____" ], [ "# import all using lines below\n\n# from simtk.openmm.app import *\n# from simtk.openmm import *\n# from simtk.unit import *", "_____no_output_____" ], [ "from simtk.openmm.app import ForceField\n# define atoms and residues.\nforcefield = ForceField(\"cg.xml\")", "_____no_output_____" ], [ "from pdbfixer import PDBFixer\nfrom simtk.openmm.app import PDBFile\n\nfixer = PDBFixer(\"1r69.pdb\")\n# more on pdbfixer, check:\n# https://htmlpreview.github.io/?https://github.com/openmm/pdbfixer/blob/master/Manual.html\nfixer.removeHeterogens(keepWater=False)\nPDBFile.writeFile(fixer.topology, fixer.positions, open('1r69_cleaned.pdb', 'w'))", "_____no_output_____" ], [ "import mdtraj\npdb = mdtraj.load(\"1r69_cleaned.pdb\")\nkeep_list = []\nfor atom in pdb.topology.atoms:\n if atom.name == \"CA\":\n keep_list.append(atom.index)\nchosen = pdb.atom_slice(keep_list)\nchosen.save(\"ca_only.pdb\")", "_____no_output_____" ], [ "from simtk.openmm import HarmonicBondForce\ndef connect_term(system):\n k_con= 10000\n con = HarmonicBondForce()\n n = system.getNumParticles()\n for i in range(n-1):\n con.addBond(i, i+1, 0.3816, k_con)\n return con\n\nfrom simtk.openmm import CustomBondForce\ndef connect_term_v2(system):\n k_con= 10000\n r0 = 0.3816\n con = CustomBondForce(f\"0.5*{k_con}*(r-r0)^2\")\n n = system.getNumParticles()\n con.addPerBondParameter(\"r0\")\n for i in range(n-1):\n con.addBond(i, i+1, [r0])\n return con\n\nfrom simtk.openmm import CustomCompoundBondForce\ndef connect_term_v3(system):\n k_con= 10000\n r0 = 0.3816\n con = CustomCompoundBondForce(2, f\"0.5*{k_con}*(distance(p1,p2)-r0)^2\")\n n = system.getNumParticles()\n con.addPerBondParameter(\"r0\")\n for i in range(n-1):\n con.addBond([i, i+1], [r0])\n return con", "_____no_output_____" ], [ "# contact map\nimport numpy as np\nfrom simtk.unit import *\n\npdb = PDBFile(\"ca_only.pdb\")\npos = pdb.positions.value_in_unit(nanometer)\npos = np.array(pos)\ndis = (((pos.reshape(1, -1, 3) - pos.reshape(-1, 1, 3))**2).sum(axis=-1))**0.5", "_____no_output_____" ], [ "import matplotlib.pylab as plt\n%matplotlib inline", "_____no_output_____" ], [ "plt.figure(figsize=[10,10])\nplt.imshow(dis < 0.8, origin=\"lower\")\nplt.colorbar()", "_____no_output_____" ], [ "n = dis.shape[0]\ncontact_threshold = 0.8 # in unit of nm\ncontact_list = []\nfor i in range(n):\n for j in range(i+1, n):\n dis_ij = dis[i][j]\n if dis_ij < contact_threshold:\n sigma_ij = 0.1*(j-i)**0.15\n contact_list.append((i, j, (dis_ij, sigma_ij)))", "_____no_output_____" ], [ "len(contact_list)", "_____no_output_____" ], [ "from simtk.openmm import CustomBondForce\ndef structure_based_term(contact_list):\n k = 10\n structure_based = CustomBondForce(f\"-{k}*exp(-(r-r_ijN)^2/(2*sigma_ij^2))\")\n# structure_based = CustomBondForce(f\"-{k}\")\n structure_based.addPerBondParameter(\"r_ijN\")\n structure_based.addPerBondParameter(\"sigma_ij\")\n for contact in contact_list:\n structure_based.addBond(*contact)\n return structure_based", "_____no_output_____" ], [ "from simtk.openmm import LangevinIntegrator\nfrom simtk.openmm import CustomIntegrator\nfrom simtk.openmm.app import Simulation\nfrom simtk.openmm.app import PDBReporter\nfrom simtk.openmm.app import StateDataReporter\nfrom simtk.openmm.app import DCDReporter\nfrom sys import stdout\n\n\npdb = PDBFile(\"ca_only.pdb\")\nforcefield = 
ForceField(\"cg.xml\")\n\nprint(pdb.topology)\n\nsystem = forcefield.createSystem(pdb.topology)\nsystem.removeForce(0) # remove the default force \"CMotionRemover\"\n# connect = connect_term(system)\n# system.addForce(connect)\n\n# connect = connect_term_v2(system)\n# system.addForce(connect)\n\nconnect = connect_term_v3(system)\nsystem.addForce(connect)\n\nstructure_based = structure_based_term(contact_list)\nsystem.addForce(structure_based)\n\nprint(\"Number of particles: \", system.getNumParticles())\nprint(\"Number of forces: \", system.getNumForces())\n\nintegrator = LangevinIntegrator(300*kelvin, 1/picosecond, 0.004*picoseconds)\nsimulation = Simulation(pdb.topology, system, integrator)\nsimulation.context.setPositions(pdb.positions)", "<Topology; 1 chains, 63 residues, 63 atoms, 0 bonds>\nNumber of particles: 63\nNumber of forces: 2\n" ], [ "simulation.reporters.append(PDBReporter('output.pdb', 1000))\nsimulation.reporters.append(StateDataReporter(stdout, 1000, step=True,\n potentialEnergy=True, temperature=True))\nsimulation.step(10000)", "#\"Step\",\"Potential Energy (kJ/mole)\",\"Temperature (K)\"\n1000,-2470.588134765625,265.3972095571674\n2000,-2475.900390625,352.0954418361503\n3000,-2447.06787109375,324.6016153903199\n4000,-2443.67724609375,329.38365665470707\n5000,-2472.291015625,311.95841512357555\n6000,-2473.103759765625,354.3896434047219\n7000,-2504.68701171875,232.2963285718921\n8000,-2421.172607421875,318.1490746045123\n9000,-2506.325439453125,311.90592414121977\n10000,-2461.337890625,332.81687894573935\n" ], [ "integrator = CustomIntegrator(0.001)\nsimulation = Simulation(pdb.topology, system, integrator)\nsimulation.context.setPositions(pdb.positions)\nsimulation.reporters.append(DCDReporter('output.dcd', 1))\nsimulation.reporters.append(StateDataReporter(stdout, 1, step=True,\n potentialEnergy=True, temperature=True))\nsimulation.step(int(1))\nsimulation.minimizeEnergy()\nsimulation.step(int(1))\n\nintegrator = LangevinIntegrator(300*kelvin, 1/picosecond, 0.004*picoseconds)\nsimulation = Simulation(pdb.topology, system, integrator)\nsimulation.context.setPositions(pdb.positions)\nsimulation.reporters.append(DCDReporter('output.dcd', 1000, append=True))\nsimulation.reporters.append(StateDataReporter(stdout, 1000, step=True,\n potentialEnergy=True, temperature=True))\nsimulation.step(10000)", "#\"Step\",\"Potential Energy (kJ/mole)\",\"Temperature (K)\"\n1,-2749.25732421875,0.0\n2,-2749.87255859375,0.0\n#\"Step\",\"Potential Energy (kJ/mole)\",\"Temperature (K)\"\n1000,-2507.495361328125,304.4992072527908\n2000,-2500.489501953125,312.70488546326226\n3000,-2472.176513671875,292.76087883875005\n4000,-2527.055419921875,314.59933267693776\n5000,-2477.698486328125,298.6544357930838\n6000,-2480.50732421875,247.62469027800051\n7000,-2532.085205078125,341.49360963315706\n8000,-2523.2939453125,335.394943970924\n9000,-2519.30224609375,313.3308482276988\n10000,-2456.395751953125,330.1487977186053\n" ], [ "# conda install nglview -c conda-forge\n# jupyter-nbextension enable nglview --py --sys-prefix\nimport nglview\nview = nglview.show_pdbid(\"1r69\") # load \"3pqr\" from RCSB PDB and display viewer widget\nview", "_____no_output_____" ], [ "view = nglview.show_structure_file(\"ca_only.pdb\")\nview", "_____no_output_____" ], [ "traj = mdtraj.load_dcd(\"output.dcd\", top=\"ca_only.pdb\")", "_____no_output_____" ], [ "view = nglview.show_mdtraj(traj)\nview", "_____no_output_____" ], [ "# Input: expects 3xN matrix of points\n# Returns R,t\n# R = 3x3 rotation matrix\n# t = 3x1 column 
vector\n\ndef rigid_transform_3D(A, B, correct_reflection=True):\n assert A.shape == B.shape\n\n num_rows, num_cols = A.shape\n if num_rows != 3:\n raise Exception(f\"matrix A is not 3xN, it is {num_rows}x{num_cols}\")\n\n num_rows, num_cols = B.shape\n if num_rows != 3:\n raise Exception(f\"matrix B is not 3xN, it is {num_rows}x{num_cols}\")\n\n # find mean column wise\n centroid_A = np.mean(A, axis=1)\n centroid_B = np.mean(B, axis=1)\n\n # ensure centroids are 3x1\n centroid_A = centroid_A.reshape(-1, 1)\n centroid_B = centroid_B.reshape(-1, 1)\n\n # subtract mean\n Am = A - centroid_A\n Bm = B - centroid_B\n\n H = Am @ np.transpose(Bm)\n\n # sanity check\n #if linalg.matrix_rank(H) < 3:\n # raise ValueError(\"rank of H = {}, expecting 3\".format(linalg.matrix_rank(H)))\n\n # find rotation\n U, S, Vt = np.linalg.svd(H)\n R = Vt.T @ U.T\n\n # special reflection case\n if np.linalg.det(R) < 0 and correct_reflection:\n print(\"det(R) < R, reflection detected!, correcting for it ...\")\n Vt[2,:] *= -1\n R = Vt.T @ U.T\n\n t = -R @ centroid_A + centroid_B\n\n return R, t", "_____no_output_____" ], [ "target = traj.xyz[0].T\nn = traj.xyz.shape[0]\nfor i in range(1, n):\n current = traj.xyz[i].T\n ret_R, ret_t = rigid_transform_3D(current, target, correct_reflection=False)\n out = (ret_R@current) + ret_t\n traj.xyz[i] = out.T.reshape(1, -1, 3)", "_____no_output_____" ], [ "view = nglview.show_mdtraj(traj, gui=True)\nview", "_____no_output_____" ], [ "# energy evaluation.\npdb = PDBFile('ca_only.pdb')\ntraj = mdtraj.load_dcd(\"output.dcd\", top='ca_only.pdb')\n\nintegrator = CustomIntegrator(0.001)\nsimulation = Simulation(pdb.topology, system, integrator)\nfor frame in range(traj.n_frames):\n simulation.context.setPositions(traj.openmm_positions(frame))\n state = simulation.context.getState(getEnergy=True)\n termEnergy = state.getPotentialEnergy().value_in_unit(kilojoule_per_mole)\n# termEnergy = state.getPotentialEnergy()\n print(frame, f\"{termEnergy:.3f} kJ/mol\")", "0 -2749.257 kJ/mol\n1 -2749.873 kJ/mol\n2 -2507.495 kJ/mol\n3 -2500.490 kJ/mol\n4 -2472.177 kJ/mol\n5 -2527.056 kJ/mol\n6 -2477.699 kJ/mol\n7 -2480.507 kJ/mol\n8 -2532.085 kJ/mol\n9 -2523.294 kJ/mol\n10 -2519.302 kJ/mol\n11 -2456.396 kJ/mol\n" ], [ "system = forcefield.createSystem(pdb.topology)\nsystem.removeForce(0) # remove the default force \"CMotionRemover\"\nconnect = connect_term(system)\nconnect.setForceGroup(1)\nsystem.addForce(connect)\n\nconnect = connect_term_v2(system)\nconnect.setForceGroup(2)\nsystem.addForce(connect)\n\nconnect = connect_term_v3(system)\nconnect.setForceGroup(3)\nsystem.addForce(connect)\n\nstructure_based = structure_based_term(contact_list)\nstructure_based.setForceGroup(4)\nsystem.addForce(structure_based)\n\nprint(\"Number of particles: \", system.getNumParticles())\nprint(\"Number of forces: \", system.getNumForces())\n\nintegrator = LangevinIntegrator(300*kelvin, 1/picosecond, 0.004*picoseconds)\nsimulation = Simulation(pdb.topology, system, integrator)\nsimulation.context.setPositions(pdb.positions)", "Number of particles: 63\nNumber of forces: 4\n" ], [ "force_groups = {\"con\":1, \"con_v2\":2, \"con_v3\":3, \"structure_based_term\":4}\nshow_energy = [\"con\", \"con_v2\", \"con_v3\", \"structure_based_term\"]", "_____no_output_____" ], [ "integrator = CustomIntegrator(0.001)\nsimulation = Simulation(pdb.topology, system, integrator)\nwidth = 15\nline = \"\".join([f\"{term:<15}\" for term in [\"frame\"] + show_energy])\nprint(line)\nfor frame in range(traj.n_frames):\n 
simulation.context.setPositions(traj.openmm_positions(frame))\n all_energy = []\n for term in show_energy:\n group = force_groups[term]\n state = simulation.context.getState(getEnergy=True, groups={group})\n termEnergy = state.getPotentialEnergy().value_in_unit(kilojoule_per_mole)\n all_energy.append(termEnergy)\n line = \"\".join([f\"{termEnergy:<15.3f}\" for termEnergy in all_energy])\n print(f\"{frame:<15}{line}\")", "frame con con_v2 con_v3 structure_based_term\n0 0.743 0.743 0.743 -2750.000 \n1 0.024 0.024 0.024 -2749.897 \n2 72.760 72.760 72.760 -2580.256 \n3 50.944 50.944 50.944 -2551.433 \n4 56.587 56.587 56.587 -2528.763 \n5 56.172 56.172 56.172 -2583.227 \n6 72.370 72.370 72.370 -2550.069 \n7 77.075 77.075 77.075 -2557.582 \n8 54.391 54.391 54.391 -2586.476 \n9 59.849 59.849 59.849 -2583.143 \n10 70.668 70.668 70.668 -2589.970 \n11 70.643 70.643 70.643 -2527.038 \n" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb5744a801cd8fc875a19d494e6cb9e0ce50bf91
373,509
ipynb
Jupyter Notebook
src/Dcm_to_JPG.ipynb
msivaraman1982/AIML-Capstone
95768e814423a61f1fd56f26ac677870576aba9d
[ "Apache-2.0" ]
null
null
null
src/Dcm_to_JPG.ipynb
msivaraman1982/AIML-Capstone
95768e814423a61f1fd56f26ac677870576aba9d
[ "Apache-2.0" ]
null
null
null
src/Dcm_to_JPG.ipynb
msivaraman1982/AIML-Capstone
95768e814423a61f1fd56f26ac677870576aba9d
[ "Apache-2.0" ]
null
null
null
373,509
373,509
0.538354
[ [ [ "import cv2\r\nimport os\r\nimport argparse\r\n\r\nfrom tqdm import tqdm", "_____no_output_____" ], [ "!pip install pydicom\r\n", "Collecting pydicom\n\u001b[?25l Downloading https://files.pythonhosted.org/packages/72/7b/6ed88f82dd33a32cdb43432dab7f84fcd40c49d63251442b3cfe0be983d4/pydicom-2.1.1-py3-none-any.whl (1.9MB)\n\u001b[K |████████████████████████████████| 1.9MB 5.9MB/s \n\u001b[?25hInstalling collected packages: pydicom\nSuccessfully installed pydicom-2.1.1\n" ], [ "import pydicom\r\n", "_____no_output_____" ], [ "def convertDcmToJpg(inputdir, outputdir):\r\n os.makedirs(outputdir, exist_ok=True)\r\n\r\n train_list = [f for f in os.listdir(inputdir)]\r\n\r\n for i, f in tqdm(enumerate(train_list[:]), total=len(train_list)): \r\n ds = pydicom.read_file(inputdir + f) # read dicom image\r\n img = ds.pixel_array # get image array\r\n #img = cv2.resize(img, (256, 256))\r\n cv2.imwrite(os.path.join(outputdir, f.replace('.dcm','.jpg')), img) # write jpg image", "_____no_output_____" ], [ "train_inputdir = '/content/drive/MyDrive/CapstoneProject/data/rsna-pneumonia-detection-challenge/stage_2_train_images/'\r\ntrain_outdir = '/content/drive/MyDrive/CapstoneProject/data/rsna-pneumonia-detection-challenge/stage_2_train_images/JPG'\r\n\r\ntest_inputdir = '/content/drive/MyDrive/CapstoneProject/data/rsna-pneumonia-detection-challenge/stage_2_test_images/'\r\ntest_outdir = '/content/drive/MyDrive/CapstoneProject/data/rsna-pneumonia-detection-challenge/stage_2_test_images/JPG'\r\n\r\n#train_list = [f for f in os.listdir(test_outdir)]\r\n#print(len(train_list))", "_____no_output_____" ], [ "convertDcmToJpg(test_inputdir,test_outdir)", "100%|█████████▉| 3000/3001 [22:47<00:00, 1.45it/s]" ], [ "convertDcmToJpg(train_inputdir,train_outdir)", "\u001b[1;30;43mStreaming output truncated to the last 5000 lines.\u001b[0m\n 81%|████████ | 21521/26685 [3:14:41<44:30, 1.93it/s]\u001b[A\n 81%|████████ | 21522/26685 [3:14:42<43:17, 1.99it/s]\u001b[A\n 81%|████████ | 21523/26685 [3:14:42<41:12, 2.09it/s]\u001b[A\n 81%|████████ | 21524/26685 [3:14:42<36:58, 2.33it/s]\u001b[A\n 81%|████████ | 21525/26685 [3:14:43<33:50, 2.54it/s]\u001b[A\n 81%|████████ | 21526/26685 [3:14:44<45:57, 1.87it/s]\u001b[A\n 81%|████████ | 21527/26685 [3:14:44<42:32, 2.02it/s]\u001b[A\n 81%|████████ | 21528/26685 [3:14:44<39:14, 2.19it/s]\u001b[A\n 81%|████████ | 21529/26685 [3:14:45<39:10, 2.19it/s]\u001b[A\n 81%|████████ | 21530/26685 [3:14:45<33:36, 2.56it/s]\u001b[A\n 81%|████████ | 21531/26685 [3:14:46<40:37, 2.11it/s]\u001b[A\n 81%|████████ | 21532/26685 [3:14:46<35:40, 2.41it/s]\u001b[A\n 81%|████████ | 21533/26685 [3:14:46<34:35, 2.48it/s]\u001b[A\n 81%|████████ | 21534/26685 [3:14:47<33:27, 2.57it/s]\u001b[A\n 81%|████████ | 21535/26685 [3:14:48<45:56, 1.87it/s]\u001b[A\n 81%|████████ | 21536/26685 [3:14:48<46:15, 1.86it/s]\u001b[A\n 81%|████████ | 21537/26685 [3:14:49<41:05, 2.09it/s]\u001b[A\n 81%|████████ | 21538/26685 [3:14:49<38:29, 2.23it/s]\u001b[A\n 81%|████████ | 21539/26685 [3:14:49<39:09, 2.19it/s]\u001b[A\n 81%|████████ | 21540/26685 [3:14:50<33:21, 2.57it/s]\u001b[A\n 81%|████████ | 21541/26685 [3:14:50<33:22, 2.57it/s]\u001b[A\n 81%|████████ | 21542/26685 [3:14:50<29:45, 2.88it/s]\u001b[A\n 81%|████████ | 21543/26685 [3:14:51<30:30, 2.81it/s]\u001b[A\n 81%|████████ | 21544/26685 [3:14:51<29:55, 2.86it/s]\u001b[A\n 81%|████████ | 21545/26685 [3:14:51<29:11, 2.94it/s]\u001b[A\n 81%|████████ | 21546/26685 [3:14:52<35:13, 2.43it/s]\u001b[A\n 81%|████████ | 21547/26685 [3:14:52<32:35, 2.63it/s]\u001b[A\n 
| 21938/26685 [3:17:41<50:15, 1.57it/s]\u001b[A\n 82%|████████▏ | 21939/26685 [3:17:42<50:37, 1.56it/s]\u001b[A\n 82%|████████▏ | 21940/26685 [3:17:42<43:55, 1.80it/s]\u001b[A\n 82%|████████▏ | 21941/26685 [3:17:43<41:41, 1.90it/s]\u001b[A\n 82%|████████▏ | 21942/26685 [3:17:43<40:19, 1.96it/s]\u001b[A\n 82%|████████▏ | 21943/26685 [3:17:44<35:20, 2.24it/s]\u001b[A\n 82%|████████▏ | 21944/26685 [3:17:44<36:37, 2.16it/s]\u001b[A\n 82%|████████▏ | 21945/26685 [3:17:45<37:23, 2.11it/s]\u001b[A\n 82%|████████▏ | 21946/26685 [3:17:45<37:45, 2.09it/s]\u001b[A\n 82%|████████▏ | 21947/26685 [3:17:45<33:15, 2.37it/s]\u001b[A\n 82%|████████▏ | 21948/26685 [3:17:46<31:42, 2.49it/s]\u001b[A\n 82%|████████▏ | 21949/26685 [3:17:47<41:29, 1.90it/s]\u001b[A\n 82%|████████▏ | 21950/26685 [3:17:47<42:45, 1.85it/s]\u001b[A\n 82%|████████▏ | 21951/26685 [3:17:48<39:52, 1.98it/s]\u001b[A\n 82%|████████▏ | 21952/26685 [3:17:48<35:04, 2.25it/s]\u001b[A\n 82%|████████▏ | 21953/26685 [3:17:49<38:57, 2.02it/s]\u001b[A\n 82%|████████▏ | 21954/26685 [3:17:49<34:34, 2.28it/s]\u001b[A\n 82%|████████▏ | 21955/26685 [3:17:49<30:42, 2.57it/s]\u001b[A\n 82%|████████▏ | 21956/26685 [3:17:49<28:03, 2.81it/s]\u001b[A\n 82%|████████▏ | 21957/26685 [3:17:50<31:02, 2.54it/s]\u001b[A\n 82%|████████▏ | 21958/26685 [3:17:50<28:16, 2.79it/s]\u001b[A\n 82%|████████▏ | 21959/26685 [3:17:51<27:15, 2.89it/s]\u001b[A\n 82%|████████▏ | 21960/26685 [3:17:51<38:45, 2.03it/s]\u001b[A\n 82%|████████▏ | 21961/26685 [3:17:52<36:41, 2.15it/s]\u001b[A\n 82%|████████▏ | 21962/26685 [3:17:52<33:33, 2.35it/s]\u001b[A\n 82%|████████▏ | 21963/26685 [3:17:52<30:47, 2.56it/s]\u001b[A\n 82%|████████▏ | 21964/26685 [3:17:53<28:57, 2.72it/s]\u001b[A\n 82%|████████▏ | 21965/26685 [3:17:54<40:26, 1.94it/s]\u001b[A\n 82%|████████▏ | 21966/26685 [3:17:54<38:27, 2.04it/s]\u001b[A\n 82%|████████▏ | 21967/26685 [3:17:54<35:37, 2.21it/s]\u001b[A\n 82%|████████▏ | 21968/26685 [3:17:55<37:48, 2.08it/s]\u001b[A\n 82%|████████▏ | 21969/26685 [3:17:55<36:56, 2.13it/s]\u001b[A\n 82%|████████▏ | 21970/26685 [3:17:56<37:30, 2.09it/s]\u001b[A\n 82%|████████▏ | 21971/26685 [3:17:56<34:32, 2.27it/s]\u001b[A\n 82%|████████▏ | 21972/26685 [3:17:57<33:09, 2.37it/s]\u001b[A\n 82%|████████▏ | 21973/26685 [3:17:57<32:59, 2.38it/s]\u001b[A\n 82%|████████▏ | 21974/26685 [3:17:57<31:42, 2.48it/s]\u001b[A\n 82%|████████▏ | 21975/26685 [3:17:58<31:01, 2.53it/s]\u001b[A\n 82%|████████▏ | 21976/26685 [3:17:58<29:37, 2.65it/s]\u001b[A\n 82%|████████▏ | 21977/26685 [3:17:58<28:06, 2.79it/s]\u001b[A\n 82%|████████▏ | 21978/26685 [3:17:59<37:23, 2.10it/s]\u001b[A\n 82%|████████▏ | 21979/26685 [3:18:00<35:47, 2.19it/s]\u001b[A\n 82%|████████▏ | 21980/26685 [3:18:00<38:19, 2.05it/s]\u001b[A\n 82%|████████▏ | 21981/26685 [3:18:01<38:32, 2.03it/s]\u001b[A\n 82%|████████▏ | 21982/26685 [3:18:01<46:40, 1.68it/s]\u001b[A\n 82%|████████▏ | 21983/26685 [3:18:03<1:04:49, 1.21it/s]\u001b[A\n 82%|████████▏ | 21984/26685 [3:18:03<52:15, 1.50it/s] \u001b[A\n 82%|████████▏ | 21985/26685 [3:18:04<46:14, 1.69it/s]\u001b[A\n 82%|████████▏ | 21986/26685 [3:18:04<39:19, 1.99it/s]\u001b[A\n 82%|████████▏ | 21987/26685 [3:18:04<36:05, 2.17it/s]\u001b[A\n 82%|████████▏ | 21988/26685 [3:18:04<31:59, 2.45it/s]\u001b[A\n 82%|████████▏ | 21989/26685 [3:18:05<30:47, 2.54it/s]\u001b[A\n 82%|████████▏ | 21990/26685 [3:18:05<29:32, 2.65it/s]\u001b[A\n 82%|████████▏ | 21991/26685 [3:18:05<27:26, 2.85it/s]\u001b[A\n 82%|████████▏ | 21992/26685 [3:18:06<32:32, 2.40it/s]\u001b[A\n 82%|████████▏ | 21993/26685 [3:18:06<28:41, 
2.72it/s]\u001b[A\n 82%|████████▏ | 21994/26685 [3:18:07<39:09, 2.00it/s]\u001b[A\n 82%|████████▏ | 21995/26685 [3:18:07<35:32, 2.20it/s]\u001b[A\n 82%|████████▏ | 21996/26685 [3:18:08<34:10, 2.29it/s]\u001b[A\n 82%|████████▏ | 21997/26685 [3:18:08<38:59, 2.00it/s]\u001b[A\n 82%|████████▏ | 21998/26685 [3:18:09<47:19, 1.65it/s]\u001b[A\n 82%|████████▏ | 21999/26685 [3:18:10<40:23, 1.93it/s]\u001b[A\n 82%|████████▏ | 22000/26685 [3:18:10<33:29, 2.33it/s]\u001b[A\n 82%|████████▏ | 22001/26685 [3:18:10<33:11, 2.35it/s]\u001b[A\n 82%|████████▏ | 22002/26685 [3:18:11<32:09, 2.43it/s]\u001b[A\n 82%|████████▏ | 22003/26685 [3:18:11<34:03, 2.29it/s]\u001b[A\n 82%|████████▏ | 22004/26685 [3:18:11<31:00, 2.52it/s]\u001b[A\n 82%|████████▏ | 22005/26685 [3:18:12<31:39, 2.46it/s]\u001b[A\n 82%|████████▏ | 22006/26685 [3:18:12<28:44, 2.71it/s]\u001b[A\n 82%|████████▏ | 22007/26685 [3:18:13<41:20, 1.89it/s]\u001b[A\n 82%|████████▏ | 22008/26685 [3:18:13<36:50, 2.12it/s]\u001b[A\n 82%|████████▏ | 22009/26685 [3:18:14<33:00, 2.36it/s]\u001b[A\n 82%|████████▏ | 22010/26685 [3:18:15<44:40, 1.74it/s]\u001b[A\n 82%|████████▏ | 22011/26685 [3:18:15<37:10, 2.10it/s]\u001b[A\n 82%|████████▏ | 22012/26685 [3:18:15<34:38, 2.25it/s]\u001b[A\n 82%|████████▏ | 22013/26685 [3:18:16<35:53, 2.17it/s]\u001b[A\n 82%|████████▏ | 22014/26685 [3:18:17<45:09, 1.72it/s]\u001b[A\n 82%|████████▏ | 22015/26685 [3:18:18<1:02:35, 1.24it/s]\u001b[A\n 83%|████████▎ | 22016/26685 [3:18:18<50:10, 1.55it/s] \u001b[A\n 83%|████████▎ | 22017/26685 [3:18:19<43:38, 1.78it/s]\u001b[A\n 83%|████████▎ | 22018/26685 [3:18:19<43:04, 1.81it/s]\u001b[A\n 83%|████████▎ | 22019/26685 [3:18:20<39:20, 1.98it/s]\u001b[A\n 83%|████████▎ | 22020/26685 [3:18:20<35:43, 2.18it/s]\u001b[A\n 83%|████████▎ | 22021/26685 [3:18:20<37:46, 2.06it/s]\u001b[A\n 83%|████████▎ | 22022/26685 [3:18:21<36:20, 2.14it/s]\u001b[A\n 83%|████████▎ | 22023/26685 [3:18:22<45:35, 1.70it/s]\u001b[A\n 83%|████████▎ | 22024/26685 [3:18:22<40:19, 1.93it/s]\u001b[A\n 83%|████████▎ | 22025/26685 [3:18:22<35:50, 2.17it/s]\u001b[A\n 83%|████████▎ | 22026/26685 [3:18:23<33:15, 2.33it/s]\u001b[A\n 83%|████████▎ | 22027/26685 [3:18:24<43:33, 1.78it/s]\u001b[A\n 83%|████████▎ | 22028/26685 [3:18:24<49:30, 1.57it/s]\u001b[A\n 83%|████████▎ | 22029/26685 [3:18:25<43:08, 1.80it/s]\u001b[A\n 83%|████████▎ | 22030/26685 [3:18:25<39:00, 1.99it/s]\u001b[A\n 83%|████████▎ | 22031/26685 [3:18:26<46:45, 1.66it/s]\u001b[A\n 83%|████████▎ | 22032/26685 [3:18:26<40:24, 1.92it/s]\u001b[A\n 83%|████████▎ | 22033/26685 [3:18:27<37:09, 2.09it/s]\u001b[A\n 83%|████████▎ | 22034/26685 [3:18:27<35:19, 2.19it/s]\u001b[A\n 83%|████████▎ | 22035/26685 [3:18:27<31:04, 2.49it/s]\u001b[A\n 83%|████████▎ | 22036/26685 [3:18:28<32:03, 2.42it/s]\u001b[A\n 83%|████████▎ | 22037/26685 [3:18:28<30:27, 2.54it/s]\u001b[A\n 83%|████████▎ | 22038/26685 [3:18:29<29:41, 2.61it/s]\u001b[A\n 83%|████████▎ | 22039/26685 [3:18:29<30:44, 2.52it/s]\u001b[A\n 83%|████████▎ | 22040/26685 [3:18:30<40:53, 1.89it/s]\u001b[A\n 83%|████████▎ | 22041/26685 [3:18:30<38:59, 1.98it/s]\u001b[A\n 83%|████████▎ | 22042/26685 [3:18:31<34:22, 2.25it/s]\u001b[A\n 83%|████████▎ | 22043/26685 [3:18:31<31:44, 2.44it/s]\u001b[A\n 83%|████████▎ | 22044/26685 [3:18:32<42:13, 1.83it/s]\u001b[A\n 83%|████████▎ | 22045/26685 [3:18:32<41:31, 1.86it/s]\u001b[A\n 83%|████████▎ | 22046/26685 [3:18:33<44:31, 1.74it/s]\u001b[A\n 83%|████████▎ | 22047/26685 [3:18:34<50:20, 1.54it/s]\u001b[A\n 83%|████████▎ | 22048/26685 [3:18:35<54:14, 1.42it/s]\u001b[A\n 
83%|████████▎ | 22049/26685 [3:18:35<47:58, 1.61it/s]\u001b[A\n 83%|████████▎ | 22050/26685 [3:18:35<44:04, 1.75it/s]\u001b[A\n 83%|████████▎ | 22051/26685 [3:18:36<38:52, 1.99it/s]\u001b[A\n 83%|████████▎ | 22052/26685 [3:18:36<35:28, 2.18it/s]\u001b[A\n 83%|████████▎ | 22053/26685 [3:18:37<35:36, 2.17it/s]\u001b[A\n 83%|████████▎ | 22054/26685 [3:18:37<31:41, 2.44it/s]\u001b[A\n 83%|████████▎ | 22055/26685 [3:18:38<40:51, 1.89it/s]\u001b[A\n 83%|████████▎ | 22056/26685 [3:18:38<36:41, 2.10it/s]\u001b[A\n 83%|████████▎ | 22057/26685 [3:18:38<34:23, 2.24it/s]\u001b[A\n 83%|████████▎ | 22058/26685 [3:18:39<30:09, 2.56it/s]\u001b[A\n 83%|████████▎ | 22059/26685 [3:18:39<27:52, 2.77it/s]\u001b[A\n 83%|████████▎ | 22060/26685 [3:18:39<25:35, 3.01it/s]\u001b[A\n 83%|████████▎ | 22061/26685 [3:18:40<32:01, 2.41it/s]\u001b[A\n 83%|████████▎ | 22062/26685 [3:18:41<42:35, 1.81it/s]\u001b[A\n 83%|████████▎ | 22063/26685 [3:18:41<38:55, 1.98it/s]\u001b[A\n 83%|████████▎ | 22064/26685 [3:18:41<33:48, 2.28it/s]\u001b[A\n 83%|████████▎ | 22065/26685 [3:18:42<33:01, 2.33it/s]\u001b[A\n 83%|████████▎ | 22066/26685 [3:18:42<33:00, 2.33it/s]\u001b[A\n 83%|████████▎ | 22067/26685 [3:18:43<28:48, 2.67it/s]\u001b[A\n 83%|████████▎ | 22068/26685 [3:18:43<25:50, 2.98it/s]\u001b[A\n 83%|████████▎ | 22069/26685 [3:18:43<27:08, 2.83it/s]\u001b[A\n 83%|████████▎ | 22070/26685 [3:18:44<27:30, 2.80it/s]\u001b[A\n 83%|████████▎ | 22071/26685 [3:18:44<25:36, 3.00it/s]\u001b[A\n 83%|████████▎ | 22072/26685 [3:18:44<23:59, 3.21it/s]\u001b[A\n 83%|████████▎ | 22073/26685 [3:18:45<30:00, 2.56it/s]\u001b[A\n 83%|████████▎ | 22074/26685 [3:18:45<29:18, 2.62it/s]\u001b[A\n 83%|████████▎ | 22075/26685 [3:18:46<33:42, 2.28it/s]\u001b[A\n 83%|████████▎ | 22076/26685 [3:18:46<35:02, 2.19it/s]\u001b[A\n 83%|████████▎ | 22077/26685 [3:18:47<38:15, 2.01it/s]\u001b[A\n 83%|████████▎ | 22078/26685 [3:18:47<40:45, 1.88it/s]\u001b[A\n 83%|████████▎ | 22079/26685 [3:18:48<37:49, 2.03it/s]\u001b[A\n 83%|████████▎ | 22080/26685 [3:18:48<37:38, 2.04it/s]\u001b[A\n 83%|████████▎ | 22081/26685 [3:18:48<33:21, 2.30it/s]\u001b[A\n 83%|████████▎ | 22082/26685 [3:18:49<28:51, 2.66it/s]\u001b[A\n 83%|████████▎ | 22083/26685 [3:18:49<33:08, 2.31it/s]\u001b[A\n 83%|████████▎ | 22084/26685 [3:18:50<37:28, 2.05it/s]\u001b[A\n 83%|████████▎ | 22085/26685 [3:18:51<44:57, 1.71it/s]\u001b[A\n 83%|████████▎ | 22086/26685 [3:18:51<37:58, 2.02it/s]\u001b[A\n 83%|████████▎ | 22087/26685 [3:18:51<33:40, 2.28it/s]\u001b[A\n 83%|████████▎ | 22088/26685 [3:18:52<33:37, 2.28it/s]\u001b[A\n 83%|████████▎ | 22089/26685 [3:18:52<29:37, 2.59it/s]\u001b[A\n 83%|████████▎ | 22090/26685 [3:18:52<27:48, 2.75it/s]\u001b[A\n 83%|████████▎ | 22091/26685 [3:18:53<26:11, 2.92it/s]\u001b[A\n 83%|████████▎ | 22092/26685 [3:18:53<24:56, 3.07it/s]\u001b[A\n 83%|████████▎ | 22093/26685 [3:18:53<25:25, 3.01it/s]\u001b[A\n 83%|████████▎ | 22094/26685 [3:18:54<36:29, 2.10it/s]\u001b[A\n 83%|████████▎ | 22095/26685 [3:18:54<32:01, 2.39it/s]\u001b[A\n 83%|████████▎ | 22096/26685 [3:18:55<29:16, 2.61it/s]\u001b[A\n 83%|████████▎ | 22097/26685 [3:18:55<28:04, 2.72it/s]\u001b[A\n 83%|████████▎ | 22098/26685 [3:18:55<28:44, 2.66it/s]\u001b[A\n 83%|████████▎ | 22099/26685 [3:18:56<26:33, 2.88it/s]\u001b[A\n 83%|████████▎ | 22100/26685 [3:18:56<31:52, 2.40it/s]\u001b[A\n 83%|████████▎ | 22101/26685 [3:18:57<41:41, 1.83it/s]\u001b[A\n 83%|████████▎ | 22102/26685 [3:18:58<48:12, 1.58it/s]\u001b[A\n 83%|████████▎ | 22103/26685 [3:18:58<42:39, 1.79it/s]\u001b[A\n 83%|████████▎ | 22104/26685 
[3:18:59<37:05, 2.06it/s]\u001b[A\n 83%|████████▎ | 22105/26685 [3:19:00<47:12, 1.62it/s]\u001b[A\n 83%|████████▎ | 22106/26685 [3:19:00<43:04, 1.77it/s]\u001b[A\n 83%|████████▎ | 22107/26685 [3:19:00<38:33, 1.98it/s]\u001b[A\n 83%|████████▎ | 22108/26685 [3:19:01<33:18, 2.29it/s]\u001b[A\n 83%|████████▎ | 22109/26685 [3:19:01<29:58, 2.54it/s]\u001b[A\n 83%|████████▎ | 22110/26685 [3:19:01<27:15, 2.80it/s]\u001b[A\n 83%|████████▎ | 22111/26685 [3:19:02<32:00, 2.38it/s]\u001b[A\n 83%|████████▎ | 22112/26685 [3:19:02<28:35, 2.67it/s]\u001b[A\n 83%|████████▎ | 22113/26685 [3:19:02<26:11, 2.91it/s]\u001b[A\n 83%|████████▎ | 22114/26685 [3:19:03<25:45, 2.96it/s]\u001b[A\n 83%|████████▎ | 22115/26685 [3:19:03<26:12, 2.91it/s]\u001b[A\n 83%|████████▎ | 22116/26685 [3:19:03<24:06, 3.16it/s]\u001b[A\n 83%|████████▎ | 22117/26685 [3:19:03<22:46, 3.34it/s]\u001b[A\n 83%|████████▎ | 22118/26685 [3:19:04<35:33, 2.14it/s]\u001b[A\n 83%|████████▎ | 22119/26685 [3:19:05<34:31, 2.20it/s]\u001b[A\n 83%|████████▎ | 22120/26685 [3:19:05<30:55, 2.46it/s]\u001b[A\n 83%|████████▎ | 22121/26685 [3:19:05<30:21, 2.51it/s]\u001b[A\n 83%|████████▎ | 22122/26685 [3:19:06<30:23, 2.50it/s]\u001b[A\n 83%|████████▎ | 22123/26685 [3:19:06<32:02, 2.37it/s]\u001b[A\n 83%|████████▎ | 22124/26685 [3:19:07<30:01, 2.53it/s]\u001b[A\n 83%|████████▎ | 22125/26685 [3:19:07<34:25, 2.21it/s]\u001b[A\n 83%|████████▎ | 22126/26685 [3:19:08<33:07, 2.29it/s]\u001b[A\n 83%|████████▎ | 22127/26685 [3:19:08<29:02, 2.62it/s]\u001b[A\n 83%|████████▎ | 22128/26685 [3:19:08<27:15, 2.79it/s]\u001b[A\n 83%|████████▎ | 22129/26685 [3:19:09<39:10, 1.94it/s]\u001b[A\n 83%|████████▎ | 22130/26685 [3:19:10<38:39, 1.96it/s]\u001b[A\n 83%|████████▎ | 22131/26685 [3:19:10<34:04, 2.23it/s]\u001b[A\n 83%|████████▎ | 22132/26685 [3:19:10<35:17, 2.15it/s]\u001b[A\n 83%|████████▎ | 22133/26685 [3:19:11<30:31, 2.49it/s]\u001b[A\n 83%|████████▎ | 22134/26685 [3:19:11<29:38, 2.56it/s]\u001b[A\n 83%|████████▎ | 22135/26685 [3:19:11<27:24, 2.77it/s]\u001b[A\n 83%|████████▎ | 22136/26685 [3:19:12<32:37, 2.32it/s]\u001b[A\n 83%|████████▎ | 22137/26685 [3:19:13<41:46, 1.81it/s]\u001b[A\n 83%|████████▎ | 22138/26685 [3:19:14<52:00, 1.46it/s]\u001b[A\n 83%|████████▎ | 22139/26685 [3:19:14<43:00, 1.76it/s]\u001b[A\n 83%|████████▎ | 22140/26685 [3:19:15<50:30, 1.50it/s]\u001b[A\n 83%|████████▎ | 22141/26685 [3:19:15<42:06, 1.80it/s]\u001b[A\n 83%|████████▎ | 22142/26685 [3:19:16<36:31, 2.07it/s]\u001b[A\n 83%|████████▎ | 22143/26685 [3:19:16<33:35, 2.25it/s]\u001b[A\n 83%|████████▎ | 22144/26685 [3:19:16<31:09, 2.43it/s]\u001b[A\n 83%|████████▎ | 22145/26685 [3:19:17<28:39, 2.64it/s]\u001b[A\n 83%|████████▎ | 22146/26685 [3:19:17<38:44, 1.95it/s]\u001b[A\n 83%|████████▎ | 22147/26685 [3:19:18<40:01, 1.89it/s]\u001b[A\n 83%|████████▎ | 22148/26685 [3:19:18<34:11, 2.21it/s]\u001b[A\n 83%|████████▎ | 22149/26685 [3:19:19<36:14, 2.09it/s]\u001b[A\n 83%|████████▎ | 22150/26685 [3:19:19<32:57, 2.29it/s]\u001b[A\n 83%|████████▎ | 22151/26685 [3:19:19<29:41, 2.54it/s]\u001b[A\n 83%|████████▎ | 22152/26685 [3:19:20<27:19, 2.76it/s]\u001b[A\n 83%|████████▎ | 22153/26685 [3:19:20<30:14, 2.50it/s]\u001b[A\n 83%|████████▎ | 22154/26685 [3:19:20<29:07, 2.59it/s]\u001b[A\n 83%|████████▎ | 22155/26685 [3:19:21<28:44, 2.63it/s]\u001b[A\n 83%|████████▎ | 22156/26685 [3:19:21<27:37, 2.73it/s]\u001b[A\n 83%|████████▎ | 22157/26685 [3:19:22<27:20, 2.76it/s]\u001b[A\n 83%|████████▎ | 22158/26685 [3:19:22<26:27, 2.85it/s]\u001b[A\n 83%|████████▎ | 22159/26685 [3:19:22<24:48, 
3.04it/s]\u001b[A\n 83%|████████▎ | 22160/26685 [3:19:22<25:26, 2.96it/s]\u001b[A\n 83%|████████▎ | 22161/26685 [3:19:23<27:45, 2.72it/s]\u001b[A\n 83%|████████▎ | 22162/26685 [3:19:23<25:30, 2.96it/s]\u001b[A\n 83%|████████▎ | 22163/26685 [3:19:25<54:42, 1.38it/s]\u001b[A\n 83%|████████▎ | 22164/26685 [3:19:25<46:59, 1.60it/s]\u001b[A\n 83%|████████▎ | 22165/26685 [3:19:26<45:46, 1.65it/s]\u001b[A\n 83%|████████▎ | 22166/26685 [3:19:26<40:11, 1.87it/s]\u001b[A\n 83%|████████▎ | 22167/26685 [3:19:26<33:50, 2.23it/s]\u001b[A\n 83%|████████▎ | 22168/26685 [3:19:27<32:30, 2.32it/s]\u001b[A\n 83%|████████▎ | 22169/26685 [3:19:27<30:43, 2.45it/s]\u001b[A\n 83%|████████▎ | 22170/26685 [3:19:28<30:08, 2.50it/s]\u001b[A\n 83%|████████▎ | 22171/26685 [3:19:28<36:48, 2.04it/s]\u001b[A\n 83%|████████▎ | 22172/26685 [3:19:29<41:27, 1.81it/s]\u001b[A\n 83%|████████▎ | 22173/26685 [3:19:29<36:17, 2.07it/s]\u001b[A\n 83%|████████▎ | 22174/26685 [3:19:30<39:35, 1.90it/s]\u001b[A\n 83%|████████▎ | 22175/26685 [3:19:30<34:05, 2.21it/s]\u001b[A\n 83%|████████▎ | 22176/26685 [3:19:30<30:36, 2.46it/s]\u001b[A\n 83%|████████▎ | 22177/26685 [3:19:31<39:24, 1.91it/s]\u001b[A\n 83%|████████▎ | 22178/26685 [3:19:32<38:09, 1.97it/s]\u001b[A\n 83%|████████▎ | 22179/26685 [3:19:33<45:27, 1.65it/s]\u001b[A\n 83%|████████▎ | 22180/26685 [3:19:33<37:06, 2.02it/s]\u001b[A\n 83%|████████▎ | 22181/26685 [3:19:33<32:38, 2.30it/s]\u001b[A\n 83%|████████▎ | 22182/26685 [3:19:33<29:52, 2.51it/s]\u001b[A\n 83%|████████▎ | 22183/26685 [3:19:34<28:37, 2.62it/s]\u001b[A\n 83%|████████▎ | 22184/26685 [3:19:35<40:22, 1.86it/s]\u001b[A\n 83%|████████▎ | 22185/26685 [3:19:35<35:36, 2.11it/s]\u001b[A\n 83%|████████▎ | 22186/26685 [3:19:35<29:45, 2.52it/s]\u001b[A\n 83%|████████▎ | 22187/26685 [3:19:35<26:57, 2.78it/s]\u001b[A\n 83%|████████▎ | 22188/26685 [3:19:36<27:22, 2.74it/s]\u001b[A\n 83%|████████▎ | 22189/26685 [3:19:36<25:40, 2.92it/s]\u001b[A\n 83%|████████▎ | 22190/26685 [3:19:36<25:32, 2.93it/s]\u001b[A\n 83%|████████▎ | 22191/26685 [3:19:37<24:35, 3.05it/s]\u001b[A\n 83%|████████▎ | 22192/26685 [3:19:38<35:34, 2.11it/s]\u001b[A\n 83%|████████▎ | 22193/26685 [3:19:38<32:18, 2.32it/s]\u001b[A\n 83%|████████▎ | 22194/26685 [3:19:38<28:24, 2.63it/s]\u001b[A\n 83%|████████▎ | 22195/26685 [3:19:38<26:06, 2.87it/s]\u001b[A\n 83%|████████▎ | 22196/26685 [3:19:39<27:35, 2.71it/s]\u001b[A\n 83%|████████▎ | 22197/26685 [3:19:39<27:32, 2.72it/s]\u001b[A\n 83%|████████▎ | 22198/26685 [3:19:39<24:45, 3.02it/s]\u001b[A\n 83%|████████▎ | 22199/26685 [3:19:40<25:31, 2.93it/s]\u001b[A\n 83%|████████▎ | 22200/26685 [3:19:40<24:13, 3.09it/s]\u001b[A\n 83%|████████▎ | 22201/26685 [3:19:40<25:11, 2.97it/s]\u001b[A\n 83%|████████▎ | 22202/26685 [3:19:41<26:18, 2.84it/s]\u001b[A\n 83%|████████▎ | 22203/26685 [3:19:41<27:06, 2.76it/s]\u001b[A\n 83%|████████▎ | 22204/26685 [3:19:42<28:18, 2.64it/s]\u001b[A\n 83%|████████▎ | 22205/26685 [3:19:42<27:22, 2.73it/s]\u001b[A\n 83%|████████▎ | 22206/26685 [3:19:42<25:03, 2.98it/s]\u001b[A\n 83%|████████▎ | 22207/26685 [3:19:43<25:55, 2.88it/s]\u001b[A\n 83%|████████▎ | 22208/26685 [3:19:43<24:48, 3.01it/s]\u001b[A\n 83%|████████▎ | 22209/26685 [3:19:43<27:08, 2.75it/s]\u001b[A\n 83%|████████▎ | 22210/26685 [3:19:44<28:19, 2.63it/s]\u001b[A\n 83%|████████▎ | 22211/26685 [3:19:44<26:06, 2.86it/s]\u001b[A\n 83%|████████▎ | 22212/26685 [3:19:45<29:18, 2.54it/s]\u001b[A\n 83%|████████▎ | 22213/26685 [3:19:45<29:43, 2.51it/s]\u001b[A\n 83%|████████▎ | 22214/26685 [3:19:46<39:58, 1.86it/s]\u001b[A\n 83%|████████▎ 
| 22215/26685 [3:19:46<35:53, 2.08it/s]\u001b[A\n 83%|████████▎ | 22216/26685 [3:19:47<34:46, 2.14it/s]\u001b[A\n 83%|████████▎ | 22217/26685 [3:19:47<42:48, 1.74it/s]\u001b[A\n 83%|████████▎ | 22218/26685 [3:19:48<48:27, 1.54it/s]\u001b[A\n 83%|████████▎ | 22219/26685 [3:19:49<40:40, 1.83it/s]\u001b[A\n 83%|████████▎ | 22220/26685 [3:19:49<34:20, 2.17it/s]\u001b[A\n 83%|████████▎ | 22221/26685 [3:19:50<41:44, 1.78it/s]\u001b[A\n 83%|████████▎ | 22222/26685 [3:19:50<36:23, 2.04it/s]\u001b[A\n 83%|████████▎ | 22223/26685 [3:19:50<32:30, 2.29it/s]\u001b[A\n 83%|████████▎ | 22224/26685 [3:19:51<32:02, 2.32it/s]\u001b[A\n 83%|████████▎ | 22225/26685 [3:19:51<31:27, 2.36it/s]\u001b[A\n 83%|████████▎ | 22226/26685 [3:19:52<30:44, 2.42it/s]\u001b[A\n 83%|████████▎ | 22227/26685 [3:19:52<40:03, 1.86it/s]\u001b[A\n 83%|████████▎ | 22228/26685 [3:19:53<40:24, 1.84it/s]\u001b[A\n 83%|████████▎ | 22229/26685 [3:19:54<46:25, 1.60it/s]\u001b[A\n 83%|████████▎ | 22230/26685 [3:19:54<40:36, 1.83it/s]\u001b[A\n 83%|████████▎ | 22231/26685 [3:19:55<39:29, 1.88it/s]\u001b[A\n 83%|████████▎ | 22232/26685 [3:19:55<36:55, 2.01it/s]\u001b[A\n 83%|████████▎ | 22233/26685 [3:19:56<39:05, 1.90it/s]\u001b[A\n 83%|████████▎ | 22234/26685 [3:19:56<34:36, 2.14it/s]\u001b[A\n 83%|████████▎ | 22235/26685 [3:19:56<31:27, 2.36it/s]\u001b[A\n 83%|████████▎ | 22236/26685 [3:19:57<31:25, 2.36it/s]\u001b[A\n 83%|████████▎ | 22237/26685 [3:19:57<33:45, 2.20it/s]\u001b[A\n 83%|████████▎ | 22238/26685 [3:19:58<36:32, 2.03it/s]\u001b[A\n 83%|████████▎ | 22239/26685 [3:19:59<43:08, 1.72it/s]\u001b[A\n 83%|████████▎ | 22240/26685 [3:19:59<38:08, 1.94it/s]\u001b[A\n 83%|████████▎ | 22241/26685 [3:19:59<35:01, 2.11it/s]\u001b[A\n 83%|████████▎ | 22242/26685 [3:20:00<32:04, 2.31it/s]\u001b[A\n 83%|████████▎ | 22243/26685 [3:20:00<31:24, 2.36it/s]\u001b[A\n 83%|████████▎ | 22244/26685 [3:20:01<34:32, 2.14it/s]\u001b[A\n 83%|████████▎ | 22245/26685 [3:20:01<29:35, 2.50it/s]\u001b[A\n 83%|████████▎ | 22246/26685 [3:20:02<40:43, 1.82it/s]\u001b[A\n 83%|████████▎ | 22247/26685 [3:20:02<36:52, 2.01it/s]\u001b[A\n 83%|████████▎ | 22248/26685 [3:20:02<32:24, 2.28it/s]\u001b[A\n 83%|████████▎ | 22249/26685 [3:20:03<30:02, 2.46it/s]\u001b[A\n 83%|████████▎ | 22250/26685 [3:20:03<27:05, 2.73it/s]\u001b[A\n 83%|████████▎ | 22251/26685 [3:20:03<27:11, 2.72it/s]\u001b[A\n 83%|████████▎ | 22252/26685 [3:20:04<24:57, 2.96it/s]\u001b[A\n 83%|████████▎ | 22253/26685 [3:20:05<36:12, 2.04it/s]\u001b[A\n 83%|████████▎ | 22254/26685 [3:20:05<33:02, 2.24it/s]\u001b[A\n 83%|████████▎ | 22255/26685 [3:20:05<28:45, 2.57it/s]\u001b[A\n 83%|████████▎ | 22256/26685 [3:20:06<32:09, 2.29it/s]\u001b[A\n 83%|████████▎ | 22257/26685 [3:20:06<28:17, 2.61it/s]\u001b[A\n 83%|████████▎ | 22258/26685 [3:20:06<27:42, 2.66it/s]\u001b[A\n 83%|████████▎ | 22259/26685 [3:20:07<27:36, 2.67it/s]\u001b[A\n 83%|████████▎ | 22260/26685 [3:20:07<32:31, 2.27it/s]\u001b[A\n 83%|████████▎ | 22261/26685 [3:20:08<31:16, 2.36it/s]\u001b[A\n 83%|████████▎ | 22262/26685 [3:20:08<28:40, 2.57it/s]\u001b[A\n 83%|████████▎ | 22263/26685 [3:20:08<27:42, 2.66it/s]\u001b[A\n 83%|████████▎ | 22264/26685 [3:20:09<28:54, 2.55it/s]\u001b[A\n 83%|████████▎ | 22265/26685 [3:20:09<33:09, 2.22it/s]\u001b[A\n 83%|████████▎ | 22266/26685 [3:20:10<30:42, 2.40it/s]\u001b[A\n 83%|████████▎ | 22267/26685 [3:20:10<28:30, 2.58it/s]\u001b[A\n 83%|████████▎ | 22268/26685 [3:20:10<27:57, 2.63it/s]\u001b[A\n 83%|████████▎ | 22269/26685 [3:20:11<24:47, 2.97it/s]\u001b[A\n 83%|████████▎ | 22270/26685 [3:20:11<24:47, 
2.97it/s]\u001b[A\n 83%|████████▎ | 22271/26685 [3:20:11<24:44, 2.97it/s]\u001b[A\n 83%|████████▎ | 22272/26685 [3:20:12<23:34, 3.12it/s]\u001b[A\n 83%|████████▎ | 22273/26685 [3:20:12<28:41, 2.56it/s]\u001b[A\n 83%|████████▎ | 22274/26685 [3:20:12<28:31, 2.58it/s]\u001b[A\n 83%|████████▎ | 22275/26685 [3:20:13<28:42, 2.56it/s]\u001b[A\n 83%|████████▎ | 22276/26685 [3:20:13<26:40, 2.75it/s]\u001b[A\n 83%|████████▎ | 22277/26685 [3:20:13<26:33, 2.77it/s]\u001b[A\n 83%|████████▎ | 22278/26685 [3:20:14<25:05, 2.93it/s]\u001b[A\n 83%|████████▎ | 22279/26685 [3:20:14<24:02, 3.06it/s]\u001b[A\n 83%|████████▎ | 22280/26685 [3:20:14<23:22, 3.14it/s]\u001b[A\n 83%|████████▎ | 22281/26685 [3:20:15<21:39, 3.39it/s]\u001b[A\n 84%|████████▎ | 22282/26685 [3:20:15<32:52, 2.23it/s]\u001b[A\n 84%|████████▎ | 22283/26685 [3:20:16<35:04, 2.09it/s]\u001b[A\n 84%|████████▎ | 22284/26685 [3:20:16<35:28, 2.07it/s]\u001b[A\n 84%|████████▎ | 22285/26685 [3:20:17<37:14, 1.97it/s]\u001b[A\n 84%|████████▎ | 22286/26685 [3:20:17<33:53, 2.16it/s]\u001b[A\n 84%|████████▎ | 22287/26685 [3:20:18<31:06, 2.36it/s]\u001b[A\n 84%|████████▎ | 22288/26685 [3:20:18<30:08, 2.43it/s]\u001b[A\n 84%|████████▎ | 22289/26685 [3:20:19<31:14, 2.35it/s]\u001b[A\n 84%|████████▎ | 22290/26685 [3:20:19<29:48, 2.46it/s]\u001b[A\n 84%|████████▎ | 22291/26685 [3:20:19<26:37, 2.75it/s]\u001b[A\n 84%|████████▎ | 22292/26685 [3:20:20<25:40, 2.85it/s]\u001b[A\n 84%|████████▎ | 22293/26685 [3:20:20<30:11, 2.42it/s]\u001b[A\n 84%|████████▎ | 22294/26685 [3:20:20<27:37, 2.65it/s]\u001b[A\n 84%|████████▎ | 22295/26685 [3:20:21<29:47, 2.46it/s]\u001b[A\n 84%|████████▎ | 22296/26685 [3:20:21<31:22, 2.33it/s]\u001b[A\n 84%|████████▎ | 22297/26685 [3:20:22<30:19, 2.41it/s]\u001b[A\n 84%|████████▎ | 22298/26685 [3:20:22<26:57, 2.71it/s]\u001b[A\n 84%|████████▎ | 22299/26685 [3:20:22<24:36, 2.97it/s]\u001b[A\n 84%|████████▎ | 22300/26685 [3:20:23<35:25, 2.06it/s]\u001b[A\n 84%|████████▎ | 22301/26685 [3:20:23<34:11, 2.14it/s]\u001b[A\n 84%|████████▎ | 22302/26685 [3:20:24<30:45, 2.37it/s]\u001b[A\n 84%|████████▎ | 22303/26685 [3:20:24<27:26, 2.66it/s]\u001b[A\n 84%|████████▎ | 22304/26685 [3:20:24<26:16, 2.78it/s]\u001b[A\n 84%|████████▎ | 22305/26685 [3:20:25<26:17, 2.78it/s]\u001b[A\n 84%|████████▎ | 22306/26685 [3:20:25<26:21, 2.77it/s]\u001b[A\n 84%|████████▎ | 22307/26685 [3:20:25<25:22, 2.88it/s]\u001b[A\n 84%|████████▎ | 22308/26685 [3:20:26<23:50, 3.06it/s]\u001b[A\n 84%|████████▎ | 22309/26685 [3:20:26<22:19, 3.27it/s]\u001b[A\n 84%|████████▎ | 22310/26685 [3:20:26<23:18, 3.13it/s]\u001b[A\n 84%|████████▎ | 22311/26685 [3:20:27<33:39, 2.17it/s]\u001b[A\n 84%|████████▎ | 22312/26685 [3:20:27<30:18, 2.41it/s]\u001b[A\n 84%|████████▎ | 22313/26685 [3:20:28<39:19, 1.85it/s]\u001b[A\n 84%|████████▎ | 22314/26685 [3:20:29<35:13, 2.07it/s]\u001b[A\n 84%|████████▎ | 22315/26685 [3:20:29<30:59, 2.35it/s]\u001b[A\n 84%|████████▎ | 22316/26685 [3:20:29<30:42, 2.37it/s]\u001b[A\n 84%|████████▎ | 22317/26685 [3:20:30<31:42, 2.30it/s]\u001b[A\n 84%|████████▎ | 22318/26685 [3:20:30<29:39, 2.45it/s]\u001b[A\n 84%|████████▎ | 22319/26685 [3:20:30<27:52, 2.61it/s]\u001b[A\n 84%|████████▎ | 22320/26685 [3:20:31<31:51, 2.28it/s]\u001b[A\n 84%|████████▎ | 22321/26685 [3:20:31<30:26, 2.39it/s]\u001b[A\n 84%|████████▎ | 22322/26685 [3:20:32<27:34, 2.64it/s]\u001b[A\n 84%|████████▎ | 22323/26685 [3:20:32<36:57, 1.97it/s]\u001b[A\n 84%|████████▎ | 22324/26685 [3:20:33<32:33, 2.23it/s]\u001b[A\n 84%|████████▎ | 22325/26685 [3:20:33<35:00, 2.08it/s]\u001b[A\n 84%|████████▎ 
| 22326/26685 [3:20:34<30:13, 2.40it/s]\u001b[A\n 84%|████████▎ | 22327/26685 [3:20:34<31:04, 2.34it/s]\u001b[A\n 84%|████████▎ | 22328/26685 [3:20:34<27:35, 2.63it/s]\u001b[A\n 84%|████████▎ | 22329/26685 [3:20:35<29:45, 2.44it/s]\u001b[A\n 84%|████████▎ | 22330/26685 [3:20:36<39:44, 1.83it/s]\u001b[A\n 84%|████████▎ | 22331/26685 [3:20:36<34:14, 2.12it/s]\u001b[A\n 84%|████████▎ | 22332/26685 [3:20:37<42:40, 1.70it/s]\u001b[A\n 84%|████████▎ | 22333/26685 [3:20:37<40:44, 1.78it/s]\u001b[A\n 84%|████████▎ | 22334/26685 [3:20:38<36:12, 2.00it/s]\u001b[A\n 84%|████████▎ | 22335/26685 [3:20:38<37:47, 1.92it/s]\u001b[A\n 84%|████████▎ | 22336/26685 [3:20:39<33:20, 2.17it/s]\u001b[A\n 84%|████████▎ | 22337/26685 [3:20:39<30:38, 2.37it/s]\u001b[A\n 84%|████████▎ | 22338/26685 [3:20:39<32:06, 2.26it/s]\u001b[A\n 84%|████████▎ | 22339/26685 [3:20:40<28:02, 2.58it/s]\u001b[A\n 84%|████████▎ | 22340/26685 [3:20:40<26:00, 2.78it/s]\u001b[A\n 84%|████████▎ | 22341/26685 [3:20:40<25:30, 2.84it/s]\u001b[A\n 84%|████████▎ | 22342/26685 [3:20:41<24:01, 3.01it/s]\u001b[A\n 84%|████████▎ | 22343/26685 [3:20:41<24:06, 3.00it/s]\u001b[A\n 84%|████████▎ | 22344/26685 [3:20:41<27:04, 2.67it/s]\u001b[A\n 84%|████████▎ | 22345/26685 [3:20:42<25:34, 2.83it/s]\u001b[A\n 84%|████████▎ | 22346/26685 [3:20:42<26:09, 2.77it/s]\u001b[A\n 84%|████████▎ | 22347/26685 [3:20:42<25:57, 2.79it/s]\u001b[A\n 84%|████████▎ | 22348/26685 [3:20:43<24:27, 2.96it/s]\u001b[A\n 84%|████████▍ | 22349/26685 [3:20:43<24:55, 2.90it/s]\u001b[A\n 84%|████████▍ | 22350/26685 [3:20:43<23:55, 3.02it/s]\u001b[A\n 84%|████████▍ | 22351/26685 [3:20:44<23:38, 3.06it/s]\u001b[A\n 84%|████████▍ | 22352/26685 [3:20:44<23:47, 3.04it/s]\u001b[A\n 84%|████████▍ | 22353/26685 [3:20:44<21:54, 3.29it/s]\u001b[A\n 84%|████████▍ | 22354/26685 [3:20:45<23:07, 3.12it/s]\u001b[A\n 84%|████████▍ | 22355/26685 [3:20:45<21:41, 3.33it/s]\u001b[A\n 84%|████████▍ | 22356/26685 [3:20:45<21:20, 3.38it/s]\u001b[A\n 84%|████████▍ | 22357/26685 [3:20:45<20:42, 3.48it/s]\u001b[A\n 84%|████████▍ | 22358/26685 [3:20:46<20:34, 3.51it/s]\u001b[A\n 84%|████████▍ | 22359/26685 [3:20:46<21:36, 3.34it/s]\u001b[A\n 84%|████████▍ | 22360/26685 [3:20:46<20:56, 3.44it/s]\u001b[A\n 84%|████████▍ | 22361/26685 [3:20:47<22:01, 3.27it/s]\u001b[A\n 84%|████████▍ | 22362/26685 [3:20:47<21:55, 3.29it/s]\u001b[A\n 84%|████████▍ | 22363/26685 [3:20:47<25:05, 2.87it/s]\u001b[A\n 84%|████████▍ | 22364/26685 [3:20:48<24:30, 2.94it/s]\u001b[A\n 84%|████████▍ | 22365/26685 [3:20:48<27:51, 2.58it/s]\u001b[A\n 84%|████████▍ | 22366/26685 [3:20:49<25:52, 2.78it/s]\u001b[A\n 84%|████████▍ | 22367/26685 [3:20:49<25:40, 2.80it/s]\u001b[A\n 84%|████████▍ | 22368/26685 [3:20:49<26:58, 2.67it/s]\u001b[A\n 84%|████████▍ | 22369/26685 [3:20:50<27:37, 2.60it/s]\u001b[A\n 84%|████████▍ | 22370/26685 [3:20:50<27:14, 2.64it/s]\u001b[A\n 84%|████████▍ | 22371/26685 [3:20:50<25:08, 2.86it/s]\u001b[A\n 84%|████████▍ | 22372/26685 [3:20:51<25:37, 2.80it/s]\u001b[A\n 84%|████████▍ | 22373/26685 [3:20:51<25:31, 2.82it/s]\u001b[A\n 84%|████████▍ | 22374/26685 [3:20:52<35:42, 2.01it/s]\u001b[A\n 84%|████████▍ | 22375/26685 [3:20:52<32:36, 2.20it/s]\u001b[A\n 84%|████████▍ | 22376/26685 [3:20:53<29:33, 2.43it/s]\u001b[A\n 84%|████████▍ | 22377/26685 [3:20:53<30:06, 2.38it/s]\u001b[A\n 84%|████████▍ | 22378/26685 [3:20:53<28:29, 2.52it/s]\u001b[A\n 84%|████████▍ | 22379/26685 [3:20:54<32:48, 2.19it/s]\u001b[A\n 84%|████████▍ | 22380/26685 [3:20:54<31:00, 2.31it/s]\u001b[A\n 84%|████████▍ | 22381/26685 [3:20:55<34:57, 
2.05it/s]\u001b[A\n 84%|████████▍ | 22382/26685 [3:20:55<31:00, 2.31it/s]\u001b[A\n 84%|████████▍ | 22383/26685 [3:20:56<28:23, 2.52it/s]\u001b[A\n 84%|████████▍ | 22384/26685 [3:20:56<38:09, 1.88it/s]\u001b[A\n 84%|████████▍ | 22385/26685 [3:20:57<37:57, 1.89it/s]\u001b[A\n 84%|████████▍ | 22386/26685 [3:20:57<36:01, 1.99it/s]\u001b[A\n 84%|████████▍ | 22387/26685 [3:20:58<33:45, 2.12it/s]\u001b[A\n 84%|████████▍ | 22388/26685 [3:20:58<29:49, 2.40it/s]\u001b[A\n 84%|████████▍ | 22389/26685 [3:20:58<29:02, 2.47it/s]\u001b[A\n 84%|████████▍ | 22390/26685 [3:20:59<29:46, 2.40it/s]\u001b[A\n 84%|████████▍ | 22391/26685 [3:21:00<34:58, 2.05it/s]\u001b[A\n 84%|████████▍ | 22392/26685 [3:21:00<33:07, 2.16it/s]\u001b[A\n 84%|████████▍ | 22393/26685 [3:21:00<30:47, 2.32it/s]\u001b[A\n 84%|████████▍ | 22394/26685 [3:21:01<33:54, 2.11it/s]\u001b[A\n 84%|████████▍ | 22395/26685 [3:21:01<34:26, 2.08it/s]\u001b[A\n 84%|████████▍ | 22396/26685 [3:21:02<31:28, 2.27it/s]\u001b[A\n 84%|████████▍ | 22397/26685 [3:21:02<34:00, 2.10it/s]\u001b[A\n 84%|████████▍ | 22398/26685 [3:21:03<39:40, 1.80it/s]\u001b[A\n 84%|████████▍ | 22399/26685 [3:21:04<46:32, 1.53it/s]\u001b[A\n 84%|████████▍ | 22400/26685 [3:21:04<42:10, 1.69it/s]\u001b[A\n 84%|████████▍ | 22401/26685 [3:21:05<47:18, 1.51it/s]\u001b[A\n 84%|████████▍ | 22402/26685 [3:21:06<41:42, 1.71it/s]\u001b[A\n 84%|████████▍ | 22403/26685 [3:21:06<36:18, 1.97it/s]\u001b[A\n 84%|████████▍ | 22404/26685 [3:21:06<32:20, 2.21it/s]\u001b[A\n 84%|████████▍ | 22405/26685 [3:21:07<31:35, 2.26it/s]\u001b[A\n 84%|████████▍ | 22406/26685 [3:21:07<29:54, 2.38it/s]\u001b[A\n 84%|████████▍ | 22407/26685 [3:21:08<38:51, 1.83it/s]\u001b[A\n 84%|████████▍ | 22408/26685 [3:21:09<44:57, 1.59it/s]\u001b[A\n 84%|████████▍ | 22409/26685 [3:21:09<37:41, 1.89it/s]\u001b[A\n 84%|████████▍ | 22410/26685 [3:21:10<39:30, 1.80it/s]\u001b[A\n 84%|████████▍ | 22411/26685 [3:21:10<37:48, 1.88it/s]\u001b[A\n 84%|████████▍ | 22412/26685 [3:21:11<37:57, 1.88it/s]\u001b[A\n 84%|████████▍ | 22413/26685 [3:21:11<32:51, 2.17it/s]\u001b[A\n 84%|████████▍ | 22414/26685 [3:21:12<40:43, 1.75it/s]\u001b[A\n 84%|████████▍ | 22415/26685 [3:21:12<40:42, 1.75it/s]\u001b[A\n 84%|████████▍ | 22416/26685 [3:21:13<45:57, 1.55it/s]\u001b[A\n 84%|████████▍ | 22417/26685 [3:21:13<40:31, 1.76it/s]\u001b[A\n 84%|████████▍ | 22418/26685 [3:21:14<33:49, 2.10it/s]\u001b[A\n 84%|████████▍ | 22419/26685 [3:21:14<32:11, 2.21it/s]\u001b[A\n 84%|████████▍ | 22420/26685 [3:21:15<30:13, 2.35it/s]\u001b[A\n 84%|████████▍ | 22421/26685 [3:21:15<27:32, 2.58it/s]\u001b[A\n 84%|████████▍ | 22422/26685 [3:21:15<29:12, 2.43it/s]\u001b[A\n 84%|████████▍ | 22423/26685 [3:21:16<38:26, 1.85it/s]\u001b[A\n 84%|████████▍ | 22424/26685 [3:21:17<36:04, 1.97it/s]\u001b[A\n 84%|████████▍ | 22425/26685 [3:21:17<32:23, 2.19it/s]\u001b[A\n 84%|████████▍ | 22426/26685 [3:21:17<29:49, 2.38it/s]\u001b[A\n 84%|████████▍ | 22427/26685 [3:21:18<27:31, 2.58it/s]\u001b[A\n 84%|████████▍ | 22428/26685 [3:21:18<36:20, 1.95it/s]\u001b[A\n 84%|████████▍ | 22429/26685 [3:21:19<33:04, 2.14it/s]\u001b[A\n 84%|████████▍ | 22430/26685 [3:21:20<40:48, 1.74it/s]\u001b[A\n 84%|████████▍ | 22431/26685 [3:21:20<40:44, 1.74it/s]\u001b[A\n 84%|████████▍ | 22432/26685 [3:21:21<37:30, 1.89it/s]\u001b[A\n 84%|████████▍ | 22433/26685 [3:21:21<36:19, 1.95it/s]\u001b[A\n 84%|████████▍ | 22434/26685 [3:21:22<42:01, 1.69it/s]\u001b[A\n 84%|████████▍ | 22435/26685 [3:21:22<33:59, 2.08it/s]\u001b[A\n 84%|████████▍ | 22436/26685 [3:21:22<33:18, 2.13it/s]\u001b[A\n 84%|████████▍ 
| 22437/26685 [3:21:23<31:11, 2.27it/s]\u001b[A\n 84%|████████▍ | 22438/26685 [3:21:23<29:30, 2.40it/s]\u001b[A\n 84%|████████▍ | 22439/26685 [3:21:23<26:16, 2.69it/s]\u001b[A\n 84%|████████▍ | 22440/26685 [3:21:24<28:19, 2.50it/s]\u001b[A\n 84%|████████▍ | 22441/26685 [3:21:24<26:06, 2.71it/s]\u001b[A\n 84%|████████▍ | 22442/26685 [3:21:25<35:44, 1.98it/s]\u001b[A\n 84%|████████▍ | 22443/26685 [3:21:25<32:13, 2.19it/s]\u001b[A\n 84%|████████▍ | 22444/26685 [3:21:26<28:40, 2.47it/s]\u001b[A\n 84%|████████▍ | 22445/26685 [3:21:26<27:08, 2.60it/s]\u001b[A\n 84%|████████▍ | 22446/26685 [3:21:26<25:07, 2.81it/s]\u001b[A\n 84%|████████▍ | 22447/26685 [3:21:27<22:55, 3.08it/s]\u001b[A\n 84%|████████▍ | 22448/26685 [3:21:27<22:33, 3.13it/s]\u001b[A\n 84%|████████▍ | 22449/26685 [3:21:27<25:13, 2.80it/s]\u001b[A\n 84%|████████▍ | 22450/26685 [3:21:28<23:42, 2.98it/s]\u001b[A\n 84%|████████▍ | 22451/26685 [3:21:28<23:14, 3.04it/s]\u001b[A\n 84%|████████▍ | 22452/26685 [3:21:28<25:15, 2.79it/s]\u001b[A\n 84%|████████▍ | 22453/26685 [3:21:29<30:10, 2.34it/s]\u001b[A\n 84%|████████▍ | 22454/26685 [3:21:30<38:11, 1.85it/s]\u001b[A\n 84%|████████▍ | 22455/26685 [3:21:30<35:16, 2.00it/s]\u001b[A\n 84%|████████▍ | 22456/26685 [3:21:30<32:48, 2.15it/s]\u001b[A\n 84%|████████▍ | 22457/26685 [3:21:31<31:48, 2.22it/s]\u001b[A\n 84%|████████▍ | 22458/26685 [3:21:31<28:10, 2.50it/s]\u001b[A\n 84%|████████▍ | 22459/26685 [3:21:32<29:06, 2.42it/s]\u001b[A\n 84%|████████▍ | 22460/26685 [3:21:32<28:20, 2.48it/s]\u001b[A\n 84%|████████▍ | 22461/26685 [3:21:32<26:01, 2.71it/s]\u001b[A\n 84%|████████▍ | 22462/26685 [3:21:33<24:38, 2.86it/s]\u001b[A\n 84%|████████▍ | 22463/26685 [3:21:33<27:19, 2.57it/s]\u001b[A\n 84%|████████▍ | 22464/26685 [3:21:33<25:54, 2.72it/s]\u001b[A\n 84%|████████▍ | 22465/26685 [3:21:34<24:29, 2.87it/s]\u001b[A\n 84%|████████▍ | 22466/26685 [3:21:34<24:28, 2.87it/s]\u001b[A\n 84%|████████▍ | 22467/26685 [3:21:35<27:05, 2.59it/s]\u001b[A\n 84%|████████▍ | 22468/26685 [3:21:35<26:23, 2.66it/s]\u001b[A\n 84%|████████▍ | 22469/26685 [3:21:35<24:47, 2.83it/s]\u001b[A\n 84%|████████▍ | 22470/26685 [3:21:35<23:29, 2.99it/s]\u001b[A\n 84%|████████▍ | 22471/26685 [3:21:36<22:08, 3.17it/s]\u001b[A\n 84%|████████▍ | 22472/26685 [3:21:36<23:27, 2.99it/s]\u001b[A\n 84%|████████▍ | 22473/26685 [3:21:37<27:12, 2.58it/s]\u001b[A\n 84%|████████▍ | 22474/26685 [3:21:37<26:05, 2.69it/s]\u001b[A\n 84%|████████▍ | 22475/26685 [3:21:37<27:15, 2.57it/s]\u001b[A\n 84%|████████▍ | 22476/26685 [3:21:38<25:11, 2.79it/s]\u001b[A\n 84%|████████▍ | 22477/26685 [3:21:38<27:48, 2.52it/s]\u001b[A\n 84%|████████▍ | 22478/26685 [3:21:39<28:01, 2.50it/s]\u001b[A\n 84%|████████▍ | 22479/26685 [3:21:39<27:56, 2.51it/s]\u001b[A\n 84%|████████▍ | 22480/26685 [3:21:39<25:03, 2.80it/s]\u001b[A\n 84%|████████▍ | 22481/26685 [3:21:39<22:57, 3.05it/s]\u001b[A\n 84%|████████▍ | 22482/26685 [3:21:40<22:45, 3.08it/s]\u001b[A\n 84%|████████▍ | 22483/26685 [3:21:40<23:33, 2.97it/s]\u001b[A\n 84%|████████▍ | 22484/26685 [3:21:41<23:41, 2.96it/s]\u001b[A\n 84%|████████▍ | 22485/26685 [3:21:41<23:36, 2.97it/s]\u001b[A\n 84%|████████▍ | 22486/26685 [3:21:41<22:15, 3.14it/s]\u001b[A\n 84%|████████▍ | 22487/26685 [3:21:41<22:35, 3.10it/s]\u001b[A\n 84%|████████▍ | 22488/26685 [3:21:42<24:55, 2.81it/s]\u001b[A\n 84%|████████▍ | 22489/26685 [3:21:42<28:05, 2.49it/s]\u001b[A\n 84%|████████▍ | 22490/26685 [3:21:43<28:17, 2.47it/s]\u001b[A\n 84%|████████▍ | 22491/26685 [3:21:43<26:09, 2.67it/s]\u001b[A\n 84%|████████▍ | 22492/26685 [3:21:44<31:20, 
2.23it/s]\u001b[A\n 84%|████████▍ | 22493/26685 [3:21:44<28:01, 2.49it/s]\u001b[A\n 84%|████████▍ | 22494/26685 [3:21:44<25:11, 2.77it/s]\u001b[A\n 84%|████████▍ | 22495/26685 [3:21:45<24:37, 2.83it/s]\u001b[A\n 84%|████████▍ | 22496/26685 [3:21:45<24:29, 2.85it/s]\u001b[A\n 84%|████████▍ | 22497/26685 [3:21:45<23:09, 3.01it/s]\u001b[A\n 84%|████████▍ | 22498/26685 [3:21:46<32:48, 2.13it/s]\u001b[A\n 84%|████████▍ | 22499/26685 [3:21:46<29:43, 2.35it/s]\u001b[A\n 84%|████████▍ | 22500/26685 [3:21:47<33:42, 2.07it/s]\u001b[A\n 84%|████████▍ | 22501/26685 [3:21:48<34:14, 2.04it/s]\u001b[A\n 84%|████████▍ | 22502/26685 [3:21:48<29:16, 2.38it/s]\u001b[A\n 84%|████████▍ | 22503/26685 [3:21:48<26:11, 2.66it/s]\u001b[A\n 84%|████████▍ | 22504/26685 [3:21:48<25:46, 2.70it/s]\u001b[A\n 84%|████████▍ | 22505/26685 [3:21:49<25:08, 2.77it/s]\u001b[A\n 84%|████████▍ | 22506/26685 [3:21:50<36:29, 1.91it/s]\u001b[A\n 84%|████████▍ | 22507/26685 [3:21:50<34:30, 2.02it/s]\u001b[A\n 84%|████████▍ | 22508/26685 [3:21:50<30:18, 2.30it/s]\u001b[A\n 84%|████████▍ | 22509/26685 [3:21:51<35:23, 1.97it/s]\u001b[A\n 84%|████████▍ | 22510/26685 [3:21:52<41:52, 1.66it/s]\u001b[A\n 84%|████████▍ | 22511/26685 [3:21:52<35:02, 1.99it/s]\u001b[A\n 84%|████████▍ | 22512/26685 [3:21:53<34:55, 1.99it/s]\u001b[A\n 84%|████████▍ | 22513/26685 [3:21:53<34:08, 2.04it/s]\u001b[A\n 84%|████████▍ | 22514/26685 [3:21:54<36:55, 1.88it/s]\u001b[A\n 84%|████████▍ | 22515/26685 [3:21:54<32:59, 2.11it/s]\u001b[A\n 84%|████████▍ | 22516/26685 [3:21:55<32:24, 2.14it/s]\u001b[A\n 84%|████████▍ | 22517/26685 [3:21:55<27:50, 2.49it/s]\u001b[A\n 84%|████████▍ | 22518/26685 [3:21:55<26:38, 2.61it/s]\u001b[A\n 84%|████████▍ | 22519/26685 [3:21:55<24:46, 2.80it/s]\u001b[A\n 84%|████████▍ | 22520/26685 [3:21:56<25:11, 2.76it/s]\u001b[A\n 84%|████████▍ | 22521/26685 [3:21:56<23:15, 2.98it/s]\u001b[A\n 84%|████████▍ | 22522/26685 [3:21:56<22:12, 3.12it/s]\u001b[A\n 84%|████████▍ | 22523/26685 [3:21:57<22:40, 3.06it/s]\u001b[A\n 84%|████████▍ | 22524/26685 [3:21:57<23:27, 2.96it/s]\u001b[A\n 84%|████████▍ | 22525/26685 [3:21:57<24:04, 2.88it/s]\u001b[A\n 84%|████████▍ | 22526/26685 [3:21:58<22:31, 3.08it/s]\u001b[A\n 84%|████████▍ | 22527/26685 [3:21:58<21:28, 3.23it/s]\u001b[A\n 84%|████████▍ | 22528/26685 [3:21:59<31:46, 2.18it/s]\u001b[A\n 84%|████████▍ | 22529/26685 [3:21:59<32:26, 2.14it/s]\u001b[A\n 84%|████████▍ | 22530/26685 [3:22:00<42:30, 1.63it/s]\u001b[A\n 84%|████████▍ | 22531/26685 [3:22:01<36:51, 1.88it/s]\u001b[A\n 84%|████████▍ | 22532/26685 [3:22:01<32:25, 2.13it/s]\u001b[A\n 84%|████████▍ | 22533/26685 [3:22:01<30:15, 2.29it/s]\u001b[A\n 84%|████████▍ | 22534/26685 [3:22:01<26:12, 2.64it/s]\u001b[A\n 84%|████████▍ | 22535/26685 [3:22:02<31:39, 2.18it/s]\u001b[A\n 84%|████████▍ | 22536/26685 [3:22:03<30:27, 2.27it/s]\u001b[A\n 84%|████████▍ | 22537/26685 [3:22:03<29:29, 2.34it/s]\u001b[A\n 84%|████████▍ | 22538/26685 [3:22:03<27:41, 2.50it/s]\u001b[A\n 84%|████████▍ | 22539/26685 [3:22:04<25:05, 2.75it/s]\u001b[A\n 84%|████████▍ | 22540/26685 [3:22:04<27:29, 2.51it/s]\u001b[A\n 84%|████████▍ | 22541/26685 [3:22:04<26:58, 2.56it/s]\u001b[A\n 84%|████████▍ | 22542/26685 [3:22:05<35:33, 1.94it/s]\u001b[A\n 84%|████████▍ | 22543/26685 [3:22:05<30:20, 2.27it/s]\u001b[A\n 84%|████████▍ | 22544/26685 [3:22:06<31:53, 2.16it/s]\u001b[A\n 84%|████████▍ | 22545/26685 [3:22:06<28:43, 2.40it/s]\u001b[A\n 84%|████████▍ | 22546/26685 [3:22:07<29:02, 2.38it/s]\u001b[A\n 84%|████████▍ | 22547/26685 [3:22:07<27:56, 2.47it/s]\u001b[A\n 84%|████████▍ 
| 22548/26685 [3:22:07<26:40, 2.58it/s]\u001b[A\n 85%|████████▍ | 22549/26685 [3:22:08<23:44, 2.90it/s]\u001b[A\n 85%|████████▍ | 22550/26685 [3:22:08<24:31, 2.81it/s]\u001b[A\n 85%|████████▍ | 22551/26685 [3:22:09<33:29, 2.06it/s]\u001b[A\n 85%|████████▍ | 22552/26685 [3:22:09<29:23, 2.34it/s]\u001b[A\n 85%|████████▍ | 22553/26685 [3:22:09<27:05, 2.54it/s]\u001b[A\n 85%|████████▍ | 22554/26685 [3:22:10<29:48, 2.31it/s]\u001b[A\n 85%|████████▍ | 22555/26685 [3:22:10<27:36, 2.49it/s]\u001b[A\n 85%|████████▍ | 22556/26685 [3:22:11<27:45, 2.48it/s]\u001b[A\n 85%|████████▍ | 22557/26685 [3:22:11<25:59, 2.65it/s]\u001b[A\n 85%|████████▍ | 22558/26685 [3:22:11<24:07, 2.85it/s]\u001b[A\n 85%|████████▍ | 22559/26685 [3:22:12<24:16, 2.83it/s]\u001b[A\n 85%|████████▍ | 22560/26685 [3:22:12<27:54, 2.46it/s]\u001b[A\n 85%|████████▍ | 22561/26685 [3:22:13<26:25, 2.60it/s]\u001b[A\n 85%|████████▍ | 22562/26685 [3:22:13<25:38, 2.68it/s]\u001b[A\n 85%|████████▍ | 22563/26685 [3:22:13<24:06, 2.85it/s]\u001b[A\n 85%|████████▍ | 22564/26685 [3:22:14<24:41, 2.78it/s]\u001b[A\n 85%|████████▍ | 22565/26685 [3:22:14<28:33, 2.41it/s]\u001b[A\n 85%|████████▍ | 22566/26685 [3:22:15<32:32, 2.11it/s]\u001b[A\n 85%|████████▍ | 22567/26685 [3:22:15<31:40, 2.17it/s]\u001b[A\n 85%|████████▍ | 22568/26685 [3:22:15<28:06, 2.44it/s]\u001b[A\n 85%|████████▍ | 22569/26685 [3:22:16<27:58, 2.45it/s]\u001b[A\n 85%|████████▍ | 22570/26685 [3:22:16<26:08, 2.62it/s]\u001b[A\n 85%|████████▍ | 22571/26685 [3:22:17<31:04, 2.21it/s]\u001b[A\n 85%|████████▍ | 22572/26685 [3:22:18<38:42, 1.77it/s]\u001b[A\n 85%|████████▍ | 22573/26685 [3:22:18<33:03, 2.07it/s]\u001b[A\n 85%|████████▍ | 22574/26685 [3:22:18<31:59, 2.14it/s]\u001b[A\n 85%|████████▍ | 22575/26685 [3:22:19<30:10, 2.27it/s]\u001b[A\n 85%|████████▍ | 22576/26685 [3:22:19<32:13, 2.12it/s]\u001b[A\n 85%|████████▍ | 22577/26685 [3:22:20<33:42, 2.03it/s]\u001b[A\n 85%|████████▍ | 22578/26685 [3:22:20<30:09, 2.27it/s]\u001b[A\n 85%|████████▍ | 22579/26685 [3:22:20<26:30, 2.58it/s]\u001b[A\n 85%|████████▍ | 22580/26685 [3:22:21<24:07, 2.84it/s]\u001b[A\n 85%|████████▍ | 22581/26685 [3:22:21<33:53, 2.02it/s]\u001b[A\n 85%|████████▍ | 22582/26685 [3:22:22<33:23, 2.05it/s]\u001b[A\n 85%|████████▍ | 22583/26685 [3:22:23<36:39, 1.87it/s]\u001b[A\n 85%|████████▍ | 22584/26685 [3:22:23<31:13, 2.19it/s]\u001b[A\n 85%|████████▍ | 22585/26685 [3:22:23<30:06, 2.27it/s]\u001b[A\n 85%|████████▍ | 22586/26685 [3:22:24<27:17, 2.50it/s]\u001b[A\n 85%|████████▍ | 22587/26685 [3:22:24<26:30, 2.58it/s]\u001b[A\n 85%|████████▍ | 22588/26685 [3:22:24<28:39, 2.38it/s]\u001b[A\n 85%|████████▍ | 22589/26685 [3:22:25<27:57, 2.44it/s]\u001b[A\n 85%|████████▍ | 22590/26685 [3:22:25<28:32, 2.39it/s]\u001b[A\n 85%|████████▍ | 22591/26685 [3:22:26<42:33, 1.60it/s]\u001b[A\n 85%|████████▍ | 22592/26685 [3:22:27<38:40, 1.76it/s]\u001b[A\n 85%|████████▍ | 22593/26685 [3:22:27<34:54, 1.95it/s]\u001b[A\n 85%|████████▍ | 22594/26685 [3:22:28<31:31, 2.16it/s]\u001b[A\n 85%|████████▍ | 22595/26685 [3:22:28<28:03, 2.43it/s]\u001b[A\n 85%|████████▍ | 22596/26685 [3:22:28<28:08, 2.42it/s]\u001b[A\n 85%|████████▍ | 22597/26685 [3:22:29<27:30, 2.48it/s]\u001b[A\n 85%|████████▍ | 22598/26685 [3:22:29<27:02, 2.52it/s]\u001b[A\n 85%|████████▍ | 22599/26685 [3:22:29<28:55, 2.35it/s]\u001b[A\n 85%|████████▍ | 22600/26685 [3:22:30<27:17, 2.49it/s]\u001b[A\n 85%|████████▍ | 22601/26685 [3:22:30<25:13, 2.70it/s]\u001b[A\n 85%|████████▍ | 22602/26685 [3:22:30<22:21, 3.04it/s]\u001b[A\n 85%|████████▍ | 22603/26685 [3:22:31<24:16, 
2.80it/s]\u001b[A\n 85%|████████▍ | 22604/26685 [3:22:31<23:12, 2.93it/s]\u001b[A\n 85%|████████▍ | 22605/26685 [3:22:32<30:53, 2.20it/s]\u001b[A\n 85%|████████▍ | 22606/26685 [3:22:32<28:08, 2.42it/s]\u001b[A\n 85%|████████▍ | 22607/26685 [3:22:33<27:39, 2.46it/s]\u001b[A\n 85%|████████▍ | 22608/26685 [3:22:33<33:52, 2.01it/s]\u001b[A\n 85%|████████▍ | 22609/26685 [3:22:34<40:49, 1.66it/s]\u001b[A\n 85%|████████▍ | 22610/26685 [3:22:35<38:51, 1.75it/s]\u001b[A\n 85%|████████▍ | 22611/26685 [3:22:35<33:29, 2.03it/s]\u001b[A\n 85%|████████▍ | 22612/26685 [3:22:35<29:37, 2.29it/s]\u001b[A\n 85%|████████▍ | 22613/26685 [3:22:36<31:06, 2.18it/s]\u001b[A\n 85%|████████▍ | 22614/26685 [3:22:36<29:14, 2.32it/s]\u001b[A\n 85%|████████▍ | 22615/26685 [3:22:36<29:03, 2.33it/s]\u001b[A\n 85%|████████▍ | 22616/26685 [3:22:37<38:17, 1.77it/s]\u001b[A\n 85%|████████▍ | 22617/26685 [3:22:38<32:16, 2.10it/s]\u001b[A\n 85%|████████▍ | 22618/26685 [3:22:38<28:52, 2.35it/s]\u001b[A\n 85%|████████▍ | 22619/26685 [3:22:38<25:44, 2.63it/s]\u001b[A\n 85%|████████▍ | 22620/26685 [3:22:39<34:38, 1.96it/s]\u001b[A\n 85%|████████▍ | 22621/26685 [3:22:40<35:01, 1.93it/s]\u001b[A\n 85%|████████▍ | 22622/26685 [3:22:40<31:50, 2.13it/s]\u001b[A\n 85%|████████▍ | 22623/26685 [3:22:40<30:03, 2.25it/s]\u001b[A\n 85%|████████▍ | 22624/26685 [3:22:41<28:22, 2.38it/s]\u001b[A\n 85%|████████▍ | 22625/26685 [3:22:41<31:34, 2.14it/s]\u001b[A\n 85%|████████▍ | 22626/26685 [3:22:42<32:03, 2.11it/s]\u001b[A\n 85%|████████▍ | 22627/26685 [3:22:42<34:28, 1.96it/s]\u001b[A\n 85%|████████▍ | 22628/26685 [3:22:43<29:10, 2.32it/s]\u001b[A\n 85%|████████▍ | 22629/26685 [3:22:43<37:00, 1.83it/s]\u001b[A\n 85%|████████▍ | 22630/26685 [3:22:44<32:46, 2.06it/s]\u001b[A\n 85%|████████▍ | 22631/26685 [3:22:44<29:05, 2.32it/s]\u001b[A\n 85%|████████▍ | 22632/26685 [3:22:45<31:38, 2.13it/s]\u001b[A\n 85%|████████▍ | 22633/26685 [3:22:45<30:03, 2.25it/s]\u001b[A\n 85%|████████▍ | 22634/26685 [3:22:46<32:06, 2.10it/s]\u001b[A\n 85%|████████▍ | 22635/26685 [3:22:46<39:08, 1.72it/s]\u001b[A\n 85%|████████▍ | 22636/26685 [3:22:47<34:37, 1.95it/s]\u001b[A\n 85%|████████▍ | 22637/26685 [3:22:47<31:28, 2.14it/s]\u001b[A\n 85%|████████▍ | 22638/26685 [3:22:48<31:18, 2.15it/s]\u001b[A\n 85%|████████▍ | 22639/26685 [3:22:48<31:03, 2.17it/s]\u001b[A\n 85%|████████▍ | 22640/26685 [3:22:48<26:33, 2.54it/s]\u001b[A\n 85%|████████▍ | 22641/26685 [3:22:49<24:56, 2.70it/s]\u001b[A\n 85%|████████▍ | 22642/26685 [3:22:49<25:58, 2.59it/s]\u001b[A\n 85%|████████▍ | 22643/26685 [3:22:49<28:45, 2.34it/s]\u001b[A\n 85%|████████▍ | 22644/26685 [3:22:50<26:27, 2.54it/s]\u001b[A\n 85%|████████▍ | 22645/26685 [3:22:50<24:40, 2.73it/s]\u001b[A\n 85%|████████▍ | 22646/26685 [3:22:50<25:29, 2.64it/s]\u001b[A\n 85%|████████▍ | 22647/26685 [3:22:51<23:21, 2.88it/s]\u001b[A\n 85%|████████▍ | 22648/26685 [3:22:51<21:04, 3.19it/s]\u001b[A\n 85%|████████▍ | 22649/26685 [3:22:51<22:24, 3.00it/s]\u001b[A\n 85%|████████▍ | 22650/26685 [3:22:52<21:38, 3.11it/s]\u001b[A\n 85%|████████▍ | 22651/26685 [3:22:52<23:19, 2.88it/s]\u001b[A\n 85%|████████▍ | 22652/26685 [3:22:52<23:26, 2.87it/s]\u001b[A\n 85%|████████▍ | 22653/26685 [3:22:53<22:56, 2.93it/s]\u001b[A\n 85%|████████▍ | 22654/26685 [3:22:53<21:47, 3.08it/s]\u001b[A\n 85%|████████▍ | 22655/26685 [3:22:54<27:38, 2.43it/s]\u001b[A\n 85%|████████▍ | 22656/26685 [3:22:54<29:51, 2.25it/s]\u001b[A\n 85%|████████▍ | 22657/26685 [3:22:55<28:49, 2.33it/s]\u001b[A\n 85%|████████▍ | 22658/26685 [3:22:55<32:54, 2.04it/s]\u001b[A\n 85%|████████▍ 
| 22659/26685 [3:22:56<30:34, 2.20it/s]\u001b[A\n 85%|████████▍ | 22660/26685 [3:22:56<26:35, 2.52it/s]\u001b[A\n 85%|████████▍ | 22661/26685 [3:22:56<28:33, 2.35it/s]\u001b[A\n 85%|████████▍ | 22662/26685 [3:22:57<26:06, 2.57it/s]\u001b[A\n 85%|████████▍ | 22663/26685 [3:22:57<24:32, 2.73it/s]\u001b[A\n 85%|████████▍ | 22664/26685 [3:22:57<24:14, 2.76it/s]\u001b[A\n 85%|████████▍ | 22665/26685 [3:22:58<29:10, 2.30it/s]\u001b[A\n 85%|████████▍ | 22666/26685 [3:22:58<26:30, 2.53it/s]\u001b[A\n 85%|████████▍ | 22667/26685 [3:22:59<27:21, 2.45it/s]\u001b[A\n 85%|████████▍ | 22668/26685 [3:23:00<36:42, 1.82it/s]\u001b[A\n 85%|████████▍ | 22669/26685 [3:23:00<33:19, 2.01it/s]\u001b[A\n 85%|████████▍ | 22670/26685 [3:23:00<32:04, 2.09it/s]\u001b[A\n 85%|████████▍ | 22671/26685 [3:23:01<30:23, 2.20it/s]\u001b[A\n 85%|████████▍ | 22672/26685 [3:23:01<27:46, 2.41it/s]\u001b[A\n 85%|████████▍ | 22673/26685 [3:23:01<25:22, 2.64it/s]\u001b[A\n 85%|████████▍ | 22674/26685 [3:23:02<27:25, 2.44it/s]\u001b[A\n 85%|████████▍ | 22675/26685 [3:23:02<32:19, 2.07it/s]\u001b[A\n 85%|████████▍ | 22676/26685 [3:23:03<28:53, 2.31it/s]\u001b[A\n 85%|████████▍ | 22677/26685 [3:23:03<28:31, 2.34it/s]\u001b[A\n 85%|████████▍ | 22678/26685 [3:23:04<29:17, 2.28it/s]\u001b[A\n 85%|████████▍ | 22679/26685 [3:23:04<33:43, 1.98it/s]\u001b[A\n 85%|████████▍ | 22680/26685 [3:23:05<29:34, 2.26it/s]\u001b[A\n 85%|████████▍ | 22681/26685 [3:23:05<27:01, 2.47it/s]\u001b[A\n 85%|████████▍ | 22682/26685 [3:23:05<24:34, 2.71it/s]\u001b[A\n 85%|████████▌ | 22683/26685 [3:23:06<23:21, 2.86it/s]\u001b[A\n 85%|████████▌ | 22684/26685 [3:23:08<58:31, 1.14it/s]\u001b[A\n 85%|████████▌ | 22685/26685 [3:23:08<46:44, 1.43it/s]\u001b[A\n 85%|████████▌ | 22686/26685 [3:23:08<41:09, 1.62it/s]\u001b[A\n 85%|████████▌ | 22687/26685 [3:23:09<36:14, 1.84it/s]\u001b[A\n 85%|████████▌ | 22688/26685 [3:23:09<33:49, 1.97it/s]\u001b[A\n 85%|████████▌ | 22689/26685 [3:23:10<30:28, 2.19it/s]\u001b[A\n 85%|████████▌ | 22690/26685 [3:23:10<37:28, 1.78it/s]\u001b[A\n 85%|████████▌ | 22691/26685 [3:23:11<43:26, 1.53it/s]\u001b[A\n 85%|████████▌ | 22692/26685 [3:23:11<35:20, 1.88it/s]\u001b[A\n 85%|████████▌ | 22693/26685 [3:23:12<41:15, 1.61it/s]\u001b[A\n 85%|████████▌ | 22694/26685 [3:23:13<37:01, 1.80it/s]\u001b[A\n 85%|████████▌ | 22695/26685 [3:23:13<33:22, 1.99it/s]\u001b[A\n 85%|████████▌ | 22696/26685 [3:23:13<28:46, 2.31it/s]\u001b[A\n 85%|████████▌ | 22697/26685 [3:23:14<26:40, 2.49it/s]\u001b[A\n 85%|████████▌ | 22698/26685 [3:23:14<24:15, 2.74it/s]\u001b[A\n 85%|████████▌ | 22699/26685 [3:23:14<27:03, 2.46it/s]\u001b[A\n 85%|████████▌ | 22700/26685 [3:23:15<34:46, 1.91it/s]\u001b[A\n 85%|████████▌ | 22701/26685 [3:23:16<31:07, 2.13it/s]\u001b[A\n 85%|████████▌ | 22702/26685 [3:23:16<27:37, 2.40it/s]\u001b[A\n 85%|████████▌ | 22703/26685 [3:23:17<41:23, 1.60it/s]\u001b[A\n 85%|████████▌ | 22704/26685 [3:23:17<37:30, 1.77it/s]\u001b[A\n 85%|████████▌ | 22705/26685 [3:23:18<32:19, 2.05it/s]\u001b[A\n 85%|████████▌ | 22706/26685 [3:23:18<28:56, 2.29it/s]\u001b[A\n 85%|████████▌ | 22707/26685 [3:23:18<26:56, 2.46it/s]\u001b[A\n 85%|████████▌ | 22708/26685 [3:23:19<26:18, 2.52it/s]\u001b[A\n 85%|████████▌ | 22709/26685 [3:23:19<26:32, 2.50it/s]\u001b[A\n 85%|████████▌ | 22710/26685 [3:23:20<26:00, 2.55it/s]\u001b[A\n 85%|████████▌ | 22711/26685 [3:23:20<25:11, 2.63it/s]\u001b[A\n 85%|████████▌ | 22712/26685 [3:23:20<23:34, 2.81it/s]\u001b[A\n 85%|████████▌ | 22713/26685 [3:23:21<23:49, 2.78it/s]\u001b[A\n 85%|████████▌ | 22714/26685 [3:23:21<25:43, 
1.93it/s]\u001b[A\n 89%|████████▉ | 23769/26685 [3:30:56<22:05, 2.20it/s]\u001b[A\n 89%|████████▉ | 23770/26685 [3:30:56<20:41, 2.35it/s]\u001b[A\n 89%|████████▉ | 23771/26685 [3:30:57<20:07, 2.41it/s]\u001b[A\n 89%|████████▉ | 23772/26685 [3:30:57<19:12, 2.53it/s]\u001b[A\n 89%|████████▉ | 23773/26685 [3:30:57<17:51, 2.72it/s]\u001b[A\n 89%|████████▉ | 23774/26685 [3:30:58<24:30, 1.98it/s]\u001b[A\n 89%|████████▉ | 23775/26685 [3:31:00<40:00, 1.21it/s]\u001b[A\n 89%|████████▉ | 23776/26685 [3:31:00<32:28, 1.49it/s]\u001b[A\n 89%|████████▉ | 23777/26685 [3:31:00<27:08, 1.79it/s]\u001b[A\n 89%|████████▉ | 23778/26685 [3:31:02<41:45, 1.16it/s]\u001b[A\n 89%|████████▉ | 23779/26685 [3:31:02<33:32, 1.44it/s]\u001b[A\n 89%|████████▉ | 23780/26685 [3:31:03<28:24, 1.70it/s]\u001b[A\n 89%|████████▉ | 23781/26685 [3:31:03<26:24, 1.83it/s]\u001b[A\n 89%|████████▉ | 23782/26685 [3:31:03<22:36, 2.14it/s]\u001b[A\n 89%|████████▉ | 23783/26685 [3:31:04<28:18, 1.71it/s]\u001b[A\n 89%|████████▉ | 23784/26685 [3:31:05<25:25, 1.90it/s]\u001b[A\n 89%|████████▉ | 23785/26685 [3:31:05<30:28, 1.59it/s]\u001b[A\n 89%|████████▉ | 23786/26685 [3:31:06<26:42, 1.81it/s]\u001b[A\n 89%|████████▉ | 23787/26685 [3:31:06<23:48, 2.03it/s]\u001b[A\n 89%|████████▉ | 23788/26685 [3:31:07<22:56, 2.10it/s]\u001b[A\n 89%|████████▉ | 23789/26685 [3:31:07<20:55, 2.31it/s]\u001b[A\n 89%|████████▉ | 23790/26685 [3:31:07<18:04, 2.67it/s]\u001b[A\n 89%|████████▉ | 23791/26685 [3:31:08<25:25, 1.90it/s]\u001b[A\n 89%|████████▉ | 23792/26685 [3:31:08<23:31, 2.05it/s]\u001b[A\n 89%|████████▉ | 23793/26685 [3:31:09<20:15, 2.38it/s]\u001b[A\n 89%|████████▉ | 23794/26685 [3:31:09<23:32, 2.05it/s]\u001b[A\n 89%|████████▉ | 23795/26685 [3:31:10<20:36, 2.34it/s]\u001b[A\n 89%|████████▉ | 23796/26685 [3:31:10<21:10, 2.27it/s]\u001b[A\n 89%|████████▉ | 23797/26685 [3:31:11<26:41, 1.80it/s]\u001b[A\n 89%|████████▉ | 23798/26685 [3:31:11<23:10, 2.08it/s]\u001b[A\n 89%|████████▉ | 23799/26685 [3:31:12<20:55, 2.30it/s]\u001b[A\n 89%|████████▉ | 23800/26685 [3:31:12<18:45, 2.56it/s]\u001b[A\n 89%|████████▉ | 23801/26685 [3:31:12<20:36, 2.33it/s]\u001b[A\n 89%|████████▉ | 23802/26685 [3:31:13<19:38, 2.45it/s]\u001b[A\n 89%|████████▉ | 23803/26685 [3:31:13<17:38, 2.72it/s]\u001b[A\n 89%|████████▉ | 23804/26685 [3:31:13<16:49, 2.85it/s]\u001b[A\n 89%|████████▉ | 23805/26685 [3:31:14<18:12, 2.64it/s]\u001b[A\n 89%|████████▉ | 23806/26685 [3:31:14<17:24, 2.76it/s]\u001b[A\n 89%|████████▉ | 23807/26685 [3:31:14<18:05, 2.65it/s]\u001b[A\n 89%|████████▉ | 23808/26685 [3:31:15<18:48, 2.55it/s]\u001b[A\n 89%|████████▉ | 23809/26685 [3:31:15<18:39, 2.57it/s]\u001b[A\n 89%|████████▉ | 23810/26685 [3:31:16<24:58, 1.92it/s]\u001b[A\n 89%|████████▉ | 23811/26685 [3:31:17<23:04, 2.08it/s]\u001b[A\n 89%|████████▉ | 23812/26685 [3:31:17<24:37, 1.95it/s]\u001b[A\n 89%|████████▉ | 23813/26685 [3:31:17<22:11, 2.16it/s]\u001b[A\n 89%|████████▉ | 23814/26685 [3:31:18<20:14, 2.36it/s]\u001b[A\n 89%|████████▉ | 23815/26685 [3:31:18<18:14, 2.62it/s]\u001b[A\n 89%|████████▉ | 23816/26685 [3:31:19<19:34, 2.44it/s]\u001b[A\n 89%|████████▉ | 23817/26685 [3:31:19<20:53, 2.29it/s]\u001b[A\n 89%|████████▉ | 23818/26685 [3:31:19<18:46, 2.55it/s]\u001b[A\n 89%|████████▉ | 23819/26685 [3:31:20<20:55, 2.28it/s]\u001b[A\n 89%|████████▉ | 23820/26685 [3:31:20<18:38, 2.56it/s]\u001b[A\n 89%|████████▉ | 23821/26685 [3:31:21<24:40, 1.93it/s]\u001b[A\n 89%|████████▉ | 23822/26685 [3:31:21<20:50, 2.29it/s]\u001b[A\n 89%|████████▉ | 23823/26685 [3:31:21<18:08, 2.63it/s]\u001b[A\n 89%|████████▉ 
| 23824/26685 [3:31:22<16:56, 2.81it/s]\u001b[A\n 89%|████████▉ | 23825/26685 [3:31:22<15:53, 3.00it/s]\u001b[A\n 89%|████████▉ | 23826/26685 [3:31:22<14:51, 3.21it/s]\u001b[A\n 89%|████████▉ | 23827/26685 [3:31:23<14:48, 3.22it/s]\u001b[A\n 89%|████████▉ | 23828/26685 [3:31:23<15:09, 3.14it/s]\u001b[A\n 89%|████████▉ | 23829/26685 [3:31:23<16:04, 2.96it/s]\u001b[A\n 89%|████████▉ | 23830/26685 [3:31:24<18:07, 2.63it/s]\u001b[A\n 89%|████████▉ | 23831/26685 [3:31:25<25:03, 1.90it/s]\u001b[A\n 89%|████████▉ | 23832/26685 [3:31:25<29:08, 1.63it/s]\u001b[A\n 89%|████████▉ | 23833/26685 [3:31:26<26:00, 1.83it/s]\u001b[A\n 89%|████████▉ | 23834/26685 [3:31:26<22:32, 2.11it/s]\u001b[A\n 89%|████████▉ | 23835/26685 [3:31:28<36:35, 1.30it/s]\u001b[A\n 89%|████████▉ | 23836/26685 [3:31:28<30:58, 1.53it/s]\u001b[A\n 89%|████████▉ | 23837/26685 [3:31:28<28:12, 1.68it/s]\u001b[A\n 89%|████████▉ | 23838/26685 [3:31:29<24:16, 1.95it/s]\u001b[A\n 89%|████████▉ | 23839/26685 [3:31:29<23:22, 2.03it/s]\u001b[A\n 89%|████████▉ | 23840/26685 [3:31:30<27:30, 1.72it/s]\u001b[A\n 89%|████████▉ | 23841/26685 [3:31:30<24:39, 1.92it/s]\u001b[A\n 89%|████████▉ | 23842/26685 [3:31:31<25:43, 1.84it/s]\u001b[A\n 89%|████████▉ | 23843/26685 [3:31:32<25:06, 1.89it/s]\u001b[A\n 89%|████████▉ | 23844/26685 [3:31:32<25:47, 1.84it/s]\u001b[A\n 89%|████████▉ | 23845/26685 [3:31:33<24:03, 1.97it/s]\u001b[A\n 89%|████████▉ | 23846/26685 [3:31:33<21:44, 2.18it/s]\u001b[A\n 89%|████████▉ | 23847/26685 [3:31:33<19:03, 2.48it/s]\u001b[A\n 89%|████████▉ | 23848/26685 [3:31:33<18:23, 2.57it/s]\u001b[A\n 89%|████████▉ | 23849/26685 [3:31:34<26:28, 1.79it/s]\u001b[A\n 89%|████████▉ | 23850/26685 [3:31:35<26:40, 1.77it/s]\u001b[A\n 89%|████████▉ | 23851/26685 [3:31:35<25:05, 1.88it/s]\u001b[A\n 89%|████████▉ | 23852/26685 [3:31:36<21:09, 2.23it/s]\u001b[A\n 89%|████████▉ | 23853/26685 [3:31:37<26:09, 1.80it/s]\u001b[A\n 89%|████████▉ | 23854/26685 [3:31:37<26:04, 1.81it/s]\u001b[A\n 89%|████████▉ | 23855/26685 [3:31:37<22:36, 2.09it/s]\u001b[A\n 89%|████████▉ | 23856/26685 [3:31:38<19:53, 2.37it/s]\u001b[A\n 89%|████████▉ | 23857/26685 [3:31:38<18:14, 2.58it/s]\u001b[A\n 89%|████████▉ | 23858/26685 [3:31:38<18:41, 2.52it/s]\u001b[A\n 89%|████████▉ | 23859/26685 [3:31:39<19:43, 2.39it/s]\u001b[A\n 89%|████████▉ | 23860/26685 [3:31:39<17:43, 2.66it/s]\u001b[A\n 89%|████████▉ | 23861/26685 [3:31:39<16:28, 2.86it/s]\u001b[A\n 89%|████████▉ | 23862/26685 [3:31:40<16:44, 2.81it/s]\u001b[A\n 89%|████████▉ | 23863/26685 [3:31:40<15:53, 2.96it/s]\u001b[A\n 89%|████████▉ | 23864/26685 [3:31:40<15:59, 2.94it/s]\u001b[A\n 89%|████████▉ | 23865/26685 [3:31:41<14:49, 3.17it/s]\u001b[A\n 89%|████████▉ | 23866/26685 [3:31:41<16:33, 2.84it/s]\u001b[A\n 89%|████████▉ | 23867/26685 [3:31:41<16:26, 2.86it/s]\u001b[A\n 89%|████████▉ | 23868/26685 [3:31:42<18:25, 2.55it/s]\u001b[A\n 89%|████████▉ | 23869/26685 [3:31:42<18:05, 2.59it/s]\u001b[A\n 89%|████████▉ | 23870/26685 [3:31:43<20:39, 2.27it/s]\u001b[A\n 89%|████████▉ | 23871/26685 [3:31:43<19:29, 2.41it/s]\u001b[A\n 89%|████████▉ | 23872/26685 [3:31:44<17:47, 2.64it/s]\u001b[A\n 89%|████████▉ | 23873/26685 [3:31:44<16:39, 2.81it/s]\u001b[A\n 89%|████████▉ | 23874/26685 [3:31:44<17:17, 2.71it/s]\u001b[A\n 89%|████████▉ | 23875/26685 [3:31:45<18:42, 2.50it/s]\u001b[A\n 89%|████████▉ | 23876/26685 [3:31:46<24:34, 1.91it/s]\u001b[A\n 89%|████████▉ | 23877/26685 [3:31:46<22:07, 2.11it/s]\u001b[A\n 89%|████████▉ | 23878/26685 [3:31:46<19:47, 2.36it/s]\u001b[A\n 89%|████████▉ | 23879/26685 [3:31:47<19:49, 
2.36it/s]\u001b[A\n 89%|████████▉ | 23880/26685 [3:31:47<18:59, 2.46it/s]\u001b[A\n 89%|████████▉ | 23881/26685 [3:31:47<17:34, 2.66it/s]\u001b[A\n 89%|████████▉ | 23882/26685 [3:31:48<16:17, 2.87it/s]\u001b[A\n 89%|████████▉ | 23883/26685 [3:31:48<23:08, 2.02it/s]\u001b[A\n 90%|████████▉ | 23884/26685 [3:31:49<27:52, 1.68it/s]\u001b[A\n 90%|████████▉ | 23885/26685 [3:31:50<23:52, 1.95it/s]\u001b[A\n 90%|████████▉ | 23886/26685 [3:31:50<22:09, 2.11it/s]\u001b[A\n 90%|████████▉ | 23887/26685 [3:31:50<20:22, 2.29it/s]\u001b[A\n 90%|████████▉ | 23888/26685 [3:31:51<22:15, 2.09it/s]\u001b[A\n 90%|████████▉ | 23889/26685 [3:31:51<21:49, 2.14it/s]\u001b[A\n 90%|████████▉ | 23890/26685 [3:31:52<21:35, 2.16it/s]\u001b[A\n 90%|████████▉ | 23891/26685 [3:31:52<20:39, 2.25it/s]\u001b[A\n 90%|████████▉ | 23892/26685 [3:31:53<19:13, 2.42it/s]\u001b[A\n 90%|████████▉ | 23893/26685 [3:31:53<18:58, 2.45it/s]\u001b[A\n 90%|████████▉ | 23894/26685 [3:31:54<25:58, 1.79it/s]\u001b[A\n 90%|████████▉ | 23895/26685 [3:31:54<22:30, 2.07it/s]\u001b[A\n 90%|████████▉ | 23896/26685 [3:31:55<21:46, 2.14it/s]\u001b[A\n 90%|████████▉ | 23897/26685 [3:31:55<26:25, 1.76it/s]\u001b[A\n 90%|████████▉ | 23898/26685 [3:31:56<22:42, 2.05it/s]\u001b[A\n 90%|████████▉ | 23899/26685 [3:31:56<20:26, 2.27it/s]\u001b[A\n 90%|████████▉ | 23900/26685 [3:31:56<18:07, 2.56it/s]\u001b[A\n 90%|████████▉ | 23901/26685 [3:31:57<25:22, 1.83it/s]\u001b[A\n 90%|████████▉ | 23902/26685 [3:31:58<22:31, 2.06it/s]\u001b[A\n 90%|████████▉ | 23903/26685 [3:31:58<20:22, 2.28it/s]\u001b[A\n 90%|████████▉ | 23904/26685 [3:31:59<26:23, 1.76it/s]\u001b[A\n 90%|████████▉ | 23905/26685 [3:31:59<22:42, 2.04it/s]\u001b[A\n 90%|████████▉ | 23906/26685 [3:31:59<19:45, 2.34it/s]\u001b[A\n 90%|████████▉ | 23907/26685 [3:32:00<22:01, 2.10it/s]\u001b[A\n 90%|████████▉ | 23908/26685 [3:32:00<19:17, 2.40it/s]\u001b[A\n 90%|████████▉ | 23909/26685 [3:32:01<19:18, 2.40it/s]\u001b[A\n 90%|████████▉ | 23910/26685 [3:32:01<21:44, 2.13it/s]\u001b[A\n 90%|████████▉ | 23911/26685 [3:32:02<19:29, 2.37it/s]\u001b[A\n 90%|████████▉ | 23912/26685 [3:32:02<18:06, 2.55it/s]\u001b[A\n 90%|████████▉ | 23913/26685 [3:32:02<17:15, 2.68it/s]\u001b[A\n 90%|████████▉ | 23914/26685 [3:32:03<22:54, 2.02it/s]\u001b[A\n 90%|████████▉ | 23915/26685 [3:32:03<22:22, 2.06it/s]\u001b[A\n 90%|████████▉ | 23916/26685 [3:32:04<23:18, 1.98it/s]\u001b[A\n 90%|████████▉ | 23917/26685 [3:32:04<22:07, 2.09it/s]\u001b[A\n 90%|████████▉ | 23918/26685 [3:32:05<22:16, 2.07it/s]\u001b[A\n 90%|████████▉ | 23919/26685 [3:32:05<23:31, 1.96it/s]\u001b[A\n 90%|████████▉ | 23920/26685 [3:32:06<20:11, 2.28it/s]\u001b[A\n 90%|████████▉ | 23921/26685 [3:32:06<18:26, 2.50it/s]\u001b[A\n 90%|████████▉ | 23922/26685 [3:32:07<19:42, 2.34it/s]\u001b[A\n 90%|████████▉ | 23923/26685 [3:32:07<22:33, 2.04it/s]\u001b[A\n 90%|████████▉ | 23924/26685 [3:32:08<21:21, 2.15it/s]\u001b[A\n 90%|████████▉ | 23925/26685 [3:32:08<21:47, 2.11it/s]\u001b[A\n 90%|████████▉ | 23926/26685 [3:32:09<25:24, 1.81it/s]\u001b[A\n 90%|████████▉ | 23927/26685 [3:32:09<26:28, 1.74it/s]\u001b[A\n 90%|████████▉ | 23928/26685 [3:32:10<21:52, 2.10it/s]\u001b[A\n 90%|████████▉ | 23929/26685 [3:32:11<27:39, 1.66it/s]\u001b[A\n 90%|████████▉ | 23930/26685 [3:32:11<26:33, 1.73it/s]\u001b[A\n 90%|████████▉ | 23931/26685 [3:32:11<24:11, 1.90it/s]\u001b[A\n 90%|████████▉ | 23932/26685 [3:32:12<21:29, 2.13it/s]\u001b[A\n 90%|████████▉ | 23933/26685 [3:32:12<20:24, 2.25it/s]\u001b[A\n 90%|████████▉ | 23934/26685 [3:32:13<20:06, 2.28it/s]\u001b[A\n 90%|████████▉ 
| 23935/26685 [3:32:13<25:01, 1.83it/s]\u001b[A\n 90%|████████▉ | 23936/26685 [3:32:14<22:51, 2.00it/s]\u001b[A\n 90%|████████▉ | 23937/26685 [3:32:14<20:48, 2.20it/s]\u001b[A\n 90%|████████▉ | 23938/26685 [3:32:15<25:51, 1.77it/s]\u001b[A\n 90%|████████▉ | 23939/26685 [3:32:15<24:48, 1.84it/s]\u001b[A\n 90%|████████▉ | 23940/26685 [3:32:16<22:38, 2.02it/s]\u001b[A\n 90%|████████▉ | 23941/26685 [3:32:16<20:47, 2.20it/s]\u001b[A\n 90%|████████▉ | 23942/26685 [3:32:17<19:11, 2.38it/s]\u001b[A\n 90%|████████▉ | 23943/26685 [3:32:17<24:58, 1.83it/s]\u001b[A\n 90%|████████▉ | 23944/26685 [3:32:18<24:19, 1.88it/s]\u001b[A\n 90%|████████▉ | 23945/26685 [3:32:18<22:21, 2.04it/s]\u001b[A\n 90%|████████▉ | 23946/26685 [3:32:19<26:39, 1.71it/s]\u001b[A\n 90%|████████▉ | 23947/26685 [3:32:20<24:15, 1.88it/s]\u001b[A\n 90%|████████▉ | 23948/26685 [3:32:20<22:54, 1.99it/s]\u001b[A\n 90%|████████▉ | 23949/26685 [3:32:20<19:33, 2.33it/s]\u001b[A\n 90%|████████▉ | 23950/26685 [3:32:21<18:02, 2.53it/s]\u001b[A\n 90%|████████▉ | 23951/26685 [3:32:21<21:14, 2.15it/s]\u001b[A\n 90%|████████▉ | 23952/26685 [3:32:21<18:18, 2.49it/s]\u001b[A\n 90%|████████▉ | 23953/26685 [3:32:22<21:48, 2.09it/s]\u001b[A\n 90%|████████▉ | 23954/26685 [3:32:22<19:05, 2.38it/s]\u001b[A\n 90%|████████▉ | 23955/26685 [3:32:23<18:13, 2.50it/s]\u001b[A\n 90%|████████▉ | 23956/26685 [3:32:23<18:12, 2.50it/s]\u001b[A\n 90%|████████▉ | 23957/26685 [3:32:23<16:23, 2.77it/s]\u001b[A\n 90%|████████▉ | 23958/26685 [3:32:24<18:56, 2.40it/s]\u001b[A\n 90%|████████▉ | 23959/26685 [3:32:24<17:33, 2.59it/s]\u001b[A\n 90%|████████▉ | 23960/26685 [3:32:25<16:03, 2.83it/s]\u001b[A\n 90%|████████▉ | 23961/26685 [3:32:25<15:27, 2.94it/s]\u001b[A\n 90%|████████▉ | 23962/26685 [3:32:25<14:44, 3.08it/s]\u001b[A\n 90%|████████▉ | 23963/26685 [3:32:26<16:05, 2.82it/s]\u001b[A\n 90%|████████▉ | 23964/26685 [3:32:26<15:02, 3.01it/s]\u001b[A\n 90%|████████▉ | 23965/26685 [3:32:26<15:37, 2.90it/s]\u001b[A\n 90%|████████▉ | 23966/26685 [3:32:26<14:53, 3.04it/s]\u001b[A\n 90%|████████▉ | 23967/26685 [3:32:27<17:10, 2.64it/s]\u001b[A\n 90%|████████▉ | 23968/26685 [3:32:27<15:41, 2.89it/s]\u001b[A\n 90%|████████▉ | 23969/26685 [3:32:28<22:23, 2.02it/s]\u001b[A\n 90%|████████▉ | 23970/26685 [3:32:29<22:02, 2.05it/s]\u001b[A\n 90%|████████▉ | 23971/26685 [3:32:29<19:57, 2.27it/s]\u001b[A\n 90%|████████▉ | 23972/26685 [3:32:29<18:23, 2.46it/s]\u001b[A\n 90%|████████▉ | 23973/26685 [3:32:30<24:50, 1.82it/s]\u001b[A\n 90%|████████▉ | 23974/26685 [3:32:30<22:21, 2.02it/s]\u001b[A\n 90%|████████▉ | 23975/26685 [3:32:31<23:10, 1.95it/s]\u001b[A\n 90%|████████▉ | 23976/26685 [3:32:32<23:44, 1.90it/s]\u001b[A\n 90%|████████▉ | 23977/26685 [3:32:32<21:48, 2.07it/s]\u001b[A\n 90%|████████▉ | 23978/26685 [3:32:32<20:00, 2.25it/s]\u001b[A\n 90%|████████▉ | 23979/26685 [3:32:33<17:26, 2.58it/s]\u001b[A\n 90%|████████▉ | 23980/26685 [3:32:33<24:18, 1.85it/s]\u001b[A\n 90%|████████▉ | 23981/26685 [3:32:34<23:45, 1.90it/s]\u001b[A\n 90%|████████▉ | 23982/26685 [3:32:34<22:45, 1.98it/s]\u001b[A\n 90%|████████▉ | 23983/26685 [3:32:35<27:11, 1.66it/s]\u001b[A\n 90%|████████▉ | 23984/26685 [3:32:36<27:11, 1.66it/s]\u001b[A\n 90%|████████▉ | 23985/26685 [3:32:36<24:45, 1.82it/s]\u001b[A\n 90%|████████▉ | 23986/26685 [3:32:37<23:23, 1.92it/s]\u001b[A\n 90%|████████▉ | 23987/26685 [3:32:37<20:41, 2.17it/s]\u001b[A\n 90%|████████▉ | 23988/26685 [3:32:38<20:38, 2.18it/s]\u001b[A\n 90%|████████▉ | 23989/26685 [3:32:38<20:18, 2.21it/s]\u001b[A\n 90%|████████▉ | 23990/26685 [3:32:38<18:51, 
2.38it/s]\u001b[A\n 90%|████████▉ | 23991/26685 [3:32:39<21:50, 2.06it/s]\u001b[A\n 90%|████████▉ | 23992/26685 [3:32:40<23:17, 1.93it/s]\u001b[A\n 90%|████████▉ | 23993/26685 [3:32:40<22:21, 2.01it/s]\u001b[A\n 90%|████████▉ | 23994/26685 [3:32:40<20:44, 2.16it/s]\u001b[A\n 90%|████████▉ | 23995/26685 [3:32:41<18:49, 2.38it/s]\u001b[A\n 90%|████████▉ | 23996/26685 [3:32:41<17:58, 2.49it/s]\u001b[A\n 90%|████████▉ | 23997/26685 [3:32:42<23:36, 1.90it/s]\u001b[A\n 90%|████████▉ | 23998/26685 [3:32:42<24:21, 1.84it/s]\u001b[A\n 90%|████████▉ | 23999/26685 [3:32:43<21:57, 2.04it/s]\u001b[A\n 90%|████████▉ | 24000/26685 [3:32:43<23:15, 1.92it/s]\u001b[A\n 90%|████████▉ | 24001/26685 [3:32:44<19:35, 2.28it/s]\u001b[A\n 90%|████████▉ | 24002/26685 [3:32:44<18:03, 2.48it/s]\u001b[A\n 90%|████████▉ | 24003/26685 [3:32:44<17:01, 2.62it/s]\u001b[A\n 90%|████████▉ | 24004/26685 [3:32:45<19:30, 2.29it/s]\u001b[A\n 90%|████████▉ | 24005/26685 [3:32:45<21:15, 2.10it/s]\u001b[A\n 90%|████████▉ | 24006/26685 [3:32:46<18:58, 2.35it/s]\u001b[A\n 90%|████████▉ | 24007/26685 [3:32:46<19:33, 2.28it/s]\u001b[A\n 90%|████████▉ | 24008/26685 [3:32:46<17:20, 2.57it/s]\u001b[A\n 90%|████████▉ | 24009/26685 [3:32:47<16:25, 2.72it/s]\u001b[A\n 90%|████████▉ | 24010/26685 [3:32:47<17:41, 2.52it/s]\u001b[A\n 90%|████████▉ | 24011/26685 [3:32:48<17:09, 2.60it/s]\u001b[A\n 90%|████████▉ | 24012/26685 [3:32:48<23:17, 1.91it/s]\u001b[A\n 90%|████████▉ | 24013/26685 [3:32:49<20:55, 2.13it/s]\u001b[A\n 90%|████████▉ | 24014/26685 [3:32:49<18:49, 2.36it/s]\u001b[A\n 90%|████████▉ | 24015/26685 [3:32:49<17:21, 2.56it/s]\u001b[A\n 90%|████████▉ | 24016/26685 [3:32:50<20:10, 2.21it/s]\u001b[A\n 90%|█████████ | 24017/26685 [3:32:50<19:53, 2.24it/s]\u001b[A\n 90%|█████████ | 24018/26685 [3:32:51<20:01, 2.22it/s]\u001b[A\n 90%|█████████ | 24019/26685 [3:32:52<25:07, 1.77it/s]\u001b[A\n 90%|█████████ | 24020/26685 [3:32:53<29:06, 1.53it/s]\u001b[A\n 90%|█████████ | 24021/26685 [3:32:53<24:47, 1.79it/s]\u001b[A\n 90%|█████████ | 24022/26685 [3:32:54<28:50, 1.54it/s]\u001b[A\n 90%|█████████ | 24023/26685 [3:32:54<26:13, 1.69it/s]\u001b[A\n 90%|█████████ | 24024/26685 [3:32:55<23:50, 1.86it/s]\u001b[A\n 90%|█████████ | 24025/26685 [3:32:55<23:14, 1.91it/s]\u001b[A\n 90%|█████████ | 24026/26685 [3:32:56<27:37, 1.60it/s]\u001b[A\n 90%|█████████ | 24027/26685 [3:32:56<24:26, 1.81it/s]\u001b[A\n 90%|█████████ | 24028/26685 [3:32:57<23:52, 1.85it/s]\u001b[A\n 90%|█████████ | 24029/26685 [3:32:58<29:03, 1.52it/s]\u001b[A\n 90%|█████████ | 24030/26685 [3:32:58<25:53, 1.71it/s]\u001b[A\n 90%|█████████ | 24031/26685 [3:32:59<24:09, 1.83it/s]\u001b[A\n 90%|█████████ | 24032/26685 [3:32:59<24:12, 1.83it/s]\u001b[A\n 90%|█████████ | 24033/26685 [3:33:00<23:01, 1.92it/s]\u001b[A\n 90%|█████████ | 24034/26685 [3:33:00<20:14, 2.18it/s]\u001b[A\n 90%|█████████ | 24035/26685 [3:33:00<19:30, 2.26it/s]\u001b[A\n 90%|█████████ | 24036/26685 [3:33:01<17:38, 2.50it/s]\u001b[A\n 90%|█████████ | 24037/26685 [3:33:01<15:43, 2.81it/s]\u001b[A\n 90%|█████████ | 24038/26685 [3:33:01<15:16, 2.89it/s]\u001b[A\n 90%|█████████ | 24039/26685 [3:33:02<15:56, 2.77it/s]\u001b[A\n 90%|█████████ | 24040/26685 [3:33:02<16:46, 2.63it/s]\u001b[A\n 90%|█████████ | 24041/26685 [3:33:02<16:03, 2.75it/s]\u001b[A\n 90%|█████████ | 24042/26685 [3:33:03<16:04, 2.74it/s]\u001b[A\n 90%|█████████ | 24043/26685 [3:33:03<17:30, 2.52it/s]\u001b[A\n 90%|█████████ | 24044/26685 [3:33:04<19:57, 2.21it/s]\u001b[A\n 90%|█████████ | 24045/26685 [3:33:04<18:53, 2.33it/s]\u001b[A\n 90%|█████████ 
| 24046/26685 [3:33:05<17:37, 2.49it/s]\u001b[A\n 90%|█████████ | 24047/26685 [3:33:05<16:33, 2.65it/s]\u001b[A\n 90%|█████████ | 24048/26685 [3:33:05<16:23, 2.68it/s]\u001b[A\n 90%|█████████ | 24049/26685 [3:33:06<15:27, 2.84it/s]\u001b[A\n 90%|█████████ | 24050/26685 [3:33:06<17:01, 2.58it/s]\u001b[A\n 90%|█████████ | 24051/26685 [3:33:06<15:56, 2.76it/s]\u001b[A\n 90%|█████████ | 24052/26685 [3:33:07<17:02, 2.58it/s]\u001b[A\n 90%|█████████ | 24053/26685 [3:33:07<18:50, 2.33it/s]\u001b[A\n 90%|█████████ | 24054/26685 [3:33:08<18:14, 2.40it/s]\u001b[A\n 90%|█████████ | 24055/26685 [3:33:08<16:54, 2.59it/s]\u001b[A\n 90%|█████████ | 24056/26685 [3:33:08<16:00, 2.74it/s]\u001b[A\n 90%|█████████ | 24057/26685 [3:33:09<15:41, 2.79it/s]\u001b[A\n 90%|█████████ | 24058/26685 [3:33:09<19:00, 2.30it/s]\u001b[A\n 90%|█████████ | 24059/26685 [3:33:10<19:51, 2.20it/s]\u001b[A\n 90%|█████████ | 24060/26685 [3:33:10<18:41, 2.34it/s]\u001b[A\n 90%|█████████ | 24061/26685 [3:33:11<18:07, 2.41it/s]\u001b[A\n 90%|█████████ | 24062/26685 [3:33:11<17:45, 2.46it/s]\u001b[A\n 90%|█████████ | 24063/26685 [3:33:11<17:40, 2.47it/s]\u001b[A\n 90%|█████████ | 24064/26685 [3:33:12<18:09, 2.41it/s]\u001b[A\n 90%|█████████ | 24065/26685 [3:33:12<16:58, 2.57it/s]\u001b[A\n 90%|█████████ | 24066/26685 [3:33:12<16:30, 2.64it/s]\u001b[A\n 90%|█████████ | 24067/26685 [3:33:13<16:31, 2.64it/s]\u001b[A\n 90%|█████████ | 24068/26685 [3:33:14<26:32, 1.64it/s]\u001b[A\n 90%|█████████ | 24069/26685 [3:33:14<22:36, 1.93it/s]\u001b[A\n 90%|█████████ | 24070/26685 [3:33:15<21:19, 2.04it/s]\u001b[A\n 90%|█████████ | 24071/26685 [3:33:16<26:11, 1.66it/s]\u001b[A\n 90%|█████████ | 24072/26685 [3:33:16<25:06, 1.74it/s]\u001b[A\n 90%|█████████ | 24073/26685 [3:33:16<20:47, 2.09it/s]\u001b[A\n 90%|█████████ | 24074/26685 [3:33:17<22:29, 1.93it/s]\u001b[A\n 90%|█████████ | 24075/26685 [3:33:17<20:02, 2.17it/s]\u001b[A\n 90%|█████████ | 24076/26685 [3:33:18<25:10, 1.73it/s]\u001b[A\n 90%|█████████ | 24077/26685 [3:33:19<30:34, 1.42it/s]\u001b[A\n 90%|█████████ | 24078/26685 [3:33:19<25:28, 1.71it/s]\u001b[A\n 90%|█████████ | 24079/26685 [3:33:20<28:42, 1.51it/s]\u001b[A\n 90%|█████████ | 24080/26685 [3:33:21<25:19, 1.71it/s]\u001b[A\n 90%|█████████ | 24081/26685 [3:33:21<24:35, 1.76it/s]\u001b[A\n 90%|█████████ | 24082/26685 [3:33:22<21:18, 2.04it/s]\u001b[A\n 90%|█████████ | 24083/26685 [3:33:22<18:23, 2.36it/s]\u001b[A\n 90%|█████████ | 24084/26685 [3:33:22<19:23, 2.23it/s]\u001b[A\n 90%|█████████ | 24085/26685 [3:33:23<17:16, 2.51it/s]\u001b[A\n 90%|█████████ | 24086/26685 [3:33:23<23:13, 1.86it/s]\u001b[A\n 90%|█████████ | 24087/26685 [3:33:24<22:30, 1.92it/s]\u001b[A\n 90%|█████████ | 24088/26685 [3:33:24<21:24, 2.02it/s]\u001b[A\n 90%|█████████ | 24089/26685 [3:33:25<20:01, 2.16it/s]\u001b[A\n 90%|█████████ | 24090/26685 [3:33:25<17:17, 2.50it/s]\u001b[A\n 90%|█████████ | 24091/26685 [3:33:25<18:08, 2.38it/s]\u001b[A\n 90%|█████████ | 24092/26685 [3:33:26<17:43, 2.44it/s]\u001b[A\n 90%|█████████ | 24093/26685 [3:33:26<16:36, 2.60it/s]\u001b[A\n 90%|█████████ | 24094/26685 [3:33:27<15:51, 2.72it/s]\u001b[A\n 90%|█████████ | 24095/26685 [3:33:27<16:18, 2.65it/s]\u001b[A\n 90%|█████████ | 24096/26685 [3:33:28<22:27, 1.92it/s]\u001b[A\n 90%|█████████ | 24097/26685 [3:33:28<23:15, 1.85it/s]\u001b[A\n 90%|█████████ | 24098/26685 [3:33:29<20:40, 2.08it/s]\u001b[A\n 90%|█████████ | 24099/26685 [3:33:30<27:51, 1.55it/s]\u001b[A\n 90%|█████████ | 24100/26685 [3:33:30<23:58, 1.80it/s]\u001b[A\n 90%|█████████ | 24101/26685 [3:33:31<24:07, 
1.79it/s]\u001b[A\n 90%|█████████ | 24102/26685 [3:33:31<26:37, 1.62it/s]\u001b[A\n 90%|█████████ | 24103/26685 [3:33:32<26:22, 1.63it/s]\u001b[A\n 90%|█████████ | 24104/26685 [3:33:33<28:41, 1.50it/s]\u001b[A\n 90%|█████████ | 24105/26685 [3:33:33<24:30, 1.75it/s]\u001b[A\n 90%|█████████ | 24106/26685 [3:33:34<27:54, 1.54it/s]\u001b[A\n 90%|█████████ | 24107/26685 [3:33:34<24:18, 1.77it/s]\u001b[A\n 90%|█████████ | 24108/26685 [3:33:35<27:43, 1.55it/s]\u001b[A\n 90%|█████████ | 24109/26685 [3:33:36<24:05, 1.78it/s]\u001b[A\n 90%|█████████ | 24110/26685 [3:33:36<22:33, 1.90it/s]\u001b[A\n 90%|█████████ | 24111/26685 [3:33:36<21:41, 1.98it/s]\u001b[A\n 90%|█████████ | 24112/26685 [3:33:37<19:10, 2.24it/s]\u001b[A\n 90%|█████████ | 24113/26685 [3:33:38<24:24, 1.76it/s]\u001b[A\n 90%|█████████ | 24114/26685 [3:33:38<27:50, 1.54it/s]\u001b[A\n 90%|█████████ | 24115/26685 [3:33:39<23:54, 1.79it/s]\u001b[A\n 90%|█████████ | 24116/26685 [3:33:39<20:45, 2.06it/s]\u001b[A\n 90%|█████████ | 24117/26685 [3:33:39<18:45, 2.28it/s]\u001b[A\n 90%|█████████ | 24118/26685 [3:33:40<16:38, 2.57it/s]\u001b[A\n 90%|█████████ | 24119/26685 [3:33:40<17:39, 2.42it/s]\u001b[A\n 90%|█████████ | 24120/26685 [3:33:41<17:08, 2.49it/s]\u001b[A\n 90%|█████████ | 24121/26685 [3:33:41<16:11, 2.64it/s]\u001b[A\n 90%|█████████ | 24122/26685 [3:33:41<15:00, 2.85it/s]\u001b[A\n 90%|█████████ | 24123/26685 [3:33:42<15:00, 2.85it/s]\u001b[A\n 90%|█████████ | 24124/26685 [3:33:42<16:14, 2.63it/s]\u001b[A\n 90%|█████████ | 24125/26685 [3:33:42<15:47, 2.70it/s]\u001b[A\n 90%|█████████ | 24126/26685 [3:33:43<15:35, 2.73it/s]\u001b[A\n 90%|█████████ | 24127/26685 [3:33:43<19:52, 2.14it/s]\u001b[A\n 90%|█████████ | 24128/26685 [3:33:44<18:27, 2.31it/s]\u001b[A\n 90%|█████████ | 24129/26685 [3:33:44<18:04, 2.36it/s]\u001b[A\n 90%|█████████ | 24130/26685 [3:33:44<16:47, 2.54it/s]\u001b[A\n 90%|█████████ | 24131/26685 [3:33:45<15:26, 2.76it/s]\u001b[A\n 90%|█████████ | 24132/26685 [3:33:45<17:26, 2.44it/s]\u001b[A\n 90%|█████████ | 24133/26685 [3:33:46<18:00, 2.36it/s]\u001b[A\n 90%|█████████ | 24134/26685 [3:33:46<16:24, 2.59it/s]\u001b[A\n 90%|█████████ | 24135/26685 [3:33:47<21:23, 1.99it/s]\u001b[A\n 90%|█████████ | 24136/26685 [3:33:47<20:16, 2.10it/s]\u001b[A\n 90%|█████████ | 24137/26685 [3:33:48<26:58, 1.57it/s]\u001b[A\n 90%|█████████ | 24138/26685 [3:33:49<25:24, 1.67it/s]\u001b[A\n 90%|█████████ | 24139/26685 [3:33:49<22:02, 1.92it/s]\u001b[A\n 90%|█████████ | 24140/26685 [3:33:49<19:00, 2.23it/s]\u001b[A\n 90%|█████████ | 24141/26685 [3:33:50<19:47, 2.14it/s]\u001b[A\n 90%|█████████ | 24142/26685 [3:33:51<24:16, 1.75it/s]\u001b[A\n 90%|█████████ | 24143/26685 [3:33:51<21:32, 1.97it/s]\u001b[A\n 90%|█████████ | 24144/26685 [3:33:51<19:31, 2.17it/s]\u001b[A\n 90%|█████████ | 24145/26685 [3:33:52<18:36, 2.27it/s]\u001b[A\n 90%|█████████ | 24146/26685 [3:33:52<18:35, 2.28it/s]\u001b[A\n 90%|█████████ | 24147/26685 [3:33:52<16:42, 2.53it/s]\u001b[A\n 90%|█████████ | 24148/26685 [3:33:53<18:42, 2.26it/s]\u001b[A\n 90%|█████████ | 24149/26685 [3:33:53<16:49, 2.51it/s]\u001b[A\n 91%|█████████ | 24150/26685 [3:33:54<15:46, 2.68it/s]\u001b[A\n 91%|█████████ | 24151/26685 [3:33:54<16:39, 2.54it/s]\u001b[A\n 91%|█████████ | 24152/26685 [3:33:55<18:27, 2.29it/s]\u001b[A\n 91%|█████████ | 24153/26685 [3:33:55<21:14, 1.99it/s]\u001b[A\n 91%|█████████ | 24154/26685 [3:33:56<20:19, 2.08it/s]\u001b[A\n 91%|█████████ | 24155/26685 [3:33:56<21:00, 2.01it/s]\u001b[A\n 91%|█████████ | 24156/26685 [3:33:57<22:28, 1.87it/s]\u001b[A\n 91%|█████████ 
| 24157/26685 [3:33:57<20:01, 2.10it/s]\u001b[A\n 91%|█████████ | 24158/26685 [3:33:58<18:59, 2.22it/s]\u001b[A\n 91%|█████████ | 24159/26685 [3:33:58<24:08, 1.74it/s]\u001b[A\n 91%|█████████ | 24160/26685 [3:33:59<28:20, 1.48it/s]\u001b[A\n 91%|█████████ | 24161/26685 [3:34:00<23:38, 1.78it/s]\u001b[A\n 91%|█████████ | 24162/26685 [3:34:00<19:55, 2.11it/s]\u001b[A\n 91%|█████████ | 24163/26685 [3:34:01<24:04, 1.75it/s]\u001b[A\n 91%|█████████ | 24164/26685 [3:34:02<27:46, 1.51it/s]\u001b[A\n 91%|█████████ | 24165/26685 [3:34:02<30:15, 1.39it/s]\u001b[A\n 91%|█████████ | 24166/26685 [3:34:03<31:59, 1.31it/s]\u001b[A\n 91%|█████████ | 24167/26685 [3:34:04<27:40, 1.52it/s]\u001b[A\n 91%|█████████ | 24168/26685 [3:34:04<23:30, 1.78it/s]\u001b[A\n 91%|█████████ | 24169/26685 [3:34:04<20:34, 2.04it/s]\u001b[A\n 91%|█████████ | 24170/26685 [3:34:05<24:57, 1.68it/s]\u001b[A\n 91%|█████████ | 24171/26685 [3:34:06<24:24, 1.72it/s]\u001b[A\n 91%|█████████ | 24172/26685 [3:34:06<24:31, 1.71it/s]\u001b[A\n 91%|█████████ | 24173/26685 [3:34:07<23:04, 1.81it/s]\u001b[A\n 91%|█████████ | 24174/26685 [3:34:07<20:33, 2.04it/s]\u001b[A\n 91%|█████████ | 24175/26685 [3:34:08<25:59, 1.61it/s]\u001b[A\n 91%|█████████ | 24176/26685 [3:34:09<23:01, 1.82it/s]\u001b[A\n 91%|█████████ | 24177/26685 [3:34:09<21:40, 1.93it/s]\u001b[A\n 91%|█████████ | 24178/26685 [3:34:09<18:34, 2.25it/s]\u001b[A\n 91%|█████████ | 24179/26685 [3:34:10<16:38, 2.51it/s]\u001b[A\n 91%|█████████ | 24180/26685 [3:34:10<15:59, 2.61it/s]\u001b[A\n 91%|█████████ | 24181/26685 [3:34:11<21:13, 1.97it/s]\u001b[A\n 91%|█████████ | 24182/26685 [3:34:11<20:58, 1.99it/s]\u001b[A\n 91%|█████████ | 24183/26685 [3:34:12<21:57, 1.90it/s]\u001b[A\n 91%|█████████ | 24184/26685 [3:34:12<19:54, 2.09it/s]\u001b[A\n 91%|█████████ | 24185/26685 [3:34:13<19:35, 2.13it/s]\u001b[A\n 91%|█████████ | 24186/26685 [3:34:13<17:13, 2.42it/s]\u001b[A\n 91%|█████████ | 24187/26685 [3:34:14<22:11, 1.88it/s]\u001b[A\n 91%|█████████ | 24188/26685 [3:34:14<23:59, 1.73it/s]\u001b[A\n 91%|█████████ | 24189/26685 [3:34:15<21:05, 1.97it/s]\u001b[A\n 91%|█████████ | 24190/26685 [3:34:15<19:08, 2.17it/s]\u001b[A\n 91%|█████████ | 24191/26685 [3:34:16<23:51, 1.74it/s]\u001b[A\n 91%|█████████ | 24192/26685 [3:34:16<20:37, 2.01it/s]\u001b[A\n 91%|█████████ | 24193/26685 [3:34:17<21:49, 1.90it/s]\u001b[A\n 91%|█████████ | 24194/26685 [3:34:17<21:06, 1.97it/s]\u001b[A\n 91%|█████████ | 24195/26685 [3:34:18<18:57, 2.19it/s]\u001b[A\n 91%|█████████ | 24196/26685 [3:34:18<17:15, 2.40it/s]\u001b[A\n 91%|█████████ | 24197/26685 [3:34:18<16:07, 2.57it/s]\u001b[A\n 91%|█████████ | 24198/26685 [3:34:19<20:05, 2.06it/s]\u001b[A\n 91%|█████████ | 24199/26685 [3:34:19<19:53, 2.08it/s]\u001b[A\n 91%|█████████ | 24200/26685 [3:34:20<17:57, 2.31it/s]\u001b[A\n 91%|█████████ | 24201/26685 [3:34:20<19:48, 2.09it/s]\u001b[A\n 91%|█████████ | 24202/26685 [3:34:21<18:16, 2.27it/s]\u001b[A\n 91%|█████████ | 24203/26685 [3:34:21<19:15, 2.15it/s]\u001b[A\n 91%|█████████ | 24204/26685 [3:34:22<17:34, 2.35it/s]\u001b[A\n 91%|█████████ | 24205/26685 [3:34:22<17:10, 2.41it/s]\u001b[A\n 91%|█████████ | 24206/26685 [3:34:22<15:56, 2.59it/s]\u001b[A\n 91%|█████████ | 24207/26685 [3:34:23<22:15, 1.86it/s]\u001b[A\n 91%|█████████ | 24208/26685 [3:34:24<20:21, 2.03it/s]\u001b[A\n 91%|█████████ | 24209/26685 [3:34:24<18:03, 2.29it/s]\u001b[A\n 91%|█████████ | 24210/26685 [3:34:24<17:01, 2.42it/s]\u001b[A\n 91%|█████████ | 24211/26685 [3:34:25<16:35, 2.49it/s]\u001b[A\n 91%|█████████ | 24212/26685 [3:34:25<15:49, 
2.61it/s]\u001b[A\n 91%|█████████ | 24213/26685 [3:34:26<21:42, 1.90it/s]\u001b[A\n 91%|█████████ | 24214/26685 [3:34:26<19:58, 2.06it/s]\u001b[A\n 91%|█████████ | 24215/26685 [3:34:27<19:52, 2.07it/s]\u001b[A\n 91%|█████████ | 24216/26685 [3:34:27<23:59, 1.71it/s]\u001b[A\n 91%|█████████ | 24217/26685 [3:34:28<22:15, 1.85it/s]\u001b[A\n 91%|█████████ | 24218/26685 [3:34:28<21:18, 1.93it/s]\u001b[A\n 91%|█████████ | 24219/26685 [3:34:29<20:37, 1.99it/s]\u001b[A\n 91%|█████████ | 24220/26685 [3:34:30<25:42, 1.60it/s]\u001b[A\n 91%|█████████ | 24221/26685 [3:34:30<24:03, 1.71it/s]\u001b[A\n 91%|█████████ | 24222/26685 [3:34:31<23:02, 1.78it/s]\u001b[A\n 91%|█████████ | 24223/26685 [3:34:31<20:39, 1.99it/s]\u001b[A\n 91%|█████████ | 24224/26685 [3:34:31<17:54, 2.29it/s]\u001b[A\n 91%|█████████ | 24225/26685 [3:34:32<18:28, 2.22it/s]\u001b[A\n 91%|█████████ | 24226/26685 [3:34:32<17:14, 2.38it/s]\u001b[A\n 91%|█████████ | 24227/26685 [3:34:33<16:25, 2.49it/s]\u001b[A\n 91%|█████████ | 24228/26685 [3:34:33<18:53, 2.17it/s]\u001b[A\n 91%|█████████ | 24229/26685 [3:34:34<18:09, 2.25it/s]\u001b[A\n 91%|█████████ | 24230/26685 [3:34:34<23:17, 1.76it/s]\u001b[A\n 91%|█████████ | 24231/26685 [3:34:35<20:20, 2.01it/s]\u001b[A\n 91%|█████████ | 24232/26685 [3:34:35<19:49, 2.06it/s]\u001b[A\n 91%|█████████ | 24233/26685 [3:34:36<18:14, 2.24it/s]\u001b[A\n 91%|█████████ | 24234/26685 [3:34:36<16:19, 2.50it/s]\u001b[A\n 91%|█████████ | 24235/26685 [3:34:36<16:01, 2.55it/s]\u001b[A\n 91%|█████████ | 24236/26685 [3:34:37<14:58, 2.73it/s]\u001b[A\n 91%|█████████ | 24237/26685 [3:34:37<16:02, 2.54it/s]\u001b[A\n 91%|█████████ | 24238/26685 [3:34:37<16:06, 2.53it/s]\u001b[A\n 91%|█████████ | 24239/26685 [3:34:38<15:39, 2.60it/s]\u001b[A\n 91%|█████████ | 24240/26685 [3:34:38<14:20, 2.84it/s]\u001b[A\n 91%|█████████ | 24241/26685 [3:34:39<20:06, 2.03it/s]\u001b[A\n 91%|█████████ | 24242/26685 [3:34:39<18:00, 2.26it/s]\u001b[A\n 91%|█████████ | 24243/26685 [3:34:40<18:00, 2.26it/s]\u001b[A\n 91%|█████████ | 24244/26685 [3:34:40<18:32, 2.19it/s]\u001b[A\n 91%|█████████ | 24245/26685 [3:34:41<18:39, 2.18it/s]\u001b[A\n 91%|█████████ | 24246/26685 [3:34:41<17:41, 2.30it/s]\u001b[A\n 91%|█████████ | 24247/26685 [3:34:41<16:48, 2.42it/s]\u001b[A\n 91%|█████████ | 24248/26685 [3:34:42<19:56, 2.04it/s]\u001b[A\n 91%|█████████ | 24249/26685 [3:34:42<18:43, 2.17it/s]\u001b[A\n 91%|█████████ | 24250/26685 [3:34:43<19:39, 2.06it/s]\u001b[A\n 91%|█████████ | 24251/26685 [3:34:43<19:35, 2.07it/s]\u001b[A\n 91%|█████████ | 24252/26685 [3:34:44<19:36, 2.07it/s]\u001b[A\n 91%|█████████ | 24253/26685 [3:34:44<17:31, 2.31it/s]\u001b[A\n 91%|█████████ | 24254/26685 [3:34:45<18:19, 2.21it/s]\u001b[A\n 91%|█████████ | 24255/26685 [3:34:45<18:19, 2.21it/s]\u001b[A\n 91%|█████████ | 24256/26685 [3:34:46<23:03, 1.76it/s]\u001b[A\n 91%|█████████ | 24257/26685 [3:34:47<23:03, 1.76it/s]\u001b[A\n 91%|█████████ | 24258/26685 [3:34:47<20:43, 1.95it/s]\u001b[A\n 91%|█████████ | 24259/26685 [3:34:47<18:20, 2.20it/s]\u001b[A\n 91%|█████████ | 24260/26685 [3:34:48<17:41, 2.29it/s]\u001b[A\n 91%|█████████ | 24261/26685 [3:34:48<15:57, 2.53it/s]\u001b[A\n 91%|█████████ | 24262/26685 [3:34:48<15:43, 2.57it/s]\u001b[A\n 91%|█████████ | 24263/26685 [3:34:49<14:48, 2.73it/s]\u001b[A\n 91%|█████████ | 24264/26685 [3:34:49<20:16, 1.99it/s]\u001b[A\n 91%|█████████ | 24265/26685 [3:34:51<26:40, 1.51it/s]\u001b[A\n 91%|█████████ | 24266/26685 [3:34:51<24:18, 1.66it/s]\u001b[A\n 91%|█████████ | 24267/26685 [3:34:51<21:04, 1.91it/s]\u001b[A\n 91%|█████████ 
| 24268/26685 [3:34:52<19:18, 2.09it/s]\u001b[A\n 91%|█████████ | 24269/26685 [3:34:52<23:15, 1.73it/s]\u001b[A\n 91%|█████████ | 24270/26685 [3:34:53<22:35, 1.78it/s]\u001b[A\n 91%|█████████ | 24271/26685 [3:34:54<23:31, 1.71it/s]\u001b[A\n 91%|█████████ | 24272/26685 [3:34:54<21:55, 1.83it/s]\u001b[A\n 91%|█████████ | 24273/26685 [3:34:54<19:29, 2.06it/s]\u001b[A\n 91%|█████████ | 24274/26685 [3:34:55<18:47, 2.14it/s]\u001b[A\n 91%|█████████ | 24275/26685 [3:34:55<18:07, 2.22it/s]\u001b[A\n 91%|█████████ | 24276/26685 [3:34:56<17:17, 2.32it/s]\u001b[A\n 91%|█████████ | 24277/26685 [3:34:56<15:57, 2.51it/s]\u001b[A\n 91%|█████████ | 24278/26685 [3:34:56<14:15, 2.81it/s]\u001b[A\n 91%|█████████ | 24279/26685 [3:34:57<17:35, 2.28it/s]\u001b[A\n 91%|█████████ | 24280/26685 [3:34:57<17:07, 2.34it/s]\u001b[A\n 91%|█████████ | 24281/26685 [3:34:58<17:19, 2.31it/s]\u001b[A\n 91%|█████████ | 24282/26685 [3:34:58<21:13, 1.89it/s]\u001b[A\n 91%|█████████ | 24283/26685 [3:34:59<20:40, 1.94it/s]\u001b[A\n 91%|█████████ | 24284/26685 [3:34:59<19:23, 2.06it/s]\u001b[A\n 91%|█████████ | 24285/26685 [3:35:00<21:55, 1.82it/s]\u001b[A\n 91%|█████████ | 24286/26685 [3:35:00<19:39, 2.03it/s]\u001b[A\n 91%|█████████ | 24287/26685 [3:35:01<18:10, 2.20it/s]\u001b[A\n 91%|█████████ | 24288/26685 [3:35:02<22:26, 1.78it/s]\u001b[A\n 91%|█████████ | 24289/26685 [3:35:02<21:00, 1.90it/s]\u001b[A\n 91%|█████████ | 24290/26685 [3:35:03<25:07, 1.59it/s]\u001b[A\n 91%|█████████ | 24291/26685 [3:35:03<21:25, 1.86it/s]\u001b[A\n 91%|█████████ | 24292/26685 [3:35:04<19:43, 2.02it/s]\u001b[A\n 91%|█████████ | 24293/26685 [3:35:04<17:21, 2.30it/s]\u001b[A\n 91%|█████████ | 24294/26685 [3:35:04<16:47, 2.37it/s]\u001b[A\n 91%|█████████ | 24295/26685 [3:35:05<17:20, 2.30it/s]\u001b[A\n 91%|█████████ | 24296/26685 [3:35:05<18:04, 2.20it/s]\u001b[A\n 91%|█████████ | 24297/26685 [3:35:06<16:16, 2.45it/s]\u001b[A\n 91%|█████████ | 24298/26685 [3:35:06<14:55, 2.67it/s]\u001b[A\n 91%|█████████ | 24299/26685 [3:35:07<20:45, 1.92it/s]\u001b[A\n 91%|█████████ | 24300/26685 [3:35:07<22:12, 1.79it/s]\u001b[A\n 91%|█████████ | 24301/26685 [3:35:08<20:18, 1.96it/s]\u001b[A\n 91%|█████████ | 24302/26685 [3:35:08<19:39, 2.02it/s]\u001b[A\n 91%|█████████ | 24303/26685 [3:35:09<17:35, 2.26it/s]\u001b[A\n 91%|█████████ | 24304/26685 [3:35:09<15:43, 2.52it/s]\u001b[A\n 91%|█████████ | 24305/26685 [3:35:09<15:35, 2.54it/s]\u001b[A\n 91%|█████████ | 24306/26685 [3:35:10<15:58, 2.48it/s]\u001b[A\n 91%|█████████ | 24307/26685 [3:35:10<14:36, 2.71it/s]\u001b[A\n 91%|█████████ | 24308/26685 [3:35:10<16:20, 2.42it/s]\u001b[A\n 91%|█████████ | 24309/26685 [3:35:11<17:53, 2.21it/s]\u001b[A\n 91%|█████████ | 24310/26685 [3:35:11<16:16, 2.43it/s]\u001b[A\n 91%|█████████ | 24311/26685 [3:35:12<21:48, 1.81it/s]\u001b[A\n 91%|█████████ | 24312/26685 [3:35:13<19:10, 2.06it/s]\u001b[A\n 91%|█████████ | 24313/26685 [3:35:13<16:59, 2.33it/s]\u001b[A\n 91%|█████████ | 24314/26685 [3:35:13<15:47, 2.50it/s]\u001b[A\n 91%|█████████ | 24315/26685 [3:35:14<14:56, 2.64it/s]\u001b[A\n 91%|█████████ | 24316/26685 [3:35:14<14:46, 2.67it/s]\u001b[A\n 91%|█████████ | 24317/26685 [3:35:14<13:21, 2.95it/s]\u001b[A\n 91%|█████████ | 24318/26685 [3:35:15<14:55, 2.64it/s]\u001b[A\n 91%|█████████ | 24319/26685 [3:35:15<13:35, 2.90it/s]\u001b[A\n 91%|█████████ | 24320/26685 [3:35:15<15:10, 2.60it/s]\u001b[A\n 91%|█████████ | 24321/26685 [3:35:16<17:05, 2.30it/s]\u001b[A\n 91%|█████████ | 24322/26685 [3:35:16<18:03, 2.18it/s]\u001b[A\n 91%|█████████ | 24323/26685 [3:35:17<18:40, 
2.11it/s]\u001b[A\n 91%|█████████ | 24324/26685 [3:35:17<16:24, 2.40it/s]\u001b[A\n 91%|█████████ | 24325/26685 [3:35:18<21:00, 1.87it/s]\u001b[A\n 91%|█████████ | 24326/26685 [3:35:18<18:09, 2.17it/s]\u001b[A\n 91%|█████████ | 24327/26685 [3:35:19<16:43, 2.35it/s]\u001b[A\n 91%|█████████ | 24328/26685 [3:35:19<18:07, 2.17it/s]\u001b[A\n 91%|█████████ | 24329/26685 [3:35:20<17:31, 2.24it/s]\u001b[A\n 91%|█████████ | 24330/26685 [3:35:20<22:12, 1.77it/s]\u001b[A\n 91%|█████████ | 24331/26685 [3:35:21<21:58, 1.78it/s]\u001b[A\n 91%|█████████ | 24332/26685 [3:35:21<21:02, 1.86it/s]\u001b[A\n 91%|█████████ | 24333/26685 [3:35:22<18:53, 2.08it/s]\u001b[A\n 91%|█████████ | 24334/26685 [3:35:22<18:45, 2.09it/s]\u001b[A\n 91%|█████████ | 24335/26685 [3:35:23<19:44, 1.98it/s]\u001b[A\n 91%|█████████ | 24336/26685 [3:35:23<21:07, 1.85it/s]\u001b[A\n 91%|█████████ | 24337/26685 [3:35:24<18:02, 2.17it/s]\u001b[A\n 91%|█████████ | 24338/26685 [3:35:24<16:06, 2.43it/s]\u001b[A\n 91%|█████████ | 24339/26685 [3:35:24<14:32, 2.69it/s]\u001b[A\n 91%|█████████ | 24340/26685 [3:35:25<19:52, 1.97it/s]\u001b[A\n 91%|█████████ | 24341/26685 [3:35:26<19:44, 1.98it/s]\u001b[A\n 91%|█████████ | 24342/26685 [3:35:26<17:25, 2.24it/s]\u001b[A\n 91%|█████████ | 24343/26685 [3:35:26<16:08, 2.42it/s]\u001b[A\n 91%|█████████ | 24344/26685 [3:35:27<16:07, 2.42it/s]\u001b[A\n 91%|█████████ | 24345/26685 [3:35:28<20:37, 1.89it/s]\u001b[A\n 91%|█████████ | 24346/26685 [3:35:28<19:09, 2.03it/s]\u001b[A\n 91%|█████████ | 24347/26685 [3:35:28<18:52, 2.07it/s]\u001b[A\n 91%|█████████ | 24348/26685 [3:35:29<16:22, 2.38it/s]\u001b[A\n 91%|█████████ | 24349/26685 [3:35:29<14:54, 2.61it/s]\u001b[A\n 91%|█████████ | 24350/26685 [3:35:29<14:06, 2.76it/s]\u001b[A\n 91%|█████████▏| 24351/26685 [3:35:30<13:02, 2.98it/s]\u001b[A\n 91%|█████████▏| 24352/26685 [3:35:30<15:44, 2.47it/s]\u001b[A\n 91%|█████████▏| 24353/26685 [3:35:31<18:18, 2.12it/s]\u001b[A\n 91%|█████████▏| 24354/26685 [3:35:31<16:55, 2.29it/s]\u001b[A\n 91%|█████████▏| 24355/26685 [3:35:31<15:59, 2.43it/s]\u001b[A\n 91%|█████████▏| 24356/26685 [3:35:32<16:20, 2.38it/s]\u001b[A\n 91%|█████████▏| 24357/26685 [3:35:33<22:00, 1.76it/s]\u001b[A\n 91%|█████████▏| 24358/26685 [3:35:33<18:26, 2.10it/s]\u001b[A\n 91%|█████████▏| 24359/26685 [3:35:33<16:01, 2.42it/s]\u001b[A\n 91%|█████████▏| 24360/26685 [3:35:34<15:28, 2.51it/s]\u001b[A\n 91%|█████████▏| 24361/26685 [3:35:34<16:52, 2.30it/s]\u001b[A\n 91%|█████████▏| 24362/26685 [3:35:35<15:32, 2.49it/s]\u001b[A\n 91%|█████████▏| 24363/26685 [3:35:35<15:36, 2.48it/s]\u001b[A\n 91%|█████████▏| 24364/26685 [3:35:35<14:44, 2.62it/s]\u001b[A\n 91%|█████████▏| 24365/26685 [3:35:36<15:47, 2.45it/s]\u001b[A\n 91%|█████████▏| 24366/26685 [3:35:36<15:10, 2.55it/s]\u001b[A\n 91%|█████████▏| 24367/26685 [3:35:37<15:11, 2.54it/s]\u001b[A\n 91%|█████████▏| 24368/26685 [3:35:37<15:49, 2.44it/s]\u001b[A\n 91%|█████████▏| 24369/26685 [3:35:37<14:31, 2.66it/s]\u001b[A\n 91%|█████████▏| 24370/26685 [3:35:38<16:34, 2.33it/s]\u001b[A\n 91%|█████████▏| 24371/26685 [3:35:39<21:05, 1.83it/s]\u001b[A\n 91%|█████████▏| 24372/26685 [3:35:39<20:24, 1.89it/s]\u001b[A\n 91%|█████████▏| 24373/26685 [3:35:39<18:04, 2.13it/s]\u001b[A\n 91%|█████████▏| 24374/26685 [3:35:40<16:01, 2.40it/s]\u001b[A\n 91%|█████████▏| 24375/26685 [3:35:40<16:29, 2.33it/s]\u001b[A\n 91%|█████████▏| 24376/26685 [3:35:41<21:14, 1.81it/s]\u001b[A\n 91%|█████████▏| 24377/26685 [3:35:41<18:58, 2.03it/s]\u001b[A\n 91%|█████████▏| 24378/26685 [3:35:42<16:18, 2.36it/s]\u001b[A\n 
91%|█████████▏| 24379/26685 [3:35:42<14:48, 2.59it/s]\u001b[A\n 91%|█████████▏| 24380/26685 [3:35:42<15:13, 2.52it/s]\u001b[A\n 91%|█████████▏| 24381/26685 [3:35:43<17:48, 2.16it/s]\u001b[A\n 91%|█████████▏| 24382/26685 [3:35:44<18:42, 2.05it/s]\u001b[A\n 91%|█████████▏| 24383/26685 [3:35:44<16:54, 2.27it/s]\u001b[A\n 91%|█████████▏| 24384/26685 [3:35:44<18:13, 2.11it/s]\u001b[A\n 91%|█████████▏| 24385/26685 [3:35:45<19:12, 2.00it/s]\u001b[A\n 91%|█████████▏| 24386/26685 [3:35:46<24:18, 1.58it/s]\u001b[A\n 91%|█████████▏| 24387/26685 [3:35:46<20:33, 1.86it/s]\u001b[A\n 91%|█████████▏| 24388/26685 [3:35:47<18:06, 2.11it/s]\u001b[A\n 91%|█████████▏| 24389/26685 [3:35:47<17:03, 2.24it/s]\u001b[A\n 91%|█████████▏| 24390/26685 [3:35:47<18:08, 2.11it/s]\u001b[A\n 91%|█████████▏| 24391/26685 [3:35:48<18:11, 2.10it/s]\u001b[A\n 91%|█████████▏| 24392/26685 [3:35:48<16:30, 2.31it/s]\u001b[A\n 91%|█████████▏| 24393/26685 [3:35:49<15:44, 2.43it/s]\u001b[A\n 91%|█████████▏| 24394/26685 [3:35:49<15:26, 2.47it/s]\u001b[A\n 91%|█████████▏| 24395/26685 [3:35:50<18:04, 2.11it/s]\u001b[A\n 91%|█████████▏| 24396/26685 [3:35:50<15:54, 2.40it/s]\u001b[A\n 91%|█████████▏| 24397/26685 [3:35:50<15:44, 2.42it/s]\u001b[A\n 91%|█████████▏| 24398/26685 [3:35:51<20:10, 1.89it/s]\u001b[A\n 91%|█████████▏| 24399/26685 [3:35:52<19:34, 1.95it/s]\u001b[A\n 91%|█████████▏| 24400/26685 [3:35:52<18:21, 2.08it/s]\u001b[A\n 91%|█████████▏| 24401/26685 [3:35:52<15:56, 2.39it/s]\u001b[A\n 91%|█████████▏| 24402/26685 [3:35:53<16:08, 2.36it/s]\u001b[A\n 91%|█████████▏| 24403/26685 [3:35:53<15:55, 2.39it/s]\u001b[A\n 91%|█████████▏| 24404/26685 [3:35:53<14:52, 2.56it/s]\u001b[A\n 91%|█████████▏| 24405/26685 [3:35:54<20:00, 1.90it/s]\u001b[A\n 91%|█████████▏| 24406/26685 [3:35:55<17:26, 2.18it/s]\u001b[A\n 91%|█████████▏| 24407/26685 [3:35:55<15:33, 2.44it/s]\u001b[A\n 91%|█████████▏| 24408/26685 [3:35:55<15:57, 2.38it/s]\u001b[A\n 91%|█████████▏| 24409/26685 [3:35:56<14:42, 2.58it/s]\u001b[A\n 91%|█████████▏| 24410/26685 [3:35:56<14:10, 2.68it/s]\u001b[A\n 91%|█████████▏| 24411/26685 [3:35:56<13:29, 2.81it/s]\u001b[A\n 91%|█████████▏| 24412/26685 [3:35:57<17:06, 2.21it/s]\u001b[A\n 91%|█████████▏| 24413/26685 [3:35:57<15:10, 2.49it/s]\u001b[A\n 91%|█████████▏| 24414/26685 [3:35:58<13:33, 2.79it/s]\u001b[A\n 91%|█████████▏| 24415/26685 [3:35:58<16:38, 2.27it/s]\u001b[A\n 91%|█████████▏| 24416/26685 [3:35:59<15:50, 2.39it/s]\u001b[A\n 92%|█████████▏| 24417/26685 [3:35:59<14:15, 2.65it/s]\u001b[A\n 92%|█████████▏| 24418/26685 [3:36:00<19:43, 1.92it/s]\u001b[A\n 92%|█████████▏| 24419/26685 [3:36:00<17:42, 2.13it/s]\u001b[A\n 92%|█████████▏| 24420/26685 [3:36:00<15:50, 2.38it/s]\u001b[A\n 92%|█████████▏| 24421/26685 [3:36:01<18:03, 2.09it/s]\u001b[A\n 92%|█████████▏| 24422/26685 [3:36:02<21:38, 1.74it/s]\u001b[A\n 92%|█████████▏| 24423/26685 [3:36:02<19:07, 1.97it/s]\u001b[A\n 92%|█████████▏| 24424/26685 [3:36:03<19:20, 1.95it/s]\u001b[A\n 92%|█████████▏| 24425/26685 [3:36:03<16:31, 2.28it/s]\u001b[A\n 92%|█████████▏| 24426/26685 [3:36:03<16:25, 2.29it/s]\u001b[A\n 92%|█████████▏| 24427/26685 [3:36:04<20:55, 1.80it/s]\u001b[A\n 92%|█████████▏| 24428/26685 [3:36:05<19:24, 1.94it/s]\u001b[A\n 92%|█████████▏| 24429/26685 [3:36:05<22:42, 1.66it/s]\u001b[A\n 92%|█████████▏| 24430/26685 [3:36:06<25:19, 1.48it/s]\u001b[A\n 92%|█████████▏| 24431/26685 [3:36:07<21:27, 1.75it/s]\u001b[A\n 92%|█████████▏| 24432/26685 [3:36:07<22:37, 1.66it/s]\u001b[A\n 92%|█████████▏| 24433/26685 [3:36:08<21:15, 1.77it/s]\u001b[A\n 92%|█████████▏| 24434/26685 
[3:36:08<18:32, 2.02it/s]\u001b[A\n 92%|█████████▏| 24435/26685 [3:36:09<23:19, 1.61it/s]\u001b[A\n 92%|█████████▏| 24436/26685 [3:36:10<22:44, 1.65it/s]\u001b[A\n 92%|█████████▏| 24437/26685 [3:36:10<21:12, 1.77it/s]\u001b[A\n 92%|█████████▏| 24438/26685 [3:36:10<19:51, 1.89it/s]\u001b[A\n 92%|█████████▏| 24439/26685 [3:36:11<17:13, 2.17it/s]\u001b[A\n 92%|█████████▏| 24440/26685 [3:36:11<17:16, 2.17it/s]\u001b[A\n 92%|█████████▏| 24441/26685 [3:36:12<15:22, 2.43it/s]\u001b[A\n 92%|█████████▏| 24442/26685 [3:36:12<13:53, 2.69it/s]\u001b[A\n 92%|█████████▏| 24443/26685 [3:36:12<12:50, 2.91it/s]\u001b[A\n 92%|█████████▏| 24444/26685 [3:36:12<12:48, 2.92it/s]\u001b[A\n 92%|█████████▏| 24445/26685 [3:36:13<14:05, 2.65it/s]\u001b[A\n 92%|█████████▏| 24446/26685 [3:36:13<13:33, 2.75it/s]\u001b[A\n 92%|█████████▏| 24447/26685 [3:36:14<13:23, 2.78it/s]\u001b[A\n 92%|█████████▏| 24448/26685 [3:36:14<16:04, 2.32it/s]\u001b[A\n 92%|█████████▏| 24449/26685 [3:36:15<20:15, 1.84it/s]\u001b[A\n 92%|█████████▏| 24450/26685 [3:36:15<17:00, 2.19it/s]\u001b[A\n 92%|█████████▏| 24451/26685 [3:36:16<16:02, 2.32it/s]\u001b[A\n 92%|█████████▏| 24452/26685 [3:36:16<15:54, 2.34it/s]\u001b[A\n 92%|█████████▏| 24453/26685 [3:36:16<14:30, 2.56it/s]\u001b[A\n 92%|█████████▏| 24454/26685 [3:36:17<14:27, 2.57it/s]\u001b[A\n 92%|█████████▏| 24455/26685 [3:36:17<15:38, 2.38it/s]\u001b[A\n 92%|█████████▏| 24456/26685 [3:36:18<14:40, 2.53it/s]\u001b[A\n 92%|█████████▏| 24457/26685 [3:36:18<13:59, 2.65it/s]\u001b[A\n 92%|█████████▏| 24458/26685 [3:36:18<15:17, 2.43it/s]\u001b[A\n 92%|█████████▏| 24459/26685 [3:36:19<13:58, 2.65it/s]\u001b[A\n 92%|█████████▏| 24460/26685 [3:36:19<14:02, 2.64it/s]\u001b[A\n 92%|█████████▏| 24461/26685 [3:36:19<14:24, 2.57it/s]\u001b[A\n 92%|█████████▏| 24462/26685 [3:36:20<13:25, 2.76it/s]\u001b[A\n 92%|█████████▏| 24463/26685 [3:36:21<18:16, 2.03it/s]\u001b[A\n 92%|█████████▏| 24464/26685 [3:36:21<15:57, 2.32it/s]\u001b[A\n 92%|█████████▏| 24465/26685 [3:36:21<16:17, 2.27it/s]\u001b[A\n 92%|█████████▏| 24466/26685 [3:36:22<15:48, 2.34it/s]\u001b[A\n 92%|█████████▏| 24467/26685 [3:36:22<14:19, 2.58it/s]\u001b[A\n 92%|█████████▏| 24468/26685 [3:36:22<14:23, 2.57it/s]\u001b[A\n 92%|█████████▏| 24469/26685 [3:36:23<13:37, 2.71it/s]\u001b[A\n 92%|█████████▏| 24470/26685 [3:36:23<13:01, 2.83it/s]\u001b[A\n 92%|█████████▏| 24471/26685 [3:36:23<12:20, 2.99it/s]\u001b[A\n 92%|█████████▏| 24472/26685 [3:36:24<13:57, 2.64it/s]\u001b[A\n 92%|█████████▏| 24473/26685 [3:36:24<13:27, 2.74it/s]\u001b[A\n 92%|█████████▏| 24474/26685 [3:36:24<13:28, 2.74it/s]\u001b[A\n 92%|█████████▏| 24475/26685 [3:36:25<18:54, 1.95it/s]\u001b[A\n 92%|█████████▏| 24476/26685 [3:36:26<20:29, 1.80it/s]\u001b[A\n 92%|█████████▏| 24477/26685 [3:36:26<18:19, 2.01it/s]\u001b[A\n 92%|█████████▏| 24478/26685 [3:36:27<15:46, 2.33it/s]\u001b[A\n 92%|█████████▏| 24479/26685 [3:36:27<14:52, 2.47it/s]\u001b[A\n 92%|█████████▏| 24480/26685 [3:36:27<15:13, 2.41it/s]\u001b[A\n 92%|█████████▏| 24481/26685 [3:36:28<19:41, 1.86it/s]\u001b[A\n 92%|█████████▏| 24482/26685 [3:36:29<17:00, 2.16it/s]\u001b[A\n 92%|█████████▏| 24483/26685 [3:36:29<15:24, 2.38it/s]\u001b[A\n 92%|█████████▏| 24484/26685 [3:36:30<20:55, 1.75it/s]\u001b[A\n 92%|█████████▏| 24485/26685 [3:36:30<21:57, 1.67it/s]\u001b[A\n 92%|█████████▏| 24486/26685 [3:36:31<22:21, 1.64it/s]\u001b[A\n 92%|█████████▏| 24487/26685 [3:36:32<20:49, 1.76it/s]\u001b[A\n 92%|█████████▏| 24488/26685 [3:36:32<18:03, 2.03it/s]\u001b[A\n 92%|█████████▏| 24489/26685 [3:36:32<15:47, 
2.32it/s]\u001b[A\n 92%|█████████▏| 24490/26685 [3:36:33<20:44, 1.76it/s]\u001b[A\n 92%|█████████▏| 24491/26685 [3:36:33<18:37, 1.96it/s]\u001b[A\n 92%|█████████▏| 24492/26685 [3:36:34<17:17, 2.11it/s]\u001b[A\n 92%|█████████▏| 24493/26685 [3:36:34<15:26, 2.37it/s]\u001b[A\n 92%|█████████▏| 24494/26685 [3:36:35<16:18, 2.24it/s]\u001b[A\n 92%|█████████▏| 24495/26685 [3:36:36<21:18, 1.71it/s]\u001b[A\n 92%|█████████▏| 24496/26685 [3:36:36<18:44, 1.95it/s]\u001b[A\n 92%|█████████▏| 24497/26685 [3:36:36<18:59, 1.92it/s]\u001b[A\n 92%|█████████▏| 24498/26685 [3:36:37<18:14, 2.00it/s]\u001b[A\n 92%|█████████▏| 24499/26685 [3:36:38<23:01, 1.58it/s]\u001b[A\n 92%|█████████▏| 24500/26685 [3:36:38<20:10, 1.81it/s]\u001b[A\n 92%|█████████▏| 24501/26685 [3:36:39<23:31, 1.55it/s]\u001b[A\n 92%|█████████▏| 24502/26685 [3:36:39<19:14, 1.89it/s]\u001b[A\n 92%|█████████▏| 24503/26685 [3:36:40<20:24, 1.78it/s]\u001b[A\n 92%|█████████▏| 24504/26685 [3:36:40<17:24, 2.09it/s]\u001b[A\n 92%|█████████▏| 24505/26685 [3:36:41<16:44, 2.17it/s]\u001b[A\n 92%|█████████▏| 24506/26685 [3:36:41<17:53, 2.03it/s]\u001b[A\n 92%|█████████▏| 24507/26685 [3:36:42<16:12, 2.24it/s]\u001b[A\n 92%|█████████▏| 24508/26685 [3:36:42<14:48, 2.45it/s]\u001b[A\n 92%|█████████▏| 24509/26685 [3:36:42<14:57, 2.42it/s]\u001b[A\n 92%|█████████▏| 24510/26685 [3:36:43<13:52, 2.61it/s]\u001b[A\n 92%|█████████▏| 24511/26685 [3:36:43<16:26, 2.20it/s]\u001b[A\n 92%|█████████▏| 24512/26685 [3:36:44<16:01, 2.26it/s]\u001b[A\n 92%|█████████▏| 24513/26685 [3:36:44<17:32, 2.06it/s]\u001b[A\n 92%|█████████▏| 24514/26685 [3:36:45<16:35, 2.18it/s]\u001b[A\n 92%|█████████▏| 24515/26685 [3:36:45<15:33, 2.33it/s]\u001b[A\n 92%|█████████▏| 24516/26685 [3:36:46<16:58, 2.13it/s]\u001b[A\n 92%|█████████▏| 24517/26685 [3:36:46<15:50, 2.28it/s]\u001b[A\n 92%|█████████▏| 24518/26685 [3:36:47<19:57, 1.81it/s]\u001b[A\n 92%|█████████▏| 24519/26685 [3:36:47<18:12, 1.98it/s]\u001b[A\n 92%|█████████▏| 24520/26685 [3:36:47<16:22, 2.20it/s]\u001b[A\n 92%|█████████▏| 24521/26685 [3:36:48<15:26, 2.33it/s]\u001b[A\n 92%|█████████▏| 24522/26685 [3:36:48<15:05, 2.39it/s]\u001b[A\n 92%|█████████▏| 24523/26685 [3:36:49<16:18, 2.21it/s]\u001b[A\n 92%|█████████▏| 24524/26685 [3:36:49<14:43, 2.44it/s]\u001b[A\n 92%|█████████▏| 24525/26685 [3:36:49<13:23, 2.69it/s]\u001b[A\n 92%|█████████▏| 24526/26685 [3:36:50<15:16, 2.35it/s]\u001b[A\n 92%|█████████▏| 24527/26685 [3:36:50<14:16, 2.52it/s]\u001b[A\n 92%|█████████▏| 24528/26685 [3:36:51<15:11, 2.37it/s]\u001b[A\n 92%|█████████▏| 24529/26685 [3:36:51<13:48, 2.60it/s]\u001b[A\n 92%|█████████▏| 24530/26685 [3:36:51<12:48, 2.81it/s]\u001b[A\n 92%|█████████▏| 24531/26685 [3:36:52<15:09, 2.37it/s]\u001b[A\n 92%|█████████▏| 24532/26685 [3:36:52<13:38, 2.63it/s]\u001b[A\n 92%|█████████▏| 24533/26685 [3:36:53<16:29, 2.18it/s]\u001b[A\n 92%|█████████▏| 24534/26685 [3:36:53<15:07, 2.37it/s]\u001b[A\n 92%|█████████▏| 24535/26685 [3:36:53<13:56, 2.57it/s]\u001b[A\n 92%|█████████▏| 24536/26685 [3:36:54<15:03, 2.38it/s]\u001b[A\n 92%|█████████▏| 24537/26685 [3:36:54<16:13, 2.21it/s]\u001b[A\n 92%|█████████▏| 24538/26685 [3:36:55<14:48, 2.42it/s]\u001b[A\n 92%|█████████▏| 24539/26685 [3:36:55<15:41, 2.28it/s]\u001b[A\n 92%|█████████▏| 24540/26685 [3:36:56<14:07, 2.53it/s]\u001b[A\n 92%|█████████▏| 24541/26685 [3:36:56<19:00, 1.88it/s]\u001b[A\n 92%|█████████▏| 24542/26685 [3:36:57<17:46, 2.01it/s]\u001b[A\n 92%|█████████▏| 24543/26685 [3:36:57<16:07, 2.21it/s]\u001b[A\n 92%|█████████▏| 24544/26685 [3:36:57<14:31, 2.46it/s]\u001b[A\n 
96%|█████████▋| 25707/26685 [3:45:23<05:16, 3.09it/s]\u001b[A\n 96%|█████████▋| 25708/26685 [3:45:24<08:03, 2.02it/s]\u001b[A\n 96%|█████████▋| 25709/26685 [3:45:24<07:44, 2.10it/s]\u001b[A\n 96%|█████████▋| 25710/26685 [3:45:24<07:11, 2.26it/s]\u001b[A\n 96%|█████████▋| 25711/26685 [3:45:25<07:02, 2.31it/s]\u001b[A\n 96%|█████████▋| 25712/26685 [3:45:26<08:57, 1.81it/s]\u001b[A\n 96%|█████████▋| 25713/26685 [3:45:27<10:15, 1.58it/s]\u001b[A\n 96%|█████████▋| 25714/26685 [3:45:27<08:32, 1.90it/s]\u001b[A\n 96%|█████████▋| 25715/26685 [3:45:28<10:10, 1.59it/s]\u001b[A\n 96%|█████████▋| 25716/26685 [3:45:28<09:44, 1.66it/s]\u001b[A\n 96%|█████████▋| 25717/26685 [3:45:29<08:45, 1.84it/s]\u001b[A\n 96%|█████████▋| 25718/26685 [3:45:29<10:21, 1.55it/s]\u001b[A\n 96%|█████████▋| 25719/26685 [3:45:30<08:45, 1.84it/s]\u001b[A\n 96%|█████████▋| 25720/26685 [3:45:30<07:37, 2.11it/s]\u001b[A\n 96%|█████████▋| 25721/26685 [3:45:30<06:58, 2.30it/s]\u001b[A\n 96%|█████████▋| 25722/26685 [3:45:31<07:31, 2.13it/s]\u001b[A\n 96%|█████████▋| 25723/26685 [3:45:31<06:29, 2.47it/s]\u001b[A\n 96%|█████████▋| 25724/26685 [3:45:32<06:20, 2.52it/s]\u001b[A\n 96%|█████████▋| 25725/26685 [3:45:32<05:46, 2.77it/s]\u001b[A\n 96%|█████████▋| 25726/26685 [3:45:32<05:15, 3.04it/s]\u001b[A\n 96%|█████████▋| 25727/26685 [3:45:33<06:35, 2.42it/s]\u001b[A\n 96%|█████████▋| 25728/26685 [3:45:33<05:58, 2.67it/s]\u001b[A\n 96%|█████████▋| 25729/26685 [3:45:33<05:27, 2.92it/s]\u001b[A\n 96%|█████████▋| 25730/26685 [3:45:34<08:01, 1.98it/s]\u001b[A\n 96%|█████████▋| 25731/26685 [3:45:35<07:20, 2.16it/s]\u001b[A\n 96%|█████████▋| 25732/26685 [3:45:35<06:42, 2.37it/s]\u001b[A\n 96%|█████████▋| 25733/26685 [3:45:35<06:13, 2.55it/s]\u001b[A\n 96%|█████████▋| 25734/26685 [3:45:36<06:28, 2.45it/s]\u001b[A\n 96%|█████████▋| 25735/26685 [3:45:36<08:25, 1.88it/s]\u001b[A\n 96%|█████████▋| 25736/26685 [3:45:37<07:12, 2.19it/s]\u001b[A\n 96%|█████████▋| 25737/26685 [3:45:37<06:26, 2.45it/s]\u001b[A\n 96%|█████████▋| 25738/26685 [3:45:37<06:03, 2.61it/s]\u001b[A\n 96%|█████████▋| 25739/26685 [3:45:38<08:07, 1.94it/s]\u001b[A\n 96%|█████████▋| 25740/26685 [3:45:39<08:23, 1.88it/s]\u001b[A\n 96%|█████████▋| 25741/26685 [3:45:39<08:17, 1.90it/s]\u001b[A\n 96%|█████████▋| 25742/26685 [3:45:40<07:21, 2.14it/s]\u001b[A\n 96%|█████████▋| 25743/26685 [3:45:40<07:26, 2.11it/s]\u001b[A\n 96%|█████████▋| 25744/26685 [3:45:41<07:02, 2.23it/s]\u001b[A\n 96%|█████████▋| 25745/26685 [3:45:41<06:08, 2.55it/s]\u001b[A\n 96%|█████████▋| 25746/26685 [3:45:41<06:14, 2.51it/s]\u001b[A\n 96%|█████████▋| 25747/26685 [3:45:42<06:08, 2.54it/s]\u001b[A\n 96%|█████████▋| 25748/26685 [3:45:42<06:07, 2.55it/s]\u001b[A\n 96%|█████████▋| 25749/26685 [3:45:42<06:12, 2.51it/s]\u001b[A\n 96%|█████████▋| 25750/26685 [3:45:43<05:43, 2.72it/s]\u001b[A\n 96%|█████████▋| 25751/26685 [3:45:43<05:27, 2.85it/s]\u001b[A\n 97%|█████████▋| 25752/26685 [3:45:43<05:06, 3.05it/s]\u001b[A\n 97%|█████████▋| 25753/26685 [3:45:44<05:13, 2.97it/s]\u001b[A\n 97%|█████████▋| 25754/26685 [3:45:44<06:03, 2.56it/s]\u001b[A\n 97%|█████████▋| 25755/26685 [3:45:44<05:51, 2.65it/s]\u001b[A\n 97%|█████████▋| 25756/26685 [3:45:45<05:43, 2.70it/s]\u001b[A\n 97%|█████████▋| 25757/26685 [3:45:45<05:08, 3.01it/s]\u001b[A\n 97%|█████████▋| 25758/26685 [3:45:46<06:16, 2.46it/s]\u001b[A\n 97%|█████████▋| 25759/26685 [3:45:46<06:11, 2.49it/s]\u001b[A\n 97%|█████████▋| 25760/26685 [3:45:47<10:24, 1.48it/s]\u001b[A\n 97%|█████████▋| 25761/26685 [3:45:48<08:42, 1.77it/s]\u001b[A\n 97%|█████████▋| 25762/26685 
[3:45:48<07:22, 2.09it/s]\u001b[A\n 97%|█████████▋| 25763/26685 [3:45:48<07:01, 2.19it/s]\u001b[A\n 97%|█████████▋| 25764/26685 [3:45:49<06:20, 2.42it/s]\u001b[A\n 97%|█████████▋| 25765/26685 [3:45:49<05:45, 2.66it/s]\u001b[A\n 97%|█████████▋| 25766/26685 [3:45:49<06:30, 2.35it/s]\u001b[A\n 97%|█████████▋| 25767/26685 [3:45:50<06:04, 2.52it/s]\u001b[A\n 97%|█████████▋| 25768/26685 [3:45:50<05:39, 2.70it/s]\u001b[A\n 97%|█████████▋| 25769/26685 [3:45:51<05:52, 2.60it/s]\u001b[A\n 97%|█████████▋| 25770/26685 [3:45:51<05:32, 2.75it/s]\u001b[A\n 97%|█████████▋| 25771/26685 [3:45:51<05:47, 2.63it/s]\u001b[A\n 97%|█████████▋| 25772/26685 [3:45:52<06:14, 2.44it/s]\u001b[A\n 97%|█████████▋| 25773/26685 [3:45:52<05:38, 2.70it/s]\u001b[A\n 97%|█████████▋| 25774/26685 [3:45:53<07:41, 1.97it/s]\u001b[A\n 97%|█████████▋| 25775/26685 [3:45:53<06:50, 2.22it/s]\u001b[A\n 97%|█████████▋| 25776/26685 [3:45:53<06:00, 2.52it/s]\u001b[A\n 97%|█████████▋| 25777/26685 [3:45:54<06:07, 2.47it/s]\u001b[A\n 97%|█████████▋| 25778/26685 [3:45:54<06:02, 2.50it/s]\u001b[A\n 97%|█████████▋| 25779/26685 [3:45:55<05:49, 2.59it/s]\u001b[A\n 97%|█████████▋| 25780/26685 [3:45:55<05:51, 2.57it/s]\u001b[A\n 97%|█████████▋| 25781/26685 [3:45:55<05:53, 2.55it/s]\u001b[A\n 97%|█████████▋| 25782/26685 [3:45:56<06:02, 2.49it/s]\u001b[A\n 97%|█████████▋| 25783/26685 [3:45:57<07:58, 1.88it/s]\u001b[A\n 97%|█████████▋| 25784/26685 [3:45:57<06:55, 2.17it/s]\u001b[A\n 97%|█████████▋| 25785/26685 [3:45:57<06:12, 2.42it/s]\u001b[A\n 97%|█████████▋| 25786/26685 [3:45:58<05:33, 2.70it/s]\u001b[A\n 97%|█████████▋| 25787/26685 [3:45:58<05:06, 2.93it/s]\u001b[A\n 97%|█████████▋| 25788/26685 [3:45:58<04:57, 3.01it/s]\u001b[A\n 97%|█████████▋| 25789/26685 [3:45:58<04:58, 3.01it/s]\u001b[A\n 97%|█████████▋| 25790/26685 [3:45:59<04:52, 3.06it/s]\u001b[A\n 97%|█████████▋| 25791/26685 [3:45:59<05:29, 2.72it/s]\u001b[A\n 97%|█████████▋| 25792/26685 [3:46:00<05:21, 2.77it/s]\u001b[A\n 97%|█████████▋| 25793/26685 [3:46:00<05:11, 2.87it/s]\u001b[A\n 97%|█████████▋| 25794/26685 [3:46:00<05:07, 2.90it/s]\u001b[A\n 97%|█████████▋| 25795/26685 [3:46:01<07:18, 2.03it/s]\u001b[A\n 97%|█████████▋| 25796/26685 [3:46:02<07:32, 1.96it/s]\u001b[A\n 97%|█████████▋| 25797/26685 [3:46:02<07:13, 2.05it/s]\u001b[A\n 97%|█████████▋| 25798/26685 [3:46:02<06:32, 2.26it/s]\u001b[A\n 97%|█████████▋| 25799/26685 [3:46:03<06:21, 2.32it/s]\u001b[A\n 97%|█████████▋| 25800/26685 [3:46:03<05:35, 2.64it/s]\u001b[A\n 97%|█████████▋| 25801/26685 [3:46:04<07:27, 1.97it/s]\u001b[A\n 97%|█████████▋| 25802/26685 [3:46:04<06:58, 2.11it/s]\u001b[A\n 97%|█████████▋| 25803/26685 [3:46:05<06:37, 2.22it/s]\u001b[A\n 97%|█████████▋| 25804/26685 [3:46:05<05:45, 2.55it/s]\u001b[A\n 97%|█████████▋| 25805/26685 [3:46:06<07:36, 1.93it/s]\u001b[A\n 97%|█████████▋| 25806/26685 [3:46:07<08:53, 1.65it/s]\u001b[A\n 97%|█████████▋| 25807/26685 [3:46:07<07:24, 1.98it/s]\u001b[A\n 97%|█████████▋| 25808/26685 [3:46:08<08:57, 1.63it/s]\u001b[A\n 97%|█████████▋| 25809/26685 [3:46:08<07:48, 1.87it/s]\u001b[A\n 97%|█████████▋| 25810/26685 [3:46:09<07:58, 1.83it/s]\u001b[A\n 97%|█████████▋| 25811/26685 [3:46:09<07:04, 2.06it/s]\u001b[A\n 97%|█████████▋| 25812/26685 [3:46:10<07:37, 1.91it/s]\u001b[A\n 97%|█████████▋| 25813/26685 [3:46:10<06:38, 2.19it/s]\u001b[A\n 97%|█████████▋| 25814/26685 [3:46:10<05:45, 2.52it/s]\u001b[A\n 97%|█████████▋| 25815/26685 [3:46:11<06:14, 2.32it/s]\u001b[A\n 97%|█████████▋| 25816/26685 [3:46:11<06:01, 2.40it/s]\u001b[A\n 97%|█████████▋| 25817/26685 [3:46:11<05:26, 
2.66it/s]\u001b[A\n 97%|█████████▋| 25818/26685 [3:46:12<05:15, 2.74it/s]\u001b[A\n 97%|█████████▋| 25819/26685 [3:46:12<05:02, 2.86it/s]\u001b[A\n 97%|█████████▋| 25820/26685 [3:46:12<05:16, 2.73it/s]\u001b[A\n 97%|█████████▋| 25821/26685 [3:46:13<04:48, 3.00it/s]\u001b[A\n 97%|█████████▋| 25822/26685 [3:46:13<04:38, 3.09it/s]\u001b[A\n 97%|█████████▋| 25823/26685 [3:46:13<05:41, 2.52it/s]\u001b[A\n 97%|█████████▋| 25824/26685 [3:46:14<05:09, 2.78it/s]\u001b[A\n 97%|█████████▋| 25825/26685 [3:46:14<04:46, 3.01it/s]\u001b[A\n 97%|█████████▋| 25826/26685 [3:46:14<05:22, 2.67it/s]\u001b[A\n 97%|█████████▋| 25827/26685 [3:46:15<05:43, 2.50it/s]\u001b[A\n 97%|█████████▋| 25828/26685 [3:46:15<05:21, 2.67it/s]\u001b[A\n 97%|█████████▋| 25829/26685 [3:46:16<05:00, 2.85it/s]\u001b[A\n 97%|█████████▋| 25830/26685 [3:46:16<05:58, 2.38it/s]\u001b[A\n 97%|█████████▋| 25831/26685 [3:46:16<05:25, 2.63it/s]\u001b[A\n 97%|█████████▋| 25832/26685 [3:46:17<05:19, 2.67it/s]\u001b[A\n 97%|█████████▋| 25833/26685 [3:46:17<05:17, 2.68it/s]\u001b[A\n 97%|█████████▋| 25834/26685 [3:46:17<05:00, 2.83it/s]\u001b[A\n 97%|█████████▋| 25835/26685 [3:46:18<05:51, 2.42it/s]\u001b[A\n 97%|█████████▋| 25836/26685 [3:46:18<05:13, 2.71it/s]\u001b[A\n 97%|█████████▋| 25837/26685 [3:46:19<05:37, 2.51it/s]\u001b[A\n 97%|█████████▋| 25838/26685 [3:46:19<05:57, 2.37it/s]\u001b[A\n 97%|█████████▋| 25839/26685 [3:46:20<05:42, 2.47it/s]\u001b[A\n 97%|█████████▋| 25840/26685 [3:46:20<06:00, 2.34it/s]\u001b[A\n 97%|█████████▋| 25841/26685 [3:46:20<05:11, 2.71it/s]\u001b[A\n 97%|█████████▋| 25842/26685 [3:46:21<05:15, 2.67it/s]\u001b[A\n 97%|█████████▋| 25843/26685 [3:46:21<05:01, 2.79it/s]\u001b[A\n 97%|█████████▋| 25844/26685 [3:46:21<04:38, 3.02it/s]\u001b[A\n 97%|█████████▋| 25845/26685 [3:46:22<04:27, 3.14it/s]\u001b[A\n 97%|█████████▋| 25846/26685 [3:46:22<04:37, 3.02it/s]\u001b[A\n 97%|█████████▋| 25847/26685 [3:46:22<04:55, 2.84it/s]\u001b[A\n 97%|█████████▋| 25848/26685 [3:46:23<04:34, 3.05it/s]\u001b[A\n 97%|█████████▋| 25849/26685 [3:46:23<04:35, 3.03it/s]\u001b[A\n 97%|█████████▋| 25850/26685 [3:46:23<05:30, 2.53it/s]\u001b[A\n 97%|█████████▋| 25851/26685 [3:46:24<05:19, 2.61it/s]\u001b[A\n 97%|█████████▋| 25852/26685 [3:46:24<05:25, 2.56it/s]\u001b[A\n 97%|█████████▋| 25853/26685 [3:46:25<05:39, 2.45it/s]\u001b[A\n 97%|█████████▋| 25854/26685 [3:46:25<05:40, 2.44it/s]\u001b[A\n 97%|█████████▋| 25855/26685 [3:46:26<05:49, 2.38it/s]\u001b[A\n 97%|█████████▋| 25856/26685 [3:46:26<06:14, 2.22it/s]\u001b[A\n 97%|█████████▋| 25857/26685 [3:46:26<06:01, 2.29it/s]\u001b[A\n 97%|█████████▋| 25858/26685 [3:46:27<05:12, 2.65it/s]\u001b[A\n 97%|█████████▋| 25859/26685 [3:46:27<05:27, 2.52it/s]\u001b[A\n 97%|█████████▋| 25860/26685 [3:46:27<05:11, 2.64it/s]\u001b[A\n 97%|█████████▋| 25861/26685 [3:46:28<04:46, 2.88it/s]\u001b[A\n 97%|█████████▋| 25862/26685 [3:46:28<04:51, 2.83it/s]\u001b[A\n 97%|█████████▋| 25863/26685 [3:46:29<05:01, 2.72it/s]\u001b[A\n 97%|█████████▋| 25864/26685 [3:46:29<04:50, 2.82it/s]\u001b[A\n 97%|█████████▋| 25865/26685 [3:46:29<05:13, 2.61it/s]\u001b[A\n 97%|█████████▋| 25866/26685 [3:46:30<05:36, 2.44it/s]\u001b[A\n 97%|█████████▋| 25867/26685 [3:46:30<05:12, 2.62it/s]\u001b[A\n 97%|█████████▋| 25868/26685 [3:46:31<06:57, 1.96it/s]\u001b[A\n 97%|█████████▋| 25869/26685 [3:46:31<06:51, 1.98it/s]\u001b[A\n 97%|█████████▋| 25870/26685 [3:46:32<05:56, 2.28it/s]\u001b[A\n 97%|█████████▋| 25871/26685 [3:46:32<05:51, 2.31it/s]\u001b[A\n 97%|█████████▋| 25872/26685 [3:46:33<06:51, 1.98it/s]\u001b[A\n 
97%|█████████▋| 25873/26685 [3:46:33<06:02, 2.24it/s]\u001b[A\n 97%|█████████▋| 25874/26685 [3:46:33<05:18, 2.55it/s]\u001b[A\n 97%|█████████▋| 25875/26685 [3:46:34<05:38, 2.39it/s]\u001b[A\n 97%|█████████▋| 25876/26685 [3:46:34<06:03, 2.23it/s]\u001b[A\n 97%|█████████▋| 25877/26685 [3:46:35<07:33, 1.78it/s]\u001b[A\n 97%|█████████▋| 25878/26685 [3:46:35<06:25, 2.09it/s]\u001b[A\n 97%|█████████▋| 25879/26685 [3:46:36<06:24, 2.09it/s]\u001b[A\n 97%|█████████▋| 25880/26685 [3:46:37<07:59, 1.68it/s]\u001b[A\n 97%|█████████▋| 25881/26685 [3:46:37<08:08, 1.65it/s]\u001b[A\n 97%|█████████▋| 25882/26685 [3:46:38<06:56, 1.93it/s]\u001b[A\n 97%|█████████▋| 25883/26685 [3:46:38<06:18, 2.12it/s]\u001b[A\n 97%|█████████▋| 25884/26685 [3:46:38<05:41, 2.35it/s]\u001b[A\n 97%|█████████▋| 25885/26685 [3:46:39<05:11, 2.57it/s]\u001b[A\n 97%|█████████▋| 25886/26685 [3:46:39<05:36, 2.37it/s]\u001b[A\n 97%|█████████▋| 25887/26685 [3:46:40<05:06, 2.60it/s]\u001b[A\n 97%|█████████▋| 25888/26685 [3:46:40<04:34, 2.91it/s]\u001b[A\n 97%|█████████▋| 25889/26685 [3:46:40<04:22, 3.03it/s]\u001b[A\n 97%|█████████▋| 25890/26685 [3:46:40<04:05, 3.24it/s]\u001b[A\n 97%|█████████▋| 25891/26685 [3:46:41<04:16, 3.09it/s]\u001b[A\n 97%|█████████▋| 25892/26685 [3:46:41<04:42, 2.81it/s]\u001b[A\n 97%|█████████▋| 25893/26685 [3:46:42<05:06, 2.58it/s]\u001b[A\n 97%|█████████▋| 25894/26685 [3:46:42<05:06, 2.58it/s]\u001b[A\n 97%|█████████▋| 25895/26685 [3:46:42<04:55, 2.68it/s]\u001b[A\n 97%|█████████▋| 25896/26685 [3:46:43<04:32, 2.89it/s]\u001b[A\n 97%|█████████▋| 25897/26685 [3:46:43<04:37, 2.84it/s]\u001b[A\n 97%|█████████▋| 25898/26685 [3:46:44<06:21, 2.07it/s]\u001b[A\n 97%|█████████▋| 25899/26685 [3:46:45<07:31, 1.74it/s]\u001b[A\n 97%|█████████▋| 25900/26685 [3:46:45<08:37, 1.52it/s]\u001b[A\n 97%|█████████▋| 25901/26685 [3:46:46<08:20, 1.57it/s]\u001b[A\n 97%|█████████▋| 25902/26685 [3:46:47<08:05, 1.61it/s]\u001b[A\n 97%|█████████▋| 25903/26685 [3:46:48<09:22, 1.39it/s]\u001b[A\n 97%|█████████▋| 25904/26685 [3:46:48<08:02, 1.62it/s]\u001b[A\n 97%|█████████▋| 25905/26685 [3:46:48<07:01, 1.85it/s]\u001b[A\n 97%|█████████▋| 25906/26685 [3:46:49<06:00, 2.16it/s]\u001b[A\n 97%|█████████▋| 25907/26685 [3:46:49<05:22, 2.41it/s]\u001b[A\n 97%|█████████▋| 25908/26685 [3:46:49<05:37, 2.30it/s]\u001b[A\n 97%|█████████▋| 25909/26685 [3:46:50<05:57, 2.17it/s]\u001b[A\n 97%|█████████▋| 25910/26685 [3:46:50<05:46, 2.24it/s]\u001b[A\n 97%|█████████▋| 25911/26685 [3:46:51<05:42, 2.26it/s]\u001b[A\n 97%|█████████▋| 25912/26685 [3:46:51<05:01, 2.56it/s]\u001b[A\n 97%|█████████▋| 25913/26685 [3:46:52<06:53, 1.87it/s]\u001b[A\n 97%|█████████▋| 25914/26685 [3:46:52<06:16, 2.05it/s]\u001b[A\n 97%|█████████▋| 25915/26685 [3:46:53<05:55, 2.16it/s]\u001b[A\n 97%|█████████▋| 25916/26685 [3:46:53<05:30, 2.33it/s]\u001b[A\n 97%|█████████▋| 25917/26685 [3:46:53<05:02, 2.54it/s]\u001b[A\n 97%|█████████▋| 25918/26685 [3:46:54<04:43, 2.71it/s]\u001b[A\n 97%|█████████▋| 25919/26685 [3:46:54<05:30, 2.32it/s]\u001b[A\n 97%|█████████▋| 25920/26685 [3:46:55<06:05, 2.09it/s]\u001b[A\n 97%|█████████▋| 25921/26685 [3:46:55<05:41, 2.24it/s]\u001b[A\n 97%|█████████▋| 25922/26685 [3:46:56<05:31, 2.30it/s]\u001b[A\n 97%|█████████▋| 25923/26685 [3:46:56<05:21, 2.37it/s]\u001b[A\n 97%|█████████▋| 25924/26685 [3:46:56<05:14, 2.42it/s]\u001b[A\n 97%|█████████▋| 25925/26685 [3:46:57<05:23, 2.35it/s]\u001b[A\n 97%|█████████▋| 25926/26685 [3:46:57<04:54, 2.58it/s]\u001b[A\n 97%|█████████▋| 25927/26685 [3:46:57<04:27, 2.83it/s]\u001b[A\n 97%|█████████▋| 25928/26685 
[3:46:58<04:15, 2.96it/s]\u001b[A\n 97%|█████████▋| 25929/26685 [3:46:58<04:00, 3.15it/s]\u001b[A\n 97%|█████████▋| 25930/26685 [3:46:58<04:10, 3.01it/s]\u001b[A\n 97%|█████████▋| 25931/26685 [3:46:59<04:01, 3.12it/s]\u001b[A\n 97%|█████████▋| 25932/26685 [3:46:59<03:59, 3.14it/s]\u001b[A\n 97%|█████████▋| 25933/26685 [3:46:59<04:16, 2.93it/s]\u001b[A\n 97%|█████████▋| 25934/26685 [3:47:00<04:03, 3.09it/s]\u001b[A\n 97%|█████████▋| 25935/26685 [3:47:00<04:28, 2.79it/s]\u001b[A\n 97%|█████████▋| 25936/26685 [3:47:00<04:37, 2.70it/s]\u001b[A\n 97%|█████████▋| 25937/26685 [3:47:01<04:42, 2.65it/s]\u001b[A\n 97%|█████████▋| 25938/26685 [3:47:01<04:49, 2.58it/s]\u001b[A\n 97%|█████████▋| 25939/26685 [3:47:02<06:43, 1.85it/s]\u001b[A\n 97%|█████████▋| 25940/26685 [3:47:02<05:47, 2.15it/s]\u001b[A\n 97%|█████████▋| 25941/26685 [3:47:03<05:50, 2.12it/s]\u001b[A\n 97%|█████████▋| 25942/26685 [3:47:03<05:09, 2.40it/s]\u001b[A\n 97%|█████████▋| 25943/26685 [3:47:04<05:32, 2.23it/s]\u001b[A\n 97%|█████████▋| 25944/26685 [3:47:04<05:13, 2.37it/s]\u001b[A\n 97%|█████████▋| 25945/26685 [3:47:04<04:41, 2.62it/s]\u001b[A\n 97%|█████████▋| 25946/26685 [3:47:05<04:19, 2.84it/s]\u001b[A\n 97%|█████████▋| 25947/26685 [3:47:05<05:08, 2.39it/s]\u001b[A\n 97%|█████████▋| 25948/26685 [3:47:06<05:08, 2.39it/s]\u001b[A\n 97%|█████████▋| 25949/26685 [3:47:06<06:31, 1.88it/s]\u001b[A\n 97%|█████████▋| 25950/26685 [3:47:07<05:42, 2.15it/s]\u001b[A\n 97%|█████████▋| 25951/26685 [3:47:07<05:07, 2.39it/s]\u001b[A\n 97%|█████████▋| 25952/26685 [3:47:08<05:23, 2.27it/s]\u001b[A\n 97%|█████████▋| 25953/26685 [3:47:08<05:21, 2.27it/s]\u001b[A\n 97%|█████████▋| 25954/26685 [3:47:08<04:49, 2.53it/s]\u001b[A\n 97%|█████████▋| 25955/26685 [3:47:09<05:36, 2.17it/s]\u001b[A\n 97%|█████████▋| 25956/26685 [3:47:09<04:59, 2.43it/s]\u001b[A\n 97%|█████████▋| 25957/26685 [3:47:10<04:58, 2.44it/s]\u001b[A\n 97%|█████████▋| 25958/26685 [3:47:10<05:16, 2.30it/s]\u001b[A\n 97%|█████████▋| 25959/26685 [3:47:11<05:34, 2.17it/s]\u001b[A\n 97%|█████████▋| 25960/26685 [3:47:11<05:22, 2.25it/s]\u001b[A\n 97%|█████████▋| 25961/26685 [3:47:11<04:55, 2.45it/s]\u001b[A\n 97%|█████████▋| 25962/26685 [3:47:12<05:01, 2.40it/s]\u001b[A\n 97%|█████████▋| 25963/26685 [3:47:12<05:45, 2.09it/s]\u001b[A\n 97%|█████████▋| 25964/26685 [3:47:13<05:28, 2.20it/s]\u001b[A\n 97%|█████████▋| 25965/26685 [3:47:13<05:59, 2.00it/s]\u001b[A\n 97%|█████████▋| 25966/26685 [3:47:14<05:22, 2.23it/s]\u001b[A\n 97%|█████████▋| 25967/26685 [3:47:15<06:45, 1.77it/s]\u001b[A\n 97%|█████████▋| 25968/26685 [3:47:15<05:51, 2.04it/s]\u001b[A\n 97%|█████████▋| 25969/26685 [3:47:15<05:35, 2.14it/s]\u001b[A\n 97%|█████████▋| 25970/26685 [3:47:16<04:55, 2.42it/s]\u001b[A\n 97%|█████████▋| 25971/26685 [3:47:16<04:54, 2.43it/s]\u001b[A\n 97%|█████████▋| 25972/26685 [3:47:16<04:43, 2.52it/s]\u001b[A\n 97%|█████████▋| 25973/26685 [3:47:17<06:09, 1.93it/s]\u001b[A\n 97%|█████████▋| 25974/26685 [3:47:17<05:19, 2.22it/s]\u001b[A\n 97%|█████████▋| 25975/26685 [3:47:18<04:54, 2.41it/s]\u001b[A\n 97%|█████████▋| 25976/26685 [3:47:18<04:22, 2.70it/s]\u001b[A\n 97%|█████████▋| 25977/26685 [3:47:18<04:15, 2.77it/s]\u001b[A\n 97%|█████████▋| 25978/26685 [3:47:19<04:08, 2.84it/s]\u001b[A\n 97%|█████████▋| 25979/26685 [3:47:19<04:20, 2.72it/s]\u001b[A\n 97%|█████████▋| 25980/26685 [3:47:19<04:14, 2.77it/s]\u001b[A\n 97%|█████████▋| 25981/26685 [3:47:20<05:12, 2.25it/s]\u001b[A\n 97%|█████████▋| 25982/26685 [3:47:20<04:37, 2.53it/s]\u001b[A\n 97%|█████████▋| 25983/26685 [3:47:21<06:03, 
1.93it/s]\u001b[A\n 97%|█████████▋| 25984/26685 [3:47:21<05:16, 2.22it/s]\u001b[A\n 97%|█████████▋| 25985/26685 [3:47:22<04:38, 2.51it/s]\u001b[A\n 97%|█████████▋| 25986/26685 [3:47:22<04:08, 2.81it/s]\u001b[A\n 97%|█████████▋| 25987/26685 [3:47:22<04:11, 2.77it/s]\u001b[A\n 97%|█████████▋| 25988/26685 [3:47:23<04:14, 2.74it/s]\u001b[A\n 97%|█████████▋| 25989/26685 [3:47:23<04:09, 2.79it/s]\u001b[A\n 97%|█████████▋| 25990/26685 [3:47:23<04:09, 2.79it/s]\u001b[A\n 97%|█████████▋| 25991/26685 [3:47:25<07:33, 1.53it/s]\u001b[A\n 97%|█████████▋| 25992/26685 [3:47:25<06:54, 1.67it/s]\u001b[A\n 97%|█████████▋| 25993/26685 [3:47:26<05:39, 2.04it/s]\u001b[A\n 97%|█████████▋| 25994/26685 [3:47:26<05:38, 2.04it/s]\u001b[A\n 97%|█████████▋| 25995/26685 [3:47:26<05:02, 2.28it/s]\u001b[A\n 97%|█████████▋| 25996/26685 [3:47:27<06:25, 1.79it/s]\u001b[A\n 97%|█████████▋| 25997/26685 [3:47:28<05:56, 1.93it/s]\u001b[A\n 97%|█████████▋| 25998/26685 [3:47:28<05:13, 2.19it/s]\u001b[A\n 97%|█████████▋| 25999/26685 [3:47:28<04:50, 2.36it/s]\u001b[A\n 97%|█████████▋| 26000/26685 [3:47:29<04:45, 2.40it/s]\u001b[A\n 97%|█████████▋| 26001/26685 [3:47:29<05:38, 2.02it/s]\u001b[A\n 97%|█████████▋| 26002/26685 [3:47:30<05:15, 2.16it/s]\u001b[A\n 97%|█████████▋| 26003/26685 [3:47:30<04:53, 2.33it/s]\u001b[A\n 97%|█████████▋| 26004/26685 [3:47:31<06:05, 1.86it/s]\u001b[A\n 97%|█████████▋| 26005/26685 [3:47:32<06:56, 1.63it/s]\u001b[A\n 97%|█████████▋| 26006/26685 [3:47:32<05:58, 1.90it/s]\u001b[A\n 97%|█████████▋| 26007/26685 [3:47:32<05:35, 2.02it/s]\u001b[A\n 97%|█████████▋| 26008/26685 [3:47:33<05:20, 2.11it/s]\u001b[A\n 97%|█████████▋| 26009/26685 [3:47:33<05:03, 2.23it/s]\u001b[A\n 97%|█████████▋| 26010/26685 [3:47:33<04:34, 2.46it/s]\u001b[A\n 97%|█████████▋| 26011/26685 [3:47:34<04:58, 2.26it/s]\u001b[A\n 97%|█████████▋| 26012/26685 [3:47:34<04:25, 2.53it/s]\u001b[A\n 97%|█████████▋| 26013/26685 [3:47:35<04:23, 2.55it/s]\u001b[A\n 97%|█████████▋| 26014/26685 [3:47:35<03:56, 2.84it/s]\u001b[A\n 97%|█████████▋| 26015/26685 [3:47:35<04:10, 2.67it/s]\u001b[A\n 97%|█████████▋| 26016/26685 [3:47:36<04:06, 2.72it/s]\u001b[A\n 97%|█████████▋| 26017/26685 [3:47:36<04:03, 2.75it/s]\u001b[A\n 98%|█████████▊| 26018/26685 [3:47:37<04:14, 2.62it/s]\u001b[A\n 98%|█████████▊| 26019/26685 [3:47:37<03:52, 2.86it/s]\u001b[A\n 98%|█████████▊| 26020/26685 [3:47:37<03:44, 2.96it/s]\u001b[A\n 98%|█████████▊| 26021/26685 [3:47:38<03:59, 2.77it/s]\u001b[A\n 98%|█████████▊| 26022/26685 [3:47:38<03:39, 3.02it/s]\u001b[A\n 98%|█████████▊| 26023/26685 [3:47:39<05:28, 2.01it/s]\u001b[A\n 98%|█████████▊| 26024/26685 [3:47:39<05:45, 1.91it/s]\u001b[A\n 98%|█████████▊| 26025/26685 [3:47:40<05:12, 2.11it/s]\u001b[A\n 98%|█████████▊| 26026/26685 [3:47:40<05:00, 2.20it/s]\u001b[A\n 98%|█████████▊| 26027/26685 [3:47:41<06:20, 1.73it/s]\u001b[A\n 98%|█████████▊| 26028/26685 [3:47:41<05:22, 2.04it/s]\u001b[A\n 98%|█████████▊| 26029/26685 [3:47:41<04:43, 2.32it/s]\u001b[A\n 98%|█████████▊| 26030/26685 [3:47:42<04:41, 2.32it/s]\u001b[A\n 98%|█████████▊| 26031/26685 [3:47:42<04:17, 2.54it/s]\u001b[A\n 98%|█████████▊| 26032/26685 [3:47:43<04:06, 2.65it/s]\u001b[A\n 98%|█████████▊| 26033/26685 [3:47:43<03:58, 2.73it/s]\u001b[A\n 98%|█████████▊| 26034/26685 [3:47:44<05:56, 1.83it/s]\u001b[A\n 98%|█████████▊| 26035/26685 [3:47:44<05:31, 1.96it/s]\u001b[A\n 98%|█████████▊| 26036/26685 [3:47:45<05:25, 1.99it/s]\u001b[A\n 98%|█████████▊| 26037/26685 [3:47:45<05:29, 1.97it/s]\u001b[A\n 98%|█████████▊| 26038/26685 [3:47:46<06:17, 1.71it/s]\u001b[A\n 
98%|█████████▊| 26039/26685 [3:47:46<05:30, 1.96it/s]\u001b[A\n 98%|█████████▊| 26040/26685 [3:47:47<04:54, 2.19it/s]\u001b[A\n 98%|█████████▊| 26041/26685 [3:47:47<04:33, 2.35it/s]\u001b[A\n 98%|█████████▊| 26042/26685 [3:47:47<04:29, 2.38it/s]\u001b[A\n 98%|█████████▊| 26043/26685 [3:47:48<04:56, 2.16it/s]\u001b[A\n 98%|█████████▊| 26044/26685 [3:47:48<04:40, 2.29it/s]\u001b[A\n 98%|█████████▊| 26045/26685 [3:47:49<04:49, 2.21it/s]\u001b[A\n 98%|█████████▊| 26046/26685 [3:47:50<05:37, 1.89it/s]\u001b[A\n 98%|█████████▊| 26047/26685 [3:47:50<05:13, 2.04it/s]\u001b[A\n 98%|█████████▊| 26048/26685 [3:47:50<04:32, 2.34it/s]\u001b[A\n 98%|█████████▊| 26049/26685 [3:47:51<04:40, 2.26it/s]\u001b[A\n 98%|█████████▊| 26050/26685 [3:47:51<04:35, 2.30it/s]\u001b[A\n 98%|█████████▊| 26051/26685 [3:47:52<05:20, 1.98it/s]\u001b[A\n 98%|█████████▊| 26052/26685 [3:47:52<04:43, 2.24it/s]\u001b[A\n 98%|█████████▊| 26053/26685 [3:47:53<04:51, 2.17it/s]\u001b[A\n 98%|█████████▊| 26054/26685 [3:47:53<04:23, 2.39it/s]\u001b[A\n 98%|█████████▊| 26055/26685 [3:47:54<06:06, 1.72it/s]\u001b[A\n 98%|█████████▊| 26056/26685 [3:47:54<05:20, 1.96it/s]\u001b[A\n 98%|█████████▊| 26057/26685 [3:47:55<04:51, 2.15it/s]\u001b[A\n 98%|█████████▊| 26058/26685 [3:47:55<06:00, 1.74it/s]\u001b[A\n 98%|█████████▊| 26059/26685 [3:47:56<06:25, 1.62it/s]\u001b[A\n 98%|█████████▊| 26060/26685 [3:47:57<07:07, 1.46it/s]\u001b[A\n 98%|█████████▊| 26061/26685 [3:47:58<06:49, 1.53it/s]\u001b[A\n 98%|█████████▊| 26062/26685 [3:47:58<07:02, 1.47it/s]\u001b[A\n 98%|█████████▊| 26063/26685 [3:47:59<06:37, 1.57it/s]\u001b[A\n 98%|█████████▊| 26064/26685 [3:47:59<05:37, 1.84it/s]\u001b[A\n 98%|█████████▊| 26065/26685 [3:47:59<04:50, 2.14it/s]\u001b[A\n 98%|█████████▊| 26066/26685 [3:48:00<04:28, 2.30it/s]\u001b[A\n 98%|█████████▊| 26067/26685 [3:48:00<04:05, 2.51it/s]\u001b[A\n 98%|█████████▊| 26068/26685 [3:48:01<05:14, 1.96it/s]\u001b[A\n 98%|█████████▊| 26069/26685 [3:48:01<04:41, 2.19it/s]\u001b[A\n 98%|█████████▊| 26070/26685 [3:48:02<04:08, 2.47it/s]\u001b[A\n 98%|█████████▊| 26071/26685 [3:48:02<03:50, 2.66it/s]\u001b[A\n 98%|█████████▊| 26072/26685 [3:48:02<03:44, 2.73it/s]\u001b[A\n 98%|█████████▊| 26073/26685 [3:48:03<03:43, 2.73it/s]\u001b[A\n 98%|█████████▊| 26074/26685 [3:48:03<03:57, 2.57it/s]\u001b[A\n 98%|█████████▊| 26075/26685 [3:48:03<03:57, 2.57it/s]\u001b[A\n 98%|█████████▊| 26076/26685 [3:48:04<03:38, 2.78it/s]\u001b[A\n 98%|█████████▊| 26077/26685 [3:48:04<04:26, 2.28it/s]\u001b[A\n 98%|█████████▊| 26078/26685 [3:48:05<04:12, 2.40it/s]\u001b[A\n 98%|█████████▊| 26079/26685 [3:48:05<03:45, 2.69it/s]\u001b[A\n 98%|█████████▊| 26080/26685 [3:48:05<03:41, 2.73it/s]\u001b[A\n 98%|█████████▊| 26081/26685 [3:48:06<05:04, 1.98it/s]\u001b[A\n 98%|█████████▊| 26082/26685 [3:48:06<04:33, 2.21it/s]\u001b[A\n 98%|█████████▊| 26083/26685 [3:48:07<04:45, 2.11it/s]\u001b[A\n 98%|█████████▊| 26084/26685 [3:48:08<05:06, 1.96it/s]\u001b[A\n 98%|█████████▊| 26085/26685 [3:48:08<04:38, 2.15it/s]\u001b[A\n 98%|█████████▊| 26086/26685 [3:48:08<04:15, 2.34it/s]\u001b[A\n 98%|█████████▊| 26087/26685 [3:48:09<04:07, 2.42it/s]\u001b[A\n 98%|█████████▊| 26088/26685 [3:48:09<03:48, 2.61it/s]\u001b[A\n 98%|█████████▊| 26089/26685 [3:48:10<04:15, 2.33it/s]\u001b[A\n 98%|█████████▊| 26090/26685 [3:48:10<04:10, 2.37it/s]\u001b[A\n 98%|█████████▊| 26091/26685 [3:48:10<04:24, 2.24it/s]\u001b[A\n 98%|█████████▊| 26092/26685 [3:48:11<04:27, 2.22it/s]\u001b[A\n 98%|█████████▊| 26093/26685 [3:48:11<04:06, 2.41it/s]\u001b[A\n 98%|█████████▊| 26094/26685 
[3:48:12<05:49, 1.69it/s]\u001b[A\n 98%|█████████▊| 26095/26685 [3:48:12<04:49, 2.04it/s]\u001b[A\n 98%|█████████▊| 26096/26685 [3:48:13<04:24, 2.23it/s]\u001b[A\n 98%|█████████▊| 26097/26685 [3:48:13<04:52, 2.01it/s]\u001b[A\n 98%|█████████▊| 26098/26685 [3:48:14<04:34, 2.14it/s]\u001b[A\n 98%|█████████▊| 26099/26685 [3:48:14<04:23, 2.22it/s]\u001b[A\n 98%|█████████▊| 26100/26685 [3:48:15<05:29, 1.78it/s]\u001b[A\n 98%|█████████▊| 26101/26685 [3:48:16<06:16, 1.55it/s]\u001b[A\n 98%|█████████▊| 26102/26685 [3:48:17<06:49, 1.43it/s]\u001b[A\n 98%|█████████▊| 26103/26685 [3:48:17<05:47, 1.67it/s]\u001b[A\n 98%|█████████▊| 26104/26685 [3:48:17<05:14, 1.84it/s]\u001b[A\n 98%|█████████▊| 26105/26685 [3:48:18<05:06, 1.89it/s]\u001b[A\n 98%|█████████▊| 26106/26685 [3:48:18<04:36, 2.09it/s]\u001b[A\n 98%|█████████▊| 26107/26685 [3:48:19<04:49, 2.00it/s]\u001b[A\n 98%|█████████▊| 26108/26685 [3:48:19<04:13, 2.28it/s]\u001b[A\n 98%|█████████▊| 26109/26685 [3:48:20<03:59, 2.40it/s]\u001b[A\n 98%|█████████▊| 26110/26685 [3:48:20<04:00, 2.39it/s]\u001b[A\n 98%|█████████▊| 26111/26685 [3:48:21<04:32, 2.10it/s]\u001b[A\n 98%|█████████▊| 26112/26685 [3:48:21<04:18, 2.21it/s]\u001b[A\n 98%|█████████▊| 26113/26685 [3:48:21<03:52, 2.46it/s]\u001b[A\n 98%|█████████▊| 26114/26685 [3:48:22<05:19, 1.79it/s]\u001b[A\n 98%|█████████▊| 26115/26685 [3:48:22<04:27, 2.13it/s]\u001b[A\n 98%|█████████▊| 26116/26685 [3:48:23<03:57, 2.39it/s]\u001b[A\n 98%|█████████▊| 26117/26685 [3:48:23<03:35, 2.63it/s]\u001b[A\n 98%|█████████▊| 26118/26685 [3:48:24<03:58, 2.38it/s]\u001b[A\n 98%|█████████▊| 26119/26685 [3:48:24<03:28, 2.71it/s]\u001b[A\n 98%|█████████▊| 26120/26685 [3:48:25<04:40, 2.02it/s]\u001b[A\n 98%|█████████▊| 26121/26685 [3:48:25<04:25, 2.13it/s]\u001b[A\n 98%|█████████▊| 26122/26685 [3:48:25<04:04, 2.30it/s]\u001b[A\n 98%|█████████▊| 26123/26685 [3:48:26<04:23, 2.13it/s]\u001b[A\n 98%|█████████▊| 26124/26685 [3:48:27<05:30, 1.70it/s]\u001b[A\n 98%|█████████▊| 26125/26685 [3:48:27<04:43, 1.97it/s]\u001b[A\n 98%|█████████▊| 26126/26685 [3:48:27<04:00, 2.32it/s]\u001b[A\n 98%|█████████▊| 26127/26685 [3:48:28<03:50, 2.42it/s]\u001b[A\n 98%|█████████▊| 26128/26685 [3:48:28<03:41, 2.52it/s]\u001b[A\n 98%|█████████▊| 26129/26685 [3:48:28<03:20, 2.78it/s]\u001b[A\n 98%|█████████▊| 26130/26685 [3:48:29<04:02, 2.29it/s]\u001b[A\n 98%|█████████▊| 26131/26685 [3:48:29<03:42, 2.49it/s]\u001b[A\n 98%|█████████▊| 26132/26685 [3:48:30<03:39, 2.52it/s]\u001b[A\n 98%|█████████▊| 26133/26685 [3:48:30<03:36, 2.54it/s]\u001b[A\n 98%|█████████▊| 26134/26685 [3:48:30<03:19, 2.76it/s]\u001b[A\n 98%|█████████▊| 26135/26685 [3:48:31<03:41, 2.48it/s]\u001b[A\n 98%|█████████▊| 26136/26685 [3:48:32<04:50, 1.89it/s]\u001b[A\n 98%|█████████▊| 26137/26685 [3:48:32<05:32, 1.65it/s]\u001b[A\n 98%|█████████▊| 26138/26685 [3:48:33<04:38, 1.96it/s]\u001b[A\n 98%|█████████▊| 26139/26685 [3:48:33<04:34, 1.99it/s]\u001b[A\n 98%|█████████▊| 26140/26685 [3:48:34<04:20, 2.09it/s]\u001b[A\n 98%|█████████▊| 26141/26685 [3:48:34<03:49, 2.37it/s]\u001b[A\n 98%|█████████▊| 26142/26685 [3:48:34<03:30, 2.58it/s]\u001b[A\n 98%|█████████▊| 26143/26685 [3:48:35<03:14, 2.78it/s]\u001b[A\n 98%|█████████▊| 26144/26685 [3:48:35<03:44, 2.41it/s]\u001b[A\n 98%|█████████▊| 26145/26685 [3:48:36<03:56, 2.29it/s]\u001b[A\n 98%|█████████▊| 26146/26685 [3:48:37<05:44, 1.56it/s]\u001b[A\n 98%|█████████▊| 26147/26685 [3:48:37<04:55, 1.82it/s]\u001b[A\n 98%|█████████▊| 26148/26685 [3:48:38<05:34, 1.61it/s]\u001b[A\n 98%|█████████▊| 26149/26685 [3:48:38<04:40, 
1.91it/s]\u001b[A\n 98%|█████████▊| 26150/26685 [3:48:39<04:19, 2.06it/s]\u001b[A\n 98%|█████████▊| 26151/26685 [3:48:39<03:48, 2.33it/s]\u001b[A\n 98%|█████████▊| 26152/26685 [3:48:39<03:59, 2.23it/s]\u001b[A\n 98%|█████████▊| 26153/26685 [3:48:40<03:55, 2.26it/s]\u001b[A\n 98%|█████████▊| 26154/26685 [3:48:40<03:59, 2.22it/s]\u001b[A\n 98%|█████████▊| 26155/26685 [3:48:41<05:20, 1.65it/s]\u001b[A\n 98%|█████████▊| 26156/26685 [3:48:41<04:31, 1.95it/s]\u001b[A\n 98%|█████████▊| 26157/26685 [3:48:42<03:53, 2.26it/s]\u001b[A\n 98%|█████████▊| 26158/26685 [3:48:42<03:31, 2.49it/s]\u001b[A\n 98%|█████████▊| 26159/26685 [3:48:43<03:53, 2.25it/s]\u001b[A\n 98%|█████████▊| 26160/26685 [3:48:43<03:31, 2.48it/s]\u001b[A\n 98%|█████████▊| 26161/26685 [3:48:43<03:13, 2.71it/s]\u001b[A\n 98%|█████████▊| 26162/26685 [3:48:44<03:13, 2.71it/s]\u001b[A\n 98%|█████████▊| 26163/26685 [3:48:44<04:23, 1.98it/s]\u001b[A\n 98%|█████████▊| 26164/26685 [3:48:45<05:13, 1.66it/s]\u001b[A\n 98%|█████████▊| 26165/26685 [3:48:46<04:41, 1.85it/s]\u001b[A\n 98%|█████████▊| 26166/26685 [3:48:46<04:11, 2.06it/s]\u001b[A\n 98%|█████████▊| 26167/26685 [3:48:47<04:30, 1.91it/s]\u001b[A\n 98%|█████████▊| 26168/26685 [3:48:47<04:00, 2.15it/s]\u001b[A\n 98%|█████████▊| 26169/26685 [3:48:47<03:39, 2.35it/s]\u001b[A\n 98%|█████████▊| 26170/26685 [3:48:48<03:37, 2.37it/s]\u001b[A\n 98%|█████████▊| 26171/26685 [3:48:48<04:08, 2.06it/s]\u001b[A\n 98%|█████████▊| 26172/26685 [3:48:49<03:50, 2.22it/s]\u001b[A\n 98%|█████████▊| 26173/26685 [3:48:49<03:28, 2.46it/s]\u001b[A\n 98%|█████████▊| 26174/26685 [3:48:49<03:40, 2.31it/s]\u001b[A\n 98%|█████████▊| 26175/26685 [3:48:50<03:31, 2.41it/s]\u001b[A\n 98%|█████████▊| 26176/26685 [3:48:50<03:18, 2.56it/s]\u001b[A\n 98%|█████████▊| 26177/26685 [3:48:51<03:37, 2.34it/s]\u001b[A\n 98%|█████████▊| 26178/26685 [3:48:51<03:23, 2.49it/s]\u001b[A\n 98%|█████████▊| 26179/26685 [3:48:51<03:14, 2.60it/s]\u001b[A\n 98%|█████████▊| 26180/26685 [3:48:52<03:04, 2.74it/s]\u001b[A\n 98%|█████████▊| 26181/26685 [3:48:52<03:26, 2.44it/s]\u001b[A\n 98%|█████████▊| 26182/26685 [3:48:53<03:29, 2.40it/s]\u001b[A\n 98%|█████████▊| 26183/26685 [3:48:53<03:07, 2.68it/s]\u001b[A\n 98%|█████████▊| 26184/26685 [3:48:53<03:16, 2.55it/s]\u001b[A\n 98%|█████████▊| 26185/26685 [3:48:54<03:09, 2.63it/s]\u001b[A\n 98%|█████████▊| 26186/26685 [3:48:54<03:23, 2.45it/s]\u001b[A\n 98%|█████████▊| 26187/26685 [3:48:55<03:30, 2.37it/s]\u001b[A\n 98%|█████████▊| 26188/26685 [3:48:55<03:15, 2.54it/s]\u001b[A\n 98%|█████████▊| 26189/26685 [3:48:56<03:56, 2.10it/s]\u001b[A\n 98%|█████████▊| 26190/26685 [3:48:56<03:43, 2.21it/s]\u001b[A\n 98%|█████████▊| 26191/26685 [3:48:56<03:31, 2.33it/s]\u001b[A\n 98%|█████████▊| 26192/26685 [3:48:57<03:28, 2.36it/s]\u001b[A\n 98%|█████████▊| 26193/26685 [3:48:57<03:07, 2.62it/s]\u001b[A\n 98%|█████████▊| 26194/26685 [3:48:58<04:11, 1.95it/s]\u001b[A\n 98%|█████████▊| 26195/26685 [3:48:59<04:55, 1.66it/s]\u001b[A\n 98%|█████████▊| 26196/26685 [3:48:59<04:23, 1.86it/s]\u001b[A\n 98%|█████████▊| 26197/26685 [3:49:00<04:05, 1.99it/s]\u001b[A\n 98%|█████████▊| 26198/26685 [3:49:00<03:42, 2.19it/s]\u001b[A\n 98%|█████████▊| 26199/26685 [3:49:00<03:19, 2.43it/s]\u001b[A\n 98%|█████████▊| 26200/26685 [3:49:01<04:22, 1.85it/s]\u001b[A\n 98%|█████████▊| 26201/26685 [3:49:02<05:00, 1.61it/s]\u001b[A\n 98%|█████████▊| 26202/26685 [3:49:02<04:21, 1.85it/s]\u001b[A\n 98%|█████████▊| 26203/26685 [3:49:03<03:53, 2.06it/s]\u001b[A\n 98%|█████████▊| 26204/26685 [3:49:04<05:34, 1.44it/s]\u001b[A\n 
98%|█████████▊| 26205/26685 [3:49:04<04:58, 1.61it/s]\u001b[A\n 98%|█████████▊| 26206/26685 [3:49:05<04:39, 1.72it/s]\u001b[A\n 98%|█████████▊| 26207/26685 [3:49:05<03:55, 2.03it/s]\u001b[A\n 98%|█████████▊| 26208/26685 [3:49:05<03:29, 2.28it/s]\u001b[A\n 98%|█████████▊| 26209/26685 [3:49:06<04:23, 1.81it/s]\u001b[A\n 98%|█████████▊| 26210/26685 [3:49:06<03:41, 2.14it/s]\u001b[A\n 98%|█████████▊| 26211/26685 [3:49:07<03:43, 2.12it/s]\u001b[A\n 98%|█████████▊| 26212/26685 [3:49:07<03:57, 1.99it/s]\u001b[A\n 98%|█████████▊| 26213/26685 [3:49:08<03:35, 2.19it/s]\u001b[A\n 98%|█████████▊| 26214/26685 [3:49:08<03:15, 2.41it/s]\u001b[A\n 98%|█████████▊| 26215/26685 [3:49:09<03:21, 2.34it/s]\u001b[A\n 98%|█████████▊| 26216/26685 [3:49:09<03:55, 1.99it/s]\u001b[A\n 98%|█████████▊| 26217/26685 [3:49:10<03:57, 1.97it/s]\u001b[A\n 98%|█████████▊| 26218/26685 [3:49:10<03:30, 2.22it/s]\u001b[A\n 98%|█████████▊| 26219/26685 [3:49:10<03:27, 2.24it/s]\u001b[A\n 98%|█████████▊| 26220/26685 [3:49:11<03:31, 2.19it/s]\u001b[A\n 98%|█████████▊| 26221/26685 [3:49:11<03:08, 2.47it/s]\u001b[A\n 98%|█████████▊| 26222/26685 [3:49:12<04:01, 1.92it/s]\u001b[A\n 98%|█████████▊| 26223/26685 [3:49:12<03:31, 2.18it/s]\u001b[A\n 98%|█████████▊| 26224/26685 [3:49:13<03:31, 2.18it/s]\u001b[A\n 98%|█████████▊| 26225/26685 [3:49:13<03:10, 2.41it/s]\u001b[A\n 98%|█████████▊| 26226/26685 [3:49:13<02:54, 2.64it/s]\u001b[A\n 98%|█████████▊| 26227/26685 [3:49:14<02:44, 2.79it/s]\u001b[A\n 98%|█████████▊| 26228/26685 [3:49:14<02:51, 2.66it/s]\u001b[A\n 98%|█████████▊| 26229/26685 [3:49:14<02:42, 2.81it/s]\u001b[A\n 98%|█████████▊| 26230/26685 [3:49:15<03:17, 2.30it/s]\u001b[A\n 98%|█████████▊| 26231/26685 [3:49:15<03:11, 2.37it/s]\u001b[A\n 98%|█████████▊| 26232/26685 [3:49:16<02:55, 2.58it/s]\u001b[A\n 98%|█████████▊| 26233/26685 [3:49:16<02:48, 2.68it/s]\u001b[A\n 98%|█████████▊| 26234/26685 [3:49:16<02:40, 2.81it/s]\u001b[A\n 98%|█████████▊| 26235/26685 [3:49:17<02:28, 3.03it/s]\u001b[A\n 98%|█████████▊| 26236/26685 [3:49:17<02:27, 3.04it/s]\u001b[A\n 98%|█████████▊| 26237/26685 [3:49:18<02:55, 2.55it/s]\u001b[A\n 98%|█████████▊| 26238/26685 [3:49:18<03:35, 2.08it/s]\u001b[A\n 98%|█████████▊| 26239/26685 [3:49:19<03:29, 2.13it/s]\u001b[A\n 98%|█████████▊| 26240/26685 [3:49:19<03:26, 2.16it/s]\u001b[A\n 98%|█████████▊| 26241/26685 [3:49:20<03:25, 2.16it/s]\u001b[A\n 98%|█████████▊| 26242/26685 [3:49:20<03:26, 2.14it/s]\u001b[A\n 98%|█████████▊| 26243/26685 [3:49:20<03:05, 2.38it/s]\u001b[A\n 98%|█████████▊| 26244/26685 [3:49:21<02:52, 2.56it/s]\u001b[A\n 98%|█████████▊| 26245/26685 [3:49:21<03:29, 2.10it/s]\u001b[A\n 98%|█████████▊| 26246/26685 [3:49:22<03:20, 2.19it/s]\u001b[A\n 98%|█████████▊| 26247/26685 [3:49:22<02:56, 2.48it/s]\u001b[A\n 98%|█████████▊| 26248/26685 [3:49:23<03:11, 2.28it/s]\u001b[A\n 98%|█████████▊| 26249/26685 [3:49:23<03:05, 2.34it/s]\u001b[A\n 98%|█████████▊| 26250/26685 [3:49:23<02:59, 2.42it/s]\u001b[A\n 98%|█████████▊| 26251/26685 [3:49:24<03:17, 2.20it/s]\u001b[A\n 98%|█████████▊| 26252/26685 [3:49:24<03:09, 2.29it/s]\u001b[A\n 98%|█████████▊| 26253/26685 [3:49:25<03:02, 2.36it/s]\u001b[A\n 98%|█████████▊| 26254/26685 [3:49:25<02:38, 2.71it/s]\u001b[A\n 98%|█████████▊| 26255/26685 [3:49:26<03:39, 1.96it/s]\u001b[A\n 98%|█████████▊| 26256/26685 [3:49:26<03:20, 2.14it/s]\u001b[A\n 98%|█████████▊| 26257/26685 [3:49:27<03:15, 2.19it/s]\u001b[A\n 98%|█████████▊| 26258/26685 [3:49:27<03:30, 2.03it/s]\u001b[A\n 98%|█████████▊| 26259/26685 [3:49:28<03:46, 1.88it/s]\u001b[A\n 98%|█████████▊| 26260/26685 
[3:49:29<04:17, 1.65it/s]\u001b[A\n 98%|█████████▊| 26261/26685 [3:49:29<03:42, 1.91it/s]\u001b[A\n 98%|█████████▊| 26262/26685 [3:49:29<03:09, 2.23it/s]\u001b[A\n 98%|█████████▊| 26263/26685 [3:49:29<02:48, 2.50it/s]\u001b[A\n 98%|█████████▊| 26264/26685 [3:49:30<02:47, 2.52it/s]\u001b[A\n 98%|█████████▊| 26265/26685 [3:49:30<02:39, 2.63it/s]\u001b[A\n 98%|█████████▊| 26266/26685 [3:49:30<02:29, 2.80it/s]\u001b[A\n 98%|█████████▊| 26267/26685 [3:49:31<02:35, 2.68it/s]\u001b[A\n 98%|█████████▊| 26268/26685 [3:49:31<02:53, 2.41it/s]\u001b[A\n 98%|█████████▊| 26269/26685 [3:49:32<02:52, 2.41it/s]\u001b[A\n 98%|█████████▊| 26270/26685 [3:49:32<03:00, 2.30it/s]\u001b[A\n 98%|█████████▊| 26271/26685 [3:49:33<02:42, 2.54it/s]\u001b[A\n 98%|█████████▊| 26272/26685 [3:49:33<02:35, 2.66it/s]\u001b[A\n 98%|█████████▊| 26273/26685 [3:49:33<02:44, 2.51it/s]\u001b[A\n 98%|█████████▊| 26274/26685 [3:49:34<02:30, 2.73it/s]\u001b[A\n 98%|█████████▊| 26275/26685 [3:49:35<03:33, 1.92it/s]\u001b[A\n 98%|█████████▊| 26276/26685 [3:49:35<03:02, 2.24it/s]\u001b[A\n 98%|█████████▊| 26277/26685 [3:49:35<02:39, 2.55it/s]\u001b[A\n 98%|█████████▊| 26278/26685 [3:49:35<02:28, 2.74it/s]\u001b[A\n 98%|█████████▊| 26279/26685 [3:49:36<02:17, 2.96it/s]\u001b[A\n 98%|█████████▊| 26280/26685 [3:49:36<02:43, 2.48it/s]\u001b[A\n 98%|█████████▊| 26281/26685 [3:49:37<03:15, 2.06it/s]\u001b[A\n 98%|█████████▊| 26282/26685 [3:49:37<02:51, 2.35it/s]\u001b[A\n 98%|█████████▊| 26283/26685 [3:49:38<03:02, 2.20it/s]\u001b[A\n 98%|█████████▊| 26284/26685 [3:49:38<02:43, 2.45it/s]\u001b[A\n 99%|█████████▊| 26285/26685 [3:49:39<03:00, 2.22it/s]\u001b[A\n 99%|█████████▊| 26286/26685 [3:49:39<03:23, 1.96it/s]\u001b[A\n 99%|█████████▊| 26287/26685 [3:49:40<02:56, 2.26it/s]\u001b[A\n 99%|█████████▊| 26288/26685 [3:49:40<03:16, 2.02it/s]\u001b[A\n 99%|█████████▊| 26289/26685 [3:49:40<02:49, 2.34it/s]\u001b[A\n 99%|█████████▊| 26290/26685 [3:49:41<02:50, 2.32it/s]\u001b[A\n 99%|█████████▊| 26291/26685 [3:49:41<02:41, 2.44it/s]\u001b[A\n 99%|█████████▊| 26292/26685 [3:49:42<03:40, 1.78it/s]\u001b[A\n 99%|█████████▊| 26293/26685 [3:49:42<03:19, 1.97it/s]\u001b[A\n 99%|█████████▊| 26294/26685 [3:49:44<04:48, 1.36it/s]\u001b[A\n 99%|█████████▊| 26295/26685 [3:49:44<04:34, 1.42it/s]\u001b[A\n 99%|█████████▊| 26296/26685 [3:49:45<03:49, 1.70it/s]\u001b[A\n 99%|█████████▊| 26297/26685 [3:49:45<03:17, 1.97it/s]\u001b[A\n 99%|█████████▊| 26298/26685 [3:49:46<03:51, 1.67it/s]\u001b[A\n 99%|█████████▊| 26299/26685 [3:49:46<03:31, 1.82it/s]\u001b[A\n 99%|█████████▊| 26300/26685 [3:49:47<03:01, 2.12it/s]\u001b[A\n 99%|█████████▊| 26301/26685 [3:49:47<02:54, 2.20it/s]\u001b[A\n 99%|█████████▊| 26302/26685 [3:49:47<02:34, 2.48it/s]\u001b[A\n 99%|█████████▊| 26303/26685 [3:49:48<02:16, 2.80it/s]\u001b[A\n 99%|█████████▊| 26304/26685 [3:49:48<02:20, 2.71it/s]\u001b[A\n 99%|█████████▊| 26305/26685 [3:49:48<02:13, 2.84it/s]\u001b[A\n 99%|█████████▊| 26306/26685 [3:49:49<02:27, 2.57it/s]\u001b[A\n 99%|█████████▊| 26307/26685 [3:49:49<02:12, 2.86it/s]\u001b[A\n 99%|█████████▊| 26308/26685 [3:49:50<03:09, 1.99it/s]\u001b[A\n 99%|█████████▊| 26309/26685 [3:49:50<02:49, 2.22it/s]\u001b[A\n 99%|█████████▊| 26310/26685 [3:49:51<02:38, 2.37it/s]\u001b[A\n 99%|█████████▊| 26311/26685 [3:49:51<02:28, 2.53it/s]\u001b[A\n 99%|█████████▊| 26312/26685 [3:49:51<02:33, 2.43it/s]\u001b[A\n 99%|█████████▊| 26313/26685 [3:49:52<02:26, 2.54it/s]\u001b[A\n 99%|█████████▊| 26314/26685 [3:49:52<03:15, 1.90it/s]\u001b[A\n 99%|█████████▊| 26315/26685 [3:49:53<03:48, 
1.62it/s]\u001b[A\n 99%|█████████▊| 26316/26685 [3:49:54<04:11, 1.47it/s]\u001b[A\n 99%|█████████▊| 26317/26685 [3:49:54<03:20, 1.83it/s]\u001b[A\n 99%|█████████▊| 26318/26685 [3:49:56<04:43, 1.29it/s]\u001b[A\n 99%|█████████▊| 26319/26685 [3:49:56<03:48, 1.60it/s]\u001b[A\n 99%|█████████▊| 26320/26685 [3:49:56<03:31, 1.73it/s]\u001b[A\n 99%|█████████▊| 26321/26685 [3:49:57<03:05, 1.96it/s]\u001b[A\n 99%|█████████▊| 26322/26685 [3:49:57<02:58, 2.04it/s]\u001b[A\n 99%|█████████▊| 26323/26685 [3:49:57<02:33, 2.36it/s]\u001b[A\n 99%|█████████▊| 26324/26685 [3:49:58<02:25, 2.49it/s]\u001b[A\n 99%|█████████▊| 26325/26685 [3:49:58<02:21, 2.54it/s]\u001b[A\n 99%|█████████▊| 26326/26685 [3:49:59<02:27, 2.44it/s]\u001b[A\n 99%|█████████▊| 26327/26685 [3:49:59<02:29, 2.40it/s]\u001b[A\n 99%|█████████▊| 26328/26685 [3:50:00<02:35, 2.30it/s]\u001b[A\n 99%|█████████▊| 26329/26685 [3:50:00<02:19, 2.55it/s]\u001b[A\n 99%|█████████▊| 26330/26685 [3:50:00<02:03, 2.86it/s]\u001b[A\n 99%|█████████▊| 26331/26685 [3:50:01<02:45, 2.14it/s]\u001b[A\n 99%|█████████▊| 26332/26685 [3:50:02<03:51, 1.53it/s]\u001b[A\n 99%|█████████▊| 26333/26685 [3:50:02<03:19, 1.77it/s]\u001b[A\n 99%|█████████▊| 26334/26685 [3:50:03<03:18, 1.77it/s]\u001b[A\n 99%|█████████▊| 26335/26685 [3:50:03<02:42, 2.15it/s]\u001b[A\n 99%|█████████▊| 26336/26685 [3:50:04<03:03, 1.90it/s]\u001b[A\n 99%|█████████▊| 26337/26685 [3:50:04<02:42, 2.15it/s]\u001b[A\n 99%|█████████▊| 26338/26685 [3:50:04<02:25, 2.39it/s]\u001b[A\n 99%|█████████▊| 26339/26685 [3:50:05<02:18, 2.49it/s]\u001b[A\n 99%|█████████▊| 26340/26685 [3:50:05<02:29, 2.31it/s]\u001b[A\n 99%|█████████▊| 26341/26685 [3:50:06<02:21, 2.43it/s]\u001b[A\n 99%|█████████▊| 26342/26685 [3:50:06<02:11, 2.61it/s]\u001b[A\n 99%|█████████▊| 26343/26685 [3:50:06<01:59, 2.87it/s]\u001b[A\n 99%|█████████▊| 26344/26685 [3:50:07<02:03, 2.77it/s]\u001b[A\n 99%|█████████▊| 26345/26685 [3:50:07<01:58, 2.86it/s]\u001b[A\n 99%|█████████▊| 26346/26685 [3:50:07<02:02, 2.76it/s]\u001b[A\n 99%|█████████▊| 26347/26685 [3:50:08<02:24, 2.34it/s]\u001b[A\n 99%|█████████▊| 26348/26685 [3:50:08<02:17, 2.45it/s]\u001b[A\n 99%|█████████▊| 26349/26685 [3:50:09<02:07, 2.63it/s]\u001b[A\n 99%|█████████▊| 26350/26685 [3:50:09<02:20, 2.39it/s]\u001b[A\n 99%|█████████▊| 26351/26685 [3:50:09<02:05, 2.67it/s]\u001b[A\n 99%|█████████▉| 26352/26685 [3:50:10<02:49, 1.96it/s]\u001b[A\n 99%|█████████▉| 26353/26685 [3:50:11<02:53, 1.91it/s]\u001b[A\n 99%|█████████▉| 26354/26685 [3:50:11<02:28, 2.24it/s]\u001b[A\n 99%|█████████▉| 26355/26685 [3:50:11<02:30, 2.19it/s]\u001b[A\n 99%|█████████▉| 26356/26685 [3:50:12<02:11, 2.50it/s]\u001b[A\n 99%|█████████▉| 26357/26685 [3:50:12<02:05, 2.62it/s]\u001b[A\n 99%|█████████▉| 26358/26685 [3:50:12<02:06, 2.59it/s]\u001b[A\n 99%|█████████▉| 26359/26685 [3:50:13<02:03, 2.64it/s]\u001b[A\n 99%|█████████▉| 26360/26685 [3:50:13<02:12, 2.46it/s]\u001b[A\n 99%|█████████▉| 26361/26685 [3:50:14<02:01, 2.67it/s]\u001b[A\n 99%|█████████▉| 26362/26685 [3:50:14<02:43, 1.97it/s]\u001b[A\n 99%|█████████▉| 26363/26685 [3:50:15<02:15, 2.38it/s]\u001b[A\n 99%|█████████▉| 26364/26685 [3:50:15<02:08, 2.50it/s]\u001b[A\n 99%|█████████▉| 26365/26685 [3:50:16<02:22, 2.25it/s]\u001b[A\n 99%|█████████▉| 26366/26685 [3:50:16<02:24, 2.21it/s]\u001b[A\n 99%|█████████▉| 26367/26685 [3:50:16<02:05, 2.52it/s]\u001b[A\n 99%|█████████▉| 26368/26685 [3:50:17<02:01, 2.62it/s]\u001b[A\n 99%|█████████▉| 26369/26685 [3:50:17<02:08, 2.46it/s]\u001b[A\n 99%|█████████▉| 26370/26685 [3:50:17<02:05, 2.52it/s]\u001b[A\n 
99%|█████████▉| 26371/26685 [3:50:18<01:50, 2.84it/s]\u001b[A\n 99%|█████████▉| 26372/26685 [3:50:18<02:01, 2.58it/s]\u001b[A\n 99%|█████████▉| 26373/26685 [3:50:18<01:46, 2.92it/s]\u001b[A\n 99%|█████████▉| 26374/26685 [3:50:19<01:50, 2.83it/s]\u001b[A\n 99%|█████████▉| 26375/26685 [3:50:19<01:43, 2.99it/s]\u001b[A\n 99%|█████████▉| 26376/26685 [3:50:19<01:44, 2.97it/s]\u001b[A\n 99%|█████████▉| 26377/26685 [3:50:20<01:37, 3.15it/s]\u001b[A\n 99%|█████████▉| 26378/26685 [3:50:20<01:43, 2.96it/s]\u001b[A\n 99%|█████████▉| 26379/26685 [3:50:21<01:52, 2.72it/s]\u001b[A\n 99%|█████████▉| 26380/26685 [3:50:21<01:51, 2.73it/s]\u001b[A\n 99%|█████████▉| 26381/26685 [3:50:21<01:50, 2.75it/s]\u001b[A\n 99%|█████████▉| 26382/26685 [3:50:22<02:28, 2.04it/s]\u001b[A\n 99%|█████████▉| 26383/26685 [3:50:22<02:07, 2.38it/s]\u001b[A\n 99%|█████████▉| 26384/26685 [3:50:23<01:56, 2.59it/s]\u001b[A\n 99%|█████████▉| 26385/26685 [3:50:23<02:06, 2.38it/s]\u001b[A\n 99%|█████████▉| 26386/26685 [3:50:23<01:50, 2.71it/s]\u001b[A\n 99%|█████████▉| 26387/26685 [3:50:24<01:40, 2.97it/s]\u001b[A\n 99%|█████████▉| 26388/26685 [3:50:24<01:33, 3.18it/s]\u001b[A\n 99%|█████████▉| 26389/26685 [3:50:24<01:33, 3.18it/s]\u001b[A\n 99%|█████████▉| 26390/26685 [3:50:25<01:37, 3.01it/s]\u001b[A\n 99%|█████████▉| 26391/26685 [3:50:25<01:41, 2.89it/s]\u001b[A\n 99%|█████████▉| 26392/26685 [3:50:26<02:17, 2.13it/s]\u001b[A\n 99%|█████████▉| 26393/26685 [3:50:26<02:23, 2.04it/s]\u001b[A\n 99%|█████████▉| 26394/26685 [3:50:27<02:04, 2.34it/s]\u001b[A\n 99%|█████████▉| 26395/26685 [3:50:27<01:58, 2.44it/s]\u001b[A\n 99%|█████████▉| 26396/26685 [3:50:27<01:50, 2.62it/s]\u001b[A\n 99%|█████████▉| 26397/26685 [3:50:27<01:40, 2.86it/s]\u001b[A\n 99%|█████████▉| 26398/26685 [3:50:28<02:02, 2.35it/s]\u001b[A\n 99%|█████████▉| 26399/26685 [3:50:29<02:31, 1.89it/s]\u001b[A\n 99%|█████████▉| 26400/26685 [3:50:29<02:12, 2.15it/s]\u001b[A\n 99%|█████████▉| 26401/26685 [3:50:30<02:44, 1.73it/s]\u001b[A\n 99%|█████████▉| 26402/26685 [3:50:30<02:22, 1.98it/s]\u001b[A\n 99%|█████████▉| 26403/26685 [3:50:31<02:10, 2.16it/s]\u001b[A\n 99%|█████████▉| 26404/26685 [3:50:31<02:10, 2.15it/s]\u001b[A\n 99%|█████████▉| 26405/26685 [3:50:31<01:55, 2.42it/s]\u001b[A\n 99%|█████████▉| 26406/26685 [3:50:32<01:52, 2.48it/s]\u001b[A\n 99%|█████████▉| 26407/26685 [3:50:32<01:40, 2.76it/s]\u001b[A\n 99%|█████████▉| 26408/26685 [3:50:32<01:38, 2.80it/s]\u001b[A\n 99%|█████████▉| 26409/26685 [3:50:33<01:41, 2.71it/s]\u001b[A\n 99%|█████████▉| 26410/26685 [3:50:33<01:53, 2.42it/s]\u001b[A\n 99%|█████████▉| 26411/26685 [3:50:34<01:46, 2.57it/s]\u001b[A\n 99%|█████████▉| 26412/26685 [3:50:34<01:45, 2.58it/s]\u001b[A\n 99%|█████████▉| 26413/26685 [3:50:34<01:44, 2.60it/s]\u001b[A\n 99%|█████████▉| 26414/26685 [3:50:35<01:51, 2.43it/s]\u001b[A\n 99%|█████████▉| 26415/26685 [3:50:35<01:58, 2.28it/s]\u001b[A\n 99%|█████████▉| 26416/26685 [3:50:36<02:27, 1.83it/s]\u001b[A\n 99%|█████████▉| 26417/26685 [3:50:37<02:15, 1.98it/s]\u001b[A\n 99%|█████████▉| 26418/26685 [3:50:37<02:01, 2.20it/s]\u001b[A\n 99%|█████████▉| 26419/26685 [3:50:37<02:03, 2.16it/s]\u001b[A\n 99%|█████████▉| 26420/26685 [3:50:38<02:29, 1.78it/s]\u001b[A\n 99%|█████████▉| 26421/26685 [3:50:39<02:16, 1.94it/s]\u001b[A\n 99%|█████████▉| 26422/26685 [3:50:39<02:10, 2.01it/s]\u001b[A\n 99%|█████████▉| 26423/26685 [3:50:40<02:11, 1.99it/s]\u001b[A\n 99%|█████████▉| 26424/26685 [3:50:40<01:51, 2.33it/s]\u001b[A\n 99%|█████████▉| 26425/26685 [3:50:40<01:56, 2.24it/s]\u001b[A\n 99%|█████████▉| 26426/26685 
[3:50:41<01:48, 2.38it/s]\u001b[A\n 99%|█████████▉| 26427/26685 [3:50:41<01:40, 2.57it/s]\u001b[A\n 99%|█████████▉| 26428/26685 [3:50:41<01:35, 2.70it/s]\u001b[A\n 99%|█████████▉| 26429/26685 [3:50:42<01:35, 2.67it/s]\u001b[A\n 99%|█████████▉| 26430/26685 [3:50:43<02:23, 1.77it/s]\u001b[A\n 99%|█████████▉| 26431/26685 [3:50:44<02:57, 1.43it/s]\u001b[A\n 99%|█████████▉| 26432/26685 [3:50:45<03:05, 1.36it/s]\u001b[A\n 99%|█████████▉| 26433/26685 [3:50:45<02:35, 1.62it/s]\u001b[A\n 99%|█████████▉| 26434/26685 [3:50:45<02:09, 1.95it/s]\u001b[A\n 99%|█████████▉| 26435/26685 [3:50:46<01:52, 2.21it/s]\u001b[A\n 99%|█████████▉| 26436/26685 [3:50:46<01:51, 2.23it/s]\u001b[A\n 99%|█████████▉| 26437/26685 [3:50:46<01:41, 2.43it/s]\u001b[A\n 99%|█████████▉| 26438/26685 [3:50:47<01:35, 2.57it/s]\u001b[A\n 99%|█████████▉| 26439/26685 [3:50:47<01:46, 2.32it/s]\u001b[A\n 99%|█████████▉| 26440/26685 [3:50:47<01:29, 2.74it/s]\u001b[A\n 99%|█████████▉| 26441/26685 [3:50:48<01:21, 3.00it/s]\u001b[A\n 99%|█████████▉| 26442/26685 [3:50:48<01:36, 2.52it/s]\u001b[A\n 99%|█████████▉| 26443/26685 [3:50:48<01:28, 2.74it/s]\u001b[A\n 99%|█████████▉| 26444/26685 [3:50:49<01:32, 2.61it/s]\u001b[A\n 99%|█████████▉| 26445/26685 [3:50:49<01:22, 2.89it/s]\u001b[A\n 99%|█████████▉| 26446/26685 [3:50:50<02:09, 1.85it/s]\u001b[A\n 99%|█████████▉| 26447/26685 [3:50:50<01:48, 2.20it/s]\u001b[A\n 99%|█████████▉| 26448/26685 [3:50:51<01:35, 2.49it/s]\u001b[A\n 99%|█████████▉| 26449/26685 [3:50:51<01:49, 2.15it/s]\u001b[A\n 99%|█████████▉| 26450/26685 [3:50:52<01:37, 2.42it/s]\u001b[A\n 99%|█████████▉| 26451/26685 [3:50:52<01:36, 2.41it/s]\u001b[A\n 99%|█████████▉| 26452/26685 [3:50:52<01:35, 2.45it/s]\u001b[A\n 99%|█████████▉| 26453/26685 [3:50:53<02:00, 1.92it/s]\u001b[A\n 99%|█████████▉| 26454/26685 [3:50:54<01:49, 2.10it/s]\u001b[A\n 99%|█████████▉| 26455/26685 [3:50:54<02:12, 1.73it/s]\u001b[A\n 99%|█████████▉| 26456/26685 [3:50:55<01:55, 1.98it/s]\u001b[A\n 99%|█████████▉| 26457/26685 [3:50:55<01:44, 2.17it/s]\u001b[A\n 99%|█████████▉| 26458/26685 [3:50:55<01:32, 2.46it/s]\u001b[A\n 99%|█████████▉| 26459/26685 [3:50:56<01:29, 2.53it/s]\u001b[A\n 99%|█████████▉| 26460/26685 [3:50:58<03:22, 1.11it/s]\u001b[A\n 99%|█████████▉| 26461/26685 [3:50:59<03:13, 1.16it/s]\u001b[A\n 99%|█████████▉| 26462/26685 [3:50:59<02:54, 1.27it/s]\u001b[A\n 99%|█████████▉| 26463/26685 [3:51:00<02:55, 1.26it/s]\u001b[A\n 99%|█████████▉| 26464/26685 [3:51:00<02:27, 1.50it/s]\u001b[A\n 99%|█████████▉| 26465/26685 [3:51:01<01:59, 1.85it/s]\u001b[A\n 99%|█████████▉| 26466/26685 [3:51:01<01:53, 1.93it/s]\u001b[A\n 99%|█████████▉| 26467/26685 [3:51:01<01:39, 2.18it/s]\u001b[A\n 99%|█████████▉| 26468/26685 [3:51:02<01:31, 2.38it/s]\u001b[A\n 99%|█████████▉| 26469/26685 [3:51:02<01:26, 2.51it/s]\u001b[A\n 99%|█████████▉| 26470/26685 [3:51:02<01:25, 2.51it/s]\u001b[A\n 99%|█████████▉| 26471/26685 [3:51:03<01:20, 2.67it/s]\u001b[A\n 99%|█████████▉| 26472/26685 [3:51:03<01:33, 2.28it/s]\u001b[A\n 99%|█████████▉| 26473/26685 [3:51:04<01:31, 2.32it/s]\u001b[A\n 99%|█████████▉| 26474/26685 [3:51:04<01:22, 2.55it/s]\u001b[A\n 99%|█████████▉| 26475/26685 [3:51:04<01:22, 2.55it/s]\u001b[A\n 99%|█████████▉| 26476/26685 [3:51:05<01:23, 2.50it/s]\u001b[A\n 99%|█████████▉| 26477/26685 [3:51:05<01:22, 2.53it/s]\u001b[A\n 99%|█████████▉| 26478/26685 [3:51:06<01:26, 2.40it/s]\u001b[A\n 99%|█████████▉| 26479/26685 [3:51:06<01:27, 2.35it/s]\u001b[A\n 99%|█████████▉| 26480/26685 [3:51:07<01:20, 2.54it/s]\u001b[A\n 99%|█████████▉| 26481/26685 [3:51:07<01:32, 
2.21it/s]\u001b[A\n 99%|█████████▉| 26482/26685 [3:51:08<01:49, 1.86it/s]\u001b[A\n 99%|█████████▉| 26483/26685 [3:51:08<01:40, 2.02it/s]\u001b[A\n 99%|█████████▉| 26484/26685 [3:51:09<01:37, 2.06it/s]\u001b[A\n 99%|█████████▉| 26485/26685 [3:51:09<01:23, 2.41it/s]\u001b[A\n 99%|█████████▉| 26486/26685 [3:51:09<01:20, 2.47it/s]\u001b[A\n 99%|█████████▉| 26487/26685 [3:51:10<01:16, 2.60it/s]\u001b[A\n 99%|█████████▉| 26488/26685 [3:51:10<01:12, 2.73it/s]\u001b[A\n 99%|█████████▉| 26489/26685 [3:51:10<01:13, 2.68it/s]\u001b[A\n 99%|█████████▉| 26490/26685 [3:51:11<01:18, 2.49it/s]\u001b[A\n 99%|█████████▉| 26491/26685 [3:51:11<01:16, 2.52it/s]\u001b[A\n 99%|█████████▉| 26492/26685 [3:51:11<01:07, 2.85it/s]\u001b[A\n 99%|█████████▉| 26493/26685 [3:51:12<01:06, 2.87it/s]\u001b[A\n 99%|█████████▉| 26494/26685 [3:51:13<01:33, 2.05it/s]\u001b[A\n 99%|█████████▉| 26495/26685 [3:51:13<01:20, 2.36it/s]\u001b[A\n 99%|█████████▉| 26496/26685 [3:51:13<01:18, 2.41it/s]\u001b[A\n 99%|█████████▉| 26497/26685 [3:51:14<01:12, 2.58it/s]\u001b[A\n 99%|█████████▉| 26498/26685 [3:51:14<01:18, 2.39it/s]\u001b[A\n 99%|█████████▉| 26499/26685 [3:51:14<01:14, 2.51it/s]\u001b[A\n 99%|█████████▉| 26500/26685 [3:51:15<01:14, 2.47it/s]\u001b[A\n 99%|█████████▉| 26501/26685 [3:51:15<01:09, 2.64it/s]\u001b[A\n 99%|█████████▉| 26502/26685 [3:51:16<01:14, 2.44it/s]\u001b[A\n 99%|█████████▉| 26503/26685 [3:51:17<01:37, 1.87it/s]\u001b[A\n 99%|█████████▉| 26504/26685 [3:51:17<01:26, 2.09it/s]\u001b[A\n 99%|█████████▉| 26505/26685 [3:51:17<01:18, 2.28it/s]\u001b[A\n 99%|█████████▉| 26506/26685 [3:51:17<01:10, 2.53it/s]\u001b[A\n 99%|█████████▉| 26507/26685 [3:51:18<01:10, 2.52it/s]\u001b[A\n 99%|█████████▉| 26508/26685 [3:51:18<01:15, 2.34it/s]\u001b[A\n 99%|█████████▉| 26509/26685 [3:51:19<01:12, 2.42it/s]\u001b[A\n 99%|█████████▉| 26510/26685 [3:51:19<01:04, 2.70it/s]\u001b[A\n 99%|█████████▉| 26511/26685 [3:51:19<01:07, 2.56it/s]\u001b[A\n 99%|█████████▉| 26512/26685 [3:51:20<01:08, 2.53it/s]\u001b[A\n 99%|█████████▉| 26513/26685 [3:51:20<01:05, 2.61it/s]\u001b[A\n 99%|█████████▉| 26514/26685 [3:51:21<01:07, 2.54it/s]\u001b[A\n 99%|█████████▉| 26515/26685 [3:51:21<01:01, 2.75it/s]\u001b[A\n 99%|█████████▉| 26516/26685 [3:51:21<01:00, 2.77it/s]\u001b[A\n 99%|█████████▉| 26517/26685 [3:51:22<00:53, 3.13it/s]\u001b[A\n 99%|█████████▉| 26518/26685 [3:51:22<01:05, 2.56it/s]\u001b[A\n 99%|█████████▉| 26519/26685 [3:51:22<01:04, 2.59it/s]\u001b[A\n 99%|█████████▉| 26520/26685 [3:51:23<01:06, 2.50it/s]\u001b[A" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code" ] ]
cb5752cf9773b99f8b940f07de17aeecbd220cb2
13,501
ipynb
Jupyter Notebook
tutorials/02a_graph_from_edge_list.ipynb
Barbany/NTDS_course
6002c03d8636179ffdf57e5f1bbe9d45ba454af4
[ "MIT" ]
68
2018-07-12T13:49:47.000Z
2022-03-09T14:12:29.000Z
tutorials/02a_graph_from_edge_list.ipynb
Barbany/NTDS_course
6002c03d8636179ffdf57e5f1bbe9d45ba454af4
[ "MIT" ]
1
2018-10-30T12:58:42.000Z
2018-10-30T12:58:42.000Z
tutorials/02a_graph_from_edge_list.ipynb
Barbany/NTDS_course
6002c03d8636179ffdf57e5f1bbe9d45ba454af4
[ "MIT" ]
48
2018-07-26T04:01:46.000Z
2021-11-19T10:19:32.000Z
22.060458
304
0.550478
[ [ [ "# [NTDS'18] tutorial 2: build a graph from an edge list\n[ntds'18]: https://github.com/mdeff/ntds_2018\n\n[Benjamin Ricaud](https://people.epfl.ch/benjamin.ricaud), [EPFL LTS2](https://lts2.epfl.ch)\n\n* Dataset: [Open Tree of Life](https://tree.opentreeoflife.org)\n* Tools: [pandas](https://pandas.pydata.org), [numpy](http://www.numpy.org), [networkx](https://networkx.github.io), [gephi](https://gephi.org/)", "_____no_output_____" ], [ "## Tools", "_____no_output_____" ], [ "The below line is a [magic command](https://ipython.readthedocs.io/en/stable/interactive/magics.html) that allows plots to appear in the notebook.", "_____no_output_____" ] ], [ [ "%matplotlib inline", "_____no_output_____" ] ], [ [ "The first thing is always to import the packages we'll use.", "_____no_output_____" ] ], [ [ "import pandas as pd\nimport numpy as np\nimport networkx as nx", "_____no_output_____" ] ], [ [ "Tutorials on pandas can be found at:\n* <https://pandas.pydata.org/pandas-docs/stable/10min.html>\n* <https://pandas.pydata.org/pandas-docs/stable/tutorials.html>\n\nTutorials on numpy can be found at:\n* <https://docs.scipy.org/doc/numpy/user/quickstart.html>\n* <http://www.scipy-lectures.org/intro/numpy/index.html>\n* <http://www.scipy-lectures.org/advanced/advanced_numpy/index.html>\n\nA tutorial on networkx can be found at:\n* <https://networkx.github.io/documentation/stable/tutorial.html>", "_____no_output_____" ], [ "## Import the data\n\nWe will play with a excerpt of the Tree of Life, that can be found together with this notebook. This dataset is reduced to the first 1000 taxons (starting from the root node). The full version is available here: [Open Tree of Life](https://tree.opentreeoflife.org/about/taxonomy-version/ott3.0).\n\n![Public domain, https://en.wikipedia.org/wiki/File:Phylogenetic_tree.svg](figures/phylogenetic_tree.png)\n![Public Domain, https://commons.wikimedia.org/w/index.php?curid=3633804](figures/tree_of_life.png)", "_____no_output_____" ] ], [ [ "tree_of_life = pd.read_csv('data/taxonomy_small.tsv', sep='\\t\\|\\t?', encoding='utf-8', engine='python')", "_____no_output_____" ] ], [ [ "If you do not remember the details of a function:", "_____no_output_____" ] ], [ [ "pd.read_csv?", "_____no_output_____" ] ], [ [ "For more info on the separator, see [regex](https://docs.python.org/3.6/library/re.html).", "_____no_output_____" ], [ "Now, what is the object `tree_of_life`? It is a Pandas DataFrame.", "_____no_output_____" ] ], [ [ "tree_of_life", "_____no_output_____" ] ], [ [ "The description of the entries is given here:\nhttps://github.com/OpenTreeOfLife/reference-taxonomy/wiki/Interim-taxonomy-file-format", "_____no_output_____" ], [ "## Explore the table", "_____no_output_____" ] ], [ [ "tree_of_life.columns", "_____no_output_____" ] ], [ [ "Let us drop some columns.", "_____no_output_____" ] ], [ [ "tree_of_life = tree_of_life.drop(columns=['sourceinfo', 'uniqname', 'flags','Unnamed: 7'])", "_____no_output_____" ], [ "tree_of_life.head()", "_____no_output_____" ] ], [ [ "Pandas infered the type of values inside each column (int, float, string and string). 
The parent_uid column has float values because there was a missing value, converted to `NaN`", "_____no_output_____" ] ], [ [ "print(tree_of_life['uid'].dtype, tree_of_life.parent_uid.dtype)", "_____no_output_____" ] ], [ [ "How to access individual values.", "_____no_output_____" ] ], [ [ "tree_of_life.iloc[0, 2]", "_____no_output_____" ], [ "tree_of_life.loc[0, 'name']", "_____no_output_____" ] ], [ [ "**Exercise**: Guess the output of the below line.", "_____no_output_____" ] ], [ [ "# tree_of_life.uid[0] == tree_of_life.parent_uid[1]", "_____no_output_____" ] ], [ [ "Ordering the data.", "_____no_output_____" ] ], [ [ "tree_of_life.sort_values(by='name').head()", "_____no_output_____" ] ], [ [ "## Operation on the columns", "_____no_output_____" ], [ "Unique values, useful for categories:", "_____no_output_____" ] ], [ [ "tree_of_life['rank'].unique()", "_____no_output_____" ] ], [ [ "Selecting only one category.", "_____no_output_____" ] ], [ [ "tree_of_life[tree_of_life['rank'] == 'species'].head()", "_____no_output_____" ] ], [ [ "How many species do we have?", "_____no_output_____" ] ], [ [ "len(tree_of_life[tree_of_life['rank'] == 'species'])", "_____no_output_____" ], [ "tree_of_life['rank'].value_counts()", "_____no_output_____" ] ], [ [ "## Building the graph", "_____no_output_____" ], [ "Let us build the adjacency matrix of the graph. For that we need to reorganize the data. First we separate the nodes and their properties from the edges.", "_____no_output_____" ] ], [ [ "nodes = tree_of_life[['uid', 'name','rank']]\nedges = tree_of_life[['uid', 'parent_uid']]", "_____no_output_____" ] ], [ [ "When using an adjacency matrix, nodes are indexed by their row or column number and not by a `uid`. Let us create a new index for the nodes.", "_____no_output_____" ] ], [ [ "# Create a column for node index.\nnodes.reset_index(level=0, inplace=True)\nnodes = nodes.rename(columns={'index':'node_idx'})\nnodes.head()", "_____no_output_____" ], [ "# Create a conversion table from uid to node index.\nuid2idx = nodes[['node_idx', 'uid']]\nuid2idx = uid2idx.set_index('uid')\nuid2idx.head()", "_____no_output_____" ], [ "edges.head()", "_____no_output_____" ] ], [ [ "Now we are ready to use yet another powerful function of Pandas. Those familiar with SQL will recognize it: the `join` function.", "_____no_output_____" ] ], [ [ "# Add a new column, matching the uid with the node_idx.\nedges = edges.join(uid2idx, on='uid')", "_____no_output_____" ], [ "# Do the same with the parent_uid.\nedges = edges.join(uid2idx, on='parent_uid', rsuffix='_parent')", "_____no_output_____" ], [ "# Drop the uids.\nedges = edges.drop(columns=['uid','parent_uid'])", "_____no_output_____" ], [ "edges.head()", "_____no_output_____" ] ], [ [ "The above table is a list of edges connecting nodes and their parents.", "_____no_output_____" ], [ "## Building the (weighted) adjacency matrix\n\nWe will use numpy to build this matrix. 
Note that we don't have edge weights here, so our graph is going to be unweighted.", "_____no_output_____" ] ], [ [ "n_nodes = len(nodes)\nadjacency = np.zeros((n_nodes, n_nodes), dtype=int)", "_____no_output_____" ], [ "for idx, row in edges.iterrows():\n    if np.isnan(row.node_idx_parent):\n        continue\n    i, j = int(row.node_idx), int(row.node_idx_parent)\n    adjacency[i, j] = 1\n    adjacency[j, i] = 1", "_____no_output_____" ], [ "adjacency[:15, :15]", "_____no_output_____" ] ], [ [ "Congratulations, you have built the adjacency matrix!", "_____no_output_____" ], [ "## Graph visualization\n\nTo conclude, let us visualize the graph. We will use the python module networkx.", "_____no_output_____" ] ], [ [ "# A simple command to create the graph from the adjacency matrix.\ngraph = nx.from_numpy_array(adjacency)", "_____no_output_____" ] ], [ [ "In addition, let us add some attributes to the nodes:", "_____no_output_____" ] ], [ [ "node_props = nodes.to_dict()", "_____no_output_____" ], [ "for key in node_props:\n    # print(key, node_props[key])\n    nx.set_node_attributes(graph, node_props[key], key)", "_____no_output_____" ] ], [ [ "Let us check if it is correctly recorded:", "_____no_output_____" ] ], [ [ "graph.nodes[1]", "_____no_output_____" ] ], [ [ "Draw the graph with two different [layout algorithms](https://en.wikipedia.org/wiki/Graph_drawing#Layout_methods).", "_____no_output_____" ] ], [ [ "nx.draw_spectral(graph)", "_____no_output_____" ], [ "nx.draw_spring(graph)", "_____no_output_____" ] ], [ [ "Save the graph to disk in the `gexf` format, readable by gephi and other tools that manipulate graphs. You may now explore the graph using gephi and compare the visualizations.", "_____no_output_____" ] ], [ [ "nx.write_gexf(graph, 'tree_of_life.gexf')", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ] ]
cb57676caacf918badbda0c6f053b2d2f8328d61
4,997
ipynb
Jupyter Notebook
.ipynb_checkpoints/14.6 秩和比综合评价法-checkpoint.ipynb
ZDDWLIG/math-model
ec64b7856c7bc43988b1b7e4e68e389cc9c9a8af
[ "MIT" ]
47
2020-12-02T02:06:56.000Z
2022-03-28T02:51:43.000Z
.ipynb_checkpoints/14.6 秩和比综合评价法-checkpoint.ipynb
RabbitWhite1/Mathematical_Modeling_in_Python
ec64b7856c7bc43988b1b7e4e68e389cc9c9a8af
[ "MIT" ]
null
null
null
.ipynb_checkpoints/14.6 秩和比综合评价法-checkpoint.ipynb
RabbitWhite1/Mathematical_Modeling_in_Python
ec64b7856c7bc43988b1b7e4e68e389cc9c9a8af
[ "MIT" ]
15
2021-01-02T15:38:51.000Z
2022-03-02T13:37:47.000Z
33.993197
95
0.473484
[ [ [ "# 秩和比综合评价法(Rank Sum Ratio, RSR)", "_____no_output_____" ] ], [ [ "# 编秩函数\ndef get_rank(data, columns, ascending=True, R=np.zeros(1)):\n if data.ndim == 1:\n tempdata = np.array(data)[:,None]\n if not R.any():\n R = np.zeros(tempdata.shape)\n for i in columns:\n arg = tempdata[:,i].argsort(axis=0)\n if not ascending:\n arg = arg[::-1]\n begin, end = 0, 0\n # 找从begin开始相同项, 用end标记最后一个相同项的下一个\n while begin < len(arg):\n while end < len(arg) and tempdata[arg[end]][i] == tempdata[arg[begin]][i]:\n end += 1\n for j in range(begin, end):\n R[arg[j]][i] = (begin + end + 1) / 2\n begin = end\n return R.reshape(data.shape)", "_____no_output_____" ], [ "# 例14.7\n\nimport numpy as np\nimport pandas as pd\nfrom scipy.stats import norm\nfrom sklearn import linear_model\n\ndata = np.loadtxt('14.D 医院工作质量统计指标.txt')\nw = data[-1]\ndata = data[:-1]\n\n# 编秩\nR = np.zeros(data.shape)\n# 效益型指标编秩\nR = get_rank(data, columns=[1,5], ascending=False, R=R)\n# 效益型指标编秩\nR = get_rank(data, columns=[0,2,3,4], ascending=True, R=R)\n\n# 求加权秩和比\nWRSR = np.matmul(R, w)/R.shape[0]\n\n# R_WRSR列名\nR_WRSR_columns = ['x' + str(i+1) for i in range(R.shape[1])] + ['WRSR']\n# R_WRSR行名\nR_WRSR_index = [str(i) for i in range(1983, 1993)]\n# 构建一个DataFrame. 课本表14.19\nR_WRSR = pd.DataFrame(np.c_[R, WRSR], columns=R_WRSR_columns, index=R_WRSR_index)\nR_WRSR_sorted = R_WRSR.sort_values(by='WRSR', ascending=True)\nprint(R_WRSR_sorted, '\\n')\n\n# f列名\nf_columns = ['f', 'cf', 'p', 'Probit', 'WRSRfit', '排序']\nf = pd.DataFrame(np.c_[np.ones((R_WRSR_sorted.shape[0], 1)), \n [i for i in range(1, 11)],\n np.zeros((R_WRSR_sorted.shape[0], len(f_columns)-2))],\n index=R_WRSR_sorted.index, columns=f_columns)\n# 计算累积频率\nf['p'] = f['cf'] / R.shape[0]\n# 对最后一个累积频率修正\nf['p'].iloc[-1] = 1 - 1/(4*R.shape[0])\n# 计算概率单位, 按正态分布\nf['Probit'] = norm.isf(1-f['p'], 0, 1)+5\n# 以Probit为自变量, RSR/WRSR为因变量, 计算回归方程\nreg = linear_model.LinearRegression()\nreg.fit(np.array(f['Probit']).reshape((10, 1)), R_WRSR_sorted['WRSR'])\n# 得到reg.coef_是系数, reg.intercept_是常数项. 以此计算f['WRSRfit']\nf['WRSRfit'] = f['Probit'] * reg.coef_[0] + reg.intercept_\nf['排序'] = np.array(f['WRSRfit'].argsort())[::-1] + 1\nprint(f)", " x1 x2 x3 x4 x5 x6 WRSR\n1984 8.0 2.0 4.5 6.0 5.0 2.0 0.35820\n1985 10.0 3.5 1.0 1.0 1.0 5.5 0.35975\n1983 6.0 1.0 8.0 7.5 9.0 5.5 0.45385\n1986 9.0 3.5 3.0 7.5 8.0 3.0 0.47070\n1988 4.0 5.0 4.5 2.0 2.0 10.0 0.50420\n1992 1.5 10.0 2.0 4.0 4.0 1.0 0.55345\n1989 5.0 7.0 7.0 3.0 3.0 9.0 0.63400\n1987 7.0 6.0 9.0 5.0 7.0 8.0 0.68050\n1991 3.0 8.0 6.0 9.0 10.0 5.5 0.71695\n1990 1.5 9.0 10.0 10.0 6.0 5.5 0.76840 \n\n f cf p Probit WRSRfit 排序\n1984 1.0 1.0 0.100 3.718448 0.337123 10\n1985 1.0 2.0 0.200 4.158379 0.400506 9\n1983 1.0 3.0 0.300 4.475599 0.446209 8\n1986 1.0 4.0 0.400 4.746653 0.485261 7\n1988 1.0 5.0 0.500 5.000000 0.521762 6\n1992 1.0 6.0 0.600 5.253347 0.558263 5\n1989 1.0 7.0 0.700 5.524401 0.597315 4\n1987 1.0 8.0 0.800 5.841621 0.643018 3\n1991 1.0 9.0 0.900 6.281552 0.706401 2\n1990 1.0 10.0 0.975 6.959964 0.804143 1\n" ] ] ]
[ "markdown", "code" ]
[ [ "markdown" ], [ "code", "code" ] ]
cb5767786662a716ee4dc69cd262ecffe3f2a90c
36,936
ipynb
Jupyter Notebook
missing/test_v1/edgeworth_cc.ipynb
ericschulman/nash
bc2421f703887a553eec2a4664607061df5b377a
[ "MIT" ]
null
null
null
missing/test_v1/edgeworth_cc.ipynb
ericschulman/nash
bc2421f703887a553eec2a4664607061df5b377a
[ "MIT" ]
null
null
null
missing/test_v1/edgeworth_cc.ipynb
ericschulman/nash
bc2421f703887a553eec2a4664607061df5b377a
[ "MIT" ]
null
null
null
104.04507
16,776
0.788499
[ [ [ "%load_ext autoreload\n%autoreload 2\n\nimport numpy as np\nimport scipy.stats as stats\nimport scipy.special\n#graphing\nimport matplotlib.pyplot as plt\n#stats\nimport statsmodels.api as sm\nfrom statsmodels.base.model import GenericLikelihoodModel\n\n#import testing\nimport sys\nsys.path.append(\"../\")\nimport vuong_plots", "_____no_output_____" ], [ "beta0 = 1.\nbeta1 = .25\n\ndef gen_data(beta0=beta0,beta1=beta1):\n nobs = 1000\n #parameters\n sigma = 1\n \n epsilon = stats.norm.rvs(loc=0,scale=sigma,size=nobs)\n #censor data below x<0?\n x = stats.norm.rvs(loc=5,scale=5,size=nobs)\n y = beta0+ beta1*x + epsilon\n \n #censor\n y[y<=0] = 0\n return y,x,nobs\n\n\nyn,xn,nobs = gen_data()\nprint(xn.shape)\nprint(sm.add_constant(xn).shape)\nprint(scipy.stats.mode(yn))", "(1000,)\n(1000, 2)\nModeResult(mode=array([0.]), count=array([87]))\n" ], [ "np.random.seed()\nyn,xn,nobs = gen_data()", "_____no_output_____" ], [ "class Tobit(GenericLikelihoodModel):\n \n def __init__(self, *args,cc=False,ols=False, **kwargs):\n super(Tobit,self).__init__(*args,**kwargs)\n self._set_extra_params_names(['var'])\n self.start_params = np.array([1]*(self.exog.shape[1]+1))\n self.cc = cc\n self.ols = ols\n #self.start_params = np.array( range(1, (2*self.exog.shape[1]+2)))\n #2 sets of params for z, 1 for x, 2 variances...\n \n def loglikeobs(self, params):\n y = self.endog\n x = self.exog\n m = 1*(self.endog == 0) #missingness\n \n beta = params[0:-1]\n sigma2 = max(params[-1],1e-3)\n \n mu_y = np.matmul(x,beta)\n \n pr_y = stats.norm.logpdf( y, loc = mu_y, scale=np.sqrt(sigma2))\n \n \n #if complete case, assign pr missing to all observations...\n pr_m = np.log(max(m.mean(),1e-4))\n if not self.cc:\n pr_m = stats.norm.logcdf( y, loc = mu_y, scale=np.sqrt(sigma2))\n \n #we're done if ols\n if self.ols:\n return pr_y\n else:\n ll = (1-m)*pr_y + m*pr_m\n return ll\n \n def score(self, params):\n y = self.endog\n x = self.exog\n m = 1*(self.endog == 0) #missingness\n m_x = np.repeat(m,x.shape[1]).reshape(x.shape)\n \n if ols: #if OLS use all the data...\n m, m_x = np.ones(y.shape), np.ones(x.shape)\n \n \n b = params[0:-1]\n sigma2 = max(params[-1],1e-3)\n s = np.sqrt(sigma2)\n\n beta_jac = np.zeros(len(b))\n sigma_jac = 0\n \n #for censored\n if not cc and not ols: \n left_stats = (y - np.dot(x, b)) / s\n l_pdf = scipy.stats.norm.logpdf(left_stats)\n l_cdf = scipy.stats.norm.logcdf(left_stats)\n left_frac = np.exp(l_pdf - l_cdf)\n beta_left = np.dot(left_frac*m, x*m_x / s)\n beta_jac -= beta_left\n left_sigma = np.dot(left_frac*m, left_stats*m)\n sigma_jac -= left_sigma\n \n #for non-censored\n mid_stats = (y - np.dot(x, b)) / s\n beta_mid = np.dot(mid_stats*(1-m), x*(1-m_x) / s)\n beta_jac += beta_mid\n mid_sigma = ((np.square(mid_stats) - 1)*(1-m)).sum()\n sigma_jac += mid_sigma\n \n combo_jac = np.append(beta_jac, sigma_jac / (2*s) ) # by chain rule, since the expression above is dloglik/dlogsigma\n return combo_jac\n\n\nmodel1 = Tobit(yn,sm.add_constant(xn))\nmodel1_fit = model1.fit(disp=False)\nmodel1_fit.summary()", "_____no_output_____" ], [ "def setup_shi(yn,xn):\n model1 = Tobit(yn,sm.add_constant(xn))\n model1_fit = model1.fit(disp=False)\n ll1 = model1.loglikeobs(model1_fit.params)\n grad1 = model1.score_obs(model1_fit.params) \n hess1 = model1.hessian(model1_fit.params)\n k1 = len(model1_fit.params)\n \n #fit logistic values\n model2 = Tobit(yn,sm.add_constant(xn),ols=True)\n model2_fit = model2.fit(disp=False)\n ll2 = model2.loglikeobs(model2_fit.params)\n grad2 = 
model2.score_obs(model2_fit.params) \n hess2 = model2.hessian(model2_fit.params)\n k2 = len(model2_fit.params)\n \n return ll1,grad1,hess1,ll2,k1, grad2,hess2,k2", "_____no_output_____" ], [ "true_stats = vuong_plots.plot_true(gen_data,setup_shi)\n\nyn,xn,nobs = gen_data()\nanayltic_stats = vuong_plots.plot_analytic(yn,xn,nobs,setup_shi)\nbootstrap_stats = vuong_plots.plot_bootstrap(yn,xn,nobs,setup_shi)\nplt.legend()\nplt.show()", "_____no_output_____" ], [ "plt.plot(range(1,5), [ stats.kstat(bootstrap_stats,n=i) for i in range(1,5)], label=\"Bootstrap\")\nplt.plot(range(1,5), [ stats.kstat(anayltic_stats,n=i) for i in range(1,5)], label=\"Analytic\")\nplt.plot(range(1,5), [ stats.kstat(true_stats,n=i) for i in range(1,5)], label=\"True\")\n\nplt.legend()", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code" ] ]
cb5788c73ff7d6e75377ca776cc9bb352f3e6443
201,833
ipynb
Jupyter Notebook
module1-hyperparameter-optimization/LS_DS_241_Hyperparameter_Optimization.ipynb
livjab/DS-Unit-2-Sprint-4-Practicing-Understanding
de69aaad3e1889daf85e06ac26440978c61ab2e7
[ "MIT" ]
1
2019-05-13T16:03:56.000Z
2019-05-13T16:03:56.000Z
module1-hyperparameter-optimization/LS_DS_241_Hyperparameter_Optimization.ipynb
livjab/DS-Unit-2-Sprint-4-Practicing-Understanding
de69aaad3e1889daf85e06ac26440978c61ab2e7
[ "MIT" ]
null
null
null
module1-hyperparameter-optimization/LS_DS_241_Hyperparameter_Optimization.ipynb
livjab/DS-Unit-2-Sprint-4-Practicing-Understanding
de69aaad3e1889daf85e06ac26440978c61ab2e7
[ "MIT" ]
null
null
null
46.40906
16,504
0.482349
[ [ [ "<a href=\"https://colab.research.google.com/github/livjab/DS-Unit-2-Sprint-4-Practicing-Understanding/blob/master/module1-hyperparameter-optimization/LS_DS_241_Hyperparameter_Optimization.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "_Lambda School Data Science — Practicing & Understanding Predictive Modeling_\n\n# Hyperparameter Optimization", "_____no_output_____" ], [ "Today we'll use this process:\n\n## \"A universal workflow of machine learning\"\n\n_Excerpt from Francois Chollet, [Deep Learning with Python](https://github.com/fchollet/deep-learning-with-python-notebooks/blob/master/README.md), Chapter 4: Fundamentals of machine learning_\n \n**1. Define the problem at hand and the data on which you’ll train.** Collect this data, or annotate it with labels if need be.\n\n**2. Choose how you’ll measure success on your problem.** Which metrics will you monitor on your validation data?\n\n**3. Determine your evaluation protocol:** hold-out validation? K-fold validation? Which portion of the data should you use for validation?\n\n**4. Develop a first model that does better than a basic baseline:** a model with statistical power.\n\n**5. Develop a model that overfits.** The universal tension in machine learning is between optimization and generalization; the ideal model is one that stands right at the border between underfitting and overfitting; between undercapacity and overcapacity. To figure out where this border lies, first you must cross it.\n\n**6. Regularize your model and tune its hyperparameters, based on performance on the validation data.** Repeatedly modify your model, train it, evaluate on your validation data (not the test data, at this point), modify it again, and repeat, until the model is as good as it can get. \n\n**Iterate on feature engineering: add new features, or remove features that don’t seem to be informative.** \n\nOnce you’ve developed a satisfactory model configuration, you can **train your final production model on all the available data (training and validation) and evaluate it one last time on the test set.**\n", "_____no_output_____" ], [ "## 1. Define the problem at hand and the data on which you'll train", "_____no_output_____" ], [ "We'll apply the workflow to a [project from _Python Data Science Handbook_](https://jakevdp.github.io/PythonDataScienceHandbook/05.06-linear-regression.html#Example:-Predicting-Bicycle-Traffic) by Jake VanderPlas:\n\n> **Predicting Bicycle Traffic**\n\n> As an example, let's take a look at whether we can predict the number of bicycle trips across Seattle's Fremont Bridge based on weather, season, and other factors.\n\n> We will join the bike data with another dataset, and try to determine the extent to which weather and seasonal factors—temperature, precipitation, and daylight hours—affect the volume of bicycle traffic through this corridor. Fortunately, the NOAA makes available their daily [weather station data](http://www.ncdc.noaa.gov/cdo-web/search?datasetid=GHCND) (I used station ID USW00024233) and we can easily use Pandas to join the two data sources.\n\n> Let's start by loading the two datasets, indexing by date:", "_____no_output_____" ], [ "So this is a regression problem, not a classification problem. 
We'll define the target, choose an evaluation metric, and choose models that are appropriate for regression problems.\n\n\n", "_____no_output_____" ], [ "### Download data", "_____no_output_____" ] ], [ [ "!curl -o FremontBridge.csv https://data.seattle.gov/api/views/65db-xm6k/rows.csv?accessType=DOWNLOAD", " % Total % Received % Xferd Average Speed Time Time Time Current\n Dload Upload Total Spent Left Speed\n100 1616k 0 1616k 0 0 802k 0 --:--:-- 0:00:02 --:--:-- 803k\n" ], [ "!wget https://raw.githubusercontent.com/jakevdp/PythonDataScienceHandbook/master/notebooks/data/BicycleWeather.csv", "--2019-05-13 16:08:26-- https://raw.githubusercontent.com/jakevdp/PythonDataScienceHandbook/master/notebooks/data/BicycleWeather.csv\nResolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\nConnecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\nHTTP request sent, awaiting response... 200 OK\nLength: 234945 (229K) [text/plain]\nSaving to: ‘BicycleWeather.csv’\n\n\rBicycleWeather.csv 0%[ ] 0 --.-KB/s \rBicycleWeather.csv 100%[===================>] 229.44K --.-KB/s in 0.03s \n\n2019-05-13 16:08:26 (7.92 MB/s) - ‘BicycleWeather.csv’ saved [234945/234945]\n\n" ] ], [ [ "### Load data", "_____no_output_____" ] ], [ [ "# Modified from cells 15, 16, and 20, at\n# https://jakevdp.github.io/PythonDataScienceHandbook/05.06-linear-regression.html#Example:-Predicting-Bicycle-Traffic\n\nimport pandas as pd\n\n# Download and join data into a dataframe\ndef load(): \n fremont_bridge = 'https://data.seattle.gov/api/views/65db-xm6k/rows.csv?accessType=DOWNLOAD'\n \n bicycle_weather = 'https://raw.githubusercontent.com/jakevdp/PythonDataScienceHandbook/master/notebooks/data/BicycleWeather.csv'\n\n counts = pd.read_csv(fremont_bridge, index_col='Date', parse_dates=True, \n infer_datetime_format=True)\n\n weather = pd.read_csv(bicycle_weather, index_col='DATE', parse_dates=True, \n infer_datetime_format=True)\n\n daily = counts.resample('d').sum()\n daily['Total'] = daily.sum(axis=1)\n daily = daily[['Total']] # remove other columns\n\n weather_columns = ['PRCP', 'SNOW', 'SNWD', 'TMAX', 'TMIN', 'AWND']\n daily = daily.join(weather[weather_columns], how='inner')\n \n # Make a feature for yesterday's total\n daily['Total_yesterday'] = daily.Total.shift(1)\n daily = daily.drop(index=daily.index[0])\n \n return daily\n\ndaily = load()", "_____no_output_____" ] ], [ [ "### First fast look at the data\n- What's the shape?\n- What's the date range?\n- What's the target and the features?", "_____no_output_____" ] ], [ [ "# TODO\n\ndaily.shape", "_____no_output_____" ], [ "daily.head()", "_____no_output_____" ], [ "daily.tail()", "_____no_output_____" ] ], [ [ "Target\n- Total : Daily total number of bicycle trips across Seattle's Fremont Bridge\n\nFeatures\n- Date (index) : from 2012-10-04 to 2015-09-01\n- Total_yesterday : Total trips yesterday\n- PRCP : Precipitation (1/10 mm)\n- SNOW : Snowfall (1/10 mm)\n- SNWD : Snow depth (1/10 mm)\n- TMAX : Maximum temperature (1/10 Celsius)\n- TMIN : Minimum temperature (1/10 Celsius)\n- AWND : Average daily wind speed (1/10 meters per second)", "_____no_output_____" ], [ "## 2. 
Choose how you’ll measure success on your problem.\n\nWhich metrics will you monitor on your validation data?\n\nThis is a regression problem, so we need to choose a regression [metric](https://scikit-learn.org/stable/modules/model_evaluation.html#common-cases-predefined-values).\n\nI'll choose mean absolute error.\n\n", "_____no_output_____" ] ], [ [ "# TODO\nfrom sklearn.metrics import mean_absolute_error", "_____no_output_____" ] ], [ [ "## 3. Determine your evaluation protocol\n\nWe're doing model selection, hyperparameter optimization, and performance estimation. So generally we have two ideal [options](https://sebastianraschka.com/images/blog/2018/model-evaluation-selection-part4/model-eval-conclusions.jpg) to choose from:\n\n- 3-way holdout method (train/validation/test split)\n- Cross-validation with independent test set\n\nI'll choose cross-validation with an independent test set. Scikit-learn makes cross-validation convenient for us!\n\nSpecifically, I will use random shuffled cross-validation to train and validate, but I will hold out an \"out-of-time\" test set, from the last 100 days of data:\n", "_____no_output_____" ] ] ], [ [ "# TODO\n\ntest = daily[-100:]\ntrain = daily[:-100]\ntrain.shape, test.shape", "_____no_output_____" ], [ "X_train = train.drop(columns=\"Total\")\ny_train = train[\"Total\"]\n\nX_test = test.drop(columns=\"Total\")\ny_test = test[\"Total\"]\n", "_____no_output_____" ], [ "X_train.shape, y_train.shape, X_test.shape, y_test.shape", "_____no_output_____" ] ], [ [ "## 4. Develop a first model that does better than a basic baseline", "_____no_output_____" ], [ "### Look at the target's distribution and descriptive stats", "_____no_output_____" ] ], [ [ "import matplotlib.pyplot as plt\nimport seaborn as sns\n\nsns.distplot(y_train);", "_____no_output_____" ], [ "y_train.describe()", "_____no_output_____" ] ], [ [ "### Basic baseline 1", "_____no_output_____" ] ], [ [ "y_pred = [y_train.median()] * len(y_train)\nmean_absolute_error(y_train, y_pred)", "_____no_output_____" ] ], [ [ "### Basic baseline 2", "_____no_output_____" ] ], [ [ "y_pred = X_train[\"Total_yesterday\"]\nmean_absolute_error(y_train, y_pred)", "_____no_output_____" ] ], [ [ "### First model that does better than a basic baseline", "_____no_output_____" ], [ "https://scikit-learn.org/stable/modules/generated/sklearn.model_selection.cross_validate.html", "_____no_output_____" ] ], [ [ "from sklearn.linear_model import LinearRegression\nfrom sklearn.model_selection import cross_validate\n\nscores = cross_validate(LinearRegression(),\n                        X_train,\n                        y_train,\n                        scoring=\"neg_mean_absolute_error\",\n                        cv=3,\n                        return_train_score=True,\n                        return_estimator=True)", "_____no_output_____" ], [ "pd.DataFrame(scores)", "_____no_output_____" ], [ "scores[\"test_score\"].mean()", "_____no_output_____" ], [ "scores[\"estimator\"][0].coef_", "_____no_output_____" ], [ "for i, model in enumerate(scores[\"estimator\"]):\n    coefficients = model.coef_\n    intercept = model.intercept_\n    feature_names = X_train.columns\n\n    print(f'Model from cross-validation fold #{i}')\n    print(\"Intercept\", intercept)\n    print(pd.Series(coefficients, feature_names).to_string())\n    print('\\n')", "Model from cross-validation fold #0\nIntercept 566.7766337283679\nPRCP -3.525103\nSNOW -0.082029\nSNWD -12.045027\nTMAX 9.475238\nTMIN -4.607775\nAWND -2.745191\nTotal_yesterday 0.417360\n\n\nModel from cross-validation fold #1\nIntercept 671.9064515706045\nPRCP -2.772253\nSNOW -0.000995\nSNWD 20.800688\nTMAX 8.804948\nTMIN -3.741386\nAWND -6.108300\nTotal_yesterday 0.405074\n\n\nModel from cross-validation fold #2\nIntercept 465.84525362296563\nPRCP -2.876196\nSNOW -0.016432\nSNWD -8.809696\nTMAX 10.419441\nTMIN -5.862868\nAWND -2.398991\nTotal_yesterday 0.423493\n\n\n" ] ], [ [ "## 5. Develop a model that overfits. \n\n\"The universal tension in machine learning is between optimization and generalization; the ideal model is one that stands right at the border between underfitting and overfitting; between undercapacity and overcapacity. To figure out where this border lies, first you must cross it.\" —Chollet", "_____no_output_____" ], [ "<img src=\"https://jakevdp.github.io/PythonDataScienceHandbook/figures/05.03-validation-curve.png\">\n\nDiagram Source: https://jakevdp.github.io/PythonDataScienceHandbook/05.03-hyperparameters-and-model-validation.html#Validation-curves-in-Scikit-Learn", "_____no_output_____" ], [ "### Random Forest?\n\nhttps://scikit-learn.org/stable/modules/generated/sklearn.ensemble.RandomForestRegressor.html", "_____no_output_____" ] ], [ [ "from sklearn.ensemble import RandomForestRegressor\n\nmodel = RandomForestRegressor(n_estimators=100, max_depth=None, n_jobs=-1)\n\nscores = cross_validate(model,\n                        X_train,\n                        y_train,\n                        scoring=\"neg_mean_absolute_error\",\n                        cv=3,\n                        return_train_score=True,\n                        return_estimator=True)\n\npd.DataFrame(scores)", "_____no_output_____" ], [ "scores[\"test_score\"].mean()", "_____no_output_____" ] ], [ [ "### Validation Curve\n\nhttps://scikit-learn.org/stable/modules/generated/sklearn.model_selection.validation_curve.html\n\n> Validation curve. Determine training and test scores for varying parameter values. This is similar to grid search with one parameter.", "_____no_output_____" ] ], [ [ "import numpy as np", "_____no_output_____" ], [ "# Modified from cell 13 at\n# https://jakevdp.github.io/PythonDataScienceHandbook/05.03-hyperparameters-and-model-validation.html#Validation-curves-in-Scikit-Learn\n\n%matplotlib inline\nimport matplotlib.pyplot as plt\nfrom sklearn.model_selection import validation_curve\n\nmodel = RandomForestRegressor(n_estimators=100)\n\ndepth = [2, 3, 4, 5, 6]\ntrain_score, val_score = validation_curve(\n    model, X_train, y_train,\n    param_name='max_depth', param_range=depth,\n    scoring='neg_mean_absolute_error', cv=3)\n\nplt.plot(depth, np.median(train_score, 1), color='blue', label='training score')\nplt.plot(depth, np.median(val_score, 1), color='red', label='validation score')\nplt.legend(loc='best')\nplt.xlabel('depth');", "_____no_output_____" ] ], [ [ "### `RandomizedSearchCV`\n\nhttps://scikit-learn.org/stable/modules/generated/sklearn.model_selection.RandomizedSearchCV.html\n\nhttps://scikit-learn.org/stable/modules/grid_search.html", "_____no_output_____" ] ], [ [ "from sklearn.model_selection import RandomizedSearchCV\n\nparam_distributions = {\n    \"n_estimators\": [100, 200],\n    \"max_depth\": [4, 5],\n    \"criterion\": [\"mse\", \"mae\"]\n}\n\ngridsearch = RandomizedSearchCV(\n    RandomForestRegressor(n_jobs=-1, random_state=42),\n    param_distributions=param_distributions,\n    n_iter=8,\n    cv=3, scoring=\"neg_mean_absolute_error\",\n    verbose=10,\n    return_train_score=True)\n\ngridsearch.fit(X_train, y_train)", "Fitting 3 folds for each of 8 candidates, totalling 24 fits\n[CV] n_estimators=100, max_depth=4, criterion=mse ....................\n" ], [ "results = pd.DataFrame(gridsearch.cv_results_)\nresults.sort_values(by=\"rank_test_score\")", "_____no_output_____" ], [ "gridsearch.best_estimator_", "_____no_output_____" ] 
], [ [ "\n## FEATURE ENGINEERING!", "_____no_output_____" ], [ "Jake VanderPlas demonstrates this feature engineering: \nhttps://jakevdp.github.io/PythonDataScienceHandbook/05.06-linear-regression.html#Example:-Predicting-Bicycle-Traffic", "_____no_output_____" ] ], [ [ "# Modified from code cells 17-21 at\n# https://jakevdp.github.io/PythonDataScienceHandbook/05.06-linear-regression.html#Example:-Predicting-Bicycle-Traffic\n\ndef jake_wrangle(X): \n X = X.copy()\n\n # patterns of use generally vary from day to day; \n # let's add binary columns that indicate the day of the week:\n days = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']\n for i, day in enumerate(days):\n X[day] = (X.index.dayofweek == i).astype(float)\n\n\n # we might expect riders to behave differently on holidays; \n # let's add an indicator of this as well:\n from pandas.tseries.holiday import USFederalHolidayCalendar\n cal = USFederalHolidayCalendar()\n holidays = cal.holidays('2012', '2016')\n X = X.join(pd.Series(1, index=holidays, name='holiday'))\n X['holiday'].fillna(0, inplace=True)\n\n\n # We also might suspect that the hours of daylight would affect \n # how many people ride; let's use the standard astronomical calculation \n # to add this information:\n def hours_of_daylight(date, axis=23.44, latitude=47.61):\n \"\"\"Compute the hours of daylight for the given date\"\"\"\n days = (date - pd.datetime(2000, 12, 21)).days\n m = (1. - np.tan(np.radians(latitude))\n * np.tan(np.radians(axis) * np.cos(days * 2 * np.pi / 365.25)))\n return 24. * np.degrees(np.arccos(1 - np.clip(m, 0, 2))) / 180.\n\n X['daylight_hrs'] = list(map(hours_of_daylight, X.index))\n\n \n # temperatures are in 1/10 deg C; convert to C\n X['TMIN'] /= 10\n X['TMAX'] /= 10\n \n # We can also calcuate the average temperature.\n X['Temp (C)'] = 0.5 * (X['TMIN'] + X['TMAX'])\n\n # precip is in 1/10 mm; convert to inches\n X['PRCP'] /= 254\n\n # In addition to the inches of precipitation, let's add a flag that \n # indicates whether a day is dry (has zero precipitation):\n X['dry day'] = (X['PRCP'] == 0).astype(int)\n\n\n # Let's add a counter that increases from day 1, and measures how many \n # years have passed. 
# This will let us measure any observed annual increase\n    # or decrease in daily crossings:\n    X['annual'] = (X.index - X.index[0]).days / 365.\n\n    return X\n\nX_train = jake_wrangle(X_train)", "_____no_output_____" ] ], [ [ "### Linear Regression (with new features)", "_____no_output_____" ] ], [ [ "scores = cross_validate(LinearRegression(),\n                        X_train,\n                        y_train,\n                        scoring=\"neg_mean_absolute_error\",\n                        cv=3,\n                        return_train_score=True,\n                        return_estimator=True)\n\npd.DataFrame(scores)", "_____no_output_____" ], [ "scores[\"test_score\"].mean()", "_____no_output_____" ] ], [ [ "### Random Forest (with new features)", "_____no_output_____" ] ], [ [ "param_distributions = {\n    'n_estimators': [100],\n    'max_depth': [5, 10, 15, None],\n    'criterion': [\"mae\"]\n}\n\ngridsearch = RandomizedSearchCV(\n    RandomForestRegressor(n_jobs=-1, random_state=42),\n    param_distributions=param_distributions,\n    n_iter=2,\n    cv=3,\n    scoring=\"neg_mean_absolute_error\",\n    verbose=10,\n    return_train_score=True)\n\ngridsearch.fit(X_train, y_train)", "Fitting 3 folds for each of 2 candidates, totalling 6 fits\n[CV] n_estimators=100, max_depth=None, criterion=mae .................\n" ], [ "gridsearch.best_estimator_", "_____no_output_____" ] ] ], [ [ "\n### Feature engineering, explained by Francois Chollet\n\n> _Feature engineering_ is the process of using your own knowledge about the data and about the machine learning algorithm at hand to make the algorithm work better by applying hardcoded (nonlearned) transformations to the data before it goes into the model. In many cases, it isn’t reasonable to expect a machine-learning model to be able to learn from completely arbitrary data. The data needs to be presented to the model in a way that will make the model’s job easier.\n\n> Let’s look at an intuitive example. Suppose you’re trying to develop a model that can take as input an image of a clock and can output the time of day.\n\n> If you choose to use the raw pixels of the image as input data, then you have a difficult machine-learning problem on your hands. You’ll need a convolutional neural network to solve it, and you’ll have to expend quite a bit of computational resources to train the network.\n\n> But if you already understand the problem at a high level (you understand how humans read time on a clock face), then you can come up with much better input features for a machine-learning algorithm: for instance, write a Python script to follow the black pixels of the clock hands and output the (x, y) coordinates of the tip of each hand. Then a simple machine-learning algorithm can learn to associate these coordinates with the appropriate time of day.\n\n> You can go even further: do a coordinate change, and express the (x, y) coordinates as polar coordinates with regard to the center of the image. Your input will become the angle theta of each clock hand. At this point, your features are making the problem so easy that no machine learning is required; a simple rounding operation and dictionary lookup are enough to recover the approximate time of day.\n\n> That’s the essence of feature engineering: making a problem easier by expressing it in a simpler way. It usually requires understanding the problem in depth.\n\n> Before convolutional neural networks became successful on the MNIST digit-classification problem, solutions were typically based on hardcoded features such as the number of loops in a digit image, the height of each digit in an image, a histogram of pixel values, and so on.\n\n> Neural networks are capable of automatically extracting useful features from raw data. Does this mean you don’t have to worry about feature engineering as long as you’re using deep neural networks? No, for two reasons:\n\n> - Good features still allow you to solve problems more elegantly while using fewer resources. For instance, it would be ridiculous to solve the problem of reading a clock face using a convolutional neural network.\n> - Good features let you solve a problem with far less data. The ability of deep-learning models to learn features on their own relies on having lots of training data available; if you have only a few samples, then the information value in their features becomes critical.\n", "_____no_output_____" ], [ "# ASSIGNMENT\n\n**1.** Complete the notebook cells that were originally commented **`TODO`**. \n\n**2.** Then, focus on feature engineering to improve your cross validation scores. Collaborate with your cohort on Slack. You could start with the ideas [Jake VanderPlas suggests:](https://jakevdp.github.io/PythonDataScienceHandbook/05.06-linear-regression.html#Example:-Predicting-Bicycle-Traffic)\n\n> Our model is almost certainly missing some relevant information. For example, nonlinear effects (such as effects of precipitation and cold temperature) and nonlinear trends within each variable (such as disinclination to ride at very cold and very hot temperatures) cannot be accounted for in this model. Additionally, we have thrown away some of the finer-grained information (such as the difference between a rainy morning and a rainy afternoon), and we have ignored correlations between days (such as the possible effect of a rainy Tuesday on Wednesday's numbers, or the effect of an unexpected sunny day after a streak of rainy days). These are all potentially interesting effects, and you now have the tools to begin exploring them if you wish!\n\n**3.** Experiment with the Categorical Encoding notebook.\n\n**4.** At the end of the day, take the last step in the \"universal workflow of machine learning\" — \"You can train your final production model on all the available data (training and validation) and evaluate it one last time on the test set.\"\n\nSee the [`RandomizedSearchCV`](https://scikit-learn.org/stable/modules/generated/sklearn.model_selection.RandomizedSearchCV.html) documentation for the `refit` parameter, `best_estimator_` attribute, and `predict` method:\n\n> **refit : boolean, or string, default=True**\n\n> Refit an estimator using the best found parameters on the whole dataset.\n\n> The refitted estimator is made available at the `best_estimator_` attribute and permits using `predict` directly on this `GridSearchCV` instance.\n\n### STRETCH\n\n**A.** Apply this lesson to other datasets you've worked with, like Ames Housing, Bank Marketing, or others.\n\n**B.** In addition to `RandomizedSearchCV`, scikit-learn has [`GridSearchCV`](https://scikit-learn.org/stable/modules/generated/sklearn.model_selection.GridSearchCV.html). Another library called scikit-optimize has [`BayesSearchCV`](https://scikit-optimize.github.io/notebooks/sklearn-gridsearchcv-replacement.html). Experiment with these alternatives.\n\n**C.** _[Introduction to Machine Learning with Python](http://shop.oreilly.com/product/0636920030515.do)_ discusses options for \"Grid-Searching Which Model To Use\" in Chapter 6:\n\n> You can even go further in combining GridSearchCV and Pipeline: it is also possible to search over the actual steps being performed in the pipeline (say whether to use StandardScaler or MinMaxScaler). This leads to an even bigger search space and should be considered carefully. Trying all possible solutions is usually not a viable machine learning strategy. However, here is an example comparing a RandomForestClassifier and an SVC ...\n\nThe example is shown in [the accompanying notebook](https://github.com/amueller/introduction_to_ml_with_python/blob/master/06-algorithm-chains-and-pipelines.ipynb), code cells 35-37. Could you apply this concept to your own pipelines?", "_____no_output_____" ] ], [ [ "len(X_train.columns)", "_____no_output_____" ], [ "X_train.describe()", "_____no_output_____" ], [ "# Let's feature-engineer a column determining if it rained yesterday.\n# We can use the feature engineered by Jake VanderPlas called \"dry day\"\n# to determine if there was rain on a given day\n\nX_train[\"dry day\"].value_counts()\n", "_____no_output_____" ], [ "X_train[\"yesterday dry day\"] = X_train[\"dry day\"].shift()", "_____no_output_____" ], [ "X_train[[\"dry day\", \"yesterday dry day\"]].head(10)", "_____no_output_____" ], [ "# deal with NaN and change to int type\n\nX_train[\"yesterday dry day\"] = X_train[\"yesterday dry day\"].fillna(value=1).astype(int)", "_____no_output_____" ], [ "# Let's try to make a column for the number of days since it was last sunny\n\nX_train['rainy day streak'] = X_train.groupby((X_train['dry day'] != 1)\n    .cumsum()).cumcount() + ((X_train['dry day'] != 0)\n    .cumsum() == 0).astype(int)", "_____no_output_____" ], [ "X_train[[\"dry day\", \"rainy day streak\"]].head(10)", "_____no_output_____" ], [ "# Let's make a feature for extreme cold/extreme heat\n# Anything above about 80 degrees (F) or below 40 degrees (F) counts as an extreme temp\n# 80F = 26.67C, 40F = 4.44C\n\ndef extreme_temps(row):\n    if row[\"Temp (C)\"] > 26.67:\n        return 1\n    elif row[\"Temp (C)\"] < 4.44:\n        return 1\n    else:\n        return 0\n\nX_train[\"extreme temp day\"] = X_train.apply(extreme_temps, axis=1)", "_____no_output_____" ], [ "X_train[\"extreme temp day\"].value_counts()", "_____no_output_____" ], [ "X_train[[\"Temp (C)\", \"extreme temp day\"]].sort_values(\"Temp (C)\").head()", "_____no_output_____" ], [ "X_train[[\"Temp (C)\", \"extreme temp day\"]].sort_values(\"Temp (C)\", ascending=False).head()", "_____no_output_____" ], [ "# linear regression with the newly added features\n\nscores = cross_validate(LinearRegression(),\n                        X_train,\n                        y_train,\n                        scoring=\"neg_mean_absolute_error\",\n                        cv=3,\n                        return_train_score=True,\n                        return_estimator=True)\n\npd.DataFrame(scores)", "_____no_output_____" ], [ "scores[\"test_score\"].mean()", "_____no_output_____" ], [ "# random forest regression\n\nparam_distributions = {\n    'n_estimators': [100, 200, 300],\n    'max_depth': [5, 10, 15, None],\n    'criterion': [\"mse\", \"mae\"]\n}\n\ngridsearch = RandomizedSearchCV(\n    RandomForestRegressor(n_jobs=-1, random_state=42),\n    param_distributions=param_distributions,\n    cv=3,\n    scoring=\"neg_mean_absolute_error\",\n    verbose=10,\n    return_train_score=True)\n\ngridsearch.fit(X_train, y_train)", "Fitting 3 folds for each of 10 candidates, totalling 30 fits\n[CV] 
n_estimators=300, max_depth=10, criterion=mae ...................\n" ], [ "gridsearch.best_estimator_", "_____no_output_____" ], [ "scores = cross_validate(RandomForestRegressor(bootstrap=True, \n criterion='mse', \n max_depth=None, \n max_features='auto', \n max_leaf_nodes=None, \n min_impurity_decrease=0.0, \n min_impurity_split=None, \n min_samples_leaf=1, \n min_samples_split=2, \n min_weight_fraction_leaf=0.0, \n n_estimators=300, \n n_jobs=-1, \n oob_score=False, \n random_state=42, \n verbose=0, \n warm_start=False), \n X_train, \n y_train, \n scoring=\"neg_mean_absolute_error\", \n cv=3, \n return_train_score=True, \n return_estimator=True)\n\npd.DataFrame(scores)", "_____no_output_____" ], [ "scores[\"test_score\"].mean()", "_____no_output_____" ], [ "pd.DataFrame(gridsearch.cv_results_).sort_values(by=\"rank_test_score\")", "_____no_output_____" ], [ "", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb578b8eaee73feb9d140a26a9ca7652530cf3f9
515,405
ipynb
Jupyter Notebook
.ipynb_checkpoints/tmp_time_series_analysis-checkpoint.ipynb
rchak007/time_series
7ae69dfcf3f80a64917a11e7dbafb96e203a6787
[ "ADSL" ]
null
null
null
.ipynb_checkpoints/tmp_time_series_analysis-checkpoint.ipynb
rchak007/time_series
7ae69dfcf3f80a64917a11e7dbafb96e203a6787
[ "ADSL" ]
null
null
null
.ipynb_checkpoints/tmp_time_series_analysis-checkpoint.ipynb
rchak007/time_series
7ae69dfcf3f80a64917a11e7dbafb96e203a6787
[ "ADSL" ]
null
null
null
236.099404
126,924
0.893336
[ [ [ "import numpy as np\nimport pandas as pd\nfrom pathlib import Path\nimport matplotlib.pyplot as plt\n%matplotlib inline\nimport warnings\nwarnings.simplefilter(action='ignore', category=FutureWarning)", "_____no_output_____" ] ], [ [ "# Return Forecasting: Read Historical Daily Yen Futures Data\nIn this notebook, you will load historical Dollar-Yen exchange rate futures data and apply time series analysis and modeling to determine whether there is any predictable behavior.", "_____no_output_____" ] ], [ [ "# Futures contract on the Yen-dollar exchange rate:\n# This is the continuous chain of the futures contracts that are 1 month to expiration\nyen_futures = pd.read_csv(\n Path(\"yen.csv\"), index_col=\"Date\", infer_datetime_format=True, parse_dates=True\n)\nyen_futures.head()", "_____no_output_____" ], [ "# Trim the dataset to begin on January 1st, 1990\nyen_futures = yen_futures.loc[\"1990-01-01\":, :]\nyen_futures.head()", "_____no_output_____" ] ], [ [ " # Return Forecasting: Initial Time-Series Plotting", "_____no_output_____" ], [ " Start by plotting the \"Settle\" price. Do you see any patterns, long-term and/or short?", "_____no_output_____" ] ], [ [ "yen_futures_settle= yen_futures['Settle']\n#print(type(yen_futures_settle))\n#print(yen_futures_settle)\nyen_futures_settle = yen_futures_settle.to_frame()\nyen_futures_settle.head()", "_____no_output_____" ] ], [ [ "#### make a copy for later", "_____no_output_____" ] ], [ [ "yen_futures_settle_only = yen_futures_settle.copy()\nyen_futures_settle_only.head()", "_____no_output_____" ], [ "# Plot just the \"Settle\" column from the dataframe:\n# YOUR CODE HERE!\nyen_futures_settle.plot(y='Settle', title='Yen Futures Settle Prices', figsize=(20,10))\n#ax.legend(['Settle prices'])", "_____no_output_____" ] ], [ [ "---", "_____no_output_____" ], [ "# Decomposition Using a Hodrick-Prescott Filter", "_____no_output_____" ], [ " Using a Hodrick-Prescott Filter, decompose the Settle price into a trend and noise.", "_____no_output_____" ] ], [ [ "import statsmodels.api as sm\n\n# Apply the Hodrick-Prescott Filter by decomposing the \"Settle\" price into two separate series:\n# YOUR CODE HERE!\n#Hodrick-Prescott filter\nts_noise, ts_trend = sm.tsa.filters.hpfilter(yen_futures_settle['Settle'])", "_____no_output_____" ] ], [ [ "#### Test the noise , trend datasets", "_____no_output_____" ] ], [ [ "print(ts_noise.head())\nprint(ts_noise[1])\nprint(ts_trend.head())", "Date\n1990-01-02 -61.503967\n1990-01-03 -21.799756\n1990-01-04 98.942896\n1990-01-05 40.776052\n1990-01-08 49.689938\nName: Settle_cycle, dtype: float64\n-21.799755630596337\nDate\n1990-01-02 6908.503967\n1990-01-03 6908.799756\n1990-01-04 6909.057104\n1990-01-05 6909.223948\n1990-01-08 6909.310062\nName: Settle_trend, dtype: float64\n" ], [ "# Create a dataframe of just the settle price, and add columns for \"noise\" and \"trend\" series from above:\n# YOUR CODE HERE!\nyen_futures_settle['noise'] = ts_noise\nyen_futures_settle['trend'] = ts_trend\nyen_futures_settle.head()", "_____no_output_____" ] ], [ [ "#### Drop noise from data frame", "_____no_output_____" ] ], [ [ "yen_futures_settle_trend = yen_futures_settle.drop(columns=['noise'])", "_____no_output_____" ], [ "yen_futures_settle_only.head()", "_____no_output_____" ], [ "yen_futures_settle_only.tail()", "_____no_output_____" ] ], [ [ "#### filter 2015 to now", "_____no_output_____" ] ], [ [ "yen_futures_settle_trend2015 = yen_futures_settle_trend['2015':]", "_____no_output_____" ], [ "# Plot the Settle Price vs. 
# the Trend for 2015 to the present\n# YOUR CODE HERE!\n#yen_futures_settle_trend.plot(title='Yen Futures Settle vs Trend', figsize=(20,10))\nyen_futures_settle_trend2015.plot(title='Yen Futures Settle vs Trend', figsize=(20,10))", "_____no_output_____" ], [ "# Plot the Settle Noise\n# YOUR CODE HERE!\nts_noise.plot(title='Noise', figsize=(20,10))", "_____no_output_____" ] ], [ [ "---", "_____no_output_____" ], [ "# Forecasting Returns using an ARMA Model", "_____no_output_____" ], [ "Using futures Settle *Returns*, estimate an ARMA model\n\n1. ARMA: Create an ARMA model and fit it to the returns data. Note: Set the AR and MA (\"p\" and \"q\") parameters to p=2 and q=1: order=(2, 1).\n2. Output the ARMA summary table and take note of the p-values of the lags. Based on the p-values, is the model a good fit (p < 0.05)?\n3. Plot the 5-day forecast of the forecasted returns (the results forecast from the ARMA model)", "_____no_output_____" ] ], [ [ "# Create a series using \"Settle\" price percentage returns, drop any NaNs, and check the results:\n# (Make sure to multiply the pct_change() results by 100)\n# In this case, you may have to replace inf, -inf values with np.nan\nreturns = (yen_futures[[\"Settle\"]].pct_change() * 100)\nreturns = returns.replace([np.inf, -np.inf], np.nan).dropna()\nreturns.tail()", "_____no_output_____" ], [ "import statsmodels.api as sm\n\n# Estimate an ARMA model using statsmodels (use order=(2, 1))\n# YOUR CODE HERE!\nfrom statsmodels.tsa.arima_model import ARMA\n# In the order parameter, the first value (2) is the number of AR lags\n# and the second value (1) is the number of MA lags\nmodel = ARMA(returns.values, order=(2,1))\n# Fit the model and assign it to a variable called results\n# YOUR CODE HERE!\nresults = model.fit()", "_____no_output_____" ], [ "# Output model summary results:\n# YOUR CODE HERE!\nresults.summary()", "_____no_output_____" ], [ "# Plot the 5 Day Returns Forecast\n# YOUR CODE HERE!\npd.DataFrame(results.forecast(steps=5)[0]).plot(title=\"5 Day Returns Forecast\")", "_____no_output_____" ], [ "pd.DataFrame(results.forecast(steps=5)[0])", "_____no_output_____" ] ] ], [ [ "---", "_____no_output_____" ], [ "# Forecasting the Settle Price using an ARIMA Model", "_____no_output_____" ], [ " 1. Using the *raw* Yen **Settle Price**, estimate an ARIMA model.\n 1. Set P=5, D=1, and Q=1 in the model (e.g., ARIMA(df, order=(5,1,1)))\n 2. P = # of Auto-Regressive Lags, D = # of Differences (this is usually =1), Q = # of Moving Average Lags\n 2. Output the ARIMA summary table and take note of the p-values of the lags. Based on the p-values, is the model a good fit (p < 0.05)?\n 3. Construct a 5 day forecast for the Settle Price. What does the model forecast will happen to the Japanese Yen in the near term?", "_____no_output_____" ] ], [ [ "from statsmodels.tsa.arima_model import ARIMA\n\n# Estimate an ARIMA Model:\n# Hint: ARIMA(df, order=(p, d, q))\n# YOUR CODE HERE!\nmodel2 = ARIMA(yen_futures_settle['Settle'], order=(5, 1, 1))\n# Fit the model\n# YOUR CODE HERE!\nres2 = model2.fit()", "C:\\Users\\chakravartiraghavan\\anaconda3\\envs\\timeseries\\lib\\site-packages\\statsmodels\\tsa\\base\\tsa_model.py:583: ValueWarning: A date index has been provided, but it has no associated frequency information and so will be ignored when e.g. forecasting.\n ' ignored when e.g. 
forecasting.', ValueWarning)\nC:\\Users\\chakravartiraghavan\\anaconda3\\envs\\timeseries\\lib\\site-packages\\statsmodels\\tsa\\base\\tsa_model.py:583: ValueWarning: A date index has been provided, but it has no associated frequency information and so will be ignored when e.g. forecasting.\n ' ignored when e.g. forecasting.', ValueWarning)\n" ], [ "# Output model summary results:\nres2.summary()", "_____no_output_____" ], [ "# Plot the 5 Day Price Forecast\n# YOUR CODE HERE!\npd.DataFrame(res2.forecast(steps=5)[0]).plot(title=\"5 Day Futures Price Forecast\")", "_____no_output_____" ], [ "pd.DataFrame(res2.forecast(steps=5)[0])", "_____no_output_____" ], [ "from statsmodels.graphics.tsaplots import plot_acf, plot_pacf\nplot_acf(yen_futures_settle['Settle'], lags=30, zero=False)", "_____no_output_____" ], [ "plot_pacf(yen_futures_settle['Settle'], lags=30, zero=False)", "_____no_output_____" ] ] ], [ [ "---", "_____no_output_____" ], [ "# Volatility Forecasting with GARCH\n\nRather than predicting returns, let's forecast near-term **volatility** of Japanese Yen futures returns. Being able to accurately predict volatility will be extremely useful if we want to trade in derivatives or quantify our maximum loss.\n \nUsing futures Settle *Returns*, estimate a GARCH model\n\n1. GARCH: Create a GARCH model and fit it to the returns data. Note: Set the parameters to p=2 and q=1: order=(2, 1).\n2. Output the GARCH summary table and take note of the p-values of the lags. Based on the p-values, is the model a good fit (p < 0.05)?\n3. Plot the 5-day forecast of the volatility.", "_____no_output_____" ] ], [ [ "yen_futures_settle_only.head()", "_____no_output_____" ], [ "# import the arch package\nfrom arch import arch_model", "_____no_output_____" ], [ "# Estimate a GARCH model:\n# YOUR CODE HERE!\nmodel = arch_model(returns, mean=\"Zero\", vol=\"GARCH\", p=2, q=1)\n# Fit the model\n# YOUR CODE HERE!\nres_garch = model.fit(disp=\"off\")", "_____no_output_____" ], [ "# Summarize the model results\n# YOUR CODE HERE!\nres_garch.summary()", "_____no_output_____" ], [ "fig = res_garch.plot(annualize='D')", "_____no_output_____" ], [ "# Find the last day of the dataset\nlast_day = returns.index.max().strftime('%Y-%m-%d')\nlast_day", "_____no_output_____" ], [ "# Create a 5 day forecast of volatility\nforecast_horizon = 5\n# Start the forecast using the last_day calculated above\n# YOUR CODE HERE!\nforecasts = res_garch.forecast(start=last_day, horizon=forecast_horizon)\nforecasts", "_____no_output_____" ], [ "# Annualize the forecast\nintermediate = np.sqrt(forecasts.variance.dropna() * 252)\nintermediate.head()", "_____no_output_____" ], [ "# Transpose the forecast so that it is easier to plot\nfinal = intermediate.dropna().T\nfinal.head()", "_____no_output_____" ], [ "# Plot the final forecast\n# YOUR CODE HERE!\nfinal.plot(title = \"5 Day Forecast of Volatility\")", "C:\\Users\\chakravartiraghavan\\anaconda3\\envs\\timeseries\\lib\\site-packages\\pandas\\plotting\\_core.py:396: UserWarning: FixedFormatter should only be used together with FixedLocator\n ax.set_xticklabels(xticklabels)\n" ] ] ], [ [ "---", "_____no_output_____" ], [ "# Conclusions", "_____no_output_____" ], [ "Based on your time series analysis, would you buy the yen now?\n\nIs the risk of the yen expected to increase or decrease?\n\nBased on the model evaluation, would you feel confident in using these models for trading?", "_____no_output_____" ], [ "#### LaTeX: $\\alpha_2$", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown", "markdown", "markdown", "markdown" ] ]
cb579a1bb3077bcd4724385d5659fcdd5e08ddbf
1,920
ipynb
Jupyter Notebook
index.ipynb
Euclid-Python/Python-pytest
a4b28e0b9b29fb8ef9e62cef1bf19ca6a54bd84f
[ "MIT" ]
1
2019-11-15T09:57:09.000Z
2019-11-15T09:57:09.000Z
index.ipynb
Euclid-Python/Python-pytest
a4b28e0b9b29fb8ef9e62cef1bf19ca6a54bd84f
[ "MIT" ]
9
2019-10-17T15:28:47.000Z
2019-10-22T19:04:04.000Z
index.ipynb
Euclid-Python/Python-pytest
a4b28e0b9b29fb8ef9e62cef1bf19ca6a54bd84f
[ "MIT" ]
1
2019-11-15T09:57:12.000Z
2019-11-15T09:57:12.000Z
22.325581
80
0.560417
[ [ [ "# Pytest, a journey in the testing world !", "_____no_output_____" ], [ "<center>\n <img src=\"https://docs.pytest.org/en/latest/_static/pytest1.png\">\n</center>", "_____no_output_____" ], [ "## Plan\n\n* [Introduction](intro.ipynb) to testing and unit tests\n* Pytest, [first steps](pytest-intro.ipynb)\n* [Fixtures](pytest-fixtures.ipynb)\n* Use leverage with [parametrization](parametrization.ipynb)\n* A few [good practices](pytest-good-practices.ipynb)\n* Application with [some exercises](exercises-01.ipynb)\n* The fabulous World of [Mocks](pytest-mock.ipynb)\n* The tests [coverage](pytest-coverage.ipynb)\n* Application with [some exercises(2)](exercises-02.ipynb)\n\n", "_____no_output_____" ] ] ]
[ "markdown" ]
[ [ "markdown", "markdown", "markdown" ] ]
cb57a10b167a863c060b0c84d2a8fe94efbfccdf
765,371
ipynb
Jupyter Notebook
tutorials/W0D3_LinearAlgebra/W0D3_Tutorial3.ipynb
vasudev-sharma/course-content
46fb9be49da52acb5df252dda43f11b6d1fe827f
[ "CC-BY-4.0", "BSD-3-Clause" ]
null
null
null
tutorials/W0D3_LinearAlgebra/W0D3_Tutorial3.ipynb
vasudev-sharma/course-content
46fb9be49da52acb5df252dda43f11b6d1fe827f
[ "CC-BY-4.0", "BSD-3-Clause" ]
null
null
null
tutorials/W0D3_LinearAlgebra/W0D3_Tutorial3.ipynb
vasudev-sharma/course-content
46fb9be49da52acb5df252dda43f11b6d1fe827f
[ "CC-BY-4.0", "BSD-3-Clause" ]
null
null
null
978.735294
733,680
0.960627
[ [ [ "<a href=\"https://colab.research.google.com/github/NeuromatchAcademy/course-content/blob/master/tutorials/W0D3_LinearAlgebra/W0D3_Tutorial3.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "\n# Bonus Tutorial: Discrete Dynamical Systems\n**Week 0, Day 3: Linear Algebra**\n\n**By Neuromatch Academy**\n\n__Content creators:__ Name Surname, Name Surname\n\n\n\n__Content reviewers:__ Name Surname, Name Surname. \n\n__Content editors:__ Name Surname, Name Surname.\n\n__Production editors:__ Name Surname, Name Surname. ", "_____no_output_____" ], [ "---\n#Tutorial Objectives\n\nIn this tutorial, we will start to gain an intuition for how eigenvalues and eigenvectors can be helpful for understanding dynamical systems. We will focus on a discrete dynamical system consisting of two neurons. \n\nBy the end of the tutorial, you will:\n\n* Predict whether the firing rates of interconnected model neurons will explode or decay based on the eigenvalues of the weight matrix.\n* Apply ideas from previous tutorials (linear combination, basis vectors, etc) to understand a new concept\n\n\n\n", "_____no_output_____" ], [ "---\n# Setup", "_____no_output_____" ] ], [ [ "# Imports\n\n# Import only the libraries/objects that you use in this tutorial.\n\n# If any external library has to be installed, !pip install library --quiet\n# follow this order: numpy>matplotlib.\n# import widgets in hidden Figure settings cell\n\nimport numpy as np\nimport matplotlib\nimport matplotlib.pyplot as plt", "_____no_output_____" ], [ "#@title Figure settings\nimport ipywidgets as widgets # interactive display\n%config InlineBackend.figure_format = 'retina'\nplt.style.use(\"https://raw.githubusercontent.com/NeuromatchAcademy/course-content/master/nma.mplstyle\")", "_____no_output_____" ], [ "#@title Plotting functions\n\ndef plot_circuit_responses(u, W, eigenstuff = False, xlim='default', ylim='default'):\n fig, ax = plt.subplots(1, 1, figsize=(10,10))\n\n # Set up axis limits\n if xlim =='default':\n extreme = np.maximum(np.abs(np.min(u)), np.max(u))\n xlim = [- extreme, extreme]\n if ylim == 'default':\n extreme = np.maximum(np.abs(np.min(u)), np.max(u))\n ylim = [- extreme, extreme]\n\n # Set up look\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n cs = plt.rcParams['axes.prop_cycle'].by_key()['color']*10\n ax.set_xlim(xlim)\n ax.set_ylim(ylim)\n\n # Set up tracking textz\n tracker_text = ax.text(.5, .9, \"\", color='w', fontsize=20, verticalalignment='top', horizontalalignment='left', transform=ax.transAxes)\n\n # Plot eigenvectors\n if eigenstuff:\n eigvals, eigvecs = np.linalg.eig(W)\n\n if np.abs(eigvals[0]) < np.abs(eigvals[1]):\n lc1 = 'c'\n lc2 = 'g'\n else:\n lc1 = 'g'\n lc2 = 'c'\n\n ax.plot(np.arange(-10000, 10000)*eigvecs[0, 0], np.arange(-10000, 10000)*eigvecs[1, 0],lc1, alpha=.5, label = r'$\\mathbf{v}_1$')\n ax.plot(np.arange(-10000, 10000)*eigvecs[0, 1], np.arange(-10000, 10000)*eigvecs[1, 1], lc2, alpha=.5, label = r'$\\mathbf{v}_2$')\n\n ax.legend()\n\n # Set up scatter\n cmap = plt.cm.Blues_r\n norm = plt.Normalize(vmin=0, vmax=u.shape[1])\n scatter = ax.scatter(u[0, :], u[1, :], alpha=1, c = cmap(norm(np.arange(u.shape[1]))))\n\n\n ax.set(xlabel = 'Neuron 1 Firing Rate', ylabel = 'Neuron 2 Firing Rate', title = 'Neural firing over time')\n\n fig.colorbar(matplotlib.cm.ScalarMappable(norm=norm, cmap=cmap),\n ax=ax, label = 'Time step')", "_____no_output_____" ], [ 
"#@title Helper functions\n\ndef get_eigval_specified_matrix(target_eig):\n \"\"\"Generates matrix with specified eigvals\n\n Args:\n target_eig (list): list of target eigenvalues, can be real or complex,\n should be length 2 unless you desire repeated eigenvalues\n with the same eigenvector, in which case length 1\n\n Returns:\n ndarray: 2 x 2 matrix with target eigvals\n\n \"\"\"\n\n # Set up two eigenvectors\n V = np.array([[1, 1], [-1, 1]]).astype('float')\n for i in range(2):\n V[:,i] = V[:,i]/np.linalg.norm(V[:,i])\n\n # Get matrix with target eigenvalues\n if type(target_eig[0]) == int or type(target_eig[0]) == float:\n\n if len(target_eig) == 2: # distinct eigvecs (not necessarily distinct eigvals)\n\n D = np.diag(target_eig)\n A = V @ D @ np.linalg.inv(V)\n\n else: # repeated with same vec\n summed = 2*target_eig[0]\n\n a = summed-3\n d = 3\n bc = target_eig[0]**2 - a*d\n factors = [n for n in range(1, bc+ 1) if bc % n == 0]\n b = factors[int(np.floor(len(factors)/2))]\n c = bc/-b\n\n A = np.array([[a, b], [c, d]])\n\n elif type(target_eig[0]) == complex:\n\n C = [np.real(V[:,0]), np.real(V[:,1])]\n B = np.array([[np.real(target_eig[0]), np.imag(target_eig[0])], [-np.imag(target_eig[0]), np.real(target_eig[0])]]).squeeze()\n A = C @ B @ np.linalg.inv(C)\n\n return A", "_____no_output_____" ] ], [ [ "---\n\n# Section 1: Defining a neural circuit\n\nIn previous tutorials, we have looked at static models of postsynaptic neurons based on the responses of presynaptic neurons. \n\nLet's now introduce the concept of time. We will chop time up into little bins and look at the activity of neurons in each bin. That is, we will work in a **discrete** time framework. For example, if each bin is 1 second long, we will look at the firing rate of each neuron at intervals of a second.\n\n\nInstead of examining pre- and post- synaptic neurons, we will examine at two neurons in one area that are connected. In our model, the activity of neuron 1 at one time bin depends on the activities of both neurons during the previous time bin multiplied by the respective weights from itself and neuron 2. It might seem weird for a neuron to have a weight to itself - this is abstracting away some biological details but basically conveys how much the neural activity depends on its history. (Throughout this course, we'll see lots of neuron models and how some model biological detail more faithfully while others abstract.)\n\nWe will refer to the activity of neuron i during time bin j as $a_{i, j}$. The weight from neuron x to neuron y will be $w_{y, x}$. With this helpful notation, we can write an equation for the activity of neuron 1 at time bin t:\n$$a_{1, t} = w_{1, 1}a_{1, t-1} + w_{1, 2}a_{2, t-1} $$\n\nAnd the symmetric model is true of neuron 2:\n$$a_{2, t} = w_{2, 1}a_{1, t-1} + w_{2, 2}a_{2, t-1} $$\n\nThis is already a mess of subscript numbers - luckily we can use matrices and vectors once again and our model becomes: \n\n$$\\mathbf{a}_{t} = \\mathbf{W}\\mathbf{a}_{t-1} $$\nwhere:\n$$\\mathbf{W} = \\begin{bmatrix} w_{1, 1} & w_{1, 2} \\\\ w_{2, 1} & w_{2, 2} \\end{bmatrix}, \\mathbf{a}_{t} = \\begin{bmatrix} a_{1, t} \\\\ a_{2, t} \\end{bmatrix}$$\n\nIt turns out that this is a **discrete dynamical system**. Dynamical systems are concerned with how quantities evolve other time, in this case our neural firing rates. When we model the evolution of quantities over time using a discrete time framework, it is, unsurprisingly, a discrete dynamical system. 
We will see continuous dynamical systems (where we embrace the full continuity of time) tomorrow and later in the comp neuro course during W2D2: Linear Dynamics.\n\n\n", "_____no_output_____" ], [ "## Coding Exercise 1: Implementing the circuit\n\nIn this exercise, you will implement the function `circuit_implementation`. Given a weight matrix, initial activities at time 0, and a number of time bins to model, this function calculates the neural firing rates at each time bin.\n\nWe will use initial firing rates of 1 for both neurons:\n$$\\mathbf{a}_0 = \\begin{bmatrix}\n1 \\\\\n1 \\\\\n\\end{bmatrix}$$\nand the weight matrix:\n\n$$\\mathbf{W} = \\begin{bmatrix} 1 & 0.2 \\\\\n0.1 & 1 \\\\ \\end{bmatrix}$$\n\nWe will look at activity over 30 time steps. As before, we will allow our firing rates to be negative, despite this not being possible biologically.\n ", "_____no_output_____" ] ], [ [ "def circuit_implementation(W, u0, T):\n \"\"\" Simulate the responses of N neurons over time given their connections\n\n Args:\n W (ndarray): weight matrix of synaptic connections, should be N x N\n u0 (ndarray): initial condition or input vector, should be N,\n T (scalar): number of time steps to run simulation for\n\n Returns:\n u (ndarray): the neural responses over time, should be N x T\n\n \"\"\"\n\n # Compute the number of neurons\n N = W.shape[0]\n\n # Initialize empty response array and initial condition\n u = np.zeros((N, T))\n u[:, 0] = u0\n\n #################################################\n ## TODO for students ##\n # Fill out function and remove\n raise NotImplementedError(\"Student exercise: Complete circuit_implementation\")\n #################################################\n\n # Loop over time steps and compute u(t+1)\n for i_t in range(1, T):\n u[:, i_t] = ...\n\n return u\n\n\n# Define W, u0, T\nW = np.array([[1, .2], [.1, 1]])\nu0 = np.array([1, 1])\nT = 30\n\n# Get neural activities\nu = circuit_implementation(W, u0, T)\n\n# Visualize neural activities\nplot_circuit_responses(u, W)", "_____no_output_____" ], [ "# to_remove solution\ndef circuit_implementation(W, u0, T):\n \"\"\" Simulate the responses of N neurons over time given their connections\n\n Args:\n W (ndarray): weight matrix of synaptic connections, should be N x N\n u0 (ndarray): initial condition or input vector, should be N,\n T (scalar): number of time steps to run simulation for\n\n Returns:\n u (ndarray): the neural responses over time, should be N x T\n\n \"\"\"\n\n # Compute the number of neurons\n N = W.shape[0]\n\n # Initialize empty response array and initial condition\n u = np.zeros((N, T))\n u[:, 0] = u0\n\n # Loop over time steps and compute u(t+1)\n for i_t in range(1, T):\n u[:, i_t] = W @ u[:, i_t-1]\n\n return u\n\n\n# Define W, u0, T\nW = np.array([[1, .2], [.1, 1]])\nu0 = np.array([1, 1])\nT = 30\n\n# Get neural activities\nu = circuit_implementation(W, u0, T)\n\n# Visualize neural activities\nwith plt.xkcd():\n plot_circuit_responses(u, W)", "_____no_output_____" ] ], [ [ "The firing rates of both neurons are exploding to infinity over time. 
Let's now see what happens with a different weight matrix:\n\n$$\\mathbf{W} = \\begin{bmatrix} 0.2 & 0.1 \\\\\n1 & 0.2 \\\\ \\end{bmatrix}$$", "_____no_output_____" ] ], [ [ "# @markdown Execute this cell to visualize activity over time\n\n# Define W, u0, T\nW = np.array([[.2, .1], [1, .2]])\nu0 = np.array([1, 1])\nT = 30\n\n# Get neural activities\nu = circuit_implementation(W, u0, T)\n\n# Visualize neural activities\nwith plt.xkcd():\n  plot_circuit_responses(u, W)", "_____no_output_____" ] ], [ [ "We can see that with this weight matrix, the firing rates are decaying towards zero. It turns out that we could have predicted this by looking at the eigenvalues of the weight matrices, as we'll see in the next section.", "_____no_output_____" ], [ "---\n# Section 2: Understanding dynamics using eigenstuff\n\nAs we'll see in this section, eigenvectors and eigenvalues are incredibly useful for understanding the evolution of the neural firing rates, and discrete dynamical systems in general.\n", "_____no_output_____" ], [ "## Section 2.1: Rewriting our circuit equation\n\nIn our neural circuit, we are modeling the activities at a time step as:\n$$\\mathbf{a}_{t} = \\mathbf{W}\\mathbf{a}_{t-1} $$\n\nLet's start at time step 1:\n$$\\mathbf{a}_{1} = \\mathbf{W}\\mathbf{a}_{0} $$\n\nAnd move on to time step 2:\n$$\\mathbf{a}_{2} = \\mathbf{W}\\mathbf{a}_{1} $$\n\nIn the above equation, we can substitute in $\\mathbf{a}_{1} = \\mathbf{W}\\mathbf{a}_{0}$:\n$$\\mathbf{a}_{2} = \\mathbf{W}\\mathbf{W}\\mathbf{a}_{0} = \\mathbf{W}^2 \\mathbf{a}_{0}$$\n\nWe can keep doing this with subsequent time steps:\n$$\\mathbf{a}_{3} = \\mathbf{W}\\mathbf{a}_{2} = \\mathbf{W}\\mathbf{W}^2 \\mathbf{a}_{0} = \\mathbf{W}^3\\mathbf{a}_{0} $$\n$$\\mathbf{a}_{4} = \\mathbf{W}\\mathbf{a}_{3} = \\mathbf{W}\\mathbf{W}^3 \\mathbf{a}_{0} = \\mathbf{W}^4\\mathbf{a}_{0} $$\n\nThis means that we can write the activity at any point as:\n$$\\mathbf{a}_{i} = \\mathbf{W}^i\\mathbf{a}_{0} $$", "_____no_output_____" ], [ "## Section 2.2: Initial firing rates along an eigenvector\n\nRemember from the last tutorial that an eigenvector of matrix $\\mathbf{W}$ is a vector that becomes a scalar multiple (eigenvalue) of itself when multiplied by that matrix:\n\n$$\\mathbf{W}\\mathbf{v} = \\lambda\\mathbf{v}$$\n\nLet's look at what happens if the initial firing rates in our neural circuit lie along that eigenvector, using the same substitution method as in the previous section:\n$$\\mathbf{a}_{0} = \\mathbf{v} $$\n$$\\mathbf{a}_{1} = \\mathbf{W}\\mathbf{a}_0 = \\mathbf{W}\\mathbf{v} = \\lambda\\mathbf{v} $$\n$$\\mathbf{a}_{2} = \\mathbf{W}\\mathbf{a}_1 = \\mathbf{W}\\lambda\\mathbf{v} = \\lambda\\mathbf{W}\\mathbf{v} = \\lambda^2\\mathbf{v}$$\n$$\\mathbf{a}_{3} = \\mathbf{W}\\mathbf{a}_2 = \\mathbf{W}\\lambda^2\\mathbf{v} = \\lambda^2\\mathbf{W}\\mathbf{v} = \\lambda^3\\mathbf{v}$$\n$$...$$\n$$\\mathbf{a}_i = \\lambda^i\\mathbf{v}$$\n\nThe activities at any time step equal a scalar times the initial activities. In other words, if the initial activities lie along an eigenvector, the activities will only evolve along that eigenvector.", "_____no_output_____" ], [ 
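"Before the interactive demo, here is a quick numeric check of the two results above (an added sketch for illustration; it assumes only `numpy`, which was imported in the Setup):", "_____no_output_____" ] ], [ [ "# Added sketch: check a_i = W^i a_0, and a_i = lambda^i v for an eigenvector start\nimport numpy as np\n\nW = np.array([[.2, .1], [1, .2]])\na0 = np.array([1., 1.])\n\n# Iterating the circuit one step at a time matches the matrix power\na = a0\nfor _ in range(5):\n  a = W @ a\nprint(np.allclose(a, np.linalg.matrix_power(W, 5) @ a0))   # True\n\n# If the initial condition is an eigenvector v, the activity stays on it\nlams, V = np.linalg.eig(W)\nv = V[:, 0]\nprint(np.allclose(np.linalg.matrix_power(W, 5) @ v, lams[0]**5 * v))   # True", "_____no_output_____" ] ], [ [ 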
"### Interactive demo 2.2: Changing the eigenvalue\n\nLet's visualize what happens if the initial activities of the neurons lie along an eigenvector and think about how this depends on the eigenvalue.\n\nThe interactive demo below is the same visualization you saw in Section 1, but now we also plot the eigenvectors $\\mathbf{v}_1$ and $\\mathbf{v}_2$.\n\nQuestions:\n1. What happens if the eigenvalue is large (2)?\n2. What happens as you move the eigenvalue from 2 towards 0?\n3. What happens with negative eigenvalues?", "_____no_output_____" ] ], [ [ "# @markdown Execute this cell to enable the widget\n\[email protected](eigenvalue = widgets.FloatSlider(value=0.5, min=-2, max=2, step=0.2))\ndef plot_system(eigenvalue):\n\n  # Get weight matrix with specified eigenvalues\n  W = get_eigval_specified_matrix([eigenvalue, eigenvalue])\n\n  # Get initial condition\n  u0 = np.array([1, 1])\n\n  # Get neural activities\n  u = circuit_implementation(W, u0, 10)\n\n  # Visualize neural activities\n  plot_circuit_responses(u, W, eigenstuff=True, xlim=[-15, 15], ylim=[-15, 15])", "_____no_output_____" ], [ "# to_remove explanation\n\n# 1) With the eigenvalue = 2, the activities of the neurons explode towards infinity, along\n#    the eigenvector.\n\n# 2) At eigenvalue = 1, there is a shift in what happens. With the eigenvalue above 1,\n#    the activities always explode. Once the eigenvalue is below 1, the activities decay to 0.\n#    If the eigenvalue equals 1, the activities never differ from the initial condition.\n#    This makes sense with the equation above. Lambda is raised to a power when computing activities:\n#    if it's a fraction, this term shrinks, so the activities decay. If it is above 1, this term\n#    explodes, so the activities explode too.\n\n# 3) If the eigenvalue is between -1 and 0, the neural activities jump across the\n#    origin repeatedly along the eigenvector but eventually decay to 0. If the eigenvalue is below -1, the\n#    activities jump across the origin repeatedly along the eigenvector but explode to\n#    positive or negative infinity. Once again, this makes sense if you think through the equation above.", "_____no_output_____" ] ], [ [ "## Section 2.3: Other initial conditions\n\nWe now know that if our initial activities (or initial condition) fall on an eigenvector of $\\mathbf{W}$, the activities will evolve along that line, either exploding to infinity if the absolute value of the eigenvalue is above 1 or decaying to the origin if it is below 1. What if our initial condition doesn't fall along the eigenvector though?\n\nTo understand what will happen, we will use the ideas of basis vectors and linear combinations from Tutorial 1.\n\nLet's assume for now that our weight matrix has two distinct eigenvectors ($\\mathbf{v}_1$ and $\\mathbf{v}_2$) with corresponding eigenvalues $\\lambda_1$ and $\\lambda_2$, and that these eigenvectors form a basis for 2D space. That means we can write any vector in 2D space as a linear combination of our eigenvectors, including our initial activity vector:\n\n$$\\mathbf{a}_0 = c_1\\mathbf{v}_1 + c_2\\mathbf{v}_2 $$\n\nLet's compute the next time step, using our previous strategy of substitution:\n$$\\begin{align}\n\\mathbf{a}_1 &= \\mathbf{W}\\mathbf{a}_0\n\\\\ &= \\mathbf{W}(c_1\\mathbf{v}_1 + c_2\\mathbf{v}_2) \\\\ &= c_1\\mathbf{W}\\mathbf{v}_1 + c_2\\mathbf{W}\\mathbf{v}_2 \\\\ &= c_1\\lambda_1\\mathbf{v}_1 + c_2\\lambda_2\\mathbf{v}_2 \\end{align} $$\n\nAll activities can be written as:\n$$\\mathbf{a}_i = c_1\\lambda_1^i\\mathbf{v}_1 + c_2\\lambda_2^i\\mathbf{v}_2 $$\n\nWe'll see what this means for our system in the next demo.", "_____no_output_____" ], [ 
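"How do we find the weights $c_1$ and $c_2$ in practice? Stacking the eigenvectors as the columns of a matrix $V$ turns the decomposition into the linear system $V\\mathbf{c} = \\mathbf{a}_0$, which we can solve numerically (an added sketch for illustration):", "_____no_output_____" ] ], [ [ "# Added sketch: solve for the coefficients c1, c2 of a0 in the eigenvector basis\nimport numpy as np\n\nW = np.array([[1, .2], [.1, 1]])\nlams, V = np.linalg.eig(W)   # columns of V are the eigenvectors v1, v2\n\na0 = np.array([1., 2.])\nc = np.linalg.solve(V, a0)   # V @ c = a0, so a0 = c[0]*v1 + c[1]*v2\n\n# Verify the closed form a_i = c1*lam1^i*v1 + c2*lam2^i*v2 at i = 10\ni = 10\nclosed_form = c[0] * lams[0]**i * V[:, 0] + c[1] * lams[1]**i * V[:, 1]\nprint(np.allclose(np.linalg.matrix_power(W, i) @ a0, closed_form))   # True", "_____no_output_____" ] ], [ [ 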
"### Interactive demo 2.3: Changing both eigenvalues\n\nIn the demo below, you can now change both eigenvalues and the initial condition (with `a0_1` setting neuron 1 initial activity and `a0_2` setting neuron 2 initial activity). We will only look at positive eigenvalues to keep things a little simpler.\n\nThink each of the following questions through based on the equation we just arrived at and then play with the demo to see if you are correct.\n$$\\mathbf{a}_i = c_1\\lambda_1^i\\mathbf{v}_1 + c_2\\lambda_2^i\\mathbf{v}_2 $$\n\n1. What will happen when both eigenvalues are greater than 1? Does this depend on the initial condition?\n2. What will happen when both eigenvalues are less than 1?\n3. Set eigenvalue1 to 2 and eigenvalue2 to 1.2 and try out different initial conditions. What do you see? Why are you seeing this?\n4. What happens if one eigenvalue is below 1 and the other is above 1?", "_____no_output_____" ] ], [ [ "# @markdown Execute this cell to enable the widget\n\[email protected](eigenvalue1 = widgets.FloatSlider(value=0.5, min=0.2, max=2, step=0.2),\n                  eigenvalue2 = widgets.FloatSlider(value=0.5, min=0.2, max=2, step=0.2),\n                  a0_1 = widgets.FloatSlider(value=1, min=-5, max=5, step=0.2),\n                  a0_2 = widgets.FloatSlider(value=2, min=-5, max=5, step=0.2))\ndef plot_system(eigenvalue1, eigenvalue2, a0_1, a0_2):\n\n  # Get initial condition\n  a0 = np.array([a0_1, a0_2])\n\n  # Get weight matrix with specified eigenvalues\n  W = get_eigval_specified_matrix([eigenvalue1, eigenvalue2])\n\n  # Get neural activities\n  u = circuit_implementation(W, a0, 10)\n\n  # Visualize neural activities\n  plot_circuit_responses(u, W, eigenstuff=True, xlim=[-15, 15], ylim=[-15, 15])", "_____no_output_____" ], [ "# to_remove explanation\n\n# 1) If both eigenvalues are above 1, the neural activity will eventually explode\n#    to infinity or negative infinity, depending on initial conditions.\n\n# 2) If both eigenvalues are below 1, the neural activity will eventually decay to 0.\n\n# 3) The activities will explode to positive or negative infinity, but the exact trajectory\n#    is drawn towards the eigenvector with the larger eigenvalue. This is because the larger eigenvalue\n#    will increasingly dominate the other one as it is raised to increasingly larger powers.\n\n# 4) The activities will eventually explode to positive or negative infinity, unless\n#    the initial condition lies exactly on the eigenvector with the small eigenvalue. If the\n#    initial condition is near to that eigenvector, the trajectory will first go towards\n#    the origin before exploding.", "_____no_output_____" ] ], [ [ 
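"We can check the dominance claim from question 3 numerically. The sketch below (added for illustration) builds a matrix with eigenvalues 2 and 1.2 using the `get_eigval_specified_matrix` helper from the Setup, and confirms that after many steps the trajectory direction lines up with the dominant eigenvector:", "_____no_output_____" ] ], [ [ "# Added sketch: with lam1 = 2 > lam2 = 1.2, the lam1 term dominates for large i,\n# so the trajectory direction converges to the v1 eigenvector direction\nimport numpy as np\n\nA = get_eigval_specified_matrix([2, 1.2])   # helper defined in the Setup above\nlams, V = np.linalg.eig(A)\nv1 = V[:, np.argmax(np.abs(lams))]\n\na = np.array([1., 2.])\nfor _ in range(20):\n  a = A @ a\ndirection = a / np.linalg.norm(a)\nprint(np.abs(direction @ v1))   # close to 1: aligned with v1 (up to sign)", "_____no_output_____" ] ], [ [ 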
"## Section 2.4: Complex eigenvalues\n\nWe've been hiding some complexity from you up until now, namely that eigenvalues can be complex. Complex eigenvalues result in a very specific type of dynamics: rotations.\n\nWe will not delve into the proof or intuition behind this here, as you'll encounter complex eigenvalues in dynamical systems in W2D2: Linear Dynamics.\n\nInstead, we will simply demonstrate how the nature of the rotations depends on the complex eigenvalues in the animation below. We plot a 3-neuron circuit to better show the rotations. We illustrate each of the following:\n\n* Complex eigenvalues with an absolute value equal to 1 result in a sustained rotation in 3D space.\n\n* Complex eigenvalues with an absolute value below 1 result in a rotation towards the origin.\n\n* Complex eigenvalues with an absolute value above 1 result in a rotation towards positive or negative infinity.\n", "_____no_output_____" ], [ "![Animation of 3-neuron firing rates: sustained, decaying, and expanding rotations for complex eigenvalues with absolute value equal to, below, and above 1.](complex_eigenvalues.gif)
8lwCEMdKhDstTQhz//BGIQN1ICDfzPA/8LgAsFwAERCtGJT4RiFKU4RSpW0YpXxGIWtbhFLnbRi18EYxjFOEYyltGMZ0RjGtW4Rja20Y1vhGMc5ThHOtbRjnfEYx71uEc+9tGPfwRkIAU5SEIW0pCHRGQiFblIRjbSkY+EZCQlOUlKVtKSl8RkJjW5SU520pOfBGUoRTlKUpbSlKdEZSpVuUpWttKVr4RlLGU5S1rW0pa3xGUudblLXvbSl78EZjCFOUxiFtOYx0RmMpW5TGY205nPhGY0pTlNalbTmtfEZja1uU1udtOb3wRnOMU5TnKW05znRGc61blOdrbTne+EZzzlOU961tOe98Rn/z71uU9+9tOf/wRoQAU6UIIW1KAHRWhCFbpQhjbUoQ+FaEQlOlGKVtSiF8VoRjW6UY521KMfBWlIRTpSkpbUpCdFaUpVulKWttSlL4VpTGU6U5rW1KY3xWlOdbpTnvbUpz8FalCFOlSiFtWoR0VqUpW6VKY21alPhWpUpTpVqlbVqlfFala1ulWudtWrXwVrWMU6VrKW1axnRWta1bpWtrbVrW+Fa1zlOle61tWud8VrXvW6V7721a9/BWxgBTtYwhbWsIdFbGIVu1jGNtaxj4VsZCU7WcpW1rKXxWxmNbtZznbWs58FbWhFO1rSlta0p0VtalW7Wta21rWvhW1sZf87W9rW1ra3xW1udbtb3vbWt78FbnCFO1ziFte4x0VucpW7XOY217nPhW50pTtd6lbXutfFbna1u13udte73wVveMU7XvKW17znRW961bte9rbXve+Fb3zlO1/61te+98VvfvW7X/7217//BXCABTxgAhfYwAdGcIIVvGAGN9jBD4ZwhCU8YQpX2MIXxnCGqUiPvMlDQnLhcN4+rOF6hphmI4aLiT2cVRWj+C0tJrE9YQziDruYqjNWyNKYNgAFjIBFIsHxWIP84hqvdcgH0bHBeOzjoBx5qk4uSJI7rJlXeQTK4KHAlLWsZR7d88pL+XJYvyzlDm8mJGHuTpa3vGb/eXQ5nWNm8wGqzBE0c0fNbOYyPuG84x7/+MxFVuuetyxnkNR5O3fGc4fdjE5Bb9kAhQZ0fxCdaJotmp6NLvOc6Rxp/kya0pbWM6cRQuYpP/ojhp6OpxMN6nJ+WQU3sIw/bvCAg+WtXpsWMYVUjWdWxxPTHSb0qUWtn12zudclHrZBXh3rWdeaZrfeCKqlU+w1H1uc0taHCkKQtxMIO9cTyhkIxD1ucoNgytZ256/LDOlvS5rS5+4ntrXNbW+fmELhLne54a1ObOujAx0uD67t/aNt5w3d7XQ1rPUha1p3GNoa6fdzqL3lg/s62Q35d94CHu2Lj6jglV5nxIVDM1ML/3zFBFd0qNsNkXnTrNtW7vh38J3vce97nyLPW8k5vvITfbzNIY/5QjJOMx50JOL58XnFgc7zhwxdHhuHeNAtlPR4Sz0hTi+6yW1cIqrz2+oJ0RrNGGD0r0+o6zIuu0FGrhmYM53rKefn0cMuj7FrHUhnZ3TaDeKDvE2A7G4nEd4vrXeCYP3vA/eR4FWO+InwnWZ+tzvKDb50xkukBB123kWOjh/Fy1Pueav7zitvos6jHfAOuXzeMm+RzYOn9OPcfIFohoHIJx7uph89RByvrsOf3PaTjzvhCyJ7A9UeJA5Y/WNef23hEwRveYNS1E+PoeWnu/mpp1nyKdL671Tf8//N18fzaRb9jHCfIRL4jAM03Rjvf3Pz/igYzfok/dwH/vbInn5DiE970fveMfYIvaFoP3h6v/iTh/krP/BzCPTLGwzQPrsYwG5qvfGgGQjov62zP+DDPf+bCPGTB/LDCPMrin2ogbmxAaOIQOvLv4agQHmwQPrjQI9gQGBzAApSjBTcptZTgQ4THM1TwO67v8FbwYWAv7xBwBD8QaLQEGc7waLAQXbSQR6EQQzciMxYMwdkjCfMJu6buxhJwCG0iBl8tzFsDxQMwnmawLx5wS+sv7eIB/PRsiYkCi30OjBciC6cQqHoAOLTsmgBwbmgw2tqvX1wthBoIocQQYUQQzL/pDQ5FMAz/D47TIgdzJseZL0kBAkVkJc1W8NH1EB9GsRCPMSGSESGgAENmJkr9DO5CERraj0UmDKtQEJJnIhFZEQ8c0ShaEXmo0WEwEM2jMGmeMOQWTMPQIBR3IhdNKdXjEVgpEKQuAk2OwEU+MO2UMZpar25oxkJcMagsMVbXLNcDIprdD8FJMS8McRufAsVSABnmzIYgZg5hMQNfEaH0EZ54MZZbEORqAAHuMcOs4CEscZ5hD3wUzUbnIhSRAgfoLmGdEiaC0BdJMh3YsYOk0Uf7EWPiAd6gAA2E4ACQMaPIMeCzEh32TKElAiFlAjrYDMboMaB/MRz2jwrnDIf/9DHYMxAkPMyBbzHfMTIfRSKG2CdNbMV0VGKkeRFoGQImuwwm/xJnDSKfUCBudmyF/gAgUQKpHymo8M+efAMmhHHlMRE7tDKHDTIk7zJegSJQQkANpMApGmKsizHkuzKryzDp1TLorCHTdyyUWkXeYzJZWw+jaEZQ0xFeXidhBzL7ZDLLQQ/pswbp7zEkqwIGhjKQVM/VpxINBxMdPSHw0xMsaRMkHjDjqxKEAjNcdxMcDq6w4yRJHOJyVTKCmlMbOJKgMybsIwIlVQYBPiaLcNCuqhNbmrNvHnNvInNiuBNjVCBy9QyADDDwGw14buADiu6G8gbFwjJg1jO5xhOV//szML8zLxJTYjoToKwTAOcsmhBSbj4Tm2KuOrMm+vMzu00iPPMiEH5zdyMTp18M+FTEZIrCKp8OtmESupbTShsPtfUB9jEy6BIADazgFWEwAQlQADNuQHVOAPNy7a4jQ7TTZB4z2PqNx4AuIK4CpoRAA69Owutw9lECPkkOn3ATprRThYVCVsclWqsUOmkR4ow0Q0liBQdDhxNDJYE0f78uRc90ByrT9HoMPcRTRg1Oxf9z4wMULYjCAKFut1cTIQQQwMoz8QYUdskvCS7UYKQgShVzi/tiOmhkThUUqWTQL0rQppJToL4TQdo09EEQh+90F4MUpoJOCJd0T6lUor/WEQACCHls1KEs1MDzFN92FNEbVKlIAw2u505pbxLPQgOkEKD2IC8OQBL7dAqBVSKPNMnVVM23T43NYhv9I2sW4wyFUS9A9VKPIhRpZlSfVU/7QgZQEWi1AApBUz/zDtabEuaUQCE2IcOo9CHwM/asNVowrY7lYdJrdRfTVT/gMwyO8YbfFQFVda8adaDeNa8iVZEhNWJ8IcCyNLnvMisHFfHlERKfLaEmMEP4FZPpZBqhSZsy1WasURe/QxTHYp+jFMtgxFLjAuApSZpw1d5eDh92Nd+PVWQmByf6zDfMEp6TdVwkjZzs9HIEb8X+EtSbFeCYMiHdFmHjEjVDFlI/y1XZnVWaMVYo0DSLVMAJgLEemXSjCVZvTHZvEFZxQRWikAXAFkzRn0LiA0mVDtHPHUWA6xYhjjPbwRHOXVCoCUniXU4fc0bfkXabs0IhV2zlhjTpIBaaZJaZ5vUg
sDWq12IaT2I9CRKBvhYpmjbX0I1WMyb5EGID8gbDyhbf60Ird3aJO3amSXXRB3aND2Ik01ZrF3ZhthZLYOADajco+jbgC07wKUZwT0IwqUZw51SxMUIBVrWLROAeXVPrwVPO5y7TkQIRKNVab1cxV1csOTUnbTDqc3WqrW1w83Yj0Db08RKkEXWfEK12mUI3E3d46UIdlTPjtXbHm1ewQRDRP9DtwkAven1xt7Fxd/9UYgQXXkgXYMw3Rox3rfI3ClzSR71xO0FXhj13oYAX7ETX5EoAGJ03UYlU9mtJkOjyaNliCQbgP79Pfu1ONpVw+i1TgZ2i+QFzgcUUQLmXhg94M4lCAWm4I+Q1ezNQg3GRqvrShBwiInF4Pu83Ob4XGUytPxliP2luxCG32/ltiOUSMcN1NlM4RXGPC9NWkWcste1z7g0Ybe1OsKkmbUtiLnjP90tYsZcYm8yYKP14AbNmwUmYrMtCgtm3GNd0ptr4g6DYoKQ4i9W3Vp8PBKGjBjmpTqz4RiACCf+QPN8YWq94jodwiCOkiHW4yoOifgNUZH/7GORtbo6vuMOo1+EyNqPTOLY9WE/VkoZPUCIGFR54Bh2JWTpkGNkqjM8TmN9WONBBuOkgFPfbVwHxr9L7jAedpAO62SV/eSk4eNKJk6pW7sXeGSEaF3brds9po1QJtFFzhs7fgg8/mUXvuWgsA5lJmM6VUGl7OVmNohgRuU23g9jzqUw22SyhYgaCFVbTmXOS2SzXEFMlmWF2ORattxnFgps5ghvluGgC2eJIGdd9eRzdr10bqYwk7LlZYgK6DAvNGdu1g97JiY0u+aI0GYq9uc/dWUhBMqBlgiDNk6JVmikA2hm+jJsPWSFIL50TGjqXeiPNtP8y+eI2GeC5WiU//ZoXabZfRRpiijpQ7Tb2WBoW/qygeXmdMmbAujnjkZnmpbJoMPoiNBomkHoeJ5o7+jpY54+oKZeoaYZoj7pFkVqe93H1s3jiYgHZzuBmwkRhdhp2ZjqqI25m56InC5qmT7qiubM6QNrej6IseY2s67ctI6NtZ6lK5vYKZ4IuxRkSCZmnlbpW61qwsNqedBqqDbqf+7qoF2IwbYIw1Y9tE5stV7sZLqy9pWHFlaIGp2y5PNr2ABsv425u6YIvXY5vuZseXaO1R6m0D7siTDt3HbmqO4O24YlKBNeX7WIJDnt2fZtsvzsAu44zK4Izc4+5J5sii5jULy44cYI4+btgv9I7dcA7leCsvTlU4xAAAMwQNTu7L9e7ojtONEm7YTY7c1OiO6O4/Ve6foT74wo7/OWbrmmbLpmzfSOabMSbmcj7orQbvlGbNqGYftm7OQ2vqqib8n2b6gK7w4b74vYbwXnTgH3bgefXQi/QBbz8K0uqwkfZgZPKhTvbxJX8fc98RKncCOT8RQX8Z5i8fmucZ3K8QW/cafq8Q5/8ZoK8t6e7qcqcu7ecZ9KcoJocpN68ieXqShfchyvciH/8Z2i8iEPqi3Pcqby8iPv8is38gpHcjJXci4vqjA38zFX8wGP8TePaxqXcxO/KjYncDR3cj3HKTzHKj8nK0BHK0GXcD7Ql/KYIvQ8r/MZ/3ND5/Mpd/RFB6pEf7JI/3KjovRAt3Qxj7F7lvQW7/RQd6NMF/VSN/VTR/VUV/VVZ/VWd/VXh/VYl/VZp/Vat/Vbx/Vc1/Vd5/Ve9/VfB/ZgF/ZhJ/ZiN/ZjR/ZkV/ZlZ/Zmd/Znh/Zol/Zpp/Zqt/Zrx/Zs1/Zt5/Zu9/ZvB/dwF/dxJ/dyN/dzR/d0V/d1Z/d2d/d3h/d4l/d5p/d6t/d7x/d81/d95/d+9/d/B/iAF/iBJ/iCN/iDR/iEV/iFZ/iGB5qAAAAh+QQFAwABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUEAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwD/ACzQAFkAYAWLAYc8PDxUVFRISEfnwT703HhOTkyliyjGxsRubmyMdSHa2txURRBoaGgSEhTm5uTFpzNxYBjbuTtCQkSxliynp6eAgH/CwsSReyNgURPQrzfg4OGBbBxaWlyenpzNwIqIiIk2NjVCNgz8/PosJAiurqx2dnfq6uzv7+5eXlxpWBaYmJi8ni+QkJCYgCSysrT46qDKyswWFhTOzszRyKgaFAT77rS8p1H29vMxKwyioqRiYmQyMjSajlw4LQwkHASLfT+qonf0zUQuLiwaGhy2trR3YhoKBgR5Zxzy5azdwVG+roTW1tS+pjSegiTmzmROPgweHhxKPg3WwnQmIgYqKiyujizS0tS6urwWDgToy1QmJiQqJiwiIiQ6Mgy+vrxaUjzuykLm5sR2ajT99tcSDgS2njAOCgSxliRWUiwGAgTU2NwuMjyinpQODhRyclwmKizU4sSCQBjWohBYelg4dDighAyAtIRupOAEBBhiZlBwekS6uNCAjIi0mgzowCjc4vguPjgkwoBCVGQKDgi83tyInIg6PihiVnQ6MGh0lJialqxGMDjGqBCyoOh2gGyyoJR0WGheQpAKMiiIdpyymLjooLCyqMQmIsAkJDDY4uCIVBjGliBSWHQeMkRapIQeOhyIbnTK6DCMjHhqemymlqDWmEDAuKScusCmbCy2wjDyvsxCXED42uwEDBDMuOweGDSIXIC2vLwuVEx2VBg4VBTwnjDYuLgGGBRCTDAmZIhwXNS8yvBmQlRSaGTO1tCesDBUQmxuwiQKCDCk7LCgoMTKMIDUzvTc9NSsvrSsusw8TEi8vLSOfIASZDhUPEgaKCjSwhCkxOxu4qS8zszEeCykzrRwGoBkapQUMmjO0sCAlFxoQBjwvhCkxHjoeEjWqlQmZNCCQmxYYhgUEGDwskCIlLB+bgjq1Ow6DkBCIhTGuMTM4uhsehikupxWRigCBgjKeMgKGDSmhEiIfGTeyNBYdhjE9NQODgQCAgwODgwKCgwGBgQGBgwKCgQCAgQAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDihxJsqTJkyhTqlzJsqXLlzBjymzpT98QKiAACImxb6bPn0CDCh1KtKjRo0iTKl3KtKnTp1CjSnU4RIcKEle8EGEhQN/Ur2DDih1LtqzZs2jTql3Ltq1YfwW8wHDRIYcXBRZQeHXLt6/fv4ADCx5MuLDhw4g1+gOAIIAQKFAAfNBwZUfiy5gza97MubPnz6BDJ92nz98/f6iFENEQQLTr17Bjy55Nu7bt2zFR+4NCwQQH3MCDCx9OvLjx48iV6gbgxYqA5NCjS59Ovbr165xRD2GhQQUU7ODDi/8fT768+fMc/TUoseSKhJ7o48ufT7++/fubGyCQ4SXAXvxNmQbggAROtY8QKHyQwz8scBBDgRBGeNR6Mlzhn4TK7UOaQPsIiOGHIKq0mAtLLAGDDBoooAIIHobo4osiUWjhfzACtQ8VAZTwwQclBNBAizUGKaRE/gjQQQUBAACADkSY0AEXQ0YppUPq
lSCDC10BOSVL/oCwmgMaOHCCBiwIoeWWaA6pnhb5dFiTADAs0VqadE653hIyMBCDPvn0CV+dJ+1TwAEqMCCBACUccEIODwLqKIyoCaQbFxRowMCjmIboTwBLiLCEChVUsGMFAPyZKUj+5KOFactpoMFzp8b/iqFuVLigwG+y5oqfPxwcIMMBXlhwwAEwEMEBjbp+pFsDVziAQrLQ3oeaPgxQZlm02JrnTwwACHCoAOA+B8WZ2V6k2xAHsFbuuuX5s48AByyhg6ns1mtvjajtg4IJMAhx77/UvUuEAhU0CvDBCBPoLgBWWEpuwhDTto8EJFYwbsQYZ2zePl468EEDGocs28QuWGHxwyKnrHJwHLugwQcGryyzZhNfYUUJMaA88848f7bPDl5oUAHIPRddGMk356yz0Uw3HdjPVwids9NUu7UYESfAYGi4Alxc9ddgq7UPAEQ4MLRukYatNlj7MHCDCCfgacXcFiCA7Np4i7XPEDsI/9AaAEPQe9A+UPQdgABTF5Sq4QJo0SGd/ghxhQgycLDDDkoCkHjenC/ljxYM6KADA6STjoIQgneuelPvqnCFsDJYoEKWCbV+BYowFADkjRVcceIBFKCQz9Iu6nvCCfzKsIQCCsAw7+rQG+XuPz1pqGH02DuVTwkWUMDjBy5fIUDqAumDggvoh8kBkFqogBcLFagAAwx6EQ9i5Czkr3/+/7yX/f8ADOBsxiYALjSATzvIgQlYsLmCEO4fVACACxwQgBa1TQMWCEAM8lEVGFTGfgIMoQhHSEKPuMtNktKBA0iwKoYMIQcUbFEDVHCCjw1khiZgAAhLyMMe+vCHBEGbh/84oAESUGFpvHHA+gjCG0uVJl8fEMEH8gHEKlrxij5E20Dy8QETqGAISKSAElsUgw6coALDmxYLRJCDIWDxjXCM4+q0SL0AyEAGAXjcQpK4xIHoowQmIAEIOqQPCUyOhXJMpCIX2TM3alFgSyjBj8I4xiCSTQMUYEBrKiUCIzLyk/Vylz68MspR6ixfo3RT2k5DmlKWknygdJoO0pgvipkMjKtMCB+BpA8B5EABGliCBTogRgpAKZbIhJYQwIe+ZrKgVAkpZAlUQAEXUIBUevzHECrQzGZSgAOwTKbRdEOyJZxMNwzZZRDdpYUAIKAxVChBdxpYLlT2SR/ZlBRp7on/QnF27l0HcABdckAB7+2AfIu5ggIOgD4UXeFYA6mVCa5Ql4KqoADh9CfPalmyEuASnXukwD8quE6Q/kMfH3AAAvKZLdTEIAAsIIELclCCHQjIXUIoQQdI4D0N7lCjG5WAF0gAuCEYlScPE0IFULADLnBBMgq4AgAi6oIDFMCoWC0NUKvmLgGQAAY4E+JNyfdC1rRIrNSDlwUA8FNHpUYFKTqABWQAVvjsIwAWcIAw0+UA77R1qyszpAoaJcTaNUCVkRPjpQRSKyJoAbBrI5kGcqC5IUCBCzyRFBV0IIH/6IMKQijAFUxQAn9l9h/5AIEEdqCFHejAZgzQ6rq0k4Pu/wCAC1AQggRauC0XnIAFOxiCFjhgAdL+FbIh84dQSSAAKmhhkrlUnFgbwIITlICqVtUCFU6L3KZR9wZZo0BF/1GBI54UASo65j8A0AEieMEB/7ACEf6BM4FAoQJeuAJWYhfW46apJiVwAAuoiNbFlMhf+dLBDSjg3+5GDKD8soAXaHrEh4l1N5VaLASxBoNgkYBUd3OwzPLBAJ6S4MQndsEHzCuoDiDAYCC4SkFFemIEEE0fBWDBjCsggDSGMgYKfU8DoIDUSPljBzKwgpmmxYDfNljECDvyVfJHARk4gAJsTYhY9YFeIthUIFz4AAlUwIIOpGtGUN5oA7TAZu1S4f/NUPiTPqDwIz9ywblOFchzb6qPGHBBC3TuJ7sMfAAOcCAH1qzp49TTgZcJQbgccMFa0+y0VAlhuyALQJO+iJAtBwAGVkCBHj+L6SFIAK4uAAGlVy2lu4LpRBaw2QlkIABWdYkEJ1CAXBxghT6ymmdeQWe+AmCFA0jgTEIUlBciOUlJrZJjJHDYr6f9IuOJ4AD+8QcVWHADLziySnP1wgGs4AVRP5naoUSbFmz1vIOQMy6RVNrDuFjDEKP73vjZBwdM4AAdTgpFFUxVBazwAS68abQIODe+sSVWSimAAQh1V1ySRseD6KMC9V64xgEkKFfVWjf5UAFpNRQABbjgOxz/KgDlqLhxkV0YAL7KY6cF5TsGQDe6BBlCB4zb8p7P58hL0MCxQc4C46K0hqbSggn+oV6fQ6wnXMAnaYRQXRaSi+YUP6GRh9wmDXFBnrR2utjP04Cy6cDWQ2gWCmrygROogOWnAcANrODGsSdsPVspgY6ihm2WDgQEXojbB1AwOtId9KQoGHMFdOQyDZQA7nZPVmFNypCKG4TyEftjIKmgoQagYNZUoB4KHKCAAuBTm1G8QkYjj6187CdMDuD1M5/o7gCcQAQ3SJGrgolG6okW9qSngI8UznoXCTEGHChBAYbHkBu58x8MQNxNL0x8TPnjhSY4wOIbrYCVnuaFY2IB/wJKQIS546r4/1KPEAAggfXuRJUIaQD7JUD/+t+2JzXRgpIkAIDgrh79kANyDNApJGAmCrEtDOACd5RkV6AXznZhGYMaYWYFsUcZECUpWvABFLh0RYRRAPiBIDgc5ORVS3ACRGCACbEeVnAFJcABAcAAJdBZD4hWGKMb+gACBXA4LRRE+rADORgAEgBGITiEREgbtKJjKnAloVc7dkRUT0QaekR91VeEVFiFVoga20MEmmQBLuAvCTEEKrAEorYP+fBEZ2VrmHcwU2iFbNiGhuEumsYCkcGFXuhugJdBEqAjyhdn7mZ5bviHgAiIkZMDRLUYDIWCBiEoykMBJBAsVv8gA2WiJYUViJRYiVXYABVgAbFliF14dSiwBDcgAxVQAALAAEFTMJfnh5a4iqw4dvrmBR+ASwDAhRU2ODqgADcQiwkmTMcmXWnYisA4FqmSW5fzXIJ2efsQA621A49Be34EBaHHjIETjFJCBRRgVWWYD5/mHqeXiBzQMAGHGlQQbbHlizjHVdQYJLtRAo04LFegAsMXTQLgOsIyVKUFH6miA1hhAf8wYQzgNen4InDRMBSQPyrQJEVUAYg4EEVyAArwcdoBV49njmv4IRBYEw0QAwcEJBdGGg2QkQ3QJlKoDxmpkcwXkBByZB1wBRSgAlcxN5KUECR2BS5AZh1gAQr/gGWsEgMfAAMipWMowgJ1h5KaAgA5YAFIiZSgOGuUJYm1Yla6oQUZhiyTmCvUNzZJuASE4j8zSE4ScJRWgCcugAAGl2w7gF8lQiiqRpQFog9sNgQxEANQgALzAyuDEwNUgFkxwDc0NGCslFvfIZeipQDtxpYfkg/rlzkAwB7/oANC4FkM2QApNUW6QWwW0IsldY6PcpUFIAMncAB2YQJmdRoXtj0LlQNXoQAncAUsQk4l5wDDlAO+owOGqTB0BAWN9iwLIUQcsGAPUnECZwJTVJsWKVYw5wLmJRBneYETs2w8JgBMIjVdCYGnQn1QQAImUDDJiAA
nYAHfdmENYED4/6QPQkABZ4Q2DGMFPqIh+QAFkEec9EF9QkACVlAAu4k2+1ACN9ABRHN5YOh49gaf0iJWcHKCHsIAJpADKHdSAUAid6QBBIeCUviLAWhS+uYAXvBYAhEDJOAspLlKVdk2i5JGIacB60OhAhof5EQFfoMAFKAiC9qH3CIAHKCBeGQq/sAFAlAAVqEABViRKQodF/Z1ekIQIPABF8ghIMAAoxIAQjidNGh9FXd0FeAhbbNgZniG+Lkev/VEO+AAMNAnmEMFbRKk9aEbUMACYekqLFCLMxcANqMAsHkhBLE9a1pE0GSmBXJhpkQQZOhurYRPWiqF1Ul5+aBYHqJcKwecIP/XLQVQAUvgHPmicoWWlTAwO/+np9YBcqWIACxwBf8wPuQyKRzAABVAAV5QAUPJITvAAAgAPkQgPJo6qy4BgQ3gMr4WOSJgBTcnVu9iAWEyd+bmLigAN3PBAh+ANUugO7SqLeREGi9FBESQZan4rIipphMZRB7JXgcgas36rSbhLn+mXWzyD7c6mgOhBbsqb/L5AXVBkzrwhAwANxQQetdXXScHruNRlZgIoH14YRLgQV/Wlfo2WTGjrwibHkKQA3JlASRQQQ0QbbkqBLvaq9R3GgvreB2iLyKgAVMlKSCAIvaZsNgRotXCAv2ZmUJEBUQAA7UGpRKwBC7QdCRbsxb/ETkqkF9eIHxkCEM6xJASsHIfSqinsQPzwyJ3VbEMGQOKZbObOomS6TEhJoUS8CtZ1pH7QgIx6rRcGxFVSaVW6jYUMLWWFwOrEXByZwUxw7QOQJtdKx37wAUSsF350ABUsB9L4GsFkVoAQGcgc2pOAkZ7039D8JFcoGlmE6Bvu7jVClL6NlHHtC0d2m4gVZWLYQUKMHRQAKzMGneYa5eMexz6oAMHcAWoKZsKYE5PqjhUoAIHMGZXAQMagCXVUwAk0D0uSSLdkZyhW7LTI6hLQ06plIY9MbxWSX1c0KGP12dNdgDfhpicNy0gsEGjlA9UYEYdMDUieoKjxAUq4Ju9/4scXfKpoNaP8Fhn8ccAFDBXVgADJHCPGFgCVfUPv5IDOjAuQBq+oLEP7aQj4oe/B7gtASAqH4AAB/VsXIACBByDJ1movkpsJuAFrnMCvaZPHLBQS7YDmFuTLOAyn8lWrIJ9mNQBVpA17ae/x5GMWiAElqEFSHWA7UkFl/MYPkaa+cAFLLwDVDAEZojC1QEFH7AEYnJtIGxhy4S5vqIARJBtHCIBHZBkwiJMHpW/A3KVTrw8MpAD46NPnXkFFZaFDRNMRFACq5KoQ1ACy9Y8HzCwPtzGxTcETPqqGFTECcGTlKED3TJwXsCVuNkdBaAkVkKYiluhWpQqf8YFNcxK//+Av9PiZ9qFWfDHkMn4Z1AwyG58yRpHhqXBHBbQmue4GHcBTqfBRUIDMqC8VqxydAyEyazcysW5HEhZxGeib0tgoJXpo4+1GBJ2LdRTAcKZsujoysLsc0I0i6j8i/twi0aENgUgTCCMmwrwAQCwwg0qPpk6zNiczdVRzLGsiqcBJwdQxBeknhsrACTisCVzAPGqmdrczu4cHdx8zJoplQK2A5dluzegAMMKxDDAUDaTF4n8zgI90McRz7KMEHd1F6ZLiHklhj1BYirWXK5FBBZwLFRM0Gy4LTiIAihQQIilitsiARygA0nCriiK0bJh0N5cPgLAAu71vgjgK7rzqy7/UMSap6AXvSuTWJUUmZlDO30rjdKa0SUqwL6R+k20xNO95Lpzs4KbGKVCXRsqfdJ7IwQgIAT5ELAflA8V4DE318yVESsXG8PMyCaD6rhrJgRC4MIfbROtdWncFdWcoS8OWwHjRwEm8tQ0qA8B4AUyoAIlgAAVwALCQ51ybYTo6QVe4MkgtA8IME+p0narLCklF9aZcpUAkIRikmsqkKcjWQCVcjyYZHqT8gHpcjwrZHqHnR1aUEAHpCoVMLtLVmAS1K1DkI0bRLSrndLuosuL3dsCsjenoU+CelIC4NcyRy0YJKrJ2HYUsLWAQn35wB0WwAI6EnhL8LJX+XmYVAIV/4BrbZsvo2cFgM1NJ9BvOb3bUjE9DDmLYdeVqFUBClACpfHRQM3O6u0Z2sbRk/EyhLeDMRZW1LOYCCAQmXgzdZZYDjDGpeo+guzAabMwAKBVOcptFIB/F8ahKvVEXH0CFKA0UCAA6PtHn8nL+b0ZR3Y7oKtFlFKfMVAADGA5iczTJx4abYMiDiACIgAmoQYfJWd1J2W79HtHJKAnVnpqMqAAYdkeRi7WZ93eQpuZXAADCrCWXTW7jFy5WgCsc1LjmqHcJ8iQQgR4KkIBDbNQH7CDM+jlrnEgrvpOCOCqDICCc9ljrNIAEvAsJE0FsqVPIY4CpOMYfS55wLkDUS7mLv8VbfGqITEQYB8QbHR0fVHT5WxOGBYEAArlfT+NGnCSz/YrAAhgAb8VM0Fd6aGB3zPztfpJAvSCnwXAPCowfipgBfX60wNRsDJg4qb+Fxe2Ay8ai9OZVjKwYAfM14/YuUO76/A5oWkjKJg7sj5NLSUcexQ8rJFOBc1SAtes7GUhVgmkACzAWwUmAQGl7QMBBSqQuIie3tw+dqnSLeAShHQ0Nqd4N0IExCTwAaTDAkM1dFqeA4uyqu3OF0KUQEsQ7mgY6TsQNbopEB3ul2s+8Ch5Nbunzv3EMWVjQ43bALVVjqlSAicws5XrvR6uoRLvF7rh6wQn7jxNKZZCEJIpnHv/QeMn34q78aqhUgKtySGY7jEHu+4gsKtwl6PvdaKS4r1XZvI13xcckwMmczEF5iFc3Vf9KTmEiY+lvvSVSINjEzUwo2W6EbQw8CemMUE/exronqA0q/VsMYgLflWWhVtmCMdMDMovoxPzuEIVBtVsz4pc7yVCk6Xaip6eUsYDDo5HT0MK6mx9zxYk3rH6Ja0s6e+vbkyRwtcKBQMWgLnMZd/s3vj4htlXsCh8Xrca6SGHWyqosbluN71DAADm+dyr/70wIAF9UpLbDvpOcVeMWFAoRhdFvAOArTSnoQ8AUAHE1AGKdrG6v4rS3QG4F5u+zwJQAmA3cAX4e1clHFVB/9OdH9c2Op7FBNX7oNv8wmgTkHFZTiX35FS4Ee4uHERkSU3z5v+H1IdSkZpkJVIinQiHVwAQHxr4I+hvR4UrMGC4KKGl4D4OS5bIkGFF4pIDKP5t5NjR40eQIUWOJFnS5EmUKVWuZNnS5UuYMWXOpFnT5k2cOXXu5NnT50+gQYUOJVrU6FGkSZUuTVrQKcF9T/3t28cR6j+pWZ9ufErVH1OwYcWOJVvW7Fm0adWuZdvW7Vu4ceXOpVvX7l28efXu5dvX71/AgQUPJly45FfDiRUvZtzY8WPIkSVPplzZ8mXMmTVv5tzZ82fQoUWPJl3a9GnUqVWvZt3a9WvYsWXPpv9d2/Zt3Ll17+bd2/dv4MGFDyde3Phx5MmVL2fe3Plz6NGlT6de3fp17Nm1b+fe3ft38OHFjydf3vx59OnVr2ff3v17+PHlz6df3/59/Pn17+ff3/9/AAMUcEACCzTwQA
QTVHBBBht08EEII5RwQgortPBCDDPUcEMOO/TwQxBDFHFEEks08UQUU1RxRRZbdPFFGGOUcUYaa7TxRhxz1HFHHnv08UcggxRySCKLNPJIJJNUckkmm3TySSijlHJKKqu08koss9RySy679PJLMMMUc0wyyzTzTDTTVHNNNtt0800445RzTjrrtPNOPPPUc08++/TzT0ADFXRQQgs19FD/RBNVdFFGG3X0UUgjlXRSSiu19FJMM9V0U0479fRTUEMVdVRSSzX1VFRTVXVVVlt19VVYY5V1VlprtfVWXHPVdVdee/X1V2CDFXZYYos19lhkk1V2WWabdfZZaKOVdlpqq7X2Wmyz1XZbbrv19ltwwxV3XHLLNfdcdNNVd11223X3XXjjlXdeeuu1915889V3X3779fdfgAMWeGCCCzb4YIQTVnhhhht2+GGII5Z4YoortvhijDPWeGOOO/b4Y5BDFnlkkks2+WSUU1Z5ZZZbdvllmGOWeWaaa7b5Zpxz1nlnnnv2+WeggxZ6aKKLNvpopJNWemmmm3b6aaijlnpq/6qrtvpqrLPWemuuu/b6a7DDFntssss2+2y001Z7bbbbdvttuOOWe26667b7brzz1ntvvvv2+2/AAxd8cMILN/xwxBNXfHHGG3f8ccgjl3xyyiu3/HLMM9d8c8479/xz0EMXfXTSSzf9dNRTV3111lt3/XXYY5d9dtprt/123HPXfXfee/f9d+CDF3544os3/njkk1d+eeabd/556KOXfnrqq7f+euyz13577rv3/nvwwxd/fPLLJ6wg86f/aqp++okqfef9oQGDFlqAYArE4E/enwaOyCCIIESgBVPQn/L2sYAMgGEAC4zAEfJXwOL1AwILpOAAJvBACAKPIP3Agf8BFFhBA2Awg7ybChZCcIQJPCACFcwABEQ4QtxNxQcLaMEDVpAADGzgAQvMwAZoAEMSGmEEGDDAAyZwBBxExQwLSMAFMPBDINoOMf64Rw+KkEIDpIAGL4xiDPdBhhBc4AEZaEIU+tFFEnJlH/ObQAYesIEkotF3/jADDq74gCz6oCpy3N1UwJiACaygBQvYIh/7uEYiunEDPegHFw0pO3/0YwR3nEAKRmCER95uffu4Rwg2EMgWYGAEe8ykFNe4gAus4AGDhGIppShE+hlxAyEwgyulSMcuQACLliSlLSG5Rk+qMpSF9OXspkKDKIhxlVHAZDFZJ4SP+MMIPiCiEY//MAJHOtN0RCgBAGKgj3tMUpUGeGI2tRm6JTjgBCLgyAlmAIQ2RsAAIWjmOVW3DygU4ANe0MAJxlADAmQhCwTwQAVAkA97us4f+UCDDQYQgSwggZ0iEMEJZKCChKZuKp3cwBluuIAvsEAG/zhBRlUnTWq2YAUTmCUZCNKGNwigAi4wqekiOQJdGhECODhjTW1qFU56MoVNwIAefXrSU6byhk8g5lFPJ80h1pClIeipU21KRxykoI0TgAA2rfpUYG5AmIT8KlhnWMMb0rOsNpWkVq05yrWWLg1m6MEGmIDHJ8aVdH58QgsykIEWqFWvokNpCop4xEsOVnRzrWsgyZlY/8V+zo9RqOEymxpZzklzfkVcwQa6QAbMes4faeAgBA6bAhzUM7RLIsieNipUvOJvtU9qLZ6OycQVrOACZJ0tlOjoAxz4oJEs2QJHbuCFLEG1mkekam+jJD8iTqAJC6hlSvYBAoz+wwQUKICVCELXnBqgq+Z07pHIkIIdBsGChCRDe9t7j3uYQb5maJ8RjEAGCeTABBWlgAAaqRX0mcQIIUhAFWzkR08+AK8NKC+VejCBCg5Atxug8AaOUIQiQEDDEEhBCjCAgSPYgAA1eIEHfrCAEERBxSFgMYt70AMc4GAEMx7BFHxwYx/QQMc6XoABBhAEGt22CQpOQAhIWdsGO/8JgRG2YAsM8GQoG2ACE4DyCmyQBAIQIAkZYMKTp/xlKkdZzFBuQpmbUL8LqJSCMorkFLSaAa7KNslVCsEOK7iCBYwABy/uQRda3OIUKAEJNaiBB8SwAEQnegEf/nCHU7BhCBThCBWmcAKaeIEEGGCFC3zRa8W6AnI2lbxzPpIPLrDpAWTgCK0EiT+GUAI1iOAGB2DAQDzylDRQRdf27cd8zXCP9u44BBCu4Ipui9Yi32PUpF7SPnpw6lRvYARpGIkQPqABdhIhAPlYdkrIUIQMrDlFmk3BCiKwgq72ktlX2scMMUBIai9lBBsIN4pGS9dP4tWo645cCZkYyN3SIN7//H4cSqvZ2RDcg+CQu3cPTPuAKkCgB6p9WhpGkIAMLFwsC/DwP2ggVEEWtdtG+3bGB6BxsVjQAGXQ7QJAazV/PJjTKAfLAhVogDhabR8joOGmaQ6WCKwwAgvoyMiDRsUeGJbKK5j5z5dCQQxQLQ0f30AVJhDKHvT45E4PSwikltQpJ4CQRoik17nOlIxD7aaGXeksaaDusw/OH2A8gpSbYEmXxt1wo6XBvycgdh+QXe+AWx9H0uADwzJhAkXowduNPni6DTjfj1U25AknP78D/n2WF9zO6bfSI4TA8ZwPHB1DUAQptyAFPSBDGh5PerdhHpB/X8AUhgv7v3m+BVTe/0AURo/7viFd6eL97OaBv7epC3UCThyBGV5//LTJEJVhJ+TtoZ83Oo4gBbs3QOjffv29zV35RB1B5cEvtw9o4dZ9R+sFfC/4889tCEDVvkqZ+/34yy0jbeCKGU6YektyvvyTmxu4ggLQB4LouwRYKcCDvwFsGwrYiBOgAAD4h51jO/ESPeN7QLYhgo1wABagAo4CJdQSQA6MGxFQgw8ogCf4JNoLvOc7Qa2JAQSQgRsQAQUAgh/INAPovd+TQbbhggpQAIpCAikAtQtYPTLYQCBMm31YAxbANoBKApbKMxNsQrbhBwBggzCYwqmiAQfEwrXpBwEAgizLghVgvCWMQf8xnBokgy4R07If6IIrbEO1WaNkyoAs8wAE4AI2tMOqmYqoegAneIEamAEdiIE/BMSpMYIuqDtCrIExuAELCAB9YES0kb0zmAAbmIExqCgSkAB+wESzucDda4IN4IEZuAHtUgEQIEWyoaL/W74FiAI3sIB1cgBYjEUFnLINYCooYIADKKklKIFdDJt9yCruKwINfDUrYCerOEauET/UW4G7wwEzoLYtuDZolMauwUMXZL7bwy5v9BrpuwDeY6owJKly3BqsGj6J+8F27Jrk24DUE6U6nEeuOaXdY6n3W0R9PJqdg4Az0KnGg7uAxBrT+yRQw8fCS0itmTq/24AF0COPgITIoblAAwA1CNDAi8RIoFlIUHJIkKQaz4MAQsK8CyiDKrTIkjTJEPCrAXiABIAAleo+j+SKl4waf/CBFkC1VFO9HrgHJtxJtYuCcKugB3gC6zNKqvGHBaCgTVuBEXBKhayzCGsBH7BKrKEBcAMD9ZqABUBIrlQ7H4CAcaKujyxLnqEiHxgBMHQKtvyMgAAAIfkEBQQAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwABACwAAAAAAQABAIAAAAAAAAACAkwBA
CH5BAUDAP8ALOQAZADcAHYBh+TTjKSkpNCvNjMqCuLi5EI3DGtpYyIiJLi4uNu8PDw8PHp6eo13IVBQUebm5B8bDrCwsKKJJ2VWFMCiMaaSPGNjZPr6+KqqrH9sHIyMjN7e3PftvMq+lHFfGFZWVG5ubCkgBricLerq7JqanK+VLObAPSgoKEpKTFZHEsamNDIyNERERJSUlMmrNJR6I7+/vquNK4lwHzY2MxYWFNLS1P7upFpaXJyBJBISFIKChRQQBMrKzEw/DZR/JPbabPLSVMbGxHJydO7u7HpkG15eXM7OzJ6enC4uLNra3F5OE+rKT/Ly9NbW1IJ+dIaGhAoGBPDKQBoWBF5KFO/FQPbORComCVpWPN62PA4KBKqSJAYKBAYCBA4OFBoeHKLosNLEEGB0GNKytPTI1AQEGDZQFNTm2MDO8NTg9PCeMDZwONqmeGJUKBJgOCRgiNDa2HBgiGyiJKKkdCTAgF5iQLyUMDhISCQgwFRsiH6SXEI4LFBaGGxGiBoWHGwagAIGCGJ2YMTC2LjoMD5YQCAQFH60hIRmWMim7DA6EGxa0E5UPGp2QODYbMSwEBwoDGJiGIR6DKJqLDwqRAowHEJMSOTa9HJgKD5IMKqyxOLI1OZ4SAgYIK6iLL6ynLKwIBIwRFI4EEAeFIaKeH6SsEIqHIRqNMDGsMictFJ2UE5kUO7auMgwgMCcELjGzNasVMDw1NrItDYsaOj66NT21HZsiIRWIGZ0iNrI9BoGIPDwMAgIMGJAWBIQYNakNObAKBIwaH5AOOzq2ODq+LqkUK6elKKw6CRg0K58HAoOCJyewKKUnMDYxNTC1KKCSD5QZDQeFMh4yCoyRBoWNBocFDYePBw4HCxQTPba5MB4LNaYQK6cDNakEK54dPC+EHSSiKLEmO6myNTguFA6WPCyQMSyuLbEMDYMQL6y0Pr43Pro6ODChFqihHZ4lGyi4IaciGzigJiyrFJAMKqylHBUUJyGDHJSGA4OBAICBAICDA4ODAoKDAoKBAYGBBoaHBoaJAYGDAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECM63LfvXz6B9iRq3Mixo8ePIEM23HekwgInGf4ROVBRpMuXMGPKjGnvQQYHDv4RwAmhQcuZQIMKHUpzhkkb/1bYGLGEiQKiUKNKnYrQntWMGPEFEPGBqtevYIniYyEiCNawaNOqdWhvH758+BQAYXJird27eP/tO8EiAAIRGhZczEu4sFR7+BYg0UBAQ4YDhiNLDtp2Bj8ZC4AEMHF2sufPHa8izkDAyWDQqFM3FG11BZIXkFXLnk2QdVvXL0zQ3q16Hw6KVvfxY7GEBT7eyD3vUzAiSIMVJyoYWVLkROfk2PHupeEAyWIHBHru/7uevXxae/kUmHSSwzm/q+bjy59Pv779+/jz69/Pv7///wAGKOCAbeUDVz7wJYQePgwy+A+DCGLkVoMNnjZgdvuo8EEGfv0zg1UKzpABEC+UiMCJL7DAj0D5fPACiiYuQN6FtNU0ghAOiGABEpzNiNFwO5T44gsOWFDEig8GYIEGCAiJgIw0YmcPDkQs4EEFIjBxAIgK4vDADDPggIMKEAjhREaIjeBABfjgACY/OHAZJW9tWcUPElrKeRBr/9hzhAZIyJCVERqs0KdtcyZ3FT8E5OkjRqy1aAEELSFmxJo4HDDDeAkmutuijW75qG32mIDAmmcdZ8EOFyAQwAcyjP/nKZ2gOqoga/vYYOR7WOXjBBMIQPACE0IgUcFPs6rGGqO2VsXaWMUluI8JR4xHEnFIGJqsbKL9w6yot4qmAA3ZkrosUyw8um1htn3brUGRfmBBAJySSpAHq6q7bl7tNvrAuwWx9gAEDhhgrpz7NGABEPrue1e7GjDxb6cBB2eDCLAdnOoCFlzQsMN2XUWRCXiaABxW0/YJKWIsCJEDn4jNkA9FvtnABAEVfAzyWiQtsEAGS4iQQRALqICVDDtAgMPKChShgXXd7uPBBQEs8MECAeA0wtI7f5YPliKEnaMISATR0goOvPDAQOgt4MAFcUq7AgQ7iQDeCwa81/Vn9hz/QAQRHgTuARE26CYQPs8hW2rhALdlwgoNBL6CyTrvPVnllmeu+eacd+7556CHLnrAHGE+OlBttQkmDjMfyidGMzzAz+z8gAmmyLHTbnvt5p4elT0zLADBDngCMcIJCNpbU5k4gaeBA0vkdhsCOOJEAAFCLEGpvb4TZY8Mw0IQQABEOiDYwfgEMQILGbTPQhEL42PVAQuwwH77W1lgRO/dE7XPAfx4i288sJgV9K5ANCsVBMoiGprRbAWAMiD3+kcVfFzAAUQ4y+tqowAR0MBwKwtOBSyAgORRjIJQEdkBEKCBBmgQYAPJB8eMUC/bzGCBZpkgCoUyrRUoJQAECACS/0J4HXuoAAgEcOHBViCEIohKhzucCQ4yQAPsCWEzndmgQPYxQiDIz1wyjJbGoiiTq+CjAiwwwgVSJIMinhAHRnCZ62yjAqdBDUSIIuNLwEiEIgDhKWyD4T5WQAAaHKFiIpzUF194Qj1+5GAydNlPtChDC4yAPIvaygcaB0NHzoSLVzwOEdlmghcQACl7kpoDPgjDTnrykdfBRw6EkIHB5LEtFdDeDFRWm9Es4ZKv0+IrPQK8I7zFQDNowA40kEGrpMdoWLEKHEWwgDmy5ogE8IA1oThMjrRICDsIQF+AsATHxM0eK7AADYbYlgZETAGQ5BgClqYx03VTQSoIABCQ0P+YIgSgAYu0hwIIgIBdYiQfQUCCcbZpFXxQMQiB5N89H4kPfhzABCZ4gPzmOK0tBRIHR+DawfhhzF4Kc6IoTak9U8rSlrr0pTCNKUQ0yMuFHAxe0WykTDeCA2qpQAUm2JRNcSCDn/5UBgr4V0T3MQMT/DSoyNqpRIw4AiSIQAhCCI8HRIkQfDjBAhYIW9gIwIK1uQ4fNqAbVoMISKlqZJABgEB7sEYAakaVIPnIwRICYIAKVMAABlCArKTpBCHQgAU+64u23DpV9FQqYUtYZ0LymsScwgeXb+uRM+/KWLZkkR80YAIIA0ZZn9QQj6Vk5cn01NmItIUi+ViBCIBgUHj/5ZWWNrDBCh5gwj6dYFL54EcDdPublbYWMSswwAdYQK5NTlavdgsbEzLAmS2OEAEZ2IFYX0AEzraWIaUygt2WYD6uHiRhGahAA4hAxSvGppLUSa90hNBC4zIWPSvwqxPEZ0AFGUhkJ9CuWSzCsbJ80aHaG+J3Z2oxGuQmXHgMYwCOU0kafOgqrsnWgl0rmnzc5AOc3eA+iCACBECGi/kSTSmTuOGH2AahZgoxwEAJgbWh00g1PAIQntZi8NZJNPy4oAFkXK8+8SMAtLxIqZYpQfRg6cE9VshyGmCC2l0mAwsbbUGoJAMryyADhrVOn/bxgRKvYHYnINICvBvlw5GF/wbBegEShAAED/hIoFl9gbA00MQK2BJ4aiJAicBT1jZLWQFOuAAQ/BiAIGh2TzjIAfl2AAQILOAIjDwj+V4QACLEydAK0ZMrEck2nCZo1KBONb9QV1NV
u/rVsI61rGdN61rb+ta4zrWud83rXvv618AOtrCHTexiG/vYyE62spfN7GY7+9nQjra0p03talv72tjOtra3ze1ue/vb4A63uMdN7nKb+9zoTre6183udrv73fCOt7znTe962/ve+M63vvfN7377+98AD7jAB07wghv84AhPuMIXzvCGO/zhEI+4xCdO8Ypb/OIYz7jGN87xjnv84yAPuchHTvKSm/zkKP9PucpXzvKWu/zlMI+5zGdO85rb/OY4z7nOd87znvv850APutCHTvSiG/3oSE+60pM+gH9A4R8lKAHUoU2eAgzhHy24QtSjLgBpYwUFLphACGLwjyFMoARTEAAGzOpsfVRBAhFoAQkw0HR7PIEHMEgABnTw7HoMYAghaEEEJBCFgmyhABPggbN1UAAXtEAAN1D8nhCPAmVnJApJIIEAJkD3UA8gBB1Q9t9JkAIYSEAvNgXB3JMdgxBMoAcoiAKba/MAEjDA2EmIgEBiUAAszDQKEbgBmoANgiGQQO4daLpGdNADGFTEvm0uAAZIMIEbSAAEs1+IPqbPd14XngETSIH/C2L/kSd0IARsxzUIJNCDf4QAA70PyT6SMAHl47oDWZjAPyRgf/nzoAUFcGvS53o3IAXpJxL2UAAtUHmz9gA80H7iJ3llNAATcHqxtn7H9379N4HvB30vNQDnlwKD131CYQ+qxwAe6FICIAARUABP8DsPEAERkIITtQE+oAT/AAAc0ASC8jvMRwI02E0lkAA4GBb1wAAh8IIb1n88YAUpARbmNwGFd1//8ABJcAMhQAIFwQVhsQ8SkAIg0FkgSHokkHx3sQ8oIAABuFP1MIApcANJgH14kYACwIAwFQVg5363R4JzOAACYIEutX4RkALvt4aGYYITMARB2DlYMIYC/1CGILCIqPMAYyeJmcN4GECAKMCHklETMmiJXfMAKHADKTABDOCCqWEPOnADJLAFw4SB/1CGG4gaWNB62dc9E8CC/8CJqHEc+nB+9aBHPBABK9h+SkgbEGAAMnB+YYhCzZgCJDAEA3CMvLEkACAABsBbFER9g/cP1JgcFrABSgAAxlMBoNMWAtEDKZAC/1AAOgCKaWFKG/ADNfAPFrAESOA5+/AAcCcQdMeL2dEFCmAACQAAIrAEnwOCMMCO+lETIUABBpADL9A50yeCgJgfOhABMbAF/qA5ooiEEzB+AGkfqwgD39g1ETAB0WiI/YEF4BeMmYN8kRgg29cCfAeP9f+RBCPJH14oAM24OazFk2k4izSiDz+5LXTIAzi5GwVAdvq3LiYoAEmwlLPRAxPwlA7zAALQAbcYbVGQAhjQldCmAyHAAL7HbRl5AztJbfXQfGs5bduHft0WhVXQbfNXf922BTyQeN1mDxSIAlSpazUxAR0wfNpGlmHZbTpge/qgmMH3ltKGBS4QAZAZbdtHAgeIbeYXAkeJbW0hAXgZmLi2D3vJktmWgCFgh9pmgiQgAaKZa1Ewd2L5bIvpAme5bWlZmdAmmTAwhdumD62XmddGl3YpAQJAlNaGhmrYlwWQAqp5mhTomtzmkEMwm4s3drd5mCSglo4ZAcJpbS5JAp2biW1xiZzV5oUTYJrYhoYLmJeIlwR9SYGhN52UqIiKyZiKCQPcyW31cAMw8J3VFp7jeW36MAQwMACvaWuqyABg+QDWmWyqaJxQkAAwwAMPemw9OQVbBwPqKW31EAFQsHUaOp/WtphUsHVRhwHYhgU9gHZb94frWQAkUAIhKgAxAKDQtg8FcAMtkIgDSm32oAX1UA8XunT/kaCTERAAIfkEBQQAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwD/ACxxAlkAvgP3AYfAozPg4OBqWRXMzMzz3HddTRRoaGh0YRuurqxcXFw0Kwyzs7T87Kg8PDx2dnInHwWKioxDQ0S8vLyJcyDt7e7qxD5EOAw0NDQsLCxiYmSYmJjTsjcuJgvQv4JMTExRRBCOjoyzlixJPgyhoaH9/fzS0tRVVVSDbRzExMRycnSnjSqOfT/a2twbFgR8fHzm5uSGhoQ8MAx8aBxWShHW1tRubmyYgCQmJiShiCWspoS7nC2tkyzhvDz29vSmpqSqqqyQeiMeHhyujix6ajySkpSfgiQaGhyOdiPGwqwhGwSCgoQVEgQiIiThw1LIpTQTDgTyykTy4rTavlz+9rTmvjwSEhQKBgQWFhRiVhSukiQOCgSinpRmUhRmYlx2Zhza2szq6uQGAgTOMIB8ZpjQthCqoMTyvszEeCxigHyqxOzI9NjOuOyqxHgGDBh05KhCeDjO5jCwvrS2mAzooLASHhR+gHCwvjCQpJAYEmBENGyqhEiQXBgkQDQSBBC2eHQmxIBCaIh6mqBmZpRANETWzvRkahjEoKyQmrhAQFAMCDB6XAhiTCiwoFBeqIyKdpBMWDQsaNBkfBiMRhiyppTQxNB0qOSqzrTQrlR8XmwGBBh0Rhh4gJzy2sw8KDzA3tzqvHxiPhgMGjT42uzO4ujAxtjQ0uDuvBBygHxuWmjCuKQ0QkSQYoSiusB4fhjOeMgUFCBecmg2WExSKhCioCAOFBwCBgikhAwkCCCGuoyGmmREUFgEDAjc4vQMNigqFBRMYFjq1Oy2oOiUkoDoeEh6RmwsJMAkLkQeLig4WBRQUGR4bAiQfGx0xChgWnS8uNB6YNTeyNCUahgSDBiWfJAKDgiqupzW4sTA0MzQmDDmskB4gFze2HzYuLiqlqDunjCq7LAkGjTg9NjAyvCGghgkPgxSOBBQNDyUgAwGGhReZnhqXCgqNCjGuMQWaFDG3Hw8PChCJhAYNmiwusxEEEB6HIC8xsSqbCwCAgQODgQKCgQODgwGBgQCAgwKCgwGBgwAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDihxJsqTJkyhTqrynjwkGgReC7FNJs6bNmzhz6tzJs6fPn0CDCh1KFGWVBCMWSJDwT0MCI/eKSp1KtarVq1izat3KtavXh/eYKJGAYIQGBAMGKAnyta3bt3Djyp1Lt67dt/f6YWiAwYgRDBlQsDAR9a7hw4gTK17MuLHjnfciE6yigUKNmY8za97MubPnz6CFRo7MxMeLBIVDq17NurXr17AN7wvSIEICIiUQvIzNu7fv38CDC8dYxYCEAQEoLPCAebjz59CjS59uuF8EFzA0LPBhQh/17+DDi/8fTx7lvX39rnhAIIF5+feu912oQWTEPyIGMDSHz7+/f4Sj5WVACRoY8d+BmYUFQQklpFUCCwsk4B2CFFYIXoCRRTDAArtZ6KFdLNnmQQMNeABDCRJE8OGKLPKGYWT7GBCAD0y0aKNbMO4z03k3jPBCCvvdKOSQjN0TRAR9XXEFEyZIEEAK/RApJVaSEXQFERQ4kNqUXHYJ1z0eLLCABiAQ4cMABGKwpZdsApXXFX9lkJYHa7Zp551S3XMBCEqhgIIEIxhwwz514mkoTf2YcJYENAxgwISHRiopZPv8VVsEFzDRz2iTdnqSPhkcRwMLIzRQqKe
opqrqql3dUwUGe7n/IIEGF5zK6q245qrrTQEGAQMNMEC667DEFmtsRhju48GGNxzr7LPQRivQi8qi2KG02GarraT76INhEBq8oMEV25Zr7rlE3hOBBkpkYEICKfgQAAoeoGvvvfj+p+4Co7LAQgADaODBpvkWvCpLN5RoApIED8QSEwpH4K3B4rF0gQcJZJBBAhHIVCXFIEfKhAvsoTDAPwvUEERqR40gQQkBLFCrrSELB+M/OhLKac083xmBUhq44IIGA7CgBLkCBeHCAj8s8MIAEezc89RUV/0WEx5ccIU+/RghZwkq/pPXXkxEcJypH1ut9tpsF6XjtJFdMUIPBkSFIQYLDIB22m33/+3337xGRhnddgeIt94BAq744oyLNFoDjNYrtuFnJ9745ZhnHlFkQYAQgAZsTT4aBpVLrfnpqKN+zxUu0LCAqXCPXjrfqYeWI8460yx2P7z33g+hDsMYZe61F98W6zS0tx/liJtu/GdBJACBDwsg4BRUtoKKgJjc/1BDFdNW4YESI2wf6A20P6++VMgrb5Dszae//mP6OEADiuz5CwL6AF4BAgUD8MEIRiDADIAPZyYQDArY86AfRG1+ECTKPYzggg25JyGH25vuIogYfSTABRG4QRBukAAJUEAJUUJIFSBAA5X5xS8Tw1kDXOABDDABaz+gwAhqxMEe8mSCFfRBBP+Ah8EFoECDPtSMPrxVmH1kgAIICN1BVjgAyWFoWv3QBxH3oaEqJvGLN6EgwARlhCAwIQgxZAmsEoACGuQHA1XYIBjn8qIMxaxZKlxQCmDlsY/V8QYIGMwcB4kSE7CABDEbIAFHYIIU7sMETUMBBSiAAjElIEiEvEsdnQhFAyHEfxRIXiVB4IEYii5AETBZ2DLJSpBEoHw/QIAst/eDBDjSA+VDwA92KUsTYLKVdHwRBhDwpBQeRB8GWMAIIKABCbBgAN8LXoCMAAJxIQ2Y2NRIP5hwg27eAFawiuNA9PHNf9yQmzcwZTaDmTgiBGAE/EPIPlxihCpc4QIuaBBhYhf/GX2kYF4PXKdAB0qdKHHKCDBgAQL2BiDJjAZcFABBCjmFzBKgwJfyI6hGN9qbCSpBoUN03vtG48QA/ICH4zTAAFBgS5Fy9KUwXY2REoqAkLpUmjBKwB0JgsyVtvSmMQ2qUB1jJCXQwAcNyF1GX6QPJVBAAwf8R08l0MgrDvWqWF1MVIy6AJvqSH76wMAVCLWPKyQAOXUTW0UH8NOvZvWtcLWLP18ARQNkzAA1MIE43/czHyghBS4YAcyIwISoPLIElHTBXQ2QATzGtXh1BMspCVJHyz2WK0bQAAlIQAF/BeCzryuUnhbwrxe8IGYGWFlU+uEACnA2AP/67AAyIMfL//ZNLyYwQApqkIAGXIFmY7NNCvaYuLBkQGjIdYDQBmbbrezjBhGI7j+iG90LqJMg87xAA6bbAE1xykiXmi51GwCV5mZOTyNAk4NQAIEL/FIgykpvAHrQgxokbh8N8EGD0jIAGrh2BJ40r4AHLBoMKNcAJjBBCn5AKpQapB8e0IAGYIACEqSguFUokQc8EF0X/KsGxiSwiEecE/T8DkYNIK3kDsKSIFShHzCwsOXqaEgO1ZbEOM6xRzAktxeYICEYcoGMF6IPGLxACcLSsZKX/JHI9KMKVWACG4/Y0AAJ+cIZbYDJ6MTkLntZI64yAQTMtNJHVXk0V7YsdmugwwB/+f/NcHYI51yAAph9zrEjtbKMXRoE0xjgvXEOdKBHo5cIeCADZ4kmi4O8Z+fdw5ASqJWgaTIacl6gLww5zxXKmamGTeueF9Auibp740nja5MX+EEJfrxoTqVZzf/oBwQoAIEkm5okKIYAU1AwAgnZCr982vUClJBUgtTAZCbjLwpvXbXKnicFL6h1nV70atPp6WW+ZPZJIpNiFsxKvyVIga0H0o8EbI9MzXTdBVfYAwm4QAkwgAEEsq3tnjl7HzV4ARH2WhBG21fN+0iBDldWb5MYwZ1E0Fr0VrrK91XhBky4gj0xAIOnIk0fEAhADZa4xBdntOD3Oo9SwzKCLDUnxP7/tqpAjOADywAa5BtRl0Vh948rvcAFIWaIB2LGQ4wHIAE4LTXMsaUsF2QgArVBtLdpjgHFHjAvF4iuD0iggejqh7ImQJGkhx4SJ/bAB093Igto9BBOIsCTK6R1bchLPK4bLEb9XWnRSuAD5gqkxru5BwaINgDXPm0AIHCwPv4XLLeHpB8u6AEIlJpKKgNZHxczQQXdiJkqgACR6l2AA1Rr+IIZyQQOgAEIIKDYQRGk6QaclhEMQAQQuN71RMjANaWaABDQvPMdsTwFXKDUC+RtxQfhIgL+wYJ/zDaGHtTAPxLA+hK8AARuxr29RJ5F3umMsknOC8e37+ndaVH6HrlC/7gcoFS8lQD47zNCbgP7DwjE01VG+F2l2Kjxl4P//uvUPe8DdIHjoD/4sXYD+QR9QccSrTUCs4d/CjhQTaV4xnQPWuZ4DTFaKBBQ7+MByiFFC7iB2RQjPTACUbV8LPADeMYQQTACLHBJCYGBUcSBLohNMocCFzAQ/hMAOBc8CqF32HYqDegDCfiCQPhFExQutFIFQTAgUFMY+kAiBhUlDcAWVWAEe/IPuhE3/3AB9fQqDuAvGWB/QfiFz8NtCxAAs/ID97NxAwE5PjAzQUAEA2A9GuADKPAPFzUTXPQDKPAD/+AysOUCPwiGxpIsJzaBM/E7CzE8QgeEuXYcvNYdqf/hez8gaR7kA8dhfAvgAhcQYrJiMnSoAXqViIDoKRjCBDUAArK3EHnRAA7QfiCUc/9wAxkgbzCQAUxARKEIZILTLH0RJM9Vi+QWBBgwg+akTvvwcAMhVrZ4i4E4GqwTACTAITlYbs6UFvNiZgPxSjRAAyZTAqWSjMr4jalDUiaQN8pxLQahLG0EQhHgAPq0Iz3yTiZQIhrAAhpQWOB4j+HIbQOkBBtijpTlPy8AA3GUFy4gLgayDwmQPLBzDzfgAyyQAa6IjxLJOEYAAQtgAh4gAdAIIHjDAsCnIRJgKkUWUVF1D601LhOZko3TD8bhAhlWSf7oMCApjAIxTCVwSbr/h2QBYgAUQIIq+ZN/A4HKpCZatpHBl5AI8H6ZVUysNSOF1U8Q0ANGCZRUaTVtSFWEUpQxCV8yQiMBciU26C2pZIN9AYsVFmlVmZZU0w81IAEbx20LtJU4YwBj95RxowFhKVUp8CDKVD59h5YpqQ8RkAEGgFd4lQFqAmT9AF0Z01tM1EQXUJiFWQM1YADFppaTcm0ogJiwMiAsdQMRiZCuoyYPNQIatymrgxRL4QMG4AIxI5ehqHfywgIlkI3zooIqlEwOoo0wQJoCcQUZR5v9UgI3iJmSAiaIVUlK0XeURCvT1ngaZH4YBSNBEGoY4EHvpIH3qIMsMIuT2VinwpAa/4ACQZMCIIAC4hJPmfVUzCeZt2ech7InsyRLkkQBEgACW1cQNzCGqDEtHuA6M2NZgxcADhCRyqiDHKIjvSM/LEEiW/NkGVl/v6kBq6ag1gefk9IPI+RNsLhSJiAT8O
ViSqgE6UkoAAkB5CI8OlMFGVACMgOKG0g6C2CXsEZZI1dkPaABEyJ+53ceMIqhQvIiWlklDRAoEwKBgaQBG0MEyTNEu+MBLoBg0uMnGKWSpEOcGhMBtbhUL4J4ObqjRMh8JoABBgqkbfIiqWRjhZEBLwBg08JF6dUgJfADdidVBlBn/DUCA/NxoUg6PfBZAYAiKfBbZxYggCSh/5BZJPAC//+ije73o2bqIS9yBRjJRALBBAYQARN1HlijMWO6PEGAMYzlAVsKqQq4ejXAYSYAAjRggyFIWS9SBQVZhQKhD5K3MFDad0QQfZEaKWpmqpjZLag5QZ7ZcLAaIMikjczhUEtkN7aKAgHwZ706reYSIEzgI/bVav3kmdNZGGuiDy7wVONGreS6jJU2a7ynrRVVhzU6LQnQk39YrvKqK7H6P+m6aPpQAyhSVe06l18Xr/MasKpCLWaTguE5Ve5xRWnDkC1XeAL7sKyCX0eHATdwARngND5Qgjw1IAGgBH1RRhEHIxiQAFkDKyZQciEJrBC7si3SUwOwFIJBA90IIP3HWQv/JCZL4ZKx1iQWJQFtFDP0xrJCyy34pAEj8AMjAAIJMCjhKRa65AOxtD2aBz4MyXpH6wMaUAPuNbRc66vdAidBUE/DmhBlFQRme043RKi7A7ZBcAXd17VwG7dyO7d0W7d2e7d4m7d6u7d827d++7eAG7iCO7iEW7iGe7iIm7iKu7iM27iO+7iQG7mSO7mUW7mWe7mYm7mau7mc27me+7mgG7qiO7qkW7qme7qom7qqu7qs27qu+7qwG7uyO7u0W7u2e7u4m7u6u7u827u++7vAG7zCO7zEW7zGe7zIm7zKu7zM27zO+7zQG73SO73UW73We73Ym73au73c273e+73g/xu+4ju+5Fu+5nu+6Ju+6ru+7Nu+7vu+8Bu/8ju/9Fu/9nu/+Ju/+ru//Nu//vu/ABzAAjzABFzABnzACJzACrzADNzADvzAEBzBEjzBFFzBFnzBGJzBGrzBHNzBHvzBIBzCIjzCJFzCJnzCKJzCKrzCLNzCLvzCMBzDMjzDNFzDNnzDOJzDOrzDPNzDPvzDQBzEQjzERFzERnzESJzESrzETNzETvzEUBzFUjzFVFzFVnzFWJzFWuy+56EFWuCNW6yMYfAAB4ADOHAAD6CyYTx09/AAE7ABFVABGzABabzGyqgFAgDHcSzHAuCFdqyAS3AEUFABPMADcQwE+fDHof/4BBMQx4UcxxOQyIoMhlZQAE6wxxUAADPgx5MsffewBAIAAHGsAwLwBGrcybf2yQLgBEIgAvhwyqicyhxQBDIgybAcy5PWAkAwAVpwy7g8aUtwAjbQAr8MiFogAyrwAMVMyQIQAjGwzF+4DwUQAhbgy9D8ZvvwASFQANesiCKwAwLQzS94DzGgAgdgzeLMZPfAASpwAmGQzht4D0lgA4gMzxu4BBNgA0tgzwt4zCqQBPysgPvQzAoQ0PgnzdRs0Pd3D9r8Aeis0CJ2DxYQAgLw0BA9YOS8A+d80Z3Xxu3MyRwtaPMMBFYQ0oaHz0XwBCbtdv4M0Cs9dAMdAgpg0S//HVf3UAA6UM01DXIMrQMzQNM7/VYT3cdBXXAKsAMyANJFvWQPoAITUNJLzWxJgAM2oAVRzWxLAAQ4sM9XbWpacAI7oMxdPWlWQNBAPdYvddM5fdZozVEfAAAFoNRtTWAWQMpyrcSfbAEF8AEccNfSFwMaLclYfA8tEMpUsAFFYAF+3XnsfAJWncV4fMmOXARiHYoPQM8qncVT7ciGvAEfoIwtcATDrMUPsAMVQAWPXAHcfItPIAM4UNlXrMtUQMiz7QQWoIxlrQIFfcWG9c17TAU2ANtgyNA7oNNWbDda8AFFoAMAsAHBfaAT/dNbbAUPEAMxcAA7UABQHYpDvdhL/xzaKhADbO1lCmDOgh3GEh0CE9ACfIp/HIADMoAPinzMOqDdlm0DE8DVdrzORVAEMw2IoT3aipzNGp3ZX/gEJ/DandwCJ6ACIuDdIJcPAqDbnUzOKgAEdRyE0qwCxq3IZU3Rj+3NKuDQFZ4EWh0D7xyE5UzUnbwP37zeX1jeXnDeivwE2G3fQPjeE2Dgk7zONoAD//2CI03MsSzNSM3jG6jLOODSsczgDg7ht4bgyfzL5IwDGP6CWoDdu43LEk7RNH7QBVDc4/3DbQwE4T3mBMbQIUDiv+ziOwDj8dzdy2zj2wzlghYDISADIY7LPg7kaC5g7Jzf0JzNeY7k4HfZQP9A5MsczDvw4Auoy5QNzZERA1ae4fcXzBR+zZXs5QqY5c4szklwBDgg3mCe0N0cBhMt6Avd0H8uxMe8zduNexnN4tfM3zYQ5NJ31Afw5cVs5Adg6G4X6PrdzU7+AXbeZYjO5N1s4Rje6m/VAvks3Nfc5QKw5263BK695eLcxqJO6p2X5RzOzy7ezorudpW8A5/Nz3SO41x3Hh+gAgXg7ER8DwpQBH5ueBYuACluzwgtA8BecO9dywbN4I1+7Dn2ADiw4wZd5c3udiYu4AGdD9Nc7W4H7ZG+8CZ+5lzX2uEN0S4uBCcw7AWHx0Jw2xCNDwcQAps8dOee7gq9zmbMAfL//lKEHu8X3e//PmkSrQL6ztH4HAKOXnDrrAIyEOsLX84NX3CInvPwfO5YwOu5bAMQf9Ghfu/1hs8KHtKo3s4xwAHKPmkcr+0XHcxOoAMhUBEXoHwDhsfFvdLBbMhxbBAqLd8J8UpgQAICYaxw1fIrrQAhQMiGbBAyMPgLQQAMwAAdsAIzYPIHoQCOrxBJ0ALlPkgEvtozn8QbMBA8UBAq0PkMQQBNsAEA0Pk7kBAq8A8qgAMKQc+ttPNccPlU/PgM0QFD8A8f4PIJcQC6fwAycBBAsMutzwE2cABGP9cqVGrFPxBLIPKEZOInwPTG3x1p/g/QPvXG32UInvXX72Uo/9/x2+9lEh7u399l0iwEuD/+Olb56K/OFgDO679k9G7OsP/+HHTZJwD19C9gAc78+T9g2Q8QD/4NJFjQ4EGECRUuZNjQ4UOIESVOpFjR4kWMGTVu5NjR40eQIUWOJFnS5EmUKVWuZJkQn4wdClrOpFnT5k2cOXXu5NnT50+gQYUOVZhPwA4LRJUuZdrU6VOoUaVOpVo1qpUCIQpY5drV61ewYcWOJVu26r4PIQSYrXiPoEC2ceXOpVvX7l28GMNYCCEjL8N8A0P8JVzY8GHEiRWjvKdgxwS3iVt8mBAixJEPizVv5tzZ82ez9x7gsGFFsQ0d/2RYaAHa9WvYsWXPRv/ZwgaOJ4ZjGJT5LzJt4MGFDyfu+smEHUkKI99R3Plz6NGls80nQweHwjtOiGg93ft38OHF39wnQMfuujEE4Bjf3v17+PEt7iugIzNdmCFsyOff3///79DSqq7HPugOQAQTVHBBze7hS4DfwtICoSX2YfBCDDPUkKzGVJAhwq7ueSIGvzY08UQUU3TqniRsmMC0r5b4wAYAUlPxRhxz1JGlJSawYYmv1ktNBvR2NPJIJJOUSAsZcFAOrLWUlHJKKpUsL6Yqs9RyS
y6DCiMrC0DsckwyyzSzo3vS+kDMoEDA4Ew445TTOwe1YnNOPPPUk8p7YtgBwqFI+CKHFY5QYU//RBNV9DAOPLRQqCg6ACCLgYpc9FJMMw1rtBd52ufNgpoI4YQC4NL0VFRTfaoFIGzIbacGtvhnCgJEXa0FGFXVdVded3qiySTunCkHAmrV4YBek1V22ZaMUkEBYVsqVooVLGX2Wmyz1Yi+EMLkqQMHmNB2XHLLfSjNENbcCYkurjD3XXjhvUcErcLgqZ9489UX2z6zEODRfQMWeGCPOpQBYIITVnhhhzjNlWGII2aYVVcltvhigZ84wUmMO/a4XGex6+iJpD42+WQu6dshhmgjakGEEyxDeWaak0RXXYwe2CqEQ2v2+ecb6yyg5YW0UGA9ng+I4VWgm3b6whiEEMDe/4lasEAGFfQr9Wmuu05QARUOQPihGS2bgLuHvVZ77fZGOyHthUwtaGy267ZbOtuAmHChe/C5+m7AAw+vRxsORKiFGSobTHDGGy8unwNwkPsgHHQI23HMM5+NPhx6Q0gHGwoQWXPSS+eMPgBs+IBpg5KwgmjTY5c9rn1iwKECHgCY4J/9Zvf9d8Ns2KCCCqjgYQMbBPAceOabN0uEgXjg4Z8KALjPeeyzL2v6f5bX/nvwq6qA9yfDN/989NNXf33223f/ffjjl39++uu3/37889d/f/779/9/AAZQgAMkYAENeEAEJlCBC2RgAx34QAhGUIITpGAFLXhBDGZQgxvkYP8HPfhBEIZQhCMkYQlNeEIUplCFK2RhC134QhjGUIYzpGENbXhDHOZQhzvkYQ99+EMgBlGIQyRiEY14RCQmUYlLZGITnfhEKEZRilOkYhWteEUsZlGLW+RiF734RTCGUYxjJGMZzXhGNKZRjWtkYxvd+EY4xlGOc6RjHe14RzzmUY975GMf/fhHQAZSkIMkZCENeUhEJlKRi2RkIx35SEhGUpKTpGQlLXlJTGZSk5vkZCc9+UlQhlKUoyRlKU15SlSmUpWrZGUrXflKWMZSlrOkZS1teUtc5lKXu+RlL335S2AGU5jDJGYxjXlMZCZTmctkZjOd+UxoRlOa0zziPm5ogAG6UdOLRoDBCBqQTW1uMQgjCAAILgDOcGIxCBqQAAggcM50ejEIRFDCBSBgTnTGc4pM0EAK9tEAEIDgm/rUIj8d8I99RIAIAs0nQZ3IzxQM5J8QgAE8HVrFIECgBpGZaD33ATtOBgQAIfkEBQMA/wAsOARHAg0ACQCHHjRoFCxYDBwwcFCgDCR4CgoYKDhgDBQ4DBQoUKDAKChgGjR4DCRofHTwGjRoHgpoYKBAHCRQdHjwChwYFCRQBDxICipoDDRQGgxoFDhQDCx4HBxoUGDACg4YDipoDBwIIOBABDRQBBQweGjgKDAgDgIIEhxIDBQQCgIICAhgaHDgAixYCBhgYOBAOAhgBBgQDCRQOChgGip4BCxIFgJYKBBgGgRYBCRQdFjwEgx4ODhgaFDgFCRwEgRYcPCgHiJ4HARIBCRwFARIGipoDAwIGiJoHiZ4YCBAYCDAUODAGAggeEjgGgxYGBggGhJIGhJYHgx4HBx4FBRQcNDAGi5oHARweFjQBCxwCAgwBDhwMBAgcPDgFBRwFCxwfFjwHBxQICDAYGBAHgRYFgZYFAxocHCgGi54wICAFBxoDCxQOBhgHAxIBixYIKCAGhZYHiJoAgwYFAxICgYIFhxIFARwODAgBBwwHipoIGCAIODAeHjQDDhwHDhQDDxQGiR4Hip4FDhwBDxYDhwYFAxYHCxQHg5ocKDAHjR4fHzwFCxIHi54EDAgFBx4AgYcBg4UHhZ8Bg4MGhZ0Hjp8HhJsGjpsDi5sGiZsEhpcBgoEHhJEFgp8Fg58Bg4cHi5kGj5sGjp8DgYUHjpsEh5UGhZECi5sGg50HhJcAgIMGhJsBgIUDg4UHhZUFg50HgpUBgIEBgoMDgYcHj50HiZsCgYcGg58HgpcDgYEGj50Hg5cHhZ0AgIcHi5sAgoMGiZkFhpUCgIcGhZkDgoUGhJ8AgoEHhJMDgocHiZkGhJkCgIUFhpcBgYEEh5cBgYMBgYcHhJ0BgocHg5UFh5cHhZkDgYMAgIEAg4EAgIUBgoUHj5sEhpUAgYEBgIMAgYUGjpkHhJUBgIcDgIcHhZcDg4cHj58DgIUBgYUHhJ8CgYUGhJ0HhJkHhZMGhZ8Fh5UFgp0Ggp0HhZsGgp8Bg4EGhZsGj5kAg4MGjp0Hjp0Ci5kHj5kAgYMGhZMGj58Di5kHjpkHhZEAAAACCcAqQmk9q+gQYMDBR48mFDhwn8NHTKMSHAiRYsQGyJ0GLFgQo8RAwIAIfkEBQQAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwD/ACzbAGMA5QCmAYd0dHRVRxKPj5C8oC+OeCIiIiRwXxempqTa2tx8fHze3txrWBV4YxqioqS6urzduzsqKixra2wmJiSymCzq6uw+Pjy+vrzOzszHx8dgUBRGOg1dXVyvkSt9axxKQgzu7uwwMDGagCWCgoShiCaojSpEREQfHRTPrzbCwsTy8vTFpzNPT04WFhSbm5zyykJiYmTkvzzCojGzs7RmZmS6miyDbhw6OjwSEhTKrjT20kQpIQbi4uQVEAQ2NjT8/Pzm5uQ+MgxWVlQ5LguurqwKBgQaGhzsxT46MgwvJQqWlpSqqqwZFgT29vSGhoRKSkzS0tTuyj4eFgQyKgiFch3WsjReShROPgxyWhQOCgQGAgQCBgTe3risnMS8plhwXkAKDggKMBzuqMwuUExEODDwvhDCpCCEQBhCUGTwoDTKfMimbiwwNCASYDgmYIhCSDBaooT43NSmfijC8NQ0MEjyxtTowChgZHSIkrBiZpQ4LmiapJyAtITo9tjKMoC8oAzEfCwSMGi66DTCzvBSWBjKqOxuoiTAsqR+anTGytisrpQaBiDq1tj49MDW9uiygBzo+vB+eGRmeGwMDBj4+uREOiBURijextAYKCjS5uSansDYxrQwIkREJjymhkiMinhYchhwVGiasKzWrljc3ojWnESInIjKnriMcnTWphCwrsRSZGTUxFxuouBwYHBu4oCGcpymsujo5PgSMEQaFjTcqIBsdhim6LQ4cDhwGoCymkAmIMBEJBTk5NDS3NiMYhhwQGzwxFzUxBBwWtQEBBjW0tAmYNAIGCCypjB6ehjS2PSgsjQEDBBiUmSgjAzCxrBeXhjS1OCYjCgcKAzYxvCyfHSGWICAklzk1vSmpnjSsrhWeFhsQBiGZkSKeAzw8DRUUnTC2MTofEjwskRCWECIVBgkwICgfgwICDDe9rw8SEimxJx0kpgcOBxseETOxDi6xDQ4DkBaXjxwZlgSEGByeGAODgzW1swKCgQCAgzW1tQGBgQGBgwCAgQODgQKCgwAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSjzIb6LFixgzatzIsaNEfiBB6vOnr6LHkyhTqlzJ8l9IkPVeCABQpKXNmzhzrnzJT18QBT4UlDCps6jRo0gL8gShBIMPBDaI
Jp1KtWpKfjcEOJixA0EPqVbDih3rUN+GfABM5PMKkqzbt3D59UAxhEURBWzh6t1LlR+LJPmGFun6la/hwzj9RdiRoOTgvIgjS+6orwSKA/VAFkGAAETbyaBDQ/Qr4MmLG6hBIFDgpJ4+0bBjI+QHwgKTHU/yPXniw8cODBtkCxfOz4QICyhQYEDug8kTGSuGS4ftt4CJ60Uq7NixgYW/6eAn8//UjNdz+POIx/Mb/KQCWPTwyar3ByBBzfj48+vfz7+///8ABijggAQWaOCBCCaoIEM9jVTPPyS9p1RP/vhTT4QhCTSePhdKuCB4/NSzQgItDDFECzOY4KFL+oAQgQAHDHGACCWUZBJPNwAgQxIQrPihcD7t8MMTKFzwAwUHmIeQCUp8oAAGKCBAAVolaRiSPy9QENRQP8KnjxMtbACCCRKs4MAHLdwwm3EJlFBAET2IkAJrN4pUgQOcIVBBl/Dx488NFYlUggIK7DnbDa9pWI8AKQhQ53otWADAEPkYyid+IRWBwg7ROdTTC0wokShMAGAQgQlDFHppfiFVkJsNnob/2MQHjmpYGQZp3iCDqqvGBxILLdD6YEM9rYBAYAPxU8ABKETFwq7u9dpnPQB8gAGsZYEgww8iJPpPPQk8EYFj0PoorWghzjDkCuYSBMEBaLIw0JcoCDAsC5R+1e65kaWLwAUreLsQP+9SkIS8A92Q6gss2FWBDPl058++/PIV4gs7XOAExQQrYTDCA7GAARMkl9ybbwB8VzF19UTw5FD7tqjEDwKwABZ9DbSgc4kUpGCBADWujG7LCuQTsD5IVzkbCEP8kEQRSSOtoYUVfqctAgFTLLRbPlHAhAwvBLHBBi9sUICE6x0QVBMriE32xi6N9yxUWm9NFrUUUPDDkXn//xMx2jY8kbfeg/8wxIPqSSADBiXYjW4BGwTh9thlmzBbPSVEPvbmL3Cpnj42lDCs46SXvmDdpqeu+uqst+7666R/NptCqMOeE0/63FBEZgrpw4LlAiFcU6Ai1dPwDRNnaPtY4+1JQQNnz9aDDB8MtB0F/8ggL0xBHJAbBknUKPvyVvFURAMp+IBBjwkVAUALAgzUAlAyfBdiAh/8YMEBFvzwd+3kY0mGStUA/UUvIUkbSA8uwJ2KfCkfCniBa1gQAf0BL4BjqczPVpAPDBywIfpYgQ9QgDB/MKoFwwpRC34wAwwyj0kWAIEEEOBBrd0gCR8QgUluMLMIWEkfMwgVAP9dmBJwIWAD/JDAkz7IkBJwBlvfOgAFAEA8fUTABxcQGBGREsInJCEzIPhNAR7ijwT4YAjeKuMHZHC2nhRACVhU2RaRksQhoMAzBFMACpiYkCQ6gIUEkQsGDFYBCJRAADvA4ujmaJR6iGAHLwgJBPJhgTYyxCcfsMB95rWBC+TvAk+wgAziyMij8KMCENyADSpQAgD4LwI9UNNCFvWBJrynRQlowIxsIEIMDLGUGKlMPj5ATGKmzwcpANjAXIWs2YREHy9o1C+BaZH1zAAA9UkAAJLABJq9YIwKoQ8TWiBHg7wEAnhqHDWLoh4QUPKD/mgcWAqwrRdQ5AbesRAEBPD/gSRocZ02aacYTRJCLL6GoC+wlgQOUg/AHABG+fjBAS4IUJy0EwMyWKhAKrMDGQjMHyLIR7cOohgUrOU5MwBZRW+HuwIwsUV8ZAEIFhlIfxRBAhAoAKJWytOe+vSnQA2qUIdKVOooTz0DIx5P4ja+8RQVIz25gQkgAAI8qsdDIeoBCAD1knoUgKoggADUrjq+pz4kRBuQAQKY8JQekdWcwPLBFLvahIgSU39peatZI6Ks+RHJB/loo14HohgEpGCuIflLvbApAE8OgatO3WtE9CEBCeijAMcS7KPGVxkHyAixU5OjXDYVhKOWVbIQuUtgKaI8l5igAQ4YEWJfUpBF/832tKiNyGBWy9TIfisBCIgAfW77EqT5Q3EKYJdvc6vbHayWrARdAa4ulADi9sRFEWgCCn6Awqsyl1gv2e0Y9VpHB8CqjNYFqd4+8DyrIvW7fTSfc8cL3Rs0wTQVQS8Vk3XdF7wgAQcAm42s1Fr4mlO+vA1kcVfwBAEgynfVBYDUCBySHhzgB0jkb4ENrODwzpe1MOGmA7SZAAGMTFIBo7ADneADGXQYtxw2iHhBHCIBHGlvR2LrbVCoYpfYQH3eWmqM47uZJxThcwMGgeQkt4IXDCEFSliBe3nSshYPdsiB1IcJWKAdoUg1eWhVrorRG4FngqAEY2oYBCLwhCNeGf/LG5UuBpzinOQgESQsfoKKxtyE2QKRgSiQgQOGmY8EgFnIcCaIWXJzgXzopoORBEkPMPBYDYNEMcB5SQEiwL85y0AEFbDRchOdLFE/s7cT1rCteutdS5P61bKZpqdgTeta2/rWuM61rnfN6177+tfADrawh03sYhv72MhOtrKXzexmO/vZ0I62tKdN7Wpb+9rYzra2t83tbnv72+AOt7jHTe5ym/vc6E63utfN7na7+93wjre8503vetv73vjOt773ze9++/vfAA+4wAdO8IIb/OAIT7jCF87whjv84RCPuMQnTvGKW/ziGM+4xjfO8Y57/OMgD7nIR07ykpv85Cj/T7nKV87ylrv85TCPucxnTvOa2/zmOM+5znfO8577/OdAD7rQh070ohv96EhPutKXzvSmO/3pUI+61KdO9dIZ4dwBIME/asADc2uABho4NxBiEABz80MKA1jAuXUwgQ6gmwQEOPcSRjACdBOABP8E9z1qMIFzE+EfA6DouDNQ9cIb/vCIT7ziF8/4xjv+8ZCPvOQnT/nKW/7ymM+85jfP+c57/vOSQXu6Y5DuAbj93BMgwD3MzYMRhKAf6B5B183N9yiYmwgMGIAOzK2PDAzg3PoIwO/FPoGym3v35+bHEtqed3DDffXlbn0IZl9uAnBgCefmu98ZQHrQe7/XUui+/7lNMHzW0yDdHAjBufsxAhIIPtxYIEDfz92BARzh3AtQQdjNHQAVGL/cGqAChFdu/CAEaYdu83duHJBuJKB+5tYPIeB+5hZ/JIB85cYAWnduBKACp0duC3AC//AA5NZ76TYCLkCARIAEAwAFMCCC39YTPAAEHTABD2AERgAD3cYPWYAFSLAAJKACA0AAMwgDUAAF2sYPRBAFAUAANDAAI7AASEAERAAEIQCE0/YZPdEPQjAFA0AFMdABQIAFUqEPsEdtOngPOrAAI6ACKhACAWACzWeG+sADAVADHDAAHMAAQMADskZsPYEFQmAAIxADQRgAOhCH0gYSRGACSzgBA/9AAgYgBFjAbTCoAQwwAgPQdh6wBH04bIqoA0s4ACrwhEiAiIk4h4E4AScQAwSgAZMYSNamg1igAxmwhgMQAhmAfZSoD0ugAQQQAycwAQZQikoRi2SIBAwwASqQehrQD53oiUkYACEQA6OYAXB4bUc1h0ewgQ/AgcRYjHKIhgZAAoRIAAGwBKYYbUi4BI3ohAsgBc64bVl4BB3AAUBYAxrAA+n4bGdIiyEwADGAi0gAfUc4hxpQjzFAAgyQj88YbGeIBBkwjQG
ZATrQkMAmEiZwkMo4AQv5d/KIimo4AIWoi0eIhKBIAKIIiUiQBbvIA0KQjCdAA1OgAQR5hPfAiCP/cAInMAJVQH0FyQO+qAIxyQA6oAW7yIMGYI800Ip8SInR+I+3uAA64JEago0GiZInYHqSaJG/1o8+yIbmaAJUmW09wY52mJAGsIdc6Wt/GIiDuJSGuI/O9okBMAXKSAILIImwaIxAiYF42AGbSIn8cA8QKZEhIJUs+ZE8cAQwGQMzKZdzuYO0OAI40IYB4JPYJhK9+IsnwAEGcIgt+ZJKOZPxKI83GZEqMJFRIDBrmWswCAQEcAIPMAAM8I3hSARZIBJISY5BqAHoeG36IAWXSABWwAN0KH+P+I6lGYtCMAIwkAMucAIkQAIiiY9NqW1YEAIwcIPbaQQcYIhjqW08O3ACLtCCMHCeVxBuPIAD5XmeRvAAA/ht8VeD2wkDEwAE4cYPOhACD8CCxReeOagDBkAAj+kS4NaayhYQACH5BAUDAP8ALHMCWQC8A3ABhzY2NK6urCQdBLy8vMysNEo9DVlKE7S0tBISE2hoaX9qHEBAQaCgoYRwHSkiBu7u7mBgX3dlHFFGE7OXLCMjJKioqG9dGOrEPoqKjObm5NLS1BoVBMTExKeNKnZ2dJiYmYaGhHp6fL+jMdbW1Nra3ICAgHBwb66TLFhYWOO9PC8mCvTQREA0DJiAJI14IzgtDPz8/BYOBKGIJY6OjLucLjIyNBYWFDo6PGZVFCoqLOLi5JKSlN7e3E5OTBoaHC4uLPb29FJSVJ6CJAoGBM7OzDIqDEpKTK6OLEI6DB4eHN27PEZGRIpyIdy2OJJ6I2paFcrKzF5SFBIOBA4KBEY2DKqSJAYCBHpcfMDG2M7UqHR+GA4MGKSEDKq6nGB6GM647AoOCHgcgBRmUHRqCOD02IpEGPi8QDIuSOy8ECAyRKrEeGZYKOyeMIaaZPDu4Cpm0AIGCMDe3CbEgFx2fN7WxJJ+DAQEGMK4pLaYDIKAGKKgIHSo5GxuQLC+MKqWoKK6wF6ojN7i+NbO9CAYNEhONN7I0D5miIp2kGRKEBYyaBYQYHiCeLKmlLag6GyCbNDS4LC6zOrU7JZ8kDRWFHTkqM4wgLZ4dEw+ID54OOh4SEw6PM62ECA8HHJsgD48KM6YMF4+GLLmMPja7KSmkNi4uKpsLDJWTMR4LMzOwMDQzPDyMNbixNDE0JBihJB8bPLarEwmKPT0xM54yFxcdJCauOigsMa4xMj02KrE7Nz0iOjGKG5wWHJobGJclLC+tJCkkKrOtExOZGpcaEhgTLCgUCoiwNTmMAYYFD4iFHyCXHqaoDgiPJSSgLy40KqgxJBYGEAOQLzGxEAwaNzMfMSgrD4uOBoqKHR2mHhGbAQMEKqESOTCVPK+zAoIMAoyKKrssEBOWHhg1HBEGMDK8JBuSHRwIH5wXH5YGNDi6Ia6jIiSoCYwKGBmGPLazAoYNHTEKLagdHJYKH5qeM7EQKCqqF5KKJRmGAICDAYGDAoKBA4ODAICBAYGBAoKDA4OBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIEOKHEmypMmTKFOi3MeSJT8KN370U0mzps2bOHPq3Mmzp8+fQIMKHUq0ZEuWP3ZwYFBjX9GnUKNKnUq1qtWrWLNq3drw6D4bIUbAGLHEKdezaNOqXcu2rdu3cM+25AehwgEeGsqajcu3r9+/gAMLHkxY574bFWZAGJCXZeHHkCNLnky5smWgSWYcMFLjgIYFji+LHk26tOnTqOH2S8DBA4Icnsumnk27tu3buHNX3LdkQIUf+37E3qu7uPHjyJMrZ7uPAoMBPfgFP0AE9PLr2LNr385d5OoBJvqx/4QNBTTx7ugte7VhgkEJCunjy59/MEcAHh8SJIAAYkSGHShQcB59BPrlVT8Q/AMDEbIV6OCDytVQQQY6kMDDCDwAAcMDHKAwIIQgpuXVPwxw8ACDH4ao4oqiIRCEByHEaMIMPDzAQAI5pMjijlS15AMGDHjAAYo68mjkkX3tw08/TPbDzw2M9SAeklRW9V0CN1CnV5VcdtmWV0gxtqWXZALFjxEDfEABbESW6eabPYJJwQcBAFAknHiS1FyJRvDzwwAo5inooD6B2U8NN+hD6KIl6WMCFB7oExygDTJq6aWYZqrVmQdgYMN4A3BgnqaklmrqqTjtk8QHRCTggw02LP/BgQYQIDATqrjmquuuu3X2gAYDBGvirxX0cCevyCarbKbNlXDAswcEMMADJzJQ6bLYZqvtoPzYQMG3FCTRw6yu3rrtueimiySYk0IxprrwxivvfOzWMOQ/x86rb4gs9ZPDEv8skYO5CLWEAAA99LBAEvwQ5BQFC/QQhBE/SLovduzq0wMKNlzssZEsIVAXByQHAMGnCSmZQwmhQsHBBwuYm4QHAZAMBRQHmCDgx8qxy490PAfN72pEELFDCDtooEECihac2QgHgFBCBVBb988C0n4QI6sklOCD0GCHLXZR+3RGggk29MMeCQPYiRBdbC+hjz45fKADCAgIREEPAKT/3Y8PEEAxgrFjF2744TTxk4AODCQh0J48hIdQEgxEfitvJN8g0M+Ph/wBECbki/jopJfukD4zPOBBw5ubkMEHXx+0AMlW7+MDAw8kWFBL+uwAQ+imBy/88AvZ8EHuA7GEgg4BwHcQCjw033k/vgPvMEsAHEAC4cR3733wt2eAQuf7oEDCATkgBEEGjePrEga/n8dSZiR84Pj3+Ocv9te36zD+UUE43w/Uxz4fHIUf8POA/GxQgvMtgHX6U8+SOMeugv3MSdLxGQbZJboIehAlENCHDRggPq+YD33q00EFknDAGcSvIAjwwAigA8EPRkZVCWDAASpQggdy0GH6MAIG/+xSgR1EZy4/8AARQRCzH9rwiThBwPES4BUIqNB5BukB1HLEuw/AIAEFsYEHgBUdKN6wbiMgwgEGMEMUiKeC+KJACW4WrVCB4FNKWkIASKBGNg4AAm8EkxkHmZJ+lOABJQikITMwg44dBACAMlZLKBAAHQQheWDRwGZqSEjBIKAEKjQCBWoQAk3ewImfHAEDFhAuCgAgBxl0jg4+cIM1JQAKAzCPIDvJS5GUD1hNwR5jUMA6JRHEBhi4Gx7hhkKBgIUIFfBhLwdzg5v1KY4M0IEJinmUf/BjXAEADvmcwo8gaEBUTtmHPg6JgaaNaJrw5IiqPsCDHSwgBwugJwOcp/8qFBjBnUaAggZMUIMcoGAAlnNmCfCiMx8k4aEWiydc9gGBB1TAkd5MAA/aN85/IAADZ+PHaygQSHWaQIX3w1cQHhCAlHZTojC1iJIWQDUO1GwEVwMaP1AAhQrU4HH6YM0MqUMEELBQIAGEgQ4OwICmVoABgIypW/oRAiDswFy8waXm3NeSf1CgAl0bWZpQgACWOCoDFYidQIyQgQH8lKuhkapcH+ISUjLgqSEAQAa9mc/3dE5jQ6zAByDAQrMsgAEBSKxioxXCua4FAakrAQTLRim4OgY2J9rhByqgtEgpCQ
Uj4MApBdIPD3DIOu90rGpTxrskhEtSce1HEmzATSX/IeBbPggkacEFrhzkgAJlXS1abLAD1WXQKbDRgCTBVIMBwGCgFPCWCUjAAb3koAI2WoIrTcABGEDBCHAUrnjHa5yvpC4Ex52OcnfZ3AfALp1zykAivdmDAeiACMFaow7ctUvy+ve/tFEnO9N7Aw6UJ7WwyYD1vGkCG+VtH/24wdQOMNjAtS21AM6whkfDDxMAgQGw/ewWXyoQyimYIHSxqCPN6trcLuF8AurqhmdM48nswwg8GED6BLLODLRTxgP55ANmYK6PDtmd3VynDhIJ5Bo7+cmB+eqSDdiPcWkgCHP5Fuv4ISsiuJEf+kABETRAzH7Z6mc2SIAa7RReKLv5/81q+SYUSMCAEGCAAyTwVEtqwIAPAMcpYBkBFI62AyLoAAMGhHAQdoABE4SAAecsc5vh/MQmMwTDyTNLOklMaan0wwhgpRAHTFBYlizgnKOy3WJ0IGpSt6QfQTgAq+9rz5L2t9P6e8kSIGACE0AAAFNKmQ8WwOt/JCAIsEwePxYQIxjFqARLABqupcKPJABgAfcMsVkBUIM3uq8fMMF2DrStTnxiuwa05aClp929b1YACkUbAdtMYMCEBHVWRSvaAWr1OCkqFQr/yLdn2Y2WuGaa4ONVXABmkAAUQAADI9Bm0w5S5RKYAAVBeLgGRkBMfEnxARhYwD+MYARkSxvhKP9POVCaUwMEsC6GOnBryiboknU+YAeOlCIJwDvODqr850D35XkgmReFOOYo64OdQHTuRn2UNOhQjzpNvAKl8jAEzAiwQQ3sVoKmfZyHIDBBDyhwcqmb/ewfOYo+QrBRLBasBh6YwQcA9YGmLB0DPOBAqIoGM06i/e+Ap8hceEpmvxfkTBUggn86ZK5PJ8AI2DaBfRmQo8Bb/vJ0zeMBRhCC4BrdBgsoubNoWUwmvbo3CiYY5oVS7hu43vX3NPzq8cflAGjAa+s2iMHGuAO1ZlpJJQCC0mc/lG8G4GZFg/cHKk/8XC/BM7jP/e5aAkl0Xq8lCbBRSpv/EwThZQYgCD//BhKAMu5/j8vU8UCipX/9fVT/WgcMPgN8b36e0EUDjZybPhAA2/oTT0lGQB1oY1kWtGlKAgE8cAB/9jNUdwAZEAIT5387QRdEIFk9J4HCsw8LMC0HEAQ/UAM1wG0RtTtJkAAmEATY1gNCwgOR4k034GuQtwQJ4IA+5XMYOBJ0MQIVYARLwErHdYOm0w8gAARAMFTBMgABYDW7kwMMQALnNCvUFQI7s1MDwEdDgherJHtAqBLe9ysagF8YEExbSDpnggEzMAM7kIZpCAJ2R3E3kAAlcIZhtwD9ZztBEAIgMAMYUAIBooVjiBLLtocQwB8mUgGj9YeIA2b6p3/854fe/3QrTvJSSsIkcxOJiJgT6uQD+vAz+rAAB3BzGHWJoth8YLJTbfVWo5iK9XcYRMBfqviK3HcDraiEsFiLgac4DzAA4mSLvAh1iJIECHBbi/EAIDCCvXiMCDcnNvUBOkQCs7SLAIOM8jKJTTITTuIQjogvpCWNJIEAM3gvHFABCcAw3LgvPpAAZniGZhhyjsgPNWACGPAP0GaM/VQC/zB+yVaOHqFOa/JTOZAEwaaP8lJgHAItSFhGCWF8AYdno1Z+/AAASYNTilcB0SaQFmlG1YcCOfADH1gxRpc9eQZ5QhVVtrMD/2AyoQdxAdCGF9mS+gNJFRBjnGYQPWY/rXNF+P8SUN/FOsajAxDokkCJP5B0AAvgA7nFfl6FXRCwF5BEBNHRDyawUS6VfTEZlFZJPAXGPE2FARCQjwfhfmJCED5gN+FRWrOkVhR1ile5lqbDZwOwQ5tHZzcge1nVNgTRY+2UgzK3OSFwWjbIloAZL6B3TxTwAxBwF2pSMD2AX6j4D1TlXmUlHBkAAj/gAxRwUN51TYG5mWDjEqFxJhzAAx7ylYu5l6TVlzuQNwgyZ0yFWCbiikD5FQgziKK0V29zmfrRa71mBFilDz9gBIPogd7GmYuiblIEBKvzlbMjWncJApPZNPoQBHPnG452TofYku73AUOycRpQAShgjAXhKBr/gBc4RQL1cz/NEQIDIFAjoEn09pfECTIc1A8uJFkDMik7RxCUk1CbkwT+GJ0jID1ASRc2ZXEhQDVQkACqlzxrlwH7BgG5yZvkBGoDMAO9xio80HXxWZzscjsPsE0f4gPYlQAQtJy0SBCGdDd5E5sUoF22og9ytFQD9JX64AEksDpLwiR7VW0L8AN+YwMQQF0nuqFvok4u5xJplkYNwg+vYhZQyTxv9Umvo1Y09w9h5jIPZJVKQnVDYgT2ZqOhk15Ht6VHQUk8QKJEKij9gAJHAwEokABJk6Er+g8A8AEDiC9MSALi+HCCFgRbtgAe0HAQUAIGpqBXWS+A4qUIUaMZ/2AyDddyMsYuX3Wm2ZimSLIa+LZxM+RqAxFAAuqCrKIB/6BJ3zkQa+oZAQdNZAWfsGgoi6OAX0otFkICUNB3mgYmS4BqrGqpIGI7C+Bw+xEE3UYcSQAB/+QwSQCcbvoDNfQVxCYQPeCVWnpAGxhSCdEPC+BrS2AEJuCAAXCIcmI3xcirl2Jwj0OumbdnYIVoOgJhm9gviHFkPfcjPFCDu4qu+IopZdOEOyCTl+YSFXUAWJROPtBARGmb+ZqwybKvIzADMnknXrEEOdaYqgICDvSDCpuxulI2nIUBD1skYMJWupg8SYB33wpXGpuyp/KQkFYCpSZ9GTOEaTUQFAACRP/AAHqFaSq7s8X5AyR0AD1glK4FkE4TBChwA761BHhHAhDQMBW7X4TlUBB1rzxbtSwClQ8AAzkWAM/iGyL3lRJCXaEyng1JTiigATDQVly7RiazoFb7tmVSThUQABUwt4nFVF97EAiAAjNwV4IVAnToGMtGJxVwkndbAVEFt4rrJhAmta4FLkRbMP2AAFJrA+96dGrzUI/7LUkAnov7uaAbuqI7uqRbuqZ7uqibuqq7uqzbuq77urAbu7I7u7Rbu7Z7u7ibu7q7u7zbu777u8AbvMI7vMRbvMZ7vMibvMq7vMzbvM77vNAbvdI7vdRbvdZ7vdibvdq7vdzbvd77veD/G77iO77kW77me77om77qu77s277u+77wG7/yO7/0W7/2e7/4m7/HSLX6S7/7MAUbIAWV2r8EvDks0AAd0AIGIAUF3MAJyQIdkAIXcAEi8AQM7MAYPBAb4AISLMEXQAMFkMEirAITcAEpcMImjAMijMFWUAAEYMIorAQqvMIFnA8FIAMvPMEm3AEvQMP9uw8bYAEn0AIW4AIEsAIXcAIFMMA+vL5WoAINQAMu8AJTIAAGEAFP8AJD0MT4a8MtMAERIABOyw/5MARWwMX2C8Q40AEdsMD8i8bjawUOoAAT4AQssMVwrL/5gARfHAEOwMR5rL774A8G0AEngAMb8MaB//y9+yAAETABLVAA+aDIi9y9/MACTjABDaACgFzJ5csSUmAAMtABiEzJnqy9jRwBNBDJk3zK9MsPL9ACNLDJnezK4zsFhXzIiWzL8
gvEQiwDBTAFpszL1QvLLqDJL1DLxPy9UyABMnACFiAAZ7zM7uvLJwDMwkzN7ssPRdAAE+ACLKDM2ozKNiwEYCzG48y+anwCbSwFw5zOzvvECnACdozH8Jy+Q1AAffzH95y+g4wDEzABbtzP6NvICrDKd0zQ6HvJmawADjDNCk2++xDKbGwAuxzREu3IkIwErYzR43vJX6wAnOzR4/u/uVzKJC2+jSzEQhDM75zSw2vMyFx2MP+9vSb9zNEM0TXNvUD8BENcAO68097Lzd48xeIs1Mqbzx1AAxGwATqN1NirxhPQzi8N1bzLDypAx0Zt1dubz18czUfN1cQ70QBN1WKNvXJ80PVc1WdNu47h1WDMz21dvfuAAFHQATJg0Ww91249x3XMAh2913z9uiA9ASId1oPduxMtyqR80YkNvSsNyZIs2I/tusY8yyNd2Vgxk2t509Ds2JpdFfnwAjhgARIA2loaxD+dzaFtFTZ8AhJMAAogAJRdf1ZQBC4gAg1QBIjd2jjxAhGswwRgARcclDbcARNgAajt21KxDwbQBDA8wTIgAEFpzXntD7XN3CCxDzigBCb/rAQpsAIn4AADCsUzrd2bjQQ0EN1K0AAb4JI2LAPJLc3onRUb7MFKIAPhfJG+PAF5HdT1jRX+0AAigNcNsN8WidVFfcfZHeAhIQAtsMkB3NtAOAQssM8U7uA4UQAnYABPXY5AXMhtvNwaPhVWgAMnwAINHnj74NesvOIl/hExwAQtQN0JjsmGndkxjhXAHQFTIJCL3QFHEAUkvuPNbQACneGruAGPDMwdbeRXwQ8RcAIvAONSB8uZTMtQrhUb0AIu8N7SOMgnXeRbHhX7UAAT8ARKTnxA/MgtzdplHuVPMAEFYOVAx81ZzttxrhVS4ATTjYy4jNwoveebXQRHoAD5/9CL/d0BLk3oWcEPBkADBmDnKAfLDXACTBDOlO7oG+EP89zDtjgFBSAEJ/AE9M3pWSEAMvDltajGR9ABEoDd5orqzY3mOLDmf6fg38zgtJ4V+WABdA6LcB3GuN7rNLHBMkDeqTgEGyDiBuB5xn4V+1AEMhAB/jCKMrDqxwzOQ7Dp0a4RkN7h3u5mE6wEBOACcv3tWCEFCsDDqQjDSqAAxa7uKgHhrC6KMCzBLpDo9C7tEIwD9jyKE/wP/N7vov0EjD7u5H4B/yACBo8VG9wCyn6JOCACDE8DD48VRdABChADo7gBOEAA/xDCGV8V4W4A8x50+6ACQoDoJV8VG4DAoP+eii7QAD/+8lPhADLgBGD+8Q3QAjg/FWcOzQUvilMQAR0w8UFfFEMgxHX+isA+80tPFBCe7K8I6XSu8FPPEBCsAD2fihIwAVtPNgZwBCgPi7au9WOPEFKAwEWg9lC2Dy9wAhEA92tfECqw6l+fiirQAQ1g93f/OGhuATevik7h5UUf+DrBD0JMBYD/ZBssBHuv+DjR5TXOi0hv45SvEz1+7bY450Ww+YZhAB2e8miH5Egg+jnhDxEgBG/Pi2EvAaqPEzrfAB7Pi2g++xt+yAEPiz0cAbpfEyd+Akjw+G7W9w1g+sGfEBGv+b3oBIm//CTBAnRf+LzoAjIw+dIfEtz/LenK/3cK8A/Ov/0iMQSX/vrkX7UC0AFOcPvpv7NnTgMW8P3vX46Mn/X1v7Mb4AQ1bvz5DxD/BA4kWNDgQYQJFS5k2NDhQ4gRJU6kWNHiRYwK9xU5EYFfRpAhRY4kWdLkSZQpCTLJp9LlS5gxZc6kWdPmTZw5dVLcZ2CCgX07hQ4lWtSoRBktphxl2tTpU6hRpU6lqjBfhA4vqm7l2nWqCxkbvI4lW9bsWbRpQe4T0MKFWLVx5c59qOAEXbx59e7l25dngQk4PvolXDjqxwmGFS9m3NhxTn4WJhR4XNnyS5+UL2/m3NnzYylMhAj4XNq0wwIiDJxm3dr166b7VMiI/7AU9m3PLP7hwN3b92/gFqUUcEHAwpDgyQ2/6Kjc+XPosC3QSHFBhu7o2eOq6NBA+3fw4f3yvlBeiRAV4tVvbfsvxnr48eVTLZ+iepP5+Zu2gKvf/38AY0rhn+ouWC1ABGlSQAbSEnTwQQgjlNAoC7Ka8EIMM9RwQ4oy4/BDEEMU0UHADhzxRBRTVDE4FgIbbEUYY5RxRsZe6MCClmjUcUceeyRLBgV8FHJIIos08kgkk1RySSabdPJJKKOUckoqh6qwSiyz1DJCflZDIqgtwxRzTPEK6AAoMtNUc03fWKhCMDbjlHPOzYroIIIc6dRzTz7zckCGBqToc1BCCx2rBf8n+jN0UUYbHQosRyOVdFJKK7X0UqqmiOAuTDv19NODeAN1VFIvpczEUlNVddB9WoRzVVhjpTMCf2S19dYxu8N11+ci4PVXYJ88oYAXg6VKBgeMVdY1GVwQAMxloWqgg/SitfazE0S91ilfsdv228rA0grco3CYjFx0GQPMu3SL+gnaduPVy58IEpNXqGzv1Xdf9WysjV+AA45OUYELNvhghBNWeGGGG3b4YYgjlnhiiiu2+GKMM9Z4Y4479vhjkEMWeWSSSzb5yAaEOHlllhlSoIOWY5Z5IAuGnfnmk4cwVwKceya5RHh9FlrjoG4c+uiOO1AgT6SbplgAlQV1emr/ilWm+uqHN2ACWay7Zhgrr8NW+ARvxTYbYJvPVnvfE1Bd+21yWehA1KDhtltZFVrw9W6+rRXACXb7FjxYuAge/HBbF0wWccZvhblxyGHFoQPNIrcc1CEMaPtyzj8tsXPQMT3hidBLn7SIf/Y2fXXWW3f9ddhjl312BE8Yl3bcw3wi7dx7x9Int30X/kmehzf+eOSTV3555pt3/nnoUeQ0eupXVICGaqvX/kQatvf+e/CFPjV88i9kYfry009wI6XVdx/B9t6XP0AE5rf/fvzz139//vv3/38ABlCAAyRgAQ14QATqqwVSS2ADGxOkZzlQgozJ3gQt6JeyXVCDVAkIACH5BAUEAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwD/ACzWAFkAWgWrAYctJgediiSGcB7qxEC/ozGZgCXY2NiqjywoIAa4uLjS0tTu7uyxmixmZmR9ax0zMzSYmJh1YhpMTExrWxaKioxVShCQkI/hvj7NrjR5eXqzlS1dTBTz8/Tg4OGwsLDWtDeGhoREREQgICA6OjxycnQ0KgtgYGFsbGzMzMwsLCzq6uy6nS1TRBCenpyCgoRFOAyQeiFUVFTJqDXFxcSioqSmpqQ+Pjzm5uQcFQRJPg1aWly+vrwmJiSmjiQ8MQx+fnz7+/waGhyjhiXmukAgGwQWEgThuTz2y0Gqqqx/ZhwTDgRiVhiOciQSEhQWFhQKBgQOCgQOBgQGAgQCBgSImmSuikyMpJBeqIzYyFxqXGAOFBwGDBhkfBx24ig8PCgsJMDa+NSWkoCu0rhSKhCuigzG8tSuyOx4gByucCzk2OTMqhBiTCwEDAi6fnSYfHxAaIh25KgsaNBKWDQ8KDyqxDQMGjQyWEx8anDe
7HAMtnAB9ne4b4ACCXfXqmD4JoAm24Z04QAzMQADAgBXqGDypQAyIAAFm3jAoAAjKwAQsQfThgjfT3g3gmghvwAEnQAtx4Zyl4cTIgA4CIZ98HAgpQBDWwjE7QgE6wAASwjESQBCRggw9oZxA3feu4j3UGdLUXg11IjiPYARu3AQVpZ/lAAQr5ckfwi+DnBA0JADiQZ/FIAE6ni1qnAuGIEbYYfwogAtGoIPUoEDIQTjengwIxgPJBAEP4DwZQiL1EBR1Aggz/QgRuqD0puAERiIcwAAKQx3yN0Yp/tHUtkAQkApBxUQUIMIrLxXxU2BkxEHfhBXQboAGOYQI+EAQzkHlReZMcGRhmuJDB1X+fOBXKiIVJ9nIz8JNQ8QD0CJZK9n1HQJM/IZcgAAD/sAEE8IxLxoxFAAI7KREKEJJlKAJF8HXVKABMJpXv+BJEUABL8A9/CQIEAQMtYJlOJn3X6BEVoQJ3SAIE8Hn/sAAhWWXWl38dUYga4Iz/IAIt4ItYloI4mRE4wAAwkJJjVwQCwZRXtnEWWRCfaVf/oAAdQAEtsJd+SX4LIJQMYXW5KRACMAP/UAQ3cJFjhg9O0AIiZxAnSRAERAAAXykDN2AAaGhmB0eB43iaG0B91Dl2MKAETmCUSnZwnRcALcCCS2Cd/4CZXtRmEtdxR5AEArEBKad/cFZyE/dxexYQACH5BAUDAP8ALPACWQA/A2sBh+nDPVpaXE5OTKWLKUJCRFRUVGpqbPLy9Ec7DMrKzJmZmTY2NG1cFmNjZCYmJEhISCsjCNKwNnBwcDovDLCVLH5qHI54IqysrKKipGdVFWBRFFVHEvr6+SIiJCoqLPTadLibLYhxHR8dFnh4eaampL6+vJh/JNra3N7e3Li4uNbW1PbmnIqKjBISFNLS1OTk5OK9PF5eXM7OzC4uLO7itMrEqJ6enPPNQY6OjDIyNJKSlIaGhH5+fOrq7O7u7MOlMxoaHHRiG7KytIKChMLCxH5uPBYWFK6OLD4+PHJeHBUQBMbGxLKeRB4WBBoWBKaadJ6CJJqKRE4+DOjKWAoGBDo6PO7KQF5KFNq+XDI2PN7i5K6UJAYCBA4KBObi1KCcvGRqlHRYaHRiQKCEDIJCbIJ4gO6+EO7MsDhUFLic5O6ksKSsIBJkOCZkiCYiwM7U9LJ4dLasrBoGILjEfHB8GLjE7AIGCAgYIDg+KHaWmIqOeNbCzHAagGZCVIC2hBIyRMrqMPro6MKyrHBc1IqWsMCkUEYmKLTEyLa2oOTS1AQEGHZ8ZFhgGMowgKiWmIJ8GDokNBoWNLjW3Kq0qEJcQMDGMAgIMFQ8SFh2GHpqCMyclMba0NS+EIhudDQ+QFJcdIh8ZEJMMIqgiKq+tDI+EKrwsC5UTBw6HPbW4FxiPJy2tO7C4KLSzFh8WAoyHMR4LDgwaI58gDxMSO6wQM7C8BIyaPr43Ih2nDh0OICWXIhcgMay7Dw+SMp4yFRCbAQMEEJUZMzw1AwMGGZieGJcUOTW9BwoDHhucHRkcNKyuG7ipKyaDKZsLIRAGKTEMNTQwGp8bNTKQHRqWNTcxKqwxLyyzJyaIMzCxMjC2AoOCOacMJyYiLScpOro+G6k4CZk0BIQYG7CJBgoKKK26Co+OCTCgCwyDOh4SCwiRMCcDIxWGFqkhF5CkMCwIBIaHGxAGHB8RFJiWFRGKEZQSFRuZKygLDgOQA4OBAoKDAICDAICBAoKBAYGDA4ODAYGBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIEOKHEmypMmS/kTk+JcDiD99J2PKnEmzps2bOHPq3Mmzp8+fQC/2K6AgRYkUOh7ggxm0qdOnUKNKnUq1qtWrWBv2kyDjRAoMQlwQCbA0q9mzaNOqXcu2rdu3BPU9SKBixAwRMySoSJGDKdy/gAMLHky4sGGo/kYcUGBkIBAFLwz4O0y5suXLmDNrdtsPB4chLwXiG/GC8ebTqFOrXs26tUN8LD4v1advdI8UM1zr3s27t+/fUv1J6EHCA219HTBwIFIFuPPn0KNLn95QX5UUKBQUQPJgiAsOS5BQ/x9Pvrz585TxFUhxQmyJC0JeEFmAvv4/5AYUYFAQA4hf+wAGKGBMtOGzgAE7sCDBAwbIIIQHA47nDwEXnNDeCSoosMB/EXbo4YcSHaePP/jgQ6IEKLDQAojP6ePBBShgUEAVRJ2gQAccsqjjjiCKyJQ+M1RYQI48tuZPAz7gNpk/OVQYA5FFRilleSOK4AEQLQBRhQ4q6CAClFOehg8PHOAwmUD98PACDv2E6eab0/VjwAUK4FCUDBhUQRucq/UzBAc7iIiPBC8I4QCfiCaqGz4BXFDCPyWQIMEMoSkq5ggc2NDPcUZ4xpyloIaKmT79eIAEAUh4sCmYohYmV3sNiP/QDxABLAGeeK3mquuuvF5kBA8WCqEACSm44EN4vSar7LLK0gZEAxcsscQFI/AgQwkrMavttty+eRw+Vs7QwbMukCBCt+imq26EPg7UKQoSnLnuvPTW22K7+DTgQgob2uvvvwCr1o8AIzRQQAM4yLBEDJUG7PDDEAcmpwwoWOgCBgLMFvHGHBPkTwfcFSAAAR3Ii5A/DggQwMorF5CDvKR2UMUDBRCwKqsdK6QPEAI00EAMD5S8Z85EO6zPAjqUIO0SRJAQgxFgtsCDCiokYLUMRIzQmEAtxIBBCS6gIMSGQxcN0Yj+pH2c2Wz/608VLCgwxAgjKCCDChK0iZAROrz/cIEEgANOQFn/iMBDCvC9IAMBa7ft+OOP4wMEEP2UCAShDybUAg5LkEViiS8xhc8MVXTwQAkJIFE25Ky3vvHaxyGxRAn07c15AWrf17iIHgiRuuvABw8x2v7008AJme+tg40/CzDDzQKJOEMKqa8u/PXYc0vqAw0YwMISMjSg90FGsPDCCUtYLQQPlP543PTVW5/9/PTn6qIOd7+Awg5fJjTUEBKIAcISkJ1DRe991FOd/OrHwAbCaXvdY4EQbPAAkxmkH/0I3VCW8IIGnEl6CRRRA0dkIpiJMCG1MZGP0Aa60K0QZw6MYWVo04IGTKtfB3khPobgA9MUBH4KXGDw//QhAgPYQAgXGAISXNgugzDqAk5zIT4egAMMwIcEQ3jAzZoowy5mhlMKOIEE8IGQE96nAX4zIEGAaEbsuUgBKpCBUfZClhdyyB9IEAIHDsCC2eijUyd4j6NcIIMRtMCOXkykZY7Tjx28gAfjK4gZhfMCDJzrhyFsXPZaMAS/BS0HI9iXnl5YEBGwQGl89KM/ZkAAD4jASjaUAe5IqchaAmZE7nNRhRpAxhyKEDkY8MEQ9Da03oWnjdergtUqeJ8O2OAF8YLJCgeSrxSMYAd8NFm7OPmCHfRSd5q0pTjX4o8HxIAAM3DADB4AmbH9hwAD8UAMBJADdVJRcQ+QJqk84P+BWqmgAR6YwYrmp48Y+OACW/vHkWJ0SXAOrZxC0IEDDJDNuIiwBebjwTe5OM6OniVNCvvHBVJwNyEUgHAGEcAS9jXSrqRAfNL0R
wHgQwQfHGAJQhBCACwYvNFwQAcbnUsJmnNA2DmgKATwhwE4wAIm0nAGOXiAtUoQRI569KpXweMQhvUPEihAAjnQ2EEcUDcSXOACNhhBFQg3IgFY8axwvQDu5rc5H/AAZjkg6QMsuqcWjGAJIzCRBMrk1HyhLmwlmCUtscpYqxQPCB34RweM4EcokQqyDhjXquLSDwd4ILMd6IADHLCp+fGtByMI3X08UCwB8LWtKVDAl4RDWB//lVMHCoiWEATg1HA29rfAvWWnUMvE6bnAtQORnn70dEa76tAIRujATIVQQUQG97rYXQuphpBNEVWBCAmAZ1Fro5cLCKAK3FEABy7wAAd80Ef5ys7Wppnd+to3q4PV1HFkqoLkOZSTPeiBDAbsghdwoAcuwAEQkts4AvQ3suOF4X0nTGGcyAUFKYAQmjrJgvEJ6gE70IGI7WQrF/CHmL8UQKEaatUKu/jFN+kACR7pH3wIgAguGBJtPia02rTACC0IsuF84KXZtEAELUgbiRaAgR6wybowjrKUY+IPASTgBGn93glYADXa5GA/M8jljgfb1B0/wAZYlMAIklaoUS52/8pwjsie6MuQOQvxqlMkwQleIB8JfOk4BBCL6iIsnAPgYDZM0kGB+Swf9vX2znGO9EFGdzDAxQAJXdZZP6rQAAn8QwB/lnAtU7IAVKnKof1YQFjFTBsRkExE/gBCDk5FgCqoyo6ilnSkj4YBGRDyH2JhQe3KeLkStIeQNkhqroG7QL8sW9fQlrMHqmWwAkigQjaA8EH6QZoE8CAADQjjBfoS7XKbWySfC93b2INcgxwtBZHZVKx1gIIRRPLc+M53RV5oBAy8oAAnK8C++nKcAKCABNrWt8IXXp0CBbkDtVoCUQ1ypBNcwAEiEoAKhvpshns82jQsAAt0QIIEEEF8Af/HGtloEwPFzfXjMId5q3mAYxQU0LLT6wEPRICPfuRAAQdQAcNiTnSGF8gDD+hZbiVwyBxuBUMKqBYJZOCDEwyd2Z3NwQxc4tBw+sMI6sxBDjpQFviCXesopnPRI+2PBVwgxzzVnREaIARfE0EBLBjwyxv7NhakgGkYCMAWT4hHHBiFaUIYQhVc2PYhXIAIS0hBAHAN6bW7mJIdLmNtZiCAAgTtdClgbmP1gQT2xJYELnBB3hD5RGHZKQX95S1tjEexRUuA8pW3/IQpqYOBuvuEBUWBDfrXWCD0TQc5MIIIYpCAJTCOlPpoQWaBbAQP7KCHXUbZAxawABxAE/cd173/RxuGHBsQV/OwC9I/G3ZVfQSaOUzh2yM3iszoPaBQOHJoYr4P/PqLH7vlVDAPwB3hFkiDdhCrFADbQQDQogIs8Gd81wAcQAK+V3EIx2C+JRBHclD+wWCjwX+743//F1xKdTf/kD7tQQIZkxDqUSzSIhY8gHHhx0BjQljJRQBMM3FNRCoLQDM8QBceZFEfeHu4N4L3RUQFcE3/wAIjEADupTPLNwQswIQPcEgzyECbc37Rk1cysFcYyBTl9HZ7lgAwJYSk4WmUZ4QThjbUxH5lRCIY5EfBZQSQkVrSExbt5lDRAwQFYAAj8DUsgHFmCE1dp3ZqeIhdFH04oHNMxIVe/zheyZU2+OAAPJBgC+aBZwh+iLiJiegnHPAPlSU7RIArXZdDebUE+YSJIDg0LcaJrjg/EogBVjgiMWBxatSKhWMDVmdCQ9hGIviKwOg67icWw9Yp8/eFUDIDYDMkRbV/BmBGuBiM0gg5j1EaOXBkBuACCZBPs4cEa1UgC4AESJYl3dcDF2AcO9YP0cUCPTAE0DV4vziN8mg2pAdvJZBbKqB6N3MdJNAvIqADCUAnw7IEKEAEiuUiPGBWKsAB5UIt8Jh78xhDJERGadNwJKRQXlciGtlzcedAfVcCCiMjW7QAiENu/RADJACS2rg+C6BKVUACFYMC+rNnGNBl0RiRXf/UDw8gAQnCAt+mRmXUAgzIAyzwD883NOqBA1PIAjiAAzpwUrUUfQ6gdVwHa5n1QeDiAWInLvA4idwndjmwAFXgXoaIk4pkHagHKTiGJ8x0MgIQHy9wABxgh0NjBDbAkDmVAnppAPdmln7ZNsgRAEBTBTSyPBeQGwlBAEPAAwagR0ToFy1QhwE1A1DVgX95mY6DS0PjADDCjDnEkfpAJrdXEJF5Ao94H5iZmmzDRUDgb575e8chmhwSmSjQAMqXZKqZm0SzXy0gWlUwAg5Cbr4UmxzwmO4CdClgAxigAwbwMrr5nMNDG0ZgAFZEBF5xUpYlIrLZOJyEU/BBMSnAW9D/OZ7/chyXM1IrpQBhlp3E+Zhg2IMe0AEe4FZtdoXkeZ+gsl9gt05DkAIskH/oF5rFCXwVqYFNonN9yUA7ljL/EAALkKBxMToPAG7/8AChNhAtUAUCIEABgGn2iZ+ACXzOdAJBuBAC6p5QkhgHYAOXKEOkUgBTl48lMALEh4APMHWpF5IFUFqk0gAgCWxioQNJBaIuOkmEggO+h0LbmYEDYQA+YEmJKFN0EXU7QAQpklBO9AAiNgJqZjdkOBk9igFMKAE8cAEqcAEHSKQKOkkj0AM6UB2YgqIIgQ87wAE+JEMyhgI7hw8tIADLaFmSYwSV8w+X0wMYsGCzhyVt0g8z/6AAPdBHajpC6qhCtVEFFwBNZxJrqKk7L+EPfxIvquVjlLpBKGAA3+RAF4Ybx8FNmTecPnJ/QhBZbXQkE5ikkZo9MqUDPNAA4MYDQnACJBBmArEAXwU196GTPpMC6+UzSqFQ3bGrgjkEHCRRH2o2SpUpN8Nf/iVJK7QVT4ql4JQS3jcEp3qr2OMPAQA2vkY1RMACizcQBWCL0jQDM3YCPXBgMnkjzvp2hOQCJ0AEPOABHUk/jXQAO8BESAB54sWtzmJtf1g14jkQ/uABnTYEv3qY1Wquw5OhB2MABlMF4Lp8WiRNRmBtgWMABrAgbUIqNNI9vFoFpeVFp0WXtDE9Xf84nBNyqS/gA+GDUqDYAAnwAj2QJG2psb9lPRCZSDPLRL1zXK7aAkhQADHgdwqweM4mAjQTbikwBABqtF5rKXV1VyLiiK6qnw0gA9nGITvWAVOzAxD6tXArJT6lAJVFAAkwVGVLEC8iAwLgbD5CAHKkYXE7uGFSUAdAAjapVJXUUAz7HyKAAST6Xj6SA0SALYR7uVOiTNtonpARLwH6I7LThfpkWwHwAikAlJibuiDCTSblAVWwAyhAO+4SAwUwUEPRAKniAAvQKC8wfK1WAAFga6YiASVwjKp7vB/yRjK6BNdZKbIjBMz1j1VTAkqTjzawRAr1kiowOyBZNUMgiMj/G74DMiIdEG4kYAM8gAQa4yI4wLUwMUVbZVZiGgAlEz0tIHLDglZDIACZJr7+ax/fonyU40IaCF0fVDzKJwJYQn4jkiWvNMBM+r8SPMEUXMEWfMEYnMEavMEc3MEe/MEgHMIiPMIkXMImfMIonMIqvMIs3MIu/MIwHMMyPMM0XMM2fMM4nMM6vMM83MM+/MNAHMRCPMREXMRGfMRInMRKvMRM3MRO/MRQHMVSPMVUXMVWfMVY
nMVavMVc3MVe/MVgHMZiPMZkXMZmfMZonMZqvMZs3MZu/MZwHMdyPMd0XMd2fMd4nMd6vMd83Md+/MeAHMiCPMiEXMiGfMiInMiK/7zIjNzIjvzIkBzJkjzJlFzJlnzJmJzJmrzJnNzJnvzJoBzKojzKpFzKpnzKqJzKqrzKrNzKrvzKsBzLsjzLtFzLtnzLuJzLurzLvNzLvvzLwBzMwjzMxFzMxnzMyJzMyrzMzNzMzvzM0BzN0jzN1FzN1nzN2JzN2rzN3NzN3vzN4BzO4jzO5FzO5nzO6JzO6rzO7NzO7vzO8BzP8jzP9FzP9nzP+JzP+rzP/NzP/vzPAB3QAj3QBF3QBn3QCJ3QCr3QDM3LEdzQ9aUPSrABFRAECHAPFfoCEG1fEs0APwAAMAACGtAF97fRXpRrI6IEEBAEHw0ALg0CCPAAKGDS4/9EG/vgBBAwAQiwAQwQBCFgAgMQAS4NAzAA0gxAACdA08BDBSbqD12A0zq9ARng00C9BRRw1QNgAhZgAS1N1C4dBEit1KwTAQOwAfsQPf5wD1C90xpA1QMwAFdNAW9tAiFQARmwAQgwARCgBCLAAELt0gAQARvwAEkt1pADAxSgAQggBW3903ANAiCwBXNd1wyA1xAgAk6gBPewD2ojTRBgARFwAzcQARXQBA+gBYYNOS4dAXEt11pdAQyg2JfdBJrN2W5IcRBQAQMABQwAAXKh0antOC79A0Fg2SJA20pg2zPIBWrtBF0AE6cZ3GYD0iHgBKol3cAFABSAANgdXBH/AAPdjV0bQAHhjV0aUN7ond7qvd7s3d7u/d7wHd/yPd/0Xd/2fd/4nd/6vd/83d/+/d8AHuACPuAEXuAGfuAInuAKvuAM3uAO/uAQHuESPuEUXuEWfuEYnuEavuEc3uEe/uEgHuIiPuIkXuImfuIonuIqvuIs3uIu/uIwHuMyPuM0XuM2fuO2fJM4riNUMAFtvQEQMLA7DiJUgAADUNQRYAFBPuRSAgFHDtik7QRMHiUbINRerd0QMOVFUuVWQNRF/QMIkLFa/hwTAAKAvdoWgABKIOZj7hv3kAEffQMhDdQDUAETcNZt3iESvQEWUOcI0AQTUAFyrQFLnufjSwVO/yACSjAZXDDRFkABFrABa27oHuIX/gABGQDXdn4PbE7pvaEP+xDoct3bQu7p9bHnj24CG9AEXGDqH5ISGgAFFBACat7prq4bl54Ebx0EE0AFtn7rrKEPXSDqA5ABItDqmsHUwB4Se24CIKDqk75IjLvsIMEFmK7pE/DclmEB1E4S+tDjuj0ApE4ZA/APGdDt3q4ECBACg37shCHr6E5lTbABJgDptR7vp37pDIDt2o7v56EP94AAgl7she7v5sEFTkDvkC7pyG7w5XHpmX4EFYAA/e7w4wHqgf7Wve3ryQWuPwEBSWDxQCHRCGABIAAFq97qyOEUSvAP9R4CEyDyP//BBSKgAQMAAmmuBB5QlP/AASnQE/4Q6CDwD1cg802h729tAU/gBf9AHNF9E05g8xRg56Vu9DhBBRvABFPwAStAAwqQBT3B7tD+61bPEcZTAzTwAVOABUXgBGQ/ERpf8GUPFEYgAQnAARxQA1GA7XiOE9w+91GR1BxAhiKw0ledBHJvEoZf7IBPFSkAlQBf8hQABRogAlXfEVN/540/FdGdEgqf829vEKreBJt/Ft8O8m9t5xW/EbBe+m4h7BlP8JcPESXv8q7vFo3O5xSg6k7Q8BhR7iF/+28B8XAN86t/ERgt/HAR6gNP6mIe+spvYeqe6hvg7tGvG/ow7/UO+gzQEejXbxlIn/nK7m5Rr+nfP0PMTwFHMO4EsQ8I4OwmIAXnfxnNjvMbENUCse8UgPjznxmwDhAmKFAAESHCPxAWJvxj2NDhQ4gRJU6kWNHiRYwZNW7k2NHjR5AhRY4kWdLkSZQpVa5k2dLlS5gxY+qjgoACDAA5ESKQ2dPnT6BBhQ4lWtToUaRJlS5lChHCEQAwpAKIIKXpVaxZtW7l2tXrV7BhxTaEQMEKjH9ox65l29btW7hx5c6dK8IEALp59e7l29fvX8BsqWwAgTfwYb8BAQAh+QQFBAABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUDAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMA/wAs1wBjAOkAgQGHLiYJXFxcTUMTJCQk79mBs7O0YmJk4uLkaGhopowsYVMTz602TExMRjsM1NTUEhITmpqcbm5s1LM4x6g06ursf2sd+tZE4Lw8PDw8np6cvaAvdGEatJgsvLy85cI9IxwExcXElJSUzMzMgICA7MY/iXIg5ubkoqKkp6en3NzcHBwc/PK4GhUEi4uMmH8kr5Iro4okNjY09vb0bFsVVVVU9tBEdnZ0pp5kFhYUPDALQkJEXEwTw6IyLCws7u7sRkZE27U3cnJ0jnYfhoaEVUgRkHsku5ot/f38enp8CgYEMjI0Z1UUJiIGNi4MqpZM6r808vL0UkEPrq6sDg0InoEkPjcMFg4E789EYk4cemEcrpYudnZk/vbg4r5MyacsEg4FTkosQjIMzs7Uspok1rIsBgIE2tbMWHhgqqwg3MoQzNy0FBBgakRwWqSEjpawyKoQPGKI4tj09KIw+uroyMSwpsq0KGLQzPTUDgwYrqjEzjKA3vjoBAwQtqbohJZcRlxMxKasZnx0qqyUuMLQdhyAclpAChg07Ni4kHgM1so0hLaEtL64zNz02MTEopwsAgYI4KBA4LwovKw0tnx0CjAoBhgUuMKc7sRU6vro2OowqprACggw0LhU7OrYelpwxHwskEpIemYI3MpUdHoYPHQ4wLhAamoYqm4selYYxtzUCg4IxMowrsrsRkpkFGJQHjgc8uoUqoZMuNS0PC5oFDBoznzIMC5Eru7I9LZENDQgNCI0yLzMdl7IkHB0qsQwJMKAWmIYcuKkBAQYdpaY9MIUcqTgMlJMWnYYPC48PA5AMlIUXkRAuMi46HxIkHBIWF48qoYMGigozNCEeEQotOowqpwMcnRE0uLU4KoQWF6AHjBE6KawKCDAWEooaloo3uj4csIk8MTUrtx4lo580tbISDogSCQU7MCEsoAs9tjkWGZc0qJUHhg0wqJUZmyESC484PSE5tjY3tiEeGqEkGQciJSQiI540MTwiKCIAgIMAgIEBgYMBgYECgoECgoMAAAACP8A/wkcSLCgwYMIEypcyLChw4cQI0o8qG+ixYsYM2rcyLEjR30V+QkESVEfP3/8+FX0yLKly5cwPz74EcBGi384SBLUN4WBzREBVKyMSbSo0aMfpyBIAQXKkRQYQA7V92BECh8mTBzI0GMo0q9gw4pV2gFCiwMplEgd6C+IDAcRdASQ4gOFCrF48+plCfIk1QJppVbUN0BECgYjB3QwEcDr3seQIyMUjLND4LX8GBzpIFKgvxE+QviTTLp0ZMo4LKvFHAFKCIKZTXQYYLq2bbCC9aW+THJKCx9IdipJ4QDD7ePIX+bevVrqgxAUIuzs4SCFjuTYs2dcrjr38+jTq1/
/106+vEPBzClPGeJjxM4YxI2bn0+/ZHrMCGRAGMqPxoHZ9QUo4D8PdLeWPgzI0MFonrHXAoMDRphcX/+oAJgSKa1EGAgHBKASPz2AkAINEpZ4HEgPBMAAAg5QMAIDDPTQGT8IUAAXTXSdgIOJPJpm0g8+UGBCU1gdMMIDIz2AhAMmpOAkBD30KCVphIXQwpVYDsFAZwLxo0MQI9jAAJJTlmnmmWimqeaabLbp5ptwxinnnHTWaeedkuWmjz9T+OPYQSkxqFJJIPHpJ55s6jkABB2MkFNC+qgQwAgZFHACAzqNJNgAQxTQwo6IquldCzIcIUJXk/kTQQpCQtHeWpqC/zQFEkcc4QCqoaIpGD8BgFAAFABOxs8PSBhAAwoU2ADrrgx0wKQIQuWqq1RKWBqAAyDQBmln/iCRLGWC9YBCAQiAIMIAf0rLo1Q4hADCD9Rl69Bn3x6IYgsi0DCAueimq66EhRqQAhL8DJCCvA35M0S9JPUVgAhD8KOCCA70+++UIGEAghRC9XAwugkvrKxOICnRQQddDeBAxZlezCMOEIigA0geZ+tvQQoznCS+NIBU2D8Wu8yjqikEkZgDHUTLUM4jD8QrCCOotCG0LQs9oD4xiCACBjjgoAID2P7wAJepiuxVag7Q0PXXWv+Aw6BWRxibDHTLAEWpR7xFIqR7iv+8k8p1112rDCl4GHeEhI0QAgSMQ1DAET4UMEKUkxU6AgVGEzRrBhBw7rirnqp1uIT+lI6SPxgcHAPcIP6joUn84HC5DSl1tmefKPGDgdYYHDq6gHrSfLDS/6SOgkggqWDACCOAoGAExb6+qdZ3/Y64nvCBQLlADFCwoOtegkDBPxSUX34HA+mphAjaW399bv780LvmAczsOlUMGBAADfwHEMAP6YOfDnQAIfcZ8IBruhlEFIjABjrwgRCMoAQnSMGiMLCCGDvJFLx2v50Uikz/gBsGe6QbBGTAMhRoAaxch7oQUEwEKAiA70ZoosyIAC0UkAEKVsgrB/gAhgU4gAn/okbDdX0NAwNwyw4PpBgfDOEuPRERpopYIp30R4eU6c8BTuW0/JyAilLiFRbX4g8b+OAEUxAMBmw1BTDySIxLJAk/ImACFNwvY7banhvlFoAxrmRYTvqB1KryFPnskY9+HAkOThC5GGFgSU/5wQUPiR04tkx3BSif1kSwGKhMkpInsqRBIoUACKCgBQzQwcq0BUr6OEyHDdEHBqAAggK2kjxSEeMXC9IyqrSAAkj45C2pZBJ/PCA/KDDdYPa0o9KpIAiyId4ws7MnG6DgBCI4gglM2YK77CkCvgpBBkTgAxBEZZq41M0JDiCQA7jzACBYzbBQUJ0UNApk6EynCpTw/48eDKAHAB2A7WL3zwGoQIT5TKhC07cQYS70oRCNqEQnSlFqLktPDV1mbu7owRVWdCPBM8kD/qkEJQzgbZVDUQxKWtIYYEAoB5IYQP2Zxo9qJKT6UAIEWOUDHxygADRoIy+lsp68mY8CBwgBTFGEgA4coKcpCAEGyGZTiYR0nlIYQRCQgIIDuIiqgvkMFFCAAAMg4KxTJWoLbBUCG4ygAz5wAACrehGcouSPNICCA6oXK8sdADF3zE0P0HJO3UDgVXS9qUcHooJVBjCsIzgADTKkUTyKgEttcQ1VE2vVZdXOHzqgAAhAFVjL+aAF/vuBCkYTLrT0IHcqCMEBEMDZ7f8caAo/QEAEQlCdCEgPslAQUvkc0IJ+FQoJJhBBCxBgTeKCsLbnCanrepCBVg1RqBvtCwOWS4MAtMCHXxRMVSiAFRP4oAPnhO55poABBvwglTBloQ7O2ikUSBKjUimd0xggAszlMgaVGoEBIpABEIQAV+ptKAaC6E4QGAChXbqWzXC6QrGikWYikqTrVJCB0NgywRRRQQSGwDwbxKBqnmmBCSIgNQr/MQAUKAC6aITF/W5RjyCO5WI9EwQntpjCXUKADzi2pxG4ZicYWJkhc5zR3Pw4UigwAW1z6eTcqICRDzJJBGRQgEPtScjtY7JCgqc7fanAazEgFcIIQ4O0UiX/ADE4swrSHFdMgUQHWQnCSb/WAQoM4cNi7qhgpgAdpBXAMuXsmWdsoCCYYsCnvkK0CAzgJ1kh4alIA0FWUIDjQA/VyRgYAgrMBQIU6Jk/NOhACx6AohGgoAPsk1xX4MeAEMC61Ajgq6dTit+N7gRuDQOXi1G862IjztjITrayl83sZjv72dCOtrSnTe1qW/va2M62trfN7W57+9vgDre4x03ucpv73OhOt7rXze52u/vd8I63vOdN73rb+974zre+983vfvv73wAPuMAHTvCCG/zgCE+4whfO8IY7/OEQj7jEJ07xilv84hjPuMY3zvGOe/zjIA+5yEdO8pKb/OQo/0+5ylfO8pa7/OUwj7nMZ07zmtv85jjPuc53zvOe+/znQA+60IdO9KIb/ehIT7rSl870pjv96VCPutSnTvWqW/3qWM+61rfO9a57/etgD7vYx072spv97GhPu9rXzva2H9IkU4i71OKtjya8wINTUIAWJpAAItQ03knYAAeIsJJ+KGACJKgBCXiwg81u2wUaGMgSBsICF7hgMAIJPAwAwI9+fKEBiPfABUhAAg584NuRN0IR/gGAL2hOCDDIJT8qf4EiLKEICUA8CS7AexJIoAne5kAJiND6oX6hAi8AAAvCsIMNlOACV1gAB17gAiqMnvejnwAArr2BgbygAkRgAv+XQFIGfiSBBQDYAQckkAAYaGACGuDABXiwhBwwYQofKMLoSQ+EErg+2izQAP9ABRrAAwmQBQ3wAWVwPymBfzkgAEtQAS7wAjzgATUwASUwA0TQBDkgfEmQPgDgAkDge0IAAA5VWwpAEAJQAQmgAUbgAjOQA2kEO/jXAETgfDDAARoQfy5QATNQBAswA1zCAt9HNvrwATswAzvwASfIWSzwDx/QABWgBfEnBDsAAElQfg5IBFngAhzAAxMwARzQgwpQBR+APDnAA0QAghrQfc4mgP+gAUIgACwQgBXAAfDnAkvQelNgBQAgABsAeRIgAfAHAxVwhUJlEACgATMwbUj/mAD/IHwCAABEUAJ4qAEJMAMNAAA5sAMsqAESsACYKASaeIYUkW0AIHhiWAI7sAQuMAEXIAEvUAIbEIgvMAELUIiH2HpNqGxDQhA+8A838A+wOAEw4IULAAQe4Hs8AIbxBwOkmIBTUAaYt20Ss11OFYwrQACiRwKiJwFkwHtA4AU8MIYbsAM5wAJJAGEMtW0m8QVg4AQXIHpP8ATeOI+xqAUbkANf0IvV1hdJ0A8soAAwwANA8AQXAASD2AUEQAArwAVc4ANmQFasBG590XnoV4kJIH+Kl5AvsARN8IdbEEQFQQFiIAX/AFjYdpFJgH9V4IkukAA8IAFXYAFXcAEJ/4COyreAArEPKoABtJUQryFtFymQTQCBEkiBEpB4FqB4QOACO5CA/vhssleDzVcCG2kEY6CDS0l6u5cACtAATxhusvcAAGCDG1AEObiDCeACJSAELpCDE+ABHsABPrh93rYrAsmJN+iFPJ
CL1LcBRNAAhEkEFWAEpMd+WdAEU2lsUlF+X4B+DaAALGgEYWgEMFACSyCN/4B+CuACC5B4F6ABRbADYzltLMCTd2R+D/ABABAFaXmLucgBCVABCpCOFVEGAtkAG5AAS3kFu/cPJfAPH0htvPcCDZASAvkBnVgB7heGXjCG4AcAfwd3HxAFJfACXjCIFyAQkKht3tiGM/8gBHg4iGJYBJrIAo73D0zwmWHIA+O2fySQi4ZIfNX5J02QBRzwD9JXAeXWexpAnZRlEJ0BhwOhiei2e0WgQEi4eu5GenVpoPmWACTwDwkgofhWnAVxmvc2AzDAofrGAkXAA7X4nfnWBBsQmhPwAkLAbxdQAxoQfutpb5MHcBj6bwDQiG63owcxnP42hdDnoPvGAWo5AVjQb/yoABxwo/k2AxoAfP/GASDKo1RapVa6EHf4b1PgAgF3jP/Hb6f3b/qQA5H3b0QwAVeapgLBpQBnovymDyxgl0had/THb2jKD6AXBXYaQof3b4FnBACHlWo6qIRKpVQQl2HKby9ApFN8mm8TUAGJyG8SEEKFWqmWeqmY2nUKMKP0NgUwcAFMem9fIH+ham9PCKqZmqruVgXd6W/8sAMe8G/9UAG8p6P8JgSD6Ib8BgP86W8fQJsa4J+qOqzoJqz9Bqcx+W8skJk42ob/YKz7NgE7EHhlqm+gJ4BOipf5pgAB6m8BAQAh+QQFBAD/ACzIBFkAZwFhAYe0tLRwcHHOzsx2dnRqamxiUhR8fHzpxD6Pj48pIAbn5+fw0FBYShTNqzaJcSAzMzTu7uzCwsTTszlEOAw6Ojx5ZRyAbByKiozS0tSMdyIzKgxvXhiurqxSRA/IyMguJglKSkywlCyUfCQgICCjo6Tg4OGenpzy4qypkiwbFQT6+vk8MAxYWFiampzjvTxKPgw+PjzBojFjY2TyykSGhoTW1tSkiiQSEhSbgSQmJiSnjSwaGhyujixOTkwhGwReXlwWFhTy8vSqqqxGRkS6nCza2tyWlpRCQkSehyZSUlQWEgSCgoTevTy1myy6urwqKiwODgsuLiy+vrwTDgTixlRqVhTCvqRSRhwKBgSigiQOCgSukiQGAgSslqCapITa0MBGNGyGYCTEeCxiYBCsvOxuYHCsvMyUZoysbCys1szsnjCyoFCWlIAeLigkCCACBggkGjTYuLh6eqA2WlR+ZNjooLDC8rBEehhCPCxyckgSIBR4ShzEoKy4mAy8tLxQYBhGEEAMCDDa3MQKDggqNChUOAyytqCmorBOUjzOoBB+HoASDBiUgHSyppSOeph8cgiWYBwGDBhkepyuvjDu8jB2quTK0uCabhyshAx0bmBogBzQxsTkskDMxtg4WBi8uNCCbnTQMICsoMR0boiUclB8hByAgGS+2tyIuoyYgAyagJimvKxUNER6nKDS5jAEDAheqowuatR+YITy8sSywqy8vLS4eHToeEiQShy4oHSSnLjywrDKvhDGuMRkcIDU2PRkgnA+KDx+SnQkQDT6+OSAYAiKgAxCJhAYEmAmKjDCynwGGhQYNmjUxvB2xijcxsxEWpB8eCSmppCshEhUKhg0QjDK3tAsJjBoYJw0PEwWalROZFQSBBDQeMh25Ki+yPAMGjTy3NgmxoBGQlDOuOxGUmxmQhwMNiiMpJC4oOhoYCy05jBmUCwkLkSIlJDsvBAuJMAGBBgkPgyInGRUUmxobhzO0qhiYHxEfGwKCgwGBgQKCgQCAgQCAgwGBgwAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgL6rsxIgqFKPn4HeQHxOODHfoyqlzJsqXLlzBjypxpUB8MGiQARCDxgJ9Igvoo0AAgxYmRHlBoKl3KtKnTp1AnQhmAoSoEAUN8/vyn7wgHDE5yYvDwI1/Us2jTql3LliA/mz9AyPDg4YjWgSNaFEFAYcSTAUWkUGhLuLDhw4gZavWZg4MAu3f/9ajhpKfAESQUBEiZuLPnz6BpLub3BMDjxf/4BYCAIKnAfAFKmNgRurbt27gVLy59+m6+C0EMcE7dQ0Dl3MiTK+88mjdkn/+AtFBAYDg/ClI8wFjOvbv3p81NP/8XCcSEAhnD/z2Q8vi7+/fwV+4Wj7o8deswImiPz7+/f92k0ecbAsFZB4IHx/2n4IL++eQcavwMEERrA+lDQAkkjMDghhy615h2qP2TRBEARPHTDtMNkF6HLLboGT/5AAHEEVJgwMIOQIR0mQlF0BAFECMQMBYMW7lo5JFt8QMDAi1woAAETphgRBJmcQUCADVwgAAJAnhAgGtIhilmVPwkEUGXAqQpQAQBgJnPEAg4EYEUJLBwQ5Fj5qlnTCQdMcQQR/gJ6Ajp6bPDA0f0teKejDbq6KOQRirppJRWaumlmGaq6aacdurpp6CGKuqoLPFzwxMU9MDCA/rgaZBP+Tz/MUQSIDwQ0lZaxTprD0cQSuqvn/GzQwA61VDCBUBENtIOBHDwD10ABDACrvyMEIAQdHkgxQVPuArst2uRhoAU2KpAwg7KGnSDATUI0IIBFzghgAFADMRPDgh4AMAFBhiAgAHdgiswYVA88IC1sqELHUJDCFADATvkcwMITmDQw09QGCAAAg/ckA8UMi468MhQ3cVCwum6RQAEGeZqgAJLuHaEvj3p0yrJOLPl08mzpWyvASq0tpgMJXCQA1exIfDEAyz8MISvOUcN3s4oL/xqAEG0kKxP+gSgwH5QIFCCEAhw4EGXJoBQpdRsi0Z1z1YbNKIHVL71BAkq1ADCPzuQ/xBECU5cIEMAJJQghV1tJ87n2wp7e5kRgA/AggwICJD33pipgIEMd+oTBQkQ0ACm4qS3xHPjjuvzQOUlFOEBCS1g0F7fKggxrVY/lADAE6X3ztLpIS4LAgEB/MA0gg/8E7YKWi82cwSD+S79RcD7rNCFCNyANMu3+wSCcclPL75DkZ3cvPWpWc1PFBzUwMJPIGBwuMsQzDb+/Qv5VNITLwMAQhQ50NFB9JGDIzzgaEkwAQYugC6BAOECRSBBD6LwAAJ4oATowZ8GRwIbAPwDAypQgBQAYIQjiOwf+ZCBE6TAAQDQhVu4eoARxgIAJ9TAAwZo4AZ3KBAYEQAAzhKCEP9aiAAYnDAoOPkHCRBwoyK9JQcEaIEQlsiCrfHwiiR5wtFGMIIcPGEEAnzVRjQ0AiDczC2w2gEXzYi+K7rxjXCMoxznSMc62vGOeMyjHuvYRoXELSF93CPOCDiEHwSgeLZyXA91xYJDmkiRUBiC5JIlyMTpowfYSlMNAhMAHQ4QBFwqgQpUEADr8aMHHlCBFIhUybbpQwZaIkDTLmCsNiUkHz0wghFokMpSpuxehYNABFjZSqlVq2OcucEAABe+V/EDCiPwGA1IGTyBrMsJLdAWMYsZtT+uBwNDQMho+AE0X8ZNHyxwggGuNExFclNgo8FOXcQ5mnKOpocPIAEJolD/mna+k22LyRiGNDSSelIzRECggRRY4DkXbvOfONMKOj1goxOO056o0ccPpECDekVBJw+F6Mi4NgQsGeBOgFwMRheTT
56IpDSCcadIf6WPIXwlh+gzqDl9coMlCCAAOYLCEeSUBCiccaYCqykAMIDTPmplpYzhAAQAgICqCqEECgAADUyI1KSWVAADQJ1MUwPVf4zgAh6IgFrZA4EgFIEDLFhbV0f1liv9tF73pCdZDwodAsKAAoB9wFx+2rGxzjVT/BhqVpNAQYM9AAqOg9ED/oS3FvwpCif86CoNe9hM5WOaeXNCDVfIAcSN5AktSJMCQpgmBBDUIFFwggey0lma/4LgAlXNbVVpYJllEUC3uZVBvQ4ChMFNq7Y0hYJylwuFGxh1IR9jrnLDWBAYURe52M2udrfL3e5697vgDa94SfeWfJj3vPk4ob3Qi0KjOvEt+jBvqzg73j0xC7dVxe0FjJiQriwht/+oqnDtRUAZ0OAfNJABoehbXzFdJwIQ0BcAasiBHqgXCkuAAIkmXEMDvLYrJKjBDQWAARPwt8GU4sd6PMCCJ0SBglGA7C1fxrEXUxBqwMQQC2CQyyK04LgontR6hHC09LUxHwbAYF4FolHKUEAkwCyCDOQa5EetBwBH2EHEqlkQJJcgh2U8qvIuoIDsLWYAEDDCcKsMKQpEoP9o+hTcE8Tc5Ze9DnYGGIKM/3EDBEAgZotZGZHZHKkHmGCEQsBSEUxAAfXmgwBOcMIQL/hTlOZjmS3zCRQuoIISERpSM4LBF6PwAwCU4Mf9ldXBCkiDIlQ0NUd4M8B2kIO5qCBBn27UWxajDxC8+X3itNliHgiBFtBGeQHAAIlMQAKzBQHXuY6UqVqgAhUBiB8n210PgfADE0SaBARIsrajPal8IEAFwrn2ZJwQBaCMwGBPgELuSEBuSvUtCJu5tgwUYLSEhK0EA6ByvcV0g/nqAwgEaBdtxejc+eYjnwqggfa4ZhaR3EAGGABAbwcupnyw4F8/mBwCMPBllL6Kfxf/kOUPDGBqAPAXRj0wgAxmSReGMpjj/nk0RQUiYidIK7JRiF1VlO2BEuromQSIgEC6JMHr4txIwjrC5P4hgyQkUiH5iEISfiADGfwgy/NN3whAwHUZgGDBT//UH/+Y9ra7/e1wj7vc5073utv97njPu973zve++/3vgA+84AdP+MIb/vCIT7ziF8/4xjv+8ZCPvOQnT/nKW/7ymM+85jfP+c57/vOgD73oR0/60pv+9KhPvepXz/rWu/71sI+97GdP+9rb/va4z73ud8/73vv+98APvvCHT/ziG//4yE++8pfP/OY7//nQj770p0/96lv/+tjPvva3z/3ue//74A+///jHT/7ym//86E+/+tfP/va7//3wj7/850//+tv//vjPv/73z//++///ABiAAjiABFiAh8EPWKAEKTAFXDB//JACBSACSGABK6Be6acEG9AAB7AALoAEFQh//DABMXAAJHgATOAAKQCCBeACB+ACLjgDIZAAKsgELciCM6ADMvh+/LACKMCCJNgAFTAF8bcPHRACP2gBCXBz4YcFDBADRFAAKdCA85cAIoCC9jcFFiACPmB/+rABNqABSjh+/MAAPNABFsh+ExACBYAF9rcCOlABoyN/CWADVlh/KSACOLCF9acFFvCFYSh+XFAANjABf/h9UFaEa2h/aQiH9qcBdP+oBPbnAyKghfYHBX34AVxYADpAiPXHDx3AAwxwhus3ATqwAVrQiI9of1QoAjlIf0rgAF9of/uwASGwAunjgAwQAh0QSOmXhhvAhvWnASjgAEJYfwmAA5RohxkQi3tYAbWYibpYiOAXgjxQAKKofo5oAXEYfz6AjCmojDbQivMHBRWgA7ZYf/pQACEwAfbHDy+gAwwgjeDniBuwD5GIAw4AifWnBBmAA+Iof+TIjPSXjuvIhUUYj53ohhtwjemXABNYjPR3h3lYiX2oAfaHBbTIjujIAE3QAe2YhgUgj97HDx9gAxVgj3bojfanBQ6AgxdJi+c4kAXQkR/ZBCHZiR//EAIVwJDolwI2kAEQOX9aIAI2oIcD6YwrIJLdxw8FQAS72Ikd8IRKyX072AQ7GYk/uY3wN5RIYJTzxwXOCIb2p46cWH9F2AFSSH9pWAU8eX4lWQGnuI8ZkAHfSH9YUAHh2I4FsAVJaX9nOZXcB5JtaX70CIz0143EWIkOkIwDuQHmCJjax5S6mJbz9wJqOJjlpwE6iZL0lwA6AJRXiId1OX/6YAE6IJb0xw8b0AQvgJnkx5GJWH+LGJf09wE6kI/2d4dayHbud4cx4AAd8I/upw9RSYISkAHzV5I+eAASYAHy1wEa6IIsGAL15wL/wII9RH8+eJz2xwQhIAI6IBAW/zl/E+ADK2ABKIAEBZCbHSAC/yACHTCa8SeDOhACFKgFkHl9WnCeIVCK99ee1Bmf9ecDBYAEIeAAEzAF+Xl9GuCYbzie9MefOlAFPrCg19cBOEAEOJB/9UmB91cB/xCeH+Ca4TcFL+AAIZAFUDiWOBACGfAC+picXogCFRCT7rcwSWEB1MlkEemR7+mj9xeeE4Cf87cPK1ABPPAPG4CJ9GeiGRACOLCe94cETZABGlmbXmgD94cFV/APVFCJMmAFJ/APCyAQ8rl+QBAAHkAQNmCfQzp/MvAPH0CLPLCkDsgPWjABGdAEOMAAXgl/+rCFIvCdnfgPCeCYAmGj8nel6yBJouMXoClgoda3nva5ArQpfxqgozawAcL5fk66o6YXEAAh+QQFAwD/ACz4AmMA/wAVAYc+Pjz6+vf778BRUVGcnJzFpzjnwj721VO3mjF2YhqukiyWlpQmJiTMvIDbwFu2trREOQ5SQwyQkI9qamlgTxR9fXzVtTzFxcSagiJIQhTe3twuLiyCgoQ0KwseHhrKysza2twiIiSwsK+GhoTW1tVeXluNdiFjY2RrWxXo6Ofw8O8SEhSojyrhuzsqKizc1LDLslKioqTGvoTi4uS+vrympqTm0Xm6urzq7uz24YYyMjR+bjSSjmRzc3RKSkuKioz345TKtmzOtlyyn1E6Ojv69t5aWlwaGhzuzkymllRmVhikiSR+axxWVlSGbhyqqqzl4swWFhTq0nhGRkQ2NjRubmyCfGTmxlhuakTOzszS0tQnIwnguizyxjxCQkRqbmzCupyqppQ6NiQeFgQUEAQZFgfS1thGQjTCniw+QkQ+NiwKBgQOCgQGAgQSEGB6YAhWOBi6fqwEBBjoxBCqsqz21tRwWtQoOjwCBggSMGg4cDhoQBgmYND4uES+yuzsqLSInIgMDBg0OEhgZnikqHhWdljk5vhQWliaoLCqnpCk6LREJBDOwrTKMoBwVGgSMEQICDAKMBx+eBhyfGxueBi8sMxCWEC+1LhiVnTywtRuoiS8qJxCUGS4xsSgggzg0vQSYDhuouAwIkTk9IhwQGxu4oCCQBgcOBy8oMDmsjhaXDyOdHTO0OA4DkDGzjSovjTK0NAaFjQuUExwGoCufuQ2Iii+7syo5jTkfsiyfnSepETe7jQIGCASGhwEDBCktOjCxrgmYIgaBiA4LmgwOgyasKBiUlgcKAzm0thsdkSkqMSAboDWwvCGdJwmIMDEfizoojTO1PREJjim
jJxSZmSMiniopBDcwsRiZpT2+rxUVnRgZlDGqOyGWoAkwIB0kpjofkimbiygjLjGrBSIkrDY5uxYchjOvtBGRFBycmBcZmTO5OTOqLTuwoiAtIT25uhaooSunqigcqwKDggYKCiAipikxJwCAgQCAgwODgwODgQGBgQGBgwKCgQKCgwAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpyo8N6RAT0k/OPg4t49g/kGSIix4ISHjxRTqlzJsqXLlzAd5quiJYWKAB+meET5756HEVpAZCEBIgYRnjGTKl3KtKlTgff8DfjRg4OWD152DszXQ8ONEkQGENAQg8HTs2jTqk3rMV8UfxtEYN2JksqFLE323dtH5AGIEkjXCh5MuHBCuvcYyM2q9V4JFU+ObK2QQkI+w5gza0aLWHFOuv/8VQjwY9/AfU1IPHCxubXr1yo7y9WpdYWEFD1MQwVw4QIA2MCDCzco+7PWKAtmVNHdkwqNLD6GS5/umq5n2h7/HSGgnPk951mmUP8fT36w9dmgbaeo4N1L79/l48tvuvM6aNEBJPg7bQTE6vkABhiTZ4xl5xhkkgmUDweVXSbggxBClB0DT8yVnUA60ECCEabdQ8QNIBgRWIQkltiTRUaUUIEWM0hQghE66ObPBCRcUIERVYgAggQemOijj/cMQMI/MwQQQAoaaMFBFFBFUQUNGoAg1A8bjPjjlfPdE8IJJ/zD5ZdUeJcPFUacUIIXUViJ5Zrl7aXbPnDqRVycF7Jp55145qnnnnz26eefgAYq6KCEFmrooU4hNtBe/sh5WGNwNorYTpE26iiimE7q0T4b9FCDCDX0sAFzBCHmjw8/iCACASUcQZc/XnD/EIOqNfwwwAqYIqrpXgA8AcIFIlwAwhNekHriTvmccAEJNzyghRY/hLDTERX0puoNz47QY66F7nqEBBosoEMIVEgwAwHbLkqXe1mcwEAIAzygwXKbMkCECyF4wEATIJ6gJrd8KrrXAFnccFRPOjjLobo7rTDCDD/g+s8+JZAgwgbH1ilaCiPsB3CgiirIYMdbcaCCBBJPusENWQyAUmIiaFBCxjzlU4EKHBj7ccCgCYTcDBPwdKAIPWo6hVcYDxSFBAFUgFg+LlDhRQ8faOHyzoCG/I8HYwFG0AApPGCWpgOoQDRB+YxA2qtNyJWFBhec4DHWfu4ThQd4H2GaBzGE/4iUDync0JGmTQQQGdoc5EfXPgBwQMATFzxQwtx073mPDhKI8MADBAywD99+fx344I3dU7gICYqs+OIrRAHvEzd4XnnAOixwww00xDCAP9vN4O+iTZjNwK6Ai9DTVj8EMMKkUPlTQhYxhDA7n58zYL0H+dyjXm6nVaFCDK5qSkQWvvG0XQpVQKUVVDrccAER01vec2gTzFBDulzP0IM/u/I9r2734E1OaLaoKXyABlSIn/wmpQMdVSEf+0gWCBC4E3/kSy97KQEIREAEOIXAXChriwuiECe4LCAFC2CSAvO0K3+c4FkSyEgWtFAFj+1jCk+QwPA8EoIFgOAGHKhADf/8UyAi1KAGI+hBD35wgxnQwAf/WuGPdrUCIzxgBkiSXPYE4rwJUoFSLqjAB7AIAgIUaycuWEAWsJiCGVxgBESgnBTvpKnQhIAIXiBCCPj3shUQQQd8RNYGAOAFKuhtcUcYpBe8AAAXrCCKc/RTnUq1qEha8pKYzKQmN8nJTnryk6AMpSiZso8QAIAIqEQlFajgAv6NsnJR4EAWPkBLWl7gA0aB5CsLlQ8j/GAEwOTADy4QgBtUaZdYi8oK8sHMfGwgBv8YgYOQGT8i0FIn1IzfPrxXgzRlM5n1eYIGJnCpb3KLUkYI2w7N+bGGSQBnGGRnOwN4F+zI82P+6MH3Hrn/vntiKgQ1SEHQ5udPQwUJaVorqKCQNQIVdCyhCs3aXtxjNYhG9E8ecd4FeETQi0pUexsI3yQ9StKSmvSkKE2pSlfK0pa69KXEYR6nmlCCAWxgbruq0z5cYAQjnIRSHgBAE7g0AEDClDBUPMEDSPCsB5wgZTn9SGJ8+AER7SQEw5yhFgpWgY4cVS0tfKEWYiiBqtWQZppaQQ+yEADf0SUEFSAAB3pQhRHQAAQLkN5XOaOpBoKgCivwxwpKoIUH6CCqmxqACGJwAbdSKgpRyIc/BDuFpTZhr2dhHv1mEAP8cWd/CNmJDmrQOXN1SX39XFoKTotZ+sxvezzZhz7BxzC6/0TBrk04wg8cuyu+iMBqrU3U/Hr3O6gETwR61dQ+jECDEUTBNrytIABOkBENOTe4ru0n6EREEMAJDrV0aeATDru06HrkCECxCQq9il2laM8LA4jvFE6y3b+NDrweiQIT8/IP28hsUjv1QRM8JYIJSKy9MQmgCEjAYBrIzX9eGwjY/gPe5V6AAxKzjd966w8j4kWXCI7IPaLgAyM0wQgDGB5yViu0xyD3IEtTAfQIQIAhBuACCzBCIDVWBQ1YJsQJjhOcPrKgFHBgbgs62YEJEoUKEEVKDL6JjCuwn/lRTAMEUCGQXxKyfRDsBgnsyQaclRcDoWQfDPDBAHzggykY4f8GKJzCDl87ghT8QI5bXkn/ThgDL7jAC3wumpZO0ITsbSpO2lmABk6AwX3ooAQ+0AEDNjCFEYCgonnmsnKNKBQafIAENShWT5yXBREcVmDEpVQT7nIB92VhggbONEx626kaE0BUcuKVBCrw0/XdI1kEAMBbqzCSJ5C2B3EEsawfUseo3O0tF9rUczE4yV8f8lUrOALeIqvsZXv72+AOt7jHTe5ym/vc6E63utfN7na7+93wjre8503vetv73vjOt773ze9++/vfAA+4wAdO8IIb/OAIT7jCF87whjv84RCPuMQnTvGKW/ziGM+4xjfO8Y57/OMgD7nIR07ykpv85Cj/T7nKV87ylrv85TCPucxnTvOa2/zmOM+5znfO8577/OdAD7rQh070ohv96EhPutKXzvSmO/3pUI+61KdO9apb/epYz7rWt871rnv962APu9jHTvaym/3saE+72tfO9raHsgUQ3wcb/rGFDkDgHxR4SAtaYAADJMALIDA4G8pQkAT8AwMsYIECIGKAf/S9ABH4R+AHToEEmAADBWHB4U1geIhY4ABdsAATyFDwxS8e8wYZw0QSsIQCIODuU5jByNngARQo4O4mvwcEFICCe8Se5D7BgAnIcLSS9yMBCugA4Et+jwwggAJpGFLJt4ABJvjADMD/BxsSgAEsTJ7kbYjA/xKSkIKTdwADQiiCQIZAcjIkwAECCIAAhAD8CFgg/gJwQMk7gIAcFEEABYB7IkcGTmADApADmkdy+xABDpADV2ACI5cdEFAAB2ABStAPJQcBQnAAAWhyatAAHNgB3WZxRACCBSCCJUcEMhCCI1hxYhAEHJgBLUhxHQADFYgCGEhyEGCDD6h6OugAQJADCNABJMcGKAB/AgADEaAzHocBNvB/DpAApEdyNhAAUAADGECEJWeFQ7AEETCDEvcC//ABVrAECTB3JHdjE3B5WjhyN1YCKMACX1hyHzAAEIABCbAGJQc
dZcAEGJAuIgcd/YACBVB9EYCGINcyHmACfdcCz3GnDyH3ASWgBGjQd333emDYcDMQBgjABQawd5+YAG0AclboAFywd3zXAgmQiQ0XAEDwiahoATuwRR4nAjTwAlKABJYoBWBQAQPwcSGgAyfAA0NQAEBYBEcifR4XFWWwBWdgBSJgBjhgcvywAqb0BfgWEAAh+QQFAwABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUEAP8ALPwAZADEAIkBh8aoNOC8PLy8vGhoaGFTFGJiZNSzOUVFRq6urrOXLK6SLEc7DH9qHFhKE9LS0+zGP56enJqanBoVBJqGJKSkpPLy9N7e3OLi5C0kCCQcBO/XcDs7OolzH25ubI+Pj2xcFk5CDiwsLObCPvrqpJh/JDovDM+tNHRiGrqaLObm5MTExFRUVKiOLBISE8zMzLygL4aGhNbW1JB5Iv39/NbGhHJydKOKJDIyNJaWlBYWFHp6fBYOBFxcXH9/fvbORPjmnN7OjFNCDIqKjE5OTAoGBHJmNNra3Pb29BoaHHZ2dYJyHMOhMWhWFKKJLOrq7B4eHGJOFMCoVLa2su7u7CIiJCYiBNqyOPLQQv733CYmJPbSVDIqDA4OC0IyDHJaHObGTF5aRD42DJ6CJLCWJI5yIHpiHNK+VBIOBA4KBD46LAYCBCImLAIGBOzq+Mri1AowKHhqhPSiMB4YNB4wRERKZHJaQOjm1Dx0OJBKSDxiiM4ygNzKEOqmsHhEKJB4DDBSTOLguOCeQAoIMPjg1NrW9KyslAQEGH6SJOC0VIigiAoYNHKk4OC6KDQiMPDE1JBkHHaWmNrm9KqsIKyawKKcLKpwLLyqNMimmPT4vKqITIS2hHpacNKiVHLipBgoKLKwxLZ8dPTCEEg2MPS2QNDE8FhmXHYcgOzAhCIsKMjI2OjW5BRiUFhegNjExFhYaMjEsFp2GCTCgERcTKqkUChi0K7K7Fh4YK7uyKqcDA4MGF6iJAQMEF5EQB44HKKyuDwOQIiwJBQQYKymdM58yM7qMDoiEO7EKNbKNKqIDCw6LLDAuCggwN741DJSFI6WsISWXJBwdGZ8dEhKMAYYFDxKQOh8SJBwSHLgJMr01Lim6KTKtGpqGGpEcMrc9MC4QHZeyHpmCGpaKK7ceMR8LAoOCEZaFEgkIMiqELKkuNKcNHh6ZMTKMLbA0B4kIJKOeLbStKrEMMi8zFhKKLbAnBQwaOCqEMrctFqkhDwuYAICDAYGBAYGDAoKDAoKBAICBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECMO9Kev4j99/vxJ3Mixo8ePIEM+9EflwIAe/zpwySiypcuXMGM69JfDw4UKFWYYCZFRo8yfQIMKbegPCQwEHjzMiPGk59CnUKPCzMgP45MjTHv6lMq1q9eFWkkuber0q9mzXrX+ozJWLdq3cIOqZZu1bNy7eEGGXdvWbt6/gGfO7csysOHDCAfXLYy4MeKwdMkydkz5L+QZDpC4rcz5bs+KWbA2xdi5tGcqK4Z0mJGiw4EVT0zLRsuvQwonKXBOcWKBx+zfXvVtSCqkuBAPQjYAX868ufPn0KNLn069uvXr2LNr387dIEV+/Ljw//N7MKw+8frK6wv/r+rW7oH13egghIKAfzkme9fKj4cUCkMYpE8WA0SAAAIQdIAEfIb580QEU9ymE0/v7ZeRPge4MMMUOhSEIQIXGKGCCv8IcECFDMblTws86LBCAU7EQIV+BWn1BA4uqOBEDQT5s4EAMeiwwRNPbHBADikGRlFGSBghI409ZsSFDiroAMOOBHGBQwoDhIVikp4xecGTX3q4ggo4ZPEPlgL5c4MLAlChTw5PtEAamID1hMSYM5Y5URYICHADPzqwedEAFkBwQA0HRjBAFn7i+ZVWe5Kp0IpCuMCDP/z0YCg/MFTggApwCmDEFAiEIGmYevIJ5UD68P/gggctcOopjwJxodQRCGxA0QFSVBBBequ+pValfSbkTwgCIAApp4V2MJCuM1hwwD89HTBWsWh5iexmBHWaQg9I5JBDCB5MQe5KnR6hAoUZUeGABb5ya5a3Y0r2JRcQHOHvvzMEPAUFdvKQggrPZvSEC9ZGaq9cYSFhwWJfxhrBxRHgQIEDM7iAQwEYhcDwiT1tkEIMkD486YUDOhlCRYUNiC2254FXFRJCOKFDVdjq6oQQOVRkFK/8qGyWPiH0kIQQFTjhQQ1J3ODTBi4g0EKbYXW6Y2E+qpACAjrogEAFLlxr9Ff8DODE2ritbUQNxB5wcGzYhsVFD28zJhwERqT/kIIFFJx49qRU8MDDCoivYLiaua5wALFe+pMFbJPRtAHiG+Q3uJKbp+jwTJ2HLvropJdu+umNofg56kB91gISK4HF3xNB17heCzlcfSfrP2m1gY4UUHGpVjU5UXZBXKxAgQsOVN1BU7zLRCkFOb3rJ38DpHBECr5N28MUFwgAAQIWoEp39C/1xEUNLlCQQpzXX3iAACNesMJEVPfGxXrzc7k6+iORnwA8MIQYqCBZynoCBARQABzYjyArsIAUkCQQQk1BCP8DIFEUFqgbZEFECETIlFwwgCfY5H4CwZABX0aRmlygSxoUiZTwtqksWOCAr7rIEFQQAS5wIQIPbFMO/yKQAgo4bgWZioBmYhgSigzBAThYSQgugMMySS5QvvphEOsWAgpMYQoWSMEUBOCrDDIxMVd8F7ZCcMMQIg8GDihARloARBTWLQseaJbGBCCAAYzHjGc0yN0uIMeMhCAG8KvYE6M4RweugCVXjCNGMLSmDuQwkGDZQAx6swEj1eBkA9jA1QRJASfAoJMbKIAALjgkW6XAA3+kyApmoAJiYVIiKvyiLnMyA7LZkSAtQIAuv8jLI1wABuEppbQmcoOOFe2WEinKAJKQhLAlAQfb80ABhNcmWK0gbNbMVAUQ0IEsQuCCkNPHLGsJzY1E7pDwGwg/rrWVsAzRfizRh9qMsP8C8XBhA2PzACDbiTWtTBGHKRxCx2xpTwjgUyNFOecYDVQ+NRLUnQZVgbNgdYALSIGhlPKAC4bgFAd1wFQhUkEPVHVRjmhFH1SYEUHkI9OC9uRcf5zInLIQgiwgwZYtTV1QhWLFoRr1qEhNqlIDWc+ZMSRyUNrLUhnChSeE4AY9jd3w9NECnvY0aO+B6kAvKpwIuECMUzACDjYAVO/wYwgUKB8YETCEZ9ZNrFM9iK6o6IGlrTIGgkNIrMBHAR30QJgpkKNNI5dXAXXymSTxAK/aSpAcIKA1z7xbBSiAJC/dlTyNrdwGOsaFxFDBBUYo468ugIDYSPWzY71oWJrpgtL/lseyhKzKnD4phKKJFbSNhS0/ajDZhOhjCEZQawdqEAEHUIClsPVscGv0mScaIUDKSpsDIiRGB/AApHidbpQo8qML9GA8CRQCAmAwgAHkEQGB/e0lpyocKaQABrXyUwso8ELfTglVwvOsdMWbwh/dNz/A9RFmbIstKgiAe7CdCLimKxwBHBPBCdaWCxhqWS59drzzPSp5pWDeWCZ4tCiDqHDmdT8Bvza4Fd
5sCMKDu1ol5AkqmMJac4CEDVCPAq6Vb2xvya8ZuAsCFDgQUrjpHXVu1wh8rKjZovvi4IIqBs1zQAy2HINUfaknN+iBAFxQNR0wTsKMJXCbgMoy2TUV/43AVbOcuTLkOdv5znjOs573zOc++/nPgA60oAdN6EIb+tCITrSiF83oRjv60ZCOtKQnTelKW/rSmM60pjfN6U57+tOgDrWoR03qUpv61KhOtapXzepWu/rVsI61rGdN61rb+ta4zrWud83rXvv618AOtrCHTexiG/vYyE62spfN7GY7+9nQjra0p03talv72tjOtra3ze1ue/vb4A63uMdN7nKb+9zoTre6183udrv73fCOt7znTe962/ve+M63vvfN7377+98AD7jAB07wghv84AhPuMIXzvCGO/zhEI+4xCcOFBIgxAaWRoMMMF4Qi/+DCf9gAQB88I8AOPoMDP9QAAYyEIYG/IMDAbiCCRKgABKQIAAPCIDJEZ0eCWCgAQkIAAtY8AIAvCDoHy9BBv4hARkEQAQieIChSwACJjCABAkAgAh8AAAOnKABJShBAjhABIKUwAQ5N4AM5vwCjyuEAzZAwRKOTgIGfIAEBvgAUBXAAIZmYAks+IDLCZyAfzQAA2UXSAZKcJAE1J0AC7gItkrwgsELBAMvqNEClgACO58BDT4HQRn+gQITGAAADqH8BwqSeYIQ4QNLwEBwX9CEE0Se8QwgugH+8QIFyGD1S2/JGUjAgsRPlQMfOIEMFACA5r/ABgywfEz8kYEXMKDOrIv6QIyeABb8fgEYQMP/zLB/qS4AQPot1bkVmu/4E/yjBBIgAvkjQoQTvCD4QQ3A7k8A//l7hPpN0ASU1U5YgAX/0GUDsAZn4Q8lAAAMMFUpMBAHsCBcoQ8NYACdx2lowAAAIAFn03NUZ3XQQX1DN4BJggYZ0AUNcAIc0AQJ0HrNsXr/oAYLYH2S0gKyt4IywAIowHs1xwFrFx36QAAAkIHwgQElsIJYJxAAUHNftwBQ2AAPGB0bmAAeyCAo4HwswAEglwFcAHolAAUkgHZXEB0kKAP+JxsEAH9sIBD94IEnwAIGIHUlJx00+AIyKCkSEAQcoADaMYQvEHlJwgVVQAATAAAmAHjYsXRcAHdX/8ggD2gCMzcQE4AdEuAPGMACSmCC2/EBgqgdJHAG+lCDBKBpZpAAaSABHzAGjJdpGqABDiAFUaAAjLc7lKYBPzADWKABQDBAPHBmlAYANHABP6AFI/APM0BFl2YDRcADUfAFQDAFR4BpH2AD4deHYJAEl8YBDsgCGZABNuB+7WFpJ2AANtAEe5gA6FdpAfACYiABJ5AArYhpOkcCGGBzZ6BpT0cCQZAAH5CGgBYAMsAAKPCJ9BgAZGAD1rhp9fgCHJCPmSYBBiB05weQfiaRD2ACAFACFtlnGCl0l6hpEoB2AcAA8ieSJuADJrAAHemRKWkCsrdpI8l1jxiRBnAFVv/IaRKAcyQgfi25Zzv5ACbpVJi2kyIwlD+ZZ5iIczIgfpo2hA/wAE0AkZl2Bk2QczmZadSHc0eHf5aWERggAgKZADGJaTQoAgYQhwZpaURQBg9wfiygaf4gASzwACxQAiwAcpiGiQYgAjKAAdDHiY+mD0GQcwxQk5e2gQ+Qdwx2aQCofy7HAVpJeVkYeVN4aRYIACSwkZvGBQTJALEnl+BoAx/wAohZaQyIAgxgf5tmgepIkHK5Ay2IAcsnlxjAdxJAAhyHaaN4fnSJhplGBF4AACunmkl5Z3NJAgpABFUAAHn4lWLHAZhoAus4aZnpcmFgAprWDwzwAhhggSYwj6j/KQFiYANEoA9daJZhoJoCIY68SQAo4HJOmWkbqHJMJ5cS0ARNeZ+ZRoMosHrUp5WvF4iXp2lcIAMJ4JV7mQFb2A+cNoovUIoCsZaU9noAQKGJuYMK6pgMygEO2pogoI6dxp3y2GlcMAETIAH6IAEYGmkZIQEJIAMLcAKlV5UnoHMv8AIyYISUtgMLwABzqAAfUAKNOWnVmIUPQIvHeWfP9wFAygSCCWkgoKJBwJmcxp2mqQ/z6Zh0WXl8iGlviAEfEHOJGKOXxgA2AABRqQBMsAVbOmkAgAIkwAEGkJVnugCXSJfSuZdrhgH+qAZyiQZAJwJjsABR6mhEAAVqipULRwCoX4kBS5BzT2eXG+qiCxCVOodzBiCek+YPXYCpOveWZUlpc8kCUYepZLADZomXOGcF9rikdqYGW3ACDMAEGQCrc4ar1xEQACH5BAUDAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMA/wAsEANZAB8DNAGHzs7M+O2+YFAUr5UsXl5cysrMTk5Nfn58zrpkREREvJ4v0tLUvr68aVgUmH8j6cM9Ojo8mJiY1LE35OTkcF4WPj48goKERTkMKiIHWFhZeWQb7tR0amps29vcqKin+fn3ubm58vL0eHh49M1H082pJiYkY2Nk4bs6kJCPe2odbm5s9uakjHQgIiIkrq6sJhoE1tbUoKGhKiosWUkUrplEp4wpioqMLi4sSkIU7t6UUlJUHhkFoYglkXojhG8dOC0M7u7rsrK0MjI0cnJ0Hh4ccGpEro4qSkpMUEMO6ursEhIUNjY0Tj4OGhochoaEhn5UxqpEFBAE48dZ7spAFhYUnoIlx6g0wsLEPjIMpo48MioMCgYEtpYsxsbEDg4MMi4gkpacwq5UGhIE/vjccm5k2r5cqpIkqqaURkI0DgoETkYsFhocBgIEyuowgkAYHDgcwKQQqp50vLTMyLTsnLi0jGIYsnh01szgCAgwOC5oJiDApKwgAgYIqrjEup7oVHRY0NT01LS40MLwgJJcPEhIyMLYEjBEuJ6sSDhAMDhEYnhotMTI8MB4qqCobqIkdlQYdFhkWqKEWHIYiHScZFh0Cg4IBAwQzPDUorjoHCgMxMS8QlBkYmxgRDgoEhBgxtrQVEJsiHRkXkKQLlBMmKagrJoM2MLMbFg8bqLgVFw8bHYYBAQY7r4QnJogMDgM8KSwzMLEQlhAZkJUynjIuNbcwMYwjnSA7rBAop7EMCJE6sAojIp4otLMEmA4inAIJMCAiFyAGgYggkJsiJKwNCxE6HhIKDo86uj4pmwsxHgsUlgYJmDQyjCAgLSEVDxIdJKYYmh4DAwYwrSsOHA4trSgiFQYppagcmZYCjAcrKAsGhY00J6YGCgoqvCw1MpAcBqA3OjoNiIopMQwqr6obuKA4Pjo1tjAxL7ECBggZGiU+uboRCY4EjBo8Mzc1L4QcFrUoIQM5pwwmJiwuMTsRCQQaEAYVGJYJmCIVFx0CgoEAgIECgoMBgYEBgYMAgIMAAAACP8A/wkcSLCgwYMIEypcyLChw4cQI0qcSLGixYsYM2rcyLGjx48gQ4ocSbKkyYj6+FFpIQRCCX76TsqcSbOmzZs4c+rcybOnz59AcXrRgcIDiCsoSuiLGbSp06dQo0qdSrWq1atTS6BYAGDBBwZLljLFSras2bNo06pdy/askiMZEqiAAUKI2LZ48+rdy7ev379QUy6t0KWu2LGAE
ytezLix48dYD+tLUMDwXciYM2vezLmz54KSKVte+rm06dOoU6vuGbqy3curY8ueTbu27dajEdvezbu3799tcb8mDVwzvxIZRETwIKIJ8eLQo0uXKvzwdMf6WqDoMCHEBxA3YF//H0++/Enc4cWb76uviQobJixMAK9+vf37+CUeJhwk/fP8evHjhRL8JLAAffUBqOCC+emzDxVKZAAAAwlQQQVMDOIlWQUH+qdbhiCGKB0/R6AQQRBATOBCBChA8KGIZG1Il4cw1mhjb/uYwAAABQDA44QGvHgjVTIi+N+QSCb5mT5EJODkk05SIaSSTxVJI5VYZqnllgpZaR2XYIYppo1eJjjmmWimKV2ZR6rp5ptwylakDF/GaeedeHK234F0mpnnn4AG2hY/MmSQgQ1JwHAAARm0IOijkEZqlhdDdMDdBx9M0EEBBEwp6adw6uNFCRDAVYEXkonnRQKMGupqBqem//oPPzfocOo/qbYJ6qw3mODrryYQQOeuxN5JhQkeXLFABx4MJxlBLUTAVQEFdNEFDBPE0Kd1EHjArIu56gpqSvzMOis/GBar7pstOAGCUUl0Aa6sAw0lwhAqqGDCAQV8EIFzkjWBQnddJBCuuOsmrLCb+9ywRAs6dHFFWLkSlNI+GKNbAQMdmADTYftwAIILCzBQwcELp6yym3ctwcAV8+LqJ64mTNDfWCklEAQKGYBgMr0rBy30mRBcAbPFMysRARAWoDqQPiVEAEICNwRxxckVD6311lQWffTTfk5mrcFjUXqFCvvIAEIXWGfN9dtwAyiZ1xAgjTA/IgARgZR3Gf8AQgQt6HPD2m27HffhiE83t9F1g41wCS5M4PFhMkTgQgIwDQ7zwZ4G7SDGoO+TrkBi8bOPzImn3lmqdNstpD4ZMLutqCJ0wcE+S8ngAghYO45w0PwY4EEQxBPvgghEPM0PBBZ48A9zQpSr+vSQsc646wh5gQIQIny8FOTZiiB+BDB0EIMKS0gv8+8r7zMEEDB4EEMM8qvQBOn7ZHDFpmvDEIQB6qOeAB/jtSVgzyBiA4DBoBWBDsDggcvC1AQYYAIvkK5Ob+OHCjpggRI04YNN8AKG9OEyDj5MCCJYgAvCM8AWNsZlYLEbuhCTkrztrSD7gEAGdMBDA6SwAygwABH/pPesuGlwASZgyrP4wQEgBEEpuCJCBCTXORdaES2ikoEMTOAjYd0AVUtpggl0AEaZtcADkvPeYdSoOcyhzIhzscANZNAC3N3FCxYIAQpOJxD3JQEFSriiIPPCjwy4IAhdCAEQuhAEF+jgYzpQoVJikpKa5SZc/xDC2shmuLe5DwgdMNrfMsC3f+BRj3Z00BBC4IISDPKVaymkB1zggVrO0gMAXAoEInAAgOGKHwRwgQlOxzmBEOEATvBQEY2YgSDEwEQg6MACLAAwJgLBBUQQCxVQ8JXGwfKbZRFVCUrQgnKWQAYlAGNKWgAwSupDCelUottU0gQ1zmxo7RECEZRA/wUZDAEAMJicPiDQhQnYoAIlqEAKP8A2cDr0oZ75kj6oYIEkxMA5pjRBQQvAABAcZQJXg6hIR4od2PBDBzCYGOlWZYNZooAAQyhAf0hK05qyR6IJAEBDSccPJRChBRDiQAciEEibGvWoaSniPlSgIkchpAkNTCJSp0pVq3hBBhdClxf8NoHurbSo/GhBCj0wrKqa9axAIWEtLTAE5QAgWywUlQki8I8hHMADdDnC6NDKV4+Qhn0HAez6rCiDGCxgAoid4BBeslIOvBWxXbBA9KrY18pCRCUQ0IEJRJABJfxuKV6AgAlUwAEhTglo1OMHEYRQASe5xGlPUwJr/1GBL/8K1rK4XUh7DrA/IHyFYl1qwhA4tgCuxABzgUVtbpfLXNC04AAxsEEMQBqz7IlgAgU4QAbm2gEX2AU0nWyueHObkg/uowKMMxMJQTABDqCKHwLrgAgs6DoMjve+liUOBF5WXYMU8kDOip0HnHrB8OL3wH2lm5n4YYLuTnIpBoABA1xUYAMj+MJVVbC4TgqAiR2GABMAwCMrrFwMm3iqGn7d4JJwACLswwtCiEAIYECAcj1rmSfO8VE1rBAvbBAGERDBXQEAyhrLCsdvS5UXbiAEvsmzqDdoAWx1TOWBptdTEzVBELpyhQjYwEeP5Jx9tSYZH3eBASr4GK72kQAU+Iz/AUE4aACpjGFdprchDTNABo7QggR0dF5ipqzC1miAK2DKBnb8xz4IUAD/mYhjQVggnets5xg+5MPmS16gb5swykkNBCGwQbqIEAP5ApUKQtheBOg76QMfBoYGZMhfl3IDvHpsfWJOnBdEAAIOHCDU6RpcATipDx0kwQNUaLWr2+MkETxwCE7C6EFotcPWIgsGNsgmQpAMt0KC4ABU4MAHRD0QyC0gA6XjwARQwGpli7eSVyhABzDVgQ5nYM4DyR8IFmCtBVzhAJPc9pi5ptYgRE/cKFAfpTrAgCHoIAMWYIAHwuLu++bMBihAAcYzftDXEYEAFrCBDUSQAM+Ki9tc/2uCDbrgMQ1+IOFPc89byzeBLty74vhNiRd2jrGdi3AhpuM5ynGduEVfoWkpUcG41Qc1d0WgrR7oAgpYiPOq03SgHohAnxjMYiKqvAvIw5gMtoOCZFv97BDdNUhFwIF8ueAr0BadAQCQGwOBRdBoz3vqlHDdJFjKgRP4wIxR0AT3TcAD7dSHEBgwbLzr/fHdFsJo86WCIQThA1cgOcbUbaTJ+JvCkA/9AC8Wul3rkUCDYcAEnFACfu4yBB5InuhnP/rSDeHl6lNCpeYzv4JeYcS0P/EaQbfXpZhOdAJcIxdFwEdcUSEDMegCVxhggwTgLvgmFst5hyByJ6jgBtKjFf8BDiByCxCAwImTjBJuICWwwVcGQhBC69WM/QsvhQjD5Tfju6ACC5JQWv62PzAQAxWAb/V3gEIzUQcAAB5gAhWwBAmQARBQLgXiBBagAxAAAcE0Af+Afgj4gStTSAxIdaiDK15QeKRRIF0gYgYIgi5YLFTQQDV2ghdSXwNxRhOANi+4gwkjA1vGAYcSA0EGAXy0TIMDAzfHg0o4Lug1Hx6FSB0AAgCEa0wRMt1FgkuYhYKiD0fQLwAwBEsgAwkwRWSVKyTCAAF1fVq4hoDChf1SdmKxMR1jY6WjMzAgAqVELMdhACaQAUvgfyW2D0uQAQRgAIx1QTBmAATQhwmQeAP/x4ZXh15J0CkDsU1JYAOAmDMkYwHZxGl24gUZ4AEAVTLIg0mkg38MUFwMmAHE5CDN1BVc0QURcASpNHSQKFIrlgEEgUd/VFSzkgCb2ImeCCcn1WhB5gT7YwNOVkRUcACb4gRDEAH29jH542UioAIiYFgMEGYldosj1QQxEAJpdorTNV8CUSAk0xy2CChn1AEttg9KYACpqAMHQyI+0ln70ATXNWBicYJK4AUv9lxJQFQW5o0O5SBC5V0w8V8FoFe/6AIwYAHLOIwscwRRSHVKIB+Y6Dvv5AQGVYSatAD0uEz6YADHZnZEZ5BXBzUN5AEqQAAiwDESiStC4AEhAAAi/3AER2AA
OmCILQgoTPQBMcBqheQ/MsCROGgCBKEENjABB1CLpaMEB/BH6vSIKvlNxpdq17IsXQBwMfFfmUItsHgF7gUqXuAEIeAETIdeBcORarMABkAQ1rRqh9ECGaACd0UXhWOVVwlLYtEErBIsFeBZ54gcpNV2+TIEHDCBoGKJXkU6gwMAR+A6LqNAFkMAsGdyDqID7JUEQHAF92aKfdlcFKkmE4UCSdA9lBMEcEmZ8ZYAFpMBsJds2pQAmuVmUyeao7mbiqME23MAaqRJluk7N7Ajk8lTJgAEHhBIqQJfKgAAhKebvDmdv+EgIuAvtUgZE8aRJRAEHaCL+DMEVP95MJp0NfJ0T9SZnrGhD5iJbCmobjEge1QIjjmoRKTWAWmGSZAjmedZmur5n5wBAdRyBEwBVRMwBEQkGUt1eLLneWzDOUdQMqC3jgBaoaaRkTajAzIAAU7AcBQzUYxCmIITBOsGATIgPAZlclSgAwRgohuqZeummRRqoTSqJ5UDAyXTBVEYmoPBSKD3D5zZAWfWFREQV64EA2fGANcCncqEnjX6pJkRVnMlPwdQAbU4dhYQOPgDAdkohCagpX10BE6wHPJjAxngiHwJpWqKGQ5CBUQQQnRofBbCdKD1QVSghjzlBW76pqhXkGv6p4AaqII6qIRaqIZ6qIiaqIq6qIz/2qiO+qiQGqmSOqmUWqmWeqmYmqmauqmc2qme+qmgGqqiOqqkWqqmeqqomqqquqqs2qqu+qqwGquyOqu0Wqu2equ4mqu6uqu82qu++qvAGqzCOqzEWqzGeqzImqzKuqzM2qzO+qzQGq3SOq3UWq3Weq3Ymq3auq3c2q3e+q3gGq7iOq7kyqZpgAFY8ANR4Hjliqn6EAUzUAMKwAUagAHs2q6Vmg8zoAAPMAUPIAEpsAP4WqoYwAMP8AAncLBWcAH3OrCRqgUKMAInMLEIOwMN67CPigE1cLAJ+69McLEY26g74AAd+68ssAMgG7KKyg8XUANWIAEwywI/8JMqW6n6/6AFDlADDXABSHABGECzNTup+oABPTAAM5APQQuq+rADPqAADRAFSQuqUUABA0ABKBu1npoGDTAAAZuyWHuoWyAAA8AC9vq1nMoPOFADVTCzZrup/IAFPFADFwC0bfuobEC0NYAEdFu3jTq0LDAAAoC0fHupS5sCA/C0g4upUzsAGiAGXpu4f+oFW+sDVwu5lKqvA9ADZWu5k7oFSMADDvADbMC5k8qyDuAAc0u6knqzOYsEW6C6keq3gJsGsAuphXu461q7jxoFGkCvlau7jKq1XEC5jwu8/6mvNUC2xWu86ckPTFADoTu6zLuycMsDqTu9iTq0DjAArou92YsBPv8AuILrvYZ6uxQAteR7qIvbuMubvleptQNAvO5bqJirue07v97ID0gAvWyLv4LKsjxgvXvrv/+pDz/Quq9LwIAquwJAuwr8p0urAbh7vw+chev7uxX8pPDrA5ubwU9avxggvR5coyzLvyI8whYKwAKMwjWqvdw7wCyskkPrA0bQwDFsobf7tBR8wwi4uF3LwwCaBmLLAi+ww0CMfchrv0esnlvwvKELw0ushSxbBSscxdPJBgectwlsxbvptzVgw1zcxUw7wWF8lUxxwUZcxnonxPGLwWrsjVswA0agvG98lSxrBGsLxXX8gW9bA3Krx3t8gGyAsy8cyN4ou0dryLf/mMPoq8hr+K5UqwFu7MhK6AViy8FpTMmtFseZqwWZrMl0tgNIUAUOgAWADMqQ1wM8MK9McMqo/Hj9OgUKcL2vzIMIi7A+0Mi1vIMJOwIDgAG77IICcAL/QLEjEMzInMx/kQ9IMAAK6wPKDIJxrAAKIADR/IIOcLLX7ILQLLDbjIDlwgM/8M0IqA9yTM7onM7qvM7s3M7u/M6BYgYMC8+P18wC4Mr0jGAXwLj5rHcF6wNi0M9oN7IOINB6xwPAbNBVxw8NUANYoNBWt78zgM8QzVwtSwEVjXNawAMAndEVd7oJ7dGt5gM1oAUi3Wr5QLUXcNKtNgP/4NIsPWkDENM0/13TpMoDKWDTOZYGGuAARdABOl1n/1AD/8CBQW1iCAAER41hGiAFAfABS41gSPAPKwDVGFsFPWCqGzAGAQAFDssCPBDVF6YBYo1hA7DSZT1e+tCyMJ3W4rXRGO3W4jWyLKDLoqoA4/zVDhDSo6oAX/DJxuoFGlADeT2qI2AFOEDR0roFW4vWhi0BDeDA+NrMbV2q3tyuBqyziv2oCoCx/4yqU42xBS3X40XUpH3aqP2kTxDWqb1cLDDarV1ZBDDUsW1ZOiAQZF3bfHUEAgvbul1VAPAPjG0FrP3bVAUABrADLPAAxm1WnNIA/MrcA8vPpHoGNJCwxDywPJDVpBoAG/+Q3Q6rypcdqmPw3RM7sRJQrvxguIXtqR9QGSSwAf16sAML3fPsqaxkAGqQBQpAAzQABWWQA5NdzZ8KBDEAATtAAV+8AxiABv/gApj9A4y72YPq1EkABjcQBQJgBhoAtUzhgeK6AzzAApy6ASRgAy2QD89LxyrLAlUw3phKBhfyA/wL2Mw6ACaNqQEgBSmAKhiQs7Tc3DSVA7O8tH87A1ss5CRFAmHgADuwuOdr40q+MkXAAx0+A20s5VOuMBz4BTPAAzsLvR285Q4lX2sAv6/NA1hwwmQOSwxlAp4VBT4gAQrAA3rb5g8FAjogOhggAPyqAImM5+DkAZjDzDxgBRNlW9eCDlHM7Mwc2wMwvuivJOIcm7ALK+kOhQERi7AJKwHWjOnfpLEce7ASUNmgLkhaG90Iu9en/kpLsQMaIAH/+sStDkv8gOU+8AM7QOG1HjRpoODzrOW9/ikkztfDLkgFe+yvFBAAIfkEBQQAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUDAP8ALMkAZADwAJwBh/r6+G5dF2NjZLGXLD4+PFJSUampqNrOnFpaXIp2H4+PjiMcBN+8PNbW1IBsHGNTFKuPLMHBwc2uNPDw8GpqbE5ADohxH1tLFNzc3OjDPOrq7NSwN7+hMffQRMbGxHZ2dBkZGJh/JKCgoLi4uZqanBISEiQkJEJCRPrWRHlkG+bm5CokB3BwcYeHh15eXMioNDAwL+/IQC8mCjIqCuLi5FZHEkQ6DKKJKEZGRLiZLJaWlBwXBDktDNLS1Do6PLKytICAf7aiVCoqLJB7IxgRBHp6fM7OzAoGBMrKzPHOQl5aRD0yDLKRLMKmNI51JNa2OUI1DEpKTKiNJJ6CJLqeL2pWFDY2NE5GEJqGJEo6DBIOBA4KBLKWJMaqLAYCBJhmHA4MGFBOZG5sGH5eyIaUJLTO8JC0JIy6iNq+EHJEcPK2QKyuILTefNJ8yHB8iLx8dJhwdHB8YOzwvCjCgN7q+EI6cLCITPKiMOb66OLa9EJkiHyYnCxk0PLIVMqorIJudBZkUEIOQNqeMEJ2OCAyRAoYNPDk8AoIMNIygEAiEIJ8ZBYQYOLyhLKwlFAmIJ6KmPro6AYYFBoqDAoyKGK
miAQMEH4cgG5aKE5eFGpEGJqUDMqaMBYyaH5EKJqcMM7ItLzMwAQEGLyodIykjIh8gPba5Jh6DHrkpLCIDOp8SGJohM7etHqm5Gh8RKyqxLTGpMKgEIBYGCwiwNq2VM701Lyo6DoiMLrGMNrMQGakJI6QeExONLrqMLKcrJSYuPLI1NzqMExgTBoqKEgmYFA6MOqosPLCEKqCLN68KJKMoHJoXN7u4CA8HOzq2DIwYLqquJ6QfLTwzKycDF4+GGB8dNTI8HxqCN7IxIyYXCAGIOzauGREQLTYvLBwLMh8LHriJAoOCIJcYM7e9LzG2IJ2RHBWcMre1JykgCAWMFI6YHJifJhwSKzGyAIGCGB2GDhWFGBiGDZWTJqMPOzEKCYwKJhKSOja2AICBA4ODAICDAoKBAoKDAYGBAYGDA4OBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnBjx3j9++wRapMixo8ePIEOKHPmQHwwERVq0YEFg30aSMGPKnEmz5kB+OJCoUEFDhYYeRUC8tEm0qNGjNfm58KAAAYEoQDBoKMIPqdWrWLM2vIdP6L979/gJAIAEhtazaNNeBXtPCAYMJ9TKnUtXJFsCNBr4qMu3r9+FYEu0mCACxN/DiPve24cAQ4+4iSNLPsuvgBEMAqpO3syZaOUeGChk7Ey69Mh9BXo0yGy6teuJi1P3cMFv6OvbuA2iRoIBAduvtnMLN83vRA8ABnAIseKDAAF8w6O33vdhwgQMSEC/NYJDuvfO/Ago//jXQoF5BTpamP3Ovr379/Djy59Pv779+/jz69/Pv7//zWFhBN0+tW0lID4E2hbgPgjWBtZ/9XFVQBEkGGCACAJ4pdA9JSCggIUksACDZl+ZIAAQIowgAgG/QThfZT31EIERPhkAQ3AD3WMCCSpg4EEEDWgQQQEb7VOETxoAoIIAbOHo4nf8RCGCCzCAYEIBI0xAQgkJ7QMEACNEYYIJ4oHJ5UUFACGAACOo4NuDT8a3WAkWgYXTWwQkVIIBKrjQpAl5maARP7Xho4CbcebHFggR0FBAQvjwySRbbiFhWJNc6dAnnInSd5dqeyHEDwsq/BAFCCD40AIGQFSFaQk8+v/Jaady3gMCCRMoAB2kQKgwgQce0JDrrq/G2iKtcuLDwgRIhIpQWCcY0EAEEfTQQwsm1NkkrJsei2x7XAmgQg9EJhRWAREYcAKhQigwgQGXbmust996Fy4GRhRA4kG2Nkrkov4Cxxa3ss5ab3Th0mBEFE7miFcDQjSJjw5TOcgWPvPSezBuXFHgIw4GG3QPXhj40CQIBmjAAqaZutnkxggri0ED+hJKaMMmRKABENDxg08BPUJmJz+3LmlzwzCXVlmSPwhQAAIuCOBCtgjtw4IGGhjwAQskYKACCbv+YwIFKfXw7gcflJt0bvh8gPVOWGvwTwO+PYsPmz7tZERQN1n/FnfcKhiw79qu6YhAAQW4oHjULhi2oQk4IB4FDKNpBELiiCOOgFNIE+7556CHLvropJdu+umop25U56p7KiCqLuHYlWGxtw4uDEUYgAQNPUxqUAEGqIaEDuuybrtpXvIk7ARFzIoPEBOokG4EKtBt/PGkmVQADEJQ/AGnUTaAGT5EUyC9htgPV6eXGnz/0j7ugj0Qxktenz5nYO3TQvuc7qkCBU0SC3Lsdz/JsEV//HtJpBJoJxYAwAiDK2BrmoRA9w3ESxP4QbbCYgIDPLByEizcAfdnQY1YwQMa0AEBhIADBdDggWELoWkwVcGXhcUFRoieEWQ0gg/KUIQjZGAA/3EnAgMAwQeP8gABf8gXGpKQZd4SS66WyES60BAIKoNii9oyArhQsYpW3Mc+QLC/IojRJcApAQjEiI92TUAHBQIjaXTUAgthAAANqFCrvjKxBhjgQw0IHPrkyJnwjIAGiEwkBgywq31QAEj/6EHTBknIQpqAe0IwgRCEAANBaWSMmjRBCeJYyQiVkmOnTKUqV8nKVrqyPyzTyMv4NUtZPghTr0TKYqwEg05ySYsE4Qovq2RLsOBDk70Uwhq/mEuJ6EgBwbJO4PQFzK/AQAE6AcAUaViEHixPAx4AQsSamRRxGUEHH2hBlroFxYvgcFoY2KbEoKmAInwAmhOIgCfJKf8THRUAHxt5Xj6pFktbKXNU8rwlg/6UspXxcyazugcMdtedYtpGigqoJS6tNsWHFkVHEcBAFHJUS4HwgwIJZZnNTCACFTjUozYRCw30SVJ6nTSlTRKCACgAhBFkzXEwHQkUYdAoAFrUIDcdDxStRgMNWOcHJguqUFnWQS05LpYFSaoW2xK1IojgByyIoVQ9QlUREMaTWM0qSjNqQ6qSgH9jBQmmWKoCEaAVlwfR6rFwOTKygDCuZGWpBkhwVbzmda0ltSEMJmAEsQKWIjrCFWEtOrSh2EkAE2jBVinoAgBEIIKPrQhLPesDBpVAjRbDhwv+NRBUgQB6YEMVWyBnBdf/msgDKvheaDtiNQCQRQRFtNCK2BIFAPTAK7bSARKCpc0ReICw58JtBEYwgh5oAAMKONNuKbIPAczMCEZogHin9a97nNAAdAILSxtgLfaqBl5gAYEAUoQEI4xAATj463adaTG23CyApLSTwAK0WWbu98AQMjCCF8zgBjv4wRCOsIQnTOEKW/jCGM6whjfM4Q57+MMgDrGIR0ziEpv4xChOsYpXzOIWu/jFMI6xjGdM4xrb+MY4zrGOd8zjHvv4x0AOspCHTOQiG/nISE6ykpfM5CY7+clQjrKUp0zlKlv5yljOspa3zOUue/nLYA6zmMdM5jKb+cxoTrOa18zmNrv5/81wjrOc50znOtv5znjOs573zOc++/nPgA60oAdN6EIb+tCITrSiF83oRjv60ZCOtKQnTelKW/rSmM60pjfN6U57+tOgDrWoR03qUpv61KhOtapXzepWu/rVsI61rGdN61rb+ta4zrWud83rXh/sBjdIcgqSXIEjO+AFSYaAr5fN7GZHWQo7QLIDvODstXFhCdXO9pCD8I8DTAAARN6CBSCghA8UeQFSCICRd1ABKtSAyFSYwg5SMAAeFDkEMghBCI5cAQiou8hOcEAOig3wG0BABkfmgAW0UGQGQOAFF1BwjF9g7yNDINpHHvaRN6DtXidhAEdmQAxCsAUjM+AfDv84QsdXzvKWuzgLGTiyPlIQ83VDoOZEvscMnoDkCsTg5EXeggMywPOcL+AGDNjABYqM7RwceekhoHi4HfCPFDRhBf94gJAXAIFg/yPaBAfyEnJAdacPmR8XGEANBG7kYA/B5Yki+A6UTeQAXH0HZh9yCCBwhBW84N9BXgLI/zEDCRQZ4gIxPJE5gHC4J4rsSHb3P0pe5IOv+wZO0MfXi5wDdd9jAXXnQNiJPAQIYN3IEEiA5otcAQ4gOQAcsIGR/TEEKYD+3BBwAGh9zI8K5GDpOdeHAwZfZC1gIQQYL/IAhlABjQuZ4QxggOuHXAGqZ8DfRM7BC2IgBR5IvMUBSMH/Bqqwex9X4AXYLrLmc5D8Ie9AChyoQQUSMGQZBIABSZAAB4j/Yy
lsPwYQoHWU92M5EAIJ8AQ5QARDZgMKSAQQYAFHJgOdF3RplwEDAAXlJ2JehxAPwAPtVxBHsHQxxwD8F2IzAHj7NhB09w/qtgD+oCD34AWEog88QAU4N2Kg5wQ5wHGuBwHKNgBTkAIVIAP+QFIyyA9HsAULMAMV8AApYAEDEAM3GGIpAAEbEANJkHQcAAEhkAIXMANa4F9IqIQ80IRPuHcc0AQckAMQkAMZMIUb1ncPEHUx0AE/9wI34AAXwANEECD6QAQyAAU1UAUOsHdU0ARd8AJbGAIOEAAX/7AEK2ADSOdhK1ADKZADSdABGfACUmABAVABHkgEOyADNnABDlB6OcABL6CIXOiJNbAEC7AFwcEDK4hhMtCEUXiHOXADnlgBFXABKVB7A6CKL8CGjFgFoLgDsugQM0BhdbIFgRgAL5AEMVCNGbABXBgCNzCMijgAUsCID1ABM7ADGahhLxEWWrAAKzB/DGCHHWCH17iKVMCGQ5AC4SgDRFCOHQYWMrgFRKAFO3ABN5eJdsgAq7iFQxAANeCBKodiQ5OE/rADTJgCNviGdZiFG/ACIXABMjBKDjk0+rAF6QgFwFh6L7ABTxB9daiJBhkCNuAPR0BKIcaPY+gPgGiGTv9wA9q3AdH3hhZpjS1ZAwugD+cYYQP4LAECkUTAhAFgATrZBC8gARLQBAMwjE/gk9KHBRDAAdE3AOq2eg4WAvynBTcwBEVoS4RyBCG5AEtQA025jcT4AgOAeV7YhOPWBBIglxZQBQEQAtpHdjbwgY8VIAXyICsAAVNQECEwBKPED3+4AktwAQFwituYivP4jQ4QjjwgA5ApmRawdzeQAJ/oljnJBCEQABV3YPxgf/a4Ai/BAwNgAUd4BAugfQHwAKfYhqq4hlzYiDVgAyuQj4SyBStwBSkgllSQezXQjFCgdRHGD0sAAXXIAPhGKFoQAE8AAWb4mRmQBMXIAabpiRfQYAMysANEaRH9WJwBMAVUsIYJ8ADY5o9Q4IzvlwFSmAEMcAMB0JRNgAIkOI83EHV/xwMrYJ6WdRHEWQF9SXwtOBA8AHgTdg88IHL4KXIxIAEDYIUM4AS/KQNaoAUOgAWCKRD6sABQUAVDoH3TN5M8YJ89eY1VQAQL4JRnSaIpAG0FsZRrJxAbwAGnSWKf54ZYeQMLcA9aMAQhoI8wxg81wAH2mQE5kAUWwQOmWaM61ntTMAAuSSjNCIc6FoMyaBEycANe+mNHQHMxkGRlamMBAQAh+QQFBAD/ACwIA1kAIAMuAYczKgyZgCTW1tSvkyza2txiYmR9ahy5nC0uLixVRxKGhoTIqDVwcG9GRkS4uLjy0E91YhrS0tSysrTy2oR2dnTGxsSeiiTpxD3u7u7AwMBKSkw8PDxANAzSsDddTRRmZmRNQA4SEhQcHBxqWhZ8fHwkJCSljCqurqyQkI9cXFyenpwkHASkpKTQvngoIgb29vQWFhRqamyQeSOKiozkvTw0NDTy4qT26qyYmJgbFQTyykH7+/yIcx/Y0KROTkxCQkQqKixEPA0WDgSGelyjhiTduz2DbhyCgoTe3tz06sjKysyqqqxSUlQKBgTgxGC+pkzOzsy6rnzizozatjhWVlTi4uRiWky2lizm5uRiVhR+bjzSulxORixubFh+ZhyrkSQ6MgwSDgTCpjx+dlwOCgTy7uQSEgTKztQGAgQCBgiyyjRebHxeqIzK9NjGvKhANETQyvDIppxAPlBQMkR6mqAgPBxigGDg9NgaKihsWmzqfkhecmDQ7jTOxBCoppiYfHxscIBMXhQsZtAKDgjCzPCupsTe5OBipigODBjO1sgyVkzSohBAeDiapIQmxIB8HICSfKSGcgjKfiwyOkxkahwEDBDsxBBKYEwUZlDqvoQKMih8XgiqikwGGBSMkqDi4MzQfsiakoR8RnTowiiuyOw0VhSajpy6qAxCTmS6fnSoqCSovsQyKExAKDzO4tSWbhx2qOSSmrh24ih+gGje3PQWMmjwojTQMoB8XnAKCDB4gEyImmSGghySYoiUXhxAZoi6sDAWEGB25KhiPhywigy6puiMmIhubpxeWmhQTmheWnxUUkiMpJDC0NBQKBjUytB8YNhCMmxygHS2xMSIuoyORhxkXCzKvMS2utBAOijC3uDO0ETSvOxkfBwgGDSuvqSu7rSonKQmMCjO3PS2oKSwcCwKGDRiTCyuyIBCEEDAvNAsIsCu0riWfgwEBBjqprQgMkRQOiRoTBDgyuAyPjB0RhzcvLwODgQCAgwGBgwCAgQGBgQKCgwODgwKCgQAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDihxJsqTJjPr4iUCwoQaQEPpOypxJs6bNmzhz6tzJs6fPn0Bx6ivBQIWEDBkkkKixL6jTp1CjSp1KtarVq1ir7itQocISFSyUEFiyoWnWs2jTql3Ltq3bt2v3/YjRoISIEhpYVMEhAq7fv4ADCx5MuHBUffv2xdSHmEkEJRsMS55MubLly5jhMta3IUOEBplDix5NurTp0w0ZbyXgAAHq17Bjy55NGyrjGksIMOBXu7fv38CDC084FAUBHCWGK1/OvLlzwUNnCGBR9rlpfhtioGDB4oiGftbDi/8fXzV6BOpmyWPeR6XCv8cRqkBR0Fe9/fv4ZRY/Xz1/ZX5U4MCABhv8wIASSFAAnn8MNuigRPvhwFRMDxamTwgl8KPYhTEg4UANFYYoYojRedhAPyiimN6ImlG42QYVRKABizTWSN4+DGCxQwUzkHCEAiiQAASFNraojwZQVBBZkUw2GRw/ByaZQVcVQOHABkQ6uRZjMKBQBQowaCnmmK/pI8IPDaTZgAZs/gMTmWrp008MBFTQQJZw5qnnnnwmxE8KSkRQAG99FmrooVr+WYESHyyI6KOQRuofPwVAAUUB/eAp6aacdgrcn5amwI+mnpZq6qmiKZoBFaOSiuqrsMb/6tc+TECBwQlUoKmBDz+EKeuvwAZ7VT9H7LADFhFAEUEEAjjgg6vCRrvpZoz1swEVNRCKELWIAcFEAzBs9g92KZRrrg8iQCttQfvUUMC7H8T7QQwplKDuuvgWSi17DiChQnLEUYuACqxpsOE/JahQRQSLWioBEyvmu61ZiTUVscQYH0ptDSwg8UJrClEbAgkRYBABFYoJBIQEX8bAwMsfAJHxzDT/upkIR0jAggASyBwwYinoHCjKFJZwwpWJ8aM0YzU37XSnjPHzgQMkFJCUawvpw7EKVLBwcsoIH83UhuI+bfbZhnImgQoI/HA1QzDM4AATCX89UAkSREBCuXW1/4r234CTKQIKzu4DowRY+/lBBST0A4PXKN99AgZJQpHBEgWEG/jmnNMIZQYUwHR44gfJ5QALCJgJeXowMDDDBykUhQQBMTja+e2436dPAw7gMCRnFfSsUAmEQ6x6BBALhFgImTImAglVXHnv5tym5PdAqikNdu7ciyYCDg4YzNjhPh/ETwwZNO48C1Ak/w+3yq9cxW7dYy8yAyeo8IOLIfhwhAon+AcLYmCv+
hmQMkdSFgooQAES6OU4BShgQUTAAiywgAQMREEEsLAECmygVdULAQ4woIBMGXBfPqjADqpQAArRqgIEyMDRIjCW/R3whoTZnQMEwEMeVuEFL0CCBP9sOEEU9JCHBMDADjBQAQbApHpmUgEG1HfCzSAABxkgAAtduAES+AAIJcDLEjDAAoDh8Ix/gUEDqMCENlJhBkjQ2w9CoLymxKRdbGQjExhQgb1QoQSKIZtqNBAjolVRH3FzQFFYiD1+9ONg+2iAEqCggemh8ZJWgWIDKvAhIhHFYHWkFgW/FhN+/CAFG0BACWqQghNgATk3jJrVKFCCGTBSedSSHAGoYElM+lIq1ZNka5imjwJgQQX1weX6CJACs4ysKxI4mgAioIL+VBE3qAuBlwpgv7Lp4weLIuIvx5mV6iGgR+laTANY4ESC3A8F/THc/1iwBBbgIILbq5+Z5Ib/si5tUZlM+8fgXqk5cho0k9zahwhgcLBxlcBX3eSSCNKDmH7cJYwwuN4BpZY+mPizhYvhVj8YgIQM7K+XB02pSv3CmRMgJyYhmAEzA0qtOTGMaChdqU53epZ+QE8FBNoAEySABXgWcDP9+ECSRFU2njr1qWsZ3AuwYCllKbEKSmic8pK6VBDmFKpgDetO+sEEFJjVrDgQAAYyMAP3JVUJc/PqV8VK17qeJCVhCoFeBza/EIzqHzZVAlMRk0+7GvawNoHiR13omLXurQDziiBiJ8sRcfXSsu8LaEThF0tufe+f4yKBEjEwOyQgQT4tpKxqK8IPBDDhAy9DZUER0q4Y/2CQBLglARMcJRfcNjC3DbhY7qq3jxJsAKJmKtAP/vGD5v5gA+larXQhojX2JUtZbGWKnyiABapWNQIoSKYIVzjJqirol5qdrno1og8gNPADVKACA3LzLz+R4JgaWNOuJiQQEWJgBrrSABOAINz1Gviw+9CeajbgAAL4YFv8gF4MoEghERJgRpw9sIYnS1MuVZAKEIYeA1Ak1/7iYJeO/OuGV0zZlOi1BCnoypIMkhLROuBHJKACgcXl3wsqgAE+ACSLh2zXELxxO0rIQKMgzAAowDUDkzwBE/wWUyRwMgOWwoE1icxlpz7PM6aF5cQ2kAIfFCgFesmA+MalgQ8QyP9ADqiCCobU5TrvlB9AaIAPCoCDE7RzYiDcBxBGCKbFLE0g/GhABrBAP9ztAwYIcEnzClJTBCDAdnYmsmpwY7eDUDgFVTgBnUt3hBfw5XZy6tqUHADPFVELBiQIHi8z3eXN4AgLM8A0pbnlg2alLiExIGMyN2fTsYCPACcg4r6oAIWpMiC9tN6wrXOEAjo6RB+gFnVC9kGCFyCzc5zJgAAYsFAgzOCV9aHWBliQgX5NeK7RBithQzoUKVKgwIghkpkIHaZ5Y4/BWNAq53z6yicCTwmVzKyZCBcDOD4b3vF+6j40ULUGbKABfI4hljxdApeZ+R8pUBgUfGCxDTCgAAT/akAMiLqEGkB8XRTEwru55CUK8CZqH0jf8+YH7Yirdh8xgIIAlLAoApzHByqmNG6QkCSxEEACKVgQe7JYuTjqr8BnW5kAmLAvBmAAB+BBzA/WVgKCP9zn6jUTFSiggH/0qF4NNUg/GsCAI/wDBUcoAAIoKgImkABIPfoj1s2mNSzfiVopICMdF+4sOR2B0S9H+04JSygNNZXGCa58YRGjNBIXNnCFV8KdFJ54FoRppBnYjeNl3nPJux69NTA8vUsfgkhKgC9MIwEBQPr63pOzvRIQAC83A6Wv9yMEClgrA2JgWwdgQAIx+IG2fE99HKquCh94NQ6qcO+4YQALBAh//xKXCIUjQLT66NcnwXEQrtss6k7YcdnLbJuB5zPgg+nPf/fGl0VyhwABKIBu2aM0SiMCtrQbn6d/Chg4xeZS/eIAyiZSCgB5kbeA1kd5ltd6mKdg6ZV5iKY9w0VWLFAlbGVN1YN6zWSBYmUtH0ACM9AjPtB+IQMDVHAEL8gAE0Ihf4ICL/iCZkVy3KNQkVYCT+RO3AIDL1GBKmhA/PIYUMZ0CiBBnoYACkBDVcIarLIYFPQCUCABDvCFEjAoSziGsrIPKYADFKABNWByGVAFR2BtBwEDChA9BXBxJGNSdvQ4AhADQGBpliaDZBiIpoJIE6WDBSAAGTBjlNYZzLQh/f8wh2/4Po+jJAClgYJ4iYgCRQiwQzNSOinga9RCBcgGMI+jBEwAAyLQPJeHiayYidUDTpRYOgWAbL/DGL2WiDFBQRjgAGCBAh+wd5bYisIIJ9UTN3sxbO6kAUSHJarxAQsTOXGjBBKwBHmDBEvwAwk4jNo4JiIVAwJgJ/hWAhWEAjVwFz5wAi/QiP9gOBoARkDQSlXAAqO2jfRIJuLyJ4EyKK2HGFSQRafDAiewaAKQgoSVWZGUAUgQA9MHOGbCBMzHfC/DBG9ikK7FTRtgQvVoUIrCKKroaSmhAYQjQxTARyPnakiFAi9QbZ1jhhWABD0UfqjjIjBQAMHHQ+kjhRn/iUn9wBUVgCm55JFCWAOX9gNw5XKbxW2mdn6AsxqpB1keN5EjZXRoeAQ7NAOzlZNnpCgVEHUZtmtlgyN7UVDFOEIltJIpEAEllDQZ+D4MphsZZS0nMJCDh5WotpE+2ZXulGAbwg8agGXGIye1h1QFkCyRwzlbUX6Wp3Cy9HTlsw8f8HVwSJdMyARK8DEfsCY+QAXosm0mxwDlQgKeETqlxAQ9UgApUAAzAAVuGF2GOYsOEC+yVVPcxQLpxBgagAUOYEaSWT/8cARKhARKUF4RoGauwg/neF1QcAIfIIN/4gDLwizNQkvB6DSUggSkNTtKwALikxIMEGr2shlUsFYb/7eb9VNbyyd/MIOT7hQCp/QBBTBg14NI11IAMQBfNdCR1NMuJ7cmMeBKhYMYmxQBzcQYJYADO4BwSkiehJegHgluFbUh+3BFGIBMXGJLFZB3DIADEfACCKqgHkqMTaUPTFAFuPg+JUACUIAFcWRPDCNOH/qiTPKTAlEDwTl67xMCNcAEVFAXSDJMMPqjRZJhG9CiWbIvKWBBVwmkSlohnMUPFPAxdJZeQ7EzBZCNS3ql99E/qAQEQPADJCAA83NzgkYgfcgEKiAA4cWgWLqmsxETBVonSAEFwEkC0ZUSKeAAV+ZkKJA6asqmfnoaANMPPjADYPEVJHAiRNIuJKAC9f80A1SQTn8aqeqxPCJQqalYWE1hqYHZp5LaqZ76qaAaqqI6qqRaqqZ6qqiaqqq6qqzaqq76qrAaq7I6q7Raq7Z6q7iaq7q6q7zaq776q8AarMI6rMRarMZ6rMiarMq6rMzarM76rNAardI6rdRardZ6rdiardq6rdzard76reAaruI6ruRaruZ6ruiaruq6ruzaru76rvAar/I6r/Rar/Z6r/iar/q6r/zar/76rwAbsAI7sARbsIfVAQaQAwZLrBdwAQcAAnjCqQv7qTRwATQgAwCQA2HQBNM5sbFasTRwAEQQAEYwAkHgAhuLBhLrsX56AVMQACMAATJgAgMwAAH/wAMjAAIZ6w9WyrKlagIDsQAYuw/+
ABVTCKGFAASjABL5CAztYC6QcBLOCF5ucCJAAAE6ABJUCHvFaFM7AAM/ABkfeLL2AAMaABMcCMu9YPHfACHvAELDCNgVcFBTADHqABE6CNurYFMoABwzgD4shsE3iDVDCHznYEV5ADFaABA6B5zgYAPnB7JPCDzvZ+bccCDOhsMwAALtAAFUCQzDYDBVACfMcCC6COuOYDgNgBFcAALoCRufYCLlAADYABMvBsTSgCLEABkTh9JJB7GvB5zxZ+HdmSz2YAFf/geLL3bPnwAXQnAvHIbAzYAAywk1znA1SwhRjQAC+wh82mkQaQjBTQAU11bCowAAXAegxQicqmAjPAAhdAAf34bFUwAyTAABWQjjw5A3h3AVypbFWwkjBAAtGmAiTAdjMAbda3dmkJbZNnBTBwAU6ZeCrwBG2ZkJLHA2s3l0i3ejlJAU+QihLIAy8wA3anhIlXBSKAAS7QkVTZbEy4iYPHABOQmQZAAzGwks/WD03YgmAndfkgAgg4AQwAA842eVQgAhBQAGBXAXGHiO1omaAJAEkQiLz4bIf4AgCQD4CXeAAAiP0AASHobIeIgP1wBC/AbJzTjl4YmxbJdannAhr/gIknKXULMIxV0A+lWYwvsIyxmZ3aSYvFxw/c2WyuSAI7mYzaaX2VqXltyHX94AEXkI09SAOJ5wNf6Y9GKYG8OAFaIBCYyXVeCY8CgZgSeI0OWpczgAEWmpiHV5XIJhgf8AKImYHLxoQXUHoD4QLayXkawIAgiWtM2H8ximv8oJkaMJhL2Jqcg5yIGG1a8Jw58KC3WQULEIUmKo8AYJkU0IsEwYCOSDgyAJ+I8YZTyFl493ssUJ7/UJqy9x8agHj/IQIQoQ+PeBD/KREN8A9sV40xAAAdGleENwMTEANfRwEiMAEdsKe+R3od4AKjqXcGYAAiUKiG2gT/0ARZuai0zqd3gFcAT/CVLIABlIp+FwARFdCRFUB/VzoQdBlXv4eD/zAFDFB5nMoA/3CC9fcCtvkCGPAPLxCrSUCp//Cq7Xd5l/cBM5CXLGCg6Fd4dXd3tgkRe4p4xbqn/0CQ64lVFCAQUHCkGuABEOADmrcABygDKpCtUDAA3CcQ2VoF4NqJBFF+PrAAKpgDsHqpdveGr0oRJ5JLNZpB+gAERlgQZFoQHyACgugQvYeD9TeMA5GkpAawDPEBqFgQsIgQeVkQOopqvskQDHipDvGWpBYQACH5BAUEAP8ALBUDWQAaA2gBh5qanKaLKfLejhISFHt7e1ZHEjY2NHxnHGpqbGRkZNra3OnCPfz8+IKChLiaLIJuHYaGhG9cGHBwb+G7PPLJRK6urMzMzGJSFEJCQ7OztNm6QSwsLIp0IHRiHBYWFEk9DJSUlM66cOzs7NKvNi0kCfb28lxcXNbW1Dw8PMbGxFVVVBoVBNLS1MDAv5V+JO7ipLCVLJ6enIqKjJyBJDQrDEhISI6OjKKipDIyNKampMmoNDwxDK6OLN7e3CMcBE5OTBoaHKqqrBYOBIp+RNbCfObapL6qVAoGBLq6vOLi5EI2DPLy8x4eHODezJ6GJe7KRJR6I+G/TCIiJKaOPObm5G5mPA4OCyYmJObOdFpOFCYiCEpGLLaylKqSLHZeHMbCvMKiNPby2BIOBA4KBAYCBDJUTJJ4kHB6GMDM8IZAGIx4bCTCgKjEeKbEMIygiBQQYNC67G5WKAQMEL60xHBkcEZcWAoyKOz0vKasILR4dHpqcHCk4FxiGKigdOLk2B4kINjK8Mj01M7i6HRc1F5iPHQagFhuZOh4SAQEGAIGCOiuQLzMxLKolDoiFOq4NNzYfAYYFDx0OGJUKMzqMDo+SMDe3B4YNLC+tFhkWB46HCwwEDQ+ENDKwDwwaMCaDFhcdLKYoCIsKKhsLIK2hNy6uJCOeIxWGG58bBgoKKC6wLC6zDxkiODKzB4yRHZYaOigsIxehL6+0EZMZGZWbEBMTKjOtHDCJNz0hBQyaCgiwHaWmA4MGKaaDLSg6Eg+IHDipMS6pIKWXJBmGAoIMGhqlKaiUFxUWC4+OKigxFo6GDo+KBRkUDwOQIZykHp8ZN7g+I50DDQiPHRCbMDEMMSgmKKEDC4wRIyWsHR8RKyWIDxUFFx2GKjE7KjssChk0FpGKOacMMDM2N6+ECIkMM7W9KKWsHZqWAoYNFqkhMx4yEgmKJ6aLAoOCGxAGKKmkFx8WFRcWKygLMwwgN7U9GZcUOD06NTK1MR4LEg2PAoKBAICBAYGBAYGDAoKDAICDAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIEOKHEmypMmF+jygwMCSJYqXHvSdnEmzps2bOHPq3Mmzp8+fQIOW1GeihYWjKVK0UGpin9CnUKNKnUq1qtWrWLMK1YfBBgAQYEEgEUHFhEytaNOqXcu2rdu3cLPu8+BhgF0mBJJk2BC3r9+/gAMLHkw4qL7DiIEASCKhX+HHkCNLnky5slbEiGuksIDBsufPoEOLHk16IObDVhokAQHkbOnXsGPLnk0b52l9GzIoaHq4tu/fwIMLB356XwIFexEPF917ufPn0C2fZhIjCQEryqNT3vcjh/fv3kH8/3Cqvbz581dP17CQAgNm9JD3qWhBnz4SFiWSIHANv7///zW9pxpr7wEYmD4DGGAADjhsgAIEVLRn4IQUVrgRYgYgcQJv2VkY120eyECFDQN4aOKJKB50mHF6bXBaim3dpg8OGqrAH4w45tjfYVLcQMV1L+qI1m39INBDBXwJqeSS0eljgHcoBMlketPd0IME+9w45ZZcwmYFCjj0U2CXVN22XgpRNkfmmmy26SZBpw0AIQgxqfnmnXjmmeJpOLTAgo0d6inooISehxmLGVwxZqGMNuoobZhJkcOVYgb66KWYZkpZZgq0kKalmoYq6qhvHSanCHRKSeqqrLZappMtbP+YJaiu1lprPxvU8EMNG4j50D5X/PCDB3DqYwUONahQwxXk2WqaByqoMMCizla76gAmBJFUChUkEFND+lwBggJ/ErTPBgQggVQQJlhhrUD67DOrne/Wm2k/CRwFAgEgsMACAu6iNIAEFiyxhAkEidupDfweZYJj9kYssaYz6iaBB/14IIECSBigZUHyBXFDCiIgLBCLLajggRVA5IsEDhPHLHOh+yCQxA1MINYjpQrNeAMANQBQ8kADgCACBNOaKnQCH8/s9NNTWmHDEgTMu48EVAAAREIppauyDUMLxEQO+lUaLwQMyBAw1Gy3LaQHQpuFmQlJVHBFQvuYgAQEQFj/YbTJ/3gQgwhAHtaPDQzcsLXbjDduIhA+AvrPYSogl6RB+qAQRBBR+h32P/0QIEIFBmTZDwZIMGC346y33t/WkCdhI7z6VL4XQkBAkMHD/3gOeOYZ9JADAipIMKnql0O94j79ZIl5b8w77/r0ryHRrgc+mkW77ckPlHcLDRDbOwhludZPDTcokMQJLcQQBBU5SMG24TXIUEEFAJjQmmnAIgDA/QBAwAaaRb0CfqZ0AwDAEpg2tyQEQX4GKRoVMkAAg
UCgYEEgAAbIA6ziSUAFG5DAasTnNNQkIAUnQEIG/CUDCP5jHyiowAlSkIEMWEABnGuaAXf4GH30owEiaIDZ/344IhISxAMNYIFATsCCE4iAASJggQTWNjnE9ENEWJofBjaTgCtI4QfBy2LgbJAEAGBAClLAwA1EQCIeunEylGPByzB0HxWQJ17e44sK/qGsBLSgBABQgaKKFS/NeEqHEZNTEmRQoniZ4AS3+8cG1FUDxMjnBKt7oyZ7yAQA9AAEKHAQCHoQAynIRB9MMEEN3LUieP0jgZ/rnQEwgIMr4CAB90GA2yZpgR+cJVwV6IHJctNLS5pAATFY3CaXKZh9YCAH7KtArIKwwZOZwAJBwMHHEliCBBCECQ1oARIqoK4WXIxeMqtBD5BwuZSAoAQV7F1eKmCCl2QrZQRkpj77sv8PHBDgBiIjQOm8h4F9mfIgVkBADjozkH78AAQ5+EcOGrBKWk1MBUuoABNoZwW0yeCUUoAACxRwFJJOcZ8o/YuxmCAFJmCHP/1gggfyaZoBSAFi8MqY/JgwgHm1TQUMCMLWEJMaBoDgLAMTZwxiYBQbDDKlUI0qZVRQggq0hqgNYIANZGIFCbRABigAAhBQAAAWQMCIUk2rWv9SgwkqiqgyYAAEBIICC3hqVs40Sg3Wyte+vqWuaMIMEAYngcmpQARBqNNhmHCDJHjTr5CNLFYYe6V5oWAze62dCDKQM8TkRpiSDa1ooZI35KBAXlIgYxv/UVcFXExeQCAAFTw12tr/2nYn+pDCuJDQAALkADnuEQgS1xcDAvi2B1ei4m2XeyHaIdI0vTmlnW5TRXS6bUXoskASqKAAM0pPH0BAABK2SwW9mGBazE1vRgxnABNIAAE1+BbXfIgD936QWb+UQgKMy1/+/gCnjiOqgzBgACDMinb9uAIK6HpT66r3weBKCQJi1UQL2MBFXBsALk/AxH8E4b8nQ0EOWHCUo5xgCQxIJoRXzOJ4JeAEFmiACRBQAQWAYKMIwRe5ZGACE9gghdVEEAp2VYMaYIAA6msMi5esXtzoBmDxQgESdkNTgVyhAoRL2nAZWd3TcO+5TA4zX4lSt7sJJHRsLBHmtsgZzNSA/wXJCVJHqdAA7Ij5zqLdBwFKAAI1xzGSBdHHelrgMcRctpfzag4K9OpgPDs6qn6jGsQOs2g0qWjRLKgkYn7Agh4gwGzQk4AIktnoR5tan4ITgZI9u8K9HkRSImhhP6xwBRmQpXCYGRsVEHDgU/saqqluDGZyk2mE5O2GNzBuDEj2I1ZipnIdsyjFqIugDTRopoQc0z6YYO0r2Nk0/QCCtRtk4F+LOdIEALUBjLJgFVkhWxZgQQpuEE4FfLrLVxRB76SNKRn5sAYgWEoLgDZpf4M3AdKkIQEwLBArqOAGSBB4DiTALHMveR8QKIHa3GwBQB8EWMn6gYIrkILxODdDf//y6ar8LZ8WIAcEMmxBU7p8Gg8g2QI/a0EPbnA5m7egAmDJwQ1j0D2Lp1cfCBBB/CyJNTpBhNMVMMjVRIAzaoXK31ewEgGk4AEpbAxJNMfMD2aoAiCoJAd0htg+xC1TDwABA+8To9EfjGk7yuQKQXDsHQEcKCkAQAHXgS4QcqDqRK88SKUFu5WDoIAEOEVGHgCBddT+A7saICFolgGA575cHyK5BQjAgArGFYTLeR0Bne2gCux5gxMAgOGIUQELCG31aXdozlVrqOgAYETM9MkClRyIpBybkAGIKPecPzoqG2ABGDdxodIbtHsKKcN/pEDeNhgoR6cGgZeW+lFSUoz/Y11DlM26sMs1SAI7CTIAG5SgAXB6+w8SAAL2RSn5yk/JDwgggwYkYIDN4XUSkDOTAy0VJAME8AP7A13vZgOf4ipSwli7wR8/sFnJ82xVhWMNl1VbNRBW4Ecn0AMTVANVhn+jhRn9kIIqd2a+UkXMEzDoZDhW0GutgisKYgAbMC0SqD0DUQMisH7QRTlVpUy9k1VHlUcz1gDkdDEm2IRPkzlCdxS7E1Ppw4MCUYEeh4EaRRBF1YH8sw9fAgApkACb54Rm+C65JQE28A8Mc1qRN36mYQLml23pB4QC0X7vhzcqYAGZdIZ+WC9WYBcDMIPGYmvIBzq713uIgQOY5RrD//dYKkIjlvaHlOgsUlJagCYFjOd42RZ5P0J5lkc7IKMCKQQzlXiKECgjV4B2ECAFA3AFeZEckzMAKGAAZsNpKaA/tEg26SZcNYADQHAXP1ABVMBlqHiMpMJyP6BzGQAAupECdnQy88FziGFzJJYDTKUAN6BNk4MCGdACOQAA/9NpN3B/yMhXzANgKdg8PRMvjtE8WtJKL/R9meJvpwMCm9ECIFADLcgdSECNiYFwR4EEC3dHeJEBmyFvw1Nx57hW+1ADNmADMjCR/yADEUmCxiYFKsB/MgABCIADBIRKP4AADYCALkKPlyIjL+QB1rYBM0V+tCYFiSYT3FZL3wYvNv81bhvABC3YkGqVNxmgQjVkQyKwBHIHJ0xwQTMUK8ihAjjVVRZABSJQAlTGbz55lYwyAC25ARtgAATQA22mIrBoA/WEAioAALMVXKCjAghIAChkdyiJlXKJJwWSGkWEN0PlWUFAODA4a3Pxd4ASl3M5mGxyG59lhe0YL3vWZ86FIIBZe4QZmW9SHMfhcYnpOXXWmIKjAIEJZpL5mVNSJYTTk+3IHUdhcl22mYEJmqw5mRzXZp55GDgwKeFDSKoJma2Zm0KiHB21GlcFZrhRfyDAcKkZA5yJmxHjQ1JglglQT+jFP0xgAD9gAipgSp6pm29EI1X5fcHJAgAAkkF4GLf/aZWAaAIk13wzlH3N8pDLFoIWwEDXiZ0GZByWg5zd+Z38UXPG2ZnxaStMYAMWUFwSAAHjlQPc2DsmEJRBYAGqRp7yuUPU0QO4pkPdOZzS05iOeQL8CTX9sBJ9M2sxFESOcRhAUEtXACESgJwPOj0cd0iQGZyud5LZYUngFQMaKi8OWi3xkh1XwwA5oFiWJFspmqMr2jhENSe/2TSoBDaLtCvCogLguVgqsF8MegMJkACnNT8dog8J4KNAajhCqipFakCepSEc4mCZ0wJQxB5H0URndTJg1ANJgGIigFwygFYyMyaKIaJEEqYqOqZGKp7UCaRcAy3vhQAIIAGKKgH8/wgvUmBfi6qoGFCGExMkRZIELnoaoUMFQ0qkgMo4gimKEtGflvge/XBNLMAh+OannvqpruohKIiqCeB9q8qpYvqquJojVqQCXOR9vwSmtvqnuTqsE2I412QBD6MqnhesoUqszooePmQCSeGU1GUusrUfrfqs2hodp6pdIIADbsdS2DY5+yAFOEBWbMQgMrmt7GogMiEpDFACcFZDEcc3J4MD+BiVDJAESXGn7fqv8AFBsVUBmxME91NDBKBMG6A7BLs5FUBBygWwEhsd+zAATHCxaJSx4xovXXexHst1pDqxIjuyJFuyJnuyKJuyKruyLNuyLvuyMBuzMjuzNFuzNv97szibszq7szzbsz77s0AbtEI7tERbtEZ7tEibtEq7tEzbtE77tFAbtVI7
tVRbtVZ7tVibtVq7tVzbtV77tWAbtmI7tmRbtmZ7tmibtmq7tmzbtm77tnAbt3I7t3Rbt3Z7t3ibt3q7t3zbt377t4AbuII7uIRbuIbLQ/uwAjuwAz5QgocLtPuwAxzgAGDgAkpwBI9btPpAAzMwAQtAARMQADsQspkbs/sQAZ67AKo7AR2QD6UrtEfAAU+wABPguRTAAWLwukG7Dx3gubWrugcwBroLtPqwAw6guqoLAx/guMN7s1bAAROgASPAAwUwBqTbvCx7BAfgABHwATRgvdj/+7Mr4AIcsALhG7TFCwMRwLzna7NkkAUwUADX274rq71dQAP0K75Q4ALmm788u7kBEAGY6787uw8F4ADyS8A7OwYHAAM7oMA7uwIz4AQ+AME5u7kwcACua8E3a8AOcAHzy8Eiy8AO8AEifLPj6wQkcMI2SwMBcAC5y8IzSwYFEL9kIMMzOwYd0AUPjMMyuwIcoMI+LLMk4AQP0L9D7LL68AEBcAHsm8Qkmw8REABKEMJQ7KxAPAMrfMUtSwIBUL5czLJLzAMCHMYre7pd8AFWbMavCsQBsMVsjLJeDMZxjLJKoL7CW8cmuw8XwANqrMcmuwIPoMWAbLJFTMeFPLIf/4DHiTyy+nAByvvEjTysY/AAb7zGk6ybXgwFSJzJ/7rIHRCxnvysjxy/kjzKn5oPHBAANIDJqPyZPhAAnPzK7boDGbzBtEzKNVwAp5zL2CkTR9ABrOzKvjyX44vIxTysLlzGyUysi1wAN9zMuUoGUzy60pyrQOwCFXzNuOrCwcvNuHrATgzOrqoPEeAAVUzOnzoGUBAAPkDM6nyKJMADHJDH8byiS8y9vXzPc3m6JQzP/OyHY7DK2xzQ8hnL9WzQ8pnPIKzQ2MnHyuvQ2GkFlgzHEs2aPjADHCAEF92aSwwDFxDNHf2ZfFzCI82alczKJw2a4zvLKx2ZxRvAA/zSg/8J0X9M04PJwCqN04O5AvvbyTztkwDcukEtlwYcvwBd1I9mBQfAAz2s1D45vjNQ0FB9jt4cw1WNjEcNzVl9jlbQATCgBF19jj7gAkI81sfowg/A0Wh9ivpQw+Pc1pWoww4s16fo029s15Wo1kCt1024xE28z35takdQzUk92LclE1ls0Yhtgi6MzI2dfB99Abgc2cl3ugFw05adfG7M2JttdHPc159tboscAZU92uZGBheQ2SKN2uYmBoNMAoft2pB1yKJN249W2qeN249WyrzM27+WDwcwzMDtaz7gBJBd3HhW2vas3Hj2yAgs2M5tW7ELA6083Y62AgHAv9jtaMb/q8HdfWdv7QBZIN3hLVnBTNznzWTHfNvrnV40QMbm/d599dZITd8shsZPjd8PJgQcoM38vWJe/M0B3mSLHNIFrl6nG9azneA79LwBoAUOzlz6MMeiPOGSNdnzjeH7tOCazeGiNdB5DeIniNBYTeIZfscNjeIZng+QbMIsLlkrAMkjENdjG9NTEAJmewA6kLwfsNtcqw9k4OIzUAVcIABk2wEjgLwTcL9gqw8rUAD/7QAcgAEZEAZmuwACsQAa8OQr4AVLrrpQsAUZUAJIHuNpdQQF0OPI6wBDgARmXraqO7ZiwAGr67kjYARfIAJnTrYUQLthW+ezW7sTMAIj0ARh/9DnYmvJMKADDwC2Lh7mtTsCIYBiij62JDDcXn4ASx69RNAEDMAAacu5ULDpIxC9RVACDCAAUYDma6XmOlC7WPACrD4BbFsAX6sFLuC7ExAFXd62Z621MtEBGkC7tg63Gdy1JmAExv4Px+62DfzbWKtmlw63nOsCrh5aNZzt3O7LnNvtkVUALwzuY/YPDUzuY4a/oovufZXc7L5PYgDWF/DuUiUT7bzf9I5STJzvaQXW/C5VLrDu/67vVO7eA89D5zzvB69PZS26G77wUMPEHPDOEL9MEQDSzV3xPOQDqyzWGq9JMP7xIk/AyzvyPLTvJu9GkBwBF57yjcPxaezyBs6kBAEg8zwEAzZvQJac89SzAwFAyDzPOkdwAQ5wAEHfOuaL80fPOjvgBEvvOsn+9IwjyPEr9df17Z5t9TJz1KGs9WxD9V7fNi4c9m6j9GTvNGJw7mf/NDOw9k+z7ZPj9hOT9vE703JvLyQwwXcfM/tu8HvfKmn/wX9vLxU+AwI/+O9iwhyA+Izf+I7/+JAf+ZI/+ZRf+ZZ/+Zif+Zq/+Zzf+Z7/+aAf+o2D7aJfKJDcAaVfKA9Q9ak/KDXf+oIy9A4A+3oC5Y9O+3myuSwbEAAh+QQFAwABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUDAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQQA/wAs0QBkAO8AdAGHvLy8tpotRzsLiIiJgoKEe2UctLS017U50cOQr5Ar/vCc9Nx3+ey3moYkMCYKTUIP5ubk9M5JcnFybVoXPDw8wqU04bw8xMTEzMzMkIho3NzcEhIUgGwcnp6dREREJCQk7MxMrq6s4uLkiHEe0tLUjo6MYVEU6cQ+bm5s/Pz71tbUen6E6ursHBwcqKioGhUEmJiXNjY1YGBgVFRUJBwFy6s1j3kioYgmamptmIAk7u7sWlpcTk5MFhYUv54teHh4PDAL48BMVUcTqI0o+uScinpMCgYEnpp8cmIZ9vb0kpKUwaMrUkIMZmZkMjI0oqKkrKiYrppMiXYg5tqcSkpMWkoU5r49Li4sfn58zrI0JiIEZlo88vL0/PfeFg4EKiwknoIoKiosysaw2tbEjnYUhoJ08u7Y3rY8tpckampUPjYMNi4M2r5EvsLMQjIMdnpsEg4Ezs7U4rwsDgoEDg4UBgIECg4MRFxMcHgYSC48yKaYru7IkKSg6sAobnxskl4YPEpACggw6vroiLAoKGLQ6KAw+uroFBBgGCgoxtbUrtx41rQozuwwMFIUtnx0PC48gkYY8MTUcFootKQsBAwQHjgc4MIQYmR4yuLUYE4opJ4gBAQY6qawkJC40MTwaERsSCQUqoZMAgYIHjBEdhyAfnwY3PC4Vmhk2OjkOmKIKCDAyty0pIw4XqaMrMgwXGAYdOAouNS07MKEXHYY2NBAeGJEdHxEFDBo3Oj46HxI9tjUNCI0CjAoWFZ0REpkdOSohrqMuKboMC5EHhg03PjktJqctqa4lJaA2MTEXHxYBhgUyvTUlm4YPA5A5tjYepig+MIUxsowxHws1rpUytz0psq0tJ4MuMa4SEowjkxszjKAaFZkqoYMOnQ4IiwopqbEznzIqm4sChg04Nj0LlJMyLzMxqgQdl7UlHyQxqJUPC5oYKIodKbkuMKczOCEjmqErJrAJsKArLAg3OjQrsrsEmJQ+LZECgoEBgYMAgIMAgIEDg4EDg4MCgoMBgYEAAAACP8A/wkcSLCgwYMIEypcyLChw4cQI0qciFCfRX/++mG0qI+ix48gQ4ocSbLkwostdvwYgKWJE38WTcqcSbOmzZsC9fnz4OKfCJ8QAMjo1xGn0aNIk+L8AEDHkxkUeCjhIsKD0qtYs2plqM9Digv8cvJ7wgLF1rNo0xr
tmgSAv38W+Skpq7au3bse9V0hoYEHv378rGKggLew4cMG/cnQoANABwMiSMwoiriy5bT6NqDQIIKziCdX3l4eTVopvx8XBsRoceUHCQAUKJeeTbukPx4QDGzgKFdHiX61gwv/2A+LDgIcdc5gAeDD8OfQuW4YcDz5vxlBw0Tfzp2gPxRJDLT/gOmvBQwuMDZ0Xx9dXwwVLJ7I8DDjvIoZ7PM/97cDAAsInEFwQRNh6Wdgbfp8IAMWLMkQ2oEQRijhhBRWaOGFGGao4YYcdujhhyCGKGJlOvUD2F8wyWaQTvy06OJflJV4okYxjZghPzsM8EQIIXTww0sqejdDCAAYAMCRAAxQ4D87DQADj08Q4AFRNl6ojxMAaIDBBReoIOAOoh1UXApBIXlBCUvyAwMEJHBJAgT/YAFclRbyw4MHYbTwQQxYsICBEwn188NxG2zQQw8tqDeQPxTM4MQHrKGgggg80Flhcslt4AIEMgT6AwQ4YBokpsXpMECQluonqkUthCDCDp4e/4doD0SpKCo/xiGXqoSZfvCBEz9oAIB2CAmahAouAOACFh7wI5tF/UAaRhMYkGDVrhHy9oMBF2iQxD8eoCqQP01waSQGOsSpKFw6yXAkCTqo0IS42HbH0QZYAHBBtQR8QG+CTjirTws/iFBWRxwppi8GGHRAQZj18urPBz9g4IJzB2Gakz84fBXWqj30h0G4EVvYTwks/IDqqhaFkYIK/rLszwzhwVQyhYrp0MGSBLGcYLwxs3yFDhg4e3N+tgqK3pwFycxDChj04HNXUFN5tL38tKARRvxQAIAIoSbUAz8Y/dO1ATooIdbYGGX0AQxJPAHx1dDp048MDf+Aww8dwP93cUVOuGAAAShIAIMKjRHrDxUuhEA4CiVgwMUFhNHd3U7dwunTBT+0kFCCT4gAwegiYEBAGEVd2QFnLLBQ+gBO0Gs5gv6E4QEPO8zgwQdzr9gCBVTMMAMVV1g97u+4D3+F0bM37/zzsj8v/fTUV2/99dhnr331dhvKNrsa9+yP92SLmhE/h25QK8vbH6ZPGAN0afEM64fPZBhYdEuCASi0kBw/9QEAXzAAAw+kaFXtw8v7LsAFEgjuPyg4oMbcA4AkaIBHGoDAE6SmEx5goHRPeMIFdMACCfgsgXfZwFQ6wDt+LEYD4UKgmrjggjBkxAkdSNlFriADCmwAIwRLAgn/rsA+FKpFHxSAgAquMBB+lOA4YbJOGEhAqYEgUQcX6AFcNpKcFgCAUz3jiBHVwp8UGIAyimGBeKyYMA+wiVgCaYEKNBAbBG4gh2ZB4BjRojQY9IwCJFBB5cBnEQqIQAWAyskHMigDBF4JAzAs4h63wg/qYEGMC4QhGznygQtAoAkp6gfNUibBew0gCS4QmP0mmZXMzEVlMUnQBaoSRmgRgGgycEIMmgCAFJDyVjiITAz1yEqs6MOJOrhkcsLQrWsR0iJvY4EOSFCtfZUQYRy5mwpU0MhairGYV7HbD7igBEwBUgUx2GQsAQgDF8Cghx14VeqO2YRtgmlF1gHnVcp4/8ZF7SA3nstJPpvWAgws0Yr8qCcJdrCygeoTKVdUAeoEgkwCiEZUAi3K3VA5J3q2qVLsappDH7qWHsCABUrQ2gZmQAJrJcwJQNri2EwUsmoNkx840IAGZNC2tp2QpEm50gX81IEQAOgHB6SATtPJJCocCQYwAEADG3kRHqgAagP4AQG2igUqSBKoEL1C5DSwvx2o8pEXSGSClMAXDZzpYQnbwQc7I4K6HtKiGAVrOLPmK6kRcmK8Q+g/wgCp+kHrA2FI7BUW6wQm5lWvkI2sSaIn2cpa9rKYzaxmgzNS8Ik0r51VJ2U3+5Ei2u0Kj5qnz/jxq0cZb2MtCMNiP8A80v+WZGr/6EcTuGDGi8rsCjBgjJ+w4C8rboCXItCBDjQAg4fZliRT04nXdGBG1YrqCiM0AIOG+oSAmm0AL4OBVpuiAmc+t7QTTM7bLqCE6ma0RrkdABc6oEW9fDFseulMbOBi0uqcFyT20xYGmkAF9z4zJh+Y4yA5pgMX1JcCUAtTPyRAzt79FyIB7iAB/RGDFADAusmBMAaY9kgSECa/GigeEJUAtgujF77s+kCyYgPhD783YTMwsEB6YAAIwCq3n8LAAPbmAhKUYF0ulogj+UEADFDVKzY+cLviJht+bAoHOdkAAaQ5usbsN8lK7hoVPEBmz82MgB+DsAHUubEmJKH/A1V2AV2YFIPHYEEGKOjABWBARDBHBImQqauTy5OsGGCkH14xwEYOohhUymYDRm0CXJgZSbi0oANpY5qfGzIwFAxgqz/iD4Cy+oMfuCAFGiAADnhmRShHkSkiwA+5HL0oHpSOiZt+SPj4g4H/jA4C1PXlBTBmkCsI0X/j8oBbmVgc9Pxxm4PMNVcm2ILkCW8G4F2oARPWxS9285jU+Q2TwGMAKtmtCViEo7Sn/VMkGrhluUyYDLDogQ20QAbbpEJO3AgBCXwAUU5lwQA0vW6FRHcgUMZmP+T7BFVuoAQAOtIhUcC00yT3NZ6EgAtwXXBOSxKJGChB6vqBA85J0IUi/9wzFebWDypEdUsuaIJ3O67rbxZEIz1jkkN1YrOKhJbmQNfQaINO9KIb/ehIT7rSl870pjv96VCPutSnTvWqW/3qWM+61rfO9a57/etgD7vYx072spv97GhPu9rXzva2u/3tcI+73OdO97rb/e54z7ve9873vvv974APvOAHT/jCG/7wiE+84hfP+MY7/vGQj7zkJ0/5ylv+8pjPvOY3z/nOe/7zoA+96EdP+tKb/vSoT73qV8/61rv+9bCPvexnT/va2/72uM+97nfP+977/vfAD77wh0/84hv/+MhPvvKXz/zmO//50I++9KdP/epb//rYz772t8/97nv/++APv//4x0/+8pv//OhPv/rXz/72u//9kS2CBFYAmXQR5AIDmXnb5TAEAcyh2j/wBHHAAlyQBCnwD6i2LDOgbmlnBVZwAgHABG1jBzEgAwNgAN6SAhqYAo3xDz00HjA2dhZwAidQAQVgAkIgAA7gAECwBRkABWIwBsG2gRrwBDiwO8ARgiRhYX5mARbwgBYQADVwAES4BGhwA2QwAjYQBQjAAE7YBVCogWYAAG+wO3QgUIz2AgJgEC+wEA6wbgUgEBbwDydQAxPwAjTgAAJQBRPAATZwA0MQAGiABj6wBEtwABYAAguwhwqgAAyAABkgBA7wAi+wD0bQNnNAA1VwA0uQACP/8AAOIAQ2EAAVEABgUBAc0ACUuAQBcAMc4GdfOBAHwAFdWBH+MAcvwIICYAIFkANxWAFBAAIREAGyCAJsUAFDMAJIYAITkAMHcAJZYAMmYAI5UAMWcAYJwAETMAEjkABDeAA1EAA2MAEqCAf/MAdwQIhp6Ab/YAIIUQGRJQDgKBBnSBE68QVpcAQIgABTQAREsIcg4IM+SIIkSIQH8IAkaAH26IMH0IkjwAEjIAU2YAM5cAMJQIkVkJAKmZBL8A8BMAQ3YANDMIYTMBAVYAEVyUo2kAAkYQT3wA9fIANQoAFQyA
ALQI8nYAXy6INAiIcjiJInwI8ueQCVeAM5MAIF/zABJsAEKqgFhGiIXPQCEika98ABARCK/2AEExAApWgQJ3AANqAFzgMEAZCREEED/wAEAsAEBBGQBOmMBxAEYvmD+LiS8ngACWADZOCKB1kBz7iSJGgFNBkAN4kEVaCChbgPczAH93CI/lAHcMABCUCUHDAEUgkXL3ADN3AQRFgDI4CVzdOFGImUHxEABGGZ/yAHZLgQZiAGZeABL9CX/mAEc7APcOAF2qiGbFgAUuCKlFgDDMmJnWgDymgCD7CIPuAAffkCBUkDF+EAARCGA3EPBRAAAmCaGOGRPQdmB6gBbdABEuBDn6MT97APaLiCarCGbUiQBhkAAbAElfiLdP85kERYAAIABA5QAAcwAaLpDw7gifdgEXPwAARZAA4wdCJyApWhBA6iZKNpnWm4BgKwhjZwAktQkAFwAiCQBeAZAAcQAXSJkxMwBAcwAoPoABNQA7NoBTlwnxFTiA9QATZAGh6gRfgEEQJghqYpjjcgBA9AjPpYkAepoSBQAwkQh7+YkjEpBfdQMkYgEEBAGwv4EQ5QAd6oDzQQACbQEf4gjiOQjTQABA1QobxYnBGgkj8IgU2JLTTgAyNqIfpQpEgApEbKpCbgA1soEHBQkF2oEyaQkiyppRGjlP+QphaSpCMgEClKEEZJEEBwAEhwUWtQASg5ij9Kdv5QBRJhBFXLsAT1GJX4uXb+8ACtOQG+yXeRCn+auqmD14uE6ncPOYR+N6BgMAR/JwBLMARCgKk0cAMg0I98BwQ58JQ54I1+Z6ucmqu6uqu82qu++qvAej1jCniKSZl6JwABwAGr2ncVOQFwaI195424GqzUWq3Weq3Y96V9twQjcAP6ia27goef+HcHAK7meq7omq7quq7s2q7uan6Y2XdUKQXCyXeCuax8JwA+UK97tw8jgKajCnhzMALx2nfJyoNzVxR96ndAkIyAV7B9p617FxAAIfkEBQMA/wAsGANiAN8AFAGHOS4M5ubksrK0Y2Nk2bY8xaM0zaw2ampsMDAvysrMVkYRPj48Ojo8xsbEoqKkwsLEs5YsYk4UKios7tuP7+/uSkpMnp6c+Pjy1tbUzsJ8XFxcT09PRUVElZWUfWocrq6szs7MJiYkEhIUIiIkgoKElnwkjIyMdHR0Hh4b6tBw2trc6urstra05taMVlZU5sI9bloUNjY00tLU3ro74uLkcl4cjHIgmpqcGhoc4ro7hoaEFhYU6sM8qqqs3t7cpqakjnYknoYkfn58Dg4M4748urq8bm5s4sJM+vLU5uLMenp8vr68Ew8EDAQQKDo8bOKAxM4w3O4w9L6EHDgcpJ5QhHKcbHZEzPB8qqSQ9MTUpLowvngsCjAcYkBUCAgwxLRUfkBsxKLs5nhI9NzUvvLMXlZ0zqK0hop4NlAU4LgQ7KKwqoAcEmA4JGCIcnxsEhBgBAwQfpKwEhocdJKIlpioWqKE1MyoGhY0WFw8ztTwVHZYoLDo4tz0cFxAuLrQoJ7ACg4I1PLsbKLgvLy0AgYIcGRY7ProrqYsoGgsIBIUGCgo9OBwTmRkhGp0XmZQMDQg1sJAoKR04tjcNiIoyrzwMCJEfnSAPEhIoLKUbBqAuqzMuqycrp4M4vjsTlRwQEgwTlhUmp4gvLa89PaEFhoIZnZs6NIwhHhkoIBIDAwY4szoQFBkYGaUrnh0WkCQfrSENg5ALlBMio6YxKJ42JxANDBIEjBo9PK8xJxUuNi4bFrUqqCkICwoztLAuMbEfnyIyHjIZFZY+LgwJMCA1t7EyDCAQjgkxLQQ4rooRExIhFiAzrzMmLK48JwwcFRoCBggHCgMoOiwbKIkupq0hGZEUEBs2vLMQiY02vKs3rzc7uzYWGZsBAQYJGDQuMCkUEY45q5AfpJcuMzsaEAYho6IJCDAQFhAWlBYoMSYzJwQuMp8qpacgEAYEjBENi5opOQwNnA4BgIIcnJginiAhpyIyN7MGgYgqLLEUDpIAgIMBgYMCgoECgoMAgIEBgYEAAAACP8A/wkcSLCgwYMIEypcyLChw4cQI0pc2K/fxIsYM2rcyLGjx4YV+4U40OFHhxMx/FX8yLKly5cwPfbjt+EDBhkJZKhYcmCHxZhAgwodyrIfAhYYTHCIwUEIzg1Eo0qdStVgvw0UBIRQ2W+EhRUnflYdS7Zsx34uKPwQ8a+iCBMBjIg1S7euXYUxGsg4IAFHCA0sWCy4S7gwYX4almAo0qOIjCIb/BmeTLlqVxMyZOj18YGD5MqgQ8fcoUSAkAUSGJxY0nmu6NewL/rToOLHiJAoTNAwMSS2798P364QwrXfbBUsJABfztzgjg4UiIec7aMIgubYmfNTcqHH7Yo4oN//2JG9vO9+HECs6HFgwwALAWS4cG2+PmWaH3zQUOHDhwANvdknYGXGheDeAANsIAE/9A3ooF3G/ePPhA0+aOGFGGao4YYcdujhhyCGKOKIJJZIlHH8TDhQSCtZNWGKP7G4EoopFseiifb1g8IBFgjQAwkL2HjjQP4wQEIPHzhwwHcy7rCBCT34qJSQLeLYXD8S3IABCCwoVoQGDMrYIj8uFKHCAywkoMINCMgogQkgJMDCB0UsocQQYlZpJXAikEBDDxWMgIASMrDAQJ4WGZWfDgiMwIEDK5ggQkg7YNYBAyOgEAICWyG6J3MMJJBABZJ1ZQENJ6jUloz8nECDbYlW/yDDAwtYNBsIN9wmkIy7DvnpeRpQ0AN5AvkzgA8OoLAiizjcEEBYAzXrwwGSPSeDBv7s0GiYLfL6q2/bXdABPytykMASDCwbUggCqOACQa1G2psEAshwghEfNFBEBxvg6em3sb0V3WdtIVAECBwQxCICS4BQAUHGUuDApAs8QIGcDnQgAAYJDMCtrwC/9hxYXLUlAQsyQKVuPww8kEDCKwb7ww7+cNDABQ1ogIK2JFQXw78hh9ZPpSsoYeNRKSscEgIPIKywzCLU3MAK//irqA9GfBxS0KL1MwQJFJhgY8svr2yUYyoXewAFN+AZwxI+aNDiEDpQ0AFbq27NdWj+GP9xgQVW++MCBgIo12tII/xAwwAEvRWAEOSGkJ/ciX4tLt55V7j3Xf1UUJ3hVZMQAG9mi6BDpOQKZHDKFo1sdKI4wKdE6plvHlriNAiBw0wbPMB6Rf6MMAJX/lQgqgspkrbed4KDsAQHKQ4xgAoNcAC07ZP5s4GaFihhQgMqmOBTRQhk3GZbOwihQgImnHCDCiC4UBwOJFBvghId6KTEpHliDxo/FfiBCgJAgwcYAQUs4sCsarUrFBjhATQIQG0qoDUUnOABASBgEQYwvv75rzL+QAFTUNMbFg0hBghgUK/4IYEFcIABKKCS11r4whBoTW8fxI6eVpTDHvrwh0AMohD/h0jEIhrxiEiUCKI0l8QBLXGHTXTQE5kYxfJMkYpVFJC3sqihLXLRiUv84oPCKEYpXq+MOTojGrUIsjWyEYduvJJrvBjH5VyxjnKcIh6Zc8c98pGMfgykIAdJyEIa8pCITKQiiRgSfsTABRrYw
FaUFkIOaCCSKVzkWCpiwSVkBgQ9QN6KAGiBBIDglCw4IBY12ZKhCcEHCdDBCToAggfIr1i9k4EFTnACEySABiQIECuJUrxTakAE/MCBq2wjkErRQAc4GAI/dnCs5AwzKqYbXerOxrp/SItyFQmVda5JFNwxbldvyR25HKcD/iEGAw7AATmHcrKk7cofa2tbWxbA/wIf/EAJ7ktAZwg2T5j0421OyxvUcLkEChCQAj44gTALatAYuCxhLJIZebqihA+YQAMuEIIANkg7ir6EYQ5jUcTW8g8RCAEEQqCZP4agARAUgYEmfQm73MWieHUATwZDGIsSh7WS5vQjsYsLi1BggWmpxFwNOBSlOhCA2R3VJfwwwqsQWBEFNqBWB1XMfJZmpgEQ9KpnQYAAAnApCWxAcZISSHgowAIXIEACHICOdVaJVockiiZF8EEDGiaDDpxPIAv4gQ8w8ICGVQdbfe3ICFbIACVYIGMD+M49JUCSH/SgewxQYWRlEpIh4AAHO+DW4aaprB0MoWSjja1sZ0vb2v/a9ra4za1ud8vb3vr2t8ANrnCHS9ziGve4yE2ucpfL3OY697nQja50p0vd6lr3utjNrna3y93ueve74A2veMdL3vKa97zoTa9618ve9rr3vfCNr3znS9/62ve++M2vfvfL3/76978ADrCAB0zgAhv4wAhOsIIXzOAGO/jBEI6whCdM4Qpb+MIYzrCGN8zhDnv4wyAOsYhHTOISm/jEKE6xilfM4ha7+MUwjrGMZ0zjGtv4xjjOsY53zOMe+/jHQA6ykIdM5CIb+chITrKScawAArxgBuBlggdyQIQZWLm7/QAABIhQ5Stzdwg1cHIOZkDlKgMBu8bR8gu6XGUDGMB0A0ywLhMUYAMI5GDNVn5BAWBggxIoy7oF4DKXrZwDHkAAABEoQBCwu+YxW3kGL4CAAjxABOzyYMw5wDQR9FwCCPDgBdnNNAEI0IIJpIAABiDAP3Kg3RkYIAMUuMAEjuBl7ibhAkhIARFYDd4JrJq8UM5uQAAAIfkEBQMA/wAsrwRZAIAB/ACHKSMHSz4NPTEMdHR0wsLEGhUEs5cscF0Zg4OEfmkcMCcLYmJkEhIUjIyMWUoUx8fGoaGh8vL03Nzcz642HBwcWFhY6sQ9fHx8d2McrZMsOTk5JCQkg28dbm5ssLCwQEBA+Pj3R0dH5OTkvLy8Ly8vupwtqI0smpqcl4AkoogkUlJUaWlqJB0EUUYR8spApqakv6MvinIfnoIkZVQUFhYUjXciro4slpaUKiosFg4EqqqsCgYE4708Xl5c7u7sNi4MkpKU6urszs7MTk5Mtra0k3okaloUQjoM0tLUemoc1tbU3Lo8Dg4KooosYk4UOioMxqY0Eg4ERjYMDgoEBgIEuJ6sCg4IpqSQ6MIoFGZQkn4MeF5sAgYI0tLgjmCEiKCIYHoYcEQYTCYoTE5k1M7QvsaAYIB8rp54jm5IlHqQ5vDATD4g7LwQ4vCMXGZ4SE40zuY0QE5YeEZsBAQYIDwcxHgsamZ4MlZMPDwoFhBgeGaYcuKkcsQksL40GiooYmaU6HhIyNbQgoAYwLak8Li0am5AlqB8anJYQDhAsKBY+ubsznjIXj4Y3NDYMD44amZY+vDs0tD0qLrsSl4Uzpg0QDBoorSsBhgUkFgYysbcSGBMqLrM3OjocqbgeHBccoB8zq5Y+vDEqNTMtqAMOEBAqoQMqmws6uj4WlgYppageByA1ra4tnh0OCQ8doCcqMKsvNjM5LxcsJhAvNDwfHB40tzYikQYXFp0MDJEjpiwPDA4KiLAgnIIxrbEJjAoIDJEzvDYvp7oCggwkpB4PmaIsPC0hnKQeGDUXHJo1tbABAwQYGYYFjJoXkooCjIozsREdGoIrri0yrbsalpopp7EurrQhJCIDgwY5LI4dH4YJMSAflgYWqaE7J40dpiYvMjYQA5AytCohJhcMiQgNFYUhLaEKmbQzrYQJhgcZlgoTDo8jnpsChg0Png4qoRM0J6coqAkdoBcrrSgzjCAIBg0fIBw2MbQurDEuMjECgoMAgIMBgYEAgIEBgYMCgoEAAAACP8A/wkcSLCgwYMIEypcyLChw4cQI0qcSLGixYsYM0bkx3EfBRIaSGzIx5GfxpMoU6pcybKly5cwVXKkUeEEkREjIKzYsM9kzJ9AgwodSrSo0X/8aFwQIkQHhBcPkADZ4POo1atYs2rdejKEkAcLNlDYUOGBiApVuapdy7atW5gLgtygIZAfgxsgBqR9y7ev379/K0h4QZXjhhdn9wJezLix45gbIAi5USFEhQYPGlB4zLmz588S931ALEGIhCAeNCgGzbq168WibxA50UC2hxV0X+vezVstiRceKtDIx2AIESU99vVezrx5zH0DRDRgUpLJBREQNjvfzr07RQYnfHT/6FmyghIPOLyrX8++IBMgIBBQ55gvOuH2+PM757dCBIEKFDBAgQojBHFBPvolqOBrOJxQGgQNnPDAYKotaOGFj+1DwgUeEOChBxdooByGJJbYl0k4fKAiCQysZuKLMMYo44w01mjjjTjmqOOOPPbo449ABinkkEQWaaR+THxQgQYklVQSQvvgoEIINPhUEgMaDKHCBzw56eSRYIK2TwUjSABBYV5yVBA/JEAgwQghKNcRCQiM8NUDEKgwX5ph9ukYPxq4CQIRJKT5JEEMXICED0hUICc/ONzwZgMX3PDAf+Tx6eemflGAgAcvnIeDoYf+MyaoUTlKX3REhMBAPhQM/6DEmaRyaqtb+axAxAULEOBBoVV5WZcGL0BQwQuNkkcBBD4MMOI/G3iQ7EDC3mqtVvx84AEEJHzgK7DUfvkPDQ2MoEJkyRqmQ2ID3RWBfOGWeu28RlEABBFDiPbAr3t9mesDFzBBAwTp8kPBC0Gs8OxmIJyQG1Li0itxUPzk0wEBA7T4wb6FrlmSaES8QMI/yxbMBALYkdBTPio8AEJ2tU4sc0z8hDDbqNnuO6rHhgFhrnIlq5DpByNgt0AFHUAgwssUxDzz0ysZfAKc5G2MXr/0rUBAwAItK4TQPuUzhA4SSKAEAQ14wLTTULfdlRBINDDAABcoLcENYVlZ0rIivP9wwdwNICGCDgN80GSUSHegAg4XBNFAi4a6LXlGNR+nxOVKLA2CBB58oDdHFDSA+eUSRABCEA90AHmaNDi4AoJ1RTz57BPRYJkKuF8mARIXfNAiUvuQt48GFahQvAodmEVZl2mOicTVsctO+/QOGRrCA4Q6ucEAcZrET6YGI6sqfRqEIJIGK9iZHNvUt7/R9dmXtAB2TUcP+gsSODoQudh7YOcIK9iTptxHwIewiVJNK0kIXpCxeHGEAQMAwgeelY+avMApCPiAAKtVwA42xCM0AF8+NlAlB86EAs9CCqw2IBYGgE96HoyhDGdIwxra8IY4zKEOd8jDHvpwP9TSiLD/1KSmHxJwHzTQgApWMIAKQG4h/GCCBhbQgRUMoX72k2IPOtCBBWiAOkZ0HwUuQAAJ+AAEI1BNERESqzIh4R9CgMAEo8c4OzFFCS+YYxip
t4G6NUBpBFCji/7BhOgArAILcBC/ILaBGwjhBYqrwABWgIMU7nFyHqFAPrwVSBgiRQNFC6Cp7CUBrv2jPnHjCfCadEnaeQmUnZTXQJw3gpENRDCE4QgO/CeifDAhU610pZM04CFBrmYfC+DcBggyBCWk8XsViGMIVHABBHTAd4MM5sReWUxP7kMFQggktXoggq/twzr+gcBNovKAAWBRm23jZizXSBA2EcFAm2ECCU4Q/wHknLMBEfDBCLgXggsoQQKvg6fb5GlMhDChA2Y7wQUu8AIh+EACyWFAA0Aggg7MxzqoWaZCocZQTwqEBivwgBD+QYAbNIApQjuZD+ZZs3B6bqRPK6ksC5IPEgzhHyHYQAhwohqLiSB+HCGmEEKQTZxyipvP3GlC+NEDM22mZpcyplcIcFOnymyYRBVXxRTCJh0oYQEjOowIFLaqkHp1ZgZTkayUULgPYJFxPfidhor3jw8swKya8ck+eoCEdoagoKZJ6FsnhkwC/EMCINhcOPX3D8FAr4IjKGxUtkYVRGmtNKZpZwkXKzHRNOC0qAVCA7rKuAX8zmA9QMA/5Oaqfv8laQCn5R4YSctYJvhWIL79JUGYQBJqKYcJp5xqxXxZXN4697nQja50p0vd6lr3utjNbtT2ATuB5CMfPSFr8BCEoNV8D3bh1e6RDLYC1DZgtqtN70Fw0IMLvPcfhdutqUhQ39MioAeqVG+RAEUAH+yLCDfxQL7M+9Co/GMEDzibO72ngRMgAQkeMpscLSlgIIHyARXAAQlGTIL5HOR7ft2SBj7w2YSKBgEIUIEGNNADD4jgBJ3tsJBAqQORctAgYoWODxyGFCZo8mPXM6eOdzwCItj1yFKdalyIzLMNrKsD3V2yj4gpgm1BaAGVbMgKSRACsmEZyBwhwXEoq2UftWn/BB7QwXHMJKKFmFbOF0bAO8OVK851rM0+ooGKcLAB/npAAji28wdu4D8kjKAHyAXyPoZ61iwDekff8973QlBGtJB1hP/gLwQUzOFJe0AJFxjtpYF0pRPkhcM8+94CkHCmgpgaCXpu6qpt5KR8bPQCsDYhmyDcVVOFQFq53nWQnLSsCIxHudx8AFMFcusLJFDZPIqiCzuCUiUs1UX7YMAvuX0d6JlaCQiokkmxHaMKAuECPajACoCAhFIyICHba8AKKiBvIBw0oWx6QQSEQFAtDSHA7K7Rv5AAt8uNoAN7rucGXspwJCjheQsoofM4+oAHwA3Dokx4jeKKyBUsQAUk/2juifchVHmb/ODkMRVZqrgCLnZgknUWOY7oSRGeI0TXOg+60IdO9KIb/ehIT7rSl870pjv96VCPutSnTvXF7oMFAXDAEQoA9KrniAoKiMEElgCDBLCg6163UQE4MAELuH0CGChA2pftDwe0nQc8cHsJBDB3H11dAEYwgNvxzgMXTIDvfc/RPgqggBkUwQQmsEHbLZB3C9hAAYnnNRMA4AAOpCADKcBAABRwAChYwAUWKIETppB5GfHDHwUIAAZQkAETcMABCogCRwpgBBvAwARO4HrrTcQPKjCh8bQ3AApmIIACpJAfUVCAAAAwBbQPvzuvZ0HnTWCA0AeABcG+Pv/xFy97FBggA7dXgD+sL3797CMHAphBDTKQgSLM4Ae6bz+N+LEDzscgA8pnBM0nX/oHI1cXABxgAiVgAqLHAjvAfgXIHlRQAPGHAiVgAPanAA8YgTECfQrgADVgAMp3AAMIgRyIff4AAC2QgBkgAw24fif4IosnAAdAeyYQA7gnXDFYIov3BPIHectXgjtIIvwwBViXgN2XBN8Hg0N4IfxHgTVYeziYeybYhMvxPdH3gxkQhM5nhU44BSroeaDXgBvohQryPbGHATJgACaQAC2gANVnhuyhGEUIeBZoADXgBPhXhXLIGsGyAx/IARlQAt5XAFTQhwnyPViHASmwgBz/8H0EiIgSSIFGYINTGIeSiB9R1HhFQH/2J4SZ2B4puILcV4hlGIrrMYNGsIYZUAMOQH18iIqesXgf2IkmgAICWACnKIvcEUUAgIApYAMpoIQswIS82IuwV34mkAJTGAWHeIy9uHg/oIVcGH7QuBvZt30G0ARkGIvXuBhoKAVqKILph4nfyBzFFwXxN38YyHz5d47N8Xqch4QocABbZ43w2BrklwDcx43ft4v5iI0FcAQH0IgZcIneGJB8sYkgKIIyQILvqJDYmIIBkAApwIYJ8I8JKZFqIQACwAIFwAI0uIa2h3tREIkcyRowgAJJUAOQVwQk6HwbmZJtQXkTYAAY/9B8xkiTvFF5RXB2PLkdbscBkRaUzJF3PIABrGeUTIlDPLAEE/APDtCUy2EABxAAVMkcMSB3WckbfHcAXckcWBmWurEDGJABZLkbBYACKMCVaekaDLgDb6mP/2AAUzmXrDEFCWAAiIeXnyF3MuCXrZEBCSCYn6EcJXCXhskZTIABBnAEixmZE6MATSCZnTGWlvkYZ5mZjlEANZACAMCZjKEAKcABoskYAZACinmafnEAJtCXrNkXnxmbgJECtOkX/BAAJnCbfbEDRsCbwBmc2oSWwukWNlCcbLF2gYmcakGZzLkWAYCTz7kVJmEA08kVCYh514kVAJACReCW23kU0f+JAUUZnkXhE6tpnkehneppFLbZnvAZn9eCAfJZFGtZn0TxAxlAn/gZFC1gl/0ZFK4JmwHqEp6JAgX6EwrAgAkaEy3QoBAaoRI6oRS6ainAAhWaEhj6D0uZoZQTnUjhoRqxDzNgAJgpohfBBJ7HnihqESyAAhyQAy2KEUdAnDN6oziaozrKIw5gnTtKEa/5oxJRBAgqpBERekYKEVOZnkmaEI0ZpE3KEPcZpVRapVZ6pViapVq6pSTynlxKELv5pQKxAwMqpv/gmf+woV9KmhwAnlsaABkwA/5gpq5ppgMBmnaap3q6p3zap376pf7Aj3banTXgploanQfQoVw6AyUFYCpmGhAAIfkEBQQAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwD/ACwaAWQApgCZAYesrKz1z0Z1dXSTk5FWVlZhURT6+vnPrjbbuDp+fnxqWhaGhofi4uTW1tSUeiR/ahxxXxg2Kwvm5uSLdCH24oyvkyzlwD2lpaRsbGymjijsxkBkZGQ0NDQbGxzDoTG0tLRZSxTc3NybgiWOeiPFpjOjiSe6ury6nC7ExMTLy8xEREQ6Oj0sJgmcnJxIPAwiIiQiHQQrIgh6enzq6uzuz0ROTkyspHxANAy3li2+vrxTRBQsLCzS0tSWfiTu7uwcFgSSimSKjpQmJiQSEhTm1pT68qxQQgwWFhQWEwQ+Pjy+ojBybkTKunxeXlzmyEw6MgyymiziujxKSkyEchzy8vSumkRWUCxqVhQSDgQODgrStjTi1qyukiT6+OAKBgTyxkRKQgwOCgTe2swGAgQCBgQWalCcfkieoDQ4WhS0jAzyyNRkrKi+fnSgkpSchAzUMoAGBBje+OgGDBjKrBAYNmgSDBjeyMRQZEz22uQKDgjQyLTOqrTAxthiYoSQwIz4zCxGNGR
kfhwSIBTe6PiarqCEbghmhHhUOBT66ugMNii43ny+rrjQ9NTauChycBxUUmgkQDQ4KCDQ4tQkCCDQ3rSuhiy2oLQMGjS48MzKzDC0yDAkGjRGEEBmUCjuxISIdnjQ3vSSopQqNCiamry+cizS1IRUPjjAzMC0oAyIfkxKUBRUKiSCHoCwxsiQoGSStiji2vS4xqTWyPCEXhwkLkR84igSBBDqfkjq+ujaxjS6oCCCZMi4zvAoxoC2spzAqux0cmDqpDBGKBSQrOSIhGx85qicdgweLijs6thyYDAuatB6SijOrCh2ghyWSkguJMDKplTqxBBuhExmqCj4uEBEfDgEDAigoIS0jEzk9oQGGhRmQhjKjCy42LzsqrBmSkC6stCAajDUfsgMCDC0sCCemgw+KDjU7DA0QjTQwtCIZGSImihQYhQYEmBGQixwSnByYAhEaoiSlICcZCw4WkxidHToxCh0YHTo2tgGBgwGBgQKCgwKCgQCAgwCAgQAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhwf96du3T58/fxAzatzIsaPHj/8k7tiwYIAAFfsuglzJsqXLlfpqfPAhoSaPBEMwvtzJsydPfxx4zGhBQAUGFBIE6PPJtKlTiPsEGLiwT6C/FSF4rHjKtWtXfx0uSMCg898QABI2lPXKti1Lfy/Qqr3oL0sLHwuWut3Ld6O/IwPw0gX7gUqLIX0TK2aorwmVFBwoHsHAYGqHxZgzD4R7YUaIFgMApAhB5cIRzagVX3yRYHQDAAIGzBiQJbXtvXT1HenQIcu+BTNkrL1NvOlglSE7mAhRY3jx5z8l6rOoO4EP086ha387pAmGGioIDP9gkIND9u3oPYIdH6J9iAtJkKefD1IfBwwLFgiokfM8/f8ABijggAQWaOCBCCao4IIMNujgVxJRZJV/E06UhW8VyfePPhf+c+GFVT34lET3LXCBCS0coaFB9sUGwAcXLECAigPtg0EOJnyQw44fCEChiG91cJcEMxgQwg50JbSPDO2lgEIKDEgwwAsDZXGBkTruaIJwQDrVnQwEbDBDAy8kiZB9NazwQgdCEDDTAjrZlVYWQxyxW05dGmfRPx2EQKaZER13kQoGoKCXXSGoEJKgeToFFgN/rkiQoBIRasKhLaQ1xAtHWARoo9F1AGmZkm52kT4dvCBEDReEQJZAdhn/kMKLF2DAwZ6g/rSoqJEqdKoKFzw5gwQL1CbQbw3kmEMDPoSwgV657kQXr6T69ysATyZbA6767LADdTsMQEWi0TJF7acJXZQFsDyoZdC0LVAxwI/ldnQuur5ywEAKl51JgKz01rsRrx0MtlBZ/gzxAbln1lBowAI7pFOfDRSMb7ovjLYVQlkkYAAAEEfs66n6COHnt55GhKpvxx4hFQqneXhEiGYR0AAD7oqsXrcJCLAAFbMJIIB5Ee1won74meADD01gpA8BAFyQwD8CiCXBYTp/5M8+Ys7wz7AzeKZURB0McHPYEjRwgQp66aMCAFH+IwF5GBSctdYvNNEEAXwT/7C3EOkewUF4BNTAQcxWlaxCDXyrIAS0d/sUcuTETU755ZhnrvnmnHfueUa5DdEbvVvb6SGuFe5T5xBZoP75yBclgcIMF1CZ7j4qDJBCAylcQEBKE2ZBwAUp8DBr3ZZnPlhYVBSKZEKNheBD7ybUlIBedS3gAwMoAiD9B/2+jhBdWQjQuwQ5lDm+ELMv0GkWNaDQgAoYATUaAa3jnsNYyVOemwo5GEANGoACUh3kadPbwUD0sQEDtGApTwvBB2K2NRkIRnyBAgsAymOyApYqKoYxVkg4YAAeIMZtBPyWRACDMwwGqmMhaFoHDcgiAZQGcVcxEpL+kinfhWcBKWhB+P9cuJmY8IA2/tgBAzyYnZhIIFE6GYIMjES/kBjNB82SgA9MEB8iTsofQsCWApMYAiaSDQA+AIAUdpAEGTSAivUTwgBMAIABnMgEGKCZFz20AJzRZQcNSF+pNqSCmcyAB8arXghWcJEwtos6AJzBq/bojyQ0IIYrSIIKBJA2WyFmfC/AQAsuMAAVcKABf9pHAqSUklP9y1B7JCSzsIjF5hngMQR4CFAKVRUrSfI4JEyBHl0Ilg0ITQYyiI0BpLQB24XEV0MAjo88dJe8qORphYIcESkFSEHWSFE6qcsRarOPDgjAB+ATiD4w4Bn87SMLKwCAvPrnv+Mo0YPqdFgK9BL/FRR8YAAtSIEPUMBIqwgJi92THgqeF8sJ/dGfgFOnChjwgbZJAQA88FMOZCAEhIEFA8thQBkTsMOGmupUL1DfAjmg0g0dQQgK7IA2c/PSHQhBpoM0KVfoqdOe+vSnQA2qUAlEKYioxDmUyikGS9eBHbD0YgXRxxBgatNOEUQfL+CAVrd6OJ7e7S8CyEEIDGCAHGRBqVtjlfSaBYAahAguJjCAD8I2g2YJQKdA8V7xytpKhTRmexeQQQLQmJb6dWB/CdiAYjGwAYbGcmtCeME+aoDOvibkCGjJY0g6dsPkmIAHNGIUXitJBRNYFiEZCwEHFigFil4GLJ/tlKcMZlK6/1jqtO/CLM4qohtOLqAqsMUZ35IwhJTV1rYGMG1OJVKD9gwAAwJoAQ8u8DzC0IRIEkiBAGjU0OPcdrlbwwAPrru0JuwpYRhIwN420IKwFcuLUuCToL47sg4sAAALUOwAcgCAKm52tlxjAAOa40IG/AMFz/JucltZqoRdYLebteAHqMSoulTTqwIb2nwXTNuC7JIHIhTI/nJ5nBr9bADaXKp3S8vg7PiDUPvczBE+MJZFaeg38kqx+OaLztm6OAkGaEBHnbaCjJJYIkcFY/Um+dgsrGkDA+UAb6D1AgIkYSlgQcE/BnC4DiThAqW5TLdqsIMO7OZtZXXsHqPCAxSMVf9WKABAF6OS3H49bbwhMIFYB6qof9jIB8n6AFJmYAL/NlSVfuJBAxCZLLZtiAAmWMAnR7gAE6QgBR/g6GZ2cN8noaBWLa2tj6UzqWd6uMNfTPJQV10gDLP61bCOtaxnTeta2/rWuM61rnfN6177+tfADrawh03sYhv72MhOtrKXzexmO/vZ0I62tKdN7Wpb+9rYzra2t83tbnv72+AOt7jHTe5ym/vc6E63utfN7na7+93wjre8503vetv73vjOt773ze9++/vfAA+4wAdO8IIb/OAIT7jCF87whjv84RCPuMQnTvGKW/ziGM+4xjfO8Y57/OMgD7nIR07ykpv85Cj/T7nKV87ylrv85TCPucxnTvOa2/zmOM+5znfO8577/OdAD7rQh070ohv96Egnoo59DQQrxAAGCxRIGKBebBoEwAkEgUABICCCA2iAIxAwQgywYJAI9JQIFKAAQSyAAARY4OscmUAPHNCDEhSEBCQoyAkwKIQNXEAMXRiIBQY/eI+wnQQVKEEPJvAPBRTACDcw+0C8sJAHGOQKJ6B6tCqQEMJzhPCHr4AIpgABELiABT8gCD82BCuDWF7z/yiB3VetgAdMQAQlqIASDnAADyjhHydIfA9I/3gQlMADAll9D/6h+QicwPK1doFALI
/7ClTgBMgXyNdFMIIRHAAB0I8A/wQOoAB+UD7XSIBBDP5xA4OU4AQaoMEBlOCBE2ghAP9Y/AMgUJD161oBWYAFLuABJQACOlAAPcB2DpB7XHAAA1EBGVABDihsLqAED4AF6fcEIkAQD4ADAWAB/4AAwFYAJyB9A7F8C1QAGmABURAFwcZ5AxEBEzgQN3AAbwd3yuYPEeABbqcBIqhsYTB+DjABDrBs+uACFTABq7ds/oAEDnACT9Bs+gACFuhqtOYPMCB7/ndsD6AXQegBINBs/vAEJ+AASACEsPIAJ9B+yOYETGArR+ACXPAA55dssmIDJ6AEVrBstlQEAUAEAJAABLBaxrYEQfABWxAAFBB4/2AAx//GeP8gBFVwADYgUMhGAkYQEhEwhy+wAjKAbCLAD/6ABQ5QAVtobDqwQDpAAhBABsk2ARkAA1iYAbFohbB2AhDgD16gACQAArZ4ixHgDzFwAiIQYo+oD2GghmyYbCegA2MwgF2obLeHgSKgBKdYbPtXAQWgDwWgBLmobD0gAjAAAxkgAmf4i0KlAQfwABUAgBDQjMvGBSUQARFQAQ8QBsrGdhAQAcqobAGgASRwAyCAi0sXbAgQAAjgAC6wgLBnbAd5AAXwAFAQhskGAwigjhBQAhNAdslWAG+HAEpQAct4bFlQAhpwkghQfsr2Axc5eFzQkMYWA29nARCpVL/2jDf/eAAjeWykOJMHYILJFgMXiQBQQAIFoGxj8A9v14sVAAEFGWwPcJIVAAMiAInJdpAIMAWkKAIceWw6cJIHAAJhMAEwaGyj6ABgCZTIhpPqeABmx3/HVhcT8AXPpwRQd5THpg9GcJIlgHmpl4rFdhHJGAAHAAH8h4/IppcHEAAVAAbQh2yjOAL/2APXaGz6oAOLiQAQgAQbCJmc+Y+YaIQF8H0HMJU5CAMVoAEQ2JnJxg8QEAUH0AMZ8Jhx6Xwa0AMSuWxZMAU+eAVqqGxHSAIakAFG4ACsaZY/UAEI+Xj2GJf/4AUFcJA4EAE6sHeQGQMSCH5IAJfIlozzV4KcmWxj/3AD1ucBPfADzueZT+gAmMiN1mmZRqAEt2eayYaFslcAzalsrnkCW1cBkreWN3ACExABJTACT/lrozgFSnADRgCPYVCElqkDFvgDPVABqZd6ZkmVJQADEcCKygadJwAC+8kC9bmJI5AFPyCg9ZkFD6AELjCFHqCWxXaZJzAF+oAEJXCcgemE/vmMJKAAyQaiVzCKVhmXWSiO2zmDZsmiCFACXNd2cekPYQACi1maNohsWCB+CEADuOgCuVlsMVAAIiCcGlACJIoFRFhsE7B75dikSKCLmWhsOPAAN4AEJFgAYYAEcVpsBfADEuF8CMB9FfCDxpYFMDB+H1iaI5B3xfQGAmSpBBYQAL0IA4hZbNeXhA5wANOJjqsGATcAA3YaokwYEhRqjvpQh5bJfBOAADhQALWHbAf4ABnwdgdwAthnbNiHdw/5AI9ndrMnbEVYAC5wA6v6jaYWl0hQoZpHosgGnUqAl9uZbPpQj1wpEBBwkcZ2AwyaAap5Ay4wfjgYbHlHAh7AgytYqycgnMWmAy7QruNHAldAjzs5o3d6oAh6qlQpAhiamCwAAY9KkWtJoJFKmAXAqUOVBQm4goNnisn2A4vJdh8JmMfGkh8YBRHrnFhQAqBnAdMpnuRpATTAsTpgr76mD09Alj3gAqhanzb5agEBACH5BAUDAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQQA/wAsGwNZAA0DIQGHEhIU4bs8ioqMtLS07u7urZMsw6IyZ1cUgWwcKSMGmJiYrq6svLy8ZGRkzs7M/f38ysrMtJYsLi4scl8aoaGhMjI08s1E9Nx8dnZ0y6s5qKin1rQ6jXcj0tLUTUANbGxsIxwE5ubkp4wsWkoThoaEQDQMJCQkjo6MUUYQemcbcnJ0RTsMSEhIWlY8FhYUGhUEfHx84OHhWFhZ9vbzOjo8NjY0ro4smoIkKios3tSkUlJUMCcLo4kj7MVA69+k1tbUXl5chXIfkpKUOC0MGhoc2trc+/PJrqJ0goKETk5MCgYEemIcHh4c2MqEnoIkl30k6M5wsppEPj48wsLE4cJPxsbEFg4EQkJE5sI8up4uXmJkwq5c2r5UEg4EenZcOjIMDgoEBgIEYk4UwN7czrjskHxsChg0kG5IJsSAcEQYlJKAlnyQdOSoquywKiLAwMrw4NAwxswwqpagSE40QDo8kngMGiooikQYFhBgvLjQ3PSIpKaQdMQoXkooeEZseIBcIBg0WlgYPng4DgwYZGaUxMS8YGYY6tTsMiQwcloIzODQanRckJq4oKqoqmwsTioQQDJssqaUeICc8O7gTDI8CjIoYHoY2uLEcoB8Cg4IOChMJjAoorrAAgYIPmaIXj4Y1tr0PigweByAQBBAeGDU8JwwxHgssLrMxrjEepqgKmbQspoMkFoYyPTY6rp8hrqMXqiMqrqckKSQQE5YrKwgSGBMNFYUBAwQrMYw6HhI4PTYdKjkPD5QCggwwMTYBAQYTE5kODooamhYQEA0dH4YqoRMinaQfF5s4NTYqs60xKCs3MbM4roo8r7MMlZMbmRwtqDohppkqqDEIDwcFjJoqsR48trM1sbwkGKElGoYopog2tTAbFpo+Nrstnh0znjIzjCA5q5AqsTsvMTEqoQM6KCwfoBw1uLc4LgQBhgUFGZQIDJEYIB8xsx8XFp02Li4wM7MwrikfGaYzuwwzpwQsL60xrQQCgoMCgoEDg4MAgIMAgIEDg4EBgYEBgYMAAAACP8A/wkcSLCgwYMIEypcyLChw4cQI0qcSLGixYsYM2rcyLGjx48gQ4ocSbKkSYT3TNT4JwHAvn0nY8qcSbOmzZs4c+rcybOnz58b9zFpoIEBgwFIavSDCbSp06dQo0qdSrWq1as0TQj40WEBBQYdNEjph7Ws2bNo06pdy7btz30NYlQB8o9Jja0KmDB1y7ev37+AAwseXBKAAgIw7g2UMOAHi72EI0ueTLmy5cs6TSz4J+Plv30uKBD4QBaz6dOoU6teLVlzDCBLX4Z+IAAA69u4c+vezTtmvhMEar/sJ4XBAwpEeitfzry5c937ZPz4AYOFFBkKYhxn8ry79+/gw5//dfGhSgwHUxgsqEJAQXLxf/tVwECBgoAk+SDD38+/P9WX+bCAgQAkNMDCCf+QkI9/beUDBAMx/OBAEQ4goReDGGao4U0vDXfPPf24oEAIDei3oVX7sADBDyRY18AA/2Cw4Ik01mhjRh169k8/LFQxBQ0m3ghVPjAQQMGFPE4xxRVBCunkk07uk5IJLgDARBIadKBCflBKxQQFMXyw15cxNNDlmWjaaAIMGihwAgVT/GOhjmkCZYIGRQCRo4gEkKBYnYAGKh55C6THAAVAEEGnoDzxCUNs+9w5gwIuMGrppcqBJoEUV9BgQmyY8tTPByEwwAIA+ZiAQREzHBnqq7DG/wrrPjiA6UCbGkwRg5Hcyerrr8BG2Q8OMAwAwRQKqJCdArYF6+yz0MLXYaoSSECEV
jE8Gu223Ha7W457afaDDk16a+656AIGrmwwFJFXuenGK++8KBLRAAZA6NAABT8MIMWi9AYs8MBvadVBET/8U8UJNCxF8MMQE3QPDizokMQVnyrUDxESXJFEEnotmmM/EuggBZcED6tvAzJIQQSoEcccsFBETVFFFQwIIUGT/dxlbBFFlEgnuDTgqQGQEO/Tz9KQyuw0vVpVMYAAMMBwAhI7o6SDBgNk+QAGHQoELhEn7FoFk0+nrXa696jggAI0AHBPPi4Q8edBQtEggQknPP+gQtifdXjPBwMs0AED/8K79uKMh0pDekAuDXhCOcLgN+Ad8jjACTIMgDjAjYcuuqD7kKqABBXIAAQLGS/UoeV/69ihCQoMcAVjUyQ++u68p9mPAARMrUEVDvxDgQ534/365bLvk48KU3ww8QBV6N779djXaNgDpQrwwQdgTsFCaQaBCzvm/STBgAIm7MNY9aBnL//84Yn4wA8fuPAS7SEIUanyy4sdU2ilgAVcYSm4A5LiFqe0pX2oafSL4HK2twAc5EgHh1tJ+czHPKbkAwNVkN5LcLCAzy1QbfKBQQFhJAQZ6O+EEoyhZaREguMoqkM1YIAD0FaQdZ0vR5oJAQX/MEBEBfygCBT4gFKul48PKKlwOvyBAiwowyqy5iUfIIAGQvYSKdzsXxusXAf3pwCETacDMZgB9xjQgBnxrh8mkMLemGACFmigT8mzoh4xs48rQKAK43tJ22KgAROIjSww8eEDxJSje9BABjqIZBJgcLATgIx8u1vXjoCgxf/t8ZMzzAcSiqABHaCuAeaRnkDspYMZKc0E1VLAA05QLSaURmk5wl0gsbeul8igk6AMZmVodQIHOMBzEprTZzC4AEN+RisDGIB2ihBNGPRqXRVgwNlgmDbBmaACUnhREUgjzHJKJlINUIAGNNBCRYmNBgqAwXsihYTCrVMDCxiAPMUG/y4mwIAEWbte5uwIAQec5wMuMadCByOla1HJYQOB43sEsjEcmOCiF8XBy/g5srphkneZq4AKc8WAD+SHmwtNqUoH88LhAMAFTLiCAqJ3D5Su9KY4XQuzNLkjFQ0goDkNqlDVNbl/aAYCjxmqUpfKFnAtRps8ZKpUp1qVfjCBCSDqR0qQELz2UfWrYG1KpE4wNRWoQADSdIAMPhrWts7EM/HDm9hc51ReugAGEIhBCP5xHgVcoaZuDexJwOUCGWAAPwwBQDj/YaCJio0IUtDBBw570rg2bh8AkAANrnAFObqArYIN7Udy1MQfPGABElBIpGBQhX904AcQEMISBQIA1v8WgQCn3VlRRcvbtg50AT8IXmoT4gISxGAKGJDBB/DEPqa4gD4n0EAIfpqj3lrXtyMUQpuOiQPK9ShPgMUBnhrwp40xIUBToK5lr8veoAKoWB+QQXqHe5DBEdKZn8kipQLXIRochb7tDbBU+6GDzTFBCvNNCJFi4B6CACEGFVwXgtVrUwFbeI8SoMAC/uXFnyZkcEWoYESzyAAF5mjCul3vhVf8ydpOgbz76DCAe4jgGKjABXOTwgIeAEhIdXG+1WWxkMvZDxkwgAQ3nHB3E1LbGHRAAQnKEgGQKmEg7zZazvumtRDJU4KECHU44FLm4kiDMpe5BjTAQR6HzGLxVgH/P3PTAQQY8FfQfoYJKgALek4ggCL8qMoUVrGs+kEDAWhzChSQQWXr+pl8yABOOGMYpJggAAdA4NKYhsAU2czmfSQBjRo4wQlqRwAIwwCoPcxHBZLwDxYQgZntAxeKGQ2tGA+gCOvLkpYWHeQmHqxNtzZghwAAhBOQ4NhIEIIDHjCAXnGaxTGGk5Li9IMZEMABFEBaQ/qhghicoFJES68FgxwtIgghBkKoAEyB8Ecm9fIzCMYfE1yAAwGEwD2CQ1U+9k0Daapgzc8WcD40a2YaqAC2DZDA3QAe0SswAAJJuGV/jzJuQc/qCh34s0BcIIQQJGYg4CLSvZsV4ypQmaMd/2LBDxhQgQoH3K0Sltq4BVIBJMigvDQAQhKkwIIPMOAfMEjonXkOAwmpwDru3FY/GjADDTRrRw0gJX77SYEQiGkgHI8BBu5GWq6ewI0vX/G6fKpbgTSAVwJp4hQ6YPKVq+BCaXeiQWcwAwoNAAgM/9U9YDCDE5Cvjz6iAchzhIPG6MDL+nVjjnLYgc6EXcjrMoEKGvBCgdzl5hQ1QQOQYGwVnAwyPBKAqBF0AiEgYSzcAgBwtCW2CgzAASwgSI6yCXvZc9Lpg1fa2Rfg7MeLHVwfolM/wD4QxYAIIcPf9z/mtm+IRktEIcCAjxnTAVbnfh81mAJSZf9LDXiyQ0QYEf/rfU9+kKoeMT6mfeyvr37ba/HpKf+BxstPf9E5DwkzWP6JfSSF69OqMTIgMSrQHoonJUWiAMRXfwq4NkxHAS4xHEAgdf5HJmaCddmxdYPnej/geAvYgWrTRxmnQf/gAmXzcSjnPDBwb55UAyaXVIEzKgTAex44g2kTfvdWAVbyAR1AZQAiBTRQU13EAEXwdpl1Avd2Q+B3GNJHg0wYM7YWA7nGFSY1cRpQAx6kgyGmABBiO+BSZD9QPU1oXSPzQCqWOcsnOT20NAIBIi4XKoRmaA5QBYm2aDXQNS0nNvmgAxRAPAwgAA0DfDooAAkYhjAHGpNEIA2AA863QfdwBWf/hYiKGFESsHkCQQIf8INteCmYpWUbNTIX9XdKwzEVYAIPuC4uUAFJR4ihJRRIsCLE8wMacECUQx5g4SOvpQHj02gwYEw4syIlpT+qGIzncg+rwkZSkARCQEpLdhD9AASw9QGdkgQjUkjDcQUNsHM0ICAOsIF2Joze+CuP03iIJF5lYmcAIASI8SeAtySMRkMEcAJP943y+Culo0WxpjQqoIIIYRgEoALk8zglxjMY0B6eNI8GCSv9QAIzIIg5wgIOwAAzFlFRNwAysDc0QAIQgAQFKSV0JAFG1gHkdZAi+Sr8mBiL93D9hzdEsIsdUDhT8AMUgGo8ok6N0QEYUJAj/5mTgRIa/QiEL0F968eMLPB6R1EFT1YBmMQjFOA5h3N4OvmUOyka/8ZPjLFDyCcF6gQEFdAxArAwXoWHJoADNUAUuNiNwfJuJKM6prRmvYRZ17E6XFRdVsUCQMAyNDCIUGlF+aAAM2CCn+FfVSB4B0GCNqaOdfgDIXl9beMuvYdlvQQAL8IVLdkA8NeWOIAERikhFMACPgkgj2aUHdCSSZCJeSk6/cB3QgB/GDQAy1gQjFEEQfkPAGBvteF/gMdy3dKW99AAoXkCKnACENABqsRf/VRG+sQm/ZI4AFIeUyAEZkUCQuCCpblH+/BgzVR8RQKPCJFNRWB9G4eOtUmcKf/3kBWQm73keuMkN8TWki3Xls0YA/6ybzhQNvA4HEkwZ0mAY/fwUnk3nRHkPo0xhcQBIRwYIjsiEJpBABo5HCzwkjAmm/kQGxtDAmi3LTxlX64iEGTyb3UlFGBCTl1kclfwnR0AG7jkn8Kkex1QIcoFXELgbDhAAjAQa/ZVBELAMnkmRFQkATDgPTKgXGUkjtzCaKoXfXvBbUaC
hJBBA38ERi9BBKLBSCzIckygLzKgZij6SS8BAD4XmjsoAEDlRya0D0Tgc5JJIQqAeu6jAA6woq91K4pGmnXCaOFXJnsRHV1Fbq02XV61pQLwACTwEixwHgIAXBFSBUjgWFkaQ4L/kzoNsDqpOIKQlFCC5KiP6mq3NCx02QAfsDqfYnGvwmhfUgQcKBAsQAAMUHGQoQMzsABJd3+z5EsEcG0tNEl5JSOLamGgGiqYdQWRpAMsoBejWqqthqqqShAy0HTuBCBI8ABCIKvtoYiYxZu5k6vW6h22Nh0rR15koicEkQTBc6wDoQMxeCEAUkMC8BLk6gA6cEsmUHUfcK3y2hygwQI/CkntA30lAnKcNAD3CBmDCpF7IgR9aarHpW3/kA/AAwPz2rCZwjRoKEoKejcSm5qMJgE6NJqzswCvYXkiuhcc148OO7LfEmQE9pAaBKDi2KEAQAIh8HXDIQPUtGTQBwOu/xRvTkmyOrsabckEI0IBV4ADMiVE3LE/LFNZV1AFP4ABW/kg4yShn2aTNVAyGsBgjbmzWHsZ7lk0FPJwsCiLAgEEDrAA7ek8DSCE6bGiczJsTvSFa4dECJu1cjtDvUQyytImKiABEEUcJ2BNpMUCJEAB7CQDSgogLIAEgqsAHxCJc9u4dOtUUgJTODY0IQIAPqY0AEAE83a5mZMPmju5jhu6oju6pFu6pnu6qJu6qru6rNu6rvu6sAtKLzAQPSAQKTC7sZu7poEPBBEAAdADWTAEuju8lVECBxABA+G7PZABxNu8gvECO3AAHCACImADvbu8HiCnzru9PcEPCeABCP/AAzbAAymwAkMwAQJhAQGwARnAATtgltwbv06BD7M7ATdQACIQBCOwA11QEAYgAhOAACLAASUAv/J7wDwhvQWAvAewECDQBS8wAtTrAUqAwBbcE/gAAiMgwBEAwB4AEfiAAtQ7AmCgvRd8whYxAU8QAQXwDyOQABOhBCtwvwfwAiaMwjhMOQVRAAXAAWIwBP1bEf2wAxwQARMAAjecw0osEBU8AhxgA8j7DyXwAotIEWGQAAhQAAjwvkvcxRUBAuBbAFnAA0vgASBQxRexDy8wAQVwAwXsxXDMEP3wAiXwDzeQBRHAAf+wAxUMEvvQBcfLAx7Au3FcyCDXBTvgxDz//AToaxL7AAYewAMiMAJdkMSG/LooEL4F4AQpYMYzoQQlwAEFUMOWfMmqOwFOgL8csL/8UMoPMcSinAIgEAamfMEvILz/QL1P0MC4ixNXnAI9PAQGXMuxiwD/gLzlCwI+ocZs/AQl0McVSMzEq8q0JVZdIMGCzLtasFfSDLvTOxBTLBX44AEjDAYP1s2rmwAb/A8t3MhW0Q8l8AQ8MAJesFebgc6iyxQrgMrIa8xnMcRBwANbYASn1Zr4nLVgMAQHcAMRkMcjgMtosQ8gMAEZcAFGIGIHjbX4oM5ZPMaNTMtr8ccIQAUXcAT4ldEj6wETwANZIAL/gALK7Bct0AQX/5ABKNDHKL2zOyAYD2YEF2ADI9DKOU2yaMwXQBACD5ADUcADE2DDQ12aCYACuYwZMhAD/8AALZACEYAACQDSTz2SJcDPNqDHlxGA/4BaL7AEeSzMXz2SLczI4XwarKnGB1AAT7ACON3WhAjDA8ED/+DJt/HHI8ADPIACJazXTbjPBcG/rtwWkHwDNkDKiE2D1dsd8LzCTd3Yk91beb0cAL3VSLzZoq08IKDV7jvMo53THfICx3vXnZ3aor0P/IDNJKzZsK3E4wzZkn3bqd0PQ1DEt2vbvI3CVxwEW93Vwz3aEq3WHMDWyb3ZdN3GFPzc0A0Ggz3PYEDdm60EHvAEIv+w29rd1r5dxEcs3OEdv/2QAMa9xah93qbMzGvd3u5tyNHt2vP91I882JMs1Ped0/gwwxEA3v3dzZ4x3kbs1ANO4ExxxQhw3PKd4EvMzMH84BCew4ItAk4w3XNV4cRs3SMs1ObN4a0rwzSM4CJOzAa+BKF94sTM4FvNxSxey/BNwBQe4we8D1ZwADZwA4Mc4jaeutZN2EHt4z9+uiQ+yiZe5HBs4LJM5EpOui5+2k8ex/X9xlPuxbI9AgWQzVcOx+MsyZTs5F3euJddAJk95kts4Fwt5mietWEAAkvQwzDe5ihM1zbgzBJH5xZ84Vy+q3oOu5A8wpX85yd85AJO6PL/C9AFkAIJwOaI3rD7kADALOWPfuOsvclWXunxm+UdPMiafsDcLeiO/um5Cs/Te+ikPrxDnMVcXeOpXroSbdrO/erEG903gNe03rxZTtgecNi5PrxfPslJ/uuwa+pmPuzE7rqrrsVznuyvK9Fs3Nyu7uxyG908sALTTu0Ijc2eTmvabroyzAMFMAIgkMgHYMZe/e2m2w9fIMrevQEBkAFrru6o++YcsAFY0AP6vgG3S++pOwIb0AO+m+8RsNP+froAL/C+awEGANEHT7olEAH6nu9YcAMx/fCk2wUTYAA9oL4RoOEYP7p0LQI97AH8HfIiDwYgkABUPOoo//IwH/My/z/zNF/zNn/zOJ/zOr/zPN/zPv/zQB/0Qj/0RF/0Rn/0SJ/0Sr/0TN/0Tv/0UB/1Uj/1VF/1Vn/1WJ/1Wr/1XN/1Xv/1YB/2Yj/2ZF/2Zn/2aJ/2ar/2bN/2bv/2cB/3cj/3dF/3dn/3eJ/3er/3fN/3fv/3gB/4gj/4hF/4hn/4iJ/4ir/4jN/4jv/4kB/5kj/5lF/5ln/5mJ/5mr/5mOK7vssBvcz5e7S+GWAACMAPog9KAWDEHAD6qa9Hnt++PGAAPIAAH/z6EuT5vrsB8I4Fq4/7EaT7wv+7wE8/6wvvw++7xT8/8Z4Byb/89LMBTZADUPD80J89XOADM+ADVC4w/NefPQEABT5wAd7//dcTAP8QAFSA/Oj/D8xr/tij+/AfQ+0//8xf//aPPQEBACH5BAUDAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFBAD/ACy/AGMAAQGYAYdeXlwtJQl/ahx2ZBpzXhu0mCy6urzqxkBFOQyReiMfGw+Dbh3MzMyxsbCCgoSPj4/PrjciIiTBozROTkxkZGQYGBcqKix+fnz4+PfvyEHS0tQmGgQmJiSWgiSbgSRZSRI+PjxsWhWWlpRubmy+vryampyvkSt5eXlDQ0SIiInXsjn2zUQ6OjzhvTzGxsRZWVoSEhRycnTZuDvCwsTnwD2ioqRqamykiShkVRTu7uypqanW1tSenpw/Mgy9oCxKQgyojyoyMjRQQhBKSkw2NjRSUlS7miwKBgQZFgTy8vQVEQQuLiz30kSdhiVOPgw5LQzi4uTGqjFeUhSLciHa2tzm5uTe3tw6MgwOCgQGAgQCBgTk7vBUXHTO4tTathBwQlSKbkQSMGj2xBRyWtQuUEyqyuz2ojDq6thscETM7DCoxjByGoASYDgmYIjw8Lw4LmgaBiC0wJz22NR0kpjgtlQSEGBaooRoZhjGxLAEBBiijAyKkrB0WGTMfMhCWEBoWChUdFgMDBgEDBC
opFBYQmzg2LiwnCCgsriygBxYPEh2emSifgwcKAz2uET68PRiaHheSkjq8OiospRkWHTgzhAcOBzofEhUYlj69tzg2PRw4oDu5PB8QhgSMERCSDDa3NDm2NiipJDI9NRWSChodhjAujDkvCi00rQaFjTOvjTAolTEppiKbpyopnQkwICEsiSooMTa8NTYxMRWcBiq7siwnAyq3Hi0fHTk5PgICDDq+ugmIMBwouCQeHT65uiyoKC0pujW3OA2IijoprDe3IQmYNCEnIiCeGTOsCjC3NRkaJQIGCBcoiTI3PSwrCB6kCREJjyKZnSohkgwIkRUXDygpCBsWDzAohCiyrTEfCzwxNRCUGRkQpBieGw0MEiKSGzgzlQKDgjOxPDOtlSCklyCdggwNCCwssjI3LSobiz2zDBEJBQ4cDgKMBzGzjDanjDMMoCQdgw8SEiOiniCtIQGBgQODgQODgwKCgTq6uzq6uQCAgwGBgwCAgQKCgwAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpzIsB/FixgzatzIsaPHjxXr1fMnsJ5Fg/36jRR5EqTLlzBjypz5r56FIidSpDhRBEbLfynvDcl54UWFnzSTKl3KFGY/Ig2gVJGaA8qDJS37wbhAJYdAKDywNh1LtqzZhfUmuChBAQWKEy5y1KggsJ+/GBg0jEABoEEOHXTPCh5MeGa/exwspuzHwgqUIXU5MLAyITKJKgCQFt7MuTPFxTAMYAaaFoOBegP9XcghgqTn17BjI1xcwQWUIqRHJBFBMG0VEhxkCx/uOWU9APkYLAF6L0WOEydTBqGygwXx69jNqgShwQoFxTBE5P8bsTilhe4osqtfT7Me1CopYNSF8WB8+X7nraRnz78/R/dRpRCYQM3lcMF9RFBnnX8MNghRPUGQAIUD8vVmAwYllFdPEVAYEJyDIIZoEIQGQHHBUQX1MwQGJLhml3MPuCbijA7Ww0IDVVzgU3mR2faCSfUs4YIVL9BoJIP9LGEABgy8EEQQLIAAAooqUZCPXnz5NdeRXLJ3XBJJJMfADlRQwQAFqKUEwwkaVEGFFVaUYEGXdGaX5AN45onnPyigBpRKKMTgwAkTVFjnoYgmquiijDbq6KOQRirppJRWaumlmGaqKU0q+ePPPSVphpKG9/jj50Cd3lOqSaJuSulhL6T/UEMDDfBwQhCnHnSfPy80oMME0fnzVgk61GpDYq26Gmk/EVrBwAwu7FCFCz8mpGF6GDx3Uj0vuJDPDi4wUEU+DaCQrLKQ3jMBChYoEAERFyQXhLWLKSACA97GkFJNEzhAQRDuDqFDEg0Yim6li/2plQ6jLXQPXDnlo69inu6bEgtvgnCwpuVVEFWRCm3ogggcnCDxvgqXp0CJuG18qVYRRBDECVSQMKe1HDRAQhD+mDxxXfcxS+aCLk9q0cMNuEAFi/vNBkMKDGSm2smo3nePAxg0IGPRkmp1AQnhauBABMlyy8ADPk2tb9WL+QOAFRoMcS7XjaoUwQkM6PAhSko2/4Bsz/YhZdcLGlDxHcp0G52SP/VBd5A/DuRYweQWPJCDAxXckxWvhaOZcOKKV5kDD6AadA8PGKSuuupyVWgXAJ3fBzqk0S1+wm5bD8RtCbyXIEINGjApApoEAsBA1EHPXvc9Cpgq0j0slGiDqCqV+g9JFaSQzwmmAuUPBRpoAIBI5M+t/JHfM2CrDSfwsEM+OpCta9Bq75sWA6ZRMMELAFDwQmLnW1Q9UDADKlThH1OZwQkUgDiCBO0wXFnb9U4wriqMKx8Y3MEIchXAQ9kEBfsrAgo4wCp63YcDL1AAqiKwvxfwDwAwfIEFzNdBRdGwhjhEVQ53yMMe+vCHQAyiEP+BeMMhHql6MJgcDExlvvL4Y3K56lQS/7HEBhqxQf2owAkawADq/KMEQ/BHsqx2gSrs4Dt1qQAAeDCDwrkgBSwwyRVB9JQZ7KABNajBZXKUOwcuhltL2962bPCmGdRABxrIARUyM0cQ1SMCdDEVDF7wpqalqDxB0AEJGkC1mrDgBUS4h0gikAIMuABFjXSQ1RgGgNnQ5gEzoEC8fqaSEgJlCegpYipjc59HNsAKLRuVXSjAAMzF4GSKeaACJBTMXfZHJRZwywtqAIUtza8fINhk83xmsQeiAAoauJkz+aOmB2gACjmQizhHVQEeuABY9atdx0SQhBJwcJzYWcw9KCD/Ah7oYAYPIEKr7DICDZzAJPEEGmhOkIMZBEGX+NzMA92GLxZQDwR5OwoEA5cyrcQACgwAlhUjKpwHLm41DrjnP+5hORGgIEovGBgPRnifNYHUXJ8jKXFMKroalK4g9xBBOoeaA9VVgXQLBanceKTTnZr0ajmI0UEGGIMTWPUEDpgBi2IwBFatyQoGSI/smrrTCiyhVJ6qQBEm00qB+MM68mybA+wznzKGlHzlIytx/DGCHOTtATVwQRKsEJ+6oCAvgaHfXGOgOwpUAQMzuMAJLuCAQYEAonolC7MCSwUoOKsGE/gpNjuUWKvhbXpuHUFnPQuF1oJzg5kdzmEqEAEL/3BAAZr7iU3kp1B9WkC0iLGAcJdA3CAswWCxfVlyl8vc5jr3udCNrnQflbyOCrO6PE3YA6er2Sdy4EkcgAGrxlqXezzpvERgwQzbtgQWnPdJSwjCeqvLXaX0IwIPsE06q/Cr8eb0ehdIHQYHTAKN1mMIDMjBgPNRVFOKkb71bY9jGSCCnBggB5iBcM/yR4EO22AC463AC0bQ4Q47wAo5SMF2I2xfDoT2JFdLAnDIO8Hx+PFzQfvmDh5aNRYzpYFJYgAUcPpfwG1QjkztKF8xoAP/+lgw952BFZZa5NuBFgAT4MCDeRqBEmVmxU9uSj0oAIUZIKvKCoYCg6lQA4uadP9DkNUomMP8kgcGYQZQmB6ND+yAEbjQAXExgEAfyNLLZZfOTglaznJQArrQ2C4yesrA7PlAEFDnsnEdKaI5omgeyCUCvZ1bPw6rASq1DS816CONN83p8nCgBkcFdagX0g8OYGAHe1tMzqpAgWtqmtUbqXUN8tHoG/8aVUu4tazr8iUXqPC6mAV2RThQAkYntqMsYbbVRsCi3IJGqCrObrSlrZD71oBFLPgUDJI43ntgGTRDAIECJreEE3j2cOVBQeGIzFNye2TJTMqjDopVA0yrKC8M/AcLvOWCBhiACkmgwgW8vTis6WDL/fY3R75HnePt4OM7mIFIn9LwHe2zBgb/wFcDUtCnoF3NBa08tMY9opKgsURDcuxxTmU3kqyseuZAj+24g070ohv96EhPutKXzvSmO/3pUI+61KdO9apb/epYz7rWt871rnv962APu9jHTvaym/3saE+72tfO9ra7/e1wj7vc5073utv97njPu973zve++/3vgA+84AdP+MIb/vCIT7ziF8/4xjv+8ZCPvOQnT/nKW/7ymM+85jfP+c57/vOgD73oR0/60pv+9KhPvepXz/rWu/71sI+97GdP+9rb/va4z73ud8/73vv+98APvvCHT/ziG//4yE++8pfP/OY7//nQj770p0/96lv/+t
jPvva3z/3ue//74A+///jHT/7ym//86E+/+tfP/va7//3wj7/850//+tv//vjPv/73z//++///ABiAAjiABFiABniACJiACriADNiADviAEBiBEjiBFFiBFniBGJiBGriBHNiBHviBIBiCIjiCJFiCJniCKJiCKriCLNiCLviCMIgQPsB4FuEBN/BsiYcPOOADH4APA6B4AfAPCfAPClAAiWcPAjCD//ABEtADiCcEJfEPN5B4SpAARsh4OPAPIeB4HhCDXviFYBiGdNIPR4APR5BzBzMF/+ADLTBE9fAEC3ADCfAB9nAwBSABQpgAECBE9dADJpABTJABEvABftIPCCABA6BSkBICCKAASv+AAwXwAUFkDx5AAwdAA5ZYAAggEkqwAEaAg1V4A0gwEAUgABZxiIuiAB7gAUoQREgAASvQArKIiSZAACGgh59IPgEABCGwLwHgA1n4D4foBImyAQnQhkKEBBIQi7NIAxBQALDIBC2QAAIQAh/gARBAAFhwBEeAAEaAACnxBEYAhYfyg/+QAUOEBQlgiTSQATQABD8QAFPQAkCQAE1wh7AoiNR4ixAwAAqABEIwiD/nIBKghFfUDwHgATLgjiYAjj5oAgGgEkoQAAMgAwUwBR4ABBIAiBAABBkpAwmAAAEQAEigBDd3bNiBAwjQA0M4Rwh5iwJwBSmxi6aoO1KgiUD/oQQfEAUQsABTcI8ZkAEQIAEFIIfWKJKOiAX1kAX/pR4t6Uz9gAUUiZMDYQ8dAJF1oQA3sAB1EQBEGQIEkABAYAQSAAEqQJRySAAf0AMkqQRKyZRDxxQ6aJC71AM3gIwGIQELgA9dWQDm+A9KYAK9OBBaMJFCcIsaCQGKCQE+YJRC0JZYgA831xlvWJTj9AQm0I7oeBDkKBCHuIUCUYQLUIcCwY2myY34oAA98AEC4AE+AAEtYIkt8IwJoJZPoAD2YA9K0IoEkZsFcQCz+A8rgBC8eRFCAAQZcAM9gAC7xJWbeRAHQBAhMIV0ORADsADYmZ3aOQUJkAAe0AQ3YAIS/yADmCiLMnCeKrCY6rmeZqkCsRmUQdkCKqAC5/kPUSABBWkE1SkRzwl0QAAEN9AEHdCd3fmdNxCe0HgAK7ACQYmJGbCgEAqfQXmJ8qmXIYADGIoDIbChHHqhBSGJVMeXpSkQE4kAONCa0CgDLXCeEmAC4QkBNLCiKoqJFCqLUZAAA9CIkXmGcCk7cbl0JjAQ9ViNH4AArHkDZfmMC4ADBHADKkADFpkAC+ABBXCHPlAAJuABCxACQnCbO3qSZreaP9iFQhgCTvABYXkD9aiSAaAE9oAEAdADQnCiYnmH+FmU+1ikJCmZYKo7AQCaV2eEQIAQRigECpAFCVEPWACnRlUaAgLQAQXgA/iJllJ6lHuqi03QAkyQdjfUD/jAqB8QAlNqApJakHg6ABh6A+0IeGQIqo6aADcQqRuZAXh5dzRUDxPZAyEgA8CpeBZBibWqeE5IUgEBACH5BAUDAP8ALBoDWQAVA28Bh2hoaaSkpH1oHMLCxFJSVHx8fG5cF5J7JIFuHLOXLHVjHGJiZJqanLS0tNTGjJuCJFlKEy0lCOPj5CQkJKysrOjUfNra3DY2NIKChGRUFEJCRBoWBMfHxzswDPr6+Tw8PNa1PhISFCwsLOC7PPLKRCQcBEo9Dd7e3MOjM4aGhPLy9EhISK2TK4p7P4hyH8etSVxcXIqKjPnuuXJydJSUlGReOG5ubNbW1KiMLBYWFHZ2dI6OjKOJJE5OTOzFPzIyNBYOBJ6enOrq7BoaHO7u7DIqDAoGBFZWVLq6tNLS1J6GJB4eHEI2DN7GbObCPCYiB+HCT9m+V7q6vIp2I87OzL6+vGJOFK6OLBIOBHp2ZFJCDA4KBEJGRAYCBAYKDKqcdKSwxDQiPNzo6JCOeO7MsMwwgHBEGB4yRKqkUIZGbDJUTFZYGKrKMBQyaLjGyBQQYNTI0AoYNFhmaKKcxFx4GIygiMx4yKKEDNTWwGZsgHRe1OTS1HQcgLqc6GZsWLS8yB4YNDxkiMR4LDwOQHCk4Kq0qAoIMEZUNEg2PGhceJ6wqLy80IZykFhKKI58DLSwoM7w1KhsLDx0OAIGCHDCJOLwhKS+tFh4fDowSKKWsIxghM6+8OTW9CTCgB48HIyWsL6wrFhGPC4wRKKikLScrC4+OHBclLKaDDQ+ELjW3FxmGMzuMHaWmPbW4IxYGAQEGDxUFNDS4PCcMEgmKHpYGHDipGZ8bAoyKMaw7KquIDoiFDwwaNCwuMLKxIhEGHB0mMbQMBRkUM7S9GJGkHB6GOro+Pro6HpqdJBmGNicQKbEfChk0Eg+ICQwKCgiwOh4SPi4MGpGVKiESA4MGIK2hO7C4EZMZHhcaFhadAoOCOauQIKWXOC4EBAYCKTSzMbK3O6ksLjC7OK6KDo+KEZcWHZqWM6cEFqkhJJ4kFhAYLqwzBoqKKS26K7wsMycmKCaIIx4bDo+SMbY0AYYFOD46LR4dFhglA4OBA4ODAoKDAYGDAoKBAICBAICDAYGBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIEOKHEmy5Ed++YaIuPBBRA5/B/nlEPHhxxCYJnPq3Mmzp8+fQIMKHUq0qNGjJYcsoEFBSpUGO1bkI8jP34cUDapIodEDH9KvYMOKHUu2rNmzaNMS5dejCocGQQJUsTAAxlSB/jRQSCIlQIN/HOyqHUy4sOHDiBMrXiyW3wcbBEQsWfIhhgQpPwTyW8LAAo0PS0TosFDlA+PTqFOrXs26tWuf/vLx+8evtogGEgho7nFDyoXZ/5YEEGID5+vjyJMrX868edjam4fD0GyDyA6vAvPNkBBkiPPv4MOL/x9PHjz0D1WSrMgeQ0UB42ypYC5Pv779+/jzA609ZMcJGksIlAMDEgCAU23ocWCafgw26OCDEDbITwg69KYBTjkEIcEC/kDHzwVVUKFBhCSWaOKJKCoWgg1UVEHAXf9kWGCH5w2gYIqM+SMCDDEE8M8OMORgED/4ELDDPzQAsARwODbp5JNjhTCDfEfA+E8+O7hHY20rcDAflIU51sANN3BAxQkWMPDbQJulkIQFVNxgQQAfMAnmnXjmSZKU8r1YED86qHAddP4AIEEAAeqJFj8rBFEAAR98sEADQgQxgWb4zHCCFDB8QEAQJwRwqaKklmoqRBNO2YBUdgpEgAUNiP8wGz9DMCCEDsad+lwIE+BDYz5d3qAbbRcMQMURMFnVgAUwtKrrs9DqSeENVACQQz74ZGvcBEFYkIJLEwAAWJ3RPgccdBMEUKBmMKhAgXcC4VOABDRgV+69+KbIzxE3eHADDQVgkEIMBXyAkz8rjEkBDQFQwQEA9ub7FXS3MZtdAR7EYJw//MYq8ccgSwiDmRxUYSMHbwlGG7A0SDFAFQHAEIKzIQtVWz4AbJrZPyHscCt8H6C8YM1EF21eDh+soMEKTDMd3LlVDXGBBh9MkKvRNiPMwQ0cCkiDBMV5CCIV62Ft9tlop/0PwssWIKRAtYK95
Ychlq323XjnfWpeY2L/sORAPQuxtocaCK334YgnniLfSRTw90D5YEyDbLX5AwOsIiiu+eac15eXFEnokEOr/MBAxLsD4YOBENd17vrrsL/G+AyjO/tDFTfAQOMHUjBLc+zABy98WWIS8fDSTf+NEgBlFgCDDcsCOPz01Fd/VKEeeEAEtUl0P4ANU9WWgw1VoAlnDLJar/76a+fwwwo99KCB1bUpBB3SPRCwwgUza5bDBSsggP5+MLPfbY4f4VrAAgDAQAbCQAQHItIFYLAAGGhgdOzL4PQcs4MG2MhhATgCPuoXE5RoYAdSQFnK7oIPG7hMhQ3AwA+u9rqqrG1t/sjhkCqXQxJq8IexK1QV/yhAMAxQwAKBkQ1CqtIDwDiqACmggQjjZYMAEEwHMRgTA9IHxC56cXr++EEPRBACbInAMhTI3BIvQIGHLQEf2BrCCP03gTLmIwQfaMAJAGClL/rxj5vroYfQo56EyMsCBZBND0sInXzEgAgpiBggJ0lJtXnIQz8And0MMgEKCGsJK1jAES4wRzbxMB8iCIAF+FjJVroSa5e8GQAskMYloudfNAiRfAowASbV5gcL1EEAkkCDXr7ymMjMVyz5tkolDqlLKkhCAHQAAAxwQAIYeJssOZAECUigARdKpjjHeapY/kCVO1gSzRjFAQ9QwGAoWUCZVrAlHcEAAAUIAAWsZf9A112STbHpECOho5kc5mNuBC1oPg4qUHI6tD6XFAED/iUrH1KlS0QwEHSEI4ECjNBDK5NoElgpvFhWTjT6nCYEqWJSYMWAAhRgAAyGYNIlwIApDQiADB/KU/JgkgHErKhF2QQiITQLOqoTAg0KaFIYhMqYwDNpVT5wxLdwgJbhpI1J8QGArUmhAd2LQS8JdQEGOKwBFKhCFZrV07Z+p3I/AOoOhDrUgXSSCNOBTggsEwOmxpI3aeyn4qQ6BBr85wcTuMAOuBMgqfKjcNWawAR6oMew2YYCVCjABSYQmgvQ1K2gZQ4/4kqFFIwVpAfBRwqE0Ffo/GBMC6BcLPOhAyH/IEqwiftnVQggH3KNFqy6iyVtQpACCfQVLzC4gcdQUqFs+hK3oY3uYvghAg01oAdLGMJkluDMIWkAd5oVgQa+FgBZ6WiAkv3ADDhwAvAF75/xwoAEUgAj1RFhqfD9hwikQIUenKuTJ8jrbapwAX+oZAk0lK6CUxM5InggCRSISwACwIChDSkfMMDdU666qmStALNSgOkAyqSDx8Uuvzn4GgB82S4KqNOiK9gUF2NEAw8UoDYaIA0MdODBBsRAA31csJAVg2GYGhmmOdXA726mgRT4KACaFSitlqJPCgQgBSvo33tRGxxQsXUgPRBCA05rJwK4y8SqzVhtCCCEy1i5/2Wb6kGCh0znwRBJsngWgZ4nEGSW+iMES+DsR8+VjxwEmrMhaGjw/GHoydwkOKr8skBWcBm6EuQI7vxsfD1Ag9q0SwUNIMAQkPa1Wtb51KhWy2hp0IBWB0HOwvEdQcIsBUsPBNPvYpLqPLCD2hxBBRawAXYc07vppPrYyH7OD1rmlAC8KG4L8OURiDBmLv+D0tVOXQw8kALahFlBTOoZEXSQ7HKbmyj+OLSgJ+QzXA3EHzZQQQBomlCBfMBh5IJbEIhj7xsMIN//UO24z03wgu8kvzg7VKK6LIEZyNaHwmnvgYLGgbJ1MnfbUtcCDM7xjodEqq8Ntq/wsQDS/OZmnP+lkeVgZTB/TGCxOyhgPmwggXfmMAfz8o3Hd85zjEg1HwvoHg10QAMqJMG9VflwMdFFAwtIAQP5hJWSKRaATaUg6jcgac+3/nGTanUhs2LIrOq65ViGAAa4kcAJGgCDUmLY5IQSQQGuKYE0XchDOpq7BNpMgSly/e8ecYwOCkB4wv+jADMw2BKHoIHnzQAAkclVbfDxgecB4Ag/6G5JhZuPCXyAanwm4YRq8nAi0UQDnkXo5E8PmjkD/vUUsRw3qUD7ramACMVBCFcHkATa074BCwgBmyZQAN6biS8A0HQXyQ5d2DvfJJuBX9NWYBn1/C4fPcDADGBAAB4lAePxmlf/FWZwBBhgIAl77PPz18/12c6LAQv/U1UOChz70uBtnWQWjVSngu6w///O5yGdtCFzll/8sAASwADwchvgBh0LUCnwAoAS2HN4l1xVsDMD5Q/4EALuQyAeBTceWEczQQMjpX4TeILm5iGFJQQYIEks9Us6sAMMwF8M8AP14w+URQUBsAOsxgF+03woGIRCdkkfMGL+lRCE8mFxIgF1QTkCwSdw0k0cEFtCWIXldkn4oANP1U/3kzQEUAANUGEChTATBgDwowNfRQCuZ4VsGF0RRQEnwCH1Jn+XRCHERG+plESTNy+i0oZ+OIR4dwRJcIH5RYdiYzJ1Yjk3ICqEIzRA//iHkOhKl1RY2ORXzUc3FYcS23Fb54EyK/CIkRiKkzRInohwVKFoVbEAa2deycUBU1cVhoIZoCiKtPhHkXMop0UQcicztLEEADADBEA1PaAD7KUDXkFdFCAEFGBBK7Be2PQ2tRiN43Qb6TeHR4A5tFFdFgAY3EQXvAQcCHNESHQmSYA+syiN6JhBSzADM0BmulgAwZcdH1BNP5ICNvABpUQbOrIAGHAkBdAD9JaOAimJcFSIRHIQ2AJHimZKsZEtDzeQEBmREjmRFFmRFnmRGJmRGrmRHNmRHvmRIBmSrmFSBkV/AzWHsVF6KblQLLlQayiSMBktJhUCGlBNMRADOv+gAfnoWPUHAztQAEKFgzGwAzdZlENJTzGZlBIzW11VJiFyAhwwA5/FkzdzBO2UG400KV/Vag1ABbdnIEoZljIZSy3EADawPxpQAOiHdFT5AQwwAGrnX3olAj+gZythGaVxjmK5lyWiW7RyE/VXAESAKFqVUCroZDYAh02kGagVOJHEl5CpKAbIZUewioXpQ7IkBTYwAYa1mJc5KxfQO0cYmaQJJpNpmIZCAZfCk3oRBGTEACcgl5ynA7hYmrb5JAZJUAikLh/oWEvQMmqYIbEpVf/QScTxkreZnAyigSHQnIk2dvWTAxhASzbImJeEM1VQAMInnLIJUv5AAElwI8r/OZ4kgkBERwM0gAGK5yFSwhdI+ZnQoRc7MCoh0BmfaFI5sFgpIHxuJRMf0AMUdAQfUEDkiUyO8ZY20nfhUxvtKQV+Yp0ktFc3kAItIQIJIwE6QEaxBCI3MJptFQIAwF/d8w8DsAMfoHkFWkn4QBORcgG1w6AzwBc9YCVcJgIUEE1bWQUS8GANcATQOXNzMipuBaIMEAMzYAMF4EnvpJcpWj2T2Z6r0kdcNgQzcGQUgBsewAEUcIToAodgCVoTMgR2hA8/oCHu1qSSiJ86MIj0hFo3I0eVkwN4ll4U0HC9ckngSWBu6H4qEAMmiKZeRJKz5C4wwDQ9IKAfhTBJYon3/wMq3Sk+MAeN/XlJLkcguQeorWR2DJA9SHR8i2iDmshYwiUTqvSodJMEarin9mQDGDAm5cWkmLpBeNeMNsBANnCr5IdB9gRkCBdK9OYhOQADR8CfYHozC3BNQkBt
CBDCC0kII+AgJmQgUgNMAACgKUoMIIaTnIjwMFdGCABiiQAAEIErTHTwgKpLAiChu8kE8LCJRomGRwubDBQ/wwQGEClOXEDwwDqPBCav/YkwIBHhwgkD1BhfQjYwZAaddAHAywwAAkCtTmBBXAUJ1AB7jQAQlmavAACClMxMELE1igZnKCqZCmdSlM8EBTyf1zAQgubDXQDBWsoMBEeakQQ1RrCnTPCIByRQIFEGgQqkAiONoUmwNE//DCZqxRysCYM9yjqwnt5ZVPAlwFsIALIaz6zwkR1ORUmxFU8Jg9IlxlgI8MVOABBBCMQII9vgJL0KnEGnvCCg/c9w+zJfRlDwktEGABl1E24MJGGYigjwkNeOoUPydM4IGqkQr777IvRDBAQfYE4MIHCszgFLdiaXAPB1eWQIEFAXYpwQQZ2BTpBh0UgIBTJoxAgQQEJbxww1zd+Q9rKmCQlqgjqMCCcKvmxWODCBQgwwUDqSyDAXZC5CADG0rAgT0cxHCkADjpg8I9bXmZjwcA2oOCiyVwqQ8JLhDAMnj7vhdDhRVYUEEHH0hAomQXAFABwPygUJoLAySUDwQO/f9jkQsr5PNCbwYULkDGDsKQAAQqLDBbChTjl8LTONkDpAyNZ8VgVCQsEMEKKpgGggoEDMAlqNVJvYEDG6CAc1QwbMAAzjhxcAACDgjAq3Uw4O7A78CHlTFu301n/PHIJ6/88sw/9+A9K/3D63DPwVfBAx15VBv1zPGjAakuNNDAA7G94LDx/MwggggamPDPPSl0kE8Mw0cHHkEwENrAS83rY8EKGVDeXUa1AAXE6Xh2gZkLGMC95lROBDIgQArqh76EtYsFdmpgc9a1MBY4LFLSeUsL2pZBEEJnXVYxwEtcBh3JgC0fA6CaPjjAAeGg7x8YWMECADCAAZSgBA0wwAFd7ccAxoCgQljJx4tmgD59aEAAUBTABqa4AQ1QsHlYzKIWt8jFLnqRIjLS4HLCKEbikBF5ZCyjc1iIPgclL4xvdGMcE7M8GWGRjXM0TBupY6wWprGNZwSkHAXpsoAAACH5BAUDAP8ALK0EWQB7AYkAh7S0tHZ2dKeNLMysNjY2NEpKTHNgG+jBPfDKQO3VclxcXNbW1HBwb5CQkJqCJMbGxDswDK2TLIJuHmpqbJmZmSgoKK6urOPj5FZKFFVVVDEoC0RERNLS1LSXLBgYGKKIJHxoHMzMzBoWBCYfBTo6PImJiTAwMKCgoXx8fBISE66OK2pYFRYOBNra3CIiJEQ4DF9PFMDAwYt1IEw/DWJiZJB6I7auhGZnZaqqrIZyHwoGBLqbLGJWGVREEfT09JqOZN7e3Ozs7DYuDB4eHJ+CJD4+PKampODcwComCKKSRFpKFLq6vE5OTIKChPbuzJZ9JEY+DJqepNq+TNq6PHZmGg4OC25eFBIOBMauXK6UJGZSFPr6/Eo6DPb6/A4KBPbu3CIiHAYCBAIGBIiUkGBccAoOCJB4eKrOtAo2KO7yMOyeMBgaMHpGcMDe3PTMzNDWqAQEGM54yHSq5HqcoKSmkD5AKHh0IAQMEDBCOCxq1FBCIL60xGR+GOigsEImFHKCgOh4SKqeDHhuCN7g+MDM2A4MGCQWMO7WENjK8Oq6fIqGbGpcKOLWQAoaNGZghKqETOjAKJCcuM7AQGxyWM7W9JB+SHocgPDifH6IgDA0SF5yZFJSbHxecKpsLBosKM7i6MR4LG5mQExkVJaGeMj02MbMfOD02GZyhGpGQM6YMJJ4DExSPOy6EL6+0GRqGJSUgCA+HKqWoMSgrEJ8bNLkMKrEeHRGGNz0iM4wgKrssBAeFNy6uNC67CbGgKrE7G6CREI0aKKeLNTK1Lag6DA4MDQmIHpcCEJ6GMq6xCwkwLK6zEJakCYUEAoIMF6qjKqEDBYSYM60ELCmLJBiYKS6wJBcGFA+PPi6QLLkMHiCGCA2RHpiyBY2aJKGDLKmlDZaVHTkqHTGKGKCdLC6MDomPNDKwBZqVEIQQODKzAYaFFAqKGI+GIa6jHiCZKqgxIxGKMS6pKq6nG5aZGJMKMDM8CYwKOSyNLZ4dNrWfAYGBAYGDAICBAICDAoKBAoKDAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNS9FeBRJENIDeQSLFPo8mTKFOqXMmypcuXJk1QeEAzhE0LG0rC3Mmzp8+fQIMCLfIAiJEGFJKWMKFTqNOnUKNKnZqySAwcBFJo9ZAiH9WvYMOKHfuT6AkP+9KqJcu2rdu3cA0SXaKgCIkKVdLG3cu3r1+eRIOEiBHDQokN/v4qXsy4scMKJSigCFACwIUYGRI73sy5c9x9/oZ0BU2gwQUAJjyrXs06qlq9//YRWHJBQdPWuHPrRvn6tYcTPhh43U28uPGHvdX+Dj78uPPnzpOnJUHbNvTr2HPn61iB65ANFE4z/81OvvzmKhMA4KDQ4ESMFksyNDdPv37cfExOLCEcA0CTIprZJ+CAY4FWAQEfFWECSbcR6OCDEEYo4YQUVmjhhRhmqOGGHHbo4YcghijiiCSWqNo+FWSgwIorqphBBfMZtN0GTBQw0lr/5EMAiyy+GKOJQPaVjwIxcGBTCA9wcMECE/w4UBUKGPEAkks0AaNOHpjGwZQ2xUCDPw0GKeZb+5jAwJkMTMCAEUEAYd1B/tAQAgcnNFHZAv+4gOUJQVCgJpom5BPmmIQWmI8/iPqTTxUo/INDBQihaMEFKAxRRRUEGNHCBJr9FsIG+YSKqKCDFmrqV7AJ9NsFwiGE32AkvP9GwwUnDKHqCQ/kKGhsOJ7qK1v7FIBkEaXmQ0MLFlTwGhMLLEFASauWsGIBFYCZ6q/YgpVCCRc0gJarCnCwBFNqZdBCCEx4tdyWSFrAwBDXZitvVCYs4Wa8A+1D3QWc7pOPByUEsYACXlVxQwkTKDABBQsAgUIK80YcVT4TtACAsgntkwIKF4SAwooNhODDwF5pnFdaHjCwQAgFOCnxyy/t48IJrIKZscwohHDBuesZKZ90+/wWRBNVwGz0TmkJ+0CsCqmVwgY3MECDCQUQRmxvA+WDgg8UfHv01yqltW23HjCEtUDm4qDn2f/4U4IP3pYK9twTqUVAkfKZ/VpsHlD/AAQD1mKtL22Ay0334Q2pRTEQye7tqgsiVVDBBg3Qqay/BGSwgQkVmJABzRaMh/joEqlVgRGUntxrQf5kYEFhAExJAQm7GhxDCABYsAQHC+DQsuGkB1+Q6SU0QK7jMpqAwgk4GEEBDS7smqPyFBiBw3oMBAq88Nzz6u8QHpC6uoxVDOGCC+Ffu6gHLlSAvrXdxy///PTXb//9+Oev//789+8/ZzoZ30L0Eq/keG97/4vYjBRwJgUQwGZNqwIJaJAmJsArayaYAAo2uMEATAYxCaQbfnBwpAW0IAYM8NpBgsaA93DASCcAlUDwYwEjldAHPigBxEIItnzcwAINmICK/0qwAFYVDSFVCEDHPkYDCiBrPEGjUQEKABJucYBgPASbzBZEqhQocVyDkg1tJpCXfzXAYXk5YFp8eAEjuCCLW
kTe3TiQk4PkIwPiEt0/FGAUPeVLOeEBHBy/djZ93a6OMjpWsgjCrBgwTSCvKQAHHInAQZrqbIwCwgn8KKMMDIYA+VJAx/J2QK0FoQEk4YvLLOkYrN1xTj9boQkAEIRK+aMKMhkZFntTrxbEEi4fkAEX+sHK1fQmHxsAwAIehi+BVIEBLViAZFBghBAEoQW7fM0EgvCoZpLlACqYgQ6K6ZnXILOGKECLN4N2AwDMKQYUKIFNfvaaFFAgCAEIXCWjcv+AA3wAAjogFTkbo7hkciCdyBseaEzAhAwUAHIxWEKserOBwVxNgGPp5xQcYAAe9AACI7iCDsLgzYGSZY3JDEEA1IlRNUKSj7XqjT+aADcGtTQsGo3AByLQgQ4I4AkSWMFHQyo9k7YlHwVwZwpdGpsAqTEtJsDBAmggvrRUwDK2YVtbDjCFGrxAAy9Qwgok4AAVdGAHPgXqCpTwVRHoYJ9G5ck+NrCEIAAgAybIKwFMcMR/VOBjqcyHCRSQgY/QQKoluGDWJtDGy5V0KjPoAEEcAAGB7qMfIgArDAwgAwfwtKcfqAEIhNrWKhQ1rkBxGw6bBYAl7AcnOjHXxUrSut3/0WSSKLhcvpYTgKqe9B81cIjiMAvWHqwABDXYaQd06oCg9uCrRIUrai+C1OI14LrYXYpO/kqDVO5jCAqwUwkCsAEGEURjNGjC8R4LlRHIQAAUWWM+vCCCEbzAuCDorACWKwAiyMAAMJgBSEczXZYs6lIIrkIKTEuQKkAwNoe6FPwMAhoG31QqBtiBFTSyRh3QdwQQwG8NiCCACERAAME0gBIEjAQReGGVBdaQDpTQARmMgCX+0kEV6isEEX/gAz1FcQ2oAIMXgFQEpgUee2N8nXy84AMO0EBP/NWPK9Q3xMetgQBKfGIHyAAERdbACJB8WiY/SANansFTqFxfJEBg/wZjTW6Jt+xlMEMXyW+Vrpl3A4IOwCAs+9CBlTXwZh4g9wNzFoCXqcCDGYjZxaHS8547s4IIUIFMixKBm2cAA+Q6gMtQ/vIKZiCEMXthpJKetJB6IAAZ/GUfYfjwmzebXCAvN7SjHapIBQpHfyFKV9sDTaIQlWSCHOrXxIbxT/IBgR935rJXnoESOKtctOJaqCBNcqpHV4UNMKAEJfjHx66UEGQ2AbvYpUHZBNK6BoD7H9jNzFRGUIMI9AA3/roCiDlN7c/qVNRDdSu+ZtYFINzAqXTTlxGMRBgYtiwhVWjCNQFA8dY2ak9b4EDFW0tGqYjgOrHOLBQ2m4Oy9vTEov/FtgZcTFIXRMEHBkf43GS2IpGQIAMNeGKp/MEx4+U1r0OQnj0vEIDO/VyFUIkACLKzRvoWN8sl5m9zhcoDGzjhCAcPnr/WIikgZGDnKAACDQLoOHu2oABafQqr/3Hj+jQ9s/c9rgyIoAKzSiEBCUgCD15Q6iuUGWbIW9XXEcJzSnkAfL79hz3FfngCS+UDE+owC0DcAxBgAe8DyEIW/g1gAY/5rS9bYwreSIIA4E6PrENBEB5wghNIZgMnUxUFfLCE1gcxUFHJQYbUAgYKfOEIP4CBoZ8gALP61Msq5vuY+yGGbW8IZRM4wevgkwGF+GMCrsWBBR7QMQakUvFNeID/etwJBACkSypX0FDffHABxQv6yvjt7AdU0OX/wgAKRy62mFDGABzEjgMUgHoFMSMm0D4bUAItcEXDoSMb4D4VkB+n8Ug/0Q9/5iGzVzPnlQ9VBn9ZRgSIVn9EZmRjpn8DZCFr5AGdswFNAAAlwEkUFmnKETBdA0mhwiuCZQG11Fc9kQ8zAF8gQnQDpIFWtm9y5wCIhmJ1FmCPJlJLBiEGJDMn0AJUhRwZUH6QolD+sjVnARRoBnnykhbvt2+dJmcn1l9fpgWkNma75nzPwTb5wADdskMOwSxghIX7sE218hMfFwFalA9W5mYjd2j+dm2OJgJX0A+8tkLz9WJsqBhu/xgApySHZjMrjYOFqjWDPrECHWAALEA6++B0L8BpyBV1INhoIHWIvLYPIqAEOZADMDACJdGIfLEPVeABipIW/kACk9IqLzhDTZUpQaBDuJgCt0iLGVAUnOITGFBjsCg/gRZtmyUDiBZkdYZtYzYCBjAACIAAAwACzXgcd9QAKEADCkADKAAA/2AEAghJf4UwK3KOp1EEXmFujbIi4dctGAMUYZA/+TYCGiBtVlByxhcBXuYAA9BP/TQAMDBO4EgkHPAPvLMAMVACtBMpMvFCL3QuFIAYJYFUNWQkHHBCufV3LsGHIdSPTwcCT3CQCHkACCAD6XccGnNzNDABN5ABJP+AdAbhD56jADdwAwqwAeGTLxKUATUJlCSgOj3hZ8UEa/2gAU/Qki4pA504JrIYEUtnUvmgBCyZkFpATPzTgzXwjeSkitmYkN54lTDTbP+EWqqoBBIgATAgAmr5MvRmbwXmL17AiP3TZyugg6qGPzCgdB8XmPojllJmmPmjAUb4AsqmmMEjAhKgAkrAkJBpP1Zgkpd5PzDQATnQdptJPy/gU4kZmvMDlT5omvOzh6pJP15QaZzYmvLTAxHgarIZP80mABBwm91zlzPwmLwJMyLgl14QnMFTnIRpnKTDg6GlnMEDAR7onMEjAxGAAdKJOCJgABGwAtd5OBSYnN05Nz1W+AThSTelWZ7omZ7quZ7s2Z7u+Z7wGZ/yOZ/0WZ/2eZ/4mZ/6uZ/82T912Z8Esg83ppkAOiLp1wHcWaAKuqALoWYMOiKM+aAkQqAS2iHcaQUxWaEdEhAAIfkEBQQAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwD/ACyfBL0BDQEdAIfa2tzuxjyLdyPm5uR6enxSUlS2trQhHgx+fnwRDwh3YhpkZGRCQkS0mCwOCgSWfCTQsDQyMjSOjoyvkSwuJghdThQiIiSampxpWhS+njDy8vTiujyBbh7GpjSIciF/ahyagiRGOgwZFgTqwjxnVRRLPgxCOAw2LgzevjxiUxRWRgwGBgQmGgSiiCymiiyGhoQKCAYWEQQqIgc6LwweGARSQhRSRhRGPgyiiCRuXhomHwY+Mgw2KgyqjyymjiwyJgwaEgQuIgxuWhyehiQiGgQyKgx6ahx6ZhwCAgSegiRKQgw+Lgx2ZhxyYhzClnyYhjCaliz06OwSMCCIbHQyXEDCuOxwXihyRCSEiKygeHTClMyQhijQwkSIlizkwlzgvChehiCMeEjsrMy4tsykeEjCoFCUWsiEhhwghlwgGBjm6JQGEBC6wkzCgjAIAgiglkwYEBiklnQ8MBikfCiSlmiAeihIYCTOsLhIThSkbhxMRASWegwqIBhYVAh0UhgGCjAwCiAGHCBsMCBGNhjWZkyKejBKQmTWKoDw8uBmXAimkCASVEBcWBwSHmBmUig+NsC4pri+kCxOMDDw/OwQHhBYZGyglgxodnwYBjBUVByWbBxgZFC4rCA4OBBuhlBUMBA2KDBGEiBKZGRgVmSUVEiWlLxoRmSylCBufKhuloymmiycfKzCyLiyoCxcRDg8KBDU9tju2tiOgIDcwhBYSCD4wjSk
ggx4aCg+hty86LwqNhBIQiBmZhCGdgzslkxORhC4mAxwaCBGKhA0KBiKaCSqkDgSGDCUIoBiRBSGaAg6EmCKcDB0XmTAtqyClnRW0IAGBhhuhnz89uwwHDB0YgjU5uTOqBDkwoDOlCBKUnRQhnxmYiiUiOTU3rxmEkDW6DxuXDzWbsxYXDimlKwgEBCQmqDSpDAyRCAeBhA2HBCWejBmUgi+nCBeMGBYZBTu8sSIgGQSBhgyOGCUhgwoMDBkZHiWjqByXhxWRhRaShROQgxOQhRyXhRaRhRWShQAAAAI/wD/CRxIsKDBgwgTKjyIZMY/ED8WGjxAAIDEixj/2QAhEN8/Ezb0lRDyr8G9jChTLvSosqVLgz32vZxJs6bNmzhz6tzJs6fPn0CDosTgQOjLH0MIZjDKtKnTp1CjSp1KtarVlg2uXoxh5B9LgwkWEERgQavZszkx/CuBViHHtga/DqQB99+KFHUHnuDwD0cPCAMbKMmrEN+EIwZfCNRwNUG+FgIVEB4YJO/bIjM+ZE1So+jkz6BDix5NujTPk6RV/BPwz8BACaaprhBYYfbZHLRFxN7Nu7fv38CDCx9OXGEA0BcGFH+ao4FcqvkE3pOxvLr169izoxVhBPBAHAJJVP82K3YggoEJcP/zrJ1mRA9AqqptT7++/fv48+vfj7LIvR7/eODUCgy0ZMJAtvlmGwwDOcCeQDTQQIQO/1DwAw9L7GBCCBX8s8FAI/yjz1M2CMQEf6PBwKBAMQhExD86UMfDDBqGUMKI/USXAgkY5HCPAkcYwQEHHrD2wAP/PADCEC3g4IIPAE7wTw9LUSVDEwJFh+KWXHbp5ZekPSCAAB54wMEHRyhwjxAkkJBCh/l4JFIIH+0wg0NFDKTDAXT9E0MMCQi0YlMKdPAPCkztwNo/O8CFD5I44HXTBQQVMGhKHwrUQwX77FNCCSGYsMMSJxTxAwUyUEiDCC0uB8QNb/IgYNQEE2BwAJi45oqRDPMt+hJjAkXgEgsfDNQBP7oGFxAAIfkEBQMA/wAs6wBZADwFgQGHVUcSspUsoqKkq5As9uGJtra0e2gdbGxshoaEhG0djIyMnoooQEA/TUAO0bE3vZ8vZmZkcnJ11tbU6ursvLy88slEPjIM0tLU8tBPpqakdGAa1cuUMicLOjo8MDAxalkWXk4UiXQgkpKUmoIlJBwF8PDw5L08gIB/yKczRkZEzMzM+vr4EhIU2trcKSMIsLCwxcXEFhYUbWVJVVVUGhYE3bo7YGBgnJyc3t7c7MVA4uLkt6JSSkpMdnZ05MVX+fC+lIRIp40kKiosHBwcRTsN5ubkqqqrsad5enp8Tk5MWlpc8Oa8vrqUooUnjnYiNjY0JiYk3Ni8tJoslpaU27U4rJo4Fg4ENS8MkHsj8NRu+/TbppFH5sI8CgYEIiIkfHhg4sp0YVYU6taEVlpcDg4LzrZM5ObsSEg41rpA0qo8Eg4EsrbEvqY8EhIHDgoEWlIsXmJsBgoEBgIEEA4URj4sFhocAgYELCLAzuSIND4o9NQsChg0dnycCjIo6MIossw05PbourQk6n5Iun50fByAlJq4QHg4HBwUfEZs4s7YuMDQ0H7IJjAoNCQgknxs6qi0QjBorooMkmKE7troYGx4mJqEIBg04NREyOTkCggwjJqQalwoBhgU7MQQQg5AfGDUCg4IFhBgYj4YduIo0MDMUE5kJsSAysCwQiQUTF4UMlZM0qJYeH4YdEYYuqjoYHaUglwYzsQQtMC89MbUQGaI4PaIqp7EsO7MFGZQfmp4BAQYNDJI8KI0fpqg1MDwduSo+uDo0O40OiQ8lKaQIDJE0DKAQk5kdmqYUDQUUDo8YqYoyNjUuqCwIDwcYKiMNFYUrnAsLGbQzvbUirqMlFwYztBEalxoUCYoZGoY3sDExM7UhoIYsMrs0qIQFjJoqMC4yH4sGiooYH5orrKcZFyUjkYYippkeGwI+vD0loAMyqicuqIMxsDY3M70fIBwdqjkaEwQmHyQxrzEuMCkQjI4sNK4yNLwZHwYelx8CgoMAgIEBgYEAgIMCgoEBgYMAAAACP8A/wkcSLCgwYMIEypcyLChw4cQI0qcSLGixYsYM2rcyLGjx48gQ4ocSbKkyZMoU6pcybKly5cwY8oEmQ8Kg5s4O7CYybOnz59AgwodSrSo0aNIkypdyrSp06cxPYj4B6MqDBUvUkDdyrWr169gw4odS7as2bNo0wploAJHhilTbtxQ4EGt3bt48+rdy7ev37+AAws+yADGvycsYiiOMbix48eQI0ueTLmy5cspGQgcog+z58+gQ4seTbq06dMaNVOY0YGBEDKoY8ueTbu27du4c5fsAGOCCsP/FPDIp7u48ePIkytfzjyskBM3/kVA8EIHjBnNs2vfzr279+/e83n/YayP+NR/dcGrX8++vfv38M/q6zywA4X4+PPr/zx//hAbImQgAAIz7LTfgQg6VcIBCW7Unz777JNPPhHS1+CFGLo0Xz4pZHDBBSqocEEGHWRo4okyLWghig89GMMMJyiAQA8pEMfijTg6yMALMJzAwxMMzDDDEDkWaeREKw70hEBKHMnQfPt0cEMLRehQRBEFLOnkllwaNMQUKkBAxj7/QLhPkl2maeQ+XjwBBWNqPqnPExTocMMMDKQAwQFexOlnjvrw0IIAMexDRqF/JppjPgcUYIQIIggAAw73KSqQPixMoQMEaFrqaYP7HFDEFDz0IEAGCCRh4KesHjhDBgXc/wfDCwgwYKOi+nSgwwsUTnhmq8DmRwYCJahQAAwUwNCCCifAGeyz7kHhQWsMpPfpPjaUIIIXNiggwgEd5NMptORmF8MUKxQhQBJCeBCBChLYQGa59Na7XT4nrICsDi3g8M8ESMBm78DKnatvCmfqE8MJRWRAJMEQGzcuqywosMIKL9g6xAETFGHDxBGHfFrFK2Sw6j48XACDZiK37LJomFrcQgedlXdCCUY8/PLOpO3Twwo33KpPYSpoxfPRSD+G7woUzFsmAzqoUGLSVFumjxIlvKDzPjNIAMPUVYcttl37QLBCARbq40ELLbA89tuCqU1BvGTqM4QIo64K9958b/+lTwr62gghD1Fb2/fhd5VnA4gn2ABBdDDwADLilFfOkxd1KnHmPkPcUMIUsE1u+ehOzccCBBTg0MI/LRiRxK2kx56UPlD8o4QSQub+T+0LecFAEj+ykGRduM9w+wy8YwilDUVI0AMPM4hQrNGyV89Vf/l4MIMNNiQBhdPWhy8UtleF+FtbAoF/kKQqCHSCEARV/I/K5VNgw4nzkfFuEThYuZr64gsgUh4UoQoJ8IA/qUsEDsDACAgEBzOYGAQuIAEBBKcAF9jdQIYggCLcgIEgRFF/onSAEzQOCqJDoApXyMLI+CpC/+iBDjKQvILoQwgvKMIJBLITAbSAQQKJwQ3/YIAwMsEwhS1MohKXyMSLCFEHBwBgQSigJYHYQAcCeJgQV9bELnrxi2CMCMqq4raDQAAHL6ihQCgwtRhYUAHc4wEUYBfGOtrxjiqMgQKKoICFKEECFDD
cDJaVhCAKYAUqa98/DsAZPDrykZAcXQuwoxA6QbFM/xgCAuanOYEcQAEQWOR5eiC8SJrylKh02YYY9AKG7KQI7dueAlSwghbIi4erglPkUsnLXvqSXBlYZEOgcAIV8OsCRgCTCmYgxYFMYIe/jKY0p8klHlAFbAihDwsYAIEIQOAJPKAABcpokJvdwFntMVM+yEAGCs3nSRJipzupSc961rFiOlAAOg/y/06DDDKYClGAtvTGHjIkAQk3eMELbgABzoDMbgdIqBGE00x7WvSiAqTTBShpwzJNjEg/HMivBpKrOh2AjurhmpUSWYQJZMBwBlGbEUoQtQs0L4oYzalOA7grhAwBAq8bSJ86AAX4TeUG8PtHPjqAJw+0Kwk3KMILYLoelN3ABh4YAhSSQIES3ICg8bvB2VIwBCFAQAUtkNxO18pWymVAh/z8hwSMQBAlvIACCr3KDdxGBtSpAK8FYN0LChmffAhvhCnAAQ7ISdIOSKAFKRihDVYggIq29bKYdRkUFDCFJ6BpaAI4gc48AM232OB7IvUAQjNghAx80AOW9c6DHjQEGP/ogLAG2QfWKODQ+UBhBRLQWWaHS9yQcU64BRHPEKQ4Hjp2hgxD8IJ0h4DSdM52PgygIDYJgq0SUMALD4JCCRZb3PKa97yPua7CPCcCgcUUaoutGddWoIMIove++M2vWmb7D/0Vi2YJiYERJnADD7BgCEl4Qbo+pt8GO/jBXtFHX4uw0X7GdB8p6GoLYiUiHehACUiEsIhHTOKWSBgChExYp/ojBATA4AIUeF4BcBDZEtv4xjhWiYSvqALJPYif6rVbCy4A3hwb+chI1ggZDoADIvIXyNfFFmVHmuQqW9k2/cGkQ96Z5YRcFz6dYbIEklBAAwJZQv8gExl4IKIaX/n/zXAujRdSYIMDRMAGHSBDCkvEoBl4wWn7qAsSBt2DQiOBRtWVLdcmgDEbGI97NugTQvYB1W9FQAA4kABO48zpTl8mCcHM4BojsE8bEgkGEjDfXgeSjyQYwXy/uUAJSgBN95ChBxOYwD9amusJSKBJk85wS4vQggy8LsSeTray73KAVh5gew88qUJkyCzuTWGumFTY74CXghScwF/x0YcXigdpSCO3IIGGHp5iYOFlu/vde5H0r1jQAxwENiF1KcIBBDaEPvYgpgMJZQaKrDx4G/zg6R3e3KyJkD8apElZ9KmmEE7xikuTAipgbPpQnEaCJAGQ2x1ICv4acoub/ORd/7SQwGiYkBlkcLs4WCZCbqYAsKL85jhX4YpE1EmE1OWZQ2DnPzz3awB6ILD2zbnSl37AfTCgAC1AwkKIs7obmFAAKihB0TsqEEkz/esWUW+ZWNAuISDKhl/mnAc8AAU9PxnsJtLKBUTLECK9IEQwmMImN2oQIZYACYkOd4s0gmy4ayfIHJpCsihwg+GQNMg/5REMCvA+sRs+QSgLLBLOHVchQI8HfTJMyNk8zsJnB0LrTAy7L/XlS8VgCLCH/WLYPdt1KmZMafvx5a0b5RlQoAUvmMILANnzIC9sWQIY4j8E4IHW734/PLh7D5zVbobYoAU3QK6NRGDz9qBMAS+wKf8M+hTkmg3hBSW4UpVwUATvonBDHVAABT5EgRPA1qO6f756ggwFTJ9gPF4QAcAHP+X3cdcxBDHAAG91AuLydvr3HpRWAIpEENXnZR7gIfdDgQKhA7eEH7TzVv0DXEJQfv2hPzcgAgqQgmCiL26nWxIwATBwKrJ2AZHlfA/YHa2HLQM4EFBgBC0AASrWTwojAjrAgGVCaX+1JA54g+sxNF31AjOwdlV0Kar1DwYSaP+QBJoBAeGHAMhVNjqgQRXIHvnAAKDXAZkGXiR4hBRSIVDwAhMQAf3BAgPWLBGyWSVjeUzIHW83LDrkNPmABF5Fe12GHlfhZv/gBRmwKfg3hnv/2B3EcTH/IE7i9A9ZUTNKgEZJlQ9KMH9VoTInoEbrJR2x1YSb0QISoIYWtoRl0gETcAEj2Bm11QJV9DcroAJB6IiPmBxv1zmbkjZY8wKquCKEQwEjOBAks0OsGE1EwAEkoAalmDRc00fnASlToABTyHUxogBIkAJghSkC0XwNMgQ6kIr5x4pLcwMqNgR1QmYQQgbZIgB6uIvN0Ys+BGIeNwEFEIvtNgMY00gCQQb5ogA2OE0DEAQjEAIfAABEYAEuQANxYHoD8479JXQilVxpo1Ty5GX9hSHkaI7ttoRqY1sRhD1IoAMUAAFm2APiRDMFSY/KkQ9C8AQ0aWB2c4/E/zgBxph//+CPWqNy+SICyxhNH5AAWLAAQZCUA7AAWGAAINAAFsABNOAGcgCTQEFb5aiKFMiTaWY2MOB2/bEwOlACLTCW8ZJlXGmVx5ErGWA+L6AE+cBBLYCPA5EE+siPK+KTANlfA5mW1KQGNEACFgAARTkCTXCQAzAACakBIBCVLqCWMjFbH6mVW1l9LOA5J3BdLBABeCUXyfJ/9OGXkJkb4hYBfcSNHbAP5/KLJBWMw0gQKYCSx8hDFpOZhXhRUNIPgOkCHNAAH2AAITACiRkAijmaL3Fdk3mbDohhOnABzZdlw4IDCHBYUOA56hhkxokcZMAC3Il7ZCBQJwCIgv94TjzpAYdoIYr4i7pITcopB27QBjTAARbgmwaABQ6yntmJdlgJkivigIEIND+GMl7zfvPhAc1DoEOZn7Pxdjr4AkmViD4oL28XA3hjhGmWBEmIn9OEn53RBYAZESRAA2TQBXLQn7epoA7xkb2Ve4V4Q5kToBwjAGBpN23BANiJoqSpXlDwVgjgBSwABSfpoHPIAE/QgPqQBCqjBEOwTYvIgC9pYwlpAB/QAM5oBWQQB3agoThaEEOQaSvKeroHIdnyAoQIJRzzXQ/yBMT2nE+6pbERZJQ2KQv1Ai1wHfPCNRTAfGF5AiDiWsqip206YhqABQMQAMSJkE4gpQ1wBTT/oAb9EIROMi+Bl1tKJRAT0k5JMiGa2l/RyIcSkj2q4wG+gj0d8Jysd5kT8G/X1QEqUAQR0E7iYTEUIDCi6aaygXgMMAVXkXc1Qh+Ulqd4+R9391dIIAS5qKUOFp/zCQL1OQIB8AAPUAVBgAUJMKUW8IydiiAMMB0p2DgPuhCnmYIiAAGrp1QzkILomoIcBYEMIAACoGBZIxc+ZosXoDMQkgSZxjK1dwITUAIwcAMCIGsyB6YSaav8kXYxUHaFkjZkAAV/VntDsHb9VX5Xpg/9EJjL+gHBWajEOQKJ+gE40hlY9xsvNiLUcxACqWsF0EriJFr0IUT/gBUFEFgUAETw/5EPV+Rh/eNhFxBF2IWS1JcPAgg6BJs/SmAEEqADMTcFDOA0tWqwUBsaFhufRAACGrCxgGI7SpACHcBUgqUQAlkEIlAXUrhcyBhVPdAuZMt5TUgGaycEUFBUaweW+yAEBOdRLGBg+jmHXhC31GWiyBq1gksZs7UPbhCfRtJlN9RKC3ECjPh4YSUBDEdSWja4lhtnylmwDRKGCYEvOoAEr2dzl7k6sOdel3u6j/iQ4HFgAvFvBUBVA5Gy/2pBUme6l+
ldAPstqLu7N6iYIaABDBmVU1miXFe5tLExCWUYqxF4S0YBBZABrRQ1/+AsBuIodycQuMW72st0v+kEI5CUSf/ZBE0JAs0YotAYkrUxBBFgBPchagpRtyngAX2bAgqwOh2IHugRtzwQHS9Qctv7vxbXD/8QmFfQAGFgAN57kEnpsU5JBFfwkG3QD4H7Ge3yDydQAF7YIv02dA9TMwUBhydgugD8ZvuwVTagBE8QOi+ZD0+wPd6TizXBAErgaHmWn/pgB7qJsQ3ArN57mEHgu9VKpeZLorThBVOSgQ8xSK+bEPuQLxF3IYFamY0IcMY7wqFBBq8CLzC2eTdaJuo7fyBiBDNgpH3FIyLiNQggjgpKQIBJAr15wBubmIo5AkHsjCJqGvQhKgogwgwBSLBLECXwxAjyINkjv5OTDzEgLU7/tbC5F5dqG3RWDBpcAy830AMuhgP6dKMLE3MIEAFgYqe+patTgASmaVs3gEIoeqLPpaw7jMBNYKgBgJBNGQZCrAZdUFHy5QZuQGX7FVfSIbbdhxAQUAR01bn5Mr0JQsgQUAJng8sscAKpM2v6aAN6xnpD0AOt+i8XQBeR7BmKiAOiZVhcVWFwymYqoAQsEJcyNHD5cxMx0E4sEE5QlK1r7J404ALLOqhKmZhRaq3Yug8lWjM0AAIJkAAgQAOaCxXu1Rn78ATVsWmcczIdSR8dMGDTiYysJxC2JW2DDCUM0FVn44hD84o3MGgDBjCC40aIdJow4K9/3M2OESiUosbP/8xHC/0gLIAAfCQ4HjB/9gUhPyaQXsXHp8tlA4zPBgycwlmovqsBC+mQgakBaVABFeAABkADecE1U3ACELA9PUCnANWK0uEsSIAAjmbB98G4SuVtO7Q9m/QPU6BG+fEgRgwDIoAxIs0CSkB7SgUBE9A2l8IAiITKdgOHNgvTkhEqQONeXCMBDvp4/ZGeSGyFClCEQpNl/3kDRH254wIhauDGVXu13zsAUuC7I+AAOZDaOeAAIJDVNjB/MfshMKAAqXkpg9Rx6HEDISJXAiECbkNpw9c+EkAV79MgJYgEYcIDIS3SucgCFOAxl6LcFGAh+bBJtYbYkTEsJYAATlsYXP9UtEKAQdkbKkMN2Z3xhvOM3fzkoW48mL8ZAqjNBSZgAqkdAn3hOPfDAPv0H0GVZh6QBHV2ADbAAHvZGV77D6GUIVCCoVPQ0ExzouYdljmUgUNTAvUKQ8s3l+otGRQKME57dD1WmXOCcSerD9liMuZNBkhAzKi84QoxH13gAljABTkw3/QdAmoQGbqY0PrhW+xLM4I93SJduCmAiny1Rz3yD0gQfiHs4unV4T0QhDh0AdmbpodIgViTAdSXPweQamTm5PAEACig2qv9ARLM49IkYcX0MUPz4ENOWxkwAQiw0F8yay1liWoM5nFTMbQWhD1dNCL+55ObZie+KhuyOOf/PKl67lE0oAFjngMoYAAkgOZpPslE2+Zo8+ZlQqFZo4bpwwNGYARIYAMONNtsu+h6UR5IsAJTYKRDQ3IR/oaTRBBCOwHtRVJLpjJjjOpPMtAFDQIkkF9DwL5PECFliDFm5mV6NAFThZZsoQKeVSZv2Oe8DhgmXgJa/k6hgkX2SltRdQAW0jk4sGn9dQC6rujVXiZdoMtdQOnR1AOwpQT8owA9cGgZsALgfNj8pEdZw6adgS8l0EciNaannu52wao9Rh++GAHzcl2MMkMdnAIqY6MBCQHI8uUGf2S6pQIdo37MvAIwKNcktezNPltClKrcNQNFQAEin/F2UdNQKAQd/4AAlOJZ8xEDjlZKapNDItABQgBq+XRYfXUBLfA+rxdd5eryI2Y3SVA8QmIxF7C1De9RA0Hyw/hO3wk0deM+K2AEpab0+yUE1wZjyvI//VEY/WshvtdkGHcBU8CmHgCvs/ICsVJ/wQz2+KVegv0CQigEWFUzfK4CScBOqnenM0BTkfZ6TaIDDoT3esEmEDAFrnUCtvIgQqAAoHkpS4VQAjAFEDCMxPQCoW4ECkX3J3D3jo9e1wU4081qAiWjF3oxrXMqpJ8BDBcz7fdXHK8DU+B1qZ84iFy6dTNCiuG0+SN7rg4hmRR70iVdSf/7DjZbrEqQrHYAMIAEdZNdqXbGEv/gNQy2ITNwAxgXg+gM/eZ//hVByOoD1CTFy2YCuJSL/vI///Rf//Z///if//q///zf//4PEP8EDiRY0OBBhAkVLmTY0OFDiBElTqRY0eJFjBk1buTY0eNHkCFFjiRZ0uRJlClVrmTZ0uVLmDFlzqRZ0+ZNnDl17uTZ0+dPoEGFDiVa1OhRpEmVLmXa1OlTqFGlTqVa1epVrFm1buXa1etXsGHFjiVb1uxZtGnVrmXb1u1buHHlzqVb1+5dvHn17uXb1+9fwIEFDyZc2PBhxIkVL2bc2PFjyJElT6Zc2fJlzJk1b+bc2fNn0KFFjyZd2vRp1KlVr2bd2vVr2LFlz6b/Xdv2bdy5de/m3dv3b+DBhQ8nXtz4ceTJlS9n3tz5c+jRpU+nXt36dezZtW/n3t37d/DhxY8nX978efTp1a9n3979e/jx5c+nX9/+ffz59e/n39//fwADFHBAAgs08EAEE1RwQQYbdPBBCCOUcEIKK7TwQgwz1HBDDjv08EMQQxRxRBJLNPFEFFNUcUUWW3TxRRhjlHFGGmu08UYcc9RxRx579PFHIIMUckgiizTySCSTVHJJJpt08kkoo5RySiqrtPJKLLPUcksuu/TySzDDFHNMMss080w001RzTTbbdPNNOOOUc04667TzTjzz1HNPPvv0809AAxV0UEILNfRQ/0QTVXRRRht19FFII5V0UkortfRSTDPVdFNOO/X0U1BDFXVUUks19VRUU1V1VVZbdfVVWGOVdVZaa7X1Vlxz1XVXXnv19VdggxV2WGKLNfZYZJNVdllmm3X2WWijlXZaaqu19lpss9V2W2679fZbcMMVd1xyyzX3XHTTVXdddtt1911445V3XnrrtfdefPPVd19++/X3X4ADFnhgggs2+GCEE1Z4YYYbdvhhiCOWeGKKK7b4Yowz1nhjjjv2+GOQQxZ5ZJJLNvlklFNWeWWWW3b5ZZhjlnlmmmu2+Wacc9Z5Z5579vlnoIMWemiiizb6aKSTVnppppt2+mmoo5Z6av+qq7b6aqyz1nprrrv2+muwwxZ7bLLLNvtstNNWe22223b7bbjjlntuuuu2+26889Z7b7779vtvwAMXfHDCCzf8cMQTV3xxxht3/HHII5d8csort/xyzDPXfHPOO/f8c9BDF3100ks3/XTUU1d9ddZbd51xDP4B4fXEc/gnhweIoP3wGkzIwYQE9NndcBO4yGEAMvQRfnjAuTCheBNQCIEIGuRgPvAcHGgihAccGKEBNfZZ/vq9pdCABn1o+KD7AcAXn/y/5VA/gAeCAIGG9+HvWw4SQAjigQHcL3/621v6QDAAB0jhAyQYIAHzlj4AjAAFAdCACxrowLsZcAT10wD/B8Q3PjJgsG4aRMEAOriPf9CgAU54gAhHqIYGYAGABvjABk1oARdODQcr+
IcWLKIBABREH/sgwQhMUIEKHA+HXSBBAwzwjwe0MIdMM8I/CLARC3DgCg34QBAGEALuBeADDdDA/wLgBBC4oAtTXFoIAmCCjXDBAf/AAO4WyIERcAEDe6RCCK5Ahn6owQVsVJoDcoACkATAiygwQQ1Q4ABHhuADBgiCFFDwACkMQHaEHBoOB6A7j/Tuec97ABYkOMpGmrABJFBDP0LIybKh8nlUEOUo5SiFEeRyBJr8ByJhGcviGS8HxuOC8Z5Xgwc0YQQJoCEIGvBLsg3gAY+sQTVF/5kDbJrAAaWkYRAHCU200cACBuhe75JIBSo4wAGXFIgUwIk2EnDgHx944xErUIMBOMEABgjBO9WmDxJYYH3PK+EIDACAK9CABv+Qpz/NJgcOgGAENagABmqASyC6wA0ORRsEsWDI7EUxAAadHUfNpg8rAECG9cNCCEYghQcEwJ0mHZvy9EGGCOISBBawAAAMMIA60hRsPBBCCIXnhhgCcIFd4ED3QtAEOgrVazMQAgv+MUQLsFCMABjACNI3gBwEgJdSfRgN+vmPsxblBQfwwBzy0Q8LJEAKDqjAAwBJz28ORIpkRRgeb/ePACRlBSpAAANi0AYLhACSBkjsABbqhv8E/MMCRHgiXwk2x3+QEo0SXcoKzJABOJwhBDVYZ/RcwL8mGGB8mLUswUYAgDKKVQNMWUEJWrABB2xhCw4wQQgsAIIAlHSjA8hra/XF2wFMsp0DAQEJoPIDH2wgCj7IARqmiQIQKK+plTVuvzhggSbAUSop+AcF/rEEDBBAC1nIwg8I8LsBWEB9D7jCQK4Q2O7e6wMaeIAJ9koVGdRADEvIAgFWAF0f+AAFQSjhK8mQgP/md14PqAEXsJCVmWIWveolQB0dYAA6xKABQeCuhOXlu94utCtK2EF0S6CFOna4DECQAhve4A/xdeGZvjRxuy78FTeEYAAyiMARMAAG977/Nwc+OAISxuABIuzymT2eWgJQIAMFvGAD2EzvD5awgR3MuA4fHB+VmRaAECjPAztwQBVanN4s5IAALRBABFIghDlctcxmRtqUAZsAKHTgC1tAwzDVu4IVaIEJXzhDVflMNQsMoIQoSO979UiAKGTgAI+OGg2wEAAL7IMIA3AeFZ4nZx7ykNNOe2J2r2oB3lrTd0tQ9aqdlgAV64MDxqzlEdaAA1tHTbuGHCXuOOAPKARb2GQwAEizp4E1XlXZUEufAVBAhQecb9pU04cbvsuBjW57WiMQd9WkYLuGlntquduzuplGYQeAAIXupne97X1vfOdb3/vmd7/9/W+AB1zg/wMneMENfnCEJ1zhC2d4wx3+cIhHXOITp3jFLX5xjNdHHzgcQbodMoQTtAAlDSD3P4LY0wZwEbCz1UkQf9K+jMdc5g/6QLhx4le9zlznO+c52vDbEys80eUGiQEECHICL/ScWh/4h597UnKeDH0gKr7JPsLAk7j+owkIHEgAnK70LQEgACUWiAIEUgKZkAEEUP0Hy3nicZuU/LsGcOf3bA52vOdd70xzO1CC2M8XDMTscpk3CKJNE6bLjup7Z3zjHf94yDPJdj25gRn0ogEpSP0lJa1g5D3/edDbjAbNJgjbPwB3mBh9IEgYCBkSf3e2yDMEi2dJ4kN/e9znXve7N+ERBzSgybSaZB8MmAgo/zHvpMz78P2AvUJJ4FwORJ+nRCDC7MT716+XZMpk5/2outCPqQtkoSQY5Henn3KTzw4EH2D/fvdpgASA8R+m/IcumxAEL2pSk/+DiQtYXtLuC0ABHMC9GQEscAIwYiYD0AANaD8QmB0ACKKU0x2e4imGGojnUzEaUINXOryT0ABEqoGTSCyBwCECxJEI+gf72YgbIIgkyAeKuL4ASrkJrMDoiz4XcC6For2moIHqAwHUA4kBSC7nOkEjPEKLcIHEC76JQDuB8ACKIIHIEggU6DsXCQgAIfkEBQQA/wAsCwVZAB0BHQGHEhITzs7MVlZUUlJUKyIG8spC0tLUup40LCwskJCPPDw8ZVYU37k7dnZ0yqo0aGhogoKEx8fHlpaUX08UGxcEOzAMcmIZsZYsTD8MLScL/f38iHIdeWQb7e3sFhEEfHx8mpqcsLCwSEhIIyMkUkIO2NjXMyoMbFoVt5ktgGwcJiIEJBwEQkJEpIsn5OTkVEcRCgYERTsMwMDAurq8rpEs0rI0NjY0QzYMqKipm4IkHBwcZlIU9PT0MjI068Q+ioqMW0oUYWFhTkYMj3gjoqKk5cA8noYmtra0WlpccnJ0hoaE3t7cTk5Mnp6cNi4MooIkqo8sFhYUcl4Wlnok7so8emocwaIvEg4E3r48bm5sqo8kln8kDgoEinYgqoosooYlkn4kBgIEAgYIoKLopKh8XHgscnwstLagQExMgLaEWqSEaEZwbsIkbuKkyIIwmoAMKGTQyKhUpIo4SCYgjGwIRlxYjngwFDJotMq8inKACggwRkxkfGoImoQ4iHxICjIoaHJoHjocynLMxMDwBAQYNCIwpH58gnwoyuo8xJzMcHJg6MIompQg4PjYdmRI1tr0IiwopJYsYmYYfGQw1ujoVFxkgJZcGCgowMTUBAwQgkZwtLbUVmAYinhkPGSIjFwcFBBg6OqULj4w4rZcRDpwchyAqsJMimB0jGRIbG6EnIyMPA5AWlyAxuq8HhYwMDBgWnp0ipawTDpQzLS8oLK0DgwYysTYtGZMbqTg7rDMChg0cl7Iclx04shcqIhQSDowiHwM5tjooJK0JBIQfloc5ub44OLAYkZAQDZQMlRMoKLEWmp0BhgUiqCIWEaAyKAQHgYg4uzgND4QFGRQyJhAdpaYio54cFosqOq8RlQ0HjJEKCLA6sIQnJRw6oRMhEYoJMKA4MKAen6I1sLMxJJ8ysDE+PbEYmBEWkwozMioCg4IPHQ4+ODYRCZgem5gyiyAbH50fG4wrpAMoMbMen5kjGwwrJR8Dg4EBgYMCgoMBgYEDg4MAgIMCgoEAgIEAAAACP8A/wkcSLCgwYMIEypcyLChw4cQI0qcSLEiRX8Y9elAoMAGgij6LIocSbKkyZMoU6pMiBEAkwRHZMwI8UHHyps4c+rcyROlvygNZEQI0YRICAkIeipdyrSpU5L5HgwVgEDHxh77nmrdyrVrTxtHZrAI6c+r2bNo00oMsuSHVQUKEOwrq7au3btb9X1w0aQBERkyQkCwERKv4cOIU+5L0GFJhCYflMxwgaMH3cSYM2tmGAWEhiUfRujLJ+JIhw9ZN6tevRmA5xAIyvrLl8UFbNa4c+PN90NDgtT//LGIEEGB7uPIvfpLwuP3QOHEjSefTn0pkxKwZdO2PaK69+8r/en/AFHiR48oOphMTpIPvPv3Iv3pYxHCwNEmEUogvQy/v/+FGOXDwg8xBdYAAoX9p+CCBGEkXxQ9sBDXXAxWaOGFGF7IX4YcdujhhyCGKOKIJJZo4okopqiiTg7OhoAIAzDBgmgbDrQPC0EgoaOOAiChAIUBvjiACAjkg9GKSDrkoA4P4CBUBDIkEBtCI0hgQAARCJQfX1OKl0UIEWAZwgM6
1JjkmQ36M8IPQ/3wwQcJQDDlQfsM0EASWWTxwAcR8ACCTf8A8EEJAUgApwwBJAEcmowSlE8ShSoAQD77oNceQvLlo6k++iggwz9BkCVCfg9EkU8UAxAnQoKNNurpDArI/6dPi5jSGlwQtk2Zz15/OpiPEi5AAECrrfrzgAtI9YBEECKIxpKtAEjQAQRz+QMAY9S2+EAHOABKLJr6/NDBET/gEGYERAxwqUEtyjZcBCI4uI8SHSQQhYP6NNDBbd+i6ZoGLsjwwwNZELGEDKse1K58H3QAwr0ZIWEAvLPqYwMOGszQQ79odqZBCVlAPAIILthbq4MjhODCA7OiTPIRWQjwgAQlaCDDxhwnGa0G2Tk4gAEy2HAyRkgs8U9sDv7TqZUulBABCES4cERSOSP5qwZNlOmgDYiyoHBwGC3WQQMtHxlceg8kEQQCQZSAQ3dVr2gst1pjpEB0CtMFXQDxJv+NkF4u/BBF3Ei+u2qASVAGd1mzFqSXwxAf6bdAZR0RAROsEn7iPhC4PQACPUi1xAOXMjlAaijj4EKotMr3Yg8jICAADgZ8AICZmo/oDwIJBBBAWIRCUGZw/9j3nHzHamyrP/tkIVMIlkcAwQi45677CA+AgAMOCSAxfHAKgFAT5fIFEUIQRpod3Hw//BPCPwkMcK/1SM6mwwgjgMSfPiN8DzYAI6gWfzIyuO7oj34ITKACF8jABjrwgRCMoAQnSEGT2Ip81TuebDa0sONVUEMKaMCbRijCJCggc8/Jx4uQkIQkIE2Ds1FWFpLwABbc7oMWCkIEruS7/PCgA1lAoUD/9CECIgTABTzgQRb8hrIkzICH2GHCunD4H/GIgAkiyKIIfuACA8QLIflgggQkoIQIaCAJrROPEgIQgiQIQAAzlA4VFdSu4OzKBX+q1T50MCklnNFW+ghCAJqAoIzko3FzpCN/RqC6UA0NIx/4o990QASKUSp9GUwkfPSBhBLcDHcLiyQa28WCwGDxAz9IgggGp0kG0UUH0srWI/0hynYF0mk4CAFgsDS9TLaSOrLxVAmYoL6ChFKSvkocDyLwARGwIAkRcIHtfrmgfTRgCUSgnkLaVcsW5aMBLjjN7WaTuBnYwJfURM7uQrAE1m2zRd30VW282CKuGUAAQkwnMPUh/wCgWYYh8ERmRvpZnBalrASO1Cd4MALLYA0LbACN5BKT5o8ePLFvGOmBDO6ZT4XqxkF3g5feqic5idoqChKQZvr08YAlmBOdHtWMr/aSzeUh4AOBolw+epBFjIEgiwgaIhN22EwROG8JDVhUTNWJEQSwk3R19IcAsEO13YHAdx0AmO8SALd/5CMIMiBUfpipzRiYYAVXQORSV7OkFlKvjgpjUgIEkoC6JiAIrBRIPhSQhB8koJnz+wcNoJCDDUgBCBiogAo8AAOYrlU58sLk5PR6mdns47KY3YeR0qSPy252ICdIwRBy0IILXKAFRhiCBRBbgQxQgAsdfSyH7uGBFf9UgAShHYIRWkCDC0AhtRyYQAwqQADZ6k4+/bgCBVbgBAzk1ghQoMFgc2BcEU3WH2HoBwBWkIEKOFe0FixmdVUjXnb9AwbKjcgKPABbtYKtvOP1UA6GwIEFkMAJ6+UCDFoW3xFZQLdaQAEKaJDaKti3Amjd7wX7+86FyWpTHHTwaDRFYaV1UCkeIIB3gSCFLhiBBgI+LRhScAISIPgesWWwg63FggdA4Ac/aAALKPTeduVjAH5NQI67J8DJKkU+V7AtBiZggS6UVsA0+AJ9F8DgZ9lYKk5DlGOSMLwVo7QDJZjJEY7Qxsj5WCth4AIFTBCDCUhhAzmgQZOd3KLmgSD/CyKwAQs+YIAlZKE9Vk5ACT7gEQT4WX9whWzSxGyCNQ+NfFZJ0D4a1q0aX+ZaMiBmjdNEPEFT2tANWbAAlnCbFV8rAmSySdm+3J/FanLFlT3W2xztoCgkIGBEIIIEsqAAycIXPC0o7GFj4AQVUKAf7q20sHOHasqNIGqoYXVLPiCDI+QyAi5Fgq0dm5vckja6ue5CcIdLAAqguJjUVlFnAUBuAIyaLlHoXGU2uDB9IIAFCIidCCRwMBY4uD/9+AcFVFABIVi7BS2AQra37YRufzvcJ1JTA+oqAQicsF0ASIIBZpCwSb+Xv8FBAA5OM+0FIVe5BGjuAqrQBdJqQeA5/9C2cP/R7Ss0llj+CB9gIoADAUg24hNXF4Amp48kdIAINgm0Kx2EXgqEfMgc8HB0UU7fHWDgrBS4AqP2wRG42MDLOJ+BFAHKc1yt2uIW+rI+7kGB7paZA0PgLQp8q+QULODpUQc2S9bHBdgi/KNfa4nEjyCCKT5yIPmAAOQcjSKMiDkDN0B62qFg2oG/HcFXADZFKQCEDWxgAiu4+0et3ACgHY6iAdKBAPOhA0z+QwQy6ADLLqwiyf0jw94l8hC+wHjfpry+Jl6sCizgACpQwQEpWEGFVszSEvAAB0jIIhME8KOMzPsB41RAnHIUBAjIAJtIE3rhMaWPIJsAA0A4Af+apTvdkjugCOj3gQMmMHwH/4sHTguTAUpABMuQky9aE8ETDTB/TyqhB+emeYXXfStgAjfwAifAAVPgAD5QBAzAAD5QAEPQfu0mAnjyAASTJ24EMe4WBCyQPvugAAMQBBjoI+OkffSDXf1AAFtQBD7wgBE4BFLnIeLlY65HTfIBBAyYfuvXWAKIacDkDxTAAefnAz4wBZkHhCEihECQAl/gAFXABUpoXfrABRmwBTmgAj84hQs1ARewAPnGhSJCAEaQhVsohtWhDydwAROQYmjIICbQAkMgfG/4If1wAjTwAhhXhxjiD3E4BBTAhx7CBRxAAxjghoIIH/5QAS2wAYH/mIgZcgVV0AIxEAaQ2Ic30AIp4AGXiCEekAJecANn2ImrEQYYQAMcMIOkyCAU0AU0UAGjuIqaoQ8YcAEcIIWyuCAUkHYmEIu5mBj68AI0cAIw8IsKsotGkAG+aIyGoQ9e2IbM2B/+sAI5kAN0GI3vAQMLcAF6iI3v4Q8ZkFqP6I3gcYcXQALLSI5p4YeNyInq+B39wAEogAHp+I5msYgXsAHuaI/UwQUpcAGVyI/AFAMXkAKqKJDIcQUp0AKwiJDJ4Q+nKAW46JDHQQEbYAROUI8U6RRhQAIXcAITuZG4sQJbkIwaKZJL4YxfGIYoyRoEUI1a2JKsoQ/bOAHFKJOr/xGOW3CNOKkZ/bAANAAEiNiT65gBSsaTRJkYXLCG3ZiUmOEPTtACXTCOTokY98ABUECPVZkYi6iJ+7iVhvGJLSCKYHkY/hADLVAFB1mWdvGJr3iSbGkStGiLIRmXarGLWtCQdlkXwfiRN7mXd5l2ygiYaqEPQIACC/CXhHmP1GiNcLmYFKGSQgmZaPGSc0iZZ0GT5/iYmAkR7KiPnekVXCAFF3CIockV+NgFX3maTuGPADmUrBkeBLmJsakV95ACUECWtdkUEImKa7mbPWGRRqCXwPljGAAFFnAPxckUu/gFvbicShGMw1iX0JkTJJkDBMCZ1TkQzggFE8CS25kTL/+JneG5E9rIhrB
Znhehk0ipnimxlNyYnu4ZEewIiPN5E1xgAaWpneHZlaB5nypBiDQQA/y5nYsIBQYJoCqhkG+poD4RA6hInQ4qEh6wAVBAnBM6EqbolxlqErvYAoPZoRp6mMQooiMhhKSVnSYqEmEwAShgkysqEtSIhAVanc4YlPIZowVhlI5Yo9AJA3jYlDoqEYtIA6o5pBTBBVXwmkhKpDeAoKvZpA2hkFAQkFL6EGdJA7R5pQ9BAQupm1wKUKeYimHqEK3IkD66nPpAAsOonGXKECugWyH6pgkRjFAAhnS6ECsABuSZpwkBAxNAA9Dopwoznu1JqALxkxcwmYj/akwZ0AI72agGkZ8o8AJpWpyfGaWNSoj7KakNwohb6qkCcZs0AKaeepZQwAFuKqr/oJAtkJGsGhy1WKKx+qHPyar+8AIoIAXg6akUQFpzeqoLcAB4GqskuQUqyqo3OqismgHzlYSsqo1awKis+odU6amEmJU5iqT+ea2SegVYaaWi6g+ZGKqi+om5eanA2ZsJWqtD0KDKOqarKqq2qq67KZ0n0KuSuotPUFyxapjoGavTmANGoAKxel47sKjbOqT+8JKRGqtAWqn2Wpt+CAX2GauEOI8TG5tF+p+sGo8AubGsSa40oJYHe5VoKrAQmq8HW6FGcKvjOqv62qi/CqIi/3uaLYoCIHmwjZmsoqqSMHqqAuGsNCq05xWo1CqqRomEB3uHebiwOgqVcuitjXqV2iqwoEq1iIquBBqrYfCkKfBQrPqJF2Cqkrqhqnqwu3ihNxuatDiMM4uoZwqzntqXtDquFICFPlu3zwi1MeoPKpADDxutCyCxAvuogNi2nRmxL2CJuGoCFzAEmoqo/ZACKBADfrui5AqlB0uIY6m4mEmyZBqrYomhktqbaeuuLUC3Z/sCHymhiHqdeyupN7oDcUuoDmuw/+qFKfACMbACmZuhZ2kFWFADDjAEJhC8DnoPUgCBRsgAl9moGUADDcgALugAGCCpFWAFBeCAEFgELzEgqQTQAkbIgzcgqVwwAQdQBAWgfhygtWXqDx4wAS1gBRdgAdDaqP7ABRpmAh6guAEBACH5BAUDAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFBAD/ACwSAWQArgCmAYfgvDzc3NxeThRVRxJBNgyGcB5NPw2pjSqvr7BCQkRnVxRvb3BeXl40LAvU1NT13XmgiCf47LdyYBjbzYxSUlEkHAS+vr6ioqQpIwbuzEnLrDTCozSMjIwwMDDi4uSGhoT0zEPMzMz6+vm2trQvJAibm5ywlSyAgH8SEhTv7+6YfyTq6ux8aRy6ni7m5uR5eXkkJCQcHBxGRkRmZmQ+PjqUlJSmpqS4mS4cFwSKdh/GxsTYtjo6OjwWEgR6YhyQeiOsmkyOglw8Lgu/oiwqKizkxFTsxTzqxESfgiV2bkwWFhTSrjdKSkypkywKBgQ2NjTmwDza0qRGRjSOdSLLpjK+pkzKskwODgvYvjymjkSikkTy0FhGPxH27tTSsji+mix6dmTyxjzy8szVt0fmujiuliT+9tQSDgQOCgQGAgQCBgTm+uSswry2hCxCZIhmoih64CgmMCi8wqiYSkgsZNDO9NAODBi8yMQgBiAaKijqpNBQJiCMmGSARiiMuozy9oCymLhMXlgsIsDy7FDYnEQEBBgWMmiaigxiWijI1tRiZhjExIQGGBQWZFDKpNBgXFCypOiYcEjowBB+oqD22OAgFjTwnDTcxMCssMQ2VEwEDBDa7HBIJmDGujTeoIQKGDT4yiwKDggyMGB+HIAowoCYZhyamBzO3PSGkijo7CDi2PSwhAzGwBC01LxOXBS67FAWEGDUxPC8vtBgemDO4tTyxNBSOmDqeEiAbnSYegxmRkB6XmBuRhiamIQKMih6puR65KgKCDCwtICwxjByXgjY0FDg8LTWsijSMICWkrggMkSwbCzg0CzOnBBgPhg4VBRybhje6tCwmgxyRnBgYjzO3LDo2NTGzERgYoDMxKy0yuyaipC8eHSQsChipox+XsiwrCBucITSeMhCDkCIfIA6IjBQTmjmviggPBxQOjDmrjj4skRufnSCbAiMmpBCdDi07tD2xISqfixieBhgTCjMvsze7uzArjTIeCwKCgQGBgQGBgwKCgwCAgwCAgQAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhw398ZtIkaI+iBgzatzIsaNHg/p4lChRo2RJDhxo+PvIsqXLlzAF6mPgoabNFCJWvFgZs6fPn0D9XeHxpIPRDidSOOABtKnTpxz9SZX6j1/SGlegat3KleDUrzF0eKDQtazZoF/9zVxhIQbPs3DjdkzrDwUCFwuoyt3L1yHdDi4cdJjat7Bhr2mtpqjBj/Dhx33TxnAw9ivky3K/zhTR1jLmz2W/orCxYoE+z6BTQ/3KI/Bg1Kpjo+X3QoSNxo5l6/YplYgOBxTS7h7e05+SFwuuwCbOvLnz59CjS59Ovbr169iza98OXaq+iVX1Lv/UfIXfxYFqJ2Y1/5Z7V7UdFnCwYaGEEvEIv/JjMMIG2YH6UMBBCQggUMILPJznHlf+xFBCCi6sIEIARORmkGYJhCBCCjsJ1KANHoSggwUOuBCCaQsyiAIDL1DAwAoOwGBhQWDVIGJp4iVAQQcwxNDBCy4EwFSKWnm3UgwBxDgjelNd8YIOL3yAI0+ENVkCXu0RqVUMHiiJH2JqUaBDDTC8MOWFUilx5QxZavkUl1626aFUMCBgQQe0nYneFT0SsUAAOhDhZldwyvglkyhwEAIDVZ0w5Vv8LGCBBQGkEMJ/g25V6KEEzRQCByhI5Ghe4kWqgw4hhFBDhZkWuamcHnb/YAEChuZJantTxSDpna0+JdWmClnlwgkxKKEEERykQKxyYKq1gAglwNprS7l2GcNCV5QgwrbccpvCBSg06w8PIoSg4LQwgZXktQrNNNJINVzgQLk1MHARVZpRwJm06EblnT4wJEmEPqflp095Vf0TAwc6mfdPXUqYRzA/HSCQwgf89qsRfCe8kOwKHCzwwmD5JTYqTzMhYMMJCyxQggMpjCCjxi9JNMMKOK8Q4T8BoHgQXVec0DPKCSAQgAsCeaDDCYbS7BKdDDBAwdQuMgCDQnTBQAG7DwNMQ9UyEIFbxk6XbfbZaKet9tpst+32w3AzRPbbT19RrBIx5B0DP1hL/3SFsShccZpeal2BAuDs0Q2RP3WmUJMLETrAAKxqycDBCCE4EAICDChHVcokBhCCDQzcp7hfMITwLUoc1MDBkD8TYUFgBlqwwmLM/nPFBQFYcEEJI3jwjw1KnB4RDL+5RfB30tYldcT6rHg0ox7GAAPCV8gw++TGj4d8CGOTrZZXipVwL2q0pcDB3IozrkMATDxBAwwOL76ZDecJ948SDHfYfUKMs4ClHOCAMTHhXOMxzoNOQKVfJUAGAupdB/6HtYVd4AQnqIEAA2Avv+xnBSGY4JwiRZkV/MMCMmDf6fQRg6zQKVkheIJD9MGEEASATUyiYQ
1KQB//FIyCcksTAv9K0xB9JMACHjgB35hEGCMiwAPcA2IChZIsBi5EH9oLwAuWOKfc+EMGnEGgFNFERWXxC4uUWgAXu4gfIlgqK2OkUQMbNAIXzKBdTEhjY+SYmwCJQAdrjGODiLAePtWgXCI8CA11sIITCG5i+VMCDJSAMBTQwGLri+OeXpACHZDEaJYKjpwCOKEavACDJ/jADC4SoMwhYIe2WwGtNNkpGSDAhgEAFAc6MLifycomwAwAAvjGuBOELkkImIFbaOkVFFjvH5McGwB9BAMiHMUoV/MQP6xHBCLEIFQqZKY4x0nOcprznOhMJ10iQqW4kZFTzFTLsYwSgx+W7GAwMAoRlGD/z4fRBZ5jlAgFbAchCzAgkIjBYgkC8A/HjaBzTPxnOOnGjxnU5EAl8ACWDMYEjSLgHy8gjU7OJ1GAUtAfv1xlmGCUzQtdoQYXc6E+bmaBq61Tf8zUxwJSgID7SAUFJcARQpTwREyhVEQTxOly4vhSDmmGASIYJkKuwIFhSUx69vFnSSfqtivYwAWTY40IHADHC9EgBC4oQctq4AAEwG6rXG2bXSrzFSKMNVwIaWUKdOY4NUb0n+KsS1FXMhW7OqB4PzsOAjgwgxlwYFbBYSNgxWkXsKalA3edKgcCcAJwVlRpMrwpTqX4gicI5QKPkgoYQ5AfInTJUFK5whAXoFVc/y0ViKJS3+AkwskSIPRhmCUrYbJlRtiMNqABAtRrAhiAyMKtYCidkEqmAoPZ0Va0t8WtmlxAJhQQ4QNRhaNxKMCEJRKVpwkw1hMOySu4mvR/4xoBjDDHFhoAiAEmSqoRdeC4SVXqUoSFazmLOQIdjOAErJJJ0Upg09684JYhOLBp5zhZc67TKwT7KxvfGdd0ephBHw6xiEdM4hKb+MQoTrGKV8ziFrv4xTCOsYxnTOMa2/jGOM6xjnfM4x77+MdADrKQh0zkIhv5yEhOspKXzOQmO/nJUI6ylKdM5Spb+cpYzrKWt8zlLnv5y2AOs5jHTOYym/nMaE6zmtfM5ja7+f/NcI6znOdM5zrb+c54zrOe98znPvv5z4AOtKAHTehCG/rQiE60ohfN6EY7+tGQjrSkJ03pSlv60pjOtKY3zelOe/rToA61qEdN6lKb+tSoTrWqV83qVrv61bCOtaxnTeta2/rW2sEBkYdwAAVUQIw8PoAGTCABXf/YHzgYAASGLQEh40AAKuD1kJ/t5CEQWwhCNgEBiqyAATR7yCDAtbjHTe5LD4AgJPjxDm6ggmZjQMgaaMENTAABIG+A1yoogAr+sQET+FgF7BYAAbbNggOEm8cG+MENel2Bf5BgA17w8bZbYIJuN0EF/uhBvf3N4xt4IdxZUYAJ3r1jFBCgAEv/AEABTn4AHzNACgUAwA42AIAN+DgFAZjAGLSQhR2QoQAC4XiORRCBgRThH2O4NxVwLIOEROAB/wDAAQiAAwUMwccRKEIGMrCBA1y9rDn2QNEfYIZ/ZEAgLKABYncchX90IQNHeEAGrAABocPNAPy28RYmIIanb50gHeCCCiCAdxoXQANJ+AcCJnCEgUSg6FUYQxBuDPR/dAAIGhjIFh4gd57duPAFAQMQdvCPxmebCi3PNgH0se0gC4CwQtgBAADg4x54qAFQOALtj60PA+R+9j/WhxAOIHMj8Pg8aWhA3RXgA5vnuABnEEgaSICEGxjAH05IN4/9gQEVbEAAwKbx/w0IQNgK/GAICshHjwmLgwIMQQJocOeNc+ChHrBgAyyI/o4h0HB/nEECLcACtrdj4ycVaACABWBsHUZiEqABCpAG/oAGAjAEP9BwHoJjG6ACaOAP+SAAGqACFrhjB9BwTmAAvBaCOqZt/9B79EZyC1hiG6AAasB6JnAADdBjLVAAV5B8JmACQvCCJCYA/wAB/Dd9B6BtQMiAKjB1/kACEDAE18djG/B9yKYCLTAAacBjOwAFLIAGODAFLSAAWahjGlAAU4gBPVAALZB+OwYFO2ACGgABQiAB7wd2N5YBUAAFXuAFB6ACJpB/O2YERuCGP9ACRuAFAthjszd1TWAEif/IY0YAAF5QAAqgAViAbT0GBQCwBH64A1hwgzymewBwBF4AAT+gAa2XYywAAgCgATRngwSwAeemY15wBDtABTPXANwXgzl2Bj8AApq4A0ZwA1mIAy2wYxowiK24AxCwEj1wAznWAyoQBpH4hzdQAG+BBDfmBAKwBEdgBA5IAn+YhCPWADcAjFAAAT1QAejHYyoAAkegiSzQhJmnYxehASCwBDugAbPYY+cIBUhwhCSnYxnXBCAAAhsgAT+wb/+gADsmADsAAjugAAbwhzu2EkJwjkZgAg3QjwSJAQcgiAAggN9GkDhwABkwcxtAAANIkGjAApHodSqAA5ioY9y4BEb/0AJTIIvc6Hw4xnotkI/6xnGVh2PJZ3BHoAICYJE7Nn0QkAEIKQASYHc4xn0QYASsWJQXiQPvqHs+uH5pOHtLgH4tmWP+xwKzZwLshgOgaJZXIAGy14cm4JFViQYKgJMmUAAHoJU1hi92iY8aIAEFoIIE2YEbIJESoACpt2MlOASR+AMGoJfHRwBlEJcEsJQOaY8EcAAboAE3MADdN5kQQHFriANw2ZQNUH0/YAI/0JY6Nn0Ap28bUAA+sH3mpwE5UAA70HgAAAUEaX4zdwA7wIwMmWP6gAE/cAQZYARvSJc25gTPdgOb6AVGYIM6dgUNcHiRqAIDwAJUUJI3pgAA/yeMO8ACc5h5/YZjB9ACptgCMkdvS+CbODYE7MYCBSd7XgAAXCeEN+aKNciZULAEErCUPnljXvAFdTecP0AA+ZCKNkZ7EMACAmAAErABSNAAaGAA0JhjBtAAOJBsELADByABU7B7OTYACqCGIGAE/bYBzKljG3ADN7ADGVCGA3d4LGCHMiahBIABBnCErUcCF2dsVdl+nykVOLCaNUljH4oDFUAAEHAEU1cBgieioCdjU5il+QkFZOCZQ2CiNiYAYioAugkA3MkFA6ACpndjVHEGZigBcFQB1WeWQqEALSAB+ncFtWljLNBtPVoBCIgDVCEAGzpjKtAELXBv82YCs86nAh1KAvyJgjGGAyRAAJEZketWBj14jDvWezeABAPQAEKAd6RnjxUJAQOZYxUQf2pBAD24pDfWmTloAJVad9umD07qoDOWbAonjLYIfWcwgVs5BZHoigsZcVvpfRTpAwcABWdnkjlAhQKBARqwpn2pD/mAA/fHhsfXACyABHBYAPG3fcoHBQc5kk7QY2jwA1CgjFDQArqKYz0wBMA4e5GYmTvWA4epiaMIAPy5Y2gQc4OYh+O3fsgpjFBwAwKQrusHqPlGAAxbbnBBjiwWEAAh+QQFAwD/ACy/A1kAcALiAYfg4OGampyurqx8ZxyKdCCbgiT29vPwyj8tJAgyMjRAQEHExMTowTza2tx1YhxmZmQkJCS8vLwaFQRuWxh7e3qCgoTm5uQsLCxUVFRiYmTQrzZWRxJycnRKPg0SEhQkHQSWfiS0tLSUlJS+oDCzlyyKiowWDgQdHRyGhoRsb
GytkyyCbhzS0tTk1Izs7Ow4LwxISEinjSzW1tQ2NjRAMwxeXlw6OjympqRgUBSioqQWFhQyKgu+rmxaWlyOjozGunz9/PikiiQKBgT03HienpyKglyqqqxmVhSujixOTkzOzsz27sTy8vOdhiUmIgRuZkSSeiRCOgziylxaThTewlTKysx2XhwSDgRydnTeujwOCgSqkiRybmQGAgReShQSEgQKMigKDghKPiBwpOCihAzMzsCGQBioupyMoIjA3tzyvsw8ZIjsnjBkbnBsQBjEeCyunlDu8jDWzvQeGDTksjQiLCigniDGuMSwpixATEyQjnhapISohEgEBBgUMmgoZNB2lpg2Ijx0QmzoeEh+fhiymKA6PCjMeMgkwoDowChaOhhaXHQODBhqXFDOsFR0GoDCuKTOthBuViiQZhiMeGxKNBQGGBTeyNA8MGg8DkDooLDu1hBiVCh4Wmyu5DDI9NQwIiAoIsAKCDDQ4ujQ0uCSeJBcfFiuujBwehgEDBA8VBTW4sTO1KigusBYbmR0fERGVDSMXoRaRijg9Oi8uNDsuhAeOhwUZFCoxOyoxHh6fGQyVEyCtoTAyvDAzswUEGDOuOxoapQeMkRwwiRw4qQ+IhSippAsMERuUBDOwkCGcpDAxNiMlrBKJihcdhj4ukC0eHTMMIB0XNSwvrSOdAxcYhi2ngyobCxGXFhGTGTQ5DCooMTYuLjounwCBggsPjio7LCooHQKGDS0oOjy2qyCllxoblTq1Ow8dDjw7tiMVhje4vg6Lkh4YkRKOjzy2szOmjD42uzc9IQYKCiozrSwuszQxNAGBgQKCgwKCgQCAgwODgwGBgwCAgQODgQAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDVuynQ4HJkyZt6OgnsqXLlzBjypxJs6bNmzhz2uzXI0KVnwuCLojQ457Oo0iTKl3KtKnTp1A59lPgI4CIqyIiuLBQg2XUr2DDih1LtqxZm/d06PDA9gQFACEunJ1Lt67du3jzzuzHt++JAAA44NNLuLDhw4gTf+3bF8aCKgoUS55MubLlywsZ891XAYCIE14xix5NurTpppr7XQjRoCjf07Bjy55Nm6LmexkaxO27M3Tt38CDC6ep+QQRABT28aZ5L8mN59Cfi0hidLj169izJ9QMo8oCBYxr3v/DEGFohPMsDABI4Vu7+/fwYYfv/Dk8cQ8zZiRIcMEGCgvfxSfggARW1tcMEcjg2nLEaaZDCRb44EGBFFZoYV184QbXBZrVlFo/CSSIQXsXlmjiiUjxBUEOFiTXYYOM4ZMCAALIheKNOOYoUz8zPGfDizD6lUNg95Co45FIJjnRPjYkgI99HnKnxHcMKmnllVhCpZkH/4mw0mtZhinmmL31FSILI1ZJ5ppstpkRYxqGAAGUbtZp550KMQbBDYE9qSaegAbqZmMNLPAjnYImqmiWfHHpgpdALirppDryhaCCRf5J6aacopYaPhfAkAQMFwwm0IebXaBAEhjAMMOXmu3/E2qrpU5EEgYYeIBop7z2ihOqHtRghFACZLDSP6jig8GwQkXgAwx+8nUCByEoMaUAKRwbUT/3ZAqmr+CGu9enD1grAgUisMBCCvsg++E+PVxFAQUVGMFCBOBJW0EDVZwrwgJKcNCuuAQX/BSqCbCWLT46cNBABDOg2ugJOuyDj6wBuECBuxjIUEUNFeuAwWPgGWzyyTp9eM+MOZww0IrrmUpQau72k4IFIkz4DwUuBOCyQPiUoLHMKBdtdEuo7uMDExRU9889HFjgs0GpdXvPCT60ONg+IjCBwsDIcuACET8fbfbZG6GqQ8Y1FNQDjRAUlNoJGKTAQQBKhPDjP0Ez/5HzQPdQYEBcaBduuG0fnsDiiARhoJuNA90GQwgsAODCAjVE+wAA+BZ5zwwCAAHx4aSXvpAHCsCgugIuKw4A4wM5TvjMxWGQwd0hVDAnXxfwiW3dRMhgwAIzyH0PPt1Gviuy3SLvlcrHV0ez6dRfOZUR1ioRQQ376MBiD77JDnnNakJQQRUV6MqtAukCIMMCOeTwcAKRn5ACESEYUYEC3kZ6jw31EkAOUrA7v2CgAjkQgBFq0L9IVe+BN+oHBGrAgQo+YAb38EAAmPCA0PSjBgAwQtyU9yceledQ3DpBEuzWgwtkgAU3GKFqAiCDvCWIKNF6kbIiUKgQVKEBAeAQX/92aAHLGaAEDXQgBJd4ISDhowIuqIDMnhghHZBQUxCgHAzIF5rAWUCKAvFAZ4wAAwgkgAIs0BuqkJUAAQAABReAgAJy4AIJZegCNcAAVQyAAol9i4mAtNCL+oGBeyWAJSZEk/QaWCSCjOdeERukDSLAAhh4xQY/gYFRJHgcDjTyUxwAQAwRCQMWUIl5zssAH+0zvUC6kkCo+otnFKAqEQCACDI8QQ9goBwJ4moGEIDADDIQgjeq7x6qSgAELtADe1FAZx90gRGsKJB7bK5lV+zHXyzAgdDIMgXS64sq+0i15b3ynNhB1T0UcIP3CWABMjAC/6rZgyoY4ZD9sMENtCf/AAEkSAkiEGI/PMCB8/hzShSYE9AEJwKZ9aM7EbBBNrPYAAwQBB888wHY+vKAI5LInOgM6XDUecYcGCEHFMAg4NiXUJbooAc+MKkRbuCDHpzAW/eAgQ9uMNMSJEFb//CADzTmNBBFQAlbvGKIkOrIDLggBzqrWUdL0B4livSqwpHYPk4AgRP0MnL4oNgnM8jVruqggU/TQTAh4AGnCUQHIiDqa1RDuSTQji82eExST9UDF9yAmlL16F01hdXCAookQ20aY1bDArsqFWB7RVZf/0rCqXrQqobNrJ36wRkmIJExeYVMNi9AScdWMwU9iypHV3mqVmr2tXdamQGI0Etu/3VsdlzcEwAyQBAPQIgCDuWLZbn4R9gat00P5RzkOGOBEmyUMY4qgcxIi6bn9aUGBqjAGo/LXTfplgKgwUcSFlDdDAUzUzn9CQaQpwOeiRBO3UrBEZGXxOJ2975Was4PiUCBEiygASWAVQJyEFBEtpdfJaBAABqgBAx4ywM9qAAFIgCEBVSgAhmoLWbxy+G7dAiR9kVIiEXsEHzA4AYNKOICUgCavijAlCVDlv0WAAALNOAG0GJMAm7gAhcwAQgG6HGNttvhIuOlLzqAQQZSkAIMQCBTIuasDZacgiSAhmpMalsKMmAD5TDkajMwyQXaxZh96OdJrQVVSm6qGTWjxP8kTiKykec8F27Z4F8sUIK6jNADLx9EmwVtgLqUQIR5Ru4CE87eP25g6IuE2Eh0jjRmJCi/GzQ5AyLwmCURso+3VIECPcjAggVwyFNBQASLbvI/OJCCC7hV0rAuEQbgYoNMGccFnvwzgtajnKvZMjkLZYEPnsySTcb62CbKgAWwiayg4frV1SxkBPDJl7fFEFmriSjf9tFIZHvbQgr4SQ1O4IETKOCdmzYIbhoggAL2IwkyiKhtlZADGGCAAihIgQJ09e1+D2gfKQhKDqpyHnYhZDxTiiRfQNjge3QaAPALQXlMCV5I
+/viwoGAD2TAYBlYIASaFPFqNHaCiyVggzL/YOA+SsAEF0SAA6qjgAxihvGaX2cfGbiBCGpgbxREIAd7OwjAGyCDAMxrny5oAAOFqh52saTTLmi3xW1O9dIQMo38k5YILICCqMpNBw+olilFUAJrOZgzlyveQGAwpRhX/e2w6dvXGHPb8RkEVKwqIwzO8yMZbYggeUXq1OFO+MloMLs5rDtDGANCsiGrO4YiCNtPWfjKYwYfKDDAKDcDRcdHuWYXMIIMMlAd3YJzoVFXqOVXT5l3V84HeoQBGgHwAGhXk5l6VEAGRF+CKz+tBqaEueyV0IAUEI31yEfMPh7Aw/dVbgHgTcgOTUle6O9uIB4I+D+sBXEOADX54DcM/z5skAIU+CDBvLS4NmtQgRKgX30e3IcCOPAP9Ps5/PgvDLf+YbFuL+R4+2AxSoQPF3N8+XeACJiACriADNiADviAEBiBEjiBFFiBRnMbyPNlBuh/FtiBSpEaZwR7BthbNeADPvAPJ4iCSTCCHtiCZSItQwUEkJIQECAATJA3IfAPERACGcCCLviDMREjKUA5BpAzFrcnStBqF3AB9MNvQPiEMPIPk+N+OOOEB7En8gaFWvgr/7AiAjBlnmGFBrEnC4ABJ+BVg7eFatgRnbYApFcDVXiERmABESAQIpACCWB7a7iHGjEeEVAfymaECXEC/pU/1UIjK8iHipg2F5ADIf+QL4EohgWBD2EmFxeAAUMSAqW2iJw4EbXmARTAg34Ch86VJ8gCODMQAi5wep3Yig+RBPJ3Hi20hFCEY0/2EEFjABrlirzIEB6AD7kBICEwjCwABABSAiPUEIHjN4DVi86obuzTT/20T0AAFwn1EB4gAnzkg8/Ii/2AD8u0VhDAM4x2U82GhkCjAzIDL1UAAODTjfA4WOIUh8XWAznQA4MxFT5AAbyVASVQBf/wGfE4kMT1QfTIf0JTAuqTBAmiLjIQbxTAIQQZjx8CAQ+QdV4xAw9Qa2GkALbzABmAATNwfxPZjcWFSDNTkiq5kizZki75kjAZkzI5kzRZkzZ5kzj/mZM6uZM82ZM++ZNAGZRCOZREWZRGeZRImZRKuZRM2ZRO+ZRQGZVSOZVUWZVWeZVYmZVauZVc2ZVe+ZVgGZZiOZZkWZZmeZZomZZquZZs2ZZu+ZZwGZdyOZd0WZd2eZd4mZd6uZd82ZerFwM04JdVFwMDcAWCaXMEoAKBeZgX1wX/QAIOwJg2FwSSWZmWeZmYmZmauZmc2Zme+ZmgGZqiOZqkWZqmeZqomZqquZqs2Zqu+ZqwGZuyOZu0WZu2eZu4mZu6uZu82Zu++ZvAGZzCOZzEWZzGeZzImZzKuZzM2ZzO+ZzQGZ3SOZ3UWZ3WeZ3YmZ3auZ3c2Z3e+Z3gGZ7U/4kDKoADeiiebIIABVAAToCegTIBJLAB5+meYuIVICAB9IknDhCf+blZAkEA+NmfdeIAKtABAlonNBADBHCgbuIPA6ACDBqhEjqhFFqhFnqhGJqhGrqhHNqhHvqhIBqiIjqiJFqiJnqiKJqiKrqiLNqiLvqiMBqjMjqjNFqjNnqjOJqjOrqjPNqjEtgBJOCjtSEBIECZQjobOHCktPEBStqkTvqkUBqlUjqlVFqlVnqlWJqlWrqlXEqVCJCGXUoXEzCfYVoX7AmmZUoWSZqmbNqmbrqTQgCfG/CmHvYCJACgdGoX+SAQBpqndUEDKjAAfjqohFqohnqoiKqiCJCoY/+BA0HKqGKhniCwqJAKFo66AnNaqVryDyPAABqgAZr6FQzAAKH6FItKqqUKFSNwAKSKqqmqFAgQBKM6qq+6FFowARrAqq5aq0fRBRswAhowApDJq0hxBQ96BC+AACZArClDA0GwAgHKrDqRDw4QAx1AptIKEy+goNGarTcRpyogn96aE7EKAh+ApuP6EfdAnjggBOl6Ex8ABZOKru8qFRugAkewp/VKExJAAE2wA/S6r2+yASQwAVogsDNxBQRAAi8QsAhrEf1AA3fqDw8bE1rwoB3gsBVrKy/wrIa5sS4RpySQsSCLNAjQBHhasiJxD0cQn9iqsiPxAevJpDALEvf/QLDtWrMg0a9B8KU66xH30AEqMAHu+rMd0a8x0LBGyxFdILQDQLFLuxESsAIFqrFR+2fbCq1XexEt5AESMAFBQLJbaxEhIAJFQALzarVjKxAG0AJS8AMVEFlr+xABsAAWsARDMARA4AJKMLeeyAU8QAUt8GN+SxEIAAIx8ARYkAMAWbgQcbNBgAP5YBSR4bgPEa8F4LOWCxH9QLATALWb+xBIq7Sh6xD9AKSFWboP4Q+JSQOOqbqL9wIqQAAfC7sLcatVa7sMEataq7sKwbLh+rq+ixBOUABQQLPD+2eOmrPJexASUAA9q7Zze7MjQLTN67xQEAMAe71yA6QOoK/c/zsQV0C1pBu+yJKgT2u+A+Gguau+/dCxKau++TAB7Wu+/aCeBIC85psPRxCuLxu6XiGzBaC/15shXkACR1C06vsBiKu55nuzQ3uw6vsPoyu9a3u6KuAAtWu+UxsENGDBY9sPUUCY3Rq+Dmqt/1u67xsE8Wu+cRoD4jrBsZq/E/w0jhrDC7yeDhy+XYAD1Qu+HNwEMbDD3Hu6I/C9NWwCBJC0ILy1ETu0QGzCAwCYTXy1EUuYyzrB1DqyVRy198vCJcy99yCnKQzAAkzAYry8ZRy6RNqzNXyzBavAHLzEL/DGHRADBlvDFTzBV5y6E2wCUyy29gu/YXy9/kC/11rDO/8QA1BQyM3Lv/5bwydrrm+8vHIcvgzcBER8vb5asBKsvhKQvdvrvt77yeY7vh7cxVELqOmrxROwBYJsvtvawuE7xiO7xpZ7vzFwn6pstCw7AuZZw2fcyz8Lx8E8wTxLqeobtHGsxwQQBHXMxzSABK2svsZKxXzcsQPgyMnrDwQay0W8yLTMvbfqsnw8yWh8vULArrhsuQycuW+8ATGQr85cAKNsvk07tKA7x6kszVhcw8YaBFEgvOaboL0rv4jczo47y9w8vLaMw+YbqwNMzDq7riRwzKAsr5vcvJ2LwApduEh7z+GLwXk8wQ6KBOU70oC6AhRL0TCbDwPAxac40uL/3NC+O7/x6dIwq8v3WcNC0LJT8NFz2w/vrMwPfK8YPcfwPMFBi8emHL57rL5NS5gbDNUrEANRoNMqe8XjbMjVmsgTzNA+Tb8QHb4IwMjp3Lwse9GXzL0CvNHJC8H0jMwEkLlaXbIk/dTci8opXcRCq8E1rAULm9XZfKdfcNcgu8Vgbb9nDaCIvbFrvQGPXbH9ULyUzNSOGtR6DAKabMfNTNdMPMFTbQV6fb0d/MHSrAIrUNV7HdPgXMCEHNgJLckom9bJ+9ORLMMFcNnLzK5tfb3xqsmT/bBNiwQlDcrP3NcF/Nesfb3ji82kTAIHvb8YK9QhfA9iPdIm0AEO0AQjWHAE1u3FX9upDJAF0Drc70q9s+qpG/DbtpsPK7Deo0q75EwA8n0ABJDFao0DGrDeGjABUey7RE0AuXoAGkAAcG27XYAAE7CeE4AABF3AWiA
BEqAF6G0dAQEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFBAABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUDAP8ALLwAZAD9AKMBh/n5+OLi5FVGEj8/PuC8PI13IjwxDNexN9m4Oy8vL11OE15eXtXV1Ew/DfLLRLa2tMSkMzY2NHJycSQcBBISEpeAJIqKjFJSUWdXFGhoaLCwsCkjB0ZGRIJuHHViG5mZmaqOLKGhoqKKJ86uNcjIxzEoCsDAwCQkJJCQj+bm5IpxHUU7DBsbHL2iMH19fLSYLOzFP4aGhOrq7LudLp+FJRYRBK6WK35mFO7u7PLGQDUuDM7OzKmpqhwXBLKSLIKChBYWFCoqLDo6PLq6vJR6I0pKTN7e3ObAPQ4NCvLy9G5ZFAoGBHpqHKeMJMaqNNra3MKeNGZSFBIOBOLCPAYKBAYCBH4cgPzQTFA0FN746LCIDLzC0HR+LLCITPrq6FBOZM701JC0KLTUtOrCKLBwLLTK7GamKKyuxD4+KMh8LHriKGxGcLTceOp8SJiqoLKeDNLsMEIkFCxm0JichCA8HA4MGGJ8YCAWNMrMMAIGCIJwMNIygMq6NOq4NOLY9N7U2H5gdIRydEBmiAoOCIKSuBYyaHJ0YLKwIIJwCGI+GHJ0iGJigF5cUMq0VPj63FA6PLymdHRkSJqODOr66AYYFLzIuAoyKOK+VLiaINTE8N7c0DQkIHReCOrQFCYwKExeFJCcoJ6OiIKAHEpONNCeEOqmsDokPOzYuBRmUIaWKGJGQM7ctJqWRLTCnJiWvLKmUMqmrHqq5NieQEB4ONzQRLTuyJJGSNzExCwiwEIwaOKuVH5gyAoYNOLchGhcXGSqjGJiPAoIMKzCwOq4EPbQMOzq2MieVN7o+IKmoMrW1CAyRLym6I6cZGJmGOjQRJqcHPbY5OzwvCjEgDRWFJp+DHKCdLLIMLKcrDJWTM7i1HZGKDA+OLx8dNJ8yEIOQMzEtPLE1CAGIBYQYGJaKHRiMBoqKM7c9KqCLHrmqMy+0FAmKGJ6GEpgTJpudLKulAQEGISAZMq0EI6+jOjY2PCeMM6uKGJMKJiOpJB4SAYGDAoKBAICDAoKDAICBAYGBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEgxYb+L/vzxy3ixX8WPIEOKHEmypEmIGFkscGHBRYYE/i6enEmzps2bOC36G8DjXwCfKUws4Ocxp9GjSJMqHXjCBI4QFwZcQJEkAIelWLNq3YpyAAASSAT2QxJChgSuaNOqXdrP6xB//y4iQWF2rd27eEn2S8DAyAUk/JAM+LdjcN7DiBMv9LfACI4hITQEYHChqOLLmA/3oyDhSQAjAQKEgJm5tOm0SFyYiBGBRQIJDEwMsHy6tm2c/i6k0ECh41wcKPjdHk78JD8XOH507JdbhokTxaNLp7jZQvLl/3SbCDK9u3eG/iQA/3jAIqY/Fh+SfKDwvb37gf0iMJARYoHU9JTf62/P2ISMFJ6lQEIGYe1n4HT9nLBSDC4sQNqBEEYo4YQUVmjhhRhmqOGGHHbo4YcghijiiAYt109gRDWkERKAxUSQicwBRhuJHZoI3RA/ALHQiRygwAMPKPxlmYmcPfABdzR6SCQKAHyFZEJIwCbDDj4F8EOBJvKzgAwAGMHBjEliuBxjJDyQxHMK+VNEXxKcoCAJKWRQVEc7DfHEnbOFWeNFCTxQX2zQQRkCDi7QeQEAJhQoFgshmCCBBgzMBqaeFvYDxAckcBAEAyQEelCCOxgh6UUs3JkAQVEOyIIGospEqYZaPv/hgj8nGNGpRRyksEMQyyHBQwoLFOUPBySsRwGreb6aYT9CmKABC/0EYaunBjGGA3keyUVXoXGdwIMJQlj6gKjKLntpYXxOm5C1z2Z7YgyEwpUaAxnExMK4yZZbIT+dnXXRCbGxsO4CODwr1onWFaqmCSggcREQkEYwqb4GxkdCYUAAwcIFnHJAAVzVXuDcCb1+EOdmrC6QMQsDPMCAyilSHKGaKTTZZBI2A5BfiREY8YTEHQExRABFWEpCzkgHIIFwMkOY4A8fRB21BgAU7MKpJQoN7HJC4LADtPyGELXYGshwpgUDgNz0gSfy47ZGAzxBQgQu/uMPd9nyKwMPQWT/xAJVH8B1ImBu/9PnExfUvbbTJkpLArRFDWAEDyD3wwIPzrnw6FdYw2ipBk+Eu7iEMEbwxHauijwE03EFgcITKQAYgsQH26gBCYaNzvhy/HAwQMz/IGEfbWMJcUFUQAwJoz9CcKCo7hpODP30JUlP/fXYZ6/99tx3731i1isUfvjfn8YcPxRkTAFR1jOXscYsxJ88fFmy8HH50VkuQaMMcBoDTNbLgGOMELvY8UBHtfvXB5wjOvwNxx8ZwMHpIvMEHFBmYidygc4sYAEUoOADS6OfXDTYpXw5sDb+iIAEgoAEjZxgUAb7lN4kgJGMxGROGLkACXYYKVed0HwaMZEQ//qSO4Pw62See1ECeKCBBfDgCZL6YW08ZzkTEC0h/MLBBxIwgAQA4YZDooAFSHABiLWKfFK8i+dy84QdYO0gxwGADEjAgB1ooE0+ZMwOYuCPY7UqjeaD0apk4ALWVWsBYnNBDHjgmBB4Kj5DeACvgIAvQE5RJpZikgZIZhGNCY4fOsRBDLJFARTsoDKWA50JLVmazfzgTOFCo6F0xh4y/eCGEGNAA1nZSiD8IAVDAJoPEWIjnUGHAkMgZASEMIAFkCAALhjA/Hh5GUv9QAYPSICJxNe4JDAAOkBwCg7GiQOcNekJIaSmYoAQAxloQJvYsUiWFvAV9mhJcy7QHAoa+f8SNKozK5aCF+5syBFi8iMILCAcEC6wA0IKTpCQiuI/88KcDDTpAfj8AYPeWBBmPVMDUSMBDlIQgvl57l5/nOhh+BWalrZ0Bwsgpi9N0BcjMIAHC+iNu0zEKHCpFDFjSUAQgpCAohr1eR1FAgtOEAQ3fcyHJz2B2n5K1apyxZ9WzapWt8rVrno1STAS4TA7Gk8qjvWrAOXHUot6gqcmsaP8OIFRT8A+OrGAqAkoD1bROpMEoeCZ40wBDxJHRYL4IwEWYMA45ygBMFIgA+J0DoH4mhUI6gqEMRjCSINVWLvpMAAa0JwFQlBIucQgCUYgLQoYkAI+UpYtJxCSQJDwyuf/YBI70aLpAirHEeYMoC+79S0JyPVapQxzL8/8klhPdM1CMQeMHjnOU3RqNwmoZ6/F7WtTjFCEFy0nnE/wWDMvQFfffCA5N4xLEb4y1ewqhTEBQNNy+RIAH6VABjJgAAogxw94fcBhAmGMzhDoXpx4LgFWlJN3OzKAAADgCTyQwAJ+INKGMecCk+GADV9ozAIbGEYn0IAWp5nAYTn4nQEuQmiKZqlBkUACF8hACBz8TQ/fBMSDcuRxlxO3OBXkvLeMSwI+EAAZTIYH+BGYjWtioxCQlGQ77kgQdhCAC8CHX+qpHAWKIAEJn2AASSCBIZdMkwSV5QOQ68hykQmsK7sg/wkoUBt2BBwC7JI5Igk67wd0FN
bndqS/MghOXCw3hDYPekyH6+6dy8xhcAWGAukDo/BQGZcIDFcCLABCAmKAKCgnKAIn0NgAyiLoRc+EX00iQQhC8CMNhGBU62WAkgfS0B3QFAcmQGUGOaUBOxlhv6amSazquIP+9S/XHbE0b+DDPBSYoEw/gCdGOPCBZ5vgAxcYc7CrB90amigjZN1mAsW97XI70M7mTre6183udrv73fCOt7znTe962/ve+M63vvfN7377+98AD7jAB07wghv84AhPuMIXzvCGO/zhEI+4xCdO8Ypb/OIYz7jGN87xjnv84yAPuchHTvKSm/zkKP9PucpXzvKWu/zlMI+5zGdO85rb/OY4z7nOd87znvv850APutCHTvSiG/3oSE+60pfO9KY7/elQj7rUp071qlv96ljPuta3zvWue/3rYA+72MdO9rKb/exoT7va1872trv97XCPu9znTve62/3ueM+73vfO9777/e+AD7zgB0/4whv+8IhPvOIXz/jGO/7xkI+85CdP+cpb/vKYz7zmN8/5znv+86APvehHT/rSm/70qE+96lfP+ta7/vWwj73sZ0/72tv+9rjPve5T3gIQtLwBLxDBBli+gheAQAcuf4EB0L37CZVABC1IOQT+oYB/TKACM6g+ywvwAgW09+QdeAH/BpbAcg/MwAMpP8IIRvCPCrwA/Sg/wgGI0AIYjMAD7Dn5FAgAghWAAAYdUAMoBwPypwIY4AQEYAAqRwAj4H7/gAAlkHIEQAD2JxDT13ycBwMvNwII8HJM4HIjgAEl4AMtJwI1MAEXiIEquIIs2II00QMg4AAudwMwQAAtJwD/oIEbqC/khnKbYQANYAA1wHwUdxET0AEI4AAIoAITQIQiQgMyMxYqQIEwUIMqIAW60w89UAFN0w8bMAIOcAQEcAT2h3z/EIFNUwMdcARdWAJH4AATSIEI0AD+UAMFMAM4qC9S4AEIYINROAH1J4ZkiAAVIAAdAAXeJxD6UBAKSCIe/3AAbLg2S6AEI1CF9kcDLYAACAACTSgQyEcEs9UBGjcWGAABCJB9PbCG6tcBG7AEWzgDjYiGo+gPPdADH6MDTVABCtABM/ACHdABEIABcKEP8Acii5iC19MP1wcCy4cEDVABEwgBy6eMTUAEWPghDTADOog9Aph9lWMAlYgABSCEHvACDRA8HtIANtCB+OMP2VgATAACNlABEEAEPeCFIFAAG7ICTeAE7OdAIGCC/VACHUAADkADJYAEApB91Hch/mAAmGgDLYABPVA+HdAC5xgXGwABBwABIuABIgACPbAE5XghNPACBZCPstg9KzADHQAy/qAAEOABCiACBikCPf9ghyLAHr5HIR1Ajx1QjN8jiz0gAiYYPETgAAfQARiQj/rQAT35cQLwAgJQFEhofAcAAxXQAx4gkiAHAi8wAQKxhR2ABD1QAA5wfycpliXXfXbTAOoHAfa3fCQHARCQkVMJhGDYf/rghHiBfJXoh1U1A/8Qif8AlS2kBARwADNABAbQl9KBfVnJjl01AwgwfTWgAC+gBE3piyWgOLchAne4mCAAhV4FA8FnlwawBCVgfu83AaBZGyBwAItpkK/lACCgAAawAQ1whzSgAD1QBX6ZFRCgfjTpA8j4VQzYixXgAUrQAU3QmA0AYKeBAI6pD/zoXg0Akr1IAyIwAqtoALH/CXL6YAAe4H4QYJA50AIesAF1M5w2IQJ3RgQrsAEV+Q81YAC7CAKVCAM+IILptRY4qI85WG4dEAUNoAP6yX39GYwlIIBqAQEvYJlp2QErUG4QsH7/AAIVUAEgYJAOUIM0YAATcI1o0QBN8AIX2m4iMKEZyoEwEKL81wD3SX5ZEX55+G4VKQDn2QJjGKKCOAIFkKNGIZYrQAMwwIz0VgE0AJbpSYaWuJgE8Y8nMX0Q0AIZOoYM+AJLWgEFUAAe2gK0KYhVmAMOcKYzgQEYoABsqgL/QABcuAJECm82WgMlsAIYUAEjEIcTqIljWBCCCRJFoYYFMQGmCW8ZKRA+MAO9/2gDImCZR7CJo0kAfciGyfkRHoEB50cQQllvGLACJbABG6AAFdCHLTChutibE0gSGzAB5lcQCkCY+sabHpCSxucDB4AATbCm3/kCHfoP0fcRL2ADgToQUZlvp1oAutkDBlAACBCpBBCMG1AD+akEH1GQaTkDNhBwHtAAbIkEJYABmPgCL3AAKjqeFDEDhbiSAicA2NcCuqig5fh+n+mE6lgBw6dw4jkQVVACRDAC4gebzAcCSipx7kgDEJCb9+gQHXCOR1qwEzcWCgACTlABK0CdCvECCFCFCCCKF6eFGPAC9WgAVGA9ZwkD7MiFVFqErjoDLdAB9QofBHF9wdpxA1XprDNZA+jaAgoAnwbXD9hJj7kpBSCziP8wA+Mnclo4sfAqACvgATRgA07gsSQHsvxZnGdKAJ06cv0AjmG4qrCIcjWgjTZYrCY3tjLocm76cv5KWQEBACH5BAUDAP8ALL8DYgA4AFkAhzY2NNLS1K6urDo6PCIiJLa2tERERMDAv97e3KqqrMbGxF5eXJ6enPX19CoqLNbW1H5+fIqKjGhoZ1ZWVKKipC4uLFpaXJqanHh4d7q6vIaGhIKChDIyNGJiZHJydKampOLi5P39/O7u7CYmJAoKDI6OjD4+PMrKzLKytFJSVE5OTJKSlObm5JaWlM7OzG5ubOrq7EpKTBISFBYWFHSSiA4KCGhiaMz00E5gZNze9DYiLFRMVMrM2EJEUCg6MGBmUKaylN7AwAIGCNDAxJySmEI4UBwoDJagsGxcYAYCCDhISMzA2BQUIFxgdMrQ8KCoeEImPNbA8GxieOz67NzO9NrW4B4aIDA0KBQcHMrg4D5YQJawoAwMGCQgwBoeIPT0xFR2WBIwRBIwaBw4HJ5yoBoIIDYOQNjO3Kx+ZDYuEBJgOKx+5PTg2LqonJSSoIqOmMSofF5klH6SXL6+0EJMSLrc0H6SsPj44IaciGxYGLastAQEGKDEnODYyBoYNFqihMgygH60hCAcFGyiJDA0TGyi4AgYIMDAtIaKeFZaPPTI2PT07D5EQOyotGBaUDgsMODizICOkMi8xHh6iMC6vAwEECwiLEI0PHByYMjMNNzY1KCmsIpydGzigMCwtJakmGwYgMzchKqcqCIcIHBmYKC06BQYCMzArGZ2bMrU0D5IMGw8bGxQaLrKxKaMkN7Y4GxaQAgIMB4UHHxsdDZQFGhmWOR+qLqgwCRg0OLO1F5OZHB8bM6otCAsKCTAgLqwzFhWYB4YCKaikMh+KCRgiNTYyKKyrJackOTY3DA8MCguOFBOcIB6iIJWgKDotAQMEPTcjPjs7PT89EI4KOzu4GxY1DAiSJiGkLrE7AowHD5QZCxQTHx6ZDZwOLp+oMSo7J6MuGp2RIJynIJkRDAsOEQ8PLrEqDYubNbYsBgoKNbOyKCoxJiSgNTi1KymoBocFLS+xFhmWBIQYA4OBNra3AYGDBoaJAYGBA4ODAICDB4eHAICBB4eJBoaHNra5AAAAAj/AP8JHEiw4L59BRMqXMiw4cKD+
0ZIaPGhhQcA9w463MhR4T4SKgQ8CHAiAL0DEmYg7MiS474KBR5EMADAAASSKlrqdLgvhQgUIzLuI8AAhoeVO5MS3DehwQcZ/w7KKMHiBVKlWAEoCCDBQb8RCwoUMIG1rEASFg48yJAgQ4AMKe6ZLTu0RIAAWxEIMCB3rtIZGFBAMOFgAIYDe6/67XjPAr0PBCDqKwGiBL7FOmVEgAFB6L7G9Ao4wNxyxgoRnSHeW4AgQwXSLElgaJAg8sF+LURcmAG74z4DLmAkkJBCAgMWASYo7s2QRAoBCEDQQ4AAhYXLzF3eG6Gig3cVDkgs/8/O8PO/e+jHk1/Pvr379/Djy59Pv779+/jz69/Pv7///wAGKOCABBZo4IEIJqjggovtg88IHFTQj1wQVZjQPTNUwIED+CBU4UczEMABB7wxNkAEGShwAAXXfQjRQA5OQMEBCmRQwgCe7eOABx/QeMALffFkQgH0ZHDBB3d50KGLK+EjgUkJtEBkYgeRMMEBCLiAQAMRBNlQPyuAsAKJ+ixwggIGMOmhCWq9oM8MDkTAwgX9HHQPAQYMUAEEDWig3lIGBHDAACuZxgIEJMBYIT4QsNACVFGZoMAJMaxU4QJc/jnQPR04Bel5HTxGgKKSUcCCBEiZBgIGiQoEkQSZbv9EAp8lBPmbigOQepADKDyQAkH3vCBCC9hFdRCsEWgq0FQwYGArBwW4EMNSEHFwgAsGLLWACE+Riqyy/8zQAgvOVlgBCgHkpCsAB5yQLYwWcFuisft8yxOzqVWbgbTUHmQtttp66m2sDTm4QQP/iAeRpAeQ5WqFvD4wAUEkeDBssa8S7BCsFMig2gL0JDCCrvvoQwEIEhAk7qqt0gurnxv9JigAA81AGaIk44PBnPNq5a6lEGEKAbj/4DYnBzLo8+TPB+FjwgAK78MmPR7oI0MFVNG56AyANdBCPzN4XPCQIBxwQQIjvbDkPgNk8AEATT5JjwAXZIBAAWlC1A8GH3z/4EIID3wgwAbzLnTPidcqQMEEa+8DQAEJcGApPilQoIALB0SAI8QXSBcdAvSwUFvMMkAoIYWqjUCArZ/1o+EIHn94JwC01z5AUAzmrvvuvBPI5HkVTLBAChW0TDJEMphgwQIx6OPiPfoAEEMHE/TjEZMydBDTXQVI8KmaOm6w1QMuMGBA1PcY8EFJCByQgmJqktDBXSV4UEJJL7Sqpj4thAYB3w9AgQkgQoIFHCADKACdBRKiJg6ggB4vkAEJZLCAABRAcmpazd1MgA98OIAyJYjdPvrBAQIM4AP0mFi/XpQwCYCAAvoYiD4YAAIPRE0jUZkhAoBkLEm5i163uQA9/xa4QqRMhVxIuYcHGkCBOrFQIAM40wAH0o+ivACII2TAEA1iIYH0o3MdQApTfmIbHAokBiwowAiMGIEQaACLVdxiEWVIgSFeJQYwyIADuiiQFDRAAHUaCD42EIISuCiORHSVDAyQgkYawHkmsyNB8KhHPv6jKQkIpEAGGYIVfKhoWkxkpHr1gAeghAQmQ8ACrqICGIjGkn4UgPMEqYEQJIuFiFzKDGJggQlYIAVrFNepxIgpFJTxKgZoTQVStYIGDM2MuVwKeqaJkEHCYAMtG6QIViBCM1ZgXypAygigs8qrfFGOczxPClyQAZpFBSbJyZGlZKABFlhGIKBBwR4LIqQuiTGQSfxjAQUM4AADjAuGxiIA9dZmAAU8AAMRWkAGIOgZElSgAjFAAQggcNEJPex5J6QH5k4QOL4MZAEuEAAGHdSBA5zkAAFwwQacB5EBFOByImgAAlQEAet9VHYV8MCRLuCBCmQEnyYoAQRo2jQDaIACH1iBBZwIkQowAAUCSEACBJDVlBRRIyB6k8JghCEZyPMeMuhHDOXZOgLo461w7VBAAAAh+QQFBAD/ACzLBFkAZQHwAYeJcR9ZSxMuJguAbBxERESEhIS4my1mZmTi0oyUfSTGpjRgYGBqamznwT6bgSQ0NDQ+Pjx4eHne3t+NdyFmVhTyykRgTxQ5LwzCwsQoIAX6+fbOsDS8vLzn5+dubmyOjox+fnymjiszKQxURRR4ZBwUFBRMTEyxliwsLCyKiozKysxIPAwcFgTWsjScnJwiIiT23nyoqKlsWRVaWlwcHBywsLBxXxpycnSukSxQQgwhGwRCNgzT09SjiSQ+MgzGxsRSUlSWlpSSkpSkiiw6OjxWVlTv7+4WEgQmJiSdhiSioqS2trTOzswTDgTcujyqjyy+ojAODgqqjiQKBgRKQgxmUhQOCgQGAgRoYkRsRkCAXkgYNmiswqyytKASDBgWalQ4XFQSBBD4uECsnMSsuszUxvDquBBUPjS4nHSORijqmjCmpJB+RnBEfmzKtsQMGjSKgAyUppAYEmA+PnAmxoAMNiiUnLiGXiTy7rxUNGDEdCzQdMhGEEB8XHSSekg2QBAGDBiKnGRgqowGBBjS0uDq+uggFjACBgisaCyUhmimtKyMbgh6WiS6lgx+nKC+ssSAXgiWXiw2PlDe3sRKTmy+yPB8bjB25Kg+JjAkPgxuXCjs7NjO3tAGGhRQYBiUeHTO0qgKDggsGjB8cgiygCjy1NRkTCjEtqRGOjBofiyYloCslKCsujB8hByCbHTatrjW1jDOtux8ekisuuyypKCYgAys1sysgAx2xih0Rhi4nOh+YsgkLkQuJMDm8jBQVDysgEh+HIDO0sCwnFDKnBDC8rBQaFTcxszU2PTe0tCKuox8eCRKUBimeihoahgkQDTiwFRUOBRkPhhWWnC25DC+2tweLihEJhAEDAg+KmAuatSqpCAkCCDonLD66OhmXIDyvLBEehgqNCjI0tgqFhDQLoBEXJB2quSqlAwMCDBUKiCUYnRKWmC4dHSKhijodEjEnKw4WBhEOFASHhSabhzowCjs4OhydmBwhnQKCgQCAgQCAgwGBgwGBgQKCgwAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgd6tvYj8YDIg9e+NuoL6PJkyhTqlzJsqXLlxr1lZjhYgkHDkoYIOlXEqbPn0CDCh1K9KTMCEyYxHARQwUPIUh6Fp1KtarVq1gdmmCiYgESGi9m/OgwQ2rWs2jTql1bcUGHICUEygyi4YZZtnjz6t1LtYiEGFE3voghoSzfw4gTK86IRAmTIDMIFPnw4wONxZgza978rx8EwhKYSOhQg8hdzqhTq67qOcgSFylc12AQd7Xt27hfPohRo0gJfyVMLOGxoF/u48iTS+wXoUOKKCSjNHdxWbn169j/lXBhxANPkjN41P9Akb28edtRhGgoAH2jv+YxXpyfTx+zPgYdMMygUYIGEA4dROBPfQQWqBcKLoSmxAcu/PDXA6cZKOGEU/WDQgQ1YKBhDSA8YByFIIZIVEkoQEAABCiUEKGILLbo4oswxijjjDTWaOONOOao44489ujjj0AGKaR1FhYxw5EzFGGkCTR8aFBwSB5pZBEPDPiPPlG8QIQJRRAA3ZBg4hXFDT8kpcIPGPAgAQdAOFkQETGYeaYKEkhgmUAlLKAEB2rWAOFGYQZ6
RABjZgADRxAE40FLAoKBnQN3oogc9TRKaYixNhiTXhAxwAUisAAPVVAh5AAHhYiwtQGOejWrrYjDbRJBCVH/+gfEEgMqDYHG7kZaSYL6bUgsryACYgQzOUIWd4OO3ijID4AP3gik+EdfngAEHWXgjxaiJQTxhRMgcwJBalATCQAu5YMjWYafIBBTOIjoHYQBUQa37zAHYkj1U4GZu2jQ1RMoMVcDKQATJQHw2gKiWjbEJWBJxigAbZTq30D8DXEg1QWxGXMhVkewrRD7c4XhLJECUTOR7QAAf/sI4OwI/UUTK0Z4AQtn6zgnUjmU0MQAGuNE0g8R4RoAEOsCv+gAOF4YhjVwWPGEfKmGY8mD79UEEcQIVWMFkZoIgpFJY3aRx/Fj1FyU4koH0hwQ+mFgHIgia1B4DoBUl9AyDM9V8zeY0ttHQWADSblQGRZ2gE0QF8ZhuSFQQNsJZxWAUN0JT2QQAIII0wWVuUAhg+IAIeIAOGuZUtVAUIYAEXcy4PgFQyWR0AEGQYJBAHIAIV8AOOGYfX5gApIADsMlsmoCCKozNnGDe4NREGRD47oppBUFo19AAV4ALIFoCOIwPMOJuB2Jdzck8PcJ3sIjQIU0IDAQDHWRvrpJzM/2kbYFYBKxCd0llMDcgY9kdsZZMC0LGdxnlSyQkBLnCOelQBIpCeCUUBYaSUfoEe8PKNrPNqfHUuDgABMJCaBAEAN4VS0+hAJcCfCUUCNLABhJgYzeEcKvMPYJaJ9gJmNGZAJiBgtpGMO0Kh7SQEOeRKipE+HEUAETUYsCVb6bMWPzArBfKTDqmiRkkB4oQuvaU6E/AZ6vGVlbMA/sJhU5UDSWBfDnB+7OGjVAqa/xEZCCOjA4cf7cFVH8hA0kEXTUkAeFSlZiqc6NKCGYBkCJABwLlRE5BzBwYY1/EDHPBI3oGeZ2qm+CKBekZL/vROB5BmzQFLgkY5e6pQH5CojP+qFOvZqJA6E/5Af5FaqZZ6qZiaqRTSTZraqRthSCqQYp46qhXRoo1FqqgKEV4UAy6aqq76EIv6qrI6q7Raq7YKErF6q7qaAD3QqrpKq4z1q8I6rL+6gMTqqqzaA8R3rKiKFB8QAv/Aiszqqvw1ram6qtaaqoSUra4KBtxaq/rwrZ6qAjzwATqgp+IqnWO0ARRgrOmaqY/6rvI6r/Rar/Z6r/iar/q6r/zar2KRABIAAv5apQAbACAgsANLoTdwAQd7sAnLn2DwASjQsCjwsNKZD/cwBBIgATQAoBZblPlwAirQAzdQVh9rkAPQAzHgayfrmAzQA1LAsi1blC/bAnP/OLNruQUt0AKUirMjqQMt0AM967PoqANSQAOvR7QGqQMY4ATuqrS62AUxoAJPC7WmCAZTW7VWG4n6QAE8cAJb24xC4LUnGbam6A8swANla7aR6A9DwAPLyrZtawBwi65yi2j5YEgJYLd3a2L50JZ727eQmAAqQAImK7goSLgfcLiIC3+EOwRC0LhxeAUYGq6S64U1gKFgwLeXe1qZSwGb27lJeAMqEAP3ILqjW7qah7oJKLIry7oJOABOgAEyC7uUNwA0QLu2e324a7O763sMsLND+7slF7xCS7yUZ7Q9oLXIa2JM67TNq3VMywPMG73ZdQRTC7bWm2tSGwJru736/4W1GxC34KtfXTu+5VtlQsAC6Ju++oW2G3AF7vu+BrABHju/p+UPi3a/+JtYf2u/nNu/zvS/JBDAAsxM+ZAAG/ABBnzAy6TABsC4DqxQNYBiEjzB7ZS5LBC5GNxWNxACFGC5HbxUHxwD3jrCS3UCIWC6KLxUA6ACGLC6LZxSL6y7M0zDKtACtXvD38QANKDDPKxQPny8QdxOL0sDw1vE2QS0NFC9SvxMRku9T/xN0+vEU8xMOjC133vF0CS1KrDFXOxM91B45BvGzwQGSFnGZtxMXXsB/LvGyqQPLODGcAxNaEvHdexM/mAAeJzHzKS/fezHymQGJHABBSzIy5TAF//AwIisTAm8AV5gBo2cTPmQuUNwwZPcQR/MApicyQpEuhTAwZ6cSq4rwqMcSAPAAyZ8yqn0wizMyoHEADAsw7B8Q7IMxLW8RQzgBD2ww7mcQDpAA738yzekAz1AxMTMQUycxMkcPkaLtM2szE1rxdEMO1IrxdV8P2P8xdl8P2jMzd0cPt+sxuEcO3LcvuWcPOf8xul8OW4byO0MO4DMzvEMOPNcz7LjD4XMcvgsPgkQAofcz5iTwCrAyAJ9OZVc0A180LlSAyoQwQwtOPlwA/LXyRFNMKkcyhcdOBltyhsdNbi7yh/NNj78yiMdNVuQu7R80i4TzLjM0k9jzMMM0zH/HbQ3S9Mt0wUtgMQ47TJdgAE83dMsM8bQK9QFg7VFbdQDg7Vfq9QEg8Zq69QD07XgLNXdIsdwa9XeIgSXR85ajSt7bL9fDS3+4AXwPNZg/QFnjda2os+GzNbD8rdvDdfBosgBTde5osAGjdeuUskbYAALzdc9ktCXLNh9TbqcbNiu8sIardisksox4NGODSpM4AQiPdmiUtIrjdmXktI2zNmgYswvDdqXItO+TNqIotM0cNOoXSn30AJOwMytLSf3gAE8INuzDSdgYNvUnNtXwtS97ds/og9Ypr3CfShjG9XHjdwUUNXLDSfri87PnSVI4bbSPd1XUt31S8/YfSLV/63W3N3dJ3LP4p0lhDzX5X0l583P6Q0kipyr7e0j7x3f7l0DixzY9F1clozf+c0cIpvY/Z0ijG3RAT4eGS3KBW4hDDAGl53gFaLZDm4iwRzDEW4hon3aFU4eQDvTGd4fqo3bHR4eYCAFtx3i/aEPvG3i/EHcVKvi+yEEWevi+uEP/gnGMp4cNL4BNn7jxwG/Xs3jvOHjQC4ee7zWQ74b5H3kOA7eSq4c693kyWHXUI4cCYwC8D3l/nEFKCCtWK4blYwCotrluPHlQ8DfYp4WE40CLGDmZ35MF0ABktzmtzEA7Ergct4YDKDKdn7ni5HnMYDgfH4aW8ADGHDCgY4aLv+92YfuQT+M4YvuGF3QAzTg6I/OGGAA25Re6YqhDy1Q4pquGf6AASoA4p+eGP4Q46V+GSZwAFgW3KmeF0QBBCgwRmz+6klRARBQAFwQBSnQeLbuF1o4AwHwAhagAb+uGB2ANE8w7BVw7IvBEhQgAU+gJs7+oiQQBgawAL5e7YN07YHL7Ygh13sN7nzx3rVO7mqRABdQ5ujeF1/OAnHe7nrx7vEu73gx0W++5/ZubydwATGg7/vuFAPg7wAf8EzBBP4O6AZvFrKMAfpw7gsvEjqgAlJwDxAf8SChAzzQAoqO8WERzD1wBB5/FpE+6SNvFpdu8idPFpzu6SsvFqE+6i//PxanTrUXP/P3yKoDcPM4z4OsegI83/MVgZQ3EPRCPxEUgAI1YPRHHxEsoPRM3/QPMQRuHPVS3xB8nAD1fvVKAd5bz/U7odaGC/ZMUciLS/ZLoe5nj/ZJAcEFz/YeocCFDfc6ocFvT/cb8cGNjfc3cQPsqvB8LxOlHPg3AdmGTvgz4eeHj/gx4cMUzvgzYcwYI
PKQLxNAO9qV7xJdIAWYn/kiUds9wNqeLxLEHdSj3xIw3sSn7xI0js2rHxL5kH1A//qwn31FT/sh0dVWT/j1u/S4j6sXcAW7H/jgPfx8X/y//xEksAF3nfwbocBj7/wcodd3L/0G8c9rb/0ZcQUP/1392j8QDj33338RiO3940/RGzz+GHECPLD36k8RkC3Z7+8QSOHK8j//NPkPstzg+A8RJQ0Q9/4NJFjQ4EGECRUuZNjQ4UOIESVOpFjR4kWMGTVu5NjR40eQIUWODKmDBoYuJFWuZNnS5UuYMWXOpFnTJk0dLTDouNnT50+gQYUOJVrUqMicLXgeZdrU6VOoUaVO7dkFg1KqWbVu5drV69ea92L0YADW7Fm0adWulQomBo2ybOXOpVvX7l2JQijQGJAP71/AgQUPhuqPgpMTfgkvZtzY8eOO/ljwuKEY8mXMmTUzzjdERQ3Lm0WPJl2aaz4Dn0ObZt3a9WubHzYkWP8N2/Zt3LkvfriQwIxu4MGFDx9I4gIJf8SVL2cumsQG5M2lT6cOOMGGD8mrb+fe/et1A9q9jydf3ih48ebVr2cPs8YGFkLaz6df3+MN+Prs7+ff3+ENFSjQzz8CC+zvBBXEAMNABhtkbwAVYljQQQor3A5CDCa0cEMOh2NCBQwE6nBEEm1jQIUWUipxRRZH24KHFFuUccbHdIBxKRpz1BEvk7Da8Ucg18qJhhP0qS1IJJOUigEaJOCBghPSU3JKKovSxwAJQAAhCxpukLJKMMOsCT8ttcxiJzHTVDOmD7AoE4QALhhgTTrrHIkEN8uMc047+/QzIzLLPBPHPws11CEifYwDgQouvTz0UUgRygeMBGLAAMovI9XU0Hz8AeMeIx8KCAAh+QQFBAABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUDAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMA/wAs8wBkAM0AVwGHWksUjIyM3t7c4Lw8OzAMLCMInIYshXMeXFxcgWscZmZlxKUympqabGxsu7u8dHR0MjI08s1ChoaEqo8s6ursUkYPdWAatra017E5Pz8+ZFUUvKAv1saELCws7NNysZYszq02EhIToqKk2bg6/Pz7JCQk7OS05OTkHBwcsLCwTkEOxMTEJBwEfHx8m4EkR0dHbVoW7MU/lHwj0tLU7u7semccMSoJT09O8vL0kpKUjXgh8sZAYmJkOjo8pqakooYlFhYUzs7MVlZUt5ktNjY0ysrMRzoMgoKEGxcE9vb0spIqposk9OKQFhEE5cVP5sE+2trcxas24spsqqqs2ta0jHIfCgYE1tbUQjoMhn5MppJEDg4M6spIRkJEzrZM4sI8zassJiIEZlokEg4EDgoEBgIElJK4Ojwo2NTISFQ0sNK0duAo9vTUFDJoNiI8YKaMel7IqMDAyHwsunx0KiLAyMKsCg4IehyAirqMBAQY6nxIrogM4PSAKmTQHjJEFGRQ9NTg2NDcXlo8iphkHhY0ytr0sMgwsLAgcIB8fo60XmYYIiwoytqw0HzIlHgMGCgofEQofpIo6sIoglgYCjIo0MLwyswwyJo0sMCcHgYggHxk6qawSFxY5vTUYkRAcFhQqq7EBAwQChg07MCADgwYXnp0uJyg6tAUCggwFBBgyJ5UTCYo0J4QZnpEirAoHjoc4q5UlHBIlGQcyqasuqbo8J4wsMrssO7IyuLUxtTUMlRM2MLA2vbkyLzMtMC4PjBoakQY0DKA6rg06rgQBhgUlHB0XD4Y2uT0PmSIdqbkfqKgbkRwcHpg4tTU3NBE2J5AyvTQLDw4sKZQpp4ggG50uqZ0PnQ4uMDQ0OwwyMbYgFxg9tAwlEpIsNx46OLQPg5AsK6UlKagAgYI5vrw8MLQJsKATExk7OLk+OLksJzEZmCAQExY3tT0yrQQ6OL4duSoyro0YKIocGhAmpiEclooXngYtp4MOi5IrnAsBgYMAgIMAgIECgoECgoMBgYEAAAACP8A/wkcSLCgwYMIEypcyLChw4cQIy7cR7Hiv37+9kncyLGjx48gQ36sSBHIgwAItohcybKly5cjSW5pQYLEihIaYercybOnR5L+Xly5gsMBipw+kypdunRfhxQXHswwipSp1atYf26REORGhhn/jmYdS7ZsQn8Irkjw1wHKCrFm48q9ug+CA6o9BLytOrevX5f7QgSYcYNiBr1w/ype/NGfkCItMu4jcsUBEL6MM2tW6NTBigwhQgB5MRXCFn+bU6smWMLfjSQ4oMy4MuNETSgpMqzerXnFv7op7q5Y4WAGCRxFGPTgzTyzPxQloqNAgUBAkQxAUDff7pdkRcqWMXP/Hz/Wu2G9OMmrL2t+H4oGPFSun0+/vv37+PPr38+/v///AAYo4IAEKrWPP/70s8UWGFE0UYLyNZjQgQyKV+BmW9zQAgMppMCAAkdZKBAEDzAwRQo+oHTZQYE9cAEDHVzIm2MnnDDDCkGcQIEPEEzYTwtQQPHPCkXUyEAJB/XDAwUkCPCCjLsFxQACEEB3gwM0MBACQgdCcMNyVqZAwxF8+ZOBA0FCoRuUq/UTAlAvCCDAmiw6ONA+L9iknUDuMeDAAylcQSebmrXn3gon3NBQRUGR4MCe/2zxQBENoJDCnISmZuhXVyzH2XPR3eCDAA0QFNQKWoZw6aCZLtYeEAzQ/5CDfGdlIAKOFJxwBK37lODDCssBcQGmrWbmXaQP0FBEDyIKFNQURFam6EAzzdBARigMy2qxjO2zhQI2Ftbsnd6+4MMMCmh06qwCAREoEdxu5q0CUHQl2bjeQXBCEEiqKgAPQACBQgYXXIEAEP2MG29W3vKw7wvHTuhdCNr+A0QRSWSscU0knPBAPwv/tUUDekFMEmfmlVAEFMv184AIDMTMIQVFBZABpCGz9w/JV9yA4M8iPscggv2YZBMQvynYz9JsXQCFzwrnbKBjFCSRAg9CIIAADwggyWIHPjgQwAMNSIDlDAiIt4+7LEvdl6QUUPCPjnFTYDCXKARwxQn/5P96hQgvZMRiCSlc5/ZcvSIgRNZab40AClyuTcQLWd9AxGV2GuRPDy/Qeri8nxcbNWehl2766ainrvrqrLf+0oFuBhxCwr+ZZxCFsp9mnoIB696e6zC51wADKwy1ggQQ3Bsxn++lMFsRPqRdUT89BFDcVEck/zvwLfmjAA1QOMAhFDSAZWhOwJU/BYcrBJDwgWlRsIII55Zvsu3cr+QPEQ10cFo/JYhVWAz1GxRM4QQtQAFGtoACzAVmChRoQXb8UYIAJMEHBMyf/kB2px5cYU5I8Y4/GnAC9xEQBSuAArz4lKci7Gl5GmzJoU4AMXJVxF8vAGAGIJAdkqDAAYnKiD//toCAf4gghCeLoUiQ4pp6xciGFCnBDATQghYULwgpUMAWpteCEzhAARl4wQPuwiwojk6JKLOIpSLIQT6R5DBJqAwDAuADHUWmIkDoYmxOgAODne+MaKwTSYBgwRSkhyCMeoFtVlDDfjQgNsyiSAjG6CE/reAIOMlgICVinhAcAQfAylztKpIBKFDg
AXfagggiiJqtCEACb+oVA3DAAOUlcZMR8Q4QjuBFIljEjB2ojRDu9CNaIkgoN3FQXU5whUziD5cbWdsRKHABCCxPl0AsIp+KWcsRUkAEW9SIe4JArFGKEpoPWZsEKJCCGEXMPFvIAQWOoJ21QbABB2oABRyQ/8nfEOEEUHgiDNHpEHUq62Y/Ux71rKkuoRCGQUBQAA6C0AGK9CBHD2AQAAPgKHP+kqAPGSEJknCBFjygBUeQQAsqSpEXJGEGkBOIy+J2gVvFEZ8U+REFaIAqEcxgokLYHkgf4sgaGdWoQZDePjLgRaRtUwgpgIIAruCDwMkEAVPYm3UYcLNnDrWgW+iAWMfaAQiYhlEdOOQoQ1CCtG7Ro4GJTgkUKNSv2vWueM2rXvfK175yr66Rs9NHEenGc/o1jTktq1pZ5I8QoKCsHbgXIg8EhLJCoARvOixDvNMPiZLgAjgriGuyioOO3Y9cEQUiDcAHI81OhFFnosFnRRSYuf/dKAkgzFyfJpqDkzIgByt0bWBluYIckCAFzXJPB/qBwtwSswUUYECIYGdY4RZkJkFQQJ4ukFw8rqCcA+nAFYpwFAQN1LqmukFy9ueo7paEnBnIHFpwkAPmZq0HukMvl0owBQcwKwPtRSwQ4PvLLRyBBj5gQBDidoIU+Ey/tzNwEHhgmABL7L25FSesSCA/CSCgAWIKQg31+6TyCiEIDAgngC/wWo0MOMMlYUDHECAZFIiAlp5zLd+KwIPn9JcIRANwCszLkBfHlyQhMG4RaAc/nj5Rvw9IHgJOIICxmdQHTTpCA3J8ECOf7EcW/k0GhrIt66JlwUeVLYdvUuQiwPj/H2ixCRNfIJvgQtg9N1CcEG5wA46iLXCBtdh3j6zMfwY0J5KyWkwhbMalHjdzHaCSOA80VwjkCAGOfau31ukD7KDgMQJIF6MnS5I8OcAi/QgADsBZux4QqQi38cy1+OQrgHqGAlA4wpZG3eiLBgDVDVhBZGqXgSLQZigzmEERPnYnIIB4OAy4QRt5XdjpCe5OvxGtLa9NWDhT+9sBAiS4x03ucpv73OhOt7rXze52u/vd8I63vOdN73rb+974zre+983vfvv73wAPuMAHTvCCG/zgCE+4whfO8IY7/OEQj7jEJ07xilv84hjPuMY3zvGOe/zjIA+5yEdO8pKb/OQo/0+5ylfO8pa7/OUwj7nMZ07zmtv85jjPuc53zvOe+/znQA+60IdO9KIb/ehIT7rSl870pjv96VCPutSnTvWqW/3qWM+61rfO9a57/etgD7vYx072spv97GhPu9rXzva2u/3tcI+73OdO97rb/e54z7veQwIAgTxhAAP4ewIIjgQLLGADIIhBDJ7whAWogOBVWEAVjACDBSh+ABtQgbjj7YJ/WGAMAlGBBSyQgAm4gAWbfzcIABBaf6hgAwfY9b8XYINu74MMFtgADFCTenRv4AMTWEDnWYQEHWS+9+iGgRVCW5B9FGAJQyAA8kf9+IJUvyFlIMAQJsCCMvgDCe7WwP8SNrCEgQwfIv4AAAgmb4EPLGACGkC3AQogECTE/x8DWEDfJVKADQwABAsgAyqABGWAbvQnEPygAlHwBPg3BBHRBADwA4E3AAkwBtPnWmOgAR+wBCpQABXgAiAAEcEHfyogA0tQANpRe+jWBIMnAwTAJ+AXEX23D9k3BEoAAxogA+53bhbwD2CQADHYGGOgAgsQAfm3BDXwD2FgbhEwEAfYERagARYgAxOwAVEwABMgfdlmbjIwEDLQgx4RAy6gAQRAADowASpgBWTAAi9ocE0AAyCwAQmgA+NHcENgATBwholnhC5QA/s3cDGwAAcAA5EnA2SwhQOHAQdQAGNQADL/AAI1YAMEoAJ/KHAAuAELMAAxMAJxuAALUHCNpwMygAEjcAAEwAJBGHCBNwIbIAMuMAJZSAaHaHD/BwJ/F4fA9wM6MHCauHj/FwNYKAMm+AEE9wWMNwKA9wNGgARIwAJPGHAG0IvA+AMW0IGzSHBLII24eH4DxwI1kHiN5wIJ4AIOaHAAMAHIuAQA0IYHx4IYYIQTAAAsgHBIAANgsAObCH8G5wMKcAPR+AQgMAHlWHAcFgQcMAAYMIY6EIIGVxMe8AQYYABisIv/kIkCpyNJQAJMAIxR4AUYoAUFxwMMMAMm4AGK9wROwAFUUHAA1AVZAAJcwAVP4AEleXBbAAAL//B3A+AETsAEpRMD/MEPcAiQIDAAUvA5C8CJ+iFOTaAD+ScDYDACobMAMNAE/LEPBDAAIyAD5KcEhzMBFXCN+9EEH7CJUaACCTAAxMgtAxABMZCFF7gbFvAFMbABSAAAYMCQbLKWA/APMfADz+gfAKCJ+neGFsCNbNKEMpCK/lEGKjAAO/B/I5AAZEAAGEAoIxABpYhtAJKVMVCUA7B7RhCCA3khVCkjgOcCmSgDRjCOBUAAXkkgjPcBFWAFF4IEWll6IAACUXCa/zB4BfIBBFCAMjICI6ADQyADP/AFIPB4LLCWBRKX5IEEGICFmad+odkEFhB9lsiAG2ADAICMSv9gfDXAfP72A1hwABNgARPwBBswj673iQCHAQkAA/HIDwkQAUsAejYwAeU3cKeHBBMQiADgiNwpcBMAA1sAA7AXeT8QBRognetGADbwAeXJApm5AaBncNxJAIm3AKg3BmBIcE0JAu5nASwAA0MAhAUHAhrwhgEZBTrAmP+Gk4bofBgQiCooEOz4b/CpAtW5AVpoBVggcEYgEEawgRowATN6k9tHcBtoA+n3ATqQAB/gAj0KcMKpEWQgA4AnA/NIcEX6GwIKjLsHNAPnDwWAnFS4nhbgh0ggoehGAFWoAWOgfhGQp/RplQMHAMunAolnjJ95fwRnBQlweYH3lnKqbvgr6ZaA9wSKWnDpB46QSoGLmm77gAQJkJmbCKYHl6ntZ6FhinAH4g/EaR8BAQAh+QQFBAABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUDAP8ALL8DWQB3AgICh+7QXEpCDpZ+JG5cF1xcXNzc3L6+vIJuHM7OzLCVLLi4uCggBaCgoHBwb+7ejPr6+WpqbHxoHDIyNHd3d6iNKkU6DM6tNyIiJPLy9EREROTk5JmZmaGHJGRkZE5OTICAgD4+PKioqMrKzMXFxBQUFK6urMaybOfCPHRiGzg4OdTU1B4YBLKytI95Ix4eHIqKjLubLYlzH1dIE4aGhFZWVDAnC1JSVJmCJGRUFCYmJC4uLI6OjO7u7BQRBDcuDEpKTFBDD11PFE4+DOrq7JKSlCoqLLWaLLaqdBoaHD4yDO7itAoGBKqTLK6OLN69QfbqtA4OC5J+JBoSBA4KBComDAYCBKqe6KaCSDY6SDA6EMjC2HRYaIpWGDxKSFh8WGZWbKCIDLjO7KK4fKq+pF5CkBIwaGBAGFJaGIhehIRAGDgMQFp2GIhqSGRqlBJiOG6k4AIGCIqOeNDO8IqWsNzQ0BIQYCo6OCTCgNjCwMK0sMbUrKKexBQcFLy0zC5STEZQSMowgBoGIEQ6INbUsHhucHAagBgoKIJ+nKq
gqFRGKPCktMbazKakkCYgwHZ8YIBmCGRuVDgsaKB6DIJCbEQyPBw4HHB6GLjE0Oh4SDAeKM7ANAQEGLjEqG5WKBwoDKrwtFRuZMzkNGp8bMjCrKK4xLJ4dHBc1Dh0OI5mGKrkNGxAGNCetIx0DJyeJNS00Mzw2AQMEAowHNzo6OyeNICWXAgYIBIwROro+Ly07CZi0I54kDAmRFqkhIqgiMzCxLierLSeDFZcdMp4yBoWNDY6KM6kEK6YQOzyNGZieOSyONDc0HpWGGBUKHRkcKLSnLjWvIJykHRqWGZcUEJKMJy4pKZqLFRCbEJcTIh4bPi6RHRiQAoOCNLUyKLG6CZiiJimoK6mMKyeWAgIMIC2hKqymODwjOS6XEJKZMR4LG7CJGZCVHB8RGBiGKaWoG7ipEQiKFxiPOy6EFRKOPro7FQ8SKy6NAwMGFZkWHaWmAYGBAoKDAoKBAICBAICDAYGDAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIENa5EeSZL4iKS7kE8mypcuXMGPKnEmzps2bOHMaLMkvXwYGBj644KezqNGjSJMqXcq0qVONPHX8G/KARRGiT7Nq3cq1q9evYGmWJDFBgQENJXJgDcu2rdu3cOPKjZmPAIsZDUb8Uzu3r9+/gAMLzsovRQgGKTKMKHF1sOPHkCNLnsyQxA4DNHyKYLyWsufPoEOLXpqvw4gPJPhlQMB5tOvXsGPLHmmYgQSSGTbznc27t+/fn0nMUOAhH0kQI0JcIGmUJ78LHxhAIAG8uvXr2A/+QIBgRofvRAog//hgA0lR5yQ+aKjaOLv79/BBM/5HA4GGAvgLrH+ggQWI8yXVZYAKPLCwW3wIJqjgXzl0MMGDD4ZwHxEEuNAcSYaFsIMKBna24IcghqhVPvqUaGJ9LKRgHFJIXEaDDSIocKCINNZoI07OkbRda0fp04EBE0Cxmowe3mjkkUh2lCM/IGi43FFMlsCAWhmoQGSSWGap5UQ5QiFBESsd1aIBNhj3g5U5bKnmmmx65aMBDehz3GZPtmnnnXjiKIECI3hAAgkudKCCARmQEGaeiCaqqJI/FMDDCAqYpQIGPIiwAQhFLqrpppwixI8EDLDAggKiIvDAozuk0OmqrLaaDxIXxP96QYOD/mBoq7jmmuiSq3Wo66/ArpljPjZY2V6wyCaL1JL8CEnAs9AWihWz/+hTRAY2/CCBnAEWQQO0z35rQ51c5ogEDRnoo+y67OK4ZAolFKCCCCIgoEIIGazELD9IQFACvSKw0MBQJL2JAMAjqKCBCh0cWq5z+azY7sQUu8SsYirMAEEDDUxQIVHMkqUCAht88IIC46XWkwQcc7xxCTyo4EGmFddsc1IXjzBlxCVKfLG9ECChDwk/KCBzgCbqk49wQzBg4c1QRw0lryNs4ELERTL3T0kQ8OD0QPqo9wEUzg3kgoQNS6322jXlPILHBIBw9UE8TfDADlAMxE8HBaT/VfY/+dAwqKpsF244SzlrgMAICBjAAA15F8RTAxhsQJ1A+TSggQiY8iQQCRsMMfbhpJcOFbM5TPBBBwRMICECBKhLEE82FDCCDXKexMADKvzg+dYpGFDAzKYXb3xEJIDww/IZIGESCXICnsMOQ8w3e0kXbFCAAhPQ0AERpqqwdUkC6TNBAco99PfWJEos+d89tU/z8fTbyWQI3CGgQAdk/64a48W5nkkksAME4EcEIdjAyDJAPoHkAG0OSwiz8qED15UgBBPQgcOYpY8fvKAEJdgAAZxXvxLi6TkEaFkHVLS+HMSrA7ITiLl+sDECpIAGkLqN1og1uIZMsEm2Y8EI/9CXLxkuCQqmsZLRVPCCC5jwiYtanw4UUAACbPB3BMkHBAqwA5VhhQRE0MAMLrcQZiEhPBuQwAVSsAMNfG1fikFAB3JwAQ9QsQERhKIekaQ1Ixpnh3wbAQNl+McG6k0HJVABDRpYGMb5zoee64kN9JcCkO1JBVbM0daEo4EXXK4uHJLKHkd5pAn+IG5FyEEKIHAWT8qwCB1Il0lykIEU5EAHNmAAE52nNX00oAAMcCJD1gcF9cwghsXkARG82McpIoB4W3NhFUlJTRuF7AVBLIHwELADHYDMlwXYwHL44SOzlEABIhjBC67STBZoIE6QNCToNACBtfCDABgoAcH6+P+PDGzPmwMB4wM+UM2ChsiUO2BACBa6g48R0gMMaIDK8pGCGSxUQyPEog520M35CbCPLmBAFTvjgSEQyZD/sEE+hzIQKMzgAS/wqEFnap0JAmpWLjBUH8mZU54szQV01ClK83EBJOQRIT91gQuM+o+QjpQgP9CAAtjJTxo8IAQkFEgxH0AEmdL0q+v6FBFGpQAGFMepBCCpSanaGauWIKv/2OoOvArWugLrUxuIlONwh4QNaKAD9qRBgdSC0qj6SqsveMAM7MrYiuUDqDnIgQvIZpkhTOBQmcMAA3jJzxQsDlMDQQIDhtCAxpqWXevTBwQ0EIKnNZUB7+SWIZ2KRxn/IkcEGTitbpPFLAmwoAANgEI+kFgAA1SyYBe4mkkIUIAURewCbezibqf7Kw4KSgVEmAD4VADPnmQgBEQg7HO0p4APuG57g6SuekPDHI/6D4t0Axk/dzJfI+aIBARQgAbuowACkK18zDVutz4ggv2Gs4jrTbBkelLHja0whnrLkT5SQAAIQMCG/7UvFCjMsRVm+H3wta/n9LHGWl6AWzIkQQq25Rwo6AAEtWSqgmf8GJ/oUgU4hhMZx+ccFzRgQNwZ1ATqRJIifMAA+bsXCPK4pHLRjcZQZm8KfhuCC09gQPAUMUmgMAENvO1bExjBOyPHjxxsAAEhaIANaNAACIAJ/8QhjrKc4ZOPD/AgBBIwjj48MALjapkkZyMtt3zpxqeZTwU7UIlJogdnlM750dkRrWVlC7ohAJbHWpPmzLBCA2BaiB9FYIGf9SHcTFEL0qjGzgVK8NdC6mMHMM3b38DYyRz8qQgvQECQAFcfBmDrA3iR1kfXl+pi+4YEDODB2Mayu81iGis+CcFmiECEEoxgB3zhspcZYJaEvQ2u+zK2uHuTjwnwQAEZgII+kEAAEVzVicQ0jXjsgwCJIpZSCmhABjLwARUUAAKyK1ucx01w9kogBNt7gXap+O5nYw4EG0jz8howqtj9wzL8Ce6WP1C9NGly4AUPOWUouoOEIeAusP/dgHkEnj0VAJwo4GTBbZI5guPi5rP7IrbId77g5NGABilwgdhkHUkP6E+HAtkTAspEaAV4syQpcGTOdc7zqj+G2U8tGyhZ8PSkGy2TP0inzVWD86k72upo7wv5CiaoEKRpfEorSdiPJkPBCZIoZ6PnH83X8WeHO+2A7wsJPECADIDAAx9YXJmwIoEPxI4kFxjt/vbdAf0SwTyAI4AKRqDvDEzAgAD31NkDT3q3TI87jGMNDVD8jw40jWA2PmCBVbABFaUYAkPkju0kSjOql/73X9EHCBrwgh04HkwNTMELHj9LApjsBY5XtBGFRPx/TCDd9dUy8LfPFnJCQd2FXMv/cG
XLHBJ9X12GLEmJ1A1ykHP//fCPv/znT//62//++M+//vfP//7XrydJQ2oQFl89US1Yg1IA2DN05X8MiDgX0AA78AIS+A87kEFHhTk64HwSuDo58EcDkQ8N8nzX92ENWIIw8T9eRlYKcCkX+A8pcGMjMEQq4GsSY2MjMwIisHuYZ4I82BL/QxxFoAM6IAE5wGgHAQJE8AE0AAIUBlslAFD/UAQIRwQekBgNYB9W1INaKBKKoXL2lH1a5QKMRlH6RQADUTsKcBvlk1gfMIBb+IYY8T8hkALmwXoS1BnNsgEPUE8C0Wlp8YEfgAFtCIeEmBH/0zcMwAAzQAME4xCA/xYCPGCGAqEDLKACH2BLGWgW6VWInMglL/gPF0RFXHQsZQRKNfeBHlCJBnBOm2dxnfiKEcEPJGB4dMQyYoYadOUTdxRDz/EC6TQqg+KKsDiMCfEB/2ES5KNaxVVJDJEPTWKJcEUWLPABGVAEElB5ZNKCxDiMNOBwXldF2gg438VE5MIPglMCahhX6mE129iOBNhH0gRDChFtGfN2YDMDPPACkbM125GG7viPEWZIyKECZZIQPlEC48FSBAEFO4AB0iVDHmAlEgCQACmLQvVYMzAEXEczzhgCGbNPO9QAJpUvJIEEL+A1rkWR2wgFELABDfAsDSBSDGOEBXFwMTMBy//jATTgAR0IPO4kMATwPfiRhSrZjlxmL/YiLyVAAKmREMzFH4sTZCMQetWSAWeGYzi2PztWlMP4HKfUARDQATzpPgdxAR3QZhvTMhBge5hzARlQYR1gAzrghlxZl2WEhwtol3q5l3zZl375l4AZmII5mIRZmIZ5mIiZmIq5mIzZmI75mJAZmZI5mZRZmZZ5mZiZmZq5mZzZmZ75maAZmqI5mqRZmqZ5mqiZmqq5mqzZmq75mrAZm7I5m7RZm7Z5m7iZm7q5m7zZm775m8AZnMI5nMRZnMZ5nMiZnMq5nMzZnM75nNAZndI5ndRZndZ5ndiZndq5ndzZnd75neAZnuL/OZ7kWZ7meZ7omZ7quZ7s2Z7u+Z7wGZ/yOZ/0WZ/2eZ/4mZ/6uZ/82Z/++Z8AGqACOqAEWqAGeqAImqAKuqAM2qAO+qAQGqESOqEUWqEWeqEYmqEauqEc2qEe+qEgGqIiOqIkWqImeqIomqIquqIs2qIu+qIwGqMyOqM0WqM2eqM4mqM6uqM82qOmtQAUcAA+miRAGgNDiiRA2gI9cKRGsgL/EAVOyqRGEgVSeiMrIABVaqUtcANZWiNSEAM3sABdKiI9EAMcQAVjGiI9IKQ1kKYhEgEU0KZuOqd0Wqd2eqd4mqd6uqd82qd++qeAGqiCWh1yOqi+sQAcgAKG2huI/xoBi8obK3ADQvqosuGkRkqpmJqpmrqpnNqpnvqpoBqqojqqpFqqpnqqqJqqqrqqrNqqrvqqsBqrsjqrtFqr+neptsoWN9ACudoWAiAAUdqrX7GlwSqsXcGrxhoWHCCmydoVB7CszcoVUwCnhRqtWQGn1roV+/APFOAD2fqt4Eo/CVAB4foU41quTREE/0Cu6LoUMvAPQNCu7gqv8lqv9nqv+Jqv+rqvAcoB/JoTiPqvOLECHHAA+yiwMnGl/7CkCEsTWMqwDRsTEBuxMtEDW0qxM4GrGPsS27qxHvuxCxKOINsRTDCyLTEA/5AEJisSA3CuKwsSOJAALzuzXyEDMv9Lsx4hA0aAs0oCBEbwrjwLFRUAA+oatBrBrkabtEoBAwOQl0qbEEz7tFILsFOLERQQASJbtQgRpB2rtRBBsDEwBV4bEU6KrGMLEVx6tmq7tmzbtm77ttgRAHCbEA7gBP8AAT8wtwSxHv8AAEqwMHpbECYAAE/wAIFLECSAAhZwBCVwuAUhs/bouDbruAWhs0BLufBqBEWLuUiLuZ77uaAbuqI7uo4Yuk7rtQSrpJ7bAzcgABN7uD0gADcgBZ9rtpi7D7xarJRLAcyKuULau5QbATJ7ul4bAUZQrY6rqN7quQMAAyrLvONKvFqLA0bQuY5LvRUgvVprBPH6uQlwuZj/+72hGwRZy7YJQL7eiwPly7Yo67k+0ASgWwMUoKieK7+gK6aO6rliOqmYG6kx8LqBe6X/u7otYLuUq7Gkm8B6GwHQqsCOSwEOHMESPMEUbLLvKrehWwWeCwTfq8GYKwTi67kVcLMiTAE4ELrqW8EqvMIs3MIuPKv8i7kPW7sDjLkW2wK6e7gxgMOgi6UvrLY3gLw/XLXLO8Rem72ga72Ha7Pdi7mT+7kcDL6OC8Lo+7knbMRYnMVavMWLmr+eSwEIfLhF+rkrQAEGfLhlrLr9ywE+bMMCoMaUa7EcUANd67gcYAEc0L6YewIngLk+kAB87LkyYAGBTLlLgAJOUMiB/8sPPRCzidzHgZsPNXAACSAAMQADkDy3+1ABApAAA7AAPVABXuy2jBwEFEABMtADJFEFYvu2khwBTCAAFVDHcLsEnJwAEVAD6zu1jCwDHGDCK6C9RlsFCzAATHADQjAFwhy0+eADMZAAMeADuyy1zQIEvzwAwbzIK9CyHAAEyhzJzgzN0ny4+yAEHODJ2ay3/LACMYvKJOjKNSC8LZAES0DOnDy/CzDNT8vIOEABHJDKy8yz+UAFwivL9Ry4m9wCuKzLi9wDvgzMAY2z+VDMlVwB36y3tvzMB8DQ6jwFvozOEU2z68zN3gwyc9vMB2AELTDOHQ0ENwDS2jwA/gzQkf9cA88czQett1NQAS+NAuk8t/x8ykGgyjUtz7Mc0jOb0Liczw0dBBzAAUON1C9LzAU9y4e7BEnQAkaw0fqstIxszTCtzttMATfgzYfbzDfN0m+LFVMA1tjswXBLEtucAP/cynorySk9z12dtPyw052MAgsA13HNzjNN1HcdzwnQAhWwBFK9spvcyRHA1OrcA05twoZ90guAAglwA0etzlit0Lm810bby7+MAz892C1b1hd90j6g1dEs2kHb12592ms91t282nAryWkN20Fbzuf81trczjLwzm77yoldAbzNs4/NBJGd3CJN2UItBY1tsq8cy52N0RUA2hwN1FAgA6d5bNrTPbIjvdklDc4xYASvvchtfc20TcpzfdvhDbJoDQMr7dwz29cB8NIR0N5tu879/M+XnduTDM30HN8fuw/5zQSAbd8vG9RQHeBsW34LUNX7YOAYSxS2rN0MvrKkDdGRTNGcjdukPAU+AKehfbh9XQNCUAEQjh0BAQAh+QQFAwABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUEAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMA/wAs+ABkAMgAqwGHZFUUkJCQ9tBD9vb3PDw8bFoVLiYJtZgtXFxczMzMcV8avaAvm4EkSj0MTU1O2LQ5bGxs2NjYWksSdHR04uLkv7+/0LA33t7coKCgPDAMmpqcqo4sr6+vJCQkEhIT88lC5ubk3Lo8lHwkZmZk0tLUT0QOLCwsHBwc5sM+jXchfHx8oYkmxaU0iYmKrpYsVlZUlpaUYmJkGhUE7MZEIxwEtra0NjY0Q
TcMhG8dxsbEp6emgoKEQkJEMjI0f2kd7cY8spEsy6k0FhYUCgYEi3EdeGIcRkZEeGYcya82JiIEqIwkFg4E4r08Dg4L5r48xaUsNi4M6ursVkIM7u7swp4smoYk7s5EEg4EZlokDgoEBgIE5r5EBAwQsp4MrMK86tbkrK7EytBEtMrs6qbQCjAo7Ob02KA0bkRwtO7QsrAgYqQoPg5A0KBYvHx0JsKAfByAcFhQPnQ4DgwY3MTA4LiEgFxgPmKI9NQs6ObQvMjEsqZQlkpI8sTQiqbkspy4mpRESFxMyqbQKmLQsqbolnB0PjQk2ub0nngM4NbYnoqQcGhAIiwotNS8cHpgXngYTCYU0u5QXnp08KA0XqaoHjBEmowMjoygNiI0zNz04rxclmQc0nzIHjgcSFoUvMKo7Pjo6uC0lKSEvL7Qmpwc2sZAgG50SEo0lJC4epigzPTQChg0sriAjKSsiphkYkRAhJQoyr40FBBgAgYIeHwYGCgoFDBoyHwseOIofEQo6sQQXmYYzL7M0jKA1MTwZmCAssowzNyw+ODklnBICggw+LwsQkgU+LRQ2sIoKiDAzOLUMDREyrhY+PjcuKIgXlo8Hhg0Pi5ofmwITC487Oz4MlIU3Nb0TEpknKS86vi45Or0ysQQ2rIoMlJM6OD4jrQoZnpEirqMgHxk6nxIcIB8kHgM6MIo7LxAFGJQfF7IzMSs0KAQ3vjo6LI4sIgMCg4IsHAssIhMgHZEBhgUakQYXEoo3tREBAQYeOSoqoIsCgoEBgYMCgoMAgIMBgYEAgIEAAAACP8A/wkcSLCgwYMIEypcyLChw4cQIxr012+fxYv7/EncyLGjx48gQ0b01wRCgJMwYGgI0EOjyJcwY8qcCdGfiQgDQFDYSSGBA5c0gwodSrRmhwQRjJjo0cOGjSZFo0qdOtRfhxwV+vnbypWq169gN1rNkeOERa1bw6pdy3ag1QRRTgaYQKAJ0LZ48xK1WoFChAgXokzR0OGu3sOIRTZx8LTfiREJBgtJTLkyR65pKTrY6cCy588KMYv2wCEKBMOgU1cuLJprEwxRVKBWTTtvhX+tuZ6oAeJ07d+UKe7rp7VfkxggkmqcDbz5130OAkBwwONFgAgUdkB1zp3tPgggkmP/j5JAxeTu6MP6E/ICwo4WKmL00Jq+vv37+PPr38+/v///AAYo4IAEFmigWMLt809GzBHEVUVNNHFWQcJFOFyDByJGUQ8QtKBDBRoIkVZCrjngIQc67MADUP10AIEGKAYQwwkYZpiXPydoMAUIUQwQgQldIbRVBy1ccAFWCSTQgoL/9MNDaRFUkAMFUehggo2f+eMBAiq8EEMUEbDWoJYBUKCDER2cYAIBLQn02gAYEHBCBw5wMEAANWK51lb0nfCXmAn1gwAFGEzWGm4n5HABAQ86MEAO/eiZ2gl+AYqQBzpQ4IBxaTK43AkVXLAin45ykKekbVEaZpATnZAABQg4/6BBDRyoQAB9C7YAggY2zOkABgm8gCpoqlo6EQERTJFDAkhOcQECQJmAwRQUVJAACP/4NqxnxY54UD9GUDAACTHYdcIO416JWxMIWFtBBSRUAMF22wbX7Zg8XDCFCmitx0FvW+0zQQIwmEAcARxMsUOk9WqIY6UY+mMDdgi4tY8KA8CgFQFJAskVAT7S2LBeuvl1AkOJUiDsQAIPoAFxEFRpl24JLHrqyEVhdsIFEZy80GtRTOCSlrDJ1k/MNdDI1U0R2HAzzkI92OJfBuP67aAVEDDcYn4R0KQREYCAgIL9CDHBo+dBrRZFJqgwQQsDxDXBBG1ORFIAUZDQwgQaYP+3A5NNBLCjBhBMAKW2aqu3zwhRNB4Fj/9EMAHDFG51ggrXRkFBDhB44NblmetUwdiJ72kVAgi8oPoLqHdAYsA9OPCCAwZTuI8JDqTOQweUl+777+p99DTwxBdv/PHIJ698UMMvH1wTJwghxAnUm7WQP/s0Ib0HTaDlYEUeTOa981F3kPBO4YEJbaBGtFADUgnogMDMAvVDwA46XOsTq+TL9NYUGJBLAFrAKCGZoAIgIIEOMFABwcDALgLZh652kpMY8K9/MBkLCWhEnIp4i0IeiIEDhDCcLV1AbC7pR+xMYAMdoPCCGAzJWBLgKcyQqF8BQ9fLcIOZwL3wgzGU4VX/LsAYAnTgQmNqTT9iMAAdMKyHGvhh84J4vQ5UYAoJIEEEcgCDTTEnN+vRkWwEghkPRBFaQKSi8E4QAAzsQAUBuGIEENA7B7VmH19KAJDIyBUz/lCNMHEMVIYUgAEkwAZCEk0/HFCzEXSljP+QIiBjghkhlAYCiXwQD/qiAk/xcCt+RGMaJ9kR0QROYbPBDLhCNYEajuaMNiQlSFpzyh18kSJGYKUrTQkDScpSeKLZDQhGcEMHVCACENglL2EVy1+KRU0Syl4HCpmAHgTKATmITZOIc6FPWqQDGBjmWabozHWpQFkB5MAFDPmCL1pxABeAwQRUsIP3jOCJPQiADjgg/64EYAADiCsnRMDFAaTwLActmM8Xe9AXnvDkAhxg0iKnRIF/8CQCGKijQBsCyjmZoAMkhOHnPmqCkpa0B64j4z468FGBsNQEPtuoTGdK05ra9KY4zamkmumWNIqmp0Bk1aF0SpB+eGApMMXVUCniATX14KPKdIsHbGACZY7Spk5y4RSwCAERfTJI/XiBDkggmCnozWOxtIoOciLKoerUfmHTARzJ2oKZ/fQflrxADfYGgwQMoAJozUwTMMZWMJJTjTjSQWwGyTFN5aZ+jKGPP3qQsBb8NKzv4mdbv0pUf4BsgyzbwRQCsEs+FiSsf8WhTTjAgehI8apYZaKpBrJIEP9UQEwiJaOjslLGFgTLh23lKU4vdicH2UCLjLrrRDwgOMtyBY8kWFgTehlc4dr0lLLJjAmm9BPlOmhxWLQmV2xQAQ5cqQmwNGxOyRS0R1qRAkbg7EH28QItrm8rQgjA/v4xXWZmxrs1JUkLUIkZE9RsVG5dECORySSKxCABfyMOG59FHKDCVqAUOZsGRIMsErTksRFccDKlaicOzE0FLSDBAExshCdal6aLTC0Z+zGCKXBAaUOFDlJGwCQyAi19PBqAkC+wJD7mVqY2+UsBw9heqXVFxxGwIA95aL/UrQ4CV9QA7ZYDYJtezMYE8EAHJtATa65rdhJl5BRaYJYmhM//A4YNnMoMe1gqJhZMUiJPOwdCgOR4DUcVEHINNIABHeyzBXDOjQcwQAELgpGoZDyB4bASgFu5xQSsdR2OMBABEmTxL3/RAY5NqQIOGOHRkLbjKCs8Y9VSxNUjooiFU01rVNW51rjOta53zete+/rXwA62sIdN7GIb+9jITrayl83sZjv72dCOtrSnTe1qW/va2M62trfN7W57+9vgDre4x03ucpv73OhOt7rXze52u/vd8I63vOdN73rb+974zre+983vfvv73wAPuMAHTvCCG/zgCE+4whfO8IY7/OEQj7jEJ07xilv84hjPuMY3zvGOe/zjIA+5yEdO8pKb/OQo/0+5ylfO8pa7/OUwj7nM
Z07zmtv85jjPuc53zvOe+/znQA+60IdO9KIb/ehIT7rSl870pjv96VCPutSnTvWqW/3qWM+6uxcAhHOzQABMILcINrAAFoTg3DdIwQbMfQUABGEB/0hBuBWgALUj4QflZgEOCiD3chvgCgYQQRDIbYF/lP3seU8BAwpP7hAsQAQMIPcMUOCEwqPgHywY9wx+gIIQWKDz46Y8Ex4Q9nIzYQYz0LrqV+/yD5w7BRZA/LhXgIW+lxsJjJf85M+Neq9fnvXAD77wh0/84hv/+MhPvvKXz/xhPSDdAEB3PyRwgH/4wNwiCEEIREBuGWzgB7IXd/8KDqB2cwPgChKwwA8EQO5+NCAInAe3EhoQqSEc4QdOKP22CwB3guTDBx/gBOGnbSJgAG4hAyKAf07QbSkgAzNmACnAAkGgfqmHbSnwBA7SBBLgAgtQABkAAC7gAtuWD/VDAziwACtwA5HyatkmAWSUDw2gBE+AA0lwa8W2AQfQAMshAwXAAgtQAiSobSzABAyQAXxiAAzAAgVog8WGAgeQAesiATioAA54bc8nEAXwACjgA0PgDzTgAywABA0wBN22AUl4BDIABVWwAERggNv2ADNwABJgAGQ3dnJ4BWSEbQogAUxgASXgD1ewAQLwACKQAVrgbUyQAv4wBDfAAlb/gAN4yG0WcAAsYAESIAMAcABO8AANwG1n9wBEgANBoHgLwAArwAJQuG1BAIoGIAH4twGXWAIsUALbpn0rYAB0OAMbYABbcQNP4ILWhnghIIA+kAErMAMs4Ib/oIzWhndMcAALgAIWQHYhcAAadW0z8IwKIALS6AMSsAIbwITCpgQ/MAOVhwM+YAEWUABDsAQisALZlgU+gALZyATVeHdc6A/ex33XNgTph38hgAJMwAIPEAQ3IBAGUH3YdgMLkI2xR4QAsAEbQAMDsQAKcG25OAMh8AAhcIsywAA4EITadoz4xwQ/gIr+kAH9p200wACohwIPYHYiECn8t200sALl//gDFpACJ9iJWdBtDPAB5fgAPpAEKbACDliFI/mSRCgDBmCR3gaPGhkCQQCER7AAzIhtBoCTHzCNB4ADWUADCpltFAEFGzADAmABPkAELgCFEpB52gaDB/ABg3gEEYkDkbJ2ZCkDCtCQHxACKdAARLCL3OaFIvAA5cgESvAPIDgQtldtFLGVAoACq3gAtNht/pAFDXCWTnAAZFcA3qaPCvB1TuACg5kCWYlt/RB4KPABTLABRSACelcE2+YPcol6TrAAPoAD/4ACmfd71yaaLDB5o3cAK7CRtTkEGcAAIZCNIcAEC4iZ6HcAKPAD+FeQFlCB1tYV/UADRICYKBCeFv8gAhJwfdpGEiWwAKenfX04ELm3nfmwnEHwATMwjQswgNfWD1mQASf4AwH4AAuwAFeYbUNAAxKQAlRgnZOHePq3kEWgBBTIeU4QBCJIoAYAALK5fviHAlswnsCYbUVwAOu3oAdwABy5dpdZbZ34DzPwl6THBKO3AitgAQCabQKAehppAUMYnvkHoBVqbXT5n6TXeQcABFqom6lobTHJAsbJAPcZBAVYAhuwmALBm9WmmyWQATJAAwVgAUhwBBnwlhagAKkZbc+no2U3gTT6AAX5nJgHl9QGAHIqpymABDPwABtQANwYnfGYD0OQDyYIpQbQD0nAj9cGjQtwAErAAET/wAAHMI1QMAQyMH7ZlgElAABFwADwd6MDyQAi0HVwSqAl8KgpIAFS8J03im0rcAQNQJGayQBKAAA/mQWZqJfXJgIlugK7yQIuoIOAmInbtpo+8AACcKMbUAJp6AML8JjRBgA0QDnoyYEMgINBYHYzAARl6mxBAIsyoAWreYIFiBs0YKjWlgUSoARK2ABSaJFKmQG2Gpw8+HlMEAQAQDlSam3QapsNcJ8WEAQ4QANkmG3RtxxtF6ASwJ9B8JXZloQF4KeB9wRL2CTGSJYGQARfWQBTKANAsZXniYAbyQL0RxDZ6mx3sYjuWp1YeYgCcZDbSRHe2nYlqgBHcAAiUAK0d+gDYzltJHEDEtCzJ7gCRjh96nh2AoB31KYlCmABxfoDoKiU//B9wFlt/QAA2RmeTLAAK+ptTbACApB/0BkCF/ltgdi1zzl55ultWZACqAed9Rl936YFJ2udD4ADTttt/ZABsnkAR0CR4paZMiAD19h8lrEc5BYQACH5BAUDAP8ALL8DWQB+AgICh/n59yIiJN7e3GJiZKqqrDIyNIJsHCYmJMLCxC4uLKampISEhGlpaZKSlEJCRLa2tFZWVFJSVLKytDY2NPLy9JaWlBISFHx8fI6OjMbGxMrKzJqanFVFEUZGROrq7O7u7D4+PIqKjNra3KSKLLq6vJB4JD40DJ6enE5OTKKipHZ2dG5ubHJydF5eXL6+vNLS1FpaXK6urDo6PObm5BoaHEpKTCQcBOLi5NbW1B4eHEY6DA4KBAYCBNrWxJjKtDI6SGigJCw6INbk9PD6xHBkcM7MwEpUOAoOCNi+zEpaVCxQQPCotGxSaIaImKy26BwcFNSwuLp4JNb25NDQ8DIeRNC+8Gh2QKqWoGxaQBQcHMLu5LKcxLrC7Mqw7JpsmJqEsFB2WDRwOMzuiKCEiLZ4mMrKqMKc6KqikBJgOFpUPKh44OR4JIZ4gCJg0JikeMrQ1D46SMLCgC4oMIBynHqQWAQMEF5aUNrKzFaghBAwQAwMGAQEGLiwoPDgjBgWNHqQsOjc6Ojq+DRQZIBWgFRwGNLQzGhgcKqisCY6PAgYIComMIB4ZEZOTJKKmJKKeHp8iPDmwCwsRCweKMKwrDxIMMy+xGgYgF5OWLKssLCsoJxoJAIGCF48UG52WGxkWMi+2Gig4FY8jFRadBgGIGJ2aHxIGKywxAwEEIKYhDxYQJiwqG56cJicvDRQFLKceMTM2HqwhMbINKy+tExmTKLutGhY1OJ4pLrIyHyIjGRAGIBkREpOYDIMQBgQCCJgiBgaCJik5NCcnDIsaJyUqPD67IKIcBwWHD4sNExCOAgIMJJ+iNLk2Ob67CQwKBwQHLq+0Ho8bMrYzFhYGEw8ZD46JKzQzPDM2LrU3BQWIHCQhGjggKh4XLicoFxmQMzuuL6wzDoeHMYwgPjg6JCinIBqdBAQYCwoIOTm2CLAgFxklCIgwFBCTDhISEw2RJyADMTQxJygkCoqLAoKBAoKDBIWFAICDCoqNAICBA4OBM7SzA4ODM7OzAYGDAYGBBYWFAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIENanHdABggHKDvIsHBPpMuXMGPKnEmzps2bOHPq3MkwQYUMQDVo2BfDQUueSJMqXcq0qdOnUKOCBJFBgIIGFSpsWFDgqNSvYMOKHUu2rFmeIBAQmGChrT8L/c7KnUu3rt27eHdSPUHjnt+/eQMLHky4sGGzVEnAACEjnj6/hyNLnky5suWJVD1oQIAgRogO8y6LHk26tOmy8RZUuKBiwYMbCCCEPk27tu3buDnem5cD7u4JDW48KJC7uPHjyG//hfzv3gQSM2B
4ZXovxwUF2LNjMJq8u/fv4A0u/1/u7wQAFnGdOkfwYR8JFy7Udpgevr79+5clxGs+3m/58+k5VQACGUQQTwIFTJCAPvg16OCDhvXnlwzQSQfVgAT0NR6EHHboYVn9lHTAWzk4sMENEiRA31IDSlBAW/0A9uGMNNaIlD4MPEBABRicgIAIJEQQoFMTZDCDBNgtAEEOK9ro5JNQdtQPCim8x9kDC4Aw21P3JECAew8ggIMIG6gY5ZlopinRbgdMcBIICbDU5FIWdADCAQcUwEAGHlSQg5qABiooXX7FeNQ8EeyDQwSDNuroo01J6FcOKXjAwpyQZqrpph5Jeo8/FXxwAXOclmrqqRN5moMCM6yAKaqwxv8K6z39GOqXPisIsE8Nr8rq66+PzgNDBSrAEAEDG4ggwAUWAOvss5nOw8ALAoiAgwACPMBAX9B26y1YflkQjwwoQFCArQLtFoAMNUSAAluSDrRciB1EEEEH8cxzVD/ikotCAVs+dA8NHUDQwgAtoJCAvt827DB1NLAggZgChOCPjPq0oEB8Q7nAla3LpesXDQMoQGAGCFTQlUDWxZDBmBUEkCqttdYq48M453xTPA24QEAGAKSgYUvVNZDBCQtcgAECApQpocg5LKDBAyFccEEIXB1VwAYkEMBeDPvpLPbYY+mTYAAs3MAXc/fo00EHOegzjwUouHADC/r2J9A8K+z/cwII/swzjz80bKnPBBMcMMALMRxA9uOQq7ccBALwVRCtXu12gQcb+COyjM+5AELNIX9+jwMaNB756qzrNB4ManObEK0scE6D6US3oHYCCRhcQwBDylsDUY63bvzxML0eO6niHZDCDSowyB9g/WxOQggKZLBPBgrIJt7wqiMv/vi6LQf72swTpA8LAsTQ1dP6YEDBDC5gwMAKJwigAQrB3wN+8eQLoABn5pfzDe0g81jcPmAAspBZoAEAEIAKLnaPADTAdpf73wA3yEGIGLB0AxHWPl4wgMfESx8hAMADVPSX4WVABhkkXgdnSMODfPBm/xCW9kroqZZUjwIpYNJf/xLwgBfwiiD+k2ENl0jDG5JqHi0gUAsChrvm6K5xy5mAC/bhgMt1QIlMDOP4mEO5zpUOipyBARWnZyiByGCLQioUA1AUNoKgjgAyE6MejeefBMTjAkeqQQIOwDAoaqBiCaBBDnIQAH8Yqh8gWIGZ/qGPC4hAAjBA0ADshjeB9MMfB1oBDlxgIMfs8ZSP280KHiCBFwCAfg+owOjuEY8YAMADLkiBLhVAABZQMH4zQM9ADhCCfezjASR4wT4WcICj+GMFMXjAPijgAQRIgADcQaU2cbYbBkggBv8gAAFiIIEGzDIAF4jBOGPATnJeQEPSIoCQ5JWDAVQAOxUYgOz+Yf8BBoxTnOKMQQpA0KttGlRWn4qH4wIQgAPEIwcM+ySeJhqPitLAVhYIABV34w+GBm4697DARBmKJ40e9KQoTalKV8rSlrr0pTCNqUxnStOa2vSmOM0poTbEkIKGDIQ6DaqayAMDFaBAegipTgdasAIWMAAFwPNKP+IRgQGwYAUD6MABhcpVKC0HRzgAQAwSoBALXEADyhzhPggAgcewDAMvSOsLMlABB8Soq3it0bw6EAMcfGCFmMrYAlYAAwgMoAEiyEAN0uMPBiyAARA4lgJuoIDm5PWyHloOz3a0D/1gajfzeKQ/MOCBBUgPtI+cwANw0ILgYfa19vlLJbUFAWv/sjCpN7vHCijQgGZJigbPY8AaYUtc7xQqAg/AQA7SkqL0yatQ+rAADUCgAAEId3q7aUsOYACUDri2uOA9jl8SkIIYEJQqzZ3TX2SwgB4hYB8hyONfpoQBDKQgAxpYgVvDy1/khPQCCBDuPdB726T2AwYk+IcIZkCCxX5uHioYirJOUOD+Whg3B/aYhpgbD+d+jgZdhMECJIABM833ADWoQQsaEAMV7PMiTwtpPAoQD0eKR2/9yAGCDrDfC/v4JfH42VHnoY8IaIAEDgjtQfQ2sAXgYAFyKt1UT4ADAWNEUvPoQAVckAEXbKAGW/IUDRgQAwI94AId/rGaQXIPFFBL/wH1rcADPHCDGFxgkkgE6hc9y9PmMMADa4OxhPoRARdcsgJ9dQEDcfcXf1hyHynYANNSQNY1W3ojA04BZzYd1vakQAYr6vM/vgjYPt/jz0Es6EIk5bxlNRJtIgAbdseDAhwUiAb+oO4MFjDcS/s6IvpIgAyGPWwW4EADA1jYkjHHHAjPQAEBKBS6PhUqDHjOInr7Rz9aEOs6HoAAImBAA9Plj+BcYDZT2ocLJvDrdlekP+hN80Gmej8UnCQCC9hHuEPTDwdgtQYnGZYAFPtdiPRZHwuYwQUCNI8LfMCMIByQBuYzkABMlgHuznhE+tOBqVV6yTIA9/Y0MCYSrOBiOf9EMA5Gbq0YyEbVCukzDU7EgMzB4AMSiDYOa3ADEsibnxgAwAI0TvSG9AdtDLj2kvXh7wWEAAOPlQHDmkMDFLCgvVVrQTzuKuibUUoEFhoICjzwgA7jMAJiZdJAEA4ADMC86O7uj+B6RSsi57CN8uqH4PQhN65jZB7xQJyCWPL1sAukBh7wOVAhINZ9sr0Bb4e75KNyDxkoQCgawCRvUgD26Yy97EBFewzULhC2u33yqJdLBVmAgX9g4AIy+GQFblBzed0853ru+c8tEPShp/73ZoluW/QRIxSKiuEOhzjzCoBfigvE4rQHvvTF0udtX7KOAQD3AMYtkHIrHN0oUDf/u6dPfqmwmlULCIBIL0DHv1gABBPIW5vnCgMavH+y5y6//rk0aBQwTQIbIAGJBQHp0Q8QkEss9CkXoEwK4CMikAIrs38ZV3eCYzOeghB6J388VXcNhEMGJyHz4AA/sQ8pAxr7ggIkkAJmNzIDEANDQQJoVnASqGZuc3UhUDUwcAAd6IE5hAJVE3tPkwMMgAEDQEFPsyY4Bko05kiZow8HEFXLwRs71mMz6GuVpwAvoAHxoUwpsFjxgkQg4AKvhDcgmDYAkF5HWIVq+BEVBAMtoBIyEAGINVZfmC4BsDQCQHsaWCg1IIAfADY9tIaC2BGYIy8HEAMCEAG9wjdYEgO0/8d1fxEPJ6AA+QaIbANUg5iJHAFcNwABouZJNUACGFAADTAD4oZEZqUYNWBNZvdcPKiJsBgR/NJQMqACnfU+phYPKfAAIABMKzAkBugCFkMVlliHsXiMENFY5fUjQbKHMmJWGSBu81CKvygvBaAAEgBqDpABxZhtyPiNqzYyK0AAD5ABL+A08RKMDXA7+lCKp9gc/rAALjAAoYE63fiJ4JiPrkgr/nAACdABrhFfR7EhCTCJoJZDGHAD29ccBigBzNISMqAWzXSBsngAKDAAMDABDJKGITQBBoMCOmiM+ohZnhIAJyACC2k6+qACOLABDsA7IEAAHhACMnAxJrkPkv/EOwygblq3X/i4EPoAAQqgKC/gAhcgRN5YHSzgAnG1Vi/HkSN5WZLSDyswAxjQLKYzcwAAJA/wAHazlSTAABZQACRAAZnXldPkAfvgkunxkwlBaCS3Aa
3BNBbzhY6mPwvAAhVAggQIlVEJXv2gAh7QW/uIIwrQTuQkAACAAzFQQlHzTexkMh8wAwiAAaNTRQ9hccsCUXTDlIo4lcOzQBYwDzSgApSlc974l3jVNoFjKPMgAzHQKo+Ea402UXgyAei3ILRCAwpVUovjAkvijB6WVDUgACQwSRaQcCGAVOORnDMQArNxD2T5AoqImaqJVwbYABeAkQNwAQKoAAk4ARX/4EsDGYXuuIN+QYwr6JdvyQAAcAJIZYA4oB/7WEGTNQAEYQEhcAP5ZzrXmVcZpkwvgAM4gAA0aSsREGvNhDvtaF3CmZ4+s55uiRAIRwELECADRiBdhDvxUEQoQBD9wAAPx5yv+J84FVJxOAAMAFk1OR450AI1QKLzBQJah56fci8mlJoNUW4esHDyQkS7kmd+oUX70AFI1AIUoABYOWsm6mPOdYm4JTIZ8SkXpAIgEw+t9KH7OAEIMHFIBANJqnQT2qT6GFIYcHzLQZZFKqRdskU1AKID8AFKuo9kSqaaAwAVoIGo4wIwhDsHIIAQQBDzUDsNIKORV6eweA9IqgAU/6RtcxRE9UkDJ9AqXpED+VON/omoTSoDQnFE/zBzd9OW48E3lPUnzdEBc0VQ1qmp1+mcEmAg7CUA69ZoLQABLNEcCSABM9AAjYECkxUCUTamrPqNtFQBOFCUGQAkfZmeGZCNXhEBJDBwW/QCFZCAwjqs39gPAcAA93QCF6AlmgV10ZYur3kBJ5ACFcAAqKmj2PqXHJUDNEB82PVJNvY5+kADuPagh9qu/Nqv/vqvABuwAjuwBFuwBnuwCJuwCruwDNuwDvuwEBuxEjuxFFuxFnuxGJuxGruxHNuxHvuxIBuyIjuyJFuyJnuyKJuyKruyLNuyLvuyMBuzMjuzNFuzNv97szibszq7szzbsz77s0AbtEI7tERbtEZ7tEibtEq7tEzbtE77tFAbtVI7tVRbtVZ7tVibtVq7tVzbtV77tWAbtmI7tmRbtmZ7tmibtmq7tmzbtm77tnAbt3I7t3Rbt3Z7t3ibt3q7t3zbt377t4AbuII7uIRbuIZ7uIibuIq7uIzbuI77uJAbuZI7uZRbuZZ7uZibuZq7uZzbuZ77uaAbuqI7uqRbuqZ7uqibuqq7uqzbuq77urAbu7I7u7Rbu7Z7u7ibu7q7u7zbu777u8AbvMI7vMRbvMZ7vMibvMq7vMzbvM77vNAbvdI7vdRbvdZ7vdibvdq7vdzbvd77veD/G77iO77kW77me77om77qu77s277u+77wG7/yO7/0W7/2e7/4m7/6u7/827/++78AHMACPMAEXMAGfMAInMAKvMAM3MAO/MAQHMESPMEUXMEWfMEYnMEavMEc3MEe/MEgHMIiPMIkXMImfMIonMIqvMIs3MIu/MIwHMMyPMM0XMM2fMM4nMM6vMM83MM+/MNAHMRCPMREXMRGfMRInMRKvMRM3MRO/MRQHMVSPMVUXMVWfMVYnMVavMVc3MVe/MVgHMZiPMZkXMZmfMZonMZqvMZs3MZu/MZwHMdyPMd0XMd2fMd4nMd6vMd83Md+/MeAHMiCPMiEXMiGfMiInMiKqrzIjNzIjvzIkBzJkjzJlFzJlnzJmJzJmrzJnNzJnvzJoBzKojzKpFzKpnzKqJzKqrzKrNzKrvzKsBzLsjzLtFzLtnzLuJzLurzLvNzLvvzLwBzMwjzMxFzMxnzMyJzMyrzMzNzMzvzM0BzN0jzN1FzN1nzN2JzN2rzN3FwWw+nDJmADfhfEI1ACHCDO+wrDBlACI2AA57wDPJDOLIwPNqADHBDO8CzPZBsQACH5BAUEAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwD/ACzDAGQA9gClAYdZWVjQsDd8fHxSRxH1z0Pc3NxOQA0zKgtycnRsbGyYfySMjIwyMjTJqDRAMwzYtDrkvTwSEhSSkpRqWhaKciGYmJmwsLBfUBSioqTHx8diYmQuLizy8vS4uLjdujvq6ux1YxmNeCKwkyzzyUOGhoTV1dTu7uwbGxyoqKg4ODk/Pz62lizOzsxmZmS/v79wXBdISEfsxkAkHQQWFhShiCj9/fxQUFAnIgV2dnRXSBKDbh3AoTBnVhQbFgT29vSpjizlwz24nC8qKiyCgoR7ZRxFOwyenpzm5uR9axwmJiTi4uQKBgQiIiTCpjTGojSmiiQ6MgwOCgQSDgQGAgQCBgQ8PFCIRhgODBjO4tTO3PTO3LRuRmyszMAWEGCGkij6+Nzs2LiMmGRceHysgiyyikwgCCBMMDzYojSynqy0tCBmoii8qnS03ni0zDB2XpQowoCeipAqZNDSMoBcTiicYBggOhzyxtTq1BToxih8XtQaKCjc6Ojm5Nh8fFwKDghgPhgKMih+jrQgMkSMuozwojCamBwqIsD01DA4JDy4xLhqcIAGGBTovCj4xCyAagi6tqjMvjScfgwKCDDqqrDK1tTqvBDS7jD4tEB+Xmh65KjO9NQ0IBBqfmy81MDMuFQEDBBcWnScmISAWhhipoy8xNDi3oSQsCji2PRqcFhADkB+oqB8HICyigy8cCycpoT22ORcYJSsoiCWXmy0zJwyVExgdhhATFi0qlC07sgEBBh8blw+dDjqfkjq6PjKqqy8fnQmJDB2dpjMxrTUxvAWMmisrMS8quiQiqDAyLh2fBiWiihMTGRIXFjs8Lyaigycchje1ESKfAziwlRAMGw8MEQKGDTo2NhIVDQgGDTe1NjSohDaxjTMwNDg+OiysJR64Cj66Oh6puTislQ+ZIgUZFC6qrhubEBOJhDMuBC8niCakkSWnryWiri8ogzqsjTKiizSfsjSolS0zOzM0DCsuLjcxsTU1sgGBgwCAgwCAgQKCgQODgwKCgwGBgQODgQAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIUWC+i/n69ePHT2O+iiBDihxJsqTJkwkx5pthQ8ACEjhg8ENJs6bNmzhzDrzYL4WRAkeUHDnSIYXOo0iTKkV6MUUHJRUAqIDRIgGTpVizat3qMF+ECkpafORKtqxZrflSKLHQkWO/sWfjyp0rsh8AExKYAFggIUEKfnDpCh5MuCA/AT5cuFBSoMC/DwL2FZ5Mee6+BTV8WFDB70SCD0c0BK5MujTTCJgLpPiYj98QDihOmJ5NG+fhGi767VShhIXR2sCDj+zXokYHuPkYNFYhvLnziPlg+Mgw819GGL0ZPN/OPSGTxQDe9v87UYFDBcmju6sXnlHDkRIxAUgwwQLG+vvbL+5DwOJIAaEuhIffgOzxlEICQwigQRLpEejggxBGKOGEFFZo4YUYZqjhhhx26OGHIFKoUmv7AKZQRvukqGKJo2U0U0cXhdjhiEn808EQMyjUjwYduNCjYi7885ZF/KQwBAoWYICAEDHKmOGIqPlQQwZCKLRPBTWsBaQLJAz5Dz8AsGBCARmUcMRmDTopoUp2ZdABBy5cldBlHyTAzwx4nhBBjGllcMQCDJyQwgIfo
LCnmhhixEAHGABQQpxWLqCEDXwaNoQJKORo3QkoKAFAmohCuFIFGcAgRAkZyInQPhJ8gMA+TMz/8BZc+6BQJ5sJ+GAEqKE6yI8GBQjQDxNkqmpQPqz6wAIGHaAgwF9j1XorRv0kUAMLuvUqYgouWHBCPkIUm9JhLHTbQQkmHGHnR/wsYAIGe140A5YsVKdthKOyoMJFG4iLUEZCbAAYuCSY4OlH/cBgJgIn7JNEAgVcK9m9EfIDMQLW5ZPEo7L9O6JA+yBmwZCHHWFCBhYolsG19lJMYJ/65mkDqjBEkG1BH1uUgsTU2oACCyWgAAAAU97s8oAJH+HD0j5wIGVmJQDgcZPWMcDziDHa5QMGvB79XD5MDFHB2GNbUIMJFgiwwdRN/orbrCpZxOnBXjvYGkdtqUBmCl72/1Mla3fuo1E/+8DgpwCskTj4CQIcYWjXdTuHdbgZfDuW3ijolrAFFgyRAAIYFFDotwId5mwCAlhwhAurQR55gSqlUIALTI5lwwcdzJScBUKBpkQGOJBeugBADcXCArW//vKI/MDAGa1SjTWsCjYMDUMSXlo0A/UA2JDCPlQrb6Hr4pdv/vnop6/++uy37z5+rNVE/vuFubhPBHhitBC1++AZgeAEYZ6ejEY/01xnARYwU5z0NzXPJBBVQhvLiJhQgQ+wroC10RgK/KOEGpQgeVNjgAVMEDQjWCADC6iOSkJWgxoUgDkYnE1rpsIE2ZWACeHDWT7mJoATdGQfJ5iBBP95YoMMGLEEMIxhaUZ0ggLckCHV+tPAcpaxDSAJACh4oRINqJITKOGGXYuABQogEyaogAGyoppXFpABG8xgjEncImWY+EUcgkpjJQiWAMrEAgu0AHwDsQsLuiRGLcqxMnQEYw53UwAfPKoCC9ggZLKVlg50gEkz6IAhDznHGHlRkWlKWAdLdRGLcUA11kENCyi1QzhycjaftGOa8qE3V60QA5DJCAAyMIRZvbEErXslaWIZt2NtoAQHw8hhzNOPCHQAMilIgQo0kAElCEAFQhQmZT5puUUKZAYuOMKnlCkAZoLTBOhEJwda6IMCIKBl2oxLk5pYgm7OqVW9xMgMbJX/gNYAQAA4EABAF1CAdyWAAfOLJ1Nc9I9+FYABbklIwkpQAht8aQYt4AAL1maRLiZwXwqdC09UgAEMmI0DFjCCEWDQoFLi4AO4w4ALHNnPjnZRk/tKaEh10hoNKOGn//lpCRJAQJuCaYxORAEMvIS1E8g0mDuV5z4YsLYkJCFgG5jYQVQSgasyAXzIaSoTshfVspp1KTo9q1rXyta2uvWtFWIgRLDWUj6lFa4lac0JksAABiTBZrzC2j5SsIEI4Gw8G2CAENKIV7QygY1KMME/HGcDsupQn1j6AA6QcwIcZACduAMAPBubE+IcgQUVwAEJOpCuT6VEmRDjgGYlOC8X/1ZAABUASk1JexSN2QCs/9jHa1zAoKklrAMWsBUCsgaAL7I0IzYoAAuqxFueFjMfG6gmSxvIrJbY8iNXMsEC4NYuzVbXugwEmwvIyKuQFcBOL13uR8SoLjZpoAYWuOt55yrXHSmBuKDqRxHPc5jvBtcIuVQmDq5V1P2a5LoMWEw/Z5kECxSlNQKwpUCqZUEGaIQfi2KZg20SN42NsAJCbGkESFAC0WBYw9apsAk6kAAAJKBTEhux/FSSBCO8S5YHEXAJJGCzfswgwwjQiHX6oQJJStcI9GqwjkXCYwwcwQiybNCVfNABgLokA4nBQWUxcgIbfA4ATLABbrw5ZZBgJP8JuKxAN715mSOAZigfkJIPlGAEsGKNHwgwwRDY3GaKXCQJFTABirGGMyExYGhDs4EGLAAbGyCUrkwuE1QLPRKwYSAxX7of/m62j+4NUSUFnvCGk5CCIJ4ABqpDHKdNYrHMZKCkKEASBpIIAw/Khq78IMFsowUWH623AEMw7KxL8isnsgBoFH2URa2TApQZFtgJyMA4N+wzF2TABQtYqn6X/a9Z7cQjBDEaXTOSQ7qS+93wjre8503vetv73vjOt773ze9++/vfAA+4wAdO8IIb/OAIT7jCF87whjv84RCPuMQnTvGKW/ziGM+4xjfO8Y57/OMgD7nIR07ykpv85Cj/T7nKV87ylrv85TCPucxnTvOa2/zmOM+5znfO8577/OdAD7rQh070ohv96EhPutKXzvSmO/3pUI+61KdO9apb/epYz7rWt871rnv962APu9jHTvaym/3saE+72tfO9ra7/e1wj7vc5073utv97njPu973zve++/3vgA+84AdP+MIb/vCIT7ziF8/4xjv+8ZCPvOQnT/nKW/7ymM+85jfP+c57/vOgD73oR0/60pv+9KhPvepXz/rWu/71sI+97GdP+9rb/va4z73ud8/73vv+98APvvCHT/ziG39D4+54PqLQgx5EIfkY74cDaOABDyjgANCveD4O8AMgjGAEEPjB/wFYrg8kACEGEIAA+nXA8igogAAQqP75aUB+CsQACOmPP/v18Q8RoLwfBhAE54d/InAAUsADQfAPBoByS2AAIpB+1ncARLAD16dy/XAABpADCtAANLADINADLucPIBADHqADUeByUnABItAADUAE8aNyINgECnABIbADDtAPMvAPO7ByRNAD+SADPyACF6AAQXAB/MdyAOh9DXABUnZyDgABI0AD/vAPS8By20cDARAAInAD0ud/Ksd9O3ABDhgCBkCEDqBy9JcDGcEDWbgCBpB9EueF1nEAARADSKARF5CAK6cPF/AAQBACPSADNKAALOeACqADIhACCiACRbBy3P9HAzcgBTrwAB5whwcQBCFABCvXD30IBBcggiKQAz+gcstHBA8AATqQAyKgiXWocg44iEHQACvgABfgASOAhj9wAz1AAyOgAD0QAgHwDw+QckGghlGgAwRAAwbwAzQABB7AcgcgAh7AgkEwAc/Ici8gAhOgAzEQiqeYAyu3itIXAzuABB4AATywch7AhAYQAw9QhxAwASyHBDeABDJ4fg/wAlWYcjsQBDvQADpAhyRIBEeYci/gAXYoAwGAfxRwgi0nAiDwAA0ABCIAgi33AyxIAQ/wAA4AhxP3AgPQAAGgAAFQiVPYcgrgjh4ABA2QA01ocg8YAwEgfhKkciFwiRD/IJKAaBEq54sU4AEPQII88JIkR5IdOAI7UAQeOXE5mQMXsAMQQAM52HIxoAAG8AINgIr7CBxTuV8NIAI0II35SJSEEYBTRgNZSQABMAFkSRcb2GYy8AI7EAMEkI/6sJRHEQO3CJcOoAMNQIIvcJIqtwIK8AITUIMj8AAByXI0EAQigJF6CQQ/cAEsBwUvQIQBoH4E4AEg0HI9cAAXQAMQcH4QIIcopwMXUAQOcJDpd346SIz/yILqh3/qFwMxgHJU8JkGwAMUsALpJ5K1eZsqpw8HsAIBQAR+mX6nqQPMiQQhEJRfuQPnCAEptwIi8Jgr8JceIAJPsAPDiHJS4A9R/6AP/dADCrADBrAEPUAE1LlyzIcEDRAEFFCDzrhyNPAD0kkABBAEOvACLKcASMADQxgA
ILAEH9GZViiIylgEE0B/FhgFN5ADQQB/DfADmahyPBACItCPE1mAbflxFMgDGHifIBgF49dyANgAM7iBFLiJPXABdbidE3ADELGDGgcCG9oEDbADAwASDYBxCpADMuAAIqAAgukQC+gBAfCjF5ctnEigU1CjO5ADB+CXGpcPvxgEHekQTnB9CHOGVxqNPyADeElx/ZADDYAEDvly/qADQTAAZTpx+XADNACJ/RClLgeAmJgDfPpyUUABmkkAL7cEILCSo8mmvKicLycFIgUAf/wWEAAh+QQFBAD/ACy/A1kAfgJZAIdmZmQuLiyKiozv7+6WlpTGxsRQUFD7+/oaGhwiIiQeHhxKSkzCwsR0dHSenpzq6uwyMjSysrQqKizW1tSioqR8fHyOjoympqQ6OjxiYmReXlyampw2NjSSkpTS0tSqqqzi4uTa2twWFhRaWlwSEhQmJiTm5uQ+Pjy2trSGhoSCgoTe3tyurqzKysxubmzOzsy6urxGRkRCQkRqamy+vrxWVlQ2UBRgZpxydIC4nMDi6vhCOFByeGC8vLTu7NimoLByfHg8SEgMBBAoOjDMxsi2wKgcKAxgakQkYNBOXkicsqwEBBiCeGji+ORmeHA4LDD67OjO3vTKvKwwIkjU8uQcOByCaniSjnjizOBeWEQWGggkIMCaiLBgQFgKMByimLDUvPASGhwSMESCcqC88tRETEgSMGjE0tB6QHQ2LmyofFwuUEwMDBhwaIC+rMBWoIScpnhqWtjspLTMytgaFjSGiJgOCgheUGCcoMQEDBBAWEAIGCC6utC2wOxo4IBYQJT06ox6kLBYXnxERFC2xsTOzvBooCREPDyCiHBwkIQ2LhCmmJiyqLR6kFggLCjg6uQWEAi22NA2IiwKDggYEBxcamC8trwgEhQCBggwNEycqKhOUkCwusQaBiAwPDAYKCiSpqBCOChooODI3tRqGoDEvNhQRkBQalScsujOvMz08sBCJjzi3PSgiIhwXkjCpHzevMD03NhkWGBsbGBsZGiacJju+ujK8oiofODifKTCpOxAUGSSmqwiwIDW0sgICDCyrMyc6LQ2cDhqWhxKUlBaWGg4PChCNDxaYGh8dIDKzIR8fIhASDB6sITW3sji6tDi2NwGAggkYIji3MhQOkxwVnBQQGycxJw2DkCGeIDa8ri4pJyCWIjU3tzWyNDGfCTU1LCCZkwSEGC2fJhUeGDGyjTEuMT6+tx0aGjM1NAoLjgSYDiCmISipJBOWGRseEjOpLSSgohURlT0xLTGMoAsIix8iIwODgQKCgQCAgQCAgwGBgwODgwGBgQKCgwAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDWsxHsh+CABg4SBDRT6TLlzBjypxJs6bNmzhz6tzJkCQJAx1Q0IDBooICnkiTKl3KtKnTp1CjhswnogGDAiwcUIhAIIDUr2DDih1LtqzZpP4AtGBRQwICBQEg8DtLt67du3jz6tXJAQUMGS3z7R1MuLDhw4jralghAAECDCck8BOcuLLly5gza47Yr4IJBw0o0GDAQgWHlptTq17NurVYfh0GrCjgoEIKGCAuQKDsurfv38CDaxSx4cCKCgn6+YuB4kGFucKjS59O3TeJ4iwkCM7nzwWICBKekv8kqSBDhwsUUhggUb29+/fwDfoTcMAC9H/5ZBQocEI89xgXeOBBCy14cAEG8SWo4IK95dPAAPYNlN9+/Yl3QgQFVLAABycYYAACDIYo4oiH5WPABBEEsF13ILBQAlQKENACAPwE1k8/vJGo4448hpWPAhuEIAAEIiBgAAwmNOCPeAuEQAFL/LDU45RUVrlUPv3IEIEHXDlQwAQEaPdUPy6YQMACoV2gHntWtunmmyKR5I8MAqDAAAMRNCABak7xk8IALdhJQwEhvFCBCHAmquiiEo3XjwgQyBDZZFGJQMABJlBggAQQNPDCBBnwyeiopJZqlqUHFBADjlSpYMIFR5n/KuustDJFAn0XsPlPPwt4wF+twAYrrEz9NHCAA0sKlM8JBbwQw7DQRivtRfnUMAALIArUz4kMIDjtt+CGa1A+AcAwgQaBIdCBmbqK6+67wnKXwYAqaACAA/8UsECO8Pbr76g+AQDDCiH8E8IHBiT778IMQ+UTBAsYsAAEJPA23sWCQRCDhxNXTFA+/CSAwQI1yDAZSRJijHJE4/kDQQ0ZaGBACaI2bPPNOmEJgQoo7FcADCpAgJrK+ZCQAQtX7QdDChjwSYIGonmwQgQcXKws0RNdfOON/OLs9dcx5VNCkBGo0IAANKxAQAlEkyQCAARYUEEDKbAwwQcQDKRABSh8/xCBCS/IYDV+WINt+OFf9aNBCBGY7M9PBXhgAKsqP6oAe/7ww8EHA8yAmj8oJRADDS2cMF7KKiOu+upL+dPAA/aNp8AFIaBLeI4Ya6vCARUMPZ4EEZQ+buGsF298TYpPwILQysUAAw2mI6S1ciU4YAIAgV0cAAqlr1xQ7seHL36cCXTgwQcNAFBBBDTQmFBJJYwwQ2gvUKDi7SSV2733qPM//v8ApNYJPmCCFXgABA+wX9euxh0NoGACCITBAnw3Hv2Z7nQfG1wAN8jBhmSLHyO4AGgyUIEPwKABIuia1iRQAwCEJk8I2I72YLA//4HvIFjyx436t0AsKQdHDMRgB/+HOC0N1KgGBaDAaf6DAg9kQGFB9F4/AkCAF7hgSRizoAajiEMFzMABEfiACk5AuRvuCgMq+AALKDCDBLRtgUSMo6lmwIF/kIAAnaNcZ0ywgWxFkV/5WMALsoM/ctHwgkIs5LgksIEJvEAoE4DBCPzRNoH4owYwCAEDuBeCDaiIeHIMpakoIwIKPGAE+JvBqxLAQxVigAE0qGMWuYdIQG5RICRQAQg+sIAEQKACHkABBiqZDwiwYAUpCEACZGBKC1QMlKKM5qiuwztKuq0DD+hAtrCEMSAOpB8zWIGLggg8/twSfwbBAIEmiJ8EOAAEDQCiylyXGzfip1cMiJ4ipcn/T0YVC3AuUAkGKhACD4wgMBLIgAysWQKZQUACEsDADAoAz2SBDKIj+BIAJBAAjxUydfjRwAA+gChtAWAFFIhVIRGwgSTxhqUr8BwX+0lTOIlNAB4IAQ0iwAADViCG3HFBJ92YD+a8AAYR4OkEWqACe+7KAH5jwAAGUICkHvSPqfNHBQ7QAYUVtQU08FYhSxCBENSAIK6D3X3OWdO29uhHGiDABT5wAQvUIIUlMYADXIBXBQDgPGq8gAAMgFdtGYACLGDBBxab2BpQ8I12tMAAeichCMDAWRkkCQRo8IIFEKQfGRgABXRlRreaVkclEUECSpAAlpyOO5e7mD9EoIDV/16OVRLiRwkkwNoErLYElPoo+ESAzXiiLB8SaKIBMpuPV7bgWRIawQAuUNJ9nva6xqOKBZxTxu1JjrkQYABmozvd6pYWu+g1HMhUMAABlNG5MuBhub77zRkMYAOkTWR69ws2Mh3rZFiqAYrCE8UEXAAEGSDIrUxQAa/ql78QtlkgVwA
DAv+DH64SwFovRoIUPEAACpuvAShz3gib+F8GBsFPuWMABkjOUb5lFa8IVAMdiqACD/gAUUF64h7/a1stCEFtBEAoAeC1mBToynZuHIIWCKABQXqBYyvp4yq3RogP/mhmtRxEi/hjAbQzAQgY4AIFXEwGHsgniRXgAgaAwP8EIbjAAqzJYyvbWTOOUkCHZqAhKL6xaDLIQANmEIMjw9gAM3CBBjgARc4ogAOSkgz+NAcBSl7NHxI4gQwwoIAysvXOoC7ReBKQAkIN4Fh+FC5JJKACQnlgAgUQwCfldNicvpoGfJ1KD0PNa9dgrAQVcECdBuCAVLcNARZYAQ0qID8KeIAAO5YBDZwEgBFUIHIAaHSvt22lyjnGHxlYQbGZe7ETFSBhWArAB0IwApL4KYEBwJE/RtCCFHH73m2i8gjEnWqEuOAf49ZWA0zgzH+UgAX/yPZARGC9duP74TyC7D9GAIKAf+9iW+2ArkA7NRV5d7kD8YcFeFcziJtcQRL/p7jFW+kC0apUqw9ogeASQEAlDQQBFOBqu07Oc/fk8IfgU3m/P9orDwAAAY/T0gEmEAPusLdx/PAHAgDggQP0sedYdw8CRuCCrmcg3hgU+vAuJgIVGJAAFVDBBV4wAKaT5EIUFkAFOoAbqw8963j3zbIC9GoYGBFjYn8fSRLQABgIiAYEEMAL9jcnGS21NBcYgMbzTvngiGABGch8DWgG+Irf/eL8gIABRhCDEtSg3uEpCQJkUAMDcEABf6Js5Wd/5T+Ph+IbqK7gvadVEKhgLjdEAO1GTPvir8b2JKnBCnJ/3JbJ02pYknZnC8cPF0zAASo1vvZZM54SxEAG26WB/wZigIFn9mOgNThZPzggsxPEwAVNbMAz82Ek0ndIBWle1fb3r5rx8KMCL/ACIHAAbfcCH7Aq3DFwF0BUaRE5BdACE0ADDQBU+bMBi9csBpgw/LeBeFYSNdABFhCCIdgBDQB2/XACKjACAFYCAJACFiAALnACAOZuMXA2cqcBe8KBOogZjsIPPviDUUdi/VAj3ZQ5RJg6ypE5OgRHO9iETviEUBiFUjiFVFiFVniFWJiFWriFvFYSQKdCN+IPS8hc3xOGOoRbH2OGXMOFbDgT/WB6dGMBZeY9VPF+KiAAT7ZQf/RNHOACdygAKTADQkNi/FCDKYCH9ZIcbbiIcQJ7E/9gAqdmTitDAjPAAE0mXivAAChEPLfiSLD0KShgRMoSABSwVHciIBTAToy4ihxBfzMAgwKQicO0MggwAxZAaBgQAyowASsQKjzmD4i2ACmhiyGgKoEhAjBDfhhgAASQG2LCitCIEUNISRgwGlWzMo9CAnwiAgLwAMVWZypDHAwGHYlkWS8wOdGYjhnxSt2SZdoyAoyjHaWFMbDhHPdhEMAjZSWnjvzYECTBjrPIhK7zKmZmRu7mWxCgFvoiKkWzWmh0N/fTjxLpEP94JwEpPQHAAklCZ1gWYBRgQks1A/d4YTOgRpYIA+g4kSq5EBXZjv6jLAogAHF2PzdUEiPwAXb/4gEsABgFUX0R0DMvoGQrOZTS01wWeUv0VwGRhID7xGElEAAn4AJZ0TQfIwIcJQPr01RMSJQqCZBblJQegJIciXzuZnZG1kMJQAATMAPaxpVD6ZVYhgDAJJZkSTQGEAIuAkf5AABmontuSZTVGEuDA5YRMEF1STT7lpcIUSwPQAB++Zf86ChGKZi4lZSRZJhEI3UA9jhLiCVpmU14xQ/a6CgYEAEVBZkquWoaoAGxCAIpoAEZwDYX1gArIFqkFzEGYDKEswAOkAFzcYIqoALVNgINQDs0wE78UAM3WAMawDcotRuoOZFYAgAvsAImcACYYkC2kwA5NwC+EoAeUD95YUMSLXd1J0gBLxCer9YCGzBnAsEPDqSeORVrpxGd0jlFANB1XZdoAHA//CADiZZo+ukCd0U4EjADPMkdAWAA9gIAGiADnUZiJLCMGTAD1XYCKWSf0ck/L/kxhKMQHSoRAQEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUEAP8ALMYAZAD6AJ8Bh01CDWFTFMbGxrGVLHV1drueLn5qHGlpasWlM1lKEtjY2d7e3J6enPLIQ1ZWV0c7DPr6+GJiZCQkJNDQ0O7Ta+/v77KytGpYFTIqCm5ubPLOQhISFJiYlzs7PERERJqCJKaLLIuLjK2trNy6OjY2NLy8vNayNxYOBKqTMJF6I+Li5BwcHExMTHRgGpd+JJKSlIKChM6tNK6iZIlzHyQcBOzHPxkUBD0xDBYWFOK6OywsLKOJJJqOVPbqtN7OkKKipH5+fNKyNl5eXOrCPoJ6TIaGhKCCJIZuHa6NLKampDIyNPrwzLa2tOfHTWJOFCoiB7eWLYJyHPbihOK+PFJCEObm5Orq7D42DAoGBMWqNHJaHObCPebGP+a+PCYmDNa3O9y8TEpGLBIOBA4KBAYCBOa/NAIGBKaqxISciNCcEHhEKGpEcMR4LNrAKMq+PKyqlDxwOOTw7KS+tJBsSBQQYK7StChg0IySsJB0DKqETAoOCDxgiOjo3DJQFCTAgLa+nA4MGJBISIaKmDwuaAQEGHKi4MiirOiuPMiWMJScfOj65M4wgFh0YNrUyMrezPi6FNr25BooKPbU4JKQDNicPPj62BQwaMTSzK6uOOro0IiyJHJaQGpoGHpacNjAwFp0GK7KLCggwMzOfHpmCEgmFOK6EHh4YOy8fOjAKMqwTK6kDHZcyDQiNF5EQAoYNMq2EPCcLFhcgDBQTObk+DQ0IAowKJKYMKpsLOqirLZ4dDxIQEg4IAQMEHR4iMjArM54yOrUsOLo5PDk7MjG2MicTH6QJIS0hLaaTDwuPNrGRJBgHHLigPLQkMrY8FhWaFqihM7uLMry0Mi6zK7uyLrEuF6iJODU9EguPLa+0HSSmERYQB44HK7I7BRgUJBsdDAuROh4SK7adB4wRMrQLOD0fAYYFObU1NzULM7SwB4YNKyYwAoIMKqEDMrYrPDA0ERQZFhcPDwOQLii6LC+uNDA8PLUFHYcgAICDAoKDAoKBAYGBAICBA4OBAYGDA4ODAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnDhxH76LGDHqo8ixo8ePIEOKHNlRXwcOKFG+eBEiRId9JGPKnEmzps2G+hyo2MmzAgQrBGDeHEq0qNGjBff560BCiRIdSoBUmNABqdWrWLNW3McVJj4YFV7400q2rNmrXdOuEKCCxdm3cOOSTMtVXwQrAlYIlcu3r1+FdPdtEFElA9e/iBP7DaykygQdXRVLnqw18NcKHPBFpsy5M1G6Kya0Teu5tOmYaXNCyEv6tOvXFNNuSAJUX2vYuHMD7trBMeTbuoML/8cVHwEISTRvHs48N1cdAhQ4oNu8Ou59OAgQ8Afcuvfv4MOL/x9Pvrz58+jTq1/Pvr379/DhW/RHv76/jQnT4qOPz/Ze4vrsJ1B/8b2HXRElJKigBSz8l5RFHRQhgggcCL
HBXiYBwYEI/yRRBAvcFcjePjpMsBoTCzaYHz5CTGDFBGxVEQIOA+ETggoKCCCAaCrMKOJ6+0gQnRIb4GDkCvghtA8JbAGhgwQOMAEUTPvoQ4IDJEiwgg5CCFDFAT+qF+SOynVXkI2YlenBVCsQ11qVEUAggoNhjjemAkqssMIG/imEQwkqTNfVn4FS1xU+EfzzA511hjdmBSJYYAEHEUCWnw4KKPDSoSFUAIR/xempgwMlSMdoo98F+cMETFggwAIVlP/gwan7eKDCY6kRgBmo+rAgwo5WLECAbaiah48SEtiGjwciVMCEBAj1WoUAOrhZ5QEQMMArC67uKIIHxBZr52bPeZnoQfp44NtAVWawq7Vd+cPtBNOJK15g8b5QAQyM7tMBjkqwi49UIeCH75oCjGUvePgW1ykMSRK0z1ptsesPA186eNjEEygA7cKpNryCCFMi5A9tQQlEYqYkqPwfV0rc2ibI4PmzQn/66LMCAY69JJA+0FJpVxUldJBzdnLiQxw+OPiDMz4rFAFBCUrTbJ1SBwjAAQEZwFBCBQoMOxCTImzg5gYc4AUDAUlUIIAHKneQBBMhcB2CAFO5ZbV3OSn/oEIVVlSxgAgOVC2QunlZKwEME/wj+LdCBcmB31YEPgEHRu/tnUVKeODA5x3o9Z8/DoDrskVXlq5XQSt0wMLnLCihsOa012777bjnrvvuvPfu+++vnRoSncID/1fDKuNT5AaaGdRwlf4Y2TSoxdGHA5LGc4a8SRx0rLXp7IpMgAUdTwB5XSRI6KICLWcv2fO9TuD2DyUEWy+8dJFQ/wRJMMDEBMPiCg5CUIWdQGABcHPf+/A1MivAAAf6wIG7Eve8FVhgARlAkj5sxp26KIEFUCnBAqqiwPedLidE+9g/cEAYIcDvAFYIQZme94/BaKqE2nPYvjCEreQ0jIWawocE/zqgBAgGRiAbYMIIcUiZeCXhSxLzAAQm0MHASEBwQribAswXgSoepoYiJCETFxOYDVggUBLTwRQvhC81rqYELyhC24AyQySGcYx+AZoEJPAk7pgRjQSRwBRxcEQSQWA1s1KKriaQyC9u4I54lAuJfqAjARBOMCIApMrUOAE2fpE4apySEysQFLo8comRjAuJOKCgJDhggz+AIrukSMXbkKgCo+FUWMoIyVSS0SIP4yFy6qgyiglKhyHg5Q19+cucWKEEH8MOYSJAvcj542GgsqAoZWMB9jFTlfiSAGG2w7QDuK1NXPGHDnQAKnUpwANO2xleSKAfIYqQBRcJ1zcrg/+vnLioBKycigMG0jdqHYoAlRMBA/A2ARd2RQc/+EGzpsaBHwghYvtES8PSxYBMTeAH4PtH/Erwm+I4QAQKWMAEGACutNiKJzxRQBFml1GNPs8fe5RAB9kFNNFFRjA5DZG19LHOJ+1xnYSsqVKX+hmmOvWpUI2qVKdqryPijyHk+qktzUTVmiDPZlDpI63oIkSnSGCGXdFZWLHX1Zts9CQTqEIFwIY5jOIvSCFQwFwVEIKScmUDGcBbBZ7ZxbbapGHXVIEA6haCr71TY2mRgAVWAwOvWYEJJb0mBBTAAAJMrgqfMixNqAOgDnSgeUF6gZwiRhd9ZAACTBCdBFDmnzX/vXMjJolOAkUrk09eNW5TnF1gCDXQuuiEpCJ9rQiShI8i7JC3QyEtcZQQ3PClhZPo5MoKSlCFgQ6sAskkKLZEQCPoHvY2xlnt6dLSgSluJF4/AMo/0ruoGh2HWuYdyfM2Ez8F6M1QtVoNldKpL371arNKyBnQRDBFMebXI0pxnQdY4AGfCsUkgAKC0goZ4BIMWCkF1i7JSkAABxwgkw1+MEj8xQSeCICaX7QSE6pQBLPdtStSFED4/MEBUnZFCbEMlvl+sAABBEzFEF5BBopQWQLQ88Id4G4RyntjrpBgis0jzmAyNigWZKDEUFJAbJE8PNJiWAVTlljDJIAjS4FS/7fwIohxwGtXMkvEqjJWgYZJk5q08Lg2xUFoEgjZMB2USm925shbv/YDHexneZGDktHqwoIqSOd6DojOMYFGghVcj1kqgAFNE31nfF3skAJggEQnFIKPpbcEPvUHEP6WoAUsAAae3AAQcmQBQPF1ZqSODb6au8UJdCxTCrCApXJSghB40iIOWKgAfuAAL3K0BAIAKD6DDeGNBiZnEgsXuar5Sely+9zoTre6183udrv73fCOt7znTe962/ve+M63vvfN7377+98AD7jAB07wghv84AhPuMIXzvCGO/zhEI+4xCdO8Ypb/OIYz7jGN87xjnv84yAPuchHTvKSm/zkKP9PucpXzvKWu/zlMI+5zGdO85rb/OY4z7nOd87znvv850APutCHTvSiG/3oSE+60pfO9KY7/elQj7rUp071qlv96ljPuta3zvWue/3rYA+72MdO9rKb/exoT7va1872trv97XCPu9znTve62/3ueM+73vfO9777/e+AD7zgB0/4whv+8IhPvOKZWDyRk4EfNjhBPqhkcn08YAcxQMAMntD4jZPhClCoQQM0MIUU0ADkpidIPmYwhCFMYQs1CMIFQG6C2Q/EBijQQBemMIUh1OAIIK/BDFSWjwcgoAZdyEEOajAFA3Rc+VtofpVo0IIBZGEENcj+FFBwg44PoQtdKMD/A8YAgB1ongoXKIDrB/CAOlP8Aghw/QiyYIAWFAAKARCDSB9wgQBgwP0UV3wt0AIgMHomcAQYQCsWFwQJEG40wHpDMABP8HHZhwAYwC5ikAAgUAA7gAQFEADv1XEggAAucAM5gwEpgAAg0H42MAMFAAAehwX/AAADUAAJcAEDgAAtYAMq8wQ7UAAbB4T4YXlBwHwDcAMv030goHEJKBBkYAMtEAQj8AU7cIEghwAiRRxjcHkFYAA3cAEgsIIgNwIDEAA2QAMGgABQkAD6Z3kD8HFU8AQA4AIIUAAFEAMzQAOdR3HO5wJukg8JEAMaUAMj4AJPAIAZZwNcwQ8JYAQD/2AAAeCCIHABVrhxGqABI9ACT2ADN3AEHPgAMsiIKeBxZJgCOwAFA5CDW8B+fSIQo8hxM/B/GLADXBB7QVAAM5AAN4ABlZhxRCAQKOAF+8CIKDAAuegEMzAAI7AFMbBxUkABTbAFYMADYfAAKXCLAJAPA/EARsBxMbAFW9AE0Jh8OVAGU/CIF5AAAZCCG7cPNmAABYACMuADFDAF4kgB0DgCUzACAzEEG+eJBsCLKfAFYOADS7AEPUABXLAFHQcAKRADhDgASIAAJjACFtkETfAPW9AFHPcF/xADI2ACvKePXVCHylcGZeCPHJcABgCSRnABO2CRMRADQWACNbAFFv9pAkiwcTSAAR9AkwPwAQWgj/q4kPzYcV1oACBQA64He6LneibwAQnwAKfXcRDZelPQBf7IfCAQAA/AgyDXBdEnlsgHBQZge6i3hGIJjjGwAwE5ciPYBdk3AgWQAglwAiPnejkwBA0wBQhwBA8Qcm+okd8He0OgeVcwcvu4kWQZAyDQfSE3AghAkTlAlnQJAIiIcV1gBGmojxuJABegiCJHkXY4BUwZA
yWYmRU3fP/gh/voezVgg3gpctWHfQu5kQOAAWbgJiFnkyZgk1tQAJpIciMgllMghUPwmDIociYwEK+3BUGQAr3ocQjQACPgfDF5k2UIliDnAjFpBA75D9n/t4Jk8HGCiABOYAPplwIpMAK6BwIT+HFDOQIB8A8YUAC3uHyFyJ0fZ4o0YAMu4HtyGQRRMHI3AAItQH1OuQWg+XFvGJj5AI8tYAAm0ADa2YD9uQNjYJ9QIJk2iXzst4cSZwNd6I4pMATR53tT8JgiKnHG95UGsIxZ+X3/AJkfNwbwyJKmSZMoCnIFYIU0cH3KSIggWQPs53HJqIhj0AIN8AUgEAT7qJUIgKEcxw8u8AH8sAEXMJMtkH4zGgNoaXE78IoCQQM7sIIzYJMfcAMzsIz/wI/8kHEgkAJCQQbGR4YDAAXn6AJfkH0dtwMfQCUncARcEAMt8AAGwAW8x5Ae//cBgaoP1CeFF6APGDAAU9CMHycGpmgDDzCCOVAAN9CCOfAPOwByVuqkCLADCeACIKAFLnCcIbcPGHB8AxCa/PABNUCTNQBy+4AFKJirJiirBTAFfniUHfeE6QeSBaAPZEADIDACrOlx+1B8H4AAdpmkN1CqvAqp9red+hCIKbCEvEp+I1iCXJEPLVADORCtHFclTwCPZTibSUKm7cqIG4iAexGY24oBaYgCCbChAsEPATCY0pqBOWgAVSkQ7+hxY2ADkYcBM+CY7UcQGOCHYziTOsifMxhyW2ChXWCo5Ylyu9cF11kQzkdylVkD2qpyJAt8i/eyMAtxWNhy9Kpyzm0qrisHgy2nrys3nTH7s2tXsyiXjEJ7cjHacglgrCmHoyznhi6Hs0AbtVI7tVRbtVZ7tVibtVq7tVzbtV77tWAbtmI7tmRbtmZ7tmibtmq7tmzbtm6LGC3wcg/QohxXqTuQsCj3rZjKci67bwEBACH5BAUDAP8ALL8DYgArAFkAh2hoaerq7EJCRGJiZDo6PMrKzNLS1MbGxEZGRISEhKampBoaHC4uLEtLTCYmJPLy9JqanF5eXFZWVDIyNPb29OLi5H5+fO7u7FJSVJ6enG5ubKKipD4+PBYWFI6OjCoqLLq6vCIiJJKSlDY2NObm5Nra3KqqrNbW1K6urM7OzJaWlP39/Hp6fL6+vFpaXHJydLKytHZ2dLa2tN7e3MLCxIqKjLLAzMC80GZY1Ew8ZLLGsJimeEpQONK87NLUsEpacM7e1BgaCDxIMFhOdODq0A4KCHiIjMLQ0GZYGJjEnOLc+FpklHxUgMLM8GZ2QBAQYH6IcGhOZBweFFxOWOD45KCkkFw8UOz67FB2WDIubNDy5DgyKGyQhDRQZNjyuNjeyKSosHaQsHaQWODq+OqktH6YhMjyiBYQFFSghCJgiL6k6OJ8pGagJGbggKSgsAwEELK6uDIuEPTqjCYmSDxYQBwQHOLM4FpgdPTc2Mq8xDRwOJigxLLY0PTyxNbSyJCOeDg2TAQMEJCCiKCIiBAwQJhwmAgIMJiy6NjQ3KZ84NLe9EpMWJiIsOTs5GBaPDAuOK6smFRaYDA8MKSYmIyOmGpacFZmWLSsxFBCTKCYsLLA7JCmoOLY5BgQCHxwmEpUUCY4PBQQIGZ2WJCarIKImIJ2ZHZ2YLqspHxkRH6EhCIgwIKMhL6kfIJyfCxQQFQ8jMQygEpiSHY8bNbI1HawhGJ2cMC4vGag4MR8JMDAtBgGIMLI2Jh8XEAyMGYYgJSclMzUzLZ8hDIOQCLAgCJg0HxmcNjg3EA8KExCOAgYIF5aUMjMhMzS4AIGCNzq5Pj64BgWNLScsKy6sDgkPEBMSAoOCGpmeMLc4MTKNMa8qAwMGLjy1Gx6cBJgONy8vLa6qDRQFNLO9JiyrOLczAYCCFROYMqksAQEGJjotBQcHEw2RJSOiGpgVHZygPTEtDhISIyalPro7BoeIAICBAoKDAoKBA4OBA4ODAICDB4eJBIOFAYGDB4eHAYGBBISFAAAAAj/AP8JHEiw4Lx5BRMqXMiwYL8GCiJKjKiiQb+GGBn2k9CCRouPIAxQqAAAYcaTBOf5GzFiwoQPBBKQOCAApc2BB3PO61CDhAd/N23qzDkBxAkJJoNmHDqPHoAZKD4oRcmU34YZL/olncqQKYIUBzjk5NpwqD+ZIjqMJbtw6IQWBpCuZZtQZ78BFWA40ElXoc4QCrDS49u37sEGJWgQGFrY8NkLKtQSboxz3ogWJ1xonUsZZwcJGPxN7pyy3+aDpFOrXs26tevXsGPLnk27tu3buHPr3s27t+/fwIMLH068eNt7DiYwWHCR6VaB/ToweHkPofOE/QjUAHGAxgYX1a8L/5x3T8IGGgdAeCBwmjFODjJKgICgwICBF+GZCrwHwEAJEyrEh4IAzm21gAgViDBBB/xEUABNBf4zDwc0nKABPx180BMEC0QooQAGKGZSByKQYAE9leV0jwUkqACUhBwcUEADJrl3FwUKvPjPXSUoEEKKB1lFQkkDkVhBDCiOtxY9FlDgwUXjCdAdAUDO8wEMJ2BAUD8aQHYPkAL540EALEAp4QQypNBASkS1kAICKUVwQY5V/tOBCiTE0B4DMBiwJpAj0FAAnDi5MGcHVao0pgXtFaUmmwe99WacD9CppE73JEDBP4PlFCMNHFR55VEE0fOCl2AKBAAFG4h2UD8R/P/nQJVWkUTQnUcmKeFa84BIwwhFelDBiVXewwIJECAq0AgyIlDjZAvgCcEE/vDT36AqckBApxO2UMIL/PjDgAfIdigejDJU0AIEJpxggAb5EQCCAiOYxJ9/KEAAwgwyEOghdNq5ecAGEuRnmQwmTFDjPRhscEAKLdTA3r84+ZPccs3l1I8DIZj56gLTOeDqucaVbPLJKF/XDwMSRIABA7omepA/HLgQQQP8PLcrU/4MIIO7BsgAgI4FWpnAAQackEIGCMRcID0D2OfBCx4U8G6SBfKjQgkyWMCCAifAEKrM80wAQwka+EOPPxEErXCBsPLLwT33fCDsT5cuCUAFG/D/MxA/GVTwAreoSQj4DBpAOaGMNe1c+D9i5plUPy88sIG5j/9DwINiDbRABgFokHdSC0BQwQBJzSPBBTCEMFoDJMiw10D+1LBCAqP/vUEJLmzVQAAgfDAaBhSg0OFAma7gwcwCYOA8AjlbxbvvwAvPmQQUmHD8fgmsIMJBHGB5wgktAECPVTNEQL0M1meOwQMo5Ix89zVI2EEDLkjgAgZ73Tlk6hF4QOtGg4AZgIABSSERBSwwHtM4ECGZCkACdJWpC4hgZI9jAAjUlBQHoCB9ddoRBlIAAmBJiAEyiEt7anQWn3xpRy4oAQykMrqcaI0EGxDABwSAp77tKgQDKFhOX6R0ghgoJwIgQJvHnJMdBZQAYgU4gQIEYKYIpAAFbyPPALzlEQOkIAF+g9RQVvYC+kDgBQzQCnQ44AEL5ExFCEjABhQgAhdsT4xjaQqDOtApnETHHyvshz8WgCE1CiQgACH5BAUDAP8ALPcFWQA+AFgAh+bm5D4+PKqqrLKytEZGRNLS1FJSVJqanDo6PNbW
[base64-encoded GIF image data omitted — binary attachment embedded in the sample text, with no recoverable textual content]
5UPLzIxCIkMMy+NIByRPiyRDQ2IBQQYKzCuEhYFLCEDAoIMJZsSNjCyPDCzJZiHOqkzLaELIy2iG5AcBgoKPb01NJ4yKp+LMjU2MqWNJiKDOji0DQePGZUgOLAXIBYYHpAKLTK7NicNOrUqFxyGNKcEMy2EMywKLzCpL6gIDJQFGZuQEoiKAYGBOK+PAICDAICBOK+NAYGDAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKbMgvX76C/CZq3Mixo8ePIEMmbNLBAIYJRGwIKZBRpMuXMGPKfMlPwIV/B3AeeDBBRsuZQIMKHUqTn4yUHQQIMaDgBoECRKNKnUoVIb+rBPn5GDBAQNWvYMMONfoAilexaNOqjciviVsXNg6EcLG2rt27//j9SEAgyYUTFwT8xEu4sFR+IBRwPXBhQb7BhiNLjsmPxw8ZHQhcsNFksufPIa+KXqHgQAfQqFOzFX21iZEGCVTLnn2QtWgeExoggEy79+e2TSxWbGLzQATfyEFXjjDBQAchHRA0TUI3uXXJPOIegAJlwM4JIHhf/x+v9uqPkgiMYDAgoAlW8vDjy59Pv779+/jz69/Pv7///wAGKCBGFjXBwz/BiUfgRQI9Jp5tog0IX2XRERBCCBUY4IKCAukVXRAXLhBhVhC+J+F1+URwwE4KPHBAA0n4wGGKD9xwwAknBMHaQCXueKJ1+XRQwQI+uIBZU08llI8ACTgXxAlG+NjjiD8m1xZLeVUkBFdn1VZRRT2cMIGUJVZJn2gulHaaVaLlY4CYO05popnjiSbAA0B0aRBrbsIZoZxz0mmlXgTcMMGBbF7V55jvARqooL1VZsMNgS3U5puMYuUoh5CiVpkBO3XAKZ+YxuljllR2OtunUDzQAYOJKv9a6pQkpqqqpzz0ECqnqMrqZ4+1PnrrZJ8OcIEQwgZbkQE3RClnh6cO+xsPBgwAxKsWCZeoRdQamm1LjkqLWooNnBBCDxFEsEAPC/hkFQ8LIJCACi1cYAACPXTWK7DifsZDAg00sGLADfwDRAQKIqZCwC8SrEB1+9rWr3IypKvuAhizC/GeTQiBrsXqvqqsrROXbPLJKKes8sost+zyy6rymiXMdtmWTwEu8DBqaz/0zIODBOVjIM4skUzzVxB61UAF7lrVBA5B4PlAEhG4N1CQE4QARAMhhJfs0VPZ5kIFJ9QbnpILQHHD1KUdgACsBYRwgHctALGC0WBTFaENF1T/cIACTe8pgwINYPBDPjx0cAEQQlyNQwcVP/CA13mvlY8QCkzQARAXBF5QipR6DeYJFTDIGggXTF75Wvy4EIICPsgARecKNpHACQToLJoPLTwAVa93AkH56mLxgAAUIspgrOdB23BCEj+wJkALUKwA7VXBi048WEE+EITOKzAmg5IdbIfsVQUg0MIAOFxfE57ab6+3DK/fjdgAgCfs+g0hdLCCAAgAwvoaBzz44U1+QOEBBg7QA9GAAAj5U1A+cKCCGzRAchcoTVcwkr1oITAoNQHCABYgAAHgIC5AYM/vaiMDA1QgCRMQggCAAITxZaWDB/zgSyYIhBv40Idla8EJ/1z1kJqcQAH64hEOv6ZDl7TOAAlIAALk9aQDTKAHNvQSjwrwmgSAKyNaSZ1PcthEkZRoBRAM3NPykiXLCKQJLkjADVSwoSwJLXGpa09wyjgWCIWPdg3qQO8u0pYEXCAEQajABSglmA4FqQL/UACOVFCBIIiKjyD04yGzeLkDqICQQQrBA6ylAAQErpDe0cmKHmAAWGFSJhDKhww8l4/YgSsflwGBDFzwGIMUYAUg0KUMQABMRL3ymMhMpjKXycxmOvOZYCGTxPZkqjKRjIzQTEhFcAYCH9iPXx3igZFW4IPolSgfLiDmCnj5k2lms4jwCgEUygYF0ZUpLyuYwAUOIP/ETLGmAEzxYQMUYABEQeidEDEKAQbwAAXUbYy0yscC8KSAefrzKjwgwgkGUAEETMBFRHCQOxHaEFnKQJZQqKE0NfUDEBzOeRPo1QRHGYGLLOkC7LsnSSPiAmtlMVifw1RBmoCAG1QAS//ok44OulOeHqCG7htpUoVKEB4E4QYYgBU/BHkBkWKzqQVxwVPH96ygUXUgTSDCDb53tQX0Lnq9AqulWCNWqNIqqGL6nIoYly0ZVOChTJWrl+g61oh5cFEk+kEFKGWACLiQn3b1oGB5JLbCYuSwZ0WVDwjwop2EgABD3JBOJxvV1ln2sqlCLGXRVxIbREAGAgCMe0ZLWoP/1PWnq52TaveVlRSR7q61vawLWiVam4kUWm5qlmH5tAIVQEFU4AxuXtD5AwHMbSUumG1lFgDdgfzABS7AAO7EWVyjCEAG38VBBRowAX1JVbpBuoB8hdhQBSDsKoJ8QB1bFwT58vOIFyCAOYnKudd1ZwIQe29waQQEPDWYcw3E3iF/Z5QKNFhqeKKOooRAAAVkkAAdSCJvpUugWMrUlYo68YgU/FUSu/guMlNIjF9M4xrb+MY4zrGOd8zjHvv4x0AOspCHTOQiG/nISE6ykpfM5CY7+clQjrKUp0zlKlv5yljOspa3zOUue/nLYA6zmMdM5jKb+cxoTrOa18zmNrv5/81wjrOc50znOtv5znjOs573zOc++/nPgA60oAdN6EIb+tCITrSiF83oRjv60ZCOtKQnTelKW/rSmM60pjfN6U57+tOgDrWoR03qUpv61KhOtapXzepWu/rVsI61rGdN61rb+ta4zrWud83rXvv618AOtrCHTexiG/vYyE62spfN7GY7+9nQjra0p03talv72tjOtra3ze1ue/tHDACACP6xgzHXwAMk+MAMPiDmcW/hHxbwwBBoIGYHEAQAZ67Bt/fN7377+98AD7jAB07wggsICxIo8waqUIQZlJkF+phCFcjMDxaMoAhPKDMPOIAFfZQ5HxRQgsfLbAIJTMHgKP9PucpXzvKWu/zlMI+5zGcOlBTo28zjfsGYRUCCfzwBAPQWcwr0MYIRMIDMJMACADhAAgqQWQIW+IcW/oECe4d53DE4MwdSgGYPpAAAPyOzBl5AghKUwANi30EKUoB2MhfB6GbOgAekYGYYcCDoZSaBBmie6yIo/cwp2DuZS+AAFJTAzBYgwdG9jHYOECTqEuDA4rcMAA2wgAMAcEsMmJCBf4wcyyf/B715sHQaWOADU/g8ljNQAxMMhPQ1eAEAsDADJWi5BhJYPD+2QAOOi+AD+gj+lce9AQqkYAMVoYEGGCCCIogABfp2+JTxnQIK0IAfFqg+CzYAgA9UQeI837r/7aXMgSqMoAQZyUcJROABD9Qg3UzQgsglkIIAxCAATgaAEopwbhHcvS0WQAUQIAEAUAImwHH6IHIzIAE6B2VWUHYWcAQMYHoMUANTIAE5YBEsgAJFUGUiMAV3lw8akAIkwAE1kAJb5wFLsAQxQAImhwVSJgE1UAUoYAL5wAIekAE7wAE5YAIWwAEjgAJrBwCTx2Sp9w8zIAIAEAMfAAAWQAEekIQvwAIsoAFDwARVwAEUIAVR8GRFEHxWMAIvQAMbIAFMsHYiQIAlQHYfI
AFF4AEi1mRFUAVMoA8SEAAs4ASo9wEMUAJHwAQiMISHJ2VF8IXP14aSVwNFUH0UwIRV/0ACOWABB8ICUOaGM6APHxADADACTFADO3CJX+d+bxhlGeF6RSBxSiABipcCWAADAAADWHCGAPACWSdlPKABHlCISrCLEvABFycBHsABIiACAaABOlBlMVADEFAESsAEuygCTAABM7AD7qcPYIdlENBxIscEI9CGTwADL7B8KIBlp9iMIlcFErADVaAPHLAEAgEA+HZl2qgPVVAF/idud+dl9TgDTEACG0ADDgAD/2BMWsYEHyACWRh1rkeKbJRUNFCLB8EAmfgPOTBuU5Z8DoB7oVcQDfgBAtl5pJgPNEABJCAC0qh6WpZ8JCkCGSBxXpZ8LwCEGTADM1CPVjYYw/9hATHgAUqQARhHj/pQbliWEQWwAck4BT5ZBeOnhE53ZVGwBNyHlHOoD0wgkFzmBC+QAlNwiuvocSOQZQdiATmgiKc4A08AlAIxcVSGKBoQABe3lT/ZcVzABFnmAVYwBXBplmqpb5iIZfS4lVhQj8H3BEwwjh4oAl+JBcvYcB0nAfRIh5xoZRCwjDSpD/JHlcMIjSOAgvEYZR7HBGbIBHX4mAnYhCggAuW2kFBGl1gogykAjcHXjBIgAbuIBCJghz0nZRRQAgFghsPIBB23A0NQAm35AUpQA0MwBEPXZIb5D8DoABQgjCPwBH63jjWQBYTHARzgAEtwjM3JZKa3iXb/6Jg7MIwkEAMOMAQ1MIxTIAIvwAO36GT6xgJPCAA2NwKxuAMwkAMsUBkOsANnKBAcAJFMln309wE1oJ0lKQGI6QEW0AQs8AIokJtStgQOkALAyY4xwAF2FwOyVwOuyHVDoJpOdgTRWQMooJ0m+XYcoAEHwg80AABPAAEZ92QiSQHNB4zhWJwfoJ9OkBdRYAEBgKDfmWTsJhAv4HUpgKAOwH0wQAIv8IQlqAEm0IBPZgGIKYRfZ4A5AJr+5wD9KRAlgHtPlhE8IKSKmQIOQANREKFaOQUfgHcCMYJM9hNVqqSvyQD0yQBrNwQUAAA1YAEtYQHK2XZJVgLwWQJDIIRarbgEFgADO0gCKMAAV6AzJhCMVKhzVrlkH6CdIBoDkog466eDMbAEXTgQZQgDMHBzTPYEM9BxnZgDNMACDrB1MOB/cioQFICSTOaMoKkFWhCpX7cBhKqCjycQKSCUTdaMojmahbgD/5hUG9CEJRB1VdZxsSmawccBPxEDPSl9VZZ6jkmHSvB9HuBKWgaPALAD1XmBMfBuL6kDMTACW0mAlAhmMEp2k3qvY9ZiUxYQACH5BAUEAP8ALL8DWQB9AlkAh5aWlFZHENra3G5ubKmOLICAgb6+vN7e3GtZFDg4OCkhCe7kuoaGhNbFhFRUVIqKjD4+PGJiZMrKzDIyNGlpaU5OTPv7+S4uLF5ODJ6enItyG9LS1FpaXK6urJqanHNzdI6OjOTk5B4eHHRgFCIiJJKSlEpKTIt/V6KipNbW1BISFLKytDkuDiYmJF5eXJuFMaaadPLy9CoqLKampLamXHp6fO7u7Orq6UZGRLqyhMa6gM62VBYWFMLCxKqqrBoaHMbGxEJCRH5qHA4SFLa2tLq6vPv55/Lv5D42DLq+vBoWDF5WPMbKzJiSgBYSCOLi2B4aBA4OFA4KB7aytOjq+Nj25JiisBw4HNDUwJ60NKZwKEQyQOzMNLik6LikxHRogBIwRMoygM7uuGZWYF5ClHBc2M7uiDxKSM7GqHaWmK5+ZFqkhMLO8K5+5IC2hAoOCKSikMi4yCZi0HAagMzQ4MaigBJiODAsRLi4rLS2oCYgwAIGCPDijEJcTBoGIOjc6BoWNFxgfNa43DI6NG7ipIheiPDO2BgoKMrQ0KDMnNrG0LKqoDY6SGZqWKC66FRCbNzUxMzY2FJoVJimmCZiiDh0OIqgiMCoGFJcVIJ+kG6k4FJYaG7CJJqGkFRKQOj67EZQSCo6PGxAHL6kqNTk2DA6IEJKMFh6YMS47KaMkIx+HHRgSBIQYMLI2EQ2KMbu5LboNAgIMFxqYMR+KNKkzBIwaFJYQLp+oHZ6YOR+qKCMuGp6cM7UgKC0rMDENAwEEBQcFHhoaAYCCIh4pNTO9IJCdM7G4IqOeGJmRIZiHGRqnI54iOh+KDhSFLKeqKyQRLTG7KaKDM7S0AQMEHRYcGZCWPD6xHaAeEQiKDYoKAowHDgMQICWXFQ8TKjutEJKZC5STIqWsKByoIh4bLTSuMLWzIheTPjg6HB6SKyCNIRAHAQEGLS8zPCutKCixDgsaNjk9IJyiCTCgBwoDCAsKA4OBAoKBA4ODAoKDAICDM7OzM7O1AYGDAICBAYGBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIENa9MejxQSBMlT0E8mypcuXMGPKnEmzps2bOHMy7CejgI8iBv51YBAEn86jSJMqXcq0qdOnUEH2CzKDyIwMKIpsMODAX9SvYMOKHUu2rFmd/XgkmEDih4gJBQ50kHG2rt27ePPq3Wuzn1+C/SZoxcG3sOHDiBMrPuu3nz8TQHokWEy5suXLmDNPxHchiAkKKzYUUKG5tOnTqFOXveAByIYQBx6QWKm6tu3buHN7FBGhwIOfDCZ41U28uPHjuR37w0digAEPLZz2U+GggIcZKBg4II28u/fv4BU2//YrAkUIDrSX9sNhIIU+CfpSbPAwIX34+/jzqx7v9x6IGB8Mpx4JEXAQRAIQuODDAQCIoN+DEEaYGH+NiTCDDRTYl5Rf+AjoDw5ASBCEhCSWaCJZ/siQQAs8qMDDBTUIoM+I0tnXzwVF6GPCiTz26ONRKnxgFQAgeEBEChIMcE9U/qjgVgIPCIBCdD9WaeWVIflTQQZEGNBDD0SUUME9GjIlQg0zrACEAB0EISCWcMYp50P9MDcBBEFAMAEP/vz1VD8tlNCDPjLWwEOZcyaq6KJm+SOCDBNUAEARH/DA6KWYZgpVY/4k0IE+FWgq6qikzsSfCg/c8AGipbbq6qsRUf+oAgg2rArrrbjmalCdfXLKngAu6CrssMbZpwEUrHakQgQlfMCBAy4UYACDJBBr7bW4IUBAAG9KdQ8FEgggQAoHHGBAATIki+267CLlDwkQmOBABUGQ0K1AFOIjAw4OmDCBFOm9oIECalVAr702UuinQ2kFAW0EETiQAJnqtmvxxTH1k0AJBgDhcQ8zuHAoYPz9QEEHEqS8wgAirOQPBgScAEKOEgCBggNL4qtwY3T65Y9XP4+H8dBEy9TpAx4U8EENAMSn5ECy1uBe0g/kWICl/yjwwgICrMCAdfr00JXOOy9c9NloN4XPDz/cg8/aH4SwAl3/KIxDfBT8gI8KJmj/VcFKToBghAE4RIGPCB9sMAOVZfOc9uOQo/VXYxAAYcBkdVNIgQ0oOCgQPgWEwMCSIvhgRA4sVDjDAQ4kK3TksMdu6s/+fNs13QrXYAEIOdcdgVzRXUCEEQ0gAHCdIFhQw5tly+788yH1cw8OEVDwABD6UJCz
wh/E4MHI//gzQAgSQPBPCx0c0QABqU/nwe69Z64w9PTXjxFPJcQHGwMty8+fAwIAggPw4RgZoMACKdjREFK1gB2MwAn4qEAPLPA9klHIfhjMIESkRz3rrSADOPDK/EgAgANQygHM0gcCd/QPE/TgCAvQwQk+4IMQUPAHFrxgDtPjmA4hSofK8aEG/4d4Lb8sCwgdSMD8wjcBEOjjAAKQwAw8sIEZ1e0eDvDBE45wAwOUYAUxAAB3CqJD/43HHxf4wAw6MIMPXIB5CsOHCR7QgQ54wAU/qBgR96ipxvAAAAIYAAFfh68ffOYDLkiAA4BQhJP4TAYcaEIDTtCCAsSgAAREyOt25g8I+CCAahKAD9xENv58CwgpKAIRNrAB2fDxla9qzD0YEIICLIk/UBOaPyhwABCQhj8K0IAGWFCCELjAbDks5Xh+UAIGsSUBIAhBBhxUtiBgjwItIEEFiHCAAdwLluCUk2No0xgZdEAAFDCKzxZGoQt0IAWtM6M/AkAAGDBhLrjMpeNw6f8YB+ijCEqs2wRWyYFeCW06tHwAd/zhghSs4ALhjGiiPuSCIFygBRcwAQDkFlCeRKAoPmvBgTDqgAy0Mo91UwGeZECCJejgBkzoykoaNzlC3iN0DDCKQG5qgxKohJA4AhU50XeAYEn0qFi6qT6AIBQixGcFA1wJPgYgAA/Mpk4R+IcBOkCEmj0gXbSRAcem4IMkPGEBMFCAPpuXz3/8MQQZgpoLYtCBljlOIDgw4QXSw4MSWKAASA1slTppnRn8Y4oDmMAgw7elARzKMQlgwAx8MAMQcCCPflIBBwAwWQ80gQbsI2fjlDkQEWSgqPapwA2I0AJC/sMBdO3fThlggQf/6FGwuL1P7X5QLRLwYJCTO9xPfaYCEWSTT3etEw9I0BYl0HMEUlBmW0kigur+wCvlEQB6CGKCEBQBrGZzgAV8gNKdFsACJbhtbtfrKg0QAAkMm8AXiUCEDFTAUSjQbmpv8F3XircD5f3HTXenXvYaeFQEEEJ8JwWUm63NAyGIAA8dYIMVtPauLZRbawdyjwdYgAEHDnHaHNWCEouATLNSlYcGEAMUYNZsCVgqBNLzgwzcYAAizjHR2ooPCoRgBp77h2lD8AHgLqw83RxOPyonAcLo+MkW29kEVhDIe9QuAgK4XGOYI4JeMbRrCfgZCaLpSyibmVQE6Fm+IsDKZuVv/wOCrNuHfFCCC/eDhAIoQgHQJAAiBKHAZw60ZZa4kNvyrDH2iJXCNEuEEMCGCC4g0+dckGUlPrIAEnB0VUkp6E6nxi8/wAEFPkABFwgnIf74AQQ4MGoKBAF8A3HUZwYQAQhIWgFpVrPQmJOAA5FgsSldi5GldwE8JeC6nk62aRwTBAAwVR/62EAJgmyQC4CgB1XcQBQfcGqB+KPZhNpACnpQAxz6AwEdQSa+lM3u3HyICEAAQcQ4UIMIYO0gOLiKs+hdhANYdaYTOKcPBsDvFHwgZy9ot8Lj1A8ReCB7w32bugXyg2P3So49YJ3LPnCDFYS5Tibo2Iz/EYCFm9xK/v/wJwDCdw8ru3aHAlHB++LKAw/YwJayLIENvCkQDZz85zzCBy1rMIFoFSACip24GQXCgwzEQMJCnkEIBMkp2pYZ6Fg3kcwtsAIPAMU1ROBAJg/yusfA589psfnoZAkACwA563CXEA8OGAIUfNQBGzUAp3d16AvMQACj8dn4DFABFeCDBxwAggU6QKW4Oz48dPnBAYFw37q1IAMheMC9yTieQKXAA+C9EQoO0AMQ1AAERBDA4hv/+NYjhwNuzYAFAHDvfviY8ZosJwhSgIKPcwpKrtHHCh6A+c65/vjduUfbRwc1BxzgobnH3wZ6b1D58QACDpAY4kLAO0Aj//uYqcH/P2gPtQiEAPdklz59DDrTRW+UAn0Cv/xxAyIg0KgfP4Cw5gfSIajJAADSFno1xR/44ALCt1feN38KeBgqUABsEgE4UAHR1AMhFD4JUAA4sxItsFH6MABrMQFrAT584wImEC8FAG1js4AqeBo88QCEEm19JnafEzczUC3/wAE2FAJgUgRAQQQRoFMiwADQhj0bsAKRtoJIeBo/UB0PwAAUEGaxBgEFwAGSBhcgcIUlkIUlAAImMByd8gEPAAJH90YJmIRmeBc95G0aUjvxx3It94Zv2IZyhg9uU31neId4mId6uId82Id++IeAGIiCOIiEWIit5yERoVPptxw6Y4iO/7gU+JAAA8AAD1AAFPBGCoFG0fIAlegCNgg1nTIAD/APGAhrj3iKNqECFFAESDIoDjVAmpQAHvAPG/Al45IBECAg98ABqbcBqKQPBbBhqDiMMtEPJhBtNXAgJgAC5oI5BvFtXyMxCeACHRAC/xY+kBEbJegAfxd4xPiNLuEPHEd7M4U+IeAAmrQkivgh2DM2qHIDDTJT1mQAIweO9ggS4ngDD9A7FnKOBUYCNRRnFhIDcUVxG1Vk95iQ6VYBtRgBF5UAH/AcwtgQwpMCBfUPAGkDx9QYqGID5KeQIHk/KjAAErABRNAB7eEDuahePSYXe/UPNXZJwCUDPmABGYBDIf+ZkxThGBAwA/pgACvQAykwA25yW4/RHj/4ObwkARHQAiIAI6pnfDo5lRt0ASXQAU8oA1LYAyhQHwzxISuQAoYCNSQQJfrgAxkwWRJgAS5GlW7ZEDeVAgwwMoAyAzf2TQTxIR1AirKFLy0wADNgAEVwRw9gjZv3lohZEDIQlikYPoDFO5mIAx0gGn0JNffQAgmQAL5VAwdgS4n5mQUhGNolIEIHP6gmmRtQbrdlThJQeaD5mqZ1AyWAMJ1CBAAyHGsjaeEjmaQIa6IlceHzAw5YAgH2monpDxygbR5AAQ4wAAtCBB21IxFwSxMwAzGgDx9gAjsyLy0gQvfQG1lFAVRTtAIraZyvqYpHIh/uAUI61Q8DYAMeYG7JaQHkAx+s1APaUzc88ACsNIQlUJ7m+ZqcgUKlZgL24n+uBjQtwAEDQAEEQWpQWEAEWlGYFaAWKh4vERAAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUEAP8ALM8AZADqAKIBh1RFELygL5CQkJubnDwzDGZmZKWJKHJydJaWlFtMEmxsbPn5+Nu2O0xMTNDQ0LSXLObm5FxcXK2RLEZGRCgoKN27PO/v76KipN7e3OLi5C8oCsmuNM6qNNjY2WJiZIVvHYqKjH5qHHlkG3BeGLS0tMjIyPTOQ4ODhL6+vKaOKFJSVOrq7MeoNJyDJRwcHGdVFTIyNCQdBj8/PzcsDHZ2dLq6vLuaLaampExADdOxOBwVBOvEPBISFC4uLH5+fJN8JI14ISwhBjY2NO3GREU7DO7ORHp6fCIiJK6urKqqrBYWFMLCxFZWVDo6PGlaFpaCJIpyHA4OC7aeLObAPNW2OcSiLM6uNcOiNOK6POK+PEI2DBUTBObGQGJWFBQOBObBRAoGBPLGPPrWROq+POLCPAYCBOj65FZkGJyu6Mqo7CY
aNJBmdGyALFpacBIMGBoSYAIGCEpaYKaoxOzCKEp8GMLYvKh4SJzEkO7qsI4egMLGpG6qKMCuxJCcKMLO8IjkKKyejMqeuFRUPBo2aAQMCNx8gIqWvNigEKqcDH5gCOqyOJCAaKKCDCzGgEw0ZAwaNMLwzOTa9HBKQBIgFNjI8AYaFJqgsGqEdPKgNPr42Fo+OMLMNDJq1GxgKI5kyOjq1GxuGEwQQMCypAYMGNwygAYEGPjOXPro5AoOCPi2SCZANN7IzKyujB4uNJqwnAwIMIpKKIx0DIqsqNK0KNTqrO6ozIZgSKh4dN7EEEIoOIZ6SJyiaNjIqBIEEFxSKCYIINLOwNzuTKaSwEp+bAw2KJqgiIRgJNKyuMKgWHqCHFpObPjEFK6SmExoZNy4gNK8WDIkwNDa3IjoqDxaGPLIzNTg9JBgHGpyhHyAZNTm1NT24NTayGyuqPba5CwuRGxmRDpcVFoqJKhQSLCURHBEGEgoFMLG2DxAKFRoQNLCzEpckE5SGJ66KMKoEH5KcKhuHLKCHBhqVISWlMbKuNLENNigWODCKCZADOS8XAICDAICBAYGBAYGDAoKDAoKBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnKhQn8WL+/rt00exo8ePIEOKHEny4MWLR4yAaLCvpMuXMGPKlHlSHw8ECxYkUcJxps+fQIP6PLkvAgYMFgZE6Sm0qdOnUBvqa1IDyYkOSplG3cq160x9LgaUkKGiw4WlXtOqXeuxn4IOB/Y1wHBWK9u7ePP+2yejxA0e+5hkOKu3sGGvYC+M1Rd4MNrDkCMDdetAgcV9ZbNK3sy55FQHJHrw4OGiQIcbFKK07My6tcSiCyx0mO0AQk6sMFzr3l1RxhIUKEr87hC7hIAevJMr77eXwpEjLlwcoYEBSRPAyrPvrnm5AdZ+drWL/5fMnXGEwUrGq+9cnoKCBszXy59Pv779+/jz69/Pv7///wAGKOCABBamTz9RJKigagtdFJ9G4Q3EWBQaFbifEifUoGENwJHQQIQCWdSPDCckkQQCEfAAYj8ekDCADBbmpw8FDixQAgnAoeBhRf1E4MAKDpSQAQQCpGeQXMRB4EGM+M1YQgcw8KDElC5slJA+QgjpAwUUqEDCCkbYpU8PSWBQWwRM4ndECQ6Ax11C/YBgAQJu6jOBBQ64QJBNIJRgxAAZoJlmffqsCWV0gF2UkBIoZMDESUrUkIEKTLFYwgkuCACBoIPO56QFSJDgogcUWITQjLPJcFKcFviwmj4wVP91RBQIbNopfYVeABoJSyCFgqqnypCBAz0QdcCcLemjBAIlNPBPFAPYeqt8B8JwREsjImEBCRQgJBcEJZSKkQILDICtAg74wBy00k6L60UULKGktxNA4IC4lymQ1EZC1HCDns8K4Ki7714UhQAWnBDhVBlAuaoPFoCwD60YHDClEjBcAJcS4BE8HnesnrBaQWAJ+aHB0RZgEwk5tewykVF4LB53LiCxwgEJRXHDzSdRMJsQ/7g1wNBD34CBjQKoMLLM2/3jgkb77OMCDfbC6K0HEKDQRNRKGKETc4z1I7ZGFKRcIdO86ROFAiUMQIMCGcoW15U3rXApDTdYsBhC7HL/ijZvRXWQwT8rQFCdCh2fWugJDvxjOBITmGqQsgM44PffulU7gQpMqCCDC4oqfqAQKngOuuRHwtAAwJgT2vrrsMcu++y012777bjnrjtJB0qphGog7jlxejycHWJ5u5NHwQlLdODADYhXBNYBJDgQ5AXwSYh88od9+vyXECiwNMkUsNxBEgOgsELCr27PfWE8IDzAtVFEkOqp/dCwbQ8a8RCBbN36R3mC9z7ENAECHSiWRQ7GPpNAawUKkJASSoABqwlQgCcpoF5gQwKiRGAFJGDdnvpxghWI7DI0ChfJMqhBvPTDCHOqiQwc0IEmmIQvTzpBAyYQAV2Jb4WhayFb/6IgJx/UhAIUnIBJGKOCEiwgA0hZgQBUBETUCTEtNhEAmDgCryVkQImTO1ADLlCDCwwgCX6JXBUJeMWnMNCIPUuit5gAGhhcpgBAsuOeWNhGxLzQAgKoSRNoCLSDKIEEmzoJrcC0tJr0EYsc9CAEQmgSn1VQQn801x6D+MitWEQGADQYwkQmIQkdwQGB4qJNBtBA7XGyk1FRFgKk+DQeqMB6FtxHblYThQtYADVB44FgOvAhDA6QjbD8CgxKYLcBIAGBNHhVE34mkH1MoAQWwMAN0meBDNDpeMdEZjJjMiYBPAk0EYhZiJa5hNwIcB9CMOdROkCCdLrymOOMZRRc4P8cnmhlH1wamYj46Rzs3NN9+UyoQk+10IY69KEQjahEY4RMcU7Uk/vgAQV60IOnhUdqPYABBZRgpYuuRYzPtACeMKWVqWGTcCRgQnxMmpZ9eGBIJPABCNSXBIDJcgFY8cEAMBA+i9L0Ky5QHw3cVLYV0IAjRWmYGv/hnXsdtSvfItZJZLCAJcSMVhEbWZycatSrvqQo2zqdRY4gG+Tw4JmWGYhNF9BBs0YFM3bD15ie6KwHGnEg+fuHA8Zn16CMqTYF2Ms+lOCDBSRyHwpYQTuxFSvBzrSwTSGhbE4QAQ8gIEkRsEj5/lEDBTBBATfIwAIcoE7MNgUsV1lBw2ogFkf/QVUGSYDAP8w0AJwM1rVPOZAMFEADD/QgXh0opABdoAIFHIAJR2jAAlBQVuB65iI9sBsP4HQshVn3tapUFghW8FeS7aUvDlDud38CFpFOCQbjDWFPANoEFyjBBQ14ZnnX+5OisElUtSFBEyRHqwzoqFcdOMF2+QuUGWWoBCW4gAf8OV8V3CA4KBDABAjLYJpANVmvZEp1O0zi/Yy4xChOsYpXzOIWu/jFMI6xjGdM4xrb+MY4zrGOd8zjHvv4x0AOspCHTOQiG/nISE6ykpfM5CY7+clQjrKUp0zlKlv5yljOspa3zOUue/nLYA6zmMdM5jKb+cxoTrOa18zmNrv5/81wjrOc50znOtv5znjOs573zOc++/nPgA60oAdN6EIb+tCITrSiF83oRjv60ZCOtKQnTelKW/rSmM60pjfN6U57+tOgDrWoR03qUpv61KhOtapXzepWu/rVsI61rGdN61rb+ta4zrWud83rXvv618AOtrCHTexiG/vYyE62spfN7GY7+9nQjra0p03talv72tjOtra3ze1ue/vb4A63uMdN7nKb+84E2MEQBJKFLesjBizYwRSyMAUs6wMMCdrCCOK9A3vH4AMBCEAItDCCClTgyu/+ARZMYIIcQMEJARjBlffhBCzsIAtf2AEHJBAAERDByv6AggmmgAUsZCELFf/IQg4CUOUyeOEDQ0C5yU8+BCtYGQcvkEDJTT4FKzzACkCo8gcqMIQpVIDnEQeAFCRg5SGMAQsGr8ADcBAFAuTABFeoMskNzoActIAACZBAGKyghSsfnecceMADGJDlk2NhCjuQAAFisOUpsIAFVnBClw2AgwQ8QOJR+ME/DHBlAixFAxIIOrxNoGV96OAJEpgBEHaQgywvcPIBYEEFbKDlKOAgACaQAA
4+8IAsx2AED8gCAxKwjwTYHOT/aEEAfgCEAAAADCPYARas/ILSJyAKQZBACGaQgnY3XgcpYIEBsvB6y2ugClMIgARYsGUdiNwKCSAA9bOsgR/c/QEzIAD/Aw6OZY4n4PMiGPqWifBuKlQgB5V3d7oZYAAqMF3LW4C5AaxOfizr4ANUkAUpgANWkANBh2Wk9wEfkAMPUAEs8ADxZ2U24AT+QABwp3K7h2UhAAb/kAD0dnJZ0G9XpgX6MAOplwNHd3JYBgAx0AJWEAAccHRTEIFW9gMR9wMMkAVYsG5ZxgIs2AIgOATb13RAAAYzQAW6lwVkgGUN+HUhwABcQG85kABY1gI2oHbLNwV2pwFZJgE78AREkANwN4RYtgNAEIZDsANkWH5QQHRk0AJbtnwbwALzFgJbhgNfwAI/UAFTIHFbJoRAwABYQIValnsVAH3/8HFc1m87QAUE/9BlORCCOcB+XaaFDAAAJ6ZkUxBzIsCBlTgFLeAFXSYCObADAaADW9ZvIZADQkh3XGYFm8gCrrhlIaiGs1h3PUcAmSgeKPcBugN3FRAC/jAgVNACvog70DcEAXCL/pEDDFAFzXc7rJgDjyggNvADpZc7LKB6I+CJ/8EAEmAA0Wg7ATgFnCcgKXAFWCB4uZOGWPACATICRBAC2Zg7XwB3ATAD1fcBOUhyIrBlI/ADkRiC2KdlAZAFMfd2oMiMVIYDGxBzKccF/TdxToCCFTB+PIhlXpAAAfAAHAB/aZhlBhAAXvcAP7B8IohlYyCEVvADCdACKXllIkAFVoAF9PcAMf9pZTEgAVAgAQzwAFeQk1WGAxKQACIwgASQAkaXZV43kgCwkT6XZUNABlOAdUBgADlwf1YWBFcQAAbwfhIgATQoFKLIYhIwAjEABJ34Dziwhk0hlAwmfDoABC1Ad17wj0+RhvWGYh+QBVQABPFmACNgh1HBdilmA0X3cw1nABJwjk6RAzGYYjv5A0HAkd3YWgi4DzFgAPC4i0XWAkDgDzpgAECgAzFQjVjGAiNAilSQAhynZUXQkVXZcX4YEjn4DwWoYyzQBQTwAxuQAJ55EBWwAQ/gBAxJY1uwDwDwAAAQIiAhATYHf5GnYymgA2VAAGcJBlugATgQEv/IjjsWBP7TkAAPKAIGcHcSQYOK+GMj0AUBIAbK2AK1qRCGyZb/UAVX8IBCZgIPCIFVgANWhBAskHUEMQIzEHZCVgFOsAVRMAI2wIUOcZEDMYvNGWQSwIXKGQDrWRA28AJ0d3oCkZFHJgFEAAZgQARF6Q8gehAfYAOclwAHmGQB8AF9p3YfkAIsVxCMWY//EARLNnIPyADRJwL6ODImsAEjoAHe2IFKdnJTcI925wSm+QKEJxAtgIoFQYhJVnIGh3LbN3taCqECoY9OZnBvNwTr9pRatnsZmGQBAQAh+QQFAwD/ACy/A1kAdgJiAIdYTiCJdB/y8vLCwsSSkpTIvIRKSkxQUE/e3tzGxsT5+fhPQhRqamyukCqumUmWlpRWVlRkZGSmpqSkiSUWFhR0dHQmJiRrWhciIiQ+PjzKyswSEhReXlweHhxjUxKtrayCgoQtJg3Ozsx8fHwqKiyioqTk5ORaWlzq3rguLiza2tyXfyHKxrSuomzu7uwaGhyKgmSKekx6ZBjq6uyampyenpy+vrwyMjQ6OjyGhoR+ahzS0tREOAyOjoxubmy2nkSegiCysrSKiozW1tSWgjxuYjS6urxCQkQ2NjRGRkS2trQ4Lgh+emweFgSKinwYFgQSDweiooyyrrTSzrx+foTm5tQ6NiSusqwOCggGAgRyfGzwqrQ2Iiy+zvCgqJA4SEikbii6fKCEWICczJisfGSGjogSMGgcOBw2LmxsouCwxuy+vNCk7rTkfKg+UGScqHicuOQ+WEACBghsGoBCNDwkIMCKjphOWFSw0Lg2DkBOZGR0ZHA2UBQkYIgSYDisqqAwNExiQFQEBBgaFjRmdmxsWtQwIki+xthiZlA2cDgKDggYKCg4LjCcoLy2oujSzvBsdkSEUBhQQGywusyEcpwIGCC+tMhiUlhkPhiYokheUnRCOCggLCjw+sTSoszIuMi+1MzayMxYXDxwVGh+dIBCJjwSMEQsOjCkiJCeiLgMBBCEanRaQJDKsDRUdlgKMBxCRFAMDBiWgpB+QGxaooTM7oiKeIDofCjEfCjo6vjWuNyGnIiovjTCuOxYZlgaBiAEDBBgZpQ+REBeYnSUppx0kojo+uy80oBs4oCy5jR+krB+Phjo3Og+SDBwZFhQOkgICDAcKAyoohTYwoCctKgsUEywnLTw4IywvqxQRjgkYNC+orTqyjQwOBCsfOQkwIB+tIS0tKxsoiSwtKDwzNjIMoDC7uTS5NjM7rjM2MzMxNhCOFAoLjgSEGBOVHDW9uRycmDW5PTa1sSwqsT44OiecKBCTEgCAgwCAgQGBgQKCgQKCgwODgwODgQGBgwAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDWsRHkuS+FEhe5BPJsqXLlzBjypxJs6bNmzhzFiyJbx+EDzYqUNBJtKjRo0iTKl3KtKlGnjg+CFBQowM+p1izat3KtavXrzRLdshhIwGCBy+ugl3Ltq3bt3DjxtzHQMmIESLQqpXLt6/fv4ADM8WXJAiBFCd20EgruLHjx5AjS16IoYaRJPkgKGY8ubPnz6BDL+VXYUAFfvk4DFm8V7Tr17Bjy5ZI2DAGfPkS653Nu7fv354x0Phw5B9JCBoIUGiNkye+FCBocOAHvLr169gL3uZgIsGICBEYlP8wMWCEgQ06nb8g4EIADavZ48ufH/oAvggmTCBAoEKFCwUKqFDCDemVtE8ECahgAmv0Nejgg36R8E8KFVQ4QgUjBDGDCEJAMFSB+RgQRAkPbMYchCimqCJW+ezjoov8RKAYCfmceFNlQRwgYw2crejjj0Dm5BxuEazWo06kDRABPxAMUcKRQUYp5ZQeDZnPAR+MsFxRVxpBgEonqMCjjVSWaeaZCw2JzwY3YLASUfiQUIMSR+DGgZgvoKnnnnxyRZoNEdSYWV5b9mnooYjehE8GGhhxBAUUYFDBEB/ggFqimGaqaUcG/HOCADMMYIQRNqigAHk9ELjpqqy2ylAS/2T/8IESQdQaxBAKzGAECCm46uuvv+7TAQbEYpDCCCoQR8GbwDbrbHUhTBBAEx9ZqdqYz2ar7Wz5eDDBAsxuNCRdCEgA37boposUPvxYgIMBEGTAz7gkZGDAAQdk4OZBPOWTAgQnQAABDAW0kEGNJOVDwQ1J4HvEvhCpicEJB6tr8cU4URCBBAPsgMAHNwyZQQllaaDBACVwgN5AQ26QAwI7JDCABlUI4AQUCeMghBIDJKBBAiUcMG/EVoaL8dFIs4RBDkZIoMQMCeAg8gMPgIBhDSLsEME+LDtHQQ0ulMDA2CMU4EAIJMWoxAdC3CXBEBqcYHTSdNdN1ElIYHCAzEiM/9vBC/zsw08HFagQhIQCtUyDChDkk48//HjgwAVY4MYwCRsIjoEQM5SAg
d2gh64oSf8gYcMAOPBbkkA4lJVB4jwJtMHiBzhO0hMBTLBEws79Y8AONkgt+vDEx4TDAKjD7pzj+Wwg4weI9/7P7C4QcEIEB5CAxQINyACF8qQnIULwZBZv/vkXHZ+8mhToyEAPPm8NvlobPCDAEAOIkMAVPiwQABA8yIJx+uUDE0jgc+hLoAIj4hz1pU5NNyjBDhSkAhDkqWs82QcHhMAADrxPBCoQQhEmoIMnLO8IRlABA7i2wBa6ECENRN4Dh0QBA0TABz0IAg2OwCw18WMDJdkAB/+yxoTcgYsnN5AAAnpwrhc60YVDcuBOekcBDtjgA72an5r+wQ8huOABC1jBtEpyg8U9gATle6IahxdFGapudV9zAQf2oqbY4YMBLqBBCDzQgCOWcQhnTOMaB1k89SHhjaTjxwMU4IPy9S4fFXAPBUIgxhCUcQcEsIAgCcnJ0JnOBn2jo5pIoKE5Jq5GWiQJCT4wAyrwQx98jAHWhKDJTtqyeOwiAQkiIAIRcIAEKZgXu5IQLxJY4AYHeAACgpACtZCAATzETQoOkIQUWGCaNDCBDaIZAgegwAQ5+lsH/raPTd7ynM/KzQeCkAABuCABQfhA7fDxAhAkAJ4fMELWPmD/AITho4DK6QkElKCBtQUBhEo4QTnpGQUAiUACNSiBBCTQg4qh86IWy4wEPsDRjkpgnvnIwAhqsFEJPIABKfAnPrDEAGFawAc02OgHalABJCyUXQxgAQqm0FEpBOEBPMSoUNHFLgtYoFgWMKYwccOPF2DgqBS4qVr2gQEglqSpT8UA4FoGgB8QwQpJ1SUGyjnUspr1H1CQARB2d9a2tpUHDdCBW+dK17ra9a54zate98rXX02gr4A9VB3FdRXS/UMfFwisYs/UkxtAgAEV8MEJLGVY1fEDCRzwgWRtSkcKIEFHFTjBckgSrcWaVkr5OIIEdtDLrNlgBLWEIeFswFoR/wzBCBW44D8oUIFS/ScBGSBdPhJ72uL6KB83wBD2IDACGyAABAohjQrKAzAQiMAEDFgJPjowghIIgQYIEEGdDLsC45o3RS2y3T80OAQlZPEgKbiV/NhVARPwyDj5eEEH9nEDggZ3dQA4r4AbVMcMJACUCTEdcNWCjzCVoAMDLAkpNfBfwwZgwBjOTgY3sAELIMtzCenA4kCAOQqkoAciWGGESZICI1A4dhmO8XVKorEePOADIjBC7RKSjyR8YAcfIMAD2CmE26z4OS6usDllzGTQlMQCPdDADkwwBC1V1iD88IEGwjtlDfhgZc5p8YtX1+QyxyalBrrBvRiwUYWSKf+1NBDbvSqgBCM0DnxiVrKZ9+ya6UhvHwcYwDbJlIIPDMDNODVcFnmSZxjz+dGdscCRB7KBHvyjAiw0yAkMaBUJBwEBB8Bgo8kM6VJLRnpcFIIJRkCdg0Sgc50miQWCYAIIGCQF/nW0qXfdGNz40zg4MMIQODA3gUAAAQnATMISI17l/YOUC74yr6f9l32cYAQnOEIGkhCBD5igBGhECAkkAE5oJsEHNjBBDz5EzyMcwQcgHIG7m0jtesuFHyPYQcx8poK8WJQgCOxxCYaAAA2IAGYPCJlAcmMD2wJIBT8LlL0nHpc4cWAEQuiBECpwgA4Uu9W4sQAHQKDxEUDA42r/CSkICPAPIWS8Bz3ADMVn7hbcwMhFqFRISVrEj8D9euE9F4jgel5smhv96EhPutKXzvSmO/3pUI+61KdO9Sn5ukWwY8jOcc6cnrxovS/KedXHzpIXXDsHLh/BAQoFw54cAeMbxwGzQjoCjbv87j2AQKvJzvcqGcAGQzhwAnaggVSRKW0RGIAK+G0EhQr9ACUwghJo9YF2CiAHK+u75jeyy2wjIQPdRoAQPmQQ3BwgaxXIwBFAMF2Z5/IGKUiBMStQ8H5u/vYY4V3KD6ABBPOLAg9YNdfW1AMTjP7IhWWAfemN++YTjcyLciO/WkdhlkFA0ZP+BwbI7QOyOv/7D9m5/1NJcAQCqOBLCMkNpSSdONNpIAnSazAClBBK8Nu/IarMwTo1oAIJCO8g+1BfiwE7FvABKkBsvcMPPSAAQmBV9/eACaFKQhAE+qQBIwAlA4Fv6oYeJYEBH4AAWxNFDTdPSwaBt7dzHUACyJRDX2Yj+zACJhBQHfiBgbI8FTADVUFq/5APFqAjJ4AE1LFFBbEPSAABHHAAFqBSumaCvDYkO4gDSrAD8AeAPoAAY+JpQyA3K9YBEjADPvBzXAQBEmBbwDMCsYZq29Vb+iYCHwABUrWETFhqqKZIChABNnIlO3A4PGFgCVAnynMA7RUyZHYlGrAaFZADAyB6o+WEuzUCBf+XAxXwACIwAI0jhHG4a6g2OwrAADbyHFFoADzBHR/AfhnoRSMgVQKxfQhghvuwAQdAW/ZRRyHSSyeQOS9QXwc0WJdoas2DGlcFAQliHwixAarWA2mBDxZQA6sGciSBBMIGitBnAAhgBM0kOyBgfHvnHC5jfCyEDzdgBDsgjNm3i5D2AhGwcRwAAedoFkKAgQOUBMJGABwQATWQLH0DO5A0A+YCY/nAAFSxd5kxBIeDQSSxfSYQAQRBjMJHkORYarxFW72kb7hlZKoDaD+2hiWgbLDzNV4IhvyQA5fXQxnQM8UxPyQQhaE2EP2YR9mogw1pZvl1BNcDHgdwA0PTdrj/QQI6gj1uYlhpYwAccBtXRgEEMAMj0EO4JgKdQpCmIwKwwjIcIAASkHmo9pJSJ20D9HtFWQEqRUrhSBA8gQQz85SJ8ykSQHpVaZXgR3wucJRIpE9kuWIp0HBLuXAR4AJTyZBq+YA9MQIK8ABSdQS9lzrzM2uMQxABOAME0JJYuZfOhw9ReZbCpXwPRpD0pIyNNBAdUAMI8IV66Zj3hwMmA4oC8QLZxJVatA8FZC6JkwQxE1zzA5oPuAHXmCMkgAMvA0o0lo5AZBzxFYM4oJPk1oC6KJtrSQIPMATAkyB25k8GFgT/9w8HYATI1nA78ADNZInGCX75gAER8AAQNQIZLiBVJNADIECRPYEDI0UiESCU2rmd39cTFPA3l9IvkNJDafMCLxBVdVSC8Bl1AQEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFBAABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUDAP8ALNQAZADsAI4BhzExMXNfGLCVLLKysezs7L6+voiIiO7KQERERJqanMKkMl5NE+Li5MqmNEk8DJSUlM3NzIpzINTU1IBrHPPNQ9ra3HJydNSxOK6OLObCPYGBgo6OjGdXFVNFD/T09CQkJHpnG3h4ec2sNWxsbDw8POvEPjovDBISFKenp7mbLZ6HJ+bm5P39/N7e3KmPK6WJKBsbHEpKTMbGxGBgYVRUVC8lB5V9I2ZmZCoqLLeWLJuBJFpaXE5OTBYWFI56IigiBr6eLdqyOaKipEpCDCMaBp6enBwXBMauNEE3DKKCJDEqDNu2OvLGRAoGBBYRBNq6O+a+POq+POa9RBINBOa6PA4KBN6+PCrCgJKolGJmGL6+0M7c9NCwKAQM
ELiauD4uPIZgYNZ4yKCUDM7i0LjUuIJGKM6kuG5GGJKaYHRGcBRiUDI8PCAwRLjuzLqeDCA4HKCcMLrMTCwgwNy4ENbuTCAWNPjg4JK+iNDANOS4XEIuaDgiNGZGQLqmUOTW9ExcFBYQYMB4dPiyNIhcGIZ+bE4kFEJSWOK2OMjW2EBiiIpoIGp8RNzSEHZccMBsLIIcgEB0ONYwgAYYFLiuzOTmzGJ4GIDiJM703GA+GLCmLHZcKJa0JAoYNCxi0Dg0INC4WGpkQEIOQDJSTDRSFICo5KaMQL6urGBMKLLCxEpMNIZuCCAGIIKkoOp4SAoOCFJQFIDmpPLEzM7oqL7IxLjK7MKk7OykzMKmIOzorA4MGBYwaNTE8GqkJIKOtOScfEpeTJ5KSGaoiJ5wdJ5wSOy+KHyOjBooKF5uZMy+zJh0KGRuhM7WwLqmfIJeyOTCgAQEGAoIMOz48LjCpNy6SAYCCIZ4SJKYgGRegDIwSJ5mHPjSXNzEzAowKE4uPOD44PT40AIGCOC+KLimLJygvLKorPjMFFBSaKKmhIZyeOrW1IqUJJh6DNTEqGJ8dJyKuPKcMHR8ZOK+NAICDOK+PN66NAYGBAoKDOK6NAoKBA4OBA4ODN66POK6PAYGDAICBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnFjQXz17GDPa80exo8ePIEOKHEkyob8eIR6oVJlgw4eSMGPKnEmz5j9/JDywYMCTpwwENoMKHUq0pj8ADCAAWMrUXtGnUKNKVXh0xQB/WLNynMq1q1eaVQtkrJf1q9mzaCceJbDCgMoRADamnUu37sCjEBhUqNCCAAEDPbbaHUx46gkecet9sFDhr77CkCMLxUqWo8UZBFqQkMy5M0mtoHsUWLFDsOfTqB+C1npiwIoZplPLnl1xNVYYeWnEps0btVN/GCvXQ+lB6e7eyDn7OzHDwIwYCGY8WFFhhNPk2D2f1LBCb4UVKwqM/3icvbzyDzssaDCgYQYOrObjy59Pv779+/jz69/Pv7///wAGKOCAc61mjz5ymVSPPgw2iOBWFh3o4D8M1kMgfqt9kEABGgSWUD0zDFCAiAWUqIGF/9SDgBAltkhiCCheSB9oJ2ygEwTvJaRPAjuRWKIBKKoohAwuQsCCBw/EKKN8WtWzgwwDeFDASzpuQIB1PWQJwwnwWdQDDFmecMIOBFQAwJL1aQXAAELsIMGUCumzAQO62UZQkzd4MICSaJqXVQ8P/ISDBDJQiZA+DxBggT4f9FBZWQJpBYNrN/Q5I3AzVADjBy0UahKixbGIQggkbATfTVnVE4MHMsBg6XxYkf8wIgz+4NDpB8fdZE8IEIxYgARsWdelVvoY4MEGub6KXA8JQIAAVgDcmqtFOMR1Ew4GEEDnsFmRIEEFzyobnz0jVGBBpB+8SStCoA2kjwZ6PpoVuR6gkKC42OEFAQla8kAoAifwGalWd5HAAgT6rAYDCiuM0C6+vam6ggcUV8zCkRLQcNDDNwFwcMJNkgkBrgRDzJs/H2jwQAIsJzAACwQMoMGZG5dszwwsFGAhsQ8cu5rJyGl0EQkVyAAAWQLVg8NNuvagTz1Q6xODDCuEYFm3EFQQA8dAy2ZbrZ2uKxAJLaBgoYoDyDyCBUL0hYKrBO1ar6nJdq3dagBUUMDSA/H/QIBYHQ/AwAoE/MOADCGIHenC5pZsN2222YNAqQTps0O4KX6AAA800IDAB0gXVA8JPNAK6eOop6766qy37vrrsMcu++y0d20ncCf0cMK9A7d7YO5OCxypPcCfkHDtaH09+gMQEPoAAjHapqoBA2QtAQo0HE9QPQBo8CuhCdCMvFfK8wABATKwmJluqK6GAwQrXF9EAX4ZcML2NHxXQBEsQjDD+F/52qQI0KHhjEBKrvpaD0aAABjYox7MaQGdCiaBjAVsQTAgDwCnwjEnhcdQPXANbBynq1NRKAEEdNcDGFCa022QKxyzR7Y0IJh6HNBeX3OcDP9iGQCsAGH6AAAJ/3DwtBfC0IX6YNgNBOMPBHwsh5Dyx6QaljQesEAGO2CeBCCQgBgIz4iT+VlrWMhEj0ngflAcCLkIoJSk4YyNA9gA9QggAR7UDYwyscgH9oiDDzxmjOwbyAdYIIHAQJEj9fBXBWYQJJzVa2lSTNQA4IbHoRxlSCUagG7GWJq7mJFLJQONimTQAgtcJ0U404wn88KDShLlKBsqEfYWJISGMdGJCCNhqszXghBoMEU0IOT9ItUDJbrylTqcYQ3zhEMXUsZ81TllpHJSSIIU8zXHtOTDnPQ3Kp1EhPJiYiKbdwOBSbEADLDjNL8FlGwGJYcfcI0v7bFAViXQH0HEgXBiAP8/X0Ltn1uxYcziYg8N1asH7gzjaupBA2AV4AH0k8AOIuWkogEAK+nayQPUo4GO8uA6UhRCZlBQBAkUp50Jfaf0EFDSCkBACNAbyDj3Bi0J9OSmEtgAJaVogV+51AAXTak224XPPfrRhHo0nR5xwFSmLEVxNxnOHh0o1Kpa1SB3vNNVt8rVrnr1q2DtTCh/hlVdloVrYZ2J8k7wAaY0ik85LOhSPkA3F6YVJjkEQAIa4xcGaJI8UPyAASrggTJt4D1ovetIlIcAFMjMAiFAAQMIuLOvxVNKHaXfABBLQsUu9msPDCgNPCABpZ7VIgecJEZRQAAYJdazn7GrQGBQQZL/ne4k6NQYZWjAgL21L6uw7Ugo/yk59BnytgAg7T3PSZrfAje4EiEYPhEwghE84FsO4xhODlZZfIr0XK+FbnS/dq0isMUDK9DAY3LoxAKABlEEJKt4hasPEkAHOqbTFQJucAPqoSBcX2vvexNFw/DO1yE4EZxPGLkbJ73pJQG+ItKwsqPWyvfA0YXBCNijgRBc9DgybFh31XSwUy5HiRfGsEc6KxB7WOAvX/yAXsSHF3D9VsUgkR5oFiZipk0Ynyh0LXBejILjmhDH4xUlCWjwATDBAADG8hRGaUAC4fCAOjT4Eg2y1kIWIxnBq4Hvm0TUGBmwDzghQGBW3jW4ErWg/wUaAKVsvwxmJRsABTKAgAxQYAFcUZQGBbBfWexBg/kVQAjZMzCdGZJD58bGInaFdIoXTekZRbfSmM60pjfN6U57+tOgDrWoR03qUpv61KhOtapXzepWu/rVsI61rGdN61rb+ta4zrWud83rXvv618AOtrCHTexiG/vYyE62spfN7GY7+9nQjra0p03talv72tjOtra3ze1ue/vb4A63uMdN7nKb+9zoTre6183udrv73fCOt7znTe962/ve+M63vvfN7377+98AD7jAB07wghv84AhPuMIXzvCGO/zhEI+4xCdO8Ypb/OIYz7jGN87xjnv84yAPuchHTvKSm/zkKP9PucpXzvKWu/zlMI+5zGdO85rb/OYD+cFzhb2PCBihCaFbdgn28YIIBKAGO681FPghkH00fR4ZKAEF5qGDGnxR1xeAwjz+MY958GMfYOcHP6AwdBssYAEdALYK9pEBflzgAl3/Otj3AfUMQIECB/i1A5JwASukAAQ2uIDd+eH1sHc
dCrlWABDSboQF6CAFRYf8AiaggAvwQwR0FzvhNU9rAQxkASnQAQdcoIAXcMAEJhAABURQ9AgowAYpsEIGMtB1sNNaAf9wQBX+kfYDXMDsJuhABFKwhAxMYPepn4APLuB0EUC91g74xxQ6oAMFFF8ADphACkQggACAIAUB+If/EUBwACg0AAgR6MACXEB2Vy9AICL4vQMcnwIXTCACzi/9BJAwhSak/gk54HklkAImMAWVYQKwJgA20AEdIAAXoAD6NwE6IACYlwJKkA9GYAIB8AIZQAECwAF3QWsv0AE1IHwXUAIZoAACIACQFwAdYAMKEAATQIGlpwAiEH1OEGtOAIIlUAL/MAEB8Hg58AIpcAFLoAAR4AA1oIEpUAIXIAARgGsd4AMCoAMT4AIXkAI2qAApEIA5sARP4AMcMAFEqABbZ2uQF4UqGAH3lwJRNw8iAHlAiH9OCAQvMAFpF361pgQCIHVT5wKwNw9QQHdQtw9QKIECYH0ugIC4hgT//8CCCrAPUDCJtBeHQOB8bwh8/xABLnBrE8ABdPiEDih2maeCLvACOgCDF5B2uiYCFIB3KKh1cbcPYtd1GSACE2ACPzAB+5ADvEZ7Xqd5ckeLD3gETyACUfgPKiACS8Br8zB30Bh2S5ACNrCMWsd0vkaK0FiLXYds9PCNS/CN/EAPzahrndgQhPeM/4CNNrBr8+CDVMAPVPAPUeCDyLZ1tRh1+7AEF4ABw3YB/6AD/yCJZIeC86AAp6cEw1YCUNeDPah1H+gASnB1uwaMBZmCLoh0xmaLPbgPNhAASECRvPYEgviGSBgAU8BsQWB/DpB0rmYE/5AE54gQS+ACAQCTvf8mkOIXAU/Yjgdhk7/GASnwfjVgAxnABPPwBMhmAkWoAy7AdiUgBV0niL9WD2n3frzXh/z4jlKgeVtnj71GBDqAi1coAlDwhA1QeLRIdk/geb4WAYTHhWZ4lpX3dYTXg0CAlbimAPvHATYABJc3AZOnANoYjGQ3DwLAiLj2AiWgAC4gAKSXghEQATmQeV4JlrhmBNHHmBmgAmgXAGZ4ASLwkHI3dry2gSkABHAXAUigBAvQh143i053hreGgKPpmOk3BFjIhlhYAlCwBEFAitiYazaQA0/ggUPwc/kwBArQmBAIdyJQefPAj8NZayjIfZy4BBzQBEbwgk/ghC6gAmb/yA8qWI66dosdADUd8HozCIGzZ4lLUAJup5S2ppjreJA1gA8/oAMU0JMBwJP7YH8BkAJt+QIicGtE0JxL13XIOHz7UAIqAIqkFwUN8J8CoZO3VgVOAJqCKAVnmQIqIJ7kKQAvUJxe54u7xoV4GAFPkAFZmIsmYAMM6QIcUIInmAK99opLcIlQsHg14AAg8AKCFwE1gATaF2xQN50XoAMLEKQuEAEq8A82+QL/IAIH+mu0uA9PcHmQGQD2uZFdVwIisAA/4Gw4Cm162GxONwTRJpB62WxQEIduyWxk53ll6mwiAILQNqfOVgMzGW1fymwlAATOVgXrSQF/ymzT+abL6GYDw8eKzTYFNYABuZioyWYEL4B7tMlsCpCC0Tam0oZ60PZ3vAhtTwCQz1YCQUClzjYB//ACgZpscOcCDnClzvaE9Mls+GAC77ipy7YAU+lsqTcPF5CryzaWNkiLzQZ+NkCSzrYAYtmNwnqChNdsJjAB8TmqrIpz3Nqt3vqt4Bqu4jqu5Fqu5nqu6Jqu6rqu7PpVXpcByshso/mMbOd0zMZ8S7AED9psKhh/qLpslacC1Bilgiqmv7cA26psoskPzYii7fqwygavo/oPp2qpyUYEUSiSxGYEPmADOHmvrlcCCSuvFFBsAQEAIfkEBQMA/wAsvwNjACsAQACH5ubklpaUTk5Mfn58xsbE3t7cjo6MNjY0JiYkcXFyHh4cRUVEVlZUysrMZ2dmmpqc1tbUUlJU0tLUIiIkLi4seXl7goKEPDw8zs7Mtra06ursGhocMjI0np6cXl5cWlpc7u7skpKU/v78urq8YmJkDg4Mqqqs4uLkoqKk9vb0rq6svr68hoaEsrK0ioqMSkpMwsLEoqSQajxsssbE4tr0opiwZnZsko545tjYLlBMuqyksrrMGCgonOi0LjogalgYUGZMgHKciHqAICQwanZE5M7kCBggNDpIQDgkuMCkeIiMCAgw9vbcuNS4gIhwcHxsuMiA2L7I9uboDAwYbFpAXmSUmoiw6qS0CjAc0r7w9vC8XlQ8mnCYEjBE9tTUNCgoNgxAuO60eJBY6ODgqHxctnyYuMjssKisVKCE4nykxnwkgJiEaKAkaOCA1s70GgYggFaADAQQLigwahiAyr7YyqSwJigwNjIMnLKsqqigCg4IPDxIgHqIEhBg3NjQnLLooIiIEhocoqig1s7geHqI3tjgQFhALixEwLi83OjYICwo3ub0NDAgyuTkaGZYUFpIxso0gHpkKjAgBAQYUE5g5uDQXmZAPDRAvrzQ+vD0nKZ4Vlp0ppiYKjo8RERQgGRERExIbFxgkqagPEhIRDhAwMC0EmA4HCgMXlpQQCo0bGJ4HDgcQEgw0r6kXGZcNnA4QFBkytLwKiYwEjBoytjUxjKAUFQ4Xk5YaKDgJGDQGBYgBgIIgGx0NixoJGCIIsCAvsjEqHzghIiYcGZgeJCw4r6AysS4qKCgFhoI8Pr0pqqgAgYIusC82tbgtqzEBAwQIBIUkpqsyrrAnKDEGBAc4uSIbFBoVHZYFhAIwKR8bJCE8L7MNh4oNlAU2PbU2Nq4koKIpqCw1OD0gIyEsLCkEhAgIBwIwKToGhY0tpywnMScsKyYaljUeLCEJCDApqakBgYEpqac2trcEg4UGhYUFhYU2trkAgIMBgYMCgoMKiosEhIUCgoEAgIEKio0AAAACP8A/wkcSJBgP30s3ilc6IJDwYcQIz7st0CeBgIjVqwg8O6CxI8gB1YcsUAfBQ4HKJQIyTJixQ75+smc2bImwYomFOzbB4+mzZoUCxQw8Q6FBQH1+v0EemEEhhEZCMiT4GKC0qUh+9UTcAEBggsVMJwYsA9r1n49lfYrQUICgQVmP86cKxOBihMkrsYtKOAf3ZkKUABwoHevSL9/D7Yo4KGw4YH98qWVuW8AgBUXHD/W6sBAgg8RHKA4IS9BvscQ61WQUECevNYqPOzTjBqeghcfSJDw8EJfTNQR0cIbPlwm8OPIkytfzry58+fQo0ufTr269evYs2vfzr279+/gw4v/H0/eemK/+/Rx0FcP3sS//2yfRFBCc+LICwJsXPHgxWmB9/WzgQMqwEBABgPoU9h98ESwgjwtBKACBCt84B5if9UzgDwYoPAADAWgQIFBiSGAQgEDTFDPBAnIo4I+GP4lAAQERLBBPRe8A4AF/8EXnwcuwigQAibI48BkxvlVTwhj/QePABiscACAcw1UAgsADHDhP/lUAMIDSVUpEAcwNLCAXhO8c4IDVPr0zwYPrKlXPx+A0IJVbv7zwgkjKDjQPgakYEGbSf4TmDwfFCaABhkomGcEKaiggF4lWCCCAUrlo88BnFIw26GJEvSCBn
2KKRADkW5AqaUhyJRjA7C2yfBBPqAqyqijhf4DqaSrXioTiwb8Y0AFF8BTTwByQlbnnab+swCffgoEqKBW7rRPCT2V4AIIWg7U5Zdh5soBAWaiqSabMeoFD5AtCPnPBEWSgKReS2bpJJRSQlaijixMsA8CFZzQ7kz7XHBATDIJ4NYHGxSs5gD/pTvTkyC28EAL8hDAwIXwMLCCiDNpKAEG73QAgzwocLBgYvnkRwAGMASwwG/xCTACCrgKSIIKDTxVgT5b6ktXfPWox15PkJWAwATz0jpffQMFBAAh+QQFBAD/ACz3BVkAPgBZAIdzc3Q8MQrCwsSYjmS0nkZNPwna2txmZmTm5uQyMjTOzszKvYkiIiQ+Pjzv7+4eHhv8/PgqKiwSEhQmJiRKSkxhURbq6uyioqSWlpTg4OF8fHyDbRmenpymiyTq4qxOTkyEhISwsLDGxsR6Zhg6OjyampyOjowuLiyKiozW1tT29vSQfCzKysxeXlw2NjPOyrRiYmSqqqympqSmmmyKgmSSkpQaGhybgiS6urxORhTS0tQoIAgWFhRSUlT67ry+vrxyXhS2trRWVlSukiRuWhRGRkRubmxCQkRaWlxqamwUDwdaVjy2srRiXkx+goQeFgTm5tTa1syuqqwGAgQOCgRAUGTOvMzE0tB6kLBASDBgQFg8SEi2rMRYQJRUeGCAQhyCWojEfCRsbGCgiIh6QHTApOiCZky2zITofCSy5jTK8oj0xLS22NDi3PgkYIg2UBRwkIQuUExYWExqGoDO3vTKvKwMDBjU8uQ2LmzGMoCcsuhAWEA2DkCwusS6utCwrJg2cDhiUmCcsqwaFjRETEgEBBiimLBgakQIGCBCOChwdBySmqwoOjA8IhBseEhwaIBERFAKDggKMBwYKCiSpqAWGgiWsDRyfHimpEg2IixSYFyc6LQEDBAcKAx6sITi3MwiwICypBSohgySgohQQGwSEGBYXnyCbnimmJiywDTgwoRooCTsyjTa8riIeoASGhyCdKASMETi2OS88tTqpLSacJgCBggSMGgkYNCofOC2fJi2xsQkIMCCiHC4fFwaBiBooOB0aGjApHxwXkgICDC2wKhwVnCcxJySjnji+OS2nLBCOFByeGDKpLD03Nh+eoiipJCCmITI3tRqWthCJjzU2oTizOAMBBASYDgcOBycoMRQRkDWyNBWoIRgZpzifKS2wOwwNEyaiLBsZGjUvPDW3shQOkyGiJho4IBMVGS4rkjw9tjOsDQwIkjevMCibiRmeHDOzvBSWFSCemhWVGhQalRYdBwGBgQKCgwCAgQODgQCAgwGBgwODgwKCgQAAAAI/wD/CRxIsCC+Ew0aHFlYhES/ghAjSpxIMSKJCyIysmChIIaLiiBDihTYw0AKDiYwYCgBIsLIlzAJfkhRYoIECTx4SLgXs+fImSj65RtK1KfRijNDUGhAYoLQfEejRpyJQISAHyFAkOAptatAFyZqaNBg4keGEEW4ej2arx+DnfnwHbmQ4QKDtVGJDv2X714RAQY+4PXJQK/hBzIctIA6GKYQw3oRK2bceKRLyH2LiAhMufJIfBESMND5gIKMDBwKe4ZpQ0MIGTVMXGBhAK3a1SL7IZHx48fVEBpc4MMNs+0EEgtJRHhKvLnz59CjS59OPfqT6iMrdCgwJeY9HgmKfP/4cITBbb5xI1DoUSTC+YJtj7RAgkQIjQUzDpAYjr5fgvXt8SdQPiSYEIQAIiggwgVCPDRgPjwcEIIILIgQAxIORpRPBCUooECFLEDhAAIYPIDeBBrg8CELMbQgwUD3HIAVChqAEIMBLCDBX1sA6MCCWCXooEALAsIngRAAAGDEPweYkAEEJdjA1wMoZCCCWDWIwCR/9yTwQQQS4NNPBFWG4BJfDQigAww84GMDAAYEkcBEfeGDDz/33POBAhAMoMQ/9yDhIxI89MNDCwoEQQJjeUJFFAk/6FCEQPcYgUAJPAzEQAwWwNAZQZDFmIEPBASQjwQmOGDCiwLxUEMGABT/+WA+CeAgaasYIADAPUThA4IDIMg6EGQMXGDAAgQQoQQPHKhghFr3AGABphANhc8BtZ0ZQQgpCMHrUPckgZqUGhrWQwohLLHBPwHwgIEKwQ7Yz68y3AXfPUcEYUAS/LnwAwtF6BVoCjHYW61evjrghBIBdDDCAwdkgEMDvN7TQBAQhDBBtQkYa4KJAkEqQsB6CUHwxuXu5W8KH+SjxAjbdXxWEkIkUYIBGaM87AklpIBBBJTVqkDL6N3TggH1gpSPEQ5c8MBQDW+wAwk9Z5ACCxhwgEDBoCYQ5M+dTRCCAS1ACwCJmVbEwwUWxDoUFUBs588DPRhhBBITtDAujF7r/2AC0AWhaoEGAvaTqgbvwfeBDj8sSlQAN6zwNMIaGKCBg7R2CMIEe4GqdwyA5+MCDgb0MKxaRfWDggNB6eVPBUNU8C1Uf/1whKMncHDWBw/Y8MDv+DgawY0mNBBBESX8UwLI/0wAwAfD6VWrAY/p9c8ONxDQxAkTJIBECDoAINQ/+GjggAo6xMDBBTLIUEIDjurJrQAT/iMDxQPCgIBd6MV1dr3WA1QFFhAFJkhBRQIAwOTI14IYhOCBEAzBBW5XNBJo4ALqA0ACUEeBGAAgU706wD+8VZQBPWAAHnhBDC6Agg9IoChtucsEZhiBGk4geLPqxwMY8IDxFWQCPGAMUf8kcMMSDugeOSDAClzwALgYETsDUcIGbhAAKIakilbMoha3yMWYWM8wdDIiGIfVuf59ik4SaAAM7NYC4WAGVA/4QBKM0IINYgZcDQAADAozxop0CQRqUoAOdCADF9yRUiTAgAJSoIMUBKEFT4EMCYKAPhIGkCK0uoACOEAzJCiJc2/ckO5kcAAkgCAFCujB7IhiAxMYAAE6sOQTJSIBEOhAAw/41j2Cd0h8AOAsduyHBi41OaL0wwg44ICKZFnGiRRBAWaKSz94dbC9SOZZA2lARgJ2xCIEwQRF4EAseXJJieTjbBg4ghBqlIQGRBIy/zgBDlJAgYLQxQi8zMcEShD/hCM8QJzeMmNF8JEqAVwgCD8QgQ4EYIQX3vEIViEBQeaFABA8pR8A+AG/bHCBbpFzlhCRQAkgYIEQJKEIFACBSZCwyr3kY084mNNAymeBGrxQTzgo0T846lGBUoQHI1WAjobCAxQggAPFNAxMZSqQ8jkAAy/kUAiOwCsbdIho5axWP2oAAQASpQgsEIBEzTgUzfzgIwOhKArC1CMQPEBMCZDBmpzYzGrdAwRQskFR/KWAI5CRKLW61UCsCit8SOYHsamBDAzggB+YAHoghc8BtmawfIBVrH8dyqY6RRm+fuA7GhCAaEWrAAdAIANBOEAkQeIvHbBUIMI8KsjiMrvY/0YJKuHKQAw4dw8GkOC3vxUCDjKgFRvMDiS+NEACUaqBRrJ0KBHQgIuI4oKxmYACRzDCZmCQT8PYAKB3rCtBHqCBzQhSuUlwaD6EkC1G9YBbH8LRB8OLmBQ895DmlAAFNIACFADAnXqJLgxYxZcuJQEENKLAO8GoGwCcI
LxnhI+Y8NHSofQDh2Qk34Ud9cYCdxeeXQyxiEdM4hKLOJR5slOFI3xEFWNml3aKMaDwe0cJHOHA/QXAERYsXnzkMccN6K5u+osCExj5H5AFMWbwkQQRXO0HCrASAPQKYtjC4Ac42swPYBDJTTkgUUEIAg5Sy2MhQuaYJTBCEVxwhOZmAP+fVdYTC255hAZoAJUUIEp9jXCCPvc5iB2+ow2Mm1bzycBEfbQBBogLwlpmwASAfkAMMJsy8ZL1U0I4y2UuqU0R3I4oEBXBWBHDAiEM2oezsnRW+ZIEylYZCRlImp5lgAAhCAQxFsDBBS5QgyQI59KVhs8E6HK5Kh/AAdQyJgYgkIRbo4B+DoxyCD7bx3v04yY3+VZBeKDSGCSgyv9IwlNBaC2uLol8bD4BAyIgBLqEwHEl1CcAalADDICAYp2RQI9wQIEVD0TcGCB3XMx9xKJ0KQQO2JUYqSZaCwkBw//Qtw5w0AMB9REGFuCAlIZYAhWIkKyUAoEKoPrEfiAkIS7aALRAJI4D6MEngOzVWP/ElgHTgRxQAEA2oFlMVokHgQLCGqNnZ7fUm/9DdSpoXYQxw4Me/aDfd3STDx2NAVAyIFcWtVabvtUPIYggA2Vbeqiw5YALofQDQgBwXyiAgQO8aCgk4BYGWtACDKQAB/Auggk0AAMkwAAFX68BHyvNcQiowAAUEmQKLvBt/6GmmPiggCY9pIALAJ0oPfgBIxtpgB9oAGiqDlURlJSEOdoNAEIA9D0i0IIjRA9cE/gADGDwgQmsMo1CgMEBDiAEF7xzJI56ufBn5RNLBwQAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwD/ACz5AGQAxwB8AYfOpjSTk5To0nhAQEC4mS1iUhSampwSEhSMdCDi4uS9oDBrWxaDg4QiIiQoKCmQeyPe3tyMjIxXSRHmwz6iiydHR0ba2tx0YRozKgr0zkOnjCpQUE9xcXGxlSztykNYWFfOzszV1dT09PTFxcR2dnRkZGRCNQx8fHywsLDIqDS2trSymiy8vLwlHAQwMDCEcB3bsjw2NjRMPwzPrTWoqKfUszkcHBzrxD/8/Pzm5uTJrjTu4pxeXlwaFgRqamxFOwyYgCQpIwTKysyfhSR6aBqioqQWFhQ6OjwODgt+aRzyxkSenpzq6uzu7uzeujzmxlTEojRuVhTDoizWujw6Mgzu6tTbtjraujx+YhzmvjzqvkDivkQKBgTmujwGAgSatijo4Pje0hT4sjQ+OCg4VhTAphD42uRUSFiGSiiE4ij46LBWVBQgGDTspMxmahggMkROUDTyxMxOYkyYxJCMmCgwOEzAzPBEMGxuaEAKCDCKZGAgCCCGrKg6JDyGlLxEDkDAzEwuIsCskETaMIAKGDQYKCgEBBjSvsx4gGS6vtRicnyAjpSqkJh6gBicfgx6YHA0IBDSsCiGmpTW0khSOiTitjjA2LzsviimmAyIHoDGmrjS2Mi6pEAGGBQWEGAWMmj40lxkQhhmgHTgvihqSkCKfEhQYBT4+NQKMijS2PREZojk6vRorKgKDgjYxPD46uTYxKjSuFiceChugESwoqhoYoTcukikahykdEikdHSospBKThSikMCYkKhSJhiIYsgWHBxSMESsfhyWsJykTkgwPjCOYBj4woAqxIDq1tQODBgEDBDa7kz4zBSE6Kh0gIDeuBAuZtBmfBhkUCjA8MzApnzexMzGljTk1vTAnlQCBgjGvMjS6ODGpOze+ODaeIB4SnDGyNgWZlCYrOjOrriKdnhEeDggOhzS3NzAxKSiqMRGVlhqpii6ysioooyisqw2VkxoclxUVGiCbgiYoGjivjziujwKCgzivjQKCgTiujQGBgQCAgQGBgwCAgwAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjFuRHkd++ff/uYeQnsaPHjyBDihz5sOI+Gx8YRGDA4cg9kjBjypxJEyLFfQNoQMiRgGcRBzWDCh1KNKLFCkIgRNgwoIKPEjaKSp1KNSg/GyggbKjKtatXkfs2NFli8Z5Gjl/Tql0r8N4JJicclAgQwYcLjGzz6iV61YAIFkISWIDQJIcPvHsTKx55FQUOETRcNjghIsEGtIszay5pgwYOEED/8UNioEmAl5tTqz7Iz8gSyJj38WjCosHq27jvMRCxhOC+CglGhMZNXPO+DyJQoLVYogmKqMWjL3YRAsKAfRaxNjmBWLp3tRTvkf9gMoJDhQ9FRAiJ8b09eIpGGITIYSFBAhQVMLvfX7XivQEcMHDCB9DxZ+CBCCao4IIMNujggxBGKOGEFFZo4YULlXUPEvdUpFBFZSHBoUYC6UeQiB1iuNo+LnAQAQ0s/GMERR9ShMQGL6JAwz8DcMSPCwGMIBALMf7DQgTdqbhYX4UxgYMFDtCYEEUNRAABBCOMAAIIEbzETwwGCGkkYDjggIKSqfFzAA8DlsBECA1IiZCaESRAQwUN2ODAAC74uM8BAx1wwAb1bYXmZhZRZIMFcMp50HEJFDGjaAyF9RcSh6apaAKNmljQATRYtg8SeaY45wGlMZCppldxGqenA13/BUICPGxggAoonHDdnANAEIILq2oGog2uOjrRERY0MYIQI7AAQhMQ8ADrPRyIUESSweYFYqudJvRbAjiEUAISV+0WwnCxOsACrdku6SGx3fJKGHclGoGCYSbKhgMLBbar7bbwvmpsiTGEkMAHsbolQgDdqRnAdv4mti23NoCIkA0jHJxwtQZ091sIFgwQ8V4T2+BrxR4ehMQSTHCw3AEs0zuQwjRgO3JaJu3jAKMOXCSlzpRa9EECLLg06gacipwuCzmUcHNiLJ5AQgQiMBEBByT0KVAMQqAAqI0RvBkBCQbUxwBqAu3T3Aj9Pr3WPW4yIXcOclvAAV4V5MC2aIqe/yAE3cH5AGisSAQAgapu68VPAzzw8MHjHzg+3I0V4OWfCxt8UEHPBu0Twwe2JX4brKKXbvrpqKeu+uqst84g6a7PZBESBxhhxAEkTmwiP/fUbgQS2OleIhI2HOCj8LGL1BoJKAjB6AgGVNAh8v+wGIEQIYSAwri6U+TCvc91D3vyJcUwgvZFFMF0DidMP3H1SIkQAg1Z5XCa7gdEUOa54pMPVp4c+tMHBtMj4dnLNDYY1QDuJa1twY1ZFhiBwN7nP5kggQY5kFasQDQAwRxhIJZCAdooMgAVFOEDWprgwCoIEpM0QAVawQyIZEObfjVAfqETjREMwIIBYKxTFGQhSP901hT0RKpfM/yAsnLIDweIAAL5EQ0SOAACl9kABEAMohA7kj+DNaEJP9HPtmJQqOAhoQQ4OBhHfjOCIlTMAdlT4ba2aBQbzWUJNBhBAGKwu4oUjjYfcMEROCAEHGRQUUUYQQEdsCWB8c1idHRI9+7BAxCM4IMbBJELaMCEJoRgSyww2Ad4R0WXUcQBWXpVJlcYSdZM8gRNYABi5tiaEhiABgHYwBFYYJ3WjCAHHHDBAABkAQv44AjGKxEkW+kRtYERU8pM2UQckAAQVKwBQviiNkVQJvl9YJbS
ZGaGTIQEBjShS9FMmYdGw4FzYoeSUjuB1AIgtwCUYDhzFGeGjOD/ghHdwwgbSEoDeXeEPvnonyOygQ+CY1DdoVKC4hufPjPSTiHgMpFPjIDxKFIB+UFHbUQKQJiaIITLiA+OWcznRKfkgkTWBwIgKMIGyFWRDrLACGk7whKqE0EGNLR76lKBHMO5UtYQrwEOcIANyPVIizggTrECVFJt4L6I5ql7Rc1qpiT6Ia169atgDatYtapSgmhxrDQZlgtcgDYp6mmtDkgmWtNakQawII2jLBGQ/vbFSFVurjIBUf4skMbLSJEDCRBCAE7AABY0QY2AhYlJeBCC61mGRiwaAFPplJy2RfYjFXEBC2jggFCZVJlmHUB1lPbZFsInSEe44GVJ9yMQ/4SstSEJTwlCwJ0DoGC2CKGWczyLW5vw4wgjQIFtfAvczg0gKSXganGn5BoQKI25+fHUj1BgNWhOtyNq4+1FTmLaixSERZw0AHG/25Cr0CA5JCCBPC0AGQ6wtnqbtN962cuQvswtBwDupgVOEKv8BgCn/G3m5yC3gUqaZgOhQa/VBsfKBNdIsKGKomhKKwIVxGBDvrOZhS9MEeYa9h9IOMFjRrCEItCAfga474jb68cI9HB4HMgeFrOXPRYYasbGLYsYgzdD7AD5yEhOspKXzOQmO/nJUI6ylKdM5Spb+cpYzrKWt8zlLnv5y2AOs5jHTOYym/nMaE6zmtfM5ja7+f/NcI6znOdM5zrb+c54zrOe98znPvv5z4AOtKAHTehCG/rQiE60ohfN6EY7+tGQjrSkJ03pSlv60pjOtKY3zelOe/rToA61qEdN6lKb+tSoTrWqV83qVrv61bCOtaxnTeta2/rWuM61rnfN6177+tfADrawh03sYhv72MhOtrKXzexmO/vZ0I62tKdN7Wpb+9rYzra2t83tbo+4BvWohz3OnIQH1CAL4v7HuNX9j3pguQBDIAAF/qGAf0iA3jCwxwzabQ8n2OPfVQYCQRaAASoIZN8vQIACgEAAe2QB3euecgoyUIN/FMDeDyBAQTCwgoRb4QY1SIETqJwCGNygA///SAIBUtCBCxBB4wIhQruhAO4HFEADVH7BC2bgbg0kwQT/4ALQ/6HxfRPABAVIgQy8gIEpvwAIHZiBPRTQ9B6YYAEayAJBCIAAfmAgBT+QMsuJroEFSAAI9VYIEIDg9RnIQCAXOHIS4j4QjcMgBQgQiAmioIAbLAQBQ/C60vfRgySnIAVSEEgHCGAFKzxAIBoggALqsW+FaCAIErhCB5LwgiRT/h9D+McFEFB5gWDh7XRPSAemMIQOZOAGCkA5krNwA83rvPVQ6EgGoKCBegABA01X8gT8DoSLe2QGCzCBAt5eeDITQAYtSMG9WwDzMAehAxJogQLuHfwxc10CKfg5/xEqPuYMTGDiWkjBBMp8hRkgwB4EWAABRi7mHiBAAxhQQAGQcAHZi/kCQ5B/EpAPFzBvExVxexFvSVcA9rdS9TB8AkF+bMFzBDABCvAAM2AF+nQD4QYFxqcXCOAEM6AEWaAA68dM4dYBBSAD3acXP0AAD5ACNld9dKQAFyABLZgYF6AAL6B/GIBz/uNunZMaGUgA7icB/pc866cACLAA3pUaTjABw5cCFJACC0A+MJByPyBdepECClADTgBuGUCDrXMBzVccOlCFKaABk5eEYFYPV2APUqBzl9cuC6B/E2IFD1cDGkABFHCGmUIFRihwEiJu9lAPTuAEadcuVlB6D/+idYloD1KoD52XLTPQASkghBCSBYm4BU8wfFPQLiEngRByA/0WhRMgAO1iD3GoiaWoBYnoBFuwAzhQZhNwik4gAFVgi4hoD/hAimFmAk5givUAA6FHZi3QAROAbmBXiWMmAfVwAyXYA/eGjAKhdd6Wjdq4jdzoEMOIiAZIZj1wBR6QBV1wZj3gBBQHhGWWjkoQfujoBEpwBWg2jh4wAfR4ZsOoZvnYjf74j5EGjGP2gPZQA2QYZg8ghYjoiNZXAxx4iNgoZhwxfP+mb/W2iF/2AsvobzUwBENQAwLZZR5wijOgASmQBYQYZqbob/0GkmM2AYjob3D4Aseokv/mbxP/MANNR39h1m9W4ATtRwUXkAFixor/xocpEG9jNgNSYAU1MANAkAJWwI5glgIzgHdW6QQol3pfBm4KsAApwIEXsA9hF2YlVwNRZ5Jm+A9+F2YzII0pcHNM6IZeZg9TSQASoAEZMAM1+WVD4ARAYAIdEJU3kARjdngFIAE1MAFhOHRh9gIyuAIWSAFUJ2YtUACcOAM6IAESsALVGGYacHJEsAIy8ANQMJhhRgE3IAVB0AJldwFWMAHh2JUasA8YMAMhJ5tjBgQa0AJJcANWMAQp4IxhNpUzkAVEEARA8HgvOQQvAAUSgAQIAASA6GUZoAAtEAR4mQ+QyZVfZpg9oII9ygAEJxhmePkPPaAAGgCZIEeXW7Z9PbAA3/hwlxhmfEgBM+ABMRluY1YDD5AENdB4/saWYQYDBUB4kzeg62aYX4YBLXABi/lv9UB7NfCBWwYEfKgAemiXTvCANUCcW3YFwcl44tah9SAFF9ACX+YBAdgB4WaiACABXChlHbAAPfCW4haNNfB2YEYBDAgFDxmNbueWZTcEMzB8pvgAKgpmHnADa6gBOlAPVvB7Mzpl6AaVEpCYLVClU+ZwJydiXyaJN0AAT2hmeRcxAQEAIfkEBQQAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwD/ACy/A1kAfgJZAIcmGgSwsK8mJiTe3txTRQzs2pja2tzOzsxERESYmJikiie5r4pnWCOFcRz5+vkyMjTy8u5ycnTczIjUwoAiIiSenpyJiYmQkI/FsFJubmweHhssJQlpaWmCgoTU1NTz68GakmwSEhSioqTu7uxiYmR2ZBw+PjxQUFCIflRGOApcXFzKysxKSky2trQaGhyvkiy2mjSmpqTq6up+fnw2NjRiUBTGxsV2dnSqqqyZgyYWFhR6clRaSgw6OjwuLiyinoSqrrTm5uVWVlR6enw6MhS+vrzazpzi4uS6urwqKiw6Lg+afhwUEATSyrxeYmSSeixqVgy+plwKBgTq7vRWTjweFgTW1twOCgS+xOzEwqSYpqBYTFTofEjKMoBmRlhwXESgtDSMjHgSMERIODzO7oheRpSmhki8vLQWGggSMGjAvNCyxLio7rS66jSOfog4UhRCXEygzJyytMyytKAaBiBWYEQuUkwSEGBwXtgmYohsXGBWYHwMDBiAjIh2emQaFjQ2OigqOjACBgiAlFwYKCgwNEwIGCBapITErjTY9uRQSFT4+uREOChmfHDY0PRupOAkwoBmcGgcOBx2aoDU5Nh0lJi8xsx6amigoMSIkJDwqrREOFBQWmB0eBxCSjDI1KikmrB2fnyInIiIUhyQiJCKeGwgLChCSmTGoMwwIkiKcojEvLy8nIDYqDQ2IizEwtDGyDSmbizw9ozo3OhWeGAGAgi21LjwyjTEfCxwHIAmIMDY1sCguOhuwiREJjyAtIQICDDEsrjYuNx
mbJxaWmjKfMjCfHTo+uxURmzY5PTwyIwEDBCgigykppA8SkhUPkyyrki8wIASGhyCRnRceBwqLjjw+sR4XGxGUEjMvsQKMBxu4qQmYtCYmrASYjiompjAxriIlLDO7riKYIg4dDiIeAgcKAz44OiKeKQ6NjhmbIAEBBjG7uS+qBTwzNg4LmzE1MgKDghoXHwmLijk5tjCwsQCAgTGwsQODgQCAgwODgwGBgQGBgwKCgwKCgQAAAAI/wD/CRxIsKDBgwgTKlzIsKHDhxAjSpxIsaLFixgzatzIsaPHjyBDWrRHcp+LBz1oJAlhT6TLlzBjypxJs6bNmzhz6tzJkKQOIQlaFEESg4MOnkiTKl3KtKnTp1CjhrSnoYONejgSiAhgQYDUr2DDih1LtqzZpPpuHKhwQoALCj6S9DtLt67du3jz6tWJwEaABy1b7h1MuLDhw4jP7oswYIgOCiZMCOgnOLHly5gza94MMcQFAwlmxKiHdUiSypxTq17NujVYCjhGGKgX+oKNIwkouN7Nu7fv3x2TBHDgIYOLff1O2AjCYR/w59CjS+8tYHgFDYH1dRiRwMVTkiQpcP9IEKPCDAT6pqtfz749QQr2XIhwYGGuwH0qPARI8t1ePyEBeOCBDQcckIAP7iWo4IKtCfHPPh3QZ9+D+e0H1T4nFFHEDQjQYMIJCBzF4IgklkgYf/+ocEQMGghkj3bcieiUADjYIIQ+ge2zD2om9ujjj2DZ40OApumggQq3qeCcU/gNcIE++4QQwpJAVmnllUnZg2ERHuBwgQgreGBBi09pd4QFJ4gmQgfoYenmm3C6RFI/LCSARD1F4MABfFBpUMEINiChoQ0G2JBBCHEmquiiEoFnEg0ISEZZVLA5gBsCSfQwgwcrnMAjo6CGKipZlSJBw45UJSDDk6O26uqrTfn/6UAC6d1HwhEtoAjrrrz26pKnIVjgwAVUYuhBEQ/4quyyzFaEwIMcOCBCrf/0w8ERAejW7LbcdmuQPSYcsAILqAogQhAzUOntuuxCt4ECDWAHE0lpGVDEECpwEAOuPbTr77+97VODAgSoKxJJGkRQzwAGDOBBBQgYDPDEFOv0ogA9sCCECThiRK8AH56wAwZPYFdZSQIgcAKIcl0Enj49qECCCixQIHHFOOc8rw761uPBADgA9mlE9uiQQQBFkObBBxIwIIVgPpEgQhE2XHXBqS47qiNJOnfttZwUWEBUCzLY0AN4Iz1QARKh3WAbBAsQEVjRQwR6wQ1DXNAB1l/3/+33d/34QAMFytVDA9oVxYdADzro048OThwgAwpM/KMlCQfEEGI/j+sw6d+gh84T2jQkfTbXI+04UHwVOLCAEuHF0GlJ4KEu+u2429QDaacPPZIOCThgRAlM2HPCCjHQ4MMJJLSFau7QR//S7vX03pE9PRQxwAIKpNBPBrhaUOMBNuCgQsfSp6++RtRbv1F8FgyQAAMKlLDBDCMEQRsH4xlwgJLrC6AAJdI+22lEHxHwABJMwIQS5IABF5iCDDrgApK4wAJBEIG2FqKlfuhodbU7iJaQozrL1c53A0whrLDHOwNiJD3HOsGOlLAEBSwgCCtAQO0QsIIimACFltMAB/8qEAAcdMAEqDohavbRgw7gIAAi2JMSEafCKr6KJAXkSAgy4IF6nGAgV4ACDCYggxb4oHYPQMIBWIBCeyQhAR44gFAUqALKTFEg/0HCbFqwAtCc8Y5WDKSowEM9GrxPBwksggwrs4EcTAACRTCki7JnAx0mJAQdOAIOavaAIXigBb07oeUeEIABWMAHFECACFbFEkAK8pVxql3pIqmRokVgBQGIGGqkwAMMfMAAANSSCgwQAAQlpAcrGJdz7EGBChwhAqpTYj8isCI+2YMFXfyhi0IIy25iiV5JSALm/pcEH6CPIiFIoAci4BYNuLNjAMhBASAQACH4wAcqaEFjqPX/LRWMAAcy2gcHBiACMpkQPC5IQBAiUJmEDqA526SiNyfqI2EGIAA2gACgLirDxCGgCA4IQgAqIIIYmJQD6dkHATBQgEK1oAUCssBpEtKPIQxrQvbgYRH6FVGSVMcADhpIP26wKmpxk6JILZGWhBCDIuLgqQGIQUcpYg8aWOCiWMXqoQRShQZgAAQXiEFWSCCvS15gBOlaXRoP8CwQkuQBRVgjQfZBghGIAFE9BWJS9yodev1DABQIrACSIIBzNsokAkjsYMOZBB0IZh8pqN8GNEABF3wuITq4gAxu8Dw3wvSLbmVhDgliD3/GQEai5Ktq/8WEBihACQ8pmmaHkEQh/32WtOB5QD3YSlrTovaoqw3uul4AkRd1AAIWqO3uRptXHyDBA6C9Dwe4g9eD6lW42KXYPjLggArgqCRC0I+uTkiBfZGAIMFCF04lmt325uyaA0CCrrQTBAsYtXbBkkF9BuJc6EINuO4NMMXKe4QZVDA5PvNUSQKLqn2wIJlC8KAOhiADHMDHlQLO8Kve1YDiNQRDfazAECxAKAs49q0iOFBgdDADA6zAAjdIgP+EUFsAa/jGm+GaC6eI4YIoMSH+YMALCuYQOsXAAEE4Qj0yYDKSICCbUNNABupxhCAYIAYssOOPccxlzOxDBz04AQduIIRWVkYDJBgC3oYwhBlIMf+1LgrBA1hAghuQ4MInc8GHOPADCeRgAw/ZhwYgZYIkpKd2+qDBAz43pySYYHEaqDF7u0zpwrhgBvUwwAgcEElunsADI+BUgfQTSir2gwTPlQFxaGy7ENTNCkFwwAcwUAN/vG9o1620rg9DgRmIwAIiUHIoBXICAyBBBQhgAQtAZObUWksEF4AjUJ8nkAnH4AI4OEITovDnXO/62+4xiWVN0ELbffoCzeYx6opGAX1QQARApaKg2/3RAKDgBQy4Arj33aPaZa96PPp0AjTQDygdNbXgCYGMhSBRrpUOB1RoQA6IwO+KM+iEBbSdEPxngQ7MQAU0MCzCfVKBeLtQINn/A8IDImvxlicI4+WuDAt8NqhjZUBG1v1v0UrO8JP/I3v7aa0CXE509cAc4IgTAAmEECkhXIBhHFgv2mqnA55P+udF2I89UvCPBhT96885+rAf5MGBVOUISDBmzk9YdZMPDej8aSDY5+4bsdvYhGk8QlDXTnWrAxjoZxTI0OlOeNbY/epBjAEEVBBaJba95+wFfGVgUPjKcwbmnUa8bfXe+BMq3L8ALp2FBPIuy5s+M7LU0OGmnkSB6GMIR6Cl5Ug4t4TDUcHsFb2upMCA0/s+MYpDwA0EFAEEIKCC/3CBCjLA9A9ZwANHyICWTYAm+9gjBD1AgAqKEIQLsAABAlgS/1WM78kDFN8EFWzk79dPGLrWo48OcID/6gFAClxAQCuwwc+KEIETkyQCEFAB3vEgLIADBwBqllIgHSAi+jBlBzAA8TcAB3BsZcd+FpgX+4AAFnABF7CBHGgB2rQPNMABHbCBFpABHHNC1BdhLtJJHNiBL3g+98ECHmgBHsgmN3OBOlgW+6APPjgXPgglc1VwjhNNtdODJUR2P0iEjlMZPbiEQZiDOziFVFiFVniFWJiFWr
iFXNiFXviFYBiGlVYSyGE5C4EcWgZgSuQcSWhCOsI53iaGcvg++/AAKjBiF3AjPvcgJ9BxSLRleZUEEZAiLDEQ+oAAHDADNhgxc9iIL/9hDwKQAAMQa9LSZN/SAy3gADIwBN8FZ1SHQQ4QAAIANQjQAkegashVXY64itdDARFwgiIwAgI4aRfEJQuVhtwEHv2gAqkWA3xiOT1gATMwBC0QBB3AT6yYjB7Tg5ShAgMwiy50ai0wA5J4A4yWiyRhAiJQARaQORv0IFPyegYwA8iojOZIVYKhItCIGjkVABfgA7BnjTq3blRxAQFwAiqAPN8oVEMwAMd4jgCZEepYQbaTKkiAADUVBNaIW+vWgPUQAfrAAt54EPowA/5YjgGZkQ8xkKY2ZRC5DzMQBEMwIdY1gy2QAKMokb74KRV5kRr5khHBkajjYGwzihCikCT/yU0CsDaLdALe6EIt+Y8wOZQMIZMuEom5xIb9mAHqEkIIhAQoZTk8VFBAaZFCSZRY2U9HMItClQFXhgD31AMKdQE9cBw9hXVFoAL39AAJhAQnQAEk+Q9BiZFZOZQkMZDodQEOEF8vhQRHsJdI0H9nWWz605ceAAFlcwGAQRBzWZd1WTsqkgA4t4tNlVUGIH8BYBQ59w80kABZFQArAAFBgASnhBo1NY506ZgBSTvOWFBbYzkhoFiJ5QNj+QCIQhIhQHAt0Q8UIJtJwEUtwAJmOXvX12IdYGaq+ZJaYgIcIDUjsAJD0DzyIkogiZO6qC+WNEU+uZIDEQIsoAIRgAQjcBCYJMACqpic5/giFnAEA4CKR4Ar5LJ2/WCR8khyDnBeawdiGlQZPgBvR7BpI3AEBiACgYeeAKklCBABGbCg/BMBKmBNIVSHaplEGZgBXpFXVMF0JBkCYragHpoBJ4BzBmqOgeFjPFKQO7aH2xQRAQEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFBAABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUDAP8ALNQAZADrAKUBh66OKXR0dDExMFJGD7GWLD0yDOLi5IiIiW1tbF1MFLKysX5+f8TExEk8DZSUlI6OjMzMzCIiJO3KQ4ZwHsumNPTLQ97e3JqanCgoJxISFIp3IHBdF9ra3Ly8vObm5GVXFKioqLiaLHVjGxwcHCQdBM2tNVZWVF1dXEJCRPT09OvFQNqyOXtnHE1NTJmAJMChMJB5IT4+PKSJJ+K6PGZmZDo6PNSzN/39/KmQKrmeL8SnNJ2GJBYWFA4OCikjBS8kCUZGRJ6enBsWBPrSRKKipGJiZN6+PMKuNDErDAoKDFJSVBYOBNKuN+bDPKqWLOTARNq4PKKCJF5TFNbW1Oa+PJZ6JAoGBOrq7NLS1O7u7Oq+PAYCBN62OhgoKL6urNy4SNbEqIJcYPjosAYYFIDmqNTc3MDA2NbE8NzorPLEzIqUKGqkKOykzKCURDQySGBaPLrM8MzY3GaoqOS4XIBEKIKQuAoIMIDiKMDMyHJEcCrCgBYwaEB0ONDo4KCMDBRiUNC4WHB6YOTCgCxi0Dg2IEpKNNDANCAYNNYwgNacELrEpOC+KGBeUGA+GPjGXNLSSHBYUMKWLLCmLHCAfM6kuOrW1AoYNAQMEEIuaMBsLE4uPIJeyExcFCA4HOp4SLrwzGJYKOTW9LrMTCwgwJygvDRSFGhggKKmhG5EGLiuzPja5PjMFOzm9LLEyLiauGh6RPiyNJxwSPjAPNZ4yOTmzGBUXGJ4GMKk7JxwdJa0KPKcMCAwRLqeDJKaZIRYGLqmfLrYvBYQYNDYyJKYgOScfLqmUIKWkIh8gERcWIJudLKorKKWhEIOQMB4dAIGCE4mFNzSFAQEGNy4ENDKwIJqCJZ6DPjq5Nz44JKo5NCwKOzs+MKkIJxmHIJ2RIIcgJxKSNDY9A4MGGZEQDJSTIKmoJyKuPj41EpcNGB6dIJ8ZAowKJK+jJKolGJmGE5KZNTU4DgkNN7EzEBiiNbuTOTq9NC+zOy+KAICBAYGBN66NN66PAYGDAICDOK+POK+NAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEjwHr4kGDH+y3iPosePIEOKHEmy5EF8QBxccMDSwQMHBwSYnEmzps2bNu/1CGCgp88sNwycwEm0qNGjRu/xqCGgaVMHKRhgQEq1qtWrDe9p1Sqwx4UrAfBhHUu2bNGtaAVgmVLDrNu3cD+i1ZpkQRYiGeLq3cu34FytIxgYMMG1r+HDZP/ia5EFwoitiCNLTjq3B5EsC/BBnsy5c8m5/zB44BADrefTqD2ixRcgBYgMplPLnr0QbWChoGnr3i1w6+IUjnPzHi5baw8HWQ5o3ky8+WmtAixgESDcufXJ90YsQLC84/Xv4MOL/x9Pvrz58+jTq1/Pvr379/DjJ8Z3scdGzQwtJunRA+Pygavtd19h8omnUwsLXKCAAkHQ8JhC+AgQgAMgKADCASfwwJVvI5zgwIJBBICBdwWGh48JBniABQMQeHAFCNQlVBcHHEDAYooORNCRVvjUAIIFUzDAwIonkFjid4sFcYIAI2DQQgdZXJAXQhG2UEMETSqhAGbe3SOAAhYcUMMIIwiAQgRHGpgEbP/wCIQFFsSQEGhaAXEDA2JttIAHmf2Vpnlb3dbCnIrZ2YFY2XUwHT4ZRMADfn+Wt1UMa7V10GojRODkjwh0tNgUCiiBQIVBIBBjpONpxcMFWThg36U8ov9ABItX8NmDpwhcYQGLHXQwRRYdlIaqgTs1ZimsFgEBAotTdNACfkm0loKzmglARAoKJDEseDrRoKISRvr1Vw/KYkEDXa15UORWAhjAgUzbWtdtjc8qJJyXBgSHTxFXcFADWjwooG648eqmUxH5tkCwQbEJlAGYKFiEAo3UbZUBCB4UsXDBqemEwK5AEEjoZvdEAIG/gCkwGFoYQGABChzz5jGQz9JH38b/4DNCf/QlwUMAd/IgUF1XBBEBfT/fOWXMtJ14BbZFmHDCCUWcoOOlGIDQwQEBIHAAlFgU2RsGClzRwQIBgHBF2DgzzdlOV9TqYtz/TCE2wyM8MIUBcXv/MAURQOTZmwAOTOGBBwYoUK/btJV8gglST031CSPMyYMAQEjdggAaMtxDDUqYgMKDjJdu+nltn6766qy37vrrsMcumci9yc7ZXIzunHqbFvXAAw8Z9CB4b2v+Lvzutpf1l5xXEHH1nPhgEICiNQLeZRIpDQmBAiIinzxWc41w7Z0jQg9EB2uDEIQCDASg7UYBHN5BECD82gG83/MFWQAQEOFBB8/Dmq8Q8Ch89GAEbCpZBzyAAOElwVrJ8V7+roKSDjigBUEKoF+I1ic/3aMGWIDAiHh0AtdIcIJUyY7WBIABDjBAgxUJDFt6gIEYCCAD//GSy2KgGUYF4AoLOCEK/4/SgwVwoEgY2BUMARSDGgXgARCYAgRAcIIkcCUJB/CAAk6AAgQxAARTGWJcFoMFV+HrhQlxWhawYMEDbMkD59pR3q6QBQ4YIAsMUJgY4XIPsjGgYkl84cbwcYIsY
OtfOmmNCAO1gA6A4ALr60AAhLZHtxTRABrTCgaapaOFEbJW61IVAwaWnQtwwH3Q2dIBhFhJmtwjBlOwwAlqEAMUxG8KploaQRZjAAuEbCvHAaJFEAYCDW0FBTfAAptaiRV8oOBXWYimIW9wA+AoAWsuAwKAvCJMLCanO328wj+WyMyjZIcGAQjAAtAGFQ84oAhoOkgPMNYp3kVAMIT5xwJcY//MZKUAC50r51X+4iVOkigJMPMOIT3AABT0ZwRAY0DlFmOBjPWnBwIIgmuGJ9AU/qVdghTIYm4AgTzp5AC1UsCsrOmdHjyAjh24QP2AA7OODvSjDFBAGHMGhMSZ9B4ZKIICOGABCFyAhwDigVCJ6sKYsNKmJFFMBDQYoQDyiAcYwEAEXuUXHmgKAyOwIlTHSlaE0K6saE2rWtfK1raGJzYEHZnI4tqb6rhVJBbJQJOaQlDmACgJEXBKBN7HI71ioClW7OtdR6KTEww1BTfgwAgVO5AIHQAL0bwCA7jDoxaAAAuQJSVlFyuXCFxAOgy4wRQ62VeFtgACiUPbAYiwgMT/1sUAUgRla89K2ohEDwP4iAAHVgtX0/RRUScw6c22EtgDXkC0O7Jrbz0yAiDFU1xXXIAwLYKfvh6HlBVp2HQ/MgLcxrO1/+BBBzgAhAzE4ARKwEBi/5KB5w4Fu7wdb0TKu1retVYtBqDQ3KbwgMfQ174ME69+HRI+8/rXgygwQGRBEIATLAACrbrVg+urLvzmd8FmbXB/d4sSA2ALXrw0gB7RwuH71lXBIK6NiK8LoLk0EY7bZFUHLYbgF/s1xvmZMbI0CVsT1DVaKbgAOIHaYz8BmboOTrDFFnhfuuzzAUv+7lCc/GSJAIYDWDAw7rqDxSs8YL4jWGAm0XKcleXm/8Nd9ovOeBCDXrY3rMA8gcJ2JAAGWCAAI7jcAW4AwN7obAQ+AgsPAj3aOKexBUKCQDVXxIBQtiCZpCMkBK4AAV/hUY85QwEIIACB0PZKOXR1dBpPsJYoTmEtDFhznxWwNIvU4AEdyOkCTsVTBbx6Cq8OIZZTrWqzLtkiD6aPh2nHoyUvl8vFjrbMcPJUaVv72tjOtra3ze1ue/vb4A63uMdN7nKb+9zoTre6183udrv73fCOt7znTe962/ve+M63vvfN7377+98AD7jAB07wghv84AhPuMIXzvCGO/zhEI+4xCdO8Ypb/OIYz7jGN87xjnv84yAPuchHTvKSm/zkKP9PucpXzvKWu/zlMI+5zGdO85rb/OY4z7nOd87znvv850APutCHTvSiG/3oSE+60pfO9KY7/elQj7rUp071qlv96ljPuta3zvWue/3rYA+72MdO9rKb/exoT7va1872trv97XCPu9znTve62/3ueM+73vfO9777/e+ADzzbQ8DwFwwAHwXIQcNZUAAWNLwCKiiBw2fwjw2EQB+F/0APRABxxwv+86APvehHT/rSm/70qE+96lfP+ta7/vWwj73sZ0/72tv+9rjPve53z/ve+x7emKf8P37AcC7MgAuSVzgU+vEPG+BABwt/ARdswAQXJD/hTNDBBCjQ8H684AM6UAH/w1dgAwI4nAkq6Mf1Ff4EKAgkAQ5vwO/LTgL4z588L/CBw52/hR+UwAYl0AQJ5wIyQAITIH4uwH0I5wI28A9M0A8s4AM7sHA7MAEv0HA5QAI+EAL2p3COJwQE8AFC0HBCoHgTcH/hIQEMVwH/4AIsYAOYp3A28AH/UILRN3z/IAI2IIADVwHr5wIrgAOKR3A4IH7/QAIkUAATAAVc8A8EEIP91oECYQMwUHk48AI2oAIuUAAEMAP9AIX7pgI6wAID4AJQUAIEQAAwMAAb0A8U4AL6wIP9JgEcmAAs8AL6gAMbkAAfsAEu0ARMAAAlQAX3xn1VOBAqoA8EMAEy8AJU/0CFLAADjWgDT1ACLrADTCCH80YALBACTRACMEAARrACIbADLhACVKAPL6CGE7ADJdAPM6ADMoAD6zdvO8gEMuACOgAFL2CKO2ADVOACCYAEPVAAONAPyNgP1Vdv+sAEUKAFsRgCWWgEL+AEOCADJRACP7AFVmCA/fAE96YP4mgDWNgP5EgA4Vd9G4AESCADBDAADTABOJCFqfhuCUB4AyGOM6APK6APVKB+IfCK3ycEPUACHwAFPriKLThvKtAEX6gPMxCRXpiKLwADLCADUAADDSACMhB+EoADNDhvF6gCVLCPEEkFTwCARqCICeADA0AAFUCOIeACE0CK9EYA7v+XfhDphf3AizggilAgAzAwj02gAyJQAAOBA/ImBC8IebAIkRD5BP0QgixQAhVABSUgAxqgAy+AlPgGBSRpkjMABTpQAiqgAkwgjRLwAlIYhrDIkzNAfVAABSEwARrQgAHHk0+QiDDYBDkgBQQpBUMIcP7YBEW5hwRwgfQWLgbUeDKAEATwfzBQAEkgf/h2D1uAD0JghzpQASwocNFlQA2QAC5QAUNAkswXcFqxBUtQAAMAA00AeenHfGCobwpFAkjQAFVgBGd5lpQHjoTob0KggffYm2fpkP/YBLW5bwlAAE1gnFpABcE5ENAHcHt5lv/4j0+giPtWApJXiwNBiE3/SBDLGW+e+Q+e6ZkS8Jn+lpr/MJdeaJLiOJ/i6G87kAPLZwNaeXnySZ/64J779gNlyASqGJlPGZHL943ISG/2x3n/sHnOxwKMmAMlEIf/GJ/kRwVNIJ39UJ3yRgIysAMDUAAfsANU4JmRV5bLt5LIuAIv8H9KGZLw5pUkMAAhIAHUmIuN+IX/J536oAM5YAM9qQMhOG8FIIsfII8hgI4dOgEiII+vKJVQQAAu4AIE4Iw24H7ylpsHOARUAIp7eJfK+H9lSX36AAUuIAKnmIMCoZjtxokdSaQEUH4NUKMTMKdGgIYEEAIAUKVZaITvdoJtepoz+QEFIAQF4IlL+gIh/zCLlLgCMvABDRCPL/Ci6qYPVWgfBbABNlABckqODWAFPfADCQADZmkDLggDALCo3zcAAqGUBdCW5aYCRiCLiRkCc6l5m/cCG1CiOdCoOAAFM8CoizgAPyCj74aM0lkCGjAA9ScDlEkCpcmLkVoADaABYEkAgNmNG1BvyGgDOUAAVQADOnCN0hiM1nqRaUihLJCE9yaWM6ACcdkPVAAFOAADSzoBViqMPoAEISCL9iYBDZmMycihUBCuAsl4XAWe83amwPiQ+jgD0jmxOiCC/japGwCWT1mf8/kPM2AERxBwPRAF2ImM+4hwiHeMs2mYCbcFBfCkjLiDAIpw9yAEyAeacNVWbwEBACH5BAUDAP8ALL8DWQCAAmIAhz4+PEpKTO3t7S8mCSIiJPr6946OjIF1PFpaXKeLKL6+vObm5GpqbFxLE7KytJV9Jc7OzJ6enMa6gtra3HpiGDo6PKqqrFBQUV5eXH19fODSkJSUlB4eGmNkZI6GVLCULSoqLIVvHMrKzJqanLKqgCYmJGtZFVZWVdbW1K6urBYWFDIyNKampE9EFS4uLISEhDY2NLq6vKSceOLi46CIL7a2tBoaHEE4EHZ2dI5y
A89uEPf8xDBFb/QIAAopBlg/8jAgUQgECWsAQtaCAEJ5iAChBwAg3QQAA/eEEEsqAEMYy85Puwws5jcGUjKKECA4jAEmIQAwtwYB5BoME8VBAEJdxcAAJAwM9/UIEQ/AABAp/HPEq+dSkwwOG/zQEWrACGsusgAjp4wb8/gPOe77wGHMACBwJg9wDMQwIVkIIEEIADJfgDARUoONvnEYAduHvIahfDP2jAAb0HAQc/CIPfrVABngfgBVbgwA9OoALSIx7n+zhBDRDAgRjwowAcYEANciACku9DBPPgQL+R7A8p7EMKNAjAB7yugubPowIVEIMUOBCCEHR+7SfouewroAMnLOEEAthB/wBqgA8nnN0CYVc7ybe+c+L7QwMmV7v85Y+FFyAA9Qq/+w+YcIICFED2FhAEOhADEVABAYADWIAAERACEiACWhACCDAA/mAF6ycCSeYPvrcP+6B+/oAFJfcCH6ADH6ABYvABDPADH/ACEvACHNCAAzABpGcFEwB9/6ABWvACM6h8NVADLwB8/gBIPsAA/6ADAuEENUADFFFyGKiBamcQGqiESIcFFaB3IqABQXACHpdySoCFKjAA8FYBH7AP/hYBOKABKxeABREBvRQBQVADToAPO4ADbegQP7gP9PAPUkAP9jAPVlBwTVgQIoByI3dw+5CAOEB4GlADXhADIYBfEf/QIDbwAWJQAQKwBR9QASIwfQzRh1ZgD0y4gcenAQPAcxyAAAfxBc0niFaABUHwDwEgBRJ3TRFgA0xnAUQoEBkXADSAcC9wApA3gVIwACLXfB3IEBogBWBwBTMwBRNzBE9gAFCwAz4QBZdnEPZ2TRxgEDxHAzTwAmKAeCdgfSIwAVIQAh9wAlKgAi8QACewdQW3gS6nEAVABFWwTwIBYShwA11ABWm0VfD2Dz8oEOXIjiogBjUAdujndTVgA05gAwygARpggh8QBBrwfCcQBFhAckgXjwoxBlzQAw1wAzxWA62HgVbgDx9AAvhQAIxHPPwQBTFwkQwQAwUQAUaABRr/wAF+Nw9l8AV0d3RJF4gS0Y/4ZQP91ntqxwEFwA9OgAPcVwDo9wMcoAI5wAEcUAHNB5ETKHlQuIFKB3FSZgUBIAABAHz7UJURIAAf0Hy0d39kMINbiAAV6Q/QV5EaiXRZpgJY4IsFJwUvwI0VAIPjVwM6QHipV5hBQJeJSYEaKRBWsGVWUAbzAIJpaQG2OIkCYAEMcIkqUHboOA+s2HNiCGYYOHA1II07EAMMwHpSQHs9JwIDUAMWEHQaSGaguIGDd5EvgIkK2G8WEItoZgXAR29YFwIHKWdJN5k00Ir/4AR0toFY8I9CaGclVwHTiWf7IH0c8AH/YIF3JgKdyQFS7IAFdvYCAvEC6GeedyZzFTBvdiYAOiACAWlnIlB/86AB13lnEcBzd2aK7blnashnASABGrBnWCAC2ahnL0CefVag6vagEiEFCtqZzIlnrVkDjZhnSmCC6plnwJlnYuCgelaFAXCLd3afFZCgeWZz84ln2bgPHWpnxzeIeQZxLQqhOJqjOrqjPNqjPvqjQBqkQpppJnpno8l0e1YBNpBnYqiOO7BnlDekUjqlVFqlVnqlWJqlWrqlXNqlXvqlYBqmYjqmZFqmZnqmjKZ5eMZy/6AEpohnPlCYBaBnThAAERCTeYZ7M/mmewaWdxYQACH5BAUEAP8ALL8DWQB/AlkAh2NiY4KChFBQUFZHEj4+PJyDIzo6PMbGxJqanIx1HoqKjNra3JaWlG5bGOrenCIiJGhoacDAv3VlISYmJHx8fLegPPb29KaaXIaGhN7SkTY2NNbW1LyufPz79pySbHZtSaKipCoqLGFQFI6OjC0kCK6urOzs7B4eHK+SKhMOBlxcXEpKTHxpIEM4DDIyNKampN7UsHZ2dNLS1JKSlC4uLLa2tEpCDI6KbLq6vEZGRKmPLKqqrN7e3IJ6XJqKRJ6enMqyZMrKzEJCRBoaHO7mzKqibFZWVHJydG5ubLKytB8YBM7OzNbCbPLy9HZ2fPryxObm5OLi5GZiTDoyFBISFIJ+bDgsDKaKJBYWFOTi1G5qaEI+LBYUBAoGBPLu3AYKDN7azM7O1A4KCAYCBHBe2Mz00FhgHHhcbN7AwEJcTMR+LKqylKLotK5+5CZi0BQUIAQEGIp4pHAcgOyotDgubCg6MFZ4YAoOCBQcHIpWHLqgwGZsnHaWmKaikEJKZKRwdAgYIHZ+fLrE7B4UHBoYNMJ+dHByYHpqaIC2hGZwaOLO1IqgiFQ+TNjO3GZGWLKAHKCMuKymPLamXDxKSH54HLzOhKZuLIpyiIRCHLrc0FRGbKqcqNTi1FZgfAgIMFRMQIJGdM7UhKLEnFpaaM6otFhMVOzMNDAiSGZ8cMrQ8JiisKByrMzyhFZmYGxCHNbA8KaMnDA0TC5STBIwaLbmNF5GlLqwzOR+yG6k4Oh+SFhCLLrEqEZQSIyAJMrU0AwMGNDAxMoygFZgRHZ+ZMCmFAIGCBIQYOTe4DYiLMSo7CZiiMzA2BwoDMCwtNbOyEQmPKCCDKK06CYgwBgoKBw4HIqOeG7ipIqWsLqonBJiOKKoxJyyoNze9MzArEJKMPTI2DgOQL6+0Fh2HGhcfFqkhIx4CEQ4ULrKxIpgiLastDh0OG7CJLp+rMDANNzO9AowHLS+xGZsgBIwRCTCgFBaYMrg4HB+fAICBA4ODAYGBA4OBAYGDAICDAoKDAoKBAAAAAj/AP8JHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIENazEdlggsDGmhgySeypcuXMGPKnEmzps2bOHPqdHjvwZEXNSLgKKFgBb57O5MqXcq0qdOnUKNKDXlPA4IaL/79SCLjAAR8U8OKHUu2rNmzaJXeo6JBw4QhJ2jEWHDAQNq7ePPq3cu3b857gJEKvDehBBQjfhMrXsy4sWO8gfMZwLFhxePLmDNr3sy5Yr8QQlYAACFjxInOqFOrXs067QQFB5ZEiYKAhuDWuHPr3s37IxYVFBSAwDGDAMveyJMrX478Xr58/U4AwLHDxe2n+FZQYPAPhAIVQ64z/x9Pvrx5g4EBY5kBBcnxp/cMlNiw5F8QGRtACHl/vr///62lB1g+MZigAFhQ3TOECioIYYABRiDAwwshAGjhhRg6JiBgVMxgAgX9RDVgPoJJRhliGaao4opl9WRACENQgUUISMiwgADiNQUYQYSVsIAKLAYp5JBJ5QNADSUwMAIDJciwBAVYiHUPPnC5QMEGSbhA5JZcdinSPUIwEFQEQjFgBBY5PoUFEiCUEMEGNQjAn5d01mknT/mc4AIBQhDgwhAkpunUCQFEsMQCMihwgqB3Nuroo2jlM0QINKwwAg4BnAbpppx2OlVkLrywgQqMemrqqah+mV4/MUShQIipxv8q66wZbYgPBVCMgCCtvPbqK3rQCahBElEgUeqvyCaLGwkFJKDEsRz1YwQDFKggAHA1RPECDcp26+1uIlwxwJwi5aPCAQsssMECPByAgQbkfivvvDgN+IAQ/6zgwlEIC
YiFASsIkAMN/A7m3AMErNADED6QcN2GO9ZKBQHXAgCAEQZQETG9HHcsE2D4CACCfUHUcMSiBwXWjxAz4HDAy0kcMQFSIBsBAplBEAFDFSj/A3FgGo34z3PpeWz00VTlI0AEMoBAwQhMQ4leYAQkscELFDyNrtTOqXAfAk44gQC7SIjh889AI6322lIRBsICATzQDxUCHCADjjwGhk8AJiD/EAI+c0OwAA4aqIfAhyf0E50CJrzwwNloQ8v25JTLZAQPNVjnMz4YmIDBrpCvhYAFxg6kAZlCAHbCCzcOdI8KPESwxYauF1357bjLlA8FFjAQ5WAqLLDDBLXrjesMJzzXj9cUIoUFA1DEQMVzVLQKgwQp2G5w2rl3771H+MxgQQAIAkYAmXaFHpgBOyyBABIQUFBDnLDmY8QBQYwAgRZQ13BBAS0Yg/YE9L0CGvAiWDhcDEIUGBfgYAn4Ut+AlgaFDcgACrnSlEDwgYQlQEEGF5QBEgZQABZwIXIbO6AKV7gQLPzABEfoR3poUAMZ5EB9Z1tTkuAXgCTUAADlC0EA/4gCAQhc6gdSSAAAA4U2ngwQOiRKGQF9prwosvCKqKLC4UCUHgdCEIdTqtEIHkAifKhgCQdYAUuwgIEgxABN98DClV7wgWY9K3IM+Vk+5PKCErwgBjTgz8/6sQIFlKAECACP5LDIyC31QwEW+Fx6chCECGgJYoWJghGANgToUeAoQjhABAoXmNMFAQASEBcTIbYQPRJgB3RJArp2sJ/tCQgfEDjABnBQw0Q9rpHAbNQ9kGCCH4RnQBCIAghOAzEHVgZonMvV9FQggySEID0hsJoKpmDHgkwxIT8bwgx4wAAXPEADI4jCD5j5s1AuAQITeIAAasAD9wTznnXKARpzEP+YISAACjE8G9EAk00oQMCKIdhBFGJwlBUEYQn8DAwlg7ACf4gABSLoQvFS2K+iOUcAS8CBAWjmghqqYJU7WgsGXEUFgZgLS9zCp0yJpNIJCSAEGqDA4Ebqs98IoKVxZBwOVECDEBBAAVCIQOqQsgMTgGAFIQjBCkDQuJkxqwAOY6VCvrk3KGAAVv/YmwlmoDHt0eCBePNZYXgApJm6lUWAoQF3DpCECCwgAiqQoUBCiQMCDMgAP0AUDmTZrq9QcZ48WEISknCoJMjpH10QQQUa4A/atVJ7z4sCBG7zuiaUYFHc+0cOeIAD2wxkPRagwFtXm6IBTQAJP9jBCzAgBBn/CoYGI8DANQdEIwTsQLYYyAG/ANMPA1AABL91GrxopoQC6MAK39yq9k7wA7ZeRwAmqMHMQisAz/YsrBjogAIWydry9mZVQ3jACaa3sTwBKnT9SK962UszvZ1gAm85SsTyMQAUSEAMoVVIPrBwggK/9wRvIxVBVgAFHOw2hUbowA7CM5C9dWAG5DWvhlHFhQTooAXkvYcLZuDDGvxATgj+0XVN4GDt/cMIFigBhTcYgA6MIMMbzrGnUMACiIiYATjAQQRAIID4IiAKAOCsEUyQhO2mkMFNvg0+FNABDOj4ypTLE34ncIKjUGEEJojBe/KBhCaA4JgpNAAa/TqQIbwQ/wlYjrPavtmPZL5Ag9SNQgy/ieB6HuceBMDfDeVMaI/9zAVJWAAS8JEPfADgrqS8Rz/UGyhzLSAJBnjOA9I5gpYW+tOmYpazFjlIAIBwBjGYwRJEyMB85GAHM3DyAxgwuABQgHU1SB2od52alDKKZhPxNaMagIJxPeRnVFBBtqCAORUUbHmQjkwIKBCEKEBhAQioJa+3nZl8TCAHKkACElSgAb0OJB8IMwISjlBbjh4kHyFQNxKMQMYUTskAN8gAB7SggYJdloCTNoCD5BaxtRjABbbVGw34pIH3cvvhjlHaDh66hHVF4AhDcJ1cD8CDJljgBw8IcEH6IYAmgVAGL//IgRXVKr8lZMELWQBBpoOWJhxD/OZ9yQcESqA/I6hAARvQc/lcMAIEjKAGjQu5u10nhDfNQAUAkFAJCjeYCaj6BUgAQBEy4AHT4vzrK+qJC+hbvSjgQEsCiQ4WzHg10DKKCgpwVXjQ/YMofHKDMdhArJ9jAx2wIAVgD7yKCg6Y09mwdj4TAMrd3q+SymAF6VFBFEpAvHvQ8ADGURwJEpAAhwn+860FjAEiEIQI1s4IbRe5SxVvzfQEGvNda1oOBJC1I3hgsvoAve4tBDIK8AAEvzx96pf+j34gQZ1ovkc2N2AE6OBqyEImPRgcUAHo7v7653GOEe7TfG/eA/V3Jr7/QI4HR8A8YAc8AECjFdAEFsdgBUJwQhg6kIG/Y//+43F1DTZAAU/nDfyMhxCqNQJw5DPnxwMHBXdNAFD8gg9HEAVPUAEtgH8UiByu1iQUMGPeBICqJxAPiAAZNxDLd1JiVRcEEWgdwATOUoEsiBuuxhUZKCjfN3xLZy4yQHkEMXqlJ2nHVwMxJRAOtAAcUGwtWISp8YJLEAMaiB6ot0wuRhAa8ECm932Xtls58DLpIxCBFgF1lABG+IWakQ8rUANLcAQFmDc0o3hOKH4JBAWqJRBfBgUKgCb/sDrtcRzGp0wacFEiAIZ+2BhgggNMJgA04AIuoAH7QjPwFhrpFAEQ/5ADBOB/BmE/q0YBObACGHBXKudSKrABbpQDORADQbAAEPAFzPKHqKgY/YABFmABu1QDQRYBJaBr90AosREFHWAC9JFrjIJLb/JQg5NXUgYBdrUEh3IAGOccA5CKzMgXYqgAIzACMzCN04gBmrMWKhCN2hiNFOB1CIEPQnAECqAASGAA5lZh4TiOluhvzdiOedFo+BCP8kgFjEYQ8CiP8hgvB9EP9dgv0AE4NueOAjmQBFmQBnmQCJmQCrmQDJkXFdCQEIkcoqYEEVmRNOEc0MFAKfMcLoUnGQk5rpORijOS+5ACxLaMFpmSLbEWQgABATCOMSAE7HgCAhADGKAA//+ABMulEMUFAMGhABQgABoYMtA4juP4DzfZhSq5lCBRZ7pUSUvQLskoEHJUcf/ANKTlbDK4AlazBKS3AEswAl5HBTvHSzXgQxtgAUjWX0zZlhzBQe+TAxogBBQgA/UEKydAATMAAA5iKVGQRvpoP9QiAAZAABCAA3L4O3FUiDRQVARwOMOjBF7olpR5EQoCKMbTOJoiKRozENTlVZLII1SQPJFhBEsQBATgfYDhTybgHvxVmbBZEYT3fTxgTQlBZlBgTOC0IWslAGgIGO5kerE5nLIJGBAABTsQfLYUTQQoXekxel+0Uf8QAFDgO8R5nRQBGBMAAnYHOgLSdDJwUrv/mR5UEAC0oUECkk1RoGDY2Z7HhgUBIDyas1EhUF0KsISqSVymtgSPpT5Gcmne6J4CmhBUcAQygANqNDUTMAILAALWIYOrci4yYFi2VIs/ED3nOKAaShAFeqBFljIL2jSZpnqrYgSGsmhAIyBXuIMduKHX2aE48KHoEaIgsFwkSlwqQHoQUFYQszdRwADJF5AuGpFrYaA1sAIZOhgPoAAygAAugFJSJC06ql+HhgNsBaVD2p5YEAMyEAFqNEWEAXQv8KDR5TrLYyhf
QTuR8Wg+WKZZGps6twBNsAMqcIkCYAQEwC/wiUEzIAABc6fm2C8rEAEW4Ih2KgBCeR0PAAJubUilb+qeWtQBFkAXD4UfDhpXNZCLXWGM+IE8CNEPFGACHdAuFNelaeUzoRQEESV+j+qWrnYE8AMB4rZuZ8IhAgCrRTSrRyAAoGOPLiCruSpuRwAB1+Q6J2AtVCqkrcqQwJY3eYRjT3g2BkMQAQEAIfkEBQMAAQAsAAAAAAEAAQCAAAAAAAAAAgJMAQAh+QQFAwABACwAAAAAAQABAIAAAAAAAAACAkwBACH5BAUEAAEALAAAAAABAAEAgAAAAAAAAAICTAEAIfkEBQMA/wAsPQFkAIMAjwGH+vr56NF2jIyMPj48YlMUsJUshISEpaWmuZos3t7c0NDQro4osbGxknskQjYMe2ccvr6/x8fHy6Y0eHh4powoIiIkZGRkKCgn88xFXFxcfn58mYIkbFkWSz4McV8Z7MpC6urs4uLkVlZUzq011rI5kpKUHBwcXEoUUUQOEhIU2bc77+/vJR0EQkJE5ubkMDAxmpqcwaExTExMiHEgup8uinYclpaUsqJk5sJAMScJ0rI3Ojo8ampsxaczFhYUcnJ07NBfnp6cup5M1tbUbm5sqpYs68U8KSMHoIUn2trcRkZEGRUEDg4L7cVECgoM5sZUNjY0UlJUgm4cSkIM9tJE4r48CgYE5r484b5E6r482r5M3rY63r48Eg4E5ro8zrhYFjBo6PrksqiszNqosKYsBgII4tTYnGQguMrs2vbkwqTs6K44uO7M2NDYusxMQjBs6tSo5Jx8ghyA3LgQYFQoBAwQIDgc1njIMlJMfuKAkLiIpIo8oJ68zOLQ6u6w4rY4IBg0mp58cmpAIDBEQGCI0sLwSko0fGQIBhgUyNTYlHAsQjYo7rKAyrzAYGIYYqKkwHh0MjYQnooM1pxUTFoU0sKkLGDQ2L4oYHQYzrAohHacdFpovr7QkKLkTkpkGCgohpIkDgwYckZsLCDAlLQk+LI0wGwsCjAouK7MOiY8QHI4+ujk6nhI3NIUYHhszNr0moi46ujUooiMwqQg9NTcpHYgfqCczPTM8pww7KTMhGB88MLM3tT0AgYI2MLIzsA0UCgYZqIkNDZMglzUUDBEzqS4KMCAFGBQkIh41jCAurQg+MwUhHZckJZc1pwQaFpc+PrY3tTAzNTAfoqY0tJIalqU1u5MysLYvq6sCggwuMKkBAQYuNS4NFIUnEowbnYYnFyEnG5QYGh8foq0uJq4anCYlHo4Slw0Chg0cnZcup4MIAggglQYYlp0vsjEQhBAlHYM0sKAssLE5r4oRFxYFhBg+syA3ro0BgYMAgIM3ro8AgIE4ro04ro8BgYEAAAACP8A/wkcSLCgwYMIEypcyJDhvocQ/znxt6+hxYsYM2rcmBDiQx8/BGRgwrGkyZMoFXrcx0QDAAARLlRMSbOmTYcQ/ckYMmQFBBMzbwodKnTfBQYMJij4GZSo06cbWRpQIGOAgn9AoWrd2tBfhiEG/L1IwpSr2bMD972AwHRHgrJo40Ldl0IA1YcD3maVy3eoPxERNFDcB2UIBB9N+youGYSJUbYtUqTwIWPpCyb+Fmsu6UMngBVJFAxREOJlEgYtNqvWqJYB2wgRICj4HCHIjtW4GVoQ6M9Ehd8mTGRIEKFF59zIF658WPhw4uTQ0y5v8bbC8+jQl+8zQcQCSezgw4v/H0++vPnz6NOrX8++vep9/pw4YeIkIk749DNLj09y4nX3CIlFhAAHQPAPYv9J95ATGTBwgAwz7eNECxPAcAADQfAgE4AOmQDDCi6AAEASMiUoUE6pfTbBTH9FAMIQESgQImomckhXBhOIYMGL1pnokQk2wAbCDw/9o5MBFrzgmwwHrMBAChwqN5gJSQzRo0oPMTFBBBMYMGSREtb3D0Q7JJHAAFEq95AJIVhZJEI5RRGBDRdM8OVzEJkAQQhRpInldm1eCedDR0HwghN2Evmmgi8MkcRtfh70Y6CLGvRQXRFkIJEGdyaWpQEAMOBEpJZO6maC8GWggABQOsGpop4y/5gAVaSWWiSbp3a0FgMXCIQoCETYdyKDCiRhQY0ALodrj5UO5IQBLmhggg8+XCDACgaY4FhaxBqrX60K5tkmUMISxEQQL6Wr7goHQDlmt8eCW5CyCQxBbrO8ZQDDvjDYcMBsCthgQWYsqapABviCm5M/F1R5gT+D8dZrRfjJN5EJAoAwwUTvWqDAwRCHjCyAYmkwwbUgCPDDBC/MBEUET564kqtf8lYZABBYIEMGGViQQa/y+rojCESHCEISP+inhAs/hcuSBkjPhGiILhhN9BBEfKtwBTyL4LUIPAP9DxNRKKH1SheIYEJaFezM89sZiCB20MrRbffdeOet99589//tt5rwpaBtjTkxQW0KmDm9Et4eDeDiARVgyfAEsiURwQFKRKhduQrn2SRMG8LpjxIQgKDAAUG4trHMm4/sHkRM/HA509YpdAEEWHfmDxMmpKB5666vlxPpJewUgaAGPavxYIsPynmknhva8PEJbxfBEDswccEAL6QQMUGtB50l1AhfQBzy4A9g+cnXX56BmIo/D71OATv2QgjUX/cXCCssVYIADABRvOKXsDQRyjUvGJP5IIC+gXhlBaHaQZYmAAAFzC18QWOCAUJwLEIZpoH5AoELNDWmffgAAiO8znLktY8BDCEBGdjBAFrwAxcMgQdQcFdBdBKCBCjBXDZYnvP/Csghf7SgJytIYhJf0r8+WeoCCvBhQWCgMVTJL0rbscAEtrhFGwDABSWwQOROZK4DuCBYA6lABEIgAop5yiPiW06jGBiUCY0pX0xrAX1MMIF/NA14VzTgSu5HPZtVkGAsyZgLHBSBCjoRkMFrzyBhJrbRhYAB+rmUBRhgJgXAAE2s21zePOKP3zRFLLUrIXyqdYEKfIeAkfybLLWCqlna8pa4zKUuiRIUijWkeZxrXt54d4EXvOAC2/oTS4p5TBNQJJRwvJs/BgADGSUxCTbYgdbmtaAWwCAJSQRBBCAEzUBGigkCCAEEBHAyCKxgCJkblBMsEIIkHGCLAGyjKlco/00ZjmpMFSgBAA6wzfTBaADM41g5iUiqSrWwgq80CBNKEAKEwSeaZNzn3gYJUThVIDTU2pkMnKlCczbUI074QagKeqIXjIhyRDPdBHwgKZNCLyeVSYIMOjKAl9SGBxlIpwuSZqtYvu4h/tjBnjQAP0sp4TNBIJc/eGA6SIHPpqTyBxQY4AID+O4/Dw0BhCDiAwYAq6h7myYKDYCg6rk0CS2D3YcMgNa8JXVPbBUm+CqQgARIECIpaFIfuclQP8FnqyFgKj8lGsQJDCapURTBVeFYWJINwJ0HuMB8JuO7hFhyCBmY1gCaBIO17VM7dDvXSyAQhAO4lgECGCNCUpoEF//EJgkgYAAUYIlV9jxrNB/jCU8Y0LKO6AQGEIDZBGR7WtSK72zwYWlRSyrKXVq3IyYx6nW3y93ueve74A2veMdL3vKa97zoTa9618ve9rr3vfCNr3znS9/62ve
++M2vfvfL3/76978ADrCAB0zgAhv4wAhOsIIXzOAGO/jBEI6whCdM4Qpb+MIYzrCGN8zhDnv4wyAOsYhHTOISm/jEKE6xilfM4ha7+MUwjrGMZ0zjGtv4xjjOsY53zOMe+/jHQA6ykIdM5CIb+chITrKSl8zkJjv5yVCOspSnTOUqW/nKWM6ylrfM5S57+ctgDrOYx0zmMpv5zACuwnxpMIIrwLf/AASQggqq0A/3IgAFHNABneusXiR8oAoF0EETVKCPK+gjvRTowQdiQIEqkKABMTDCelFAgynkoABSWIIH2EuAArDAAQhwwD4cEIP1bmADS3hAAbrwDwes9wgI8MASKDCDijxgvR4YAQccAGd/sIAC6i3ACIxQABpwoQEcoIAOZpDeGqhgBBT4wBUKIAEMfGAE6QX1DAgwggc4YAM6SG8D/oGCApzAAxToAhMaAOz0apoCHaBAA/ZxhG6vdwEb6EABUOAED/RAvToYAQZiAOkO5EDe/zxvDRqgj2HrgwLC3nS2k90AI9CgAEbAwL/R24ApLFzVn9YBDcad3gdQgAIb/2gAE1SdXi5UQQcS6AECKEAAFiDg1urFAQ7uoQI4EwAB7A0AEKpQBRXEIAYFCDoQsPAEIGDACOFWLxZwoAUsBEDoTVhvP7agDxWQQAg3AEIA2KuDOeugAQ/YuHqN3uZ+MJrZ691CD/SsD0fjPL396AfRRxADEhiBAixQrz7qrgKUHz3pgj904eWdg0snXu8k6AEH2jt4vV8hBh1gbxMqT3gCaL7yW1BBzz9P+Aage7362Hw/rkACFFBe9ax3b+rzfgUVeMDzqG8CnctOg9dXYQsNQAAG3nuFEbjavSpAc0M6cGj4MgEJkn7vPpYgED67lwVVaIL125uD2b/XCgTI/v/218uCSNP5vQ4wwubf6w8U6Lz57J0+BYygZvSrf/zq9QcBmoAD+K93HyyAcfXnXh2AA0bQD/63Xg9gBIaWgDk3X1TgZso3gRRIFEAnX0+HgO6FAj1wgHnXDyTQXgyod8+mdun1AUbABQiIAAvQXpunD/2gAkgAd+rFgJXnaP+QfOqlfYNHdFfQD4iHdz14BTJHeTBYex4weelFf5ynDzSAAriHXlwwAoTWcDrQA0hwgej1bCogAQGnAgaYdepVBSMgbPRXABSAbemFACqgAzGgAkbQAx0Qb+rVACSQBV4wAme3BK6XXtyGA882AxTwAEewXm2GAEVAAzWAhusVA4D/hgJIEAMjgANqWHI9sAEEEAP6gAMSIHHotQQnIHpUOAIjUAN9aF7M1gExoHpIF3kFoIPnFQOL1gCkSIWDmF5tpg8IIGwjoA9nJ24FsAUkMAJ0FgN11nvohQQ9QAKSiAMIWAUbsF6D1gNtmGgIEHjp1QNXIAGv2ADfVgBziF7ESAMEkInIJnB6d14m92goQAE4oANbcIDp1Y6U6IgjNwL4V15E2ABIcHYEYIKxqAIbkHbvqAM4sF5XUAU0EAPFx4YOSF51NwMO8AB894bq1WkbwAJMQAABtwX5OF4cIAFIQAAeEAM4oIIPKV52aAR8pwK6pwIfiF79YHEx4HIxSAJ1Z4cDYkheObABBFcAezZ4oieB5tUA7LZ6CLh6MeABDxCFIAlnDXAFP7h6OEByWygFtHgFOmcEtqdeV6ADFIAEI/ABH6ACDYCN5+WDkscBNHBzaCmTUCdqVmAF0lVecXh88PUArOZeAQEAIf4AOw==)", "_____no_output_____" ], [ "---\n# Summary\n\nYou have seen how we can predict what happens in a discrete dynamical system with an update rule of:\n$$ \\mathbf{a}_t = \\mathbf{W}\\mathbf{a}_{t-1}$$\n\nThe most important takeaway is that inspecting eigenvalues and eigenvectors enables you to predict how discrete dybamical systems evolve. Specifically:\n\n* If all eigenvalues are real and have absolute values above 1, the neural activities explode to infinity or negative infinity. \n\n* If all eigenvalues are real and have absolute values above 1, the neural activities decay to 0. \n\n* If all eigenvalues are real and at least one has an absolute value above 1, the neural activities explode to infinity or negative infinity, except for special cases where the initial condition lies along an eigenvector with an eigenvalue whose absolute value is below 1. \n\n* If eigenvalues are complex, the neural activities rotate in space and decay or explode depending on the amplitude of the complex eigenvalues.\n\n* Even finer details of the trajectories can be predicted by examining the exact relationship of eigenvalues and eigenvectors.\n\nImportantly, these ideas extend far beyond our toy neural circuit. Discrete dynamical systems with the same structure of update rule are common. While the exact dependencies on eigenvalues will change, we will see that we can still use eigenvalues/vectors to understand continuous dynamical systems in W2D2: Linear Dynamics. \n", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown", "markdown", "markdown", "markdown" ], [ "code", "code", "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown", "markdown", "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown", "markdown", "markdown" ] ]
cb57acdbe2261f431acf66e6480e2d1296232b80
235,322
ipynb
Jupyter Notebook
book-d2l-en/chapter_computer-vision/semantic-segmentation-and-dataset.ipynb
linked0/dlnd-deep-learning
f67a5be7a700c1a30fde71ebbb6a72c3bdd09fb7
[ "MIT" ]
null
null
null
book-d2l-en/chapter_computer-vision/semantic-segmentation-and-dataset.ipynb
linked0/dlnd-deep-learning
f67a5be7a700c1a30fde71ebbb6a72c3bdd09fb7
[ "MIT" ]
115
2020-01-28T22:21:35.000Z
2022-03-11T23:42:46.000Z
book-d2l-en/chapter_computer-vision/semantic-segmentation-and-dataset.ipynb
linked0/deep-learning
f67a5be7a700c1a30fde71ebbb6a72c3bdd09fb7
[ "MIT" ]
null
null
null
474.439516
121,576
0.938072
[ [ [ "# Semantic Segmentation and Data Sets\n\nIn our discussion of object detection issues in the previous sections, we only used rectangular bounding boxes to label and predict objects in images. In this section, we will look at semantic segmentation, which attempts to segment images into regions with different semantic categories. These semantic regions label and predict objects at the pixel level. Figure 9.10 shows a semantically-segmented image, with areas labeled \"dog\", \"cat\", and \"background\". As you can see, compared to object detection, semantic segmentation labels areas with pixel-level borders, for significantly greater precision.\n\n![Semantically-segmented image, with areas labeled \"dog\", \"cat\", and \"background\". ](../img/segmentation.svg)\n\n\n## Image Segmentation and Instance Segmentation\n\nIn the computer vision field, there are two important methods related to semantic segmentation: image segmentation and instance segmentation. Here, we will distinguish these concepts from semantic segmentation as follows:\n\n* Image segmentation divides an image into several constituent regions. This method generally uses the correlations between pixels in an image. During training, labels are not needed for image pixels. However, during prediction, this method cannot ensure that the segmented regions have the semantics we want. If we input the image in 9.10, image segmentation might divide the dog into two regions, one covering the dog's mouth and eyes where black is the prominent color and the other covering the rest of the dog where yellow is the prominent color.\n* Instance segmentation is also called simultaneous detection and segmentation. This method attempts to identify the pixel-level regions of each object instance in an image. In contrast to semantic segmentation, instance segmentation not only distinguishes semantics, but also different object instances. If an image contains two dogs, instance segmentation will distinguish which pixels belong to which dog.\n\n\n## Pascal VOC2012 Semantic Segmentation Data Set\n\nIn the semantic segmentation field, one important data set is Pascal VOC2012[1]. To better understand this data set, we must first import the package or module needed for the experiment.", "_____no_output_____" ] ], [ [ "import sys\nsys.path.insert(0, '..')\n\n%matplotlib inline\nimport d2l\nfrom mxnet import gluon, image, nd\nfrom mxnet.gluon import data as gdata, utils as gutils\nimport os\nimport sys\nimport tarfile", "_____no_output_____" ] ], [ [ "We download the archive containing this data set to the `../data` path. The archive is about 2GB, so it will take some time to download. After you decompress the archive, the data set is located in the `../data/VOCdevkit/VOC2012` path.", "_____no_output_____" ] ], [ [ "# This function has been saved in the d2l package for future use\ndef download_voc_pascal(data_dir='../data'):\n voc_dir = os.path.join(data_dir, 'VOCdevkit/VOC2012')\n url = ('http://host.robots.ox.ac.uk/pascal/VOC/voc2012'\n '/VOCtrainval_11-May-2012.tar')\n sha1 = '4e443f8a2eca6b1dac8a6c57641b67dd40621a49'\n fname = gutils.download(url, data_dir, sha1_hash=sha1)\n with tarfile.open(fname, 'r') as f:\n f.extractall(data_dir)\n return voc_dir\n\nvoc_dir = download_voc_pascal()", "_____no_output_____" ] ], [ [ "Go to `../data/VOCdevkit/VOC2012` to see the different parts of the data set. The `ImageSets/Segmentation` path contains text files that specify the training and testing examples. 
The `JPEGImages` and `SegmentationClass` paths contain the example input images and labels, respectively. These labels are also in image format, with the same dimensions as the input images to which they correspond. In the labels, pixels with the same color belong to the same semantic category. The `read_voc_images` function defined below reads all input images and labels to the memory.", "_____no_output_____" ] ], [ [ "# This function has been saved in the d2l package for future use\ndef read_voc_images(root=voc_dir, is_train=True):\n txt_fname = '%s/ImageSets/Segmentation/%s' % (\n root, 'train.txt' if is_train else 'val.txt')\n with open(txt_fname, 'r') as f:\n images = f.read().split()\n features, labels = [None] * len(images), [None] * len(images)\n for i, fname in enumerate(images):\n features[i] = image.imread('%s/JPEGImages/%s.jpg' % (root, fname))\n labels[i] = image.imread(\n '%s/SegmentationClass/%s.png' % (root, fname))\n return features, labels\n\ntrain_features, train_labels = read_voc_images()", "_____no_output_____" ] ], [ [ "We draw the first five input images and their labels. In the label images, white represents borders and black represents the background. Other colors correspond to different categories.", "_____no_output_____" ] ], [ [ "n = 5\nimgs = train_features[0:n] + train_labels[0:n]\nd2l.show_images(imgs, 2, n);", "_____no_output_____" ] ], [ [ "Next, we list each RGB color value in the labels and the categories they label.", "_____no_output_____" ] ], [ [ "# This constant has been saved in the d2l package for future use\nVOC_COLORMAP = [[0, 0, 0], [128, 0, 0], [0, 128, 0], [128, 128, 0],\n [0, 0, 128], [128, 0, 128], [0, 128, 128], [128, 128, 128],\n [64, 0, 0], [192, 0, 0], [64, 128, 0], [192, 128, 0],\n [64, 0, 128], [192, 0, 128], [64, 128, 128], [192, 128, 128],\n [0, 64, 0], [128, 64, 0], [0, 192, 0], [128, 192, 0],\n [0, 64, 128]]\n# This constant has been saved in the d2l package for future use\nVOC_CLASSES = ['background', 'aeroplane', 'bicycle', 'bird', 'boat',\n 'bottle', 'bus', 'car', 'cat', 'chair', 'cow',\n 'diningtable', 'dog', 'horse', 'motorbike', 'person',\n 'potted plant', 'sheep', 'sofa', 'train', 'tv/monitor']", "_____no_output_____" ] ], [ [ "After defining the two constants above, we can easily find the category index for each pixel in the labels.", "_____no_output_____" ] ], [ [ "colormap2label = nd.zeros(256 ** 3)\nfor i, colormap in enumerate(VOC_COLORMAP):\n colormap2label[(colormap[0] * 256 + colormap[1]) * 256 + colormap[2]] = i\n\n# This function has been saved in the d2l package for future use\ndef voc_label_indices(colormap, colormap2label):\n colormap = colormap.astype('int32')\n idx = ((colormap[:, :, 0] * 256 + colormap[:, :, 1]) * 256\n + colormap[:, :, 2])\n return colormap2label[idx]", "_____no_output_____" ] ], [ [ "For example, in the first example image, the category index for the front part of the airplane is 1 and the index for the background is 0.", "_____no_output_____" ] ], [ [ "y = voc_label_indices(train_labels[0], colormap2label)\ny[105:115, 130:140], VOC_CLASSES[1]", "_____no_output_____" ] ], [ [ "### Data Preprocessing\n\nIn the preceding chapters, we scaled images to make them fit the input shape of the model. In semantic segmentation, this method would require us to re-map the predicted pixel categories back to the original-size input image. It would be very difficult to do this precisely, especially in segmented regions with different semantics. 
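(Added illustration: interpolating a label image makes the problem concrete. Using the constants defined above, the midpoint between the 'aeroplane' color and the 'background' color lands exactly on the color of an unrelated category:)\n\n```python\nimport numpy as np\n\naeroplane = np.array([128, 0, 0])   # VOC_COLORMAP[1]\nbackground = np.array([0, 0, 0])    # VOC_COLORMAP[0]\nblended = (aeroplane + background) // 2\nprint(blended)  # [64 0 0] == VOC_COLORMAP[8], i.e. the 'cat' category\n```\n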
To avoid this problem, we crop the images to fixed dimensions and do not scale them. Specifically, we use the random cropping method used in image augmentation to crop the same region from input images and their labels.", "_____no_output_____" ] ], [ [ "# This function has been saved in the d2l package for future use\ndef voc_rand_crop(feature, label, height, width):\n    feature, rect = image.random_crop(feature, (width, height))\n    label = image.fixed_crop(label, *rect)\n    return feature, label\n\nimgs = []\nfor _ in range(n):\n    imgs += voc_rand_crop(train_features[0], train_labels[0], 200, 300)\nd2l.show_images(imgs[::2] + imgs[1::2], 2, n);", "_____no_output_____" ] ], [ [ "### Data Set Classes for Custom Semantic Segmentation\n\nWe use the inherited `Dataset` class provided by Gluon to customize the semantic segmentation data set class `VOCSegDataset`. By implementing the `__getitem__` function, we can arbitrarily access the input image with the index `idx` and the category indexes for each of its pixels from the data set. As some images in the data set may be smaller than the output dimensions specified for random cropping, we must remove these examples by using a custom `filter` function. In addition, we define the `normalize_image` function to normalize each of the three RGB channels of the input images.", "_____no_output_____" ] ], [ [ "# This class has been saved in the d2l package for future use\nclass VOCSegDataset(gdata.Dataset):\n    def __init__(self, is_train, crop_size, voc_dir, colormap2label):\n        self.rgb_mean = nd.array([0.485, 0.456, 0.406])\n        self.rgb_std = nd.array([0.229, 0.224, 0.225])\n        self.crop_size = crop_size\n        features, labels = read_voc_images(root=voc_dir, is_train=is_train)\n        self.features = [self.normalize_image(feature)\n                         for feature in self.filter(features)]\n        self.labels = self.filter(labels)\n        self.colormap2label = colormap2label\n        print('read ' + str(len(self.features)) + ' examples')\n\n    def normalize_image(self, img):\n        return (img.astype('float32') / 255 - self.rgb_mean) / self.rgb_std\n\n    def filter(self, imgs):\n        return [img for img in imgs if (\n            img.shape[0] >= self.crop_size[0] and\n            img.shape[1] >= self.crop_size[1])]\n\n    def __getitem__(self, idx):\n        feature, label = voc_rand_crop(self.features[idx], self.labels[idx],\n                                       *self.crop_size)\n        return (feature.transpose((2, 0, 1)),\n                voc_label_indices(label, self.colormap2label))\n\n    def __len__(self):\n        return len(self.features)", "_____no_output_____" ] ], [ [ "### Read the Data Set\n\nUsing the custom `VOCSegDataset` class, we create the training set and testing set instances. We assume the random cropping operation outputs images in the shape $320\times 480$. Below, we can see the number of examples retained in the training and testing sets.", "_____no_output_____" ] ], [ [ "crop_size = (320, 480)\nvoc_train = VOCSegDataset(True, crop_size, voc_dir, colormap2label)\nvoc_test = VOCSegDataset(False, crop_size, voc_dir, colormap2label)", "read 1114 examples\n" ] ], [ [ "We set the batch size to 64 and define the iterators for the training and testing sets.", "_____no_output_____" ] ], [ [ "batch_size = 64\nnum_workers = 0 if sys.platform.startswith('win32') else 4\ntrain_iter = gdata.DataLoader(voc_train, batch_size, shuffle=True,\n                              last_batch='discard', num_workers=num_workers)\ntest_iter = gdata.DataLoader(voc_test, batch_size, last_batch='discard',\n                             num_workers=num_workers)", "_____no_output_____" ] ], [ [ "Print the shape of the first mini-batch. 
In contrast to image classification and object recognition, labels here are three-dimensional arrays.", "_____no_output_____" ] ], [ [ "for X, Y in train_iter:\n print(X.shape)\n print(Y.shape)\n break", "(64, 3, 320, 480)\n(64, 320, 480)\n" ] ], [ [ "## Summary\n\n* Semantic segmentation looks at how images can be segmented into regions with different semantic categories.\n* In the semantic segmentation field, one important data set is Pascal VOC2012.\n* Because the input images and labels in semantic segmentation have a one-to-one correspondence at the pixel level, we randomly crop them to a fixed size, rather than scaling them.\n\n## Exercises\n\n* Recall the content we covered in the [\"Image Augmentation\"](image-augmentation.md) section. Which of the image augmentation methods used in image classification would be hard to use in semantic segmentation?\n\n## Reference\n\n[1] Pascal VOC2012 data set. http://host.robots.ox.ac.uk/pascal/VOC/voc2012/\n\n## Scan the QR Code to [Discuss](https://discuss.mxnet.io/t/2448)\n\n![](../img/qr_semantic-segmentation-and-dataset.svg)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown" ]
[ [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ] ]
cb57b181a1ba595038da1781ed4f48b4640eb8ef
247,247
ipynb
Jupyter Notebook
notebooks/lssxcmb/logHIvsHI.ipynb
mehdirezaie/LSSutils
aa0505b4d711e591f8a54121ea103ca3e72bdfc8
[ "MIT" ]
1
2021-12-15T22:38:31.000Z
2021-12-15T22:38:31.000Z
notebooks/lssxcmb/logHIvsHI.ipynb
mehdirezaie/LSSutils
aa0505b4d711e591f8a54121ea103ca3e72bdfc8
[ "MIT" ]
3
2019-08-19T21:47:47.000Z
2020-08-25T17:57:19.000Z
notebooks/lssxcmb/logHIvsHI.ipynb
mehdirezaie/LSSutils
aa0505b4d711e591f8a54121ea103ca3e72bdfc8
[ "MIT" ]
null
null
null
438.381206
58,496
0.943142
[ [ [ "import fitsio as ft\nimport healpy as hp\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nimport sys\nsys.path.append('/users/PHS0336/medirz90/github/LSSutils')\nfrom lssutils.utils import make_hp\nfrom lssutils.lab import get_cl\nfrom lssutils.extrn.galactic.hpmaps import logHI\nfrom sklearn.linear_model import LinearRegression\nfrom lssutils.dataviz import setup_color", "_____no_output_____" ], [ "setup_color()", "_____no_output_____" ], [ "def run_linear(xtrain, ytrain,\n xtest, ytest, \n x, ix):\n \n reg2 = LinearRegression().fit(xtrain, ytrain)\n npred = reg2.predict(xtest)\n print(f'MSE: {((npred - ytest)**2).mean():.3f} MAE:{(abs(npred - ytest)).mean():.3f}')\n \n sfun = reg2.predict(x)\n return make_hp(1024, ix, sfun, True) / sfun.mean()", "_____no_output_____" ], [ "lh = logHI(nside_out=1024, path='/fs/ess/PHS0336/data/templates/NHI_HPX.fits')", "/users/PHS0336/medirz90/github/LSSutils/lssutils/extrn/galactic/hpmaps.py:96: RuntimeWarning: invalid value encountered in log10\n self.map = np.log10(nhi_c)\n" ], [ "df = ft.read('/fs/ess/PHS0336/data/rongpu/imaging_sys/tables/v3/nelg_features_bmzls_1024.fits')\nloghi = lh.map[:, np.newaxis]\nhi = 10**(loghi-20.)\n\nix = df['hpix']\nfrac = make_hp(1024, df['hpix'], df['fracgood'], True)\nmask = np.isfinite(frac)\nngal = make_hp(1024, df['hpix'], df['label'], True)\n\nprint(mask.sum())\nx1 = loghi #np.column_stack([loghi, loghi*loghi])\nx2 = hi #np.column_stack([hi, hi*hi])\n\nnp.random.seed(85)\ntrain_ix = np.random.choice(ix, size=int(0.8*ix.size), replace=False)\ntest_ix = np.setdiff1d(ix, train_ix)", "1552540\n" ], [ "sf_loghi = run_linear(x1[train_ix], ngal[train_ix],\n x1[test_ix], ngal[test_ix],\n x1[ix], ix)\nsf_loghi *= (ngal[ix]/sf_loghi[ix]).sum() / ngal[ix].sum()", "MSE: 9.753 MAE:2.466\n" ], [ "sf_hi = run_linear(x2[train_ix], ngal[train_ix],\n x2[test_ix], ngal[test_ix],\n x1[ix], ix)\nsf_hi *= (ngal[ix]/sf_hi[ix]).sum() / ngal[ix].sum()", "MSE: 9.770 MAE:2.472\n" ], [ "kw = dict(min=0.9, max=1.1, rot=-95, cmap=plt.cm.jet)\nhp.mollview(sf_hi, **kw)\nhp.mollview(sf_loghi, **kw)", "_____no_output_____" ], [ "hp.mollview(ngal/df['label'].mean(), **kw)", "_____no_output_____" ], [ "cl_null = get_cl(ngal, frac, mask, njack=0)\ncl_hi = get_cl(ngal, frac, mask, njack=0, selection_fn=sf_hi)\ncl_loghi = get_cl(ngal, frac, mask, njack=0, selection_fn=sf_loghi)", "_____no_output_____" ], [ "fg, ax = plt.subplots(nrows=2, figsize=(6, 8), sharex=True)\nfg.subplots_adjust(hspace=0.0)\n\nfor n_i, cl_i in zip(['No weight', 'HI', 'logHI'],\n [cl_null, cl_hi, cl_loghi]):\n \n \n ln = ax[0].plot(1000*cl_i['cl_gg']['l']*cl_i['cl_gg']['cl'], alpha=0.8, label=n_i)\n ax[1].plot(cl_i['cl_gg']['cl']/cl_null['cl_gg']['cl'], color=ln[0].get_color())\n \n \nax[0].legend()\nax[0].set(ylabel=r'$\\ell C_{\\ell}~[10^{-3}]$', xscale='log',)\nax[1].set(xlabel=r'$\\ell$', ylim=(0.0, 1.45), ylabel='$C_{\\ell} / Noweight$')", "_____no_output_____" ] ], [ [ "## Updated Galaxy Density Count", "_____no_output_____" ] ], [ [ "old = ft.read('/fs/ess/PHS0336/data/rongpu/imaging_sys/tables/v2/nelg_features_bmzls_1024_old.fits')\nnew = ft.read('/fs/ess/PHS0336/data/rongpu/imaging_sys/tables/v3/nelg_features_bmzls_1024.fits')", "_____no_output_____" ], [ "old.size, new.size", "_____no_output_____" ], [ "np.array_equal(old['hpix'], new['hpix'])", "_____no_output_____" ], [ "old['label'], new['label']", "_____no_output_____" ], [ "frac = make_hp(1024, new['hpix'], new['fracgood'], True)\nmask = np.isfinite(frac)\nmask.sum()", "_____no_output_____" ], [ 
"old['features'][:, 0]-new['features'][:, 0]", "_____no_output_____" ], [ "syst = make_hp(1024, new['hpix'], new['features'][:, 0])[:, np.newaxis]\nsyst.shape", "_____no_output_____" ], [ "nold = make_hp(1024, old['hpix'], old['label'])\nnnew = make_hp(1024, new['hpix'], new['label'])", "_____no_output_____" ], [ "cl_old = get_cl(nold, frac, mask, systematics=syst, njack=0, cross_only=True)\ncl_new = get_cl(nnew, frac, mask, systematics=syst, njack=0, cross_only=True)", "_____no_output_____" ], [ "plt.plot(cl_old['cl_gg']['cl'], label='Old')\nplt.plot(cl_new['cl_gg']['cl'], label='New')\nplt.legend()\n# plt.xscale('log')\nplt.yscale('log') #symlog', linthreshy=1.0e-6)\nplt.ylim(ymin=8.0e-9)\nplt.ylabel('C_gg')\nplt.xlabel(r'$\\ell$')", "_____no_output_____" ], [ "from lssutils.utils import histogram_cell", "_____no_output_____" ], [ "def plot(cl, **kw):\n \n lb = np.arange(0, 3000, 100)\n lb_, cl_ = histogram_cell(cl, bins=lb)\n \n al = kw.pop('alpha')\n lab = kw.pop('label')\n \n ln = plt.plot(cl, alpha=al, **kw)\n plt.plot(lb_, cl_, color=ln[0].get_color(), \n label=lab, marker='o', mfc='w', **kw)\n\nplot(cl_old['cl_sg'][0]['cl'], label='Old', alpha=0.5)\nplot(cl_new['cl_sg'][0]['cl'], label='New', alpha=0.5)\n\n\nplt.legend()\n\nplt.axhline(0)\nplt.ylim(-1.0e-8, 1.0e-8)\n# plt.yscale('symlog', linthreshy=1.0e-9)\nplt.ylabel('C_gs')\nplt.xlabel(r'$\\ell$')", "_____no_output_____" ] ] ]
[ "code", "markdown", "code" ]
[ [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb57e97528629bc15e042a5e56e34322c2f9e3f5
27,789
ipynb
Jupyter Notebook
jupyterhub/notebooks/visualization/Table of widget keys and style keys.ipynb
just4jc/pipeline
3c7a4fa59c6363833766d2b55fa55ace6b6af351
[ "Apache-2.0" ]
1
2018-03-13T09:46:17.000Z
2018-03-13T09:46:17.000Z
jupyterhub/notebooks/visualization/Table of widget keys and style keys.ipynb
just4jc/pipeline
3c7a4fa59c6363833766d2b55fa55ace6b6af351
[ "Apache-2.0" ]
null
null
null
jupyterhub/notebooks/visualization/Table of widget keys and style keys.ipynb
just4jc/pipeline
3c7a4fa59c6363833766d2b55fa55ace6b6af351
[ "Apache-2.0" ]
2
2018-08-19T15:05:18.000Z
2020-08-13T16:31:48.000Z
36.855438
216
0.453957
[ [ [ "# Scroll down to get to the interesting tables...", "_____no_output_____" ], [ "# Construct list of properties of widgets\n\n\"Properties\" here is one of:\n\n+ `keys`\n+ `traits()`\n+ `class_own_traits()`\n\nCommon (i.e. uninteresting) properties are filtered out.", "_____no_output_____" ], [ "The dependency on astropy is for their Table. Replace it with pandas if you want...", "_____no_output_____" ] ], [ [ "import itertools\nfrom ipywidgets import *\nfrom IPython.display import display\nfrom traitlets import TraitError\n\nfrom astropy.table import Table, Column", "_____no_output_____" ] ], [ [ "# Function definitions", "_____no_output_____" ], [ "## Calculate \"interesting\" properties", "_____no_output_____" ] ], [ [ "def properties(widget, omit=None, source=None):\n \"\"\"\n Return a list of widget properties for a widget instance, omitting\n common properties.\n \n Parameters\n ----------\n \n widget : ipywidgets.Widget instance\n The widget for which the list of preoperties is desired.\n omit : list, optional\n List of properties to omit in the return value. Default is \n ``['layout', 'style', 'msg_throttle']``, and for `source='traits'\n is extended to add ``['keys', 'comm']``.\n source : str, one of 'keys', 'traits', 'class_own_traits', 'style_keys' optional\n Source of property list for widget. Default is ``'keys'``.\n \"\"\"\n if source is None:\n source = 'keys'\n valid_sources = ('keys', 'traits', 'class_own_traits', 'style_keys')\n if source not in valid_sources:\n raise ValueError('source must be one of {}'.format(', '.join(valid_sources)))\n if omit is None:\n omit = ['layout', 'style', 'msg_throttle']\n if source == 'keys':\n props = widget.keys\n elif source == 'traits':\n props = widget.traits()\n omit.extend(['keys', 'comm'])\n elif source == 'class_own_traits':\n props = widget.class_own_traits()\n elif source == 'style_keys':\n props = widget.style.keys\n props = [k for k in props if not k.startswith('_')] \n return [k for k in props if k not in omit]", "_____no_output_____" ] ], [ [ "## Create a table (cross-tab style) for which properties are available for which widgets\n\nThis is the only place astropy.table.Table is used, so delete if you want to.", "_____no_output_____" ] ], [ [ "def table_for_keys(keys, keys_info, source):\n unique_keys = set()\n for k in keys:\n unique_keys.update(keys_info[k])\n unique_keys = sorted(unique_keys)\n string_it = lambda x: 'X' if x else ''\n colnames = ['Property ({})'.format(source)] + keys\n\n columns = [Column(name=colnames[0], data=unique_keys)]\n for c in colnames[1:]:\n column = Column(name=c, data=[string_it(k in key_dict[c]) for k in unique_keys])\n columns.append(column)\n return Table(columns)", "_____no_output_____" ] ], [ [ "## List of widget objects...", "_____no_output_____" ] ], [ [ "widget_list = [\n IntSlider,\n FloatSlider,\n IntRangeSlider,\n FloatRangeSlider,\n IntProgress,\n FloatProgress,\n BoundedIntText,\n BoundedFloatText,\n IntText,\n FloatText,\n ToggleButton,\n Checkbox,\n Valid,\n Dropdown,\n RadioButtons,\n Select,\n SelectionSlider,\n SelectionRangeSlider,\n ToggleButtons,\n SelectMultiple,\n Text,\n Textarea,\n Label,\n HTML,\n HTMLMath,\n Image,\n Button,\n Play,\n DatePicker,\n ColorPicker,\n Box,\n HBox,\n VBox,\n Accordion,\n Tab\n]", "_____no_output_____" ] ], [ [ "## ...and their names", "_____no_output_____" ] ], [ [ "names = [wd.__name__ for wd in widget_list]", "_____no_output_____" ] ], [ [ "## Figure out the properties for each widget\n\nThe `try`/`except` below is to catch a 
 [ [ "## Figure out the properties for each widget\n\nThe `try`/`except` below is to catch a couple of classes that *require* that `options` be passed on initialization.", "_____no_output_____" ] ], [ [ "property_source = 'keys'\nall_keys = []\nfor widget_class in widget_list:\n    try:\n        keys = properties(widget_class(), source=property_source)\n    except TraitError as e:\n        keys = properties(widget_class(options=(2,10)), source=property_source)\n    finally:\n        all_keys.append(keys)", "_____no_output_____" ] ], [ [ "Probably should have used a dict from the beginning...", "_____no_output_____" ] ], [ [ "key_dict = {k: v for k, v in zip(names, all_keys)}", "_____no_output_____" ] ], [ [ "## Define a few groups of widgets by widget interface type\n\nThis makes for nicer (i.e. more compact and readable) tables later on.", "_____no_output_____" ] ], [ [ "sliders = [k for k in key_dict.keys() if 'Slider' in k]\nbuttons = [k for k in key_dict.keys() if 'Button' in k]\ncontainers = ['Box', 'VBox', 'HBox', 'Accordion', 'Tab']\ntexts = [k for k in names if 'text' in k or 'Text' in k] + [k for k in names if 'HTML' in k] + ['Label']\nprogress = [k for k in names if 'Progress' in k]\nselects = ['Dropdown', 'Select', 'SelectMultiple']\nall_so_far = sliders + buttons + texts + containers + progress + selects\nothers = [k for k in names if k not in all_so_far]", "_____no_output_____" ] ], [ [ "# Tables of keys (synced properties)", "_____no_output_____", "## Sliders", "_____no_output_____" ] ], [ [ "table_for_keys(sliders, key_dict, source=property_source)", "_____no_output_____" ] ], [ [ "## Buttons", "_____no_output_____" ] ], [ [ "table_for_keys(buttons, key_dict, source=property_source)", "_____no_output_____" ] ], [ [ "## Containers", "_____no_output_____" ] ], [ [ "table_for_keys(containers, key_dict, source=property_source)", "_____no_output_____" ] ], [ [ "## Text", "_____no_output_____" ] ], [ [ "table_for_keys(texts, key_dict, source=property_source)", "_____no_output_____" ] ], [ [ "## Progress bars", "_____no_output_____" ] ], [ [ "table_for_keys(progress, key_dict, source=property_source)", "_____no_output_____" ] ], [ [ "## Select widgets", "_____no_output_____" ] ], [ [ "table_for_keys(selects, key_dict, source=property_source)", "_____no_output_____" ] ], [ [ "## Everything else", "_____no_output_____" ] ], [ [ "table_for_keys(others, key_dict, source=property_source)", "_____no_output_____" ], [ "property_source = 'style_keys'\nstyle_keys = []\nfor widget_class in widget_list:\n    try:\n        keys = properties(widget_class(), source=property_source)\n    except TraitError as e:\n        keys = properties(widget_class(options=(2,10)), source=property_source)\n    except AttributeError:\n        keys = ''\n    finally:\n        style_keys.append(keys)", "_____no_output_____" ], [ "for w, s in zip(names, style_keys):\n    print('{} has style keys: {}'.format(w, ', '.join(s)))", "IntSlider has style keys: description_width, handle_color\nFloatSlider has style keys: description_width, handle_color\nIntRangeSlider has style keys: description_width, handle_color\nFloatRangeSlider has style keys: description_width, handle_color\nIntProgress has style keys: bar_color, description_width\nFloatProgress has style keys: bar_color, description_width\nBoundedIntText has style keys: description_width\nBoundedFloatText has style keys: description_width\nIntText has style keys: description_width\nFloatText has style keys: description_width\nToggleButton has style keys: description_width\nCheckbox has style keys: description_width\nValid has style keys: description_width\nDropdown has style keys: 
description_width\nRadioButtons has style keys: description_width\nSelect has style keys: description_width\nSelectionSlider has style keys: description_width\nSelectionRangeSlider has style keys: description_width\nToggleButtons has style keys: description_width\nSelectMultiple has style keys: description_width\nText has style keys: description_width\nTextarea has style keys: description_width\nLabel has style keys: description_width\nHTML has style keys: description_width\nHTMLMath has style keys: description_width\nImage has style keys: \nButton has style keys: button_color, font_weight\nPlay has style keys: description_width\nDatePicker has style keys: description_width\nColorPicker has style keys: description_width\nBox has style keys: \nHBox has style keys: \nVBox has style keys: \nAccordion has style keys: \nTab has style keys: \n" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown", "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code" ], [ "markdown" ], [ "code", "code", "code" ] ]
cb57ea38de3e8230bd38adbcedf8b10e3ae4e4a4
151,555
ipynb
Jupyter Notebook
solved_coding_problems.ipynb
RoetGer/coding-practice
15290cba0d278266ccc7b1c751da30f27714371e
[ "MIT" ]
null
null
null
solved_coding_problems.ipynb
RoetGer/coding-practice
15290cba0d278266ccc7b1c751da30f27714371e
[ "MIT" ]
null
null
null
solved_coding_problems.ipynb
RoetGer/coding-practice
15290cba0d278266ccc7b1c751da30f27714371e
[ "MIT" ]
null
null
null
62.471146
2,988
0.54243
[ [ [ "<a href=\"https://colab.research.google.com/github/RoetGer/coding-practice/blob/main/solved_coding_problems.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ], [ "**From Leetcode - Maximum Subarray**\n\nGiven an integer array nums, find the contiguous subarray (containing at least one number) which has the largest sum and return its sum.\n\nSolution approach:\nKadane’s Algorithm:\n\n```\nInitialize:\n max_so_far = INT_MIN\n max_ending_here = 0\n\nLoop for each element of the array\n (a) max_ending_here = max_ending_here + a[i]\n (b) if(max_so_far < max_ending_here)\n max_so_far = max_ending_here\n (c) if(max_ending_here < 0)\n max_ending_here = 0\nreturn max_so_far\n```", "_____no_output_____" ] ], [ [ "import sys\nfrom typing import List\n\nclass Solution:\n def maxSubArray(self, nums: List[int]) -> int: \n max_so_far = -sys.maxsize - 1\n max_ending_here = 0\n size = len(nums) \n\n for i in range(0, size):\n max_ending_here = max_ending_here + nums[i]\n if (max_so_far < max_ending_here):\n max_so_far = max_ending_here\n \n if max_ending_here < 0:\n max_ending_here = 0 \n return max_so_far", "_____no_output_____" ], [ "sol = Solution()\n\nassert sol.maxSubArray([-2,1,-3,4,-1,2,1,-5,4]) == 6\nassert sol.maxSubArray([1]) == 1\nassert sol.maxSubArray([5,4,-1,7,8]) == 23\nassert sol.maxSubArray([-1]) == -1", "_____no_output_____" ] ], [ [ "**From Leetcode - Best Time to Buy and Sell Stocks 1**\n\nYou are given an array prices where prices[i] is the price of a given stock on the ith day.\n\nYou want to maximize your profit by choosing a single day to buy one stock and choosing a different day in the future to sell that stock.\n\nReturn the maximum profit you can achieve from this transaction. If you cannot achieve any profit, return 0.", "_____no_output_____" ] ], [ [ "class Solution:\n def maxProfit(self, prices: List[int]) -> int:\n current_min = prices[0]\n max_profit = 0\n\n for p in prices[1:]:\n profit = p - current_min\n\n if profit > max_profit:\n max_profit = profit\n\n if p < current_min:\n current_min = p\n\n return max_profit\n", "_____no_output_____" ], [ "sol = Solution()\n\nassert sol.maxProfit([7,1,5,3,6,4]) == 5\nassert sol.maxProfit([7,6,4,3,1]) == 0", "_____no_output_____" ], [ "ll = [7]\nfor i in ll[0:]:\n print(i)", "7\n" ] ], [ [ "** From Leetcode - Best Time to Buy and Sell stocks**\nYou are given an array prices where prices[i] is the price of a given stock on the ith day.\n\nFind the maximum profit you can achieve. 
You may complete as many transactions as you like (i.e., buy one and sell one share of the stock multiple times).\n\nNote: You may not engage in multiple transactions simultaneously (i.e., you must sell the stock before you buy again).", "_____no_output_____" ] ], [ [ "from typing import List\n\nclass Solution:\n    def maxProfit(self, prices: List[int]) -> int:\n        max_profit = 0\n        \n        for t in range(len(prices) - 1):\n            if prices[t] < prices[t+1]:\n                max_profit += prices[t+1] - prices[t]\n        \n        return max_profit\n", "_____no_output_____" ], [ "sol = Solution()\n\nassert sol.maxProfit([7,1,5,3,6,4]) == 7\nassert sol.maxProfit([1,2,3,4,5]) == 4\nassert sol.maxProfit([7,6,4,3,1]) == 0\n", "_____no_output_____" ], [ "sol.maxProfit([7,1,5,3,6,4])", "_____no_output_____" ] ], [ [ "**From Leetcode - Best Time to Buy and Sell Stocks with Cooldown**\n\nYou are given an array prices where prices[i] is the price of a given stock on the ith day.\n\nFind the maximum profit you can achieve. You may complete as many transactions as you like (i.e., buy one and sell one share of the stock multiple times) with the following restrictions:\n\nAfter you sell your stock, you cannot buy stock on the next day (i.e., cooldown one day).\nNote: You may not engage in multiple transactions simultaneously (i.e., you must sell the stock before you buy again).", "_____no_output_____" ] ], [ [ "from typing import List\n\n# First attempt: the original greedy scan with a cooldown flag was left\n# unfinished (it did not parse); rewritten here as the standard O(n)\n# dynamic-programming state machine (hold / just sold / free to buy).\nclass Solution:\n    def maxProfit(self, prices: List[int]) -> int:\n        if not prices:\n            return 0\n\n        hold = -prices[0]  # best profit while holding a share\n        sold = 0           # best profit right after selling today\n        rest = 0           # best profit while free to buy\n\n        for p in prices[1:]:\n            hold, sold, rest = (\n                max(hold, rest - p),  # keep holding, or buy today\n                hold + p,             # sell today (cooldown tomorrow)\n                max(rest, sold),      # stay idle, or finish the cooldown\n            )\n\n        return max(sold, rest)", "_____no_output_____" ], [ "# Second attempt: exhaustive recursive search over buy/hold/sell decisions.\n# Correct, but exponential in len(prices) -- see the timings below.\nclass Solution:\n    def maxProfit(self, prices: List[int]) -> int:\n        return self._maxProfit(prices, purchase_price=-1)\n\n    def _maxProfit(self, prices: List[int], purchase_price: int) -> int:\n        lpr = len(prices)\n        \n        if lpr == 0:\n            return 0\n\n        if lpr == 1:\n            return prices[0] - purchase_price if purchase_price > -1 else 0\n        \n        if purchase_price > -1:\n            # Either sell now (then skip a day for the cooldown) or keep holding.\n            max_profit = max(\n                (prices[0] - purchase_price) + self._maxProfit(prices[2:], purchase_price=-1),\n                self._maxProfit(prices[1:], purchase_price=purchase_price)\n            )\n        else:\n            # Either buy now or wait.\n            max_profit = max(\n                self._maxProfit(prices[1:], purchase_price=prices[0]),\n                self._maxProfit(prices[1:], purchase_price=-1)\n            )\n\n        return max_profit", "_____no_output_____" ], [ "sol = Solution()\n\nassert sol.maxProfit([1,2,3,0,2]) == 3\nassert sol.maxProfit([1]) == 0\nassert sol.maxProfit([6,1,3,2,4,7]) == 6", "_____no_output_____" ], [ "sol.maxProfit([1,2,3,1,3])", "_____no_output_____" ], [ "sol.maxProfit([1,2,3,0,2])", "_____no_output_____" ], [ "sol.maxProfit([6,1,3,2,4,7])\n#sol.maxProfit([1,3,2,4,7])", "_____no_output_____" ], [ "sol.maxProfit([1,2,3,0,2])\nsol.maxProfit([1,2,3])", "_____no_output_____" ], [ "%%time\nassert sol.maxProfit([48,12,60,93,97,42,25,64,17,56,85,93,9,48,52,42,58,85,81,84,69,36,1,54,23,15,72,15,11,94]) == 428", "_____no_output_____" ], [
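"# Added example (not one of the original attempts): the same recursive search\n# made tractable with functools.lru_cache. lru_cache cannot wrap _maxProfit\n# directly because its list argument is unhashable, so the state is\n# re-expressed as (day index, holding a share?).\nfrom functools import lru_cache\n\ndef max_profit_memo(prices: List[int]) -> int:\n    @lru_cache(maxsize=None)\n    def go(i, holding):\n        if i >= len(prices):\n            return 0\n        skip = go(i + 1, holding)\n        if holding:\n            # sell today, then cool down for one day\n            return max(skip, prices[i] + go(i + 2, False))\n        # buy today\n        return max(skip, -prices[i] + go(i + 1, True))\n    return go(0, False)\n\nassert max_profit_memo([1,2,3,0,2]) == 3\nassert max_profit_memo([1]) == 0\nassert max_profit_memo([6,1,3,2,4,7]) == 6", "_____no_output_____" ], [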
"%%time\nsol.maxProfit([48,12,60,93,97,42,25,64,17,56,85,93,9,48,52,42,58,85,81,84,69,36,1,54,23,15,72,15,11,94])", "CPU times: user 88 µs, sys: 0 ns, total: 88 µs\nWall time: 93.2 µs\n" ], [ "sol.maxProfit([48,12,60,93,97,42,25,64,17,56,85,93,9,48,52,42,58,85,81,84,69,36,1,54,23,15,72,15,11,94])", "_____no_output_____" ], [ "%%time\nsol.maxProfit([70,4,83,56,94,72,78,43,2,86,65,100,94,56,41,66,3,33,10,3,45,94,15,12,78,60,58,0,58,15,21,7,11,41,12,96,83,77,47,62,27,19,40,63,30,4,77,52,17,57,21,66,63,29,51,40,37,6,44,42,92,16,64,33,31,51,36,0,29,95,92,35,66,91,19,21,100,95,40,61,15,83,31,55,59,84,21,99,45,64,90,25,40,6,41,5,25,52,59,61,51,37,92,90,20,20,96,66,79,28,83,60,91,30,52,55,1,99,8,68,14,84,59,5,34,93,25,10,93,21,35,66,88,20,97,25,63,80,20,86,33,53,43,86,53,55,61,77,9,2,56,78,43,19,68,69,49,1,6,5,82,46,24,33,85,24,56,51,45,100,94,26,15,33,35,59,25,65,32,26,93,73,0,40,92,56,76,18,2,45,64,66,64,39,77,1,55,90,10,27,85,40,95,78,39,40,62,30,12,57,84,95,86,57,41,52,77,17,9,15,33,17,68,63,59,40,5,63,30,86,57,5,55,47,0,92,95,100,25,79,84,93,83,93,18,20,32,63,65,56,68,7,31,100,88,93,11,43,20,13,54,34,29,90,50,24,13,44,89,57,65,95,58,32,67,38,2,41,4,63,56,88,39,57,10,1,97,98,25,45,96,35,22,0,37,74,98,14,37,77,54,40,17,9,28,83,13,92,3,8,60,52,64,8,87,77,96,70,61,3,96,83,56,5,99,81,94,3,38,91,55,83,15,30,39,54,79,55,86,85,32,27,20,74,91,99,100,46,69,77,34,97,0,50,51,21,12,3,84,84,48,69,94,28,64,36,70,34,70,11,89,58,6,90,86,4,97,63,10,37,48,68,30,29,53,4,91,7,56,63,22,93,69,93,1,85,11,20,41,36,66,67,57,76,85,37,80,99,63,23,71,11,73,41,48,54,61,49,91,97,60,38,99,8,17,2,5,56,3,69,90,62,75,76,55,71,83,34,2,36,56,40,15,62,39,78,7,37,58,22,64,59,80,16,2,34,83,43,40,39,38,35,89,72,56,77,78,14,45,0,57,32,82,93,96,3,51,27,36,38,1,19,66,98,93,91,18,95,93,39,12,40,73,100,17,72,93,25,35,45,91,78,13,97,56,40,69,86,69,99,4,36,36,82,35,52,12,46,74,57,65,91,51,41,42,17,78,49,75,9,23,65,44,47,93,84,70,19,22,57,27,84,57,85,2,61,17,90,34,49,74,64,46,61,0,28,57,78,75,31,27,24,10,93,34,19,75,53,17,26,2,41,89,79,37,14,93,55,74,11,77,60,61,2,68,0,15,12,47,12,48,57,73,17,18,11,83,38,5,36,53,94,40,48,81,53,32,53,12,21,90,100,32,29,94,92,83,80,36,73,59,61,43,100,36,71,89,9,24,56,7,48,34,58,0,43,34,18,1,29,97,70,92,88,0,48,51,53,0,50,21,91,23,34,49,19,17,9,23,43,87,72,39,17,17,97,14,29,4,10,84,10,33,100,86,43,20,22,58,90,70,48,23,75,4,66,97,95,1,80,24,43,97,15,38,53,55,86,63,40,7,26,60,95,12,98,15,95,71,86,46,33,68,32,86,89,18,88,97,32,42,5,57,13,1,23,34,37,13,65,13,47,55,85,37,57,14,89,94,57,13,6,98,47,52,51,19,99,42,1,19,74,60,8,48,28,65,6,12,57,49,27,95,1,2,10,25,49,68,57,32,99,24,19,25,32,89,88,73,96,57,14,65,34,8,82,9,94,91,19,53,61,70,54,4,66,26,8,63,62,9,20,42,17,52,97,51,53,19,48,76,40,80,6,1,89,52,70,38,95,62,24,88,64,42,61,6,50,91,87,69,13,58,43,98,19,94,65,56,72,20,72,92,85,58,46,67,2,23,88,58,25,88,18,92,46,15,18,37,9,90,2,38,0,16,86,44,69,71,70,30,38,17,69,69,80,73,79,56,17,95,12,37,43,5,5,6,42,16,44,22,62,37,86,8,51,73,46,44,15,98,54,22,47,28,11,75,52,49,38,84,55,3,69,100,54,66,6,23,98,22,99,21,74,75,33,67,8,80,90,23,46,93,69,85,46,87,76,93,38,77,37,72,35,3,82,11,67,46,53,29,60,33,12,62,23,27,72,35,63,68,14,35,27,98,94,65,3,13,48,83,27,84,86,49,31,63,40,12,34,79,61,47,29,33,52,100,85,38,24,1,16,62,89,36,74,9,49,62,89])", "ERROR:root:Internal Python error in the inspect module.\nBelow is the traceback from this internal error.\n\n" ], [ " from functools import lru_cache\n \n@lru_cache(maxsize=None)\ndef fibonacci(k):\n if k < 2:\n return k\n else:\n return fibonacci(k - 1) + fibonacci(k - 2)", 
"_____no_output_____" ], [ "class Solution:\n def maxProfit(self, prices: List[int]) -> int:\n \n if len(prices) <= 1:\n return 0\n\n for i, p in enumerate(prices):\n if p < prices[i+1]:\n max_profit = max(\n self.stock_bought(p, prices[(i+1):]),\n self.maxProfit(prices[(i+1):])\n )\n break\n else:\n max_profit = 0\n \n return max_profit\n\n def stock_bought(self, purchase_price: int, prices: List[int]) -> int:\n lp = len(prices)\n \n if lp == 1:\n return prices[0] - purchase_price\n\n if lp >= 3:\n p0 = prices[0]\n p1 = prices[1]\n p2 = prices[2]\n \n if p0 < p1 < p2:\n max_profit = self.stock_bought(purchase_price, prices[2:])\n return max_profit\n elif p0 > p1 > p2:\n max_profit = (p0 - purchase_price) + self.maxProfit(prices[2:])\n\n max_profit = max(\n (prices[0] - purchase_price) + self.maxProfit(prices[2:]),\n self.stock_bought(purchase_price, prices[1:])\n )\n\n return max_profit\n\n\nclass Solution:\n def maxProfit(self, prices: List[int]) -> int:\n lp = len(prices)\n p = prices[0]\n\n if lp <= 1:\n return 0\n\n p1 = prices[1]\n\n if p < p1:\n if lp == 2:\n return p1 - p\n\n p2 = prices[2]\n\n if lp == 3:\n return max(p1, p2) - p\n\n if p1 < p2:\n return self.hold_stock(p, prices[1:])\n # TODO: fix this part of the code. Either wrong or too slow :)\n ''' \n return max(\n self.hold_stock(p, prices[1:]),\n self.maxProfit(prices[1:])\n )\n\n p3 = prices[3]\n\n if (p1 - p) > (p3 - p2):\n return (p1 - p) + self.maxProfit(prices[3:])\n\n return self.maxProfit(prices[1:])\n\n return self.maxProfit(prices[1:])\n ''' \n def hold_stock(self, purchase_price: int, prices: List[int]) -> int:\n lp = len(prices)\n p = prices[0]\n\n if lp == 1:\n return p - purchase_price\n\n if lp > 2:\n p1 = prices[1]\n p2 = prices[2]\n\n if lp == 3:\n return max(p, p1, p2) - purchase_price\n else:\n p3 = prices[3]\n if (p1 > p2) & ((p1 - p) < (p3 - p2)):\n return (p - purchase_price) + self.maxProfit(prices[2:])\n else:\n return self.hold_stock(purchase_price, prices[1:])\n\n return max(p, p1) - purchase_price\n \n\n", "_____no_output_____" ] ], [ [ "**Retrieve the nth largest element of each row**", "_____no_output_____" ] ], [ [ "import numpy as np\n\nnp.random.seed(5)\ntest_mat = np.random.gamma(shape=1, scale=1, size=(100, 10))", "_____no_output_____" ], [ "test_mat[0,:]", "_____no_output_____" ], [ "n = 3\narg_sort_mat = np.argsort(test_mat, axis=1)\nnth_largest_idxs = arg_sort_mat[:, -n]\nnth_largest_idxs\n\ntest_mat[np.arange(test_mat.shape[0]), nth_largest_idxs]", "_____no_output_____" ], [ "np.argmax(test_mat, axis=1)", "_____no_output_____" ] ] ]
[ "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code", "markdown", "code" ]
[ [ "markdown", "markdown" ], [ "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code" ] ]
cb57f40f7a1741aa663a051de89f3b1b28402381
152,796
ipynb
Jupyter Notebook
deep_neaural_network.ipynb
cervantes-loves-ai/100-Days-Of-ML-Code
e6673873be98572b6ba79a73495a024f96c44baf
[ "MIT" ]
null
null
null
deep_neaural_network.ipynb
cervantes-loves-ai/100-Days-Of-ML-Code
e6673873be98572b6ba79a73495a024f96c44baf
[ "MIT" ]
null
null
null
deep_neaural_network.ipynb
cervantes-loves-ai/100-Days-Of-ML-Code
e6673873be98572b6ba79a73495a024f96c44baf
[ "MIT" ]
null
null
null
105.014433
27,814
0.78254
[ [ [ "<a href=\"https://colab.research.google.com/github/cervantes-loves-ai/100-Days-Of-ML-Code/blob/master/deep_neaural_network.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>", "_____no_output_____" ] ], [ [ "!pip3 install torch", "Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (1.4.0)\n" ], [ "import torch\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport torch.nn as nn\nfrom sklearn import datasets", "_____no_output_____" ], [ " n_pts = 500\n X, y = datasets.make_circles(n_samples=n_pts, random_state=123, noise=0.1, factor=0.2)\n \n x_data = torch.Tensor(X)\n y_data = torch.Tensor(y.reshape(500, 1))\n print(y.shape)", "(500,)\n" ] ], [ [ "", "_____no_output_____" ] ], [ [ "def scatter_plot():\n plt.scatter(X[y==0, 0], X[y==0, 1])\n plt.scatter(X[y==1, 0], X[y==1, 1])", "_____no_output_____" ], [ "scatter_plot()", "_____no_output_____" ], [ "class Model(nn.Module):\n def __init__(self, input_size, H1, output_size):\n super().__init__() \n self.linear = nn.Linear(input_size, H1)\n self.linear2 = nn.Linear(H1, output_size)\n def forward(self, x):\n x = torch.sigmoid(self.linear(x))\n x = torch.sigmoid(self.linear2(x))\n return x\n def predict(self, x):\n pred = self.forward(x)\n if pred >= 0.5:\n return 1\n else:\n return 0", "_____no_output_____" ], [ "torch.manual_seed(2)\nmodel = Model(2, 4, 1)\nprint(list(model.parameters()))", "[Parameter containing:\ntensor([[ 0.1622, -0.1683],\n [ 0.1939, -0.0361],\n [ 0.3021, 0.1683],\n [-0.0813, -0.5717]], requires_grad=True), Parameter containing:\ntensor([ 0.1614, -0.6260, 0.0929, 0.0470], requires_grad=True), Parameter containing:\ntensor([[-0.1099, 0.4088, 0.0334, 0.2073]], requires_grad=True), Parameter containing:\ntensor([0.2116], requires_grad=True)]\n" ], [ "criterion = nn.BCELoss()\noptimizer = torch.optim.Adam(model.parameters(), lr=0.1)", "_____no_output_____" ], [ "epochs = 1000\nlosses = []\n\nfor i in range(epochs):\n y_pred = model.forward(x_data)\n loss = criterion(y_pred, y_data)\n print(\"epoch:\", i, \"loss:\", loss.item())\n losses.append(loss.item())\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()", "epoch: 0 loss: 0.03219418600201607\nepoch: 1 loss: 0.030535537749528885\nepoch: 2 loss: 0.029004238545894623\nepoch: 3 loss: 0.027608321979641914\nepoch: 4 loss: 0.02624848112463951\nepoch: 5 loss: 0.024970542639493942\nepoch: 6 loss: 0.0237661711871624\nepoch: 7 loss: 0.02260502427816391\nepoch: 8 loss: 0.021522527560591698\nepoch: 9 loss: 0.020521605387330055\nepoch: 10 loss: 0.01957591623067856\nepoch: 11 loss: 0.01868094876408577\nepoch: 12 loss: 0.017843440175056458\nepoch: 13 loss: 0.017055237665772438\nepoch: 14 loss: 0.016307087615132332\nepoch: 15 loss: 0.01560275536030531\nepoch: 16 loss: 0.014944714494049549\nepoch: 17 loss: 0.014326524920761585\nepoch: 18 loss: 0.013742816634476185\nepoch: 19 loss: 0.013193639926612377\nepoch: 20 loss: 0.012678024359047413\nepoch: 21 loss: 0.012191250920295715\nepoch: 22 loss: 0.011729706078767776\nepoch: 23 loss: 0.011293449439108372\nepoch: 24 loss: 0.0108829066157341\nepoch: 25 loss: 0.010496187023818493\nepoch: 26 loss: 0.010130534879863262\nepoch: 27 loss: 0.009784489870071411\nepoch: 28 loss: 0.009457437321543694\nepoch: 29 loss: 0.00914803147315979\nepoch: 30 loss: 0.00885443389415741\nepoch: 31 loss: 0.008575505577027798\nepoch: 32 loss: 0.008310830220580101\nepoch: 33 loss: 0.008059781044721603\nepoch: 34 loss: 0.007821227423846722\nepoch: 35 
loss: 0.0075941490940749645\nepoch: 36 loss: 0.007378035224974155\nepoch: 37 loss: 0.007172503974288702\n[... epochs 38-997 elided: the loss decreases monotonically from ~7.0e-3 to ~6.9e-5 ...]\nepoch: 998 loss: 6.861006113467738e-05\nepoch: 999 loss: 6.849251076346263e-05\n" ], [ "plt.plot(range(epochs), losses)\nplt.ylabel('Loss')\nplt.xlabel('epoch')\nplt.grid()", "_____no_output_____" ], [ "def plot_decision_boundary(X, y):\n    x_span = np.linspace(min(X[:, 0]) - 0.25, max(X[:, 0]) + 0.25)\n    y_span = np.linspace(min(X[:, 1]) - 0.25, max(X[:, 1]) + 0.25)
\n    xx, yy = np.meshgrid(x_span, y_span)\n    grid = torch.Tensor(np.c_[xx.ravel(), yy.ravel()])\n    pred_func = model.forward(grid)\n    z = pred_func.view(xx.shape).detach().numpy()\n    plt.contourf(xx, yy, z)", "_____no_output_____" ], [ "plot_decision_boundary(X, y)\nscatter_plot()", "_____no_output_____" ], [ "px = 0.25  # renamed from x/y so the dataset arrays X and y are not shadowed\npy = 0.25\npoint = torch.Tensor([px, py])\nprediction = model.predict(point)\nplt.plot([px], [py], marker='o', markersize=10, color=\"red\")\nprint(\"Prediction is\", prediction)\nplot_decision_boundary(X, y)", "Prediction is 1\n" ], [ "point1 = torch.Tensor([1.0, -1.0])\npoint2 = torch.Tensor([-1.0, 1.0])\nplt.plot(point1.numpy()[0], point1.numpy()[1], 'ro')\nplt.plot(point2.numpy()[0], point2.numpy()[1], 'ko')\nplt.title(\"Trained Model\")\nplot_decision_boundary(X, y)  # plot_fit() was never defined in this notebook; use the boundary helper\nprint(\"Red point positive probability = {}\".format(model.forward(point1).item()))\nprint(\"Black point positive probability = {}\".format(model.forward(point2).item()))\nprint(\"Red point belongs in class {}\".format(model.predict(point1)))\nprint(\"Black point belongs in class = {}\".format(model.predict(point2)))", "_____no_output_____" ], [ "", "_____no_output_____" ] ] ]
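The notebook judges the classifier only by eye from the contour plots. A small follow-up sketch (not in the original) that scores the trained model on its own training set, reusing `model`, `x_data`, and `y_data` from the cells above:

```python
import torch

# Fraction of the 500 training points classified correctly at the 0.5 threshold,
# the same decision rule used by Model.predict().
with torch.no_grad():
    probs = model.forward(x_data)
    preds = (probs >= 0.5).float()
    accuracy = (preds == y_data).float().mean().item()
print("Training accuracy: {:.3f}".format(accuracy))
```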
[ "markdown", "code", "markdown", "code" ]
[ [ "markdown" ], [ "code", "code", "code" ], [ "markdown" ], [ "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code", "code" ] ]
cb57f424038f6ff602e2ead632d191664a402b64
2,272
ipynb
Jupyter Notebook
02-Learning_Path/02-03-Python/PYT-07-Python_One-Liners/2-pyhton-tricks.ipynb
nicode-io/The_Junior_Way
70fdfa8959c5f649f12264a043ddf296519f7508
[ "MIT" ]
1
2021-08-08T20:40:31.000Z
2021-08-08T20:40:31.000Z
02-Learning_Path/02-03-Python/PYT-07-Python_One-Liners/2-pyhton-tricks.ipynb
nicode-io/The_Junior_Way
70fdfa8959c5f649f12264a043ddf296519f7508
[ "MIT" ]
null
null
null
02-Learning_Path/02-03-Python/PYT-07-Python_One-Liners/2-pyhton-tricks.ipynb
nicode-io/The_Junior_Way
70fdfa8959c5f649f12264a043ddf296519f7508
[ "MIT" ]
null
null
null
18.322581
77
0.4375
[ [ [ "print(1)", "1\n" ] ], [ [ "List comprehension tricks", "_____no_output_____" ], [ "BEFORE", "_____no_output_____" ] ], [ [ "employees = { 'Alice' : 100000,\n 'Bob' : 99817,\n 'Carol' : 122905,\n 'Frank' : 88123,\n 'Eve' : 93121}\n\ntop_earners = []\nfor key, val in employees.items():\n if val >= 100000:\n top_earners.append((key,val))\n\nprint(top_earners)", "[('Alice', 100000), ('Carol', 122905)]\n" ] ], [ [ "AFTER MAGIC TRICK", "_____no_output_____" ] ], [ [ "employees_two = { 'Alice' : 100000,\n 'Bob' : 99817,\n 'Carol' : 122905,\n 'Frank' : 88123,\n 'Eve' : 93121}\n\ntop_earners_two = [(k, v) for k, v in employees.items() if v >= 100000]", "_____no_output_____" ] ] ]
[ "code", "markdown", "code", "markdown", "code" ]
[ [ "code" ], [ "markdown", "markdown" ], [ "code" ], [ "markdown" ], [ "code" ] ]
cb57f8b0dab660c03e3e0a595a66c2310350e15b
595
ipynb
Jupyter Notebook
tutorials/runnable.ipynb
rbracco/nbpro
55fb8459c1214d6801c5133e1c1624e28a36609c
[ "Apache-2.0" ]
1
2020-07-14T23:52:48.000Z
2020-07-14T23:52:48.000Z
tutorials/runnable.ipynb
rbracco/nbpro
55fb8459c1214d6801c5133e1c1624e28a36609c
[ "Apache-2.0" ]
1
2021-05-20T12:35:07.000Z
2021-05-20T12:35:07.000Z
tutorials/runnable.ipynb
rbracco/nbpro
55fb8459c1214d6801c5133e1c1624e28a36609c
[ "Apache-2.0" ]
1
2020-07-14T23:52:50.000Z
2020-07-14T23:52:50.000Z
14.875
31
0.468908
[ [ [ "z = 123", "_____no_output_____" ], [ "def hello(msg):\n print(\"hello\", msg)", "_____no_output_____" ] ] ]
[ "code" ]
[ [ "code", "code" ] ]