diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..9874ce4f89360f1a97e8f47808511b8f1d892cea 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,21 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_surface_doy001_hour00.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_surface_doy001_hour03.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_surface_doy001_hour06.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_surface_doy001_hour09.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_surface_doy001_hour12.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_surface_doy001_hour15.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_surface_doy001_hour18.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_surface_doy001_hour21.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_vertical_doy001_hour00.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_vertical_doy001_hour03.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_vertical_doy001_hour06.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_vertical_doy001_hour09.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_vertical_doy001_hour12.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_vertical_doy001_hour15.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_vertical_doy001_hour18.nc filter=lfs diff=lfs merge=lfs -text +examples/climatology/climate_vertical_doy001_hour21.nc filter=lfs diff=lfs merge=lfs -text +examples/merra-2/MERRA_pres_20200101.nc filter=lfs diff=lfs merge=lfs -text +examples/merra-2/MERRA2_sfc_20200101.nc filter=lfs diff=lfs merge=lfs -text diff --git a/examples/.cache/huggingface/.gitignore b/examples/.cache/huggingface/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..f59ec20aabf5842d237244ece8c81ab184faeac1 --- /dev/null +++ b/examples/.cache/huggingface/.gitignore @@ -0,0 +1 @@ +* \ No newline at end of file diff --git a/examples/.cache/huggingface/download/climatology/anomaly_variance_surface.nc.metadata b/examples/.cache/huggingface/download/climatology/anomaly_variance_surface.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..51c64a690244bc524e3d99153b7c31bc6f4ec51e --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/anomaly_variance_surface.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +6c21b953f7f60f3bb80a1007e49227c5c018a26e3eb7d1a080a8d7bcf3ab7dfc +1731287317.7842605 diff --git a/examples/.cache/huggingface/download/climatology/anomaly_variance_vertical.nc.metadata b/examples/.cache/huggingface/download/climatology/anomaly_variance_vertical.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..a396fa2331c626fb1d8e2256b0b0c9bf1d5e3c08 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/anomaly_variance_vertical.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +4317f4c8c89b1e604bca44841bf085603c91b8c70f92b37bee536f99c83222bb +1731287317.8256514 diff --git a/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour00.nc.metadata 
b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour00.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..bf4bfea1a0806bc29fb5c6d167541f14bb224a0f --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour00.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +6df405a6178c3222c4abd98eea6573ced38aa2ccbe0966647a68ac18103a4d1d +1731111987.3265555 diff --git a/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour03.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour03.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..b266e296d327a228cf42ceb3374099e67d306597 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour03.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +b67bcff5c6df5306677a65de3f7c08485a8ba1c3ecdc60b7f346cf0642caa7f1 +1731111985.428959 diff --git a/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour06.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour06.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..fd072a0e3d6e2496aaff128d94ae6b04d0e06a47 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour06.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +080cb1f7e8dad508fc78e40a5bbd30d03564b7a48bdd2916f296dc3dfed30c60 +1731111986.869001 diff --git a/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour09.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour09.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..18a3cd3525ac468f20973a819c050b5656ec7a04 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour09.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +134e194a8f38828ec067f98e8c5c7dc4aed0131046b6ed839f75bcf6b98b5492 +1731111985.1479104 diff --git a/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour12.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour12.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..b7dc1361be1b2ab56a7b7dc131a9c52b6da67059 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour12.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +974a79a5e68096c356b42f15cd9c955f0a1306cf6a942c682e09d6504742a6d1 +1731111980.916947 diff --git a/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour15.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour15.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..fea603150a8f1cb49944b13043775d38e302e02f --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour15.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +dea669f097cf1b9d775956a01e2bae52ae1856dfcc7abae5c6ee394949275c5e +1731111986.7854862 diff --git a/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour18.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour18.nc.metadata new file mode 100644 index 
0000000000000000000000000000000000000000..c0a42ba8ead2027a59a65fa47a130b3186dcca47 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour18.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +3b06aabcd1f14cc3a4b3c5dd355a225646fe4b9e3b5b676ee2fb6e5d05fe31c9 +1731111986.2445679 diff --git a/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour21.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour21.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..f1a1e036292d01c198f7d81d7a584f7ed8ce0260 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_surface_doy001_hour21.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +ef90b12b58b73481f96c0caf5277d24cc05681003df0f6e83f5e85dc0b4d47b3 +1731111981.8565679 diff --git a/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour00.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour00.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..0e2aa5c1d1cad5cc2f6870174bf2f1e110cc5f07 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour00.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +b5f75cc1a55b6f1beecf2271d3a8fe9914cb20fb87e471761b3b686a990ec0ef +1731112085.7071128 diff --git a/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour03.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour03.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..169dc54cc22423b00ef7d812ecd9b998bface555 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour03.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +80a04c77e614e01ae26fdad1fc3c5a15fd6869d627ce4251447d64c3bb934916 +1731112080.982848 diff --git a/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour06.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour06.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..14f983905e36ff587b53572aa22112a0f0665506 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour06.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +0f6fc10bbd055e1e7af63f70cfa24abd60794cd89822a95b9fb9194b07c7822f +1731112071.24669 diff --git a/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour09.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour09.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..54fd477dae6f4ef447075b99014129b87e59fa51 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour09.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +70cb7c3e30902548b24330b61ecf493fdf1949bb434f45e9258434a80d91ee6d +1731112053.3633041 diff --git a/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour12.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour12.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..8d4df47781acbd5b97343a9277a4f597afd7e16f --- /dev/null +++ 
b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour12.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +a597136ae5b374e55b6f1113e218faeb3090a3d474f95bd6c043b4923238a32c +1731112082.503257 diff --git a/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour15.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour15.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..1fbca83a54a5dda08664a5e58d53eab0fdb6332a --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour15.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +6e1765b3fcf01aed2338b9df110a76a9e439703bb87c366b0d3868473ccceb46 +1731112055.9265618 diff --git a/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour18.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour18.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..3db49ef8c035200c54ecbed000d73b0d65db486f --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour18.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +66b27837717a6cd37d72d55c8de3cc24b2122e25173348be37b3a76d98c8d580 +1731112076.9637496 diff --git a/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour21.nc.metadata b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour21.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..a3a2b4f49a9a7b6f7cfe65013c473d6386935792 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/climate_vertical_doy001_hour21.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +65084e38b67342d03135e98092d893e6b8ad677a1b690ea39b3fbc4b91df6fdc +1731112084.0759785 diff --git a/examples/.cache/huggingface/download/climatology/musigma_surface.nc.metadata b/examples/.cache/huggingface/download/climatology/musigma_surface.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..e392145e502badb4253b803d953f08c34c536007 --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/musigma_surface.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +62f7145aa62d2d632ef9a32b3492a932b128006cffe61674a53ecb1f163ce0b6 +1731287317.6933951 diff --git a/examples/.cache/huggingface/download/climatology/musigma_vertical.nc.metadata b/examples/.cache/huggingface/download/climatology/musigma_vertical.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..2a77b7d68ae46a71320be590c55aa54668bcb55f --- /dev/null +++ b/examples/.cache/huggingface/download/climatology/musigma_vertical.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +b83b40a81a18a1e9dc1633f71f7186fbeda9b17a3c1ecc536afd1575e635a1a3 +1731287317.7353563 diff --git a/examples/.cache/huggingface/download/config.yaml.metadata b/examples/.cache/huggingface/download/config.yaml.metadata new file mode 100644 index 0000000000000000000000000000000000000000..72f725600e68a9a05a47c289343ba9b9125b322b --- /dev/null +++ b/examples/.cache/huggingface/download/config.yaml.metadata @@ -0,0 +1,3 @@ +514c3d061ad45e3338495da7c16b13aa20fa75b1 +4435167a11fd412e2dfb565e135eed07a33c7663 +1731287317.8794053 diff --git a/examples/.cache/huggingface/download/merra-2/MERRA2_sfc_20200101.nc.metadata 
b/examples/.cache/huggingface/download/merra-2/MERRA2_sfc_20200101.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..4ab1d4433a1dee0a0d71b521636ce657b5c21fda --- /dev/null +++ b/examples/.cache/huggingface/download/merra-2/MERRA2_sfc_20200101.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +1de1638ca1f1b44ca95a7c6908573414c44e633f1bb2e212b8a46afc866c741e +1731111936.8741355 diff --git a/examples/.cache/huggingface/download/merra-2/MERRA_pres_20200101.nc.metadata b/examples/.cache/huggingface/download/merra-2/MERRA_pres_20200101.nc.metadata new file mode 100644 index 0000000000000000000000000000000000000000..441c56d0c4592ffbc82d90987b17818f08b35a96 --- /dev/null +++ b/examples/.cache/huggingface/download/merra-2/MERRA_pres_20200101.nc.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +2cd9b405aa1d388fc0c6cbe6d71104bea770b48e07ba4364f067dc425d025af8 +1731111969.7144666 diff --git a/examples/PrithviWxC_inference.ipynb b/examples/PrithviWxC_inference.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..bc3f80fdd52ee77574defbeabae0a8cc5d2da8ed --- /dev/null +++ b/examples/PrithviWxC_inference.ipynb @@ -0,0 +1,3186 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# PrithviWxC\n", + "\n", + "This notebook will walk you through how to construct the model,\n", + "load the weights, build the dataset, and use the model for inference." + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "import random\n", + "from pathlib import Path\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import torch\n", + "from huggingface_hub import hf_hub_download, snapshot_download" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We now configure the backends and torch states, including setting the seeds for the RNGs." + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "torch.jit.enable_onednn_fusion(True)\n", + "if torch.cuda.is_available():\n", + " print(f\"Using device: {torch.cuda.get_device_name()}\")\n", + " torch.backends.cudnn.benchmark = True\n", + " torch.backends.cudnn.deterministic = True\n", + "\n", + "random.seed(42)\n", + "if torch.cuda.is_available():\n", + " torch.cuda.manual_seed(42)\n", + "torch.manual_seed(42)\n", + "np.random.seed(42)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The model has approximately 2.3 billion parameters, so it\n", + "requires reasonable computational resources, but it is possible\n", + "to run it on a CPU." + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "if torch.cuda.is_available():\n", + " device = torch.device(\"cuda\")\n", + "else:\n", + " device = torch.device(\"cpu\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Dataloader\n", + "### Variables and times\n", + "\n", + "With the environment ready to go, we now need to set up the task.\n", + "The core model expects a fixed set of variables from the MERRA-2\n", + "dataset, which are prescribed below. The variables are comprised\n", + "of surface variables, surface static variables, and variables at\n", + "various vertical levels within the atmosphere. 
More details on the\n", + "MERRA-2 dataset can be found\n", + "[here](https://gmao.gsfc.nasa.gov/reanalysis/MERRA-2/).\n", + "\n", + "The MERRA-2 dataset includes data at longitudes of $-180^\circ$\n", + "and $+180^\circ$. This represents duplicate data, so we set a\n", + "padding variable to remove it.\n", + "\n", + "The input to the core model consists of these variables at two\n", + "different times. The time difference in hours between these samples\n", + "is passed to the model and set in the `input_time` variable.\n", + "\n", + "The model's task is to predict the fixed set of variables at a\n", + "target time, given the input data.\n", + "\n", + "For example, if the input times are 0900 and 1200, resulting in\n", + "an `input_time` of -3, then a `lead_time` of 6 would result in a\n", + "target time of 1800." + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "surface_vars = [\n", + " \"EFLUX\",\n", + " \"GWETROOT\",\n", + " \"HFLUX\",\n", + " \"LAI\",\n", + " \"LWGAB\",\n", + " \"LWGEM\",\n", + " \"LWTUP\",\n", + " \"PS\",\n", + " \"QV2M\",\n", + " \"SLP\",\n", + " \"SWGNT\",\n", + " \"SWTNT\",\n", + " \"T2M\",\n", + " \"TQI\",\n", + " \"TQL\",\n", + " \"TQV\",\n", + " \"TS\",\n", + " \"U10M\",\n", + " \"V10M\",\n", + " \"Z0M\",\n", + "]\n", + "static_surface_vars = [\"FRACI\", \"FRLAND\", \"FROCEAN\", \"PHIS\"]\n", + "vertical_vars = [\"CLOUD\", \"H\", \"OMEGA\", \"PL\", \"QI\", \"QL\", \"QV\", \"T\", \"U\", \"V\"]\n", + "levels = [\n", + " 34.0,\n", + " 39.0,\n", + " 41.0,\n", + " 43.0,\n", + " 44.0,\n", + " 45.0,\n", + " 48.0,\n", + " 51.0,\n", + " 53.0,\n", + " 56.0,\n", + " 63.0,\n", + " 68.0,\n", + " 71.0,\n", + " 72.0,\n", + "]\n", + "padding = {\"level\": [0, 0], \"lat\": [0, -1], \"lon\": [0, 0]}\n", + "\n", + "lead_times = [12] # This variable can be changed to change the task\n", + "input_times = [-6] # This variable can be changed to change the task" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Data file\n", + "MERRA-2 data is available from 1980 to the present day,\n", + "at 3-hour temporal resolution. The dataloader we have provided\n", + "expects the surface data and vertical data to be saved in\n", + "separate files, and when provided with the directories, will\n", + "search for the relevant data that falls within the provided time range.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "45d1a1486bdc4dff82597d5cf87095f0", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Fetching 1 files: 0%| | 0/1 [00:00 dict[str, Tensor]:\n", + " \"\"\"Preprocessing function for the MERRA2 Dataset\n", + "\n", + " Args:\n", + " batch (list): List of training samples, each sample should be a\n", + " dictionary with the following keys::\n", + "\n", + " 'sur_static': Numpy array of shape (3, lat, lon). 
For each pixel (lat, lon), the first dimension indexes sin(lat), cos(lon), sin(lon).\n", + " 'sur_vals': Torch tensor of shape (parameter, time, lat, lon).\n", + " 'sur_tars': Torch tensor of shape (parameter, time, lat, lon).\n", + " 'ulv_vals': Torch tensor of shape (parameter, level, time, lat, lon).\n", + " 'ulv_tars': Torch tensor of shape (parameter, level, time, lat, lon).\n", + " 'sur_climate': Torch tensor of shape (parameter, lat, lon)\n", + " 'ulv_climate': Torch tensor of shape (parameter, level, lat, lon)\n", + " 'lead_time': Integer.\n", + " 'input_time': Integer.\n", + "\n", + " padding: Dictionary with keys 'level', 'lat', 'lon', each of dim 2.\n", + "\n", + " Returns:\n", + " Dictionary with the following keys::\n", + "\n", + " 'x': [batch, time, parameter, lat, lon]\n", + " 'y': [batch, parameter, lat, lon]\n", + " 'static': [batch, parameter, lat, lon]\n", + " 'lead_time': [batch]\n", + " 'input_time': [batch]\n", + " 'climate (Optional)': [batch, parameter, lat, lon]\n", + "\n", + " Note:\n", + " Here, for x and y, 'parameter' is [surface parameter, upper level,\n", + " parameter x level]. Similarly for the static information we have\n", + " [sin(lat), cos(lon), sin(lon), cos(doy), sin(doy), cos(hod), sin(hod),\n", + " ...].\n", + " \"\"\" # noqa: E501\n", + " b0 = batch[0]\n", + " nbatch = len(batch)\n", + " data_keys = set(b0.keys())\n", + "\n", + " essential_keys = {\n", + " \"sur_static\",\n", + " \"sur_vals\",\n", + " \"sur_tars\",\n", + " \"ulv_vals\",\n", + " \"ulv_tars\",\n", + " \"input_time\",\n", + " \"lead_time\",\n", + " }\n", + "\n", + " climate_keys = {\n", + " \"sur_climate\",\n", + " \"ulv_climate\",\n", + " }\n", + "\n", + " all_keys = essential_keys | climate_keys\n", + "\n", + " if not essential_keys.issubset(data_keys):\n", + " raise ValueError(\"Missing essential keys.\")\n", + "\n", + " if not data_keys.issubset(all_keys):\n", + " raise ValueError(\"Unexpected keys in batch.\")\n", + "\n", + " # Bring all tensors from the batch into a single tensor\n", + " upl_x = torch.empty((nbatch, *b0[\"ulv_vals\"].shape))\n", + " upl_y = torch.empty((nbatch, *b0[\"ulv_tars\"].shape))\n", + "\n", + " sur_x = torch.empty((nbatch, *b0[\"sur_vals\"].shape))\n", + " sur_y = torch.empty((nbatch, *b0[\"sur_tars\"].shape))\n", + "\n", + " sur_sta = torch.empty((nbatch, *b0[\"sur_static\"].shape))\n", + "\n", + " lead_time = torch.empty((nbatch,), dtype=torch.float32)\n", + " input_time = torch.empty((nbatch,), dtype=torch.float32)\n", + "\n", + " for i, rec in enumerate(batch):\n", + " sur_x[i] = rec[\"sur_vals\"]\n", + " sur_y[i] = rec[\"sur_tars\"]\n", + "\n", + " upl_x[i] = rec[\"ulv_vals\"]\n", + " upl_y[i] = rec[\"ulv_tars\"]\n", + "\n", + " sur_sta[i] = rec[\"sur_static\"]\n", + "\n", + " lead_time[i] = rec[\"lead_time\"]\n", + " input_time[i] = rec[\"input_time\"]\n", + "\n", + " return_value = {\n", + " \"lead_time\": lead_time,\n", + " \"input_time\": input_time,\n", + " }\n", + "\n", + " # Reshape (batch, parameter, level, time, lat, lon) ->\n", + " # (batch, time, parameter, level, lat, lon)\n", + " upl_x = upl_x.permute((0, 3, 1, 2, 4, 5))\n", + " upl_y = upl_y.permute((0, 3, 1, 2, 4, 5))\n", + " # Reshape (batch, parameter, time, lat, lon) ->\n", + " # (batch, time, parameter, lat, lon)\n", + " sur_x = sur_x.permute((0, 2, 1, 3, 4))\n", + " sur_y = sur_y.permute((0, 2, 1, 3, 4))\n", + "\n", + " # Pad\n", + " padding_2d = (*padding[\"lon\"], *padding[\"lat\"])\n", + "\n", + " def pad2d(x):\n", + " return torch.nn.functional.pad(x, padding_2d, 
mode=\"constant\", value=0)\n", + "\n", + " padding_3d = (*padding[\"lon\"], *padding[\"lat\"], *padding[\"level\"])\n", + "\n", + " def pad3d(x):\n", + " return torch.nn.functional.pad(x, padding_3d, mode=\"constant\", value=0)\n", + "\n", + " sur_x = pad2d(sur_x).contiguous()\n", + " upl_x = pad3d(upl_x).contiguous()\n", + " sur_y = pad2d(sur_y).contiguous()\n", + " upl_y = pad3d(upl_y).contiguous()\n", + " return_value[\"static\"] = pad2d(sur_sta).contiguous()\n", + "\n", + " # Remove time for targets\n", + " upl_y = torch.squeeze(upl_y, 1)\n", + " sur_y = torch.squeeze(sur_y, 1)\n", + "\n", + " # We stack along the combined parameter x level dimension\n", + " return_value[\"x\"] = torch.cat(\n", + " (sur_x, upl_x.view(*upl_x.shape[:2], -1, *upl_x.shape[4:])), dim=2\n", + " )\n", + " return_value[\"y\"] = torch.cat(\n", + " (sur_y, upl_y.view(upl_y.shape[0], -1, *upl_y.shape[3:])), dim=1\n", + " )\n", + "\n", + " if climate_keys.issubset(data_keys):\n", + " sur_climate = torch.empty((nbatch, *b0[\"sur_climate\"].shape))\n", + " ulv_climate = torch.empty((nbatch, *b0[\"ulv_climate\"].shape))\n", + " for i, rec in enumerate(batch):\n", + " sur_climate[i] = rec[\"sur_climate\"]\n", + " ulv_climate[i] = rec[\"ulv_climate\"]\n", + " sur_climate = pad2d(sur_climate)\n", + " ulv_climate = pad3d(ulv_climate)\n", + "\n", + " return_value[\"climate\"] = torch.cat(\n", + " (\n", + " sur_climate,\n", + " ulv_climate.view(nbatch, -1, *ulv_climate.shape[3:]),\n", + " ),\n", + " dim=1,\n", + " )\n", + "\n", + " return return_value\n", + "\n", + "\n", + "def input_scalers(\n", + " surf_vars: list[str],\n", + " vert_vars: list[str],\n", + " levels: list[float],\n", + " surf_path: str | Path,\n", + " vert_path: str | Path,\n", + ") -> tuple[Tensor, Tensor]:\n", + " \"\"\"Reads the input scalers\n", + "\n", + " Args:\n", + " surf_vars: surface variables to be used.\n", + " vert_vars: vertical variables to be used.\n", + " levels: MERRA2 levels to use.\n", + " surf_path: path to surface scalers file.\n", + " vert_path: path to vertical level scalers file.\n", + "\n", + " Returns:\n", + " mu (Tensor): mean values\n", + " var (Tensor): varience values\n", + " \"\"\"\n", + " with h5py.File(Path(surf_path), \"r\", libver=\"latest\") as surf_file:\n", + " stats = [x.decode().lower() for x in surf_file[\"statistic\"][()]]\n", + " mu_idx = stats.index(\"mu\")\n", + " sig_idx = stats.index(\"sigma\")\n", + "\n", + " s_mu = torch.tensor([surf_file[k][()][mu_idx] for k in surf_vars])\n", + " s_sig = torch.tensor([surf_file[k][()][sig_idx] for k in surf_vars])\n", + "\n", + " with h5py.File(Path(vert_path), \"r\", libver=\"latest\") as vert_file:\n", + " stats = [x.decode().lower() for x in vert_file[\"statistic\"][()]]\n", + " mu_idx = stats.index(\"mu\")\n", + " sig_idx = stats.index(\"sigma\")\n", + "\n", + " lvl = vert_file[\"lev\"][()]\n", + " l_idx = [np.where(lvl == v)[0].item() for v in levels]\n", + "\n", + " v_mu = np.array([vert_file[k][()][mu_idx, l_idx] for k in vert_vars])\n", + " v_sig = np.array([vert_file[k][()][sig_idx, l_idx] for k in vert_vars])\n", + "\n", + " v_mu = torch.from_numpy(v_mu).view(-1)\n", + " v_sig = torch.from_numpy(v_sig).view(-1)\n", + "\n", + " mu = torch.cat((s_mu, v_mu), dim=0).to(torch.float32)\n", + " sig = torch.cat((s_sig, v_sig), dim=0).to(torch.float32).clamp(1e-4, 1e4)\n", + " return mu, sig\n", + "\n", + "\n", + "def static_input_scalers(\n", + " scalar_path: str | Path, stat_vars: list[str], unscaled_params: int = 7\n", + ") -> tuple[Tensor, Tensor]:\n", + " 
scalar_path = Path(scalar_path)\n", + "\n", + " with h5py.File(scalar_path, \"r\", libver=\"latest\") as scaler_file:\n", + " stats = [x.decode().lower() for x in scaler_file[\"statistic\"][()]]\n", + " mu_idx = stats.index(\"mu\")\n", + " sig_idx = stats.index(\"sigma\")\n", + "\n", + " mu = torch.tensor([scaler_file[k][()][mu_idx] for k in stat_vars])\n", + " sig = torch.tensor([scaler_file[k][()][sig_idx] for k in stat_vars])\n", + "\n", + " z = torch.zeros(unscaled_params, dtype=mu.dtype, device=mu.device)\n", + " o = torch.ones(unscaled_params, dtype=sig.dtype, device=sig.device)\n", + " mu = torch.cat((z, mu), dim=0).to(torch.float32)\n", + " sig = torch.cat((o, sig), dim=0).to(torch.float32)\n", + "\n", + " return mu, sig.clamp(1e-4, 1e4)\n", + "\n", + "\n", + "def output_scalers(\n", + " surf_vars: list[str],\n", + " vert_vars: list[str],\n", + " levels: list[float],\n", + " surf_path: str | Path,\n", + " vert_path: str | Path,\n", + ") -> Tensor:\n", + " surf_path = Path(surf_path)\n", + " vert_path = Path(vert_path)\n", + "\n", + " with h5py.File(surf_path, \"r\", libver=\"latest\") as surf_file:\n", + " svars = torch.tensor([surf_file[k][()] for k in surf_vars])\n", + "\n", + " with h5py.File(vert_path, \"r\", libver=\"latest\") as vert_file:\n", + " lvl = vert_file[\"lev\"][()]\n", + " l_idx = [np.where(lvl == v)[0].item() for v in levels]\n", + " vvars = np.array([vert_file[k][()][l_idx] for k in vert_vars])\n", + " vvars = torch.from_numpy(vvars).view(-1)\n", + "\n", + " var = torch.cat((svars, vvars), dim=0).to(torch.float32).clamp(1e-7, 1e7)\n", + "\n", + " return var\n", + "\n", + "\n", + "class SampleSpec:\n", + " \"\"\"\n", + " A data class to collect the information used to define a sample.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " inputs: tuple[pd.Timestamp, pd.Timestamp],\n", + " lead_time: int,\n", + " target: pd.Timestamp | list[pd.Timestamp],\n", + " ):\n", + " \"\"\"\n", + " Args:\n", + " inputs: Tuple of timestamps. In ascending order.\n", + " lead_time: Lead time. In hours.\n", + " target: Timestamp of the target. Can be before or after the inputs.\n", + " \"\"\"\n", + " if not inputs[0] < inputs[1]:\n", + " raise ValueError(\n", + " \"Timestamps in `inputs` should be in strictly ascending order.\"\n", + " )\n", + "\n", + " self.inputs = inputs\n", + " self.input_time = (inputs[1] - inputs[0]).total_seconds() / 3600\n", + " self.lead_time = lead_time\n", + " self.target = target\n", + "\n", + " self.times = [*inputs, target]\n", + " self.stat_times = [inputs[-1]]\n", + "\n", + " @property\n", + " def climatology_info(self) -> tuple[int, int]:\n", + " \"\"\"Get the required climatology info.\n", + "\n", + " :return: information required to obtain climatology data. 
Essentially\n", + " this is the day of the year and hour of the day of the target\n", + " timestamp, with the former restricted to the interval [1, 365].\n", + " :rtype: tuple\n", + " \"\"\"\n", + " return (min(self.target.dayofyear, 365), self.target.hour)\n", + "\n", + " @property\n", + " def year(self) -> int:\n", + " return self.inputs[1].year\n", + "\n", + " @property\n", + " def dayofyear(self) -> int:\n", + " return self.inputs[1].dayofyear\n", + "\n", + " @property\n", + " def hourofday(self) -> int:\n", + " return self.inputs[1].hour\n", + "\n", + " def _info_str(self) -> str:\n", + " iso_8601 = \"%Y-%m-%dT%H:%M:%S\"\n", + "\n", + " return (\n", + " f\"Issue time: {self.inputs[1].strftime(iso_8601)}\\n\"\n", + " f\"Lead time: {self.lead_time} hours ahead\\n\"\n", + " f\"Input delta: {self.input_time} hours\\n\"\n", + " f\"Target time: {self.target.strftime(iso_8601)}\"\n", + " )\n", + "\n", + " @classmethod\n", + " def get(cls, timestamp: pd.Timestamp, dt: int, lead_time: int):\n", + " \"\"\"Given a timestamp and lead time, generates a SampleSpec object\n", + " describing the sample.\n", + "\n", + " Args:\n", + " timestamp: Timestamp of the sample, i.e. the larger of the two\n", + " input timestamps.\n", + " dt: Time between input samples, in hours.\n", + " lead_time: Lead time. In hours.\n", + "\n", + " Returns:\n", + " SampleSpec\n", + " \"\"\" # noqa: E501\n", + " assert dt > 0, \"dt should be positive\"\n", + " lt = pd.to_timedelta(lead_time, unit=\"h\")\n", + " dt = pd.to_timedelta(dt, unit=\"h\")\n", + "\n", + " if lead_time >= 0:\n", + " timestamp_target = timestamp + lt\n", + " else:\n", + " timestamp_target = timestamp - dt + lt\n", + "\n", + " spec = cls(\n", + " inputs=(timestamp - dt, timestamp),\n", + " lead_time=lead_time,\n", + " target=timestamp_target,\n", + " )\n", + "\n", + " return spec\n", + "\n", + " def __repr__(self) -> str:\n", + " return self._info_str()\n", + "\n", + " def __str__(self) -> str:\n", + " return self._info_str()\n", + "\n", + "\n",
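+ "# Usage sketch (illustrative values): SampleSpec.get(\n", + "# pd.Timestamp(\"2020-01-01T12:00\"), dt=6, lead_time=12) yields inputs\n", + "# (2020-01-01 06:00, 2020-01-01 12:00) and target 2020-01-02 00:00.\n", + "\n", + "\n",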
+ "class Merra2Dataset(Dataset):\n", + " \"\"\"MERRA2 dataset. The dataset unifies surface and vertical data as well as\n", + " optional climatology.\n", + "\n", + " Samples come in the form of a dictionary. Not all keys support all\n", + " variables, yet the general ordering of dimensions is\n", + " parameter, level, time, lat, lon\n", + "\n", + " Note:\n", + " Data is assumed to be in NetCDF files containing daily data at 3-hourly\n", + " intervals. These follow the naming patterns\n", + " MERRA2_sfc_YYYYMMDD.nc and MERRA_pres_YYYYMMDD.nc and can be located in\n", + " two different locations. Optional climatology data comes from files\n", + " climate_surface_doyDOY_hourHOD.nc and\n", + " climate_vertical_doyDOY_hourHOD.nc.\n", + "\n", + "\n", + " Note:\n", + " `_get_valid_timestamps` assembles a set of all timestamps for which\n", + " there is data (at 3-hourly resolution). The result is stored in\n", + " `_valid_timestamps`. `_get_valid_climate_timestamps` does the same with\n", + " climatology data and stores it in `_valid_climate_timestamps`.\n", + "\n", + " Based on this information, `samples` generates a list of valid samples,\n", + " stored in `samples`. Here the format is::\n", + "\n", + " [\n", + " [\n", + " (timestamp 1, lead time A),\n", + " (timestamp 1, lead time B),\n", + " (timestamp 1, lead time C),\n", + " ],\n", + " [\n", + " (timestamp 2, lead time D),\n", + " (timestamp 2, lead time E),\n", + " ]\n", + " ]\n", + "\n", + " That is, the outer list iterates over timestamps (init times), the\n", + " inner over lead times. Only valid entries are stored.\n", + " \"\"\"\n", + "\n", + " valid_vertical_vars = [\n", + " \"CLOUD\",\n", + " \"H\",\n", + " \"OMEGA\",\n", + " \"PL\",\n", + " \"QI\",\n", + " \"QL\",\n", + " \"QV\",\n", + " \"T\",\n", + " \"U\",\n", + " \"V\",\n", + " ]\n", + " valid_surface_vars = [\n", + " \"EFLUX\",\n", + " \"GWETROOT\",\n", + " \"HFLUX\",\n", + " \"LAI\",\n", + " \"LWGAB\",\n", + " \"LWGEM\",\n", + " \"LWTUP\",\n", + " \"PRECTOT\",\n", + " \"PS\",\n", + " \"QV2M\",\n", + " \"SLP\",\n", + " \"SWGNT\",\n", + " \"SWTNT\",\n", + " \"T2M\",\n", + " \"TQI\",\n", + " \"TQL\",\n", + " \"TQV\",\n", + " \"TS\",\n", + " \"U10M\",\n", + " \"V10M\",\n", + " \"Z0M\",\n", + " ]\n", + " valid_static_surface_vars = [\"FRACI\", \"FRLAND\", \"FROCEAN\", \"PHIS\"]\n", + "\n", + " valid_levels = [\n", + " 34.0,\n", + " 39.0,\n", + " 41.0,\n", + " 43.0,\n", + " 44.0,\n", + " 45.0,\n", + " 48.0,\n", + " 51.0,\n", + " 53.0,\n", + " 56.0,\n", + " 63.0,\n", + " 68.0,\n", + " 71.0,\n", + " 72.0,\n", + " ]\n", + "\n", + " timedelta_input = pd.to_timedelta(3, unit=\"h\")\n", + "\n", + " def __init__(\n", + " self,\n", + " time_range: tuple[str | pd.Timestamp, str | pd.Timestamp],\n", + " lead_times: list[int],\n", + " input_times: list[int],\n", + " data_path_surface: str | Path,\n", + " data_path_vertical: str | Path,\n", + " climatology_path_surface: str | Path | None = None,\n", + " climatology_path_vertical: str | Path | None = None,\n", + " surface_vars: list[str] | None = None,\n", + " static_surface_vars: list[str] | None = None,\n", + " vertical_vars: list[str] | None = None,\n", + " levels: list[float] | None = None,\n", + " roll_longitudes: int = 0,\n", + " positional_encoding: str = \"absolute\",\n", + " rtype: type = np.float32,\n", + " dtype: torch.dtype = torch.float32,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " data_path_surface: Location of surface data.\n", + " data_path_vertical: Location of vertical data.\n", + " climatology_path_surface: Location of (optional) surface\n", + " climatology.\n", + " climatology_path_vertical: Location of (optional) vertical\n", + " climatology.\n", + " surface_vars: Surface variables.\n", + " static_surface_vars: Static surface variables.\n", + " vertical_vars: Vertical variables.\n", + " levels: Levels.\n", + " time_range: Used to subset data.\n", + " lead_times: Lead times for generalized forecasting.\n", + " roll_longitudes: Set to a non-zero value to roll the data by a\n", + " random amount along the longitude dimension.\n", + " positional_encoding: possible values are\n", + " ['absolute' (default), 'fourier'].\n", + " 'absolute' returns lat/lon encoded in 3 dimensions using sine\n", + " and cosine.\n", + " 'fourier' returns raw lat/lon to be encoded by the model.\n", + " rtype: numpy data type used during read\n", + " dtype: torch data type of data output\n", + " \"\"\"\n", + "\n", + " self.time_range = (\n", + " pd.to_datetime(time_range[0]),\n", + " pd.to_datetime(time_range[1]),\n", + " )\n", + " self.lead_times = lead_times\n", + " self.input_times = input_times\n", + " self._roll_longitudes = list(range(roll_longitudes + 1))\n", + "\n", + " 
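# With roll_longitudes=N, each sample is rolled along the longitude\n", + " # axis by a random offset drawn from [0, N] (see _lat_roll below).\n", + " 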
self._uvars = vertical_vars or self.valid_vertical_vars\n", + " self._level = levels or self.valid_levels\n", + " self._svars = surface_vars or self.valid_surface_vars\n", + " self._sstat = static_surface_vars or self.valid_static_surface_vars\n", + " self._nuvars = len(self._uvars)\n", + " self._nlevel = len(self._level)\n", + " self._nsvars = len(self._svars)\n", + " self._nsstat = len(self._sstat)\n", + "\n", + " self.rtype = rtype\n", + " self.dtype = dtype\n", + "\n", + " self.positional_encoding = positional_encoding\n", + "\n", + " self._data_path_surface = Path(data_path_surface)\n", + " self._data_path_vertical = Path(data_path_vertical)\n", + "\n", + " self.dir_exists(self._data_path_surface)\n", + " self.dir_exists(self._data_path_vertical)\n", + "\n", + " self._get_coordinates()\n", + "\n", + " self._climatology_path_surface = (\n", + " Path(climatology_path_surface)\n", + " if climatology_path_surface is not None\n", + " else None\n", + " )\n", + " self._climatology_path_vertical = (\n", + " Path(climatology_path_vertical)\n", + " if climatology_path_vertical is not None\n", + " else None\n", + " )\n", + " self._require_clim = (\n", + " self._climatology_path_surface is not None\n", + " and self._climatology_path_vertical is not None\n", + " )\n", + "\n", + " if self._require_clim:\n", + " self.dir_exists(self._climatology_path_surface)\n", + " self.dir_exists(self._climatology_path_vertical)\n", + " elif (\n", + " climatology_path_surface is None\n", + " and climatology_path_vertical is None\n", + " ):\n", + " self._climatology_path_surface = None\n", + " self._climatology_path_vertical = None\n", + " else:\n", + " raise ValueError(\n", + " \"Either both or neither of \"\n", + " \"`climatology_path_surface` and \"\n", + " \"`climatology_path_vertical` should be None.\"\n", + " )\n", + "\n", + " if not set(self._svars).issubset(set(self.valid_surface_vars)):\n", + " raise ValueError(\"Invalid surface variable.\")\n", + "\n", + " if not set(self._sstat).issubset(set(self.valid_static_surface_vars)):\n", + " raise ValueError(\"Invalid static surface variable.\")\n", + "\n", + " if not set(self._uvars).issubset(set(self.valid_vertical_vars)):\n", + " raise ValueError(\"Invalid vertical variable.\")\n", + "\n", + " if not set(self._level).issubset(set(self.valid_levels)):\n", + " raise ValueError(\"Invalid level.\")\n", + "\n", + " @staticmethod\n", + " def dir_exists(path: Path) -> None:\n", + " if not path.is_dir():\n", + " raise ValueError(f\"Directory {path} does not exist.\")\n", + "\n", + " @property\n", + " def upper_shape(self) -> tuple:\n", + " \"\"\"Returns the vertical variables shape\n", + " Returns:\n", + " tuple: vertical variable shape in the following order::\n", + "\n", + " [VAR, LEV, TIME, LAT, LON]\n", + " \"\"\"\n", + " return self._nuvars, self._nlevel, 2, 361, 576\n", + "\n", + " @property\n", + " def surface_shape(self) -> tuple:\n", + " \"\"\"Returns the surface variables shape\n", + "\n", + " Returns:\n", + " tuple: surface shape in the following order::\n", + "\n", + " [VAR, TIME, LAT, LON]\n", + " \"\"\"\n", + " return self._nsvars, 2, 361, 576\n", + "\n", + " def data_file_surface(self, timestamp: pd.Timestamp) -> Path:\n", + " \"\"\"Build the surface data file name based on timestamp\n", + "\n", + " Args:\n", + " timestamp: a timestamp\n", + "\n", + " Returns:\n", + " Path: constructed path\n", + " \"\"\"\n", + " pattern = \"MERRA2_sfc_%Y%m%d.nc\"\n", + " data_file = self._data_path_surface / timestamp.strftime(pattern)\n", + " return data_file\n", + "\n",
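+ " # Filename illustration: pd.Timestamp(\"2020-01-01\") maps to\n", + " # MERRA2_sfc_20200101.nc under data_path_surface.\n", + "\n",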
+ " def data_file_vertical(self, timestamp: pd.Timestamp) -> Path:\n", + " \"\"\"Build the vertical data file name based on timestamp\n", + "\n", + " Args:\n", + " timestamp: a timestamp\n", + "\n", + " Returns:\n", + " Path: constructed path\n", + " \"\"\"\n", + " pattern = \"MERRA_pres_%Y%m%d.nc\"\n", + " data_file = self._data_path_vertical / timestamp.strftime(pattern)\n", + " return data_file\n", + "\n", + " def data_file_surface_climate(\n", + " self,\n", + " timestamp: pd.Timestamp | None = None,\n", + " dayofyear: int | None = None,\n", + " hourofday: int | None = None,\n", + " ) -> Path:\n", + " \"\"\"\n", + " Returns the path to a climatology file based either on a timestamp or\n", + " the dayofyear / hourofday combination.\n", + "\n", + " Args:\n", + " timestamp: A timestamp.\n", + " dayofyear: Day of the year. 1 to 366.\n", + " hourofday: Hour of the day. 0 to 23.\n", + "\n", + " Returns:\n", + " Path: Path to climatology file.\n", + " \"\"\"\n", + " if timestamp is not None and (\n", + " (dayofyear is not None) or (hourofday is not None)\n", + " ):\n", + " raise ValueError(\n", + " \"Provide either timestamp or both dayofyear and hourofday.\"\n", + " )\n", + "\n", + " if timestamp is not None:\n", + " dayofyear = min(timestamp.dayofyear, 365)\n", + " hourofday = timestamp.hour\n", + "\n", + " file_name = f\"climate_surface_doy{dayofyear:03}_hour{hourofday:02}.nc\"\n", + " data_file = self._climatology_path_surface / file_name\n", + " return data_file\n", + "\n",
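+ " # e.g. dayofyear=1, hourofday=0 selects climate_surface_doy001_hour00.nc.\n", + "\n",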
+ " def data_file_vertical_climate(\n", + " self,\n", + " timestamp: pd.Timestamp | None = None,\n", + " dayofyear: int | None = None,\n", + " hourofday: int | None = None,\n", + " ) -> Path:\n", + " \"\"\"Returns the path to a climatology file based either on a timestamp\n", + " or the dayofyear / hourofday combination.\n", + "\n", + " Args:\n", + " timestamp: A timestamp.\n", + " dayofyear: Day of the year. 1 to 366.\n", + " hourofday: Hour of the day. 0 to 23.\n", + "\n", + " Returns:\n", + " Path: Path to climatology file.\n", + " \"\"\"\n", + " if timestamp is not None and (\n", + " (dayofyear is not None) or (hourofday is not None)\n", + " ):\n", + " raise ValueError(\n", + " \"Provide either timestamp or both dayofyear and hourofday.\"\n", + " )\n", + "\n", + " if timestamp is not None:\n", + " dayofyear = min(timestamp.dayofyear, 365)\n", + " hourofday = timestamp.hour\n", + "\n", + " file_name = f\"climate_vertical_doy{dayofyear:03}_hour{hourofday:02}.nc\"\n", + " data_file = self._climatology_path_vertical / file_name\n", + " return data_file\n", + "\n", + " def _get_coordinates(self) -> None:\n", + " \"\"\"\n", + " Obtains the coordinates (latitudes and longitudes) from a single data\n", + " file.\n", + " \"\"\"\n", + " timestamp = next(iter(self.valid_timestamps))\n", + "\n", + " file = self.data_file_surface(timestamp)\n", + " with h5py.File(file, \"r\", libver=\"latest\") as handle:\n", + " self.lats = lats = handle[\"lat\"][()].astype(self.rtype)\n", + " self.lons = lons = handle[\"lon\"][()].astype(self.rtype)\n", + "\n", + " deg_to_rad = np.pi / 180\n", + " self._embed_lat = np.sin(lats * deg_to_rad).reshape(-1, 1)\n", + "\n", + " self._embed_lon = np.empty((2, 1, len(lons)), dtype=self.rtype)\n", + " self._embed_lon[0, 0] = np.cos(lons * deg_to_rad)\n", + " self._embed_lon[1, 0] = np.sin(lons * deg_to_rad)\n", + "\n", + " @ft.cached_property\n", + " def lats(self) -> np.ndarray:\n", + " timestamp = next(iter(self.valid_timestamps))\n", + "\n", + " file = self.data_file_surface(timestamp)\n", + " with h5py.File(file, \"r\", libver=\"latest\") as handle:\n", + " return handle[\"lat\"][()].astype(self.rtype)\n", + "\n", + " @ft.cached_property\n", + " def lons(self) -> np.ndarray:\n", + " timestamp = next(iter(self.valid_timestamps))\n", + "\n", + " file = self.data_file_surface(timestamp)\n", + " with h5py.File(file, \"r\", libver=\"latest\") as handle:\n", + " return handle[\"lon\"][()].astype(self.rtype)\n", + "\n", + " @ft.cached_property\n", + " def position_signal(self) -> np.ndarray:\n", + " \"\"\"Generates the \"position signal\" that is part of the static\n", + " features.\n", + "\n", + " Returns:\n", + " np.ndarray: Array of dimension (parameter, lat, lon) containing\n", + " sin(lat), cos(lon), sin(lon).\n", + " \"\"\"\n", + "\n", + " latitudes, longitudes = np.meshgrid(\n", + " self.lats, self.lons, indexing=\"ij\"\n", + " )\n", + "\n", + " if self.positional_encoding == \"absolute\":\n", + " latitudes = latitudes / 360 * 2.0 * np.pi\n", + " longitudes = longitudes / 360 * 2.0 * np.pi\n", + " sur_static = np.stack(\n", + " [np.sin(latitudes), np.cos(longitudes), np.sin(longitudes)],\n", + " axis=0,\n", + " )\n", + " else:\n", + " sur_static = np.stack([latitudes, longitudes], axis=0)\n", + "\n", + " sur_static = sur_static.astype(self.rtype)\n", + "\n", + " return sur_static\n", + "\n",
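+ " # In 'absolute' mode the position signal has 3 channels (sin lat,\n", + " # cos lon, sin lon) on the 361 x 576 MERRA-2 grid; in 'fourier' mode\n", + " # it is the raw 2-channel lat/lon meshgrid.\n", + "\n",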
+ " @ft.cached_property\n", + " def valid_timestamps(self) -> set[pd.Timestamp]:\n", + " \"\"\"Generates list of valid timestamps based on available files. Only\n", + " timestamps for which both surface and vertical information is available\n", + " are considered valid.\n", + " Returns:\n", + " list: list of timestamps\n", + " \"\"\"\n", + "\n", + " s_glob = self._data_path_surface.glob(\"MERRA2_sfc_????????.nc\")\n", + " s_files = [os.path.basename(f) for f in s_glob]\n", + " v_glob = self._data_path_vertical.glob(\"MERRA_pres_????????.nc\")\n", + " v_files = [os.path.basename(f) for f in v_glob]\n", + "\n", + " s_re = re.compile(r\"MERRA2_sfc_(\\d{8}).nc\\Z\")\n", + " v_re = re.compile(r\"MERRA_pres_(\\d{8}).nc\\Z\")\n", + " fmt = \"%Y%m%d\"\n", + "\n", + " s_times = {\n", + " (datetime.strptime(m[1], fmt))\n", + " for f in s_files\n", + " if (m := s_re.match(f))\n", + " }\n", + " v_times = {\n", + " (datetime.strptime(m[1], fmt))\n", + " for f in v_files\n", + " if (m := v_re.match(f))\n", + " }\n", + "\n", + " times = s_times.intersection(v_times)\n", + "\n", + " # Each file contains a day at 3 hour intervals\n", + " times = {\n", + " t + timedelta(hours=i) for i in range(0, 24, 3) for t in times\n", + " }\n", + "\n", + " start_time, end_time = self.time_range\n", + " times = {pd.Timestamp(t) for t in times if start_time <= t <= end_time}\n", + "\n", + " return times\n", + "\n", + " @ft.cached_property\n", + " def valid_climate_timestamps(self) -> set[tuple[int, int]]:\n", + " \"\"\"Generates list of \"timestamps\" (dayofyear, hourofday) for which\n", + " climatology data is present. Only instances for which surface and\n", + " vertical data is available are considered valid.\n", + " Returns:\n", + " list: List of tuples describing valid climatology instances.\n", + " \"\"\"\n", + " if not self._require_clim:\n", + " return set()\n", + "\n", + " s_glob = self._climatology_path_surface.glob(\n", + " \"climate_surface_doy???_hour??.nc\"\n", + " )\n", + " s_files = [os.path.basename(f) for f in s_glob]\n", + "\n", + " v_glob = self._climatology_path_vertical.glob(\n", + " \"climate_vertical_doy???_hour??.nc\"\n", + " )\n", + " v_files = [os.path.basename(f) for f in v_glob]\n", + "\n", + " s_re = re.compile(r\"climate_surface_doy(\\d{3})_hour(\\d{2}).nc\\Z\")\n", + " v_re = re.compile(r\"climate_vertical_doy(\\d{3})_hour(\\d{2}).nc\\Z\")\n", + "\n", + " s_times = {\n", + " (int(m[1]), int(m[2])) for f in s_files if (m := s_re.match(f))\n", + " }\n", + " v_times = {\n", + " (int(m[1]), int(m[2])) for f in v_files if (m := v_re.match(f))\n", + " }\n", + "\n", + " times = s_times.intersection(v_times)\n", + "\n", + " return times\n", + "\n",
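+ " # e.g. the file pair climate_surface_doy001_hour00.nc /\n", + " # climate_vertical_doy001_hour00.nc contributes the tuple (1, 0).\n", + "\n",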
+ " def _data_available(self, spec: SampleSpec) -> bool:\n", + " \"\"\"\n", + " Checks whether data is available for a given SampleSpec object. Does so\n", + " using the internal sets of available data constructed previously, not\n", + " by checking the file system.\n", + " Args:\n", + " spec: SampleSpec object as returned by SampleSpec.get\n", + " Returns:\n", + " bool: whether data is available.\n", + " \"\"\"\n", + " valid = set(spec.times).issubset(self.valid_timestamps)\n", + "\n", + " if self._require_clim:\n", + " sci = spec.climatology_info\n", + " ci = set(sci) if isinstance(sci, list) else set([sci]) # noqa: C405\n", + " valid &= ci.issubset(self.valid_climate_timestamps)\n", + "\n", + " return valid\n", + "\n", + " @ft.cached_property\n", + " def samples(self) -> list[tuple[pd.Timestamp, int, int]]:\n", + " \"\"\"\n", + " Generates list of all valid samples.\n", + " Returns:\n", + " list: List of tuples (timestamp, input time, lead time).\n", + " \"\"\"\n", + " valid_samples = []\n", + " dts = [(it, lt) for it in self.input_times for lt in self.lead_times]\n", + "\n", + " for timestamp in sorted(self.valid_timestamps):\n", + " timestamp_samples = []\n", + " for it, lt in dts:\n", + " spec = SampleSpec.get(timestamp, -it, lt)\n", + "\n", + " if self._data_available(spec):\n", + " timestamp_samples.append((timestamp, it, lt))\n", + "\n", + " if timestamp_samples:\n", + " valid_samples.append(timestamp_samples)\n", + "\n", + " return valid_samples\n", + "\n", + " def _to_torch(\n", + " self,\n", + " data: dict[str, Tensor | list[Tensor]],\n", + " dtype: torch.dtype = torch.float32,\n", + " ) -> dict[str, Tensor | list[Tensor]]:\n", + " out = {}\n", + " for k, v in data.items():\n", + " if isinstance(v, list):\n", + " out[k] = [torch.from_numpy(x).to(dtype) for x in v]\n", + " else:\n", + " out[k] = torch.from_numpy(v).to(dtype)\n", + "\n", + " return out\n", + "\n", + " def _lat_roll(\n", + " self, data: dict[str, Tensor | list[Tensor]], n: int\n", + " ) -> dict[str, Tensor | list[Tensor]]:\n", + " out = {}\n", + " for k, v in data.items():\n", + " if isinstance(v, list):\n", + " out[k] = [torch.roll(x, shifts=n, dims=-1) for x in v]\n", + " else:\n", + " out[k] = torch.roll(v, shifts=n, dims=-1)\n", + "\n", + " return out\n", + "\n", + " def _read_static_data(\n", + " self, file: str | Path, doy: int, hod: int\n", + " ) -> np.ndarray:\n", + " with h5py.File(file, \"r\", libver=\"latest\") as handle:\n", + " lats_surf = handle[\"lat\"]\n", + " lons_surf = handle[\"lon\"]\n", + "\n", + " nll = (len(lats_surf), len(lons_surf))\n", + "\n", + " npos = len(self.position_signal)\n", + " ntime = 4\n", + "\n", + " nstat = npos + ntime + self._nsstat\n", + " data = np.empty((nstat, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._sstat, start=npos + ntime):\n", + " data[i] = handle[key][()].astype(dtype=self.rtype)\n", + "\n", + " # [position signal], cos(doy), sin(doy), cos(hod), sin(hod)\n", + " data[0:npos] = self.position_signal\n", + " data[npos + 0] = np.cos(2 * np.pi * doy / 366)\n", + " data[npos + 1] = np.sin(2 * np.pi * doy / 366)\n", + " data[npos + 2] = np.cos(2 * np.pi * hod / 24)\n", + " data[npos + 3] = np.sin(2 * np.pi * hod / 24)\n", + "\n", + " return data\n", + "\n", + " def _read_surface(\n", + " self, tidx: int, nll: tuple[int, int], handle: h5py.File\n", + " ) -> np.ndarray:\n", + " data = np.empty((self._nsvars, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._svars):\n", + " data[i] = handle[key][tidx][()].astype(dtype=self.rtype)\n", + "\n", + " return data\n", + "\n", + " def _read_levels(\n", + " self, tidx: int, nll: tuple[int, int], handle: h5py.File\n", + " ) -> np.ndarray:\n", + " lvls = handle[\"lev\"][()]\n", + " lidx = 
self._level_idxs(lvls)\n", + "\n", + " data = np.empty((self._nuvars, self._nlevel, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._uvars):\n", + " data[i] = handle[key][tidx, lidx][()].astype(dtype=self.rtype)\n", + "\n", + " return np.ascontiguousarray(np.flip(data, axis=1))\n", + "\n", + " def _level_idxs(self, lvls):\n", + " lidx = [np.argwhere(lvls == int(lvl)).item() for lvl in self._level]\n", + " return sorted(lidx)\n", + "\n", + " @staticmethod\n", + " def _date_to_tidx(date: datetime | pd.Timestamp, handle: h5py.File) -> int:\n", + " if isinstance(date, pd.Timestamp):\n", + " date = date.to_pydatetime()\n", + "\n", + " time = handle[\"time\"]\n", + "\n", + " t0 = time.attrs[\"begin_time\"][()].item()\n", + " d0 = f\"{time.attrs['begin_date'][()].item()}\"\n", + "\n", + " offset = datetime.strptime(d0, \"%Y%m%d\")\n", + "\n", + " times = [offset + timedelta(minutes=int(t + t0)) for t in time[()]]\n", + " return times.index(date)\n", + "\n", + " def _read_data(\n", + " self, file_pair: tuple[str, str], date: datetime\n", + " ) -> dict[str, np.ndarray]:\n", + " s_file, v_file = file_pair\n", + "\n", + " with h5py.File(s_file, \"r\", libver=\"latest\") as shandle:\n", + " lats_surf = shandle[\"lat\"]\n", + " lons_surf = shandle[\"lon\"]\n", + "\n", + " nll = (len(lats_surf), len(lons_surf))\n", + "\n", + " tidx = self._date_to_tidx(date, shandle)\n", + "\n", + " sdata = self._read_surface(tidx, nll, shandle)\n", + "\n", + " with h5py.File(v_file, \"r\", libver=\"latest\") as vhandle:\n", + " lats_vert = vhandle[\"lat\"]\n", + " lons_vert = vhandle[\"lon\"]\n", + "\n", + " nll = (len(lats_vert), len(lons_vert))\n", + "\n", + " tidx = self._date_to_tidx(date, vhandle)\n", + "\n", + " vdata = self._read_levels(tidx, nll, vhandle)\n", + "\n", + " data = {\"vert\": vdata, \"surf\": sdata}\n", + "\n", + " return data\n", + "\n", + " def _read_climate(\n", + " self, file_pair: tuple[str, str]\n", + " ) -> dict[str, np.ndarray]:\n", + " s_file, v_file = file_pair\n", + "\n", + " with h5py.File(s_file, \"r\", libver=\"latest\") as shandle:\n", + " lats_surf = shandle[\"lat\"]\n", + " lons_surf = shandle[\"lon\"]\n", + "\n", + " nll = (len(lats_surf), len(lons_surf))\n", + "\n", + " sdata = np.empty((self._nsvars, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._svars):\n", + " sdata[i] = shandle[key][()].astype(dtype=self.rtype)\n", + "\n", + " with h5py.File(v_file, \"r\", libver=\"latest\") as vhandle:\n", + " lats_vert = vhandle[\"lat\"]\n", + " lons_vert = vhandle[\"lon\"]\n", + "\n", + " nll = (len(lats_vert), len(lons_vert))\n", + "\n", + " lvls = vhandle[\"lev\"][()]\n", + " lidx = self._level_idxs(lvls)\n", + "\n", + " vdata = np.empty(\n", + " (self._nuvars, self._nlevel, *nll), dtype=self.rtype\n", + " )\n", + "\n", + " for i, key in enumerate(self._uvars):\n", + " vdata[i] = vhandle[key][lidx][()].astype(dtype=self.rtype)\n", + "\n", + " data = {\n", + " \"vert\": np.ascontiguousarray(np.flip(vdata, axis=1)),\n", + " \"surf\": sdata,\n", + " }\n", + "\n", + " return data\n", + "\n", + " def get_data_from_sample_spec(\n", + " self, spec: SampleSpec\n", + " ) -> dict[str, Tensor | int | float]:\n", + " \"\"\"Loads and assembles sample data given a SampleSpec object.\n", + "\n", + " Args:\n", + " spec (SampleSpec): Full details regarding the data to be loaded\n", + " Returns:\n", + " dict: Dictionary with the following keys::\n", + "\n", + " 'sur_static': Torch tensor of shape [parameter, lat, lon]. 
For\n", + " each pixel (lat, lon), the first 7 channels index sin(lat),\n", + " cos(lon), sin(lon), cos(doy), sin(doy), cos(hod), sin(hod),\n", + " where doy is the day of the year [1, 366] and hod the hour of\n", + " the day [0, 23].\n", + " 'sur_vals': Torch tensor of shape [parameter, time, lat, lon].\n", + " 'sur_tars': Torch tensor of shape [parameter, time, lat, lon].\n", + " 'ulv_vals': Torch tensor of shape [parameter, level, time, lat, lon].\n", + " 'ulv_tars': Torch tensor of shape [parameter, level, time, lat, lon].\n", + " 'sur_climate': Torch tensor of shape [parameter, lat, lon].\n", + " 'ulv_climate': Torch tensor of shape [parameter, level, lat, lon].\n", + " 'lead_time': Float.\n", + " 'input_time': Float.\n", + "\n", + " \"\"\" # noqa: E501\n", + "\n", + " # We assemble the unique timestamps for which we need data.\n", + " vals_required = {*spec.times}\n", + " stat_required = {*spec.stat_times}\n", + "\n", + " # We assemble the unique data files from which we need value data\n", + " vals_file_map = defaultdict(list)\n", + " for t in vals_required:\n", + " data_files = (\n", + " self.data_file_surface(t),\n", + " self.data_file_vertical(t),\n", + " )\n", + " vals_file_map[data_files].append(t)\n", + "\n", + " # We assemble the unique data files from which we need static data\n", + " stat_file_map = defaultdict(list)\n", + " for t in stat_required:\n", + " data_files = (\n", + " self.data_file_surface(t),\n", + " self.data_file_vertical(t),\n", + " )\n", + " stat_file_map[data_files].append(t)\n", + "\n", + " # Load the value data\n", + " data = {}\n", + " for data_files, times in vals_file_map.items():\n", + " for time in times:\n", + " data[time] = self._read_data(data_files, time)\n", + "\n", + " # Combine times\n", + " sample_data = {}\n", + "\n", + " input_upl = np.stack([data[t][\"vert\"] for t in spec.inputs], axis=2)\n", + " sample_data[\"ulv_vals\"] = input_upl\n", + "\n", + " target_upl = data[spec.target][\"vert\"]\n", + " sample_data[\"ulv_tars\"] = target_upl[:, :, None]\n", + "\n", + " input_sur = np.stack([data[t][\"surf\"] for t in spec.inputs], axis=1)\n", + " sample_data[\"sur_vals\"] = input_sur\n", + "\n", + " target_sur = data[spec.target][\"surf\"]\n", + " sample_data[\"sur_tars\"] = target_sur[:, None]\n", + "\n", + " # Load the static data\n", + " data_files, times = stat_file_map.popitem()\n", + " time = times[0].dayofyear, times[0].hour\n", + " sample_data[\"sur_static\"] = self._read_static_data(\n", + " data_files[0], *time\n", + " )\n", + "\n", + " # If required, load the climatology data\n", + " if self._require_clim:\n", + " ci_doy, ci_hour = spec.climatology_info\n", + "\n", + " surf_file = self.data_file_surface_climate(\n", + " dayofyear=ci_doy,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " vert_file = self.data_file_vertical_climate(\n", + " dayofyear=ci_doy,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " clim_data = self._read_climate((surf_file, vert_file))\n", + "\n", + " sample_data[\"sur_climate\"] = clim_data[\"surf\"]\n", + " sample_data[\"ulv_climate\"] = clim_data[\"vert\"]\n", + "\n", + " # Move the data from numpy to torch\n", + " sample_data = self._to_torch(sample_data, dtype=self.dtype)\n", + "\n", + " # Optionally roll\n", + " if len(self._roll_longitudes) > 0:\n", + " roll_by = random.choice(self._roll_longitudes)\n", + " sample_data = self._lat_roll(sample_data, roll_by)\n", + "\n", + " # Now that we have rolled, we can add the lead and input times\n", + " sample_data[\"lead_time\"] = spec.lead_time\n", + " 
sample_data[\"input_time\"] = spec.input_time\n", + "\n", + " return sample_data\n", + "\n", + " def get_data(\n", + " self, timestamp: pd.Timestamp, input_time: int, lead_time: int\n", + " ) -> dict[str, Tensor | int]:\n", + " \"\"\"\n", + " Loads data based on timestamp and lead time.\n", + " Args:\n", + " timestamp: Timestamp.\n", + " input_time: time between input samples.\n", + " lead_time: lead time.\n", + " Returns:\n", + " Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate',\n", + " 'lead_time'.\n", + " \"\"\"\n", + " spec = SampleSpec.get(timestamp, -input_time, lead_time)\n", + " sample_data = self.get_data_from_sample_spec(spec)\n", + " return sample_data\n", + "\n", + " def __getitem__(self, idx: int) -> dict[str, Tensor | int]:\n", + " \"\"\"\n", + " Loads data based on sample index and random choice of sample.\n", + " Args:\n", + " idx: Sample index.\n", + " Returns:\n", + " Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate',\n", + " 'lead_time', 'input_time'.\n", + " \"\"\"\n", + " sample_set = self.samples[idx]\n", + " timestamp, input_time, lead_time, *nsteps = random.choice(sample_set)\n", + " sample_data = self.get_data(timestamp, input_time, lead_time)\n", + " return sample_data\n", + "\n", + " def __len__(self):\n", + " return len(self.samples)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [], + "source": [ + "# from PrithviWxC.dataloaders.merra2 import Merra2Dataset\n", + "\n", + "dataset = Merra2Dataset(\n", + " time_range=time_range,\n", + " lead_times=lead_times,\n", + " input_times=input_times,\n", + " data_path_surface=surf_dir,\n", + " data_path_vertical=vert_dir,\n", + " climatology_path_surface=surf_clim_dir,\n", + " climatology_path_vertical=vert_clim_dir,\n", + " surface_vars=surface_vars,\n", + " static_surface_vars=static_surface_vars,\n", + " vertical_vars=vertical_vars,\n", + " levels=levels,\n", + " positional_encoding=positional_encoding,\n", + ")\n", + "assert len(dataset) > 0, \"There doesn't seem to be any valid data.\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## The model\n", + "We are now ready to build the mdoel.\n", + "### Scalers\n", + "Additionally, the model takes as static parameters the mean\n", + "and variance values of the input variables and the variance\n", + "values of the target difference, i.e., the variance between\n", + "climatology and instantaneous variables. We have provided\n", + "data files containing these values, and here we load this data." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [], + "source": [ + "# from PrithviWxC.dataloaders.merra2 import (\n", + "# input_scalers,\n", + "# output_scalers,\n", + "# static_input_scalers,\n", + "# )\n", + "\n", + "surf_in_scal_path = Path(\"./climatology/musigma_surface.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{surf_in_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "vert_in_scal_path = Path(\"./climatology/musigma_vertical.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{vert_in_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "surf_out_scal_path = Path(\"./climatology/anomaly_variance_surface.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{surf_out_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "vert_out_scal_path = Path(\"./climatology/anomaly_variance_vertical.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{vert_out_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "in_mu, in_sig = input_scalers(\n", + " surface_vars,\n", + " vertical_vars,\n", + " levels,\n", + " surf_in_scal_path,\n", + " vert_in_scal_path,\n", + ")\n", + "\n", + "output_sig = output_scalers(\n", + " surface_vars,\n", + " vertical_vars,\n", + " levels,\n", + " surf_out_scal_path,\n", + " vert_out_scal_path,\n", + ")\n", + "\n", + "static_mu, static_sig = static_input_scalers(\n", + " surf_in_scal_path,\n", + " static_surface_vars,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Task and additional configs\n", + "As previously mentioned, the PrithviWxC model's pretext task\n", + "involved predicting the desired variable at a specific lead\n", + "time. This was achieved by calculating the difference (delta)\n", + "compared to the climatological average at that time. This\n", + "operational mode is activated using the residual flag. Although\n", + "the model includes additional residual options, the core model\n", + "weights were not trained using these modes.\n", + "\n", + "Additionally, for training and evaluation, it is possible to\n", + "mask tokens in the model. The masking occurs after tokenization,\n", + "prior to the encoder layers. The model utilizes multi-axis\n", + "attention, with data broken down into a hierarchy of local and\n", + "global patches. Consequently, masking can be configured to mask\n", + "either small local patches or larger global patches. This\n", + "configuration is achieved via the `masking_mode` flag. It is\n", + "possible to set `masking_mode=both`. This does not mix the modes\n", + "but rather allows both modes to be used and swapped between,\n", + "primarily for training purposes. For this demonstration, we will\n", + "adjust the masking ratio to showcase the reconstruction\n", + "capabilities of the model.\n", + "\n", + "Finally, we can set up shifting. Primarily utilized in the\n", + "decoder, this enables alternate shifting of the attention\n", + "windows, similar to the SWIN model. This option necessitates\n", + "an even number of decoder blocks and is incompatible with the\n", + "encoder when masking is also employed." 
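 + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To make the masking ratio concrete, here is a minimal sketch of\n", + "the shuffle-and-split logic used to divide token indices into\n", + "masked and unmasked sets (toy sizes, not the real patch grid; the\n", + "cell after it sets the actual configuration used in this\n", + "notebook)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Sketch: a masking ratio maps to a token count via int(ratio * n).\n", + "# Toy sequence length; the model applies this per local/global patch.\n", + "import torch\n", + "\n", + "n_tokens = 10\n", + "mask_ratio = 0.99\n", + "n_masked = int(mask_ratio * n_tokens)  # 9 of 10 tokens masked\n", + "\n", + "perm = torch.randperm(n_tokens)  # shuffle the token indices\n", + "masked_idx = perm[:n_masked]  # hidden from the encoder\n", + "unmasked_idx = perm[n_masked:]  # what the encoder sees\n", + "print(n_masked, masked_idx, unmasked_idx)"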
+ ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [], + "source": [ + "residual = \"climate\"\n", + "masking_mode = \"local\"\n", + "decoder_shifting = True\n", + "masking_ratio = 0.99" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Model init\n", + "We now have all the pieces to build the model. If you are\n", + "using the pretrained weights, a number of the model\n", + "hyperparameters are predetermined and included below. With\n", + "this configuration, the model will have approximately 2.3\n", + "billion parameters. Therefore, if you want to train the fully\n", + "unfrozen model, you will likely need to use a model distribution\n", + "approach, such as fully sharded data parallelism (FSDP). To\n", + "further reduce the memory usage of the model when gradients are\n", + "required, there are two variables — `checkpoint_encoder` and\n", + "`checkpoint_decoder` — which enable activation checkpointing of\n", + "desired transformer layers." + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [], + "source": [ + "from functools import cached_property\n", + "from importlib.metadata import version\n", + "\n", + "from torch import Tensor\n", + "from torch.utils.checkpoint import checkpoint\n", + "\n", + "if version(\"torch\") > \"2.3.0\":\n", + " from torch.nn.attention import SDPBackend, sdpa_kernel\n", + "import numpy as np\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "\n", + "\n", + "# DropPath code is straight from timm\n", + "# (https://huggingface.co/spaces/Roll20/pet_score/blame/main/lib/timm/models/layers/drop.py)\n", + "def drop_path(\n", + " x: Tensor,\n", + " drop_prob: float = 0.0,\n", + " training: bool = False,\n", + " scale_by_keep: bool = True,\n", + ") -> Tensor:\n", + " \"\"\"Drop paths (Stochastic Depth) per sample (when applied in main path of\n", + " residual blocks). 
Taken from timm.\n", + "\n", + " Args:\n", + " x (Tensor): Input tensor.\n", + " drop_prob (float): Probability of dropping `x`, defaults to 0.\n", + " training (bool): Whether model is in training or eval mode,\n", + " defaults to False.\n", + " scale_by_keep (bool): Whether the output should be scaled by\n", + " (`1 - drop_prob`), defaults to True.\n", + " Returns:\n", + " Tensor: Tensor that may have been randomly dropped with probability\n", + " `drop_prob`\n", + " \"\"\"\n", + " if drop_prob == 0.0 or not training:\n", + " return x\n", + " keep_prob = 1 - drop_prob\n", + " shape = (x.shape[0],) + (1,) * (x.ndim - 1)\n", + " random_tensor = x.new_empty(shape).bernoulli_(keep_prob)\n", + " if keep_prob > 0.0 and scale_by_keep:\n", + " random_tensor.div_(keep_prob)\n", + " return x * random_tensor\n", + "\n", + "\n", + "class DropPath(nn.Module):\n", + " \"\"\"\n", + " Drop paths (Stochastic Depth) per sample (when applied in main path of\n", + " residual blocks).\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self, drop_prob: float | None = None, scale_by_keep: bool = True\n", + " ) -> None:\n", + " super(DropPath, self).__init__()\n", + " self.drop_prob = drop_prob\n", + " self.scale_by_keep = scale_by_keep\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"Runs drop path on input tensor\n", + "\n", + " Args:\n", + " x: input\n", + "\n", + " Returns:\n", + " tensor: output after drop_path\n", + " \"\"\"\n", + " return drop_path(x, self.drop_prob, self.training, self.scale_by_keep)\n", + "\n", + "\n", + "class Mlp(nn.Module):\n", + " \"\"\"\n", + " Multi layer perceptron.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self, features: int, hidden_features: int, dropout: float = 0.0\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Input/output dimension.\n", + " hidden_features: Hidden dimension.\n", + " dropout: Dropout.\n", + " \"\"\"\n", + " super().__init__()\n", + " self.net = nn.Sequential(\n", + " nn.Linear(features, hidden_features),\n", + " nn.GELU(),\n", + " nn.Dropout(dropout),\n", + " nn.Linear(hidden_features, features),\n", + " nn.Dropout(dropout),\n", + " )\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x (Tensor): Tensor of shape [..., channel]\n", + " Returns:\n", + " Tensor: Tensor of same shape as x.\n", + " \"\"\"\n", + " return self.net(x)\n", + "\n", + "\n", + "class LayerNormPassThrough(nn.LayerNorm):\n", + " \"\"\"Normalising layer that allows the attention mask to be passed through\"\"\"\n", + "\n", + " def __init__(self, *args, **kwargs):\n", + " super().__init__(*args, **kwargs)\n", + "\n", + " def forward(\n", + " self, d: tuple[Tensor, Tensor | None]\n", + " ) -> tuple[Tensor, Tensor | None]:\n", + " \"\"\"Forward function\n", + "\n", + " Args:\n", + " d (tuple): tuple of the data tensor and the attention mask\n", + " Returns:\n", + " output (Tensor): normalised output data\n", + " attn_mask (Tensor): the attention mask that was passed in\n", + " \"\"\"\n", + " input, attn_mask = d\n", + " output = F.layer_norm(\n", + " input, self.normalized_shape, self.weight, self.bias, self.eps\n", + " )\n", + " return output, attn_mask\n", + "\n", + "\n", + "class MultiheadAttention(nn.Module):\n", + " \"\"\"Multihead attention layer for inputs of shape\n", + " [..., sequence, features].\n", + " \"\"\"\n", + "\n", + " def __init__(self, features: int, n_heads: int, dropout: float) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Number of features for inputs to the layer.\n", + " n_heads: Number of attention heads. Should be a factor of features.\n", + " (I.e. the layer uses features // n_heads.)\n", + " dropout: Dropout.\n", + " \"\"\" # noqa: E501\n", + " super().__init__()\n", + "\n", + " if (features % n_heads) != 0:\n", + " raise ValueError(\n", + " f\"Features '{features}' is not divisible by heads '{n_heads}'.\"\n", + " )\n", + "\n", + " self.features = features\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + "\n", + " self.qkv_layer = torch.nn.Linear(features, features * 3, bias=False)\n", + " self.w_layer = torch.nn.Linear(features, features, bias=False)\n", + "\n", + " def forward(self, d: tuple[Tensor, Tensor | None]) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " d (tuple): tuple containing Tensor of shape [..., sequence, features] and the attention mask\n", + " Returns:\n", + " Tensor: Tensor of shape [..., sequence, features]\n", + " \"\"\" # noqa: E501\n", + " x, attn_mask = d\n", + "\n", + " if not x.shape[-1] == self.features:\n", + " raise ValueError(\n", + " f\"Expecting tensor with last dimension size {self.features}.\"\n", + " )\n", + "\n", + " passenger_dims = x.shape[:-2]\n", + " B = passenger_dims.numel()\n", + " S = x.shape[-2]\n", + " C = x.shape[-1]\n", + " x = x.reshape(B, S, C)\n", + "\n", + " # x [B, S, C]\n", + " # q, k, v [B, H, S, C/H]\n", + " q, k, v = (\n", + " self.qkv_layer(x)\n", + " .view(B, S, self.n_heads, 3 * (C // self.n_heads))\n", + " .transpose(1, 2)\n", + " .chunk(chunks=3, dim=3)\n", + " )\n", + "\n", + " # Let us enforce either flash (A100+) or memory efficient attention.\n", + " if version(\"torch\") > \"2.3.0\":\n", + " with sdpa_kernel(\n", + " [SDPBackend.FLASH_ATTENTION, SDPBackend.EFFICIENT_ATTENTION]\n", + " ):\n", + " # x [B, H, S, C//H]\n", + " x = F.scaled_dot_product_attention(\n", + " q, k, v, attn_mask=attn_mask, dropout_p=self.dropout\n", + " )\n", + " else:\n", + " with torch.backends.cuda.sdp_kernel(\n", + " enable_flash=True, enable_math=False, enable_mem_efficient=True\n", + " ):\n", + " # x [B, H, S, C//H]\n", + " x = F.scaled_dot_product_attention(\n", + " q, k, v, dropout_p=self.dropout\n", + " )\n", + "\n", + " # x [B, S, C]\n", + " x = x.transpose(1, 2).view(B, S, C)\n", + "\n", + " # x [B, S, C]\n", + " x = self.w_layer(x)\n", + "\n", + " # Back to input shape\n", + " x = x.view(*passenger_dims, S, self.features)\n", + " return x\n", + "\n", + "\n", + "class Transformer(nn.Module):\n", + " \"\"\"\n", + " Transformer for inputs of shape [..., S, features].\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " features: int,\n", + " mlp_multiplier: int,\n", + " n_heads: int,\n", + " dropout: float,\n", + " drop_path: float,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Number of features for inputs to the layer.\n", + " mlp_multiplier: Model uses features*mlp_multiplier hidden units.\n", + " n_heads: Number of attention heads. Should be a factor of features.\n", + " (I.e. the layer uses features // n_heads.) 
dropout: Dropout.\n", + " drop_path: DropPath.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.features = features\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self.drop_path = (\n", + " DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n", + " )\n", + "\n", + " self.attention = nn.Sequential(\n", + " LayerNormPassThrough(features),\n", + " MultiheadAttention(features, n_heads, dropout),\n", + " )\n", + "\n", + " self.ff = nn.Sequential(\n", + " nn.LayerNorm(features),\n", + " Mlp(\n", + " features=features,\n", + " hidden_features=features * mlp_multiplier,\n", + " dropout=dropout,\n", + " ),\n", + " )\n", + "\n", + " def forward(self, d: tuple[Tensor, Tensor | None]) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape [..., sequence, features]\n", + " Returns:\n", + " Tensor: Tensor of shape [..., sequence, features]\n", + " \"\"\"\n", + " x, attn_mask = d\n", + " if not x.shape[-1] == self.features:\n", + " raise ValueError(\n", + " f\"Expecting tensor with last dimension size {self.features}.\"\n", + " )\n", + "\n", + " attention_x = self.attention(d)\n", + "\n", + " x = x + self.drop_path(attention_x)\n", + " x = x + self.drop_path(self.ff(x))\n", + "\n", + " return x\n", + "\n", + "\n", + "class _Shift(nn.Module):\n", + " \"\"\"Private base class for the shifter. This allows some behaviour to be\n", + " easily handled when the shifter isn't used.\n", + " \"\"\"\n", + "\n", + " def __init__(self):\n", + " super().__init__()\n", + "\n", + " self._shifted = False\n", + "\n", + " @torch.no_grad()\n", + " def reset(self) -> None:\n", + " \"\"\"\n", + " Resets the bool tracking whether the data is shifted\n", + " \"\"\"\n", + " self._shifted: bool = False\n", + "\n", + " def forward(self, data: Tensor) -> tuple[Tensor, dict[bool, None]]:\n", + " return data, {True: None, False: None}\n", + "\n", + "\n", + "class SWINShift(_Shift):\n", + " \"\"\"\n", + " Handles the shifting of patches similar to how SWIN works. However if we\n", + " shift the latitudes then the poles will wrap and potentially that might be\n", + " problematic. 
The possition tokens should handle it but masking is safer.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " mu_shape: tuple[int, int],\n", + " global_shape: tuple[int, int],\n", + " local_shape: tuple[int, int],\n", + " patch_shape: tuple[int, int],\n", + " n_context_tokens: int = 2,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " mu_shape: the shape to the masking units\n", + " global_shape: number of global patches in lat and lon\n", + " local_shape: size of the local patches\n", + " patch_shape: patch size\n", + " n_context_token: number of additional context tokens at start of\n", + " _each_ local sequence\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self._mu_shape = ms = mu_shape\n", + " self._g_shape = gs = global_shape\n", + " self._l_shape = ls = local_shape\n", + " self._p_shape = ps = patch_shape\n", + " self._lat_patch = (gs[0], ls[0], gs[1], ls[1])\n", + " self._n_context_tokens = n_context_tokens\n", + "\n", + " self._g_shift_to = tuple(\n", + " int(0.5 * x / p) for x, p in zip(ms, ps, strict=False)\n", + " )\n", + " self._g_shift_from = tuple(\n", + " -int(0.5 * x / p) for x, p in zip(ms, ps, strict=False)\n", + " )\n", + "\n", + " # Define the attention masks for the shifted MaxViT.\n", + " nglobal = global_shape[0] * global_shape[1]\n", + " nlocal = (\n", + " local_shape[0] * local_shape[1] + self._n_context_tokens\n", + " ) # \"+ 1\" for leadtime\n", + "\n", + " lm = torch.ones((nglobal, 1, nlocal, nlocal), dtype=bool)\n", + " mwidth = int(0.5 * local_shape[1]) * local_shape[0]\n", + " lm[\n", + " : gs[1],\n", + " :,\n", + " self._n_context_tokens : mwidth + self._n_context_tokens,\n", + " self._n_context_tokens : mwidth + self._n_context_tokens,\n", + " ] = False\n", + " self.register_buffer(\"local_mask\", lm)\n", + "\n", + " gm = torch.ones((nlocal, 1, nglobal, nglobal), dtype=bool)\n", + " gm[: int(0.5 * ls[1]) * ls[0], :, : gs[1], : gs[1]] = False\n", + " self.register_buffer(\"global_mask\", gm)\n", + "\n", + " def _to_grid_global(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the global/local setting back to the\n", + " lat/lon grid setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the global/local setting\n", + " \"\"\"\n", + " nbatch, *other = x.shape\n", + "\n", + " y1 = x.view(nbatch, *self._g_shape, *self._l_shape, -1)\n", + " y2 = y1.permute(0, 5, 1, 3, 2, 4).contiguous()\n", + "\n", + " s = y2.shape\n", + " return y2.view((nbatch, -1, s[2] * s[3], s[4] * s[5]))\n", + "\n", + " def _to_grid_local(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the local/global setting to the\n", + " lat/lon grid setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the lat/lon setting.\n", + " \"\"\"\n", + " x = x.transpose(2, 1).contiguous()\n", + " return self._to_grid_global(x)\n", + "\n", + " def _from_grid_global(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the lat/lon grid to the global/local\n", + " setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the global/local setting\n", + " \"\"\"\n", + " nbatch, *other = x.shape\n", + "\n", + " z1 = x.view(nbatch, -1, *self._lat_patch)\n", + " z2 = z1.permute(0, 2, 4, 3, 5, 1).contiguous()\n", + "\n", + " s = z2.shape\n", + " return z2.view(nbatch, s[1] * s[2], s[3] * s[4], -1)\n", + "\n", + " def _from_grid_local(self, x: Tensor) 
-> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the lat/lon grid to the local/global\n", + " setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the local/global setting\n", + " \"\"\"\n", + " x = self._from_grid_global(x)\n", + " return x.transpose(2, 1).contiguous()\n", + "\n", + " def _shift(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shifts data in the gridded lat/lon setting by half the mask unit shape\n", + " Args:\n", + " x: data to be shifted\n", + " Returns:\n", + " x: either the shifted or unshifted data\n", + " \"\"\"\n", + " shift = self._g_shift_from if self._shifted else self._g_shift_to\n", + " x_shifted = torch.roll(x, shift, (-2, -1))\n", + "\n", + " self._shifted = not self._shifted\n", + " return x_shifted\n", + "\n", + " def _sep_lt(self, x: Tensor) -> tuple[Tensor, Tensor]:\n", + " \"\"\"\n", + " Separate off the leadtime from the local patches\n", + " Args:\n", + " x: data to have leadtime removed from\n", + " Returns:\n", + " lt: leadtime\n", + " x: data without the lead time in the local patch\n", + " \"\"\"\n", + " lt_it = x[:, : self._n_context_tokens, :, :]\n", + " x_stripped = x[:, self._n_context_tokens :, :, :]\n", + "\n", + " return lt_it, x_stripped\n", + "\n", + " def forward(self, data: Tensor) -> tuple[Tensor, Tensor]:\n", + " \"\"\"Shift or unshift the data depending on whether the data is\n", + " already shifted, as defined by self._shifted.\n", + "\n", + " Args:\n", + " data: data to be shifted\n", + " Returns:\n", + " Tensor: shifted data Tensor\n", + " \"\"\"\n", + " lt, x = self._sep_lt(data)\n", + "\n", + " x_grid = self._to_grid_local(x)\n", + " x_shifted = self._shift(x_grid)\n", + " x_patched = self._from_grid_local(x_shifted)\n", + "\n", + " # Mask has to be repeated based on batch size\n", + " n_batch = x_grid.shape[0]\n", + " local_rep = [n_batch] + [1] * (self.local_mask.ndim - 1)\n", + " global_rep = [n_batch] + [1] * (self.global_mask.ndim - 1)\n", + "\n", + " if self._shifted:\n", + " attn_mask = {\n", + " True: self.local_mask.repeat(local_rep),\n", + " False: self.global_mask.repeat(global_rep),\n", + " }\n", + " else:\n", + " attn_mask = {True: None, False: None}\n", + "\n", + " return torch.cat((lt, x_patched), axis=1), attn_mask\n", + "\n", + "\n", + "class LocalGlobalLocalBlock(nn.Module):\n", + " \"\"\"\n", + " Applies alternating block and grid attention. Given a parameter n_blocks,\n", + " the entire module contains 2*n_blocks+1 transformer blocks. The first,\n", + " third, ..., last apply local (block) attention. The second, fourth, ...\n", + " global (grid) attention.\n", + "\n", + " This is heavily inspired by\n", + " Tu et al. \"MaxViT: Multi-Axis Vision Transformer\"\n", + " (https://arxiv.org/abs/2204.01697).\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " features: int,\n", + " mlp_multiplier: int,\n", + " n_heads: int,\n", + " dropout: float,\n", + " n_blocks: int,\n", + " drop_path: float,\n", + " shifter: nn.Module | None = None,\n", + " checkpoint: list[int] | None = None,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Number of features for inputs to the layer.\n", + " mlp_multiplier: Model uses features*mlp_multiplier hidden units.\n", + " n_heads: Number of attention heads. Should be a factor of features.\n", + " (I.e. 
the layer uses features // n_heads.)\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " n_blocks: Number of local-global transformer pairs.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.features = features\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self.drop_path = drop_path\n", + " self.n_blocks = n_blocks\n", + " self._checkpoint = checkpoint or []\n", + "\n", + " if not all(0 <= c < 2 * n_blocks + 1 for c in self._checkpoint):\n", + " raise ValueError(\n", + " \"Checkpoints should be 0 <= i < 2*n_blocks+1. \"\n", + " f\"{self._checkpoint=}.\"\n", + " )\n", + "\n", + " self.transformers = nn.ModuleList(\n", + " [\n", + " Transformer(\n", + " features=features,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=drop_path,\n", + " )\n", + " for _ in range(2 * n_blocks + 1)\n", + " ]\n", + " )\n", + "\n", + " self.evaluator = [\n", + " self._checkpoint_wrapper\n", + " if i in self._checkpoint\n", + " else lambda m, x: m(x)\n", + " for i, _ in enumerate(self.transformers)\n", + " ]\n", + "\n", + " self.shifter = shifter or _Shift()\n", + "\n", + " @staticmethod\n", + " def _checkpoint_wrapper(\n", + " model: nn.Module, data: tuple[Tensor, Tensor | None]\n", + " ) -> Tensor:\n", + " return checkpoint(model, data, use_reentrant=False)\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape::\n", + "\n", + " [batch, global_sequence, local_sequence, features]\n", + "\n", + " Returns:\n", + " Tensor: Tensor of shape::\n", + "\n", + " [batch, global_sequence, local_sequence, features]\n", + " \"\"\"\n", + " if x.shape[-1] != self.features:\n", + " raise ValueError(\n", + " f\"Expecting tensor with last dimension size {self.features}.\"\n", + " )\n", + " if x.ndim != 4:\n", + " raise ValueError(\n", + " f\"Expecting tensor with exactly four dimensions. 
{x.shape=}.\"\n", + " )\n", + "\n", + " self.shifter.reset()\n", + " local: bool = True\n", + " attn_mask = {True: None, False: None}\n", + "\n", + " transformer_iter = zip(self.evaluator, self.transformers, strict=False)\n", + "\n", + " # First local block\n", + " evaluator, transformer = next(transformer_iter)\n", + " x = evaluator(transformer, (x, attn_mask[local]))\n", + "\n", + " for evaluator, transformer in transformer_iter:\n", + " local = not local\n", + " # We are making exactly 2*n_blocks transposes.\n", + " # So the output has the same shape as input.\n", + " x = x.transpose(1, 2)\n", + "\n", + " x = evaluator(transformer, (x, attn_mask[local]))\n", + "\n", + " if not local:\n", + " x, attn_mask = self.shifter(x)\n", + "\n", + " return x\n", + "\n", + "\n", + "class PatchEmbed(nn.Module):\n", + " \"\"\"\n", + " Patch embedding via 2D convolution.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self, patch_size: int | tuple[int, ...], channels: int, embed_dim: int\n", + " ):\n", + " super().__init__()\n", + "\n", + " self.patch_size = patch_size\n", + " self.channels = channels\n", + " self.embed_dim = embed_dim\n", + "\n", + " self.proj = nn.Conv2d(\n", + " channels,\n", + " embed_dim,\n", + " kernel_size=patch_size,\n", + " stride=patch_size,\n", + " bias=True,\n", + " )\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape [batch, channels, lat, lon].\n", + " Returns:\n", + " Tensor: Tensor with shape\n", + " [batch, embed_dim, lat//patch_size, lon//patch_size]\n", + " \"\"\"\n", + "\n", + " H, W = x.shape[-2:]\n", + "\n", + " if W % self.patch_size[1] != 0:\n", + " raise ValueError(\n", + " f\"Cannot do patch embedding for tensor of shape {x.size()}\"\n", + " \" with patch size {self.patch_size}. (Dimensions are BSCHW.)\"\n", + " )\n", + " if H % self.patch_size[0] != 0:\n", + " raise ValueError(\n", + " f\"Cannot do patch embedding for tensor of shape {x.size()}\"\n", + " f\" with patch size {self.patch_size}. 
(Dimensions are BSCHW.)\"\n", + " )\n", + "\n", + " x = self.proj(x)\n", + "\n", + " return x\n", + "\n", + "\n", + "class PrithviWxCEncoderDecoder(nn.Module):\n", + " \"\"\"\n", + " Hiera-MaxViT encoder/decoder code.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " embed_dim: int,\n", + " n_blocks: int,\n", + " mlp_multiplier: float,\n", + " n_heads: int,\n", + " dropout: float,\n", + " drop_path: float,\n", + " shifter: nn.Module | None = None,\n", + " transformer_cp: list[int] | None = None,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " embed_dim: Embedding dimension\n", + " n_blocks: Number of local-global transformer pairs.\n", + " mlp_multiplier: MLP multiplier for hidden features in feed forward\n", + " networks.\n", + " n_heads: Number of attention heads.\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.embed_dim = embed_dim\n", + " self.n_blocks = n_blocks\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self._transformer_cp = transformer_cp\n", + "\n", + " self.lgl_block = LocalGlobalLocalBlock(\n", + " features=embed_dim,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=drop_path,\n", + " n_blocks=n_blocks,\n", + " shifter=shifter,\n", + " checkpoint=transformer_cp,\n", + " )\n", + "\n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape\n", + " [batch, global sequence, local sequence, embed_dim]\n", + " Returns:\n", + " Tensor of shape\n", + " [batch, mask_unit_sequence, local_sequence, embed_dim].\n", + " Identical in shape to the input x.\n", + " \"\"\"\n", + "\n", + " x = self.lgl_block(x)\n", + "\n", + " return x\n", + "\n", + "\n", + "class PrithviWxC(nn.Module):\n", + " \"\"\"Encoder-decoder fusing Hiera with MaxViT. See\n", + " - Ryali et al. \"Hiera: A Hierarchical Vision Transformer without the\n", + " Bells-and-Whistles\" (https://arxiv.org/abs/2306.00989)\n", + " - Tu et al. \"MaxViT: Multi-Axis Vision Transformer\"\n", + " (https://arxiv.org/abs/2204.01697)\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " in_channels: int,\n", + " input_size_time: int,\n", + " in_channels_static: int,\n", + " input_scalers_mu: Tensor,\n", + " input_scalers_sigma: Tensor,\n", + " input_scalers_epsilon: float,\n", + " static_input_scalers_mu: Tensor,\n", + " static_input_scalers_sigma: Tensor,\n", + " static_input_scalers_epsilon: float,\n", + " output_scalers: Tensor,\n", + " n_lats_px: int,\n", + " n_lons_px: int,\n", + " patch_size_px: tuple[int],\n", + " mask_unit_size_px: tuple[int],\n", + " mask_ratio_inputs: float,\n", + " embed_dim: int,\n", + " n_blocks_encoder: int,\n", + " n_blocks_decoder: int,\n", + " mlp_multiplier: float,\n", + " n_heads: int,\n", + " dropout: float,\n", + " drop_path: float,\n", + " parameter_dropout: float,\n", + " residual: str,\n", + " masking_mode: str,\n", + " positional_encoding: str,\n", + " decoder_shifting: bool = False,\n", + " checkpoint_encoder: list[int] | None = None,\n", + " checkpoint_decoder: list[int] | None = None,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " in_channels: Number of input channels.\n", + " input_size_time: Number of timestamps in input.\n", + " in_channels_static: Number of input channels for static data.\n", + " input_scalers_mu: Tensor of size (in_channels,). 
Used to rescale\n", + " input.\n", + " input_scalers_sigma: Tensor of size (in_channels,). Used to rescale\n", + " input.\n", + " input_scalers_epsilon: Float. Used to rescale input.\n", + " static_input_scalers_mu: Tensor of size (in_channels_static). Used\n", + " to rescale static inputs.\n", + " static_input_scalers_sigma: Tensor of size (in_channels_static).\n", + " Used to rescale static inputs.\n", + " static_input_scalers_epsilon: Float. Used to rescale static inputs.\n", + " output_scalers: Tensor of shape (in_channels,). Used to rescale\n", + " output.\n", + " n_lats_px: Total latitudes in data. In pixels.\n", + " n_lons_px: Total longitudes in data. In pixels.\n", + " patch_size_px: Patch size for tokenization. In pixels lat/lon.\n", + " mask_unit_size_px: Size of each mask unit. In pixels lat/lon.\n", + " mask_ratio_inputs: Masking ratio for inputs. 0 to 1.\n", + " embed_dim: Embedding dimension\n", + " n_blocks_encoder: Number of local-global transformer pairs in\n", + " encoder.\n", + " n_blocks_decoder: Number of local-global transformer pairs in\n", + " decoder.\n", + " mlp_multiplier: MLP multiplier for hidden features in feed forward\n", + " networks.\n", + " n_heads: Number of attention heads.\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " parameter_dropout: Dropout applied to parameters.\n", + " residual: Indicates whether and how model should work as residual\n", + " model. Accepted values are 'climate', 'temporal' and 'none'\n", + " positional_encoding: possible values are\n", + " ['absolute' (default), 'fourier'].\n", + " 'absolute' lat lon encoded in 3 dimensions using sine and\n", + " cosine\n", + " 'fourier' lat/lon to be encoded using various frequencies\n", + " masking_mode: String ['local', 'global', 'both'] that controls the\n", + " type of masking used.\n", + " checkpoint_encoder: List of integers controlling if gradient\n", + " checkpointing is used on encoder.\n", + " Format: [] for no gradient checkpointing. 
[3, 7] for\n", + " checkpointing after 4th and 8th layer etc.\n", + " checkpoint_decoder: List of integers controlling if gradient\n", + " checkpointing is used on decoder.\n", + " Format: See `checkpoint_encoder`.\n", + " masking_mode: The type of masking to use\n", + " {'global', 'local', 'both'}\n", + " decoder_shifting: Whether to use swin shifting in the decoder.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.in_channels = in_channels\n", + " self.input_size_time = input_size_time\n", + " self.in_channels_static = in_channels_static\n", + " self.n_lats_px = n_lats_px\n", + " self.n_lons_px = n_lons_px\n", + " self.patch_size_px = patch_size_px\n", + " self.mask_unit_size_px = mask_unit_size_px\n", + " self.mask_ratio_inputs = mask_ratio_inputs\n", + " self.embed_dim = embed_dim\n", + " self.n_blocks_encoder = n_blocks_encoder\n", + " self.n_blocks_decoder = n_blocks_decoder\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self.drop_path = drop_path\n", + " self.residual = residual\n", + " self._decoder_shift = decoder_shifting\n", + " self.positional_encoding = positional_encoding\n", + " self._checkpoint_encoder = checkpoint_encoder\n", + " self._checkpoint_decoder = checkpoint_decoder\n", + "\n", + " assert self.n_lats_px % self.mask_unit_size_px[0] == 0\n", + " assert self.n_lons_px % self.mask_unit_size_px[1] == 0\n", + " assert self.mask_unit_size_px[0] % self.patch_size_px[0] == 0\n", + " assert self.mask_unit_size_px[1] % self.patch_size_px[1] == 0\n", + "\n", + " if self.patch_size_px[0] != self.patch_size_px[1]:\n", + " raise NotImplementedError(\n", + " \"Current pixel shuffle symmetric patches.\"\n", + " )\n", + "\n", + " self.local_shape_mu = (\n", + " self.mask_unit_size_px[0] // self.patch_size_px[0],\n", + " self.mask_unit_size_px[1] // self.patch_size_px[1],\n", + " )\n", + " self.global_shape_mu = (\n", + " self.n_lats_px // self.mask_unit_size_px[0],\n", + " self.n_lons_px // self.mask_unit_size_px[1],\n", + " )\n", + "\n", + " assert input_scalers_mu.shape == (in_channels,)\n", + " assert input_scalers_sigma.shape == (in_channels,)\n", + " assert output_scalers.shape == (in_channels,)\n", + "\n", + " if self.positional_encoding != \"fourier\":\n", + " assert static_input_scalers_mu.shape == (in_channels_static,)\n", + " assert static_input_scalers_sigma.shape == (in_channels_static,)\n", + "\n", + " # Input shape [batch, time, parameter, lat, lon]\n", + " self.input_scalers_epsilon = input_scalers_epsilon\n", + " self.register_buffer(\n", + " \"input_scalers_mu\", input_scalers_mu.reshape(1, 1, -1, 1, 1)\n", + " )\n", + " self.register_buffer(\n", + " \"input_scalers_sigma\", input_scalers_sigma.reshape(1, 1, -1, 1, 1)\n", + " )\n", + "\n", + " # Static inputs shape [batch, parameter, lat, lon]\n", + " self.static_input_scalers_epsilon = static_input_scalers_epsilon\n", + " self.register_buffer(\n", + " \"static_input_scalers_mu\",\n", + " static_input_scalers_mu.reshape(1, -1, 1, 1),\n", + " )\n", + " self.register_buffer(\n", + " \"static_input_scalers_sigma\",\n", + " static_input_scalers_sigma.reshape(1, -1, 1, 1),\n", + " )\n", + "\n", + " # Output shape [batch, parameter, lat, lon]\n", + " self.register_buffer(\n", + " \"output_scalers\", output_scalers.reshape(1, -1, 1, 1)\n", + " )\n", + "\n", + " self.parameter_dropout = nn.Dropout2d(p=parameter_dropout)\n", + "\n", + " self.patch_embedding = PatchEmbed(\n", + " patch_size=patch_size_px,\n", + " channels=in_channels * 
input_size_time,\n", + " embed_dim=embed_dim,\n", + " )\n", + "\n", + " if self.residual == \"climate\":\n", + " self.patch_embedding_static = PatchEmbed(\n", + " patch_size=patch_size_px,\n", + " channels=in_channels + in_channels_static,\n", + " embed_dim=embed_dim,\n", + " )\n", + " else:\n", + " self.patch_embedding_static = PatchEmbed(\n", + " patch_size=patch_size_px,\n", + " channels=in_channels_static,\n", + " embed_dim=embed_dim,\n", + " )\n", + "\n", + " self.input_time_embedding = nn.Linear(1, embed_dim // 4, bias=True)\n", + " self.lead_time_embedding = nn.Linear(1, embed_dim // 4, bias=True)\n", + "\n", + " self.mask_token = nn.Parameter(torch.randn(1, 1, 1, self.embed_dim))\n", + " self._nglobal_mu = np.prod(self.global_shape_mu)\n", + " self._global_idx = torch.arange(self._nglobal_mu)\n", + "\n", + " self._nlocal_mu = np.prod(self.local_shape_mu)\n", + " self._local_idx = torch.arange(self._nlocal_mu)\n", + "\n", + " self.encoder = PrithviWxCEncoderDecoder(\n", + " embed_dim=embed_dim,\n", + " n_blocks=n_blocks_encoder,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=drop_path,\n", + " transformer_cp=checkpoint_encoder,\n", + " )\n", + "\n", + " if n_blocks_decoder != 0:\n", + " if self._decoder_shift:\n", + " self.decoder_shifter = d_shifter = SWINShift(\n", + " self.mask_unit_size_px,\n", + " self.global_shape_mu,\n", + " self.local_shape_mu,\n", + " self.patch_size_px,\n", + " n_context_tokens=0,\n", + " )\n", + " else:\n", + " self.decoder_shifter = d_shifter = None\n", + "\n", + " self.decoder = PrithviWxCEncoderDecoder(\n", + " embed_dim=embed_dim,\n", + " n_blocks=n_blocks_decoder,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=0.0,\n", + " shifter=d_shifter,\n", + " transformer_cp=checkpoint_decoder,\n", + " )\n", + "\n", + " self.unembed = nn.Linear(\n", + " self.embed_dim,\n", + " self.in_channels\n", + " * self.patch_size_px[0]\n", + " * self.patch_size_px[1],\n", + " bias=True,\n", + " )\n", + "\n", + " self.masking_mode = masking_mode.lower()\n", + " match self.masking_mode:\n", + " case \"local\":\n", + " self.generate_mask = self._gen_mask_local\n", + " case \"global\":\n", + " self.generate_mask = self._gen_mask_global\n", + " case \"both\":\n", + " self._mask_both_local: bool = True\n", + " self.generate_mask = self._gen_mask_both\n", + " case _:\n", + " raise ValueError(\n", + " f\"Masking mode '{masking_mode}' not supported\"\n", + " )\n", + "\n", + " def swap_masking(self) -> None:\n", + " self._mask_both_local = not self._mask_both_local\n", + "\n", + " @cached_property\n", + " def n_masked_global(self):\n", + " return int(self.mask_ratio_inputs * np.prod(self.global_shape_mu))\n", + "\n", + " @cached_property\n", + " def n_masked_local(self):\n", + " return int(self.mask_ratio_inputs * np.prod(self.local_shape_mu))\n", + "\n", + " @staticmethod\n", + " def _shuffle_along_axis(a, axis):\n", + " idx = torch.argsort(input=torch.rand(*a.shape), dim=axis)\n", + " return torch.gather(a, dim=axis, index=idx)\n", + "\n", + " def _gen_mask_local(self, sizes: tuple[int]) -> tuple[Tensor]:\n", + " \"\"\"\n", + " Args:\n", + " batch_size: Number of elements in batch\n", + " Returns:\n", + " Tuple of torch tensors. 
[indices masked, indices unmasked].\n", + " Each of these is a tensor of shape (batch, global sequence)\n", + " \"\"\"\n", + " # Identify which indices (values) should be masked\n", + "\n", + " maskable_indices = self._local_idx.view(1, -1).expand(*sizes[:2], -1)\n", + "\n", + " maskable_indices = self._shuffle_along_axis(maskable_indices, 2)\n", + "\n", + " indices_masked = maskable_indices[:, :, : self.n_masked_local]\n", + " indices_unmasked = maskable_indices[:, :, self.n_masked_local :]\n", + "\n", + " return indices_masked, indices_unmasked\n", + "\n", + " def _gen_mask_global(self, sizes: tuple[int]) -> tuple[Tensor]:\n", + " \"\"\"\n", + " Args:\n", + " batch_size: Number of elements in batch\n", + " Returns:\n", + " Tuple of torch tensors. [indices masked, indices unmasked].\n", + " Each of these is a tensor of shape (batch, global sequence)\n", + " \"\"\"\n", + " # Identify which indices (values) should be masked\n", + "\n", + " maskable_indices = self._global_idx.view(1, -1).expand(*sizes[:1], -1)\n", + "\n", + " maskable_indices = self._shuffle_along_axis(maskable_indices, 1)\n", + "\n", + " indices_masked = maskable_indices[:, : self.n_masked_global]\n", + " indices_unmasked = maskable_indices[:, self.n_masked_global :]\n", + "\n", + " return indices_masked, indices_unmasked\n", + "\n", + " def _gen_mask_both(self, sizes: tuple[int]) -> tuple[Tensor]:\n", + " if self._mask_both_local:\n", + " return self._gen_mask_local(sizes)\n", + " else:\n", + " return self._gen_mask_global(sizes)\n", + "\n", + " @staticmethod\n", + " def reconstruct_batch(\n", + " idx_masked: Tensor,\n", + " idx_unmasked: Tensor,\n", + " data_masked: Tensor,\n", + " data_unmasked: Tensor,\n", + " ) -> Tensor:\n", + " \"\"\"Reconstructs a tensor along the mask unit dimension. Batched\n", + " version.\n", + "\n", + " Args:\n", + " idx_masked: Tensor of shape `batch, mask unit sequence`.\n", + " idx_unmasked: Tensor of shape `batch, mask unit sequence`.\n", + " data_masked: Tensor of shape `batch, mask unit sequence, ...`.\n", + " Should have same size along mask unit sequence dimension as\n", + " idx_masked. Dimensions beyond the first two, marked here as ...\n", + " will typically be `local_sequence, channel` or\n", + " `channel, lat, lon`. These dimensions should agree with\n", + " data_unmasked.\n", + " data_unmasked: Tensor of shape `batch, mask unit sequence, ...`.\n", + " Should have same size along mask unit sequence dimension as\n", + " idx_unmasked. Dimensions beyond the first two, marked here as\n", + " ... will typically be `local_sequence, channel` or `channel,\n", + " lat, lon`. These dimensions should agree with data_masked.\n", + " Returns:\n", + " Tensor: Tensor of same shape as inputs data_masked and\n", + " data_unmasked. I.e. `batch, mask unit sequence, ...`. 
Index for\n", + " the total data composed of the masked and the unmasked part.\n", + " \"\"\"\n", + " dim: int = idx_masked.ndim\n", + "\n", + " idx_total = torch.argsort(\n", + " torch.cat([idx_masked, idx_unmasked], dim=-1), dim=-1\n", + " )\n", + " idx_total = idx_total.view(\n", + " *idx_total.shape, *[1] * (data_unmasked.ndim - dim)\n", + " )\n", + " idx_total = idx_total.expand(\n", + " *idx_total.shape[:dim], *data_unmasked.shape[dim:]\n", + " )\n", + "\n", + " data = torch.cat([data_masked, data_unmasked], dim=dim - 1)\n", + " data = torch.gather(data, dim=dim - 1, index=idx_total)\n", + "\n", + " return data, idx_total\n", + "\n", + " def fourier_pos_encoding(self, x_static: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args\n", + " x_static: B x C x H x W. first two channels are lat, and lon\n", + " Returns\n", + " Tensor: Tensor of shape B x E x H x W where E is the embedding\n", + " dimension.\n", + " \"\"\"\n", + "\n", + " # B x C x H x W -> B x 1 x H/P x W/P\n", + " latitudes_patch = F.avg_pool2d(\n", + " x_static[:, [0]],\n", + " kernel_size=self.patch_size_px,\n", + " stride=self.patch_size_px,\n", + " )\n", + " longitudes_patch = F.avg_pool2d(\n", + " x_static[:, [1]],\n", + " kernel_size=self.patch_size_px,\n", + " stride=self.patch_size_px,\n", + " )\n", + "\n", + " modes = (\n", + " torch.arange(self.embed_dim // 4, device=x_static.device).view(\n", + " 1, -1, 1, 1\n", + " )\n", + " + 1.0\n", + " )\n", + " pos_encoding = torch.cat(\n", + " (\n", + " torch.sin(latitudes_patch * modes),\n", + " torch.sin(longitudes_patch * modes),\n", + " torch.cos(latitudes_patch * modes),\n", + " torch.cos(longitudes_patch * modes),\n", + " ),\n", + " axis=1,\n", + " )\n", + "\n", + " return pos_encoding # B x E x H/P x W/P\n", + "\n", + " def time_encoding(self, input_time, lead_time):\n", + " \"\"\"\n", + " Args:\n", + " input_time: Tensor of shape [batch].\n", + " lead_time: Tensor of shape [batch].\n", + " Returns:\n", + " Tensor: Tensor of shape [batch, embed_dim, 1, 1]\n", + " \"\"\"\n", + " input_time = self.input_time_embedding(input_time.view(-1, 1, 1, 1))\n", + " lead_time = self.lead_time_embedding(lead_time.view(-1, 1, 1, 1))\n", + "\n", + " time_encoding = torch.cat(\n", + " (\n", + " torch.cos(input_time),\n", + " torch.cos(lead_time),\n", + " torch.sin(input_time),\n", + " torch.sin(lead_time),\n", + " ),\n", + " axis=3,\n", + " )\n", + " return time_encoding\n", + "\n", + " def to_patching(self, x: Tensor) -> Tensor:\n", + " \"\"\"Transform data from lat/lon space to two axis patching\n", + "\n", + " Args: ->\n", + " x: Tesnor in lat/lon space (N, C, Nlat//P_0, Nlon//P_1)\n", + "\n", + " Returns:\n", + " Tensor in patch space (N, G, L, C)\n", + " \"\"\"\n", + " n_batch = x.shape[0]\n", + "\n", + " x = x.view(\n", + " n_batch,\n", + " -1,\n", + " self.global_shape_mu[0],\n", + " self.local_shape_mu[0],\n", + " self.global_shape_mu[1],\n", + " self.local_shape_mu[1],\n", + " )\n", + " x = x.permute(0, 2, 4, 3, 5, 1).contiguous()\n", + "\n", + " s = x.shape\n", + " return x.view(n_batch, s[1] * s[2], s[3] * s[4], -1)\n", + "\n", + " def from_patching(self, x: Tensor) -> Tensor:\n", + " \"\"\"Transform data from two axis patching to lat/lon space\n", + "\n", + " Args:\n", + " x: Tensor in patch space with shape (N, G, L, C*P_0*P_1)\n", + "\n", + " Returns:\n", + " Tensor: Tensor in lat/lon space\n", + " (N, C*P_0*P_1, Nlat//P_0, Nlon // P_1)\n", + " \"\"\"\n", + " n_batch = x.shape[0]\n", + "\n", + " x = x.view(\n", + " n_batch,\n", + " self.global_shape_mu[0],\n", + " 
self.global_shape_mu[1],\n", + " self.local_shape_mu[0],\n", + " self.local_shape_mu[1],\n", + " -1,\n", + " )\n", + " x = x.permute(0, 5, 1, 3, 2, 4).contiguous()\n", + "\n", + " s = x.shape\n", + " return x.view(n_batch, -1, s[2] * s[3], s[4] * s[5])\n", + "\n", + " def forward(self, batch: dict[str, torch.Tensor]) -> torch.Tensor:\n", + " \"\"\"\n", + " Args:\n", + " batch: Dictionary the following keys::\n", + "\n", + " 'x': Tensor of shape [batch, time, parameter, lat, lon]\n", + " 'y': Tensor of shape [batch, parameter, lat, lon]\n", + " 'static': Tensor of shape [batch, channel_static, lat, lon]\n", + " 'climate': Optional tensor of shape [batch, parameter, lat, lon]\n", + " 'input_time': Tensor of shape [batch]. Or none.\n", + " 'lead_time': Tensor of shape [batch]. Or none.\n", + "\n", + " Returns:\n", + " Tensor: Tensor of shape [batch, parameter, lat, lon].\n", + " \"\"\" # noqa: E501\n", + " x_rescaled = (batch[\"x\"] - self.input_scalers_mu) / (\n", + " self.input_scalers_sigma + self.input_scalers_epsilon\n", + " )\n", + " batch_size = x_rescaled.shape[0]\n", + "\n", + " if self.positional_encoding == \"fourier\":\n", + " x_static_pos = self.fourier_pos_encoding(batch[\"static\"])\n", + " x_static = (\n", + " batch[\"static\"][:, 2:] - self.static_input_scalers_mu[:, 3:]\n", + " ) / (\n", + " self.static_input_scalers_sigma[:, 3:]\n", + " + self.static_input_scalers_epsilon\n", + " )\n", + " else:\n", + " x_static = (batch[\"static\"] - self.static_input_scalers_mu) / (\n", + " self.static_input_scalers_sigma\n", + " + self.static_input_scalers_epsilon\n", + " )\n", + "\n", + " if self.residual == \"temporal\":\n", + " # We create a residual of same shape as y\n", + " index = torch.where(\n", + " batch[\"lead_time\"] > 0, batch[\"x\"].shape[1] - 1, 0\n", + " )\n", + " index = index.view(-1, 1, 1, 1, 1)\n", + " index = index.expand(batch_size, 1, *batch[\"x\"].shape[2:])\n", + " x_hat = torch.gather(batch[\"x\"], dim=1, index=index)\n", + " x_hat = x_hat.squeeze(1)\n", + " elif self.residual == \"climate\":\n", + " climate_scaled = (\n", + " batch[\"climate\"] - self.input_scalers_mu.view(1, -1, 1, 1)\n", + " ) / (\n", + " self.input_scalers_sigma.view(1, -1, 1, 1)\n", + " + self.input_scalers_epsilon\n", + " )\n", + "\n", + " # [batch, time, parameter, lat, lon]\n", + " # -> [batch, time x parameter, lat, lon]\n", + " x_rescaled = x_rescaled.flatten(1, 2)\n", + " # Parameter dropout\n", + " x_rescaled = self.parameter_dropout(x_rescaled)\n", + "\n", + " x_embedded = self.patch_embedding(x_rescaled)\n", + "\n", + " if self.residual == \"climate\":\n", + " static_embedded = self.patch_embedding_static(\n", + " torch.cat((x_static, climate_scaled), dim=1)\n", + " )\n", + " else:\n", + " static_embedded = self.patch_embedding_static(x_static)\n", + "\n", + " if self.positional_encoding == \"fourier\":\n", + " static_embedded += x_static_pos\n", + "\n", + " x_embedded = self.to_patching(x_embedded)\n", + " static_embedded = self.to_patching(static_embedded)\n", + "\n", + " time_encoding = self.time_encoding(\n", + " batch[\"input_time\"], batch[\"lead_time\"]\n", + " )\n", + "\n", + " tokens = x_embedded + static_embedded + time_encoding\n", + "\n", + " # Now we generate masks based on masking_mode\n", + " indices_masked, indices_unmasked = self.generate_mask(\n", + " (batch_size, self._nglobal_mu)\n", + " )\n", + " indices_masked = indices_masked.to(device=tokens.device)\n", + " indices_unmasked = indices_unmasked.to(device=tokens.device)\n", + " maskdim: int = 
indices_masked.ndim\n", + "\n", + " # Unmasking\n", + " unmask_view = (*indices_unmasked.shape, *[1] * (tokens.ndim - maskdim))\n", + " unmasked = torch.gather(\n", + " tokens,\n", + " dim=maskdim - 1,\n", + " index=indices_unmasked.view(*unmask_view).expand(\n", + " *indices_unmasked.shape, *tokens.shape[maskdim:]\n", + " ),\n", + " )\n", + "\n", + " # Encoder\n", + " x_encoded = self.encoder(unmasked)\n", + "\n", + " # Generate and position encode the mask tokens\n", + " # [1, 1, 1, embed_dim]\n", + " # -> [batch, global_seq_masked, local seq, embed_dim]\n", + " mask_view = (*indices_masked.shape, *[1] * (tokens.ndim - maskdim))\n", + " masking = self.mask_token.repeat(*static_embedded.shape[:3], 1)\n", + " masked = masking + static_embedded\n", + " masked = torch.gather(\n", + " masked,\n", + " dim=maskdim - 1,\n", + " index=indices_masked.view(*mask_view).expand(\n", + " *indices_masked.shape, *tokens.shape[maskdim:]\n", + " ),\n", + " )\n", + "\n", + " recon, _ = self.reconstruct_batch(\n", + " indices_masked, indices_unmasked, masked, x_encoded\n", + " )\n", + "\n", + " x_decoded = self.decoder(recon)\n", + "\n", + " # Output: [batch, global sequence, local sequence,\n", + " # in_channels * patch_size[0] * patch_size[1]]\n", + " x_unembed = self.unembed(x_decoded)\n", + "\n", + " # Reshape to [batch, global_lat, global_lon, local_lat, local_lon,\n", + " # in_channels * patch_size[0] * patch_size[1]]\n", + " x_out = self.from_patching(x_unembed)\n", + "\n", + " # Pixel shuffle to [batch, in_channels, lat, lon]\n", + " x_out = F.pixel_shuffle(x_out, self.patch_size_px[0])\n", + "\n", + " if self.residual == \"temporal\":\n", + " x_out = self.output_scalers * x_out + x_hat\n", + " elif self.residual == \"climate\":\n", + " x_out = self.output_scalers * x_out + batch[\"climate\"]\n", + " elif self.residual == \"none\":\n", + " x_out = (\n", + " self.output_scalers * x_out\n", + " + self.input_scalers_mu.reshape(1, -1, 1, 1)\n", + " )\n", + "\n", + " return x_out\n" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "\n", + "# from PrithviWxC.model import PrithviWxC\n", + "\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=\"config.yaml\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "with open(\"./config.yaml\", \"r\") as f:\n", + " config = yaml.safe_load(f)\n", + "\n", + "model = PrithviWxC(\n", + " in_channels=config[\"params\"][\"in_channels\"],\n", + " input_size_time=config[\"params\"][\"input_size_time\"],\n", + " in_channels_static=config[\"params\"][\"in_channels_static\"],\n", + " input_scalers_mu=in_mu,\n", + " input_scalers_sigma=in_sig,\n", + " input_scalers_epsilon=config[\"params\"][\"input_scalers_epsilon\"],\n", + " static_input_scalers_mu=static_mu,\n", + " static_input_scalers_sigma=static_sig,\n", + " static_input_scalers_epsilon=config[\"params\"][\n", + " \"static_input_scalers_epsilon\"\n", + " ],\n", + " output_scalers=output_sig**0.5,\n", + " n_lats_px=config[\"params\"][\"n_lats_px\"],\n", + " n_lons_px=config[\"params\"][\"n_lons_px\"],\n", + " patch_size_px=config[\"params\"][\"patch_size_px\"],\n", + " mask_unit_size_px=config[\"params\"][\"mask_unit_size_px\"],\n", + " mask_ratio_inputs=masking_ratio,\n", + " embed_dim=config[\"params\"][\"embed_dim\"],\n", + " n_blocks_encoder=config[\"params\"][\"n_blocks_encoder\"],\n", + " n_blocks_decoder=config[\"params\"][\"n_blocks_decoder\"],\n", + " 
mlp_multiplier=config[\"params\"][\"mlp_multiplier\"],\n", + " n_heads=config[\"params\"][\"n_heads\"],\n", + " dropout=config[\"params\"][\"dropout\"],\n", + " drop_path=config[\"params\"][\"drop_path\"],\n", + " parameter_dropout=config[\"params\"][\"parameter_dropout\"],\n", + " residual=residual,\n", + " masking_mode=masking_mode,\n", + " decoder_shifting=decoder_shifting,\n", + " positional_encoding=positional_encoding,\n", + " checkpoint_encoder=[],\n", + " checkpoint_decoder=[],\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Load weights\n", + "We have provided unshared pretrained weights for the model,\n", + "which can now be loaded. The model can then be transferred\n", + "to the requested device." + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [], + "source": [ + "weights_path = Path(\"./weights/prithvi.wxc.2300m.v1.pt\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=weights_path.name,\n", + " local_dir=\"./weights\",\n", + ")\n", + "\n", + "state_dict = torch.load(weights_path, weights_only=False)\n", + "if \"model_state\" in state_dict:\n", + " state_dict = state_dict[\"model_state\"]\n", + "model.load_state_dict(state_dict, strict=True)\n", + "\n", + "if (hasattr(model, \"device\") and model.device != device) or not hasattr(\n", + " model, \"device\"\n", + "):\n", + " model = model.to(device)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Inference\n", + "We are now ready to perform inference on the model. The data\n", + "returned from the dataset class requires additional\n", + "preprocessing; therefore, after polling the dataset, we process\n", + "the data using the `preproc` function. This processed data is\n", + "then transferred to the device. To recover the masking, we can\n", + "save the torch RNG state and use it later. Finally, we run the\n", + "model in evaluation mode without generating the gradient graph." 
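 + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The RNG bookkeeping rests on a simple property, sketched below:\n", + "restoring a saved `torch.get_rng_state()` snapshot makes any\n", + "later random draw, and hence the model's random input masking,\n", + "reproducible." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Sketch: snapshot and rewind the CPU RNG to repeat a random draw.\n", + "import torch\n", + "\n", + "state = torch.get_rng_state()  # snapshot before the draw\n", + "a = torch.rand(3)\n", + "torch.set_rng_state(state)  # rewind\n", + "b = torch.rand(3)  # identical to a\n", + "assert torch.equal(a, b)"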
+ ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [], + "source": [ + "# from PrithviWxC.dataloaders.merra2 import preproc\n", + "\n", + "data = next(iter(dataset))\n", + "batch = preproc([data], padding)\n", + "\n", + "for k, v in batch.items():\n", + " if isinstance(v, torch.Tensor):\n", + " batch[k] = v.to(device)\n", + "\n", + "rng_state_1 = torch.get_rng_state()\n", + "with torch.no_grad():\n", + " model.eval()\n", + " out = model(batch)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Plotting" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAisAAAEjCAYAAADzFUHYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAADycklEQVR4nOz9e6wtyXkWjD/V1b3W2vvsy5k5M+fMOJ4ZT0yCPggIcPjZJvA5hi8mEeKSgEVAimIRAsgOAgwEmcCHbRKGQIIiRSQkimSSD0WJflK4REHkAgSIgiE/i09AbrJjxx57PDOey9n77LP3Wqu7un9/VL/Vb1VXdVf36rX3PvvsR5o5e/Wlurq7uuqp933et0RVVRWucY1rXOMa17jGNS4pkouuwDWucY1rXOMa17hGF67JyjWucY1rXOMa17jUuCYr17jGNa5xjWtc41Ljmqxc4xrXuMY1rnGNS41rsnKNa1zjGte4xjUuNa7JyjWucY1rXOMa17jUuCYr17jGNa5xjWtc41IjvegKbIqyLPHCCy9gf38fQoiLrs41rnGNa1zjGteIQFVVuHfvHt7whjcgSbptJw88WXnhhRfw1FNPXXQ1rnGNa1zjGte4xgg8//zzeOMb39h5zANPVvb39wEAv+tP/B3IbBF1TlIA5QZ3nhTjz30QIMpx51XJsPOFqiBXIy82AklR1f+WKNNpPaBV0tx3lQDlLEEpBRIVThBdSm0J7DrGdzyBzlOzBGpWb6vbZpk27ZxvC4G3aTrO187LFEAiUMqoKm+MSgLFQt93xV4Zb2Pus/cdM/i6EzUPUdplVRIQapqyx9aHI1GIepeJp85d59HxQ+9VlLpNV4kwfxN821rnb+nZ0v3we/Y9k62irIBENH9fQgwdW1W+xP/7r77djONdeODJCrl+ZLZAGktWxIZk5ZJ5m3iH6HaOvn2hTtzsqz9IoSpUctjNdpVPZQJAJQVkVUJk/o+OHzcVEtRkBdOTlbpgA5UlrW0+lFIgSYaRFUNu6L1mCWTdpq22mTn/8rKc9s/PM2TF8+jLVL8TsWWyUtXlV4mAnLW3A3pgqjrqQQMXEYTYY7eFynxXdr3OE77rjb3lmPPkemThDKKsUEV0uqKsep+n+w6Ggt/zeZMV3h+LyAnOecMdW0PkhSZCRf1aYyQcDzxZGYs+Btj3kM8LoRmjSwpCf/ftCx1ntnk+Ck4g+P5KCoiy/xx+Xt9Ht+lHWUlhykiK5gb530NRpknrfJf8iKJClQqIwl//Km2IBz+GttP5HH0WmCFt07XAXGXwgSlESvpISohouETILd83KPJrbTJ4hsiOS+hizgmdu+ngPgViiMpFgKws525heUCxaT9zZcmKj2y4D8tnHufHxDzcoebnoa6SUJkWAXEYt0sm+qwTPsbedU6IQHQRC98+dyB3rUBA2GIUC+91R1iMOHxEh28j4sIJR+teAySGb3ePCZYlu2eeXc/O18a3SV74YBlrxfAN7qHfMWUMOS+mLrH167R+TlCfmP19Fqauc7tI0DXOB5v2XeeNTTwYLq4cWbmoWWLsYLqJH71VFhuI3b9D1g9CJYVlnXGPEaqK1lIQhmowzLUmsAr54A4IoedlnTNBR5AUJcpM9+whUvKwY5vulm1cN2aSQd+Trx+IOW9qjJ0YRZfvsbpcBktMCBfV5qbEg0RU+lCmAPL4468MWUkKeH3zsedOyQCnxlg2vYkLZSjhGItQR0qWik20JaGyRVGiCpQ7hS+YXE+bdCyXcQZFepVYhAaHXmsAsxL5rBfR5XTsdwmFz0Lad57PUuKKfelvt38J6cq2ga6yfUTD3e7uc393vZuQuyymPueNixY/bwN9GhfuJp8afYaDpACG8OhLPEQPR9fDiXlwsQgOgluYwfgsHkD/oBGjBdn0w+yLdrlsEMxVIzyunBCBoeO79lvHRupx+jqSlh6oaJcbQx6mHgj7okei3QwRWoRScrFtXLmx8EURjblGldgWBR+hAfwDvns8YbTbs+M8n9YtllC6RCPUd/gG/C7C2XXd8yQOLrkaCq5f2ZaWhX/rQ0mGr6/Z9mRoaj3clSIrLi5aODhkduw2vJAg1fc7UVUrrLVrH99GZGOMC2dTosKFr10WFFcP4h7Pf3eVVRktSWn9NvsDz1CoqpOojP3oY8lMTBl9EVhDSY0b8hz6ltzOPcZq0hXd4Z2lBwb/KSwS7vmbugqGDsgWyemyBAW0Pi1i0PE8fG6qvkhCX519v3ldHkQLBbf+jAEnJtsS3G4SDXQR0UNTj79XmqyEhLNdx075gMcSldA2Fz6y4JIP9zhOSkLHAH6thRudMhVC5IK2u/u7iE2f28hnXRGqJi8yTHJcy0rU4L+hidXS2HjExxaJmzeiXreMmEG9kgJVKlBKgUoKK+SUvglttq1QzYRzbke5HlIyhKh01nkgUYl1RYTOBcL6jFCZMUTOd36s2yvmXjqjfwJEkP+OtRj7SFhMLhZCKe1tY+5trGVmiPXHLX9oziF+jzEWGXourmUlXK9wNOZ5kpauSY4Zmx9GzUqZ+hNhxZ57Xhhqhou1XrjHhc5LHItMCDzsNpakUHm+sFtCrOCUSEef+JAG7MQQD6qDTUREUSI5XQOF0v+ZC0X2aAnrkVIJpBJVmqCSEpDCEJly3jSmJI98dzyketXUh1uCkhVr0O673s1Qru0e07U8udtoOw/DLtMElPGayAsAQ2DM4NUxOLtEJDaE10XpGTx85GSsJYNfY8j53gHQKSc04IYGo6Ez+kpux9XQm3/Js909bwjB8T3/vnfie3YNPInkIgS/5y287btHTtrGJF/sG1fc/X3BGEPgjqXX0UAdoIfje2i+LJ1
Dy6YyQh/lEGV/bMPosoBsE10kxec2om2yh5DY5bbDl80ej2iRkBSlGdiJkADMcqIqwP1ol+uGnBD5oH9LZRMSFzVBAYByd2aIRCUTi1TQ9bN7rE6q3t717qiudIwUqFSc6SBZFcb6A2iSkzDpWpflyg3Dtiw4lPG3rlcpBUQp9GAZqIs2Veu/fUJZbz0CFgMiRaYdyHA5oTKCEPYxfeHUZr/bb3head/n7yMag61K1Gyd7X3kpYsY9WldgmX6SCS5nCbWoLjWF2s7wq6rruc71tq2CWLJyBgyuokuZZuCWx+GjMNXjqx0IfbB+PKz8G2hjzMGMeY7fozbGW1L1Do27HgoYqw0fKYfQ/4swiBtdw13+ySLmbaqrANpNYmw1MSknGfGauJzEbkkiZMSoVTr+qa+jnaGbzNQgGDVtMop7B6sWmR6ck/tZ970gJVMvNaaZr8zy3KjZFRlXElJQsdLvaRAHR3ErSBVIqJcAC5C+UvKFChnjqWFEY2p0v73fb7BGbqnOVcpvCTGhYqY9RPGWlCmIAjbCq0eAl/6/k0IRow1a1skZsi7vND0/pcMDxVZcREbssyPcS002ww57AInLfR3DJHxnTcGXXqZECGJsRBViV7rRpSAZIOs5Qbhx0t74OcEgmC2kQuoJiTVIjNuHF3vtrWEBnMu4BWqMtdLVgXEvaJFSsp5E0dv9tXXt55ObenppXBFoKcqCoj1HGLGctLfh7ECEYzLCjD3q11W9n2aiBFX8J00riE1a0gKERRzXMDCQYNDaLBx3T30XZUzQM2bY/Q1K+95LnyDkVDCnC8UEfSIQZ0/jpjJqkCbsNB5znaf9sWtj0vKQvVtkTfHEhEb1eOiM/zZl/na5G+KK5/g025QGeTiMVausml7SeS16HhRVkENk5vxeKiIfAg2EeaGhPPcOtIlrqd9lzV1v4uHmqyMcQVtO8Koz2wXivwZQjxCkUNjz42JKIq9ZnP/FYqdpleQUiBZKdulIoU3pNgcA9iuFxrwSXNSE5VynlrkZGiOk0rKFiExNeIko1C2Rob2FXajqoqeRpbb+0Watnu6QjWEJZXGRVWlCapaZJGsClRKW42E5SJlQmJHw0L6FTXrJilliIh4hKTc3WdZVaS2UmhyVDXny7h2XkFAKECuhbk/Peg0JMWtr3cgEh73D98dIgBOE+rTT/BreK/nI0wDLDhAe62eoWTPt29s8rnQ4BzWpQhzPUNczL/x13V1LbHrRvl+b9vaYRPNcYEYXZGkfee7413fmLnNnGVXnqzEiofMDLrn6+9K4a/mjeK6j9R4BWsRAyQf9F0CECISXUQhpgwX7jOlc5pn2FzbB1oPp+vj8yXpKucSgGyJaJtzy9b2as6IBHPzlPMM1VxC7WYo0wRlZt+TXDcWFKFKo6HkJMhy9RjiUROS5bq5JqFQmpikqUVQWuQkbzeeSvl7RSFlXSa3F8vmehyptJ8NJIRSxnWWrJr7r6RoRM4dRCVEUtzB26c3cUmKJiXQ5IBZUazzuFWlr6mmFSCBYlYBqk1c6EmY8gPlcbeWd5Xivt9wLAY91eaYUkgrFMzq3LHHc4Tq4BIXK0leh7slpD8JlW+Fb3siymghQ180DZ1P55rVnTewklyEW+a8o3pCxCM0rm4zWOXqkJVEDFZBjz1mipcz1VL2QD8hiarPgI9gjDCYX6MEgNoS5JIWHparZs1D4mLQypAjablkALQtKmkToVM98QgqmaCcS6OHoZBfF8WO1NE8c2ncUUZAy4gKoF0sAmgEvIUCwEmKQz7q34akOOQkREysctK0ObYoIArWEN026VxecKsLatKCdvh26PnoAaOdK8UbweNzCzkkxRCVlJfBSEokQamkO4MUzfFEXOoBT66FRU58LiarnHq7CgxuXXlH3PNCZXSVSS18iDsi5AYLoTX4MleWUPHkwkUsYfGVv2mCNvqXtnGrykUIa8dgk9BlX76uLrdQH0Jup5jyvehojy62Slbe9KY34dOf/nRr+3vf+17803/6T/Ge97wHP/zDP2zte+tb34qPfvSj26yWF2M/RA6XxMS6jC5CwNZFTlyLSIh0hM4PlR31MZBWIU1az4TrRuxyPULVlOoSyJ1SazB8YdYmZNdskZZ+BgCqGft0VKWJExPOGhHtkiUSIEsH6VSIYKSFZV0R6CAspqz6+NTzCXN3F7ueRVoKBcEinIiw0PMq06TWpTSTACsqpydXSpcVxZTBXD2asPQTFB+ZsCBheKJ7bAVA1C6dqra2dF1LVHReT3Sb51GIajsDYSWh60115veg3O+U1SfwTELkRSi0XExRVpAN1vLqK38IcdE6pMbCLRwxOC/zMhOWqQTkMbiIXCxDsFWy8ku/9EtQrNP93//7f+Orvuqr8O53v9ts++qv/mp85CMfMb9nswF2yokxBWGxyhuQlK4PXQ0p1s3TVYbJJttDXqaw4DRunMrkIuHaCKDtV5Yr1WLsZZqgmgHYsb/oELNPz5Q5zxzbEZ1E+1QqUGZ1vfczu571TILnpRGqMhYJeVZCrpQW4q6UdruQ24g1NsFcQxU8hCWkYykKIJdAGtPAHBJDHTm0lcq6dw/Hc62IIZdCY253/uUExXH3lDMK2XYIhmgGWO/A6uvMOzp4o32h/3d871FdNpEjl5xFuTdGfkspsw7w5+UhLPo63dc1AuMKxo3ihnK7olOrvLL519WvhAjGEPdPLLTFTBMUUQJybbMlNUuCQmG+7tV5EoQ+DBmPulaY7yIgQ8hJX6b1bWKrZOXxxx+3fv/Df/gP8eY3vxnveMc7zLb5fI4nnnhi8mtvo8H5zHHRLpEOrQvgHxzqK9jHeRqHm4bZV7+QCdEtTznnuuRFriiXB33Y7Wv5suiG1rXgYcpcN2KOrZ9LsSONjoSX58sBEXoe3K1k9rPEdzxhnc/a4iM2fBv9Xew2ob3JIoEoU2T3dP3laW65kizdTSqbaCGua8mLZuDsEd9WRaGJT+jYNLXFt2isUVbIdwkAFeSqNPcllCZtVSIafQmV4SEoXKfCLShdWpRON8+s/a31Wlp6IKJCe3qu5XMBRZTbZ7HxwSUa9LzICuRaXHQEljAERK6AJFJk60Zvme0jrcDkjonRvujr2NE/vvMs6wkjKL7kk2mhmrbsfOvagqh/U7QbcPHEZQorR9faY2PIRtc52yQv56ZZWa/X+Bf/4l/g/e9/P4Robujnf/7ncfv2bdy8eRPveMc78B3f8R24fft2sJzVaoXVamV+Hx8fm7+nblhjxG1DQsHOK8X/2AZUSdEiL6Q1IZBexs0NE6Mf0mGwPk1EY23hpKNMEyC1s9b6/LPahdSU0TXjML/dNPWB5QbIbdS1HIFQjZ7DkNT9FIWqIGstjFypJjfL7swmLzzEudbACJ8gl7QuWc9nzF1FRFISiWpvgUpKVHNpaXnKrE3MSilQ7AiomSYq5cxPsF0dSqxYdpsQHdexAmwClo4QSaFyKyWsv+kc0rqMtqBE1IXKrgQjLLWYWCi9bEJ6Okzr0gXfYoh98IXnWpYaRkrIfQPAIiHB+jh9UWixz7EDcx
/BomsNyZ/lm0D6yiAiFpW2nte5CO/j13f/Pk+MGavPjaz8q3/1r3D37l285z3vMdu+5mu+Bu9+97vxzDPP4FOf+hT+7t/9u/iDf/AP4mMf+xjm87m3nOeeew4f+tCH2jvKypgAzaYN706q7t8+WB9PoIE1IlL791gMCrNlZtrBq/eShWICIbMPsZoWTnD4s3O1LK3yS/d3vIaG6kc6F2/nwjO+AhBlo/XQ/wlUUprZH32vOnFbYUKKrVIL6K+0trrw/VR7kabGEmP+5nAjheocM+XuTIdt88gfdr/cbF6mQLGjLSsuUeFunlBED+33ERSfNcUalF0XSw95GALXjcKJBwDvb9+1Kg8hae57GsLiK4MTFH1ME/Ek1w0pKD0uu5CbJbjdyS7dp0/ha+e4uV70OZVx3QC6P3SXqOhbwwyARVDI1WvqySdRaYIqos9LCgBsMhI1rkYsI0KRpmZccM+JXIqEo4uYbBJG3DWB7ouYjSq/7sVURB9MEFVVnQu1+sN/+A9jNpvhJ3/yJ4PHfP7zn8czzzyDH/uxH8PXfd3XeY/xWVaeeuopvOXd3440W3jP6XOR0DGbLrndh5hw5k2XhXdDfn0hwLRdqMq4dUIgE6n5PUJdHnJdyVXp1ZEA7eRrFQk+02YAdX3S7jXdd8MTy5XzFIpleg0RHbdenWnHSYjKwnz5YK+vg9qyUiLJK+s+3Qy43MrSiGQ9OpYucgLAWkZgoZcKqGYpynlqRUbp55IYMkjb1FygWDSunzJzyIqHpKgZIyiEnqieLjeLN0qni5wkE3Rr5XiC4SMvBC9xGaRNYH1YBUNUkrW2pIgiEGKNOBdMn+vHl1fF1+9oItImJ7wePjdOTDZtn2WTLJYuuMXV1cZZxzEXUV9iy77xwiVZ3usN7Ot9S7v0oS8bewhDF/4dS4qKfImP/X//Do6OjnBwcNB57LlYVj796U/j537u5/ATP/ETncc9+eSTeOaZZ/Dxj388eMx8Pg9aXXzo0me45sFYwtLns/NZLLQbAy3Lz9BsuD6TYNfKqaFtPj0KgJZPVxSVWeunSgXLTRHWxXDtSpcgV6jKpIJPVgpVmhgCUdbr2yiWCZaD6z94Urdk1c4myzPTEiFIT08h9hd17pZ2vQjuGjtdHUaIuNGHTC6U7FRrB3zQYdAlKpnqGXmaQBSyIS0zCe/iiy5BMTcgrWMok22VarePISaudmnW1NnNkRJjSYkNO/YSlIAlxUtONiEl/FwfMfGVzY9LqiChcd1CHF5Li+/1DYl+qYkKtaspXeKiBJK8du8xi0rIIkxExfxXVjVpacSvrXMcd4633BGWB152gtJMPkRZBScofWOAm9XaB2tFdJYmgbvM3UlmX5ZgV4sHV8/oSYfhIx0h78MQOcK2k6P6cC5k5SMf+Qhu376NP/JH/kjnca+++iqef/55PPnkkxtdrxU1wrQn7kdM5qgmYVP3bKqdEIpmAd1lhJLO9TFSnwVnCIsNMV9RimYtmVS0dCcSjVCN1oZBrh0VrpaE6knlcN9xazFBdl+VFF6yQB0inwm5aIVFu5lsuV/W3AtL6FaUSF8/BQAUj+wayw3VzUc8aJVi67oByxVZVfj6OTSA57sCZSqRnlVIVIJkVUKuBZJVTazmMJYWFMImLYCfuLgLMvKFGVPnXym0RoXdMwCoeWLqzetcJexvCbNWT1WLZn0EZajlJHSckJX+moaSkimO77OsRFheQroW6x5DVhgWhu0tuy6CW1R8ayjxLscdnsnC4cuFIsqaqKyb/o2HphuLiXNewkiKrZURKNEQlqF6CW7tcK0foqisfsm6D9fC2uEudvM2Ndvb21Jn0VTe//CUCVZfMk+sd9Bn0TJ1VhWSVWnloSJwF65r3XUJjalfQuWw63ss9KHzOGLGo6Bu6LLkWQGAsizxkY98BN/4jd+IlJmrT05O8MEPfhB/8k/+STz55JP4zd/8Tfztv/238dhjj+Frv/Zrh1/IkxTOJRYxS5KHZiS+jJz0d6hMn8l1qJ4jhkR1RQMROXKvW+zAK24FdANWs0TPhJqa28fk5H9tonkqVUHUjdIM4mllfZyWpcsjsDUdZO0eKXbaL0Sv5ipMh0CrDgPwJm2DAiopkdxfeu83ff20Wcun7nTKeWoRkz49DL8nnzVJu0/q7Zn+u9ipNGFZJMYETh1SIgWESiBkTVpW+h6Eomgh0rA4z6eo97nbKZ9KbVWpuEmckdViR7t83PaeKKCY6Y7JmxvFieaJsZg0z8ZDTgguiZjCvXMBGKOn6QOJasudEuVMIFk3UT+htYy6ErRZBMQhKkJpLZaSzXFutlju7iFLSlN2eALHoZjrtAuuJcKKBPIM6Bw8wWTL/YwSQNJMFtzrRvYDvE+yF1Zt+lRXDNylXXSDCfjkTDo5mbjlh8TyrkuqyyoWev5WNvEO5hAa43wRVkMsgFsnKz/3cz+Hz3zmM/hzf+7PWdullPhf/+t/4Ud+5Edw9+5dPPnkk3jnO9+JH//xH8f+/v5W6+QjGr3rdrAFsPhxJA4DGkLRpdUg+Kw9IT8zP56OG4oubQlPliTXjbuqqgmgkJVtLQF1WJX58MnqQOZVk8BtVULI9gfTBcpLAugGWkl/1FA5l9aAbjoXKVDJdtM2CxcCcMOGLSKTSoh5hmQuTfZbAJbGxYWJkqg7oz4Rsq4DrFlmKQXKXYlEVahW1PnWJEzW2h024gvAm1Kf/20tYAiYyB+ybBFhJCuQr/0VO41OpawzzdJ6PdWs6k7a5ikvSn/CSckAgpLIEqVKrN8Evr0T3GJSCm0VUcJYOQSL+DFgupptEJMQTBSQrLTFC9zi0Vh9AX/fFptbqrWsQla3TGZFaZ8jIFd6e6wok9qhryzAcRc5LvRSCkME3MEdbHsffESll6SoCoJPy1RlZc8G9PfM+zHSrNGaZy74gqpUh9CK8nZd639VhaSw9TruZFEfX1nbQlobTmS61gzqsphVUngXq4zB1snKu971Lvg0vDs7O/jpn/7prVzT7XBDoXp8rQi21fzFxWDE2mlwIVeFPXtwyvd0svRSKbKIXp5bLzcpE4cyxMJ/Xxxdgqo+jQ6Rlr7kTG4kCZ+t6BlaZawuau7XSNCxSVFqcrKbNcLT+sPwpd/Xg662JAhVNZYIOq6OdgEAUQsxdGckzd9QFTDPLGuMUArJca7dXmkCtb9AwgS+Pu2K756SonlfvF3Kela6OkzMO5bLCtlp/T7m5FvXVpamXto1pJ9hZbLmuqBFGslKRETLFw0moTtR3Z70NiW1vkbNmfun/rfYqWoXUBPtYiHCggIEBvURlhNOSHy/+fYgYSmF/m+dGFKSFMIKAw51rMYNNqtQppXl+tw0D4wL12XkRgNxcG0MWVu6+oze71zaVmXrWmy9HTp2UMoH1i7dgVD3t93PUZTaVcKtslycbx3r0aSF9CcWIVD2u23Kq2+0tuACMK5WoD3gc71MQhMzp2y3zrStKStprTTvZuvW+/3WF7de5hZnCZJ6n5onLetYyO0dK8htbb9MlpWLhC0KDFkV7N/uIJQoQK4qY2akAZN39
sFomA6TWtNpVNYxvDpqLtqLrQVAMxj3w07yCknelMFnLo3YrTKCSt7o6G+Tb4WFP7oCXUs41sOs+TmCND95Y5GxOi5Jqy3rxQWJpPDOxVhxoImNWV2Yp45PePRQYrQyxjK0UkhWAmLd/upEUSI5za1rxSr53Y+UQkjVAsj3hCE+yRoQmUCZVhClfu/62ZCuKLE6X/O37G4YldSiZbWjo3yKHWG1WyIgZVbnT8n0f9QeaPVfvvJxOa+aQTiSmBAGE5SkCpKPjVETlEoJJGeJfv8q4ErxoW7KsgAgBHDGE4uR1YlZYYzeI0Kv4upHiHRQ9A9/jpyQoE2seHZaoG5rJYIDD4Fy5pTQ/SDNiPlESs0ad5CleWvpRLrdHL7IIkOmWWJCF1ZSSmYh4AO5bzAXSi8tQROhZKWsfbwMAI0GrkdnI5SyXa09AQZClRDrolmWo7aGhoZmvjo6n5SZ+/WsQh8Gc51D9yvpmTKT2PSsSWlA5MonyPe588mlTcdagSz18WqGqEzPhCtDVpJ1BSH9ro7Ypew1BJK81hIU7Q/HZZp9giRXtEQkxR7Q23oT0n1wcsHdNeZM1V7h2co9wq9TuxqSvL2f6hVy1bTDcLV4iwt4qVHKtT0gdsFeh6cxVdL5ZVprZ1YFynkKqSiZWmW5ZfRzSiBPc1a2Hqh94lcrXDfVYlOZJkhqImCRlkLVFgw71T7QJmZd6fv18bXljPzqud4m10C6tN9llfotXy3zryPua+4zQXEjQ74ntRZlRyC/IaDmwgw03P3Jo3oARzhbi2Zp0O2zGES5QnwkZQNNii8fCoexqpAV5SzRA3shjEh1dAoJIgP1+WkhgNO6DvU/3HVWUsp8t/9hhEMUDimxjrMtPi654lYSnj7fuB6VfYy5DWn/6yJRqJM+1tZlplvhVpeqFgfz6EVfX+lLqWBfr+53A5FAPFLRuo+IAdtaU6wmMt51xoDGxUMWkABpMRZN2aRa4K6n1rpmnshFy20bcPu4biNXH+MSsxgQ0SGrVLIqkbjWGjZGmYmbIxGg45qoscr6t6rFwqkUSAawlStDVrITBYGKdcIienVTH4odCgWsrERFFqP0fA/+bKn913PJUKIqo3vgSYRET9r+PnDy4jawKhUm74rLhEs0pM+XVTEp2n5pmhHx3ARuXWh9ILJuUN4PMuWSdkUUJRK2El8zW2oTB7ejMsnsfNlyGZksdiRQp/VPVqm2pjhEQK4U5MpDeOqwRLpesdO2WhS7NjlIT4HsfoXZvcprJqX3Q2HcgNNxOW4enyuq2JVYHQqsDwSKXRbRIdpr+3iJCWANqJWs2iLYMZhAOCvqJG6DwKwpstAWFYqkIbQIHL8md896qux139JjXANATWLq51/OKut5CyWM9SQ90xMCvnaSSfqm0Er8ZtWjbP51AwJcImPBVxa7Jz5ZsqwhRrdiW3nGgPeBvkFQ18WeEFHUoksGXIvkeaMrPDpZFdqqQsekkllM05Zbx4o2MqTE7v9CwmDAtqBQGgddRlsXY6xLzMrUBDHoMqSy12vz5crybbeeQaRgGbhCZEWUFdJT/YWktXCwWCRQc21KNyF9ZTPoVh6/qlDNfjWroGa6w+CpoEMEgbKU+syr3jVsHFeSpbbOhLW4VldmXF/ZJvyXNSYfUXFTL9szm+YY8mMC9iyGhxDyZE6+PC1UFpWTninL/AoA8jRHwj4kXUb446MPR80lkGoRLLmJfGnjXWjtRSM0a0hj2fJPu2ZdSkBFYtUEcMRpuj0UO2R5Yp29bKwqQPNOu0I6+XOgDqecSxQ7EsWutAiSFkfD6E7yPdTCWGpvaIUYh1wV+nmI1jaDTSN0Os53XUCuxSZWzGpZVdYJ5GmC9Exneu1KRd+V5TUUWcPDhr3lkftorYmHzmXDraoNIUGl3UxpbaXhmhNNEGD1N6F0+O49eglO6F7LyoSvE2h9IWMpNmHKfp3e0HxS5lh3za6B0ZThcv0Wi8rpe7yw+kzV6p984n6XZBniMGvyKYVcuqFwbD5hEiX0aF73f9Svui4t7h5vaVyMa6xsETxDVBzLk8nqrSqji3EJFJGarlQUMbgyZGV9kKLMUquzJ4tImWpBo1zrzrvYQfvjY7OHfkFpfQ4bYHxkwfebn6u3tUP1fG4fHrrsWoxCIiUTcuhLDOQ/BYBLWBr4XGH0u5LCIiW+48y186acECnpU9/zD62SwiImConXnRW6ZzpWFlVtVWkagDsbK3akEQknqoI8a+rpRtXQgn9lCis3ibluBuQ3BOS66hVLWyQKgNrNoGYJynmC9V6C/EaCfA+W9aSVA0W23Q9u2LD5m+4/xnLh5hsJWU1iE6/BJimbpNHn9a9WUltSThPL5WMRCk7eHILpc7e4/YUpi3tfnfBuVwzbaElslw5NkNyFB61zfVoPz6fjS73g5vjgAlnLde7R+/FcKr5ruAgJLN08UPo3dw3b362PuPDQX1+fQ9aVkJ4jZHExGaUDEB7RqihK8035ckHxuzGuJ5RWlF/7OkxjxvV5NRHwRSkWN7TL2lio0W+Ct/pUx53ke0ax+piuKNRqQJboK0NWVo8kKGaJ1bGQ9oRIyuqwnUOCQCKy7D6ZE9m+tHF9+HKYEOgcLlZ1P1LjkohwUfHBzXd8KGLILKKXsQ5MAYlqQgnNh+4MBHJVgsKSY2Bmco7eput467cvAVMkUSFLhxE9e9Jk2+/LhnedEVW1oweMYNd2Zylpp9W3SGYmTMivRVRY9XSuG0AtEsyOa5GxqlfGZa7HxJn5VakWy+b7KVaHAstHBdY3uaiTjvUTEx8pCVo3lIBY18LTumBrpV93FeBU1wELNcjVM0REKz3Hqo6w5FIlQJFArAWSlWiIYaCfJIJSzioT8QQAZSFsqwc73gunfPPcGGkxCw6uGnJiTu+ZQLlWlJBlhfQldA4v2/07FIgAOFYdT6hyKFKoC94wZY9lyOzzLCJq+p2IdWZCETRWnia230Jsn6i0WL9ECp/rpvV3bc0xGbZXqs4V1XZfJc6EziqHPbeWO4zWH0uba7mwrE2B0G03IkkfaxNILsrlv2k/t8QXWXyDuTJkRWVAtdN8lAqAKEU9c9UfEq0WS3A/rhJ6pmutvVE2K+hSsqMQWpEfLHsp4P+QfeURWSp3nYHAyfUS8q37cx5owlNK4Sz46CwPQCsLe8Ka+0KdtwH3o+a+WrIsEEShLR1VAp0p0ie2DtwXUGdwTTOTUVbUAyDlOElWCll9LmV7LXZEq1wr26t/tYC6wnr2nt/QP5NcID2rLU6l/qjdhd30tWVtUdHtO98Hir2yDpule2q7S6ynEem6EVmpk9jlNNgnHtdp3fns1LPKmqj0EZBSJYMjfXxExd3OiUupEuP6SVZJ7c7qvgZZQqq0sp4jJ2e9Flj2sPXzqd+Nx1I1bFV3RmI6JrW+1Os8yVtTt6bMLqLSLl+0+kYXpYSVeypmMuND30KGIcTM/PtcPhaJcYS1JmeTVzRbaNeOZ+LDy23r63TeKMqyLYpSJ4Ss3Td8WRGrHFdiwJ63917Y9XxRSz53WKzmJ6RDMd6FejKX
7wrkA/xCV4aslBlad1MlqHUM+rdrMrVCm2lwQdMZ6H+bl5g6CVBd35wdlqt1J+FZUT3osdA/KoeIBFfe83PcGZguqH0NXheu0ufiYXJFNeVUnbMX92/0hE+4HRSf/VWBKBafII70GWqWGHbukpEE9fNi+yyhMKnY60yyRDTYlQEIJItEW+VOFeS6RCWl0cBQmdmJQr5nm0q0vgnGqqLmdsSNq2PgJFMtag1L2Twn/i89k3IudfmLBGohoOY67wkRld7EZCMjcESmO2tlhJ70TCu9ryYnSde1HUipvG6mlrUvgtC4lpVKCZQqQbWS2iq0sq0hPA9IyQgeWaZcspcUjYWpVzzKiUqhXdBqJgATCSisCJqh+U+6EliG+n4eAUjnVk6/MFQU6+ZWIa1U/QtCNc853H9QfRgpLMPHlQAgRXegAIXKMnFt1P2wwdxneeCidlc/5js2OV1bhMklAO38L/U+VZnkjXxfsiqAFRrCJAVksYYoUn8dWpOyNjHS/9b9lmVZqVpkpQmPdi0s/JzK65YK5aFyRf5duFpkpefG2z7auGOBRnXvWknMcWZAbD5YtyPgMxEOnssAcML9vC6eCqqnc/F1PiS61H/rf9Mzu0GXrCPwhoF3+B+DdeGC3MAH5PuIXXdPvp8GI4sM+fEvvN0Cd9lQvbiLrkyBMktr14wWDJM1hX9gPFeJK2rli/w1x9fXl433upJaa1I/oVpnxNwPrBMpdiSWt1LkNwRWN2uNylwPsp1ROpEkJWjp4Ja7EWQiBBVQEg0tU8rSEBa1lkCRaGvKWkfXENEAmm+Y63mqtEK5U5oII8pYSyHEPEzY1X/0hkZV9QKDQljbrDIQ/t21rytlglUFsp5QXwObsMXA7fNi3C6IvI4ZMBEOYKivCkC7YI02kcrwTEy61Hmtkvvcz6xfsomHTRYs3cq6gFjXZVO2aVr6whXVupYbx20kZNnob5iehnLJuBlvO+/Fsai4GW6TQlgJNbsWbQRsMsInu76148aKpK8MWVEzoJr1zA6cgb/JedJs4z5duYZRuVMkEM2eXYSsKNr3TWVqi4YVCk3EJrE1Jnzgc+uYKGF1AKHOo2u21JStdSyNwM1Osc/hpmW2yuuIumnNfLrM1w55KW5k4CuWBq/BTN0xlkXSE4VM0pXUsjcS03KLl5vvhocnV7KJ/AkRlRCK3Sbsvq4lEqnDyauEiJu+9nqfH18NF6EOISoMQlYbkRMf+soTnrq6wrxiLVHmElhKo00x57u6GmlbA8p5CeyU5lUJWaGCTVQI7sDbaZFwq32OHlRvlmVWV07YAFgkbIiFpcsNRNeLIUSWpcsJYAheuyYsxooq23lNwgPjsLAUn5XGZ30xx9YKORLOVlLWi5E2pEUUCgK1yatQmsSkslmnzFNu17U5aXGP76o3af7cPjY9U8aivLopzfOenTRrmLl9s5noMjlBlQqs9xKT54m7xgvH4teFK0NWTEIruqMKzUDhfke1C7ll+TCqfBhSwT80NRPNYnTQHyj9NlEDkidYswevphMQbb0MM5eaGZAzIJpj2TkAS93vs9oYYV1l8iC418W87aqBvdJFfSFtdbFIg8e86yM6PlDCN8D/YVFCM+t+Nghd5J23XJcmVJnrXsifWqawiGm5SNCaUYOTPrqn9ruj45pr+P8GdBeqrSy6HprYUvRR7bpjmpheRGpTYolKDHzkguCSjE2Jj1IJ1DIF7kskqyYcuamMbc0y9aBvn+l8eNSSkJVu27JChQjXjw9u/xPqj2AP1r73GiNedclJmaLl+qbyfZMqs2ZLwBLSRCaaElkf6vZpjYuoK6U6XyeGa1wA1Nbj2nrCEsvZ4mHW/6QClar0IqzOs3CzXidO7xazaKleSNSv52sfW1te0sZiU0Ea0gKgyWniupRkk6qf7tGyUBhrTFMfoHbD8AR0dSJNXjYPFiDN3+pQoljoyRZFeaXLBOmZvg6lRACasQaoJ2hzu9/mlm+1B6wPEuQ3YLJiVw7jUP61Zb24UmQl9DF7Oxphf0T8kZf1S6tKGHcLJza8EN81aQDL99v7VjdhQiIpqROlvy4dq06i4qKGYnJDcP+ysMhNxXQxAnaGXTIJNuVwsgZwS5C11TLTWuBupo4PvqxTxHe6l5LQ9Vk5rIWnZyz7ohRI69kBhR/zlNL5fuod5My1LVdCbQZNmnboy7XRtcK3SWUPnbxNzbX1JDERI5REDlgfaFEt6UeCiHD9dJGUPnLSRUqmOqcvtLFSAlhKrSmhCQd77uTqsc4xfX1/XSpBlhadC0XU32TLJcIzycL+JlvfYz1Z4vXh+o5QSoXeunq+Ad834iaJI/hINK+PacdUN+c4K8zbudGYa/p+E3nhkZchiwst3yFU1XoWbo4P160R2tfKups2/zakxtZ+ADCaGQAAaSZr900FabmKLKtLB3hkDa+XnfVbmu0WQSMdSa15U/PEZLUuFsJY/0mzqWbaCgKwSXxeB35IveAqucTJ0kxtNb+hNXtk+Q1PGCqUIr4/uFJkJZhvpPdkWALVKtFhpYmshXCBiA73BVCnk98A1gcwnSaFJZLostwpLRGfyVpJUE3670oCyQpIz/QuUer6UGZKoXwfeAPuIuKaFaBxR/nMw5CNCDBnlgNyh5nfHabaENEgIiM6/Jdqpk2LfS6dGJJitllEi65ta0OoA8ruFcjuAfl+ivWeaFmlYhfgareRZnE5/yBaoYTQfY7z3PM9ID+rs+HuVHqwVcJ2b4bIwAC3zzZIikxKqHKY6b3rWiKpUJEFZKFQrVMzJzdp7SU6CYmVEE+JtvRkVuoBUNlaAnsBL3IXARXTtVjX8bmKAhp13+Dt+565BsXd1wcroqjn/K527pKbLndPS3sXMbG03W2N9aDPTeTToQGNS0K7pTsmSmQt97i2eeJIsuAkKFskolmHjKPRlPAFV0VRGquLLtgWt1ZSGqLiTha13qqpr+0ql9azoGdTJXoB1XzPnmRZ91lrOkWpxx651OfTeXSvfB0xUyfHuhxKJhltHa5xZchKsaeQ7LS/FqGEN1DGJXQ5D1dmsyUCT8ntfpScrKg5oHZLnWsCtL2JmCAksmx3jrxMAAVFOBQJcNZk3aTvzOhqnFmLX3TnG/iFaZA0A8tOYCwwBXNxERR7Lm50krvst2vC1efQMxF6zZrdpnB7doM6syfti3f/+D4A5YSs63omcBNbufkzKAMtWU+onpVsSKgR06ZkVfNElXisKByxLpYV0LQjpz31leW6XEJEQCbhGV5al1Gw6Js00pUTc1wRyJfSSXSSCpQBNt+rmvT19A0Soah/Vm5TUoCAALJ2/YjOeqjMlUOMhbYLPtdnX4K6vvK6CFArM64UKHalNTB7MbMnXHbdGg0hYPeNoXPlqp0p23VPtTOGS3sNN7ZSNE91T+6gfD81i//lN5JWGg7r2oxEEDHV0Z96v9G6pX7rLBHuZKUtufkN4PS2QDm3Exy2viOEiUko513ZFeXi4MqQlRCCD4+TkwrWDMwMkMQKlDaT2dFEJCiqt9deE91pNvkZhKwgUuZHHJL8alZfcKZ
QzQXUTgpVL74GAKLQ64ckK9gmaKdf56JTV6fjzvq037Ix/1mmXeWWQb58HZ1ErqbGONpE1/iIi6+uMQhpR2LO890zFwnG6AW4CFsoHRZqkyHRtJ1WHQYQlZ7ViM2fPUTFpwsZSlRcohFLULYNKcvaHF2hpJa3UwLsm0ORoFonxvIBOCRyVnY/6xpiLbyJ8JJ189u3fIcFz2VC1oRgGWxbZ+SQR69iqrEhMRkSReR+o0OuFSIsbtJOe5tNOFr1Kf1Eput4e79bvn1iouzklKUUTcSSs1QJQNYie/0vHt1TptrKXOwILB8RTFhvqtLcQ8DyW+w2E8rG/eS/X0BbbBOl7zHfqz0CzjpWLUT1wa41+SF0A0Gi/2FZs2ZGTjyHcqtKNQ+Yy5VAtRL1+h6VDiFlrh3M6vNKYTrDMYmwAD0Iyd0c1VyYqIdKVljPdQRPdpQYDUxTv+ZDo/T+ooRRY/tMs6JsfNJuh2iEcCxCSVtV2hk9XfeUz/fM4et85LqyrCu+Ooe2mbIYOeOdUSXJbN+T46LUq0g3H3pjSbIJj3bz6Yp3lNe1xs7EmIKkANMQk1SWXqsJbVNlot06df266kOgY9OZshY1dFPtlyA3j0MiI4iKla5fVhCrxGt5JfSG6Tp6Fd/5wICIo0jw/EZWdTpEty7G1qHvm3Utw+7fvCX4+hE3K3hoYmSOdyy4VAY/r2vC71vFnS++GAvbGtMmLFWaoNiRtQg2wfKWQH4DRqQa0oHwfQDMRNy9JV8mat0fCqSngFzq/E9EVNwkiX3j7dT93NUhKwFwjUAM8+PndYJ9uOubypicKT9DJatmYBjo4+ez49aAIwElK6ikQrXSN0SmvORMR6zQ4MujnLgp07UUWD5sn0jPmRH5IhOM1YKFYnP3CgmVE485nfuhXeW7SYznEQfyunF4BbGJ/W9X3U292NLmokQrsZZZhVo1JtZ8H6bOur42OfFllm1hROK2mNwnm1pTusCPLVTiPdc9hqDKBEUuNaGoQ6MVEqte/G/XJSSSChKl+b59qfdFVqJCot05CiaBnkiq9rMlUW9JHXhbJyCU07cMQUCv0oUxhMUVhw61YsZec0y+li5rJn3jvqCGEk3UJL8+ge/rEuX6+ht+nHfRWdclZOprExU3VYO7kCvBm/SuJixqnunkj7sSZ48lOqfSDkzbsQX+ba+A5SmIaKJCwUTSGWlB/Qya5KRxLu1W2R3HDEm5cGXICne9+PZNBeHMtNQt/ZWarVJ3fuZ3wFzvLTs682cJzIBKltrKUgqtkxEAkJjZmyYHfvcHd/HwHAuVEeT2d24m8oWsDKzT8g3+QNUOh0abwLgfcVK7mnhYJF1/E+g1jeia7Y6Z199d5M3NKyGU7ujkqg49rgfemI7iIjGUqLialT5SEgJZWlJpC2+rWjDcZVnxiXVJcEtQa6dxJFWTP4Uie6SHqHDUSeUA/a0nTs6VGERF6nncmmMjgnzX6FprJ1QPKifkFvUKgT26NbsePteL/S2LQJ/D93f150Rm9Dl2GHRT/2YyFFqclYM/u3Spf3Ahq1zVockdyS/dcvTvuh71PZN4tsz0YqmrQ52lmtYQI4TEq14LVkBXYpFsJZCe0lIfTV2pf9R9+zCiMmbx0T5cGbKyCboebHDV2R3yr8S9lDEhoqHZcVUKpJledlvJCmotIZCYtVnKOt8EBLdo1Pfj8RvzdPxdnbFvvRHaLhjZ4dczxxvy0r7Xvmy8vIz6KqYOboSTt96RnXxXZITb0bqdbKL0vSdrQNT6pgp1tAppWKTH0lKTmjEItalNcpd0EQ2+b0rNikxKINODR9eChLGolGisI/z7TGpLyrw7sV2JRJ+vYES5YkXhqf1t1TdgB8/xhDG753RZLrwk2/MtB90/zjfdCtV1rhMiL1RPQmnq0LZKubCIT4DsNFGKANCOuGrqUVmEpQs80aMbPeiznvS5eHwrQtO5Oj9K89u9BkUqcRGtK+wndOpOevqS5jxNWEQhkJ006+e59x2z7pi5zzGJKQd4Ha4sWZmK2Zn021SeJ2IAwGBXD5UddZxTNv0WSYU0UZCy1Bk8VQJkWj9TpQmSlRbgun5fAA5DhjENmRWjlb9z8rmVTDnso1Juh2ksOM3s1O5oRw7YeYWqg7S03FWsM6bwb99g0EXefB23GaAKHeqn5noUqlinHSIs9g3FuYBCotrYiB+gbb2IJSpDkCYlCk8kD1lm6F+ZaNcPlM5GCwBpOsyMYYUzq8py5ySZGuQGboGesScbbsgVFEr0Fgxjdl4V/z66LBZu+S7GaE18bldXnO9eO+Qe9iEk2jdQ7WP59SvPPWn3t/7uLJ2L51jXLUS/3UkYbQsRlUQ1oczmPJZ2nv/bhn1esUiw3td5T3gdotx3rnC1Jbxt17+Slc70TITcQ1BLid4FUoMYMSZ24cqRlW2Yn4SPoLBsl+dJVELHpDMFQEGpBFUmoJBBFBLFjhaI+k9sbyLxlkrRNOL6Y29HA/lNztZv3uHW2xJWZp9epA+mo6x1JVUiIJmLy62bERwz/ckQfzvg7/y5TkWugNm9OvR6LaycKCTwFMaV1XOfkflRhmhUgDgBK6GPqKQ9ZdF+Ii2FSrDKU+O2SVOFVJZIZQmZlCgKCaUSrUUJlB2Vt2WAZqzkFp1SoMo96iorvYHoJCrA8HblYgjJ8JEHH+EeEClqlbFNkDvZjUok7R25l3yRfBy0KKzu76qWZYqTCjfs2dQllLep5ZpuMnmTVSQEnpuFjufnuRoXNzGgC93XxAv1u46rZhVwRvohff9qoRNSUj4wvkCqdV9johg3wJUhK4J80Jsi5kE7puXew0csyjYUSiWQsjTiRJGVKHf1SrEAIL3CVqdDo1sRzb/c6lLqgqI7YR5pYNVVtiNw2p3itGIPLrA1syiP+ZrDXbSuCz4fPlmhMqX9zpjVhEVWRuQJdLiCBuidXGwrNNklHn1ExYVLVERSGaJiaVgUtOhWiiBpcQlLK9st+122MifWlpdSmIULqUNOzhL7s/bkYOrDIKKihTSjyusKa7a2beC186ZCiNS3EELLT5TOtmTd7HOTjVHkHp/ocBc2bSt2NGFJGGFp9rM2UXd4PleQLyLISkzHMmH7+A0nN7TfTlLHyk0bsqDvrwIWwsQFUdAE3a8oACEd660SxrpiJTvsCjFW+lg10xMsys9S7ALFvro0JIVwZciKF9Qh0d9TlMd/yjI6FDmGSI1N2qXKBGnWfJXGNZRJqNMMTey+MzvwlJ2e6Y9CzdudhgSAyharuaSDu4i60n8rlreGDgstf7ApXEtQl1nV1fIA/oGia+0fXj5ZXNJTAaWEDgFkbSHoEtogj4qv3fgG+yG5U1xSMpSkhCCTMhglBNQkHO3oII6q1FoXE6qsEkNEWsd6LCEmb1EdyQfoTtwkjqRMweAuQHLxuUJrGsT0b6/Q1HUJscmBLw0/L8+Frym7KQdiiUpstJBP19Ln7nGPIS2G0bqxPFB9omTKvJ0sm2vwhUSBZpFWUUcl6oHfL+R30ykQeeFkiVun3QgidyVhXxhzlXavJyQKPdmWK9
3PZqcV8t3GtV0sBNQCEFlTN7mmxJkDxjZHH1ShQjnTKSLUDMhvNvqUaJfPkLHVPfah1KyExDqRibWiL+N06PSbv8gY0eRQ60lfdEQLiR60hKygZIYSEvLU/lhdnzrv5NJT/UGU8+Z4vtAVHU+kgwt4u8zOZi0LjyiXzgfaGpuxJnVTl5Rdg8iUz43l0RZ0EZPQNjLlUodnFvCqXQedHUzdLjfRpcSQk9A2YBgZydjqZnnNZDOpzN+8TBNFxFw/XNdCLiAOpbSlBVk7hLkqRaPXIrgTFEZaeD4WgpveoEL9jvhhadVkwDXraFRgcX+GxPBJQcug42lvVhsM6FdCsFYrNrowVlwPOd8EXcknvcfLtgbDFfiWztImpCujfocWjxVlZYT5cl1Britk92HWugFgliqopLayhPoU19pB5MXKQks6wboQitzR9assq0mT90W0rCnk8ulyG1G5s5PKrL3je64Fve+AVasvJ0olKx0Zl1a1uNfJFbYJOdmClWVLzVjjgx/8IIQQ1n9PPPGE2V9VFT74wQ/iDW94A3Z2dvCVX/mV+OVf/uXxF+xZ9MzCFk1WU7ijxqy/4kImJdJMIZkXqPaUiRbiKB1tB5+dpGdAds/uYLnZlVxEOm1zU1YldeK4cqY7nzKrTYyp7Y6h33S8O/Py1ZMIAL9e6D9fOaZ8Zybnm9G5+3z/0f2VKfuPrTCq5kCxW6HY0TMYk/3R1M/znjvaMREVUUe28CRq/D8ADRmQpdeK4iUvSdlJVDKpLHLStZ/+5senssSNxRqLWYFFVrQtNqlCmqkWIZOybBEwL1EhkGVlSJ8Qi8iICx/4CsMx60tZotIO6wUvl5+7Lc2JWVOG/We+Z49urZz597lIcp2MjP5LmPujqi0omvS0w8i1uL3C/LjS6eKZ9qPMBNRCmPOBph9x+xNtZXAITP1tq1mbbFXMstJ6Lg4RqlK/vqVKRWvdHxfUn6pF9zMcgkrqpUGKvRJqvz0+jMKDqln57b/9t+Pnfu7nzG8pm6/nH/2jf4R/8k/+Cf75P//n+NIv/VJ8+7d/O77qq74Kv/7rv479/f3hF+tbxG2kGLZrwbfY0NMYS4qbKwKIF0K6Aw83pwtZAWmJch/AOoFYCyQr0Z4BytriLZvZTLJGvaBfk3zOdgORCb2xHliRQNImO61IAmZS9pGFLstHCF1hlnQtfu1gObxurQ6q6UR916cFK8u0nqXUa9P0EhVffevjOFEB7LYxJt9Jn/Wkj5QMwZDjNcn2u4bImkJuH0NUnERuwZQDDC1xrPJvj10Y1o6KsV19er/exlcFcxcqbM5h5TKxu2nT/iwAdfkaQzRXm6BTq0KWxcT+ngii1H0MUOs1assJWSRNng/YViS6rn7WZMlovsfsflVbfe0UA7ou/odHJEjXq7L6QeuePOLcUF/C77lLpOsrn2to1KxZgwwA0nXT53JLLVkDu6wqvrapdrtdzbrC03opACAZIvQfdYUBSNPUsqYQqqrC93zP9+Dbvu3b8HVf93UAgB/+4R/GnTt38KM/+qP4i3/xL3rLW61WWK1W5vfx8XH7oAErz5pdkW4Z90VuSlRCYcnAZinQrf3k5ywSvULtbgVVJBAnEsJJdKXmTQ6DEjAr/2pRl2C++vr4WW32rrdZSeEYeXE/eu5KMm4hRiJay9p3EIzobex6IXSVVdVWnWLXTzTMc5GVFe5Hg6DPF2y1n47olT6iErKSxGAokQB6FhbsOM7nMgKYRcdxAxSlFt0u17oxUabbltvHIflW+v0QcaGqOJ25qGCLFLl+xdxg1fzbkUvEJS56v2Pd9KhrfXmK3GtY2xytiy53u4SlL8EdAFSe75Zn0ia4fYNc15YOxyJDqQFEaUf7uNc2E6Z6tXnqb5pjhLWfn29HJ/r1Lm523DJtSJNPm6LJTVjrQpYpU/9525VYyaZPLWd2BCJHiKiEwpeB8yMo7jg7RAyxVTcQAHz84x/HG97wBjz77LP4+q//enzyk58EAHzqU5/Ciy++iHe9613m2Pl8jne84x34xV/8xWB5zz33HA4PD81/Tz311LZvwULMjI1A0Tk+cBN+LEIm/b5z5lmB+TzH/HCJJFNIZKldQzdzlI/mtfnPmX2kMO4dI7B119BhCYYIXjO04yqy3Cgz9nfGrBauKdljdlbz2oKRtfe5cMM5feDPwHX3FDuapJRz3+BTnyf1+hllWq+67BLbWsAZHDw7XBak43Atb14XT48rB/C7aFy4rqVQRE7XMaHzeB1C4KHOqkxQFLL9/UUI6K334F5O1ts6rCq9lhVZBf+r6lVqu/+rrP+AtkvT5660vlnh/EePJ4KouG6kmHPc63O9W1c6A1Ha7lJjwc0rk66A+hxTdllbYApmZandOtZzSYT3P6rr0PvTdRfG0sOfEbl+KinMCvHNu7IFt5UUUHNdRrFDrii9MKFxQaXNeSR25SSlZJYVsuiqOV0v0EAj7/M8o3u8LttIbNWy8ta3vhU/8iM/gi/90i/FSy+9hG//9m/H7/t9vw+//Mu/jBdffBEAcOfOHeucO3fu4NOf/nSwzA984AN4//vfb34fHx9rwhKZDW/MIoJA+4USCaGQ4d7zB7z4PvO+D6HjeFp0Ii6rPEWRS8xmOZRKoABUtWsIsIV6bsejO/PKrIXUDP6MsEBY7h9TTh0K7VpaQlqVoN5ENmZRQqK09Sc9ax/bF1bpK9+2HjUzGkCvoUHHFTv+Bb7MYnpouwGgYFK+NzfA/nbcleRqdNualVHWIQRD3TRD8q70HR8iNYSQtYUsLb58LIWbPp/gi/irw5EJXnePROOOI+ugz1Lgs6oMhUWW+ic71mKoNWKii0LWl76EcjEIXYNvc0XyZj9rDmXWLqvKtZUhXbatHGRl5WkQmrwp3ALiv7YPlivJSSYZm52Yu6T4Yqv8eUP6M+262XLd6CL3enqNoLqvkUClyCrD+mDZsRoy/NtHJ3SbIGv7GGyVrHzN13yN+ft3/I7fgbe//e1485vfjB/+4R/G2972NgCAEE5jq6rWNo75fI75fB7cHwKFGQ9FX6hoH1GJJSmx0Rs+DMk8WqgE86xAkbMvNKlngBVMZkh+lqitASRaIxNrJdigzElG3ZHY63EI20UkPVluqToBcmFEqy5RkrXSX2nxWrLy+Jl5eTLccYdCLU0dKzSki5ZMNy4Bf5nk+vGGAna0j9iweE5U+kjKUFLiIqsfXD5ketpRB05cMqlwlmcoVGKsKVUgkqfre6a8KUDz3E0St1b4KrMO1laWisgM6t+oNUeFMIPCGIgKXnFuBdEiND73EUBE31+BJp8HjKbFO4gGEBv15iMnVgQg+34pusf9xksJVDNAzBptXLErjJsHay0kdV3EpG0rAYSaelfepNZq6RH3XF8ZNCFz3U9cGOsu4Mq/tgQiSBxNGLW0n2d+Q2B5C1jfrBpLtjvRcYmK1Yf5+5dtZp6dmqQQzjV0+caNG/gdv+N34OMf/zj+xJ/4EwCAF198EU8++aQ55uWXX25ZW8bA98DGrM/josutE4uxA8YU67GY5FuZsgmLAsp5B
SjWsZMoFCLY2fkSECWAFd5JidDMIok1cQnNxLx+2LSZWVioP1QaCIodnYTNKkO2r+Fbj8g6RrCOvoAhKbTdTUblimYFALMWTVb/DdiunsgcQK7AloPahEtShrSxbMSUO3TOUBLjJnbbyXLkiQQy4GQ5g0LSpNCvEZx0OBYVACYpIumqTB6UHf/zMeGckmmxqJ2mlSYsadv92b6x+l8+MNbbLc3SWsCdmwnShfF7MVa52rIp7W1CNdoXi7RUdF9sG7oG5bpIz/dBcJ9+xQZYX7ncEkK/W8fMmn/VXKdO4ML2Lg0a153wfYB9v5toePQ5jY6ly1rLFzrlx/AW1xXSTnVWC4H1TSDf19Zbs19WEGuhJ5iuoHZbROWCrCkc50pWVqsVfvVXfxV/4A/8ATz77LN44okn8LM/+7P43b/7dwMA1us1/tN/+k/4zu/8zsFlJ0k5WiTbh65U5lUpoojKJunNo1axDZTvW5eF6lOg1gAUiXHrCAiUs7qsepYpRP2hrxmjZ5YS/tEAtsVFX6yZEdRHoIRo0u47nXVboOi/52QtUO5ULTO9mgFJR+dMnbhLTkJ14MRFl99DLpRo1pDqSoLkyafSKktWlriWJ1GjHCWZVFHtawwpGYqua4SIjCvGpRwti1mB+8uZ9xwAlrusyvW55M5JCmF16EZD5Lo1vZaO+l8nZ4qubNUS4BrslEBKiTh0yn4aPERWmroKQH9zjnvQWIMY+aa/zfVSnQtD16XeRknIVokhLV43rtvmTWXYjQv2t3kO9aEO2XCTOSaA3zIpgIK7bFdtcSjPXAtowkI6FaBxnfA1i5rEks1NmJBlNCSK1ydkzeXnE9yVr/nztKwprtvL456qj6zvQVgEi5LWWXVNBFY3ddp7S/8mAayFzoYNZhHkVxlCUjYgJ1OMt0PG4q2Slb/xN/4G/ugf/aN4+umn8fLLL+Pbv/3bcXx8jG/8xm+EEAJ/9a/+VfyDf/AP8CVf8iX4ki/5EvyDf/APsLu7iz/7Z//sRtedgowEyw5E77im7b7BI4qAeISTQ+GeQ9EVAGBiqrjYNwMEzVC9/mmadbLfloalakzp7Bxdmbqjr/2ulWr2uWOcIRkerQihkgDOEpSzxkQqFFDOKpQeszrVJeQM9I2liSJRW9s/bN2fcmblPUQkNoqMty+eRA1orCluW4slJbOBuhaOtQowyA7wevmIS+s+pDK6Fd2mRKcrNzlLUB0UQOIsZCdLZDMFkVQocp3VOdqTY8huZZEJNwRdzBXkrLm/iqx7zHpGbaFUiR4MlLD0MpXTZk1ZAl7BNtBYjLQOh5LUVcZd1HINUZme10cTA6PHcAk8fYs0X2FkBIBeTwxNGS64QJ9nsC4lUO74XCOs7k6duciYa0C4voXfl3teCK5FybizwK0yIniP3ZYbweotrO2cHRYLgXwfWN2Eyc1kWUxmFdyWEE1Q+kiIJ0v7EAwZe6UsgSHBIoNqMhCf/exn8Wf+zJ/BK6+8gscffxxve9vb8NGPfhTPPPMMAOBbv/VbcXZ2hve+9714/fXX8da3vhU/8zM/MyrHCl8baMzaOptoSwDmXmGz3hhsmguD4LoB3OyhvDyyttA6QuDiLlqzpfb7iwrWYO8uohUMh5P2IG70AFR23bES8SETNp9l9o25iiItmInUDCDMhytE09GKqkIZlYOjvkY9O6uY6b9ilhWxFvagVRMV9yPvykBr3ZNq3k3IkgK0iUqIoGxCSLoQKjeWxMQQF1UT61W96KSQFRKUKMl9mdjvgYiKL/NvmiosZgWKLEExz7E8naFc+bs/X5Zbozuqf1qunLnCbCe3+pBKCmCmsD7Tog1OZBKUKEtprmO0NNYg2cykK1lBZGW7jyoSTWpoMBeuC6myXEO9HjrmouLamK7v0LakNIQ+Yenp5QqAaKIKyerpkiHFX0flt34k7LvULiB7f9cSH6H9HDylgotGONu22vD+gv92j6FEdRYRk0BC+U5mmqTk+yybrAQwYze6bm6ik6SMEMn2kZNN5RKt8gbIJ0RVVZupaS4Yx8fHODw8xJv/nw9A7o5L7TfFCrRj9CRjLCVDozxcEIlZ5imWq0xHA61lMwNc6v2CXD6y9qsXwuqAOKmQayA/LM3HxZMUkWagmlVNEj0nhJfcOPqa9d/MEuJes/kNlPOyMbWvhTGZc1Ih1vZCdQAsTQGvB4/+sGbQs7L1gVd50nQKAUvKWALNiYohK7XLx9xCD1EhbIOwjLGscMRoW1SZ4N5ybiKCiMiptUQiS9OeSpWgWkmIrMR8f4XD3SWKMsH95Qz5MoWQFW7sriyyt8xT3D+dQ51mrcSObn6WLm0KEZU0bT9jcmv5+hcSD6/vzTvzwRBRSTJluQQBTWzVadYMXu435VmAkYvS25VqC335uXa90NpnSHxdDiVETAqhrV7cOumpF3f5JmuW70na/U2i4BXRtyMX2d9lm6j4lgLxneuSF7Lm+J6BJQT25JYxZTh1SXItRl49ChR7pUl/YJEOinxbShPRRhOkpqDA3w42tfgOQRchUadL/MY3PIejoyMcHBx0lnN11gaqsWm0gw9jNCTW8edESlyxYqhcIiwiqX34blIt5VgQSPxWi2U5TAejmFp+LVDt2MnizEfH12AhDQFtY51dYy6tWh1Fc8NV88GSiNeqm20Bqma64zcgd1dtnjazZvqXjvdYSkqVQO7mcOEjJ32J/tyVg12iEtKluCRlJhXWSm7NmjIFxkQRUUSQlCWUSjDbybGYN8++UAmwD+wt1laq/50sx+vJDlarDKs8RVr7ISi/y/5ihVeyG1jdn2myQ1Yy7vLp8bfL2r3kQ5cV1hDQVOH03gLibmaIMR+ABCPC3C0I6AVLsQsUama+Nb6ukUXAuAUkZFhM7fsQRhyPYBSUSVK2UzbXWye6D6m/5xJAtVvq72ktMLubABXqxVKr9jdN360SEAUgCwBCmCUrSgApBCrKIst0Nm7qAlNPltiRP5O+UGV+rFuOS7C6Ek662/kaSEmuM4Sv9wFVPyfTBtyUHDQpWukC9WRNNJOpHkuFby07jjFeCY6h38JDuZChDCwj767ouklEzZBVaYcilpxwQtKXzwJo57QgFErP7CrlEBWgIRhEAGTVkA/VRAlRp1Ky0Eszs6OOf1bqmStgrmMJCusOVle2PteZ3VmRDwzc1eMzpVvXoI/fl3jNM366M9rWKdzC4SEmfH2ePtAxhUqMajGVJRZZYUiKz3riIycuUZklCutSYraBuHbtIRhj3UBZopCXEieruclKS+DuLh6+7C5suJjn2F+s4MKnG3vkxhleB7BaZTgp59jbXVnf2mP795HvLrHMU5zcW6C8N7Nce/S3L+xc7uZ6HSN3faMBLuBUlljcOkFxM8HxazcgjlN9vRns9iobaxu/xzRTkDeXWN+f6Rm353vywZdy3d0OJVDNS3MM32e5QpW2nJjftKto+oImsk6YfEizYx2aC+hki8We03fNKlQ7dA2qpyci0DrHb3HhGhfa5sJdUoPrX0wGXIf0DI0sKuskcMVuPRkkUuwkkvQSlVq0LWp9
UjmvvCQlJup1U1IC+IlJjLGASybEZdGsnCfSjmyx/Jgx+6zjNiAlY9KaD93nO9aXhGsxK3C0qul90h7ETehyHVHBtwHMPw42C2ICOhLeAtDWibIhIjwKggsEDfEA60DT9gfB6+iWGQrd85n5fWXRscLpALoiwnzEhLscouGkmneJio8g0LYuMrIJUQmd7yMwvD4+EJFRZYLlOkVRSBRrab8/tDtUsqiQuyaV4ay4VD5BJiUe27+Pk9kcJ6eaIO1kubUfqL+JrMDJ7gynr+82rhXJCMuMmdwd4XMsutzIsxtrrHK9dpe+pjCRTWo3QbawF6QxUVRIkGQKZT35EI7+qz6oDWa9NMd4NSgVsG5m80IB6asCLZUnmDasJiSUwLFKYdbBKXb1f/wapq9hJMAQCtkIh4US2nrCr10BVloBdn+u4JWaLTXTkNDYFN0ROt36BMiFZVzY7Wssb1Uod/0LBgrJ1vB2yAcJy+VuDrFffy8sD5M7oap8Ez+0rXOboMty2IdRsonBZ1xSxJCVja/RkTZ8U4xxX/EEXV3JunzWljTRz6tQsmHmpWh/RKRZUY2GxXRiCq0cEQCa5Fl8IxdDWuXXHx4T9AFOB+rUp9WpRpjsfQTFDJKz0uRFIZePTxDrunNcMbUvOdsmbskQSRlKPoaQjW2CLEH07IpCIp0pKJU0yQWVMJoUsiYAwDzNsZgV2Mny3mfKCTpZcvbmK+xkOc7yDK/cu4Ebi7V5X7y8vcUay/kM5XpmrHHVgrlkIlGoBKuV1sOkM6VXk3ZE+EDTZkj0TsL2SlZGVFnWOXpW9+bYPdR+Dm6Nk0kJLAqsVAKRCFRoymry/vitLNYExBOST8RdnKV6KYmdEkIJlHOB7F6ChBm4eLRPvq8HZh982pykdge7zZJCk6meiYJNVNyihE0O7MSUzd8hC4tLVEh7wt0+QC0W5teu2ueXM7II6XLWB4DaV81kyGflddYHo75ISgW11MO1lKTTU5YgH2gE+q6+Ceh3Rw/BoLXpOvBQWlZcbOqWCWEq60jnNZxIiT4BJd/vHhsiL7mSEEllZmv5MkXpHMs7ugo2UekSvgLQqfvTyso70ey0P1Semt5s81hKmhuwj+tMzsWvQdflxIl1DnKmvMQEsN+lG0IMIEr4uommpI+gTEFgQvC6gTYkQGk9w3PXOqKOuVJaUCtRIk0VHtu/Hx2STW2ejuf/yqTE3nyFu6c7uL+c4cZi3SJAonZ9cjcgCXQJ3IW1yvXfdC/Fug6Pri0yawDrmV6Pi4gLoCcN80x/f8d3dyHuZpjfTXTywxmglDB1IJzeW2B3f2k9R0IiS5RIrCgjQieRr0mZNfaS6Jjy2NzMkcjSfHplLlEUGWarJsFjsQOzBAYX03bpQsyqv7OmX/EJ6eu/ULDVmCUT/Lvakfpwaz8vi1tf+HXk2j7WRO40kry6QCDfq7RmBNrqlJ1oyxCPCErqctb7QHFTQczZw+gRwLoWRuXpt1L2TVSl8FpOxizf0oXYvF9uji/fmCxc5XFXmdFHXnKkots03IXzICDW9QYMFENzZriaAZpZtsqVynSUhJVKrA4KgLa2oOlE3HBL0rQAjGBQxyEb/2npZjfxzCw8ExV7P5t9h/a1wAmKM2sxh5jZS2npAly3DoDOHCf8XVk6ksT/91Bs6s6Z8rqxBCZ0LFA/u0SLTF2Rsarby2Ke4+bu2aBvRpWJTiqXtgXQVM7jeyd4/WzXIiq06OKN3RWO67WIskWBw92ld7Vonk8pX6YoV6mJdhOATbTXCcr1DGsAxUqYKDlVWwvmbIBUTIwqzlKUBzCEhUKgiSzTgECWqvVZYudN4ggtlOkeWzKiQt9PKez7KZt0BqubVdsqE6GXcUXMFU2GAt9y5ZhR3EX8yPJikZaKrgmrg+EkynUT8WR35dwf8l3sK50EENAuQ1khP2yS+JE1OlFAvl8asue6o90+zSUoPPkoSqEnlLOA1ZZ1sVNGrbbKiBwDYzwRYsDYe3XIilRItxwFEVw9dqJBZIooDl4GEReftUUmWsBZ1C6hRVbgbilMXggAdvpybs3g2hI2I7O0JrJNCDhMYix2rZbFw9nW2mddwCEi7rlsvy+8WMrSmOqBtuakz3KyDXLShXTicgtPj5wmyrt9KNxnsCwyvTK08x4srUlNHBdZ4X3OroWKk3SZlF6i4uKRnVPcWy1axz6+d4JUlrh7d9f65jmpWeZac7NczlDm0sqga7lW6O9aIEriU7Vbz8gLgVI1aQEqWSFZJSjruX8lK102kRVZWlFPHBQhVaylta5U75povizLRaLFnDTorgSEmrUWDM33bfcRRQyZQbkWyIesK4aoOFmw+X7KRkxIz7RIV+c/ElqIO9O6mpIxEpMh2yqvrl/BSIpz3VLCv7yHbHKf8IzElRIoD1XzG0C1kpD3pF50cEe73NOZUxmqS1KBJ4nzuWsUEkMcRVK1JpocQaIykbdhCtnDGFwZsgJsJ2zZxRTE5LxCS8nt4MLcQwYs84acLOZ5Q1ZIEEuhvtLxetd+dE4OBOlUJPTMamaLv7hFpEVegnllm+u1fnvIjCnX6mTsRQF9s5eYDLE+tw7gJyTuNiIXfPCfmnBsCl99QoSF399Q/Qu5IUnDwberMml0KnU0FODR7Xh+h8h5FxaZn9TszVco9hMsVxmKMml10It69XId3qsHLyIsQLfbpUwr0z51puUK5bwmM3UEjTxNDKERSugEdkkFsSiwyHJraQIA5nmKpDLJ8wj8mzDExbMuFSc3Zta/o5MoVlJgdlciuwfIlSYrOrMzUOyIYGhzCCbScNbkYyEyJ9YCcq0tN8IZkxOl1w0CwFL0CyxvCRMKTWWbLL6+5uBYWfT1NUnR91SZMGpCmWodUUt870lhoCCNMJi7zEMuGoWkta2FUmC+vzJEZSwpmYpshBYkHTIOV0NynEUfeclxWYnKRee8CBEWoL4fRlj2FyusbqRY3Zv7yQHQdHIBy4VYJy2/s/G5sg/SdeVwxbv522NVsVbbDSRiA9rRPz49Cg2OMimxmBWtfCZZooKEpM9iEiIil42gELrq5dvHCYz7LChUOuQqWicKSjr+bCY+XeUpZFLi5u4ZbmTrVhkhuJYW93deSqtTXaS59Y7pO6Hf+4uVzs+SlG3CmgHFIsFJOdcDE2DIujdHyw2lZ9wuSWAWwgpAqYSekddu1EpWOiy4dksVtcbMt54Sba+kdqOZZI/Mmhj6tvg3lMgS2C1t60wmoU4TpGx1YEAThwX0ysA8cq9M62jCesA3OUmMBaUyxIyijLITYYiIiSJymhB1ZXLZCFjlGth5WVt5ih0BCK2fMe9A2XU2a5oZwlhvB/tdky/Bzk8KgXKdNNlkA9mSAU1gykwCS4lkJaBkBiErzGsrXl/ursKxhuVLTVbnWeGNNOT5s2LIyNTj5dglP8SA/vDKkJUsUecyEEziqpm4nu7gQOWb306dOXkhwkKd3uHuEkfQHweJb6mDpUG+yKXuCAmcuCwUqpkA1glby6UxYROUx5LizgZ5Zwk0nUFir6nsDc/juTm4HgVoR/KksjT
ahRBB6XpnMe2Ov5Mp37/vnfeVv+1IILq+j8QAwF62ttrk8WqBszxj72SNvflqEFHpEy7PpDLEnUTrXdYaOv4sz7DI8lbnmyUKj+ycYifLcW8+x/HdXewenhmytXxNj5ZirnBwU4/AqzzVg44HPHqj2JUmKy9Qu1nqy6tlipPVPCoqihZLFEqgXCgzuPJvioi/+60BtsgTihZQJCJWH7MGklS7Zoq9ysp9BNRuIVqqwqwdVE9sCoH0JGlZa6h8Eu3qg5trKtEcJ9eAWgNJoZOrZaomMYUwehPvavEOUXH/tsDuJ1kJbQWelUApoFbSrAtlRd9IQMlK59xUUlvK5hLprp0fKGQd4WRltcpQrlLI3dzkXgL6CUcsITmPBU6nwJUhK1OgywpB+weVd06NIDQ4BGe3zkwyS5QRJQLAjcUaJ6WwBnX6eIjU+Cwc1t+1uVS6ftoabtpwACaMtTlG+Y/nJtNA/hPJCI2bCt2XD0UmJW5k6yiXjjl3RG6TqdvEUItPzDHbIjM+Qg0AB/MlFmne+31ReyXBOLeS5Epas8uiTLCT5TiYL60yiITE4s7ePatP8BGcg/kS+4uVdQ/3D2dY5hn250vTr9xbLXC33EEXKEKvKgUKWep0+opFydyXeA37+to31tjbXVlLCKxWGdQyNUtBGNdsThlPPZZINtnwrosEAEWiE7qpOpQ3abKwyhWw8xJQ3Eu0C2WXuVEATXJIs1MIyNMEs3vaKpOe2bqSZpkNO0xY52mpD5IwEUi0IGIJTVJ49I67ajLQJi8hd50hXSR8pcOUQLKq3dYLhXQ390bgUGROlSmUUmtXyIoWI3JVZWJE2/okLfzuShLpYhNPgDsObstDkDyMAtvZRALbvpcyZrCZeiYdmrHGXJsf694rz09BYj1OVsxHlgEFpCETvHMrVWIiF4SsTIbPrmUA+KDfRQDM4nY9xwCa0PhyoTTXKa0Pf6+exY8hJl3nPYjY5F76BLlDk9etS2msIcs808JWx0ReFLUbxrWcZXa5fVansVavWaLw2M59e9v8DGunTWVStcK1g2Gm7BZLmaJiGWKxlKhkhfX9GV47y/SChoRaT2FCZEutNTPfaKHtkr7JRqkSILfdvAkU1GmGxedTVBI4u11ZYmAAmN8VSNbA/C4glxXKTEDNBIpdgXwPgNDak+y+tn5QeLAo20Qhrblls6CgQCo1MVILex9vaqK0FwhsooDqe6stQqUbtcS0Nq01wRi4cLhMKyT7a2N59lowEmcZjVmFJFNeF44b4luoRLvi63aSzAtki6JFiF1MtdzG1GkSuiAeRs1KFsk2t4HzHKj6Zv68ww1qBgLWlplUOF75F4OkhFy5kljmTbMpcukkUFOtBEUu+pYE6FpSoMu0yYkV+XbNPieyh5MU3zOdipzMk7Bq/yKxKrfz6bvPrSilV1zMQYTEh/v5DCeruZV+n0BaI8z0My5UYnKe+GavYzth3/cU8/7XSlokmEKjYyKsqP4iqbBSCUAaH6X/E4sm/4qYNZOGUCbTUiWW3swStNeL49GCpZTavtwpAaQQ8wqrp9cm50wFvZikuJtBnupyKgnkNwA1E8juV8jua+KxeK0hF1ViW2zc3/Y+/a9cN/lP5DJ4OCpJieUai0uiYFaINySFxP6CzmPPy5vjxQ6NrgR0skJpL30QspZUMwV1c4U0UyYRIbcAAsD95cwisMVaYr6/0jmBFmvjhpxJNYnlNAZjywgJ7zcR5BOuDFk5D4x5gVPoaIaEj7o6i758GO5+Mpvz2SwH+UsXtSKdVrct8qYcM8NFGUxH7sthQjNmvtBbDHxiNZ8ADWjIDrl8Zh1ap6E6k8tKTHyIreumpMa1mvna8lpJLIsMr5/tWhldOflwrRGuKBoA8kSadgloAW2I3I/pMId+/4/Om1X11rW4tyhkK+sowf1WrHZt1tjSYdAUzswj7TpXzKVZek1wDHkptb4sWTXp7rV1ohbG1hEwclFYA+4yT3F/NkdxmkHtSuy+ULuZMr3eD6XaBwBkdtI2jq6EcaRLceEuRkig6CAenpygFuU6OprQ2khd4Fm1KyWMOD9ovZUlULdRH5Z5ilWeosgl5vPciMx5bqGh+jnaP7U2bgxiXOEPpcB2lqhzsaxMQT6Gssy+a3aRmT6XUWj/TGqrw+tsHz1fX8hakSUt8zwSbZFxMVXCoq4QvVaoKTOfzhKF3bTb7cPhHrMpKek6fyw5oDKntpjMkwKrMrXqvCrT0RlsOWFZlxIn+QzLIkOupLGeuBYUH1GJaStuMsS+Dn+bHfy6lHjl3g0ANkmhVbY5Wpqqm6e4fzpvBOtFArFOdJbc2t3j6lC4yNxE/7D1uRKUVihziQTYr++9Ji8mxDrR4l+e2yOVJQ73z7Cc5zhNFzhFhhvPJ6BmUuzYRMJHSijDaynhXXzQPCPZ3lZmMOnw+XEURWSOk2jCo3lzsMp39kWgXKWoZsq463wpDwhuf5kriZPlrO4bC5NFmef7cUlKbLv0HT9mzKJvdNtBK9XDSFbSjhnyReG8xI78vvvM7l0uIt9xPCKDC3JpIIhZXThEKvgseuoFIt1QUzKh7qaN22dqS8gU5W2TCHEMITVumURgXAwhMCfFzFhUciVxdNq4H1urGDOBtC9pH4lrOYoywclK534nK1ofGQkS9w36FSJnayUxr3OzEDgJ43l+7PP1fab7zfb7yxlWRwuItY4YErJCCVuQzteImc1yKJVYa8uIrMmMWqwlUKfJJ/KiZLNGkcyUJeSlemVSYSfL9crW+zu4j33sfTph+U8akBiX1tohkkHeECq5i+AQGSlTFs3D0u+b9Pi1GJfWKirTdoZdd62k1qrafdaWpEKxli3xPrfg+ibPWaIjzIpCYufGGQ72tCW7zwqx6djWihD1fA8xZH2TZTrGWjQJV4asnAcuwqw2dAYbY3Z3y+Whz77Zp0+Uy8NAKe23151DZIYRE/6bDzJuSvPY5EU+HYu7COAsUbg502b5sYSAD9IPksvHB7f+/L6mtM64HVRRSkNUiPAu67wqblI+V3jIEZrFuljmWUvnciNbYy/tDot+6WwPyzxDJrV41pfkj08M+rAsstrM37TVopTBxRFd8s7vN01KnCQlTo92dJRPYFE8ysoMaMJHQbPWAJvAEv0qVQEzBbWWmgjNFbJFYVaq9onlaWXru28qcbSzj53nU2S13jipjQW+VYsB0rIAQtrHiNJx9yQNyeE5WBohbhMFVOzQMxU6sd6OZ6VhKUxYt79eHS41NK5uVfd73B3Zt1L63nyFQiXeduXC1742GYe6rItDXE1DrxXc1uUHdHBlyEom4sRHm+I8B6nQoDEkGsgneAyV5w4sLRYeEHgtkfW6ZIhQcA0Mfdx8P2FI0iLfDIaTlHUpsZuurXfH3Saui4PvdxH7/udye+1kpdrtgq7n2xcD37MBwm0wltS4bdON7jlZzpDKEo/c0ESyj6RyK1zXcUSgl3lqkZ6dLDci8hBhIX0Jlf/KmXbfLNLcCGab+vgtmakzGfCBRy+Z8uqoO9fa6JL6xazAalFAlZmJ3FHLtB
HBqmYtH+6KLQppWW9o1WaFxITbAoAq03b+IrbcgD7GTmewyAqsHj3DMpuj+swMsyN27tKvWQFsVw6HS3DIsuKWU6VAa6FBqVPlV3sKZjFVjqSCSGFy0QDtfE1doBw0RPLcZJIAWv0l72cfmZ916uU4NhnXLuukivpdMaB+V4asAFdr5gvE6xCGaGCGrPcSm8Njvxbl5k4n7xt4fOnNhy7W2FdPd/tuusZ+uvK2CZ+LI4S5LAwZ2CYZ6UPXtX37xhIYoE1Kur6xPtcQt9ydrOZIk2adG98aPAQeOdFFVHi7M8ezlP58kL6fz3A/nwUjwnwWhGWR4V5NdDKpjA6Kzn191eRQ4W369bNdff7ajlRy6xZypbrrUmWJvnaalLgrd7C+V/twkkqnDqidKuSm2Juv6pD/EifLWetZEIiwqFrjIhcFpLQXiPW5gAGY7/5wd4liscbJjTmO7y4wfzlFdq8OWXaieULkBWjyuPDFBnlGW7nWriNr3SFKHFeHI6ubCnLu/1b4ekmtxVsJPMtwh+VqkRUttw9vFz5XziYkZVvuZj5x21a0YNf1+3BlyIq++dm5kJQprxHTKPqux8sINfB1YNYXgntM6nx4ALDLyp4lCqfFDC+d7Zn9JBgLLfLngzu4ceuOz+/q6nV4fWeJMs9unhSTEIyuMtK61yy6euELAK/zGOISq1nps7bMaqKclxJ785Xp3LkQ1uvSG+ke9IHO51aBZZEZ9xAAQ2K6QG37fj7DsRNmr8oEWaJwb7UwVh1XfN4nEPZZKg/mS9ycnZnv4HR+hkwqfAGamBD4QMwJVyZVJymiVbAB4Nbj960QWy7+DC2MSoQlkwo7hznuzXMsb2a4f38G3JeYvyoxO2osKU1Ic01OmDBWr/NTp9Nf6d/FDmXRBbJ7CZLar0VhyqKoxbpnCYqbCiIrw9mtFezM2wF3HOCJoArA18/5rCe+vrQPm4w50dZgx7ra9S0PGZOmwhUiKwqzB9Ca0jc7jTXJ9x0Tq33pCuN1I0N8gxjpEUIm0K5r+LDrMdXTttCzI/J0kC6tgTr1+EeJWPj2jcWUZRGmIkCbEpfe8jtmZutS4rXVTlC0DXQTF261C4FnsAWcZIbw62CIvByvFi0rQl+dfNtkUmImFZ7cO8bzxzd1fqJEYol2DhhvWL/H/SOT0hB28/3N9PeW79kCZWMdqf8+Wc2xN1+Z6y3XqXENhdakodwevgkHx1pJY21xn8PN3TOoxQr5no5+Ob2xA/XiDLN7doZaYzVJbf1JKQGpNFHJ9yqofaWjn4oEa1lBniaQay0CLmYw+WEqWa+MXBMLX5ZrAJ3rIwHNEgTNwqeKLSHSEMRMqlb2a25NuQg3zkWL/WPPfShDl7vAzfdTlBWLlUoHm+U73RADiUvXccA4X6hb/o2UrXVR1BoRJU3H6hKi0MfrG+R4x9xXD9pGZGpV6mffRxy2QSy2ga56jiUyXW150+/FnZmtS4nTYmY0H7xd8Jl6l0WDuyBCGZHJGkCaFZ53JXT8FOD1XhaZiTx7881XkdZWx+ePb7bOc8lUV73omfG2f3txYo7PlQxqXdznVajEWmcGGXBW68lSGV56wq0LCe197y0vJRapDsndny+xvHGG1w93dL6ZXKK4N0P2aorZvdp9U9UEpgKSFbSmpALWj5Sobua4sb80uXVOljPcf+UG1JqJZCVQiqpOZgdgnTTm3xpKJf7lBBg4OfHtIzdbOlO4uXeGg/nSsvwC/f3cJtg4YjBiDHPHrtBY1leG73pWWQPKvDJkZSYLzGW4EV6ExiB0Td/LG1x2BHEZctzQ667KFKsyxY06xmCR5EAK3Jyd4SRv7Lkx4WohcWffR+n9CEZ+yIukraXZBpZ1DCe/Hm0bi20SmVDbDPm8fViX0nLRdS35EFqTxLXAEHHxkRZOWNztHLmSUW4lTpLMuR1tWiYlTvIZbs7O8MzuawD0e9hN1/iNu49Z1h1flmVXo0JYK2k9u7ks8Lg8wUG6xPFigc/eP0ReSotouNmBF2mOfF7nfHGaXZqUhsR0wRLhd6ylRs+N3tkizfHkIWv3j2a4d2eOk3sLa+FGgOlISoHZ/sokSiOr3CLLzbtf3Z/pyCVHKFtBL/xYZeH3a1aVdtx0XVm4qfy93RVu7d43Qu3Usaych2Widd5E45xbzphyY8e/WGyVrDz33HP4iZ/4Cfzar/0adnZ28Pt+3+/Dd37nd+K3/tbfao55z3vegx/+4R+2znvrW9+Kj370o4OulQqFVIRDHQlFJa3O3XUD9HXufTPxMYNDl/UlxmwfK4oaK55quXtko/+4X8yNdeVQrxffGpx207X57ctxQnoS9/76GnXrXXge/XmRkFj46uNuW5aZ2TYlkZm6bcaABraUDZ6cwLYEuGwx0dYqyA7x5eHzfcg7BtUpQHXgriNLJyYUvmjnCEUp8cL9A2+dYlbTPS1mWDli8RvpCjfSlSYtxQLzpMBBptWsK5XiEyePtVZaJ2Ezr0MmVZPa3bEM0L10EZWha9Is0hyLvRyP751Y20lDxMGzua6VJqmHu/oe75YC6yJBBZgVkAGYvDLlKjVrJvE1kbjbx1phGn6Swt1Ji3mOO3v3sJeuW1YU102+adDHWGH/g2A1VpcldPk//af/hPe97334vb/396IoCnzbt30b3vWud+FXfuVXcOPGDXPcV3/1V+MjH/mI+T2bBfIsTwD3Bfb93rR8H2IGDV+jHDrbjXUV+Y6P+bBSoQBJ95NjIQossmNgF/jV4ycscZk7WFlkpXbXpKnyPpvQM20P8uyHU8y8PnZVD/7893xLhGY1kmjw++oiW0OJjPscN7G6eMOnB+RoicrB4Nk3NPtzF6ZyAXGQJWEmFe6ud/DMLhjxBJ5YHJt6n+Qzi2i5lggqj/YB+p5fX+/ikdlpy815I13h1uykvlbt0kkVbi9OcFrYfeqj87MWAXEJCkdMUj3veQOesXnH7srW9bVdC9EizZGXEiJZIJkXRmMC1EJayj+Tlk2osRLBleAJJMINruielHh878QiKvOk8LumJ7BQ+M7bZJyKmcBtOkkKXW+TcrdKVv7dv/t31u+PfOQjuH37Nj72sY/h//w//0+zfT6f44knntjoWoskx7zfsGLNWun32OuNPX/TQSM2p8ZQS0qv2yVprtt0inb950mOO9kxin2JT99/FIBtTbEIkUdTEvMRhj42+73Sv/p9c1LCycm2iMqQsqcgNYQh7TH0rDcR89I7XpVpKxkcEEg0GCn+7iIqXat6d8G3dISLLsuNG67KB9RZovDa+gYend3HIsn1f7McqzLF6+tdPDo/M4J0Or+rfMK6lPoZqxSQzXt02wP9fnb3FSzLzLxX6jd8M37qWz53dthKcRB6/lMQyFAySv48icjxUPPXVjv6HS4KS1gM1JaT+vR0ple8JteXu8xB5UT5EFHhx1Diwpu7Z3h0fmYJ/buiDccQlCFkZBvW421ZpN1yxYDrnKtm5ehIZwl69NFHre0///M/j9u3b+PmzZt4xzvege/4ju/A7du3vWWsViusVo2o8/j4eFAdQh/0W
Gw6YADjQ16HkhYfxoRO03XnSY55kuOo2MWyJgNz6G1fvHgZx/kC94q5VQYNZsfFAo9L2/zrQ9f7CZGBlUNI+bHuOQvR35Esq9Qcu6xS8y/fxsui37Fw6zSWvADTtkeCr136rCtuW4qZhfuSFg61urgg1wa3nJBOhHK60CJ0HCH3i2vp8K0I7mZ2JivF6+vd2iWTm/d8kC0NmdtL10DaPcj7dCHrUuK4WOAAS5ORloj5ssxwv5hjMcutdk/tai+im/miHU1YgnXaYCkCH2H17Z8lyjybtUNUeLtZzAqzplSaqta79SXfQwI74zYtas3Opb9N5FRSYn++xO2dk9Z990UdboKxYxS987ETMrcf6irHZ7H2nbdJ33ZuZKWqKrz//e/H7//9vx9f9mVfZrZ/zdd8Dd797nfjmWeewac+9Sn83b/7d/EH/+AfxMc+9jHM5/NWOc899xw+9KEPtbbPkhxcMcY/zvNGqHH1DRquviDWAhNSW8doC4b4Ufl1luwjWLAGegTgUOpspM/ufgH/8/iN9b62+HWlUuzNllaZMR9m38fXtz+GoISOp79923y/hxIXoJuEjYFPE7MNdFnyYga1osfaAjRJDc2Ap9oiW0o8yLdTOnQ3CyvBJ6L1IUuawTC0Kq77+zhfYI9lRLuV3cOt7B5eWD6CF5cH3vNDIuQuVw0AQ1SOiwVupKuogSp0zGMzPanwIl1jF807c7Ndm/uo0xjEouveOFHh16H1kwqpCQuRiyVgEUvfoqpAXM6bRZbjRrbGzdmZSTC5KlPcK+Z4fH7S6qtjLe+bEJEpj9v0XPfY0Lnu9mrANURVVfE5hjfA+973PvzUT/0UfuEXfgFvfOMbg8d9/vOfxzPPPIMf+7Efw9d93de19vssK0899RT+7//2f2GxN00n3DUo+JjjGEwlohxjsh+arr3LX7pIcq+1gra9nB/gheXN1gBGOVD23LSW7rVHEA9DooQzSFcZVmVmrCPu8edBbseQlxCmqO+Qtudra7y9cIJMrqA++GbXvgzLoWUjTopZKz8KYAtdaaVbIisuMXEtJ30J4ZZF1lrFm//L61vUSz08u/sKa5e67R2pHbywvInX13Z8rW+JAk5SuKCzz2pKA6nvO+3DqsywLDO8tr4RHenl1p+7cnyRXxTKHnp2VMZJPmtZNNalxMtne2yNqaw+r52Izxd1xdG1UjInKYAdEPDK+gZ+695LrfI2xZB35bPoDp2QhdBX5qb92fIkx4ff+nM4OjrCwcFB57HnYln5y3/5L+Pf/Jt/g//8n/9zJ1EBgCeffBLPPPMMPv7xj3v3z+dzr8VlSsSY5Te13Fy2KJVYxJg4l1WKOfTzOZRnwAJ4db1nZmnUwaZCTU4U5klukZRMFOZZ7+MMyzLDvXIn+E4J3Hw6JYmZ8oOfon7kNhiDELHlIeRDI89mzkBFCC0aOEuUlV0VQIuIZIm9bgsNmjGWFBe+5GcueIZSuh9uiSQcyjOcZAu8vt61BughYtYuUfw8KXCcN5aRNyzuDp4t0/FkrfGRo5BmhedioeO6wtbdMh+b3dd9RjEzx/J2RXl7XKGyu3gqd/31gS906RPPcp1dmqppsmJvMBZ0WXinQF+Zof1TTsoIWyUrVVXhL//lv4x/+S//JX7+538ezz77bO85r776Kp5//nk8+eST26zaIHSZtC7K1UQYI9YdurZMCNz94z6jI7Vj3EGHAJZpVh+bGpdQX127PmJuvaE6uFhWdU4TNMfdlKe4KU+xrDJDXPquvXWh7EitC2ETn3AMUYmxqFjtUKJFWHzL0QPDo4J82MvWfkGu9C/RQP/SIMcTzfUREZ+7yF213BUTA8Dnl4fAog7vZ0iFTh7nC/F2Razu4ogh68rUS44skhz3MTeD9o10haKSOM4X3ggw7qrzucdoP93DbrrGQdpYWKk9LZIcc1ng04Ut1Of3SOHuAFp5Y7gLiEgrWdoO5s31yFVF+VIemZ1a5MTcl1fIPNxiNRRTk5Cp3cw+RNf5soQuv+9978OP/uiP4l//63+N/f19vPjiiwCAw8ND7Ozs4OTkBB/84AfxJ//kn8STTz6J3/zN38Tf/tt/G4899hi+9mu/dtC1FkJhIfxJ4bbB8gghEdOQFx/SFIwRS24jQVjIHwv4G76voS6SHIVsrk8d3JHYHWRlOpRn2E/OOi0CRFIA4EjtYlllWIgcmSiABLiZnGrrSwLcrE5xV+0GScsQbCqU3VTrMqYOMa7IVPhDyjvr4VhYgLgQWUKXhiPmePc6PmsFzcqXBVsJ3BM+6wpcXTdRyA3k5hK6X8wNWaH3dJie4rXkRuv+eG4iKq/vfuk61u96sN2Ty1GuBQBYyAKYASdqYco5KnYNUfGtKcNJYuEQMe7O2k9XeHR2v0XiOB6ZnQatatxFZkKxWQJB/m5nUgt0aWFTwr1ibnQoXUJZ+lamIidTkZBQWoax5bi46Ak5Yatk5fu///sBAF/5lV9pbf/IRz6C97znPZBS4n/9r/+FH/mRH8Hdu3fx5JNP4p3vfCd+/Md/HPv7+5PVo69RbEpmfC95ExN91+C9Sdj1FCr1LqLSZRJclpnJy8L1MfeLOQopW9qVEAkiosLr0vUcqJxllWFZaUvKItOuIuu/Msc95ScsnPwMwSaWtymIC9UB2Jy0uISlS8DNzeT3i7khpkMtJr5B0B3UXYtD4VgkgO71rh6dn2HNXAmuDsVNVBcqB2jWrPLlEiKsygyH2Zn5fSjP8H/c+Dx+9f6Tljuor95AfF6bIUQlpAE7RCOcB4BVbW3xwb1vnwaJns9BtuwkKvMkx7O7X8ALy0d6hdtkGVknjdaFrGAzqXBzdobHZvfbiTZTHaHVNSmjuph9W3C5+LCJyPUi6gBsj9xs3Q3UhZ2dHfz0T//0NqsQhSHhq+7xtD3EamPCvcZi28l9YsOGQx0caUcWaW6F+L6aN0SUCzFToVodl08I6yIfOJh/oTjAvjxrCXCBRuNC+5ZVhqwsRltepghLdp/v2AijmGt3WazcEHve6YfaSiElDrCMFtyGVnh2LTShtVdMAjFnNs//9i0BwZOkhaw6rluJZvQpIw/ufXRloDbfh8zxf9z4PD6ZPI4Xlwe9JMW9Vh+IZPS1my6xus/VyF26XSC3D2FdSnzRzhFuZfei6/HFO1/AS/kB7hcNQXITTXaBNDB8LTNOvsnt5CLWcjwEm4YUD4Wvn+MYOxkLoe++6N6H9oVXZ22gJMc2b6ergS5q98JQxGgNNnYtTPhBxH64vuMeS4+RVykyUeCN2WvIqxTLKsPLuVaAG9cX02/Q3250z5BIqtCHek/t4B4aApKJApkocFOeWkTlZnJq3EVfKPxq9SEf+xQ6p03yucQSFoLv+Q5xC5E1zYfYNa3cwbCV48LZv5uusV7Hk0s3y3JrH60kzvZ3hSr7xJgArOfgtstDeYob6WrQSug+8Gd6kC17o+10XQLJzLrCT9XOYPJ5Wuj1kh6fn+BWdm/woH+YnlqJ7fbTFe4V8yhyt5uuMZeF07aBLxR7eHx+Eu3icfuksa6XeEtX0xfx31PC
V+bUBIaDE+ByAOm+MmQFuBif29R+xzHHjWWqm9QphqRYWhHYkTn1wqW4nemkfqsyw0t5mwwEw5AD99r3MVt1AIzrp6Ub8nysmXPPuUkGlwfP8WGqJHBjLC5D3ZMhUuhzKfrK1pYaBAnLEHDBbpeo9B7mwaii1GNV4fBZVWYsgqjrWF9dADvd/ola4FCeYSHb7fROdozPzw5xtyZaoboANinxPY+DbNkiBNy6Sb+HgH9bzYATIjrtdzWbneHJxREO09NB1zbXgtbKAHUkTtW+hkugyDXkc0UtkhzP7r5iXaML24m2GUZazgu+tA8XjStFVkI4j6idKUz1YxHKzhrCJibIWEsKB1lU/OXV5ybaLEsWlVDHwF0+mShahGEoqAwzGCcwkUK+69J9+FxPQ0kLYaoQ6SEWl6HfRExiOZMwzxNRtizRWvupS/cSWkDRXSTOd/wsUQipIArHheRbbM6NYuLw5QMJ1d+f3yTHkdrxkvBDeYq3Hn4S/++9p/HScr/3GoC9wCDV7cnFEe5k/szefQNubL9wKM9wkC5bRMklC/x93ZqdWLqXGLTCvdNTk/8lFQoH2RJfWO1Z1+LPgrv8vrDaQ7rozjkT+3zovb2s9ATrttfqfL4EY5voupfzIjIPBVkBzj/M2KdrcWc2Y8qbggRNNYvotrz49/FIpwXy1nFHjsiV9h+p3Ul8vHmVIldpi3SYf3uy/sboYzYhLVO00dh2FrpeDHlytS2H8izsNqhSdi33mECG1Bo+N0oXyaHtsw7Xji/k10eC5kkBpMBLy/1gBJPPJUXEiQ+KC1FgIZv38pnVLTw9f7VV3kLkeMPiriErLnzkaZ4UVkTNlBaA0He8ELnWf7BL0WrslEGbQFq0oUSlC0TGllVqrpcKbW3xRSgBmkjRoqv2vcQ9Lx/B5N/AVSInQ2A9jy0Sl4eGrABhF8q20Ze4JzSweEOAI6JEQsf49CBDMYag+KwqXHdC5z2eHmM/aTo07oI4lKdewkKupjEYKswdijEf8SY5U+xrxxOW2H1ufdzIsJDp2LS1Dl1XTFpyvqo2rYcTWpzPDZONRWvpijJthRG3znGsPFaG58A3/EIxRyCYxjx3n4jYFZMepEt88c4XJrHODZ0M3MmO8cnicX0ue27095QkhdrWSuhoKvo9rzK8KhtilwqFx+cnpj3wyEM3SZ5us+F+0K/R87vvNrHsTgXX6tx3zDYQnKROQGIeKrLiw0URGI6xxCHmvD7CE8JQAdgYLMsMS9g5ZWjGnlcpFsiNwPX5/JZONMfqRh8AF5/xj7Trw9z2RzsFNiUuU1rjuupzWEdWBTteIimMbFCdBuX+YNYcrgMB0EpE19Q5pq3bglhXp+Pm8SCERLVAO/mbuYeaRN6anegFNz3alYUovGVb4tl0iRvpCofp6cYWx7BLpHvyQaJgHqFDoHW/NiEqC5G3BrlDeWrVwb0evTNX3EzkFoV2B2Fu33e/sLa9/0jpHFFUJxdTEpEYxFyv65ht9olTWF8uf48dCZ0vww6V3iQ/xia4LEl0xuC8wulc8Jk1RQqh7l8WIse+PMOb5y95PyhXNGtQtrUmLqb+QEOalrGuIUKfeLxrZj0mJXafBovPSg/lKfY9gxJlDs7K2i3CBh9ajsEgMpoulBpgVWZIRV12xWbTnnWt3EgmTlB4+TpFfuElC/rYNsnhz8W1NhmNldrFUbmLF5Y38bv2P9MaCOdJjt10jdNihlmiLD2Na0kZ+ry6EArlDyETBe5kx/hc9Uj7urLJML3JhMY9161PJvSzJzKySHKcsJw4LsFdzHRWXFpcciEL7zOKeRaXhaRMhfPqI/mzrQY8qytDVnw4L1+aC5/5fCoR5VTYhJRsyzdLHwV/V8syMwPZTdY5uO/Wfb/UQV22jmPqNrnZe4x/NlPoIPi7cO99jECdW47mzBrCk9IRMeFkwv0GgzmDmHXEl1umC0RUgsLyhLRYO3h8fmysiYTH5yf4XHlo6VHcsNnQdcdg7Dd9KE/xOTxiuV2MnmjCrq7rO76THeOl/KBFNHm2YF9QwZLWLmMY4so25Wzgin4QwAMZLhIXX4NzwnnHknOEzI3nRVymsJZs+2P0ERUC6Vt4p0D/zoTCAZY4Lhe4W+5a0UUhHcRl+PCAza0tPpy3kBxoEucB7agh2habnJDq77qwxi6Y5rOadCU0tL6VEkAdfaJFmQvv+jDuvfn0O0BjMVxWKV5d7xnLDC39cK/YMeftySX+P4evb9XSGRPm33l+kgMl8EWz1/Gr9/VabpQojq/1MxShFAEhHMpTvJQfGHfkkdpl+zQZWUW4RMdYlvi7vsqk5aI0MByXo9e+IFx0ONaYpHAc/JyL7NR8iLVohBp7KK/KEpq4cGIyEwoHie4cycrSNTh2hR/T/tBg1PVxjrXiTJ3TYKo8LrHl+qIj+pIRLkSOFfwWDiIsIaIyhJC59egKvbfyw9QEYo4cK1HgSO0E09Z3kR8CEZUjtYNX831jgdD3aa9lNU9yk39oasR8yzHtmGvM5kmOg2xpxKsH6VInYBNFdN/RRwpi6kOWrMfSY01gPEkcF6Kw2l2XqyrquiK3/qZ3GdsXxKwLt0kW8m1jKLHcBA81WenCRWX12/Y5Mbio2UFslkYuzL0pT7UIt4ZPlOdD6KN6PD3GbXnPWGpI4Hu33D23TmNqi8um1rzOTKYO3AgvIo6Ua4cTxV53Rhm+jtlWH8NnzEUlLXFlSOdi6ly3GT5LprrmVapJiyeCLua5uu2MkprxqBTKjEwEbY588jYATPtd80F1PznDrewejvMFDjI9aUiFiu6fpnDVzhPb5ZaJAnfSY7xUHHgJJLe49eliuuC6SKgtbXJPLYI9wDJ50fBNBKdyI12TlQG4SFfSeWDKzsxtnH1ROkNNqFTGXbaq8pi6ZaLAm7JXMBMK60rWZRVYyBPclidsdp9jmWR4sTi0ygl1SlN8oLHEawi2QXD5bNJnEet6Dl0anqgIjaQhCtSh8xTzXJOwdAaVUD3812oTFWtW7YnqcbEqM7y2vmFW+aU6m/pHthdudeqLthnyXYwZYOmZ30mPcTRr3C8xy3xMqSfzPYcQYZnX1qAp8764fVBX39BbFkvrQL8fNPiE0Jv2h1eGrOiH0478eJDjyreJ87ae9L2HUH34R+8rI69SoARupqdmcL/LfNY+8PbxRHqE2/IUh7WI9+P5LpZViptJs+AZJyxPpEeteluC4Prv0DpCQ7GNGfYU6Gr31tpK8hR31S7ulTv9GogIchZai2UhCqwcAW1Lo1KiZT3hCA0wRIqs3875vtkvbaNFMI/UDg6ypRVddL+YW0LUV/N98LXGiJisygwvLG+ac2+kq2CWWl7HPtAgsilxyESBw/QURwWF84YXCt0GWuSTvfM76bHRscyTHHOWFG4Tq4p7DrX9XuLbQeRCmi/gwSQtHL5nWzys0UC+h9GnT9gW3EHmIgadqyL2Cr07d2A04tskN2v+uAMKYJuwb8sTzEUFQAAAbtcRR0sW3rquJG7L0/o4YFUJqx53yybPxKLSeprLoqCfEn3tiYgjEng
7bV8495AZGLlq3DJaVo2E9tkCyC7rRwyZ5t8u1ZsPLqGB5pXiwMzmT9SC6VVS4zYBNDFZqRS/ev9Js51nZ310dt9eroC5McYsdMfX7JoCh/IMR8VubbVoh/VeVGQeCWEBePuLPkxp5ZhyYdmHDVerN+3A0A9lqoFmqLn5Gt3wkb28So07iMBJKglmeaQQANxMTrGotQ3LqsKqElhWEguhsKxSHJcLLERuERUAmIuqRVg4FiI3kRJTtqM+orsNtxEvuw9mhsnCzfn5vG5dYb2csPD3GHIZ8SRlyyqzZs6++gEw1g7fPgKJYrmlxnL9dM2Q2XGH8hS/cvZFADT5OC4WJhNtKhQ+t37EkBI3TX2atteyWbGQWzc6rg/bJAwLkeMwPcWdtLH4nEeEjE/U6hPFu2JYF33PJka0z9ura7Gakqg86G6iMXhoyMpQXIZQrWvYaOfmsH3EPEPqIvGbY6nzbLLfSgDKWFKOyzlQu4Aowqi5nqjPqZztOispaV4ATYSWQrukpia+PkLS1REPJTBTRH9Za6YwsWys+Nnt5GnNJiIy9A55XTNR+EPf6+Pc99A1kJrEhCPgEghKmMdFtZSh9vPLQzw+P8Gt2UkTlSQzo1sIiYtD4dFd2LZlIxMF3pi9Zn7ntftqITuyG08AClfuzGszkjANJRjc5cdJ21TochMB7WgiV//iw4NCdq5H3oG4iub9BxGhPBZAPWg5qcytcOZ61n+33LXKmgmlNS+FjgI6SFZYCGVZVTiWTibUmbHStAfqKdtMV8fr2xcrDN9WiHposTdOSqwO1ZN5OOTOpVBfXq4vMueochfItDOXUl18+g3Swaxqi5EhPqVNil3yw58xj06i1YpXZYrTYmbWCOKrVvMFEHlo9bJKg3qQy4hlnbH3sS0M3AQKV47RRXWV4UPMYA/YbZkEvTwqaAqrSkwZvkii2HMuO2m5HnVH4DwFvNdAy33QB+o4fEJbPsgAes0hQIcsL4TWm3x2fQtfvvNJ4yJaVQJzURmLyqoSuFvOjUUF0LoWoCEsnAgtZD6phWUKbDrYjRUiupFYoc6UH0vPcFlllvWMoo90/hL7XdN7WZWZd7Z5hHrl3qR5Fu4K3NYzctxabjZe0uiEnuuXLl7EUbZrxLLLMsNvnDyOJxbHJsyXk65D+AnlUKJykRmc9fux3bFTwZefhEjk0P55E/ePWwb9O69Duh8kTEGotkl4Lk/veY1LAZ9Q76IyM44RDRL4zNztsDhhcSN57qkdPJYe44BFAwG2qPZuOcdxucDaUy/uCuJ1fyI9Yq4nO9T2MpGY80bXzNUbmeMeS+SFkQmyrBwVu8GF/kJh0aGBrjf3DyMsIffRoTzFIfTMe1lmeGJxbBYjXIi2BWmbGqTzQmjhwW0gEwVeqaPwDuXppNe8V+50upk43pi91tJEPUzYlqXm4e0lN8SDMsCM7exaa+1M3GkO6YiHWlY4Yt8TuYmWVYbfPv8c2y7rfxsLyt1y1yJSvE6Uh8WnmbmZnNr3XC8J8CC6FvsWiAzBJ0Ds69Raa0V5wNvTPMmxVKkJoQWc7LRMxMpdO9xkT9FkvrrwGbxVnwQt8kv/8ud0KM+0SHZx2hLv9i2A+aC4fwjbIimhTMnzJMdnVrewrNLJNCN5leLlOp1/rMXEPSbWndSFKcrYBs6rXg9WD3nBeBAGlAdlJrYJiZqqwybT8eN1p/bZ4ha+bPF8fZ3UIih84OEi3btqFzflqWVBIWHummlaghoRsghMrGs5L7h5cGIHpyGDWB+Z8YXghsiwnRrfFuf6yuT5U3qtHSWwFE3b4MdRG6FtL+PAqg8nSzSLdxFa5deHB5EAD8G9cgerMjNaGD5YHtbrOR2p3d5n1tcO8yrFZ9a3zO8p+p6xg/tlJCpAf/j+VLi6rXlCXNaPvis65ipj7H2HXFw3k1PMhMLdtE6FXi5a5/iu10oC5oQ5k7YleP6WhbjnBV7nqbUJY8ETdcUcx+HO2omwuATAl7WUr01Fx+6zTKkU1v707FUrURmHL9JpWxbFBxX7ibZOvVIctMS7h/IMR4HzOPra6b1yB0dqB8syw53seGPXEu8vxhCWy2pZ4dhmHa92i94Al+VjH5JU7kGxqkyNoffNLQG0kNyyyrwaFo6WW8eZtc1YxEcsrIReLET3QcZYa8tlAK+vz6LD864AaC3GyEXDdGwmCisybSFyQLZzAfFrjEleFrofqtdVA0UAEWFZJLl59kfMjTemLRJRobWcpsKDEnnTByIl3m9kS/f24PeMW8B5E5WYwfa8iMhUK/SOBQ/T3MY6NvvJGZ5Ij3C33DXE4Pn8lokGIoTydQB+ywxA7iKdt0Xva9xAtPYQh28GvRTda+k8KHCtLRwXMXD6sr36EFM3d1Vo/s08lh5bepd5kmu3TtIW33KLjZUXYwOLypj7eZARsrDwFPuEGJcPEUyK2qJFMZdVinm12QKFHA+6G8i1Op4Hkv5Dto/v+77vw7PPPovFYoG3vOUt+C//5b+cex1I8LftgcKYidl/5wHKE9H330WD12Pqui1EbqwhlEjqsfQYT2WvWkLZvnfC3107AVuKu+Ucd8u5SRRHmAnVOp6HvC6EXnvoqg8w5/mtEej5UrizK6jls3L+t1tvOlcLYpsw1Xnt2uGgd70qdZTZ3XIXz+e3rJB6c21qA/V1eRu46u1hUxBJeaU4wL1yx7QrymgcCx0Cv2NIKA3CtMTBZegfLwLLOtQ+9N954cKncD/+4z+Ov/pX/yq+7/u+D1/xFV+BH/iBH8DXfM3X4Fd+5Vfw9NNPn0sdttlpXoRr5qp+VGPvixT8tAjhssrwidUTxnTc5fqJgZsVF7BdQj6LCr9Wn0XnMmBb0Shu3pXzho+UuCHlxvLBNEXcssKRV2krUd3n1o9gmaVmMFyIHAvYWW59II3MVbC0bRuH8hQvFQdAqV1z86Q77w2BhLpkLaPcSdTX7MmlRUx9rqQY91LL1XfJ9Sddbp6LwoV/Bf/kn/wTfNM3fRP+/J//8wCA7/me78FP//RP4/u///vx3HPPtY5frVZYrZocGMfHm4WnTdURuH7m88Q2yEnscvXnhYUorJnS0Hvm0RjP57d0B5RM8758HaJLUGLPA+rZNnJLpHkR6Epdvw103WvXYBAaQNpWr2H+dStNPyMqvBzeDnnUyCFg7NZ7colPnT5uFiJciAKZKFqaJxLmXltShoMnoAPskPQY+PqTw5SssM3aUySW5mRjEViLiret0LpFQ3CeBCd0HV8yvvPChbqB1us1Pvaxj+Fd73qXtf1d73oXfvEXf9F7znPPPYfDw0Pz31NPPbVRHbK649gU5xWJs23XDQ/ZvYzoT87VrruO0rln0ul/ZnVr0uyS3J3X5eKbCWUsLvQ330Z1fUIe4WZyiifSIzyVvYrH02PsyzPTVn3/bQOui4q2XTbE3v+Q3D4+sgPw3CnN7NvtsG/XkSPcZQQAnz59FCvl/76s5SCAaItKV5t42EjPY+lx69n2PQP+fjjImsJdfUOfZ59rcagb5aItMRd9/QslK6+88gqUUrhz5461/c6dO3
jxxRe953zgAx/A0dGR+e/5558/j6oOwmXs0B90LFlYqBkkamLF//Ntp1nXzWSFu2p30o/OdSHF6JBschLuABcix0GyxG15D0+nr+GJ9OhCO4yLbNebWJiGJBUMkYSu6xeVxIla4ETZIe/031Gxi3lS4Jnd16xVlbkl1rdKcB8eNjISg0N5Zgmf+54laZiAZqKzcKw0gBbyhsjGvTq8OYTQej15leKV4gC/fPbGKNJyES6ZkH7rInApptBCCOt3VVWtbYT5fI75vJ27YlNMvd5PjAl6DHyzgCmtK77B87JYWo4CWUU53GfRymHBspqGsmAORVeoKf+9rmQrvNkV4fJztCC3FoGyv5ciaw1w55UEbJtuoD4M1bfwb450JATfOjK8bN+zdMOWAd2GViq1SAjHikz3KfD0/FXThvmqzl2D0DUhGYb95MzqJygxXNdzNELrwDFuH7IsM9wrd/DJ5W3cyu7hjdlr/rWKOvoVcvX6yqfzQ7gIvYu7svNFEJgLHYUee+wxSClbVpSXX365ZW05T4RWeN0EXSblTeE29qldQ2aQrFLr7wcBNLviCv9DeTr5x9aX/4YTFiCck6WvTdyWJ/Vx9gKKyyrD3XI3mCZ+KlwWq2GfqNFNCkcCSvpWukiXm/htKPj3Z2brvgEpUPY1QdkMd9Jjs/IxYD/n0LOlTLdcE9eIbtvv7kjt4LftfC5Yh5Bw291urQWFJnydhNpA07/vJ2ctEsPLnGry1Qc3Yy2/ro9kTZVb5kJHnNlshre85S342Z/9WXzt136t2f6zP/uz+ON//I9fYM00Qg17KhKzrSiQbZEXPvPgf19W4sI/5JfVPgA0CaS2JIaOGcy7IoVcEEGknC20EvRCKMxFZRZYXFYSB8kSL4t9fKFezO1hh4+wdMH3XcfoXOayQFq/l3mSW3oWwrKkSKDNhZbX6EYmCivPyqrMsELWSr1vDeplc25MFMxhrSEDYCWlc+EjF/zaiyTHvUJPMLKyWW6BtxWKPKNth/LUWn08eE+eumyLyPD72RYufJR5//vfj2/4hm/Al3/5l+Ptb387fvAHfxCf+cxn8Jf+0l8aVM5cFFgk1bl0AL7Ig01xVSwvFw2aDREO5Rm+ZPaiSco2T3ITwswx1bMOzdh56C+5g7hVxAc6jlu0urAQBZ5OX8dC5HixOLzyIa+xGUlDhGOKZIyLJMe8TgrY5YJsDVoTJhi7RhtZPahzC0vX8w7pSny/3X1d6w/FDN60Nhm5lyhz7gvFHKsyxUG2xJ7U640hAVCTMHJvDUnjvw3CElue77hyQNbvC+/N/vSf/tN49dVX8eEPfxif//zn8WVf9mX4t//23+KZZ54ZVd5FhlZNjW1bXrZBWi4DYfFhIQocl4tgFNCQSJFN0JfTJZSzxWdhIasK/SY8nR7htjzBcTnH3XIXd9XuxsRliEg1dH7ovE3KjnEH8cge95mP1eAsRAEkMLk5ADv1PicqK26qZ8TzKvVVlxFkYeGEZahLok93AujQ5XtqJxjCHAsSsz6eHmOZ2a4gAHgpry2mFBbD3ETUr/URh8silh2DSzGyvPe978V73/verV5jmwluphbn+rAN4rIt0nLRML7mJK8zUmY4wAoHyRJ3k13cLXejEkbFwn03vsHXJ7zliEm65hLBhTMroQUU9d/2WjPnueZQqH2OsWTEvKNYwuK6hUiXMPZ7IsISApXfIi6y8N6XlXJ/Ij//wwozKJfAG7PXmhw5U7nEnUGfL1Y5VfkL6HDpV2q37p3s2LgYX1jexKpM8fj8BLeye7oOHk1LV50fNFwKsrJNhMRAvt9T4DyJy5Sk5aoRFg5NIJrOKq9SfKE6MGn23WOHwGeRCZGSEInpg6traUcUyRZx4ccuRI572K7wdluItbrECm4JXGzL2z7PPBsmXW233DzJcQgtvHTJCc+AupBFb84On8vhmrSMA19/CXjwFtbU0YunxsKyEAUWsgAWwKvrPRznCxznC8yTArdmJ7idHRu3Uh8uexZdF1eerLg4z5ezbeIyJWm5aoSF38/HTp/Fm+cvmXf/mfUt3M42y3wMNM9/jAsp9ngextxct2iRE1pAkVtXAE10aB2cMW0wZp2k80Kf1aVrhWH33pd1VlqyfLTEr06m5CaKyLVuFThSO7hdl3NYH8O/pYUorPY4yu20RcvwVYbvmZGlgoc0cxJzGQdwN5rtEMDhTmPNOapzvQxpW24kz2XHQ0dWOB70j3/KgWITonJZdSocv3z2Rjw91wvNHcqz1sCxyXIJQ4lH3zbfasxd4MSFRwfx9zIlUbks6xeFnhENPKF7XogcK+ESn8IQGHLtWMkHmRvNJR8A8JnVLat9QZ7hM6tb9dpT+nyK5HiQZvYPKkJ9+z6L4iFQzhvKxUPt5zIM4EveTyW2a5EvxsmtL0MsJpfhHmNx+UeZLSIm1Gwq9K1tMgZTRhCNtaxsk6jwmcSmWLDZ87JK8WquQ5nfOHsVN5PTSa1UIVLi7nO3hdLax2a8fRgR0vr4vik+APkSvOntqRUWyrOhArA0KLTvUJ5Z7fRI7eBQntWRQo2r6c7s+JqoXDIYbUhZtMT399SO9331DfAuWdjE1cyTx1FoM8Fdmfux9BifzR/FG7PXost/kPBQkxUXF2Vp8ZkiLyLs9KJcQV3XHFqfLtFwI27UeTE+t34Eh/IUNxM7IRQwbYKlkJiWsKnQl1w/ZFXRv3UEESWLG4qprSchjUgs+khrrCWKk8GjagfzjggOTlTo+jz3xVGxizvZsRbpqhQv5wdYlhmKSurcGPV/RSXxRbPXAThWn/LBmtleFZD7oyscGcDm0T0Tp6MIhVPzceuN2Wv4jdUdvHn+UvC8GFxG99BDR1bcRnqZXEFTLah4Xmb6MVaVbZOhvvKP1A5uZzqB06dOD3E028VT2avWDH1ZxZlR+7LWxsINV3bdQL6stySqJa1KV9mxbXyKdjMl8ew7t5UGPSKiygVZQexybGuKry7LKjWr8gLAUbGLopJIhTIJ4g7TUzPLfaU4MO+UJiI8a6kPl6lvuipwc5IA0w7IY0Wrsf12zNj15vlLxrW1ELmxFMbWa9vPaCweOrLyoOC81np5GLEqMxzKMzw+P8Enzx7Hly2axTAXIsfN5NRrjXA7lCGuI+rE3HNCIdRDBlw3Guio1MsLHJeLSXKsxOK8rXJuFA+hS8tyt/brv1QcGIvJkdrxluWzBJGQ8TA9NZlR51Vad+aNYJEigG7WCcM+mz9q9DBca9A1uF2LajXc9julK23qQfhmR4I4oPu7HpqewrUQuWuFuc8plkj15f8JpdffNqG50FWXzxsP04d/WdZwIaxKv0ZgLDZ5l7TQ2Z5c4rhY4P939myLnHAdC2C7D1yC4WpNOnNodJCU0LnNOU7n05Gqf13JQVlsY60qIffaRUeStdLbB0LGaaXbo2JXL0IYUW9yjx4VuzhRCxSVNOfTtbmW5TA9bVlrXF0Lr8/D1C+NwYOg85kybxOV14WYNnMemszztLg89FP3yzp7uQirypABZ4gLaOhAFu228BwX+/FQPozH5yf4n8dvx
OKwAGaw9CsxlpOxbjd+XsgS4FuhmYct3y3ndVk85FZHAb2o+onKkHrzdzhWiN0mW2OF5eEVjglusjfualtWKQpa6iCwxkoIaUd6cEpEaFb1nnIh1Ick30pMvxcST4/BJm6bTeDLVO2WG8zXRFFqPW1hSD/atRhiaIwMWVzc86fClSMrMZqUy/rBP2gJi/owiPxM9E7cckIfDR23J5eYJXv45NnjWFYp7qTHWFaZTnntGeyAcEflIzduaLRbjo8w8HWD+PWXVQYkS//9mHWGUrys9ntXXx5LVIbAHaynGrxD5XAS43PrhCKAukDRY2Q5IeEsYJvhafud7Ni8Vx/BG5ox19d+r1I/MdWkbJNnMpSwTEFUABiXJM8PtITtLubX5O3mXrkTXDokqg71PYeIz6b9cR9xGUMSrxRZ4Q9+SJz5ZSIvU3282xTZPgh5VQhdArF5PQue7+X4n8dvxKpM8Sk8rhcPS5f44p0vBFdT5QiRFN878M2WXBMyERV3ocOuTvK4trLQWkBTYCqScl7wJWwD2mHHS5ViTy5b/QR30/hWGCfS4h5PbexOdmwWtWtytHDCy8KimQM+dpC9TP3UFLhMmrzzzubKJxNceJ0J/1IMHBuv8xVpmZkKoessy8xa36wPl6e1bIhVlVoJxR+0D/uiP9yYgWkbg9DQ91R06DSAbjN96Jp30mP81r2X8KnTxwAAp8UM61LixeUB0kThdx58FnfSY0CGwxF9uhU+UN1MTnvJi1tmqCz7fDvraoygNobIXracO2PA6+OSD0p774If19pPURWyWbCTCC+RFP5sD+UZPru+hUN5qssqA+UOwL1yBy/nB3h69uoDZ13ZdmqGrjJjnlVf5MtUC3i6Vk/aT3mA+lzLmSiwrDLzPB+0tPljcbl6lwvAZSE1U0X/bNOiso1Vlae2bLlkhsiL7xqrMgNqfcHTs1exLDPcL+a4u9adSZoozBKF/3n8RvzOg8/qzkTmLUuHz898kKxwgJWl1ZhVui7H5cJ7vuv+CWlZ+DtYVmnLZbQpLhtRIQGrm/qe9hFCa/34ygJgJX/jiCEToW/BvFOp/z1Su9bKuaHjY2Hyt1TZA0FW3D7toiZlU+pcYkApA/IqxVJkJkqI3tsUEU7UDxFhuep46MnKRWPqj3dstsTYhF3cJB5TZszAN0Q86FpOQpaWWAvLZ1a3cKdeJ2iR5FjMctxIV3htfQOvr3cxqwnLq/m+0bP4IoI49OKB+voLK/ssbSsskjFj29eVNPoU+m8hchx4tCr8/CGEpU84PCR52yYkZQgpihH4hiKVAODOrF4LKoHx9bsuuaGh6b70+S75fiw9Rl6lXsJCIcwxGVGBpq84TE9HzfI3AddVXAThiArlHfhMQjqXkIXF544NbVuWGe6VjQXFdc12PcchkXkrZA/NEg7XZOWCcZ1PpcGUVhYiMTGk5aX8wHRMxqyfnuIg28fnzg4BAF9Y7eFX8EX44sXLeDw9xs3ktBWtY+5DFJiLCgshsKzshQW7fLQ846wRwAWsK+tKDiYpU2NoRNh5Zkj2Cfwo9wT9poRstCpvXqXdOW+YzkTn6jnFvjzDQuS4q3abcp38F+Y69bsMDSzuIMlJCg1gviR2seD9zNh+57ISlbHoWvwyBi7Bpb/zejHLULg6tUWeIBCAtTZViHy57+Cl4uBCU+yfl+D7epS8ghiyCN62MXSAGkpYUqE6dSx8XwxxAZow2y+avY5UKHx+eYh5UuA4X+BX1RvwSXkbb937BJ5O7Q7CtxryQtDCghXb1qTC1/sy653dTE6xFJm1366fPpYIC8dlJb480dq2CQtvP/r9NySCYGUMrtucu0q6+5tbWuZJQzruql3cK3fw2fxRQyT2kzOLsNCxsQvkGRcCe/eUkO52tvkaQ5epnUxFRvpSDHTBN+AO0YK4185qq5uJ4oOfILoaHlqGAUAwMohA39KqtuJsEh3kItZt5pKtoe1ySDu8PC32AcDUDHJbHcZlISpjMISo9IltQ8f35coAYASYX7p4EQDw2voG5klhzv/E6g7uqR08nh7jtrxn3GO0Tg/QEJSFEIa0ALStwFrkVjK6rvfmzt6WVWZywtC1h6wBdF5LMpwXYtqNm0XYet6JfZz7nfNBxTL516SC8HJ+YAYRvoIvL6Or7lbaBRb+vKxSHBW77Xr34DKREo6pl3aIWTvKhW/xyzH9uzdfSi2o9lpPPAgexywtQLid60Vaw1a7GPS1lSlz4DREPJ5gXc6WPAJ5lSI9hw/zsn78hE06gW3MeC8i10offKTFnUUdqR3crt0wd7Jj3C/mmEv90d0v5jhMtZ/4rtqts93aVpWGnNgg19BCKCyFsgbOUH4V2m+Xk+MgWdUWmtRoXWKwrWig8yxvCE7UAi8VB8bqsZD2bNU70IRQtmeTvudJYcr7adMZx1pUzPFCL7a4KjOcqAVWKsWt2UlvGYTz6qvOg/j2tZ8xK7T7LNAxhMWXM8kH12LnWh9cd59vQVu+n79PnuBwGXHvFzluha69EPkAqnKFyMrDiKk6icEZZiMb/jYHqKFWla4y3Igh6kRezg9wu15Vdy4Ly+X0ar6PRb0su0+7kgk9HVoIypXCFyHkpKaJBOL/GoFtmZn1ZFy0kq71PO+pB5Uh0WGhCJ4pEENyX833ATTi2lA6fg5v/pwkx4KtAUQdMY9U4onk6JwYtI4r9TNeIcNKpZjLAofyrHMw3dagdN6WuDFtpOuc0GDeRzg6127qWdNrWdmJ11whNv/bXdfHB9ct6S5z4YsQu+yT6yG4OnfykOFBMONvU6/Sp1UZAlotl2CJI+vOh5KI8esfqR08lh5jXUnjiiGLCpEUF3mlHdJLJxLIhcmvAlu3wi0tx+XCRAnFRksMiQ6LiuTaQjh7H8ZY4FYqxUq2dSBUnjsghdKeGyQwIcT0nIzlRuSYV1mUhqDvXuZJDqgdrMoUN9JV5wx6ioidi+hXNiWvvqUcQtfwPT/XeumzbMQmhnS3EWGhcoagL7x5nuSY15aVVZnhqKMuQxCy5nYRu5jElZviypOVKcWmFylcvUyi2Vhc9OJ2IaxUndBLNh9/yMpCmCc5TtRC75eaWM2THItEu2MOk8IQlUwkmAt9/qpq3hkRFSDcwbaysBKBS2zLDxfZUtbaKdcCGoKhhGWMyHYKF+GJWuBI7baSb3URlS6QfoVyqnDx7D7OvJaZvsRj/Dh6n/Mkx0G2NKn8Q3WhOgzBeZKTqfoDX1uLISy+OoRW6+5zB7VchxFtpiu9feia3DXk278QBZA2Kz0fqR0cQUeMxbbjUBvwuzentd4uH8YMtssqg9gwbj32OmMQK57cxrVdcP/uthYvvIwgkuL7TcSlRVpMXhV7P/mJ9T5lEZUQSHC7qgRuJivMRWVCmcnSYmdarQc7NO8or1IskONmcmo0K7T9qmIsUeGWt7kssFIpXqoFsEDzXPnMl4ggoeu7DUX2eF12zj1ELQzH6pEK1bIKXBbrydajuyLvs+84H5lxtS7BRIwD3UGx6Eqa1xV9Y66bACibe6d38XIdNdZXtzFtYYw+aApc
[... base64-encoded PNG data omitted: rendered output of the T2M contour plot produced by the cell below ...]", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "t2m = out[0, 12].cpu().numpy()\n", + "\n", + "lat = np.linspace(-90, 90, out.shape[-2])\n", + "lon = np.linspace(-180, 180, out.shape[-1])\n", + "X, Y = np.meshgrid(lon, lat)\n", + "\n", + "plt.contourf(X, Y, t2m, 100)\n", + "plt.gca().set_aspect(\"equal\")\n", + "plt.show()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/PrithviWxC_rollout.ipynb b/examples/PrithviWxC_rollout.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..ba116a7a03039a94a63245fd2458bd93ca692bdb --- /dev/null +++ b/examples/PrithviWxC_rollout.ipynb @@ -0,0 +1,3670 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# PrithviWxC Rollout Inference\n", + "If you haven't already, take a look at the exmaple for the PrithviWxC core\n", + "model, as we will pass over the points covered there.\n", + "\n", + "Here we will introduce the PrithviWxC model that was trained furhter for\n", + "autoregressive rollout, a common strategy to increase accuracy and stability of\n", + "models when applied to forecasting-type tasks. " + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import random\n", + "from pathlib import Path\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import torch\n", + "from huggingface_hub import hf_hub_download, snapshot_download\n", + "\n", + "# Set backend etc.\n", + "torch.jit.enable_onednn_fusion(True)\n", + "if torch.cuda.is_available():\n", + " torch.backends.cudnn.benchmark = True\n", + " torch.backends.cudnn.deterministic = True\n", + "\n", + "# Set seeds\n", + "random.seed(42)\n", + "if torch.cuda.is_available():\n", + " torch.cuda.manual_seed(42)\n", + "torch.manual_seed(42)\n", + "np.random.seed(42)\n", + "\n", + "# Set device\n", + "if torch.cuda.is_available():\n", + " device = torch.device(\"cuda\")\n", + "else:\n", + " device = torch.device(\"cpu\")\n", + "\n", + "# Set variables\n", + "surface_vars = [\n", + " \"EFLUX\",\n", + " \"GWETROOT\",\n", + " \"HFLUX\",\n", + " \"LAI\",\n", + " \"LWGAB\",\n", + " \"LWGEM\",\n", + " \"LWTUP\",\n", + " \"PS\",\n", + " \"QV2M\",\n", + " \"SLP\",\n", + " \"SWGNT\",\n", + " \"SWTNT\",\n", + " \"T2M\",\n", + " \"TQI\",\n", + " \"TQL\",\n", + " \"TQV\",\n", + " \"TS\",\n", + " \"U10M\",\n", + " \"V10M\",\n", + " \"Z0M\",\n", + "]\n", + "static_surface_vars = [\"FRACI\", \"FRLAND\", \"FROCEAN\", \"PHIS\"]\n", + "vertical_vars = [\"CLOUD\", \"H\", \"OMEGA\", \"PL\", \"QI\", \"QL\", \"QV\", \"T\", \"U\", \"V\"]\n", + "levels = [\n", + " 34.0,\n", + " 39.0,\n", + " 41.0,\n", + " 43.0,\n", + " 44.0,\n", + " 45.0,\n", + " 48.0,\n", + " 51.0,\n", + " 53.0,\n", + " 56.0,\n", + " 63.0,\n", + " 68.0,\n", + " 71.0,\n", + " 72.0,\n", + "]\n", + "padding = {\"level\": [0, 0], \"lat\": [0, -1], \"lon\": [0, 0]}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Lead time\n", + "When performing auto-regressive rollout, the intermediate steps require the\n", + "static data at those times and---if using `residual=climate`---the 
intermediate\n", + "climatology. We provide a dataloader that extends the MERRA2 loader of the\n", + "core model, adding in these additional terms. Further, it return target data for\n", + "the intermediate steps if those are required for loss terms. \n", + "\n", + "The `lead_time` flag still lets the target time for the model, however now it\n", + "only a single value and must be a positive integer multiple of the `-input_time`. " + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "lead_time = 3 # This variable can be change to change the task\n", + "input_time = -3 # This variable can be change to change the task" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Data file\n", + "MERRA-2 data is available from 1980 to the present day,\n", + "at 3-hour temporal resolution. The dataloader we have provided\n", + "expects the surface data and vertical data to be saved in\n", + "separate files, and when provided with the directories, will\n", + "search for the relevant data that falls within the provided time range.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "159bec6eee1846d680fe284324094487", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Fetching 1 files: 0%| | 0/1 [00:00 dict[str, Tensor]:\n", + " \"\"\"Prepressing function for MERRA2 Dataset\n", + "\n", + " Args:\n", + " batch (dict): List of training samples, each sample should be a\n", + " dictionary with the following keys::\n", + "\n", + " 'sur_static': Numpy array of shape (3, lat, lon). For each pixel (lat, lon), the first dimension indexes sin(lat), cos(lon), sin(lon).\n", + " 'sur_vals': Torch tensor of shape (parameter, time, lat, lon).\n", + " 'sur_tars': Torch tensor of shape (parameter, time, lat, lon).\n", + " 'ulv_vals': Torch tensor of shape (parameter, level, time, lat, lon).\n", + " 'ulv_tars': Torch tensor of shape (parameter, level, time, lat, lon).\n", + " 'sur_climate': Torch tensor of shape (parameter, lat, lon)\n", + " 'ulv_climate': Torch tensor of shape (parameter, level, lat, lon)\n", + " 'lead_time': Integer.\n", + " 'input_time': Integer.\n", + "\n", + " padding: Dictionary with keys 'level', 'lat', 'lon', each of dim 2.\n", + "\n", + " Returns:\n", + " Dictionary with the following keys::\n", + "\n", + " 'x': [batch, time, parameter, lat, lon]\n", + " 'y': [batch, parameter, lat, lon]\n", + " 'static': [batch, parameter, lat, lon]\n", + " 'lead_time': [batch]\n", + " 'input_time': [batch]\n", + " 'climate (Optional)': [batch, parameter, lat, lon]\n", + "\n", + " Note:\n", + " Here, for x and y, 'parameter' is [surface parameter, upper level,\n", + " parameter x level]. 
Similarly for the static information we have\n", + " [sin(lat), cos(lon), sin(lon), cos(doy), sin(doy), cos(hod), sin(hod),\n", + " ...].\n", + " \"\"\" # noqa: E501\n", + " b0 = batch[0]\n", + " nbatch = len(batch)\n", + " data_keys = set(b0.keys())\n", + "\n", + " essential_keys = {\n", + " \"sur_static\",\n", + " \"sur_vals\",\n", + " \"sur_tars\",\n", + " \"ulv_vals\",\n", + " \"ulv_tars\",\n", + " \"input_time\",\n", + " \"lead_time\",\n", + " }\n", + "\n", + " climate_keys = {\n", + " \"sur_climate\",\n", + " \"ulv_climate\",\n", + " }\n", + "\n", + " all_keys = essential_keys | climate_keys\n", + "\n", + " if not essential_keys.issubset(data_keys):\n", + " raise ValueError(\"Missing essential keys.\")\n", + "\n", + " if not data_keys.issubset(all_keys):\n", + " raise ValueError(\"Unexpected keys in batch.\")\n", + "\n", + " # Bring all tensors from the batch into a single tensor\n", + " upl_x = torch.empty((nbatch, *b0[\"ulv_vals\"].shape))\n", + " upl_y = torch.empty((nbatch, *b0[\"ulv_tars\"].shape))\n", + "\n", + " sur_x = torch.empty((nbatch, *b0[\"sur_vals\"].shape))\n", + " sur_y = torch.empty((nbatch, *b0[\"sur_tars\"].shape))\n", + "\n", + " sur_sta = torch.empty((nbatch, *b0[\"sur_static\"].shape))\n", + "\n", + " lead_time = torch.empty((nbatch,), dtype=torch.float32)\n", + " input_time = torch.empty((nbatch,), dtype=torch.float32)\n", + "\n", + " for i, rec in enumerate(batch):\n", + " sur_x[i] = rec[\"sur_vals\"]\n", + " sur_y[i] = rec[\"sur_tars\"]\n", + "\n", + " upl_x[i] = rec[\"ulv_vals\"]\n", + " upl_y[i] = rec[\"ulv_tars\"]\n", + "\n", + " sur_sta[i] = rec[\"sur_static\"]\n", + "\n", + " lead_time[i] = rec[\"lead_time\"]\n", + " input_time[i] = rec[\"input_time\"]\n", + "\n", + " return_value = {\n", + " \"lead_time\": lead_time,\n", + " \"input_time\": input_time,\n", + " }\n", + "\n", + " # Reshape (batch, parameter, level, time, lat, lon) ->\n", + " # (batch, time, parameter, level, lat, lon)\n", + " upl_x = upl_x.permute((0, 3, 1, 2, 4, 5))\n", + " upl_y = upl_y.permute((0, 3, 1, 2, 4, 5))\n", + " # Reshape (batch, parameter, time, lat, lon) ->\n", + " # (batch, time, parameter, lat, lon)\n", + " sur_x = sur_x.permute((0, 2, 1, 3, 4))\n", + " sur_y = sur_y.permute((0, 2, 1, 3, 4))\n", + "\n", + " # Pad\n", + " padding_2d = (*padding[\"lon\"], *padding[\"lat\"])\n", + "\n", + " def pad2d(x):\n", + " return torch.nn.functional.pad(x, padding_2d, mode=\"constant\", value=0)\n", + "\n", + " padding_3d = (*padding[\"lon\"], *padding[\"lat\"], *padding[\"level\"])\n", + "\n", + " def pad3d(x):\n", + " return torch.nn.functional.pad(x, padding_3d, mode=\"constant\", value=0)\n", + "\n", + " sur_x = pad2d(sur_x).contiguous()\n", + " upl_x = pad3d(upl_x).contiguous()\n", + " sur_y = pad2d(sur_y).contiguous()\n", + " upl_y = pad3d(upl_y).contiguous()\n", + " return_value[\"static\"] = pad2d(sur_sta).contiguous()\n", + "\n", + " # Remove time for targets\n", + " upl_y = torch.squeeze(upl_y, 1)\n", + " sur_y = torch.squeeze(sur_y, 1)\n", + "\n", + " # We stack along the combined parameter x level dimension\n", + " return_value[\"x\"] = torch.cat(\n", + " (sur_x, upl_x.view(*upl_x.shape[:2], -1, *upl_x.shape[4:])), dim=2\n", + " )\n", + " return_value[\"y\"] = torch.cat(\n", + " (sur_y, upl_y.view(upl_y.shape[0], -1, *upl_y.shape[3:])), dim=1\n", + " )\n", + "\n", + " if climate_keys.issubset(data_keys):\n", + " sur_climate = torch.empty((nbatch, *b0[\"sur_climate\"].shape))\n", + " ulv_climate = torch.empty((nbatch, *b0[\"ulv_climate\"].shape))\n", + " for i, rec in 
enumerate(batch):\n", + " sur_climate[i] = rec[\"sur_climate\"]\n", + " ulv_climate[i] = rec[\"ulv_climate\"]\n", + " sur_climate = pad2d(sur_climate)\n", + " ulv_climate = pad3d(ulv_climate)\n", + "\n", + " return_value[\"climate\"] = torch.cat(\n", + " (\n", + " sur_climate,\n", + " ulv_climate.view(nbatch, -1, *ulv_climate.shape[3:]),\n", + " ),\n", + " dim=1,\n", + " )\n", + "\n", + " return return_value\n", + "\n", + "\n", + "def input_scalers(\n", + " surf_vars: list[str],\n", + " vert_vars: list[str],\n", + " levels: list[float],\n", + " surf_path: str | Path,\n", + " vert_path: str | Path,\n", + ") -> tuple[Tensor, Tensor]:\n", + " \"\"\"Reads the input scalers\n", + "\n", + " Args:\n", + " surf_vars: surface variables to be used.\n", + " vert_vars: vertical variables to be used.\n", + " levels: MERRA2 levels to use.\n", + " surf_path: path to surface scalers file.\n", + " vert_path: path to vertical level scalers file.\n", + "\n", + " Returns:\n", + " mu (Tensor): mean values\n", + " var (Tensor): varience values\n", + " \"\"\"\n", + " with h5py.File(Path(surf_path), \"r\", libver=\"latest\") as surf_file:\n", + " stats = [x.decode().lower() for x in surf_file[\"statistic\"][()]]\n", + " mu_idx = stats.index(\"mu\")\n", + " sig_idx = stats.index(\"sigma\")\n", + "\n", + " s_mu = torch.tensor([surf_file[k][()][mu_idx] for k in surf_vars])\n", + " s_sig = torch.tensor([surf_file[k][()][sig_idx] for k in surf_vars])\n", + "\n", + " with h5py.File(Path(vert_path), \"r\", libver=\"latest\") as vert_file:\n", + " stats = [x.decode().lower() for x in vert_file[\"statistic\"][()]]\n", + " mu_idx = stats.index(\"mu\")\n", + " sig_idx = stats.index(\"sigma\")\n", + "\n", + " lvl = vert_file[\"lev\"][()]\n", + " l_idx = [np.where(lvl == v)[0].item() for v in levels]\n", + "\n", + " v_mu = np.array([vert_file[k][()][mu_idx, l_idx] for k in vert_vars])\n", + " v_sig = np.array([vert_file[k][()][sig_idx, l_idx] for k in vert_vars])\n", + "\n", + " v_mu = torch.from_numpy(v_mu).view(-1)\n", + " v_sig = torch.from_numpy(v_sig).view(-1)\n", + "\n", + " mu = torch.cat((s_mu, v_mu), dim=0).to(torch.float32)\n", + " sig = torch.cat((s_sig, v_sig), dim=0).to(torch.float32).clamp(1e-4, 1e4)\n", + " return mu, sig\n", + "\n", + "\n", + "def static_input_scalers(\n", + " scalar_path: str | Path, stat_vars: list[str], unscaled_params: int = 7\n", + ") -> tuple[Tensor, Tensor]:\n", + " scalar_path = Path(scalar_path)\n", + "\n", + " with h5py.File(scalar_path, \"r\", libver=\"latest\") as scaler_file:\n", + " stats = [x.decode().lower() for x in scaler_file[\"statistic\"][()]]\n", + " mu_idx = stats.index(\"mu\")\n", + " sig_idx = stats.index(\"sigma\")\n", + "\n", + " mu = torch.tensor([scaler_file[k][()][mu_idx] for k in stat_vars])\n", + " sig = torch.tensor([scaler_file[k][()][sig_idx] for k in stat_vars])\n", + "\n", + " z = torch.zeros(unscaled_params, dtype=mu.dtype, device=mu.device)\n", + " o = torch.ones(unscaled_params, dtype=sig.dtype, device=sig.device)\n", + " mu = torch.cat((z, mu), dim=0).to(torch.float32)\n", + " sig = torch.cat((o, sig), dim=0).to(torch.float32)\n", + "\n", + " return mu, sig.clamp(1e-4, 1e4)\n", + "\n", + "\n", + "def output_scalers(\n", + " surf_vars: list[str],\n", + " vert_vars: list[str],\n", + " levels: list[float],\n", + " surf_path: str | Path,\n", + " vert_path: str | Path,\n", + ") -> Tensor:\n", + " surf_path = Path(surf_path)\n", + " vert_path = Path(vert_path)\n", + "\n", + " with h5py.File(surf_path, \"r\", libver=\"latest\") as surf_file:\n", + " svars = 
torch.tensor([surf_file[k][()] for k in surf_vars])\n", + "\n", + " with h5py.File(vert_path, \"r\", libver=\"latest\") as vert_file:\n", + " lvl = vert_file[\"lev\"][()]\n", + " l_idx = [np.where(lvl == v)[0].item() for v in levels]\n", + " vvars = np.array([vert_file[k][()][l_idx] for k in vert_vars])\n", + " vvars = torch.from_numpy(vvars).view(-1)\n", + "\n", + " var = torch.cat((svars, vvars), dim=0).to(torch.float32).clamp(1e-7, 1e7)\n", + "\n", + " return var\n", + "\n", + "\n", + "class SampleSpec:\n", + " \"\"\"\n", + " A data class to collect the information used to define a sample.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " inputs: tuple[pd.Timestamp, pd.Timestamp],\n", + " lead_time: int,\n", + " target: pd.Timestamp | list[pd.Timestamp],\n", + " ):\n", + " \"\"\"\n", + " Args:\n", + " inputs: Tuple of timestamps. In ascending order.\n", + " lead_time: Lead time. In hours.\n", + " target: Timestamp of the target. Can be before or after the inputs.\n", + " \"\"\"\n", + " if not inputs[0] < inputs[1]:\n", + " raise ValueError(\n", + " \"Timestamps in `inputs` should be in strictly ascending order.\"\n", + " )\n", + "\n", + " self.inputs = inputs\n", + " self.input_time = (inputs[1] - inputs[0]).total_seconds() / 3600\n", + " self.lead_time = lead_time\n", + " self.target = target\n", + "\n", + " self.times = [*inputs, target]\n", + " self.stat_times = [inputs[-1]]\n", + "\n", + " @property\n", + " def climatology_info(self) -> tuple[int, int]:\n", + " \"\"\"Get the required climatology info.\n", + "\n", + " :return: information required to obtain climatology data. Essentially\n", + " this is the day of the year and hour of the day of the target\n", + " timestamp, with the former restricted to the interval [1, 365].\n", + " :rtype: tuple\n", + " \"\"\"\n", + " return (min(self.target.dayofyear, 365), self.target.hour)\n", + "\n", + " @property\n", + " def year(self) -> int:\n", + " return self.inputs[1].year\n", + "\n", + " @property\n", + " def dayofyear(self) -> int:\n", + " return self.inputs[1].dayofyear\n", + "\n", + " @property\n", + " def hourofday(self) -> int:\n", + " return self.inputs[1].hour\n", + "\n", + " def _info_str(self) -> str:\n", + " iso_8601 = \"%Y-%m-%dT%H:%M:%S\"\n", + "\n", + " return (\n", + " f\"Issue time: {self.inputs[1].strftime(iso_8601)}\\n\"\n", + " f\"Lead time: {self.lead_time} hours ahead\\n\"\n", + " f\"Input delta: {self.input_time} hours\\n\"\n", + " f\"Target time: {self.target.strftime(iso_8601)}\"\n", + " )\n", + "\n", + " @classmethod\n", + " def get(cls, timestamp: pd.Timestamp, dt: int, lead_time: int):\n", + " \"\"\"Given a timestamp and lead time, generates a SampleSpec object\n", + " describing the sample further.\n", + "\n", + " Args:\n", + " timestamp: Timestamp of the sample, i.e. this is the larger of the\n", + " two input timestamps.\n", + " dt: Time between input samples, in hours.\n", + " lead_time: Lead time.
In hours.\n", + "\n", + " Returns:\n", + " SampleSpec\n", + " \"\"\" # noqa: E501\n", + " assert dt > 0, \"dt should be possitive\"\n", + " lt = pd.to_timedelta(lead_time, unit=\"h\")\n", + " dt = pd.to_timedelta(dt, unit=\"h\")\n", + "\n", + " if lead_time >= 0:\n", + " timestamp_target = timestamp + lt\n", + " else:\n", + " timestamp_target = timestamp - dt + lt\n", + "\n", + " spec = cls(\n", + " inputs=(timestamp - dt, timestamp),\n", + " lead_time=lead_time,\n", + " target=timestamp_target,\n", + " )\n", + "\n", + " return spec\n", + "\n", + " def __repr__(self) -> str:\n", + " return self._info_str()\n", + "\n", + " def __str__(self) -> str:\n", + " return self._info_str()\n", + "\n", + "\n", + "class Merra2Dataset(Dataset):\n", + " \"\"\"MERRA2 dataset. The dataset unifies surface and vertical data as well as\n", + " optional climatology.\n", + "\n", + " Samples come in the form of a dictionary. Not all keys support all\n", + " variables, yet the general ordering of dimensions is\n", + " parameter, level, time, lat, lon\n", + "\n", + " Note:\n", + " Data is assumed to be in NetCDF files containing daily data at 3-hourly\n", + " intervals. These follow the naming patterns\n", + " MERRA2_sfc_YYYYMMHH.nc and MERRA_pres_YYYYMMHH.nc and can be located in\n", + " two different locations. Optional climatology data comes from files\n", + " climate_surface_doyDOY_hourHOD.nc and\n", + " climate_vertical_doyDOY_hourHOD.nc.\n", + "\n", + "\n", + " Note:\n", + " `_get_valid_timestamps` assembles a set of all timestamps for which\n", + " there is data (with hourly resolutions). The result is stored in\n", + " `_valid_timestamps`. `_get_valid_climate_timestamps` does the same with\n", + " climatology data and stores it in `_valid_climate_timestamps`.\n", + "\n", + " Based on this information, `samples` generates a list of valid samples,\n", + " stored in `samples`. Here the format is::\n", + "\n", + " [\n", + " [\n", + " (timestamp 1, lead time A),\n", + " (timestamp 1, lead time B),\n", + " (timestamp 1, lead time C),\n", + " ],\n", + " [\n", + " (timestamp 2, lead time D),\n", + " (timestamp 2, lead time E),\n", + " ]\n", + " ]\n", + "\n", + " That is, the outer list iterates over timestamps (init times), the\n", + " inner over lead times. 
Only valid entries are stored.\n", + " \"\"\"\n", + "\n", + " valid_vertical_vars = [\n", + " \"CLOUD\",\n", + " \"H\",\n", + " \"OMEGA\",\n", + " \"PL\",\n", + " \"QI\",\n", + " \"QL\",\n", + " \"QV\",\n", + " \"T\",\n", + " \"U\",\n", + " \"V\",\n", + " ]\n", + " valid_surface_vars = [\n", + " \"EFLUX\",\n", + " \"GWETROOT\",\n", + " \"HFLUX\",\n", + " \"LAI\",\n", + " \"LWGAB\",\n", + " \"LWGEM\",\n", + " \"LWTUP\",\n", + " \"PRECTOT\",\n", + " \"PS\",\n", + " \"QV2M\",\n", + " \"SLP\",\n", + " \"SWGNT\",\n", + " \"SWTNT\",\n", + " \"T2M\",\n", + " \"TQI\",\n", + " \"TQL\",\n", + " \"TQV\",\n", + " \"TS\",\n", + " \"U10M\",\n", + " \"V10M\",\n", + " \"Z0M\",\n", + " ]\n", + " valid_static_surface_vars = [\"FRACI\", \"FRLAND\", \"FROCEAN\", \"PHIS\"]\n", + "\n", + " valid_levels = [\n", + " 34.0,\n", + " 39.0,\n", + " 41.0,\n", + " 43.0,\n", + " 44.0,\n", + " 45.0,\n", + " 48.0,\n", + " 51.0,\n", + " 53.0,\n", + " 56.0,\n", + " 63.0,\n", + " 68.0,\n", + " 71.0,\n", + " 72.0,\n", + " ]\n", + "\n", + " timedelta_input = pd.to_timedelta(3, unit=\"h\")\n", + "\n", + " def __init__(\n", + " self,\n", + " time_range: tuple[str | pd.Timestamp, str | pd.Timestamp],\n", + " lead_times: list[int],\n", + " input_times: list[int],\n", + " data_path_surface: str | Path,\n", + " data_path_vertical: str | Path,\n", + " climatology_path_surface: str | Path | None = None,\n", + " climatology_path_vertical: str | Path | None = None,\n", + " surface_vars: list[str] | None = None,\n", + " static_surface_vars: list[str] | None = None,\n", + " vertical_vars: list[str] | None = None,\n", + " levels: list[float] | None = None,\n", + " roll_longitudes: int = 0,\n", + " positional_encoding: str = \"absolute\",\n", + " rtype: type = np.float32,\n", + " dtype: torch.dtype = torch.float32,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " data_path_surface: Location of surface data.\n", + " data_path_vertical: Location of vertical data.\n", + " climatology_path_surface: Location of (optional) surface\n", + " climatology.\n", + " climatology_path_vertical: Location of (optional) vertical\n", + " climatology.\n", + " surface_vars: Surface variables.\n", + " static_surface_vars: Static surface variables.\n", + " vertical_vars: Vertical variables.\n", + " levels: Levels.\n", + " time_range: Used to subset data.\n", + " lead_times: Lead times for generalized forecasting.\n", + " roll_longitudes: Set to a non-zero value to roll data by a random\n", + " amount along the longitude dimension.\n", + " positional_encoding: possible values are\n", + " ['absolute' (default), 'fourier'].\n", + " 'absolute' returns lat/lon encoded in 3 dimensions using sine\n", + " and cosine.\n", + " 'fourier' returns lat/lon to be encoded by the model.\n", + " rtype: numpy data type used during read\n", + " dtype: torch data type of data output\n", + " \"\"\"\n", + "\n", + " self.time_range = (\n", + " pd.to_datetime(time_range[0]),\n", + " pd.to_datetime(time_range[1]),\n", + " )\n", + " self.lead_times = lead_times\n", + " self.input_times = input_times\n", + " self._roll_longitudes = list(range(roll_longitudes + 1))\n", + "\n", + " self._uvars = vertical_vars or self.valid_vertical_vars\n", + " self._level = levels or self.valid_levels\n", + " self._svars = surface_vars or self.valid_surface_vars\n", + " self._sstat = static_surface_vars or self.valid_static_surface_vars\n", + " self._nuvars = len(self._uvars)\n", + " self._nlevel = len(self._level)\n", + " self._nsvars = len(self._svars)\n", + " self._nsstat =
len(self._sstat)\n", + "\n", + " self.rtype = rtype\n", + " self.dtype = dtype\n", + "\n", + " self.positional_encoding = positional_encoding\n", + "\n", + " self._data_path_surface = Path(data_path_surface)\n", + " self._data_path_vertical = Path(data_path_vertical)\n", + "\n", + " self.dir_exists(self._data_path_surface)\n", + " self.dir_exists(self._data_path_vertical)\n", + "\n", + " self._get_coordinates()\n", + "\n", + " self._climatology_path_surface = Path(climatology_path_surface) or None\n", + " self._climatology_path_vertical = (\n", + " Path(climatology_path_vertical) or None\n", + " )\n", + " self._require_clim = (\n", + " self._climatology_path_surface is not None\n", + " and self._climatology_path_vertical is not None\n", + " )\n", + "\n", + " if self._require_clim:\n", + " self.dir_exists(self._climatology_path_surface)\n", + " self.dir_exists(self._climatology_path_vertical)\n", + " elif (\n", + " climatology_path_surface is None\n", + " and climatology_path_vertical is None\n", + " ):\n", + " self._climatology_path_surface = None\n", + " self._climatology_path_vertical = None\n", + " else:\n", + " raise ValueError(\n", + " \"Either both or neither of\"\n", + " \"`climatology_path_surface` and\"\n", + " \"`climatology_path_vertical` should be None.\"\n", + " )\n", + "\n", + " if not set(self._svars).issubset(set(self.valid_surface_vars)):\n", + " raise ValueError(\"Invalid surface variable.\")\n", + "\n", + " if not set(self._sstat).issubset(set(self.valid_static_surface_vars)):\n", + " raise ValueError(\"Invalid static surface variable.\")\n", + "\n", + " if not set(self._uvars).issubset(set(self.valid_vertical_vars)):\n", + " raise ValueError(\"Inalid vertical variable.\")\n", + "\n", + " if not set(self._level).issubset(set(self.valid_levels)):\n", + " raise ValueError(\"Invalid level.\")\n", + "\n", + " @staticmethod\n", + " def dir_exists(path: Path) -> None:\n", + " if not path.is_dir():\n", + " raise ValueError(f\"Directory {path} does not exist.\")\n", + "\n", + " @property\n", + " def upper_shape(self) -> tuple:\n", + " \"\"\"Returns the vertical variables shape\n", + " Returns:\n", + " tuple: vertical variable shape in the following order::\n", + "\n", + " [VAR, LEV, TIME, LAT, LON]\n", + " \"\"\"\n", + " return self._nuvars, self._nlevel, 2, 361, 576\n", + "\n", + " @property\n", + " def surface_shape(self) -> tuple:\n", + " \"\"\"Returns the surface variables shape\n", + "\n", + " Returns:\n", + " tuple: surafce shape in the following order::\n", + "\n", + " [VAR, LEV, TIME, LAT, LON]\n", + " \"\"\"\n", + " return self._nsvars, 2, 361, 576\n", + "\n", + " def data_file_surface(self, timestamp: pd.Timestamp) -> Path:\n", + " \"\"\"Build the surfcae data file name based on timestamp\n", + "\n", + " Args:\n", + " timestamp: a timestamp\n", + "\n", + " Returns:\n", + " Path: constructed path\n", + " \"\"\"\n", + " pattern = \"MERRA2_sfc_%Y%m%d.nc\"\n", + " data_file = self._data_path_surface / timestamp.strftime(pattern)\n", + " return data_file\n", + "\n", + " def data_file_vertical(self, timestamp: pd.Timestamp) -> Path:\n", + " \"\"\"Build the vertical data file name based on timestamp\n", + "\n", + " Args:\n", + " timestamp: a timestamp\n", + "\n", + " Returns:\n", + " Path: constructed path\n", + " \"\"\"\n", + " pattern = \"MERRA_pres_%Y%m%d.nc\"\n", + " data_file = self._data_path_vertical / timestamp.strftime(pattern)\n", + " return data_file\n", + "\n", + " def data_file_surface_climate(\n", + " self,\n", + " timestamp: pd.Timestamp | None = None,\n", + " 
dayofyear: int | None = None,\n", + " hourofday: int | None = None,\n", + " ) -> Path:\n", + " \"\"\"\n", + " Returns the path to a climatology file based either on a timestamp or\n", + " the dayofyear / hourofday combination.\n", + " Args:\n", + " timestamp: A timestamp.\n", + " dayofyear: Day of the year. 1 to 366.\n", + " hourofday: Hour of the day. 0 to 23.\n", + " Returns:\n", + " Path: Path to climatology file.\n", + " \"\"\"\n", + " if timestamp is not None and (\n", + " (dayofyear is not None) or (hourofday is not None)\n", + " ):\n", + " raise ValueError(\n", + " \"Provide either timestamp or both dayofyear and hourofday.\"\n", + " )\n", + "\n", + " if timestamp is not None:\n", + " dayofyear = min(timestamp.dayofyear, 365)\n", + " hourofday = timestamp.hour\n", + "\n", + " file_name = f\"climate_surface_doy{dayofyear:03}_hour{hourofday:02}.nc\"\n", + " data_file = self._climatology_path_surface / file_name\n", + " return data_file\n", + "\n", + " def data_file_vertical_climate(\n", + " self,\n", + " timestamp: pd.Timestamp | None = None,\n", + " dayofyear: int | None = None,\n", + " hourofday: int | None = None,\n", + " ) -> Path:\n", + " \"\"\"Returns the path to a climatology file based either on a timestamp\n", + " or the dayofyear / hourofday combination.\n", + "\n", + " Args:\n", + " timestamp: A timestamp.\n", + " dayofyear: Day of the year. 1 to 366.\n", + " hourofday: Hour of the day. 0 to 23.\n", + " Returns:\n", + " Path: Path to climatology file.\n", + " \"\"\"\n", + " if timestamp is not None and (\n", + " (dayofyear is not None) or (hourofday is not None)\n", + " ):\n", + " raise ValueError(\n", + " \"Provide either timestamp or both dayofyear and hourofday.\"\n", + " )\n", + "\n", + " if timestamp is not None:\n", + " dayofyear = min(timestamp.dayofyear, 365)\n", + " hourofday = timestamp.hour\n", + "\n", + " file_name = f\"climate_vertical_doy{dayofyear:03}_hour{hourofday:02}.nc\"\n", + " data_file = self._climatology_path_vertical / file_name\n", + " return data_file\n", + "\n", + " def _get_coordinates(self) -> None:\n", + " \"\"\"\n", + " Obtains the coordinates (latitudes and longitudes) from a single data\n", + " file.\n", + " \"\"\"\n", + " timestamp = next(iter(self.valid_timestamps))\n", + "\n", + " file = self.data_file_surface(timestamp)\n", + " with h5py.File(file, \"r\", libver=\"latest\") as handle:\n", + " self.lats = lats = handle[\"lat\"][()].astype(self.rtype)\n", + " self.lons = lons = handle[\"lon\"][()].astype(self.rtype)\n", + "\n", + " deg_to_rad = np.pi / 180\n", + " self._embed_lat = np.sin(lats * deg_to_rad).reshape(-1, 1)\n", + "\n", + " self._embed_lon = np.empty((2, 1, len(lons)), dtype=self.rtype)\n", + " self._embed_lon[0, 0] = np.cos(lons * deg_to_rad)\n", + " self._embed_lon[1, 0] = np.sin(lons * deg_to_rad)\n", + "\n", + " @ft.cached_property\n", + " def lats(self) -> np.ndarray:\n", + " timestamp = next(iter(self.valid_timestamps))\n", + "\n", + " file = self.data_file_surface(timestamp)\n", + " with h5py.File(file, \"r\", libver=\"latest\") as handle:\n", + " return handle[\"lat\"][()].astype(self.rtype)\n", + "\n", + " @ft.cached_property\n", + " def lons(self) -> np.ndarray:\n", + " timestamp = next(iter(self.valid_timestamps))\n", + "\n", + " file = self.data_file_surface(timestamp)\n", + " with h5py.File(file, \"r\", libver=\"latest\") as handle:\n", + " return handle[\"lon\"][()].astype(self.rtype)\n", + "\n", + " @ft.cached_property\n", + " def position_signal(self) -> np.ndarray:\n", + " \"\"\"Generates the \"position signal\" that
is part of the static\n", + " features.\n", + "\n", + " Returns:\n", + " Tensor: Torch tensor of dimension (parameter, lat, lon) containing\n", + " sin(lat), cos(lon), sin(lon).\n", + " \"\"\"\n", + "\n", + " latitudes, longitudes = np.meshgrid(\n", + " self.lats, self.lons, indexing=\"ij\"\n", + " )\n", + "\n", + " if self.positional_encoding == \"absolute\":\n", + " latitudes = latitudes / 360 * 2.0 * np.pi\n", + " longitudes = longitudes / 360 * 2.0 * np.pi\n", + " sur_static = np.stack(\n", + " [np.sin(latitudes), np.cos(longitudes), np.sin(longitudes)],\n", + " axis=0,\n", + " )\n", + " else:\n", + " sur_static = np.stack([latitudes, longitudes], axis=0)\n", + "\n", + " sur_static = sur_static.astype(self.rtype)\n", + "\n", + " return sur_static\n", + "\n", + " @ft.cached_property\n", + " def valid_timestamps(self) -> set[pd.Timestamp]:\n", + " \"\"\"Generates list of valid timestamps based on available files. Only\n", + " timestamps for which both surface and vertical information is available\n", + " are considered valid.\n", + " Returns:\n", + " list: list of timestamps\n", + " \"\"\"\n", + "\n", + " s_glob = self._data_path_surface.glob(\"MERRA2_sfc_????????.nc\")\n", + " s_files = [os.path.basename(f) for f in s_glob]\n", + " v_glob = self._data_path_surface.glob(\"MERRA_pres_????????.nc\")\n", + " v_files = [os.path.basename(f) for f in v_glob]\n", + "\n", + " s_re = re.compile(r\"MERRA2_sfc_(\\d{8}).nc\\Z\")\n", + " v_re = re.compile(r\"MERRA_pres_(\\d{8}).nc\\Z\")\n", + " fmt = \"%Y%m%d\"\n", + "\n", + " s_times = {\n", + " (datetime.strptime(m[1], fmt))\n", + " for f in s_files\n", + " if (m := s_re.match(f))\n", + " }\n", + " v_times = {\n", + " (datetime.strptime(m[1], fmt))\n", + " for f in v_files\n", + " if (m := v_re.match(f))\n", + " }\n", + "\n", + " times = s_times.intersection(v_times)\n", + "\n", + " # Each file contains a day at 3 hour intervals\n", + " times = {\n", + " t + timedelta(hours=i) for i in range(0, 24, 3) for t in times\n", + " }\n", + "\n", + " start_time, end_time = self.time_range\n", + " times = {pd.Timestamp(t) for t in times if start_time <= t <= end_time}\n", + "\n", + " return times\n", + "\n", + " @ft.cached_property\n", + " def valid_climate_timestamps(self) -> set[tuple[int, int]]:\n", + " \"\"\"Generates list of \"timestamps\" (dayofyear, hourofday) for which\n", + " climatology data is present. 
Only instances for which surface and\n", + " vertical data is available are considered valid.\n", + " Returns:\n", + " list: List of tuples describing valid climatology instances.\n", + " \"\"\"\n", + " if not self._require_clim:\n", + " return set()\n", + "\n", + " s_glob = self._climatology_path_surface.glob(\n", + " \"climate_surface_doy???_hour??.nc\"\n", + " )\n", + " s_files = [os.path.basename(f) for f in s_glob]\n", + "\n", + " v_glob = self._climatology_path_vertical.glob(\n", + " \"climate_vertical_doy???_hour??.nc\"\n", + " )\n", + " v_files = [os.path.basename(f) for f in v_glob]\n", + "\n", + " s_re = re.compile(r\"climate_surface_doy(\\d{3})_hour(\\d{2}).nc\\Z\")\n", + " v_re = re.compile(r\"climate_vertical_doy(\\d{3})_hour(\\d{2}).nc\\Z\")\n", + "\n", + " s_times = {\n", + " (int(m[1]), int(m[2])) for f in s_files if (m := s_re.match(f))\n", + " }\n", + " v_times = {\n", + " (int(m[1]), int(m[2])) for f in v_files if (m := v_re.match(f))\n", + " }\n", + "\n", + " times = s_times.intersection(v_times)\n", + "\n", + " return times\n", + "\n", + " def _data_available(self, spec: SampleSpec) -> bool:\n", + " \"\"\"\n", + " Checks whether data is available for a given SampleSpec object. Does so\n", + " using the internal sets of available data constructed previously,\n", + " rather than by checking the file system.\n", + " Args:\n", + " spec: SampleSpec object as returned by SampleSpec.get\n", + " Returns:\n", + " bool: whether data is available.\n", + " \"\"\"\n", + " valid = set(spec.times).issubset(self.valid_timestamps)\n", + "\n", + " if self._require_clim:\n", + " sci = spec.climatology_info\n", + " ci = set(sci) if isinstance(sci, list) else set([sci]) # noqa: C405\n", + " valid &= ci.issubset(self.valid_climate_timestamps)\n", + "\n", + " return valid\n", + "\n", + " @ft.cached_property\n", + " def samples(self) -> list[tuple[pd.Timestamp, int, int]]:\n", + " \"\"\"\n", + " Generates list of all valid samples.\n", + " Returns:\n", + " list: List of tuples (timestamp, input time, lead time).\n", + " \"\"\"\n", + " valid_samples = []\n", + " dts = [(it, lt) for it in self.input_times for lt in self.lead_times]\n", + "\n", + " for timestamp in sorted(self.valid_timestamps):\n", + " timestamp_samples = []\n", + " for it, lt in dts:\n", + " spec = SampleSpec.get(timestamp, -it, lt)\n", + "\n", + " if self._data_available(spec):\n", + " timestamp_samples.append((timestamp, it, lt))\n", + "\n", + " if timestamp_samples:\n", + " valid_samples.append(timestamp_samples)\n", + "\n", + " return valid_samples\n", + "\n", + " def _to_torch(\n", + " self,\n", + " data: dict[str, Tensor | list[Tensor]],\n", + " dtype: torch.dtype = torch.float32,\n", + " ) -> dict[str, Tensor | list[Tensor]]:\n", + " out = {}\n", + " for k, v in data.items():\n", + " if isinstance(v, list):\n", + " out[k] = [torch.from_numpy(x).to(dtype) for x in v]\n", + " else:\n", + " out[k] = torch.from_numpy(v).to(dtype)\n", + "\n", + " return out\n", + "\n", + " def _lat_roll(\n", + " self, data: dict[str, Tensor | list[Tensor]], n: int\n", + " ) -> dict[str, Tensor | list[Tensor]]:\n", + " out = {}\n", + " for k, v in data.items():\n", + " if isinstance(v, list):\n", + " out[k] = [torch.roll(x, shifts=n, dims=-1) for x in v]\n", + " else:\n", + " out[k] = torch.roll(v, shifts=n, dims=-1)\n", + "\n", + " return out\n", + "\n", + " def _read_static_data(\n", + " self, file: str | Path, doy: int, hod: int\n", + " ) -> np.ndarray:\n", + " with h5py.File(file, \"r\", libver=\"latest\") as handle:\n", + " lats_surf =
handle[\"lat\"]\n", + " lons_surf = handle[\"lon\"]\n", + "\n", + " nll = (len(lats_surf), len(lons_surf))\n", + "\n", + " npos = len(self.position_signal)\n", + " ntime = 4\n", + "\n", + " nstat = npos + ntime + self._nsstat\n", + " data = np.empty((nstat, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._sstat, start=npos + ntime):\n", + " data[i] = handle[key][()].astype(dtype=self.rtype)\n", + "\n", + " # [position signal], cos(doy), sin(doy), cos(hod), sin(hod)\n", + " data[0:npos] = self.position_signal\n", + " data[npos + 0] = np.cos(2 * np.pi * doy / 366)\n", + " data[npos + 1] = np.sin(2 * np.pi * doy / 366)\n", + " data[npos + 2] = np.cos(2 * np.pi * hod / 24)\n", + " data[npos + 3] = np.sin(2 * np.pi * hod / 24)\n", + "\n", + " return data\n", + "\n", + " def _read_surface(\n", + " self, tidx: int, nll: tuple[int, int], handle: h5py.File\n", + " ) -> np.ndarray:\n", + " data = np.empty((self._nsvars, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._svars):\n", + " data[i] = handle[key][tidx][()].astype(dtype=self.rtype)\n", + "\n", + " return data\n", + "\n", + " def _read_levels(\n", + " self, tidx: int, nll: tuple[int, int], handle: h5py.File\n", + " ) -> np.ndarray:\n", + " lvls = handle[\"lev\"][()]\n", + " lidx = self._level_idxs(lvls)\n", + "\n", + " data = np.empty((self._nuvars, self._nlevel, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._uvars):\n", + " data[i] = handle[key][tidx, lidx][()].astype(dtype=self.rtype)\n", + "\n", + " return np.ascontiguousarray(np.flip(data, axis=1))\n", + "\n", + " def _level_idxs(self, lvls):\n", + " lidx = [np.argwhere(lvls == int(lvl)).item() for lvl in self._level]\n", + " return sorted(lidx)\n", + "\n", + " @staticmethod\n", + " def _date_to_tidx(date: datetime | pd.Timestamp, handle: h5py.File) -> int:\n", + " if isinstance(date, pd.Timestamp):\n", + " date = date.to_pydatetime()\n", + "\n", + " time = handle[\"time\"]\n", + "\n", + " t0 = time.attrs[\"begin_time\"][()].item()\n", + " d0 = f\"{time.attrs['begin_date'][()].item()}\"\n", + "\n", + " offset = datetime.strptime(d0, \"%Y%m%d\")\n", + "\n", + " times = [offset + timedelta(minutes=int(t + t0)) for t in time[()]]\n", + " return times.index(date)\n", + "\n", + " def _read_data(\n", + " self, file_pair: tuple[str, str], date: datetime\n", + " ) -> dict[str, np.ndarray]:\n", + " s_file, v_file = file_pair\n", + "\n", + " with h5py.File(s_file, \"r\", libver=\"latest\") as shandle:\n", + " lats_surf = shandle[\"lat\"]\n", + " lons_surf = shandle[\"lon\"]\n", + "\n", + " nll = (len(lats_surf), len(lons_surf))\n", + "\n", + " tidx = self._date_to_tidx(date, shandle)\n", + "\n", + " sdata = self._read_surface(tidx, nll, shandle)\n", + "\n", + " with h5py.File(v_file, \"r\", libver=\"latest\") as vhandle:\n", + " lats_vert = vhandle[\"lat\"]\n", + " lons_vert = vhandle[\"lon\"]\n", + "\n", + " nll = (len(lats_vert), len(lons_vert))\n", + "\n", + " tidx = self._date_to_tidx(date, vhandle)\n", + "\n", + " vdata = self._read_levels(tidx, nll, vhandle)\n", + "\n", + " data = {\"vert\": vdata, \"surf\": sdata}\n", + "\n", + " return data\n", + "\n", + " def _read_climate(\n", + " self, file_pair: tuple[str, str]\n", + " ) -> dict[str, np.ndarray]:\n", + " s_file, v_file = file_pair\n", + "\n", + " with h5py.File(s_file, \"r\", libver=\"latest\") as shandle:\n", + " lats_surf = shandle[\"lat\"]\n", + " lons_surf = shandle[\"lon\"]\n", + "\n", + " nll = (len(lats_surf), len(lons_surf))\n", + "\n", + " sdata = 
np.empty((self._nsvars, *nll), dtype=self.rtype)\n", + "\n", + " for i, key in enumerate(self._svars):\n", + " sdata[i] = shandle[key][()].astype(dtype=self.rtype)\n", + "\n", + " with h5py.File(v_file, \"r\", libver=\"latest\") as vhandle:\n", + " lats_vert = vhandle[\"lat\"]\n", + " lons_vert = vhandle[\"lon\"]\n", + "\n", + " nll = (len(lats_vert), len(lons_vert))\n", + "\n", + " lvls = vhandle[\"lev\"][()]\n", + " lidx = self._level_idxs(lvls)\n", + "\n", + " vdata = np.empty(\n", + " (self._nuvars, self._nlevel, *nll), dtype=self.rtype\n", + " )\n", + "\n", + " for i, key in enumerate(self._uvars):\n", + " vdata[i] = vhandle[key][lidx][()].astype(dtype=self.rtype)\n", + "\n", + " data = {\n", + " \"vert\": np.ascontiguousarray(np.flip(vdata, axis=1)),\n", + " \"surf\": sdata,\n", + " }\n", + "\n", + " return data\n", + "\n", + " def get_data_from_sample_spec(\n", + " self, spec: SampleSpec\n", + " ) -> dict[str, Tensor | int | float]:\n", + " \"\"\"Loads and assembles sample data given a SampleSpec object.\n", + "\n", + " Args:\n", + " spec (SampleSpec): Full details regarding the data to be loaded\n", + " Returns:\n", + " dict: Dictionary with the following keys::\n", + "\n", + " 'sur_static': Torch tensor of shape [parameter, lat, lon]. For\n", + " each pixel (lat, lon), the first 7 dimensions index sin(lat),\n", + " cos(lon), sin(lon), cos(doy), sin(doy), cos(hod), sin(hod).\n", + " Where doy is the day of the year [1, 366] and hod the hour of\n", + " the day [0, 23].\n", + " 'sur_vals': Torch tensor of shape [parameter, time, lat, lon].\n", + " 'sur_tars': Torch tensor of shape [parameter, time, lat, lon].\n", + " 'ulv_vals': Torch tensor of shape [parameter, level, time, lat, lon].\n", + " 'ulv_tars': Torch tensor of shape [parameter, level, time, lat, lon].\n", + " 'sur_climate': Torch tensor of shape [parameter, lat, lon].\n", + " 'ulv_climate': Torch tensor of shape [parameter, level, lat, lon].\n", + " 'lead_time': Float.\n", + " 'input_time': Float.\n", + "\n", + " \"\"\" # noqa: E501\n", + "\n", + " # We assemble the unique timestamps for which we need data.\n", + " vals_required = {*spec.times}\n", + " stat_required = {*spec.stat_times}\n", + "\n", + " # We assemble the unique data files from which we need value data\n", + " vals_file_map = defaultdict(list)\n", + " for t in vals_required:\n", + " data_files = (\n", + " self.data_file_surface(t),\n", + " self.data_file_vertical(t),\n", + " )\n", + " vals_file_map[data_files].append(t)\n", + "\n", + " # We assemble the unique data files from which we need static data\n", + " stat_file_map = defaultdict(list)\n", + " for t in stat_required:\n", + " data_files = (\n", + " self.data_file_surface(t),\n", + " self.data_file_vertical(t),\n", + " )\n", + " stat_file_map[data_files].append(t)\n", + "\n", + " # Load the value data\n", + " data = {}\n", + " for data_files, times in vals_file_map.items():\n", + " for time in times:\n", + " data[time] = self._read_data(data_files, time)\n", + "\n", + " # Combine times. Inputs are stacked along a new time axis; the single\n", + " # target gets a length-1 time axis via None-indexing.\n", + " sample_data = {}\n", + "\n", + " input_upl = np.stack([data[t][\"vert\"] for t in spec.inputs], axis=2)\n", + " sample_data[\"ulv_vals\"] = input_upl\n", + "\n", + " target_upl = data[spec.target][\"vert\"]\n", + " sample_data[\"ulv_tars\"] = target_upl[:, :, None]\n", + "\n", + " input_sur = np.stack([data[t][\"surf\"] for t in spec.inputs], axis=1)\n", + " sample_data[\"sur_vals\"] = input_sur\n", + "\n", + " target_sur = data[spec.target][\"surf\"]\n", + " sample_data[\"sur_tars\"] = target_sur[:, None]\n", + "\n", + " # Load the static data\n", + " data_files, times = stat_file_map.popitem()\n", + " time = times[0].dayofyear, times[0].hour\n", + " sample_data[\"sur_static\"] = self._read_static_data(\n", + " data_files[0], *time\n", + " )\n", + "\n", + " # If required, load the climatology data\n", + " if self._require_clim:\n", + " ci_doy, ci_hour = spec.climatology_info\n", + "\n", + " surf_file = self.data_file_surface_climate(\n", + " dayofyear=ci_doy,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " vert_file = self.data_file_vertical_climate(\n", + " dayofyear=ci_doy,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " clim_data = self._read_climate((surf_file, vert_file))\n", + "\n", + " sample_data[\"sur_climate\"] = clim_data[\"surf\"]\n", + " sample_data[\"ulv_climate\"] = clim_data[\"vert\"]\n", + "\n", + " # Move the data from numpy to torch\n", + " sample_data = self._to_torch(sample_data, dtype=self.dtype)\n", + "\n", + " # Optionally roll\n", + " if len(self._roll_longitudes) > 0:\n", + " roll_by = random.choice(self._roll_longitudes)\n", + " sample_data = self._lat_roll(sample_data, roll_by)\n", + "\n", + " # Now that we have rolled, we can add the static data\n", + " sample_data[\"lead_time\"] = spec.lead_time\n", + " sample_data[\"input_time\"] = spec.input_time\n", + "\n", + " return sample_data\n", + "\n", + " def get_data(\n", + " self, timestamp: pd.Timestamp, input_time: int, lead_time: int\n", + " ) -> dict[str, Tensor | int]:\n", + " \"\"\"\n", + " Loads data based on timestamp and lead time.\n", + " Args:\n", + " timestamp: Timestamp.\n", + " input_time: Time between input samples.\n", + " lead_time: Lead time.\n", + " Returns:\n", + " Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate',\n", + " 'lead_time', 'input_time'.\n", + " \"\"\"\n", + " spec = SampleSpec.get(timestamp, -input_time, lead_time)\n", + " sample_data = self.get_data_from_sample_spec(spec)\n", + " return sample_data\n", + "\n", + " def __getitem__(self, idx: int) -> dict[str, Tensor | int]:\n", + " \"\"\"\n", + " Loads data based on sample index and random choice of sample.\n", + " Args:\n", + " idx: Sample index.\n", + " Returns:\n", + " Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate',\n", + " 'lead_time', 'input_time'.\n", + " \"\"\"\n", + " sample_set = self.samples[idx]\n", + " timestamp, input_time, lead_time, *nsteps = random.choice(sample_set)\n", + " sample_data = self.get_data(timestamp, input_time, lead_time)\n", + " return sample_data\n", + "\n", + " def __len__(self):\n", + " return len(self.samples)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "import functools as ft\n", + "import random\n", + "from collections import defaultdict\n", + "from copy import deepcopy\n", + "from pathlib import Path\n", + "\n", + "import numpy as np\n", + "import pandas as pd\n", + "import torch\n", + "from torch import Tensor\n", + "\n", + "# from PrithviWxC.dataloaders.merra2 import Merra2Dataset, SampleSpec\n", + "\n", + "\n", + "def preproc(\n", + " batch: list[dict[str, int | float | Tensor]], padding: dict[str, tuple[int, int]]\n", + ") -> dict[str, Tensor]:\n", + " \"\"\"Preprocessing function for the MERRA2 dataset.\n", + "\n", + " Args:\n", + " batch (list): List of training samples, each sample should be a\n", + " dictionary with the following keys::\n", + "\n", + " 'sur_static': Numpy array of shape (3, lat, lon). 
For each pixel (lat, lon), the first dimension indexes sin(lat), cos(lon), sin(lon).\n", + " 'sur_vals': Torch tensor of shape (parameter, time, lat, lon).\n", + " 'sur_tars': Torch tensor of shape (parameter, time, lat, lon).\n", + " 'ulv_vals': Torch tensor of shape (parameter, level, time, lat, lon).\n", + " 'ulv_tars': Torch tensor of shape (parameter, level, time, lat, lon).\n", + " 'sur_climate': Torch tensor of shape (nstep, parameter, lat, lon).\n", + " 'ulv_climate': Torch tensor of shape (nstep, parameter, level, lat, lon).\n", + " 'lead_time': Integer.\n", + " 'input_time': Integer.\n", + "\n", + " padding: Dictionary with keys 'level', 'lat', 'lon', each of dim 2.\n", + "\n", + " Returns:\n", + " Dictionary with the following keys::\n", + "\n", + " 'x': [batch, time, parameter, lat, lon]\n", + " 'ys': [batch, nsteps, parameter, lat, lon]\n", + " 'statics': [batch, nstep, parameter, lat, lon]\n", + " 'lead_time': [batch]\n", + " 'input_time': [batch]\n", + " 'climates' (optional): [batch, nsteps, parameter, lat, lon]\n", + "\n", + " Note:\n", + " Here, for x and ys, 'parameter' runs over [surface parameters,\n", + " upper-level parameter x level combinations]. Similarly for the static\n", + " information we have\n", + " [sin(lat), cos(lon), sin(lon), cos(doy), sin(doy), cos(hod), sin(hod),\n", + " ...].\n", + " \"\"\" # noqa: E501\n", + "\n", + " b0 = batch[0]\n", + " nbatch = len(batch)\n", + " data_keys = set(b0.keys())\n", + "\n", + " essential_keys = {\n", + " \"sur_static\",\n", + " \"sur_vals\",\n", + " \"sur_tars\",\n", + " \"ulv_vals\",\n", + " \"ulv_tars\",\n", + " \"input_time\",\n", + " \"lead_time\",\n", + " }\n", + "\n", + " climate_keys = {\n", + " \"sur_climate\",\n", + " \"ulv_climate\",\n", + " }\n", + "\n", + " all_keys = essential_keys | climate_keys\n", + "\n", + " if not essential_keys.issubset(data_keys):\n", + " raise ValueError(\"Missing essential keys.\")\n", + "\n", + " if not data_keys.issubset(all_keys):\n", + " raise ValueError(\"Unexpected keys in batch.\")\n", + "\n", + " # Bring all tensors from the batch into a single tensor\n", + " upl_x = torch.empty((nbatch, *b0[\"ulv_vals\"].shape))\n", + " upl_y = torch.empty((nbatch, *b0[\"ulv_tars\"].shape))\n", + "\n", + " sur_x = torch.empty((nbatch, *b0[\"sur_vals\"].shape))\n", + " sur_y = torch.empty((nbatch, *b0[\"sur_tars\"].shape))\n", + "\n", + " sur_sta = torch.empty((nbatch, *b0[\"sur_static\"].shape))\n", + "\n", + " lead_time = torch.empty(\n", + " (nbatch, *b0[\"lead_time\"].shape),\n", + " dtype=torch.float32,\n", + " )\n", + " input_time = torch.empty((nbatch,), dtype=torch.float32)\n", + "\n", + " for i, rec in enumerate(batch):\n", + " sur_x[i] = torch.Tensor(rec[\"sur_vals\"])\n", + " sur_y[i] = torch.Tensor(rec[\"sur_tars\"])\n", + "\n", + " upl_x[i] = torch.Tensor(rec[\"ulv_vals\"])\n", + " upl_y[i] = torch.Tensor(rec[\"ulv_tars\"])\n", + "\n", + " sur_sta[i] = torch.Tensor(rec[\"sur_static\"])\n", + "\n", + " lead_time[i] = rec[\"lead_time\"]\n", + " input_time[i] = rec[\"input_time\"]\n", + "\n", + " return_value = {\n", + " \"lead_time\": lead_time,\n", + " \"input_time\": input_time,\n", + " \"target_time\": torch.sum(lead_time).reshape(-1),\n", + " }\n", + "\n", + " # Reshape (batch, parameter, level, time, lat, lon)\n", + " # -> (batch, time, parameter, level, lat, lon)\n", + " upl_x = upl_x.permute((0, 3, 1, 2, 4, 5))\n", + " upl_y = upl_y.permute((0, 3, 1, 2, 4, 5))\n", + "\n", + " # Reshape (batch, parameter, time, lat, lon)\n", + " # -> (batch, time, parameter, lat, lon)\n", + " sur_x = sur_x.permute((0, 2, 1, 3, 4))\n", + " sur_y = sur_y.permute((0, 2, 1, 3, 4))\n", + 
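"\n", + " # Note: torch.nn.functional.pad consumes pad widths starting from the\n", + " # last dimension, so padding_2d pads lon first, then lat, and padding_3d\n", + " # additionally pads the level dimension.\n", + 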
" # Pad\n", + " padding_2d = (*padding[\"lon\"], *padding[\"lat\"])\n", + "\n", + " def pad2d(x):\n", + " return torch.nn.functional.pad(x, padding_2d, mode=\"constant\", value=0)\n", + "\n", + " padding_3d = (*padding[\"lon\"], *padding[\"lat\"], *padding[\"level\"])\n", + "\n", + " def pad3d(x):\n", + " return torch.nn.functional.pad(x, padding_3d, mode=\"constant\", value=0)\n", + "\n", + " sur_x = pad2d(sur_x).contiguous()\n", + " upl_x = pad3d(upl_x).contiguous()\n", + " sur_y = pad2d(sur_y).contiguous()\n", + " upl_y = pad3d(upl_y).contiguous()\n", + " return_value[\"statics\"] = pad2d(sur_sta).contiguous()\n", + "\n", + " # We stack along the combined parameter level dimension\n", + " return_value[\"x\"] = torch.cat(\n", + " (sur_x, upl_x.view(*upl_x.shape[:2], -1, *upl_x.shape[4:])), dim=2\n", + " )\n", + " return_value[\"ys\"] = torch.cat(\n", + " (sur_y, upl_y.view(*upl_y.shape[:2], -1, *upl_y.shape[4:])), dim=2\n", + " )\n", + "\n", + " if climate_keys.issubset(data_keys):\n", + " sur_climate = torch.empty((nbatch, *b0[\"sur_climate\"].shape))\n", + " ulv_climate = torch.empty((nbatch, *b0[\"ulv_climate\"].shape))\n", + " for i, rec in enumerate(batch):\n", + " sur_climate[i] = rec[\"sur_climate\"]\n", + " ulv_climate[i] = rec[\"ulv_climate\"]\n", + " sur_climate = pad2d(sur_climate)\n", + " ulv_climate = pad3d(ulv_climate)\n", + "\n", + " ulv_climate = ulv_climate.view(\n", + " *ulv_climate.shape[:2], -1, *ulv_climate.shape[4:]\n", + " )\n", + " return_value[\"climates\"] = torch.cat((sur_climate, ulv_climate), dim=2)\n", + "\n", + " return return_value\n", + "\n", + "\n", + "class RolloutSpec(SampleSpec):\n", + " \"\"\"\n", + " A data class to collect the information used to define a rollout sample.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " inputs: tuple[pd.Timestamp, pd.Timestamp],\n", + " lead_time: int,\n", + " target: pd.Timestamp,\n", + " ):\n", + " \"\"\"\n", + " Args:\n", + " inputs: Tuple of timestamps. In ascending order.\n", + " lead_time: Lead time. In hours.\n", + " target: Timestamp of the target. 
Must be after the inputs,\n", + " since rollout only runs forward.\n", + " \"\"\"\n", + " super().__init__(inputs, lead_time, target)\n", + "\n", + " self.dt = dt = pd.Timedelta(lead_time, unit=\"h\")\n", + " self.inters = list(pd.date_range(inputs[-1], target, freq=dt))\n", + "\n", + " self._ctimes = deepcopy(self.inters)\n", + " self.stat_times = deepcopy(self.inters)\n", + "\n", + " self.stat_times.pop(-1)\n", + " self._ctimes.pop(0)\n", + " self.inters.pop(0)\n", + " self.inters.pop(-1)\n", + "\n", + " self.times = [*inputs, *self.inters, target]\n", + " self.targets = self.times[2:]\n", + " self.nsteps = len(self.times) - 2\n", + "\n", + " @property\n", + " def climatology_info(self) -> list[tuple[int, int]]:\n", + " \"\"\"Returns information required to obtain climatology data.\n", + " Returns:\n", + " list: list containing required climatology info.\n", + " \"\"\"\n", + " return [(min(t.dayofyear, 365), t.hour) for t in self._ctimes]\n", + "\n", + " def _info_str(self) -> str:\n", + " iso_8601 = \"%Y-%m-%dT%H:%M:%S\"\n", + "\n", + " inter_str = \"\\n\".join(t.strftime(iso_8601) for t in self.inters)\n", + "\n", + " return (\n", + " f\"Issue time: {self.inputs[1].strftime(iso_8601)}\\n\"\n", + " f\"Lead time: {self.lead_time} hours ahead\\n\"\n", + " f\"Target time: {self.target.strftime(iso_8601)}\\n\"\n", + " f\"Intermediate times: {inter_str}\"\n", + " )\n", + "\n", + " @classmethod\n", + " def get(cls, timestamp: pd.Timestamp, lead_time: int, nsteps: int):\n", + " \"\"\"Given a timestamp and lead time, generates a RolloutSpec object\n", + " describing the sample further.\n", + "\n", + " Args:\n", + " timestamp: Timestamp (issue time) of the sample.\n", + " lead_time: Lead time. In hours.\n", + " nsteps: Number of rollout steps.\n", + "\n", + " Returns:\n", + " RolloutSpec object.\n", + " \"\"\"\n", + " if lead_time > 0:\n", + " dt = pd.to_timedelta(lead_time, unit=\"h\")\n", + " timestamp_target = timestamp + nsteps * dt\n", + " else:\n", + " raise ValueError(\"Rollout is only forwards\")\n", + "\n", + " spec = cls(\n", + " inputs=(timestamp - dt, timestamp),\n", + " lead_time=lead_time,\n", + " target=timestamp_target,\n", + " )\n", + "\n", + " return spec\n", + "\n", + " def __repr__(self) -> str:\n", + " return self._info_str()\n", + "\n", + " def __str__(self) -> str:\n", + " return self._info_str()\n", + "\n", + "\n", + "class Merra2RolloutDataset(Merra2Dataset):\n", + " \"\"\"Dataset class that reads MERRA2 data for performing rollouts.\n", + "\n", + " Implementation details::\n", + "\n", + " Samples stores the list of valid samples. This takes the form\n", + " ```\n", + " [\n", + " [(timestamp 1, -input_time, n_steps)],\n", + " [(timestamp 2, -input_time, n_steps)],\n", + " ]\n", + " ```\n", + " The nested list is for compatibility reasons with Merra2Dataset. Note\n", + " that input time and n_steps are the same for every sample. 
The sign\n", + " convention for input_time is the opposite of that in Merra2Dataset.\n", + " \"\"\"\n", + "\n", + " input_time_len = 2\n", + "\n", + " def __init__(\n", + " self,\n", + " time_range: tuple[str | pd.Timestamp, str | pd.Timestamp],\n", + " input_time: int | float | pd.Timedelta,\n", + " lead_time: int | float,\n", + " data_path_surface: str | Path,\n", + " data_path_vertical: str | Path,\n", + " climatology_path_surface: str | Path | None,\n", + " climatology_path_vertical: str | Path | None,\n", + " surface_vars: list[str],\n", + " static_surface_vars: list[str],\n", + " vertical_vars: list[str],\n", + " levels: list[float],\n", + " roll_longitudes: int = 0,\n", + " positional_encoding: str = \"absolute\",\n", + " ):\n", + " \"\"\"\n", + " Args:\n", + " time_range: time range to consider when building dataset\n", + " input_time: requested time between inputs\n", + " lead_time: requested time to predict\n", + " data_path_surface: path of surface data directory\n", + " data_path_vertical: path of vertical data directory\n", + " climatology_path_surface: path of surface climatology data\n", + " directory\n", + " climatology_path_vertical: path of vertical climatology data\n", + " directory\n", + " surface_vars: surface variables to return\n", + " static_surface_vars: static surface variables to return\n", + " vertical_vars: vertical variables to return\n", + " levels: MERRA-2 vertical levels to consider\n", + " roll_longitudes: Whether and how much to randomly roll longitudes\n", + " by. Defaults to 0.\n", + " positional_encoding: The type of positional encoding to use.\n", + " Defaults to \"absolute\".\n", + "\n", + " Raises:\n", + " ValueError: If lead time is not an integer multiple of input time\n", + " \"\"\"\n", + "\n", + " self._target_lead = lead_time\n", + "\n", + " if isinstance(input_time, int) or isinstance(input_time, float):\n", + " self.timedelta_input = pd.to_timedelta(-input_time, unit=\"h\")\n", + " else:\n", + " self.timedelta_input = -input_time\n", + "\n", + " lead_times = [self.timedelta_input / pd.to_timedelta(1, unit=\"h\")]\n", + "\n", + " super().__init__(\n", + " time_range,\n", + " lead_times,\n", + " [input_time],\n", + " data_path_surface,\n", + " data_path_vertical,\n", + " climatology_path_surface,\n", + " climatology_path_vertical,\n", + " surface_vars,\n", + " static_surface_vars,\n", + " vertical_vars,\n", + " levels,\n", + " roll_longitudes,\n", + " positional_encoding,\n", + " )\n", + "\n", + " nstep_float = (\n", + " pd.to_timedelta(self._target_lead, unit=\"h\") / self.timedelta_input\n", + " )\n", + "\n", + " if abs(nstep_float % 1) > 1e-5:\n", + " raise ValueError(\"Lead time is not a multiple of input time\")\n", + "\n", + " self.nsteps = round(nstep_float)\n", + "\n", + " @ft.cached_property\n", + " def samples(self) -> list[tuple[pd.Timestamp, int, int]]:\n", + " \"\"\"Generates the list of all valid samples.\n", + "\n", + " Returns:\n", + " List of tuples (timestamp, input time, lead time).\n", + " \"\"\"\n", + " valid_samples = []\n", + "\n", + " for timestamp in sorted(self.valid_timestamps):\n", + " timestamp_samples = []\n", + " for lt in self.lead_times:\n", + " spec = RolloutSpec.get(timestamp, lt, self.nsteps)\n", + "\n", + " if self._data_available(spec):\n", + " timestamp_samples.append(\n", + " (timestamp, self.input_times[0], lt, self.nsteps)\n", + " )\n", + "\n", + " if timestamp_samples:\n", + " valid_samples.append(timestamp_samples)\n", + "\n", + " return valid_samples\n", + "\n", + " def get_data_from_rollout_spec(\n", + " self, 
spec: RolloutSpec\n", + " ) -> dict[str, Tensor | int | float]:\n", + " \"\"\"Loads and assembles sample data given a RolloutSpec object.\n", + "\n", + " Args:\n", + " spec (RolloutSpec): Full details regarding the data to be loaded\n", + " Returns:\n", + " dict: Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate', 'lead_time',\n", + " 'input_time'. For each, the value is as follows::\n", + "\n", + " {\n", + " 'sur_static': Torch tensor of shape [parameter, lat, lon]. For\n", + " each pixel (lat, lon), the first 7 dimensions index sin(lat),\n", + " cos(lon), sin(lon), cos(doy), sin(doy), cos(hod), sin(hod).\n", + " Where doy is the day of the year [1, 366] and hod the hour of\n", + " the day [0, 23].\n", + " 'sur_vals': Torch tensor of shape [parameter, time, lat, lon].\n", + " 'sur_tars': Torch tensor of shape [parameter, time, lat, lon].\n", + " 'ulv_vals': Torch tensor of shape\n", + " [parameter, level, time, lat, lon].\n", + " 'ulv_tars': Torch tensor of shape\n", + " [nsteps, parameter, level, time, lat, lon].\n", + " 'sur_climate': Torch tensor of shape\n", + " [nsteps, parameter, lat, lon].\n", + " 'ulv_climate': Torch tensor of shape\n", + " [nsteps, parameter, level, lat, lon].\n", + " 'lead_time': Float.\n", + " 'input_time': Float.\n", + " }\n", + "\n", + " \"\"\"\n", + "\n", + " # We assemble the unique timestamps for which we need data.\n", + " vals_required = {*spec.times}\n", + " stat_required = {*spec.stat_times}\n", + "\n", + " # We assemble the unique data files from which we need value data\n", + " vals_file_map = defaultdict(list)\n", + " for t in vals_required:\n", + " data_files = (\n", + " self.data_file_surface(t),\n", + " self.data_file_vertical(t),\n", + " )\n", + " vals_file_map[data_files].append(t)\n", + "\n", + " # We assemble the unique data files from which we need static data\n", + " stat_file_map = defaultdict(list)\n", + " for t in stat_required:\n", + " data_files = (\n", + " self.data_file_surface(t),\n", + " self.data_file_vertical(t),\n", + " )\n", + " stat_file_map[data_files].append(t)\n", + "\n", + " # Load the value data\n", + " data = {}\n", + " for data_files, times in vals_file_map.items():\n", + " for time in times:\n", + " data[time] = self._read_data(data_files, time)\n", + "\n", + " # Load the static data\n", + " stat = {}\n", + " for data_files, times in stat_file_map.items():\n", + " for time in times:\n", + " doy, hod = time.dayofyear, time.hour\n", + " stat[time] = self._read_static_data(data_files[0], doy, hod)\n", + "\n", + " # Combine times\n", + " sample_data = {}\n", + "\n", + " input_upl = np.stack([data[t][\"vert\"] for t in spec.inputs], axis=2)\n", + " sample_data[\"ulv_vals\"] = input_upl\n", + "\n", + " target_upl = np.stack([data[t][\"vert\"] for t in spec.targets], axis=2)\n", + " sample_data[\"ulv_tars\"] = target_upl\n", + "\n", + " input_sur = np.stack([data[t][\"surf\"] for t in spec.inputs], axis=1)\n", + " sample_data[\"sur_vals\"] = input_sur\n", + "\n", + " target_sur = np.stack([data[t][\"surf\"] for t in spec.targets], axis=1)\n", + " sample_data[\"sur_tars\"] = target_sur\n", + "\n", + " # Load the static data\n", + " static = np.stack([stat[t] for t in spec.stat_times], axis=0)\n", + " sample_data[\"sur_static\"] = static\n", + "\n", + " # If required load the climate data\n", + " if self._require_clim:\n", + " clim_data = {}\n", + " for ci in spec.climatology_info:\n", + " ci_doy, ci_hour = ci\n", + "\n", + " surf_file = 
self.data_file_surface_climate(\n", + " dayofyear=ci_doy,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " vert_file = self.data_file_vertical_climate(\n", + " dayofyear=ci_doy,\n", + " hourofday=ci_hour,\n", + " )\n", + "\n", + " clim_data[ci] = self._read_climate((surf_file, vert_file))\n", + "\n", + " clim_surf = [clim_data[ci][\"surf\"] for ci in spec.climatology_info]\n", + " sample_data[\"sur_climate\"] = np.stack(clim_surf, axis=0)\n", + "\n", + " clim_vert = [clim_data[ci][\"vert\"] for ci in spec.climatology_info]\n", + " sample_data[\"ulv_climate\"] = np.stack(clim_vert, axis=0)\n", + "\n", + " # Move the data from numpy to torch\n", + " sample_data = self._to_torch(sample_data, dtype=self.dtype)\n", + "\n", + " # Optionally roll\n", + " if len(self._roll_longitudes) > 0:\n", + " roll_by = random.choice(self._roll_longitudes)\n", + " sample_data = self._lat_roll(sample_data, roll_by)\n", + "\n", + " # Now that we have rolled, we can add the static data\n", + " lt = torch.tensor([spec.lead_time] * self.nsteps).to(self.dtype)\n", + " sample_data[\"lead_time\"] = lt\n", + " sample_data[\"input_time\"] = spec.input_time\n", + "\n", + " return sample_data\n", + "\n", + " def get_data(\n", + " self, timestamp: pd.Timestamp, *args, **kwargs\n", + " ) -> dict[str, Tensor | int]:\n", + " \"\"\"Loads data based on timestamp and lead time.\n", + "\n", + " Args:\n", + " timestamp: Timestamp.\n", + " Returns:\n", + " Dictionary with keys 'sur_static', 'sur_vals', 'sur_tars',\n", + " 'ulv_vals', 'ulv_tars', 'sur_climate', 'ulv_climate',\n", + " 'lead_time', 'input_time'\n", + " \"\"\"\n", + " rollout_spec = RolloutSpec.get(\n", + " timestamp, self.lead_times[0], self.nsteps\n", + " )\n", + " sample_data = self.get_data_from_rollout_spec(rollout_spec)\n", + " return sample_data\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "# from PrithviWxC.dataloaders.merra2_rollout import Merra2RolloutDataset\n", + "\n", + "dataset = Merra2RolloutDataset(\n", + " time_range=time_range,\n", + " lead_time=lead_time,\n", + " input_time=input_time,\n", + " data_path_surface=surf_dir,\n", + " data_path_vertical=vert_dir,\n", + " climatology_path_surface=surf_clim_dir,\n", + " climatology_path_vertical=vert_clim_dir,\n", + " surface_vars=surface_vars,\n", + " static_surface_vars=static_surface_vars,\n", + " vertical_vars=vertical_vars,\n", + " levels=levels,\n", + " positional_encoding=positional_encoding,\n", + ")\n", + "assert len(dataset) > 0, \"There doesn't seem to be any valid data.\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Model\n", + "### Scalers and other hyperparameters\n", + "Again, this setup is similar to before.\n",
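+ "\n", + "As a rough sketch, the scalers are plain per-channel tensors used to\n", + "standardise model inputs and rescale raw outputs. The reshapes below mirror\n", + "the buffers registered in the model further down; the epsilon value and the\n", + "spatial sizes here are illustrative only, and `in_mu`, `in_sig`, `output_sig`\n", + "are the tensors computed in the next cell:\n", + "\n", + "```python\n", + "import torch\n", + "\n", + "x = torch.randn(1, 2, len(in_mu), 10, 10)    # [batch, time, channel, lat, lon]\n", + "mu = in_mu.reshape(1, 1, -1, 1, 1)           # broadcast per channel\n", + "sig = in_sig.reshape(1, 1, -1, 1, 1)\n", + "x_hat = (x - mu) / (sig + 1e-5)              # standardised input (illustrative eps)\n", + "\n", + "y_raw = torch.randn(1, len(in_mu), 10, 10)   # [batch, channel, lat, lon]\n", + "y = y_raw * output_sig.reshape(1, -1, 1, 1)  # rescaled prediction\n", + "```"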
+ ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "# from PrithviWxC.dataloaders.merra2 import (\n", + "# input_scalers,\n", + "# output_scalers,\n", + "# static_input_scalers,\n", + "# )\n", + "\n", + "surf_in_scal_path = Path(\"./climatology/musigma_surface.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{surf_in_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "vert_in_scal_path = Path(\"./climatology/musigma_vertical.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{vert_in_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "surf_out_scal_path = Path(\"./climatology/anomaly_variance_surface.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{surf_out_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "vert_out_scal_path = Path(\"./climatology/anomaly_variance_vertical.nc\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.2300m.v1\",\n", + " filename=f\"climatology/{vert_out_scal_path.name}\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.rollout.2300m.v1\",\n", + " filename=\"config.yaml\",\n", + " local_dir=\".\",\n", + ")\n", + "\n", + "in_mu, in_sig = input_scalers(\n", + " surface_vars,\n", + " vertical_vars,\n", + " levels,\n", + " surf_in_scal_path,\n", + " vert_in_scal_path,\n", + ")\n", + "\n", + "output_sig = output_scalers(\n", + " surface_vars,\n", + " vertical_vars,\n", + " levels,\n", + " surf_out_scal_path,\n", + " vert_out_scal_path,\n", + ")\n", + "\n", + "static_mu, static_sig = static_input_scalers(\n", + " surf_in_scal_path,\n", + " static_surface_vars,\n", + ")\n", + "\n", + "residual = \"none\"\n", + "masking_mode = \"local\"\n", + "decoder_shifting = True\n", + "masking_ratio = 0.99" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Model init\n", + "We can now build the model and load the pretrained weights. Note that you\n", + "should use the rollout version of the weights.\n",
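+ "\n", + "If you want to inspect the checkpoint before loading it into the model, a\n", + "minimal sketch could look as follows. Note that the internal layout of the\n", + "checkpoint file (e.g. a nested 'model_state' key) is an assumption here, not\n", + "a documented API:\n", + "\n", + "```python\n", + "import torch\n", + "\n", + "state_dict = torch.load(weights_path, map_location=\"cpu\")\n", + "# Hypothetical: some checkpoints nest the weights under a 'model_state' key.\n", + "if isinstance(state_dict, dict) and \"model_state\" in state_dict:\n", + "    state_dict = state_dict[\"model_state\"]\n", + "print(f\"{len(state_dict)} tensors in checkpoint\")\n", + "```"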
+ ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'weights\\\\prithvi.wxc.rollout.2300m.v1.pt'" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "weights_path = Path(\"./weights/prithvi.wxc.rollout.2300m.v1.pt\")\n", + "hf_hub_download(\n", + " repo_id=\"Prithvi-WxC/prithvi.wxc.rollout.2300m.v1\",\n", + " filename=weights_path.name,\n", + " local_dir=\"./weights\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "from functools import cached_property\n", + "from importlib.metadata import version\n", + "\n", + "from packaging.version import Version\n", + "from torch import Tensor\n", + "from torch.utils.checkpoint import checkpoint\n", + "\n", + "# Compare versions numerically rather than as strings.\n", + "if Version(version(\"torch\")) > Version(\"2.3.0\"):\n", + " from torch.nn.attention import SDPBackend, sdpa_kernel\n", + "import numpy as np\n", + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "\n", + "\n", + "# DropPath code is straight from timm\n", + "# (https://huggingface.co/spaces/Roll20/pet_score/blame/main/lib/timm/models/layers/drop.py)\n", + "def drop_path(\n", + " x: Tensor,\n", + " drop_prob: float = 0.0,\n", + " training: bool = False,\n", + " scale_by_keep: bool = True,\n", + ") -> Tensor:\n", + " \"\"\"Drop paths (Stochastic Depth) per sample (when applied in main path of\n", + " residual blocks). Taken from timm.\n", + "\n", + " Args:\n", + " x (Tensor): Input tensor.\n", + " drop_prob (float): Probability of dropping `x`, defaults to 0.\n", + " training (bool): Whether the model is in training or eval mode,\n", + " defaults to False.\n", + " scale_by_keep (bool): Whether the output should be scaled by\n", + " (`1 - drop_prob`), defaults to True.\n", + " Returns:\n", + " Tensor: Tensor whose paths may have been randomly dropped with\n", + " probability `drop_prob`.\n", + " \"\"\"\n", + " if drop_prob == 0.0 or not training:\n", + " return x\n", + " keep_prob = 1 - drop_prob\n", + " shape = (x.shape[0],) + (1,) * (x.ndim - 1)\n", + " random_tensor = x.new_empty(shape).bernoulli_(keep_prob)\n", + " if keep_prob > 0.0 and scale_by_keep:\n", + " random_tensor.div_(keep_prob)\n", + " return x * random_tensor\n", + "\n", + "\n", + "class DropPath(nn.Module):\n", + " \"\"\"\n", + " Drop paths (Stochastic Depth) per sample (when applied in main path of\n", + " residual blocks).\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self, drop_prob: float | None = None, scale_by_keep: bool = True\n", + " ) -> None:\n", + " super(DropPath, self).__init__()\n", + " self.drop_prob = drop_prob\n", + " self.scale_by_keep = scale_by_keep\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"Runs drop path on input tensor\n", + "\n", + " Args:\n", + " x: input\n", + "\n", + " Returns:\n", + " tensor: output after drop_path\n", + " \"\"\"\n", + " return drop_path(x, self.drop_prob, self.training, self.scale_by_keep)\n", + "\n", + "\n", + "class Mlp(nn.Module):\n", + " \"\"\"\n", + " Multi layer perceptron.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self, features: int, hidden_features: int, dropout: float = 0.0\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Input/output dimension.\n", + " hidden_features: Hidden dimension.\n", + " dropout: Dropout.\n", + " \"\"\"\n", + " super().__init__()\n", + " self.net = nn.Sequential(\n", + " nn.Linear(features, hidden_features),\n", + " nn.GELU(),\n", + " nn.Dropout(dropout),\n", + 
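" # Project back from the hidden width to the input/output width.\n", + "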
nn.Linear(hidden_features, features),\n", + " nn.Dropout(dropout),\n", + " )\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x (Tensor): Tensor of shape [..., channel]\n", + " Returns:\n", + " Tensor: Tensor of same shape as x.\n", + " \"\"\"\n", + " return self.net(x)\n", + "\n", + "\n", + "class LayerNormPassThrough(nn.LayerNorm):\n", + " \"\"\"Normalising layer that allows the attention mask to be passed through\"\"\"\n", + "\n", + " def __init__(self, *args, **kwargs):\n", + " super().__init__(*args, **kwargs)\n", + "\n", + " def forward(\n", + " self, d: tuple[Tensor, Tensor | None]\n", + " ) -> tuple[Tensor, Tensor | None]:\n", + " \"\"\"Forward function\n", + "\n", + " Args:\n", + " d (tuple): tuple of the data tensor and the attention mask\n", + " Returns:\n", + " output (Tensor): normalised output data\n", + " attn_mask (Tensor): the attention mask that was passed in\n", + " \"\"\"\n", + " input, attn_mask = d\n", + " output = F.layer_norm(\n", + " input, self.normalized_shape, self.weight, self.bias, self.eps\n", + " )\n", + " return output, attn_mask\n", + "\n", + "\n", + "class MultiheadAttention(nn.Module):\n", + " \"\"\"Multihead attention layer for inputs of shape\n", + " [..., sequence, features].\n", + " \"\"\"\n", + "\n", + " def __init__(self, features: int, n_heads: int, dropout: float) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Number of features for inputs to the layer.\n", + " n_heads: Number of attention heads. Should be a factor of features.\n", + " (I.e. the layer uses features // n_heads.)\n", + " dropout: Dropout.\n", + " \"\"\" # noqa: E501\n", + " super().__init__()\n", + "\n", + " if (features % n_heads) != 0:\n", + " raise ValueError(\n", + " f\"Features '{features}' is not divisible by heads '{n_heads}'.\"\n", + " )\n", + "\n", + " self.features = features\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + "\n", + " self.qkv_layer = torch.nn.Linear(features, features * 3, bias=False)\n", + " self.w_layer = torch.nn.Linear(features, features, bias=False)\n", + "\n", + " def forward(self, d: tuple[Tensor, Tensor | None]) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " d (tuple): tuple containing Tensor of shape [..., sequence, features] and the attention mask\n", + " Returns:\n", + " Tensor: Tensor of shape [..., sequence, features]\n", + " \"\"\" # noqa: E501\n", + " x, attn_mask = d\n", + "\n", + " if not x.shape[-1] == self.features:\n", + " raise ValueError(\n", + " f\"Expecting tensor with last dimension size {self.features}.\"\n", + " )\n", + "\n", + " passenger_dims = x.shape[:-2]\n", + " B = passenger_dims.numel()\n", + " S = x.shape[-2]\n", + " C = x.shape[-1]\n", + " x = x.reshape(B, S, C)\n", + "\n", + " # x [B, S, C]\n", + " # q, k, v [B, H, S, C/H]\n", + " q, k, v = (\n", + " self.qkv_layer(x)\n", + " .view(B, S, self.n_heads, 3 * (C // self.n_heads))\n", + " .transpose(1, 2)\n", + " .chunk(chunks=3, dim=3)\n", + " )\n", + "\n", + " # Let us enforce either flash (A100+) or memory efficient attention.\n", + " if Version(version(\"torch\")) > Version(\"2.3.0\"):\n", + " with sdpa_kernel(\n", + " [SDPBackend.FLASH_ATTENTION, SDPBackend.EFFICIENT_ATTENTION]\n", + " ):\n", + " # x [B, H, S, C//H]\n", + " x = F.scaled_dot_product_attention(\n", + " q, k, v, attn_mask=attn_mask, dropout_p=self.dropout\n", + " )\n", + " else:\n", + " with torch.backends.cuda.sdp_kernel(\n", + " enable_flash=True, enable_math=False, enable_mem_efficient=True\n", + " ):\n", + " # x [B, H, S, C//H]\n", + " x = 
F.scaled_dot_product_attention(\n", + " q, k, v, attn_mask=attn_mask, dropout_p=self.dropout\n", + " )\n", + "\n", + " # x [B, S, C]\n", + " x = x.transpose(1, 2).view(B, S, C)\n", + "\n", + " # x [B, S, C]\n", + " x = self.w_layer(x)\n", + "\n", + " # Back to input shape\n", + " x = x.view(*passenger_dims, S, self.features)\n", + " return x\n", + "\n", + "\n", + "class Transformer(nn.Module):\n", + " \"\"\"\n", + " Transformer for inputs of shape [..., S, features].\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " features: int,\n", + " mlp_multiplier: int,\n", + " n_heads: int,\n", + " dropout: float,\n", + " drop_path: float,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Number of features for inputs to the layer.\n", + " mlp_multiplier: Model uses features*mlp_multiplier hidden units.\n", + " n_heads: Number of attention heads. Should be a factor of features.\n", + " (I.e. the layer uses features // n_heads.)\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.features = features\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self.drop_path = (\n", + " DropPath(drop_path) if drop_path > 0.0 else nn.Identity()\n", + " )\n", + "\n", + " self.attention = nn.Sequential(\n", + " LayerNormPassThrough(features),\n", + " MultiheadAttention(features, n_heads, dropout),\n", + " )\n", + "\n", + " self.ff = nn.Sequential(\n", + " nn.LayerNorm(features),\n", + " Mlp(\n", + " features=features,\n", + " hidden_features=features * mlp_multiplier,\n", + " dropout=dropout,\n", + " ),\n", + " )\n", + "\n", + " def forward(self, d: tuple[Tensor, Tensor | None]) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " d (tuple): tuple of a tensor of shape [..., sequence, features]\n", + " and the attention mask\n", + " Returns:\n", + " Tensor: Tensor of shape [..., sequence, features]\n", + " \"\"\"\n", + " x, attn_mask = d\n", + " if not x.shape[-1] == self.features:\n", + " raise ValueError(\n", + " f\"Expecting tensor with last dimension size {self.features}.\"\n", + " )\n", + "\n", + " attention_x = self.attention(d)\n", + "\n", + " x = x + self.drop_path(attention_x)\n", + " x = x + self.drop_path(self.ff(x))\n", + "\n", + " return x\n", + "\n", + "\n", + "class _Shift(nn.Module):\n", + " \"\"\"Private base class for the shifter. This allows some behaviour to be\n", + " easily handled when the shifter isn't used.\n", + " \"\"\"\n", + "\n", + " def __init__(self):\n", + " super().__init__()\n", + "\n", + " self._shifted = False\n", + "\n", + " @torch.no_grad()\n", + " def reset(self) -> None:\n", + " \"\"\"\n", + " Resets the bool tracking whether the data is shifted\n", + " \"\"\"\n", + " self._shifted: bool = False\n", + "\n", + " def forward(self, data: Tensor) -> tuple[Tensor, dict[bool, None]]:\n", + " return data, {True: None, False: None}\n", + "\n", + "\n", + "class SWINShift(_Shift):\n", + " \"\"\"\n", + " Handles the shifting of patches similar to how SWIN works. However if we\n", + " shift the latitudes then the poles will wrap and potentially that might be\n", + " problematic. 
The position tokens should handle it, but masking is safer.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " mu_shape: tuple[int, int],\n", + " global_shape: tuple[int, int],\n", + " local_shape: tuple[int, int],\n", + " patch_shape: tuple[int, int],\n", + " n_context_tokens: int = 2,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " mu_shape: the shape of the masking units\n", + " global_shape: number of global patches in lat and lon\n", + " local_shape: size of the local patches\n", + " patch_shape: patch size\n", + " n_context_tokens: number of additional context tokens at start of\n", + " _each_ local sequence\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self._mu_shape = ms = mu_shape\n", + " self._g_shape = gs = global_shape\n", + " self._l_shape = ls = local_shape\n", + " self._p_shape = ps = patch_shape\n", + " self._lat_patch = (gs[0], ls[0], gs[1], ls[1])\n", + " self._n_context_tokens = n_context_tokens\n", + "\n", + " self._g_shift_to = tuple(\n", + " int(0.5 * x / p) for x, p in zip(ms, ps, strict=False)\n", + " )\n", + " self._g_shift_from = tuple(\n", + " -int(0.5 * x / p) for x, p in zip(ms, ps, strict=False)\n", + " )\n", + "\n", + " # Define the attention masks for the shifted MaxViT.\n", + " nglobal = global_shape[0] * global_shape[1]\n", + " nlocal = (\n", + " local_shape[0] * local_shape[1] + self._n_context_tokens\n", + " ) # \"+ 1\" for leadtime\n", + "\n", + " lm = torch.ones((nglobal, 1, nlocal, nlocal), dtype=bool)\n", + " mwidth = int(0.5 * local_shape[1]) * local_shape[0]\n", + " lm[\n", + " : gs[1],\n", + " :,\n", + " self._n_context_tokens : mwidth + self._n_context_tokens,\n", + " self._n_context_tokens : mwidth + self._n_context_tokens,\n", + " ] = False\n", + " self.register_buffer(\"local_mask\", lm)\n", + "\n", + " gm = torch.ones((nlocal, 1, nglobal, nglobal), dtype=bool)\n", + " gm[: int(0.5 * ls[1]) * ls[0], :, : gs[1], : gs[1]] = False\n", + " self.register_buffer(\"global_mask\", gm)\n", + "\n", + " def _to_grid_global(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the global/local setting back to the\n", + " lat/lon grid setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data on the lat/lon grid\n", + " \"\"\"\n", + " nbatch, *other = x.shape\n", + "\n", + " y1 = x.view(nbatch, *self._g_shape, *self._l_shape, -1)\n", + " y2 = y1.permute(0, 5, 1, 3, 2, 4).contiguous()\n", + "\n", + " s = y2.shape\n", + " return y2.view((nbatch, -1, s[2] * s[3], s[4] * s[5]))\n", + "\n", + " def _to_grid_local(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the local/global setting to the\n", + " lat/lon grid setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the lat/lon setting.\n", + " \"\"\"\n", + " x = x.transpose(2, 1).contiguous()\n", + " return self._to_grid_global(x)\n", + "\n", + " def _from_grid_global(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the lat/lon grid to the global/local\n", + " setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the global/local setting\n", + " \"\"\"\n", + " nbatch, *other = x.shape\n", + "\n", + " z1 = x.view(nbatch, -1, *self._lat_patch)\n", + " z2 = z1.permute(0, 2, 4, 3, 5, 1).contiguous()\n", + "\n", + " s = z2.shape\n", + " return z2.view(nbatch, s[1] * s[2], s[3] * s[4], -1)\n", + "\n", + " def _from_grid_local(self, x: Tensor) 
-> Tensor:\n", + " \"\"\"\n", + " Shuffle and reshape the data from the lat/lon grid to the local/global\n", + " setting\n", + " Args:\n", + " x: the data tensor to be shuffled.\n", + " Returns:\n", + " x: data in the local/global setting\n", + " \"\"\"\n", + " x = self._from_grid_global(x)\n", + " return x.transpose(2, 1).contiguous()\n", + "\n", + " def _shift(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Shifts data in the gridded lat/lon setting by half the mask unit shape\n", + " Args:\n", + " x: data to be shifted\n", + " Returns:\n", + " x: either the shifted or unshifted data\n", + " \"\"\"\n", + " shift = self._g_shift_from if self._shifted else self._g_shift_to\n", + " x_shifted = torch.roll(x, shift, (-2, -1))\n", + "\n", + " self._shifted = not self._shifted\n", + " return x_shifted\n", + "\n", + " def _sep_lt(self, x: Tensor) -> tuple[Tensor, Tensor]:\n", + " \"\"\"\n", + " Separate off the lead time from the local patches\n", + " Args:\n", + " x: data to have leadtime removed from\n", + " Returns:\n", + " lt: leadtime\n", + " x: data without the lead time in the local patch\n", + " \"\"\"\n", + " lt_it = x[:, : self._n_context_tokens, :, :]\n", + " x_stripped = x[:, self._n_context_tokens :, :, :]\n", + "\n", + " return lt_it, x_stripped\n", + "\n", + " def forward(self, data: Tensor) -> tuple[Tensor, Tensor]:\n", + " \"\"\"Shift or unshift the data depending on whether it is already\n", + " shifted, as tracked by self._shifted.\n", + "\n", + " Args:\n", + " data: data to be shifted\n", + " Returns:\n", + " tuple: shifted data tensor and the attention masks\n", + " \"\"\"\n", + " lt, x = self._sep_lt(data)\n", + "\n", + " x_grid = self._to_grid_local(x)\n", + " x_shifted = self._shift(x_grid)\n", + " x_patched = self._from_grid_local(x_shifted)\n", + "\n", + " # Mask has to be repeated based on batch size\n", + " n_batch = x_grid.shape[0]\n", + " local_rep = [n_batch] + [1] * (self.local_mask.ndim - 1)\n", + " global_rep = [n_batch] + [1] * (self.global_mask.ndim - 1)\n", + "\n", + " if self._shifted:\n", + " attn_mask = {\n", + " True: self.local_mask.repeat(local_rep),\n", + " False: self.global_mask.repeat(global_rep),\n", + " }\n", + " else:\n", + " attn_mask = {True: None, False: None}\n", + "\n", + " return torch.cat((lt, x_patched), dim=1), attn_mask\n", + "\n", + "\n", + "class LocalGlobalLocalBlock(nn.Module):\n", + " \"\"\"\n", + " Applies alternating block and grid attention. Given a parameter n_blocks,\n", + " the entire module contains 2*n_blocks+1 transformer blocks. The first,\n", + " third, ..., last apply local (block) attention. The second, fourth, ...\n", + " global (grid) attention.\n", + "\n", + " This is heavily inspired by\n", + " Tu et al. \"MaxViT: Multi-Axis Vision Transformer\"\n", + " (https://arxiv.org/abs/2204.01697).\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " features: int,\n", + " mlp_multiplier: int,\n", + " n_heads: int,\n", + " dropout: float,\n", + " n_blocks: int,\n", + " drop_path: float,\n", + " shifter: nn.Module | None = None,\n", + " checkpoint: list[int] | None = None,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " features: Number of features for inputs to the layer.\n", + " mlp_multiplier: Model uses features*mlp_multiplier hidden units.\n", + " n_heads: Number of attention heads. Should be a factor of features.\n", + " (I.e. 
the layer uses features // n_heads.)\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " n_blocks: Number of local-global transformer pairs.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.features = features\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self.drop_path = drop_path\n", + " self.n_blocks = n_blocks\n", + " self._checkpoint = checkpoint or []\n", + "\n", + " if not all(0 <= c < 2 * n_blocks + 1 for c in self._checkpoint):\n", + " raise ValueError(\n", + " \"Checkpoints should be 0 <= i < 2*n_blocks+1. \"\n", + " f\"{self._checkpoint=}.\"\n", + " )\n", + "\n", + " self.transformers = nn.ModuleList(\n", + " [\n", + " Transformer(\n", + " features=features,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=drop_path,\n", + " )\n", + " for _ in range(2 * n_blocks + 1)\n", + " ]\n", + " )\n", + "\n", + " self.evaluator = [\n", + " self._checkpoint_wrapper\n", + " if i in self._checkpoint\n", + " else lambda m, x: m(x)\n", + " for i, _ in enumerate(self.transformers)\n", + " ]\n", + "\n", + " self.shifter = shifter or _Shift()\n", + "\n", + " @staticmethod\n", + " def _checkpoint_wrapper(\n", + " model: nn.Module, data: tuple[Tensor, Tensor | None]\n", + " ) -> Tensor:\n", + " return checkpoint(model, data, use_reentrant=False)\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape::\n", + "\n", + " [batch, global_sequence, local_sequence, features]\n", + "\n", + " Returns:\n", + " Tensor: Tensor of shape::\n", + "\n", + " [batch, global_sequence, local_sequence, features]\n", + " \"\"\"\n", + " if x.shape[-1] != self.features:\n", + " raise ValueError(\n", + " f\"Expecting tensor with last dimension size {self.features}.\"\n", + " )\n", + " if x.ndim != 4:\n", + " raise ValueError(\n", + " f\"Expecting tensor with exactly four dimensions. 
{x.shape=}.\"\n", + " )\n", + "\n", + " self.shifter.reset()\n", + " local: bool = True\n", + " attn_mask = {True: None, False: None}\n", + "\n", + " transformer_iter = zip(self.evaluator, self.transformers, strict=False)\n", + "\n", + " # First local block\n", + " evaluator, transformer = next(transformer_iter)\n", + " x = evaluator(transformer, (x, attn_mask[local]))\n", + "\n", + " for evaluator, transformer in transformer_iter:\n", + " local = not local\n", + " # We are making exactly 2*n_blocks transposes.\n", + " # So the output has the same shape as input.\n", + " x = x.transpose(1, 2)\n", + "\n", + " x = evaluator(transformer, (x, attn_mask[local]))\n", + "\n", + " if not local:\n", + " x, attn_mask = self.shifter(x)\n", + "\n", + " return x\n", + "\n", + "\n", + "class PatchEmbed(nn.Module):\n", + " \"\"\"\n", + " Patch embedding via 2D convolution.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self, patch_size: int | tuple[int, ...], channels: int, embed_dim: int\n", + " ):\n", + " super().__init__()\n", + "\n", + " self.patch_size = patch_size\n", + " self.channels = channels\n", + " self.embed_dim = embed_dim\n", + "\n", + " self.proj = nn.Conv2d(\n", + " channels,\n", + " embed_dim,\n", + " kernel_size=patch_size,\n", + " stride=patch_size,\n", + " bias=True,\n", + " )\n", + "\n", + " def forward(self, x: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape [batch, channels, lat, lon].\n", + " Returns:\n", + " Tensor: Tensor with shape\n", + " [batch, embed_dim, lat//patch_size, lon//patch_size]\n", + " \"\"\"\n", + "\n", + " H, W = x.shape[-2:]\n", + "\n", + " if W % self.patch_size[1] != 0:\n", + " raise ValueError(\n", + " f\"Cannot do patch embedding for tensor of shape {x.size()}\"\n", + " f\" with patch size {self.patch_size}. (Dimensions are BSCHW.)\"\n", + " )\n", + " if H % self.patch_size[0] != 0:\n", + " raise ValueError(\n", + " f\"Cannot do patch embedding for tensor of shape {x.size()}\"\n", + " f\" with patch size {self.patch_size}. 
(Dimensions are BSCHW.)\"\n", + " )\n", + "\n", + " x = self.proj(x)\n", + "\n", + " return x\n", + "\n", + "\n", + "class PrithviWxCEncoderDecoder(nn.Module):\n", + " \"\"\"\n", + " Hiera-MaxViT encoder/decoder code.\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " embed_dim: int,\n", + " n_blocks: int,\n", + " mlp_multiplier: float,\n", + " n_heads: int,\n", + " dropout: float,\n", + " drop_path: float,\n", + " shifter: nn.Module | None = None,\n", + " transformer_cp: list[int] | None = None,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " embed_dim: Embedding dimension\n", + " n_blocks: Number of local-global transformer pairs.\n", + " mlp_multiplier: MLP multiplier for hidden features in feed forward\n", + " networks.\n", + " n_heads: Number of attention heads.\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.embed_dim = embed_dim\n", + " self.n_blocks = n_blocks\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self._transformer_cp = transformer_cp\n", + "\n", + " self.lgl_block = LocalGlobalLocalBlock(\n", + " features=embed_dim,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=drop_path,\n", + " n_blocks=n_blocks,\n", + " shifter=shifter,\n", + " checkpoint=transformer_cp,\n", + " )\n", + "\n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Args:\n", + " x: Tensor of shape\n", + " [batch, global sequence, local sequence, embed_dim]\n", + " Returns:\n", + " Tensor of shape\n", + " [batch, mask_unit_sequence, local_sequence, embed_dim].\n", + " Identical in shape to the input x.\n", + " \"\"\"\n", + "\n", + " x = self.lgl_block(x)\n", + "\n", + " return x\n", + "\n", + "\n", + "class PrithviWxC(nn.Module):\n", + " \"\"\"Encoder-decoder fusing Hiera with MaxViT. See\n", + " - Ryali et al. \"Hiera: A Hierarchical Vision Transformer without the\n", + " Bells-and-Whistles\" (https://arxiv.org/abs/2306.00989)\n", + " - Tu et al. \"MaxViT: Multi-Axis Vision Transformer\"\n", + " (https://arxiv.org/abs/2204.01697)\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " in_channels: int,\n", + " input_size_time: int,\n", + " in_channels_static: int,\n", + " input_scalers_mu: Tensor,\n", + " input_scalers_sigma: Tensor,\n", + " input_scalers_epsilon: float,\n", + " static_input_scalers_mu: Tensor,\n", + " static_input_scalers_sigma: Tensor,\n", + " static_input_scalers_epsilon: float,\n", + " output_scalers: Tensor,\n", + " n_lats_px: int,\n", + " n_lons_px: int,\n", + " patch_size_px: tuple[int],\n", + " mask_unit_size_px: tuple[int],\n", + " mask_ratio_inputs: float,\n", + " embed_dim: int,\n", + " n_blocks_encoder: int,\n", + " n_blocks_decoder: int,\n", + " mlp_multiplier: float,\n", + " n_heads: int,\n", + " dropout: float,\n", + " drop_path: float,\n", + " parameter_dropout: float,\n", + " residual: str,\n", + " masking_mode: str,\n", + " positional_encoding: str,\n", + " decoder_shifting: bool = False,\n", + " checkpoint_encoder: list[int] | None = None,\n", + " checkpoint_decoder: list[int] | None = None,\n", + " ) -> None:\n", + " \"\"\"\n", + " Args:\n", + " in_channels: Number of input channels.\n", + " input_size_time: Number of timestamps in input.\n", + " in_channels_static: Number of input channels for static data.\n", + " input_scalers_mu: Tensor of size (in_channels,). 
Used to rescale\n", + " input.\n", + " input_scalers_sigma: Tensor of size (in_channels,). Used to rescale\n", + " input.\n", + " input_scalers_epsilon: Float. Used to rescale input.\n", + " static_input_scalers_mu: Tensor of size (in_channels_static). Used\n", + " to rescale static inputs.\n", + " static_input_scalers_sigma: Tensor of size (in_channels_static).\n", + " Used to rescale static inputs.\n", + " static_input_scalers_epsilon: Float. Used to rescale static inputs.\n", + " output_scalers: Tensor of shape (in_channels,). Used to rescale\n", + " output.\n", + " n_lats_px: Total latitudes in data. In pixels.\n", + " n_lons_px: Total longitudes in data. In pixels.\n", + " patch_size_px: Patch size for tokenization. In pixels lat/lon.\n", + " mask_unit_size_px: Size of each mask unit. In pixels lat/lon.\n", + " mask_ratio_inputs: Masking ratio for inputs. 0 to 1.\n", + " embed_dim: Embedding dimension\n", + " n_blocks_encoder: Number of local-global transformer pairs in\n", + " encoder.\n", + " n_blocks_decoder: Number of local-global transformer pairs in\n", + " decoder.\n", + " mlp_multiplier: MLP multiplier for hidden features in feed forward\n", + " networks.\n", + " n_heads: Number of attention heads.\n", + " dropout: Dropout.\n", + " drop_path: DropPath.\n", + " parameter_dropout: Dropout applied to parameters.\n", + " residual: Indicates whether and how model should work as residual\n", + " model. Accepted values are 'climate', 'temporal' and 'none'\n", + " positional_encoding: possible values are\n", + " ['absolute' (default), 'fourier'].\n", + " 'absolute' lat lon encoded in 3 dimensions using sine and\n", + " cosine\n", + " 'fourier' lat/lon to be encoded using various frequencies\n", + " masking_mode: String ['local', 'global', 'both'] that controls the\n", + " type of masking used.\n", + " checkpoint_encoder: List of integers controlling if gradient\n", + " checkpointing is used on encoder.\n", + " Format: [] for no gradient checkpointing. 
[3, 7] for\n", + " checkpointing after 4th and 8th layer etc.\n", + " checkpoint_decoder: List of integers controlling if gradient\n", + " checkpointing is used on decoder.\n", + " Format: See `checkpoint_encoder`.\n", + " masking_mode: The type of masking to use\n", + " {'global', 'local', 'both'}\n", + " decoder_shifting: Whether to use swin shifting in the decoder.\n", + " \"\"\"\n", + " super().__init__()\n", + "\n", + " self.in_channels = in_channels\n", + " self.input_size_time = input_size_time\n", + " self.in_channels_static = in_channels_static\n", + " self.n_lats_px = n_lats_px\n", + " self.n_lons_px = n_lons_px\n", + " self.patch_size_px = patch_size_px\n", + " self.mask_unit_size_px = mask_unit_size_px\n", + " self.mask_ratio_inputs = mask_ratio_inputs\n", + " self.embed_dim = embed_dim\n", + " self.n_blocks_encoder = n_blocks_encoder\n", + " self.n_blocks_decoder = n_blocks_decoder\n", + " self.mlp_multiplier = mlp_multiplier\n", + " self.n_heads = n_heads\n", + " self.dropout = dropout\n", + " self.drop_path = drop_path\n", + " self.residual = residual\n", + " self._decoder_shift = decoder_shifting\n", + " self.positional_encoding = positional_encoding\n", + " self._checkpoint_encoder = checkpoint_encoder\n", + " self._checkpoint_decoder = checkpoint_decoder\n", + "\n", + " assert self.n_lats_px % self.mask_unit_size_px[0] == 0\n", + " assert self.n_lons_px % self.mask_unit_size_px[1] == 0\n", + " assert self.mask_unit_size_px[0] % self.patch_size_px[0] == 0\n", + " assert self.mask_unit_size_px[1] % self.patch_size_px[1] == 0\n", + "\n", + " if self.patch_size_px[0] != self.patch_size_px[1]:\n", + " raise NotImplementedError(\n", + " \"Current pixel shuffle symmetric patches.\"\n", + " )\n", + "\n", + " self.local_shape_mu = (\n", + " self.mask_unit_size_px[0] // self.patch_size_px[0],\n", + " self.mask_unit_size_px[1] // self.patch_size_px[1],\n", + " )\n", + " self.global_shape_mu = (\n", + " self.n_lats_px // self.mask_unit_size_px[0],\n", + " self.n_lons_px // self.mask_unit_size_px[1],\n", + " )\n", + "\n", + " assert input_scalers_mu.shape == (in_channels,)\n", + " assert input_scalers_sigma.shape == (in_channels,)\n", + " assert output_scalers.shape == (in_channels,)\n", + "\n", + " if self.positional_encoding != \"fourier\":\n", + " assert static_input_scalers_mu.shape == (in_channels_static,)\n", + " assert static_input_scalers_sigma.shape == (in_channels_static,)\n", + "\n", + " # Input shape [batch, time, parameter, lat, lon]\n", + " self.input_scalers_epsilon = input_scalers_epsilon\n", + " self.register_buffer(\n", + " \"input_scalers_mu\", input_scalers_mu.reshape(1, 1, -1, 1, 1)\n", + " )\n", + " self.register_buffer(\n", + " \"input_scalers_sigma\", input_scalers_sigma.reshape(1, 1, -1, 1, 1)\n", + " )\n", + "\n", + " # Static inputs shape [batch, parameter, lat, lon]\n", + " self.static_input_scalers_epsilon = static_input_scalers_epsilon\n", + " self.register_buffer(\n", + " \"static_input_scalers_mu\",\n", + " static_input_scalers_mu.reshape(1, -1, 1, 1),\n", + " )\n", + " self.register_buffer(\n", + " \"static_input_scalers_sigma\",\n", + " static_input_scalers_sigma.reshape(1, -1, 1, 1),\n", + " )\n", + "\n", + " # Output shape [batch, parameter, lat, lon]\n", + " self.register_buffer(\n", + " \"output_scalers\", output_scalers.reshape(1, -1, 1, 1)\n", + " )\n", + "\n", + " self.parameter_dropout = nn.Dropout2d(p=parameter_dropout)\n", + "\n", + " self.patch_embedding = PatchEmbed(\n", + " patch_size=patch_size_px,\n", + " channels=in_channels * 
input_size_time,\n", + " embed_dim=embed_dim,\n", + " )\n", + "\n", + " if self.residual == \"climate\":\n", + " self.patch_embedding_static = PatchEmbed(\n", + " patch_size=patch_size_px,\n", + " channels=in_channels + in_channels_static,\n", + " embed_dim=embed_dim,\n", + " )\n", + " else:\n", + " self.patch_embedding_static = PatchEmbed(\n", + " patch_size=patch_size_px,\n", + " channels=in_channels_static,\n", + " embed_dim=embed_dim,\n", + " )\n", + "\n", + " self.input_time_embedding = nn.Linear(1, embed_dim // 4, bias=True)\n", + " self.lead_time_embedding = nn.Linear(1, embed_dim // 4, bias=True)\n", + "\n", + " self.mask_token = nn.Parameter(torch.randn(1, 1, 1, self.embed_dim))\n", + " self._nglobal_mu = np.prod(self.global_shape_mu)\n", + " self._global_idx = torch.arange(self._nglobal_mu)\n", + "\n", + " self._nlocal_mu = np.prod(self.local_shape_mu)\n", + " self._local_idx = torch.arange(self._nlocal_mu)\n", + "\n", + " self.encoder = PrithviWxCEncoderDecoder(\n", + " embed_dim=embed_dim,\n", + " n_blocks=n_blocks_encoder,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=drop_path,\n", + " transformer_cp=checkpoint_encoder,\n", + " )\n", + "\n", + " if n_blocks_decoder != 0:\n", + " if self._decoder_shift:\n", + " self.decoder_shifter = d_shifter = SWINShift(\n", + " self.mask_unit_size_px,\n", + " self.global_shape_mu,\n", + " self.local_shape_mu,\n", + " self.patch_size_px,\n", + " n_context_tokens=0,\n", + " )\n", + " else:\n", + " self.decoder_shifter = d_shifter = None\n", + "\n", + " self.decoder = PrithviWxCEncoderDecoder(\n", + " embed_dim=embed_dim,\n", + " n_blocks=n_blocks_decoder,\n", + " mlp_multiplier=mlp_multiplier,\n", + " n_heads=n_heads,\n", + " dropout=dropout,\n", + " drop_path=0.0,\n", + " shifter=d_shifter,\n", + " transformer_cp=checkpoint_decoder,\n", + " )\n", + "\n", + " self.unembed = nn.Linear(\n", + " self.embed_dim,\n", + " self.in_channels\n", + " * self.patch_size_px[0]\n", + " * self.patch_size_px[1],\n", + " bias=True,\n", + " )\n", + "\n", + " self.masking_mode = masking_mode.lower()\n", + " match self.masking_mode:\n", + " case \"local\":\n", + " self.generate_mask = self._gen_mask_local\n", + " case \"global\":\n", + " self.generate_mask = self._gen_mask_global\n", + " case \"both\":\n", + " self._mask_both_local: bool = True\n", + " self.generate_mask = self._gen_mask_both\n", + " case _:\n", + " raise ValueError(\n", + " f\"Masking mode '{masking_mode}' not supported\"\n", + " )\n", + "\n", + " def swap_masking(self) -> None:\n", + " self._mask_both_local = not self._mask_both_local\n", + "\n", + " @cached_property\n", + " def n_masked_global(self):\n", + " return int(self.mask_ratio_inputs * np.prod(self.global_shape_mu))\n", + "\n", + " @cached_property\n", + " def n_masked_local(self):\n", + " return int(self.mask_ratio_inputs * np.prod(self.local_shape_mu))\n", + "\n", + " @staticmethod\n", + " def _shuffle_along_axis(a, axis):\n", + " idx = torch.argsort(input=torch.rand(*a.shape), dim=axis)\n", + " return torch.gather(a, dim=axis, index=idx)\n", + "\n", + " def _gen_mask_local(self, sizes: tuple[int]) -> tuple[Tensor]:\n", + " \"\"\"\n", + " Args:\n", + " batch_size: Number of elements in batch\n", + " Returns:\n", + " Tuple of torch tensors. 
[indices masked, indices unmasked].\n", + " Each of these is a tensor of shape (batch, global sequene)\n", + " \"\"\"\n", + " # Identify which indices (values) should be masked\n", + "\n", + " maskable_indices = self._local_idx.view(1, -1).expand(*sizes[:2], -1)\n", + "\n", + " maskable_indices = self._shuffle_along_axis(maskable_indices, 2)\n", + "\n", + " indices_masked = maskable_indices[:, :, : self.n_masked_local]\n", + " indices_unmasked = maskable_indices[:, :, self.n_masked_local :]\n", + "\n", + " return indices_masked, indices_unmasked\n", + "\n", + " def _gen_mask_global(self, sizes: tuple[int]) -> tuple[Tensor]:\n", + " \"\"\"\n", + " Args:\n", + " batch_size: Number of elements in batch\n", + " Returns:\n", + " Tuple of torch tensors. [indices masked, indices unmasked].\n", + " Each of these is a tensor of shape (batch, global sequene)\n", + " \"\"\"\n", + " # Identify which indices (values) should be masked\n", + "\n", + " maskable_indices = self._global_idx.view(1, -1).expand(*sizes[:1], -1)\n", + "\n", + " maskable_indices = self._shuffle_along_axis(maskable_indices, 1)\n", + "\n", + " indices_masked = maskable_indices[:, : self.n_masked_global]\n", + " indices_unmasked = maskable_indices[:, self.n_masked_global :]\n", + "\n", + " return indices_masked, indices_unmasked\n", + "\n", + " def _gen_mask_both(self, sizes: tuple[int]) -> tuple[Tensor]:\n", + " if self._mask_both_local:\n", + " return self._gen_mask_local(sizes)\n", + " else:\n", + " return self._gen_mask_global(sizes)\n", + "\n", + " @staticmethod\n", + " def reconstruct_batch(\n", + " idx_masked: Tensor,\n", + " idx_unmasked: Tensor,\n", + " data_masked: Tensor,\n", + " data_unmasked: Tensor,\n", + " ) -> Tensor:\n", + " \"\"\"Reconstructs a tensor along the mask unit dimension. Batched\n", + " version.\n", + "\n", + " Args:\n", + " idx_masked: Tensor of shape `batch, mask unit sequence`.\n", + " idx_unmasked: Tensor of shape `batch, mask unit sequence`.\n", + " data_masked: Tensor of shape `batch, mask unit sequence, ...`.\n", + " Should have same size along mask unit sequence dimension as\n", + " idx_masked. Dimensions beyond the first two, marked here as ...\n", + " will typically be `local_sequence, channel` or\n", + " `channel, lat, lon`. These dimensions should agree with\n", + " data_unmasked.\n", + " data_unmasked: Tensor of shape `batch, mask unit sequence, ...`.\n", + " Should have same size along mask unit sequence dimension as\n", + " idx_unmasked. Dimensions beyond the first two, marked here as\n", + " ... will typically be `local_sequence, channel` or `channel,\n", + " lat, lon`. These dimensions should agree with data_masked.\n", + " Returns:\n", + " Tensor: Tensor of same shape as inputs data_masked and\n", + " data_unmasked. I.e. `batch, mask unit sequence, ...`. 
Index for\n", + " the total data composed of the masked and the unmasked part.\n", + " \"\"\"\n", + " dim: int = idx_masked.ndim\n", + "\n", + " idx_total = torch.argsort(\n", + " torch.cat([idx_masked, idx_unmasked], dim=-1), dim=-1\n", + " )\n", + " idx_total = idx_total.view(\n", + " *idx_total.shape, *[1] * (data_unmasked.ndim - dim)\n", + " )\n", + " idx_total = idx_total.expand(\n", + " *idx_total.shape[:dim], *data_unmasked.shape[dim:]\n", + " )\n", + "\n", + " data = torch.cat([data_masked, data_unmasked], dim=dim - 1)\n", + " data = torch.gather(data, dim=dim - 1, index=idx_total)\n", + "\n", + " return data, idx_total\n", + "\n", + " def fourier_pos_encoding(self, x_static: Tensor) -> Tensor:\n", + " \"\"\"\n", + " Args\n", + " x_static: B x C x H x W. first two channels are lat, and lon\n", + " Returns\n", + " Tensor: Tensor of shape B x E x H x W where E is the embedding\n", + " dimension.\n", + " \"\"\"\n", + "\n", + " # B x C x H x W -> B x 1 x H/P x W/P\n", + " latitudes_patch = F.avg_pool2d(\n", + " x_static[:, [0]],\n", + " kernel_size=self.patch_size_px,\n", + " stride=self.patch_size_px,\n", + " )\n", + " longitudes_patch = F.avg_pool2d(\n", + " x_static[:, [1]],\n", + " kernel_size=self.patch_size_px,\n", + " stride=self.patch_size_px,\n", + " )\n", + "\n", + " modes = (\n", + " torch.arange(self.embed_dim // 4, device=x_static.device).view(\n", + " 1, -1, 1, 1\n", + " )\n", + " + 1.0\n", + " )\n", + " pos_encoding = torch.cat(\n", + " (\n", + " torch.sin(latitudes_patch * modes),\n", + " torch.sin(longitudes_patch * modes),\n", + " torch.cos(latitudes_patch * modes),\n", + " torch.cos(longitudes_patch * modes),\n", + " ),\n", + " axis=1,\n", + " )\n", + "\n", + " return pos_encoding # B x E x H/P x W/P\n", + "\n", + " def time_encoding(self, input_time, lead_time):\n", + " \"\"\"\n", + " Args:\n", + " input_time: Tensor of shape [batch].\n", + " lead_time: Tensor of shape [batch].\n", + " Returns:\n", + " Tensor: Tensor of shape [batch, embed_dim, 1, 1]\n", + " \"\"\"\n", + " input_time = self.input_time_embedding(input_time.view(-1, 1, 1, 1))\n", + " lead_time = self.lead_time_embedding(lead_time.view(-1, 1, 1, 1))\n", + "\n", + " time_encoding = torch.cat(\n", + " (\n", + " torch.cos(input_time),\n", + " torch.cos(lead_time),\n", + " torch.sin(input_time),\n", + " torch.sin(lead_time),\n", + " ),\n", + " axis=3,\n", + " )\n", + " return time_encoding\n", + "\n", + " def to_patching(self, x: Tensor) -> Tensor:\n", + " \"\"\"Transform data from lat/lon space to two axis patching\n", + "\n", + " Args: ->\n", + " x: Tesnor in lat/lon space (N, C, Nlat//P_0, Nlon//P_1)\n", + "\n", + " Returns:\n", + " Tensor in patch space (N, G, L, C)\n", + " \"\"\"\n", + " n_batch = x.shape[0]\n", + "\n", + " x = x.view(\n", + " n_batch,\n", + " -1,\n", + " self.global_shape_mu[0],\n", + " self.local_shape_mu[0],\n", + " self.global_shape_mu[1],\n", + " self.local_shape_mu[1],\n", + " )\n", + " x = x.permute(0, 2, 4, 3, 5, 1).contiguous()\n", + "\n", + " s = x.shape\n", + " return x.view(n_batch, s[1] * s[2], s[3] * s[4], -1)\n", + "\n", + " def from_patching(self, x: Tensor) -> Tensor:\n", + " \"\"\"Transform data from two axis patching to lat/lon space\n", + "\n", + " Args:\n", + " x: Tensor in patch space with shape (N, G, L, C*P_0*P_1)\n", + "\n", + " Returns:\n", + " Tensor: Tensor in lat/lon space\n", + " (N, C*P_0*P_1, Nlat//P_0, Nlon // P_1)\n", + " \"\"\"\n", + " n_batch = x.shape[0]\n", + "\n", + " x = x.view(\n", + " n_batch,\n", + " self.global_shape_mu[0],\n", + " 
self.global_shape_mu[1],\n", + " self.local_shape_mu[0],\n", + " self.local_shape_mu[1],\n", + " -1,\n", + " )\n", + " x = x.permute(0, 5, 1, 3, 2, 4).contiguous()\n", + "\n", + " s = x.shape\n", + " return x.view(n_batch, -1, s[2] * s[3], s[4] * s[5])\n", + "\n", + " def forward(self, batch: dict[str, torch.Tensor]) -> torch.Tensor:\n", + " \"\"\"\n", + " Args:\n", + " batch: Dictionary the following keys::\n", + "\n", + " 'x': Tensor of shape [batch, time, parameter, lat, lon]\n", + " 'y': Tensor of shape [batch, parameter, lat, lon]\n", + " 'static': Tensor of shape [batch, channel_static, lat, lon]\n", + " 'climate': Optional tensor of shape [batch, parameter, lat, lon]\n", + " 'input_time': Tensor of shape [batch]. Or none.\n", + " 'lead_time': Tensor of shape [batch]. Or none.\n", + "\n", + " Returns:\n", + " Tensor: Tensor of shape [batch, parameter, lat, lon].\n", + " \"\"\" # noqa: E501\n", + " x_rescaled = (batch[\"x\"] - self.input_scalers_mu) / (\n", + " self.input_scalers_sigma + self.input_scalers_epsilon\n", + " )\n", + " batch_size = x_rescaled.shape[0]\n", + "\n", + " if self.positional_encoding == \"fourier\":\n", + " x_static_pos = self.fourier_pos_encoding(batch[\"static\"])\n", + " x_static = (\n", + " batch[\"static\"][:, 2:] - self.static_input_scalers_mu[:, 3:]\n", + " ) / (\n", + " self.static_input_scalers_sigma[:, 3:]\n", + " + self.static_input_scalers_epsilon\n", + " )\n", + " else:\n", + " x_static = (batch[\"static\"] - self.static_input_scalers_mu) / (\n", + " self.static_input_scalers_sigma\n", + " + self.static_input_scalers_epsilon\n", + " )\n", + "\n", + " if self.residual == \"temporal\":\n", + " # We create a residual of same shape as y\n", + " index = torch.where(\n", + " batch[\"lead_time\"] > 0, batch[\"x\"].shape[1] - 1, 0\n", + " )\n", + " index = index.view(-1, 1, 1, 1, 1)\n", + " index = index.expand(batch_size, 1, *batch[\"x\"].shape[2:])\n", + " x_hat = torch.gather(batch[\"x\"], dim=1, index=index)\n", + " x_hat = x_hat.squeeze(1)\n", + " elif self.residual == \"climate\":\n", + " climate_scaled = (\n", + " batch[\"climate\"] - self.input_scalers_mu.view(1, -1, 1, 1)\n", + " ) / (\n", + " self.input_scalers_sigma.view(1, -1, 1, 1)\n", + " + self.input_scalers_epsilon\n", + " )\n", + "\n", + " # [batch, time, parameter, lat, lon]\n", + " # -> [batch, time x parameter, lat, lon]\n", + " x_rescaled = x_rescaled.flatten(1, 2)\n", + " # Parameter dropout\n", + " x_rescaled = self.parameter_dropout(x_rescaled)\n", + "\n", + " x_embedded = self.patch_embedding(x_rescaled)\n", + "\n", + " if self.residual == \"climate\":\n", + " static_embedded = self.patch_embedding_static(\n", + " torch.cat((x_static, climate_scaled), dim=1)\n", + " )\n", + " else:\n", + " static_embedded = self.patch_embedding_static(x_static)\n", + "\n", + " if self.positional_encoding == \"fourier\":\n", + " static_embedded += x_static_pos\n", + "\n", + " x_embedded = self.to_patching(x_embedded)\n", + " static_embedded = self.to_patching(static_embedded)\n", + "\n", + " time_encoding = self.time_encoding(\n", + " batch[\"input_time\"], batch[\"lead_time\"]\n", + " )\n", + "\n", + " tokens = x_embedded + static_embedded + time_encoding\n", + "\n", + " # Now we generate masks based on masking_mode\n", + " indices_masked, indices_unmasked = self.generate_mask(\n", + " (batch_size, self._nglobal_mu)\n", + " )\n", + " indices_masked = indices_masked.to(device=tokens.device)\n", + " indices_unmasked = indices_unmasked.to(device=tokens.device)\n", + " maskdim: int = 
indices_masked.ndim\n", + "\n", + " # Unmasking\n", + " unmask_view = (*indices_unmasked.shape, *[1] * (tokens.ndim - maskdim))\n", + " unmasked = torch.gather(\n", + " tokens,\n", + " dim=maskdim - 1,\n", + " index=indices_unmasked.view(*unmask_view).expand(\n", + " *indices_unmasked.shape, *tokens.shape[maskdim:]\n", + " ),\n", + " )\n", + "\n", + " # Encoder\n", + " x_encoded = self.encoder(unmasked)\n", + "\n", + " # Generate and position encode the mask tokens\n", + " # [1, 1, 1, embed_dim]\n", + " # -> [batch, global_seq_masked, local seq, embed_dim]\n", + " mask_view = (*indices_masked.shape, *[1] * (tokens.ndim - maskdim))\n", + " masking = self.mask_token.repeat(*static_embedded.shape[:3], 1)\n", + " masked = masking + static_embedded\n", + " masked = torch.gather(\n", + " masked,\n", + " dim=maskdim - 1,\n", + " index=indices_masked.view(*mask_view).expand(\n", + " *indices_masked.shape, *tokens.shape[maskdim:]\n", + " ),\n", + " )\n", + "\n", + " recon, _ = self.reconstruct_batch(\n", + " indices_masked, indices_unmasked, masked, x_encoded\n", + " )\n", + "\n", + " x_decoded = self.decoder(recon)\n", + "\n", + " # Output: [batch, global sequence, local sequence,\n", + " # in_channels * patch_size[0] * patch_size[1]]\n", + " x_unembed = self.unembed(x_decoded)\n", + "\n", + " # Reshape to [batch, global_lat, global_lon, local_lat, local_lon,\n", + " # in_channels * patch_size[0] * patch_size[1]]\n", + " x_out = self.from_patching(x_unembed)\n", + "\n", + " # Pixel shuffle to [batch, in_channels, lat, lon]\n", + " x_out = F.pixel_shuffle(x_out, self.patch_size_px[0])\n", + "\n", + " if self.residual == \"temporal\":\n", + " x_out = self.output_scalers * x_out + x_hat\n", + " elif self.residual == \"climate\":\n", + " x_out = self.output_scalers * x_out + batch[\"climate\"]\n", + " elif self.residual == \"none\":\n", + " x_out = (\n", + " self.output_scalers * x_out\n", + " + self.input_scalers_mu.reshape(1, -1, 1, 1)\n", + " )\n", + "\n", + " return x_out\n" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "ename": "", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[1;31mThe Kernel crashed while executing code in the current cell or a previous cell. \n", + "\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. \n", + "\u001b[1;31mClick here for more info. \n", + "\u001b[1;31mView Jupyter log for further details." 
+ ] + } + ], + "source": [ + "import yaml\n", + "\n", + "# from PrithviWxC.model import PrithviWxC\n", + "\n", + "with open(\"./config.yaml\", \"r\") as f:\n", + " config = yaml.safe_load(f)\n", + "\n", + "model = PrithviWxC(\n", + " in_channels=config[\"params\"][\"in_channels\"],\n", + " input_size_time=config[\"params\"][\"input_size_time\"],\n", + " in_channels_static=config[\"params\"][\"in_channels_static\"],\n", + " input_scalers_mu=in_mu,\n", + " input_scalers_sigma=in_sig,\n", + " input_scalers_epsilon=config[\"params\"][\"input_scalers_epsilon\"],\n", + " static_input_scalers_mu=static_mu,\n", + " static_input_scalers_sigma=static_sig,\n", + " static_input_scalers_epsilon=config[\"params\"][\n", + " \"static_input_scalers_epsilon\"\n", + " ],\n", + " output_scalers=output_sig**0.5,\n", + " n_lats_px=config[\"params\"][\"n_lats_px\"],\n", + " n_lons_px=config[\"params\"][\"n_lons_px\"],\n", + " patch_size_px=config[\"params\"][\"patch_size_px\"],\n", + " mask_unit_size_px=config[\"params\"][\"mask_unit_size_px\"],\n", + " mask_ratio_inputs=masking_ratio,\n", + " embed_dim=config[\"params\"][\"embed_dim\"],\n", + " n_blocks_encoder=config[\"params\"][\"n_blocks_encoder\"],\n", + " n_blocks_decoder=config[\"params\"][\"n_blocks_decoder\"],\n", + " mlp_multiplier=config[\"params\"][\"mlp_multiplier\"],\n", + " n_heads=config[\"params\"][\"n_heads\"],\n", + " dropout=config[\"params\"][\"dropout\"],\n", + " drop_path=config[\"params\"][\"drop_path\"],\n", + " parameter_dropout=config[\"params\"][\"parameter_dropout\"],\n", + " residual=residual,\n", + " masking_mode=masking_mode,\n", + " decoder_shifting=decoder_shifting,\n", + " positional_encoding=positional_encoding,\n", + " checkpoint_encoder=[],\n", + " checkpoint_decoder=[],\n", + ")\n", + "\n", + "\n", + "state_dict = torch.load(weights_path, weights_only=False)\n", + "if \"model_state\" in state_dict:\n", + " state_dict = state_dict[\"model_state\"]\n", + "model.load_state_dict(state_dict, strict=True)\n", + "\n", + "if (hasattr(model, \"device\") and model.device != device) or not hasattr(\n", + " model, \"device\"\n", + "):\n", + " model = model.to(device)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Rollout\n", + "We are now ready to perform the rollout. Again the data has to be run through a\n", + "preprocessor. However, this time we use a preprocessor that can handle the\n", + "additional intermediate data. Also, rather than calling the model directly, we\n", + "have a convenient wrapper function that performs the iteration. This also\n", + "simplifies the model loading when using a sharded checkpoint. If you attempt to\n", + "perform training steps upon this function, you should use an aggressive number\n", + "of activation checkpoints as the memory consumption becomes quite high; a\n", + "sketch of such a configuration is given below.\n",
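+ "\n", + "As a minimal illustration only (this sketch is not part of the original\n", + "notebook), a checkpoint after every transformer block, in the list-of-indices\n", + "format documented for `checkpoint_encoder`/`checkpoint_decoder` above, could\n", + "be requested as follows:\n", + "\n", + "```python\n", + "# Hypothetical aggressive setting: checkpoint after every encoder and\n", + "# decoder block when training through the rollout.\n", + "checkpoint_encoder = list(range(config[\"params\"][\"n_blocks_encoder\"]))\n", + "checkpoint_decoder = list(range(config[\"params\"][\"n_blocks_decoder\"]))\n", + "```"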
+ ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from torch import Tensor, nn\n", + "\n", + "\n", + "def rollout_iter(\n", + " nsteps: int,\n", + " model: nn.Module,\n", + " batch: dict[str, Tensor | int | float],\n", + ") -> Tensor:\n", + " \"\"\"A helper function for performing autoregressive rollout.\n", + "\n", + " Args:\n", + " nsteps (int): The number of rollout steps to take.\n", + " model (nn.Module): A model.\n", + " batch (dict): A data dictionary common to the Prithvi models.\n", + "\n", + " Raises:\n", + " ValueError: If the number of steps isn't positive.\n", + "\n", + " Returns:\n", + " Tensor: The output of the model after nsteps autoregressive iterations.\n", + " \"\"\"\n", + " if nsteps < 1:\n", + " raise ValueError(\"'nsteps' should be a positive int.\")\n", + "\n", + " xlast = batch[\"x\"][:, 1]\n", + " batch[\"lead_time\"] = batch[\"lead_time\"][..., 0]\n", + "\n", + " # Save the masking ratio to be restored later\n", + " mask_ratio_tmp = model.mask_ratio_inputs\n", + "\n", + " for step in range(nsteps):\n", + " # After first step, turn off masking\n", + " if step > 0:\n", + " model.mask_ratio_inputs = 0.0\n", + "\n", + " batch[\"static\"] = batch[\"statics\"][:, step]\n", + " batch[\"climate\"] = batch[\"climates\"][:, step]\n", + " batch[\"y\"] = batch[\"ys\"][:, step]\n", + "\n", + " out = model(batch)\n", + "\n", + " batch[\"x\"] = torch.cat((xlast[:, None], out[:, None]), dim=1)\n", + " xlast = out\n", + "\n", + " # Restore the masking ratio\n", + " model.mask_ratio_inputs = mask_ratio_tmp\n", + "\n", + " return xlast\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "# from PrithviWxC.dataloaders.merra2_rollout import preproc\n", + "# from PrithviWxC.rollout import rollout_iter\n", + "\n", + "data = next(iter(dataset))\n", + "batch = preproc([data], padding)\n", + "\n", + "for k, v in batch.items():\n", + " if isinstance(v, torch.Tensor):\n", + " batch[k] = v.to(device)\n", + "\n", + "rng_state_1 = torch.get_rng_state()\n", + "with torch.no_grad():\n", + " model.eval()\n", + " out = rollout_iter(dataset.nsteps, model, batch)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Plotting" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAisAAAEjCAYAAADzFUHYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAADvlUlEQVR4nOz9e6wtyVkeDj/d1b3W2rdzzsycmTljezw2BJIvQKIE+GxDiDF8GFsoUYCgECSEFS5BtlHAEBJuis1tQrgICYWbiCagCOE/fiQBEQUwCUQIHPxZ/MEl8NnENsae8VzP2Wefs9daffv+6H6r36p+q7q6V6+199lnP9LWXqu7urq6V3fVU+/7vG9FVVVVuMQlLnGJS1ziEpc4p4jPugGXuMQlLnGJS1ziEj5ckpVLXOISl7jEJS5xrnFJVi5xiUtc4hKXuMS5xiVZucQlLnGJS1ziEucal2TlEpe4xCUucYlLnGtckpVLXOISl7jEJS5xrnFJVi5xiUtc4hKXuMS5RnLWDdgUZVni4x//OI6OjhBF0Vk35xKXuMQlLnGJSwSgqircvn0bL3vZyxDHftvJPU9WPv7xj+Pxxx8/62Zc4hKXuMQlLnGJEfjoRz+KV7ziFd4y9zxZOTo6AgB89v/nO5Eki87+StXWlqiYNlHvtur1nW/IuXh5aqsEu86h57GPler07Yvy3SdQjssKZRz1bhtaPi7ba3HVZaNKIn0PqqQ9RtrG9/HtlYpQqqYNRXN+xdolbNsElYpQsp4jzs395cS9Spnya223RwU62/k22h4V3eOk7/b2Ko4QlRP3G1adVfOc0Db7u72t6nmu+srEmXk9Mbtf0vPhe54k2HUkp7t7v0P6rU377bhA510bCn5uVz8dFZW333bV6zpG6udD9tll+to09L7m+RLve88P6nHch3uerJDrJ0kWSFKTrBg/SLyll2aXqp+h5wopL5VptrmIhD2A2oOn97xxWz7KKyBlu4oKZfObxc1DXw58YWP2sriO5ecJOc5uV6miwXV4kbq3iWQl8pCVmM7fdqx8mw+hpKZDVqwmTklWKhUBs+nqCz6vQBrG1kH1uIjJLhElVYfQEXw/eyjPtcslu5yMpP7BcpLxIG27tx6vhRN0bt2etEs0OmUC63URHul6Q/a56va1YQxCJBz3PFnxYROrR2cA3iJsUkADeej5+Sx8F+3m56Bzu85rb+flCXq/5/dyERkfqbGJBZWN8srPp4R28G1xUQ2ug7fPJjq+a3ARRnt7jEic/bpmfzYxkcrZs0ibxMS5TEyk7a6yfYiKClHht6xUyj7G3G5bTILOG2DF4DgPRITOf1bn7gP9/pIlzt62DYyxVmwL22rH1NcYOobu4t5eaLKyKaSBtteqYO3jkAZt33FDXSS8vEQGfO4GCdKgTSh7zh+yXddvEQF+TCyVF77zz/w6aTsRDF6Gu2Ak2PdNui7XvbSfCV/b7e0uy00f+szSoWbrPpJCcJEPaftQokLmceoAbfdM88lxrP+7sU9wy/ABf+jA7yrPtxufe9xXdhkfMYuKtm5+fKctPa40CfzcrnrHwPes7ILADIXvfQCmGbQ3dcNPiVC32S5I4CVZ8WCshWLooL0p7IHNPk/fd1edQ/fblg9pv72vry2b3LNt1t1Xl4+4uMCJFf233U98P6HYwJLWR2Bsa02fi2gKF1BLUHxExTrG4baxyYdk+bDrnMoyYQ/uru/2Z9e2vjK27kYmHxY5C7Q4Seext0tkKpS4Tq1zkjDFgMqff8kqGXKOvjJTDfy8nlANo2v/ebBI3ZdkJdRV4rJESNYVwq5cR4DbXeFzOZwl+gjQtiGJFvm+zjaycAkDHJUPITyS26zvmSKcxT1zEZShFhYJtsVkSFtsq4FEKvq28Tp2hcq6f9xa6CODkquu44qzj4H8OxkDqvUcd9rrsDZJ19S3bUpwd5HL+rJJgMBZYhdkoO/enPd7d1+SFR+hkAYfadYc6voZgjGiUrJWuEhJSF27IjS2ZWXMvepz3XiPFTrdIcLHUD3AtnQD2yAuhhA30Lw/lJyEbLPbZJQX3CZS9M5QSDNjfj98CCnTd17AtLbY9UnfbetM3zH29pgdC4S5raTvY4THQwfD0OdsjNvIbsumUUISoRwbLRR6njHoex/PgwXFhfuSrExh/g91pYREpEh6Bp+Y1C5rfx4iRrVdD7YQVcJYK8CQwdY14Itk0irrIh28g7U/R0WJSsWDSIarjSHtti0stoZGuseu50UfEzgYcOtGnxC3z6riqj8EIXVWqktIiFDYFgtCCJGwSQInPyX6SVAxsRVhKOkaKh52aVOm0J+ERk6NDhe2rClToJNCYcOJAH9/tkFSpqjb5QYKDV0+ayJzX5IVwD3g+oSp0nE+0uKzVvB9fVYN2u8iLLyMTT4ILjGqjT6iQvtC3RhjEZpPQrKUSGWd9RS1VFjdWaM4mDWdf39M4mDhZUAOFUkgzQmMau5zvDblzVEcoSq6biafG0tqFz1DURJpQqOKtvOSCMDg2XKg5cI1AIeQJ9ty4RvQJd3IFO6MEOGsVE46bpOoJrsOeES3Q0S0odqhMQOc/TxxkmITll1oXS4K6F3tIx6u/aGExbZS8e++/C59uC9/6rED6y71KFNhLJFwuVtCtT7bcJN16nJZShoCEq8swrbKmjZYsUzN92TZTOGSuLNPo9lXJTEqpdrPCWkhmv22sNMTiRVnhT4uKkrz/mVN25ft6BEV5khSzlOUc9VcY4FyrhCtw0gbLxc37caqbqct2iYSU7EwZKnDcXVqoVaaTciCbTXoG+B3qV0B3CRgqJjWt813vo4VylGXL5rIBVm0bBGPAHfjponHXIOjXUZyA9n17zr55zbhsqxIE44+wuJKbGfX60OlIt2/heDCkJVKRc6bOxahwtlNdBRTwWUt2YRghR471TlC76FNTGwXCpGRKjEtJB2SYoMRkSgv6/9ATVjYvnKedsgJtYW3MSpKTZjsc3PCEeUlynlqbK/ylgi5QHXG1hvP22FbiVwme3KD6e05G8jIRajLwrC42B1bmbTbOvoTRySJDS6ELT0Wj84gHwNR2e6rPEaykCy/IWb3qPSfxwc6LqSOiD1CruN4mVCy4dJBbErkJB2OdL4QAuPTamjLX0OMyG1kjwd21mU+yEqDMG2/CODXEWrhGLq9D5tGGV0YsuLCUGY8dODVg9aIQTcEtouoLzz4vCM40d0AkWqVRLXeRDWDbtIM9MxaAdQWiKgovKSFLCZEHqqDLinRbSy69ag7NXEgKw6ArnXGrme1RkxlGnKi27gy64hyq7cuCmA+q4WTeXudrIBBeLykO+4+y7qNeYVkXTZlVP1eFVWj92gGi6TrMvIRjI6I0zGwlTMq361XcuMMtQhERa1BkawwXJvirNczqPdxGD2Y90TwAMMJUYg2pU+gu4lGolIYNHMOxdgEg2OwidviPMFnbZLGSEnX4iMWu7BAXXiyQtjlQzdE1zIWPv1KyLGEbZCeqdxlNMsn60lo/drSUpT6e11PoS0mlVJ6EJdcOHy7O/lbrNsW5aVhLS
HrDICagORlTTSoTM5CF/IC0XIFAw3hApGTogCUao83rjdHtErq/QDUsh0hiLjpdqC11kj3wIUqiVCB7lOkSQoNGoZlxUdULPeYqxyJXisFFGlbpmSp9/t0KH2khe+nz/Yxrtm/ZDGS6g4VEUttdYUgu0KV48JNaLjFSWqn63y+9vuIjHQfXdYRVz28n05X5jalrXNRx6ICIDgrbohLxG7LWWIb0UAhZUItINsU4V4ossJ9chIT3PUDt23X0BjCcl5yrkgwBkzW0VSxQqQqTQziZddaYFs5SONhu1tskNajiiNRbBoX7iy1EqK8RLQUppOcZOR5S0I6+wpUeY4oMV/NKnf3vLp1VIYdGxHJoboTBfkJiBGj1s5E60q0spTMakJEpWNq7yEqxn8lH0PuGyIs2rKigEpovGuxwoj+N39xIQ/YZhu710LtcEUASUTDlwBOEs+GulzsNvDL6SVGqmvAcSV1cxE9F7kJGTxDSIkLRD6kPp1ckrwe7qbU58+7JKbv3L79myxkONRyMcU5OXyTd5dLTDpWOse2CMuFIit98eLbuIneJDsDLAwhESIS+lKz9+Vu6TsupLx9jhACNXSl4SqONGlBQzBsgiLdq3KeapJC2hASxJapQjGPm3Jce9Ks/dP8l8S7ZE2ptzWupR7rCQC2r9vrECGpsgxVliFKU/3dBSoj1QegJi50TqXq9jHSQpam+pqAKiGrVGNpyk1XZ6UiFPOuPsyOwBEjj5SnvEVQjL9IrsM4v7DYIbl2ACCqGGlZ19tCLCUUxjxWpBuSdXYopkp936mDew9T8/6EWGVigYSFuIFCrd48n4r+DLPPNwjLmvol8zycsEgT261b3nc0NoWe3+UiGusK2hYuFFmRsM0EPUA/yxwK34A9Ftu2pviS0UkrEdvWChsul0+U2zqW2BSTNiJb24JSLuqHoDhItQWFSIokAoWKEKsIJZqZ24oRlALiOTrwWU8scKsJJyYdkmJbV5KkJjVJ6wKqy/U/7BGg4zTojlZKNddVk7kqbu8TR5zXYkYppLnPNSOVJaJSzEwrClATlZBIGdf5tNUFAPTsvCYsXKfis3CEhgy7jh16zJDzDM2xUuy11+MLlwZqoiaJbvuOkyAlHxySiJAguXacCyRKbjSrzCbEZEz7zxpjCYWPqLhcpFJG5o5Gqqcb5dgqWXnVq16Fj3zkI53tb33rW/Hv//2/x1ve8hb8/M//vLHvNa95Dd773vcOPlep/H5lXwrqbRCaMYKjvnBhl5VlSjIi5VnhhEGy4ri2ueoOAY9Kce2zNSlte+0oIJkc+V7cNuNn1HG3VUlkakGgrBwptX4lakKho45VJW+d7kXRkoU8R5Sm/SSFbyeXj02GmOvHdWxUNCQnL2qNzAKNjqURLCeRtqbY90WCLZyVLCk2QQFqksItKUBrTemQDtFS0fdcte0ngqJDr6uua6mzLaCXdGlWqp5j+9xD9vZRcLh6JFeVS7sjtYV/jgu/xceXfNDrarF0KNJ2+hwVlWkN6rg+3M/vGNIS63u1u4Rp2558T4nQ7Mqh2CpZed/73oeCdaJ//Md/jC/6oi/CV3zFV+htb3rTm/DUU0/p77OZYM8dgCE3RLqZfZk8hz4koboZIiquSAwaOIbqYEKJjE0kfKsJu6wlQyKXerPj2jMgYV2fctY+vkMI2xCrVakAzGNURQXMgUol3f0wTdBGNstVCbUqoU5zRHlZRwlxdxERl6JAlChNLgzCkiSasJRZjjhNUGb19xgNyeGNSpRsyeH6FQ6luuJbwY1ja1V4+ndCFUdOkuIjKDY50fWJlhiXL739bM7+K/25q22x308gXgHlXDyFeC6pTXa9IeizSrV1u+uw90VVd1+njKXpceWnscmJtF2fo+y+C20ZUyTL4RPFqpW/76sJU+vCpeSJ9CwX8xiRw81uk3JgmCbHp++QkqPx7776OrqbDUmKVK8d1iyd96yz1wJbJisPP/yw8f3f/tt/i0/+5E/G61//er1tPp/jxo0b22yGF50H1LHsvN5vdJ792Tu7ZlS320ic/VvbRiV4C7DySCv30kvPz2u7qUIy45Yq6lqHGmsFr08iN3y7WpV1/hK2iKB9P3zLC/CyovvHgpG/QUXi7y35eLNmoItzoJgp5PsxknkMtSoRL2orTLzKWp3Lat1aT1RtaYmSxHQPAUBeExUABmHpwLamJI4LbdxHlbA/KitUiKBWpf79YkQoGsJSzCBG9LjICYUgG5YVQYviHrAFi6PDyhIVJmHyuU3q8saVIzvqnxD0u1/6hJtkfeqSnD7iE2p1IaLCRcZqDcRZ9ziXSDkEsphY2wuD6pBISmT0LXL/xSc9ksYMaImTWpU6FUBn8rNq+4diHoOWotjUGrApXP3NUDFrSOhyKIZGjfnacy6Twq3Xa/yn//Sf8I53vANR1N6c3/7t38YjjzyCa9eu4fWvfz1+4Ad+AI888oizntVqhdWqDfM8Pj4ObsOUseChacbH+jWnSlEcUr+zTFLrNsBDna0yLo0NJyh2nXRcqSJdX6gAWLt+6J5Shlcrpwq9O5WKgVls1FNpUsTP0/WphiaIstXzJPrTZuskQqUUsiMFtU4Qr0okpwrxqhbmxk1oM4CauDTkgYt1OXkhq0ucBL6+5Oax0RCVapGAwpjLudK6nq6lrL6mYgaUqewamkIsa6PfzWOWI8Ii7Ze31//rwbrfauM7d0tC/IO9r42+baEWGxIUx2sgWZqiYpfGyP48xv00VIPDXTjUPp2ojU2WXDDcr4IQ3jqbvG5XWaGcNUJ7wZKxCWGRrBb8e2+23okjSje1kJwledsZWfkv/+W/4ObNm3jLW96it735zW/GV3zFV+CJJ57Ahz70IXzP93wPvuALvgDvf//7MZ/Ldtgnn3wS73rXuzrb4wKIY/OB6Pww4uAzlgy0L9QUCeF8L+WYOl1Lv/P99ELzBfzibKSdMWuIAeqOwn6m7QRttI2TC/7dvg7eVn494nVaCdzidVkTFk/zySQthTy6UCrWsTb+ctpWzGnQamZrZHlIANW0T8WRvt+UhbbCDJErn0pAmzphz5ykKFV/bywq5eFcZ+O17xmRTZ5TpZjXn8s0Miwo/D/lRJFEq5JYlu93wW2F8Lt02vor8bN9jtCBVhKY9pEI137aHkoMJDcPWU3attQERa3Ne1Q2BERyv+j6HOto+drndfWs/CSELCE8X1Ef7OUm6nq40D4W6zGTJULMDq2XnNgC+oIKQqzm9lVJY4PLTW9vk47rc6fbbjQfvP0oWYYGDDdRVVXTUjcHvviLvxiz2Qy/+qu/6izz9NNP44knnsAv/dIv4cu+7MvEMpJl5fHHH8drv+T7kKQLo6xPibzp6pVjrTS2/qOPiLhITGios3P1YkoHv/TfCFf2Uxf5sLf17fPpdOwOyB5UpfO4QqLtY0tmbXF1Er7Mjn3r33CyYs9G1LpCerfSepY4KxAvC4gh0DyqqCgMF49BTHgSOZugEBIFzOe12yeJURzMWzEty6tSzGMUM9MSle1HoJDkwnbnsMyyJdeiWLcnJIqlz4oy9
rghGKM3MY8fX38IaTE0IoyscDePWssWFLX2n6+7DEP93xXt4W5jXU96t+qQFNsaYidWrJQSCYlRv4PU8DW74jsrY50vez2vMZCIfadtjuUu+kiQK5DCtRr70LQTfceFIIRQDbHe5NkS7/2178GtW7dw5coVb9mdWFY+8pGP4D3veQ9++Zd/2VvusccewxNPPIEPfOADzjLz+dxpdfGBZof04vjCrbYNY9DvISyuxG/OATZQhGtnhXWWMzqFOOhh7bTJ8XK3hMRav0bn9XAQpcYiQdoPsg5QfTzVPdVRImUz4RLJ7XrBv1KwuOhOg3Wy9rahMAfqCKWqzEXkeOeZxEAeA7mqrSxJwTQtrdVF35FEtdtd+hQOtr5RmSovWavdPi1RMVw6Pa6ekJDjoW4XHynh6+XY24bBY+UMIDJjXUD8+PpcvacyiAp394SEk0vtdVkruUbCl9zNcIfm9TYueJUi+HzrZrnKcORXF7ovACzLir2MRUOG+hY11SRHOG+1qIdNIj5Up91+vkZZpRTKhYKdJdu8Rv/E1MbU2cdDLCtniZ2QlaeeegqPPPIIvuRLvsRb7oUXXsBHP/pRPPbYY4PPwUOXXUvZ039OWKR6emFoOIYruocwT9J2+OCL5OFJvYwyZYwobzPBShaaTtKzFdUfGy9qfU0KVdLtgOr6XbOgCF0VjFTGtMxEBQxLRKTKJqFZXLeHdTBRswhhpEptwqRrU3mJeBV3Bm27tXFR6W0dobHHx80jZnjnVMwqALWIL9+PkdyNWWdeC3Dr9hZu0gK0BIU6SinSB2gJTJLUGhWdbr8ln1xjVJOTboZaCv2sVNfdI5GUvogZXm93m6NsAPkYu6jglJAiaVzaGE5c+nQjtlWFtnGiQsea56s/29blTju127V13cR5m2CtcARrtpqT1i1qu3vqd8B09Ya4fQBoK6BhgWSokghVXpMQla+6FeRlnVuo6SN856XEie3JzR8hWlnmKQmWRTNKqD+ULeJ9kFIjcIIk9eOuRUyVZaUxRMqsvO1CKpU7YlVs88Qh3VsnK2VZ4qmnnsLXfM3XIGG+9JOTE7zzne/El3/5l+Oxxx7Dhz/8YXznd34nrl+/ji/90i8dfJ64AJB2o3UkbHoD+QzCJ6ANzVTYlxXQFfpGKDzWlHrtEMFdsacQz4TF+XISiRYAYot0UJnS+E+fqyJ2mHBliwwtPqjr0OZgz+y5eQHLBRDlMWJkrenYWotHIy91OXKzaCQxqiRG3AhMqyRC3LhF+lbalsAX9jPbzZ/NCGUK5IsIcVYh3280LKsKyWmJ5E6BqKwQr2qSFSNDhcaSYpOWIEtK895ZnVsVtyJaTlTy/ViTEnugI6Jiu3uGEJS+MqPJBq+HCIDLElNEcnl2fnMF4836DJ91wxbn2sfZ+Ve0FSQCoOrfIlNAzLLOGtYlBwHifZd9Hk5UfMJyu+/zZX8O0YLoBUStQTffqy9CpUpbVfU52We+ermuweMy6lwTlXW5oXhuI74NMLevVu136x11Ja8c6voyvrOJpD4P69NqmFYdqZ/lZGRTy43PTa6/n5ekcADwnve8B3/5l3+Jf/bP/pmxXSmFP/qjP8Iv/MIv4ObNm3jsscfwhje8Ae9+97txdHQ0+DylMlnh1HHhdpZESb0+xTmlVU+96zhsoBb3rS1Ups16PEndI/KXyzB5MnNrhO4LBZBZNpyRA9Bm2hKp1+VVIu2cs0pae5d2A83NeuzFB+NVBtwpNXkp56mOjAFa0te3oGJcNKHO2iLUnK9AZ2GWqKyM56dMgHyPiEvZnhdpTViSuF53yMhWK3SeHDw02b42sqrMY02yyqQlJUAtpjVyosQtUaktLzCuU9+bHpIymqCEalP6yjmICtAO9FERMeFqZAzmNoGQcsDYYcA+wrIJGeLWrdJqI9ec9K2LJLerm2iNnpMYEaI1n7BI7qNWGM9F/XV5U5NW/zc1IWK+n1QBKYTon66wtpynwBxut08ojKUzGDkhTVmiuskb6d1jLqga7fV18i7Ru00W6zkzZTXvr74O4Vr4hI0mkFi1lm9aSgNofhNL92evDQbAIC8hfaAPdiLWIdg6WXnjG98IScO7t7eHX//1X9/KOTu5U4IyUHatFpyYpBT6tyJLQ9QJrfO5azr5UnqsKXTertnWbCdPJmd3FjS7GRIeTGVts61IQixrhvEi8XassqasbK406tSumJqE1H+1JUXSvvBsssZLxIR0tpsnyiu9DkptwSiAOTrkRfvAG38z9pJel5zLslLX05yT6QzyRZuMKs6qJoLDdI9xBX5E/vi8WedHXyfdAybAXaR6LSROvHRdzbNRFVXtzixq+02l6uglg6hYCwtyolJfm0U+Alw7IkGZUCjrhUBQoiJCvI7a36cRr5J7xU5TT6B7UhO4lqR23T3Dm+lzA42BHSEkXdOQc0gpHGwRbV2naxLTrk0F0Dts6uQAIDkNb5RhQZ13z6VF7FY7DPCIPF8Gaf3Zat9ibhAV2zJtiOl5niVOhFxuXYJtQTbaVmrCQuck4uKD1v8VrL/l2YLt8uxziK5vkwn9hV4byCYpY24UzSziVamZZVxUOvTKNpsFZ4xddX2uAIUVtpYan5tJu2ya9pDZVZv5mpkMBRIn69Jwb+iwQZZXQ0z3r2IUB7avufFhJ63YmdfrU7ZTnQAMQR8/jpOQeFk0Ky270uZrOoYob2cwlDqeL1gIoEPe4lms70W8MjuuqCAfedyGQFM48ox+906ThDa2n0u9Bg693PQXATqUvmrvTyF3qFIyt2qRGh0YEZV8T6G0onzo/SABLScnXJNC7SeXTz0w95OTdh93BTgKjbGYcMIhHe+yVlC4cNlaTOJ1BLWqiQmPmOlYI6QkVhmAZUvoAO4ma57nyLyfgGx9koiCFP1ji3Cl7LT0WWUmQY6zCvHSbxGuLdURijkAFn5slElqchbnNWGJG+JSJBEi1UzkmvvlmlDJrhg2YVyaYnmb3NTbhd+5sVaoW3dZOcUG71aX0jnaXiFdf2622++da6kLNoFzIcoLYLkCSAdDYvlmAdKWUNFkxBIOUx0SmgVL23LWwyyQnRjQlhigEO+1EZHJ3eU2abUyCPM6SCN3rtxAuwJpViT0rW1imuojqHUFtTJN9O15LL2ItNCbYG2RrB5mReYxtApwcrclSbLbQZjFcD+xipEf1BeY3s7rvB5WhxsVALLu8vF0fFSUjb6EJVUS7rX2xSqWw8Uj+NLbG8LCiYy5QGHMInzkCKH2WuoZi7YoNESFd5R0L43v85qwEHGx/eLSterw5EaAyK0qPGRZk8cCndWBzVTndZhnKwCPtBiug04n0oqewYLlir0E+YFCvhcj249QLKImqZvp7qnb3P1vD7KVqkSXRwgGWVJC6g519XDSUkSGFSVe1wQgXjd/zF0iWVBou0TO4qw9VjWPK8/gq101M7qvpquQwK060n6AJXkT2kyWEsMFpLUsdb+mVhXr20zSUiZtLhayLvO1fcjNSVYVEt9yLR8llORNH+qyljQacqSQQOaTWI7p8lkjgC7xIBdrnvv1YVzs
3hAOkYgZRKj0LnDqyi5t1MctqVLZ5nrtclFeiPeno0e0tDBVbgZSVEmEqpQjnDoJPAkZtC7QabUScGHISnJaIC7L2v+OzVTIdQKsehCigQSAERFimz8NK4cHRDx8MfJxUaGy1PRkPSHLTiezqxhuaIYJ2uHKkniWi7/qDoCsFN7LMs7j3N+QHv6ZJ31rj2/bzU2mXMRbt9VslHFthTkrCOkoqYOtdSIKMYrmxW7bmNwuaqKyl0DNhOeAfPqWVaJUQGGmAUKcAclpM9M1NBC1pYNmo9QWKXKKXFR2CDIdm+8rLK/FWF+JkC+YhcQmKlHb1vpP1l4EWUlCsSuXD4GRFnLvkDUlRNNhWy180Tqa2BB5KVEnTowBnNbb8j3TYqVJhkWe+GrUut2NtaTOq+ISwHatISZJMaPXiHR0c7G05bllDjBd5VNmW/WJT31CVGNfHzEZArY+Vwj6SUZDIPK8TTngjOZrLTQUwEC/QtlknwYs9xdB1LUU+n9fO6kO/qtzy3WtE2q+W1bz+hyVuB2AXjolFBeGrADNQH+30DNnHYLJOGS9im57jKSUJxNoiQrVnAbXVhnfKuhbH53O9gm/zy5n9UkuEnuNnqKZ8fNr5P8JJXNlADD0JukJicu6RAVgYlaW4TFeZbWPk393JFQiYmO7fiSdS1S0PWFlvZy6o6H1OxwCMh5Szc2VJLgly0I5r+tXq1JbV9pzy2v96POUleFeqo9po5fidYn9j2dtrhYVIWqegYI/U3Gk18UBrNDRjHQqLbmWoi/kLL2t0r/YS5DvqyZxW6TDSymBW7GoB8Z2Rk918PoqvU12T7Rr1gwSw/KoHIrAsSNxePmpYbmC4qy2qCR3W5dPVNbXJEUBEfpyp0jgVhV9flZHegewc9UArVWECEmcAbjDLHCcNJX22kbyuXz7fHlTjOthVsM6M21rVSFBOHePG8daRNoF2wUbCmdZTlio33JZXSC4hADZ+qFFtdRnMYa3cOcB6+Z/IctNYSZ2BAzNiw1OXspmvbGqSdGAuUxcuNDevBYhQIEft+inCaQttFNVcKIiiaWHZAu+MGRlfaiAWHVnDajFg0QyihmAubxirA2XdYYnkBuSJMxe9C50YTwOHqasO4C5Wda2+OgwQsXV+GZmRyIBtF6NBJ3siMX4m51EV2Br5wOw94dst9H1o8Yo9ppHOe26h3jqeHM7mgUVqa3catBqabi+hpOeaE/VGWgbLUveaEJIC0JJ1MqZOSjVdda6NbWu3Y7JXdc9MQkm3U+yphTzGNmRwvooQr6IkB3IWWQ1WZpxi0m/lWRIBAmVrWK0xIMTEGmbva8HIa6nyNayFBFQRFDLCOq0Jirc6sBJRVy03+3fjGtA2va4SUxUdsO8XdoSTlD4NiInoWnwyVLS0ZfQNTRRPIUyJ16h/VhcoNbHNX0Kz9PDNX1a+C+6fn0PVbdf4vBaVVz9BxGWRnRqpy7g4HeBr81Vl01kDQtguojyHJjPTEEvuyYAbc6XJW+nPBxzYlI1VhZtcS4KxMvuvcqv1GbceFU0EUce0sOObQMgzHPW98MWQ3evawiqJEIVtIhIjYtDVq7GKGYte4vKOrKCzJ6VirDerzt0DmM2U9YzBZrdSgTAJhiheVN4ojBeH6yydmdDA6qBWTeLpGEtYvlTeFtrLU7jbklpVkYPKQm4mgRq7CWVsjNKcM2KXCm0JTdUH+yXiC+811p0KkPEVZePOjNcO4yO/PSaDDaEhbuvOPL9WOdI0edoEqmVaRNV0xAVbe1gg185A5YP1OHKs+NKD05qVXXXsdJJ6GqiQtaU5bUYy4ciZIeN4JGJX8Vw2oEROBUixBmJUOne1xlY+cDLSVixV9adkKpGWUs2TZ1vZIhtiIrWqbCxxpcUkkc9EbiOJEhUTZYa6/7YAlkiKXyxQdrX1iV36jbJ0AJfgbAMhT0pktzeRo4TZRKXKokQrSWrIO+fSmubKeLvDPZWP8K/u9YEAtAhJdp10bPfACcqnJwUBTCf1eSlWRIjWq1RJQtRd8NRC+JVfTyVYW2oACOZnY0oLzsW75rItEsY6KhN28JktUdK9in12b7+WloSxQhFT8zcTnka4IZqcGHIShVHxgJrsBJvAeZA0Tle1cfke9RJRNZgVt/g9C5/Od0zGQ4uvOQr8QJds/8knYxFpuh77RZTOrIJOVDFqrtYYCcvAFh9/Q+X+FLxDmb0uhzm2h72CsExaveMmJvB0eyoCds1xLGFlUugEfwSoaXIGkO4TdYURorqRf/MAbBzTQrIF9CDP+kFFOvkpbDPUtXr9WQHNVHJ9+tBuqM3sW/1UCKgKpSqQpRFjY6iCe+1qolyCmeuUKUtSRmyYvCU6/sYbSsBdRpDrVoC0GdVLfa60YRRzo4d+I66QoNtS4upXRruevJhqBXFB54yAfC7dvQx1kKqgElc6Hsn94eVEiGkD+ptSyMy1boNsrrYcCwq2inD0aycHi2zmoz0gaw9ttWnsZwgV80CpN3w62qRMGtI22+pO2v3fRKSYgLu+2pqVEzIQQ5+cyxZuYtZhCwKHwsuDFnJF4Bis9eoaFe6xZ4prAMcyWkUEBdRbXUo6oGGoyU9bJCCSTS0Gl610SL1eR0L3zVuKq6o552AKkxRm8v15F5JtdLHk56iVHFtZaFZjJ4w1P5GPosnU2NfBxFSBnBbU3wLkxH4CsF8IcLuMS17152oJQxs3TXt9mYugXivvj9RUelssuRyiYvaOqVWJaImyoanpadVifM9f8SN/Tlf1CnTqROnJIe2+Z9cXuU8RnYQo1iQRcUkB8EkJdT9AqBIKxSL1kpB5yNyojOxcoIinKsakAAtUpVRPhLaa9enSdE6hlpGtUVlbVkqGBGQwrNtMhPn0HXYAlRn28taWFsVrWWNu3eojF2nGL7MtGhDRIm6/dTvWdE9PvQl/PLt1++fwyLELeCdfczKYszSreUy5DZZrmgpXFeXVa0FpeNaZtZk8UwMPKQ5UXVnvFo3FhYFCGvZSXqUiIVp25FAFGpdJcoI545OCkTLvJPPxe6Li4O6DerOyrCw0ATQDrKoQ9vbPl0MYU7atY5cizcC7W/N+2Wg7reK/iFD48KQlUqZ5EIyd0clM8FD7hS4osMWssWFOYOmWTnQJNGifY2IlndQpJuRQIMm1ecLteY6C9fii9J2w7LD9BuK5XshUS6amBhuYrRhW1+MdSoEPYvIygPIDZXhES92inj6LYokEkPJSR8UimLeEhmqW0fXWLlKiJBqa0raDQ2WQoI759xrrXtABbVqdDYklG7WcSr2EpxeT5AdxLUL6RAo9iqUqWcAk3QjHogWDqsOqZbQOywRjk3K24QG6xhRyaxABQz3D3fD0fcqqu9jG/bbTQ7H09QTuMZFt6cR7Opw4rImorwcDzXWon7B6qMJlZ2rQn+vDNLF+yxJ2EuZZ42ggwLO98NlpbUhrS0mbe8cB/dSIVWsOt9rclOnJmijHbtaOd0HJUInzzUazXdn1BAT4+o7bkchNsRCE5852hXTAeDOXdM6o1RdV6L82ac5AclbYtFZnR1wW4a
a49v8VbF2K9lEhLtwygXAA89dBIRQzuTr0Pm45vwc3ZXoQ3BhyEqRQk4UwiG8vBzkS6Ysj6S09wlpeWItu+7aOlO7owrUHWCybEMHeQdRkC+7eb6LeWuh6bazqgNZ8/ZYvk+3jVl6pLp0aneUxqJVUd4kdeLnZC8zEQe+XRJe9fk/+8BXKvWt1ROSMXHMolqVipDtA9RN0f3WuhTLSkJuyHLWJSr+89T/Cx3KSu1sfp9VqWfUReN+Wl+piYqdSVZjS1qRDnGw/UHBJ/P8FnadvrKoLStRESFaxoiKWkQb5ybBcB6rgGJeW6ba668JY1REOozYBdtt6xMkDxIrF+3zQ31RJ4+FZZUYExU0BL73jJOTvjVnpARx0nEuC5IzX5OQQdvQtEhCfxYh5KozYp99odCdLNJ6bSAmzOWi3aLQUUTVcoUoSWoCc7DfuabO5JCRDk5URMKWU0IGs076rEXSbDIYF7FO6lbOYqyP6rooQapimdyNDOh2wEizn/R9dcbulkgX6/tQYFvOgIgl3OrrpIqGlFBnQ1aXmBEV29zrWj25o0koTPM/ZQXVWpjMzG9Ax0ZFPSunzoUyi/JrollQVFR6Vm9EQAnzW05YYjTrvyT14Ft5cr6Y5IALSU2m3llKgOcm4QIwjyVF9ofGmhRJFhN3Xf4XgKIgklPT5UbHFkxYyX/jfI/XwWboaWSQkyruEhWfVQWAEbWTN89JHX0UgVg2hc8XRIYiqtPq1LdAVAySMpagGBVW7QX01cfL2ljHiNYxkjtxmzNlbb5fcqh2QzAT+dr175fLlg4XfOHPQ8A7dNvFq83qlEyxp30uCyzfb7vF7fQMNnzZbyUdi/GOsSzV9nHGMT0uL52wsjOJ6Ypy23PIi7P6YEzEpP2d8koLcEWhLg95pnqJqDhySJGrxmh7s0QITwKpJ4ZNdCelo2iTarZ1UXbv1QMJuGwhKuqkqGpNHoLWMl3LFtrfzu6XeeoNWhQ1p0SUszYzti7Tk/7fuG3hRc83XIODr3PJ9+qOrdUH8HoiFDOWw6BnAOTnJt3C+kq33PIBoJzRQ0FJnVoiAgBl0SYKs316ujNxGN25KNA0fUfGdipXKgD8gVuVjZupQhuKXBlr6ehtgM5lwmF2IN0Owe4sXIsfFnuJYT70gVtQJCuKj8DoCClqH2lRWHJBPeDpgQJ6u61L6csM22l7ZH7O92srS3YArb9Qa0AtI6R3qjrB22G45aa9UGlQ9syWNxC8Rkyfwusx3DVDSA8RFjqmirRFRS1bTQpZQXj0Ff+v2xF432hlY9vSQXVEReM0tSJ5fLloXCsiS/ymra99Ds0MuuaA7ruuxrmrdXK6BmsNMsAkLaLJXkWym1lZRGfVvSpj2RILHQLD20kTICtDNwf1PdJ6YW39sZe0uEJyXW5x0dWk25906gRg6lMoKRzlWlHK1Nawc5YLmvg1facVfUMdNUUDtffTdOmQtaSYR51JcT32SZ6CCqqo72HBRLJcs6dWlRb+2ykbpP5qiPzq4pCVWH5RXaSFd2Lxun6RVUNWKCJIrSMjrTzPd2Dk4Ygj4zzZAXS2UKBNPJUv2igDHlYqregaVRHUaf05WdaZTttZW2Ut9WW6jPgqvjzaiEJrvfeRGDPvNDhztxZv5ItcUdu4ol/yZUt5UmyUcyUmcqPrcy8W2N2u9SS0cnaTZ0Kt6/aUKtIELcrb8O56yYP65WsjWey3K9IqH9/zp/87OFNnMIVJPPNGd7G+UncCxdwiGgNylwyyonR2Wqb6gXWFEqBeEW5UIVKoo5VmFeJ1pJ9FntdE1xe5DTQ84R1H7RqCfj9tC0arb2EkRnAZ2RYNrmfpg72aPCA8Kx73kCRkpUSZcd5+t9Eu+dCWcYVJE7h1RrLitIuUUrlhbll6R4nASG6jrovIFH5WcaTzTXELsHEEIwR2BJMU6UJ5ToCudUjDWigVPEooV4iUavvZokC0hBFJRJYQu02k6+nmluLWpK7LbXVNdVwynevSbtQ2ceXqmhl1y60l9VgaGe+eK6AAqN/FMjwp8MUhK8V+hWoh+EodnZ4mB0nd8UdFu2SONOEj8zLQilvjousGKmf1zJh84HT+ctE85L6cFlKYZxEhXsZQp/VCa+q0PS/QWmXQrJZbW2iszou7LNgDI2XDnN0203HbBMcOtZb0PDWhiY3vGsxC4zLzkj6FJ3Izo7ZkV1d9rY6oKx6x03TUZFXSHbOjbrpO+6WmF9lI5Be3z4Gdwr5tqzXbG2AdEcOSrXwmLjJCREG31mHZ6CMUsapQeshEPNAiI9XltMZYqFQFMLdbfghx0cCogBFubN9zm4zQu1tvq2A7AAZZtFxtH0BahtU7LmJoU4jrrjksoraryGc5tXO5SLoIAB2xLkXxddrCUjbUbltr4iksWxHlVZMlvCV/dFxpLaPhvmbVioEVyyeTFKiQtKJZ2r7MgEWKSiltZabFSGuLiPOWaSJCYwJfliFf1BMePqG2wSfJhYqwfKh15ZAgHRAIyJFcn6tPKgdYVy8MWXHBPYvsdj7avCvVE7UDEN1fniOBm4arJoyzTuFdATPBzaE7w57Zb5PnolxEKJYxFAuHjvOavJDlRopWANqZGSdVlQKqwnQ/0ZpIOrzbwXrt0G0yKRu+b+H8hCivDL97H+yEej5IuU/s7ZWKAGl1aZY7IspldxIHJRDM9oHywGXp6W2yE85nw+Ni6CMq7YbxREX6vBPwNnMTSWNdoXe6nLXRUXptntPYeMcB/q7K961SVRNN1EYGkXCXjufZZwk8LFkiJHZuldCwZQk+N3fckOm27LDfi1tKugu6ysfYfY9rEiG5ae3Em3xRRH0NuZkltz5nS2D4tratlbFNvz5kWUoi8zNvl+M7WXcqRIYAu19n01yHcU1x066sdv8szWPqdc5q/cf6KEaxMJfw4P2BpNMiaz+R7z73sR1RRlmx+XIdshW5//ka644FLhBZCfXftx2NLKpry7mJBC+jGoFQHVHQ1hsVUU1UHGuhkOWEW358xCUCUB4UKA+g80cQuYjXQHobiGOTGHAiAtSdYqVMl0WlWl1MVJCvPzKONxYyY0sVEGOvmLWF1lUyktGxWZSRittlMm3K6iileTwo1E1ydXV88vtRPUis2uUY+Ln5//QuUKpW0MzXiKo1MrUfPDuIdI6eTdGxvvi0gEICNn0tA6J3fETFJiZTEBVuUamKCFxwG+ouilSFalaimJV1fda7VjXbSq0/M983Y/kBlnHWtshyoiNNUiS4fjOfINYmVFOCkwAARvgy0J2cSO6cvsgiskD6hLpDPttZxEU3Neo115wRmzCjVDSRUcyKYrm6YR1jZ8Q2+q1Z3N3GIBEYvoQHuaVKpLV1ZQGtcSnnKcqFwvpIYfmg0vmb6uuguuT/9me9TXKFVu3zTAtlJqftmmKUzdlnIfadUy5X1WNkIC4MWZFg38zWrDv8WAlRUf/w+VHb6bX+7crr8hlyHrtcNCtRIIZacpJWE6cE5kzOmNQ1nU9c1FYVW89DJlF21iakuhmkLV2NnbvBICyF3NFQx8EjAnzZL2kFaj
B3lGTx8WlxbJEs317MgfiuexYH1KF6URIZWWU51LpCepdm6C3RgwIguBfMNkjt3ZyoiIP9QKIyBSGJlWlWKPnimI07qSoiVHmsJwgRWf1YkrmOK4gJULSYF9D3w14fqCoBsJBkgJ53f/slvYoksjWu0Rrkh7p7OtoYIfOrVJbK83Zw+PKpAJY4HzKhsUmIa5FGVxuoXN/kQyyj2n6o46pW6IiGgfYd5kSDw74fEiEy1+dyT65c6IaYW4pDQ9/XJnWrkhj5UYrVtQTLB2Odv6m+Hj85cWnkXEQ5KhorfdaSllIYH+r//N47L7tTdlNcGLJSxVXvjZnixrUWkbq+7GpldHhVXAFcOzNS5OhDpSpgr0Ch4nrdE+YmqVT90AH1WGl3pvw1cUU46H0s8iUqq05kUtGQJENgiEYIut+af9Wq0tEDlYpq8gFGBnqut7aySPeh6VADiIqEUtVhoX1LHPRFIgH1dSan0Hl1uvuHmTyDMFFHMCTqh8gHJx18e8jxNmEpeIbaxjIS1CbLLUSkJiILC7OyVGn9njbZKUzXj00OaR2hdWvit11A3muUxKVC4jhCX2gx0FhEVa1H05YdMftr294++JK88dQGNqToIaCfgPByheXO5hORMoHON0Vt4e2oV3zuupdKVB1Lju1e4tcwNO8S0HUpAXX/xPPgSJYUW5jbkht7aYGasJRzhewoqd0+s0inv6jPbZISl2vFR1ToOz3X6Umtf6zbau7nddnrjkkIcgc1BD54BXdcILIyFXyzID5bKwIIyRSDia/TrkgLs461a4cIix1K6UOtrWnOR4TDLsMtBt0atDiVVrq2c0ToCARhdieHbFove14hsRTtugO39EfG4nNGlJRwntx9j/rWPeEkJi7qDlYta586idFs60qIe9GG+FIPDTkeoFFxWVQkQhJKUuxjOGGp21CHzFdFNGAdVg/4QoYMlapqMtnr4q3fIb54of3frLfdN2R9Lz4g8LXGALKEmLNc41iPPoKOs9tF9bbl3ITF3eauBaOvnGsf5YvSScKMJVNakmFc/5zXy9sC7ZJ2hVUTeARlH3GpkghqVbuk7ezZUmg01+Tp7SwbLwl7pQVSqzhCtRd5woploiJF2kjHcBBRidftmGdPPNdXuhIHF0JJivQ5BBeWrAy9EfaxtjhOi2UnICZB2oIAaPP3rM6gUKVVs9pvBBU1D/kpdA6Z+tzmvbFneJRYr7I6Zs3CYW63O0siD2Xamtp5iGTf7AdAG07cQOoY6vVB2gRFfP0jPlPjsK1MtP5Pu7+/47YXcOOIikrrgrIDFo7eEJa2DS1h4QOKqKOSnuMhRGUi149NSsaQFAmkW6mtImhcQI05fIB2hdxCEdeIWa4gQOi07XINaCCIA5NWSQLbTpmAW+YiOvRc+TUy/e4iiUzZKQ86bbLeJVf+Jl+iOKkevV2wDoBNavhSFgBQsOgb475nAFDVy4nkZptciFclSqaJ60x8VN1PcJJCfY9uA7OqhExs+Fps/Dcr5nWWV4rApIlnXxACRefYoflBYlZmLYyK9t7ybkdayb2tM7DPmgAXjqxMdaN00i/Xmiv0Iwni2VHns5JnDQGJDAmFilGpGOntqE7hvu52cvlh/Z9n/OTrnOj7mEJHGpE4F4DOMURmaXIVAV1Xk5FpN4mAVXudPgGe3dH43DGkJzHcWEznYr9kFMljQyIszogAoVOO8zoz7vy4tq7ki1ag5iIsLvRaU5qBtnK5TbZEVPrKlEXcsZ5wlEWMYh3rnjVKyvrcikKi4/p9SJrfdMDCh4MhWGDIBWTPMl1kIVQUy7Pb+jLddsWp/efi+VToMw3uZqr+ymv9sSP9+Haev4ngIi52Ob7fniDwAbkON45ql11WiW00CX67vSA3+LpNT+BzY1GCM6ArDLbbbbiT7Ogkz6Kqdd2VaIEpGquKHcnUaW/Zur3s69fbrFc31OVcJkCsgMiKQFo9QLmcakISQlKAAePviLHuwpAV7soYDD67CrmJvMwIgawNJ0kJTUleRUycWBOXsoiQ76NOloVuB2fnAKmFr47qY2ZtEjQw3WTUlV5XyaineaGLeRtRY5u+CS4xoD17c83mKPuvmQMlaqwflZEvxhe5MNRErgkZ6vuULIGyqCdTLsIidT7Gsyw9Hw69RduQcLcPIBMVn9uHiIivjKRvydeqE/nTOXdUd85VHgNV5b0ejSajLQDRjRRl/DlotjXaFPs5VadRJz2B/dk4NVlhGtdrrwbK6qdcBGSIOwloJwv2/1CIqfMd0ToScRlyHpe+xbA0LKJmTbX6O+UJAYCqhJGMk0CaMbUE4rxto02qyPXD28Gj/egaY0bsXJqXEugECnBrcKmaxJFWjhjK5MtBbnTeh/D/QHclb5dGhUMW8rf/teU8q10/lCtMrmsDoiJNuAJxYchKMMa6cQaQkqCw5776osBOmsrMC0SNLTBKKuCwQjGLUd1RQBMpxAdFCrnudNQNw+apkgn0LNqzQlqEUSIttN84l2IJ7HJ5mXA7CqFtg61BME3F1BHxFNHFLNKdYKVo0ck2P4xNWgA3cSlmbZpxc22nph1skcO4AFC0nQBlMDYXnux5VjykhCcdNCA8M6EkxWdF4fuGuIE4abEFtSJYr6sJiIL7XWCp97WwFqbVIjIsJ+b/zv4+XuQaaC39CSFIN2a/I2jr6WTANc5vknFjDyMrfKC3YZ/DHpz73gt9vh7tB6+HrD51Xd0JA2m+KD0Ct3LFBRCtK1D/UjUaMU5eikVrZekkraSUCjO5rXwCRO8q9UdmRKRnAsqIi7SsANCKdDlhoXsf363za/FIRjtzLH+OK9V1A9Fx5ne7LW36iyIFsiOyBNtuxA3cPRMFAlxcssJvUJ+rZuDNHCM0CiYnPfDXw0I8owqYlShUhXptiVgTlLqNXXNiicYkWDSZCxvmTj7lgnUWACMtlosopm0eH7rWtlAnYLlkXNaNvogDF/GoY/rlzqnPZ6/rU03mSNQRCT6zNyc0tmLfCBXnIekNcTEy0/L/6D5b2gUUQFDGunmm0qdwkOvHdh9VOsMnu+YiAtZxnVzRigDSKy47BLW6qBDSbLoM2/1Gh+9wwdguIqmMvi7PM2trwKTnJOR42QrD3EACGTLIg22hYPUFhyQj6pAODj7gSuG2kbaGtt/1X2OpJcsJT4Kp1lW9NAqqJry3dYPV7iSAExs04lq+inqcm9o6CZLOB2iJCc+4Xbe/6xYn64rkNuLkUK3rdXjqKKComQzWFseS+uHYnPjYWWXNeyxNdur7UqlIr/he7LmTJAIBBGXCUGUbW5LC1HjnO9+JKIqMvxs3buj9VVXhne98J172spdhb28Pn//5n48/+ZM/mb4hW7yB5w0R1y80AsUqrVDsVU7RKf3nf2UzK0nvdDse8u2WCsYKw5SKuZjVyYQo3E53Us0fPwf5g2klaPoz2xiJf759tJ+ToA5hcpyHrpFfrw/UZn0tzbUVs1qwlh20WSD5PR4Kg9g0vzMRFf6d/mLrjyNWpf5zbZf2q7iEirvkhbbTPqkc1admJZJZIZKgKLEICb92a
7thTbHr2UIae7M9/n1jft+xGJIw0Y4g8bWVv+chdRrvu/VHJIIyr7rOTVFY8bpJEV+Y/Yd93vp/S5J4WnleLt9rFt1j11MmteunoknIPNLb6DPvj8rGKmvfEyIpfB+VLWYRSrbGGScqtKQIfZYstcbvkZohzPyeAcHzXeve1VaUMgHy/a42xTjGxRYavdm2x9mtW1Y+7dM+De95z3v0d8UWWPp3/+7f4cd+7MfwH//jf8Snfuqn4vu///vxRV/0RfjzP/9zHB0dDTtR6M3a8IaGuHeCxLI9T5Zr1VoXaDDi0RX18aiFi2mF/JA6g0h0A1GkUBU3Zvems0jvyBYCoJk1sFmYHfrNrTHamsOWtietjG3dcIn4pMRVrvK0rc0y2+zns0sh0kFra6TZY153Zvm+e22OMo1QLBrCxjNjVkRq6u8hOQsIeubPn4Wo3RaqORlTxkVOQspL5aRzcl2LspanoMRxANrp4zruWFPa8MseMaIo2gxzA/nEtPY+20risrrQAqp2+0KeC3PW3n6OjevuWjh9kSI8JNpn3dHPMXOx6vMwwT2VsZOM2Z9prTIakG1LLp2jrqd2x8ZN4sraBdSeTy0lUWj72eUC4i5qSsUgLRXAc82QRYkL+m1NXJtkD4ZAPypowdS27rpc7aZutXdE+Jr62aLNdB8la1X9uRK3c+SHVacs0ENQxuA8a1aSJDGsKYSqqvDjP/7j+K7v+i582Zd9GQDg53/+5/Hoo4/iF3/xF/HP//k/F+tbrVZYrVp/xvHxsXziKSJ0AkSxEpkYsmqtr+7eugRIpKWa1297VcYo0zpHTF4CyUncSXRVNL7QGM0MqOlAkmW78BUXZOnj2HdplVi9DlF7ZZqw8GN5lkpet4Sh23m9LlNv3RZpZl93SMUiqq0kvs5b1QI125xtJ1Yyjxn4vFpEZQpiQsSiKGMvGdkGJCtOS2BiPShXd5sb5iAqQH0vJY1K/dmvl/FF9YREAkmDgSt6Re8bYd/254MyNSCGuysgDLb+343y67ZBmP3rAdz9jqk1RKJfu1pJZMqstdY1KCu6UVpnjKKJbELF93fCu63jaXV77q4Gui5rSnJnXG/OBLhNkju+BpSeELGUC3aGbiJAALRFmvrnqLSfeeh1e4xrCiAqkpA2JGu2F1uwsmzVDQQAH/jAB/Cyl70Mr371q/GVX/mV+L//9/8CAD70oQ/hmWeewRvf+EZddj6f4/Wvfz1+7/d+z1nfk08+iatXr+q/xx9/fCvtnjJNMIB6cOkJJTVcOIGQTPxi3fs5qqtZ/RDNSlSLEtkDBdbXSmNgBdrZi7GCsELHHcQtLZIvnKw03FVUzForTe1Trv+4e0gyI0t/+SJCvqgtHFSvO/xQrpfaa5tvyaVDptzsIML6SqTdW7YpW1/fnh3y15AK9vNLA2b/INojSmWQXDkuomK7b2ibhESV+s+1vw8uN5ILZRHXlpU8RrlShoA2BKbFIvw4G5KFhp9Dsjra7lHpr5x1n1fjvYnlbfleExafds/P22G6Frrvuf0nXl8cyX+e46T3jEDr21DbKW0C/Ul5V7h+BWAuJ6sdxrWz/sW+Xrou+k/9T+faWZ9gbrfICbtugLmOmDmA+hKqi/oZcjm56i+TxqXMf+vYnDzabb6I2Kpl5TWveQ1+4Rd+AZ/6qZ+KT3ziE/j+7/9+fM7nfA7+5E/+BM888wwA4NFHHzWOefTRR/GRj3zEWed3fMd34B3veIf+fnx8XBOWgT6zsWTEaUUZ4jAU6pFAa6fQ5yGQypcAcJS1a7HM6gyKBYB4bcfZmDMd3W6h05YsLVIoNJUpFkBZNn5pYYbUfHNeWxVHurMGUOd6KWtRcJxVxsySu57qY1ufNrmhJPAONjuIjBVOk2X7uZjVBMVOxkQL59mhsXFOM8VIz2hCn8WoiGqJneUS6iMjIQghGiHl++rJG2uJ3a6irLdLYc60blAEmPmNGFyWBtPqYVpcbHEzHwztOmwiEJJXxXRZym2iOl13jec+0vUK5MYGfx+NlWgcpIsTBNv1QvX5QOfjgl9XOSOLKbMWUOI7Xs62HgCtJbZUQDVr20zXR1Ybu9+idhmZgVMWuddzbWIOJtK7MEsRWWO4ldZevqBE1VlGwCXwL2aRjiTU7WbnG2tVGSyinShilp9/yDi8VbLy5je/WX/+jM/4DLzuda/DJ3/yJ+Pnf/7n8drXvhYAEEVWp1NVnW0c8/kc8/ncuX8b6M2DMoCohFpOYj0QhZZ3Lxhn11kCqHLo1OZV3D78oqWEdeLapdHD6KPGlGuHicZspshfOFckg11/vmfNMJROXYLsAEhOo1oUnJmJsDiMZFme2SRgmqtJ/AfUbdcWFocKv/5fm4MrmPe2c+8CbJzSi82fj20RE+nY3JHwbSxUXGrCkq/rm2PkY5HAXT1WmLJbV2ITFvo9KAV6vR6Q8Vsl0Gn3Xe4cKmsPIvpzJW/3RQEBbpemS0uio4tKfm3tfvuXty0tvFws1K/PIxABOp5PFIh0lCwTONC+V1EBoNFhFIu63TGFKsf1u4600bNY1ySRS/6+Sq5pWf9ikhhql7Soq1qbJKRStXXXnKxVLDKy1eKZ5MQdmVhaIdnrKxHyQ/Yq2H1HZD6XIa6fYJJwRi4fCTsNXT44OMBnfMZn4AMf+AD+0T/6RwCAZ555Bo899pgu8+yzz3asLVMgePYaUo6Tk0lCjmuMXeXWlZjLlUE0Jka7ttalUGxYT5rOQLsu0Clrf5by1/GXn0ITjfBna7YEyLM/sqRIVhz+omaKxGemIE6Gq8OAjloA2ugE41xCYq9uPZW2svBZYpsMrrX2GFBVJ0mhKLBtEKt+10ooQUlUibyIjfL2901g18NJD7mI6Hlen6aocuE3crhzbDcPX3TUBvfTmySyQjmLjAFY643YQMmfdU1WWW/aeUYjmeBHBVgSsm577Da27TTL2FYUWmLDZxFxD2T1f5so8fdTKi+FJFOuJm5x4RMNHf3DLCVRCqhMbguF7OprdOiI+Gfe74SiOxGLGmtOM4lsRLXcfWRacSur3V0RMl2PhopMrd4MWF2rrciGm6h5ZkyxPv3fjKj0JqPEgP0Dz92HnZKV1WqF//N//g8+7/M+D69+9atx48YN/OZv/ib+zt/5OwCA9XqN3/md38EP/dAPDa57qEkJCLdytAf0C25DMdytM3yw8Fpbovp+ResYcRYZM0uec4KLbWMmEOOr0RZ7RrVO8FkRzdzsDtkWB0rgHbT08pMvnJeVjnfZBnziWf7Z1fnRoAeYL38VA1BtHpUqbm6W9Cy4SEpUGdE//DcOIRRjykjH+OoZYnUhciShTiBXwUUqObg1JV5HeoG2SrWdeu2a6+ZZ4d9DBzQiKdzwU+xV+hzcBUizXSJIfL8N3gaX5dIgS5aloa7fJAhA153iS/fvapMmJc12KTkiB7eAFs3vEFWAOu2S/6iEYTEoGmuKWpskSPcdzXsesWO4pUWePJhlbUjuIFdfxImKVMaVY8pefNLVl5BOLt+TSW6hzOPHrIY8yN2zJYnFkHFzq2Tl277t2/AP/sE/
wCtf+Uo8++yz+P7v/34cHx/ja77maxBFEb75m78ZP/iDP4hP+ZRPwad8yqfgB3/wB7G/v4+v+qqv2vjco9bZCXTrbIOYjE285TpOdgO1mUQjVdWLfTVhzRF7aWyXjwReJl4zN1Izg4wCvGMlbPN9W489G4rXwGwtdODLNocJgdwzXF1PpmXFQv4kE3ulGtMzmDZFmR28HcYd50DFZ9isrBiqF/j8GEQlqjqRP/RfxabodYwVJBkY/ZOXbkIylMg4y89yAAmKKuoKa4WBXp1GyK6W7YroZKFS9RIUUWNNjJaxQco5KkUDaaMVW3eKADBntQBQ7JV6HbFqUQJFhHgZtxYPZl2rz0uz70gvGgprALLPJ6JA1y1ApL/57kvqJv2KpWqtFxJ4/fy9A6B1XOWsDdvXfUXVHs/dOHHRvnO0HzCtKXbfoAkSu/aYUi6gOxD7BNK8DCcedq6lGEDJiLOd7p4mYELNRvSRHJHVau2qWS3m17mZIr/FZAhB0dubiVN9Iaw9vLzPAjMRIRkykm6VrPzVX/0V/uk//ad4/vnn8fDDD+O1r30t3vve9+KJJ54AAHz7t387Tk9P8da3vhUvvfQSXvOa1+A3fuM3hudYAfojaUL1Jdb+vjTlZRFNRkbMtOTdz0Pgy2VRu4hUu0KtdHwzO9UdStNpUwp546VmxxlmSeZGolmVPXulj3ZWXLs+Ap85UVSE7hRZZ1gmpg5Ht+u07SB7k72xDpSiD+xOOM5bkkKrk1YKzjT5Q9yRUXNRrZC26ohpOVEJsppMFJKcxKWXsDiP87iBpDIUJVrR/XBEAxFR6Sw82uRsiVRVJ5xLSlTzCNVKIVq6298um8FcP4DuMbk2oFI1QTEEuwDKhrTUDaxJakXXoEvVbieg69Jpz4OOGFtbPddR+xIp8/3Sz3xjoSC3EEXnAV3Czgde22Ij36f2czuwmseQ3ocIVzFvtxHpkQbcluiZ2zhp4SREysvCLUw+XZhL22NvoyraaKTm+LJtg+s64kJeE4nyTEXN75IdANlh1yqoy6/d1sF2e9eya8BFSkb2VxyDUncM0XtWVTW8NecIx8fHdQjzT/8bxHuL3vJjVzZ2raEyhkxMlb68T6dQOAaTsojrSIs8NmaaAPRqs+QTBerP6tTME0AmU0p7vb5iJkDTM6cKhgCMz7Kos5E6aYJPpV/MaMGt5n6s6nYSoaABhYgXncN+weO12Qbb5VPModfL4INFvI60eb/j6zVmQn4CLK0qHGJNIfjCiTmmIiqA37LiPS7ARURlijJGvk6Q301al4lAVmhBwjKtgMMcqskrVKwUcKrq32JeIFIVklnRhkPfThELhMW1+KcNGkTKg0KTok4ZR1JHyr4b32kfNmOAt6KWioVgkSsiJHdi67j23QLad1itWguoIQIW5io+4kD7OVmj9kqr8xLonnbeBTuqy3i/3O2htc5aN1u/JdjnBrL7GVdkGREUO08Mr4+7x6iPtMH7izhrSRhZU4ikaN0bI+FxVvc9rmjCYPeOaFkMn0wB7ufbLOzeX54u8dFvfBdu3bqFK1eueKu5MGsDRfHwHCWEIfoRKT15aNk+TJ2Iiyf54qjbZfYAtuuHm7m5f5R3DmVRZ7clywsRBLViMxOrM9QdBjNf+2ZvrjvSzjhbQmLPukiDQ9dU55OpjPPRcXb7aFaTH1ZGR6G1JHHTgThmJXrNHkBbSFyIkqpzo+y1c2wRbZ/bZ0picpYo1kLPOyuBefujVVWEKqqQ7uWmhmeWY4U5cFoT86iJk6+tUwAeKpDdTYGTBGoZGTNWDrd1r6qtJ0RUpPWZEodldt5MduYFcDtFcidu65xV4AZy7lI0oplUhRzoEBYSA3MYuV6s6DXftfLtLjJSqQrVghG2dT0JovchyqJ6zZnmPUqOleE6pjrIkgVYriNmYWnbUGfhlvQcBH5PbHeVXYbCpwHocHGbZJCL2D6nyyoDtJadqEQrNiZ3D+/30tqa0pKU9lormvxYiQ5psgRYa4oBbheOZxJl3JeBpKXdILwDvvPE4WPvhSErIcnRxtctR9uMxdSkRBqs7CgLDiIvVRHVAtu1Pbup/xsm1jl0rgDbEqJnNhUQkQWjGX9pdVCbkFSqLcvPSfAJ5aRjSNgYs/PzENR8v1u3S6EP1Nda7FUmIfEQE6PtqkJkuW98qHPpmDMV7vKxNSmAIIINeKZSltQmaxgobePfM18KYOF8Q60sqyxBvk46RES0OBkK1lp/Eu3nmO1Z+cYhk/N0P0MGIDpVtcZCVUhm7YM3P1qh3M+QnSYoGtJghy4DXWsbUGeCrmZlLxn1XWOkKuDaGvlRhOhWiuSESAudq6oHurQCmnNVzXFVESGalchVhXhpvccW+a8ioErk69DnEXQ8hruLi8X32hNEtNAkRV9l5BeN2v1oCQBZbuu8I2R5jXRuIn7ukllH7X6EXMt0fS7tjnk/u1ZU+/3XT7ZqrSRENqTy0ndJ7MyXEegcY0WLGUQF0Pc2bu5tOWMW3b5xz+HamTLARBfpqbPTFw5ow4UhKz5sqv+Q6grBUFLiIh2jBJOefBjULj5Yh5pPqwhaYkYzDaStdcWoz3ppbfBwUDquz0cuQeyUonYmZl5PbV6W3jt+rO44mEvH5c6xBytyNxBCnhlbEG2TFN8zIBEVTkwk2Pv5d3sfJy8SmXERJR+JKVZKk2XqiCtAJISGC6jZ5suyW5QxyiJGmuZQcYn0ao7TZA7crQlSYs2yY1VifrhGuZcju5tCPZ9CnZqzWG61A9rZrmvFa31sYOddAigPChTMElgpsvxFKFCi2mN1Ro0oviEtenmNoutaadtsfzctJZXwnGsNUBHpgRIA1DETdNG1MNcxfedWnNYV1Fo5ubuHk6VWhGy2nefBoYmQ7QbzkQm7b/FZZgBmIaYV6B15dKT6ObkirYqdLdxsS6uBou+dhIAH7ATsfegDJyj6rg5MZGpDTqMw0EsxYGy7MGTFl8WTl+nbF0JoNrGMDCEeY8qGho0mswLZ3frnp46BD/hGtA+3gEStNoXMsOUMHVKiO/ekW6cNaZ+vA7FBsy7p3bNzmmhCZeXJqJiQzfYR1+2xZiOR+Z2/pNwiEgoiJvz3s7UoLlLQR0w2hURshlpfgJq8JKrEKkM92Bax2dk2ehRtSSCQ+yMpke7lSFNrcRZ+jkJe12jvYIVsViBfKyxvLhDPi87Kz7EqoeYFikViDKydWS5vrwtVVLufmvaTZobDfGYaS0nRPrOVqhqRd1PuVAF77EFvHt5I1VaXEjEiZUYzGU0SrCZ8XydCRLEoqsYyVe0VjT4oRnrc1cvQo1LMu9FANqTkfaFLUdgWoqho+yYeSyFZbPs0Lragl65Hb6M+QyJBwsRJ17FXW2vt9sjXZP0ezA1U8W0BGEJQhiYtlfdtxwV9YchKFEBWQmDXsYusoL7jx6ZBp0HPF31Rqaru/IoIaES2rWiLz2jRWTG4LWPOZqqms6TU2S6zs0bR7VScMw8hmEut2OzN6tNoBlb7doX6lB4G6tneQak7aA4emUPfgTY6p/5sHjO3BtQhSdmM74F
Wk9SnRA5EViqkcYGsL0wKXfdRCOwIIk0iqSOmmSWYiLYZ6BeHa9EdJmGV1d1a13W2Rp7GKPZirE9TrG/PkOznhluIzknkpJw1Wov93BAVVjmlYo3AiUHUEC51JzYITzlTKNOqHuwby0jBxInVzRlmL8VI7taDZb7fOHwaCw7lRMKpAvZZe6PWEhEBLNJHHkwk6wqa4/izrxPqkeWouX7VlC/WMfIyRXor1s2ohehdS6YNbtm03R+Si4aLbgFZf2JcF5EFdmoelahB9VO5xOxfqgio5nJ/xLVvPPkgTXyorqioSUq+34S381RXpaNunvZAsjRKOhG78xugHZk6snXIOFzdj5YVwrZXi93EJTPomOY6xkZduJJtUVuKsqxDOauoDucEUBXKMqO2FpfOu0AzGZZdkUch6DBJ1tHYnQ7QEoZQ148r+7qkszHO4TENa1HjrNS6AJuYAO1L7YrMkUSvUwhde106ExAVXk9IfURoXC6jEDITqUoPqTosGTBnk/MC88N1h/wRJIuidql1PRXaWjVPc9y5s0Ayy81FHBdr3N2PUZA24KDQVhhCHUlnXcupglpGDmtBHVodryOg0cRI2i+gdZEUe2VtMTyNkT+Q6/tRqQpRFdXvroESmNVEqXIMgjbsQdHM5VEJ54B2N6GK9HucH1add1uytErvOBfQSsJSKWrGuAaL6Ngi3YgRl44OSRIaM9cNt7Ty42xIwmPex5UzJtI37rFcHy9nRFxRZBvQ6Z8q1japz/Kh7c96ixrlQ+Fdzf1+FNiquOrcCJ/wVEorHopNU5EPGcDGDHZEcHzZQVVcm9Sz06R+uPdzlM2MkKPPdcPNoyjqQadSdUdom7TbY9shSiIXY6B9ww7VPy9HbaDPZSNe5KZ6263D/wN+ggLUv9sm7hlu4aDPPhLh7RAccIW2bwKfFoaIC9fnkFBU51EBzKgHVYlRUPZ7x7dzF5zPPTpfrJFlSYcI7V9Z4i4A3E5rjYRFUpMZkBURsFJAkwGaz7BdLk9uUeDvDU9aSNovY4mIdQzsFaAIMzVv3Vd0L0ugtoqsY+160gSC6YJsNw8fFA39TdVcX3N+ukagJl5qRRYg2V1K1iaeNdiYUKAlBNyKBbRWHt7WKGsjX7jYlhOB1gVGMNfs4ZMnXodkLS5nNWE00Q2zJgs0t1CRiFhfK12LTQatCZwo1OcbWNQZz2INQHQJTWkB4RhrELDf46Fj54UhK4kqoUJyTWya6bMnKsPY15i9xxCOMWZ2+7xS+0z3UK5zTsSqQrFXAA1Z4bMU38zC9tu6OgcXuPtoE0gdR/85m5kbIypjrCdAl1QagtWRlg9+XBoXzlB0wzIwUCdDoDr5goIhbQtxGdmIVdmQlPp7pNASFi4anJXOmaHfberWtNjYW8gpaueHa6wAYKWa98Nsv5qVdR4XMHcWg6SVALpuj3qQBqJ5m98ozgGcNuG+qkKcRShVo+9JSu22MvV1cb00waw08vZEjauX8rpoWDN3ntcHAMpVvSp728YIyUsJ0tttuoJ8Ub9v62uMZFjX6wMtT1AtmmtYx/VChlmEaGm51li9pJWL7wI1gYiQHXUtHC0R6PYLvN+xCYMOkU5bnQhlHm6JV3vNlWUdqO+DGYrN26RJCCcjyowgBGDmXyIz0LyAYm5qXyoNn/5yDOGYan0wXt+QnvG+IyuTnMsK2/SRkbMSRbrcSNJgu1rW5gg1L1AcKCTHSs90bNEtz1tCL7Vt+o7XbZgeHcvR1il18l1XkQsSKfERFVHA1hAVNSsNkbYrEif09wyxgozFNlydNuGxLTRDyYwEukc6Gq2xqiCqdK6ZesYdAbMS6X6GvcV6lG5Lsrws14nR9nmae0lPuRdjncdQquqQVsxylPsRKiTAMtaDnhSNU6kK+ZVSR9TQs2fm8Wi3kQ6kPpasAHFjSTAHKp6VGrNaSxLPS62roVD4KCkBVd/rKm+XG7DXnALqQTJmeWyihMikQqUig5SoUyBVDVmIW+tBqSpEWcRIAgu5JXJC96kJYY+zCMlJ1IniM9/pSKdQ4OuVJXeBYh7BttK0hKHrzrEnU8bilnYmbf3dUcbqd8i6DDS/M4XdC641fWW+SdY6Bmal7qcIEhmhd3TKBU5DMKau6H60rOwC0kC1bTISUk9WKKclpi8t+mKW60yhABAdZciB2uzKwhbrwk0HkJkJtNrERM2M67A1a5uZFmH81yZSBlMvEwYXGbLLGB0gmZpZB5A2g1ef3iREQ7Jt7ZSNWdOmdaH0Z8J6hHXObr9EZoAuKbNdVxIOF2ssGutAXsS4c2fRWC9KbeWbszJTYTHLkRcxVo3rp0/UfnSwxK0ixpwRJiJBSTMQZLMC2WmC6nYKHNFSwRHil9LaTbqoEF1b1+7RPEZ5qtpIG7o9zJJUAVgdNFaQxv1iiFZPFcr9lqDYhKVk7qOI8tgUUS3sbQbKKClR2avYVFG9wnXUEhbAHECLgxLp7fY31TlISBtiaYSquLn+klkfLHdIvIyRnEQ6w26xR9GHkRbsdvVl7fd4zUOf2/6n6362w6DbPokmTWVatQTSaqeOxJHEr1wczidgqrbMVGgi3KglAToSblWp8tqqFifdCZULkiU4BFNbT6bEhSErSTQsVHQTTEZELF2ChCGRGfTZlwdDIi7zlNxBce0O2s9R3U512KJhVi4iIDN9wbZgtgQ6oYOuaJz6+O5+250kwSXedar37UtvTN9EVGhwHJK3ZCpxq42ZKoJJBicnNlFxbePg5+Flx5Ac23UFmM8w3UdNrmOF5MrdlgQI956e2byItRszayJ+aLC289SURYRkVmjLDNWbBEYUUfmrTdsIRHgAEuuucXSwxJ3DmWGNy6/GWg9Dlp7VciZn5CXyQlYQ1QpeSygjv0mURchuNoIXyyJYFrG2qEQsgyxQE5cKaAkLf68t9wNHrKpaVFtFjaWkSdTGLKfzl2odS34YNaJ7tjyAqmori6qAk6SugxGU5JT9ZqqN4CPikak6KkpHZTG3DEBtqN95Cgu23/V28UgO0y1EuhdntmAbnAh1JlzsvtO25nx2uLoEEjFXeYzoVCFqlpAgMfi20l8Yx+1oLI1Clv1ucHHIiiqQbNm1wsE74k0GrL5jpf19BEYSNRJc1qFElbh9Z6FXZC4pQyUz0QMwxI/cCtKJ8CHx6qwSOooWrS7GtHpQNJEtXpPRNRl3jpPEaCzSZNGILPtISYhuBOgnCCEYWscsLrAeoSFxncdHXFz3QQJ/hu1nl9/vvTTrPK95WROU0+VMr2fV0V9YIPdGqSqRAI0RuHO3khRefnV/abZbxaaVTpXIsgR5M4Ab7W/CnTVh0YQCqPYKlCrWaxhFTZqBKq3dOXlDTKpZ2UbiAKbA1rg5jKAk3SzLmkw1EUexKlGsUiSfmKFSFZaPlNrtS+LZchZBnQIHH6/dv2UaoVQR8kNagqPOQJvcrcmInRWWoDIAmeCCURHKJOospdG1ylr9DGluSKDL+gFneDDa58dIuqYqq+8TCJ4d9wyAHGDVrOwsoUGwiTY/nta6Iuuey+orTUI3JRzblioA0E
tghODCkJVdwSYPQ4iKpAXogzQY+M7pGwxcYt1UFUhVgXxRm+RphmcIvKJ2gC9VVTP+0vQJ2+IzF7gvt9PhdDoqYc0LWxNgHROV6K7nY/nA43mBvYMVFrMce2k3dXvI77prV48PM9bemdX2MQSmU7/DteRaysEFyW2UlzGWa7krurOcYX2a6kU3UUSGKJEvIGiTmLKIapLRYynrE7Encdlxp4YOBLxcmubIkqTN0cKvo4hMC6GqBchR0rgS7HeqGSyrIqpzoAC1JgXoCmy5u0LIv1EVEYq8yeFSRO3CkIsSmUqAWYni5Usks6K1HK1r11d0h0TGwPIBIFkCalmHwaqsK9Q3UvbHMFZvp338cSXragwz3JtbVOpra48h17PhSrFX42b3hUfmREmTuqAy+zJD+G1DzEbZHJdUQNIuByNpTMiSQlasqoh0xBfpThaz/NxoI6eyJg8V518YspLGBZItmeQlcL/92AiMoecJgU8r4Mo8StsWaY7FtRPkZYw7y5mxfgsp0OsXqwQO6xcrWsedlWtdmhSbdGgtPO9M7bJSB2HPejiKqJP9tG5TG5qZzEpcvXJXhxePISa+39FpqWgsHzaZ4CBiYZelzxLx8BEV1zb7fFPBdV98zzA9a3ePrVXTq6glIPQHGHlBzJw4MCIs+OBKEBPqjczI21dWKk/iVgn6Xamimqg0A1yexwAj+ADq55zqYcmHJK0JgW9Ts1IPjJRtl9b0sZc3iJISs71Mu7SSuKwnNrMZVvMZVgcK82cSqNMmQihtrCQwraySZVR6NGlbYa0Q7XIL0+Wr01ivmcPbL4UM29+1FTaPuxM1dCdVWihbue+3lP6APtuuy5bIxIhnhdbPAbVVTppQhWBbbuopkMYFogHtuzBkxQUposFXjpfpIxYhxGNTd8C6UN7z9FlefJYWXcYS6O6lme6Qbt9ZWEsRtMeVRYwiiVEq1YQdti9+FaMW3AkzQvGz9N1C0CyHkRMOStW+t1hjkebYT2tV3ibWEd9v6yIIPuJg75c+9x0/FH31ucjMEE0N0H2/slIhKxROGqJCugjDOsKtKJZJXrTeGava1eCWFd8s07XPXhdpLE6zFHePF0YiPAPcrWDN1JP9HEWzr2TkTQ+yXHMSWeGuVl2kcSF9GlSFonEjYY+dg1xLql7n6mCxNsjXXprhynyJ48UCt+8ssAKw/6F2OMkb3unKOQOY5MNXxta9UdZZSdcWFWiWKKALrnQ4N4UMd+7/rERk3aeo59FuyYmpQfGFEtPE04jgApDMSiPXjyuL9SbEI2RyPdTqPwWqIROALbZjp1Bxv8A25IfoKzOFFmFK9JnhXdEaRhnHbDNVBZap/IjQC0fagKjpEDrriwDemQ3f3geeQCykrP6clNg7WOFwscZ+utb3zOXa2PQ3HmrdMNrAfh9uVRlzTl/doXDVuy5VR9MSSmCyUuH2cm6uvrxSJjkB/MQW7DfmCz3x85wmKIsYuRVZZOfAkd6JrFCjcyRJWK7r5ItVzlLsS1YQ6xrIFcCXBMjX9QrR+n6pdgjWqzGrCkD9Od3j+ZTaaJL2P4nraxJTrGMgKRFVtTtib7HGlXmtyeEBASoucWW+rEnLwQIvFtew/5ftvTRzmZj3Q7KSSMJ4KsdXb6dlP+wkkDpkmawpTMzMQ4YNDV5zz8X8JrZ7hz1nREx4nip+X10JShNVYoX6uT84WDpF5QQtSJ8gwtDu9+zIwbVn37YQ34+aldmOBbb2QDLVjNceUIZGZ/RZkkKSeZGlJY0LHC7WeOn2vl7B1lVv22D5hZLyJtiJmkJC+oKXNWfpqImoHDUdboigFOgXrPp+87HPwxCiM/TZ4+Wmdv/QveOdnQQiKqfLmSa8VR5rEaIEKW+J+BxY7pBI1YLR02KO0ztzxKpCMsu1oNpnTbl1d4H1aYpkVuDoYOkfUALeqTG5aaoiQr5WSGZmcsL5wRKnqsT69qzVieUx7BTstEQAjxaS1j5TcQmk0Lk5iLyURVSHkKe5OFDymfqV+RLJq17Es/tXkH5srhc/jeWce62GxRMRqMuSt4ut3UPCfdulU8XsOWqCBOKkm1ywWMdOFw5PWOjbBqB13aju2nTuBKT9zyBHCFGhicIQgqHfWanPOWeTcuACkRUXxkZI2HWM2TfFuYyZtvAA+QiMT+/ichUZwsOyJi1HB0ss1wkeOToBANzNZjhZzpAhQcnPb5nnddhe4/e2B5MOUdlwyXIbZMJezGq3jxHiK5BNyaox9e87JTZp2zZEuEb9DtKSFQqrLNGDYUkp3WlNJgs8s22IRc0+FoiAqol0yWsXx/o0Rd4QWAl5WS90CNTWmZfWB53cL3w5Bf7+cJcq4TRL27QAReUcJOtGRx3SQYQFqN/pvIiRpjnK/bi2sADGu9NaVsxZvj3ZkKJQiLxkSJDMmpXAWR8ipVooyhh7aVbrKh4BXtrbR/6X+1Cn5jo9un2WgFbSoAgePW1FsSc+Onqwserq+7qOoY4yOYtrUxcXt5o3QtbK8cX+XAnZABiRO4Cpd/K5JfuW1vC6nj1auRCcRV8X3Y9uoDQu3T/wjv38Q9E3e/fu77G8hGh2nMSFtqe1GZvq2k/XWGYJirKehWFemCvRNugMLhEnJ+jsC7Ga8I5CCgHM18roeNI01x1pn9Vi6O8aUn6mzMRm6yIxtknfx4DqGH28cC0hVqU+kiO5hvJ1olPVd0iqbZ5nmW11+YHg7hYayE/vzLFazoIsLVURoQRweqe10EhJBHkeGI6MkTO6rs618u3sOiWRJp0zUSXuAMjvJvV9auokl0dZxFqjBbRh4ByklbDXTItVKd4XO4TfxkP7d7CfrnF8sMRLLx0gem6O5CTqrNtjLzaoXUZMM0yuH54t2zw26kT9VXwhUhZRw2ELW1vCUgr7u9v4dv5dynrtIiY+/Yl0jyWr71SwRfz02d4Xcu6pJz0cF4asuDCFZYXq2Rb6zPOhA0lf5lKfKZcgvUTkDuI4Wqx0nVTH+jRFdTcxwi/rD2YnzJckJ0hr8nDRmZ2Yay/NRD0OCSIpoilRNYmdNX9jYZOOqerp+75p/cDmBGaoK8zlyuTPIw2odtSFy7o2lKTwiA4t+m5cQ2TOL1E/s+vTFKcsAuN0OevUxVE/tzEyJMgcQRpSxAd31bSVy25Qm6SI5CGtI/heUvvIThOjPhqE8yLG/v6646LyrXGWqBKPXT0G0Ioudd8iJR20noHZvMBemuGBvbu4/dACt+4usDqZIX5xVhMXzk2ZWJZbWIq9ElUMvYCiXkOocfFEx0krtCfRMj1LzKJik4i8iLXliCY2tjA2meUdcsLXY7Jhk1bAJCl9BM8HMcnjhBZV177ObzqhdYaTovsyGmgWu0NQz7Mp30bwjDVAf9CnHwD8Al2+72i+7KjJF2mOXMV6RpnMCmRN1sVqVno6YiIk6GyjnAIuMsJffPtF3kOmr7VoxJHLrLYIzeJiMsIRglk87lzrcvpXUrLuDDp+oMvINUEoyhgnFhEIRYfUBMCIKqKBjdxKjdiVkBVRuwI5r8NDkqSBi57jeZojWdSJFtO0X
tKiWFthsZa7xxfqSgMuCcTp2T/NUuAIeAn7YjsA5rppjjEy8DoyBh+m63pQSdtJSujsGgAOkzVm8xwPzk+xPlS4dXWBj+1dw/rZPSR34s76POWsMvMvUW46VaFMgWqvQLKfY75YoyhjrABUp0qX4fe1UuZaTvJ6aHGHpPjS2OuJU9rePyJy2tJlWVFCLCShGDKGnffxbmz7Lg5ZUTlS1S9is83um5bbBL5Bo09U6SvrE+ma5/e7jaR9t1cL3WEezZdY5imyWCFXcUt0mN9YylkAdEP6ABjROlKb91BPY/tmB+tSYY2608jLtl1jCcQuEdrGTUjNlOQl1Gp5mqV49vahTpNPCFsnpes27CMv0grDfJ/hVmJumXYlaFkDQpCsghyUG2PvWqZ1CM82lhbpDZN0EK4Vc2eKWQnT9v0hNw/PxiyJfxdpriOdJNcXhfUfJmvMVI51kTjzA3ldAk0fOlM5DtMVrs6XeP7aAbJCYZkluHNngeKFOdSSwtUZWcuYRukwx8HVUzxydIKr8yVmcYGPH13Bx5+9Vv82zTGUkXe2l2mLb1GaC1rmRayFxDa4AJn3g5IViiw0kobJJiljCMq2dJK7nLCFIBrQngtDVkIh/Vh8gKBBYBc/6hitQohba4ieoD73iJBWFv2xLhSWeS3006GokUlYuOtHCu1bzHIczZc4TB3hA/zcIVoR6qQLNThTog/z5llZbcECMgQuUjOGxNCA5NoHuJ/PvmftNEtbQXaj3RgKOTqj/j/E4tIr0J3IBQXUg2I6L3B13qbhT+MCz94+xGo565Adrq2QonaoThvk3pypAserBbJYiYRfFfXgvUhqwv/S6X6nLl22sUTSbxpqUZG0WHzS9+D8Dh6c32nqTHCSzXHr4QVevLtfL6lwp17zKJkVuo/IsgRHB0s8sHcXN/Zv4yhZ6ncwvVG35W42wws3D5tjc8N9bN832+3t0qS4IC25QAnb+kjKttw3RrlzRkamxFZ73CeffBK//Mu/jD/7sz/D3t4ePudzPgc/9EM/hL/+1/+6LvOWt7wFP//zP28c95rXvAbvfe97B50rjXLM4uEdoQ0aBNZlglmcd/4PRejgMURrEDqzHUpa6nP6c42QO2hdKE0sjBfpyl28VBwYgwjPSWDPQsmEupdmOEzXo0SrLhymtck4K5X3HsxH/K5jjunDFARIekZDnsGNtTLCs3a8WuClO3sdawrgXsRNQilYNloLiHxMaO2kYzFEr4FRaa5riFWJVZZokkz35up8CRWXeBpXxHvCjwdkK+fdbKbJBNVNv91hsjaEkbT9xdW+ro/e6wf27gIww48Niw1rt/7PiGvf80L7xX6tTHCULHGULPHg/A5efeUF/YySFYe3wa7jOF9gpnK8/PAWAOBjJ1dxq1nkj9LSp6qoXWRoyUouCGft1cQBefE/l8uMrFc2QRmbFLIPQ9/Tc29Njs6JZeV3fud38La3vQ2f/dmfjTzP8V3f9V144xvfiD/90z/FwcGBLvemN70JTz31lP4+m43za08J+pHt/2PrkdA3iISY64eKIIcQF6kel1qciEsaF1geJLh7a69OgJV0k1DRLEb7ex2uJ9+L6bMs8OOuzpc4yWaYxcVWCMaUkNo3JYHZVA8zJOLoNEtxezk3TO7cYmBn8eToRl5U3RBTReuooBtVA7f7USqnMYCo9EHFZT1Y7pkD97pUOFyskbMkb651kaRBk9yakluGXC76e0wWjbtegsH32c+KPVFblwkO09paNI9zrMrEKCvWbz13s+Y4qoP/RwJd58zatyoT3M4XuD47Mep7Jj4ylgIgN9btYm7oSXLVrkHlExjzbS5XGWGRZE6CMoV2xNcHHiX17zC0jwjpB/vqPAsL81bP9N//+383vj/11FN45JFH8P73vx9//+//fb19Pp/jxo0bG51rHodZVlZl4vyxQm68fewmP9bQQcRrrh9obeEYWp5Iy4urPTw4P9XbSJT3yNEJngVw99YegHaQIusKgWcVXSRZrwjW7gyl38KwjDWdd902c1VcG7sgMmOelU2eVRubuI6MQbCHtJxkMxyvFqLbIgT2c9IHSVMC9BOWDlHxtqkyXDfmvm5oLOVDsd+tw2SN9Vzh9mqh1yQiDYQ0YHLQ/qxUUEXpHQzt37p+DwR3d5yLzwWRFLsuGiB953LtnzOS4sMVNgjb5a/PTnAlWXb2HS1WnRw3C8sdlMSljmqk3DGJpfcB0BHLSkJZOzX9YRLgvt7Aeum7x5w4TNWPhdaz6fmiAcfv1PF+61ZttnvwwQeN7b/927+NRx55BNeuXcPrX/96/MAP/AAeeeQRsY7VaoXVaqW/Hx8fD2qD7+Zuwx0Q8nJKnYizbIDOZagAcmxY2kzVfu3DdKXbRm165OgET5cxViczLXCkAYhCOnO2WNdhuu50qC647rlru9TB+spvC1M8KyF1DSUy9j0fS57XjbttmaciUZGsKCF5LCT3IYfOn2JlM9X7Hdt9sAkQTwbmIi12uxNV4oW7B1qoahMD0nlRVJ0Em8QkcYmsUFgkmZUwcpgVsu89m8e5+IzRdiILPjIdQk58ddhtmMe5JjL0fVUmhlWXEwhb8OpKfqm/szWFOEE5FCKi1oCxfRtExGW9CO3/ztqS7Pptx07wd0ZWqqrCO97xDvy9v/f38Omf/ul6+5vf/GZ8xVd8BZ544gl86EMfwvd8z/fgC77gC/D+978f8/m8U8+TTz6Jd73rXZ3tZObnN+I8iCGHWmJs4sJnOJ2yA4lLW+9ma8RowZzlw6bOeKZynMRzZEcKz9AideiKBldZgkS1upeQDjRkW9/+kBnIWTw7U51zU+vfEItfh7AUCqdZqqMuJEhaDH/iLtnSEqsKJUi/0k9IOvl/ODoL2flJjZQXCJCv7SSf6XdlHudAusRhusRJtsBzpwfGIMqzxdLKzXyNIhpkizLWOU9cupCZg3D0wXbRcHACEfK8SgOoq/yQSQhtO84XWmDM17bhoOeQXDY8vUFfxI6Un+kkm+EwXRti4bb89JaNoZOz84Kgdg9wlUVVVU2b49yBt73tbfi1X/s1/O7v/i5e8YpXOMs9/fTTeOKJJ/BLv/RL+LIv+7LOfsmy8vjjj+Of/68vx/ww7ZQfizEmtaGDwtDyoTPesUnAQmD7toFux0jXdZIt8JHjB/DS7f1OZllyC13dXyJVBV515cVBneuUL+o2X/qpyE7oTHVIfaGQ3AH8WSRR5LpUeOHuAZZZ0onA8K30yiEtEWEnVyMYC+911gWyBbksxb2dQTYge7Jt2fHl5+AJwpImxP+Vhy+JJJIIC2AmNQRaYsLdGxTNA0ALYiX3qctlGkqIV2WCKyzyhp4/30xZ6jPtfdx9w600He2Koz2LOEMaFcgqhWWZ4rn1IW7nC6yLBCf5rBMtaUc69kXo2GJijpNsrrVCh+lKW2xv54tgkuKyfoRqREJxFkRmTB+1OsnwM3///8GtW7dw5coVb9mdTBu/6Zu+Cb/yK7+C//W//peXqADAY489hieeeAIf+MAHxP3z+Vy0uEyNsS6hoab8IeVDZ7xT
pG8fAtvNojue+QnQPH+37i6MQYeHL2+y9Ll9Tt452la2szCTTqVxGjrY7Ar8+TrN0lYL0Ogw+sDXqxmS4ZNbNEoVGaHNOkJIWt6BEZMqj8QVd8eEKkvXkLBrWwsDOVlZXlztaYLmWgFdgo7ciwvjd7AHWpsQhGAe5zjOF5qwXLHe8UXcTd0rPevSNbveQ+l4ft5FnOFQNeJeZFiWqW4fRQiti6RDWEKzV/tcOXR/NTnkk7ORlo+Q32OKOnaFPsJJOJduoKqq8E3f9E34z//5P+O3f/u38epXv7r3mBdeeAEf/ehH8dhjjw06V/0SmL7ks+jQhw4mYwazIRqDPl9qcH4XRz19D+ZhuqzXEkoT5HHcLH7YDmghq45KcHW+vGPm++0Z3VlhKsHspvWMIT32c0bPzrpUOMlmuJvVUXxcMErJs/rIiBRCGgpyBxF4hBDl+BGTuKnCiDLyaVFsuHKhEPLCTLpG4cM8TwhQ/w4Pzk/x4mpPb3PlBUrjQnRd8Mgg/p5KroQhLhu+nZOTZZkibVLQZixH/iKuCQSdx7bKSC563ib7GLt8Gpl9BbWJH8cndERcAHdkpSQ8dvWJh+mqE6Fkfw7BVO7sTc/pwzbHTtMNFN6urY7mb3vb2/CLv/iL+K//9b/i6OgIzzzzDADg6tWr2Nvbw8nJCd75znfiy7/8y/HYY4/hwx/+ML7zO78T169fx5d+6ZdufP4pRYiuOkP8r9sWTkrkxad1MY7dIG4/xFKxKut09yQipFDWsoiBtDFxNzPDoUIzF/nwdQZ9FpfQGegQUjAkymwqvcrU7ZPA9Sq260YiLGNBx0quoNZSx8SwoKy33bV2iJDwtV6y00STHfsYLqwNaSdPGsYjSdaFqu+VcKsPG+sKkTpREBqwvoz9LnPSzt+VUNLu239V3cWtoiZgnMi4Pve9q8f5wijHj7uanIptSKMCS6TijH5VJljFiUE+7EjBk2xuWqEtoiKFgfdNkOjcU0yIhrrEp5yMhdZxYUKXf+qnfgoA8Pmf//nG9qeeegpvectboJTCH/3RH+EXfuEXcPPmTTz22GN4wxvegHe/+904OjraZtMmm12HdPY+P+/U5wJat4xdfmyeDV/IcIiIig9k8zTX7oHT5Ux/fjEuO+HFUsdGZIM6lsN0afjW+8Bf7ON8YZiaXT75vuvjcBGqUPCOb5OOYAhZ7jtXn/txFhc6iydZVwBocajPHcTdRa7Q3Zy5DgF5VVwTbUp7LoSlqCKuMVFxrZ3KWa4TIih1OUaujXpaN5StVUkEK9FMFTjJZzhMTavf1eRUP78fufMgeAAs1+9I68v4V79tyf+mz9FxvtAjhY88cBBRWZapOIjSZyrnIlQAtOsHABZRS4CeL46MuiSycJwvcBsLYz9lzzWjJhMx34zd3pDv23A3T+Ey2gZ2abHZuhvIh729Pfz6r//6JOciN9BZuX42NeMPmQlLs+cx7qOhGPLCEq4kSzy8dwfPoZkppsDt5RyLWY47yxlWTRr2vIwxUwVu7B17Zy+A2bEMISp2fQ9byaUInCxMYeId+2xIFp9tktw+q4xowaPOvVQ4RSq69Raz3CAsnXDcgPwinNRIIlwbZaHQt94OuVTmaY6DxRp5EWOVJcjXSSfKx67DJim6nVZ+DptY8PeXD9j0LH7kTpvWYV2ozrVKRMUlsLX7Cum5csEefO1nYhFnuOXx4JLLJlUFFrHCrXxPLNdHZlZlgpNigTQqMLd0MuR2omuzdTS+iQMJZPk5ybrj6mNdbZwSQ+u0r3lZpqKeaAjIlbcNdK7vvLiBzgJTWTHGnHeqmTAA5ywj9PgpEGI16ds2j3PMD57HQ/MTfOTOg5jFBa7v1SF/J3szvHRarwuyXCd44e4BHpzfDf4Nh4ZmSi8xf7m5v/1Dd64DAB6an4wmRISxLkGpjm0RFpcVywdzaYo6x4Wkt3BlKAZkIa5NXrjlxWWlsd0jsZC3xKcxscvZeV6G1GMTNi7wtO8pPX9EWJ4+vWqIQyVSZqfBnxKhz7lrQOO6EiIXWaEM8r+IM01ebKIiPXO38j1cT2syx99hsrhIRIpfzyo2XeLUb1yxMsAOcYtt8j5u0pf0EZFNiYpUxzbJyxBcOLIiYSo9QOh5OKbQxkw1UG6KUPOmvf1qcqrN3eR6OckWmKkCV+ZLZKXS63icZHWkl2QF4u4fAINIhOsFlGZj8zjHQ/OTlmx5iORQTGl52dZxklndB/qtDtO1TnRGoCytHK5VbF0WFomkhGhgXKRCyomSF7HhbrLrt5cHkNaR0VYVpi+xQ4vXpTuZ2iLO8NjeLW1dodwhetVwFqpsw3Zh2M/tWDeC67hllRriWgmr5h1LowJofj77fQvtU7JKYd6sur6IMiyrFNfUXazKWrNypJaYx5k+J0BWnQxXkiWeWx/q+smCQt9tHVtfe+jeHmn31MLYvw0h/xQkZFvn3hWZuS/ICuEsrC6bWlz66uaY8jxDZhmhINICAB9dPoB1ebXOGpkBWVwvG3+SzXR0Q+gKry74XjJpH81wqa2+Y8/DbGPs87wp2fGdLyQMnSc546HOHNwak5ddIiNZWHiZUtmWFveCdbaLyd5uiMId5+uDJNAk0sKte48vXgJguoP4IoPOBfIcUUChGEJUCIdqqV00HERiaPs8zoCyJg+3i0WnnhAsyxSHaqk1K0RYrqq7uF0sMI+zel+MTpsWcYbHFy/hVr7X0bMMcffyMr6+Zcz9P0sysin6LDFT9Zf3FVk5K/ge3l0SjG3WP/Tcjy9e0h3Icb7AC6tDfPzOFdxeLXA0r2csOrV1oTqd9bqYbh2MMbBDOTfBrix/0nk3dTGaWiIlWjyyUhkJzvoWh7NBQl3AsbhfYGp/IweKMq0qfJutp8mLUhMW6fp4eboug2AIxPq59aGhmbIJCycrEjShZ6G3khWwDxu5JKIMq6j77BskRZ8nw0khR/yE4pq623HZAs0kI8pwTd3Fskoba0sj3IXpNno+O9JWlrZtw/o12tZnWeKYmozYBBHotoeS50lh5tuEayK4aT9535OVbVonxpyfw2b8u2zbWLMxh+8FpReJrC0Pz07wsr2b+PjpNe2CAepr/sidBzuZKCnV9XmA1IGOwRRWuKHERxJgjrG4+CLNeCiutCaLlLWVgwgO4LbEcDeOjSFh0z5rydDwa8kK4ntnOGE5TNaakNiWxpnKdeScnaytD6HkxOWm6pZrXS+cnKzK1IjcWVatHsw+jy0et59DV5tvNqHThhUlynBV3XVe1/X0tuEG4u3oAy/DBb+u935KgiKRk5AyfJurjl2TmLH95H1PVmwMIQ982zaIRKg/d5cYqxEBui8LERYqv5hleGR2u6Pwf3h2gv/vS6/E8WqBK/Nl04EnRqfDc0oM7cCnwhSkZSpX5VCS69I39A0Ydt08nTkRTD7ISwLcPisLJzQ+sS6Bu3QIPhJi6064FWiZJUb6fNcxALDfLGxHi9/Zlg+gfU7pGQ2ZcUruH5cmZZMcH6EEpT5PCsSNdQUp7EideZxpgnJSLPQ1+iwVff3dzWIfyzLFskrxfHa
kU+8DLRmy28wJE9Xx8vlNPJ+Z1hUf7HbY9e/CarJNuNx424JxvwbcuwtDVuZxhoUnmnEKn9mQF43jLMKph2JsBxfyovpePr6PvyREVNKoQKoK/L2H/gIfvPswnjm9gnWp8OJqX2cFBZqZqKqV/6sy0Sb20BdfMqG69vVhSOcV8lwO+W2kZ22TZzNEu7QqEyMduQ2btISEHktI46JDdmhbFiuvqwgwiY7LosNX7k3jQutrJKGwvaovX6GXsp1yUsE/P7c+xHG+wNXkFEdqqde6ITw0ryOD6H4epkun4JvD1UcNITEhEw2gJSz02SYsZHEJecY5aXO9PyfFAieNmJX6BrKiLOIM19QdLKsZUEILcG28Sj2HZ/JruKru4vn8yNs2ux2dDLoNEeKTrl0TDcC0bk2BMdewC+vM+R9FJ4IrbNXevw3h5HkjMtskJjamennTqMD/6+AZvGJxUyd0A7oRQpu09aywzWevDyHPgmRd6VpUciCfQQJf6TYUNqEhQuAS8HLrjCYtlhVGckHZ9dtp//fSzH1Ol9iVrR1jWz+O1FKLTNdlUgtH02U9wKL9/Yls+yLSfBbdMVbZMe8MHyQlwiKdw0VKhp7fSPWvhbdrLNQay2qGRdSm2OPfbyQ3a1IDiIQllKwN2T8Gtlutr8xZYhe6mPuGrEg4DwOabb71zYynCIMegk3uz5CXlz/grgGbZg9HaokjBVxPT/RxyzLFXy2v6cXi6HrHWlVo2y5mSXZ68qnEusB0RDgkUoQWc1s71rUh2LoPspTwgX+IBSY0rwtPfe9rl70qr7HNeh2krLL1Z/PecJcPYdVkmOW/kf2+UWSQ9EyEWHSHwPWu970DFIXzfH6ENCq0iJZrOuz3a0i/0nd+sqCQG4hICScqAPT3RWRG9FDKflfbfOfnrqcQYsHhIxkS6aP+z3Xc1NaVsdjEIt2H+5qsSOizwNhlppoNh3Q+u9Cs7JrAuYhKN/+JO+oAcT0TlYRzQ84/ZN+2MGVCpm1EGUl1EkGcxQVOmnT7FAnjIi8uIsIJgG+/j8Tw0GhA1sS4Fle03VikF1mXCrN5/Z+i09zlW2sId2dmlUJWqc5zKj3b234PN3XfcvBBe8hxU2BVpkDziD2TXzNEyuQGuhab7qBlleBmcYBlmRqaORu+65BcXjoCKc70Nt8xoXUPOV4iTWdFZKbWwlySlQCcBwvMtrHpNQ7toHwP7pi2XE1ODcvUJh1mn+98l9i1aDfUtWDnnKDvt9OFTuxHsF1A3C3ENSI2QqJvbB1LbpEYSVdCoPWMAJl02N+N1Y3t/82ijqQtIdj3/la+h+fXh1gXCdalwsOzk07ukW3m8Ql9pkPfHxqkKedKm1U2HT1A2uf2DbZza+J4q9jHraLefrPYx43kFpZRgmvxaYdU0fcxbh+gzfXiwy7dNJI1xv58ltaXTcnLJVnZEK6X/zwkDQvFeRiUp1jTgmOMG0dap4S2hSzedi9hKgse/WYUtXUlWeK5+BAn2aLXJQS4CQlZLnxal6xUomvHXp/HV78kCLZzo9h6qMNk3dTRZowFYKw1w92Ry0aIuixTPNcQFcJxvjAW0iT4CMvYDKlTExVdbzNoE1FZRBkWKtOrMm+CvsF+Vaa4iX392UbrHupaxxdRpgW7HKHXT9fHiYBLZHwetCWSpeesyUsRDUgrsMW27BRpVHofsl2b9LfhKpoSUxMUV0KiTdozRLRFKb19qbNdx1EkxqpM8LHlNbx8cRNAO7hQnb7wXjL3S20fei8knMfnSRJIkpXlufgQt/MF1kWCE8wM0sGjg2yNCHexADWpOM3SjvuHVkw2RLiq8IpmjXp94lh7UcDGasKtKHXZ7qKEPPKG3D+3i4VOfmjXZy9GuioTLGZdokKicjrPlWTZa3WbUhviw7JKg6wMY847djDVkUhRqsW0hJvFAQCIZCpEoxOyL0RsvGtI7XGRq/OIC0NW+jDFgDEWvk6DLArbGIDO0mIyxb0eU8fV5BTLMsWz6yM8Mrsd1AnzHBDXZycG8ZCSpa0sIa8eRJq3SSItFwUhzxSlN//o8gHcxgKzRufRBzufCFlmuLsGNrmB311kr6vTe37HMg59qxpz0DayNHA3z7oR1gL1OljS8R+8+7CxyJ5Pzyb1G7t+7ymEmQ/QY4jLJud3EYNbxX4T1lx/X5YpPrK+ricoQ+5VCPkg0nbeiMpFwH1DVoDNBg4+cE4ZpsXN6Jfoh0vszN0+NLsmwuICEVhy8/C6eWI616Bkz4qJtCziTHxepoIkwnUJw6ckwkOf0SvJErfzBWZxgcN0rcW3QL8bhv5zVxLXjNB2TkL0Nv7dkXDO1p+QtaNtA0U3NUsJNAsFSqntJYubHeX1f+9e199fXB1gXSq9wjhZXCTrjO8cEnZlTbHhIirzOMMcbar9qWFkq427lgEiJctmWYBn8qtOojJUUCtB62CYpek8WllccLm0zoPF5b4iK5vAlcr4rKw19xtCBlwj70JcZ8QFalEj5bgA6pkuDxWfx7kmJ/wPQEdP4EuuxY/zuYWmhmuAchHhvns5FXG+mpziOFlgFderbB+ma6+ORSIuh+kKAJxJ53h9EskJEc7qz5YGxShnZaIl2CRiYXXw88YVNItzvLg66Ahzn18famsef8Zcz5l9jrG/1bYsfnTN9mBnr1OzDVxTd3ET+52B9Vaxr9cM8kX/uEDXQQTErt9FRsa6xs4DfGJdF7ZNaC7JyiXOPTb1ydvi2EWcaUuIbWZflQn+/PajeGzvltfE79PFcLcQt7KcF5fQLqx4NDjxkPKTbNGxltg6EBvkblnFibH+EJWnqBrCulTGWjpU/0mTsI5bVGyQ9cRe58jebpMW6Tmh37oVWLZ5aHSbmqgpIitSfZKlZhNs8xmkayUryhzhlouxIN3JVXXX2U9klepkH7bhaxuREYmoAG2uGcmKeR4sErvCti0xl2RlQ4Ssfrlr7HqVzW1j0+R0dB/4/bianOLZ9RFu5wvcRj1g8NWDAVlES59tsiJ9P84XmrDY57fb6Nt/L0CaOdNSCYs401FCPOOwz5JhE4Z5nBtWLrrfx/lC18dJCv2GJ7kp7p2pdt9hsjbaILVDap/kmiExrT0Dpcyqn7T3PK4kS+PZeWF1aCzaKVlMuLuC6ucWglDskihzorZL98cizoASxhIAIRhyb1wRNMsqxbJIO6TlXnH/TA3puqXnoRoQ0XZJVi5x32Ie53qQI7JCpnqfRoXrU6S8JNzFZBOWi4q+wZNCm4/zRRsV4xGO2iHAklstqxSuJEv81fKacexM5ZipHC+u9kVhr09oK6W13xTLqiYX19M6hf7HVtewKhO8bO+meG0cnKhcwo1lldZEhWEeZ70WlRDYbi1fNllqx73q/tkmZN3PJVnZGc7jbPg8tmkofNlsh6DvXtTmfaVn6ADw2N4towzXoXA9gS2wBdDqElhysOO81m3YkMS49zp8C0Iu4gxI0BGVuta+of2uAZvC1a8kyw4BqqNuwgZ4l0WFzi9FhNFnr9jV0ixQZ/3I7DaeXR91Vgd3aeD4dY+xqgC7Wz7CxjbdIF
wMaqwCzc4Zoiv03RuXFsWOfuL5VLiw1lfP/Qbpfg15Pi7Jyn2A85KXYyxCFhrbBIfpCusywTN3j/ApV55z6lLsZFyrMmmiXdoB68H5HdFN4KrrSrK8cAnnOKR1YXjG2z6Lgqs+HfkVFXhkdrtzz1dlgufiQ8NFBMiCWgkuQbWU9E3/tnFXp0E4VEvtEoICbsV7hquHXw9dJ33mA922omruJfQlM7O1ExJh6RP8+vQXXLsiiW2lOi5hgu7NpRtoYvhmi2cJKfzuvBKTKVcWnsLqQtE/J9m8taTsd9PNSwOVa7FJ/ieRE8kiQxaBUMKyzYXCdokxv5vveiURNQA8tz4E0L/AIiC7gFzLD9jPiLTMwwpdVwElUCOXUFYp3VZplj4lzsq6MjW4kNenEwLQCWe2NVUSbKKSVUqTTZfI1tVOX3bb+xFdchcFH3tJVkbivAwUU7lL7lXY6/gMuR+rMsGt1QIPzu8AqNOl+1KZ2wOULZjkCBnk+LaplxsIxb0q7u0beOtBrH/W5nIBuQgrX4SQPyv0e5PQltp4O1vgetrN9aP1D5D1D5RQjuPSqmJC+v2JDIaWtyERFY5FlGGFfrJyHtbiOY/YJMX/JVkREOLj3CZcC+n1WSXOq1UFcLetz++/Sf2++7EqE7ywOoSKS5xkCzw0Pxl0PpvU+K7B1rxI++2BjmOboc/3GkkhuPIeEezrkkKkZw73jg+Sa+92vsCKuYXsfD99s2q+j/QQfMBdVukkROUiWFU4bKsF1wjpvpOig3qWoJF+Hyl5XKhw9tIdJGMT8nYuyMpP/uRP4od/+Ifx9NNP49M+7dPw4z/+4/i8z/u8M2nLrjvvUIKxLSIyRbTDNtswBZGRznecL/Diag839m/j+uyks98HKXkXJyKS8JYfK20D2gXvXIPuvUostgHpXtjb+H2WwqRDs8LSfltoS9toAUMumOWDXFYprWVZlWnHPWRbAq6xnCFD3A73G/h9IYEtIEcGueAKseVYlvUijURUQjUznKTSb3w/RgnRfdh04cQzH6ne/e5345u/+Zvxkz/5k/jcz/1c/MzP/Aze/OY340//9E/xyle+cmft2OVAcFYE5TwQk6HYtM3STPg4X+Ajdx7EKw9f6kRkDLX0hGS23eT4eyFL8lm5sELhi/YZApdljIMv00Dv73PrQy2kXpapzqTsqwOoE56d99/+PMLlBrLRscawz3Tfj9RSlwO6SwnY55TIyP1MVAhTEO4zH71+7Md+DF/7tV+Lr/u6rwMA/PiP/zh+/dd/HT/1Uz+FJ598slN+tVphtVrp78fHxxudf5udwVm4Ze4VQjK0nWMtLPZ5iKgcJmtvfo9ttkk63lfXUMIytQ4l5Dl2RWydNZEJyTgc+jvaljPbgkZiXimZ4MeW17Ca1eUXceaMGqKEYlMTlYvmApIgudtCCQIRCtKk8EgsifzYRIV+N05MJOHvRSYskjCcL7+wKc50ZFuv13j/+9+Pf/2v/7Wx/Y1vfCN+7/d+TzzmySefxLve9a7J2nCvCgyB3RGT80CAQi0XLmEk7budL/CJkyPcePhjk7qYXLNu3zkksiK5goCwwWZbz7AtXg6J7LIXhdw1YeEraUvuG9rnCy/ncEX/2HXSfqnsh+88hAfnd3AlWYoCWmo3pZAfi/uBmLiwKlMslEkk+giCKx9Op1yTIfdWsd8lRWW7aCivaxFluNYstniz2O9kuOWCYMnCswtMdT7pvvFEeoR7UmD7/PPPoygKPProo8b2Rx99FM8884x4zHd8x3fgHe94h/5+fHyMxx9/fOO2bIO0TLnirYS+iJVtnOssicvQc0vl10WCRw9vDyYqIeX77lGoiJMIC2EoUTmPxHuKqLWx76hNUFxlxoIvDSC5FanuVx28wKwyqbFiMLeoAP5rvJ/JSAhcEUH2DH9lvWfaukJWEqv/tokIBxESEvNSuWWVGp+Bej8XAN8q9nFV3XW6pLaNXVt7xkZKnf2UGUAUmbHWVVV1thHm8znm8/nkbaAH2dcRjBkEQjvmqUjNNgjMECHiWVthXOJVjtDMpiEaBX5ePiBKFh47EytFG0jtt89LeURcz+cuLIT2szw2dw4fCELfj6EDNC1WCfhz50xBgAHKmJt3rCz6/EKWXht80Oi73svQWBmujLNkbVmozLBcEWlcWO6eheqSBTsZHOVfGQKJRD2a1nmeOq5UKyLsImJIpNSZjizXr1+HUqpjRXn22Wc71pYQbOIbCznOjtKYUvzo6rSnIDFSBMO2IA0E21hvJRT24LEuFQ7TlVOvIpn0fQRIivzh7oW+TKx2W6VzcrEmzQTP2wKaQ62I2gweSFjGXNvV5BS38r3Ocz9EDD1WWzUkLT9QuwiG9F2XJKWL0IHPmUIfltaqCSN3Pdc8Udwz+VVnnfSMG+TD4SblLqFr3PKGrospBOdZ3DuPMwyhenF/ke1hNpvhMz/zM/Gbv/mbxvbf/M3fxOd8zucMqsv2jW07tp2nx6Y/vn0qTOnrt331ts9+qvr5901FrEOP6ct3cpLNvInAQtpLpv0xpGtZprhdLGqTfzOQDj2e6sgqZfydFXRHu0E2YfqbGhRSLD3zvgSA/Pi+92UW5zhi7h/72eDHPDK73cnbcRmePA30GkEYlg9lWaW4WezXvwU9i8x9w38vCsGdx7UWhUgI/dmgMlTPgh3HEfrsG9YfxzmNegcIjEOjqM4KZ+4Gesc73oGv/uqvxmd91mfhda97HX72Z38Wf/mXf4lv/MZvHF2nvTrmrsAJy9TaF8J5Tvx21i4gF4iE3M4XOEzXBmmbqs2uuvjvJaXgD4FNouzQWD7onkXkzZTPpKuusdfFj3Pd7zHWE34M6VT6CKyUqXgRZRsLai8hg2tX6J5LFizKfbOs3Gn77UH/RnJL12f0z0XXdWS7PWmb3Ua+AGJWKXxs9QAA4Hp6YkQlkUhXo2eV5/NoVRmDMx9d/sk/+Sd44YUX8L3f+714+umn8emf/un4b//tv+GJJ56YpP4pVMhjsK38GNsQ7W7TNSQNtGPr2fS4ddFfx5Dz+NxBvmu23T19egqX5kIMpU38A/sYbYskNtw1NhHotmvvyGs28X2SFc1leZF+F58biJdPVRGcZOwSw8BDmEm8GnqcAY/fwSYA5A7iwthllWryYv/nxMMW1VJ7XzF70TjHX60fZJZ7U5zdR1j4OaX23wuE5szJCgC89a1vxVvf+tatn8flGtpWRzG1WJfQ11mPGVS2pSuZ0s005JxS+2cqx7pUuJ0vgGSYMG4oQu5f30y/795Jg6Vdp/2sjH3uzpNFb0wo9CLOsIr9eVIA2ZVJ/31h8VIZTlLsc9h9ka9vGkpmLslPi7lllSaxLbdijJEMaB1KbFpFgo/nVh9uKSnbffZzTmLeVZniT+++DEBt1bNJDdXpapcdWi0RrzHXtG2cC7Jy1jgLZf15z+8ydajyWbmIuPViFudYl/t4cVX/PXHwonM2HFLvGPSdI2R230dmhlhaQuFbKPIsiMwYwmKLbH3ou8eh223CMtRVN2YgvSQqLSg8eVmkYvK2sapNntLft
3giBycBdnm9D+4wZ01uVAbMgdvNelF/tX6w3h5nnegkfk4fcbHLnUf9yiVZYbA7hl289OeZtGxKMM6KoEjRO6sywbpMcJis8Rc3H8IDe3cHRYVso12bwM4bIukljvPFRjqWbZAQfs9D7ofLPWO3L/Qa+bMwE4iq5CaSSA6//8f5wsivIj1/hgvoMk/K1mHnU5kzYk2D8SLOdJSNS/DapwXpS7PvIibX1F1DuyJBt1EgVov4Fp6YPa/PTTl6pARzY3DerCrAJVnxYpfmVFcHtklul7My3Z+10LYv3DiJSxTl+EC4voijTQiQHWYuna/v/Pz/GPg6775trvYQht4bF0mQ2rKpBWnt+O0k94/9jD23PsQrFjeN343IIq/Dt9bMJbYHOwFcKEKTs9kJ5ejzgi2rwN+Xa+oOltUMN7GPZVlHIwHMBdSELvP6KJR5Ea2Nc98sDppru9sSsaaOMaHO5/WZvCQrPTgrgS5hE6HuWYsitwVJFGnPgCXQwPHY4TE+9NKD2EszzA9y5wAUcn7XOULKjoVEiELvwS7QFxGzyb3wHd9HWGjflWSprSshglpJREvbiJDYYmkpTPqR2e2BV9viUocyDFK/bQtvbxXQxNG1thUA7YrhVgoiF0RIDKFscyyRCyk/ys3ioBMVZJxfsIjYJIXqsSOS9GSVRSYNISHnlbBckpV7AGdFWMYMfJsMlGPPNdSacJiukKgSx6sFVnvJJO4gF4Gy99tlh9YdAlsjMQZDn5khv9023It0nT7XEF8vaB7nOMkWWMV+wuKyDK3KpOP+Oc4Xnd+KvptuovBZ/pQLwd2PkMKU7e8rCAsgFrJugxMSbb0gXUxT940mIy0/xn6fbvaEq1PuFzCDe33OlrAQUaGoI21RaUj7NXU3yJ1ln/c86lWAS7ISjLPuLM6CsOyCqOzaCkCDyAN7d/H0rSt4ce9Az7THuiikbX0WHt+AOwauWf0Q18g2CcpUdfWJivn+vuupo8PM8/K0+UPacpwvOtuuJEtcT0+QVQq38j1jkcq+KJSz7m/OE6jf4/q+Pt3PFPePD/DcSnGtWcfHKCtY9W4W+6OSvfHvvA0fXl/vDT/mCycuK9amAblYzqNVBbgkK4PAZzi7nu1sKsA9by6hs3ZVPDg/xUuzfXz0+Boe27vVf4CATSKmfDqUIfXx2Tyv6zwTlU3cVX3hwyFWqFVZr769LhJNWCgHz0z5CeNz60Ojftf5yJIyjzPMkeFWvmfs74vy4RGKU/cz9/K6QuchEIGvngx0LXghJMVO9OnqnzuC2bglIx9eX++UNxZOnEDHdZ5wSVYCIM2Cdv2ibyvJ3L2ITa0S8zgH0iUePbyND37iYXzg+GHc2L8NNGnZvcc5tm+DfElWGjv6R5rRbxIBFNqmTY+b4n75xLs+S9lxvsBJ5l8MVXIN2VYYV/1X2HNE/YS9EOWlwDYcvO/bRj9o9+0hmg0euqyPCwhsWMQZbiQ3sYhyLKsEy2oGqO5K6/4ooTUWUe5sp+FiYuHVdG38+72ES7ISiLOeheyaqIQOJttwFU3hTgohL4fJGkcHS/zl8w8C1wHsQyeLG+qSmZKw+Cw2fNtxvtBuC8KVZDm5RWWXepRN4MpHA8gh3utS1QtbJl3hooQ6T4//+uZxjqvJKZZlilWZ4Mjx2m6iDdjUqnvWfRlhaJ/Gy9vHbhIObk9Gh0T/cIS8S7XYdoZFVD+P1+LGpaRMQvFMcRXPZO7FEYHaHSUtoGic7x7MVOvCmS5keIkwXBSLSkgirqkGu5C6ZirH41duAgBeOt3Hx06u4unT+o8sFnadPox15UjoIyoSrvRYhmxM7RbcFlGh3zKU6Epl+fZ1mWBd1u/ULJYHOZdrbWb9xnY5V74VWiCPFp2kBfT6IGW6XURZcAr5ewmbEI5d9ZGbik8XcYZrMYUgm9aRdkHEvBP5Yy5gWFtl7Aili477yrLC9Sau/efNnzvFS3getCrbnHHb1gXpvNIgPotzHKbAyx+8iY888xCWBwmevV1rEg4XV/DY4TEe27s1SvhqW1qk/CkuSO4fO0QbgDHLp+1Tu3+2ZQkbe1xIaLqvLG2bxYUmLASuV+ERVfxYO9KKn+Ph2Ynz/vNyizhDVinM0f9b8X6ILCo8Xfy9Cqlf27Svs4W4Q9A3NhCG3PdrjFCSa+aaRTJbotJEs1VJ6x6y0FnAEI0Wsbi3n4VQ3FdkZYio7aJgW8LJKV0DQ+pymeHt7S7yYp9vFud4+eEt4AbwkWceAgBURYS7t/bw7LNX8IEr1/HEgy/hxt6xMWt2wTfASSGxfdEtvmvg1ziUUPmei7N4BqZAn5ZJW1aKBLO4MKwqRFRmFknpI5n025ImBWjv7SOz27hdLLCIsw6RHDKgapISZUBcp1c/Uttd22oqnIVVOOScrvsfQlr6rBkkgtUuHgA31R5uFge4oWoxf5u8LW/qbNtzs9zT9ThXIY9ybYFZRJkOYb7IuK/IiguhrPpehK063zXOSsOw1jPo/oidWZzj4b07OH5ggVvH+8hX1NlFuHu8wP9v+SjWjyq88vClQa4WO9dJqBaFW1b4sXbiMftad4nzRlRCz62fCyHqZxZvnp8GqN85GgxprRdn2cAEXNQ3LaLMq4W5HzBFlEufFcYXWt6nN7qm7mpSAtTE4oa6jRvqtrGt/Vw0balAiVV8eVhMl1DWuIXSSS0skpfhrMfHC0NWVmWKaGOdwPljpvbLtOlqzVMRllBB6XnIqOpzExEO0yU++doLeG6+xN1shpdu72N9e4ZIVSiLCC+d7uPB+ak46wa6Yk66btsaI2kppNk/tZuvYWMTmpNsgZnKO9aAPmzyDGxLeL0pJKuVC9yKElKvC3aIOH9XKRcIWUCWaBOD1ZaWrt5A6n84SQHqgXLKbMi8rfSZYG87q4jE0KUexhAY6XoJQ1dkJg0JaVKMfazuZaWwiAosrXuZVRGWlcINdYJr8Sk+nD+Em2jS8NOz00QCLSvzuaxJrPv5sfeFiLR9CfWmio5dlVFw2QtDVu4HbNpRnAftioRtEpo1G/gJMzZ75uc9TJc4TJc4yRZ4aP8OPra4iuOX9hGpCi+9dIC/AHByOMOD87u4PjvRx7nCcl1uA99xWgDqyPvBiUxfThAJfc/ANoTBm2IsQZJ+c/45ZND3/V7zONdkhN5NexCQBrs5y5UB1AnH7M7+arPQHceySnFSdIXfPtiDsasPCdGQbJOoTNE3DcltIkGytoRaFW6kt3BD3TK0J7pdzeeFDltv/9uEZREVSKMKywp4VfICbsZ3cbPc16SFW2TqulPcLPcbEtslIC7CtenEfCiRkzCPM1QDiPclWbnEKJyVtSQkfDQUrnDn+fwEh2mta/ioKvHic0cAgJdeOsCd5QyfWBzhbz+cd1xCti4FcKd8522w20GDKREWu62uQbgPmw4IXOTrwtTEc8q6uIVNcs25rGbSNpuo3Mr3DH2S/bvzjp2vRSO5gOx1beZxponKrXzP0Mf4sEuyMRRnMXEKtcaEZMjluKbu4m+kzzZunBpHyLWlBDAtK0Dt8smqqLO9Pn+kjyEr
yzPRGstqhmvxabOPu5HWANxuo215DDapl4jVpWXlAuA8dSw2piYqUwhsh8DWs0iYxzkenN/B4UMr/FER4/jFA1RFhNVJrdL/3x97Ak88+FLHygKYA5/kX7dn6/Ygaotn7bbbVhcpcsXG1PlUhtSxSabfseDPSS2o7Scfrm0cdtRVVinj3tLChnzNoc66M1W7sF4IyJpiZ8Htw3lIJMldX/a2MQiJBguJuONt2ZSwXFN3cBQDQIR5FGNV1ZnYFhGQVjVpISJDRASAQW66KPT+NMqxiG6a7e5YZGpr3U3sT0ZObPfgJqHlU+DCkxXJHxvin3VhGz/YeTevboKz1qv4YGtZbPcN/T9arHAc1e4gAFifptg7WOF4tcC6UJjFuV5fiDBEVyBFoBBhWVtkRh+j8o2igXaJXYY2u0D6Hp9+yUVUeGZSTkJJPOsfQLuakxCtwLX0LlACJ9VCW20AeCPSdk1MhvQtu9ZJ9UWGcUiEqlu37Pa4pu7iM2Y3zfNEPH1ZCaAlJURAFlFLWpaVj7SYxwHQFhlOWK6pO1hWs94FEkNBz5IvEV/btsJbRhozV03OIeA+dANlVQwV6I/dJMZ/bIcQSobuR2xbhMvdKQSySkhWFpu0LJIM6V6OfF3/dlURIcsSnGYpFomkSXDnO+EiZzvaxz4/t6LYbeSWGJfrgTCVVWVTjUcI+o7ZpH5aC4iO7bseKacFB7+v5M7jVi7DmuCJIPGJFQ8bV9NHlw/o33ibM9yzmtycRWi7/fvbVhbbmsAJC4Un/430WYucdMGJiW8/kZb6u5vAEHFZVvRcUXK5HH9j/nHcLA7wEWHdIBtSFuAx41Of1cVXZ1aF56W9MGTlvOM8z3q2iamJyFSaFduV4nMNzVSBNM2RnbbnLYsYy3UC7NXH8oEqZFB3aSSkQZTaysnL2iJUPoREgQ35nbYZCbYN4jpTuUH6pJWVucViSHI9TlKoDh4dxEOTbw2Y9bbRHxlOsDDaOMQkHzpJ2nV/MdVvPNaayY/lx7ncQlfVXb2uzw11Gw+rAlfjFLBC05eV+bsQkSHXUB+GEJtFVOBafNrmZYkoGqnWtTyTXxWteENE1hw+C8ouxrdLsnIBcV6ICseuhZmh4LNtwHQN8fYkqu5syBUUqxKJKjFTBY4aoW0fUeEWFR86glsiVEWdJp6Smc3ifqsKP+9ZYCxhAfrz0XCEElj79+b1hgx29uzTtqRQh94R1HpyqUhhpfR7klmfBL19g8QYC+42n49diq2HWOXs41yE5aq6i8/f/wCOYtO9s4hSzKPmfa74b93ed05cuJbFBU5UbNIiuYooL8u1+NSKPkobt1CKT5Tt2kH3ulX/wpCVVZkCO+iUz9OS22extsvYZGCbdjRDIbl+QsqSrgEwrThEVqrC7ESIOPTl3ADMZG/0XxqY7QG+taa0nQ0nKn2++BBsiygOrXdMO/qIimlVoTWB3BmFeRvs951+S2kGLrkMhmYV5WHN/Perf+cMc2Q4KRbe2XGfhmDKfuMsxfab1i25FGlbGhX464un8RmzT+BqLN9rTlJCYFtZTLdP97sPiyjCsqqMRHIcyyqpBbfFfm+4O3+WuX6n85zE7TMukeIxz9UqzOAE4AKRlV1h0xc9NDFbyHLjUyCUoGzT8rHraCBv/WzmrXO0FApJXELNShS3U2BWoixiJKrEg/M7XjOyPVj0ma3t/W2emLqew3Sl9/PQ6fNoTQPOR1JA2w1EsO+/Lz8Oh+tec6IwdbbPq8lpcJSHbwY9xXOyraixTesb4waSnoHr6Qles/8X+JT0BFfjOvpvWRWaZMyjGMuqMKwoEhaR6riFCNxSUrt0ZILSWnMGjOpo0/G/avY8bhb7+Kv1g97y9nPhSsS3RGpmaN5hv7O1VZc//OEP42u/9mvx6le/Gnt7e/jkT/5k/Jt/82+wXpurSUZR1Pn76Z/+6W0168xxXgeVS7hRlPVrMl+sUTVuoLKIsJdmzlV2aXVdWm3XFmPaIO2DlHcFqC04j+3dwssXN/HyxU08bIVLn2ec50gljlC90aagZ8MF/cyw3CpTCPQ3jchxPZuhx0ufN22L1KYh7XSVuxafdoSzLv2Ji5AMQV9EUJ+I10arX7mLa+quFmp3zjvimViWKW57rHvbwtamPH/2Z3+GsizxMz/zM/hrf+2v4Y//+I/x9V//9bhz5w5+5Ed+xCj71FNP4U1vepP+fvXqVbu6XqzKBHO4Z0dDhFihM6xtYluk5jyEkZ4XtC4Bs7Ph4tt1kSBj7pdWs1LhZQfHYr0hYlbA3YFTuDLhZXs3vQPppsso3Ku/7RCdinlc65aTorH4Ppfg0s4bsioTXE1OO24YrlcJ0ZvQqsp07CpKdb2hmKrvmNqCsu2+RyItNny5cwhX1V0cxTmArmWEa0/GkhQiJlLOFW5h6SNLbdnaFbSsFJZV0smiu4jWuJHcwgeLhX6eQyMEfZbRIYJ8Xh8/flX2h20TttZLvelNbzIIyCd90ifhz//8z/FTP/VTHbJy7do13LhxY+Nzhr4Ym7LusbB/eJ+QcBOi5NJCjKljG+GnY8tOCa7/4J+BlryQoBUA8jLGajnTmpWDgyVmKsdxvtDWlaGhvYA5C7QTvl2fnQSt9DzOV3xvEhRg+65AV44VG0RSJIzRrGg9DAtzvtroV57Pj8Rjzotbh9c1pN/Y9XPo0q1IE1Tu9rG3rarS2B7iFuIko9WbQG8z2uWwpHCrjulKUpqctP/bZ2MRZ/hr80/gg6tHxSzazjafE8IKbNENJOHWrVt48MGu7+ztb387rl+/js/+7M/GT//0T6Ms3f651WqF4+Nj4+9ewRBzpWTq9Jk87f1jTbZ2HecJu1pd2CYyp1mK23cWtV4FwOxojaNFrR0Zm/eDd5Lrsl6UkJOjo2QZRFSGYhNT/ibnnBJDlhjoQ597w0UGpA7/o8sHsNTJruzzjCMVNOBIocrngai4+p5tntse/KZyTxGWVWpYPVZViVVV4lZZYFlVmiTQdvpbVkXnz9xf6T9X5M88ioNdPnRcm56fLCpphyQvojUWcYZXzF48V0EiQ7CzXusv/uIv8BM/8RP40R/9UWP7933f9+ELv/ALsbe3h9/6rd/Ct37rt+L555/Hd3/3d4v1PPnkk3jXu97lPdcUlgUXzoP//byRiLHYlrC2LxLItqT0lUniUieEA+qwZX0e61TiDM0xs9AWFUZSgNqqMiVRmep5GWNtm7rsrggr4I78O87r6Aq6F9Tu2/kCD89ONLE4aczuQ893UiywipjGCWknrf9YTD1THnLMJn1yiIsnpN4QN9CqTPHh/Bo+JX0pKDJHHydYYgjc9VMvVBju/hgKO1SeVmmuV2xOkUZFUPZlAu/TJFdOH2w3kF1HKKKqGnbX3vnOd/aShfe97334rM/6LP394x//OF7/+tfj9a9/PX7u537Oe+yP/uiP4nu/93tx69Ytcf9qtcJq1UZEHB8f4/HHH8c//19fjvnhxRCv2g/D1ARpW2Rum+6fsfqEbj3DRGEn2Qy3Vws898IRqrsJMC+Q7uV4+YM38eorLxhaklA
tFP9/ki2wLpWRP+XB+R08PDsZ9Ltvk8Duwq3Xd9wmJEXSrBymKyM0/YhFVtlE0c46bFsReNtevriJq8kp0qjA7YErJE+FbTwL29abSMdsa8LJYb9j8zjHy+c38bf3PoJPSV/yHhtCZGxSwi02QFerYrucJBeUXTfVyV1B9feUfU6wrGZYlik+sr6O28ViYy3KFFidZPiZv///4NatW7hy5Yq37OBWvf3tb8dXfuVXesu86lWv0p8//vGP4w1veANe97rX4Wd/9md763/ta1+L4+NjfOITn8Cjjz7a2T+fzzGfz4c2+55CyCxi6nPsso5tEZVNIRGZooyRlzHKlUJylOGhaydIVYGH9+6I+hMbdpZae5CjMGmtk+khU+f1Wdiknm0SFQntvU50qLo9e6TPQFebIg2i6yLBYdqSnF1ESpz17zZVfWc5aErE6PnsEMvFTJOAvkUIJSsJ6VJs0GrLzvZYhEQiKK66pRWcDcISEJF4njG4ddevX8f169eDyn7sYx/DG97wBnzmZ34mnnrqKcRxvy/uD//wD7FYLHDt2rVB7VqXCtjhzd4kf4RP1HWecNYD49BBaqxVJcTaEqkKD107wRNXXnLm7eAItYjZWWnr9ozXDPUNqlNiCr1DH4Za1Oi36fuNTrI5ZnHRyakz1MRNhIeIim8Q2HXOmfNibZvyWN/zQFqmkDJSm+i3P84X+PD6Om6oW7gWr8TygElSbGJju3364Mu1sgmk7Mndtah2ZzWzsfboU21s7Wn++Mc/js///M/HK1/5SvzIj/wInnvuOb2PIn9+9Vd/Fc888wxe97rXYW9vD//zf/5PfNd3fRe+4Ru+4dxbT6Z8WTepaxsuorOua2qSMhbroiYxt4738eDDt3E0X2JdKrxs7yYAtw9W0qVw4kBWFTuz6rpUOEzWuB6QQ2WIr3kbmGr27MMY15/O9hvwTNg6ISBsAmG7goiokEvQtazCWZP/Xda1aZ1jrGkhx9hlXAuYfmz1AG7O9xrRajuRWFaqY8HglhKbtNj7OWwSYxMWl67F3s6tNXw15larkjVuoDbf0zzOtfbKhfNmadlaa37jN34DH/zgB/HBD34Qr3jFK4x9JJNJ0xQ/+ZM/iXe84x0oyxKf9EmfhO/93u/F2972tm01K3jG6xITbqIj2IYF5bw9UMD2Zk4uhFg6hlpViKjczWYoiwgP7N3Fjf3ben+fC2iI7ohnpeXaCV/9LvQRqF3oAMYIcTmmFFO7MIuLliA2959bV1ykxb6mdZHgJJvjMF2NjgyTcJZu2l2fj9bjGuvusxclHXpuoEtaauvKw7i2qBcF5CSg/l8IZEPpsiGQrC59wlt+DH22yRBva11nTVD63EAh+3xw9TFTjXtbe5rf8pa34C1veYu3jJ2LZROsygTVgA59bDnfsX0DwaZuo7PCtgS5hFGzqAEd1Cbun7yMMdvLcJjWmZftRQ5XZYLb+QJHjugdKkezGH7/7LbTej8+S40LkjVnV37pqc6zC5LSV6ftJrCJl605sl14vIz9eZvYlCCOPecYSL/zVAJ6XxlfX8HPT+tG3Sr224G+kupsUs5b+iQiM9I+wkJnJO7qYmwCY5MRyZLTPT//nuBmcaC/S2H0fb/lkOdriEXxXLiBzjO29VKfV23AeWvDJuSkb7uPgPRZUWwUZYy8iDFP206ujt5pB7WTbKEFmtKgxsty1w93+8xULpIgCefhWejDlJE7/efaTMi6LpXxm5xkrWmcE0f+e9rtrK0zy04o81TYZHIVgl0I2I01tzYgmmPeb77fFrLbRJPj+ewQz+RXsYgzLCJzmRjKaWJDIjXLatY53q6DkgDKdaa6TDd/StYpx9uyrGa4WewbCxreyvdEF1AIYTlLnP+ebwuQBhL63ofQhFQ+3+gQhHYkUybK2hSbdn5jOrMxg5aLpADtekCnyxmODpZsIcGlkQ7/xdUeHpyfNm0wZ2drYYCjevj/PuvQUFHymGeh77gx70rvOQN/500JiQ+zuMBJPqvFts3ARQRU+v3I5UgD3ywuDKLiwrY6+l3mnemceyixHKgj2gbs+s2VzNvM1QDwseU1vHx+Fa+aPY9lNQuqn5fjbpclUsMds2hW1yYS06bGd7+D3MrDE8C1+837e7M4CMqgPPTZtPs5e9sQZNV9aFnJqmRUNNBQdflov+qWO5VN69/0oRuLTc35m3RuM1V4CUtWKhwdLLFcJ+IMjNp+ks8wY7M2Guic1qBSYV0oHKZrne+D0NdxjBERhqLvuBAiNLV7ZtuDFwCDqBBxJB0K0I004uDhyoRNfPVTvn/bEp4Phe83DA3ZH1IfEUmpnM+SIllb/u/pw7iR3Oqsp0OrD0ugRSjpPwA8nx3hOF/gOG8tsw/PTnA9PcFVdRc30ltYRGuDtMiuJxlkRanrMInVsmyWb1B1wsE+Ya0E13O5y/HifDzNlzhzXGSSMtT9w5HEpbay1HXVJOQkn+njQ+qxMVPDF0E7y1m01IbzQExcx/gGJW99Pdek8+Iw1x0wzvq1S+vULjDl72eXCf09bctlXzlxEsLOdyVZ4maxj0UlEBOHUeBmsY/n8yN88O7DotuQvp9kC3wI1zFTOf7fVz+kSREAncANaDMbc1eSi9QQUSGidLPY166iVZni2bW5xtQkkyPhGeQpBKbC+XnSJwC/aVP4R6V6ed20z/5+ic2wyWy6t5MKcP0QMlaWaxsINuHwzeh426gNh8m62d61WIztJM4aki5g0/qmPM416Nm/3SwuDC0LfSc3oPu8YVbY80b6to1NnwtjvS7HOxwyAfC5gXQ9zXNwmC5ri0qVAqW1MGCU6ZT1HLeKffyfOzfw4qoVtPaRrXWp8Ae3Xo2/ffRXeDRtM7frFb9L4Jq6U29jVhNuieEkhaB1KqomKh9bXROtKttIFRGaQiAr7kM30LpIjEdnGyTFtX2bg0boi20mFTM7hrGzzG1iCmHk4GM8QlobWamQqgKnWYp5muM0S4376CMlHEbiMaZZsX8T78A24PkSffA7HrymMukPOqf127oGrr4ZtbRPSthnf+62Zzsi0l1hG23wWTjp9xpjpeT1bALdBm2dSXAr3wMAY62mrFL4WP4AjvOFjgrjgvuTvKtv6bufL6728X/iG0DDcT62uoZHZrdxPbkNxBA1M5K7p92X6qifVZni+ezQcEHR9XXaKVhDNlnCZMqx58KQFR+mfvFcxGAb5wqFb8ZwHjq/TbCRpYV1YLZGRSIpQE1UCLfvLLC3WCOz1u8Z2taZyvWfncRsys5hTLnQes7iOR87APWRl1GkxWHS3nUE07bqmhJDf7fQ8q53FgBUHD5L97WBnpUXV/s4yeZ4cH7HWC/q+fUhTrL5Ru5lAn8unzmt18Y5yRZaC1UvinkXN7EvRgx1tDRCTpXbxQLPrQ87gvG2vcMn31NNNrMB9Vw4smJ3NLuYIZzXDuNewyRugwk6PcJpliI7TbC3WOM0S7FIssEzBf08huSDmXgRxk2P21Y9nXo3nBGHnmOMTsisY7iFa1PXx1lOhKR7tovfihDyjtrllaUxs0GEhspKsK
/xmdMreAZXOvtta1CfYD/0XLXVNsHz60OsygSftPc8llWGRZQZot5F5CYqz+dHeh9ZVLrn3k3U5VS4MGRlXSpUF8iaQDiv1zGmEw51nfRh09l2n0WFtCrLdQI1K5GoEntp3Un0+p8dbaO1aHQ57qobkCumc74dPB+2dcretk0MHbBs8AFJvA7r/g0Vco4pN+Xvu+3fYWz9m/5uPrhm42lc9J6X7+8rq+LSe/32vil+i3WhjHpeXO3jdr7A3zx8GlDAski1hYVHJPHIo5NigWfXR8ZaR2cVNi71l2PPc2HISgh8ors+caRdntc51Y9xL2Ebyv+hL7tvJjO2sySikpcxsizBfLFGEpdI4yJ4Zm6f+7SMMZt3n61tRU90jplwQBtS1zYHLKAdtNK46AxgafNO8jZIxMUlkqbPYzCV23IKbPs32BS+33BsXUOROgjqWd67k2ymrT/P3D3Cyxc3kUYFskphFaXIKoU0KnCraFf4ptXB53GOK8nSWIcMmMhyPbAOPrYC6LjC3anwurjwZCWEhISGu9nlXd+nxFD/Z0h53kG7TJpj6pii7KYYTVLYb7jM2tciUSVSh97BxrpQKMrYqCuNC+ylmfgcTj2LpzZMiV102NIgQwNIyAAkleGDIMFn+g/FpHqTCX6rsxpQs1I5B3naP7S+4LLNfUtVYXweCxfRPUvQ70rP7IfuXMdqL9GLZQLQ0Uh2+DEJfwHgdmNV2WaEZd+xhjHAyvzMF4nsw4UiK30ho/catiVSk8pt61xj6pZgd8p80AntsEM6xLxJsV+UMcqi/kxwzcYJM1UAqsCeY77AQ5dnypxx2GX6sMk93dYAFzrghBARvs83IGXWfaABjMragypdOz0/Ia4hCb5nYUq32XmxjPgI4dbO2XPP+H4XgaFnwX6G+DPSqXfgc7xtFGWM504P8OJqDzNV4DBZG1mT12VirE329OlVp7v6LD0B9kRtFhf3p2UlK+Ogi5kqvI3XR4vc8cFoStimbFcHxveFCs3s+u8VFGWMNYTcKFt48XinSH5s6TmYqcLpQrD1MmtAE5Y+nJXrZcp7KQ08fYPRkPJ5GSOJS13GV9bWNkjEpQ804+1zQ9orwmzy+0jWIr79PGHobzsEObuHSVzq7/z3522QtoW20WexmeK+D7EgArVLeV0oPHd6YPThL6p9AKYoesjkvTd6bsLf8zBZs2gg/yrTHBeGrNgISRw05gfoO2bbZnhfZzdEPNa3n89Ibb9y34zCZ1addAAcamruMRlnhdKWlLKIEau6M1hmSd0hpubA5po1S6SV7jdvMw1kQ57JMYPdtgncpsfmExEsqR57AEuVqY0IEWW64DuO3hNXDp9NIL2bo+uyLA+7hE067G1D6xj7HCUe1+A27ott/QuF/UzxiekmVvVNyoXWZfRxhuwiPMPthSIruwyr4wPVSeZe5Oo8Wy3sTtveZneEOlpmi/7obcDuGFyuAwCGCyhWJbIsQZYBsSprIrOo75Uz7LFUIkkBuvehb3ZuH9+5ri1YPnz3yd7vwlTkA4DhhhuKRJWdAdFHXKaGq+4pBsDTLPUOskMwtj2T/s4D6uLPRKI2vwdDzk2WnE3uPbf4jLEu8neQTwQP07Xh4nlxtTeobttFau8bo/cS3aR8wjbg+i8MWVkXarAbaCyRsF0oPHZ/auxqoPfqBwbOAiQy4Nq3K/jOyyOACGURATB/zxUS5EWMxSzXocySVUQiKPYgSQh5ZqaYPW9SVurMB8+ANyAdmxzrrXfDQaevbsCctQ8dkO2B2L4P0n0JGbzta56SdLhAbaX2TfWbTlVPKOmhe7WpFWeTZ8/oS6ys2nbSyTFjkuuYoXVRv+V3Kd2nlpVtEwffeX04S8vCVORgSrN/COzOgM9oXOZ+flxfefs4wN/xkaVlMcv1MUUZ4zQgC66o1xjhTjtPM+ApBokp6ghJBGaDD0zLLJlkds7Br0u6xtDrlsq5QrHtY/g1dUhPwL2yydKuyMWmlrSx2PT6hpId+/Ogc7HfnfoIFZc4yWc4bLavS9Wr5/O5El37QkXFvH/z5bca9N4GlzznKBqB7aYkpY9YkM+QBilp0Nm2MHHXsAkAfZbKbLsNrvPY20NnQNL+sohRFRGgWvHXfLFGokp93dptwvJD2C+4bbGx71lQWG6gpSMEfCDbtHPelrXDNjeHvM9jZoJ8hk8Ds2TNGIO+exMqgieUjvpoe7xBm33Pgu86tvX7b4JdEY5Nzm1blsac0yUoXgNYx/X3k2zW279IkyEeRRdyjEReslLV2b6bVBpTGQ4uDFkB/AzRtX+Kc0wlcts2NjZNTzArCD7XRAOHXSfBZ2KnQaBYN4PKzLxfNmEhSNYU6T5JGhoeUtlpt+dej+mg+2b7Y8mML7zcV65vvz0ouwZvH2LVEoOijLESYiaLMh7lm9+UfPSRERdiVaIsYkMMzo91kRifxWQqEuK6J9uMQtw0h86Qax/bN425365z8QlZFivsp2vcWi16rbo+uIIQXGOUNFEbMnnPB4ydF4asZKWsWRkbMeJCyI84xhrSF5Y5JULNwH2YYoa+aRumrJvEtRxRY12xtydxN1kc0CUpnHS5ZkT8OF4utN1D9ofArkMawF0DTSmQPg4aYMdAOm5sXb56pmxfSLm+42r9VI2YWftC22wTFxchm5I80DmHRDPScaGwydgm7R9KdMa8Z1MRHKmuvIxxvFrU+4Zei9C39aUasCdbJPjeb9J4kPchK/0i4uJ+FNjmhUK1A5fJFPkibPjEVlMQla0JFCcWTW5CflzCPdcsUuo4KAqoLCKgihAldZlYlboz464gDols0Pk6/v8Jxakh92uKQchl7ZAwdt8m4AM6YA7q2zzvGLRtlchX1Nk2ZL+J8b8D3z/WzbSNe+6rcxN32Jh3ZBcEB5D7Km8/t+H77jre0Mr0TK7uZrOgdhTVAGtWcMlzjryKUe1QVCvlBXDlCgjRCgx5kM+jv5jDFwJnl+Hl+HXZdfQluBt6X0XLyjpBWUSo8hiIzMFOmo268oVwkgLsRiPiuz8hg8Z0Foohg+n2MLQdsapGt90mRvz8tE+qe8j5KqssWfzs7XyfWX+o1UdqZ/uc032SrnnXGOsOdB07lOyEEBxyg23iovIRE1cZV7mx53QhiUvDykyWFMoE7j12YPsuDFnZNfrCOTeJAtj0mKEYQgzG1LtJuU0Elj64Oo8qr8W1UeLujDexjAx9QceSlLE6CLNsd+DyDcwSpMF0l4hU5W1DpCrjmoa0VyYFJqR9dA4f4eiD7xiJ2LhdSZHzOAlFU6ZsykcjSMsmRMd1r2sSFabxcdc97L0OITd2ArcQ9BGb0P5k6rHDJwrm7u8h0W7FEJ1QcMkReNWrXoWPfOQjxrZ/9a/+Ff7tv/23+vtf/uVf4m1vexv+x//4H9jb28NXfdVX4Ud+5Ecwm7kTrUnIixjVBoMCr+esMbQNQwV+ffXcD3DNdqoiQlREgIpQNW7ZfK2QNI/jct2GurpcTLaFiJ/H9dsOsb6MsaLIeo/hg2TIMc5Brwo8H7dq0THRuAGubwCuighlYNmhd
U9+/Jj7J5ynCD1vFTnve9W4SitPXIBNxkLI3Vj0uQHrMv1kZtg5N7fOSJDebxeBmdp6EYJQa7WvbWPatXXLyvd+7/fi67/+6/X3w8ND/bkoCnzJl3wJHn74Yfzu7/4uXnjhBXzN13wNqqrCT/zETww6D5GVqZMOTYVN2jOFhWIT2C8lfyFt3/Z50gYAbZt426j9/H6VRYxiHdcdahF5rSs81NWHvllVnyVrjBjRP3M8AwuIZ4C1BzHvMaED9QhU+QR1h5CpLV7D1s7TV5e0P6pEIuN6xlzWGV4+xAoVOSxGfWhddZtbVkLqGENoQtd5s7GNcVCapA09D5U/N5YVADg6OsKNGzfEfb/xG7+BP/3TP8VHP/pRvOxlLwMA/OiP/ije8pa34Ad+4Adw5cqVwefbJH59TD0hmSY3waYExPXybBIGOoWLYVegNkmRE3QP8nVShylXEXCqAFUhav7qct3O1Jc0K1SMyqM0hrpzXPsmJSQTDnqcmNjt6GtXn6uhbxB0kqKAc4cgUtXG98qlSZH28f2+faHntH8T3/FB56N74bonPZaf0POGlqHrs++pZOmRXGQut9VYy8rYftJVd6h2pg+h2poh41vfRG0Itk5WfuiHfgjf933fh8cffxxf8RVfgX/5L/+ldvH8/u//Pj790z9dExUA+OIv/mKsViu8//3vxxve8IZOfavVCqvVSn8/Pj4GABRlBLCbvct0zmPPNYSI8AF2ClIwTLtgnrvvhZQsMbZ1w0Yoeeqroy9RFm8/rydSFcpVjAhApSqEDD0hv7vrd5NIVCh8OohgcPfKFkhJ3/6oiFANHFQ3bctU1qOoqacKmO2PReh9HHNsX/khYt6+84nEJ+R5c1mqfMf6XFYIex6KhsRwIhdqpQnRzfTX4e5b+/pOH87aOu86T1GGP6tbJSv/4l/8C/zdv/t38cADD+AP/uAP8B3f8R340Ic+hJ/7uZ8DADzzzDN49NFHjWMeeOABzGYzPPPMM2KdTz75JN71rnc5zxmamCoEIaxwSA6KMRiak6Hv+CnaMrYNU4jbQusIKWe/5N3ZbFx3gGz72Iyjrnswxn8vDg4DCUdbx/ZdE5FrxntOIofGYmj7idxMcd2Vqpz1+PYNLVfZVpfAtg8hcqMJzZhjAsg5dwuGaHIIfe9yiLB4jK5mKkuNPRH1uf+HYgp9z2Cy8s53vtNLFgDgfe97Hz7rsz4L3/It36K3/a2/9bfwwAMP4B//43+MH/qhH8JDDz0EAIgiYZZYVeJ2APiO7/gOvOMd79Dfj4+P8fjjjzvbMlSFPSb6ZGo2Ombg3sStc7+ic6+iCtVs2hfSt9/VuQXPjAM79F1G5AQPxEPaNFWYLJ2z0SV16h1zn84ohNd3n0N/g5ByY4mVfZzPitZnmeEum6G6l+7JNnSFRvaEJrwNQ3U0Uoj4lCLh4Zo3c5LnslJvC4PJytvf/nZ85Vd+pbfMq171KnH7a1/7WgDABz/4QTz00EO4ceMG/vf//t9GmZdeeglZlnUsLoT5fI75fN7ZXhXxxomMxpAOn1tkl+6a8Vk3x3VGUv6ITXJVbAJXKCbto5fe5Zu2YXQ6VQRpBWYfdGI5x7nsNlJnPNZi0qv5OEsLxhTnnrr9VN8u2sZ+e/F3GNqGc5DfBEBYu/uuvUEIkQl2L1p19bm0fKHtwRafKMyKNCTMm/oIX6i2fNx2BL6uc4yVE9D3asDxg8nK9evXcf369aGHAQD+8A//EADw2GOPAQBe97rX4Qd+4Afw9NNP622/8Ru/gfl8js/8zM8cdQ5C6E0cygyl8mN+sClwlgm4Nk1yNSVC8lzYZaRjaqEk6wRYJ9Tksu0cI2loXMI8iaRIn3vN1FLbGw1ILzHx7Xd1ptwiMbbuMeU4yBoS0o4pzjcEvvZsi2jdC3C11bpfQ1xLfaRnqNZnrPbHntC4C47L4dOHULrBJ2ljo566dfpdUT5Xkr1tSFu2pln5/d//fbz3ve/FG97wBly9ehXve9/78C3f8i34h//wH+KVr3wlAOCNb3wj/ubf/Jv46q/+avzwD/8wXnzxRXzbt30bvv7rv35UJNAYDE07vamGJKxN91CHdNEQVajy2IgGqoqoswozYWiocG+HVYV3pnbH7ezIx5IIThBc+/vqn+pZ9rVjBKIScGX65vsix6hgHDuGAEoYaK04lwi5F1KZgOvqIzVT6XTs8uL2UBfQCEGwv0FR3UcFEineB7mSPIZanNt6hrm7p8LWyMp8Pse73/1uvOtd78JqtcITTzyBr//6r8e3f/u36zJKKfzar/0a3vrWt+JzP/dzjaRwQ1EWMdATCSIeM/Qcg8pP01GPFqlNVDc/R0jWSl+oaGiZsdkxQ+Cru95eNnkimnvTdDiUh2VoFld+XnnH8OdkIwtKKHZxjh3AJhwSEali87uPzAzCWGtQX31UZ58OZxcYSoalNgZaYTplB17vUJdoaNTaUP1KfdDI96cnJHxIdFaIxRnYLOvwVIiqqjr7VmyA4+NjXL16Fa/6D9+DeH8xef1TEA5JIObLbTBIYEnq9pEZPneFTVKLh9Tt0n2EpFuXwIlK53eKqiAi1gtHpzPEmtLBrgjGkAGnB5L1wiYPEkLKbBOTkJldwraUhR4DTE9MtzH47XBADQ2759jG5Ms7qesZE/r0PVIZIJy4+MZO7ZK6u8SHv/b7cOvWrV5vyn2zNtC2XCuhAxOlp9bfrayZzhC50AySgQmYzgrbjEjxifA2yVcBoCYmQibbUdcj/EYbiWOHtmHK32DoDHUgqQgpf5ZEZVvwXdPG5GjM778BAfW2dwPrSFCdLtjka+S5h+QJIoyxYo+t15mocKCGRpr8TTGW9gmIJVwYslIW0eAXS2Klo5JrSWuZOM7nRrjbY5BbaFcpvvsQOdqzSzLFzz3leQfc49HkZKzuxHeuDQd7aTC6iATChZBrDSEYofdMcmeNbUOoZie0LWPrAbBda1/fuUbqZ4BhLqVtEhsCbw8/W7AYuPcEsuWaoy9b9CbWpQtDVkIwZuZtFnYMdiPM+boaxwMGDGPXfefelv4jGK6XxL6nEplxlZEIx1CX2FgX2sDjXBE8vTjHuhAajPj/exl2ZtrJ6hW0MX0C3m0hVJdD5e5LQjq1zghWP7+FvlgS2/cl5hs8Jgh9XmhWY5cMoDovGWx3iaqMpnE1hDLPyu16IAxKkCWp1IsoOL9o3wswiqk7HrCtEZ+QBez49xACFHIum/wMfAaAe8udM+VgQ3WNqXNoNtRN6hgCqc6xA4x9Xza5X311jynXV8em7bSJ2lbhE+mOER/7nq0N+sCt5z1yjCf2Mzx0TOi4ljacrA3FhSErqKLwQSa0yoGWERFbMN8DGJyrgD+oYxXufdv74LIU+RaYC1mMbiic5sgJErABjt9iyD2b4MUerBHxnHOUmHDCDvk8pOXf9sz4fkBQGPiAerh1Klgfw7dt8jsO0cfsCrxNgnVo6Bhhb+v0wSE6yTGrdjtwccjKhhjMMncVdRFaf8+LEZyTIwBjO+sx5Gdbi9FtWs95spQA05KTTcreDwjp9EPW7dlk
raChx/rK2+9zHzGbwiJm1DfSejM41DxEr2Jj24RGOh8RqU0JldSGgVZKH2kBHJPHLekkLxRZmTriZCvrnGwLfW3Ykv81BBdhJnpuQoaxXWJyic1hD/Zih78BUfHBRZR81tS+dYaGJlK7UOAWCn4PttWn0Tns/1PXDwy6BpeeK0Qi4NPMDBmzLwxZqQL0Hb7ZjrfMGcyUO9j0ZXEx7C2/gCF6A9fsbRdCtFEIJSaBeS22JVg8bwNMpSotMJ06kiLUjXUWz1yngxdIy1R199U52io6hfuVWT58At5ziaFWmfMwQeuzzIS+gwGupCGrbOtjBvZPF4asREU0WNBKP1YE9jKetXsn9Lw+8jEmhG/LGLNK7BgLjk9UNmrwdh0TUldPmalICh9kzxtBsbGJEFfSXYUQDMmtcd7v00WE9Nv3RR31bT+3pEd6vqQcL9JkcQo3UI9+RWxPSF1SPei+Tz7XYsh2CReGrGiMcd1wonOvdmL8wR6oZzHqkI5xPewB2OWg4PO5BmNHZHWSaBDuO95Yg1P/r9RG1fScY1qdEH2fWsx7EdyW9xpc70Podk5aJJJjb5tsOYUh6JtwSgTD9Wz3ER9fnfx4X3uk8/W1qRmHttHvXxyyUkSTzHanQGiippB04oMw1ArAH3D74e178ENZeQiGvji+43wY29aBx7k6S2f5LT+TUdElIZEjY7JrO2GbZGYXuLSoXGzw0PC+9Z6GTBhcJIjX4yszOYa6pXzHbzpu2mPBkLbct5aVCTqiMQ/wts4ztC2DYT+wQwb/KTv9gRqP0W2Y2CoyJMvntgZJ2yLCv3Pi0UdChpzvXicsfRgTYbOpNUYyo3P3V0iETog2R2qrq+5Nrus8WKh8bqexdYXkrOkL0e5bUsG1VtbksC2VPckKnW0Y0rdt8ExcHLIy4IbZP8pY0nGeMjna2UQ3xlhX0pBjbYLkIk995ssxbRqIoYm11DLqdNiuQZ4TgLGkwj5uKnJyCTdsN1yokNdXj6tue59PeyMdG6rdsqN/fNogH8HaJvr0EecVm0xQXROkUILhI1C29cnXto09AhtYhC4OWQmA60c5T6RjCmwla6SPgIToPFxuI9+xoa6o0PoaSGus8H2hLhw9kyoiREX7v1JN5Eunc+9aPPi+ewW70LdcNExtaQghD6H7zhNcpA/wh2VzSJaoUCG2L7w7lHiOTaIYVP9Aa9HUGYqHrAk1VRJAwoUhK1G5HdKxqSkU6Gf+U3Vkm5o9q+Dk/jgf4dwjtCQh24NeSHbueB1BrYAy4G2SicrZm8zHYAzBusgEp48QuH7nsRaJ7bkWxwmZXccRfCTEB7o/odc79L4MFarb/Xqo5asvEk3KaTOVW27XCCVQQ8anC0NWpkLI7CQkudK9+JBJC9NJDDlUq3Huwgk3hP1scJJSzIcdK+0PGbTuheeoD/eD7oXQN4BvXn9br31fde6Lc3Kvh4avbhLtZRJp06XF67Tfp3jdbu+K0sN/yyFkbMg5xqRzuCi4r8iK9ENPIZILMU32DUYhL9IuYIvEXNYal6tkCEHpW/V1E7IzpSrfnHm1A0Ocu60pY56n0FndReiA7ifCsgl81quoiBCv6+cwXgNqXW8vZkC+D5Qz6nPO/702xeB+kmLfE35t9F7G6667slJAMY+0m7Y5QpejyUelgCqKUM5k0hLSJrMP705y4jX0EjradWxdbt1O/l2yyvW37aLgwpCVqByQFI4fF2ACdLHboebbTc2o28agGUCAS2XTiKcpV3wFxt9P3hHF60h3htyaInWguzLTt+eUk+L5IkNc8B23CYZGKN0vnbE7lFyaCHUH5bgwn3e1qvcDEcqkO/DZcFlhfMRgDDjZN54FNnC35bqEg5eJm/+lEghL0X3/qxhQp1RfZNwT+z9va9A1sfbz69LnYdXEeff6+lC3NTLaWSZAOesX8IeiT7i9qWZH7JvKS4HtTnCviNZC4XpA+9T325jt99W56UA6VtBaKQA566AKa98ZYmhUSWhdZ2rpm8Aq4KvjPFgdbDJMzxa3DkQFUM54OUBljLgU7bXE6+41VapraSyNwTrS5Yz/UUt0hlhpbAukREJixzvYCte79wfoHmeTFX4ML1sqgIp1rjPu1iWJ4aVt/FrqpSTkum3w6/T1R4ZlKG2IygyIIkCdtr9bK+znbfNPSIbC1S/39T3OcPsB574kK5fwwvUQ2gzc5eYKUde7jh0idgtBCEFpr8fczkOL6W92qza51+ZinCviMha7XDMnFK7oIymnDH0OTYLn2tf3+/U9S673QBrsyP1Arhwa8OwBvbTaRAQlzir9vVRALITBtwNn1NlWssGQBsRyVlsO7YEvhLDQ9ZAVg66Jt8lHVoxyZTeizoaC3B77+uPMvH6+D2jJzNB3VyJGEug+x+weAN1rNNoWmyQyXjf9zbr7u9Vlok77iXAa2wSrUt2mfh1O6ESyb9tQXJKV+whTziJDH0j7wXbN9M+DlapLltD6sFlnEa+BZFnvT+/UhCU7qDuRkIigexGbEhaJnPJ9HK5Iij7Swj8Pcavwc/J6bBLkq1cC1yVUkWnCbyNcoAd2lTXPl0BCABdZadsd563lwEb7XFbailLM6JrqtpUASjZzD9FM1MfDuJaoaK8jzizLQw8JIcLFr5tfowt0TU409dp1VaolUnHWbGMEISQHUlRW+hh+XRJZkurxEVLd3qx1LavGcmZbgbh1iJ+X/7dqt35fMJdhmFZnl7igXev5hWvWt60Hw67b5QPe5oM5tfVDAu9QXLOsfs2QWRd12vG627ZKNTPGZmChDiSy/NNj4BqUd4G+mVUoYXERVx690m7vzu66lq2uRWDMs9Ix4Vf0e9mEqb8u29pG9RFi9nzYAxJdT1SYJCXOKl2eBqpaeyITEYOsBFgqKhUBeYUyAdQ6aga2RgTKBm5+nClIpWurRaJkEaLz21ahqKgHcMPV5SEhcQ5ESXvNrnL6Wgh5W4aIGdWhv0upGYR7VqqWIBj/PdC/les36LiO5GsS20jtao4jgkUks24zq4OTzMzthhLP0fR55SxqLGvuCcau+6dLsnIGGDsg+46XBmufv9WeOe4S2zinNLseC01OKtmUXs7qTqmY1SQlLoAqa83v5WwYATwPuolQDLWAGVoFD8+J8i7Ja4mE+TvQIBTq2gl1CXFLQpRb+4S2oUeLwfULfPDutLWsrDIVu0ZhQDee9eFWh3r2HOmZeRVHKGamJoLaw3Uy9bZWh6JOWwujfX1EUCRiElv31t6vAt0qsK6dBmxlHaeKlqTZkCyhdLx936iNru02pOvsRdNG3i46H1md2meoaqIeI+OaJQITqjOKVd2XET+rokj3g5WKUEVg33dribkkKzvEFCQlRCgYOvjZA/x0LqJ+H7I9swYkv+rZ6CRs83/FrCX0opZsFioJ/ID+34vql7bbn+8dMtP9TPdMHKgd200/fJcM2ESAHye5Hfh+oGtu5+cs0m59NrF3EZLOfmHwtgc3iZhI5QiRNejGnUGbWaESn8CyHujouskFJLkOKP8IuXq4wFdbGJkFRbIMSdcobXddp/M
6mmu0SUp7nRFQVOJvLh2jyYhw/rqdrSut3SYjVMfSOXfnnPU+7hJsnzubKPjvR3suOl7eli+6FnhXlFUxi1Ds1TonHqE0JbZGVn77t38bb3jDG8R9f/AHf4DP/uzPBgBEUffH+amf+il84zd+4+hzn/eZqs+M3ecjD51NTmlp6IPvOsxt3A3Q/GfPddW59rBZvOu3lnQRNOBJHXJHo+CxBsSFaf7G2qwjEkRtEkJ+m/P8LEsDtW2Rov3aZZaZx1F+EKBLJjj5sF0kdZRJ1T8DB3XokVV38z1pv5fCcwHQgMzDRNtnM0RLYrsJ7AGuj4D4yuo2WcSk811FplWlISpEtqUQYLKIRUU3TFoiKj7LkH3t0nXY183LVEmEKK+6BIwdU0qDfXOMtrCwNohalwBrFSc/al0/WyFWLhvc7UOamkpFHReW3WbbrWj294EkzxI907YqjpCc1r9twSxr9PuSq4iE2OUMLGpsexPMrZGVz/mcz8HTTz9tbPue7/kevOc978FnfdZnGdufeuopvOlNb9Lfr169uq1mOWe09n6OKQYLyYphzxr9YqhNzx91rt1eM2NXJE8iKvQ9KgDkZtkh+gGrxo5FRJenczP3g+F2EPIm8MGWCyF1RFA87LkKxXkj35Jrx75HdnmfnkFpTZA5gPDvNjnhAx4f4IbOxjlooLP3UZ1xUenPVRIZ5X2ugT4iEtpmEnJyQacLdA29wlPpPNZvaIdJA26Xj4+oSCRFImW0vyPCXVf6Pxe08nuhJDKDWu9hn5t/91mgXOXJcqHbO+LZE60mLKBX1LA0miO7DCfkfbAJDoe2kGWm6DjfA1YHQLFHOV667p9tToy3RlZmsxlu3Lihv2dZhl/5lV/B29/+9o415dq1a0bZTSDN9HzlhtQJhA8afceEWkg2gW1N6NYf6f98oJZIk+kWcUdyEFxroABtm3jbOh2hxfp91ijDOhLLJn5eJy8juWG4iZ+3kQ+wAFCm9Qts+/rHvLzS81KpyjC/nyVhkSxiPouafR+le0jf1YrIMhERdzZAaaDj27pRJo66BNFkrGQlolRHVcag0j7i4IoU6W2fE3GnLn5+ybJgiC/zejCMEbXvYzNwKTiicUqTnHONDZXjLh9JbBoxoif9bhxt/b57E+sybR8R12ROsth4agJaMhMXlXgPebsBNJaVqN3mccXZkAgSJxqG1cSRicQgMVxc7CjftSpa+60QaaSyBYUnopP6o232UTvTrPzKr/wKnn/+ebzlLW/p7Hv729+Or/u6r8OrX/1qfO3Xfi2+4Ru+AXEsP16r1Qqr1Up/Pz4+1p+37e6wzxEqUB0zYJnbuyG1ru+SdSD0XPosuby/HUQt83LULdcmEnO3wYZEUuz2GoOg434VM3m7ca4AAuk6T5m2gkQiPrbGgh839gX2/e47tYAV3WdJIr8GIbEIis+FEzrjFtvoICr2QBcy643y8A6EylZJN3KpU5bfq8DZt4T6XPV1VQKxivIKMfwDZdsmCp1uRZo+PQ931dXHm24d25oyhqj4fr9u++X9EiEkAmOQA+t7XIQ9e/w4XzkX6Qm14PTB5cbi+qC+NAo2QTH6sbjuR4u91sXjIym7wM7Iyn/4D/8BX/zFX4zHH3/c2P593/d9+MIv/ELs7e3ht37rt/Ct3/qteP755/Hd3/3dYj1PPvkk3vWud3W2RwWAtFt+m5jaAiLVy33GLguJJHoa2k7XzF0TFCuGvyuGNT+7zikN5pLrwNXGTRDiZqP7QOcqFVDNrOMty0zH6lBYURQB7f7/t3f2sVEUbxx/7s72WpBWoNrrAb+Cf/gSixCIQkkUA4IQXjQSAv5jTXwJxqpo/8GAAavEdzQRCVEJ0T8UTASjEYyigBpQsTaxookYyjuFQIBWlL7cPb8/rrs3M/vM7Oze3kuP+SRN73ZnZ2af29357jPPzHj1xvi1hcpDJeYvek9knhS24bLcxpEeXqBEuhG8xC8kI/IGweHK1/CmWPEOzrwyW9NBHDWUKiuccb5ifqmy0g1lKJG0BYvYFRJOILClW2/h1lt3+q2cjcFJ/WdneZUh/naqkUsqoSKbDI39/XTEHWsTbntE8EIJv5Xo4dDujnM5jhIz1vUsS5uqP/t7OL0sFCpPCjcaqb/rKH0tAAD0dysSz37HHC4F0A0dQkS9X6ifVatWkWKBZd++fVxcyrFjx6C2thY+/vhjWLBggfLY119/HZqbm+HChQvkfsqzMmrUKBj9wmoIl5V5OJPsorsIl9UosHMysFAjH2Rv/LSb2b2ubhci+9ZloSNadMuXpRdvVFVeqnMQ62q9OTjKVizGKBs9IquXeIOTAb0eXrK8dD96fbBQXTtsMKWdjvB6yWIY2IBUtmvAwu9IELd4D50GUFWGJTLSAkHyBp8I5k0FIxFHftY2R9r+OjkDZ9MXrP22zKRJRkLcd7FLgM9LM3bDZUg1m0YWV+T2W3n1Qsm8FpR9rLLdYn90yiKDfvuxuopELwuV3k98UbocvgtPljcbTG4dl/oOXMB1ooz3rLgt6kh5fXWe/clLl+DQiuVw4cIFqKioUKb17FlpbGyExYsXK9OMHj2a+75x40YYPnw4zJ8/3zX/yZMnQ2dnJ5w6dQqqq6sd+6PRKESjUeLI3OE27FbVTePIi+j7p4ZFsth9ykIZ7I1n34wuDZclkJTehmTaVWzX0frg0hUjNniqelDluiE+bGTDiNkyWM8HK0y8rtLsxf0vbsMIcKFzbiLGi+hTdeGJwsT6Y6d5t9KJ84Kopj53Ex/iUE8ujgKcb5fsWyv7mX3ohxPoeLul7oFU+ekYB3sb4W2xBAH7nRIsoqDwK16o40KJBGAkYv+3t/cl++sjdGkQXhb27T+cQDvGIpVPv1ggBIzWSBi7XInYkHTnqcSHn247PziuYR+ixW3UFQDfFScOJc/WuQHIBYtjlFG/R8YeIZVITYaXGiEUAmRiuhLl6TYiFx5wGZ7FSlVVFVRVVWmnR0TYuHEj3H///VBS4t5P09raCmVlZXDVVVd5qleq/1V84+BXjvWyWq26rPR/2TDNVL78f3Yfld5Rr3Dqj+3iYS+YBHvxCEFx9Hh9shS6cAIuWNClm8ca0ijLh9zu6yK38nJOHQ2QFiChJN9g2zcf8eKsEi2yqbGpoYReBKzMQ+XH/UqJHWsxOWr4KZlHhL++UufHCwGxu8xK5xAmwoykqmGe4j7OIyAIE7ariHLJ0+I9/eOGEknpyB+uToKAkYkXVlxk6nmxjlcJFh6+48eGsXtEDOxMpO2o8mRxgaSqOvfRvwMAfc+7CRTRzlLhyHSTpb6L3erOOB9W4Fn18+tl8UK2hIrOC6EFNeEci/XywlEOAMTIJPblRxygQeH0vOvbPOsxK99++y20t7fDgw8+6Nj3+eefQ0dHB9TX10N5eTns3LkTli9fDo888ohn7wnVhaK79ozuNOwAQAat6rzJU/9lOOIhhEZRbIDZ/BLCd/Yiptxz4g0qW9uCOUJZd7bx1xUfXm60VL58X3nyipQnx3orSIKzq4cVfa7eJkLAhIl9Mk3jnGnSnU
y662RCxy0myBLDXD2E388x94xdRn9ft30t9jd8QhlhCNmCxd7W5y5Y0nVIj5KgvC4AtGhh96fO1TkiR2y82LxZ3LwvqTRJTlQE5Xlhj7WFENk1xdaJiflI8OcJAPYQYOpICl2HYyiRBEw6PVhcGq1AZ9mxenFAqm4Z5XGSEVZe0QlwFpF1AYkvRLJuHp28qS4gsfuHDbBlv4cSlhdGXo49jYFk5l4MES9poihSkHWxsmHDBpgyZQrceOONjn0lJSWwbt06ePrppyGZTMK1114Lzc3N8Nhjj3kuJ9QH/qY3VuXp8nyRPcjZ/Ww+bp4WqjtG/M697aqrBwDskDw6T8fF0z+PA1V/Kj2Fm/jwMnkSK0jE31e8+fgy+vdJBItXvB7n98FiHUvNEml1uYkxODrdjuxDB8Apaq1yxWPsY6kyIumuovTDHR11kAkTa7vV3+4G5XVRNX6U10QmzqnRNWncG17rrZ8VMmzjKotDsdNqiBnRy8LXJR1ro/a6+MfviKogg43dCGK0jR+houN98hqnIj4/VM8TKh9xNBA1kSErVKxhyuzIRj/xb2L7Yj+nCPOoRqyKZF2sfPjhh9J9s2bN4iaDywSVSztoqP473eBPN/c/VZZX2EbMjvKXpBUf0VTsi1vQrCywjixPQ1CSUfHEjI7sfqteYbAmupMHAaePda+LnZbxZknrTczj4pUwIbIsnCuyMvUjfnNH/RTXEiUWpN14XNcQs52YfyP1XRjpwwhPVrBQ9XEMk1c93KV7+vOkNmpcjxiOSLounX4kPtbGvfvILoMQIX68MXTXCUr3BwEVHCyrO9elpRBfKijPlnpZASK9sE1nkj3RYyOOWPPjUZHBr+sT4jyMjrTEM5FceZkYnq4adIAR4LqUdYSF28t2JhTN2kBegyMzKkszFiWosqi8OU+JxJWvarRkIsZr0G4oAY43Y9Ftz0J5SNJ5pW5InZtSNd15qp4hZkEyPp1Owy7alA1qlgmWIK5B9dtT5vmLiHOisNtlyIJudY8HUF8HAHwMDCVk0r9H+lpze6smfzYhYFcKNUS5f7IeOpjdravGiUNo+GjI3fIE4LuVghrVJOatg5fzowRKep83oeIXr14bKkjcd9kanhMxnShS2G4ddVlOoQLg/M4tlRJybguaohErft37vsqSeFZ0hmx5FTWy9CrXOdtougkXVTcTRNi3Y1q4sPUBcG+kVA2U7puDF9jzS09nrf6tqMBbKmYlVyj7iQkvl9j1R9WZ8gqGiW3cMYp4JpV3TfSKeemupcSbX68V221EBYKqZi5VNjbk+QheFSKYl9tPeGJS24O94FgxkVEcjdDlpYNf8eW1HBYxkNZ3PkTwru7EhRmXLVmTKtzn79ko6xGwxY3VvgmnpxIv2RQpFkUjVnThho4S8Sai6FDFl3gtTzetCq/BqBRuHiFxlEeYEGJijIIF21D6XdwLQH4TZjIXgQXboOtC3dQy/DamfupCLQ0QucTvo45zGxbuvctICNokGnBqm6/F33zeA3x3iH654nBpEXIItDifBxHMK+ZB1cutoc5lPAiL0tPhUZT4FSOymCWv4kR3nSUVVFeQVoyPbCZaScyb2B0kEyzsMy6USA1JDhNLTACkzz9RmvYcYw8xMzfhbbHzCHkTLKrRmDIuO7HCIuti8SJKVB4Vt+BZP8iGy6rQibcIElWXkOpNIIgA6fRNYE08xTcc1mdVZLtoJlXMjhW0y3qzdIJrqbl0qHgmjABAr7yLiVp3J73PbWSX/jXkJipUXXtu+BYfHiaTyzbShtNl+LS9TzIaye18dBt6OwA3gK4lv2TiIZFO+MbNFeOev2qdpiCQxa/QQ+Kd8wvZ+9iFDRkPt7UyM1emZJ2gkE6AbST9XBRf4sLgfBaGgIhL8ShUrHIBvHWdF41YUa14a6Ez2iaogFc3EVTI+Kmn+8gpb14W2RsDFWgmigNnXZgbmG3AhYnuLEQhQ2k8RzdgUn7jiWseWcJCXt90GTrYYkcIcLXKpo/x9nRRCckgBIm4wjKLKgbKPj5HIkU16sNNaKi6UWWel6DWksErInkTcnw9eA9SYOfnQai4bRPhR30FF0SrM/U+K1oA6HmeZPeLuE4QvcIzk0dvapYMcmSrJO5FHOIMwHhjJKby22VUNGJFx2sQhBDJVrSzjudFp35kfIKHuU9kDZ3uG7vbTKaWCNHxolBp2JvOQryhAdzEi7OOlviwvWSMkHGIFyYt163I2DmiOTV9qiz67nW7pt3iRlJpghuZJc1H57rsF5lu+VlryVCeimw0tuybsOytmJ0uPRdxChgJk11FGeeb5dlTdcqnPucTrzPYkismEwG01HXiNlqIEkWquYio+47yvMjWCQIAexCClQ4A+mezFerGiJlUWuY4xTwtbFpyYMrl2A0UPQcQ+Tf12SEoGNGdFNQfl054W9YRJrK4Fi9BkVTZFDpp3Mpye+uWvaW7NYRuDR+bNuLxHFjCwiyKlnhRjQ6yjyXeStL14/+zXUkA/E3F9v2y89KwUPN5UC5ot5FeOshEoddj3Mr2E1+iEydCiYRId9rg0mH3AQoHt1V3vZZFrfLLYl03sm4JWVeRY4I3j6JGb8Ze/UZVJ31QiCLO+q4bo6I7S61OOpWQltnD7RrSiXdxEzH29Pks9v3FpwMQ0jJlRoAefUnG0fQvxxLulbdt9mdhpedEIU0KlysGnU7CFSV6Ny47mZi4kJP1WSZqWLeXvc2ju95tG4CG21zy5i4LqlR1EciCI7MZb2DhdaIjGZxwAv/ChZp9Vn2ORNxEQrbgFzrSBYHfeJEgJ+/z+sbuRRDkatRFpngdoso2iDLRy6XXmC/EDcfKxC71pbw8OkOFZSsg+4EVJEEORVaWKYpHDxPFUSPOdBY+dPPGiKtpe4UUKOIcSOyzkIiFEeNgRMRnkbPbni+vr1f/jIpGrER6ECKKBaTZVSnTb/bCG/kVVGQ534+oXrHUe71V4iGYgFOvjYheHn5H+uhGvbPlqkQL2VXE3BCUF0eMhxHTyDw/XuI2vFwLgYkWX7FG3gSJn64E0XMyUMSHV/x0c3ANWg7WpnGb64XCj0AIdH4TRV6FIF5knhiVV1HnHrIXoWTQuXfcmn9VvBD/7OO7bjEScq4ZpIOiztir/ywoGrESSiCEk/L1PawfwaEu2e4J4odIRkRjihcPPdsgV4ZQTx104wAoZAJLdbxKfHjpS3VDDKQU+17J+nosy+2GotabCR653bNBJl00XgRIEB6UyxF2BtS8xo1oeHCKlUzEYD7s5taVaKETL0Oi0S1llcnGbQVJ+PL0rCQhbHlGmB+BmiNBtcqoiI5uD1Lby+ZbUJYvBADK5oUI5M1WcaysDhT2MvXEeWZvZdLcPWy8XGP5gLNFjuySC/urumLcRqAEMdOoql6G7BOEqGDjidJTIKjzVC2CqBUDwwT6cp9Z74yGBydIIkK+4ncAur3SacNs8e5hnqCiESuhPoQQET9AGVjX6Cq8/Eg6/ZU2wgNdtz7sOVHnp9rO1jGTC19WB9n5e32z12lIct0oyOqew9UfMiKbDbRuO
bIgTz/DW11jMFTxFlkcluq2PZfIGk7dAFQrrW6+meKlXrr1cJ0Izopf1CxXlc5PHrrihk2r+k1kYkomwryOjnLb5ieNyIAXK9gfp9IL3YCQwc3iIT4k1Id2WdxDWJYHsz3Uh8qyvE4KFRgBr1itzNvtu24+hYBsOfRMrkUfeBHZjrS5squXcpi0XmzpKr7EAMAciTVpmbLgaB+jdHTLVpJJBGc2J53UzFs5AsvnPtdykqhVP9cXwqTksyyNW1o/6b2kkeDlxbevrzt1jCLe1CKEOqkKmGPHjsGoUaPyXQ2DwWAwGAw+OHr0KIwcOVKZZsCLlWQyCSdOnIAhQ4ZAKJTbNyRdOjs7YdSoUXD06FGoqKjId3UKAmMTJ8YmToxNnBibODE2cTIQbIKI0NXVBfF4HMJhdef5gO8GCofDroqsUKioqCjYiyZfGJs4MTZxYmzixNjEibGJk0K3SWVlpVa6gRIHaDAYDAaD4TLFiBWDwWAwGAwFjRErOSAajcLKlSshGo3muyoFg7GJE2MTJ8YmToxNnBibOCk2mwz4AFuDwWAwGAzFjfGsGAwGg8FgKGiMWDEYDAaDwVDQGLFiMBgMBoOhoDFixWAwGAwGQ0FjxIrBYDAYDIaCxoiVgFm9ejVMmTIFBg0aBFdddRWZJhQKOf7Wr1/PpWlra4OpU6dCeXk5jBgxApqbm7UWeypEdGxy5MgRmDdvHgwePBiqqqrgiSeegJ6eHi5NMdlEZPTo0Y5rYtmyZVwaHRsVG+vWrYMxY8ZAWVkZTJw4Eb7//vt8VyknrFq1ynE9xGIxez8iwqpVqyAej0N5eTnccccdsH///jzWOHi+++47mDdvHsTjcQiFQvDpp59y+3Vs0N3dDY8//jhUVVXB4MGDYf78+XDs2LEcnkWwuNnkgQcecFw3kydP5tIMVJsYsRIwPT09sHDhQnj00UeV6TZu3AgnT560/xoaGux9nZ2dMGPGDIjH47Bv3z5466234LXXXoM1a9Zku/pZwc0miUQC5syZAxcvXoQffvgBNm3aBJ988gk0NTXZaYrNJhTNzc3cNbFixQp7n46Nio3NmzfD0qVLYfny5dDa2gq33XYbzJ49G44cOZLvquWEm266ibse2tra7H2vvPIKrFmzBtauXQv79u2DWCwGM2bMgK6urjzWOFguXrwI48aNg7Vr15L7dWywdOlS2Lp1K2zatAl++OEH+Oeff2Du3LmQSCRydRqB4mYTAIBZs2Zx1822bdu4/QPWJmjIChs3bsTKykpyHwDg1q1bpceuW7cOKysr8dKlS/a2F198EePxOCaTyYBrmjtkNtm2bRuGw2E8fvy4ve2jjz7CaDSKFy5cQMTitYlFbW0tvvHGG9L9OjYqNm699VZcsmQJt+2GG27AZcuW5alGuWPlypU4btw4cl8ymcRYLIYvvfSSve3SpUtYWVmJ69evz1ENc4v4zNSxwfnz57GkpAQ3bdpkpzl+/DiGw2H88ssvc1b3bEG1Iw0NDXj33XdLjxnINjGelTzR2NgIVVVVcMstt8D69eshmUza+/bu3QtTp07lZh6866674MSJE3Do0KE81Da77N27F+rq6iAej9vb7rrrLuju7oaWlhY7TbHb5OWXX4bhw4fD+PHjYfXq1VwXj46Niomenh5oaWmBmTNncttnzpwJe/bsyVOtcsuBAwcgHo/DmDFjYPHixXDw4EEAAGhvb4eOjg7ONtFoFKZOnXrZ2EbHBi0tLdDb28ulicfjUFdXV9R22rVrF1xzzTVw3XXXwcMPPwynT5+29w1kmwz4VZcHIs8//zxMnz4dysvL4ZtvvoGmpiY4c+aM7fbv6OiA0aNHc8dUV1fb+8aMGZPrKmeVjo4O+/wshg4dCqWlpdDR0WGnKWabPPnkkzBhwgQYOnQo/Pzzz/DMM89Ae3s7vPfeewCgZ6Ni4syZM5BIJBznXF1dXZTnKzJp0iT44IMP4LrrroNTp07BCy+8AFOmTIH9+/fb50/Z5vDhw/mobs7RsUFHRweUlpbC0KFDHWmK9RqaPXs2LFy4EGpra6G9vR2effZZmDZtGrS0tEA0Gh3QNjGeFQ2oYDfx75dfftHOb8WKFVBfXw/jx4+HpqYmaG5uhldffZVLEwqFuO/YH0gqbs8XQduEOi9E5LYXuk1EvNjoqaeegqlTp8LNN98MDz30EKxfvx42bNgAZ8+etfPTsVGxQf3mxXy+FrNnz4YFCxbA2LFj4c4774QvvvgCAADef/99O83lahsWPzYoZjstWrQI5syZA3V1dTBv3jzYvn07/PXXX/b1I2Mg2MR4VjRobGyExYsXK9OIb/1emDx5MnR2dsKpU6eguroaYrGYQ+VarjzxTSJfBGmTWCwGP/30E7ft3Llz0Nvba5/vQLCJSCY2siL4//77bxg+fLiWjYqJqqoqiEQi5G9ejOfrxuDBg2Hs2LFw4MABuOeeewAg5Tmoqamx01xOtrFGRqlsEIvFoKenB86dO8d5Ek6fPg1TpkzJbYXzRE1NDdTW1sKBAwcAYGDbxHhWNKiqqoIbbrhB+VdWVuY7/9bWVigrK7OH9dbX18N3333HxSx89dVXEI/HMxJFQRKkTerr6+H333+HkydP2tu++uoriEajMHHiRDtNodtEJBMbtba2AgDYD2IdGxUTpaWlMHHiRPj666+57V9//XXBP1SzQXd3N/z5559QU1MDY8aMgVgsxtmmp6cHdu/efdnYRscGEydOhJKSEi7NyZMn4ffff79s7HT27Fk4evSo/RwZ0DbJW2hvkXL48GFsbW3F5557Dq+88kpsbW3F1tZW7OrqQkTEzz77DN955x1sa2vDv//+G999912sqKjAJ554ws7j/PnzWF1djffddx+2tbXhli1bsKKiAl977bV8nVZGuNmkr68P6+rqcPr06fjrr7/ijh07cOTIkdjY2GjnUWw2YdmzZw+uWbMGW1tb8eDBg7h582aMx+M4f/58O42OjYqNTZs2YUlJCW7YsAH/+OMPXLp0KQ4ePBgPHTqU76plnaamJty1axcePHgQf/zxR5w7dy4OGTLEPveXXnoJKysrccuWLdjW1ob33Xcf1tTUYGdnZ55rHhxdXV32swIA7Hvk8OHDiKhngyVLluDIkSNxx44d+Ouvv+K0adNw3Lhx2NfXl6/TygiVTbq6urCpqQn37NmD7e3tuHPnTqyvr8cRI0YUhU2MWAmYhoYGBADH386dOxERcfv27Th+/Hi88sorcdCgQVhXV4dvvvkm9vb2cvn89ttveNttt2E0GsVYLIarVq0asEN03WyCmBI0c+bMwfLychw2bBg2NjZyw5QRi8smLC0tLThp0iSsrKzEsrIyvP7663HlypV48eJFLp2OjYqNt99+G2tra7G0tBQnTJiAu3fvzneVcsKiRYuwpqYGS0pKMB6P47333ov79++39yeTSVy5ciXGYjGMRqN4++23Y1tbWx5rHDw7d+4knxsNDQ2IqGeD//77DxsbG3HYsGFYXl6Oc+fOxSNHjuThbIJBZZN///0XZ86ciVdffTWWlJTg//73
P2xoaHCc70C1SQixSKYANRgMBoPBUJSYmBWDwWAwGAwFjRErBoPBYDAYChojVgwGg8FgMBQ0RqwYDAaDwWAoaIxYMRgMBoPBUNAYsWIwGAwGg6GgMWLFYDAYDAZDQWPEisFgMBgMhoLGiBWDwWAwGAwFjRErBoPBYDAYChojVgwGg8FgMBQ0/wd8wQvbVeqjqwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "t2m = out[0, 12].cpu().numpy()\n", + "\n", + "lat = np.linspace(-90, 90, out.shape[-2])\n", + "lon = np.linspace(-180, 180, out.shape[-1])\n", + "X, Y = np.meshgrid(lon, lat)\n", + "\n", + "plt.contourf(X, Y, t2m, 100)\n", + "plt.gca().set_aspect(\"equal\")\n", + "plt.show()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/climatology/anomaly_variance_surface.nc b/examples/climatology/anomaly_variance_surface.nc new file mode 100644 index 0000000000000000000000000000000000000000..8d15de473e699d4fd89769dcd971119a03b300fc Binary files /dev/null and b/examples/climatology/anomaly_variance_surface.nc differ diff --git a/examples/climatology/anomaly_variance_vertical.nc b/examples/climatology/anomaly_variance_vertical.nc new file mode 100644 index 0000000000000000000000000000000000000000..61f1c52132380b79375d2f5f227dd027e64b1a37 Binary files /dev/null and b/examples/climatology/anomaly_variance_vertical.nc differ diff --git a/examples/climatology/climate_surface_doy001_hour00.nc b/examples/climatology/climate_surface_doy001_hour00.nc new file mode 100644 index 0000000000000000000000000000000000000000..4eee99dd0577ca1208dc01c26555d3f6e7eb52ad --- /dev/null +++ b/examples/climatology/climate_surface_doy001_hour00.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6df405a6178c3222c4abd98eea6573ced38aa2ccbe0966647a68ac18103a4d1d +size 20830158 diff --git a/examples/climatology/climate_surface_doy001_hour03.nc b/examples/climatology/climate_surface_doy001_hour03.nc new file mode 100644 index 0000000000000000000000000000000000000000..78d57e7a4d962164d547ab013ef825a80604ab3e --- /dev/null +++ b/examples/climatology/climate_surface_doy001_hour03.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b67bcff5c6df5306677a65de3f7c08485a8ba1c3ecdc60b7f346cf0642caa7f1 +size 20830158 diff --git a/examples/climatology/climate_surface_doy001_hour06.nc b/examples/climatology/climate_surface_doy001_hour06.nc new file mode 100644 index 0000000000000000000000000000000000000000..27ef0dcf515665782a8031aebb8e7aca8ff355c6 --- /dev/null +++ b/examples/climatology/climate_surface_doy001_hour06.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:080cb1f7e8dad508fc78e40a5bbd30d03564b7a48bdd2916f296dc3dfed30c60 +size 20830158 diff --git a/examples/climatology/climate_surface_doy001_hour09.nc b/examples/climatology/climate_surface_doy001_hour09.nc new file mode 100644 index 0000000000000000000000000000000000000000..efa0bf3ccc1d3d0c7e174d1fa57f1ff5683484bb --- /dev/null +++ b/examples/climatology/climate_surface_doy001_hour09.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:134e194a8f38828ec067f98e8c5c7dc4aed0131046b6ed839f75bcf6b98b5492 +size 20830158 diff --git a/examples/climatology/climate_surface_doy001_hour12.nc b/examples/climatology/climate_surface_doy001_hour12.nc new file mode 100644 index 0000000000000000000000000000000000000000..fa221976960847e2b5d17fa127d3cfb4c53a87fa --- /dev/null +++ b/examples/climatology/climate_surface_doy001_hour12.nc @@ 
-0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:974a79a5e68096c356b42f15cd9c955f0a1306cf6a942c682e09d6504742a6d1 +size 20830158 diff --git a/examples/climatology/climate_surface_doy001_hour15.nc b/examples/climatology/climate_surface_doy001_hour15.nc new file mode 100644 index 0000000000000000000000000000000000000000..f8a630fcb5be85bed8bd10124cbc81ffbc5a2364 --- /dev/null +++ b/examples/climatology/climate_surface_doy001_hour15.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dea669f097cf1b9d775956a01e2bae52ae1856dfcc7abae5c6ee394949275c5e +size 20830158 diff --git a/examples/climatology/climate_surface_doy001_hour18.nc b/examples/climatology/climate_surface_doy001_hour18.nc new file mode 100644 index 0000000000000000000000000000000000000000..2a5ceb6e6c2b3e23175085085a0ef578d691f203 --- /dev/null +++ b/examples/climatology/climate_surface_doy001_hour18.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3b06aabcd1f14cc3a4b3c5dd355a225646fe4b9e3b5b676ee2fb6e5d05fe31c9 +size 20830158 diff --git a/examples/climatology/climate_surface_doy001_hour21.nc b/examples/climatology/climate_surface_doy001_hour21.nc new file mode 100644 index 0000000000000000000000000000000000000000..13f366c3802e63beff6bbae03c5a5fdadb56ab85 --- /dev/null +++ b/examples/climatology/climate_surface_doy001_hour21.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ef90b12b58b73481f96c0caf5277d24cc05681003df0f6e83f5e85dc0b4d47b3 +size 20830158 diff --git a/examples/climatology/climate_vertical_doy001_hour00.nc b/examples/climatology/climate_vertical_doy001_hour00.nc new file mode 100644 index 0000000000000000000000000000000000000000..9a51931d9b43d2f69d646a812db0109cd816eab0 --- /dev/null +++ b/examples/climatology/climate_vertical_doy001_hour00.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5f75cc1a55b6f1beecf2271d3a8fe9914cb20fb87e471761b3b686a990ec0ef +size 116475398 diff --git a/examples/climatology/climate_vertical_doy001_hour03.nc b/examples/climatology/climate_vertical_doy001_hour03.nc new file mode 100644 index 0000000000000000000000000000000000000000..5fb988aff029f32fb5f36e83c86e8ca140ac3109 --- /dev/null +++ b/examples/climatology/climate_vertical_doy001_hour03.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:80a04c77e614e01ae26fdad1fc3c5a15fd6869d627ce4251447d64c3bb934916 +size 116475398 diff --git a/examples/climatology/climate_vertical_doy001_hour06.nc b/examples/climatology/climate_vertical_doy001_hour06.nc new file mode 100644 index 0000000000000000000000000000000000000000..d236e82f4f26f26c0f3d4ae15b53dbf5ec244253 --- /dev/null +++ b/examples/climatology/climate_vertical_doy001_hour06.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0f6fc10bbd055e1e7af63f70cfa24abd60794cd89822a95b9fb9194b07c7822f +size 116475398 diff --git a/examples/climatology/climate_vertical_doy001_hour09.nc b/examples/climatology/climate_vertical_doy001_hour09.nc new file mode 100644 index 0000000000000000000000000000000000000000..aa225cbc173a9eca35c393609730577e4000d164 --- /dev/null +++ b/examples/climatology/climate_vertical_doy001_hour09.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:70cb7c3e30902548b24330b61ecf493fdf1949bb434f45e9258434a80d91ee6d +size 116475398 diff --git a/examples/climatology/climate_vertical_doy001_hour12.nc b/examples/climatology/climate_vertical_doy001_hour12.nc new file mode 100644 index 
0000000000000000000000000000000000000000..f4b0b384e6ce8f2890536396f8edd9df0e061c8a --- /dev/null +++ b/examples/climatology/climate_vertical_doy001_hour12.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a597136ae5b374e55b6f1113e218faeb3090a3d474f95bd6c043b4923238a32c +size 116475398 diff --git a/examples/climatology/climate_vertical_doy001_hour15.nc b/examples/climatology/climate_vertical_doy001_hour15.nc new file mode 100644 index 0000000000000000000000000000000000000000..4551686abb82484bf2515a6f6dc3bf0d4c69afb0 --- /dev/null +++ b/examples/climatology/climate_vertical_doy001_hour15.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e1765b3fcf01aed2338b9df110a76a9e439703bb87c366b0d3868473ccceb46 +size 116475398 diff --git a/examples/climatology/climate_vertical_doy001_hour18.nc b/examples/climatology/climate_vertical_doy001_hour18.nc new file mode 100644 index 0000000000000000000000000000000000000000..3b4342cea1f59679496ba50c3bf88b33474f251c --- /dev/null +++ b/examples/climatology/climate_vertical_doy001_hour18.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:66b27837717a6cd37d72d55c8de3cc24b2122e25173348be37b3a76d98c8d580 +size 116475398 diff --git a/examples/climatology/climate_vertical_doy001_hour21.nc b/examples/climatology/climate_vertical_doy001_hour21.nc new file mode 100644 index 0000000000000000000000000000000000000000..78aa2a001f9de1cf095e13d59ac5b97daa0776d2 --- /dev/null +++ b/examples/climatology/climate_vertical_doy001_hour21.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:65084e38b67342d03135e98092d893e6b8ad677a1b690ea39b3fbc4b91df6fdc +size 116475398 diff --git a/examples/climatology/musigma_surface.nc b/examples/climatology/musigma_surface.nc new file mode 100644 index 0000000000000000000000000000000000000000..4ba7c8bb12f55491531e06fe7ea3b0a184128bc0 Binary files /dev/null and b/examples/climatology/musigma_surface.nc differ diff --git a/examples/climatology/musigma_vertical.nc b/examples/climatology/musigma_vertical.nc new file mode 100644 index 0000000000000000000000000000000000000000..f6415aec93712677beefb17bced1faede757d54f Binary files /dev/null and b/examples/climatology/musigma_vertical.nc differ diff --git a/examples/config.yaml b/examples/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4435167a11fd412e2dfb565e135eed07a33c7663 --- /dev/null +++ b/examples/config.yaml @@ -0,0 +1,20 @@ +params: + in_channels: 160 + input_size_time: 2 + in_channels_static: 8 + input_scalers_epsilon: 0.0 + static_input_scalers_epsilon: 0.0 + n_lats_px: 360 + n_lons_px: 576 + patch_size_px: [2, 2] + mask_unit_size_px: [30, 32] + embed_dim: 2560 + n_blocks_encoder: 12 + n_blocks_decoder: 2 + mlp_multiplier: 4 + n_heads: 16 + dropout: 0.0 + drop_path: 0.0 + parameter_dropout: 0.0 + checkpoint_encoder: [] + checkpoint_decoder: [] \ No newline at end of file diff --git a/examples/merra-2/MERRA2_sfc_20200101.nc b/examples/merra-2/MERRA2_sfc_20200101.nc new file mode 100644 index 0000000000000000000000000000000000000000..e9a61489fc05397e0c8153f74832505a6ecc833d --- /dev/null +++ b/examples/merra-2/MERRA2_sfc_20200101.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1de1638ca1f1b44ca95a7c6908573414c44e633f1bb2e212b8a46afc866c741e +size 101525285 diff --git a/examples/merra-2/MERRA_pres_20200101.nc b/examples/merra-2/MERRA_pres_20200101.nc new file mode 100644 index 
0000000000000000000000000000000000000000..9d58023e68ef715d31fd5284747de9ce86883cbd --- /dev/null +++ b/examples/merra-2/MERRA_pres_20200101.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2cd9b405aa1d388fc0c6cbe6d71104bea770b48e07ba4364f067dc425d025af8 +size 337127950 diff --git a/examples/weights/.cache/huggingface/.gitignore b/examples/weights/.cache/huggingface/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..f59ec20aabf5842d237244ece8c81ab184faeac1 --- /dev/null +++ b/examples/weights/.cache/huggingface/.gitignore @@ -0,0 +1 @@ +* \ No newline at end of file diff --git a/examples/weights/.cache/huggingface/download/prithvi.wxc.2300m.v1.pt.metadata b/examples/weights/.cache/huggingface/download/prithvi.wxc.2300m.v1.pt.metadata new file mode 100644 index 0000000000000000000000000000000000000000..9f2c1ddb8374e74ed5b105dd0797e168f662a711 --- /dev/null +++ b/examples/weights/.cache/huggingface/download/prithvi.wxc.2300m.v1.pt.metadata @@ -0,0 +1,3 @@ +e9fdd56d7011c98ae591166c56e8b624d4f39e4a +9b3617e91f164833e4c155dc5035178591266731ef7ce471b12d1caa34d1b8d8 +1731282990.9262006 diff --git a/examples/weights/.cache/huggingface/download/prithvi.wxc.rollout.2300m.v1.pt.metadata b/examples/weights/.cache/huggingface/download/prithvi.wxc.rollout.2300m.v1.pt.metadata new file mode 100644 index 0000000000000000000000000000000000000000..8a3f7eed11b0128f939411c60c13ce451d3fd869 --- /dev/null +++ b/examples/weights/.cache/huggingface/download/prithvi.wxc.rollout.2300m.v1.pt.metadata @@ -0,0 +1,3 @@ +514c3d061ad45e3338495da7c16b13aa20fa75b1 +e66ef85d4e404465a5359b729f115d4bd7a5c5e8016c7b86b5f773d72cef8efa +1731287319.8335612 diff --git a/examples/weights/prithvi.wxc.2300m.v1.pt b/examples/weights/prithvi.wxc.2300m.v1.pt new file mode 100644 index 0000000000000000000000000000000000000000..7d620ba813e14c24e5ec691ebe2b6b38ad0ed06e --- /dev/null +++ b/examples/weights/prithvi.wxc.2300m.v1.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9b3617e91f164833e4c155dc5035178591266731ef7ce471b12d1caa34d1b8d8 +size 28447290466 diff --git a/examples/weights/prithvi.wxc.rollout.2300m.v1.pt b/examples/weights/prithvi.wxc.rollout.2300m.v1.pt new file mode 100644 index 0000000000000000000000000000000000000000..ee0fa8cc489d6e787a2ca1ce98e0dabc7e8789ce --- /dev/null +++ b/examples/weights/prithvi.wxc.rollout.2300m.v1.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e66ef85d4e404465a5359b729f115d4bd7a5c5e8016c7b86b5f773d72cef8efa +size 28447289145