diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..5d7f077bd6e1a90e4cb8544726b05f855a1e0d13 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.whl filter=lfs diff=lfs merge=lfs -text
diff --git a/LHM/__init__.py b/LHM/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a1e39e624fbf5d970acc4b05714f8b9f70830c6
--- /dev/null
+++ b/LHM/__init__.py
@@ -0,0 +1,15 @@
+# Copyright (c) 2023-2024, Zexin He
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Empty
diff --git a/LHM/__pycache__/__init__.cpython-310.pyc b/LHM/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cc52b0cc1a132cd412aa072a490811866d9d7315
Binary files /dev/null and b/LHM/__pycache__/__init__.cpython-310.pyc differ
diff --git a/LHM/__pycache__/launch.cpython-310.pyc b/LHM/__pycache__/launch.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..993935e9d2dcf7d5bf108ebafad812eac5f5fef8
Binary files /dev/null and b/LHM/__pycache__/launch.cpython-310.pyc differ
diff --git a/LHM/datasets/__init__.py b/LHM/datasets/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..323127c7d93f0a57f90cc8649ee2a67b6b630762
--- /dev/null
+++ b/LHM/datasets/__init__.py
@@ -0,0 +1,16 @@
+# Copyright (c) 2023-2024, Zexin He
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ + +from .mixer import MixerDataset diff --git a/LHM/datasets/__pycache__/__init__.cpython-310.pyc b/LHM/datasets/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..88338f4b204be95c7e383d2be143ed2d80c03143 Binary files /dev/null and b/LHM/datasets/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/datasets/__pycache__/cam_utils.cpython-310.pyc b/LHM/datasets/__pycache__/cam_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2443ed5b8c2a069ff2237cc847b950749254844c Binary files /dev/null and b/LHM/datasets/__pycache__/cam_utils.cpython-310.pyc differ diff --git a/LHM/datasets/__pycache__/mixer.cpython-310.pyc b/LHM/datasets/__pycache__/mixer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2423c2c9f7d912d68b4304d2ed8fdbb107467bce Binary files /dev/null and b/LHM/datasets/__pycache__/mixer.cpython-310.pyc differ diff --git a/LHM/datasets/base.py b/LHM/datasets/base.py new file mode 100644 index 0000000000000000000000000000000000000000..cd3aa16b2393b429d2c10c18fd39f825e5a19ba0 --- /dev/null +++ b/LHM/datasets/base.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Peihao Li & Lingteng Qiu & Xiaodong Gu & Qi Zuo +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-10 18:47:56 +# @Function : dataset base + +import json +import pdb +import traceback +from abc import ABC, abstractmethod + +import numpy as np +import torch +from megfile import smart_exists, smart_open, smart_path_join +from PIL import Image + + +class BaseDataset(torch.utils.data.Dataset, ABC): + def __init__(self, root_dirs: str, meta_path: str): + super().__init__() + self.root_dirs = root_dirs + self.uids = self._load_uids(meta_path) + + def __len__(self): + return len(self.uids) + + @abstractmethod + def inner_get_item(self, idx): + pass + + def __getitem__(self, idx): + try: + return self.inner_get_item(idx) + except Exception as e: + traceback.print_exc() + print(f"[DEBUG-DATASET] Error when loading {self.uids[idx]}") + # raise e + return self.__getitem__((idx + 1) % self.__len__()) + + @staticmethod + def _load_uids(meta_path: str): + # meta_path is a json file + if meta_path == None: + uids = [] + else: + with open(meta_path, "r") as f: + uids = json.load(f) + + return uids + + @staticmethod + def _load_rgba_image(file_path, bg_color: float = 1.0): + """Load and blend RGBA image to RGB with certain background, 0-1 scaled""" + rgba = np.array(Image.open(smart_open(file_path, "rb"))) + rgba = torch.from_numpy(rgba).float() / 255.0 + rgba = rgba.permute(2, 0, 1).unsqueeze(0) + rgb = rgba[:, :3, :, :] * rgba[:, 3:4, :, :] + bg_color * ( + 1 - rgba[:, 3:, :, :] + ) + # rgba[:, :3, ...] * rgba[:, 3:, ...] + (1 - rgba[:, 3:, ...]) + return rgb + + @staticmethod + def _locate_datadir(root_dirs, uid, locator: str): + for root_dir in root_dirs: + datadir = smart_path_join(root_dir, uid, locator) + if smart_exists(datadir): + return root_dir + raise FileNotFoundError(f"Cannot find valid data directory for uid {uid}") diff --git a/LHM/datasets/bedlam.py b/LHM/datasets/bedlam.py new file mode 100644 index 0000000000000000000000000000000000000000..266615401013819b4e9ee7573d80b2babccac576 --- /dev/null +++ b/LHM/datasets/bedlam.py @@ -0,0 +1,493 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import glob + +# from megfile import smart_path_join, smart_open +import json +import os +import random +from collections import defaultdict +from typing import Union + +import cv2 +import numpy as np +import torch +from PIL import Image + +from LHM.datasets.base import BaseDataset +from LHM.datasets.cam_utils import ( + build_camera_principle, + build_camera_standard, + camera_normalization_objaverse, +) +from LHM.utils.proxy import no_proxy + +__all__ = ["BedlamDataset"] + + +class BedlamDataset(BaseDataset): + + def __init__( + self, + root_dirs: str, + meta_path: str, + sample_side_views: int, + render_image_res_low: int, + render_image_res_high: int, + render_region_size: int, + source_image_res: int, + repeat_num=1, + crop_range_ratio_hw=[1.0, 1.0], + valid_area_ratio=0.4, + debug=False, + **kwargs, + ): + super().__init__(root_dirs, meta_path) + self.sample_side_views = sample_side_views + self.render_image_res_low = render_image_res_low + self.render_image_res_high = render_image_res_high + if not ( + isinstance(render_region_size, list) + or isinstance(render_region_size, tuple) + ): + render_region_size = render_region_size, render_region_size # [H, W] + self.render_region_size = render_region_size + self.source_image_res = source_image_res + + self.uids = self.uids * repeat_num + self.crop_range_ratio_hw = crop_range_ratio_hw + self.debug = debug + self.valid_area_ratio = valid_area_ratio + print( + f"BedlamDataset, data_len:{len(self.uids)}, repeat_num:{repeat_num}, debug:{debug}" + ) + self.multiply = kwargs.get("multiply", 14) + + @staticmethod + def _load_pose(pose): + intrinsic = torch.eye(4) + intrinsic[0, 0] = pose["focal"][0] + intrinsic[1, 1] = pose["focal"][1] + intrinsic[0, 2] = pose["princpt"][0] + intrinsic[1, 2] = pose["princpt"][1] + intrinsic = intrinsic.float() + + c2w = torch.eye(4) + # c2w[:3, :3] = torch.tensor(pose["R"]) + # c2w[3, :3] = torch.tensor(pose["t"]) + c2w = c2w.float() + + return c2w, intrinsic + + def load_rgb_image_with_aug_bg(self, rgb_path, mask_path, bg_color): + rgb = np.array(Image.open(rgb_path)) + rgb = torch.from_numpy(rgb).float() / 255.0 + rgb = rgb.permute(2, 0, 1).unsqueeze(0) # [1, 3, H, W] + mask = None + + if mask_path is not None: + mask = np.array(Image.open(mask_path)) + mask = torch.from_numpy(mask).float() / 255.0 + mask = (mask > 0.5).float() + if len(mask.shape) == 3: + mask = mask[:, :, 0:1] + if len(mask.shape) == 2: + mask = mask.unsqueeze(-1) + mask = mask.permute(2, 0, 1).unsqueeze(0) # [1, 1, H, W] + rgb = torch.cat([rgb, mask], dim=1) # [1, 4, H, W] + else: + mask = rgb[:, 3:4, :, :] + + # erode mask + mask_np = (mask[0, 0].numpy() * 255).astype(np.uint8) + kernel_size, iterations = 3, 1 + kernel = np.ones((kernel_size, kernel_size), np.uint8) + mask_np = cv2.erode(mask_np, kernel, iterations=iterations) + mask = torch.from_numpy(mask_np).unsqueeze(0).unsqueeze(0) / 255.0 + mask = (mask > 0.5).float() + rgb = torch.cat([rgb[:, :3], mask], dim=1) # [1, 4, H, W] + + if rgb.shape[1] == 4: + rgb = rgb[:, :3, :, :] * rgb[:, 3:4, :, :] + bg_color * ( + 1 - rgb[:, 3:, :, :] + ) + + return rgb, mask + + def 
scale_intrs(self, intrs, ratio_x, ratio_y): + intrs[:, 0] = intrs[:, 0] * ratio_x + intrs[:, 1] = intrs[:, 1] * ratio_y + return intrs + + def uniform_sample_in_chunk(self, sample_num, sample_data): + chunks = np.array_split(sample_data, sample_num) + select_list = [] + for chunk in chunks: + select_list.append(np.random.choice(chunk)) + return select_list + + @no_proxy + def inner_get_item(self, idx): + """ + Loaded contents: + rgbs: [M, 3, H, W] + poses: [M, 3, 4], [R|t] + intrinsics: [3, 2], [[fx, fy], [cx, cy], [weight, height]] + """ + uid = self.uids[idx] + seq_id = uid["seq_id"] + all_frame_info = uid["all_frame_info"] + uid = os.path.join(self.root_dirs, seq_id) + valid_imgs = [ + e["frame_name"] + for e in all_frame_info + if e["valid_area_ratio"] > self.valid_area_ratio + ] + assert len(valid_imgs) >= 1 + + if self.sample_side_views + 1 <= len(valid_imgs): + cam_id_list = np.random.choice( + valid_imgs, self.sample_side_views + 1, replace=False + ) + else: + cam_id_list = np.random.choice( + valid_imgs, self.sample_side_views + 1, replace=True + ) + + assert self.sample_side_views + 1 == len(cam_id_list) + crop_ratio_h, crop_ratio_w = self.crop_range_ratio_hw + + frame_id_list = cam_id_list + + # source images + c2ws, intrs, rgbs, bg_colors, masks = [], [], [], [], [] + source_c2ws, source_intrs, source_rgbs = [], [], [] + smplx_params = [] + shape_param = None + for cam_id, frame_id in zip(cam_id_list, frame_id_list): + frame_path = os.path.join(uid, cam_id + ".png") + frame_name = os.path.splitext(os.path.basename(frame_path))[0] + smplx_path = os.path.join( + uid.replace("/png_post/", "/smplx/"), f"{frame_name}.json" + ) + + with open(smplx_path) as f: + smplx_param = { + k: torch.FloatTensor(v) + for k, v in json.load(f).items() + if "valid_area_ratio" not in k + } + + # if cam_id == 0: + shape_param = smplx_param["betas"] + + c2w, intrinsic = self._load_pose(smplx_param) + + bg_color = random.choice([0.0, 0.5, 1.0]) + rgb, mask = self.load_rgb_image_with_aug_bg( + frame_path, mask_path=None, bg_color=bg_color + ) + + # crop image to enlarge human area. 
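+            # the crop below is symmetric about the image center; the principal
+            # point (cx, cy) is shifted by the crop offsets so the intrinsics
+            # remain valid for the cropped rgb/mask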
+ if (crop_ratio_h < 1.0) or (crop_ratio_w < 1.0): + img_size_hw = rgb.shape[2], rgb.shape[3] + h_crop, w_crop = round(img_size_hw[0] * crop_ratio_h), round( + img_size_hw[1] * crop_ratio_w + ) + h_crop_offset, w_crop_offset = round( + (img_size_hw[0] - h_crop) / 2 + ), round((img_size_hw[1] - w_crop) / 2) + rgb = rgb[ + :, + :, + h_crop_offset : h_crop_offset + h_crop, + w_crop_offset : w_crop_offset + w_crop, + ] + mask = mask[ + :, + :, + h_crop_offset : h_crop_offset + h_crop, + w_crop_offset : w_crop_offset + w_crop, + ] + intrinsic[0, 2] -= w_crop_offset + intrinsic[1, 2] -= h_crop_offset + + assert ( + abs(intrinsic[0, 2] * 2 - rgb.shape[-1]) <= 1 + ), f"{intrinsic[0, 2] * 2}, {rgb.shape[-1]}" + + c2ws.append(c2w) + rgbs.append(rgb) + bg_colors.append(bg_color) + intrs.append(intrinsic) + smplx_params.append(smplx_param) + masks.append(mask) + + c2ws = torch.stack(c2ws, dim=0) # [N, 4, 4] + intrs = torch.stack(intrs, dim=0) # [N, 4, 4] + rgbs = torch.cat(rgbs, dim=0) # [N, 3, H, W] + bg_colors = ( + torch.tensor(bg_colors, dtype=torch.float32).unsqueeze(-1).repeat(1, 3) + ) # [N, 3] + masks = torch.cat(masks, dim=0) # [N, 1, H, W] + + smplx_params_tmp = defaultdict(list) + for smplx in smplx_params: + for k, v in smplx.items(): + smplx_params_tmp[k].append(v) + for k, v in smplx_params_tmp.items(): + smplx_params_tmp[k] = torch.stack(v) + smplx_params = smplx_params_tmp + # TODO check different betas for same person + smplx_params["betas"] = shape_param + + # reference images + # TODO check prob + ref_idx = np.random.choice(self.sample_side_views + 1) + + cam_id_source_list = cam_id_list[ref_idx : ref_idx + 1] + frame_id_source_list = frame_id_list[ref_idx : ref_idx + 1] + + for cam_id, frame_id in zip(cam_id_source_list, frame_id_source_list): + frame_path = os.path.join(uid, cam_id + ".png") + frame_name = os.path.splitext(os.path.basename(frame_path))[0] + smplx_path = os.path.join( + uid.replace("/png_post/", "/smplx/"), f"{frame_name}.json" + ) + + with open(smplx_path) as f: + smplx_param = { + k: torch.FloatTensor(v) + for k, v in json.load(f).items() + if "valid_area_ratio" not in k + } + + c2w, intrinsic = self._load_pose(smplx_param) + + bg_color = 1.0 + rgb, mask = self.load_rgb_image_with_aug_bg( + frame_path, mask_path=None, bg_color=bg_color + ) + + # crop image to enlarge human area. + if (crop_ratio_h < 1.0) or (crop_ratio_w < 1.0): + img_size_hw = rgb.shape[2], rgb.shape[3] + h_crop, w_crop = round(img_size_hw[0] * crop_ratio_h), round( + img_size_hw[1] * crop_ratio_w + ) + h_crop_offset, w_crop_offset = round( + (img_size_hw[0] - h_crop) / 2 + ), round((img_size_hw[1] - w_crop) / 2) + rgb = rgb[ + :, + :, + h_crop_offset : h_crop_offset + h_crop, + w_crop_offset : w_crop_offset + w_crop, + ] + mask = mask[ + :, + :, + h_crop_offset : h_crop_offset + h_crop, + w_crop_offset : w_crop_offset + w_crop, + ] + intrinsic[0, 2] -= w_crop_offset + intrinsic[1, 2] -= h_crop_offset + + assert ( + abs(intrinsic[0, 2] * 2 - rgb.shape[-1]) <= 1 + ), f"{intrinsic[0, 2] * 2}, {rgb.shape[-1]}" + + source_c2ws.append(c2w) + source_intrs.append(intrinsic) + source_rgbs.append(rgb) + + source_c2ws = torch.stack(source_c2ws, dim=0) + source_intrs = torch.stack(source_intrs, dim=0) + source_rgbs = torch.cat(source_rgbs, dim=0) + + # adjust source image resolution + # TODO check 224x224 need to padding? 
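+        # the resize below snaps the reference view's H/W to multiples of
+        # `self.multiply` (14 by default, presumably the ViT patch size of the
+        # image encoder) and rescales the intrinsics by the same x/y ratios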
+ # ratio_x, ratio_y = self.source_image_res / source_rgbs.shape[3], self.source_image_res / source_rgbs.shape[2] + ratio = self.source_image_res / min(source_rgbs.shape[2:]) + tgt_size = int(ratio * source_rgbs.shape[2]), int(ratio * source_rgbs.shape[3]) + multiply = self.multiply + tgt_size = ( + int(tgt_size[0] / multiply) * multiply, + int(tgt_size[1] / multiply) * multiply, + ) + ratio_y, ratio_x = ( + tgt_size[0] / source_rgbs.shape[2], + tgt_size[1] / source_rgbs.shape[3], + ) + source_rgbs = torch.nn.functional.interpolate( + source_rgbs, size=tgt_size, mode="bicubic", align_corners=True + ) + source_rgbs = torch.clamp(source_rgbs, 0, 1) + source_intrs = self.scale_intrs(source_intrs, ratio_x=ratio_x, ratio_y=ratio_y) + + # adjust render image resolution and sample intended rendering region + render_image_res = np.random.randint( + self.render_image_res_low, self.render_image_res_high + 1 + ) + ratio = render_image_res / min(rgbs.shape[2:]) + tgt_size = int(ratio * rgbs.shape[2]), int(ratio * rgbs.shape[3]) + # multiply = 14 + # tgt_size = int(tgt_size[0] / multiply) * multiply, int(tgt_size[1] / multiply) * multiply + # ratio_y, ratio_x = tgt_size[0] / rgbs.shape[2], tgt_size[1] / rgbs.shape[3] + render_image = torch.nn.functional.interpolate( + rgbs, size=tgt_size, mode="bicubic", align_corners=True + ) + render_image = torch.clamp(render_image, 0, 1) + intrs = self.scale_intrs(intrs, ratio_x=ratio, ratio_y=ratio) + + render_mask = torch.nn.functional.interpolate( + masks, size=tgt_size, mode="bicubic", align_corners=True + ) + render_mask = torch.clamp(render_mask, 0, 1) + + assert ( + abs(intrs[0, 0, 2] * 2 - render_image.shape[3]) <= 1.1 + ), f"{intrs[0, 0, 2] * 2}, {render_image.shape}" + assert ( + abs(intrs[0, 1, 2] * 2 - render_image.shape[2]) <= 1.1 + ), f"{intrs[0, 1, 2] * 2}, {render_image.shape}" + + # anchors = torch.randint( + # 0, render_image_res - min(self.render_region_size) + 1, size=(self.sample_side_views + 1, 2)) + # crop_indices_h = torch.arange(0, self.render_region_size[0], device=render_image.device) + # crop_indices_w = torch.arange(0, self.render_region_size[1], device=render_image.device) + # index_h = (anchors[:, 0].unsqueeze(1) + crop_indices_h).view(-1, self.render_region_size[0], 1) + # index_w = (anchors[:, 1].unsqueeze(1) + crop_indices_w).view(-1, 1, self.render_region_size[1]) + # batch_indices = torch.arange(self.sample_side_views + 1, device=render_image.device).view(-1, 1, 1) + # cropped_render_image = render_image[batch_indices, :, index_h, index_w].permute(0, 3, 1, 2) + + ret = { + "uid": uid, + "source_c2ws": source_c2ws, # [N1, 4, 4] + "source_intrs": source_intrs, # [N1, 4, 4] + "source_rgbs": source_rgbs, # [N1, 3, H, W] + "render_image": render_image, # [N, 3, H, W] + "render_mask": render_mask, # [ N, 1, H, W] + "c2ws": c2ws, # [N, 4, 4] + "intrs": intrs, # [N, 4, 4] + # 'render_anchors': anchors, # [N, 2] + "render_full_resolutions": torch.tensor( + [tgt_size], dtype=torch.float32 + ).repeat( + self.sample_side_views + 1, 1 + ), # [N, 2] + "render_bg_colors": bg_colors, # [N, 3] + } + + # ['root_pose', 'body_pose', 'jaw_pose', 'leye_pose', 'reye_pose', 'lhand_pose', 'rhand_pose', 'expr', 'trans', 'betas'] + # 'smplx_params': smplx_params, # dict: body_pose:[N, 21, 3], + ret.update(smplx_params) + + return ret + + +if __name__ == "__main__": + import cv2 + + root_dir = "./train_data/bedlam/data/" + meta_path = "./train_data/bedlam/data/annots/valid_list.json" + dataset = BedlamDataset( + root_dirs=root_dir, + meta_path=meta_path, + 
sample_side_views=3, + render_image_res_low=384, + render_image_res_high=384, + render_region_size=(682, 384), + source_image_res=384, + valid_area_ratio=0.1, + debug=False, + ) + + for data in dataset: + print( + "source_c2ws.shape", + data["source_c2ws"].shape, + ) + print( + "source_intrs.shape", + data["source_intrs"].shape, + ) + print( + "source_rgbs.shape", + data["source_rgbs"].shape, + ) + print( + "render_image.shape", + data["render_image"].shape, + ) + print( + "c2ws.shape", + data["c2ws"].shape, + ) + print( + "intrs.shape", + data["intrs"].shape, + ) + # print("render_anchors.shape", data["render_anchors"].shape, ) + print( + "render_full_resolutions.shape", + data["render_full_resolutions"].shape, + ) + print( + "render_bg_colors.shape", + data["render_bg_colors"].shape, + ) + # print("smplx_params", data["smplx_params"].keys()) + print("smplx_params.body_pose.shape", data["body_pose"].shape) + print("smplx_params.expr.shape", data["expr"].shape) + print("smplx_params.betas.shape", data["betas"].shape) + os.makedirs("debug_vis/dataloader", exist_ok=True) + for i in range(data["source_rgbs"].shape[0]): + cv2.imwrite( + f"debug_vis/dataloader/source_rgbs_{i}.jpg", + ( + ( + data["source_rgbs"][i].permute(1, 2, 0).numpy()[:, :, (2, 1, 0)] + * 255 + ).astype(np.uint8) + ), + ) + print( + "source_rgbs", + data["source_rgbs"].shape, + ) + print("source_intrs", data["source_intrs"][i]) + + for i in range(data["render_image"].shape[0]): + cv2.imwrite( + f"debug_vis/dataloader/rgbs{i}.jpg", + ( + ( + data["render_image"][i] + .permute(1, 2, 0) + .numpy()[:, :, (2, 1, 0)] + * 255 + ).astype(np.uint8) + ), + ) + print( + "render_image", + data["render_image"].shape, + ) + print("render_full_resolutions", data["render_full_resolutions"][i]) + # print("render_anchors", data["render_anchors"][i]) + print("intrs", data["intrs"][i]) + xx diff --git a/LHM/datasets/bedlam_util.py b/LHM/datasets/bedlam_util.py new file mode 100644 index 0000000000000000000000000000000000000000..ed68c9ab5c7b5c97bf45232ec418b8a60fe8f5a0 --- /dev/null +++ b/LHM/datasets/bedlam_util.py @@ -0,0 +1,306 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. 
+# CC BY-NC-SA 4.0 license + +import os +# os.environ["PYOPENGL_PLATFORM"] = "egl" +# os.environ['EGL_DEVICE_ID'] = '0' + +import warnings +import pickle +import torch +import smplx +from tqdm import tqdm +import sys +import numpy as np +from PIL import Image, ImageOps, ImageFile +import random +import json +import tqdm +import cv2 +import traceback +ImageFile.LOAD_TRUNCATED_IMAGES = True # to avoid "OSError: image file is truncated" +from torch.utils.data import Dataset + +BEDLAM_DIR = "./train_data/bedlam/data" +SMPLX_DIR = "./pretrained_models/human_model_files" +ANNOT_DIR = "./train_data/bedlam/data/annots" + +class BEDLAMSeg(Dataset): + def __init__(self, + split='training', + training=False, + img_size=512, + root_dir=BEDLAM_DIR, + force_build_dataset=0, + n_iter=None, + subsample=1, + extension='png', + crops=[0], + flip=1, + res=None, + n=-1, + ): + super().__init__() + + self.name = 'bedlam' + self.annotations_dir = ANNOT_DIR + self.training = training + self.img_size = img_size + self.n_iter = n_iter + self.subsample = subsample + self.crops = crops # 0 is the default + self.flip = flip # 1 by default + + assert split in ['training', 'validation'] + + self.root_dir = root_dir + self.split = split + self.image_dir = os.path.join(self.root_dir, f"{self.split}") + self.mask_dir = os.path.join(self.root_dir, "masks") + + self.annot_file = os.path.join(self.annotations_dir, f"{self.name}_{split}.pkl") + # self.force_build_dataset = force_build_dataset + + self.annots = None + # if self.force_build_dataset or not os.path.isfile(self.annot_file): + # self.annots = self.build_dataset() + if self.annots is None: + with open(self.annot_file, 'rb') as f: + self.annots = pickle.load(f) + + self.imagenames = list(self.annots.keys()) + self.imagenames.sort() + + + def __len__(self): + return len(self.imagenames) + + def __repr__(self): + return f"{self.name}: split={self.split} - N={len(self.imagenames)}" + + def save_smplx_params_to_json(self, person, focal, princpt, valid_area_ratio, save_path): + smplx_params = {} + smplx_params["betas"] = person['smplx_shape'].reshape(11).tolist() + smplx_params["root_pose"] = person["smplx_root_pose"].reshape(3).tolist() + smplx_params['body_pose'] = person["smplx_body_pose"].tolist() + smplx_params['jaw_pose'] = person["smplx_jaw_pose"].reshape(3).tolist() + smplx_params['leye_pose'] = person["smplx_leye_pose"].reshape(3).tolist() + smplx_params['reye_pose'] = person["smplx_reye_pose"].reshape(3).tolist() + smplx_params['lhand_pose'] = person["smplx_left_hand_pose"].tolist() + smplx_params['rhand_pose'] = person["smplx_right_hand_pose"].tolist() + smplx_params['trans'] = person["smplx_transl"].reshape(3).tolist() + smplx_params['expr'] = np.zeros(10).tolist() + + smplx_params['focal'] = focal + smplx_params['princpt'] = princpt + smplx_params['valid_area_ratio'] = valid_area_ratio + +# for k, v in smplx_params.items(): +# print(k, np.array(v).shape) + + with open(save_path, 'w') as fp: + json.dump(smplx_params, fp) + + return smplx_params + + + def center_crop_and_resize(self, img, mask, princpt_x, princpt_y, fx, fy, area_ratio): + + ys, xs = np.where(mask > 0) + + if len(xs) == 0 or len(ys) == 0: + print(f"unvalid: no body") + return None + + x_min = np.min(xs) + x_max = np.max(xs) + y_min = np.min(ys) + y_max = np.max(ys) + + center_x, center_y = img.shape[1]//2, img.shape[0]//2 + + half_w = max(abs(center_x - x_min), abs(center_x - x_max)) + half_h = max(abs(center_y - y_min), abs(center_y - y_max)) + ratio = half_h / half_w + ratio_standard= 1280 / 
720 + if ratio >= 1: + if ratio >= ratio_standard: + half_w = round(half_h / ratio_standard) + else: + half_h = round(half_w * ratio_standard) + else: + print(f"unvalid: h/w ratio:{ratio}") + return None + + assert abs(half_h / half_w - ratio_standard) < 0.1 + offset_x = center_x - half_w + offset_y = center_y - half_h + + new_img = img[offset_y: offset_y + 2*half_h, offset_x: offset_x + 2*half_w] + new_mask = mask[offset_y: offset_y + 2*half_h, offset_x: offset_x + 2*half_w] + + princpt_x -= offset_x + princpt_y -= offset_y + + new_img = cv2.resize(new_img, (img.shape[1], img.shape[0]), interpolation=cv2.INTER_CUBIC) + new_mask = cv2.resize(new_mask, (mask.shape[1], mask.shape[0]), interpolation=cv2.INTER_NEAREST) + + valid_area_ratio = np.sum(new_mask > 0) / new_mask.shape[0] / new_mask.shape[1] + if valid_area_ratio < area_ratio: + print(f"unvalid: area ratio:{valid_area_ratio}") + return None + + scale = img.shape[0] / 2. / half_h + + fx *= scale + princpt_x *= scale + + fy *= scale + princpt_y *= scale + + new_img = np.concatenate([new_img, new_mask[:, :, None]], axis=2) + + return new_img, princpt_x, princpt_y, fx, fy, valid_area_ratio + + + def __getitem__(self, idx): + imagename = self.imagenames[idx] + annot = self.annots[imagename].copy() + annot['imagename'] = imagename + + # find appropriate image_dir + img_path = os.path.join(self.image_dir, imagename) + + mask_path = os.path.join(self.mask_dir, imagename.replace("_6fps/png/", "/masks/").replace(".png", "_env.png")) + assert os.path.exists(mask_path), f"mask_path:{mask_path}" + + # Original size + real_width, real_height = annot['size'] + + # preprocessing the image + img_pil = Image.open(img_path) + if img_pil.mode != 'RGB': + img_pil = img_pil.convert('RGB') + + # BEDLAM specifc to correct the rotation issue + # https://github.com/pixelite1201/BEDLAM/blob/ebf8bb14a43de46cc74dca4c00c13e571b325726/visualize_ground_truth.py#L183 + if self.name == 'bedlam' and 'closeup' in imagename and self.split != 'test': + img_pil = img_pil.rotate(-90, expand=True) + + + # preprocessing the image + mask_pil = Image.open(mask_path) + # if mask_pil.mode != 'RGB': + # img_pil = img_pil.convert('RGB') + + # BEDLAM specifc to correct the rotation issue + # https://github.com/pixelite1201/BEDLAM/blob/ebf8bb14a43de46cc74dca4c00c13e571b325726/visualize_ground_truth.py#L183 + if self.name == 'bedlam' and 'closeup' in imagename and self.split != 'test': + mask_pil = mask_pil.rotate(-90, expand=True) + + img = np.asarray(img_pil) + mask = np.asarray(mask_pil) + mask = 255 * (mask < 1).astype(np.uint8) + + princpt, focal = annot['princpt'], annot['focal'] + + ret = self.center_crop_and_resize(img, mask, princpt[0], princpt[1], focal[0], focal[1], area_ratio=0.05) + if ret is None: + print(f"unvalid, img_path:{img_path}") + return + + new_img, princpt_x, princpt_y, fx, fy, valid_area_ratio = ret + # print(new_img.shape, princpt_x, princpt_y, fx, fy, "ori", princpt, focal) + + princpt = princpt_x, princpt_y + focal = fx, fy + + + save_path = img_path.replace("/png/", "/png_post/") + save_vis_path = img_path.replace("/png/", "/png_post_vis/") + save_smplx_path = img_path.replace("/png/", "/smplx/").replace(".png", ".json") + + os.makedirs(os.path.dirname(save_path), exist_ok=True) + os.makedirs(os.path.dirname(save_vis_path), exist_ok=True) + os.makedirs(os.path.dirname(save_smplx_path), exist_ok=True) + + cv2.imwrite(save_path, new_img[:, :, (2, 1, 0, 3)]) + cv2.imwrite(save_vis_path, np.hstack([np.concatenate([img, 255 * np.ones_like(mask[:,:, None])], 
axis=2), new_img])[:, :, (2, 1, 0, 3)]) + + # Humans + _humans = annot['humans'].copy() + # annot.pop('humans') + # if self.training: + humans = [hum for hum in _humans if hum['smplx_transl'][-1] > 0.01] # the person should be in front of the camera + # else: + # humans = [hum for hum in _humans] + + assert len(humans) == 1 + + self.save_smplx_params_to_json(humans[0], focal, princpt, valid_area_ratio, save_smplx_path) + + # return img_array, annot + +def create_annots(splits=['validation', 'training']): + for split in splits: + dataset = BEDLAM(split=split, force_build_dataset=1) + + +def visualize(split='validation', i=1500, res=None, extension='png', training=0, img_size=800): + # training - 52287 for a closeup + from utils import render_meshes, demo_color + model_neutral = smplx.create(SMPLX_DIR, 'smplx', gender='neutral', num_betas=11, use_pca=False, flat_hand_mean=True) + + dataset = BEDLAM(split=split, force_build_dataset=0, + res=res, extension=extension, + training=training, + img_size=img_size, + ) + print(dataset) + + img_array, annot = dataset.__getitem__(i) + + img_array = denormalize_rgb(img_array, imagenet_normalization=1) + verts_list = [] + for person in annot['humans']: + with torch.no_grad(): + verts = model_neutral( + global_orient=torch.from_numpy(person['smplx_root_pose']).reshape(1,-1), + body_pose=torch.from_numpy(person['smplx_body_pose']).reshape(1,-1), + jaw_pose=torch.from_numpy(person['smplx_jaw_pose']).reshape(1,-1), + leye_pose=torch.from_numpy(person['smplx_leye_pose']).reshape(1,-1), + reye_pose=torch.from_numpy(person['smplx_reye_pose']).reshape(1,-1), + left_hand_pose=torch.from_numpy(person['smplx_left_hand_pose']).reshape(1,-1), + right_hand_pose=torch.from_numpy(person['smplx_right_hand_pose']).reshape(1,-1), + betas=torch.from_numpy(person['smplx_shape']).reshape(1,-1), + transl=torch.from_numpy(person['smplx_transl']).reshape(1,-1), + ).vertices.cpu().numpy().reshape(-1,3) + verts_list.append(verts) + faces_list = [model_neutral.faces for _ in annot['humans']] + _color = [demo_color[0] for _ in annot['humans']] + pred_rend_array = render_meshes(img_array.copy(), + verts_list, + faces_list, + {'focal': annot['K'][[0,1],[0,1]], + 'princpt': annot['K'][[0,1],[-1,-1]]}, + alpha=0.7, + color=_color) + img_array = np.concatenate([img_array, np.asarray(pred_rend_array)], 1) + + fn = f"{dataset.name}_{split}_{i}.jpg" + Image.fromarray(img_array).save(fn) + print(f"open {fn}") + return 1 + + + +if __name__ == "__main__": + # exec(sys.argv[1]) + dataset = BEDLAMSeg(split="validation") + for i in tqdm.tqdm(range(len(dataset))): + try: + dataset.__getitem__(i) + except: + traceback.print_exc() + continue \ No newline at end of file diff --git a/LHM/datasets/cam_utils.py b/LHM/datasets/cam_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..70653ae2a7f612714f729c73f45e826109b7e0ff --- /dev/null +++ b/LHM/datasets/cam_utils.py @@ -0,0 +1,205 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import math +import torch + +""" +R: (N, 3, 3) +T: (N, 3) +E: (N, 4, 4) +vector: (N, 3) +""" + + +def compose_extrinsic_R_T(R: torch.Tensor, T: torch.Tensor): + """ + Compose the standard form extrinsic matrix from R and T. + Batched I/O. + """ + RT = torch.cat((R, T.unsqueeze(-1)), dim=-1) + return compose_extrinsic_RT(RT) + + +def compose_extrinsic_RT(RT: torch.Tensor): + """ + Compose the standard form extrinsic matrix from RT. + Batched I/O. + """ + return torch.cat([ + RT, + torch.tensor([[[0, 0, 0, 1]]], dtype=RT.dtype, device=RT.device).repeat(RT.shape[0], 1, 1) + ], dim=1) + + +def decompose_extrinsic_R_T(E: torch.Tensor): + """ + Decompose the standard extrinsic matrix into R and T. + Batched I/O. + """ + RT = decompose_extrinsic_RT(E) + return RT[:, :, :3], RT[:, :, 3] + + +def decompose_extrinsic_RT(E: torch.Tensor): + """ + Decompose the standard extrinsic matrix into RT. + Batched I/O. + """ + return E[:, :3, :] + + +def camera_normalization_objaverse(normed_dist_to_center, poses: torch.Tensor, ret_transform: bool = False): + assert normed_dist_to_center is not None + pivotal_pose = compose_extrinsic_RT(poses[:1]) + dist_to_center = pivotal_pose[:, :3, 3].norm(dim=-1, keepdim=True).item() \ + if normed_dist_to_center == 'auto' else normed_dist_to_center + + # compute camera norm (new version) + canonical_camera_extrinsics = torch.tensor([[ + [1, 0, 0, 0], + [0, 0, -1, -dist_to_center], + [0, 1, 0, 0], + [0, 0, 0, 1], + ]], dtype=torch.float32) + pivotal_pose_inv = torch.inverse(pivotal_pose) + camera_norm_matrix = torch.bmm(canonical_camera_extrinsics, pivotal_pose_inv) + + # normalize all views + poses = compose_extrinsic_RT(poses) + poses = torch.bmm(camera_norm_matrix.repeat(poses.shape[0], 1, 1), poses) + poses = decompose_extrinsic_RT(poses) + + if ret_transform: + return poses, camera_norm_matrix.squeeze(dim=0) + return poses + + +def get_normalized_camera_intrinsics(intrinsics: torch.Tensor): + """ + intrinsics: (N, 3, 2), [[fx, fy], [cx, cy], [width, height]] + Return batched fx, fy, cx, cy + """ + fx, fy = intrinsics[:, 0, 0], intrinsics[:, 0, 1] + cx, cy = intrinsics[:, 1, 0], intrinsics[:, 1, 1] + width, height = intrinsics[:, 2, 0], intrinsics[:, 2, 1] + fx, fy = fx / width, fy / height + cx, cy = cx / width, cy / height + return fx, fy, cx, cy + + +def build_camera_principle(RT: torch.Tensor, intrinsics: torch.Tensor): + """ + RT: (N, 3, 4) + intrinsics: (N, 3, 2), [[fx, fy], [cx, cy], [width, height]] + """ + fx, fy, cx, cy = get_normalized_camera_intrinsics(intrinsics) + return torch.cat([ + RT.reshape(-1, 12), + fx.unsqueeze(-1), fy.unsqueeze(-1), cx.unsqueeze(-1), cy.unsqueeze(-1), + ], dim=-1) + + +def build_camera_standard(RT: torch.Tensor, intrinsics: torch.Tensor): + """ + RT: (N, 3, 4) + intrinsics: (N, 3, 2), [[fx, fy], [cx, cy], [width, height]] + """ + E = compose_extrinsic_RT(RT) + fx, fy, cx, cy = get_normalized_camera_intrinsics(intrinsics) + I = torch.stack([ + torch.stack([fx, torch.zeros_like(fx), cx], dim=-1), + torch.stack([torch.zeros_like(fy), fy, cy], dim=-1), + torch.tensor([[0, 0, 1]], dtype=torch.float32, device=RT.device).repeat(RT.shape[0], 1), + ], dim=1) + return torch.cat([ + E.reshape(-1, 16), + I.reshape(-1, 9), + ], dim=-1) + + +def center_looking_at_camera_pose( + camera_position: torch.Tensor, look_at: torch.Tensor = None, up_world: torch.Tensor = None, + device: torch.device = torch.device('cpu'), + ): + """ + camera_position: (M, 3) + look_at: (3) + up_world: (3) + return: (M, 3, 4) + """ + # by default, looking at the 
origin and world up is pos-z + if look_at is None: + look_at = torch.tensor([0, 0, 0], dtype=torch.float32, device=device) + if up_world is None: + up_world = torch.tensor([0, 0, 1], dtype=torch.float32, device=device) + look_at = look_at.unsqueeze(0).repeat(camera_position.shape[0], 1) + up_world = up_world.unsqueeze(0).repeat(camera_position.shape[0], 1) + + z_axis = camera_position - look_at + z_axis = z_axis / z_axis.norm(dim=-1, keepdim=True) + x_axis = torch.cross(up_world, z_axis) + x_axis = x_axis / x_axis.norm(dim=-1, keepdim=True) + y_axis = torch.cross(z_axis, x_axis) + y_axis = y_axis / y_axis.norm(dim=-1, keepdim=True) + extrinsics = torch.stack([x_axis, y_axis, z_axis, camera_position], dim=-1) + return extrinsics + + +def surrounding_views_linspace(n_views: int, radius: float = 2.0, height: float = 0.8, device: torch.device = torch.device('cpu')): + """ + n_views: number of surrounding views + radius: camera dist to center + height: height of the camera + return: (M, 3, 4) + """ + assert n_views > 0 + assert radius > 0 + + theta = torch.linspace(-torch.pi / 2, 3 * torch.pi / 2, n_views, device=device) + projected_radius = math.sqrt(radius ** 2 - height ** 2) + x = torch.cos(theta) * projected_radius + y = torch.sin(theta) * projected_radius + z = torch.full((n_views,), height, device=device) + + camera_positions = torch.stack([x, y, z], dim=1) + extrinsics = center_looking_at_camera_pose(camera_positions, device=device) + + return extrinsics + + +def create_intrinsics( + f: float, + c: float = None, cx: float = None, cy: float = None, + w: float = 1., h: float = 1., + dtype: torch.dtype = torch.float32, + device: torch.device = torch.device('cpu'), + ): + """ + return: (3, 2) + """ + fx = fy = f + if c is not None: + assert cx is None and cy is None, "c and cx/cy cannot be used together" + cx = cy = c + else: + assert cx is not None and cy is not None, "cx/cy must be provided when c is not provided" + fx, fy, cx, cy, w, h = fx/w, fy/h, cx/w, cy/h, 1., 1. + intrinsics = torch.tensor([ + [fx, fy], + [cx, cy], + [w, h], + ], dtype=dtype, device=device) + return intrinsics diff --git a/LHM/datasets/mixer.py b/LHM/datasets/mixer.py new file mode 100644 index 0000000000000000000000000000000000000000..54a551c5fe3fefcf45997f47a61c12f00775400b --- /dev/null +++ b/LHM/datasets/mixer.py @@ -0,0 +1,120 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License");: +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
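For reference, a minimal usage sketch of the camera utilities in cam_utils.py above; the view count, focal length, principal point, and image size are illustrative assumptions rather than values from the repository:

    from LHM.datasets.cam_utils import (
        build_camera_standard, create_intrinsics, surrounding_views_linspace)

    # 16 poses orbiting the origin at radius 2.0 with camera height 0.8 -> (16, 3, 4)
    extrinsics = surrounding_views_linspace(n_views=16, radius=2.0, height=0.8)
    # normalized intrinsics in the [[fx, fy], [cx, cy], [w, h]] convention -> (3, 2)
    intrinsics = create_intrinsics(f=500.0, cx=256.0, cy=256.0, w=512.0, h=512.0)
    # per-view conditioning vector: 16 extrinsic + 9 intrinsic entries -> (16, 25)
    cameras = build_camera_standard(extrinsics, intrinsics.unsqueeze(0).repeat(16, 1, 1))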
+ + +import math +import pdb +from functools import partial + +import torch + +__all__ = ["MixerDataset"] + + +class MixerDataset(torch.utils.data.Dataset): + """Reference""" + + def __init__( + self, + split: str, + subsets: dict, + **dataset_kwargs, + ): + + self.subsets = [ + self._dataset_fn(subset, split)( + use_flame=subset["use_flame"], + src_head_size=subset.get("src_head_size", 448), + **dataset_kwargs, + ) + for subset in subsets + ] + self.virtual_lens = [ + math.ceil(subset_config["sample_rate"] * len(subset_obj)) + for subset_config, subset_obj in zip(subsets, self.subsets) + ] + + @staticmethod + def _dataset_fn(subset_config: dict, split: str): + name = subset_config["name"] + + dataset_cls = None + if name == "exavatar": + from .exavatar import ExAvatarDataset + + dataset_cls = ExAvatarDataset + elif name == "humman": + from .humman import HuMManDataset + + dataset_cls = HuMManDataset + elif name == "static_human": + from .static_human import StaticHumanDataset + + dataset_cls = StaticHumanDataset + elif name == "singleview_human": + from .singleview_human import SingleViewHumanDataset + + dataset_cls = SingleViewHumanDataset + elif name == "singleview_square_human": + from .singleview_square_human import SingleViewSquareHumanDataset + + dataset_cls = SingleViewSquareHumanDataset + elif name == "bedlam": + from .bedlam import BedlamDataset + + dataset_cls = BedlamDataset + elif name == "dna_human": + from .dna import DNAHumanDataset + + dataset_cls = DNAHumanDataset + elif name == "video_human": + from .video_human import VideoHumanDataset + + dataset_cls = VideoHumanDataset + elif name == "video_human_flame": + from .video_human_flame import VideoHumanFlameDataset + + dataset_cls = VideoHumanFlameDataset + elif name == "video_human_flame_dp": + from .video_human_flame_df import VideoHumanFlameDFDataset + + # add deepfashon random sample in video_human_flame + dataset_cls = VideoHumanFlameDFDataset + elif name == "objaverse": + from .objaverse import ObjaverseDataset + + dataset_cls = ObjaverseDataset + # elif name == 'mvimgnet': + # from .mvimgnet import MVImgNetDataset + # dataset_cls = MVImgNetDataset + else: + raise NotImplementedError(f"Dataset {name} not implemented") + + return partial( + dataset_cls, + root_dirs=subset_config["root_dirs"], + meta_path=subset_config["meta_path"][split], + ) + + def __len__(self): + return sum(self.virtual_lens) + + def __getitem__(self, idx): + subset_idx = 0 + virtual_idx = idx + while virtual_idx >= self.virtual_lens[subset_idx]: + virtual_idx -= self.virtual_lens[subset_idx] + subset_idx += 1 + real_idx = virtual_idx % len(self.subsets[subset_idx]) + return self.subsets[subset_idx][real_idx] diff --git a/LHM/launch.py b/LHM/launch.py new file mode 100644 index 0000000000000000000000000000000000000000..3d4d18f48fb0a7637eae5fef00d601b49f3de50f --- /dev/null +++ b/LHM/launch.py @@ -0,0 +1,35 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
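For illustration, a hypothetical subset configuration for the MixerDataset above. The field names (name, root_dirs, meta_path keyed by split, sample_rate, use_flame) mirror the mixer code; the split key and sampling rate are placeholders, not repository defaults, and the render/source resolutions follow the BedlamDataset test block shown earlier:

    from LHM.datasets.mixer import MixerDataset

    subsets = [
        {
            "name": "bedlam",
            "root_dirs": "./train_data/bedlam/data/",
            "meta_path": {"train": "./train_data/bedlam/data/annots/valid_list.json"},
            "sample_rate": 1.0,
            "use_flame": False,
        },
    ]
    dataset = MixerDataset(
        split="train",
        subsets=subsets,
        sample_side_views=3,
        render_image_res_low=384,
        render_image_res_high=384,
        render_region_size=(682, 384),
        source_image_res=384,
    )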
+import argparse +import pdb + +from LHM.runners import REGISTRY_RUNNERS + + +def main(): + + parser = argparse.ArgumentParser(description="OpenLRM launcher") + parser.add_argument("runner", type=str, help="Runner to launch") + args, unknown = parser.parse_known_args() + + if args.runner not in REGISTRY_RUNNERS: + raise ValueError("Runner {} not found".format(args.runner)) + + RunnerClass = REGISTRY_RUNNERS[args.runner] + with RunnerClass() as runner: + runner.run() + + +if __name__ == "__main__": + main() diff --git a/LHM/losses/__init__.py b/LHM/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4d2ba7ca8c7784b781b817a2e506482bd89bc511 --- /dev/null +++ b/LHM/losses/__init__.py @@ -0,0 +1,20 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .ball_loss import * +from .offset_loss import * +from .perceptual import * +from .pixelwise import * +from .tvloss import * diff --git a/LHM/losses/ball_loss.py b/LHM/losses/ball_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..f6b02cbaf554519f9398836fe766e12546a9a92a --- /dev/null +++ b/LHM/losses/ball_loss.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-10 19:08:35 +# @Function : ASAP loss +import pdb + +import torch +import torch.nn as nn + +__all__ = ["ASAP_Loss", "Heuristic_ASAP_Loss"] + + +class ASAP_Loss(nn.Module): + + def forward(self, scaling, r=1, **params): + """where r is the radius of the ball between max-axis and min-axis.""" + raise NotImplementedError( + "ASAP_Loss is not implemented yet in Inference version" + ) + + +class Heuristic_ASAP_Loss(nn.Module): + def __init__(self, group_dict, group_body_mapping): + super(Heuristic_ASAP_Loss, self).__init__() + + self.group_dict = group_dict # register weights fro different body parts + self.group_body_mapping = group_body_mapping # mapping of body parts to group + + def _heurisitic_loss(self, _ball_loss): + + _loss = 0.0 + for key in self.group_dict.keys(): + key_weights = self.group_dict[key] + group_mapping_idx = self.group_body_mapping[key] + _loss += key_weights * _ball_loss[:, group_mapping_idx].mean() + + return _loss + + def forward(self, scaling, r=5, **params): + """where r is the radius of the ball between max-axis and min-axis.""" + "human motion or rotation is very different in each body parts, for example, the head is more stable than the leg and hand, so we use heuristic_ball_loss" + + _scale = scaling + + _scale_min = torch.min(_scale, dim=-1)[0] + _scale_max = torch.max(_scale, dim=-1)[0] + + scale_ratio = _scale_max / (_scale_min + 1e-6) + + _ball_loss = torch.clamp(scale_ratio, min=r) - r + + return self._heurisitic_loss(_ball_loss) diff --git a/LHM/losses/offset_loss.py b/LHM/losses/offset_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..42bad90f2af18f9dc95b763f1de672a334e292d7 --- /dev/null +++ b/LHM/losses/offset_loss.py 
@@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-10 19:08:56 +# @Function : ACAP Loss +import pdb + +import torch +import torch.nn as nn +import torch.nn.functional as F + +__all__ = ["ACAP_Loss", "Heuristic_ACAP_Loss"] + + +class ACAP_Loss(nn.Module): + """As close as possibel loss""" + + def forward(self, offset, d=0.05625, **params): + """Empirically, where d is the thresold of distance points leave from 1.8/32 = 0.0562.""" + + offset_loss = torch.clamp(offset.norm(p=2, dim=-1), min=d) - d + + return offset_loss.mean() + + +class Heuristic_ACAP_Loss(nn.Module): + """As close as possibel loss""" + + def __init__(self, group_dict, group_body_mapping): + super(Heuristic_ACAP_Loss, self).__init__() + + self.group_dict = group_dict # register weights fro different body parts + self.group_body_mapping = group_body_mapping # mapping of body parts to group + + def _heurisitic_loss(self, _offset_loss): + + _loss = 0.0 + for key in self.group_dict.keys(): + key_weights = self.group_dict[key] + group_mapping_idx = self.group_body_mapping[key] + _loss += key_weights * _offset_loss[:, group_mapping_idx].mean() + + return _loss + + def forward(self, offset, d=0.05625, **params): + """Empirically, where d is the thresold of distance points leave from human prior model, 1.8/32 = 0.0562.""" + "human motion or rotation is very different in each body parts, for example, the head is more stable than the leg and hand, so we use heuristic_ball_loss" + + _offset_loss = torch.clamp(offset.norm(p=2, dim=-1), min=d) - d + + return self._heurisitic_loss(_offset_loss) diff --git a/LHM/losses/perceptual.py b/LHM/losses/perceptual.py new file mode 100644 index 0000000000000000000000000000000000000000..5eead0d1a207e1863598d3400a4a42bd40549114 --- /dev/null +++ b/LHM/losses/perceptual.py @@ -0,0 +1,70 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn + +__all__ = ['LPIPSLoss'] + + +class LPIPSLoss(nn.Module): + """ + Compute LPIPS loss between two images. + """ + + def __init__(self, device, prefech: bool = False): + super().__init__() + self.device = device + self.cached_models = {} + if prefech: + self.prefetch_models() + + def _get_model(self, model_name: str): + if model_name not in self.cached_models: + import warnings + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', category=UserWarning) + import lpips + _model = lpips.LPIPS(net=model_name, eval_mode=True, verbose=False).to(self.device) + _model = torch.compile(_model) + self.cached_models[model_name] = _model + return self.cached_models[model_name] + + def prefetch_models(self): + _model_names = ['alex', 'vgg'] + for model_name in _model_names: + self._get_model(model_name) + + def forward(self, x, y, is_training: bool = True): + """ + Assume images are 0-1 scaled and channel first. 
+ + Args: + x: [N, M, C, H, W] + y: [N, M, C, H, W] + is_training: whether to use VGG or AlexNet. + + Returns: + Mean-reduced LPIPS loss across batch. + """ + model_name = 'vgg' if is_training else 'alex' + loss_fn = self._get_model(model_name) + N, M, C, H, W = x.shape + x = x.reshape(N*M, C, H, W) + y = y.reshape(N*M, C, H, W) + image_loss = loss_fn(x, y, normalize=True).mean(dim=[1, 2, 3]) + batch_loss = image_loss.reshape(N, M).mean(dim=1) + all_loss = batch_loss.mean() + return all_loss diff --git a/LHM/losses/pixelwise.py b/LHM/losses/pixelwise.py new file mode 100644 index 0000000000000000000000000000000000000000..f936d9960041e49baf2ab1334e9639c219212ec2 --- /dev/null +++ b/LHM/losses/pixelwise.py @@ -0,0 +1,58 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn + +__all__ = ['PixelLoss'] + + +class PixelLoss(nn.Module): + """ + Pixel-wise loss between two images. + """ + + def __init__(self, option: str = 'mse'): + super().__init__() + self.loss_fn = self._build_from_option(option) + + @staticmethod + def _build_from_option(option: str, reduction: str = 'none'): + if option == 'mse': + return nn.MSELoss(reduction=reduction) + elif option == 'l1': + return nn.L1Loss(reduction=reduction) + else: + raise NotImplementedError(f'Unknown pixel loss option: {option}') + + @torch.compile + def forward(self, x, y): + """ + Assume images are channel first. + + Args: + x: [N, M, C, H, W] + y: [N, M, C, H, W] + + Returns: + Mean-reduced pixel loss across batch. + """ + N, M, C, H, W = x.shape + x = x.reshape(N*M, C, H, W) + y = y.reshape(N*M, C, H, W) + image_loss = self.loss_fn(x, y).mean(dim=[1, 2, 3]) + batch_loss = image_loss.reshape(N, M).mean(dim=1) + all_loss = batch_loss.mean() + return all_loss diff --git a/LHM/losses/tvloss.py b/LHM/losses/tvloss.py new file mode 100644 index 0000000000000000000000000000000000000000..77a13b69b6f9fcacc38940373bf8159b3cf61459 --- /dev/null +++ b/LHM/losses/tvloss.py @@ -0,0 +1,55 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn + +__all__ = ['TVLoss'] + + +class TVLoss(nn.Module): + """ + Total variance loss. + """ + + def __init__(self): + super().__init__() + + def numel_excluding_first_dim(self, x): + return x.numel() // x.shape[0] + + @torch.compile + def forward(self, x): + """ + Assume batched and channel first with inner sizes. 
+ + Args: + x: [N, M, C, H, W] + + Returns: + Mean-reduced TV loss with element-level scaling. + """ + N, M, C, H, W = x.shape + x = x.reshape(N*M, C, H, W) + diff_i = x[..., 1:, :] - x[..., :-1, :] + diff_j = x[..., :, 1:] - x[..., :, :-1] + div_i = self.numel_excluding_first_dim(diff_i) + div_j = self.numel_excluding_first_dim(diff_j) + tv_i = diff_i.pow(2).sum(dim=[1,2,3]) / div_i + tv_j = diff_j.pow(2).sum(dim=[1,2,3]) / div_j + tv = tv_i + tv_j + batch_tv = tv.reshape(N, M).mean(dim=1) + all_tv = batch_tv.mean() + return all_tv diff --git a/LHM/models/ESRGANer_utils.py b/LHM/models/ESRGANer_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..38aeb99005e2d8c75a3d9c60eec52701a41d944e --- /dev/null +++ b/LHM/models/ESRGANer_utils.py @@ -0,0 +1,482 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-1 17:39:52 +# @Function : Function to improve face quality when training. + +import math +import os +import queue +import sys + +sys.path.append("./") +import threading + +import cv2 +import numpy as np +import torch +from basicsr.utils.download_util import load_file_from_url +from torch.nn import functional as F + +ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +import pdb + +import torch +from basicsr.archs.rrdbnet_arch import RRDBNet + + +def avaliable_device(): + if torch.cuda.is_available(): + current_device_id = torch.cuda.current_device() + device = f"cuda:{current_device_id}" + else: + device = "cpu" + + return device + + +class RealESRGANer: + """A helper class for upsampling images with RealESRGAN. + + Args: + scale (int): Upsampling scale factor used in the networks. It is usually 2 or 4. + model_path (str): The path to the pretrained model. It can be urls (will first download it automatically). + model (nn.Module): The defined network. Default: None. + tile (int): As too large images result in the out of GPU memory issue, so this tile option will first crop + input images into tiles, and then process each of them. Finally, they will be merged into one image. + 0 denotes for do not use tile. Default: 0. + tile_pad (int): The pad size for each tile, to remove border artifacts. Default: 10. + pre_pad (int): Pad the input images to avoid border artifacts. Default: 10. + half (float): Whether to use half precision during inference. Default: False. + """ + + def __init__( + self, + scale, + model_path, + dni_weight=None, + model=None, + tile=0, + tile_pad=10, + pre_pad=10, + half=False, + device=None, + gpu_id=None, + ): + self.scale = scale + self.tile_size = tile + self.tile_pad = tile_pad + self.pre_pad = pre_pad + self.mod_scale = None + self.half = half + + # initialize model + if gpu_id: + self.device = ( + torch.device(f"cuda:{gpu_id}" if torch.cuda.is_available() else "cpu") + if device is None + else device + ) + else: + self.device = ( + torch.device("cuda" if torch.cuda.is_available() else "cpu") + if device is None + else device + ) + + if isinstance(model_path, list): + # dni + assert len(model_path) == len( + dni_weight + ), "model_path and dni_weight should have the save length." 
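+            # blend the two checkpoints parameter-wise via Deep Network
+            # Interpolation: dni_weight[0] * net_a + dni_weight[1] * net_b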
+ loadnet = self.dni(model_path[0], model_path[1], dni_weight) + else: + # if the model_path starts with https, it will first download models to the folder: weights + if model_path.startswith("https://"): + model_path = load_file_from_url( + url=model_path, + model_dir=os.path.join(ROOT_DIR, "weights"), + progress=True, + file_name=None, + ) + loadnet = torch.load(model_path, map_location=torch.device("cpu")) + + # prefer to use params_ema + if "params_ema" in loadnet: + keyname = "params_ema" + else: + keyname = "params" + model.load_state_dict(loadnet[keyname], strict=True) + + model.eval() + self.model = model.to(self.device) + if self.half: + self.model = self.model.half() + + def dni(self, net_a, net_b, dni_weight, key="params", loc="cpu"): + """Deep network interpolation. + + ``Paper: Deep Network Interpolation for Continuous Imagery Effect Transition`` + """ + net_a = torch.load(net_a, map_location=torch.device(loc)) + net_b = torch.load(net_b, map_location=torch.device(loc)) + for k, v_a in net_a[key].items(): + net_a[key][k] = dni_weight[0] * v_a + dni_weight[1] * net_b[key][k] + return net_a + + def pre_process(self, img): + """Pre-process, such as pre-pad and mod pad, so that the images can be divisible""" + img = torch.from_numpy(np.transpose(img, (2, 0, 1))).float() + self.img = img.unsqueeze(0).to(self.device) + if self.half: + self.img = self.img.half() + + # pre_pad + if self.pre_pad != 0: + self.img = F.pad(self.img, (0, self.pre_pad, 0, self.pre_pad), "reflect") + # mod pad for divisible borders + if self.scale == 2: + self.mod_scale = 2 + elif self.scale == 1: + self.mod_scale = 4 + if self.mod_scale is not None: + self.mod_pad_h, self.mod_pad_w = 0, 0 + _, _, h, w = self.img.size() + if h % self.mod_scale != 0: + self.mod_pad_h = self.mod_scale - h % self.mod_scale + if w % self.mod_scale != 0: + self.mod_pad_w = self.mod_scale - w % self.mod_scale + self.img = F.pad( + self.img, (0, self.mod_pad_w, 0, self.mod_pad_h), "reflect" + ) + + def process(self): + # model inference + self.output = self.model(self.img) + + def tile_process(self): + """It will first crop input images to tiles, and then process each tile. + Finally, all the processed tiles are merged into one images. 
+ + Modified from: https://github.com/ata4/esrgan-launcher + """ + batch, channel, height, width = self.img.shape + output_height = height * self.scale + output_width = width * self.scale + output_shape = (batch, channel, output_height, output_width) + + # start with black image + self.output = self.img.new_zeros(output_shape) + tiles_x = math.ceil(width / self.tile_size) + tiles_y = math.ceil(height / self.tile_size) + + # loop over all tiles + for y in range(tiles_y): + for x in range(tiles_x): + # extract tile from input image + ofs_x = x * self.tile_size + ofs_y = y * self.tile_size + # input tile area on total image + input_start_x = ofs_x + input_end_x = min(ofs_x + self.tile_size, width) + input_start_y = ofs_y + input_end_y = min(ofs_y + self.tile_size, height) + + # input tile area on total image with padding + input_start_x_pad = max(input_start_x - self.tile_pad, 0) + input_end_x_pad = min(input_end_x + self.tile_pad, width) + input_start_y_pad = max(input_start_y - self.tile_pad, 0) + input_end_y_pad = min(input_end_y + self.tile_pad, height) + + # input tile dimensions + input_tile_width = input_end_x - input_start_x + input_tile_height = input_end_y - input_start_y + tile_idx = y * tiles_x + x + 1 + input_tile = self.img[ + :, + :, + input_start_y_pad:input_end_y_pad, + input_start_x_pad:input_end_x_pad, + ] + + # upscale tile + try: + with torch.no_grad(): + output_tile = self.model(input_tile) + except RuntimeError as error: + print("Error", error) + print(f"\tTile {tile_idx}/{tiles_x * tiles_y}") + + # output tile area on total image + output_start_x = input_start_x * self.scale + output_end_x = input_end_x * self.scale + output_start_y = input_start_y * self.scale + output_end_y = input_end_y * self.scale + + # output tile area without padding + output_start_x_tile = (input_start_x - input_start_x_pad) * self.scale + output_end_x_tile = output_start_x_tile + input_tile_width * self.scale + output_start_y_tile = (input_start_y - input_start_y_pad) * self.scale + output_end_y_tile = output_start_y_tile + input_tile_height * self.scale + + # put tile into output image + self.output[ + :, :, output_start_y:output_end_y, output_start_x:output_end_x + ] = output_tile[ + :, + :, + output_start_y_tile:output_end_y_tile, + output_start_x_tile:output_end_x_tile, + ] + + def post_process(self): + # remove extra pad + if self.mod_scale is not None: + _, _, h, w = self.output.size() + self.output = self.output[ + :, + :, + 0 : h - self.mod_pad_h * self.scale, + 0 : w - self.mod_pad_w * self.scale, + ] + # remove prepad + if self.pre_pad != 0: + _, _, h, w = self.output.size() + self.output = self.output[ + :, + :, + 0 : h - self.pre_pad * self.scale, + 0 : w - self.pre_pad * self.scale, + ] + return self.output + + @torch.no_grad() + def enhance(self, img, outscale=None, alpha_upsampler="realesrgan"): + h_input, w_input = img.shape[0:2] + # img: numpy + img = img.astype(np.float32) + if np.max(img) > 256: # 16-bit image + max_range = 65535 + print("\tInput is a 16-bit image") + else: + max_range = 255 + img = img / max_range + if len(img.shape) == 2: # gray image + img_mode = "L" + img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB) + elif img.shape[2] == 4: # RGBA image with alpha channel + img_mode = "RGBA" + alpha = img[:, :, 3] + img = img[:, :, 0:3] + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + if alpha_upsampler == "realesrgan": + alpha = cv2.cvtColor(alpha, cv2.COLOR_GRAY2RGB) + else: + img_mode = "RGB" + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + + # ------------------- process 
image (without the alpha channel) ------------------- # + self.pre_process(img) + if self.tile_size > 0: + self.tile_process() + else: + self.process() + output_img = self.post_process() + output_img = output_img.data.squeeze().float().cpu().clamp_(0, 1).numpy() + output_img = np.transpose(output_img[[2, 1, 0], :, :], (1, 2, 0)) + if img_mode == "L": + output_img = cv2.cvtColor(output_img, cv2.COLOR_BGR2GRAY) + + # ------------------- process the alpha channel if necessary ------------------- # + if img_mode == "RGBA": + if alpha_upsampler == "realesrgan": + self.pre_process(alpha) + if self.tile_size > 0: + self.tile_process() + else: + self.process() + output_alpha = self.post_process() + output_alpha = ( + output_alpha.data.squeeze().float().cpu().clamp_(0, 1).numpy() + ) + output_alpha = np.transpose(output_alpha[[2, 1, 0], :, :], (1, 2, 0)) + output_alpha = cv2.cvtColor(output_alpha, cv2.COLOR_BGR2GRAY) + else: # use the cv2 resize for alpha channel + h, w = alpha.shape[0:2] + output_alpha = cv2.resize( + alpha, + (w * self.scale, h * self.scale), + interpolation=cv2.INTER_LINEAR, + ) + + # merge the alpha channel + output_img = cv2.cvtColor(output_img, cv2.COLOR_BGR2BGRA) + output_img[:, :, 3] = output_alpha + + # ------------------------------ return ------------------------------ # + if max_range == 65535: # 16-bit image + output = (output_img * 65535.0).round().astype(np.uint16) + else: + output = (output_img * 255.0).round().astype(np.uint8) + + if outscale is not None and outscale != float(self.scale): + output = cv2.resize( + output, + ( + int(w_input * outscale), + int(h_input * outscale), + ), + interpolation=cv2.INTER_LANCZOS4, + ) + + return output, img_mode + + +class PrefetchReader(threading.Thread): + """Prefetch images. + + Args: + img_list (list[str]): A image list of image paths to be read. + num_prefetch_queue (int): Number of prefetch queue. 
+ """ + + def __init__(self, img_list, num_prefetch_queue): + super().__init__() + self.que = queue.Queue(num_prefetch_queue) + self.img_list = img_list + + def run(self): + for img_path in self.img_list: + img = cv2.imread(img_path, cv2.IMREAD_UNCHANGED) + self.que.put(img) + + self.que.put(None) + + def __next__(self): + next_item = self.que.get() + if next_item is None: + raise StopIteration + return next_item + + def __iter__(self): + return self + + +class IOConsumer(threading.Thread): + + def __init__(self, opt, que, qid): + super().__init__() + self._queue = que + self.qid = qid + self.opt = opt + + def run(self): + while True: + msg = self._queue.get() + if isinstance(msg, str) and msg == "quit": + break + + output = msg["output"] + save_path = msg["save_path"] + cv2.imwrite(save_path, output) + print(f"IO worker {self.qid} is done.") + + +class ESRGANEasyModel: + def __init__( + self, model_path="./pretrained_models/RealESRGAN_x4plus.pth", face_enhance=True + ): + model = RRDBNet( + num_in_ch=3, + num_out_ch=3, + num_feat=64, + num_block=23, + num_grow_ch=32, + scale=4, + ) + self.net_scale = 4 + file_url = [ + "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth" + ] + if model_path is None: + model_path = os.path.join("weights", args.model_name + ".pth") + if not os.path.isfile(model_path): + ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + for url in file_url: + # model_path will be updated + model_path = load_file_from_url( + url=url, + model_dir=os.path.join("./", "pretrained_models"), + progress=True, + file_name=None, + ) + self.face_enhance = face_enhance + + dni_weight = None + + self.upsampler = RealESRGANer( + scale=self.net_scale, + model_path=model_path, + dni_weight=dni_weight, + model=model, + tile=0, + tile_pad=10, + pre_pad=0, + half=False, + ) + + self.upsampler.model.to(avaliable_device()) + if face_enhance: # Use GFPGAN for face enhancement + from gfpgan import GFPGANer + + self.face_enhancer = GFPGANer( + model_path="https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.3.pth", + upscale=4, + arch="clean", + channel_multiplier=2, + bg_upsampler=self.upsampler, + ) + else: + self.face_enhancer = None + + @torch.no_grad() + def __call__(self, img): + if self.face_enhancer is not None: + _, _, output = self.face_enhancer.enhance( + img, has_aligned=False, only_center_face=False, paste_back=True + ) + else: + output, _ = self.upsampler.enhance(img, outscale=4) + return output + + def __repr__(self): + return f"ESRGANEasyModel:\n {self.upsampler}" + + +if __name__ == "__main__": + + import time + + model = ESRGANEasyModel(face_enhance=True) + input_img = "./debug/face_debug/gt/head_gt_0.png" + + img_np = cv2.imread(input_img) + set1 = [ + "./debug/face_debug/gt/head_gt_0.png", + "./debug/face_debug/gt/head_gt_1.png", + "./debug/face_debug/gt/head_gt_2.png", + "./debug/face_debug/gt/head_gt_3.png", + "./debug/face_debug/gt/head_gt_4.png", + "./debug/face_debug/gt/head_gt_5.png", + "./debug/face_debug/gt/head_gt_6.png", + "./debug/face_debug/gt/head_gt_0.png", + ] + img_set1 = [cv2.imread(img_path) for img_path in set1] + + sr = model(img_set1[0]) + + s0 = time.time() + for img in img_set1: + + sr = model(img) diff --git a/LHM/models/__init__.py b/LHM/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ec3d982835761e2fbd7c4c557d964e852be23510 --- /dev/null +++ b/LHM/models/__init__.py @@ -0,0 +1,30 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .modeling_human_lrm import ( + ModelHumanLRM, + ModelHumanLRMSapdinoBodyHeadSD3, + ModelHumanLRMSapdinoBodyHeadSD3_5, + ModelHumanLRMSapdinoSD3, + ModelHumanLRMSD3, +) + +model_dict = { + "human_lrm": ModelHumanLRM, + "human_lrm_sd3": ModelHumanLRMSD3, + "human_lrm_sapdino_sd3": ModelHumanLRMSapdinoSD3, + "human_lrm_sapdino_bh_sd3": ModelHumanLRMSapdinoBodyHeadSD3, + "human_lrm_sapdino_bh_sd3_5": ModelHumanLRMSapdinoBodyHeadSD3_5, +} diff --git a/LHM/models/__pycache__/ESRGANer_utils.cpython-310.pyc b/LHM/models/__pycache__/ESRGANer_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2cfd24013195f64992751c18f5ecc5cbb1242b5e Binary files /dev/null and b/LHM/models/__pycache__/ESRGANer_utils.cpython-310.pyc differ diff --git a/LHM/models/__pycache__/__init__.cpython-310.pyc b/LHM/models/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..75db055bfde003fe8e78043389cecc456c79c9e8 Binary files /dev/null and b/LHM/models/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/models/__pycache__/arcface_utils.cpython-310.pyc b/LHM/models/__pycache__/arcface_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a722fa489400aca9f8d8544512c22c8219f001d7 Binary files /dev/null and b/LHM/models/__pycache__/arcface_utils.cpython-310.pyc differ diff --git a/LHM/models/__pycache__/embedder.cpython-310.pyc b/LHM/models/__pycache__/embedder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a13e2049fb3577b250594d7b76f148bf07a64509 Binary files /dev/null and b/LHM/models/__pycache__/embedder.cpython-310.pyc differ diff --git a/LHM/models/__pycache__/modeling_human_lrm.cpython-310.pyc b/LHM/models/__pycache__/modeling_human_lrm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4366e9060678346c1044d8a1d9a00ccd496dbc6 Binary files /dev/null and b/LHM/models/__pycache__/modeling_human_lrm.cpython-310.pyc differ diff --git a/LHM/models/__pycache__/transformer.cpython-310.pyc b/LHM/models/__pycache__/transformer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b397320cec01f53dced38e0092745a9b53d290d9 Binary files /dev/null and b/LHM/models/__pycache__/transformer.cpython-310.pyc differ diff --git a/LHM/models/__pycache__/transformer_dit.cpython-310.pyc b/LHM/models/__pycache__/transformer_dit.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46070a6fbcfc3d32c42775797ae3ae037a64c20d Binary files /dev/null and b/LHM/models/__pycache__/transformer_dit.cpython-310.pyc differ diff --git a/LHM/models/__pycache__/utils.cpython-310.pyc b/LHM/models/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e5d516f11edfb6f701828a78fd7415e193b1ba3 Binary files /dev/null and b/LHM/models/__pycache__/utils.cpython-310.pyc differ diff --git a/LHM/models/arcface_utils.py 
b/LHM/models/arcface_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d4656df7d972ac3aef35ab33215e049856481006 --- /dev/null +++ b/LHM/models/arcface_utils.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-10 17:38:29 +# @Function : Arc-Similarity Loss +import sys + +sys.path.append(".") + +import pdb +from copy import deepcopy + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.nn.parallel import DataParallel, DistributedDataParallel + + +def conv3x3(inplanes, outplanes, stride=1): + """A simple wrapper for 3x3 convolution with padding. + + Args: + inplanes (int): Channel number of inputs. + outplanes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. + """ + return nn.Conv2d( + inplanes, outplanes, kernel_size=3, stride=stride, padding=1, bias=False + ) + + +class BasicBlock(nn.Module): + """Basic residual block used in the ResNetArcFace architecture. + + Args: + inplanes (int): Channel number of inputs. + planes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. + downsample (nn.Module): The downsample module. Default: None. + """ + + expansion = 1 # output channel expansion ratio + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = nn.BatchNorm2d(planes) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = nn.BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class IRBlock(nn.Module): + """Improved residual block (IR Block) used in the ResNetArcFace architecture. + + Args: + inplanes (int): Channel number of inputs. + planes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. + downsample (nn.Module): The downsample module. Default: None. + use_se (bool): Whether use the SEBlock (squeeze and excitation block). Default: True. + """ + + expansion = 1 # output channel expansion ratio + + def __init__(self, inplanes, planes, stride=1, downsample=None, use_se=True): + super(IRBlock, self).__init__() + self.bn0 = nn.BatchNorm2d(inplanes) + self.conv1 = conv3x3(inplanes, inplanes) + self.bn1 = nn.BatchNorm2d(inplanes) + self.prelu = nn.PReLU() + self.conv2 = conv3x3(inplanes, planes, stride) + self.bn2 = nn.BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + self.use_se = use_se + if self.use_se: + self.se = SEBlock(planes) + + def forward(self, x): + residual = x + out = self.bn0(x) + out = self.conv1(out) + out = self.bn1(out) + out = self.prelu(out) + + out = self.conv2(out) + out = self.bn2(out) + if self.use_se: + out = self.se(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.prelu(out) + + return out + + +class Bottleneck(nn.Module): + """Bottleneck block used in the ResNetArcFace architecture. + + Args: + inplanes (int): Channel number of inputs. + planes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. 
+ downsample (nn.Module): The downsample module. Default: None. + """ + + expansion = 4 # output channel expansion ratio + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + self.conv2 = nn.Conv2d( + planes, planes, kernel_size=3, stride=stride, padding=1, bias=False + ) + self.bn2 = nn.BatchNorm2d(planes) + self.conv3 = nn.Conv2d( + planes, planes * self.expansion, kernel_size=1, bias=False + ) + self.bn3 = nn.BatchNorm2d(planes * self.expansion) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class SEBlock(nn.Module): + """The squeeze-and-excitation block (SEBlock) used in the IRBlock. + + Args: + channel (int): Channel number of inputs. + reduction (int): Channel reduction ration. Default: 16. + """ + + def __init__(self, channel, reduction=16): + super(SEBlock, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool2d( + 1 + ) # pool to 1x1 without spatial information + self.fc = nn.Sequential( + nn.Linear(channel, channel // reduction), + nn.PReLU(), + nn.Linear(channel // reduction, channel), + nn.Sigmoid(), + ) + + def forward(self, x): + b, c, _, _ = x.size() + y = self.avg_pool(x).view(b, c) + y = self.fc(y).view(b, c, 1, 1) + return x * y + + +class ResNetArcFace(nn.Module): + """ArcFace with ResNet architectures. + + Ref: ArcFace: Additive Angular Margin Loss for Deep Face Recognition. + + Args: + block (str): Block used in the ArcFace architecture. + layers (tuple(int)): Block numbers in each layer. + use_se (bool): Whether use the SEBlock (squeeze and excitation block). Default: True. 
+ """ + + def __init__( + self, + block="IRBlock", + layers=[2, 2, 2, 2], + use_se=False, + pretrain_model="./pretrained_models/arcface_resnet18.pth", + ): + if block == "IRBlock": + block = IRBlock + self.inplanes = 64 + self.use_se = use_se + super(ResNetArcFace, self).__init__() + + self.conv1 = nn.Conv2d(1, 64, kernel_size=3, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.prelu = nn.PReLU() + self.maxpool = nn.MaxPool2d(kernel_size=2, stride=2) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2) + self.bn4 = nn.BatchNorm2d(512) + self.dropout = nn.Dropout() + self.fc5 = nn.Linear(512 * 8 * 8, 512) + self.bn5 = nn.BatchNorm1d(512) + + # initialization + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.xavier_normal_(m.weight) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.xavier_normal_(m.weight) + nn.init.constant_(m.bias, 0) + + if pretrain_model is not None: + self.load_network(self, pretrain_model, strict=True, param_key=None) + else: + raise ValueError("Please specify the pretrain model path.") + + self.freeze() + + @staticmethod + def load_network(net, load_path, strict=True, param_key=None): + + def get_bare_model(net): + if isinstance(net, (DataParallel, DistributedDataParallel)): + net = net.module + return net + + net = get_bare_model(net) + load_net = torch.load(load_path, map_location=lambda storage, loc: storage) + if param_key is not None: + if param_key not in load_net and "params" in load_net: + param_key = "params" + load_net = load_net[param_key] + # remove unnecessary 'module.' 
+ for k, v in deepcopy(load_net).items(): + if k.startswith("module."): + load_net[k[7:]] = v + load_net.pop(k) + ret = net.load_state_dict(load_net, strict=strict) + print(ret) + + def _make_layer(self, block, planes, num_blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d( + self.inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias=False, + ), + nn.BatchNorm2d(planes * block.expansion), + ) + layers = [] + layers.append( + block(self.inplanes, planes, stride, downsample, use_se=self.use_se) + ) + self.inplanes = planes + for _ in range(1, num_blocks): + layers.append(block(self.inplanes, planes, use_se=self.use_se)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.prelu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + x = self.bn4(x) + x = self.dropout(x) + x = x.view(x.size(0), -1) + x = self.fc5(x) + x = self.bn5(x) + + return x + + def freeze(self): + self.eval() + for param in self.parameters(): + param.requires_grad = False + + +if __name__ == "__main__": + model = ResNetArcFace() + model.cuda() + model.eval() + # model.eval() + + set1 = [ + "./debug/face_debug/gt/head_gt_0.png", + "./debug/face_debug/gt/head_gt_1.png", + "./debug/face_debug/gt/head_gt_2.png", + "./debug/face_debug/gt/head_gt_3.png", + "./debug/face_debug/gt/head_gt_4.png", + "./debug/face_debug/gt/head_gt_5.png", + "./debug/face_debug/gt/head_gt_6.png", + ] + import cv2 + + img_set1 = [cv2.imread(img_path, cv2.IMREAD_GRAYSCALE) for img_path in set1] + + F1_list = [] + + f1_scores = [] + for img in img_set1: + img = torch.from_numpy(img).unsqueeze(0).unsqueeze(0) / 255.0 + img = img.cuda() + F1 = model(img) + F1_list.append(F1) + for i in range(len(F1_list)): + for j in range(len(F1_list)): + f1_scores.append(F.l1_loss(F1_list[i], F1_list[j])) + + print(len(f1_scores)) + + f1_scores = torch.tensor(f1_scores) + print(f1_scores) + f1_scores = f1_scores.view(len(F1_list), len(F1_list)) + print(f1_scores) diff --git a/LHM/models/block.py b/LHM/models/block.py new file mode 100644 index 0000000000000000000000000000000000000000..efaf23232362829fac07b2bb30daeca8176f9f9e --- /dev/null +++ b/LHM/models/block.py @@ -0,0 +1,124 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch.nn as nn + +from .modulate import ModLN + + +class BasicBlock(nn.Module): + """ + Transformer block that is in its simplest form. + Designed for PF-LRM architecture. 
+ """ + # Block contains a self-attention layer and an MLP + def __init__(self, inner_dim: int, num_heads: int, eps: float, + attn_drop: float = 0., attn_bias: bool = False, + mlp_ratio: float = 4., mlp_drop: float = 0.): + super().__init__() + self.norm1 = nn.LayerNorm(inner_dim, eps=eps) + self.self_attn = nn.MultiheadAttention( + embed_dim=inner_dim, num_heads=num_heads, + dropout=attn_drop, bias=attn_bias, batch_first=True) + self.norm2 = nn.LayerNorm(inner_dim, eps=eps) + self.mlp = nn.Sequential( + nn.Linear(inner_dim, int(inner_dim * mlp_ratio)), + nn.GELU(), + nn.Dropout(mlp_drop), + nn.Linear(int(inner_dim * mlp_ratio), inner_dim), + nn.Dropout(mlp_drop), + ) + + def forward(self, x): + # x: [N, L, D] + before_sa = self.norm1(x) + x = x + self.self_attn(before_sa, before_sa, before_sa, need_weights=False)[0] + x = x + self.mlp(self.norm2(x)) + return x + + +class ConditionBlock(nn.Module): + """ + Transformer block that takes in a cross-attention condition. + Designed for SparseLRM architecture. + """ + # Block contains a cross-attention layer, a self-attention layer, and an MLP + def __init__(self, inner_dim: int, cond_dim: int, num_heads: int, eps: float, + attn_drop: float = 0., attn_bias: bool = False, + mlp_ratio: float = 4., mlp_drop: float = 0.): + super().__init__() + self.norm1 = nn.LayerNorm(inner_dim, eps=eps) + self.cross_attn = nn.MultiheadAttention( + embed_dim=inner_dim, num_heads=num_heads, kdim=cond_dim, vdim=cond_dim, + dropout=attn_drop, bias=attn_bias, batch_first=True) + self.norm2 = nn.LayerNorm(inner_dim, eps=eps) + self.self_attn = nn.MultiheadAttention( + embed_dim=inner_dim, num_heads=num_heads, + dropout=attn_drop, bias=attn_bias, batch_first=True) + self.norm3 = nn.LayerNorm(inner_dim, eps=eps) + self.mlp = nn.Sequential( + nn.Linear(inner_dim, int(inner_dim * mlp_ratio)), + nn.GELU(), + nn.Dropout(mlp_drop), + nn.Linear(int(inner_dim * mlp_ratio), inner_dim), + nn.Dropout(mlp_drop), + ) + + def forward(self, x, cond): + # x: [N, L, D] + # cond: [N, L_cond, D_cond] + x = x + self.cross_attn(self.norm1(x), cond, cond, need_weights=False)[0] + before_sa = self.norm2(x) + x = x + self.self_attn(before_sa, before_sa, before_sa, need_weights=False)[0] + x = x + self.mlp(self.norm3(x)) + return x + + +class ConditionModulationBlock(nn.Module): + """ + Transformer block that takes in a cross-attention condition and another modulation vector applied to sub-blocks. + Designed for raw LRM architecture. 
+ """ + # Block contains a cross-attention layer, a self-attention layer, and an MLP + def __init__(self, inner_dim: int, cond_dim: int, mod_dim: int, num_heads: int, eps: float, + attn_drop: float = 0., attn_bias: bool = False, + mlp_ratio: float = 4., mlp_drop: float = 0.): + super().__init__() + self.norm1 = ModLN(inner_dim, mod_dim, eps) + self.cross_attn = nn.MultiheadAttention( + embed_dim=inner_dim, num_heads=num_heads, kdim=cond_dim, vdim=cond_dim, + dropout=attn_drop, bias=attn_bias, batch_first=True) + self.norm2 = ModLN(inner_dim, mod_dim, eps) + self.self_attn = nn.MultiheadAttention( + embed_dim=inner_dim, num_heads=num_heads, + dropout=attn_drop, bias=attn_bias, batch_first=True) + self.norm3 = ModLN(inner_dim, mod_dim, eps) + self.mlp = nn.Sequential( + nn.Linear(inner_dim, int(inner_dim * mlp_ratio)), + nn.GELU(), + nn.Dropout(mlp_drop), + nn.Linear(int(inner_dim * mlp_ratio), inner_dim), + nn.Dropout(mlp_drop), + ) + + def forward(self, x, cond, mod): + # x: [N, L, D] + # cond: [N, L_cond, D_cond] + # mod: [N, D_mod] + x = x + self.cross_attn(self.norm1(x, mod), cond, cond, need_weights=False)[0] + before_sa = self.norm2(x, mod) + x = x + self.self_attn(before_sa, before_sa, before_sa, need_weights=False)[0] + x = x + self.mlp(self.norm3(x, mod)) + return x diff --git a/LHM/models/discriminator.py b/LHM/models/discriminator.py new file mode 100644 index 0000000000000000000000000000000000000000..31412138e21ae6fd689ab494a1036abf88d71662 --- /dev/null +++ b/LHM/models/discriminator.py @@ -0,0 +1,120 @@ +""" +Ported from Paella +""" + +import torch +from torch import nn + +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.models.modeling_utils import ModelMixin + +import functools +# import torch.nn as nn +from taming.modules.util import ActNorm + + +# Discriminator model ported from Paella https://github.com/dome272/Paella/blob/main/src_distributed/vqgan.py +class Discriminator(ModelMixin, ConfigMixin): + @register_to_config + def __init__(self, in_channels=3, cond_channels=0, hidden_channels=512, depth=6): + super().__init__() + d = max(depth - 3, 3) + layers = [ + nn.utils.spectral_norm( + nn.Conv2d(in_channels, hidden_channels // (2**d), kernel_size=3, stride=2, padding=1) + ), + nn.LeakyReLU(0.2), + ] + for i in range(depth - 1): + c_in = hidden_channels // (2 ** max((d - i), 0)) + c_out = hidden_channels // (2 ** max((d - 1 - i), 0)) + layers.append(nn.utils.spectral_norm(nn.Conv2d(c_in, c_out, kernel_size=3, stride=2, padding=1))) + layers.append(nn.InstanceNorm2d(c_out)) + layers.append(nn.LeakyReLU(0.2)) + self.encoder = nn.Sequential(*layers) + self.shuffle = nn.Conv2d( + (hidden_channels + cond_channels) if cond_channels > 0 else hidden_channels, 1, kernel_size=1 + ) + # self.logits = nn.Sigmoid() + + + def forward(self, x, cond=None): + x = self.encoder(x) + if cond is not None: + cond = cond.view( + cond.size(0), + cond.size(1), + 1, + 1, + ).expand(-1, -1, x.size(-2), x.size(-1)) + x = torch.cat([x, cond], dim=1) + x = self.shuffle(x) + # x = self.logits(x) + return x + + + + +def weights_init(m): + classname = m.__class__.__name__ + if classname.find('Conv') != -1: + nn.init.normal_(m.weight.data, 0.0, 0.02) + elif classname.find('BatchNorm') != -1: + nn.init.normal_(m.weight.data, 1.0, 0.02) + nn.init.constant_(m.bias.data, 0) + + +class NLayerDiscriminator(nn.Module): + """Defines a PatchGAN discriminator as in Pix2Pix + --> see https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/models/networks.py + """ 
+ def __init__(self, input_nc=3, ndf=64, n_layers=3, use_actnorm=False): + """Construct a PatchGAN discriminator + Parameters: + input_nc (int) -- the number of channels in input images + ndf (int) -- the number of filters in the last conv layer + n_layers (int) -- the number of conv layers in the discriminator + norm_layer -- normalization layer + """ + super(NLayerDiscriminator, self).__init__() + if not use_actnorm: + # norm_layer = nn.BatchNorm2d + norm_layer = nn.InstanceNorm2d + else: + norm_layer = ActNorm + if type(norm_layer) == functools.partial: # no need to use bias as BatchNorm2d has affine parameters + # use_bias = norm_layer.func != nn.BatchNorm2d + use_bias = norm_layer.func != nn.InstanceNorm2d + else: + # use_bias = norm_layer != nn.BatchNorm2d + use_bias = norm_layer != nn.InstanceNorm2d + + kw = 4 + padw = 1 + sequence = [nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), nn.LeakyReLU(0.2, False)] + nf_mult = 1 + nf_mult_prev = 1 + for n in range(1, n_layers): # gradually increase the number of filters + nf_mult_prev = nf_mult + nf_mult = min(2 ** n, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=2, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, False) + ] + + nf_mult_prev = nf_mult + nf_mult = min(2 ** n_layers, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=1, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, False) + ] + + sequence += [ + nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)] # output 1 channel prediction map + self.main = nn.Sequential(*sequence) + + def forward(self, input): + """Standard forward.""" + return self.main(input) diff --git a/LHM/models/embedder.py b/LHM/models/embedder.py new file mode 100644 index 0000000000000000000000000000000000000000..379721cf5c146cb29aca7695cff8558b0d23673c --- /dev/null +++ b/LHM/models/embedder.py @@ -0,0 +1,37 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn + + +class CameraEmbedder(nn.Module): + """ + Embed camera features to a high-dimensional vector. + + Reference: + DiT: https://github.com/facebookresearch/DiT/blob/main/models.py#L27 + """ + def __init__(self, raw_dim: int, embed_dim: int): + super().__init__() + self.mlp = nn.Sequential( + nn.Linear(raw_dim, embed_dim), + nn.SiLU(), + nn.Linear(embed_dim, embed_dim), + ) + + @torch.compile + def forward(self, x): + return self.mlp(x) diff --git a/LHM/models/encoders/__init__.py b/LHM/models/encoders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7a1e39e624fbf5d970acc4b05714f8b9f70830c6 --- /dev/null +++ b/LHM/models/encoders/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Empty diff --git a/LHM/models/encoders/__pycache__/__init__.cpython-310.pyc b/LHM/models/encoders/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b09815cd5aefd7fadc8b81ddcd057e1b4178e9b Binary files /dev/null and b/LHM/models/encoders/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/models/encoders/__pycache__/dinov2_fusion_wrapper.cpython-310.pyc b/LHM/models/encoders/__pycache__/dinov2_fusion_wrapper.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b3c9887e1e5f5935deb40461b3ee3640d55a729 Binary files /dev/null and b/LHM/models/encoders/__pycache__/dinov2_fusion_wrapper.cpython-310.pyc differ diff --git a/LHM/models/encoders/__pycache__/sapiens_warpper.cpython-310.pyc b/LHM/models/encoders/__pycache__/sapiens_warpper.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8258d4134d92e1e7509151887538eb201f95ba8a Binary files /dev/null and b/LHM/models/encoders/__pycache__/sapiens_warpper.cpython-310.pyc differ diff --git a/LHM/models/encoders/dino_wrapper.py b/LHM/models/encoders/dino_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..cb82225eb23c3c9b362a4f962c40addd18fbe5fc --- /dev/null +++ b/LHM/models/encoders/dino_wrapper.py @@ -0,0 +1,68 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn +from transformers import ViTImageProcessor, ViTModel +from accelerate.logging import get_logger + + +logger = get_logger(__name__) + + +class DinoWrapper(nn.Module): + """ + Dino v1 wrapper using huggingface transformer implementation. 
+ """ + def __init__(self, model_name: str, freeze: bool = True, encoder_feat_dim: int = 384): + super().__init__() + self.model, self.processor = self._build_dino(model_name) + if freeze: + self._freeze() + + @torch.compile + def forward_model(self, inputs): + return self.model(**inputs, interpolate_pos_encoding=True) + + def forward(self, image): + # image: [N, C, H, W], on cpu + # RGB image with [0,1] scale and properly sized + inputs = self.processor(images=image, return_tensors="pt", do_rescale=False, do_resize=False).to(self.model.device) + # This resampling of positional embedding uses bicubic interpolation + outputs = self.forward_model(inputs) + last_hidden_states = outputs.last_hidden_state + return last_hidden_states + + def _freeze(self): + logger.warning(f"======== Freezing DinoWrapper ========") + self.model.eval() + for name, param in self.model.named_parameters(): + param.requires_grad = False + + @staticmethod + def _build_dino(model_name: str, proxy_error_retries: int = 3, proxy_error_cooldown: int = 5): + import requests + try: + model = ViTModel.from_pretrained(model_name, add_pooling_layer=False) + processor = ViTImageProcessor.from_pretrained(model_name) + return model, processor + except requests.exceptions.ProxyError as err: + if proxy_error_retries > 0: + print(f"Huggingface ProxyError: Retrying ({proxy_error_retries}) in {proxy_error_cooldown} seconds...") + import time + time.sleep(proxy_error_cooldown) + return DinoWrapper._build_dino(model_name, proxy_error_retries - 1, proxy_error_cooldown) + else: + raise err diff --git a/LHM/models/encoders/dinov2/__init__.py b/LHM/models/encoders/dinov2/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7a1e39e624fbf5d970acc4b05714f8b9f70830c6 --- /dev/null +++ b/LHM/models/encoders/dinov2/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Empty diff --git a/LHM/models/encoders/dinov2/__pycache__/__init__.cpython-310.pyc b/LHM/models/encoders/dinov2/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..702e9badf855a6652022ca9d14100aec007da065 Binary files /dev/null and b/LHM/models/encoders/dinov2/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/hub/__init__.py b/LHM/models/encoders/dinov2/hub/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b88da6bf80be92af00b72dfdb0a806fa64a7a2d9 --- /dev/null +++ b/LHM/models/encoders/dinov2/hub/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
diff --git a/LHM/models/encoders/dinov2/hub/__pycache__/__init__.cpython-310.pyc b/LHM/models/encoders/dinov2/hub/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5a95a43b82ceef1ecc6a268b490bca1c2c693a9 Binary files /dev/null and b/LHM/models/encoders/dinov2/hub/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/hub/__pycache__/backbones.cpython-310.pyc b/LHM/models/encoders/dinov2/hub/__pycache__/backbones.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..006821ad89164ec3ff5dd5b77bef8e81be528b50 Binary files /dev/null and b/LHM/models/encoders/dinov2/hub/__pycache__/backbones.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/hub/__pycache__/utils.cpython-310.pyc b/LHM/models/encoders/dinov2/hub/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3bcd18f1b1d2b92fc9f520f4ab8ad175e8cfb068 Binary files /dev/null and b/LHM/models/encoders/dinov2/hub/__pycache__/utils.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/hub/backbones.py b/LHM/models/encoders/dinov2/hub/backbones.py new file mode 100644 index 0000000000000000000000000000000000000000..2fd8c4010204da1f1e413db66d24a87e2a39a358 --- /dev/null +++ b/LHM/models/encoders/dinov2/hub/backbones.py @@ -0,0 +1,166 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from enum import Enum +from typing import Union + +import torch + +from .utils import _DINOV2_BASE_URL, _make_dinov2_model_name + + +class Weights(Enum): + LVD142M = "LVD142M" + + +def _make_dinov2_model( + *, + arch_name: str = "vit_large", + img_size: int = 518, + patch_size: int = 14, + init_values: float = 1.0, + ffn_layer: str = "mlp", + block_chunks: int = 0, + num_register_tokens: int = 0, + interpolate_antialias: bool = False, + interpolate_offset: float = 0.1, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.LVD142M, + **kwargs, +): + from ..models import vision_transformer as vits + + if isinstance(weights, str): + try: + weights = Weights[weights] + except KeyError: + raise AssertionError(f"Unsupported weights: {weights}") + + model_base_name = _make_dinov2_model_name(arch_name, patch_size) + vit_kwargs = dict( + img_size=img_size, + patch_size=patch_size, + init_values=init_values, + ffn_layer=ffn_layer, + block_chunks=block_chunks, + num_register_tokens=num_register_tokens, + interpolate_antialias=interpolate_antialias, + interpolate_offset=interpolate_offset, + ) + vit_kwargs.update(**kwargs) + model = vits.__dict__[arch_name](**vit_kwargs) + + if pretrained: + model_full_name = _make_dinov2_model_name(arch_name, patch_size, num_register_tokens) + url = _DINOV2_BASE_URL + f"/{model_base_name}/{model_full_name}_pretrain.pth" + state_dict = torch.hub.load_state_dict_from_url(url, map_location="cpu") + # ********** Modified by Zexin He in 2023-2024 ********** + state_dict = {k: v for k, v in state_dict.items() if 'mask_token' not in k} # DDP concern + if vit_kwargs.get("modulation_dim") is not None: + state_dict = { + k.replace('norm1', 'norm1.norm').replace('norm2', 'norm2.norm'): v + for k, v in state_dict.items() + } + model.load_state_dict(state_dict, strict=False) + else: + model.load_state_dict(state_dict, strict=True) + # ******************************************************** + + return model + + 
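+# A usage sketch for the factory above; the named wrappers that follow are thin presets
+# over it. This assumes no pretrained download is wanted (pass pretrained=True to fetch
+# the LVD-142M weights from _DINOV2_BASE_URL) and uses the forward_features keys that
+# the hub classifiers below rely on:
+#
+#   backbone = _make_dinov2_model(arch_name="vit_base", pretrained=False)
+#   out = backbone.forward_features(torch.rand(1, 3, 518, 518))
+#   cls_token = out["x_norm_clstoken"]        # [1, embed_dim]
+#   patch_tokens = out["x_norm_patchtokens"]  # [1, (518 // 14) ** 2, embed_dim]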
+def dinov2_vits14(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-S/14 model (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model(arch_name="vit_small", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitb14(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-B/14 model (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model(arch_name="vit_base", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitl14(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-L/14 model (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model(arch_name="vit_large", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitg14(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-g/14 model (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model( + arch_name="vit_giant2", + ffn_layer="swiglufused", + weights=weights, + pretrained=pretrained, + **kwargs, + ) + + +def dinov2_vits14_reg(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-S/14 model with registers (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model( + arch_name="vit_small", + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitb14_reg(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-B/14 model with registers (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model( + arch_name="vit_base", + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitl14_reg(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-L/14 model with registers (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model( + arch_name="vit_large", + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitg14_reg(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-g/14 model with registers (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model( + arch_name="vit_giant2", + ffn_layer="swiglufused", + weights=weights, + pretrained=pretrained, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) diff --git a/LHM/models/encoders/dinov2/hub/classifiers.py b/LHM/models/encoders/dinov2/hub/classifiers.py new file mode 100644 index 0000000000000000000000000000000000000000..3f0841efa80ab3d564cd320d61da254af182606b --- /dev/null +++ b/LHM/models/encoders/dinov2/hub/classifiers.py @@ -0,0 +1,268 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
+ +from enum import Enum +from typing import Union + +import torch +import torch.nn as nn + +from .backbones import _make_dinov2_model +from .utils import _DINOV2_BASE_URL, _make_dinov2_model_name + + +class Weights(Enum): + IMAGENET1K = "IMAGENET1K" + + +def _make_dinov2_linear_classification_head( + *, + arch_name: str = "vit_large", + patch_size: int = 14, + embed_dim: int = 1024, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + num_register_tokens: int = 0, + **kwargs, +): + if layers not in (1, 4): + raise AssertionError(f"Unsupported number of layers: {layers}") + if isinstance(weights, str): + try: + weights = Weights[weights] + except KeyError: + raise AssertionError(f"Unsupported weights: {weights}") + + linear_head = nn.Linear((1 + layers) * embed_dim, 1_000) + + if pretrained: + model_base_name = _make_dinov2_model_name(arch_name, patch_size) + model_full_name = _make_dinov2_model_name(arch_name, patch_size, num_register_tokens) + layers_str = str(layers) if layers == 4 else "" + url = _DINOV2_BASE_URL + f"/{model_base_name}/{model_full_name}_linear{layers_str}_head.pth" + state_dict = torch.hub.load_state_dict_from_url(url, map_location="cpu") + linear_head.load_state_dict(state_dict, strict=True) + + return linear_head + + +class _LinearClassifierWrapper(nn.Module): + def __init__(self, *, backbone: nn.Module, linear_head: nn.Module, layers: int = 4): + super().__init__() + self.backbone = backbone + self.linear_head = linear_head + self.layers = layers + + def forward(self, x): + if self.layers == 1: + x = self.backbone.forward_features(x) + cls_token = x["x_norm_clstoken"] + patch_tokens = x["x_norm_patchtokens"] + # fmt: off + linear_input = torch.cat([ + cls_token, + patch_tokens.mean(dim=1), + ], dim=1) + # fmt: on + elif self.layers == 4: + x = self.backbone.get_intermediate_layers(x, n=4, return_class_token=True) + # fmt: off + linear_input = torch.cat([ + x[0][1], + x[1][1], + x[2][1], + x[3][1], + x[3][0].mean(dim=1), + ], dim=1) + # fmt: on + else: + assert False, f"Unsupported number of layers: {self.layers}" + return self.linear_head(linear_input) + + +def _make_dinov2_linear_classifier( + *, + arch_name: str = "vit_large", + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + num_register_tokens: int = 0, + interpolate_antialias: bool = False, + interpolate_offset: float = 0.1, + **kwargs, +): + backbone = _make_dinov2_model( + arch_name=arch_name, + pretrained=pretrained, + num_register_tokens=num_register_tokens, + interpolate_antialias=interpolate_antialias, + interpolate_offset=interpolate_offset, + **kwargs, + ) + + embed_dim = backbone.embed_dim + patch_size = backbone.patch_size + linear_head = _make_dinov2_linear_classification_head( + arch_name=arch_name, + patch_size=patch_size, + embed_dim=embed_dim, + layers=layers, + pretrained=pretrained, + weights=weights, + num_register_tokens=num_register_tokens, + ) + + return _LinearClassifierWrapper(backbone=backbone, linear_head=linear_head, layers=layers) + + +def dinov2_vits14_lc( + *, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + **kwargs, +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-S/14 backbone (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. 
+ """ + return _make_dinov2_linear_classifier( + arch_name="vit_small", + layers=layers, + pretrained=pretrained, + weights=weights, + **kwargs, + ) + + +def dinov2_vitb14_lc( + *, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + **kwargs, +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-B/14 backbone (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_base", + layers=layers, + pretrained=pretrained, + weights=weights, + **kwargs, + ) + + +def dinov2_vitl14_lc( + *, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + **kwargs, +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-L/14 backbone (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_large", + layers=layers, + pretrained=pretrained, + weights=weights, + **kwargs, + ) + + +def dinov2_vitg14_lc( + *, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + **kwargs, +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-g/14 backbone (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_giant2", + layers=layers, + ffn_layer="swiglufused", + pretrained=pretrained, + weights=weights, + **kwargs, + ) + + +def dinov2_vits14_reg_lc( + *, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.IMAGENET1K, **kwargs +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-S/14 backbone with registers (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_small", + layers=layers, + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitb14_reg_lc( + *, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.IMAGENET1K, **kwargs +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-B/14 backbone with registers (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_base", + layers=layers, + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitl14_reg_lc( + *, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.IMAGENET1K, **kwargs +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-L/14 backbone with registers (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_large", + layers=layers, + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitg14_reg_lc( + *, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.IMAGENET1K, **kwargs +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-g/14 backbone with registers (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. 
+ """ + return _make_dinov2_linear_classifier( + arch_name="vit_giant2", + layers=layers, + ffn_layer="swiglufused", + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) diff --git a/LHM/models/encoders/dinov2/hub/depth/__init__.py b/LHM/models/encoders/dinov2/hub/depth/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..91716e58ab6158d814df8c653644d9af4c7be65c --- /dev/null +++ b/LHM/models/encoders/dinov2/hub/depth/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .decode_heads import BNHead, DPTHead +from .encoder_decoder import DepthEncoderDecoder diff --git a/LHM/models/encoders/dinov2/hub/depth/decode_heads.py b/LHM/models/encoders/dinov2/hub/depth/decode_heads.py new file mode 100644 index 0000000000000000000000000000000000000000..f455accad38fec6ecdd53460233a564c34f434da --- /dev/null +++ b/LHM/models/encoders/dinov2/hub/depth/decode_heads.py @@ -0,0 +1,747 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import copy +from functools import partial +import math +import warnings + +import torch +import torch.nn as nn + +from .ops import resize + + +# XXX: (Untested) replacement for mmcv.imdenormalize() +def _imdenormalize(img, mean, std, to_bgr=True): + import numpy as np + + mean = mean.reshape(1, -1).astype(np.float64) + std = std.reshape(1, -1).astype(np.float64) + img = (img * std) + mean + if to_bgr: + img = img[::-1] + return img + + +class DepthBaseDecodeHead(nn.Module): + """Base class for BaseDecodeHead. + + Args: + in_channels (List): Input channels. + channels (int): Channels after modules, before conv_depth. + conv_layer (nn.Module): Conv layers. Default: None. + act_layer (nn.Module): Activation layers. Default: nn.ReLU. + loss_decode (dict): Config of decode loss. + Default: (). + sampler (dict|None): The config of depth map sampler. + Default: None. + align_corners (bool): align_corners argument of F.interpolate. + Default: False. + min_depth (int): Min depth in dataset setting. + Default: 1e-3. + max_depth (int): Max depth in dataset setting. + Default: None. + norm_layer (dict|None): Norm layers. + Default: None. + classify (bool): Whether predict depth in a cls.-reg. manner. + Default: False. + n_bins (int): The number of bins used in cls. step. + Default: 256. + bins_strategy (str): The discrete strategy used in cls. step. + Default: 'UD'. + norm_strategy (str): The norm strategy on cls. probability + distribution. Default: 'linear' + scale_up (str): Whether predict depth in a scale-up manner. + Default: False. 
+ """ + + def __init__( + self, + in_channels, + conv_layer=None, + act_layer=nn.ReLU, + channels=96, + loss_decode=(), + sampler=None, + align_corners=False, + min_depth=1e-3, + max_depth=None, + norm_layer=None, + classify=False, + n_bins=256, + bins_strategy="UD", + norm_strategy="linear", + scale_up=False, + ): + super(DepthBaseDecodeHead, self).__init__() + + self.in_channels = in_channels + self.channels = channels + self.conf_layer = conv_layer + self.act_layer = act_layer + self.loss_decode = loss_decode + self.align_corners = align_corners + self.min_depth = min_depth + self.max_depth = max_depth + self.norm_layer = norm_layer + self.classify = classify + self.n_bins = n_bins + self.scale_up = scale_up + + if self.classify: + assert bins_strategy in ["UD", "SID"], "Support bins_strategy: UD, SID" + assert norm_strategy in ["linear", "softmax", "sigmoid"], "Support norm_strategy: linear, softmax, sigmoid" + + self.bins_strategy = bins_strategy + self.norm_strategy = norm_strategy + self.softmax = nn.Softmax(dim=1) + self.conv_depth = nn.Conv2d(channels, n_bins, kernel_size=3, padding=1, stride=1) + else: + self.conv_depth = nn.Conv2d(channels, 1, kernel_size=3, padding=1, stride=1) + + self.relu = nn.ReLU() + self.sigmoid = nn.Sigmoid() + + def forward(self, inputs, img_metas): + """Placeholder of forward function.""" + pass + + def forward_train(self, img, inputs, img_metas, depth_gt): + """Forward function for training. + Args: + inputs (list[Tensor]): List of multi-level img features. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `depth/datasets/pipelines/formatting.py:Collect`. + depth_gt (Tensor): GT depth + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + depth_pred = self.forward(inputs, img_metas) + losses = self.losses(depth_pred, depth_gt) + + log_imgs = self.log_images(img[0], depth_pred[0], depth_gt[0], img_metas[0]) + losses.update(**log_imgs) + + return losses + + def forward_test(self, inputs, img_metas): + """Forward function for testing. + Args: + inputs (list[Tensor]): List of multi-level img features. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `depth/datasets/pipelines/formatting.py:Collect`. + + Returns: + Tensor: Output depth map. 
+ """ + return self.forward(inputs, img_metas) + + def depth_pred(self, feat): + """Prediction each pixel.""" + if self.classify: + logit = self.conv_depth(feat) + + if self.bins_strategy == "UD": + bins = torch.linspace(self.min_depth, self.max_depth, self.n_bins, device=feat.device) + elif self.bins_strategy == "SID": + bins = torch.logspace(self.min_depth, self.max_depth, self.n_bins, device=feat.device) + + # following Adabins, default linear + if self.norm_strategy == "linear": + logit = torch.relu(logit) + eps = 0.1 + logit = logit + eps + logit = logit / logit.sum(dim=1, keepdim=True) + elif self.norm_strategy == "softmax": + logit = torch.softmax(logit, dim=1) + elif self.norm_strategy == "sigmoid": + logit = torch.sigmoid(logit) + logit = logit / logit.sum(dim=1, keepdim=True) + + output = torch.einsum("ikmn,k->imn", [logit, bins]).unsqueeze(dim=1) + + else: + if self.scale_up: + output = self.sigmoid(self.conv_depth(feat)) * self.max_depth + else: + output = self.relu(self.conv_depth(feat)) + self.min_depth + return output + + def losses(self, depth_pred, depth_gt): + """Compute depth loss.""" + loss = dict() + depth_pred = resize( + input=depth_pred, size=depth_gt.shape[2:], mode="bilinear", align_corners=self.align_corners, warning=False + ) + if not isinstance(self.loss_decode, nn.ModuleList): + losses_decode = [self.loss_decode] + else: + losses_decode = self.loss_decode + for loss_decode in losses_decode: + if loss_decode.loss_name not in loss: + loss[loss_decode.loss_name] = loss_decode(depth_pred, depth_gt) + else: + loss[loss_decode.loss_name] += loss_decode(depth_pred, depth_gt) + return loss + + def log_images(self, img_path, depth_pred, depth_gt, img_meta): + import numpy as np + + show_img = copy.deepcopy(img_path.detach().cpu().permute(1, 2, 0)) + show_img = show_img.numpy().astype(np.float32) + show_img = _imdenormalize( + show_img, + img_meta["img_norm_cfg"]["mean"], + img_meta["img_norm_cfg"]["std"], + img_meta["img_norm_cfg"]["to_rgb"], + ) + show_img = np.clip(show_img, 0, 255) + show_img = show_img.astype(np.uint8) + show_img = show_img[:, :, ::-1] + show_img = show_img.transpose(0, 2, 1) + show_img = show_img.transpose(1, 0, 2) + + depth_pred = depth_pred / torch.max(depth_pred) + depth_gt = depth_gt / torch.max(depth_gt) + + depth_pred_color = copy.deepcopy(depth_pred.detach().cpu()) + depth_gt_color = copy.deepcopy(depth_gt.detach().cpu()) + + return {"img_rgb": show_img, "img_depth_pred": depth_pred_color, "img_depth_gt": depth_gt_color} + + +class BNHead(DepthBaseDecodeHead): + """Just a batchnorm.""" + + def __init__(self, input_transform="resize_concat", in_index=(0, 1, 2, 3), upsample=1, **kwargs): + super().__init__(**kwargs) + self.input_transform = input_transform + self.in_index = in_index + self.upsample = upsample + # self.bn = nn.SyncBatchNorm(self.in_channels) + if self.classify: + self.conv_depth = nn.Conv2d(self.channels, self.n_bins, kernel_size=1, padding=0, stride=1) + else: + self.conv_depth = nn.Conv2d(self.channels, 1, kernel_size=1, padding=0, stride=1) + + def _transform_inputs(self, inputs): + """Transform inputs for decoder. + Args: + inputs (list[Tensor]): List of multi-level img features. 
+ Returns: + Tensor: The transformed inputs + """ + + if "concat" in self.input_transform: + inputs = [inputs[i] for i in self.in_index] + if "resize" in self.input_transform: + inputs = [ + resize( + input=x, + size=[s * self.upsample for s in inputs[0].shape[2:]], + mode="bilinear", + align_corners=self.align_corners, + ) + for x in inputs + ] + inputs = torch.cat(inputs, dim=1) + elif self.input_transform == "multiple_select": + inputs = [inputs[i] for i in self.in_index] + else: + inputs = inputs[self.in_index] + + return inputs + + def _forward_feature(self, inputs, img_metas=None, **kwargs): + """Forward function for feature maps before classifying each pixel with + ``self.cls_seg`` fc. + Args: + inputs (list[Tensor]): List of multi-level img features. + Returns: + feats (Tensor): A tensor of shape (batch_size, self.channels, + H, W) which is feature map for last layer of decoder head. + """ + # accept lists (for cls token) + inputs = list(inputs) + for i, x in enumerate(inputs): + if len(x) == 2: + x, cls_token = x[0], x[1] + if len(x.shape) == 2: + x = x[:, :, None, None] + cls_token = cls_token[:, :, None, None].expand_as(x) + inputs[i] = torch.cat((x, cls_token), 1) + else: + x = x[0] + if len(x.shape) == 2: + x = x[:, :, None, None] + inputs[i] = x + x = self._transform_inputs(inputs) + # feats = self.bn(x) + return x + + def forward(self, inputs, img_metas=None, **kwargs): + """Forward function.""" + output = self._forward_feature(inputs, img_metas=img_metas, **kwargs) + output = self.depth_pred(output) + return output + + +class ConvModule(nn.Module): + """A conv block that bundles conv/norm/activation layers. + + This block simplifies the usage of convolution layers, which are commonly + used with a norm layer (e.g., BatchNorm) and activation layer (e.g., ReLU). + It is based upon three build methods: `build_conv_layer()`, + `build_norm_layer()` and `build_activation_layer()`. + + Besides, we add some additional features in this module. + 1. Automatically set `bias` of the conv layer. + 2. Spectral norm is supported. + 3. More padding modes are supported. Before PyTorch 1.5, nn.Conv2d only + supports zero and circular padding, and we add "reflect" padding mode. + + Args: + in_channels (int): Number of channels in the input feature map. + Same as that in ``nn._ConvNd``. + out_channels (int): Number of channels produced by the convolution. + Same as that in ``nn._ConvNd``. + kernel_size (int | tuple[int]): Size of the convolving kernel. + Same as that in ``nn._ConvNd``. + stride (int | tuple[int]): Stride of the convolution. + Same as that in ``nn._ConvNd``. + padding (int | tuple[int]): Zero-padding added to both sides of + the input. Same as that in ``nn._ConvNd``. + dilation (int | tuple[int]): Spacing between kernel elements. + Same as that in ``nn._ConvNd``. + groups (int): Number of blocked connections from input channels to + output channels. Same as that in ``nn._ConvNd``. + bias (bool | str): If specified as `auto`, it will be decided by the + norm_layer. Bias will be set as True if `norm_layer` is None, otherwise + False. Default: "auto". + conv_layer (nn.Module): Convolution layer. Default: None, + which means using conv2d. + norm_layer (nn.Module): Normalization layer. Default: None. + act_layer (nn.Module): Activation layer. Default: nn.ReLU. + inplace (bool): Whether to use inplace mode for activation. + Default: True. + with_spectral_norm (bool): Whether use spectral norm in conv module. + Default: False. 
+ padding_mode (str): If the `padding_mode` has not been supported by + current `Conv2d` in PyTorch, we will use our own padding layer + instead. Currently, we support ['zeros', 'circular'] with official + implementation and ['reflect'] with our own implementation. + Default: 'zeros'. + order (tuple[str]): The order of conv/norm/activation layers. It is a + sequence of "conv", "norm" and "act". Common examples are + ("conv", "norm", "act") and ("act", "conv", "norm"). + Default: ('conv', 'norm', 'act'). + """ + + _abbr_ = "conv_block" + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + bias="auto", + conv_layer=nn.Conv2d, + norm_layer=None, + act_layer=nn.ReLU, + inplace=True, + with_spectral_norm=False, + padding_mode="zeros", + order=("conv", "norm", "act"), + ): + super(ConvModule, self).__init__() + official_padding_mode = ["zeros", "circular"] + self.conv_layer = conv_layer + self.norm_layer = norm_layer + self.act_layer = act_layer + self.inplace = inplace + self.with_spectral_norm = with_spectral_norm + self.with_explicit_padding = padding_mode not in official_padding_mode + self.order = order + assert isinstance(self.order, tuple) and len(self.order) == 3 + assert set(order) == set(["conv", "norm", "act"]) + + self.with_norm = norm_layer is not None + self.with_activation = act_layer is not None + # if the conv layer is before a norm layer, bias is unnecessary. + if bias == "auto": + bias = not self.with_norm + self.with_bias = bias + + if self.with_explicit_padding: + if padding_mode == "zeros": + padding_layer = nn.ZeroPad2d + else: + raise AssertionError(f"Unsupported padding mode: {padding_mode}") + self.pad = padding_layer(padding) + + # reset padding to 0 for conv module + conv_padding = 0 if self.with_explicit_padding else padding + # build convolution layer + self.conv = self.conv_layer( + in_channels, + out_channels, + kernel_size, + stride=stride, + padding=conv_padding, + dilation=dilation, + groups=groups, + bias=bias, + ) + # export the attributes of self.conv to a higher level for convenience + self.in_channels = self.conv.in_channels + self.out_channels = self.conv.out_channels + self.kernel_size = self.conv.kernel_size + self.stride = self.conv.stride + self.padding = padding + self.dilation = self.conv.dilation + self.transposed = self.conv.transposed + self.output_padding = self.conv.output_padding + self.groups = self.conv.groups + + if self.with_spectral_norm: + self.conv = nn.utils.spectral_norm(self.conv) + + # build normalization layers + if self.with_norm: + # norm layer is after conv layer + if order.index("norm") > order.index("conv"): + norm_channels = out_channels + else: + norm_channels = in_channels + norm = partial(norm_layer, num_features=norm_channels) + self.add_module("norm", norm) + if self.with_bias: + from torch.nnModules.batchnorm import _BatchNorm + from torch.nnModules.instancenorm import _InstanceNorm + + if isinstance(norm, (_BatchNorm, _InstanceNorm)): + warnings.warn("Unnecessary conv bias before batch/instance norm") + else: + self.norm_name = None + + # build activation layer + if self.with_activation: + # nn.Tanh has no 'inplace' argument + # (nn.Tanh, nn.PReLU, nn.Sigmoid, nn.HSigmoid, nn.Swish, nn.GELU) + if not isinstance(act_layer, (nn.Tanh, nn.PReLU, nn.Sigmoid, nn.GELU)): + act_layer = partial(act_layer, inplace=inplace) + self.activate = act_layer() + + # Use msra init by default + self.init_weights() + + @property + def norm(self): + if self.norm_name: + 
return getattr(self, self.norm_name) + else: + return None + + def init_weights(self): + # 1. It is mainly for customized conv layers with their own + # initialization manners by calling their own ``init_weights()``, + # and we do not want ConvModule to override the initialization. + # 2. For customized conv layers without their own initialization + # manners (that is, they don't have their own ``init_weights()``) + # and PyTorch's conv layers, they will be initialized by + # this method with default ``kaiming_init``. + # Note: For PyTorch's conv layers, they will be overwritten by our + # initialization implementation using default ``kaiming_init``. + if not hasattr(self.conv, "init_weights"): + if self.with_activation and isinstance(self.act_layer, nn.LeakyReLU): + nonlinearity = "leaky_relu" + a = 0.01 # XXX: default negative_slope + else: + nonlinearity = "relu" + a = 0 + if hasattr(self.conv, "weight") and self.conv.weight is not None: + nn.init.kaiming_normal_(self.conv.weight, a=a, mode="fan_out", nonlinearity=nonlinearity) + if hasattr(self.conv, "bias") and self.conv.bias is not None: + nn.init.constant_(self.conv.bias, 0) + if self.with_norm: + if hasattr(self.norm, "weight") and self.norm.weight is not None: + nn.init.constant_(self.norm.weight, 1) + if hasattr(self.norm, "bias") and self.norm.bias is not None: + nn.init.constant_(self.norm.bias, 0) + + def forward(self, x, activate=True, norm=True): + for layer in self.order: + if layer == "conv": + if self.with_explicit_padding: + x = self.pad(x) + x = self.conv(x) + elif layer == "norm" and norm and self.with_norm: + x = self.norm(x) + elif layer == "act" and activate and self.with_activation: + x = self.activate(x) + return x + + +class Interpolate(nn.Module): + def __init__(self, scale_factor, mode, align_corners=False): + super(Interpolate, self).__init__() + self.interp = nn.functional.interpolate + self.scale_factor = scale_factor + self.mode = mode + self.align_corners = align_corners + + def forward(self, x): + x = self.interp(x, scale_factor=self.scale_factor, mode=self.mode, align_corners=self.align_corners) + return x + + +class HeadDepth(nn.Module): + def __init__(self, features): + super(HeadDepth, self).__init__() + self.head = nn.Sequential( + nn.Conv2d(features, features // 2, kernel_size=3, stride=1, padding=1), + Interpolate(scale_factor=2, mode="bilinear", align_corners=True), + nn.Conv2d(features // 2, 32, kernel_size=3, stride=1, padding=1), + nn.ReLU(), + nn.Conv2d(32, 1, kernel_size=1, stride=1, padding=0), + ) + + def forward(self, x): + x = self.head(x) + return x + + +class ReassembleBlocks(nn.Module): + """ViTPostProcessBlock, process cls_token in ViT backbone output and + rearrange the feature vector to feature map. + Args: + in_channels (int): ViT feature channels. Default: 768. + out_channels (List): output channels of each stage. + Default: [96, 192, 384, 768]. + readout_type (str): Type of readout operation. Default: 'ignore'. + patch_size (int): The patch size. Default: 16. 
+ """ + + def __init__(self, in_channels=768, out_channels=[96, 192, 384, 768], readout_type="ignore", patch_size=16): + super(ReassembleBlocks, self).__init__() + + assert readout_type in ["ignore", "add", "project"] + self.readout_type = readout_type + self.patch_size = patch_size + + self.projects = nn.ModuleList( + [ + ConvModule( + in_channels=in_channels, + out_channels=out_channel, + kernel_size=1, + act_layer=None, + ) + for out_channel in out_channels + ] + ) + + self.resize_layers = nn.ModuleList( + [ + nn.ConvTranspose2d( + in_channels=out_channels[0], out_channels=out_channels[0], kernel_size=4, stride=4, padding=0 + ), + nn.ConvTranspose2d( + in_channels=out_channels[1], out_channels=out_channels[1], kernel_size=2, stride=2, padding=0 + ), + nn.Identity(), + nn.Conv2d( + in_channels=out_channels[3], out_channels=out_channels[3], kernel_size=3, stride=2, padding=1 + ), + ] + ) + if self.readout_type == "project": + self.readout_projects = nn.ModuleList() + for _ in range(len(self.projects)): + self.readout_projects.append(nn.Sequential(nn.Linear(2 * in_channels, in_channels), nn.GELU())) + + def forward(self, inputs): + assert isinstance(inputs, list) + out = [] + for i, x in enumerate(inputs): + assert len(x) == 2 + x, cls_token = x[0], x[1] + feature_shape = x.shape + if self.readout_type == "project": + x = x.flatten(2).permute((0, 2, 1)) + readout = cls_token.unsqueeze(1).expand_as(x) + x = self.readout_projects[i](torch.cat((x, readout), -1)) + x = x.permute(0, 2, 1).reshape(feature_shape) + elif self.readout_type == "add": + x = x.flatten(2) + cls_token.unsqueeze(-1) + x = x.reshape(feature_shape) + else: + pass + x = self.projects[i](x) + x = self.resize_layers[i](x) + out.append(x) + return out + + +class PreActResidualConvUnit(nn.Module): + """ResidualConvUnit, pre-activate residual unit. + Args: + in_channels (int): number of channels in the input feature map. + act_layer (nn.Module): activation layer. + norm_layer (nn.Module): norm layer. + stride (int): stride of the first block. Default: 1 + dilation (int): dilation rate for convs layers. Default: 1. + """ + + def __init__(self, in_channels, act_layer, norm_layer, stride=1, dilation=1): + super(PreActResidualConvUnit, self).__init__() + + self.conv1 = ConvModule( + in_channels, + in_channels, + 3, + stride=stride, + padding=dilation, + dilation=dilation, + norm_layer=norm_layer, + act_layer=act_layer, + bias=False, + order=("act", "conv", "norm"), + ) + + self.conv2 = ConvModule( + in_channels, + in_channels, + 3, + padding=1, + norm_layer=norm_layer, + act_layer=act_layer, + bias=False, + order=("act", "conv", "norm"), + ) + + def forward(self, inputs): + inputs_ = inputs.clone() + x = self.conv1(inputs) + x = self.conv2(x) + return x + inputs_ + + +class FeatureFusionBlock(nn.Module): + """FeatureFusionBlock, merge feature map from different stages. + Args: + in_channels (int): Input channels. + act_layer (nn.Module): activation layer for ResidualConvUnit. + norm_layer (nn.Module): normalization layer. + expand (bool): Whether expand the channels in post process block. + Default: False. + align_corners (bool): align_corner setting for bilinear upsample. + Default: True. 
+ """ + + def __init__(self, in_channels, act_layer, norm_layer, expand=False, align_corners=True): + super(FeatureFusionBlock, self).__init__() + + self.in_channels = in_channels + self.expand = expand + self.align_corners = align_corners + + self.out_channels = in_channels + if self.expand: + self.out_channels = in_channels // 2 + + self.project = ConvModule(self.in_channels, self.out_channels, kernel_size=1, act_layer=None, bias=True) + + self.res_conv_unit1 = PreActResidualConvUnit( + in_channels=self.in_channels, act_layer=act_layer, norm_layer=norm_layer + ) + self.res_conv_unit2 = PreActResidualConvUnit( + in_channels=self.in_channels, act_layer=act_layer, norm_layer=norm_layer + ) + + def forward(self, *inputs): + x = inputs[0] + if len(inputs) == 2: + if x.shape != inputs[1].shape: + res = resize(inputs[1], size=(x.shape[2], x.shape[3]), mode="bilinear", align_corners=False) + else: + res = inputs[1] + x = x + self.res_conv_unit1(res) + x = self.res_conv_unit2(x) + x = resize(x, scale_factor=2, mode="bilinear", align_corners=self.align_corners) + x = self.project(x) + return x + + +class DPTHead(DepthBaseDecodeHead): + """Vision Transformers for Dense Prediction. + This head is implemented of `DPT `_. + Args: + embed_dims (int): The embed dimension of the ViT backbone. + Default: 768. + post_process_channels (List): Out channels of post process conv + layers. Default: [96, 192, 384, 768]. + readout_type (str): Type of readout operation. Default: 'ignore'. + patch_size (int): The patch size. Default: 16. + expand_channels (bool): Whether expand the channels in post process + block. Default: False. + """ + + def __init__( + self, + embed_dims=768, + post_process_channels=[96, 192, 384, 768], + readout_type="ignore", + patch_size=16, + expand_channels=False, + **kwargs, + ): + super(DPTHead, self).__init__(**kwargs) + + self.in_channels = self.in_channels + self.expand_channels = expand_channels + self.reassemble_blocks = ReassembleBlocks(embed_dims, post_process_channels, readout_type, patch_size) + + self.post_process_channels = [ + channel * math.pow(2, i) if expand_channels else channel for i, channel in enumerate(post_process_channels) + ] + self.convs = nn.ModuleList() + for channel in self.post_process_channels: + self.convs.append(ConvModule(channel, self.channels, kernel_size=3, padding=1, act_layer=None, bias=False)) + self.fusion_blocks = nn.ModuleList() + for _ in range(len(self.convs)): + self.fusion_blocks.append(FeatureFusionBlock(self.channels, self.act_layer, self.norm_layer)) + self.fusion_blocks[0].res_conv_unit1 = None + self.project = ConvModule(self.channels, self.channels, kernel_size=3, padding=1, norm_layer=self.norm_layer) + self.num_fusion_blocks = len(self.fusion_blocks) + self.num_reassemble_blocks = len(self.reassemble_blocks.resize_layers) + self.num_post_process_channels = len(self.post_process_channels) + assert self.num_fusion_blocks == self.num_reassemble_blocks + assert self.num_reassemble_blocks == self.num_post_process_channels + self.conv_depth = HeadDepth(self.channels) + + def forward(self, inputs, img_metas): + assert len(inputs) == self.num_reassemble_blocks + x = [inp for inp in inputs] + x = self.reassemble_blocks(x) + x = [self.convs[i](feature) for i, feature in enumerate(x)] + out = self.fusion_blocks[0](x[-1]) + for i in range(1, len(self.fusion_blocks)): + out = self.fusion_blocks[i](out, x[-(i + 1)]) + out = self.project(out) + out = self.depth_pred(out) + return out diff --git 
a/LHM/models/encoders/dinov2/hub/depth/encoder_decoder.py b/LHM/models/encoders/dinov2/hub/depth/encoder_decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..eb29ced67957a336e763b0e7c90c0eeaea36fea8 --- /dev/null +++ b/LHM/models/encoders/dinov2/hub/depth/encoder_decoder.py @@ -0,0 +1,351 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from collections import OrderedDict + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .ops import resize + + +def add_prefix(inputs, prefix): + """Add prefix for dict. + + Args: + inputs (dict): The input dict with str keys. + prefix (str): The prefix to add. + + Returns: + + dict: The dict with keys updated with ``prefix``. + """ + + outputs = dict() + for name, value in inputs.items(): + outputs[f"{prefix}.{name}"] = value + + return outputs + + +class DepthEncoderDecoder(nn.Module): + """Encoder Decoder depther. + + EncoderDecoder typically consists of backbone and decode_head. + """ + + def __init__(self, backbone, decode_head): + super(DepthEncoderDecoder, self).__init__() + + self.backbone = backbone + self.decode_head = decode_head + self.align_corners = self.decode_head.align_corners + + def extract_feat(self, img): + """Extract features from images.""" + return self.backbone(img) + + def encode_decode(self, img, img_metas, rescale=True, size=None): + """Encode images with backbone and decode into a depth estimation + map of the same size as input.""" + x = self.extract_feat(img) + out = self._decode_head_forward_test(x, img_metas) + # crop the pred depth to the certain range. + out = torch.clamp(out, min=self.decode_head.min_depth, max=self.decode_head.max_depth) + if rescale: + if size is None: + if img_metas is not None: + size = img_metas[0]["ori_shape"][:2] + else: + size = img.shape[2:] + out = resize(input=out, size=size, mode="bilinear", align_corners=self.align_corners) + return out + + def _decode_head_forward_train(self, img, x, img_metas, depth_gt, **kwargs): + """Run forward function and calculate loss for decode head in + training.""" + losses = dict() + loss_decode = self.decode_head.forward_train(img, x, img_metas, depth_gt, **kwargs) + losses.update(add_prefix(loss_decode, "decode")) + return losses + + def _decode_head_forward_test(self, x, img_metas): + """Run forward function and calculate loss for decode head in + inference.""" + depth_pred = self.decode_head.forward_test(x, img_metas) + return depth_pred + + def forward_dummy(self, img): + """Dummy forward function.""" + depth = self.encode_decode(img, None) + + return depth + + def forward_train(self, img, img_metas, depth_gt, **kwargs): + """Forward function for training. + + Args: + img (Tensor): Input images. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `depth/datasets/pipelines/formatting.py:Collect`. + depth_gt (Tensor): Depth gt + used if the architecture supports depth estimation task. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + + x = self.extract_feat(img) + + losses = dict() + + # the last of x saves the info from neck + loss_decode = self._decode_head_forward_train(img, x, img_metas, depth_gt, **kwargs) + + losses.update(loss_decode) + + return losses + + def whole_inference(self, img, img_meta, rescale, size=None): + """Inference with full image.""" + return self.encode_decode(img, img_meta, rescale, size=size) + + def slide_inference(self, img, img_meta, rescale, stride, crop_size): + """Inference by sliding-window with overlap. + + If h_crop > h_img or w_crop > w_img, the small patch will be used to + decode without padding. + """ + + h_stride, w_stride = stride + h_crop, w_crop = crop_size + batch_size, _, h_img, w_img = img.size() + h_grids = max(h_img - h_crop + h_stride - 1, 0) // h_stride + 1 + w_grids = max(w_img - w_crop + w_stride - 1, 0) // w_stride + 1 + preds = img.new_zeros((batch_size, 1, h_img, w_img)) + count_mat = img.new_zeros((batch_size, 1, h_img, w_img)) + for h_idx in range(h_grids): + for w_idx in range(w_grids): + y1 = h_idx * h_stride + x1 = w_idx * w_stride + y2 = min(y1 + h_crop, h_img) + x2 = min(x1 + w_crop, w_img) + y1 = max(y2 - h_crop, 0) + x1 = max(x2 - w_crop, 0) + crop_img = img[:, :, y1:y2, x1:x2] + depth_pred = self.encode_decode(crop_img, img_meta, rescale) + preds += F.pad(depth_pred, (int(x1), int(preds.shape[3] - x2), int(y1), int(preds.shape[2] - y2))) + + count_mat[:, :, y1:y2, x1:x2] += 1 + assert (count_mat == 0).sum() == 0 + if torch.onnx.is_in_onnx_export(): + # cast count_mat to constant while exporting to ONNX + count_mat = torch.from_numpy(count_mat.cpu().detach().numpy()).to(device=img.device) + preds = preds / count_mat + return preds + + def inference(self, img, img_meta, rescale, size=None, mode="whole"): + """Inference with slide/whole style. + + Args: + img (Tensor): The input image of shape (N, 3, H, W). + img_meta (dict): Image info dict where each dict has: 'img_shape', + 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `depth/datasets/pipelines/formatting.py:Collect`. + rescale (bool): Whether rescale back to original shape. + + Returns: + Tensor: The output depth map. + """ + + assert mode in ["slide", "whole"] + ori_shape = img_meta[0]["ori_shape"] + assert all(_["ori_shape"] == ori_shape for _ in img_meta) + if mode == "slide": + depth_pred = self.slide_inference(img, img_meta, rescale) + else: + depth_pred = self.whole_inference(img, img_meta, rescale, size=size) + output = depth_pred + flip = img_meta[0]["flip"] + if flip: + flip_direction = img_meta[0]["flip_direction"] + assert flip_direction in ["horizontal", "vertical"] + if flip_direction == "horizontal": + output = output.flip(dims=(3,)) + elif flip_direction == "vertical": + output = output.flip(dims=(2,)) + + return output + + def simple_test(self, img, img_meta, rescale=True): + """Simple test with single image.""" + depth_pred = self.inference(img, img_meta, rescale) + if torch.onnx.is_in_onnx_export(): + # our inference backend only support 4D output + depth_pred = depth_pred.unsqueeze(0) + return depth_pred + depth_pred = depth_pred.cpu().numpy() + # unravel batch dim + depth_pred = list(depth_pred) + return depth_pred + + def aug_test(self, imgs, img_metas, rescale=True): + """Test with augmentations. + + Only rescale=True is supported. 
+ """ + # aug_test rescale all imgs back to ori_shape for now + assert rescale + # to save memory, we get augmented depth logit inplace + depth_pred = self.inference(imgs[0], img_metas[0], rescale) + for i in range(1, len(imgs)): + cur_depth_pred = self.inference(imgs[i], img_metas[i], rescale, size=depth_pred.shape[-2:]) + depth_pred += cur_depth_pred + depth_pred /= len(imgs) + depth_pred = depth_pred.cpu().numpy() + # unravel batch dim + depth_pred = list(depth_pred) + return depth_pred + + def forward_test(self, imgs, img_metas, **kwargs): + """ + Args: + imgs (List[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains all images in the batch. + img_metas (List[List[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. + """ + for var, name in [(imgs, "imgs"), (img_metas, "img_metas")]: + if not isinstance(var, list): + raise TypeError(f"{name} must be a list, but got " f"{type(var)}") + num_augs = len(imgs) + if num_augs != len(img_metas): + raise ValueError(f"num of augmentations ({len(imgs)}) != " f"num of image meta ({len(img_metas)})") + # all images in the same aug batch all of the same ori_shape and pad + # shape + for img_meta in img_metas: + ori_shapes = [_["ori_shape"] for _ in img_meta] + assert all(shape == ori_shapes[0] for shape in ori_shapes) + img_shapes = [_["img_shape"] for _ in img_meta] + assert all(shape == img_shapes[0] for shape in img_shapes) + pad_shapes = [_["pad_shape"] for _ in img_meta] + assert all(shape == pad_shapes[0] for shape in pad_shapes) + + if num_augs == 1: + return self.simple_test(imgs[0], img_metas[0], **kwargs) + else: + return self.aug_test(imgs, img_metas, **kwargs) + + def forward(self, img, img_metas, return_loss=True, **kwargs): + """Calls either :func:`forward_train` or :func:`forward_test` depending + on whether ``return_loss`` is ``True``. + + Note this setting will change the expected inputs. When + ``return_loss=True``, img and img_meta are single-nested (i.e. Tensor + and List[dict]), and when ``resturn_loss=False``, img and img_meta + should be double nested (i.e. List[Tensor], List[List[dict]]), with + the outer list indicating test time augmentations. + """ + if return_loss: + return self.forward_train(img, img_metas, **kwargs) + else: + return self.forward_test(img, img_metas, **kwargs) + + def train_step(self, data_batch, optimizer, **kwargs): + """The iteration step during training. + + This method defines an iteration step during training, except for the + back propagation and optimizer updating, which are done in an optimizer + hook. Note that in some complicated cases or models, the whole process + including back propagation and optimizer updating is also defined in + this method, such as GAN. + + Args: + data (dict): The output of dataloader. + optimizer (:obj:`torch.optim.Optimizer` | dict): The optimizer of + runner is passed to ``train_step()``. This argument is unused + and reserved. + + Returns: + dict: It should contain at least 3 keys: ``loss``, ``log_vars``, + ``num_samples``. + ``loss`` is a tensor for back propagation, which can be a + weighted sum of multiple losses. + ``log_vars`` contains all the variables to be sent to the + logger. + ``num_samples`` indicates the batch size (when the model is + DDP, it means the batch size on each GPU), which is used for + averaging the logs. 
+ """ + losses = self(**data_batch) + + # split losses and images + real_losses = {} + log_imgs = {} + for k, v in losses.items(): + if "img" in k: + log_imgs[k] = v + else: + real_losses[k] = v + + loss, log_vars = self._parse_losses(real_losses) + + outputs = dict(loss=loss, log_vars=log_vars, num_samples=len(data_batch["img_metas"]), log_imgs=log_imgs) + + return outputs + + def val_step(self, data_batch, **kwargs): + """The iteration step during validation. + + This method shares the same signature as :func:`train_step`, but used + during val epochs. Note that the evaluation after training epochs is + not implemented with this method, but an evaluation hook. + """ + output = self(**data_batch, **kwargs) + return output + + @staticmethod + def _parse_losses(losses): + import torch.distributed as dist + + """Parse the raw outputs (losses) of the network. + + Args: + losses (dict): Raw output of the network, which usually contain + losses and other necessary information. + + Returns: + tuple[Tensor, dict]: (loss, log_vars), loss is the loss tensor + which may be a weighted sum of all losses, log_vars contains + all the variables to be sent to the logger. + """ + log_vars = OrderedDict() + for loss_name, loss_value in losses.items(): + if isinstance(loss_value, torch.Tensor): + log_vars[loss_name] = loss_value.mean() + elif isinstance(loss_value, list): + log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value) + else: + raise TypeError(f"{loss_name} is not a tensor or list of tensors") + + loss = sum(_value for _key, _value in log_vars.items() if "loss" in _key) + + log_vars["loss"] = loss + for loss_name, loss_value in log_vars.items(): + # reduce loss when distributed training + if dist.is_available() and dist.is_initialized(): + loss_value = loss_value.data.clone() + dist.all_reduce(loss_value.div_(dist.get_world_size())) + log_vars[loss_name] = loss_value.item() + + return loss, log_vars diff --git a/LHM/models/encoders/dinov2/hub/depth/ops.py b/LHM/models/encoders/dinov2/hub/depth/ops.py new file mode 100644 index 0000000000000000000000000000000000000000..15880ee0cb7652d4b41c489b927bf6a156b40e5e --- /dev/null +++ b/LHM/models/encoders/dinov2/hub/depth/ops.py @@ -0,0 +1,28 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import warnings + +import torch.nn.functional as F + + +def resize(input, size=None, scale_factor=None, mode="nearest", align_corners=None, warning=False): + if warning: + if size is not None and align_corners: + input_h, input_w = tuple(int(x) for x in input.shape[2:]) + output_h, output_w = tuple(int(x) for x in size) + if output_h > input_h or output_w > output_h: + if ( + (output_h > 1 and output_w > 1 and input_h > 1 and input_w > 1) + and (output_h - 1) % (input_h - 1) + and (output_w - 1) % (input_w - 1) + ): + warnings.warn( + f"When align_corners={align_corners}, " + "the output would more aligned if " + f"input size {(input_h, input_w)} is `x+1` and " + f"out size {(output_h, output_w)} is `nx+1`" + ) + return F.interpolate(input, size, scale_factor, mode, align_corners) diff --git a/LHM/models/encoders/dinov2/hub/depthers.py b/LHM/models/encoders/dinov2/hub/depthers.py new file mode 100644 index 0000000000000000000000000000000000000000..f88b7e9a41056594e3b3e66107feee98bffab820 --- /dev/null +++ b/LHM/models/encoders/dinov2/hub/depthers.py @@ -0,0 +1,246 @@ +# Copyright (c) Meta Platforms, Inc. 
and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from enum import Enum +from functools import partial +from typing import Optional, Tuple, Union + +import torch + +from .backbones import _make_dinov2_model +from .depth import BNHead, DepthEncoderDecoder, DPTHead +from .utils import _DINOV2_BASE_URL, _make_dinov2_model_name, CenterPadding + + +class Weights(Enum): + NYU = "NYU" + KITTI = "KITTI" + + +def _get_depth_range(pretrained: bool, weights: Weights = Weights.NYU) -> Tuple[float, float]: + if not pretrained: # Default + return (0.001, 10.0) + + # Pretrained, set according to the training dataset for the provided weights + if weights == Weights.KITTI: + return (0.001, 80.0) + + if weights == Weights.NYU: + return (0.001, 10.0) + + return (0.001, 10.0) + + +def _make_dinov2_linear_depth_head( + *, + embed_dim: int, + layers: int, + min_depth: float, + max_depth: float, + **kwargs, +): + if layers not in (1, 4): + raise AssertionError(f"Unsupported number of layers: {layers}") + + if layers == 1: + in_index = [0] + else: + assert layers == 4 + in_index = [0, 1, 2, 3] + + return BNHead( + classify=True, + n_bins=256, + bins_strategy="UD", + norm_strategy="linear", + upsample=4, + in_channels=[embed_dim] * len(in_index), + in_index=in_index, + input_transform="resize_concat", + channels=embed_dim * len(in_index) * 2, + align_corners=False, + min_depth=0.001, + max_depth=80, + loss_decode=(), + ) + + +def _make_dinov2_linear_depther( + *, + arch_name: str = "vit_large", + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.NYU, + depth_range: Optional[Tuple[float, float]] = None, + **kwargs, +): + if layers not in (1, 4): + raise AssertionError(f"Unsupported number of layers: {layers}") + if isinstance(weights, str): + try: + weights = Weights[weights] + except KeyError: + raise AssertionError(f"Unsupported weights: {weights}") + + if depth_range is None: + depth_range = _get_depth_range(pretrained, weights) + min_depth, max_depth = depth_range + + backbone = _make_dinov2_model(arch_name=arch_name, pretrained=pretrained, **kwargs) + + embed_dim = backbone.embed_dim + patch_size = backbone.patch_size + model_name = _make_dinov2_model_name(arch_name, patch_size) + linear_depth_head = _make_dinov2_linear_depth_head( + embed_dim=embed_dim, + layers=layers, + min_depth=min_depth, + max_depth=max_depth, + ) + + layer_count = { + "vit_small": 12, + "vit_base": 12, + "vit_large": 24, + "vit_giant2": 40, + }[arch_name] + + if layers == 4: + out_index = { + "vit_small": [2, 5, 8, 11], + "vit_base": [2, 5, 8, 11], + "vit_large": [4, 11, 17, 23], + "vit_giant2": [9, 19, 29, 39], + }[arch_name] + else: + assert layers == 1 + out_index = [layer_count - 1] + + model = DepthEncoderDecoder(backbone=backbone, decode_head=linear_depth_head) + model.backbone.forward = partial( + backbone.get_intermediate_layers, + n=out_index, + reshape=True, + return_class_token=True, + norm=False, + ) + model.backbone.register_forward_pre_hook(lambda _, x: CenterPadding(patch_size)(x[0])) + + if pretrained: + layers_str = str(layers) if layers == 4 else "" + weights_str = weights.value.lower() + url = _DINOV2_BASE_URL + f"/{model_name}/{model_name}_{weights_str}_linear{layers_str}_head.pth" + checkpoint = torch.hub.load_state_dict_from_url(url, map_location="cpu") + if "state_dict" in checkpoint: + state_dict = checkpoint["state_dict"] + model.load_state_dict(state_dict, 
strict=False) + + return model + + +def dinov2_vits14_ld(*, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_linear_depther( + arch_name="vit_small", layers=layers, pretrained=pretrained, weights=weights, **kwargs + ) + + +def dinov2_vitb14_ld(*, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_linear_depther( + arch_name="vit_base", layers=layers, pretrained=pretrained, weights=weights, **kwargs + ) + + +def dinov2_vitl14_ld(*, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_linear_depther( + arch_name="vit_large", layers=layers, pretrained=pretrained, weights=weights, **kwargs + ) + + +def dinov2_vitg14_ld(*, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_linear_depther( + arch_name="vit_giant2", layers=layers, ffn_layer="swiglufused", pretrained=pretrained, weights=weights, **kwargs + ) + + +def _make_dinov2_dpt_depth_head(*, embed_dim: int, min_depth: float, max_depth: float): + return DPTHead( + in_channels=[embed_dim] * 4, + channels=256, + embed_dims=embed_dim, + post_process_channels=[embed_dim // 2 ** (3 - i) for i in range(4)], + readout_type="project", + min_depth=min_depth, + max_depth=max_depth, + loss_decode=(), + ) + + +def _make_dinov2_dpt_depther( + *, + arch_name: str = "vit_large", + pretrained: bool = True, + weights: Union[Weights, str] = Weights.NYU, + depth_range: Optional[Tuple[float, float]] = None, + **kwargs, +): + if isinstance(weights, str): + try: + weights = Weights[weights] + except KeyError: + raise AssertionError(f"Unsupported weights: {weights}") + + if depth_range is None: + depth_range = _get_depth_range(pretrained, weights) + min_depth, max_depth = depth_range + + backbone = _make_dinov2_model(arch_name=arch_name, pretrained=pretrained, **kwargs) + + model_name = _make_dinov2_model_name(arch_name, backbone.patch_size) + dpt_depth_head = _make_dinov2_dpt_depth_head(embed_dim=backbone.embed_dim, min_depth=min_depth, max_depth=max_depth) + + out_index = { + "vit_small": [2, 5, 8, 11], + "vit_base": [2, 5, 8, 11], + "vit_large": [4, 11, 17, 23], + "vit_giant2": [9, 19, 29, 39], + }[arch_name] + + model = DepthEncoderDecoder(backbone=backbone, decode_head=dpt_depth_head) + model.backbone.forward = partial( + backbone.get_intermediate_layers, + n=out_index, + reshape=True, + return_class_token=True, + norm=False, + ) + model.backbone.register_forward_pre_hook(lambda _, x: CenterPadding(backbone.patch_size)(x[0])) + + if pretrained: + weights_str = weights.value.lower() + url = _DINOV2_BASE_URL + f"/{model_name}/{model_name}_{weights_str}_dpt_head.pth" + checkpoint = torch.hub.load_state_dict_from_url(url, map_location="cpu") + if "state_dict" in checkpoint: + state_dict = checkpoint["state_dict"] + model.load_state_dict(state_dict, strict=False) + + return model + + +def dinov2_vits14_dd(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_dpt_depther(arch_name="vit_small", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitb14_dd(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_dpt_depther(arch_name="vit_base", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitl14_dd(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, 
**kwargs): + return _make_dinov2_dpt_depther(arch_name="vit_large", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitg14_dd(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_dpt_depther( + arch_name="vit_giant2", ffn_layer="swiglufused", pretrained=pretrained, weights=weights, **kwargs + ) diff --git a/LHM/models/encoders/dinov2/hub/utils.py b/LHM/models/encoders/dinov2/hub/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9c6641404093652d5a2f19b4cf283d976ec39e64 --- /dev/null +++ b/LHM/models/encoders/dinov2/hub/utils.py @@ -0,0 +1,39 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import itertools +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +_DINOV2_BASE_URL = "https://dl.fbaipublicfiles.com/dinov2" + + +def _make_dinov2_model_name(arch_name: str, patch_size: int, num_register_tokens: int = 0) -> str: + compact_arch_name = arch_name.replace("_", "")[:4] + registers_suffix = f"_reg{num_register_tokens}" if num_register_tokens else "" + return f"dinov2_{compact_arch_name}{patch_size}{registers_suffix}" + + +class CenterPadding(nn.Module): + def __init__(self, multiple): + super().__init__() + self.multiple = multiple + + def _get_pad(self, size): + new_size = math.ceil(size / self.multiple) * self.multiple + pad_size = new_size - size + pad_size_left = pad_size // 2 + pad_size_right = pad_size - pad_size_left + return pad_size_left, pad_size_right + + @torch.inference_mode() + def forward(self, x): + pads = list(itertools.chain.from_iterable(self._get_pad(m) for m in x.shape[:1:-1])) + output = F.pad(x, pads) + return output diff --git a/LHM/models/encoders/dinov2/layers/__init__.py b/LHM/models/encoders/dinov2/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..77967aa6ccfae24c39b8e167c83dd77073fd68fb --- /dev/null +++ b/LHM/models/encoders/dinov2/layers/__init__.py @@ -0,0 +1,20 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# ****************************************************************************** +# Code modified by Zexin He in 2023-2024. +# Modifications are marked with clearly visible comments +# licensed under the Apache License, Version 2.0. 
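For reference, the `CenterPadding` module added in `hub/utils.py` above is what the depther factories attach via `register_forward_pre_hook`, so that arbitrary input resolutions are padded up to a multiple of the ViT patch size before the backbone runs. Below is a minimal sketch of its behaviour; it is illustrative only and assumes the package is importable under the layout introduced by this diff.

```python
# Illustrative sketch (not part of the diff): how CenterPadding rounds spatial dims up
# to the next multiple of the patch size, splitting the padding evenly on both sides.
import torch
from LHM.models.encoders.dinov2.hub.utils import CenterPadding  # path as added in this diff

pad = CenterPadding(multiple=14)        # 14 = ViT-14 patch size
x = torch.randn(1, 3, 250, 333)         # H and W are not multiples of 14
y = pad(x)
print(y.shape)                          # torch.Size([1, 3, 252, 336])
```

Because the padding is split as evenly as possible per dimension, features remain roughly centred, which is why the depther factories can apply it blindly to any input resolution.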
+# ****************************************************************************** + +from .dino_head import DINOHead +from .mlp import Mlp +from .patch_embed import PatchEmbed +from .swiglu_ffn import SwiGLUFFN, SwiGLUFFNFused +# ********** Modified by Zexin He in 2023-2024 ********** +# Avoid using nested tensor for now, deprecating usage of NestedTensorBlock +from .block import Block, BlockWithModulation +# ******************************************************** +from .attention import MemEffAttention diff --git a/LHM/models/encoders/dinov2/layers/__pycache__/__init__.cpython-310.pyc b/LHM/models/encoders/dinov2/layers/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b4116b99d2e6f9a1effca6d1f4872b1d5b16363e Binary files /dev/null and b/LHM/models/encoders/dinov2/layers/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/layers/__pycache__/attention.cpython-310.pyc b/LHM/models/encoders/dinov2/layers/__pycache__/attention.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..59dfd78b60de93feebfa8e454cbcfed9d5636a1f Binary files /dev/null and b/LHM/models/encoders/dinov2/layers/__pycache__/attention.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/layers/__pycache__/block.cpython-310.pyc b/LHM/models/encoders/dinov2/layers/__pycache__/block.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8b31a679e2e12df593f8a1a7eb3c04e309ed211c Binary files /dev/null and b/LHM/models/encoders/dinov2/layers/__pycache__/block.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/layers/__pycache__/dino_head.cpython-310.pyc b/LHM/models/encoders/dinov2/layers/__pycache__/dino_head.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..86bc66622f883e4b9202e62084f9ea494e833d92 Binary files /dev/null and b/LHM/models/encoders/dinov2/layers/__pycache__/dino_head.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/layers/__pycache__/drop_path.cpython-310.pyc b/LHM/models/encoders/dinov2/layers/__pycache__/drop_path.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c821a1cfbb7434e731db30e747193f5186e3ee73 Binary files /dev/null and b/LHM/models/encoders/dinov2/layers/__pycache__/drop_path.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/layers/__pycache__/layer_scale.cpython-310.pyc b/LHM/models/encoders/dinov2/layers/__pycache__/layer_scale.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..de0f4a535bbc5da200fd319bb7ffa69995cfcef6 Binary files /dev/null and b/LHM/models/encoders/dinov2/layers/__pycache__/layer_scale.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/layers/__pycache__/mlp.cpython-310.pyc b/LHM/models/encoders/dinov2/layers/__pycache__/mlp.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10caa21786883e68e28437ca9c3fea0e1956a800 Binary files /dev/null and b/LHM/models/encoders/dinov2/layers/__pycache__/mlp.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/layers/__pycache__/patch_embed.cpython-310.pyc b/LHM/models/encoders/dinov2/layers/__pycache__/patch_embed.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eff7797a91991e490cfee7dacc2de0f262ba3d34 Binary files /dev/null and b/LHM/models/encoders/dinov2/layers/__pycache__/patch_embed.cpython-310.pyc differ diff --git 
a/LHM/models/encoders/dinov2/layers/__pycache__/swiglu_ffn.cpython-310.pyc b/LHM/models/encoders/dinov2/layers/__pycache__/swiglu_ffn.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ccf19307cc48b14bad4816190bf512665cc0de0 Binary files /dev/null and b/LHM/models/encoders/dinov2/layers/__pycache__/swiglu_ffn.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/layers/attention.py b/LHM/models/encoders/dinov2/layers/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..0fb76ef2816164729a58cceb18d0f000cfb18777 --- /dev/null +++ b/LHM/models/encoders/dinov2/layers/attention.py @@ -0,0 +1,89 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/models/vision_transformer.py + +import logging +import os +import warnings + +from torch import Tensor +from torch import nn + + +logger = logging.getLogger("dinov2") + + +XFORMERS_ENABLED = os.environ.get("XFORMERS_DISABLED") is None +try: + if XFORMERS_ENABLED: + from xformers.ops import memory_efficient_attention, unbind + + XFORMERS_AVAILABLE = True + warnings.warn("xFormers is available (Attention)") + else: + warnings.warn("xFormers is disabled (Attention)") + raise ImportError +except ImportError: + XFORMERS_AVAILABLE = False + warnings.warn("xFormers is not available (Attention)") + + +class Attention(nn.Module): + def __init__( + self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = False, + proj_bias: bool = True, + attn_drop: float = 0.0, + proj_drop: float = 0.0, + ) -> None: + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim**-0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim, bias=proj_bias) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x: Tensor) -> Tensor: + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + + q, k, v = qkv[0] * self.scale, qkv[1], qkv[2] + attn = q @ k.transpose(-2, -1) + + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class MemEffAttention(Attention): + def forward(self, x: Tensor, attn_bias=None) -> Tensor: + if not XFORMERS_AVAILABLE: + if attn_bias is not None: + raise AssertionError("xFormers is required for using nested tensors") + return super().forward(x) + + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads) + + q, k, v = unbind(qkv, 2) + + x = memory_efficient_attention(q, k, v, attn_bias=attn_bias) + x = x.reshape([B, N, C]) + + x = self.proj(x) + x = self.proj_drop(x) + return x diff --git a/LHM/models/encoders/dinov2/layers/block.py b/LHM/models/encoders/dinov2/layers/block.py new file mode 100644 index 0000000000000000000000000000000000000000..bf5b50118c1579fd30cda0c2d60b95c85eb04204 --- /dev/null +++ b/LHM/models/encoders/dinov2/layers/block.py @@ -0,0 +1,296 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/patch_embed.py + +# ****************************************************************************** +# Code modified by Zexin He in 2023-2024. +# Modifications are marked with clearly visible comments +# licensed under the Apache License, Version 2.0. +# ****************************************************************************** + +import logging +import os +from typing import Callable, List, Any, Tuple, Dict +import warnings + +import torch +from torch import nn, Tensor + +from .attention import Attention, MemEffAttention +from .drop_path import DropPath +from .layer_scale import LayerScale +from .mlp import Mlp + + +logger = logging.getLogger("dinov2") + + +XFORMERS_ENABLED = os.environ.get("XFORMERS_DISABLED") is None +try: + if XFORMERS_ENABLED: + from xformers.ops import fmha, scaled_index_add, index_select_cat + + XFORMERS_AVAILABLE = True + warnings.warn("xFormers is available (Block)") + else: + warnings.warn("xFormers is disabled (Block)") + raise ImportError +except ImportError: + XFORMERS_AVAILABLE = False + + warnings.warn("xFormers is not available (Block)") + + +class Block(nn.Module): + def __init__( + self, + dim: int, + num_heads: int, + mlp_ratio: float = 4.0, + qkv_bias: bool = False, + proj_bias: bool = True, + ffn_bias: bool = True, + drop: float = 0.0, + attn_drop: float = 0.0, + init_values=None, + drop_path: float = 0.0, + act_layer: Callable[..., nn.Module] = nn.GELU, + norm_layer: Callable[..., nn.Module] = nn.LayerNorm, + attn_class: Callable[..., nn.Module] = Attention, + ffn_layer: Callable[..., nn.Module] = Mlp, + ) -> None: + super().__init__() + # print(f"biases: qkv: {qkv_bias}, proj: {proj_bias}, ffn: {ffn_bias}") + self.norm1 = norm_layer(dim) + self.attn = attn_class( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + proj_bias=proj_bias, + attn_drop=attn_drop, + proj_drop=drop, + ) + self.ls1 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + self.drop_path1 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = ffn_layer( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop, + bias=ffn_bias, + ) + self.ls2 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + self.drop_path2 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + + self.sample_drop_ratio = drop_path + + def forward(self, x: Tensor) -> Tensor: + def attn_residual_func(x: Tensor) -> Tensor: + return self.ls1(self.attn(self.norm1(x))) + + def ffn_residual_func(x: Tensor) -> Tensor: + return self.ls2(self.mlp(self.norm2(x))) + + if self.training and self.sample_drop_ratio > 0.1: + # the overhead is compensated only for a drop path rate larger than 0.1 + x = drop_add_residual_stochastic_depth( + x, + residual_func=attn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + ) + x = drop_add_residual_stochastic_depth( + x, + residual_func=ffn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + ) + elif self.training and self.sample_drop_ratio > 0.0: + x = x + self.drop_path1(attn_residual_func(x)) + x = x + self.drop_path1(ffn_residual_func(x)) # FIXME: drop_path2 + else: + x = x + 
attn_residual_func(x) + x = x + ffn_residual_func(x) + return x + + +# ********** Modified by Zexin He in 2023-2024 ********** +# Override forward with modulation input +class BlockWithModulation(Block): + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + + def forward(self, x: Tensor, mod: Tensor) -> Tensor: + def attn_residual_func(x: Tensor, mod: Tensor) -> Tensor: + return self.ls1(self.attn(self.norm1(x, mod))) + + def ffn_residual_func(x: Tensor, mod: Tensor) -> Tensor: + return self.ls2(self.mlp(self.norm2(x, mod))) + + if self.training and self.sample_drop_ratio > 0.1: + raise NotImplementedError("Modulation with drop path ratio larger than 0.1 is not supported yet") + elif self.training and self.sample_drop_ratio > 0.0: + x = x + self.drop_path1(attn_residual_func(x, mod)) + x = x + self.drop_path1(ffn_residual_func(x, mod)) # FIXME: drop_path2 + else: + x = x + attn_residual_func(x, mod) + x = x + ffn_residual_func(x, mod) + return x +# ******************************************************** + + +def drop_add_residual_stochastic_depth( + x: Tensor, + residual_func: Callable[[Tensor], Tensor], + sample_drop_ratio: float = 0.0, +) -> Tensor: + # 1) extract subset using permutation + b, n, d = x.shape + sample_subset_size = max(int(b * (1 - sample_drop_ratio)), 1) + brange = (torch.randperm(b, device=x.device))[:sample_subset_size] + x_subset = x[brange] + + # 2) apply residual_func to get residual + residual = residual_func(x_subset) + + x_flat = x.flatten(1) + residual = residual.flatten(1) + + residual_scale_factor = b / sample_subset_size + + # 3) add the residual + x_plus_residual = torch.index_add(x_flat, 0, brange, residual.to(dtype=x.dtype), alpha=residual_scale_factor) + return x_plus_residual.view_as(x) + + +def get_branges_scales(x, sample_drop_ratio=0.0): + b, n, d = x.shape + sample_subset_size = max(int(b * (1 - sample_drop_ratio)), 1) + brange = (torch.randperm(b, device=x.device))[:sample_subset_size] + residual_scale_factor = b / sample_subset_size + return brange, residual_scale_factor + + +def add_residual(x, brange, residual, residual_scale_factor, scaling_vector=None): + if scaling_vector is None: + x_flat = x.flatten(1) + residual = residual.flatten(1) + x_plus_residual = torch.index_add(x_flat, 0, brange, residual.to(dtype=x.dtype), alpha=residual_scale_factor) + else: + x_plus_residual = scaled_index_add( + x, brange, residual.to(dtype=x.dtype), scaling=scaling_vector, alpha=residual_scale_factor + ) + return x_plus_residual + + +attn_bias_cache: Dict[Tuple, Any] = {} + + +def get_attn_bias_and_cat(x_list, branges=None): + """ + this will perform the index select, cat the tensors, and provide the attn_bias from cache + """ + batch_sizes = [b.shape[0] for b in branges] if branges is not None else [x.shape[0] for x in x_list] + all_shapes = tuple((b, x.shape[1]) for b, x in zip(batch_sizes, x_list)) + if all_shapes not in attn_bias_cache.keys(): + seqlens = [] + for b, x in zip(batch_sizes, x_list): + for _ in range(b): + seqlens.append(x.shape[1]) + attn_bias = fmha.BlockDiagonalMask.from_seqlens(seqlens) + attn_bias._batch_sizes = batch_sizes + attn_bias_cache[all_shapes] = attn_bias + + if branges is not None: + cat_tensors = index_select_cat([x.flatten(1) for x in x_list], branges).view(1, -1, x_list[0].shape[-1]) + else: + tensors_bs1 = tuple(x.reshape([1, -1, *x.shape[2:]]) for x in x_list) + cat_tensors = torch.cat(tensors_bs1, dim=1) + + return attn_bias_cache[all_shapes], cat_tensors + + +def 
drop_add_residual_stochastic_depth_list( + x_list: List[Tensor], + residual_func: Callable[[Tensor, Any], Tensor], + sample_drop_ratio: float = 0.0, + scaling_vector=None, +) -> Tensor: + # 1) generate random set of indices for dropping samples in the batch + branges_scales = [get_branges_scales(x, sample_drop_ratio=sample_drop_ratio) for x in x_list] + branges = [s[0] for s in branges_scales] + residual_scale_factors = [s[1] for s in branges_scales] + + # 2) get attention bias and index+concat the tensors + attn_bias, x_cat = get_attn_bias_and_cat(x_list, branges) + + # 3) apply residual_func to get residual, and split the result + residual_list = attn_bias.split(residual_func(x_cat, attn_bias=attn_bias)) # type: ignore + + outputs = [] + for x, brange, residual, residual_scale_factor in zip(x_list, branges, residual_list, residual_scale_factors): + outputs.append(add_residual(x, brange, residual, residual_scale_factor, scaling_vector).view_as(x)) + return outputs + + +class NestedTensorBlock(Block): + + # ********** Modified by Zexin He in 2023-2024 ********** + warnings.warn("NestedTensorBlock is deprecated for now!", DeprecationWarning) + # ******************************************************** + + def forward_nested(self, x_list: List[Tensor]) -> List[Tensor]: + """ + x_list contains a list of tensors to nest together and run + """ + assert isinstance(self.attn, MemEffAttention) + + if self.training and self.sample_drop_ratio > 0.0: + + def attn_residual_func(x: Tensor, attn_bias=None) -> Tensor: + return self.attn(self.norm1(x), attn_bias=attn_bias) + + def ffn_residual_func(x: Tensor, attn_bias=None) -> Tensor: + return self.mlp(self.norm2(x)) + + x_list = drop_add_residual_stochastic_depth_list( + x_list, + residual_func=attn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + scaling_vector=self.ls1.gamma if isinstance(self.ls1, LayerScale) else None, + ) + x_list = drop_add_residual_stochastic_depth_list( + x_list, + residual_func=ffn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + scaling_vector=self.ls2.gamma if isinstance(self.ls1, LayerScale) else None, + ) + return x_list + else: + + def attn_residual_func(x: Tensor, attn_bias=None) -> Tensor: + return self.ls1(self.attn(self.norm1(x), attn_bias=attn_bias)) + + def ffn_residual_func(x: Tensor, attn_bias=None) -> Tensor: + return self.ls2(self.mlp(self.norm2(x))) + + attn_bias, x = get_attn_bias_and_cat(x_list) + x = x + attn_residual_func(x, attn_bias=attn_bias) + x = x + ffn_residual_func(x) + return attn_bias.split(x) + + def forward(self, x_or_x_list): + if isinstance(x_or_x_list, Tensor): + return super().forward(x_or_x_list) + elif isinstance(x_or_x_list, list): + if not XFORMERS_AVAILABLE: + raise AssertionError("xFormers is required for using nested tensors") + return self.forward_nested(x_or_x_list) + else: + raise AssertionError diff --git a/LHM/models/encoders/dinov2/layers/dino_head.py b/LHM/models/encoders/dinov2/layers/dino_head.py new file mode 100644 index 0000000000000000000000000000000000000000..0ace8ffd6297a1dd480b19db407b662a6ea0f565 --- /dev/null +++ b/LHM/models/encoders/dinov2/layers/dino_head.py @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
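+
+# DINOHead (defined below): an MLP projector (in_dim -> hidden_dim -> ... -> bottleneck_dim)
+# followed by L2 normalization and a weight-normalized linear layer mapping to out_dim.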
+ +import torch +import torch.nn as nn +from torch.nn.init import trunc_normal_ +from torch.nn.utils import weight_norm + + +class DINOHead(nn.Module): + def __init__( + self, + in_dim, + out_dim, + use_bn=False, + nlayers=3, + hidden_dim=2048, + bottleneck_dim=256, + mlp_bias=True, + ): + super().__init__() + nlayers = max(nlayers, 1) + self.mlp = _build_mlp(nlayers, in_dim, bottleneck_dim, hidden_dim=hidden_dim, use_bn=use_bn, bias=mlp_bias) + self.apply(self._init_weights) + self.last_layer = weight_norm(nn.Linear(bottleneck_dim, out_dim, bias=False)) + self.last_layer.weight_g.data.fill_(1) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + + def forward(self, x): + x = self.mlp(x) + eps = 1e-6 if x.dtype == torch.float16 else 1e-12 + x = nn.functional.normalize(x, dim=-1, p=2, eps=eps) + x = self.last_layer(x) + return x + + +def _build_mlp(nlayers, in_dim, bottleneck_dim, hidden_dim=None, use_bn=False, bias=True): + if nlayers == 1: + return nn.Linear(in_dim, bottleneck_dim, bias=bias) + else: + layers = [nn.Linear(in_dim, hidden_dim, bias=bias)] + if use_bn: + layers.append(nn.BatchNorm1d(hidden_dim)) + layers.append(nn.GELU()) + for _ in range(nlayers - 2): + layers.append(nn.Linear(hidden_dim, hidden_dim, bias=bias)) + if use_bn: + layers.append(nn.BatchNorm1d(hidden_dim)) + layers.append(nn.GELU()) + layers.append(nn.Linear(hidden_dim, bottleneck_dim, bias=bias)) + return nn.Sequential(*layers) diff --git a/LHM/models/encoders/dinov2/layers/drop_path.py b/LHM/models/encoders/dinov2/layers/drop_path.py new file mode 100644 index 0000000000000000000000000000000000000000..1d640e0b969b8dcba96260243473700b4e5b24b5 --- /dev/null +++ b/LHM/models/encoders/dinov2/layers/drop_path.py @@ -0,0 +1,34 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/drop.py + + +from torch import nn + + +def drop_path(x, drop_prob: float = 0.0, training: bool = False): + if drop_prob == 0.0 or not training: + return x + keep_prob = 1 - drop_prob + shape = (x.shape[0],) + (1,) * (x.ndim - 1) # work with diff dim tensors, not just 2D ConvNets + random_tensor = x.new_empty(shape).bernoulli_(keep_prob) + if keep_prob > 0.0: + random_tensor.div_(keep_prob) + output = x * random_tensor + return output + + +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).""" + + def __init__(self, drop_prob=None): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + + def forward(self, x): + return drop_path(x, self.drop_prob, self.training) diff --git a/LHM/models/encoders/dinov2/layers/layer_scale.py b/LHM/models/encoders/dinov2/layers/layer_scale.py new file mode 100644 index 0000000000000000000000000000000000000000..51df0d7ce61f2b41fa9e6369f52391dd7fe7d386 --- /dev/null +++ b/LHM/models/encoders/dinov2/layers/layer_scale.py @@ -0,0 +1,27 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
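+#
+# LayerScale (defined below) multiplies its input by a learnable per-channel scale
+# `gamma`, initialized to `init_values` (1e-5 by default); with `inplace=True` the
+# multiplication is done in place via `mul_`.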
+ +# Modified from: https://github.com/huggingface/pytorch-image-models/blob/main/timm/models/vision_transformer.py#L103-L110 + +from typing import Union + +import torch +from torch import Tensor +from torch import nn + + +class LayerScale(nn.Module): + def __init__( + self, + dim: int, + init_values: Union[float, Tensor] = 1e-5, + inplace: bool = False, + ) -> None: + super().__init__() + self.inplace = inplace + self.gamma = nn.Parameter(init_values * torch.ones(dim)) + + def forward(self, x: Tensor) -> Tensor: + return x.mul_(self.gamma) if self.inplace else x * self.gamma diff --git a/LHM/models/encoders/dinov2/layers/mlp.py b/LHM/models/encoders/dinov2/layers/mlp.py new file mode 100644 index 0000000000000000000000000000000000000000..bbf9432aae9258612caeae910a7bde17999e328e --- /dev/null +++ b/LHM/models/encoders/dinov2/layers/mlp.py @@ -0,0 +1,40 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/mlp.py + + +from typing import Callable, Optional + +from torch import Tensor, nn + + +class Mlp(nn.Module): + def __init__( + self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_layer: Callable[..., nn.Module] = nn.GELU, + drop: float = 0.0, + bias: bool = True, + ) -> None: + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features, bias=bias) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features, bias=bias) + self.drop = nn.Dropout(drop) + + def forward(self, x: Tensor) -> Tensor: + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x diff --git a/LHM/models/encoders/dinov2/layers/patch_embed.py b/LHM/models/encoders/dinov2/layers/patch_embed.py new file mode 100644 index 0000000000000000000000000000000000000000..8b7c0804784a42cf80c0297d110dcc68cc85b339 --- /dev/null +++ b/LHM/models/encoders/dinov2/layers/patch_embed.py @@ -0,0 +1,88 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/patch_embed.py + +from typing import Callable, Optional, Tuple, Union + +from torch import Tensor +import torch.nn as nn + + +def make_2tuple(x): + if isinstance(x, tuple): + assert len(x) == 2 + return x + + assert isinstance(x, int) + return (x, x) + + +class PatchEmbed(nn.Module): + """ + 2D image to patch embedding: (B,C,H,W) -> (B,N,D) + + Args: + img_size: Image size. + patch_size: Patch token size. + in_chans: Number of input image channels. + embed_dim: Number of linear projection output channels. + norm_layer: Normalization layer. 
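+        flatten_embedding: If True, flatten patches into a (B, N, D) token sequence;
+            otherwise keep the (B, H', W', D) spatial grid.
+
+    Shape example (with the defaults img_size=224, patch_size=16, embed_dim=768):
+        (B, 3, 224, 224) -> proj -> (B, 768, 14, 14) -> flatten -> (B, 196, 768)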
+ """ + + def __init__( + self, + img_size: Union[int, Tuple[int, int]] = 224, + patch_size: Union[int, Tuple[int, int]] = 16, + in_chans: int = 3, + embed_dim: int = 768, + norm_layer: Optional[Callable] = None, + flatten_embedding: bool = True, + ) -> None: + super().__init__() + + image_HW = make_2tuple(img_size) + patch_HW = make_2tuple(patch_size) + patch_grid_size = ( + image_HW[0] // patch_HW[0], + image_HW[1] // patch_HW[1], + ) + + self.img_size = image_HW + self.patch_size = patch_HW + self.patches_resolution = patch_grid_size + self.num_patches = patch_grid_size[0] * patch_grid_size[1] + + self.in_chans = in_chans + self.embed_dim = embed_dim + + self.flatten_embedding = flatten_embedding + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_HW, stride=patch_HW) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x: Tensor) -> Tensor: + _, _, H, W = x.shape + patch_H, patch_W = self.patch_size + + assert H % patch_H == 0, f"Input image height {H} is not a multiple of patch height {patch_H}" + assert W % patch_W == 0, f"Input image width {W} is not a multiple of patch width: {patch_W}" + + x = self.proj(x) # B C H W + H, W = x.size(2), x.size(3) + x = x.flatten(2).transpose(1, 2) # B HW C + x = self.norm(x) + if not self.flatten_embedding: + x = x.reshape(-1, H, W, self.embed_dim) # B H W C + return x + + def flops(self) -> float: + Ho, Wo = self.patches_resolution + flops = Ho * Wo * self.embed_dim * self.in_chans * (self.patch_size[0] * self.patch_size[1]) + if self.norm is not None: + flops += Ho * Wo * self.embed_dim + return flops diff --git a/LHM/models/encoders/dinov2/layers/swiglu_ffn.py b/LHM/models/encoders/dinov2/layers/swiglu_ffn.py new file mode 100644 index 0000000000000000000000000000000000000000..5e9dafa4592a408f6874d54853e8f60db5c41f74 --- /dev/null +++ b/LHM/models/encoders/dinov2/layers/swiglu_ffn.py @@ -0,0 +1,72 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
+ +import os +from typing import Callable, Optional +import warnings + +from torch import Tensor, nn +import torch.nn.functional as F + + +class SwiGLUFFN(nn.Module): + def __init__( + self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_layer: Callable[..., nn.Module] = None, + drop: float = 0.0, + bias: bool = True, + ) -> None: + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.w12 = nn.Linear(in_features, 2 * hidden_features, bias=bias) + self.w3 = nn.Linear(hidden_features, out_features, bias=bias) + + def forward(self, x: Tensor) -> Tensor: + x12 = self.w12(x) + x1, x2 = x12.chunk(2, dim=-1) + hidden = F.silu(x1) * x2 + return self.w3(hidden) + + +XFORMERS_ENABLED = os.environ.get("XFORMERS_DISABLED") is None +try: + if XFORMERS_ENABLED: + from xformers.ops import SwiGLU + + XFORMERS_AVAILABLE = True + warnings.warn("xFormers is available (SwiGLU)") + else: + warnings.warn("xFormers is disabled (SwiGLU)") + raise ImportError +except ImportError: + SwiGLU = SwiGLUFFN + XFORMERS_AVAILABLE = False + + warnings.warn("xFormers is not available (SwiGLU)") + + +class SwiGLUFFNFused(SwiGLU): + def __init__( + self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_layer: Callable[..., nn.Module] = None, + drop: float = 0.0, + bias: bool = True, + ) -> None: + out_features = out_features or in_features + hidden_features = hidden_features or in_features + hidden_features = (int(hidden_features * 2 / 3) + 7) // 8 * 8 + super().__init__( + in_features=in_features, + hidden_features=hidden_features, + out_features=out_features, + bias=bias, + ) diff --git a/LHM/models/encoders/dinov2/models/__init__.py b/LHM/models/encoders/dinov2/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3fdff20badbd5244bf79f16bf18dd2cb73982265 --- /dev/null +++ b/LHM/models/encoders/dinov2/models/__init__.py @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import logging + +from . 
import vision_transformer as vits + + +logger = logging.getLogger("dinov2") + + +def build_model(args, only_teacher=False, img_size=224): + args.arch = args.arch.removesuffix("_memeff") + if "vit" in args.arch: + vit_kwargs = dict( + img_size=img_size, + patch_size=args.patch_size, + init_values=args.layerscale, + ffn_layer=args.ffn_layer, + block_chunks=args.block_chunks, + qkv_bias=args.qkv_bias, + proj_bias=args.proj_bias, + ffn_bias=args.ffn_bias, + num_register_tokens=args.num_register_tokens, + interpolate_offset=args.interpolate_offset, + interpolate_antialias=args.interpolate_antialias, + ) + teacher = vits.__dict__[args.arch](**vit_kwargs) + if only_teacher: + return teacher, teacher.embed_dim + student = vits.__dict__[args.arch]( + **vit_kwargs, + drop_path_rate=args.drop_path_rate, + drop_path_uniform=args.drop_path_uniform, + ) + embed_dim = student.embed_dim + return student, teacher, embed_dim + + +def build_model_from_cfg(cfg, only_teacher=False): + return build_model(cfg.student, only_teacher=only_teacher, img_size=cfg.crops.global_crops_size) diff --git a/LHM/models/encoders/dinov2/models/__pycache__/__init__.cpython-310.pyc b/LHM/models/encoders/dinov2/models/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0486c0e29f1ec58f15134faf8c4a5fdecf266ba3 Binary files /dev/null and b/LHM/models/encoders/dinov2/models/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/models/__pycache__/vision_transformer.cpython-310.pyc b/LHM/models/encoders/dinov2/models/__pycache__/vision_transformer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3918d49bba479911bb06f7a0027b0586ecd4387a Binary files /dev/null and b/LHM/models/encoders/dinov2/models/__pycache__/vision_transformer.cpython-310.pyc differ diff --git a/LHM/models/encoders/dinov2/models/vision_transformer.py b/LHM/models/encoders/dinov2/models/vision_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..c90ac2be1fe294a0db6080cd24155629083d3ec9 --- /dev/null +++ b/LHM/models/encoders/dinov2/models/vision_transformer.py @@ -0,0 +1,443 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/main/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/models/vision_transformer.py + +# ****************************************************************************** +# Code modified by Zexin He in 2023-2024. +# Modifications are marked with clearly visible comments +# licensed under the Apache License, Version 2.0. 
+# ****************************************************************************** + +from functools import partial +import math +import logging +from typing import Sequence, Tuple, Union, Callable + +import torch +import torch.nn as nn +import torch.utils.checkpoint +from torch.nn.init import trunc_normal_ + +# ********** Modified by Zexin He in 2023-2024 ********** +# Avoid using nested tensor for now, deprecating usage of NestedTensorBlock +from ..layers import Mlp, PatchEmbed, SwiGLUFFNFused, MemEffAttention, Block, BlockWithModulation +# ******************************************************** + + +logger = logging.getLogger("dinov2") + + +def named_apply(fn: Callable, module: nn.Module, name="", depth_first=True, include_root=False) -> nn.Module: + if not depth_first and include_root: + fn(module=module, name=name) + for child_name, child_module in module.named_children(): + child_name = ".".join((name, child_name)) if name else child_name + named_apply(fn=fn, module=child_module, name=child_name, depth_first=depth_first, include_root=True) + if depth_first and include_root: + fn(module=module, name=name) + return module + + +class BlockChunk(nn.ModuleList): + def forward(self, x): + for b in self: + x = b(x) + return x + + +class DinoVisionTransformer(nn.Module): + def __init__( + self, + img_size=224, + patch_size=16, + in_chans=3, + embed_dim=768, + depth=12, + num_heads=12, + mlp_ratio=4.0, + qkv_bias=True, + ffn_bias=True, + proj_bias=True, + drop_path_rate=0.0, + drop_path_uniform=False, + init_values=None, # for layerscale: None or 0 => no layerscale + embed_layer=PatchEmbed, + act_layer=nn.GELU, + block_fn=Block, + # ********** Modified by Zexin He in 2023-2024 ********** + modulation_dim: int = None, + # ******************************************************** + ffn_layer="mlp", + block_chunks=1, + num_register_tokens=0, + interpolate_antialias=False, + interpolate_offset=0.1, + ): + """ + Args: + img_size (int, tuple): input image size + patch_size (int, tuple): patch size + in_chans (int): number of input channels + embed_dim (int): embedding dimension + depth (int): depth of transformer + num_heads (int): number of attention heads + mlp_ratio (int): ratio of mlp hidden dim to embedding dim + qkv_bias (bool): enable bias for qkv if True + proj_bias (bool): enable bias for proj in attn if True + ffn_bias (bool): enable bias for ffn if True + drop_path_rate (float): stochastic depth rate + drop_path_uniform (bool): apply uniform drop rate across blocks + weight_init (str): weight init scheme + init_values (float): layer-scale init values + embed_layer (nn.Module): patch embedding layer + act_layer (nn.Module): MLP activation layer + block_fn (nn.Module): transformer block class + ffn_layer (str): "mlp", "swiglu", "swiglufused" or "identity" + block_chunks: (int) split block sequence into block_chunks units for FSDP wrap + num_register_tokens: (int) number of extra cls tokens (so-called "registers") + interpolate_antialias: (str) flag to apply anti-aliasing when interpolating positional embeddings + interpolate_offset: (float) work-around offset to apply when interpolating positional embeddings + """ + super().__init__() + + # ********** Modified by Zexin He in 2023-2024 ********** + block_norm_layer = None + if modulation_dim is not None: + from ....modulate import ModLN + block_norm_layer = partial(ModLN, mod_dim=modulation_dim) + else: + block_norm_layer = nn.LayerNorm + block_norm_layer = partial(block_norm_layer, eps=1e-6) + # 
******************************************************** + norm_layer = partial(nn.LayerNorm, eps=1e-6) + + self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models + self.num_tokens = 1 + self.n_blocks = depth + self.num_heads = num_heads + self.patch_size = patch_size + self.num_register_tokens = num_register_tokens + self.interpolate_antialias = interpolate_antialias + self.interpolate_offset = interpolate_offset + + self.patch_embed = embed_layer(img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) + num_patches = self.patch_embed.num_patches + + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + self.num_tokens, embed_dim)) + assert num_register_tokens >= 0 + self.register_tokens = ( + nn.Parameter(torch.zeros(1, num_register_tokens, embed_dim)) if num_register_tokens else None + ) + + if drop_path_uniform is True: + dpr = [drop_path_rate] * depth + else: + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule + + if ffn_layer == "mlp": + logger.info("using MLP layer as FFN") + ffn_layer = Mlp + elif ffn_layer == "swiglufused" or ffn_layer == "swiglu": + logger.info("using SwiGLU layer as FFN") + ffn_layer = SwiGLUFFNFused + elif ffn_layer == "identity": + logger.info("using Identity layer as FFN") + + def f(*args, **kwargs): + return nn.Identity() + + ffn_layer = f + else: + raise NotImplementedError + + blocks_list = [ + block_fn( + dim=embed_dim, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + proj_bias=proj_bias, + ffn_bias=ffn_bias, + drop_path=dpr[i], + # ********** Modified by Zexin He in 2023-2024 ********** + norm_layer=block_norm_layer, + # ******************************************************** + act_layer=act_layer, + ffn_layer=ffn_layer, + init_values=init_values, + ) + for i in range(depth) + ] + if block_chunks > 0: + self.chunked_blocks = True + chunked_blocks = [] + chunksize = depth // block_chunks + for i in range(0, depth, chunksize): + # this is to keep the block index consistent if we chunk the block list + chunked_blocks.append([nn.Identity()] * i + blocks_list[i : i + chunksize]) + self.blocks = nn.ModuleList([BlockChunk(p) for p in chunked_blocks]) + else: + self.chunked_blocks = False + self.blocks = nn.ModuleList(blocks_list) + + self.norm = norm_layer(embed_dim) + self.head = nn.Identity() + + # ********** Modified by Zexin He in 2023-2024 ********** + # hacking unused mask_token for better DDP + # self.mask_token = nn.Parameter(torch.zeros(1, embed_dim)) + # ******************************************************** + + self.init_weights() + + def init_weights(self): + trunc_normal_(self.pos_embed, std=0.02) + nn.init.normal_(self.cls_token, std=1e-6) + if self.register_tokens is not None: + nn.init.normal_(self.register_tokens, std=1e-6) + named_apply(init_weights_vit_timm, self) + + def interpolate_pos_encoding(self, x, w, h): + previous_dtype = x.dtype + npatch = x.shape[1] - 1 + N = self.pos_embed.shape[1] - 1 + if npatch == N and w == h: + return self.pos_embed + pos_embed = self.pos_embed.float() + class_pos_embed = pos_embed[:, 0] + patch_pos_embed = pos_embed[:, 1:] + dim = x.shape[-1] + w0 = w // self.patch_size + h0 = h // self.patch_size + # we add a small number to avoid floating point error in the interpolation + # see discussion at https://github.com/facebookresearch/dino/issues/8 + w0, h0 = w0 + self.interpolate_offset, h0 + 
self.interpolate_offset + + sqrt_N = math.sqrt(N) + sx, sy = float(w0) / sqrt_N, float(h0) / sqrt_N + patch_pos_embed = nn.functional.interpolate( + patch_pos_embed.reshape(1, int(sqrt_N), int(sqrt_N), dim).permute(0, 3, 1, 2), + scale_factor=(sx, sy), + mode="bicubic", + antialias=self.interpolate_antialias, + ) + + assert int(w0) == patch_pos_embed.shape[-2] + assert int(h0) == patch_pos_embed.shape[-1] + patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim) + return torch.cat((class_pos_embed.unsqueeze(0), patch_pos_embed), dim=1).to(previous_dtype) + + def prepare_tokens_with_masks(self, x, masks=None): + B, nc, w, h = x.shape + x = self.patch_embed(x) + if masks is not None: + # ********** Modified by Zexin He in 2023-2024 ********** + raise NotImplementedError("Masking is not supported in hacked DINOv2") + # x = torch.where(masks.unsqueeze(-1), self.mask_token.to(x.dtype).unsqueeze(0), x) + # ******************************************************** + + x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1) + x = x + self.interpolate_pos_encoding(x, w, h) + + if self.register_tokens is not None: + x = torch.cat( + ( + x[:, :1], + self.register_tokens.expand(x.shape[0], -1, -1), + x[:, 1:], + ), + dim=1, + ) + + return x + + def forward_features_list(self, x_list, masks_list): + x = [self.prepare_tokens_with_masks(x, masks) for x, masks in zip(x_list, masks_list)] + for blk in self.blocks: + x = blk(x) + + all_x = x + output = [] + for x, masks in zip(all_x, masks_list): + x_norm = self.norm(x) + output.append( + { + "x_norm_clstoken": x_norm[:, 0], + "x_norm_regtokens": x_norm[:, 1 : self.num_register_tokens + 1], + "x_norm_patchtokens": x_norm[:, self.num_register_tokens + 1 :], + "x_prenorm": x, + "masks": masks, + } + ) + return output + + # ********** Modified by Zexin He in 2023-2024 ********** + def forward_features(self, x, masks=None, mod=None): + if isinstance(x, list): + raise DeprecationWarning("forward_features_list is deprecated, use forward_features") + return self.forward_features_list(x, masks) + + x = self.prepare_tokens_with_masks(x, masks) + + if mod is None: + for blk in self.blocks: + x = blk(x) + else: + for blk in self.blocks: + x = blk(x, mod) + + x_norm = self.norm(x) + return { + "x_norm_clstoken": x_norm[:, 0], + "x_norm_regtokens": x_norm[:, 1 : self.num_register_tokens + 1], + "x_norm_patchtokens": x_norm[:, self.num_register_tokens + 1 :], + "x_prenorm": x, + "masks": masks, + } + # ******************************************************** + + def _get_intermediate_layers_not_chunked(self, x, n=1): + x = self.prepare_tokens_with_masks(x) + # If n is an int, take the n last blocks. If it's a list, take them + output, total_block_len = [], len(self.blocks) + blocks_to_take = range(total_block_len - n, total_block_len) if isinstance(n, int) else n + for i, blk in enumerate(self.blocks): + x = blk(x) + if i in blocks_to_take: + output.append(x) + assert len(output) == len(blocks_to_take), f"only {len(output)} / {len(blocks_to_take)} blocks found" + return output + + def _get_intermediate_layers_chunked(self, x, n=1): + x = self.prepare_tokens_with_masks(x) + output, i, total_block_len = [], 0, len(self.blocks[-1]) + # If n is an int, take the n last blocks. 
If it's a list, take them + blocks_to_take = range(total_block_len - n, total_block_len) if isinstance(n, int) else n + for block_chunk in self.blocks: + for blk in block_chunk[i:]: # Passing the nn.Identity() + x = blk(x) + if i in blocks_to_take: + output.append(x) + i += 1 + assert len(output) == len(blocks_to_take), f"only {len(output)} / {len(blocks_to_take)} blocks found" + return output + + def get_intermediate_layers( + self, + x: torch.Tensor, + n: Union[int, Sequence] = 1, # Layers or n last layers to take + reshape: bool = False, + return_class_token: bool = False, + norm=True, + ) -> Tuple[Union[torch.Tensor, Tuple[torch.Tensor]]]: + if self.chunked_blocks: + outputs = self._get_intermediate_layers_chunked(x, n) + else: + outputs = self._get_intermediate_layers_not_chunked(x, n) + if norm: + outputs = [self.norm(out) for out in outputs] + class_tokens = [out[:, 0] for out in outputs] + outputs = [out[:, 1 + self.num_register_tokens:] for out in outputs] + if reshape: + B, _, w, h = x.shape + outputs = [ + out.reshape(B, w // self.patch_size, h // self.patch_size, -1).permute(0, 3, 1, 2).contiguous() + for out in outputs + ] + if return_class_token: + return tuple(zip(outputs, class_tokens)) + return tuple(outputs) + + def forward(self, *args, is_training=False, **kwargs): + ret = self.forward_features(*args, **kwargs) + if is_training: + return ret + else: + return self.head(ret["x_norm_clstoken"]) + + +def init_weights_vit_timm(module: nn.Module, name: str = ""): + """ViT weight initialization, original timm impl (for reproducibility)""" + if isinstance(module, nn.Linear): + trunc_normal_(module.weight, std=0.02) + if module.bias is not None: + nn.init.zeros_(module.bias) + + +# ********** Modified by Zexin He in 2023-2024 ********** +# block class selected from Block and BlockWithModulation + +def _block_cls(**kwargs): + modulation_dim = kwargs.get("modulation_dim", None) + if modulation_dim is None: + block_cls = Block + else: + block_cls = BlockWithModulation + return block_cls + + +def vit_small(patch_size=16, num_register_tokens=0, **kwargs): + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=384, + depth=12, + num_heads=6, + mlp_ratio=4, + block_fn=partial(_block_cls(**kwargs), attn_class=MemEffAttention), + num_register_tokens=num_register_tokens, + **kwargs, + ) + return model + + +def vit_base(patch_size=16, num_register_tokens=0, **kwargs): + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=768, + depth=12, + num_heads=12, + mlp_ratio=4, + block_fn=partial(_block_cls(**kwargs), attn_class=MemEffAttention), + num_register_tokens=num_register_tokens, + **kwargs, + ) + return model + + +def vit_large(patch_size=16, num_register_tokens=0, **kwargs): + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=1024, + depth=24, + num_heads=16, + mlp_ratio=4, + block_fn=partial(_block_cls(**kwargs), attn_class=MemEffAttention), + num_register_tokens=num_register_tokens, + **kwargs, + ) + return model + + +def vit_giant2(patch_size=16, num_register_tokens=0, **kwargs): + """ + Close to ViT-giant, with embed-dim 1536 and 24 heads => embed-dim per head 64 + """ + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=1536, + depth=40, + num_heads=24, + mlp_ratio=4, + block_fn=partial(_block_cls(**kwargs), attn_class=MemEffAttention), + num_register_tokens=num_register_tokens, + **kwargs, + ) + return model + +# ******************************************************** diff --git a/LHM/models/encoders/dinov2_dpt.py 
b/LHM/models/encoders/dinov2_dpt.py new file mode 100644 index 0000000000000000000000000000000000000000..ed977e8d8981f1a7c63d3e1309a7520af3bf22a7 --- /dev/null +++ b/LHM/models/encoders/dinov2_dpt.py @@ -0,0 +1,291 @@ +import cv2 +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision +from torchvision.transforms import Compose + +# from openlrm.models.encoders.dpt_util.dinov2 import DINOv2 +from LHM.models.encoders.dpt_util.blocks import FeatureFusionBlock, _make_scratch +from LHM.models.encoders.dpt_util.transform import NormalizeImage, PrepareForNet, Resize + + +def _make_fusion_block(features, use_bn, size=None, use_conv1=True): + return FeatureFusionBlock( + features, + nn.ReLU(False), + deconv=False, + bn=use_bn, + expand=False, + align_corners=True, + size=size, + use_conv1=use_conv1, + ) + + +class ConvBlock(nn.Module): + def __init__(self, in_feature, out_feature): + super().__init__() + + self.conv_block = nn.Sequential( + nn.Conv2d(in_feature, out_feature, kernel_size=3, stride=1, padding=1), + nn.BatchNorm2d(out_feature), + nn.ReLU(True), + ) + + def forward(self, x): + return self.conv_block(x) + + +class DPTHead(nn.Module): + def __init__( + self, + in_channels, + features=256, + use_bn=False, + out_channels=[256, 512, 1024, 1024], + use_clstoken=False, + out_channel=384, + ): + super(DPTHead, self).__init__() + + self.use_clstoken = use_clstoken + self.projects = nn.ModuleList( + [ + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channel, + kernel_size=1, + stride=1, + padding=0, + ) + for out_channel in out_channels + ] + ) + + # self.resize_layers = nn.ModuleList([ + # nn.ConvTranspose2d( + # in_channels=out_channels[0], + # out_channels=out_channels[0], + # kernel_size=4, + # stride=4, + # padding=0), + # nn.ConvTranspose2d( + # in_channels=out_channels[1], + # out_channels=out_channels[1], + # kernel_size=2, + # stride=2, + # padding=0), + # nn.Identity(), + # nn.Conv2d( + # in_channels=out_channels[3], + # out_channels=out_channels[3], + # kernel_size=3, + # stride=2, + # padding=1) + # ]) + + if use_clstoken: + self.readout_projects = nn.ModuleList() + for _ in range(len(self.projects)): + self.readout_projects.append( + nn.Sequential(nn.Linear(2 * in_channels, in_channels), nn.GELU()) + ) + + self.scratch = _make_scratch( + out_channels, + features, + groups=1, + expand=False, + ) + + self.scratch.stem_transpose = None + + self.scratch.refinenet1 = _make_fusion_block(features, use_bn) + self.scratch.refinenet2 = _make_fusion_block(features, use_bn) + self.scratch.refinenet3 = _make_fusion_block(features, use_bn) + self.scratch.refinenet4 = _make_fusion_block(features, use_bn, use_conv1=False) + + head_features_1 = features + head_features_2 = 32 + + # self.scratch.output_conv1 = nn.Conv2d(head_features_1, out_channnels, kernel_size=3, stride=1, padding=1) + + self.scratch.output_conv1 = nn.Conv2d( + head_features_1, out_channel, kernel_size=1, stride=1, padding=0 + ) + + # self.scratch.output_conv2 = nn.Sequential( + # nn.Conv2d(head_features_1 // 2, head_features_2, kernel_size=3, stride=1, padding=1), + # nn.ReLU(True), + # nn.Conv2d(head_features_2, 1, kernel_size=1, stride=1, padding=0), + # nn.ReLU(True), + # nn.Identity(), + # ) + + def forward(self, out_features, patch_h, patch_w): + out = [] + for i, x in enumerate(out_features): + if self.use_clstoken: + x, cls_token = x[0], x[1] + readout = cls_token.unsqueeze(1).expand_as(x) + x = self.readout_projects[i](torch.cat((x, readout), -1)) + else: + x = x[0] + + x = 
x.permute(0, 2, 1).reshape((x.shape[0], x.shape[-1], patch_h, patch_w)) + + x = self.projects[i](x) + # x = self.resize_layers[i](x) + + out.append(x) + + layer_1, layer_2, layer_3, layer_4 = out + + layer_1_rn = self.scratch.layer1_rn(layer_1) + layer_2_rn = self.scratch.layer2_rn(layer_2) + layer_3_rn = self.scratch.layer3_rn(layer_3) + layer_4_rn = self.scratch.layer4_rn(layer_4) + + path_4 = self.scratch.refinenet4( + layer_4_rn, size=layer_3_rn.shape[2:], scale_factor=1 + ) + path_3 = self.scratch.refinenet3( + path_4, layer_3_rn, size=layer_2_rn.shape[2:], scale_factor=1 + ) + path_2 = self.scratch.refinenet2( + path_3, layer_2_rn, size=layer_1_rn.shape[2:], scale_factor=1 + ) + path_1 = self.scratch.refinenet1(path_2, layer_1_rn, scale_factor=1) + + # path_4 = self.scratch.refinenet4(layer_1_rn, size=layer_2_rn.shape[2:], scale_factor=1) + # path_3 = self.scratch.refinenet3(path_4, layer_2_rn, size=layer_3_rn.shape[2:], scale_factor=1) + # path_2 = self.scratch.refinenet2(path_3, layer_3_rn, size=layer_4_rn.shape[2:], scale_factor=1) + # path_1 = self.scratch.refinenet1(path_2, layer_4_rn, scale_factor=1) + + out = self.scratch.output_conv1(path_1) + # out = F.interpolate(out, (int(patch_h * 14), int(patch_w * 14)), mode="bilinear", align_corners=True) + # out = self.scratch.output_conv2(out) + + return out + + +class DINODPT(nn.Module): + def __init__( + self, model_name="vitb", out_dim=384, use_bn=False, use_clstoken=False + ): + super(DINODPT, self).__init__() + + model_configs = { + "vits": { + "encoder": "vits", + "features": 64, + "out_channels": [48, 96, 192, 384], + }, + "vitb": { + "encoder": "vitb", + "features": 128, + "out_channels": [96, 192, 384, 768], + }, + "vitl": { + "encoder": "vitl", + "features": 256, + "out_channels": [256, 512, 1024, 1024], + }, + "vitg": { + "encoder": "vitg", + "features": 384, + "out_channels": [1536, 1536, 1536, 1536], + }, + } + + encoder = model_configs[model_name]["encoder"] + features = model_configs[model_name]["features"] + out_channels = model_configs[model_name]["out_channels"] + + self.intermediate_layer_idx = { + "vits": [2, 5, 8, 11], + "vitb": [2, 5, 8, 11], + "vitl": [4, 11, 17, 23], + "vitg": [9, 19, 29, 39], + } + + self.encoder = encoder + + # self.dino_model = DINOv2(model_name=encoder) + self.dino_model = torch.hub.load( + "facebookresearch/dinov2", f"dinov2_{encoder}14", pretrained=True + ) + self.dense_head = DPTHead( + self.dino_model.embed_dim, + features, + use_bn, + out_channels=out_channels, + use_clstoken=use_clstoken, + out_channel=out_dim, + ) + + self.dino_normlize = torchvision.transforms.Normalize( + mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225] + ) + + def forward(self, x, is_training=True): + x = self.dino_normlize(x) + + patch_h, patch_w = x.shape[-2] // 14, x.shape[-1] // 14 + + features = self.dino_model.get_intermediate_layers( + x, self.intermediate_layer_idx[self.encoder], return_class_token=True + ) + + feat = self.dense_head(features, patch_h, patch_w) + # print(x.shape, feat.shape) + # depth = F.relu(depth) + # return depth.squeeze(1) + out_global = None + return feat, out_global + + @torch.no_grad() + def infer_image(self, raw_image, input_size=518): + image, (h, w) = self.image2tensor(raw_image, input_size) + + depth = self.forward(image) + + depth = F.interpolate( + depth[:, None], (h, w), mode="bilinear", align_corners=True + )[0, 0] + + return depth.cpu().numpy() + + def image2tensor(self, raw_image, input_size=518): + transform = Compose( + [ + Resize( + width=input_size, + 
height=input_size, + resize_target=False, + keep_aspect_ratio=True, + ensure_multiple_of=14, + resize_method="lower_bound", + image_interpolation_method=cv2.INTER_CUBIC, + ), + NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), + PrepareForNet(), + ] + ) + + h, w = raw_image.shape[:2] + + image = cv2.cvtColor(raw_image, cv2.COLOR_BGR2RGB) / 255.0 + + image = transform({"image": image})["image"] + image = torch.from_numpy(image).unsqueeze(0) + + DEVICE = ( + "cuda" + if torch.cuda.is_available() + else "mps" if torch.backends.mps.is_available() else "cpu" + ) + image = image.to(DEVICE) + + return image, (h, w) diff --git a/LHM/models/encoders/dinov2_dpt_wrapper.py b/LHM/models/encoders/dinov2_dpt_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..969ad10cd9e8c9198be77f3308e27f13742e89de --- /dev/null +++ b/LHM/models/encoders/dinov2_dpt_wrapper.py @@ -0,0 +1,94 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn +from accelerate.logging import get_logger + +from LHM.models.encoders.dinov2_dpt import DINODPT + +logger = get_logger(__name__) + + +class Dinov2DPTWrapper(nn.Module): + """ + Dinov2DPTWrapper using original implementation, hacked with modulation. + """ + + def __init__( + self, + model_name: str, + modulation_dim: int = None, + freeze: bool = True, + encoder_feat_dim: int = 384, + ): + super().__init__() + self.modulation_dim = modulation_dim + # self.model = self._build_dinov2(model_name, modulation_dim=modulation_dim) + # self.model = DINOBase(output_dim=384) + self.model = DINODPT(model_name="vitb", out_dim=encoder_feat_dim) + + if freeze: + if modulation_dim is not None: + raise ValueError( + "Modulated Dinov2 requires training, freezing is not allowed." + ) + self._freeze() + else: + for name, param in self.model.dino_model.named_parameters(): + if name == "mask_token": + param.requires_grad = False + + def _freeze(self): + logger.warning(f"======== Freezing Dinov2DPTWrapper ========") + self.model.dino_model.eval() + for name, param in self.model.dino_model.named_parameters(): + param.requires_grad = False + + @staticmethod + def _build_dinov2( + model_name: str, modulation_dim: int = None, pretrained: bool = True + ): + from importlib import import_module + + dinov2_hub = import_module(".dinov2.hub.backbones", package=__package__) + model_fn = getattr(dinov2_hub, model_name) + logger.debug(f"Modulation dim for Dinov2 is {modulation_dim}.") + model = model_fn(modulation_dim=modulation_dim, pretrained=pretrained) + return model + + @torch.compile + def forward(self, image: torch.Tensor, mod: torch.Tensor = None): + # image: [N, C, H, W] + # mod: [N, D] or None + # RGB image with [0,1] scale and properly sized + if self.modulation_dim is None: + assert mod is None, "Unexpected modulation input in dinov2 forward." 
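+            # Unmodulated path: the DINO-DPT backbone returns a (dense feature map,
+            # global feature) tuple; the global feature is currently always None.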
+ outs = self.model(image, is_training=True) + else: + assert ( + mod is not None + ), "Modulation input is required in modulated dinov2 forward." + outs = self.model(image, mod=mod, is_training=True) + + out_local, out_global = outs + if out_global is not None: + ret = torch.cat( + [out_local.permute(0, 2, 3, 1).flatten(1, 2), out_global.unsqueeze(1)], + dim=1, + ) + else: + ret = out_local.permute(0, 2, 3, 1).flatten(1, 2) + return ret diff --git a/LHM/models/encoders/dinov2_featup_wrapper.py b/LHM/models/encoders/dinov2_featup_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..5c821cb4d4722735effff2f5e353fa52790d19c0 --- /dev/null +++ b/LHM/models/encoders/dinov2_featup_wrapper.py @@ -0,0 +1,70 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn +from accelerate.logging import get_logger + +logger = get_logger(__name__) + + +class Dinov2FeatUpWrapper(nn.Module): + """ + Dinov2FeatUpWrapper using original implementation, hacked with modulation. + """ + def __init__(self, model_name: str, modulation_dim: int = None, freeze: bool = True, encoder_feat_dim: int = 384): + super().__init__() + self.modulation_dim = modulation_dim + self.model = torch.hub.load("mhamilton723/FeatUp", 'dinov2', use_norm=True) + + if freeze: + if modulation_dim is not None: + raise ValueError("Modulated Dinov2 requires training, freezing is not allowed.") + self._freeze() + else: + for name, param in self.model.named_parameters(): + if name == "model.0.model.mask_token": + param.requires_grad = False + + def _freeze(self): + logger.warning(f"======== Freezing Dinov2UnetWrapper ========") + self.model.model.eval() + for name, param in self.model.model.named_parameters(): + param.requires_grad = False + + @staticmethod + def _build_dinov2(model_name: str, modulation_dim: int = None, pretrained: bool = True): + from importlib import import_module + dinov2_hub = import_module(".dinov2.hub.backbones", package=__package__) + model_fn = getattr(dinov2_hub, model_name) + logger.debug(f"Modulation dim for Dinov2 is {modulation_dim}.") + model = model_fn(modulation_dim=modulation_dim, pretrained=pretrained) + return model + + @torch.compile + def forward(self, image: torch.Tensor, mod: torch.Tensor = None): + # image: [N, C, H, W] + # mod: [N, D] or None + # RGB image with [0,1] scale and properly sized + if self.modulation_dim is None: + assert mod is None, "Unexpected modulation input in dinov2 forward." + outs = self.model(image) + else: + assert mod is not None, "Modulation input is required in modulated dinov2 forward." 
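+            # Modulated path: forwards `mod` to the backbone, so it assumes a backbone
+            # whose forward accepts a `mod` keyword argument.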
+ outs = self.model(image, mod=mod) + out_local = outs + out_local = nn.functional.avg_pool2d(out_local, stride=2, kernel_size=2) + ret = out_local.permute(0, 2, 3, 1).flatten(1, 2) + return ret diff --git a/LHM/models/encoders/dinov2_fusion_wrapper.py b/LHM/models/encoders/dinov2_fusion_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..3f387c4b33d3d487871b838ac423caefd9ca216b --- /dev/null +++ b/LHM/models/encoders/dinov2_fusion_wrapper.py @@ -0,0 +1,205 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import pdb + +import kornia +import torch +import torch.nn as nn +import torch.nn.functional as F +from accelerate.logging import get_logger + +logger = get_logger(__name__) + + +class DPTHead(nn.Module): + def __init__( + self, + in_channels, + inner_channels, + use_clstoken=False, + out_channel=1024, + ): + super(DPTHead, self).__init__() + + self.use_clstoken = use_clstoken + self.projects = nn.ModuleList( + [ + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channel, + kernel_size=1, + stride=1, + padding=0, + ) + for out_channel in inner_channels + ] + ) + + if use_clstoken: + self.readout_projects = nn.ModuleList() + for _ in range(len(self.projects)): + self.readout_projects.append( + nn.Sequential(nn.Linear(2 * in_channels, in_channels), nn.GELU()) + ) + + self.output_conv = nn.Conv2d( + sum(inner_channels), out_channel, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, out_features, patch_h, patch_w): + + out = [] + for i, x in enumerate(out_features): + if self.use_clstoken: + x, cls_token = x[0], x[1] + readout = cls_token.unsqueeze(1).expand_as(x) + x = self.readout_projects[i](torch.cat((x, readout), -1)) + else: + x = x[0] + + x = x.permute(0, 2, 1).reshape((x.shape[0], x.shape[-1], patch_h, patch_w)) + + x = self.projects[i](x) + + out.append(x) + + fusion_feats = torch.cat(out, dim=1) + + fusion_feats = self.output_conv(fusion_feats) + + return fusion_feats + + +class Dinov2FusionWrapper(nn.Module): + """ + Dinov2FusionWrapper using original implementation, hacked with modulation. 
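+
+    Pipeline (as implemented below): pad the input image to a square, resize it to
+    `resolution` (448 by default), take the four intermediate DINOv2 layers listed in
+    `intermediate_layer_idx_info`, and fuse them with a 1x1-conv DPT-style head into
+    per-patch tokens of shape [N, (resolution // patch_size) ** 2, encoder_feat_dim].
+
+    Usage sketch (illustrative; shapes assume the default resolution of 448 and a
+    14-pixel patch size):
+        encoder = Dinov2FusionWrapper("dinov2_vitb14_reg", encoder_feat_dim=1024)
+        tokens = encoder(torch.rand(1, 3, 512, 384))   # -> [1, 32 * 32, 1024]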
+ """ + + def __init__( + self, + model_name: str, + modulation_dim: int = None, + freeze: bool = True, + encoder_feat_dim: int = 384, + resolution=448, # DINOV2 default resolution + antialias=True, + ): + super().__init__() + self.modulation_dim = modulation_dim + self.model = self._build_dinov2(model_name, modulation_dim=modulation_dim) + + self.intermediate_layer_idx_info = { + "dinov2_vits14_reg": [2, 5, 8, 11], + "dinov2_vitb14_reg": [2, 5, 8, 11], + "dinov2_vitl14_reg": [4, 11, 17, 23], + "dinov2_vitg14_reg": [9, 19, 29, 39], + } + + self.intermediate_layer_idx = self.intermediate_layer_idx_info[model_name] + self.fusion_head = DPTHead( + in_channels=self.model.embed_dim, + inner_channels=[self.model.embed_dim] * 4, + out_channel=encoder_feat_dim, + ) + + self.resolution = resolution + self.antialias = antialias + + if freeze: + if modulation_dim is not None: + raise ValueError( + "Modulated Dinov2 requires training, freezing is not allowed." + ) + self._freeze() + + def _freeze(self): + logger.warning(f"======== Freezing Dinov2FusionWrapper ========") + self.model.eval() + for name, param in self.model.named_parameters(): + param.requires_grad = False + + def _preprocess_image( + self, image: torch.tensor, resolution: int = 1024 + ) -> torch.Tensor: + + _, __, H, W = image.shape + max_size = max(H, W) + H_pad = max_size - H + W_pad = max_size - W + pad_size = ( + W_pad // 2, + max_size - (W + W_pad // 2), + H_pad // 2, + max_size - (H + H_pad // 2), + 0, + 0, + 0, + 0, + ) + + image = F.pad(image, pad_size, value=1) + + image = kornia.geometry.resize( + image, + (resolution, resolution), + interpolation="bicubic", + align_corners=True, + antialias=self.antialias, + ) + + return image + + @staticmethod + def _build_dinov2( + model_name: str, modulation_dim: int = None, pretrained: bool = True + ): + from importlib import import_module + + dinov2_hub = import_module(".dinov2.hub.backbones", package=__package__) + model_fn = getattr(dinov2_hub, model_name) + logger.debug(f"Modulation dim for Dinov2 is {modulation_dim}.") + model = model_fn(modulation_dim=modulation_dim, pretrained=pretrained) + return model + + @torch.compile + def forward(self, image: torch.Tensor, mod: torch.Tensor = None): + # image: [N, C, H, W] + # mod: [N, D] or None + # RGB image with [0,1] scale and properly sized + + image = self._preprocess_image(image, self.resolution) + + patch_h, patch_w = ( + image.shape[-2] // self.model.patch_size, + image.shape[-1] // self.model.patch_size, + ) + + features = self.model.get_intermediate_layers( + image, self.intermediate_layer_idx, return_class_token=True + ) + + out_local = self.fusion_head(features, patch_h, patch_w) + + out_global = None + if out_global is not None: + ret = torch.cat( + [out_local.permute(0, 2, 3, 1).flatten(1, 2), out_global.unsqueeze(1)], + dim=1, + ) + else: + ret = out_local.permute(0, 2, 3, 1).flatten(1, 2) + + return ret diff --git a/LHM/models/encoders/dinov2_unet.py b/LHM/models/encoders/dinov2_unet.py new file mode 100644 index 0000000000000000000000000000000000000000..07e0e6b63f79da1dbee21fb0eec99c68498f342a --- /dev/null +++ b/LHM/models/encoders/dinov2_unet.py @@ -0,0 +1,264 @@ +#!/usr/bin/env python +# Copyright (c) Xuangeng Chu (xg.chu@outlook.com) + +import torch +import torchvision +import torch.nn as nn +import timm +from accelerate.logging import get_logger + +logger = get_logger(__name__) + + + +class DINOBase(nn.Module): + def __init__(self, output_dim=128, only_global=False): + super().__init__() + self.only_global = only_global 
+ assert self.only_global == False + self.dino_model = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitb14', pretrained=True) + + # self.encoder = timm.create_model("resnet18", pretrained=True) + # del self.encoder.global_pool + # del self.encoder.fc + + # model_name = "dinov2_vits14_reg" + # modulation_dim = None + # self.dino_model = self._build_dinov2(model_name, modulation_dim=modulation_dim) + + self.dino_normlize = torchvision.transforms.Normalize( + mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225] + ) + + in_dim = self.dino_model.blocks[0].attn.qkv.in_features + hidden_dims=256 + out_dims=[256, 512, 1024, 1024] + # modules + self.projects = nn.ModuleList([ + nn.Conv2d( + in_dim, out_dim, kernel_size=1, stride=1, padding=0, + ) for out_dim in out_dims + ]) + + self.resize_layers = nn.ModuleList([ + nn.Sequential( + nn.Upsample(scale_factor=2, mode="bilinear", align_corners=True), + nn.Conv2d( + out_dims[0], out_dims[0], kernel_size=3, stride=1, padding=1), + nn.Upsample(scale_factor=2, mode="bilinear", align_corners=True), + nn.Conv2d( + out_dims[0], out_dims[0], kernel_size=3, stride=1, padding=1) + ), + nn.Sequential( + nn.Upsample(scale_factor=2, mode="bilinear", align_corners=True), + nn.Conv2d( + out_dims[1], out_dims[1], kernel_size=3, stride=1, padding=1) + ), + nn.Sequential( + nn.Conv2d( + out_dims[2], out_dims[2], kernel_size=3, stride=1, padding=1) + ), + nn.Sequential( + nn.Conv2d( + out_dims[3], out_dims[3], kernel_size=3, stride=2, padding=1) + ) + ]) + # self.layer_rn = nn.ModuleList([ + # nn.Conv2d(out_dims[0]+64, hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + # nn.Conv2d(out_dims[1]+128, hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + # nn.Conv2d(out_dims[2]+256, hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + # nn.Conv2d(out_dims[3]+512, hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + # ]) + self.layer_rn = nn.ModuleList([ + nn.Conv2d(out_dims[0]+3, hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + nn.Conv2d(out_dims[1]+3, hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + nn.Conv2d(out_dims[2]+3, hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + nn.Conv2d(out_dims[3]+3, hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + ]) + # self.layer_rn = nn.ModuleList([ + # nn.Conv2d(out_dims[0], hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + # nn.Conv2d(out_dims[1], hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + # nn.Conv2d(out_dims[2], hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + # nn.Conv2d(out_dims[3], hidden_dims, kernel_size=3, stride=1, padding=1, bias=False), + # ]) + + self.refinenet = nn.ModuleList([ + FeatureFusionBlock(hidden_dims, nn.ReLU(False), use_conv1=False), + FeatureFusionBlock(hidden_dims, nn.ReLU(False)), + FeatureFusionBlock(hidden_dims, nn.ReLU(False)), + FeatureFusionBlock(hidden_dims, nn.ReLU(False)), + ]) + self.output_conv = nn.Conv2d(hidden_dims, output_dim, kernel_size=3, stride=1, padding=1) + # self.output_gloabl_proj = nn.Linear(384, output_dim) + + @staticmethod + def _build_dinov2(model_name: str, modulation_dim: int = None, pretrained: bool = True): + from importlib import import_module + dinov2_hub = import_module(".dinov2.hub.backbones", package=__package__) + model_fn = getattr(dinov2_hub, model_name) + logger.debug(f"Modulation dim for Dinov2 is {modulation_dim}.") + model = model_fn(modulation_dim=modulation_dim, pretrained=pretrained) + return model + + def 
forward(self, images, output_size=None, is_training=True): + # enc_output = self.encoder.forward_intermediates(images, stop_early=True, intermediates_only=True) + # enc_out4 = enc_output[4] # 32 + # enc_out3 = enc_output[3] # 16 + # enc_out2 = enc_output[2] # 8 + # enc_out1 = enc_output[1] # 4 + + images = self.dino_normlize(images) + patch_h, patch_w = images.shape[-2]//14, images.shape[-1]//14 + + image_features = self.dino_model.get_intermediate_layers(images, 4) + + out_features = [] + for i, feature in enumerate(image_features): + feature = feature.permute(0, 2, 1).reshape( + (feature.shape[0], feature.shape[-1], patch_h, patch_w) + ) + feature = self.projects[i](feature) + feature = self.resize_layers[i](feature) + # print(enc_output[i+1].shape, feature.shape) + feature = torch.cat([ + nn.functional.interpolate(images, (feature.shape[-2], feature.shape[-1]), mode="bilinear", align_corners=True), + feature + ], dim=1 + ) + out_features.append(feature) + layer_rns = [] + for i, feature in enumerate(out_features): + layer_rns.append(self.layer_rn[i](feature)) + + path_4 = self.refinenet[0](layer_rns[3], size=layer_rns[2].shape[2:]) + path_3 = self.refinenet[1](path_4, layer_rns[2], size=layer_rns[1].shape[2:]) + path_2 = self.refinenet[2](path_3, layer_rns[1], size=layer_rns[0].shape[2:]) + path_1 = self.refinenet[3](path_2, layer_rns[0]) + out = self.output_conv(path_1) + + if output_size is not None: + out = nn.functional.interpolate(out, output_size, mode="bilinear", align_corners=True) + # out_global = image_features[-1][:, 0] + # out_global = self.output_gloabl_proj(out_global) + out_global = None + return out, out_global + + +class ResidualConvUnit(nn.Module): + """Residual convolution module. + """ + + def __init__(self, features, activation, bn): + """Init. + + Args: + features (int): number of features + """ + super().__init__() + + self.bn = bn + + self.groups=1 + + self.conv1 = nn.Conv2d( + features, features, kernel_size=3, stride=1, padding=1, bias=True, groups=self.groups + ) + + self.conv2 = nn.Conv2d( + features, features, kernel_size=3, stride=1, padding=1, bias=True, groups=self.groups + ) + + if self.bn==True: + self.bn1 = nn.BatchNorm2d(features) + self.bn2 = nn.BatchNorm2d(features) + + self.activation = activation + + self.skip_add = nn.quantized.FloatFunctional() + + def forward(self, x): + """Forward pass. + + Args: + x (tensor): input + + Returns: + tensor: output + """ + + out = self.activation(x) + out = self.conv1(out) + if self.bn==True: + out = self.bn1(out) + + out = self.activation(out) + out = self.conv2(out) + if self.bn==True: + out = self.bn2(out) + + if self.groups > 1: + out = self.conv_merge(out) + + return self.skip_add.add(out, x) + # return out + x + + +class FeatureFusionBlock(nn.Module): + """Feature fusion block. + """ + + def __init__(self, features, activation, deconv=False, bn=False, expand=False, align_corners=True, size=None, + use_conv1=True): + """Init. 
+ + Args: + features (int): number of features + """ + super(FeatureFusionBlock, self).__init__() + + self.deconv = deconv + self.align_corners = align_corners + + self.groups=1 + + self.expand = expand + out_features = features + if self.expand==True: + out_features = features//2 + + self.out_conv = nn.Conv2d(features, out_features, kernel_size=1, stride=1, padding=0, bias=True, groups=1) + + if use_conv1: + self.resConfUnit1 = ResidualConvUnit(features, activation, bn) + self.skip_add = nn.quantized.FloatFunctional() + + self.resConfUnit2 = ResidualConvUnit(features, activation, bn) + + self.size=size + + def forward(self, *xs, size=None): + """Forward pass. + + Returns: + tensor: output + """ + output = xs[0] + + if len(xs) == 2: + res = self.resConfUnit1(xs[1]) + output = self.skip_add.add(output, res) + # output = output + res + + output = self.resConfUnit2(output) + + if (size is None) and (self.size is None): + modifier = {"scale_factor": 2} + elif size is None: + modifier = {"size": self.size} + else: + modifier = {"size": size} + output = nn.functional.interpolate( + output, **modifier, mode="bilinear", align_corners=self.align_corners + ) + output = self.out_conv(output) + return output diff --git a/LHM/models/encoders/dinov2_unet_wrapper.py b/LHM/models/encoders/dinov2_unet_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..df8bb2cc04161a58b38ff5eeb4d756d74484b04e --- /dev/null +++ b/LHM/models/encoders/dinov2_unet_wrapper.py @@ -0,0 +1,99 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn +from accelerate.logging import get_logger + +from LHM.models.encoders.dinov2_unet import DINOBase + +logger = get_logger(__name__) + + +class Dinov2UnetWrapper(nn.Module): + """ + Dino v2 wrapper using original implementation, hacked with modulation. + """ + + def __init__( + self, + model_name: str, + modulation_dim: int = None, + freeze: bool = True, + encoder_feat_dim: int = 384, + ): + super().__init__() + self.modulation_dim = modulation_dim + # self.model = self._build_dinov2(model_name, modulation_dim=modulation_dim) + self.model = DINOBase(output_dim=encoder_feat_dim) + assert model_name in ["no_avg", "avg_2"] + self.model_name = model_name + + if freeze: + if modulation_dim is not None: + raise ValueError( + "Modulated Dinov2 requires training, freezing is not allowed." 
+ ) + self._freeze() + else: + for name, param in self.model.dino_model.named_parameters(): + if name == "mask_token": + param.requires_grad = False + + def _freeze(self): + logger.warning(f"======== Freezing Dinov2UnetWrapper ========") + self.model.dino_model.eval() + for name, param in self.model.dino_model.named_parameters(): + param.requires_grad = False + + @staticmethod + def _build_dinov2( + model_name: str, modulation_dim: int = None, pretrained: bool = True + ): + from importlib import import_module + + dinov2_hub = import_module(".dinov2.hub.backbones", package=__package__) + model_fn = getattr(dinov2_hub, model_name) + logger.debug(f"Modulation dim for Dinov2 is {modulation_dim}.") + model = model_fn(modulation_dim=modulation_dim, pretrained=pretrained) + return model + + @torch.compile + def forward(self, image: torch.Tensor, mod: torch.Tensor = None): + # image: [N, C, H, W] + # mod: [N, D] or None + # RGB image with [0,1] scale and properly sized + if self.modulation_dim is None: + assert mod is None, "Unexpected modulation input in dinov2 forward." + outs = self.model(image, is_training=True) + else: + assert ( + mod is not None + ), "Modulation input is required in modulated dinov2 forward." + outs = self.model(image, mod=mod, is_training=True) + + out_local, out_global = outs + + if self.model_name == "avg_2": + out_local = nn.functional.avg_pool2d(out_local, stride=2, kernel_size=2) + + if out_global is not None: + ret = torch.cat( + [out_local.permute(0, 2, 3, 1).flatten(1, 2), out_global.unsqueeze(1)], + dim=1, + ) + else: + ret = out_local.permute(0, 2, 3, 1).flatten(1, 2) + return ret diff --git a/LHM/models/encoders/dinov2_wrapper.py b/LHM/models/encoders/dinov2_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..8453ca68833e3cea5dd40055cfe869ee0ddf317a --- /dev/null +++ b/LHM/models/encoders/dinov2_wrapper.py @@ -0,0 +1,67 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn +from accelerate.logging import get_logger + + +logger = get_logger(__name__) + + +class Dinov2Wrapper(nn.Module): + """ + Dino v2 wrapper using original implementation, hacked with modulation. 
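+
+ The forward pass returns the class token concatenated in front of the patch
+ tokens, i.e. a tensor of shape [N, 1 + H*W/(p*p), D], where p is the backbone
+ patch size (14 for the stock DINOv2 ViT models).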
+ """ + def __init__(self, model_name: str, modulation_dim: int = None, freeze: bool = True, encoder_feat_dim: int = 384): + super().__init__() + self.modulation_dim = modulation_dim + self.model = self._build_dinov2(model_name, modulation_dim=modulation_dim) + if freeze: + if modulation_dim is not None: + raise ValueError("Modulated Dinov2 requires training, freezing is not allowed.") + self._freeze() + + def _freeze(self): + logger.warning(f"======== Freezing Dinov2Wrapper ========") + self.model.eval() + for name, param in self.model.named_parameters(): + param.requires_grad = False + + @staticmethod + def _build_dinov2(model_name: str, modulation_dim: int = None, pretrained: bool = True): + from importlib import import_module + dinov2_hub = import_module(".dinov2.hub.backbones", package=__package__) + model_fn = getattr(dinov2_hub, model_name) + logger.debug(f"Modulation dim for Dinov2 is {modulation_dim}.") + model = model_fn(modulation_dim=modulation_dim, pretrained=pretrained) + return model + + @torch.compile + def forward(self, image: torch.Tensor, mod: torch.Tensor = None): + # image: [N, C, H, W] + # mod: [N, D] or None + # RGB image with [0,1] scale and properly sized + if self.modulation_dim is None: + assert mod is None, "Unexpected modulation input in dinov2 forward." + outs = self.model(image, is_training=True) + else: + assert mod is not None, "Modulation input is required in modulated dinov2 forward." + outs = self.model(image, mod=mod, is_training=True) + ret = torch.cat([ + outs["x_norm_clstoken"].unsqueeze(dim=1), + outs["x_norm_patchtokens"], + ], dim=1) + return ret diff --git a/LHM/models/encoders/dpt_util/__init__.py b/LHM/models/encoders/dpt_util/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/LHM/models/encoders/dpt_util/blocks.py b/LHM/models/encoders/dpt_util/blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..e562a4b387171d5afdcfab5a524b85b32981d891 --- /dev/null +++ b/LHM/models/encoders/dpt_util/blocks.py @@ -0,0 +1,151 @@ +import torch.nn as nn + + +def _make_scratch(in_shape, out_shape, groups=1, expand=False): + scratch = nn.Module() + + out_shape1 = out_shape + out_shape2 = out_shape + out_shape3 = out_shape + if len(in_shape) >= 4: + out_shape4 = out_shape + + if expand: + out_shape1 = out_shape + out_shape2 = out_shape * 2 + out_shape3 = out_shape * 4 + if len(in_shape) >= 4: + out_shape4 = out_shape * 8 + + scratch.layer1_rn = nn.Conv2d(in_shape[0], out_shape1, kernel_size=3, stride=1, padding=1, bias=False, groups=groups) + scratch.layer2_rn = nn.Conv2d(in_shape[1], out_shape2, kernel_size=3, stride=1, padding=1, bias=False, groups=groups) + scratch.layer3_rn = nn.Conv2d(in_shape[2], out_shape3, kernel_size=3, stride=1, padding=1, bias=False, groups=groups) + if len(in_shape) >= 4: + scratch.layer4_rn = nn.Conv2d(in_shape[3], out_shape4, kernel_size=3, stride=1, padding=1, bias=False, groups=groups) + + return scratch + + +class ResidualConvUnit(nn.Module): + """Residual convolution module. + """ + + def __init__(self, features, activation, bn): + """Init. 
+ + Args: + features (int): number of features + """ + super().__init__() + + self.bn = bn + + self.groups=1 + + self.conv1 = nn.Conv2d(features, features, kernel_size=3, stride=1, padding=1, bias=True, groups=self.groups) + + self.conv2 = nn.Conv2d(features, features, kernel_size=3, stride=1, padding=1, bias=True, groups=self.groups) + + if self.bn == True: + self.bn1 = nn.BatchNorm2d(features) + self.bn2 = nn.BatchNorm2d(features) + + self.activation = activation + + self.skip_add = nn.quantized.FloatFunctional() + + def forward(self, x): + """Forward pass. + + Args: + x (tensor): input + + Returns: + tensor: output + """ + + out = self.activation(x) + out = self.conv1(out) + if self.bn == True: + out = self.bn1(out) + + out = self.activation(out) + out = self.conv2(out) + if self.bn == True: + out = self.bn2(out) + + if self.groups > 1: + out = self.conv_merge(out) + + return self.skip_add.add(out, x) + + +class FeatureFusionBlock(nn.Module): + """Feature fusion block. + """ + + def __init__( + self, + features, + activation, + deconv=False, + bn=False, + expand=False, + align_corners=True, + size=None, + use_conv1=True + ): + """Init. + + Args: + features (int): number of features + """ + super(FeatureFusionBlock, self).__init__() + + self.deconv = deconv + self.align_corners = align_corners + + self.groups=1 + + self.expand = expand + out_features = features + if self.expand == True: + out_features = features // 2 + + self.out_conv = nn.Conv2d(features, out_features, kernel_size=1, stride=1, padding=0, bias=True, groups=1) + + if use_conv1: + self.resConfUnit1 = ResidualConvUnit(features, activation, bn) + self.skip_add = nn.quantized.FloatFunctional() + + self.resConfUnit2 = ResidualConvUnit(features, activation, bn) + + + self.size=size + + def forward(self, *xs, size=None, scale_factor=2): + """Forward pass. + + Returns: + tensor: output + """ + output = xs[0] + + if len(xs) == 2: + res = self.resConfUnit1(xs[1]) + output = self.skip_add.add(output, res) + + output = self.resConfUnit2(output) + + if (size is None) and (self.size is None): + modifier = {"scale_factor": scale_factor} + elif size is None: + modifier = {"size": self.size} + else: + modifier = {"size": size} + + output = nn.functional.interpolate(output, **modifier, mode="bilinear", align_corners=self.align_corners) + + output = self.out_conv(output) + + return output diff --git a/LHM/models/encoders/dpt_util/transform.py b/LHM/models/encoders/dpt_util/transform.py new file mode 100644 index 0000000000000000000000000000000000000000..d079354c1bc140a917a74c8c331c342f7b98e2e6 --- /dev/null +++ b/LHM/models/encoders/dpt_util/transform.py @@ -0,0 +1,177 @@ +import cv2 +import numpy as np + + +class Resize(object): + """Resize sample to given size (width, height).""" + + def __init__( + self, + width, + height, + resize_target=True, + keep_aspect_ratio=False, + ensure_multiple_of=1, + resize_method="lower_bound", + image_interpolation_method=cv2.INTER_AREA, + ): + """Init. + + Args: + width (int): desired output width + height (int): desired output height + resize_target (bool, optional): + True: Resize the full sample (image, mask, target). + False: Resize image only. + Defaults to True. + keep_aspect_ratio (bool, optional): + True: Keep the aspect ratio of the input sample. + Output sample might not have the given width and height, and + resize behaviour depends on the parameter 'resize_method'. + Defaults to False. 
+ ensure_multiple_of (int, optional): + Output width and height is constrained to be multiple of this parameter. + Defaults to 1. + resize_method (str, optional): + "lower_bound": Output will be at least as large as the given size. + "upper_bound": Output will be at max as large as the given size. (Output size might be smaller than given size.) + "minimal": Scale as least as possible. (Output size might be smaller than given size.) + Defaults to "lower_bound". + """ + self.__width = width + self.__height = height + + self.__resize_target = resize_target + self.__keep_aspect_ratio = keep_aspect_ratio + self.__multiple_of = ensure_multiple_of + self.__resize_method = resize_method + self.__image_interpolation_method = image_interpolation_method + + def constrain_to_multiple_of(self, x, min_val=0, max_val=None): + y = (np.round(x / self.__multiple_of) * self.__multiple_of).astype(int) + + if max_val is not None and y > max_val: + y = (np.floor(x / self.__multiple_of) * self.__multiple_of).astype(int) + + if y < min_val: + y = (np.ceil(x / self.__multiple_of) * self.__multiple_of).astype(int) + + return y + + def get_size(self, width, height): + # determine new height and width + scale_height = self.__height / height + scale_width = self.__width / width + + if self.__keep_aspect_ratio: + if self.__resize_method == "lower_bound": + # scale such that output size is lower bound + if scale_width > scale_height: + # fit width + scale_height = scale_width + else: + # fit height + scale_width = scale_height + elif self.__resize_method == "upper_bound": + # scale such that output size is upper bound + if scale_width < scale_height: + # fit width + scale_height = scale_width + else: + # fit height + scale_width = scale_height + elif self.__resize_method == "minimal": + # scale as least as possbile + if abs(1 - scale_width) < abs(1 - scale_height): + # fit width + scale_height = scale_width + else: + # fit height + scale_width = scale_height + else: + raise ValueError( + f"resize_method {self.__resize_method} not implemented" + ) + + if self.__resize_method == "lower_bound": + new_height = self.constrain_to_multiple_of( + scale_height * height, min_val=self.__height + ) + new_width = self.constrain_to_multiple_of( + scale_width * width, min_val=self.__width + ) + elif self.__resize_method == "upper_bound": + new_height = self.constrain_to_multiple_of( + scale_height * height, max_val=self.__height + ) + new_width = self.constrain_to_multiple_of( + scale_width * width, max_val=self.__width + ) + elif self.__resize_method == "minimal": + new_height = self.constrain_to_multiple_of(scale_height * height) + new_width = self.constrain_to_multiple_of(scale_width * width) + else: + raise ValueError(f"resize_method {self.__resize_method} not implemented") + + return (new_width, new_height) + + def __call__(self, sample): + width, height = self.get_size( + sample["image"].shape[1], sample["image"].shape[0] + ) + + # resize sample + sample["image"] = cv2.resize( + sample["image"], + (width, height), + interpolation=self.__image_interpolation_method, + ) + + if self.__resize_target: + if "depth" in sample: + sample["depth"] = cv2.resize( + sample["depth"], (width, height), interpolation=cv2.INTER_NEAREST + ) + + if "mask" in sample: + sample["mask"] = cv2.resize( + sample["mask"].astype(np.float32), + (width, height), + interpolation=cv2.INTER_NEAREST, + ) + + return sample + + +class NormalizeImage(object): + """Normlize image by given mean and std.""" + + def __init__(self, mean, std): + self.__mean = mean + 
self.__std = std + + def __call__(self, sample): + sample["image"] = (sample["image"] - self.__mean) / self.__std + + return sample + + +class PrepareForNet(object): + """Prepare sample for usage as network input.""" + + def __init__(self): + pass + + def __call__(self, sample): + image = np.transpose(sample["image"], (2, 0, 1)) + sample["image"] = np.ascontiguousarray(image).astype(np.float32) + + if "depth" in sample: + depth = sample["depth"].astype(np.float32) + sample["depth"] = np.ascontiguousarray(depth) + + if "mask" in sample: + sample["mask"] = sample["mask"].astype(np.float32) + sample["mask"] = np.ascontiguousarray(sample["mask"]) + + return sample diff --git a/LHM/models/encoders/sapiens_warpper.py b/LHM/models/encoders/sapiens_warpper.py new file mode 100644 index 0000000000000000000000000000000000000000..1e6998ec38108bcb5d32097c1140755cb01cd9bd --- /dev/null +++ b/LHM/models/encoders/sapiens_warpper.py @@ -0,0 +1,326 @@ +import functools +import gc +import multiprocessing as mp +import os +import pdb +import time +import traceback as tb +from argparse import ArgumentParser +from functools import partial +from multiprocessing import Pool, Process, cpu_count +from multiprocessing.pool import Pool +from typing import Union + +import cv2 +import kornia +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision +from accelerate.logging import get_logger +from tqdm import tqdm + +logger = get_logger(__name__) + +timings = {} +BATCH_SIZE = 64 + + +class AsyncWorkerExceptionsWrapper: + def __init__(self, callable): + self.__callable = callable + self._logger = mp.log_to_stderr() + + def __call__(self, *args, **kwargs): + try: + result = self.__callable(*args, **kwargs) + + except Exception as e: + self._logger.error(tb.format_exc()) + raise + + # It was fine, give a normal answer + return result + + +class AdhocImageDataset(torch.utils.data.Dataset): + def __init__(self, image_list, shape=None, mean=None, std=None): + self.image_list = image_list + if shape: + assert len(shape) == 2 + if mean or std: + assert len(mean) == 3 + assert len(std) == 3 + self.shape = shape + self.mean = torch.tensor(mean) if mean else None + self.std = torch.tensor(std) if std else None + + def __len__(self): + return len(self.image_list) + + def _preprocess(self, img): + if self.shape: + img = cv2.resize( + img, (self.shape[1], self.shape[0]), interpolation=cv2.INTER_LINEAR + ) + img = img.transpose(2, 0, 1) + img = torch.from_numpy(img) + img = img[[2, 1, 0], ...].float() # bgr2rgb + if self.mean is not None and self.std is not None: + mean = self.mean.view(-1, 1, 1) + std = self.std.view(-1, 1, 1) + img = (img - mean) / std + return img + + def __getitem__(self, idx): + orig_img_dir = self.image_list[idx] + orig_img = cv2.imread(orig_img_dir) + # orig_img = cv2.cvtColor(orig_img, cv2.COLOR_BGR2RGB) + img = self._preprocess(orig_img) + return orig_img_dir, orig_img, img + + +def warmup_model(model, batch_size): + # Warm up the model with a dummy input. 
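+ # Three forward passes are run on a side CUDA stream under bf16 autocast with
+ # gradients disabled, so CUDA kernels and autotuning caches are initialized
+ # before timed inference; the streams are then synchronized and the dummy
+ # batch is released.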
+ imgs = torch.randn(batch_size, 3, 1024, 768).to(dtype=torch.bfloat16).cuda() + s = torch.cuda.Stream() + s.wait_stream(torch.cuda.current_stream()) + with torch.cuda.stream(s), torch.no_grad(), torch.autocast( + device_type="cuda", dtype=torch.bfloat16 + ): + for i in range(3): + model(imgs) + torch.cuda.current_stream().wait_stream(s) + imgs = imgs.detach().cpu().float().numpy() + del imgs, s + + +def inference_model(model, imgs, dtype=torch.bfloat16): + # forward the model + with torch.no_grad(): + (results,) = model(imgs.to(dtype).cuda()) + imgs.cpu() + + return results + + +def fake_pad_images_to_batchsize(imgs): + return F.pad(imgs, (0, 0, 0, 0, 0, 0, 0, BATCH_SIZE - imgs.shape[0]), value=0) + + +def feat_save(feature, output_path): + pred_save_path = os.path.join( + output_path.replace(".jpg", ".npy") + .replace(".jpeg", ".npy") + .replace(".png", ".npy") + ) + np.save(pred_save_path, feature) + + +def load_model(checkpoint, use_torchscript=False): + if use_torchscript: + return torch.jit.load(checkpoint) + else: + return torch.export.load(checkpoint).module() + + +class SapiensWrapper(nn.Module): + def __init__( + self, + model_name: str, + freeze: bool = True, + encoder_feat_dim: int = 384, + resolution=1024, + antialias: bool = True, + ): + super().__init__() + self.model = self._build_sapiens(model_name) + self.resolution = resolution + + self.antialias = antialias + self.register_buffer( + "mean", torch.Tensor([0.4844, 0.4570, 0.4062]), persistent=False + ) + self.register_buffer( + "std", torch.Tensor([0.2295, 0.2236, 0.2256]), persistent=False + ) + + if freeze: + self._freeze() + else: + raise NotImplementedError( + "Fine-tuning is not supported yet." + ) # sapiens is too larger to finetune the model end-to-end. + + def _preprocess_image( + self, image: torch.tensor, resolution: int = 1024 + ) -> torch.Tensor: + + _, __, H, W = image.shape + max_size = max(H, W) + H_pad = max_size - H + W_pad = max_size - W + pad_size = ( + W_pad // 2, + max_size - (W + W_pad // 2), + H_pad // 2, + max_size - (H + H_pad // 2), + 0, + 0, + 0, + 0, + ) + + image = F.pad(image, pad_size, value=1) + + image = kornia.geometry.resize( + image, + (resolution, resolution), + interpolation="bicubic", + align_corners=True, + antialias=self.antialias, + ) + image = kornia.enhance.normalize(image, self.mean, self.std) + + return image + + @staticmethod + def _build_sapiens(model_name: str, pretrained: bool = True): + + logger.debug(f"Using Sapiens model: {model_name}") + USE_TORCHSCRIPT = "_torchscript" in model_name + + # build the model from a checkpoint file + model = load_model(model_name, use_torchscript=USE_TORCHSCRIPT) + if not USE_TORCHSCRIPT: + raise NotImplementedError + else: + dtype = torch.float32 # TorchScript models use float32 + model = model.cuda() + return model + + def _freeze(self): + logger.warning(f"======== Freezing Sapiens Model ========") + self.model.eval() + for name, param in self.model.named_parameters(): + param.requires_grad = False + + @torch.compile + def forward(self, image: torch.Tensor, mod: torch.Tensor = None): + # image: [N, C, H, W] + # mod: [N, D] or None + # RGB image with [0,1] scale and properly sized + + image = self._preprocess_image(image, self.resolution) + + # NOTE that, only supports + patch_h, patch_w = ( + image.shape[-2] // 16, + image.shape[-1] // 16, + ) + + with torch.no_grad(), torch.autocast(device_type="cuda", dtype=torch.bfloat16): + (out_local,) = self.model(image) + + out_global = None + if out_global is not None: + raise 
NotImplementedError("Global feature is not supported yet.") + else: + ret = out_local.permute(0, 2, 3, 1).flatten(1, 2) + + return ret + + +def main(): + parser = ArgumentParser() + parser.add_argument("checkpoint", help="Checkpoint file for pose") + parser.add_argument("--device", default="cuda:0", help="Device used for inference") + parser.add_argument( + "--batch_size", + type=int, + default=64, + help="Set batch size to do batch inference. ", + ) + parser.add_argument( + "--fp16", action="store_true", default=False, help="Model inference dtype" + ) + parser.add_argument( + "--shape", + type=int, + nargs="+", + default=[1024, 1024], + help="input image size (height, width)", + ) + + args = parser.parse_args() + + if len(args.shape) == 1: + input_shape = (3, args.shape[0], args.shape[0]) + elif len(args.shape) == 2: + input_shape = (3,) + tuple(args.shape) + else: + raise ValueError("invalid input shape") + + mp.log_to_stderr() + torch._inductor.config.force_fuse_int_mm_with_mul = True + torch._inductor.config.use_mixed_mm = True + + start = time.time() + + USE_TORCHSCRIPT = "_torchscript" in args.checkpoint + + # build the model from a checkpoint file + model = load_model(args.checkpoint, use_torchscript=USE_TORCHSCRIPT) + if not USE_TORCHSCRIPT: + dtype = torch.half if args.fp16 else torch.bfloat16 + model.to(dtype) + model = torch.compile(model, mode="max-autotune", fullgraph=True) + else: + dtype = torch.float32 # TorchScript models use float32 + model = model.cuda() + + imgs = torch.randn(2, 3, 1024, 1024).float().cuda() + + with torch.no_grad(), torch.autocast(device_type="cuda", dtype=torch.bfloat16): + (results,) = model(imgs.cuda()) + + ## no precision conversion needed for torchscript. run at fp32 + if not USE_TORCHSCRIPT: + dtype = torch.half if args.fp16 else torch.bfloat16 + model.to(dtype) + model = torch.compile(model, mode="max-autotune", fullgraph=True) + else: + dtype = torch.float32 # TorchScript models use float32 + model = model.to(args.device) + + image_names = [] + + pdb.set_trace() + + for batch_idx, (batch_image_name, batch_orig_imgs, batch_imgs) in tqdm( + enumerate(inference_dataloader), total=len(inference_dataloader) + ): + valid_images_len = len(batch_imgs) + batch_imgs = fake_pad_images_to_batchsize(batch_imgs) + results = inference_model(model, batch_imgs, dtype=dtype) + args_list = [ + ( + feat.cpu().float().numpy(), + os.path.join(args.output_root, os.path.basename(img_name)), + ) + for feat, img_name in zip(results[:valid_images_len], batch_image_name) + ] + feat_save_pool.run_async(args_list) + + feat_save_pool.finish() + + total_time = time.time() - start + fps = 1 / ((time.time() - start) / len(image_names)) + print( + f"\033[92mTotal inference time: {total_time:.2f} seconds. FPS: {fps:.2f}\033[0m" + ) + + +if __name__ == "__main__": + main() diff --git a/LHM/models/encoders/workpool.py b/LHM/models/encoders/workpool.py new file mode 100644 index 0000000000000000000000000000000000000000..ffe19f01f9106dfe87b0993dd13d00b32a5bf5e2 --- /dev/null +++ b/LHM/models/encoders/workpool.py @@ -0,0 +1,93 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
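+# Usage sketch (illustrative only; save_fn, its arguments, and the pool size are
+# example values, not part of this module):
+#
+#   pool = WorkerPool(save_fn, processes=4)
+#   pool.run_async([(feature, "/tmp/feat.npy")])  # list/tuple items are starmapped onto save_fn
+#   pool.finish()                                 # close() and join() the workers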
+ +import functools +import multiprocessing as mp +import traceback as tb +from multiprocessing.pool import Pool + + +class AsyncWorkerExceptionsWrapper: + def __init__(self, callable): + self.__callable = callable + self._logger = mp.log_to_stderr() + + def __call__(self, *args, **kwargs): + try: + result = self.__callable(*args, **kwargs) + + except Exception as e: + self._logger.error(tb.format_exc()) + raise + + # It was fine, give a normal answer + return result + + +class WorkerPool(Pool): + """Worker pool that runs a function on each value that is put(). + This pool is designed that if an exception is thrown in a child, the main process should stop + as well. + """ + + def __init__(self, func, *args, **kwargs): + self.func = func + super().__init__(*args, **kwargs) + + def _result_collector(self, result): + """ + Collects results from the pool and stores them in a list. + Args: + result: The result of the function that was run on the pool. + """ + if isinstance(result, (list, tuple)): + self.results.extend(result) + else: + self.results.append(result) + + def run(self, iterable, chunksize=1): + """Runs func on each item in iterable by using either map or starmap asynchronously. Also calls shutdown to finish up. + Args: + iterable: Iterable of items to run func on. + chunksize: Number of items to run func on at once. + Returns: + results from the map operation. + """ + if all(isinstance(x, (list, tuple)) for x in iterable): + results = self.starmap(self.func, iterable, chunksize) + else: + results = self.map(self.func, iterable) + return results + + def run_async(self, iterable, chunksize=1): + """Runs func on each item in iterable by using either map or starmap asynchronously. Also calls shutdown to finish up. + Args: + iterable: Iterable of items to run func on. + chunksize: Number of items to run func on at once. + Returns: + results from the map operation. + """ + self.results = [] + if all(isinstance(x, (list, tuple)) for x in iterable): + self.starmap_async( + AsyncWorkerExceptionsWrapper(self.func), + iterable, + chunksize, + callback=self._result_collector, + ) + else: + self.map_async( + AsyncWorkerExceptionsWrapper(self.func), + iterable, + chunksize, + callback=self._result_collector, + ) + return self.results + + def finish(self) -> None: + """Shutdown the pool and clean-up threads.""" + self.close() + self.join() diff --git a/LHM/models/encoders/xunet_wrapper.py b/LHM/models/encoders/xunet_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..c0759a2aa6e029d7e691214e5a0a7435e8cfc996 --- /dev/null +++ b/LHM/models/encoders/xunet_wrapper.py @@ -0,0 +1,111 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
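+# XUNet below pairs a timm backbone with a small transposed-conv decoder: encoder
+# intermediates are upsampled and fused by addition (skip connections at 1/16,
+# 1/8 and 1/4 scale), then projected to encoder_feat_dim with a 1x1 conv. The
+# channel widths (512 -> 256 -> 128 -> 64) appear to target a ResNet-18/34-style
+# backbone rather than the Swin model named in the default model_name.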
+ + +import torch +import torch.nn as nn +import timm +from accelerate.logging import get_logger + +logger = get_logger(__name__) + +class XUNet(nn.Module): + def __init__(self, model_name="swin_base_patch4_window12_384_in22k", encoder_feat_dim=384): + super(XUNet, self).__init__() + # Swin Transformer Encoder + self.encoder = timm.create_model(model_name, pretrained=True) + # swin + # del self.encoder.head + # del self.encoder.norm + # resnet + del self.encoder.global_pool + del self.encoder.fc + + # Decoder layers + # self.upconv4 = self.upconv_block(2048, 1024) # Upsample + # self.upconv3 = self.upconv_block(1024, 512) + # self.upconv2 = self.upconv_block(512, 256) + # self.upconv1 = self.upconv_block(256, 64) + + self.upconv4 = self.upconv_block(512, 256) # Upsample + self.upconv3 = self.upconv_block(256, 128) + self.upconv2 = self.upconv_block(128, 64) + # self.upconv1 = self.upconv_block(64, 64) + + self.out_conv = nn.Conv2d(64, encoder_feat_dim, kernel_size=1) + + + def upconv_block(self, in_channels, out_channels): + return nn.Sequential( + nn.ConvTranspose2d(in_channels, out_channels, kernel_size=2, stride=2), + nn.ReLU(inplace=True), + ) + + def forward(self, x): + # Encoder part using Swin Transformer + enc_output = self.encoder.forward_intermediates(x, stop_early=True, intermediates_only=True) + + # for e in enc_output: + # print(e.shape, x.shape) + + # Assuming output of the encoder is a list of feature maps + # Resize them according to UNet architecture + enc_out4 = enc_output[4] # Adjust according to the feature layers of Swin + enc_out3 = enc_output[3] + enc_out2 = enc_output[2] + enc_out1 = enc_output[1] + # enc_out0 = enc_output[0] + + # Decoder part + x = self.upconv4(enc_out4) + x = x + enc_out3 # s16, Skip connection + x = self.upconv3(x) + x = x + enc_out2 # s8 + x = self.upconv2(x) + x = x + enc_out1 # s4 + # x = self.upconv1(x) + # x = x + enc_out0 # s2 + + x = self.out_conv(x) + return x + + +class XnetWrapper(nn.Module): + """ + XnetWrapper using original implementation, hacked with modulation. 
+ """ + def __init__(self, model_name: str, modulation_dim: int = None, freeze: bool = True, encoder_feat_dim: int = 384): + super().__init__() + self.modulation_dim = modulation_dim + self.model = XUNet(model_name=model_name, encoder_feat_dim=encoder_feat_dim) + + if freeze: + if modulation_dim is not None: + raise ValueError("Modulated SwinUnetWrapper requires training, freezing is not allowed.") + self._freeze() + + def _freeze(self): + logger.warning(f"======== Freezing SwinUnetWrapper ========") + self.model.eval() + for name, param in self.model.named_parameters(): + param.requires_grad = False + + @torch.compile + def forward(self, image: torch.Tensor, mod: torch.Tensor = None): + # image: [N, C, H, W] + # mod: [N, D] or None + # RGB image with [0,1] scale and properly sized + outs = self.model(image) + ret = outs.permute(0, 2, 3, 1).flatten(1, 2) + return ret diff --git a/LHM/models/modeling_human_lrm.py b/LHM/models/modeling_human_lrm.py new file mode 100644 index 0000000000000000000000000000000000000000..ab2bdbdee0f090fc5305cf635a24e3ac83a62ce4 --- /dev/null +++ b/LHM/models/modeling_human_lrm.py @@ -0,0 +1,1143 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu && Xiaodong Gu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-1 17:40:57 +# @Function : Main codes for LHM +import os +import pdb +import pickle +import time +from collections import defaultdict + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from accelerate.logging import get_logger +from diffusers.utils import is_torch_version + +from LHM.models.arcface_utils import ResNetArcFace +from LHM.models.ESRGANer_utils import ESRGANEasyModel +from LHM.models.rendering.gs_renderer import GS3DRenderer, PointEmbed +from LHM.models.rendering.gsplat_renderer import GSPlatRenderer + +# from openlrm.models.stylegan2_utils import EasyStyleGAN_series_model +from LHM.models.utils import linear + +from .embedder import CameraEmbedder +from .rendering.synthesizer import TriplaneSynthesizer +from .transformer import TransformerDecoder + +logger = get_logger(__name__) + + +class ModelHumanLRM(nn.Module): + """ + Full model of the basic single-view large reconstruction model. 
+ """ + + def __init__( + self, + transformer_dim: int, + transformer_layers: int, + transformer_heads: int, + transformer_type="cond", + tf_grad_ckpt=False, + encoder_grad_ckpt=False, + encoder_freeze: bool = True, + encoder_type: str = "dino", + encoder_model_name: str = "facebook/dino-vitb16", + encoder_feat_dim: int = 768, + num_pcl: int = 2048, + pcl_dim: int = 512, + human_model_path=None, + smplx_subdivide_num=2, + smplx_type="smplx", + gs_query_dim=None, + gs_use_rgb=False, + gs_sh=3, + gs_mlp_network_config=None, + gs_xyz_offset_max_step=1.8 / 32, + gs_clip_scaling=0.2, + shape_param_dim=100, + expr_param_dim=50, + fix_opacity=False, + fix_rotation=False, + use_face_id=False, + facesr=False, + use_stylegan2_prior=False, + **kwargs, + ): + super().__init__() + + self.gradient_checkpointing = tf_grad_ckpt + self.encoder_gradient_checkpointing = encoder_grad_ckpt + + # attributes + self.encoder_feat_dim = encoder_feat_dim + + # modules + # image encoder default dino-v2 + self.encoder = self._encoder_fn(encoder_type)( + model_name=encoder_model_name, + freeze=encoder_freeze, + encoder_feat_dim=encoder_feat_dim, + ) + + # learnable points embedding + skip_decoder = False + self.latent_query_points_type = kwargs.get( + "latent_query_points_type", "embedding" + ) + if self.latent_query_points_type == "embedding": + self.num_pcl = num_pcl # 2048 + self.pcl_embeddings = nn.Embedding(num_pcl, pcl_dim) # 1024 + elif self.latent_query_points_type.startswith("smplx"): + latent_query_points_file = os.path.join( + human_model_path, "smplx_points", f"{self.latent_query_points_type}.npy" + ) + pcl_embeddings = torch.from_numpy(np.load(latent_query_points_file)).float() + print( + f"==========load smplx points:{latent_query_points_file}, shape:{pcl_embeddings.shape}" + ) + self.register_buffer("pcl_embeddings", pcl_embeddings) + self.pcl_embed = PointEmbed(dim=pcl_dim) + elif self.latent_query_points_type.startswith("e2e_smplx"): + skip_decoder = True + self.pcl_embed = PointEmbed(dim=pcl_dim) # pcl dim 1024 + else: + raise NotImplementedError + print(f"==========skip_decoder:{skip_decoder}") + + # transformer + self.transformer = self.build_transformer( + transformer_type, + transformer_layers, + transformer_heads, + transformer_dim, + encoder_feat_dim, + **kwargs, + ) + + # renderer + cano_pose_type = kwargs.get("cano_pose_type", 0) + dense_sample_pts = kwargs.get("dense_sample_pts", 40000) + + # original 3DGS Raster + self.renderer = GS3DRenderer( + human_model_path=human_model_path, + subdivide_num=smplx_subdivide_num, + smpl_type=smplx_type, + feat_dim=transformer_dim, + query_dim=gs_query_dim, + use_rgb=gs_use_rgb, + sh_degree=gs_sh, + mlp_network_config=gs_mlp_network_config, + xyz_offset_max_step=gs_xyz_offset_max_step, + clip_scaling=gs_clip_scaling, + shape_param_dim=shape_param_dim, + expr_param_dim=expr_param_dim, + cano_pose_type=cano_pose_type, + fix_opacity=fix_opacity, + fix_rotation=fix_rotation, + decoder_mlp=kwargs.get("decoder_mlp", False), + skip_decoder=skip_decoder, + decode_with_extra_info=kwargs.get("decode_with_extra_info", None), + gradient_checkpointing=self.gradient_checkpointing, + apply_pose_blendshape=kwargs.get("apply_pose_blendshape", False), + dense_sample_pts=dense_sample_pts, + ) + + # face_id + self.use_face_id = use_face_id + self.facesr = facesr + self.use_stylegan2_prior = use_stylegan2_prior + + if self.use_face_id: + self.id_face_net = ResNetArcFace() + + if self.facesr: + self.faceESRGAN = ESRGANEasyModel() + if self.use_stylegan2_prior: + 
self.stylegan2_prior = EasyStyleGAN_series_model() # harm PSNR. + + def compute_discriminator_loss(self, data): + return -F.softplus(self.stylegan2_prior(data)).mean() # StyleGAN2 + + def train(self, mode=True): + super().train(mode) + if self.use_face_id: + # setting id_face_net to evaluation + self.id_face_net.eval() + + def build_transformer( + self, + transformer_type, + transformer_layers, + transformer_heads, + transformer_dim, + encoder_feat_dim, + **kwargs, + ): + return TransformerDecoder( + block_type=transformer_type, + num_layers=transformer_layers, + num_heads=transformer_heads, + inner_dim=transformer_dim, + cond_dim=encoder_feat_dim, + mod_dim=None, + gradient_checkpointing=self.gradient_checkpointing, + ) + + def get_last_layer(self): + return self.renderer.gs_net.out_layers["shs"].weight + + def hyper_step(self, step): + pass + + @staticmethod + def _encoder_fn(encoder_type: str): + encoder_type = encoder_type.lower() + assert encoder_type in [ + "dino", + "dinov2", + "dinov2_unet", + "resunet", + "dinov2_featup", + "dinov2_dpt", + "dinov2_fusion", + "sapiens", + ], "Unsupported encoder type" + if encoder_type == "dino": + from .encoders.dino_wrapper import DinoWrapper + + logger.info("Using DINO as the encoder") + return DinoWrapper + elif encoder_type == "dinov2": + from .encoders.dinov2_wrapper import Dinov2Wrapper + + logger.info("Using DINOv2 as the encoder") + return Dinov2Wrapper + elif encoder_type == "dinov2_unet": + from .encoders.dinov2_unet_wrapper import Dinov2UnetWrapper + + logger.info("Using Dinov2Unet as the encoder") + return Dinov2UnetWrapper + elif encoder_type == "resunet": + from .encoders.xunet_wrapper import XnetWrapper + + logger.info("Using XnetWrapper as the encoder") + return XnetWrapper + elif encoder_type == "dinov2_featup": + from .encoders.dinov2_featup_wrapper import Dinov2FeatUpWrapper + + logger.info("Using Dinov2FeatUpWrapper as the encoder") + return Dinov2FeatUpWrapper + elif encoder_type == "dinov2_dpt": + from .encoders.dinov2_dpt_wrapper import Dinov2DPTWrapper + + logger.info("Using Dinov2DPTWrapper as the encoder") + return Dinov2DPTWrapper + elif encoder_type == "dinov2_fusion": + from .encoders.dinov2_fusion_wrapper import Dinov2FusionWrapper + + logger.info("Using Dinov2FusionWrapper as the encoder") + return Dinov2FusionWrapper + elif encoder_type == "sapiens": + from .encoders.sapiens_warpper import SapiensWrapper + + logger.info("Using Sapiens as the encoder") + return SapiensWrapper + + def forward_transformer(self, image_feats, camera_embeddings, query_points): + """ + Applies forward transformation to the input features. + Args: + image_feats (torch.Tensor): Input image features. Shape [B, C, H, W]. + camera_embeddings (torch.Tensor): Camera embeddings. Shape [B, D]. + query_points (torch.Tensor): Query points. Shape [B, L, D]. + Returns: + torch.Tensor: Transformed features. Shape [B, L, D]. 
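+
+ Note: the initial tokens depend on latent_query_points_type: a learned
+ embedding table ("embedding"), a fixed SMPL-X point buffer passed through
+ pcl_embed ("smplx*"), or the per-sample query_points argument ("e2e_smplx*").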
+ """ + + B = image_feats.shape[0] + + if self.latent_query_points_type == "embedding": + range_ = torch.arange(self.num_pcl, device=image_feats.device) + x = self.pcl_embeddings(range_).unsqueeze(0).repeat((B, 1, 1)) # [B, L, D] + + elif self.latent_query_points_type.startswith("smplx"): + x = self.pcl_embed(self.pcl_embeddings.unsqueeze(0)).repeat( + (B, 1, 1) + ) # [B, L, D] + + elif self.latent_query_points_type.startswith("e2e_smplx"): + # Linear warp -> MLP + LayerNorm + x = self.pcl_embed(query_points) # [B, L, D] + + x = self.transformer( + x, + cond=image_feats, + mod=camera_embeddings, + ) # [B, L, D] + return x + + def forward_encode_image(self, image): + # encode image + + if self.training and self.encoder_gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + ckpt_kwargs = ( + {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + ) + image_feats = torch.utils.checkpoint.checkpoint( + create_custom_forward(self.encoder), + image, + **ckpt_kwargs, + ) + else: + image_feats = self.encoder(image) + return image_feats + + @torch.compile + def forward_latent_points(self, image, camera, query_points=None): + """ + Forward pass of the latent points generation. + Args: + image (torch.Tensor): Input image tensor of shape [B, C_img, H_img, W_img]. + camera (torch.Tensor): Camera tensor of shape [B, D_cam_raw]. + query_points (torch.Tensor, optional): Query points tensor. for example, smplx surface points, Defaults to None. + Returns: + torch.Tensor: Generated tokens tensor. + torch.Tensor: Encoded image features tensor. + """ + + B = image.shape[0] + + # encode image + # image_feats is cond texture + image_feats = self.forward_encode_image(image) + + assert ( + image_feats.shape[-1] == self.encoder_feat_dim + ), f"Feature dimension mismatch: {image_feats.shape[-1]} vs {self.encoder_feat_dim}" + + # # embed camera + # camera_embeddings = self.camera_embedder(camera) + # assert camera_embeddings.shape[-1] == self.camera_embed_dim, \ + # f"Feature dimension mismatch: {camera_embeddings.shape[-1]} vs {self.camera_embed_dim}" + + # transformer generating latent points + tokens = self.forward_transformer( + image_feats, camera_embeddings=None, query_points=query_points + ) + + return tokens, image_feats + + def forward( + self, + image, + source_c2ws, + source_intrs, + render_c2ws, + render_intrs, + render_bg_colors, + smplx_params, + **kwargs, + ): + + # image: [B, N_ref, C_img, H_img, W_img] + # source_c2ws: [B, N_ref, 4, 4] + # source_intrs: [B, N_ref, 4, 4] + # render_c2ws: [B, N_source, 4, 4] + # render_intrs: [B, N_source, 4, 4] + # render_bg_colors: [B, N_source, 3] + # smplx_params: Dict, e.g., pose_shape: [B, N_source, 21, 3], betas:[B, 100] + # kwargs: Dict, e.g., src_head_imgs + + assert ( + image.shape[0] == render_c2ws.shape[0] + ), "Batch size mismatch for image and render_c2ws" + assert ( + image.shape[0] == render_bg_colors.shape[0] + ), "Batch size mismatch for image and render_bg_colors" + assert ( + image.shape[0] == smplx_params["betas"].shape[0] + ), "Batch size mismatch for image and smplx_params" + assert ( + image.shape[0] == smplx_params["body_pose"].shape[0] + ), "Batch size mismatch for image and smplx_params" + assert len(smplx_params["betas"].shape) == 2 + + render_h, render_w = int(render_intrs[0, 0, 1, 2] * 2), int( + render_intrs[0, 0, 0, 2] * 2 + ) + query_points = None + if self.latent_query_points_type.startswith("e2e_smplx"): + query_points, 
smplx_params = self.renderer.get_query_points( + smplx_params, device=image.device + ) + + latent_points, image_feats = self.forward_latent_points( + image[:, 0], camera=None, query_points=query_points + ) # [B, N, C] + + # render target views + + render_results = self.renderer( + gs_hidden_features=latent_points, + query_points=query_points, + smplx_data=smplx_params, + c2w=render_c2ws, + intrinsic=render_intrs, + height=render_h, + width=render_w, + background_color=render_bg_colors, + additional_features={"image_feats": image_feats, "image": image[:, 0]}, + df_data=kwargs["df_data"], + ) + + N, M = render_c2ws.shape[:2] + assert ( + render_results["comp_rgb"].shape[0] == N + ), "Batch size mismatch for render_results" + assert ( + render_results["comp_rgb"].shape[1] == M + ), "Number of rendered views should be consistent with render_cameras" + + gs_attrs_list = render_results.pop("gs_attr") + + offset_list = [] + scaling_list = [] + for gs_attrs in gs_attrs_list: + offset_list.append(gs_attrs.offset_xyz) + scaling_list.append(gs_attrs.scaling) + offset_output = torch.stack(offset_list) + scaling_output = torch.stack(scaling_list) + + return { + "latent_points": latent_points, + "offset_output": offset_output, + "scaling_output": scaling_output, + **render_results, + } + + def hyper_step(self, step): + + self.renderer.hyper_step(step) + + @torch.no_grad() + def infer_single_view( + self, + image, + source_c2ws, + source_intrs, + render_c2ws, + render_intrs, + render_bg_colors, + smplx_params, + ): + # image: [B, N_ref, C_img, H_img, W_img] + # source_c2ws: [B, N_ref, 4, 4] + # source_intrs: [B, N_ref, 4, 4] + # render_c2ws: [B, N_source, 4, 4] + # render_intrs: [B, N_source, 4, 4] + # render_bg_colors: [B, N_source, 3] + # smplx_params: Dict, e.g., pose_shape: [B, N_source, 21, 3], betas:[B, 100] + assert ( + image.shape[0] == render_c2ws.shape[0] + ), "Batch size mismatch for image and render_c2ws" + assert ( + image.shape[0] == render_bg_colors.shape[0] + ), "Batch size mismatch for image and render_bg_colors" + assert ( + image.shape[0] == smplx_params["betas"].shape[0] + ), "Batch size mismatch for image and smplx_params" + assert ( + image.shape[0] == smplx_params["body_pose"].shape[0] + ), "Batch size mismatch for image and smplx_params" + assert len(smplx_params["betas"].shape) == 2 + render_h, render_w = int(render_intrs[0, 0, 1, 2] * 2), int( + render_intrs[0, 0, 0, 2] * 2 + ) + assert image.shape[0] == 1 + num_views = render_c2ws.shape[1] + query_points = None + + start_time = time.time() + if self.latent_query_points_type.startswith("e2e_smplx"): + # obtain subdivide smplx points and transform_matrix from predefined pose to zero-pose (null pose) + query_points, smplx_params = self.renderer.get_query_points( + smplx_params, device=image.device + ) + + # using DiT to predict query points features. 
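+ # The Gaussian representation is predicted once from the single reference
+ # view; the loop below only re-poses and rasterizes it per target camera
+ # (forward_gs once, then forward_animate_gs per view).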
+ latent_points, image_feats = self.forward_latent_points( + image[:, 0], camera=None, query_points=query_points + ) # [B, N, C] + + gs_model_list, query_points, smplx_params = self.renderer.forward_gs( + gs_hidden_features=latent_points, + query_points=query_points, + smplx_data=smplx_params, + additional_features={"image_feats": image_feats, "image": image[:, 0]}, + ) + print(f"time elpased(forward gs model):{time.time() - start_time}") + start_time = time.time() + + # render target views + render_res_list = [] + for view_idx in range(num_views): + render_res = self.renderer.forward_animate_gs( + gs_model_list, + query_points, + self.renderer.get_single_view_smpl_data(smplx_params, view_idx), + render_c2ws[:, view_idx : view_idx + 1], + render_intrs[:, view_idx : view_idx + 1], + render_h, + render_w, + render_bg_colors[:, view_idx : view_idx + 1], + ) + render_res_list.append(render_res) + print( + f"time elpased(animate gs model per frame):{(time.time() - start_time)/num_views}" + ) + + out = defaultdict(list) + for res in render_res_list: + for k, v in res.items(): + out[k].append(v) + for k, v in out.items(): + # print(f"out key:{k}") + if isinstance(v[0], torch.Tensor): + out[k] = torch.concat(v, dim=1) + if k in ["comp_rgb", "comp_mask", "comp_depth"]: + out[k] = out[k][0].permute( + 0, 2, 3, 1 + ) # [1, Nv, 3, H, W] -> [Nv, 3, H, W] - > [Nv, H, W, 3] + else: + out[k] = v + return out + + def obtain_params(self, cfg): + # add all bias and LayerNorm params to no_decay_params + no_decay_params, decay_params = [], [] + + for name, module in self.named_modules(): + if isinstance(module, nn.LayerNorm): + no_decay_params.extend([p for p in module.parameters()]) + elif hasattr(module, "bias") and module.bias is not None: + no_decay_params.append(module.bias) + + # add remaining parameters to decay_params + _no_decay_ids = set(map(id, no_decay_params)) + decay_params = [p for p in self.parameters() if id(p) not in _no_decay_ids] + + # filter out parameters with no grad + decay_params = list(filter(lambda p: p.requires_grad, decay_params)) + no_decay_params = list(filter(lambda p: p.requires_grad, no_decay_params)) + + # Optimizer + opt_groups = [ + { + "params": decay_params, + "weight_decay": cfg.train.optim.weight_decay, + "lr": cfg.train.optim.lr, + "name": "decay", + }, + { + "params": no_decay_params, + "weight_decay": 0.0, + "lr": cfg.train.optim.lr, + "name": "no_decay", + }, + ] + + logger.info("======== Weight Decay Parameters ========") + logger.info(f"Total: {len(decay_params)}") + logger.info("======== No Weight Decay Parameters ========") + logger.info(f"Total: {len(no_decay_params)}") + + print(f"Total Params: {len(no_decay_params) + len(decay_params)}") + + return opt_groups + + +class ModelHumanLRMSD3(ModelHumanLRM): + def __init__( + self, + **kwargs, + ): + """LRMSD3 model with motion embedding""" + super(ModelHumanLRMSD3, self).__init__(**kwargs) + + pcl_dim = kwargs.get("pcl_dim", 1024) + + input_dim = kwargs.get("encoder_feat_dim", pcl_dim) + mid_dim = pcl_dim // 4 + self.motion_embed_mlp = nn.Sequential( + linear(input_dim, mid_dim), + nn.SiLU(), + linear(mid_dim, pcl_dim), + ) + + def forward_transformer( + self, image_feats, camera_embeddings, query_points, motion_embed=None + ): + """ + Applies forward transformation to the input features. + Args: + image_feats (torch.Tensor): Input image features. Shape [B, C, H, W]. + camera_embeddings (torch.Tensor): Camera embeddings. Shape [B, D]. + query_points (torch.Tensor): Query points. Shape [B, L, D]. 
+ motion embed(torch.Tensor): Query points. Shape [B, L, D]. + Returns: + torch.Tensor: Transformed features. Shape [B, L, D]. + """ + + B = image_feats.shape[0] + + if self.latent_query_points_type == "embedding": + range_ = torch.arange(self.num_pcl, device=image_feats.device) + x = self.pcl_embeddings(range_).unsqueeze(0).repeat((B, 1, 1)) # [B, L, D] + + elif self.latent_query_points_type.startswith("smplx"): + x = self.pcl_embed(self.pcl_embeddings.unsqueeze(0)).repeat( + (B, 1, 1) + ) # [B, L, D] + + elif self.latent_query_points_type.startswith("e2e_smplx"): + # Linear warp -> MLP + LayerNorm + + x = self.pcl_embed(query_points) # [B, L, D] + + x = self.transformer( + x, + cond=image_feats, + mod=camera_embeddings, + temb=motion_embed, + ) # [B, L, D] + return x + + +class ModelHumanLRMSapdinoSD3(ModelHumanLRMSD3): + """LRMSapdinoSD3 model with motion embedding w.r.t Sapiens and Dino-V2 Decoder""" + + def __init__(self, **kwargs): + super(ModelHumanLRMSD3, self).__init__(**kwargs) + + # fine encoder + fine_encoder_type = kwargs["fine_encoder_type"] + fine_encoder_model_name = kwargs["fine_encoder_model_name"] + fine_encoder_feat_dim = kwargs["fine_encoder_feat_dim"] + fine_encoder_freeze = kwargs["fine_encoder_freeze"] + + self.fine_encoder_feat_dim = fine_encoder_feat_dim + + self.fine_encoder = self._encoder_fn(fine_encoder_type)( + model_name=fine_encoder_model_name, + freeze=fine_encoder_freeze, + encoder_feat_dim=fine_encoder_feat_dim, + ) + + pcl_dim = kwargs.get("pcl_dim", 1024) + + input_dim = kwargs.get("fine_encoder_feat_dim", pcl_dim) + mid_dim = input_dim // 2 + self.motion_embed_mlp = nn.Sequential( + linear(input_dim, mid_dim), + nn.SiLU(), + linear(mid_dim, pcl_dim), + ) + + def forward_fine_encode_image(self, image): + if self.training and self.encoder_gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + ckpt_kwargs = ( + {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + ) + image_feats = torch.utils.checkpoint.checkpoint( + create_custom_forward(self.fine_encoder), + image, + **ckpt_kwargs, + ) + else: + image_feats = self.fine_encoder(image) + return image_feats + + def forward_encode_image(self, image): + # encode image + + coarse_embed = super(ModelHumanLRMSD3, self).forward_encode_image(image) + fine_embed = self.forward_fine_encode_image(image) + + coarse_embed = F.pad( + coarse_embed, (0, fine_embed.shape[-1] - coarse_embed.shape[-1], 0, 0, 0, 0) + ) # the same as sd3, learnable + + merge_tokens = torch.cat([coarse_embed, fine_embed], dim=1) + return merge_tokens, coarse_embed, fine_embed + + def build_transformer( + self, + transformer_type, + transformer_layers, + transformer_heads, + transformer_dim, + encoder_feat_dim, + **kwargs, + ): + + return TransformerDecoder( + block_type=transformer_type, + num_layers=transformer_layers, + num_heads=transformer_heads, + inner_dim=transformer_dim, + cond_dim=kwargs.get("fine_encoder_feat_dim", 1024), + mod_dim=None, + gradient_checkpointing=self.gradient_checkpointing, + ) + + +class ModelHumanLRMSapdinoBodyHeadSD3(ModelHumanLRMSapdinoSD3): + """LRMSapdinoBodyHeadSD3 model with motion embedding w.r.t Sapiens and Dino-V2 Decoder + Head src image""" + + def obtain_facesr(self, head_image): + def tensor_to_image(head_image): + head_image = head_image.permute(0, 2, 3, 1) + head_image_numpy = head_image.detach().cpu().numpy() + head_image_numpy = (head_image_numpy * 255).astype(np.uint8) + + 
head_image_numpy = head_image_numpy[..., ::-1] # RGB2BGR + + return head_image_numpy + + def image_to_tensor(head_image_numpy): + head_image_numpy = head_image_numpy[..., ::-1] # BGR2RGB + head_image_tensor = ( + torch.from_numpy(head_image_numpy.copy()).permute(0, 3, 1, 2).float() + ) + head_image_tensor = head_image_tensor / 255.0 + return head_image_tensor + + device = head_image.device + B, V, C, H, W = head_image.shape + head_image = head_image.view(-1, C, H, W) + head_image_numpy = tensor_to_image(head_image) + + sr_head_image_list = [] + + for _i, head_image in enumerate(head_image_numpy): + sr_head_image = self.faceESRGAN(head_image) + sr_head_image_list.append(sr_head_image) + + sr_head_image_numpy = np.stack(sr_head_image_list, axis=0) + + head_image_tensor = image_to_tensor(sr_head_image_numpy) + _, _, new_H, new_W = head_image_tensor.shape + + head_image = head_image_tensor.view(B, V, C, new_H, new_W).to(device) + + return head_image + + def forward( + self, + image, + source_c2ws, + source_intrs, + render_c2ws, + render_intrs, + render_bg_colors, + smplx_params, + **kwargs, + ): + + # image: [B, N_ref, C_img, H_img, W_img] + # source_c2ws: [B, N_ref, 4, 4] + # source_intrs: [B, N_ref, 4, 4] + # render_c2ws: [B, N_source, 4, 4] + # render_intrs: [B, N_source, 4, 4] + # render_bg_colors: [B, N_source, 3] + # smplx_params: Dict, e.g., pose_shape: [B, N_source, 21, 3], betas:[B, 100] + # kwargs: Dict, e.g., src_head_imgs + head_image = kwargs["source_head_rgbs"] + if self.facesr: + head_image = self.obtain_facesr(head_image) + + assert ( + image.shape[0] == render_c2ws.shape[0] + ), "Batch size mismatch for image and render_c2ws" + assert ( + head_image.shape[0] == render_c2ws.shape[0] + ), "Batch size mismatch for image and render_c2ws" + assert ( + image.shape[0] == render_bg_colors.shape[0] + ), "Batch size mismatch for image and render_bg_colors" + assert ( + image.shape[0] == smplx_params["betas"].shape[0] + ), "Batch size mismatch for image and smplx_params" + assert ( + image.shape[0] == smplx_params["body_pose"].shape[0] + ), "Batch size mismatch for image and smplx_params" + assert len(smplx_params["betas"].shape) == 2 + + render_h, render_w = int(render_intrs[0, 0, 1, 2] * 2), int( + render_intrs[0, 0, 0, 2] * 2 + ) + + query_points = None + if self.latent_query_points_type.startswith("e2e_smplx"): + query_points, smplx_params = self.renderer.get_query_points( + smplx_params, device=image.device + ) + + latent_points, image_feats = self.forward_latent_points( + image[:, 0], head_image[:, 0], camera=None, query_points=query_points + ) # [B, N, C] + + # render target views + render_results = self.renderer( + gs_hidden_features=latent_points, + query_points=query_points, + smplx_data=smplx_params, + c2w=render_c2ws, + intrinsic=render_intrs, + height=render_h, + width=render_w, + background_color=render_bg_colors, + additional_features={"image_feats": image_feats, "image": image[:, 0]}, + df_data=kwargs["df_data"], + ) + + N, M = render_c2ws.shape[:2] + assert ( + render_results["comp_rgb"].shape[0] == N + ), "Batch size mismatch for render_results" + assert ( + render_results["comp_rgb"].shape[1] == M + ), "Number of rendered views should be consistent with render_cameras" + + gs_attrs_list = render_results.pop("gs_attr") + + offset_list = [] + scaling_list = [] + for gs_attrs in gs_attrs_list: + offset_list.append(gs_attrs.offset_xyz) + scaling_list.append(gs_attrs.scaling) + offset_output = torch.stack(offset_list) + scaling_output = torch.stack(scaling_list) + + return 
{ + "latent_points": latent_points, + "offset_output": offset_output, + "scaling_output": scaling_output, + **render_results, + } + + def forward_encode_image(self, image, head_image): + # encode image + + body_embed = self.forward_fine_encode_image(image) + head_embed = super(ModelHumanLRMSD3, self).forward_encode_image(head_image) + + head_embed = F.pad( + head_embed, (0, body_embed.shape[-1] - head_embed.shape[-1], 0, 0, 0, 0) + ) # the same as sd3, learnable + + merge_tokens = torch.cat([body_embed, head_embed], dim=1) + + return merge_tokens, head_embed, body_embed + + @torch.compile + def forward_latent_points(self, image, head_image, camera, query_points=None): + """ + Forward pass of the latent points generation. + Args: + image (torch.Tensor): Input image tensor of shape [B, C_img, H_img, W_img]. + head_image (torch.Tensor): Input head image tensor of shape [B, C_img, H_img, W_img]. + camera (torch.Tensor): Camera tensor of shape [B, D_cam_raw]. + query_points (torch.Tensor, optional): Query points tensor. for example, smplx surface points, Defaults to None. + Returns: + torch.Tensor: Generated tokens tensor. + torch.Tensor: Encoded image features tensor. + """ + + B = image.shape[0] + + # encode image + # image_feats is cond texture + image_feats, head_feats, body_feats = self.forward_encode_image( + image, head_image + ) + + motion_feats = self.forward_moitonembed(body_feats) + + assert ( + image_feats.shape[-1] == self.fine_encoder_feat_dim + ), f"Feature dimension mismatch: {image_feats.shape[-1]} vs {self.fine_encoder_feat_dim}" + + tokens = self.forward_transformer( + image_feats, + camera_embeddings=None, + query_points=query_points, + motion_embed=motion_feats, + ) + + return tokens, image_feats + + +class ModelHumanLRMSapdinoBodyHeadSD3_5(ModelHumanLRMSapdinoBodyHeadSD3): + """Using SD3BodyHeadMMJointTransformerBlock""" + + def obtain_params(self, cfg): + # add all bias and LayerNorm params to no_decay_params + no_decay_params, decay_params = [], [] + + for name, module in self.named_modules(): + if isinstance(module, nn.LayerNorm): + no_decay_params.extend([p for p in module.parameters()]) + elif hasattr(module, "bias") and module.bias is not None: + no_decay_params.append(module.bias) + + # add remaining parameters to decay_params + _no_decay_ids = set(map(id, no_decay_params)) + decay_params = [p for p in self.parameters() if id(p) not in _no_decay_ids] + + # filter out parameters with no grad + decay_params = list(filter(lambda p: p.requires_grad, decay_params)) + no_decay_params = list(filter(lambda p: p.requires_grad, no_decay_params)) + + # Optimizer + opt_groups = [ + { + "params": decay_params, + "weight_decay": cfg.train.optim.weight_decay, + "lr": cfg.train.optim.lr, + "name": "decay", + }, + { + "params": no_decay_params, + "weight_decay": 0.0, + "lr": cfg.train.optim.lr, + "name": "no_decay", + }, + ] + + logger.info("======== Weight Decay Parameters ========") + logger.info(f"Total: {len(decay_params)}") + logger.info("======== No Weight Decay Parameters ========") + logger.info(f"Total: {len(no_decay_params)}") + + print(f"Total Params: {len(no_decay_params) + len(decay_params)}") + + return opt_groups + + def __init__(self, **kwargs): + super(ModelHumanLRMSapdinoBodyHeadSD3_5, self).__init__(**kwargs) + + pcl_dim = kwargs.get("pcl_dim", 1024) + + input_dim = kwargs.get("fine_encoder_feat_dim", pcl_dim) + mid_dim = input_dim // 2 + self.motion_embed_mlp = nn.Sequential( + linear(input_dim, mid_dim), + nn.SiLU(), + linear(mid_dim, pcl_dim * 2), + ) + + def 
forward_moitonembed(self, motion_tokens): + + motion_tokens = motion_tokens.mean(dim=1, keepdim=True) + + motion_tokens = self.motion_embed_mlp(motion_tokens).squeeze( + 1 + ) # [B, 2*D] # one for head, one for body + + return motion_tokens + + @torch.compile + def forward_latent_points(self, image, head_image, camera, query_points=None): + """ + Forward pass of the latent points generation. + Args: + image (torch.Tensor): Input image tensor of shape [B, C_img, H_img, W_img]. + head_image (torch.Tensor): Input head image tensor of shape [B, C_img, H_img, W_img]. + camera (torch.Tensor): Camera tensor of shape [B, D_cam_raw]. + query_points (torch.Tensor, optional): Query points tensor. for example, smplx surface points, Defaults to None. + Returns: + torch.Tensor: Generated tokens tensor. + torch.Tensor: Encoded image features tensor. + """ + + B = image.shape[0] + + # encode image + # image_feats is cond texture + image_feats, head_feats, body_feats = self.forward_encode_image( + image, head_image + ) + + motion_tokens = self.forward_moitonembed(body_feats) + + assert ( + image_feats.shape[-1] == self.fine_encoder_feat_dim + ), f"Feature dimension mismatch: {image_feats.shape[-1]} vs {self.fine_encoder_feat_dim}" + + # # embed camera + # camera_embeddings = self.camera_embedder(camera) + # assert camera_embeddings.shape[-1] == self.camera_embed_dim, \ + # f"Feature dimension mismatch: {camera_embeddings.shape[-1]} vs {self.camera_embed_dim}" + + # transformer generating latent points + + tokens = self.forward_transformer( + image_feats, + camera_embeddings=None, + query_points=query_points, + motion_embed=motion_tokens, + ) + + return tokens, image_feats + + def forward_encode_image(self, image, head_image): + # encode image + + body_embed = self.forward_fine_encode_image(image) # 4096 tokens + head_embed = super(ModelHumanLRMSD3, self).forward_encode_image( + head_image + ) # 1024 tokens + + head_embed = F.pad( + head_embed, (0, body_embed.shape[-1] - head_embed.shape[-1], 0, 0, 0, 0) + ) # the same as sd3, learnable + + merge_tokens = torch.cat([body_embed, head_embed], dim=1) + + return merge_tokens, head_embed, body_embed + + @torch.no_grad() + def infer_single_view( + self, + image, + head_image, + source_c2ws, + source_intrs, + render_c2ws, + render_intrs, + render_bg_colors, + smplx_params, + ): + # image: [B, N_ref, C_img, H_img, W_img] + # head_image : [B, N_ref, C_img, H_img, W_img] + # source_c2ws: [B, N_ref, 4, 4] + # source_intrs: [B, N_ref, 4, 4] + # render_c2ws: [B, N_source, 4, 4] + # render_intrs: [B, N_source, 4, 4] + # render_bg_colors: [B, N_source, 3] + # smplx_params: Dict, e.g., pose_shape: [B, N_source, 21, 3], betas:[B, 100] + assert ( + image.shape[0] == render_c2ws.shape[0] + ), "Batch size mismatch for image and render_c2ws" + assert ( + image.shape[0] == render_bg_colors.shape[0] + ), "Batch size mismatch for image and render_bg_colors" + assert ( + image.shape[0] == smplx_params["betas"].shape[0] + ), "Batch size mismatch for image and smplx_params" + assert ( + image.shape[0] == smplx_params["body_pose"].shape[0] + ), "Batch size mismatch for image and smplx_params" + assert len(smplx_params["betas"].shape) == 2 + + + if self.facesr: + head_image = self.obtain_facesr(head_image) + + assert image.shape[0] == 1 + + start_time = time.time() + + query_points = None + if self.latent_query_points_type.startswith("e2e_smplx"): + query_points, smplx_params = self.renderer.get_query_points( + smplx_params, device=image.device + ) + + latent_points, image_feats = 
self.forward_latent_points( + image[:, 0], head_image[:, 0], camera=None, query_points=query_points + ) # [B, N, C] + + self.renderer.hyper_step(10000000) # set to max step + + gs_model_list, query_points, smplx_params = self.renderer.forward_gs( + gs_hidden_features=latent_points, + query_points=query_points, + smplx_data=smplx_params, + additional_features={"image_feats": image_feats, "image": image[:, 0]}, + ) + + print(f"time elpased(forward gs model):{time.time() - start_time}") + return gs_model_list, query_points, smplx_params['transform_mat_neutral_pose'] + + + def animation_infer(self, gs_model_list, query_points, smplx_params, render_c2ws, render_intrs, render_bg_colors): + '''Inference code avoid repeat forward. + ''' + render_h, render_w = int(render_intrs[0, 0, 1, 2] * 2), int( + render_intrs[0, 0, 0, 2] * 2 + ) + # render target views + render_res_list = [] + num_views = render_c2ws.shape[1] + start_time = time.time() + + # render target views + render_res_list = [] + + for view_idx in range(num_views): + render_res = self.renderer.forward_animate_gs( + gs_model_list, + query_points, + self.renderer.get_single_view_smpl_data(smplx_params, view_idx), + render_c2ws[:, view_idx : view_idx + 1], + render_intrs[:, view_idx : view_idx + 1], + render_h, + render_w, + render_bg_colors[:, view_idx : view_idx + 1], + ) + render_res_list.append(render_res) + print( + f"time elpased(animate gs model per frame):{(time.time() - start_time)/num_views}" + ) + + out = defaultdict(list) + for res in render_res_list: + for k, v in res.items(): + if isinstance(v[0], torch.Tensor): + out[k].append(v.detach().cpu()) + else: + out[k].append(v) + for k, v in out.items(): + # print(f"out key:{k}") + if isinstance(v[0], torch.Tensor): + out[k] = torch.concat(v, dim=1) + if k in ["comp_rgb", "comp_mask", "comp_depth"]: + out[k] = out[k][0].permute( + 0, 2, 3, 1 + ) # [1, Nv, 3, H, W] -> [Nv, 3, H, W] - > [Nv, H, W, 3] + else: + out[k] = v + return out diff --git a/LHM/models/modulate.py b/LHM/models/modulate.py new file mode 100644 index 0000000000000000000000000000000000000000..8d2a0f0240cc1d596a9a544d56eac5ee7e03cc7d --- /dev/null +++ b/LHM/models/modulate.py @@ -0,0 +1,43 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import torch.nn as nn + + +class ModLN(nn.Module): + """ + Modulation with adaLN. 
+ + References: + DiT: https://github.com/facebookresearch/DiT/blob/main/models.py#L101 + """ + def __init__(self, inner_dim: int, mod_dim: int, eps: float): + super().__init__() + self.norm = nn.LayerNorm(inner_dim, eps=eps) + self.mlp = nn.Sequential( + nn.SiLU(), + nn.Linear(mod_dim, inner_dim * 2), + ) + + @staticmethod + def modulate(x, shift, scale): + # x: [N, L, D] + # shift, scale: [N, D] + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + + def forward(self, x: torch.Tensor, mod: torch.Tensor) -> torch.Tensor: + shift, scale = self.mlp(mod).chunk(2, dim=-1) # [N, D] + return self.modulate(self.norm(x), shift, scale) # [N, L, D] diff --git a/LHM/models/rendering/__init__.py b/LHM/models/rendering/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7a1e39e624fbf5d970acc4b05714f8b9f70830c6 --- /dev/null +++ b/LHM/models/rendering/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Empty diff --git a/LHM/models/rendering/__pycache__/__init__.cpython-310.pyc b/LHM/models/rendering/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b59b919cc6cd0c353ae91802e88b202802bdbee Binary files /dev/null and b/LHM/models/rendering/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/models/rendering/__pycache__/gs_renderer.cpython-310.pyc b/LHM/models/rendering/__pycache__/gs_renderer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e6253f19580a52bcaa1887ad25eb15f831866d0 Binary files /dev/null and b/LHM/models/rendering/__pycache__/gs_renderer.cpython-310.pyc differ diff --git a/LHM/models/rendering/__pycache__/gsplat_renderer.cpython-310.pyc b/LHM/models/rendering/__pycache__/gsplat_renderer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..06d2d9a06d98665d9d02717e0357e7de6fb6d793 Binary files /dev/null and b/LHM/models/rendering/__pycache__/gsplat_renderer.cpython-310.pyc differ diff --git a/LHM/models/rendering/__pycache__/mesh_utils.cpython-310.pyc b/LHM/models/rendering/__pycache__/mesh_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b4d34e13ca0191436c16d1f7f0ce754cce8e348 Binary files /dev/null and b/LHM/models/rendering/__pycache__/mesh_utils.cpython-310.pyc differ diff --git a/LHM/models/rendering/__pycache__/smpl_x.cpython-310.pyc b/LHM/models/rendering/__pycache__/smpl_x.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e78611d65030fb63d150bd381331ad2311f03568 Binary files /dev/null and b/LHM/models/rendering/__pycache__/smpl_x.cpython-310.pyc differ diff --git a/LHM/models/rendering/__pycache__/smpl_x_voxel_dense_sampling.cpython-310.pyc b/LHM/models/rendering/__pycache__/smpl_x_voxel_dense_sampling.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2243ad8c51b8f28400ca027480db3627846489c Binary files /dev/null and 
b/LHM/models/rendering/__pycache__/smpl_x_voxel_dense_sampling.cpython-310.pyc differ diff --git a/LHM/models/rendering/__pycache__/synthesizer.cpython-310.pyc b/LHM/models/rendering/__pycache__/synthesizer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4cd9a8aca607c68cbbca938f4c0cd965b751bd1b Binary files /dev/null and b/LHM/models/rendering/__pycache__/synthesizer.cpython-310.pyc differ diff --git a/LHM/models/rendering/gs_renderer.py b/LHM/models/rendering/gs_renderer.py new file mode 100644 index 0000000000000000000000000000000000000000..86a6f6b3b8ecfc52bd2bc90424082bcc4c99c3db --- /dev/null +++ b/LHM/models/rendering/gs_renderer.py @@ -0,0 +1,1678 @@ +import copy +import math +import os +import pdb +from collections import defaultdict +from dataclasses import dataclass, field + +import numpy as np +import omegaconf +import torch +import torch.nn as nn +import torch.nn.functional as F +from diff_gaussian_rasterization import ( + GaussianRasterizationSettings, + GaussianRasterizer, +) +from plyfile import PlyData, PlyElement +from pytorch3d.transforms import matrix_to_quaternion +from pytorch3d.transforms.rotation_conversions import quaternion_multiply + +from LHM.models.rendering.smpl_x import SMPLXModel, read_smplx_param +from LHM.models.rendering.smpl_x_voxel_dense_sampling import SMPLXVoxelMeshModel +from LHM.models.rendering.utils.sh_utils import RGB2SH, SH2RGB +from LHM.models.rendering.utils.typing import * +from LHM.models.rendering.utils.utils import MLP, trunc_exp +from LHM.models.utils import LinerParameterTuner, StaticParameterTuner +from LHM.outputs.output import GaussianAppOutput + + +def auto_repeat_size(tensor, repeat_num, axis=0): + repeat_size = [1] * tensor.dim() + repeat_size[axis] = repeat_num + return repeat_size + + +def aabb(xyz): + return torch.min(xyz, dim=0).values, torch.max(xyz, dim=0).values + + +def inverse_sigmoid(x): + + if isinstance(x, float): + x = torch.tensor(x).float() + + return torch.log(x / (1 - x)) + + +def generate_rotation_matrix_y(degrees): + theta = math.radians(degrees) + cos_theta = math.cos(theta) + sin_theta = math.sin(theta) + + R = [[cos_theta, 0, sin_theta], [0, 1, 0], [-sin_theta, 0, cos_theta]] + + return np.asarray(R, dtype=np.float32) + + +def getWorld2View2(R, t, translate=np.array([0.0, 0.0, 0.0]), scale=1.0): + Rt = np.zeros((4, 4)) + Rt[:3, :3] = R.transpose() + Rt[:3, 3] = t + Rt[3, 3] = 1.0 + + C2W = np.linalg.inv(Rt) + cam_center = C2W[:3, 3] + cam_center = (cam_center + translate) * scale + C2W[:3, 3] = cam_center + Rt = np.linalg.inv(C2W) + return np.float32(Rt) + + +def getProjectionMatrix(znear, zfar, fovX, fovY): + tanHalfFovY = math.tan((fovY / 2)) + tanHalfFovX = math.tan((fovX / 2)) + + top = tanHalfFovY * znear + bottom = -top + right = tanHalfFovX * znear + left = -right + + P = torch.zeros(4, 4) + + z_sign = 1.0 + + P[0, 0] = 2.0 * znear / (right - left) + P[1, 1] = 2.0 * znear / (top - bottom) + P[0, 2] = (right + left) / (right - left) + P[1, 2] = (top + bottom) / (top - bottom) + P[3, 2] = z_sign + P[2, 2] = z_sign * zfar / (zfar - znear) + P[2, 3] = -(zfar * znear) / (zfar - znear) + return P + + +def intrinsic_to_fov(intrinsic, w, h): + fx, fy = intrinsic[0, 0], intrinsic[1, 1] + fov_x = 2 * torch.arctan2(w, 2 * fx) + fov_y = 2 * torch.arctan2(h, 2 * fy) + return fov_x, fov_y + + +class Camera: + def __init__( + self, + w2c, + intrinsic, + FoVx, + FoVy, + height, + width, + trans=np.array([0.0, 0.0, 0.0]), + scale=1.0, + ) -> None: + self.FoVx = FoVx + self.FoVy = FoVy 
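+        # (added comment) FoVx / FoVy are expected in radians.  The matrices
+        # built below are stored transposed (row-vector convention), i.e.
+        # world_view_transform = w2c^T and
+        # full_proj_transform = world_view_transform @ projection_matrix,
+        # with camera_center read back from the inverse of the view matrix.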
+ self.height = height + self.width = width + self.world_view_transform = w2c.transpose(0, 1) + + self.zfar = 100.0 + self.znear = 0.01 + + self.trans = trans + self.scale = scale + + self.projection_matrix = ( + getProjectionMatrix( + znear=self.znear, zfar=self.zfar, fovX=self.FoVx, fovY=self.FoVy + ) + .transpose(0, 1) + .to(w2c.device) + ) + self.full_proj_transform = ( + self.world_view_transform.unsqueeze(0).bmm( + self.projection_matrix.unsqueeze(0) + ) + ).squeeze(0) + self.camera_center = self.world_view_transform.inverse()[3, :3] + + self.intrinsic = intrinsic + + @staticmethod + def from_c2w(c2w, intrinsic, height, width): + w2c = torch.inverse(c2w) + FoVx, FoVy = intrinsic_to_fov( + intrinsic, + w=torch.tensor(width, device=w2c.device), + h=torch.tensor(height, device=w2c.device), + ) + return Camera( + w2c=w2c, + intrinsic=intrinsic, + FoVx=FoVx, + FoVy=FoVy, + height=height, + width=width, + ) + + +class GaussianModel: + + def setup_functions(self): + + self.scaling_activation = torch.exp + self.scaling_inverse_activation = torch.log + + self.opacity_activation = torch.sigmoid + self.inverse_opacity_activation = inverse_sigmoid + self.rotation_activation = torch.nn.functional.normalize + + # rgb activation function + self.rgb_activation = torch.sigmoid + + def __init__(self, xyz, opacity, rotation, scaling, shs, use_rgb=False) -> None: + """ + Initializes the GSRenderer object. + Args: + xyz (Tensor): The xyz coordinates. + opacity (Tensor): The opacity values. + rotation (Tensor): The rotation values. + scaling (Tensor): The scaling values. + before_activate: if True, the output appearance is needed to process by activation function. + shs (Tensor): The spherical harmonics coefficients. + use_rgb (bool, optional): Indicates whether shs represents RGB values. Defaults to False. + """ + + self.setup_functions() + + self.xyz: Tensor = xyz + self.opacity: Tensor = opacity + self.rotation: Tensor = rotation + self.scaling: Tensor = scaling + self.shs: Tensor = shs # [B, SH_Coeff, 3] + + self.use_rgb = use_rgb # shs indicates rgb? 
+ + def construct_list_of_attributes(self): + l = ["x", "y", "z", "nx", "ny", "nz"] + features_dc = self.shs[:, :1] + features_rest = self.shs[:, 1:] + + for i in range(features_dc.shape[1] * features_dc.shape[2]): + l.append("f_dc_{}".format(i)) + for i in range(features_rest.shape[1] * features_rest.shape[2]): + l.append("f_rest_{}".format(i)) + l.append("opacity") + for i in range(self.scaling.shape[1]): + l.append("scale_{}".format(i)) + for i in range(self.rotation.shape[1]): + l.append("rot_{}".format(i)) + return l + + def save_ply(self, path): + + xyz = self.xyz.detach().cpu().numpy() + normals = np.zeros_like(xyz) + + if self.use_rgb: + shs = RGB2SH(self.shs) + else: + shs = self.shs + + features_dc = shs[:, :1] + features_rest = shs[:, 1:] + + f_dc = ( + features_dc.float().detach().flatten(start_dim=1).contiguous().cpu().numpy() + ) + f_rest = ( + features_rest.float() + .detach() + .flatten(start_dim=1) + .contiguous() + .cpu() + .numpy() + ) + opacities = ( + inverse_sigmoid(torch.clamp(self.opacity, 1e-3, 1 - 1e-3)) + .detach() + .cpu() + .numpy() + ) + + scale = np.log(self.scaling.detach().cpu().numpy()) + rotation = self.rotation.detach().cpu().numpy() + + dtype_full = [ + (attribute, "f4") for attribute in self.construct_list_of_attributes() + ] + + elements = np.empty(xyz.shape[0], dtype=dtype_full) + attributes = np.concatenate( + (xyz, normals, f_dc, f_rest, opacities, scale, rotation), axis=1 + ) + elements[:] = list(map(tuple, attributes)) + el = PlyElement.describe(elements, "vertex") + PlyData([el]).write(path) + + def load_ply(self, path): + + plydata = PlyData.read(path) + + xyz = np.stack( + ( + np.asarray(plydata.elements[0]["x"]), + np.asarray(plydata.elements[0]["y"]), + np.asarray(plydata.elements[0]["z"]), + ), + axis=1, + ) + opacities = np.asarray(plydata.elements[0]["opacity"])[..., np.newaxis] + + features_dc = np.zeros((xyz.shape[0], 3, 1)) + features_dc[:, 0, 0] = np.asarray(plydata.elements[0]["f_dc_0"]) + features_dc[:, 1, 0] = np.asarray(plydata.elements[0]["f_dc_1"]) + features_dc[:, 2, 0] = np.asarray(plydata.elements[0]["f_dc_2"]) + + extra_f_names = [ + p.name + for p in plydata.elements[0].properties + if p.name.startswith("f_rest_") + ] + + extra_f_names = sorted(extra_f_names, key=lambda x: int(x.split("_")[-1])) + sh_degree = int(math.sqrt((len(extra_f_names) + 3) / 3)) - 1 + + print("load sh degree: ", sh_degree) + + features_extra = np.zeros((xyz.shape[0], len(extra_f_names))) + for idx, attr_name in enumerate(extra_f_names): + features_extra[:, idx] = np.asarray(plydata.elements[0][attr_name]) + # Reshape (P,F*SH_coeffs) to (P, F, SH_coeffs except DC) + # 0, 3, 8, 15 + features_extra = features_extra.reshape( + (features_extra.shape[0], 3, (sh_degree + 1) ** 2 - 1) + ) + + scale_names = [ + p.name + for p in plydata.elements[0].properties + if p.name.startswith("scale_") + ] + scale_names = sorted(scale_names, key=lambda x: int(x.split("_")[-1])) + scales = np.zeros((xyz.shape[0], len(scale_names))) + for idx, attr_name in enumerate(scale_names): + scales[:, idx] = np.asarray(plydata.elements[0][attr_name]) + + rot_names = [ + p.name for p in plydata.elements[0].properties if p.name.startswith("rot") + ] + rot_names = sorted(rot_names, key=lambda x: int(x.split("_")[-1])) + rots = np.zeros((xyz.shape[0], len(rot_names))) + for idx, attr_name in enumerate(rot_names): + rots[:, idx] = np.asarray(plydata.elements[0][attr_name]) + + xyz = torch.from_numpy(xyz).to(self.xyz) + opacities = torch.from_numpy(opacities).to(self.opacity) + rotation 
= torch.from_numpy(rots).to(self.rotation) + scales = torch.from_numpy(scales).to(self.scaling) + features_dc = torch.from_numpy(features_dc).to(self.shs) + features_rest = torch.from_numpy(features_extra).to(self.shs) + + shs = torch.cat([features_dc, features_rest], dim=2) + + if self.use_rgb: + shs = SH2RGB(shs) + else: + shs = shs + + self.xyz: Tensor = xyz + self.opacity: Tensor = self.opacity_activation(opacities) + self.rotation: Tensor = self.rotation_activation(rotation) + self.scaling: Tensor = self.scaling_activation(scales) + self.shs: Tensor = shs.permute(0, 2, 1) + + self.active_sh_degree = sh_degree + + def clone(self): + xyz = self.xyz.clone() + opacity = self.opacity.clone() + rotation = self.rotation.clone() + scaling = self.scaling.clone() + shs = self.shs.clone() + use_rgb = self.use_rgb + return GaussianModel(xyz, opacity, rotation, scaling, shs, use_rgb) + + +class GSLayer(nn.Module): + """W/O Activation Function""" + + def setup_functions(self): + + self.scaling_activation = trunc_exp # proposed by torch-ngp + self.scaling_inverse_activation = torch.log + + self.opacity_activation = torch.sigmoid + self.inverse_opacity_activation = inverse_sigmoid + self.rotation_activation = torch.nn.functional.normalize + + self.rgb_activation = torch.sigmoid + + def __init__( + self, + in_channels, + use_rgb, + clip_scaling=0.2, + init_scaling=-5.0, + init_density=0.1, + sh_degree=None, + xyz_offset=True, + restrict_offset=True, + xyz_offset_max_step=None, + fix_opacity=False, + fix_rotation=False, + use_fine_feat=False, + ): + super().__init__() + self.setup_functions() + + if isinstance(clip_scaling, omegaconf.listconfig.ListConfig) or isinstance( + clip_scaling, list + ): + self.clip_scaling_pruner = LinerParameterTuner(*clip_scaling) + else: + self.clip_scaling_pruner = StaticParameterTuner(clip_scaling) + self.clip_scaling = self.clip_scaling_pruner.get_value(0) + + self.use_rgb = use_rgb + self.restrict_offset = restrict_offset + self.xyz_offset = xyz_offset + self.xyz_offset_max_step = xyz_offset_max_step # 1.2 / 32 + self.fix_opacity = fix_opacity + self.fix_rotation = fix_rotation + self.use_fine_feat = use_fine_feat + + self.attr_dict = { + "shs": (sh_degree + 1) ** 2 * 3, + "scaling": 3, + "xyz": 3, + "opacity": None, + "rotation": None, + } + if not self.fix_opacity: + self.attr_dict["opacity"] = 1 + if not self.fix_rotation: + self.attr_dict["rotation"] = 4 + + self.out_layers = nn.ModuleDict() + for key, out_ch in self.attr_dict.items(): + if out_ch is None: + layer = nn.Identity() + else: + if key == "shs" and use_rgb: + out_ch = 3 + if key == "shs": + shs_out_ch = out_ch + layer = nn.Linear(in_channels, out_ch) + # initialize + if not (key == "shs" and use_rgb): + if key == "opacity" and self.fix_opacity: + pass + elif key == "rotation" and self.fix_rotation: + pass + else: + nn.init.constant_(layer.weight, 0) + nn.init.constant_(layer.bias, 0) + if key == "scaling": + nn.init.constant_(layer.bias, init_scaling) + elif key == "rotation": + if not self.fix_rotation: + nn.init.constant_(layer.bias, 0) + nn.init.constant_(layer.bias[0], 1.0) + elif key == "opacity": + if not self.fix_opacity: + nn.init.constant_(layer.bias, inverse_sigmoid(init_density)) + self.out_layers[key] = layer + + if self.use_fine_feat: + fine_shs_layer = nn.Linear(in_channels, shs_out_ch) + nn.init.constant_(fine_shs_layer.weight, 0) + nn.init.constant_(fine_shs_layer.bias, 0) + self.out_layers["fine_shs"] = fine_shs_layer + + def hyper_step(self, step): + self.clip_scaling = 
self.clip_scaling_pruner.get_value(step) + + def forward(self, x, pts, x_fine=None): + assert len(x.shape) == 2 + ret = {} + for k in self.attr_dict: + layer = self.out_layers[k] + + v = layer(x) + if k == "rotation": + if self.fix_rotation: + v = matrix_to_quaternion( + torch.eye(3).type_as(x)[None, :, :].repeat(x.shape[0], 1, 1) + ) # constant rotation + else: + # v = torch.nn.functional.normalize(v) + v = self.rotation_activation(v) + elif k == "scaling": + # v = trunc_exp(v) + v = self.scaling_activation(v) + + if self.clip_scaling is not None: + v = torch.clamp(v, min=0, max=self.clip_scaling) + elif k == "opacity": + if self.fix_opacity: + v = torch.ones_like(x)[..., 0:1] + else: + # v = torch.sigmoid(v) + v = self.opacity_activation(v) + elif k == "shs": + if self.use_rgb: + # v = torch.sigmoid(v) + v = self.rgb_activation(v) + + if self.use_fine_feat: + v_fine = self.out_layers["fine_shs"](x_fine) + v_fine = torch.tanh(v_fine) + v = v + v_fine + else: + if self.use_fine_feat: + v_fine = self.out_layers["fine_shs"](x_fine) + v = v + v_fine + v = torch.reshape(v, (v.shape[0], -1, 3)) + elif k == "xyz": + # TODO check + if self.restrict_offset: + max_step = self.xyz_offset_max_step + v = (torch.sigmoid(v) - 0.5) * max_step + if self.xyz_offset: + pass + else: + assert NotImplementedError + v = v + pts + k = "offset_xyz" + ret[k] = v + + ret["use_rgb"] = self.use_rgb + + return GaussianAppOutput(**ret) + + +class PointEmbed(nn.Module): + def __init__(self, hidden_dim=48, dim=128): + super().__init__() + + assert hidden_dim % 6 == 0 + + self.embedding_dim = hidden_dim + e = torch.pow(2, torch.arange(self.embedding_dim // 6)).float() * np.pi + e = torch.stack( + [ + torch.cat( + [ + e, + torch.zeros(self.embedding_dim // 6), + torch.zeros(self.embedding_dim // 6), + ] + ), + torch.cat( + [ + torch.zeros(self.embedding_dim // 6), + e, + torch.zeros(self.embedding_dim // 6), + ] + ), + torch.cat( + [ + torch.zeros(self.embedding_dim // 6), + torch.zeros(self.embedding_dim // 6), + e, + ] + ), + ] + ) + + self.register_buffer("basis", e) # 3 x 16 + + self.mlp = nn.Linear(self.embedding_dim + 3, dim) + self.norm = nn.LayerNorm(dim) + + @staticmethod + def embed(input, basis): + projections = torch.einsum("bnd,de->bne", input, basis) + embeddings = torch.cat([projections.sin(), projections.cos()], dim=2) + + return embeddings + + def forward(self, input): + # input: B x N x 3 + embed = self.mlp( + torch.cat([self.embed(input, self.basis), input], dim=2) + ) # B x N x C + embed = self.norm(embed) + return embed + + +class CrossAttnBlock(nn.Module): + """ + Transformer block that takes in a cross-attention condition. + Designed for SparseLRM architecture. 
+ """ + + # Block contains a cross-attention layer, a self-attention layer, and an MLP + def __init__( + self, + inner_dim: int, + cond_dim: int, + num_heads: int, + eps: float = None, + attn_drop: float = 0.0, + attn_bias: bool = False, + mlp_ratio: float = 4.0, + mlp_drop: float = 0.0, + feedforward=False, + ): + super().__init__() + # TODO check already apply normalization + # self.norm_q = nn.LayerNorm(inner_dim, eps=eps) + # self.norm_k = nn.LayerNorm(cond_dim, eps=eps) + self.norm_q = nn.Identity() + self.norm_k = nn.Identity() + + self.cross_attn = nn.MultiheadAttention( + embed_dim=inner_dim, + num_heads=num_heads, + kdim=cond_dim, + vdim=cond_dim, + dropout=attn_drop, + bias=attn_bias, + batch_first=True, + ) + + self.mlp = None + if feedforward: + self.norm2 = nn.LayerNorm(inner_dim, eps=eps) + self.self_attn = nn.MultiheadAttention( + embed_dim=inner_dim, + num_heads=num_heads, + dropout=attn_drop, + bias=attn_bias, + batch_first=True, + ) + self.norm3 = nn.LayerNorm(inner_dim, eps=eps) + self.mlp = nn.Sequential( + nn.Linear(inner_dim, int(inner_dim * mlp_ratio)), + nn.GELU(), + nn.Dropout(mlp_drop), + nn.Linear(int(inner_dim * mlp_ratio), inner_dim), + nn.Dropout(mlp_drop), + ) + + def forward(self, x, cond): + # x: [N, L, D] + # cond: [N, L_cond, D_cond] + x = self.cross_attn( + self.norm_q(x), self.norm_k(cond), cond, need_weights=False + )[0] + if self.mlp is not None: + before_sa = self.norm2(x) + x = ( + x + + self.self_attn(before_sa, before_sa, before_sa, need_weights=False)[0] + ) + x = x + self.mlp(self.norm3(x)) + return x + + +class DecoderCrossAttn(nn.Module): + def __init__( + self, query_dim, context_dim, num_heads, mlp=False, decode_with_extra_info=None + ): + super().__init__() + self.query_dim = query_dim + self.context_dim = context_dim + + self.cross_attn = CrossAttnBlock( + inner_dim=query_dim, + cond_dim=context_dim, + num_heads=num_heads, + feedforward=mlp, + eps=1e-5, + ) + self.decode_with_extra_info = decode_with_extra_info + if decode_with_extra_info is not None: + if decode_with_extra_info["type"] == "dinov2p14_feat": + context_dim = decode_with_extra_info["cond_dim"] + self.cross_attn_color = CrossAttnBlock( + inner_dim=query_dim, + cond_dim=context_dim, + num_heads=num_heads, + feedforward=False, + eps=1e-5, + ) + elif decode_with_extra_info["type"] == "decoder_dinov2p14_feat": + from LHM.models.encoders.dinov2_wrapper import Dinov2Wrapper + + self.encoder = Dinov2Wrapper( + model_name="dinov2_vits14_reg", freeze=False, encoder_feat_dim=384 + ) + self.cross_attn_color = CrossAttnBlock( + inner_dim=query_dim, + cond_dim=384, + num_heads=num_heads, + feedforward=False, + eps=1e-5, + ) + elif decode_with_extra_info["type"] == "decoder_resnet18_feat": + from LHM.models.encoders.xunet_wrapper import XnetWrapper + + self.encoder = XnetWrapper( + model_name="resnet18", freeze=False, encoder_feat_dim=64 + ) + self.cross_attn_color = CrossAttnBlock( + inner_dim=query_dim, + cond_dim=64, + num_heads=num_heads, + feedforward=False, + eps=1e-5, + ) + + def resize_image(self, image, multiply): + B, _, H, W = image.shape + new_h, new_w = ( + math.ceil(H / multiply) * multiply, + math.ceil(W / multiply) * multiply, + ) + image = F.interpolate( + image, (new_h, new_w), align_corners=True, mode="bilinear" + ) + return image + + def forward(self, pcl_query, pcl_latent, extra_info=None): + out = self.cross_attn(pcl_query, pcl_latent) + if self.decode_with_extra_info is not None: + out_dict = {} + out_dict["coarse"] = out + if self.decode_with_extra_info["type"] == 
"dinov2p14_feat": + out = self.cross_attn_color(out, extra_info["image_feats"]) + out_dict["fine"] = out + return out_dict + elif self.decode_with_extra_info["type"] == "decoder_dinov2p14_feat": + img_feat = self.encoder(extra_info["image"]) + out = self.cross_attn_color(out, img_feat) + out_dict["fine"] = out + return out_dict + elif self.decode_with_extra_info["type"] == "decoder_resnet18_feat": + image = extra_info["image"] + image = self.resize_image(image, multiply=32) + img_feat = self.encoder(image) + out = self.cross_attn_color(out, img_feat) + out_dict["fine"] = out + return out_dict + return out + + +class GS3DRenderer(nn.Module): + def __init__( + self, + human_model_path, + subdivide_num, + smpl_type, + feat_dim, + query_dim, + use_rgb, + sh_degree, + xyz_offset_max_step, + mlp_network_config, + expr_param_dim, + shape_param_dim, + clip_scaling=0.2, + cano_pose_type=0, + decoder_mlp=False, + skip_decoder=False, + fix_opacity=False, + fix_rotation=False, + decode_with_extra_info=None, + gradient_checkpointing=False, + apply_pose_blendshape=False, + dense_sample_pts=40000, # only use for dense_smaple_smplx + ): + + super().__init__() + self.gradient_checkpointing = gradient_checkpointing + self.skip_decoder = skip_decoder + self.smpl_type = smpl_type + assert self.smpl_type in ["smplx", "smplx_0", "smplx_1", "smplx_2"] + + self.scaling_modifier = 1.0 + self.sh_degree = sh_degree + + if self.smpl_type == "smplx_0" or self.smpl_type == "smplx": + # Using pytorch3d dense sampling + self.smplx_model = SMPLXModel( + human_model_path, + gender="neutral", + subdivide_num=subdivide_num, + shape_param_dim=shape_param_dim, + expr_param_dim=expr_param_dim, + cano_pose_type=cano_pose_type, + apply_pose_blendshape=apply_pose_blendshape, + ) + elif self.smpl_type == "smplx_1": + raise NotImplementedError("inference version does not support") + elif self.smpl_type == "smplx_2": + self.smplx_model = SMPLXVoxelMeshModel( + human_model_path, + gender="neutral", + subdivide_num=subdivide_num, + shape_param_dim=shape_param_dim, + expr_param_dim=expr_param_dim, + cano_pose_type=cano_pose_type, + dense_sample_points=dense_sample_pts, + apply_pose_blendshape=apply_pose_blendshape, + ) + else: + raise NotImplementedError + + if not self.skip_decoder: + self.pcl_embed = PointEmbed(dim=query_dim) + self.decoder_cross_attn = DecoderCrossAttn( + query_dim=query_dim, + context_dim=feat_dim, + num_heads=1, + mlp=decoder_mlp, + decode_with_extra_info=decode_with_extra_info, + ) + + self.mlp_network_config = mlp_network_config + + # using to mapping transformer decode feature to regression features. as decode feature is processed by NormLayer. + if self.mlp_network_config is not None: + self.mlp_net = MLP(query_dim, query_dim, **self.mlp_network_config) + + self.gs_net = GSLayer( + in_channels=query_dim, + use_rgb=use_rgb, + sh_degree=self.sh_degree, + clip_scaling=clip_scaling, + init_scaling=-5.0, + init_density=0.1, + xyz_offset=True, + restrict_offset=True, + xyz_offset_max_step=xyz_offset_max_step, + fix_opacity=fix_opacity, + fix_rotation=fix_rotation, + use_fine_feat=( + True + if decode_with_extra_info is not None + and decode_with_extra_info["type"] is not None + else False + ), + ) + + def hyper_step(self, step): + self.gs_net.hyper_step(step) + + def forward_single_view( + self, + gs: GaussianModel, + viewpoint_camera: Camera, + background_color: Optional[Float[Tensor, "3"]], + ret_mask: bool = True, + ): + # Create zero tensor. 
We will use it to make pytorch return gradients of the 2D (screen-space) means + screenspace_points = ( + torch.zeros_like( + gs.xyz, dtype=gs.xyz.dtype, requires_grad=True, device=self.device + ) + + 0 + ) + try: + screenspace_points.retain_grad() + except: + pass + + bg_color = background_color + # Set up rasterization configuration + tanfovx = math.tan(viewpoint_camera.FoVx * 0.5) + tanfovy = math.tan(viewpoint_camera.FoVy * 0.5) + + raster_settings = GaussianRasterizationSettings( + image_height=int(viewpoint_camera.height), + image_width=int(viewpoint_camera.width), + tanfovx=tanfovx, + tanfovy=tanfovy, + bg=bg_color, + scale_modifier=self.scaling_modifier, + viewmatrix=viewpoint_camera.world_view_transform, + projmatrix=viewpoint_camera.full_proj_transform.float(), + sh_degree=self.sh_degree, + campos=viewpoint_camera.camera_center, + prefiltered=False, + debug=False, + ) + + rasterizer = GaussianRasterizer(raster_settings=raster_settings) + + means3D = gs.xyz + means2D = screenspace_points + opacity = gs.opacity + + # If precomputed 3d covariance is provided, use it. If not, then it will be computed from + # scaling / rotation by the rasterizer. + scales = None + rotations = None + cov3D_precomp = None + scales = gs.scaling + rotations = gs.rotation + + # If precomputed colors are provided, use them. Otherwise, if it is desired to precompute colors + # from SHs in Python, do it. If not, then SH -> RGB conversion will be done by rasterizer. + shs = None + colors_precomp = None + if self.gs_net.use_rgb: + colors_precomp = gs.shs.squeeze(1).float() + shs = None + else: + colors_precomp = None + shs = gs.shs.float() + + # Rasterize visible Gaussians to image, obtain their radii (on screen). + # NOTE that dadong tries to regress rgb not shs + with torch.autocast(device_type=self.device.type, dtype=torch.float32): + rendered_image, radii, rendered_depth, rendered_alpha = rasterizer( + means3D=means3D.float(), + means2D=means2D.float(), + shs=shs, + colors_precomp=colors_precomp, + opacities=opacity.float(), + scales=scales.float(), + rotations=rotations.float(), + cov3D_precomp=cov3D_precomp, + ) + + ret = { + "comp_rgb": rendered_image.permute(1, 2, 0), # [H, W, 3] + "comp_rgb_bg": bg_color, + "comp_mask": rendered_alpha.permute(1, 2, 0), + "comp_depth": rendered_depth.permute(1, 2, 0), + } + + # if ret_mask: + # mask_bg_color = torch.zeros(3, dtype=torch.float32, device=self.device) + # raster_settings = GaussianRasterizationSettings( + # image_height=int(viewpoint_camera.height), + # image_width=int(viewpoint_camera.width), + # tanfovx=tanfovx, + # tanfovy=tanfovy, + # bg=mask_bg_color, + # scale_modifier=self.scaling_modifier, + # viewmatrix=viewpoint_camera.world_view_transform, + # projmatrix=viewpoint_camera.full_proj_transform.float(), + # sh_degree=0, + # campos=viewpoint_camera.camera_center, + # prefiltered=False, + # debug=False + # ) + # rasterizer = GaussianRasterizer(raster_settings=raster_settings) + + # with torch.autocast(device_type=self.device.type, dtype=torch.float32): + # rendered_mask, radii = rasterizer( + # means3D = means3D, + # means2D = means2D, + # # shs = , + # colors_precomp = torch.ones_like(means3D), + # opacities = opacity, + # scales = scales, + # rotations = rotations, + # cov3D_precomp = cov3D_precomp) + # ret["comp_mask"] = rendered_mask.permute(1, 2, 0) + + return ret + + def animate_gs_model( + self, gs_attr: GaussianAppOutput, query_points, smplx_data, debug=False + ): + """ + query_points: [N, 3] + """ + + device = gs_attr.offset_xyz.device + + 
if debug: + N = gs_attr.offset_xyz.shape[0] + gs_attr.xyz = torch.ones_like(gs_attr.offset_xyz) * 0.0 + + rotation = matrix_to_quaternion( + torch.eye(3).float()[None, :, :].repeat(N, 1, 1) + ).to( + device + ) # constant rotation + opacity = torch.ones((N, 1)).float().to(device) # constant opacity + + gs_attr.opacity = opacity + gs_attr.rotation = rotation + # gs_attr.scaling = torch.ones_like(gs_attr.scaling) * 0.05 + # print(gs_attr.shs.shape) + + # build cano_dependent_pose + cano_smplx_data_keys = [ + "root_pose", + "body_pose", + "jaw_pose", + "leye_pose", + "reye_pose", + "lhand_pose", + "rhand_pose", + "expr", + "trans", + ] + + merge_smplx_data = dict() + for cano_smplx_data_key in cano_smplx_data_keys: + warp_data = smplx_data[cano_smplx_data_key] + cano_pose = torch.zeros_like(warp_data[:1]) + + if cano_smplx_data_key == "body_pose": + # A-posed + cano_pose[0, 15, -1] = -math.pi / 6 + cano_pose[0, 16, -1] = +math.pi / 6 + + merge_pose = torch.cat([warp_data, cano_pose], dim=0) + merge_smplx_data[cano_smplx_data_key] = merge_pose + + merge_smplx_data["betas"] = smplx_data["betas"] + merge_smplx_data["transform_mat_neutral_pose"] = smplx_data[ + "transform_mat_neutral_pose" + ] + + with torch.autocast(device_type=device.type, dtype=torch.float32): + mean_3d = ( + query_points + gs_attr.offset_xyz + ) # [N, 3] # canonical space offset. + + # matrix to warp predefined pose to zero-pose + transform_mat_neutral_pose = merge_smplx_data[ + "transform_mat_neutral_pose" + ] # [55, 4, 4] + num_view = merge_smplx_data["body_pose"].shape[0] # [Nv, 21, 3] + mean_3d = mean_3d.unsqueeze(0).repeat(num_view, 1, 1) # [Nv, N, 3] + query_points = query_points.unsqueeze(0).repeat(num_view, 1, 1) + transform_mat_neutral_pose = transform_mat_neutral_pose.unsqueeze(0).repeat( + num_view, 1, 1, 1 + ) + + # print(mean_3d.shape, transform_mat_neutral_pose.shape, query_points.shape, smplx_data["body_pose"].shape, smplx_data["betas"].shape) + mean_3d, transform_matrix = ( + self.smplx_model.transform_to_posed_verts_from_neutral_pose( + mean_3d, + merge_smplx_data, + query_points, + transform_mat_neutral_pose=transform_mat_neutral_pose, # from predefined pose to zero-pose matrix + device=device, + ) + ) # [B, N, 3] + + # rotation appearance from canonical space to view_posed + num_view, N, _, _ = transform_matrix.shape + transform_rotation = transform_matrix[:, :, :3, :3] + + rigid_rotation_matrix = torch.nn.functional.normalize( + matrix_to_quaternion(transform_rotation), dim=-1 + ) + I = matrix_to_quaternion(torch.eye(3)).to(device) + + # inference constrain + is_constrain_body = self.smplx_model.is_constrain_body + rigid_rotation_matrix[:, is_constrain_body] = I + gs_attr.scaling[is_constrain_body] = gs_attr.scaling[ + is_constrain_body + ].clamp(max=0.02) + + rotation_neutral_pose = gs_attr.rotation.unsqueeze(0).repeat(num_view, 1, 1) + + # TODO do not move underarm gs + + # QUATERNION MULTIPLY + rotation_pose_verts = quaternion_multiply( + rigid_rotation_matrix, rotation_neutral_pose + ) + # rotation_pose_verts = rotation_neutral_pose + + gs_list = [] + cano_gs_list = [] + for i in range(num_view): + gs_copy = GaussianModel( + xyz=mean_3d[i], + opacity=gs_attr.opacity, + # rotation=gs_attr.rotation, + rotation=rotation_pose_verts[i], + scaling=gs_attr.scaling, + shs=gs_attr.shs, + use_rgb=self.gs_net.use_rgb, + ) # [N, 3] + + if i == num_view - 1: + cano_gs_list.append(gs_copy) + else: + gs_list.append(gs_copy) + + return gs_list, cano_gs_list + + def forward_gs_attr(self, x, query_points, smplx_data, 
debug=False, x_fine=None): + """ + x: [N, C] Float[Tensor, "Np Cp"], + query_points: [N, 3] Float[Tensor, "Np 3"] + """ + device = x.device + if self.mlp_network_config is not None: + # x is processed by LayerNorm + x = self.mlp_net(x) + if x_fine is not None: + x_fine = self.mlp_net(x_fine) + + # NOTE that gs_attr contains offset xyz + gs_attr: GaussianAppOutput = self.gs_net(x, query_points, x_fine) + + return gs_attr + + def get_query_points(self, smplx_data, device): + with torch.no_grad(): + with torch.autocast(device_type=device.type, dtype=torch.float32): + # print(smplx_data["betas"].shape, smplx_data["face_offset"].shape, smplx_data["joint_offset"].shape) + positions, _, transform_mat_neutral_pose = ( + self.smplx_model.get_query_points(smplx_data, device=device) + ) # [B, N, 3] + smplx_data["transform_mat_neutral_pose"] = ( + transform_mat_neutral_pose # [B, 55, 4, 4] + ) + return positions, smplx_data + + def decoder_cross_attn_wrapper(self, pcl_embed, latent_feat, extra_info): + # if self.training and self.gradient_checkpointing: + # def create_custom_forward(module): + # def custom_forward(*inputs): + # return module(*inputs) + # return custom_forward + # ckpt_kwargs = {"use_reentrant": False} if is_torch_version(">=", "1.11.0") else {} + # gs_feats = torch.utils.checkpoint.checkpoint( + # create_custom_forward(self.decoder_cross_attn), + # pcl_embed.to(dtype=latent_feat.dtype), + # latent_feat, + # extra_info, + # **ckpt_kwargs, + # ) + # else: + gs_feats = self.decoder_cross_attn( + pcl_embed.to(dtype=latent_feat.dtype), latent_feat, extra_info + ) + return gs_feats + + def query_latent_feat( + self, + positions: Float[Tensor, "*B N1 3"], + smplx_data, + latent_feat: Float[Tensor, "*B N2 C"], + extra_info, + ): + device = latent_feat.device + if self.skip_decoder: + gs_feats = latent_feat + assert positions is not None + else: + assert positions is None + if positions is None: + positions, smplx_data = self.get_query_points(smplx_data, device) + + with torch.autocast(device_type=device.type, dtype=torch.float32): + pcl_embed = self.pcl_embed(positions) + + gs_feats = self.decoder_cross_attn_wrapper( + pcl_embed, latent_feat, extra_info + ) + + return gs_feats, positions, smplx_data + + def forward_single_batch( + self, + gs_list: list[GaussianModel], + c2ws: Float[Tensor, "Nv 4 4"], + intrinsics: Float[Tensor, "Nv 4 4"], + height: int, + width: int, + background_color: Optional[Float[Tensor, "Nv 3"]], + debug: bool = False, + ): + out_list = [] + self.device = gs_list[0].xyz.device + + for v_idx, (c2w, intrinsic) in enumerate(zip(c2ws, intrinsics)): + out_list.append( + self.forward_single_view( + gs_list[v_idx], + Camera.from_c2w(c2w, intrinsic, height, width), + background_color[v_idx], + ) + ) + + out = defaultdict(list) + for out_ in out_list: + for k, v in out_.items(): + out[k].append(v) + out = {k: torch.stack(v, dim=0) for k, v in out.items()} + out["3dgs"] = gs_list + + # debug = True + if debug: + import cv2 + + cv2.imwrite( + "fuck.png", + (out["comp_rgb"].detach().cpu().numpy()[0, ..., ::-1] * 255).astype( + np.uint8 + ), + ) + + return out + + @torch.no_grad() + def forward_cano_batch( + self, + gs_list: list[GaussianModel], + c2ws: Float[Tensor, "Nv 4 4"], + intrinsics: Float[Tensor, "Nv 4 4"], + background_color: Optional[Float[Tensor, "Nv 3"]], + height: int = 512, + width: int = 512, + debug: bool = False, + ): + """using to visualization.""" + degree_list = [0, 90, 180, 270] + out_list = [] + self.device = gs_list[0].xyz.device + + gs_list_copy = 
[gs_list[0].clone() for _ in range(len(degree_list))] + + rotation_gs_list = [] + + for rotation_degree, gs in zip(degree_list, gs_list_copy): + + _R = torch.eye(3).to(gs.xyz) + _R[-1, -1] *= -1 + _R[1, 1] *= -1 + + self_R = torch.from_numpy(generate_rotation_matrix_y(rotation_degree)).to( + _R + ) + _R = self_R @ _R + + gs.xyz = (_R @ gs.xyz.T).T + + _min, _max = aabb(gs.xyz) + center = (_min + _max) / 2 + gs.xyz -= center.unsqueeze(0) + + _R_quaternion = matrix_to_quaternion(_R) + gs.rotation = quaternion_multiply(_R_quaternion, gs.rotation) + + gs.xyz[..., -1] += 2.5 # move to (0, 0, 3) + rotation_gs_list.append(gs) + + intrinsics = torch.eye(4).to(intrinsics).unsqueeze(0) + intrinsics[0, 0, 0] = width + intrinsics[0, 1, 1] = height + intrinsics[0, 0, 2] = width / 2 + intrinsics[0, 1, 2] = height / 2 + + for v_idx, gs in enumerate(rotation_gs_list): + out_list.append( + self.forward_single_view( + rotation_gs_list[v_idx], + Camera.from_c2w(c2ws[0], intrinsics[0], height, width), + torch.ones_like(background_color[0]), + ) + ) + + out = defaultdict(list) + for out_ in out_list: + for k, v in out_.items(): + out[k].append(v) + out = {k: torch.stack(v, dim=0) for k, v in out.items()} + out["3dgs"] = rotation_gs_list + + if debug: + import cv2 + + for i in range(4): + cv2.imwrite( + f"fuck_{i}.png", + (out["comp_rgb"].detach().cpu().numpy()[i, ..., ::-1] * 255).astype( + np.uint8 + ), + ) + + return out + + def get_single_batch_smpl_data(self, smpl_data, bidx): + smpl_data_single_batch = {} + for k, v in smpl_data.items(): + smpl_data_single_batch[k] = v[ + bidx + ] # e.g. body_pose: [B, N_v, 21, 3] -> [N_v, 21, 3] + if k == "betas" or (k == "joint_offset") or (k == "face_offset"): + smpl_data_single_batch[k] = v[ + bidx : bidx + 1 + ] # e.g. betas: [B, 100] -> [1, 100] + return smpl_data_single_batch + + def get_single_view_smpl_data(self, smpl_data, vidx): + smpl_data_single_view = {} + for k, v in smpl_data.items(): + assert v.shape[0] == 1 + if ( + k == "betas" + or (k == "joint_offset") + or (k == "face_offset") + or (k == "transform_mat_neutral_pose") + ): + smpl_data_single_view[k] = v # e.g. betas: [1, 100] -> [1, 100] + else: + smpl_data_single_view[k] = v[ + :, vidx : vidx + 1 + ] # e.g. 
body_pose: [1, N_v, 21, 3] -> [1, 1, 21, 3] + return smpl_data_single_view + + def forward_gs( + self, + gs_hidden_features: Float[Tensor, "B Np Cp"], + query_points: Float[Tensor, "B Np_q 3"], + smplx_data, # e.g., body_pose:[B, Nv, 21, 3], betas:[B, 100] + additional_features: Optional[dict] = None, + debug: bool = False, + **kwargs, + ): + + batch_size = gs_hidden_features.shape[0] + + # obtain gs_features embedding, cur points position, and also smplx params + query_gs_features, query_points, smplx_data = self.query_latent_feat( + query_points, smplx_data, gs_hidden_features, additional_features + ) + + gs_attr_list = [] + for b in range(batch_size): + if isinstance(query_gs_features, dict): + gs_attr = self.forward_gs_attr( + query_gs_features["coarse"][b], + query_points[b], + None, + debug, + x_fine=query_gs_features["fine"][b], + ) + else: + gs_attr = self.forward_gs_attr( + query_gs_features[b], query_points[b], None, debug + ) + gs_attr_list.append(gs_attr) + + return gs_attr_list, query_points, smplx_data + + def forward_animate_gs( + self, + gs_attr_list, + query_points, + smplx_data, + c2w, + intrinsic, + height, + width, + background_color, + debug=False, + df_data=None, # deepfashion-style dataset + ): + batch_size = len(gs_attr_list) + out_list = [] + cano_out_list = [] # inference DO NOT use + + N_view = smplx_data["root_pose"].shape[1] + + + + for b in range(batch_size): + gs_attr = gs_attr_list[b] + query_pt = query_points[b] + # len(animatable_gs_model_list) = num_view + merge_animatable_gs_model_list, cano_gs_model_list = self.animate_gs_model( + gs_attr, + query_pt, + self.get_single_batch_smpl_data(smplx_data, b), + debug=debug, + ) + + animatable_gs_model_list = merge_animatable_gs_model_list[:N_view] + + assert len(animatable_gs_model_list) == c2w.shape[1] + + # gs render animated gs model. 
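+            # (added comment) forward_single_batch rasterizes one Gaussian model
+            # per view with its own camera and background color; the per-batch
+            # outputs are stacked below into [B, Nv, H, W, C] tensors and then
+            # permuted to channel-first [B, Nv, C, H, W] before returning.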
+ out_list.append( + self.forward_single_batch( + animatable_gs_model_list, + c2w[b], + intrinsic[b], + height, + width, + background_color[b] if background_color is not None else None, + debug=debug, + ) + ) + + + + out = defaultdict(list) + for out_ in out_list: + for k, v in out_.items(): + out[k].append(v) + for k, v in out.items(): + if isinstance(v[0], torch.Tensor): + out[k] = torch.stack(v, dim=0) + else: + out[k] = v + + out["comp_rgb"] = out["comp_rgb"].permute( + 0, 1, 4, 2, 3 + ) # [B, NV, H, W, 3] -> [B, NV, 3, H, W] + out["comp_mask"] = out["comp_mask"].permute( + 0, 1, 4, 2, 3 + ) # [B, NV, H, W, 3] -> [B, NV, 1, H, W] + out["comp_depth"] = out["comp_depth"].permute( + 0, 1, 4, 2, 3 + ) # [B, NV, H, W, 3] -> [B, NV, 1, H, W] + + return out + + def forward( + self, + gs_hidden_features: Float[Tensor, "B Np Cp"], + query_points: Float[Tensor, "B Np 3"], + smplx_data, # e.g., body_pose:[B, Nv, 21, 3], betas:[B, 100] + c2w: Float[Tensor, "B Nv 4 4"], + intrinsic: Float[Tensor, "B Nv 4 4"], + height, + width, + additional_features: Optional[Float[Tensor, "B C H W"]] = None, + background_color: Optional[Float[Tensor, "B Nv 3"]] = None, + debug: bool = False, + **kwargs, + ): + + # need shape_params of smplx_data to get querty points and get "transform_mat_neutral_pose" + # only forward gs params + gs_attr_list, query_points, smplx_data = self.forward_gs( + gs_hidden_features, + query_points, + smplx_data=smplx_data, + additional_features=additional_features, + debug=debug, + ) + + out = self.forward_animate_gs( + gs_attr_list, + query_points, + smplx_data, + c2w, + intrinsic, + height, + width, + background_color, + debug, + df_data=kwargs["df_data"], + ) + out["gs_attr"] = gs_attr_list + + return out + + +def test(): + import cv2 + + human_model_path = "./pretrained_models/human_model_files" + smplx_data_root = "/data1/projects/ExAvatar_RELEASE/avatar/data/Custom/data/gyeongsik/smplx_optimized/smplx_params_smoothed" + shape_param_file = "/data1/projects/ExAvatar_RELEASE/avatar/data/Custom/data/gyeongsik/smplx_optimized/shape_param.json" + + batch_size = 1 + device = "cuda" + smplx_data, cam_param_list, ori_image_list = read_smplx_param( + smplx_data_root=smplx_data_root, shape_param_file=shape_param_file, batch_size=2 + ) + smplx_data_tmp = smplx_data + for k, v in smplx_data.items(): + smplx_data_tmp[k] = v.unsqueeze(0) + if (k == "betas") or (k == "face_offset") or (k == "joint_offset"): + smplx_data_tmp[k] = v[0].unsqueeze(0) + smplx_data = smplx_data_tmp + + gs_render = GS3DRenderer( + human_model_path=human_model_path, + subdivide_num=2, + smpl_type="smplx", + feat_dim=64, + query_dim=64, + use_rgb=False, + sh_degree=3, + mlp_network_config=None, + xyz_offset_max_step=1.8 / 32, + ) + + gs_render.to(device) + # print(cam_param_list[0]) + + c2w_list = [] + intr_list = [] + for cam_param in cam_param_list: + c2w = torch.eye(4).to(device) + c2w[:3, :3] = cam_param["R"] + c2w[:3, 3] = cam_param["t"] + c2w_list.append(c2w) + intr = torch.eye(4).to(device) + intr[0, 0] = cam_param["focal"][0] + intr[1, 1] = cam_param["focal"][1] + intr[0, 2] = cam_param["princpt"][0] + intr[1, 2] = cam_param["princpt"][1] + intr_list.append(intr) + + c2w = torch.stack(c2w_list).unsqueeze(0) + intrinsic = torch.stack(intr_list).unsqueeze(0) + + out = gs_render.forward( + gs_hidden_features=torch.zeros((batch_size, 2048, 64)).float().to(device), + query_points=None, + smplx_data=smplx_data, + c2w=c2w, + intrinsic=intrinsic, + height=int(cam_param_list[0]["princpt"][1]) * 2, + 
width=int(cam_param_list[0]["princpt"][0]) * 2, + background_color=torch.tensor([1.0, 1.0, 1.0]) + .float() + .view(1, 1, 3) + .repeat(batch_size, 2, 1) + .to(device), + debug=False, + ) + + for k, v in out.items(): + if k == "comp_rgb_bg": + print("comp_rgb_bg", v) + continue + for b_idx in range(len(v)): + if k == "3dgs": + for v_idx in range(len(v[b_idx])): + v[b_idx][v_idx].save_ply(f"./debug_vis/{b_idx}_{v_idx}.ply") + continue + for v_idx in range(v.shape[1]): + save_path = os.path.join("./debug_vis", f"{b_idx}_{v_idx}_{k}.jpg") + cv2.imwrite( + save_path, + (v[b_idx, v_idx].detach().cpu().numpy() * 255).astype(np.uint8), + ) + + +def test1(): + import cv2 + + human_model_path = "./pretrained_models/human_model_files" + device = "cuda" + + # root_dir = "/data1/projects/ExAvatar_RELEASE/avatar/data/Custom/data" + # meta_path = "/data1/projects/ExAvatar_RELEASE/avatar/data/Custom/data/data_list.json" + # dataset = ExAvatarDataset(root_dirs=root_dir, meta_path=meta_path, sample_side_views=3, + # render_image_res_low=384, render_image_res_high=384, + # render_region_size=(224, 224), source_image_res=384) + + # root_dir = "/data1/datasets1/3d_human_data/humman/humman_compressed" + # meta_path = "/data1/datasets1/3d_human_data/humman/humman_id_debug_list.json" + # dataset = HuMManDataset(root_dirs=root_dir, meta_path=meta_path, sample_side_views=3, + # render_image_res_low=384, render_image_res_high=384, + # render_region_size=(682, 384), source_image_res=384) + + # from openlrm.datasets.static_human import StaticHumanDataset + # root_dir = "./train_data/static_human_data" + # meta_path = "./train_data/static_human_data/data_id_list.json" + # dataset = StaticHumanDataset(root_dirs=root_dir, meta_path=meta_path, sample_side_views=7, + # render_image_res_low=384, render_image_res_high=384, + # render_region_size=(682, 384), source_image_res=384, + # debug=False) + + # from openlrm.datasets.singleview_human import SingleViewHumanDataset + # root_dir = "./train_data/single_view" + # meta_path = "./train_data/single_view/data_list.json" + # dataset = SingleViewHumanDataset(root_dirs=root_dir, meta_path=meta_path, sample_side_views=0, + # render_image_res_low=384, render_image_res_high=384, + # render_region_size=(682, 384), source_image_res=384, + # debug=False) + + from accelerate.utils import set_seed + + set_seed(1234) + from LHM.datasets.video_human import VideoHumanDataset + + root_dir = "./train_data/ClothVideo" + meta_path = "./train_data/ClothVideo/label/valid_id_with_img_list.json" + dataset = VideoHumanDataset( + root_dirs=root_dir, + meta_path=meta_path, + sample_side_views=7, + render_image_res_low=384, + render_image_res_high=384, + render_region_size=(682, 384), + source_image_res=384, + enlarge_ratio=[0.85, 1.2], + debug=False, + ) + + data = dataset[0] + + def get_smplx_params(data): + smplx_params = {} + smplx_keys = [ + "root_pose", + "body_pose", + "jaw_pose", + "leye_pose", + "reye_pose", + "lhand_pose", + "rhand_pose", + "expr", + "trans", + "betas", + ] + for k, v in data.items(): + if k in smplx_keys: + # print(k, v.shape) + smplx_params[k] = data[k] + return smplx_params + + smplx_data = get_smplx_params(data) + + smplx_data_tmp = {} + for k, v in smplx_data.items(): + smplx_data_tmp[k] = v.unsqueeze(0).to(device) + print(k, v.shape) + smplx_data = smplx_data_tmp + + c2ws = data["c2ws"].unsqueeze(0).to(device) + intrs = data["intrs"].unsqueeze(0).to(device) + render_images = data["render_image"].numpy() + render_h = data["render_full_resolutions"][0, 0] + render_w = 
data["render_full_resolutions"][0, 1] + render_bg_colors = data["render_bg_colors"].unsqueeze(0).to(device) + print("c2ws", c2ws.shape, "intrs", intrs.shape, intrs) + + gs_render = GS3DRenderer( + human_model_path=human_model_path, + subdivide_num=2, + smpl_type="smplx", + feat_dim=64, + query_dim=64, + use_rgb=False, + sh_degree=3, + mlp_network_config=None, + xyz_offset_max_step=1.8 / 32, + expr_param_dim=10, + shape_param_dim=10, + fix_opacity=True, + fix_rotation=True, + ) + gs_render.to(device) + + out = gs_render.forward( + gs_hidden_features=torch.zeros((1, 2048, 64)).float().to(device), + query_points=None, + smplx_data=smplx_data, + c2w=c2ws, + intrinsic=intrs, + height=render_h, + width=render_w, + background_color=render_bg_colors, + debug=False, + ) + os.makedirs("./debug_vis/gs_render", exist_ok=True) + for k, v in out.items(): + if k == "comp_rgb_bg": + print("comp_rgb_bg", v) + continue + for b_idx in range(len(v)): + if k == "3dgs": + for v_idx in range(len(v[b_idx])): + v[b_idx][v_idx].save_ply( + f"./debug_vis/gs_render/{b_idx}_{v_idx}.ply" + ) + continue + for v_idx in range(v.shape[1]): + save_path = os.path.join( + "./debug_vis/gs_render", f"{b_idx}_{v_idx}_{k}.jpg" + ) + img = ( + v[b_idx, v_idx].permute(1, 2, 0).detach().cpu().numpy() * 255 + ).astype(np.uint8) + print(img.shape, save_path) + if "mask" in k: + render_img = render_images[v_idx].transpose(1, 2, 0) * 255 + cv2.imwrite( + save_path, + np.hstack( + [np.tile(img, (1, 1, 3)), render_img.astype(np.uint8)] + ), + ) + else: + cv2.imwrite(save_path, img) + + +if __name__ == "__main__": + # test1() + test() + test() diff --git a/LHM/models/rendering/gsplat_renderer.py b/LHM/models/rendering/gsplat_renderer.py new file mode 100644 index 0000000000000000000000000000000000000000..b724a3c61b94acd3e9886d199a55c5c9722b7fca --- /dev/null +++ b/LHM/models/rendering/gsplat_renderer.py @@ -0,0 +1,148 @@ +import math +import pdb + +import torch + +try: + from gsplat.rendering import rasterization + + gsplat_enable = True +except: + gsplat_enable = False + +from LHM.models.rendering.gs_renderer import Camera, GaussianModel, GS3DRenderer +from LHM.models.rendering.utils.sh_utils import eval_sh +from LHM.models.rendering.utils.typing import * + +# self.xyz: Tensor = xyz +# self.opacity: Tensor = opacity +# self.rotation: Tensor = rotation +# self.scaling: Tensor = scaling +# self.shs: Tensor = shs # [B, SH_Coeff, 3] + + +class GSPlatRenderer(GS3DRenderer): + """Backed from GS3D, support batch-wise rendering of Gaussian splats.""" + + def __init__(self, **params): + if gsplat_enable is False: + raise ImportError("GSPlat is not installed, please install it first.") + else: + super(GSPlatRenderer, self).__init__(**params) + + def get_gaussians_properties(self, viewpoint_camera, gaussian_model): + + xyz = gaussian_model.xyz + opacity = gaussian_model.opacity + scales = gaussian_model.scaling + rotations = gaussian_model.rotation + cov3D_precomp = None + shs = None + if gaussian_model.use_rgb: + colors_precomp = gaussian_model.shs + else: + raise NotImplementedError + # shs = gaussian_model.shs + + # shs_view = gaussian_model.get_features.transpose(1, 2).view( + # -1, 3, (gaussian_model.max_sh_degree + 1) ** 2 + # ) + # dir_pp = gaussian_model.get_attribute( + # "xyz" + # ) - viewpoint_camera.camera_center.repeat( + # gaussian_model.get_features.shape[0], 1 + # ) + # dir_pp_normalized = dir_pp / dir_pp.norm(dim=1, keepdim=True) + # sh2rgb = eval_sh( + # gaussian_model.active_sh_degree, shs_view, dir_pp_normalized + # ) + # 
colors_precomp = torch.clamp_min(sh2rgb + 0.5, 0.0) + return xyz, shs, colors_precomp, opacity, scales, rotations, cov3D_precomp + + def forward_single_view( + self, + gaussian_model: GaussianModel, + viewpoint_camera: Camera, + background_color: Optional[Float[Tensor, "3"]], + ret_mask: bool = True, + ): + + xyz, shs, colors_precomp, opacity, scales, rotations, cov3D_precomp = ( + self.get_gaussians_properties(viewpoint_camera, gaussian_model) + ) + + intrinsics = viewpoint_camera.intrinsic + extrinsics = viewpoint_camera.world_view_transform.transpose( + 0, 1 + ).contiguous() # c2w -> w2c + + img_height = int(viewpoint_camera.height) + img_width = int(viewpoint_camera.width) + + colors_precomp = colors_precomp.squeeze(1) + opacity = opacity.squeeze(1) + + with torch.autocast(device_type=self.device.type, dtype=torch.float32): + render_rgbd, render_alphas, meta = rasterization( + means=xyz.float(), + quats=rotations.float(), + scales=scales.float(), + opacities=opacity.float(), + colors=colors_precomp.float(), + viewmats=extrinsics.unsqueeze(0).float(), + Ks=intrinsics.float().unsqueeze(0)[:, :3, :3], + width=img_width, + height=img_height, + near_plane=viewpoint_camera.znear, + far_plane=viewpoint_camera.zfar, + # radius_clip=3.0, + eps2d=0.3, # 3 pixel + render_mode="RGB+D", + backgrounds=background_color.unsqueeze(0).float(), + camera_model="pinhole", + ) + + render_rgbd = render_rgbd.squeeze(0) + render_alphas = render_alphas.squeeze(0) + + rendered_image = render_rgbd[:, :, :3] + rendered_depth = render_rgbd[:, :, 3:] + + ret = { + "comp_rgb": rendered_image, # [H, W, 3] + "comp_rgb_bg": background_color, + "comp_mask": render_alphas, + "comp_depth": rendered_depth, + } + + # if ret_mask: + # mask_bg_color = torch.zeros(3, dtype=torch.float32, device=self.device) + # raster_settings = GaussianRasterizationSettings( + # image_height=int(viewpoint_camera.height), + # image_width=int(viewpoint_camera.width), + # tanfovx=tanfovx, + # tanfovy=tanfovy, + # bg=mask_bg_color, + # scale_modifier=self.scaling_modifier, + # viewmatrix=viewpoint_camera.world_view_transform, + # projmatrix=viewpoint_camera.full_proj_transform.float(), + # sh_degree=0, + # campos=viewpoint_camera.camera_center, + # prefiltered=False, + # debug=False + # ) + # rasterizer = GaussianRasterizer(raster_settings=raster_settings) + + # with torch.autocast(device_type=self.device.type, dtype=torch.float32): + # rendered_mask, radii = rasterizer( + # means3D = means3D, + # means2D = means2D, + # # shs = , + # colors_precomp = torch.ones_like(means3D), + # opacities = opacity, + # scales = scales, + # rotations = rotations, + # cov3D_precomp = cov3D_precomp) + # ret["comp_mask"] = rendered_mask.permute(1, 2, 0) + + return ret diff --git a/LHM/models/rendering/mesh_utils.py b/LHM/models/rendering/mesh_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b351cdd29787f5afd8f5e56ecd78b8497fde4abc --- /dev/null +++ b/LHM/models/rendering/mesh_utils.py @@ -0,0 +1,1005 @@ +'''Mesh Define +''' +import os +import pdb +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, Union + +import cv2 +import numpy as np +import torch +import trimesh +from numpy import ndarray +from torch import Tensor + + +def length(x: Union[Tensor, ndarray], eps=1e-20) -> Union[Tensor, ndarray]: + """length of an array (along the last dim). + + Args: + x (Union[Tensor, ndarray]): x, [..., C] + eps (float, optional): eps. Defaults to 1e-20. 
+ + Returns: + Union[Tensor, ndarray]: length, [..., 1] + """ + if isinstance(x, np.ndarray): + return np.sqrt(np.maximum(np.sum(x * x, axis=-1, keepdims=True), eps)) + else: + return torch.sqrt(torch.clamp(dot(x, x), min=eps)) + +def safe_normalize(x: Union[Tensor, ndarray], eps=1e-20) -> Union[Tensor, ndarray]: + """normalize an array (along the last dim). + + Args: + x (Union[Tensor, ndarray]): x, [..., C] + eps (float, optional): eps. Defaults to 1e-20. + + Returns: + Union[Tensor, ndarray]: normalized x, [..., C] + """ + + return x / length(x, eps) + +def dot(x: Union[Tensor, ndarray], y: Union[Tensor, ndarray]) -> Union[Tensor, ndarray]: + """dot product (along the last dim). + + Args: + x (Union[Tensor, ndarray]): x, [..., C] + y (Union[Tensor, ndarray]): y, [..., C] + + Returns: + Union[Tensor, ndarray]: x dot y, [..., 1] + """ + if isinstance(x, np.ndarray): + return np.sum(x * y, -1, keepdims=True) + else: + return torch.sum(x * y, -1, keepdim=True) + +class Mesh: + """ + A torch-native trimesh class, with support for ``ply/obj/glb`` formats. + + Note: + This class only supports one mesh with a single texture image (an albedo texture and a metallic-roughness texture). + """ + def __init__( + self, + v: Optional[Tensor] = None, + f: Optional[Tensor] = None, + vn: Optional[Tensor] = None, + fn: Optional[Tensor] = None, + vt: Optional[Tensor] = None, + ft: Optional[Tensor] = None, + vc: Optional[Tensor] = None, # vertex color + albedo: Optional[Tensor] = None, + metallicRoughness: Optional[Tensor] = None, + device: Optional[torch.device] = None, + ): + """Init a mesh directly using all attributes. + + Args: + v (Optional[Tensor]): vertices, float [N, 3]. Defaults to None. + f (Optional[Tensor]): faces, int [M, 3]. Defaults to None. + vn (Optional[Tensor]): vertex normals, float [N, 3]. Defaults to None. + fn (Optional[Tensor]): faces for normals, int [M, 3]. Defaults to None. + vt (Optional[Tensor]): vertex uv coordinates, float [N, 2]. Defaults to None. + ft (Optional[Tensor]): faces for uvs, int [M, 3]. Defaults to None. + vc (Optional[Tensor]): vertex colors, float [N, 3]. Defaults to None. + albedo (Optional[Tensor]): albedo texture, float [H, W, 3], RGB format. Defaults to None. + metallicRoughness (Optional[Tensor]): metallic-roughness texture, float [H, W, 3], metallic(Blue) = metallicRoughness[..., 2], roughness(Green) = metallicRoughness[..., 1]. Defaults to None. + device (Optional[torch.device]): torch device. Defaults to None. + """ + self.device = device + self.v = v + self.vn = vn + self.vt = vt + self.f = f + self.fn = fn + self.ft = ft + # will first see if there is vertex color to use + self.vc = vc + # only support a single albedo image + self.albedo = albedo + # pbr extension, metallic(Blue) = metallicRoughness[..., 2], roughness(Green) = metallicRoughness[..., 1] + # ref: https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html + self.metallicRoughness = metallicRoughness + + self.ori_center = 0 + self.ori_scale = 1 + + @classmethod + def load(cls, path, resize=True, clean=False, renormal=True, retex=False, bound=0.9, front_dir='+z', **kwargs): + """load mesh from path. + + Args: + path (str): path to mesh file, supports ply, obj, glb. + clean (bool, optional): perform mesh cleaning at load (e.g., merge close vertices). Defaults to False. + resize (bool, optional): auto resize the mesh using ``bound`` into [-bound, bound]^3. Defaults to True. + renormal (bool, optional): re-calc the vertex normals. Defaults to True. 
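A small worked example of the vector helpers above may help; this is an illustrative sketch (not part of the patch) and assumes the repository is importable on PYTHONPATH.

# Illustrative usage of dot / length / safe_normalize defined in mesh_utils.py.
import torch
from LHM.models.rendering.mesh_utils import dot, length, safe_normalize

pts = torch.tensor([[3.0, 4.0, 0.0],
                    [0.0, 0.0, 0.0]])   # [N, 3]; the second row is degenerate
print(dot(pts, pts).shape)              # torch.Size([2, 1]) - reduced dim is kept
print(length(pts))                      # ~[[5.0], [1e-10]] (eps floor avoids sqrt(0))
print(safe_normalize(pts))              # unit vectors; the zero row stays finite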
+ retex (bool, optional): re-calc the uv coordinates, will overwrite the existing uv coordinates. Defaults to False. + bound (float, optional): bound to resize. Defaults to 0.9. + front_dir (str, optional): front-view direction of the mesh, should be [+-][xyz][ 123]. Defaults to '+z'. + device (torch.device, optional): torch device. Defaults to None. + + Note: + a ``device`` keyword argument can be provided to specify the torch device. + If it's not provided, we will try to use ``'cuda'`` as the device if it's available. + + Returns: + Mesh: the loaded Mesh object. + """ + # obj supports face uv + if path.endswith(".obj"): + mesh = cls.load_obj(path, **kwargs) + # trimesh only supports vertex uv, but can load more formats + else: + try: + kwargs.pop('albedo_path') + except: + pass + mesh = cls.load_trimesh(path, **kwargs) + + # clean + if clean: + from kiui.mesh_utils import clean_mesh + vertices = mesh.v.detach().cpu().numpy() + triangles = mesh.f.detach().cpu().numpy() + vertices, triangles = clean_mesh(vertices, triangles, remesh=False) + mesh.v = torch.from_numpy(vertices).contiguous().float().to(mesh.device) + mesh.f = torch.from_numpy(triangles).contiguous().int().to(mesh.device) + + print(f"[INFO] load mesh, v: {mesh.v.shape}, f: {mesh.f.shape}") + # auto-normalize + if resize: + mesh.auto_size(bound=bound) + # auto-fix normal + if renormal or mesh.vn is None: + mesh.auto_normal() + print(f"[INFO] load mesh, vn: {mesh.vn.shape}, fn: {mesh.fn.shape}") + # auto-fix texcoords + if retex or (mesh.albedo is not None and mesh.vt is None): + mesh.auto_uv(cache_path=path) + print(f"[INFO] load mesh, vt: {mesh.vt.shape}, ft: {mesh.ft.shape}") + + # rotate front dir to +z + if front_dir != "+z": + # axis switch + if "-z" in front_dir: + T = torch.tensor([[1, 0, 0], [0, 1, 0], [0, 0, -1]], device=mesh.device, dtype=torch.float32) + elif "+x" in front_dir: + T = torch.tensor([[0, 0, 1], [0, 1, 0], [1, 0, 0]], device=mesh.device, dtype=torch.float32) + elif "-x" in front_dir: + T = torch.tensor([[0, 0, -1], [0, 1, 0], [1, 0, 0]], device=mesh.device, dtype=torch.float32) + elif "+y" in front_dir: + T = torch.tensor([[1, 0, 0], [0, 0, 1], [0, 1, 0]], device=mesh.device, dtype=torch.float32) + elif "-y" in front_dir: + T = torch.tensor([[1, 0, 0], [0, 0, -1], [0, 1, 0]], device=mesh.device, dtype=torch.float32) + else: + T = torch.tensor([[1, 0, 0], [0, 1, 0], [0, 0, 1]], device=mesh.device, dtype=torch.float32) + # rotation (how many 90 degrees) + if '1' in front_dir: + T @= torch.tensor([[0, -1, 0], [1, 0, 0], [0, 0, 1]], device=mesh.device, dtype=torch.float32) + elif '2' in front_dir: + T @= torch.tensor([[1, 0, 0], [0, -1, 0], [0, 0, 1]], device=mesh.device, dtype=torch.float32) + elif '3' in front_dir: + T @= torch.tensor([[0, 1, 0], [-1, 0, 0], [0, 0, 1]], device=mesh.device, dtype=torch.float32) + mesh.v @= T + mesh.vn @= T + + return mesh + + @classmethod + def load_processed(cls, path, resize=True, clean=False, renormal=True, retex=False, bound=0.9, front_dir='+z', scale=None, center=None, **kwargs): + """load mesh from path. + + Args: + path (str): path to mesh file, supports ply, obj, glb. + clean (bool, optional): perform mesh cleaning at load (e.g., merge close vertices). Defaults to False. + resize (bool, optional): auto resize the mesh using ``bound`` into [-bound, bound]^3. Defaults to True. + renormal (bool, optional): re-calc the vertex normals. Defaults to True. + retex (bool, optional): re-calc the uv coordinates, will overwrite the existing uv coordinates. Defaults to False. 
+ bound (float, optional): bound to resize. Defaults to 0.9. + front_dir (str, optional): front-view direction of the mesh, should be [+-][xyz][ 123]. Defaults to '+z'. + device (torch.device, optional): torch device. Defaults to None. + + Note: + a ``device`` keyword argument can be provided to specify the torch device. + If it's not provided, we will try to use ``'cuda'`` as the device if it's available. + + Returns: + Mesh: the loaded Mesh object. + """ + # obj supports face uv + if path.endswith(".obj"): + mesh = cls.load_obj(path, **kwargs) + # trimesh only supports vertex uv, but can load more formats + else: + try: + kwargs.pop('albedo_path') + except: + pass + mesh = cls.load_trimesh(path, **kwargs) + + # clean + if clean: + from kiui.mesh_utils import clean_mesh + vertices = mesh.v.detach().cpu().numpy() + triangles = mesh.f.detach().cpu().numpy() + vertices, triangles = clean_mesh(vertices, triangles, remesh=False) + mesh.v = torch.from_numpy(vertices).contiguous().float().to(mesh.device) + mesh.f = torch.from_numpy(triangles).contiguous().int().to(mesh.device) + + print(f"[INFO] load mesh, v: {mesh.v.shape}, f: {mesh.f.shape}") + # auto-normalize + if resize: + mesh.ori_center = torch.tensor(center).to(mesh.v) + mesh.ori_scale = scale + mesh.v = (mesh.v - mesh.ori_center) * mesh.ori_scale + # auto-fix normal + if renormal or mesh.vn is None: + mesh.auto_normal() + print(f"[INFO] load mesh, vn: {mesh.vn.shape}, fn: {mesh.fn.shape}") + # auto-fix texcoords + if retex or (mesh.albedo is not None and mesh.vt is None): + mesh.auto_uv(cache_path=path) + print(f"[INFO] load mesh, vt: {mesh.vt.shape}, ft: {mesh.ft.shape}") + + # rotate front dir to +z + if front_dir != "+z": + # axis switch + if "-z" in front_dir: + T = torch.tensor([[1, 0, 0], [0, 1, 0], [0, 0, -1]], device=mesh.device, dtype=torch.float32) + elif "+x" in front_dir: + T = torch.tensor([[0, 0, 1], [0, 1, 0], [1, 0, 0]], device=mesh.device, dtype=torch.float32) + elif "-x" in front_dir: + T = torch.tensor([[0, 0, -1], [0, 1, 0], [1, 0, 0]], device=mesh.device, dtype=torch.float32) + elif "+y" in front_dir: + T = torch.tensor([[1, 0, 0], [0, 0, 1], [0, 1, 0]], device=mesh.device, dtype=torch.float32) + elif "-y" in front_dir: + T = torch.tensor([[1, 0, 0], [0, 0, -1], [0, 1, 0]], device=mesh.device, dtype=torch.float32) + else: + T = torch.tensor([[1, 0, 0], [0, 1, 0], [0, 0, 1]], device=mesh.device, dtype=torch.float32) + # rotation (how many 90 degrees) + if '1' in front_dir: + T @= torch.tensor([[0, -1, 0], [1, 0, 0], [0, 0, 1]], device=mesh.device, dtype=torch.float32) + elif '2' in front_dir: + T @= torch.tensor([[1, 0, 0], [0, -1, 0], [0, 0, 1]], device=mesh.device, dtype=torch.float32) + elif '3' in front_dir: + T @= torch.tensor([[0, 1, 0], [-1, 0, 0], [0, 0, 1]], device=mesh.device, dtype=torch.float32) + mesh.v @= T + mesh.vn @= T + + return mesh + # load from obj file + @classmethod + def load_obj(cls, path, albedo_path=None, device=None): + """load an ``obj`` mesh. + + Args: + path (str): path to mesh. + albedo_path (str, optional): path to the albedo texture image, will overwrite the existing texture path if specified in mtl. Defaults to None. + device (torch.device, optional): torch device. Defaults to None. + + Note: + We will try to read `mtl` path from `obj`, else we assume the file name is the same as `obj` but with `mtl` extension. + The `usemtl` statement is ignored, and we only use the last material path in `mtl` file. + + Returns: + Mesh: the loaded Mesh object. 
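A minimal usage sketch of the loader API documented above; the file paths are placeholders and the snippet assumes the repository and its dependencies (trimesh, OpenCV) are installed. It is illustrative only, not part of the original files.

# Hypothetical usage of Mesh.load / Mesh.write (paths are placeholders).
import torch
from LHM.models.rendering.mesh_utils import Mesh

mesh = Mesh.load("assets/example.glb", resize=True, bound=0.9, front_dir="+z")
print(mesh.v.shape, mesh.f.shape)       # vertices [N, 3], faces [M, 3]

vmin, vmax = mesh.aabb()                # axis-aligned bounds after auto resizing
mesh = mesh.to(torch.device("cpu"))     # move all tensor attributes
mesh.write("outputs/example.obj")       # writes .obj + .mtl (+ textures if any)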
+ """ + assert os.path.splitext(path)[-1] == ".obj" + + mesh = cls() + + # device + if device is None: + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + + mesh.device = device + + # load obj + with open(path, "r") as f: + lines = f.readlines() + + + def parse_f_v(fv): + # pass in a vertex term of a face, return {v, vt, vn} (-1 if not provided) + # supported forms: + # f v1 v2 v3 + # f v1/vt1 v2/vt2 v3/vt3 + # f v1/vt1/vn1 v2/vt2/vn2 v3/vt3/vn3 + # f v1//vn1 v2//vn2 v3//vn3 + xs = [int(x) - 1 if x != "" else -1 for x in fv.split("/")] + xs.extend([-1] * (3 - len(xs))) + return xs[0], xs[1], xs[2] + + vertices, texcoords, normals = [], [], [] + faces, tfaces, nfaces = [], [], [] + mtl_path = None + + for line in lines: + split_line = line.split() + # empty line + if len(split_line) == 0: + continue + prefix = split_line[0].lower() + # mtllib + if prefix == "mtllib": + mtl_path = split_line[1] + # usemtl + elif prefix == "usemtl": + pass # ignored + # v/vn/vt + elif prefix == "v": + vertices.append([float(v) for v in split_line[1:]]) + elif prefix == "vn": + normals.append([float(v) for v in split_line[1:]]) + elif prefix == "vt": + val = [float(v) for v in split_line[1:]] + texcoords.append([val[0], 1.0 - val[1]]) + elif prefix == "f": + vs = split_line[1:] + nv = len(vs) + v0, t0, n0 = parse_f_v(vs[0]) + for i in range(nv - 2): # triangulate (assume vertices are ordered) + v1, t1, n1 = parse_f_v(vs[i + 1]) + v2, t2, n2 = parse_f_v(vs[i + 2]) + faces.append([v0, v1, v2]) + tfaces.append([t0, t1, t2]) + nfaces.append([n0, n1, n2]) + + + mesh.v = torch.tensor(vertices, dtype=torch.float32, device=device) + mesh.vt = ( + torch.tensor(texcoords, dtype=torch.float32, device=device) + if len(texcoords) > 0 + else None + ) + mesh.vn = ( + torch.tensor(normals, dtype=torch.float32, device=device) + if len(normals) > 0 + else None + ) + + mesh.f = torch.tensor(faces, dtype=torch.int32, device=device) + mesh.ft = ( + torch.tensor(tfaces, dtype=torch.int32, device=device) + if len(texcoords) > 0 + else None + ) + mesh.fn = ( + torch.tensor(nfaces, dtype=torch.int32, device=device) + if len(normals) > 0 + else None + ) + + # see if there is vertex color + use_vertex_color = False + if mesh.v.shape[1] == 6: + use_vertex_color = True + mesh.vc = mesh.v[:, 3:] + mesh.v = mesh.v[:, :3] + print(f"[INFO] load obj mesh: use vertex color: {mesh.vc.shape}") + + + + # try to load texture image + if not use_vertex_color: + # try to retrieve mtl file + mtl_path_candidates = [] + if mtl_path is not None: + mtl_path_candidates.append(mtl_path) + mtl_path_candidates.append(os.path.join(os.path.dirname(path), mtl_path)) + mtl_path_candidates.append(path.replace(".obj", ".mtl")) + + + + mtl_path = None + for candidate in mtl_path_candidates: + if os.path.exists(candidate): + mtl_path = candidate + break + + # if albedo_path is not provided, try retrieve it from mtl + metallic_path = None + roughness_path = None + + if mtl_path is not None and albedo_path is None: + with open(mtl_path, "r") as f: + lines = f.readlines() + + for line in lines: + split_line = line.split() + # empty line + if len(split_line) == 0: + continue + prefix = split_line[0] + + if "map_Kd" in prefix: + # assume relative path! 
+ albedo_path = os.path.join(os.path.dirname(path), split_line[1]) + print(f"[INFO] load obj mesh: use texture from: {albedo_path}") + elif "map_Pm" in prefix: + metallic_path = os.path.join(os.path.dirname(path), split_line[1]) + elif "map_Pr" in prefix: + roughness_path = os.path.join(os.path.dirname(path), split_line[1]) + + # still not found albedo_path, or the path doesn't exist + if albedo_path is None or not os.path.exists(albedo_path): + print(f"[INFO] load obj mesh: failed to load texture!") + mesh.albedo = None + else: + albedo = cv2.imread(albedo_path, cv2.IMREAD_UNCHANGED) + albedo = cv2.cvtColor(albedo, cv2.COLOR_BGR2RGB) + albedo = albedo.astype(np.float32) / 255 + print(f"[INFO] load obj mesh: load texture: {albedo.shape}") + mesh.albedo = torch.tensor(albedo, dtype=torch.float32, device=device) + + + # try to load metallic and roughness + if metallic_path is not None and roughness_path is not None: + print(f"[INFO] load obj mesh: load metallicRoughness from: {metallic_path}, {roughness_path}") + metallic = cv2.imread(metallic_path, cv2.IMREAD_UNCHANGED) + metallic = metallic.astype(np.float32) / 255 + roughness = cv2.imread(roughness_path, cv2.IMREAD_UNCHANGED) + roughness = roughness.astype(np.float32) / 255 + metallicRoughness = np.stack([np.zeros_like(metallic), roughness, metallic], axis=-1) + + mesh.metallicRoughness = torch.tensor(metallicRoughness, dtype=torch.float32, device=device).contiguous() + + return mesh + + @classmethod + def load_trimesh(cls, path, device=None): + """load a mesh using ``trimesh.load()``. + + Can load various formats like ``glb`` and serves as a fallback. + + Note: + We will try to merge all meshes if the glb contains more than one, + but **this may cause the texture to lose**, since we only support one texture image! + + Args: + path (str): path to the mesh file. + device (torch.device, optional): torch device. Defaults to None. + + Returns: + Mesh: the loaded Mesh object. + """ + mesh = cls() + + # device + if device is None: + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + + mesh.device = device + + # use trimesh to load ply/glb + _data = trimesh.load(path) + # always convert scene to mesh, and apply all transforms... 
+ if isinstance(_data, trimesh.Scene): + print(f"[INFO] load trimesh: concatenating {len(_data.geometry)} meshes.") + _concat = [] + # loop the scene graph and apply transform to each mesh + scene_graph = _data.graph.to_flattened() # dict {name: {transform: 4x4 mat, geometry: str}} + for k, v in scene_graph.items(): + name = v['geometry'] + if name in _data.geometry and isinstance(_data.geometry[name], trimesh.Trimesh): + transform = v['transform'] + _concat.append(_data.geometry[name].apply_transform(transform)) + _mesh = trimesh.util.concatenate(_concat) + else: + _mesh = _data + + if _mesh.visual.kind == 'vertex': + vertex_colors = _mesh.visual.vertex_colors + vertex_colors = np.array(vertex_colors[..., :3]).astype(np.float32) / 255 + mesh.vc = torch.tensor(vertex_colors, dtype=torch.float32, device=device) + print(f"[INFO] load trimesh: use vertex color: {mesh.vc.shape}") + elif _mesh.visual.kind == 'texture': + _material = _mesh.visual.material + if isinstance(_material, trimesh.visual.material.PBRMaterial): + texture = np.array(_material.baseColorTexture).astype(np.float32) / 255 + # load metallicRoughness if present + if _material.metallicRoughnessTexture is not None: + metallicRoughness = np.array(_material.metallicRoughnessTexture).astype(np.float32) / 255 + mesh.metallicRoughness = torch.tensor(metallicRoughness, dtype=torch.float32, device=device).contiguous() + elif isinstance(_material, trimesh.visual.material.SimpleMaterial): + texture = np.array(_material.to_pbr().baseColorTexture).astype(np.float32) / 255 + else: + raise NotImplementedError(f"material type {type(_material)} not supported!") + mesh.albedo = torch.tensor(texture[..., :3], dtype=torch.float32, device=device).contiguous() + print(f"[INFO] load trimesh: load texture: {texture.shape}") + else: + mesh.albedo = None + print(f"[INFO] load trimesh: failed to load texture.") + + vertices = _mesh.vertices + + try: + texcoords = _mesh.visual.uv + texcoords[:, 1] = 1 - texcoords[:, 1] + except Exception as e: + texcoords = None + + try: + normals = _mesh.vertex_normals + except Exception as e: + normals = None + + # trimesh only support vertex uv... + faces = tfaces = nfaces = _mesh.faces + + mesh.v = torch.tensor(vertices, dtype=torch.float32, device=device) + mesh.vt = ( + torch.tensor(texcoords, dtype=torch.float32, device=device) + if texcoords is not None + else None + ) + mesh.vn = ( + torch.tensor(normals, dtype=torch.float32, device=device) + if normals is not None + else None + ) + + mesh.f = torch.tensor(faces, dtype=torch.int32, device=device) + mesh.ft = ( + torch.tensor(tfaces, dtype=torch.int32, device=device) + if texcoords is not None + else None + ) + mesh.fn = ( + torch.tensor(nfaces, dtype=torch.int32, device=device) + if normals is not None + else None + ) + + return mesh + + # sample surface (using trimesh) + def sample_surface(self, count: int): + """sample points on the surface of the mesh. + + Args: + count (int): number of points to sample. + + Returns: + torch.Tensor: the sampled points, float [count, 3]. + """ + _mesh = trimesh.Trimesh(vertices=self.v.detach().cpu().numpy(), faces=self.f.detach().cpu().numpy()) + points, face_idx = trimesh.sample.sample_surface(_mesh, count) + points = torch.from_numpy(points).float().to(self.device) + return points + + # aabb + def aabb(self): + """get the axis-aligned bounding box of the mesh. + + Returns: + Tuple[torch.Tensor]: the min xyz and max xyz of the mesh. 
+ """ + return torch.min(self.v, dim=0).values, torch.max(self.v, dim=0).values + + # unit size + @torch.no_grad() + def auto_size(self, bound=0.9): + """auto resize the mesh. + + Args: + bound (float, optional): resizing into ``[-bound, bound]^3``. Defaults to 0.9. + """ + vmin, vmax = self.aabb() + self.ori_center = (vmax + vmin) / 2 + self.ori_scale = 2 * bound / torch.max(vmax - vmin).item() + self.v = (self.v - self.ori_center) * self.ori_scale + + def auto_normal(self): + """auto calculate the vertex normals. + """ + i0, i1, i2 = self.f[:, 0].long(), self.f[:, 1].long(), self.f[:, 2].long() + v0, v1, v2 = self.v[i0, :], self.v[i1, :], self.v[i2, :] + + face_normals = torch.cross(v1 - v0, v2 - v0) + + # Splat face normals to vertices + vn = torch.zeros_like(self.v) + vn.scatter_add_(0, i0[:, None].repeat(1, 3), face_normals) + vn.scatter_add_(0, i1[:, None].repeat(1, 3), face_normals) + vn.scatter_add_(0, i2[:, None].repeat(1, 3), face_normals) + + + + # Normalize, replace zero (degenerated) normals with some default value + vn = torch.where( + dot(vn, vn) > 1e-20, + vn, + torch.tensor([0.0, 0.0, 1.0], dtype=torch.float32, device=vn.device), + ) + vn = safe_normalize(vn) + + self.vn = vn + + + # Normalize, replace zero (degenerated) normals with some default value + face_normals= torch.where( + dot(face_normals, face_normals) > 1e-20, + face_normals, + torch.tensor([0.0, 0.0, 1.0], dtype=torch.float32, device=vn.device), + ) + face_normals= safe_normalize(face_normals) + self.fn = face_normals + + def auto_uv(self, cache_path=None, vmap=True): + """auto calculate the uv coordinates. + + Args: + cache_path (str, optional): path to save/load the uv cache as a npz file, this can avoid calculating uv every time when loading the same mesh, which is time-consuming. Defaults to None. + vmap (bool, optional): remap vertices based on uv coordinates, so each v correspond to a unique vt (necessary for formats like gltf). + Usually this will duplicate the vertices on the edge of uv atlas. Defaults to True. + """ + # try to load cache + if cache_path is not None: + cache_path = os.path.splitext(cache_path)[0] + "_uv.npz" + if cache_path is not None and os.path.exists(cache_path): + data = np.load(cache_path) + vt_np, ft_np, vmapping = data["vt"], data["ft"], data["vmapping"] + else: + import xatlas + + v_np = self.v.detach().cpu().numpy() + f_np = self.f.detach().int().cpu().numpy() + atlas = xatlas.Atlas() + atlas.add_mesh(v_np, f_np) + chart_options = xatlas.ChartOptions() + # chart_options.max_iterations = 4 + atlas.generate(chart_options=chart_options) + vmapping, ft_np, vt_np = atlas[0] # [N], [M, 3], [N, 2] + + # save to cache + if cache_path is not None: + np.savez(cache_path, vt=vt_np, ft=ft_np, vmapping=vmapping) + + vt = torch.from_numpy(vt_np.astype(np.float32)).to(self.device) + ft = torch.from_numpy(ft_np.astype(np.int32)).to(self.device) + self.vt = vt + self.ft = ft + + if vmap: + vmapping = torch.from_numpy(vmapping.astype(np.int64)).long().to(self.device) + self.align_v_to_vt(vmapping) + + def align_v_to_vt(self, vmapping=None): + """ remap v/f and vn/fn to vt/ft. + + Args: + vmapping (np.ndarray, optional): the mapping relationship from f to ft. Defaults to None. 
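To make the `vmap` remapping described above concrete: after `auto_uv(vmap=True)` the vertex and uv arrays are index-aligned, which is what `write_glb` relies on. A short sketch (assumes xatlas is installed; the path is a placeholder and the snippet is not part of the patch):

# Sketch: after UV unwrapping with vmap=True, v and vt share one index buffer.
import torch
from LHM.models.rendering.mesh_utils import Mesh

mesh = Mesh.load("assets/example.obj")    # placeholder path
mesh.auto_uv(cache_path=None, vmap=True)  # runs xatlas, then align_v_to_vt()

assert mesh.v.shape[0] == mesh.vt.shape[0]   # exactly one uv per vertex
assert torch.equal(mesh.f, mesh.ft)          # faces now index v and vt identically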
+ """ + if vmapping is None: + ft = self.ft.view(-1).long() + f = self.f.view(-1).long() + vmapping = torch.zeros(self.vt.shape[0], dtype=torch.long, device=self.device) + vmapping[ft] = f # scatter, randomly choose one if index is not unique + + self.v = self.v[vmapping] + self.f = self.ft + + if self.vn is not None: + self.vn = self.vn[vmapping] + self.fn = self.ft + + def to(self, device): + """move all tensor attributes to device. + + Args: + device (torch.device): target device. + + Returns: + Mesh: self. + """ + self.device = device + for name in ["v", "f", "vn", "fn", "vt", "ft", "albedo", "vc", "metallicRoughness"]: + tensor = getattr(self, name) + if tensor is not None: + setattr(self, name, tensor.to(device)) + return self + + def write(self, path): + """write the mesh to a path. + + Args: + path (str): path to write, supports ply, obj and glb. + """ + if path.endswith(".ply"): + self.write_ply(path) + elif path.endswith(".obj"): + self.write_obj(path) + elif path.endswith(".glb") or path.endswith(".gltf"): + self.write_glb(path) + else: + raise NotImplementedError(f"format {path} not supported!") + + def write_ply(self, path): + """write the mesh in ply format. Only for geometry! + + Args: + path (str): path to write. + """ + + if self.albedo is not None: + print(f'[WARN] ply format does not support exporting texture, will ignore!') + + v_np = self.v.detach().cpu().numpy() + f_np = self.f.detach().cpu().numpy() + + _mesh = trimesh.Trimesh(vertices=v_np, faces=f_np) + _mesh.export(path) + + + def write_glb(self, path): + """write the mesh in glb/gltf format. + This will create a scene with a single mesh. + + Args: + path (str): path to write. + """ + + # assert self.v.shape[0] == self.vn.shape[0] and self.v.shape[0] == self.vt.shape[0] + if self.vt is not None and self.v.shape[0] != self.vt.shape[0]: + self.align_v_to_vt() + + import pygltflib + + f_np = self.f.detach().cpu().numpy().astype(np.uint32) + f_np_blob = f_np.flatten().tobytes() + + v_np = self.v.detach().cpu().numpy().astype(np.float32) + v_np_blob = v_np.tobytes() + + blob = f_np_blob + v_np_blob + byteOffset = len(blob) + + # base mesh + gltf = pygltflib.GLTF2( + scene=0, + scenes=[pygltflib.Scene(nodes=[0])], + nodes=[pygltflib.Node(mesh=0)], + meshes=[pygltflib.Mesh(primitives=[pygltflib.Primitive( + # indices to accessors (0 is triangles) + attributes=pygltflib.Attributes( + POSITION=1, + ), + indices=0, + )])], + buffers=[ + pygltflib.Buffer(byteLength=len(f_np_blob) + len(v_np_blob)) + ], + # buffer view (based on dtype) + bufferViews=[ + # triangles; as flatten (element) array + pygltflib.BufferView( + buffer=0, + byteLength=len(f_np_blob), + target=pygltflib.ELEMENT_ARRAY_BUFFER, # GL_ELEMENT_ARRAY_BUFFER (34963) + ), + # positions; as vec3 array + pygltflib.BufferView( + buffer=0, + byteOffset=len(f_np_blob), + byteLength=len(v_np_blob), + byteStride=12, # vec3 + target=pygltflib.ARRAY_BUFFER, # GL_ARRAY_BUFFER (34962) + ), + ], + accessors=[ + # 0 = triangles + pygltflib.Accessor( + bufferView=0, + componentType=pygltflib.UNSIGNED_INT, # GL_UNSIGNED_INT (5125) + count=f_np.size, + type=pygltflib.SCALAR, + max=[int(f_np.max())], + min=[int(f_np.min())], + ), + # 1 = positions + pygltflib.Accessor( + bufferView=1, + componentType=pygltflib.FLOAT, # GL_FLOAT (5126) + count=len(v_np), + type=pygltflib.VEC3, + max=v_np.max(axis=0).tolist(), + min=v_np.min(axis=0).tolist(), + ), + ], + ) + + # append texture info + if self.vt is not None: + + vt_np = self.vt.detach().cpu().numpy().astype(np.float32) + vt_np_blob 
= vt_np.tobytes() + + albedo = self.albedo.detach().cpu().numpy() + albedo = (albedo * 255).astype(np.uint8) + albedo = cv2.cvtColor(albedo, cv2.COLOR_RGB2BGR) + albedo_blob = cv2.imencode('.png', albedo)[1].tobytes() + + # update primitive + gltf.meshes[0].primitives[0].attributes.TEXCOORD_0 = 2 + gltf.meshes[0].primitives[0].material = 0 + + # update materials + gltf.materials.append(pygltflib.Material( + pbrMetallicRoughness=pygltflib.PbrMetallicRoughness( + baseColorTexture=pygltflib.TextureInfo(index=0, texCoord=0), + metallicFactor=0.0, + roughnessFactor=1.0, + ), + alphaMode=pygltflib.OPAQUE, + alphaCutoff=None, + doubleSided=True, + )) + + gltf.textures.append(pygltflib.Texture(sampler=0, source=0)) + gltf.samplers.append(pygltflib.Sampler(magFilter=pygltflib.LINEAR, minFilter=pygltflib.LINEAR_MIPMAP_LINEAR, wrapS=pygltflib.REPEAT, wrapT=pygltflib.REPEAT)) + gltf.images.append(pygltflib.Image(bufferView=3, mimeType="image/png")) + + # update buffers + gltf.bufferViews.append( + # index = 2, texcoords; as vec2 array + pygltflib.BufferView( + buffer=0, + byteOffset=byteOffset, + byteLength=len(vt_np_blob), + byteStride=8, # vec2 + target=pygltflib.ARRAY_BUFFER, + ) + ) + + gltf.accessors.append( + # 2 = texcoords + pygltflib.Accessor( + bufferView=2, + componentType=pygltflib.FLOAT, + count=len(vt_np), + type=pygltflib.VEC2, + max=vt_np.max(axis=0).tolist(), + min=vt_np.min(axis=0).tolist(), + ) + ) + + blob += vt_np_blob + byteOffset += len(vt_np_blob) + + gltf.bufferViews.append( + # index = 3, albedo texture; as none target + pygltflib.BufferView( + buffer=0, + byteOffset=byteOffset, + byteLength=len(albedo_blob), + ) + ) + + blob += albedo_blob + byteOffset += len(albedo_blob) + + gltf.buffers[0].byteLength = byteOffset + + # append metllic roughness + if self.metallicRoughness is not None: + metallicRoughness = self.metallicRoughness.detach().cpu().numpy() + metallicRoughness = (metallicRoughness * 255).astype(np.uint8) + metallicRoughness = cv2.cvtColor(metallicRoughness, cv2.COLOR_RGB2BGR) + metallicRoughness_blob = cv2.imencode('.png', metallicRoughness)[1].tobytes() + + # update texture definition + gltf.materials[0].pbrMetallicRoughness.metallicFactor = 1.0 + gltf.materials[0].pbrMetallicRoughness.roughnessFactor = 1.0 + gltf.materials[0].pbrMetallicRoughness.metallicRoughnessTexture = pygltflib.TextureInfo(index=1, texCoord=0) + + gltf.textures.append(pygltflib.Texture(sampler=1, source=1)) + gltf.samplers.append(pygltflib.Sampler(magFilter=pygltflib.LINEAR, minFilter=pygltflib.LINEAR_MIPMAP_LINEAR, wrapS=pygltflib.REPEAT, wrapT=pygltflib.REPEAT)) + gltf.images.append(pygltflib.Image(bufferView=4, mimeType="image/png")) + + # update buffers + gltf.bufferViews.append( + # index = 4, metallicRoughness texture; as none target + pygltflib.BufferView( + buffer=0, + byteOffset=byteOffset, + byteLength=len(metallicRoughness_blob), + ) + ) + + blob += metallicRoughness_blob + byteOffset += len(metallicRoughness_blob) + + gltf.buffers[0].byteLength = byteOffset + + + # set actual data + gltf.set_binary_blob(blob) + + # glb = b"".join(gltf.save_to_bytes()) + gltf.save(path) + + + def write_obj(self, path): + """write the mesh in obj format. Will also write the texture and mtl files. + + Args: + path (str): path to write. 
+ """ + + mtl_path = path.replace(".obj", ".mtl") + albedo_path = path.replace(".obj", "_albedo.png") + metallic_path = path.replace(".obj", "_metallic.png") + roughness_path = path.replace(".obj", "_roughness.png") + + v_np = self.v.detach().cpu().numpy() + vt_np = self.vt.detach().cpu().numpy() if self.vt is not None else None + vn_np = self.vn.detach().cpu().numpy() if self.vn is not None else None + f_np = self.f.detach().cpu().numpy() + ft_np = self.ft.detach().cpu().numpy() if self.ft is not None else None + fn_np = self.fn.detach().cpu().numpy() if self.fn is not None else None + + with open(path, "w") as fp: + fp.write(f"mtllib {os.path.basename(mtl_path)} \n") + + for v in v_np: + fp.write(f"v {v[0]} {v[1]} {v[2]} \n") + + if vt_np is not None: + for v in vt_np: + fp.write(f"vt {v[0]} {1 - v[1]} \n") + + if vn_np is not None: + for v in vn_np: + fp.write(f"vn {v[0]} {v[1]} {v[2]} \n") + + fp.write(f"usemtl defaultMat \n") + for i in range(len(f_np)): + fp.write( + f'f {f_np[i, 0] + 1}/{ft_np[i, 0] + 1 if ft_np is not None else ""}/{fn_np[i, 0] + 1 if fn_np is not None else ""} \ + {f_np[i, 1] + 1}/{ft_np[i, 1] + 1 if ft_np is not None else ""}/{fn_np[i, 1] + 1 if fn_np is not None else ""} \ + {f_np[i, 2] + 1}/{ft_np[i, 2] + 1 if ft_np is not None else ""}/{fn_np[i, 2] + 1 if fn_np is not None else ""} \n' + ) + + with open(mtl_path, "w") as fp: + fp.write(f"newmtl defaultMat \n") + fp.write(f"Ka 1 1 1 \n") + fp.write(f"Kd 1 1 1 \n") + fp.write(f"Ks 0 0 0 \n") + fp.write(f"Tr 1 \n") + fp.write(f"illum 1 \n") + fp.write(f"Ns 0 \n") + if self.albedo is not None: + fp.write(f"map_Kd {os.path.basename(albedo_path)} \n") + if self.metallicRoughness is not None: + # ref: https://en.wikipedia.org/wiki/Wavefront_.obj_file#Physically-based_Rendering + fp.write(f"map_Pm {os.path.basename(metallic_path)} \n") + fp.write(f"map_Pr {os.path.basename(roughness_path)} \n") + + if self.albedo is not None: + albedo = self.albedo.detach().cpu().numpy() + albedo = (albedo * 255).astype(np.uint8) + cv2.imwrite(albedo_path, cv2.cvtColor(albedo, cv2.COLOR_RGB2BGR)) + + if self.metallicRoughness is not None: + metallicRoughness = self.metallicRoughness.detach().cpu().numpy() + metallicRoughness = (metallicRoughness * 255).astype(np.uint8) + cv2.imwrite(metallic_path, metallicRoughness[..., 2]) + cv2.imwrite(roughness_path, metallicRoughness[..., 1]) + +if __name__ == '__main__': + import os + import pdb + import sys + + import tqlt + sys.path.append('./') + obj_mesh = Mesh().load(path='./test.glb') + + pdb.set_trace() + + + diff --git a/LHM/models/rendering/smpl_x.py b/LHM/models/rendering/smpl_x.py new file mode 100644 index 0000000000000000000000000000000000000000..186a2bcbf20cf1467f13bc51c0fc54fcfe2b17a4 --- /dev/null +++ b/LHM/models/rendering/smpl_x.py @@ -0,0 +1,1508 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Xiaodong Gu, Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-01-08 21:42:24, Version 0.0, SMPLX + FLAME2019 +# @Function : SMPLX-related functions + +import copy +import math +import os +import os.path as osp +import pdb +import pickle +import sys + +sys.path.append("./") +from collections import defaultdict + +import numpy as np +import torch +import torch.nn as nn +import trimesh +from pytorch3d.ops import SubdivideMeshes, knn_points +from pytorch3d.structures import Meshes +from smplx.lbs import batch_rigid_transform +from torch.nn import functional as F + +from LHM.models.rendering.smplx import smplx +from 
LHM.models.rendering.smplx.vis_utils import render_mesh + +""" +Subdivide a triangle mesh by adding a new vertex at the center of each edge and dividing each face into four new faces. +Vectors of vertex attributes can also be subdivided by averaging the values of the attributes at the two vertices which form each edge. +This implementation preserves face orientation - if the vertices of a face are all ordered counter-clockwise, +then the faces in the subdivided meshes will also have their vertices ordered counter-clockwise. +If meshes is provided as an input, the initializer performs the relatively expensive computation of determining the new face indices. +This one-time computation can be reused for all meshes with the same face topology but different vertex positions. +""" + + +def avaliable_device(): + + import torch + + if torch.cuda.is_available(): + current_device_id = torch.cuda.current_device() + device = f"cuda:{current_device_id}" + else: + device = "cpu" + + return device + + +class SMPLX(object): + def __init__( + self, + human_model_path, + shape_param_dim=100, + expr_param_dim=50, + subdivide_num=2, + cano_pose_type=0, + ): + """SMPLX using pytorch3d subdivsion""" + super().__init__() + self.human_model_path = human_model_path + self.shape_param_dim = shape_param_dim + self.expr_param_dim = expr_param_dim + if shape_param_dim == 10 and expr_param_dim == 10: + self.layer_arg = { + "create_global_orient": False, + "create_body_pose": False, + "create_left_hand_pose": False, + "create_right_hand_pose": False, + "create_jaw_pose": False, + "create_leye_pose": False, + "create_reye_pose": False, + "create_betas": False, + "create_expression": False, + "create_transl": False, + } + self.layer = { + gender: smplx.create( + human_model_path, + "smplx", + gender=gender, + num_betas=self.shape_param_dim, + num_expression_coeffs=self.expr_param_dim, + use_pca=False, + use_face_contour=False, + flat_hand_mean=True, + **self.layer_arg, + ) + for gender in ["neutral", "male", "female"] + } + else: + self.layer_arg = { + "create_global_orient": False, + "create_body_pose": False, + "create_left_hand_pose": False, + "create_right_hand_pose": False, + "create_jaw_pose": False, + "create_leye_pose": False, + "create_reye_pose": False, + "create_betas": False, + "create_expression": False, + "create_transl": False, + } + self.layer = { + gender: smplx.create( + human_model_path, + "smplx", + gender=gender, + num_betas=self.shape_param_dim, + num_expression_coeffs=self.expr_param_dim, + use_pca=False, + use_face_contour=True, + flat_hand_mean=True, + **self.layer_arg, + ) + for gender in ["neutral", "male", "female"] + } + + self.face_vertex_idx = np.load( + osp.join(human_model_path, "smplx", "SMPL-X__FLAME_vertex_ids.npy") + ) + if shape_param_dim == 10 and expr_param_dim == 10: + print("not using flame expr") + else: + self.layer = { + gender: self.get_expr_from_flame(self.layer[gender]) + for gender in ["neutral", "male", "female"] + } + self.vertex_num = 10475 + self.face_orig = self.layer["neutral"].faces.astype(np.int64) + self.is_cavity, self.face = self.add_cavity() + with open( + osp.join(human_model_path, "smplx", "MANO_SMPLX_vertex_ids.pkl"), "rb" + ) as f: + hand_vertex_idx = pickle.load(f, encoding="latin1") + self.rhand_vertex_idx = hand_vertex_idx["right_hand"] + self.lhand_vertex_idx = hand_vertex_idx["left_hand"] + self.expr_vertex_idx = self.get_expr_vertex_idx() + + # SMPLX joint set + self.joint_num = ( + 55 # 22 (body joints: 21 + 1) + 3 (face joints) + 30 (hand joints) + ) 
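The subdivision note above is quoted from the PyTorch3D documentation; a minimal standalone sketch of that pattern, using dummy quad geometry (illustrative only, not part of the patch):

# Sketch: one SubdivideMeshes instance can be reused for every mesh that shares
# the same face topology (only vertex positions differ).
import torch
from pytorch3d.structures import Meshes
from pytorch3d.ops import SubdivideMeshes

verts = torch.tensor([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0]])
faces = torch.tensor([[0, 1, 2], [0, 2, 3]])
mesh = Meshes(verts[None], faces[None])

subdivider = SubdivideMeshes(mesh)      # precomputes the new face indices once
mesh_up = subdivider(mesh)              # each face split into four
print(mesh_up.verts_packed().shape)     # 4 original + 5 edge-midpoint vertices -> [9, 3]
print(mesh_up.faces_packed().shape)     # 2 faces -> [8, 3]

# the same subdivider can be applied to other meshes with identical topology
mesh2_up = subdivider(Meshes(verts[None] * 2.0, faces[None]))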
+ self.joints_name = ( + "Pelvis", + "L_Hip", + "R_Hip", + "Spine_1", + "L_Knee", + "R_Knee", + "Spine_2", + "L_Ankle", + "R_Ankle", + "Spine_3", + "L_Foot", + "R_Foot", + "Neck", + "L_Collar", + "R_Collar", + "Head", + "L_Shoulder", + "R_Shoulder", + "L_Elbow", + "R_Elbow", + "L_Wrist", + "R_Wrist", # body joints + "Jaw", + "L_Eye", + "R_Eye", # face joints + "L_Index_1", + "L_Index_2", + "L_Index_3", + "L_Middle_1", + "L_Middle_2", + "L_Middle_3", + "L_Pinky_1", + "L_Pinky_2", + "L_Pinky_3", + "L_Ring_1", + "L_Ring_2", + "L_Ring_3", + "L_Thumb_1", + "L_Thumb_2", + "L_Thumb_3", # left hand joints + "R_Index_1", + "R_Index_2", + "R_Index_3", + "R_Middle_1", + "R_Middle_2", + "R_Middle_3", + "R_Pinky_1", + "R_Pinky_2", + "R_Pinky_3", + "R_Ring_1", + "R_Ring_2", + "R_Ring_3", + "R_Thumb_1", + "R_Thumb_2", + "R_Thumb_3", # right hand joints + ) + self.root_joint_idx = self.joints_name.index("Pelvis") + self.joint_part = { + "body": range( + self.joints_name.index("Pelvis"), self.joints_name.index("R_Wrist") + 1 + ), + "face": range( + self.joints_name.index("Jaw"), self.joints_name.index("R_Eye") + 1 + ), + "lhand": range( + self.joints_name.index("L_Index_1"), + self.joints_name.index("L_Thumb_3") + 1, + ), + "rhand": range( + self.joints_name.index("R_Index_1"), + self.joints_name.index("R_Thumb_3") + 1, + ), + "lower_body": [ + self.joints_name.index("Pelvis"), + self.joints_name.index("R_Hip"), + self.joints_name.index("L_Hip"), + self.joints_name.index("R_Knee"), + self.joints_name.index("L_Knee"), + self.joints_name.index("R_Ankle"), + self.joints_name.index("L_Ankle"), + self.joints_name.index("R_Foot"), + self.joints_name.index("L_Foot"), + ], + } + + self.lower_body_vertex_idx = self.get_lower_body() + + self.neutral_body_pose = torch.zeros( + (len(self.joint_part["body"]) - 1, 3) + ) # 大 pose in axis-angle representation (body pose without root joint) + if cano_pose_type == 0: # exavatar-cano-pose + self.neutral_body_pose[0] = torch.FloatTensor([0, 0, 1]) + self.neutral_body_pose[1] = torch.FloatTensor([0, 0, -1]) + else: # + self.neutral_body_pose[0] = torch.FloatTensor([0, 0, math.pi / 9]) + self.neutral_body_pose[1] = torch.FloatTensor([0, 0, -math.pi / 9]) + + self.neutral_jaw_pose = torch.FloatTensor([1 / 3, 0, 0]) + + # subdivider + self.subdivide_num = subdivide_num + self.subdivider_list = self.get_subdivider(subdivide_num) + self.subdivider_cpu_list = self.get_subdivider_cpu(subdivide_num) + self.face_upsampled = ( + self.subdivider_list[-1]._subdivided_faces.cpu().numpy() + if self.subdivide_num > 0 + else self.face + ) + print("face_upsampled:", self.face_upsampled.shape) + self.vertex_num_upsampled = int(np.max(self.face_upsampled) + 1) + + def get_lower_body(self): + """using skinning to find lower body vertices.""" + lower_body_skinning_index = set(self.joint_part["lower_body"]) + skinning_weight = self.layer["neutral"].lbs_weights.float() + skinning_part = skinning_weight.argmax(1) + skinning_part = skinning_part.cpu().numpy() + lower_body_vertice_idx = [] + for v_id, v_s in enumerate(skinning_part): + if v_s in lower_body_skinning_index: + lower_body_vertice_idx.append(v_id) + + lower_body_vertice_idx = np.asarray(lower_body_vertice_idx) + + # debug + # template_v = self.layer["neutral"].v_template + # lower_body_v = template_v[lower_body_vertice_idx] + # save_ply("lower_body_v.ply", lower_body_v) + return lower_body_vertice_idx + + def get_expr_from_flame(self, smplx_layer): + flame_layer = smplx.create( + self.human_model_path, + "flame", + gender="neutral", + 
num_betas=self.shape_param_dim, + num_expression_coeffs=self.expr_param_dim, + ) + smplx_layer.expr_dirs[self.face_vertex_idx, :, :] = flame_layer.expr_dirs + return smplx_layer + + def set_id_info(self, shape_param, face_offset, joint_offset, locator_offset): + self.shape_param = shape_param + self.face_offset = face_offset + self.joint_offset = joint_offset + self.locator_offset = locator_offset + + def get_joint_offset(self, joint_offset): + device = joint_offset.device + batch_size = joint_offset.shape[0] + weight = torch.ones((batch_size, self.joint_num, 1)).float().to(device) + weight[:, self.root_joint_idx, :] = 0 + joint_offset = joint_offset * weight + return joint_offset + + def get_subdivider(self, subdivide_num): + vert = self.layer["neutral"].v_template.float().cuda() + face = torch.LongTensor(self.face).cuda() + mesh = Meshes(vert[None, :, :], face[None, :, :]) + + if subdivide_num > 0: + subdivider_list = [SubdivideMeshes(mesh)] + for i in range(subdivide_num - 1): + mesh = subdivider_list[-1](mesh) + subdivider_list.append(SubdivideMeshes(mesh)) + else: + subdivider_list = [mesh] + return subdivider_list + + def get_body_face_mapping(self): + face_vertex_idx = self.face_vertex_idx + face_vertex_set = set(face_vertex_idx) + face = self.face.reshape(-1).tolist() + face_label = [f in face_vertex_set for f in face] + face_label = np.asarray(face_label).reshape(-1, 3) + face_label = face_label.sum(-1) + face_id = np.where(face_label == 3)[0] + + head_face = self.face[face_id] + + body_set = set(np.arange(self.vertex_num)) + body_v_id = body_set - face_vertex_set + body_v_id = np.array(list(body_v_id)) + + body_face_id = np.where(face_label == 0)[0] + body_face = self.face[body_face_id] + + ret_dict = dict( + head=dict(face=head_face, vert=face_vertex_idx), + body=dict(face=body_face, vert=body_v_id), + ) + + return ret_dict + + def get_subdivider_cpu(self, subdivide_num): + vert = self.layer["neutral"].v_template.float() + face = torch.LongTensor(self.face) + mesh = Meshes(vert[None, :, :], face[None, :, :]) + + if subdivide_num > 0: + subdivider_list = [SubdivideMeshes(mesh)] + for i in range(subdivide_num - 1): + mesh = subdivider_list[-1](mesh) + subdivider_list.append(SubdivideMeshes(mesh)) + else: + subdivider_list = [mesh] + return subdivider_list + + def upsample_mesh_cpu(self, vert, feat_list=None): + face = torch.LongTensor(self.face) + mesh = Meshes(vert[None, :, :], face[None, :, :]) + if self.subdivide_num > 0: + if feat_list is None: + for subdivider in self.subdivider_cpu_list: + mesh = subdivider(mesh) + vert = mesh.verts_list()[0] + return vert + else: + feat_dims = [x.shape[1] for x in feat_list] + feats = torch.cat(feat_list, 1) + for subdivider in self.subdivider_cpu_list: + mesh, feats = subdivider(mesh, feats) + vert = mesh.verts_list()[0] + feats = feats[0] + feat_list = torch.split(feats, feat_dims, dim=1) + return vert, *feat_list + else: + if feat_list is None: + # for subdivider in self.subdivider_cpu_list: + # mesh = subdivider(mesh) + # vert = mesh.verts_list()[0] + return vert + else: + return vert, *feat_list + + def upsample_mesh(self, vert, feat_list=None, device="cuda"): + face = torch.LongTensor(self.face).to(device) + mesh = Meshes(vert[None, :, :], face[None, :, :]) + if self.subdivide_num > 0: + if feat_list is None: + for subdivider in self.subdivider_list: + mesh = subdivider(mesh) + vert = mesh.verts_list()[0] + return vert + else: + feat_dims = [x.shape[1] for x in feat_list] + feats = torch.cat(feat_list, 1) + for subdivider in 
self.subdivider_list: + mesh, feats = subdivider(mesh, feats) + vert = mesh.verts_list()[0] + feats = feats[0] + feat_list = torch.split(feats, feat_dims, dim=1) + return vert, *feat_list + else: + if feat_list is None: + # for subdivider in self.subdivider_list: + # mesh = subdivider(mesh) + # vert = mesh.verts_list()[0] + return vert + else: + # feat_dims = [x.shape[1] for x in feat_list] + # feats = torch.cat(feat_list,1) + # for subdivider in self.subdivider_list: + # mesh, feats = subdivider(mesh, feats) + # vert = mesh.verts_list()[0] + # feats = feats[0] + # feat_list = torch.split(feats, feat_dims, dim=1) + return vert, *feat_list + + def upsample_mesh_batch(self, vert, device="cuda"): + if self.subdivide_num > 0: + face = ( + torch.LongTensor(self.face) + .to(device) + .unsqueeze(0) + .repeat(vert.shape[0], 1, 1) + ) + mesh = Meshes(vert, face) + for subdivider in self.subdivider_list: + mesh = subdivider(mesh) + vert = torch.stack(mesh.verts_list(), dim=0) + else: + pass + return vert + + def add_cavity(self): + lip_vertex_idx = [2844, 2855, 8977, 1740, 1730, 1789, 8953, 2892] + is_cavity = np.zeros((self.vertex_num), dtype=np.float32) + is_cavity[lip_vertex_idx] = 1.0 + + cavity_face = [[0, 1, 7], [1, 2, 7], [2, 3, 5], [3, 4, 5], [2, 5, 6], [2, 6, 7]] + face_new = list(self.face_orig) + for face in cavity_face: + v1, v2, v3 = face + face_new.append( + [lip_vertex_idx[v1], lip_vertex_idx[v2], lip_vertex_idx[v3]] + ) + face_new = np.array(face_new, dtype=np.int64) + return is_cavity, face_new + + def get_expr_vertex_idx(self): + # FLAME 2020 has all vertices of expr_vertex_idx. use FLAME 2019 + """ + SMPLX + FLAME2019 Version + according to LBS weights to search related vertices ID + """ + + with open( + osp.join(self.human_model_path, "flame", "2019", "generic_model.pkl"), "rb" + ) as f: + flame_2019 = pickle.load(f, encoding="latin1") + vertex_idxs = np.where( + (flame_2019["shapedirs"][:, :, 300 : 300 + self.expr_param_dim] != 0).sum( + (1, 2) + ) + > 0 + )[ + 0 + ] # FLAME.SHAPE_SPACE_DIM == 300 + + # exclude neck and eyeball regions + flame_joints_name = ("Neck", "Head", "Jaw", "L_Eye", "R_Eye") + expr_vertex_idx = [] + flame_vertex_num = flame_2019["v_template"].shape[0] + is_neck_eye = torch.zeros((flame_vertex_num)).float() + is_neck_eye[ + flame_2019["weights"].argmax(1) == flame_joints_name.index("Neck") + ] = 1 + is_neck_eye[ + flame_2019["weights"].argmax(1) == flame_joints_name.index("L_Eye") + ] = 1 + is_neck_eye[ + flame_2019["weights"].argmax(1) == flame_joints_name.index("R_Eye") + ] = 1 + for idx in vertex_idxs: + if is_neck_eye[idx]: + continue + expr_vertex_idx.append(idx) + + expr_vertex_idx = np.array(expr_vertex_idx) + expr_vertex_idx = self.face_vertex_idx[expr_vertex_idx] + + return expr_vertex_idx + + def get_arm(self, mesh_neutral_pose, skinning_weight): + normal = ( + Meshes( + verts=mesh_neutral_pose[None, :, :], + faces=torch.LongTensor(self.face_upsampled).cuda()[None, :, :], + ) + .verts_normals_packed() + .reshape(self.vertex_num_upsampled, 3) + .detach() + ) + part_label = skinning_weight.argmax(1) + is_arm = 0 + for name in ("R_Shoulder", "R_Elbow", "L_Shoulder", "L_Elbow"): + is_arm = is_arm + (part_label == self.joints_name.index(name)) + is_arm = is_arm > 0 + is_upper_arm = is_arm * (normal[:, 1] > math.cos(math.pi / 3)) + is_lower_arm = is_arm * (normal[:, 1] <= math.cos(math.pi / 3)) + return is_upper_arm, is_lower_arm + + +class SMPLXModel(nn.Module): + def __init__( + self, + human_model_path, + gender, + subdivide_num, + 
expr_param_dim=50, + shape_param_dim=100, + cano_pose_type=0, + apply_pose_blendshape=False, + ) -> None: + super().__init__() + + # self.smpl_x = SMPLX( + # human_model_path=human_model_path, + # shape_param_dim=shape_param_dim, + # expr_param_dim=expr_param_dim, + # subdivide_num=subdivide_num, + # cano_pose_type=cano_pose_type, + # ) + + self.smpl_x = SMPLX( + human_model_path=human_model_path, + shape_param_dim=shape_param_dim, + expr_param_dim=expr_param_dim, + subdivide_num=subdivide_num, + cano_pose_type=cano_pose_type, + ) + self.smplx_layer = copy.deepcopy(self.smpl_x.layer[gender]) + + self.apply_pose_blendshape = apply_pose_blendshape + # register + self.smplx_init() + + def get_body_infos(self): + + head_id = torch.where(self.is_face == True)[0] + body_id = torch.where(self.is_face == False)[0] + return dict(head=head_id, body=body_id) + + def smplx_init(self): + """ + Initialize the sub-devided smplx model by registering buffers for various attributes + This method performs the following steps: + 1. Upsamples the mesh and other assets. + 2. Computes skinning weights, pose directions, expression directions, and various flags for different body parts. + 3. Reshapes and permutes the pose and expression directions. + 4. Converts the flags to boolean values. + 5. Registers buffers for the computed attributes. + Args: + self: The object instance. + Returns: + None + """ + + smpl_x = self.smpl_x + + # # upsample mesh and other assets + # xyz, _, _, _ = self.get_neutral_pose_human(jaw_zero_pose=False, use_id_info=False, device=device) + + skinning_weight = self.smplx_layer.lbs_weights.float() + + """ PCA regression function w.r.t vertices offset + """ + pose_dirs = self.smplx_layer.posedirs.permute(1, 0).reshape( + smpl_x.vertex_num, 3 * (smpl_x.joint_num - 1) * 9 + ) + expr_dirs = self.smplx_layer.expr_dirs.view( + smpl_x.vertex_num, 3 * smpl_x.expr_param_dim + ) + + is_rhand, is_lhand, is_face, is_face_expr, is_lower_body = ( + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + ) + ( + is_rhand[smpl_x.rhand_vertex_idx], + is_lhand[smpl_x.lhand_vertex_idx], + is_face[smpl_x.face_vertex_idx], + is_face_expr[smpl_x.expr_vertex_idx], + is_lower_body[smpl_x.lower_body_vertex_idx], + ) = (1.0, 1.0, 1.0, 1.0, 1.0) + is_cavity = torch.FloatTensor(smpl_x.is_cavity)[:, None] + + # obtain subvided apperance + ( + _, + skinning_weight, + pose_dirs, + expr_dirs, + is_rhand, + is_lhand, + is_face, + is_face_expr, + is_lower_body, + is_cavity, + ) = smpl_x.upsample_mesh_cpu( + torch.ones((smpl_x.vertex_num, 3)).float(), + [ + skinning_weight, + pose_dirs, + expr_dirs, + is_rhand, + is_lhand, + is_face, + is_face_expr, + is_lower_body, + is_cavity, + ], + ) # upsample with dummy vertex + + pose_dirs = pose_dirs.reshape( + smpl_x.vertex_num_upsampled * 3, (smpl_x.joint_num - 1) * 9 + ).permute( + 1, 0 + ) # (J * 9, V * 3) + expr_dirs = expr_dirs.view( + smpl_x.vertex_num_upsampled, 3, smpl_x.expr_param_dim + ) + is_rhand, is_lhand, is_face, is_face_expr, is_lower_body = ( + is_rhand[:, 0] > 0, + is_lhand[:, 0] > 0, + is_face[:, 0] > 0, + is_face_expr[:, 0] > 0, + is_lower_body[:, 0] > 0, + ) + is_cavity = is_cavity[:, 0] > 0 + + # self.register_buffer('pos_enc_mesh', xyz) + # is legs + + self.register_buffer("skinning_weight", skinning_weight.contiguous()) + self.register_buffer("pose_dirs", pose_dirs.contiguous()) + 
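The attribute-upsampling trick used in smplx_init above (subdividing per-vertex tensors alongside dummy vertices) can be shown in isolation. A minimal sketch with a single dummy triangle and one-hot features, following the same feats-passing convention as upsample_mesh_cpu; illustrative only, not part of the patch:

# Sketch: per-vertex attributes subdivided together with the mesh; each new
# edge-midpoint vertex receives the average of the two endpoint attributes.
import torch
from pytorch3d.ops import SubdivideMeshes
from pytorch3d.structures import Meshes

verts = torch.tensor([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
faces = torch.tensor([[0, 1, 2]])
feats = torch.eye(3)                    # e.g. one-hot "skinning weights", [V, C]

mesh = Meshes(verts[None], faces[None])
subdivider = SubdivideMeshes(mesh)
mesh_up, feats_up = subdivider(mesh, feats)
feats_up = feats_up[0]                  # same indexing as upsample_mesh_cpu above
print(feats_up)                         # original rows plus averaged midpoint rows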
self.register_buffer("expr_dirs", expr_dirs.contiguous()) + self.register_buffer("is_rhand", is_rhand.contiguous()) + self.register_buffer("is_lhand", is_lhand.contiguous()) + self.register_buffer("is_face", is_face.contiguous()) + self.register_buffer("is_lower_body", is_lower_body.contiguous()) + self.register_buffer("is_face_expr", is_face_expr.contiguous()) + self.register_buffer("is_cavity", is_cavity.contiguous()) + + def get_neutral_pose_human( + self, jaw_zero_pose, use_id_info, shape_param, device, face_offset, joint_offset + ): + + smpl_x = self.smpl_x + batch_size = shape_param.shape[0] + + zero_pose = torch.zeros((batch_size, 3)).float().to(device) + neutral_body_pose = ( + smpl_x.neutral_body_pose.view(1, -1).repeat(batch_size, 1).to(device) + ) # 大 pose + zero_hand_pose = ( + torch.zeros((batch_size, len(smpl_x.joint_part["lhand"]) * 3)) + .float() + .to(device) + ) + zero_expr = torch.zeros((batch_size, smpl_x.expr_param_dim)).float().to(device) + + if jaw_zero_pose: + jaw_pose = torch.zeros((batch_size, 3)).float().to(device) + else: + jaw_pose = ( + smpl_x.neutral_jaw_pose.view(1, 3).repeat(batch_size, 1).to(device) + ) # open mouth + + if use_id_info: + shape_param = shape_param + # face_offset = smpl_x.face_offset[None,:,:].float().to(device) + # joint_offset = smpl_x.get_joint_offset(self.joint_offset[None,:,:]) + face_offset = face_offset + joint_offset = ( + smpl_x.get_joint_offset(joint_offset) + if joint_offset is not None + else None + ) + + else: + shape_param = ( + torch.zeros((batch_size, smpl_x.shape_param_dim)).float().to(device) + ) + face_offset = None + joint_offset = None + + # smplx layer is smplx model + # ['vertices', 'joints', 'full_pose', 'global_orient', 'transl', 'v_shaped', 'betas', 'body_pose', 'left_hand_pose', 'right_hand_pose', 'expression', 'jaw_pose'] + + output = self.smplx_layer( + global_orient=zero_pose, + body_pose=neutral_body_pose, + left_hand_pose=zero_hand_pose, + right_hand_pose=zero_hand_pose, + jaw_pose=jaw_pose, + leye_pose=zero_pose, + reye_pose=zero_pose, + expression=zero_expr, + betas=shape_param, + face_offset=face_offset, + joint_offset=joint_offset, + ) + + mesh_neutral_pose_upsampled = smpl_x.upsample_mesh_batch( + output.vertices, device=device + ) + + mesh_neutral_pose = output.vertices + joint_neutral_pose = output.joints[ + :, : smpl_x.joint_num, : + ] # 大 pose human [B, 55, 3] + + # compute transformation matrix for making 大 pose to zero pose + neutral_body_pose = neutral_body_pose.view( + batch_size, len(smpl_x.joint_part["body"]) - 1, 3 + ) + zero_hand_pose = zero_hand_pose.view( + batch_size, len(smpl_x.joint_part["lhand"]), 3 + ) + + neutral_body_pose_inv = matrix_to_axis_angle( + torch.inverse(axis_angle_to_matrix(neutral_body_pose)) + ) + jaw_pose_inv = matrix_to_axis_angle( + torch.inverse(axis_angle_to_matrix(jaw_pose)) + ) + + zero_pose = zero_pose.unsqueeze(1) + jaw_pose_inv = jaw_pose_inv.unsqueeze(1) + + pose = torch.cat( + ( + zero_pose, + neutral_body_pose_inv, + jaw_pose_inv, + zero_pose, + zero_pose, + zero_hand_pose, + zero_hand_pose, + ), + dim=1, + ) + + pose = axis_angle_to_matrix(pose) # [B, 55, 3, 3] + + # transform_mat_neutral_pose is a function to warp neutral pose to zero pose (neutral pose is *-pose) + _, transform_mat_neutral_pose = batch_rigid_transform( + pose[:, :, :, :], joint_neutral_pose[:, :, :], self.smplx_layer.parents + ) # [B, 55, 4, 4] + + return ( + mesh_neutral_pose_upsampled, + mesh_neutral_pose, + transform_mat_neutral_pose, + ) + + def get_zero_pose_human( + self, 
shape_param, device, face_offset, joint_offset, return_mesh=False + ): + smpl_x = self.smpl_x + batch_size = shape_param.shape[0] + + zero_pose = torch.zeros((batch_size, 3)).float().to(device) + zero_body_pose = ( + torch.zeros((batch_size, (len(smpl_x.joint_part["body"]) - 1) * 3)) + .float() + .to(device) + ) + zero_hand_pose = ( + torch.zeros((batch_size, len(smpl_x.joint_part["lhand"]) * 3)) + .float() + .to(device) + ) + zero_expr = torch.zeros((batch_size, smpl_x.expr_param_dim)).float().to(device) + + face_offset = face_offset + joint_offset = ( + smpl_x.get_joint_offset(joint_offset) if joint_offset is not None else None + ) + output = self.smplx_layer( + global_orient=zero_pose, + body_pose=zero_body_pose, + left_hand_pose=zero_hand_pose, + right_hand_pose=zero_hand_pose, + jaw_pose=zero_pose, + leye_pose=zero_pose, + reye_pose=zero_pose, + expression=zero_expr, + betas=shape_param, + face_offset=face_offset, + joint_offset=joint_offset, + ) + joint_zero_pose = output.joints[:, : smpl_x.joint_num, :] # zero pose human + + if not return_mesh: + return joint_zero_pose + else: + raise NotImplementedError + mesh_zero_pose = output.vertices[0] # zero pose human + mesh_zero_pose_upsampled = smpl_x.upsample_mesh( + mesh_zero_pose + ) # zero pose human + return mesh_zero_pose_upsampled, mesh_zero_pose, joint_zero_pose + + def get_transform_mat_joint( + self, transform_mat_neutral_pose, joint_zero_pose, smplx_param + ): + """_summary_ + Args: + transform_mat_neutral_pose (_type_): [B, 55, 4, 4] + joint_zero_pose (_type_): [B, 55, 3] + smplx_param (_type_): dict + Returns: + _type_: _description_ + """ + + # 1. 大 pose -> zero pose + transform_mat_joint_1 = transform_mat_neutral_pose + + # 2. zero pose -> image pose + root_pose = smplx_param["root_pose"] + body_pose = smplx_param["body_pose"] + jaw_pose = smplx_param["jaw_pose"] + leye_pose = smplx_param["leye_pose"] + reye_pose = smplx_param["reye_pose"] + lhand_pose = smplx_param["lhand_pose"] + rhand_pose = smplx_param["rhand_pose"] + # trans = smplx_param['trans'] + + # forward kinematics + pose = torch.cat( + ( + root_pose.unsqueeze(1), + body_pose, + jaw_pose.unsqueeze(1), + leye_pose.unsqueeze(1), + reye_pose.unsqueeze(1), + lhand_pose, + rhand_pose, + ), + dim=1, + ) # [B, 55, 3] + pose = axis_angle_to_matrix(pose) # [B, 55, 3, 3] + posed_joints, transform_mat_joint_2 = batch_rigid_transform( + pose[:, :, :, :], joint_zero_pose[:, :, :], self.smplx_layer.parents + ) + transform_mat_joint_2 = transform_mat_joint_2 # [B, 55, 4, 4] + + # 3. combine 1. 大 pose -> zero pose and 2. 
zero pose -> image pose + transform_mat_joint = torch.matmul( + transform_mat_joint_2, transform_mat_joint_1 + ) # [B, 55, 4, 4] + + return transform_mat_joint, posed_joints + + def get_transform_mat_vertex(self, transform_mat_joint, nn_vertex_idxs): + batch_size = transform_mat_joint.shape[0] + skinning_weight = self.skinning_weight.unsqueeze(0).repeat(batch_size, 1, 1) + skinning_weight = skinning_weight.view(-1, skinning_weight.shape[-1])[ + nn_vertex_idxs.view(-1) + ].view( + nn_vertex_idxs.shape[0], nn_vertex_idxs.shape[1], skinning_weight.shape[-1] + ) + transform_mat_vertex = torch.matmul( + skinning_weight, + transform_mat_joint.view(batch_size, self.smpl_x.joint_num, 16), + ).view(batch_size, self.smpl_x.vertex_num_upsampled, 4, 4) + return transform_mat_vertex + + def get_posed_blendshape(self, smplx_param): + # posed_blendshape is only applied on hand and face, which parts are closed to smplx model + root_pose = smplx_param["root_pose"] + body_pose = smplx_param["body_pose"] + jaw_pose = smplx_param["jaw_pose"] + leye_pose = smplx_param["leye_pose"] + reye_pose = smplx_param["reye_pose"] + lhand_pose = smplx_param["lhand_pose"] + rhand_pose = smplx_param["rhand_pose"] + batch_size = root_pose.shape[0] + + pose = torch.cat( + ( + body_pose, + jaw_pose.unsqueeze(1), + leye_pose.unsqueeze(1), + reye_pose.unsqueeze(1), + lhand_pose, + rhand_pose, + ), + dim=1, + ) # [B, 54, 3] + # smplx pose-dependent vertex offset + pose = ( + axis_angle_to_matrix(pose) - torch.eye(3)[None, None, :, :].float().cuda() + ).view(batch_size, (self.smpl_x.joint_num - 1) * 9) + # (B, 54 * 9) x (54*9, V) + + smplx_pose_offset = torch.matmul(pose.detach(), self.pose_dirs).view( + batch_size, self.smpl_x.vertex_num_upsampled, 3 + ) + return smplx_pose_offset + + def lbs(self, xyz, transform_mat_vertex, trans): + batch_size = xyz.shape[0] + xyz = torch.cat( + (xyz, torch.ones_like(xyz[:, :, :1])), dim=-1 + ) # 大 pose. xyz1 [B, N, 4] + xyz = torch.matmul(transform_mat_vertex, xyz[:, :, :, None]).view( + batch_size, self.smpl_x.vertex_num_upsampled, 4 + )[ + :, :, :3 + ] # [B, N, 3] + xyz = xyz + trans.unsqueeze(1) + return xyz + + def lr_idx_to_hr_idx(self, idx): + # follow 'subdivide_homogeneous' function of https://pytorch3d.readthedocs.io/en/latest/_modules/pytorch3d/ops/subdivide_meshes.html#SubdivideMeshes + # the low-res part takes first N_lr vertices out of N_hr vertices + return idx + + def transform_to_posed_verts_from_neutral_pose( + self, mean_3d, smplx_data, mesh_neutral_pose, transform_mat_neutral_pose, device + ): + """ + Transform the mean 3D vertices to posed vertices from the neutral pose. + + mean_3d (torch.Tensor): Mean 3D vertices with shape [B*Nv, N, 3] + offset. + smplx_data (dict): SMPL-X data containing body_pose with shape [B*Nv, 21, 3] and betas with shape [B, 100]. + mesh_neutral_pose (torch.Tensor): Mesh vertices in the neutral pose with shape [B*Nv, N, 3]. + transform_mat_neutral_pose (torch.Tensor): Transformation matrix of the neutral pose with shape [B*Nv, 4, 4]. + device (torch.device): Device to perform the computation. + + Returns: + torch.Tensor: Posed vertices with shape [B*Nv, N, 3] + offset. 
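+ torch.Tensor: Per-vertex LBS transformation matrices with shape [B*Nv, N, 4, 4].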
+ """ + + batch_size = mean_3d.shape[0] + shape_param = smplx_data["betas"] + face_offset = smplx_data.get("face_offset", None) + joint_offset = smplx_data.get("joint_offset", None) + if shape_param.shape[0] != batch_size: + num_views = batch_size // shape_param.shape[0] + # print(shape_param.shape, batch_size) + shape_param = ( + shape_param.unsqueeze(1) + .repeat(1, num_views, 1) + .view(-1, shape_param.shape[1]) + ) + if face_offset is not None: + face_offset = ( + face_offset.unsqueeze(1) + .repeat(1, num_views, 1, 1) + .view(-1, *face_offset.shape[1:]) + ) + if joint_offset is not None: + joint_offset = ( + joint_offset.unsqueeze(1) + .repeat(1, num_views, 1, 1) + .view(-1, *joint_offset.shape[1:]) + ) + + # smplx facial expression offset + try: + smplx_expr_offset = ( + smplx_data["expr"].unsqueeze(1).unsqueeze(1) * self.expr_dirs + ).sum( + -1 + ) # [B, 1, 1, 50] x [N_V, 3, 50] -> [B, N_v, 3] + except: + smplx_expr_offset = 0.0 + + mean_3d = mean_3d + smplx_expr_offset # 大 pose + + if self.apply_pose_blendshape: + smplx_pose_offset = self.get_posed_blendshape(smplx_data) + mask = ( + ((self.is_rhand + self.is_lhand + self.is_face_expr) > 0) + .unsqueeze(0) + .repeat(batch_size, 1) + ) + mean_3d[mask] += smplx_pose_offset[mask] + + # get nearest vertex + + # for hands and face, assign original vertex index to use sknning weight of the original vertex + nn_vertex_idxs = knn_points( + mean_3d[:, :, :], mesh_neutral_pose[:, :, :], K=1, return_nn=True + ).idx[ + :, :, 0 + ] # dimension: smpl_x.vertex_num_upsampled + # nn_vertex_idxs = self.lr_idx_to_hr_idx(nn_vertex_idxs) + mask = ( + ((self.is_rhand + self.is_lhand + self.is_face) > 0) + .unsqueeze(0) + .repeat(batch_size, 1) + ) + nn_vertex_idxs[mask] = ( + torch.arange(self.smpl_x.vertex_num_upsampled) + .to(device) + .unsqueeze(0) + .repeat(batch_size, 1)[mask] + ) + + # get transformation matrix of the nearest vertex and perform lbs + joint_zero_pose = self.get_zero_pose_human( + shape_param=shape_param, + device=device, + face_offset=face_offset, + joint_offset=joint_offset, + ) + + # NOTE that the question "joint_zero_pose" is different with (transform_mat_neutral_pose)'s joints. 
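+ # get_transform_mat_joint chains two per-joint rigid transforms: the stored
+ # transform_mat_neutral_pose (canonical 大 pose -> zero pose) and the forward
+ # kinematics of the zero pose -> image pose given by smplx_data, so the result
+ # maps canonical-space points directly into the posed frame.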
+ transform_mat_joint, j3d = self.get_transform_mat_joint( + transform_mat_neutral_pose, joint_zero_pose, smplx_data + ) + + # compute vertices-LBS function + transform_mat_vertex = self.get_transform_mat_vertex( + transform_mat_joint, nn_vertex_idxs + ) + + mean_3d = self.lbs( + mean_3d, transform_mat_vertex, smplx_data["trans"] + ) # posed with smplx_param + + return mean_3d, transform_mat_vertex + + def get_query_points(self, smplx_data, device): + """transform_mat_neutral_pose is function to warp pre-defined posed to zero-pose""" + mesh_neutral_pose, mesh_neutral_pose_wo_upsample, transform_mat_neutral_pose = ( + self.get_neutral_pose_human( + jaw_zero_pose=True, + use_id_info=True, + shape_param=smplx_data["betas"], + device=device, + face_offset=smplx_data.get("face_offset", None), + joint_offset=smplx_data.get("joint_offset", None), + ) + ) + return ( + mesh_neutral_pose, + mesh_neutral_pose_wo_upsample, + transform_mat_neutral_pose, + ) + + def transform_to_posed_verts(self, smplx_data, device): + """_summary_ + Args: + smplx_data (_type_): e.g., body_pose:[B*Nv, 21, 3], betas:[B*Nv, 100] + """ + + # neutral posed verts + mesh_neutral_pose, _, transform_mat_neutral_pose = self.get_query_points( + smplx_data, device + ) + + # print(mesh_neutral_pose.shape, transform_mat_neutral_pose.shape, mesh_neutral_pose.shape, smplx_data["body_pose"].shape) + mean_3d, transform_matrix = self.transform_to_posed_verts_from_neutral_pose( + mesh_neutral_pose, + smplx_data, + mesh_neutral_pose, + transform_mat_neutral_pose, + device, + ) + + return mean_3d, transform_matrix + + +def read_smplx_param(smplx_data_root, shape_param_file, batch_size=1, device="cuda"): + import json + from glob import glob + + import cv2 + + data_root_path = osp.dirname(osp.dirname(smplx_data_root)) + + # load smplx parameters + smplx_param_path_list = sorted(glob(osp.join(smplx_data_root, "*.json"))) + print(smplx_param_path_list[:3]) + + smplx_params_all_frames = {} + for smplx_param_path in smplx_param_path_list: + frame_idx = int(smplx_param_path.split("/")[-1][:-5]) + with open(smplx_param_path) as f: + smplx_params_all_frames[frame_idx] = { + k: torch.FloatTensor(v) for k, v in json.load(f).items() + } + + with open(shape_param_file) as f: + shape_param = torch.FloatTensor(json.load(f)) + + smplx_params = {} + smplx_params["betas"] = shape_param.unsqueeze(0).repeat(batch_size, 1) + # smplx_params["betas"][0] = torch.zeros_like(smplx_params["betas"][0]) + # smplx_params["betas"] = torch.zeros_like(smplx_params["betas"]) + + select_frame_idx = [200, 400, 600] + smplx_params_tmp = defaultdict(list) + cam_param_list = [] + ori_image_list = [] + for b_idx in range(batch_size): + frame_idx = select_frame_idx[b_idx] + + for k, v in smplx_params_all_frames[frame_idx].items(): + smplx_params_tmp[k].append(v) + + with open( + osp.join(data_root_path, "cam_params", str(frame_idx) + ".json") + ) as f: + cam_param = { + k: torch.FloatTensor(v).cuda() for k, v in json.load(f).items() + } + cam_param_list.append(cam_param) + + img = cv2.imread(osp.join(data_root_path, "frames", str(frame_idx) + ".png")) + ori_image_list.append(img) + + for k, v in smplx_params_tmp.items(): + smplx_params[k] = torch.stack(smplx_params_tmp[k]) + + root_path = osp.dirname(smplx_data_root) + with open(osp.join(root_path, "face_offset.json")) as f: + face_offset = torch.FloatTensor(json.load(f)) + with open(osp.join(root_path, "joint_offset.json")) as f: + joint_offset = torch.FloatTensor(json.load(f)) + with open(osp.join(root_path, 
"locator_offset.json")) as f: + locator_offset = torch.FloatTensor(json.load(f)) + + smplx_params["locator_offset"] = locator_offset.unsqueeze(0).repeat( + batch_size, 1, 1 + ) + smplx_params["joint_offset"] = joint_offset.unsqueeze(0).repeat(batch_size, 1, 1) + smplx_params["face_offset"] = face_offset.unsqueeze(0).repeat(batch_size, 1, 1) + + for k, v in smplx_params.items(): + print(k, v.shape) + smplx_params[k] = v.to(device) + + return smplx_params, cam_param_list, ori_image_list + + +def test(): + import cv2 + + human_model_path = "./pretrained_models/human_model_files" + gender = "male" + # gender = "neutral" + + smplx_model = SMPLXMesh_Model(human_model_path, gender, subdivide_num=2) + smplx_model.to("cuda") + + smplx_data_root = "/data1/projects/ExAvatar_RELEASE/avatar/data/Custom/data/gyeongsik/smplx_optimized/smplx_params_smoothed" + shape_param_file = "/data1/projects/ExAvatar_RELEASE/avatar/data/Custom/data/gyeongsik/smplx_optimized/shape_param.json" + smplx_data, cam_param_list, ori_image_list = read_smplx_param( + smplx_data_root=smplx_data_root, shape_param_file=shape_param_file, batch_size=2 + ) + posed_verts = smplx_model.transform_to_posed_verts( + smplx_data=smplx_data, device="cuda" + ) + + smplx_face = smplx_model.smpl_x.face_upsampled + trimesh.Trimesh( + vertices=posed_verts[0].detach().cpu().numpy(), faces=smplx_face + ).export("./posed_obj1.obj") + trimesh.Trimesh( + vertices=posed_verts[1].detach().cpu().numpy(), faces=smplx_face + ).export("./posed_obj2.obj") + + neutral_posed_verts, _, _ = smplx_model.get_query_points( + smplx_data=smplx_data, device="cuda" + ) + smplx_face = smplx_model.smpl_x.face + trimesh.Trimesh( + vertices=neutral_posed_verts[0].detach().cpu().numpy(), faces=smplx_face + ).export("./neutral_posed_obj1.obj") + trimesh.Trimesh( + vertices=neutral_posed_verts[1].detach().cpu().numpy(), faces=smplx_face + ).export("./neutral_posed_obj2.obj") + + # batch_size = smplx_data['root_pose'].shape[0] + # root_pose = smplx_data['root_pose'] + # body_pose = smplx_data['body_pose'] + # jaw_pose = smplx_data['jaw_pose'] + # leye_pose = smplx_data['leye_pose'] + # reye_pose = smplx_data['reye_pose'] + # lhand_pose = smplx_data['lhand_pose'].view(batch_size, len(smplx.smpl_x.joint_part['lhand'])*3) + # rhand_pose = smplx_data['rhand_pose'].view(batch_size, len(smplx.smpl_x.joint_part['rhand'])*3) + # expr = smplx_data['expr'].view(batch_size, smplx.smpl_x.expr_param_dim) + # trans = smplx_data['trans'].view(batch_size, 3) + # shape = smplx_data["betas"] + # face_offset = smplx_data["face_offset"] + # joint_offset = smplx_data["joint_offset"] + + # smplx_layer = smplx.smplx_layer + # smplx_face = smplx.smpl_x.face + # output = smplx_layer(global_orient=root_pose, body_pose=body_pose, jaw_pose=jaw_pose, + # leye_pose=leye_pose, reye_pose=reye_pose, + # left_hand_pose=lhand_pose, right_hand_pose=rhand_pose, + # expression=expr, betas=shape, + # transl=trans, + # face_offset=face_offset, joint_offset=joint_offset) + # posed_verts = [e for e in output.vertices] + # trimesh.Trimesh(vertices=posed_verts[0].detach().cpu().numpy(), faces=smplx_face).export("./posed_obj1_from_zeropose.obj") + # trimesh.Trimesh(vertices=posed_verts[1].detach().cpu().numpy(), faces=smplx_face).export("./posed_obj2_from_zeropose.obj") + + for idx, (cam_param, img) in enumerate(zip(cam_param_list, ori_image_list)): + render_shape = img.shape[:2] + mesh_render, is_bkg = render_mesh( + posed_verts[idx], + smplx_face, + cam_param, + np.ones((render_shape[0], render_shape[1], 3), 
dtype=np.float32) * 255, + return_bg_mask=True, + ) + mesh_render = mesh_render.astype(np.uint8) + cv2.imwrite( + f"./debug_render_{idx}.jpg", + np.clip( + (0.9 * mesh_render + 0.1 * img) * (1 - is_bkg) + is_bkg * img, 0, 255 + ).astype(np.uint8), + ) + # cv2.imwrite(f"./debug_render_{idx}_img.jpg", np.clip(img, 0, 255).astype(np.uint8)) + # cv2.imwrite(f"./debug_render_{idx}_mesh.jpg", np.clip(mesh_render, 0, 255).astype(np.uint8)) + + +def read_smplx_param_humman( + imgs_root, smplx_params_root, img_size=896, batch_size=1, device="cuda" +): + import json + import os + from glob import glob + + import cv2 + from PIL import Image, ImageOps + + # Input images + suffixes = (".jpg", ".jpeg", ".png", ".webp") + img_path_list = [ + os.path.join(imgs_root, file) + for file in os.listdir(imgs_root) + if file.endswith(suffixes) and file[0] != "." + ] + + ori_image_list = [] + smplx_params_tmp = defaultdict(list) + + for img_path in img_path_list: + smplx_path = os.path.join( + smplx_params_root, os.path.splitext(os.path.basename(img_path))[0] + ".json" + ) + + # Open and reshape + img_pil = Image.open(img_path).convert("RGB") + img_pil = ImageOps.contain( + img_pil, (img_size, img_size) + ) # keep the same aspect ratio + # ori_w, ori_h = img_pil.size + # img_pil_pad = ImageOps.pad(img_pil, size=(img_size,img_size)) # pad with zero on the smallest side + # offset_w, offset_h = (img_size - ori_w) // 2, (img_size - ori_h) // 2 + + # img = np.array(img_pil_pad)[:, :, (2, 1, 0)] + img = np.array(img_pil)[:, :, (2, 1, 0)] + ori_image_list.append(img) + + with open(smplx_path) as f: + smplx_param = {k: torch.FloatTensor(v) for k, v in json.load(f).items()} + + for k, v in smplx_param.items(): + smplx_params_tmp[k].append(v) + + smplx_params = {} + for k, v in smplx_params_tmp.items(): + smplx_params[k] = torch.stack(smplx_params_tmp[k]) + + for k, v in smplx_params.items(): + print(k, v.shape) + smplx_params[k] = v.to(device) + + cam_param_list = [] + for i in range(smplx_params["focal"].shape[0]): + princpt = smplx_params["princpt"][i] + cam_param = {"focal": smplx_params["focal"][i], "princpt": princpt} + cam_param_list.append(cam_param) + return smplx_params, cam_param_list, ori_image_list + + +def test_humman(): + import cv2 + + human_model_path = "./pretrained_models/human_model_files" + # gender = "male" + gender = "neutral" + + smplx_model = SMPLXModel( + human_model_path, gender, shape_param_dim=10, expr_param_dim=10, subdivide_num=2 + ) + smplx_model.to("cuda") + + # root_dir = "./train_data/humman/humman_compressed" + # meta_path = "./train_data/humman/humman_id_list.json" + # dataset = HuMManDataset(root_dirs=root_dir, meta_path=meta_path, sample_side_views=3, + # render_image_res_low=384, render_image_res_high=384, + # render_region_size=(682, 384), source_image_res=384) + + # root_dir = "./train_data/static_human_data" + # meta_path = "./train_data/static_human_data/data_id_list.json" + # dataset = StaticHumanDataset(root_dirs=root_dir, meta_path=meta_path, sample_side_views=7, + # render_image_res_low=384, render_image_res_high=384, + # render_region_size=(682, 384), source_image_res=384, + # debug=False) + + # from openlrm.datasets.singleview_human import SingleViewHumanDataset + # root_dir = "./train_data/single_view" + # meta_path = "./train_data/single_view/data_SHHQ.json" + # dataset = SingleViewHumanDataset(root_dirs=root_dir, meta_path=meta_path, sample_side_views=0, + # render_image_res_low=384, render_image_res_high=384, + # render_region_size=(682, 384), source_image_res=384, + # 
debug=False) + + from accelerate.utils import set_seed + + set_seed(1234) + from LHM.datasets.video_human import VideoHumanDataset + + root_dir = "./train_data/ClothVideo" + meta_path = "./train_data/ClothVideo/label/valid_id_with_img_list.json" + dataset = VideoHumanDataset( + root_dirs=root_dir, + meta_path=meta_path, + sample_side_views=7, + render_image_res_low=384, + render_image_res_high=384, + render_region_size=(682, 384), + source_image_res=384, + enlarge_ratio=[0.85, 1.2], + debug=False, + ) + data = dataset[0] + # for idx, data in enumerate(dataset): + # if idx == 2: + # break + + def get_smplx_params(data): + smplx_params = {} + smplx_keys = [ + "root_pose", + "body_pose", + "jaw_pose", + "leye_pose", + "reye_pose", + "lhand_pose", + "rhand_pose", + "expr", + "trans", + "betas", + ] + for k, v in data.items(): + if k in smplx_keys: + # print(k, v.shape) + smplx_params[k] = data[k] + return smplx_params + + for k, v in data.items(): + if hasattr(v, "shape"): + print(k, v.shape) + + smplx_data = get_smplx_params(data) + smplx_data["betas"] = ( + smplx_data["betas"].unsqueeze(0).repeat(smplx_data["body_pose"].shape[0], 1) + ) + + smplx_data_tmp = {} + for k, v in smplx_data.items(): + smplx_data_tmp[k] = v.to("cuda") + print(k, v.shape) + smplx_data = smplx_data_tmp + + intrs = data["intrs"].to("cuda") + cam_param_list = [ + { + "focal": torch.tensor([e[0, 0], e[1, 1]]), + "princpt": torch.tensor([e[0, 2], e[1, 2]]), + } + for e in intrs + ] + print(cam_param_list[0]) + ori_image_list = [ + (e.permute(1, 2, 0)[:, :, (2, 1, 0)].numpy() * 255).astype(np.uint8) + for e in data["render_image"] + ] + + posed_verts = smplx_model.transform_to_posed_verts( + smplx_data=smplx_data, device="cuda" + ) + + os.makedirs("./debug_vis/smplx", exist_ok=True) + smplx_face = smplx_model.smpl_x.face_upsampled + trimesh.Trimesh( + vertices=posed_verts[0].detach().cpu().numpy(), faces=smplx_face + ).export("./debug_vis/smplx/posed_obj1.obj") + if len(posed_verts) > 1: + trimesh.Trimesh( + vertices=posed_verts[1].detach().cpu().numpy(), faces=smplx_face + ).export("./debug_vis/smplx/posed_obj2.obj") + + neutral_posed_verts, _, _ = smplx_model.get_query_points( + smplx_data=smplx_data, device="cuda" + ) + smplx_face = smplx_model.smpl_x.face + trimesh.Trimesh( + vertices=neutral_posed_verts[0].detach().cpu().numpy(), faces=smplx_face + ).export("./debug_vis/smplx/neutral_posed_obj1.obj") + if len(neutral_posed_verts) > 1: + trimesh.Trimesh( + vertices=neutral_posed_verts[1].detach().cpu().numpy(), faces=smplx_face + ).export("./debug_vis/smplx/neutral_posed_obj2.obj") + + for idx, (cam_param, img) in enumerate(zip(cam_param_list, ori_image_list)): + render_shape = img.shape[:2] + mesh_render, is_bkg = render_mesh( + posed_verts[idx], + smplx_face, + cam_param, + np.ones((render_shape[0], render_shape[1], 3), dtype=np.float32) * 255, + return_bg_mask=True, + ) + mesh_render = mesh_render.astype(np.uint8) + cv2.imwrite( + f"./debug_vis/smplx/debug_render_{idx}.jpg", + np.clip( + (0.9 * mesh_render + 0.1 * img) * (1 - is_bkg) + is_bkg * img, 0, 255 + ).astype(np.uint8), + ) + # cv2.imwrite(f"./debug_render_{idx}_img.jpg", np.clip(img, 0, 255).astype(np.uint8)) + # cv2.imwrite(f"./debug_render_{idx}_mesh.jpg", np.clip(mesh_render, 0, 255).astype(np.uint8)) + # if idx == 1: + # break + + +def generate_smplx_point(): + human_model_path = "./pretrained_models/human_model_files" + gender = "neutral" + subdivide_num = 1 + smplx_model = SMPLXModel( + human_model_path, + gender, + shape_param_dim=10, + 
expr_param_dim=10, + subdivide_num=subdivide_num, + cano_pose_type=1, + ) + smplx_model.to("cuda") + + # save_file = f"pretrained_models/human_model_files/smplx_points/smplx_subdivide{subdivide_num}.npy" + save_file = f"debug/smplx_points/smplx_subdivide{subdivide_num}.npy" + os.makedirs(os.path.dirname(save_file), exist_ok=True) + + smplx_data = {} + smplx_data["betas"] = torch.zeros((1, 10)).to(device="cuda") + mesh_neutral_pose, mesh_neutral_pose_wo_upsample, transform_mat_neutral_pose = ( + smplx_model.get_query_points(smplx_data=smplx_data, device="cuda") + ) + + pdb.set_trace() + + smplx_face = smplx_model.smpl_x.face_upsampled + + # trimesh.Trimesh( + # vertices=mesh_neutral_pose[0].detach().cpu().numpy(), faces=smplx_face + # ).export( + # f"pretrained_models/human_model_files/smplx_points/smplx_subdivide{subdivide_num}.obj" + # ) + + trimesh.Trimesh( + vertices=mesh_neutral_pose[0].detach().cpu().numpy(), faces=smplx_face + ).export(f"debug/smplx_points/smplx_subdivide{subdivide_num}.obj") + + np.save(save_file, mesh_neutral_pose[0].detach().cpu().numpy()) + + smplx_face = smplx_model.smpl_x.face + # save_file = f"pretrained_models/human_model_files/smplx_points/smplx.npy" + save_file = f"debug/smplx_points/smplx.npy" + + trimesh.Trimesh( + vertices=mesh_neutral_pose_wo_upsample[0].detach().cpu().numpy(), + faces=smplx_face, + process=False, + ).export(f"debug/smplx_points/smplx.obj") + np.save(save_file, mesh_neutral_pose_wo_upsample[0].detach().cpu().numpy()) + + +if __name__ == "__main__": + # test() + # test_humman() + generate_smplx_point() diff --git a/LHM/models/rendering/smpl_x_voxel_dense_sampling.py b/LHM/models/rendering/smpl_x_voxel_dense_sampling.py new file mode 100644 index 0000000000000000000000000000000000000000..9bb715793a7d77815c01b5da43a082970f877d03 --- /dev/null +++ b/LHM/models/rendering/smpl_x_voxel_dense_sampling.py @@ -0,0 +1,1738 @@ +# -*- coding: utf-8 -*-# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-01-08 21:42:24, Version 0.0, SMPLX + FLAME2019 + Voxel-Based Queries. +# @Function : SMPLX-related functions +# @Description : 1.canonical query, 2.offset, 3.blendshape -> 4.posed-view + +import copy +import math +import os +import os.path as osp +import pdb +import pickle +import sys + +sys.path.append("./") +from collections import defaultdict + +import numpy as np +import torch +import torch.nn as nn +import trimesh +from pytorch3d.io import load_ply, save_ply +from pytorch3d.ops import SubdivideMeshes, knn_points +from pytorch3d.structures import Meshes +from pytorch3d.transforms import axis_angle_to_matrix, matrix_to_axis_angle +from smplx.lbs import batch_rigid_transform +from torch.nn import functional as F + +from LHM.models.rendering.mesh_utils import Mesh +from LHM.models.rendering.smplx import smplx +from LHM.models.rendering.smplx.smplx.lbs import blend_shapes +from LHM.models.rendering.smplx.vis_utils import render_mesh + +""" +Subdivide a triangle mesh by adding a new vertex at the center of each edge and dividing each face into four new faces. +Vectors of vertex attributes can also be subdivided by averaging the values of the attributes at the two vertices which form each edge. +This implementation preserves face orientation - if the vertices of a face are all ordered counter-clockwise, +then the faces in the subdivided meshes will also have their vertices ordered counter-clockwise. 
+If meshes is provided as an input, the initializer performs the relatively expensive computation of determining the new face indices. +This one-time computation can be reused for all meshes with the same face topology but different vertex positions. +""" + + +def avaliable_device(): + + import torch + + if torch.cuda.is_available(): + current_device_id = torch.cuda.current_device() + device = f"cuda:{current_device_id}" + else: + device = "cpu" + + return device + + +class SMPLX_Mesh(object): + def __init__( + self, + human_model_path, + shape_param_dim=100, + expr_param_dim=50, + subdivide_num=2, + cano_pose_type=0, + ): + """SMPLX using dense sampling""" + super().__init__() + self.human_model_path = human_model_path + self.shape_param_dim = shape_param_dim + self.expr_param_dim = expr_param_dim + if shape_param_dim == 10 and expr_param_dim == 10: + self.layer_arg = { + "create_global_orient": False, + "create_body_pose": False, + "create_left_hand_pose": False, + "create_right_hand_pose": False, + "create_jaw_pose": False, + "create_leye_pose": False, + "create_reye_pose": False, + "create_betas": False, + "create_expression": False, + "create_transl": False, + } + self.layer = { + gender: smplx.create( + human_model_path, + "smplx", + gender=gender, + num_betas=self.shape_param_dim, + num_expression_coeffs=self.expr_param_dim, + use_pca=False, + use_face_contour=False, + flat_hand_mean=True, + **self.layer_arg, + ) + for gender in ["neutral", "male", "female"] + } + else: + self.layer_arg = { + "create_global_orient": False, + "create_body_pose": False, + "create_left_hand_pose": False, + "create_right_hand_pose": False, + "create_jaw_pose": False, + "create_leye_pose": False, + "create_reye_pose": False, + "create_betas": False, + "create_expression": False, + "create_transl": False, + } + self.layer = { + gender: smplx.create( + human_model_path, + "smplx", + gender=gender, + num_betas=self.shape_param_dim, + num_expression_coeffs=self.expr_param_dim, + use_pca=False, + use_face_contour=True, + flat_hand_mean=True, + **self.layer_arg, + ) + for gender in ["neutral", "male", "female"] + } + + self.face_vertex_idx = np.load( + osp.join(human_model_path, "smplx", "SMPL-X__FLAME_vertex_ids.npy") + ) + if shape_param_dim == 10 and expr_param_dim == 10: + print("not using flame expr") + else: + self.layer = { + gender: self.get_expr_from_flame(self.layer[gender]) + for gender in ["neutral", "male", "female"] + } + self.vertex_num = 10475 + self.face_orig = self.layer["neutral"].faces.astype(np.int64) + self.is_cavity, self.face = self.add_cavity() + with open( + osp.join(human_model_path, "smplx", "MANO_SMPLX_vertex_ids.pkl"), "rb" + ) as f: + hand_vertex_idx = pickle.load(f, encoding="latin1") + self.rhand_vertex_idx = hand_vertex_idx["right_hand"] + self.lhand_vertex_idx = hand_vertex_idx["left_hand"] + self.expr_vertex_idx = self.get_expr_vertex_idx() + + # SMPLX joint set + self.joint_num = ( + 55 # 22 (body joints: 21 + 1) + 3 (face joints) + 30 (hand joints) + ) + self.joints_name = ( + "Pelvis", + "L_Hip", + "R_Hip", + "Spine_1", + "L_Knee", + "R_Knee", + "Spine_2", + "L_Ankle", + "R_Ankle", + "Spine_3", + "L_Foot", + "R_Foot", + "Neck", + "L_Collar", + "R_Collar", + "Head", + "L_Shoulder", # 16 + "R_Shoulder", # 17 + "L_Elbow", + "R_Elbow", + "L_Wrist", + "R_Wrist", # body joints + "Jaw", + "L_Eye", + "R_Eye", # face joints + "L_Index_1", + "L_Index_2", + "L_Index_3", + "L_Middle_1", + "L_Middle_2", + "L_Middle_3", + "L_Pinky_1", + "L_Pinky_2", + "L_Pinky_3", + "L_Ring_1", + 
"L_Ring_2", + "L_Ring_3", + "L_Thumb_1", + "L_Thumb_2", + "L_Thumb_3", # left hand joints + "R_Index_1", + "R_Index_2", + "R_Index_3", + "R_Middle_1", + "R_Middle_2", + "R_Middle_3", + "R_Pinky_1", + "R_Pinky_2", + "R_Pinky_3", + "R_Ring_1", + "R_Ring_2", + "R_Ring_3", + "R_Thumb_1", + "R_Thumb_2", + "R_Thumb_3", # right hand joints + ) + self.root_joint_idx = self.joints_name.index("Pelvis") + self.joint_part = { + "body": range( + self.joints_name.index("Pelvis"), self.joints_name.index("R_Wrist") + 1 + ), + "face": range( + self.joints_name.index("Jaw"), self.joints_name.index("R_Eye") + 1 + ), + "lhand": range( + self.joints_name.index("L_Index_1"), + self.joints_name.index("L_Thumb_3") + 1, + ), + "rhand": range( + self.joints_name.index("R_Index_1"), + self.joints_name.index("R_Thumb_3") + 1, + ), + "lower_body": [ + self.joints_name.index("Pelvis"), + self.joints_name.index("R_Hip"), + self.joints_name.index("L_Hip"), + self.joints_name.index("R_Knee"), + self.joints_name.index("L_Knee"), + self.joints_name.index("R_Ankle"), + self.joints_name.index("L_Ankle"), + self.joints_name.index("R_Foot"), + self.joints_name.index("L_Foot"), + ], + "upper_body": [ + self.joints_name.index("Spine_1"), + self.joints_name.index("Spine_2"), + self.joints_name.index("Spine_3"), + self.joints_name.index("L_Collar"), + self.joints_name.index("R_Collar"), + self.joints_name.index("L_Shoulder"), + self.joints_name.index("R_Shoulder"), + self.joints_name.index("L_Elbow"), + self.joints_name.index("R_Elbow"), + self.joints_name.index("L_Wrist"), + self.joints_name.index("R_Wrist"), + ], + } + + self.lower_body_vertex_idx = self.get_body("lower_body") + self.upper_body_vertex_idx = self.get_body("upper_body") + + self.neutral_body_pose = torch.zeros( + (len(self.joint_part["body"]) - 1, 3) + ) # 大 pose in axis-angle representation (body pose without root joint) + if cano_pose_type == 0: # exavatar-cano-pose + self.neutral_body_pose[0] = torch.FloatTensor([0, 0, 1]) + self.neutral_body_pose[1] = torch.FloatTensor([0, 0, -1]) + else: # + self.neutral_body_pose[0] = torch.FloatTensor([0, 0, math.pi / 9]) + self.neutral_body_pose[1] = torch.FloatTensor([0, 0, -math.pi / 9]) + + self.neutral_jaw_pose = torch.FloatTensor([1 / 3, 0, 0]) + + # subdivider + + self.body_head_mapping = self.get_body_face_mapping() + + self.register_constrain_prior() + + def register_constrain_prior(self): + """As video cannot provide insufficient supervision for the canonical space, we add some human prior to constrain the rotation. 
Although it is a trick, it is very effective.""" + constrain_body = np.load( + "./pretrained_models/voxel_grid/human_prior_constrain.npz" + )["masks"] + + self.constrain_body_vertex_idx = np.where(constrain_body > 0)[0] + + def get_body(self, name): + """using skinning to find lower body vertices.""" + lower_body_skinning_index = set(self.joint_part[name]) + skinning_weight = self.layer["neutral"].lbs_weights.float() + skinning_part = skinning_weight.argmax(1) + skinning_part = skinning_part.cpu().numpy() + lower_body_vertice_idx = [] + for v_id, v_s in enumerate(skinning_part): + if v_s in lower_body_skinning_index: + lower_body_vertice_idx.append(v_id) + + lower_body_vertice_idx = np.asarray(lower_body_vertice_idx) + + return lower_body_vertice_idx + + def get_expr_from_flame(self, smplx_layer): + flame_layer = smplx.create( + self.human_model_path, + "flame", + gender="neutral", + num_betas=self.shape_param_dim, + num_expression_coeffs=self.expr_param_dim, + ) + smplx_layer.expr_dirs[self.face_vertex_idx, :, :] = flame_layer.expr_dirs + return smplx_layer + + def set_id_info(self, shape_param, face_offset, joint_offset, locator_offset): + self.shape_param = shape_param + self.face_offset = face_offset + self.joint_offset = joint_offset + self.locator_offset = locator_offset + + def get_joint_offset(self, joint_offset): + device = joint_offset.device + batch_size = joint_offset.shape[0] + weight = torch.ones((batch_size, self.joint_num, 1)).float().to(device) + weight[:, self.root_joint_idx, :] = 0 + joint_offset = joint_offset * weight + return joint_offset + + def get_subdivider(self, subdivide_num): + vert = self.layer["neutral"].v_template.float().cuda() + face = torch.LongTensor(self.face).cuda() + mesh = Meshes(vert[None, :, :], face[None, :, :]) + + if subdivide_num > 0: + subdivider_list = [SubdivideMeshes(mesh)] + for i in range(subdivide_num - 1): + mesh = subdivider_list[-1](mesh) + subdivider_list.append(SubdivideMeshes(mesh)) + else: + subdivider_list = [mesh] + return subdivider_list + + def get_body_face_mapping(self): + face_vertex_idx = self.face_vertex_idx + face_vertex_set = set(face_vertex_idx) + face = self.face.reshape(-1).tolist() + face_label = [f in face_vertex_set for f in face] + face_label = np.asarray(face_label).reshape(-1, 3) + face_label = face_label.sum(-1) + face_id = np.where(face_label == 3)[0] + + head_face = self.face[face_id] + + body_set = set(np.arange(self.vertex_num)) + body_v_id = body_set - face_vertex_set + body_v_id = np.array(list(body_v_id)) + + body_face_id = np.where(face_label == 0)[0] + body_face = self.face[body_face_id] + + ret_dict = dict( + head=dict(face=head_face, vert=face_vertex_idx), + body=dict(face=body_face, vert=body_v_id), + ) + + return ret_dict + + def add_cavity(self): + lip_vertex_idx = [2844, 2855, 8977, 1740, 1730, 1789, 8953, 2892] + is_cavity = np.zeros((self.vertex_num), dtype=np.float32) + is_cavity[lip_vertex_idx] = 1.0 + + cavity_face = [[0, 1, 7], [1, 2, 7], [2, 3, 5], [3, 4, 5], [2, 5, 6], [2, 6, 7]] + face_new = list(self.face_orig) + for face in cavity_face: + v1, v2, v3 = face + face_new.append( + [lip_vertex_idx[v1], lip_vertex_idx[v2], lip_vertex_idx[v3]] + ) + face_new = np.array(face_new, dtype=np.int64) + return is_cavity, face_new + + def get_expr_vertex_idx(self): + # FLAME 2020 has all vertices of expr_vertex_idx. 
use FLAME 2019 + """ + SMPLX + FLAME2019 Version + according to LBS weights to search related vertices ID + """ + + with open( + osp.join(self.human_model_path, "flame", "2019", "generic_model.pkl"), "rb" + ) as f: + flame_2019 = pickle.load(f, encoding="latin1") + vertex_idxs = np.where( + (flame_2019["shapedirs"][:, :, 300 : 300 + self.expr_param_dim] != 0).sum( + (1, 2) + ) + > 0 + )[ + 0 + ] # FLAME.SHAPE_SPACE_DIM == 300 + + # exclude neck and eyeball regions + flame_joints_name = ("Neck", "Head", "Jaw", "L_Eye", "R_Eye") + expr_vertex_idx = [] + flame_vertex_num = flame_2019["v_template"].shape[0] + is_neck_eye = torch.zeros((flame_vertex_num)).float() + is_neck_eye[ + flame_2019["weights"].argmax(1) == flame_joints_name.index("Neck") + ] = 1 + is_neck_eye[ + flame_2019["weights"].argmax(1) == flame_joints_name.index("L_Eye") + ] = 1 + is_neck_eye[ + flame_2019["weights"].argmax(1) == flame_joints_name.index("R_Eye") + ] = 1 + for idx in vertex_idxs: + if is_neck_eye[idx]: + continue + expr_vertex_idx.append(idx) + + expr_vertex_idx = np.array(expr_vertex_idx) + expr_vertex_idx = self.face_vertex_idx[expr_vertex_idx] + + return expr_vertex_idx + + def get_arm(self, mesh_neutral_pose, skinning_weight): + normal = ( + Meshes( + verts=mesh_neutral_pose[None, :, :], + faces=torch.LongTensor(self.face_upsampled).cuda()[None, :, :], + ) + .verts_normals_packed() + .reshape(self.vertex_num_upsampled, 3) + .detach() + ) + part_label = skinning_weight.argmax(1) + is_arm = 0 + for name in ("R_Shoulder", "R_Elbow", "L_Shoulder", "L_Elbow"): + is_arm = is_arm + (part_label == self.joints_name.index(name)) + is_arm = is_arm > 0 + is_upper_arm = is_arm * (normal[:, 1] > math.cos(math.pi / 3)) + is_lower_arm = is_arm * (normal[:, 1] <= math.cos(math.pi / 3)) + return is_upper_arm, is_lower_arm + + +class SMPLXVoxelMeshModel(nn.Module): + def __init__( + self, + human_model_path, + gender, + subdivide_num, + expr_param_dim=50, + shape_param_dim=100, + cano_pose_type=0, + body_face_ratio=3, + dense_sample_points=40000, + apply_pose_blendshape=False, + ) -> None: + super().__init__() + + self.smpl_x = SMPLX_Mesh( + human_model_path=human_model_path, + shape_param_dim=shape_param_dim, + expr_param_dim=expr_param_dim, + subdivide_num=subdivide_num, + cano_pose_type=cano_pose_type, + ) + self.smplx_layer = copy.deepcopy(self.smpl_x.layer[gender]) + + # register + self.apply_pose_blendshape = apply_pose_blendshape + self.cano_pose_type = cano_pose_type + self.dense_sample(body_face_ratio, dense_sample_points) + self.smplx_init() + + def rebuild_mesh(self, v, vertices_id, faces_id, num_dense_samples): + choice_vertices = v[vertices_id] + + new_mapping = dict() + + for new_id, vertice_id in enumerate(vertices_id): + new_mapping[vertice_id] = new_id + + faces_id_list = faces_id.reshape(-1).tolist() + + new_faces_id = [] + for face_id in faces_id_list: + new_faces_id.append(new_mapping[face_id]) + new_faces_id = torch.from_numpy(np.array(new_faces_id).reshape(faces_id.shape)) + + mymesh = Mesh(v=choice_vertices, f=new_faces_id) + + dense_sample_pts = mymesh.sample_surface(num_dense_samples).detach().cpu() + + return dense_sample_pts + + def dense_sample(self, body_face_ratio, dense_sample_points): + + buff_path = f"./pretrained_models/dense_sample_points/{self.cano_pose_type}_{dense_sample_points}.ply" + + if os.path.exists(buff_path): + dense_sample_pts, _ = load_ply(buff_path) + + _bin = dense_sample_points // (body_face_ratio + 1) + body_pts = int(_bin * body_face_ratio) + self.is_body = 
torch.arange(dense_sample_pts.shape[0]) + self.is_body[:body_pts] = 1 + self.is_body[body_pts:] = 0 + self.dense_pts = dense_sample_pts + else: + smpl_x = self.smpl_x + body_face_mapping = smpl_x.get_body_face_mapping() + face = smpl_x.face + template_verts = self.smplx_layer.v_template + + _bin = dense_sample_points // (body_face_ratio + 1) + + # build body mesh + body_pts = int(_bin * body_face_ratio) + body_dict = body_face_mapping["body"] + face = body_dict["face"] + verts = body_dict["vert"] + + dense_body_pts = self.rebuild_mesh(template_verts, verts, face, body_pts) + + # build face mesh + head_pts = int(_bin) + head_dict = body_face_mapping["head"] + head_face = head_dict["face"] + head_verts = head_dict["vert"] + dense_head_pts = self.rebuild_mesh( + template_verts, head_verts, head_face, head_pts + ) + + self.dense_pts = torch.cat([dense_body_pts, dense_head_pts], dim=0) + self.is_body = torch.arange(self.dense_pts.shape[0]) + self.is_body[:body_pts] = 1 + self.is_body[body_pts:] = 0 + + save_ply(buff_path, self.dense_pts) + + @torch.no_grad() + def voxel_smooth_register( + self, voxel_v, template_v, lbs_weights, k=3, smooth_k=30, smooth_n=3000 + ): + """Smooth KNN to handle skirt deformation.""" + + lbs_weights = lbs_weights.cuda() + + dist = knn_points( + voxel_v.unsqueeze(0).cuda(), + template_v.unsqueeze(0).cuda(), + K=1, + return_nn=True, + ) + mesh_dis = torch.sqrt(dist.dists) + mesh_indices = dist.idx.squeeze(0, -1) + knn_lbs_weights = lbs_weights[mesh_indices] + + mesh_dis = mesh_dis.squeeze() + + print(f"Using k = {smooth_k}, N={smooth_n} for LBS smoothing") + # Smooth Skinning + + knn_dis = knn_points( + voxel_v.unsqueeze(0).cuda(), + voxel_v.unsqueeze(0).cuda(), + K=smooth_k + 1, + return_nn=True, + ) + voxel_dis = torch.sqrt(knn_dis.dists) + voxel_indices = knn_dis.idx + voxel_indices = voxel_indices.squeeze()[:, 1:] + voxel_dis = voxel_dis.squeeze()[:, 1:] + + knn_weights = 1.0 / (mesh_dis[voxel_indices] * voxel_dis) + knn_weights = knn_weights / knn_weights.sum(-1, keepdim=True) # [N, K] + + def dists_to_weights( + dists: torch.Tensor, low: float = None, high: float = None + ): + if low is None: + low = high + if high is None: + high = low + assert high >= low + weights = dists.clone() + weights[dists <= low] = 0.0 + weights[dists >= high] = 1.0 + indices = (dists > low) & (dists < high) + weights[indices] = (dists[indices] - low) / (high - low) + return weights + + update_weights = dists_to_weights(mesh_dis, low=0.01).unsqueeze(-1) # [N, 1] + + from tqdm import tqdm + + for _ in tqdm(range(smooth_n)): + N, _ = update_weights.shape + new_lbs_weights_chunk_list = [] + for chunk_i in range(0, N, 1000000): + + knn_weights_chunk = knn_weights[chunk_i : chunk_i + 1000000] + voxel_indices_chunk = voxel_indices[chunk_i : chunk_i + 1000000] + + new_lbs_weights_chunk = torch.einsum( + "nk,nkj->nj", + knn_weights_chunk, + knn_lbs_weights[voxel_indices_chunk], + ) + new_lbs_weights_chunk_list.append(new_lbs_weights_chunk) + new_lbs_weights = torch.cat(new_lbs_weights_chunk_list, dim=0) + if update_weights is None: + knn_lbs_weights = new_lbs_weights + else: + knn_lbs_weights = ( + 1.0 - update_weights + ) * knn_lbs_weights + update_weights * new_lbs_weights + + return knn_lbs_weights + + def voxel_skinning_init(self, scale_ratio=1.05, voxel_size=256): + + skinning_weight = self.smplx_layer.lbs_weights.float() + + smplx_data = {"betas": torch.zeros(1, self.smpl_x.shape_param_dim)} + device = skinning_weight.device + + _, mesh_neutral_pose_wo_upsample, _ = 
self.get_neutral_pose_human( + jaw_zero_pose=True, + use_id_info=True, + shape_param=smplx_data["betas"], + device=device, + face_offset=smplx_data.get("face_offset", None), + joint_offset=smplx_data.get("joint_offset", None), + ) + + template_verts = mesh_neutral_pose_wo_upsample.squeeze(0) + + def scale_voxel_size(template_verts, scale_ratio=1.0): + min_values, _ = torch.min(template_verts, dim=0) + max_values, _ = torch.max(template_verts, dim=0) + + center = (min_values + max_values) / 2 + size = max_values - min_values + + scale_size = size * scale_ratio + + upper = center + scale_size / 2 + bottom = center - scale_size / 2 + + return torch.cat([bottom[:, None], upper[:, None]], dim=1) + + mini_size_bbox = scale_voxel_size(template_verts, scale_ratio) + z_voxel_size = voxel_size // 2 + + # build coordinate + x_range = np.linspace(0, voxel_size - 1, voxel_size) / ( + voxel_size - 1 + ) # from 0 to 255, + y_range = np.linspace(0, voxel_size - 1, voxel_size) / (voxel_size - 1) + z_range = np.linspace(0, z_voxel_size - 1, z_voxel_size) / (z_voxel_size - 1) + + x, y, z = np.meshgrid(x_range, y_range, z_range, indexing="ij") + coordinates = torch.from_numpy(np.stack([x, y, z], axis=-1)) + + coordinates[..., 0] = mini_size_bbox[0, 0] + coordinates[..., 0] * ( + mini_size_bbox[0, 1] - mini_size_bbox[0, 0] + ) + coordinates[..., 1] = mini_size_bbox[1, 0] + coordinates[..., 1] * ( + mini_size_bbox[1, 1] - mini_size_bbox[1, 0] + ) + coordinates[..., 2] = mini_size_bbox[2, 0] + coordinates[..., 2] * ( + mini_size_bbox[2, 1] - mini_size_bbox[2, 0] + ) + + coordinates = coordinates.view(-1, 3).float() + coordinates = coordinates.cuda() + + if os.path.exists(f"./pretrained_models/voxel_grid/voxel_{voxel_size}.pth"): + print(f"load voxel_grid voxel_{voxel_size}.pth") + voxel_flat = torch.load( + os.path.join(f"pretrained_models/voxel_grid/voxel_{voxel_size}.pth"), + map_location=avaliable_device(), + ) + else: + voxel_flat = self.voxel_smooth_register( + coordinates, template_verts, skinning_weight, k=1, smooth_n=3000 + ) + + torch.save( + voxel_flat, + os.path.join(f"pretrained_models/voxel_grid/voxel_{voxel_size}.pth"), + ) + + N, LBS_F = voxel_flat.shape + + # x, y, z, C + voxel_grid_original = voxel_flat.view( + voxel_size, voxel_size, z_voxel_size, LBS_F + ) + + # [W H D 55]->[55, D, H, W] + voxel_grid = voxel_grid_original.permute(3, 2, 1, 0) + + return voxel_grid, mini_size_bbox + + def smplx_init(self): + """ + Initialize the sub-devided smplx model by registering buffers for various attributes + This method performs the following steps: + 1. Upsamples the mesh and other assets. + 2. Computes skinning weights, pose directions, expression directions, and various flags for different body parts. + 3. Reshapes and permutes the pose and expression directions. + 4. Converts the flags to boolean values. + 5. Registers buffers for the computed attributes. + Args: + self: The object instance. 
+ Returns: + None + """ + + def _query(weights, indx): + + weights = weights.squeeze(0) + assert weights.dim() == 2 + + return weights[indx] + + smpl_x = self.smpl_x + + # using KNN to query subdivided mesh + dense_pts = self.dense_pts.cuda() + template_verts = self.smplx_layer.v_template + + nn_vertex_idxs = knn_points( + dense_pts.unsqueeze(0).cuda(), + template_verts.unsqueeze(0).cuda(), + K=1, + return_nn=True, + ).idx + query_indx = nn_vertex_idxs.squeeze(0, -1).detach().cpu() + + skinning_weight = self.smplx_layer.lbs_weights.float() + + """ PCA regression function w.r.t vertices offset + """ + pose_dirs = self.smplx_layer.posedirs.permute(1, 0).reshape( + smpl_x.vertex_num, 3 * (smpl_x.joint_num - 1) * 9 + ) + expr_dirs = self.smplx_layer.expr_dirs.view( + smpl_x.vertex_num, 3 * smpl_x.expr_param_dim + ) + shape_dirs = self.smplx_layer.shapedirs.view( + smpl_x.vertex_num, 3 * smpl_x.shape_param_dim + ) + + ( + is_rhand, + is_lhand, + is_face, + is_face_expr, + is_lower_body, + is_upper_body, + is_constrain_body, + ) = ( + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + torch.zeros((smpl_x.vertex_num, 1)).float(), + ) + ( + is_rhand[smpl_x.rhand_vertex_idx], + is_lhand[smpl_x.lhand_vertex_idx], + is_face[smpl_x.face_vertex_idx], + is_face_expr[smpl_x.expr_vertex_idx], + is_lower_body[smpl_x.lower_body_vertex_idx], + is_upper_body[smpl_x.upper_body_vertex_idx], + is_constrain_body[smpl_x.constrain_body_vertex_idx], + ) = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) + + is_cavity = torch.FloatTensor(smpl_x.is_cavity)[:, None] + + skinning_weight = _query(skinning_weight, query_indx) + pose_dirs = _query(pose_dirs, query_indx) + shape_dirs = _query(shape_dirs, query_indx) + expr_dirs = _query(expr_dirs, query_indx) + is_rhand = _query(is_rhand, query_indx) + is_lhand = _query(is_lhand, query_indx) + is_face = _query(is_face, query_indx) + is_face_expr = _query(is_face_expr, query_indx) + is_lower_body = _query(is_lower_body, query_indx) + is_upper_body = _query(is_upper_body, query_indx) + is_constrain_body = _query(is_constrain_body, query_indx) + is_cavity = _query(is_cavity, query_indx) + + vertex_num_upsampled = self.dense_pts.shape[0] + + pose_dirs = pose_dirs.reshape( + vertex_num_upsampled * 3, (smpl_x.joint_num - 1) * 9 + ).permute(1, 0) + expr_dirs = expr_dirs.view(vertex_num_upsampled, 3, smpl_x.expr_param_dim) + shape_dirs = shape_dirs.view(vertex_num_upsampled, 3, smpl_x.shape_param_dim) + + ( + is_rhand, + is_lhand, + is_face, + is_face_expr, + is_lower_body, + is_upper_body, + is_constrain_body, + ) = ( + is_rhand[:, 0] > 0, + is_lhand[:, 0] > 0, + is_face[:, 0] > 0, + is_face_expr[:, 0] > 0, + is_lower_body[:, 0] > 0, + is_upper_body[:, 0] > 0, + is_constrain_body[:, 0] > 0, + ) + is_cavity = is_cavity[:, 0] > 0 + + # self.register_buffer('pos_enc_mesh', xyz) + self.register_buffer("skinning_weight", skinning_weight.contiguous()) + self.register_buffer("pose_dirs", pose_dirs.contiguous()) + self.register_buffer("expr_dirs", expr_dirs.contiguous()) + self.register_buffer("shape_dirs", shape_dirs.contiguous()) + self.register_buffer("is_rhand", is_rhand.contiguous()) + self.register_buffer("is_lhand", is_lhand.contiguous()) + self.register_buffer("is_face", is_face.contiguous()) + self.register_buffer("is_face_expr", is_face_expr.contiguous()) + 
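+ # Each is_* buffer is a per-sample boolean mask (one entry per dense surface
+ # sample), copied from the nearest SMPL-X template vertex via the KNN query
+ # above; they are used later to pin hand/face samples to the original
+ # per-vertex skinning weights instead of the voxel-grid weights.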
self.register_buffer("is_lower_body", is_lower_body.contiguous()) + self.register_buffer("is_upper_body", is_upper_body.contiguous()) + self.register_buffer("is_constrain_body", is_constrain_body.contiguous()) + self.register_buffer("is_cavity", is_cavity.contiguous()) + + self.vertex_num_upsampled = vertex_num_upsampled + self.smpl_x.vertex_num_upsampled = vertex_num_upsampled # compatible with SMPLX + + voxel_skinning_weight, voxel_bbox = self.voxel_skinning_init(voxel_size=192) + self.register_buffer("voxel_ws", voxel_skinning_weight) + self.register_buffer("voxel_bbox", voxel_bbox) + + # self.query_voxel_debug() + + def get_body_infos(self): + + head_id = torch.where(self.is_face == True)[0] + body_id = torch.where(self.is_face == False)[0] + + is_lower_body = torch.where(self.is_lower_body == True)[0] + is_upper_body = torch.where(self.is_upper_body == True)[0] + is_rhand = torch.where(self.is_rhand == True)[0] + is_lhand = torch.where(self.is_lhand == True)[0] + + is_hand = torch.cat([is_rhand, is_lhand]) + + return dict( + head=head_id, + body=body_id, + lower_body=is_lower_body, + upper_body=is_upper_body, + hands=is_hand, + ) + + def query_voxel_debug(self): + + skinning_weight = self.smplx_layer.lbs_weights.float() + smplx_data = {"betas": torch.zeros(1, self.smpl_x.shape_param_dim)} + device = skinning_weight.device + + _, mesh_neutral_pose_wo_upsample, _ = self.get_neutral_pose_human( + jaw_zero_pose=True, + use_id_info=True, + shape_param=smplx_data["betas"], + device=device, + face_offset=smplx_data.get("face_offset", None), + joint_offset=smplx_data.get("joint_offset", None), + ) + + template_verts = mesh_neutral_pose_wo_upsample + + query_skinning = ( + self.query_voxel_skinning_weights(template_verts).squeeze(0).detach().cpu() + ) + skinning_weight = self.smplx_layer.lbs_weights.float() + + diff = torch.abs(query_skinning - skinning_weight) + + print(diff.sum()) + + def query_voxel_skinning_weights(self, vs): + """using voxel-based skinning method + vs: [B n c] + """ + voxel_bbox = self.voxel_bbox + + scale = voxel_bbox[..., 1] - voxel_bbox[..., 0] + center = voxel_bbox.mean(dim=1) + normalized_vs = (vs - center[None, None, :]) / scale[None, None] + # mapping to [-1, 1] **3 + normalized_vs = normalized_vs * 2 + normalized_vs.to(self.voxel_ws) + + B, N, _ = normalized_vs.shape + + query_ws = F.grid_sample( + self.voxel_ws.unsqueeze(0), # 1 C D H W + normalized_vs.reshape(1, 1, 1, -1, 3).to(self.voxel_ws), + align_corners=True, + padding_mode="border", + ) + query_ws = query_ws.view(B, -1, N) + query_ws = query_ws.permute(0, 2, 1) + + return query_ws # [B N C] + + def get_zero_pose_human( + self, shape_param, device, face_offset, joint_offset, return_mesh=False + ): + smpl_x = self.smpl_x + batch_size = shape_param.shape[0] + + zero_pose = torch.zeros((batch_size, 3)).float().to(device) + zero_body_pose = ( + torch.zeros((batch_size, (len(smpl_x.joint_part["body"]) - 1) * 3)) + .float() + .to(device) + ) + zero_hand_pose = ( + torch.zeros((batch_size, len(smpl_x.joint_part["lhand"]) * 3)) + .float() + .to(device) + ) + zero_expr = torch.zeros((batch_size, smpl_x.expr_param_dim)).float().to(device) + + face_offset = face_offset + joint_offset = ( + smpl_x.get_joint_offset(joint_offset) if joint_offset is not None else None + ) + output = self.smplx_layer( + global_orient=zero_pose, + body_pose=zero_body_pose, + left_hand_pose=zero_hand_pose, + right_hand_pose=zero_hand_pose, + jaw_pose=zero_pose, + leye_pose=zero_pose, + reye_pose=zero_pose, + expression=zero_expr, + 
betas=shape_param, + face_offset=face_offset, + joint_offset=joint_offset, + ) + joint_zero_pose = output.joints[:, : smpl_x.joint_num, :] # zero pose human + + if not return_mesh: + return joint_zero_pose + else: + raise NotImplementedError + + def get_transform_mat_joint( + self, transform_mat_neutral_pose, joint_zero_pose, smplx_param + ): + """_summary_ + Args: + transform_mat_neutral_pose (_type_): [B, 55, 4, 4] + joint_zero_pose (_type_): [B, 55, 3] + smplx_param (_type_): dict + Returns: + _type_: _description_ + """ + + # 1. 大 pose -> zero pose + transform_mat_joint_1 = transform_mat_neutral_pose + + # 2. zero pose -> image pose + root_pose = smplx_param["root_pose"] + body_pose = smplx_param["body_pose"] + jaw_pose = smplx_param["jaw_pose"] + leye_pose = smplx_param["leye_pose"] + reye_pose = smplx_param["reye_pose"] + lhand_pose = smplx_param["lhand_pose"] + rhand_pose = smplx_param["rhand_pose"] + # trans = smplx_param['trans'] + + # forward kinematics + + pose = torch.cat( + ( + root_pose.unsqueeze(1), + body_pose, + jaw_pose.unsqueeze(1), + leye_pose.unsqueeze(1), + reye_pose.unsqueeze(1), + lhand_pose, + rhand_pose, + ), + dim=1, + ) # [B, 55, 3] + pose = axis_angle_to_matrix(pose) # [B, 55, 3, 3] + posed_joints, transform_mat_joint_2 = batch_rigid_transform( + pose[:, :, :, :], joint_zero_pose[:, :, :], self.smplx_layer.parents + ) + transform_mat_joint_2 = transform_mat_joint_2 # [B, 55, 4, 4] + + # 3. combine 1. 大 pose -> zero pose and 2. zero pose -> image pose + if transform_mat_joint_1 is not None: + transform_mat_joint = torch.matmul( + transform_mat_joint_2, transform_mat_joint_1 + ) # [B, 55, 4, 4] + else: + transform_mat_joint = transform_mat_joint_2 + + return transform_mat_joint, posed_joints + + def get_transform_mat_vertex(self, transform_mat_joint, query_points, fix_mask): + batch_size = transform_mat_joint.shape[0] + + query_skinning = self.query_voxel_skinning_weights(query_points) + skinning_weight = self.skinning_weight.unsqueeze(0).repeat(batch_size, 1, 1) + query_skinning[fix_mask] = skinning_weight[fix_mask] + + transform_mat_vertex = torch.matmul( + skinning_weight, + transform_mat_joint.view(batch_size, self.smpl_x.joint_num, 16), + ).view(batch_size, self.smpl_x.vertex_num_upsampled, 4, 4) + return transform_mat_vertex + + def get_posed_blendshape(self, smplx_param): + # posed_blendshape is only applied on hand and face, which parts are closed to smplx model + root_pose = smplx_param["root_pose"] + body_pose = smplx_param["body_pose"] + jaw_pose = smplx_param["jaw_pose"] + leye_pose = smplx_param["leye_pose"] + reye_pose = smplx_param["reye_pose"] + lhand_pose = smplx_param["lhand_pose"] + rhand_pose = smplx_param["rhand_pose"] + batch_size = root_pose.shape[0] + + pose = torch.cat( + ( + body_pose, + jaw_pose.unsqueeze(1), + leye_pose.unsqueeze(1), + reye_pose.unsqueeze(1), + lhand_pose, + rhand_pose, + ), + dim=1, + ) # [B, 54, 3] + # smplx pose-dependent vertex offset + pose = ( + axis_angle_to_matrix(pose) - torch.eye(3)[None, None, :, :].float().cuda() + ).view(batch_size, (self.smpl_x.joint_num - 1) * 9) + # (B, 54 * 9) x (54*9, V) + + smplx_pose_offset = torch.matmul(pose.detach(), self.pose_dirs).view( + batch_size, self.smpl_x.vertex_num_upsampled, 3 + ) + return smplx_pose_offset + + def lbs(self, xyz, transform_mat_vertex, trans): + batch_size = xyz.shape[0] + xyz = torch.cat( + (xyz, torch.ones_like(xyz[:, :, :1])), dim=-1 + ) # 大 pose. 
xyz1 [B, N, 4] + xyz = torch.matmul(transform_mat_vertex, xyz[:, :, :, None]).view( + batch_size, self.vertex_num_upsampled, 4 + )[ + :, :, :3 + ] # [B, N, 3] + if trans is not None: + xyz = xyz + trans.unsqueeze(1) + return xyz + + def lr_idx_to_hr_idx(self, idx): + # follow 'subdivide_homogeneous' function of https://pytorch3d.readthedocs.io/en/latest/_modules/pytorch3d/ops/subdivide_meshes.html#SubdivideMeshes + # the low-res part takes first N_lr vertices out of N_hr vertices + return idx + + def transform_to_posed_verts_from_neutral_pose( + self, mean_3d, smplx_data, mesh_neutral_pose, transform_mat_neutral_pose, device + ): + """ + Transform the mean 3D vertices to posed vertices from the neutral pose. + + mean_3d (torch.Tensor): Mean 3D vertices with shape [B*Nv, N, 3] + offset. + smplx_data (dict): SMPL-X data containing body_pose with shape [B*Nv, 21, 3] and betas with shape [B, 100]. + mesh_neutral_pose (torch.Tensor): Mesh vertices in the neutral pose with shape [B*Nv, N, 3]. + transform_mat_neutral_pose (torch.Tensor): Transformation matrix of the neutral pose with shape [B*Nv, 4, 4]. + device (torch.device): Device to perform the computation. + + Returns: + torch.Tensor: Posed vertices with shape [B*Nv, N, 3] + offset. + """ + + batch_size = mean_3d.shape[0] + shape_param = smplx_data["betas"] + face_offset = smplx_data.get("face_offset", None) + joint_offset = smplx_data.get("joint_offset", None) + + if shape_param.shape[0] != batch_size: + num_views = batch_size // shape_param.shape[0] + # print(shape_param.shape, batch_size) + shape_param = ( + shape_param.unsqueeze(1) + .repeat(1, num_views, 1) + .view(-1, shape_param.shape[1]) + ) + if face_offset is not None: + face_offset = ( + face_offset.unsqueeze(1) + .repeat(1, num_views, 1, 1) + .view(-1, *face_offset.shape[1:]) + ) + if joint_offset is not None: + joint_offset = ( + joint_offset.unsqueeze(1) + .repeat(1, num_views, 1, 1) + .view(-1, *joint_offset.shape[1:]) + ) + + # smplx facial expression offset + + try: + smplx_expr_offset = ( + smplx_data["expr"].unsqueeze(1).unsqueeze(1) * self.expr_dirs + ).sum( + -1 + ) # [B, 1, 1, 50] x [N_V, 3, 50] -> [B, N_v, 3] + except: + print("no use flame params") + smplx_expr_offset = 0.0 + + mean_3d = mean_3d + smplx_expr_offset # 大 pose + + # get nearest vertex + + # for hands and face, assign original vertex index to use sknning weight of the original vertex + mask = ( + ((self.is_rhand + self.is_lhand + self.is_face) > 0) + .unsqueeze(0) + .repeat(batch_size, 1) + ) + + # compute vertices-LBS function + transform_mat_null_vertex = self.get_transform_mat_vertex( + transform_mat_neutral_pose, mean_3d, mask + ) + + null_mean_3d = self.lbs( + mean_3d, transform_mat_null_vertex, torch.zeros_like(smplx_data["trans"]) + ) # posed with smplx_param + + # blend_shape offset + blend_shape_offset = blend_shapes(shape_param, self.shape_dirs) + null_mean3d_blendshape = null_mean_3d + blend_shape_offset + + # get transformation matrix of the nearest vertex and perform lbs + joint_null_pose = self.get_zero_pose_human( + shape_param=shape_param, # target shape + device=device, + face_offset=face_offset, + joint_offset=joint_offset, + ) + + # NOTE that the question "joint_zero_pose" is different with (transform_mat_neutral_pose)'s joints. 
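+ # transform_mat_neutral_pose is passed as None here: the canonical -> zero-pose
+ # warp was already applied above through transform_mat_null_vertex, so only the
+ # zero-pose -> image-pose forward kinematics is needed; the full chain is then
+ # recovered as matmul(transform_mat_vertex, transform_mat_null_vertex) below.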
+ transform_mat_joint, j3d = self.get_transform_mat_joint( + None, joint_null_pose, smplx_data + ) + + # compute vertices-LBS function + transform_mat_vertex = self.get_transform_mat_vertex( + transform_mat_joint, mean_3d, mask + ) + + posed_mean_3d = self.lbs( + null_mean3d_blendshape, transform_mat_vertex, smplx_data["trans"] + ) # posed with smplx_param + + # as we do not use transform port [...,:,3],so we simply compute chain matrix + neutral_to_posed_vertex = torch.matmul( + transform_mat_vertex, transform_mat_null_vertex + ) # [B, N, 4, 4] + + return posed_mean_3d, neutral_to_posed_vertex + + def get_query_points(self, smplx_data, device): + """transform_mat_neutral_pose is function to warp pre-defined posed to zero-pose""" + + mesh_neutral_pose, mesh_neutral_pose_wo_upsample, transform_mat_neutral_pose = ( + self.get_neutral_pose_human( + jaw_zero_pose=True, + use_id_info=False, # we blendshape at zero-pose + shape_param=smplx_data["betas"], + device=device, + face_offset=smplx_data.get("face_offset", None), + joint_offset=smplx_data.get("joint_offset", None), + ) + ) + + return ( + mesh_neutral_pose, + mesh_neutral_pose_wo_upsample, + transform_mat_neutral_pose, + ) + + def transform_to_posed_verts(self, smplx_data, device): + """_summary_ + Args: + smplx_data (_type_): e.g., body_pose:[B*Nv, 21, 3], betas:[B*Nv, 100] + """ + + # neutral posed verts + mesh_neutral_pose, _, transform_mat_neutral_pose = self.get_query_points( + smplx_data, device + ) + + # print(mesh_neutral_pose.shape, transform_mat_neutral_pose.shape, mesh_neutral_pose.shape, smplx_data["body_pose"].shape) + mean_3d, transform_matrix = self.transform_to_posed_verts_from_neutral_pose( + mesh_neutral_pose, + smplx_data, + mesh_neutral_pose, + transform_mat_neutral_pose, + device, + ) + + return mean_3d, transform_matrix + + def upsample_mesh_batch( + self, + smpl_x, + shape_param, + neutral_body_pose, + jaw_pose, + expression, + betas, + face_offset=None, + joint_offset=None, + device=None, + ): + """using blendshape to offset pts""" + + device = device if device is not None else avaliable_device() + + batch_size = shape_param.shape[0] + zero_pose = torch.zeros((batch_size, 3)).float().to(device) + zero_hand_pose = ( + torch.zeros((batch_size, len(smpl_x.joint_part["lhand"]) * 3)) + .float() + .to(device) + ) + + dense_pts = self.dense_pts.to(device) + dense_pts = dense_pts.unsqueeze(0).repeat(expression.shape[0], 1, 1) + + blend_shape_offset = blend_shapes(betas, self.shape_dirs) + + dense_pts = dense_pts + blend_shape_offset + + joint_zero_pose = self.get_zero_pose_human( + shape_param=shape_param, + device=device, + face_offset=face_offset, + joint_offset=joint_offset, + ) + + neutral_pose = torch.cat( + ( + zero_pose, + neutral_body_pose, + jaw_pose, + zero_pose, + zero_pose, + zero_hand_pose, + zero_hand_pose, + ), + dim=1, + ) # [B, 55, 3] + + neutral_pose = axis_angle_to_matrix( + neutral_pose.view(-1, 55, 3) + ) # [B, 55, 3, 3] + posed_joints, transform_mat_joint = batch_rigid_transform( + neutral_pose[:, :, :, :], joint_zero_pose[:, :, :], self.smplx_layer.parents + ) + + skinning_weight = self.skinning_weight.unsqueeze(0).repeat(batch_size, 1, 1) + + # B 55 4,4, B N 55 -> B N 4 4 + transform_mat_vertex = torch.einsum( + "blij,bnl->bnij", transform_mat_joint, skinning_weight + ) + mesh_neutral_pose_upsampled = self.lbs(dense_pts, transform_mat_vertex, None) + + return mesh_neutral_pose_upsampled + + def transform_to_neutral_pose( + self, mean_3d, smplx_data, mesh_neutral_pose, transform_mat_neutral_pose, 
device + ): + """ + Transform the mean 3D vertices to posed vertices from the neutral pose. + + mean_3d (torch.Tensor): Mean 3D vertices with shape [B*Nv, N, 3] + offset. + smplx_data (dict): SMPL-X data containing body_pose with shape [B*Nv, 21, 3] and betas with shape [B, 100]. + mesh_neutral_pose (torch.Tensor): Mesh vertices in the neutral pose with shape [B*Nv, N, 3]. + transform_mat_neutral_pose (torch.Tensor): Transformation matrix of the neutral pose with shape [B*Nv, 4, 4]. + device (torch.device): Device to perform the computation. + + Returns: + torch.Tensor: Posed vertices with shape [B*Nv, N, 3] + offset. + """ + + batch_size = mean_3d.shape[0] + shape_param = smplx_data["betas"] + face_offset = smplx_data.get("face_offset", None) + joint_offset = smplx_data.get("joint_offset", None) + if shape_param.shape[0] != batch_size: + num_views = batch_size // shape_param.shape[0] + # print(shape_param.shape, batch_size) + shape_param = ( + shape_param.unsqueeze(1) + .repeat(1, num_views, 1) + .view(-1, shape_param.shape[1]) + ) + if face_offset is not None: + face_offset = ( + face_offset.unsqueeze(1) + .repeat(1, num_views, 1, 1) + .view(-1, *face_offset.shape[1:]) + ) + if joint_offset is not None: + joint_offset = ( + joint_offset.unsqueeze(1) + .repeat(1, num_views, 1, 1) + .view(-1, *joint_offset.shape[1:]) + ) + + # smplx facial expression offset + smplx_expr_offset = ( + smplx_data["expr"].unsqueeze(1).unsqueeze(1) * self.expr_dirs + ).sum( + -1 + ) # [B, 1, 1, 50] x [N_V, 3, 50] -> [B, N_v, 3] + mean_3d = mean_3d + smplx_expr_offset # 大 pose + + def get_neutral_pose_human( + self, jaw_zero_pose, use_id_info, shape_param, device, face_offset, joint_offset + ): + + smpl_x = self.smpl_x + batch_size = shape_param.shape[0] + + zero_pose = torch.zeros((batch_size, 3)).float().to(device) + neutral_body_pose = ( + smpl_x.neutral_body_pose.view(1, -1).repeat(batch_size, 1).to(device) + ) # 大 pose + zero_hand_pose = ( + torch.zeros((batch_size, len(smpl_x.joint_part["lhand"]) * 3)) + .float() + .to(device) + ) + zero_expr = torch.zeros((batch_size, smpl_x.expr_param_dim)).float().to(device) + + if jaw_zero_pose: + jaw_pose = torch.zeros((batch_size, 3)).float().to(device) + else: + jaw_pose = ( + smpl_x.neutral_jaw_pose.view(1, 3).repeat(batch_size, 1).to(device) + ) # open mouth + + if use_id_info: + shape_param = shape_param + # face_offset = smpl_x.face_offset[None,:,:].float().to(device) + # joint_offset = smpl_x.get_joint_offset(self.joint_offset[None,:,:]) + face_offset = face_offset + joint_offset = ( + smpl_x.get_joint_offset(joint_offset) + if joint_offset is not None + else None + ) + + else: + shape_param = ( + torch.zeros((batch_size, smpl_x.shape_param_dim)).float().to(device) + ) + face_offset = None + joint_offset = None + + output = self.smplx_layer( + global_orient=zero_pose, + body_pose=neutral_body_pose, + left_hand_pose=zero_hand_pose, + right_hand_pose=zero_hand_pose, + jaw_pose=jaw_pose, + leye_pose=zero_pose, + reye_pose=zero_pose, + expression=zero_expr, + betas=shape_param, + face_offset=face_offset, + joint_offset=joint_offset, + ) + + # using dense sample strategy, and warp to neutral pose + mesh_neutral_pose_upsampled = self.upsample_mesh_batch( + smpl_x, + shape_param=shape_param, + neutral_body_pose=neutral_body_pose, + jaw_pose=jaw_pose, + expression=zero_expr, + betas=shape_param, + face_offset=face_offset, + joint_offset=joint_offset, + device=device, + ) + + mesh_neutral_pose = output.vertices + joint_neutral_pose = output.joints[ + :, : 
smpl_x.joint_num, : + ] # 大 pose human [B, 55, 3] + + # compute transformation matrix for making 大 pose to zero pose + neutral_body_pose = neutral_body_pose.view( + batch_size, len(smpl_x.joint_part["body"]) - 1, 3 + ) + zero_hand_pose = zero_hand_pose.view( + batch_size, len(smpl_x.joint_part["lhand"]), 3 + ) + + neutral_body_pose_inv = matrix_to_axis_angle( + torch.inverse(axis_angle_to_matrix(neutral_body_pose)) + ) + jaw_pose_inv = matrix_to_axis_angle( + torch.inverse(axis_angle_to_matrix(jaw_pose)) + ) + + zero_pose = zero_pose.unsqueeze(1) + jaw_pose_inv = jaw_pose_inv.unsqueeze(1) + + pose = torch.cat( + ( + zero_pose, + neutral_body_pose_inv, + jaw_pose_inv, + zero_pose, + zero_pose, + zero_hand_pose, + zero_hand_pose, + ), + dim=1, + ) + + pose = axis_angle_to_matrix(pose) # [B, 55, 3, 3] + + _, transform_mat_neutral_pose = batch_rigid_transform( + pose[:, :, :, :], joint_neutral_pose[:, :, :], self.smplx_layer.parents + ) # [B, 55, 4, 4] + + return ( + mesh_neutral_pose_upsampled, + mesh_neutral_pose, + transform_mat_neutral_pose, + ) + + +def read_smplx_param(smplx_data_root, shape_param_file, batch_size=1, device="cuda"): + import json + from glob import glob + + import cv2 + + data_root_path = osp.dirname(osp.dirname(smplx_data_root)) + + # load smplx parameters + smplx_param_path_list = sorted(glob(osp.join(smplx_data_root, "*.json"))) + print(smplx_param_path_list[:3]) + + smplx_params_all_frames = {} + for smplx_param_path in smplx_param_path_list: + frame_idx = int(smplx_param_path.split("/")[-1][:-5]) + with open(smplx_param_path) as f: + smplx_params_all_frames[frame_idx] = { + k: torch.FloatTensor(v) for k, v in json.load(f).items() + } + + with open(shape_param_file) as f: + shape_param = torch.FloatTensor(json.load(f)) + + smplx_params = {} + smplx_params["betas"] = shape_param.unsqueeze(0).repeat(batch_size, 1) + + select_frame_idx = [200, 400, 600] + smplx_params_tmp = defaultdict(list) + cam_param_list = [] + ori_image_list = [] + for b_idx in range(batch_size): + frame_idx = select_frame_idx[b_idx] + + for k, v in smplx_params_all_frames[frame_idx].items(): + smplx_params_tmp[k].append(v) + + with open( + osp.join(data_root_path, "cam_params", str(frame_idx) + ".json") + ) as f: + cam_param = { + k: torch.FloatTensor(v).cuda() for k, v in json.load(f).items() + } + cam_param_list.append(cam_param) + + img = cv2.imread(osp.join(data_root_path, "frames", str(frame_idx) + ".png")) + ori_image_list.append(img) + + for k, v in smplx_params_tmp.items(): + smplx_params[k] = torch.stack(smplx_params_tmp[k]) + + root_path = osp.dirname(smplx_data_root) + with open(osp.join(root_path, "face_offset.json")) as f: + face_offset = torch.FloatTensor(json.load(f)) + with open(osp.join(root_path, "joint_offset.json")) as f: + joint_offset = torch.FloatTensor(json.load(f)) + with open(osp.join(root_path, "locator_offset.json")) as f: + locator_offset = torch.FloatTensor(json.load(f)) + + smplx_params["locator_offset"] = locator_offset.unsqueeze(0).repeat( + batch_size, 1, 1 + ) + smplx_params["joint_offset"] = joint_offset.unsqueeze(0).repeat(batch_size, 1, 1) + smplx_params["face_offset"] = face_offset.unsqueeze(0).repeat(batch_size, 1, 1) + + for k, v in smplx_params.items(): + print(k, v.shape) + smplx_params[k] = v.to(device) + + return smplx_params, cam_param_list, ori_image_list + + +def test(): + import cv2 + + human_model_path = "./pretrained_models/human_model_files" + gender = "male" + # gender = "neutral" + + smplx_model = SMPLXMesh_Model(human_model_path, gender, 
subdivide_num=2) + smplx_model.to("cuda") + + smplx_data_root = "/data1/projects/ExAvatar_RELEASE/avatar/data/Custom/data/gyeongsik/smplx_optimized/smplx_params_smoothed" + shape_param_file = "/data1/projects/ExAvatar_RELEASE/avatar/data/Custom/data/gyeongsik/smplx_optimized/shape_param.json" + smplx_data, cam_param_list, ori_image_list = read_smplx_param( + smplx_data_root=smplx_data_root, shape_param_file=shape_param_file, batch_size=2 + ) + posed_verts = smplx_model.transform_to_posed_verts( + smplx_data=smplx_data, device="cuda" + ) + + smplx_face = smplx_model.smpl_x.face_upsampled + trimesh.Trimesh( + vertices=posed_verts[0].detach().cpu().numpy(), faces=smplx_face + ).export("./posed_obj1.obj") + trimesh.Trimesh( + vertices=posed_verts[1].detach().cpu().numpy(), faces=smplx_face + ).export("./posed_obj2.obj") + + neutral_posed_verts, _, _ = smplx_model.get_query_points( + smplx_data=smplx_data, device="cuda" + ) + smplx_face = smplx_model.smpl_x.face + trimesh.Trimesh( + vertices=neutral_posed_verts[0].detach().cpu().numpy(), faces=smplx_face + ).export("./neutral_posed_obj1.obj") + trimesh.Trimesh( + vertices=neutral_posed_verts[1].detach().cpu().numpy(), faces=smplx_face + ).export("./neutral_posed_obj2.obj") + + for idx, (cam_param, img) in enumerate(zip(cam_param_list, ori_image_list)): + render_shape = img.shape[:2] + mesh_render, is_bkg = render_mesh( + posed_verts[idx], + smplx_face, + cam_param, + np.ones((render_shape[0], render_shape[1], 3), dtype=np.float32) * 255, + return_bg_mask=True, + ) + mesh_render = mesh_render.astype(np.uint8) + cv2.imwrite( + f"./debug_render_{idx}.jpg", + np.clip( + (0.9 * mesh_render + 0.1 * img) * (1 - is_bkg) + is_bkg * img, 0, 255 + ).astype(np.uint8), + ) + # cv2.imwrite(f"./debug_render_{idx}_img.jpg", np.clip(img, 0, 255).astype(np.uint8)) + # cv2.imwrite(f"./debug_render_{idx}_mesh.jpg", np.clip(mesh_render, 0, 255).astype(np.uint8)) + + +def read_smplx_param_humman( + imgs_root, smplx_params_root, img_size=896, batch_size=1, device="cuda" +): + import json + import os + from glob import glob + + import cv2 + from PIL import Image, ImageOps + + # Input images + suffixes = (".jpg", ".jpeg", ".png", ".webp") + img_path_list = [ + os.path.join(imgs_root, file) + for file in os.listdir(imgs_root) + if file.endswith(suffixes) and file[0] != "." 
+ ] + + ori_image_list = [] + smplx_params_tmp = defaultdict(list) + + for img_path in img_path_list: + smplx_path = os.path.join( + smplx_params_root, os.path.splitext(os.path.basename(img_path))[0] + ".json" + ) + + # Open and reshape + img_pil = Image.open(img_path).convert("RGB") + img_pil = ImageOps.contain( + img_pil, (img_size, img_size) + ) # keep the same aspect ratio + # ori_w, ori_h = img_pil.size + # img_pil_pad = ImageOps.pad(img_pil, size=(img_size,img_size)) # pad with zero on the smallest side + # offset_w, offset_h = (img_size - ori_w) // 2, (img_size - ori_h) // 2 + + # img = np.array(img_pil_pad)[:, :, (2, 1, 0)] + img = np.array(img_pil)[:, :, (2, 1, 0)] + ori_image_list.append(img) + + with open(smplx_path) as f: + smplx_param = {k: torch.FloatTensor(v) for k, v in json.load(f).items()} + + for k, v in smplx_param.items(): + smplx_params_tmp[k].append(v) + + smplx_params = {} + for k, v in smplx_params_tmp.items(): + smplx_params[k] = torch.stack(smplx_params_tmp[k]) + + for k, v in smplx_params.items(): + print(k, v.shape) + smplx_params[k] = v.to(device) + + cam_param_list = [] + for i in range(smplx_params["focal"].shape[0]): + princpt = smplx_params["princpt"][i] + cam_param = {"focal": smplx_params["focal"][i], "princpt": princpt} + cam_param_list.append(cam_param) + return smplx_params, cam_param_list, ori_image_list + + +def generate_smplx_point(): + + def get_smplx_params(data): + smplx_params = {} + smplx_keys = [ + "root_pose", + "body_pose", + "jaw_pose", + "leye_pose", + "reye_pose", + "lhand_pose", + "rhand_pose", + "expr", + "trans", + "betas", + ] + for k, v in data.items(): + if k in smplx_keys: + # print(k, v.shape) + smplx_params[k] = data[k].unsqueeze(0).cuda() + return smplx_params + + def sample_one(data): + smplx_keys = [ + "root_pose", + "body_pose", + "jaw_pose", + "leye_pose", + "reye_pose", + "lhand_pose", + "rhand_pose", + "trans", + ] + for k, v in data.items(): + if k in smplx_keys: + # print(k, v.shape) + data[k] = data[k][:, 0] + return data + + human_model_path = "./pretrained_models/human_model_files" + gender = "neutral" + subdivide_num = 1 + smplx_model = SMPLXVoxelMeshModel( + human_model_path, + gender, + shape_param_dim=10, + expr_param_dim=100, + subdivide_num=subdivide_num, + dense_sample_points=40000, + cano_pose_type=1, + ) + smplx_model.to("cuda") + + # save_file = f"pretrained_models/human_model_files/smplx_points/smplx_subdivide{subdivide_num}.npy" + save_file = f"debug/smplx_points/smplx_subdivide{subdivide_num}.npy" + os.makedirs(os.path.dirname(save_file), exist_ok=True) + + smplx_data = {} + smplx_data["betas"] = torch.zeros((1, 10)).to(device="cuda") + mesh_neutral_pose, mesh_neutral_pose_wo_upsample, transform_mat_neutral_pose = ( + smplx_model.get_query_points(smplx_data=smplx_data, device="cuda") + ) + + debug_pose = torch.load("./debug/pose_example.pth") + debug_pose["expr"] = torch.FloatTensor([0.0] * 100) + + smplx_data = get_smplx_params(debug_pose) + smplx_data = sample_one(smplx_data) + smplx_data["betas"] = torch.ones_like(smplx_data["betas"]) + + warp_posed, _ = smplx_model.transform_to_posed_verts_from_neutral_pose( + mesh_neutral_pose, + smplx_data, + mesh_neutral_pose, + transform_mat_neutral_pose, + "cuda", + ) + + # save_ply("warp_posed.ply", warp_posed[0].detach().cpu()) + save_ply( + "body_constrain_posed.ply", + warp_posed[0, smplx_model.is_constrain_body].detach().cpu(), + ) + + +if __name__ == "__main__": + generate_smplx_point() diff --git a/LHM/models/rendering/smplx/.gitignore 
b/LHM/models/rendering/smplx/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..7e842be886e86723693583140a83c25476ecd55d --- /dev/null +++ b/LHM/models/rendering/smplx/.gitignore @@ -0,0 +1,109 @@ +#### joe made this: http://goel.io/joe + +#####=== Python ===##### + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + diff --git a/LHM/models/rendering/smplx/LICENSE b/LHM/models/rendering/smplx/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..920431ec8e76530c66382a6f314e9c94c5d4e69f --- /dev/null +++ b/LHM/models/rendering/smplx/LICENSE @@ -0,0 +1,58 @@ +License + +Software Copyright License for non-commercial scientific research purposes +Please read carefully the following terms and conditions and any accompanying documentation before you download and/or use the SMPL-X/SMPLify-X model, data and software, (the "Model & Software"), including 3D meshes, blend weights, blend shapes, textures, software, scripts, and animations. By downloading and/or using the Model & Software (including downloading, cloning, installing, and any other use of this github repository), you acknowledge that you have read these terms and conditions, understand them, and agree to be bound by them. If you do not agree with these terms and conditions, you must not download and/or use the Model & Software. Any infringement of the terms of this agreement will automatically terminate your rights under this License + +Ownership / Licensees +The Software and the associated materials has been developed at the + +Max Planck Institute for Intelligent Systems (hereinafter "MPI"). + +Any copyright or patent right is owned by and proprietary material of the + +Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (hereinafter “MPG”; MPI and MPG hereinafter collectively “Max-Planck”) + +hereinafter the “Licensor”. 
+ +License Grant +Licensor grants you (Licensee) personally a single-user, non-exclusive, non-transferable, free of charge right: + +To install the Model & Software on computers owned, leased or otherwise controlled by you and/or your organization; +To use the Model & Software for the sole purpose of performing non-commercial scientific research, non-commercial education, or non-commercial artistic projects; +Any other use, in particular any use for commercial, pornographic, military, or surveillance, purposes is prohibited. This includes, without limitation, incorporation in a commercial product, use in a commercial service, or production of other artifacts for commercial purposes. The Data & Software may not be used to create fake, libelous, misleading, or defamatory content of any kind excluding analyses in peer-reviewed scientific research. The Data & Software may not be reproduced, modified and/or made available in any form to any third party without Max-Planck’s prior written permission. + +The Data & Software may not be used for pornographic purposes or to generate pornographic material whether commercial or not. This license also prohibits the use of the Software to train methods/algorithms/neural networks/etc. for commercial, pornographic, military, surveillance, or defamatory use of any kind. By downloading the Data & Software, you agree not to reverse engineer it. + +No Distribution +The Model & Software and the license herein granted shall not be copied, shared, distributed, re-sold, offered for re-sale, transferred or sub-licensed in whole or in part except that you may make one copy for archive purposes only. + +Disclaimer of Representations and Warranties +You expressly acknowledge and agree that the Model & Software results from basic research, is provided “AS IS”, may contain errors, and that any use of the Model & Software is at your sole risk. LICENSOR MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE MODEL & SOFTWARE, NEITHER EXPRESS NOR IMPLIED, AND THE ABSENCE OF ANY LEGAL OR ACTUAL DEFECTS, WHETHER DISCOVERABLE OR NOT. Specifically, and not to limit the foregoing, licensor makes no representations or warranties (i) regarding the merchantability or fitness for a particular purpose of the Model & Software, (ii) that the use of the Model & Software will not infringe any patents, copyrights or other intellectual property rights of a third party, and (iii) that the use of the Model & Software will not cause any damage of any kind to you or a third party. + +Limitation of Liability +Because this Model & Software License Agreement qualifies as a donation, according to Section 521 of the German Civil Code (Bürgerliches Gesetzbuch – BGB) Licensor as a donor is liable for intent and gross negligence only. If the Licensor fraudulently conceals a legal or material defect, they are obliged to compensate the Licensee for the resulting damage. +Licensor shall be liable for loss of data only up to the amount of typical recovery costs which would have arisen had proper and regular data backup measures been taken. For the avoidance of doubt Licensor shall be liable in accordance with the German Product Liability Act in the event of product liability. The foregoing applies also to Licensor’s legal representatives or assistants in performance. Any further liability shall be excluded. +Patent claims generated through the usage of the Model & Software cannot be directed towards the copyright holders. 
+The Model & Software is provided in the state of development the licensor defines. If modified or extended by Licensee, the Licensor makes no claims about the fitness of the Model & Software and is not responsible for any problems such modifications cause. + +No Maintenance Services +You understand and agree that Licensor is under no obligation to provide either maintenance services, update services, notices of latent defects, or corrections of defects with regard to the Model & Software. Licensor nevertheless reserves the right to update, modify, or discontinue the Model & Software at any time. + +Defects of the Model & Software must be notified in writing to the Licensor with a comprehensible description of the error symptoms. The notification of the defect should enable the reproduction of the error. The Licensee is encouraged to communicate any use, results, modification or publication. + +Publications using the Model & Software +You acknowledge that the Model & Software is a valuable scientific resource and agree to appropriately reference the following paper in any publication making use of the Model & Software. + +Citation: + + +@inproceedings{SMPL-X:2019, + title = {Expressive Body Capture: 3D Hands, Face, and Body from a Single Image}, + author = {Pavlakos, Georgios and Choutas, Vasileios and Ghorbani, Nima and Bolkart, Timo and Osman, Ahmed A. A. and Tzionas, Dimitrios and Black, Michael J.}, + booktitle = {Proceedings IEEE Conf. on Computer Vision and Pattern Recognition (CVPR)}, + year = {2019} +} +Commercial licensing opportunities +For commercial uses of the Software, please send email to ps-license@tue.mpg.de + +This Agreement shall be governed by the laws of the Federal Republic of Germany except for the UN Sales Convention. diff --git a/LHM/models/rendering/smplx/README.md b/LHM/models/rendering/smplx/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e000e63af4569d8fae38346be370ba815662674d --- /dev/null +++ b/LHM/models/rendering/smplx/README.md @@ -0,0 +1,207 @@ +## SMPL-X: A new joint 3D model of the human body, face and hands together + +[[Paper Page](https://smpl-x.is.tue.mpg.de)] [[Paper](https://ps.is.tuebingen.mpg.de/uploads_file/attachment/attachment/497/SMPL-X.pdf)] +[[Supp. Mat.](https://ps.is.tuebingen.mpg.de/uploads_file/attachment/attachment/498/SMPL-X-supp.pdf)] + +![SMPL-X Examples](./images/teaser_fig.png) + +## Table of Contents + * [License](#license) + * [Description](#description) + * [News](#news) + * [Installation](#installation) + * [Downloading the model](#downloading-the-model) + * [Loading SMPL-X, SMPL+H and SMPL](#loading-smpl-x-smplh-and-smpl) + * [SMPL and SMPL+H setup](#smpl-and-smplh-setup) + * [Model loading](https://github.com/vchoutas/smplx#model-loading) + * [MANO and FLAME correspondences](#mano-and-flame-correspondences) + * [Example](#example) + * [Modifying the global pose of the model](#modifying-the-global-pose-of-the-model) + * [Citation](#citation) + * [Acknowledgments](#acknowledgments) + * [Contact](#contact) + +## License + +Software Copyright License for **non-commercial scientific research purposes**. +Please read carefully the [terms and conditions](https://github.com/vchoutas/smplx/blob/master/LICENSE) and any accompanying documentation before you download and/or use the SMPL-X/SMPLify-X model, data and software, (the "Model & Software"), including 3D meshes, blend weights, blend shapes, textures, software, scripts, and animations. 
By downloading and/or using the Model & Software (including downloading, cloning, installing, and any other use of this github repository), you acknowledge that you have read these terms and conditions, understand them, and agree to be bound by them. If you do not agree with these terms and conditions, you must not download and/or use the Model & Software. Any infringement of the terms of this agreement will automatically terminate your rights under this [License](./LICENSE). + +## Disclaimer + +The original images used for the figures 1 and 2 of the paper can be found in this link. +The images in the paper are used under license from gettyimages.com. +We have acquired the right to use them in the publication, but redistribution is not allowed. +Please follow the instructions on the given link to acquire right of usage. +Our results are obtained on the 483 × 724 pixels resolution of the original images. + +## Description + +*SMPL-X* (SMPL eXpressive) is a unified body model with shape parameters trained jointly for the +face, hands and body. *SMPL-X* uses standard vertex based linear blend skinning with learned corrective blend +shapes, has N = 10, 475 vertices and K = 54 joints, +which include joints for the neck, jaw, eyeballs and fingers. +SMPL-X is defined by a function M(θ, β, ψ), where θ is the pose parameters, β the shape parameters and +ψ the facial expression parameters. + +## News + +- 3 November 2020: We release the code to transfer between the models in the + SMPL family. For more details on the code, go to this [readme + file](./transfer_model/README.md). A detailed explanation on how the mappings + were extracted can be found [here](./transfer_model/docs/transfer.md). +- 23 September 2020: A UV map is now available for SMPL-X, please check the + Downloads section of the website. +- 20 August 2020: The full shape and expression space of SMPL-X are now available. + +## Installation + +To install the model please follow the next steps in the specified order: +1. To install from PyPi simply run: + ```Shell + pip install smplx[all] + ``` +2. Clone this repository and install it using the *setup.py* script: +```Shell +git clone https://github.com/vchoutas/smplx +python setup.py install +``` + +## Downloading the model + +To download the *SMPL-X* model go to [this project website](https://smpl-x.is.tue.mpg.de) and register to get access to the downloads section. + +To download the *SMPL+H* model go to [this project website](http://mano.is.tue.mpg.de) and register to get access to the downloads section. + +To download the *SMPL* model go to [this](http://smpl.is.tue.mpg.de) (male and female models) and [this](http://smplify.is.tue.mpg.de) (gender neutral model) project website and register to get access to the downloads section. + +## Loading SMPL-X, SMPL+H and SMPL + +### SMPL and SMPL+H setup + +The loader gives the option to use any of the SMPL-X, SMPL+H, SMPL, and MANO models. Depending on the model you want to use, please follow the respective download instructions. To switch between MANO, SMPL, SMPL+H and SMPL-X just change the *model_path* or *model_type* parameters. For more details please check the docs of the model classes. +Before using SMPL and SMPL+H you should follow the instructions in [tools/README.md](./tools/README.md) to remove the +Chumpy objects from both model pkls, as well as merge the MANO parameters with SMPL+H. 
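+
+Once the model files are in place, a minimal loading sketch looks like this (the `models` folder name is an assumption about your local layout; see the directory structure in the next subsection). It mirrors what `examples/demo.py` does in more detail:
+
+```Python
+import torch
+import smplx
+
+# Assumed path to the downloaded model files; adjust to your setup.
+model_folder = "models"
+
+# `smplx.create` dispatches on `model_type` ('smpl', 'smplh', 'smplx', 'mano', 'flame').
+model = smplx.create(model_folder, model_type="smplx", gender="neutral", ext="npz")
+
+# Forward pass with zero shape parameters; SMPL-X has 10,475 vertices.
+output = model(betas=torch.zeros([1, model.num_betas]), return_verts=True)
+print(output.vertices.shape, output.joints.shape)
+```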
+ +### Model loading + +You can either use the [create](https://github.com/vchoutas/smplx/blob/c63c02b478c5c6f696491ed9167e3af6b08d89b1/smplx/body_models.py#L54) +function from [body_models](./smplx/body_models.py) or directly call the constructor for the +[SMPL](https://github.com/vchoutas/smplx/blob/c63c02b478c5c6f696491ed9167e3af6b08d89b1/smplx/body_models.py#L106), +[SMPL+H](https://github.com/vchoutas/smplx/blob/c63c02b478c5c6f696491ed9167e3af6b08d89b1/smplx/body_models.py#L395) and +[SMPL-X](https://github.com/vchoutas/smplx/blob/c63c02b478c5c6f696491ed9167e3af6b08d89b1/smplx/body_models.py#L628) model. The path to the model can either be the path to the file with the parameters or a directory with the following structure: +```bash +models +├── smpl +│   ├── SMPL_FEMALE.pkl +│   └── SMPL_MALE.pkl +│   └── SMPL_NEUTRAL.pkl +├── smplh +│   ├── SMPLH_FEMALE.pkl +│   └── SMPLH_MALE.pkl +├── mano +| ├── MANO_RIGHT.pkl +| └── MANO_LEFT.pkl +└── smplx + ├── SMPLX_FEMALE.npz + ├── SMPLX_FEMALE.pkl + ├── SMPLX_MALE.npz + ├── SMPLX_MALE.pkl + ├── SMPLX_NEUTRAL.npz + └── SMPLX_NEUTRAL.pkl +``` + + +## MANO and FLAME correspondences + +The vertex correspondences between SMPL-X and MANO, FLAME can be downloaded +from [the project website](https://smpl-x.is.tue.mpg.de). If you have extracted +the correspondence data in the folder *correspondences*, then use the following +scripts to visualize them: + +1. To view MANO correspondences run the following command: + +``` +python examples/vis_mano_vertices.py --model-folder $SMPLX_FOLDER --corr-fname correspondences/MANO_SMPLX_vertex_ids.pkl +``` + +2. To view FLAME correspondences run the following command: + +``` +python examples/vis_flame_vertices.py --model-folder $SMPLX_FOLDER --corr-fname correspondences/SMPL-X__FLAME_vertex_ids.npy +``` + +## Example + +After installing the *smplx* package and downloading the model parameters you should be able to run the *demo.py* +script to visualize the results. For this step you have to install the [pyrender](https://pyrender.readthedocs.io/en/latest/index.html) and [trimesh](https://trimsh.org/) packages. + +`python examples/demo.py --model-folder $SMPLX_FOLDER --plot-joints=True --gender="neutral"` + +![SMPL-X Examples](./images/example.png) + +## Modifying the global pose of the model + +If you want to modify the global pose of the model, i.e. the root rotation and +translation, to a new coordinate system for example, you need to take into +account that the model rotation uses the pelvis as the center of rotation. A +more detailed description can be found in the following +[link](https://www.dropbox.com/scl/fi/zkatuv5shs8d4tlwr8ecc/Change-parameters-to-new-coordinate-system.paper?dl=0&rlkey=lotq1sh6wzkmyttisc05h0in0). +If something is not clear, please let me know so that I can update the +description. + +## Citation + +Depending on which model is loaded for your project, i.e. SMPL-X or SMPL+H or SMPL, please cite the most relevant work below, listed in the same order: + +``` +@inproceedings{SMPL-X:2019, + title = {Expressive Body Capture: 3D Hands, Face, and Body from a Single Image}, + author = {Pavlakos, Georgios and Choutas, Vasileios and Ghorbani, Nima and Bolkart, Timo and Osman, Ahmed A. A. and Tzionas, Dimitrios and Black, Michael J.}, + booktitle = {Proceedings IEEE Conf. 
on Computer Vision and Pattern Recognition (CVPR)}, + year = {2019} +} +``` + +``` +@article{MANO:SIGGRAPHASIA:2017, + title = {Embodied Hands: Modeling and Capturing Hands and Bodies Together}, + author = {Romero, Javier and Tzionas, Dimitrios and Black, Michael J.}, + journal = {ACM Transactions on Graphics, (Proc. SIGGRAPH Asia)}, + volume = {36}, + number = {6}, + series = {245:1--245:17}, + month = nov, + year = {2017}, + month_numeric = {11} + } +``` + +``` +@article{SMPL:2015, + author = {Loper, Matthew and Mahmood, Naureen and Romero, Javier and Pons-Moll, Gerard and Black, Michael J.}, + title = {{SMPL}: A Skinned Multi-Person Linear Model}, + journal = {ACM Transactions on Graphics, (Proc. SIGGRAPH Asia)}, + month = oct, + number = {6}, + pages = {248:1--248:16}, + publisher = {ACM}, + volume = {34}, + year = {2015} +} +``` + +This repository was originally developed for SMPL-X / SMPLify-X (CVPR 2019), you might be interested in having a look: [https://smpl-x.is.tue.mpg.de](https://smpl-x.is.tue.mpg.de). + +## Acknowledgments + +### Facial Contour + +Special thanks to [Soubhik Sanyal](https://github.com/soubhiksanyal) for sharing the Tensorflow code used for the facial +landmarks. + +## Contact +The code of this repository was implemented by [Vassilis Choutas](vassilis.choutas@tuebingen.mpg.de). + +For questions, please contact [smplx@tue.mpg.de](smplx@tue.mpg.de). + +For commercial licensing (and all related questions for business applications), please contact [ps-licensing@tue.mpg.de](ps-licensing@tue.mpg.de). diff --git a/LHM/models/rendering/smplx/config_files/smpl2smplh.yaml b/LHM/models/rendering/smplx/config_files/smpl2smplh.yaml new file mode 100644 index 0000000000000000000000000000000000000000..98cc874831b65a71dcb15f4f4e6018841e80c08b --- /dev/null +++ b/LHM/models/rendering/smplx/config_files/smpl2smplh.yaml @@ -0,0 +1,25 @@ +datasets: + mesh_folder: + data_folder: 'transfer_data/meshes/smpl' +deformation_transfer_path: 'transfer_data/smpl2smplh_def_transfer.pkl' +mask_ids_fname: '' +summary_steps: 100 + +edge_fitting: + per_part: False + +optim: + type: 'trust-ncg' + maxiters: 100 + gtol: 1e-06 + +body_model: + model_type: "smplh" + # SMPL+H has no neutral model, so we have to manually select the gender + gender: "female" + # gender: "male" + folder: "transfer_data/body_models" + use_compressed: False + smplh: + betas: + num: 10 diff --git a/LHM/models/rendering/smplx/config_files/smpl2smplx.yaml b/LHM/models/rendering/smplx/config_files/smpl2smplx.yaml new file mode 100644 index 0000000000000000000000000000000000000000..aad7ac50b203a9efc5182025fc88f3c6f4fa43f3 --- /dev/null +++ b/LHM/models/rendering/smplx/config_files/smpl2smplx.yaml @@ -0,0 +1,26 @@ +datasets: + mesh_folder: + data_folder: 'transfer_data/meshes/smpl' +deformation_transfer_path: 'transfer_data/smpl2smplx_deftrafo_setup.pkl' +mask_ids_fname: 'smplx_mask_ids.npy' +summary_steps: 100 + +edge_fitting: + per_part: False + +optim: + type: 'trust-ncg' + maxiters: 100 + gtol: 1e-06 + +body_model: + model_type: "smplx" + gender: "neutral" + folder: "transfer_data/body_models" + use_compressed: False + use_face_contour: True + smplx: + betas: + num: 10 + expression: + num: 10 diff --git a/LHM/models/rendering/smplx/config_files/smplh2smpl.yaml b/LHM/models/rendering/smplx/config_files/smplh2smpl.yaml new file mode 100644 index 0000000000000000000000000000000000000000..223d33736f9ea74a6b3450073008b0488ac3ad52 --- /dev/null +++ b/LHM/models/rendering/smplx/config_files/smplh2smpl.yaml @@ -0,0 +1,24 @@ 
+datasets: + mesh_folder: + data_folder: 'transfer_data/meshes/smplh' +deformation_transfer_path: 'transfer_data/smplh2smpl_def_transfer.pkl' +mask_ids_fname: '' +summary_steps: 100 + +edge_fitting: + per_part: False + +optim: + type: 'trust-ncg' + maxiters: 100 + gtol: 1e-06 + +body_model: + model_type: "smpl" + gender: "neutral" + folder: "transfer_data/body_models" + use_compressed: False + use_face_contour: True + smpl: + betas: + num: 10 diff --git a/LHM/models/rendering/smplx/config_files/smplh2smplx.yaml b/LHM/models/rendering/smplx/config_files/smplh2smplx.yaml new file mode 100644 index 0000000000000000000000000000000000000000..682d0e665dc084b72e936be43371c0c9a3df299c --- /dev/null +++ b/LHM/models/rendering/smplx/config_files/smplh2smplx.yaml @@ -0,0 +1,26 @@ +datasets: + mesh_folder: + data_folder: 'transfer_data/meshes/smplh' +deformation_transfer_path: 'transfer_data/smplh2smplx_deftrafo_setup.pkl' +mask_ids_fname: 'smplx_mask_ids.npy' +summary_steps: 100 + +edge_fitting: + per_part: False + +optim: + type: 'trust-ncg' + maxiters: 100 + gtol: 1e-06 + +body_model: + model_type: "smplx" + gender: "neutral" + folder: "transfer_data/body_models" + use_compressed: False + use_face_contour: True + smplx: + betas: + num: 10 + expression: + num: 10 diff --git a/LHM/models/rendering/smplx/config_files/smplh2smplx_as.yaml b/LHM/models/rendering/smplx/config_files/smplh2smplx_as.yaml new file mode 100644 index 0000000000000000000000000000000000000000..70312486c4d4d3176b60b035fa32166b5c8a79fe --- /dev/null +++ b/LHM/models/rendering/smplx/config_files/smplh2smplx_as.yaml @@ -0,0 +1,26 @@ +datasets: + mesh_folder: + data_folder: 'transfer_data/meshes/amass_sample' +deformation_transfer_path: 'transfer_data/smplh2smplx_deftrafo_setup.pkl' +mask_ids_fname: 'smplx_mask_ids.npy' +summary_steps: 100 + +edge_fitting: + per_part: False + +optim: + type: 'trust-ncg' + maxiters: 100 + gtol: 1e-06 + +body_model: + model_type: "smplx" + gender: "neutral" + folder: "models" + use_compressed: False + use_face_contour: True + smplx: + betas: + num: 10 + expression: + num: 10 diff --git a/LHM/models/rendering/smplx/config_files/smplh2smplx_onepose.yaml b/LHM/models/rendering/smplx/config_files/smplh2smplx_onepose.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1a592b83bbb5e34099d1705927ead4c9b88011d5 --- /dev/null +++ b/LHM/models/rendering/smplx/config_files/smplh2smplx_onepose.yaml @@ -0,0 +1,27 @@ +datasets: + mesh_folder: + data_folder: 'transfer_data/meshes/amass_onepose' +deformation_transfer_path: 'transfer_data/smplh2smplx_deftrafo_setup.pkl' +mask_ids_fname: 'smplx_mask_ids.npy' +summary_steps: 100 + +edge_fitting: + per_part: False + +optim: + type: 'adam' + lr: 0.1 + maxiters: 10000 + gtol: 1e-06 + +body_model: + model_type: "smplx" + gender: "neutral" + folder: "models" + use_compressed: False + use_face_contour: True + smplx: + betas: + num: 10 + expression: + num: 10 diff --git a/LHM/models/rendering/smplx/config_files/smplx2smpl.yaml b/LHM/models/rendering/smplx/config_files/smplx2smpl.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7012fefda9ac367df616bdb6cb6f72567516e86a --- /dev/null +++ b/LHM/models/rendering/smplx/config_files/smplx2smpl.yaml @@ -0,0 +1,25 @@ +datasets: + mesh_folder: + data_folder: 'meshes/smplx' +deformation_transfer_path: 'transfer_data/smplx2smpl_deftrafo_setup.pkl' +mask_ids_fname: '' +summary_steps: 100 + +edge_fitting: + per_part: False + +optim: + type: 'lbfgs' + maxiters: 200 + gtol: 1e-06 + +body_model: + 
model_type: "smpl" + gender: "neutral" + ext: 'pkl' + folder: "transfer_data/body_models" + use_compressed: False + use_face_contour: True + smpl: + betas: + num: 10 diff --git a/LHM/models/rendering/smplx/config_files/smplx2smplh.yaml b/LHM/models/rendering/smplx/config_files/smplx2smplh.yaml new file mode 100644 index 0000000000000000000000000000000000000000..76275e011880f1ede64329f253bf236ab423abde --- /dev/null +++ b/LHM/models/rendering/smplx/config_files/smplx2smplh.yaml @@ -0,0 +1,27 @@ +datasets: + mesh_folder: + data_folder: 'meshes/smplx' +deformation_transfer_path: 'transfer_data/smplx2smplh_deftrafo_setup.pkl' +mask_ids_fname: '' +summary_steps: 100 + +edge_fitting: + per_part: False + +optim: + type: 'lbfgs' + maxiters: 200 + gtol: 1e-06 + +body_model: + model_type: "smplh" + # SMPL+H has no neutral model, so we have to manually select the gender + gender: "female" + # gender: "male" + ext: 'pkl' + folder: "transfer_data/body_models" + use_compressed: False + use_face_contour: True + smplh: + betas: + num: 10 diff --git a/LHM/models/rendering/smplx/examples/demo.py b/LHM/models/rendering/smplx/examples/demo.py new file mode 100644 index 0000000000000000000000000000000000000000..7a6fd5024f4ac05d9f5db336b769d84836b51c18 --- /dev/null +++ b/LHM/models/rendering/smplx/examples/demo.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +import os.path as osp +import argparse + +import numpy as np +import torch + +import smplx + + +def main(model_folder, + model_type='smplx', + ext='npz', + gender='neutral', + plot_joints=False, + num_betas=10, + sample_shape=True, + sample_expression=True, + num_expression_coeffs=10, + plotting_module='pyrender', + use_face_contour=False): + + model = smplx.create(model_folder, model_type=model_type, + gender=gender, use_face_contour=use_face_contour, + num_betas=num_betas, + num_expression_coeffs=num_expression_coeffs, + ext=ext) + print(model) + + betas, expression = None, None + if sample_shape: + betas = torch.randn([1, model.num_betas], dtype=torch.float32) + if sample_expression: + expression = torch.randn( + [1, model.num_expression_coeffs], dtype=torch.float32) + + output = model(betas=betas, expression=expression, + return_verts=True) + vertices = output.vertices.detach().cpu().numpy().squeeze() + joints = output.joints.detach().cpu().numpy().squeeze() + + print('Vertices shape =', vertices.shape) + print('Joints shape =', joints.shape) + + if plotting_module == 'pyrender': + import pyrender + import trimesh + vertex_colors = np.ones([vertices.shape[0], 4]) * [0.3, 0.3, 0.3, 0.8] + tri_mesh = trimesh.Trimesh(vertices, model.faces, + vertex_colors=vertex_colors) + + mesh = pyrender.Mesh.from_trimesh(tri_mesh) + + scene = pyrender.Scene() + scene.add(mesh) + + if plot_joints: + sm = trimesh.creation.uv_sphere(radius=0.005) + sm.visual.vertex_colors = [0.9, 0.1, 0.1, 1.0] + tfs = np.tile(np.eye(4), (len(joints), 1, 1)) + tfs[:, :3, 3] = joints + joints_pcl = pyrender.Mesh.from_trimesh(sm, poses=tfs) + scene.add(joints_pcl) + + pyrender.Viewer(scene, use_raymond_lighting=True) + elif plotting_module == 'matplotlib': + from matplotlib import pyplot as plt + from mpl_toolkits.mplot3d import Axes3D + from mpl_toolkits.mplot3d.art3d import Poly3DCollection + + fig = plt.figure() + ax = fig.add_subplot(111, projection='3d') + + mesh = Poly3DCollection(vertices[model.faces], alpha=0.1) + face_color = (1.0, 1.0, 0.9) + edge_color = (0, 0, 0) + mesh.set_edgecolor(edge_color) + mesh.set_facecolor(face_color) + ax.add_collection3d(mesh) + ax.scatter(joints[:, 0], joints[:, 1], joints[:, 2], color='r') + + if plot_joints: + ax.scatter(joints[:, 0], joints[:, 1], joints[:, 2], alpha=0.1) + plt.show() + elif plotting_module == 'open3d': + import open3d as o3d + + mesh = o3d.geometry.TriangleMesh() + mesh.vertices = o3d.utility.Vector3dVector( + vertices) + mesh.triangles = o3d.utility.Vector3iVector(model.faces) + mesh.compute_vertex_normals() + mesh.paint_uniform_color([0.3, 0.3, 0.3]) + + geometry = [mesh] + if plot_joints: + joints_pcl = o3d.geometry.PointCloud() + joints_pcl.points = o3d.utility.Vector3dVector(joints) + joints_pcl.paint_uniform_color([0.7, 0.3, 0.3]) + geometry.append(joints_pcl) + + o3d.visualization.draw_geometries(geometry) + else: + raise ValueError('Unknown plotting_module: {}'.format(plotting_module)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='SMPL-X Demo') + + parser.add_argument('--model-folder', required=True, type=str, + help='The path to the model folder') + parser.add_argument('--model-type', default='smplx', type=str, + choices=['smpl', 'smplh', 'smplx', 'mano', 'flame'], + help='The type of model to load') + parser.add_argument('--gender', type=str, default='neutral', + help='The gender of the model') + parser.add_argument('--num-betas', default=10, type=int, + dest='num_betas', + 
help='Number of shape coefficients.') + parser.add_argument('--num-expression-coeffs', default=10, type=int, + dest='num_expression_coeffs', + help='Number of expression coefficients.') + parser.add_argument('--plotting-module', type=str, default='pyrender', + dest='plotting_module', + choices=['pyrender', 'matplotlib', 'open3d'], + help='The module to use for plotting the result') + parser.add_argument('--ext', type=str, default='npz', + help='Which extension to use for loading') + parser.add_argument('--plot-joints', default=False, + type=lambda arg: arg.lower() in ['true', '1'], + help='The path to the model folder') + parser.add_argument('--sample-shape', default=True, + dest='sample_shape', + type=lambda arg: arg.lower() in ['true', '1'], + help='Sample a random shape') + parser.add_argument('--sample-expression', default=True, + dest='sample_expression', + type=lambda arg: arg.lower() in ['true', '1'], + help='Sample a random expression') + parser.add_argument('--use-face-contour', default=False, + type=lambda arg: arg.lower() in ['true', '1'], + help='Compute the contour of the face') + + args = parser.parse_args() + + model_folder = osp.expanduser(osp.expandvars(args.model_folder)) + model_type = args.model_type + plot_joints = args.plot_joints + use_face_contour = args.use_face_contour + gender = args.gender + ext = args.ext + plotting_module = args.plotting_module + num_betas = args.num_betas + num_expression_coeffs = args.num_expression_coeffs + sample_shape = args.sample_shape + sample_expression = args.sample_expression + + main(model_folder, model_type, ext=ext, + gender=gender, plot_joints=plot_joints, + num_betas=num_betas, + num_expression_coeffs=num_expression_coeffs, + sample_shape=sample_shape, + sample_expression=sample_expression, + plotting_module=plotting_module, + use_face_contour=use_face_contour) diff --git a/LHM/models/rendering/smplx/examples/demo_layers.py b/LHM/models/rendering/smplx/examples/demo_layers.py new file mode 100644 index 0000000000000000000000000000000000000000..8d4e18226c02a6c06c5158dc66276598ba96163a --- /dev/null +++ b/LHM/models/rendering/smplx/examples/demo_layers.py @@ -0,0 +1,181 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +import os.path as osp +import argparse + +import numpy as np +import torch + +import smplx + + +def main(model_folder, + model_type='smplx', + ext='npz', + gender='neutral', + plot_joints=False, + num_betas=10, + sample_shape=True, + sample_expression=True, + num_expression_coeffs=10, + plotting_module='pyrender', + use_face_contour=False): + + model = smplx.build_layer( + model_folder, model_type=model_type, + gender=gender, use_face_contour=use_face_contour, + num_betas=num_betas, + num_expression_coeffs=num_expression_coeffs, + ext=ext) + print(model) + + betas, expression = None, None + if sample_shape: + betas = torch.randn([1, model.num_betas], dtype=torch.float32) + if sample_expression: + expression = torch.randn( + [1, model.num_expression_coeffs], dtype=torch.float32) + + output = model(betas=betas, expression=expression, + return_verts=True) + vertices = output.vertices.detach().cpu().numpy().squeeze() + joints = output.joints.detach().cpu().numpy().squeeze() + + print('Vertices shape =', vertices.shape) + print('Joints shape =', joints.shape) + + if plotting_module == 'pyrender': + import pyrender + import trimesh + vertex_colors = np.ones([vertices.shape[0], 4]) * [0.3, 0.3, 0.3, 0.8] + tri_mesh = trimesh.Trimesh(vertices, model.faces, + vertex_colors=vertex_colors) + + mesh = pyrender.Mesh.from_trimesh(tri_mesh) + + scene = pyrender.Scene() + scene.add(mesh) + + if plot_joints: + sm = trimesh.creation.uv_sphere(radius=0.005) + sm.visual.vertex_colors = [0.9, 0.1, 0.1, 1.0] + tfs = np.tile(np.eye(4), (len(joints), 1, 1)) + tfs[:, :3, 3] = joints + joints_pcl = pyrender.Mesh.from_trimesh(sm, poses=tfs) + scene.add(joints_pcl) + + pyrender.Viewer(scene, use_raymond_lighting=True) + elif plotting_module == 'matplotlib': + from matplotlib import pyplot as plt + from mpl_toolkits.mplot3d import Axes3D + from mpl_toolkits.mplot3d.art3d import Poly3DCollection + + fig = plt.figure() + ax = fig.add_subplot(111, projection='3d') + + mesh = Poly3DCollection(vertices[model.faces], alpha=0.1) + face_color = (1.0, 1.0, 0.9) + edge_color = (0, 0, 0) + mesh.set_edgecolor(edge_color) + mesh.set_facecolor(face_color) + ax.add_collection3d(mesh) + ax.scatter(joints[:, 0], joints[:, 1], joints[:, 2], color='r') + + if plot_joints: + ax.scatter(joints[:, 0], joints[:, 1], joints[:, 2], alpha=0.1) + plt.show() + elif plotting_module == 'open3d': + import open3d as o3d + + mesh = o3d.geometry.TriangleMesh() + mesh.vertices = o3d.utility.Vector3dVector( + vertices) + mesh.triangles = o3d.utility.Vector3iVector(model.faces) + mesh.compute_vertex_normals() + mesh.paint_uniform_color([0.3, 0.3, 0.3]) + + geometry = [mesh] + if plot_joints: + joints_pcl = o3d.geometry.PointCloud() + joints_pcl.points = o3d.utility.Vector3dVector(joints) + joints_pcl.paint_uniform_color([0.7, 0.3, 0.3]) + geometry.append(joints_pcl) + + o3d.visualization.draw_geometries(geometry) + else: + raise ValueError('Unknown plotting_module: {}'.format(plotting_module)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='SMPL-X Demo') + + parser.add_argument('--model-folder', required=True, type=str, + help='The path to the model folder') + parser.add_argument('--model-type', default='smplx', type=str, + choices=['smpl', 'smplh', 'smplx', 'mano', 'flame'], + help='The type of model to load') + parser.add_argument('--gender', type=str, default='neutral', + help='The gender of the model') + parser.add_argument('--num-betas', default=10, type=int, + 
dest='num_betas', + help='Number of shape coefficients.') + parser.add_argument('--num-expression-coeffs', default=10, type=int, + dest='num_expression_coeffs', + help='Number of expression coefficients.') + parser.add_argument('--plotting-module', type=str, default='pyrender', + dest='plotting_module', + choices=['pyrender', 'matplotlib', 'open3d'], + help='The module to use for plotting the result') + parser.add_argument('--ext', type=str, default='npz', + help='Which extension to use for loading') + parser.add_argument('--plot-joints', default=False, + type=lambda arg: arg.lower() in ['true', '1'], + help='The path to the model folder') + parser.add_argument('--sample-shape', default=True, + dest='sample_shape', + type=lambda arg: arg.lower() in ['true', '1'], + help='Sample a random shape') + parser.add_argument('--sample-expression', default=True, + dest='sample_expression', + type=lambda arg: arg.lower() in ['true', '1'], + help='Sample a random expression') + parser.add_argument('--use-face-contour', default=False, + type=lambda arg: arg.lower() in ['true', '1'], + help='Compute the contour of the face') + + args = parser.parse_args() + + model_folder = osp.expanduser(osp.expandvars(args.model_folder)) + model_type = args.model_type + plot_joints = args.plot_joints + use_face_contour = args.use_face_contour + gender = args.gender + ext = args.ext + plotting_module = args.plotting_module + num_betas = args.num_betas + num_expression_coeffs = args.num_expression_coeffs + sample_shape = args.sample_shape + sample_expression = args.sample_expression + + main(model_folder, model_type, ext=ext, + gender=gender, plot_joints=plot_joints, + num_betas=num_betas, + num_expression_coeffs=num_expression_coeffs, + sample_shape=sample_shape, + sample_expression=sample_expression, + plotting_module=plotting_module, + use_face_contour=use_face_contour) diff --git a/LHM/models/rendering/smplx/examples/vis_flame_vertices.py b/LHM/models/rendering/smplx/examples/vis_flame_vertices.py new file mode 100644 index 0000000000000000000000000000000000000000..b8d6b9b33610876a9d555f87492b326b172692a7 --- /dev/null +++ b/LHM/models/rendering/smplx/examples/vis_flame_vertices.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +import os.path as osp +import argparse +import pickle + +import numpy as np +import torch +import open3d as o3d + +import smplx + + +def main(model_folder, corr_fname, ext='npz', + head_color=(0.3, 0.3, 0.6), + gender='neutral'): + + head_idxs = np.load(corr_fname) + + model = smplx.create(model_folder, model_type='smplx', + gender=gender, + ext=ext) + betas = torch.zeros([1, 10], dtype=torch.float32) + expression = torch.zeros([1, 10], dtype=torch.float32) + + output = model(betas=betas, expression=expression, + return_verts=True) + vertices = output.vertices.detach().cpu().numpy().squeeze() + joints = output.joints.detach().cpu().numpy().squeeze() + + print('Vertices shape =', vertices.shape) + print('Joints shape =', joints.shape) + + mesh = o3d.geometry.TriangleMesh() + mesh.vertices = o3d.utility.Vector3dVector(vertices) + mesh.triangles = o3d.utility.Vector3iVector(model.faces) + mesh.compute_vertex_normals() + + colors = np.ones_like(vertices) * [0.3, 0.3, 0.3] + colors[head_idxs] = head_color + + mesh.vertex_colors = o3d.utility.Vector3dVector(colors) + + o3d.visualization.draw_geometries([mesh]) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='SMPL-X Demo') + + parser.add_argument('--model-folder', required=True, type=str, + help='The path to the model folder') + parser.add_argument('--corr-fname', required=True, type=str, + dest='corr_fname', + help='Filename with the head correspondences') + parser.add_argument('--gender', type=str, default='neutral', + help='The gender of the model') + parser.add_argument('--ext', type=str, default='npz', + help='Which extension to use for loading') + parser.add_argument('--head', default='right', + choices=['right', 'left'], + type=str, help='Which head to plot') + parser.add_argument('--head-color', type=float, nargs=3, dest='head_color', + default=(0.3, 0.3, 0.6), + help='Color for the head vertices') + + args = parser.parse_args() + + model_folder = osp.expanduser(osp.expandvars(args.model_folder)) + corr_fname = args.corr_fname + gender = args.gender + ext = args.ext + head = args.head + head_color = args.head_color + + main(model_folder, corr_fname, ext=ext, + head_color=head_color, + gender=gender + ) diff --git a/LHM/models/rendering/smplx/examples/vis_mano_vertices.py b/LHM/models/rendering/smplx/examples/vis_mano_vertices.py new file mode 100644 index 0000000000000000000000000000000000000000..1741542a1808071cc35fa1fcdef01a869885ec7e --- /dev/null +++ b/LHM/models/rendering/smplx/examples/vis_mano_vertices.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +import os.path as osp +import argparse +import pickle + +import numpy as np +import torch +import open3d as o3d + +import smplx + + +def main(model_folder, corr_fname, ext='npz', + hand_color=(0.3, 0.3, 0.6), + gender='neutral', hand='right'): + + with open(corr_fname, 'rb') as f: + idxs_data = pickle.load(f) + if hand == 'both': + hand_idxs = np.concatenate( + [idxs_data['left_hand'], idxs_data['right_hand']] + ) + else: + hand_idxs = idxs_data[f'{hand}_hand'] + + model = smplx.create(model_folder, model_type='smplx', + gender=gender, + ext=ext) + betas = torch.zeros([1, 10], dtype=torch.float32) + expression = torch.zeros([1, 10], dtype=torch.float32) + + output = model(betas=betas, expression=expression, + return_verts=True) + vertices = output.vertices.detach().cpu().numpy().squeeze() + joints = output.joints.detach().cpu().numpy().squeeze() + + print('Vertices shape =', vertices.shape) + print('Joints shape =', joints.shape) + + mesh = o3d.geometry.TriangleMesh() + mesh.vertices = o3d.utility.Vector3dVector(vertices) + mesh.triangles = o3d.utility.Vector3iVector(model.faces) + mesh.compute_vertex_normals() + + colors = np.ones_like(vertices) * [0.3, 0.3, 0.3] + colors[hand_idxs] = hand_color + + mesh.vertex_colors = o3d.utility.Vector3dVector(colors) + + o3d.visualization.draw_geometries([mesh]) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='SMPL-X Demo') + + parser.add_argument('--model-folder', required=True, type=str, + help='The path to the model folder') + parser.add_argument('--corr-fname', required=True, type=str, + dest='corr_fname', + help='Filename with the hand correspondences') + parser.add_argument('--gender', type=str, default='neutral', + help='The gender of the model') + parser.add_argument('--ext', type=str, default='npz', + help='Which extension to use for loading') + parser.add_argument('--hand', default='right', + choices=['right', 'left', 'both'], + type=str, help='Which hand to plot') + parser.add_argument('--hand-color', type=float, nargs=3, dest='hand_color', + default=(0.3, 0.3, 0.6), + help='Color for the hand vertices') + + args = parser.parse_args() + + model_folder = osp.expanduser(osp.expandvars(args.model_folder)) + corr_fname = args.corr_fname + gender = args.gender + ext = args.ext + hand = args.hand + hand_color = args.hand_color + + main(model_folder, corr_fname, ext=ext, + hand_color=hand_color, + gender=gender, hand=hand + ) diff --git a/LHM/models/rendering/smplx/setup.py b/LHM/models/rendering/smplx/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..d0101486883a40f08ebc0634936a6c445e5580b2 --- /dev/null +++ b/LHM/models/rendering/smplx/setup.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems and the Max Planck Institute for Biological +# Cybernetics. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +import io +import os + +from setuptools import setup + +# Package meta-data. +NAME = 'smplx' +DESCRIPTION = 'PyTorch module for loading the SMPLX body model' +URL = 'http://smpl-x.is.tuebingen.mpg.de' +EMAIL = 'vassilis.choutas@tuebingen.mpg.de' +AUTHOR = 'Vassilis Choutas' +REQUIRES_PYTHON = '>=3.6.0' +VERSION = '0.1.28' + +here = os.path.abspath(os.path.dirname(__file__)) + +try: + FileNotFoundError +except NameError: + FileNotFoundError = IOError + +# Import the README and use it as the long-description. +# Note: this will only work if 'README.md' is present in your MANIFEST.in file! +try: + with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f: + long_description = '\n' + f.read() +except FileNotFoundError: + long_description = DESCRIPTION + +# Load the package's __version__.py module as a dictionary. +about = {} +if not VERSION: + with open(os.path.join(here, NAME, '__version__.py')) as f: + exec(f.read(), about) +else: + about['__version__'] = VERSION + +pyrender_reqs = ['pyrender>=0.1.23', 'trimesh>=2.37.6', 'shapely'] +matplotlib_reqs = ['matplotlib'] +open3d_reqs = ['open3d-python'] + +setup(name=NAME, + version=about['__version__'], + description=DESCRIPTION, + long_description=long_description, + long_description_content_type='text/markdown', + author=AUTHOR, + author_email=EMAIL, + python_requires=REQUIRES_PYTHON, + url=URL, + install_requires=[ + 'numpy>=1.16.2', + 'torch>=1.0.1.post2', + ], + extras_require={ + 'pyrender': pyrender_reqs, + 'open3d': open3d_reqs, + 'matplotlib': matplotlib_reqs, + 'all': pyrender_reqs + matplotlib_reqs + open3d_reqs + }, + packages=['smplx']) diff --git a/LHM/models/rendering/smplx/smplx/__init__.py b/LHM/models/rendering/smplx/smplx/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..886949df670691d1ef5995737cafa285224826c4 --- /dev/null +++ b/LHM/models/rendering/smplx/smplx/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from .body_models import ( + create, + SMPL, + SMPLH, + SMPLX, + MANO, + FLAME, + build_layer, + SMPLLayer, + SMPLHLayer, + SMPLXLayer, + MANOLayer, + FLAMELayer, +) diff --git a/LHM/models/rendering/smplx/smplx/body_models.py b/LHM/models/rendering/smplx/smplx/body_models.py new file mode 100644 index 0000000000000000000000000000000000000000..21b57d063d22953f1beb0c24d24e0216c8edd3a6 --- /dev/null +++ b/LHM/models/rendering/smplx/smplx/body_models.py @@ -0,0 +1,2664 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. 
+# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +import os +import os.path as osp +import pickle +from collections import namedtuple +from typing import Dict, Optional, Union + +import numpy as np +import torch +import torch.nn as nn + +from .lbs import blend_shapes, find_dynamic_lmk_idx_and_bcoords, lbs, vertices2landmarks +from .utils import ( + Array, + FLAMEOutput, + MANOOutput, + SMPLHOutput, + SMPLOutput, + SMPLXOutput, + Struct, + Tensor, + find_joint_kin_chain, + to_np, + to_tensor, +) +from .vertex_ids import vertex_ids as VERTEX_IDS +from .vertex_joint_selector import VertexJointSelector + +TensorOutput = namedtuple( + "TensorOutput", + [ + "vertices", + "joints", + "betas", + "expression", + "global_orient", + "body_pose", + "left_hand_pose", + "right_hand_pose", + "jaw_pose", + "transl", + "full_pose", + ], +) + + +class SMPL(nn.Module): + + NUM_JOINTS = 23 + NUM_BODY_JOINTS = 23 + SHAPE_SPACE_DIM = 300 + + def __init__( + self, + model_path: str, + kid_template_path: str = "", + data_struct: Optional[Struct] = None, + create_betas: bool = True, + betas: Optional[Tensor] = None, + num_betas: int = 10, + create_global_orient: bool = True, + global_orient: Optional[Tensor] = None, + create_body_pose: bool = True, + body_pose: Optional[Tensor] = None, + create_transl: bool = True, + transl: Optional[Tensor] = None, + dtype=torch.float32, + batch_size: int = 1, + joint_mapper=None, + gender: str = "neutral", + age: str = "adult", + vertex_ids: Dict[str, int] = None, + v_template: Optional[Union[Tensor, Array]] = None, + **kwargs, + ) -> None: + """SMPL model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + data_struct: Strct + A struct object. If given, then the parameters of the model are + read from the object. Otherwise, the model tries to read the + parameters from the given `model_path`. (default = None) + create_global_orient: bool, optional + Flag for creating a member variable for the global orientation + of the body. (default = True) + global_orient: torch.tensor, optional, Bx3 + The default value for the global orientation variable. + (default = None) + create_body_pose: bool, optional + Flag for creating a member variable for the pose of the body. + (default = True) + body_pose: torch.tensor, optional, Bx(Body Joints * 3) + The default value for the body pose variable. + (default = None) + num_betas: int, optional + Number of shape components to use + (default = 10). + create_betas: bool, optional + Flag for creating a member variable for the shape space + (default = True). + betas: torch.tensor, optional, Bx10 + The default value for the shape member variable. + (default = None) + create_transl: bool, optional + Flag for creating a member variable for the translation + of the body. (default = True) + transl: torch.tensor, optional, Bx3 + The default value for the transl variable. + (default = None) + dtype: torch.dtype, optional + The data type for the created variables + batch_size: int, optional + The batch size used for creating the member variables + joint_mapper: object, optional + An object that re-maps the joints. 
Useful if one wants to + re-order the SMPL joints to some other convention (e.g. MSCOCO) + (default = None) + gender: str, optional + Which gender to load + vertex_ids: dict, optional + A dictionary containing the indices of the extra vertices that + will be selected + """ + + self.gender = gender + self.age = age + + if data_struct is None: + if osp.isdir(model_path): + model_fn = "SMPL_{}.{ext}".format(gender.upper(), ext="pkl") + smpl_path = os.path.join(model_path, model_fn) + else: + smpl_path = model_path + assert osp.exists(smpl_path), "Path {} does not exist!".format(smpl_path) + + with open(smpl_path, "rb") as smpl_file: + data_struct = Struct(**pickle.load(smpl_file, encoding="latin1")) + + super(SMPL, self).__init__() + self.batch_size = batch_size + shapedirs = data_struct.shapedirs + if shapedirs.shape[-1] < self.SHAPE_SPACE_DIM: + print( + f"WARNING: You are using a {self.name()} model, with only" + f" {shapedirs.shape[-1]} shape coefficients.\n" + f"num_betas={num_betas}, shapedirs.shape={shapedirs.shape}, " + f"self.SHAPE_SPACE_DIM={self.SHAPE_SPACE_DIM}" + ) + num_betas = min(num_betas, shapedirs.shape[-1]) + else: + num_betas = min(num_betas, self.SHAPE_SPACE_DIM) + + if self.age == "kid": + v_template_smil = np.load(kid_template_path) + v_template_smil -= np.mean(v_template_smil, axis=0) + v_template_diff = np.expand_dims( + v_template_smil - data_struct.v_template, axis=2 + ) + shapedirs = np.concatenate( + (shapedirs[:, :, :num_betas], v_template_diff), axis=2 + ) + num_betas = num_betas + 1 + + self._num_betas = num_betas + shapedirs = shapedirs[:, :, :num_betas] + # The shape components + self.register_buffer("shapedirs", to_tensor(to_np(shapedirs), dtype=dtype)) + + if vertex_ids is None: + # SMPL and SMPL-H share the same topology, so any extra joints can + # be drawn from the same place + vertex_ids = VERTEX_IDS["smplh"] + + self.dtype = dtype + + self.joint_mapper = joint_mapper + + self.vertex_joint_selector = VertexJointSelector( + vertex_ids=vertex_ids, **kwargs + ) + + self.faces = data_struct.f + self.register_buffer( + "faces_tensor", + to_tensor(to_np(self.faces, dtype=np.int64), dtype=torch.long), + ) + + if create_betas: + if betas is None: + default_betas = torch.zeros([batch_size, self.num_betas], dtype=dtype) + else: + if torch.is_tensor(betas): + default_betas = betas.clone().detach() + else: + default_betas = torch.tensor(betas, dtype=dtype) + + self.register_parameter( + "betas", nn.Parameter(default_betas, requires_grad=True) + ) + + # The tensor that contains the global rotation of the model + # It is separated from the pose of the joints in case we wish to + # optimize only over one of them + if create_global_orient: + if global_orient is None: + default_global_orient = torch.zeros([batch_size, 3], dtype=dtype) + else: + if torch.is_tensor(global_orient): + default_global_orient = global_orient.clone().detach() + else: + default_global_orient = torch.tensor(global_orient, dtype=dtype) + + global_orient = nn.Parameter(default_global_orient, requires_grad=True) + self.register_parameter("global_orient", global_orient) + + if create_body_pose: + if body_pose is None: + default_body_pose = torch.zeros( + [batch_size, self.NUM_BODY_JOINTS * 3], dtype=dtype + ) + else: + if torch.is_tensor(body_pose): + default_body_pose = body_pose.clone().detach() + else: + default_body_pose = torch.tensor(body_pose, dtype=dtype) + self.register_parameter( + "body_pose", nn.Parameter(default_body_pose, requires_grad=True) + ) + + if create_transl: + if transl is 
None: + default_transl = torch.zeros( + [batch_size, 3], dtype=dtype, requires_grad=True + ) + else: + default_transl = torch.tensor(transl, dtype=dtype) + self.register_parameter( + "transl", nn.Parameter(default_transl, requires_grad=True) + ) + + if v_template is None: + v_template = data_struct.v_template + if not torch.is_tensor(v_template): + v_template = to_tensor(to_np(v_template), dtype=dtype) + # The vertices of the template model + self.register_buffer("v_template", v_template) + + j_regressor = to_tensor(to_np(data_struct.J_regressor), dtype=dtype) + self.register_buffer("J_regressor", j_regressor) + + # Pose blend shape basis: 6890 x 3 x 207, reshaped to 6890*3 x 207 + num_pose_basis = data_struct.posedirs.shape[-1] + # 207 x 20670 + posedirs = np.reshape(data_struct.posedirs, [-1, num_pose_basis]).T + self.register_buffer("posedirs", to_tensor(to_np(posedirs), dtype=dtype)) + + # indices of parents for each joints + parents = to_tensor(to_np(data_struct.kintree_table[0])).long() + parents[0] = -1 + self.register_buffer("parents", parents) + + lbs_weights = to_tensor(to_np(data_struct.weights), dtype=dtype) + self.register_buffer("lbs_weights", lbs_weights) + + @property + def num_betas(self): + return self._num_betas + + @property + def num_expression_coeffs(self): + return 0 + + def create_mean_pose(self, data_struct) -> Tensor: + pass + + def name(self) -> str: + return "SMPL" + + @torch.no_grad() + def reset_params(self, **params_dict) -> None: + for param_name, param in self.named_parameters(): + if param_name in params_dict: + param[:] = torch.tensor(params_dict[param_name]) + else: + param.fill_(0) + + def get_num_verts(self) -> int: + return self.v_template.shape[0] + + def get_num_faces(self) -> int: + return self.faces.shape[0] + + def extra_repr(self) -> str: + msg = [ + f"Gender: {self.gender.upper()}", + f"Number of joints: {self.J_regressor.shape[0]}", + f"Betas: {self.num_betas}", + ] + return "\n".join(msg) + + def forward_shape( + self, + betas: Optional[Tensor] = None, + ) -> SMPLOutput: + betas = betas if betas is not None else self.betas + v_shaped = self.v_template + blend_shapes(betas, self.shapedirs) + return SMPLOutput(vertices=v_shaped, betas=betas, v_shaped=v_shaped) + + def forward( + self, + betas: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts=True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs, + ) -> SMPLOutput: + """Forward pass for the SMPL model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3 + If given, ignore the member variable and use it as the global + rotation of the body. Useful if someone wishes to predicts this + with an external model. (default=None) + betas: torch.tensor, optional, shape BxN_b + If given, ignore the member variable `betas` and use it + instead. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + body_pose: torch.tensor, optional, shape Bx(J*3) + If given, ignore the member variable `body_pose` and use it + instead. For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + axis-angle format. (default=None) + transl: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `transl` and use it + instead. 
For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. (default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + """ + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = ( + global_orient if global_orient is not None else self.global_orient + ) + body_pose = body_pose if body_pose is not None else self.body_pose + betas = betas if betas is not None else self.betas + + apply_trans = transl is not None or hasattr(self, "transl") + if transl is None and hasattr(self, "transl"): + transl = self.transl + + full_pose = torch.cat([global_orient, body_pose], dim=1) + + batch_size = max(betas.shape[0], global_orient.shape[0], body_pose.shape[0]) + + if betas.shape[0] != batch_size: + num_repeats = int(batch_size / betas.shape[0]) + betas = betas.expand(num_repeats, -1) + + vertices, joints = lbs( + betas, + full_pose, + self.v_template, + self.shapedirs, + self.posedirs, + self.J_regressor, + self.parents, + self.lbs_weights, + pose2rot=pose2rot, + ) + + joints = self.vertex_joint_selector(vertices, joints) + # Map the joints to the current dataset + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if apply_trans: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = SMPLOutput( + vertices=vertices if return_verts else None, + global_orient=global_orient, + body_pose=body_pose, + joints=joints, + betas=betas, + full_pose=full_pose if return_full_pose else None, + ) + + return output + + +class SMPLLayer(SMPL): + def __init__(self, *args, **kwargs) -> None: + # Just create a SMPL module without any member variables + super(SMPLLayer, self).__init__( + create_body_pose=False, + create_betas=False, + create_global_orient=False, + create_transl=False, + *args, + **kwargs, + ) + + def forward( + self, + betas: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts=True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs, + ) -> SMPLOutput: + """Forward pass for the SMPL model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3x3 + Global rotation of the body. Useful if someone wishes to + predicts this with an external model. It is expected to be in + rotation matrix format. (default=None) + betas: torch.tensor, optional, shape BxN_b + Shape parameters. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + body_pose: torch.tensor, optional, shape BxJx3x3 + Body pose. For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + transl: torch.tensor, optional, shape Bx3 + Translation vector of the body. + For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. 
(default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + """ + model_vars = [betas, global_orient, body_pose, transl] + batch_size = 1 + for var in model_vars: + if var is None: + continue + batch_size = max(batch_size, len(var)) + device, dtype = self.shapedirs.device, self.shapedirs.dtype + if global_orient is None: + global_orient = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + if body_pose is None: + body_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, self.NUM_BODY_JOINTS, -1, -1) + .contiguous() + ) + if betas is None: + betas = torch.zeros( + [batch_size, self.num_betas], dtype=dtype, device=device + ) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + full_pose = torch.cat( + [ + global_orient.reshape(-1, 1, 3, 3), + body_pose.reshape(-1, self.NUM_BODY_JOINTS, 3, 3), + ], + dim=1, + ) + + vertices, joints = lbs( + betas, + full_pose, + self.v_template, + self.shapedirs, + self.posedirs, + self.J_regressor, + self.parents, + self.lbs_weights, + pose2rot=False, + ) + + joints = self.vertex_joint_selector(vertices, joints) + # Map the joints to the current dataset + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if transl is not None: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = SMPLOutput( + vertices=vertices if return_verts else None, + global_orient=global_orient, + body_pose=body_pose, + joints=joints, + betas=betas, + full_pose=full_pose if return_full_pose else None, + ) + + return output + + +class SMPLH(SMPL): + + # The hand joints are replaced by MANO + NUM_BODY_JOINTS = SMPL.NUM_JOINTS - 2 + NUM_HAND_JOINTS = 15 + NUM_JOINTS = NUM_BODY_JOINTS + 2 * NUM_HAND_JOINTS + + def __init__( + self, + model_path, + kid_template_path: str = "", + data_struct: Optional[Struct] = None, + create_left_hand_pose: bool = True, + left_hand_pose: Optional[Tensor] = None, + create_right_hand_pose: bool = True, + right_hand_pose: Optional[Tensor] = None, + use_pca: bool = True, + num_pca_comps: int = 6, + num_betas=16, + flat_hand_mean: bool = False, + batch_size: int = 1, + gender: str = "neutral", + age: str = "adult", + dtype=torch.float32, + vertex_ids=None, + use_compressed: bool = True, + ext: str = "pkl", + **kwargs, + ) -> None: + """SMPLH model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + data_struct: Strct + A struct object. If given, then the parameters of the model are + read from the object. Otherwise, the model tries to read the + parameters from the given `model_path`. (default = None) + create_left_hand_pose: bool, optional + Flag for creating a member variable for the pose of the left + hand. (default = True) + left_hand_pose: torch.tensor, optional, BxP + The default value for the left hand pose member variable. + (default = None) + create_right_hand_pose: bool, optional + Flag for creating a member variable for the pose of the right + hand. (default = True) + right_hand_pose: torch.tensor, optional, BxP + The default value for the right hand pose member variable. + (default = None) + num_pca_comps: int, optional + The number of PCA components to use for each hand. + (default = 6) + flat_hand_mean: bool, optional + If False, then the pose of the hand is initialized to False. 
+ batch_size: int, optional + The batch size used for creating the member variables + gender: str, optional + Which gender to load + dtype: torch.dtype, optional + The data type for the created variables + vertex_ids: dict, optional + A dictionary containing the indices of the extra vertices that + will be selected + """ + + self.num_pca_comps = num_pca_comps + # If no data structure is passed, then load the data from the given + # model folder + if data_struct is None: + # Load the model + if osp.isdir(model_path): + model_fn = "SMPLH_{}.{ext}".format(gender.upper(), ext=ext) + smplh_path = os.path.join(model_path, model_fn) + else: + smplh_path = model_path + assert osp.exists(smplh_path), "Path {} does not exist!".format(smplh_path) + + if ext == "pkl": + with open(smplh_path, "rb") as smplh_file: + model_data = pickle.load(smplh_file, encoding="latin1") + elif ext == "npz": + model_data = np.load(smplh_path, allow_pickle=True) + else: + raise ValueError("Unknown extension: {}".format(ext)) + data_struct = Struct(**model_data) + + if vertex_ids is None: + vertex_ids = VERTEX_IDS["smplh"] + + super(SMPLH, self).__init__( + model_path=model_path, + kid_template_path=kid_template_path, + data_struct=data_struct, + num_betas=num_betas, + batch_size=batch_size, + vertex_ids=vertex_ids, + gender=gender, + age=age, + use_compressed=use_compressed, + dtype=dtype, + ext=ext, + **kwargs, + ) + + self.use_pca = use_pca + self.num_pca_comps = num_pca_comps + self.flat_hand_mean = flat_hand_mean + + left_hand_components = data_struct.hands_componentsl[:num_pca_comps] + right_hand_components = data_struct.hands_componentsr[:num_pca_comps] + + self.np_left_hand_components = left_hand_components + self.np_right_hand_components = right_hand_components + if self.use_pca: + self.register_buffer( + "left_hand_components", torch.tensor(left_hand_components, dtype=dtype) + ) + self.register_buffer( + "right_hand_components", + torch.tensor(right_hand_components, dtype=dtype), + ) + + if self.flat_hand_mean: + left_hand_mean = np.zeros_like(data_struct.hands_meanl) + else: + left_hand_mean = data_struct.hands_meanl + + if self.flat_hand_mean: + right_hand_mean = np.zeros_like(data_struct.hands_meanr) + else: + right_hand_mean = data_struct.hands_meanr + + self.register_buffer( + "left_hand_mean", to_tensor(left_hand_mean, dtype=self.dtype) + ) + self.register_buffer( + "right_hand_mean", to_tensor(right_hand_mean, dtype=self.dtype) + ) + + # Create the buffers for the pose of the left hand + hand_pose_dim = num_pca_comps if use_pca else 3 * self.NUM_HAND_JOINTS + if create_left_hand_pose: + if left_hand_pose is None: + default_lhand_pose = torch.zeros( + [batch_size, hand_pose_dim], dtype=dtype + ) + else: + default_lhand_pose = torch.tensor(left_hand_pose, dtype=dtype) + + left_hand_pose_param = nn.Parameter(default_lhand_pose, requires_grad=True) + self.register_parameter("left_hand_pose", left_hand_pose_param) + + if create_right_hand_pose: + if right_hand_pose is None: + default_rhand_pose = torch.zeros( + [batch_size, hand_pose_dim], dtype=dtype + ) + else: + default_rhand_pose = torch.tensor(right_hand_pose, dtype=dtype) + + right_hand_pose_param = nn.Parameter(default_rhand_pose, requires_grad=True) + self.register_parameter("right_hand_pose", right_hand_pose_param) + + # Create the buffer for the mean pose. 
+ pose_mean_tensor = self.create_mean_pose( + data_struct, flat_hand_mean=flat_hand_mean + ) + if not torch.is_tensor(pose_mean_tensor): + pose_mean_tensor = torch.tensor(pose_mean_tensor, dtype=dtype) + self.register_buffer("pose_mean", pose_mean_tensor) + + def create_mean_pose(self, data_struct, flat_hand_mean=False): + # Create the array for the mean pose. If flat_hand is false, then use + # the mean that is given by the data, rather than the flat open hand + global_orient_mean = torch.zeros([3], dtype=self.dtype) + body_pose_mean = torch.zeros([self.NUM_BODY_JOINTS * 3], dtype=self.dtype) + + pose_mean = torch.cat( + [ + global_orient_mean, + body_pose_mean, + self.left_hand_mean, + self.right_hand_mean, + ], + dim=0, + ) + return pose_mean + + def name(self) -> str: + return "SMPL+H" + + def extra_repr(self): + msg = super(SMPLH, self).extra_repr() + msg = [msg] + if self.use_pca: + msg.append(f"Number of PCA components: {self.num_pca_comps}") + msg.append(f"Flat hand mean: {self.flat_hand_mean}") + return "\n".join(msg) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + left_hand_pose: Optional[Tensor] = None, + right_hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs, + ) -> SMPLHOutput: + """ """ + + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = ( + global_orient if global_orient is not None else self.global_orient + ) + body_pose = body_pose if body_pose is not None else self.body_pose + betas = betas if betas is not None else self.betas + left_hand_pose = ( + left_hand_pose if left_hand_pose is not None else self.left_hand_pose + ) + right_hand_pose = ( + right_hand_pose if right_hand_pose is not None else self.right_hand_pose + ) + + apply_trans = transl is not None or hasattr(self, "transl") + if transl is None: + if hasattr(self, "transl"): + transl = self.transl + + if self.use_pca: + left_hand_pose = torch.einsum( + "bi,ij->bj", [left_hand_pose, self.left_hand_components] + ) + right_hand_pose = torch.einsum( + "bi,ij->bj", [right_hand_pose, self.right_hand_components] + ) + + full_pose = torch.cat( + [global_orient, body_pose, left_hand_pose, right_hand_pose], dim=1 + ) + full_pose += self.pose_mean + + vertices, joints = lbs( + betas, + full_pose, + self.v_template, + self.shapedirs, + self.posedirs, + self.J_regressor, + self.parents, + self.lbs_weights, + pose2rot=pose2rot, + ) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if apply_trans: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = SMPLHOutput( + vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + global_orient=global_orient, + body_pose=body_pose, + left_hand_pose=left_hand_pose, + right_hand_pose=right_hand_pose, + full_pose=full_pose if return_full_pose else None, + ) + + return output + + +class SMPLHLayer(SMPLH): + + def __init__(self, *args, **kwargs) -> None: + """SMPL+H as a layer model constructor""" + super(SMPLHLayer, self).__init__( + create_global_orient=False, + create_body_pose=False, + create_left_hand_pose=False, + create_right_hand_pose=False, + create_betas=False, + create_transl=False, + *args, + **kwargs, + ) + + def forward( + 
self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + left_hand_pose: Optional[Tensor] = None, + right_hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs, + ) -> SMPLHOutput: + """Forward pass for the SMPL+H model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3x3 + Global rotation of the body. Useful if someone wishes to + predicts this with an external model. It is expected to be in + rotation matrix format. (default=None) + betas: torch.tensor, optional, shape BxN_b + Shape parameters. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + body_pose: torch.tensor, optional, shape BxJx3x3 + If given, ignore the member variable `body_pose` and use it + instead. For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + left_hand_pose: torch.tensor, optional, shape Bx15x3x3 + If given, contains the pose of the left hand. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + right_hand_pose: torch.tensor, optional, shape Bx15x3x3 + If given, contains the pose of the right hand. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + transl: torch.tensor, optional, shape Bx3 + Translation vector of the body. + For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. 
(default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + """ + model_vars = [ + betas, + global_orient, + body_pose, + transl, + left_hand_pose, + right_hand_pose, + ] + batch_size = 1 + for var in model_vars: + if var is None: + continue + batch_size = max(batch_size, len(var)) + device, dtype = self.shapedirs.device, self.shapedirs.dtype + if global_orient is None: + global_orient = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + if body_pose is None: + body_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, 21, -1, -1) + .contiguous() + ) + if left_hand_pose is None: + left_hand_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, 15, -1, -1) + .contiguous() + ) + if right_hand_pose is None: + right_hand_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, 15, -1, -1) + .contiguous() + ) + if betas is None: + betas = torch.zeros( + [batch_size, self.num_betas], dtype=dtype, device=device + ) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + + # Concatenate all pose vectors + full_pose = torch.cat( + [ + global_orient.reshape(-1, 1, 3, 3), + body_pose.reshape(-1, self.NUM_BODY_JOINTS, 3, 3), + left_hand_pose.reshape(-1, self.NUM_HAND_JOINTS, 3, 3), + right_hand_pose.reshape(-1, self.NUM_HAND_JOINTS, 3, 3), + ], + dim=1, + ) + + vertices, joints = lbs( + betas, + full_pose, + self.v_template, + self.shapedirs, + self.posedirs, + self.J_regressor, + self.parents, + self.lbs_weights, + pose2rot=False, + ) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if transl is not None: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = SMPLHOutput( + vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + global_orient=global_orient, + body_pose=body_pose, + left_hand_pose=left_hand_pose, + right_hand_pose=right_hand_pose, + full_pose=full_pose if return_full_pose else None, + ) + + return output + + +class SMPLX(SMPLH): + """ + SMPL-X (SMPL eXpressive) is a unified body model, with shape parameters + trained jointly for the face, hands and body. + SMPL-X uses standard vertex based linear blend skinning with learned + corrective blend shapes, has N=10475 vertices and K=54 joints, + which includes joints for the neck, jaw, eyeballs and fingers. 
+ """ + + NUM_BODY_JOINTS = SMPLH.NUM_BODY_JOINTS + NUM_HAND_JOINTS = 15 + NUM_FACE_JOINTS = 3 + NUM_JOINTS = NUM_BODY_JOINTS + 2 * NUM_HAND_JOINTS + NUM_FACE_JOINTS + EXPRESSION_SPACE_DIM = 100 + NECK_IDX = 12 + + def __init__( + self, + model_path: str, + kid_template_path: str = "", + num_expression_coeffs: int = 10, + create_expression: bool = True, + expression: Optional[Tensor] = None, + create_jaw_pose: bool = True, + jaw_pose: Optional[Tensor] = None, + create_leye_pose: bool = True, + leye_pose: Optional[Tensor] = None, + create_reye_pose=True, + reye_pose: Optional[Tensor] = None, + use_face_contour: bool = False, + batch_size: int = 1, + gender: str = "neutral", + age: str = "adult", + dtype=torch.float32, + ext: str = "npz", + **kwargs, + ) -> None: + """SMPLX model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + num_expression_coeffs: int, optional + Number of expression components to use + (default = 10). + create_expression: bool, optional + Flag for creating a member variable for the expression space + (default = True). + expression: torch.tensor, optional, Bx10 + The default value for the expression member variable. + (default = None) + create_jaw_pose: bool, optional + Flag for creating a member variable for the jaw pose. + (default = False) + jaw_pose: torch.tensor, optional, Bx3 + The default value for the jaw pose variable. + (default = None) + create_leye_pose: bool, optional + Flag for creating a member variable for the left eye pose. + (default = False) + leye_pose: torch.tensor, optional, Bx10 + The default value for the left eye pose variable. + (default = None) + create_reye_pose: bool, optional + Flag for creating a member variable for the right eye pose. + (default = False) + reye_pose: torch.tensor, optional, Bx10 + The default value for the right eye pose variable. 
+ (default = None) + use_face_contour: bool, optional + Whether to compute the keypoints that form the facial contour + batch_size: int, optional + The batch size used for creating the member variables + gender: str, optional + Which gender to load + dtype: torch.dtype + The data type for the created variables + """ + + # Load the model + if osp.isdir(model_path): + model_fn = "SMPLX_{}.{ext}".format(gender.upper(), ext=ext) + smplx_path = os.path.join(model_path, model_fn) + else: + smplx_path = model_path + assert osp.exists(smplx_path), "Path {} does not exist!".format(smplx_path) + + if ext == "pkl": + with open(smplx_path, "rb") as smplx_file: + model_data = pickle.load(smplx_file, encoding="latin1") + elif ext == "npz": + model_data = np.load(smplx_path, allow_pickle=True) + else: + raise ValueError("Unknown extension: {}".format(ext)) + + data_struct = Struct(**model_data) + + super(SMPLX, self).__init__( + model_path=model_path, + kid_template_path=kid_template_path, + data_struct=data_struct, + dtype=dtype, + batch_size=batch_size, + vertex_ids=VERTEX_IDS["smplx"], + gender=gender, + age=age, + ext=ext, + **kwargs, + ) + + lmk_faces_idx = data_struct.lmk_faces_idx + self.register_buffer( + "lmk_faces_idx", torch.tensor(lmk_faces_idx, dtype=torch.long) + ) + lmk_bary_coords = data_struct.lmk_bary_coords + self.register_buffer( + "lmk_bary_coords", torch.tensor(lmk_bary_coords, dtype=dtype) + ) + + self.use_face_contour = use_face_contour + if self.use_face_contour: + dynamic_lmk_faces_idx = data_struct.dynamic_lmk_faces_idx + dynamic_lmk_faces_idx = torch.tensor( + dynamic_lmk_faces_idx, dtype=torch.long + ) + self.register_buffer("dynamic_lmk_faces_idx", dynamic_lmk_faces_idx) + + dynamic_lmk_bary_coords = data_struct.dynamic_lmk_bary_coords + dynamic_lmk_bary_coords = torch.tensor(dynamic_lmk_bary_coords, dtype=dtype) + self.register_buffer("dynamic_lmk_bary_coords", dynamic_lmk_bary_coords) + + neck_kin_chain = find_joint_kin_chain(self.NECK_IDX, self.parents) + self.register_buffer( + "neck_kin_chain", torch.tensor(neck_kin_chain, dtype=torch.long) + ) + + if create_jaw_pose: + if jaw_pose is None: + default_jaw_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_jaw_pose = torch.tensor(jaw_pose, dtype=dtype) + jaw_pose_param = nn.Parameter(default_jaw_pose, requires_grad=True) + self.register_parameter("jaw_pose", jaw_pose_param) + + if create_leye_pose: + if leye_pose is None: + default_leye_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_leye_pose = torch.tensor(leye_pose, dtype=dtype) + leye_pose_param = nn.Parameter(default_leye_pose, requires_grad=True) + self.register_parameter("leye_pose", leye_pose_param) + + if create_reye_pose: + if reye_pose is None: + default_reye_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_reye_pose = torch.tensor(reye_pose, dtype=dtype) + reye_pose_param = nn.Parameter(default_reye_pose, requires_grad=True) + self.register_parameter("reye_pose", reye_pose_param) + + shapedirs = data_struct.shapedirs + if len(shapedirs.shape) < 3: + shapedirs = shapedirs[:, :, None] + if shapedirs.shape[-1] < self.SHAPE_SPACE_DIM + self.EXPRESSION_SPACE_DIM: + print( + f"WARNING: You are using a {self.name()} model, with only" + " 10 shape and 10 expression coefficients." 
+ ) + expr_start_idx = 10 + expr_end_idx = 20 + num_expression_coeffs = min(num_expression_coeffs, 10) + else: + expr_start_idx = self.SHAPE_SPACE_DIM + expr_end_idx = self.SHAPE_SPACE_DIM + num_expression_coeffs + num_expression_coeffs = min( + num_expression_coeffs, self.EXPRESSION_SPACE_DIM + ) + + self._num_expression_coeffs = num_expression_coeffs + + expr_dirs = shapedirs[:, :, expr_start_idx:expr_end_idx] + self.register_buffer("expr_dirs", to_tensor(to_np(expr_dirs), dtype=dtype)) + if create_expression: + if expression is None: + default_expression = torch.zeros( + [batch_size, self.num_expression_coeffs], dtype=dtype + ) + else: + default_expression = torch.tensor(expression, dtype=dtype) + expression_param = nn.Parameter(default_expression, requires_grad=True) + self.register_parameter("expression", expression_param) + + def name(self) -> str: + return "SMPL-X" + + @property + def num_expression_coeffs(self): + return self._num_expression_coeffs + + def create_mean_pose(self, data_struct, flat_hand_mean=False): + # Create the array for the mean pose. If flat_hand is false, then use + # the mean that is given by the data, rather than the flat open hand + global_orient_mean = torch.zeros([3], dtype=self.dtype) + body_pose_mean = torch.zeros([self.NUM_BODY_JOINTS * 3], dtype=self.dtype) + jaw_pose_mean = torch.zeros([3], dtype=self.dtype) + leye_pose_mean = torch.zeros([3], dtype=self.dtype) + reye_pose_mean = torch.zeros([3], dtype=self.dtype) + + pose_mean = np.concatenate( + [ + global_orient_mean, + body_pose_mean, + jaw_pose_mean, + leye_pose_mean, + reye_pose_mean, + self.left_hand_mean, + self.right_hand_mean, + ], + axis=0, + ) + + return pose_mean + + def extra_repr(self): + msg = super(SMPLX, self).extra_repr() + msg = [msg, f"Number of Expression Coefficients: {self.num_expression_coeffs}"] + return "\n".join(msg) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + left_hand_pose: Optional[Tensor] = None, + right_hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + expression: Optional[Tensor] = None, + jaw_pose: Optional[Tensor] = None, + leye_pose: Optional[Tensor] = None, + reye_pose: Optional[Tensor] = None, + face_offset: Optional[Tensor] = None, # added by Gyeongsik + joint_offset: Optional[Tensor] = None, # added by Gyeongsik + locator_offset: Optional[Tensor] = None, # added by Gyeongsik + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + return_shaped: bool = True, + **kwargs, + ) -> SMPLXOutput: + """ + Forward pass for the SMPLX model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3 + If given, ignore the member variable and use it as the global + rotation of the body. Useful if someone wishes to predicts this + with an external model. (default=None) + betas: torch.tensor, optional, shape BxN_b + If given, ignore the member variable `betas` and use it + instead. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + expression: torch.tensor, optional, shape BxN_e + If given, ignore the member variable `expression` and use it + instead. For example, it can used if expression parameters + `expression` are predicted from some external model. + body_pose: torch.tensor, optional, shape Bx(J*3) + If given, ignore the member variable `body_pose` and use it + instead. 
For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + axis-angle format. (default=None) + left_hand_pose: torch.tensor, optional, shape BxP + If given, ignore the member variable `left_hand_pose` and + use this instead. It should either contain PCA coefficients or + joint rotations in axis-angle format. + right_hand_pose: torch.tensor, optional, shape BxP + If given, ignore the member variable `right_hand_pose` and + use this instead. It should either contain PCA coefficients or + joint rotations in axis-angle format. + jaw_pose: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `jaw_pose` and + use this instead. It should either joint rotations in + axis-angle format. + transl: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `transl` and use it + instead. For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. (default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + output: ModelOutput + A named tuple of type `ModelOutput` + """ + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = ( + global_orient if global_orient is not None else self.global_orient + ) + body_pose = body_pose if body_pose is not None else self.body_pose + betas = betas if betas is not None else self.betas + + left_hand_pose = ( + left_hand_pose if left_hand_pose is not None else self.left_hand_pose + ) + right_hand_pose = ( + right_hand_pose if right_hand_pose is not None else self.right_hand_pose + ) + jaw_pose = jaw_pose if jaw_pose is not None else self.jaw_pose + leye_pose = leye_pose if leye_pose is not None else self.leye_pose + reye_pose = reye_pose if reye_pose is not None else self.reye_pose + expression = expression if expression is not None else self.expression + + apply_trans = transl is not None or hasattr(self, "transl") + if transl is None: + if hasattr(self, "transl"): + transl = self.transl + + if self.use_pca: + left_hand_pose = torch.einsum( + "bi,ij->bj", [left_hand_pose, self.left_hand_components] + ) + right_hand_pose = torch.einsum( + "bi,ij->bj", [right_hand_pose, self.right_hand_components] + ) + + full_pose = torch.cat( + [ + global_orient.reshape(-1, 1, 3), + body_pose.reshape(-1, self.NUM_BODY_JOINTS, 3), + jaw_pose.reshape(-1, 1, 3), + leye_pose.reshape(-1, 1, 3), + reye_pose.reshape(-1, 1, 3), + left_hand_pose.reshape(-1, 15, 3), + right_hand_pose.reshape(-1, 15, 3), + ], + dim=1, + ).reshape(-1, 165) + + # Add the mean pose of the model. 
Does not affect the body, only the + # hands when flat_hand_mean == False + full_pose += self.pose_mean + + batch_size = max(betas.shape[0], global_orient.shape[0], body_pose.shape[0]) + # Concatenate the shape and expression coefficients + scale = int(batch_size / betas.shape[0]) + if scale > 1: + betas = betas.expand(scale, -1) + expression = expression.expand(scale, -1) + shape_components = torch.cat([betas, expression], dim=-1) + + shapedirs = torch.cat([self.shapedirs, self.expr_dirs], dim=-1) + + # added by Gyeongsik (face_offset) + if face_offset is None: + vertices = self.v_template + else: + vertices = self.v_template + face_offset + with torch.autocast(device_type=body_pose.device.type, dtype=torch.float32): + vertices, joints = lbs( + shape_components, + full_pose, + vertices, + shapedirs, + self.posedirs, + self.J_regressor, + joint_offset, + locator_offset, + self.parents, # added by Gyeongsik (joint_offset and locator_offset) + self.lbs_weights, + pose2rot=pose2rot, + ) + + lmk_faces_idx = ( + self.lmk_faces_idx.unsqueeze(dim=0).expand(batch_size, -1).contiguous() + ) + lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).repeat( + self.batch_size, 1, 1 + ) + if self.use_face_contour: + lmk_idx_and_bcoords = find_dynamic_lmk_idx_and_bcoords( + vertices, + full_pose, + self.dynamic_lmk_faces_idx, + self.dynamic_lmk_bary_coords, + self.neck_kin_chain, + pose2rot=True, + ) + dyn_lmk_faces_idx, dyn_lmk_bary_coords = lmk_idx_and_bcoords + + lmk_faces_idx = torch.cat([lmk_faces_idx, dyn_lmk_faces_idx], 1) + lmk_bary_coords = torch.cat( + [lmk_bary_coords.expand(batch_size, -1, -1), dyn_lmk_bary_coords], 1 + ) + + landmarks = vertices2landmarks( + vertices, self.faces_tensor, lmk_faces_idx, lmk_bary_coords + ) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + # Add the landmarks to the joints + joints = torch.cat([joints, landmarks], dim=1) + # Map the joints to the current dataset + + if self.joint_mapper is not None: + joints = self.joint_mapper(joints=joints, vertices=vertices) + + if apply_trans: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + v_shaped = None + if return_shaped: + v_shaped = self.v_template + blend_shapes(betas, self.shapedirs) + else: + v_shaped = Tensor(0) + + output = SMPLXOutput( + vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + expression=expression, + global_orient=global_orient, + transl=transl, + body_pose=body_pose, + left_hand_pose=left_hand_pose, + right_hand_pose=right_hand_pose, + jaw_pose=jaw_pose, + v_shaped=v_shaped, + full_pose=full_pose if return_full_pose else None, + ) + return output + + +class SMPLXLayer(SMPLX): + def __init__(self, *args, **kwargs) -> None: + # Just create a SMPLX module without any member variables + super(SMPLXLayer, self).__init__( + create_global_orient=False, + create_body_pose=False, + create_left_hand_pose=False, + create_right_hand_pose=False, + create_jaw_pose=False, + create_leye_pose=False, + create_reye_pose=False, + create_betas=False, + create_expression=False, + create_transl=False, + *args, + **kwargs, + ) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + left_hand_pose: Optional[Tensor] = None, + right_hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + expression: Optional[Tensor] = None, + jaw_pose: Optional[Tensor] = None, + leye_pose: Optional[Tensor] = None, + reye_pose: 
Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = True, + **kwargs, + ) -> TensorOutput: + """ + Forward pass for the SMPLX model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3x3 + If given, ignore the member variable and use it as the global + rotation of the body. Useful if someone wishes to predicts this + with an external model. It is expected to be in rotation matrix + format. (default=None) + betas: torch.tensor, optional, shape BxN_b + If given, ignore the member variable `betas` and use it + instead. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + expression: torch.tensor, optional, shape BxN_e + Expression coefficients. + For example, it can used if expression parameters + `expression` are predicted from some external model. + body_pose: torch.tensor, optional, shape BxJx3x3 + If given, ignore the member variable `body_pose` and use it + instead. For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + left_hand_pose: torch.tensor, optional, shape Bx15x3x3 + If given, contains the pose of the left hand. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + right_hand_pose: torch.tensor, optional, shape Bx15x3x3 + If given, contains the pose of the right hand. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + jaw_pose: torch.tensor, optional, shape Bx3x3 + Jaw pose. It should either joint rotations in + rotation matrix format. + transl: torch.tensor, optional, shape Bx3 + Translation vector of the body. + For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. 
(default=True) + return_full_pose: bool, optional + Returns the full pose vector (default=False) + Returns + ------- + output: ModelOutput + A data class that contains the posed vertices and joints + """ + device, dtype = self.shapedirs.device, self.shapedirs.dtype + + model_vars = [ + betas, + global_orient, + body_pose, + transl, + expression, + left_hand_pose, + right_hand_pose, + jaw_pose, + ] + batch_size = 1 + for var in model_vars: + if var is None: + continue + batch_size = max(batch_size, len(var)) + + if global_orient is None: + global_orient = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + if body_pose is None: + body_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, self.NUM_BODY_JOINTS, -1, -1) + .contiguous() + ) + if left_hand_pose is None: + left_hand_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, 15, -1, -1) + .contiguous() + ) + if right_hand_pose is None: + right_hand_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, 15, -1, -1) + .contiguous() + ) + if jaw_pose is None: + jaw_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + if leye_pose is None: + leye_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + if reye_pose is None: + reye_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + if expression is None: + expression = torch.zeros( + [batch_size, self.num_expression_coeffs], dtype=dtype, device=device + ) + if betas is None: + betas = torch.zeros( + [batch_size, self.num_betas], dtype=dtype, device=device + ) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + + # Concatenate all pose vectors + full_pose = torch.cat( + [ + global_orient.reshape(-1, 1, 3, 3), + body_pose.reshape(-1, self.NUM_BODY_JOINTS, 3, 3), + jaw_pose.reshape(-1, 1, 3, 3), + leye_pose.reshape(-1, 1, 3, 3), + reye_pose.reshape(-1, 1, 3, 3), + left_hand_pose.reshape(-1, self.NUM_HAND_JOINTS, 3, 3), + right_hand_pose.reshape(-1, self.NUM_HAND_JOINTS, 3, 3), + ], + dim=1, + ) + shape_components = torch.cat([betas, expression], dim=-1) + + shapedirs = torch.cat([self.shapedirs, self.expr_dirs], dim=-1) + + vertices, joints = lbs( + shape_components, + full_pose, + self.v_template, + shapedirs, + self.posedirs, + self.J_regressor, + self.parents, + self.lbs_weights, + pose2rot=False, + ) + + lmk_faces_idx = ( + self.lmk_faces_idx.unsqueeze(dim=0).expand(batch_size, -1).contiguous() + ) + lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).repeat(batch_size, 1, 1) + if self.use_face_contour: + lmk_idx_and_bcoords = find_dynamic_lmk_idx_and_bcoords( + vertices, + full_pose, + self.dynamic_lmk_faces_idx, + self.dynamic_lmk_bary_coords, + self.neck_kin_chain, + pose2rot=False, + ) + dyn_lmk_faces_idx, dyn_lmk_bary_coords = lmk_idx_and_bcoords + + lmk_faces_idx = torch.cat([lmk_faces_idx, dyn_lmk_faces_idx], 1) + lmk_bary_coords = torch.cat( + [lmk_bary_coords.expand(batch_size, -1, -1), dyn_lmk_bary_coords], 1 + ) + + landmarks = vertices2landmarks( + vertices, self.faces_tensor, lmk_faces_idx, lmk_bary_coords + ) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + # Add the 
landmarks to the joints + joints = torch.cat([joints, landmarks], dim=1) + # Map the joints to the current dataset + + if self.joint_mapper is not None: + joints = self.joint_mapper(joints=joints, vertices=vertices) + + if transl is not None: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = TensorOutput( + vertices=vertices if return_verts else Tensor(0), + joints=joints, + betas=betas, + expression=expression, + global_orient=global_orient, + body_pose=body_pose, + left_hand_pose=left_hand_pose, + right_hand_pose=right_hand_pose, + jaw_pose=jaw_pose, + transl=transl if transl != None else Tensor(0), + full_pose=full_pose if return_full_pose else Tensor(0), + ) + + return output + + +class MANO(SMPL): + # The hand joints are replaced by MANO + NUM_BODY_JOINTS = 1 + NUM_HAND_JOINTS = 15 + NUM_JOINTS = NUM_BODY_JOINTS + NUM_HAND_JOINTS + + def __init__( + self, + model_path: str, + is_rhand: bool = True, + data_struct: Optional[Struct] = None, + create_hand_pose: bool = True, + hand_pose: Optional[Tensor] = None, + use_pca: bool = True, + num_pca_comps: int = 6, + flat_hand_mean: bool = False, + batch_size: int = 1, + dtype=torch.float32, + vertex_ids=None, + use_compressed: bool = True, + ext: str = "pkl", + **kwargs, + ) -> None: + """MANO model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + data_struct: Strct + A struct object. If given, then the parameters of the model are + read from the object. Otherwise, the model tries to read the + parameters from the given `model_path`. (default = None) + create_hand_pose: bool, optional + Flag for creating a member variable for the pose of the right + hand. (default = True) + hand_pose: torch.tensor, optional, BxP + The default value for the right hand pose member variable. + (default = None) + num_pca_comps: int, optional + The number of PCA components to use for each hand. + (default = 6) + flat_hand_mean: bool, optional + If False, then the pose of the hand is initialized to False. 
+ batch_size: int, optional + The batch size used for creating the member variables + dtype: torch.dtype, optional + The data type for the created variables + vertex_ids: dict, optional + A dictionary containing the indices of the extra vertices that + will be selected + """ + + self.num_pca_comps = num_pca_comps + self.is_rhand = is_rhand + # If no data structure is passed, then load the data from the given + # model folder + if data_struct is None: + # Load the model + if osp.isdir(model_path): + model_fn = "MANO_{}.{ext}".format( + "RIGHT" if is_rhand else "LEFT", ext=ext + ) + mano_path = os.path.join(model_path, model_fn) + else: + mano_path = model_path + self.is_rhand = ( + True if "RIGHT" in os.path.basename(model_path) else False + ) + assert osp.exists(mano_path), "Path {} does not exist!".format(mano_path) + + if ext == "pkl": + with open(mano_path, "rb") as mano_file: + model_data = pickle.load(mano_file, encoding="latin1") + elif ext == "npz": + model_data = np.load(mano_path, allow_pickle=True) + else: + raise ValueError("Unknown extension: {}".format(ext)) + data_struct = Struct(**model_data) + + if vertex_ids is None: + vertex_ids = VERTEX_IDS["smplh"] + + super(MANO, self).__init__( + model_path=model_path, + data_struct=data_struct, + batch_size=batch_size, + vertex_ids=vertex_ids, + use_compressed=use_compressed, + dtype=dtype, + ext=ext, + **kwargs, + ) + + # add only MANO tips to the extra joints + self.vertex_joint_selector.extra_joints_idxs = to_tensor( + list(VERTEX_IDS["mano"].values()), dtype=torch.long + ) + + self.use_pca = use_pca + self.num_pca_comps = num_pca_comps + if self.num_pca_comps == 45: + self.use_pca = False + self.flat_hand_mean = flat_hand_mean + + hand_components = data_struct.hands_components[:num_pca_comps] + + self.np_hand_components = hand_components + + if self.use_pca: + self.register_buffer( + "hand_components", torch.tensor(hand_components, dtype=dtype) + ) + + if self.flat_hand_mean: + hand_mean = np.zeros_like(data_struct.hands_mean) + else: + hand_mean = data_struct.hands_mean + + self.register_buffer("hand_mean", to_tensor(hand_mean, dtype=self.dtype)) + + # Create the buffers for the pose of the left hand + hand_pose_dim = num_pca_comps if use_pca else 3 * self.NUM_HAND_JOINTS + if create_hand_pose: + if hand_pose is None: + default_hand_pose = torch.zeros( + [batch_size, hand_pose_dim], dtype=dtype + ) + else: + default_hand_pose = torch.tensor(hand_pose, dtype=dtype) + + hand_pose_param = nn.Parameter(default_hand_pose, requires_grad=True) + self.register_parameter("hand_pose", hand_pose_param) + + # Create the buffer for the mean pose. + pose_mean = self.create_mean_pose(data_struct, flat_hand_mean=flat_hand_mean) + pose_mean_tensor = pose_mean.clone().to(dtype) + # pose_mean_tensor = torch.tensor(pose_mean, dtype=dtype) + self.register_buffer("pose_mean", pose_mean_tensor) + + def name(self) -> str: + return "MANO" + + def create_mean_pose(self, data_struct, flat_hand_mean=False): + # Create the array for the mean pose. 
If flat_hand is false, then use + # the mean that is given by the data, rather than the flat open hand + global_orient_mean = torch.zeros([3], dtype=self.dtype) + pose_mean = torch.cat([global_orient_mean, self.hand_mean], dim=0) + return pose_mean + + def extra_repr(self): + msg = [super(MANO, self).extra_repr()] + if self.use_pca: + msg.append(f"Number of PCA components: {self.num_pca_comps}") + msg.append(f"Flat hand mean: {self.flat_hand_mean}") + return "\n".join(msg) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + **kwargs, + ) -> MANOOutput: + """Forward pass for the MANO model""" + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = ( + global_orient if global_orient is not None else self.global_orient + ) + betas = betas if betas is not None else self.betas + hand_pose = hand_pose if hand_pose is not None else self.hand_pose + + apply_trans = transl is not None or hasattr(self, "transl") + if transl is None: + if hasattr(self, "transl"): + transl = self.transl + + if self.use_pca: + hand_pose = torch.einsum("bi,ij->bj", [hand_pose, self.hand_components]) + + full_pose = torch.cat([global_orient, hand_pose], dim=1) + full_pose += self.pose_mean + + vertices, joints = lbs( + betas, + full_pose, + self.v_template, + self.shapedirs, + self.posedirs, + self.J_regressor, + self.parents, + self.lbs_weights, + pose2rot=True, + ) + + # # Add pre-selected extra joints that might be needed + # joints = self.vertex_joint_selector(vertices, joints) + + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if apply_trans: + joints = joints + transl.unsqueeze(dim=1) + vertices = vertices + transl.unsqueeze(dim=1) + + output = MANOOutput( + vertices=vertices if return_verts else None, + joints=joints if return_verts else None, + betas=betas, + global_orient=global_orient, + hand_pose=hand_pose, + full_pose=full_pose if return_full_pose else None, + ) + + return output + + +class MANOLayer(MANO): + def __init__(self, *args, **kwargs) -> None: + """MANO as a layer model constructor""" + super(MANOLayer, self).__init__( + create_global_orient=False, + create_hand_pose=False, + create_betas=False, + create_transl=False, + *args, + **kwargs, + ) + + def name(self) -> str: + return "MANO" + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + **kwargs, + ) -> MANOOutput: + """Forward pass for the MANO model""" + device, dtype = self.shapedirs.device, self.shapedirs.dtype + if global_orient is None: + batch_size = 1 + global_orient = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + else: + batch_size = global_orient.shape[0] + if hand_pose is None: + hand_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, 15, -1, -1) + .contiguous() + ) + if betas is None: + betas = torch.zeros( + [batch_size, self.num_betas], dtype=dtype, device=device + ) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + + full_pose = torch.cat([global_orient, hand_pose], dim=1) + vertices, joints = lbs( + betas, + full_pose, + 
self.v_template, + self.shapedirs, + self.posedirs, + self.J_regressor, + self.parents, + self.lbs_weights, + pose2rot=False, + ) + + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if transl is not None: + joints = joints + transl.unsqueeze(dim=1) + vertices = vertices + transl.unsqueeze(dim=1) + + output = MANOOutput( + vertices=vertices if return_verts else None, + joints=joints if return_verts else None, + betas=betas, + global_orient=global_orient, + hand_pose=hand_pose, + full_pose=full_pose if return_full_pose else None, + ) + + return output + + +class FLAME(SMPL): + NUM_JOINTS = 5 + SHAPE_SPACE_DIM = 300 + EXPRESSION_SPACE_DIM = 100 + NECK_IDX = 0 + + def __init__( + self, + model_path: str, + data_struct=None, + num_expression_coeffs=10, + create_expression: bool = True, + expression: Optional[Tensor] = None, + create_neck_pose: bool = True, + neck_pose: Optional[Tensor] = None, + create_jaw_pose: bool = True, + jaw_pose: Optional[Tensor] = None, + create_leye_pose: bool = True, + leye_pose: Optional[Tensor] = None, + create_reye_pose=True, + reye_pose: Optional[Tensor] = None, + use_face_contour=False, + batch_size: int = 1, + gender: str = "neutral", + dtype: torch.dtype = torch.float32, + ext="pkl", + **kwargs, + ) -> None: + """FLAME model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + num_expression_coeffs: int, optional + Number of expression components to use + (default = 10). + create_expression: bool, optional + Flag for creating a member variable for the expression space + (default = True). + expression: torch.tensor, optional, Bx10 + The default value for the expression member variable. + (default = None) + create_neck_pose: bool, optional + Flag for creating a member variable for the neck pose. + (default = False) + neck_pose: torch.tensor, optional, Bx3 + The default value for the neck pose variable. + (default = None) + create_jaw_pose: bool, optional + Flag for creating a member variable for the jaw pose. + (default = False) + jaw_pose: torch.tensor, optional, Bx3 + The default value for the jaw pose variable. + (default = None) + create_leye_pose: bool, optional + Flag for creating a member variable for the left eye pose. + (default = False) + leye_pose: torch.tensor, optional, Bx10 + The default value for the left eye pose variable. + (default = None) + create_reye_pose: bool, optional + Flag for creating a member variable for the right eye pose. + (default = False) + reye_pose: torch.tensor, optional, Bx10 + The default value for the right eye pose variable. 
+ (default = None) + use_face_contour: bool, optional + Whether to compute the keypoints that form the facial contour + batch_size: int, optional + The batch size used for creating the member variables + gender: str, optional + Which gender to load + dtype: torch.dtype + The data type for the created variables + """ + model_fn = f"FLAME_{gender.upper()}.{ext}" + flame_path = os.path.join(model_path, model_fn) + assert osp.exists(flame_path), "Path {} does not exist!".format(flame_path) + if ext == "npz": + file_data = np.load(flame_path, allow_pickle=True) + elif ext == "pkl": + with open(flame_path, "rb") as smpl_file: + file_data = pickle.load(smpl_file, encoding="latin1") + else: + raise ValueError("Unknown extension: {}".format(ext)) + data_struct = Struct(**file_data) + + super(FLAME, self).__init__( + model_path=model_path, + data_struct=data_struct, + dtype=dtype, + batch_size=batch_size, + gender=gender, + ext=ext, + **kwargs, + ) + + self.use_face_contour = use_face_contour + + self.vertex_joint_selector.extra_joints_idxs = to_tensor([], dtype=torch.long) + + if create_neck_pose: + if neck_pose is None: + default_neck_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_neck_pose = torch.tensor(neck_pose, dtype=dtype) + neck_pose_param = nn.Parameter(default_neck_pose, requires_grad=True) + self.register_parameter("neck_pose", neck_pose_param) + + if create_jaw_pose: + if jaw_pose is None: + default_jaw_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_jaw_pose = torch.tensor(jaw_pose, dtype=dtype) + jaw_pose_param = nn.Parameter(default_jaw_pose, requires_grad=True) + self.register_parameter("jaw_pose", jaw_pose_param) + + if create_leye_pose: + if leye_pose is None: + default_leye_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_leye_pose = torch.tensor(leye_pose, dtype=dtype) + leye_pose_param = nn.Parameter(default_leye_pose, requires_grad=True) + self.register_parameter("leye_pose", leye_pose_param) + + if create_reye_pose: + if reye_pose is None: + default_reye_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_reye_pose = torch.tensor(reye_pose, dtype=dtype) + reye_pose_param = nn.Parameter(default_reye_pose, requires_grad=True) + self.register_parameter("reye_pose", reye_pose_param) + + shapedirs = data_struct.shapedirs + if len(shapedirs.shape) < 3: + shapedirs = shapedirs[:, :, None] + if shapedirs.shape[-1] < self.SHAPE_SPACE_DIM + self.EXPRESSION_SPACE_DIM: + print( + f"WARNING: You are using a {self.name()} model, with only" + " 10 shape and 10 expression coefficients." 
+ ) + expr_start_idx = 10 + expr_end_idx = 20 + num_expression_coeffs = min(num_expression_coeffs, 10) + else: + expr_start_idx = self.SHAPE_SPACE_DIM + expr_end_idx = self.SHAPE_SPACE_DIM + num_expression_coeffs + num_expression_coeffs = min( + num_expression_coeffs, self.EXPRESSION_SPACE_DIM + ) + + self._num_expression_coeffs = num_expression_coeffs + + expr_dirs = shapedirs[:, :, expr_start_idx:expr_end_idx] + self.register_buffer("expr_dirs", to_tensor(to_np(expr_dirs), dtype=dtype)) + + if create_expression: + if expression is None: + default_expression = torch.zeros( + [batch_size, self.num_expression_coeffs], dtype=dtype + ) + else: + default_expression = torch.tensor(expression, dtype=dtype) + expression_param = nn.Parameter(default_expression, requires_grad=True) + self.register_parameter("expression", expression_param) + + # The pickle file that contains the barycentric coordinates for + # regressing the landmarks + landmark_bcoord_filename = osp.join(model_path, "flame_static_embedding.pkl") + + with open(landmark_bcoord_filename, "rb") as fp: + landmarks_data = pickle.load(fp, encoding="latin1") + + lmk_faces_idx = landmarks_data["lmk_face_idx"].astype(np.int64) + self.register_buffer( + "lmk_faces_idx", torch.tensor(lmk_faces_idx, dtype=torch.long) + ) + lmk_bary_coords = landmarks_data["lmk_b_coords"] + self.register_buffer( + "lmk_bary_coords", torch.tensor(lmk_bary_coords, dtype=dtype) + ) + if self.use_face_contour: + face_contour_path = os.path.join(model_path, "flame_dynamic_embedding.npy") + contour_embeddings = np.load( + face_contour_path, allow_pickle=True, encoding="latin1" + )[()] + + dynamic_lmk_faces_idx = np.array( + contour_embeddings["lmk_face_idx"], dtype=np.int64 + ) + dynamic_lmk_faces_idx = torch.tensor( + dynamic_lmk_faces_idx, dtype=torch.long + ) + self.register_buffer("dynamic_lmk_faces_idx", dynamic_lmk_faces_idx) + + dynamic_lmk_b_coords = torch.tensor( + contour_embeddings["lmk_b_coords"], dtype=dtype + ) + self.register_buffer("dynamic_lmk_bary_coords", dynamic_lmk_b_coords) + + neck_kin_chain = find_joint_kin_chain(self.NECK_IDX, self.parents) + self.register_buffer( + "neck_kin_chain", torch.tensor(neck_kin_chain, dtype=torch.long) + ) + + @property + def num_expression_coeffs(self): + return self._num_expression_coeffs + + def name(self) -> str: + return "FLAME" + + def extra_repr(self): + msg = [ + super(FLAME, self).extra_repr(), + f"Number of Expression Coefficients: {self.num_expression_coeffs}", + f"Use face contour: {self.use_face_contour}", + ] + return "\n".join(msg) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + neck_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + expression: Optional[Tensor] = None, + jaw_pose: Optional[Tensor] = None, + leye_pose: Optional[Tensor] = None, + reye_pose: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs, + ) -> FLAMEOutput: + """ + Forward pass for the SMPLX model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3 + If given, ignore the member variable and use it as the global + rotation of the body. Useful if someone wishes to predicts this + with an external model. (default=None) + betas: torch.tensor, optional, shape Bx10 + If given, ignore the member variable `betas` and use it + instead. For example, it can used if shape parameters + `betas` are predicted from some external model. 
+ (default=None) + expression: torch.tensor, optional, shape Bx10 + If given, ignore the member variable `expression` and use it + instead. For example, it can used if expression parameters + `expression` are predicted from some external model. + jaw_pose: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `jaw_pose` and + use this instead. It should either joint rotations in + axis-angle format. + jaw_pose: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `jaw_pose` and + use this instead. It should either joint rotations in + axis-angle format. + transl: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `transl` and use it + instead. For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. (default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + output: ModelOutput + A named tuple of type `ModelOutput` + """ + + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = ( + global_orient if global_orient is not None else self.global_orient + ) + jaw_pose = jaw_pose if jaw_pose is not None else self.jaw_pose + neck_pose = neck_pose if neck_pose is not None else self.neck_pose + + leye_pose = leye_pose if leye_pose is not None else self.leye_pose + reye_pose = reye_pose if reye_pose is not None else self.reye_pose + + betas = betas if betas is not None else self.betas + expression = expression if expression is not None else self.expression + + apply_trans = transl is not None or hasattr(self, "transl") + if transl is None: + if hasattr(self, "transl"): + transl = self.transl + + full_pose = torch.cat( + [global_orient, neck_pose, jaw_pose, leye_pose, reye_pose], dim=1 + ) + + batch_size = max(betas.shape[0], global_orient.shape[0], jaw_pose.shape[0]) + # Concatenate the shape and expression coefficients + scale = int(batch_size / betas.shape[0]) + if scale > 1: + betas = betas.expand(scale, -1) + expression = expression.expand(scale, -1) + shape_components = torch.cat([betas, expression], dim=-1) + shapedirs = torch.cat([self.shapedirs, self.expr_dirs], dim=-1) + + vertices, joints = lbs( + shape_components, + full_pose, + self.v_template, + shapedirs, + self.posedirs, + self.J_regressor, + self.parents, + self.lbs_weights, + pose2rot=pose2rot, + ) + + lmk_faces_idx = ( + self.lmk_faces_idx.unsqueeze(dim=0).expand(batch_size, -1).contiguous() + ) + lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).repeat(batch_size, 1, 1) + if self.use_face_contour: + lmk_idx_and_bcoords = find_dynamic_lmk_idx_and_bcoords( + vertices, + full_pose, + self.dynamic_lmk_faces_idx, + self.dynamic_lmk_bary_coords, + self.neck_kin_chain, + pose2rot=True, + ) + dyn_lmk_faces_idx, dyn_lmk_bary_coords = lmk_idx_and_bcoords + lmk_faces_idx = torch.cat([lmk_faces_idx, dyn_lmk_faces_idx], 1) + lmk_bary_coords = torch.cat( + [lmk_bary_coords.expand(batch_size, -1, -1), dyn_lmk_bary_coords], 1 + ) + + landmarks = vertices2landmarks( + vertices, self.faces_tensor, lmk_faces_idx, lmk_bary_coords + ) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + # Add the landmarks to the joints + joints = torch.cat([joints, landmarks], dim=1) + + # Map the joints to the current dataset + if self.joint_mapper is not None: + joints = self.joint_mapper(joints=joints, 
vertices=vertices) + + if apply_trans: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = FLAMEOutput( + vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + expression=expression, + global_orient=global_orient, + neck_pose=neck_pose, + jaw_pose=jaw_pose, + full_pose=full_pose if return_full_pose else None, + ) + return output + + +class FLAMELayer(FLAME): + def __init__(self, *args, **kwargs) -> None: + """FLAME as a layer model constructor""" + super(FLAMELayer, self).__init__( + create_betas=False, + create_expression=False, + create_global_orient=False, + create_neck_pose=False, + create_jaw_pose=False, + create_leye_pose=False, + create_reye_pose=False, + *args, + **kwargs, + ) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + neck_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + expression: Optional[Tensor] = None, + jaw_pose: Optional[Tensor] = None, + leye_pose: Optional[Tensor] = None, + reye_pose: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs, + ) -> FLAMEOutput: + """ + Forward pass for the SMPLX model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3x3 + Global rotation of the body. Useful if someone wishes to + predicts this with an external model. It is expected to be in + rotation matrix format. (default=None) + betas: torch.tensor, optional, shape BxN_b + Shape parameters. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + expression: torch.tensor, optional, shape BxN_e + If given, ignore the member variable `expression` and use it + instead. For example, it can used if expression parameters + `expression` are predicted from some external model. + jaw_pose: torch.tensor, optional, shape Bx3x3 + Jaw pose. It should either joint rotations in + rotation matrix format. + transl: torch.tensor, optional, shape Bx3 + Translation vector of the body. + For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. 
(default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + output: ModelOutput + A named tuple of type `ModelOutput` + """ + device, dtype = self.shapedirs.device, self.shapedirs.dtype + if global_orient is None: + batch_size = 1 + global_orient = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + else: + batch_size = global_orient.shape[0] + if neck_pose is None: + neck_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, 1, -1, -1) + .contiguous() + ) + if jaw_pose is None: + jaw_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + if leye_pose is None: + leye_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + if reye_pose is None: + reye_pose = ( + torch.eye(3, device=device, dtype=dtype) + .view(1, 1, 3, 3) + .expand(batch_size, -1, -1, -1) + .contiguous() + ) + if betas is None: + betas = torch.zeros( + [batch_size, self.num_betas], dtype=dtype, device=device + ) + if expression is None: + expression = torch.zeros( + [batch_size, self.num_expression_coeffs], dtype=dtype, device=device + ) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + + full_pose = torch.cat( + [global_orient, neck_pose, jaw_pose, leye_pose, reye_pose], dim=1 + ) + + shape_components = torch.cat([betas, expression], dim=-1) + shapedirs = torch.cat([self.shapedirs, self.expr_dirs], dim=-1) + + vertices, joints = lbs( + shape_components, + full_pose, + self.v_template, + shapedirs, + self.posedirs, + self.J_regressor, + self.parents, + self.lbs_weights, + pose2rot=False, + ) + + lmk_faces_idx = ( + self.lmk_faces_idx.unsqueeze(dim=0).expand(batch_size, -1).contiguous() + ) + lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).repeat(batch_size, 1, 1) + if self.use_face_contour: + lmk_idx_and_bcoords = find_dynamic_lmk_idx_and_bcoords( + vertices, + full_pose, + self.dynamic_lmk_faces_idx, + self.dynamic_lmk_bary_coords, + self.neck_kin_chain, + pose2rot=False, + ) + dyn_lmk_faces_idx, dyn_lmk_bary_coords = lmk_idx_and_bcoords + lmk_faces_idx = torch.cat([lmk_faces_idx, dyn_lmk_faces_idx], 1) + lmk_bary_coords = torch.cat( + [lmk_bary_coords.expand(batch_size, -1, -1), dyn_lmk_bary_coords], 1 + ) + + landmarks = vertices2landmarks( + vertices, self.faces_tensor, lmk_faces_idx, lmk_bary_coords + ) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + # Add the landmarks to the joints + joints = torch.cat([joints, landmarks], dim=1) + + # Map the joints to the current dataset + if self.joint_mapper is not None: + joints = self.joint_mapper(joints=joints, vertices=vertices) + + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = FLAMEOutput( + vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + expression=expression, + global_orient=global_orient, + neck_pose=neck_pose, + jaw_pose=jaw_pose, + full_pose=full_pose if return_full_pose else None, + ) + return output + + +def build_layer( + model_path: str, model_type: str = "smpl", **kwargs +) -> Union[SMPLLayer, SMPLHLayer, SMPLXLayer, MANOLayer, FLAMELayer]: + """Method for creating a model from a path and a model type + + Parameters + ---------- + model_path: str + Either the path to the 
model you wish to load or a folder, + where each subfolder contains the differents types, i.e.: + model_path: + | + |-- smpl + |-- SMPL_FEMALE + |-- SMPL_NEUTRAL + |-- SMPL_MALE + |-- smplh + |-- SMPLH_FEMALE + |-- SMPLH_MALE + |-- smplx + |-- SMPLX_FEMALE + |-- SMPLX_NEUTRAL + |-- SMPLX_MALE + |-- mano + |-- MANO RIGHT + |-- MANO LEFT + |-- flame + |-- FLAME_FEMALE + |-- FLAME_MALE + |-- FLAME_NEUTRAL + + model_type: str, optional + When model_path is a folder, then this parameter specifies the + type of model to be loaded + **kwargs: dict + Keyword arguments + + Returns + ------- + body_model: nn.Module + The PyTorch module that implements the corresponding body model + Raises + ------ + ValueError: In case the model type is not one of SMPL, SMPLH, + SMPLX, MANO or FLAME + """ + + if osp.isdir(model_path): + model_path = os.path.join(model_path, model_type) + else: + model_type = osp.basename(model_path).split("_")[0].lower() + + if model_type.lower() == "smpl": + return SMPLLayer(model_path, **kwargs) + elif model_type.lower() == "smplh": + return SMPLHLayer(model_path, **kwargs) + elif model_type.lower() == "smplx": + return SMPLXLayer(model_path, **kwargs) + elif "mano" in model_type.lower(): + return MANOLayer(model_path, **kwargs) + elif "flame" in model_type.lower(): + return FLAMELayer(model_path, **kwargs) + else: + raise ValueError(f"Unknown model type {model_type}, exiting!") + + +def create( + model_path: str, model_type: str = "smpl", **kwargs +) -> Union[SMPL, SMPLH, SMPLX, MANO, FLAME]: + """Method for creating a model from a path and a model type + + Parameters + ---------- + model_path: str + Either the path to the model you wish to load or a folder, + where each subfolder contains the differents types, i.e.: + model_path: + | + |-- smpl + |-- SMPL_FEMALE + |-- SMPL_NEUTRAL + |-- SMPL_MALE + |-- smplh + |-- SMPLH_FEMALE + |-- SMPLH_MALE + |-- smplx + |-- SMPLX_FEMALE + |-- SMPLX_NEUTRAL + |-- SMPLX_MALE + |-- mano + |-- MANO RIGHT + |-- MANO LEFT + + model_type: str, optional + When model_path is a folder, then this parameter specifies the + type of model to be loaded + **kwargs: dict + Keyword arguments + + Returns + ------- + body_model: nn.Module + The PyTorch module that implements the corresponding body model + Raises + ------ + ValueError: In case the model type is not one of SMPL, SMPLH, + SMPLX, MANO or FLAME + """ + + # If it's a folder, assume + if osp.isdir(model_path): + model_path = os.path.join(model_path, model_type) + else: + model_type = osp.basename(model_path).split("_")[0].lower() + + if model_type.lower() == "smpl": + return SMPL(model_path, **kwargs) + elif model_type.lower() == "smplh": + return SMPLH(model_path, **kwargs) + elif model_type.lower() == "smplx": + return SMPLX(model_path, **kwargs) + elif "mano" in model_type.lower(): + return MANO(model_path, **kwargs) + elif "flame" in model_type.lower(): + return FLAME(model_path, **kwargs) + else: + raise ValueError(f"Unknown model type {model_type}, exiting!") diff --git a/LHM/models/rendering/smplx/smplx/joint_names.py b/LHM/models/rendering/smplx/smplx/joint_names.py new file mode 100644 index 0000000000000000000000000000000000000000..fcdb2e131850592c1c5ff7bb9eaf36841d21bcd6 --- /dev/null +++ b/LHM/models/rendering/smplx/smplx/joint_names.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. 
+# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +import numpy as np + +JOINT_NAMES = [ + "pelvis", + "left_hip", + "right_hip", + "spine1", + "left_knee", + "right_knee", + "spine2", + "left_ankle", + "right_ankle", + "spine3", + "left_foot", + "right_foot", + "neck", + "left_collar", + "right_collar", + "head", + "left_shoulder", + "right_shoulder", + "left_elbow", + "right_elbow", + "left_wrist", + "right_wrist", + "jaw", + "left_eye_smplhf", + "right_eye_smplhf", + "left_index1", + "left_index2", + "left_index3", + "left_middle1", + "left_middle2", + "left_middle3", + "left_pinky1", + "left_pinky2", + "left_pinky3", + "left_ring1", + "left_ring2", + "left_ring3", + "left_thumb1", + "left_thumb2", + "left_thumb3", + "right_index1", + "right_index2", + "right_index3", + "right_middle1", + "right_middle2", + "right_middle3", + "right_pinky1", + "right_pinky2", + "right_pinky3", + "right_ring1", + "right_ring2", + "right_ring3", + "right_thumb1", + "right_thumb2", + "right_thumb3", + "nose", + "right_eye", + "left_eye", + "right_ear", + "left_ear", + "left_big_toe", + "left_small_toe", + "left_heel", + "right_big_toe", + "right_small_toe", + "right_heel", + "left_thumb", + "left_index", + "left_middle", + "left_ring", + "left_pinky", + "right_thumb", + "right_index", + "right_middle", + "right_ring", + "right_pinky", + "right_eye_brow1", + "right_eye_brow2", + "right_eye_brow3", + "right_eye_brow4", + "right_eye_brow5", + "left_eye_brow5", + "left_eye_brow4", + "left_eye_brow3", + "left_eye_brow2", + "left_eye_brow1", + "nose1", + "nose2", + "nose3", + "nose4", + "right_nose_2", + "right_nose_1", + "nose_middle", + "left_nose_1", + "left_nose_2", + "right_eye1", + "right_eye2", + "right_eye3", + "right_eye4", + "right_eye5", + "right_eye6", + "left_eye4", + "left_eye3", + "left_eye2", + "left_eye1", + "left_eye6", + "left_eye5", + "right_mouth_1", + "right_mouth_2", + "right_mouth_3", + "mouth_top", + "left_mouth_3", + "left_mouth_2", + "left_mouth_1", + "left_mouth_5", # 59 in OpenPose output + "left_mouth_4", # 58 in OpenPose output + "mouth_bottom", + "right_mouth_4", + "right_mouth_5", + "right_lip_1", + "right_lip_2", + "lip_top", + "left_lip_2", + "left_lip_1", + "left_lip_3", + "lip_bottom", + "right_lip_3", + # Face contour + "right_contour_1", + "right_contour_2", + "right_contour_3", + "right_contour_4", + "right_contour_5", + "right_contour_6", + "right_contour_7", + "right_contour_8", + "contour_middle", + "left_contour_8", + "left_contour_7", + "left_contour_6", + "left_contour_5", + "left_contour_4", + "left_contour_3", + "left_contour_2", + "left_contour_1", +] + + +SMPLH_JOINT_NAMES = [ + "pelvis", + "left_hip", + "right_hip", + "spine1", + "left_knee", + "right_knee", + "spine2", + "left_ankle", + "right_ankle", + "spine3", + "left_foot", + "right_foot", + "neck", + "left_collar", + "right_collar", + "head", + "left_shoulder", + "right_shoulder", + "left_elbow", + "right_elbow", + "left_wrist", + "right_wrist", + "left_index1", + "left_index2", + "left_index3", + "left_middle1", + 
"left_middle2", + "left_middle3", + "left_pinky1", + "left_pinky2", + "left_pinky3", + "left_ring1", + "left_ring2", + "left_ring3", + "left_thumb1", + "left_thumb2", + "left_thumb3", + "right_index1", + "right_index2", + "right_index3", + "right_middle1", + "right_middle2", + "right_middle3", + "right_pinky1", + "right_pinky2", + "right_pinky3", + "right_ring1", + "right_ring2", + "right_ring3", + "right_thumb1", + "right_thumb2", + "right_thumb3", + "nose", + "right_eye", + "left_eye", + "right_ear", + "left_ear", + "left_big_toe", + "left_small_toe", + "left_heel", + "right_big_toe", + "right_small_toe", + "right_heel", + "left_thumb", + "left_index", + "left_middle", + "left_ring", + "left_pinky", + "right_thumb", + "right_index", + "right_middle", + "right_ring", + "right_pinky", +] + +SMPL_JOINT_NAMES = [ + "pelvis", + "left_hip", + "right_hip", + "spine1", + "left_knee", + "right_knee", + "spine2", + "left_ankle", + "right_ankle", + "spine3", + "left_foot", + "right_foot", + "neck", + "left_collar", + "right_collar", + "head", + "left_shoulder", + "right_shoulder", + "left_elbow", + "right_elbow", + "left_wrist", + "right_wrist", + "left_hand", + "right_hand", +] + + +class Body: + """ + Class for storing a single body pose. + """ + + def __init__(self, joints, joint_names): + assert joints.ndim > 1 + assert joints.shape[0] == len(joint_names) + self.joints = {} + for i, j in enumerate(joint_names): + self.joints[j] = joints[i] + + @staticmethod + def from_smpl(joints): + """ + Create a Body object from SMPL joints. + """ + return Body(joints, SMPL_JOINT_NAMES) + + @staticmethod + def from_smplh(joints): + """ + Create a Body object from SMPLH joints. + """ + return Body(joints, SMPLH_JOINT_NAMES) + + def _as(self, joint_names): + """ + Return a Body object with the specified joint names. + """ + joint_list = [] + for j in joint_names: + if j not in self.joints: + joint_list.append(np.zeros_like(self.joints["spine1"])) + else: + joint_list.append(self.joints[j]) + return np.stack(joint_list, axis=0) + + def as_smpl(self): + """ + Convert the body to SMPL joints. + """ + return self._as(SMPL_JOINT_NAMES) + + def as_smplh(self): + """ + Convert the body to SMPLH joints. + """ + return self._as(SMPLH_JOINT_NAMES) diff --git a/LHM/models/rendering/smplx/smplx/lbs.py b/LHM/models/rendering/smplx/smplx/lbs.py new file mode 100644 index 0000000000000000000000000000000000000000..1f528e63498b1be2df3b6df4a712e190b8d3b582 --- /dev/null +++ b/LHM/models/rendering/smplx/smplx/lbs.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import absolute_import, division, print_function + +import pdb +from typing import List, Tuple + +import numpy as np +import torch +import torch.nn.functional as F + +from .utils import Tensor, rot_mat_to_euler + + +def find_dynamic_lmk_idx_and_bcoords( + vertices: Tensor, + pose: Tensor, + dynamic_lmk_faces_idx: Tensor, + dynamic_lmk_b_coords: Tensor, + neck_kin_chain: List[int], + pose2rot: bool = True, +) -> Tuple[Tensor, Tensor]: + """Compute the faces, barycentric coordinates for the dynamic landmarks + + + To do so, we first compute the rotation of the neck around the y-axis + and then use a pre-computed look-up table to find the faces and the + barycentric coordinates that will be used. + + Special thanks to Soubhik Sanyal (soubhik.sanyal@tuebingen.mpg.de) + for providing the original TensorFlow implementation and for the LUT. + + Parameters + ---------- + vertices: torch.tensor BxVx3, dtype = torch.float32 + The tensor of input vertices + pose: torch.tensor Bx(Jx3), dtype = torch.float32 + The current pose of the body model + dynamic_lmk_faces_idx: torch.tensor L, dtype = torch.long + The look-up table from neck rotation to faces + dynamic_lmk_b_coords: torch.tensor Lx3, dtype = torch.float32 + The look-up table from neck rotation to barycentric coordinates + neck_kin_chain: list + A python list that contains the indices of the joints that form the + kinematic chain of the neck. + dtype: torch.dtype, optional + + Returns + ------- + dyn_lmk_faces_idx: torch.tensor, dtype = torch.long + A tensor of size BxL that contains the indices of the faces that + will be used to compute the current dynamic landmarks. + dyn_lmk_b_coords: torch.tensor, dtype = torch.float32 + A tensor of size BxL that contains the indices of the faces that + will be used to compute the current dynamic landmarks. + """ + + dtype = vertices.dtype + batch_size = vertices.shape[0] + + if pose2rot: + aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1, neck_kin_chain) + rot_mats = batch_rodrigues(aa_pose.view(-1, 3)).view(batch_size, -1, 3, 3) + else: + rot_mats = torch.index_select( + pose.view(batch_size, -1, 3, 3), 1, neck_kin_chain + ) + + rel_rot_mat = ( + torch.eye(3, device=vertices.device, dtype=dtype) + .unsqueeze_(dim=0) + .repeat(batch_size, 1, 1) + ) + for idx in range(len(neck_kin_chain)): + rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat) + + y_rot_angle = torch.round( + torch.clamp(-rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi, max=39) + ).to(dtype=torch.long) + neg_mask = y_rot_angle.lt(0).to(dtype=torch.long) + mask = y_rot_angle.lt(-39).to(dtype=torch.long) + neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle) + y_rot_angle = neg_mask * neg_vals + (1 - neg_mask) * y_rot_angle + + dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx, 0, y_rot_angle) + dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords, 0, y_rot_angle) + + return dyn_lmk_faces_idx, dyn_lmk_b_coords + + +def vertices2landmarks( + vertices: Tensor, faces: Tensor, lmk_faces_idx: Tensor, lmk_bary_coords: Tensor +) -> Tensor: + """Calculates landmarks by barycentric interpolation + + Parameters + ---------- + vertices: torch.tensor BxVx3, dtype = torch.float32 + The tensor of input vertices + faces: torch.tensor Fx3, dtype = torch.long + The faces of the mesh + lmk_faces_idx: torch.tensor L, dtype = torch.long + The tensor with the indices of the faces used to calculate the + landmarks. 
+ lmk_bary_coords: torch.tensor Lx3, dtype = torch.float32 + The tensor of barycentric coordinates that are used to interpolate + the landmarks + + Returns + ------- + landmarks: torch.tensor BxLx3, dtype = torch.float32 + The coordinates of the landmarks for each mesh in the batch + """ + # Extract the indices of the vertices for each face + # BxLx3 + batch_size, num_verts = vertices.shape[:2] + device = vertices.device + + lmk_faces = torch.index_select( + faces, 0, lmk_faces_idx.view(-1).to(torch.long) + ).view(batch_size, -1, 3) + # The '.to(torch.long)'. + # added to make the trace work in c++, + # otherwise you get a runtime error in c++: + # 'index_select(): Expected dtype int32 or int64 for index' + + lmk_faces += ( + torch.arange(batch_size, dtype=torch.long, device=device).view(-1, 1, 1) + * num_verts + ) + + lmk_vertices = vertices.view(-1, 3)[lmk_faces].view(batch_size, -1, 3, 3) + + landmarks = torch.einsum("blfi,blf->bli", [lmk_vertices, lmk_bary_coords]) + return landmarks + + +def lbs( + betas: Tensor, + pose: Tensor, + v_template: Tensor, + shapedirs: Tensor, + posedirs: Tensor, + J_regressor: Tensor, + joint_offset: Tensor, # added by Gyeongsik + locator_offset: Tensor, # added by Gyeongsik + parents: Tensor, + lbs_weights: Tensor, + pose2rot: bool = True, +) -> Tuple[Tensor, Tensor]: + """Performs Linear Blend Skinning with the given shape and pose parameters + + Parameters + ---------- + betas : torch.tensor BxNB + The tensor of shape parameters + pose : torch.tensor Bx(J + 1) * 3 + The pose parameters in axis-angle format + v_template torch.tensor BxVx3 + The template mesh that will be deformed + shapedirs : torch.tensor 1xNB + The tensor of PCA shape displacements + posedirs : torch.tensor Px(V * 3) + The pose PCA coefficients + J_regressor : torch.tensor JxV + The regressor array that is used to calculate the joints from + the position of the vertices + joint_offset: torch.tensor BxJx3 + Joint offset to adjust joint locations + locator_offset: torch.tensor BxJx3 + Locator offset to adjust joint locations for the supervision (not used for the skinning) + parents: torch.tensor J + The array that describes the kinematic tree for the model + lbs_weights: torch.tensor N x V x (J + 1) + The linear blend skinning weights that represent how much the + rotation matrix of each part affects each vertex + pose2rot: bool, optional + Flag on whether to convert the input pose tensor to rotation + matrices. The default value is True. If False, then the pose tensor + should already contain rotation matrices and have a size of + Bx(J + 1)x9 + dtype: torch.dtype, optional + + Returns + ------- + verts: torch.tensor BxVx3 + The vertices of the mesh after applying the shape and pose + displacements. + joints: torch.tensor BxJx3 + The joints of the model + """ + batch_size = max(betas.shape[0], pose.shape[0]) + device, dtype = betas.device, betas.dtype + + # Add shape contribution + # print(betas.shape, shapedirs.shape) + v_shaped = v_template + blend_shapes(betas, shapedirs) + + # Get the joints + # NxJx3 array + J = vertices2joints(J_regressor, v_shaped) + if joint_offset is not None: + J = J + joint_offset + if locator_offset is not None: + J_lo = J + locator_offset + + # 3. 
Add pose blend shapes + # N x J x 3 x 3 + ident = torch.eye(3, dtype=dtype, device=device) + if pose2rot: + rot_mats = batch_rodrigues(pose.view(-1, 3)).view([batch_size, -1, 3, 3]) + + pose_feature = (rot_mats[:, 1:, :, :] - ident).view([batch_size, -1]) + # (N x P) x (P, V * 3) -> N x V x 3 + pose_offsets = torch.matmul(pose_feature, posedirs).view(batch_size, -1, 3) + else: + pose_feature = pose[:, 1:].view(batch_size, -1, 3, 3) - ident + rot_mats = pose.view(batch_size, -1, 3, 3) + + pose_offsets = torch.matmul(pose_feature.view(batch_size, -1), posedirs).view( + batch_size, -1, 3 + ) + + v_posed = pose_offsets + v_shaped + + # 4. Get the global joint location + J_transformed, A = batch_rigid_transform( + rot_mats, J, parents, dtype=dtype + ) # added by Gyeongsik + if locator_offset is not None: + J_transformed, _ = batch_rigid_transform( + rot_mats, J_lo, parents, dtype=dtype + ) # added by Gyeongsik + + # 5. Do skinning: + # W is N x V x (J + 1) + W = lbs_weights.unsqueeze(dim=0).expand([batch_size, -1, -1]) + # (N x V x (J + 1)) x (N x (J + 1) x 16) + num_joints = J_regressor.shape[0] + T = torch.matmul(W, A.view(batch_size, num_joints, 16)).view(batch_size, -1, 4, 4) + + homogen_coord = torch.ones( + [batch_size, v_posed.shape[1], 1], dtype=dtype, device=device + ) + v_posed_homo = torch.cat([v_posed, homogen_coord], dim=2) + + v_homo = torch.matmul(T, torch.unsqueeze(v_posed_homo, dim=-1)) + + verts = v_homo[:, :, :3, 0] + + return verts, J_transformed + + +def vertices2joints(J_regressor: Tensor, vertices: Tensor) -> Tensor: + """Calculates the 3D joint locations from the vertices + + Parameters + ---------- + J_regressor : torch.tensor JxV + The regressor array that is used to calculate the joints from the + position of the vertices + vertices : torch.tensor BxVx3 + The tensor of mesh vertices + + Returns + ------- + torch.tensor BxJx3 + The location of the joints + """ + + return torch.einsum("bik,ji->bjk", [vertices, J_regressor]) + + +def blend_shapes(betas: Tensor, shape_disps: Tensor) -> Tensor: + """Calculates the per vertex displacement due to the blend shapes + + + Parameters + ---------- + betas : torch.tensor Bx(num_betas) + Blend shape coefficients + shape_disps: torch.tensor Vx3x(num_betas) + Blend shapes + + Returns + ------- + torch.tensor BxVx3 + The per-vertex displacement due to shape deformation + """ + + # Displacement[b, m, k] = sum_{l} betas[b, l] * shape_disps[m, k, l] + # i.e. Multiply each shape displacement by its corresponding beta and + # then sum them. 
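+    # Reading aid (not part of the computation): the einsum below is equivalent
+    # to flattening the blend shapes into a (V * 3, num_betas) matrix and doing
+    # a plain matrix product, i.e.
+    #   (betas @ shape_disps.reshape(-1, shape_disps.shape[-1]).T
+    #    ).reshape(betas.shape[0], -1, 3)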
+ blend_shape = torch.einsum("bl,mkl->bmk", [betas, shape_disps]) + return blend_shape + + +def batch_rodrigues( + rot_vecs: Tensor, + epsilon: float = 1e-8, +) -> Tensor: + """Calculates the rotation matrices for a batch of rotation vectors + Parameters + ---------- + rot_vecs: torch.tensor Nx3 + array of N axis-angle vectors + Returns + ------- + R: torch.tensor Nx3x3 + The rotation matrices for the given axis-angle parameters + """ + + batch_size = rot_vecs.shape[0] + device, dtype = rot_vecs.device, rot_vecs.dtype + + angle = torch.norm(rot_vecs + 1e-8, dim=1, keepdim=True) + rot_dir = rot_vecs / angle + + cos = torch.unsqueeze(torch.cos(angle), dim=1) + sin = torch.unsqueeze(torch.sin(angle), dim=1) + + # Bx1 arrays + rx, ry, rz = torch.split(rot_dir, 1, dim=1) + K = torch.zeros((batch_size, 3, 3), dtype=dtype, device=device) + + zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device) + K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1).view( + (batch_size, 3, 3) + ) + + ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0) + rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K) + return rot_mat + + +def transform_mat(R: Tensor, t: Tensor) -> Tensor: + """Creates a batch of transformation matrices + Args: + - R: Bx3x3 array of a batch of rotation matrices + - t: Bx3x1 array of a batch of translation vectors + Returns: + - T: Bx4x4 Transformation matrix + """ + # No padding left or right, only add an extra row + return torch.cat([F.pad(R, [0, 0, 0, 1]), F.pad(t, [0, 0, 0, 1], value=1)], dim=2) + + +def batch_rigid_transform( + rot_mats: Tensor, joints: Tensor, parents: Tensor, dtype=torch.float32 +) -> Tensor: + """ + Applies a batch of rigid transformations to the joints + + Parameters + ---------- + rot_mats : torch.tensor BxNx3x3 + Tensor of rotation matrices + joints : torch.tensor BxNx3 + Locations of joints + parents : torch.tensor BxN + The kinematic tree of each object + dtype : torch.dtype, optional: + The data type of the created tensors, the default is torch.float32 + + Returns + ------- + posed_joints : torch.tensor BxNx3 + The locations of the joints after applying the pose rotations + rel_transforms : torch.tensor BxNx4x4 + The relative (with respect to the root joint) rigid transformations + for all the joints + """ + + joints = torch.unsqueeze(joints, dim=-1) + + rel_joints = joints.clone() + rel_joints[:, 1:] -= joints[:, parents[1:]] + + transforms_mat = transform_mat( + rot_mats.reshape(-1, 3, 3), rel_joints.reshape(-1, 3, 1) + ).reshape(-1, joints.shape[1], 4, 4) + + transform_chain = [transforms_mat[:, 0]] + for i in range(1, parents.shape[0]): + # Subtract the joint location at the rest pose + # No need for rotation, since it's identity when at rest + curr_res = torch.matmul(transform_chain[parents[i]], transforms_mat[:, i]) + transform_chain.append(curr_res) + + transforms = torch.stack(transform_chain, dim=1) + + # The last column of the transformations contains the posed joints + posed_joints = transforms[:, :, :3, 3] + + joints_homogen = F.pad(joints, [0, 0, 0, 1]) + + rel_transforms = transforms - F.pad( + torch.matmul(transforms, joints_homogen), [3, 0, 0, 0, 0, 0, 0, 0] + ) + + return posed_joints, rel_transforms diff --git a/LHM/models/rendering/smplx/smplx/utils.py b/LHM/models/rendering/smplx/smplx/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a014698ddecdbbdd4cfa04119ec29ffb2211e6e2 --- /dev/null +++ b/LHM/models/rendering/smplx/smplx/utils.py @@ -0,0 +1,126 @@ +# -*- coding: 
utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from typing import NewType, Union, Optional +from dataclasses import dataclass, asdict, fields +import numpy as np +import torch + +Tensor = NewType('Tensor', torch.Tensor) +Array = NewType('Array', np.ndarray) + + +@dataclass +class ModelOutput: + vertices: Optional[Tensor] = None + joints: Optional[Tensor] = None + full_pose: Optional[Tensor] = None + global_orient: Optional[Tensor] = None + transl: Optional[Tensor] = None + v_shaped: Optional[Tensor] = None + + def __getitem__(self, key): + return getattr(self, key) + + def get(self, key, default=None): + return getattr(self, key, default) + + def __iter__(self): + return self.keys() + + def keys(self): + keys = [t.name for t in fields(self)] + return iter(keys) + + def values(self): + values = [getattr(self, t.name) for t in fields(self)] + return iter(values) + + def items(self): + data = [(t.name, getattr(self, t.name)) for t in fields(self)] + return iter(data) + + +@dataclass +class SMPLOutput(ModelOutput): + betas: Optional[Tensor] = None + body_pose: Optional[Tensor] = None + + +@dataclass +class SMPLHOutput(SMPLOutput): + left_hand_pose: Optional[Tensor] = None + right_hand_pose: Optional[Tensor] = None + transl: Optional[Tensor] = None + + +@dataclass +class SMPLXOutput(SMPLHOutput): + expression: Optional[Tensor] = None + jaw_pose: Optional[Tensor] = None + + +@dataclass +class MANOOutput(ModelOutput): + betas: Optional[Tensor] = None + hand_pose: Optional[Tensor] = None + + +@dataclass +class FLAMEOutput(ModelOutput): + betas: Optional[Tensor] = None + expression: Optional[Tensor] = None + jaw_pose: Optional[Tensor] = None + neck_pose: Optional[Tensor] = None + + +def find_joint_kin_chain(joint_id, kinematic_tree): + kin_chain = [] + curr_idx = joint_id + while curr_idx != -1: + kin_chain.append(curr_idx) + curr_idx = kinematic_tree[curr_idx] + return kin_chain + + +def to_tensor( + array: Union[Array, Tensor], dtype=torch.float32 +) -> Tensor: + if torch.is_tensor(array): + return array + else: + return torch.tensor(array, dtype=dtype) + + +class Struct(object): + def __init__(self, **kwargs): + for key, val in kwargs.items(): + setattr(self, key, val) + + +def to_np(array, dtype=np.float32): + if 'scipy.sparse' in str(type(array)): + array = array.todense() + return np.array(array, dtype=dtype) + + +def rot_mat_to_euler(rot_mats): + # Calculates rotation matrix to euler angles + # Careful for extreme cases of eular angles like [0.0, pi, 0.0] + + sy = torch.sqrt(rot_mats[:, 0, 0] * rot_mats[:, 0, 0] + + rot_mats[:, 1, 0] * rot_mats[:, 1, 0]) + return torch.atan2(-rot_mats[:, 2, 0], sy) diff --git a/LHM/models/rendering/smplx/smplx/vertex_ids.py b/LHM/models/rendering/smplx/smplx/vertex_ids.py new file mode 100644 index 0000000000000000000000000000000000000000..0e7a4c36700f002da54a9e181eabbd47af2a95bc --- /dev/null +++ 
b/LHM/models/rendering/smplx/smplx/vertex_ids.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + +# Joint name to vertex mapping. SMPL/SMPL-H/SMPL-X vertices that correspond to +# MSCOCO and OpenPose joints +vertex_ids = { + 'smplh': { + 'nose': 332, + 'reye': 6260, + 'leye': 2800, + 'rear': 4071, + 'lear': 583, + 'rthumb': 6191, + 'rindex': 5782, + 'rmiddle': 5905, + 'rring': 6016, + 'rpinky': 6133, + 'lthumb': 2746, + 'lindex': 2319, + 'lmiddle': 2445, + 'lring': 2556, + 'lpinky': 2673, + 'LBigToe': 3216, + 'LSmallToe': 3226, + 'LHeel': 3387, + 'RBigToe': 6617, + 'RSmallToe': 6624, + 'RHeel': 6787 + }, + 'smplx': { + 'nose': 9120, + 'reye': 9929, + 'leye': 9448, + 'rear': 616, + 'lear': 6, + 'rthumb': 8079, + 'rindex': 7669, + 'rmiddle': 7794, + 'rring': 7905, + 'rpinky': 8022, + 'lthumb': 5361, + 'lindex': 4933, + 'lmiddle': 5058, + 'lring': 5169, + 'lpinky': 5286, + 'LBigToe': 5770, + 'LSmallToe': 5780, + 'LHeel': 8846, + 'RBigToe': 8463, + 'RSmallToe': 8474, + 'RHeel': 8635 + }, + 'mano': { + 'thumb': 744, + 'index': 320, + 'middle': 443, + 'ring': 554, + 'pinky': 671, + } +} diff --git a/LHM/models/rendering/smplx/smplx/vertex_joint_selector.py b/LHM/models/rendering/smplx/smplx/vertex_joint_selector.py new file mode 100644 index 0000000000000000000000000000000000000000..17449726a45647709580c9b08f932a2e82cce2cc --- /dev/null +++ b/LHM/models/rendering/smplx/smplx/vertex_joint_selector.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import absolute_import +from __future__ import print_function +from __future__ import division + +import numpy as np + +import torch +import torch.nn as nn + +from .utils import to_tensor + + +class VertexJointSelector(nn.Module): + + def __init__(self, vertex_ids=None, + use_hands=True, + use_feet_keypoints=True, **kwargs): + super(VertexJointSelector, self).__init__() + + extra_joints_idxs = [] + + face_keyp_idxs = np.array([ + vertex_ids['nose'], + vertex_ids['reye'], + vertex_ids['leye'], + vertex_ids['rear'], + vertex_ids['lear']], dtype=np.int64) + + extra_joints_idxs = np.concatenate([extra_joints_idxs, + face_keyp_idxs]) + + if use_feet_keypoints: + feet_keyp_idxs = np.array([vertex_ids['LBigToe'], + vertex_ids['LSmallToe'], + vertex_ids['LHeel'], + vertex_ids['RBigToe'], + vertex_ids['RSmallToe'], + vertex_ids['RHeel']], dtype=np.int32) + + extra_joints_idxs = np.concatenate( + [extra_joints_idxs, feet_keyp_idxs]) + + if use_hands: + self.tip_names = ['thumb', 'index', 'middle', 'ring', 'pinky'] + + tips_idxs = [] + for hand_id in ['l', 'r']: + for tip_name in self.tip_names: + tips_idxs.append(vertex_ids[hand_id + tip_name]) + + extra_joints_idxs = np.concatenate( + [extra_joints_idxs, tips_idxs]) + + self.register_buffer('extra_joints_idxs', + to_tensor(extra_joints_idxs, dtype=torch.long)) + + def forward(self, vertices, joints): + extra_joints = torch.index_select(vertices, 1, self.extra_joints_idxs.to(torch.long)) #The '.to(torch.long)'. + # added to make the trace work in c++, + # otherwise you get a runtime error in c++: + # 'index_select(): Expected dtype int32 or int64 for index' + joints = torch.cat([joints, extra_joints], dim=1) + + return joints diff --git a/LHM/models/rendering/smplx/tools/README.md b/LHM/models/rendering/smplx/tools/README.md new file mode 100644 index 0000000000000000000000000000000000000000..20ed7f524f39b56d79d9214183510d68fb72f8ac --- /dev/null +++ b/LHM/models/rendering/smplx/tools/README.md @@ -0,0 +1,42 @@ +## Removing Chumpy objects + +In a Python 2 virtual environment with [Chumpy](https://github.com/mattloper/chumpy) installed run the following to remove any Chumpy objects from the model data: + +```bash +python tools/clean_ch.py --input-models path-to-models/*.pkl --output-folder output-folder +``` + +## Merging SMPL-H and MANO parameters + +In order to use the given PyTorch SMPL-H module we first need to merge the SMPL-H and MANO parameters in a single file. After agreeing to the license and downloading the models, run the following command: + +```bash +python tools/merge_smplh_mano.py --smplh-fn SMPLH_FOLDER/SMPLH_GENDER.pkl \ + --mano-left-fn MANO_FOLDER/MANO_LEFT.pkl \ + --mano-right-fn MANO_FOLDER/MANO_RIGHT.pkl \ + --output-folder OUTPUT_FOLDER +``` + +where SMPLH_FOLDER is the folder with the SMPL-H files and MANO_FOLDER the one for the MANO files. 
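Once the merged file is in place it can be loaded through the `create` / `build_layer` factories defined in this package's `body_models.py`. A minimal sketch with hypothetical paths, assuming the package is importable as `smplx` (inside this repository it is vendored under `LHM/models/rendering/smplx/smplx`) and that the vendored code follows the upstream smplx API:

```python
import torch
import smplx  # adjust the import to the vendored module path if not installed

# Folder layout expected by create()/build_layer():
#   MODELS/smplh/SMPLH_FEMALE.pkl   <- the merged SMPL-H + MANO file
model = smplx.create("MODELS", model_type="smplh", gender="female", ext="pkl")

# Forward pass with the module's default (zero) shape and pose parameters.
output = model(betas=torch.zeros(1, model.num_betas), return_verts=True)
print(output.vertices.shape, output.joints.shape)
```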
+ + +### SMPL-H version used in AMASS + +For AMASS, you should download the body with 16 betas, here is the process: + +``` +- Download the zip folder from "Models & Code" and extract it to get the folder `mano_v1_2` +- Download the zip folder from "Extended SMPL+H model" and extract it to get the folder `smplh` + +$ git clone https://github.com/vchoutas/smplx.git +$ cd smplx +$ python tools/merge_smplh_mano.py \ +--smplh-fn /path/to/smplh/female/model.npz \ +--mano-left-fn /path/to/mano_v1_2/models/MANO_LEFT.pkl \ +--mano-right-fn /path/to/mano_v1_2/models/MANO_RIGHT.pkl \ +--output-folder /path/to/smplh/merged + +cp /path/to/smplh/merged/model.pkl /path/to/smplx_models/smplh/SMPLH_FEMALE.pkl +``` + +In the end you get the smplh model required by smplx 'smplx_models/smplh/SMPLH_FEMALE.pkl' diff --git a/LHM/models/rendering/smplx/tools/__init__.py b/LHM/models/rendering/smplx/tools/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..098b529b7f169758710ab788be94fe5d83e51256 --- /dev/null +++ b/LHM/models/rendering/smplx/tools/__init__.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems and the Max Planck Institute for Biological +# Cybernetics. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +import clean_ch +import merge_smplh_mano diff --git a/LHM/models/rendering/smplx/tools/clean_ch.py b/LHM/models/rendering/smplx/tools/clean_ch.py new file mode 100644 index 0000000000000000000000000000000000000000..56874b374c5d25aeb4ace0aefb3570bd7b891c22 --- /dev/null +++ b/LHM/models/rendering/smplx/tools/clean_ch.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems and the Max Planck Institute for Biological +# Cybernetics. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + +import argparse +import os +import os.path as osp + +import pickle + +from tqdm import tqdm +import numpy as np + + +def clean_fn(fn, output_folder='output'): + with open(fn, 'rb') as body_file: + body_data = pickle.load(body_file) + + output_dict = {} + for key, data in body_data.iteritems(): + if 'chumpy' in str(type(data)): + output_dict[key] = np.array(data) + else: + output_dict[key] = data + + out_fn = osp.split(fn)[1] + + out_path = osp.join(output_folder, out_fn) + with open(out_path, 'wb') as out_file: + pickle.dump(output_dict, out_file) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--input-models', dest='input_models', nargs='+', + required=True, type=str, + help='The path to the model that will be processed') + parser.add_argument('--output-folder', dest='output_folder', + required=True, type=str, + help='The path to the output folder') + + args = parser.parse_args() + + input_models = args.input_models + output_folder = args.output_folder + if not osp.exists(output_folder): + print('Creating directory: {}'.format(output_folder)) + os.makedirs(output_folder) + + for input_model in input_models: + clean_fn(input_model, output_folder=output_folder) diff --git a/LHM/models/rendering/smplx/tools/merge_smplh_mano.py b/LHM/models/rendering/smplx/tools/merge_smplh_mano.py new file mode 100644 index 0000000000000000000000000000000000000000..30d5ab8cdd5f6d0a355fda5587cd47c8f7045c4c --- /dev/null +++ b/LHM/models/rendering/smplx/tools/merge_smplh_mano.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems and the Max Planck Institute for Biological +# Cybernetics. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import print_function + +import os +import os.path as osp +import pickle + +import argparse + +import numpy as np + + +def merge_models(smplh_fn, mano_left_fn, mano_right_fn, + output_folder='output'): + + if smplh_fn.endswith('.pkl'): + with open(smplh_fn, 'rb') as body_file: + body_data = pickle.load(body_file, encoding='latin1') + elif smplh_fn.endswith('.npz'): + body_data_np = np.load(smplh_fn) + body_data = {} + for key in body_data_np: + body_data[key] = body_data_np[key] + else: + raise ValueError('The body model file should be either a .pkl or a .npz file.') + + + with open(mano_left_fn, 'rb') as lhand_file: + lhand_data = pickle.load(lhand_file, encoding='latin1') + + with open(mano_right_fn, 'rb') as rhand_file: + rhand_data = pickle.load(rhand_file, encoding='latin1') + + out_fn = osp.split(smplh_fn)[1] + if out_fn.endswith('.npz'): + out_fn = out_fn.replace('.npz', '.pkl') + + + output_data = body_data.copy() + output_data['hands_componentsl'] = lhand_data['hands_components'] + output_data['hands_componentsr'] = rhand_data['hands_components'] + + output_data['hands_coeffsl'] = lhand_data['hands_coeffs'] + output_data['hands_coeffsr'] = rhand_data['hands_coeffs'] + + output_data['hands_meanl'] = lhand_data['hands_mean'] + output_data['hands_meanr'] = rhand_data['hands_mean'] + + for key, data in output_data.items(): + if 'chumpy' in str(type(data)): + output_data[key] = np.array(data) + else: + output_data[key] = data + + out_path = osp.join(output_folder, out_fn) + print(out_path) + print('Saving to {}'.format(out_path)) + with open(out_path, 'wb') as output_file: + pickle.dump(output_data, output_file) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--smplh-fn', dest='smplh_fn', required=True, + type=str, help='The path to the SMPLH model') + parser.add_argument('--mano-left-fn', dest='mano_left_fn', required=True, + type=str, help='The path to the left hand MANO model') + parser.add_argument('--mano-right-fn', dest='mano_right_fn', required=True, + type=str, help='The path to the right hand MANO model') + parser.add_argument('--output-folder', dest='output_folder', + required=True, type=str, + help='The path to the output folder') + + args = parser.parse_args() + + smplh_fn = args.smplh_fn + mano_left_fn = args.mano_left_fn + mano_right_fn = args.mano_right_fn + output_folder = args.output_folder + + if not osp.exists(output_folder): + print('Creating directory: {}'.format(output_folder)) + os.makedirs(output_folder) + + merge_models(smplh_fn, mano_left_fn, mano_right_fn, output_folder) diff --git a/LHM/models/rendering/smplx/transfer_data/support_data/github_data/amass_sample.npz b/LHM/models/rendering/smplx/transfer_data/support_data/github_data/amass_sample.npz new file mode 100644 index 0000000000000000000000000000000000000000..305ac69ab5c7d7340edf721a541ae6485a602303 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_data/support_data/github_data/amass_sample.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7bd737171d3ac9c7b5e6423db0aec40fe532a4dd0004deeb566f246ee8eddf6 +size 2031571 diff --git a/LHM/models/rendering/smplx/transfer_model/README.md b/LHM/models/rendering/smplx/transfer_model/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e7f14c4f7758c90c402d7c2155e536c3840c5eae --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/README.md @@ -0,0 +1,253 @@ +# Model parameter transfer + +## 
Table of Contents + * [License](#license) + * [Description](#description) + * [Using the code](#using-the-code) + * [Data](#data) + * [Steps](#steps) + * [SMPL to SMPL-X](#smpl-to-smpl-x) + * [SMPL-X to SMPL](#smpl-x-to-smpl) + * [SMPL+H to SMPL](#smpl%2Bh-to-smpl) + * [SMPL to SMPL+H](#smpl-to-smpl%2Bh) + * [SMPL+H to SMPL-X](#smpl%2Bh-to-smpl-x) + * [SMPL-X to SMPL+H](#smpl-x-to-smpl%2Bh) + * [Visualize correspondences](visualize-correspondences) + * [Citation](#citation) + * [Acknowledgments](#acknowledgments) + * [Contact](#contact) + +## License + +Software Copyright License for **non-commercial scientific research purposes**. +Please read carefully the [terms and conditions](https://github.com/vchoutas/smplx/blob/master/LICENSE) and any accompanying documentation before you download and/or use the SMPL-X/SMPLify-X model, data and software, (the "Model & Software"), including 3D meshes, blend weights, blend shapes, textures, software, scripts, and animations. By downloading and/or using the Model & Software (including downloading, cloning, installing, and any other use of this github repository), you acknowledge that you have read these terms and conditions, understand them, and agree to be bound by them. If you do not agree with these terms and conditions, you must not download and/or use the Model & Software. Any infringement of the terms of this agreement will automatically terminate your rights under this [License](./LICENSE). + +## Description + +The repository contains code for converting model parameters of one model to +another. **Never** copy parameters between the models. You will not get the +same poses. SMPL, SMPL+H and SMPL-X shape spaces are **NOT** compatible, since +each model is the result of a different training process. +A more detailed explanation on how we extract correspondences +between the models and the loss function used to estimate the parameters can be +found [here](./docs/transfer.md). + +## Requirements + +1. Install [mesh](https://github.com/MPI-IS/mesh) +2. Start by cloning the SMPL-X repo: +```Shell +git clone https://github.com/vchoutas/smplx.git +``` +3. Run the following command to install all necessary requirements +```Shell + pip install -r requirements.txt +``` +4. Install the Torch Trust Region optimizer by following the instructions [here](https://github.com/vchoutas/torch-trust-ncg) +5. Install loguru +6. Install open3d +7. Install omegaconf + +## Using the code + +### Data + +Register on the [SMPL-X website](http://smpl-x.is.tue.mpg.de/), go to the +downloads section to get the correspondences and sample data, +by clicking on the *Model correspondences* button. +Create a folder +named `transfer_data` and extract the downloaded zip there. You should have the +following folder structure now: + +```bash +transfer_data +├── meshes +│   ├── smpl +│   ├── smplx +├── smpl2smplh_def_transfer.pkl +├── smpl2smplx_deftrafo_setup.pkl +├── smplh2smpl_def_transfer.pkl +├── smplh2smplx_deftrafo_setup.pkl +├── smplx2smpl_deftrafo_setup.pkl +├── smplx2smplh_deftrafo_setup.pkl +├── smplx_mask_ids.npy +``` + +### Steps + +First, break the motion into a set of pose `.obj` files. Depending on how the +SMPL-* parameters are stored this code will differ. 
For the example AMASS data +in this repository you can use the example code here: + +``` +python write_obj.py --model-folder ../models/ --motion-file ../transfer_data/support_data/github_data/amass_sample.npz --output-folder ../transfer_data/meshes/amass_sample/ +``` + +To run the `transfer_model` utility you will require a `.yaml` config file, +which can point to the location the output `.obj` files have been saved. Use the +templates in `config_files` in the root of this repository. To convert the +sample AMASS code to SMPL-X: + +``` +python -m transfer_model --exp-cfg config_files/smplh2smplx_as.yaml +``` + +Finally, the output `.obj` files have to be merged into a single motion +sequence. Example code to do this in a way that matches `SMPL-X` AMASS archives +can be found in `merge_output.py` and run as follows: + +``` +python merge_output.py --gender neutral ../output +``` + +Debug notes describing common problems encountered during this can be found +[here](https://github.com/gngdb/smplx/blob/debug/transfer_model/DEBUG_NOTES.md). +Problems are also discussed in +[two](https://github.com/vchoutas/smplx/issues/82) +[issues](https://github.com/vchoutas/smplx/issues/75). + +### SMPL to SMPL-X + +To run the code to convert SMPL meshes to SMPL-X parameters use the following command: + ```Shell + python -m transfer_model --exp-cfg config_files/smpl2smplx.yaml + ``` +This should be run from the top directory of the repository. + +The file *smpl2smplx.yaml* contains a sample configuration that reads meshes from a folder, +processes them and returns pkl files with SMPL-X parameters. To run on your own data create a folder +with SMPL meshes, in either ply or obj format, change the path in the config file and run the code. + +### SMPL-X to SMPL + +To run the code to convert SMPL-X meshes to SMPL parameters use the following command: + ```Shell + python main.py --exp-cfg config_files/smplx2smpl.yaml + ``` + +The file *smplx2smpl.yaml* contains a sample configuration that reads meshes from a folder, +processes them and returns pkl files with SMPL parameters. To run on your own data create a folder +with SMPL-X meshes, in either ply or obj format, change the path in the config file and run the code. +When creating the SMPL-X meshes, do not use the hand and face parameters. +Naturally, you will lose all hand and face information if you choose this, since +SMPL cannot model them. + + +### SMPL+H to SMPL + +To run the code to convert SMPL+H meshes to SMPL parameters use the following command from the root `smplx` directory: + ```Shell + python -m transfer_model --exp-cfg config_files/smplh2smpl.yaml + ``` +This should be run from the top directory of the repository. + +The file *smplh2smpl.yaml* contains a sample configuration that reads meshes from a folder, +processes them and returns pkl files with SMPL parameters. To run on your own data create a folder +with SMPL+H meshes, in either ply or obj format, change the path in the config file and run the code. +Note that using this direction means that you will lose information on the +hands. + + +### SMPL to SMPL+H + +To run the code to convert SMPL meshes to SMPL+H parameters use the following command: + ```Shell + python -m transfer_model --exp-cfg config_files/smpl2smplh.yaml + ``` +This should be run from the top directory of the repository. + +The file *smpl2smplh.yaml* contains a sample configuration that reads meshes from a folder, +processes them and returns pkl files with SMPL parameters. 
To run on your own data create a folder +with SMPL meshes, in either ply or obj format, change the path in the config file and run the code. + +### SMPL+H to SMPL-X + +To run the code to convert SMPL+H meshes to SMPL-X parameters use the following command: + ```Shell + python -m transfer_model --exp-cfg config_files/smplh2smplx.yaml + ``` +This should be run from the top directory of the repository. + +The file *smplh2smplx.yaml* contains a sample configuration that reads meshes from a folder, +processes them and returns pkl files with SMPL-X parameters. To run on your own data create a folder +with SMPL+H meshes, in either ply or obj format, change the path in the config file and run the code. + + +### SMPL-X to SMPL+H + +To run the code to convert SMPL-X meshes to SMPL+H parameters use the following command: + ```Shell + python -m transfer_model --exp-cfg config_files/smplx2smplh.yaml + ``` +This should be run from the top directory of the repository. + +The file *smplx2smpl.yaml* contains a sample configuration that reads meshes from a folder, +processes them and returns pkl files with SMPL+H parameters. To run on your own data create a folder +with SMPL-X meshes, in either ply or obj format, change the path in the config file and run the code. +Make sure that you do not use the jaw pose and expression parameters to generate +the meshes. + + +## Visualize correspondences + +To visualize correspondences: +```Shell +python vis_correspondences.py --exp-cfg configs/smpl2smplx.yaml --exp-opts colors_path PATH_TO_SMPL_COLORS +``` +You should then see the following image. Points with similar color are in +correspondence. +![Correspondence example](./docs/images/smpl_smplx_correspondence.png) + +## Citation + +Depending on which model is loaded for your project, i.e. SMPL-X or SMPL+H or SMPL, please cite the most relevant work: + +``` +@article{SMPL:2015, + author = {Loper, Matthew and Mahmood, Naureen and Romero, Javier and Pons-Moll, Gerard and Black, Michael J.}, + title = {{SMPL}: A Skinned Multi-Person Linear Model}, + journal = {ACM Transactions on Graphics, (Proc. SIGGRAPH Asia)}, + month = oct, + number = {6}, + pages = {248:1--248:16}, + publisher = {ACM}, + volume = {34}, + year = {2015} +} +``` + +``` +@article{MANO:SIGGRAPHASIA:2017, + title = {Embodied Hands: Modeling and Capturing Hands and Bodies Together}, + author = {Romero, Javier and Tzionas, Dimitrios and Black, Michael J.}, + journal = {ACM Transactions on Graphics, (Proc. SIGGRAPH Asia)}, + volume = {36}, + number = {6}, + pages = {245:1--245:17}, + series = {245:1--245:17}, + publisher = {ACM}, + month = nov, + year = {2017}, + url = {http://doi.acm.org/10.1145/3130800.3130883}, + month_numeric = {11} + } +``` + + +``` +@inproceedings{SMPL-X:2019, + title = {Expressive Body Capture: 3D Hands, Face, and Body from a Single Image}, + author = {Pavlakos, Georgios and Choutas, Vasileios and Ghorbani, Nima and Bolkart, Timo and Osman, Ahmed A. A. and Tzionas, Dimitrios and Black, Michael J.}, + booktitle = {Proceedings IEEE Conf. on Computer Vision and Pattern Recognition (CVPR)}, + year = {2019} +} +``` + + +## Acknowledgments +The code of this repository was implemented by [Vassilis Choutas](vassilis.choutas@tuebingen.mpg.de), +based on a Chumpy implementation from [Timo Bolkart](timo.bolkart@tuebingen.mpg.de). + +## Contact + +For questions, please contact [smplx@tue.mpg.de](smplx@tue.mpg.de). 
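For reference, the `--exp-cfg` and `--exp-opts` flags used in the commands above are resolved by layering configs on top of structured defaults. The sketch below loosely mirrors `transfer_model/config/cmd_parser.py` (shown later in this diff) using OmegaConf; only a few representative default keys from `transfer_model/config/defaults.py` are reproduced, and the dotlist override is a placeholder.

```python
# Sketch of how the transfer_model config is resolved:
# structured defaults -> yaml from --exp-cfg -> overrides from --exp-opts.
from omegaconf import OmegaConf

# A few representative defaults, taken from transfer_model/config/defaults.py.
defaults = OmegaConf.create({
    "output_folder": "output",
    "batch_size": 1,
    "deformation_transfer_path": "",
    "mask_ids_fname": "",
    "datasets": {"name": "mesh-folder",
                 "mesh_folder": {"data_folder": "data/meshes"}},
})

# --exp-cfg: merge a user yaml (path from the AMASS example above).
cfg = OmegaConf.merge(defaults, OmegaConf.load("config_files/smplh2smplx_as.yaml"))
# --exp-opts: merge command-line style overrides (placeholder value).
cfg = OmegaConf.merge(cfg, OmegaConf.from_dotlist(["output_folder=../output"]))
print(OmegaConf.to_yaml(cfg))
```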
diff --git a/LHM/models/rendering/smplx/transfer_model/__init__.py b/LHM/models/rendering/smplx/transfer_model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3f66c65c1f85ff4cabb28b6bc716e327757746a4 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + diff --git a/LHM/models/rendering/smplx/transfer_model/__main__.py b/LHM/models/rendering/smplx/transfer_model/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..2ffc2500de4cc466fd77ec3ba4acc70a621f6111 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/__main__.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +import os +import os.path as osp +import sys +import pickle + +import numpy as np +import open3d as o3d +import torch +from loguru import logger +from tqdm import tqdm + +from smplx import build_layer + +from .config import parse_args +from .data import build_dataloader +from .transfer_model import run_fitting +from .utils import read_deformation_transfer, np_mesh_to_o3d + + +def main() -> None: + exp_cfg = parse_args() + + if torch.cuda.is_available() and exp_cfg["use_cuda"]: + device = torch.device('cuda') + else: + device = torch.device('cpu') + if exp_cfg["use_cuda"]: + if input("use_cuda=True and GPU is not available, using CPU instead," + " would you like to continue? 
(y/n)") != "y": + sys.exit(3) + + logger.remove() + logger.add( + lambda x: tqdm.write(x, end=''), level=exp_cfg.logger_level.upper(), + colorize=True) + + output_folder = osp.expanduser(osp.expandvars(exp_cfg.output_folder)) + logger.info(f'Saving output to: {output_folder}') + os.makedirs(output_folder, exist_ok=True) + + model_path = exp_cfg.body_model.folder + body_model = build_layer(model_path, **exp_cfg.body_model) + logger.info(body_model) + body_model = body_model.to(device=device) + + deformation_transfer_path = exp_cfg.get('deformation_transfer_path', '') + def_matrix = read_deformation_transfer( + deformation_transfer_path, device=device) + + # Read mask for valid vertex ids + mask_ids_fname = osp.expandvars(exp_cfg.mask_ids_fname) + mask_ids = None + if osp.exists(mask_ids_fname): + logger.info(f'Loading mask ids from: {mask_ids_fname}') + mask_ids = np.load(mask_ids_fname) + mask_ids = torch.from_numpy(mask_ids).to(device=device) + else: + logger.warning(f'Mask ids fname not found: {mask_ids_fname}') + + data_obj_dict = build_dataloader(exp_cfg) + + dataloader = data_obj_dict['dataloader'] + + for ii, batch in enumerate(tqdm(dataloader)): + for key in batch: + if torch.is_tensor(batch[key]): + batch[key] = batch[key].to(device=device) + var_dict = run_fitting( + exp_cfg, batch, body_model, def_matrix, mask_ids) + paths = batch['paths'] + + for ii, path in enumerate(paths): + _, fname = osp.split(path) + + output_path = osp.join( + output_folder, f'{osp.splitext(fname)[0]}.pkl') + with open(output_path, 'wb') as f: + pickle.dump(var_dict, f) + + output_path = osp.join( + output_folder, f'{osp.splitext(fname)[0]}.obj') + mesh = np_mesh_to_o3d( + var_dict['vertices'][ii], var_dict['faces']) + o3d.io.write_triangle_mesh(output_path, mesh) + + +if __name__ == '__main__': + main() diff --git a/LHM/models/rendering/smplx/transfer_model/config/__init__.py b/LHM/models/rendering/smplx/transfer_model/config/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4711a91455b85e0da688c1a3ae6573fa1f36187d --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/config/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from .cmd_parser import parse_args diff --git a/LHM/models/rendering/smplx/transfer_model/config/body_model_defaults.py b/LHM/models/rendering/smplx/transfer_model/config/body_model_defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..658149cced69097c70f92d3d1e6f8500187aa978 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/config/body_model_defaults.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. 
+# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from omegaconf import OmegaConf +from loguru import logger +from dataclasses import dataclass +from .utils_cfg import Variable, Pose + + +@dataclass +class PCA: + num_comps: int = 12 + flat_hand_mean: bool = False + + +@dataclass +class PoseWithPCA(Pose): + pca: PCA = PCA() + + +@dataclass +class Shape(Variable): + num: int = 10 + + +@dataclass +class Expression(Variable): + num: int = 10 + + +@dataclass +class SMPL: + betas: Shape = Shape() + global_rot: Pose = Pose() + body_pose: Pose = Pose() + translation: Variable = Variable() + + +@dataclass +class SMPLH(SMPL): + left_hand_pose: PoseWithPCA = PoseWithPCA() + right_hand_pose: PoseWithPCA = PoseWithPCA() + + +@dataclass +class SMPLX(SMPLH): + expression: Expression = Expression() + jaw_pose: Pose = Pose() + leye_pose: Pose = Pose() + reye_pose: Pose = Pose() + + +@dataclass +class MANO: + betas: Shape = Shape() + wrist_pose: Pose = Pose() + hand_pose: PoseWithPCA = PoseWithPCA() + translation: Variable = Variable() + + +@dataclass +class FLAME: + betas: Shape = Shape() + expression: Expression = Expression() + global_rot: Pose = Pose() + neck_pose: Pose = Pose() + jaw_pose: Pose = Pose() + leye_pose: Pose = Pose() + reye_pose: Pose = Pose() + + +@dataclass +class BodyModelConfig: + model_type: str = 'smplx' + use_compressed: bool = True + folder: str = 'models' + gender: str = 'neutral' + extra_joint_path: str = '' + ext: str = 'npz' + + num_expression_coeffs: int = 10 + + use_face_contour: bool = True + joint_regressor_path: str = '' + + smpl: SMPL = SMPL() + star: SMPL = SMPL() + smplh: SMPLH = SMPLH() + smplx: SMPLX = SMPLX() + mano: MANO = MANO() + flame: FLAME = FLAME() + + +conf = OmegaConf.structured(BodyModelConfig) diff --git a/LHM/models/rendering/smplx/transfer_model/config/cmd_parser.py b/LHM/models/rendering/smplx/transfer_model/config/cmd_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..474903e813e238d20e421b916a835d449babb960 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/config/cmd_parser.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from __future__ import absolute_import +from __future__ import division + +import sys +import os + +import argparse +from loguru import logger + +from omegaconf import OmegaConf +from .defaults import conf as default_conf + + +def parse_args(argv=None) -> OmegaConf: + arg_formatter = argparse.ArgumentDefaultsHelpFormatter + + description = 'Model transfer script' + parser = argparse.ArgumentParser(formatter_class=arg_formatter, + description=description) + + parser.add_argument('--exp-cfg', type=str, dest='exp_cfg', + help='The configuration of the experiment') + parser.add_argument('--exp-opts', default=[], dest='exp_opts', + nargs='*', + help='Command line arguments') + + cmd_args = parser.parse_args() + + cfg = default_conf.copy() + if cmd_args.exp_cfg: + cfg.merge_with(OmegaConf.load(cmd_args.exp_cfg)) + if cmd_args.exp_opts: + cfg.merge_with(OmegaConf.from_cli(cmd_args.exp_opts)) + + return cfg diff --git a/LHM/models/rendering/smplx/transfer_model/config/dataset_defaults.py b/LHM/models/rendering/smplx/transfer_model/config/dataset_defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..ce3e4f36a6e7f7f7c54948c68a18ee97a5ef2d00 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/config/dataset_defaults.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from omegaconf import OmegaConf +from dataclasses import dataclass + + +@dataclass +class MeshFolder: + data_folder: str = 'data/meshes' + + +@dataclass +class DatasetConfig: + num_workers: int = 0 + name: str = 'mesh-folder' + mesh_folder: MeshFolder = MeshFolder() + + +conf = OmegaConf.structured(DatasetConfig) diff --git a/LHM/models/rendering/smplx/transfer_model/config/defaults.py b/LHM/models/rendering/smplx/transfer_model/config/defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..1656c21c4b4df3827554e8f45d2aea0fd7327415 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/config/defaults.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from typing import Tuple, Optional +from copy import deepcopy +# from yacs.config import CfgNode as CN +from dataclasses import dataclass +from omegaconf import OmegaConf + +from .loss_defaults import conf as loss_cfg, LossConfig +from .dataset_defaults import conf as dataset_cfg, DatasetConfig +from .optim_defaults import conf as optim_cfg, OptimConfig +from .body_model_defaults import conf as body_model_cfg, BodyModelConfig + + +@dataclass +class EdgeFitting: + per_part: bool = False + reduction: str = 'mean' + + +@dataclass +class VertexFitting: + per_part: bool = False + reduction: str = 'mean' + type: str = 'l2' + + +@dataclass +class Config: + use_cuda: bool = True + log_file: str = '/tmp/logs' + output_folder: str = 'output' + save_verts: bool = True + save_joints: bool = True + save_mesh: bool = False + save_img_summaries: bool = True + summary_steps: int = 5 + degrees: Tuple[float] = (90,) + float_type: str = 'float' + logger_level: str = 'INFO' + interactive: bool = True + batch_size: Optional[int] = 1 + color_path: str = 'data/smpl_with_colors.ply' + + optim: OptimConfig = optim_cfg + datasets: DatasetConfig = dataset_cfg + losses: LossConfig = loss_cfg + body_model: BodyModelConfig = body_model_cfg + + deformation_transfer_path: str = '' + mask_ids_fname: str = '' + + per_part: bool = True + edge_fitting: EdgeFitting = EdgeFitting() + + +conf = OmegaConf.structured(Config) diff --git a/LHM/models/rendering/smplx/transfer_model/config/loss_defaults.py b/LHM/models/rendering/smplx/transfer_model/config/loss_defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..14fa090525eb69cde9079259bc176a1f71af713c --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/config/loss_defaults.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de +# from yacs.config import CfgNode as CN + +from typing import List, Tuple, Union +from omegaconf import OmegaConf +from loguru import logger +from dataclasses import dataclass, make_dataclass + + +@dataclass +class LossTemplate: + type: str = 'l2' + active: bool = False + weight: Tuple[float] = (0.0,) + requires_grad: bool = True + enable: int = 0 + + +@dataclass +class LossConfig: + type: str = 'smplify-x' + + +conf = OmegaConf.structured(LossConfig) diff --git a/LHM/models/rendering/smplx/transfer_model/config/optim_defaults.py b/LHM/models/rendering/smplx/transfer_model/config/optim_defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..6dfc6accc586c8f8ec4fbe2315ca4312885b1714 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/config/optim_defaults.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. 
+# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from typing import Tuple +from omegaconf import OmegaConf +from dataclasses import dataclass + + +@dataclass +class LBFGS: + line_search_fn: str = 'strong_wolfe' + max_iter: int = 50 + + +@dataclass +class SGD: + momentum: float = 0.9 + nesterov: bool = True + + +@dataclass +class ADAM: + betas: Tuple[float, float] = (0.9, 0.999) + eps: float = 1e-08 + amsgrad: bool = False + + +@dataclass +class RMSProp: + alpha: float = 0.99 + + +@dataclass +class TrustRegionNewtonCG: + max_trust_radius: float = 1000 + initial_trust_radius: float = 0.05 + eta: float = 0.15 + gtol: float = 1e-05 + + +@dataclass +class OptimConfig: + type: str = 'trust-ncg' + lr: float = 1.0 + gtol: float = 1e-8 + ftol: float = -1.0 + maxiters: int = 100 + + lbfgs: LBFGS = LBFGS() + sgd: SGD = SGD() + adam: ADAM = ADAM() + trust_ncg: TrustRegionNewtonCG = TrustRegionNewtonCG() + + +conf = OmegaConf.structured(OptimConfig) diff --git a/LHM/models/rendering/smplx/transfer_model/config/utils_cfg.py b/LHM/models/rendering/smplx/transfer_model/config/utils_cfg.py new file mode 100644 index 0000000000000000000000000000000000000000..9ea224389b2b9eff299665c384e236844b9e8f80 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/config/utils_cfg.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from typing import Tuple +from dataclasses import dataclass + + +@dataclass +class Variable: + create: bool = True + requires_grad: bool = True + + +@dataclass +class Pose(Variable): + type: str = 'aa' diff --git a/LHM/models/rendering/smplx/transfer_model/data/__init__.py b/LHM/models/rendering/smplx/transfer_model/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c2892d560e3e9aa8c9b68186b537f35d04fe3bec --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/data/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. 
+# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from .build import build_dataloader diff --git a/LHM/models/rendering/smplx/transfer_model/data/build.py b/LHM/models/rendering/smplx/transfer_model/data/build.py new file mode 100644 index 0000000000000000000000000000000000000000..e9ad474fbba91e3a03dd0545b4c2035731bfa24b --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/data/build.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from typing import List, Tuple +import sys + +import torch +import torch.utils.data as dutils +from .datasets import MeshFolder + +from loguru import logger + + +def build_dataloader(exp_cfg): + dset_name = exp_cfg.datasets.name + if dset_name == 'mesh-folder': + mesh_folder_cfg = exp_cfg.datasets.mesh_folder + key, *_ = mesh_folder_cfg.keys() + value = mesh_folder_cfg[key] + logger.info(f'{key}: {value}\n') + dataset = MeshFolder(**mesh_folder_cfg) + else: + raise ValueError(f'Unknown dataset: {dset_name}') + + batch_size = exp_cfg.batch_size + num_workers = exp_cfg.datasets.num_workers + + logger.info( + f'Creating dataloader with B={batch_size}, workers={num_workers}') + dataloader = dutils.DataLoader(dataset, + batch_size=batch_size, + num_workers=num_workers, + shuffle=False) + + return {'dataloader': dataloader, 'dataset': dataset} diff --git a/LHM/models/rendering/smplx/transfer_model/data/datasets/__init__.py b/LHM/models/rendering/smplx/transfer_model/data/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f2e82ef01b229281dcae08e4e2cf56e5f6d5cb73 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/data/datasets/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from .mesh import MeshFolder diff --git a/LHM/models/rendering/smplx/transfer_model/data/datasets/mesh.py b/LHM/models/rendering/smplx/transfer_model/data/datasets/mesh.py new file mode 100644 index 0000000000000000000000000000000000000000..e15d8b908cd1e9b477b658eb70cbd0adeb8226a2 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/data/datasets/mesh.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from typing import Optional, Tuple + +import sys +import os +import os.path as osp + +import numpy as np +from psbody.mesh import Mesh +import trimesh + +import torch +from torch.utils.data import Dataset +from loguru import logger + + +class MeshFolder(Dataset): + def __init__( + self, + data_folder: str, + transforms=None, + exts: Optional[Tuple] = None + ) -> None: + ''' Dataset similar to ImageFolder that reads meshes with the same + topology + ''' + if exts is None: + exts = ['.obj', '.ply'] + + self.data_folder = osp.expandvars(data_folder) + + logger.info( + f'Building mesh folder dataset for folder: {self.data_folder}') + + self.data_paths = np.array([ + osp.join(self.data_folder, fname) + for fname in os.listdir(self.data_folder) + if any(fname.endswith(ext) for ext in exts) + ]) + self.num_items = len(self.data_paths) + + def __len__(self) -> int: + return self.num_items + + def __getitem__(self, index): + mesh_path = self.data_paths[index] + + # Load the mesh + mesh = trimesh.load(mesh_path, process=False) + + return { + 'vertices': np.asarray(mesh.vertices, dtype=np.float32), + 'faces': np.asarray(mesh.faces, dtype=np.int32), + 'indices': index, + 'paths': mesh_path, + } diff --git a/LHM/models/rendering/smplx/transfer_model/docs/transfer.md b/LHM/models/rendering/smplx/transfer_model/docs/transfer.md new file mode 100644 index 0000000000000000000000000000000000000000..91947fe9038c0b793c2fc59ee0c9a99dc3b0f59d --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/docs/transfer.md @@ -0,0 +1,115 @@ +# Converting SMPL to SMPL-X + + + + + + + + + + + + +The SMPL body model [1] is in wide use in computer vision and graphics for both +research and industrial applications. While widely used, SMPL lacks details like +articulated hands and an expressive face. The SMPL-X model [3] addresses this +and includes both the face and hands. + +Many legacy applications and datasets are built on SMPL and people want to +"upgrade" them to SMPL-X. While SMPL-X is based on the SMPL technology, they are +not completely interchangeable. + +Importantly the shape and pose parameters of SMPL and SMPL-X seem tantalizingly +similar. Sadly, you can't just take them from one model and use them with the +other. In particular, the joint locations in SMPL-X differ from those in SMPL, +meaning that the pose (theta) parameters are not interchangeable. 
+ +Here we describe a tool to convert back and forth between the models. This +involves fitting one model to the other to recover the right parameters. + +The first step in this process is to establish a mapping between SMPL and +SMPL-X, since their topologies differ. For this, we assume we have a SMPL-X +template mesh registered to the SMPL template. Now that the two surfaces match, +we compute and store the following quantities: + +* For each SMPL-X vertex find the nearest point on the SMPL mesh and store: + * The index $t_i$ of the triangle where the nearest point is located. + * Store the barycentric coordinates of the nearest point with respect to + the SMPL triangle $\left[a_i, b_i, c_i\right]$. + + +SMPL-X and SMPL share the same topology up to the neck, therefore the Barycentric coordinates of +these points are a permutation of `[1.0, 0.0, 0.0]`. We also store a mask of +valid vertices, to remove points that have no match between the two meshes, +such as the eyeballs or the inner mouth. If we color-code the correspondences +we end up with the following image, where the left mesh is SMPL and the right +one is SMPL-X: + +![Correspondences](./images/smpl_smplx_correspondence.png) + +Now that we have established the correspondences between the models, we can fit +SMPL-X to the SMPL annotations. +1. The first step is to build a mesh with the SMPL-X topology from the posed + SMPL annotations. + + 1. If $t_i$ is the index of the corresponding SMPL triangle for the i-th SMPL-X + vertex, then let $f_i \in \mathbb{N}^3$ be the 3 indices of the SMPL vertices that + form the triangle. + 2. Let $m_i$ be the binary mask value for the validity of this vertex. + 2. The i-th vertex is computed using the barycentrics $\left[a_i, b_i, c_i\right]$ as: + + $v_i^{SMPL-X} = a_i * v_{f_i^0}^{SMPL} + b_i * v_{f_i^1}^{SMPL} + c_i * v_{f_i^2}^{SMPL}$ + + 2. Now that we have a mesh in SMPL-X topology, we need to find the SMPL-X + parameters, i.e. pose $\theta$, shape $\beta$, expression $\psi$ and translation $\gamma$, that best explain it. + We use an iterative optimization scheme to + recover the parameters: + + 1. Optimize over the pose with a 3D edge term. Make sure that we only use + the valid edges, i.e. those whose both end points are found on both + meshes: + + $L_1\left(\theta\right) = \sum_{(i, j) \in \mathcal{E}} m_i m_j \left\lVert(v_i - v_j) - (\hat{v}_i - \hat{v}_j) \right\rVert_2^2$ + + 2. Optimize over the translation vector $\gamma$ to align the two models: + + $L_2\left({\gamma}\right) = \sum_{i} m_i \left\lVert v_i - \hat{v}_i \right\rVert$ + + 3. Optimize over all parameters, to get the tightest possible fit: + + $L_3\left((\theta, \beta, \psi, \gamma)\right) = \sum_{i} m_i \left\lVert v_i - \hat{v}_i \right\rVert_2^2$ + + +So now, if you have data in SMPL format, you can convert it to SMPL-X. This +should allow you to use it for training. + +For the inverse mapping, from SMPL-X to +SMPL, we follow a similar process to generate the correspondences and then optimize +over the SMPL parameters that best fit the +transferred mesh. Of course, if you choose to do this, you will lose all +information about the hands and the face, since SMPL is not able to model this. + +For SMPL and SMPL+H [2], the process is easier, since they share the same +topology. We can therefore skip the first step, since we already know the +correspondences, compute a SMPL or SMPL+H mesh and estimate the parameters of +the other model. 
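As a concrete illustration of the vertex construction in step 1 above, here is a minimal sketch; the array names and shapes are illustrative and are not the repository's API.

```python
# Sketch of the barycentric vertex construction described above: build
# SMPL-X-topology vertices from a posed SMPL mesh using the stored triangle
# indices t_i and barycentric coordinates [a_i, b_i, c_i].
import numpy as np

def build_smplx_vertices(smpl_vertices, smpl_faces, tri_ids, barycentrics):
    """smpl_vertices: (V, 3) float, smpl_faces: (F, 3) int,
    tri_ids: (N,) matched SMPL triangle index per SMPL-X vertex,
    barycentrics: (N, 3) coefficients [a_i, b_i, c_i]."""
    corners = smpl_vertices[smpl_faces[tri_ids]]            # (N, 3, 3)
    # v_i = a_i * v_{f_i^0} + b_i * v_{f_i^1} + c_i * v_{f_i^2}
    return np.einsum('nc,ncd->nd', barycentrics, corners)   # (N, 3)
```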
If we wish to transfer SMPL+H annotations, such as the AMASS +motion capture data [4], to SMPL-X, then we can use the correspondences of the +SMPL to SMPL-X mapping. + +## Bibliography + +[1]: Loper, M., Mahmood, N., Romero, J., Pons-Moll, G., Black, M.J.: SMPL: A +skinned multi-person linear model. ACM Transactions on Graphics (TOG) - Proceedings of ACM SIGGRAPH Asia 34(6), 248:1–248:16 (2015) + +[2]: Romero, J., Tzionas, D., Black, M.J.: Embodied hands: Modeling and capturing +hands and bodies together. ACM Transactions on Graphics (TOG) - Proceedings +of ACM SIGGRAPH Asia 36(6), 245:1–245:17 (2017) + +[3]: Pavlakos, G., Choutas, V., Ghorbani, N., Bolkart, T., Osman, A.A.A., Tzionas, +D., Black, M.J.: Expressive body capture: 3D hands, face, and body from a single +image. In: Proceedings of the IEEE Conference on Computer Vision and Pattern +Recognition (CVPR). pp. 10967–10977 (2019) + +[4]: Mahmood, N., Ghorbani, N., Troje, N.F., Pons-Moll, G., Black, M.J.: Amass: +Archive of motion capture as surface shapes. ICCV (2019) diff --git a/LHM/models/rendering/smplx/transfer_model/losses/__init__.py b/LHM/models/rendering/smplx/transfer_model/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d991ba0ddadfe1824d06d1bb336d52f9416d9a63 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/losses/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from .losses import * diff --git a/LHM/models/rendering/smplx/transfer_model/losses/losses.py b/LHM/models/rendering/smplx/transfer_model/losses/losses.py new file mode 100644 index 0000000000000000000000000000000000000000..57d96fd26889fe31cad62a8eec4e0e08f478870e --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/losses/losses.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + +import sys +import time +from typing import Callable, Iterator, Union, Optional, List + +import os.path as osp +import yaml +from loguru import logger + +import pickle + +import numpy as np + +import torch +import torch.autograd as autograd +import torch.nn as nn +import torch.nn.functional as F + +from .utils import get_reduction_method + +__all__ = [ + 'VertexEdgeLoss', + 'build_loss', +] + + +def build_loss(type='l2', reduction='mean', **kwargs) -> nn.Module: + logger.debug(f'Building loss: {type}') + if type == 'l2': + return WeightedMSELoss(reduction=reduction, **kwargs) + elif type == 'vertex-edge': + return VertexEdgeLoss(reduction=reduction, **kwargs) + elif type == 'l1': + return nn.L1Loss() + else: + raise ValueError(f'Unknown loss type: {type}') + + +class WeightedMSELoss(nn.Module): + def __init__(self, reduction='mean', **kwargs): + super(WeightedMSELoss, self).__init__() + self.reduce_str = reduction + self.reduce = get_reduction_method(reduction) + + def forward(self, input, target, weights=None): + diff = input - target + if weights is None: + return diff.pow(2).sum() / diff.shape[0] + else: + return ( + weights.unsqueeze(dim=-1) * diff.pow(2)).sum() / diff.shape[0] + + +class VertexEdgeLoss(nn.Module): + def __init__(self, norm_type='l2', + gt_edges=None, + gt_edge_path='', + est_edges=None, + est_edge_path='', + robustifier=None, + edge_thresh=0.0, epsilon=1e-8, + reduction='sum', + **kwargs): + super(VertexEdgeLoss, self).__init__() + + assert norm_type in ['l1', 'l2'], 'Norm type must be [l1, l2]' + self.norm_type = norm_type + self.epsilon = epsilon + self.reduction = reduction + assert self.reduction in ['sum', 'mean'] + logger.info(f'Building edge loss with' + f' norm_type={norm_type},' + f' reduction={reduction},' + ) + + gt_edge_path = osp.expandvars(gt_edge_path) + est_edge_path = osp.expandvars(est_edge_path) + assert osp.exists(gt_edge_path) or gt_edges is not None, ( + 'gt_edges must not be None or gt_edge_path must exist' + ) + assert osp.exists(est_edge_path) or est_edges is not None, ( + 'est_edges must not be None or est_edge_path must exist' + ) + if osp.exists(gt_edge_path) and gt_edges is None: + gt_edges = np.load(gt_edge_path) + if osp.exists(est_edge_path) and est_edges is None: + est_edges = np.load(est_edge_path) + + self.register_buffer( + 'gt_connections', torch.tensor(gt_edges, dtype=torch.long)) + self.register_buffer( + 'est_connections', torch.tensor(est_edges, dtype=torch.long)) + + def extra_repr(self): + msg = [ + f'Norm type: {self.norm_type}', + ] + if self.has_connections: + msg.append( + f'GT Connections shape: {self.gt_connections.shape}' + ) + msg.append( + f'Est Connections shape: {self.est_connections.shape}' + ) + return '\n'.join(msg) + + def compute_edges(self, points, connections): + edge_points = torch.index_select( + points, 1, connections.view(-1)).reshape(points.shape[0], -1, 2, 3) + return edge_points[:, :, 1] - edge_points[:, :, 0] + + def forward(self, gt_vertices, est_vertices, weights=None): + gt_edges = self.compute_edges( + gt_vertices, connections=self.gt_connections) + est_edges = self.compute_edges( + est_vertices, connections=self.est_connections) + + raw_edge_diff = (gt_edges - est_edges) + + batch_size = gt_vertices.shape[0] + if self.norm_type == 'l2': + edge_diff = raw_edge_diff.pow(2) + elif self.norm_type == 'l1': + edge_diff = 
raw_edge_diff.abs() + else: + raise NotImplementedError( + f'Loss type not implemented: {self.loss_type}') + if self.reduction == 'sum': + return edge_diff.sum() + elif self.reduction == 'mean': + return edge_diff.sum() / batch_size diff --git a/LHM/models/rendering/smplx/transfer_model/losses/utils.py b/LHM/models/rendering/smplx/transfer_model/losses/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..99d9feeaba888130ae22494bf47e450560eac584 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/losses/utils.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +import torch + +def get_reduction_method(reduction='mean'): + if reduction == 'mean': + return torch.mean + elif reduction == 'sum': + return torch.sum + elif reduction == 'none': + return lambda x: x + else: + raise ValueError('Unknown reduction method: {}'.format(reduction)) diff --git a/LHM/models/rendering/smplx/transfer_model/merge_output.py b/LHM/models/rendering/smplx/transfer_model/merge_output.py new file mode 100644 index 0000000000000000000000000000000000000000..f7e4d52d9e9a63d996c43d49bc1e071c554fc21e --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/merge_output.py @@ -0,0 +1,88 @@ +# merges the output of the main transfer_model script + +import torch +from pathlib import Path +import pickle +from scipy.spatial.transform import Rotation as R + +KEYS = [ +"transl", +"global_orient", +"body_pose", +"betas", +"left_hand_pose", +"right_hand_pose", +"jaw_pose", +"leye_pose", +"reye_pose", +"expression", +"vertices", +"joints", +"full_pose", +"v_shaped", +"faces" +] + +IGNORED_KEYS = [ +"vertices", +"faces", +"v_shaped" +] + +def aggregate_rotmats(x): + x = torch.cat(x, dim=0).detach().numpy() + s = x.shape[:-2] + x = R.from_matrix(x.reshape(-1, 3, 3)).as_rotvec() + x = x.reshape(s[0], -1) + return x + +aggregate_function = {k: lambda x: torch.cat(x, 0).detach().numpy() for k in KEYS} +aggregate_function["betas"] = lambda x: torch.cat(x, 0).mean(0).detach().numpy() + +for k in ["global_orient", "body_pose", "left_hand_pose", "right_hand_pose", "jaw_pose", "full_pose"]: + aggregate_function[k] = aggregate_rotmats + +def merge(output_dir, gender): + output_dir = Path(output_dir) + assert output_dir.exists() + assert output_dir.is_dir() + + # get list of all pkl files in output_dir with fixed length numeral names + pkl_files = [f for f in output_dir.glob("*.pkl") if f.stem != "merged"] + pkl_files = [f for f in sorted(pkl_files, key=lambda x: int(x.stem))] + assert "merged.pkl" not in [f.name for f in pkl_files] + + merged = {} + # iterate over keys and put all values in lists + keys = set(KEYS) - set(IGNORED_KEYS) + for k in keys: + merged[k] = [] + for pkl_file in pkl_files: + with open(pkl_file, "rb") as f: + data = pickle.load(f) + for k in keys: + if k in data: + merged[k].append(data[k]) + b = 
torch.cat(merged["betas"], 0) + print("betas:") + for mu, sigma in zip(b.mean(0), b.std(0)): + print(" {:.3f} +/- {:.3f}".format(mu, sigma)) + + # aggregate all values + for k in keys: + merged[k] = aggregate_function[k](merged[k]) + + # add gender + merged["gender"] = gender + + # save merged data to same output_dir + with open(output_dir / "merged.pkl", "wb") as f: + pickle.dump(merged, f) + +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser(description='Merge output of transfer_model script') + parser.add_argument('output_dir', type=str, help='output directory of transfer_model script') + parser.add_argument('--gender', type=str, choices=['male', 'female', 'neutral'], help='gender of actor in motion sequence') + args = parser.parse_args() + merge(args.output_dir, args.gender) diff --git a/LHM/models/rendering/smplx/transfer_model/optimizers/__init__.py b/LHM/models/rendering/smplx/transfer_model/optimizers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6adf3793bf0c313677506966ef400b29b1da2c44 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/optimizers/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from .optim_factory import build_optimizer +from .minimize import minimize diff --git a/LHM/models/rendering/smplx/transfer_model/optimizers/minimize.py b/LHM/models/rendering/smplx/transfer_model/optimizers/minimize.py new file mode 100644 index 0000000000000000000000000000000000000000..dd45c4111d52f65f71ac676d87a57cff24bb36c6 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/optimizers/minimize.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
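The per-frame pose parameters collected above are rotation matrices; aggregate_rotmats flattens them back to axis-angle with scipy before the merged file is written. A minimal sketch of that conversion (batch size and values assumed for illustration):

import numpy as np
from scipy.spatial.transform import Rotation as R

# a batch of 4 rotation matrices, shape (4, 3, 3) -- identities here
rotmats = np.repeat(np.eye(3)[None], 4, axis=0)

# as_rotvec() yields axis-angle vectors of shape (4, 3),
# which is what aggregate_rotmats stores per pose parameter
rotvecs = R.from_matrix(rotmats).as_rotvec()
print(rotvecs.shape)  # (4, 3)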
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from typing import List, Union, Callable, Optional, Dict +import torch +from loguru import logger +from tqdm import tqdm + +from transfer_model.utils import ( + from_torch, Tensor, Array, rel_change) + + +def minimize( + optimizer: torch.optim, + closure, + params: List[Tensor], + summary_closure: Optional[Callable[[], Dict[str, float]]] = None, + maxiters=100, + ftol=-1.0, + gtol=1e-9, + interactive=True, + summary_steps=10, + **kwargs +): + ''' Helper function for running an optimization process + Args: + - optimizer: The PyTorch optimizer object + - closure: The function used to calculate the gradients + - params: a list containing the parameters that will be optimized + Keyword arguments: + - maxiters (100): The maximum number of iterations for the + optimizer + - ftol: The tolerance for the relative change in the loss + function. + If it is lower than this value, then the process stops + - gtol: The tolerance for the maximum change in the gradient. + If the maximum absolute values of the all gradient tensors + are less than this, then the process will stop. + ''' + prev_loss = None + for n in tqdm(range(maxiters), desc='Fitting iterations'): + loss = optimizer.step(closure) + + if n > 0 and prev_loss is not None and ftol > 0: + loss_rel_change = rel_change(prev_loss, loss.item()) + + if loss_rel_change <= ftol: + prev_loss = loss.item() + break + + if (all([var.grad.view(-1).abs().max().item() < gtol + for var in params if var.grad is not None]) and gtol > 0): + prev_loss = loss.item() + break + + if interactive and n % summary_steps == 0: + logger.info(f'[{n:05d}] Loss: {loss.item():.4f}') + if summary_closure is not None: + summaries = summary_closure() + for key, val in summaries.items(): + logger.info(f'[{n:05d}] {key}: {val:.4f}') + + prev_loss = loss.item() + + # Save the final step + if interactive: + logger.info(f'[{n + 1:05d}] Loss: {loss.item():.4f}') + if summary_closure is not None: + summaries = summary_closure() + for key, val in summaries.items(): + logger.info(f'[{n + 1:05d}] {key}: {val:.4f}') + + return prev_loss diff --git a/LHM/models/rendering/smplx/transfer_model/optimizers/optim_factory.py b/LHM/models/rendering/smplx/transfer_model/optimizers/optim_factory.py new file mode 100644 index 0000000000000000000000000000000000000000..43b161e776df9bd1303d59ee408bb20cdf5721a0 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/optimizers/optim_factory.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
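minimize() above drives any closure-style PyTorch optimizer: step() invokes the closure, which zeroes gradients, evaluates the loss, and backpropagates, while ftol/gtol provide early stopping. A toy sketch of the expected closure pattern (objective and values invented for illustration):

import torch

x = torch.tensor([3.0, -2.0], requires_grad=True)
optimizer = torch.optim.LBFGS([x], lr=1.0, max_iter=5)

def closure(backward=True):
    if backward:
        optimizer.zero_grad()
    loss = (x - 1.0).pow(2).sum()  # simple quadratic objective
    if backward:
        loss.backward()
    return loss

# equivalent call through the helper defined above:
# minimize(optimizer, closure, params=[x], maxiters=20, ftol=1e-9)
for _ in range(20):
    optimizer.step(closure)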
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +import sys + +from typing import NewType, List, Dict + +import torch +import torch.optim as optim +from loguru import logger +from torchtrustncg import TrustRegion + +Tensor = NewType('Tensor', torch.Tensor) + + +def build_optimizer(parameters: List[Tensor], + optim_cfg: Dict + ) -> Dict: + ''' Creates the optimizer + ''' + optim_type = optim_cfg.get('type', 'sgd') + logger.info(f'Building: {optim_type.title()}') + + num_params = len(parameters) + parameters = list(filter(lambda x: x.requires_grad, parameters)) + if num_params != len(parameters): + logger.info(f'Some parameters have requires_grad off') + + if optim_type == 'adam': + optimizer = optim.Adam(parameters, **optim_cfg.get('adam', {})) + create_graph = False + elif optim_type == 'lbfgs' or optim_type == 'lbfgsls': + optimizer = optim.LBFGS(parameters, **optim_cfg.get('lbfgs', {})) + create_graph = False + elif optim_type == 'trust_ncg' or optim_type == 'trust-ncg': + optimizer = TrustRegion( + parameters, **optim_cfg.get('trust_ncg', {})) + create_graph = True + elif optim_type == 'rmsprop': + optimizer = optim.RMSprop(parameters, **optim_cfg.get('rmsprop', {})) + create_graph = False + elif optim_type == 'sgd': + optimizer = optim.SGD(parameters, **optim_cfg.get('sgd', {})) + create_graph = False + else: + raise ValueError(f'Optimizer {optim_type} not supported!') + return {'optimizer': optimizer, 'create_graph': create_graph} + + +def build_scheduler(optimizer, sched_type='exp', + lr_lambda=0.1, **kwargs): + if lr_lambda <= 0.0: + return None + + if sched_type == 'exp': + return optim.lr_scheduler.ExponentialLR(optimizer, lr_lambda) + else: + raise ValueError('Unknown learning rate' + + ' scheduler: '.format(sched_type)) diff --git a/LHM/models/rendering/smplx/transfer_model/transfer_model.py b/LHM/models/rendering/smplx/transfer_model/transfer_model.py new file mode 100644 index 0000000000000000000000000000000000000000..22db75398148ac447ee56fd84f9cb249d279fd0f --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/transfer_model.py @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
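build_optimizer() above reads the optimizer type and its keyword arguments from a nested config dict and returns the optimizer together with a create_graph flag (True only for the trust-region solver). A sketch of a matching config, with hyperparameter values assumed for illustration:

import torch

params = [torch.zeros(10, requires_grad=True)]

optim_cfg = {
    'type': 'lbfgs',
    'lbfgs': {'lr': 1.0, 'max_iter': 30, 'line_search_fn': 'strong_wolfe'},
}

# build_optimizer as defined above is assumed to be in scope
optimizer_dict = build_optimizer(params, optim_cfg)
optimizer = optimizer_dict['optimizer']        # torch.optim.LBFGS instance
create_graph = optimizer_dict['create_graph']  # False for L-BFGS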
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from typing import Optional, Dict, Callable +import sys +import numpy as np +import torch +import torch.nn as nn + +from tqdm import tqdm + +from loguru import logger +from .utils import get_vertices_per_edge + +from .optimizers import build_optimizer, minimize +from .utils import ( + Tensor, batch_rodrigues, apply_deformation_transfer) +from .losses import build_loss + + +def summary_closure(gt_vertices, var_dict, body_model, mask_ids=None): + param_dict = {} + for key, var in var_dict.items(): + # Decode the axis-angles + if 'pose' in key or 'orient' in key: + param_dict[key] = batch_rodrigues( + var.reshape(-1, 3)).reshape(len(var), -1, 3, 3) + else: + # Simply pass the variable + param_dict[key] = var + body_model_output = body_model( + return_full_pose=True, get_skin=True, **param_dict) + est_vertices = body_model_output['vertices'] + if mask_ids is not None: + est_vertices = est_vertices[:, mask_ids] + gt_vertices = gt_vertices[:, mask_ids] + + v2v = (est_vertices - gt_vertices).pow(2).sum(dim=-1).sqrt().mean() + return { + 'Vertex-to-Vertex': v2v * 1000} + + +def build_model_forward_closure( + body_model: nn.Module, + var_dict: Dict[str, Tensor], + per_part: bool = True, + part_key: Optional[str] = None, + jidx: Optional[int] = None, + part: Optional[Tensor] = None +) -> Callable: + if per_part: + cond = part is not None and part_key is not None and jidx is not None + assert cond, ( + 'When per-part is True, "part", "part_key", "jidx" must not be' + ' None.' + ) + + def model_forward(): + param_dict = {} + for key, var in var_dict.items(): + if part_key == key: + param_dict[key] = batch_rodrigues( + var.reshape(-1, 3)).reshape(len(var), -1, 3, 3) + param_dict[key][:, jidx] = batch_rodrigues( + part.reshape(-1, 3)).reshape(-1, 3, 3) + else: + # Decode the axis-angles + if 'pose' in key or 'orient' in key: + param_dict[key] = batch_rodrigues( + var.reshape(-1, 3)).reshape(len(var), -1, 3, 3) + else: + # Simply pass the variable + param_dict[key] = var + + return body_model( + return_full_pose=True, get_skin=True, **param_dict) + else: + def model_forward(): + param_dict = {} + for key, var in var_dict.items(): + # Decode the axis-angles + if 'pose' in key or 'orient' in key: + param_dict[key] = batch_rodrigues( + var.reshape(-1, 3)).reshape(len(var), -1, 3, 3) + else: + # Simply pass the variable + param_dict[key] = var + + return body_model(return_full_pose=True, get_skin=True, + **param_dict) + return model_forward + + +def build_edge_closure( + body_model: nn.Module, + var_dict: Dict[str, Tensor], + edge_loss: nn.Module, + optimizer_dict, + gt_vertices: Tensor, + per_part: bool = True, + part_key: Optional[str] = None, + jidx: Optional[int] = None, + part: Optional[Tensor] = None +) -> Callable: + ''' Builds the closure for the edge objective + ''' + optimizer = optimizer_dict['optimizer'] + create_graph = optimizer_dict['create_graph'] + + if per_part: + params_to_opt = [part] + else: + params_to_opt = [p for key, p in var_dict.items() if 'pose' in key] + + model_forward = build_model_forward_closure( + body_model, var_dict, per_part=per_part, part_key=part_key, + jidx=jidx, part=part) + + def closure(backward=True): + if backward: + optimizer.zero_grad() + + body_model_output = model_forward() + est_vertices = body_model_output['vertices'] + + loss = edge_loss(est_vertices, gt_vertices) + if backward: + if create_graph: + # Use this instead of .backward to avoid GPU memory leaks + grads = torch.autograd.grad( + 
loss, params_to_opt, create_graph=True) + torch.autograd.backward( + params_to_opt, grads, create_graph=True) + else: + loss.backward() + + return loss + return closure + + +def build_vertex_closure( + body_model: nn.Module, + var_dict: Dict[str, Tensor], + optimizer_dict, + gt_vertices: Tensor, + vertex_loss: nn.Module, + mask_ids=None, + per_part: bool = True, + part_key: Optional[str] = None, + jidx: Optional[int] = None, + part: Optional[Tensor] = None, + params_to_opt: Optional[Tensor] = None, +) -> Callable: + ''' Builds the closure for the vertex objective + ''' + optimizer = optimizer_dict['optimizer'] + create_graph = optimizer_dict['create_graph'] + + model_forward = build_model_forward_closure( + body_model, var_dict, per_part=per_part, part_key=part_key, + jidx=jidx, part=part) + + if params_to_opt is None: + params_to_opt = [p for key, p in var_dict.items()] + + def closure(backward=True): + if backward: + optimizer.zero_grad() + + body_model_output = model_forward() + est_vertices = body_model_output['vertices'] + + loss = vertex_loss( + est_vertices[:, mask_ids] if mask_ids is not None else + est_vertices, + gt_vertices[:, mask_ids] if mask_ids is not None else gt_vertices) + if backward: + if create_graph: + # Use this instead of .backward to avoid GPU memory leaks + grads = torch.autograd.grad( + loss, params_to_opt, create_graph=True) + torch.autograd.backward( + params_to_opt, grads, create_graph=True) + else: + loss.backward() + + return loss + return closure + + +def get_variables( + batch_size: int, + body_model: nn.Module, + dtype: torch.dtype = torch.float32 +) -> Dict[str, Tensor]: + var_dict = {} + + device = next(body_model.buffers()).device + + if (body_model.name() == 'SMPL' or body_model.name() == 'SMPL+H' or + body_model.name() == 'SMPL-X'): + var_dict.update({ + 'transl': torch.zeros( + [batch_size, 3], device=device, dtype=dtype), + 'global_orient': torch.zeros( + [batch_size, 1, 3], device=device, dtype=dtype), + 'body_pose': torch.zeros( + [batch_size, body_model.NUM_BODY_JOINTS, 3], + device=device, dtype=dtype), + 'betas': torch.zeros([batch_size, body_model.num_betas], + dtype=dtype, device=device), + }) + + if body_model.name() == 'SMPL+H' or body_model.name() == 'SMPL-X': + var_dict.update( + left_hand_pose=torch.zeros( + [batch_size, body_model.NUM_HAND_JOINTS, 3], device=device, + dtype=dtype), + right_hand_pose=torch.zeros( + [batch_size, body_model.NUM_HAND_JOINTS, 3], device=device, + dtype=dtype), + ) + + if body_model.name() == 'SMPL-X': + var_dict.update( + jaw_pose=torch.zeros([batch_size, 1, 3], + device=device, dtype=dtype), + leye_pose=torch.zeros([batch_size, 1, 3], + device=device, dtype=dtype), + reye_pose=torch.zeros([batch_size, 1, 3], + device=device, dtype=dtype), + expression=torch.zeros( + [batch_size, body_model.num_expression_coeffs], + device=device, dtype=dtype), + ) + + # Toggle gradients to True + for key, val in var_dict.items(): + val.requires_grad_(True) + + return var_dict + + +def run_fitting( + exp_cfg, + batch: Dict[str, Tensor], + body_model: nn.Module, + def_matrix: Tensor, + mask_ids: Optional = None +) -> Dict[str, Tensor]: + ''' Runs fitting + ''' + vertices = batch['vertices'] + faces = batch['faces'] + + batch_size = len(vertices) + dtype, device = vertices.dtype, vertices.device + summary_steps = exp_cfg.get('summary_steps') + interactive = exp_cfg.get('interactive') + + # Get the parameters from the model + var_dict = get_variables(batch_size, body_model) + + # Build the optimizer object for the current 
batch + optim_cfg = exp_cfg.get('optim', {}) + + def_vertices = apply_deformation_transfer(def_matrix, vertices, faces) + + if mask_ids is None: + f_sel = np.ones_like(body_model.faces[:, 0], dtype=np.bool_) + else: + f_per_v = [[] for _ in range(body_model.get_num_verts())] + [f_per_v[vv].append(iff) for iff, ff in enumerate(body_model.faces) + for vv in ff] + f_sel = list(set(tuple(sum([f_per_v[vv] for vv in mask_ids], [])))) + vpe = get_vertices_per_edge( + body_model.v_template.detach().cpu().numpy(), body_model.faces[f_sel]) + + def log_closure(): + return summary_closure(def_vertices, var_dict, body_model, + mask_ids=mask_ids) + + edge_fitting_cfg = exp_cfg.get('edge_fitting', {}) + edge_loss = build_loss(type='vertex-edge', gt_edges=vpe, est_edges=vpe, + **edge_fitting_cfg) + edge_loss = edge_loss.to(device=device) + + vertex_fitting_cfg = exp_cfg.get('vertex_fitting', {}) + vertex_loss = build_loss(**vertex_fitting_cfg) + vertex_loss = vertex_loss.to(device=device) + + per_part = edge_fitting_cfg.get('per_part', True) + logger.info(f'Per-part: {per_part}') + # Optimize edge-based loss to initialize pose + if per_part: + for key, var in tqdm(var_dict.items(), desc='Parts'): + if 'pose' not in key: + continue + + for jidx in tqdm(range(var.shape[1]), desc='Joints'): + part = torch.zeros( + [batch_size, 3], dtype=dtype, device=device, + requires_grad=True) + # Build the optimizer for the current part + optimizer_dict = build_optimizer([part], optim_cfg) + closure = build_edge_closure( + body_model, var_dict, edge_loss, optimizer_dict, + def_vertices, per_part=per_part, part_key=key, jidx=jidx, + part=part) + + minimize(optimizer_dict['optimizer'], closure, + params=[part], + summary_closure=log_closure, + summary_steps=summary_steps, + interactive=interactive, + **optim_cfg) + with torch.no_grad(): + var[:, jidx] = part + else: + optimizer_dict = build_optimizer(list(var_dict.values()), optim_cfg) + closure = build_edge_closure( + body_model, var_dict, edge_loss, optimizer_dict, + def_vertices, per_part=per_part) + + minimize(optimizer_dict['optimizer'], closure, + params=var_dict.values(), + summary_closure=log_closure, + summary_steps=summary_steps, + interactive=interactive, + **optim_cfg) + + if 'translation' in var_dict: + optimizer_dict = build_optimizer([var_dict['translation']], optim_cfg) + closure = build_vertex_closure( + body_model, var_dict, + optimizer_dict, + def_vertices, + vertex_loss=vertex_loss, + mask_ids=mask_ids, + per_part=False, + params_to_opt=[var_dict['translation']], + ) + # Optimize translation + minimize(optimizer_dict['optimizer'], + closure, + params=[var_dict['translation']], + summary_closure=log_closure, + summary_steps=summary_steps, + interactive=interactive, + **optim_cfg) + + # Optimize all model parameters with vertex-based loss + optimizer_dict = build_optimizer(list(var_dict.values()), optim_cfg) + closure = build_vertex_closure( + body_model, var_dict, + optimizer_dict, + def_vertices, + vertex_loss=vertex_loss, + per_part=False, + mask_ids=mask_ids) + minimize(optimizer_dict['optimizer'], closure, + params=list(var_dict.values()), + summary_closure=log_closure, + summary_steps=summary_steps, + interactive=interactive, + **optim_cfg) + + param_dict = {} + for key, var in var_dict.items(): + # Decode the axis-angles + if 'pose' in key or 'orient' in key: + param_dict[key] = batch_rodrigues( + var.reshape(-1, 3)).reshape(len(var), -1, 3, 3) + else: + # Simply pass the variable + param_dict[key] = var + + body_model_output = body_model( + 
return_full_pose=True, get_skin=True, **param_dict) + var_dict.update(body_model_output) + var_dict['faces'] = body_model.faces + + return var_dict diff --git a/LHM/models/rendering/smplx/transfer_model/utils/__init__.py b/LHM/models/rendering/smplx/transfer_model/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..78c1bfe456318eb1b1b7e8c1cbd3c7c566e58751 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/__init__.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from .np_utils import to_np, rel_change +from .torch_utils import from_torch +from .timer import Timer, timer_decorator +from .typing import * +from .pose_utils import batch_rodrigues, batch_rot2aa +from .metrics import v2v +from .def_transfer import read_deformation_transfer, apply_deformation_transfer +from .mesh_utils import get_vertices_per_edge +from .o3d_utils import np_mesh_to_o3d diff --git a/LHM/models/rendering/smplx/transfer_model/utils/def_transfer.py b/LHM/models/rendering/smplx/transfer_model/utils/def_transfer.py new file mode 100644 index 0000000000000000000000000000000000000000..30d2f627b4929ff7f3ae45123ee08cd367ac1ef1 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/def_transfer.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
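The deformation transfer used by run_fitting() is a single linear map: a (target_verts x source_verts) matrix applied to every mesh in the batch. A shape-level sketch with random data (vertex counts are the usual SMPL-X/SMPL sizes, assumed for illustration):

import torch

def_matrix = torch.rand(10475, 6890)   # target (SMPL-X) verts x source (SMPL) verts
vertices = torch.rand(2, 6890, 3)      # batch of source meshes

# the same contraction performed by apply_deformation_transfer()
def_vertices = torch.einsum('mn,bni->bmi', def_matrix, vertices)
print(def_vertices.shape)              # torch.Size([2, 10475, 3])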
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +import os +import os.path as osp +import pickle + +import numpy as np +import torch +from loguru import logger + +from .typing import Tensor + + +def read_deformation_transfer( + deformation_transfer_path: str, + device=None, + use_normal: bool = False, +) -> Tensor: + ''' Reads a deformation transfer + ''' + if device is None: + device = torch.device('cpu') + assert osp.exists(deformation_transfer_path), ( + 'Deformation transfer path does not exist:' + f' {deformation_transfer_path}') + logger.info( + f'Loading deformation transfer from: {deformation_transfer_path}') + # Read the deformation transfer matrix + with open(deformation_transfer_path, 'rb') as f: + def_transfer_setup = pickle.load(f, encoding='latin1') + if 'mtx' in def_transfer_setup: + def_matrix = def_transfer_setup['mtx'] + if hasattr(def_matrix, 'todense'): + def_matrix = def_matrix.todense() + def_matrix = np.array(def_matrix, dtype=np.float32) + if not use_normal: + num_verts = def_matrix.shape[1] // 2 + def_matrix = def_matrix[:, :num_verts] + elif 'matrix' in def_transfer_setup: + def_matrix = def_transfer_setup['matrix'] + else: + valid_keys = ['mtx', 'matrix'] + raise KeyError(f'Deformation transfer setup must contain {valid_keys}') + + def_matrix = torch.tensor(def_matrix, device=device, dtype=torch.float32) + return def_matrix + + +def apply_deformation_transfer( + def_matrix: Tensor, + vertices: Tensor, + faces: Tensor, + use_normals=False +) -> Tensor: + ''' Applies the deformation transfer on the given meshes + ''' + if use_normals: + raise NotImplementedError + else: + def_vertices = torch.einsum('mn,bni->bmi', [def_matrix, vertices]) + return def_vertices diff --git a/LHM/models/rendering/smplx/transfer_model/utils/mesh_utils.py b/LHM/models/rendering/smplx/transfer_model/utils/mesh_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4d681b2f2ea235d01e1df0db64618d6f18683989 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/mesh_utils.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Code from Chumpy and OpenDR. Placed here to avoid chumpy dependency +# The original code can be found in https://github.com/MPI-IS/mesh +import numpy as np +import scipy.sparse as sp + + +def row(A): + return A.reshape((1, -1)) + + +def col(A): + return A.reshape((-1, 1)) + + +def get_vert_connectivity(mesh_v, mesh_f): + """Returns a sparse matrix (of size #verts x #verts) where each nonzero + element indicates a neighborhood relation. For example, if there is a + nonzero element in position (15,12), that means vertex 15 is connected + by an edge to vertex 12.""" + + vpv = sp.csc_matrix((len(mesh_v), len(mesh_v))) + + # for each column in the faces... 
+ for i in range(3): + IS = mesh_f[:, i] + JS = mesh_f[:, (i + 1) % 3] + data = np.ones(len(IS)) + ij = np.vstack((row(IS.flatten()), row(JS.flatten()))) + mtx = sp.csc_matrix((data, ij), shape=vpv.shape) + vpv = vpv + mtx + mtx.T + + return vpv + + +def get_vertices_per_edge(mesh_v, mesh_f): + """Returns an Ex2 array of adjacencies between vertices, where + each element in the array is a vertex index. Each edge is included + only once. If output of get_faces_per_edge is provided, this is used to + avoid call to get_vert_connectivity()""" + + vc = sp.coo_matrix(get_vert_connectivity(mesh_v, mesh_f)) + result = np.hstack((col(vc.row), col(vc.col))) + result = result[result[:, 0] < result[:, 1]] # for uniqueness + + return result diff --git a/LHM/models/rendering/smplx/transfer_model/utils/metrics.py b/LHM/models/rendering/smplx/transfer_model/utils/metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..d7f8209423ce342a1137a1c4bfe22709cf0f0357 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/metrics.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +import numpy as np +import torch + + +def v2v(x, y): + if torch.is_tensor(x): + return (x - y).pow(2).sum(dim=-1).sqrt().mean() + else: + return np.sqrt(np.power(x - y, 2)).sum(axis=-1).mean() diff --git a/LHM/models/rendering/smplx/transfer_model/utils/np_utils.py b/LHM/models/rendering/smplx/transfer_model/utils/np_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d0cfa56dfd7cda764b127048683617ad437b48d2 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/np_utils.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
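get_vertices_per_edge() above returns every undirected edge exactly once, as low-to-high vertex-index pairs. A tiny sketch on a two-triangle quad (coordinates are dummies; the function is assumed to be in scope):

import numpy as np

verts = np.zeros((4, 3))                  # 4 placeholder vertices
faces = np.array([[0, 1, 2], [0, 2, 3]])  # two triangles sharing edge (0, 2)

edges = get_vertices_per_edge(verts, faces)
# 5 unique edges: (0,1), (0,2), (0,3), (1,2), (2,3)
print(edges.shape)  # (5, 2)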
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de +from __future__ import absolute_import +from __future__ import print_function +from __future__ import division + +import numpy as np + + +def rel_change(prev_val, curr_val): + return (prev_val - curr_val) / max([np.abs(prev_val), np.abs(curr_val), 1]) + + +def max_grad_change(grad_arr): + return grad_arr.abs().max() + + +def to_np(array, dtype=np.float32): + if hasattr(array, 'todense'): + array = array.todense() + return np.array(array, dtype=dtype) diff --git a/LHM/models/rendering/smplx/transfer_model/utils/o3d_utils.py b/LHM/models/rendering/smplx/transfer_model/utils/o3d_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..758f85614fa2083c8c0febc513d35413d66c2c20 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/o3d_utils.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +import open3d as o3d +import torch + +Vector3d = o3d.utility.Vector3dVector +Vector3i = o3d.utility.Vector3iVector + +Mesh = o3d.geometry.TriangleMesh + + +def np_mesh_to_o3d(vertices, faces): + if torch.is_tensor(vertices): + vertices = vertices.detach().cpu().numpy() + if torch.is_tensor(faces): + faces = faces.detach().cpu().numpy() + mesh = Mesh() + mesh.vertices = Vector3d(vertices) + mesh.triangles = Vector3i(faces) + return mesh diff --git a/LHM/models/rendering/smplx/transfer_model/utils/pose_utils.py b/LHM/models/rendering/smplx/transfer_model/utils/pose_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a2dc90b6d66d4643275596883b0e5a7998adf670 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/pose_utils.py @@ -0,0 +1,147 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
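rel_change() above is the quantity minimize() compares against ftol: the loss decrease normalised by the larger loss magnitude, with the denominator floored at 1. Two worked values (function assumed in scope):

print(rel_change(100.0, 99.0))  # 0.01
print(rel_change(0.5, 0.4))     # 0.1, denominator floored at 1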
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +import sys +from typing import NewType, List, Dict, Optional +import os +import os.path as osp + +import pickle + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from omegaconf import OmegaConf +from loguru import logger + +from .typing import Tensor + + +def rotation_matrix_to_cont_repr(x: Tensor) -> Tensor: + assert len(x.shape) == 3, ( + f'Expects an array of size Bx3x3, but received {x.shape}') + return x[:, :3, :2] + + +def cont_repr_to_rotation_matrix( + x: Tensor +) -> Tensor: + ''' Converts tensor in continous representation to rotation matrices + ''' + batch_size = x.shape[0] + reshaped_input = x.view(-1, 3, 2) + + # Normalize the first vector + b1 = F.normalize(reshaped_input[:, :, 0].clone(), dim=1) + + dot_prod = torch.sum( + b1 * reshaped_input[:, :, 1].clone(), dim=1, keepdim=True) + # Compute the second vector by finding the orthogonal complement to it + b2 = F.normalize(reshaped_input[:, :, 1] - dot_prod * b1, dim=1) + # Finish building the basis by taking the cross product + b3 = torch.cross(b1, b2, dim=1) + rot_mats = torch.stack([b1, b2, b3], dim=-1) + + return rot_mats.view(batch_size, -1, 3, 3) + + +def batch_rodrigues( + rot_vecs: Tensor, + epsilon: float = 1e-8 +) -> Tensor: + ''' Calculates the rotation matrices for a batch of rotation vectors + Parameters + ---------- + rot_vecs: torch.tensor Nx3 + array of N axis-angle vectors + Returns + ------- + R: torch.tensor Nx3x3 + The rotation matrices for the given axis-angle parameters + ''' + assert len(rot_vecs.shape) == 2, ( + f'Expects an array of size Bx3, but received {rot_vecs.shape}') + + batch_size = rot_vecs.shape[0] + device = rot_vecs.device + dtype = rot_vecs.dtype + + angle = torch.norm(rot_vecs + epsilon, dim=1, keepdim=True, p=2) + rot_dir = rot_vecs / angle + + cos = torch.unsqueeze(torch.cos(angle), dim=1) + sin = torch.unsqueeze(torch.sin(angle), dim=1) + + # Bx1 arrays + rx, ry, rz = torch.split(rot_dir, 1, dim=1) + K = torch.zeros((batch_size, 3, 3), dtype=dtype, device=device) + + zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device) + K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \ + .view((batch_size, 3, 3)) + + ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0) + rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K) + return rot_mat + + +def batch_rot2aa( + Rs: Tensor, epsilon: float = 1e-7 +) -> Tensor: + """ + Rs is B x 3 x 3 + void cMathUtil::RotMatToAxisAngle(const tMatrix& mat, tVector& out_axis, + double& out_theta) + { + double c = 0.5 * (mat(0, 0) + mat(1, 1) + mat(2, 2) - 1); + c = cMathUtil::Clamp(c, -1.0, 1.0); + + out_theta = std::acos(c); + + if (std::abs(out_theta) < 0.00001) + { + out_axis = tVector(0, 0, 1, 0); + } + else + { + double m21 = mat(2, 1) - mat(1, 2); + double m02 = mat(0, 2) - mat(2, 0); + double m10 = mat(1, 0) - mat(0, 1); + double denom = std::sqrt(m21 * m21 + m02 * m02 + m10 * m10); + out_axis[0] = m21 / denom; + out_axis[1] = m02 / denom; + out_axis[2] = m10 / denom; + out_axis[3] = 0; + } + } + """ + + cos = 0.5 * (torch.einsum('bii->b', [Rs]) - 1) + cos = torch.clamp(cos, -1 + epsilon, 1 - epsilon) + + theta = torch.acos(cos) + + m21 = Rs[:, 2, 1] - Rs[:, 1, 2] + m02 = Rs[:, 0, 2] - Rs[:, 2, 0] + m10 = Rs[:, 1, 0] - Rs[:, 0, 1] + denom = torch.sqrt(m21 * m21 + m02 * m02 + m10 * m10 + epsilon) + + axis0 = torch.where(torch.abs(theta) < 0.00001, m21, m21 / denom) + axis1 = torch.where(torch.abs(theta) < 0.00001, m02, 
m02 / denom) + axis2 = torch.where(torch.abs(theta) < 0.00001, m10, m10 / denom) + + return theta.unsqueeze(1) * torch.stack([axis0, axis1, axis2], 1) diff --git a/LHM/models/rendering/smplx/transfer_model/utils/timer.py b/LHM/models/rendering/smplx/transfer_model/utils/timer.py new file mode 100644 index 0000000000000000000000000000000000000000..027de5b83f67e241ac5d018e6732b9d27f7a2c91 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/timer.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +import time +import numpy as np +import torch + +from loguru import logger + + +class Timer(object): + def __init__(self, name='', sync=False): + super(Timer, self).__init__() + self.elapsed = [] + self.name = name + self.sync = sync + + def __enter__(self): + if self.sync: + torch.cuda.synchronize() + self.start = time.perf_counter() + + def __exit__(self, type, value, traceback): + if self.sync: + torch.cuda.synchronize() + elapsed = time.perf_counter() - self.start + self.elapsed.append(elapsed) + logger.info(f'[{self.name}]: {np.mean(self.elapsed):.3f}') + + +def timer_decorator(sync=False, name=''): + def wrapper(method): + elapsed = [] + + def timed(*args, **kw): + if sync: + torch.cuda.synchronize() + ts = time.perf_counter() + result = method(*args, **kw) + if sync: + torch.cuda.synchronize() + te = time.perf_counter() + elapsed.append(te - ts) + logger.info(f'[{name}]: {np.mean(elapsed):.3f}') + return result + return timed + return wrapper diff --git a/LHM/models/rendering/smplx/transfer_model/utils/torch_utils.py b/LHM/models/rendering/smplx/transfer_model/utils/torch_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ffa820d4e429ae611c384842980b66d63b07aacc --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/torch_utils.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
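batch_rodrigues() and batch_rot2aa() from pose_utils above are approximate inverses: (N, 3) axis-angle vectors map to (N, 3, 3) rotation matrices and back, up to the epsilon terms. A round-trip sketch (input values assumed; both functions in scope):

import torch

aa = torch.tensor([[0.0, 0.0, 1.5],
                   [0.3, -0.2, 0.1]])          # two axis-angle rotations

rotmats = batch_rodrigues(aa)                  # (2, 3, 3)
aa_back = batch_rot2aa(rotmats)                # (2, 3)
print(torch.allclose(aa, aa_back, atol=1e-3))  # True within numerical tolerance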
+# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +import numpy as np +import torch + + +def from_torch(x, dtype=np.float32): + if torch.is_tensor(x): + x = x.detach().cpu().numpy() + return x.astype(dtype) diff --git a/LHM/models/rendering/smplx/transfer_model/utils/typing.py b/LHM/models/rendering/smplx/transfer_model/utils/typing.py new file mode 100644 index 0000000000000000000000000000000000000000..c6ba3109d49f1e6c2496772efa4f422e2e06dca5 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/utils/typing.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de + +from typing import NewType, List, Union +import numpy as np +import torch + +__all__ = [ + 'Tensor', + 'Array', +] + +Tensor = NewType('Tensor', torch.Tensor) +Array = NewType('Array', np.ndarray) diff --git a/LHM/models/rendering/smplx/transfer_model/view_pkl.py b/LHM/models/rendering/smplx/transfer_model/view_pkl.py new file mode 100644 index 0000000000000000000000000000000000000000..0cafdef75f4056de3b42b5a3bad3f686264d1690 --- /dev/null +++ b/LHM/models/rendering/smplx/transfer_model/view_pkl.py @@ -0,0 +1,140 @@ +import os.path as osp +import argparse + +import numpy as np +import torch + +import pyrender +import trimesh + +import smplx + +from tqdm.auto import tqdm, trange + +from pathlib import Path + +def main(model_folder, + motion_file, + model_type='smplx', + ext='npz', + gender='neutral', + plot_joints=False, + num_betas=10, + sample_expression=True, + num_expression_coeffs=10, + use_face_contour=False): + + # open motion file + motion = np.load(motion_file, allow_pickle=True) + _motion = {} + for k,v in motion.items(): + if isinstance(v, np.ndarray): + print(k, motion[k].shape, motion[k].dtype) + if motion[k].dtype in (" kp_thresh and kps[2, i2] > kp_thresh: + cv2.line( + kp_mask, p1, p2, + color=colors[l], thickness=2, lineType=cv2.LINE_AA) + if kps[2, i1] > kp_thresh: + cv2.circle( + kp_mask, p1, + radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA) + if kps[2, i2] > kp_thresh: + cv2.circle( + kp_mask, p2, + radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA) + + # Blend the keypoints. + return cv2.addWeighted(img, 1.0 - alpha, kp_mask, alpha, 0) + +def vis_keypoints(img, kps, alpha=1): + # Convert from plt 0-1 RGBA colors to 0-255 BGR colors for opencv. + cmap = plt.get_cmap('rainbow') + colors = [cmap(i) for i in np.linspace(0, 1, len(kps) + 2)] + colors = [(c[2] * 255, c[1] * 255, c[0] * 255) for c in colors] + + # Perform the drawing on a copy of the image, to allow for blending. + kp_mask = np.copy(img) + + # Draw the keypoints. + for i in range(len(kps)): + p = kps[i][0].astype(np.int32), kps[i][1].astype(np.int32) + cv2.circle(kp_mask, p, radius=3, color=colors[i], thickness=-1, lineType=cv2.LINE_AA) + + # Blend the keypoints. 
+ return cv2.addWeighted(img, 1.0 - alpha, kp_mask, alpha, 0) + +def render_mesh(mesh, face, cam_param, bkg, blend_ratio=1.0, return_bg_mask=False): + + mesh = mesh.cuda()[None,:,:] + face = torch.LongTensor(face.astype(np.int64)).cuda()[None,:,:] + cam_param = {k: v.cuda()[None,:] for k,v in cam_param.items()} + render_shape = (bkg.shape[0], bkg.shape[1]) # height, width + + batch_size, vertex_num = mesh.shape[:2] + textures = TexturesVertex(verts_features=torch.ones((batch_size,vertex_num,3)).float().cuda()) + mesh = torch.stack((-mesh[:,:,0], -mesh[:,:,1], mesh[:,:,2]),2) # reverse x- and y-axis following PyTorch3D axis direction + mesh = Meshes(mesh, face, textures) + + cameras = PerspectiveCameras(focal_length=cam_param['focal'], + principal_point=cam_param['princpt'], + device='cuda', + in_ndc=False, + image_size=torch.LongTensor(render_shape).cuda().view(1,2)) + raster_settings = RasterizationSettings(image_size=render_shape, blur_radius=0.0, faces_per_pixel=1, bin_size=0) + rasterizer = MeshRasterizer(cameras=cameras, raster_settings=raster_settings).cuda() + lights = PointLights(device='cuda') + shader = SoftPhongShader(device='cuda', cameras=cameras, lights=lights) + materials = Materials( + device='cuda', + specular_color=[[0.0, 0.0, 0.0]], + shininess=0.0 + ) + + # render + with torch.no_grad(): + renderer = MeshRendererWithFragments(rasterizer=rasterizer, shader=shader) + images, fragments = renderer(mesh, materials=materials) + + # background masking + is_bkg = (fragments.zbuf <= 0).float().cpu().numpy()[0] + render = images[0,:,:,:3].cpu().numpy() + fg = render * blend_ratio + bkg/255 * (1 - blend_ratio) + render = fg * (1 - is_bkg) * 255 + bkg * is_bkg + if return_bg_mask: + return render, is_bkg + return render diff --git a/LHM/models/rendering/smplx_gsavatar/__init__.py b/LHM/models/rendering/smplx_gsavatar/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..886949df670691d1ef5995737cafa285224826c4 --- /dev/null +++ b/LHM/models/rendering/smplx_gsavatar/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from .body_models import ( + create, + SMPL, + SMPLH, + SMPLX, + MANO, + FLAME, + build_layer, + SMPLLayer, + SMPLHLayer, + SMPLXLayer, + MANOLayer, + FLAMELayer, +) diff --git a/LHM/models/rendering/smplx_gsavatar/body_models.py b/LHM/models/rendering/smplx_gsavatar/body_models.py new file mode 100644 index 0000000000000000000000000000000000000000..975f2e07d8d77f13c6398d9948d83b33350d89af --- /dev/null +++ b/LHM/models/rendering/smplx_gsavatar/body_models.py @@ -0,0 +1,2415 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. 
+# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from typing import Optional, Dict, Union +import os +import os.path as osp + +import pickle + +import numpy as np + +import torch +import torch.nn as nn + +from .lbs import ( + lbs, vertices2landmarks, find_dynamic_lmk_idx_and_bcoords, blend_shapes) + +from .vertex_ids import vertex_ids as VERTEX_IDS +from .utils import ( + Struct, to_np, to_tensor, Tensor, Array, + SMPLOutput, + SMPLHOutput, + SMPLXOutput, + MANOOutput, + FLAMEOutput, + find_joint_kin_chain) +from .vertex_joint_selector import VertexJointSelector + + +class SMPL(nn.Module): + + NUM_JOINTS = 23 + NUM_BODY_JOINTS = 23 + SHAPE_SPACE_DIM = 300 + + def __init__( + self, model_path: str, + kid_template_path: str = '', + data_struct: Optional[Struct] = None, + create_betas: bool = True, + betas: Optional[Tensor] = None, + num_betas: int = 10, + create_global_orient: bool = True, + global_orient: Optional[Tensor] = None, + create_body_pose: bool = True, + body_pose: Optional[Tensor] = None, + create_transl: bool = True, + transl: Optional[Tensor] = None, + dtype=torch.float32, + batch_size: int = 1, + joint_mapper=None, + gender: str = 'neutral', + age: str = 'adult', + vertex_ids: Dict[str, int] = None, + v_template: Optional[Union[Tensor, Array]] = None, + **kwargs + ) -> None: + ''' SMPL model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + data_struct: Strct + A struct object. If given, then the parameters of the model are + read from the object. Otherwise, the model tries to read the + parameters from the given `model_path`. (default = None) + create_global_orient: bool, optional + Flag for creating a member variable for the global orientation + of the body. (default = True) + global_orient: torch.tensor, optional, Bx3 + The default value for the global orientation variable. + (default = None) + create_body_pose: bool, optional + Flag for creating a member variable for the pose of the body. + (default = True) + body_pose: torch.tensor, optional, Bx(Body Joints * 3) + The default value for the body pose variable. + (default = None) + num_betas: int, optional + Number of shape components to use + (default = 10). + create_betas: bool, optional + Flag for creating a member variable for the shape space + (default = True). + betas: torch.tensor, optional, Bx10 + The default value for the shape member variable. + (default = None) + create_transl: bool, optional + Flag for creating a member variable for the translation + of the body. (default = True) + transl: torch.tensor, optional, Bx3 + The default value for the transl variable. + (default = None) + dtype: torch.dtype, optional + The data type for the created variables + batch_size: int, optional + The batch size used for creating the member variables + joint_mapper: object, optional + An object that re-maps the joints. Useful if one wants to + re-order the SMPL joints to some other convention (e.g. 
MSCOCO) + (default = None) + gender: str, optional + Which gender to load + vertex_ids: dict, optional + A dictionary containing the indices of the extra vertices that + will be selected + ''' + + self.gender = gender + self.age = age + + if data_struct is None: + if osp.isdir(model_path): + model_fn = 'SMPL_{}.{ext}'.format(gender.upper(), ext='pkl') + smpl_path = os.path.join(model_path, model_fn) + else: + smpl_path = model_path + assert osp.exists(smpl_path), 'Path {} does not exist!'.format( + smpl_path) + + with open(smpl_path, 'rb') as smpl_file: + data_struct = Struct(**pickle.load(smpl_file, + encoding='latin1')) + + super(SMPL, self).__init__() + self.batch_size = batch_size + shapedirs = data_struct.shapedirs + if (shapedirs.shape[-1] < self.SHAPE_SPACE_DIM): + print(f'WARNING: You are using a {self.name()} model, with only' + ' 10 shape coefficients.') + num_betas = min(num_betas, 10) + else: + num_betas = min(num_betas, self.SHAPE_SPACE_DIM) + + if self.age=='kid': + v_template_smil = np.load(kid_template_path) + v_template_smil -= np.mean(v_template_smil, axis=0) + v_template_diff = np.expand_dims(v_template_smil - data_struct.v_template, axis=2) + shapedirs = np.concatenate((shapedirs[:, :, :num_betas], v_template_diff), axis=2) + num_betas = num_betas + 1 + + self._num_betas = num_betas + shapedirs = shapedirs[:, :, :num_betas] + # The shape components + self.register_buffer( + 'shapedirs', + to_tensor(to_np(shapedirs), dtype=dtype)) + + if vertex_ids is None: + # SMPL and SMPL-H share the same topology, so any extra joints can + # be drawn from the same place + vertex_ids = VERTEX_IDS['smplh'] + + self.dtype = dtype + + self.joint_mapper = joint_mapper + + self.vertex_joint_selector = VertexJointSelector( + vertex_ids=vertex_ids, **kwargs) + + self.faces = data_struct.f + self.register_buffer('faces_tensor', + to_tensor(to_np(self.faces, dtype=np.int64), + dtype=torch.long)) + + if create_betas: + if betas is None: + default_betas = torch.zeros( + [batch_size, self.num_betas], dtype=dtype) + else: + if torch.is_tensor(betas): + default_betas = betas.clone().detach() + else: + default_betas = torch.tensor(betas, dtype=dtype) + + self.register_parameter( + 'betas', nn.Parameter(default_betas, requires_grad=True)) + + # The tensor that contains the global rotation of the model + # It is separated from the pose of the joints in case we wish to + # optimize only over one of them + if create_global_orient: + if global_orient is None: + default_global_orient = torch.zeros( + [batch_size, 3], dtype=dtype) + else: + if torch.is_tensor(global_orient): + default_global_orient = global_orient.clone().detach() + else: + default_global_orient = torch.tensor( + global_orient, dtype=dtype) + + global_orient = nn.Parameter(default_global_orient, + requires_grad=True) + self.register_parameter('global_orient', global_orient) + + if create_body_pose: + if body_pose is None: + default_body_pose = torch.zeros( + [batch_size, self.NUM_BODY_JOINTS * 3], dtype=dtype) + else: + if torch.is_tensor(body_pose): + default_body_pose = body_pose.clone().detach() + else: + default_body_pose = torch.tensor(body_pose, + dtype=dtype) + self.register_parameter( + 'body_pose', + nn.Parameter(default_body_pose, requires_grad=True)) + + if create_transl: + if transl is None: + default_transl = torch.zeros([batch_size, 3], + dtype=dtype, + requires_grad=True) + else: + default_transl = torch.tensor(transl, dtype=dtype) + self.register_parameter( + 'transl', nn.Parameter(default_transl, requires_grad=True)) + + 
if v_template is None: + v_template = data_struct.v_template + if not torch.is_tensor(v_template): + v_template = to_tensor(to_np(v_template), dtype=dtype) + # The vertices of the template model + self.register_buffer('v_template', v_template) + + j_regressor = to_tensor(to_np( + data_struct.J_regressor), dtype=dtype) + self.register_buffer('J_regressor', j_regressor) + + # Pose blend shape basis: 6890 x 3 x 207, reshaped to 6890*3 x 207 + num_pose_basis = data_struct.posedirs.shape[-1] + # 207 x 20670 + posedirs = np.reshape(data_struct.posedirs, [-1, num_pose_basis]).T + self.register_buffer('posedirs', + to_tensor(to_np(posedirs), dtype=dtype)) + + # indices of parents for each joints + parents = to_tensor(to_np(data_struct.kintree_table[0])).long() + parents[0] = -1 + self.register_buffer('parents', parents) + + lbs_weights = to_tensor(to_np(data_struct.weights), dtype=dtype) + self.register_buffer('lbs_weights', lbs_weights) + + @property + def num_betas(self): + return self._num_betas + + @property + def num_expression_coeffs(self): + return 0 + + def create_mean_pose(self, data_struct) -> Tensor: + pass + + def name(self) -> str: + return 'SMPL' + + @torch.no_grad() + def reset_params(self, **params_dict) -> None: + for param_name, param in self.named_parameters(): + if param_name in params_dict: + param[:] = torch.tensor(params_dict[param_name]) + else: + param.fill_(0) + + def get_num_verts(self) -> int: + return self.v_template.shape[0] + + def get_num_faces(self) -> int: + return self.faces.shape[0] + + def extra_repr(self) -> str: + msg = [ + f'Gender: {self.gender.upper()}', + f'Number of joints: {self.J_regressor.shape[0]}', + f'Betas: {self.num_betas}', + ] + return '\n'.join(msg) + + def forward_shape( + self, + betas: Optional[Tensor] = None, + ) -> SMPLOutput: + betas = betas if betas is not None else self.betas + v_shaped = self.v_template + blend_shapes(betas, self.shapedirs) + return SMPLOutput(vertices=v_shaped, betas=betas, v_shaped=v_shaped) + + def forward( + self, + betas: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts=True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs + ) -> SMPLOutput: + ''' Forward pass for the SMPL model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3 + If given, ignore the member variable and use it as the global + rotation of the body. Useful if someone wishes to predicts this + with an external model. (default=None) + betas: torch.tensor, optional, shape BxN_b + If given, ignore the member variable `betas` and use it + instead. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + body_pose: torch.tensor, optional, shape Bx(J*3) + If given, ignore the member variable `body_pose` and use it + instead. For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + axis-angle format. (default=None) + transl: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `transl` and use it + instead. For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. 
(default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + ''' + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = (global_orient if global_orient is not None else + self.global_orient) + body_pose = body_pose if body_pose is not None else self.body_pose + betas = betas if betas is not None else self.betas + + apply_trans = transl is not None or hasattr(self, 'transl') + if transl is None and hasattr(self, 'transl'): + transl = self.transl + + full_pose = torch.cat([global_orient, body_pose], dim=1) + + batch_size = max(betas.shape[0], global_orient.shape[0], + body_pose.shape[0]) + + if betas.shape[0] != batch_size: + num_repeats = int(batch_size / betas.shape[0]) + betas = betas.expand(num_repeats, -1) + + vertices, joints, A = lbs(betas, full_pose, self.v_template, + self.shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, pose2rot=pose2rot, + return_affine_mat = True) + + joints = self.vertex_joint_selector(vertices, joints) + # Map the joints to the current dataset + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if apply_trans: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + A[:, :, :3, 3] += transl.unsqueeze(dim=1) + + output = SMPLOutput(vertices=vertices if return_verts else None, + global_orient=global_orient, + body_pose=body_pose, + joints=joints, + betas=betas, + full_pose=full_pose if return_full_pose else None, + A = A) + + return output + + +class SMPLLayer(SMPL): + def __init__( + self, + *args, + **kwargs + ) -> None: + # Just create a SMPL module without any member variables + super(SMPLLayer, self).__init__( + create_body_pose=False, + create_betas=False, + create_global_orient=False, + create_transl=False, + *args, + **kwargs, + ) + + def forward( + self, + betas: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts=True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs + ) -> SMPLOutput: + ''' Forward pass for the SMPL model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3x3 + Global rotation of the body. Useful if someone wishes to + predicts this with an external model. It is expected to be in + rotation matrix format. (default=None) + betas: torch.tensor, optional, shape BxN_b + Shape parameters. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + body_pose: torch.tensor, optional, shape BxJx3x3 + Body pose. For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + transl: torch.tensor, optional, shape Bx3 + Translation vector of the body. + For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. 
(default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + ''' + model_vars = [betas, global_orient, body_pose, transl] + batch_size = 1 + for var in model_vars: + if var is None: + continue + batch_size = max(batch_size, len(var)) + device, dtype = self.shapedirs.device, self.shapedirs.dtype + if global_orient is None: + global_orient = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + if body_pose is None: + body_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand( + batch_size, self.NUM_BODY_JOINTS, -1, -1).contiguous() + if betas is None: + betas = torch.zeros([batch_size, self.num_betas], + dtype=dtype, device=device) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + full_pose = torch.cat( + [global_orient.reshape(-1, 1, 3, 3), + body_pose.reshape(-1, self.NUM_BODY_JOINTS, 3, 3)], + dim=1) + + vertices, joints = lbs(betas, full_pose, self.v_template, + self.shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, + pose2rot=False) + + joints = self.vertex_joint_selector(vertices, joints) + # Map the joints to the current dataset + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if transl is not None: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = SMPLOutput(vertices=vertices if return_verts else None, + global_orient=global_orient, + body_pose=body_pose, + joints=joints, + betas=betas, + full_pose=full_pose if return_full_pose else None) + + return output + + +class SMPLH(SMPL): + + # The hand joints are replaced by MANO + NUM_BODY_JOINTS = SMPL.NUM_JOINTS - 2 + NUM_HAND_JOINTS = 15 + NUM_JOINTS = NUM_BODY_JOINTS + 2 * NUM_HAND_JOINTS + + def __init__( + self, model_path, + kid_template_path: str = '', + data_struct: Optional[Struct] = None, + create_left_hand_pose: bool = True, + left_hand_pose: Optional[Tensor] = None, + create_right_hand_pose: bool = True, + right_hand_pose: Optional[Tensor] = None, + use_pca: bool = True, + num_pca_comps: int = 6, + flat_hand_mean: bool = False, + batch_size: int = 1, + gender: str = 'neutral', + age: str = 'adult', + dtype=torch.float32, + vertex_ids=None, + use_compressed: bool = True, + ext: str = 'pkl', + **kwargs + ) -> None: + ''' SMPLH model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + data_struct: Strct + A struct object. If given, then the parameters of the model are + read from the object. Otherwise, the model tries to read the + parameters from the given `model_path`. (default = None) + create_left_hand_pose: bool, optional + Flag for creating a member variable for the pose of the left + hand. (default = True) + left_hand_pose: torch.tensor, optional, BxP + The default value for the left hand pose member variable. + (default = None) + create_right_hand_pose: bool, optional + Flag for creating a member variable for the pose of the right + hand. (default = True) + right_hand_pose: torch.tensor, optional, BxP + The default value for the right hand pose member variable. + (default = None) + num_pca_comps: int, optional + The number of PCA components to use for each hand. + (default = 6) + flat_hand_mean: bool, optional + If False, then the pose of the hand is initialized to False. 
+ batch_size: int, optional + The batch size used for creating the member variables + gender: str, optional + Which gender to load + dtype: torch.dtype, optional + The data type for the created variables + vertex_ids: dict, optional + A dictionary containing the indices of the extra vertices that + will be selected + ''' + + self.num_pca_comps = num_pca_comps + # If no data structure is passed, then load the data from the given + # model folder + if data_struct is None: + # Load the model + if osp.isdir(model_path): + model_fn = 'SMPLH_{}.{ext}'.format(gender.upper(), ext=ext) + smplh_path = os.path.join(model_path, model_fn) + else: + smplh_path = model_path + assert osp.exists(smplh_path), 'Path {} does not exist!'.format( + smplh_path) + + if ext == 'pkl': + with open(smplh_path, 'rb') as smplh_file: + model_data = pickle.load(smplh_file, encoding='latin1') + elif ext == 'npz': + model_data = np.load(smplh_path, allow_pickle=True) + else: + raise ValueError('Unknown extension: {}'.format(ext)) + data_struct = Struct(**model_data) + + if vertex_ids is None: + vertex_ids = VERTEX_IDS['smplh'] + + super(SMPLH, self).__init__( + model_path=model_path, + kid_template_path=kid_template_path, + data_struct=data_struct, + batch_size=batch_size, vertex_ids=vertex_ids, gender=gender, age=age, + use_compressed=use_compressed, dtype=dtype, ext=ext, **kwargs) + + self.use_pca = use_pca + self.num_pca_comps = num_pca_comps + self.flat_hand_mean = flat_hand_mean + + left_hand_components = data_struct.hands_componentsl[:num_pca_comps] + right_hand_components = data_struct.hands_componentsr[:num_pca_comps] + + self.np_left_hand_components = left_hand_components + self.np_right_hand_components = right_hand_components + if self.use_pca: + self.register_buffer( + 'left_hand_components', + torch.tensor(left_hand_components, dtype=dtype)) + self.register_buffer( + 'right_hand_components', + torch.tensor(right_hand_components, dtype=dtype)) + + if self.flat_hand_mean: + left_hand_mean = np.zeros_like(data_struct.hands_meanl) + else: + left_hand_mean = data_struct.hands_meanl + + if self.flat_hand_mean: + right_hand_mean = np.zeros_like(data_struct.hands_meanr) + else: + right_hand_mean = data_struct.hands_meanr + + self.register_buffer('left_hand_mean', + to_tensor(left_hand_mean, dtype=self.dtype)) + self.register_buffer('right_hand_mean', + to_tensor(right_hand_mean, dtype=self.dtype)) + + # Create the buffers for the pose of the left hand + hand_pose_dim = num_pca_comps if use_pca else 3 * self.NUM_HAND_JOINTS + if create_left_hand_pose: + if left_hand_pose is None: + default_lhand_pose = torch.zeros([batch_size, hand_pose_dim], + dtype=dtype) + else: + default_lhand_pose = torch.tensor(left_hand_pose, dtype=dtype) + + left_hand_pose_param = nn.Parameter(default_lhand_pose, + requires_grad=True) + self.register_parameter('left_hand_pose', + left_hand_pose_param) + + if create_right_hand_pose: + if right_hand_pose is None: + default_rhand_pose = torch.zeros([batch_size, hand_pose_dim], + dtype=dtype) + else: + default_rhand_pose = torch.tensor(right_hand_pose, dtype=dtype) + + right_hand_pose_param = nn.Parameter(default_rhand_pose, + requires_grad=True) + self.register_parameter('right_hand_pose', + right_hand_pose_param) + + # Create the buffer for the mean pose. 
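+        # pose_mean is added to the concatenated axis-angle pose in forward();
+        # when flat_hand_mean is True it is all zeros, so the hands rest in the
+        # flat open pose instead of the dataset mean.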
+ pose_mean_tensor = self.create_mean_pose( + data_struct, flat_hand_mean=flat_hand_mean) + if not torch.is_tensor(pose_mean_tensor): + pose_mean_tensor = torch.tensor(pose_mean_tensor, dtype=dtype) + self.register_buffer('pose_mean', pose_mean_tensor) + + def create_mean_pose(self, data_struct, flat_hand_mean=False): + # Create the array for the mean pose. If flat_hand is false, then use + # the mean that is given by the data, rather than the flat open hand + global_orient_mean = torch.zeros([3], dtype=self.dtype) + body_pose_mean = torch.zeros([self.NUM_BODY_JOINTS * 3], + dtype=self.dtype) + + pose_mean = torch.cat([global_orient_mean, body_pose_mean, + self.left_hand_mean, + self.right_hand_mean], dim=0) + return pose_mean + + def name(self) -> str: + return 'SMPL+H' + + def extra_repr(self): + msg = super(SMPLH, self).extra_repr() + msg = [msg] + if self.use_pca: + msg.append(f'Number of PCA components: {self.num_pca_comps}') + msg.append(f'Flat hand mean: {self.flat_hand_mean}') + return '\n'.join(msg) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + left_hand_pose: Optional[Tensor] = None, + right_hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs + ) -> SMPLHOutput: + ''' + ''' + + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = (global_orient if global_orient is not None else + self.global_orient) + body_pose = body_pose if body_pose is not None else self.body_pose + betas = betas if betas is not None else self.betas + left_hand_pose = (left_hand_pose if left_hand_pose is not None else + self.left_hand_pose) + right_hand_pose = (right_hand_pose if right_hand_pose is not None else + self.right_hand_pose) + + apply_trans = transl is not None or hasattr(self, 'transl') + if transl is None: + if hasattr(self, 'transl'): + transl = self.transl + + if self.use_pca: + left_hand_pose = torch.einsum( + 'bi,ij->bj', [left_hand_pose, self.left_hand_components]) + right_hand_pose = torch.einsum( + 'bi,ij->bj', [right_hand_pose, self.right_hand_components]) + + full_pose = torch.cat([global_orient, body_pose, + left_hand_pose, + right_hand_pose], dim=1) + full_pose += self.pose_mean + + vertices, joints = lbs(betas, full_pose, self.v_template, + self.shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, pose2rot=pose2rot) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if apply_trans: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = SMPLHOutput(vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + global_orient=global_orient, + body_pose=body_pose, + left_hand_pose=left_hand_pose, + right_hand_pose=right_hand_pose, + full_pose=full_pose if return_full_pose else None) + + return output + + +class SMPLHLayer(SMPLH): + + def __init__( + self, *args, **kwargs + ) -> None: + ''' SMPL+H as a layer model constructor + ''' + super(SMPLHLayer, self).__init__( + create_global_orient=False, + create_body_pose=False, + create_left_hand_pose=False, + create_right_hand_pose=False, + create_betas=False, + create_transl=False, + *args, + **kwargs) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: 
Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + left_hand_pose: Optional[Tensor] = None, + right_hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs + ) -> SMPLHOutput: + ''' Forward pass for the SMPL+H model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3x3 + Global rotation of the body. Useful if someone wishes to + predicts this with an external model. It is expected to be in + rotation matrix format. (default=None) + betas: torch.tensor, optional, shape BxN_b + Shape parameters. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + body_pose: torch.tensor, optional, shape BxJx3x3 + If given, ignore the member variable `body_pose` and use it + instead. For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + left_hand_pose: torch.tensor, optional, shape Bx15x3x3 + If given, contains the pose of the left hand. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + right_hand_pose: torch.tensor, optional, shape Bx15x3x3 + If given, contains the pose of the right hand. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + transl: torch.tensor, optional, shape Bx3 + Translation vector of the body. + For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. (default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + ''' + model_vars = [betas, global_orient, body_pose, transl, left_hand_pose, + right_hand_pose] + batch_size = 1 + for var in model_vars: + if var is None: + continue + batch_size = max(batch_size, len(var)) + device, dtype = self.shapedirs.device, self.shapedirs.dtype + if global_orient is None: + global_orient = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + if body_pose is None: + body_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, 21, -1, -1).contiguous() + if left_hand_pose is None: + left_hand_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, 15, -1, -1).contiguous() + if right_hand_pose is None: + right_hand_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, 15, -1, -1).contiguous() + if betas is None: + betas = torch.zeros([batch_size, self.num_betas], + dtype=dtype, device=device) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + + # Concatenate all pose vectors + full_pose = torch.cat( + [global_orient.reshape(-1, 1, 3, 3), + body_pose.reshape(-1, self.NUM_BODY_JOINTS, 3, 3), + left_hand_pose.reshape(-1, self.NUM_HAND_JOINTS, 3, 3), + right_hand_pose.reshape(-1, self.NUM_HAND_JOINTS, 3, 3)], + dim=1) + + vertices, joints = lbs(betas, full_pose, self.v_template, + self.shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, pose2rot=False) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + if self.joint_mapper is not None: + joints = 
self.joint_mapper(joints) + + if transl is not None: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = SMPLHOutput(vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + global_orient=global_orient, + body_pose=body_pose, + left_hand_pose=left_hand_pose, + right_hand_pose=right_hand_pose, + full_pose=full_pose if return_full_pose else None) + + return output + + +class SMPLX(SMPLH): + ''' + SMPL-X (SMPL eXpressive) is a unified body model, with shape parameters + trained jointly for the face, hands and body. + SMPL-X uses standard vertex based linear blend skinning with learned + corrective blend shapes, has N=10475 vertices and K=54 joints, + which includes joints for the neck, jaw, eyeballs and fingers. + ''' + + NUM_BODY_JOINTS = SMPLH.NUM_BODY_JOINTS + NUM_HAND_JOINTS = 15 + NUM_FACE_JOINTS = 3 + NUM_JOINTS = NUM_BODY_JOINTS + 2 * NUM_HAND_JOINTS + NUM_FACE_JOINTS + EXPRESSION_SPACE_DIM = 100 + NECK_IDX = 12 + + def __init__( + self, model_path: str, + kid_template_path: str = '', + num_expression_coeffs: int = 10, + create_expression: bool = True, + expression: Optional[Tensor] = None, + create_jaw_pose: bool = True, + jaw_pose: Optional[Tensor] = None, + create_leye_pose: bool = True, + leye_pose: Optional[Tensor] = None, + create_reye_pose=True, + reye_pose: Optional[Tensor] = None, + use_face_contour: bool = False, + batch_size: int = 1, + gender: str = 'neutral', + age: str = 'adult', + dtype=torch.float32, + ext: str = 'npz', + **kwargs + ) -> None: + ''' SMPLX model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + num_expression_coeffs: int, optional + Number of expression components to use + (default = 10). + create_expression: bool, optional + Flag for creating a member variable for the expression space + (default = True). + expression: torch.tensor, optional, Bx10 + The default value for the expression member variable. + (default = None) + create_jaw_pose: bool, optional + Flag for creating a member variable for the jaw pose. + (default = False) + jaw_pose: torch.tensor, optional, Bx3 + The default value for the jaw pose variable. + (default = None) + create_leye_pose: bool, optional + Flag for creating a member variable for the left eye pose. + (default = False) + leye_pose: torch.tensor, optional, Bx10 + The default value for the left eye pose variable. + (default = None) + create_reye_pose: bool, optional + Flag for creating a member variable for the right eye pose. + (default = False) + reye_pose: torch.tensor, optional, Bx10 + The default value for the right eye pose variable. 
+ (default = None) + use_face_contour: bool, optional + Whether to compute the keypoints that form the facial contour + batch_size: int, optional + The batch size used for creating the member variables + gender: str, optional + Which gender to load + dtype: torch.dtype + The data type for the created variables + ''' + + # Load the model + if osp.isdir(model_path): + model_fn = 'SMPLX_{}.{ext}'.format(gender.upper(), ext=ext) + smplx_path = os.path.join(model_path, model_fn) + else: + smplx_path = model_path + assert osp.exists(smplx_path), 'Path {} does not exist!'.format( + smplx_path) + + if ext == 'pkl': + with open(smplx_path, 'rb') as smplx_file: + model_data = pickle.load(smplx_file, encoding='latin1') + elif ext == 'npz': + model_data = np.load(smplx_path, allow_pickle=True) + else: + raise ValueError('Unknown extension: {}'.format(ext)) + + data_struct = Struct(**model_data) + + super(SMPLX, self).__init__( + model_path=model_path, + kid_template_path=kid_template_path, + data_struct=data_struct, + dtype=dtype, + batch_size=batch_size, + vertex_ids=VERTEX_IDS['smplx'], + gender=gender, age=age, ext=ext, + **kwargs) + + lmk_faces_idx = data_struct.lmk_faces_idx + self.register_buffer('lmk_faces_idx', + torch.tensor(lmk_faces_idx, dtype=torch.long)) + lmk_bary_coords = data_struct.lmk_bary_coords + self.register_buffer('lmk_bary_coords', + torch.tensor(lmk_bary_coords, dtype=dtype)) + + self.use_face_contour = use_face_contour + if self.use_face_contour: + dynamic_lmk_faces_idx = data_struct.dynamic_lmk_faces_idx + dynamic_lmk_faces_idx = torch.tensor( + dynamic_lmk_faces_idx, + dtype=torch.long) + self.register_buffer('dynamic_lmk_faces_idx', + dynamic_lmk_faces_idx) + + dynamic_lmk_bary_coords = data_struct.dynamic_lmk_bary_coords + dynamic_lmk_bary_coords = torch.tensor( + dynamic_lmk_bary_coords, dtype=dtype) + self.register_buffer('dynamic_lmk_bary_coords', + dynamic_lmk_bary_coords) + + neck_kin_chain = find_joint_kin_chain(self.NECK_IDX, self.parents) + self.register_buffer( + 'neck_kin_chain', + torch.tensor(neck_kin_chain, dtype=torch.long)) + + if create_jaw_pose: + if jaw_pose is None: + default_jaw_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_jaw_pose = torch.tensor(jaw_pose, dtype=dtype) + jaw_pose_param = nn.Parameter(default_jaw_pose, + requires_grad=True) + self.register_parameter('jaw_pose', jaw_pose_param) + + if create_leye_pose: + if leye_pose is None: + default_leye_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_leye_pose = torch.tensor(leye_pose, dtype=dtype) + leye_pose_param = nn.Parameter(default_leye_pose, + requires_grad=True) + self.register_parameter('leye_pose', leye_pose_param) + + if create_reye_pose: + if reye_pose is None: + default_reye_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_reye_pose = torch.tensor(reye_pose, dtype=dtype) + reye_pose_param = nn.Parameter(default_reye_pose, + requires_grad=True) + self.register_parameter('reye_pose', reye_pose_param) + + shapedirs = data_struct.shapedirs + if len(shapedirs.shape) < 3: + shapedirs = shapedirs[:, :, None] + if (shapedirs.shape[-1] < self.SHAPE_SPACE_DIM + + self.EXPRESSION_SPACE_DIM): + print(f'WARNING: You are using a {self.name()} model, with only' + ' 10 shape and 10 expression coefficients.') + expr_start_idx = 10 + expr_end_idx = 20 + num_expression_coeffs = min(num_expression_coeffs, 10) + else: + expr_start_idx = self.SHAPE_SPACE_DIM + expr_end_idx = self.SHAPE_SPACE_DIM + num_expression_coeffs + num_expression_coeffs = min( 
+ num_expression_coeffs, self.EXPRESSION_SPACE_DIM) + + self._num_expression_coeffs = num_expression_coeffs + + expr_dirs = shapedirs[:, :, expr_start_idx:expr_end_idx] + self.register_buffer( + 'expr_dirs', to_tensor(to_np(expr_dirs), dtype=dtype)) + + if create_expression: + if expression is None: + default_expression = torch.zeros( + [batch_size, self.num_expression_coeffs], dtype=dtype) + else: + default_expression = torch.tensor(expression, dtype=dtype) + expression_param = nn.Parameter(default_expression, + requires_grad=True) + self.register_parameter('expression', expression_param) + + def name(self) -> str: + return 'SMPL-X' + + @property + def num_expression_coeffs(self): + return self._num_expression_coeffs + + def create_mean_pose(self, data_struct, flat_hand_mean=False): + # Create the array for the mean pose. If flat_hand is false, then use + # the mean that is given by the data, rather than the flat open hand + global_orient_mean = torch.zeros([3], dtype=self.dtype) + body_pose_mean = torch.zeros([self.NUM_BODY_JOINTS * 3], + dtype=self.dtype) + jaw_pose_mean = torch.zeros([3], dtype=self.dtype) + leye_pose_mean = torch.zeros([3], dtype=self.dtype) + reye_pose_mean = torch.zeros([3], dtype=self.dtype) + + pose_mean = np.concatenate([global_orient_mean, body_pose_mean, + jaw_pose_mean, + leye_pose_mean, reye_pose_mean, + self.left_hand_mean, self.right_hand_mean], + axis=0) + + return pose_mean + + def extra_repr(self): + msg = super(SMPLX, self).extra_repr() + msg = [ + msg, + f'Number of Expression Coefficients: {self.num_expression_coeffs}' + ] + return '\n'.join(msg) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + left_hand_pose: Optional[Tensor] = None, + right_hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + expression: Optional[Tensor] = None, + jaw_pose: Optional[Tensor] = None, + leye_pose: Optional[Tensor] = None, + reye_pose: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + return_shaped: bool = True, + **kwargs + ) -> SMPLXOutput: + ''' + Forward pass for the SMPLX model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3 + If given, ignore the member variable and use it as the global + rotation of the body. Useful if someone wishes to predicts this + with an external model. (default=None) + betas: torch.tensor, optional, shape BxN_b + If given, ignore the member variable `betas` and use it + instead. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + expression: torch.tensor, optional, shape BxN_e + If given, ignore the member variable `expression` and use it + instead. For example, it can used if expression parameters + `expression` are predicted from some external model. + body_pose: torch.tensor, optional, shape Bx(J*3) + If given, ignore the member variable `body_pose` and use it + instead. For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + axis-angle format. (default=None) + left_hand_pose: torch.tensor, optional, shape BxP + If given, ignore the member variable `left_hand_pose` and + use this instead. It should either contain PCA coefficients or + joint rotations in axis-angle format. 
+ right_hand_pose: torch.tensor, optional, shape BxP + If given, ignore the member variable `right_hand_pose` and + use this instead. It should either contain PCA coefficients or + joint rotations in axis-angle format. + jaw_pose: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `jaw_pose` and + use this instead. It should either joint rotations in + axis-angle format. + transl: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `transl` and use it + instead. For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. (default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + output: ModelOutput + A named tuple of type `ModelOutput` + ''' + + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = (global_orient if global_orient is not None else + self.global_orient) + body_pose = body_pose if body_pose is not None else self.body_pose + betas = betas if betas is not None else self.betas + + left_hand_pose = (left_hand_pose if left_hand_pose is not None else + self.left_hand_pose) + right_hand_pose = (right_hand_pose if right_hand_pose is not None else + self.right_hand_pose) + jaw_pose = jaw_pose if jaw_pose is not None else self.jaw_pose + leye_pose = leye_pose if leye_pose is not None else self.leye_pose + reye_pose = reye_pose if reye_pose is not None else self.reye_pose + expression = expression if expression is not None else self.expression + + apply_trans = transl is not None or hasattr(self, 'transl') + if transl is None: + if hasattr(self, 'transl'): + transl = self.transl + + if self.use_pca: + left_hand_pose = torch.einsum( + 'bi,ij->bj', [left_hand_pose, self.left_hand_components]) + right_hand_pose = torch.einsum( + 'bi,ij->bj', [right_hand_pose, self.right_hand_components]) + + full_pose = torch.cat([global_orient.reshape(-1, 1, 3), + body_pose.reshape(-1, self.NUM_BODY_JOINTS, 3), + jaw_pose.reshape(-1, 1, 3), + leye_pose.reshape(-1, 1, 3), + reye_pose.reshape(-1, 1, 3), + left_hand_pose.reshape(-1, 15, 3), + right_hand_pose.reshape(-1, 15, 3)], + dim=1).reshape(-1, 165) + + # Add the mean pose of the model. 
Does not affect the body, only the + # hands when flat_hand_mean == False + full_pose += self.pose_mean + + batch_size = max(betas.shape[0], global_orient.shape[0], + body_pose.shape[0]) + # Concatenate the shape and expression coefficients + scale = int(batch_size / betas.shape[0]) + if scale > 1: + betas = betas.expand(scale, -1) + shape_components = torch.cat([betas, expression], dim=-1) + + shapedirs = torch.cat([self.shapedirs, self.expr_dirs], dim=-1) + + vertices, joints, A = lbs(shape_components, full_pose, self.v_template, + shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, pose2rot=pose2rot, + return_affine_mat = True) + + lmk_faces_idx = self.lmk_faces_idx.unsqueeze( + dim=0).expand(batch_size, -1).contiguous() + lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).repeat( + self.batch_size, 1, 1) + if self.use_face_contour: + lmk_idx_and_bcoords = find_dynamic_lmk_idx_and_bcoords( + vertices, full_pose, self.dynamic_lmk_faces_idx, + self.dynamic_lmk_bary_coords, + self.neck_kin_chain, + pose2rot=True, + ) + dyn_lmk_faces_idx, dyn_lmk_bary_coords = lmk_idx_and_bcoords + + lmk_faces_idx = torch.cat([lmk_faces_idx, + dyn_lmk_faces_idx], 1) + lmk_bary_coords = torch.cat( + [lmk_bary_coords.expand(batch_size, -1, -1), + dyn_lmk_bary_coords], 1) + + landmarks = vertices2landmarks(vertices, self.faces_tensor, + lmk_faces_idx, + lmk_bary_coords) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + # Add the landmarks to the joints + joints = torch.cat([joints, landmarks], dim=1) + # Map the joints to the current dataset + + if self.joint_mapper is not None: + joints = self.joint_mapper(joints=joints, vertices=vertices) + + if apply_trans: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + A[:, :, :3, 3] += transl.unsqueeze(dim=1) + + v_shaped = None + if return_shaped: + v_shaped = self.v_template + blend_shapes(betas, self.shapedirs) + output = SMPLXOutput(vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + expression=expression, + global_orient=global_orient, + body_pose=body_pose, + left_hand_pose=left_hand_pose, + right_hand_pose=right_hand_pose, + jaw_pose=jaw_pose, + v_shaped=v_shaped, + full_pose=full_pose if return_full_pose else None, + A = A) + return output + + +class SMPLXLayer(SMPLX): + def __init__( + self, + *args, + **kwargs + ) -> None: + # Just create a SMPLX module without any member variables + super(SMPLXLayer, self).__init__( + create_global_orient=False, + create_body_pose=False, + create_left_hand_pose=False, + create_right_hand_pose=False, + create_jaw_pose=False, + create_leye_pose=False, + create_reye_pose=False, + create_betas=False, + create_expression=False, + create_transl=False, + *args, **kwargs, + ) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + body_pose: Optional[Tensor] = None, + left_hand_pose: Optional[Tensor] = None, + right_hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + expression: Optional[Tensor] = None, + jaw_pose: Optional[Tensor] = None, + leye_pose: Optional[Tensor] = None, + reye_pose: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + **kwargs + ) -> SMPLXOutput: + ''' + Forward pass for the SMPLX model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3x3 + If given, ignore the member variable and use it as the global + rotation of the body. 
Useful if someone wishes to predicts this + with an external model. It is expected to be in rotation matrix + format. (default=None) + betas: torch.tensor, optional, shape BxN_b + If given, ignore the member variable `betas` and use it + instead. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + expression: torch.tensor, optional, shape BxN_e + Expression coefficients. + For example, it can used if expression parameters + `expression` are predicted from some external model. + body_pose: torch.tensor, optional, shape BxJx3x3 + If given, ignore the member variable `body_pose` and use it + instead. For example, it can used if someone predicts the + pose of the body joints are predicted from some external model. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + left_hand_pose: torch.tensor, optional, shape Bx15x3x3 + If given, contains the pose of the left hand. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + right_hand_pose: torch.tensor, optional, shape Bx15x3x3 + If given, contains the pose of the right hand. + It should be a tensor that contains joint rotations in + rotation matrix format. (default=None) + jaw_pose: torch.tensor, optional, shape Bx3x3 + Jaw pose. It should either joint rotations in + rotation matrix format. + transl: torch.tensor, optional, shape Bx3 + Translation vector of the body. + For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. (default=True) + return_full_pose: bool, optional + Returns the full pose vector (default=False) + Returns + ------- + output: ModelOutput + A data class that contains the posed vertices and joints + ''' + device, dtype = self.shapedirs.device, self.shapedirs.dtype + + model_vars = [betas, global_orient, body_pose, transl, + expression, left_hand_pose, right_hand_pose, jaw_pose] + batch_size = 1 + for var in model_vars: + if var is None: + continue + batch_size = max(batch_size, len(var)) + + if global_orient is None: + global_orient = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + if body_pose is None: + body_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand( + batch_size, self.NUM_BODY_JOINTS, -1, -1).contiguous() + if left_hand_pose is None: + left_hand_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, 15, -1, -1).contiguous() + if right_hand_pose is None: + right_hand_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, 15, -1, -1).contiguous() + if jaw_pose is None: + jaw_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + if leye_pose is None: + leye_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + if reye_pose is None: + reye_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + if expression is None: + expression = torch.zeros([batch_size, self.num_expression_coeffs], + dtype=dtype, device=device) + if betas is None: + betas = torch.zeros([batch_size, self.num_betas], + dtype=dtype, device=device) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + + # Concatenate all pose vectors + 
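+        # Rotation-matrix layout: 1 global + 21 body + 1 jaw + 1 left eye +
+        # 1 right eye + 15 left hand + 15 right hand = 55 rotations in total
+        # (the global root plus the 54 articulated SMPL-X joints).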
full_pose = torch.cat( + [global_orient.reshape(-1, 1, 3, 3), + body_pose.reshape(-1, self.NUM_BODY_JOINTS, 3, 3), + jaw_pose.reshape(-1, 1, 3, 3), + leye_pose.reshape(-1, 1, 3, 3), + reye_pose.reshape(-1, 1, 3, 3), + left_hand_pose.reshape(-1, self.NUM_HAND_JOINTS, 3, 3), + right_hand_pose.reshape(-1, self.NUM_HAND_JOINTS, 3, 3)], + dim=1) + shape_components = torch.cat([betas, expression], dim=-1) + + shapedirs = torch.cat([self.shapedirs, self.expr_dirs], dim=-1) + + vertices, joints = lbs(shape_components, full_pose, self.v_template, + shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, + pose2rot=False, + ) + + lmk_faces_idx = self.lmk_faces_idx.unsqueeze( + dim=0).expand(batch_size, -1).contiguous() + lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).repeat( + batch_size, 1, 1) + if self.use_face_contour: + lmk_idx_and_bcoords = find_dynamic_lmk_idx_and_bcoords( + vertices, full_pose, + self.dynamic_lmk_faces_idx, + self.dynamic_lmk_bary_coords, + self.neck_kin_chain, + pose2rot=False, + ) + dyn_lmk_faces_idx, dyn_lmk_bary_coords = lmk_idx_and_bcoords + + lmk_faces_idx = torch.cat([lmk_faces_idx, dyn_lmk_faces_idx], 1) + lmk_bary_coords = torch.cat( + [lmk_bary_coords.expand(batch_size, -1, -1), + dyn_lmk_bary_coords], 1) + + landmarks = vertices2landmarks(vertices, self.faces_tensor, + lmk_faces_idx, + lmk_bary_coords) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + # Add the landmarks to the joints + joints = torch.cat([joints, landmarks], dim=1) + # Map the joints to the current dataset + + if self.joint_mapper is not None: + joints = self.joint_mapper(joints=joints, vertices=vertices) + + if transl is not None: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = SMPLXOutput(vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + expression=expression, + global_orient=global_orient, + body_pose=body_pose, + left_hand_pose=left_hand_pose, + right_hand_pose=right_hand_pose, + jaw_pose=jaw_pose, + transl=transl, + full_pose=full_pose if return_full_pose else None) + return output + + +class MANO(SMPL): + # The hand joints are replaced by MANO + NUM_BODY_JOINTS = 1 + NUM_HAND_JOINTS = 15 + NUM_JOINTS = NUM_BODY_JOINTS + NUM_HAND_JOINTS + + def __init__( + self, + model_path: str, + is_rhand: bool = True, + data_struct: Optional[Struct] = None, + create_hand_pose: bool = True, + hand_pose: Optional[Tensor] = None, + use_pca: bool = True, + num_pca_comps: int = 6, + flat_hand_mean: bool = False, + batch_size: int = 1, + dtype=torch.float32, + vertex_ids=None, + use_compressed: bool = True, + ext: str = 'pkl', + **kwargs + ) -> None: + ''' MANO model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + data_struct: Strct + A struct object. If given, then the parameters of the model are + read from the object. Otherwise, the model tries to read the + parameters from the given `model_path`. (default = None) + create_hand_pose: bool, optional + Flag for creating a member variable for the pose of the right + hand. (default = True) + hand_pose: torch.tensor, optional, BxP + The default value for the right hand pose member variable. + (default = None) + num_pca_comps: int, optional + The number of PCA components to use for each hand. + (default = 6) + flat_hand_mean: bool, optional + If False, then the pose of the hand is initialized to False. 
+ batch_size: int, optional + The batch size used for creating the member variables + dtype: torch.dtype, optional + The data type for the created variables + vertex_ids: dict, optional + A dictionary containing the indices of the extra vertices that + will be selected + ''' + + self.num_pca_comps = num_pca_comps + self.is_rhand = is_rhand + # If no data structure is passed, then load the data from the given + # model folder + if data_struct is None: + # Load the model + if osp.isdir(model_path): + model_fn = 'MANO_{}.{ext}'.format( + 'RIGHT' if is_rhand else 'LEFT', ext=ext) + mano_path = os.path.join(model_path, model_fn) + else: + mano_path = model_path + self.is_rhand = True if 'RIGHT' in os.path.basename( + model_path) else False + assert osp.exists(mano_path), 'Path {} does not exist!'.format( + mano_path) + + if ext == 'pkl': + with open(mano_path, 'rb') as mano_file: + model_data = pickle.load(mano_file, encoding='latin1') + elif ext == 'npz': + model_data = np.load(mano_path, allow_pickle=True) + else: + raise ValueError('Unknown extension: {}'.format(ext)) + data_struct = Struct(**model_data) + + if vertex_ids is None: + vertex_ids = VERTEX_IDS['smplh'] + + super(MANO, self).__init__( + model_path=model_path, data_struct=data_struct, + batch_size=batch_size, vertex_ids=vertex_ids, + use_compressed=use_compressed, dtype=dtype, ext=ext, **kwargs) + + # add only MANO tips to the extra joints + self.vertex_joint_selector.extra_joints_idxs = to_tensor( + list(VERTEX_IDS['mano'].values()), dtype=torch.long) + + self.use_pca = use_pca + self.num_pca_comps = num_pca_comps + if self.num_pca_comps == 45: + self.use_pca = False + self.flat_hand_mean = flat_hand_mean + + hand_components = data_struct.hands_components[:num_pca_comps] + + self.np_hand_components = hand_components + + if self.use_pca: + self.register_buffer( + 'hand_components', + torch.tensor(hand_components, dtype=dtype)) + + if self.flat_hand_mean: + hand_mean = np.zeros_like(data_struct.hands_mean) + else: + hand_mean = data_struct.hands_mean + + self.register_buffer('hand_mean', + to_tensor(hand_mean, dtype=self.dtype)) + + # Create the buffers for the pose of the left hand + hand_pose_dim = num_pca_comps if use_pca else 3 * self.NUM_HAND_JOINTS + if create_hand_pose: + if hand_pose is None: + default_hand_pose = torch.zeros([batch_size, hand_pose_dim], + dtype=dtype) + else: + default_hand_pose = torch.tensor(hand_pose, dtype=dtype) + + hand_pose_param = nn.Parameter(default_hand_pose, + requires_grad=True) + self.register_parameter('hand_pose', + hand_pose_param) + + # Create the buffer for the mean pose. + pose_mean = self.create_mean_pose( + data_struct, flat_hand_mean=flat_hand_mean) + pose_mean_tensor = pose_mean.clone().to(dtype) + # pose_mean_tensor = torch.tensor(pose_mean, dtype=dtype) + self.register_buffer('pose_mean', pose_mean_tensor) + + def name(self) -> str: + return 'MANO' + + def create_mean_pose(self, data_struct, flat_hand_mean=False): + # Create the array for the mean pose. 
If flat_hand is false, then use + # the mean that is given by the data, rather than the flat open hand + global_orient_mean = torch.zeros([3], dtype=self.dtype) + pose_mean = torch.cat([global_orient_mean, self.hand_mean], dim=0) + return pose_mean + + def extra_repr(self): + msg = [super(MANO, self).extra_repr()] + if self.use_pca: + msg.append(f'Number of PCA components: {self.num_pca_comps}') + msg.append(f'Flat hand mean: {self.flat_hand_mean}') + return '\n'.join(msg) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + **kwargs + ) -> MANOOutput: + ''' Forward pass for the MANO model + ''' + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = (global_orient if global_orient is not None else + self.global_orient) + betas = betas if betas is not None else self.betas + hand_pose = (hand_pose if hand_pose is not None else + self.hand_pose) + + apply_trans = transl is not None or hasattr(self, 'transl') + if transl is None: + if hasattr(self, 'transl'): + transl = self.transl + + if self.use_pca: + hand_pose = torch.einsum( + 'bi,ij->bj', [hand_pose, self.hand_components]) + + full_pose = torch.cat([global_orient, hand_pose], dim=1) + full_pose += self.pose_mean + + vertices, joints = lbs(betas, full_pose, self.v_template, + self.shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, pose2rot=True, + ) + + # # Add pre-selected extra joints that might be needed + # joints = self.vertex_joint_selector(vertices, joints) + + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if apply_trans: + joints = joints + transl.unsqueeze(dim=1) + vertices = vertices + transl.unsqueeze(dim=1) + + output = MANOOutput(vertices=vertices if return_verts else None, + joints=joints if return_verts else None, + betas=betas, + global_orient=global_orient, + hand_pose=hand_pose, + full_pose=full_pose if return_full_pose else None) + + return output + + +class MANOLayer(MANO): + def __init__(self, *args, **kwargs) -> None: + ''' MANO as a layer model constructor + ''' + super(MANOLayer, self).__init__( + create_global_orient=False, + create_hand_pose=False, + create_betas=False, + create_transl=False, + *args, **kwargs) + + def name(self) -> str: + return 'MANO' + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + hand_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + **kwargs + ) -> MANOOutput: + ''' Forward pass for the MANO model + ''' + device, dtype = self.shapedirs.device, self.shapedirs.dtype + if global_orient is None: + batch_size = 1 + global_orient = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + else: + batch_size = global_orient.shape[0] + if hand_pose is None: + hand_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, 15, -1, -1).contiguous() + if betas is None: + betas = torch.zeros( + [batch_size, self.num_betas], dtype=dtype, device=device) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + + full_pose = torch.cat([global_orient, hand_pose], dim=1) + vertices, joints = lbs(betas, full_pose, self.v_template, + self.shapedirs, self.posedirs, + 
self.J_regressor, self.parents, + self.lbs_weights, pose2rot=False) + + if self.joint_mapper is not None: + joints = self.joint_mapper(joints) + + if transl is not None: + joints = joints + transl.unsqueeze(dim=1) + vertices = vertices + transl.unsqueeze(dim=1) + + output = MANOOutput( + vertices=vertices if return_verts else None, + joints=joints if return_verts else None, + betas=betas, + global_orient=global_orient, + hand_pose=hand_pose, + full_pose=full_pose if return_full_pose else None) + + return output + + +class FLAME(SMPL): + NUM_JOINTS = 5 + SHAPE_SPACE_DIM = 300 + EXPRESSION_SPACE_DIM = 100 + NECK_IDX = 0 + + def __init__( + self, + model_path: str, + data_struct=None, + num_expression_coeffs=10, + create_expression: bool = True, + expression: Optional[Tensor] = None, + create_neck_pose: bool = True, + neck_pose: Optional[Tensor] = None, + create_jaw_pose: bool = True, + jaw_pose: Optional[Tensor] = None, + create_leye_pose: bool = True, + leye_pose: Optional[Tensor] = None, + create_reye_pose=True, + reye_pose: Optional[Tensor] = None, + use_face_contour=False, + batch_size: int = 1, + gender: str = 'neutral', + dtype: torch.dtype = torch.float32, + ext='pkl', + **kwargs + ) -> None: + ''' FLAME model constructor + + Parameters + ---------- + model_path: str + The path to the folder or to the file where the model + parameters are stored + num_expression_coeffs: int, optional + Number of expression components to use + (default = 10). + create_expression: bool, optional + Flag for creating a member variable for the expression space + (default = True). + expression: torch.tensor, optional, Bx10 + The default value for the expression member variable. + (default = None) + create_neck_pose: bool, optional + Flag for creating a member variable for the neck pose. + (default = False) + neck_pose: torch.tensor, optional, Bx3 + The default value for the neck pose variable. + (default = None) + create_jaw_pose: bool, optional + Flag for creating a member variable for the jaw pose. + (default = False) + jaw_pose: torch.tensor, optional, Bx3 + The default value for the jaw pose variable. + (default = None) + create_leye_pose: bool, optional + Flag for creating a member variable for the left eye pose. + (default = False) + leye_pose: torch.tensor, optional, Bx10 + The default value for the left eye pose variable. + (default = None) + create_reye_pose: bool, optional + Flag for creating a member variable for the right eye pose. + (default = False) + reye_pose: torch.tensor, optional, Bx10 + The default value for the right eye pose variable. 
+ (default = None) + use_face_contour: bool, optional + Whether to compute the keypoints that form the facial contour + batch_size: int, optional + The batch size used for creating the member variables + gender: str, optional + Which gender to load + dtype: torch.dtype + The data type for the created variables + ''' + model_fn = f'FLAME_{gender.upper()}.{ext}' + flame_path = os.path.join(model_path, model_fn) + assert osp.exists(flame_path), 'Path {} does not exist!'.format( + flame_path) + if ext == 'npz': + file_data = np.load(flame_path, allow_pickle=True) + elif ext == 'pkl': + with open(flame_path, 'rb') as smpl_file: + file_data = pickle.load(smpl_file, encoding='latin1') + else: + raise ValueError('Unknown extension: {}'.format(ext)) + data_struct = Struct(**file_data) + + super(FLAME, self).__init__( + model_path=model_path, + data_struct=data_struct, + dtype=dtype, + batch_size=batch_size, + gender=gender, + ext=ext, + **kwargs) + + self.use_face_contour = use_face_contour + + self.vertex_joint_selector.extra_joints_idxs = to_tensor( + [], dtype=torch.long) + + if create_neck_pose: + if neck_pose is None: + default_neck_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_neck_pose = torch.tensor(neck_pose, dtype=dtype) + neck_pose_param = nn.Parameter( + default_neck_pose, requires_grad=True) + self.register_parameter('neck_pose', neck_pose_param) + + if create_jaw_pose: + if jaw_pose is None: + default_jaw_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_jaw_pose = torch.tensor(jaw_pose, dtype=dtype) + jaw_pose_param = nn.Parameter(default_jaw_pose, + requires_grad=True) + self.register_parameter('jaw_pose', jaw_pose_param) + + if create_leye_pose: + if leye_pose is None: + default_leye_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_leye_pose = torch.tensor(leye_pose, dtype=dtype) + leye_pose_param = nn.Parameter(default_leye_pose, + requires_grad=True) + self.register_parameter('leye_pose', leye_pose_param) + + if create_reye_pose: + if reye_pose is None: + default_reye_pose = torch.zeros([batch_size, 3], dtype=dtype) + else: + default_reye_pose = torch.tensor(reye_pose, dtype=dtype) + reye_pose_param = nn.Parameter(default_reye_pose, + requires_grad=True) + self.register_parameter('reye_pose', reye_pose_param) + + shapedirs = data_struct.shapedirs + if len(shapedirs.shape) < 3: + shapedirs = shapedirs[:, :, None] + if (shapedirs.shape[-1] < self.SHAPE_SPACE_DIM + + self.EXPRESSION_SPACE_DIM): + print(f'WARNING: You are using a {self.name()} model, with only' + ' 10 shape and 10 expression coefficients.') + expr_start_idx = 10 + expr_end_idx = 20 + num_expression_coeffs = min(num_expression_coeffs, 10) + else: + expr_start_idx = self.SHAPE_SPACE_DIM + expr_end_idx = self.SHAPE_SPACE_DIM + num_expression_coeffs + num_expression_coeffs = min( + num_expression_coeffs, self.EXPRESSION_SPACE_DIM) + + self._num_expression_coeffs = num_expression_coeffs + + expr_dirs = shapedirs[:, :, expr_start_idx:expr_end_idx] + self.register_buffer( + 'expr_dirs', to_tensor(to_np(expr_dirs), dtype=dtype)) + + if create_expression: + if expression is None: + default_expression = torch.zeros( + [batch_size, self.num_expression_coeffs], dtype=dtype) + else: + default_expression = torch.tensor(expression, dtype=dtype) + expression_param = nn.Parameter(default_expression, + requires_grad=True) + self.register_parameter('expression', expression_param) + + # The pickle file that contains the barycentric coordinates for + # regressing the landmarks + 
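+        # flame_static_embedding.pkl (and flame_dynamic_embedding.npy when
+        # use_face_contour is True) are expected alongside the FLAME model file
+        # inside model_path.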
landmark_bcoord_filename = osp.join( + model_path, 'flame_static_embedding.pkl') + + with open(landmark_bcoord_filename, 'rb') as fp: + landmarks_data = pickle.load(fp, encoding='latin1') + + lmk_faces_idx = landmarks_data['lmk_face_idx'].astype(np.int64) + self.register_buffer('lmk_faces_idx', + torch.tensor(lmk_faces_idx, dtype=torch.long)) + lmk_bary_coords = landmarks_data['lmk_b_coords'] + self.register_buffer('lmk_bary_coords', + torch.tensor(lmk_bary_coords, dtype=dtype)) + if self.use_face_contour: + face_contour_path = os.path.join( + model_path, 'flame_dynamic_embedding.npy') + contour_embeddings = np.load(face_contour_path, + allow_pickle=True, + encoding='latin1')[()] + + dynamic_lmk_faces_idx = np.array( + contour_embeddings['lmk_face_idx'], dtype=np.int64) + dynamic_lmk_faces_idx = torch.tensor( + dynamic_lmk_faces_idx, + dtype=torch.long) + self.register_buffer('dynamic_lmk_faces_idx', + dynamic_lmk_faces_idx) + + dynamic_lmk_b_coords = torch.tensor( + contour_embeddings['lmk_b_coords'], dtype=dtype) + self.register_buffer( + 'dynamic_lmk_bary_coords', dynamic_lmk_b_coords) + + neck_kin_chain = find_joint_kin_chain(self.NECK_IDX, self.parents) + self.register_buffer( + 'neck_kin_chain', + torch.tensor(neck_kin_chain, dtype=torch.long)) + + @property + def num_expression_coeffs(self): + return self._num_expression_coeffs + + def name(self) -> str: + return 'FLAME' + + def extra_repr(self): + msg = [ + super(FLAME, self).extra_repr(), + f'Number of Expression Coefficients: {self.num_expression_coeffs}', + f'Use face contour: {self.use_face_contour}', + ] + return '\n'.join(msg) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + neck_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + expression: Optional[Tensor] = None, + jaw_pose: Optional[Tensor] = None, + leye_pose: Optional[Tensor] = None, + reye_pose: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs + ) -> FLAMEOutput: + ''' + Forward pass for the SMPLX model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3 + If given, ignore the member variable and use it as the global + rotation of the body. Useful if someone wishes to predicts this + with an external model. (default=None) + betas: torch.tensor, optional, shape Bx10 + If given, ignore the member variable `betas` and use it + instead. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + expression: torch.tensor, optional, shape Bx10 + If given, ignore the member variable `expression` and use it + instead. For example, it can used if expression parameters + `expression` are predicted from some external model. + jaw_pose: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `jaw_pose` and + use this instead. It should either joint rotations in + axis-angle format. + jaw_pose: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `jaw_pose` and + use this instead. It should either joint rotations in + axis-angle format. + transl: torch.tensor, optional, shape Bx3 + If given, ignore the member variable `transl` and use it + instead. For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. 
(default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + output: ModelOutput + A named tuple of type `ModelOutput` + ''' + + # If no shape and pose parameters are passed along, then use the + # ones from the module + global_orient = (global_orient if global_orient is not None else + self.global_orient) + jaw_pose = jaw_pose if jaw_pose is not None else self.jaw_pose + neck_pose = neck_pose if neck_pose is not None else self.neck_pose + + leye_pose = leye_pose if leye_pose is not None else self.leye_pose + reye_pose = reye_pose if reye_pose is not None else self.reye_pose + + betas = betas if betas is not None else self.betas + expression = expression if expression is not None else self.expression + + apply_trans = transl is not None or hasattr(self, 'transl') + if transl is None: + if hasattr(self, 'transl'): + transl = self.transl + + full_pose = torch.cat( + [global_orient, neck_pose, jaw_pose, leye_pose, reye_pose], dim=1) + + batch_size = max(betas.shape[0], global_orient.shape[0], + jaw_pose.shape[0]) + # Concatenate the shape and expression coefficients + scale = int(batch_size / betas.shape[0]) + if scale > 1: + betas = betas.expand(scale, -1) + shape_components = torch.cat([betas, expression], dim=-1) + shapedirs = torch.cat([self.shapedirs, self.expr_dirs], dim=-1) + + vertices, joints = lbs(shape_components, full_pose, self.v_template, + shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, pose2rot=pose2rot, + ) + + lmk_faces_idx = self.lmk_faces_idx.unsqueeze( + dim=0).expand(batch_size, -1).contiguous() + lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).repeat( + batch_size, 1, 1) + if self.use_face_contour: + lmk_idx_and_bcoords = find_dynamic_lmk_idx_and_bcoords( + vertices, full_pose, self.dynamic_lmk_faces_idx, + self.dynamic_lmk_bary_coords, + self.neck_kin_chain, + pose2rot=True, + ) + dyn_lmk_faces_idx, dyn_lmk_bary_coords = lmk_idx_and_bcoords + lmk_faces_idx = torch.cat([lmk_faces_idx, + dyn_lmk_faces_idx], 1) + lmk_bary_coords = torch.cat( + [lmk_bary_coords.expand(batch_size, -1, -1), + dyn_lmk_bary_coords], 1) + + landmarks = vertices2landmarks(vertices, self.faces_tensor, + lmk_faces_idx, + lmk_bary_coords) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + # Add the landmarks to the joints + joints = torch.cat([joints, landmarks], dim=1) + + # Map the joints to the current dataset + if self.joint_mapper is not None: + joints = self.joint_mapper(joints=joints, vertices=vertices) + + if apply_trans: + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = FLAMEOutput(vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + expression=expression, + global_orient=global_orient, + neck_pose=neck_pose, + jaw_pose=jaw_pose, + full_pose=full_pose if return_full_pose else None) + return output + + +class FLAMELayer(FLAME): + def __init__(self, *args, **kwargs) -> None: + ''' FLAME as a layer model constructor ''' + super(FLAMELayer, self).__init__( + create_betas=False, + create_expression=False, + create_global_orient=False, + create_neck_pose=False, + create_jaw_pose=False, + create_leye_pose=False, + create_reye_pose=False, + *args, + **kwargs) + + def forward( + self, + betas: Optional[Tensor] = None, + global_orient: Optional[Tensor] = None, + neck_pose: Optional[Tensor] = None, + transl: Optional[Tensor] = None, + expression: Optional[Tensor] = None, + 
jaw_pose: Optional[Tensor] = None, + leye_pose: Optional[Tensor] = None, + reye_pose: Optional[Tensor] = None, + return_verts: bool = True, + return_full_pose: bool = False, + pose2rot: bool = True, + **kwargs + ) -> FLAMEOutput: + ''' + Forward pass for the SMPLX model + + Parameters + ---------- + global_orient: torch.tensor, optional, shape Bx3x3 + Global rotation of the body. Useful if someone wishes to + predicts this with an external model. It is expected to be in + rotation matrix format. (default=None) + betas: torch.tensor, optional, shape BxN_b + Shape parameters. For example, it can used if shape parameters + `betas` are predicted from some external model. + (default=None) + expression: torch.tensor, optional, shape BxN_e + If given, ignore the member variable `expression` and use it + instead. For example, it can used if expression parameters + `expression` are predicted from some external model. + jaw_pose: torch.tensor, optional, shape Bx3x3 + Jaw pose. It should either joint rotations in + rotation matrix format. + transl: torch.tensor, optional, shape Bx3 + Translation vector of the body. + For example, it can used if the translation + `transl` is predicted from some external model. + (default=None) + return_verts: bool, optional + Return the vertices. (default=True) + return_full_pose: bool, optional + Returns the full axis-angle pose vector (default=False) + + Returns + ------- + output: ModelOutput + A named tuple of type `ModelOutput` + ''' + device, dtype = self.shapedirs.device, self.shapedirs.dtype + if global_orient is None: + batch_size = 1 + global_orient = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + else: + batch_size = global_orient.shape[0] + if neck_pose is None: + neck_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, 1, -1, -1).contiguous() + if jaw_pose is None: + jaw_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + if leye_pose is None: + leye_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + if reye_pose is None: + reye_pose = torch.eye(3, device=device, dtype=dtype).view( + 1, 1, 3, 3).expand(batch_size, -1, -1, -1).contiguous() + if betas is None: + betas = torch.zeros([batch_size, self.num_betas], + dtype=dtype, device=device) + if expression is None: + expression = torch.zeros([batch_size, self.num_expression_coeffs], + dtype=dtype, device=device) + if transl is None: + transl = torch.zeros([batch_size, 3], dtype=dtype, device=device) + + full_pose = torch.cat( + [global_orient, neck_pose, jaw_pose, leye_pose, reye_pose], dim=1) + + shape_components = torch.cat([betas, expression], dim=-1) + shapedirs = torch.cat([self.shapedirs, self.expr_dirs], dim=-1) + + vertices, joints = lbs(shape_components, full_pose, self.v_template, + shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, pose2rot=False, + ) + + lmk_faces_idx = self.lmk_faces_idx.unsqueeze( + dim=0).expand(batch_size, -1).contiguous() + lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).repeat( + batch_size, 1, 1) + if self.use_face_contour: + lmk_idx_and_bcoords = find_dynamic_lmk_idx_and_bcoords( + vertices, full_pose, self.dynamic_lmk_faces_idx, + self.dynamic_lmk_bary_coords, + self.neck_kin_chain, + pose2rot=False, + ) + dyn_lmk_faces_idx, dyn_lmk_bary_coords = lmk_idx_and_bcoords + lmk_faces_idx = torch.cat([lmk_faces_idx, + 
dyn_lmk_faces_idx], 1) + lmk_bary_coords = torch.cat( + [lmk_bary_coords.expand(batch_size, -1, -1), + dyn_lmk_bary_coords], 1) + + landmarks = vertices2landmarks(vertices, self.faces_tensor, + lmk_faces_idx, + lmk_bary_coords) + + # Add any extra joints that might be needed + joints = self.vertex_joint_selector(vertices, joints) + # Add the landmarks to the joints + joints = torch.cat([joints, landmarks], dim=1) + + # Map the joints to the current dataset + if self.joint_mapper is not None: + joints = self.joint_mapper(joints=joints, vertices=vertices) + + joints += transl.unsqueeze(dim=1) + vertices += transl.unsqueeze(dim=1) + + output = FLAMEOutput(vertices=vertices if return_verts else None, + joints=joints, + betas=betas, + expression=expression, + global_orient=global_orient, + neck_pose=neck_pose, + jaw_pose=jaw_pose, + full_pose=full_pose if return_full_pose else None) + return output + + +def build_layer( + model_path: str, + model_type: str = 'smpl', + **kwargs +) -> Union[SMPLLayer, SMPLHLayer, SMPLXLayer, MANOLayer, FLAMELayer]: + ''' Method for creating a model from a path and a model type + + Parameters + ---------- + model_path: str + Either the path to the model you wish to load or a folder, + where each subfolder contains the differents types, i.e.: + model_path: + | + |-- smpl + |-- SMPL_FEMALE + |-- SMPL_NEUTRAL + |-- SMPL_MALE + |-- smplh + |-- SMPLH_FEMALE + |-- SMPLH_MALE + |-- smplx + |-- SMPLX_FEMALE + |-- SMPLX_NEUTRAL + |-- SMPLX_MALE + |-- mano + |-- MANO RIGHT + |-- MANO LEFT + |-- flame + |-- FLAME_FEMALE + |-- FLAME_MALE + |-- FLAME_NEUTRAL + + model_type: str, optional + When model_path is a folder, then this parameter specifies the + type of model to be loaded + **kwargs: dict + Keyword arguments + + Returns + ------- + body_model: nn.Module + The PyTorch module that implements the corresponding body model + Raises + ------ + ValueError: In case the model type is not one of SMPL, SMPLH, + SMPLX, MANO or FLAME + ''' + + if osp.isdir(model_path): + model_path = os.path.join(model_path, model_type) + else: + model_type = osp.basename(model_path).split('_')[0].lower() + + if model_type.lower() == 'smpl': + return SMPLLayer(model_path, **kwargs) + elif model_type.lower() == 'smplh': + return SMPLHLayer(model_path, **kwargs) + elif model_type.lower() == 'smplx': + return SMPLXLayer(model_path, **kwargs) + elif 'mano' in model_type.lower(): + return MANOLayer(model_path, **kwargs) + elif 'flame' in model_type.lower(): + return FLAMELayer(model_path, **kwargs) + else: + raise ValueError(f'Unknown model type {model_type}, exiting!') + + +def create( + model_path: str, + model_type: str = 'smpl', + **kwargs +) -> Union[SMPL, SMPLH, SMPLX, MANO, FLAME]: + ''' Method for creating a model from a path and a model type + + Parameters + ---------- + model_path: str + Either the path to the model you wish to load or a folder, + where each subfolder contains the differents types, i.e.: + model_path: + | + |-- smpl + |-- SMPL_FEMALE + |-- SMPL_NEUTRAL + |-- SMPL_MALE + |-- smplh + |-- SMPLH_FEMALE + |-- SMPLH_MALE + |-- smplx + |-- SMPLX_FEMALE + |-- SMPLX_NEUTRAL + |-- SMPLX_MALE + |-- mano + |-- MANO RIGHT + |-- MANO LEFT + + model_type: str, optional + When model_path is a folder, then this parameter specifies the + type of model to be loaded + **kwargs: dict + Keyword arguments + + Returns + ------- + body_model: nn.Module + The PyTorch module that implements the corresponding body model + Raises + ------ + ValueError: In case the model type is not one of SMPL, SMPLH, + 
SMPLX, MANO or FLAME + ''' + + # If it's a folder, assume + if osp.isdir(model_path): + model_path = os.path.join(model_path, model_type) + else: + model_type = osp.basename(model_path).split('_')[0].lower() + + if model_type.lower() == 'smpl': + return SMPL(model_path, **kwargs) + elif model_type.lower() == 'smplh': + return SMPLH(model_path, **kwargs) + elif model_type.lower() == 'smplx': + return SMPLX(model_path, **kwargs) + elif 'mano' in model_type.lower(): + return MANO(model_path, **kwargs) + elif 'flame' in model_type.lower(): + return FLAME(model_path, **kwargs) + else: + raise ValueError(f'Unknown model type {model_type}, exiting!') diff --git a/LHM/models/rendering/smplx_gsavatar/joint_names.py b/LHM/models/rendering/smplx_gsavatar/joint_names.py new file mode 100644 index 0000000000000000000000000000000000000000..b7326ffdeaf5b61e616c31cfddd18249143e2118 --- /dev/null +++ b/LHM/models/rendering/smplx_gsavatar/joint_names.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +JOINT_NAMES = [ + 'pelvis', + 'left_hip', + 'right_hip', + 'spine1', + 'left_knee', + 'right_knee', + 'spine2', + 'left_ankle', + 'right_ankle', + 'spine3', + 'left_foot', + 'right_foot', + 'neck', + 'left_collar', + 'right_collar', + 'head', + 'left_shoulder', + 'right_shoulder', + 'left_elbow', + 'right_elbow', + 'left_wrist', + 'right_wrist', + 'jaw', + 'left_eye_smplhf', + 'right_eye_smplhf', + 'left_index1', + 'left_index2', + 'left_index3', + 'left_middle1', + 'left_middle2', + 'left_middle3', + 'left_pinky1', + 'left_pinky2', + 'left_pinky3', + 'left_ring1', + 'left_ring2', + 'left_ring3', + 'left_thumb1', + 'left_thumb2', + 'left_thumb3', + 'right_index1', + 'right_index2', + 'right_index3', + 'right_middle1', + 'right_middle2', + 'right_middle3', + 'right_pinky1', + 'right_pinky2', + 'right_pinky3', + 'right_ring1', + 'right_ring2', + 'right_ring3', + 'right_thumb1', + 'right_thumb2', + 'right_thumb3', + 'nose', + 'right_eye', + 'left_eye', + 'right_ear', + 'left_ear', + 'left_big_toe', + 'left_small_toe', + 'left_heel', + 'right_big_toe', + 'right_small_toe', + 'right_heel', + 'left_thumb', + 'left_index', + 'left_middle', + 'left_ring', + 'left_pinky', + 'right_thumb', + 'right_index', + 'right_middle', + 'right_ring', + 'right_pinky', + 'right_eye_brow1', + 'right_eye_brow2', + 'right_eye_brow3', + 'right_eye_brow4', + 'right_eye_brow5', + 'left_eye_brow5', + 'left_eye_brow4', + 'left_eye_brow3', + 'left_eye_brow2', + 'left_eye_brow1', + 'nose1', + 'nose2', + 'nose3', + 'nose4', + 'right_nose_2', + 'right_nose_1', + 'nose_middle', + 'left_nose_1', + 'left_nose_2', + 'right_eye1', + 'right_eye2', + 'right_eye3', + 'right_eye4', + 'right_eye5', + 'right_eye6', + 'left_eye4', + 'left_eye3', + 'left_eye2', + 'left_eye1', + 'left_eye6', + 'left_eye5', + 'right_mouth_1', + 'right_mouth_2', + 'right_mouth_3', + 'mouth_top', + 'left_mouth_3', + 
'left_mouth_2', + 'left_mouth_1', + 'left_mouth_5', # 59 in OpenPose output + 'left_mouth_4', # 58 in OpenPose output + 'mouth_bottom', + 'right_mouth_4', + 'right_mouth_5', + 'right_lip_1', + 'right_lip_2', + 'lip_top', + 'left_lip_2', + 'left_lip_1', + 'left_lip_3', + 'lip_bottom', + 'right_lip_3', + # Face contour + 'right_contour_1', + 'right_contour_2', + 'right_contour_3', + 'right_contour_4', + 'right_contour_5', + 'right_contour_6', + 'right_contour_7', + 'right_contour_8', + 'contour_middle', + 'left_contour_8', + 'left_contour_7', + 'left_contour_6', + 'left_contour_5', + 'left_contour_4', + 'left_contour_3', + 'left_contour_2', + 'left_contour_1', +] + + +SMPLH_JOINT_NAMES = [ + 'pelvis', + 'left_hip', + 'right_hip', + 'spine1', + 'left_knee', + 'right_knee', + 'spine2', + 'left_ankle', + 'right_ankle', + 'spine3', + 'left_foot', + 'right_foot', + 'neck', + 'left_collar', + 'right_collar', + 'head', + 'left_shoulder', + 'right_shoulder', + 'left_elbow', + 'right_elbow', + 'left_wrist', + 'right_wrist', + 'left_index1', + 'left_index2', + 'left_index3', + 'left_middle1', + 'left_middle2', + 'left_middle3', + 'left_pinky1', + 'left_pinky2', + 'left_pinky3', + 'left_ring1', + 'left_ring2', + 'left_ring3', + 'left_thumb1', + 'left_thumb2', + 'left_thumb3', + 'right_index1', + 'right_index2', + 'right_index3', + 'right_middle1', + 'right_middle2', + 'right_middle3', + 'right_pinky1', + 'right_pinky2', + 'right_pinky3', + 'right_ring1', + 'right_ring2', + 'right_ring3', + 'right_thumb1', + 'right_thumb2', + 'right_thumb3', + 'nose', + 'right_eye', + 'left_eye', + 'right_ear', + 'left_ear', + 'left_big_toe', + 'left_small_toe', + 'left_heel', + 'right_big_toe', + 'right_small_toe', + 'right_heel', + 'left_thumb', + 'left_index', + 'left_middle', + 'left_ring', + 'left_pinky', + 'right_thumb', + 'right_index', + 'right_middle', + 'right_ring', + 'right_pinky', +] diff --git a/LHM/models/rendering/smplx_gsavatar/lbs.py b/LHM/models/rendering/smplx_gsavatar/lbs.py new file mode 100644 index 0000000000000000000000000000000000000000..bc1bfab459d4bd10c1a521f1145fb09dabe13a36 --- /dev/null +++ b/LHM/models/rendering/smplx_gsavatar/lbs.py @@ -0,0 +1,405 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import absolute_import +from __future__ import print_function +from __future__ import division + +from typing import Tuple, List +import numpy as np + +import torch +import torch.nn.functional as F + +from .utils import rot_mat_to_euler, Tensor + + +def find_dynamic_lmk_idx_and_bcoords( + vertices: Tensor, + pose: Tensor, + dynamic_lmk_faces_idx: Tensor, + dynamic_lmk_b_coords: Tensor, + neck_kin_chain: List[int], + pose2rot: bool = True, +) -> Tuple[Tensor, Tensor]: + ''' Compute the faces, barycentric coordinates for the dynamic landmarks + + + To do so, we first compute the rotation of the neck around the y-axis + and then use a pre-computed look-up table to find the faces and the + barycentric coordinates that will be used. + + Special thanks to Soubhik Sanyal (soubhik.sanyal@tuebingen.mpg.de) + for providing the original TensorFlow implementation and for the LUT. + + Parameters + ---------- + vertices: torch.tensor BxVx3, dtype = torch.float32 + The tensor of input vertices + pose: torch.tensor Bx(Jx3), dtype = torch.float32 + The current pose of the body model + dynamic_lmk_faces_idx: torch.tensor L, dtype = torch.long + The look-up table from neck rotation to faces + dynamic_lmk_b_coords: torch.tensor Lx3, dtype = torch.float32 + The look-up table from neck rotation to barycentric coordinates + neck_kin_chain: list + A python list that contains the indices of the joints that form the + kinematic chain of the neck. + dtype: torch.dtype, optional + + Returns + ------- + dyn_lmk_faces_idx: torch.tensor, dtype = torch.long + A tensor of size BxL that contains the indices of the faces that + will be used to compute the current dynamic landmarks. + dyn_lmk_b_coords: torch.tensor, dtype = torch.float32 + A tensor of size BxL that contains the indices of the faces that + will be used to compute the current dynamic landmarks. + ''' + + dtype = vertices.dtype + batch_size = vertices.shape[0] + + if pose2rot: + aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1, + neck_kin_chain) + rot_mats = batch_rodrigues( + aa_pose.view(-1, 3)).view(batch_size, -1, 3, 3) + else: + rot_mats = torch.index_select( + pose.view(batch_size, -1, 3, 3), 1, neck_kin_chain) + + rel_rot_mat = torch.eye( + 3, device=vertices.device, dtype=dtype).unsqueeze_(dim=0).repeat( + batch_size, 1, 1) + for idx in range(len(neck_kin_chain)): + rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat) + + y_rot_angle = torch.round( + torch.clamp(-rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi, + max=39)).to(dtype=torch.long) + neg_mask = y_rot_angle.lt(0).to(dtype=torch.long) + mask = y_rot_angle.lt(-39).to(dtype=torch.long) + neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle) + y_rot_angle = (neg_mask * neg_vals + + (1 - neg_mask) * y_rot_angle) + + dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx, + 0, y_rot_angle) + dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords, + 0, y_rot_angle) + + return dyn_lmk_faces_idx, dyn_lmk_b_coords + + +def vertices2landmarks( + vertices: Tensor, + faces: Tensor, + lmk_faces_idx: Tensor, + lmk_bary_coords: Tensor +) -> Tensor: + ''' Calculates landmarks by barycentric interpolation + + Parameters + ---------- + vertices: torch.tensor BxVx3, dtype = torch.float32 + The tensor of input vertices + faces: torch.tensor Fx3, dtype = torch.long + The faces of the mesh + lmk_faces_idx: torch.tensor L, dtype = torch.long + The tensor with the indices of the faces used to calculate the + landmarks. 
+ lmk_bary_coords: torch.tensor Lx3, dtype = torch.float32 + The tensor of barycentric coordinates that are used to interpolate + the landmarks + + Returns + ------- + landmarks: torch.tensor BxLx3, dtype = torch.float32 + The coordinates of the landmarks for each mesh in the batch + ''' + # Extract the indices of the vertices for each face + # BxLx3 + batch_size, num_verts = vertices.shape[:2] + device = vertices.device + + lmk_faces = torch.index_select(faces, 0, lmk_faces_idx.view(-1)).view( + batch_size, -1, 3) + + lmk_faces += torch.arange( + batch_size, dtype=torch.long, device=device).view(-1, 1, 1) * num_verts + + lmk_vertices = vertices.view(-1, 3)[lmk_faces].view( + batch_size, -1, 3, 3) + + landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords]) + return landmarks + + +def lbs( + betas: Tensor, + pose: Tensor, + v_template: Tensor, + shapedirs: Tensor, + posedirs: Tensor, + J_regressor: Tensor, + parents: Tensor, + lbs_weights: Tensor, + pose2rot: bool = True, + return_affine_mat = False, +) -> Tuple[Tensor, Tensor]: + ''' Performs Linear Blend Skinning with the given shape and pose parameters + + Parameters + ---------- + betas : torch.tensor BxNB + The tensor of shape parameters + pose : torch.tensor Bx(J + 1) * 3 + The pose parameters in axis-angle format + v_template torch.tensor BxVx3 + The template mesh that will be deformed + shapedirs : torch.tensor 1xNB + The tensor of PCA shape displacements + posedirs : torch.tensor Px(V * 3) + The pose PCA coefficients + J_regressor : torch.tensor JxV + The regressor array that is used to calculate the joints from + the position of the vertices + parents: torch.tensor J + The array that describes the kinematic tree for the model + lbs_weights: torch.tensor N x V x (J + 1) + The linear blend skinning weights that represent how much the + rotation matrix of each part affects each vertex + pose2rot: bool, optional + Flag on whether to convert the input pose tensor to rotation + matrices. The default value is True. If False, then the pose tensor + should already contain rotation matrices and have a size of + Bx(J + 1)x9 + dtype: torch.dtype, optional + + Returns + ------- + verts: torch.tensor BxVx3 + The vertices of the mesh after applying the shape and pose + displacements. + joints: torch.tensor BxJx3 + The joints of the model + ''' + + batch_size = max(betas.shape[0], pose.shape[0]) + device, dtype = betas.device, betas.dtype + + # Add shape contribution + v_shaped = v_template + blend_shapes(betas, shapedirs) + + # Get the joints + # NxJx3 array + J = vertices2joints(J_regressor, v_shaped) + + # 3. Add pose blend shapes + # N x J x 3 x 3 + ident = torch.eye(3, dtype=dtype, device=device) + if pose2rot: + rot_mats = batch_rodrigues(pose.view(-1, 3)).view( + [batch_size, -1, 3, 3]) + + pose_feature = (rot_mats[:, 1:, :, :] - ident).view([batch_size, -1]) + # (N x P) x (P, V * 3) -> N x V x 3 + pose_offsets = torch.matmul( + pose_feature, posedirs).view(batch_size, -1, 3) + else: + pose_feature = pose[:, 1:].view(batch_size, -1, 3, 3) - ident + rot_mats = pose.view(batch_size, -1, 3, 3) + + pose_offsets = torch.matmul(pose_feature.view(batch_size, -1), + posedirs).view(batch_size, -1, 3) + + v_posed = pose_offsets + v_shaped + # 4. Get the global joint location + J_transformed, A = batch_rigid_transform(rot_mats, J, parents, dtype=dtype) + + # 5. 
Do skinning: + # W is N x V x (J + 1) + W = lbs_weights.unsqueeze(dim=0).expand([batch_size, -1, -1]) + # (N x V x (J + 1)) x (N x (J + 1) x 16) + num_joints = J_regressor.shape[0] + T = torch.matmul(W, A.view(batch_size, num_joints, 16)) \ + .view(batch_size, -1, 4, 4) + + homogen_coord = torch.ones([batch_size, v_posed.shape[1], 1], + dtype=dtype, device=device) + v_posed_homo = torch.cat([v_posed, homogen_coord], dim=2) + v_homo = torch.matmul(T, torch.unsqueeze(v_posed_homo, dim=-1)) + + verts = v_homo[:, :, :3, 0] + + if return_affine_mat: + return verts, J_transformed, A + else: + return verts, J_transformed + + +def vertices2joints(J_regressor: Tensor, vertices: Tensor) -> Tensor: + ''' Calculates the 3D joint locations from the vertices + + Parameters + ---------- + J_regressor : torch.tensor JxV + The regressor array that is used to calculate the joints from the + position of the vertices + vertices : torch.tensor BxVx3 + The tensor of mesh vertices + + Returns + ------- + torch.tensor BxJx3 + The location of the joints + ''' + + return torch.einsum('bik,ji->bjk', [vertices, J_regressor]) + + +def blend_shapes(betas: Tensor, shape_disps: Tensor) -> Tensor: + ''' Calculates the per vertex displacement due to the blend shapes + + + Parameters + ---------- + betas : torch.tensor Bx(num_betas) + Blend shape coefficients + shape_disps: torch.tensor Vx3x(num_betas) + Blend shapes + + Returns + ------- + torch.tensor BxVx3 + The per-vertex displacement due to shape deformation + ''' + + # Displacement[b, m, k] = sum_{l} betas[b, l] * shape_disps[m, k, l] + # i.e. Multiply each shape displacement by its corresponding beta and + # then sum them. + blend_shape = torch.einsum('bl,mkl->bmk', [betas, shape_disps]) + return blend_shape + + +def batch_rodrigues( + rot_vecs: Tensor, + epsilon: float = 1e-8, +) -> Tensor: + ''' Calculates the rotation matrices for a batch of rotation vectors + Parameters + ---------- + rot_vecs: torch.tensor Nx3 + array of N axis-angle vectors + Returns + ------- + R: torch.tensor Nx3x3 + The rotation matrices for the given axis-angle parameters + ''' + + batch_size = rot_vecs.shape[0] + device, dtype = rot_vecs.device, rot_vecs.dtype + + angle = torch.norm(rot_vecs + 1e-8, dim=1, keepdim=True) + rot_dir = rot_vecs / angle + + cos = torch.unsqueeze(torch.cos(angle), dim=1) + sin = torch.unsqueeze(torch.sin(angle), dim=1) + + # Bx1 arrays + rx, ry, rz = torch.split(rot_dir, 1, dim=1) + K = torch.zeros((batch_size, 3, 3), dtype=dtype, device=device) + + zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device) + K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \ + .view((batch_size, 3, 3)) + + ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0) + rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K) + return rot_mat + + +def transform_mat(R: Tensor, t: Tensor) -> Tensor: + ''' Creates a batch of transformation matrices + Args: + - R: Bx3x3 array of a batch of rotation matrices + - t: Bx3x1 array of a batch of translation vectors + Returns: + - T: Bx4x4 Transformation matrix + ''' + # No padding left or right, only add an extra row + return torch.cat([F.pad(R, [0, 0, 0, 1]), + F.pad(t, [0, 0, 0, 1], value=1)], dim=2) + + +def batch_rigid_transform( + rot_mats: Tensor, + joints: Tensor, + parents: Tensor, + dtype=torch.float32 +) -> Tensor: + """ + Applies a batch of rigid transformations to the joints + + Parameters + ---------- + rot_mats : torch.tensor BxNx3x3 + Tensor of rotation matrices + joints : torch.tensor BxNx3 + 
Locations of joints + parents : torch.tensor BxN + The kinematic tree of each object + dtype : torch.dtype, optional: + The data type of the created tensors, the default is torch.float32 + + Returns + ------- + posed_joints : torch.tensor BxNx3 + The locations of the joints after applying the pose rotations + rel_transforms : torch.tensor BxNx4x4 + The relative (with respect to the root joint) rigid transformations + for all the joints + """ + + joints = torch.unsqueeze(joints, dim=-1) + + rel_joints = joints.clone() + rel_joints[:, 1:] -= joints[:, parents[1:]] + + transforms_mat = transform_mat( + rot_mats.reshape(-1, 3, 3), + rel_joints.reshape(-1, 3, 1)).reshape(-1, joints.shape[1], 4, 4) + + transform_chain = [transforms_mat[:, 0]] + for i in range(1, parents.shape[0]): + # Subtract the joint location at the rest pose + # No need for rotation, since it's identity when at rest + curr_res = torch.matmul(transform_chain[parents[i]], + transforms_mat[:, i]) + transform_chain.append(curr_res) + + transforms = torch.stack(transform_chain, dim=1) + + # The last column of the transformations contains the posed joints + posed_joints = transforms[:, :, :3, 3] + + joints_homogen = F.pad(joints, [0, 0, 0, 1]) + + rel_transforms = transforms - F.pad( + torch.matmul(transforms, joints_homogen), [3, 0, 0, 0, 0, 0, 0, 0]) + + return posed_joints, rel_transforms diff --git a/LHM/models/rendering/smplx_gsavatar/utils.py b/LHM/models/rendering/smplx_gsavatar/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..0ee5c9a68c74a59a6abf8e8ffc79241fcab14bb6 --- /dev/null +++ b/LHM/models/rendering/smplx_gsavatar/utils.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +from typing import NewType, Union, Optional +from dataclasses import dataclass, asdict, fields +import numpy as np +import torch + +Tensor = NewType('Tensor', torch.Tensor) +Array = NewType('Array', np.ndarray) + + +@dataclass +class ModelOutput: + vertices: Optional[Tensor] = None + joints: Optional[Tensor] = None + full_pose: Optional[Tensor] = None + global_orient: Optional[Tensor] = None + transl: Optional[Tensor] = None + v_shaped: Optional[Tensor] = None + + def __getitem__(self, key): + return getattr(self, key) + + def get(self, key, default=None): + return getattr(self, key, default) + + def __iter__(self): + return self.keys() + + def keys(self): + keys = [t.name for t in fields(self)] + return iter(keys) + + def values(self): + values = [getattr(self, t.name) for t in fields(self)] + return iter(values) + + def items(self): + data = [(t.name, getattr(self, t.name)) for t in fields(self)] + return iter(data) + + +@dataclass +class SMPLOutput(ModelOutput): + betas: Optional[Tensor] = None + body_pose: Optional[Tensor] = None + A: Optional[Tensor] = None + + +@dataclass +class SMPLHOutput(SMPLOutput): + left_hand_pose: Optional[Tensor] = None + right_hand_pose: Optional[Tensor] = None + transl: Optional[Tensor] = None + + +@dataclass +class SMPLXOutput(SMPLHOutput): + expression: Optional[Tensor] = None + jaw_pose: Optional[Tensor] = None + A: Optional[Tensor] = None + + +@dataclass +class MANOOutput(ModelOutput): + betas: Optional[Tensor] = None + hand_pose: Optional[Tensor] = None + + +@dataclass +class FLAMEOutput(ModelOutput): + betas: Optional[Tensor] = None + expression: Optional[Tensor] = None + jaw_pose: Optional[Tensor] = None + neck_pose: Optional[Tensor] = None + + +def find_joint_kin_chain(joint_id, kinematic_tree): + kin_chain = [] + curr_idx = joint_id + while curr_idx != -1: + kin_chain.append(curr_idx) + curr_idx = kinematic_tree[curr_idx] + return kin_chain + + +def to_tensor( + array: Union[Array, Tensor], dtype=torch.float32 +) -> Tensor: + if torch.is_tensor(array): + return array + else: + return torch.tensor(array, dtype=dtype) + + +class Struct(object): + def __init__(self, **kwargs): + for key, val in kwargs.items(): + setattr(self, key, val) + + +def to_np(array, dtype=np.float32): + if 'scipy.sparse' in str(type(array)): + array = array.todense() + return np.array(array, dtype=dtype) + + +def rot_mat_to_euler(rot_mats): + # Calculates rotation matrix to euler angles + # Careful for extreme cases of eular angles like [0.0, pi, 0.0] + + sy = torch.sqrt(rot_mats[:, 0, 0] * rot_mats[:, 0, 0] + + rot_mats[:, 1, 0] * rot_mats[:, 1, 0]) + return torch.atan2(-rot_mats[:, 2, 0], sy) diff --git a/LHM/models/rendering/smplx_gsavatar/vertex_ids.py b/LHM/models/rendering/smplx_gsavatar/vertex_ids.py new file mode 100644 index 0000000000000000000000000000000000000000..0e7a4c36700f002da54a9e181eabbd47af2a95bc --- /dev/null +++ b/LHM/models/rendering/smplx_gsavatar/vertex_ids.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. 
+# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division + +# Joint name to vertex mapping. SMPL/SMPL-H/SMPL-X vertices that correspond to +# MSCOCO and OpenPose joints +vertex_ids = { + 'smplh': { + 'nose': 332, + 'reye': 6260, + 'leye': 2800, + 'rear': 4071, + 'lear': 583, + 'rthumb': 6191, + 'rindex': 5782, + 'rmiddle': 5905, + 'rring': 6016, + 'rpinky': 6133, + 'lthumb': 2746, + 'lindex': 2319, + 'lmiddle': 2445, + 'lring': 2556, + 'lpinky': 2673, + 'LBigToe': 3216, + 'LSmallToe': 3226, + 'LHeel': 3387, + 'RBigToe': 6617, + 'RSmallToe': 6624, + 'RHeel': 6787 + }, + 'smplx': { + 'nose': 9120, + 'reye': 9929, + 'leye': 9448, + 'rear': 616, + 'lear': 6, + 'rthumb': 8079, + 'rindex': 7669, + 'rmiddle': 7794, + 'rring': 7905, + 'rpinky': 8022, + 'lthumb': 5361, + 'lindex': 4933, + 'lmiddle': 5058, + 'lring': 5169, + 'lpinky': 5286, + 'LBigToe': 5770, + 'LSmallToe': 5780, + 'LHeel': 8846, + 'RBigToe': 8463, + 'RSmallToe': 8474, + 'RHeel': 8635 + }, + 'mano': { + 'thumb': 744, + 'index': 320, + 'middle': 443, + 'ring': 554, + 'pinky': 671, + } +} diff --git a/LHM/models/rendering/smplx_gsavatar/vertex_joint_selector.py b/LHM/models/rendering/smplx_gsavatar/vertex_joint_selector.py new file mode 100644 index 0000000000000000000000000000000000000000..4b8298bd5e087731f86c1c699703b5219e046c5c --- /dev/null +++ b/LHM/models/rendering/smplx_gsavatar/vertex_joint_selector.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import absolute_import +from __future__ import print_function +from __future__ import division + +import numpy as np + +import torch +import torch.nn as nn + +from .utils import to_tensor + + +class VertexJointSelector(nn.Module): + + def __init__(self, vertex_ids=None, + use_hands=True, + use_feet_keypoints=True, **kwargs): + super(VertexJointSelector, self).__init__() + + extra_joints_idxs = [] + + face_keyp_idxs = np.array([ + vertex_ids['nose'], + vertex_ids['reye'], + vertex_ids['leye'], + vertex_ids['rear'], + vertex_ids['lear']], dtype=np.int64) + + extra_joints_idxs = np.concatenate([extra_joints_idxs, + face_keyp_idxs]) + + if use_feet_keypoints: + feet_keyp_idxs = np.array([vertex_ids['LBigToe'], + vertex_ids['LSmallToe'], + vertex_ids['LHeel'], + vertex_ids['RBigToe'], + vertex_ids['RSmallToe'], + vertex_ids['RHeel']], dtype=np.int32) + + extra_joints_idxs = np.concatenate( + [extra_joints_idxs, feet_keyp_idxs]) + + if use_hands: + self.tip_names = ['thumb', 'index', 'middle', 'ring', 'pinky'] + + tips_idxs = [] + for hand_id in ['l', 'r']: + for tip_name in self.tip_names: + tips_idxs.append(vertex_ids[hand_id + tip_name]) + + extra_joints_idxs = np.concatenate( + [extra_joints_idxs, tips_idxs]) + + self.register_buffer('extra_joints_idxs', + to_tensor(extra_joints_idxs, dtype=torch.long)) + + def forward(self, vertices, joints): + extra_joints = torch.index_select(vertices, 1, self.extra_joints_idxs) + joints = torch.cat([joints, extra_joints], dim=1) + + return joints diff --git a/LHM/models/rendering/synthesizer.py b/LHM/models/rendering/synthesizer.py new file mode 100644 index 0000000000000000000000000000000000000000..3833cfeb681e1a8f61d8244a36d03c6631d8b3de --- /dev/null +++ b/LHM/models/rendering/synthesizer.py @@ -0,0 +1,208 @@ +# ORIGINAL LICENSE +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# Modified by Zexin He in 2023-2024. +# The modifications are subject to the same license as the original. + + +import itertools +import torch +import torch.nn as nn + +from .utils.renderer import ImportanceRenderer +from .utils.ray_sampler import RaySampler + + +class ShiftedSoftplus(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, x): + return nn.functional.softplus(x - 1) + + +class OSGDecoder(nn.Module): + """ + Triplane decoder that gives RGB and sigma values from sampled features. + Using ReLU here instead of Softplus in the original implementation. 
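+    For each query point, the features sampled from the three planes are concatenated (rather than averaged) before the MLP, which predicts one sigma channel and three RGB channels; the RGB output is then squashed with the MipNeRF-style sigmoid clamp used in `forward` below.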
+ + Reference: + EG3D: https://github.com/NVlabs/eg3d/blob/main/eg3d/training/triplane.py#L112 + """ + def __init__(self, n_features: int, + hidden_dim: int = 64, num_layers: int = 4, activation: nn.Module = nn.ReLU): + super().__init__() + self.net = nn.Sequential( + nn.Linear(3 * n_features, hidden_dim), + activation(), + *itertools.chain(*[[ + nn.Linear(hidden_dim, hidden_dim), + activation(), + ] for _ in range(num_layers - 2)]), + nn.Linear(hidden_dim, 1 + 3), + ) + # init all bias to zero + for m in self.modules(): + if isinstance(m, nn.Linear): + nn.init.zeros_(m.bias) + + @torch.compile + def forward(self, sampled_features, ray_directions): + # Aggregate features by mean + # sampled_features = sampled_features.mean(1) + # Aggregate features by concatenation + _N, n_planes, _M, _C = sampled_features.shape + sampled_features = sampled_features.permute(0, 2, 1, 3).reshape(_N, _M, n_planes*_C) + x = sampled_features + + N, M, C = x.shape + x = x.contiguous().view(N*M, C) + + x = self.net(x) + x = x.view(N, M, -1) + rgb = torch.sigmoid(x[..., 1:])*(1 + 2*0.001) - 0.001 # Uses sigmoid clamping from MipNeRF + sigma = x[..., 0:1] + + return {'rgb': rgb, 'sigma': sigma} + + +class TriplaneSynthesizer(nn.Module): + """ + Synthesizer that renders a triplane volume with planes and a camera. + + Reference: + EG3D: https://github.com/NVlabs/eg3d/blob/main/eg3d/training/triplane.py#L19 + """ + + DEFAULT_RENDERING_KWARGS = { + 'ray_start': 'auto', + 'ray_end': 'auto', + 'box_warp': 2., + 'white_back': False, + 'disparity_space_sampling': False, + 'clamp_mode': 'softplus', + 'sampler_bbox_min': -1., + 'sampler_bbox_max': 1., + } + + def __init__(self, triplane_dim: int, samples_per_ray: int): + super().__init__() + + # attributes + self.triplane_dim = triplane_dim + self.rendering_kwargs = { + **self.DEFAULT_RENDERING_KWARGS, + 'depth_resolution': samples_per_ray // 2, + 'depth_resolution_importance': samples_per_ray // 2, + } + + # renderings + self.renderer = ImportanceRenderer() + self.ray_sampler = RaySampler() + + # modules + self.decoder = OSGDecoder(n_features=triplane_dim) + + def forward(self, planes, cameras, anchors, resolutions, bg_colors, region_size: int): + # planes: (N, 3, D', H', W') + # cameras: (N, M, D_cam) + # anchors: (N, M, 2) + # resolutions: (N, M, 1) + # bg_colors: (N, M, 1) + # region_size: int + assert planes.shape[0] == cameras.shape[0], "Batch size mismatch for planes and cameras" + assert planes.shape[0] == anchors.shape[0], "Batch size mismatch for planes and anchors" + assert cameras.shape[1] == anchors.shape[1], "Number of views mismatch for cameras and anchors" + N, M = cameras.shape[:2] + + cam2world_matrix = cameras[..., :16].view(N, M, 4, 4) + intrinsics = cameras[..., 16:25].view(N, M, 3, 3) + + # Create a batch of rays for volume rendering + ray_origins, ray_directions = self.ray_sampler( + cam2world_matrix=cam2world_matrix.reshape(-1, 4, 4), + intrinsics=intrinsics.reshape(-1, 3, 3), + resolutions=resolutions.reshape(-1, 1), + anchors=anchors.reshape(-1, 2), + region_size=region_size, + ) + assert N*M == ray_origins.shape[0], "Batch size mismatch for ray_origins" + assert ray_origins.dim() == 3, "ray_origins should be 3-dimensional" + + # Perform volume rendering + rgb_samples, depth_samples, weights_samples = self.renderer( + planes.repeat_interleave(M, dim=0), self.decoder, ray_origins, ray_directions, self.rendering_kwargs, + bg_colors=bg_colors.reshape(-1, 1), + ) + + # Reshape into 'raw' neural-rendered image + Himg = Wimg = region_size + rgb_images = 
rgb_samples.permute(0, 2, 1).reshape(N, M, rgb_samples.shape[-1], Himg, Wimg).contiguous() + depth_images = depth_samples.permute(0, 2, 1).reshape(N, M, 1, Himg, Wimg) + weight_images = weights_samples.permute(0, 2, 1).reshape(N, M, 1, Himg, Wimg) + + return { + 'images_rgb': rgb_images, + 'images_depth': depth_images, + 'images_weight': weight_images, + } + + def forward_grid(self, planes, grid_size: int, aabb: torch.Tensor = None): + # planes: (N, 3, D', H', W') + # grid_size: int + # aabb: (N, 2, 3) + if aabb is None: + aabb = torch.tensor([ + [self.rendering_kwargs['sampler_bbox_min']] * 3, + [self.rendering_kwargs['sampler_bbox_max']] * 3, + ], device=planes.device, dtype=planes.dtype).unsqueeze(0).repeat(planes.shape[0], 1, 1) + assert planes.shape[0] == aabb.shape[0], "Batch size mismatch for planes and aabb" + N = planes.shape[0] + + # create grid points for triplane query + grid_points = [] + for i in range(N): + grid_points.append(torch.stack(torch.meshgrid( + torch.linspace(aabb[i, 0, 0], aabb[i, 1, 0], grid_size, device=planes.device), + torch.linspace(aabb[i, 0, 1], aabb[i, 1, 1], grid_size, device=planes.device), + torch.linspace(aabb[i, 0, 2], aabb[i, 1, 2], grid_size, device=planes.device), + indexing='ij', + ), dim=-1).reshape(-1, 3)) + cube_grid = torch.stack(grid_points, dim=0).to(planes.device) + + features = self.forward_points(planes, cube_grid) + + # reshape into grid + features = { + k: v.reshape(N, grid_size, grid_size, grid_size, -1) + for k, v in features.items() + } + return features + + def forward_points(self, planes, points: torch.Tensor, chunk_size: int = 2**20): + # planes: (N, 3, D', H', W') + # points: (N, P, 3) + N, P = points.shape[:2] + + # query triplane in chunks + outs = [] + for i in range(0, points.shape[1], chunk_size): + chunk_points = points[:, i:i+chunk_size] + + # query triplane + chunk_out = self.renderer.run_model_activated( + planes=planes, + decoder=self.decoder, + sample_coordinates=chunk_points, + sample_directions=torch.zeros_like(chunk_points), + options=self.rendering_kwargs, + ) + outs.append(chunk_out) + + # concatenate the outputs + point_features = { + k: torch.cat([out[k] for out in outs], dim=1) + for k in outs[0].keys() + } + return point_features diff --git a/LHM/models/rendering/utils/__init__.py b/LHM/models/rendering/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2c772e4fa331c678cfff50884be94d7d31835b34 --- /dev/null +++ b/LHM/models/rendering/utils/__init__.py @@ -0,0 +1,9 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
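As a quick orientation for the synthesizer above, a minimal usage sketch of its grid-query path follows; the `triplane_dim`, `samples_per_ray`, plane resolution, and grid size are illustrative assumptions, not values taken from this repository's configs.

import torch

from LHM.models.rendering.synthesizer import TriplaneSynthesizer

# Assumed example hyper-parameters (not taken from the repo's configs).
synthesizer = TriplaneSynthesizer(triplane_dim=80, samples_per_ray=96)

# Planes from some upstream encoder: (N, 3 planes, C = triplane_dim, H', W').
planes = torch.randn(1, 3, 80, 64, 64)

# Query a dense grid inside the default [-1, 1]^3 sampling box.
# forward_grid returns per-voxel 'rgb' and 'sigma' of shape (N, G, G, G, C).
with torch.no_grad():
    fields = synthesizer.forward_grid(planes, grid_size=32)
print(fields['sigma'].shape)  # torch.Size([1, 32, 32, 32, 1])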
diff --git a/LHM/models/rendering/utils/__pycache__/__init__.cpython-310.pyc b/LHM/models/rendering/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..480ac7f4668b7bdd4650c99d2f66f23345d5c12d Binary files /dev/null and b/LHM/models/rendering/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/models/rendering/utils/__pycache__/math_utils.cpython-310.pyc b/LHM/models/rendering/utils/__pycache__/math_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7a1faac72c01a13751596085abee8a8c38fbe61b Binary files /dev/null and b/LHM/models/rendering/utils/__pycache__/math_utils.cpython-310.pyc differ diff --git a/LHM/models/rendering/utils/__pycache__/ray_marcher.cpython-310.pyc b/LHM/models/rendering/utils/__pycache__/ray_marcher.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4996f47b6f7599aaa6ed79a22cf12b1cd6cb6ed4 Binary files /dev/null and b/LHM/models/rendering/utils/__pycache__/ray_marcher.cpython-310.pyc differ diff --git a/LHM/models/rendering/utils/__pycache__/ray_sampler.cpython-310.pyc b/LHM/models/rendering/utils/__pycache__/ray_sampler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e475cb0c3d77de6f6cd8dbe540d3809ebd1de35 Binary files /dev/null and b/LHM/models/rendering/utils/__pycache__/ray_sampler.cpython-310.pyc differ diff --git a/LHM/models/rendering/utils/__pycache__/renderer.cpython-310.pyc b/LHM/models/rendering/utils/__pycache__/renderer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..45c3a1a5e554c6f51ed8bf8faf93a80b14c3467e Binary files /dev/null and b/LHM/models/rendering/utils/__pycache__/renderer.cpython-310.pyc differ diff --git a/LHM/models/rendering/utils/__pycache__/sh_utils.cpython-310.pyc b/LHM/models/rendering/utils/__pycache__/sh_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..163b86e3f7daf424d9b4b86112fcb0282a67fefc Binary files /dev/null and b/LHM/models/rendering/utils/__pycache__/sh_utils.cpython-310.pyc differ diff --git a/LHM/models/rendering/utils/__pycache__/typing.cpython-310.pyc b/LHM/models/rendering/utils/__pycache__/typing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f19f118b02df9bf45b4e68e58a9f19e7217ac7bf Binary files /dev/null and b/LHM/models/rendering/utils/__pycache__/typing.cpython-310.pyc differ diff --git a/LHM/models/rendering/utils/__pycache__/utils.cpython-310.pyc b/LHM/models/rendering/utils/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9328383d08e0d0a85230c7b60a6984bdc8eb437e Binary files /dev/null and b/LHM/models/rendering/utils/__pycache__/utils.cpython-310.pyc differ diff --git a/LHM/models/rendering/utils/math_utils.py b/LHM/models/rendering/utils/math_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4cf9d2b811e0acbc7923bc9126e010b52cb1a8af --- /dev/null +++ b/LHM/models/rendering/utils/math_utils.py @@ -0,0 +1,118 @@ +# MIT License + +# Copyright (c) 2022 Petr Kellnhofer + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the 
Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import torch + +def transform_vectors(matrix: torch.Tensor, vectors4: torch.Tensor) -> torch.Tensor: + """ + Left-multiplies MxM @ NxM. Returns NxM. + """ + res = torch.matmul(vectors4, matrix.T) + return res + + +def normalize_vecs(vectors: torch.Tensor) -> torch.Tensor: + """ + Normalize vector lengths. + """ + return vectors / (torch.norm(vectors, dim=-1, keepdim=True)) + +def torch_dot(x: torch.Tensor, y: torch.Tensor): + """ + Dot product of two tensors. + """ + return (x * y).sum(-1) + + +def get_ray_limits_box(rays_o: torch.Tensor, rays_d: torch.Tensor, box_side_length): + """ + Author: Petr Kellnhofer + Intersects rays with the [-1, 1] NDC volume. + Returns min and max distance of entry. + Returns -1 for no intersection. + https://www.scratchapixel.com/lessons/3d-basic-rendering/minimal-ray-tracer-rendering-simple-shapes/ray-box-intersection + """ + o_shape = rays_o.shape + rays_o = rays_o.detach().reshape(-1, 3) + rays_d = rays_d.detach().reshape(-1, 3) + + + bb_min = [-1*(box_side_length/2), -1*(box_side_length/2), -1*(box_side_length/2)] + bb_max = [1*(box_side_length/2), 1*(box_side_length/2), 1*(box_side_length/2)] + bounds = torch.tensor([bb_min, bb_max], dtype=rays_o.dtype, device=rays_o.device) + is_valid = torch.ones(rays_o.shape[:-1], dtype=bool, device=rays_o.device) + + # Precompute inverse for stability. + invdir = 1 / rays_d + sign = (invdir < 0).long() + + # Intersect with YZ plane. + tmin = (bounds.index_select(0, sign[..., 0])[..., 0] - rays_o[..., 0]) * invdir[..., 0] + tmax = (bounds.index_select(0, 1 - sign[..., 0])[..., 0] - rays_o[..., 0]) * invdir[..., 0] + + # Intersect with XZ plane. + tymin = (bounds.index_select(0, sign[..., 1])[..., 1] - rays_o[..., 1]) * invdir[..., 1] + tymax = (bounds.index_select(0, 1 - sign[..., 1])[..., 1] - rays_o[..., 1]) * invdir[..., 1] + + # Resolve parallel rays. + is_valid[torch.logical_or(tmin > tymax, tymin > tmax)] = False + + # Use the shortest intersection. + tmin = torch.max(tmin, tymin) + tmax = torch.min(tmax, tymax) + + # Intersect with XY plane. + tzmin = (bounds.index_select(0, sign[..., 2])[..., 2] - rays_o[..., 2]) * invdir[..., 2] + tzmax = (bounds.index_select(0, 1 - sign[..., 2])[..., 2] - rays_o[..., 2]) * invdir[..., 2] + + # Resolve parallel rays. + is_valid[torch.logical_or(tmin > tzmax, tzmin > tmax)] = False + + # Use the shortest intersection. + tmin = torch.max(tmin, tzmin) + tmax = torch.min(tmax, tzmax) + + # Mark invalid. + tmin[torch.logical_not(is_valid)] = -1 + tmax[torch.logical_not(is_valid)] = -2 + + return tmin.reshape(*o_shape[:-1], 1), tmax.reshape(*o_shape[:-1], 1) + + +def linspace(start: torch.Tensor, stop: torch.Tensor, num: int): + """ + Creates a tensor of shape [num, *start.shape] whose values are evenly spaced from start to end, inclusive. 
+ Replicates but the multi-dimensional bahaviour of numpy.linspace in PyTorch. + """ + # create a tensor of 'num' steps from 0 to 1 + steps = torch.arange(num, dtype=torch.float32, device=start.device) / (num - 1) + + # reshape the 'steps' tensor to [-1, *([1]*start.ndim)] to allow for broadcastings + # - using 'steps.reshape([-1, *([1]*start.ndim)])' would be nice here but torchscript + # "cannot statically infer the expected size of a list in this contex", hence the code below + for i in range(start.ndim): + steps = steps.unsqueeze(-1) + + # the output starts at 'start' and increments until 'stop' in each dimension + out = start[None] + steps * (stop - start)[None] + + return out diff --git a/LHM/models/rendering/utils/ray_marcher.py b/LHM/models/rendering/utils/ray_marcher.py new file mode 100644 index 0000000000000000000000000000000000000000..8c686c196e043f44e2276f16b4a32e596c802e40 --- /dev/null +++ b/LHM/models/rendering/utils/ray_marcher.py @@ -0,0 +1,68 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. +# +# Modified by Zexin He in 2023-2024. +# The modifications are subject to the same license as the original. + + +""" +The ray marcher takes the raw output of the implicit representation and uses the volume rendering equation to produce composited colors and depths. +Based off of the implementation in MipNeRF (this one doesn't do any cone tracing though!) 
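+Concretely, for midpoint samples along a ray with densities sigma_i and segment lengths delta_i, the marcher uses alpha_i = 1 - exp(-sigma_i * delta_i) and weights w_i = alpha_i * prod_{j<i}(1 - alpha_j); the composited color is sum_i w_i * c_i, and the leftover transmittance (1 - sum_i w_i) is filled by the white or per-ray background color.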
+""" + +import torch +import torch.nn as nn + + +class MipRayMarcher2(nn.Module): + def __init__(self, activation_factory): + super().__init__() + self.activation_factory = activation_factory + + def run_forward(self, colors, densities, depths, rendering_options, bg_colors=None): + deltas = depths[:, :, 1:] - depths[:, :, :-1] + colors_mid = (colors[:, :, :-1] + colors[:, :, 1:]) / 2 + densities_mid = (densities[:, :, :-1] + densities[:, :, 1:]) / 2 + depths_mid = (depths[:, :, :-1] + depths[:, :, 1:]) / 2 + + # using factory mode for better usability + densities_mid = self.activation_factory(rendering_options)(densities_mid) + + density_delta = densities_mid * deltas + + alpha = 1 - torch.exp(-density_delta) + + alpha_shifted = torch.cat([torch.ones_like(alpha[:, :, :1]), 1-alpha + 1e-10], -2) + weights = alpha * torch.cumprod(alpha_shifted, -2)[:, :, :-1] + + composite_rgb = torch.sum(weights * colors_mid, -2) + weight_total = weights.sum(2) + composite_depth = torch.sum(weights * depths_mid, -2) / weight_total + + # clip the composite to min/max range of depths + composite_depth = torch.nan_to_num(composite_depth, float('inf')) + composite_depth = torch.clamp(composite_depth, torch.min(depths), torch.max(depths)) + + if rendering_options.get('white_back', False): + composite_rgb = composite_rgb + 1 - weight_total + else: + assert bg_colors is not None, "Must provide bg_colors if white_back is False" + composite_rgb = composite_rgb + bg_colors.unsqueeze(-1) * (1 - weight_total) + + # rendered value scale is 0-1, comment out original mipnerf scaling + # composite_rgb = composite_rgb * 2 - 1 # Scale to (-1, 1) + + return composite_rgb, composite_depth, weights + + + def forward(self, colors, densities, depths, rendering_options, bg_colors=None): + composite_rgb, composite_depth, weights = self.run_forward(colors, densities, depths, rendering_options, bg_colors=bg_colors) + + return composite_rgb, composite_depth, weights diff --git a/LHM/models/rendering/utils/ray_sampler.py b/LHM/models/rendering/utils/ray_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..6ab9594be66d02df79ec2295dbd064906f748c2c --- /dev/null +++ b/LHM/models/rendering/utils/ray_sampler.py @@ -0,0 +1,84 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. +# +# Modified by Zexin He in 2023-2024. +# The modifications are subject to the same license as the original. + + +""" +The ray sampler is a module that takes in camera matrices and resolution and batches of rays. +Expects cam2world matrices that use the OpenCV camera coordinate system conventions. +""" + +import torch + +class RaySampler(torch.nn.Module): + def __init__(self): + super().__init__() + self.ray_origins_h, self.ray_directions, self.depths, self.image_coords, self.rendering_options = None, None, None, None, None + + @torch.compile + def forward(self, cam2world_matrix, intrinsics, resolutions, anchors, region_size): + """ + Create batches of rays and return origins and directions. 
+ + cam2world_matrix: (N, 4, 4) + intrinsics: (N, 3, 3) + resolutions: (N, 1) + anchors: (N, 2) + region_size: int + + ray_origins: (N, M, 3) + ray_dirs: (N, M, 2) + """ + + N, M = cam2world_matrix.shape[0], region_size**2 + cam_locs_world = cam2world_matrix[:, :3, 3] + fx = intrinsics[:, 0, 0] + fy = intrinsics[:, 1, 1] + cx = intrinsics[:, 0, 2] + cy = intrinsics[:, 1, 2] + sk = intrinsics[:, 0, 1] + + uv = torch.stack(torch.meshgrid( + torch.arange(region_size, dtype=torch.float32, device=cam2world_matrix.device), + torch.arange(region_size, dtype=torch.float32, device=cam2world_matrix.device), + indexing='ij', + )) + uv = uv.flip(0).reshape(2, -1).transpose(1, 0) + uv = uv.unsqueeze(0).repeat(cam2world_matrix.shape[0], 1, 1) + + # anchors are indexed as normal (row, col) but uv is indexed as (x, y) + x_cam = (uv[:, :, 0].view(N, -1) + anchors[:, 1].unsqueeze(-1)) * (1./resolutions) + (0.5/resolutions) + y_cam = (uv[:, :, 1].view(N, -1) + anchors[:, 0].unsqueeze(-1)) * (1./resolutions) + (0.5/resolutions) + z_cam = torch.ones((N, M), device=cam2world_matrix.device) + + x_lift = (x_cam - cx.unsqueeze(-1) + cy.unsqueeze(-1)*sk.unsqueeze(-1)/fy.unsqueeze(-1) - sk.unsqueeze(-1)*y_cam/fy.unsqueeze(-1)) / fx.unsqueeze(-1) * z_cam + y_lift = (y_cam - cy.unsqueeze(-1)) / fy.unsqueeze(-1) * z_cam + + cam_rel_points = torch.stack((x_lift, y_lift, z_cam, torch.ones_like(z_cam)), dim=-1) + + _opencv2blender = torch.tensor([ + [1, 0, 0, 0], + [0, -1, 0, 0], + [0, 0, -1, 0], + [0, 0, 0, 1], + ], dtype=torch.float32, device=cam2world_matrix.device).unsqueeze(0).repeat(N, 1, 1) + + cam2world_matrix = torch.bmm(cam2world_matrix, _opencv2blender) + + world_rel_points = torch.bmm(cam2world_matrix, cam_rel_points.permute(0, 2, 1)).permute(0, 2, 1)[:, :, :3] + + ray_dirs = world_rel_points - cam_locs_world[:, None, :] + ray_dirs = torch.nn.functional.normalize(ray_dirs, dim=2) + + ray_origins = cam_locs_world.unsqueeze(1).repeat(1, ray_dirs.shape[1], 1) + + return ray_origins, ray_dirs diff --git a/LHM/models/rendering/utils/renderer.py b/LHM/models/rendering/utils/renderer.py new file mode 100644 index 0000000000000000000000000000000000000000..628fc029877f8e069feb20d3b310ef4692b4a4bc --- /dev/null +++ b/LHM/models/rendering/utils/renderer.py @@ -0,0 +1,303 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. +# +# Modified by Zexin He in 2023-2024. +# The modifications are subject to the same license as the original. + + +""" +The renderer is a module that takes in rays, decides where to sample along each +ray, and computes pixel colors using the volume rendering equation. +""" + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .ray_marcher import MipRayMarcher2 +from . import math_utils + +def generate_planes(): + """ + Defines planes by the three vectors that form the "axes" of the + plane. Should work with arbitrary number of planes and planes of + arbitrary orientation. 
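+    Each 3x3 entry is a basis whose inverse maps a world-space point into that plane's frame; `project_onto_planes` below keeps only the first two components of the mapped point as the 2D coordinates used for grid sampling.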
+ + Bugfix reference: https://github.com/NVlabs/eg3d/issues/67 + """ + return torch.tensor([[[1, 0, 0], + [0, 1, 0], + [0, 0, 1]], + [[1, 0, 0], + [0, 0, 1], + [0, 1, 0]], + [[0, 0, 1], + [0, 1, 0], + [1, 0, 0]]], dtype=torch.float32) + +def project_onto_planes(planes, coordinates): + """ + Does a projection of a 3D point onto a batch of 2D planes, + returning 2D plane coordinates. + + Takes plane axes of shape n_planes, 3, 3 + # Takes coordinates of shape N, M, 3 + # returns projections of shape N*n_planes, M, 2 + """ + N, M, C = coordinates.shape + n_planes, _, _ = planes.shape + coordinates = coordinates.unsqueeze(1).expand(-1, n_planes, -1, -1).reshape(N*n_planes, M, 3) + inv_planes = torch.linalg.inv(planes).unsqueeze(0).expand(N, -1, -1, -1).reshape(N*n_planes, 3, 3) + projections = torch.bmm(coordinates, inv_planes) + return projections[..., :2] + +def sample_from_planes(plane_axes, plane_features, coordinates, mode='bilinear', padding_mode='zeros', box_warp=None): + assert padding_mode == 'zeros' + N, n_planes, C, H, W = plane_features.shape + _, M, _ = coordinates.shape + plane_features = plane_features.view(N*n_planes, C, H, W) + + coordinates = (2/box_warp) * coordinates # add specific box bounds + + projected_coordinates = project_onto_planes(plane_axes, coordinates).unsqueeze(1) + output_features = torch.nn.functional.grid_sample(plane_features, projected_coordinates.float(), mode=mode, padding_mode=padding_mode, align_corners=False).permute(0, 3, 2, 1).reshape(N, n_planes, M, C) + return output_features + +def sample_from_3dgrid(grid, coordinates): + """ + Expects coordinates in shape (batch_size, num_points_per_batch, 3) + Expects grid in shape (1, channels, H, W, D) + (Also works if grid has batch size) + Returns sampled features of shape (batch_size, num_points_per_batch, feature_channels) + """ + batch_size, n_coords, n_dims = coordinates.shape + sampled_features = torch.nn.functional.grid_sample(grid.expand(batch_size, -1, -1, -1, -1), + coordinates.reshape(batch_size, 1, 1, -1, n_dims), + mode='bilinear', padding_mode='zeros', align_corners=False) + N, C, H, W, D = sampled_features.shape + sampled_features = sampled_features.permute(0, 4, 3, 2, 1).reshape(N, H*W*D, C) + return sampled_features + +class ImportanceRenderer(torch.nn.Module): + """ + Modified original version to filter out-of-box samples as TensoRF does. + + Reference: + TensoRF: https://github.com/apchenstu/TensoRF/blob/main/models/tensorBase.py#L277 + """ + def __init__(self): + super().__init__() + self.activation_factory = self._build_activation_factory() + self.ray_marcher = MipRayMarcher2(self.activation_factory) + self.plane_axes = generate_planes() + + def _build_activation_factory(self): + def activation_factory(options: dict): + if options['clamp_mode'] == 'softplus': + return lambda x: F.softplus(x - 1) # activation bias of -1 makes things initialize better + else: + assert False, "Renderer only supports `clamp_mode`=`softplus`!" + return activation_factory + + def _forward_pass(self, depths: torch.Tensor, ray_directions: torch.Tensor, ray_origins: torch.Tensor, + planes: torch.Tensor, decoder: nn.Module, rendering_options: dict): + """ + Additional filtering is applied to filter out-of-box samples. + Modifications made by Zexin He. 
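+        Samples falling outside [sampler_bbox_min, sampler_bbox_max] keep zero RGB and a very large negative density, so after the softplus density activation in the ray marcher they contribute essentially nothing to the composite.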
+ """ + + # context related variables + batch_size, num_rays, samples_per_ray, _ = depths.shape + device = depths.device + + # define sample points with depths + sample_directions = ray_directions.unsqueeze(-2).expand(-1, -1, samples_per_ray, -1).reshape(batch_size, -1, 3) + sample_coordinates = (ray_origins.unsqueeze(-2) + depths * ray_directions.unsqueeze(-2)).reshape(batch_size, -1, 3) + + # filter out-of-box samples + mask_inbox = \ + (rendering_options['sampler_bbox_min'] <= sample_coordinates) & \ + (sample_coordinates <= rendering_options['sampler_bbox_max']) + mask_inbox = mask_inbox.all(-1) + + # forward model according to all samples + _out = self.run_model(planes, decoder, sample_coordinates, sample_directions, rendering_options) + + # set out-of-box samples to zeros(rgb) & -inf(sigma) + SAFE_GUARD = 8 + DATA_TYPE = _out['sigma'].dtype + colors_pass = torch.zeros(batch_size, num_rays * samples_per_ray, 3, device=device, dtype=DATA_TYPE) + densities_pass = torch.nan_to_num(torch.full((batch_size, num_rays * samples_per_ray, 1), -float('inf'), device=device, dtype=DATA_TYPE)) / SAFE_GUARD + colors_pass[mask_inbox], densities_pass[mask_inbox] = _out['rgb'][mask_inbox], _out['sigma'][mask_inbox] + + # reshape back + colors_pass = colors_pass.reshape(batch_size, num_rays, samples_per_ray, colors_pass.shape[-1]) + densities_pass = densities_pass.reshape(batch_size, num_rays, samples_per_ray, densities_pass.shape[-1]) + + return colors_pass, densities_pass + + def forward(self, planes, decoder, ray_origins, ray_directions, rendering_options, bg_colors=None): + # self.plane_axes = self.plane_axes.to(ray_origins.device) + + if rendering_options['ray_start'] == rendering_options['ray_end'] == 'auto': + ray_start, ray_end = math_utils.get_ray_limits_box(ray_origins, ray_directions, box_side_length=rendering_options['box_warp']) + is_ray_valid = ray_end > ray_start + if torch.any(is_ray_valid).item(): + ray_start[~is_ray_valid] = ray_start[is_ray_valid].min() + ray_end[~is_ray_valid] = ray_start[is_ray_valid].max() + depths_coarse = self.sample_stratified(ray_origins, ray_start, ray_end, rendering_options['depth_resolution'], rendering_options['disparity_space_sampling']) + else: + # Create stratified depth samples + depths_coarse = self.sample_stratified(ray_origins, rendering_options['ray_start'], rendering_options['ray_end'], rendering_options['depth_resolution'], rendering_options['disparity_space_sampling']) + + # Coarse Pass + colors_coarse, densities_coarse = self._forward_pass( + depths=depths_coarse, ray_directions=ray_directions, ray_origins=ray_origins, + planes=planes, decoder=decoder, rendering_options=rendering_options) + + # Fine Pass + N_importance = rendering_options['depth_resolution_importance'] + if N_importance > 0: + _, _, weights = self.ray_marcher(colors_coarse, densities_coarse, depths_coarse, rendering_options, bg_colors=bg_colors) + + depths_fine = self.sample_importance(depths_coarse, weights, N_importance) + + colors_fine, densities_fine = self._forward_pass( + depths=depths_fine, ray_directions=ray_directions, ray_origins=ray_origins, + planes=planes, decoder=decoder, rendering_options=rendering_options) + + all_depths, all_colors, all_densities = self.unify_samples(depths_coarse, colors_coarse, densities_coarse, + depths_fine, colors_fine, densities_fine) + + # Aggregate + rgb_final, depth_final, weights = self.ray_marcher(all_colors, all_densities, all_depths, rendering_options, bg_colors=bg_colors) + else: + rgb_final, depth_final, weights = 
self.ray_marcher(colors_coarse, densities_coarse, depths_coarse, rendering_options, bg_colors=bg_colors) + + return rgb_final, depth_final, weights.sum(2) + + def run_model(self, planes, decoder, sample_coordinates, sample_directions, options): + plane_axes = self.plane_axes.to(planes.device) + sampled_features = sample_from_planes(plane_axes, planes, sample_coordinates, padding_mode='zeros', box_warp=options['box_warp']) + + out = decoder(sampled_features, sample_directions) + if options.get('density_noise', 0) > 0: + out['sigma'] += torch.randn_like(out['sigma']) * options['density_noise'] + return out + + def run_model_activated(self, planes, decoder, sample_coordinates, sample_directions, options): + out = self.run_model(planes, decoder, sample_coordinates, sample_directions, options) + out['sigma'] = self.activation_factory(options)(out['sigma']) + return out + + def sort_samples(self, all_depths, all_colors, all_densities): + _, indices = torch.sort(all_depths, dim=-2) + all_depths = torch.gather(all_depths, -2, indices) + all_colors = torch.gather(all_colors, -2, indices.expand(-1, -1, -1, all_colors.shape[-1])) + all_densities = torch.gather(all_densities, -2, indices.expand(-1, -1, -1, 1)) + return all_depths, all_colors, all_densities + + def unify_samples(self, depths1, colors1, densities1, depths2, colors2, densities2): + all_depths = torch.cat([depths1, depths2], dim = -2) + all_colors = torch.cat([colors1, colors2], dim = -2) + all_densities = torch.cat([densities1, densities2], dim = -2) + + _, indices = torch.sort(all_depths, dim=-2) + all_depths = torch.gather(all_depths, -2, indices) + all_colors = torch.gather(all_colors, -2, indices.expand(-1, -1, -1, all_colors.shape[-1])) + all_densities = torch.gather(all_densities, -2, indices.expand(-1, -1, -1, 1)) + + return all_depths, all_colors, all_densities + + def sample_stratified(self, ray_origins, ray_start, ray_end, depth_resolution, disparity_space_sampling=False): + """ + Return depths of approximately uniformly spaced samples along rays. + """ + N, M, _ = ray_origins.shape + if disparity_space_sampling: + depths_coarse = torch.linspace(0, + 1, + depth_resolution, + device=ray_origins.device).reshape(1, 1, depth_resolution, 1).repeat(N, M, 1, 1) + depth_delta = 1/(depth_resolution - 1) + depths_coarse += torch.rand_like(depths_coarse) * depth_delta + depths_coarse = 1./(1./ray_start * (1. - depths_coarse) + 1./ray_end * depths_coarse) + else: + if type(ray_start) == torch.Tensor: + depths_coarse = math_utils.linspace(ray_start, ray_end, depth_resolution).permute(1,2,0,3) + depth_delta = (ray_end - ray_start) / (depth_resolution - 1) + depths_coarse += torch.rand_like(depths_coarse) * depth_delta[..., None] + else: + depths_coarse = torch.linspace(ray_start, ray_end, depth_resolution, device=ray_origins.device).reshape(1, 1, depth_resolution, 1).repeat(N, M, 1, 1) + depth_delta = (ray_end - ray_start)/(depth_resolution - 1) + depths_coarse += torch.rand_like(depths_coarse) * depth_delta + + return depths_coarse + + def sample_importance(self, z_vals, weights, N_importance): + """ + Return depths of importance sampled points along rays. See NeRF importance sampling for more. 
+ """ + with torch.no_grad(): + batch_size, num_rays, samples_per_ray, _ = z_vals.shape + + z_vals = z_vals.reshape(batch_size * num_rays, samples_per_ray) + weights = weights.reshape(batch_size * num_rays, -1) # -1 to account for loss of 1 sample in MipRayMarcher + + # smooth weights + weights = torch.nn.functional.max_pool1d(weights.unsqueeze(1).float(), 2, 1, padding=1) + weights = torch.nn.functional.avg_pool1d(weights, 2, 1).squeeze() + weights = weights + 0.01 + + z_vals_mid = 0.5 * (z_vals[: ,:-1] + z_vals[: ,1:]) + importance_z_vals = self.sample_pdf(z_vals_mid, weights[:, 1:-1], + N_importance).detach().reshape(batch_size, num_rays, N_importance, 1) + return importance_z_vals + + def sample_pdf(self, bins, weights, N_importance, det=False, eps=1e-5): + """ + Sample @N_importance samples from @bins with distribution defined by @weights. + Inputs: + bins: (N_rays, N_samples_+1) where N_samples_ is "the number of coarse samples per ray - 2" + weights: (N_rays, N_samples_) + N_importance: the number of samples to draw from the distribution + det: deterministic or not + eps: a small number to prevent division by zero + Outputs: + samples: the sampled samples + """ + N_rays, N_samples_ = weights.shape + weights = weights + eps # prevent division by zero (don't do inplace op!) + pdf = weights / torch.sum(weights, -1, keepdim=True) # (N_rays, N_samples_) + cdf = torch.cumsum(pdf, -1) # (N_rays, N_samples), cumulative distribution function + cdf = torch.cat([torch.zeros_like(cdf[: ,:1]), cdf], -1) # (N_rays, N_samples_+1) + # padded to 0~1 inclusive + + if det: + u = torch.linspace(0, 1, N_importance, device=bins.device) + u = u.expand(N_rays, N_importance) + else: + u = torch.rand(N_rays, N_importance, device=bins.device) + u = u.contiguous() + + inds = torch.searchsorted(cdf, u, right=True) + below = torch.clamp_min(inds-1, 0) + above = torch.clamp_max(inds, N_samples_) + + inds_sampled = torch.stack([below, above], -1).view(N_rays, 2*N_importance) + cdf_g = torch.gather(cdf, 1, inds_sampled).view(N_rays, N_importance, 2) + bins_g = torch.gather(bins, 1, inds_sampled).view(N_rays, N_importance, 2) + + denom = cdf_g[...,1]-cdf_g[...,0] + denom[denom= 0 + coeff = (deg + 1) ** 2 + assert sh.shape[-1] >= coeff + + result = C0 * sh[..., 0] + if deg > 0: + x, y, z = dirs[..., 0:1], dirs[..., 1:2], dirs[..., 2:3] + result = ( + result - C1 * y * sh[..., 1] + C1 * z * sh[..., 2] - C1 * x * sh[..., 3] + ) + + if deg > 1: + xx, yy, zz = x * x, y * y, z * z + xy, yz, xz = x * y, y * z, x * z + result = ( + result + + C2[0] * xy * sh[..., 4] + + C2[1] * yz * sh[..., 5] + + C2[2] * (2.0 * zz - xx - yy) * sh[..., 6] + + C2[3] * xz * sh[..., 7] + + C2[4] * (xx - yy) * sh[..., 8] + ) + + if deg > 2: + result = ( + result + + C3[0] * y * (3 * xx - yy) * sh[..., 9] + + C3[1] * xy * z * sh[..., 10] + + C3[2] * y * (4 * zz - xx - yy) * sh[..., 11] + + C3[3] * z * (2 * zz - 3 * xx - 3 * yy) * sh[..., 12] + + C3[4] * x * (4 * zz - xx - yy) * sh[..., 13] + + C3[5] * z * (xx - yy) * sh[..., 14] + + C3[6] * x * (xx - 3 * yy) * sh[..., 15] + ) + + if deg > 3: + result = ( + result + + C4[0] * xy * (xx - yy) * sh[..., 16] + + C4[1] * yz * (3 * xx - yy) * sh[..., 17] + + C4[2] * xy * (7 * zz - 1) * sh[..., 18] + + C4[3] * yz * (7 * zz - 3) * sh[..., 19] + + C4[4] * (zz * (35 * zz - 30) + 3) * sh[..., 20] + + C4[5] * xz * (7 * zz - 3) * sh[..., 21] + + C4[6] * (xx - yy) * (7 * zz - 1) * sh[..., 22] + + C4[7] * xz * (xx - 3 * yy) * sh[..., 23] + + C4[8] + * (xx * (xx - 3 * yy) - yy * (3 * xx - yy)) + * sh[..., 24] + 
) + return result + + +def RGB2SH(rgb): + return (rgb - 0.5) / C0 + + +def SH2RGB(sh): + return sh * C0 + 0.5 diff --git a/LHM/models/rendering/utils/typing.py b/LHM/models/rendering/utils/typing.py new file mode 100644 index 0000000000000000000000000000000000000000..dee9f967c21f94db1ad939d7dead156d86748752 --- /dev/null +++ b/LHM/models/rendering/utils/typing.py @@ -0,0 +1,40 @@ +""" +This module contains type annotations for the project, using +1. Python type hints (https://docs.python.org/3/library/typing.html) for Python objects +2. jaxtyping (https://github.com/google/jaxtyping/blob/main/API.md) for PyTorch tensors + +Two types of typing checking can be used: +1. Static type checking with mypy (install with pip and enabled as the default linter in VSCode) +2. Runtime type checking with typeguard (install with pip and triggered at runtime, mainly for tensor dtype and shape checking) +""" + +# Basic types +from typing import ( + Any, + Callable, + Dict, + Iterable, + List, + Literal, + NamedTuple, + NewType, + Optional, + Sized, + Tuple, + Type, + TypeVar, + Union, +) + +# Tensor dtype +# for jaxtyping usage, see https://github.com/google/jaxtyping/blob/main/API.md +from jaxtyping import Bool, Complex, Float, Inexact, Int, Integer, Num, Shaped, UInt + +# Config type +from omegaconf import DictConfig + +# PyTorch Tensor type +from torch import Tensor + +# Runtime type checking decorator +from typeguard import typechecked as typechecker diff --git a/LHM/models/rendering/utils/utils.py b/LHM/models/rendering/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a3cb9c8d57c5e2063ff7b95da18a1d8ad2601103 --- /dev/null +++ b/LHM/models/rendering/utils/utils.py @@ -0,0 +1,111 @@ +import torch +import torch.nn as nn +from torch.autograd import Function +from torch.cuda.amp import custom_bwd, custom_fwd + +from LHM.models.rendering.utils.typing import * + + +def get_activation(name): + if name is None: + return lambda x: x + name = name.lower() + if name == "none": + return lambda x: x + elif name == "lin2srgb": + return lambda x: torch.where( + x > 0.0031308, + torch.pow(torch.clamp(x, min=0.0031308), 1.0 / 2.4) * 1.055 - 0.055, + 12.92 * x, + ).clamp(0.0, 1.0) + elif name == "exp": + return lambda x: torch.exp(x) + elif name == "shifted_exp": + return lambda x: torch.exp(x - 1.0) + elif name == "trunc_exp": + return trunc_exp + elif name == "shifted_trunc_exp": + return lambda x: trunc_exp(x - 1.0) + elif name == "sigmoid": + return lambda x: torch.sigmoid(x) + elif name == "tanh": + return lambda x: torch.tanh(x) + elif name == "shifted_softplus": + return lambda x: F.softplus(x - 1.0) + elif name == "scale_-11_01": + return lambda x: x * 0.5 + 0.5 + else: + try: + return getattr(F, name) + except AttributeError: + raise ValueError(f"Unknown activation function: {name}") + + +class MLP(nn.Module): + def __init__( + self, + dim_in: int, + dim_out: int, + n_neurons: int, + n_hidden_layers: int, + activation: str = "relu", + output_activation: Optional[str] = None, + bias: bool = True, + ): + super().__init__() + layers = [ + self.make_linear( + dim_in, n_neurons, is_first=True, is_last=False, bias=bias + ), + self.make_activation(activation), + ] + for i in range(n_hidden_layers - 1): + layers += [ + self.make_linear( + n_neurons, n_neurons, is_first=False, is_last=False, bias=bias + ), + self.make_activation(activation), + ] + layers += [ + self.make_linear( + n_neurons, dim_out, is_first=False, is_last=True, bias=bias + ) + ] + self.layers = nn.Sequential(*layers) 
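+        # With n_hidden_layers = L, the stack assembled above is
+        #   Linear(dim_in, n_neurons) -> act -> [Linear(n_neurons, n_neurons) -> act] * (L - 1)
+        #   -> Linear(n_neurons, dim_out)
+        # e.g. MLP(3, 16, n_neurons=64, n_hidden_layers=2, activation="relu") builds
+        # Linear(3,64)-ReLU-Linear(64,64)-ReLU-Linear(64,16); the output activation below is
+        # applied separately in forward(), not inside this Sequential.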
+ self.output_activation = get_activation(output_activation) + + def forward(self, x): + x = self.layers(x) + x = self.output_activation(x) + return x + + def make_linear(self, dim_in, dim_out, is_first, is_last, bias=True): + layer = nn.Linear(dim_in, dim_out, bias=bias) + return layer + + def make_activation(self, activation): + if activation == "relu": + return nn.ReLU(inplace=True) + elif activation == "silu": + return nn.SiLU(inplace=True) + else: + raise NotImplementedError + + +class _TruncExp(Function): # pylint: disable=abstract-method + # Implementation from torch-ngp: + # https://github.com/ashawkey/torch-ngp/blob/93b08a0d4ec1cc6e69d85df7f0acdfb99603b628/activation.py + @staticmethod + @custom_fwd(cast_inputs=torch.float32) + def forward(ctx, x): # pylint: disable=arguments-differ + ctx.save_for_backward(x) + return torch.exp(x) + + @staticmethod + @custom_bwd + def backward(ctx, g): # pylint: disable=arguments-differ + x = ctx.saved_tensors[0] + return g * torch.exp(torch.clamp(x, max=15)) + + +trunc_exp = _TruncExp.apply diff --git a/LHM/models/transformer.py b/LHM/models/transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..18738a8adb32bccf273a238165acf21eb368cf87 --- /dev/null +++ b/LHM/models/transformer.py @@ -0,0 +1,277 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Xiaodong Gu & Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-1 17:49:25 +# @Function : transformer_block + +import pdb +from functools import partial +from typing import Any, Dict, Optional, Tuple, Union + +import torch +import torch.nn as nn +from accelerate.logging import get_logger +from diffusers.utils import is_torch_version + +logger = get_logger(__name__) + + +class TransformerDecoder(nn.Module): + """ + Transformer blocks that process the input and optionally use condition and modulation. 
+ """ + + motion_embed_type = ["sd3_mm_cond", "sd3_mm_bh_cond"] + + def __init__( + self, + block_type: str, + num_layers: int, + num_heads: int, + inner_dim: int, + cond_dim: int = None, + mod_dim: int = None, + gradient_checkpointing=False, + eps: float = 1e-6, + ): + super().__init__() + self.gradient_checkpointing = gradient_checkpointing + self.block_type = block_type + + if ( + block_type == "sd3_cond" + or block_type == "sd3_mm_cond" + or block_type == "sd3_mm_bh_cond" + ): + # dual_attention_layers = list(range(num_layers//2)) + dual_attention_layers = [] + self.layers = nn.ModuleList( + [ + self._block_fn(inner_dim, cond_dim, mod_dim)( + num_heads=num_heads, + eps=eps, + context_pre_only=i == num_layers - 1, + use_dual_attention=( + True if i in dual_attention_layers else False + ), + ) + for i in range(num_layers) + ] + ) + else: + self.layers = nn.ModuleList( + [ + self._block_fn(inner_dim, cond_dim, mod_dim)( + num_heads=num_heads, + eps=eps, + ) + for _ in range(num_layers) + ] + ) + + self.norm = nn.LayerNorm(inner_dim, eps=eps) + + if self.block_type in [ + "cogvideo_cond", + "sd3_cond", + "sd3_mm_cond", + "sd3_mm_bh_cond", + ]: + self.linear_cond_proj = nn.Linear(cond_dim, inner_dim) + + @property + def block_type(self): + return self._block_type + + @block_type.setter + def block_type(self, block_type): + assert block_type in [ + "basic", + "cond", + "mod", + "cond_mod", + "sd3_cond", + "sd3_mm_cond", + "sd3_mm_bh_cond", + "cogvideo_cond", + ], f"Unsupported block type: {block_type}" + self._block_type = block_type + + def _block_fn(self, inner_dim, cond_dim, mod_dim): + assert inner_dim is not None, f"inner_dim must always be specified" + if self.block_type == "basic": + assert ( + cond_dim is None and mod_dim is None + ), f"Condition and modulation are not supported for BasicBlock" + from .block import BasicBlock + + logger.debug(f"Using BasicBlock") + return partial(BasicBlock, inner_dim=inner_dim) + elif self.block_type == "cond": + assert ( + cond_dim is not None + ), f"Condition dimension must be specified for ConditionBlock" + assert ( + mod_dim is None + ), f"Modulation dimension is not supported for ConditionBlock" + from .block import ConditionBlock + + logger.debug(f"Using ConditionBlock") + return partial(ConditionBlock, inner_dim=inner_dim, cond_dim=cond_dim) + elif self.block_type == "mod": + logger.error(f"modulation without condition is not implemented") + raise NotImplementedError( + f"modulation without condition is not implemented" + ) + elif self.block_type == "cond_mod": + assert ( + cond_dim is not None and mod_dim is not None + ), f"Condition and modulation dimensions must be specified for ConditionModulationBlock" + from .block import ConditionModulationBlock + + logger.debug(f"Using ConditionModulationBlock") + return partial( + ConditionModulationBlock, + inner_dim=inner_dim, + cond_dim=cond_dim, + mod_dim=mod_dim, + ) + elif self.block_type == "cogvideo_cond": + logger.debug(f"Using CogVideoXBlock") + from LHM.models.transformer_dit import CogVideoXBlock + + # assert inner_dim == cond_dim, f"inner_dim:{inner_dim}, cond_dim:{cond_dim}" + return partial(CogVideoXBlock, dim=inner_dim, attention_bias=True) + elif self.block_type == "sd3_cond": + logger.debug(f"Using SD3JointTransformerBlock") + from LHM.models.transformer_dit import SD3JointTransformerBlock + + return partial(SD3JointTransformerBlock, dim=inner_dim, qk_norm="rms_norm") + elif self.block_type == "sd3_mm_cond": + logger.debug(f"Using SD3MMJointTransformerBlock") + from 
LHM.models.transformer_dit import SD3MMJointTransformerBlock + + return partial( + SD3MMJointTransformerBlock, dim=inner_dim, qk_norm="rms_norm" + ) + elif self.block_type == "sd3_mm_bh_cond": + logger.debug(f"Using SD3MMJointTransformerBlock") + from LHM.models.transformer_dit import SD3BodyHeadMMJointTransformerBlock + + return partial( + SD3BodyHeadMMJointTransformerBlock, dim=inner_dim, qk_norm="rms_norm" + ) + else: + raise ValueError( + f"Unsupported block type during runtime: {self.block_type}" + ) + + def assert_runtime_integrity( + self, x: torch.Tensor, cond: torch.Tensor, mod: torch.Tensor + ): + assert x is not None, f"Input tensor must be specified" + if self.block_type == "basic": + assert ( + cond is None and mod is None + ), f"Condition and modulation are not supported for BasicBlock" + elif "cond" in self.block_type: + assert ( + cond is not None and mod is None + ), f"Condition must be specified and modulation is not supported for ConditionBlock" + elif self.block_type == "mod": + raise NotImplementedError( + f"modulation without condition is not implemented" + ) + else: + assert ( + cond is not None and mod is not None + ), f"Condition and modulation must be specified for ConditionModulationBlock" + + def forward_layer( + self, layer: nn.Module, x: torch.Tensor, cond: torch.Tensor, mod: torch.Tensor + ): + if self.block_type == "basic": + return layer(x) + elif self.block_type == "cond": + return layer(x, cond) + elif self.block_type == "mod": + return layer(x, mod) + else: + return layer(x, cond, mod) + + def forward( + self, + x: torch.Tensor, + cond: torch.Tensor = None, + mod: torch.Tensor = None, + temb: torch.Tensor = None, + ) -> torch.Tensor: + """ + Forward pass of the transformer model. + Args: + x (torch.Tensor): Input tensor of shape [N, L, D]. + cond (torch.Tensor, optional): Conditional tensor of shape [N, L_cond, D_cond] or None. Defaults to None. + mod (torch.Tensor, optional): Modulation tensor of shape [N, D_mod] or None. Defaults to None. + temb (torch.Tensor, optional): Modulation tensor of shape [N, D_mod] or None. Defaults to None. # For SD3_MM_Cond, temb means MotionCLIP + Returns: + torch.Tensor: Output tensor of shape [N, L, D]. 
+ """ + + # x: [N, L, D] + # cond: [N, L_cond, D_cond] or None + # mod: [N, D_mod] or None + self.assert_runtime_integrity(x, cond, mod) + + if self.block_type in [ + "cogvideo_cond", + "sd3_cond", + "sd3_mm_cond", + "sd3_mm_bh_cond", + ]: + cond = self.linear_cond_proj(cond) + for layer in self.layers: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + ckpt_kwargs: Dict[str, Any] = ( + {"use_reentrant": False} + if is_torch_version(">=", "1.11.0") + else {} + ) + + if self.block_type not in self.motion_embed_type: + x, cond = torch.utils.checkpoint.checkpoint( + create_custom_forward(layer), + x, + cond, + **ckpt_kwargs, + ) + else: + x, cond = torch.utils.checkpoint.checkpoint( + create_custom_forward(layer), + x, + cond, + temb, + **ckpt_kwargs, + ) + else: + x, cond = layer( + hidden_states=x, + encoder_hidden_states=cond, + temb=temb, + # image_rotary_emb=None, + ) + + x = self.norm(x) + else: + for layer in self.layers: + x = self.forward_layer(layer, x, cond, mod) + x = self.norm(x) + + return x diff --git a/LHM/models/transformer_dit.py b/LHM/models/transformer_dit.py new file mode 100644 index 0000000000000000000000000000000000000000..552668434407ffe12f08c2c5266eed5d69a8e2d4 --- /dev/null +++ b/LHM/models/transformer_dit.py @@ -0,0 +1,722 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu & Xiaodong Gu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-1 17:41:38 +# @Function : Transformer Block + + +from functools import partial +from typing import Any, Dict, Optional, Tuple, Union + +import torch +import torch.nn as nn +import torch.nn.functional as F + +assert hasattr(F, "scaled_dot_product_attention"), print( + "AttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0." +) +import pdb + +from diffusers.models.attention import Attention, FeedForward +from diffusers.models.attention_processor import ( + CogVideoXAttnProcessor2_0, + JointAttnProcessor2_0, +) +from diffusers.models.normalization import AdaLayerNormContinuous, AdaLayerNormZero + + +class CogVideoXBlock(nn.Module): + r""" + Transformer block used in [CogVideoX](https://github.com/THUDM/CogVideo) model. + + Parameters: + dim (`int`): + The number of channels in the input and output. + num_attention_heads (`int`): + The number of heads to use for multi-head attention. + attention_head_dim (`int`): + The number of channels in each head. + time_embed_dim (`int`): + The number of channels in timestep embedding. + dropout (`float`, defaults to `0.0`): + The dropout probability to use. + activation_fn (`str`, defaults to `"gelu-approximate"`): + Activation function to be used in feed-forward. + attention_bias (`bool`, defaults to `False`): + Whether or not to use bias in attention projection layers. + qk_norm (`bool`, defaults to `True`): + Whether or not to use normalization after query and key projections in Attention. + norm_elementwise_affine (`bool`, defaults to `True`): + Whether to use learnable elementwise affine parameters for normalization. + norm_eps (`float`, defaults to `1e-5`): + Epsilon value for normalization layers. + final_dropout (`bool` defaults to `False`): + Whether to apply a final dropout after the last feed-forward layer. + ff_inner_dim (`int`, *optional*, defaults to `None`): + Custom hidden dimension of Feed-forward layer. If not provided, `4 * dim` is used. 
+ ff_bias (`bool`, defaults to `True`): + Whether or not to use bias in Feed-forward layer. + attention_out_bias (`bool`, defaults to `True`): + Whether or not to use bias in Attention output projection layer. + """ + + def __init__( + self, + dim: int, + num_heads: int, + # num_attention_heads: int, + # attention_head_dim: int, + # time_embed_dim: int, + dropout: float = 0.0, + activation_fn: str = "gelu-approximate", + attention_bias: bool = False, + qk_norm: bool = True, + norm_elementwise_affine: bool = True, + eps: float = 1e-5, + # norm_eps: float = 1e-5, + final_dropout: bool = True, + ff_inner_dim: Optional[int] = None, + ff_bias: bool = True, + attention_out_bias: bool = True, + ): + super().__init__() + norm_eps = eps + num_attention_heads = num_heads + attention_head_dim = dim // num_attention_heads + assert attention_head_dim * num_attention_heads == dim + + # 1. Self Attention + self.norm1 = nn.LayerNorm( + dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps, bias=True + ) + self.norm1_context = nn.LayerNorm( + dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps, bias=True + ) + + self.attn1 = Attention( + query_dim=dim, + dim_head=attention_head_dim, + heads=num_attention_heads, + qk_norm="layer_norm" if qk_norm else None, + eps=1e-6, + bias=attention_bias, + out_bias=attention_out_bias, + processor=CogVideoXAttnProcessor2_0(), + ) + + # 2. Feed Forward + self.norm2 = nn.LayerNorm( + dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps, bias=True + ) + self.norm2_context = nn.LayerNorm( + dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps, bias=True + ) + + self.ff = FeedForward( + dim, + dropout=dropout, + activation_fn=activation_fn, + final_dropout=final_dropout, + inner_dim=ff_inner_dim, + bias=ff_bias, + ) + + def forward( + self, + hidden_states: torch.Tensor, + encoder_hidden_states: torch.Tensor, + temb: torch.Tensor = None, + image_rotary_emb: Optional[Tuple[torch.Tensor, torch.Tensor]] = None, + ) -> torch.Tensor: + text_seq_length = encoder_hidden_states.size(1) + + # norm & modulate + # norm_hidden_states, norm_encoder_hidden_states, gate_msa, enc_gate_msa = self.norm1( + # hidden_states, encoder_hidden_states, temb + # ) + norm_hidden_states = self.norm1(hidden_states) + norm_encoder_hidden_states = self.norm1_context(encoder_hidden_states) + + # attention + attn_hidden_states, attn_encoder_hidden_states = self.attn1( + hidden_states=norm_hidden_states, + encoder_hidden_states=norm_encoder_hidden_states, + image_rotary_emb=image_rotary_emb, + ) + + hidden_states = hidden_states + attn_hidden_states + encoder_hidden_states = encoder_hidden_states + attn_encoder_hidden_states + + # norm & modulate + # norm_hidden_states, norm_encoder_hidden_states, gate_ff, enc_gate_ff = self.norm2( + # hidden_states, encoder_hidden_states, temb + # ) + norm_hidden_states = self.norm2(hidden_states) + norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) + + # feed-forward + norm_hidden_states = torch.cat( + [norm_encoder_hidden_states, norm_hidden_states], dim=1 + ) + ff_output = self.ff(norm_hidden_states) + + hidden_states = hidden_states + ff_output[:, text_seq_length:] + encoder_hidden_states = encoder_hidden_states + ff_output[:, :text_seq_length] + + return hidden_states, encoder_hidden_states + + +def _chunked_feed_forward( + ff: nn.Module, hidden_states: torch.Tensor, chunk_dim: int, chunk_size: int +): + # "feed_forward_chunk_size" can be used to save memory + if hidden_states.shape[chunk_dim] % chunk_size != 0: + 
raise ValueError( + f"`hidden_states` dimension to be chunked: {hidden_states.shape[chunk_dim]} has to be divisible by chunk size: {chunk_size}. Make sure to set an appropriate `chunk_size` when calling `unet.enable_forward_chunking`." + ) + + num_chunks = hidden_states.shape[chunk_dim] // chunk_size + ff_output = torch.cat( + [ff(hid_slice) for hid_slice in hidden_states.chunk(num_chunks, dim=chunk_dim)], + dim=chunk_dim, + ) + return ff_output + + +class QKNormJointAttnProcessor2_0: + """Attention processor used typically in processing the SD3-like self-attention projections.""" + + def __init__(self): + if not hasattr(F, "scaled_dot_product_attention"): + raise ImportError( + "AttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0." + ) + + def __call__( + self, + attn: Attention, + hidden_states: torch.FloatTensor, + encoder_hidden_states: torch.FloatTensor = None, + attention_mask: Optional[torch.FloatTensor] = None, + *args, + **kwargs, + ) -> torch.FloatTensor: + residual = hidden_states + + input_ndim = hidden_states.ndim + if input_ndim == 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view( + batch_size, channel, height * width + ).transpose(1, 2) + context_input_ndim = encoder_hidden_states.ndim + if context_input_ndim == 4: + batch_size, channel, height, width = encoder_hidden_states.shape + encoder_hidden_states = encoder_hidden_states.view( + batch_size, channel, height * width + ).transpose(1, 2) + + batch_size = encoder_hidden_states.shape[0] + + # `sample` projections. + query = attn.to_q(hidden_states) + key = attn.to_k(hidden_states) + value = attn.to_v(hidden_states) + + # `context` projections. + encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) + encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) + encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) + + # attention + query = torch.cat([query, encoder_hidden_states_query_proj], dim=1) + key = torch.cat([key, encoder_hidden_states_key_proj], dim=1) + value = torch.cat([value, encoder_hidden_states_value_proj], dim=1) + + inner_dim = key.shape[-1] + head_dim = inner_dim // attn.heads + query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + if attn.norm_q is not None: + query = attn.norm_q(query) + if attn.norm_k is not None: + key = attn.norm_k(key) + + hidden_states = F.scaled_dot_product_attention( + query, key, value, dropout_p=0.0, is_causal=False + ) + hidden_states = hidden_states.transpose(1, 2).reshape( + batch_size, -1, attn.heads * head_dim + ) + hidden_states = hidden_states.to(query.dtype) + + # Split the attention outputs. 
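+        # hidden_states currently has shape (batch, L_sample + L_context, heads * head_dim):
+        # the sample and context tokens were concatenated along the sequence dimension before
+        # scaled_dot_product_attention, so the first residual.shape[1] tokens belong to the
+        # sample stream and the remainder to the encoder_hidden_states (context) stream.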
+ hidden_states, encoder_hidden_states = ( + hidden_states[:, : residual.shape[1]], + hidden_states[:, residual.shape[1] :], + ) + + # linear proj + hidden_states = attn.to_out[0](hidden_states) + # dropout + hidden_states = attn.to_out[1](hidden_states) + if not attn.context_pre_only: + encoder_hidden_states = attn.to_add_out(encoder_hidden_states) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, -2).reshape( + batch_size, channel, height, width + ) + if context_input_ndim == 4: + encoder_hidden_states = encoder_hidden_states.transpose(-1, -2).reshape( + batch_size, channel, height, width + ) + + return hidden_states, encoder_hidden_states + + +class SD3JointTransformerBlock(nn.Module): + r""" + A Transformer block following the MMDiT architecture, introduced in Stable Diffusion 3. + + Reference: https://arxiv.org/abs/2403.03206 + + Parameters: + dim (`int`): The number of channels in the input and output. + num_attention_heads (`int`): The number of heads to use for multi-head attention. + attention_head_dim (`int`): The number of channels in each head. + context_pre_only (`bool`): Boolean to determine if we should add some blocks associated with the + processing of `context` conditions. + """ + + def __init__( + self, + dim: int, + num_heads: int, + eps: float, + # num_attention_heads: int, + # attention_head_dim: int, + context_pre_only: bool = False, + qk_norm: Optional[str] = None, + use_dual_attention: bool = False, + ): + super().__init__() + num_attention_heads = num_heads + attention_head_dim = dim // num_attention_heads + assert attention_head_dim * num_attention_heads == dim + + self.use_dual_attention = use_dual_attention + self.context_pre_only = context_pre_only + + self.norm1 = nn.LayerNorm(dim) + + self.norm1_context = nn.LayerNorm(dim) + + """Attention processor used typically in processing + the SD3-like self-attention projections.""" + + processor = JointAttnProcessor2_0() + + # qk_norm rms_norm + self.attn = Attention( + query_dim=dim, + cross_attention_dim=None, + added_kv_proj_dim=dim, + dim_head=attention_head_dim, + heads=num_attention_heads, + out_dim=dim, + context_pre_only=context_pre_only, + bias=True, + processor=processor, + qk_norm=qk_norm, + eps=eps, + ) + + # SD-3.5 + if use_dual_attention: + self.attn2 = Attention( + query_dim=dim, + cross_attention_dim=None, + dim_head=attention_head_dim, + heads=num_attention_heads, + out_dim=dim, + bias=True, + processor=processor, + qk_norm=qk_norm, + eps=eps, + ) + else: + self.attn2 = None + + self.norm2 = nn.LayerNorm(dim, elementwise_affine=False, eps=eps) + self.ff = FeedForward(dim=dim, dim_out=dim, activation_fn="gelu-approximate") + + if not context_pre_only: + self.norm2_context = nn.LayerNorm(dim, elementwise_affine=False, eps=eps) + self.ff_context = FeedForward( + dim=dim, dim_out=dim, activation_fn="gelu-approximate" + ) + else: + self.norm2_context = None + self.ff_context = None + + # let chunk size default to None + self._chunk_size = None + self._chunk_dim = 0 + + # Copied from diffusers.models.attention.BasicTransformerBlock.set_chunk_feed_forward + def set_chunk_feed_forward(self, chunk_size: Optional[int], dim: int = 0): + # Sets chunk feed-forward + self._chunk_size = chunk_size + self._chunk_dim = dim + + def forward( + self, + hidden_states: torch.FloatTensor, + encoder_hidden_states: torch.FloatTensor, + temb: torch.FloatTensor = None, + ): + """ + Forward pass of the transformer_dit model. + Args: + hidden_states (torch.FloatTensor): Input hidden states. 
Query Points features + encoder_hidden_states (torch.FloatTensor): Encoder hidden states. Context features + temb (torch.FloatTensor, optional): Optional tensor for embedding. Defaults to None. + Returns: + Tuple[torch.FloatTensor, torch.FloatTensor]: Tuple containing the updated hidden states and encoder hidden states. + """ + + + norm_hidden_states = self.norm1(hidden_states) + norm_encoder_hidden_states = self.norm1_context(encoder_hidden_states) + + # Attention. + # norma hidden states [B, L, D] - > multi-head atten [B, num_head, L, D/num_head] + attn_output, context_attn_output = self.attn( + hidden_states=norm_hidden_states, + encoder_hidden_states=norm_encoder_hidden_states, + ) + + # Process attention outputs for the `hidden_states`. + # attn_output = gate_msa.unsqueeze(1) * attn_output + hidden_states = hidden_states + attn_output + + # ffd + + if self.use_dual_attention: + attn_output2 = self.attn2(hidden_states=norm_hidden_states) + # attn_output2 = gate_msa2.unsqueeze(1) * attn_output2 + hidden_states = hidden_states + attn_output2 + + norm_hidden_states = self.norm2(hidden_states) + # norm_hidden_states = norm_hidden_states * (1 + scale_mlp[:, None]) + shift_mlp[:, None] + if self._chunk_size is not None: + # "feed_forward_chunk_size" can be used to save memory + ff_output = _chunked_feed_forward( + self.ff, norm_hidden_states, self._chunk_dim, self._chunk_size + ) + else: + ff_output = self.ff(norm_hidden_states) + # ff_output = gate_mlp.unsqueeze(1) * ff_output + + hidden_states = hidden_states + ff_output + + # Process attention outputs for the `encoder_hidden_states`. + if self.context_pre_only: + encoder_hidden_states = None + else: + # context_attn_output = c_gate_msa.unsqueeze(1) * context_attn_output + encoder_hidden_states = encoder_hidden_states + context_attn_output + + norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) + # norm_encoder_hidden_states = norm_encoder_hidden_states * (1 + c_scale_mlp[:, None]) + c_shift_mlp[:, None] + if self._chunk_size is not None: + # "feed_forward_chunk_size" can be used to save memory + context_ff_output = _chunked_feed_forward( + self.ff_context, + norm_encoder_hidden_states, + self._chunk_dim, + self._chunk_size, + ) + else: + context_ff_output = self.ff_context(norm_encoder_hidden_states) + # encoder_hidden_states = encoder_hidden_states + c_gate_mlp.unsqueeze(1) * context_ff_output + encoder_hidden_states = encoder_hidden_states + context_ff_output + + return hidden_states, encoder_hidden_states + + +class SD3MMJointTransformerBlock(nn.Module): + r""" + A Transformer block following the MMDiT architecture, introduced in Stable Diffusion 3. + + Reference: https://arxiv.org/abs/2403.03206 + + Parameters: + dim (`int`): The number of channels in the input and output. + num_attention_heads (`int`): The number of heads to use for multi-head attention. + attention_head_dim (`int`): The number of channels in each head. + context_pre_only (`bool`): Boolean to determine if we should add some blocks associated with the + processing of `context` conditions. 
+ """ + + def __init__( + self, + dim: int, + num_heads: int, + eps: float, + # num_attention_heads: int, + # attention_head_dim: int, + context_pre_only: bool = False, + qk_norm: Optional[str] = None, + use_dual_attention: bool = False, + ): + super().__init__() + num_attention_heads = num_heads + attention_head_dim = dim // num_attention_heads + assert attention_head_dim * num_attention_heads == dim + + self.use_dual_attention = use_dual_attention + self.context_pre_only = context_pre_only + + context_norm_type = ( + "ada_norm_continous" if context_pre_only else "ada_norm_zero" + ) + + self.norm1 = AdaLayerNormZero(dim) + + if context_norm_type == "ada_norm_continous": + self.norm1_context = AdaLayerNormContinuous( + dim, + dim, + elementwise_affine=False, + eps=1e-6, + bias=True, + norm_type="layer_norm", + ) + elif context_norm_type == "ada_norm_zero": + self.norm1_context = AdaLayerNormZero(dim) + else: + raise ValueError( + f"Unknown context_norm_type: {context_norm_type}, currently only support `ada_norm_continous`, `ada_norm_zero`" + ) + + processor = JointAttnProcessor2_0() + + # qk_norm rms_norm + self.attn = Attention( + query_dim=dim, + cross_attention_dim=None, + added_kv_proj_dim=dim, + dim_head=attention_head_dim, + heads=num_attention_heads, + out_dim=dim, + context_pre_only=context_pre_only, + bias=True, + processor=processor, + qk_norm=qk_norm, + eps=eps, + ) + + self.norm2 = nn.LayerNorm(dim, elementwise_affine=False, eps=eps) + self.ff = FeedForward(dim=dim, dim_out=dim, activation_fn="gelu-approximate") + + if not context_pre_only: + self.norm2_context = nn.LayerNorm(dim, elementwise_affine=False, eps=eps) + self.ff_context = FeedForward( + dim=dim, dim_out=dim, activation_fn="gelu-approximate" + ) + else: + self.norm2_context = None + self.ff_context = None + + # let chunk size default to None + self._chunk_size = None + self._chunk_dim = 0 + + # Copied from diffusers.models.attention.BasicTransformerBlock.set_chunk_feed_forward + def set_chunk_feed_forward(self, chunk_size: Optional[int], dim: int = 0): + # Sets chunk feed-forward + self._chunk_size = chunk_size + self._chunk_dim = dim + + def forward( + self, + hidden_states: torch.FloatTensor, + encoder_hidden_states: torch.FloatTensor, + temb: torch.FloatTensor = None, + ): + """ + Forward pass of the transformer_dit model. + Args: + hidden_states (torch.FloatTensor): Input hidden states. Query Points features + encoder_hidden_states (torch.FloatTensor): Encoder hidden states. Context features + motion embed:(torch.FloatTensor, optional): Optional tensor for embedding. Defaults to None. + Returns: + Tuple[torch.FloatTensor, torch.FloatTensor]: Tuple containing the updated hidden states and encoder hidden states. + """ + + if temb is None: + pdb.set_trace() + + norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1( + hidden_states, emb=temb + ) + + if self.context_pre_only: + norm_encoder_hidden_states = self.norm1_context(encoder_hidden_states, temb) + else: + ( + norm_encoder_hidden_states, + c_gate_msa, + c_shift_mlp, + c_scale_mlp, + c_gate_mlp, + ) = self.norm1_context(encoder_hidden_states, emb=temb) + + # Attention. + # norma hidden states [B, L, D] - > multi-head atten [B, num_head, L, D/num_head] + attn_output, context_attn_output = self.attn( + hidden_states=norm_hidden_states, + encoder_hidden_states=norm_encoder_hidden_states, + ) + + # Process attention outputs for the `hidden_states`. 
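+        # gate_msa / shift_mlp / scale_mlp / gate_mlp come from AdaLayerNormZero conditioned on
+        # temb (the motion embedding): gate_msa scales the attention residual here, while
+        # scale_mlp / shift_mlp modulate the pre-FFN LayerNorm and gate_mlp scales the FFN
+        # residual further below.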
+ attn_output = gate_msa.unsqueeze(1) * attn_output + hidden_states = hidden_states + attn_output + + # ffd + + norm_hidden_states = self.norm2(hidden_states) + norm_hidden_states = ( + norm_hidden_states * (1 + scale_mlp[:, None]) + shift_mlp[:, None] + ) + if self._chunk_size is not None: + # "feed_forward_chunk_size" can be used to save memory + ff_output = _chunked_feed_forward( + self.ff, norm_hidden_states, self._chunk_dim, self._chunk_size + ) + else: + ff_output = self.ff(norm_hidden_states) + ff_output = gate_mlp.unsqueeze(1) * ff_output + hidden_states = hidden_states + ff_output + + # Process attention outputs for the `encoder_hidden_states`. + if self.context_pre_only: + encoder_hidden_states = None + else: + context_attn_output = c_gate_msa.unsqueeze(1) * context_attn_output + encoder_hidden_states = encoder_hidden_states + context_attn_output + + norm_encoder_hidden_states = self.norm2_context(encoder_hidden_states) + norm_encoder_hidden_states = ( + norm_encoder_hidden_states * (1 + c_scale_mlp[:, None]) + + c_shift_mlp[:, None] + ) + if self._chunk_size is not None: + # "feed_forward_chunk_size" can be used to save memory + context_ff_output = _chunked_feed_forward( + self.ff_context, + norm_encoder_hidden_states, + self._chunk_dim, + self._chunk_size, + ) + else: + context_ff_output = self.ff_context(norm_encoder_hidden_states) + encoder_hidden_states = ( + encoder_hidden_states + c_gate_mlp.unsqueeze(1) * context_ff_output + ) + encoder_hidden_states = encoder_hidden_states + context_ff_output + + return hidden_states, encoder_hidden_states + + +class SD3BodyHeadMMJointTransformerBlock(nn.Module): + r""" + BodyHead Transformer block following the MMDiT architecture, introduced in Stable Diffusion 3. + + Reference: https://arxiv.org/abs/2403.03206 + + Parameters: + dim (`int`): The number of channels in the input and output. + num_attention_heads (`int`): The number of heads to use for multi-head attention. + attention_head_dim (`int`): The number of channels in each head. + context_pre_only (`bool`): Boolean to determine if we should add some blocks associated with the + processing of `context` conditions. 
+ """ + + def __init__( + self, + dim: int, + num_heads: int, + eps: float, + # num_attention_heads: int, + # attention_head_dim: int, + context_pre_only: bool = False, + qk_norm: Optional[str] = None, + use_dual_attention: bool = False, + ): + super().__init__() + + self.head_dit = SD3MMJointTransformerBlock( + dim, + num_heads, + eps, + context_pre_only=context_pre_only, + qk_norm=qk_norm, + use_dual_attention=use_dual_attention, + ) + self.body_dit = SD3MMJointTransformerBlock( + dim, + num_heads, + eps, + context_pre_only=context_pre_only, + qk_norm=qk_norm, + use_dual_attention=use_dual_attention, + ) + + def forward( + self, + hidden_states: torch.FloatTensor, + encoder_hidden_states: torch.FloatTensor, + temb: torch.FloatTensor = None, + ): + """Default, last 1 / 4 is head""" + + _, N, _ = hidden_states.shape + body_size = int(N * 0.75) + + body_hidden_states, head_hidden_states = ( + hidden_states[:, :body_size], + hidden_states[:, body_size:], + ) + + _, temb_N = temb.shape + temb_size = temb_N // 2 + body_temb, head_temb = temb[:, :temb_size], temb[:, temb_size:] + + # body: 4096, head 1024, Sapiens & DINO + body_encoder_hidden_states, head_encoder_hidden_states = ( + encoder_hidden_states[:, :4096], + encoder_hidden_states[:, 4096:], + ) + + head_states, head_encoder_hidden_states = self.head_dit( + head_hidden_states, head_encoder_hidden_states, head_temb + ) + hidden_states = torch.cat([body_hidden_states, head_states], dim=1) + hidden_states, body_encoder_hidden_states = self.body_dit( + hidden_states, body_encoder_hidden_states, body_temb + ) + + if body_encoder_hidden_states is not None: + encoder_hidden_states = torch.cat( + [body_encoder_hidden_states, head_encoder_hidden_states], dim=1 + ) + else: + encoder_hidden_states = None + + return hidden_states, encoder_hidden_states diff --git a/LHM/models/utils.py b/LHM/models/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..0ae914f2387e15b32355caa1fd8735d50d6b0866 --- /dev/null +++ b/LHM/models/utils.py @@ -0,0 +1,38 @@ +import torch.nn as nn + + +def linear(*args, **kwargs): + """ + Create a linear module. 
+ """ + return nn.Linear(*args, **kwargs) + + +class LinerParameterTuner: + def __init__(self, start, start_value, end_value, end): + self.start = start + self.start_value = start_value + self.end_value = end_value + self.end = end + self.total_steps = self.end - self.start + + def get_value(self, step): + if step < self.start: + return self.start_value + elif step > self.end: + return self.end_value + + current_step = step - self.start + + ratio = current_step / self.total_steps + + current_value = self.start_value + ratio * (self.end_value - self.start_value) + return current_value + + +class StaticParameterTuner: + def __init__(self, v): + self.v = v + + def get_value(self, step): + return self.v diff --git a/LHM/outputs/__pycache__/base.cpython-310.pyc b/LHM/outputs/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5ffba683d2b79956acf1c11ad8f3daa344fe52f Binary files /dev/null and b/LHM/outputs/__pycache__/base.cpython-310.pyc differ diff --git a/LHM/outputs/__pycache__/output.cpython-310.pyc b/LHM/outputs/__pycache__/output.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..32ce5f5560d509cda5f747cc8ab3b0dcba7d6600 Binary files /dev/null and b/LHM/outputs/__pycache__/output.cpython-310.pyc differ diff --git a/LHM/outputs/base.py b/LHM/outputs/base.py new file mode 100644 index 0000000000000000000000000000000000000000..471882235ac0347a72624420e78e2d94eacfd7e8 --- /dev/null +++ b/LHM/outputs/base.py @@ -0,0 +1,114 @@ +"""Copy from diffusers +""" + +# Copyright 2024 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Generic utilities +""" + +import importlib +from collections import OrderedDict +from dataclasses import fields, is_dataclass +from typing import Any, Tuple + +import numpy as np + + +class BaseOutput(OrderedDict): + """ + Base class for all model outputs as dataclass. Has a `__getitem__` that allows indexing by integer or slice (like a + tuple) or strings (like a dictionary) that will ignore the `None` attributes. Otherwise behaves like a regular + Python dictionary. + + + + You can't unpack a [`BaseOutput`] directly. Use the [`~utils.BaseOutput.to_tuple`] method to convert it to a tuple + first. + + + """ + + def __post_init__(self) -> None: + class_fields = fields(self) + + # Safety and consistency checks + if not len(class_fields): + raise ValueError(f"{self.__class__.__name__} has no fields.") + + first_field = getattr(self, class_fields[0].name) + other_fields_are_none = all( + getattr(self, field.name) is None for field in class_fields[1:] + ) + + if other_fields_are_none and isinstance(first_field, dict): + for key, value in first_field.items(): + self[key] = value + else: + for field in class_fields: + v = getattr(self, field.name) + if v is not None: + self[field.name] = v + + def __delitem__(self, *args, **kwargs): + raise Exception( + f"You cannot use ``__delitem__`` on a {self.__class__.__name__} instance." 
+ ) + + def setdefault(self, *args, **kwargs): + raise Exception( + f"You cannot use ``setdefault`` on a {self.__class__.__name__} instance." + ) + + def pop(self, *args, **kwargs): + raise Exception( + f"You cannot use ``pop`` on a {self.__class__.__name__} instance." + ) + + def update(self, *args, **kwargs): + raise Exception( + f"You cannot use ``update`` on a {self.__class__.__name__} instance." + ) + + def __getitem__(self, k: Any) -> Any: + if isinstance(k, str): + inner_dict = dict(self.items()) + return inner_dict[k] + else: + return self.to_tuple()[k] + + def __setattr__(self, name: Any, value: Any) -> None: + if name in self.keys() and value is not None: + # Don't call self.__setitem__ to avoid recursion errors + super().__setitem__(name, value) + super().__setattr__(name, value) + + def __setitem__(self, key, value): + # Will raise a KeyException if needed + super().__setitem__(key, value) + # Don't call self.__setattr__ to avoid recursion errors + super().__setattr__(key, value) + + def __reduce__(self): + if not is_dataclass(self): + return super().__reduce__() + callable, _args, *remaining = super().__reduce__() + args = tuple(getattr(self, field.name) for field in fields(self)) + return callable, args, *remaining + + def to_tuple(self) -> Tuple[Any, ...]: + """ + Convert self to a tuple containing all the attributes/keys that are not `None`. + """ + return tuple(self[k] for k in self.keys()) diff --git a/LHM/outputs/output.py b/LHM/outputs/output.py new file mode 100644 index 0000000000000000000000000000000000000000..fbd941982d25b178b335750ba5d16a97760b4259 --- /dev/null +++ b/LHM/outputs/output.py @@ -0,0 +1,26 @@ +"""A class to define GSNet Output +""" + +from dataclasses import dataclass + +import numpy as np +from torch import Tensor + +from .base import BaseOutput + + +@dataclass +class GaussianAppOutput(BaseOutput): + """ + Output of the Gaussian Appearance output. + + Attributes: + + """ + + offset_xyz: Tensor + opacity: Tensor + rotation: Tensor + scaling: Tensor + shs: Tensor + use_rgb: bool diff --git a/LHM/runners/__init__.py b/LHM/runners/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b10e91b0e21cca673f2a68d10f10fcd00ebf0bfc --- /dev/null +++ b/LHM/runners/__init__.py @@ -0,0 +1,21 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from LHM.utils.registry import Registry + +REGISTRY_RUNNERS = Registry() + +from .infer import * +# from .train import * \ No newline at end of file diff --git a/LHM/runners/__pycache__/__init__.cpython-310.pyc b/LHM/runners/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d8c02b352b2a621ceb830e4679fac736e10eb8e2 Binary files /dev/null and b/LHM/runners/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/runners/__pycache__/abstract.cpython-310.pyc b/LHM/runners/__pycache__/abstract.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0376115339cb1f417f48fe7fc5ed7ab60c33fe1f Binary files /dev/null and b/LHM/runners/__pycache__/abstract.cpython-310.pyc differ diff --git a/LHM/runners/abstract.py b/LHM/runners/abstract.py new file mode 100644 index 0000000000000000000000000000000000000000..76916e805a5cfbf333d2d63e8607811939a5a639 --- /dev/null +++ b/LHM/runners/abstract.py @@ -0,0 +1,27 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from abc import ABC, abstractmethod + + +class Runner(ABC): + """Abstract runner class""" + + def __init__(self): + pass + + @abstractmethod + def run(self): + pass diff --git a/LHM/runners/infer/__init__.py b/LHM/runners/infer/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ac00bdd90052e5e3574fe6fd98451bd23e5d9d1e --- /dev/null +++ b/LHM/runners/infer/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
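# A minimal sketch (not part of the diff) of the Runner contract from LHM/runners/abstract.py:
# a concrete runner only has to implement run(). How runners get added to REGISTRY_RUNNERS is
# not shown in this hunk, so registration is deliberately omitted here.
from LHM.runners.abstract import Runner


class DummyRunner(Runner):
    """Toy runner, used only to illustrate the abstract interface."""

    def run(self):
        print("dummy run finished")


DummyRunner().run()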
+ + +from .human_lrm import HumanLRMInferrer diff --git a/LHM/runners/infer/__pycache__/__init__.cpython-310.pyc b/LHM/runners/infer/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f53e9d46c430fab9bd6c3198fcd478bc296966fc Binary files /dev/null and b/LHM/runners/infer/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/runners/infer/__pycache__/base_inferrer.cpython-310.pyc b/LHM/runners/infer/__pycache__/base_inferrer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c8ced6612f74e9215c6b15a4aed679a920359846 Binary files /dev/null and b/LHM/runners/infer/__pycache__/base_inferrer.cpython-310.pyc differ diff --git a/LHM/runners/infer/__pycache__/human_lrm.cpython-310.pyc b/LHM/runners/infer/__pycache__/human_lrm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc92d140440823d0e52fdc35855cb59d8f3f210c Binary files /dev/null and b/LHM/runners/infer/__pycache__/human_lrm.cpython-310.pyc differ diff --git a/LHM/runners/infer/__pycache__/utils.cpython-310.pyc b/LHM/runners/infer/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5008c1501009fe5c4efd859ea651205bb79c458b Binary files /dev/null and b/LHM/runners/infer/__pycache__/utils.cpython-310.pyc differ diff --git a/LHM/runners/infer/base_inferrer.py b/LHM/runners/infer/base_inferrer.py new file mode 100644 index 0000000000000000000000000000000000000000..8ee26b27969a0de94a9f47864037d4d1f0d53401 --- /dev/null +++ b/LHM/runners/infer/base_inferrer.py @@ -0,0 +1,62 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
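+ # base_inferrer.py defines the abstract Inferrer runner: it wraps an accelerate.Accelerator,
+ # exposes the current device via the `device` property, can be used as a context manager, and
+ # leaves _build_model / infer_single / infer to concrete subclasses such as HumanLRMInferrer;
+ # run() simply dispatches to infer().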
+ + +from abc import abstractmethod + +import torch +from accelerate import Accelerator +from accelerate.logging import get_logger + +from LHM.runners.abstract import Runner + +logger = get_logger(__name__) + + +class Inferrer(Runner): + + EXP_TYPE: str = None + + def __init__(self): + super().__init__() + + torch._dynamo.config.disable = True + self.accelerator = Accelerator() + + self.model: torch.nn.Module = None + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + pass + + @property + def device(self): + return self.accelerator.device + + @abstractmethod + def _build_model(self, cfg): + pass + + @abstractmethod + def infer_single(self, *args, **kwargs): + pass + + @abstractmethod + def infer(self): + pass + + def run(self): + self.infer() diff --git a/LHM/runners/infer/human_lrm.py b/LHM/runners/infer/human_lrm.py new file mode 100644 index 0000000000000000000000000000000000000000..85b95b1f82d755b66bcfd90cfbe77d975cdd677f --- /dev/null +++ b/LHM/runners/infer/human_lrm.py @@ -0,0 +1,984 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu & Xiaodong Gu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-1 17:30:37 +# @Function : Inference code for human_lrm model + +import argparse +import os +import pdb +import time + +import cv2 +import numpy as np +import torch +from accelerate.logging import get_logger +from omegaconf import OmegaConf +from PIL import Image +from tqdm.auto import tqdm + +from engine.pose_estimation.pose_estimator import PoseEstimator +from engine.SegmentAPI.SAM import Bbox, SAM2Seg +from LHM.datasets.cam_utils import ( + build_camera_principle, + build_camera_standard, + create_intrinsics, + surrounding_views_linspace, +) +from LHM.models.modeling_human_lrm import ModelHumanLRM +from LHM.runners import REGISTRY_RUNNERS +from LHM.runners.infer.utils import ( + calc_new_tgt_size_by_aspect, + center_crop_according_to_mask, + prepare_motion_seqs, + resize_image_keepaspect_np, +) +from LHM.utils.face_detector import FaceDetector +from LHM.utils.hf_hub import wrap_model_hub +from LHM.utils.logging import configure_logger +# from LHM.utils.video import images_to_video +from LHM.utils.ffmpeg_utils import images_to_video + +from .base_inferrer import Inferrer + +logger = get_logger(__name__) + + +def avaliable_device(): + if torch.cuda.is_available(): + current_device_id = torch.cuda.current_device() + device = f"cuda:{current_device_id}" + else: + device = "cpu" + + return device + + +def resize_with_padding(img, target_size, padding_color=(255, 255, 255)): + target_w, target_h = target_size + h, w = img.shape[:2] + + ratio = min(target_w / w, target_h / h) + new_w = int(w * ratio) + new_h = int(h * ratio) + + resized = cv2.resize(img, (new_w, new_h), interpolation=cv2.INTER_AREA) + + dw = target_w - new_w + dh = target_h - new_h + top = dh // 2 + bottom = dh - top + left = dw // 2 + right = dw - left + + padded = cv2.copyMakeBorder( + resized, + top=top, + bottom=bottom, + left=left, + right=right, + borderType=cv2.BORDER_CONSTANT, + value=padding_color, + ) + + return padded + + +def get_bbox(mask): + height, width = mask.shape + pha = mask / 255.0 + pha[pha < 0.5] = 0.0 + pha[pha >= 0.5] = 1.0 + + # obtain bbox + _h, _w = np.where(pha == 1) + + whwh = [ + _w.min().item(), + _h.min().item(), + _w.max().item(), + _h.max().item(), + ] + + box = Bbox(whwh) + + # scale box to 1.05 + scale_box = box.scale(1.1, width=width, height=height) + return scale_box + + +def infer_preprocess_image( + 
rgb_path, + mask, + intr, + pad_ratio, + bg_color, + max_tgt_size, + aspect_standard, + enlarge_ratio, + render_tgt_size, + multiply, + need_mask=True, +): + """inferece + image, _, _ = preprocess_image(image_path, mask_path=None, intr=None, pad_ratio=0, bg_color=1.0, + max_tgt_size=896, aspect_standard=aspect_standard, enlarge_ratio=[1.0, 1.0], + render_tgt_size=source_size, multiply=14, need_mask=True) + + """ + + rgb = np.array(Image.open(rgb_path)) + rgb_raw = rgb.copy() + + bbox = get_bbox(mask) + bbox_list = bbox.get_box() + + rgb = rgb[bbox_list[1] : bbox_list[3], bbox_list[0] : bbox_list[2]] + mask = mask[bbox_list[1] : bbox_list[3], bbox_list[0] : bbox_list[2]] + + h, w, _ = rgb.shape + assert w < h + cur_ratio = h / w + scale_ratio = cur_ratio / aspect_standard + + target_w = int(min(w * scale_ratio, h)) + offset_w = (target_w - w) // 2 + # resize to target ratio. + if offset_w > 0: + rgb = np.pad( + rgb, + ((0, 0), (offset_w, offset_w), (0, 0)), + mode="constant", + constant_values=255, + ) + mask = np.pad( + mask, + ((0, 0), (offset_w, offset_w)), + mode="constant", + constant_values=0, + ) + else: + offset_w = -offset_w + rgb = rgb[:,offset_w:-offset_w,:] + mask = mask[:,offset_w:-offset_w] + + # resize to target ratio. + + rgb = np.pad( + rgb, + ((0, 0), (offset_w, offset_w), (0, 0)), + mode="constant", + constant_values=255, + ) + + mask = np.pad( + mask, + ((0, 0), (offset_w, offset_w)), + mode="constant", + constant_values=0, + ) + + rgb = rgb / 255.0 # normalize to [0, 1] + mask = mask / 255.0 + + mask = (mask > 0.5).astype(np.float32) + rgb = rgb[:, :, :3] * mask[:, :, None] + bg_color * (1 - mask[:, :, None]) + + # resize to specific size require by preprocessor of smplx-estimator. + rgb = resize_image_keepaspect_np(rgb, max_tgt_size) + mask = resize_image_keepaspect_np(mask, max_tgt_size) + + # crop image to enlarge human area. 
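    # (center_crop_according_to_mask crops around the foreground at the standard aspect
    #  ratio and returns the x/y crop offsets so the principal point can be corrected.)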
+ rgb, mask, offset_x, offset_y = center_crop_according_to_mask( + rgb, mask, aspect_standard, enlarge_ratio + ) + if intr is not None: + intr[0, 2] -= offset_x + intr[1, 2] -= offset_y + + # resize to render_tgt_size for training + + tgt_hw_size, ratio_y, ratio_x = calc_new_tgt_size_by_aspect( + cur_hw=rgb.shape[:2], + aspect_standard=aspect_standard, + tgt_size=render_tgt_size, + multiply=multiply, + ) + + rgb = cv2.resize( + rgb, dsize=(tgt_hw_size[1], tgt_hw_size[0]), interpolation=cv2.INTER_AREA + ) + mask = cv2.resize( + mask, dsize=(tgt_hw_size[1], tgt_hw_size[0]), interpolation=cv2.INTER_AREA + ) + + if intr is not None: + + # ******************** Merge *********************** # + intr = scale_intrs(intr, ratio_x=ratio_x, ratio_y=ratio_y) + assert ( + abs(intr[0, 2] * 2 - rgb.shape[1]) < 2.5 + ), f"{intr[0, 2] * 2}, {rgb.shape[1]}" + assert ( + abs(intr[1, 2] * 2 - rgb.shape[0]) < 2.5 + ), f"{intr[1, 2] * 2}, {rgb.shape[0]}" + + # ******************** Merge *********************** # + intr[0, 2] = rgb.shape[1] // 2 + intr[1, 2] = rgb.shape[0] // 2 + + rgb = torch.from_numpy(rgb).float().permute(2, 0, 1).unsqueeze(0) # [1, 3, H, W] + mask = ( + torch.from_numpy(mask[:, :, None]).float().permute(2, 0, 1).unsqueeze(0) + ) # [1, 1, H, W] + return rgb, mask, intr + + +def parse_configs(): + + parser = argparse.ArgumentParser() + parser.add_argument("--config", type=str) + parser.add_argument("--infer", type=str) + args, unknown = parser.parse_known_args() + + cfg = OmegaConf.create() + cli_cfg = OmegaConf.from_cli(unknown) + + # parse from ENV + if os.environ.get("APP_INFER") is not None: + args.infer = os.environ.get("APP_INFER") + if os.environ.get("APP_MODEL_NAME") is not None: + cli_cfg.model_name = os.environ.get("APP_MODEL_NAME") + + args.config = args.infer if args.config is None else args.config + if args.config is not None: + cfg_train = OmegaConf.load(args.config) + cfg.source_size = cfg_train.dataset.source_image_res + try: + cfg.src_head_size = cfg_train.dataset.src_head_size + except: + cfg.src_head_size = 112 + cfg.render_size = cfg_train.dataset.render_image.high + _relative_path = os.path.join( + cfg_train.experiment.parent, + cfg_train.experiment.child, + os.path.basename(cli_cfg.model_name).split("_")[-1], + ) + + cfg.save_tmp_dump = os.path.join("exps", "save_tmp", _relative_path) + cfg.image_dump = os.path.join("exps", "images", _relative_path) + cfg.video_dump = os.path.join("exps", "videos", _relative_path) # output path + + if args.infer is not None: + cfg_infer = OmegaConf.load(args.infer) + cfg.merge_with(cfg_infer) + cfg.setdefault( + "save_tmp_dump", os.path.join("exps", cli_cfg.model_name, "save_tmp") + ) + cfg.setdefault("image_dump", os.path.join("exps", cli_cfg.model_name, "images")) + cfg.setdefault( + "video_dump", os.path.join("dumps", cli_cfg.model_name, "videos") + ) + cfg.setdefault("mesh_dump", os.path.join("dumps", cli_cfg.model_name, "meshes")) + + cfg.motion_video_read_fps = 6 + cfg.merge_with(cli_cfg) + + cfg.setdefault("logger", "INFO") + + assert cfg.model_name is not None, "model_name is required" + + return cfg, cfg_train + + +@REGISTRY_RUNNERS.register("infer.human_lrm") +class HumanLRMInferrer(Inferrer): + + EXP_TYPE: str = "human_lrm_sapdino_bh_sd3_5" + # EXP_TYPE: str = "human_lrm_sd3" + + def __init__(self): + super().__init__() + + self.cfg, cfg_train = parse_configs() + # print(self.cfg.logger) + configure_logger( + stream_level=self.cfg.logger.stream_level, + log_level=self.cfg.logger.log_level, + ) # logger function + + 
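        # Auxiliary models: a face detector for the head crop, an SMPL-X pose estimator for
        # shape (betas) and full-body checks, and a SAM2-based segmenter for foreground masks.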
self.facedetect = FaceDetector( + "./pretrained_models/gagatracker/vgghead/vgg_heads_l.trcd", + device=avaliable_device(), + ) + self.pose_estimator = PoseEstimator( + "./pretrained_models/human_model_files/", device=avaliable_device() + ) + self.parsingnet = SAM2Seg() + + self.model: ModelHumanLRM = self._build_model(self.cfg).to(self.device) + + self.motion_dict = dict() + + def _build_model(self, cfg): + from LHM.models import model_dict + + hf_model_cls = wrap_model_hub(model_dict[self.EXP_TYPE]) + + model = hf_model_cls.from_pretrained(cfg.model_name) + return model + + def _default_source_camera( + self, + dist_to_center: float = 2.0, + batch_size: int = 1, + device: torch.device = torch.device("cpu"), + ): + # return: (N, D_cam_raw) + canonical_camera_extrinsics = torch.tensor( + [ + [ + [1, 0, 0, 0], + [0, 0, -1, -dist_to_center], + [0, 1, 0, 0], + ] + ], + dtype=torch.float32, + device=device, + ) + canonical_camera_intrinsics = create_intrinsics( + f=0.75, + c=0.5, + device=device, + ).unsqueeze(0) + source_camera = build_camera_principle( + canonical_camera_extrinsics, canonical_camera_intrinsics + ) + return source_camera.repeat(batch_size, 1) + + def _default_render_cameras( + self, + n_views: int, + batch_size: int = 1, + device: torch.device = torch.device("cpu"), + ): + # return: (N, M, D_cam_render) + render_camera_extrinsics = surrounding_views_linspace( + n_views=n_views, device=device + ) + render_camera_intrinsics = ( + create_intrinsics( + f=0.75, + c=0.5, + device=device, + ) + .unsqueeze(0) + .repeat(render_camera_extrinsics.shape[0], 1, 1) + ) + render_cameras = build_camera_standard( + render_camera_extrinsics, render_camera_intrinsics + ) + return render_cameras.unsqueeze(0).repeat(batch_size, 1, 1) + + def infer_video( + self, + planes: torch.Tensor, + frame_size: int, + render_size: int, + render_views: int, + render_fps: int, + dump_video_path: str, + ): + N = planes.shape[0] + render_cameras = self._default_render_cameras( + n_views=render_views, batch_size=N, device=self.device + ) + render_anchors = torch.zeros(N, render_cameras.shape[1], 2, device=self.device) + render_resolutions = ( + torch.ones(N, render_cameras.shape[1], 1, device=self.device) * render_size + ) + render_bg_colors = ( + torch.ones( + N, render_cameras.shape[1], 1, device=self.device, dtype=torch.float32 + ) + * 1.0 + ) + + frames = [] + for i in range(0, render_cameras.shape[1], frame_size): + frames.append( + self.model.synthesizer( + planes=planes, + cameras=render_cameras[:, i : i + frame_size], + anchors=render_anchors[:, i : i + frame_size], + resolutions=render_resolutions[:, i : i + frame_size], + bg_colors=render_bg_colors[:, i : i + frame_size], + region_size=render_size, + ) + ) + # merge frames + frames = {k: torch.cat([r[k] for r in frames], dim=1) for k in frames[0].keys()} + # dump + os.makedirs(os.path.dirname(dump_video_path), exist_ok=True) + for k, v in frames.items(): + if k == "images_rgb": + images_to_video( + images=v[0], + output_path=dump_video_path, + fps=render_fps, + gradio_codec=self.cfg.app_enabled, + ) + + def crop_face_image(self, image_path): + rgb = np.array(Image.open(image_path)) + rgb = torch.from_numpy(rgb).permute(2, 0, 1) + bbox = self.facedetect(rgb) + head_rgb = rgb[:, int(bbox[1]) : int(bbox[3]), int(bbox[0]) : int(bbox[2])] + head_rgb = head_rgb.permute(1, 2, 0) + head_rgb = head_rgb.cpu().numpy() + return head_rgb + + @torch.no_grad() + def parsing(self, img_path): + parsing_out = self.parsingnet(img_path=img_path, bbox=None) + + alpha = 
(parsing_out.masks * 255).astype(np.uint8) + + return alpha + + def infer_single( + self, + image_path: str, + motion_seqs_dir, + motion_img_dir, + motion_video_read_fps, + export_video: bool, + export_mesh: bool, + dump_tmp_dir: str, # require by extracting motion seq from video, to save some results + dump_image_dir: str, + dump_video_path: str, + shape_param=None, + ): + + if os.path.exists(dump_video_path): + return + source_size = self.cfg.source_size + render_size = self.cfg.render_size + # render_views = self.cfg.render_views + render_fps = 30 + # mesh_size = self.cfg.mesh_size + # mesh_thres = self.cfg.mesh_thres + # frame_size = self.cfg.frame_size + # source_cam_dist = self.cfg.source_cam_dist if source_cam_dist is None else source_cam_dist + aspect_standard = 5.0 / 3 + motion_img_need_mask = self.cfg.get("motion_img_need_mask", False) # False + vis_motion = self.cfg.get("vis_motion", False) # False + + parsing_mask = self.parsing(image_path) + + # prepare reference image + image, _, _ = infer_preprocess_image( + image_path, + mask=parsing_mask, + intr=None, + pad_ratio=0, + bg_color=1.0, + max_tgt_size=896, + aspect_standard=aspect_standard, + enlarge_ratio=[1.0, 1.0], + render_tgt_size=source_size, + multiply=14, + need_mask=True, + ) + try: + src_head_rgb = self.crop_face_image(image_path) + except: + print("w/o head input!") + src_head_rgb = np.zeros((112, 112, 3), dtype=np.uint8) + + import cv2 + + try: + src_head_rgb = cv2.resize( + src_head_rgb, + dsize=(self.cfg.src_head_size, self.cfg.src_head_size), + interpolation=cv2.INTER_AREA, + ) # resize to dino size + except: + src_head_rgb = np.zeros( + (self.cfg.src_head_size, self.cfg.src_head_size, 3), dtype=np.uint8 + ) + + src_head_rgb = ( + torch.from_numpy(src_head_rgb / 255.0).float().permute(2, 0, 1).unsqueeze(0) + ) # [1, 3, H, W] + + # save masked image for vis + # save_ref_img_path = os.path.join( + # dump_tmp_dir, "refer_" + os.path.basename(image_path) + # ) + save_ref_img_path = os.path.join( + dump_tmp_dir, "output.png" + ) + vis_ref_img = (image[0].permute(1, 2, 0).cpu().detach().numpy() * 255).astype( + np.uint8 + ) + Image.fromarray(vis_ref_img).save(save_ref_img_path) + + # read motion seq + + motion_name = os.path.dirname( + motion_seqs_dir[:-1] if motion_seqs_dir[-1] == "/" else motion_seqs_dir + ) + motion_name = os.path.basename(motion_name) + + if motion_name in self.motion_dict: + motion_seq = self.motion_dict[motion_name] + else: + motion_seq = prepare_motion_seqs( + motion_seqs_dir, + motion_img_dir, + save_root=dump_tmp_dir, + fps=motion_video_read_fps, + bg_color=1.0, + aspect_standard=aspect_standard, + enlarge_ratio=[1.0, 1, 0], + render_image_res=render_size, + multiply=16, + need_mask=motion_img_need_mask, + vis_motion=vis_motion, + ) + self.motion_dict[motion_name] = motion_seq + + camera_size = len(motion_seq["motion_seqs"]) + + device = "cuda" + dtype = torch.float32 + shape_param = torch.tensor(shape_param, dtype=dtype).unsqueeze(0) + + self.model.to(dtype) + + smplx_params = motion_seq['smplx_params'] + smplx_params['betas'] = shape_param.to(device) + gs_model_list, query_points, transform_mat_neutral_pose = self.model.infer_single_view( + image.unsqueeze(0).to(device, dtype), + src_head_rgb.unsqueeze(0).to(device, dtype), + None, + None, + render_c2ws=motion_seq["render_c2ws"].to(device), + render_intrs=motion_seq["render_intrs"].to(device), + render_bg_colors=motion_seq["render_bg_colors"].to(device), + smplx_params={ + k: v.to(device) for k, v in smplx_params.items() + }, + ) + + 
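        # infer_single_view builds the canonical 3DGS avatar once from the reference image;
        # the loop below only re-poses and re-renders it chunk by chunk to bound GPU memory.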
start_time = time.time() + batch_dict = dict() + batch_size = 40 # avoid memeory out! + + for batch_i in range(0, camera_size, batch_size): + with torch.no_grad(): + # TODO check device and dtype + # dict_keys(['comp_rgb', 'comp_rgb_bg', 'comp_mask', 'comp_depth', '3dgs']) + + keys = [ + "root_pose", + "body_pose", + "jaw_pose", + "leye_pose", + "reye_pose", + "lhand_pose", + "rhand_pose", + "trans", + "focal", + "princpt", + "img_size_wh", + "expr", + ] + batch_smplx_params = dict() + batch_smplx_params["betas"] = shape_param.to(device) + batch_smplx_params['transform_mat_neutral_pose'] = transform_mat_neutral_pose + for key in keys: + batch_smplx_params[key] = motion_seq["smplx_params"][key][ + :, batch_i : batch_i + batch_size + ].to(device) + + res = self.model.animation_infer(gs_model_list, query_points, batch_smplx_params, + render_c2ws=motion_seq["render_c2ws"][ + :, batch_i : batch_i + batch_size + ].to(device), + render_intrs=motion_seq["render_intrs"][ + :, batch_i : batch_i + batch_size + ].to(device), + render_bg_colors=motion_seq["render_bg_colors"][ + :, batch_i : batch_i + batch_size + ].to(device), + ) + + for accumulate_key in ["comp_rgb", "comp_mask"]: + if accumulate_key not in batch_dict: + batch_dict[accumulate_key] = [] + batch_dict[accumulate_key].append(res[accumulate_key].detach().cpu()) + del res + torch.cuda.empty_cache() + + for accumulate_key in ["comp_rgb", "comp_mask"]: + batch_dict[accumulate_key] = torch.cat(batch_dict[accumulate_key], dim=0) + + print(f"time elapsed: {time.time() - start_time}") + rgb = batch_dict["comp_rgb"].detach().cpu().numpy() # [Nv, H, W, 3], 0-1 + mask = batch_dict["comp_mask"].detach().cpu().numpy() # [Nv, H, W, 3], 0-1 + mask[mask < 0.5] = 0.0 + + rgb = rgb * mask + (1 - mask) * 1 + rgb = np.clip(rgb * 255, 0, 255).astype(np.uint8) + + if vis_motion: + # print(rgb.shape, motion_seq["vis_motion_render"].shape) + + vis_ref_img = np.tile( + cv2.resize(vis_ref_img, (rgb[0].shape[1], rgb[0].shape[0]))[ + None, :, :, : + ], + (rgb.shape[0], 1, 1, 1), + ) + rgb = np.concatenate( + [rgb, motion_seq["vis_motion_render"], vis_ref_img], axis=2 + ) + + os.makedirs(os.path.dirname(dump_video_path), exist_ok=True) + + images_to_video( + rgb, + output_path=dump_video_path, + fps=render_fps, + gradio_codec=False, + verbose=True, + ) + + def infer(self, gradio_demo_image=None, gradio_motion_file=None, gradio_masked_image=None, gradio_video_save_path=None): + image_paths = [] + + if gradio_demo_image is None: + if os.path.isfile(self.cfg.image_input): + omit_prefix = os.path.dirname(self.cfg.image_input) + image_paths.append(self.cfg.image_input) + else: + omit_prefix = self.cfg.image_input + suffixes = (".jpg", ".jpeg", ".png", ".webp", ".JPG") + for root, dirs, files in os.walk(self.cfg.image_input): + for file in files: + if file.endswith(suffixes): + image_paths.append(os.path.join(root, file)) + image_paths.sort() + else: + omit_prefix = os.path.dirname(gradio_demo_image) + image_paths.append(gradio_demo_image) + + # alloc to each DDP worker + image_paths = image_paths[ + self.accelerator.process_index :: self.accelerator.num_processes + ] + + for image_path in tqdm(image_paths, + disable=not self.accelerator.is_local_main_process, + ): + + # prepare dump paths + image_name = os.path.basename(image_path) + uid = image_name.split(".")[0] + subdir_path = os.path.dirname(image_path).replace(omit_prefix, "") + subdir_path = ( + subdir_path[1:] if subdir_path.startswith("/") else subdir_path + ) + print("subdir_path and uid:", subdir_path, uid) + + 
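            # Resolve the motion-sequence directory and dump paths; the gradio_* arguments,
            # when given, override the config so the same runner can back the interactive demo.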
if gradio_motion_file is None: + # setting config + motion_seqs_dir = self.cfg.motion_seqs_dir + else: + motion_seqs_dir = gradio_motion_file + + motion_name = os.path.dirname( + motion_seqs_dir[:-1] if motion_seqs_dir[-1] == "/" else motion_seqs_dir + ) + + motion_name = os.path.basename(motion_name) + + + if gradio_video_save_path is not None: + dump_video_path = gradio_video_save_path + else: + dump_video_path = os.path.join( + self.cfg.video_dump, + subdir_path, + motion_name, + f"{uid}.mp4", + ) + # print(dump_video_path) + if gradio_masked_image is not None: + dump_image_dir = os.path.dirname(gradio_masked_image) + else: + dump_image_dir = os.path.join( + self.cfg.image_dump, + subdir_path, + ) + # dump_tmp_dir = os.path.join(self.cfg.image_dump, subdir_path, "tmp_res") + os.makedirs(dump_image_dir, exist_ok=True) + # os.makedirs(dump_tmp_dir, exist_ok=True) + print(image_path, motion_seqs_dir, dump_image_dir, dump_video_path) + # try: + shape_pose = self.pose_estimator(image_path) + assert shape_pose.is_full_body, f"The input image is illegal, {shape_pose.msg}" + + self.infer_single( + image_path, + motion_seqs_dir=motion_seqs_dir, + motion_img_dir=None, + motion_video_read_fps=30, + export_video=False, + export_mesh=False, + dump_tmp_dir=dump_image_dir, + dump_image_dir=dump_image_dir, + dump_video_path=dump_video_path, + shape_param=shape_pose.beta, + ) + # if gradio_masked_image is not None: + # os.system("cp {} {}".format()) + # if gradio_video_save_path is not None: + # os.system("cp {} {}".format(dump_video_path, gradio_video_save_path)) + return True + # except: + # print("no find human or not full body!") + # return False + + +@REGISTRY_RUNNERS.register("infer.human_lrm_video") +class HumanLRMVideoInferrer(HumanLRMInferrer): + """video reconstruction for in the wild data""" + + EXP_TYPE: str = "human_lrm_sapdino_bh_sd3_5" + + def infer_single( + self, + image_path: str, + motion_seqs_dir, + motion_img_dir, + motion_video_read_fps, + export_video: bool, + export_mesh: bool, + dump_tmp_dir: str, # require by extracting motion seq from video, to save some results + dump_image_dir: str, + dump_video_path: str, + ): + source_size = self.cfg.source_size + render_size = self.cfg.render_size + # render_views = self.cfg.render_views + render_fps = self.cfg.render_fps + # mesh_size = self.cfg.mesh_size + # mesh_thres = self.cfg.mesh_thres + # frame_size = self.cfg.frame_size + # source_cam_dist = self.cfg.source_cam_dist if source_cam_dist is None else source_cam_dist + aspect_standard = 5.0 / 3 + motion_img_need_mask = self.cfg.get("motion_img_need_mask", False) # False + vis_motion = self.cfg.get("vis_motion", False) # False + + parsing_mask = self.parsing(image_path) + + save_dir = os.path.join(dump_image_dir, "rgb") + if os.path.exists(save_dir): + return + + # prepare reference image + image, _, _ = infer_preprocess_image( + image_path, + mask=parsing_mask, + intr=None, + pad_ratio=0, + bg_color=1.0, + max_tgt_size=896, + aspect_standard=aspect_standard, + enlarge_ratio=[1.0, 1.0], + render_tgt_size=source_size, + multiply=14, + need_mask=True, + ) + src_head_rgb = self.crop_face_image(image_path) + + import cv2 + + try: + src_head_rgb = cv2.resize( + src_head_rgb, + dsize=(self.cfg.src_head_size, self.cfg.src_head_size), + interpolation=cv2.INTER_AREA, + ) # resize to dino size + except: + src_head_rgb = np.zeros( + (self.cfg.src_head_size, self.cfg.src_head_size, 3), dtype=np.uint8 + ) + + src_head_rgb = ( + torch.from_numpy(src_head_rgb / 255.0).float().permute(2, 0, 
1).unsqueeze(0) + ) # [1, 3, H, W] + + # save masked image for vis + save_ref_img_path = os.path.join( + dump_tmp_dir, "refer_" + os.path.basename(image_path) + ) + vis_ref_img = (image[0].permute(1, 2, 0).cpu().detach().numpy() * 255).astype( + np.uint8 + ) + + Image.fromarray(vis_ref_img).save(save_ref_img_path) + + # read motion seq + + if not os.path.exists(motion_seqs_dir): + return + + motion_seq = prepare_motion_seqs( + motion_seqs_dir, + os.path.basename(image_path), + save_root=dump_tmp_dir, + fps=motion_video_read_fps, + bg_color=1.0, + aspect_standard=aspect_standard, + enlarge_ratio=[1.0, 1, 0], + render_image_res=render_size, + multiply=16, + need_mask=motion_img_need_mask, + vis_motion=vis_motion, + ) + motion_seqs = motion_seq["motion_seqs"] + + device = "cuda" + dtype = torch.float32 + self.model.to(dtype) + + start_time = time.time() + + with torch.no_grad(): + # TODO check device and dtype + # dict_keys(['comp_rgb', 'comp_rgb_bg', 'comp_mask', 'comp_depth', '3dgs']) + render_intrs = motion_seq["render_intrs"].to(device) + render_intrs[..., 0, 0] *= 2 + render_intrs[..., 1, 1] *= 2 + render_intrs[..., 0, 2] *= 2 + render_intrs[..., 1, 2] *= 2 + # smplx_params["focal"] *= 2 + # smplx_params["princpt"] *= 2 + # smplx_params["img_size_wh"] *= 2 + + res = self.model.infer_single_view( + image.unsqueeze(0).to(device, dtype), + src_head_rgb.unsqueeze(0).to(device, dtype), + None, + None, + render_c2ws=motion_seq["render_c2ws"].to(device), + render_intrs=render_intrs, + render_bg_colors=motion_seq["render_bg_colors"].to(device), + smplx_params={ + k: v.to(device) for k, v in motion_seq["smplx_params"].items() + }, + ) + + print(f"time elapsed: {time.time() - start_time}") + rgb = res["comp_rgb"].detach().cpu().numpy() # [Nv, H, W, 3], 0-1 + mask = res["comp_mask"].detach().cpu().numpy() # [Nv, H, W, 3], 0-1 + # mask[mask > 0.5] = 1.0 + # mask[mask < 0.4] = 0.0 + rgb = rgb * mask + (1 - mask) * 1 + + rgb = np.clip(rgb * 255, 0, 255).astype(np.uint8) + mask = np.clip(mask * 255, 0, 255).astype(np.uint8) + rgba_numpy = np.concatenate([rgb, mask], axis=-1) + + for rgb_i, (rgba, motion_seq) in enumerate(zip(rgba_numpy, motion_seqs)): + + rgb_i = int(os.path.basename(motion_seq).replace(".json", "")) + save_file = os.path.join(dump_image_dir, "rgb", f"{rgb_i:05d}.png") + os.makedirs(os.path.dirname(save_file), exist_ok=True) + Image.fromarray(rgba).save(save_file) + + def infer(self): + + image_paths = [] + + omit_prefix = self.cfg.image_input + suffixes = (".jpg", ".jpeg", ".png", ".webp") + + front_view_dict = dict() + with open(os.path.join(self.cfg.image_input, "front_view.txt"), "r") as f: + for line in f.readlines(): + name, idx = line.strip().split(" ") + idx = int(idx) + front_view_dict[name] = idx + + for root, dirs, files in os.walk(self.cfg.image_input): + for dir in dirs: + if dir in front_view_dict: + idx = front_view_dict[dir] + else: + raise ValueError("no front view") + img_path = os.path.join(root, dir, f"{idx:06d}.png") + if dir in front_view_dict: + print(img_path) + image_paths.append(img_path) + + image_paths.sort() + + # alloc to each DDP worke + image_paths = image_paths[ + self.accelerator.process_index :: self.accelerator.num_processes + ] + + for image_path in tqdm( + image_paths, disable=not self.accelerator.is_local_main_process + ): + + # prepare dump paths + image_name = os.path.basename(image_path) + uid = image_name.split(".")[0] + subdir_path = os.path.dirname(image_path).replace(omit_prefix, "") + subdir_path = ( + subdir_path[1:] if 
subdir_path.startswith("/") else subdir_path + ) + print("subdir_path and uid:", subdir_path, uid) + + # setting config + motion_seqs_dir = self.cfg.motion_seqs_dir + motion_name = os.path.dirname( + motion_seqs_dir[:-1] if motion_seqs_dir[-1] == "/" else motion_seqs_dir + ) + motion_name = os.path.basename(motion_name) + dump_video_path = os.path.join( + self.cfg.video_dump.replace("videos", "videos_benchmark"), + subdir_path, + motion_name, + f"{uid}.mp4", + ) + dump_image_dir = os.path.join( + self.cfg.image_dump.replace("images", "images_benchmark"), + subdir_path, + ) + + dump_tmp_dir = os.path.join(self.cfg.image_dump, subdir_path, "tmp_res") + os.makedirs(dump_image_dir, exist_ok=True) + os.makedirs(dump_tmp_dir, exist_ok=True) + + item_name = os.path.basename(os.path.dirname(image_path)) + + self.infer_single( + image_path, + motion_seqs_dir=os.path.join(self.cfg.motion_seqs_dir, item_name), + motion_img_dir=self.cfg.motion_img_dir, + motion_video_read_fps=self.cfg.motion_video_read_fps, + export_video=self.cfg.export_video, + export_mesh=self.cfg.export_mesh, + dump_tmp_dir=dump_tmp_dir, + dump_image_dir=dump_image_dir, + dump_video_path=dump_video_path, + ) diff --git a/LHM/runners/infer/utils.py b/LHM/runners/infer/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..85c33b32433a915e57d40b9fc04faf4b39fe3a16 --- /dev/null +++ b/LHM/runners/infer/utils.py @@ -0,0 +1,864 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-10 18:56:08 +# @Function : FUNCTION_DESCRIPTION + +import glob +import json +import math +import os +import pdb +from collections import defaultdict + +import cv2 +import decord +import numpy as np +import torch +from PIL import Image +from pytorch3d.io import save_ply +from pytorch3d.transforms import axis_angle_to_matrix, matrix_to_axis_angle + + +def generate_rotation_matrix_y(degrees): + theta = math.radians(degrees) + cos_theta = math.cos(theta) + sin_theta = math.sin(theta) + + R = [[cos_theta, 0, sin_theta], [0, 1, 0], [-sin_theta, 0, cos_theta]] + + return np.asarray(R, dtype=np.float32) + + +def scale_intrs(intrs, ratio_x, ratio_y): + if len(intrs.shape) >= 3: + intrs[:, 0] = intrs[:, 0] * ratio_x + intrs[:, 1] = intrs[:, 1] * ratio_y + else: + intrs[0] = intrs[0] * ratio_x + intrs[1] = intrs[1] * ratio_y + return intrs + + +def calc_new_tgt_size(cur_hw, tgt_size, multiply): + ratio = tgt_size / min(cur_hw) + tgt_size = int(ratio * cur_hw[0]), int(ratio * cur_hw[1]) + tgt_size = ( + int(tgt_size[0] / multiply) * multiply, + int(tgt_size[1] / multiply) * multiply, + ) + ratio_y, ratio_x = tgt_size[0] / cur_hw[0], tgt_size[1] / cur_hw[1] + return tgt_size, ratio_y, ratio_x + + +def calc_new_tgt_size_by_aspect(cur_hw, aspect_standard, tgt_size, multiply): + assert abs(cur_hw[0] / cur_hw[1] - aspect_standard) < 0.03 + tgt_size = tgt_size * aspect_standard, tgt_size + tgt_size = ( + int(tgt_size[0] / multiply) * multiply, + int(tgt_size[1] / multiply) * multiply, + ) + ratio_y, ratio_x = tgt_size[0] / cur_hw[0], tgt_size[1] / cur_hw[1] + return tgt_size, ratio_y, ratio_x + + +def _load_pose(pose): + intrinsic = torch.eye(4) + intrinsic[0, 0] = pose["focal"][0] + intrinsic[1, 1] = pose["focal"][1] + intrinsic[0, 2] = pose["princpt"][0] + intrinsic[1, 2] = pose["princpt"][1] + intrinsic = intrinsic.float() + + c2w = torch.eye(4) + # c2w[:3, :3] = torch.tensor(pose["R"]) + # c2w[3, :3] = torch.tensor(pose["t"]) + c2w = c2w.float() + + return c2w, 
intrinsic + + +def img_center_padding(img_np, pad_ratio): + + ori_w, ori_h = img_np.shape[:2] + + w = round((1 + pad_ratio) * ori_w) + h = round((1 + pad_ratio) * ori_h) + + if len(img_np.shape) > 2: + img_pad_np = np.zeros((w, h, img_np.shape[2]), dtype=np.uint8) + else: + img_pad_np = np.zeros((w, h), dtype=np.uint8) + offset_h, offset_w = (w - img_np.shape[0]) // 2, (h - img_np.shape[1]) // 2 + img_pad_np[ + offset_h : offset_h + img_np.shape[0] :, offset_w : offset_w + img_np.shape[1] + ] = img_np + + return img_pad_np + + +def resize_image_keepaspect_np(img, max_tgt_size): + """ + similar to ImageOps.contain(img_pil, (img_size, img_size)) # keep the same aspect ratio + """ + h, w = img.shape[:2] + ratio = max_tgt_size / max(h, w) + new_h, new_w = round(h * ratio), round(w * ratio) + return cv2.resize(img, dsize=(new_w, new_h), interpolation=cv2.INTER_AREA) + + +def center_crop_according_to_mask(img, mask, aspect_standard, enlarge_ratio): + """ + img: [H, W, 3] + mask: [H, W] + """ + ys, xs = np.where(mask > 0) + + if len(xs) == 0 or len(ys) == 0: + raise Exception("empty mask") + + x_min = np.min(xs) + x_max = np.max(xs) + y_min = np.min(ys) + y_max = np.max(ys) + + center_x, center_y = img.shape[1] // 2, img.shape[0] // 2 + + half_w = max(abs(center_x - x_min), abs(center_x - x_max)) + half_h = max(abs(center_y - y_min), abs(center_y - y_max)) + half_w_raw = half_w + half_h_raw = half_h + aspect = half_h / half_w + + if aspect >= aspect_standard: + half_w = round(half_h / aspect_standard) + else: + half_h = round(half_w * aspect_standard) + + # not exceed original image + if half_h > center_y: + half_w = round(half_h_raw / aspect_standard) + half_h = half_h_raw + if half_w > center_x: + half_h = round(half_w_raw * aspect_standard) + half_w = half_w_raw + + if abs(enlarge_ratio[0] - 1) > 0.01 or abs(enlarge_ratio[1] - 1) > 0.01: + enlarge_ratio_min, enlarge_ratio_max = enlarge_ratio + enlarge_ratio_max_real = min(center_y / half_h, center_x / half_w) + enlarge_ratio_max = min(enlarge_ratio_max_real, enlarge_ratio_max) + enlarge_ratio_min = min(enlarge_ratio_max_real, enlarge_ratio_min) + enlarge_ratio_cur = ( + np.random.rand() * (enlarge_ratio_max - enlarge_ratio_min) + + enlarge_ratio_min + ) + half_h, half_w = round(enlarge_ratio_cur * half_h), round( + enlarge_ratio_cur * half_w + ) + + assert half_h <= center_y + assert half_w <= center_x + assert abs(half_h / half_w - aspect_standard) < 0.03 + + offset_x = center_x - half_w + offset_y = center_y - half_h + + new_img = img[offset_y : offset_y + 2 * half_h, offset_x : offset_x + 2 * half_w] + new_mask = mask[offset_y : offset_y + 2 * half_h, offset_x : offset_x + 2 * half_w] + + return new_img, new_mask, offset_x, offset_y + + +def preprocess_image( + rgb_path, + mask_path, + intr, + pad_ratio, + bg_color, + max_tgt_size, + aspect_standard, + enlarge_ratio, + render_tgt_size, + multiply, + need_mask=True, +): + """inferece + image, _, _ = preprocess_image(image_path, mask_path=None, intr=None, pad_ratio=0, bg_color=1.0, + max_tgt_size=896, aspect_standard=aspect_standard, enlarge_ratio=[1.0, 1.0], + render_tgt_size=source_size, multiply=14, need_mask=True) + + """ + + rgb = np.array(Image.open(rgb_path)) + rgb_raw = rgb.copy() + if pad_ratio > 0: + rgb = img_center_padding(rgb, pad_ratio) + + rgb = rgb / 255.0 # normalize to [0, 1] + if need_mask: + if rgb.shape[2] < 4: + if mask_path is not None: + mask = np.array(Image.open(mask_path)) + else: + from rembg import remove + + # rembg cuda version -> error + mask = 
remove(rgb_raw[:, :, (2, 1, 0)])[:, :, -1] # np require [bgr] + print("rmbg mask: ", mask.min(), mask.max(), mask.shape) + if pad_ratio > 0: + mask = img_center_padding(mask, pad_ratio) + mask = mask / 255.0 + else: + # rgb: [H, W, 4] + assert rgb.shape[2] == 4 + mask = rgb[:, :, 3] # [H, W] + else: + # just placeholder + mask = np.ones_like(rgb[:, :, 0]) + + mask = (mask > 0.5).astype(np.float32) + rgb = rgb[:, :, :3] * mask[:, :, None] + bg_color * (1 - mask[:, :, None]) + + # resize to specific size require by preprocessor of smplx-estimator. + rgb = resize_image_keepaspect_np(rgb, max_tgt_size) + mask = resize_image_keepaspect_np(mask, max_tgt_size) + + # crop image to enlarge human area. + rgb, mask, offset_x, offset_y = center_crop_according_to_mask( + rgb, mask, aspect_standard, enlarge_ratio + ) + if intr is not None: + intr[0, 2] -= offset_x + intr[1, 2] -= offset_y + + # resize to render_tgt_size for training + + tgt_hw_size, ratio_y, ratio_x = calc_new_tgt_size_by_aspect( + cur_hw=rgb.shape[:2], + aspect_standard=aspect_standard, + tgt_size=render_tgt_size, + multiply=multiply, + ) + + rgb = cv2.resize( + rgb, dsize=(tgt_hw_size[1], tgt_hw_size[0]), interpolation=cv2.INTER_AREA + ) + mask = cv2.resize( + mask, dsize=(tgt_hw_size[1], tgt_hw_size[0]), interpolation=cv2.INTER_AREA + ) + + if intr is not None: + + # ******************** Merge *********************** # + intr = scale_intrs(intr, ratio_x=ratio_x, ratio_y=ratio_y) + assert ( + abs(intr[0, 2] * 2 - rgb.shape[1]) < 2.5 + ), f"{intr[0, 2] * 2}, {rgb.shape[1]}" + assert ( + abs(intr[1, 2] * 2 - rgb.shape[0]) < 2.5 + ), f"{intr[1, 2] * 2}, {rgb.shape[0]}" + + # ******************** Merge *********************** # + intr[0, 2] = rgb.shape[1] // 2 + intr[1, 2] = rgb.shape[0] // 2 + + rgb = torch.from_numpy(rgb).float().permute(2, 0, 1).unsqueeze(0) # [1, 3, H, W] + mask = ( + torch.from_numpy(mask[:, :, None]).float().permute(2, 0, 1).unsqueeze(0) + ) # [1, 1, H, W] + return rgb, mask, intr + + +def extract_imgs_from_video(video_file, save_root, fps): + print(f"extract_imgs_from_video:{video_file}") + vr = decord.VideoReader(video_file) + for i in range(0, len(vr), fps): + frame = vr[i].asnumpy() + save_path = os.path.join(save_root, f"{i:05d}.jpg") + cv2.imwrite(save_path, frame[:, :, (2, 1, 0)]) + + +def predict_motion_seqs_from_images(image_folder: str, save_root, fps=6): + id_name = os.path.splitext(os.path.basename(image_folder))[0] + if os.path.isfile(image_folder) and ( + image_folder.endswith("mp4") or image_folder.endswith("move") + ): + save_frame_root = os.path.join(save_root, "extracted_frames", id_name) + if not os.path.exists(save_frame_root): + os.makedirs(save_frame_root, exist_ok=True) + extract_imgs_from_video( + video_file=image_folder, save_root=save_frame_root, fps=fps + ) + else: + print("skip extract_imgs_from_video......") + image_folder = save_frame_root + + image_folder_abspath = os.path.abspath(image_folder) + print(f"predict motion seq:{image_folder_abspath}") + save_smplx_root = image_folder + "_smplx_params_mhmr" + if not os.path.exists(save_smplx_root): + cmd = f"cd thirdparty/multi-hmr && python infer_batch.py --data_root {image_folder_abspath} --out_folder {image_folder_abspath} --crop_head --crop_hand --pad_ratio 0.2 --smplify" + os.system(cmd) + else: + print("skip predict smplx.........") + return save_smplx_root, image_folder + + +def render_smplx_mesh( + smplx_params, render_intrs, human_model_path="./pretrained_models/human_model_files" +): + from LHM.models.rendering.smplx import smplx 
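    # (both imports are kept local: the SMPL-X layer and the mesh renderer are only
    #  needed when vis_motion is enabled)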
+ from LHM.models.rendering.smplx.vis_utils import render_mesh + + layer_arg = { + "create_global_orient": False, + "create_body_pose": False, + "create_left_hand_pose": False, + "create_right_hand_pose": False, + "create_jaw_pose": False, + "create_leye_pose": False, + "create_reye_pose": False, + "create_betas": False, + "create_expression": False, + "create_transl": False, + } + + smplx_layer = smplx.create( + human_model_path, + "smplx", + gender="neutral", + num_betas=10, + num_expression_coeffs=100, + use_pca=False, + use_face_contour=False, + flat_hand_mean=True, + **layer_arg, + ) + + body_pose = smplx_params["body_pose"] + num_view = body_pose.shape[0] + shape_param = smplx_params["betas"] + if "expr" not in smplx_params: + # supports v2.0 data format + smplx_params["expr"] = torch.zeros((num_view, 100)) + + output = smplx_layer( + global_orient=smplx_params["root_pose"], + body_pose=smplx_params["body_pose"].view(num_view, -1), + left_hand_pose=smplx_params["lhand_pose"].view(num_view, -1), + right_hand_pose=smplx_params["rhand_pose"].view(num_view, -1), + jaw_pose=smplx_params["jaw_pose"], + leye_pose=smplx_params["leye_pose"], + reye_pose=smplx_params["reye_pose"], + expression=smplx_params["expr"], + betas=smplx_params["betas"].unsqueeze(0).repeat(num_view, 1), # 10 blendshape + transl=smplx_params["trans"], + face_offset=None, + joint_offset=None, + ) + + smplx_face = smplx_layer.faces.astype(np.int64) + mesh_render_list = [] + for v_idx in range(num_view): + intr = render_intrs[v_idx] + cam_param = { + "focal": torch.tensor([intr[0, 0], intr[1, 1]]), + "princpt": torch.tensor([intr[0, 2], intr[1, 2]]), + } + render_shape = int(cam_param["princpt"][1] * 2), int( + cam_param["princpt"][0] * 2 + ) # require h, w + mesh_render, is_bkg = render_mesh( + output.vertices[v_idx], + smplx_face, + cam_param, + np.ones((render_shape[0], render_shape[1], 3), dtype=np.float32) * 255, + return_bg_mask=True, + ) + mesh_render = mesh_render.astype(np.uint8) + mesh_render_list.append(mesh_render) + mesh_render = np.stack(mesh_render_list) + return mesh_render + + +def prepare_motion_seqs( + motion_seqs_dir, + image_folder, + save_root, + fps, + bg_color, + aspect_standard, + enlarge_ratio, + render_image_res, + need_mask, + multiply=16, + vis_motion=False, + motion_size=500, # only support 12s videos +): + """ + Prepare motion sequences for rendering. + + Args: + motion_seqs_dir (str): Directory path of motion sequences. + image_folder (str): Directory path of source images. + save_root (str): Directory path to save the motion sequences. + fps (int): Frames per second for the motion sequences. + bg_color (tuple): Background color in RGB format. + aspect_standard (float): Standard human aspect ratio (height/width). + enlarge_ratio (float): Ratio to enlarge the source images. + render_image_res (int): Resolution of the rendered images. + need_mask (bool): Flag indicating whether masks are needed. + multiply (int, optional): Multiply factor for image size. Defaults to 16. + vis_motion (bool, optional): Flag indicating whether to visualize motion. Defaults to False. + + Returns: + dict: Dictionary containing the prepared motion sequences. 
+ 'render_c2ws': camera to world matrix [B F 4 4] + 'render_intrs': intrins matrix [B F 4 4] + 'render_bg_colors' bg_colors [B F 3] + 'smplx_params': smplx_params -> ['betas', + 'root_pose', 'body_pose', + 'jaw_pose', 'leye_pose', + 'reye_pose', 'lhand_pose', + 'rhand_pose', + 'trans', 'expr', 'focal', + 'princpt', + 'img_size_wh' + ] + 'rgbs': imgs w.r.t motions + 'vis_motion_render': rendering smplx motion + + Raises: + AssertionError: If motion_seqs_dir is None and image_folder is None. + + """ + + if motion_seqs_dir is None: + assert image_folder is not None + motion_seqs_dir, image_folder = predict_motion_seqs_from_images( + image_folder, save_root, fps + ) + + motion_seqs = sorted(glob.glob(os.path.join(motion_seqs_dir, "*.json"))) + motion_seqs = motion_seqs[:motion_size] + + # source images + c2ws, intrs, rgbs, bg_colors, masks = [], [], [], [], [] + smplx_params = [] + shape_param = None + + for idx, smplx_path in enumerate(motion_seqs): + + if image_folder is not None: + file_name = os.path.splitext(os.path.basename(smplx_path))[0] + frame_path = os.path.join(image_folder, file_name + ".png") + if not os.path.exists(frame_path): + frame_path = os.path.join(image_folder, file_name + ".jpg") + with open(smplx_path) as f: + smplx_raw_data = json.load(f) + smplx_param = { + k: torch.FloatTensor(v) + for k, v in smplx_raw_data.items() + if "pad_ratio" not in k + } + + if idx == 0: + shape_param = smplx_param["betas"] + + c2w, intrinsic = _load_pose(smplx_param) + intrinsic_raw = intrinsic.clone() + if "expr" not in smplx_raw_data: + # supports v2.0 data format + max_tgt_size = int(max(smplx_param["img_size_wh"])) + else: + max_tgt_size = int(smplx_param["img_size_wh"][0]) + smplx_param.pop("expr") + + flame_path = smplx_path.replace("smplx_params", "flame_params") + smplx_param["expr"] = torch.FloatTensor([0.0] * 100) + + smplx_param["expr"] = torch.FloatTensor([0.0] * 100) + + c2ws.append(c2w) + bg_colors.append(bg_color) + intrs.append(intrinsic) + # intrs.append(intrinsic_raw) + smplx_params.append(smplx_param) + + c2ws = torch.stack(c2ws, dim=0) # [N, 4, 4] + intrs = torch.stack(intrs, dim=0) # [N, 4, 4] + bg_colors = ( + torch.tensor(bg_colors, dtype=torch.float32).unsqueeze(-1).repeat(1, 3) + ) # [N, 3] + + if len(rgbs) > 0: + rgbs = torch.cat(rgbs, dim=0) # [N, 3, H, W] + # masks = torch.cat(masks, dim=0) # [N, 1, H, W] + + smplx_params_tmp = defaultdict(list) + for smplx in smplx_params: + for k, v in smplx.items(): + smplx_params_tmp[k].append(v) + for k, v in smplx_params_tmp.items(): + smplx_params_tmp[k] = torch.stack(v) # [Nv, xx, xx] + smplx_params = smplx_params_tmp + # TODO check different betas for same person + smplx_params["betas"] = shape_param + + if vis_motion: + motion_render = render_smplx_mesh(smplx_params, intrs) + else: + motion_render = None + + # add batch dim + for k, v in smplx_params.items(): + smplx_params[k] = v.unsqueeze(0) + # print(k, smplx_params[k].shape, "motion_seq") + c2ws = c2ws.unsqueeze(0) + intrs = intrs.unsqueeze(0) + bg_colors = bg_colors.unsqueeze(0) + if len(rgbs) > 0: + rgbs = rgbs.unsqueeze(0) + # print(f"c2ws:{c2ws.shape}, intrs:{intrs.shape}, rgbs:{rgbs.shape if len(rgbs) > 0 else None}") + + motion_seqs_ret = {} + motion_seqs_ret["render_c2ws"] = c2ws + motion_seqs_ret["render_intrs"] = intrs + motion_seqs_ret["render_bg_colors"] = bg_colors + motion_seqs_ret["smplx_params"] = smplx_params + motion_seqs_ret["rgbs"] = rgbs + motion_seqs_ret["vis_motion_render"] = motion_render + motion_seqs_ret["motion_seqs"] = motion_seqs + + 
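    # Callers such as HumanLRMInferrer cache this dict per motion name (self.motion_dict),
    # so repeated requests for the same sequence skip re-loading and re-parsing.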
return motion_seqs_ret + + +def prepare_motion_single( + motion_seqs_dir, + image_name, + save_root, + fps, + bg_color, + aspect_standard, + enlarge_ratio, + render_image_res, + need_mask, + multiply=16, + vis_motion=False, +): + """ + Prepare motion sequences for rendering. + + Args: + motion_seqs_dir (str): Directory path of motion sequences. + image_folder (str): Directory path of source images. + save_root (str): Directory path to save the motion sequences. + fps (int): Frames per second for the motion sequences. + bg_color (tuple): Background color in RGB format. + aspect_standard (float): Standard human aspect ratio (height/width). + enlarge_ratio (float): Ratio to enlarge the source images. + render_image_res (int): Resolution of the rendered images. + need_mask (bool): Flag indicating whether masks are needed. + multiply (int, optional): Multiply factor for image size. Defaults to 16. + vis_motion (bool, optional): Flag indicating whether to visualize motion. Defaults to False. + + Returns: + dict: Dictionary containing the prepared motion sequences. + 'render_c2ws': camera to world matrix [B F 4 4] + 'render_intrs': intrins matrix [B F 4 4] + 'render_bg_colors' bg_colors [B F 3] + 'smplx_params': smplx_params -> ['betas', + 'root_pose', 'body_pose', + 'jaw_pose', 'leye_pose', + 'reye_pose', 'lhand_pose', + 'rhand_pose', + 'trans', 'expr', 'focal', + 'princpt', + 'img_size_wh' + ] + 'rgbs': imgs w.r.t motions + 'vis_motion_render': rendering smplx motion + + Raises: + AssertionError: If motion_seqs_dir is None and image_folder is None. + + """ + + motion_seqs = [ + os.path.join( + motion_seqs_dir, + image_name.replace(".jpg", ".png").replace(".png", ".json"), + ) + for _ in range(4) + ] + axis_list = [0, 60, 180, 240] + + # source images + c2ws, intrs, rgbs, bg_colors, masks = [], [], [], [], [] + smplx_params = [] + shape_param = None + + for idx, smplx_path in enumerate(motion_seqs): + + with open(smplx_path) as f: + smplx_raw_data = json.load(f) + smplx_param = { + k: torch.FloatTensor(v) + for k, v in smplx_raw_data.items() + if "pad_ratio" not in k + } + + if idx == 0: + shape_param = smplx_param["betas"] + + c2w, intrinsic = _load_pose(smplx_param) + intrinsic_raw = intrinsic.clone() + if "expr" not in smplx_raw_data: + # supports v2.0 data format + max_tgt_size = int(max(smplx_param["img_size_wh"])) + else: + max_tgt_size = int(smplx_param["img_size_wh"][0]) + smplx_param.pop("expr") + + flame_path = smplx_path.replace("smplx_params", "flame_params") + if os.path.exists(flame_path): + with open(flame_path) as f: + flame_param = json.load(f) + smplx_param["expr"] = torch.FloatTensor(flame_param["expcode"]) + + # replace with flame's jaw_pose + smplx_param["jaw_pose"] = torch.FloatTensor(flame_param["posecode"][3:]) + smplx_param["leye_pose"] = torch.FloatTensor(flame_param["eyecode"][:3]) + smplx_param["reye_pose"] = torch.FloatTensor(flame_param["eyecode"][3:]) + + else: + smplx_param["expr"] = torch.FloatTensor([0.0] * 100) + + root_rotate_matrix = axis_angle_to_matrix(smplx_param["root_pose"]) + rotate = generate_rotation_matrix_y(axis_list[idx]) + rotate = torch.from_numpy(rotate).float() + rotate = rotate @ root_rotate_matrix + new_rotate_axis = matrix_to_axis_angle(rotate) + + smplx_param["root_pose"] = new_rotate_axis + + c2ws.append(c2w) + bg_colors.append(bg_color) + intrs.append(intrinsic) + # intrs.append(intrinsic_raw) + smplx_params.append(smplx_param) + + c2ws = torch.stack(c2ws, dim=0) # [N, 4, 4] + intrs = torch.stack(intrs, dim=0) # [N, 4, 4] + bg_colors = ( 
+ torch.tensor(bg_colors, dtype=torch.float32).unsqueeze(-1).repeat(1, 3) + ) # [N, 3] + + if len(rgbs) > 0: + rgbs = torch.cat(rgbs, dim=0) # [N, 3, H, W] + # masks = torch.cat(masks, dim=0) # [N, 1, H, W] + + smplx_params_tmp = defaultdict(list) + for smplx in smplx_params: + for k, v in smplx.items(): + smplx_params_tmp[k].append(v) + for k, v in smplx_params_tmp.items(): + smplx_params_tmp[k] = torch.stack(v) # [Nv, xx, xx] + smplx_params = smplx_params_tmp + # TODO check different betas for same person + smplx_params["betas"] = shape_param + + if vis_motion: + motion_render = render_smplx_mesh(smplx_params, intrs) + else: + motion_render = None + + # add batch dim + for k, v in smplx_params.items(): + smplx_params[k] = v.unsqueeze(0) + # print(k, smplx_params[k].shape, "motion_seq") + c2ws = c2ws.unsqueeze(0) + intrs = intrs.unsqueeze(0) + bg_colors = bg_colors.unsqueeze(0) + if len(rgbs) > 0: + rgbs = rgbs.unsqueeze(0) + # print(f"c2ws:{c2ws.shape}, intrs:{intrs.shape}, rgbs:{rgbs.shape if len(rgbs) > 0 else None}") + + motion_seqs = {} + motion_seqs["render_c2ws"] = c2ws + motion_seqs["render_intrs"] = intrs + motion_seqs["render_bg_colors"] = bg_colors + motion_seqs["smplx_params"] = smplx_params + motion_seqs["rgbs"] = rgbs + motion_seqs["vis_motion_render"] = motion_render + + return motion_seqs + + +def prepare_motion_lrmbench( + motion_seqs_dir, + image_name, + save_root, + fps, + bg_color, + aspect_standard, + enlarge_ratio, + render_image_res, + need_mask, + multiply=16, + vis_motion=False, +): + """ + Prepare motion sequences for rendering. + + Args: + motion_seqs_dir (str): Directory path of motion sequences. + image_folder (str): Directory path of source images. + save_root (str): Directory path to save the motion sequences. + fps (int): Frames per second for the motion sequences. + bg_color (tuple): Background color in RGB format. + aspect_standard (float): Standard human aspect ratio (height/width). + enlarge_ratio (float): Ratio to enlarge the source images. + render_image_res (int): Resolution of the rendered images. + need_mask (bool): Flag indicating whether masks are needed. + multiply (int, optional): Multiply factor for image size. Defaults to 16. + vis_motion (bool, optional): Flag indicating whether to visualize motion. Defaults to False. + + Returns: + dict: Dictionary containing the prepared motion sequences. + 'render_c2ws': camera to world matrix [B F 4 4] + 'render_intrs': intrins matrix [B F 4 4] + 'render_bg_colors' bg_colors [B F 3] + 'smplx_params': smplx_params -> ['betas', + 'root_pose', 'body_pose', + 'jaw_pose', 'leye_pose', + 'reye_pose', 'lhand_pose', + 'rhand_pose', + 'trans', 'expr', 'focal', + 'princpt', + 'img_size_wh' + ] + 'rgbs': imgs w.r.t motions + 'vis_motion_render': rendering smplx motion + + Raises: + AssertionError: If motion_seqs_dir is None and image_folder is None. 
+ + """ + import json + + axis_list = list(range(0, 360, 30)) + + motion_seqs = [ + os.path.join( + motion_seqs_dir, + image_name.replace(".jpg", ".png") + .replace(".jpeg", ".png") + .replace(".PNG", ".png") + .replace(".png", ".json"), + ) + for _ in range(len(axis_list)) + ] + + # source images + c2ws, intrs, rgbs, bg_colors, masks = [], [], [], [], [] + smplx_params = [] + shape_param = None + + for idx, smplx_path in enumerate(motion_seqs): + + with open(smplx_path) as f: + smplx_raw_data = json.load(f) + smplx_param = { + k: torch.FloatTensor(v) + for k, v in smplx_raw_data.items() + if "pad_ratio" not in k + } + + if idx == 0: + shape_param = smplx_param["betas"] + + c2w, intrinsic = _load_pose(smplx_param) + intrinsic_raw = intrinsic.clone() + if "expr" not in smplx_raw_data: + # supports v2.0 data format + max_tgt_size = int(max(smplx_param["img_size_wh"])) + else: + max_tgt_size = int(smplx_param["img_size_wh"][0]) + smplx_param.pop("expr") + + flame_path = smplx_path.replace("smplx_params", "flame_params") + if os.path.exists(flame_path): + with open(flame_path) as f: + flame_param = json.load(f) + smplx_param["expr"] = torch.FloatTensor(flame_param["expcode"]) + + # replace with flame's jaw_pose + smplx_param["jaw_pose"] = torch.FloatTensor(flame_param["posecode"][3:]) + smplx_param["leye_pose"] = torch.FloatTensor(flame_param["eyecode"][:3]) + smplx_param["reye_pose"] = torch.FloatTensor(flame_param["eyecode"][3:]) + + else: + smplx_param["expr"] = torch.FloatTensor([0.0] * 100) + + smplx_param["expr"] = torch.FloatTensor([0.0] * 100) + + root_rotate_matrix = axis_angle_to_matrix(smplx_param["root_pose"]) + rotate = generate_rotation_matrix_y(axis_list[idx]) + rotate = torch.from_numpy(rotate).float() + rotate = rotate @ root_rotate_matrix + new_rotate_axis = matrix_to_axis_angle(rotate) + + smplx_param["root_pose"] = new_rotate_axis + + c2ws.append(c2w) + bg_colors.append(bg_color) + intrs.append(intrinsic) + # intrs.append(intrinsic_raw) + smplx_params.append(smplx_param) + + c2ws = torch.stack(c2ws, dim=0) # [N, 4, 4] + intrs = torch.stack(intrs, dim=0) # [N, 4, 4] + bg_colors = ( + torch.tensor(bg_colors, dtype=torch.float32).unsqueeze(-1).repeat(1, 3) + ) # [N, 3] + + if len(rgbs) > 0: + rgbs = torch.cat(rgbs, dim=0) # [N, 3, H, W] + # masks = torch.cat(masks, dim=0) # [N, 1, H, W] + + smplx_params_tmp = defaultdict(list) + for smplx in smplx_params: + for k, v in smplx.items(): + smplx_params_tmp[k].append(v) + for k, v in smplx_params_tmp.items(): + smplx_params_tmp[k] = torch.stack(v) # [Nv, xx, xx] + smplx_params = smplx_params_tmp + # TODO check different betas for same person + smplx_params["betas"] = shape_param + + if vis_motion: + motion_render = render_smplx_mesh(smplx_params, intrs) + else: + motion_render = None + + # add batch dim + for k, v in smplx_params.items(): + smplx_params[k] = v.unsqueeze(0) + # print(k, smplx_params[k].shape, "motion_seq") + c2ws = c2ws.unsqueeze(0) + intrs = intrs.unsqueeze(0) + bg_colors = bg_colors.unsqueeze(0) + if len(rgbs) > 0: + rgbs = rgbs.unsqueeze(0) + + motion_seqs = {} + motion_seqs["render_c2ws"] = c2ws + motion_seqs["render_intrs"] = intrs + motion_seqs["render_bg_colors"] = bg_colors + motion_seqs["smplx_params"] = smplx_params + motion_seqs["rgbs"] = rgbs + motion_seqs["vis_motion_render"] = motion_render + + return motion_seqs diff --git a/LHM/utils/__init__.py b/LHM/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7a1e39e624fbf5d970acc4b05714f8b9f70830c6 --- /dev/null +++ 
b/LHM/utils/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Empty diff --git a/LHM/utils/__pycache__/__init__.cpython-310.pyc b/LHM/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8a7f7ac9bcb0ae0daf696271addc6128d40a1ac4 Binary files /dev/null and b/LHM/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/LHM/utils/__pycache__/face_detector.cpython-310.pyc b/LHM/utils/__pycache__/face_detector.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..31b392a23b51a74593a15010077f384deb9e0ae4 Binary files /dev/null and b/LHM/utils/__pycache__/face_detector.cpython-310.pyc differ diff --git a/LHM/utils/__pycache__/hf_hub.cpython-310.pyc b/LHM/utils/__pycache__/hf_hub.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8de585c0a864c2545683fc4c92320390fbdd66c4 Binary files /dev/null and b/LHM/utils/__pycache__/hf_hub.cpython-310.pyc differ diff --git a/LHM/utils/__pycache__/logging.cpython-310.pyc b/LHM/utils/__pycache__/logging.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b3f6b48a08a989a038fe23b25b5b26434496f602 Binary files /dev/null and b/LHM/utils/__pycache__/logging.cpython-310.pyc differ diff --git a/LHM/utils/__pycache__/registry.cpython-310.pyc b/LHM/utils/__pycache__/registry.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1472df0de26ccc65d2493b8ddcd30dc303eb75c4 Binary files /dev/null and b/LHM/utils/__pycache__/registry.cpython-310.pyc differ diff --git a/LHM/utils/__pycache__/video.cpython-310.pyc b/LHM/utils/__pycache__/video.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fa836bafba7d3f60184554870f787ec6930bc832 Binary files /dev/null and b/LHM/utils/__pycache__/video.cpython-310.pyc differ diff --git a/LHM/utils/bbox.py b/LHM/utils/bbox.py new file mode 100644 index 0000000000000000000000000000000000000000..999efb902fb5d360ef6d146f5a63f2a7bea00ddf --- /dev/null +++ b/LHM/utils/bbox.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2024-08-30 20:50:27 +# @Function : The class defines Bbox + +import copy + +import cv2 +import numpy as np +import torch + + +class Bbox: + def __init__(self, box, mode="whwh"): + + assert len(box) == 4 + assert mode in ["whwh", "xywh"] + self.box = box + self.mode = mode + + def to_xywh(self): + + if self.mode == "whwh": + + l, t, r, b = self.box + + center_x = (l + r) / 2 + center_y = (t + b) / 2 + width = r - l + height = b - t + return Bbox([center_x, center_y, width, height], mode="xywh") + else: + return self + + def to_whwh(self): + + if self.mode == "whwh": + return self + else: + + cx, cy, w, h = self.box + l = cx - w // 2 + t = cy - h // 2 + r = cx + w - (w // 2) + b = cy + h - (h // 2) + + return Bbox([l, t, r, b], mode="whwh") 
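    # Quick illustration of the two box conventions (numbers are made up for the example):
    #   Bbox([10, 20, 110, 220], mode="whwh")                       # left, top, right, bottom corners
    #   .to_xywh() -> Bbox([60.0, 120.0, 100, 200], mode="xywh")    # centre x/y, width, height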
+ + def area(self): + + box = self.to_xywh() + _, __, w, h = box.box + + return w * h + + def offset(self, offset_w, offset_h): + + assert self.mode == "whwh" + + self.box[0] += offset_w + self.box[1] += offset_h + self.box[2] += offset_w + self.box[3] += offset_h + + def get_box(self): + + return list(map(int, self.box)) + + def to_xywh_ratio(self, ori_w, ori_h): + + cx, cy, w, h = self.to_xywh().get_box() + cx = cx / ori_w + cy = cy / ori_h + w = w / ori_w + h = h / ori_h + + return cx, cy, w, h + + def scale_bbox(self, ori_w, ori_h, new_w, new_h): + """scale bbox as image scale""" + + assert self.mode == "whwh" + + cx, cy, w, h = self.to_xywh_ratio(ori_w, ori_h) + + cx = cx * new_w + cy = cy * new_h + w = w * new_w + h = h * new_h + + l = cx - w // 2 + t = cy - h // 2 + r = cx + w - (w // 2) + b = cy + h - (h // 2) + + return Bbox([l, t, r, b], mode="whwh") + + def scale(self, scale, width, height): + """scale bbox with scale factor""" + new_box = self.to_xywh() + cx, cy, w, h = new_box.get_box() + w = w * scale + h = h * scale + + l = cx - w // 2 + t = cy - h // 2 + r = cx + w - (w // 2) + b = cy + h - (h // 2) + + l = int(max(l, 0)) + t = int(max(t, 0)) + r = int(min(r, width)) + b = int(min(b, height)) + + return Bbox([l, t, r, b], mode="whwh") + + def __repr__(self): + box = self.to_whwh() + l, t, r, b = box.box + + return f"BBox(left={l}, top={t}, right={r}, bottom={b})" diff --git a/LHM/utils/compile.py b/LHM/utils/compile.py new file mode 100644 index 0000000000000000000000000000000000000000..08972a23daf1c046c327ce93fc667b706a3ec65b --- /dev/null +++ b/LHM/utils/compile.py @@ -0,0 +1,35 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from accelerate.logging import get_logger + + +logger = get_logger(__name__) + + +def configure_dynamo(config: dict): + try: + import torch._dynamo + logger.debug(f'Configuring torch._dynamo.config with {config}') + for k, v in config.items(): + if v is None: + logger.debug(f'Skipping torch._dynamo.config.{k} with None') + continue + if hasattr(torch._dynamo.config, k): + logger.warning(f'Overriding torch._dynamo.config.{k} from {getattr(torch._dynamo.config, k)} to {v}') + setattr(torch._dynamo.config, k, v) + except ImportError: + logger.debug('torch._dynamo not found, skipping') + pass diff --git a/LHM/utils/face_detector.py b/LHM/utils/face_detector.py new file mode 100644 index 0000000000000000000000000000000000000000..11e391c26e90c9ee000c6ce736e36c4725e0b513 --- /dev/null +++ b/LHM/utils/face_detector.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python +# Copyright (c) Xuangeng Chu (xg.chu@outlook.com) +# Modified based on code from Orest Kupyn (University of Oxford). 
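A quick usage sketch for the `Bbox` helper defined in `LHM/utils/bbox.py` above, before the face-detector code below. It is a minimal sketch assuming only the class as written; the image size and box coordinates are made up for illustration:

```python
from LHM.utils.bbox import Bbox

# A detection box in (left, top, right, bottom) order on a 640x480 image.
box = Bbox([100, 50, 300, 350], mode="whwh")

print(box.area())         # width * height = 200 * 300 = 60000
print(box.to_xywh().box)  # [200.0, 200.0, 200, 300] -> center_x, center_y, w, h

# Grow the box by 1.2x around its center, clamped to the image bounds.
padded = box.scale(1.2, width=640, height=480)
print(padded.get_box())   # integer left/top/right/bottom
```

Note the split of responsibilities: `scale_bbox` rescales a box together with the image it lives on (old size to new size), whereas `scale` enlarges the box in place by a factor and clamps it to the given width and height.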
+ +import os +import pdb +import sys + +import numpy as np +import torch +import torchvision + +sys.path.append("./") + + +def expand_bbox(bbox, scale=1.1): + xmin, ymin, xmax, ymax = bbox.unbind(dim=-1) + cenx, ceny = (xmin + xmax) / 2, (ymin + ymax) / 2 + # ceny = ceny - (ymax - ymin) * 0.05 + extend_size = torch.sqrt((ymax - ymin) * (xmax - xmin)) * scale + xmine, xmaxe = cenx - extend_size / 2, cenx + extend_size / 2 + ymine, ymaxe = ceny - extend_size / 2, ceny + extend_size / 2 + expanded_bbox = torch.stack([xmine, ymine, xmaxe, ymaxe], dim=-1) + return torch.stack([xmine, ymine, xmaxe, ymaxe], dim=-1) + + +def nms( + boxes_xyxy, + scores, + flame_params, + confidence_threshold: float = 0.5, + iou_threshold: float = 0.5, + top_k: int = 1000, + keep_top_k: int = 100, +): + for pred_bboxes_xyxy, pred_bboxes_conf, pred_flame_params in zip( + boxes_xyxy.detach().float(), + scores.detach().float(), + flame_params.detach().float(), + ): + pred_bboxes_conf = pred_bboxes_conf.squeeze(-1) # [Anchors] + conf_mask = pred_bboxes_conf >= confidence_threshold + + pred_bboxes_conf = pred_bboxes_conf[conf_mask] + pred_bboxes_xyxy = pred_bboxes_xyxy[conf_mask] + pred_flame_params = pred_flame_params[conf_mask] + + # Filter all predictions by self.nms_top_k + if pred_bboxes_conf.size(0) > top_k: + topk_candidates = torch.topk( + pred_bboxes_conf, k=top_k, largest=True, sorted=True + ) + pred_bboxes_conf = pred_bboxes_conf[topk_candidates.indices] + pred_bboxes_xyxy = pred_bboxes_xyxy[topk_candidates.indices] + pred_flame_params = pred_flame_params[topk_candidates.indices] + + # NMS + idx_to_keep = torchvision.ops.boxes.nms( + boxes=pred_bboxes_xyxy, scores=pred_bboxes_conf, iou_threshold=iou_threshold + ) + + final_bboxes = pred_bboxes_xyxy[idx_to_keep][:keep_top_k] # [Instances, 4] + final_scores = pred_bboxes_conf[idx_to_keep][:keep_top_k] # [Instances, 1] + final_params = pred_flame_params[idx_to_keep][ + :keep_top_k + ] # [Instances, Flame Params] + return final_bboxes, final_scores, final_params + + +class VGGHeadDetector(torch.nn.Module): + def __init__(self, model_path, device): + super().__init__() + self.image_size = 640 + self._device = device + self.model_path = model_path + + self._init_models() + + def _init_models(self): + self.model = torch.jit.load(self.model_path, map_location="cpu") + self.model.to(self._device).eval() + + def forward(self, image_tensor, conf_threshold=0.5): + if not hasattr(self, "model"): + self._init_models() + image_tensor = image_tensor.to(self._device).float() + image, padding, scale = self._preprocess(image_tensor) + bbox, scores, flame_params = self.model(image) + bbox, vgg_results = self._postprocess( + bbox, scores, flame_params, conf_threshold + ) + if bbox is None: + print("VGGHeadDetector: No face detected: {}!".format(image_key)) + return None, None + vgg_results["normalize"] = {"padding": padding, "scale": scale} + # bbox + bbox = bbox.clip(0, self.image_size) + bbox[[0, 2]] -= padding[0] + bbox[[1, 3]] -= padding[1] + bbox /= scale + bbox = bbox.clip(0, self.image_size / scale) + + return vgg_results, bbox + + @torch.no_grad() + def detect_face(self, image_tensor): + # image_tensor [3, H, W] + _, bbox = self.forward(image_tensor=image_tensor) + return expand_bbox(bbox, scale=1.65).long() + + def _preprocess(self, image): + _, h, w = image.shape + if h > w: + new_h, new_w = self.image_size, int(w * self.image_size / h) + else: + new_h, new_w = int(h * self.image_size / w), self.image_size + scale = self.image_size / max(h, w) + image = 
torchvision.transforms.functional.resize( + image, (new_h, new_w), antialias=True + ) + pad_w = self.image_size - image.shape[2] + pad_h = self.image_size - image.shape[1] + image = torchvision.transforms.functional.pad( + image, + (pad_w // 2, pad_h // 2, pad_w - pad_w // 2, pad_h - pad_h // 2), + fill=127, + ) + image = image.unsqueeze(0).float() / 255.0 + return image, np.array([pad_w // 2, pad_h // 2]), scale + + def _postprocess(self, bbox, scores, flame_params, conf_threshold): + # flame_params = {"shape": 300, "exp": 100, "rotation": 6, "jaw": 3, "translation": 3, "scale": 1} + bbox, scores, flame_params = nms( + bbox, scores, flame_params, confidence_threshold=conf_threshold + ) + if bbox.shape[0] == 0: + return None, None + max_idx = ( + ((bbox[:, 3] - bbox[:, 1]) * (bbox[:, 2] - bbox[:, 0])).argmax().long() + ) + bbox, flame_params = bbox[max_idx], flame_params[max_idx] + if bbox[0] < 5 and bbox[1] < 5 and bbox[2] > 635 and bbox[3] > 635: + return None, None + # flame + posecode = torch.cat([flame_params.new_zeros(3), flame_params[400:403]]) + vgg_results = { + "rotation_6d": flame_params[403:409], + "translation": flame_params[409:412], + "scale": flame_params[412:], + "shapecode": flame_params[:300], + "expcode": flame_params[300:400], + "posecode": posecode, + } + return bbox, vgg_results + + +class FaceDetector: + def __init__(self, model_path, device): + self.model = VGGHeadDetector(model_path=model_path, device=device) + + @torch.no_grad() + def __call__(self, image_tensor): + return self.model.detect_face(image_tensor) + + def __repr__(self): + return f"Model: {self.model}" + + +if __name__ == "__main__": + from PIL import Image + + device = "cuda" + model_path = "./pretrained_models/gagatracker/vgghead/vgg_heads_l.trcd" + easy_head_detect = FaceDetector(model_path=model_path, device=device) + + rgb_path = "./man_1.png" + rgb = np.array(Image.open(rgb_path)) + rgb = torch.from_numpy(rgb).permute(2, 0, 1) + bbox = easy_head_detect(rgb) + head_rgb = rgb[:, int(bbox[1]) : int(bbox[3]), int(bbox[0]) : int(bbox[2])] + head_rgb = head_rgb.permute(1, 2, 0) + head_rgb = head_rgb.cpu().numpy() + Image.fromarray(head_rgb).save("head_rgb.png") diff --git a/LHM/utils/ffmpeg_utils.py b/LHM/utils/ffmpeg_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a8696eff12c581cfbe7f517abad87de76377a744 --- /dev/null +++ b/LHM/utils/ffmpeg_utils.py @@ -0,0 +1,54 @@ +import os +import pdb +import torch +import numpy as np +import imageio +import cv2 +import imageio.v3 as iio + +VIDEO_TYPE_LIST = {'.avi','.mp4','.gif','.AVI','.MP4','.GIF'} + +def encodeffmpeg(inputs, frame_rate, output, format="png"): + """output: need video_name""" + assert ( + os.path.splitext(output)[-1] in VIDEO_TYPE_LIST + ), "output is the format of video, e.g., mp4" + assert os.path.isdir(inputs), "input dir is NOT file format" + + inputs = inputs[:-1] if inputs[-1] == "/" else inputs + + output = os.path.abspath(output) + + cmd = ( + f"ffmpeg -r {frame_rate} -pattern_type glob -i '{inputs}/*.{format}' " + + f'-vcodec libx264 -crf 10 -vf "pad=ceil(iw/2)*2:ceil(ih/2)*2" ' + + f"-pix_fmt yuv420p {output} > /dev/null 2>&1" + ) + + print(cmd) + + output_dir = os.path.dirname(output) + if os.path.exists(output): + os.remove(output) + os.makedirs(output_dir, exist_ok=True) + + print("encoding imgs to video.....") + os.system(cmd) + print("video done!") + +def images_to_video(images, output_path, fps, gradio_codec: bool, verbose=False, bitrate="20M"): + os.makedirs(os.path.dirname(output_path), 
exist_ok=True) + frames = [] + for i in range(images.shape[0]): + if isinstance(images, torch.Tensor): + frame = (images[i].permute(1, 2, 0).cpu().numpy() * 255).astype(np.uint8) + assert frame.shape[0] == images.shape[2] and frame.shape[1] == images.shape[3], \ + f"Frame shape mismatch: {frame.shape} vs {images.shape}" + assert frame.min() >= 0 and frame.max() <= 255, \ + f"Frame value out of range: {frame.min()} ~ {frame.max()}" + else: + frame = images[i] + frames.append(frame) + + frames = np.stack(frames) + iio.imwrite(output_path,frames,fps=fps,codec="libx264",pixelformat="yuv420p",bitrate=bitrate,macro_block_size=16) \ No newline at end of file diff --git a/LHM/utils/hf_hub.py b/LHM/utils/hf_hub.py new file mode 100644 index 0000000000000000000000000000000000000000..b9ba0df56983a407d20c2c656a82c1ad15487ca5 --- /dev/null +++ b/LHM/utils/hf_hub.py @@ -0,0 +1,25 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch.nn as nn +from huggingface_hub import PyTorchModelHubMixin + + +def wrap_model_hub(model_cls: nn.Module): + class HfModel(model_cls, PyTorchModelHubMixin): + def __init__(self, config: dict): + super().__init__(**config) + self.config = config + return HfModel diff --git a/LHM/utils/logging.py b/LHM/utils/logging.py new file mode 100644 index 0000000000000000000000000000000000000000..798639965bc4d1b9acd1c9bd5c1e95d2edf255b3 --- /dev/null +++ b/LHM/utils/logging.py @@ -0,0 +1,48 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
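`wrap_model_hub` in `LHM/utils/hf_hub.py` above mixes `PyTorchModelHubMixin` into an arbitrary `nn.Module` subclass so that checkpoints can be saved to and loaded from the Hugging Face Hub. A hedged sketch of how such a wrapped class might be used; `ToyModel` and the repo id are placeholders, not names taken from this diff:

```python
import torch.nn as nn
from LHM.utils.hf_hub import wrap_model_hub

class ToyModel(nn.Module):          # stand-in for a real LHM model class
    def __init__(self, hidden_dim: int = 64):
        super().__init__()
        self.proj = nn.Linear(hidden_dim, hidden_dim)

HfToyModel = wrap_model_hub(ToyModel)
model = HfToyModel(config={"hidden_dim": 64})  # the config dict is unpacked into __init__

# PyTorchModelHubMixin adds save_pretrained / from_pretrained / push_to_hub, e.g.:
# model = HfToyModel.from_pretrained("user/some-lhm-checkpoint", config={"hidden_dim": 64})
```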
+ + +import os +import logging +from tqdm.auto import tqdm + + +class TqdmStreamHandler(logging.StreamHandler): + def emit(self, record): + tqdm.write(self.format(record)) + + +def configure_logger(stream_level, log_level, file_path = None): + # print(stream_level) + _stream_level = stream_level.upper() + _log_level = log_level.upper() + _project_level = _log_level + + _formatter = logging.Formatter("[%(asctime)s] %(name)s: [%(levelname)s] %(message)s") + + _stream_handler = TqdmStreamHandler() + _stream_handler.setLevel(_stream_level) + _stream_handler.setFormatter(_formatter) + + if file_path is not None: + os.makedirs(os.path.dirname(file_path), exist_ok=True) + _file_handler = logging.FileHandler(file_path) + _file_handler.setLevel(_log_level) + _file_handler.setFormatter(_formatter) + + _project_logger = logging.getLogger(__name__.split('.')[0]) + _project_logger.setLevel(_project_level) + _project_logger.addHandler(_stream_handler) + if file_path is not None: + _project_logger.addHandler(_file_handler) diff --git a/LHM/utils/preprocess.py b/LHM/utils/preprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..4724a4c5ed6cba9e16dac265bbbaf105a0b57dd6 --- /dev/null +++ b/LHM/utils/preprocess.py @@ -0,0 +1,88 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import numpy as np +import rembg +import cv2 + + +class Preprocessor: + + """ + Preprocessing under cv2 conventions. 
+ """ + + def __init__(self): + self.rembg_session = rembg.new_session( + providers=["CUDAExecutionProvider", "CPUExecutionProvider"], + ) + + def preprocess(self, image_path: str, save_path: str, rmbg: bool = True, recenter: bool = True, size: int = 512, border_ratio: float = 0.2): + image = self.step_load_to_size(image_path=image_path, size=size*2) + if rmbg: + image = self.step_rembg(image_in=image) + else: + image = cv2.cvtColor(image, cv2.COLOR_BGR2BGRA) + if recenter: + image = self.step_recenter(image_in=image, border_ratio=border_ratio, square_size=size) + else: + image = cv2.resize( + src=image, + dsize=(size, size), + interpolation=cv2.INTER_AREA, + ) + return cv2.imwrite(save_path, image) + + def step_rembg(self, image_in: np.ndarray) -> np.ndarray: + image_out = rembg.remove( + data=image_in, + session=self.rembg_session, + ) + return image_out + + def step_recenter(self, image_in: np.ndarray, border_ratio: float, square_size: int) -> np.ndarray: + assert image_in.shape[-1] == 4, "Image to recenter must be RGBA" + mask = image_in[..., -1] > 0 + ijs = np.nonzero(mask) + # find bbox + i_min, i_max = ijs[0].min(), ijs[0].max() + j_min, j_max = ijs[1].min(), ijs[1].max() + bbox_height, bbox_width = i_max - i_min, j_max - j_min + # recenter and resize + desired_size = int(square_size * (1 - border_ratio)) + scale = desired_size / max(bbox_height, bbox_width) + desired_height, desired_width = int(bbox_height * scale), int(bbox_width * scale) + desired_i_min, desired_j_min = (square_size - desired_height) // 2, (square_size - desired_width) // 2 + desired_i_max, desired_j_max = desired_i_min + desired_height, desired_j_min + desired_width + # create new image + image_out = np.zeros((square_size, square_size, 4), dtype=np.uint8) + image_out[desired_i_min:desired_i_max, desired_j_min:desired_j_max] = cv2.resize( + src=image_in[i_min:i_max, j_min:j_max], + dsize=(desired_width, desired_height), + interpolation=cv2.INTER_AREA, + ) + return image_out + + def step_load_to_size(self, image_path: str, size: int) -> np.ndarray: + image = cv2.imread(image_path, cv2.IMREAD_UNCHANGED) + height, width = image.shape[:2] + scale = size / max(height, width) + height, width = int(height * scale), int(width * scale) + image_out = cv2.resize( + src=image, + dsize=(width, height), + interpolation=cv2.INTER_AREA, + ) + return image_out diff --git a/LHM/utils/profiler.py b/LHM/utils/profiler.py new file mode 100644 index 0000000000000000000000000000000000000000..92ba79973308b627d5b20bdd7bb09eac138c93ad --- /dev/null +++ b/LHM/utils/profiler.py @@ -0,0 +1,30 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
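The `Preprocessor` added in `LHM/utils/preprocess.py` above chains background removal (rembg), recentering with a configurable border, and resizing, all under OpenCV conventions. A minimal usage sketch assuming only the class above; both file paths are placeholders:

```python
from LHM.utils.preprocess import Preprocessor

pre = Preprocessor()  # builds a rembg session (CUDA execution provider first, CPU fallback)

# Remove the background, recenter the subject with a 20% border,
# and write a 512x512 RGBA result.
pre.preprocess(
    image_path="examples/person.jpg",    # placeholder input path
    save_path="outputs/person_512.png",  # placeholder output path
    rmbg=True,
    recenter=True,
    size=512,
    border_ratio=0.2,
)
```

Internally, `step_load_to_size` first loads the image at twice the target size, so the final `INTER_AREA` resize after recentering is a downscale, which tends to avoid aliasing.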
+ + +from torch.profiler import profile + + +class DummyProfiler(profile): + def __init__(self): + pass + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def step(self): + pass diff --git a/LHM/utils/proxy.py b/LHM/utils/proxy.py new file mode 100644 index 0000000000000000000000000000000000000000..1a0c6e53f0a0c412debc866d172926a4c0401bba --- /dev/null +++ b/LHM/utils/proxy.py @@ -0,0 +1,45 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import os + +NO_PROXY = "OPENLRM_NO_DATA_PROXY" in os.environ + +def no_proxy(func): + """Decorator to disable proxy but then restore after the function call.""" + def wrapper(*args, **kwargs): + # http_proxy, https_proxy, HTTP_PROXY, HTTPS_PROXY, all_proxy + http_proxy = os.environ.get('http_proxy') + https_proxy = os.environ.get('https_proxy') + HTTP_PROXY = os.environ.get('HTTP_PROXY') + HTTPS_PROXY = os.environ.get('HTTPS_PROXY') + all_proxy = os.environ.get('all_proxy') + os.environ['http_proxy'] = '' + os.environ['https_proxy'] = '' + os.environ['HTTP_PROXY'] = '' + os.environ['HTTPS_PROXY'] = '' + os.environ['all_proxy'] = '' + try: + return func(*args, **kwargs) + finally: + os.environ['http_proxy'] = http_proxy + os.environ['https_proxy'] = https_proxy + os.environ['HTTP_PROXY'] = HTTP_PROXY + os.environ['HTTPS_PROXY'] = HTTPS_PROXY + os.environ['all_proxy'] = all_proxy + if NO_PROXY: + return wrapper + else: + return func diff --git a/LHM/utils/registry.py b/LHM/utils/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..421a735f82899c50884cd5b5a27e71757b2eb813 --- /dev/null +++ b/LHM/utils/registry.py @@ -0,0 +1,35 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
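One caveat with the `no_proxy` decorator in `LHM/utils/proxy.py` above: the restore step assigns the saved values back with `os.environ[...] = value`, which raises a `TypeError` whenever a proxy variable was not set before the call (its saved value is `None`). A hedged sketch of a tolerant variant; this illustrates the idea and is not code from the repository:

```python
import os
from functools import wraps

_PROXY_KEYS = ("http_proxy", "https_proxy", "HTTP_PROXY", "HTTPS_PROXY", "all_proxy")

def no_proxy_safe(func):
    """Disable proxy variables for the duration of the call, then restore them."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        saved = {k: os.environ.get(k) for k in _PROXY_KEYS}
        for k in _PROXY_KEYS:
            os.environ[k] = ""  # blank out proxies while func runs
        try:
            return func(*args, **kwargs)
        finally:
            for k, v in saved.items():
                if v is None:
                    os.environ.pop(k, None)  # was unset before, remove again
                else:
                    os.environ[k] = v
    return wrapper
```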
+ + +class Registry: + """Registry class""" + + def __init__(self): + self._registry = {} + + def register(self, name): + """Register a module""" + def decorator(cls): + assert name not in self._registry, 'Module {} already registered'.format(name) + self._registry[name] = cls + return cls + return decorator + + def __getitem__(self, name): + """Get a module""" + return self._registry[name] + + def __contains__(self, name): + return name in self._registry diff --git a/LHM/utils/rot6d.py b/LHM/utils/rot6d.py new file mode 100644 index 0000000000000000000000000000000000000000..bac67e1d403404b1594b2e7a53476db1ce8b7ea1 --- /dev/null +++ b/LHM/utils/rot6d.py @@ -0,0 +1,466 @@ +from typing import Optional + +import torch +import torch.nn.functional as F + + +def quaternion_to_axis_angle(quaternions: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as quaternions to axis/angle. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + norms = torch.norm(quaternions[..., 1:], p=2, dim=-1, keepdim=True) + half_angles = torch.atan2(norms, quaternions[..., :1]) + angles = 2 * half_angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + return quaternions[..., 1:] / sin_half_angles_over_angles + + +def matrix_to_quaternion(matrix: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as rotation matrices to quaternions. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + quaternions with real part first, as tensor of shape (..., 4). + """ + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape {matrix.shape}.") + + batch_dim = matrix.shape[:-2] + m00, m01, m02, m10, m11, m12, m20, m21, m22 = torch.unbind( + matrix.reshape(batch_dim + (9,)), dim=-1 + ) + + q_abs = _sqrt_positive_part( + torch.stack( + [ + 1.0 + m00 + m11 + m22, + 1.0 + m00 - m11 - m22, + 1.0 - m00 + m11 - m22, + 1.0 - m00 - m11 + m22, + ], + dim=-1, + ) + ) + + # we produce the desired quaternion multiplied by each of r, i, j, k + quat_by_rijk = torch.stack( + [ + # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and + # `int`. + torch.stack([q_abs[..., 0] ** 2, m21 - m12, m02 - m20, m10 - m01], dim=-1), + # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and + # `int`. + torch.stack([m21 - m12, q_abs[..., 1] ** 2, m10 + m01, m02 + m20], dim=-1), + # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and + # `int`. + torch.stack([m02 - m20, m10 + m01, q_abs[..., 2] ** 2, m12 + m21], dim=-1), + # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and + # `int`. + torch.stack([m10 - m01, m20 + m02, m21 + m12, q_abs[..., 3] ** 2], dim=-1), + ], + dim=-2, + ) + + # We floor here at 0.1 but the exact level is not important; if q_abs is small, + # the candidate won't be picked. 
+ flr = torch.tensor(0.1).to(dtype=q_abs.dtype, device=q_abs.device) + quat_candidates = quat_by_rijk / (2.0 * q_abs[..., None].max(flr)) + + # if not for numerical problems, quat_candidates[i] should be same (up to a sign), + # forall i; we pick the best-conditioned one (with the largest denominator) + + return quat_candidates[ + F.one_hot(q_abs.argmax(dim=-1), num_classes=4) > 0.5, : + ].reshape(batch_dim + (4,)) + + +def _sqrt_positive_part(x: torch.Tensor) -> torch.Tensor: + """ + Returns torch.sqrt(torch.max(0, x)) + but with a zero subgradient where x is 0. + """ + ret = torch.zeros_like(x) + positive_mask = x > 0 + ret[positive_mask] = torch.sqrt(x[positive_mask]) + return ret + + +def matrix_to_axis_angle(matrix: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as rotation matrices to axis/angle. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + return quaternion_to_axis_angle(matrix_to_quaternion(matrix)) + + +def euler_angles_to_axis_angle( + euler_angles: torch.Tensor, convention: str +) -> torch.Tensor: + """ + Convert rotations given as Euler angles in radians to axis/angle. + + Args: + euler_angles: Euler angles in radians as tensor of shape (..., 3). + convention: Convention string of three uppercase letters from + {"X", "Y", and "Z"}. + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + return matrix_to_axis_angle(euler_angles_to_matrix(euler_angles, convention)) + + +def euler_angles_to_matrix(euler_angles: torch.Tensor, convention: str) -> torch.Tensor: + """ + Convert rotations given as Euler angles in radians to rotation matrices. + + Args: + euler_angles: Euler angles in radians as tensor of shape (..., 3). + convention: Convention string of three uppercase letters from + {"X", "Y", and "Z"}. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + if euler_angles.dim() == 0 or euler_angles.shape[-1] != 3: + raise ValueError("Invalid input euler angles.") + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + matrices = [ + _axis_angle_rotation(c, e) + for c, e in zip(convention, torch.unbind(euler_angles, -1)) + ] + # return functools.reduce(torch.matmul, matrices) + return torch.matmul(torch.matmul(matrices[0], matrices[1]), matrices[2]) + + +def _axis_angle_rotation(axis: str, angle: torch.Tensor) -> torch.Tensor: + """ + Return the rotation matrices for one of the rotations about an axis + of which Euler angles describe, for each value of the angle given. + + Args: + axis: Axis label "X" or "Y or "Z". + angle: any shape tensor of Euler angles in radians + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). 
+ """ + + cos = torch.cos(angle) + sin = torch.sin(angle) + one = torch.ones_like(angle) + zero = torch.zeros_like(angle) + + if axis == "X": + R_flat = (one, zero, zero, zero, cos, -sin, zero, sin, cos) + elif axis == "Y": + R_flat = (cos, zero, sin, zero, one, zero, -sin, zero, cos) + elif axis == "Z": + R_flat = (cos, -sin, zero, sin, cos, zero, zero, zero, one) + else: + raise ValueError("letter must be either X, Y or Z.") + + return torch.stack(R_flat, -1).reshape(angle.shape + (3, 3)) + + +def axis_angle_to_quaternion(axis_angle: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as axis/angle to quaternions. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + quaternions with real part first, as tensor of shape (..., 4). + """ + angles = torch.norm(axis_angle, p=2, dim=-1, keepdim=True) + half_angles = angles * 0.5 + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + quaternions = torch.cat( + [torch.cos(half_angles), axis_angle * sin_half_angles_over_angles], dim=-1 + ) + return quaternions + + +def axis_angle_to_matrix(axis_angle: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as axis/angle to rotation matrices. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + return quaternion_to_matrix(axis_angle_to_quaternion(axis_angle)) + + +def quaternion_to_matrix(quaternions: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as quaternions to rotation matrices. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + r, i, j, k = torch.unbind(quaternions, -1) + # pyre-fixme[58]: `/` is not supported for operand types `float` and `Tensor`. + two_s = 2.0 / (quaternions * quaternions).sum(-1) + + o = torch.stack( + ( + 1 - two_s * (j * j + k * k), + two_s * (i * j - k * r), + two_s * (i * k + j * r), + two_s * (i * j + k * r), + 1 - two_s * (i * i + k * k), + two_s * (j * k - i * r), + two_s * (i * k - j * r), + two_s * (j * k + i * r), + 1 - two_s * (i * i + j * j), + ), + -1, + ) + return o.reshape(quaternions.shape[:-1] + (3, 3)) + + +def axis_angle_to_euler_angles(axis_angle: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as Euler angles in radians to axis/angle. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. 
+ """ + return matrix_to_euler_angles(axis_angle_to_matrix(axis_angle), "XYZ") + + +def _angle_from_tan( + axis: str, other_axis: str, data, horizontal: bool, tait_bryan: bool +) -> torch.Tensor: + """ + Extract the first or third Euler angle from the two members of + the matrix which are positive constant times its sine and cosine. + + Args: + axis: Axis label "X" or "Y or "Z" for the angle we are finding. + other_axis: Axis label "X" or "Y or "Z" for the middle axis in the + convention. + data: Rotation matrices as tensor of shape (..., 3, 3). + horizontal: Whether we are looking for the angle for the third axis, + which means the relevant entries are in the same row of the + rotation matrix. If not, they are in the same column. + tait_bryan: Whether the first and third axes in the convention differ. + + Returns: + Euler Angles in radians for each matrix in data as a tensor + of shape (...). + """ + + i1, i2 = {"X": (2, 1), "Y": (0, 2), "Z": (1, 0)}[axis] + if horizontal: + i2, i1 = i1, i2 + even = (axis + other_axis) in ["XY", "YZ", "ZX"] + if horizontal == even: + return torch.atan2(data[..., i1], data[..., i2]) + if tait_bryan: + return torch.atan2(-data[..., i2], data[..., i1]) + return torch.atan2(data[..., i2], -data[..., i1]) + + +def _index_from_letter(letter: str) -> int: + if letter == "X": + return 0 + if letter == "Y": + return 1 + if letter == "Z": + return 2 + raise ValueError("letter must be either X, Y or Z.") + + +def matrix_to_euler_angles(matrix: torch.Tensor, convention: str) -> torch.Tensor: + """ + Convert rotations given as rotation matrices to Euler angles in radians. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + convention: Convention string of three uppercase letters. + + Returns: + Euler angles in radians as tensor of shape (..., 3). + """ + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape {matrix.shape}.") + i0 = _index_from_letter(convention[0]) + i2 = _index_from_letter(convention[2]) + tait_bryan = i0 != i2 + if tait_bryan: + central_angle = torch.asin( + matrix[..., i0, i2] * (-1.0 if i0 - i2 in [-1, 2] else 1.0) + ) + else: + central_angle = torch.acos(matrix[..., i0, i0]) + + o = ( + _angle_from_tan( + convention[0], convention[1], matrix[..., i2], False, tait_bryan + ), + central_angle, + _angle_from_tan( + convention[2], convention[1], matrix[..., i0, :], True, tait_bryan + ), + ) + return torch.stack(o, -1) + + +def rotation_6d_to_matrix(d6: torch.Tensor) -> torch.Tensor: + """ + Converts 6D rotation representation by Zhou et al. [1] to rotation matrix + using Gram--Schmidt orthogonalisation per Section B of [1]. + Args: + d6: 6D rotation representation, of size (*, 6) + + Returns: + batch of rotation matrices of size (*, 3, 3) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. 
+ Retrieved from http://arxiv.org/abs/1812.07035 + """ + + a1, a2 = d6[..., :3], d6[..., 3:] + b1 = F.normalize(a1, dim=-1) + b2 = a2 - (b1 * a2).sum(-1, keepdim=True) * b1 + b2 = F.normalize(b2, dim=-1) + b3 = torch.cross(b1, b2, dim=-1) + return torch.stack((b1, b2, b3), dim=-2) + + +def matrix_to_rotation_6d(matrix: torch.Tensor) -> torch.Tensor: + """ + Converts rotation matrices to 6D rotation representation by Zhou et al. [1] + by dropping the last row. Note that 6D representation is not unique. + Args: + matrix: batch of rotation matrices of size (*, 3, 3) + + Returns: + 6D rotation representation, of size (*, 6) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + return matrix[..., :2, :].clone().reshape(*matrix.size()[:-2], 6) + + +def axis_angle_to_rotation_6d(axis_angle: torch.Tensor) -> torch.Tensor: + return matrix_to_rotation_6d(axis_angle_to_matrix(axis_angle)) + + +def rotation_6d_to_axis_angle(d6: torch.Tensor) -> torch.Tensor: + return matrix_to_axis_angle(rotation_6d_to_matrix(d6)) + + +def check_root_pose(root_pose): + # Check that the whole-body root rotation stays within 20 degrees about every axis + if not isinstance(root_pose, torch.Tensor): + root_pose = torch.tensor(root_pose) + PI = 3.1415926 + root_euler = axis_angle_to_euler_angles(root_pose) + root_euler[0] = (root_euler[0] + 2 * PI) % (2 * PI) - PI + + if ( + abs(root_euler[0]) <= PI / 9 + and abs(root_euler[1]) < PI / 9 + and abs(root_euler[2]) < PI / 9 + ): + return True + + return False + + +if __name__ == "__main__": + import json # not imported at module level; needed for this standalone check + + with open( + "/mnt/data/peihao/HumanVideoProcessing/train_data/deepfashion_processed/smplx_params/MEN-Denim-id_00000089-02_7_additional.json", + "r", + ) as f: + smplx_param = json.load(f) + + orient_pose = torch.tensor(smplx_param["body_pose"][0]) + orient_euler = axis_angle_to_euler_angles(orient_pose) + print(orient_euler) diff --git a/LHM/utils/scheduler.py b/LHM/utils/scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..7fc151d816e2787f37f9bea02b0945e06a933c01 --- /dev/null +++ b/LHM/utils/scheduler.py @@ -0,0 +1,42 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
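A quick sanity check that is useful when touching the conversion chain in `LHM/utils/rot6d.py` above: an axis-angle rotation should survive the round trip through the 6D representation. A minimal sketch assuming only the functions defined above; the test rotation is arbitrary and well below 180°, so the recovered vector matches the input:

```python
import torch
from LHM.utils.rot6d import axis_angle_to_rotation_6d, rotation_6d_to_axis_angle

aa = torch.tensor([[0.3, -0.2, 0.1]])          # batch of one axis-angle rotation
d6 = axis_angle_to_rotation_6d(aa)             # shape (1, 6)
aa_back = rotation_6d_to_axis_angle(d6)        # shape (1, 3)
print(torch.allclose(aa, aa_back, atol=1e-5))  # expected: True
```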
+ + +import math +from torch.optim.lr_scheduler import LRScheduler +from accelerate.logging import get_logger + + +logger = get_logger(__name__) + + +class CosineWarmupScheduler(LRScheduler): + def __init__(self, optimizer, warmup_iters: int, max_iters: int, initial_lr: float = 1e-10, last_iter: int = -1): + self.warmup_iters = warmup_iters + self.max_iters = max_iters + self.initial_lr = initial_lr + super().__init__(optimizer, last_iter) + + def get_lr(self): + logger.debug(f"step count: {self._step_count} | warmup iters: {self.warmup_iters} | max iters: {self.max_iters}") + if self._step_count <= self.warmup_iters: + return [ + self.initial_lr + (base_lr - self.initial_lr) * self._step_count / self.warmup_iters + for base_lr in self.base_lrs] + else: + cos_iter = self._step_count - self.warmup_iters + cos_max_iter = self.max_iters - self.warmup_iters + cos_theta = cos_iter / cos_max_iter * math.pi + cos_lr = [base_lr * (1 + math.cos(cos_theta)) / 2 for base_lr in self.base_lrs] + return cos_lr diff --git a/LHM/utils/video.py b/LHM/utils/video.py new file mode 100644 index 0000000000000000000000000000000000000000..99f5ba30a9c5a7d831ef29b0f88e3b2cfd384ade --- /dev/null +++ b/LHM/utils/video.py @@ -0,0 +1,44 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
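`CosineWarmupScheduler` in `LHM/utils/scheduler.py` above ramps linearly from `initial_lr` to the optimizer's base LR over `warmup_iters`, then decays with a cosine to zero at `max_iters`. A small sketch that drives it with a throwaway optimizer to print the schedule; because `get_lr` logs through accelerate, the sketch initializes `PartialState` first (an assumption about the accelerate version in use, and harmless otherwise):

```python
import torch
from accelerate import PartialState
from LHM.utils.scheduler import CosineWarmupScheduler

PartialState()  # accelerate loggers expect the state to be initialized

param = torch.nn.Parameter(torch.zeros(1))
optimizer = torch.optim.AdamW([param], lr=4e-5)
scheduler = CosineWarmupScheduler(optimizer, warmup_iters=10, max_iters=100)

for step in range(100):
    optimizer.step()        # normally preceded by forward/backward
    scheduler.step()
    if step % 20 == 0:
        print(step, scheduler.get_last_lr())
```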
+ + +import os +import numpy as np +import imageio +import torch + +def images_to_video(images, output_path, fps, gradio_codec: bool, verbose=False): + # images: torch.tensor (T, C, H, W), 0-1 or numpy: (T, H, W, 3) 0-255 + os.makedirs(os.path.dirname(output_path), exist_ok=True) + frames = [] + for i in range(images.shape[0]): + if isinstance(images, torch.Tensor): + frame = (images[i].permute(1, 2, 0).cpu().numpy() * 255).astype(np.uint8) + assert frame.shape[0] == images.shape[2] and frame.shape[1] == images.shape[3], \ + f"Frame shape mismatch: {frame.shape} vs {images.shape}" + assert frame.min() >= 0 and frame.max() <= 255, \ + f"Frame value out of range: {frame.min()} ~ {frame.max()}" + else: + frame = images[i] + frames.append(frame) + frames = np.stack(frames) + if gradio_codec: + imageio.mimwrite(output_path, frames, fps=fps, quality=10) + else: + # imageio.mimwrite(output_path, frames, fps=fps, codec='mpeg4', quality=10) + imageio.mimwrite(output_path, frames, fps=fps, quality=10) + + if verbose: + print(f"Using gradio codec option {gradio_codec}") + print(f"Saved video to {output_path}") diff --git a/README.md b/README.md index f469f1e1f2c72d26cff4b67965f8ab200367e6e7..cb347981b8b7d35114ba5e63ae2cd09d240bfa3c 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,14 @@ --- -title: LHM ZeroDebug -emoji: 🏆 +title: LHM +emoji: ⚡ colorFrom: red -colorTo: red +colorTo: indigo sdk: gradio -sdk_version: 5.21.0 +sdk_version: 5.20.1 app_file: app.py pinned: false license: apache-2.0 -short_description: A debug space for LHM running on ZeroGPU +short_description: Large Animatable Human Model --- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference diff --git a/app_lhm.py b/app_lhm.py new file mode 100644 index 0000000000000000000000000000000000000000..e929b8bd9e1066a8f58c73569136429d0c655111 --- /dev/null +++ b/app_lhm.py @@ -0,0 +1,272 @@ +# Copyright (c) 2023-2024, Qi Zuo +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
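For completeness, a usage sketch for `images_to_video` in `LHM/utils/video.py` above (the near-identical helper in `ffmpeg_utils.py` writes through `imageio.v3` instead). The tensor layout follows the comment in the function, `(T, C, H, W)` in `[0, 1]`; the output path is a placeholder and the ffmpeg backend for imageio is assumed to be installed:

```python
import torch
from LHM.utils.video import images_to_video

frames = torch.rand(25, 3, 288, 288)  # one second of noise at 25 fps
images_to_video(
    frames,
    output_path="./tmp/noise.mp4",  # placeholder path; parent dir is created automatically
    fps=25,
    gradio_codec=True,
    verbose=True,
)
```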
+ + +import os +from PIL import Image +import numpy as np +import gradio as gr +import base64 + +import subprocess +import os + +def install_cuda_toolkit(): +# CUDA_TOOLKIT_URL = "https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run" +# # CUDA_TOOLKIT_URL = "https://developer.download.nvidia.com/compute/cuda/12.2.0/local_installers/cuda_12.2.0_535.54.03_linux.run" +# CUDA_TOOLKIT_FILE = "/tmp/%s" % os.path.basename(CUDA_TOOLKIT_URL) +# subprocess.call(["wget", "-q", CUDA_TOOLKIT_URL, "-O", CUDA_TOOLKIT_FILE]) +# subprocess.call(["chmod", "+x", CUDA_TOOLKIT_FILE]) +# subprocess.call([CUDA_TOOLKIT_FILE, "--silent", "--toolkit"]) + + os.environ["CUDA_HOME"] = "/usr/local/cuda" + os.environ["PATH"] = "%s/bin:%s" % (os.environ["CUDA_HOME"], os.environ["PATH"]) + os.environ["LD_LIBRARY_PATH"] = "%s/lib:%s" % ( + os.environ["CUDA_HOME"], + "" if "LD_LIBRARY_PATH" not in os.environ else os.environ["LD_LIBRARY_PATH"], + ) + # Fix: arch_list[-1] += '+PTX'; IndexError: list index out of range + os.environ["TORCH_CUDA_ARCH_LIST"] = "8.0;8.6" + +install_cuda_toolkit() + +def launch_pretrained(): + from huggingface_hub import snapshot_download, hf_hub_download + hf_hub_download(repo_id="DyrusQZ/LHM_Runtime", repo_type='model', filename='assets.tar', local_dir="./") + os.system("tar -xvf assets.tar && rm assets.tar") + hf_hub_download(repo_id="DyrusQZ/LHM_Runtime", repo_type='model', filename='LHM-0.5B.tar', local_dir="./") + os.system("tar -xvf LHM-0.5B.tar && rm LHM-0.5B.tar") + hf_hub_download(repo_id="DyrusQZ/LHM_Runtime", repo_type='model', filename='LHM_prior_model.tar', local_dir="./") + os.system("tar -xvf LHM_prior_model.tar && rm LHM_prior_model.tar") + +def launch_env_not_compile_with_cuda(): + os.system("pip install chumpy") + os.system("pip uninstall -y basicsr") + os.system("pip install git+https://github.com/hitsz-zuoqi/BasicSR/") + os.system("pip install git+https://github.com/hitsz-zuoqi/sam2/") + # os.system("pip install git+https://github.com/ashawkey/diff-gaussian-rasterization/") + # os.system("pip install git+https://github.com/camenduru/simple-knn/") + os.system("pip install --no-index --no-cache-dir pytorch3d -f https://dl.fbaipublicfiles.com/pytorch3d/packaging/wheels/py310_cu121_pyt251/download.html") + +# def launch_env_compile_with_cuda(): +# # simple_knn +# os.system("wget oss://virutalbuy-public/share/aigc3d/data/for_lingteng/LHM/simple_knn.zip && wget oss://virutalbuy-public/share/aigc3d/data/for_lingteng/LHM/simple_knn-0.0.0.dist-info.zip") +# os.system("unzip simple_knn.zip && unzip simple_knn-0.0.0.dist-info.zip") +# os.system("mv simple_knn /usr/local/lib/python3.10/site-packages/") +# os.system("mv simple_knn-0.0.0.dist-info /usr/local/lib/python3.10/site-packages/") + +# # diff_gaussian +# os.system("wget oss://virutalbuy-public/share/aigc3d/data/for_lingteng/LHM/diff_gaussian_rasterization.zip && wget oss://virutalbuy-public/share/aigc3d/data/for_lingteng/LHM/diff_gaussian_rasterization-0.0.0.dist-info.zip") +# os.system("unzip diff_gaussian_rasterization.zip && unzip diff_gaussian_rasterization-0.0.0.dist-info.zip") +# os.system("mv diff_gaussian_rasterization /usr/local/lib/python3.10/site-packages/") +# os.system("mv diff_gaussian_rasterization-0.0.0.dist-info /usr/local/lib/python3.10/site-packages/") + +# # pytorch3d +# os.system("wget oss://virutalbuy-public/share/aigc3d/data/for_lingteng/LHM/pytorch3d.zip && wget oss://virutalbuy-public/share/aigc3d/data/for_lingteng/LHM/pytorch3d-0.7.8.dist-info.zip") +# 
os.system("unzip pytorch3d.zip && unzip pytorch3d-0.7.8.dist-info.zip") +# os.system("mv pytorch3d /usr/local/lib/python3.10/site-packages/") +# os.system("mv pytorch3d-0.7.8.dist-info /usr/local/lib/python3.10/site-packages/") + +launch_pretrained() +launch_env_not_compile_with_cuda() +# launch_env_compile_with_cuda() + +def assert_input_image(input_image): + if input_image is None: + raise gr.Error("No image selected or uploaded!") + +def prepare_working_dir(): + import tempfile + working_dir = tempfile.TemporaryDirectory() + return working_dir + +def init_preprocessor(): + from LHM.utils.preprocess import Preprocessor + global preprocessor + preprocessor = Preprocessor() + +def preprocess_fn(image_in: np.ndarray, remove_bg: bool, recenter: bool, working_dir): + image_raw = os.path.join(working_dir.name, "raw.png") + with Image.fromarray(image_in) as img: + img.save(image_raw) + image_out = os.path.join(working_dir.name, "rembg.png") + success = preprocessor.preprocess(image_path=image_raw, save_path=image_out, rmbg=remove_bg, recenter=recenter) + assert success, f"Failed under preprocess_fn!" + return image_out + +def get_image_base64(path): + with open(path, "rb") as image_file: + encoded_string = base64.b64encode(image_file.read()).decode() + return f"data:image/png;base64,{encoded_string}" + + +def demo_lhm(infer_impl): + + def core_fn(image: str, video_params, working_dir): + image_raw = os.path.join(working_dir.name, "raw.png") + with Image.fromarray(image) as img: + img.save(image_raw) + + base_vid = os.path.basename(video_params).split("_")[0] + smplx_params_dir = os.path.join("./assets/sample_motion", base_vid, "smplx_params") + + dump_video_path = os.path.join(working_dir.name, "output.mp4") + dump_image_path = os.path.join(working_dir.name, "output.png") + # print(video_params) + status = infer_impl( + gradio_demo_image=image_raw, + gradio_motion_file=smplx_params_dir, + gradio_masked_image=dump_image_path, + gradio_video_save_path=dump_video_path + ) + if status: + return dump_image_path, dump_video_path + else: + return None, None + + _TITLE = '''LHM: Large Animatable Human Model''' + + _DESCRIPTION = ''' + Reconstruct a human avatar in 0.2 seconds with A100! + ''' + + with gr.Blocks(analytics_enabled=False) as demo: + + # + logo_url = "./assets/rgba_logo_new.png" + logo_base64 = get_image_base64(logo_url) + gr.HTML( + f""" +
+ <!-- header HTML not recoverable from this extract: a centered block with the logo image (embedded via logo_base64) and the title below -->
+ Large Animatable Human Model
+ """
+ )
+ gr.HTML(
+ """
+ <!-- notice HTML not recoverable from this extract -->
+ Notes: Please input full-body image in case of detection errors.
""" + ) + + # DISPLAY + with gr.Row(): + + with gr.Column(variant='panel', scale=1): + with gr.Tabs(elem_id="openlrm_input_image"): + with gr.TabItem('Input Image'): + with gr.Row(): + input_image = gr.Image(label="Input Image", image_mode="RGBA", height=480, width=270, sources="upload", type="numpy", elem_id="content_image") + # EXAMPLES + with gr.Row(): + examples = [ + ['assets/sample_input/joker.jpg'], + ['assets/sample_input/anime.png'], + ['assets/sample_input/basket.png'], + ['assets/sample_input/ai_woman1.JPG'], + ['assets/sample_input/anime2.JPG'], + ['assets/sample_input/anime3.JPG'], + ['assets/sample_input/boy1.png'], + ['assets/sample_input/choplin.jpg'], + ['assets/sample_input/eins.JPG'], + ['assets/sample_input/girl1.png'], + ['assets/sample_input/girl2.png'], + ['assets/sample_input/robot.jpg'], + ] + gr.Examples( + examples=examples, + inputs=[input_image], + examples_per_page=20, + ) + + with gr.Column(): + with gr.Tabs(elem_id="openlrm_input_video"): + with gr.TabItem('Input Video'): + with gr.Row(): + video_input = gr.Video(label="Input Video",height=480, width=270, interactive=False) + + examples = [ + # './assets/sample_motion/danaotiangong/danaotiangong_origin.mp4', + './assets/sample_motion/ex5/ex5_origin.mp4', + './assets/sample_motion/girl2/girl2_origin.mp4', + './assets/sample_motion/jntm/jntm_origin.mp4', + './assets/sample_motion/mimo1/mimo1_origin.mp4', + './assets/sample_motion/mimo2/mimo2_origin.mp4', + './assets/sample_motion/mimo4/mimo4_origin.mp4', + './assets/sample_motion/mimo5/mimo5_origin.mp4', + './assets/sample_motion/mimo6/mimo6_origin.mp4', + './assets/sample_motion/nezha/nezha_origin.mp4', + './assets/sample_motion/taiji/taiji_origin.mp4' + ] + + gr.Examples( + examples=examples, + inputs=[video_input], + examples_per_page=20, + ) + with gr.Column(variant='panel', scale=1): + with gr.Tabs(elem_id="openlrm_processed_image"): + with gr.TabItem('Processed Image'): + with gr.Row(): + processed_image = gr.Image(label="Processed Image", image_mode="RGBA", type="filepath", elem_id="processed_image", height=480, width=270, interactive=False) + + with gr.Column(variant='panel', scale=1): + with gr.Tabs(elem_id="openlrm_render_video"): + with gr.TabItem('Rendered Video'): + with gr.Row(): + output_video = gr.Video(label="Rendered Video", format="mp4", height=480, width=270, autoplay=True) + + # SETTING + with gr.Row(): + with gr.Column(variant='panel', scale=1): + submit = gr.Button('Generate', elem_id="openlrm_generate", variant='primary') + + + working_dir = gr.State() + submit.click( + fn=assert_input_image, + inputs=[input_image], + queue=False, + ).success( + fn=prepare_working_dir, + outputs=[working_dir], + queue=False, + ).success( + fn=core_fn, + inputs=[input_image, video_input, working_dir], # video_params refer to smpl dir + outputs=[processed_image, output_video], + ) + + demo.queue() + demo.launch() + + +def launch_gradio_app(): + + os.environ.update({ + "APP_ENABLED": "1", + "APP_MODEL_NAME": "./exps/releases/video_human_benchmark/human-lrm-500M/step_060000/", + "APP_INFER": "./configs/inference/human-lrm-500M.yaml", + "APP_TYPE": "infer.human_lrm", + "NUMBA_THREADING_LAYER": 'omp', + }) + + from LHM.runners import REGISTRY_RUNNERS + RunnerClass = REGISTRY_RUNNERS[os.getenv("APP_TYPE")] + with RunnerClass() as runner: + demo_lhm(infer_impl=runner.infer) + + +if __name__ == '__main__': + + launch_gradio_app() diff --git a/configs/accelerate-train-1gpu.yaml b/configs/accelerate-train-1gpu.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..67f92fae83ad40f4ad56be98264cb8873ec2ca43 --- /dev/null +++ b/configs/accelerate-train-1gpu.yaml @@ -0,0 +1,16 @@ +compute_environment: LOCAL_MACHINE +debug: false +distributed_type: MULTI_GPU +downcast_bf16: 'no' +gpu_ids: all +machine_rank: 0 +main_training_function: main +mixed_precision: bf16 +num_machines: 1 +num_processes: 1 +rdzv_backend: static +same_network: true +tpu_env: [] +tpu_use_cluster: false +tpu_use_sudo: false +use_cpu: false diff --git a/configs/accelerate-train-deepspeed.yaml b/configs/accelerate-train-deepspeed.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a36c66f6f575aa1dd33099fd4f49db1087c048e3 --- /dev/null +++ b/configs/accelerate-train-deepspeed.yaml @@ -0,0 +1,23 @@ +compute_environment: LOCAL_MACHINE +debug: false +deepspeed_config: + gradient_accumulation_steps: 1 + gradient_clipping: 1.0 + offload_optimizer_device: none + offload_param_device: none + zero3_init_flag: false + zero_stage: 2 +distributed_type: DEEPSPEED +downcast_bf16: 'no' +enable_cpu_affinity: false +machine_rank: 0 +main_training_function: main +mixed_precision: bf16 +num_machines: 1 +num_processes: 8 +rdzv_backend: static +same_network: true +tpu_env: [] +tpu_use_cluster: false +tpu_use_sudo: false +use_cpu: false diff --git a/configs/accelerate-train.yaml b/configs/accelerate-train.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4f0557131aa2c1bded4cb4cfdc1cc58a3b25765b --- /dev/null +++ b/configs/accelerate-train.yaml @@ -0,0 +1,16 @@ +compute_environment: LOCAL_MACHINE +debug: false +distributed_type: MULTI_GPU +downcast_bf16: 'no' +gpu_ids: all +machine_rank: 0 +main_training_function: main +mixed_precision: bf16 +num_machines: 1 +num_processes: 8 +rdzv_backend: static +same_network: true +tpu_env: [] +tpu_use_cluster: false +tpu_use_sudo: false +use_cpu: false diff --git a/configs/infer-gradio.yaml b/configs/infer-gradio.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0a16a4306ce5e19d52245035f88fc8e213656d2e --- /dev/null +++ b/configs/infer-gradio.yaml @@ -0,0 +1,7 @@ +source_size: 336 +render_size: 288 +render_views: 100 +render_fps: 25 +frame_size: 2 +mesh_size: 384 +mesh_thres: 3.0 diff --git a/configs/inference/human-lrm-1B.yaml b/configs/inference/human-lrm-1B.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7b24c3a377547a29bcaafdf3b36b64a01e3a5c02 --- /dev/null +++ b/configs/inference/human-lrm-1B.yaml @@ -0,0 +1,168 @@ +# LHM-1B +experiment: + type: lrm + seed: 42 + parent: video_human_benchmark + child: human-lrm-1B +model: + # image encoder + model_name: SapDinoLRMBHSD3_5 + encoder_type: dinov2_fusion + encoder_model_name: "dinov2_vitl14_reg" + encoder_feat_dim: 1024 # dinov2 embeding size 1024 + encoder_freeze: False + + fine_encoder_type: sapiens + fine_encoder_model_name: "./pretrained_models/sapiens/pretrained/checkpoints/sapiens_1b/sapiens_1b_epoch_173_torchscript.pt2" # sapiens pretrained model path + fine_encoder_feat_dim: 1536 # sapiens embeding size 1024 + fine_encoder_freeze: True + + + use_face_id: True + + # points embeddings + # num_pcl: 10240 + latent_query_points_type: "e2e_smplx_sub1" + pcl_dim: 1024 + + facesr: True + + + # transformer + # # camera_embed_dim: 1024 + # transformer_dim: 512 + # transformer_layers: 12 + # transformer_heads: 8 + + transformer_type: "sd3_mm_bh_cond" # multi-modal attention. 
+ transformer_heads: 16 # 30 + transformer_dim: 1024 # 30 * 64=1920 + transformer_layers: 15 # 30 + tf_grad_ckpt: true + encoder_grad_ckpt: true + + # for gs renderer + human_model_path: "./pretrained_models/human_model_files" + smplx_subdivide_num: 1 + smplx_type: "smplx_2" + gs_query_dim: 1024 + gs_use_rgb: True + gs_sh: 3 + dense_sample_pts: 40000 # 4,000 + gs_mlp_network_config: + n_neurons: 512 + n_hidden_layers: 2 + activation: silu + # gs_xyz_offset_max_step: 0.05625 # 1.8 / 32 + # gs_clip_scaling: 0.2 # avoid too large Sphere + gs_xyz_offset_max_step: 1. # 1.8 / 32 + gs_clip_scaling: [100, 0.01, 0.05, 3000] # [start, start_v, end_v, end] + expr_param_dim: 100 + shape_param_dim: 10 + + fix_opacity: False + fix_rotation: False + cano_pose_type: 1 # 0 means exavatar-pose 1 indicates REC-MV pose + +dataset: + subsets: + - name: video_human_flame + root_dirs: "./train_data/ClothVideo" + meta_path: + train: "./train_data/ClothVideo/label/valid_id_with_img_list_clean_30W.json" + val: "./train_data/ClothVideo/label/valid_id_with_img_list_val.json" + sample_rate: 1.0 + use_flame: True + src_head_size: 112 + - name: video_human_flame_v2 + root_dirs: "./train_data/ClothVideo" + meta_path: + train: "./train_data/ClothVideo/label/valid_synthetic_data_train.json" + val: "./train_data/ClothVideo/label/valid_synthetic_data_val.json" + sample_rate: 1.0 + use_flame: True + src_head_size: 112 + sample_side_views: 5 + source_image_res: 1024 + src_head_size: 112 + render_image: + low: 512 + high: 512 + region: null + num_train_workers: 4 + multiply: 16 # dino features + num_val_workers: 2 + pin_mem: true + repeat_num: 1 + +train: + mixed_precision: bf16 # REPLACE THIS BASED ON GPU TYPE + find_unused_parameters: false + loss_func: + pixel_loss: l1 # L1 or MSE + ball_loss: + type: heuristic # heuristic ball_loss + group: + head: 1. + lower_body: 100. + upper_body: 1000. + hands: 10000. + offset_loss: + type: classical # heuristic ball_loss + group: + head: 1. + lower_body: 1. + upper_body: 100. + hands: 1000. 
+ loss: + pixel_weight: 0.0 + masked_pixel_weight: 1.0 + masked_head_weight: 0.0 + perceptual_weight: 1.0 + # tv_weight: 5e-4 + tv_weight: -1 + mask_weight: 1.0 + face_id_weight: 0.05 + asap_weight: 10.0 # ball loss + acap_weight: 1000.0 # offset loss + optim: + lr: 4e-5 + weight_decay: 0.05 + beta1: 0.9 + beta2: 0.95 + clip_grad_norm: 0.1 # diffusion model + scheduler: + type: cosine + warmup_real_iters: 0 + batch_size: 2 # REPLACE THIS (PER GPU) + accum_steps: 1 # REPLACE THIS + epochs: 60 # REPLACE THIS + debug_global_steps: null + +val: + batch_size: 2 + global_step_period: 1000 + debug_batches: 10 + +saver: + auto_resume: True + checkpoint_root: None + checkpoint_global_steps: 1000 + checkpoint_keep_level: 60 + +logger: + stream_level: WARNING + log_level: INFO + log_root: ./exps/logs + tracker_root: ./exps/trackers + enable_profiler: false + trackers: + - tensorboard + image_monitor: + train_global_steps: 100 + samples_per_log: 4 + +compile: + suppress_errors: true + print_specializations: true + disable: true \ No newline at end of file diff --git a/configs/inference/human-lrm-500M.yaml b/configs/inference/human-lrm-500M.yaml new file mode 100644 index 0000000000000000000000000000000000000000..636b0365850bc0f626e119d66caecc2a9ef6b43c --- /dev/null +++ b/configs/inference/human-lrm-500M.yaml @@ -0,0 +1,160 @@ +# LHM-500M +experiment: + type: lrm + seed: 42 + parent: video_human_benchmark + child: human-lrm-500M +model: + # image encoder + model_name: SapDinoLRMBHSD3_5 + encoder_type: dinov2_fusion + encoder_model_name: "dinov2_vitl14_reg" + encoder_feat_dim: 1024 # dinov2 embeding size 1024 + encoder_freeze: False + + fine_encoder_type: sapiens + fine_encoder_model_name: "./pretrained_models/sapiens/pretrained/checkpoints/sapiens_1b/sapiens_1b_epoch_173_torchscript.pt2" # sapiens pretrained model path + fine_encoder_feat_dim: 1536 # sapiens embeding size 1024 + fine_encoder_freeze: True + + use_face_id: True + + # points embeddings + # num_pcl: 10240 + latent_query_points_type: "e2e_smplx_sub1" + pcl_dim: 1024 + facesr: True + + transformer_type: "sd3_mm_bh_cond" # multi-modal BH attention. + transformer_heads: 16 # 30 + transformer_dim: 1024 # 30 * 64=1920 + transformer_layers: 5 # 30 + tf_grad_ckpt: true + encoder_grad_ckpt: true + + # for gs renderer + human_model_path: "./pretrained_models/human_model_files" + smplx_subdivide_num: 1 + smplx_type: "smplx_2" + gs_query_dim: 1024 + gs_use_rgb: True + gs_sh: 3 + dense_sample_pts: 40000 # 4,000 + gs_mlp_network_config: + n_neurons: 512 + n_hidden_layers: 2 + activation: silu + # gs_xyz_offset_max_step: 0.05625 # 1.8 / 32 + # gs_clip_scaling: 0.2 # avoid too large Sphere + gs_xyz_offset_max_step: 1. 
# 1.8 / 32 + gs_clip_scaling: [100, 0.01, 0.05, 3000] # [start, start_v, end_v, end] + expr_param_dim: 100 + shape_param_dim: 10 + + fix_opacity: False + fix_rotation: False + cano_pose_type: 1 # 0 means exavatar-pose 1 indicates REC-MV pose + +dataset: + subsets: + - name: video_human_flame + root_dirs: "./train_data/ClothVideo" + meta_path: + train: "./train_data/ClothVideo/label/valid_id_with_img_list_clean_30W.json" + val: "./train_data/ClothVideo/label/valid_id_with_img_list_val.json" + sample_rate: 1.0 + use_flame: True + src_head_size: 112 + - name: video_human_flame_v2 + root_dirs: "./train_data/ClothVideo" + meta_path: + train: "./train_data/ClothVideo/label/valid_synthetic_data_train.json" + val: "./train_data/ClothVideo/label/valid_synthetic_data_val.json" + sample_rate: 1.0 + use_flame: True + src_head_size: 112 + sample_side_views: 5 + source_image_res: 1024 + src_head_size: 112 + render_image: + low: 512 + high: 512 + region: null + num_train_workers: 4 + multiply: 16 # dino features + num_val_workers: 2 + pin_mem: true + repeat_num: 1 + +train: + mixed_precision: bf16 # REPLACE THIS BASED ON GPU TYPE + find_unused_parameters: false + loss_func: + pixel_loss: l1 # L1 or MSE + ball_loss: + type: heuristic # heuristic ball_loss + group: + head: 1. + lower_body: 100. + upper_body: 1000. + hands: 10000. + offset_loss: + type: classical # heuristic ball_loss + group: + head: 1. + lower_body: 1. + upper_body: 100. + hands: 1000. + loss: + pixel_weight: 0.0 + masked_pixel_weight: 1.0 + masked_head_weight: 0.0 + perceptual_weight: 1.0 + # tv_weight: 5e-4 + tv_weight: -1 + mask_weight: 1.0 + face_id_weight: 0.05 + asap_weight: 10.0 # ball loss + acap_weight: 1000.0 # offset loss + optim: + lr: 4e-5 + weight_decay: 0.05 + beta1: 0.9 + beta2: 0.95 + clip_grad_norm: 0.1 # diffusion model + scheduler: + type: cosine + warmup_real_iters: 0 + batch_size: 4 # REPLACE THIS (PER GPU) + accum_steps: 1 # REPLACE THIS + epochs: 60 # REPLACE THIS + debug_global_steps: null + +val: + batch_size: 2 + global_step_period: 1000 + debug_batches: 10 + +saver: + auto_resume: True + load_model: None + checkpoint_root: ./exps/checkpoints + checkpoint_global_steps: 1000 + checkpoint_keep_level: 60 + +logger: + stream_level: WARNING + log_level: INFO + log_root: ./exps/logs + tracker_root: ./exps/trackers + enable_profiler: false + trackers: + - tensorboard + image_monitor: + train_global_steps: 100 + samples_per_log: 4 + +compile: + suppress_errors: true + print_specializations: true + disable: true diff --git a/engine/BiRefNet/LICENSE b/engine/BiRefNet/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..485921e8271834871ff2baafb892b79ef6a75e75 --- /dev/null +++ b/engine/BiRefNet/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 ZhengPeng + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/engine/BiRefNet/README.md b/engine/BiRefNet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b674546cd64d1f3f39947c423fc50076b798527a --- /dev/null +++ b/engine/BiRefNet/README.md @@ -0,0 +1,364 @@ +

+# Bilateral Reference for High-Resolution Dichotomous Image Segmentation
+
+Peng Zheng 1,4,5,6, Dehong Gao 2, Deng-Ping Fan 1*, Li Liu 3, Jorma Laaksonen 4, Wanli Ouyang 5, Nicu Sebe 6
+
+1 Nankai University, 2 Northwestern Polytechnical University, 3 National University of Defense Technology,
+4 Aalto University, 5 Shanghai AI Laboratory, 6 University of Trento
+ + +| *DIS-Sample_1* | *DIS-Sample_2* | +| :------------------------------: | :-------------------------------: | +| | | + +This repo is the official implementation of "[**Bilateral Reference for High-Resolution Dichotomous Image Segmentation**](https://arxiv.org/pdf/2401.03407)" (___CAAI AIR 2024___). + +> [!note] +> **We need more GPU resources** to push forward the performance of BiRefNet, especially on *matting* tasks, higher-resolution inference (*2K*), and more *efficient* model design. If you are happy to cooperate, please contact me at zhengpeng0108@gmail.com. + +## News :newspaper: +* **`Oct 26, 2024`:** We added the [guideline of conducting fine-tuning on custom data](https://github.com/ZhengPeng7/BiRefNet?tab=readme-ov-file#pen-fine-tuning-on-custom-data) with existing weights. +* **`Oct 6, 2024`:** We uploaded the [BiRefNet-matting](https://huggingface.co/ZhengPeng7/BiRefNet-matting) model for general trimap-free matting use. +* **`Sep 24, 2024`:** We uploaded the [BiRefNet_lite-2K](https://huggingface.co/ZhengPeng7/BiRefNet_lite-2K) model, which takes inputs in a much higher resolution (2560x1440). We also added the [notebook](https://github.com/ZhengPeng7/BiRefNet/blob/main/tutorials/BiRefNet_inference_video.ipynb) for inference on videos. +* **`Sep 7, 2024`:** Thanks to [Freepik](https://www.freepik.com) for supporting me with GPUs for more extensive experiments, especially on BiRefNet for 2K inference! +* **`Aug 30, 2024`:** We uploaded notebooks in `tutorials` to run the inference and ONNX conversion locally. +* **`Aug 23, 2024`:** Our BiRefNet is now officially released [online](https://www.sciopen.com/article/10.26599/AIR.2024.9150038) on CAAI AIR journal. And thanks to the [press release](https://www.eurekalert.org/news-releases/1055380). +* **`Aug 19, 2024`:** We uploaded the ONNX model files of all weights in the [GitHub release](https://github.com/ZhengPeng7/BiRefNet/releases/tag/v1) and [GDrive folder](https://drive.google.com/drive/u/0/folders/1kZM55bwsRdS__bdnsXpkmH6QPyza-9-N). Check out the **ONNX conversion** part in [model zoo](https://github.com/ZhengPeng7/BiRefNet?tab=readme-ov-file#model-zoo) for more details. +* **`Jul 30, 2024`:** Thanks to @not-lain for his kind efforts in adding BiRefNet to the official huggingface.js [repo](https://github.com/huggingface/huggingface.js/blob/3a8651fbc6508920475564a692bf0e5b601d9343/packages/tasks/src/model-libraries-snippets.ts#L763). +* **`Jul 28, 2024`:** We released the [Colab demo for box-guided segmentation](https://colab.research.google.com/drive/1B6aKZ3ekcvKMkSBn0N5mCASLUYMp0whK). +* **`Jul 15, 2024`:** We deployed our BiRefNet on [Hugging Face Models](https://huggingface.co/ZhengPeng7/BiRefNet) for users to easily load it in one line code. +* **`Jun 21, 2024`:** We released and uploaded the Chinese version of our original paper to my [GDrive](https://drive.google.com/file/d/1aBnJ_R9lbnC2dm8dqD0-pzP2Cu-U1Xpt/view). +* **`May 28, 2024`:** We hold a [model zoo](https://github.com/ZhengPeng7/BiRefNet?tab=readme-ov-file#model-zoo) with well-trained weights of our BiRefNet in different sizes and for different tasks, including general use, matting segmentation, DIS, HRSOD, COD, etc. +* **`May 7, 2024`:** We also released the [Colab demo for multiple images inference](https://colab.research.google.com/drive/14Dqg7oeBkFEtchaHLNpig2BcdkZEogba). Many thanks to @rishabh063 for his support on it. 
+* **`Apr 9, 2024`:** Thanks to [Features and Labels Inc.](https://fal.ai/) for deploying a cool online BiRefNet [inference API](https://fal.ai/models/fal-ai/birefnet/playground) and providing me with strong GPU resources for 4 months on more extensive experiments! +* **`Mar 7, 2024`:** We released BiRefNet codes, the well-trained weights for all tasks in the original papers, and all related stuff in my [GDrive folder](https://drive.google.com/drive/folders/1s2Xe0cjq-2ctnJBR24563yMSCOu4CcxM). Meanwhile, we also deployed our BiRefNet on [Hugging Face Spaces](https://huggingface.co/spaces/ZhengPeng7/BiRefNet_demo) for easier online use and released the [Colab demo for inference and evaluation](https://colab.research.google.com/drive/1MaEiBfJ4xIaZZn0DqKrhydHB8X97hNXl). +* **`Jan 7, 2024`:** We released our paper on [arXiv](https://arxiv.org/pdf/2401.03407). + + +## :rocket: Load BiRefNet in _ONE LINE_ by HuggingFace, check more: [![BiRefNet](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Models-blue)](https://huggingface.co/ZhengPeng7/birefnet) +```python +from transformers import AutoModelForImageSegmentation +birefnet = AutoModelForImageSegmentation.from_pretrained('zhengpeng7/BiRefNet', trust_remote_code=True) +``` +## :flight_arrival: Inference Partner: +We are really happy to collaborate with [FAL](https://fal.ai) to deploy the **inference API** of BiRefNet. You can access this service via the link below: ++ https://fal.ai/models/fal-ai/birefnet + +Our BiRefNet has achieved SOTA on many similar HR tasks: + +**DIS**: [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/dichotomous-image-segmentation-on-dis-te1)](https://paperswithcode.com/sota/dichotomous-image-segmentation-on-dis-te1?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/dichotomous-image-segmentation-on-dis-te2)](https://paperswithcode.com/sota/dichotomous-image-segmentation-on-dis-te2?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/dichotomous-image-segmentation-on-dis-te3)](https://paperswithcode.com/sota/dichotomous-image-segmentation-on-dis-te3?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/dichotomous-image-segmentation-on-dis-te4)](https://paperswithcode.com/sota/dichotomous-image-segmentation-on-dis-te4?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/dichotomous-image-segmentation-on-dis-vd)](https://paperswithcode.com/sota/dichotomous-image-segmentation-on-dis-vd?p=bilateral-reference-for-high-resolution) + +
+*Figure of comparison on DIS papers with codes (by the time of this work).*
+
+ +**COD**:[![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/camouflaged-object-segmentation-on-cod)](https://paperswithcode.com/sota/camouflaged-object-segmentation-on-cod?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/camouflaged-object-segmentation-on-nc4k)](https://paperswithcode.com/sota/camouflaged-object-segmentation-on-nc4k?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/camouflaged-object-segmentation-on-camo)](https://paperswithcode.com/sota/camouflaged-object-segmentation-on-camo?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/camouflaged-object-segmentation-on-chameleon)](https://paperswithcode.com/sota/camouflaged-object-segmentation-on-chameleon?p=bilateral-reference-for-high-resolution) + +
+*Figure of comparison on COD papers with codes (by the time of this work).*
+
+ +**HRSOD**: [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/rgb-salient-object-detection-on-davis-s)](https://paperswithcode.com/sota/rgb-salient-object-detection-on-davis-s?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/rgb-salient-object-detection-on-hrsod)](https://paperswithcode.com/sota/rgb-salient-object-detection-on-hrsod?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/rgb-salient-object-detection-on-uhrsd)](https://paperswithcode.com/sota/rgb-salient-object-detection-on-uhrsd?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/salient-object-detection-on-duts-te)](https://paperswithcode.com/sota/salient-object-detection-on-duts-te?p=bilateral-reference-for-high-resolution) [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bilateral-reference-for-high-resolution/salient-object-detection-on-dut-omron)](https://paperswithcode.com/sota/salient-object-detection-on-dut-omron?p=bilateral-reference-for-high-resolution) + +
+*Figure of comparison on HRSOD papers with codes (by the time of this work).*
+
+
+#### Try our online demos for inference:
+
++ **Inference and evaluation** of your given weights: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1MaEiBfJ4xIaZZn0DqKrhydHB8X97hNXl)
++ **Online inference with GUI** and adjustable resolutions: [![Hugging Face Spaces](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue)](https://huggingface.co/spaces/ZhengPeng7/BiRefNet_demo)
++ Online **multiple-image inference** on Colab: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/14Dqg7oeBkFEtchaHLNpig2BcdkZEogba)
+
+
+## Model Zoo
+
+> For more general use of BiRefNet, I extended the original academic version into more general-purpose models for better real-life applications.
+>
+> Datasets and backbone weights are best downloaded from their official pages, but you can also download the packaged ones: [DIS](https://drive.google.com/drive/folders/1hZW6tAGPJwo9mPS7qGGGdpxuvuXiyoMJ), [HRSOD](https://drive.google.com/drive/folders/18_hAE3QM4cwAzEAKXuSNtKjmgFXTQXZN), [COD](https://drive.google.com/drive/folders/1EyHmKWsXfaCR9O0BiZEc3roZbRcs4ECO), [Backbones](https://drive.google.com/drive/folders/1cmce_emsS8A5ha5XT2c_CZiJzlLM81ms).
+>
+> Performances (almost all metrics) of all models can be found in the `exp-TASK_SETTINGS` folders in [[**stuff**](https://drive.google.com/drive/folders/1s2Xe0cjq-2ctnJBR24563yMSCOu4CcxM)].
Models in the original paper, for comparison on benchmarks: + +| Task | Training Sets | Backbone | Download | +| :---: | :-------------------------: | :-----------: | :----------------------------------------------------------: | +| DIS | DIS5K-TR | swin_v1_large | [google-drive](https://drive.google.com/file/d/1J90LucvDQaS3R_-9E7QUh1mgJ8eQvccb/view) | +| COD | COD10K-TR, CAMO-TR | swin_v1_large | [google-drive](https://drive.google.com/file/d/1tM5M72k7a8aKF-dYy-QXaqvfEhbFaWkC/view) | +| HRSOD | DUTS-TR | swin_v1_large | [google-drive](https://drive.google.com/file/d/1f7L0Pb1Y3RkOMbqLCW_zO31dik9AiUFa/view) | +| HRSOD | DUTS-TR, HRSOD-TR | swin_v1_large | [google-drive](https://drive.google.com/file/d/1WJooyTkhoDLllaqwbpur_9Hle0XTHEs_/view) | +| HRSOD | DUTS-TR, UHRSD-TR | swin_v1_large | [google-drive](https://drive.google.com/file/d/1Pu1mv3ORobJatIuUoEuZaWDl2ylP3Gw7/view) | +| HRSOD | HRSOD-TR, UHRSD-TR | swin_v1_large | [google-drive](https://drive.google.com/file/d/1xEh7fsgWGaS5c3IffMswasv0_u-aVM9E/view) | +| HRSOD | DUTS-TR, HRSOD-TR, UHRSD-TR | swin_v1_large | [google-drive](https://drive.google.com/file/d/13FaxyyOwyCddfZn2vZo1xG1KNZ3cZ-6B/view) | + +
+ + + +
Models trained with customed data (general, matting), for general use in practical application: + +| Task | Training Sets | Backbone | Test Set | Metric (S, wF[, HCE]) | Download | +| :-----------------------: | :----------------------------------------------------------: | :-----------: | :-------: | :-------------------: | :----------------------------------------------------------: | +| **general use** | DIS5K-TR,DIS-TEs, DUTS-TR_TE,HRSOD-TR_TE,UHRSD-TR_TE, HRS10K-TR_TE, TR-P3M-10k, TE-P3M-500-NP, TE-P3M-500-P, TR-humans | swin_v1_large | DIS-VD | 0.911, 0.875, 1069 | [google-drive](https://drive.google.com/file/d/1_IfUnu8Fpfn-nerB89FzdNXQ7zk6FKxc/view) | +| **general use** | DIS5K-TR,DIS-TEs, DUTS-TR_TE,HRSOD-TR_TE,UHRSD-TR_TE, HRS10K-TR_TE, TR-P3M-10k, TE-P3M-500-NP, TE-P3M-500-P, TR-humans | swin_v1_tiny | DIS-VD | 0.882, 0.830, 1175 | [google-drive](https://drive.google.com/file/d/1fzInDWiE2n65tmjaHDSZpqhL0VME6-Yl/view) | +| **general use** | DIS5K-TR, DIS-TEs | swin_v1_large | DIS-VD | 0.907, 0.865, 1059 | [google-drive](https://drive.google.com/file/d/1P6NJzG3Jf1sl7js2q1CPC3yqvBn_O8UJ/view) | +| **general matting** | P3M-10k (except TE-P3M-500-NP), TR-humans, AM-2k, AIM-500, Human-2k (synthesized with BG-20k), Distinctions-646 (synthesized with BG-20k), HIM2K, PPM-100 | swin_v1_large | TE-P3M-500-NP | 0.979, 0.988 | [google-drive](https://drive.google.com/file/d/1Nlcg58d5bvE-Tbbm8su_eMQba10hdcwQ/view) | +| **portrait matting** | [P3M-10k](https://github.com/JizhiziLi/P3M), [humans](https://huggingface.co/datasets/schirrmacher/humans) | swin_v1_large | P3M-500-P | 0.983, 0.989 | [google-drive](https://drive.google.com/file/d/1uUeXjEUoD2XF_6YjD_fsct-TJp7TFiqh) | + +
+ + + +
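The tables above list the released `.pth` checkpoints; for local use, the one-line Hugging Face load shown earlier is the easiest route. Below is a minimal, hedged sketch of that flow. The 1024x1024 resolution and ImageNet normalization mirror the defaults used elsewhere in this repo (`config.py` / `dataset.py`); taking the last output and applying `sigmoid()` follows the usage shown on the Hugging Face model card, and `example.jpg` is just a placeholder file name.

```python
# Minimal local-inference sketch (assumptions noted above), not an official snippet.
import numpy as np
import torch
from PIL import Image
from torchvision import transforms
from transformers import AutoModelForImageSegmentation

device = "cuda" if torch.cuda.is_available() else "cpu"
birefnet = AutoModelForImageSegmentation.from_pretrained(
    "zhengpeng7/BiRefNet", trust_remote_code=True
).to(device).eval()

# Same input resolution / normalization as the training configs in this repo.
preprocess = transforms.Compose([
    transforms.Resize((1024, 1024)),
    transforms.ToTensor(),
    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
])

image = Image.open("example.jpg").convert("RGB")
batch = preprocess(image).unsqueeze(0).to(device)

with torch.no_grad():
    # The model returns multi-scale maps; the last one is the final prediction.
    pred = birefnet(batch)[-1].sigmoid().cpu()[0, 0]  # HxW, values in [0, 1]

mask = Image.fromarray((pred.numpy() * 255).astype(np.uint8))
mask.resize(image.size).save("example_mask.png")
```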
Segmentation with box guidance: + ++ Given box guidance: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1B6aKZ3ekcvKMkSBn0N5mCASLUYMp0whK) + +
+ + + +
Model efficiency: + +> Screenshot from the original paper. All tests are conducted on a single A100 GPU. + + + +
+ + + +
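If you want a rough latency number on your own hardware rather than the paper's A100 figures, a small timing harness like the one below is enough. It assumes an already-loaded model on a CUDA device (e.g., the `birefnet` object from the sketch above); the warm-up iterations and `torch.cuda.synchronize()` calls are what make the numbers meaningful.

```python
# Hedged timing sketch for a single-image forward pass on GPU.
import time
import torch

def time_forward(model, input_size=(1, 3, 1024, 1024), n_warmup=5, n_runs=20):
    x = torch.randn(*input_size, device="cuda")
    model.eval()
    with torch.no_grad():
        for _ in range(n_warmup):      # warm-up: autotuning, lazy allocations
            model(x)
        torch.cuda.synchronize()
        start = time.perf_counter()
        for _ in range(n_runs):
            model(x)
        torch.cuda.synchronize()       # wait for all kernels before stopping the clock
    return (time.perf_counter() - start) / n_runs

# Example: print(f"{time_forward(birefnet.cuda()) * 1000:.1f} ms / image")
```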
ONNX conversion: + +> We converted from `.pth` weights files to `.onnx` files. +> We referred a lot to the [Kazuhito00/BiRefNet-ONNX-Sample](https://github.com/Kazuhito00/BiRefNet-ONNX-Sample), many thanks to @Kazuhito00. + ++ Check our [Colab demo for ONNX conversion](https://colab.research.google.com/drive/1z6OruR52LOvDDpnp516F-N4EyPGrp5om) or the [notebook file for local running](https://drive.google.com/file/d/1cgL2qyvOO5q3ySfhytypX46swdQwZLrJ), where you can do the conversion/inference by yourself and find all relevant info. ++ As tested, BiRefNets with SwinL (default backbone) cost `~90%` more time (the inference costs `~165ms` on an A100 GPU) using ONNX files. Meanwhile, BiRefNets with SwinT (lightweight) cost `~75%` more time (the inference costs `~93.8ms` on an A100 GPU) using ONNX files. Input resolution is `1024x1024` as default. ++ The results of the original pth files and the converted onnx files are slightly different, which is acceptable. ++ Pay attention to the compatibility among `onnxruntime-gpu, CUDA, and CUDNN` (we use `torch==2.0.1, cuda=11.8` here). + +
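As a companion to the notes above, here is a hedged sketch of running one of the converted `.onnx` files with `onnxruntime-gpu`. The file name `BiRefNet.onnx`, the single-input assumption, and reading the final map from the last output are assumptions to verify against your exported model; the preprocessing matches the 1024x1024 + ImageNet normalization used elsewhere in this README.

```python
# onnxruntime inference sketch; check input/output names against your own export.
import numpy as np
import onnxruntime as ort
from PIL import Image

session = ort.InferenceSession(
    "BiRefNet.onnx", providers=["CUDAExecutionProvider", "CPUExecutionProvider"]
)
input_name = session.get_inputs()[0].name

image = Image.open("example.jpg").convert("RGB")
resized = image.resize((1024, 1024))
x = np.asarray(resized, dtype=np.float32) / 255.0
x = (x - [0.485, 0.456, 0.406]) / [0.229, 0.224, 0.225]   # ImageNet normalization
x = x.transpose(2, 0, 1)[None].astype(np.float32)         # HWC -> NCHW

outputs = session.run(None, {input_name: x})
logits = outputs[-1]                                       # assumed: last output = final map
prob = 1.0 / (1.0 + np.exp(-logits[0, 0]))                 # sigmoid -> HxW in [0, 1]
mask = Image.fromarray((prob * 255).astype(np.uint8)).resize(image.size)
mask.save("example_mask_onnx.png")
```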
+ + + +## Third-Party Creations + +> Concerning edge devices with less computing power, we provide a lightweight version with `swin_v1_tiny` as the backbone, which is x4+ faster and x5+ smaller. The details can be found in [this issue](https://github.com/ZhengPeng7/BiRefNet/issues/11#issuecomment-2041033576) and links there. + +We found there've been some 3rd party applications based on our BiRefNet. Many thanks for their contribution to the community! +Choose the one you like to try with clicks instead of codes: +1. **Applications**: + + + Thanks [**camenduru/text-behind-tost**](https://github.com/camenduru/text-behind-tost): this project employed BiRefNet to extract foreground subjects and **add texts between the subjects and background**, which looks amazing especially for videos. Check their [tweets](https://x.com/camenduru/status/1856290408294220010) for more examples. + +

+ + + Thanks [**briaai/RMBG-2.0**](https://huggingface.co/briaai/RMBG-2.0): this project trained BiRefNet with their **high-quality private data**, which brings improvements on the DIS task. Note that their weights are for **non-commercial use only** and do **not handle transparency**, since they were trained in the DIS task setting, which focuses only on predicting binary masks. + +

+ + + Thanks [**lldacing/ComfyUI_BiRefNet_ll**](https://github.com/lldacing/ComfyUI_BiRefNet_ll): this project further upgrades the **ComfyUI node** for BiRefNet with both our **latest weights** and **the legacy ones**. + +

+ + + Thanks [**MoonHugo/ComfyUI-BiRefNet-Hugo**](https://github.com/MoonHugo/ComfyUI-BiRefNet-Hugo): this project further upgrades the **ComfyUI node** for BiRefNet with our **latest weights**. + +

+ + + Thanks [**lbq779660843/BiRefNet-Tensorrt**](https://github.com/lbq779660843/BiRefNet-Tensorrt) and [**yuanyang1991/birefnet_tensorrt**](https://github.com/yuanyang1991/birefnet_tensorrt): both projects convert BiRefNet to **TensorRT**, which is faster and better for deployment. They offer a solid local setup (Windows and Linux) and a [colab demo](https://colab.research.google.com/drive/1r8GkFPyMMO0OkMX6ih5FjZnUCQrl2SHV?usp=sharing), respectively. @yuanyang1991 also kindly compared the inference efficiency of naive PyTorch, ONNX, and TensorRT on an RTX 4080S:
+
+| Methods | [Pytorch](https://drive.google.com/file/d/1_IfUnu8Fpfn-nerB89FzdNXQ7zk6FKxc/view) | [ONNX](https://drive.google.com/drive/u/0/folders/1kZM55bwsRdS__bdnsXpkmH6QPyza-9-N) | TensorRT |
+| :---: | :---: | :---: | :---: |
+| First Inference Time | 0.71s | 5.32s | **0.17s** |
+| Avg Inference Time (excluding 1st) | 0.15s | 4.43s | **0.11s** |
+
+ + Thanks [**dimitribarbot/sd-webui-birefnet**](https://github.com/dimitribarbot/sd-webui-birefnet): this project adds a BiRefNet section to the original **Stable Diffusion WebUI**'s Extras tab.
+

+ + + Thanks [**fal.ai/birefnet**](https://fal.ai/models/birefnet): this project on `fal.ai` encapsulates BiRefNet **online** with more useful options in **UI** and **API** to call the model. +

+ + + Thanks [**ZHO-ZHO-ZHO/ComfyUI-BiRefNet-ZHO**](https://github.com/ZHO-ZHO-ZHO/ComfyUI-BiRefNet-ZHO): this project further improves the **UI** for BiRefNet in ComfyUI, especially for **video data**. +

+ + + + + Thanks [**viperyl/ComfyUI-BiRefNet**](https://github.com/viperyl/ComfyUI-BiRefNet): this project packs BiRefNet as **ComfyUI nodes** and makes this SOTA model easier to use for everyone. +

+ + + Thanks [**Rishabh**](https://github.com/rishabh063) for offering a demo for the [easier multiple images inference on colab](https://colab.research.google.com/drive/14Dqg7oeBkFEtchaHLNpig2BcdkZEogba). + +2. **More Visual Comparisons** + + Thanks [**twitter.com/ZHOZHO672070**](https://twitter.com/ZHOZHO672070) for the comparison with more background-removal methods in images: + + + + + Thanks [**twitter.com/toyxyz3**](https://twitter.com/toyxyz3) for the comparison with more background-removal methods in videos: + + + + + + +## Usage + +#### Environment Setup + +```shell +# PyTorch==2.0.1 is used for faster training with compilation. +conda create -n birefnet python=3.9 -y && conda activate birefnet +pip install -r requirements.txt +``` + +#### Dataset Preparation + +Download combined training / test sets I have organized well from: [DIS](https://drive.google.com/drive/folders/1hZW6tAGPJwo9mPS7qGGGdpxuvuXiyoMJ)--[COD](https://drive.google.com/drive/folders/1EyHmKWsXfaCR9O0BiZEc3roZbRcs4ECO)--[HRSOD](https://drive.google.com/drive/folders/18_hAE3QM4cwAzEAKXuSNtKjmgFXTQXZN) or the single official ones in the `single_ones` folder, or their official pages. You can also find the same ones on my **BaiduDisk**: [DIS](https://pan.baidu.com/s/1O_pQIGAE4DKqL93xOxHpxw?pwd=PSWD)--[COD](https://pan.baidu.com/s/1RnxAzaHSTGBC1N6r_RfeqQ?pwd=PSWD)--[HRSOD](https://pan.baidu.com/s/1_Del53_0lBuG0DKJJAk4UA?pwd=PSWD). + +#### Weights Preparation + +Download backbone weights from [my google-drive folder](https://drive.google.com/drive/folders/1s2Xe0cjq-2ctnJBR24563yMSCOu4CcxM) or their official pages. + +## Run + +```shell +# Train & Test & Evaluation +./train_test.sh RUN_NAME GPU_NUMBERS_FOR_TRAINING GPU_NUMBERS_FOR_TEST +# Example: ./train_test.sh tmp-proj 0,1,2,3,4,5,6,7 0 + +# See train.sh / test.sh for only training / test-evaluation. +# After the evaluation, run `gen_best_ep.py` to select the best ckpt from a specific metric (you choose it from Sm, wFm, HCE (DIS only)). +``` + +### :pen: Fine-tuning on Custom Data + +
Guideline:
+
+> Suppose you have some custom data; fine-tuning on it tends to bring improvements.
+
+1. **Pre-requisites**: put your datasets in the path `${data_root_dir}/TASK_NAME/DATASET_NAME`, e.g., `${data_root_dir}/DIS5K/DIS-TR` or `${data_root_dir}/General/TR-HRSOD`, where each dataset folder contains both `im` and `gt`.
+2. **Change an existing task to your custom one**: replace all `'General'` (with single quotes) in the whole project with `your custom task name`, e.g., with a project-wide search-and-replace in your editor.
+3. **Adapt settings** (a quick sanity check of the adapted config follows this list):
+   + `sys_home_dir`: path to the root folder, which contains codes / datasets / weights / ...; the project folder, data folder, and backbone weights folder are `${sys_home_dir}/codes/dis/BiRefNet`, `${sys_home_dir}/datasets/dis/General`, and `${sys_home_dir}/weights/cv/swin_xxx`, respectively.
+   + `testsets`: your validation set.
+   + `training_set`: your training set.
+   + `lambdas_pix_last`: adapt the weights of the different losses if you want, especially for the difference between segmentation (a classification task) and matting (a regression task).
+4. **Use existing weights**: if you want to start fine-tuning from existing weights, refer to the `resume` argument in `train.py`. Attention: training continues from the epoch the weights file name indicates (e.g., `244` in `BiRefNet-general-epoch_244.pth`), not from `1`. So if you want to fine-tune for `50` more epochs, set the number of epochs to `294`. The number of epochs, the number of last epochs used for validation, and the validation step are set in `train.sh`.
+5. Good luck with your training :) If you still have questions, feel free to leave issues (the recommended way) or contact me.
+
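As a small companion to step 3 above, the attributes mentioned there (`task`, `testsets`, `training_set`, `lambdas_pix_last`) all live in `Config.__init__` of `config.py`, which is included later in this diff. A quick, hedged sanity check after editing them could look like the sketch below; the `MyTask` / `TR-MyTask` / `TE-MyTask` names are placeholders for your own folders, not settings that ship with the repo.

```python
# Run from the BiRefNet repo root after adapting config.py (steps 1-3 above).
from config import Config

config = Config()
print(config.task)              # expect your custom task name, e.g. 'MyTask'
print(config.training_set)      # expect your training folder(s), e.g. 'TR-MyTask' (join several with '+')
print(config.testsets)          # expect your validation folder(s), e.g. 'TE-MyTask'
print(config.size)              # training resolution, (1024, 1024) unless you chose the 2K setting
print(config.lambdas_pix_last)  # loss weights: BCE/IoU/SSIM for binary masks, MAE/MSE for matting
```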
+
+## Well-trained weights:
+
+Download the `BiRefNet-{TASK}-{EPOCH}.pth` weights from [[**stuff**](https://drive.google.com/drive/folders/1s2Xe0cjq-2ctnJBR24563yMSCOu4CcxM)] or from [the release page](https://github.com/ZhengPeng7/BiRefNet/releases) of this repo. Info on the corresponding predicted maps, performance, and training logs can also be found in folders like `exp-BiRefNet-{TASK_SETTINGS}` in the same directory.
+
+The results might differ a bit from those in the original paper; you can find them in the `eval_results-BiRefNet-{TASK_SETTINGS}` folder in each `exp-xx`, and we will update them in the following days. The original training cost (8 x A100-80G) is more than many people (including myself) can afford, so I re-trained BiRefNet on a single A100-40G and reached the same level of performance (even better). This means you can train the model directly on a single GPU with 36.5 GB+ of memory, and inference at 1024x1024 needs only about 5.5 GB of GPU memory. (I personally paid a lot to rent an A100-40G to re-train BiRefNet on the three tasks... T_T. Hope it helps you.)
+
+If you have more and more powerful GPUs, you can set the GPU IDs and increase the batch size in `config.py` to accelerate training. The scripts adapt to this automatically, so you can seamlessly switch between single-card and multi-card training. Enjoy it :)
+
+## Some of my messages:
+
+This project was originally built for DIS only, but update by update it grew into a larger framework with many functions embedded together. Finally, you can **use it for any binary image segmentation task**, such as DIS/COD/SOD, medical image segmentation, anomaly segmentation, etc. You can easily switch the following on/off (usually in `config.py`):
++ Multi-GPU training: open/close with one variable.
++ Backbone choices: Swin_v1, PVT_v2, ConvNets, ...
++ Weighted losses: BCE, IoU, SSIM, MAE, Reg, ...
++ Training tricks: multi-scale supervision, freezing the backbone, multi-scale input, ...
++ Data collator: loading everything into memory, smooth combination of different datasets for combined training and testing.
++ ...
+
+I really hope you enjoy this project and use it in more works to achieve new SOTAs.
+
+
+### Quantitative Results

+ +

+ + + +### Qualitative Results + +

+ +

+ + +## Acknowledgement: + +Many of my thanks to the companies / institutes below. ++ [FAL](https://fal.ai). ++ [Freepik](https://www.freepik.com). ++ [Redmond.ai](https://redmond.ai). + + +### Citation + +``` +@article{zheng2024birefnet, + title={Bilateral Reference for High-Resolution Dichotomous Image Segmentation}, + author={Zheng, Peng and Gao, Dehong and Fan, Deng-Ping and Liu, Li and Laaksonen, Jorma and Ouyang, Wanli and Sebe, Nicu}, + journal={CAAI Artificial Intelligence Research}, + volume = {3}, + pages = {9150038}, + year={2024} +} +``` + + + +## Contact + +Any questions, discussions, or even complaints, feel free to leave issues here (recommended) or send me e-mails (zhengpeng0108@gmail.com) or book a meeting with me: [calendly.com/zhengpeng0108/30min](https://calendly.com/zhengpeng0108/30min). You can also join the Discord Group (https://discord.gg/d9NN5sgFrq) or QQ Group (https://qm.qq.com/q/y6WPy7WOIK) if you want to talk a lot publicly. + diff --git a/engine/BiRefNet/__pycache__/config.cpython-310.pyc b/engine/BiRefNet/__pycache__/config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..673c71f56441c12553e6e45f6c54e056c01b0238 Binary files /dev/null and b/engine/BiRefNet/__pycache__/config.cpython-310.pyc differ diff --git a/engine/BiRefNet/__pycache__/dataset.cpython-310.pyc b/engine/BiRefNet/__pycache__/dataset.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b987cac6cd8e81440e85d8eac0c8788b039f09ad Binary files /dev/null and b/engine/BiRefNet/__pycache__/dataset.cpython-310.pyc differ diff --git a/engine/BiRefNet/__pycache__/image_proc.cpython-310.pyc b/engine/BiRefNet/__pycache__/image_proc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b5b439afcd46d947847e6bace9553dd1249de579 Binary files /dev/null and b/engine/BiRefNet/__pycache__/image_proc.cpython-310.pyc differ diff --git a/engine/BiRefNet/__pycache__/utils.cpython-310.pyc b/engine/BiRefNet/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ce5e86b6d2c9fef7a7de299ce0e30480528227ca Binary files /dev/null and b/engine/BiRefNet/__pycache__/utils.cpython-310.pyc differ diff --git a/engine/BiRefNet/config.py b/engine/BiRefNet/config.py new file mode 100644 index 0000000000000000000000000000000000000000..ec2a40aecdf5c3af056a79edcd6f5ecb1b4ec1ff --- /dev/null +++ b/engine/BiRefNet/config.py @@ -0,0 +1,201 @@ +import os +import math + + +class Config(): + def __init__(self) -> None: + # PATH settings + # Make up your file system as: SYS_HOME_DIR/codes/dis/BiRefNet, SYS_HOME_DIR/datasets/dis/xx, SYS_HOME_DIR/weights/xx + self.sys_home_dir = [os.path.expanduser('~'), '/mnt/data'][0] # Default, custom + self.data_root_dir = os.path.join(self.sys_home_dir, 'datasets/dis') + + # TASK settings + self.task = ['DIS5K', 'COD', 'HRSOD', 'General', 'General-2K', 'Matting'][0] + self.testsets = { + # Benchmarks + 'DIS5K': ','.join(['DIS-VD', 'DIS-TE1', 'DIS-TE2', 'DIS-TE3', 'DIS-TE4'][:1]), + 'COD': ','.join(['CHAMELEON', 'NC4K', 'TE-CAMO', 'TE-COD10K']), + 'HRSOD': ','.join(['DAVIS-S', 'TE-HRSOD', 'TE-UHRSD', 'DUT-OMRON', 'TE-DUTS']), + # Practical use + 'General': ','.join(['DIS-VD', 'TE-P3M-500-NP']), + 'General-2K': ','.join(['DIS-VD', 'TE-P3M-500-NP']), + 'Matting': ','.join(['TE-P3M-500-NP', 'TE-AM-2k']), + }[self.task] + datasets_all = '+'.join([ds for ds in (os.listdir(os.path.join(self.data_root_dir, self.task)) if os.path.isdir(os.path.join(self.data_root_dir, 
self.task)) else []) if ds not in self.testsets.split(',')]) + self.training_set = { + 'DIS5K': ['DIS-TR', 'DIS-TR+DIS-TE1+DIS-TE2+DIS-TE3+DIS-TE4'][0], + 'COD': 'TR-COD10K+TR-CAMO', + 'HRSOD': ['TR-DUTS', 'TR-HRSOD', 'TR-UHRSD', 'TR-DUTS+TR-HRSOD', 'TR-DUTS+TR-UHRSD', 'TR-HRSOD+TR-UHRSD', 'TR-DUTS+TR-HRSOD+TR-UHRSD'][5], + 'General': datasets_all, + 'General-2K': datasets_all, + 'Matting': datasets_all, + }[self.task] + self.prompt4loc = ['dense', 'sparse'][0] + + # Faster-Training settings + self.load_all = False # Turn it on/off by your case. It may consume a lot of CPU memory. And for multi-GPU (N), it would cost N times the CPU memory to load the data. + self.compile = True # 1. Trigger CPU memory leak in some extend, which is an inherent problem of PyTorch. + # Machines with > 70GB CPU memory can run the whole training on DIS5K with default setting. + # 2. Higher PyTorch version may fix it: https://github.com/pytorch/pytorch/issues/119607. + # 3. But compile in Pytorch > 2.0.1 seems to bring no acceleration for training. + self.precisionHigh = True + + # MODEL settings + self.ms_supervision = True + self.out_ref = self.ms_supervision and True + self.dec_ipt = True + self.dec_ipt_split = True + self.cxt_num = [0, 3][1] # multi-scale skip connections from encoder + self.mul_scl_ipt = ['', 'add', 'cat'][2] + self.dec_att = ['', 'ASPP', 'ASPPDeformable'][2] + self.squeeze_block = ['', 'BasicDecBlk_x1', 'ResBlk_x4', 'ASPP_x3', 'ASPPDeformable_x3'][1] + self.dec_blk = ['BasicDecBlk', 'ResBlk'][0] + + # TRAINING settings + self.batch_size = 4 + self.finetune_last_epochs = [ + 0, + { + 'DIS5K': -40, + 'COD': -20, + 'HRSOD': -20, + 'General': -40, + 'General-2K': -20, + 'Matting': -20, + }[self.task] + ][1] # choose 0 to skip + self.lr = (1e-4 if 'DIS5K' in self.task else 1e-5) * math.sqrt(self.batch_size / 4) # DIS needs high lr to converge faster. Adapt the lr linearly + self.size = (1024, 1024) if self.task not in ['General-2K'] else (2560, 1440) # wid, hei + self.num_workers = max(4, self.batch_size) # will be decrease to min(it, batch_size) at the initialization of the data_loader + + # Backbone settings + self.bb = [ + 'vgg16', 'vgg16bn', 'resnet50', # 0, 1, 2 + 'swin_v1_t', 'swin_v1_s', # 3, 4 + 'swin_v1_b', 'swin_v1_l', # 5-bs9, 6-bs4 + 'pvt_v2_b0', 'pvt_v2_b1', # 7, 8 + 'pvt_v2_b2', 'pvt_v2_b5', # 9-bs10, 10-bs5 + ][6] + self.lateral_channels_in_collection = { + 'vgg16': [512, 256, 128, 64], 'vgg16bn': [512, 256, 128, 64], 'resnet50': [1024, 512, 256, 64], + 'pvt_v2_b2': [512, 320, 128, 64], 'pvt_v2_b5': [512, 320, 128, 64], + 'swin_v1_b': [1024, 512, 256, 128], 'swin_v1_l': [1536, 768, 384, 192], + 'swin_v1_t': [768, 384, 192, 96], 'swin_v1_s': [768, 384, 192, 96], + 'pvt_v2_b0': [256, 160, 64, 32], 'pvt_v2_b1': [512, 320, 128, 64], + }[self.bb] + if self.mul_scl_ipt == 'cat': + self.lateral_channels_in_collection = [channel * 2 for channel in self.lateral_channels_in_collection] + self.cxt = self.lateral_channels_in_collection[1:][::-1][-self.cxt_num:] if self.cxt_num else [] + + # MODEL settings - inactive + self.lat_blk = ['BasicLatBlk'][0] + self.dec_channels_inter = ['fixed', 'adap'][0] + self.refine = ['', 'itself', 'RefUNet', 'Refiner', 'RefinerPVTInChannels4'][0] + self.progressive_ref = self.refine and True + self.ender = self.progressive_ref and False + self.scale = self.progressive_ref and 2 + self.auxiliary_classification = False # Only for DIS5K, where class labels are saved in `dataset.py`. 
+ self.refine_iteration = 1 + self.freeze_bb = False + self.model = [ + 'BiRefNet', + 'BiRefNetC2F', + ][0] + + # TRAINING settings - inactive + self.preproc_methods = ['flip', 'enhance', 'rotate', 'pepper', 'crop'][:4] + self.optimizer = ['Adam', 'AdamW'][1] + self.lr_decay_epochs = [1e5] # Set to negative N to decay the lr in the last N-th epoch. + self.lr_decay_rate = 0.5 + # Loss + if self.task in ['Matting']: + self.lambdas_pix_last = { + 'bce': 30 * 1, + 'iou': 0.5 * 0, + 'iou_patch': 0.5 * 0, + 'mae': 100 * 1, + 'mse': 30 * 0, + 'triplet': 3 * 0, + 'reg': 100 * 0, + 'ssim': 10 * 1, + 'cnt': 5 * 0, + 'structure': 5 * 0, + } + elif self.task in ['General', 'General-2K']: + self.lambdas_pix_last = { + 'bce': 30 * 1, + 'iou': 0.5 * 1, + 'iou_patch': 0.5 * 0, + 'mae': 100 * 1, + 'mse': 30 * 0, + 'triplet': 3 * 0, + 'reg': 100 * 0, + 'ssim': 10 * 1, + 'cnt': 5 * 0, + 'structure': 5 * 0, + } + else: + self.lambdas_pix_last = { + # not 0 means opening this loss + # original rate -- 1 : 30 : 1.5 : 0.2, bce x 30 + 'bce': 30 * 1, # high performance + 'iou': 0.5 * 1, # 0 / 255 + 'iou_patch': 0.5 * 0, # 0 / 255, win_size = (64, 64) + 'mae': 30 * 0, + 'mse': 30 * 0, # can smooth the saliency map + 'triplet': 3 * 0, + 'reg': 100 * 0, + 'ssim': 10 * 1, # help contours, + 'cnt': 5 * 0, # help contours + 'structure': 5 * 0, # structure loss from codes of MVANet. A little improvement on DIS-TE[1,2,3], a bit more decrease on DIS-TE4. + } + self.lambdas_cls = { + 'ce': 5.0 + } + + # PATH settings - inactive + self.weights_root_dir = os.path.join(self.sys_home_dir, 'weights/cv') + self.weights = { + 'pvt_v2_b2': os.path.join(self.weights_root_dir, 'pvt_v2_b2.pth'), + 'pvt_v2_b5': os.path.join(self.weights_root_dir, ['pvt_v2_b5.pth', 'pvt_v2_b5_22k.pth'][0]), + 'swin_v1_b': os.path.join(self.weights_root_dir, ['swin_base_patch4_window12_384_22kto1k.pth', 'swin_base_patch4_window12_384_22k.pth'][0]), + 'swin_v1_l': os.path.join(self.weights_root_dir, ['swin_large_patch4_window12_384_22kto1k.pth', 'swin_large_patch4_window12_384_22k.pth'][0]), + 'swin_v1_t': os.path.join(self.weights_root_dir, ['swin_tiny_patch4_window7_224_22kto1k_finetune.pth'][0]), + 'swin_v1_s': os.path.join(self.weights_root_dir, ['swin_small_patch4_window7_224_22kto1k_finetune.pth'][0]), + 'pvt_v2_b0': os.path.join(self.weights_root_dir, ['pvt_v2_b0.pth'][0]), + 'pvt_v2_b1': os.path.join(self.weights_root_dir, ['pvt_v2_b1.pth'][0]), + } + + # Callbacks - inactive + self.verbose_eval = True + self.only_S_MAE = False + self.SDPA_enabled = False # Bugs. Slower and errors occur in multi-GPUs + + # others + self.device = [0, 'cpu'][0] # .to(0) == .to('cuda:0') + + self.batch_size_valid = 1 + self.rand_seed = 7 + run_sh_file = [f for f in os.listdir('.') if 'train.sh' == f] + [os.path.join('..', f) for f in os.listdir('..') if 'train.sh' == f] + if run_sh_file: + with open(run_sh_file[0], 'r') as f: + lines = f.readlines() + self.save_last = int([l.strip() for l in lines if "'{}')".format(self.task) in l and 'val_last=' in l][0].split('val_last=')[-1].split()[0]) + self.save_step = int([l.strip() for l in lines if "'{}')".format(self.task) in l and 'step=' in l][0].split('step=')[-1].split()[0]) + + +# Return task for choosing settings in shell scripts. 
+if __name__ == '__main__': + import argparse + + + parser = argparse.ArgumentParser(description='Only choose one argument to activate.') + parser.add_argument('--print_task', action='store_true', help='print task name') + parser.add_argument('--print_testsets', action='store_true', help='print validation set') + args = parser.parse_args() + + config = Config() + for arg_name, arg_value in args._get_kwargs(): + if arg_value: + print(config.__getattribute__(arg_name[len('print_'):])) + diff --git a/engine/BiRefNet/dataset.py b/engine/BiRefNet/dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..0bb06b42991cc51ca4ba4639f283a76ef606559d --- /dev/null +++ b/engine/BiRefNet/dataset.py @@ -0,0 +1,153 @@ +import os + +import cv2 +from PIL import Image +from torch.utils import data +from torchvision import transforms +from tqdm import tqdm + +from .config import Config +from .image_proc import preproc +from .utils import path_to_image + +Image.MAX_IMAGE_PIXELS = None # remove DecompressionBombWarning +config = Config() +_class_labels_TR_sorted = ( + "Airplane, Ant, Antenna, Archery, Axe, BabyCarriage, Bag, BalanceBeam, Balcony, Balloon, Basket, BasketballHoop, Beatle, Bed, Bee, Bench, Bicycle, " + "BicycleFrame, BicycleStand, Boat, Bonsai, BoomLift, Bridge, BunkBed, Butterfly, Button, Cable, CableLift, Cage, Camcorder, Cannon, Canoe, Car, " + "CarParkDropArm, Carriage, Cart, Caterpillar, CeilingLamp, Centipede, Chair, Clip, Clock, Clothes, CoatHanger, Comb, ConcretePumpTruck, Crack, Crane, " + "Cup, DentalChair, Desk, DeskChair, Diagram, DishRack, DoorHandle, Dragonfish, Dragonfly, Drum, Earphone, Easel, ElectricIron, Excavator, Eyeglasses, " + "Fan, Fence, Fencing, FerrisWheel, FireExtinguisher, Fishing, Flag, FloorLamp, Forklift, GasStation, Gate, Gear, Goal, Golf, GymEquipment, Hammock, " + "Handcart, Handcraft, Handrail, HangGlider, Harp, Harvester, Headset, Helicopter, Helmet, Hook, HorizontalBar, Hydrovalve, IroningTable, Jewelry, Key, " + "KidsPlayground, Kitchenware, Kite, Knife, Ladder, LaundryRack, Lightning, Lobster, Locust, Machine, MachineGun, MagazineRack, Mantis, Medal, MemorialArchway, " + "Microphone, Missile, MobileHolder, Monitor, Mosquito, Motorcycle, MovingTrolley, Mower, MusicPlayer, MusicStand, ObservationTower, Octopus, OilWell, " + "OlympicLogo, OperatingTable, OutdoorFitnessEquipment, Parachute, Pavilion, Piano, Pipe, PlowHarrow, PoleVault, Punchbag, Rack, Racket, Rifle, Ring, Robot, " + "RockClimbing, Rope, Sailboat, Satellite, Scaffold, Scale, Scissor, Scooter, Sculpture, Seadragon, Seahorse, Seal, SewingMachine, Ship, Shoe, ShoppingCart, " + "ShoppingTrolley, Shower, Shrimp, Signboard, Skateboarding, Skeleton, Skiing, Spade, SpeedBoat, Spider, Spoon, Stair, Stand, Stationary, SteeringWheel, " + "Stethoscope, Stool, Stove, StreetLamp, SweetStand, Swing, Sword, TV, Table, TableChair, TableLamp, TableTennis, Tank, Tapeline, Teapot, Telescope, Tent, " + "TobaccoPipe, Toy, Tractor, TrafficLight, TrafficSign, Trampoline, TransmissionTower, Tree, Tricycle, TrimmerCover, Tripod, Trombone, Truck, Trumpet, Tuba, " + "UAV, Umbrella, UnevenBars, UtilityPole, VacuumCleaner, Violin, Wakesurfing, Watch, WaterTower, WateringPot, Well, WellLid, Wheel, Wheelchair, WindTurbine, Windmill, WineGlass, WireWhisk, Yacht" +) +class_labels_TR_sorted = _class_labels_TR_sorted.split(", ") + + +class MyData(data.Dataset): + def __init__(self, datasets, image_size, is_train=True): + self.size_train = image_size + self.size_test = image_size + self.keep_size = not 
config.size + self.data_size = config.size + self.is_train = is_train + self.load_all = config.load_all + self.device = config.device + valid_extensions = [".png", ".jpg", ".PNG", ".JPG", ".JPEG"] + + if self.is_train and config.auxiliary_classification: + self.cls_name2id = { + _name: _id for _id, _name in enumerate(class_labels_TR_sorted) + } + self.transform_image = transforms.Compose( + [ + transforms.Resize(self.data_size[::-1]), + transforms.ToTensor(), + transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), + ][self.load_all or self.keep_size :] + ) + self.transform_label = transforms.Compose( + [ + transforms.Resize(self.data_size[::-1]), + transforms.ToTensor(), + ][self.load_all or self.keep_size :] + ) + dataset_root = os.path.join(config.data_root_dir, config.task) + # datasets can be a list of different datasets for training on combined sets. + self.image_paths = [] + for dataset in datasets.split("+"): + image_root = os.path.join(dataset_root, dataset, "im") + self.image_paths += [ + os.path.join(image_root, p) + for p in os.listdir(image_root) + if any(p.endswith(ext) for ext in valid_extensions) + ] + self.label_paths = [] + for p in self.image_paths: + for ext in valid_extensions: + ## 'im' and 'gt' may need modifying + p_gt = p.replace("/im/", "/gt/")[: -(len(p.split(".")[-1]) + 1)] + ext + file_exists = False + if os.path.exists(p_gt): + self.label_paths.append(p_gt) + file_exists = True + break + if not file_exists: + print("Not exists:", p_gt) + + if len(self.label_paths) != len(self.image_paths): + set_image_paths = set( + [os.path.splitext(p.split(os.sep)[-1])[0] for p in self.image_paths] + ) + set_label_paths = set( + [os.path.splitext(p.split(os.sep)[-1])[0] for p in self.label_paths] + ) + print("Path diff:", set_image_paths - set_label_paths) + raise ValueError( + f"There are different numbers of images ({len(self.label_paths)}) and labels ({len(self.image_paths)})" + ) + + if self.load_all: + self.images_loaded, self.labels_loaded = [], [] + self.class_labels_loaded = [] + # for image_path, label_path in zip(self.image_paths, self.label_paths): + for image_path, label_path in tqdm( + zip(self.image_paths, self.label_paths), total=len(self.image_paths) + ): + _image = path_to_image(image_path, size=config.size, color_type="rgb") + _label = path_to_image(label_path, size=config.size, color_type="gray") + self.images_loaded.append(_image) + self.labels_loaded.append(_label) + self.class_labels_loaded.append( + self.cls_name2id[label_path.split("/")[-1].split("#")[3]] + if self.is_train and config.auxiliary_classification + else -1 + ) + + def __getitem__(self, index): + + if self.load_all: + image = self.images_loaded[index] + label = self.labels_loaded[index] + class_label = ( + self.class_labels_loaded[index] + if self.is_train and config.auxiliary_classification + else -1 + ) + else: + image = path_to_image( + self.image_paths[index], size=config.size, color_type="rgb" + ) + label = path_to_image( + self.label_paths[index], size=config.size, color_type="gray" + ) + class_label = ( + self.cls_name2id[self.label_paths[index].split("/")[-1].split("#")[3]] + if self.is_train and config.auxiliary_classification + else -1 + ) + + # loading image and label + if self.is_train: + image, label = preproc(image, label, preproc_methods=config.preproc_methods) + # else: + # if _label.shape[0] > 2048 or _label.shape[1] > 2048: + # _image = cv2.resize(_image, (2048, 2048), interpolation=cv2.INTER_LINEAR) + # _label = cv2.resize(_label, (2048, 2048), 
interpolation=cv2.INTER_LINEAR) + + image, label = self.transform_image(image), self.transform_label(label) + + if self.is_train: + return image, label, class_label + else: + return image, label, self.label_paths[index] + + def __len__(self): + return len(self.image_paths) diff --git a/engine/BiRefNet/eval_existingOnes.py b/engine/BiRefNet/eval_existingOnes.py new file mode 100644 index 0000000000000000000000000000000000000000..5231f36c947262bed538c9b6e968c40d7c6eeafb --- /dev/null +++ b/engine/BiRefNet/eval_existingOnes.py @@ -0,0 +1,148 @@ +import os +import argparse +from glob import glob +import prettytable as pt + +from evaluation.metrics import evaluator +from config import Config + + +config = Config() + + +def do_eval(args): + # evaluation for whole dataset + # dataset first in evaluation + for _data_name in args.data_lst.split('+'): + pred_data_dir = sorted(glob(os.path.join(args.pred_root, args.model_lst[0], _data_name))) + if not pred_data_dir: + print('Skip dataset {}.'.format(_data_name)) + continue + gt_src = os.path.join(args.gt_root, _data_name) + gt_paths = sorted(glob(os.path.join(gt_src, 'gt', '*'))) + print('#' * 20, _data_name, '#' * 20) + filename = os.path.join(args.save_dir, '{}_eval.txt'.format(_data_name)) + tb = pt.PrettyTable() + tb.vertical_char = '&' + if config.task == 'DIS5K': + tb.field_names = ["Dataset", "Method", "maxFm", "wFmeasure", 'MAE', "Smeasure", "meanEm", "HCE", "maxEm", "meanFm", "adpEm", "adpFm", 'mBA', 'maxBIoU', 'meanBIoU'] + elif config.task == 'COD': + tb.field_names = ["Dataset", "Method", "Smeasure", "wFmeasure", "meanFm", "meanEm", "maxEm", 'MAE', "maxFm", "adpEm", "adpFm", "HCE", 'mBA', 'maxBIoU', 'meanBIoU'] + elif config.task == 'HRSOD': + tb.field_names = ["Dataset", "Method", "Smeasure", "maxFm", "meanEm", 'MAE', "maxEm", "meanFm", "wFmeasure", "adpEm", "adpFm", "HCE", 'mBA', 'maxBIoU', 'meanBIoU'] + elif config.task == 'General': + tb.field_names = ["Dataset", "Method", "maxFm", "wFmeasure", 'MAE', "Smeasure", "meanEm", "HCE", "maxEm", "meanFm", "adpEm", "adpFm", 'mBA', 'maxBIoU', 'meanBIoU'] + elif config.task == 'General-2K': + tb.field_names = ["Dataset", "Method", "maxFm", "wFmeasure", 'MAE', "Smeasure", "meanEm", "HCE", "maxEm", "meanFm", "adpEm", "adpFm", 'mBA', 'maxBIoU', 'meanBIoU'] + elif config.task == 'Matting': + tb.field_names = ["Dataset", "Method", "Smeasure", "maxFm", "meanEm", 'MSE', "maxEm", "meanFm", "wFmeasure", "adpEm", "adpFm", "HCE", 'mBA', 'maxBIoU', 'meanBIoU'] + else: + tb.field_names = ["Dataset", "Method", "Smeasure", 'MAE', "maxEm", "meanEm", "maxFm", "meanFm", "wFmeasure", "adpEm", "adpFm", "HCE", 'mBA', 'maxBIoU', 'meanBIoU'] + for _model_name in args.model_lst[:]: + print('\t', 'Evaluating model: {}...'.format(_model_name)) + pred_paths = [p.replace(args.gt_root, os.path.join(args.pred_root, _model_name)).replace('/gt/', '/') for p in gt_paths] + # print(pred_paths[:1], gt_paths[:1]) + em, sm, fm, mae, mse, wfm, hce, mba, biou = evaluator( + gt_paths=gt_paths, + pred_paths=pred_paths, + metrics=args.metrics.split('+'), + verbose=config.verbose_eval + ) + if config.task == 'DIS5K': + scores = [ + fm['curve'].max().round(3), wfm.round(3), mae.round(3), sm.round(3), em['curve'].mean().round(3), int(hce.round()), + em['curve'].max().round(3), fm['curve'].mean().round(3), em['adp'].round(3), fm['adp'].round(3), + mba.round(3), biou['curve'].max().round(3), biou['curve'].mean().round(3), + ] + elif config.task == 'COD': + scores = [ + sm.round(3), wfm.round(3), fm['curve'].mean().round(3), 
em['curve'].mean().round(3), em['curve'].max().round(3), mae.round(3), + fm['curve'].max().round(3), em['adp'].round(3), fm['adp'].round(3), int(hce.round()), + mba.round(3), biou['curve'].max().round(3), biou['curve'].mean().round(3), + ] + elif config.task == 'HRSOD': + scores = [ + sm.round(3), fm['curve'].max().round(3), em['curve'].mean().round(3), mae.round(3), + em['curve'].max().round(3), fm['curve'].mean().round(3), wfm.round(3), em['adp'].round(3), fm['adp'].round(3), int(hce.round()), + mba.round(3), biou['curve'].max().round(3), biou['curve'].mean().round(3), + ] + elif config.task == 'General': + scores = [ + fm['curve'].max().round(3), wfm.round(3), mae.round(3), sm.round(3), em['curve'].mean().round(3), int(hce.round()), + em['curve'].max().round(3), fm['curve'].mean().round(3), em['adp'].round(3), fm['adp'].round(3), + mba.round(3), biou['curve'].max().round(3), biou['curve'].mean().round(3), + ] + elif config.task == 'General-2K': + scores = [ + fm['curve'].max().round(3), wfm.round(3), mae.round(3), sm.round(3), em['curve'].mean().round(3), int(hce.round()), + em['curve'].max().round(3), fm['curve'].mean().round(3), em['adp'].round(3), fm['adp'].round(3), + mba.round(3), biou['curve'].max().round(3), biou['curve'].mean().round(3), + ] + elif config.task == 'Matting': + scores = [ + sm.round(3), fm['curve'].max().round(3), em['curve'].mean().round(3), mse.round(5), + em['curve'].max().round(3), fm['curve'].mean().round(3), wfm.round(3), em['adp'].round(3), fm['adp'].round(3), int(hce.round()), + mba.round(3), biou['curve'].max().round(3), biou['curve'].mean().round(3), + ] + else: + scores = [ + sm.round(3), mae.round(3), em['curve'].max().round(3), em['curve'].mean().round(3), + fm['curve'].max().round(3), fm['curve'].mean().round(3), wfm.round(3), + em['adp'].round(3), fm['adp'].round(3), int(hce.round()), + mba.round(3), biou['curve'].max().round(3), biou['curve'].mean().round(3), + ] + + for idx_score, score in enumerate(scores): + scores[idx_score] = '.' + format(score, '.3f').split('.')[-1] if score <= 1 else format(score, '<4') + records = [_data_name, _model_name] + scores + tb.add_row(records) + # Write results after every check. 
+ with open(filename, 'w+') as file_to_write: + file_to_write.write(str(tb)+'\n') + print(tb) + + +if __name__ == '__main__': + # set parameters + parser = argparse.ArgumentParser() + parser.add_argument( + '--gt_root', type=str, help='ground-truth root', + default=os.path.join(config.data_root_dir, config.task)) + parser.add_argument( + '--pred_root', type=str, help='prediction root', + default='./e_preds') + parser.add_argument( + '--data_lst', type=str, help='test dataset', + default=config.testsets.replace(',', '+')) + parser.add_argument( + '--save_dir', type=str, help='candidate competitors', + default='e_results') + parser.add_argument( + '--check_integrity', type=bool, help='whether to check the file integrity', + default=False) + parser.add_argument( + '--metrics', type=str, help='candidate competitors', + default='+'.join(['S', 'MAE', 'E', 'F', 'WF', 'MBA', 'BIoU', 'MSE', 'HCE'][:100 if 'DIS5K' in config.task else -1])) + args = parser.parse_args() + args.metrics = '+'.join(['S', 'MAE', 'E', 'F', 'WF', 'MBA', 'BIoU', 'MSE', 'HCE'][:100 if sum(['DIS-' in _data for _data in args.data_lst.split('+')]) else -1]) + + os.makedirs(args.save_dir, exist_ok=True) + try: + args.model_lst = [m for m in sorted(os.listdir(args.pred_root), key=lambda x: int(x.split('epoch_')[-1]), reverse=True) if int(m.split('epoch_')[-1]) % 1 == 0] + except: + args.model_lst = [m for m in sorted(os.listdir(args.pred_root))] + + # check the integrity of each candidates + if args.check_integrity: + for _data_name in args.data_lst.split('+'): + for _model_name in args.model_lst: + gt_pth = os.path.join(args.gt_root, _data_name) + pred_pth = os.path.join(args.pred_root, _model_name, _data_name) + if not sorted(os.listdir(gt_pth)) == sorted(os.listdir(pred_pth)): + print(len(sorted(os.listdir(gt_pth))), len(sorted(os.listdir(pred_pth)))) + print('The {} Dataset of {} Model is not matching to the ground-truth'.format(_data_name, _model_name)) + else: + print('>>> skip check the integrity of each candidates') + + # start engine + do_eval(args) diff --git a/engine/BiRefNet/evaluation/metrics.py b/engine/BiRefNet/evaluation/metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..ae1da7d81551e8ee40a69caf5952b57eb16cde2d --- /dev/null +++ b/engine/BiRefNet/evaluation/metrics.py @@ -0,0 +1,791 @@ +import os +from tqdm import tqdm +import cv2 +from PIL import Image +import numpy as np +from scipy.ndimage import convolve, distance_transform_edt as bwdist +from skimage.morphology import skeletonize +from skimage.morphology import disk +from skimage.measure import label + + +_EPS = np.spacing(1) +_TYPE = np.float64 + + +def evaluator(gt_paths, pred_paths, metrics=['S', 'MAE', 'E', 'F', 'WF', 'MBA', 'BIoU', 'MSE', 'HCE'], verbose=False): + # define measures + if 'E' in metrics: + EM = EMeasure() + if 'S' in metrics: + SM = SMeasure() + if 'F' in metrics: + FM = FMeasure() + if 'MAE' in metrics: + MAE = MAEMeasure() + if 'MSE' in metrics: + MSE = MSEMeasure() + if 'WF' in metrics: + WFM = WeightedFMeasure() + if 'HCE' in metrics: + HCE = HCEMeasure() + if 'MBA' in metrics: + MBA = MBAMeasure() + if 'BIoU' in metrics: + BIoU = BIoUMeasure() + + if isinstance(gt_paths, list) and isinstance(pred_paths, list): + # print(len(gt_paths), len(pred_paths)) + assert len(gt_paths) == len(pred_paths) + + for idx_sample in tqdm(range(len(gt_paths)), total=len(gt_paths)) if verbose else range(len(gt_paths)): + gt = gt_paths[idx_sample] + pred = pred_paths[idx_sample] + + pred = pred[:-4] + '.png' + valid_extensions = 
['.png', '.jpg', '.PNG', '.JPG', '.JPEG'] + file_exists = False + for ext in valid_extensions: + if os.path.exists(pred[:-4] + ext): + pred = pred[:-4] + ext + file_exists = True + break + if file_exists: + pred_ary = cv2.imread(pred, cv2.IMREAD_GRAYSCALE) + else: + print('Not exists:', pred) + + gt_ary = cv2.imread(gt, cv2.IMREAD_GRAYSCALE) + pred_ary = cv2.resize(pred_ary, (gt_ary.shape[1], gt_ary.shape[0])) + + if 'E' in metrics: + EM.step(pred=pred_ary, gt=gt_ary) + if 'S' in metrics: + SM.step(pred=pred_ary, gt=gt_ary) + if 'F' in metrics: + FM.step(pred=pred_ary, gt=gt_ary) + if 'MAE' in metrics: + MAE.step(pred=pred_ary, gt=gt_ary) + if 'MSE' in metrics: + MSE.step(pred=pred_ary, gt=gt_ary) + if 'WF' in metrics: + WFM.step(pred=pred_ary, gt=gt_ary) + if 'HCE' in metrics: + ske_path = gt.replace('/gt/', '/ske/') + if os.path.exists(ske_path): + ske_ary = cv2.imread(ske_path, cv2.IMREAD_GRAYSCALE) + ske_ary = ske_ary > 128 + else: + ske_ary = skeletonize(gt_ary > 128) + ske_save_dir = os.path.join(*ske_path.split(os.sep)[:-1]) + if ske_path[0] == os.sep: + ske_save_dir = os.sep + ske_save_dir + os.makedirs(ske_save_dir, exist_ok=True) + cv2.imwrite(ske_path, ske_ary.astype(np.uint8) * 255) + HCE.step(pred=pred_ary, gt=gt_ary, gt_ske=ske_ary) + if 'MBA' in metrics: + MBA.step(pred=pred_ary, gt=gt_ary) + if 'BIoU' in metrics: + BIoU.step(pred=pred_ary, gt=gt_ary) + + if 'E' in metrics: + em = EM.get_results()['em'] + else: + em = {'curve': np.array([np.float64(-1)]), 'adp': np.float64(-1)} + if 'S' in metrics: + sm = SM.get_results()['sm'] + else: + sm = np.float64(-1) + if 'F' in metrics: + fm = FM.get_results()['fm'] + else: + fm = {'curve': np.array([np.float64(-1)]), 'adp': np.float64(-1)} + if 'MAE' in metrics: + mae = MAE.get_results()['mae'] + else: + mae = np.float64(-1) + if 'MSE' in metrics: + mse = MSE.get_results()['mse'] + else: + mse = np.float64(-1) + if 'WF' in metrics: + wfm = WFM.get_results()['wfm'] + else: + wfm = np.float64(-1) + if 'HCE' in metrics: + hce = HCE.get_results()['hce'] + else: + hce = np.float64(-1) + if 'MBA' in metrics: + mba = MBA.get_results()['mba'] + else: + mba = np.float64(-1) + if 'BIoU' in metrics: + biou = BIoU.get_results()['biou'] + else: + biou = {'curve': np.array([np.float64(-1)])} + + return em, sm, fm, mae, mse, wfm, hce, mba, biou + + +def _prepare_data(pred: np.ndarray, gt: np.ndarray) -> tuple: + gt = gt > 128 + pred = pred / 255 + if pred.max() != pred.min(): + pred = (pred - pred.min()) / (pred.max() - pred.min()) + return pred, gt + + +def _get_adaptive_threshold(matrix: np.ndarray, max_value: float = 1) -> float: + return min(2 * matrix.mean(), max_value) + + +class FMeasure(object): + def __init__(self, beta: float = 0.3): + self.beta = beta + self.precisions = [] + self.recalls = [] + self.adaptive_fms = [] + self.changeable_fms = [] + + def step(self, pred: np.ndarray, gt: np.ndarray): + pred, gt = _prepare_data(pred, gt) + + adaptive_fm = self.cal_adaptive_fm(pred=pred, gt=gt) + self.adaptive_fms.append(adaptive_fm) + + precisions, recalls, changeable_fms = self.cal_pr(pred=pred, gt=gt) + self.precisions.append(precisions) + self.recalls.append(recalls) + self.changeable_fms.append(changeable_fms) + + def cal_adaptive_fm(self, pred: np.ndarray, gt: np.ndarray) -> float: + adaptive_threshold = _get_adaptive_threshold(pred, max_value=1) + binary_predcition = pred >= adaptive_threshold + area_intersection = binary_predcition[gt].sum() + if area_intersection == 0: + adaptive_fm = 0 + else: + pre = area_intersection / 
np.count_nonzero(binary_predcition) + rec = area_intersection / np.count_nonzero(gt) + adaptive_fm = (1 + self.beta) * pre * rec / (self.beta * pre + rec) + return adaptive_fm + + def cal_pr(self, pred: np.ndarray, gt: np.ndarray) -> tuple: + pred = (pred * 255).astype(np.uint8) + bins = np.linspace(0, 256, 257) + fg_hist, _ = np.histogram(pred[gt], bins=bins) + bg_hist, _ = np.histogram(pred[~gt], bins=bins) + fg_w_thrs = np.cumsum(np.flip(fg_hist), axis=0) + bg_w_thrs = np.cumsum(np.flip(bg_hist), axis=0) + TPs = fg_w_thrs + Ps = fg_w_thrs + bg_w_thrs + Ps[Ps == 0] = 1 + T = max(np.count_nonzero(gt), 1) + precisions = TPs / Ps + recalls = TPs / T + numerator = (1 + self.beta) * precisions * recalls + denominator = np.where(numerator == 0, 1, self.beta * precisions + recalls) + changeable_fms = numerator / denominator + return precisions, recalls, changeable_fms + + def get_results(self) -> dict: + adaptive_fm = np.mean(np.array(self.adaptive_fms, _TYPE)) + changeable_fm = np.mean(np.array(self.changeable_fms, dtype=_TYPE), axis=0) + precision = np.mean(np.array(self.precisions, dtype=_TYPE), axis=0) # N, 256 + recall = np.mean(np.array(self.recalls, dtype=_TYPE), axis=0) # N, 256 + return dict(fm=dict(adp=adaptive_fm, curve=changeable_fm), + pr=dict(p=precision, r=recall)) + + +class MAEMeasure(object): + def __init__(self): + self.maes = [] + + def step(self, pred: np.ndarray, gt: np.ndarray): + pred, gt = _prepare_data(pred, gt) + + mae = self.cal_mae(pred, gt) + self.maes.append(mae) + + def cal_mae(self, pred: np.ndarray, gt: np.ndarray) -> float: + mae = np.mean(np.abs(pred - gt)) + return mae + + def get_results(self) -> dict: + mae = np.mean(np.array(self.maes, _TYPE)) + return dict(mae=mae) + + +class MSEMeasure(object): + def __init__(self): + self.mses = [] + + def step(self, pred: np.ndarray, gt: np.ndarray): + pred, gt = _prepare_data(pred, gt) + + mse = self.cal_mse(pred, gt) + self.mses.append(mse) + + def cal_mse(self, pred: np.ndarray, gt: np.ndarray) -> float: + mse = np.mean((pred - gt) ** 2) + return mse + + def get_results(self) -> dict: + mse = np.mean(np.array(self.mses, _TYPE)) + return dict(mse=mse) + + +class SMeasure(object): + def __init__(self, alpha: float = 0.5): + self.sms = [] + self.alpha = alpha + + def step(self, pred: np.ndarray, gt: np.ndarray): + pred, gt = _prepare_data(pred=pred, gt=gt) + + sm = self.cal_sm(pred, gt) + self.sms.append(sm) + + def cal_sm(self, pred: np.ndarray, gt: np.ndarray) -> float: + y = np.mean(gt) + if y == 0: + sm = 1 - np.mean(pred) + elif y == 1: + sm = np.mean(pred) + else: + sm = self.alpha * self.object(pred, gt) + (1 - self.alpha) * self.region(pred, gt) + sm = max(0, sm) + return sm + + def object(self, pred: np.ndarray, gt: np.ndarray) -> float: + fg = pred * gt + bg = (1 - pred) * (1 - gt) + u = np.mean(gt) + object_score = u * self.s_object(fg, gt) + (1 - u) * self.s_object(bg, 1 - gt) + return object_score + + def s_object(self, pred: np.ndarray, gt: np.ndarray) -> float: + x = np.mean(pred[gt == 1]) + sigma_x = np.std(pred[gt == 1], ddof=1) + score = 2 * x / (np.power(x, 2) + 1 + sigma_x + _EPS) + return score + + def region(self, pred: np.ndarray, gt: np.ndarray) -> float: + x, y = self.centroid(gt) + part_info = self.divide_with_xy(pred, gt, x, y) + w1, w2, w3, w4 = part_info['weight'] + pred1, pred2, pred3, pred4 = part_info['pred'] + gt1, gt2, gt3, gt4 = part_info['gt'] + score1 = self.ssim(pred1, gt1) + score2 = self.ssim(pred2, gt2) + score3 = self.ssim(pred3, gt3) + score4 = self.ssim(pred4, gt4) + + 
return w1 * score1 + w2 * score2 + w3 * score3 + w4 * score4 + + def centroid(self, matrix: np.ndarray) -> tuple: + h, w = matrix.shape + area_object = np.count_nonzero(matrix) + if area_object == 0: + x = np.round(w / 2) + y = np.round(h / 2) + else: + # More details can be found at: https://www.yuque.com/lart/blog/gpbigm + y, x = np.argwhere(matrix).mean(axis=0).round() + return int(x) + 1, int(y) + 1 + + def divide_with_xy(self, pred: np.ndarray, gt: np.ndarray, x, y) -> dict: + h, w = gt.shape + area = h * w + + gt_LT = gt[0:y, 0:x] + gt_RT = gt[0:y, x:w] + gt_LB = gt[y:h, 0:x] + gt_RB = gt[y:h, x:w] + + pred_LT = pred[0:y, 0:x] + pred_RT = pred[0:y, x:w] + pred_LB = pred[y:h, 0:x] + pred_RB = pred[y:h, x:w] + + w1 = x * y / area + w2 = y * (w - x) / area + w3 = (h - y) * x / area + w4 = 1 - w1 - w2 - w3 + + return dict(gt=(gt_LT, gt_RT, gt_LB, gt_RB), + pred=(pred_LT, pred_RT, pred_LB, pred_RB), + weight=(w1, w2, w3, w4)) + + def ssim(self, pred: np.ndarray, gt: np.ndarray) -> float: + h, w = pred.shape + N = h * w + + x = np.mean(pred) + y = np.mean(gt) + + sigma_x = np.sum((pred - x) ** 2) / (N - 1) + sigma_y = np.sum((gt - y) ** 2) / (N - 1) + sigma_xy = np.sum((pred - x) * (gt - y)) / (N - 1) + + alpha = 4 * x * y * sigma_xy + beta = (x ** 2 + y ** 2) * (sigma_x + sigma_y) + + if alpha != 0: + score = alpha / (beta + _EPS) + elif alpha == 0 and beta == 0: + score = 1 + else: + score = 0 + return score + + def get_results(self) -> dict: + sm = np.mean(np.array(self.sms, dtype=_TYPE)) + return dict(sm=sm) + + +class EMeasure(object): + def __init__(self): + self.adaptive_ems = [] + self.changeable_ems = [] + + def step(self, pred: np.ndarray, gt: np.ndarray): + pred, gt = _prepare_data(pred=pred, gt=gt) + self.gt_fg_numel = np.count_nonzero(gt) + self.gt_size = gt.shape[0] * gt.shape[1] + + changeable_ems = self.cal_changeable_em(pred, gt) + self.changeable_ems.append(changeable_ems) + adaptive_em = self.cal_adaptive_em(pred, gt) + self.adaptive_ems.append(adaptive_em) + + def cal_adaptive_em(self, pred: np.ndarray, gt: np.ndarray) -> float: + adaptive_threshold = _get_adaptive_threshold(pred, max_value=1) + adaptive_em = self.cal_em_with_threshold(pred, gt, threshold=adaptive_threshold) + return adaptive_em + + def cal_changeable_em(self, pred: np.ndarray, gt: np.ndarray) -> np.ndarray: + changeable_ems = self.cal_em_with_cumsumhistogram(pred, gt) + return changeable_ems + + def cal_em_with_threshold(self, pred: np.ndarray, gt: np.ndarray, threshold: float) -> float: + binarized_pred = pred >= threshold + fg_fg_numel = np.count_nonzero(binarized_pred & gt) + fg_bg_numel = np.count_nonzero(binarized_pred & ~gt) + + fg___numel = fg_fg_numel + fg_bg_numel + bg___numel = self.gt_size - fg___numel + + if self.gt_fg_numel == 0: + enhanced_matrix_sum = bg___numel + elif self.gt_fg_numel == self.gt_size: + enhanced_matrix_sum = fg___numel + else: + parts_numel, combinations = self.generate_parts_numel_combinations( + fg_fg_numel=fg_fg_numel, fg_bg_numel=fg_bg_numel, + pred_fg_numel=fg___numel, pred_bg_numel=bg___numel, + ) + + results_parts = [] + for i, (part_numel, combination) in enumerate(zip(parts_numel, combinations)): + align_matrix_value = 2 * (combination[0] * combination[1]) / \ + (combination[0] ** 2 + combination[1] ** 2 + _EPS) + enhanced_matrix_value = (align_matrix_value + 1) ** 2 / 4 + results_parts.append(enhanced_matrix_value * part_numel) + enhanced_matrix_sum = sum(results_parts) + + em = enhanced_matrix_sum / (self.gt_size - 1 + _EPS) + return em + + def 
cal_em_with_cumsumhistogram(self, pred: np.ndarray, gt: np.ndarray) -> np.ndarray: + pred = (pred * 255).astype(np.uint8) + bins = np.linspace(0, 256, 257) + fg_fg_hist, _ = np.histogram(pred[gt], bins=bins) + fg_bg_hist, _ = np.histogram(pred[~gt], bins=bins) + fg_fg_numel_w_thrs = np.cumsum(np.flip(fg_fg_hist), axis=0) + fg_bg_numel_w_thrs = np.cumsum(np.flip(fg_bg_hist), axis=0) + + fg___numel_w_thrs = fg_fg_numel_w_thrs + fg_bg_numel_w_thrs + bg___numel_w_thrs = self.gt_size - fg___numel_w_thrs + + if self.gt_fg_numel == 0: + enhanced_matrix_sum = bg___numel_w_thrs + elif self.gt_fg_numel == self.gt_size: + enhanced_matrix_sum = fg___numel_w_thrs + else: + parts_numel_w_thrs, combinations = self.generate_parts_numel_combinations( + fg_fg_numel=fg_fg_numel_w_thrs, fg_bg_numel=fg_bg_numel_w_thrs, + pred_fg_numel=fg___numel_w_thrs, pred_bg_numel=bg___numel_w_thrs, + ) + + results_parts = np.empty(shape=(4, 256), dtype=np.float64) + for i, (part_numel, combination) in enumerate(zip(parts_numel_w_thrs, combinations)): + align_matrix_value = 2 * (combination[0] * combination[1]) / \ + (combination[0] ** 2 + combination[1] ** 2 + _EPS) + enhanced_matrix_value = (align_matrix_value + 1) ** 2 / 4 + results_parts[i] = enhanced_matrix_value * part_numel + enhanced_matrix_sum = results_parts.sum(axis=0) + + em = enhanced_matrix_sum / (self.gt_size - 1 + _EPS) + return em + + def generate_parts_numel_combinations(self, fg_fg_numel, fg_bg_numel, pred_fg_numel, pred_bg_numel): + bg_fg_numel = self.gt_fg_numel - fg_fg_numel + bg_bg_numel = pred_bg_numel - bg_fg_numel + + parts_numel = [fg_fg_numel, fg_bg_numel, bg_fg_numel, bg_bg_numel] + + mean_pred_value = pred_fg_numel / self.gt_size + mean_gt_value = self.gt_fg_numel / self.gt_size + + demeaned_pred_fg_value = 1 - mean_pred_value + demeaned_pred_bg_value = 0 - mean_pred_value + demeaned_gt_fg_value = 1 - mean_gt_value + demeaned_gt_bg_value = 0 - mean_gt_value + + combinations = [ + (demeaned_pred_fg_value, demeaned_gt_fg_value), + (demeaned_pred_fg_value, demeaned_gt_bg_value), + (demeaned_pred_bg_value, demeaned_gt_fg_value), + (demeaned_pred_bg_value, demeaned_gt_bg_value) + ] + return parts_numel, combinations + + def get_results(self) -> dict: + adaptive_em = np.mean(np.array(self.adaptive_ems, dtype=_TYPE)) + changeable_em = np.mean(np.array(self.changeable_ems, dtype=_TYPE), axis=0) + return dict(em=dict(adp=adaptive_em, curve=changeable_em)) + + +class WeightedFMeasure(object): + def __init__(self, beta: float = 1): + self.beta = beta + self.weighted_fms = [] + + def step(self, pred: np.ndarray, gt: np.ndarray): + pred, gt = _prepare_data(pred=pred, gt=gt) + + if np.all(~gt): + wfm = 0 + else: + wfm = self.cal_wfm(pred, gt) + self.weighted_fms.append(wfm) + + def cal_wfm(self, pred: np.ndarray, gt: np.ndarray) -> float: + # [Dst,IDXT] = bwdist(dGT); + Dst, Idxt = bwdist(gt == 0, return_indices=True) + + # %Pixel dependency + # E = abs(FG-dGT); + E = np.abs(pred - gt) + Et = np.copy(E) + Et[gt == 0] = Et[Idxt[0][gt == 0], Idxt[1][gt == 0]] + + # K = fspecial('gaussian',7,5); + # EA = imfilter(Et,K); + K = self.matlab_style_gauss2D((7, 7), sigma=5) + EA = convolve(Et, weights=K, mode="constant", cval=0) + # MIN_E_EA = E; + # MIN_E_EA(GT & EA np.ndarray: + """ + 2D gaussian mask - should give the same result as MATLAB's + fspecial('gaussian',[shape],[sigma]) + """ + m, n = [(ss - 1) / 2 for ss in shape] + y, x = np.ogrid[-m: m + 1, -n: n + 1] + h = np.exp(-(x * x + y * y) / (2 * sigma * sigma)) + h[h < np.finfo(h.dtype).eps * h.max()] = 0 + 
sumh = h.sum() + if sumh != 0: + h /= sumh + return h + + def get_results(self) -> dict: + weighted_fm = np.mean(np.array(self.weighted_fms, dtype=_TYPE)) + return dict(wfm=weighted_fm) + + +class HCEMeasure(object): + def __init__(self): + self.hces = [] + + def step(self, pred: np.ndarray, gt: np.ndarray, gt_ske): + # pred, gt = _prepare_data(pred, gt) + + hce = self.cal_hce(pred, gt, gt_ske) + self.hces.append(hce) + + def get_results(self) -> dict: + hce = np.mean(np.array(self.hces, _TYPE)) + return dict(hce=hce) + + + def cal_hce(self, pred: np.ndarray, gt: np.ndarray, gt_ske: np.ndarray, relax=5, epsilon=2.0) -> float: + # Binarize gt + if(len(gt.shape)>2): + gt = gt[:, :, 0] + + epsilon_gt = 128#(np.amin(gt)+np.amax(gt))/2.0 + gt = (gt>epsilon_gt).astype(np.uint8) + + # Binarize pred + if(len(pred.shape)>2): + pred = pred[:, :, 0] + epsilon_pred = 128#(np.amin(pred)+np.amax(pred))/2.0 + pred = (pred>epsilon_pred).astype(np.uint8) + + Union = np.logical_or(gt, pred) + TP = np.logical_and(gt, pred) + FP = pred - TP + FN = gt - TP + + # relax the Union of gt and pred + Union_erode = Union.copy() + Union_erode = cv2.erode(Union_erode.astype(np.uint8), disk(1), iterations=relax) + + # --- get the relaxed False Positive regions for computing the human efforts in correcting them --- + FP_ = np.logical_and(FP, Union_erode) # get the relaxed FP + for i in range(0, relax): + FP_ = cv2.dilate(FP_.astype(np.uint8), disk(1)) + FP_ = np.logical_and(FP_, 1-np.logical_or(TP, FN)) + FP_ = np.logical_and(FP, FP_) + + # --- get the relaxed False Negative regions for computing the human efforts in correcting them --- + FN_ = np.logical_and(FN, Union_erode) # preserve the structural components of FN + ## recover the FN, where pixels are not close to the TP borders + for i in range(0, relax): + FN_ = cv2.dilate(FN_.astype(np.uint8), disk(1)) + FN_ = np.logical_and(FN_, 1-np.logical_or(TP, FP)) + FN_ = np.logical_and(FN, FN_) + FN_ = np.logical_or(FN_, np.logical_xor(gt_ske, np.logical_and(TP, gt_ske))) # preserve the structural components of FN + + ## 2. 
=============Find exact polygon control points and independent regions============== + ## find contours from FP_ + ctrs_FP, hier_FP = cv2.findContours(FP_.astype(np.uint8), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE) + ## find control points and independent regions for human correction + bdies_FP, indep_cnt_FP = self.filter_bdy_cond(ctrs_FP, FP_, np.logical_or(TP,FN_)) + ## find contours from FN_ + ctrs_FN, hier_FN = cv2.findContours(FN_.astype(np.uint8), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE) + ## find control points and independent regions for human correction + bdies_FN, indep_cnt_FN = self.filter_bdy_cond(ctrs_FN, FN_, 1-np.logical_or(np.logical_or(TP, FP_), FN_)) + + poly_FP, poly_FP_len, poly_FP_point_cnt = self.approximate_RDP(bdies_FP, epsilon=epsilon) + poly_FN, poly_FN_len, poly_FN_point_cnt = self.approximate_RDP(bdies_FN, epsilon=epsilon) + + # FP_points+FP_indep+FN_points+FN_indep + return poly_FP_point_cnt+indep_cnt_FP+poly_FN_point_cnt+indep_cnt_FN + + def filter_bdy_cond(self, bdy_, mask, cond): + + cond = cv2.dilate(cond.astype(np.uint8), disk(1)) + labels = label(mask) # find the connected regions + lbls = np.unique(labels) # the indices of the connected regions + indep = np.ones(lbls.shape[0]) # the label of each connected regions + indep[0] = 0 # 0 indicate the background region + + boundaries = [] + h,w = cond.shape[0:2] + ind_map = np.zeros((h, w)) + indep_cnt = 0 + + for i in range(0, len(bdy_)): + tmp_bdies = [] + tmp_bdy = [] + for j in range(0, bdy_[i].shape[0]): + r, c = bdy_[i][j,0,1],bdy_[i][j,0,0] + + if(np.sum(cond[r, c])==0 or ind_map[r, c]!=0): + if(len(tmp_bdy)>0): + tmp_bdies.append(tmp_bdy) + tmp_bdy = [] + continue + tmp_bdy.append([c, r]) + ind_map[r, c] = ind_map[r, c] + 1 + indep[labels[r, c]] = 0 # indicates part of the boundary of this region needs human correction + if(len(tmp_bdy)>0): + tmp_bdies.append(tmp_bdy) + + # check if the first and the last boundaries are connected + # if yes, invert the first boundary and attach it after the last boundary + if(len(tmp_bdies)>1): + first_x, first_y = tmp_bdies[0][0] + last_x, last_y = tmp_bdies[-1][-1] + if((abs(first_x-last_x)==1 and first_y==last_y) or + (first_x==last_x and abs(first_y-last_y)==1) or + (abs(first_x-last_x)==1 and abs(first_y-last_y)==1) + ): + tmp_bdies[-1].extend(tmp_bdies[0][::-1]) + del tmp_bdies[0] + + for k in range(0, len(tmp_bdies)): + tmp_bdies[k] = np.array(tmp_bdies[k])[:, np.newaxis, :] + if(len(tmp_bdies)>0): + boundaries.extend(tmp_bdies) + + return boundaries, np.sum(indep) + + # this function approximate each boundary by DP algorithm + # https://en.wikipedia.org/wiki/Ramer%E2%80%93Douglas%E2%80%93Peucker_algorithm + def approximate_RDP(self, boundaries, epsilon=1.0): + + boundaries_ = [] + boundaries_len_ = [] + pixel_cnt_ = 0 + + # polygon approximate of each boundary + for i in range(0, len(boundaries)): + boundaries_.append(cv2.approxPolyDP(boundaries[i], epsilon, False)) + + # count the control points number of each boundary and the total control points number of all the boundaries + for i in range(0, len(boundaries_)): + boundaries_len_.append(len(boundaries_[i])) + pixel_cnt_ = pixel_cnt_ + len(boundaries_[i]) + + return boundaries_, boundaries_len_, pixel_cnt_ + + +class MBAMeasure(object): + def __init__(self): + self.bas = [] + self.all_h = 0 + self.all_w = 0 + self.all_max = 0 + + def step(self, pred: np.ndarray, gt: np.ndarray): + # pred, gt = _prepare_data(pred, gt) + + refined = gt.copy() + + rmin = cmin = 0 + rmax, cmax = gt.shape + + self.all_h += rmax + 
self.all_w += cmax + self.all_max += max(rmax, cmax) + + refined_h, refined_w = refined.shape + if refined_h != cmax: + refined = np.array(Image.fromarray(pred).resize((cmax, rmax), Image.BILINEAR)) + + if not(gt.sum() < 32*32): + if not((cmax==cmin) or (rmax==rmin)): + class_refined_prob = np.array(Image.fromarray(pred).resize((cmax-cmin, rmax-rmin), Image.BILINEAR)) + refined[rmin:rmax, cmin:cmax] = class_refined_prob + + pred = pred > 128 + gt = gt > 128 + + ba = self.cal_ba(pred, gt) + self.bas.append(ba) + + def get_disk_kernel(self, radius): + return cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (radius*2+1, radius*2+1)) + + def cal_ba(self, pred: np.ndarray, gt: np.ndarray) -> np.ndarray: + """ + Calculate the mean absolute error. + + :return: ba + """ + + gt = gt.astype(np.uint8) + pred = pred.astype(np.uint8) + + h, w = gt.shape + + min_radius = 1 + max_radius = (w+h)/300 + num_steps = 5 + + pred_acc = [None] * num_steps + + for i in range(num_steps): + curr_radius = min_radius + int((max_radius-min_radius)/num_steps*i) + + kernel = self.get_disk_kernel(curr_radius) + boundary_region = cv2.morphologyEx(gt, cv2.MORPH_GRADIENT, kernel) > 0 + + gt_in_bound = gt[boundary_region] + pred_in_bound = pred[boundary_region] + + num_edge_pixels = (boundary_region).sum() + num_pred_gd_pix = ((gt_in_bound) * (pred_in_bound) + (1-gt_in_bound) * (1-pred_in_bound)).sum() + + pred_acc[i] = num_pred_gd_pix / num_edge_pixels + + ba = sum(pred_acc)/num_steps + return ba + + def get_results(self) -> dict: + mba = np.mean(np.array(self.bas, _TYPE)) + return dict(mba=mba) + + +class BIoUMeasure(object): + def __init__(self, dilation_ratio=0.02): + self.bious = [] + self.dilation_ratio = dilation_ratio + + def mask_to_boundary(self, mask): + h, w = mask.shape + img_diag = np.sqrt(h ** 2 + w ** 2) + dilation = int(round(self.dilation_ratio * img_diag)) + if dilation < 1: + dilation = 1 + # Pad image so mask truncated by the image border is also considered as boundary. + new_mask = cv2.copyMakeBorder(mask, 1, 1, 1, 1, cv2.BORDER_CONSTANT, value=0) + kernel = np.ones((3, 3), dtype=np.uint8) + new_mask_erode = cv2.erode(new_mask, kernel, iterations=dilation) + mask_erode = new_mask_erode[1 : h + 1, 1 : w + 1] + # G_d intersects G in the paper. 
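+        # Subtracting the eroded mask leaves only the in-mask pixels that lie within `dilation`
+        # of the object border (the G_d ∩ G band referred to above); interior pixels cancel out.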
+ return mask - mask_erode + + def step(self, pred: np.ndarray, gt: np.ndarray): + pred, gt = _prepare_data(pred, gt) + + bious = self.cal_biou(pred=pred, gt=gt) + self.bious.append(bious) + + def cal_biou(self, pred, gt): + pred = (pred * 255).astype(np.uint8) + pred = self.mask_to_boundary(pred) + gt = (gt * 255).astype(np.uint8) + gt = self.mask_to_boundary(gt) + gt = gt > 128 + + bins = np.linspace(0, 256, 257) + fg_hist, _ = np.histogram(pred[gt], bins=bins) # ture positive + bg_hist, _ = np.histogram(pred[~gt], bins=bins) # false positive + fg_w_thrs = np.cumsum(np.flip(fg_hist), axis=0) + bg_w_thrs = np.cumsum(np.flip(bg_hist), axis=0) + TPs = fg_w_thrs + Ps = fg_w_thrs + bg_w_thrs # positives + Ps[Ps == 0] = 1 + T = max(np.count_nonzero(gt), 1) + + ious = TPs / (T + bg_w_thrs) + return ious + + def get_results(self) -> dict: + biou = np.mean(np.array(self.bious, dtype=_TYPE), axis=0) + return dict(biou=dict(curve=biou)) diff --git a/engine/BiRefNet/gen_best_ep.py b/engine/BiRefNet/gen_best_ep.py new file mode 100644 index 0000000000000000000000000000000000000000..6e75512e4f382303974db9d5314fca2618f6b980 --- /dev/null +++ b/engine/BiRefNet/gen_best_ep.py @@ -0,0 +1,85 @@ +import os +from glob import glob +import numpy as np +from config import Config + + +config = Config() + +eval_txts = sorted(glob('e_results/*_eval.txt')) +print('eval_txts:', [_.split(os.sep)[-1] for _ in eval_txts]) +score_panel = {} +sep = '&' +metrics = ['sm', 'wfm', 'hce'] # we used HCE for DIS and wFm for others. +if 'DIS5K' not in config.task: + metrics.remove('hce') + +for metric in metrics: + print('Metric:', metric) + current_line_nums = [] + for idx_et, eval_txt in enumerate(eval_txts): + with open(eval_txt, 'r') as f: + lines = [l for l in f.readlines()[3:] if '.' in l] + current_line_nums.append(len(lines)) + for idx_et, eval_txt in enumerate(eval_txts): + with open(eval_txt, 'r') as f: + lines = [l for l in f.readlines()[3:] if '.' in l] + for idx_line, line in enumerate(lines[:min(current_line_nums)]): # Consist line numbers by the minimal result file. 
+ properties = line.strip().strip(sep).split(sep) + dataset = properties[0].strip() + ckpt = properties[1].strip() + if int(ckpt.split('--epoch_')[-1].strip()) < 0: + continue + targe_idx = { + 'sm': [5, 2, 2, 5, 5, 2], + 'wfm': [3, 3, 8, 3, 3, 8], + 'hce': [7, -1, -1, 7, 7, -1] + }[metric][['DIS5K', 'COD', 'HRSOD', 'General', 'General-2K', 'Matting'].index(config.task)] + if metric != 'hce': + score_sm = float(properties[targe_idx].strip()) + else: + score_sm = int(properties[targe_idx].strip().strip('.')) + if idx_et == 0: + score_panel[ckpt] = [] + score_panel[ckpt].append(score_sm) + + metrics_min = ['hce', 'mae'] + max_or_min = min if metric in metrics_min else max + score_max = max_or_min(score_panel.values(), key=lambda x: np.sum(x)) + + good_models = [] + for k, v in score_panel.items(): + if (np.sum(v) <= np.sum(score_max)) if metric in metrics_min else (np.sum(v) >= np.sum(score_max)): + print(k, v) + good_models.append(k) + + # Write + with open(eval_txt, 'r') as f: + lines = f.readlines() + info4good_models = lines[:3] + metric_names = [m.strip() for m in lines[1].strip().strip('&').split('&')[2:]] + testset_mean_values = {metric_name: [] for metric_name in metric_names} + for good_model in good_models: + for idx_et, eval_txt in enumerate(eval_txts): + with open(eval_txt, 'r') as f: + lines = f.readlines() + for line in lines: + if set([good_model]) & set([_.strip() for _ in line.split(sep)]): + info4good_models.append(line) + metric_scores = [float(m.strip()) for m in line.strip().strip('&').split('&')[2:]] + for idx_score, metric_score in enumerate(metric_scores): + testset_mean_values[metric_names[idx_score]].append(metric_score) + + if 'DIS5K' in config.task: + testset_mean_values_lst = ['{:<4}'.format(int(np.mean(v_lst[:-1]).round())) if name == 'HCE' else '{:.3f}'.format(np.mean(v_lst[:-1])).lstrip('0') for name, v_lst in testset_mean_values.items()] # [:-1] to remove DIS-VD + sample_line_for_placing_mean_values = info4good_models[-2] + numbers_placed_well = sample_line_for_placing_mean_values.replace(sample_line_for_placing_mean_values.split('&')[1].strip(), 'DIS-TEs').strip().split('&')[3:] + for idx_number, (number_placed_well, testset_mean_value) in enumerate(zip(numbers_placed_well, testset_mean_values_lst)): + numbers_placed_well[idx_number] = number_placed_well.replace(number_placed_well.strip(), testset_mean_value) + testset_mean_line = '&'.join(sample_line_for_placing_mean_values.replace(sample_line_for_placing_mean_values.split('&')[1].strip(), 'DIS-TEs').split('&')[:3] + numbers_placed_well) + '\n' + info4good_models.append(testset_mean_line) + info4good_models.append(lines[-1]) + info = ''.join(info4good_models) + print(info) + with open(os.path.join('e_results', 'eval-{}_best_on_{}.txt'.format(config.task, metric)), 'w') as f: + f.write(info + '\n') diff --git a/engine/BiRefNet/image_proc.py b/engine/BiRefNet/image_proc.py new file mode 100644 index 0000000000000000000000000000000000000000..40cd827e91a58ee1676943bd880fdc5e3136b2b4 --- /dev/null +++ b/engine/BiRefNet/image_proc.py @@ -0,0 +1,116 @@ +import random +from PIL import Image, ImageEnhance +import numpy as np +import cv2 + + +def refine_foreground(image, mask, r=90): + if mask.size != image.size: + mask = mask.resize(image.size) + image = np.array(image) / 255.0 + mask = np.array(mask) / 255.0 + estimated_foreground = FB_blur_fusion_foreground_estimator_2(image, mask, r=r) + image_masked = Image.fromarray((estimated_foreground * 255.0).astype(np.uint8)) + return image_masked + + +def 
FB_blur_fusion_foreground_estimator_2(image, alpha, r=90): + # Thanks to the source: https://github.com/Photoroom/fast-foreground-estimation + alpha = alpha[:, :, None] + F, blur_B = FB_blur_fusion_foreground_estimator( + image, image, image, alpha, r) + return FB_blur_fusion_foreground_estimator(image, F, blur_B, alpha, r=6)[0] + + +def FB_blur_fusion_foreground_estimator(image, F, B, alpha, r=90): + if isinstance(image, Image.Image): + image = np.array(image) / 255.0 + blurred_alpha = cv2.blur(alpha, (r, r))[:, :, None] + + blurred_FA = cv2.blur(F * alpha, (r, r)) + blurred_F = blurred_FA / (blurred_alpha + 1e-5) + + blurred_B1A = cv2.blur(B * (1 - alpha), (r, r)) + blurred_B = blurred_B1A / ((1 - blurred_alpha) + 1e-5) + F = blurred_F + alpha * \ + (image - alpha * blurred_F - (1 - alpha) * blurred_B) + F = np.clip(F, 0, 1) + return F, blurred_B + + +def preproc(image, label, preproc_methods=['flip']): + if 'flip' in preproc_methods: + image, label = cv_random_flip(image, label) + if 'crop' in preproc_methods: + image, label = random_crop(image, label) + if 'rotate' in preproc_methods: + image, label = random_rotate(image, label) + if 'enhance' in preproc_methods: + image = color_enhance(image) + if 'pepper' in preproc_methods: + image = random_pepper(image) + return image, label + + +def cv_random_flip(img, label): + if random.random() > 0.5: + img = img.transpose(Image.FLIP_LEFT_RIGHT) + label = label.transpose(Image.FLIP_LEFT_RIGHT) + return img, label + + +def random_crop(image, label): + border = 30 + image_width = image.size[0] + image_height = image.size[1] + border = int(min(image_width, image_height) * 0.1) + crop_win_width = np.random.randint(image_width - border, image_width) + crop_win_height = np.random.randint(image_height - border, image_height) + random_region = ( + (image_width - crop_win_width) >> 1, (image_height - crop_win_height) >> 1, (image_width + crop_win_width) >> 1, + (image_height + crop_win_height) >> 1) + return image.crop(random_region), label.crop(random_region) + + +def random_rotate(image, label, angle=15): + mode = Image.BICUBIC + if random.random() > 0.8: + random_angle = np.random.randint(-angle, angle) + image = image.rotate(random_angle, mode) + label = label.rotate(random_angle, mode) + return image, label + + +def color_enhance(image): + bright_intensity = random.randint(5, 15) / 10.0 + image = ImageEnhance.Brightness(image).enhance(bright_intensity) + contrast_intensity = random.randint(5, 15) / 10.0 + image = ImageEnhance.Contrast(image).enhance(contrast_intensity) + color_intensity = random.randint(0, 20) / 10.0 + image = ImageEnhance.Color(image).enhance(color_intensity) + sharp_intensity = random.randint(0, 30) / 10.0 + image = ImageEnhance.Sharpness(image).enhance(sharp_intensity) + return image + + +def random_gaussian(image, mean=0.1, sigma=0.35): + def gaussianNoisy(im, mean=mean, sigma=sigma): + for _i in range(len(im)): + im[_i] += random.gauss(mean, sigma) + return im + + img = np.asarray(image) + width, height = img.shape + img = gaussianNoisy(img[:].flatten(), mean, sigma) + img = img.reshape([width, height]) + return Image.fromarray(np.uint8(img)) + + +def random_pepper(img, N=0.0015): + img = np.array(img) + noiseNum = int(N * img.shape[0] * img.shape[1]) + for i in range(noiseNum): + randX = random.randint(0, img.shape[0] - 1) + randY = random.randint(0, img.shape[1] - 1) + img[randX, randY] = random.randint(0, 1) * 255 + return Image.fromarray(img) diff --git a/engine/BiRefNet/inference.py b/engine/BiRefNet/inference.py new file 
mode 100644 index 0000000000000000000000000000000000000000..b8ea38b82bd618020c026565c35f59b85ec18007 --- /dev/null +++ b/engine/BiRefNet/inference.py @@ -0,0 +1,97 @@ +import os +import argparse +from glob import glob +from tqdm import tqdm +import cv2 +import torch + +from dataset import MyData +from models.birefnet import BiRefNet, BiRefNetC2F +from utils import save_tensor_img, check_state_dict +from config import Config + + +config = Config() + + +def inference(model, data_loader_test, pred_root, method, testset, device=0): + model_training = model.training + if model_training: + model.eval() + for batch in tqdm(data_loader_test, total=len(data_loader_test)) if 1 or config.verbose_eval else data_loader_test: + inputs = batch[0].to(device) + # gts = batch[1].to(device) + label_paths = batch[-1] + with torch.no_grad(): + scaled_preds = model(inputs)[-1].sigmoid() + + os.makedirs(os.path.join(pred_root, method, testset), exist_ok=True) + + for idx_sample in range(scaled_preds.shape[0]): + res = torch.nn.functional.interpolate( + scaled_preds[idx_sample].unsqueeze(0), + size=cv2.imread(label_paths[idx_sample], cv2.IMREAD_GRAYSCALE).shape[:2], + mode='bilinear', + align_corners=True + ) + save_tensor_img(res, os.path.join(os.path.join(pred_root, method, testset), label_paths[idx_sample].replace('\\', '/').split('/')[-1])) # test set dir + file name + if model_training: + model.train() + return None + + +def main(args): + # Init model + + device = config.device + if args.ckpt_folder: + print('Testing with models in {}'.format(args.ckpt_folder)) + else: + print('Testing with model {}'.format(args.ckpt)) + + if config.model == 'BiRefNet': + model = BiRefNet(bb_pretrained=False) + elif config.model == 'BiRefNetC2F': + model = BiRefNetC2F(bb_pretrained=False) + weights_lst = sorted( + glob(os.path.join(args.ckpt_folder, '*.pth')) if args.ckpt_folder else [args.ckpt], + key=lambda x: int(x.split('epoch_')[-1].split('.pth')[0]), + reverse=True + ) + for testset in args.testsets.split('+'): + print('>>>> Testset: {}...'.format(testset)) + data_loader_test = torch.utils.data.DataLoader( + dataset=MyData(testset, image_size=config.size, is_train=False), + batch_size=config.batch_size_valid, shuffle=False, num_workers=config.num_workers, pin_memory=True + ) + for weights in weights_lst: + if int(weights.strip('.pth').split('epoch_')[-1]) % 1 != 0: + continue + print('\tInferencing {}...'.format(weights)) + state_dict = torch.load(weights, map_location='cpu', weights_only=True) + state_dict = check_state_dict(state_dict) + model.load_state_dict(state_dict) + model = model.to(device) + inference( + model, data_loader_test=data_loader_test, pred_root=args.pred_root, + method='--'.join([w.rstrip('.pth') for w in weights.split(os.sep)[-2:]]), + testset=testset, device=config.device + ) + + +if __name__ == '__main__': + # Parameter from command line + parser = argparse.ArgumentParser(description='') + parser.add_argument('--ckpt', type=str, help='model folder') + parser.add_argument('--ckpt_folder', default=sorted(glob(os.path.join('ckpt', '*')))[-1], type=str, help='model folder') + parser.add_argument('--pred_root', default='e_preds', type=str, help='Output folder') + parser.add_argument('--testsets', + default=config.testsets.replace(',', '+'), + type=str, + help="Test all sets: DIS5K -> 'DIS-VD+DIS-TE1+DIS-TE2+DIS-TE3+DIS-TE4'") + + args = parser.parse_args() + + if config.precisionHigh: + torch.set_float32_matmul_precision('high') + main(args) diff --git a/engine/BiRefNet/inference_img.py 
b/engine/BiRefNet/inference_img.py new file mode 100644 index 0000000000000000000000000000000000000000..ae598d1a8754f3b6dadac45e3de046f1bfb24c55 --- /dev/null +++ b/engine/BiRefNet/inference_img.py @@ -0,0 +1,89 @@ +# Imports +import pdb +import time + +import torch +import tqlt.utils as tu +from models.birefnet import BiRefNet +from PIL import Image +from torchvision import transforms + +# # Option 1: loading BiRefNet with weights: +from transformers import AutoModelForImageSegmentation + +# # Option-3: Loading model and weights from local disk: +from utils import check_state_dict + +# birefnet = AutoModelForImageSegmentation.from_pretrained( +# "zhengpeng7/BiRefNet", trust_remote_code=True, local +# ) + +# # Option-2: loading weights with BiReNet codes: +# birefnet = BiRefNet.from_pretrained('zhengpeng7/BiRefNet') +imgs = tu.next_files("./in_the_wild", ".png") + + +birefnet = BiRefNet(bb_pretrained=False) +state_dict = torch.load("./BiRefNet-general-epoch_244.pth", map_location="cpu") +state_dict = check_state_dict(state_dict) +birefnet.load_state_dict(state_dict) + + +# Load Model +device = "cuda" +torch.set_float32_matmul_precision(["high", "highest"][0]) + +birefnet.to(device) +birefnet.eval() +print("BiRefNet is ready to use.") + +# Input Data +transform_image = transforms.Compose( + [ + transforms.Resize((1024, 1024)), + transforms.ToTensor(), + transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), + ] +) + + +import os +from glob import glob + +from image_proc import refine_foreground + +src_dir = "./images_todo" +image_paths = glob(os.path.join(src_dir, "*")) +dst_dir = "./predictions" +os.makedirs(dst_dir, exist_ok=True) +for image_path in imgs: + + print("Processing {} ...".format(image_path)) + image = Image.open(image_path) + input_images = transform_image(image).unsqueeze(0).to("cuda") + + # Prediction + start = time.time() + + with torch.no_grad(): + preds = birefnet(input_images)[-1].sigmoid().cpu() + + print(time.time() - start) + pred = preds[0].squeeze() + + # Save Results + file_ext = os.path.splitext(image_path)[-1] + pred_pil = transforms.ToPILImage()(pred) + pred_pil = pred_pil.resize(image.size) + pred_pil.save(image_path.replace(src_dir, dst_dir).replace(file_ext, "-mask.png")) + image_masked = refine_foreground(image, pred_pil) + image_masked.putalpha(pred_pil) + image_masked.save( + image_path.replace(src_dir, dst_dir).replace(file_ext, "-subject.png") + ) + + # Save Results + file_ext = os.path.splitext(image_path)[-1] + pred_pil = transforms.ToPILImage()(pred) + pred_pil = pred_pil.resize(image.size) + pred_pil.save(image_path.replace(src_dir, dst_dir).replace(file_ext, "-mask.png")) diff --git a/engine/BiRefNet/loss.py b/engine/BiRefNet/loss.py new file mode 100644 index 0000000000000000000000000000000000000000..3a13fa57b2c8fe3944abf994e2fbe1f904f6aad6 --- /dev/null +++ b/engine/BiRefNet/loss.py @@ -0,0 +1,246 @@ +import torch +from torch import nn +import torch.nn.functional as F +from torch.autograd import Variable +from math import exp +from config import Config + + +class ContourLoss(torch.nn.Module): + def __init__(self): + super(ContourLoss, self).__init__() + + def forward(self, pred, target, weight=10): + ''' + target, pred: tensor of shape (B, C, H, W), where target[:,:,region_in_contour] == 1, + target[:,:,region_out_contour] == 0. + weight: scalar, length term weight. 
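+        The returned value is weight * length + region: a boundary-length term built from
+        finite-difference gradients of pred (eq. 11), plus region terms comparing target
+        against the constant inside/outside values c_in=1 and c_out=0 (eq. 12).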
+ ''' + # length term + delta_r = pred[:,:,1:,:] - pred[:,:,:-1,:] # horizontal gradient (B, C, H-1, W) + delta_c = pred[:,:,:,1:] - pred[:,:,:,:-1] # vertical gradient (B, C, H, W-1) + + delta_r = delta_r[:,:,1:,:-2]**2 # (B, C, H-2, W-2) + delta_c = delta_c[:,:,:-2,1:]**2 # (B, C, H-2, W-2) + delta_pred = torch.abs(delta_r + delta_c) + + epsilon = 1e-8 # where is a parameter to avoid square root is zero in practice. + length = torch.mean(torch.sqrt(delta_pred + epsilon)) # eq.(11) in the paper, mean is used instead of sum. + + c_in = torch.ones_like(pred) + c_out = torch.zeros_like(pred) + + region_in = torch.mean( pred * (target - c_in )**2 ) # equ.(12) in the paper, mean is used instead of sum. + region_out = torch.mean( (1-pred) * (target - c_out)**2 ) + region = region_in + region_out + + loss = weight * length + region + + return loss + + +class IoULoss(torch.nn.Module): + def __init__(self): + super(IoULoss, self).__init__() + + def forward(self, pred, target): + b = pred.shape[0] + IoU = 0.0 + for i in range(0, b): + # compute the IoU of the foreground + Iand1 = torch.sum(target[i, :, :, :] * pred[i, :, :, :]) + Ior1 = torch.sum(target[i, :, :, :]) + torch.sum(pred[i, :, :, :]) - Iand1 + IoU1 = Iand1 / Ior1 + # IoU loss is (1-IoU1) + IoU = IoU + (1-IoU1) + # return IoU/b + return IoU + + +class StructureLoss(torch.nn.Module): + def __init__(self): + super(StructureLoss, self).__init__() + + def forward(self, pred, target): + weit = 1+5*torch.abs(F.avg_pool2d(target, kernel_size=31, stride=1, padding=15)-target) + wbce = F.binary_cross_entropy_with_logits(pred, target, reduction='none') + wbce = (weit*wbce).sum(dim=(2,3))/weit.sum(dim=(2,3)) + + pred = torch.sigmoid(pred) + inter = ((pred * target) * weit).sum(dim=(2, 3)) + union = ((pred + target) * weit).sum(dim=(2, 3)) + wiou = 1-(inter+1)/(union-inter+1) + + return (wbce+wiou).mean() + + +class PatchIoULoss(torch.nn.Module): + def __init__(self): + super(PatchIoULoss, self).__init__() + self.iou_loss = IoULoss() + + def forward(self, pred, target): + win_y, win_x = 64, 64 + iou_loss = 0. + for anchor_y in range(0, target.shape[0], win_y): + for anchor_x in range(0, target.shape[1], win_y): + patch_pred = pred[:, :, anchor_y:anchor_y+win_y, anchor_x:anchor_x+win_x] + patch_target = target[:, :, anchor_y:anchor_y+win_y, anchor_x:anchor_x+win_x] + patch_iou_loss = self.iou_loss(patch_pred, patch_target) + iou_loss += patch_iou_loss + return iou_loss + + +class ThrReg_loss(torch.nn.Module): + def __init__(self): + super(ThrReg_loss, self).__init__() + + def forward(self, pred, gt=None): + return torch.mean(1 - ((pred - 0) ** 2 + (pred - 1) ** 2)) + + +class ClsLoss(nn.Module): + """ + Auxiliary classification loss for each refined class output. + """ + def __init__(self): + super(ClsLoss, self).__init__() + self.config = Config() + self.lambdas_cls = self.config.lambdas_cls + + self.criterions_last = { + 'ce': nn.CrossEntropyLoss() + } + + def forward(self, preds, gt): + loss = 0. + for _, pred_lvl in enumerate(preds): + if pred_lvl is None: + continue + for criterion_name, criterion in self.criterions_last.items(): + loss += criterion(pred_lvl, gt) * self.lambdas_cls[criterion_name] + return loss + + +class PixLoss(nn.Module): + """ + Pixel loss for each refined map output. 
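+    Each criterion enabled in config.lambdas_pix_last is applied to every scaled prediction
+    (after sigmoid, bilinear-resized to the ground-truth shape when needed), and the weighted
+    losses are summed over all levels.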
+ """ + def __init__(self): + super(PixLoss, self).__init__() + self.config = Config() + self.lambdas_pix_last = self.config.lambdas_pix_last + + self.criterions_last = {} + if 'bce' in self.lambdas_pix_last and self.lambdas_pix_last['bce']: + self.criterions_last['bce'] = nn.BCELoss() + if 'iou' in self.lambdas_pix_last and self.lambdas_pix_last['iou']: + self.criterions_last['iou'] = IoULoss() + if 'iou_patch' in self.lambdas_pix_last and self.lambdas_pix_last['iou_patch']: + self.criterions_last['iou_patch'] = PatchIoULoss() + if 'ssim' in self.lambdas_pix_last and self.lambdas_pix_last['ssim']: + self.criterions_last['ssim'] = SSIMLoss() + if 'mae' in self.lambdas_pix_last and self.lambdas_pix_last['mae']: + self.criterions_last['mae'] = nn.L1Loss() + if 'mse' in self.lambdas_pix_last and self.lambdas_pix_last['mse']: + self.criterions_last['mse'] = nn.MSELoss() + if 'reg' in self.lambdas_pix_last and self.lambdas_pix_last['reg']: + self.criterions_last['reg'] = ThrReg_loss() + if 'cnt' in self.lambdas_pix_last and self.lambdas_pix_last['cnt']: + self.criterions_last['cnt'] = ContourLoss() + if 'structure' in self.lambdas_pix_last and self.lambdas_pix_last['structure']: + self.criterions_last['structure'] = StructureLoss() + + def forward(self, scaled_preds, gt): + loss = 0. + for _, pred_lvl in enumerate(scaled_preds): + if pred_lvl.shape != gt.shape: + pred_lvl = nn.functional.interpolate(pred_lvl, size=gt.shape[2:], mode='bilinear', align_corners=True) + for criterion_name, criterion in self.criterions_last.items(): + _loss = criterion(pred_lvl.sigmoid(), gt) * self.lambdas_pix_last[criterion_name] + loss += _loss + # print(criterion_name, _loss.item()) + return loss + + +class SSIMLoss(torch.nn.Module): + def __init__(self, window_size=11, size_average=True): + super(SSIMLoss, self).__init__() + self.window_size = window_size + self.size_average = size_average + self.channel = 1 + self.window = create_window(window_size, self.channel) + + def forward(self, img1, img2): + (_, channel, _, _) = img1.size() + if channel == self.channel and self.window.data.type() == img1.data.type(): + window = self.window + else: + window = create_window(self.window_size, channel) + if img1.is_cuda: + window = window.cuda(img1.get_device()) + window = window.type_as(img1) + self.window = window + self.channel = channel + return 1 - (1 + _ssim(img1, img2, window, self.window_size, channel, self.size_average)) / 2 + + +def gaussian(window_size, sigma): + gauss = torch.Tensor([exp(-(x - window_size//2)**2/float(2*sigma**2)) for x in range(window_size)]) + return gauss/gauss.sum() + + +def create_window(window_size, channel): + _1D_window = gaussian(window_size, 1.5).unsqueeze(1) + _2D_window = _1D_window.mm(_1D_window.t()).float().unsqueeze(0).unsqueeze(0) + window = Variable(_2D_window.expand(channel, 1, window_size, window_size).contiguous()) + return window + + +def _ssim(img1, img2, window, window_size, channel, size_average=True): + mu1 = F.conv2d(img1, window, padding = window_size//2, groups=channel) + mu2 = F.conv2d(img2, window, padding = window_size//2, groups=channel) + + mu1_sq = mu1.pow(2) + mu2_sq = mu2.pow(2) + mu1_mu2 = mu1*mu2 + + sigma1_sq = F.conv2d(img1*img1, window, padding=window_size//2, groups=channel) - mu1_sq + sigma2_sq = F.conv2d(img2*img2, window, padding=window_size//2, groups=channel) - mu2_sq + sigma12 = F.conv2d(img1*img2, window, padding=window_size//2, groups=channel) - mu1_mu2 + + C1 = 0.01**2 + C2 = 0.03**2 + + ssim_map = ((2*mu1_mu2 + C1)*(2*sigma12 + C2))/((mu1_sq + 
mu2_sq + C1)*(sigma1_sq + sigma2_sq + C2)) + + if size_average: + return ssim_map.mean() + else: + return ssim_map.mean(1).mean(1).mean(1) + + +def SSIM(x, y): + C1 = 0.01 ** 2 + C2 = 0.03 ** 2 + + mu_x = nn.AvgPool2d(3, 1, 1)(x) + mu_y = nn.AvgPool2d(3, 1, 1)(y) + mu_x_mu_y = mu_x * mu_y + mu_x_sq = mu_x.pow(2) + mu_y_sq = mu_y.pow(2) + + sigma_x = nn.AvgPool2d(3, 1, 1)(x * x) - mu_x_sq + sigma_y = nn.AvgPool2d(3, 1, 1)(y * y) - mu_y_sq + sigma_xy = nn.AvgPool2d(3, 1, 1)(x * y) - mu_x_mu_y + + SSIM_n = (2 * mu_x_mu_y + C1) * (2 * sigma_xy + C2) + SSIM_d = (mu_x_sq + mu_y_sq + C1) * (sigma_x + sigma_y + C2) + SSIM = SSIM_n / SSIM_d + + return torch.clamp((1 - SSIM) / 2, 0, 1) + + +def saliency_structure_consistency(x, y): + ssim = torch.mean(SSIM(x,y)) + return ssim diff --git a/engine/BiRefNet/make_a_copy.sh b/engine/BiRefNet/make_a_copy.sh new file mode 100644 index 0000000000000000000000000000000000000000..97a35fbef8adf18d8ea22e3148e31a926ae930b5 --- /dev/null +++ b/engine/BiRefNet/make_a_copy.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# Set dst repo here. +repo=$1 +mkdir ../${repo} +mkdir ../${repo}/evaluation +mkdir ../${repo}/models +mkdir ../${repo}/models/backbones +mkdir ../${repo}/models/modules +mkdir ../${repo}/models/refinement + +cp ./*.sh ../${repo} +cp ./*.py ../${repo} +cp ./evaluation/*.py ../${repo}/evaluation +cp ./models/*.py ../${repo}/models +cp ./models/backbones/*.py ../${repo}/models/backbones +cp ./models/modules/*.py ../${repo}/models/modules +cp ./models/refinement/*.py ../${repo}/models/refinement +cp -r ./.git* ../${repo} diff --git a/engine/BiRefNet/models/__pycache__/birefnet.cpython-310.pyc b/engine/BiRefNet/models/__pycache__/birefnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ba49e8f0791668805ba55baf76deb41ca699114f Binary files /dev/null and b/engine/BiRefNet/models/__pycache__/birefnet.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/backbones/__pycache__/build_backbone.cpython-310.pyc b/engine/BiRefNet/models/backbones/__pycache__/build_backbone.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7eb9f18f7abd7681643f9d570169f3999fd61fe8 Binary files /dev/null and b/engine/BiRefNet/models/backbones/__pycache__/build_backbone.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/backbones/__pycache__/pvt_v2.cpython-310.pyc b/engine/BiRefNet/models/backbones/__pycache__/pvt_v2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b5f9ce803d094b81a086931108127a0128163361 Binary files /dev/null and b/engine/BiRefNet/models/backbones/__pycache__/pvt_v2.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/backbones/__pycache__/swin_v1.cpython-310.pyc b/engine/BiRefNet/models/backbones/__pycache__/swin_v1.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b488543815044106e64f60bab7d6ee878d2ecaed Binary files /dev/null and b/engine/BiRefNet/models/backbones/__pycache__/swin_v1.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/backbones/build_backbone.py b/engine/BiRefNet/models/backbones/build_backbone.py new file mode 100644 index 0000000000000000000000000000000000000000..ea342105a93d844787c84080315b4e84f97b10c8 --- /dev/null +++ b/engine/BiRefNet/models/backbones/build_backbone.py @@ -0,0 +1,117 @@ +from collections import OrderedDict + +import torch +import torch.nn as nn +from torchvision.models import ( + ResNet50_Weights, + VGG16_BN_Weights, + VGG16_Weights, + resnet50, + vgg16, + vgg16_bn, +) + 
+from engine.BiRefNet.config import Config +from engine.BiRefNet.models.backbones.pvt_v2 import ( + pvt_v2_b0, + pvt_v2_b1, + pvt_v2_b2, + pvt_v2_b5, +) +from engine.BiRefNet.models.backbones.swin_v1 import ( + swin_v1_b, + swin_v1_l, + swin_v1_s, + swin_v1_t, +) + +config = Config() + + +def build_backbone(bb_name, pretrained=True, params_settings=""): + if bb_name == "vgg16": + bb_net = list( + vgg16(pretrained=VGG16_Weights.DEFAULT if pretrained else None).children() + )[0] + bb = nn.Sequential( + OrderedDict( + { + "conv1": bb_net[:4], + "conv2": bb_net[4:9], + "conv3": bb_net[9:16], + "conv4": bb_net[16:23], + } + ) + ) + elif bb_name == "vgg16bn": + bb_net = list( + vgg16_bn( + pretrained=VGG16_BN_Weights.DEFAULT if pretrained else None + ).children() + )[0] + bb = nn.Sequential( + OrderedDict( + { + "conv1": bb_net[:6], + "conv2": bb_net[6:13], + "conv3": bb_net[13:23], + "conv4": bb_net[23:33], + } + ) + ) + elif bb_name == "resnet50": + bb_net = list( + resnet50( + pretrained=ResNet50_Weights.DEFAULT if pretrained else None + ).children() + ) + bb = nn.Sequential( + OrderedDict( + { + "conv1": nn.Sequential(*bb_net[0:3]), + "conv2": bb_net[4], + "conv3": bb_net[5], + "conv4": bb_net[6], + } + ) + ) + else: + bb = eval("{}({})".format(bb_name, params_settings)) + if pretrained: + bb = load_weights(bb, bb_name) + return bb + + +def load_weights(model, model_name): + save_model = torch.load( + config.weights[model_name], map_location="cpu", weights_only=True + ) + model_dict = model.state_dict() + state_dict = { + k: v if v.size() == model_dict[k].size() else model_dict[k] + for k, v in save_model.items() + if k in model_dict.keys() + } + # to ignore the weights with mismatched size when I modify the backbone itself. + if not state_dict: + save_model_keys = list(save_model.keys()) + sub_item = save_model_keys[0] if len(save_model_keys) == 1 else None + state_dict = { + k: v if v.size() == model_dict[k].size() else model_dict[k] + for k, v in save_model[sub_item].items() + if k in model_dict.keys() + } + if not state_dict or not sub_item: + print( + "Weights are not successully loaded. Check the state dict of weights file." 
+ ) + return None + else: + print( + 'Found correct weights in the "{}" item of loaded state_dict.'.format( + sub_item + ) + ) + model_dict.update(state_dict) + model.load_state_dict(model_dict) + return model diff --git a/engine/BiRefNet/models/backbones/pvt_v2.py b/engine/BiRefNet/models/backbones/pvt_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..85345aef218bbff8ca6b98bb3b742ae6f500f88c --- /dev/null +++ b/engine/BiRefNet/models/backbones/pvt_v2.py @@ -0,0 +1,637 @@ +import math +from functools import partial + +import torch +import torch.nn as nn +from timm.layers import DropPath, to_2tuple, trunc_normal_ + +from engine.BiRefNet.config import Config + +config = Config() + + +class Mlp(nn.Module): + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + drop=0.0, + ): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.dwconv = DWConv(hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + def forward(self, x, H, W): + x = self.fc1(x) + x = self.dwconv(x, H, W) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class Attention(nn.Module): + def __init__( + self, + dim, + num_heads=8, + qkv_bias=False, + qk_scale=None, + attn_drop=0.0, + proj_drop=0.0, + sr_ratio=1, + ): + super().__init__() + assert ( + dim % num_heads == 0 + ), f"dim {dim} should be divided by num_heads {num_heads}." 
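+        # Spatial-reduction attention: when sr_ratio > 1, keys/values are computed from a feature
+        # map downsampled by a stride-sr_ratio conv (self.sr below), shrinking the attention matrix
+        # from N x N to N x (N / sr_ratio**2).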
+ + self.dim = dim + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim**-0.5 + + self.q = nn.Linear(dim, dim, bias=qkv_bias) + self.kv = nn.Linear(dim, dim * 2, bias=qkv_bias) + self.attn_drop_prob = attn_drop + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + self.sr_ratio = sr_ratio + if sr_ratio > 1: + self.sr = nn.Conv2d(dim, dim, kernel_size=sr_ratio, stride=sr_ratio) + self.norm = nn.LayerNorm(dim) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + def forward(self, x, H, W): + B, N, C = x.shape + q = ( + self.q(x) + .reshape(B, N, self.num_heads, C // self.num_heads) + .permute(0, 2, 1, 3) + ) + + if self.sr_ratio > 1: + x_ = x.permute(0, 2, 1).reshape(B, C, H, W) + x_ = self.sr(x_).reshape(B, C, -1).permute(0, 2, 1) + x_ = self.norm(x_) + kv = ( + self.kv(x_) + .reshape(B, -1, 2, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + else: + kv = ( + self.kv(x) + .reshape(B, -1, 2, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + k, v = kv[0], kv[1] + + if config.SDPA_enabled: + x = ( + torch.nn.functional.scaled_dot_product_attention( + q, + k, + v, + attn_mask=None, + dropout_p=self.attn_drop_prob, + is_causal=False, + ) + .transpose(1, 2) + .reshape(B, N, C) + ) + else: + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + + return x + + +class Block(nn.Module): + + def __init__( + self, + dim, + num_heads, + mlp_ratio=4.0, + qkv_bias=False, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm, + sr_ratio=1, + ): + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop, + sr_ratio=sr_ratio, + ) + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop, + ) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + def forward(self, x, H, W): + x = x + self.drop_path(self.attn(self.norm1(x), H, W)) + x = x + self.drop_path(self.mlp(self.norm2(x), H, W)) + + 
return x + + +class OverlapPatchEmbed(nn.Module): + """Image to Patch Embedding""" + + def __init__( + self, img_size=224, patch_size=7, stride=4, in_channels=3, embed_dim=768 + ): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + + self.img_size = img_size + self.patch_size = patch_size + self.H, self.W = img_size[0] // patch_size[0], img_size[1] // patch_size[1] + self.num_patches = self.H * self.W + self.proj = nn.Conv2d( + in_channels, + embed_dim, + kernel_size=patch_size, + stride=stride, + padding=(patch_size[0] // 2, patch_size[1] // 2), + ) + self.norm = nn.LayerNorm(embed_dim) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + def forward(self, x): + x = self.proj(x) + _, _, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = self.norm(x) + + return x, H, W + + +class PyramidVisionTransformerImpr(nn.Module): + def __init__( + self, + img_size=224, + patch_size=16, + in_channels=3, + num_classes=1000, + embed_dims=[64, 128, 256, 512], + num_heads=[1, 2, 4, 8], + mlp_ratios=[4, 4, 4, 4], + qkv_bias=False, + qk_scale=None, + drop_rate=0.0, + attn_drop_rate=0.0, + drop_path_rate=0.0, + norm_layer=nn.LayerNorm, + depths=[3, 4, 6, 3], + sr_ratios=[8, 4, 2, 1], + ): + super().__init__() + self.num_classes = num_classes + self.depths = depths + + # patch_embed + self.patch_embed1 = OverlapPatchEmbed( + img_size=img_size, + patch_size=7, + stride=4, + in_channels=in_channels, + embed_dim=embed_dims[0], + ) + self.patch_embed2 = OverlapPatchEmbed( + img_size=img_size // 4, + patch_size=3, + stride=2, + in_channels=embed_dims[0], + embed_dim=embed_dims[1], + ) + self.patch_embed3 = OverlapPatchEmbed( + img_size=img_size // 8, + patch_size=3, + stride=2, + in_channels=embed_dims[1], + embed_dim=embed_dims[2], + ) + self.patch_embed4 = OverlapPatchEmbed( + img_size=img_size // 16, + patch_size=3, + stride=2, + in_channels=embed_dims[2], + embed_dim=embed_dims[3], + ) + + # transformer encoder + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, sum(depths)) + ] # stochastic depth decay rule + cur = 0 + self.block1 = nn.ModuleList( + [ + Block( + dim=embed_dims[0], + num_heads=num_heads[0], + mlp_ratio=mlp_ratios[0], + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=dpr[cur + i], + norm_layer=norm_layer, + sr_ratio=sr_ratios[0], + ) + for i in range(depths[0]) + ] + ) + self.norm1 = norm_layer(embed_dims[0]) + + cur += depths[0] + self.block2 = nn.ModuleList( + [ + Block( + dim=embed_dims[1], + num_heads=num_heads[1], + mlp_ratio=mlp_ratios[1], + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=dpr[cur + i], + norm_layer=norm_layer, + sr_ratio=sr_ratios[1], + ) + for i in range(depths[1]) + ] + ) + self.norm2 = norm_layer(embed_dims[1]) + + cur += depths[1] + self.block3 = nn.ModuleList( + [ + Block( + dim=embed_dims[2], + num_heads=num_heads[2], + mlp_ratio=mlp_ratios[2], + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop_rate, + 
attn_drop=attn_drop_rate, + drop_path=dpr[cur + i], + norm_layer=norm_layer, + sr_ratio=sr_ratios[2], + ) + for i in range(depths[2]) + ] + ) + self.norm3 = norm_layer(embed_dims[2]) + + cur += depths[2] + self.block4 = nn.ModuleList( + [ + Block( + dim=embed_dims[3], + num_heads=num_heads[3], + mlp_ratio=mlp_ratios[3], + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=dpr[cur + i], + norm_layer=norm_layer, + sr_ratio=sr_ratios[3], + ) + for i in range(depths[3]) + ] + ) + self.norm4 = norm_layer(embed_dims[3]) + + # classification head + # self.head = nn.Linear(embed_dims[3], num_classes) if num_classes > 0 else nn.Identity() + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + def init_weights(self, pretrained=None): + if isinstance(pretrained, str): + logger = 1 + # load_checkpoint(self, pretrained, map_location='cpu', strict=False, logger=logger) + + def reset_drop_path(self, drop_path_rate): + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, sum(self.depths))] + cur = 0 + for i in range(self.depths[0]): + self.block1[i].drop_path.drop_prob = dpr[cur + i] + + cur += self.depths[0] + for i in range(self.depths[1]): + self.block2[i].drop_path.drop_prob = dpr[cur + i] + + cur += self.depths[1] + for i in range(self.depths[2]): + self.block3[i].drop_path.drop_prob = dpr[cur + i] + + cur += self.depths[2] + for i in range(self.depths[3]): + self.block4[i].drop_path.drop_prob = dpr[cur + i] + + def freeze_patch_emb(self): + self.patch_embed1.requires_grad = False + + @torch.jit.ignore + def no_weight_decay(self): + return { + "pos_embed1", + "pos_embed2", + "pos_embed3", + "pos_embed4", + "cls_token", + } # has pos_embed may be better + + def get_classifier(self): + return self.head + + def reset_classifier(self, num_classes, global_pool=""): + self.num_classes = num_classes + self.head = ( + nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity() + ) + + def forward_features(self, x): + B = x.shape[0] + outs = [] + + # stage 1 + x, H, W = self.patch_embed1(x) + for i, blk in enumerate(self.block1): + x = blk(x, H, W) + x = self.norm1(x) + x = x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() + outs.append(x) + + # stage 2 + x, H, W = self.patch_embed2(x) + for i, blk in enumerate(self.block2): + x = blk(x, H, W) + x = self.norm2(x) + x = x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() + outs.append(x) + + # stage 3 + x, H, W = self.patch_embed3(x) + for i, blk in enumerate(self.block3): + x = blk(x, H, W) + x = self.norm3(x) + x = x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() + outs.append(x) + + # stage 4 + x, H, W = self.patch_embed4(x) + for i, blk in enumerate(self.block4): + x = blk(x, H, W) + x = self.norm4(x) + x = x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous() + outs.append(x) + + return outs + + # return x.mean(dim=1) + + def forward(self, x): + x = self.forward_features(x) + # x = self.head(x) + + return x + + +class DWConv(nn.Module): + def __init__(self, dim=768): + super(DWConv, 
self).__init__() + self.dwconv = nn.Conv2d(dim, dim, 3, 1, 1, bias=True, groups=dim) + + def forward(self, x, H, W): + B, N, C = x.shape + x = x.transpose(1, 2).view(B, C, H, W).contiguous() + x = self.dwconv(x) + x = x.flatten(2).transpose(1, 2) + + return x + + +def _conv_filter(state_dict, patch_size=16): + """convert patch embedding weight from manual patchify + linear proj to conv""" + out_dict = {} + for k, v in state_dict.items(): + if "patch_embed.proj.weight" in k: + v = v.reshape((v.shape[0], 3, patch_size, patch_size)) + out_dict[k] = v + + return out_dict + + +class pvt_v2_b0(PyramidVisionTransformerImpr): + def __init__(self, **kwargs): + super(pvt_v2_b0, self).__init__( + patch_size=4, + embed_dims=[32, 64, 160, 256], + num_heads=[1, 2, 5, 8], + mlp_ratios=[8, 8, 4, 4], + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + depths=[2, 2, 2, 2], + sr_ratios=[8, 4, 2, 1], + drop_rate=0.0, + drop_path_rate=0.1, + ) + + +class pvt_v2_b1(PyramidVisionTransformerImpr): + def __init__(self, **kwargs): + super(pvt_v2_b1, self).__init__( + patch_size=4, + embed_dims=[64, 128, 320, 512], + num_heads=[1, 2, 5, 8], + mlp_ratios=[8, 8, 4, 4], + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + depths=[2, 2, 2, 2], + sr_ratios=[8, 4, 2, 1], + drop_rate=0.0, + drop_path_rate=0.1, + ) + + +class pvt_v2_b2(PyramidVisionTransformerImpr): + def __init__(self, in_channels=3, **kwargs): + super(pvt_v2_b2, self).__init__( + patch_size=4, + embed_dims=[64, 128, 320, 512], + num_heads=[1, 2, 5, 8], + mlp_ratios=[8, 8, 4, 4], + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + depths=[3, 4, 6, 3], + sr_ratios=[8, 4, 2, 1], + drop_rate=0.0, + drop_path_rate=0.1, + in_channels=in_channels, + ) + + +class pvt_v2_b3(PyramidVisionTransformerImpr): + def __init__(self, **kwargs): + super(pvt_v2_b3, self).__init__( + patch_size=4, + embed_dims=[64, 128, 320, 512], + num_heads=[1, 2, 5, 8], + mlp_ratios=[8, 8, 4, 4], + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + depths=[3, 4, 18, 3], + sr_ratios=[8, 4, 2, 1], + drop_rate=0.0, + drop_path_rate=0.1, + ) + + +class pvt_v2_b4(PyramidVisionTransformerImpr): + def __init__(self, **kwargs): + super(pvt_v2_b4, self).__init__( + patch_size=4, + embed_dims=[64, 128, 320, 512], + num_heads=[1, 2, 5, 8], + mlp_ratios=[8, 8, 4, 4], + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + depths=[3, 8, 27, 3], + sr_ratios=[8, 4, 2, 1], + drop_rate=0.0, + drop_path_rate=0.1, + ) + + +class pvt_v2_b5(PyramidVisionTransformerImpr): + def __init__(self, **kwargs): + super(pvt_v2_b5, self).__init__( + patch_size=4, + embed_dims=[64, 128, 320, 512], + num_heads=[1, 2, 5, 8], + mlp_ratios=[4, 4, 4, 4], + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + depths=[3, 6, 40, 3], + sr_ratios=[8, 4, 2, 1], + drop_rate=0.0, + drop_path_rate=0.1, + ) diff --git a/engine/BiRefNet/models/backbones/swin_v1.py b/engine/BiRefNet/models/backbones/swin_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0501373b4f0ccf75022a7293328fa2625da0eb2e --- /dev/null +++ b/engine/BiRefNet/models/backbones/swin_v1.py @@ -0,0 +1,765 @@ +# -------------------------------------------------------- +# Swin Transformer +# Copyright (c) 2021 Microsoft +# Licensed under The MIT License [see LICENSE for details] +# Written by Ze Liu, Yutong Lin, Yixuan Wei +# -------------------------------------------------------- + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import 
torch.utils.checkpoint as checkpoint +from timm.layers import DropPath, to_2tuple, trunc_normal_ + +from engine.BiRefNet.config import Config + +config = Config() + + +class Mlp(nn.Module): + """Multilayer perceptron.""" + + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + drop=0.0, + ): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +def window_partition(x, window_size): + """ + Args: + x: (B, H, W, C) + window_size (int): window size + + Returns: + windows: (num_windows*B, window_size, window_size, C) + """ + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) + windows = ( + x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + ) + return windows + + +def window_reverse(windows, window_size, H, W): + """ + Args: + windows: (num_windows*B, window_size, window_size, C) + window_size (int): Window size + H (int): Height of image + W (int): Width of image + + Returns: + x: (B, H, W, C) + """ + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view( + B, H // window_size, W // window_size, window_size, window_size, -1 + ) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + + +class WindowAttention(nn.Module): + """Window based multi-head self attention (W-MSA) module with relative position bias. + It supports both of shifted and non-shifted window. + + Args: + dim (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set + attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0 + proj_drop (float, optional): Dropout ratio of output. 
Default: 0.0 + """ + + def __init__( + self, + dim, + window_size, + num_heads, + qkv_bias=True, + qk_scale=None, + attn_drop=0.0, + proj_drop=0.0, + ): + + super().__init__() + self.dim = dim + self.window_size = window_size # Wh, Ww + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim**-0.5 + + # define a parameter table of relative position bias + self.relative_position_bias_table = nn.Parameter( + torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), num_heads) + ) # 2*Wh-1 * 2*Ww-1, nH + + # get pair-wise relative position index for each token inside the window + coords_h = torch.arange(self.window_size[0]) + coords_w = torch.arange(self.window_size[1]) + coords = torch.stack( + torch.meshgrid([coords_h, coords_w], indexing="ij") + ) # 2, Wh, Ww + coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww + relative_coords = ( + coords_flatten[:, :, None] - coords_flatten[:, None, :] + ) # 2, Wh*Ww, Wh*Ww + relative_coords = relative_coords.permute( + 1, 2, 0 + ).contiguous() # Wh*Ww, Wh*Ww, 2 + relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0 + relative_coords[:, :, 1] += self.window_size[1] - 1 + relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1 + relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww + self.register_buffer("relative_position_index", relative_position_index) + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop_prob = attn_drop + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + trunc_normal_(self.relative_position_bias_table, std=0.02) + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x, mask=None): + """Forward function. + + Args: + x: input features with shape of (num_windows*B, N, C) + mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None + """ + B_, N, C = x.shape + qkv = ( + self.qkv(x) + .reshape(B_, N, 3, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + q, k, v = ( + qkv[0], + qkv[1], + qkv[2], + ) # make torchscript happy (cannot use tensor as tuple) + + q = q * self.scale + + if config.SDPA_enabled: + x = ( + torch.nn.functional.scaled_dot_product_attention( + q, + k, + v, + attn_mask=None, + dropout_p=self.attn_drop_prob, + is_causal=False, + ) + .transpose(1, 2) + .reshape(B_, N, C) + ) + else: + attn = q @ k.transpose(-2, -1) + + relative_position_bias = self.relative_position_bias_table[ + self.relative_position_index.view(-1) + ].view( + self.window_size[0] * self.window_size[1], + self.window_size[0] * self.window_size[1], + -1, + ) # Wh*Ww, Wh*Ww, nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1 + ).contiguous() # nH, Wh*Ww, Wh*Ww + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze( + 1 + ).unsqueeze(0) + attn = attn.view(-1, self.num_heads, N, N) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class SwinTransformerBlock(nn.Module): + """Swin Transformer Block. + + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. + window_size (int): Window size. + shift_size (int): Shift size for SW-MSA. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. 
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float, optional): Stochastic depth rate. Default: 0.0 + act_layer (nn.Module, optional): Activation layer. Default: nn.GELU + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + """ + + def __init__( + self, + dim, + num_heads, + window_size=7, + shift_size=0, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm, + ): + super().__init__() + self.dim = dim + self.num_heads = num_heads + self.window_size = window_size + self.shift_size = shift_size + self.mlp_ratio = mlp_ratio + assert ( + 0 <= self.shift_size < self.window_size + ), "shift_size must in 0-window_size" + + self.norm1 = norm_layer(dim) + self.attn = WindowAttention( + dim, + window_size=to_2tuple(self.window_size), + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop, + ) + + self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop, + ) + + self.H = None + self.W = None + + def forward(self, x, mask_matrix): + """Forward function. + + Args: + x: Input feature, tensor size (B, H*W, C). + H, W: Spatial resolution of the input feature. + mask_matrix: Attention mask for cyclic shift. + """ + B, L, C = x.shape + H, W = self.H, self.W + assert L == H * W, "input feature has wrong size" + + shortcut = x + x = self.norm1(x) + x = x.view(B, H, W, C) + + # pad feature maps to multiples of window size + pad_l = pad_t = 0 + pad_r = (self.window_size - W % self.window_size) % self.window_size + pad_b = (self.window_size - H % self.window_size) % self.window_size + x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b)) + _, Hp, Wp, _ = x.shape + + # cyclic shift + if self.shift_size > 0: + shifted_x = torch.roll( + x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2) + ) + attn_mask = mask_matrix + else: + shifted_x = x + attn_mask = None + + # partition windows + x_windows = window_partition( + shifted_x, self.window_size + ) # nW*B, window_size, window_size, C + x_windows = x_windows.view( + -1, self.window_size * self.window_size, C + ) # nW*B, window_size*window_size, C + + # W-MSA/SW-MSA + attn_windows = self.attn( + x_windows, mask=attn_mask + ) # nW*B, window_size*window_size, C + + # merge windows + attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C) + shifted_x = window_reverse(attn_windows, self.window_size, Hp, Wp) # B H' W' C + + # reverse cyclic shift + if self.shift_size > 0: + x = torch.roll( + shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2) + ) + else: + x = shifted_x + + if pad_r > 0 or pad_b > 0: + x = x[:, :H, :W, :].contiguous() + + x = x.view(B, H * W, C) + + # FFN + x = shortcut + self.drop_path(x) + x = x + self.drop_path(self.mlp(self.norm2(x))) + + return x + + +class PatchMerging(nn.Module): + """Patch Merging Layer + + Args: + dim (int): Number of input channels. + norm_layer (nn.Module, optional): Normalization layer. 
Default: nn.LayerNorm + """ + + def __init__(self, dim, norm_layer=nn.LayerNorm): + super().__init__() + self.dim = dim + self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False) + self.norm = norm_layer(4 * dim) + + def forward(self, x, H, W): + """Forward function. + + Args: + x: Input feature, tensor size (B, H*W, C). + H, W: Spatial resolution of the input feature. + """ + B, L, C = x.shape + assert L == H * W, "input feature has wrong size" + + x = x.view(B, H, W, C) + + # padding + pad_input = (H % 2 == 1) or (W % 2 == 1) + if pad_input: + x = F.pad(x, (0, 0, 0, W % 2, 0, H % 2)) + + x0 = x[:, 0::2, 0::2, :] # B H/2 W/2 C + x1 = x[:, 1::2, 0::2, :] # B H/2 W/2 C + x2 = x[:, 0::2, 1::2, :] # B H/2 W/2 C + x3 = x[:, 1::2, 1::2, :] # B H/2 W/2 C + x = torch.cat([x0, x1, x2, x3], -1) # B H/2 W/2 4*C + x = x.view(B, -1, 4 * C) # B H/2*W/2 4*C + + x = self.norm(x) + x = self.reduction(x) + + return x + + +class BasicLayer(nn.Module): + """A basic Swin Transformer layer for one stage. + + Args: + dim (int): Number of feature channels + depth (int): Depths of this stage. + num_heads (int): Number of attention head. + window_size (int): Local window size. Default: 7. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. + """ + + def __init__( + self, + dim, + depth, + num_heads, + window_size=7, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False, + ): + super().__init__() + self.window_size = window_size + self.shift_size = window_size // 2 + self.depth = depth + self.use_checkpoint = use_checkpoint + + # build blocks + self.blocks = nn.ModuleList( + [ + SwinTransformerBlock( + dim=dim, + num_heads=num_heads, + window_size=window_size, + shift_size=0 if (i % 2 == 0) else window_size // 2, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + drop_path=( + drop_path[i] if isinstance(drop_path, list) else drop_path + ), + norm_layer=norm_layer, + ) + for i in range(depth) + ] + ) + + # patch merging layer + if downsample is not None: + self.downsample = downsample(dim=dim, norm_layer=norm_layer) + else: + self.downsample = None + + def forward(self, x, H, W): + """Forward function. + + Args: + x: Input feature, tensor size (B, H*W, C). + H, W: Spatial resolution of the input feature. 
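+
+            Returns (read off the code below, added here only as a descriptive note):
+                x, H, W: the stage output features and their spatial size (before patch merging);
+                x_down, Wh, Ww: the (possibly downsampled) features and size fed to the next stage.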
+ """ + + # calculate attention mask for SW-MSA + Hp = int(np.ceil(H / self.window_size)) * self.window_size + Wp = int(np.ceil(W / self.window_size)) * self.window_size + img_mask = torch.zeros((1, Hp, Wp, 1), device=x.device) # 1 Hp Wp 1 + h_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + w_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + cnt = 0 + for h in h_slices: + for w in w_slices: + img_mask[:, h, w, :] = cnt + cnt += 1 + + mask_windows = window_partition( + img_mask, self.window_size + ) # nW, window_size, window_size, 1 + mask_windows = mask_windows.view(-1, self.window_size * self.window_size) + attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2) + attn_mask = ( + attn_mask.masked_fill(attn_mask != 0, float(-100.0)) + .masked_fill(attn_mask == 0, float(0.0)) + .to(x.dtype) + ) + + for blk in self.blocks: + blk.H, blk.W = H, W + if self.use_checkpoint: + x = checkpoint.checkpoint(blk, x, attn_mask) + else: + x = blk(x, attn_mask) + if self.downsample is not None: + x_down = self.downsample(x, H, W) + Wh, Ww = (H + 1) // 2, (W + 1) // 2 + return x, H, W, x_down, Wh, Ww + else: + return x, H, W, x, H, W + + +class PatchEmbed(nn.Module): + """Image to Patch Embedding + + Args: + patch_size (int): Patch token size. Default: 4. + in_channels (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. + norm_layer (nn.Module, optional): Normalization layer. Default: None + """ + + def __init__(self, patch_size=4, in_channels=3, embed_dim=96, norm_layer=None): + super().__init__() + patch_size = to_2tuple(patch_size) + self.patch_size = patch_size + + self.in_channels = in_channels + self.embed_dim = embed_dim + + self.proj = nn.Conv2d( + in_channels, embed_dim, kernel_size=patch_size, stride=patch_size + ) + if norm_layer is not None: + self.norm = norm_layer(embed_dim) + else: + self.norm = None + + def forward(self, x): + """Forward function.""" + # padding + _, _, H, W = x.size() + if W % self.patch_size[1] != 0: + x = F.pad(x, (0, self.patch_size[1] - W % self.patch_size[1])) + if H % self.patch_size[0] != 0: + x = F.pad(x, (0, 0, 0, self.patch_size[0] - H % self.patch_size[0])) + + x = self.proj(x) # B C Wh Ww + if self.norm is not None: + Wh, Ww = x.size(2), x.size(3) + x = x.flatten(2).transpose(1, 2) + x = self.norm(x) + x = x.transpose(1, 2).view(-1, self.embed_dim, Wh, Ww) + + return x + + +class SwinTransformer(nn.Module): + """Swin Transformer backbone. + A PyTorch impl of : `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows` - + https://arxiv.org/pdf/2103.14030 + + Args: + pretrain_img_size (int): Input image size for training the pretrained model, + used in absolute postion embedding. Default 224. + patch_size (int | tuple(int)): Patch size. Default: 4. + in_channels (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. + depths (tuple[int]): Depths of each Swin Transformer stage. + num_heads (tuple[int]): Number of attention head of each stage. + window_size (int): Window size. Default: 7. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4. + qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float): Override default qk scale of head_dim ** -0.5 if set. 
+ drop_rate (float): Dropout rate. + attn_drop_rate (float): Attention dropout rate. Default: 0. + drop_path_rate (float): Stochastic depth rate. Default: 0.2. + norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm. + ape (bool): If True, add absolute position embedding to the patch embedding. Default: False. + patch_norm (bool): If True, add normalization after patch embedding. Default: True. + out_indices (Sequence[int]): Output from which stages. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. + """ + + def __init__( + self, + pretrain_img_size=224, + patch_size=4, + in_channels=3, + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop_rate=0.0, + attn_drop_rate=0.0, + drop_path_rate=0.2, + norm_layer=nn.LayerNorm, + ape=False, + patch_norm=True, + out_indices=(0, 1, 2, 3), + frozen_stages=-1, + use_checkpoint=False, + ): + super().__init__() + + self.pretrain_img_size = pretrain_img_size + self.num_layers = len(depths) + self.embed_dim = embed_dim + self.ape = ape + self.patch_norm = patch_norm + self.out_indices = out_indices + self.frozen_stages = frozen_stages + + # split image into non-overlapping patches + self.patch_embed = PatchEmbed( + patch_size=patch_size, + in_channels=in_channels, + embed_dim=embed_dim, + norm_layer=norm_layer if self.patch_norm else None, + ) + + # absolute position embedding + if self.ape: + pretrain_img_size = to_2tuple(pretrain_img_size) + patch_size = to_2tuple(patch_size) + patches_resolution = [ + pretrain_img_size[0] // patch_size[0], + pretrain_img_size[1] // patch_size[1], + ] + + self.absolute_pos_embed = nn.Parameter( + torch.zeros(1, embed_dim, patches_resolution[0], patches_resolution[1]) + ) + trunc_normal_(self.absolute_pos_embed, std=0.02) + + self.pos_drop = nn.Dropout(p=drop_rate) + + # stochastic depth + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, sum(depths)) + ] # stochastic depth decay rule + + # build layers + self.layers = nn.ModuleList() + for i_layer in range(self.num_layers): + layer = BasicLayer( + dim=int(embed_dim * 2**i_layer), + depth=depths[i_layer], + num_heads=num_heads[i_layer], + window_size=window_size, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=dpr[sum(depths[:i_layer]) : sum(depths[: i_layer + 1])], + norm_layer=norm_layer, + downsample=PatchMerging if (i_layer < self.num_layers - 1) else None, + use_checkpoint=use_checkpoint, + ) + self.layers.append(layer) + + num_features = [int(embed_dim * 2**i) for i in range(self.num_layers)] + self.num_features = num_features + + # add a norm layer for each output + for i_layer in out_indices: + layer = norm_layer(num_features[i_layer]) + layer_name = f"norm{i_layer}" + self.add_module(layer_name, layer) + + self._freeze_stages() + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + if self.frozen_stages >= 1 and self.ape: + self.absolute_pos_embed.requires_grad = False + + if self.frozen_stages >= 2: + self.pos_drop.eval() + for i in range(0, self.frozen_stages - 1): + m = self.layers[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def forward(self, x): + """Forward function.""" + x = 
self.patch_embed(x) + + Wh, Ww = x.size(2), x.size(3) + if self.ape: + # interpolate the position embedding to the corresponding size + absolute_pos_embed = F.interpolate( + self.absolute_pos_embed, size=(Wh, Ww), mode="bicubic" + ) + x = x + absolute_pos_embed # B Wh*Ww C + + outs = [] # x.contiguous()] + x = x.flatten(2).transpose(1, 2) + x = self.pos_drop(x) + for i in range(self.num_layers): + layer = self.layers[i] + x_out, H, W, x, Wh, Ww = layer(x, Wh, Ww) + + if i in self.out_indices: + norm_layer = getattr(self, f"norm{i}") + x_out = norm_layer(x_out) + + out = ( + x_out.view(-1, H, W, self.num_features[i]) + .permute(0, 3, 1, 2) + .contiguous() + ) + outs.append(out) + + return tuple(outs) + + def train(self, mode=True): + """Convert the model into training mode while keep layers freezed.""" + super(SwinTransformer, self).train(mode) + self._freeze_stages() + + +def swin_v1_t(): + model = SwinTransformer( + embed_dim=96, depths=[2, 2, 6, 2], num_heads=[3, 6, 12, 24], window_size=7 + ) + return model + + +def swin_v1_s(): + model = SwinTransformer( + embed_dim=96, depths=[2, 2, 18, 2], num_heads=[3, 6, 12, 24], window_size=7 + ) + return model + + +def swin_v1_b(): + model = SwinTransformer( + embed_dim=128, depths=[2, 2, 18, 2], num_heads=[4, 8, 16, 32], window_size=12 + ) + return model + + +def swin_v1_l(): + model = SwinTransformer( + embed_dim=192, depths=[2, 2, 18, 2], num_heads=[6, 12, 24, 48], window_size=12 + ) + return model diff --git a/engine/BiRefNet/models/birefnet.py b/engine/BiRefNet/models/birefnet.py new file mode 100644 index 0000000000000000000000000000000000000000..9b5198c8408f5fdc7771e8a23eb92914a5fc0466 --- /dev/null +++ b/engine/BiRefNet/models/birefnet.py @@ -0,0 +1,704 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange +from huggingface_hub import PyTorchModelHubMixin +from kornia.filters import laplacian + +from engine.BiRefNet.config import Config +from engine.BiRefNet.dataset import class_labels_TR_sorted + +from .backbones.build_backbone import build_backbone +from .modules.aspp import ASPP, ASPPDeformable +from .modules.decoder_blocks import BasicDecBlk, ResBlk +from .modules.lateral_blocks import BasicLatBlk +from .refinement.refiner import Refiner, RefinerPVTInChannels4, RefUNet +from .refinement.stem_layer import StemLayer + + +def image2patches( + image, + grid_h=2, + grid_w=2, + patch_ref=None, + transformation="b c (hg h) (wg w) -> (b hg wg) c h w", +): + if patch_ref is not None: + grid_h, grid_w = ( + image.shape[-2] // patch_ref.shape[-2], + image.shape[-1] // patch_ref.shape[-1], + ) + patches = rearrange(image, transformation, hg=grid_h, wg=grid_w) + return patches + + +def patches2image( + patches, + grid_h=2, + grid_w=2, + patch_ref=None, + transformation="(b hg wg) c h w -> b c (hg h) (wg w)", +): + if patch_ref is not None: + grid_h, grid_w = ( + patch_ref.shape[-2] // patches[0].shape[-2], + patch_ref.shape[-1] // patches[0].shape[-1], + ) + image = rearrange(patches, transformation, hg=grid_h, wg=grid_w) + return image + + +class BiRefNet( + nn.Module, + PyTorchModelHubMixin, + library_name="birefnet", + repo_url="https://github.com/ZhengPeng7/BiRefNet", + tags=[ + "Image Segmentation", + "Background Removal", + "Mask Generation", + "Dichotomous Image Segmentation", + "Camouflaged Object Detection", + "Salient Object Detection", + ], +): + def __init__(self, bb_pretrained=True): + super(BiRefNet, self).__init__() + self.config = Config() + self.epoch = 1 + self.bb = 
build_backbone(self.config.bb, pretrained=bb_pretrained) + + channels = self.config.lateral_channels_in_collection + + if self.config.auxiliary_classification: + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.cls_head = nn.Sequential( + nn.Linear(channels[0], len(class_labels_TR_sorted)) + ) + + if self.config.squeeze_block: + self.squeeze_module = nn.Sequential( + *[ + eval(self.config.squeeze_block.split("_x")[0])( + channels[0] + sum(self.config.cxt), channels[0] + ) + for _ in range(eval(self.config.squeeze_block.split("_x")[1])) + ] + ) + + self.decoder = Decoder(channels) + + if self.config.ender: + self.dec_end = nn.Sequential( + nn.Conv2d(1, 16, 3, 1, 1), + nn.Conv2d(16, 1, 3, 1, 1), + nn.ReLU(inplace=True), + ) + + # refine patch-level segmentation + if self.config.refine: + if self.config.refine == "itself": + self.stem_layer = StemLayer( + in_channels=3 + 1, + inter_channels=48, + out_channels=3, + norm_layer="BN" if self.config.batch_size > 1 else "LN", + ) + else: + self.refiner = eval( + "{}({})".format(self.config.refine, "in_channels=3+1") + ) + + if self.config.freeze_bb: + # Freeze the backbone... + print(self.named_parameters()) + for key, value in self.named_parameters(): + if "bb." in key and "refiner." not in key: + value.requires_grad = False + + def forward_enc(self, x): + if self.config.bb in ["vgg16", "vgg16bn", "resnet50"]: + x1 = self.bb.conv1(x) + x2 = self.bb.conv2(x1) + x3 = self.bb.conv3(x2) + x4 = self.bb.conv4(x3) + else: + x1, x2, x3, x4 = self.bb(x) + if self.config.mul_scl_ipt == "cat": + B, C, H, W = x.shape + x1_, x2_, x3_, x4_ = self.bb( + F.interpolate( + x, size=(H // 2, W // 2), mode="bilinear", align_corners=True + ) + ) + x1 = torch.cat( + [ + x1, + F.interpolate( + x1_, size=x1.shape[2:], mode="bilinear", align_corners=True + ), + ], + dim=1, + ) + x2 = torch.cat( + [ + x2, + F.interpolate( + x2_, size=x2.shape[2:], mode="bilinear", align_corners=True + ), + ], + dim=1, + ) + x3 = torch.cat( + [ + x3, + F.interpolate( + x3_, size=x3.shape[2:], mode="bilinear", align_corners=True + ), + ], + dim=1, + ) + x4 = torch.cat( + [ + x4, + F.interpolate( + x4_, size=x4.shape[2:], mode="bilinear", align_corners=True + ), + ], + dim=1, + ) + elif self.config.mul_scl_ipt == "add": + B, C, H, W = x.shape + x1_, x2_, x3_, x4_ = self.bb( + F.interpolate( + x, size=(H // 2, W // 2), mode="bilinear", align_corners=True + ) + ) + x1 = x1 + F.interpolate( + x1_, size=x1.shape[2:], mode="bilinear", align_corners=True + ) + x2 = x2 + F.interpolate( + x2_, size=x2.shape[2:], mode="bilinear", align_corners=True + ) + x3 = x3 + F.interpolate( + x3_, size=x3.shape[2:], mode="bilinear", align_corners=True + ) + x4 = x4 + F.interpolate( + x4_, size=x4.shape[2:], mode="bilinear", align_corners=True + ) + class_preds = ( + self.cls_head(self.avgpool(x4).view(x4.shape[0], -1)) + if self.training and self.config.auxiliary_classification + else None + ) + if self.config.cxt: + x4 = torch.cat( + ( + *[ + F.interpolate( + x1, size=x4.shape[2:], mode="bilinear", align_corners=True + ), + F.interpolate( + x2, size=x4.shape[2:], mode="bilinear", align_corners=True + ), + F.interpolate( + x3, size=x4.shape[2:], mode="bilinear", align_corners=True + ), + ][-len(self.config.cxt) :], + x4, + ), + dim=1, + ) + return (x1, x2, x3, x4), class_preds + + def forward_ori(self, x): + ########## Encoder ########## + (x1, x2, x3, x4), class_preds = self.forward_enc(x) + if self.config.squeeze_block: + x4 = self.squeeze_module(x4) + ########## Decoder ########## + features = [x, x1, x2, x3, 
x4] + if self.training and self.config.out_ref: + features.append(laplacian(torch.mean(x, dim=1).unsqueeze(1), kernel_size=5)) + scaled_preds = self.decoder(features) + return scaled_preds, class_preds + + def forward(self, x): + scaled_preds, class_preds = self.forward_ori(x) + class_preds_lst = [class_preds] + return [scaled_preds, class_preds_lst] if self.training else scaled_preds + + +class Decoder(nn.Module): + def __init__(self, channels): + super(Decoder, self).__init__() + self.config = Config() + DecoderBlock = eval(self.config.dec_blk) + LateralBlock = eval(self.config.lat_blk) + + if self.config.dec_ipt: + self.split = self.config.dec_ipt_split + N_dec_ipt = 64 + DBlock = SimpleConvs + ic = 64 + ipt_cha_opt = 1 + self.ipt_blk5 = DBlock( + 2**10 * 3 if self.split else 3, + [N_dec_ipt, channels[0] // 8][ipt_cha_opt], + inter_channels=ic, + ) + self.ipt_blk4 = DBlock( + 2**8 * 3 if self.split else 3, + [N_dec_ipt, channels[0] // 8][ipt_cha_opt], + inter_channels=ic, + ) + self.ipt_blk3 = DBlock( + 2**6 * 3 if self.split else 3, + [N_dec_ipt, channels[1] // 8][ipt_cha_opt], + inter_channels=ic, + ) + self.ipt_blk2 = DBlock( + 2**4 * 3 if self.split else 3, + [N_dec_ipt, channels[2] // 8][ipt_cha_opt], + inter_channels=ic, + ) + self.ipt_blk1 = DBlock( + 2**0 * 3 if self.split else 3, + [N_dec_ipt, channels[3] // 8][ipt_cha_opt], + inter_channels=ic, + ) + else: + self.split = None + + self.decoder_block4 = DecoderBlock( + channels[0] + + ( + [N_dec_ipt, channels[0] // 8][ipt_cha_opt] if self.config.dec_ipt else 0 + ), + channels[1], + ) + self.decoder_block3 = DecoderBlock( + channels[1] + + ( + [N_dec_ipt, channels[0] // 8][ipt_cha_opt] if self.config.dec_ipt else 0 + ), + channels[2], + ) + self.decoder_block2 = DecoderBlock( + channels[2] + + ( + [N_dec_ipt, channels[1] // 8][ipt_cha_opt] if self.config.dec_ipt else 0 + ), + channels[3], + ) + self.decoder_block1 = DecoderBlock( + channels[3] + + ( + [N_dec_ipt, channels[2] // 8][ipt_cha_opt] if self.config.dec_ipt else 0 + ), + channels[3] // 2, + ) + self.conv_out1 = nn.Sequential( + nn.Conv2d( + channels[3] // 2 + + ( + [N_dec_ipt, channels[3] // 8][ipt_cha_opt] + if self.config.dec_ipt + else 0 + ), + 1, + 1, + 1, + 0, + ) + ) + + self.lateral_block4 = LateralBlock(channels[1], channels[1]) + self.lateral_block3 = LateralBlock(channels[2], channels[2]) + self.lateral_block2 = LateralBlock(channels[3], channels[3]) + + if self.config.ms_supervision: + self.conv_ms_spvn_4 = nn.Conv2d(channels[1], 1, 1, 1, 0) + self.conv_ms_spvn_3 = nn.Conv2d(channels[2], 1, 1, 1, 0) + self.conv_ms_spvn_2 = nn.Conv2d(channels[3], 1, 1, 1, 0) + + if self.config.out_ref: + _N = 16 + self.gdt_convs_4 = nn.Sequential( + nn.Conv2d(channels[1], _N, 3, 1, 1), + nn.BatchNorm2d(_N) if self.config.batch_size > 1 else nn.Identity(), + nn.ReLU(inplace=True), + ) + self.gdt_convs_3 = nn.Sequential( + nn.Conv2d(channels[2], _N, 3, 1, 1), + nn.BatchNorm2d(_N) if self.config.batch_size > 1 else nn.Identity(), + nn.ReLU(inplace=True), + ) + self.gdt_convs_2 = nn.Sequential( + nn.Conv2d(channels[3], _N, 3, 1, 1), + nn.BatchNorm2d(_N) if self.config.batch_size > 1 else nn.Identity(), + nn.ReLU(inplace=True), + ) + + self.gdt_convs_pred_4 = nn.Sequential(nn.Conv2d(_N, 1, 1, 1, 0)) + self.gdt_convs_pred_3 = nn.Sequential(nn.Conv2d(_N, 1, 1, 1, 0)) + self.gdt_convs_pred_2 = nn.Sequential(nn.Conv2d(_N, 1, 1, 1, 0)) + + self.gdt_convs_attn_4 = nn.Sequential(nn.Conv2d(_N, 1, 1, 1, 0)) + self.gdt_convs_attn_3 = nn.Sequential(nn.Conv2d(_N, 1, 1, 1, 0)) + 
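# descriptive note: gradient-supervision heads — gdt_convs_pred_* predict a per-stage gradient map,
+            # while gdt_convs_attn_* produce a sigmoid gate over the decoder features (only built when config.out_ref is set)
+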
self.gdt_convs_attn_2 = nn.Sequential(nn.Conv2d(_N, 1, 1, 1, 0)) + + def forward(self, features): + if self.training and self.config.out_ref: + outs_gdt_pred = [] + outs_gdt_label = [] + x, x1, x2, x3, x4, gdt_gt = features + else: + x, x1, x2, x3, x4 = features + outs = [] + + if self.config.dec_ipt: + patches_batch = ( + image2patches( + x, + patch_ref=x4, + transformation="b c (hg h) (wg w) -> b (c hg wg) h w", + ) + if self.split + else x + ) + x4 = torch.cat( + ( + x4, + self.ipt_blk5( + F.interpolate( + patches_batch, + size=x4.shape[2:], + mode="bilinear", + align_corners=True, + ) + ), + ), + 1, + ) + p4 = self.decoder_block4(x4) + m4 = ( + self.conv_ms_spvn_4(p4) + if self.config.ms_supervision and self.training + else None + ) + if self.config.out_ref: + p4_gdt = self.gdt_convs_4(p4) + if self.training: + # >> GT: + m4_dia = m4 + gdt_label_main_4 = gdt_gt * F.interpolate( + m4_dia, size=gdt_gt.shape[2:], mode="bilinear", align_corners=True + ) + outs_gdt_label.append(gdt_label_main_4) + # >> Pred: + gdt_pred_4 = self.gdt_convs_pred_4(p4_gdt) + outs_gdt_pred.append(gdt_pred_4) + gdt_attn_4 = self.gdt_convs_attn_4(p4_gdt).sigmoid() + # >> Finally: + p4 = p4 * gdt_attn_4 + _p4 = F.interpolate(p4, size=x3.shape[2:], mode="bilinear", align_corners=True) + _p3 = _p4 + self.lateral_block4(x3) + + if self.config.dec_ipt: + patches_batch = ( + image2patches( + x, + patch_ref=_p3, + transformation="b c (hg h) (wg w) -> b (c hg wg) h w", + ) + if self.split + else x + ) + _p3 = torch.cat( + ( + _p3, + self.ipt_blk4( + F.interpolate( + patches_batch, + size=x3.shape[2:], + mode="bilinear", + align_corners=True, + ) + ), + ), + 1, + ) + p3 = self.decoder_block3(_p3) + m3 = ( + self.conv_ms_spvn_3(p3) + if self.config.ms_supervision and self.training + else None + ) + if self.config.out_ref: + p3_gdt = self.gdt_convs_3(p3) + if self.training: + # >> GT: + # m3 --dilation--> m3_dia + # G_3^gt * m3_dia --> G_3^m, which is the label of gradient + m3_dia = m3 + gdt_label_main_3 = gdt_gt * F.interpolate( + m3_dia, size=gdt_gt.shape[2:], mode="bilinear", align_corners=True + ) + outs_gdt_label.append(gdt_label_main_3) + # >> Pred: + # p3 --conv--BN--> F_3^G, where F_3^G predicts the \hat{G_3} with xx + # F_3^G --sigmoid--> A_3^G + gdt_pred_3 = self.gdt_convs_pred_3(p3_gdt) + outs_gdt_pred.append(gdt_pred_3) + gdt_attn_3 = self.gdt_convs_attn_3(p3_gdt).sigmoid() + # >> Finally: + # p3 = p3 * A_3^G + p3 = p3 * gdt_attn_3 + _p3 = F.interpolate(p3, size=x2.shape[2:], mode="bilinear", align_corners=True) + _p2 = _p3 + self.lateral_block3(x2) + + if self.config.dec_ipt: + patches_batch = ( + image2patches( + x, + patch_ref=_p2, + transformation="b c (hg h) (wg w) -> b (c hg wg) h w", + ) + if self.split + else x + ) + _p2 = torch.cat( + ( + _p2, + self.ipt_blk3( + F.interpolate( + patches_batch, + size=x2.shape[2:], + mode="bilinear", + align_corners=True, + ) + ), + ), + 1, + ) + p2 = self.decoder_block2(_p2) + m2 = ( + self.conv_ms_spvn_2(p2) + if self.config.ms_supervision and self.training + else None + ) + if self.config.out_ref: + p2_gdt = self.gdt_convs_2(p2) + if self.training: + # >> GT: + m2_dia = m2 + gdt_label_main_2 = gdt_gt * F.interpolate( + m2_dia, size=gdt_gt.shape[2:], mode="bilinear", align_corners=True + ) + outs_gdt_label.append(gdt_label_main_2) + # >> Pred: + gdt_pred_2 = self.gdt_convs_pred_2(p2_gdt) + outs_gdt_pred.append(gdt_pred_2) + gdt_attn_2 = self.gdt_convs_attn_2(p2_gdt).sigmoid() + # >> Finally: + p2 = p2 * gdt_attn_2 + _p2 = F.interpolate(p2, size=x1.shape[2:], 
mode="bilinear", align_corners=True) + _p1 = _p2 + self.lateral_block2(x1) + + if self.config.dec_ipt: + patches_batch = ( + image2patches( + x, + patch_ref=_p1, + transformation="b c (hg h) (wg w) -> b (c hg wg) h w", + ) + if self.split + else x + ) + _p1 = torch.cat( + ( + _p1, + self.ipt_blk2( + F.interpolate( + patches_batch, + size=x1.shape[2:], + mode="bilinear", + align_corners=True, + ) + ), + ), + 1, + ) + _p1 = self.decoder_block1(_p1) + _p1 = F.interpolate(_p1, size=x.shape[2:], mode="bilinear", align_corners=True) + + if self.config.dec_ipt: + patches_batch = ( + image2patches( + x, + patch_ref=_p1, + transformation="b c (hg h) (wg w) -> b (c hg wg) h w", + ) + if self.split + else x + ) + _p1 = torch.cat( + ( + _p1, + self.ipt_blk1( + F.interpolate( + patches_batch, + size=x.shape[2:], + mode="bilinear", + align_corners=True, + ) + ), + ), + 1, + ) + p1_out = self.conv_out1(_p1) + + if self.config.ms_supervision and self.training: + outs.append(m4) + outs.append(m3) + outs.append(m2) + outs.append(p1_out) + return ( + outs + if not (self.config.out_ref and self.training) + else ([outs_gdt_pred, outs_gdt_label], outs) + ) + + +class SimpleConvs(nn.Module): + def __init__(self, in_channels: int, out_channels: int, inter_channels=64) -> None: + super().__init__() + self.conv1 = nn.Conv2d(in_channels, inter_channels, 3, 1, 1) + self.conv_out = nn.Conv2d(inter_channels, out_channels, 3, 1, 1) + + def forward(self, x): + return self.conv_out(self.conv1(x)) + + +########### + + +class BiRefNetC2F( + nn.Module, + PyTorchModelHubMixin, + library_name="birefnet_c2f", + repo_url="https://github.com/ZhengPeng7/BiRefNet_C2F", + tags=[ + "Image Segmentation", + "Background Removal", + "Mask Generation", + "Dichotomous Image Segmentation", + "Camouflaged Object Detection", + "Salient Object Detection", + ], +): + def __init__(self, bb_pretrained=True): + super(BiRefNetC2F, self).__init__() + self.config = Config() + self.epoch = 1 + self.grid = 4 + self.model_coarse = BiRefNet(bb_pretrained=True) + self.model_fine = BiRefNet(bb_pretrained=True) + self.input_mixer = nn.Conv2d(4, 3, 1, 1, 0) + self.output_mixer_merge_post = nn.Sequential( + nn.Conv2d(1, 16, 3, 1, 1), nn.Conv2d(16, 1, 3, 1, 1) + ) + + def forward(self, x): + x_ori = x.clone() + ########## Coarse ########## + x = F.interpolate( + x, + size=[s // self.grid for s in self.config.size[::-1]], + mode="bilinear", + align_corners=True, + ) + + if self.training: + scaled_preds, class_preds_lst = self.model_coarse(x) + else: + scaled_preds = self.model_coarse(x) + ########## Fine ########## + x_HR_patches = image2patches( + x_ori, patch_ref=x, transformation="b c (hg h) (wg w) -> (b hg wg) c h w" + ) + pred = F.interpolate( + ( + scaled_preds[-1] + if not (self.config.out_ref and self.training) + else scaled_preds[1][-1] + ), + size=x_ori.shape[2:], + mode="bilinear", + align_corners=True, + ) + pred_patches = image2patches( + pred, patch_ref=x, transformation="b c (hg h) (wg w) -> (b hg wg) c h w" + ) + t = torch.cat([x_HR_patches, pred_patches], dim=1) + x_HR = self.input_mixer(t) + + pred_patches = image2patches( + pred, patch_ref=x_HR, transformation="b c (hg h) (wg w) -> b (c hg wg) h w" + ) + if self.training: + scaled_preds_HR, class_preds_lst_HR = self.model_fine(x_HR) + else: + scaled_preds_HR = self.model_fine(x_HR) + if self.training: + if self.config.out_ref: + [outs_gdt_pred, outs_gdt_label], outs = scaled_preds + [outs_gdt_pred_HR, outs_gdt_label_HR], outs_HR = scaled_preds_HR + for idx_out, out_HR in enumerate(outs_HR): + 
outs_HR[idx_out] = self.output_mixer_merge_post( + patches2image( + out_HR, + grid_h=self.grid, + grid_w=self.grid, + transformation="(b hg wg) c h w -> b c (hg h) (wg w)", + ) + ) + return [ + ( + [ + outs_gdt_pred + outs_gdt_pred_HR, + outs_gdt_label + outs_gdt_label_HR, + ], + outs + outs_HR, + ), + class_preds_lst, + ] # handle gt here + else: + return [ + scaled_preds + + [ + self.output_mixer_merge_post( + patches2image( + scaled_pred_HR, + grid_h=self.grid, + grid_w=self.grid, + transformation="(b hg wg) c h w -> b c (hg h) (wg w)", + ) + ) + for scaled_pred_HR in scaled_preds_HR + ], + class_preds_lst, + ] + else: + return scaled_preds + [ + self.output_mixer_merge_post( + patches2image( + scaled_pred_HR, + grid_h=self.grid, + grid_w=self.grid, + transformation="(b hg wg) c h w -> b c (hg h) (wg w)", + ) + ) + for scaled_pred_HR in scaled_preds_HR + ] diff --git a/engine/BiRefNet/models/modules/__pycache__/aspp.cpython-310.pyc b/engine/BiRefNet/models/modules/__pycache__/aspp.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e57180fe23aba13a8ad3856875c30c59552ac838 Binary files /dev/null and b/engine/BiRefNet/models/modules/__pycache__/aspp.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/modules/__pycache__/decoder_blocks.cpython-310.pyc b/engine/BiRefNet/models/modules/__pycache__/decoder_blocks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a1e5712931ce08802c8a40e3045553df4ddfdf4b Binary files /dev/null and b/engine/BiRefNet/models/modules/__pycache__/decoder_blocks.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/modules/__pycache__/deform_conv.cpython-310.pyc b/engine/BiRefNet/models/modules/__pycache__/deform_conv.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..56a5fb83f47030c6392410540aa488e70ff890b1 Binary files /dev/null and b/engine/BiRefNet/models/modules/__pycache__/deform_conv.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/modules/__pycache__/lateral_blocks.cpython-310.pyc b/engine/BiRefNet/models/modules/__pycache__/lateral_blocks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4d6e34ccf8010540eec9380c2830f0a5c5954734 Binary files /dev/null and b/engine/BiRefNet/models/modules/__pycache__/lateral_blocks.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/modules/__pycache__/utils.cpython-310.pyc b/engine/BiRefNet/models/modules/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ebb6289d68cefa5601514e411c832ba4ed27fae Binary files /dev/null and b/engine/BiRefNet/models/modules/__pycache__/utils.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/modules/aspp.py b/engine/BiRefNet/models/modules/aspp.py new file mode 100644 index 0000000000000000000000000000000000000000..1faf88b3d04b0da8ea84aa721371572ec5ff44f4 --- /dev/null +++ b/engine/BiRefNet/models/modules/aspp.py @@ -0,0 +1,183 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from engine.BiRefNet.config import Config +from engine.BiRefNet.models.modules.deform_conv import DeformableConv2d + +config = Config() + + +class _ASPPModule(nn.Module): + def __init__(self, in_channels, planes, kernel_size, padding, dilation): + super(_ASPPModule, self).__init__() + self.atrous_conv = nn.Conv2d( + in_channels, + planes, + kernel_size=kernel_size, + stride=1, + padding=padding, + dilation=dilation, + bias=False, + ) + self.bn = nn.BatchNorm2d(planes) if 
config.batch_size > 1 else nn.Identity() + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + x = self.atrous_conv(x) + x = self.bn(x) + + return self.relu(x) + + +class ASPP(nn.Module): + def __init__(self, in_channels=64, out_channels=None, output_stride=16): + super(ASPP, self).__init__() + self.down_scale = 1 + if out_channels is None: + out_channels = in_channels + self.in_channelster = 256 // self.down_scale + if output_stride == 16: + dilations = [1, 6, 12, 18] + elif output_stride == 8: + dilations = [1, 12, 24, 36] + else: + raise NotImplementedError + + self.aspp1 = _ASPPModule( + in_channels, self.in_channelster, 1, padding=0, dilation=dilations[0] + ) + self.aspp2 = _ASPPModule( + in_channels, + self.in_channelster, + 3, + padding=dilations[1], + dilation=dilations[1], + ) + self.aspp3 = _ASPPModule( + in_channels, + self.in_channelster, + 3, + padding=dilations[2], + dilation=dilations[2], + ) + self.aspp4 = _ASPPModule( + in_channels, + self.in_channelster, + 3, + padding=dilations[3], + dilation=dilations[3], + ) + + self.global_avg_pool = nn.Sequential( + nn.AdaptiveAvgPool2d((1, 1)), + nn.Conv2d(in_channels, self.in_channelster, 1, stride=1, bias=False), + ( + nn.BatchNorm2d(self.in_channelster) + if config.batch_size > 1 + else nn.Identity() + ), + nn.ReLU(inplace=True), + ) + self.conv1 = nn.Conv2d(self.in_channelster * 5, out_channels, 1, bias=False) + self.bn1 = ( + nn.BatchNorm2d(out_channels) if config.batch_size > 1 else nn.Identity() + ) + self.relu = nn.ReLU(inplace=True) + self.dropout = nn.Dropout(0.5) + + def forward(self, x): + x1 = self.aspp1(x) + x2 = self.aspp2(x) + x3 = self.aspp3(x) + x4 = self.aspp4(x) + x5 = self.global_avg_pool(x) + x5 = F.interpolate(x5, size=x1.size()[2:], mode="bilinear", align_corners=True) + x = torch.cat((x1, x2, x3, x4, x5), dim=1) + + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + + return self.dropout(x) + + +##################### Deformable +class _ASPPModuleDeformable(nn.Module): + def __init__(self, in_channels, planes, kernel_size, padding): + super(_ASPPModuleDeformable, self).__init__() + self.atrous_conv = DeformableConv2d( + in_channels, + planes, + kernel_size=kernel_size, + stride=1, + padding=padding, + bias=False, + ) + self.bn = nn.BatchNorm2d(planes) if config.batch_size > 1 else nn.Identity() + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + x = self.atrous_conv(x) + x = self.bn(x) + + return self.relu(x) + + +class ASPPDeformable(nn.Module): + def __init__(self, in_channels, out_channels=None, parallel_block_sizes=[1, 3, 7]): + super(ASPPDeformable, self).__init__() + self.down_scale = 1 + if out_channels is None: + out_channels = in_channels + self.in_channelster = 256 // self.down_scale + + self.aspp1 = _ASPPModuleDeformable( + in_channels, self.in_channelster, 1, padding=0 + ) + self.aspp_deforms = nn.ModuleList( + [ + _ASPPModuleDeformable( + in_channels, + self.in_channelster, + conv_size, + padding=int(conv_size // 2), + ) + for conv_size in parallel_block_sizes + ] + ) + + self.global_avg_pool = nn.Sequential( + nn.AdaptiveAvgPool2d((1, 1)), + nn.Conv2d(in_channels, self.in_channelster, 1, stride=1, bias=False), + ( + nn.BatchNorm2d(self.in_channelster) + if config.batch_size > 1 + else nn.Identity() + ), + nn.ReLU(inplace=True), + ) + self.conv1 = nn.Conv2d( + self.in_channelster * (2 + len(self.aspp_deforms)), + out_channels, + 1, + bias=False, + ) + self.bn1 = ( + nn.BatchNorm2d(out_channels) if config.batch_size > 1 else nn.Identity() + ) + self.relu = 
nn.ReLU(inplace=True) + self.dropout = nn.Dropout(0.5) + + def forward(self, x): + x1 = self.aspp1(x) + x_aspp_deforms = [aspp_deform(x) for aspp_deform in self.aspp_deforms] + x5 = self.global_avg_pool(x) + x5 = F.interpolate(x5, size=x1.size()[2:], mode="bilinear", align_corners=True) + x = torch.cat((x1, *x_aspp_deforms, x5), dim=1) + + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + + return self.dropout(x) diff --git a/engine/BiRefNet/models/modules/decoder_blocks.py b/engine/BiRefNet/models/modules/decoder_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..2b3212692890a65c922a991446988c84f5affb06 --- /dev/null +++ b/engine/BiRefNet/models/modules/decoder_blocks.py @@ -0,0 +1,73 @@ +import torch +import torch.nn as nn + +from engine.BiRefNet.config import Config +from engine.BiRefNet.models.modules.aspp import ASPP, ASPPDeformable + +config = Config() + + +class BasicDecBlk(nn.Module): + def __init__(self, in_channels=64, out_channels=64, inter_channels=64): + super(BasicDecBlk, self).__init__() + inter_channels = in_channels // 4 if config.dec_channels_inter == "adap" else 64 + self.conv_in = nn.Conv2d(in_channels, inter_channels, 3, 1, padding=1) + self.relu_in = nn.ReLU(inplace=True) + if config.dec_att == "ASPP": + self.dec_att = ASPP(in_channels=inter_channels) + elif config.dec_att == "ASPPDeformable": + self.dec_att = ASPPDeformable(in_channels=inter_channels) + self.conv_out = nn.Conv2d(inter_channels, out_channels, 3, 1, padding=1) + self.bn_in = ( + nn.BatchNorm2d(inter_channels) if config.batch_size > 1 else nn.Identity() + ) + self.bn_out = ( + nn.BatchNorm2d(out_channels) if config.batch_size > 1 else nn.Identity() + ) + + def forward(self, x): + x = self.conv_in(x) + x = self.bn_in(x) + x = self.relu_in(x) + if hasattr(self, "dec_att"): + x = self.dec_att(x) + x = self.conv_out(x) + x = self.bn_out(x) + return x + + +class ResBlk(nn.Module): + def __init__(self, in_channels=64, out_channels=None, inter_channels=64): + super(ResBlk, self).__init__() + if out_channels is None: + out_channels = in_channels + inter_channels = in_channels // 4 if config.dec_channels_inter == "adap" else 64 + + self.conv_in = nn.Conv2d(in_channels, inter_channels, 3, 1, padding=1) + self.bn_in = ( + nn.BatchNorm2d(inter_channels) if config.batch_size > 1 else nn.Identity() + ) + self.relu_in = nn.ReLU(inplace=True) + + if config.dec_att == "ASPP": + self.dec_att = ASPP(in_channels=inter_channels) + elif config.dec_att == "ASPPDeformable": + self.dec_att = ASPPDeformable(in_channels=inter_channels) + + self.conv_out = nn.Conv2d(inter_channels, out_channels, 3, 1, padding=1) + self.bn_out = ( + nn.BatchNorm2d(out_channels) if config.batch_size > 1 else nn.Identity() + ) + + self.conv_resi = nn.Conv2d(in_channels, out_channels, 1, 1, 0) + + def forward(self, x): + _x = self.conv_resi(x) + x = self.conv_in(x) + x = self.bn_in(x) + x = self.relu_in(x) + if hasattr(self, "dec_att"): + x = self.dec_att(x) + x = self.conv_out(x) + x = self.bn_out(x) + return x + _x diff --git a/engine/BiRefNet/models/modules/deform_conv.py b/engine/BiRefNet/models/modules/deform_conv.py new file mode 100644 index 0000000000000000000000000000000000000000..43f5e57f487104da78f0847b48e3208d83572f5e --- /dev/null +++ b/engine/BiRefNet/models/modules/deform_conv.py @@ -0,0 +1,66 @@ +import torch +import torch.nn as nn +from torchvision.ops import deform_conv2d + + +class DeformableConv2d(nn.Module): + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + stride=1, + 
padding=1, + bias=False): + + super(DeformableConv2d, self).__init__() + + assert type(kernel_size) == tuple or type(kernel_size) == int + + kernel_size = kernel_size if type(kernel_size) == tuple else (kernel_size, kernel_size) + self.stride = stride if type(stride) == tuple else (stride, stride) + self.padding = padding + + self.offset_conv = nn.Conv2d(in_channels, + 2 * kernel_size[0] * kernel_size[1], + kernel_size=kernel_size, + stride=stride, + padding=self.padding, + bias=True) + + nn.init.constant_(self.offset_conv.weight, 0.) + nn.init.constant_(self.offset_conv.bias, 0.) + + self.modulator_conv = nn.Conv2d(in_channels, + 1 * kernel_size[0] * kernel_size[1], + kernel_size=kernel_size, + stride=stride, + padding=self.padding, + bias=True) + + nn.init.constant_(self.modulator_conv.weight, 0.) + nn.init.constant_(self.modulator_conv.bias, 0.) + + self.regular_conv = nn.Conv2d(in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=self.padding, + bias=bias) + + def forward(self, x): + #h, w = x.shape[2:] + #max_offset = max(h, w)/4. + + offset = self.offset_conv(x)#.clamp(-max_offset, max_offset) + modulator = 2. * torch.sigmoid(self.modulator_conv(x)) + + x = deform_conv2d( + input=x, + offset=offset, + weight=self.regular_conv.weight, + bias=self.regular_conv.bias, + padding=self.padding, + mask=modulator, + stride=self.stride, + ) + return x diff --git a/engine/BiRefNet/models/modules/lateral_blocks.py b/engine/BiRefNet/models/modules/lateral_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..a00b0ccab45e899e6055d774d9e751004f433f6a --- /dev/null +++ b/engine/BiRefNet/models/modules/lateral_blocks.py @@ -0,0 +1,21 @@ +from functools import partial + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F + +from engine.BiRefNet.config import Config + +config = Config() + + +class BasicLatBlk(nn.Module): + def __init__(self, in_channels=64, out_channels=64, inter_channels=64): + super(BasicLatBlk, self).__init__() + inter_channels = in_channels // 4 if config.dec_channels_inter == "adap" else 64 + self.conv = nn.Conv2d(in_channels, out_channels, 1, 1, 0) + + def forward(self, x): + x = self.conv(x) + return x diff --git a/engine/BiRefNet/models/modules/prompt_encoder.py b/engine/BiRefNet/models/modules/prompt_encoder.py new file mode 100644 index 0000000000000000000000000000000000000000..1243c30604e982ac1bdc57fa402ca2722ca1bb56 --- /dev/null +++ b/engine/BiRefNet/models/modules/prompt_encoder.py @@ -0,0 +1,233 @@ +from typing import Any, Optional, Tuple, Type + +import numpy as np +import torch +import torch.nn as nn + + +class PromptEncoder(nn.Module): + def __init__( + self, + embed_dim=256, + image_embedding_size=1024, + input_image_size=(1024, 1024), + mask_in_chans=16, + activation=nn.GELU, + ) -> None: + super().__init__() + """ + Codes are partially from SAM: https://github.com/facebookresearch/segment-anything/blob/6fdee8f2727f4506cfbbe553e23b895e27956588/segment_anything/modeling/prompt_encoder.py. + + Arguments: + embed_dim (int): The prompts' embedding dimension + image_embedding_size (tuple(int, int)): The spatial size of the + image embedding, as (H, W). + input_image_size (int): The padded size of the image as input + to the image encoder, as (H, W). + mask_in_chans (int): The number of hidden channels used for + encoding input masks. + activation (nn.Module): The activation to use when encoding + input masks. 
+ """ + super().__init__() + self.embed_dim = embed_dim + self.input_image_size = input_image_size + self.image_embedding_size = image_embedding_size + self.pe_layer = PositionEmbeddingRandom(embed_dim // 2) + + self.num_point_embeddings: int = 4 # pos/neg point + 2 box corners + point_embeddings = [ + nn.Embedding(1, embed_dim) for i in range(self.num_point_embeddings) + ] + self.point_embeddings = nn.ModuleList(point_embeddings) + self.not_a_point_embed = nn.Embedding(1, embed_dim) + + self.mask_input_size = ( + 4 * image_embedding_size[0], + 4 * image_embedding_size[1], + ) + self.mask_downscaling = nn.Sequential( + nn.Conv2d(1, mask_in_chans // 4, kernel_size=2, stride=2), + LayerNorm2d(mask_in_chans // 4), + activation(), + nn.Conv2d(mask_in_chans // 4, mask_in_chans, kernel_size=2, stride=2), + LayerNorm2d(mask_in_chans), + activation(), + nn.Conv2d(mask_in_chans, embed_dim, kernel_size=1), + ) + self.no_mask_embed = nn.Embedding(1, embed_dim) + + def get_dense_pe(self) -> torch.Tensor: + """ + Returns the positional encoding used to encode point prompts, + applied to a dense set of points the shape of the image encoding. + + Returns: + torch.Tensor: Positional encoding with shape + 1x(embed_dim)x(embedding_h)x(embedding_w) + """ + return self.pe_layer(self.image_embedding_size).unsqueeze(0) + + def _embed_points( + self, + points: torch.Tensor, + labels: torch.Tensor, + pad: bool, + ) -> torch.Tensor: + """Embeds point prompts.""" + points = points + 0.5 # Shift to center of pixel + if pad: + padding_point = torch.zeros((points.shape[0], 1, 2), device=points.device) + padding_label = -torch.ones((labels.shape[0], 1), device=labels.device) + points = torch.cat([points, padding_point], dim=1) + labels = torch.cat([labels, padding_label], dim=1) + point_embedding = self.pe_layer.forward_with_coords( + points, self.input_image_size + ) + point_embedding[labels == -1] = 0.0 + point_embedding[labels == -1] += self.not_a_point_embed.weight + point_embedding[labels == 0] += self.point_embeddings[0].weight + point_embedding[labels == 1] += self.point_embeddings[1].weight + return point_embedding + + def _embed_boxes(self, boxes: torch.Tensor) -> torch.Tensor: + """Embeds box prompts.""" + boxes = boxes + 0.5 # Shift to center of pixel + coords = boxes.reshape(-1, 2, 2) + corner_embedding = self.pe_layer.forward_with_coords( + coords, self.input_image_size + ) + corner_embedding[:, 0, :] += self.point_embeddings[2].weight + corner_embedding[:, 1, :] += self.point_embeddings[3].weight + return corner_embedding + + def _embed_masks(self, masks: torch.Tensor) -> torch.Tensor: + """Embeds mask inputs.""" + mask_embedding = self.mask_downscaling(masks) + return mask_embedding + + def _get_batch_size( + self, + points: Optional[Tuple[torch.Tensor, torch.Tensor]], + boxes: Optional[torch.Tensor], + masks: Optional[torch.Tensor], + ) -> int: + """ + Gets the batch size of the output given the batch size of the input prompts. + """ + if points is not None: + return points[0].shape[0] + elif boxes is not None: + return boxes.shape[0] + elif masks is not None: + return masks.shape[0] + else: + return 1 + + def _get_device(self) -> torch.device: + return self.point_embeddings[0].weight.device + + def forward( + self, + points: Optional[Tuple[torch.Tensor, torch.Tensor]], + boxes: Optional[torch.Tensor], + masks: Optional[torch.Tensor], + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Embeds different types of prompts, returning both sparse and dense + embeddings. 
+ + Arguments: + points (tuple(torch.Tensor, torch.Tensor) or none): point coordinates + and labels to embed. + boxes (torch.Tensor or none): boxes to embed + masks (torch.Tensor or none): masks to embed + + Returns: + torch.Tensor: sparse embeddings for the points and boxes, with shape + BxNx(embed_dim), where N is determined by the number of input points + and boxes. + torch.Tensor: dense embeddings for the masks, in the shape + Bx(embed_dim)x(embed_H)x(embed_W) + """ + bs = self._get_batch_size(points, boxes, masks) + sparse_embeddings = torch.empty( + (bs, 0, self.embed_dim), device=self._get_device() + ) + if points is not None: + coords, labels = points + point_embeddings = self._embed_points(coords, labels, pad=(boxes is None)) + sparse_embeddings = torch.cat([sparse_embeddings, point_embeddings], dim=1) + if boxes is not None: + box_embeddings = self._embed_boxes(boxes) + sparse_embeddings = torch.cat([sparse_embeddings, box_embeddings], dim=1) + + if masks is not None: + dense_embeddings = self._embed_masks(masks) + else: + dense_embeddings = self.no_mask_embed.weight.reshape(1, -1, 1, 1).expand( + bs, -1, self.image_embedding_size[0], self.image_embedding_size[1] + ) + + return sparse_embeddings, dense_embeddings + + +class PositionEmbeddingRandom(nn.Module): + """ + Positional encoding using random spatial frequencies. + """ + + def __init__(self, num_pos_feats: int = 64, scale: Optional[float] = None) -> None: + super().__init__() + if scale is None or scale <= 0.0: + scale = 1.0 + self.register_buffer( + "positional_encoding_gaussian_matrix", + scale * torch.randn((2, num_pos_feats)), + ) + + def _pe_encoding(self, coords: torch.Tensor) -> torch.Tensor: + """Positionally encode points that are normalized to [0,1].""" + # assuming coords are in [0, 1]^2 square and have d_1 x ... x d_n x 2 shape + coords = 2 * coords - 1 + coords = coords @ self.positional_encoding_gaussian_matrix + coords = 2 * np.pi * coords + # outputs d_1 x ... 
x d_n x C shape + return torch.cat([torch.sin(coords), torch.cos(coords)], dim=-1) + + def forward(self, size: Tuple[int, int]) -> torch.Tensor: + """Generate positional encoding for a grid of the specified size.""" + h, w = size + device: Any = self.positional_encoding_gaussian_matrix.device + grid = torch.ones((h, w), device=device, dtype=torch.float32) + y_embed = grid.cumsum(dim=0) - 0.5 + x_embed = grid.cumsum(dim=1) - 0.5 + y_embed = y_embed / h + x_embed = x_embed / w + + pe = self._pe_encoding(torch.stack([x_embed, y_embed], dim=-1)) + return pe.permute(2, 0, 1) # C x H x W + + def forward_with_coords( + self, coords_input: torch.Tensor, image_size: Tuple[int, int] + ) -> torch.Tensor: + """Positionally encode points that are not normalized to [0,1].""" + coords = coords_input.clone() + coords[:, :, 0] = coords[:, :, 0] / image_size[1] + coords[:, :, 1] = coords[:, :, 1] / image_size[0] + return self._pe_encoding(coords.to(torch.float)) # B x N x C + + +class LayerNorm2d(nn.Module): + def __init__(self, num_channels: int, eps: float = 1e-6) -> None: + super().__init__() + self.weight = nn.Parameter(torch.ones(num_channels)) + self.bias = nn.Parameter(torch.zeros(num_channels)) + self.eps = eps + + def forward(self, x: torch.Tensor) -> torch.Tensor: + u = x.mean(1, keepdim=True) + s = (x - u).pow(2).mean(1, keepdim=True) + x = (x - u) / torch.sqrt(s + self.eps) + x = self.weight[:, None, None] * x + self.bias[:, None, None] + return x diff --git a/engine/BiRefNet/models/modules/utils.py b/engine/BiRefNet/models/modules/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7f05b0be8581d23180f6138751a8fd57d1135bcd --- /dev/null +++ b/engine/BiRefNet/models/modules/utils.py @@ -0,0 +1,51 @@ +import torch.nn as nn + + +def build_act_layer(act_layer): + if act_layer == "ReLU": + return nn.ReLU(inplace=True) + elif act_layer == "SiLU": + return nn.SiLU(inplace=True) + elif act_layer == "GELU": + return nn.GELU() + + raise NotImplementedError(f"build_act_layer does not support {act_layer}") + + +def build_norm_layer( + dim, norm_layer, in_format="channels_last", out_format="channels_last", eps=1e-6 +): + layers = [] + if norm_layer == "BN": + if in_format == "channels_last": + layers.append(to_channels_first()) + layers.append(nn.BatchNorm2d(dim)) + if out_format == "channels_last": + layers.append(to_channels_last()) + elif norm_layer == "LN": + if in_format == "channels_first": + layers.append(to_channels_last()) + layers.append(nn.LayerNorm(dim, eps=eps)) + if out_format == "channels_first": + layers.append(to_channels_first()) + else: + raise NotImplementedError(f"build_norm_layer does not support {norm_layer}") + return nn.Sequential(*layers) + + +class to_channels_first(nn.Module): + + def __init__(self): + super().__init__() + + def forward(self, x): + return x.permute(0, 3, 1, 2) + + +class to_channels_last(nn.Module): + + def __init__(self): + super().__init__() + + def forward(self, x): + return x.permute(0, 2, 3, 1) diff --git a/engine/BiRefNet/models/refinement/__pycache__/refiner.cpython-310.pyc b/engine/BiRefNet/models/refinement/__pycache__/refiner.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e619759dfdcd6eb04cfa004bf4d106333377520c Binary files /dev/null and b/engine/BiRefNet/models/refinement/__pycache__/refiner.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/refinement/__pycache__/stem_layer.cpython-310.pyc b/engine/BiRefNet/models/refinement/__pycache__/stem_layer.cpython-310.pyc new file mode 
100644 index 0000000000000000000000000000000000000000..799bd368c910826c65e749fd36fd8ba3dde76aaa Binary files /dev/null and b/engine/BiRefNet/models/refinement/__pycache__/stem_layer.cpython-310.pyc differ diff --git a/engine/BiRefNet/models/refinement/refiner.py b/engine/BiRefNet/models/refinement/refiner.py new file mode 100644 index 0000000000000000000000000000000000000000..7daf74116874127739ce0ef4506252bbb0c3030e --- /dev/null +++ b/engine/BiRefNet/models/refinement/refiner.py @@ -0,0 +1,253 @@ +from collections import OrderedDict + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torchvision.models import resnet50, vgg16, vgg16_bn + +from engine.BiRefNet.config import Config +from engine.BiRefNet.dataset import class_labels_TR_sorted +from engine.BiRefNet.models.backbones.build_backbone import build_backbone +from engine.BiRefNet.models.modules.decoder_blocks import BasicDecBlk +from engine.BiRefNet.models.modules.lateral_blocks import BasicLatBlk +from engine.BiRefNet.models.refinement.stem_layer import StemLayer + + +class RefinerPVTInChannels4(nn.Module): + def __init__(self, in_channels=3 + 1): + super(RefinerPVTInChannels4, self).__init__() + self.config = Config() + self.epoch = 1 + self.bb = build_backbone(self.config.bb, params_settings="in_channels=4") + + lateral_channels_in_collection = { + "vgg16": [512, 256, 128, 64], + "vgg16bn": [512, 256, 128, 64], + "resnet50": [1024, 512, 256, 64], + "pvt_v2_b2": [512, 320, 128, 64], + "pvt_v2_b5": [512, 320, 128, 64], + "swin_v1_b": [1024, 512, 256, 128], + "swin_v1_l": [1536, 768, 384, 192], + } + channels = lateral_channels_in_collection[self.config.bb] + self.squeeze_module = BasicDecBlk(channels[0], channels[0]) + + self.decoder = Decoder(channels) + + if 0: + for key, value in self.named_parameters(): + if "bb." in key: + value.requires_grad = False + + def forward(self, x): + if isinstance(x, list): + x = torch.cat(x, dim=1) + ########## Encoder ########## + if self.config.bb in ["vgg16", "vgg16bn", "resnet50"]: + x1 = self.bb.conv1(x) + x2 = self.bb.conv2(x1) + x3 = self.bb.conv3(x2) + x4 = self.bb.conv4(x3) + else: + x1, x2, x3, x4 = self.bb(x) + + x4 = self.squeeze_module(x4) + + ########## Decoder ########## + + features = [x, x1, x2, x3, x4] + scaled_preds = self.decoder(features) + + return scaled_preds + + +class Refiner(nn.Module): + def __init__(self, in_channels=3 + 1): + super(Refiner, self).__init__() + self.config = Config() + self.epoch = 1 + self.stem_layer = StemLayer( + in_channels=in_channels, + inter_channels=48, + out_channels=3, + norm_layer="BN" if self.config.batch_size > 1 else "LN", + ) + self.bb = build_backbone(self.config.bb) + + lateral_channels_in_collection = { + "vgg16": [512, 256, 128, 64], + "vgg16bn": [512, 256, 128, 64], + "resnet50": [1024, 512, 256, 64], + "pvt_v2_b2": [512, 320, 128, 64], + "pvt_v2_b5": [512, 320, 128, 64], + "swin_v1_b": [1024, 512, 256, 128], + "swin_v1_l": [1536, 768, 384, 192], + } + channels = lateral_channels_in_collection[self.config.bb] + self.squeeze_module = BasicDecBlk(channels[0], channels[0]) + + self.decoder = Decoder(channels) + + if 0: + for key, value in self.named_parameters(): + if "bb." 
in key: + value.requires_grad = False + + def forward(self, x): + if isinstance(x, list): + x = torch.cat(x, dim=1) + x = self.stem_layer(x) + ########## Encoder ########## + if self.config.bb in ["vgg16", "vgg16bn", "resnet50"]: + x1 = self.bb.conv1(x) + x2 = self.bb.conv2(x1) + x3 = self.bb.conv3(x2) + x4 = self.bb.conv4(x3) + else: + x1, x2, x3, x4 = self.bb(x) + + x4 = self.squeeze_module(x4) + + ########## Decoder ########## + + features = [x, x1, x2, x3, x4] + scaled_preds = self.decoder(features) + + return scaled_preds + + +class Decoder(nn.Module): + def __init__(self, channels): + super(Decoder, self).__init__() + self.config = Config() + DecoderBlock = eval("BasicDecBlk") + LateralBlock = eval("BasicLatBlk") + + self.decoder_block4 = DecoderBlock(channels[0], channels[1]) + self.decoder_block3 = DecoderBlock(channels[1], channels[2]) + self.decoder_block2 = DecoderBlock(channels[2], channels[3]) + self.decoder_block1 = DecoderBlock(channels[3], channels[3] // 2) + + self.lateral_block4 = LateralBlock(channels[1], channels[1]) + self.lateral_block3 = LateralBlock(channels[2], channels[2]) + self.lateral_block2 = LateralBlock(channels[3], channels[3]) + + if self.config.ms_supervision: + self.conv_ms_spvn_4 = nn.Conv2d(channels[1], 1, 1, 1, 0) + self.conv_ms_spvn_3 = nn.Conv2d(channels[2], 1, 1, 1, 0) + self.conv_ms_spvn_2 = nn.Conv2d(channels[3], 1, 1, 1, 0) + self.conv_out1 = nn.Sequential(nn.Conv2d(channels[3] // 2, 1, 1, 1, 0)) + + def forward(self, features): + x, x1, x2, x3, x4 = features + outs = [] + p4 = self.decoder_block4(x4) + _p4 = F.interpolate(p4, size=x3.shape[2:], mode="bilinear", align_corners=True) + _p3 = _p4 + self.lateral_block4(x3) + + p3 = self.decoder_block3(_p3) + _p3 = F.interpolate(p3, size=x2.shape[2:], mode="bilinear", align_corners=True) + _p2 = _p3 + self.lateral_block3(x2) + + p2 = self.decoder_block2(_p2) + _p2 = F.interpolate(p2, size=x1.shape[2:], mode="bilinear", align_corners=True) + _p1 = _p2 + self.lateral_block2(x1) + + _p1 = self.decoder_block1(_p1) + _p1 = F.interpolate(_p1, size=x.shape[2:], mode="bilinear", align_corners=True) + p1_out = self.conv_out1(_p1) + + if self.config.ms_supervision: + outs.append(self.conv_ms_spvn_4(p4)) + outs.append(self.conv_ms_spvn_3(p3)) + outs.append(self.conv_ms_spvn_2(p2)) + outs.append(p1_out) + return outs + + +class RefUNet(nn.Module): + # Refinement + def __init__(self, in_channels=3 + 1): + super(RefUNet, self).__init__() + self.encoder_1 = nn.Sequential( + nn.Conv2d(in_channels, 64, 3, 1, 1), + nn.Conv2d(64, 64, 3, 1, 1), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True), + ) + + self.encoder_2 = nn.Sequential( + nn.MaxPool2d(2, 2, ceil_mode=True), + nn.Conv2d(64, 64, 3, 1, 1), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True), + ) + + self.encoder_3 = nn.Sequential( + nn.MaxPool2d(2, 2, ceil_mode=True), + nn.Conv2d(64, 64, 3, 1, 1), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True), + ) + + self.encoder_4 = nn.Sequential( + nn.MaxPool2d(2, 2, ceil_mode=True), + nn.Conv2d(64, 64, 3, 1, 1), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True), + ) + + self.pool4 = nn.MaxPool2d(2, 2, ceil_mode=True) + ##### + self.decoder_5 = nn.Sequential( + nn.Conv2d(64, 64, 3, 1, 1), nn.BatchNorm2d(64), nn.ReLU(inplace=True) + ) + ##### + self.decoder_4 = nn.Sequential( + nn.Conv2d(128, 64, 3, 1, 1), nn.BatchNorm2d(64), nn.ReLU(inplace=True) + ) + + self.decoder_3 = nn.Sequential( + nn.Conv2d(128, 64, 3, 1, 1), nn.BatchNorm2d(64), nn.ReLU(inplace=True) + ) + + self.decoder_2 = nn.Sequential( + nn.Conv2d(128, 64, 3, 1, 1), 
nn.BatchNorm2d(64), nn.ReLU(inplace=True) + ) + + self.decoder_1 = nn.Sequential( + nn.Conv2d(128, 64, 3, 1, 1), nn.BatchNorm2d(64), nn.ReLU(inplace=True) + ) + + self.conv_d0 = nn.Conv2d(64, 1, 3, 1, 1) + + self.upscore2 = nn.Upsample(scale_factor=2, mode="bilinear", align_corners=True) + + def forward(self, x): + outs = [] + if isinstance(x, list): + x = torch.cat(x, dim=1) + hx = x + + hx1 = self.encoder_1(hx) + hx2 = self.encoder_2(hx1) + hx3 = self.encoder_3(hx2) + hx4 = self.encoder_4(hx3) + + hx = self.decoder_5(self.pool4(hx4)) + hx = torch.cat((self.upscore2(hx), hx4), 1) + + d4 = self.decoder_4(hx) + hx = torch.cat((self.upscore2(d4), hx3), 1) + + d3 = self.decoder_3(hx) + hx = torch.cat((self.upscore2(d3), hx2), 1) + + d2 = self.decoder_2(hx) + hx = torch.cat((self.upscore2(d2), hx1), 1) + + d1 = self.decoder_1(hx) + + x = self.conv_d0(d1) + outs.append(x) + return outs diff --git a/engine/BiRefNet/models/refinement/stem_layer.py b/engine/BiRefNet/models/refinement/stem_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..968cb7fa2bfbb1c16a03c6f288ba3bc534386872 --- /dev/null +++ b/engine/BiRefNet/models/refinement/stem_layer.py @@ -0,0 +1,44 @@ +import torch.nn as nn + +from engine.BiRefNet.models.modules.utils import build_act_layer, build_norm_layer + + +class StemLayer(nn.Module): + r"""Stem layer of InternImage + Args: + in_channels (int): number of input channels + out_channels (int): number of output channels + act_layer (str): activation layer + norm_layer (str): normalization layer + """ + + def __init__( + self, + in_channels=3 + 1, + inter_channels=48, + out_channels=96, + act_layer="GELU", + norm_layer="BN", + ): + super().__init__() + self.conv1 = nn.Conv2d( + in_channels, inter_channels, kernel_size=3, stride=1, padding=1 + ) + self.norm1 = build_norm_layer( + inter_channels, norm_layer, "channels_first", "channels_first" + ) + self.act = build_act_layer(act_layer) + self.conv2 = nn.Conv2d( + inter_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + self.norm2 = build_norm_layer( + out_channels, norm_layer, "channels_first", "channels_first" + ) + + def forward(self, x): + x = self.conv1(x) + x = self.norm1(x) + x = self.act(x) + x = self.conv2(x) + x = self.norm2(x) + return x diff --git a/engine/BiRefNet/requirements.txt b/engine/BiRefNet/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..6dbf4b8d19fa556ac2863eb1aa51b5a8a14afcb8 --- /dev/null +++ b/engine/BiRefNet/requirements.txt @@ -0,0 +1,16 @@ +--extra-index-url https://download.pytorch.org/whl/cu118 +torch==2.0.1 +--extra-index-url https://download.pytorch.org/whl/cu118 +torchvision==0.15.2 +numpy<2 +opencv-python +timm +scipy +scikit-image +kornia +einops + +tqdm +prettytable + +huggingface_hub diff --git a/engine/BiRefNet/rm_cache.sh b/engine/BiRefNet/rm_cache.sh new file mode 100644 index 0000000000000000000000000000000000000000..0d372902f214666fd049e84e259c1c809d24334b --- /dev/null +++ b/engine/BiRefNet/rm_cache.sh @@ -0,0 +1,25 @@ +#!/bin/bash +rm -rf __pycache__ */__pycache__ */*/__pycache__ + +# Val +rm -r tmp* + +# Train +rm slurm* +rm -r ckpt +rm nohup.out* +rm nohup.log* + +# Eval +rm -r evaluation/eval-* +rm -r tmp* +rm -r e_logs/ + +# System +rm core-*-python-* + +# Inference cache +rm -rf images_todo/ +rm -rf predictions/ + +clear diff --git a/engine/BiRefNet/sub.sh b/engine/BiRefNet/sub.sh new file mode 100644 index 0000000000000000000000000000000000000000..ad0b4e1af6ccce5befef05ed94e386fc2e24edcc --- /dev/null +++ 
b/engine/BiRefNet/sub.sh @@ -0,0 +1,17 @@ +#!/bin/sh +# Example: ./sub.sh tmp_proj 0,1,2,3 3 --> Use 0,1,2,3 for training, release GPUs, use GPU:3 for inference. + +module load gcc/11.2.0 cuda/11.8 cudnn/8.6.0_cu11x && cpu_core_num=6 +# module load compilers/cuda/11.8 compilers/gcc/12.2.0 cudnn/8.4.0.27_cuda11.x && cpu_core_num=32 + +export PYTHONUNBUFFERED=1 + +method=${1:-"BSL"} +devices=${2:-0} +gpu_num=$(($(echo ${devices%%,} | grep -o "," | wc -l)+1)) + +sbatch --nodes=1 -p vip_gpu_ailab -A ai4bio \ + --gres=gpu:${gpu_num} --ntasks-per-node=1 --cpus-per-task=$((gpu_num*cpu_core_num)) \ + ./train_test.sh ${method} ${devices} + +hostname diff --git a/engine/BiRefNet/test.sh b/engine/BiRefNet/test.sh new file mode 100644 index 0000000000000000000000000000000000000000..69e561067c91ef012078b75202d2ae4b4cdfc72b --- /dev/null +++ b/engine/BiRefNet/test.sh @@ -0,0 +1,24 @@ +devices=${1:-0} +pred_root=${2:-e_preds} + +# Inference + +CUDA_VISIBLE_DEVICES=${devices} python inference.py --pred_root ${pred_root} + +echo Inference finished at $(date) + +# Evaluation +log_dir=e_logs && mkdir ${log_dir} + +task=$(python3 config.py --print_task) +testsets=$(python3 config.py --print_testsets) + +testsets=(`echo ${testsets} | tr ',' ' '`) && testsets=${testsets[@]} + +for testset in ${testsets}; do + # python eval_existingOnes.py --pred_root ${pred_root} --data_lst ${testset} > ${log_dir}/eval_${testset}.out + nohup python eval_existingOnes.py --pred_root ${pred_root} --data_lst ${testset} > ${log_dir}/eval_${testset}.out 2>&1 & +done + + +echo Evaluation started at $(date) diff --git a/engine/BiRefNet/train.py b/engine/BiRefNet/train.py new file mode 100644 index 0000000000000000000000000000000000000000..8e428d5ad57ace990984bd4cdaad8879b68a7810 --- /dev/null +++ b/engine/BiRefNet/train.py @@ -0,0 +1,257 @@ +import os +import datetime +import argparse +import torch +import torch.nn as nn +import torch.optim as optim +from torch.autograd import Variable + +from config import Config +from loss import PixLoss, ClsLoss +from dataset import MyData +from models.birefnet import BiRefNet, BiRefNetC2F +from utils import Logger, AverageMeter, set_seed, check_state_dict + +from torch.utils.data.distributed import DistributedSampler +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group + + +parser = argparse.ArgumentParser(description='') +parser.add_argument('--resume', default=None, type=str, help='path to latest checkpoint') +parser.add_argument('--epochs', default=120, type=int) +parser.add_argument('--ckpt_dir', default='ckpt/tmp', help='Temporary folder') +parser.add_argument('--testsets', default='DIS-VD+DIS-TE1+DIS-TE2+DIS-TE3+DIS-TE4', type=str) +parser.add_argument('--dist', default=False, type=lambda x: x == 'True') +parser.add_argument('--use_accelerate', action='store_true', help='`accelerate launch --multi_gpu train.py --use_accelerate`. 
Use accelerate for training, good for FP16/BF16/...') +args = parser.parse_args() + +if args.use_accelerate: + from accelerate import Accelerator + accelerator = Accelerator( + mixed_precision=['no', 'fp16', 'bf16', 'fp8'][1], + gradient_accumulation_steps=1, + ) + args.dist = False + +config = Config() +if config.rand_seed: + set_seed(config.rand_seed) + +# DDP +to_be_distributed = args.dist +if to_be_distributed: + init_process_group(backend="nccl", timeout=datetime.timedelta(seconds=3600*10)) + device = int(os.environ["LOCAL_RANK"]) +else: + device = config.device + +epoch_st = 1 +# make dir for ckpt +os.makedirs(args.ckpt_dir, exist_ok=True) + +# Init log file +logger = Logger(os.path.join(args.ckpt_dir, "log.txt")) +logger_loss_idx = 1 + +# log model and optimizer params +# logger.info("Model details:"); logger.info(model) +if args.use_accelerate and accelerator.mixed_precision != 'no': + config.compile = False +logger.info("datasets: load_all={}, compile={}.".format(config.load_all, config.compile)) +logger.info("Other hyperparameters:"); logger.info(args) +print('batch size:', config.batch_size) + +if os.path.exists(os.path.join(config.data_root_dir, config.task, args.testsets.strip('+').split('+')[0])): + args.testsets = args.testsets.strip('+').split('+') +else: + args.testsets = [] + + +def prepare_dataloader(dataset: torch.utils.data.Dataset, batch_size: int, to_be_distributed=False, is_train=True): + # Prepare dataloaders + if to_be_distributed: + return torch.utils.data.DataLoader( + dataset=dataset, batch_size=batch_size, num_workers=min(config.num_workers, batch_size), pin_memory=True, + shuffle=False, sampler=DistributedSampler(dataset), drop_last=True + ) + else: + return torch.utils.data.DataLoader( + dataset=dataset, batch_size=batch_size, num_workers=min(config.num_workers, batch_size, 0), pin_memory=True, + shuffle=is_train, drop_last=True + ) + + +def init_data_loaders(to_be_distributed): + # Prepare datasets + train_loader = prepare_dataloader( + MyData(datasets=config.training_set, image_size=config.size, is_train=True), + config.batch_size, to_be_distributed=to_be_distributed, is_train=True + ) + print(len(train_loader), "batches of train dataloader {} have been created.".format(config.training_set)) + test_loaders = {} + for testset in args.testsets: + _data_loader_test = prepare_dataloader( + MyData(datasets=testset, image_size=config.size, is_train=False), + config.batch_size_valid, is_train=False + ) + print(len(_data_loader_test), "batches of valid dataloader {} have been created.".format(testset)) + test_loaders[testset] = _data_loader_test + return train_loader, test_loaders + + +def init_models_optimizers(epochs, to_be_distributed): + # Init models + if config.model == 'BiRefNet': + model = BiRefNet(bb_pretrained=True and not os.path.isfile(str(args.resume))) + elif config.model == 'BiRefNetC2F': + model = BiRefNetC2F(bb_pretrained=True and not os.path.isfile(str(args.resume))) + if args.resume: + if os.path.isfile(args.resume): + logger.info("=> loading checkpoint '{}'".format(args.resume)) + state_dict = torch.load(args.resume, map_location='cpu', weights_only=True) + state_dict = check_state_dict(state_dict) + model.load_state_dict(state_dict) + global epoch_st + epoch_st = int(args.resume.rstrip('.pth').split('epoch_')[-1]) + 1 + else: + logger.info("=> no checkpoint found at '{}'".format(args.resume)) + if not args.use_accelerate: + if to_be_distributed: + model = model.to(device) + model = DDP(model, device_ids=[device]) + else: + model = 
model.to(device) + if config.compile: + model = torch.compile(model, mode=['default', 'reduce-overhead', 'max-autotune'][0]) + if config.precisionHigh: + torch.set_float32_matmul_precision('high') + + # Setting optimizer + if config.optimizer == 'AdamW': + optimizer = optim.AdamW(params=model.parameters(), lr=config.lr, weight_decay=1e-2) + elif config.optimizer == 'Adam': + optimizer = optim.Adam(params=model.parameters(), lr=config.lr, weight_decay=0) + lr_scheduler = torch.optim.lr_scheduler.MultiStepLR( + optimizer, + milestones=[lde if lde > 0 else epochs + lde + 1 for lde in config.lr_decay_epochs], + gamma=config.lr_decay_rate + ) + logger.info("Optimizer details:"); logger.info(optimizer) + logger.info("Scheduler details:"); logger.info(lr_scheduler) + + return model, optimizer, lr_scheduler + + +class Trainer: + def __init__( + self, data_loaders, model_opt_lrsch, + ): + self.model, self.optimizer, self.lr_scheduler = model_opt_lrsch + self.train_loader, self.test_loaders = data_loaders + if args.use_accelerate: + self.train_loader, self.model, self.optimizer = accelerator.prepare(self.train_loader, self.model, self.optimizer) + for testset in self.test_loaders.keys(): + self.test_loaders[testset] = accelerator.prepare(self.test_loaders[testset]) + if config.out_ref: + self.criterion_gdt = nn.BCELoss() + + # Setting Losses + self.pix_loss = PixLoss() + self.cls_loss = ClsLoss() + + # Others + self.loss_log = AverageMeter() + + def _train_batch(self, batch): + if args.use_accelerate: + inputs = batch[0]#.to(device) + gts = batch[1]#.to(device) + class_labels = batch[2]#.to(device) + else: + inputs = batch[0].to(device) + gts = batch[1].to(device) + class_labels = batch[2].to(device) + scaled_preds, class_preds_lst = self.model(inputs) + if config.out_ref: + (outs_gdt_pred, outs_gdt_label), scaled_preds = scaled_preds + for _idx, (_gdt_pred, _gdt_label) in enumerate(zip(outs_gdt_pred, outs_gdt_label)): + _gdt_pred = nn.functional.interpolate(_gdt_pred, size=_gdt_label.shape[2:], mode='bilinear', align_corners=True).sigmoid() + _gdt_label = _gdt_label.sigmoid() + loss_gdt = self.criterion_gdt(_gdt_pred, _gdt_label) if _idx == 0 else self.criterion_gdt(_gdt_pred, _gdt_label) + loss_gdt + # self.loss_dict['loss_gdt'] = loss_gdt.item() + if None in class_preds_lst: + loss_cls = 0. 
+ else: + loss_cls = self.cls_loss(class_preds_lst, class_labels) * 1.0 + self.loss_dict['loss_cls'] = loss_cls.item() + + # Loss + loss_pix = self.pix_loss(scaled_preds, torch.clamp(gts, 0, 1)) * 1.0 + self.loss_dict['loss_pix'] = loss_pix.item() + # since there may be several losses for sal, the lambdas for them (lambdas_pix) are inside the loss.py + loss = loss_pix + loss_cls + if config.out_ref: + loss = loss + loss_gdt * 1.0 + + self.loss_log.update(loss.item(), inputs.size(0)) + self.optimizer.zero_grad() + if args.use_accelerate: + accelerator.backward(loss) + else: + loss.backward() + self.optimizer.step() + + def train_epoch(self, epoch): + global logger_loss_idx + self.model.train() + self.loss_dict = {} + if epoch > args.epochs + config.finetune_last_epochs: + if config.task == 'Matting': + self.pix_loss.lambdas_pix_last['mae'] *= 1 + self.pix_loss.lambdas_pix_last['mse'] *= 0.9 + self.pix_loss.lambdas_pix_last['ssim'] *= 0.9 + else: + self.pix_loss.lambdas_pix_last['bce'] *= 0 + self.pix_loss.lambdas_pix_last['ssim'] *= 1 + self.pix_loss.lambdas_pix_last['iou'] *= 0.5 + self.pix_loss.lambdas_pix_last['mae'] *= 0.9 + + for batch_idx, batch in enumerate(self.train_loader): + self._train_batch(batch) + # Logger + if batch_idx % 20 == 0: + info_progress = 'Epoch[{0}/{1}] Iter[{2}/{3}].'.format(epoch, args.epochs, batch_idx, len(self.train_loader)) + info_loss = 'Training Losses' + for loss_name, loss_value in self.loss_dict.items(): + info_loss += ', {}: {:.3f}'.format(loss_name, loss_value) + logger.info(' '.join((info_progress, info_loss))) + info_loss = '@==Final== Epoch[{0}/{1}] Training Loss: {loss.avg:.3f} '.format(epoch, args.epochs, loss=self.loss_log) + logger.info(info_loss) + + self.lr_scheduler.step() + return self.loss_log.avg + + +def main(): + + trainer = Trainer( + data_loaders=init_data_loaders(to_be_distributed), + model_opt_lrsch=init_models_optimizers(args.epochs, to_be_distributed) + ) + + for epoch in range(epoch_st, args.epochs+1): + train_loss = trainer.train_epoch(epoch) + # Save checkpoint + # DDP + if epoch >= args.epochs - config.save_last and epoch % config.save_step == 0: + torch.save( + trainer.model.module.state_dict() if to_be_distributed or args.use_accelerate else trainer.model.state_dict(), + os.path.join(args.ckpt_dir, 'epoch_{}.pth'.format(epoch)) + ) + if to_be_distributed: + destroy_process_group() + + +if __name__ == '__main__': + main() diff --git a/engine/BiRefNet/train.sh b/engine/BiRefNet/train.sh new file mode 100644 index 0000000000000000000000000000000000000000..6821cac71d905e20cfe6073d1906af98a7396f44 --- /dev/null +++ b/engine/BiRefNet/train.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Run script +# Settings of training & test for different tasks. +method="$1" +task=$(python3 config.py --print_task) +case "${task}" in + 'DIS5K') epochs=200 && val_last=50 && step=5 ;; + 'COD') epochs=150 && val_last=50 && step=5 ;; + 'HRSOD') epochs=150 && val_last=50 && step=5 ;; + 'General') epochs=250 && val_last=50 && step=2 ;; + 'General-2K') epochs=250 && val_last=30 && step=2 ;; + 'Matting') epochs=100 && val_last=30 && step=2 ;; +esac +testsets=NO # Non-existing folder to skip. +# testsets=TE-COD10K # for COD + +# Train +devices=$2 +nproc_per_node=$(echo ${devices%%,} | grep -o "," | wc -l) + +to_be_distributed=`echo ${nproc_per_node} | awk '{if($e > 0) print "True"; else print "False";}'` + +echo Training started at $(date) +if [ ${to_be_distributed} == "True" ] +then + # Adapt the nproc_per_node by the number of GPUs. 
Give 8989 as the default value of master_port. + echo "Multi-GPU mode received..." + CUDA_VISIBLE_DEVICES=${devices} \ + torchrun --standalone --nproc_per_node $((nproc_per_node+1)) \ + train.py --ckpt_dir ckpt/${method} --epochs ${epochs} \ + --testsets ${testsets} \ + --dist ${to_be_distributed} \ + --resume xx/xx-epoch_244.pth \ + # --use_accelerate +else + echo "Single-GPU mode received..." + CUDA_VISIBLE_DEVICES=${devices} \ + python train.py --ckpt_dir ckpt/${method} --epochs ${epochs} \ + --testsets ${testsets} \ + --dist ${to_be_distributed} \ + --resume xx/xx-epoch_244.pth + # --use_accelerate +fi + +echo Training finished at $(date) diff --git a/engine/BiRefNet/train_test.sh b/engine/BiRefNet/train_test.sh new file mode 100644 index 0000000000000000000000000000000000000000..a8080c0cb37d71b74ccf14c74fc0467f238e3661 --- /dev/null +++ b/engine/BiRefNet/train_test.sh @@ -0,0 +1,12 @@ +#!/bin/sh +# Example: `setsid nohup ./train_test.sh BiRefNet_tmp 0,1,2,3,4,5,6,7 0 &>nohup.log &` + +method=${1:-"BSL"} +devices=${2:-"0,1,2,3,4,5,6,7"} + +bash train.sh ${method} ${devices} + +devices_test=${3:-0} +bash test.sh ${devices_test} + +hostname diff --git a/engine/BiRefNet/utils.py b/engine/BiRefNet/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e97a98c6be6ea9cd672e101ca0f49668d6522307 --- /dev/null +++ b/engine/BiRefNet/utils.py @@ -0,0 +1,99 @@ +import logging +import os +import torch +from torchvision import transforms +import numpy as np +import random +import cv2 +from PIL import Image + + +def path_to_image(path, size=(1024, 1024), color_type=['rgb', 'gray'][0]): + if color_type.lower() == 'rgb': + image = cv2.imread(path) + elif color_type.lower() == 'gray': + image = cv2.imread(path, cv2.IMREAD_GRAYSCALE) + else: + print('Select the color_type to return, either to RGB or gray image.') + return + if size: + image = cv2.resize(image, size, interpolation=cv2.INTER_LINEAR) + if color_type.lower() == 'rgb': + image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)).convert('RGB') + else: + image = Image.fromarray(image).convert('L') + return image + + + +def check_state_dict(state_dict, unwanted_prefixes=['_orig_mod.', 'module.']): + for k, v in list(state_dict.items()): + for unwanted_prefix in unwanted_prefixes: + if k.startswith(unwanted_prefix): + state_dict[k[len(unwanted_prefix):]] = state_dict.pop(k) + break + return state_dict + + +def generate_smoothed_gt(gts): + epsilon = 0.001 + new_gts = (1-epsilon)*gts+epsilon/2 + return new_gts + + +class Logger(): + def __init__(self, path="log.txt"): + self.logger = logging.getLogger('BiRefNet') + self.file_handler = logging.FileHandler(path, "w") + self.stdout_handler = logging.StreamHandler() + self.stdout_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) + self.file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) + self.logger.addHandler(self.file_handler) + self.logger.addHandler(self.stdout_handler) + self.logger.setLevel(logging.INFO) + self.logger.propagate = False + + def info(self, txt): + self.logger.info(txt) + + def close(self): + self.file_handler.close() + self.stdout_handler.close() + + +class AverageMeter(object): + """Computes and stores the average and current value""" + def __init__(self): + self.reset() + + def reset(self): + self.val = 0.0 + self.avg = 0.0 + self.sum = 0.0 + self.count = 0.0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + 
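# A minimal usage sketch for the helpers above (not invoked by the training
# pipeline itself): AverageMeter keeps a sample-weighted running mean of the loss,
# and check_state_dict strips the 'module.' / '_orig_mod.' prefixes that DDP or
# torch.compile add before a checkpoint is loaded. The checkpoint path is a placeholder.
if __name__ == "__main__":
    meter = AverageMeter()
    for loss, batch_size in [(0.9, 4), (0.7, 4), (0.5, 2)]:
        meter.update(loss, n=batch_size)
    print(f"weighted average loss: {meter.avg:.3f}")  # (0.9*4 + 0.7*4 + 0.5*2) / 10 = 0.74

    # state_dict = check_state_dict(torch.load("ckpt/tmp/epoch_120.pth", map_location="cpu"))
    # model.load_state_dict(state_dict)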
+def save_checkpoint(state, path, filename="latest.pth"): + torch.save(state, os.path.join(path, filename)) + + +def save_tensor_img(tenor_im, path): + im = tenor_im.cpu().clone() + im = im.squeeze(0) + tensor2pil = transforms.ToPILImage() + im = tensor2pil(im) + im.save(path) + + +def set_seed(seed): + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + np.random.seed(seed) + random.seed(seed) + torch.backends.cudnn.deterministic = True diff --git a/engine/SegmentAPI/SAM.py b/engine/SegmentAPI/SAM.py new file mode 100644 index 0000000000000000000000000000000000000000..6c6a1b502f992949e234570c9807af9373a07338 --- /dev/null +++ b/engine/SegmentAPI/SAM.py @@ -0,0 +1,540 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2024-08-30 16:26:10 +# @Function : SAM2 Segment class + +import sys + +sys.path.append("./") +import copy +import os +import pdb +import tempfile +import time +from bisect import bisect_left +from dataclasses import dataclass + +import cv2 +import numpy as np +import PIL +import torch +from pytorch3d.ops import sample_farthest_points +from sam2.build_sam import build_sam2 +from sam2.sam2_image_predictor import SAM2ImagePredictor +from torchvision import transforms + +from engine.BiRefNet.models.birefnet import BiRefNet +from engine.ouputs import BaseOutput +from engine.SegmentAPI.base import BaseSeg, Bbox +from engine.SegmentAPI.img_utils import load_image_file + +SAM2_WEIGHT = "pretrained_models/sam2/sam2.1_hiera_large.pt" +BIREFNET_WEIGHT = "pretrained_models/BiRefNet-general-epoch_244.pth" + + +def avaliable_device(): + if torch.cuda.is_available(): + current_device_id = torch.cuda.current_device() + device = f"cuda:{current_device_id}" + else: + device = "cpu" + + return device + + +@dataclass +class SegmentOut(BaseOutput): + masks: np.ndarray + processed_img: np.ndarray + alpha_img: np.ndarray + + +def distance(p1, p2): + return np.sqrt(np.sum((p1 - p2) ** 2)) + + +def FPS(sample, num): + n = sample.shape[0] + center = np.mean(sample, axis=0) + select_p = [] + L = [] + for i in range(n): + L.append(distance(sample[i], center)) + p0 = np.argmax(L) + select_p.append(p0) + L = [] + for i in range(n): + L.append(distance(p0, sample[i])) + select_p.append(np.argmax(L)) + for i in range(num - 2): + for p in range(n): + d = distance(sample[select_p[-1]], sample[p]) + if d <= L[p]: + L[p] = d + select_p.append(np.argmax(L)) + return select_p, sample[select_p] + + +def fill_mask(alpha): + # alpha = np.pad(alpha, ((1, 1), (1, 1)), mode="constant", constant_values=0) + h, w = alpha.shape[:2] + + mask = np.zeros((h + 2, w + 2), np.uint8) + alpha = (alpha * 255).astype(np.uint8) + im_floodfill = alpha.copy() + retval, image, mask, rect = cv2.floodFill(im_floodfill, mask, (0, 0), 255) + im_floodfill_inv = cv2.bitwise_not(im_floodfill) + + alpha = alpha | im_floodfill_inv + alpha = alpha.astype(np.float32) / 255.0 + + # return alpha[1 : h - 1, 1 : w - 1, ...] 
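    # The flood fill above starts at the (0, 0) corner and paints every background
    # pixel reachable from it with 255; OR-ing the inverse back into the alpha
    # therefore fills interior holes while leaving the true background untouched.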
+ return alpha + + +def erode_and_dialted(mask, kernel_size=3, iterations=1): + kernel = np.ones((kernel_size, kernel_size), np.uint8) + + eroded_mask = cv2.erode(mask, kernel, iterations=iterations) + + dilated_mask = cv2.dilate(eroded_mask, kernel, iterations=iterations) + + return dilated_mask + + +def eroded(mask, kernel_size=3, iterations=1): + kernel = np.ones((kernel_size, kernel_size), np.uint8) + eroded_mask = cv2.erode(mask, kernel, iterations=iterations) + + return eroded_mask + + +def model_type(model): + print(next(model.parameters()).device) + + +class SAM2Seg(BaseSeg): + RATIO_MAP = [[512, 1], [1280, 0.6], [1920, 0.4], [3840, 0.2]] + + def tocpu(self): + self.box_prior.cpu() + self.image_predictor.model.cpu() + torch.cuda.empty_cache() + + def tocuda(self): + self.box_prior.cuda() + self.image_predictor.model.cuda() + + def __init__( + self, + config="sam2.1_hiera_l.yaml", + matting_config="resnet50", + background=(1.0, 1.0, 1.0), + wo_supres=False, + ): + super().__init__() + + self.device = avaliable_device() + + try: + sam2_image_model = build_sam2(config, SAM2_WEIGHT) + except: + config = os.path.join("./configs/sam2.1/", config) # sam2.1 case + sam2_image_model = build_sam2(config, SAM2_WEIGHT) + + self.image_predictor = SAM2ImagePredictor(sam2_image_model) + + self.box_prior = None + + # Robust-Human-Matting + + # self.matting_predictor = MattingNetwork(matting_config).eval().cuda() + # self.matting_predictor.load_state_dict(torch.load(MATTING_WEIGHT)) + + self.background = background + self.wo_supers = wo_supres + + def clean_up(self): + self.tmp.cleanup() + + def collect_inputs(self, inputs): + return dict( + img_path=inputs["img_path"], + bbox=inputs["bbox"], + ) + + def _super_resolution(self, input_path): + + low = os.path.abspath(input_path) + high = self.tmp.name + + super_weights = os.path.abspath("./pretrained_models/RealESRGAN_x4plus.pth") + hander = os.path.join(SUPRES_PATH, "inference_realesrgan.py") + + cmd = f"python {hander} -n RealESRGAN_x4plus -i {low} -o {high} --model_path {super_weights} -s 2" + + os.system(cmd) + + return os.path.join(high, os.path.basename(input_path)) + + def predict_bbox(self, img, scale=1.0): + + ratio = self.ratio_mapping(img) + + # uint8 + # [0 1] + img = np.asarray(img).astype(np.float32) / 255.0 + height, width, _ = img.shape + + # [C H W] + img_tensor = torch.from_numpy(img).permute(2, 0, 1) + + bgr = torch.tensor([1.0, 1.0, 1.0]).view(3, 1, 1).cuda() # Green background. + rec = [None] * 4 # Initial recurrent states. + + # predict matting + with torch.no_grad(): + img_tensor = img_tensor.unsqueeze(0).to(self.device) + fgr, pha, *rec = self.matting_predictor( + img_tensor.to(self.device), + *rec, + downsample_ratio=ratio, + ) # Cycle the recurrent states. 
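        # Binarize the predicted matting alpha at 0.5; the tight box fitted to the
        # foreground pixels below is used as the person prior.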
+ + pha[pha < 0.5] = 0.0 + pha[pha >= 0.5] = 1.0 + pha = pha[0].permute(1, 2, 0).detach().cpu().numpy() + + # obtain bbox + _h, _w, _ = np.where(pha == 1) + + whwh = [ + _w.min().item(), + _h.min().item(), + _w.max().item(), + _h.max().item(), + ] + + box = Bbox(whwh) + + # scale box to 1.05 + scale_box = box.scale(1.00, width=width, height=height) + + return scale_box, pha[..., 0] + + def birefnet_predict_bbox(self, img, scale=1.0): + + # img: RGB-order + + if self.box_prior == None: + from engine.BiRefNet.utils import check_state_dict + + birefnet = BiRefNet(bb_pretrained=False) + state_dict = torch.load(BIREFNET_WEIGHT, map_location="cpu") + state_dict = check_state_dict(state_dict) + birefnet.load_state_dict(state_dict) + device = avaliable_device() + torch.set_float32_matmul_precision(["high", "highest"][0]) + + birefnet.to(device) + self.box_prior = birefnet + self.box_prior.eval() + self.box_transform = transforms.Compose( + [ + transforms.Resize((1024, 1024)), + transforms.ToTensor(), + transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), + ] + ) + print("BiRefNet is ready to use.") + else: + device = avaliable_device() + self.box_prior.to(device) + + height, width, _ = img.shape + + image = PIL.Image.fromarray(img) + + input_images = self.box_transform(image).unsqueeze(0).to("cuda") + with torch.no_grad(): + preds = self.box_prior(input_images)[-1].sigmoid().cpu() + pha = (preds[0]).squeeze(0).detach().numpy() + + pha = cv2.resize(pha, (width, height)) + + masks = copy.deepcopy(pha[..., None]) + + masks[masks < 0.3] = 0.0 + masks[masks >= 0.3] = 1.0 + + # obtain bbox + _h, _w, _ = np.where(masks == 1) + + whwh = [ + _w.min().item(), + _h.min().item(), + _w.max().item(), + _h.max().item(), + ] + + box = Bbox(whwh) + + # scale box to 1.05 + scale_box = box.scale(scale=scale, width=width, height=height) + + return scale_box, pha + + def rembg_predict_bbox(self, img, scale=1.0): + + height, width, _ = img.shape + + with torch.no_grad(): + img_rmbg = img[..., ::-1] # rgb2bgr + img_rmbg = remove(img_rmbg) + img_rmbg = img_rmbg[..., :3] + pha = copy.deepcopy(img_rmbg[..., -1:]) + + masks = copy.deepcopy(pha) + + masks[masks < 1.0] = 0.0 + masks[masks >= 1.0] = 1.0 + + # obtain bbox + _h, _w, _ = np.where(masks == 1) + + whwh = [ + _w.min().item(), + _h.min().item(), + _w.max().item(), + _h.max().item(), + ] + + box = Bbox(whwh) + + # scale box to 1.05 + scale_box = box.scale(scale=scale, width=width, height=height) + + return scale_box, pha[..., 0].astype(np.float32) / 255.0 + + def yolo_predict_bbox(self, img, scale=1.0, threshold=0.2): + if self.prior == None: + from ultralytics import YOLO + + pdb.set_trace() + + height, width, _ = img.shape + + with torch.no_grad(): + results = yolo_seg(img[..., ::-1]) + for result in results: + masks = result.masks.data[result.boxes.cls == 0] + if masks.shape[0] >= 1: + masks[masks >= threshold] = 1 + masks[masks < threshold] = 0 + masks = masks.sum(dim=0) + + pha = masks.detach().cpu().numpy() + pha = cv2.resize(pha, (width, height), interpolation=cv2.INTER_AREA)[..., None] + + pha[pha >= 0.5] = 1 + pha[pha < 0.5] = 0 + + masks = copy.deepcopy(pha) + + pha = pha * 255.0 + # obtain bbox + _h, _w, _ = np.where(masks == 1) + + whwh = [ + _w.min().item(), + _h.min().item(), + _w.max().item(), + _h.max().item(), + ] + + box = Bbox(whwh) + + # scale box to 1.05 + scale_box = box.scale(scale=scale, width=width, height=height) + + return scale_box, pha[..., 0].astype(np.float32) / 255.0 + + def ratio_mapping(self, img): + + my_ratio_map = 
self.RATIO_MAP + + ratio_landmarks = [v[0] for v in my_ratio_map] + + ratio_v = [v[1] for v in my_ratio_map] + h, w, _ = img.shape + + max_length = min(h, w) + + low_bound = bisect_left( + ratio_landmarks, max_length, lo=0, hi=len(ratio_landmarks) + ) + + if 0 == low_bound: + return 1.0 + elif low_bound == len(ratio_landmarks): + return ratio_v[-1] + else: + _l = ratio_v[low_bound - 1] + _r = ratio_v[low_bound] + + _l_land = ratio_landmarks[low_bound - 1] + _r_land = ratio_landmarks[low_bound] + cur_ratio = _l + (_r - _l) * (max_length - _l_land) / (_r_land - _l_land) + + return cur_ratio + + def get_img(self, img_path, sup_res=True): + + img = cv2.imread(img_path) + img = img[..., ::-1].copy() # bgr2rgb + + if self.wo_supers: + return img + + return img + + def compute_coords(self, pha, bbox): + + node_prompts = [] + + H, W = pha.shape + y_indices, x_indices = np.indices((H, W)) + coors = np.stack((x_indices, y_indices), axis=-1) + + # reduce the effect from pha + # pha = eroded((pha * 255).astype(np.uint8), 3, 3) / 255.0 + + pha_coors = np.repeat(pha[..., None], 2, axis=2) + coors_points = (coors * pha_coors).sum(axis=0).sum(axis=0) / (pha.sum() + 1e-6) + node_prompts.append(coors_points.tolist()) + + _h, _w = np.where(pha > 0.5) + + sample_ps = torch.from_numpy(np.stack((_w, _h), axis=-1).astype(np.float32)).to( + avaliable_device() + ) + + # positive prompts + node_prompts_fps, _ = sample_farthest_points(sample_ps[None], K=5) + node_prompts_fps = ( + node_prompts_fps[0].detach().cpu().numpy().astype(np.int32).tolist() + ) + + node_prompts.extend(node_prompts_fps) + node_prompts_label = [1 for _ in range(len(node_prompts))] + + return node_prompts, node_prompts_label + + def _forward(self, img_path, bbox, sup_res=True): + + img = self.get_img(img_path, sup_res) + + if bbox is None: + # bbox, pha = self.predict_bbox(img) + # bbox, pha = self.rembg_predict_bbox(img, 1.01) + # bbox, pha = self.yolo_predict_bbox(img) + bbox, pha = self.birefnet_predict_bbox(img, 1.01) + + box = bbox.to_whwh() + bbox = box.get_box() + + point_coords, point_coords_label = self.compute_coords(pha, bbox) + + self.image_predictor.set_image(img) + + masks, scores, logits = self.image_predictor.predict( + point_coords=point_coords, + point_labels=point_coords_label, + box=bbox, + multimask_output=False, + ) + + alpha = masks[0] + + # fill-mask NO USE + # alpha = fill_mask(alpha) + # alpha = erode_and_dialted( + # (alpha * 255).astype(np.uint8), kernel_size=3, iterations=3 + # ) + # alpha = alpha.astype(np.float32) / 255.0 + + img_float = img.astype(np.float32) / 255.0 + process_img = ( + img_float * alpha[..., None] + (1 - alpha[..., None]) * self.background + ) + process_img = (process_img * 255).astype(np.uint8) + + # using for draw box + # process_img = cv2.rectangle(process_img, bbox[:2], bbox[2:], (0, 0, 255), 2) + process_img = process_img.astype(np.float) / 255.0 + + process_pha_img = ( + img_float * pha[..., None] + (1 - pha[..., None]) * self.background + ) + + return SegmentOut( + masks=alpha, processed_img=process_img, alpha_img=process_pha_img[...] 
+ ) + + @torch.no_grad() + def __call__(self, **inputs): + + self.tmp = tempfile.TemporaryDirectory() + + self.collect_inputs(inputs) + + out = self._forward(**inputs) + + self.clean_up() + return out + + +def get_parse(): + import argparse + + parser = argparse.ArgumentParser(description="") + parser.add_argument("-i", "--input", required=True, help="input path") + parser.add_argument("-o", "--output", required=True, help="output path") + parser.add_argument("--mask", action="store_true", help="mask bool") + parser.add_argument( + "--wo_super_reso", action="store_true", help="whether using super_resolution" + ) + args = parser.parse_args() + return args + + +def main(): + + opt = get_parse() + img_list = os.listdir(opt.input) + img_names = [os.path.join(opt.input, img_name) for img_name in img_list] + + os.makedirs(opt.output, exist_ok=True) + + model = SAM2Seg(wo_supres=opt.wo_super_reso) + + for img in img_names: + + print(f"processing {img}") + out = model(img_path=img, bbox=None) + + save_path = os.path.join(opt.output, os.path.basename(img)) + + alpha = fill_mask(out.masks) + alpha = erode_and_dialted( + (alpha * 255).astype(np.uint8), kernel_size=3, iterations=3 + ) + save_img = alpha + cv2.imwrite(save_path, save_img) + + +if __name__ == "__main__": + + main() diff --git a/engine/SegmentAPI/__pycache__/SAM.cpython-310.pyc b/engine/SegmentAPI/__pycache__/SAM.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e3dcc91debe6011913dbc1199d266a2dffa88d02 Binary files /dev/null and b/engine/SegmentAPI/__pycache__/SAM.cpython-310.pyc differ diff --git a/engine/SegmentAPI/__pycache__/base.cpython-310.pyc b/engine/SegmentAPI/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..62600c8bf646677eb138e39af802cedf73a8344f Binary files /dev/null and b/engine/SegmentAPI/__pycache__/base.cpython-310.pyc differ diff --git a/engine/SegmentAPI/__pycache__/img_utils.cpython-310.pyc b/engine/SegmentAPI/__pycache__/img_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b13748cd201231d13d4423d5d980ecb6d47a89b Binary files /dev/null and b/engine/SegmentAPI/__pycache__/img_utils.cpython-310.pyc differ diff --git a/engine/SegmentAPI/base.py b/engine/SegmentAPI/base.py new file mode 100644 index 0000000000000000000000000000000000000000..9464f4ad28b5f423417b73ef5f28525ab42ba693 --- /dev/null +++ b/engine/SegmentAPI/base.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2024-08-30 20:50:27 +# @Function : The class defines bbox, base-seg module + +import copy + +import cv2 +import numpy as np +import torch + + +class BaseModel(object): + """ + Simple BaseModel + """ + + def cuda(self): + self.model.cuda() + return self + + def cpu(self): + self.model.cpu() + return self + + def float(self): + self.model.float() + return self + + def to(self, device): + self.model.to(device) + return self + + def eval(self): + self.model.eval() + + return self + + def train(self): + self.model.train() + return self + + def __call__(self, x): + raise NotImplementedError + + def __repr__(self): + + return f"model: \n{self.model}" + + +def get_dtype_string(arr): + if arr.dtype == np.uint8: + return "uint8" + elif arr.dtype == np.float32: + return "float32" + elif arr.dtype == np.float64: + return "float" + else: + return "unknow" + + +class BaseSeg(BaseModel): + def __init__(self): + pass + + +class Bbox: + 
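    """Axis-aligned 2D bounding box.

    mode="whwh" stores the (left, top, right, bottom) corners, while mode="xywh"
    stores (center_x, center_y, width, height). A small illustrative conversion
    (the numbers are made up):

        Bbox([10, 20, 110, 220], mode="whwh").to_xywh().get_box()  # -> [60, 120, 100, 200]
    """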
def __init__(self, box, mode="whwh"): + + assert len(box) == 4 + assert mode in ["whwh", "xywh"] + self.box = box + self.mode = mode + + def to_xywh(self): + + if self.mode == "whwh": + + l, t, r, b = self.box + + center_x = (l + r) / 2 + center_y = (t + b) / 2 + width = r - l + height = b - t + return Bbox([center_x, center_y, width, height], mode="xywh") + else: + return self + + def to_whwh(self): + + if self.mode == "whwh": + return self + else: + + cx, cy, w, h = self.box + l = cx - w // 2 + t = cy - h // 2 + r = cx + w - (w // 2) + b = cy + h - (h // 2) + + return Bbox([l, t, r, b], mode="whwh") + + def area(self): + + box = self.to_xywh() + _, __, w, h = box.box + + return w * h + + def get_box(self): + return list(map(int, self.box)) + + def scale(self, scale, width, height): + new_box = self.to_xywh() + cx, cy, w, h = new_box.get_box() + w = w * scale + h = h * scale + + l = cx - w // 2 + t = cy - h // 2 + r = cx + w - (w // 2) + b = cy + h - (h // 2) + + l = int(max(l, 0)) + t = int(max(t, 0)) + r = int(min(r, width)) + b = int(min(b, height)) + + return Bbox([l, t, r, b], mode="whwh") + + def __repr__(self): + box = self.to_whwh() + l, t, r, b = box.box + + return f"BBox(left={l}, top={t}, right={r}, bottom={b})" + + +class Image: + """TODO need to debug""" + + TYPE_ORDER = ["uint8", "float32", "float"] + ORDER = ["RGB", "BGR"] + MODE = ["numpy"] + + def __init__(self, input, order="RGB", type_mode="uint8"): + """Only support 3 Channel Image""" + if isinstance(input, str): + self.data = self.read_image(input, type_mode, order) + else: + self.data = self.get_image(input, type_mode, order) + + self.order = order + self.type_mode = type_mode + + def get_image(self, input, type_mode, order): + if isinstance(input, Image): + return input.to_numpy(type_mode, order) + elif isinstance(input, np.ndarray): + self.data = input + self.order = "RGB" # default + self.type_mode = get_dtype_string(input) + + return self.to_numpy(type_mode, order) + else: + raise NotImplementedError + + def to_numpy(self, type_mode="uint8", order="RGB"): + + data = copy.deepcopy(self.data) + + if not order == self.order: + return data[..., ::-1] # only support RGB -> BGR or BGR -> RGB + + if self.type_mode == type_mode: + return data + else: + if self.type_mode == "float32": + return (self.data / 255.0).astype(np.float32) + elif self.type_mode == "float": + return (self.data / 255.0).astype(np.float64) + + def to_tensor(self, order): + data = self.to_numpy(type_mode="float32", order=order) + return torch.from_numpy(data) + + def read_image( + self, + path, + mode, + order, + ): + """read an image file into various formats and color mode. + + Args: + path (str): path to the image file. + mode (Literal["float", "uint8", "pil", "torch", "tensor"], optional): returned image format. Defaults to "float". + float: float32 numpy array, range [0, 1]; + uint8: uint8 numpy array, range [0, 255]; + pil: PIL image; + torch/tensor: float32 torch tensor, range [0, 1]; + order (Literal["RGB", "RGBA", "BGR", "BGRA"], optional): channel order. Defaults to "RGB". + + Note: + By default this function will convert RGBA image to white-background RGB image. Use ``order="RGBA"`` to keep the alpha channel. + + Returns: + Union[np.ndarray, PIL.Image, torch.Tensor]: the image array. 
+ """ + + if mode == "pil": + return Image.open(path).convert(order) + + img = cv2.imread(path, cv2.IMREAD_UNCHANGED) + + # cvtColor + if len(img.shape) == 3: # ignore if gray scale + if order in ["RGB", "RGBA"]: + if img.shape[-1] == 4: + img = cv2.cvtColor(img, cv2.COLOR_BGRA2RGBA) + elif img.shape[-1] == 3: + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + + # mix background + if img.shape[-1] == 4 and "A" not in order: + img = img.astype(np.float32) / 255 + img = img[..., :3] * img[..., 3:] + (1 - img[..., 3:]) + + # mode + if mode == "uint8": + if img.dtype != np.uint8: + img = (img * 255).astype(np.uint8) + elif mode == "float": + if img.dtype == np.uint8: + img = img.astype(np.float32) / 255 + else: + raise ValueError(f"Unknown read_image mode {mode}") + + return img diff --git a/engine/SegmentAPI/configs/sam2.1_hiera_l.yaml b/engine/SegmentAPI/configs/sam2.1_hiera_l.yaml new file mode 100644 index 0000000000000000000000000000000000000000..57d9d08531b9d371f2cb074028824f718d58a775 --- /dev/null +++ b/engine/SegmentAPI/configs/sam2.1_hiera_l.yaml @@ -0,0 +1,120 @@ +# @package _global_ + +# Model +model: + _target_: sam2.modeling.sam2_base.SAM2Base + image_encoder: + _target_: sam2.modeling.backbones.image_encoder.ImageEncoder + scalp: 1 + trunk: + _target_: sam2.modeling.backbones.hieradet.Hiera + embed_dim: 144 + num_heads: 2 + stages: [2, 6, 36, 4] + global_att_blocks: [23, 33, 43] + window_pos_embed_bkg_spatial_size: [7, 7] + window_spec: [8, 4, 16, 8] + neck: + _target_: sam2.modeling.backbones.image_encoder.FpnNeck + position_encoding: + _target_: sam2.modeling.position_encoding.PositionEmbeddingSine + num_pos_feats: 256 + normalize: true + scale: null + temperature: 10000 + d_model: 256 + backbone_channel_list: [1152, 576, 288, 144] + fpn_top_down_levels: [2, 3] # output level 0 and 1 directly use the backbone features + fpn_interp_model: nearest + + memory_attention: + _target_: sam2.modeling.memory_attention.MemoryAttention + d_model: 256 + pos_enc_at_input: true + layer: + _target_: sam2.modeling.memory_attention.MemoryAttentionLayer + activation: relu + dim_feedforward: 2048 + dropout: 0.1 + pos_enc_at_attn: false + self_attention: + _target_: sam2.modeling.sam.transformer.RoPEAttention + rope_theta: 10000.0 + feat_sizes: [32, 32] + embedding_dim: 256 + num_heads: 1 + downsample_rate: 1 + dropout: 0.1 + d_model: 256 + pos_enc_at_cross_attn_keys: true + pos_enc_at_cross_attn_queries: false + cross_attention: + _target_: sam2.modeling.sam.transformer.RoPEAttention + rope_theta: 10000.0 + feat_sizes: [32, 32] + rope_k_repeat: True + embedding_dim: 256 + num_heads: 1 + downsample_rate: 1 + dropout: 0.1 + kv_in_dim: 64 + num_layers: 4 + + memory_encoder: + _target_: sam2.modeling.memory_encoder.MemoryEncoder + out_dim: 64 + position_encoding: + _target_: sam2.modeling.position_encoding.PositionEmbeddingSine + num_pos_feats: 64 + normalize: true + scale: null + temperature: 10000 + mask_downsampler: + _target_: sam2.modeling.memory_encoder.MaskDownSampler + kernel_size: 3 + stride: 2 + padding: 1 + fuser: + _target_: sam2.modeling.memory_encoder.Fuser + layer: + _target_: sam2.modeling.memory_encoder.CXBlock + dim: 256 + kernel_size: 7 + padding: 3 + layer_scale_init_value: 1e-6 + use_dwconv: True # depth-wise convs + num_layers: 2 + + num_maskmem: 7 + image_size: 1024 + # apply scaled sigmoid on mask logits for memory encoder, and directly feed input mask as output mask + sigmoid_scale_for_mem_enc: 20.0 + sigmoid_bias_for_mem_enc: -10.0 + use_mask_input_as_output_without_sam: true + 
# Memory + directly_add_no_mem_embed: true + no_obj_embed_spatial: true + # use high-resolution feature map in the SAM mask decoder + use_high_res_features_in_sam: true + # output 3 masks on the first click on initial conditioning frames + multimask_output_in_sam: true + # SAM heads + iou_prediction_use_sigmoid: True + # cross-attend to object pointers from other frames (based on SAM output tokens) in the encoder + use_obj_ptrs_in_encoder: true + add_tpos_enc_to_obj_ptrs: true + proj_tpos_enc_in_obj_ptrs: true + use_signed_tpos_enc_to_obj_ptrs: true + only_obj_ptrs_in_the_past_for_eval: true + # object occlusion prediction + pred_obj_scores: true + pred_obj_scores_mlp: true + fixed_no_obj_ptr: true + # multimask tracking settings + multimask_output_for_tracking: true + use_multimask_token_for_obj_ptr: true + multimask_min_pt_num: 0 + multimask_max_pt_num: 1 + use_mlp_for_obj_ptr_proj: true + # Compilation flag + compile_image_encoder: False \ No newline at end of file diff --git a/engine/SegmentAPI/img_utils.py b/engine/SegmentAPI/img_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b669922bf7295abd13119694ce038746bb57dcc8 --- /dev/null +++ b/engine/SegmentAPI/img_utils.py @@ -0,0 +1,63 @@ +import numpy as np +import PIL.Image +import PIL.ImageOps + + +def exif_transpose(img): + if not img: + return img + + exif_orientation_tag = 274 + + # Check for EXIF data (only present on some files) + if ( + hasattr(img, "_getexif") + and isinstance(img._getexif(), dict) + and exif_orientation_tag in img._getexif() + ): + exif_data = img._getexif() + orientation = exif_data[exif_orientation_tag] + + # Handle EXIF Orientation + if orientation == 1: + # Normal image - nothing to do! + pass + elif orientation == 2: + # Mirrored left to right + img = img.transpose(PIL.Image.FLIP_LEFT_RIGHT) + elif orientation == 3: + # Rotated 180 degrees + img = img.rotate(180) + elif orientation == 4: + # Mirrored top to bottom + img = img.rotate(180).transpose(PIL.Image.FLIP_LEFT_RIGHT) + elif orientation == 5: + # Mirrored along top-left diagonal + img = img.rotate(-90, expand=True).transpose(PIL.Image.FLIP_LEFT_RIGHT) + elif orientation == 6: + # Rotated 90 degrees + img = img.rotate(-90, expand=True) + elif orientation == 7: + # Mirrored along top-right diagonal + img = img.rotate(90, expand=True).transpose(PIL.Image.FLIP_LEFT_RIGHT) + elif orientation == 8: + # Rotated 270 degrees + img = img.rotate(90, expand=True) + + return img + + +def load_image_file(file, mode="RGB"): + # Load the image with PIL + img = PIL.Image.open(file) + + if hasattr(PIL.ImageOps, "exif_transpose"): + # Very recent versions of PIL can do exit transpose internally + img = PIL.ImageOps.exif_transpose(img) + else: + # Otherwise, do the exif transpose ourselves + img = exif_transpose(img) + + img = img.convert(mode) + + return np.array(img) diff --git a/engine/__init__.py b/engine/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/engine/__pycache__/__init__.cpython-310.pyc b/engine/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e4e2507cac763f542e0c9f3678a9ac4e87fc143b Binary files /dev/null and b/engine/__pycache__/__init__.cpython-310.pyc differ diff --git a/engine/__pycache__/ouputs.cpython-310.pyc b/engine/__pycache__/ouputs.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae681e9b5d9e68eb762e92f75c354a181049efd0 Binary files 
/dev/null and b/engine/__pycache__/ouputs.cpython-310.pyc differ diff --git a/engine/ouputs.py b/engine/ouputs.py new file mode 100644 index 0000000000000000000000000000000000000000..471882235ac0347a72624420e78e2d94eacfd7e8 --- /dev/null +++ b/engine/ouputs.py @@ -0,0 +1,114 @@ +"""Copy from diffusers +""" + +# Copyright 2024 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Generic utilities +""" + +import importlib +from collections import OrderedDict +from dataclasses import fields, is_dataclass +from typing import Any, Tuple + +import numpy as np + + +class BaseOutput(OrderedDict): + """ + Base class for all model outputs as dataclass. Has a `__getitem__` that allows indexing by integer or slice (like a + tuple) or strings (like a dictionary) that will ignore the `None` attributes. Otherwise behaves like a regular + Python dictionary. + + + + You can't unpack a [`BaseOutput`] directly. Use the [`~utils.BaseOutput.to_tuple`] method to convert it to a tuple + first. + + + """ + + def __post_init__(self) -> None: + class_fields = fields(self) + + # Safety and consistency checks + if not len(class_fields): + raise ValueError(f"{self.__class__.__name__} has no fields.") + + first_field = getattr(self, class_fields[0].name) + other_fields_are_none = all( + getattr(self, field.name) is None for field in class_fields[1:] + ) + + if other_fields_are_none and isinstance(first_field, dict): + for key, value in first_field.items(): + self[key] = value + else: + for field in class_fields: + v = getattr(self, field.name) + if v is not None: + self[field.name] = v + + def __delitem__(self, *args, **kwargs): + raise Exception( + f"You cannot use ``__delitem__`` on a {self.__class__.__name__} instance." + ) + + def setdefault(self, *args, **kwargs): + raise Exception( + f"You cannot use ``setdefault`` on a {self.__class__.__name__} instance." + ) + + def pop(self, *args, **kwargs): + raise Exception( + f"You cannot use ``pop`` on a {self.__class__.__name__} instance." + ) + + def update(self, *args, **kwargs): + raise Exception( + f"You cannot use ``update`` on a {self.__class__.__name__} instance." 
+ ) + + def __getitem__(self, k: Any) -> Any: + if isinstance(k, str): + inner_dict = dict(self.items()) + return inner_dict[k] + else: + return self.to_tuple()[k] + + def __setattr__(self, name: Any, value: Any) -> None: + if name in self.keys() and value is not None: + # Don't call self.__setitem__ to avoid recursion errors + super().__setitem__(name, value) + super().__setattr__(name, value) + + def __setitem__(self, key, value): + # Will raise a KeyException if needed + super().__setitem__(key, value) + # Don't call self.__setattr__ to avoid recursion errors + super().__setattr__(key, value) + + def __reduce__(self): + if not is_dataclass(self): + return super().__reduce__() + callable, _args, *remaining = super().__reduce__() + args = tuple(getattr(self, field.name) for field in fields(self)) + return callable, args, *remaining + + def to_tuple(self) -> Tuple[Any, ...]: + """ + Convert self to a tuple containing all the attributes/keys that are not `None`. + """ + return tuple(self[k] for k in self.keys()) diff --git a/engine/pose_estimation/.gitignore b/engine/pose_estimation/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..6bf4455a8c7c5afcc067f43ac88b163365b2aac7 --- /dev/null +++ b/engine/pose_estimation/.gitignore @@ -0,0 +1,24 @@ +*__pycache__ +demo_out + +.multihmr +._.DS_Store +.DS_Store +tmp_data +*.jpg +._*.jpg +example_data +motionshop_case +motionshop_eval_results +motionshop_testset +.idea + +visual_results + +.ipynb_checkpoints + +dataset +data +logs +.vscode +workspace \ No newline at end of file diff --git a/engine/pose_estimation/__init__.py b/engine/pose_estimation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/engine/pose_estimation/blocks/__init__.py b/engine/pose_estimation/blocks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a34f856ba3e4821bd737f232356e1e4350c51c04 --- /dev/null +++ b/engine/pose_estimation/blocks/__init__.py @@ -0,0 +1,10 @@ + +from .camera_embed import FourierPositionEncoding + +from .dinov2 import Dinov2Backbone + +from .cross_attn_transformer import TransformerDecoder + +from .smpl_layer import SMPL_Layer + +#from .detector import DetectionModel \ No newline at end of file diff --git a/engine/pose_estimation/blocks/camera_embed.py b/engine/pose_estimation/blocks/camera_embed.py new file mode 100644 index 0000000000000000000000000000000000000000..4c78f3268744b4c799494e64f57254a7340745ad --- /dev/null +++ b/engine/pose_estimation/blocks/camera_embed.py @@ -0,0 +1,58 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. 
+# CC BY-NC-SA 4.0 license + +import torch +from torch import nn +import numpy as np + +class FourierPositionEncoding(nn.Module): + def __init__(self, n, num_bands, max_resolution): + """ + Module that generate Fourier encoding - no learning involved + """ + super().__init__() + + self.num_bands = num_bands + self.max_resolution = [max_resolution] * n + + @property + def channels(self): + """ + Return the output dimension + """ + num_dims = len(self.max_resolution) + encoding_size = self.num_bands * num_dims + encoding_size *= 2 # sin-cos + encoding_size += num_dims # concat + + return encoding_size + + def forward(self, pos): + """ + Forward pass that take rays as input and generate Fourier positional encodings + """ + fourier_pos_enc = _generate_fourier_features(pos, num_bands=self.num_bands, max_resolution=self.max_resolution) + return fourier_pos_enc + + +def _generate_fourier_features(pos, num_bands, max_resolution): + """Generate fourier features from a given set of positions and frequencies""" + b, n = pos.shape[:2] + device = pos.device + + # Linear frequency sampling + min_freq = 1.0 + freq_bands = torch.stack([torch.linspace(start=min_freq, end=res / 2, steps=num_bands, device=device) for res in max_resolution], dim=0) + + # Stacking + per_pos_features = torch.stack([pos[i, :, :][:, :, None] * freq_bands[None, :, :] for i in range(b)], 0) + per_pos_features = per_pos_features.reshape(b, n, -1) + + # Sin-Cos + per_pos_features = torch.cat([torch.sin(np.pi * per_pos_features), torch.cos(np.pi * per_pos_features)], dim=-1) + + # Concat with initial pos + per_pos_features = torch.cat([pos, per_pos_features], dim=-1) + + return per_pos_features \ No newline at end of file diff --git a/engine/pose_estimation/blocks/cross_attn_transformer.py b/engine/pose_estimation/blocks/cross_attn_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..df28bc9f133c3807f3a8bef45f1de89651f3de7c --- /dev/null +++ b/engine/pose_estimation/blocks/cross_attn_transformer.py @@ -0,0 +1,359 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. 
+# CC BY-NC-SA 4.0 license + +from typing import Callable, Optional +import torch +from torch import nn +from inspect import isfunction +from einops import rearrange + +class AdaptiveLayerNorm1D(torch.nn.Module): + """ + Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/t_cond_mlp.py#L7 + """ + def __init__(self, data_dim: int, norm_cond_dim: int): + super().__init__() + if data_dim <= 0: + raise ValueError(f"data_dim must be positive, but got {data_dim}") + if norm_cond_dim <= 0: + raise ValueError(f"norm_cond_dim must be positive, but got {norm_cond_dim}") + self.norm = torch.nn.LayerNorm( + data_dim + ) # TODO: Check if elementwise_affine=True is correct + self.linear = torch.nn.Linear(norm_cond_dim, 2 * data_dim) + torch.nn.init.zeros_(self.linear.weight) + torch.nn.init.zeros_(self.linear.bias) + + def forward(self, x: torch.Tensor, t: torch.Tensor) -> torch.Tensor: + # x: (batch, ..., data_dim) + # t: (batch, norm_cond_dim) + # return: (batch, data_dim) + x = self.norm(x) + alpha, beta = self.linear(t).chunk(2, dim=-1) + + # Add singleton dimensions to alpha and beta + if x.dim() > 2: + alpha = alpha.view(alpha.shape[0], *([1] * (x.dim() - 2)), alpha.shape[1]) + beta = beta.view(beta.shape[0], *([1] * (x.dim() - 2)), beta.shape[1]) + + return x * (1 + alpha) + beta + + +def normalization_layer(norm: Optional[str], dim: int, norm_cond_dim: int = -1): + """ + Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/t_cond_mlp.py#L48 + """ + if norm == "batch": + return torch.nn.BatchNorm1d(dim) + elif norm == "layer": + return torch.nn.LayerNorm(dim) + elif norm == "ada": + assert norm_cond_dim > 0, f"norm_cond_dim must be positive, got {norm_cond_dim}" + return AdaptiveLayerNorm1D(dim, norm_cond_dim) + elif norm is None: + return torch.nn.Identity() + else: + raise ValueError(f"Unknown norm: {norm}") + + +def exists(val): + "Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L17" + return val is not None + + +def default(val, d): + "Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L21" + if exists(val): + return val + return d() if isfunction(d) else d + + +class PreNorm(nn.Module): + """ + Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L27 + """ + def __init__(self, dim: int, fn: Callable, norm: str = "layer", norm_cond_dim: int = -1): + super().__init__() + self.norm = normalization_layer(norm, dim, norm_cond_dim) + self.fn = fn + + def forward(self, x: torch.Tensor, *args, **kwargs): + if isinstance(self.norm, AdaptiveLayerNorm1D): + return self.fn(self.norm(x, *args), **kwargs) + else: + return self.fn(self.norm(x), **kwargs) + + +class FeedForward(nn.Module): + """ + Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L40 + """ + def __init__(self, dim, hidden_dim, dropout=0.0): + super().__init__() + self.net = nn.Sequential( + nn.Linear(dim, hidden_dim), + nn.GELU(), + nn.Dropout(dropout), + nn.Linear(hidden_dim, dim), + nn.Dropout(dropout), + ) + + def forward(self, x): + return self.net(x) + + +class Attention(nn.Module): + """ + 
Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L55 + """ + def __init__(self, dim, heads=8, dim_head=64, dropout=0.0): + super().__init__() + inner_dim = dim_head * heads + project_out = not (heads == 1 and dim_head == dim) + + self.heads = heads + self.scale = dim_head**-0.5 + + self.attend = nn.Softmax(dim=-1) + self.dropout = nn.Dropout(dropout) + + self.to_qkv = nn.Linear(dim, inner_dim * 3, bias=False) + + self.to_out = ( + nn.Sequential(nn.Linear(inner_dim, dim), nn.Dropout(dropout)) + if project_out + else nn.Identity() + ) + + def forward(self, x, mask=None): + + qkv = self.to_qkv(x).chunk(3, dim=-1) + # n --> the num query dimension + + # TODO reshape b into b2 n and mask. + q, k, v = map(lambda t: rearrange(t, "b n (h d) -> b h n d", h=self.heads), qkv) + + if mask is not None: + q, k, v = [x * mask[:, None, :, None] for x in [q, k, v]] + + # q, k, v: [13:51:03.400365] torch.Size([22, 1, 256]) + #q, k ,vk after reshape: torch.Size([16, 8, 1, 32]) + dots = torch.matmul(q, k.transpose(-1, -2)) * self.scale + + if mask is not None: + dots = dots - (1 - mask)[:, None, None, :] * 10e10 + + attn = self.attend(dots) + + if mask is not None: # Just for good measure; this is probably overkill + attn = attn * mask[:, None, None, :] + + attn = self.dropout(attn) + + out = torch.matmul(attn, v) + + # out shape :torch.Size([16, 8, 1, 32]) + + out = rearrange(out, "b h n d -> b n (h d)") + return self.to_out(out) + + +class CrossAttention(nn.Module): + "Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L89" + def __init__(self, dim, context_dim=None, heads=8, dim_head=64, dropout=0.0): + super().__init__() + inner_dim = dim_head * heads + project_out = not (heads == 1 and dim_head == dim) + + self.heads = heads + self.scale = dim_head**-0.5 + + self.attend = nn.Softmax(dim=-1) + self.dropout = nn.Dropout(dropout) + + context_dim = default(context_dim, dim) + self.to_kv = nn.Linear(context_dim, inner_dim * 2, bias=False) + self.to_q = nn.Linear(dim, inner_dim, bias=False) + + self.to_out = ( + nn.Sequential(nn.Linear(inner_dim, dim), nn.Dropout(dropout)) + if project_out + else nn.Identity() + ) + + def forward(self, x, context=None, mask=None): + + context = default(context, x) + k, v = self.to_kv(context).chunk(2, dim=-1) + q = self.to_q(x) + q, k, v = map(lambda t: rearrange(t, "b n (h d) -> b h n d", h=self.heads), [q, k, v]) + + if mask is not None: + q = q * mask[:, None, :, None] + dots = torch.matmul(q, k.transpose(-1, -2)) * self.scale + if mask is not None: + dots = dots - (1 - mask).float()[:, None, :, None] * 1e6 + attn = self.attend(dots) + attn = self.dropout(attn) + + out = torch.matmul(attn, v) + + if mask is not None: # Just for good measure; this is probably overkill + out = out * mask[:, None, :, None] + out = rearrange(out, "b h n d -> b n (h d)") + return self.to_out(out) + +class TransformerCrossAttn(nn.Module): + "Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L160" + def __init__( + self, + dim: int, + depth: int, + heads: int, + dim_head: int, + mlp_dim: int, + dropout: float = 0.0, + norm: str = "layer", + norm_cond_dim: int = -1, + context_dim: Optional[int] = None, + ): + super().__init__() + self.layers = nn.ModuleList([]) + for _ in range(depth): + sa = 
Attention(dim, heads=heads, dim_head=dim_head, dropout=dropout) + ca = CrossAttention( + dim, context_dim=context_dim, heads=heads, dim_head=dim_head, dropout=dropout + ) + ff = FeedForward(dim, mlp_dim, dropout=dropout) + self.layers.append( + nn.ModuleList( + [ + PreNorm(dim, sa, norm=norm, norm_cond_dim=norm_cond_dim), + PreNorm(dim, ca, norm=norm, norm_cond_dim=norm_cond_dim), + PreNorm(dim, ff, norm=norm, norm_cond_dim=norm_cond_dim), + ] + ) + ) + + def forward(self, x: torch.Tensor, *args, context=None, context_list=None, mask=None): + + if context_list is None: + context_list = [context] * len(self.layers) + + if len(context_list) != len(self.layers): + raise ValueError(f"len(context_list) != len(self.layers) ({len(context_list)} != {len(self.layers)})") + + for i, (self_attn, cross_attn, ff) in enumerate(self.layers): + if mask is not None: + try: + x = x * mask[:, :, None] + except: + print("see ") + import pdb; pdb.set_trace() + x = self_attn(x, mask=mask, *args) + x + x = cross_attn(x, mask=mask, *args, context=context_list[i]) + x + x = ff(x, *args) + x + + if mask is not None: + x = x * mask[:, :, None] + + return x + +class DropTokenDropout(nn.Module): + "Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L204" + def __init__(self, p: float = 0.1): + super().__init__() + if p < 0 or p > 1: + raise ValueError( + "dropout probability has to be between 0 and 1, " "but got {}".format(p) + ) + self.p = p + + def forward(self, x: torch.Tensor): + # x: (batch_size, seq_len, dim) + if self.training and self.p > 0: + zero_mask = torch.full_like(x[0, :, 0], self.p).bernoulli().bool() + # TODO: permutation idx for each batch using torch.argsort + if zero_mask.any(): + x = x[:, ~zero_mask, :] + return x + + +class ZeroTokenDropout(nn.Module): + "Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L223" + def __init__(self, p: float = 0.1): + super().__init__() + if p < 0 or p > 1: + raise ValueError( + "dropout probability has to be between 0 and 1, " "but got {}".format(p) + ) + self.p = p + + def forward(self, x: torch.Tensor): + # x: (batch_size, seq_len, dim) + if self.training and self.p > 0: + zero_mask = torch.full_like(x[:, :, 0], self.p).bernoulli().bool() + # Zero-out the masked tokens + x[zero_mask, :] = 0 + return x + + +class TransformerDecoder(nn.Module): + "Code modified from https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L301" + def __init__( + self, + num_tokens: int, + token_dim: int, + dim: int, + depth: int, + heads: int, + mlp_dim: int, + dim_head: int = 64, + dropout: float = 0.0, + emb_dropout: float = 0.0, + emb_dropout_type: str = 'drop', + norm: str = "layer", + norm_cond_dim: int = -1, + context_dim: Optional[int] = None, + skip_token_embedding: bool = False, + ): + super().__init__() + if not skip_token_embedding: + self.to_token_embedding = nn.Linear(token_dim, dim) + else: + self.to_token_embedding = nn.Identity() + if token_dim != dim: + raise ValueError( + f"token_dim ({token_dim}) != dim ({dim}) when skip_token_embedding is True" + ) + + self.pos_embedding = nn.Parameter(torch.randn(1, num_tokens, dim)) + if emb_dropout_type == "drop": + self.dropout = DropTokenDropout(emb_dropout) + elif emb_dropout_type == "zero": + self.dropout = ZeroTokenDropout(emb_dropout) + elif 
emb_dropout_type == "normal": + self.dropout = nn.Dropout(emb_dropout) + + self.transformer = TransformerCrossAttn( + dim, + depth, + heads, + dim_head, + mlp_dim, + dropout, + norm=norm, + norm_cond_dim=norm_cond_dim, + context_dim=context_dim, + ) + + def forward(self, inp: torch.Tensor, *args, context=None, context_list=None, mask=None): + x = self.to_token_embedding(inp) + b, n, _ = x.shape + + x = self.dropout(x) + #x += self.pos_embedding[:, :n] + x += self.pos_embedding[:, 0][:, None, :] # For now, we don't wish to embed a position. We might in future versions though. + x = self.transformer(x, *args, context=context, context_list=context_list, mask=mask) + return x diff --git a/engine/pose_estimation/blocks/detector.py b/engine/pose_estimation/blocks/detector.py new file mode 100644 index 0000000000000000000000000000000000000000..45faed9a3415db29059ec08ed01c4282221e2475 --- /dev/null +++ b/engine/pose_estimation/blocks/detector.py @@ -0,0 +1,177 @@ +from __future__ import annotations + +import os +import os.path as osp +from collections import defaultdict +import time +from mmpose.apis.inference import batch_inference_pose_model + +import numpy as np +import torch +import torch.nn as nn +import scipy.signal as signal + +from ultralytics import YOLO +from mmpose.apis import ( + init_pose_model, + get_track_id, + vis_pose_result, +) + +ROOT_DIR = osp.abspath(f"{__file__}/../../") +VIT_DIR = osp.join(ROOT_DIR, "third-party/ViTPose") + +VIS_THRESH = 0.5 +BBOX_CONF = 0.5 +TRACKING_THR = 0.1 +MINIMUM_FRMAES = 15 +MINIMUM_JOINTS = 6 + +class DetectionModel(object): + def __init__(self, pose_model_ckpt, device, with_tracker=True): + + # ViTPose + pose_model_cfg = osp.join(VIT_DIR, 'configs/wholebody/2d_kpt_sview_rgb_img/topdown_heatmap/coco-wholebody/ViTPose_huge_wholebody_256x192.py') + #'vitpose-h-multi-coco.pth') + self.pose_model = init_pose_model(pose_model_cfg, pose_model_ckpt, device=device) + + # YOLO + bbox_model_ckpt = osp.join(ROOT_DIR, 'checkpoints', 'yolov8x.pt') + if with_tracker: + self.bbox_model = YOLO(bbox_model_ckpt) + else: + self.bbox_model = None + + self.device = device + self.initialize_tracking() + + def initialize_tracking(self, ): + self.next_id = 0 + self.frame_id = 0 + self.pose_results_last = [] + self.tracking_results = { + 'id': [], + 'frame_id': [], + 'bbox': [], + } + + def xyxy_to_cxcys(self, bbox, s_factor=1.05): + cx, cy = bbox[[0, 2]].mean(), bbox[[1, 3]].mean() + scale = max(bbox[2] - bbox[0], bbox[3] - bbox[1]) / 200 * s_factor + return np.array([[cx, cy, scale]]) + + def compute_bboxes_from_keypoints(self, s_factor=1.2): + X = self.tracking_results['keypoints'].copy() + mask = X[..., -1] > VIS_THRESH + + bbox = np.zeros((len(X), 3)) + for i, (kp, m) in enumerate(zip(X, mask)): + bb = [kp[m, 0].min(), kp[m, 1].min(), + kp[m, 0].max(), kp[m, 1].max()] + cx, cy = [(bb[2]+bb[0])/2, (bb[3]+bb[1])/2] + bb_w = bb[2] - bb[0] + bb_h = bb[3] - bb[1] + s = np.stack((bb_w, bb_h)).max() + bb = np.array((cx, cy, s)) + bbox[i] = bb + + bbox[:, 2] = bbox[:, 2] * s_factor / 200.0 + self.tracking_results['bbox'] = bbox + + def compute_bbox(self, img): + bboxes = self.bbox_model.predict( + img, device=self.device, classes=0, conf=BBOX_CONF, save=False, verbose=False + )[0].boxes.xyxy.detach().cpu().numpy() + + bboxes = [{'bbox': bbox} for bbox in bboxes] + imgs = [img for _ in range(len(bboxes))] + return bboxes, imgs + + def batch_detection(self, bboxes, imgs, batch_size=32): + all_poses = [] + all_bboxes = [] + for i in range(0, len(bboxes), batch_size): + poses, 
bbox_xyxy = batch_inference_pose_model( + self.pose_model, + imgs[i:i+batch_size], + bboxes[i:i+batch_size], + return_heatmap=False) + all_poses.append(poses) + all_bboxes.append(bbox_xyxy) + all_poses = np.concatenate(all_poses) + all_bboxes = np.concatenate(all_bboxes) + return all_poses, all_bboxes + + def track(self, img, fps, length): + # bbox detection + bboxes = self.bbox_model.predict( + img, device=self.device, classes=0, conf=BBOX_CONF, save=False, verbose=False + )[0].boxes.xyxy.detach().cpu().numpy() + + pose_results = [{'bbox': bbox} for bbox in bboxes] + + + pose_results, self.next_id = get_track_id( + pose_results, + self.pose_results_last, + self.next_id, + use_oks=False, + tracking_thr=TRACKING_THR, + use_one_euro=True, + fps=fps) + + for pose_result in pose_results: + + _id = pose_result['track_id'] + xyxy = pose_result['bbox'] + bbox = xyxy# self.xyxy_to_cxcys(xyxy) + + self.tracking_results['id'].append(_id) + self.tracking_results['frame_id'].append(self.frame_id) + self.tracking_results['bbox'].append(bbox) + + self.frame_id += 1 + self.pose_results_last = pose_results + + def process(self, fps): + + for key in ['id', 'frame_id', 'bbox']: + self.tracking_results[key] = np.array(self.tracking_results[key]) + #self.compute_bboxes_from_keypoints() + + output = defaultdict(lambda: defaultdict(list)) + ids = np.unique(self.tracking_results['id']) + + for _id in ids: + idxs = np.where(self.tracking_results['id'] == _id)[0] + + for key, val in self.tracking_results.items(): + if key == 'id': continue + output[_id][key] = val[idxs] + + # Smooth bounding box detection + ids = list(output.keys()) + for _id in ids: + if len(output[_id]['bbox']) < MINIMUM_FRMAES: + del output[_id] + continue + + kernel = int(int(fps/2) / 2) * 2 + 1 + smoothed_bbox = np.array([signal.medfilt(param, kernel) for param in output[_id]['bbox'].T]).T + output[_id]['bbox'] = smoothed_bbox + + return output + + def visualize(self, img, pose_results): + vis_img = vis_pose_result( + self.pose_model, + img, + pose_results, + dataset=self.pose_model.cfg.data['test']['type'], + dataset_info = None, #self.pose_model.cfg.data['test'].get('dataset_info', None), + kpt_score_thr=0.3, + radius=4, + thickness=1, + show=False + ) + return vis_img \ No newline at end of file diff --git a/engine/pose_estimation/blocks/dinov2.py b/engine/pose_estimation/blocks/dinov2.py new file mode 100644 index 0000000000000000000000000000000000000000..302ba52b287b59b7ecef55166ebe42d595050c78 --- /dev/null +++ b/engine/pose_estimation/blocks/dinov2.py @@ -0,0 +1,28 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. 
+# CC BY-NC-SA 4.0 license +import os +current_dir_path = os.path.dirname(__file__) +import torch +from torch import nn + +class Dinov2Backbone(nn.Module): + def __init__(self, name='dinov2_vitb14', pretrained=False, *args, **kwargs): + super().__init__() + self.name = name + self.encoder = torch.hub.load(current_dir_path+'/../dinov2', self.name, pretrained=pretrained, source='local') + self.patch_size = self.encoder.patch_size + self.embed_dim = self.encoder.embed_dim + + def forward(self, x): + """ + Encode a RGB image using a ViT-backbone + Args: + - x: torch.Tensor of shape [bs,3,w,h] + Return: + - y: torch.Tensor of shape [bs,k,d] - image in patchified mode + """ + assert len(x.shape) == 4 + y = self.encoder.get_intermediate_layers(x)[0] # ViT-L+896x896: [bs,4096,1024] - [bs,nb_patches,emb] + return y + diff --git a/engine/pose_estimation/blocks/smpl_layer.py b/engine/pose_estimation/blocks/smpl_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..f4fd8857e8ae9c26de9cb1d26d486fa7ed58ecf1 --- /dev/null +++ b/engine/pose_estimation/blocks/smpl_layer.py @@ -0,0 +1,231 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. +# CC BY-NC-SA 4.0 license + +import torch +from torch import nn +from torch import nn +import smplx +import torch +import numpy as np +import pose_utils +from pose_utils import inverse_perspective_projection, perspective_projection +import roma +import pickle +import os +from pose_utils.constants_service import SMPLX_DIR +from pose_utils.rot6d import rotation_6d_to_matrix +from smplx.lbs import vertices2joints + + +class SMPL_Layer(nn.Module): + """ + Extension of the SMPL Layer with information about the camera for (inverse) projection the camera plane. + """ + + def __init__( + self, + smpl_dir, + type="smplx", + gender="neutral", + num_betas=10, + kid=False, + person_center=None, + *args, + **kwargs, + ): + super().__init__() + + # Args + assert type == "smplx" + self.type = type + self.kid = kid + self.num_betas = num_betas + self.bm_x = smplx.create( + smpl_dir, "smplx", gender=gender, use_pca=False, flat_hand_mean=True, num_betas=num_betas + ) + + # Primary keypoint - root + self.joint_names = eval(f"pose_utils.get_{self.type}_joint_names")() + self.person_center = person_center + self.person_center_idx = None + if self.person_center is not None: + self.person_center_idx = self.joint_names.index(self.person_center) + + def forward( + self, + pose, + shape, + loc, + dist, + transl, + K, + expression=None, # facial expression + rot6d=False, + j_regressor=None, + ): + """ + Args: + - pose: pose of the person in axis-angle - torch.Tensor [bs,24,3] + - shape: torch.Tensor [bs,10] + - loc: 2D location of the pelvis in pixel space - torch.Tensor [bs,2] + - dist: distance of the pelvis from the camera in m - torch.Tensor [bs,1] + Return: + - dict containing a bunch of useful information about each person + """ + + if loc is not None and dist is not None: + assert pose.shape[0] == shape.shape[0] == loc.shape[0] == dist.shape[0] + POSE_TYPE_LENGTH = 6 if rot6d else 3 + if self.type == "smpl": + assert len(pose.shape) == 3 and list(pose.shape[1:]) == [24, POSE_TYPE_LENGTH] + elif self.type == "smplx": + assert len(pose.shape) == 3 and list(pose.shape[1:]) == [ + 53, + POSE_TYPE_LENGTH, + ] # taking root_orient, body_pose, lhand, rhan and jaw for the moment + else: + raise NameError + assert len(shape.shape) == 2 and ( + list(shape.shape[1:]) == [self.num_betas] or list(shape.shape[1:]) == [self.num_betas + 1] + ) + if loc is not None and dist is 
not None: + assert len(loc.shape) == 2 and list(loc.shape[1:]) == [2] + assert len(dist.shape) == 2 and list(dist.shape[1:]) == [1] + + bs = pose.shape[0] + + out = {} + + # No humans + if bs == 0: + return {} + + # Low dimensional parameters + kwargs_pose = { + "betas": shape, + } + kwargs_pose["global_orient"] = self.bm_x.global_orient.repeat(bs, 1) + kwargs_pose["body_pose"] = pose[:, 1:22].flatten(1) + kwargs_pose["left_hand_pose"] = pose[:, 22:37].flatten(1) + kwargs_pose["right_hand_pose"] = pose[:, 37:52].flatten(1) + kwargs_pose["jaw_pose"] = pose[:, 52:53].flatten(1) + + if expression is not None: + kwargs_pose["expression"] = expression.flatten(1) # [bs,10] + else: + kwargs_pose["expression"] = self.bm_x.expression.repeat(bs, 1) + + # default - to be generalized + kwargs_pose["leye_pose"] = self.bm_x.leye_pose.repeat(bs, 1) + kwargs_pose["reye_pose"] = self.bm_x.reye_pose.repeat(bs, 1) + # kwargs_pose['pose2rot'] = not rot6d + # Forward using the parametric 3d model SMPL-X layer + output = self.bm_x(pose2rot=not rot6d, **kwargs_pose) + verts = output.vertices + j3d = output.joints # 45 joints + + if rot6d: + R = rotation_6d_to_matrix(pose[:, 0]) + else: + R = roma.rotvec_to_rotmat(pose[:, 0]) + + # Apply global orientation on 3D points + pelvis = j3d[:, [0]] + j3d = (R.unsqueeze(1) @ (j3d - pelvis).unsqueeze(-1)).squeeze(-1) + + # Apply global orientation on 3D points - bis + verts = (R.unsqueeze(1) @ (verts - pelvis).unsqueeze(-1)).squeeze(-1) + + # Location of the person in 3D + if transl is None: + if K.dtype == torch.float16: + # because of torch.inverse - not working with float16 at the moment + transl = inverse_perspective_projection( + loc.unsqueeze(1).float(), K.float(), dist.unsqueeze(1).float() + )[:, 0] + transl = transl.half() + else: + transl = inverse_perspective_projection(loc.unsqueeze(1), K, dist.unsqueeze(1))[:, 0] + + # Updating transl if we choose a certain person center + transl_up = transl.clone() + + # Definition of the translation depend on the args: 1) vanilla SMPL - 2) computed from a given joint + if self.person_center_idx is None: + # Add pelvis to transl - standard way for SMPLX layer + transl_up = transl_up + pelvis[:, 0] + else: + # Center around the joint because teh translation is computed from this joint + person_center = j3d[:, [self.person_center_idx]] + verts = verts - person_center + j3d = j3d - person_center + + # Moving into the camera coordinate system + j3d_cam = j3d + transl_up.unsqueeze(1) + verts_cam = verts + transl_up.unsqueeze(1) + + # Projection in camera plane + if j_regressor is not None: + # for smplify + j3d_cam = vertices2joints(j_regressor, verts_cam) + j2d = perspective_projection(j3d_cam, K) + v2d = perspective_projection(verts_cam, K) + + out.update( + { + "v3d": verts_cam, # in 3d camera space + "j3d": j3d_cam, # in 3d camera space + "j2d": j2d, + "v2d": v2d, + "transl": transl, # translation of the primary keypoint + "transl_pelvis": transl.unsqueeze(1) - person_center - pelvis, # root=pelvis + "j3d_world": output.joints, + } + ) + + return out + + def forward_local(self, pose, shape): + N, J, L = pose.shape + if N < 1: + return None + kwargs_pose = { + "betas": shape, + } + if J == 53: + kwargs_pose["global_orient"] = self.bm_x.global_orient.repeat(N, 1) + kwargs_pose["body_pose"] = pose[:, 1:22].flatten(1) + kwargs_pose["left_hand_pose"] = pose[:, 22:37].flatten(1) + kwargs_pose["right_hand_pose"] = pose[:, 37:52].flatten(1) + kwargs_pose["jaw_pose"] = pose[:, 52:53].flatten(1) + elif J==55: + 
kwargs_pose["global_orient"] = self.bm_x.global_orient.repeat(N, 1) + kwargs_pose["body_pose"] = pose[:, 1:22].flatten(1) + kwargs_pose["left_hand_pose"] = pose[:, 25:40].flatten(1) + kwargs_pose["right_hand_pose"] = pose[:, 40:55].flatten(1) + kwargs_pose["jaw_pose"] = pose[:, 22:23].flatten(1) + else: + raise ValueError(f"pose dim error, should be 53 or 55, but got {J}") + kwargs_pose["expression"] = self.bm_x.expression.repeat(N, 1) + + # default - to be generalized + kwargs_pose["leye_pose"] = self.bm_x.leye_pose.repeat(N, 1) + kwargs_pose["reye_pose"] = self.bm_x.reye_pose.repeat(N, 1) + + output = self.bm_x(**kwargs_pose) + return output + def convert_standard_pose(self, poses): + # pose: N, J, 3 + n = poses.shape[0] + poses = torch.cat( + [ + poses[:, :22], + poses[:, 52:53], + self.bm_x.leye_pose.repeat(n, 1, 1), + self.bm_x.reye_pose.repeat(n, 1, 1), + poses[:, 22:52], + ], + dim=1, + ) + return poses diff --git a/engine/pose_estimation/dinov2/.github/workflows/lint.yaml b/engine/pose_estimation/dinov2/.github/workflows/lint.yaml new file mode 100644 index 0000000000000000000000000000000000000000..77176256982688097db1c52073c329a23b352ebf --- /dev/null +++ b/engine/pose_estimation/dinov2/.github/workflows/lint.yaml @@ -0,0 +1,38 @@ +name: Lint + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + run-linters: + name: Run linters + runs-on: ubuntu-20.04 + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + cache: 'pip' + cache-dependency-path: '**/requirements*.txt' + - name: Install Python (development) dependencies + run: | + pip install -r requirements-dev.txt + - name: Run flake8 + run: | + flake8 + - name: Run black + if: always() + run: | + black --check dinov2 + - name: Run pylint + if: always() + run: | + pylint --exit-zero dinov2 diff --git a/engine/pose_estimation/dinov2/.gitignore b/engine/pose_estimation/dinov2/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..f6893ca30f324f6ed3e18ae9c726af8377c57c69 --- /dev/null +++ b/engine/pose_estimation/dinov2/.gitignore @@ -0,0 +1,11 @@ +build/ +dist/ +*.egg-info/ +**/__pycache__/ + +**/.ipynb_checkpoints +**/.ipynb_checkpoints/** + +*.swp + +.vscode/ diff --git a/engine/pose_estimation/dinov2/CODE_OF_CONDUCT.md b/engine/pose_estimation/dinov2/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000000000000000000000000000000..3232ed665566ec047ce55a929db1581dbda266a1 --- /dev/null +++ b/engine/pose_estimation/dinov2/CODE_OF_CONDUCT.md @@ -0,0 +1,80 @@ +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to make participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic +address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a +professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all project spaces, and it also applies when +an individual is representing the project or its community in public spaces. +Examples of representing a project or community include using an official +project e-mail address, posting via an official social media account, or acting +as an appointed representative at an online or offline event. Representation of +a project may be further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when there is a +reasonable belief that an individual's behavior may have a negative impact on +the project or its community. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at . All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/engine/pose_estimation/dinov2/CONTRIBUTING.md b/engine/pose_estimation/dinov2/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..afc89823fc90b920f0758f50e4d808df6a884a34 --- /dev/null +++ b/engine/pose_estimation/dinov2/CONTRIBUTING.md @@ -0,0 +1,31 @@ +# Contributing to DINOv2 +We want to make contributing to this project as easy and transparent as +possible. 
+ +## Pull Requests +We actively welcome your pull requests. + +1. Fork the repo and create your branch from `main`. +2. If you've added code that should be tested, add tests. +3. If you've changed APIs, update the documentation. +4. Ensure the test suite passes. +5. Make sure your code lints. +6. If you haven't already, complete the Contributor License Agreement ("CLA"). + +## Contributor License Agreement ("CLA") +In order to accept your pull request, we need you to submit a CLA. You only need +to do this once to work on any of Meta's open source projects. + +Complete your CLA here: + +## Issues +We use GitHub issues to track public bugs. Please ensure your description is +clear and has sufficient instructions to be able to reproduce the issue. + +Meta has a [bounty program](https://www.facebook.com/whitehat/) for the safe +disclosure of security bugs. In those cases, please go through the process +outlined on that page and do not file a public issue. + +## License +By contributing to DINOv2, you agree that your contributions will be licensed +under the LICENSE file in the root directory of this source tree. diff --git a/engine/pose_estimation/dinov2/LICENSE b/engine/pose_estimation/dinov2/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5471dc10377b76db85a2feca7a99a7eef4980ba8 --- /dev/null +++ b/engine/pose_estimation/dinov2/LICENSE @@ -0,0 +1,203 @@ + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/engine/pose_estimation/dinov2/MODEL_CARD.md b/engine/pose_estimation/dinov2/MODEL_CARD.md new file mode 100644 index 0000000000000000000000000000000000000000..21b9bf295c8cab14e782e1a7a1d051be9e501088 --- /dev/null +++ b/engine/pose_estimation/dinov2/MODEL_CARD.md @@ -0,0 +1,272 @@ +# Model Card for DINOv2-S/B/L/g + +These are Vision Transformer models trained following the method described in the papers: +"DINOv2: Learning Robust Visual Features without Supervision" +and +"Vision Transformers Need Registers". + +We provide 8 models: +- 1 ViT-g trained from scratch with 3 ViT-S/B/L models distilled from the ViT-g, without registers. +- 1 ViT-g trained from scratch with 3 ViT-S/B/L models distilled from the ViT-g, with registers. + +## Model Details +The model takes an image as input and returns a class token and patch tokens, and optionally 4 register tokens. + +The embedding dimension is: +- 384 for ViT-S. +- 768 for ViT-B. +- 1024 for ViT-L. +- 1536 for ViT-g. 
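These embedding dimensions can be sanity-checked directly on the released backbones. A minimal sketch, assuming network access to PyTorch Hub and using the hub entry points listed later in this card (`embed_dim` is the same attribute the `Dinov2Backbone` wrapper in this change reads from the loaded encoder):

```python
import torch

# Illustrative check of the embedding dimensions listed above.
# The hub entry points are the ones documented in "How to Get Started with the Model".
expected_dims = {
    "dinov2_vits14": 384,
    "dinov2_vitb14": 768,
    "dinov2_vitl14": 1024,
    "dinov2_vitg14": 1536,
}
for name, dim in expected_dims.items():
    backbone = torch.hub.load("facebookresearch/dinov2", name)
    assert backbone.embed_dim == dim, (name, backbone.embed_dim)
```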
+ +The models follow a Transformer architecture, with a patch size of 14. In the case of registers, we add 4 register tokens, learned during training, to the input sequence after the patch embedding. + +For a 224x224 image, this results in 1 class token + 256 patch tokens, and optionally 4 register tokens. + +The models can accept larger images provided the image shapes are multiples of the patch size (14). +If this condition is not verified, the model will crop to the closest smaller multiple of the patch size. + +### Model Description + +- **Developed by:** Meta AI +- **Model type:** Vision Transformer +- **License:** Apache License 2.0 + +- **Repository:** https://github.com/facebookresearch/dinov2 +- **Paper:** https://arxiv.org/abs/2304.07193 +- **Demo:** https://dinov2.metademolab.com/ + +## Uses + +The models are vision backbones providing multi-purpose features for downstream tasks. + +### Direct Use + +The models can be used without fine-tuning, with downstream classifiers as simple as linear layers, to obtain competitive results: +- on depth estimation, semantic segmentation, using linear layers. +- on image classification, using k-NN classifiers on the class token. +- on image classification, with logistic regression classifiers applied on the class token. +- on image classification, with a linear layer applied on the class token and the average of the patch tokens. +- on image retrieval using nearest neighbors. + +### Downstream Use + +It is technically possible to perform fine-tuning on the models, for small gains (we measured +2% on ImageNet-1k classification). +We recommend keeping this as a very last step and only when necessary, as the features already provide good performance out-of-the-box. + +## Bias, Risks, and Limitations + +Despite improvements thanks to the training method not using annotations, we still observe significant biases in our models toward rich households from Western countries. + +### Recommendations + +We expect fine-tuning will increase the biases in the features produced by the model as they will be tuned to the fine-tuning labels. + +## How to Get Started with the Model + +Use the code below to get started with the model. + +```python +import torch + +# DINOv2 +dinov2_vits14 = torch.hub.load('facebookresearch/dinov2', 'dinov2_vits14') +dinov2_vitb14 = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitb14') +dinov2_vitl14 = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitl14') +dinov2_vitg14 = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitg14') + +# DINOv2 with registers +dinov2_vits14_reg = torch.hub.load('facebookresearch/dinov2', 'dinov2_vits14_reg') +dinov2_vitb14_reg = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitb14_reg') +dinov2_vitl14_reg = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitl14_reg') +dinov2_vitg14_reg = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitg14_reg') +``` + +## Training Details + +### Training Data + +- **Training data:** LVD-142M (see paper) +- **Training regime:** fp16 using PyTorch-FSDP mixed-precision. 
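As a concrete illustration of the "Direct Use" setting described above (a frozen backbone with a linear classifier on the image-level embedding), here is a minimal, hypothetical sketch; `num_classes`, the image batch, and the training loop are placeholders:

```python
import torch
from torch import nn

# Hypothetical linear probe on frozen DINOv2 features (see "Direct Use" above).
backbone = torch.hub.load("facebookresearch/dinov2", "dinov2_vits14")
backbone.eval()                           # backbone stays frozen; only the head would be trained

num_classes = 1000                        # placeholder
head = nn.Linear(backbone.embed_dim, num_classes)

images = torch.randn(8, 3, 224, 224)      # stand-in for a normalized image batch
with torch.no_grad():
    feats = backbone(images)              # (8, 384) image-level embeddings
logits = head(feats)                      # (8, num_classes)
```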
+
+### Training Procedure
+
+- **Training objective:**
+  - DINO self-distillation loss with multi-crop
+  - iBOT masked-image modeling loss
+  - KoLeo regularization on [CLS] tokens
+- **Architectures:**
+  - ViT-S (21M params): Patch size 14, embedding dimension 384, 6 heads, MLP FFN
+  - ViT-B (86M params): Patch size 14, embedding dimension 768, 12 heads, MLP FFN
+  - ViT-L (0.3B params): Patch size 14, embedding dimension 1024, 16 heads, MLP FFN
+  - ViT-g (1.1B params): Patch size 14, embedding dimension 1536, 24 heads, SwiGLU FFN
+- **Distillation:**
+  - Distillation follows the standard DINOv2 pretraining procedure, except the teacher is a pretrained ViT-g, frozen.
+
+## Evaluation
+
+We refer users to the associated papers for the evaluation protocols.
+
+| model | with registers | ImageNet-1k classif. (acc), k-NN | ImageNet-1k classif. (acc), linear | ImageNet-1k classif. V2 (acc), linear | NYU-Depth v2 depth (RMSE), linear 4 layers | SUN-RGBD depth (RMSE), NYU-D transfer | ADE20k segm. (mAP), multiscale | iNaturalist 2018 classif. (acc), linear | Oxford-H retrieval (mAP), nearest neighbor |
+|---|---|---|---|---|---|---|---|---|---|
+| ViT-S/14 | :x: | 79.0% | 81.1% | 70.8% | 0.417 | 0.431 | 47.2 | 69.5% | 43.2 |
+| ViT-S/14 | :white_check_mark: | 79.1% | 80.9% | 71.0% | N/A | N/A | N/A | 67.6% | 39.5 |
+| ViT-B/14 | :x: | 82.1% | 84.5% | 74.9% | 0.362 | 0.400 | 51.3 | 76.3% | 49.5 |
+| ViT-B/14 | :white_check_mark: | 82.0% | 84.6% | 75.6% | N/A | N/A | N/A | 73.8% | 51.0 |
+| ViT-L/14 | :x: | 83.5% | 86.3% | 77.6% | 0.333 | 0.396 | 53.1 | 79.8% | 54.0 |
+| ViT-L/14 | :white_check_mark: | 83.8% | 86.7% | 78.5% | N/A | N/A | N/A | 80.9% | 55.7 |
+| ViT-g/14 | :x: | 83.5% | 86.5% | 78.4% | 0.298 | 0.362 | 53.0 | 81.6% | 52.3 |
+| ViT-g/14 | :white_check_mark: | 83.7% | 87.1% | 78.8% | N/A | N/A | N/A | 81.5% | 58.2 |
+ +## Environmental Impact + +- **Hardware Type:** Nvidia A100 +- **Hours used:** 22,000 for ViT-g, 4,500 for ViT-S distillation, 5,300 for ViT-B distillation, 8,000 for ViT-L distillation +- **Cloud Provider:** Private infra +- **Compute Region:** USA +- **Carbon Emitted:** 7t CO2eq + +#### Hardware + +Nvidia A100 GPUs + +#### Software + +PyTorch 2.0, +xFormers 0.0.18 + +**BibTeX** + +``` +@misc{oquab2023dinov2, + title={DINOv2: Learning Robust Visual Features without Supervision}, + author={Oquab, Maxime and Darcet, Timothée and Moutakanni, Theo and Vo, Huy and Szafraniec, Marc and Khalidov, Vasil and Fernandez, Pierre and Haziza, Daniel and Massa, Francisco and El-Nouby, Alaaeldin and Howes, Russell and Huang, Po-Yao and Xu, Hu and Sharma, Vasu and Li, Shang-Wen and Galuba, Wojciech and Rabbat, Mike and Assran, Mido and Ballas, Nicolas and Synnaeve, Gabriel and Misra, Ishan and Jegou, Herve and Mairal, Julien and Labatut, Patrick and Joulin, Armand and Bojanowski, Piotr}, + journal={arXiv:2304.07193}, + year={2023} +} +@misc{darcet2023vitneedreg, + title={Vision Transformers Need Registers}, + author={Darcet, Timothée and Oquab, Maxime and Mairal, Julien and Bojanowski, Piotr}, + journal={arXiv:2309.16588}, + year={2023} +} +``` diff --git a/engine/pose_estimation/dinov2/README.md b/engine/pose_estimation/dinov2/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c188088b08f10f25a5b84704dfb60a8f8af399e4 --- /dev/null +++ b/engine/pose_estimation/dinov2/README.md @@ -0,0 +1,620 @@ +:new: [2023-10-26] *Added DINOv2 backbones with registers, following [Vision Transformers Need Registers](https://arxiv.org/abs/2309.16588).* + +# DINOv2: Learning Robust Visual Features without Supervision + +**[Meta AI Research, FAIR](https://ai.facebook.com/research/)** + +Maxime Oquab, +Timothée Darcet, +Théo Moutakanni, +Huy V. Vo, +Marc Szafraniec, +Vasil Khalidov, +Patrick Labatut, +Armand Joulin, +Piotr Bojanowski + +[[`Paper #1`](https://arxiv.org/abs/2304.07193)] [`Paper #2`](https://arxiv.org/abs/2309.16588)] [[`Blog`](https://ai.facebook.com/blog/dino-v2-computer-vision-self-supervised-learning/)] [[`Demo`](https://dinov2.metademolab.com)] [[`BibTeX`](#citing-dinov2)] + +PyTorch implementation and pretrained models for DINOv2. For details, see the papers: **[DINOv2: Learning Robust Visual Features without Supervision](https://arxiv.org/abs/2304.07193)** and **[Vision Transformers Need Registers](https://arxiv.org/abs/2309.16588)**. + +DINOv2 models produce high-performance visual features that can be directly employed with classifiers as simple as linear layers on a variety of computer vision tasks; these visual features are robust and perform well across domains without any requirement for fine-tuning. The models were pretrained on a dataset of 142 M images without using any labels or annotations. + +https://github.com/facebookresearch/dinov2/assets/60359573/f168823e-7922-415a-b429-578badf5c356 + +
+ Visualization of the three first principal components of the patch features of all frames, mapped to RGB values. +
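
As a rough illustration of how such a visualization can be derived from the patch features (a sketch with assumed shapes for a ViT-S/14 backbone at 224x224 resolution; the exact post-processing of the official demo may differ):

```python
import torch

# Illustrative sketch: map the first three principal components of the patch
# features of one frame to RGB. `patch_tokens` is a stand-in for real features:
# 16x16 = 256 tokens of dimension 384 for ViT-S/14 at 224x224 input.
patch_tokens = torch.rand(256, 384)

centered = patch_tokens - patch_tokens.mean(dim=0, keepdim=True)
_, _, v = torch.pca_lowrank(centered, q=3)   # top-3 principal directions
components = centered @ v                    # (256, 3) projections

# Scale each component to [0, 1] and arrange as a 16x16 RGB image.
lo, hi = components.min(dim=0).values, components.max(dim=0).values
rgb = ((components - lo) / (hi - lo)).reshape(16, 16, 3)
```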
+ +## Pretrained models + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| model | # of params | with registers | ImageNet k-NN | ImageNet linear | download |
|---|---|---|---|---|---|
| ViT-S/14 distilled | 21 M | :x: | 79.0% | 81.1% | backbone only |
| ViT-S/14 distilled | 21 M | :white_check_mark: | 79.1% | 80.9% | backbone only |
| ViT-B/14 distilled | 86 M | :x: | 82.1% | 84.5% | backbone only |
| ViT-B/14 distilled | 86 M | :white_check_mark: | 82.0% | 84.6% | backbone only |
| ViT-L/14 distilled | 300 M | :x: | 83.5% | 86.3% | backbone only |
| ViT-L/14 distilled | 300 M | :white_check_mark: | 83.8% | 86.7% | backbone only |
| ViT-g/14 | 1,100 M | :x: | 83.5% | 86.5% | backbone only |
| ViT-g/14 | 1,100 M | :white_check_mark: | 83.7% | 87.1% | backbone only |
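
Every checkpoint above exposes the same interface once loaded (loading instructions follow in the next section). A minimal sketch of the outputs, with shapes assuming ViT-S/14 and a 224x224 input (a hedged illustration, not code taken from the repository):

```python
import torch

# Illustrative sketch of backbone outputs (shapes assume ViT-S/14, 224x224 input).
backbone = torch.hub.load('facebookresearch/dinov2', 'dinov2_vits14')
backbone.eval()

x = torch.rand(1, 3, 224, 224)  # stand-in for a normalized image batch
with torch.no_grad():
    cls_embedding = backbone(x)                 # (1, 384) global image feature
    feats = backbone.forward_features(x)
    patch_tokens = feats['x_norm_patchtokens']  # (1, 256, 384): one token per 14x14 patch
```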
+ +### Pretrained backbones (via PyTorch Hub) + +Please follow the instructions [here](https://pytorch.org/get-started/locally/) to install PyTorch (the only required dependency for loading the model). Installing PyTorch with CUDA support is strongly recommended. + +A corresponding [model card](MODEL_CARD.md) is included in the repository. + +```python +import torch + +# DINOv2 +dinov2_vits14 = torch.hub.load('facebookresearch/dinov2', 'dinov2_vits14') +dinov2_vitb14 = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitb14') +dinov2_vitl14 = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitl14') +dinov2_vitg14 = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitg14') + +# DINOv2 with registers +dinov2_vits14_reg = torch.hub.load('facebookresearch/dinov2', 'dinov2_vits14_reg') +dinov2_vitb14_reg = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitb14_reg') +dinov2_vitl14_reg = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitl14_reg') +dinov2_vitg14_reg = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitg14_reg') +``` + +### Pretrained heads - Image classification + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| backbone | with registers | download (ImageNet) |
|---|---|---|
| ViT-S/14 distilled | :x: | linear head (1 layer, 4 layers) |
| ViT-S/14 distilled | :white_check_mark: | linear head (1 layer, 4 layers) |
| ViT-B/14 distilled | :x: | linear head (1 layer, 4 layers) |
| ViT-B/14 distilled | :white_check_mark: | linear head (1 layer, 4 layers) |
| ViT-L/14 distilled | :x: | linear head (1 layer, 4 layers) |
| ViT-L/14 distilled | :white_check_mark: | linear head (1 layer, 4 layers) |
| ViT-g/14 | :x: | linear head (1 layer, 4 layers) |
| ViT-g/14 | :white_check_mark: | linear head (1 layer, 4 layers) |
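
The "1 layer" and "4 layers" variants differ in how many of the backbone's final blocks feed the linear head: following `create_linear_input` in `dinov2/eval/linear.py` (included further below in this diff), the class tokens of the last n blocks are concatenated, optionally together with average-pooled patch tokens. A hedged sketch of assembling that input for n = 4 (illustrative; the released heads may be wired slightly differently):

```python
import torch

# Illustrative sketch of the features a "4 layers" linear head consumes,
# mirroring create_linear_input in dinov2/eval/linear.py (with avgpool).
backbone = torch.hub.load('facebookresearch/dinov2', 'dinov2_vits14')
backbone.eval()

x = torch.rand(1, 3, 224, 224)  # stand-in for a normalized image batch
with torch.no_grad():
    layers = backbone.get_intermediate_layers(x, n=4, return_class_token=True)

# Each entry is a (patch_tokens, class_token) pair for one of the last 4 blocks.
cls_concat = torch.cat([cls for _, cls in layers], dim=-1)  # (1, 4 * 384)
avgpool = layers[-1][0].mean(dim=1)                         # (1, 384) mean patch token
linear_input = torch.cat([cls_concat, avgpool], dim=-1)     # (1, 5 * 384)
```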
+ +The (full) classifier models can be loaded via PyTorch Hub: + +```python +import torch + +# DINOv2 +dinov2_vits14_lc = torch.hub.load('facebookresearch/dinov2', 'dinov2_vits14_lc') +dinov2_vitb14_lc = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitb14_lc') +dinov2_vitl14_lc = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitl14_lc') +dinov2_vitg14_lc = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitg14_lc') + +# DINOv2 with registers +dinov2_vits14_reg_lc = torch.hub.load('facebookresearch/dinov2', 'dinov2_vits14_reg_lc') +dinov2_vitb14_reg_lc = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitb14_reg_lc') +dinov2_vitl14_reg_lc = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitl14_reg_lc') +dinov2_vitg14_reg_lc = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitg14_reg_lc') +``` + +### Pretrained heads - Depth estimation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| backbone | download head (NYUd) | download head (KITTI) |
|---|---|---|
| ViT-S/14 distilled | linear (1 layer, 4 layers), DPT | linear (1 layer, 4 layers), DPT |
| ViT-B/14 distilled | linear (1 layer, 4 layers), DPT | linear (1 layer, 4 layers), DPT |
| ViT-L/14 distilled | linear (1 layer, 4 layers), DPT | linear (1 layer, 4 layers), DPT |
| ViT-g/14 | linear (1 layer, 4 layers), DPT | linear (1 layer, 4 layers), DPT |
+ +### Pretrained heads - Semantic segmentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| backbone | download model (ADE20K) | download head (ADE20K) | download head (VOC2012) |
|---|---|---|---|
| ViT-S/14 distilled | | linear, multi-scale | linear, multi-scale |
| ViT-B/14 distilled | | linear, multi-scale | linear, multi-scale |
| ViT-L/14 distilled | | linear, multi-scale | linear, multi-scale |
| ViT-g/14 | Mask2Former | linear, multi-scale | linear, multi-scale |
+ +## Installation + +The training and evaluation code requires PyTorch 2.0 and [xFormers](https://github.com/facebookresearch/xformers) 0.0.18 as well as a number of other 3rd party packages. Note that the code has only been tested with the specified versions and also expects a Linux environment. To setup all the required dependencies for training and evaluation, please follow the instructions below: + +*[conda](https://docs.conda.io/projects/conda/en/latest/user-guide/getting-started.html)* **(Recommended)** - Clone the repository and then create and activate a `dinov2` conda environment using the provided environment definition: + +```shell +conda env create -f conda.yaml +conda activate dinov2 +``` + +*[pip](https://pip.pypa.io/en/stable/getting-started/)* - Clone the repository and then use the provided `requirements.txt` to install the dependencies: + +```shell +pip install -r requirements.txt +``` + +For dense tasks (depth estimation and semantic segmentation), there are additional dependencies (specific versions of `mmcv` and `mmsegmentation`) which are captured in the `extras` dependency specifications: + +*[conda](https://docs.conda.io/projects/conda/en/latest/user-guide/getting-started.html)* **(Recommended)**: + +```shell +conda env create -f conda-extras.yaml +conda activate dinov2-extras +``` + +*[pip](https://pip.pypa.io/en/stable/getting-started/)*: + +```shell +pip install -r requirements.txt -r requirements-extras.txt +``` + +## Data preparation + +### ImageNet-1k + +The root directory of the dataset should hold the following contents: + +- `/test/ILSVRC2012_test_00000001.JPEG` +- `/test/[..]` +- `/test/ILSVRC2012_test_00100000.JPEG` +- `/train/n01440764/n01440764_10026.JPEG` +- `/train/[...]` +- `/train/n15075141/n15075141_9993.JPEG` +- `/val/n01440764/ILSVRC2012_val_00000293.JPEG` +- `/val/[...]` +- `/val/n15075141/ILSVRC2012_val_00049174.JPEG` +- `/labels.txt` + +The provided dataset implementation expects a few additional metadata files to be present under the extra directory: + +- `/class-ids-TRAIN.npy` +- `/class-ids-VAL.npy` +- `/class-names-TRAIN.npy` +- `/class-names-VAL.npy` +- `/entries-TEST.npy` +- `/entries-TRAIN.npy` +- `/entries-VAL.npy` + +These metadata files can be generated (once) with the following lines of Python code: + +```python +from dinov2.data.datasets import ImageNet + +for split in ImageNet.Split: + dataset = ImageNet(split=split, root="", extra="") + dataset.dump_extra() +``` + +Note that the root and extra directories do not have to be distinct directories. + +### ImageNet-22k + +Please adapt the [dataset class](dinov2/data/datasets/image_net_22k.py) to match your local setup. + +
+ +:warning: To execute the commands provided in the next sections for training and evaluation, the `dinov2` package should be included in the Python module search path, i.e. simply prefix the command to run with `PYTHONPATH=.`. + +## Training + +### Fast setup: training DINOv2 ViT-L/16 on ImageNet-1k + +Run DINOv2 training on 4 A100-80GB nodes (32 GPUs) in a SLURM cluster environment with submitit: + +```shell +python dinov2/run/train/train.py \ + --nodes 4 \ + --config-file dinov2/configs/train/vitl16_short.yaml \ + --output-dir \ + train.dataset_path=ImageNet:split=TRAIN:root=:extra= +``` + +Training time is approximately 1 day and the resulting checkpoint should reach 81.6% on k-NN eval and 82.9% on linear eval. + +The training code saves the weights of the teacher in the `eval` folder every 12500 iterations for evaluation. + +### Long setup: training DINOv2 ViT-L/14 on ImageNet-22k + +Run DINOv2 training on 12 A100-80GB nodes (96 GPUs) in a SLURM cluster environment with submitit: + +```shell +python dinov2/run/train/train.py \ + --nodes 12 \ + --config-file dinov2/configs/train/vitl14.yaml \ + --output-dir \ + train.dataset_path=ImageNet22k:root=:extra= +``` + +Training time is approximately 3.3 days and the resulting checkpoint should reach 82.0% on k-NN eval and 84.5% on linear eval. + +The training code saves the weights of the teacher in the `eval` folder every 12500 iterations for evaluation. + + +## Evaluation + +The training code regularly saves the teacher weights. In order to evaluate the model, run the following evaluation on a single node: + +### k-NN classification on ImageNet-1k + +```shell +python dinov2/run/eval/knn.py \ + --config-file /config.yaml \ + --pretrained-weights /eval/training_24999/teacher_checkpoint.pth \ + --output-dir /eval/training_24999/knn \ + --train-dataset ImageNet:split=TRAIN:root=:extra= \ + --val-dataset ImageNet:split=VAL:root=:extra= +``` + +### Logistic regression classification on ImageNet-1k + +```shell +python dinov2/run/eval/log_regression.py \ + --config-file /config.yaml \ + --pretrained-weights /eval/training_24999/teacher_checkpoint.pth \ + --output-dir /eval/training_24999/logreg \ + --train-dataset ImageNet:split=TRAIN:root=:extra= \ + --val-dataset ImageNet:split=VAL:root=:extra= +``` + +### Linear classification with data augmentation on ImageNet-1k + +```shell +python dinov2/run/eval/linear.py \ + --config-file /config.yaml \ + --pretrained-weights /eval/training_24999/teacher_checkpoint.pth \ + --output-dir /eval/training_24999/linear \ + --train-dataset ImageNet:split=TRAIN:root=:extra= \ + --val-dataset ImageNet:split=VAL:root=:extra= +``` + +We release the weights from evaluating the different models: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| model | with registers | ImageNet top-1 | linear evaluation |
|---|---|---|---|
| ViT-S/14 distilled | :x: | 81.1% | linear head weights |
| ViT-S/14 distilled | :white_check_mark: | 80.8% | linear head weights |
| ViT-B/14 distilled | :x: | 84.5% | linear head weights |
| ViT-B/14 distilled | :white_check_mark: | 84.4% | linear head weights |
| ViT-L/14 distilled | :x: | 86.3% | linear head weights |
| ViT-L/14 distilled | :white_check_mark: | 86.5% | linear head weights |
| ViT-g/14 | :x: | 86.5% | linear head weights |
| ViT-g/14 | :white_check_mark: | 87.0% | linear head weights |
+ +The performance of the provided pretrained model weights can be evaluated as follows on ImageNet-1k: + +```shell +python dinov2/run/eval/linear.py \ + --config-file dinov2/configs/eval/vitg14_pretrain.yaml \ + --pretrained-weights https://dl.fbaipublicfiles.com/dinov2/dinov2_vitg14/dinov2_vitg14_pretrain.pth \ + --train-dataset ImageNet:split=TRAIN:root=:extra= \ + --val-dataset ImageNet:split=VAL:root=:extra= +``` + +## Notebooks + +A few notebooks are provided to help the community leverage the models and code: + +
    +
- **Depth estimation** - How to load and use the depth heads in combination with a matching backbone via mmcv
- **Semantic segmentation** - How to load and use the segmentation heads in combination with a matching backbone via mmcv, and also how to load and use the Mask2Former-based segmentation model trained on ADE20K
+ +## License + +DINOv2 code and model weights are released under the Apache License 2.0. See [LICENSE](LICENSE) for additional details. + +## Contributing + +See [contributing](CONTRIBUTING.md) and the [code of conduct](CODE_OF_CONDUCT.md). + +## Citing DINOv2 + +If you find this repository useful, please consider giving a star :star: and citation :t-rex:: + +``` +@misc{oquab2023dinov2, + title={DINOv2: Learning Robust Visual Features without Supervision}, + author={Oquab, Maxime and Darcet, Timothée and Moutakanni, Theo and Vo, Huy V. and Szafraniec, Marc and Khalidov, Vasil and Fernandez, Pierre and Haziza, Daniel and Massa, Francisco and El-Nouby, Alaaeldin and Howes, Russell and Huang, Po-Yao and Xu, Hu and Sharma, Vasu and Li, Shang-Wen and Galuba, Wojciech and Rabbat, Mike and Assran, Mido and Ballas, Nicolas and Synnaeve, Gabriel and Misra, Ishan and Jegou, Herve and Mairal, Julien and Labatut, Patrick and Joulin, Armand and Bojanowski, Piotr}, + journal={arXiv:2304.07193}, + year={2023} +} +``` + +``` +@misc{darcet2023vitneedreg, + title={Vision Transformers Need Registers}, + author={Darcet, Timothée and Oquab, Maxime and Mairal, Julien and Bojanowski, Piotr}, + journal={arXiv:2309.16588}, + year={2023} +} +``` diff --git a/engine/pose_estimation/dinov2/conda-extras.yaml b/engine/pose_estimation/dinov2/conda-extras.yaml new file mode 100644 index 0000000000000000000000000000000000000000..71574c4d32e31c1e134ffb2102daa86a14867bb8 --- /dev/null +++ b/engine/pose_estimation/dinov2/conda-extras.yaml @@ -0,0 +1,24 @@ +name: dinov2-extras +channels: + - defaults + - pytorch + - nvidia + - xformers + - conda-forge +dependencies: + - python=3.9 + - pytorch::pytorch=2.0.0 + - pytorch::pytorch-cuda=11.7.0 + - pytorch::torchvision=0.15.0 + - omegaconf + - torchmetrics=0.10.3 + - fvcore + - iopath + - xformers::xformers=0.0.18 + - pip + - pip: + - git+https://github.com/facebookincubator/submitit + - --extra-index-url https://pypi.nvidia.com + - cuml-cu11 + - mmcv-full==1.5.0 + - mmsegmentation==0.27.0 diff --git a/engine/pose_estimation/dinov2/conda.yaml b/engine/pose_estimation/dinov2/conda.yaml new file mode 100644 index 0000000000000000000000000000000000000000..35dfc30adc275da51b58ff2340dd1d53d2cb9250 --- /dev/null +++ b/engine/pose_estimation/dinov2/conda.yaml @@ -0,0 +1,22 @@ +name: dinov2 +channels: + - defaults + - pytorch + - nvidia + - xformers + - conda-forge +dependencies: + - python=3.9 + - pytorch::pytorch=2.0.0 + - pytorch::pytorch-cuda=11.7.0 + - pytorch::torchvision=0.15.0 + - omegaconf + - torchmetrics=0.10.3 + - fvcore + - iopath + - xformers::xformers=0.0.18 + - pip + - pip: + - git+https://github.com/facebookincubator/submitit + - --extra-index-url https://pypi.nvidia.com + - cuml-cu11 diff --git a/engine/pose_estimation/dinov2/dinov2/__init__.py b/engine/pose_estimation/dinov2/dinov2/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ae847e46898077fe3d8701b8a181d7b4e3d41cd9 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
+ +__version__ = "0.0.1" diff --git a/engine/pose_estimation/dinov2/dinov2/configs/__init__.py b/engine/pose_estimation/dinov2/dinov2/configs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..68e0830c62ea19649b6cd2361995f6df309d7640 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/__init__.py @@ -0,0 +1,22 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import pathlib + +from omegaconf import OmegaConf + + +def load_config(config_name: str): + config_filename = config_name + ".yaml" + return OmegaConf.load(pathlib.Path(__file__).parent.resolve() / config_filename) + + +dinov2_default_config = load_config("ssl_default_config") + + +def load_and_merge_config(config_name: str): + default_config = OmegaConf.create(dinov2_default_config) + loaded_config = load_config(config_name) + return OmegaConf.merge(default_config, loaded_config) diff --git a/engine/pose_estimation/dinov2/dinov2/configs/eval/vitb14_pretrain.yaml b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitb14_pretrain.yaml new file mode 100644 index 0000000000000000000000000000000000000000..117d0f027ca26cd8ce6c010bb78d5a8fac42c70e --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitb14_pretrain.yaml @@ -0,0 +1,6 @@ +student: + arch: vit_base + patch_size: 14 +crops: + global_crops_size: 518 # this is to set up the position embeddings properly + local_crops_size: 98 \ No newline at end of file diff --git a/engine/pose_estimation/dinov2/dinov2/configs/eval/vitb14_reg4_pretrain.yaml b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitb14_reg4_pretrain.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d53edc04a0761b4b35c147d63e04d55c90092c8f --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitb14_reg4_pretrain.yaml @@ -0,0 +1,9 @@ +student: + arch: vit_base + patch_size: 14 + num_register_tokens: 4 + interpolate_antialias: true + interpolate_offset: 0.0 +crops: + global_crops_size: 518 # this is to set up the position embeddings properly + local_crops_size: 98 \ No newline at end of file diff --git a/engine/pose_estimation/dinov2/dinov2/configs/eval/vitg14_pretrain.yaml b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitg14_pretrain.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a96dd5b117b4d59ee210b65037821f1b3e3f16e3 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitg14_pretrain.yaml @@ -0,0 +1,7 @@ +student: + arch: vit_giant2 + patch_size: 14 + ffn_layer: swiglufused +crops: + global_crops_size: 518 # this is to set up the position embeddings properly + local_crops_size: 98 \ No newline at end of file diff --git a/engine/pose_estimation/dinov2/dinov2/configs/eval/vitg14_reg4_pretrain.yaml b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitg14_reg4_pretrain.yaml new file mode 100644 index 0000000000000000000000000000000000000000..15948f8589ea0a6e04717453eb88c18388e7f1b2 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitg14_reg4_pretrain.yaml @@ -0,0 +1,10 @@ +student: + arch: vit_giant2 + patch_size: 14 + ffn_layer: swiglufused + num_register_tokens: 4 + interpolate_antialias: true + interpolate_offset: 0.0 +crops: + global_crops_size: 518 # this is to set up the position embeddings properly + local_crops_size: 98 \ No newline at end of file diff --git 
a/engine/pose_estimation/dinov2/dinov2/configs/eval/vitl14_pretrain.yaml b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitl14_pretrain.yaml new file mode 100644 index 0000000000000000000000000000000000000000..7a984548bd034f762d455419d7193917fa462dd8 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitl14_pretrain.yaml @@ -0,0 +1,6 @@ +student: + arch: vit_large + patch_size: 14 +crops: + global_crops_size: 518 # this is to set up the position embeddings properly + local_crops_size: 98 \ No newline at end of file diff --git a/engine/pose_estimation/dinov2/dinov2/configs/eval/vitl14_reg4_pretrain.yaml b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitl14_reg4_pretrain.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0e2bc4e7b24b1a64d0369a24927996d0f184e283 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/eval/vitl14_reg4_pretrain.yaml @@ -0,0 +1,9 @@ +student: + arch: vit_large + patch_size: 14 + num_register_tokens: 4 + interpolate_antialias: true + interpolate_offset: 0.0 +crops: + global_crops_size: 518 # this is to set up the position embeddings properly + local_crops_size: 98 \ No newline at end of file diff --git a/engine/pose_estimation/dinov2/dinov2/configs/eval/vits14_pretrain.yaml b/engine/pose_estimation/dinov2/dinov2/configs/eval/vits14_pretrain.yaml new file mode 100644 index 0000000000000000000000000000000000000000..afbdb4ba14f1c97130a25b579360f4d817cda495 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/eval/vits14_pretrain.yaml @@ -0,0 +1,6 @@ +student: + arch: vit_small + patch_size: 14 +crops: + global_crops_size: 518 # this is to set up the position embeddings properly + local_crops_size: 98 \ No newline at end of file diff --git a/engine/pose_estimation/dinov2/dinov2/configs/eval/vits14_reg4_pretrain.yaml b/engine/pose_estimation/dinov2/dinov2/configs/eval/vits14_reg4_pretrain.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d25fd638389bfba9220792302dc9dbf5d9a2406a --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/eval/vits14_reg4_pretrain.yaml @@ -0,0 +1,9 @@ +student: + arch: vit_small + patch_size: 14 + num_register_tokens: 4 + interpolate_antialias: true + interpolate_offset: 0.0 +crops: + global_crops_size: 518 # this is to set up the position embeddings properly + local_crops_size: 98 \ No newline at end of file diff --git a/engine/pose_estimation/dinov2/dinov2/configs/ssl_default_config.yaml b/engine/pose_estimation/dinov2/dinov2/configs/ssl_default_config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ccaae1c3174b21bcaf6e803dc861492261e5abe1 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/ssl_default_config.yaml @@ -0,0 +1,118 @@ +MODEL: + WEIGHTS: '' +compute_precision: + grad_scaler: true + teacher: + backbone: + sharding_strategy: SHARD_GRAD_OP + mixed_precision: + param_dtype: fp16 + reduce_dtype: fp16 + buffer_dtype: fp32 + dino_head: + sharding_strategy: SHARD_GRAD_OP + mixed_precision: + param_dtype: fp16 + reduce_dtype: fp16 + buffer_dtype: fp32 + ibot_head: + sharding_strategy: SHARD_GRAD_OP + mixed_precision: + param_dtype: fp16 + reduce_dtype: fp16 + buffer_dtype: fp32 + student: + backbone: + sharding_strategy: SHARD_GRAD_OP + mixed_precision: + param_dtype: fp16 + reduce_dtype: fp16 + buffer_dtype: fp32 + dino_head: + sharding_strategy: SHARD_GRAD_OP + mixed_precision: + param_dtype: fp16 + reduce_dtype: fp32 + buffer_dtype: fp32 + ibot_head: + sharding_strategy: SHARD_GRAD_OP + 
mixed_precision: + param_dtype: fp16 + reduce_dtype: fp32 + buffer_dtype: fp32 +dino: + loss_weight: 1.0 + head_n_prototypes: 65536 + head_bottleneck_dim: 256 + head_nlayers: 3 + head_hidden_dim: 2048 + koleo_loss_weight: 0.1 +ibot: + loss_weight: 1.0 + mask_sample_probability: 0.5 + mask_ratio_min_max: + - 0.1 + - 0.5 + separate_head: false + head_n_prototypes: 65536 + head_bottleneck_dim: 256 + head_nlayers: 3 + head_hidden_dim: 2048 +train: + batch_size_per_gpu: 64 + dataset_path: ImageNet:split=TRAIN + output_dir: . + saveckp_freq: 20 + seed: 0 + num_workers: 10 + OFFICIAL_EPOCH_LENGTH: 1250 + cache_dataset: true + centering: "centering" # or "sinkhorn_knopp" +student: + arch: vit_large + patch_size: 16 + drop_path_rate: 0.3 + layerscale: 1.0e-05 + drop_path_uniform: true + pretrained_weights: '' + ffn_layer: "mlp" + block_chunks: 0 + qkv_bias: true + proj_bias: true + ffn_bias: true + num_register_tokens: 0 + interpolate_antialias: false + interpolate_offset: 0.1 +teacher: + momentum_teacher: 0.992 + final_momentum_teacher: 1 + warmup_teacher_temp: 0.04 + teacher_temp: 0.07 + warmup_teacher_temp_epochs: 30 +optim: + epochs: 100 + weight_decay: 0.04 + weight_decay_end: 0.4 + base_lr: 0.004 # learning rate for a batch size of 1024 + lr: 0. # will be set after applying scaling rule + warmup_epochs: 10 + min_lr: 1.0e-06 + clip_grad: 3.0 + freeze_last_layer_epochs: 1 + scaling_rule: sqrt_wrt_1024 + patch_embed_lr_mult: 0.2 + layerwise_decay: 0.9 + adamw_beta1: 0.9 + adamw_beta2: 0.999 +crops: + global_crops_scale: + - 0.32 + - 1.0 + local_crops_number: 8 + local_crops_scale: + - 0.05 + - 0.32 + global_crops_size: 224 + local_crops_size: 96 +evaluation: + eval_period_iterations: 12500 diff --git a/engine/pose_estimation/dinov2/dinov2/configs/train/vitg14.yaml b/engine/pose_estimation/dinov2/dinov2/configs/train/vitg14.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d05cf0d59e07ac6e4a2b0f9bdcb6131d7c508962 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/train/vitg14.yaml @@ -0,0 +1,26 @@ +dino: + head_n_prototypes: 131072 + head_bottleneck_dim: 384 +ibot: + separate_head: true + head_n_prototypes: 131072 +train: + batch_size_per_gpu: 12 + dataset_path: ImageNet22k + centering: sinkhorn_knopp +student: + arch: vit_giant2 + patch_size: 14 + drop_path_rate: 0.4 + ffn_layer: swiglufused + block_chunks: 4 +teacher: + momentum_teacher: 0.994 +optim: + epochs: 500 + weight_decay_end: 0.2 + base_lr: 2.0e-04 # learning rate for a batch size of 1024 + warmup_epochs: 80 + layerwise_decay: 1.0 +crops: + local_crops_size: 98 \ No newline at end of file diff --git a/engine/pose_estimation/dinov2/dinov2/configs/train/vitl14.yaml b/engine/pose_estimation/dinov2/dinov2/configs/train/vitl14.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d9b491dcc6a522c71328fc2933dd0501123c8f6b --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/train/vitl14.yaml @@ -0,0 +1,26 @@ +dino: + head_n_prototypes: 131072 + head_bottleneck_dim: 384 +ibot: + separate_head: true + head_n_prototypes: 131072 +train: + batch_size_per_gpu: 32 + dataset_path: ImageNet22k + centering: sinkhorn_knopp +student: + arch: vit_large + patch_size: 14 + drop_path_rate: 0.4 + ffn_layer: swiglufused + block_chunks: 4 +teacher: + momentum_teacher: 0.994 +optim: + epochs: 500 + weight_decay_end: 0.2 + base_lr: 2.0e-04 # learning rate for a batch size of 1024 + warmup_epochs: 80 + layerwise_decay: 1.0 +crops: + local_crops_size: 98 \ No newline at end of file diff --git 
a/engine/pose_estimation/dinov2/dinov2/configs/train/vitl16_short.yaml b/engine/pose_estimation/dinov2/dinov2/configs/train/vitl16_short.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3e7e72864c92175a1354142ac1d64da8070d1e5e --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/configs/train/vitl16_short.yaml @@ -0,0 +1,6 @@ +# this corresponds to the default config +train: + dataset_path: ImageNet:split=TRAIN + batch_size_per_gpu: 64 +student: + block_chunks: 4 diff --git a/engine/pose_estimation/dinov2/dinov2/distributed/__init__.py b/engine/pose_estimation/dinov2/dinov2/distributed/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..23226f4536bf5acf4ffac242e9903d92863b246d --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/distributed/__init__.py @@ -0,0 +1,270 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import os +import random +import re +import socket +from typing import Dict, List + +import torch +import torch.distributed as dist + +_LOCAL_RANK = -1 +_LOCAL_WORLD_SIZE = -1 + + +def is_enabled() -> bool: + """ + Returns: + True if distributed training is enabled + """ + return dist.is_available() and dist.is_initialized() + + +def get_global_size() -> int: + """ + Returns: + The number of processes in the process group + """ + return dist.get_world_size() if is_enabled() else 1 + + +def get_global_rank() -> int: + """ + Returns: + The rank of the current process within the global process group. + """ + return dist.get_rank() if is_enabled() else 0 + + +def get_local_rank() -> int: + """ + Returns: + The rank of the current process within the local (per-machine) process group. + """ + if not is_enabled(): + return 0 + assert 0 <= _LOCAL_RANK < _LOCAL_WORLD_SIZE + return _LOCAL_RANK + + +def get_local_size() -> int: + """ + Returns: + The size of the per-machine process group, + i.e. the number of processes per machine. + """ + if not is_enabled(): + return 1 + assert 0 <= _LOCAL_RANK < _LOCAL_WORLD_SIZE + return _LOCAL_WORLD_SIZE + + +def is_main_process() -> bool: + """ + Returns: + True if the current process is the main one. + """ + return get_global_rank() == 0 + + +def _restrict_print_to_main_process() -> None: + """ + This function disables printing when not in the main process + """ + import builtins as __builtin__ + + builtin_print = __builtin__.print + + def print(*args, **kwargs): + force = kwargs.pop("force", False) + if is_main_process() or force: + builtin_print(*args, **kwargs) + + __builtin__.print = print + + +def _get_master_port(seed: int = 0) -> int: + MIN_MASTER_PORT, MAX_MASTER_PORT = (20_000, 60_000) + + master_port_str = os.environ.get("MASTER_PORT") + if master_port_str is None: + rng = random.Random(seed) + return rng.randint(MIN_MASTER_PORT, MAX_MASTER_PORT) + + return int(master_port_str) + + +def _get_available_port() -> int: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + # A "" host address means INADDR_ANY i.e. binding to all interfaces. + # Note this is not compatible with IPv6. 
+ s.bind(("", 0)) + port = s.getsockname()[1] + return port + + +_TORCH_DISTRIBUTED_ENV_VARS = ( + "MASTER_ADDR", + "MASTER_PORT", + "RANK", + "WORLD_SIZE", + "LOCAL_RANK", + "LOCAL_WORLD_SIZE", +) + + +def _collect_env_vars() -> Dict[str, str]: + return {env_var: os.environ[env_var] for env_var in _TORCH_DISTRIBUTED_ENV_VARS if env_var in os.environ} + + +def _is_slurm_job_process() -> bool: + return "SLURM_JOB_ID" in os.environ + + +def _parse_slurm_node_list(s: str) -> List[str]: + nodes = [] + # Extract "hostname", "hostname[1-2,3,4-5]," substrings + p = re.compile(r"(([^\[]+)(?:\[([^\]]+)\])?),?") + for m in p.finditer(s): + prefix, suffixes = s[m.start(2) : m.end(2)], s[m.start(3) : m.end(3)] + for suffix in suffixes.split(","): + span = suffix.split("-") + if len(span) == 1: + nodes.append(prefix + suffix) + else: + width = len(span[0]) + start, end = int(span[0]), int(span[1]) + 1 + nodes.extend([prefix + f"{i:0{width}}" for i in range(start, end)]) + return nodes + + +def _check_env_variable(key: str, new_value: str): + # Only check for difference with preset environment variables + if key in os.environ and os.environ[key] != new_value: + raise RuntimeError(f"Cannot export environment variables as {key} is already set") + + +class _TorchDistributedEnvironment: + def __init__(self): + self.master_addr = "127.0.0.1" + self.master_port = 0 + self.rank = -1 + self.world_size = -1 + self.local_rank = -1 + self.local_world_size = -1 + + if _is_slurm_job_process(): + return self._set_from_slurm_env() + + env_vars = _collect_env_vars() + if not env_vars: + # Environment is not set + pass + elif len(env_vars) == len(_TORCH_DISTRIBUTED_ENV_VARS): + # Environment is fully set + return self._set_from_preset_env() + else: + # Environment is partially set + collected_env_vars = ", ".join(env_vars.keys()) + raise RuntimeError(f"Partially set environment: {collected_env_vars}") + + if torch.cuda.device_count() > 0: + return self._set_from_local() + + raise RuntimeError("Can't initialize PyTorch distributed environment") + + # Slurm job created with sbatch, submitit, etc... + def _set_from_slurm_env(self): + # logger.info("Initialization from Slurm environment") + job_id = int(os.environ["SLURM_JOB_ID"]) + node_count = int(os.environ["SLURM_JOB_NUM_NODES"]) + nodes = _parse_slurm_node_list(os.environ["SLURM_JOB_NODELIST"]) + assert len(nodes) == node_count + + self.master_addr = nodes[0] + self.master_port = _get_master_port(seed=job_id) + self.rank = int(os.environ["SLURM_PROCID"]) + self.world_size = int(os.environ["SLURM_NTASKS"]) + assert self.rank < self.world_size + self.local_rank = int(os.environ["SLURM_LOCALID"]) + self.local_world_size = self.world_size // node_count + assert self.local_rank < self.local_world_size + + # Single node job with preset environment (i.e. torchrun) + def _set_from_preset_env(self): + # logger.info("Initialization from preset environment") + self.master_addr = os.environ["MASTER_ADDR"] + self.master_port = os.environ["MASTER_PORT"] + self.rank = int(os.environ["RANK"]) + self.world_size = int(os.environ["WORLD_SIZE"]) + assert self.rank < self.world_size + self.local_rank = int(os.environ["LOCAL_RANK"]) + self.local_world_size = int(os.environ["LOCAL_WORLD_SIZE"]) + assert self.local_rank < self.local_world_size + + # Single node and GPU job (i.e. 
local script run) + def _set_from_local(self): + # logger.info("Initialization from local") + self.master_addr = "127.0.0.1" + self.master_port = _get_available_port() + self.rank = 0 + self.world_size = 1 + self.local_rank = 0 + self.local_world_size = 1 + + def export(self, *, overwrite: bool) -> "_TorchDistributedEnvironment": + # See the "Environment variable initialization" section from + # https://pytorch.org/docs/stable/distributed.html for the complete list of + # environment variables required for the env:// initialization method. + env_vars = { + "MASTER_ADDR": self.master_addr, + "MASTER_PORT": str(self.master_port), + "RANK": str(self.rank), + "WORLD_SIZE": str(self.world_size), + "LOCAL_RANK": str(self.local_rank), + "LOCAL_WORLD_SIZE": str(self.local_world_size), + } + if not overwrite: + for k, v in env_vars.items(): + _check_env_variable(k, v) + + os.environ.update(env_vars) + return self + + +def enable(*, set_cuda_current_device: bool = True, overwrite: bool = False, allow_nccl_timeout: bool = False): + """Enable distributed mode + + Args: + set_cuda_current_device: If True, call torch.cuda.set_device() to set the + current PyTorch CUDA device to the one matching the local rank. + overwrite: If True, overwrites already set variables. Else fails. + """ + + global _LOCAL_RANK, _LOCAL_WORLD_SIZE + if _LOCAL_RANK >= 0 or _LOCAL_WORLD_SIZE >= 0: + raise RuntimeError("Distributed mode has already been enabled") + torch_env = _TorchDistributedEnvironment() + torch_env.export(overwrite=overwrite) + + if set_cuda_current_device: + torch.cuda.set_device(torch_env.local_rank) + + if allow_nccl_timeout: + # This allows to use torch distributed timeout in a NCCL backend + key, value = "NCCL_ASYNC_ERROR_HANDLING", "1" + if not overwrite: + _check_env_variable(key, value) + os.environ[key] = value + + dist.init_process_group(backend="nccl") + dist.barrier() + + # Finalize setup + _LOCAL_RANK = torch_env.local_rank + _LOCAL_WORLD_SIZE = torch_env.local_world_size + _restrict_print_to_main_process() diff --git a/engine/pose_estimation/dinov2/dinov2/eval/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b88da6bf80be92af00b72dfdb0a806fa64a7a2d9 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. diff --git a/engine/pose_estimation/dinov2/dinov2/eval/depth/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/depth/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b88da6bf80be92af00b72dfdb0a806fa64a7a2d9 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/depth/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. diff --git a/engine/pose_estimation/dinov2/dinov2/eval/depth/ops/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/depth/ops/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..78181c29581a281b5f42cf12078636aaeb43b5a5 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/depth/ops/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .wrappers import resize diff --git a/engine/pose_estimation/dinov2/dinov2/eval/depth/ops/wrappers.py b/engine/pose_estimation/dinov2/dinov2/eval/depth/ops/wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..15880ee0cb7652d4b41c489b927bf6a156b40e5e --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/depth/ops/wrappers.py @@ -0,0 +1,28 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import warnings + +import torch.nn.functional as F + + +def resize(input, size=None, scale_factor=None, mode="nearest", align_corners=None, warning=False): + if warning: + if size is not None and align_corners: + input_h, input_w = tuple(int(x) for x in input.shape[2:]) + output_h, output_w = tuple(int(x) for x in size) + if output_h > input_h or output_w > output_h: + if ( + (output_h > 1 and output_w > 1 and input_h > 1 and input_w > 1) + and (output_h - 1) % (input_h - 1) + and (output_w - 1) % (input_w - 1) + ): + warnings.warn( + f"When align_corners={align_corners}, " + "the output would more aligned if " + f"input size {(input_h, input_w)} is `x+1` and " + f"out size {(output_h, output_w)} is `nx+1`" + ) + return F.interpolate(input, size, scale_factor, mode, align_corners) diff --git a/engine/pose_estimation/dinov2/dinov2/eval/knn.py b/engine/pose_estimation/dinov2/dinov2/eval/knn.py new file mode 100644 index 0000000000000000000000000000000000000000..f3a4845da1313a6db6b8345bb9a98230fcd24acf --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/knn.py @@ -0,0 +1,404 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import argparse +from functools import partial +import json +import logging +import os +import sys +from typing import List, Optional + +import torch +from torch.nn.functional import one_hot, softmax + +import dinov2.distributed as distributed +from dinov2.data import SamplerType, make_data_loader, make_dataset +from dinov2.data.transforms import make_classification_eval_transform +from dinov2.eval.metrics import AccuracyAveraging, build_topk_accuracy_metric +from dinov2.eval.setup import get_args_parser as get_setup_args_parser +from dinov2.eval.setup import setup_and_build_model +from dinov2.eval.utils import ModelWithNormalize, evaluate, extract_features + + +logger = logging.getLogger("dinov2") + + +def get_args_parser( + description: Optional[str] = None, + parents: Optional[List[argparse.ArgumentParser]] = None, + add_help: bool = True, +): + parents = parents or [] + setup_args_parser = get_setup_args_parser(parents=parents, add_help=False) + parents = [setup_args_parser] + parser = argparse.ArgumentParser( + description=description, + parents=parents, + add_help=add_help, + ) + parser.add_argument( + "--train-dataset", + dest="train_dataset_str", + type=str, + help="Training dataset", + ) + parser.add_argument( + "--val-dataset", + dest="val_dataset_str", + type=str, + help="Validation dataset", + ) + parser.add_argument( + "--nb_knn", + nargs="+", + type=int, + help="Number of NN to use. 
20 is usually working the best.", + ) + parser.add_argument( + "--temperature", + type=float, + help="Temperature used in the voting coefficient", + ) + parser.add_argument( + "--gather-on-cpu", + action="store_true", + help="Whether to gather the train features on cpu, slower" + "but useful to avoid OOM for large datasets (e.g. ImageNet22k).", + ) + parser.add_argument( + "--batch-size", + type=int, + help="Batch size.", + ) + parser.add_argument( + "--n-per-class-list", + nargs="+", + type=int, + help="Number to take per class", + ) + parser.add_argument( + "--n-tries", + type=int, + help="Number of tries", + ) + parser.set_defaults( + train_dataset_str="ImageNet:split=TRAIN", + val_dataset_str="ImageNet:split=VAL", + nb_knn=[10, 20, 100, 200], + temperature=0.07, + batch_size=256, + n_per_class_list=[-1], + n_tries=1, + ) + return parser + + +class KnnModule(torch.nn.Module): + """ + Gets knn of test features from all processes on a chunk of the train features + + Each rank gets a chunk of the train features as well as a chunk of the test features. + In `compute_neighbors`, for each rank one after the other, its chunk of test features + is sent to all devices, partial knns are computed with each chunk of train features + then collated back on the original device. + """ + + def __init__(self, train_features, train_labels, nb_knn, T, device, num_classes=1000): + super().__init__() + + self.global_rank = distributed.get_global_rank() + self.global_size = distributed.get_global_size() + + self.device = device + self.train_features_rank_T = train_features.chunk(self.global_size)[self.global_rank].T.to(self.device) + self.candidates = train_labels.chunk(self.global_size)[self.global_rank].view(1, -1).to(self.device) + + self.nb_knn = nb_knn + self.max_k = max(self.nb_knn) + self.T = T + self.num_classes = num_classes + + def _get_knn_sims_and_labels(self, similarity, train_labels): + topk_sims, indices = similarity.topk(self.max_k, largest=True, sorted=True) + neighbors_labels = torch.gather(train_labels, 1, indices) + return topk_sims, neighbors_labels + + def _similarity_for_rank(self, features_rank, source_rank): + # Send the features from `source_rank` to all ranks + broadcast_shape = torch.tensor(features_rank.shape).to(self.device) + torch.distributed.broadcast(broadcast_shape, source_rank) + + broadcasted = features_rank + if self.global_rank != source_rank: + broadcasted = torch.zeros(*broadcast_shape, dtype=features_rank.dtype, device=self.device) + torch.distributed.broadcast(broadcasted, source_rank) + + # Compute the neighbors for `source_rank` among `train_features_rank_T` + similarity_rank = torch.mm(broadcasted, self.train_features_rank_T) + candidate_labels = self.candidates.expand(len(similarity_rank), -1) + return self._get_knn_sims_and_labels(similarity_rank, candidate_labels) + + def _gather_all_knn_for_rank(self, topk_sims, neighbors_labels, target_rank): + # Gather all neighbors for `target_rank` + topk_sims_rank = retrieved_rank = None + if self.global_rank == target_rank: + topk_sims_rank = [torch.zeros_like(topk_sims) for _ in range(self.global_size)] + retrieved_rank = [torch.zeros_like(neighbors_labels) for _ in range(self.global_size)] + + torch.distributed.gather(topk_sims, topk_sims_rank, dst=target_rank) + torch.distributed.gather(neighbors_labels, retrieved_rank, dst=target_rank) + + if self.global_rank == target_rank: + # Perform a second top-k on the k * global_size retrieved neighbors + topk_sims_rank = torch.cat(topk_sims_rank, dim=1) + retrieved_rank = 
torch.cat(retrieved_rank, dim=1) + results = self._get_knn_sims_and_labels(topk_sims_rank, retrieved_rank) + return results + return None + + def compute_neighbors(self, features_rank): + for rank in range(self.global_size): + topk_sims, neighbors_labels = self._similarity_for_rank(features_rank, rank) + results = self._gather_all_knn_for_rank(topk_sims, neighbors_labels, rank) + if results is not None: + topk_sims_rank, neighbors_labels_rank = results + return topk_sims_rank, neighbors_labels_rank + + def forward(self, features_rank): + """ + Compute the results on all values of `self.nb_knn` neighbors from the full `self.max_k` + """ + assert all(k <= self.max_k for k in self.nb_knn) + + topk_sims, neighbors_labels = self.compute_neighbors(features_rank) + batch_size = neighbors_labels.shape[0] + topk_sims_transform = softmax(topk_sims / self.T, 1) + matmul = torch.mul( + one_hot(neighbors_labels, num_classes=self.num_classes), + topk_sims_transform.view(batch_size, -1, 1), + ) + probas_for_k = {k: torch.sum(matmul[:, :k, :], 1) for k in self.nb_knn} + return probas_for_k + + +class DictKeysModule(torch.nn.Module): + def __init__(self, keys): + super().__init__() + self.keys = keys + + def forward(self, features_dict, targets): + for k in self.keys: + features_dict = features_dict[k] + return {"preds": features_dict, "target": targets} + + +def create_module_dict(*, module, n_per_class_list, n_tries, nb_knn, train_features, train_labels): + modules = {} + mapping = create_class_indices_mapping(train_labels) + for npc in n_per_class_list: + if npc < 0: # Only one try needed when using the full data + full_module = module( + train_features=train_features, + train_labels=train_labels, + nb_knn=nb_knn, + ) + modules["full"] = ModuleDictWithForward({"1": full_module}) + continue + all_tries = {} + for t in range(n_tries): + final_indices = filter_train(mapping, npc, seed=t) + k_list = list(set(nb_knn + [npc])) + k_list = sorted([el for el in k_list if el <= npc]) + all_tries[str(t)] = module( + train_features=train_features[final_indices], + train_labels=train_labels[final_indices], + nb_knn=k_list, + ) + modules[f"{npc} per class"] = ModuleDictWithForward(all_tries) + + return ModuleDictWithForward(modules) + + +def filter_train(mapping, n_per_class, seed): + torch.manual_seed(seed) + final_indices = [] + for k in mapping.keys(): + index = torch.randperm(len(mapping[k]))[:n_per_class] + final_indices.append(mapping[k][index]) + return torch.cat(final_indices).squeeze() + + +def create_class_indices_mapping(labels): + unique_labels, inverse = torch.unique(labels, return_inverse=True) + mapping = {unique_labels[i]: (inverse == i).nonzero() for i in range(len(unique_labels))} + return mapping + + +class ModuleDictWithForward(torch.nn.ModuleDict): + def forward(self, *args, **kwargs): + return {k: module(*args, **kwargs) for k, module in self._modules.items()} + + +def eval_knn( + model, + train_dataset, + val_dataset, + accuracy_averaging, + nb_knn, + temperature, + batch_size, + num_workers, + gather_on_cpu, + n_per_class_list=[-1], + n_tries=1, +): + model = ModelWithNormalize(model) + + logger.info("Extracting features for train set...") + train_features, train_labels = extract_features( + model, train_dataset, batch_size, num_workers, gather_on_cpu=gather_on_cpu + ) + logger.info(f"Train features created, shape {train_features.shape}.") + + val_dataloader = make_data_loader( + dataset=val_dataset, + batch_size=batch_size, + num_workers=num_workers, + sampler_type=SamplerType.DISTRIBUTED, + 
drop_last=False, + shuffle=False, + persistent_workers=True, + ) + num_classes = train_labels.max() + 1 + metric_collection = build_topk_accuracy_metric(accuracy_averaging, num_classes=num_classes) + + device = torch.cuda.current_device() + partial_module = partial(KnnModule, T=temperature, device=device, num_classes=num_classes) + knn_module_dict = create_module_dict( + module=partial_module, + n_per_class_list=n_per_class_list, + n_tries=n_tries, + nb_knn=nb_knn, + train_features=train_features, + train_labels=train_labels, + ) + postprocessors, metrics = {}, {} + for n_per_class, knn_module in knn_module_dict.items(): + for t, knn_try in knn_module.items(): + postprocessors = { + **postprocessors, + **{(n_per_class, t, k): DictKeysModule([n_per_class, t, k]) for k in knn_try.nb_knn}, + } + metrics = {**metrics, **{(n_per_class, t, k): metric_collection.clone() for k in knn_try.nb_knn}} + model_with_knn = torch.nn.Sequential(model, knn_module_dict) + + # ============ evaluation ... ============ + logger.info("Start the k-NN classification.") + _, results_dict = evaluate(model_with_knn, val_dataloader, postprocessors, metrics, device) + + # Averaging the results over the n tries for each value of n_per_class + for n_per_class, knn_module in knn_module_dict.items(): + first_try = list(knn_module.keys())[0] + k_list = knn_module[first_try].nb_knn + for k in k_list: + keys = results_dict[(n_per_class, first_try, k)].keys() # keys are e.g. `top-1` and `top-5` + results_dict[(n_per_class, k)] = { + key: torch.mean(torch.stack([results_dict[(n_per_class, t, k)][key] for t in knn_module.keys()])) + for key in keys + } + for t in knn_module.keys(): + del results_dict[(n_per_class, t, k)] + + return results_dict + + +def eval_knn_with_model( + model, + output_dir, + train_dataset_str="ImageNet:split=TRAIN", + val_dataset_str="ImageNet:split=VAL", + nb_knn=(10, 20, 100, 200), + temperature=0.07, + autocast_dtype=torch.float, + accuracy_averaging=AccuracyAveraging.MEAN_ACCURACY, + transform=None, + gather_on_cpu=False, + batch_size=256, + num_workers=5, + n_per_class_list=[-1], + n_tries=1, +): + transform = transform or make_classification_eval_transform() + + train_dataset = make_dataset( + dataset_str=train_dataset_str, + transform=transform, + ) + val_dataset = make_dataset( + dataset_str=val_dataset_str, + transform=transform, + ) + + with torch.cuda.amp.autocast(dtype=autocast_dtype): + results_dict_knn = eval_knn( + model=model, + train_dataset=train_dataset, + val_dataset=val_dataset, + accuracy_averaging=accuracy_averaging, + nb_knn=nb_knn, + temperature=temperature, + batch_size=batch_size, + num_workers=num_workers, + gather_on_cpu=gather_on_cpu, + n_per_class_list=n_per_class_list, + n_tries=n_tries, + ) + + results_dict = {} + if distributed.is_main_process(): + for knn_ in results_dict_knn.keys(): + top1 = results_dict_knn[knn_]["top-1"].item() * 100.0 + top5 = results_dict_knn[knn_]["top-5"].item() * 100.0 + results_dict[f"{knn_} Top 1"] = top1 + results_dict[f"{knn_} Top 5"] = top5 + logger.info(f"{knn_} classifier result: Top1: {top1:.2f} Top5: {top5:.2f}") + + metrics_file_path = os.path.join(output_dir, "results_eval_knn.json") + with open(metrics_file_path, "a") as f: + for k, v in results_dict.items(): + f.write(json.dumps({k: v}) + "\n") + + if distributed.is_enabled(): + torch.distributed.barrier() + return results_dict + + +def main(args): + model, autocast_dtype = setup_and_build_model(args) + eval_knn_with_model( + model=model, + output_dir=args.output_dir, + 
train_dataset_str=args.train_dataset_str, + val_dataset_str=args.val_dataset_str, + nb_knn=args.nb_knn, + temperature=args.temperature, + autocast_dtype=autocast_dtype, + accuracy_averaging=AccuracyAveraging.MEAN_ACCURACY, + transform=None, + gather_on_cpu=args.gather_on_cpu, + batch_size=args.batch_size, + num_workers=5, + n_per_class_list=args.n_per_class_list, + n_tries=args.n_tries, + ) + return 0 + + +if __name__ == "__main__": + description = "DINOv2 k-NN evaluation" + args_parser = get_args_parser(description=description) + args = args_parser.parse_args() + sys.exit(main(args)) diff --git a/engine/pose_estimation/dinov2/dinov2/eval/linear.py b/engine/pose_estimation/dinov2/dinov2/eval/linear.py new file mode 100644 index 0000000000000000000000000000000000000000..1bd4c5de5a041be8a188f007257d1e91b6d6921e --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/linear.py @@ -0,0 +1,625 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import argparse +from functools import partial +import json +import logging +import os +import sys +from typing import List, Optional + +import numpy as np +import torch +import torch.nn as nn +from torch.nn.parallel import DistributedDataParallel +from fvcore.common.checkpoint import Checkpointer, PeriodicCheckpointer + +from dinov2.data import SamplerType, make_data_loader, make_dataset +from dinov2.data.transforms import make_classification_eval_transform, make_classification_train_transform +import dinov2.distributed as distributed +from dinov2.eval.metrics import MetricType, build_metric +from dinov2.eval.setup import get_args_parser as get_setup_args_parser +from dinov2.eval.setup import setup_and_build_model +from dinov2.eval.utils import ModelWithIntermediateLayers, evaluate +from dinov2.logging import MetricLogger + + +logger = logging.getLogger("dinov2") + + +def get_args_parser( + description: Optional[str] = None, + parents: Optional[List[argparse.ArgumentParser]] = None, + add_help: bool = True, +): + parents = parents or [] + setup_args_parser = get_setup_args_parser(parents=parents, add_help=False) + parents = [setup_args_parser] + parser = argparse.ArgumentParser( + description=description, + parents=parents, + add_help=add_help, + ) + parser.add_argument( + "--train-dataset", + dest="train_dataset_str", + type=str, + help="Training dataset", + ) + parser.add_argument( + "--val-dataset", + dest="val_dataset_str", + type=str, + help="Validation dataset", + ) + parser.add_argument( + "--test-datasets", + dest="test_dataset_strs", + type=str, + nargs="+", + help="Test datasets, none to reuse the validation dataset", + ) + parser.add_argument( + "--epochs", + type=int, + help="Number of training epochs", + ) + parser.add_argument( + "--batch-size", + type=int, + help="Batch Size (per GPU)", + ) + parser.add_argument( + "--num-workers", + type=int, + help="Number de Workers", + ) + parser.add_argument( + "--epoch-length", + type=int, + help="Length of an epoch in number of iterations", + ) + parser.add_argument( + "--save-checkpoint-frequency", + type=int, + help="Number of epochs between two named checkpoint saves.", + ) + parser.add_argument( + "--eval-period-iterations", + type=int, + help="Number of iterations between two evaluations.", + ) + parser.add_argument( + "--learning-rates", + nargs="+", + type=float, + help="Learning rates to grid search.", + ) + parser.add_argument( + 
"--no-resume", + action="store_true", + help="Whether to not resume from existing checkpoints", + ) + parser.add_argument( + "--val-metric-type", + type=MetricType, + choices=list(MetricType), + help="Validation metric", + ) + parser.add_argument( + "--test-metric-types", + type=MetricType, + choices=list(MetricType), + nargs="+", + help="Evaluation metric", + ) + parser.add_argument( + "--classifier-fpath", + type=str, + help="Path to a file containing pretrained linear classifiers", + ) + parser.add_argument( + "--val-class-mapping-fpath", + type=str, + help="Path to a file containing a mapping to adjust classifier outputs", + ) + parser.add_argument( + "--test-class-mapping-fpaths", + nargs="+", + type=str, + help="Path to a file containing a mapping to adjust classifier outputs", + ) + parser.set_defaults( + train_dataset_str="ImageNet:split=TRAIN", + val_dataset_str="ImageNet:split=VAL", + test_dataset_strs=None, + epochs=10, + batch_size=128, + num_workers=8, + epoch_length=1250, + save_checkpoint_frequency=20, + eval_period_iterations=1250, + learning_rates=[1e-5, 2e-5, 5e-5, 1e-4, 2e-4, 5e-4, 1e-3, 2e-3, 5e-3, 1e-2, 2e-2, 5e-2, 0.1], + val_metric_type=MetricType.MEAN_ACCURACY, + test_metric_types=None, + classifier_fpath=None, + val_class_mapping_fpath=None, + test_class_mapping_fpaths=[None], + ) + return parser + + +def has_ddp_wrapper(m: nn.Module) -> bool: + return isinstance(m, DistributedDataParallel) + + +def remove_ddp_wrapper(m: nn.Module) -> nn.Module: + return m.module if has_ddp_wrapper(m) else m + + +def _pad_and_collate(batch): + maxlen = max(len(targets) for image, targets in batch) + padded_batch = [ + (image, np.pad(targets, (0, maxlen - len(targets)), constant_values=-1)) for image, targets in batch + ] + return torch.utils.data.default_collate(padded_batch) + + +def create_linear_input(x_tokens_list, use_n_blocks, use_avgpool): + intermediate_output = x_tokens_list[-use_n_blocks:] + output = torch.cat([class_token for _, class_token in intermediate_output], dim=-1) + if use_avgpool: + output = torch.cat( + ( + output, + torch.mean(intermediate_output[-1][0], dim=1), # patch tokens + ), + dim=-1, + ) + output = output.reshape(output.shape[0], -1) + return output.float() + + +class LinearClassifier(nn.Module): + """Linear layer to train on top of frozen features""" + + def __init__(self, out_dim, use_n_blocks, use_avgpool, num_classes=1000): + super().__init__() + self.out_dim = out_dim + self.use_n_blocks = use_n_blocks + self.use_avgpool = use_avgpool + self.num_classes = num_classes + self.linear = nn.Linear(out_dim, num_classes) + self.linear.weight.data.normal_(mean=0.0, std=0.01) + self.linear.bias.data.zero_() + + def forward(self, x_tokens_list): + output = create_linear_input(x_tokens_list, self.use_n_blocks, self.use_avgpool) + return self.linear(output) + + +class AllClassifiers(nn.Module): + def __init__(self, classifiers_dict): + super().__init__() + self.classifiers_dict = nn.ModuleDict() + self.classifiers_dict.update(classifiers_dict) + + def forward(self, inputs): + return {k: v.forward(inputs) for k, v in self.classifiers_dict.items()} + + def __len__(self): + return len(self.classifiers_dict) + + +class LinearPostprocessor(nn.Module): + def __init__(self, linear_classifier, class_mapping=None): + super().__init__() + self.linear_classifier = linear_classifier + self.register_buffer("class_mapping", None if class_mapping is None else torch.LongTensor(class_mapping)) + + def forward(self, samples, targets): + preds = self.linear_classifier(samples) 
+ return { + "preds": preds[:, self.class_mapping] if self.class_mapping is not None else preds, + "target": targets, + } + + +def scale_lr(learning_rates, batch_size): + return learning_rates * (batch_size * distributed.get_global_size()) / 256.0 + + +def setup_linear_classifiers(sample_output, n_last_blocks_list, learning_rates, batch_size, num_classes=1000): + linear_classifiers_dict = nn.ModuleDict() + optim_param_groups = [] + for n in n_last_blocks_list: + for avgpool in [False, True]: + for _lr in learning_rates: + lr = scale_lr(_lr, batch_size) + out_dim = create_linear_input(sample_output, use_n_blocks=n, use_avgpool=avgpool).shape[1] + linear_classifier = LinearClassifier( + out_dim, use_n_blocks=n, use_avgpool=avgpool, num_classes=num_classes + ) + linear_classifier = linear_classifier.cuda() + linear_classifiers_dict[ + f"classifier_{n}_blocks_avgpool_{avgpool}_lr_{lr:.5f}".replace(".", "_") + ] = linear_classifier + optim_param_groups.append({"params": linear_classifier.parameters(), "lr": lr}) + + linear_classifiers = AllClassifiers(linear_classifiers_dict) + if distributed.is_enabled(): + linear_classifiers = nn.parallel.DistributedDataParallel(linear_classifiers) + + return linear_classifiers, optim_param_groups + + +@torch.no_grad() +def evaluate_linear_classifiers( + feature_model, + linear_classifiers, + data_loader, + metric_type, + metrics_file_path, + training_num_classes, + iteration, + prefixstring="", + class_mapping=None, + best_classifier_on_val=None, +): + logger.info("running validation !") + + num_classes = len(class_mapping) if class_mapping is not None else training_num_classes + metric = build_metric(metric_type, num_classes=num_classes) + postprocessors = {k: LinearPostprocessor(v, class_mapping) for k, v in linear_classifiers.classifiers_dict.items()} + metrics = {k: metric.clone() for k in linear_classifiers.classifiers_dict} + + _, results_dict_temp = evaluate( + feature_model, + data_loader, + postprocessors, + metrics, + torch.cuda.current_device(), + ) + + logger.info("") + results_dict = {} + max_accuracy = 0 + best_classifier = "" + for i, (classifier_string, metric) in enumerate(results_dict_temp.items()): + logger.info(f"{prefixstring} -- Classifier: {classifier_string} * {metric}") + if ( + best_classifier_on_val is None and metric["top-1"].item() > max_accuracy + ) or classifier_string == best_classifier_on_val: + max_accuracy = metric["top-1"].item() + best_classifier = classifier_string + + results_dict["best_classifier"] = {"name": best_classifier, "accuracy": max_accuracy} + + logger.info(f"best classifier: {results_dict['best_classifier']}") + + if distributed.is_main_process(): + with open(metrics_file_path, "a") as f: + f.write(f"iter: {iteration}\n") + for k, v in results_dict.items(): + f.write(json.dumps({k: v}) + "\n") + f.write("\n") + + return results_dict + + +def eval_linear( + *, + feature_model, + linear_classifiers, + train_data_loader, + val_data_loader, + metrics_file_path, + optimizer, + scheduler, + output_dir, + max_iter, + checkpoint_period, # In number of iter, creates a new file every period + running_checkpoint_period, # Period to update main checkpoint file + eval_period, + metric_type, + training_num_classes, + resume=True, + classifier_fpath=None, + val_class_mapping=None, +): + checkpointer = Checkpointer(linear_classifiers, output_dir, optimizer=optimizer, scheduler=scheduler) + start_iter = checkpointer.resume_or_load(classifier_fpath or "", resume=resume).get("iteration", -1) + 1 + + periodic_checkpointer = 
PeriodicCheckpointer(checkpointer, checkpoint_period, max_iter=max_iter) + iteration = start_iter + logger.info("Starting training from iteration {}".format(start_iter)) + metric_logger = MetricLogger(delimiter=" ") + header = "Training" + + for data, labels in metric_logger.log_every( + train_data_loader, + 10, + header, + max_iter, + start_iter, + ): + data = data.cuda(non_blocking=True) + labels = labels.cuda(non_blocking=True) + + features = feature_model(data) + outputs = linear_classifiers(features) + + losses = {f"loss_{k}": nn.CrossEntropyLoss()(v, labels) for k, v in outputs.items()} + loss = sum(losses.values()) + + # compute the gradients + optimizer.zero_grad() + loss.backward() + + # step + optimizer.step() + scheduler.step() + + # log + if iteration % 10 == 0: + torch.cuda.synchronize() + metric_logger.update(loss=loss.item()) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + print("lr", optimizer.param_groups[0]["lr"]) + + if iteration - start_iter > 5: + if iteration % running_checkpoint_period == 0: + torch.cuda.synchronize() + if distributed.is_main_process(): + logger.info("Checkpointing running_checkpoint") + periodic_checkpointer.save("running_checkpoint_linear_eval", iteration=iteration) + torch.cuda.synchronize() + periodic_checkpointer.step(iteration) + + if eval_period > 0 and (iteration + 1) % eval_period == 0 and iteration != max_iter - 1: + _ = evaluate_linear_classifiers( + feature_model=feature_model, + linear_classifiers=remove_ddp_wrapper(linear_classifiers), + data_loader=val_data_loader, + metrics_file_path=metrics_file_path, + prefixstring=f"ITER: {iteration}", + metric_type=metric_type, + training_num_classes=training_num_classes, + iteration=iteration, + class_mapping=val_class_mapping, + ) + torch.cuda.synchronize() + + iteration = iteration + 1 + + val_results_dict = evaluate_linear_classifiers( + feature_model=feature_model, + linear_classifiers=remove_ddp_wrapper(linear_classifiers), + data_loader=val_data_loader, + metrics_file_path=metrics_file_path, + metric_type=metric_type, + training_num_classes=training_num_classes, + iteration=iteration, + class_mapping=val_class_mapping, + ) + return val_results_dict, feature_model, linear_classifiers, iteration + + +def make_eval_data_loader(test_dataset_str, batch_size, num_workers, metric_type): + test_dataset = make_dataset( + dataset_str=test_dataset_str, + transform=make_classification_eval_transform(), + ) + test_data_loader = make_data_loader( + dataset=test_dataset, + batch_size=batch_size, + num_workers=num_workers, + sampler_type=SamplerType.DISTRIBUTED, + drop_last=False, + shuffle=False, + persistent_workers=False, + collate_fn=_pad_and_collate if metric_type == MetricType.IMAGENET_REAL_ACCURACY else None, + ) + return test_data_loader + + +def test_on_datasets( + feature_model, + linear_classifiers, + test_dataset_strs, + batch_size, + num_workers, + test_metric_types, + metrics_file_path, + training_num_classes, + iteration, + best_classifier_on_val, + prefixstring="", + test_class_mappings=[None], +): + results_dict = {} + for test_dataset_str, class_mapping, metric_type in zip(test_dataset_strs, test_class_mappings, test_metric_types): + logger.info(f"Testing on {test_dataset_str}") + test_data_loader = make_eval_data_loader(test_dataset_str, batch_size, num_workers, metric_type) + dataset_results_dict = evaluate_linear_classifiers( + feature_model, + remove_ddp_wrapper(linear_classifiers), + test_data_loader, + metric_type, + metrics_file_path, + training_num_classes, + 
iteration, + prefixstring="", + class_mapping=class_mapping, + best_classifier_on_val=best_classifier_on_val, + ) + results_dict[f"{test_dataset_str}_accuracy"] = 100.0 * dataset_results_dict["best_classifier"]["accuracy"] + return results_dict + + +def run_eval_linear( + model, + output_dir, + train_dataset_str, + val_dataset_str, + batch_size, + epochs, + epoch_length, + num_workers, + save_checkpoint_frequency, + eval_period_iterations, + learning_rates, + autocast_dtype, + test_dataset_strs=None, + resume=True, + classifier_fpath=None, + val_class_mapping_fpath=None, + test_class_mapping_fpaths=[None], + val_metric_type=MetricType.MEAN_ACCURACY, + test_metric_types=None, +): + seed = 0 + + if test_dataset_strs is None: + test_dataset_strs = [val_dataset_str] + if test_metric_types is None: + test_metric_types = [val_metric_type] * len(test_dataset_strs) + else: + assert len(test_metric_types) == len(test_dataset_strs) + assert len(test_dataset_strs) == len(test_class_mapping_fpaths) + + train_transform = make_classification_train_transform() + train_dataset = make_dataset( + dataset_str=train_dataset_str, + transform=train_transform, + ) + training_num_classes = len(torch.unique(torch.Tensor(train_dataset.get_targets().astype(int)))) + sampler_type = SamplerType.SHARDED_INFINITE + # sampler_type = SamplerType.INFINITE + + n_last_blocks_list = [1, 4] + n_last_blocks = max(n_last_blocks_list) + autocast_ctx = partial(torch.cuda.amp.autocast, enabled=True, dtype=autocast_dtype) + feature_model = ModelWithIntermediateLayers(model, n_last_blocks, autocast_ctx) + sample_output = feature_model(train_dataset[0][0].unsqueeze(0).cuda()) + + linear_classifiers, optim_param_groups = setup_linear_classifiers( + sample_output, + n_last_blocks_list, + learning_rates, + batch_size, + training_num_classes, + ) + + optimizer = torch.optim.SGD(optim_param_groups, momentum=0.9, weight_decay=0) + max_iter = epochs * epoch_length + scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, max_iter, eta_min=0) + checkpointer = Checkpointer(linear_classifiers, output_dir, optimizer=optimizer, scheduler=scheduler) + start_iter = checkpointer.resume_or_load(classifier_fpath or "", resume=resume).get("iteration", -1) + 1 + train_data_loader = make_data_loader( + dataset=train_dataset, + batch_size=batch_size, + num_workers=num_workers, + shuffle=True, + seed=seed, + sampler_type=sampler_type, + sampler_advance=start_iter, + drop_last=True, + persistent_workers=True, + ) + val_data_loader = make_eval_data_loader(val_dataset_str, batch_size, num_workers, val_metric_type) + + checkpoint_period = save_checkpoint_frequency * epoch_length + + if val_class_mapping_fpath is not None: + logger.info(f"Using class mapping from {val_class_mapping_fpath}") + val_class_mapping = np.load(val_class_mapping_fpath) + else: + val_class_mapping = None + + test_class_mappings = [] + for class_mapping_fpath in test_class_mapping_fpaths: + if class_mapping_fpath is not None and class_mapping_fpath != "None": + logger.info(f"Using class mapping from {class_mapping_fpath}") + class_mapping = np.load(class_mapping_fpath) + else: + class_mapping = None + test_class_mappings.append(class_mapping) + + metrics_file_path = os.path.join(output_dir, "results_eval_linear.json") + val_results_dict, feature_model, linear_classifiers, iteration = eval_linear( + feature_model=feature_model, + linear_classifiers=linear_classifiers, + train_data_loader=train_data_loader, + val_data_loader=val_data_loader, + metrics_file_path=metrics_file_path, 
+ optimizer=optimizer, + scheduler=scheduler, + output_dir=output_dir, + max_iter=max_iter, + checkpoint_period=checkpoint_period, + running_checkpoint_period=epoch_length, + eval_period=eval_period_iterations, + metric_type=val_metric_type, + training_num_classes=training_num_classes, + resume=resume, + val_class_mapping=val_class_mapping, + classifier_fpath=classifier_fpath, + ) + results_dict = {} + if len(test_dataset_strs) > 1 or test_dataset_strs[0] != val_dataset_str: + results_dict = test_on_datasets( + feature_model, + linear_classifiers, + test_dataset_strs, + batch_size, + 0, # num_workers, + test_metric_types, + metrics_file_path, + training_num_classes, + iteration, + val_results_dict["best_classifier"]["name"], + prefixstring="", + test_class_mappings=test_class_mappings, + ) + results_dict["best_classifier"] = val_results_dict["best_classifier"]["name"] + results_dict[f"{val_dataset_str}_accuracy"] = 100.0 * val_results_dict["best_classifier"]["accuracy"] + logger.info("Test Results Dict " + str(results_dict)) + + return results_dict + + +def main(args): + model, autocast_dtype = setup_and_build_model(args) + run_eval_linear( + model=model, + output_dir=args.output_dir, + train_dataset_str=args.train_dataset_str, + val_dataset_str=args.val_dataset_str, + test_dataset_strs=args.test_dataset_strs, + batch_size=args.batch_size, + epochs=args.epochs, + epoch_length=args.epoch_length, + num_workers=args.num_workers, + save_checkpoint_frequency=args.save_checkpoint_frequency, + eval_period_iterations=args.eval_period_iterations, + learning_rates=args.learning_rates, + autocast_dtype=autocast_dtype, + resume=not args.no_resume, + classifier_fpath=args.classifier_fpath, + val_metric_type=args.val_metric_type, + test_metric_types=args.test_metric_types, + val_class_mapping_fpath=args.val_class_mapping_fpath, + test_class_mapping_fpaths=args.test_class_mapping_fpaths, + ) + return 0 + + +if __name__ == "__main__": + description = "DINOv2 linear evaluation" + args_parser = get_args_parser(description=description) + args = args_parser.parse_args() + sys.exit(main(args)) diff --git a/engine/pose_estimation/dinov2/dinov2/eval/log_regression.py b/engine/pose_estimation/dinov2/dinov2/eval/log_regression.py new file mode 100644 index 0000000000000000000000000000000000000000..5f36ec134e0ce25697428a0b3f21cdc2f0145645 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/log_regression.py @@ -0,0 +1,444 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
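+
+# Illustrative note, not part of the upstream file: this module fits a cuML
+# logistic regression on frozen backbone features and sweeps the C value.
+# A minimal launch sketch, assuming the model-setup flags from the shared setup
+# parser are also supplied and the inner `dinov2` package is importable:
+#   python -m dinov2.eval.log_regression \
+#       --train-dataset ImageNet:split=TRAIN --val-dataset ImageNet:split=VAL \
+#       --finetune-on-val --metric-type mean_accuracy --train-features-device cpu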
+ +import argparse +import gc +import logging +import sys +import time +from typing import List, Optional + +from cuml.linear_model import LogisticRegression +import torch +import torch.backends.cudnn as cudnn +import torch.distributed +from torch import nn +from torch.utils.data import TensorDataset +from torchmetrics import MetricTracker + +from dinov2.data import make_dataset +from dinov2.data.transforms import make_classification_eval_transform +from dinov2.distributed import get_global_rank, get_global_size +from dinov2.eval.metrics import MetricType, build_metric +from dinov2.eval.setup import get_args_parser as get_setup_args_parser +from dinov2.eval.setup import setup_and_build_model +from dinov2.eval.utils import evaluate, extract_features +from dinov2.utils.dtype import as_torch_dtype + + +logger = logging.getLogger("dinov2") + +DEFAULT_MAX_ITER = 1_000 +C_POWER_RANGE = torch.linspace(-6, 5, 45) +_CPU_DEVICE = torch.device("cpu") + + +def get_args_parser( + description: Optional[str] = None, + parents: Optional[List[argparse.ArgumentParser]] = None, + add_help: bool = True, +): + parents = parents or [] + setup_args_parser = get_setup_args_parser(parents=parents, add_help=False) + parents = [setup_args_parser] + parser = argparse.ArgumentParser( + description=description, + parents=parents, + add_help=add_help, + ) + parser.add_argument( + "--train-dataset", + dest="train_dataset_str", + type=str, + help="Training dataset", + ) + parser.add_argument( + "--val-dataset", + dest="val_dataset_str", + type=str, + help="Validation dataset", + ) + parser.add_argument( + "--finetune-dataset-str", + dest="finetune_dataset_str", + type=str, + help="Fine-tuning dataset", + ) + parser.add_argument( + "--finetune-on-val", + action="store_true", + help="If there is no finetune dataset, whether to choose the " + "hyperparameters on the val set instead of 10%% of the train dataset", + ) + parser.add_argument( + "--metric-type", + type=MetricType, + choices=list(MetricType), + help="Metric type", + ) + parser.add_argument( + "--train-features-device", + type=str, + help="Device to gather train features (cpu, cuda, cuda:0, etc.), default: %(default)s", + ) + parser.add_argument( + "--train-dtype", + type=str, + help="Data type to convert the train features to (default: %(default)s)", + ) + parser.add_argument( + "--max-train-iters", + type=int, + help="Maximum number of train iterations (default: %(default)s)", + ) + parser.set_defaults( + train_dataset_str="ImageNet:split=TRAIN", + val_dataset_str="ImageNet:split=VAL", + finetune_dataset_str=None, + metric_type=MetricType.MEAN_ACCURACY, + train_features_device="cpu", + train_dtype="float64", + max_train_iters=DEFAULT_MAX_ITER, + finetune_on_val=False, + ) + return parser + + +class LogRegModule(nn.Module): + def __init__( + self, + C, + max_iter=DEFAULT_MAX_ITER, + dtype=torch.float64, + device=_CPU_DEVICE, + ): + super().__init__() + self.dtype = dtype + self.device = device + self.estimator = LogisticRegression( + penalty="l2", + C=C, + max_iter=max_iter, + output_type="numpy", + tol=1e-12, + linesearch_max_iter=50, + ) + + def forward(self, samples, targets): + samples_device = samples.device + samples = samples.to(dtype=self.dtype, device=self.device) + if self.device == _CPU_DEVICE: + samples = samples.numpy() + probas = self.estimator.predict_proba(samples) + return {"preds": torch.from_numpy(probas).to(samples_device), "target": targets} + + def fit(self, train_features, train_labels): + train_features = train_features.to(dtype=self.dtype, 
device=self.device) + train_labels = train_labels.to(dtype=self.dtype, device=self.device) + if self.device == _CPU_DEVICE: + # both cuML and sklearn only work with numpy arrays on CPU + train_features = train_features.numpy() + train_labels = train_labels.numpy() + self.estimator.fit(train_features, train_labels) + + +def evaluate_model(*, logreg_model, logreg_metric, test_data_loader, device): + postprocessors = {"metrics": logreg_model} + metrics = {"metrics": logreg_metric} + return evaluate(nn.Identity(), test_data_loader, postprocessors, metrics, device) + + +def train_for_C(*, C, max_iter, train_features, train_labels, dtype=torch.float64, device=_CPU_DEVICE): + logreg_model = LogRegModule(C, max_iter=max_iter, dtype=dtype, device=device) + logreg_model.fit(train_features, train_labels) + return logreg_model + + +def train_and_evaluate( + *, + C, + max_iter, + train_features, + train_labels, + logreg_metric, + test_data_loader, + train_dtype=torch.float64, + train_features_device, + eval_device, +): + logreg_model = train_for_C( + C=C, + max_iter=max_iter, + train_features=train_features, + train_labels=train_labels, + dtype=train_dtype, + device=train_features_device, + ) + return evaluate_model( + logreg_model=logreg_model, + logreg_metric=logreg_metric, + test_data_loader=test_data_loader, + device=eval_device, + ) + + +def sweep_C_values( + *, + train_features, + train_labels, + test_data_loader, + metric_type, + num_classes, + train_dtype=torch.float64, + train_features_device=_CPU_DEVICE, + max_train_iters=DEFAULT_MAX_ITER, +): + if metric_type == MetricType.PER_CLASS_ACCURACY: + # If we want to output per-class accuracy, we select the hyperparameters with mean per class + metric_type = MetricType.MEAN_PER_CLASS_ACCURACY + logreg_metric = build_metric(metric_type, num_classes=num_classes) + metric_tracker = MetricTracker(logreg_metric, maximize=True) + ALL_C = 10**C_POWER_RANGE + logreg_models = {} + + train_features = train_features.to(dtype=train_dtype, device=train_features_device) + train_labels = train_labels.to(device=train_features_device) + + for i in range(get_global_rank(), len(ALL_C), get_global_size()): + C = ALL_C[i].item() + logger.info( + f"Training for C = {C:.5f}, dtype={train_dtype}, " + f"features: {train_features.shape}, {train_features.dtype}, " + f"labels: {train_labels.shape}, {train_labels.dtype}" + ) + logreg_models[C] = train_for_C( + C=C, + max_iter=max_train_iters, + train_features=train_features, + train_labels=train_labels, + dtype=train_dtype, + device=train_features_device, + ) + + gather_list = [None for _ in range(get_global_size())] + torch.distributed.all_gather_object(gather_list, logreg_models) + + logreg_models_gathered = {} + for logreg_dict in gather_list: + logreg_models_gathered.update(logreg_dict) + + for i in range(len(ALL_C)): + metric_tracker.increment() + C = ALL_C[i].item() + evals = evaluate_model( + logreg_model=logreg_models_gathered[C], + logreg_metric=metric_tracker, + test_data_loader=test_data_loader, + device=torch.cuda.current_device(), + ) + logger.info(f"Trained for C = {C:.5f}, accuracies = {evals}") + + best_stats, which_epoch = metric_tracker.best_metric(return_step=True) + best_stats_100 = {k: 100.0 * v for k, v in best_stats.items()} + if which_epoch["top-1"] == i: + best_C = C + logger.info(f"Sweep best {best_stats_100}, best C = {best_C:.6f}") + + return best_stats, best_C + + +def eval_log_regression( + *, + model, + train_dataset, + val_dataset, + finetune_dataset, + metric_type, + batch_size, + num_workers, 
+ finetune_on_val=False, + train_dtype=torch.float64, + train_features_device=_CPU_DEVICE, + max_train_iters=DEFAULT_MAX_ITER, +): + """ + Implements the "standard" process for log regression evaluation: + The value of C is chosen by training on train_dataset and evaluating on + finetune_dataset. Then, the final model is trained on a concatenation of + train_dataset and finetune_dataset, and is evaluated on val_dataset. + If there is no finetune_dataset, the value of C is the one that yields + the best results on a random 10% subset of the train dataset + """ + + start = time.time() + + train_features, train_labels = extract_features( + model, train_dataset, batch_size, num_workers, gather_on_cpu=(train_features_device == _CPU_DEVICE) + ) + val_features, val_labels = extract_features( + model, val_dataset, batch_size, num_workers, gather_on_cpu=(train_features_device == _CPU_DEVICE) + ) + val_data_loader = torch.utils.data.DataLoader( + TensorDataset(val_features, val_labels), + batch_size=batch_size, + drop_last=False, + num_workers=0, + persistent_workers=False, + ) + + if finetune_dataset is None and finetune_on_val: + logger.info("Choosing hyperparameters on the val dataset") + finetune_features, finetune_labels = val_features, val_labels + elif finetune_dataset is None and not finetune_on_val: + logger.info("Choosing hyperparameters on 10% of the train dataset") + torch.manual_seed(0) + indices = torch.randperm(len(train_features), device=train_features.device) + finetune_index = indices[: len(train_features) // 10] + train_index = indices[len(train_features) // 10 :] + finetune_features, finetune_labels = train_features[finetune_index], train_labels[finetune_index] + train_features, train_labels = train_features[train_index], train_labels[train_index] + else: + logger.info("Choosing hyperparameters on the finetune dataset") + finetune_features, finetune_labels = extract_features( + model, finetune_dataset, batch_size, num_workers, gather_on_cpu=(train_features_device == _CPU_DEVICE) + ) + # release the model - free GPU memory + del model + gc.collect() + torch.cuda.empty_cache() + finetune_data_loader = torch.utils.data.DataLoader( + TensorDataset(finetune_features, finetune_labels), + batch_size=batch_size, + drop_last=False, + ) + + if len(train_labels.shape) > 1: + num_classes = train_labels.shape[1] + else: + num_classes = train_labels.max() + 1 + + logger.info("Using cuML for logistic regression") + + best_stats, best_C = sweep_C_values( + train_features=train_features, + train_labels=train_labels, + test_data_loader=finetune_data_loader, + metric_type=metric_type, + num_classes=num_classes, + train_dtype=train_dtype, + train_features_device=train_features_device, + max_train_iters=max_train_iters, + ) + + if not finetune_on_val: + logger.info("Best parameter found, concatenating features") + train_features = torch.cat((train_features, finetune_features)) + train_labels = torch.cat((train_labels, finetune_labels)) + + logger.info("Training final model") + logreg_metric = build_metric(metric_type, num_classes=num_classes) + evals = train_and_evaluate( + C=best_C, + max_iter=max_train_iters, + train_features=train_features, + train_labels=train_labels, + logreg_metric=logreg_metric.clone(), + test_data_loader=val_data_loader, + eval_device=torch.cuda.current_device(), + train_dtype=train_dtype, + train_features_device=train_features_device, + ) + + best_stats = evals[1]["metrics"] + + best_stats["best_C"] = best_C + + logger.info(f"Log regression evaluation done in 
{int(time.time() - start)}s") + return best_stats + + +def eval_log_regression_with_model( + model, + train_dataset_str="ImageNet:split=TRAIN", + val_dataset_str="ImageNet:split=VAL", + finetune_dataset_str=None, + autocast_dtype=torch.float, + finetune_on_val=False, + metric_type=MetricType.MEAN_ACCURACY, + train_dtype=torch.float64, + train_features_device=_CPU_DEVICE, + max_train_iters=DEFAULT_MAX_ITER, +): + cudnn.benchmark = True + + transform = make_classification_eval_transform(resize_size=224) + target_transform = None + + train_dataset = make_dataset(dataset_str=train_dataset_str, transform=transform, target_transform=target_transform) + val_dataset = make_dataset(dataset_str=val_dataset_str, transform=transform, target_transform=target_transform) + if finetune_dataset_str is not None: + finetune_dataset = make_dataset( + dataset_str=finetune_dataset_str, transform=transform, target_transform=target_transform + ) + else: + finetune_dataset = None + + with torch.cuda.amp.autocast(dtype=autocast_dtype): + results_dict_logreg = eval_log_regression( + model=model, + train_dataset=train_dataset, + val_dataset=val_dataset, + finetune_dataset=finetune_dataset, + metric_type=metric_type, + batch_size=256, + num_workers=0, # 5, + finetune_on_val=finetune_on_val, + train_dtype=train_dtype, + train_features_device=train_features_device, + max_train_iters=max_train_iters, + ) + + results_dict = { + "top-1": results_dict_logreg["top-1"].cpu().numpy() * 100.0, + "top-5": results_dict_logreg.get("top-5", torch.tensor(0.0)).cpu().numpy() * 100.0, + "best_C": results_dict_logreg["best_C"], + } + logger.info( + "\n".join( + [ + "Training of the supervised logistic regression on frozen features completed.\n" + "Top-1 test accuracy: {acc:.1f}".format(acc=results_dict["top-1"]), + "Top-5 test accuracy: {acc:.1f}".format(acc=results_dict["top-5"]), + "obtained for C = {c:.6f}".format(c=results_dict["best_C"]), + ] + ) + ) + + torch.distributed.barrier() + return results_dict + + +def main(args): + model, autocast_dtype = setup_and_build_model(args) + eval_log_regression_with_model( + model=model, + train_dataset_str=args.train_dataset_str, + val_dataset_str=args.val_dataset_str, + finetune_dataset_str=args.finetune_dataset_str, + autocast_dtype=autocast_dtype, + finetune_on_val=args.finetune_on_val, + metric_type=args.metric_type, + train_dtype=as_torch_dtype(args.train_dtype), + train_features_device=torch.device(args.train_features_device), + max_train_iters=args.max_train_iters, + ) + return 0 + + +if __name__ == "__main__": + description = "DINOv2 logistic regression evaluation" + args_parser = get_args_parser(description=description) + args = args_parser.parse_args() + sys.exit(main(args)) diff --git a/engine/pose_estimation/dinov2/dinov2/eval/metrics.py b/engine/pose_estimation/dinov2/dinov2/eval/metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..52be81a859dddde82da93c3657c35352d2bb0a48 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/metrics.py @@ -0,0 +1,113 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
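+
+# Illustrative usage sketch, not part of the upstream file, assuming a
+# 1000-class problem with `logits` of shape [B, 1000] and integer `labels`:
+#   metric = build_metric(MetricType.MEAN_ACCURACY, num_classes=1000)
+#   metric.update(logits, labels)
+#   print(metric.compute())  # {"top-1": tensor(...), "top-5": tensor(...)}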
+ +from enum import Enum +import logging +from typing import Any, Dict, Optional + +import torch +from torch import Tensor +from torchmetrics import Metric, MetricCollection +from torchmetrics.classification import MulticlassAccuracy +from torchmetrics.utilities.data import dim_zero_cat, select_topk + + +logger = logging.getLogger("dinov2") + + +class MetricType(Enum): + MEAN_ACCURACY = "mean_accuracy" + MEAN_PER_CLASS_ACCURACY = "mean_per_class_accuracy" + PER_CLASS_ACCURACY = "per_class_accuracy" + IMAGENET_REAL_ACCURACY = "imagenet_real_accuracy" + + @property + def accuracy_averaging(self): + return getattr(AccuracyAveraging, self.name, None) + + def __str__(self): + return self.value + + +class AccuracyAveraging(Enum): + MEAN_ACCURACY = "micro" + MEAN_PER_CLASS_ACCURACY = "macro" + PER_CLASS_ACCURACY = "none" + + def __str__(self): + return self.value + + +def build_metric(metric_type: MetricType, *, num_classes: int, ks: Optional[tuple] = None): + if metric_type.accuracy_averaging is not None: + return build_topk_accuracy_metric( + average_type=metric_type.accuracy_averaging, + num_classes=num_classes, + ks=(1, 5) if ks is None else ks, + ) + elif metric_type == MetricType.IMAGENET_REAL_ACCURACY: + return build_topk_imagenet_real_accuracy_metric( + num_classes=num_classes, + ks=(1, 5) if ks is None else ks, + ) + + raise ValueError(f"Unknown metric type {metric_type}") + + +def build_topk_accuracy_metric(average_type: AccuracyAveraging, num_classes: int, ks: tuple = (1, 5)): + metrics: Dict[str, Metric] = { + f"top-{k}": MulticlassAccuracy(top_k=k, num_classes=int(num_classes), average=average_type.value) for k in ks + } + return MetricCollection(metrics) + + +def build_topk_imagenet_real_accuracy_metric(num_classes: int, ks: tuple = (1, 5)): + metrics: Dict[str, Metric] = {f"top-{k}": ImageNetReaLAccuracy(top_k=k, num_classes=int(num_classes)) for k in ks} + return MetricCollection(metrics) + + +class ImageNetReaLAccuracy(Metric): + is_differentiable: bool = False + higher_is_better: Optional[bool] = None + full_state_update: bool = False + + def __init__( + self, + num_classes: int, + top_k: int = 1, + **kwargs: Any, + ) -> None: + super().__init__(**kwargs) + self.num_classes = num_classes + self.top_k = top_k + self.add_state("tp", [], dist_reduce_fx="cat") + + def update(self, preds: Tensor, target: Tensor) -> None: # type: ignore + # preds [B, D] + # target [B, A] + # preds_oh [B, D] with 0 and 1 + # select top K highest probabilities, use one hot representation + preds_oh = select_topk(preds, self.top_k) + # target_oh [B, D + 1] with 0 and 1 + target_oh = torch.zeros((preds_oh.shape[0], preds_oh.shape[1] + 1), device=target.device, dtype=torch.int32) + target = target.long() + # for undefined targets (-1) use a fake value `num_classes` + target[target == -1] = self.num_classes + # fill targets, use one hot representation + target_oh.scatter_(1, target, 1) + # target_oh [B, D] (remove the fake target at index `num_classes`) + target_oh = target_oh[:, :-1] + # tp [B] with 0 and 1 + tp = (preds_oh * target_oh == 1).sum(dim=1) + # at least one match between prediction and target + tp.clip_(max=1) + # ignore instances where no targets are defined + mask = target_oh.sum(dim=1) > 0 + tp = tp[mask] + self.tp.append(tp) # type: ignore + + def compute(self) -> Tensor: + tp = dim_zero_cat(self.tp) # type: ignore + return tp.float().mean() diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/__init__.py 
new file mode 100644 index 0000000000000000000000000000000000000000..b88da6bf80be92af00b72dfdb0a806fa64a7a2d9 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation/hooks/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/hooks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..738cc2d2069521ea0353acd0cb0a03e3ddf1fa51 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/hooks/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .optimizer import DistOptimizerHook diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation/hooks/optimizer.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/hooks/optimizer.py new file mode 100644 index 0000000000000000000000000000000000000000..f593f26a84475bbf7ebda9607a4d10914b13a443 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/hooks/optimizer.py @@ -0,0 +1,40 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +try: + import apex +except ImportError: + print("apex is not installed") + +from mmcv.runner import OptimizerHook, HOOKS + + +@HOOKS.register_module() +class DistOptimizerHook(OptimizerHook): + """Optimizer hook for distributed training.""" + + def __init__(self, update_interval=1, grad_clip=None, coalesce=True, bucket_size_mb=-1, use_fp16=False): + self.grad_clip = grad_clip + self.coalesce = coalesce + self.bucket_size_mb = bucket_size_mb + self.update_interval = update_interval + self.use_fp16 = use_fp16 + + def before_run(self, runner): + runner.optimizer.zero_grad() + + def after_train_iter(self, runner): + runner.outputs["loss"] /= self.update_interval + if self.use_fp16: + # runner.outputs['loss'].backward() + with apex.amp.scale_loss(runner.outputs["loss"], runner.optimizer) as scaled_loss: + scaled_loss.backward() + else: + runner.outputs["loss"].backward() + if self.every_n_iters(runner, self.update_interval): + if self.grad_clip is not None: + self.clip_grads(runner.model.parameters()) + runner.optimizer.step() + runner.optimizer.zero_grad() diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation/utils/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b88da6bf80be92af00b72dfdb0a806fa64a7a2d9 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/utils/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation/utils/colormaps.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/utils/colormaps.py new file mode 100644 index 0000000000000000000000000000000000000000..e6ef604b2c75792e95e438abfd51ab03d40de340 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation/utils/colormaps.py @@ -0,0 +1,362 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +ADE20K_COLORMAP = [ + (0, 0, 0), + (120, 120, 120), + (180, 120, 120), + (6, 230, 230), + (80, 50, 50), + (4, 200, 3), + (120, 120, 80), + (140, 140, 140), + (204, 5, 255), + (230, 230, 230), + (4, 250, 7), + (224, 5, 255), + (235, 255, 7), + (150, 5, 61), + (120, 120, 70), + (8, 255, 51), + (255, 6, 82), + (143, 255, 140), + (204, 255, 4), + (255, 51, 7), + (204, 70, 3), + (0, 102, 200), + (61, 230, 250), + (255, 6, 51), + (11, 102, 255), + (255, 7, 71), + (255, 9, 224), + (9, 7, 230), + (220, 220, 220), + (255, 9, 92), + (112, 9, 255), + (8, 255, 214), + (7, 255, 224), + (255, 184, 6), + (10, 255, 71), + (255, 41, 10), + (7, 255, 255), + (224, 255, 8), + (102, 8, 255), + (255, 61, 6), + (255, 194, 7), + (255, 122, 8), + (0, 255, 20), + (255, 8, 41), + (255, 5, 153), + (6, 51, 255), + (235, 12, 255), + (160, 150, 20), + (0, 163, 255), + (140, 140, 140), + (250, 10, 15), + (20, 255, 0), + (31, 255, 0), + (255, 31, 0), + (255, 224, 0), + (153, 255, 0), + (0, 0, 255), + (255, 71, 0), + (0, 235, 255), + (0, 173, 255), + (31, 0, 255), + (11, 200, 200), + (255, 82, 0), + (0, 255, 245), + (0, 61, 255), + (0, 255, 112), + (0, 255, 133), + (255, 0, 0), + (255, 163, 0), + (255, 102, 0), + (194, 255, 0), + (0, 143, 255), + (51, 255, 0), + (0, 82, 255), + (0, 255, 41), + (0, 255, 173), + (10, 0, 255), + (173, 255, 0), + (0, 255, 153), + (255, 92, 0), + (255, 0, 255), + (255, 0, 245), + (255, 0, 102), + (255, 173, 0), + (255, 0, 20), + (255, 184, 184), + (0, 31, 255), + (0, 255, 61), + (0, 71, 255), + (255, 0, 204), + (0, 255, 194), + (0, 255, 82), + (0, 10, 255), + (0, 112, 255), + (51, 0, 255), + (0, 194, 255), + (0, 122, 255), + (0, 255, 163), + (255, 153, 0), + (0, 255, 10), + (255, 112, 0), + (143, 255, 0), + (82, 0, 255), + (163, 255, 0), + (255, 235, 0), + (8, 184, 170), + (133, 0, 255), + (0, 255, 92), + (184, 0, 255), + (255, 0, 31), + (0, 184, 255), + (0, 214, 255), + (255, 0, 112), + (92, 255, 0), + (0, 224, 255), + (112, 224, 255), + (70, 184, 160), + (163, 0, 255), + (153, 0, 255), + (71, 255, 0), + (255, 0, 163), + (255, 204, 0), + (255, 0, 143), + (0, 255, 235), + (133, 255, 0), + (255, 0, 235), + (245, 0, 255), + (255, 0, 122), + (255, 245, 0), + (10, 190, 212), + (214, 255, 0), + (0, 204, 255), + (20, 0, 255), + (255, 255, 0), + (0, 153, 255), + (0, 41, 255), + (0, 255, 204), + (41, 0, 255), + (41, 255, 0), + (173, 0, 255), + (0, 245, 255), + (71, 0, 255), + (122, 0, 255), + (0, 255, 184), + (0, 92, 255), + (184, 255, 0), + (0, 133, 255), + (255, 214, 0), + (25, 194, 194), + (102, 255, 0), + (92, 0, 255), +] + +ADE20K_CLASS_NAMES = [ + "", + "wall", + "building;edifice", + "sky", + "floor;flooring", + "tree", + "ceiling", + "road;route", + "bed", + "windowpane;window", + "grass", + "cabinet", + "sidewalk;pavement", + "person;individual;someone;somebody;mortal;soul", + "earth;ground", + "door;double;door", + "table", + "mountain;mount", + "plant;flora;plant;life", + "curtain;drape;drapery;mantle;pall", + 
"chair", + "car;auto;automobile;machine;motorcar", + "water", + "painting;picture", + "sofa;couch;lounge", + "shelf", + "house", + "sea", + "mirror", + "rug;carpet;carpeting", + "field", + "armchair", + "seat", + "fence;fencing", + "desk", + "rock;stone", + "wardrobe;closet;press", + "lamp", + "bathtub;bathing;tub;bath;tub", + "railing;rail", + "cushion", + "base;pedestal;stand", + "box", + "column;pillar", + "signboard;sign", + "chest;of;drawers;chest;bureau;dresser", + "counter", + "sand", + "sink", + "skyscraper", + "fireplace;hearth;open;fireplace", + "refrigerator;icebox", + "grandstand;covered;stand", + "path", + "stairs;steps", + "runway", + "case;display;case;showcase;vitrine", + "pool;table;billiard;table;snooker;table", + "pillow", + "screen;door;screen", + "stairway;staircase", + "river", + "bridge;span", + "bookcase", + "blind;screen", + "coffee;table;cocktail;table", + "toilet;can;commode;crapper;pot;potty;stool;throne", + "flower", + "book", + "hill", + "bench", + "countertop", + "stove;kitchen;stove;range;kitchen;range;cooking;stove", + "palm;palm;tree", + "kitchen;island", + "computer;computing;machine;computing;device;data;processor;electronic;computer;information;processing;system", + "swivel;chair", + "boat", + "bar", + "arcade;machine", + "hovel;hut;hutch;shack;shanty", + "bus;autobus;coach;charabanc;double-decker;jitney;motorbus;motorcoach;omnibus;passenger;vehicle", + "towel", + "light;light;source", + "truck;motortruck", + "tower", + "chandelier;pendant;pendent", + "awning;sunshade;sunblind", + "streetlight;street;lamp", + "booth;cubicle;stall;kiosk", + "television;television;receiver;television;set;tv;tv;set;idiot;box;boob;tube;telly;goggle;box", + "airplane;aeroplane;plane", + "dirt;track", + "apparel;wearing;apparel;dress;clothes", + "pole", + "land;ground;soil", + "bannister;banister;balustrade;balusters;handrail", + "escalator;moving;staircase;moving;stairway", + "ottoman;pouf;pouffe;puff;hassock", + "bottle", + "buffet;counter;sideboard", + "poster;posting;placard;notice;bill;card", + "stage", + "van", + "ship", + "fountain", + "conveyer;belt;conveyor;belt;conveyer;conveyor;transporter", + "canopy", + "washer;automatic;washer;washing;machine", + "plaything;toy", + "swimming;pool;swimming;bath;natatorium", + "stool", + "barrel;cask", + "basket;handbasket", + "waterfall;falls", + "tent;collapsible;shelter", + "bag", + "minibike;motorbike", + "cradle", + "oven", + "ball", + "food;solid;food", + "step;stair", + "tank;storage;tank", + "trade;name;brand;name;brand;marque", + "microwave;microwave;oven", + "pot;flowerpot", + "animal;animate;being;beast;brute;creature;fauna", + "bicycle;bike;wheel;cycle", + "lake", + "dishwasher;dish;washer;dishwashing;machine", + "screen;silver;screen;projection;screen", + "blanket;cover", + "sculpture", + "hood;exhaust;hood", + "sconce", + "vase", + "traffic;light;traffic;signal;stoplight", + "tray", + "ashcan;trash;can;garbage;can;wastebin;ash;bin;ash-bin;ashbin;dustbin;trash;barrel;trash;bin", + "fan", + "pier;wharf;wharfage;dock", + "crt;screen", + "plate", + "monitor;monitoring;device", + "bulletin;board;notice;board", + "shower", + "radiator", + "glass;drinking;glass", + "clock", + "flag", +] + + +VOC2012_COLORMAP = [ + (0, 0, 0), + (128, 0, 0), + (0, 128, 0), + (128, 128, 0), + (0, 0, 128), + (128, 0, 128), + (0, 128, 128), + (128, 128, 128), + (64, 0, 0), + (192, 0, 0), + (64, 128, 0), + (192, 128, 0), + (64, 0, 128), + (192, 0, 128), + (64, 128, 128), + (192, 128, 128), + (0, 64, 0), + (128, 64, 0), + (0, 192, 0), + (128, 192, 
0), + (0, 64, 128), +] + + +VOC2012_CLASS_NAMES = [ + "", + "aeroplane", + "bicycle", + "bird", + "boat", + "bottle", + "bus", + "car", + "cat", + "chair", + "cow", + "diningtable", + "dog", + "horse", + "motorbike", + "person", + "pottedplant", + "sheep", + "sofa", + "train", + "tvmonitor", +] diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6c678fdf8f1dee14d7cf9be70af14e6f9a1441c3 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/__init__.py @@ -0,0 +1,8 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .core import * # noqa: F403 +from .models import * # noqa: F403 +from .ops import * # noqa: F403 diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..92599806fbd221c1418d179892a0f46dc0b7d4db --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from mmseg.core.evaluation import * # noqa: F403 +from mmseg.core.seg import * # noqa: F403 + +from .anchor import * # noqa: F403 +from .box import * # noqa: F403 +from .utils import * # noqa: F403 diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/anchor/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/anchor/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e71ac4d6e01462221ae01aa16d0e1231cda7e2e7 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/anchor/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .point_generator import MlvlPointGenerator # noqa: F403 diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/anchor/builder.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/anchor/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..6dba90e22de76d2f23a86d3c057f196d55a99690 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/anchor/builder.py @@ -0,0 +1,21 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
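+
+# Illustrative usage sketch, not part of the upstream file: prior generators are
+# built from config dicts through the registry defined below, e.g. for the
+# MlvlPointGenerator registered in point_generator.py:
+#   prior_generator = build_prior_generator(
+#       dict(type="MlvlPointGenerator", strides=[8, 16, 32], offset=0.5))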
+ +import warnings + +from mmcv.utils import Registry, build_from_cfg + +PRIOR_GENERATORS = Registry("Generator for anchors and points") + +ANCHOR_GENERATORS = PRIOR_GENERATORS + + +def build_prior_generator(cfg, default_args=None): + return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) + + +def build_anchor_generator(cfg, default_args=None): + warnings.warn("``build_anchor_generator`` would be deprecated soon, please use " "``build_prior_generator`` ") + return build_prior_generator(cfg, default_args=default_args) diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/anchor/point_generator.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/anchor/point_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..574d71939080e22284fe99087fb2e7336657bd97 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/anchor/point_generator.py @@ -0,0 +1,205 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import numpy as np +import torch +from torch.nn.modules.utils import _pair + +from .builder import PRIOR_GENERATORS + + +@PRIOR_GENERATORS.register_module() +class MlvlPointGenerator: + """Standard points generator for multi-level (Mlvl) feature maps in 2D + points-based detectors. + + Args: + strides (list[int] | list[tuple[int, int]]): Strides of anchors + in multiple feature levels in order (w, h). + offset (float): The offset of points, the value is normalized with + corresponding stride. Defaults to 0.5. + """ + + def __init__(self, strides, offset=0.5): + self.strides = [_pair(stride) for stride in strides] + self.offset = offset + + @property + def num_levels(self): + """int: number of feature levels that the generator will be applied""" + return len(self.strides) + + @property + def num_base_priors(self): + """list[int]: The number of priors (points) at a point + on the feature grid""" + return [1 for _ in range(len(self.strides))] + + def _meshgrid(self, x, y, row_major=True): + yy, xx = torch.meshgrid(y, x) + if row_major: + # warning .flatten() would cause error in ONNX exporting + # have to use reshape here + return xx.reshape(-1), yy.reshape(-1) + + else: + return yy.reshape(-1), xx.reshape(-1) + + def grid_priors(self, featmap_sizes, dtype=torch.float32, device="cuda", with_stride=False): + """Generate grid points of multiple feature levels. + + Args: + featmap_sizes (list[tuple]): List of feature map sizes in + multiple feature levels, each size arrange as + as (h, w). + dtype (:obj:`dtype`): Dtype of priors. Default: torch.float32. + device (str): The device where the anchors will be put on. + with_stride (bool): Whether to concatenate the stride to + the last dimension of points. + + Return: + list[torch.Tensor]: Points of multiple feature levels. + The sizes of each tensor should be (N, 2) when with stride is + ``False``, where N = width * height, width and height + are the sizes of the corresponding feature level, + and the last dimension 2 represent (coord_x, coord_y), + otherwise the shape should be (N, 4), + and the last dimension 4 represent + (coord_x, coord_y, stride_w, stride_h). 
+ """ + + assert self.num_levels == len(featmap_sizes) + multi_level_priors = [] + for i in range(self.num_levels): + priors = self.single_level_grid_priors( + featmap_sizes[i], level_idx=i, dtype=dtype, device=device, with_stride=with_stride + ) + multi_level_priors.append(priors) + return multi_level_priors + + def single_level_grid_priors(self, featmap_size, level_idx, dtype=torch.float32, device="cuda", with_stride=False): + """Generate grid Points of a single level. + + Note: + This function is usually called by method ``self.grid_priors``. + + Args: + featmap_size (tuple[int]): Size of the feature maps, arrange as + (h, w). + level_idx (int): The index of corresponding feature map level. + dtype (:obj:`dtype`): Dtype of priors. Default: torch.float32. + device (str, optional): The device the tensor will be put on. + Defaults to 'cuda'. + with_stride (bool): Concatenate the stride to the last dimension + of points. + + Return: + Tensor: Points of single feature levels. + The shape of tensor should be (N, 2) when with stride is + ``False``, where N = width * height, width and height + are the sizes of the corresponding feature level, + and the last dimension 2 represent (coord_x, coord_y), + otherwise the shape should be (N, 4), + and the last dimension 4 represent + (coord_x, coord_y, stride_w, stride_h). + """ + feat_h, feat_w = featmap_size + stride_w, stride_h = self.strides[level_idx] + shift_x = (torch.arange(0, feat_w, device=device) + self.offset) * stride_w + # keep featmap_size as Tensor instead of int, so that we + # can convert to ONNX correctly + shift_x = shift_x.to(dtype) + + shift_y = (torch.arange(0, feat_h, device=device) + self.offset) * stride_h + # keep featmap_size as Tensor instead of int, so that we + # can convert to ONNX correctly + shift_y = shift_y.to(dtype) + shift_xx, shift_yy = self._meshgrid(shift_x, shift_y) + if not with_stride: + shifts = torch.stack([shift_xx, shift_yy], dim=-1) + else: + # use `shape[0]` instead of `len(shift_xx)` for ONNX export + stride_w = shift_xx.new_full((shift_xx.shape[0],), stride_w).to(dtype) + stride_h = shift_xx.new_full((shift_yy.shape[0],), stride_h).to(dtype) + shifts = torch.stack([shift_xx, shift_yy, stride_w, stride_h], dim=-1) + all_points = shifts.to(device) + return all_points + + def valid_flags(self, featmap_sizes, pad_shape, device="cuda"): + """Generate valid flags of points of multiple feature levels. + + Args: + featmap_sizes (list(tuple)): List of feature map sizes in + multiple feature levels, each size arrange as + as (h, w). + pad_shape (tuple(int)): The padded shape of the image, + arrange as (h, w). + device (str): The device where the anchors will be put on. + + Return: + list(torch.Tensor): Valid flags of points of multiple levels. + """ + assert self.num_levels == len(featmap_sizes) + multi_level_flags = [] + for i in range(self.num_levels): + point_stride = self.strides[i] + feat_h, feat_w = featmap_sizes[i] + h, w = pad_shape[:2] + valid_feat_h = min(int(np.ceil(h / point_stride[1])), feat_h) + valid_feat_w = min(int(np.ceil(w / point_stride[0])), feat_w) + flags = self.single_level_valid_flags((feat_h, feat_w), (valid_feat_h, valid_feat_w), device=device) + multi_level_flags.append(flags) + return multi_level_flags + + def single_level_valid_flags(self, featmap_size, valid_size, device="cuda"): + """Generate the valid flags of points of a single feature map. + + Args: + featmap_size (tuple[int]): The size of feature maps, arrange as + as (h, w). 
+ valid_size (tuple[int]): The valid size of the feature maps. + The size arrange as as (h, w). + device (str, optional): The device where the flags will be put on. + Defaults to 'cuda'. + + Returns: + torch.Tensor: The valid flags of each points in a single level \ + feature map. + """ + feat_h, feat_w = featmap_size + valid_h, valid_w = valid_size + assert valid_h <= feat_h and valid_w <= feat_w + valid_x = torch.zeros(feat_w, dtype=torch.bool, device=device) + valid_y = torch.zeros(feat_h, dtype=torch.bool, device=device) + valid_x[:valid_w] = 1 + valid_y[:valid_h] = 1 + valid_xx, valid_yy = self._meshgrid(valid_x, valid_y) + valid = valid_xx & valid_yy + return valid + + def sparse_priors(self, prior_idxs, featmap_size, level_idx, dtype=torch.float32, device="cuda"): + """Generate sparse points according to the ``prior_idxs``. + + Args: + prior_idxs (Tensor): The index of corresponding anchors + in the feature map. + featmap_size (tuple[int]): feature map size arrange as (w, h). + level_idx (int): The level index of corresponding feature + map. + dtype (obj:`torch.dtype`): Date type of points. Defaults to + ``torch.float32``. + device (obj:`torch.device`): The device where the points is + located. + Returns: + Tensor: Anchor with shape (N, 2), N should be equal to + the length of ``prior_idxs``. And last dimension + 2 represent (coord_x, coord_y). + """ + height, width = featmap_size + x = (prior_idxs % width + self.offset) * self.strides[level_idx][0] + y = ((prior_idxs // width) % height + self.offset) * self.strides[level_idx][1] + prioris = torch.stack([x, y], 1).to(dtype) + prioris = prioris.to(device) + return prioris diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bf35a613f81acd77ecab2dfb75a722fa8e5c0787 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .builder import * # noqa: F403 +from .samplers import MaskPseudoSampler # noqa: F403 diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/builder.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..9538c0de3db682c2b111b085a8a1ce321c76a9ff --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/builder.py @@ -0,0 +1,19 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
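+
+# Illustrative usage sketch, not part of the upstream file: samplers and coders
+# are built from config dicts through the registries defined below, e.g. for the
+# MaskPseudoSampler registered in samplers/mask_pseudo_sampler.py:
+#   sampler = build_sampler(dict(type="MaskPseudoSampler"))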
+ +from mmcv.utils import Registry, build_from_cfg + +BBOX_SAMPLERS = Registry("bbox_sampler") +BBOX_CODERS = Registry("bbox_coder") + + +def build_sampler(cfg, **default_args): + """Builder of box sampler.""" + return build_from_cfg(cfg, BBOX_SAMPLERS, default_args) + + +def build_bbox_coder(cfg, **default_args): + """Builder of box coder.""" + return build_from_cfg(cfg, BBOX_CODERS, default_args) diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..19c363e3fabc365d92aeaf1e78189d710db279e9 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/__init__.py @@ -0,0 +1,6 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .mask_pseudo_sampler import MaskPseudoSampler # noqa: F403 diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/base_sampler.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/base_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..c45cec3ed7af5b49bb54b92d6e6bcf59b06b4c99 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/base_sampler.py @@ -0,0 +1,92 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from abc import ABCMeta, abstractmethod + +import torch + +from .sampling_result import SamplingResult + + +class BaseSampler(metaclass=ABCMeta): + """Base class of samplers.""" + + def __init__(self, num, pos_fraction, neg_pos_ub=-1, add_gt_as_proposals=True, **kwargs): + self.num = num + self.pos_fraction = pos_fraction + self.neg_pos_ub = neg_pos_ub + self.add_gt_as_proposals = add_gt_as_proposals + self.pos_sampler = self + self.neg_sampler = self + + @abstractmethod + def _sample_pos(self, assign_result, num_expected, **kwargs): + """Sample positive samples.""" + pass + + @abstractmethod + def _sample_neg(self, assign_result, num_expected, **kwargs): + """Sample negative samples.""" + pass + + def sample(self, assign_result, bboxes, gt_bboxes, gt_labels=None, **kwargs): + """Sample positive and negative bboxes. + + This is a simple implementation of bbox sampling given candidates, + assigning results and ground truth bboxes. + + Args: + assign_result (:obj:`AssignResult`): Bbox assigning results. + bboxes (Tensor): Boxes to be sampled from. + gt_bboxes (Tensor): Ground truth bboxes. + gt_labels (Tensor, optional): Class labels of ground truth bboxes. + + Returns: + :obj:`SamplingResult`: Sampling result. 
+ + Example: + >>> from mmdet.core.bbox import RandomSampler + >>> from mmdet.core.bbox import AssignResult + >>> from mmdet.core.bbox.demodata import ensure_rng, random_boxes + >>> rng = ensure_rng(None) + >>> assign_result = AssignResult.random(rng=rng) + >>> bboxes = random_boxes(assign_result.num_preds, rng=rng) + >>> gt_bboxes = random_boxes(assign_result.num_gts, rng=rng) + >>> gt_labels = None + >>> self = RandomSampler(num=32, pos_fraction=0.5, neg_pos_ub=-1, + >>> add_gt_as_proposals=False) + >>> self = self.sample(assign_result, bboxes, gt_bboxes, gt_labels) + """ + if len(bboxes.shape) < 2: + bboxes = bboxes[None, :] + + bboxes = bboxes[:, :4] + + gt_flags = bboxes.new_zeros((bboxes.shape[0],), dtype=torch.uint8) + if self.add_gt_as_proposals and len(gt_bboxes) > 0: + if gt_labels is None: + raise ValueError("gt_labels must be given when add_gt_as_proposals is True") + bboxes = torch.cat([gt_bboxes, bboxes], dim=0) + assign_result.add_gt_(gt_labels) + gt_ones = bboxes.new_ones(gt_bboxes.shape[0], dtype=torch.uint8) + gt_flags = torch.cat([gt_ones, gt_flags]) + + num_expected_pos = int(self.num * self.pos_fraction) + pos_inds = self.pos_sampler._sample_pos(assign_result, num_expected_pos, bboxes=bboxes, **kwargs) + # We found that sampled indices have duplicated items occasionally. + # (may be a bug of PyTorch) + pos_inds = pos_inds.unique() + num_sampled_pos = pos_inds.numel() + num_expected_neg = self.num - num_sampled_pos + if self.neg_pos_ub >= 0: + _pos = max(1, num_sampled_pos) + neg_upper_bound = int(self.neg_pos_ub * _pos) + if num_expected_neg > neg_upper_bound: + num_expected_neg = neg_upper_bound + neg_inds = self.neg_sampler._sample_neg(assign_result, num_expected_neg, bboxes=bboxes, **kwargs) + neg_inds = neg_inds.unique() + + sampling_result = SamplingResult(pos_inds, neg_inds, bboxes, gt_bboxes, assign_result, gt_flags) + return sampling_result diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/mask_pseudo_sampler.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/mask_pseudo_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..3e67ea61ed0fd65cca0addde1893a3c1e176bf15 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/mask_pseudo_sampler.py @@ -0,0 +1,45 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/ZwwWayne/K-Net/blob/main/knet/det/mask_pseudo_sampler.py + +import torch + +from ..builder import BBOX_SAMPLERS +from .base_sampler import BaseSampler +from .mask_sampling_result import MaskSamplingResult + + +@BBOX_SAMPLERS.register_module() +class MaskPseudoSampler(BaseSampler): + """A pseudo sampler that does not do sampling actually.""" + + def __init__(self, **kwargs): + pass + + def _sample_pos(self, **kwargs): + """Sample positive samples.""" + raise NotImplementedError + + def _sample_neg(self, **kwargs): + """Sample negative samples.""" + raise NotImplementedError + + def sample(self, assign_result, masks, gt_masks, **kwargs): + """Directly returns the positive and negative indices of samples. 
+ + Args: + assign_result (:obj:`AssignResult`): Assigned results + masks (torch.Tensor): Bounding boxes + gt_masks (torch.Tensor): Ground truth boxes + Returns: + :obj:`SamplingResult`: sampler results + """ + pos_inds = torch.nonzero(assign_result.gt_inds > 0, as_tuple=False).squeeze(-1).unique() + neg_inds = torch.nonzero(assign_result.gt_inds == 0, as_tuple=False).squeeze(-1).unique() + gt_flags = masks.new_zeros(masks.shape[0], dtype=torch.uint8) + sampling_result = MaskSamplingResult(pos_inds, neg_inds, masks, gt_masks, assign_result, gt_flags) + return sampling_result diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/mask_sampling_result.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/mask_sampling_result.py new file mode 100644 index 0000000000000000000000000000000000000000..270ffd35a5f120dd0560a7fea7fe83ef0bab66bb --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/mask_sampling_result.py @@ -0,0 +1,63 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/ZwwWayne/K-Net/blob/main/knet/det/mask_pseudo_sampler.py + +import torch + +from .sampling_result import SamplingResult + + +class MaskSamplingResult(SamplingResult): + """Mask sampling result.""" + + def __init__(self, pos_inds, neg_inds, masks, gt_masks, assign_result, gt_flags): + self.pos_inds = pos_inds + self.neg_inds = neg_inds + self.pos_masks = masks[pos_inds] + self.neg_masks = masks[neg_inds] + self.pos_is_gt = gt_flags[pos_inds] + + self.num_gts = gt_masks.shape[0] + self.pos_assigned_gt_inds = assign_result.gt_inds[pos_inds] - 1 + + if gt_masks.numel() == 0: + # hack for index error case + assert self.pos_assigned_gt_inds.numel() == 0 + self.pos_gt_masks = torch.empty_like(gt_masks) + else: + self.pos_gt_masks = gt_masks[self.pos_assigned_gt_inds, :] + + if assign_result.labels is not None: + self.pos_gt_labels = assign_result.labels[pos_inds] + else: + self.pos_gt_labels = None + + @property + def masks(self): + """torch.Tensor: concatenated positive and negative boxes""" + return torch.cat([self.pos_masks, self.neg_masks]) + + def __nice__(self): + data = self.info.copy() + data["pos_masks"] = data.pop("pos_masks").shape + data["neg_masks"] = data.pop("neg_masks").shape + parts = [f"'{k}': {v!r}" for k, v in sorted(data.items())] + body = " " + ",\n ".join(parts) + return "{\n" + body + "\n}" + + @property + def info(self): + """Returns a dictionary of info about the object.""" + return { + "pos_inds": self.pos_inds, + "neg_inds": self.neg_inds, + "pos_masks": self.pos_masks, + "neg_masks": self.neg_masks, + "pos_is_gt": self.pos_is_gt, + "num_gts": self.num_gts, + "pos_assigned_gt_inds": self.pos_assigned_gt_inds, + } diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/sampling_result.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/sampling_result.py new file mode 100644 index 0000000000000000000000000000000000000000..aaee3fe55aeb8c6da7edefbbd382d94b67b6a6b4 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/box/samplers/sampling_result.py @@ -0,0 +1,152 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import torch + + +class SamplingResult: + """Bbox sampling result. + + Example: + >>> # xdoctest: +IGNORE_WANT + >>> from mmdet.core.bbox.samplers.sampling_result import * # NOQA + >>> self = SamplingResult.random(rng=10) + >>> print(f'self = {self}') + self = + """ + + def __init__(self, pos_inds, neg_inds, bboxes, gt_bboxes, assign_result, gt_flags): + self.pos_inds = pos_inds + self.neg_inds = neg_inds + self.pos_bboxes = bboxes[pos_inds] + self.neg_bboxes = bboxes[neg_inds] + self.pos_is_gt = gt_flags[pos_inds] + + self.num_gts = gt_bboxes.shape[0] + self.pos_assigned_gt_inds = assign_result.gt_inds[pos_inds] - 1 + + if gt_bboxes.numel() == 0: + # hack for index error case + assert self.pos_assigned_gt_inds.numel() == 0 + self.pos_gt_bboxes = torch.empty_like(gt_bboxes).view(-1, 4) + else: + if len(gt_bboxes.shape) < 2: + gt_bboxes = gt_bboxes.view(-1, 4) + + self.pos_gt_bboxes = gt_bboxes[self.pos_assigned_gt_inds.long(), :] + + if assign_result.labels is not None: + self.pos_gt_labels = assign_result.labels[pos_inds] + else: + self.pos_gt_labels = None + + @property + def bboxes(self): + """torch.Tensor: concatenated positive and negative boxes""" + return torch.cat([self.pos_bboxes, self.neg_bboxes]) + + def to(self, device): + """Change the device of the data inplace. + + Example: + >>> self = SamplingResult.random() + >>> print(f'self = {self.to(None)}') + >>> # xdoctest: +REQUIRES(--gpu) + >>> print(f'self = {self.to(0)}') + """ + _dict = self.__dict__ + for key, value in _dict.items(): + if isinstance(value, torch.Tensor): + _dict[key] = value.to(device) + return self + + def __nice__(self): + data = self.info.copy() + data["pos_bboxes"] = data.pop("pos_bboxes").shape + data["neg_bboxes"] = data.pop("neg_bboxes").shape + parts = [f"'{k}': {v!r}" for k, v in sorted(data.items())] + body = " " + ",\n ".join(parts) + return "{\n" + body + "\n}" + + @property + def info(self): + """Returns a dictionary of info about the object.""" + return { + "pos_inds": self.pos_inds, + "neg_inds": self.neg_inds, + "pos_bboxes": self.pos_bboxes, + "neg_bboxes": self.neg_bboxes, + "pos_is_gt": self.pos_is_gt, + "num_gts": self.num_gts, + "pos_assigned_gt_inds": self.pos_assigned_gt_inds, + } + + @classmethod + def random(cls, rng=None, **kwargs): + """ + Args: + rng (None | int | numpy.random.RandomState): seed or state. + kwargs (keyword arguments): + - num_preds: number of predicted boxes + - num_gts: number of true boxes + - p_ignore (float): probability of a predicted box assigned to \ + an ignored truth. + - p_assigned (float): probability of a predicted box not being \ + assigned. + - p_use_label (float | bool): with labels or not. + + Returns: + :obj:`SamplingResult`: Randomly generated sampling result. + + Example: + >>> from mmdet.core.bbox.samplers.sampling_result import * # NOQA + >>> self = SamplingResult.random() + >>> print(self.__dict__) + """ + from mmdet.core.bbox import demodata + from mmdet.core.bbox.assigners.assign_result import AssignResult + from mmdet.core.bbox.samplers.random_sampler import RandomSampler + + rng = demodata.ensure_rng(rng) + + # make probabalistic? 
+ num = 32 + pos_fraction = 0.5 + neg_pos_ub = -1 + + assign_result = AssignResult.random(rng=rng, **kwargs) + + # Note we could just compute an assignment + bboxes = demodata.random_boxes(assign_result.num_preds, rng=rng) + gt_bboxes = demodata.random_boxes(assign_result.num_gts, rng=rng) + + if rng.rand() > 0.2: + # sometimes algorithms squeeze their data, be robust to that + gt_bboxes = gt_bboxes.squeeze() + bboxes = bboxes.squeeze() + + if assign_result.labels is None: + gt_labels = None + else: + gt_labels = None + + if gt_labels is None: + add_gt_as_proposals = False + else: + add_gt_as_proposals = True # make probabalistic? + + sampler = RandomSampler( + num, pos_fraction, neg_pos_ub=neg_pos_ub, add_gt_as_proposals=add_gt_as_proposals, rng=rng + ) + self = sampler.sample(assign_result, bboxes, gt_bboxes, gt_labels) + return self diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/utils/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6cdc9e19352f50bc2d5433c412ff71186c5df019 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/utils/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .dist_utils import reduce_mean +from .misc import add_prefix, multi_apply diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/utils/dist_utils.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/utils/dist_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7dfed42da821cd94e31b663d86b20b8f09799b30 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/utils/dist_utils.py @@ -0,0 +1,15 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import torch.distributed as dist + + +def reduce_mean(tensor): + """ "Obtain the mean of tensor on different GPUs.""" + if not (dist.is_available() and dist.is_initialized()): + return tensor + tensor = tensor.clone() + dist.all_reduce(tensor.div_(dist.get_world_size()), op=dist.ReduceOp.SUM) + return tensor diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/utils/misc.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..e07579e7b182b62153e81fe637ffd0f3081ef2a3 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/core/utils/misc.py @@ -0,0 +1,47 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from functools import partial + + +def multi_apply(func, *args, **kwargs): + """Apply function to a list of arguments. + + Note: + This function applies the ``func`` to multiple inputs and + map the multiple outputs of the ``func`` into different + list. Each list contains the same type of outputs corresponding + to different inputs. 
+ + Args: + func (Function): A function that will be applied to a list of + arguments + + Returns: + tuple(list): A tuple containing multiple list, each list contains \ + a kind of returned results by the function + """ + pfunc = partial(func, **kwargs) if kwargs else func + map_results = map(pfunc, *args) + return tuple(map(list, zip(*map_results))) + + +def add_prefix(inputs, prefix): + """Add prefix for dict. + + Args: + inputs (dict): The input dict with str keys. + prefix (str): The prefix to add. + + Returns: + + dict: The dict with keys updated with ``prefix``. + """ + + outputs = dict() + for name, value in inputs.items(): + outputs[f"{prefix}.{name}"] = value + + return outputs diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/ops/modules/__init__.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/ops/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..49aa8fe612fd4c088e294707c5ee16bd1cb5b5e7 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/ops/modules/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/fundamentalvision/Deformable-DETR/tree/main/models/ops/modules +# https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch/tree/pytorch_1.0.0 + +from .ms_deform_attn import MSDeformAttn diff --git a/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/ops/modules/ms_deform_attn.py b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/ops/modules/ms_deform_attn.py new file mode 100644 index 0000000000000000000000000000000000000000..d8b4fa23712e87d1a2682b57e71ee37fe8524cff --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/segmentation_m2f/ops/modules/ms_deform_attn.py @@ -0,0 +1,185 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
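The two helpers in misc.py are used throughout the segmentation heads: multi_apply maps a function over parallel lists of inputs and transposes the per-call result tuples into per-output lists, while add_prefix namespaces a loss dictionary. A small sketch with made-up values (assumes the vendored dinov2 package is importable under this path):

from dinov2.eval.segmentation_m2f.core.utils import add_prefix, multi_apply

def double_and_shift(x, shift=0.0):
    # hypothetical per-item function returning two outputs
    return 2 * x, x + shift

doubled, shifted = multi_apply(double_and_shift, [1.0, 2.0, 3.0], shift=1.0)
# doubled == [2.0, 4.0, 6.0], shifted == [2.0, 3.0, 4.0]

losses = add_prefix({"loss_mask": 0.3, "loss_cls": 0.7}, "decode")
# {'decode.loss_mask': 0.3, 'decode.loss_cls': 0.7}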
+ +import math +import warnings + +import torch +import torch.nn.functional as F +from torch import nn +from torch.autograd import Function +from torch.cuda.amp import custom_fwd +from torch.nn.init import constant_, xavier_uniform_ + + +class MSDeformAttnFunction(Function): + @staticmethod + @custom_fwd(cast_inputs=torch.float32) + def forward( + ctx, value, value_spatial_shapes, value_level_start_index, sampling_locations, attention_weights, im2col_step + ): + output = ms_deform_attn_core_pytorch( + value, + value_spatial_shapes, + # value_level_start_index, + sampling_locations, + attention_weights, + ) + return output + + +def ms_deform_attn_core_pytorch(value, value_spatial_shapes, sampling_locations, attention_weights): + # for debug and test only, + # need to use cuda version instead + N_, S_, M_, D_ = value.shape + _, Lq_, M_, L_, P_, _ = sampling_locations.shape + value_list = value.split([H_ * W_ for H_, W_ in value_spatial_shapes], dim=1) + sampling_grids = 2 * sampling_locations - 1 + sampling_value_list = [] + for lid_, (H_, W_) in enumerate(value_spatial_shapes): + # N_, H_*W_, M_, D_ -> N_, H_*W_, M_*D_ -> N_, M_*D_, H_*W_ -> N_*M_, D_, H_, W_ + value_l_ = value_list[lid_].flatten(2).transpose(1, 2).reshape(N_ * M_, D_, H_, W_) + # N_, Lq_, M_, P_, 2 -> N_, M_, Lq_, P_, 2 -> N_*M_, Lq_, P_, 2 + sampling_grid_l_ = sampling_grids[:, :, :, lid_].transpose(1, 2).flatten(0, 1) + # N_*M_, D_, Lq_, P_ + sampling_value_l_ = F.grid_sample( + value_l_, sampling_grid_l_, mode="bilinear", padding_mode="zeros", align_corners=False + ) + sampling_value_list.append(sampling_value_l_) + # (N_, Lq_, M_, L_, P_) -> (N_, M_, Lq_, L_, P_) -> (N_, M_, 1, Lq_, L_*P_) + attention_weights = attention_weights.transpose(1, 2).reshape(N_ * M_, 1, Lq_, L_ * P_) + output = (torch.stack(sampling_value_list, dim=-2).flatten(-2) * attention_weights).sum(-1).view(N_, M_ * D_, Lq_) + return output.transpose(1, 2).contiguous() + + +def _is_power_of_2(n): + if (not isinstance(n, int)) or (n < 0): + raise ValueError("invalid input for _is_power_of_2: {} (type: {})".format(n, type(n))) + return (n & (n - 1) == 0) and n != 0 + + +class MSDeformAttn(nn.Module): + def __init__(self, d_model=256, n_levels=4, n_heads=8, n_points=4, ratio=1.0): + """Multi-Scale Deformable Attention Module. + + :param d_model hidden dimension + :param n_levels number of feature levels + :param n_heads number of attention heads + :param n_points number of sampling points per attention head per feature level + """ + super().__init__() + if d_model % n_heads != 0: + raise ValueError("d_model must be divisible by n_heads, " "but got {} and {}".format(d_model, n_heads)) + _d_per_head = d_model // n_heads + # you'd better set _d_per_head to a power of 2 + # which is more efficient in our CUDA implementation + if not _is_power_of_2(_d_per_head): + warnings.warn( + "You'd better set d_model in MSDeformAttn to make " + "the dimension of each attention head a power of 2 " + "which is more efficient in our CUDA implementation." 
+ ) + + self.im2col_step = 64 + + self.d_model = d_model + self.n_levels = n_levels + self.n_heads = n_heads + self.n_points = n_points + self.ratio = ratio + self.sampling_offsets = nn.Linear(d_model, n_heads * n_levels * n_points * 2) + self.attention_weights = nn.Linear(d_model, n_heads * n_levels * n_points) + self.value_proj = nn.Linear(d_model, int(d_model * ratio)) + self.output_proj = nn.Linear(int(d_model * ratio), d_model) + + self._reset_parameters() + + def _reset_parameters(self): + constant_(self.sampling_offsets.weight.data, 0.0) + thetas = torch.arange(self.n_heads, dtype=torch.float32) * (2.0 * math.pi / self.n_heads) + grid_init = torch.stack([thetas.cos(), thetas.sin()], -1) + grid_init = ( + (grid_init / grid_init.abs().max(-1, keepdim=True)[0]) + .view(self.n_heads, 1, 1, 2) + .repeat(1, self.n_levels, self.n_points, 1) + ) + for i in range(self.n_points): + grid_init[:, :, i, :] *= i + 1 + + with torch.no_grad(): + self.sampling_offsets.bias = nn.Parameter(grid_init.view(-1)) + constant_(self.attention_weights.weight.data, 0.0) + constant_(self.attention_weights.bias.data, 0.0) + xavier_uniform_(self.value_proj.weight.data) + constant_(self.value_proj.bias.data, 0.0) + xavier_uniform_(self.output_proj.weight.data) + constant_(self.output_proj.bias.data, 0.0) + + def forward( + self, + query, + reference_points, + input_flatten, + input_spatial_shapes, + input_level_start_index, + input_padding_mask=None, + ): + """ + :param query (N, Length_{query}, C) + :param reference_points (N, Length_{query}, n_levels, 2), range in [0, 1], top-left (0,0), bottom-right (1, 1), including padding area + or (N, Length_{query}, n_levels, 4), add additional (w, h) to form reference boxes + :param input_flatten (N, \\sum_{l=0}^{L-1} H_l \\cdot W_l, C) + :param input_spatial_shapes (n_levels, 2), [(H_0, W_0), (H_1, W_1), ..., (H_{L-1}, W_{L-1})] + :param input_level_start_index (n_levels, ), [0, H_0*W_0, H_0*W_0+H_1*W_1, H_0*W_0+H_1*W_1+H_2*W_2, ..., H_0*W_0+H_1*W_1+...+H_{L-1}*W_{L-1}] + :param input_padding_mask (N, \\sum_{l=0}^{L-1} H_l \\cdot W_l), True for padding elements, False for non-padding elements + + :return output (N, Length_{query}, C) + """ + # print(query.shape) + # print(reference_points.shape) + # print(input_flatten.shape) + # print(input_spatial_shapes.shape) + # print(input_level_start_index.shape) + # print(input_spatial_shapes) + # print(input_level_start_index) + + N, Len_q, _ = query.shape + N, Len_in, _ = input_flatten.shape + assert (input_spatial_shapes[:, 0] * input_spatial_shapes[:, 1]).sum() == Len_in + + value = self.value_proj(input_flatten) + if input_padding_mask is not None: + value = value.masked_fill(input_padding_mask[..., None], float(0)) + + value = value.view(N, Len_in, self.n_heads, int(self.ratio * self.d_model) // self.n_heads) + sampling_offsets = self.sampling_offsets(query).view(N, Len_q, self.n_heads, self.n_levels, self.n_points, 2) + attention_weights = self.attention_weights(query).view(N, Len_q, self.n_heads, self.n_levels * self.n_points) + attention_weights = F.softmax(attention_weights, -1).view(N, Len_q, self.n_heads, self.n_levels, self.n_points) + + if reference_points.shape[-1] == 2: + offset_normalizer = torch.stack([input_spatial_shapes[..., 1], input_spatial_shapes[..., 0]], -1) + sampling_locations = ( + reference_points[:, :, None, :, None, :] + + sampling_offsets / offset_normalizer[None, None, None, :, None, :] + ) + elif reference_points.shape[-1] == 4: + sampling_locations = ( + reference_points[:, :, None, :, None, 
:2] + + sampling_offsets / self.n_points * reference_points[:, :, None, :, None, 2:] * 0.5 + ) + else: + raise ValueError( + "Last dim of reference_points must be 2 or 4, but get {} instead.".format(reference_points.shape[-1]) + ) + output = MSDeformAttnFunction.apply( + value, + input_spatial_shapes, + input_level_start_index, + sampling_locations, + attention_weights, + self.im2col_step, + ) + output = self.output_proj(output) + return output diff --git a/engine/pose_estimation/dinov2/dinov2/eval/setup.py b/engine/pose_estimation/dinov2/dinov2/eval/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..959128c0673cc51036dbf17dcc4ee68a037988fb --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/setup.py @@ -0,0 +1,75 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import argparse +from typing import Any, List, Optional, Tuple + +import torch +import torch.backends.cudnn as cudnn + +from dinov2.models import build_model_from_cfg +from dinov2.utils.config import setup +import dinov2.utils.utils as dinov2_utils + + +def get_args_parser( + description: Optional[str] = None, + parents: Optional[List[argparse.ArgumentParser]] = None, + add_help: bool = True, +): + parser = argparse.ArgumentParser( + description=description, + parents=parents or [], + add_help=add_help, + ) + parser.add_argument( + "--config-file", + type=str, + help="Model configuration file", + ) + parser.add_argument( + "--pretrained-weights", + type=str, + help="Pretrained model weights", + ) + parser.add_argument( + "--output-dir", + default="", + type=str, + help="Output directory to write results and logs", + ) + parser.add_argument( + "--opts", + help="Extra configuration options", + default=[], + nargs="+", + ) + return parser + + +def get_autocast_dtype(config): + teacher_dtype_str = config.compute_precision.teacher.backbone.mixed_precision.param_dtype + if teacher_dtype_str == "fp16": + return torch.half + elif teacher_dtype_str == "bf16": + return torch.bfloat16 + else: + return torch.float + + +def build_model_for_eval(config, pretrained_weights): + model, _ = build_model_from_cfg(config, only_teacher=True) + dinov2_utils.load_pretrained_weights(model, pretrained_weights, "teacher") + model.eval() + model.cuda() + return model + + +def setup_and_build_model(args) -> Tuple[Any, torch.dtype]: + cudnn.benchmark = True + config = setup(args) + model = build_model_for_eval(config, args.pretrained_weights) + autocast_dtype = get_autocast_dtype(config) + return model, autocast_dtype diff --git a/engine/pose_estimation/dinov2/dinov2/eval/utils.py b/engine/pose_estimation/dinov2/dinov2/eval/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c50576b1940587ee64b7a422e2e96b475d60fd39 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/eval/utils.py @@ -0,0 +1,146 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
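The MSDeformAttn module above routes through ms_deform_attn_core_pytorch, which is pure PyTorch (grid_sample based) and therefore usable on CPU for debugging. A minimal smoke test with arbitrary toy shapes; the import path mirrors the files added here and is an assumption about how the package is made importable:

import torch
from dinov2.eval.segmentation_m2f.ops.modules import MSDeformAttn

attn = MSDeformAttn(d_model=256, n_levels=2, n_heads=8, n_points=4)

spatial_shapes = torch.as_tensor([[16, 16], [8, 8]], dtype=torch.long)  # (n_levels, 2)
level_start_index = torch.cat(
    (spatial_shapes.new_zeros((1,)), spatial_shapes.prod(1).cumsum(0)[:-1])
)  # tensor([0, 256])
len_in = int(spatial_shapes.prod(1).sum())  # 16*16 + 8*8 = 320

query = torch.rand(2, 100, 256)              # (N, Len_q, C)
reference_points = torch.rand(2, 100, 2, 2)  # (N, Len_q, n_levels, 2), values in [0, 1]
input_flatten = torch.rand(2, len_in, 256)   # (N, sum(H_l * W_l), C)

out = attn(query, reference_points, input_flatten, spatial_shapes, level_start_index)
print(out.shape)                             # torch.Size([2, 100, 256])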
+ +import logging +from typing import Dict, Optional + +import torch +from torch import nn +from torchmetrics import MetricCollection + +from dinov2.data import DatasetWithEnumeratedTargets, SamplerType, make_data_loader +import dinov2.distributed as distributed +from dinov2.logging import MetricLogger + + +logger = logging.getLogger("dinov2") + + +class ModelWithNormalize(torch.nn.Module): + def __init__(self, model): + super().__init__() + self.model = model + + def forward(self, samples): + return nn.functional.normalize(self.model(samples), dim=1, p=2) + + +class ModelWithIntermediateLayers(nn.Module): + def __init__(self, feature_model, n_last_blocks, autocast_ctx): + super().__init__() + self.feature_model = feature_model + self.feature_model.eval() + self.n_last_blocks = n_last_blocks + self.autocast_ctx = autocast_ctx + + def forward(self, images): + with torch.inference_mode(): + with self.autocast_ctx(): + features = self.feature_model.get_intermediate_layers( + images, self.n_last_blocks, return_class_token=True + ) + return features + + +@torch.inference_mode() +def evaluate( + model: nn.Module, + data_loader, + postprocessors: Dict[str, nn.Module], + metrics: Dict[str, MetricCollection], + device: torch.device, + criterion: Optional[nn.Module] = None, +): + model.eval() + if criterion is not None: + criterion.eval() + + for metric in metrics.values(): + metric = metric.to(device) + + metric_logger = MetricLogger(delimiter=" ") + header = "Test:" + + for samples, targets, *_ in metric_logger.log_every(data_loader, 10, header): + outputs = model(samples.to(device)) + targets = targets.to(device) + + if criterion is not None: + loss = criterion(outputs, targets) + metric_logger.update(loss=loss.item()) + + for k, metric in metrics.items(): + metric_inputs = postprocessors[k](outputs, targets) + metric.update(**metric_inputs) + + metric_logger.synchronize_between_processes() + logger.info(f"Averaged stats: {metric_logger}") + + stats = {k: metric.compute() for k, metric in metrics.items()} + metric_logger_stats = {k: meter.global_avg for k, meter in metric_logger.meters.items()} + return metric_logger_stats, stats + + +def all_gather_and_flatten(tensor_rank): + tensor_all_ranks = torch.empty( + distributed.get_global_size(), + *tensor_rank.shape, + dtype=tensor_rank.dtype, + device=tensor_rank.device, + ) + tensor_list = list(tensor_all_ranks.unbind(0)) + torch.distributed.all_gather(tensor_list, tensor_rank.contiguous()) + return tensor_all_ranks.flatten(end_dim=1) + + +def extract_features(model, dataset, batch_size, num_workers, gather_on_cpu=False): + dataset_with_enumerated_targets = DatasetWithEnumeratedTargets(dataset) + sample_count = len(dataset_with_enumerated_targets) + data_loader = make_data_loader( + dataset=dataset_with_enumerated_targets, + batch_size=batch_size, + num_workers=num_workers, + sampler_type=SamplerType.DISTRIBUTED, + drop_last=False, + shuffle=False, + ) + return extract_features_with_dataloader(model, data_loader, sample_count, gather_on_cpu) + + +@torch.inference_mode() +def extract_features_with_dataloader(model, data_loader, sample_count, gather_on_cpu=False): + gather_device = torch.device("cpu") if gather_on_cpu else torch.device("cuda") + metric_logger = MetricLogger(delimiter=" ") + features, all_labels = None, None + for samples, (index, labels_rank) in metric_logger.log_every(data_loader, 10): + samples = samples.cuda(non_blocking=True) + labels_rank = labels_rank.cuda(non_blocking=True) + index = index.cuda(non_blocking=True) + 
features_rank = model(samples).float() + + # init storage feature matrix + if features is None: + features = torch.zeros(sample_count, features_rank.shape[-1], device=gather_device) + labels_shape = list(labels_rank.shape) + labels_shape[0] = sample_count + all_labels = torch.full(labels_shape, fill_value=-1, device=gather_device) + logger.info(f"Storing features into tensor of shape {features.shape}") + + # share indexes, features and labels between processes + index_all = all_gather_and_flatten(index).to(gather_device) + features_all_ranks = all_gather_and_flatten(features_rank).to(gather_device) + labels_all_ranks = all_gather_and_flatten(labels_rank).to(gather_device) + + # update storage feature matrix + if len(index_all) > 0: + features.index_copy_(0, index_all, features_all_ranks) + all_labels.index_copy_(0, index_all, labels_all_ranks) + + logger.info(f"Features shape: {tuple(features.shape)}") + logger.info(f"Labels shape: {tuple(all_labels.shape)}") + + assert torch.all(all_labels > -1) + + return features, all_labels diff --git a/engine/pose_estimation/dinov2/dinov2/fsdp/__init__.py b/engine/pose_estimation/dinov2/dinov2/fsdp/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ed454480e0b76e761d657cc40fd097bd339d15a2 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/fsdp/__init__.py @@ -0,0 +1,157 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import os +from typing import Any + +import torch +import dinov2.distributed as distributed +from functools import partial +from fvcore.common.checkpoint import Checkpointer +from torch.distributed.fsdp import FullyShardedDataParallel as FSDP +from torch.distributed.fsdp import ShardingStrategy +from torch.distributed.fsdp import MixedPrecision +from torch.distributed.fsdp import StateDictType +from torch.distributed.fsdp.sharded_grad_scaler import ShardedGradScaler +from torch.distributed.fsdp.wrap import ModuleWrapPolicy +from torch.distributed.fsdp._runtime_utils import _reshard + + +def get_fsdp_wrapper(model_cfg, modules_to_wrap=set()): + sharding_strategy_dict = { + "NO_SHARD": ShardingStrategy.NO_SHARD, + "SHARD_GRAD_OP": ShardingStrategy.SHARD_GRAD_OP, + "FULL_SHARD": ShardingStrategy.FULL_SHARD, + } + + dtype_dict = { + "fp32": torch.float32, + "fp16": torch.float16, + "bf16": torch.bfloat16, + } + + mixed_precision_config = MixedPrecision( + param_dtype=dtype_dict[model_cfg.mixed_precision.param_dtype], + reduce_dtype=dtype_dict[model_cfg.mixed_precision.reduce_dtype], + buffer_dtype=dtype_dict[model_cfg.mixed_precision.buffer_dtype], + ) + + sharding_strategy_config = sharding_strategy_dict[model_cfg.sharding_strategy] + + local_rank = distributed.get_local_rank() + + fsdp_wrapper = partial( + FSDP, + sharding_strategy=sharding_strategy_config, + mixed_precision=mixed_precision_config, + device_id=local_rank, + sync_module_states=True, + use_orig_params=True, + auto_wrap_policy=ModuleWrapPolicy(modules_to_wrap), + ) + return fsdp_wrapper + + +def is_fsdp(x): + return isinstance(x, FSDP) + + +def is_sharded_fsdp(x): + return is_fsdp(x) and x.sharding_strategy is not ShardingStrategy.NO_SHARD + + +def free_if_fsdp(x): + if is_sharded_fsdp(x): + handles = x._handles + true_list = [True for h in handles] + _reshard(x, handles, true_list) + + +def get_fsdp_modules(x): + return FSDP.fsdp_modules(x) + + +def reshard_fsdp_model(x): + for m in 
get_fsdp_modules(x): + free_if_fsdp(m) + + +def rankstr(): + return f"rank_{distributed.get_global_rank()}" + + +class FSDPCheckpointer(Checkpointer): + def save(self, name: str, **kwargs: Any) -> None: + """ + Dump model and checkpointables to a file. + + Args: + name (str): name of the file. + kwargs (dict): extra arbitrary data to save. + """ + if not self.save_dir or not self.save_to_disk: + return + + data = {} + with FSDP.state_dict_type(self.model, StateDictType.LOCAL_STATE_DICT): + data["model"] = self.model.state_dict() + + # data["model"] = self.model.state_dict() + for key, obj in self.checkpointables.items(): + data[key] = obj.state_dict() + data.update(kwargs) + + basename = f"{name}.{rankstr()}.pth" + save_file = os.path.join(self.save_dir, basename) + assert os.path.basename(save_file) == basename, basename + self.logger.info("Saving checkpoint to {}".format(save_file)) + with self.path_manager.open(save_file, "wb") as f: + torch.save(data, f) + self.tag_last_checkpoint(basename) + + def load(self, *args, **kwargs): + with FSDP.state_dict_type(self.model, StateDictType.LOCAL_STATE_DICT): + return super().load(*args, **kwargs) + + def has_checkpoint(self) -> bool: + """ + Returns: + bool: whether a checkpoint exists in the target directory. + """ + save_file = os.path.join(self.save_dir, f"last_checkpoint.{rankstr()}") + return self.path_manager.exists(save_file) + + def get_checkpoint_file(self) -> str: + """ + Returns: + str: The latest checkpoint file in target directory. + """ + save_file = os.path.join(self.save_dir, f"last_checkpoint.{rankstr()}") + try: + with self.path_manager.open(save_file, "r") as f: + last_saved = f.read().strip() + except IOError: + # if file doesn't exist, maybe because it has just been + # deleted by a separate process + return "" + # pyre-fixme[6]: For 2nd param expected `Union[PathLike[str], str]` but got + # `Union[bytes, str]`. + return os.path.join(self.save_dir, last_saved) + + def tag_last_checkpoint(self, last_filename_basename: str) -> None: + """ + Tag the last checkpoint. + + Args: + last_filename_basename (str): the basename of the last filename. + """ + if distributed.is_enabled(): + torch.distributed.barrier() + save_file = os.path.join(self.save_dir, f"last_checkpoint.{rankstr()}") + with self.path_manager.open(save_file, "w") as f: + f.write(last_filename_basename) # pyre-ignore + + +ShardedGradScaler = ShardedGradScaler diff --git a/engine/pose_estimation/dinov2/dinov2/hub/__init__.py b/engine/pose_estimation/dinov2/dinov2/hub/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b88da6bf80be92af00b72dfdb0a806fa64a7a2d9 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/hub/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. diff --git a/engine/pose_estimation/dinov2/dinov2/hub/backbones.py b/engine/pose_estimation/dinov2/dinov2/hub/backbones.py new file mode 100644 index 0000000000000000000000000000000000000000..53fe83719d5107eb77a8f25ef1814c3d73446002 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/hub/backbones.py @@ -0,0 +1,156 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
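FSDPCheckpointer above saves and loads rank-local shards: each process writes <name>.rank_<r>.pth under LOCAL_STATE_DICT and records it in last_checkpoint.rank_<r>, so resuming is also per rank. A hedged usage sketch only, assuming model has already been wrapped by the callable returned from get_fsdp_wrapper and optimizer is an ordinary torch optimizer (both are placeholders, and an initialized process group is required):

# sketch only -- `model` is assumed to be FSDP-wrapped, `optimizer` is any torch optimizer
checkpointer = FSDPCheckpointer(model, save_dir="output/ckpt", optimizer=optimizer, save_to_disk=True)

checkpointer.save("model_0001249", iteration=1249)
# rank r writes output/ckpt/model_0001249.rank_<r>.pth and updates output/ckpt/last_checkpoint.rank_<r>

if checkpointer.has_checkpoint():
    extra = checkpointer.load(checkpointer.get_checkpoint_file())
    start_iter = extra.get("iteration", -1) + 1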
+ +from enum import Enum +from typing import Union + +import torch + +from .utils import _DINOV2_BASE_URL, _make_dinov2_model_name + + +class Weights(Enum): + LVD142M = "LVD142M" + + +def _make_dinov2_model( + *, + arch_name: str = "vit_large", + img_size: int = 518, + patch_size: int = 14, + init_values: float = 1.0, + ffn_layer: str = "mlp", + block_chunks: int = 0, + num_register_tokens: int = 0, + interpolate_antialias: bool = False, + interpolate_offset: float = 0.1, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.LVD142M, + **kwargs, +): + from ..models import vision_transformer as vits + + if isinstance(weights, str): + try: + weights = Weights[weights] + except KeyError: + raise AssertionError(f"Unsupported weights: {weights}") + + model_base_name = _make_dinov2_model_name(arch_name, patch_size) + vit_kwargs = dict( + img_size=img_size, + patch_size=patch_size, + init_values=init_values, + ffn_layer=ffn_layer, + block_chunks=block_chunks, + num_register_tokens=num_register_tokens, + interpolate_antialias=interpolate_antialias, + interpolate_offset=interpolate_offset, + ) + vit_kwargs.update(**kwargs) + model = vits.__dict__[arch_name](**vit_kwargs) + + if pretrained: + model_full_name = _make_dinov2_model_name(arch_name, patch_size, num_register_tokens) + url = _DINOV2_BASE_URL + f"/{model_base_name}/{model_full_name}_pretrain.pth" + state_dict = torch.hub.load_state_dict_from_url(url, map_location="cpu") + model.load_state_dict(state_dict, strict=True) + + return model + + +def dinov2_vits14(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-S/14 model (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model(arch_name="vit_small", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitb14(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-B/14 model (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model(arch_name="vit_base", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitl14(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-L/14 model (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model(arch_name="vit_large", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitg14(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-g/14 model (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model( + arch_name="vit_giant2", + ffn_layer="swiglufused", + weights=weights, + pretrained=pretrained, + **kwargs, + ) + + +def dinov2_vits14_reg(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-S/14 model with registers (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model( + arch_name="vit_small", + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitb14_reg(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-B/14 model with registers (optionally) pretrained on the LVD-142M dataset. 
+ """ + return _make_dinov2_model( + arch_name="vit_base", + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitl14_reg(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-L/14 model with registers (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model( + arch_name="vit_large", + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitg14_reg(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.LVD142M, **kwargs): + """ + DINOv2 ViT-g/14 model with registers (optionally) pretrained on the LVD-142M dataset. + """ + return _make_dinov2_model( + arch_name="vit_giant2", + ffn_layer="swiglufused", + weights=weights, + pretrained=pretrained, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) diff --git a/engine/pose_estimation/dinov2/dinov2/hub/classifiers.py b/engine/pose_estimation/dinov2/dinov2/hub/classifiers.py new file mode 100644 index 0000000000000000000000000000000000000000..3f0841efa80ab3d564cd320d61da254af182606b --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/hub/classifiers.py @@ -0,0 +1,268 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from enum import Enum +from typing import Union + +import torch +import torch.nn as nn + +from .backbones import _make_dinov2_model +from .utils import _DINOV2_BASE_URL, _make_dinov2_model_name + + +class Weights(Enum): + IMAGENET1K = "IMAGENET1K" + + +def _make_dinov2_linear_classification_head( + *, + arch_name: str = "vit_large", + patch_size: int = 14, + embed_dim: int = 1024, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + num_register_tokens: int = 0, + **kwargs, +): + if layers not in (1, 4): + raise AssertionError(f"Unsupported number of layers: {layers}") + if isinstance(weights, str): + try: + weights = Weights[weights] + except KeyError: + raise AssertionError(f"Unsupported weights: {weights}") + + linear_head = nn.Linear((1 + layers) * embed_dim, 1_000) + + if pretrained: + model_base_name = _make_dinov2_model_name(arch_name, patch_size) + model_full_name = _make_dinov2_model_name(arch_name, patch_size, num_register_tokens) + layers_str = str(layers) if layers == 4 else "" + url = _DINOV2_BASE_URL + f"/{model_base_name}/{model_full_name}_linear{layers_str}_head.pth" + state_dict = torch.hub.load_state_dict_from_url(url, map_location="cpu") + linear_head.load_state_dict(state_dict, strict=True) + + return linear_head + + +class _LinearClassifierWrapper(nn.Module): + def __init__(self, *, backbone: nn.Module, linear_head: nn.Module, layers: int = 4): + super().__init__() + self.backbone = backbone + self.linear_head = linear_head + self.layers = layers + + def forward(self, x): + if self.layers == 1: + x = self.backbone.forward_features(x) + cls_token = x["x_norm_clstoken"] + patch_tokens = x["x_norm_patchtokens"] + # fmt: off + linear_input = torch.cat([ + cls_token, + patch_tokens.mean(dim=1), + ], dim=1) + # fmt: on + elif self.layers == 4: + x = self.backbone.get_intermediate_layers(x, n=4, return_class_token=True) + # fmt: off + linear_input = torch.cat([ + x[0][1], + 
x[1][1], + x[2][1], + x[3][1], + x[3][0].mean(dim=1), + ], dim=1) + # fmt: on + else: + assert False, f"Unsupported number of layers: {self.layers}" + return self.linear_head(linear_input) + + +def _make_dinov2_linear_classifier( + *, + arch_name: str = "vit_large", + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + num_register_tokens: int = 0, + interpolate_antialias: bool = False, + interpolate_offset: float = 0.1, + **kwargs, +): + backbone = _make_dinov2_model( + arch_name=arch_name, + pretrained=pretrained, + num_register_tokens=num_register_tokens, + interpolate_antialias=interpolate_antialias, + interpolate_offset=interpolate_offset, + **kwargs, + ) + + embed_dim = backbone.embed_dim + patch_size = backbone.patch_size + linear_head = _make_dinov2_linear_classification_head( + arch_name=arch_name, + patch_size=patch_size, + embed_dim=embed_dim, + layers=layers, + pretrained=pretrained, + weights=weights, + num_register_tokens=num_register_tokens, + ) + + return _LinearClassifierWrapper(backbone=backbone, linear_head=linear_head, layers=layers) + + +def dinov2_vits14_lc( + *, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + **kwargs, +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-S/14 backbone (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_small", + layers=layers, + pretrained=pretrained, + weights=weights, + **kwargs, + ) + + +def dinov2_vitb14_lc( + *, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + **kwargs, +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-B/14 backbone (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_base", + layers=layers, + pretrained=pretrained, + weights=weights, + **kwargs, + ) + + +def dinov2_vitl14_lc( + *, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + **kwargs, +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-L/14 backbone (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_large", + layers=layers, + pretrained=pretrained, + weights=weights, + **kwargs, + ) + + +def dinov2_vitg14_lc( + *, + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.IMAGENET1K, + **kwargs, +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-g/14 backbone (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_giant2", + layers=layers, + ffn_layer="swiglufused", + pretrained=pretrained, + weights=weights, + **kwargs, + ) + + +def dinov2_vits14_reg_lc( + *, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.IMAGENET1K, **kwargs +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-S/14 backbone with registers (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. 
+ """ + return _make_dinov2_linear_classifier( + arch_name="vit_small", + layers=layers, + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitb14_reg_lc( + *, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.IMAGENET1K, **kwargs +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-B/14 backbone with registers (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_base", + layers=layers, + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitl14_reg_lc( + *, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.IMAGENET1K, **kwargs +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-L/14 backbone with registers (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_large", + layers=layers, + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) + + +def dinov2_vitg14_reg_lc( + *, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.IMAGENET1K, **kwargs +): + """ + Linear classifier (1 or 4 layers) on top of a DINOv2 ViT-g/14 backbone with registers (optionally) pretrained on the LVD-142M dataset and trained on ImageNet-1k. + """ + return _make_dinov2_linear_classifier( + arch_name="vit_giant2", + layers=layers, + ffn_layer="swiglufused", + pretrained=pretrained, + weights=weights, + num_register_tokens=4, + interpolate_antialias=True, + interpolate_offset=0.0, + **kwargs, + ) diff --git a/engine/pose_estimation/dinov2/dinov2/hub/depth/__init__.py b/engine/pose_estimation/dinov2/dinov2/hub/depth/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..91716e58ab6158d814df8c653644d9af4c7be65c --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/hub/depth/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .decode_heads import BNHead, DPTHead +from .encoder_decoder import DepthEncoderDecoder diff --git a/engine/pose_estimation/dinov2/dinov2/hub/depth/decode_heads.py b/engine/pose_estimation/dinov2/dinov2/hub/depth/decode_heads.py new file mode 100644 index 0000000000000000000000000000000000000000..f455accad38fec6ecdd53460233a564c34f434da --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/hub/depth/decode_heads.py @@ -0,0 +1,747 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
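The *_lc entrypoints above pair a pretrained backbone with the matching ImageNet-1k linear head; with pretrained=True both sets of weights are fetched from _DINOV2_BASE_URL. A minimal sketch (network access required; assumes the vendored dinov2 package is importable under this path):

import torch
from dinov2.hub.classifiers import dinov2_vits14_lc

classifier = dinov2_vits14_lc(layers=4, pretrained=True)
classifier.eval()

x = torch.rand(1, 3, 224, 224)  # spatial size must be a multiple of the 14-pixel patch
with torch.no_grad():
    logits = classifier(x)      # shape (1, 1000): ImageNet-1k class scores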
+ +import copy +from functools import partial +import math +import warnings + +import torch +import torch.nn as nn + +from .ops import resize + + +# XXX: (Untested) replacement for mmcv.imdenormalize() +def _imdenormalize(img, mean, std, to_bgr=True): + import numpy as np + + mean = mean.reshape(1, -1).astype(np.float64) + std = std.reshape(1, -1).astype(np.float64) + img = (img * std) + mean + if to_bgr: + img = img[::-1] + return img + + +class DepthBaseDecodeHead(nn.Module): + """Base class for BaseDecodeHead. + + Args: + in_channels (List): Input channels. + channels (int): Channels after modules, before conv_depth. + conv_layer (nn.Module): Conv layers. Default: None. + act_layer (nn.Module): Activation layers. Default: nn.ReLU. + loss_decode (dict): Config of decode loss. + Default: (). + sampler (dict|None): The config of depth map sampler. + Default: None. + align_corners (bool): align_corners argument of F.interpolate. + Default: False. + min_depth (int): Min depth in dataset setting. + Default: 1e-3. + max_depth (int): Max depth in dataset setting. + Default: None. + norm_layer (dict|None): Norm layers. + Default: None. + classify (bool): Whether predict depth in a cls.-reg. manner. + Default: False. + n_bins (int): The number of bins used in cls. step. + Default: 256. + bins_strategy (str): The discrete strategy used in cls. step. + Default: 'UD'. + norm_strategy (str): The norm strategy on cls. probability + distribution. Default: 'linear' + scale_up (str): Whether predict depth in a scale-up manner. + Default: False. + """ + + def __init__( + self, + in_channels, + conv_layer=None, + act_layer=nn.ReLU, + channels=96, + loss_decode=(), + sampler=None, + align_corners=False, + min_depth=1e-3, + max_depth=None, + norm_layer=None, + classify=False, + n_bins=256, + bins_strategy="UD", + norm_strategy="linear", + scale_up=False, + ): + super(DepthBaseDecodeHead, self).__init__() + + self.in_channels = in_channels + self.channels = channels + self.conf_layer = conv_layer + self.act_layer = act_layer + self.loss_decode = loss_decode + self.align_corners = align_corners + self.min_depth = min_depth + self.max_depth = max_depth + self.norm_layer = norm_layer + self.classify = classify + self.n_bins = n_bins + self.scale_up = scale_up + + if self.classify: + assert bins_strategy in ["UD", "SID"], "Support bins_strategy: UD, SID" + assert norm_strategy in ["linear", "softmax", "sigmoid"], "Support norm_strategy: linear, softmax, sigmoid" + + self.bins_strategy = bins_strategy + self.norm_strategy = norm_strategy + self.softmax = nn.Softmax(dim=1) + self.conv_depth = nn.Conv2d(channels, n_bins, kernel_size=3, padding=1, stride=1) + else: + self.conv_depth = nn.Conv2d(channels, 1, kernel_size=3, padding=1, stride=1) + + self.relu = nn.ReLU() + self.sigmoid = nn.Sigmoid() + + def forward(self, inputs, img_metas): + """Placeholder of forward function.""" + pass + + def forward_train(self, img, inputs, img_metas, depth_gt): + """Forward function for training. + Args: + inputs (list[Tensor]): List of multi-level img features. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `depth/datasets/pipelines/formatting.py:Collect`. 
+ depth_gt (Tensor): GT depth + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + depth_pred = self.forward(inputs, img_metas) + losses = self.losses(depth_pred, depth_gt) + + log_imgs = self.log_images(img[0], depth_pred[0], depth_gt[0], img_metas[0]) + losses.update(**log_imgs) + + return losses + + def forward_test(self, inputs, img_metas): + """Forward function for testing. + Args: + inputs (list[Tensor]): List of multi-level img features. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `depth/datasets/pipelines/formatting.py:Collect`. + + Returns: + Tensor: Output depth map. + """ + return self.forward(inputs, img_metas) + + def depth_pred(self, feat): + """Prediction each pixel.""" + if self.classify: + logit = self.conv_depth(feat) + + if self.bins_strategy == "UD": + bins = torch.linspace(self.min_depth, self.max_depth, self.n_bins, device=feat.device) + elif self.bins_strategy == "SID": + bins = torch.logspace(self.min_depth, self.max_depth, self.n_bins, device=feat.device) + + # following Adabins, default linear + if self.norm_strategy == "linear": + logit = torch.relu(logit) + eps = 0.1 + logit = logit + eps + logit = logit / logit.sum(dim=1, keepdim=True) + elif self.norm_strategy == "softmax": + logit = torch.softmax(logit, dim=1) + elif self.norm_strategy == "sigmoid": + logit = torch.sigmoid(logit) + logit = logit / logit.sum(dim=1, keepdim=True) + + output = torch.einsum("ikmn,k->imn", [logit, bins]).unsqueeze(dim=1) + + else: + if self.scale_up: + output = self.sigmoid(self.conv_depth(feat)) * self.max_depth + else: + output = self.relu(self.conv_depth(feat)) + self.min_depth + return output + + def losses(self, depth_pred, depth_gt): + """Compute depth loss.""" + loss = dict() + depth_pred = resize( + input=depth_pred, size=depth_gt.shape[2:], mode="bilinear", align_corners=self.align_corners, warning=False + ) + if not isinstance(self.loss_decode, nn.ModuleList): + losses_decode = [self.loss_decode] + else: + losses_decode = self.loss_decode + for loss_decode in losses_decode: + if loss_decode.loss_name not in loss: + loss[loss_decode.loss_name] = loss_decode(depth_pred, depth_gt) + else: + loss[loss_decode.loss_name] += loss_decode(depth_pred, depth_gt) + return loss + + def log_images(self, img_path, depth_pred, depth_gt, img_meta): + import numpy as np + + show_img = copy.deepcopy(img_path.detach().cpu().permute(1, 2, 0)) + show_img = show_img.numpy().astype(np.float32) + show_img = _imdenormalize( + show_img, + img_meta["img_norm_cfg"]["mean"], + img_meta["img_norm_cfg"]["std"], + img_meta["img_norm_cfg"]["to_rgb"], + ) + show_img = np.clip(show_img, 0, 255) + show_img = show_img.astype(np.uint8) + show_img = show_img[:, :, ::-1] + show_img = show_img.transpose(0, 2, 1) + show_img = show_img.transpose(1, 0, 2) + + depth_pred = depth_pred / torch.max(depth_pred) + depth_gt = depth_gt / torch.max(depth_gt) + + depth_pred_color = copy.deepcopy(depth_pred.detach().cpu()) + depth_gt_color = copy.deepcopy(depth_gt.detach().cpu()) + + return {"img_rgb": show_img, "img_depth_pred": depth_pred_color, "img_depth_gt": depth_gt_color} + + +class BNHead(DepthBaseDecodeHead): + """Just a batchnorm.""" + + def __init__(self, input_transform="resize_concat", in_index=(0, 1, 2, 3), upsample=1, **kwargs): + super().__init__(**kwargs) + self.input_transform = input_transform + 
self.in_index = in_index + self.upsample = upsample + # self.bn = nn.SyncBatchNorm(self.in_channels) + if self.classify: + self.conv_depth = nn.Conv2d(self.channels, self.n_bins, kernel_size=1, padding=0, stride=1) + else: + self.conv_depth = nn.Conv2d(self.channels, 1, kernel_size=1, padding=0, stride=1) + + def _transform_inputs(self, inputs): + """Transform inputs for decoder. + Args: + inputs (list[Tensor]): List of multi-level img features. + Returns: + Tensor: The transformed inputs + """ + + if "concat" in self.input_transform: + inputs = [inputs[i] for i in self.in_index] + if "resize" in self.input_transform: + inputs = [ + resize( + input=x, + size=[s * self.upsample for s in inputs[0].shape[2:]], + mode="bilinear", + align_corners=self.align_corners, + ) + for x in inputs + ] + inputs = torch.cat(inputs, dim=1) + elif self.input_transform == "multiple_select": + inputs = [inputs[i] for i in self.in_index] + else: + inputs = inputs[self.in_index] + + return inputs + + def _forward_feature(self, inputs, img_metas=None, **kwargs): + """Forward function for feature maps before classifying each pixel with + ``self.cls_seg`` fc. + Args: + inputs (list[Tensor]): List of multi-level img features. + Returns: + feats (Tensor): A tensor of shape (batch_size, self.channels, + H, W) which is feature map for last layer of decoder head. + """ + # accept lists (for cls token) + inputs = list(inputs) + for i, x in enumerate(inputs): + if len(x) == 2: + x, cls_token = x[0], x[1] + if len(x.shape) == 2: + x = x[:, :, None, None] + cls_token = cls_token[:, :, None, None].expand_as(x) + inputs[i] = torch.cat((x, cls_token), 1) + else: + x = x[0] + if len(x.shape) == 2: + x = x[:, :, None, None] + inputs[i] = x + x = self._transform_inputs(inputs) + # feats = self.bn(x) + return x + + def forward(self, inputs, img_metas=None, **kwargs): + """Forward function.""" + output = self._forward_feature(inputs, img_metas=img_metas, **kwargs) + output = self.depth_pred(output) + return output + + +class ConvModule(nn.Module): + """A conv block that bundles conv/norm/activation layers. + + This block simplifies the usage of convolution layers, which are commonly + used with a norm layer (e.g., BatchNorm) and activation layer (e.g., ReLU). + It is based upon three build methods: `build_conv_layer()`, + `build_norm_layer()` and `build_activation_layer()`. + + Besides, we add some additional features in this module. + 1. Automatically set `bias` of the conv layer. + 2. Spectral norm is supported. + 3. More padding modes are supported. Before PyTorch 1.5, nn.Conv2d only + supports zero and circular padding, and we add "reflect" padding mode. + + Args: + in_channels (int): Number of channels in the input feature map. + Same as that in ``nn._ConvNd``. + out_channels (int): Number of channels produced by the convolution. + Same as that in ``nn._ConvNd``. + kernel_size (int | tuple[int]): Size of the convolving kernel. + Same as that in ``nn._ConvNd``. + stride (int | tuple[int]): Stride of the convolution. + Same as that in ``nn._ConvNd``. + padding (int | tuple[int]): Zero-padding added to both sides of + the input. Same as that in ``nn._ConvNd``. + dilation (int | tuple[int]): Spacing between kernel elements. + Same as that in ``nn._ConvNd``. + groups (int): Number of blocked connections from input channels to + output channels. Same as that in ``nn._ConvNd``. + bias (bool | str): If specified as `auto`, it will be decided by the + norm_layer. Bias will be set as True if `norm_layer` is None, otherwise + False. 
Default: "auto". + conv_layer (nn.Module): Convolution layer. Default: None, + which means using conv2d. + norm_layer (nn.Module): Normalization layer. Default: None. + act_layer (nn.Module): Activation layer. Default: nn.ReLU. + inplace (bool): Whether to use inplace mode for activation. + Default: True. + with_spectral_norm (bool): Whether use spectral norm in conv module. + Default: False. + padding_mode (str): If the `padding_mode` has not been supported by + current `Conv2d` in PyTorch, we will use our own padding layer + instead. Currently, we support ['zeros', 'circular'] with official + implementation and ['reflect'] with our own implementation. + Default: 'zeros'. + order (tuple[str]): The order of conv/norm/activation layers. It is a + sequence of "conv", "norm" and "act". Common examples are + ("conv", "norm", "act") and ("act", "conv", "norm"). + Default: ('conv', 'norm', 'act'). + """ + + _abbr_ = "conv_block" + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + bias="auto", + conv_layer=nn.Conv2d, + norm_layer=None, + act_layer=nn.ReLU, + inplace=True, + with_spectral_norm=False, + padding_mode="zeros", + order=("conv", "norm", "act"), + ): + super(ConvModule, self).__init__() + official_padding_mode = ["zeros", "circular"] + self.conv_layer = conv_layer + self.norm_layer = norm_layer + self.act_layer = act_layer + self.inplace = inplace + self.with_spectral_norm = with_spectral_norm + self.with_explicit_padding = padding_mode not in official_padding_mode + self.order = order + assert isinstance(self.order, tuple) and len(self.order) == 3 + assert set(order) == set(["conv", "norm", "act"]) + + self.with_norm = norm_layer is not None + self.with_activation = act_layer is not None + # if the conv layer is before a norm layer, bias is unnecessary. 
+
+        if bias == "auto":
+            bias = not self.with_norm
+        self.with_bias = bias
+
+        if self.with_explicit_padding:
+            if padding_mode == "zeros":
+                padding_layer = nn.ZeroPad2d
+            else:
+                raise AssertionError(f"Unsupported padding mode: {padding_mode}")
+            self.pad = padding_layer(padding)
+
+        # reset padding to 0 for conv module
+        conv_padding = 0 if self.with_explicit_padding else padding
+        # build convolution layer
+        self.conv = self.conv_layer(
+            in_channels,
+            out_channels,
+            kernel_size,
+            stride=stride,
+            padding=conv_padding,
+            dilation=dilation,
+            groups=groups,
+            bias=bias,
+        )
+        # export the attributes of self.conv to a higher level for convenience
+        self.in_channels = self.conv.in_channels
+        self.out_channels = self.conv.out_channels
+        self.kernel_size = self.conv.kernel_size
+        self.stride = self.conv.stride
+        self.padding = padding
+        self.dilation = self.conv.dilation
+        self.transposed = self.conv.transposed
+        self.output_padding = self.conv.output_padding
+        self.groups = self.conv.groups
+
+        if self.with_spectral_norm:
+            self.conv = nn.utils.spectral_norm(self.conv)
+
+        # build normalization layers
+        if self.with_norm:
+            # norm layer is after conv layer
+            if order.index("norm") > order.index("conv"):
+                norm_channels = out_channels
+            else:
+                norm_channels = in_channels
+            # instantiate the norm layer and register it under a private name so the
+            # `norm` property below can look it up through `self.norm_name`
+            norm = norm_layer(num_features=norm_channels)
+            self.norm_name = "_norm"
+            self.add_module(self.norm_name, norm)
+            if self.with_bias:
+                from torch.nn.modules.batchnorm import _BatchNorm
+                from torch.nn.modules.instancenorm import _InstanceNorm
+
+                if isinstance(norm, (_BatchNorm, _InstanceNorm)):
+                    warnings.warn("Unnecessary conv bias before batch/instance norm")
+        else:
+            self.norm_name = None
+
+        # build activation layer
+        if self.with_activation:
+            # nn.Tanh has no 'inplace' argument
+            # (nn.Tanh, nn.PReLU, nn.Sigmoid, nn.HSigmoid, nn.Swish, nn.GELU)
+            if not isinstance(act_layer, (nn.Tanh, nn.PReLU, nn.Sigmoid, nn.GELU)):
+                act_layer = partial(act_layer, inplace=inplace)
+            self.activate = act_layer()
+
+        # Use msra init by default
+        self.init_weights()
+
+    @property
+    def norm(self):
+        if self.norm_name:
+            return getattr(self, self.norm_name)
+        else:
+            return None
+
+    def init_weights(self):
+        # 1. It is mainly for customized conv layers with their own
+        #    initialization manners by calling their own ``init_weights()``,
+        #    and we do not want ConvModule to override the initialization.
+        # 2. For customized conv layers without their own initialization
+        #    manners (that is, they don't have their own ``init_weights()``)
+        #    and PyTorch's conv layers, they will be initialized by
+        #    this method with default ``kaiming_init``.
+        #    Note: For PyTorch's conv layers, they will be overwritten by our
+        #    initialization implementation using default ``kaiming_init``.
+ if not hasattr(self.conv, "init_weights"): + if self.with_activation and isinstance(self.act_layer, nn.LeakyReLU): + nonlinearity = "leaky_relu" + a = 0.01 # XXX: default negative_slope + else: + nonlinearity = "relu" + a = 0 + if hasattr(self.conv, "weight") and self.conv.weight is not None: + nn.init.kaiming_normal_(self.conv.weight, a=a, mode="fan_out", nonlinearity=nonlinearity) + if hasattr(self.conv, "bias") and self.conv.bias is not None: + nn.init.constant_(self.conv.bias, 0) + if self.with_norm: + if hasattr(self.norm, "weight") and self.norm.weight is not None: + nn.init.constant_(self.norm.weight, 1) + if hasattr(self.norm, "bias") and self.norm.bias is not None: + nn.init.constant_(self.norm.bias, 0) + + def forward(self, x, activate=True, norm=True): + for layer in self.order: + if layer == "conv": + if self.with_explicit_padding: + x = self.pad(x) + x = self.conv(x) + elif layer == "norm" and norm and self.with_norm: + x = self.norm(x) + elif layer == "act" and activate and self.with_activation: + x = self.activate(x) + return x + + +class Interpolate(nn.Module): + def __init__(self, scale_factor, mode, align_corners=False): + super(Interpolate, self).__init__() + self.interp = nn.functional.interpolate + self.scale_factor = scale_factor + self.mode = mode + self.align_corners = align_corners + + def forward(self, x): + x = self.interp(x, scale_factor=self.scale_factor, mode=self.mode, align_corners=self.align_corners) + return x + + +class HeadDepth(nn.Module): + def __init__(self, features): + super(HeadDepth, self).__init__() + self.head = nn.Sequential( + nn.Conv2d(features, features // 2, kernel_size=3, stride=1, padding=1), + Interpolate(scale_factor=2, mode="bilinear", align_corners=True), + nn.Conv2d(features // 2, 32, kernel_size=3, stride=1, padding=1), + nn.ReLU(), + nn.Conv2d(32, 1, kernel_size=1, stride=1, padding=0), + ) + + def forward(self, x): + x = self.head(x) + return x + + +class ReassembleBlocks(nn.Module): + """ViTPostProcessBlock, process cls_token in ViT backbone output and + rearrange the feature vector to feature map. + Args: + in_channels (int): ViT feature channels. Default: 768. + out_channels (List): output channels of each stage. + Default: [96, 192, 384, 768]. + readout_type (str): Type of readout operation. Default: 'ignore'. + patch_size (int): The patch size. Default: 16. 
+ """ + + def __init__(self, in_channels=768, out_channels=[96, 192, 384, 768], readout_type="ignore", patch_size=16): + super(ReassembleBlocks, self).__init__() + + assert readout_type in ["ignore", "add", "project"] + self.readout_type = readout_type + self.patch_size = patch_size + + self.projects = nn.ModuleList( + [ + ConvModule( + in_channels=in_channels, + out_channels=out_channel, + kernel_size=1, + act_layer=None, + ) + for out_channel in out_channels + ] + ) + + self.resize_layers = nn.ModuleList( + [ + nn.ConvTranspose2d( + in_channels=out_channels[0], out_channels=out_channels[0], kernel_size=4, stride=4, padding=0 + ), + nn.ConvTranspose2d( + in_channels=out_channels[1], out_channels=out_channels[1], kernel_size=2, stride=2, padding=0 + ), + nn.Identity(), + nn.Conv2d( + in_channels=out_channels[3], out_channels=out_channels[3], kernel_size=3, stride=2, padding=1 + ), + ] + ) + if self.readout_type == "project": + self.readout_projects = nn.ModuleList() + for _ in range(len(self.projects)): + self.readout_projects.append(nn.Sequential(nn.Linear(2 * in_channels, in_channels), nn.GELU())) + + def forward(self, inputs): + assert isinstance(inputs, list) + out = [] + for i, x in enumerate(inputs): + assert len(x) == 2 + x, cls_token = x[0], x[1] + feature_shape = x.shape + if self.readout_type == "project": + x = x.flatten(2).permute((0, 2, 1)) + readout = cls_token.unsqueeze(1).expand_as(x) + x = self.readout_projects[i](torch.cat((x, readout), -1)) + x = x.permute(0, 2, 1).reshape(feature_shape) + elif self.readout_type == "add": + x = x.flatten(2) + cls_token.unsqueeze(-1) + x = x.reshape(feature_shape) + else: + pass + x = self.projects[i](x) + x = self.resize_layers[i](x) + out.append(x) + return out + + +class PreActResidualConvUnit(nn.Module): + """ResidualConvUnit, pre-activate residual unit. + Args: + in_channels (int): number of channels in the input feature map. + act_layer (nn.Module): activation layer. + norm_layer (nn.Module): norm layer. + stride (int): stride of the first block. Default: 1 + dilation (int): dilation rate for convs layers. Default: 1. + """ + + def __init__(self, in_channels, act_layer, norm_layer, stride=1, dilation=1): + super(PreActResidualConvUnit, self).__init__() + + self.conv1 = ConvModule( + in_channels, + in_channels, + 3, + stride=stride, + padding=dilation, + dilation=dilation, + norm_layer=norm_layer, + act_layer=act_layer, + bias=False, + order=("act", "conv", "norm"), + ) + + self.conv2 = ConvModule( + in_channels, + in_channels, + 3, + padding=1, + norm_layer=norm_layer, + act_layer=act_layer, + bias=False, + order=("act", "conv", "norm"), + ) + + def forward(self, inputs): + inputs_ = inputs.clone() + x = self.conv1(inputs) + x = self.conv2(x) + return x + inputs_ + + +class FeatureFusionBlock(nn.Module): + """FeatureFusionBlock, merge feature map from different stages. + Args: + in_channels (int): Input channels. + act_layer (nn.Module): activation layer for ResidualConvUnit. + norm_layer (nn.Module): normalization layer. + expand (bool): Whether expand the channels in post process block. + Default: False. + align_corners (bool): align_corner setting for bilinear upsample. + Default: True. 
+ """ + + def __init__(self, in_channels, act_layer, norm_layer, expand=False, align_corners=True): + super(FeatureFusionBlock, self).__init__() + + self.in_channels = in_channels + self.expand = expand + self.align_corners = align_corners + + self.out_channels = in_channels + if self.expand: + self.out_channels = in_channels // 2 + + self.project = ConvModule(self.in_channels, self.out_channels, kernel_size=1, act_layer=None, bias=True) + + self.res_conv_unit1 = PreActResidualConvUnit( + in_channels=self.in_channels, act_layer=act_layer, norm_layer=norm_layer + ) + self.res_conv_unit2 = PreActResidualConvUnit( + in_channels=self.in_channels, act_layer=act_layer, norm_layer=norm_layer + ) + + def forward(self, *inputs): + x = inputs[0] + if len(inputs) == 2: + if x.shape != inputs[1].shape: + res = resize(inputs[1], size=(x.shape[2], x.shape[3]), mode="bilinear", align_corners=False) + else: + res = inputs[1] + x = x + self.res_conv_unit1(res) + x = self.res_conv_unit2(x) + x = resize(x, scale_factor=2, mode="bilinear", align_corners=self.align_corners) + x = self.project(x) + return x + + +class DPTHead(DepthBaseDecodeHead): + """Vision Transformers for Dense Prediction. + This head is implemented of `DPT `_. + Args: + embed_dims (int): The embed dimension of the ViT backbone. + Default: 768. + post_process_channels (List): Out channels of post process conv + layers. Default: [96, 192, 384, 768]. + readout_type (str): Type of readout operation. Default: 'ignore'. + patch_size (int): The patch size. Default: 16. + expand_channels (bool): Whether expand the channels in post process + block. Default: False. + """ + + def __init__( + self, + embed_dims=768, + post_process_channels=[96, 192, 384, 768], + readout_type="ignore", + patch_size=16, + expand_channels=False, + **kwargs, + ): + super(DPTHead, self).__init__(**kwargs) + + self.in_channels = self.in_channels + self.expand_channels = expand_channels + self.reassemble_blocks = ReassembleBlocks(embed_dims, post_process_channels, readout_type, patch_size) + + self.post_process_channels = [ + channel * math.pow(2, i) if expand_channels else channel for i, channel in enumerate(post_process_channels) + ] + self.convs = nn.ModuleList() + for channel in self.post_process_channels: + self.convs.append(ConvModule(channel, self.channels, kernel_size=3, padding=1, act_layer=None, bias=False)) + self.fusion_blocks = nn.ModuleList() + for _ in range(len(self.convs)): + self.fusion_blocks.append(FeatureFusionBlock(self.channels, self.act_layer, self.norm_layer)) + self.fusion_blocks[0].res_conv_unit1 = None + self.project = ConvModule(self.channels, self.channels, kernel_size=3, padding=1, norm_layer=self.norm_layer) + self.num_fusion_blocks = len(self.fusion_blocks) + self.num_reassemble_blocks = len(self.reassemble_blocks.resize_layers) + self.num_post_process_channels = len(self.post_process_channels) + assert self.num_fusion_blocks == self.num_reassemble_blocks + assert self.num_reassemble_blocks == self.num_post_process_channels + self.conv_depth = HeadDepth(self.channels) + + def forward(self, inputs, img_metas): + assert len(inputs) == self.num_reassemble_blocks + x = [inp for inp in inputs] + x = self.reassemble_blocks(x) + x = [self.convs[i](feature) for i, feature in enumerate(x)] + out = self.fusion_blocks[0](x[-1]) + for i in range(1, len(self.fusion_blocks)): + out = self.fusion_blocks[i](out, x[-(i + 1)]) + out = self.project(out) + out = self.depth_pred(out) + return out diff --git 
a/engine/pose_estimation/dinov2/dinov2/hub/depth/encoder_decoder.py b/engine/pose_estimation/dinov2/dinov2/hub/depth/encoder_decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..eb29ced67957a336e763b0e7c90c0eeaea36fea8 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/hub/depth/encoder_decoder.py @@ -0,0 +1,351 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from collections import OrderedDict + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .ops import resize + + +def add_prefix(inputs, prefix): + """Add prefix for dict. + + Args: + inputs (dict): The input dict with str keys. + prefix (str): The prefix to add. + + Returns: + + dict: The dict with keys updated with ``prefix``. + """ + + outputs = dict() + for name, value in inputs.items(): + outputs[f"{prefix}.{name}"] = value + + return outputs + + +class DepthEncoderDecoder(nn.Module): + """Encoder Decoder depther. + + EncoderDecoder typically consists of backbone and decode_head. + """ + + def __init__(self, backbone, decode_head): + super(DepthEncoderDecoder, self).__init__() + + self.backbone = backbone + self.decode_head = decode_head + self.align_corners = self.decode_head.align_corners + + def extract_feat(self, img): + """Extract features from images.""" + return self.backbone(img) + + def encode_decode(self, img, img_metas, rescale=True, size=None): + """Encode images with backbone and decode into a depth estimation + map of the same size as input.""" + x = self.extract_feat(img) + out = self._decode_head_forward_test(x, img_metas) + # crop the pred depth to the certain range. + out = torch.clamp(out, min=self.decode_head.min_depth, max=self.decode_head.max_depth) + if rescale: + if size is None: + if img_metas is not None: + size = img_metas[0]["ori_shape"][:2] + else: + size = img.shape[2:] + out = resize(input=out, size=size, mode="bilinear", align_corners=self.align_corners) + return out + + def _decode_head_forward_train(self, img, x, img_metas, depth_gt, **kwargs): + """Run forward function and calculate loss for decode head in + training.""" + losses = dict() + loss_decode = self.decode_head.forward_train(img, x, img_metas, depth_gt, **kwargs) + losses.update(add_prefix(loss_decode, "decode")) + return losses + + def _decode_head_forward_test(self, x, img_metas): + """Run forward function and calculate loss for decode head in + inference.""" + depth_pred = self.decode_head.forward_test(x, img_metas) + return depth_pred + + def forward_dummy(self, img): + """Dummy forward function.""" + depth = self.encode_decode(img, None) + + return depth + + def forward_train(self, img, img_metas, depth_gt, **kwargs): + """Forward function for training. + + Args: + img (Tensor): Input images. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `depth/datasets/pipelines/formatting.py:Collect`. + depth_gt (Tensor): Depth gt + used if the architecture supports depth estimation task. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + + x = self.extract_feat(img) + + losses = dict() + + # the last of x saves the info from neck + loss_decode = self._decode_head_forward_train(img, x, img_metas, depth_gt, **kwargs) + + losses.update(loss_decode) + + return losses + + def whole_inference(self, img, img_meta, rescale, size=None): + """Inference with full image.""" + return self.encode_decode(img, img_meta, rescale, size=size) + + def slide_inference(self, img, img_meta, rescale, stride, crop_size): + """Inference by sliding-window with overlap. + + If h_crop > h_img or w_crop > w_img, the small patch will be used to + decode without padding. + """ + + h_stride, w_stride = stride + h_crop, w_crop = crop_size + batch_size, _, h_img, w_img = img.size() + h_grids = max(h_img - h_crop + h_stride - 1, 0) // h_stride + 1 + w_grids = max(w_img - w_crop + w_stride - 1, 0) // w_stride + 1 + preds = img.new_zeros((batch_size, 1, h_img, w_img)) + count_mat = img.new_zeros((batch_size, 1, h_img, w_img)) + for h_idx in range(h_grids): + for w_idx in range(w_grids): + y1 = h_idx * h_stride + x1 = w_idx * w_stride + y2 = min(y1 + h_crop, h_img) + x2 = min(x1 + w_crop, w_img) + y1 = max(y2 - h_crop, 0) + x1 = max(x2 - w_crop, 0) + crop_img = img[:, :, y1:y2, x1:x2] + depth_pred = self.encode_decode(crop_img, img_meta, rescale) + preds += F.pad(depth_pred, (int(x1), int(preds.shape[3] - x2), int(y1), int(preds.shape[2] - y2))) + + count_mat[:, :, y1:y2, x1:x2] += 1 + assert (count_mat == 0).sum() == 0 + if torch.onnx.is_in_onnx_export(): + # cast count_mat to constant while exporting to ONNX + count_mat = torch.from_numpy(count_mat.cpu().detach().numpy()).to(device=img.device) + preds = preds / count_mat + return preds + + def inference(self, img, img_meta, rescale, size=None, mode="whole"): + """Inference with slide/whole style. + + Args: + img (Tensor): The input image of shape (N, 3, H, W). + img_meta (dict): Image info dict where each dict has: 'img_shape', + 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `depth/datasets/pipelines/formatting.py:Collect`. + rescale (bool): Whether rescale back to original shape. + + Returns: + Tensor: The output depth map. + """ + + assert mode in ["slide", "whole"] + ori_shape = img_meta[0]["ori_shape"] + assert all(_["ori_shape"] == ori_shape for _ in img_meta) + if mode == "slide": + depth_pred = self.slide_inference(img, img_meta, rescale) + else: + depth_pred = self.whole_inference(img, img_meta, rescale, size=size) + output = depth_pred + flip = img_meta[0]["flip"] + if flip: + flip_direction = img_meta[0]["flip_direction"] + assert flip_direction in ["horizontal", "vertical"] + if flip_direction == "horizontal": + output = output.flip(dims=(3,)) + elif flip_direction == "vertical": + output = output.flip(dims=(2,)) + + return output + + def simple_test(self, img, img_meta, rescale=True): + """Simple test with single image.""" + depth_pred = self.inference(img, img_meta, rescale) + if torch.onnx.is_in_onnx_export(): + # our inference backend only support 4D output + depth_pred = depth_pred.unsqueeze(0) + return depth_pred + depth_pred = depth_pred.cpu().numpy() + # unravel batch dim + depth_pred = list(depth_pred) + return depth_pred + + def aug_test(self, imgs, img_metas, rescale=True): + """Test with augmentations. + + Only rescale=True is supported. 
+ """ + # aug_test rescale all imgs back to ori_shape for now + assert rescale + # to save memory, we get augmented depth logit inplace + depth_pred = self.inference(imgs[0], img_metas[0], rescale) + for i in range(1, len(imgs)): + cur_depth_pred = self.inference(imgs[i], img_metas[i], rescale, size=depth_pred.shape[-2:]) + depth_pred += cur_depth_pred + depth_pred /= len(imgs) + depth_pred = depth_pred.cpu().numpy() + # unravel batch dim + depth_pred = list(depth_pred) + return depth_pred + + def forward_test(self, imgs, img_metas, **kwargs): + """ + Args: + imgs (List[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains all images in the batch. + img_metas (List[List[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. + """ + for var, name in [(imgs, "imgs"), (img_metas, "img_metas")]: + if not isinstance(var, list): + raise TypeError(f"{name} must be a list, but got " f"{type(var)}") + num_augs = len(imgs) + if num_augs != len(img_metas): + raise ValueError(f"num of augmentations ({len(imgs)}) != " f"num of image meta ({len(img_metas)})") + # all images in the same aug batch all of the same ori_shape and pad + # shape + for img_meta in img_metas: + ori_shapes = [_["ori_shape"] for _ in img_meta] + assert all(shape == ori_shapes[0] for shape in ori_shapes) + img_shapes = [_["img_shape"] for _ in img_meta] + assert all(shape == img_shapes[0] for shape in img_shapes) + pad_shapes = [_["pad_shape"] for _ in img_meta] + assert all(shape == pad_shapes[0] for shape in pad_shapes) + + if num_augs == 1: + return self.simple_test(imgs[0], img_metas[0], **kwargs) + else: + return self.aug_test(imgs, img_metas, **kwargs) + + def forward(self, img, img_metas, return_loss=True, **kwargs): + """Calls either :func:`forward_train` or :func:`forward_test` depending + on whether ``return_loss`` is ``True``. + + Note this setting will change the expected inputs. When + ``return_loss=True``, img and img_meta are single-nested (i.e. Tensor + and List[dict]), and when ``resturn_loss=False``, img and img_meta + should be double nested (i.e. List[Tensor], List[List[dict]]), with + the outer list indicating test time augmentations. + """ + if return_loss: + return self.forward_train(img, img_metas, **kwargs) + else: + return self.forward_test(img, img_metas, **kwargs) + + def train_step(self, data_batch, optimizer, **kwargs): + """The iteration step during training. + + This method defines an iteration step during training, except for the + back propagation and optimizer updating, which are done in an optimizer + hook. Note that in some complicated cases or models, the whole process + including back propagation and optimizer updating is also defined in + this method, such as GAN. + + Args: + data (dict): The output of dataloader. + optimizer (:obj:`torch.optim.Optimizer` | dict): The optimizer of + runner is passed to ``train_step()``. This argument is unused + and reserved. + + Returns: + dict: It should contain at least 3 keys: ``loss``, ``log_vars``, + ``num_samples``. + ``loss`` is a tensor for back propagation, which can be a + weighted sum of multiple losses. + ``log_vars`` contains all the variables to be sent to the + logger. + ``num_samples`` indicates the batch size (when the model is + DDP, it means the batch size on each GPU), which is used for + averaging the logs. 
+ """ + losses = self(**data_batch) + + # split losses and images + real_losses = {} + log_imgs = {} + for k, v in losses.items(): + if "img" in k: + log_imgs[k] = v + else: + real_losses[k] = v + + loss, log_vars = self._parse_losses(real_losses) + + outputs = dict(loss=loss, log_vars=log_vars, num_samples=len(data_batch["img_metas"]), log_imgs=log_imgs) + + return outputs + + def val_step(self, data_batch, **kwargs): + """The iteration step during validation. + + This method shares the same signature as :func:`train_step`, but used + during val epochs. Note that the evaluation after training epochs is + not implemented with this method, but an evaluation hook. + """ + output = self(**data_batch, **kwargs) + return output + + @staticmethod + def _parse_losses(losses): + import torch.distributed as dist + + """Parse the raw outputs (losses) of the network. + + Args: + losses (dict): Raw output of the network, which usually contain + losses and other necessary information. + + Returns: + tuple[Tensor, dict]: (loss, log_vars), loss is the loss tensor + which may be a weighted sum of all losses, log_vars contains + all the variables to be sent to the logger. + """ + log_vars = OrderedDict() + for loss_name, loss_value in losses.items(): + if isinstance(loss_value, torch.Tensor): + log_vars[loss_name] = loss_value.mean() + elif isinstance(loss_value, list): + log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value) + else: + raise TypeError(f"{loss_name} is not a tensor or list of tensors") + + loss = sum(_value for _key, _value in log_vars.items() if "loss" in _key) + + log_vars["loss"] = loss + for loss_name, loss_value in log_vars.items(): + # reduce loss when distributed training + if dist.is_available() and dist.is_initialized(): + loss_value = loss_value.data.clone() + dist.all_reduce(loss_value.div_(dist.get_world_size())) + log_vars[loss_name] = loss_value.item() + + return loss, log_vars diff --git a/engine/pose_estimation/dinov2/dinov2/hub/depth/ops.py b/engine/pose_estimation/dinov2/dinov2/hub/depth/ops.py new file mode 100644 index 0000000000000000000000000000000000000000..15880ee0cb7652d4b41c489b927bf6a156b40e5e --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/hub/depth/ops.py @@ -0,0 +1,28 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
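# Editor-added sketch of the loss-parsing convention used by
# DepthEncoderDecoder._parse_losses above: every entry whose key contains
# "loss" is summed into the scalar used for backprop, and everything is
# collected into log_vars. Standalone, single-process toy (no dist.all_reduce).
from collections import OrderedDict
import torch

def parse_losses_sketch(losses):
    log_vars = OrderedDict()
    for name, value in losses.items():
        log_vars[name] = value.mean() if isinstance(value, torch.Tensor) else sum(v.mean() for v in value)
    loss = sum(v for k, v in log_vars.items() if "loss" in k)
    log_vars["loss"] = loss
    return loss, {k: v.item() for k, v in log_vars.items()}

loss, logs = parse_losses_sketch(
    {"decode.loss_depth": torch.tensor([0.5, 0.7]), "decode.abs_rel": torch.tensor(0.1)}
)
assert abs(logs["loss"] - 0.6) < 1e-6  # only the "loss" entry contributes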
+ +import warnings + +import torch.nn.functional as F + + +def resize(input, size=None, scale_factor=None, mode="nearest", align_corners=None, warning=False): + if warning: + if size is not None and align_corners: + input_h, input_w = tuple(int(x) for x in input.shape[2:]) + output_h, output_w = tuple(int(x) for x in size) + if output_h > input_h or output_w > output_h: + if ( + (output_h > 1 and output_w > 1 and input_h > 1 and input_w > 1) + and (output_h - 1) % (input_h - 1) + and (output_w - 1) % (input_w - 1) + ): + warnings.warn( + f"When align_corners={align_corners}, " + "the output would more aligned if " + f"input size {(input_h, input_w)} is `x+1` and " + f"out size {(output_h, output_w)} is `nx+1`" + ) + return F.interpolate(input, size, scale_factor, mode, align_corners) diff --git a/engine/pose_estimation/dinov2/dinov2/hub/depthers.py b/engine/pose_estimation/dinov2/dinov2/hub/depthers.py new file mode 100644 index 0000000000000000000000000000000000000000..f88b7e9a41056594e3b3e66107feee98bffab820 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/hub/depthers.py @@ -0,0 +1,246 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from enum import Enum +from functools import partial +from typing import Optional, Tuple, Union + +import torch + +from .backbones import _make_dinov2_model +from .depth import BNHead, DepthEncoderDecoder, DPTHead +from .utils import _DINOV2_BASE_URL, _make_dinov2_model_name, CenterPadding + + +class Weights(Enum): + NYU = "NYU" + KITTI = "KITTI" + + +def _get_depth_range(pretrained: bool, weights: Weights = Weights.NYU) -> Tuple[float, float]: + if not pretrained: # Default + return (0.001, 10.0) + + # Pretrained, set according to the training dataset for the provided weights + if weights == Weights.KITTI: + return (0.001, 80.0) + + if weights == Weights.NYU: + return (0.001, 10.0) + + return (0.001, 10.0) + + +def _make_dinov2_linear_depth_head( + *, + embed_dim: int, + layers: int, + min_depth: float, + max_depth: float, + **kwargs, +): + if layers not in (1, 4): + raise AssertionError(f"Unsupported number of layers: {layers}") + + if layers == 1: + in_index = [0] + else: + assert layers == 4 + in_index = [0, 1, 2, 3] + + return BNHead( + classify=True, + n_bins=256, + bins_strategy="UD", + norm_strategy="linear", + upsample=4, + in_channels=[embed_dim] * len(in_index), + in_index=in_index, + input_transform="resize_concat", + channels=embed_dim * len(in_index) * 2, + align_corners=False, + min_depth=0.001, + max_depth=80, + loss_decode=(), + ) + + +def _make_dinov2_linear_depther( + *, + arch_name: str = "vit_large", + layers: int = 4, + pretrained: bool = True, + weights: Union[Weights, str] = Weights.NYU, + depth_range: Optional[Tuple[float, float]] = None, + **kwargs, +): + if layers not in (1, 4): + raise AssertionError(f"Unsupported number of layers: {layers}") + if isinstance(weights, str): + try: + weights = Weights[weights] + except KeyError: + raise AssertionError(f"Unsupported weights: {weights}") + + if depth_range is None: + depth_range = _get_depth_range(pretrained, weights) + min_depth, max_depth = depth_range + + backbone = _make_dinov2_model(arch_name=arch_name, pretrained=pretrained, **kwargs) + + embed_dim = backbone.embed_dim + patch_size = backbone.patch_size + model_name = _make_dinov2_model_name(arch_name, patch_size) + linear_depth_head = _make_dinov2_linear_depth_head( 
+ embed_dim=embed_dim, + layers=layers, + min_depth=min_depth, + max_depth=max_depth, + ) + + layer_count = { + "vit_small": 12, + "vit_base": 12, + "vit_large": 24, + "vit_giant2": 40, + }[arch_name] + + if layers == 4: + out_index = { + "vit_small": [2, 5, 8, 11], + "vit_base": [2, 5, 8, 11], + "vit_large": [4, 11, 17, 23], + "vit_giant2": [9, 19, 29, 39], + }[arch_name] + else: + assert layers == 1 + out_index = [layer_count - 1] + + model = DepthEncoderDecoder(backbone=backbone, decode_head=linear_depth_head) + model.backbone.forward = partial( + backbone.get_intermediate_layers, + n=out_index, + reshape=True, + return_class_token=True, + norm=False, + ) + model.backbone.register_forward_pre_hook(lambda _, x: CenterPadding(patch_size)(x[0])) + + if pretrained: + layers_str = str(layers) if layers == 4 else "" + weights_str = weights.value.lower() + url = _DINOV2_BASE_URL + f"/{model_name}/{model_name}_{weights_str}_linear{layers_str}_head.pth" + checkpoint = torch.hub.load_state_dict_from_url(url, map_location="cpu") + if "state_dict" in checkpoint: + state_dict = checkpoint["state_dict"] + model.load_state_dict(state_dict, strict=False) + + return model + + +def dinov2_vits14_ld(*, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_linear_depther( + arch_name="vit_small", layers=layers, pretrained=pretrained, weights=weights, **kwargs + ) + + +def dinov2_vitb14_ld(*, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_linear_depther( + arch_name="vit_base", layers=layers, pretrained=pretrained, weights=weights, **kwargs + ) + + +def dinov2_vitl14_ld(*, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_linear_depther( + arch_name="vit_large", layers=layers, pretrained=pretrained, weights=weights, **kwargs + ) + + +def dinov2_vitg14_ld(*, layers: int = 4, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_linear_depther( + arch_name="vit_giant2", layers=layers, ffn_layer="swiglufused", pretrained=pretrained, weights=weights, **kwargs + ) + + +def _make_dinov2_dpt_depth_head(*, embed_dim: int, min_depth: float, max_depth: float): + return DPTHead( + in_channels=[embed_dim] * 4, + channels=256, + embed_dims=embed_dim, + post_process_channels=[embed_dim // 2 ** (3 - i) for i in range(4)], + readout_type="project", + min_depth=min_depth, + max_depth=max_depth, + loss_decode=(), + ) + + +def _make_dinov2_dpt_depther( + *, + arch_name: str = "vit_large", + pretrained: bool = True, + weights: Union[Weights, str] = Weights.NYU, + depth_range: Optional[Tuple[float, float]] = None, + **kwargs, +): + if isinstance(weights, str): + try: + weights = Weights[weights] + except KeyError: + raise AssertionError(f"Unsupported weights: {weights}") + + if depth_range is None: + depth_range = _get_depth_range(pretrained, weights) + min_depth, max_depth = depth_range + + backbone = _make_dinov2_model(arch_name=arch_name, pretrained=pretrained, **kwargs) + + model_name = _make_dinov2_model_name(arch_name, backbone.patch_size) + dpt_depth_head = _make_dinov2_dpt_depth_head(embed_dim=backbone.embed_dim, min_depth=min_depth, max_depth=max_depth) + + out_index = { + "vit_small": [2, 5, 8, 11], + "vit_base": [2, 5, 8, 11], + "vit_large": [4, 11, 17, 23], + "vit_giant2": [9, 19, 29, 39], + }[arch_name] + + model = DepthEncoderDecoder(backbone=backbone, 
decode_head=dpt_depth_head) + model.backbone.forward = partial( + backbone.get_intermediate_layers, + n=out_index, + reshape=True, + return_class_token=True, + norm=False, + ) + model.backbone.register_forward_pre_hook(lambda _, x: CenterPadding(backbone.patch_size)(x[0])) + + if pretrained: + weights_str = weights.value.lower() + url = _DINOV2_BASE_URL + f"/{model_name}/{model_name}_{weights_str}_dpt_head.pth" + checkpoint = torch.hub.load_state_dict_from_url(url, map_location="cpu") + if "state_dict" in checkpoint: + state_dict = checkpoint["state_dict"] + model.load_state_dict(state_dict, strict=False) + + return model + + +def dinov2_vits14_dd(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_dpt_depther(arch_name="vit_small", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitb14_dd(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_dpt_depther(arch_name="vit_base", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitl14_dd(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_dpt_depther(arch_name="vit_large", pretrained=pretrained, weights=weights, **kwargs) + + +def dinov2_vitg14_dd(*, pretrained: bool = True, weights: Union[Weights, str] = Weights.NYU, **kwargs): + return _make_dinov2_dpt_depther( + arch_name="vit_giant2", ffn_layer="swiglufused", pretrained=pretrained, weights=weights, **kwargs + ) diff --git a/engine/pose_estimation/dinov2/dinov2/hub/utils.py b/engine/pose_estimation/dinov2/dinov2/hub/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9c6641404093652d5a2f19b4cf283d976ec39e64 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/hub/utils.py @@ -0,0 +1,39 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import itertools +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +_DINOV2_BASE_URL = "https://dl.fbaipublicfiles.com/dinov2" + + +def _make_dinov2_model_name(arch_name: str, patch_size: int, num_register_tokens: int = 0) -> str: + compact_arch_name = arch_name.replace("_", "")[:4] + registers_suffix = f"_reg{num_register_tokens}" if num_register_tokens else "" + return f"dinov2_{compact_arch_name}{patch_size}{registers_suffix}" + + +class CenterPadding(nn.Module): + def __init__(self, multiple): + super().__init__() + self.multiple = multiple + + def _get_pad(self, size): + new_size = math.ceil(size / self.multiple) * self.multiple + pad_size = new_size - size + pad_size_left = pad_size // 2 + pad_size_right = pad_size - pad_size_left + return pad_size_left, pad_size_right + + @torch.inference_mode() + def forward(self, x): + pads = list(itertools.chain.from_iterable(self._get_pad(m) for m in x.shape[:1:-1])) + output = F.pad(x, pads) + return output diff --git a/engine/pose_estimation/dinov2/dinov2/layers/__init__.py b/engine/pose_estimation/dinov2/dinov2/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..05a0b61868e43abb821ca05a813bab2b8b43629e --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/layers/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
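# Quick editor-added demonstration of the CenterPadding behaviour defined in
# hub/utils.py above: inputs are symmetrically padded so H and W become
# multiples of the ViT patch size before entering the backbone. Function name
# is hypothetical; only the padding arithmetic mirrors CenterPadding.
import math
import torch
import torch.nn.functional as F

def center_pad(x, multiple=14):
    def pad_pair(size):
        new = math.ceil(size / multiple) * multiple
        left = (new - size) // 2
        return left, (new - size) - left
    ph = pad_pair(x.shape[-2])
    pw = pad_pair(x.shape[-1])
    # F.pad takes (w_left, w_right, h_top, h_bottom) for the last two dims
    return F.pad(x, (pw[0], pw[1], ph[0], ph[1]))

img = torch.randn(1, 3, 500, 375)
padded = center_pad(img, multiple=14)
assert padded.shape[-2] % 14 == 0 and padded.shape[-1] % 14 == 0  # 504 x 378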
+# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .dino_head import DINOHead +from .mlp import Mlp +from .patch_embed import PatchEmbed +from .swiglu_ffn import SwiGLUFFN, SwiGLUFFNFused +from .block import NestedTensorBlock +from .attention import MemEffAttention diff --git a/engine/pose_estimation/dinov2/dinov2/layers/attention.py b/engine/pose_estimation/dinov2/dinov2/layers/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..0fb76ef2816164729a58cceb18d0f000cfb18777 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/layers/attention.py @@ -0,0 +1,89 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/models/vision_transformer.py + +import logging +import os +import warnings + +from torch import Tensor +from torch import nn + + +logger = logging.getLogger("dinov2") + + +XFORMERS_ENABLED = os.environ.get("XFORMERS_DISABLED") is None +try: + if XFORMERS_ENABLED: + from xformers.ops import memory_efficient_attention, unbind + + XFORMERS_AVAILABLE = True + warnings.warn("xFormers is available (Attention)") + else: + warnings.warn("xFormers is disabled (Attention)") + raise ImportError +except ImportError: + XFORMERS_AVAILABLE = False + warnings.warn("xFormers is not available (Attention)") + + +class Attention(nn.Module): + def __init__( + self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = False, + proj_bias: bool = True, + attn_drop: float = 0.0, + proj_drop: float = 0.0, + ) -> None: + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim**-0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim, bias=proj_bias) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x: Tensor) -> Tensor: + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + + q, k, v = qkv[0] * self.scale, qkv[1], qkv[2] + attn = q @ k.transpose(-2, -1) + + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class MemEffAttention(Attention): + def forward(self, x: Tensor, attn_bias=None) -> Tensor: + if not XFORMERS_AVAILABLE: + if attn_bias is not None: + raise AssertionError("xFormers is required for using nested tensors") + return super().forward(x) + + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads) + + q, k, v = unbind(qkv, 2) + + x = memory_efficient_attention(q, k, v, attn_bias=attn_bias) + x = x.reshape([B, N, C]) + + x = self.proj(x) + x = self.proj_drop(x) + return x diff --git a/engine/pose_estimation/dinov2/dinov2/layers/block.py b/engine/pose_estimation/dinov2/dinov2/layers/block.py new file mode 100644 index 0000000000000000000000000000000000000000..930787b262faac4f2264797496faff75ac56b7cc --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/layers/block.py @@ -0,0 +1,260 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
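# Editor-added sketch of the math inside Attention.forward above: queries are
# pre-scaled by head_dim ** -0.5, attention weights are softmax(Q K^T), and the
# output is their product with V. Shapes are (batch, heads, tokens, head_dim).
import torch

q = torch.randn(2, 8, 16, 64)
k = torch.randn(2, 8, 16, 64)
v = torch.randn(2, 8, 16, 64)

scale = q.shape[-1] ** -0.5
attn = ((q * scale) @ k.transpose(-2, -1)).softmax(dim=-1)
out = attn @ v

assert out.shape == v.shape
# softmax rows are proper probability distributions
assert torch.allclose(attn.sum(dim=-1), torch.ones(2, 8, 16), atol=1e-5)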
+# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/patch_embed.py + +import logging +import os +from typing import Callable, List, Any, Tuple, Dict +import warnings + +import torch +from torch import nn, Tensor + +from .attention import Attention, MemEffAttention +from .drop_path import DropPath +from .layer_scale import LayerScale +from .mlp import Mlp + + +logger = logging.getLogger("dinov2") + + +XFORMERS_ENABLED = os.environ.get("XFORMERS_DISABLED") is None +try: + if XFORMERS_ENABLED: + from xformers.ops import fmha, scaled_index_add, index_select_cat + + XFORMERS_AVAILABLE = True + warnings.warn("xFormers is available (Block)") + else: + warnings.warn("xFormers is disabled (Block)") + raise ImportError +except ImportError: + XFORMERS_AVAILABLE = False + + warnings.warn("xFormers is not available (Block)") + + +class Block(nn.Module): + def __init__( + self, + dim: int, + num_heads: int, + mlp_ratio: float = 4.0, + qkv_bias: bool = False, + proj_bias: bool = True, + ffn_bias: bool = True, + drop: float = 0.0, + attn_drop: float = 0.0, + init_values=None, + drop_path: float = 0.0, + act_layer: Callable[..., nn.Module] = nn.GELU, + norm_layer: Callable[..., nn.Module] = nn.LayerNorm, + attn_class: Callable[..., nn.Module] = Attention, + ffn_layer: Callable[..., nn.Module] = Mlp, + ) -> None: + super().__init__() + # print(f"biases: qkv: {qkv_bias}, proj: {proj_bias}, ffn: {ffn_bias}") + self.norm1 = norm_layer(dim) + self.attn = attn_class( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + proj_bias=proj_bias, + attn_drop=attn_drop, + proj_drop=drop, + ) + self.ls1 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + self.drop_path1 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = ffn_layer( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop, + bias=ffn_bias, + ) + self.ls2 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + self.drop_path2 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + + self.sample_drop_ratio = drop_path + + def forward(self, x: Tensor) -> Tensor: + def attn_residual_func(x: Tensor) -> Tensor: + return self.ls1(self.attn(self.norm1(x))) + + def ffn_residual_func(x: Tensor) -> Tensor: + return self.ls2(self.mlp(self.norm2(x))) + + if self.training and self.sample_drop_ratio > 0.1: + # the overhead is compensated only for a drop path rate larger than 0.1 + x = drop_add_residual_stochastic_depth( + x, + residual_func=attn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + ) + x = drop_add_residual_stochastic_depth( + x, + residual_func=ffn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + ) + elif self.training and self.sample_drop_ratio > 0.0: + x = x + self.drop_path1(attn_residual_func(x)) + x = x + self.drop_path1(ffn_residual_func(x)) # FIXME: drop_path2 + else: + x = x + attn_residual_func(x) + x = x + ffn_residual_func(x) + return x + + +def drop_add_residual_stochastic_depth( + x: Tensor, + residual_func: Callable[[Tensor], Tensor], + sample_drop_ratio: float = 0.0, +) -> Tensor: + # 1) extract subset using permutation + b, n, d = x.shape + sample_subset_size = max(int(b * (1 - 
sample_drop_ratio)), 1) + brange = (torch.randperm(b, device=x.device))[:sample_subset_size] + x_subset = x[brange] + + # 2) apply residual_func to get residual + residual = residual_func(x_subset) + + x_flat = x.flatten(1) + residual = residual.flatten(1) + + residual_scale_factor = b / sample_subset_size + + # 3) add the residual + x_plus_residual = torch.index_add(x_flat, 0, brange, residual.to(dtype=x.dtype), alpha=residual_scale_factor) + return x_plus_residual.view_as(x) + + +def get_branges_scales(x, sample_drop_ratio=0.0): + b, n, d = x.shape + sample_subset_size = max(int(b * (1 - sample_drop_ratio)), 1) + brange = (torch.randperm(b, device=x.device))[:sample_subset_size] + residual_scale_factor = b / sample_subset_size + return brange, residual_scale_factor + + +def add_residual(x, brange, residual, residual_scale_factor, scaling_vector=None): + if scaling_vector is None: + x_flat = x.flatten(1) + residual = residual.flatten(1) + x_plus_residual = torch.index_add(x_flat, 0, brange, residual.to(dtype=x.dtype), alpha=residual_scale_factor) + else: + x_plus_residual = scaled_index_add( + x, brange, residual.to(dtype=x.dtype), scaling=scaling_vector, alpha=residual_scale_factor + ) + return x_plus_residual + + +attn_bias_cache: Dict[Tuple, Any] = {} + + +def get_attn_bias_and_cat(x_list, branges=None): + """ + this will perform the index select, cat the tensors, and provide the attn_bias from cache + """ + batch_sizes = [b.shape[0] for b in branges] if branges is not None else [x.shape[0] for x in x_list] + all_shapes = tuple((b, x.shape[1]) for b, x in zip(batch_sizes, x_list)) + if all_shapes not in attn_bias_cache.keys(): + seqlens = [] + for b, x in zip(batch_sizes, x_list): + for _ in range(b): + seqlens.append(x.shape[1]) + attn_bias = fmha.BlockDiagonalMask.from_seqlens(seqlens) + attn_bias._batch_sizes = batch_sizes + attn_bias_cache[all_shapes] = attn_bias + + if branges is not None: + cat_tensors = index_select_cat([x.flatten(1) for x in x_list], branges).view(1, -1, x_list[0].shape[-1]) + else: + tensors_bs1 = tuple(x.reshape([1, -1, *x.shape[2:]]) for x in x_list) + cat_tensors = torch.cat(tensors_bs1, dim=1) + + return attn_bias_cache[all_shapes], cat_tensors + + +def drop_add_residual_stochastic_depth_list( + x_list: List[Tensor], + residual_func: Callable[[Tensor, Any], Tensor], + sample_drop_ratio: float = 0.0, + scaling_vector=None, +) -> Tensor: + # 1) generate random set of indices for dropping samples in the batch + branges_scales = [get_branges_scales(x, sample_drop_ratio=sample_drop_ratio) for x in x_list] + branges = [s[0] for s in branges_scales] + residual_scale_factors = [s[1] for s in branges_scales] + + # 2) get attention bias and index+concat the tensors + attn_bias, x_cat = get_attn_bias_and_cat(x_list, branges) + + # 3) apply residual_func to get residual, and split the result + residual_list = attn_bias.split(residual_func(x_cat, attn_bias=attn_bias)) # type: ignore + + outputs = [] + for x, brange, residual, residual_scale_factor in zip(x_list, branges, residual_list, residual_scale_factors): + outputs.append(add_residual(x, brange, residual, residual_scale_factor, scaling_vector).view_as(x)) + return outputs + + +class NestedTensorBlock(Block): + def forward_nested(self, x_list: List[Tensor]) -> List[Tensor]: + """ + x_list contains a list of tensors to nest together and run + """ + assert isinstance(self.attn, MemEffAttention) + + if self.training and self.sample_drop_ratio > 0.0: + + def attn_residual_func(x: Tensor, attn_bias=None) -> Tensor: + 
return self.attn(self.norm1(x), attn_bias=attn_bias) + + def ffn_residual_func(x: Tensor, attn_bias=None) -> Tensor: + return self.mlp(self.norm2(x)) + + x_list = drop_add_residual_stochastic_depth_list( + x_list, + residual_func=attn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + scaling_vector=self.ls1.gamma if isinstance(self.ls1, LayerScale) else None, + ) + x_list = drop_add_residual_stochastic_depth_list( + x_list, + residual_func=ffn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + scaling_vector=self.ls2.gamma if isinstance(self.ls1, LayerScale) else None, + ) + return x_list + else: + + def attn_residual_func(x: Tensor, attn_bias=None) -> Tensor: + return self.ls1(self.attn(self.norm1(x), attn_bias=attn_bias)) + + def ffn_residual_func(x: Tensor, attn_bias=None) -> Tensor: + return self.ls2(self.mlp(self.norm2(x))) + + attn_bias, x = get_attn_bias_and_cat(x_list) + x = x + attn_residual_func(x, attn_bias=attn_bias) + x = x + ffn_residual_func(x) + return attn_bias.split(x) + + def forward(self, x_or_x_list): + if isinstance(x_or_x_list, Tensor): + return super().forward(x_or_x_list) + elif isinstance(x_or_x_list, list): + if not XFORMERS_AVAILABLE: + raise AssertionError("xFormers is required for using nested tensors") + return self.forward_nested(x_or_x_list) + else: + raise AssertionError diff --git a/engine/pose_estimation/dinov2/dinov2/layers/dino_head.py b/engine/pose_estimation/dinov2/dinov2/layers/dino_head.py new file mode 100644 index 0000000000000000000000000000000000000000..0ace8ffd6297a1dd480b19db407b662a6ea0f565 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/layers/dino_head.py @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
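# Editor-added sketch (not part of dino_head.py below) of the batch-level
# stochastic depth used in block.py above: the residual branch runs on a random
# subset of samples and is added back rescaled by b / subset_size, so the
# expected update matches running the branch on the full batch.
import torch

def drop_residual_sketch(x, residual_func, drop_ratio=0.5):
    b = x.shape[0]
    keep = max(int(b * (1 - drop_ratio)), 1)
    idx = torch.randperm(b, device=x.device)[:keep]
    residual = residual_func(x[idx])
    out = torch.index_add(x.flatten(1), 0, idx, residual.flatten(1), alpha=b / keep)
    return out.view_as(x)

x = torch.randn(8, 4, 16)
y = drop_residual_sketch(x, lambda t: t * 0.1)
assert y.shape == x.shape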
+ +import torch +import torch.nn as nn +from torch.nn.init import trunc_normal_ +from torch.nn.utils import weight_norm + + +class DINOHead(nn.Module): + def __init__( + self, + in_dim, + out_dim, + use_bn=False, + nlayers=3, + hidden_dim=2048, + bottleneck_dim=256, + mlp_bias=True, + ): + super().__init__() + nlayers = max(nlayers, 1) + self.mlp = _build_mlp(nlayers, in_dim, bottleneck_dim, hidden_dim=hidden_dim, use_bn=use_bn, bias=mlp_bias) + self.apply(self._init_weights) + self.last_layer = weight_norm(nn.Linear(bottleneck_dim, out_dim, bias=False)) + self.last_layer.weight_g.data.fill_(1) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + + def forward(self, x): + x = self.mlp(x) + eps = 1e-6 if x.dtype == torch.float16 else 1e-12 + x = nn.functional.normalize(x, dim=-1, p=2, eps=eps) + x = self.last_layer(x) + return x + + +def _build_mlp(nlayers, in_dim, bottleneck_dim, hidden_dim=None, use_bn=False, bias=True): + if nlayers == 1: + return nn.Linear(in_dim, bottleneck_dim, bias=bias) + else: + layers = [nn.Linear(in_dim, hidden_dim, bias=bias)] + if use_bn: + layers.append(nn.BatchNorm1d(hidden_dim)) + layers.append(nn.GELU()) + for _ in range(nlayers - 2): + layers.append(nn.Linear(hidden_dim, hidden_dim, bias=bias)) + if use_bn: + layers.append(nn.BatchNorm1d(hidden_dim)) + layers.append(nn.GELU()) + layers.append(nn.Linear(hidden_dim, bottleneck_dim, bias=bias)) + return nn.Sequential(*layers) diff --git a/engine/pose_estimation/dinov2/dinov2/layers/drop_path.py b/engine/pose_estimation/dinov2/dinov2/layers/drop_path.py new file mode 100644 index 0000000000000000000000000000000000000000..1d640e0b969b8dcba96260243473700b4e5b24b5 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/layers/drop_path.py @@ -0,0 +1,34 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/drop.py + + +from torch import nn + + +def drop_path(x, drop_prob: float = 0.0, training: bool = False): + if drop_prob == 0.0 or not training: + return x + keep_prob = 1 - drop_prob + shape = (x.shape[0],) + (1,) * (x.ndim - 1) # work with diff dim tensors, not just 2D ConvNets + random_tensor = x.new_empty(shape).bernoulli_(keep_prob) + if keep_prob > 0.0: + random_tensor.div_(keep_prob) + output = x * random_tensor + return output + + +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).""" + + def __init__(self, drop_prob=None): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + + def forward(self, x): + return drop_path(x, self.drop_prob, self.training) diff --git a/engine/pose_estimation/dinov2/dinov2/layers/layer_scale.py b/engine/pose_estimation/dinov2/dinov2/layers/layer_scale.py new file mode 100644 index 0000000000000000000000000000000000000000..51df0d7ce61f2b41fa9e6369f52391dd7fe7d386 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/layers/layer_scale.py @@ -0,0 +1,27 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
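# Editor-added sketch of the per-sample DropPath defined in drop_path.py above:
# during training each sample's residual branch is zeroed with probability p,
# and survivors are rescaled by 1 / (1 - p) so the expectation is unchanged.
import torch

def drop_path_sketch(x, p=0.3, training=True):
    if p == 0.0 or not training:
        return x
    keep = 1.0 - p
    mask = x.new_empty((x.shape[0],) + (1,) * (x.ndim - 1)).bernoulli_(keep)
    return x * mask / keep

x = torch.ones(10000, 4)
y = drop_path_sketch(x, p=0.3)
# over a large batch the mean stays close to the input mean
assert abs(y.mean().item() - 1.0) < 0.05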
+ +# Modified from: https://github.com/huggingface/pytorch-image-models/blob/main/timm/models/vision_transformer.py#L103-L110 + +from typing import Union + +import torch +from torch import Tensor +from torch import nn + + +class LayerScale(nn.Module): + def __init__( + self, + dim: int, + init_values: Union[float, Tensor] = 1e-5, + inplace: bool = False, + ) -> None: + super().__init__() + self.inplace = inplace + self.gamma = nn.Parameter(init_values * torch.ones(dim)) + + def forward(self, x: Tensor) -> Tensor: + return x.mul_(self.gamma) if self.inplace else x * self.gamma diff --git a/engine/pose_estimation/dinov2/dinov2/layers/mlp.py b/engine/pose_estimation/dinov2/dinov2/layers/mlp.py new file mode 100644 index 0000000000000000000000000000000000000000..bbf9432aae9258612caeae910a7bde17999e328e --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/layers/mlp.py @@ -0,0 +1,40 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/mlp.py + + +from typing import Callable, Optional + +from torch import Tensor, nn + + +class Mlp(nn.Module): + def __init__( + self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_layer: Callable[..., nn.Module] = nn.GELU, + drop: float = 0.0, + bias: bool = True, + ) -> None: + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features, bias=bias) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features, bias=bias) + self.drop = nn.Dropout(drop) + + def forward(self, x: Tensor) -> Tensor: + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x diff --git a/engine/pose_estimation/dinov2/dinov2/layers/patch_embed.py b/engine/pose_estimation/dinov2/dinov2/layers/patch_embed.py new file mode 100644 index 0000000000000000000000000000000000000000..8b7c0804784a42cf80c0297d110dcc68cc85b339 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/layers/patch_embed.py @@ -0,0 +1,88 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/patch_embed.py + +from typing import Callable, Optional, Tuple, Union + +from torch import Tensor +import torch.nn as nn + + +def make_2tuple(x): + if isinstance(x, tuple): + assert len(x) == 2 + return x + + assert isinstance(x, int) + return (x, x) + + +class PatchEmbed(nn.Module): + """ + 2D image to patch embedding: (B,C,H,W) -> (B,N,D) + + Args: + img_size: Image size. + patch_size: Patch token size. + in_chans: Number of input image channels. + embed_dim: Number of linear projection output channels. + norm_layer: Normalization layer. 
+ """ + + def __init__( + self, + img_size: Union[int, Tuple[int, int]] = 224, + patch_size: Union[int, Tuple[int, int]] = 16, + in_chans: int = 3, + embed_dim: int = 768, + norm_layer: Optional[Callable] = None, + flatten_embedding: bool = True, + ) -> None: + super().__init__() + + image_HW = make_2tuple(img_size) + patch_HW = make_2tuple(patch_size) + patch_grid_size = ( + image_HW[0] // patch_HW[0], + image_HW[1] // patch_HW[1], + ) + + self.img_size = image_HW + self.patch_size = patch_HW + self.patches_resolution = patch_grid_size + self.num_patches = patch_grid_size[0] * patch_grid_size[1] + + self.in_chans = in_chans + self.embed_dim = embed_dim + + self.flatten_embedding = flatten_embedding + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_HW, stride=patch_HW) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x: Tensor) -> Tensor: + _, _, H, W = x.shape + patch_H, patch_W = self.patch_size + + assert H % patch_H == 0, f"Input image height {H} is not a multiple of patch height {patch_H}" + assert W % patch_W == 0, f"Input image width {W} is not a multiple of patch width: {patch_W}" + + x = self.proj(x) # B C H W + H, W = x.size(2), x.size(3) + x = x.flatten(2).transpose(1, 2) # B HW C + x = self.norm(x) + if not self.flatten_embedding: + x = x.reshape(-1, H, W, self.embed_dim) # B H W C + return x + + def flops(self) -> float: + Ho, Wo = self.patches_resolution + flops = Ho * Wo * self.embed_dim * self.in_chans * (self.patch_size[0] * self.patch_size[1]) + if self.norm is not None: + flops += Ho * Wo * self.embed_dim + return flops diff --git a/engine/pose_estimation/dinov2/dinov2/layers/swiglu_ffn.py b/engine/pose_estimation/dinov2/dinov2/layers/swiglu_ffn.py new file mode 100644 index 0000000000000000000000000000000000000000..5e9dafa4592a408f6874d54853e8f60db5c41f74 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/layers/swiglu_ffn.py @@ -0,0 +1,72 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
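# Editor-added sketch of the patch embedding above: a stride-p convolution maps
# (B, C, H, W) to (B, H/p * W/p, D) token embeddings, which is exactly the
# flatten(2).transpose(1, 2) step in PatchEmbed.forward.
import torch
import torch.nn as nn

patch, dim = 16, 768
proj = nn.Conv2d(3, dim, kernel_size=patch, stride=patch)
img = torch.randn(2, 3, 224, 224)
tokens = proj(img).flatten(2).transpose(1, 2)  # B x N x D
assert tokens.shape == (2, (224 // patch) ** 2, dim)  # 2 x 196 x 768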
+ +import os +from typing import Callable, Optional +import warnings + +from torch import Tensor, nn +import torch.nn.functional as F + + +class SwiGLUFFN(nn.Module): + def __init__( + self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_layer: Callable[..., nn.Module] = None, + drop: float = 0.0, + bias: bool = True, + ) -> None: + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.w12 = nn.Linear(in_features, 2 * hidden_features, bias=bias) + self.w3 = nn.Linear(hidden_features, out_features, bias=bias) + + def forward(self, x: Tensor) -> Tensor: + x12 = self.w12(x) + x1, x2 = x12.chunk(2, dim=-1) + hidden = F.silu(x1) * x2 + return self.w3(hidden) + + +XFORMERS_ENABLED = os.environ.get("XFORMERS_DISABLED") is None +try: + if XFORMERS_ENABLED: + from xformers.ops import SwiGLU + + XFORMERS_AVAILABLE = True + warnings.warn("xFormers is available (SwiGLU)") + else: + warnings.warn("xFormers is disabled (SwiGLU)") + raise ImportError +except ImportError: + SwiGLU = SwiGLUFFN + XFORMERS_AVAILABLE = False + + warnings.warn("xFormers is not available (SwiGLU)") + + +class SwiGLUFFNFused(SwiGLU): + def __init__( + self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_layer: Callable[..., nn.Module] = None, + drop: float = 0.0, + bias: bool = True, + ) -> None: + out_features = out_features or in_features + hidden_features = hidden_features or in_features + hidden_features = (int(hidden_features * 2 / 3) + 7) // 8 * 8 + super().__init__( + in_features=in_features, + hidden_features=hidden_features, + out_features=out_features, + bias=bias, + ) diff --git a/engine/pose_estimation/dinov2/dinov2/logging/__init__.py b/engine/pose_estimation/dinov2/dinov2/logging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..04a7f02204316d4d1ef38bf6080dae3d66241c25 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/logging/__init__.py @@ -0,0 +1,102 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import functools +import logging +import os +import sys +from typing import Optional + +import dinov2.distributed as distributed +from .helpers import MetricLogger, SmoothedValue + + +# So that calling _configure_logger multiple times won't add many handlers +@functools.lru_cache() +def _configure_logger( + name: Optional[str] = None, + *, + level: int = logging.DEBUG, + output: Optional[str] = None, +): + """ + Configure a logger. + + Adapted from Detectron2. + + Args: + name: The name of the logger to configure. + level: The logging level to use. + output: A file name or a directory to save log. If None, will not save log file. + If ends with ".txt" or ".log", assumed to be a file name. + Otherwise, logs will be saved to `output/log.txt`. + + Returns: + The configured logger. 
+ """ + + logger = logging.getLogger(name) + logger.setLevel(level) + logger.propagate = False + + # Loosely match Google glog format: + # [IWEF]yyyymmdd hh:mm:ss.uuuuuu threadid file:line] msg + # but use a shorter timestamp and include the logger name: + # [IWEF]yyyymmdd hh:mm:ss logger threadid file:line] msg + fmt_prefix = "%(levelname).1s%(asctime)s %(process)s %(name)s %(filename)s:%(lineno)s] " + fmt_message = "%(message)s" + fmt = fmt_prefix + fmt_message + datefmt = "%Y%m%d %H:%M:%S" + formatter = logging.Formatter(fmt=fmt, datefmt=datefmt) + + # stdout logging for main worker only + if distributed.is_main_process(): + handler = logging.StreamHandler(stream=sys.stdout) + handler.setLevel(logging.DEBUG) + handler.setFormatter(formatter) + logger.addHandler(handler) + + # file logging for all workers + if output: + if os.path.splitext(output)[-1] in (".txt", ".log"): + filename = output + else: + filename = os.path.join(output, "logs", "log.txt") + + if not distributed.is_main_process(): + global_rank = distributed.get_global_rank() + filename = filename + ".rank{}".format(global_rank) + + os.makedirs(os.path.dirname(filename), exist_ok=True) + + handler = logging.StreamHandler(open(filename, "a")) + handler.setLevel(logging.DEBUG) + handler.setFormatter(formatter) + logger.addHandler(handler) + + return logger + + +def setup_logging( + output: Optional[str] = None, + *, + name: Optional[str] = None, + level: int = logging.DEBUG, + capture_warnings: bool = True, +) -> None: + """ + Setup logging. + + Args: + output: A file name or a directory to save log files. If None, log + files will not be saved. If output ends with ".txt" or ".log", it + is assumed to be a file name. + Otherwise, logs will be saved to `output/log.txt`. + name: The name of the logger to configure, by default the root logger. + level: The logging level to use. + capture_warnings: Whether warnings should be captured as logs. + """ + logging.captureWarnings(capture_warnings) + _configure_logger(name, level=level, output=output) diff --git a/engine/pose_estimation/dinov2/dinov2/logging/helpers.py b/engine/pose_estimation/dinov2/dinov2/logging/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..c6e70bb15505cbbc4c4732b069ee919bf921a74f --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/logging/helpers.py @@ -0,0 +1,194 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
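# Editor-added, standalone sketch of the glog-style formatter configured above,
# without the dinov2.distributed dependency: one-letter level, timestamp,
# process id, logger name, file:line, then the message.
import logging
import sys

fmt = "%(levelname).1s%(asctime)s %(process)s %(name)s %(filename)s:%(lineno)s] %(message)s"
handler = logging.StreamHandler(stream=sys.stdout)
handler.setFormatter(logging.Formatter(fmt=fmt, datefmt="%Y%m%d %H:%M:%S"))

logger = logging.getLogger("dinov2.example")
logger.setLevel(logging.DEBUG)
logger.addHandler(handler)
logger.info("logger configured")  # e.g. "I20250101 12:00:00 1234 dinov2.example example.py:12] logger configured"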
+ +from collections import defaultdict, deque +import datetime +import json +import logging +import time + +import torch + +import dinov2.distributed as distributed + + +logger = logging.getLogger("dinov2") + + +class MetricLogger(object): + def __init__(self, delimiter="\t", output_file=None): + self.meters = defaultdict(SmoothedValue) + self.delimiter = delimiter + self.output_file = output_file + + def update(self, **kwargs): + for k, v in kwargs.items(): + if isinstance(v, torch.Tensor): + v = v.item() + assert isinstance(v, (float, int)) + self.meters[k].update(v) + + def __getattr__(self, attr): + if attr in self.meters: + return self.meters[attr] + if attr in self.__dict__: + return self.__dict__[attr] + raise AttributeError("'{}' object has no attribute '{}'".format(type(self).__name__, attr)) + + def __str__(self): + loss_str = [] + for name, meter in self.meters.items(): + loss_str.append("{}: {}".format(name, str(meter))) + return self.delimiter.join(loss_str) + + def synchronize_between_processes(self): + for meter in self.meters.values(): + meter.synchronize_between_processes() + + def add_meter(self, name, meter): + self.meters[name] = meter + + def dump_in_output_file(self, iteration, iter_time, data_time): + if self.output_file is None or not distributed.is_main_process(): + return + dict_to_dump = dict( + iteration=iteration, + iter_time=iter_time, + data_time=data_time, + ) + dict_to_dump.update({k: v.median for k, v in self.meters.items()}) + with open(self.output_file, "a") as f: + f.write(json.dumps(dict_to_dump) + "\n") + pass + + def log_every(self, iterable, print_freq, header=None, n_iterations=None, start_iteration=0): + i = start_iteration + if not header: + header = "" + start_time = time.time() + end = time.time() + iter_time = SmoothedValue(fmt="{avg:.6f}") + data_time = SmoothedValue(fmt="{avg:.6f}") + + if n_iterations is None: + n_iterations = len(iterable) + + space_fmt = ":" + str(len(str(n_iterations))) + "d" + + log_list = [ + header, + "[{0" + space_fmt + "}/{1}]", + "eta: {eta}", + "{meters}", + "time: {time}", + "data: {data}", + ] + if torch.cuda.is_available(): + log_list += ["max mem: {memory:.0f}"] + + log_msg = self.delimiter.join(log_list) + MB = 1024.0 * 1024.0 + for obj in iterable: + data_time.update(time.time() - end) + yield obj + iter_time.update(time.time() - end) + if i % print_freq == 0 or i == n_iterations - 1: + self.dump_in_output_file(iteration=i, iter_time=iter_time.avg, data_time=data_time.avg) + eta_seconds = iter_time.global_avg * (n_iterations - i) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + if torch.cuda.is_available(): + logger.info( + log_msg.format( + i, + n_iterations, + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB, + ) + ) + else: + logger.info( + log_msg.format( + i, + n_iterations, + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + ) + ) + i += 1 + end = time.time() + if i >= n_iterations: + break + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + logger.info("{} Total time: {} ({:.6f} s / it)".format(header, total_time_str, total_time / n_iterations)) + + +class SmoothedValue: + """Track a series of values and provide access to smoothed values over a + window or the global series average. 
+ """ + + def __init__(self, window_size=20, fmt=None): + if fmt is None: + fmt = "{median:.4f} ({global_avg:.4f})" + self.deque = deque(maxlen=window_size) + self.total = 0.0 + self.count = 0 + self.fmt = fmt + + def update(self, value, num=1): + self.deque.append(value) + self.count += num + self.total += value * num + + def synchronize_between_processes(self): + """ + Distributed synchronization of the metric + Warning: does not synchronize the deque! + """ + if not distributed.is_enabled(): + return + t = torch.tensor([self.count, self.total], dtype=torch.float64, device="cuda") + torch.distributed.barrier() + torch.distributed.all_reduce(t) + t = t.tolist() + self.count = int(t[0]) + self.total = t[1] + + @property + def median(self): + d = torch.tensor(list(self.deque)) + return d.median().item() + + @property + def avg(self): + d = torch.tensor(list(self.deque), dtype=torch.float32) + return d.mean().item() + + @property + def global_avg(self): + return self.total / self.count + + @property + def max(self): + return max(self.deque) + + @property + def value(self): + return self.deque[-1] + + def __str__(self): + return self.fmt.format( + median=self.median, + avg=self.avg, + global_avg=self.global_avg, + max=self.max, + value=self.value, + ) diff --git a/engine/pose_estimation/dinov2/dinov2/loss/__init__.py b/engine/pose_estimation/dinov2/dinov2/loss/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d6b0115b74edbd74b324c9056a57fade363c58fd --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/loss/__init__.py @@ -0,0 +1,8 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .dino_clstoken_loss import DINOLoss +from .ibot_patch_loss import iBOTPatchLoss +from .koleo_loss import KoLeoLoss diff --git a/engine/pose_estimation/dinov2/dinov2/loss/dino_clstoken_loss.py b/engine/pose_estimation/dinov2/dinov2/loss/dino_clstoken_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..c31808e36e6c38ee6dae13ba0443bf1946242117 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/loss/dino_clstoken_loss.py @@ -0,0 +1,99 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
+ +import torch +import torch.distributed as dist +import torch.nn.functional as F +from torch import nn + + +class DINOLoss(nn.Module): + def __init__( + self, + out_dim, + student_temp=0.1, + center_momentum=0.9, + ): + super().__init__() + self.student_temp = student_temp + self.center_momentum = center_momentum + self.register_buffer("center", torch.zeros(1, out_dim)) + self.updated = True + self.reduce_handle = None + self.len_teacher_output = None + self.async_batch_center = None + + @torch.no_grad() + def softmax_center_teacher(self, teacher_output, teacher_temp): + self.apply_center_update() + # teacher centering and sharpening + return F.softmax((teacher_output - self.center) / teacher_temp, dim=-1) + + @torch.no_grad() + def sinkhorn_knopp_teacher(self, teacher_output, teacher_temp, n_iterations=3): + teacher_output = teacher_output.float() + world_size = dist.get_world_size() if dist.is_initialized() else 1 + Q = torch.exp(teacher_output / teacher_temp).t() # Q is K-by-B for consistency with notations from our paper + B = Q.shape[1] * world_size # number of samples to assign + K = Q.shape[0] # how many prototypes + + # make the matrix sums to 1 + sum_Q = torch.sum(Q) + if dist.is_initialized(): + dist.all_reduce(sum_Q) + Q /= sum_Q + + for it in range(n_iterations): + # normalize each row: total weight per prototype must be 1/K + sum_of_rows = torch.sum(Q, dim=1, keepdim=True) + if dist.is_initialized(): + dist.all_reduce(sum_of_rows) + Q /= sum_of_rows + Q /= K + + # normalize each column: total weight per sample must be 1/B + Q /= torch.sum(Q, dim=0, keepdim=True) + Q /= B + + Q *= B # the columns must sum to 1 so that Q is an assignment + return Q.t() + + def forward(self, student_output_list, teacher_out_softmaxed_centered_list): + """ + Cross-entropy between softmax outputs of the teacher and student networks. + """ + # TODO: Use cross_entropy_distribution here + total_loss = 0 + for s in student_output_list: + lsm = F.log_softmax(s / self.student_temp, dim=-1) + for t in teacher_out_softmaxed_centered_list: + loss = torch.sum(t * lsm, dim=-1) + total_loss -= loss.mean() + return total_loss + + @torch.no_grad() + def update_center(self, teacher_output): + self.reduce_center_update(teacher_output) + + @torch.no_grad() + def reduce_center_update(self, teacher_output): + self.updated = False + self.len_teacher_output = len(teacher_output) + self.async_batch_center = torch.sum(teacher_output, dim=0, keepdim=True) + if dist.is_initialized(): + self.reduce_handle = dist.all_reduce(self.async_batch_center, async_op=True) + + @torch.no_grad() + def apply_center_update(self): + if self.updated is False: + world_size = dist.get_world_size() if dist.is_initialized() else 1 + + if self.reduce_handle is not None: + self.reduce_handle.wait() + _t = self.async_batch_center / (self.len_teacher_output * world_size) + + self.center = self.center * self.center_momentum + _t * (1 - self.center_momentum) + + self.updated = True diff --git a/engine/pose_estimation/dinov2/dinov2/loss/ibot_patch_loss.py b/engine/pose_estimation/dinov2/dinov2/loss/ibot_patch_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..6732cda0c311c69f193669ebc950fc8665871442 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/loss/ibot_patch_loss.py @@ -0,0 +1,151 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
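# Hedged single-process sketch of the masked patch loss defined below; an editorial
# illustration, not part of the upstream file. Shapes are placeholders (B images,
# N patch tokens, D prototypes); the boolean mask marks the patches that were
# replaced by the mask token on the student side.
import torch

def _ibot_patch_loss_demo():
    from dinov2.loss import iBOTPatchLoss

    B, N, D = 2, 16, 64
    ibot_loss = iBOTPatchLoss(patch_out_dim=D)
    student_patch_logits = torch.randn(B, N, D)
    teacher_patch_logits = torch.randn(B, N, D)
    masks = torch.rand(B, N) > 0.5  # True where the student input was masked

    teacher_probs = ibot_loss.softmax_center_teacher(teacher_patch_logits, teacher_temp=0.07)
    loss = ibot_loss(student_patch_logits, teacher_probs, student_masks_flat=masks)
    ibot_loss.update_center(teacher_patch_logits)
    return loss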
+ +import torch +import torch.distributed as dist +import torch.nn.functional as F +from torch import nn + +import logging + + +logger = logging.getLogger("dinov2") + + +try: + from xformers.ops import cross_entropy + + def lossfunc(t, s, temp): + s = s.float() + t = t.float() + if s.ndim == 2: + return -cross_entropy(s.unsqueeze(0), t.unsqueeze(0), temp, bw_inplace=True).squeeze(0) + elif s.ndim == 3: + return -cross_entropy(s, t, temp, bw_inplace=True) + +except ImportError: + + def lossfunc(t, s, temp): + return torch.sum(t * F.log_softmax(s / temp, dim=-1), dim=-1) + + +class iBOTPatchLoss(nn.Module): + def __init__(self, patch_out_dim, student_temp=0.1, center_momentum=0.9): + super().__init__() + self.student_temp = student_temp + self.center_momentum = center_momentum + self.register_buffer("center", torch.zeros(1, 1, patch_out_dim)) + self.updated = True + self.reduce_handle = None + self.len_teacher_patch_tokens = None + self.async_batch_center = None + + @torch.no_grad() + def softmax_center_teacher(self, teacher_patch_tokens, teacher_temp): + self.apply_center_update() + # teacher centering and sharpening + # + # WARNING: + # as self.center is a float32, everything gets casted to float32 afterwards + # + # teacher_patch_tokens = teacher_patch_tokens.float() + # return F.softmax((teacher_patch_tokens.sub_(self.center.to(teacher_patch_tokens.dtype))).mul_(1 / teacher_temp), dim=-1) + + return F.softmax((teacher_patch_tokens - self.center) / teacher_temp, dim=-1) + + # this is experimental, keep everything in float16 and let's see what happens: + # return F.softmax((teacher_patch_tokens.sub_(self.center)) / teacher_temp, dim=-1) + + @torch.no_grad() + def sinkhorn_knopp_teacher(self, teacher_output, teacher_temp, n_masked_patches_tensor, n_iterations=3): + teacher_output = teacher_output.float() + # world_size = dist.get_world_size() if dist.is_initialized() else 1 + Q = torch.exp(teacher_output / teacher_temp).t() # Q is K-by-B for consistency with notations from our paper + # B = Q.shape[1] * world_size # number of samples to assign + B = n_masked_patches_tensor + dist.all_reduce(B) + K = Q.shape[0] # how many prototypes + + # make the matrix sums to 1 + sum_Q = torch.sum(Q) + if dist.is_initialized(): + dist.all_reduce(sum_Q) + Q /= sum_Q + + for it in range(n_iterations): + # normalize each row: total weight per prototype must be 1/K + sum_of_rows = torch.sum(Q, dim=1, keepdim=True) + if dist.is_initialized(): + dist.all_reduce(sum_of_rows) + Q /= sum_of_rows + Q /= K + + # normalize each column: total weight per sample must be 1/B + Q /= torch.sum(Q, dim=0, keepdim=True) + Q /= B + + Q *= B # the columns must sum to 1 so that Q is an assignment + return Q.t() + + def forward(self, student_patch_tokens, teacher_patch_tokens, student_masks_flat): + """ + Cross-entropy between softmax outputs of the teacher and student networks. 
+ student_patch_tokens: (B, N, D) tensor + teacher_patch_tokens: (B, N, D) tensor + student_masks_flat: (B, N) tensor + """ + t = teacher_patch_tokens + s = student_patch_tokens + loss = torch.sum(t * F.log_softmax(s / self.student_temp, dim=-1), dim=-1) + loss = torch.sum(loss * student_masks_flat.float(), dim=-1) / student_masks_flat.sum(dim=-1).clamp(min=1.0) + return -loss.mean() + + def forward_masked( + self, + student_patch_tokens_masked, + teacher_patch_tokens_masked, + student_masks_flat, + n_masked_patches=None, + masks_weight=None, + ): + t = teacher_patch_tokens_masked + s = student_patch_tokens_masked + # loss = torch.sum(t * F.log_softmax(s / self.student_temp, dim=-1), dim=-1) + loss = lossfunc(t, s, self.student_temp) + if masks_weight is None: + masks_weight = ( + (1 / student_masks_flat.sum(-1).clamp(min=1.0)) + .unsqueeze(-1) + .expand_as(student_masks_flat)[student_masks_flat] + ) + if n_masked_patches is not None: + loss = loss[:n_masked_patches] + loss = loss * masks_weight + return -loss.sum() / student_masks_flat.shape[0] + + @torch.no_grad() + def update_center(self, teacher_patch_tokens): + self.reduce_center_update(teacher_patch_tokens) + + @torch.no_grad() + def reduce_center_update(self, teacher_patch_tokens): + self.updated = False + self.len_teacher_patch_tokens = len(teacher_patch_tokens) + self.async_batch_center = torch.sum(teacher_patch_tokens.mean(1), dim=0, keepdim=True) + if dist.is_initialized(): + self.reduce_handle = dist.all_reduce(self.async_batch_center, async_op=True) + + @torch.no_grad() + def apply_center_update(self): + if self.updated is False: + world_size = dist.get_world_size() if dist.is_initialized() else 1 + + if self.reduce_handle is not None: + self.reduce_handle.wait() + _t = self.async_batch_center / (self.len_teacher_patch_tokens * world_size) + + self.center = self.center * self.center_momentum + _t * (1 - self.center_momentum) + + self.updated = True diff --git a/engine/pose_estimation/dinov2/dinov2/loss/koleo_loss.py b/engine/pose_estimation/dinov2/dinov2/loss/koleo_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..b5cbcd91e0fc0b857f477b0910f957f02a6c4335 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/loss/koleo_loss.py @@ -0,0 +1,48 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import logging + +import torch +import torch.nn as nn +import torch.nn.functional as F + +# import torch.distributed as dist + + +logger = logging.getLogger("dinov2") + + +class KoLeoLoss(nn.Module): + """Kozachenko-Leonenko entropic loss regularizer from Sablayrolles et al. - 2018 - Spreading vectors for similarity search""" + + def __init__(self): + super().__init__() + self.pdist = nn.PairwiseDistance(2, eps=1e-8) + + def pairwise_NNs_inner(self, x): + """ + Pairwise nearest neighbors for L2-normalized vectors. + Uses Torch rather than Faiss to remain on GPU. 
+ """ + # parwise dot products (= inverse distance) + dots = torch.mm(x, x.t()) + n = x.shape[0] + dots.view(-1)[:: (n + 1)].fill_(-1) # Trick to fill diagonal with -1 + # max inner prod -> min distance + _, I = torch.max(dots, dim=1) # noqa: E741 + return I + + def forward(self, student_output, eps=1e-8): + """ + Args: + student_output (BxD): backbone output of student + """ + with torch.cuda.amp.autocast(enabled=False): + student_output = F.normalize(student_output, eps=eps, p=2, dim=-1) + I = self.pairwise_NNs_inner(student_output) # noqa: E741 + distances = self.pdist(student_output, student_output[I]) # BxD, BxD -> B + loss = -torch.log(distances + eps).mean() + return loss diff --git a/engine/pose_estimation/dinov2/dinov2/models/__init__.py b/engine/pose_estimation/dinov2/dinov2/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3fdff20badbd5244bf79f16bf18dd2cb73982265 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/models/__init__.py @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import logging + +from . import vision_transformer as vits + + +logger = logging.getLogger("dinov2") + + +def build_model(args, only_teacher=False, img_size=224): + args.arch = args.arch.removesuffix("_memeff") + if "vit" in args.arch: + vit_kwargs = dict( + img_size=img_size, + patch_size=args.patch_size, + init_values=args.layerscale, + ffn_layer=args.ffn_layer, + block_chunks=args.block_chunks, + qkv_bias=args.qkv_bias, + proj_bias=args.proj_bias, + ffn_bias=args.ffn_bias, + num_register_tokens=args.num_register_tokens, + interpolate_offset=args.interpolate_offset, + interpolate_antialias=args.interpolate_antialias, + ) + teacher = vits.__dict__[args.arch](**vit_kwargs) + if only_teacher: + return teacher, teacher.embed_dim + student = vits.__dict__[args.arch]( + **vit_kwargs, + drop_path_rate=args.drop_path_rate, + drop_path_uniform=args.drop_path_uniform, + ) + embed_dim = student.embed_dim + return student, teacher, embed_dim + + +def build_model_from_cfg(cfg, only_teacher=False): + return build_model(cfg.student, only_teacher=only_teacher, img_size=cfg.crops.global_crops_size) diff --git a/engine/pose_estimation/dinov2/dinov2/models/vision_transformer.py b/engine/pose_estimation/dinov2/dinov2/models/vision_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..13b44ae3c4e32f79ed793f3dafc58b0016dbaa99 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/models/vision_transformer.py @@ -0,0 +1,396 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
+ +# References: +# https://github.com/facebookresearch/dino/blob/main/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/models/vision_transformer.py + +from functools import partial +import math +import logging +from typing import Sequence, Tuple, Union, Callable + +import torch +import torch.nn as nn +import torch.utils.checkpoint +from torch.nn.init import trunc_normal_ + +from dinov2.layers import Mlp, PatchEmbed, SwiGLUFFNFused, MemEffAttention, NestedTensorBlock as Block + + +logger = logging.getLogger("dinov2") + + +def named_apply(fn: Callable, module: nn.Module, name="", depth_first=True, include_root=False) -> nn.Module: + if not depth_first and include_root: + fn(module=module, name=name) + for child_name, child_module in module.named_children(): + child_name = ".".join((name, child_name)) if name else child_name + named_apply(fn=fn, module=child_module, name=child_name, depth_first=depth_first, include_root=True) + if depth_first and include_root: + fn(module=module, name=name) + return module + + +class BlockChunk(nn.ModuleList): + def forward(self, x): + for b in self: + x = b(x) + return x + + +class DinoVisionTransformer(nn.Module): + def __init__( + self, + img_size=224, + patch_size=16, + in_chans=3, + embed_dim=768, + depth=12, + num_heads=12, + mlp_ratio=4.0, + qkv_bias=True, + ffn_bias=True, + proj_bias=True, + drop_path_rate=0.0, + drop_path_uniform=False, + init_values=None, # for layerscale: None or 0 => no layerscale + embed_layer=PatchEmbed, + act_layer=nn.GELU, + block_fn=Block, + ffn_layer="mlp", + block_chunks=1, + num_register_tokens=0, + interpolate_antialias=False, + interpolate_offset=0.1, + ): + """ + Args: + img_size (int, tuple): input image size + patch_size (int, tuple): patch size + in_chans (int): number of input channels + embed_dim (int): embedding dimension + depth (int): depth of transformer + num_heads (int): number of attention heads + mlp_ratio (int): ratio of mlp hidden dim to embedding dim + qkv_bias (bool): enable bias for qkv if True + proj_bias (bool): enable bias for proj in attn if True + ffn_bias (bool): enable bias for ffn if True + drop_path_rate (float): stochastic depth rate + drop_path_uniform (bool): apply uniform drop rate across blocks + weight_init (str): weight init scheme + init_values (float): layer-scale init values + embed_layer (nn.Module): patch embedding layer + act_layer (nn.Module): MLP activation layer + block_fn (nn.Module): transformer block class + ffn_layer (str): "mlp", "swiglu", "swiglufused" or "identity" + block_chunks: (int) split block sequence into block_chunks units for FSDP wrap + num_register_tokens: (int) number of extra cls tokens (so-called "registers") + interpolate_antialias: (str) flag to apply anti-aliasing when interpolating positional embeddings + interpolate_offset: (float) work-around offset to apply when interpolating positional embeddings + """ + super().__init__() + norm_layer = partial(nn.LayerNorm, eps=1e-6) + + self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models + self.num_tokens = 1 + self.n_blocks = depth + self.num_heads = num_heads + self.patch_size = patch_size + self.num_register_tokens = num_register_tokens + self.interpolate_antialias = interpolate_antialias + self.interpolate_offset = interpolate_offset + + self.patch_embed = embed_layer(img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) + num_patches = self.patch_embed.num_patches + + self.cls_token = 
nn.Parameter(torch.zeros(1, 1, embed_dim)) + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + self.num_tokens, embed_dim)) + assert num_register_tokens >= 0 + self.register_tokens = ( + nn.Parameter(torch.zeros(1, num_register_tokens, embed_dim)) if num_register_tokens else None + ) + + if drop_path_uniform is True: + dpr = [drop_path_rate] * depth + else: + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule + + if ffn_layer == "mlp": + logger.info("using MLP layer as FFN") + ffn_layer = Mlp + elif ffn_layer == "swiglufused" or ffn_layer == "swiglu": + logger.info("using SwiGLU layer as FFN") + ffn_layer = SwiGLUFFNFused + elif ffn_layer == "identity": + logger.info("using Identity layer as FFN") + + def f(*args, **kwargs): + return nn.Identity() + + ffn_layer = f + else: + raise NotImplementedError + + blocks_list = [ + block_fn( + dim=embed_dim, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + proj_bias=proj_bias, + ffn_bias=ffn_bias, + drop_path=dpr[i], + norm_layer=norm_layer, + act_layer=act_layer, + ffn_layer=ffn_layer, + init_values=init_values, + ) + for i in range(depth) + ] + if block_chunks > 0: + self.chunked_blocks = True + chunked_blocks = [] + chunksize = depth // block_chunks + for i in range(0, depth, chunksize): + # this is to keep the block index consistent if we chunk the block list + chunked_blocks.append([nn.Identity()] * i + blocks_list[i : i + chunksize]) + self.blocks = nn.ModuleList([BlockChunk(p) for p in chunked_blocks]) + else: + self.chunked_blocks = False + self.blocks = nn.ModuleList(blocks_list) + + self.norm = norm_layer(embed_dim) + self.head = nn.Identity() + + self.mask_token = nn.Parameter(torch.zeros(1, embed_dim)) + + self.init_weights() + + def init_weights(self): + trunc_normal_(self.pos_embed, std=0.02) + nn.init.normal_(self.cls_token, std=1e-6) + if self.register_tokens is not None: + nn.init.normal_(self.register_tokens, std=1e-6) + named_apply(init_weights_vit_timm, self) + + def interpolate_pos_encoding(self, x, w, h): + previous_dtype = x.dtype + npatch = x.shape[1] - 1 + N = self.pos_embed.shape[1] - 1 + if npatch == N and w == h: + return self.pos_embed + pos_embed = self.pos_embed.float() + class_pos_embed = pos_embed[:, 0] + patch_pos_embed = pos_embed[:, 1:] + dim = x.shape[-1] + w0 = w // self.patch_size + h0 = h // self.patch_size + M = int(math.sqrt(N)) # Recover the number of patches in each dimension + assert N == M * M + kwargs = {} + if self.interpolate_offset: + # Historical kludge: add a small number to avoid floating point error in the interpolation, see https://github.com/facebookresearch/dino/issues/8 + # Note: still needed for backward-compatibility, the underlying operators are using both output size and scale factors + sx = float(w0 + self.interpolate_offset) / M + sy = float(h0 + self.interpolate_offset) / M + kwargs["scale_factor"] = (sx, sy) + else: + # Simply specify an output size instead of a scale factor + kwargs["size"] = (w0, h0) + patch_pos_embed = nn.functional.interpolate( + patch_pos_embed.reshape(1, M, M, dim).permute(0, 3, 1, 2), + mode="bicubic", + antialias=self.interpolate_antialias, + **kwargs, + ) + assert (w0, h0) == patch_pos_embed.shape[-2:] + patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim) + return torch.cat((class_pos_embed.unsqueeze(0), patch_pos_embed), dim=1).to(previous_dtype) + + def prepare_tokens_with_masks(self, x, masks=None): + B, nc, w, h = x.shape + x = self.patch_embed(x) + if masks is 
not None: + x = torch.where(masks.unsqueeze(-1), self.mask_token.to(x.dtype).unsqueeze(0), x) + + x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1) + x = x + self.interpolate_pos_encoding(x, w, h) + + if self.register_tokens is not None: + x = torch.cat( + ( + x[:, :1], + self.register_tokens.expand(x.shape[0], -1, -1), + x[:, 1:], + ), + dim=1, + ) + + return x + + def forward_features_list(self, x_list, masks_list): + x = [self.prepare_tokens_with_masks(x, masks) for x, masks in zip(x_list, masks_list)] + for blk in self.blocks: + x = blk(x) + + all_x = x + output = [] + for x, masks in zip(all_x, masks_list): + x_norm = self.norm(x) + output.append( + { + "x_norm_clstoken": x_norm[:, 0], + "x_norm_regtokens": x_norm[:, 1 : self.num_register_tokens + 1], + "x_norm_patchtokens": x_norm[:, self.num_register_tokens + 1 :], + "x_prenorm": x, + "masks": masks, + } + ) + return output + + def forward_features(self, x, masks=None): + if isinstance(x, list): + return self.forward_features_list(x, masks) + + x = self.prepare_tokens_with_masks(x, masks) + + for blk in self.blocks: + x = blk(x) + + x_norm = self.norm(x) + return { + "x_norm_clstoken": x_norm[:, 0], + "x_norm_regtokens": x_norm[:, 1 : self.num_register_tokens + 1], + "x_norm_patchtokens": x_norm[:, self.num_register_tokens + 1 :], + "x_prenorm": x, + "masks": masks, + } + + def _get_intermediate_layers_not_chunked(self, x, n=1): + x = self.prepare_tokens_with_masks(x) + # If n is an int, take the n last blocks. If it's a list, take them + output, total_block_len = [], len(self.blocks) + blocks_to_take = range(total_block_len - n, total_block_len) if isinstance(n, int) else n + for i, blk in enumerate(self.blocks): + x = blk(x) + if i in blocks_to_take: + output.append(x) + assert len(output) == len(blocks_to_take), f"only {len(output)} / {len(blocks_to_take)} blocks found" + return output + + def _get_intermediate_layers_chunked(self, x, n=1): + x = self.prepare_tokens_with_masks(x) + output, i, total_block_len = [], 0, len(self.blocks[-1]) + # If n is an int, take the n last blocks. 
If it's a list, take them + blocks_to_take = range(total_block_len - n, total_block_len) if isinstance(n, int) else n + for block_chunk in self.blocks: + for blk in block_chunk[i:]: # Passing the nn.Identity() + x = blk(x) + if i in blocks_to_take: + output.append(x) + i += 1 + assert len(output) == len(blocks_to_take), f"only {len(output)} / {len(blocks_to_take)} blocks found" + return output + + def get_intermediate_layers( + self, + x: torch.Tensor, + n: Union[int, Sequence] = 1, # Layers or n last layers to take + reshape: bool = False, + return_class_token: bool = False, + norm=True, + ) -> Tuple[Union[torch.Tensor, Tuple[torch.Tensor]]]: + if self.chunked_blocks: + outputs = self._get_intermediate_layers_chunked(x, n) + else: + outputs = self._get_intermediate_layers_not_chunked(x, n) + if norm: + outputs = [self.norm(out) for out in outputs] + class_tokens = [out[:, 0] for out in outputs] + outputs = [out[:, 1 + self.num_register_tokens :] for out in outputs] + if reshape: + B, _, w, h = x.shape + outputs = [ + out.reshape(B, w // self.patch_size, h // self.patch_size, -1).permute(0, 3, 1, 2).contiguous() + for out in outputs + ] + if return_class_token: + return tuple(zip(outputs, class_tokens)) + return tuple(outputs) + + def forward(self, *args, is_training=False, **kwargs): + ret = self.forward_features(*args, **kwargs) + if is_training: + return ret + else: + return self.head(ret["x_norm_clstoken"]) + + +def init_weights_vit_timm(module: nn.Module, name: str = ""): + """ViT weight initialization, original timm impl (for reproducibility)""" + if isinstance(module, nn.Linear): + trunc_normal_(module.weight, std=0.02) + if module.bias is not None: + nn.init.zeros_(module.bias) + + +def vit_small(patch_size=16, num_register_tokens=0, **kwargs): + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=384, + depth=12, + num_heads=6, + mlp_ratio=4, + block_fn=partial(Block, attn_class=MemEffAttention), + num_register_tokens=num_register_tokens, + **kwargs, + ) + return model + + +def vit_base(patch_size=16, num_register_tokens=0, **kwargs): + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=768, + depth=12, + num_heads=12, + mlp_ratio=4, + block_fn=partial(Block, attn_class=MemEffAttention), + num_register_tokens=num_register_tokens, + **kwargs, + ) + return model + + +def vit_large(patch_size=16, num_register_tokens=0, **kwargs): + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=1024, + depth=24, + num_heads=16, + mlp_ratio=4, + block_fn=partial(Block, attn_class=MemEffAttention), + num_register_tokens=num_register_tokens, + **kwargs, + ) + return model + + +def vit_giant2(patch_size=16, num_register_tokens=0, **kwargs): + """ + Close to ViT-giant, with embed-dim 1536 and 24 heads => embed-dim per head 64 + """ + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=1536, + depth=40, + num_heads=24, + mlp_ratio=4, + block_fn=partial(Block, attn_class=MemEffAttention), + num_register_tokens=num_register_tokens, + **kwargs, + ) + return model diff --git a/engine/pose_estimation/dinov2/dinov2/run/__init__.py b/engine/pose_estimation/dinov2/dinov2/run/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b88da6bf80be92af00b72dfdb0a806fa64a7a2d9 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/run/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. diff --git a/engine/pose_estimation/dinov2/dinov2/run/eval/knn.py b/engine/pose_estimation/dinov2/dinov2/run/eval/knn.py new file mode 100644 index 0000000000000000000000000000000000000000..d11918445cdfe415fe58ac8b3ad0bf29702e3457 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/run/eval/knn.py @@ -0,0 +1,59 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import logging +import os +import sys + +from dinov2.eval.knn import get_args_parser as get_knn_args_parser +from dinov2.logging import setup_logging +from dinov2.run.submit import get_args_parser, submit_jobs + + +logger = logging.getLogger("dinov2") + + +class Evaluator: + def __init__(self, args): + self.args = args + + def __call__(self): + from dinov2.eval.knn import main as knn_main + + self._setup_args() + knn_main(self.args) + + def checkpoint(self): + import submitit + + logger.info(f"Requeuing {self.args}") + empty = type(self)(self.args) + return submitit.helpers.DelayedSubmission(empty) + + def _setup_args(self): + import submitit + + job_env = submitit.JobEnvironment() + self.args.output_dir = self.args.output_dir.replace("%j", str(job_env.job_id)) + logger.info(f"Process group: {job_env.num_tasks} tasks, rank: {job_env.global_rank}") + logger.info(f"Args: {self.args}") + + +def main(): + description = "Submitit launcher for DINOv2 k-NN evaluation" + knn_args_parser = get_knn_args_parser(add_help=False) + parents = [knn_args_parser] + args_parser = get_args_parser(description=description, parents=parents) + args = args_parser.parse_args() + + setup_logging() + + assert os.path.exists(args.config_file), "Configuration file does not exist!" + submit_jobs(Evaluator, args, name="dinov2:knn") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/engine/pose_estimation/dinov2/dinov2/run/eval/linear.py b/engine/pose_estimation/dinov2/dinov2/run/eval/linear.py new file mode 100644 index 0000000000000000000000000000000000000000..e1dc3293e88512a5cf885ab775dc08e01aed6724 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/run/eval/linear.py @@ -0,0 +1,59 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
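# Hedged sketch of the launcher pattern shared by the run/eval and run/train entry
# points in this diff; an editorial illustration, not part of the upstream file. It
# only runs on a SLURM cluster with submitit installed, and the output_dir, partition,
# and resource values below are placeholders. submit_jobs (defined later in
# dinov2/run/submit.py) reads these fields from the argparse namespace: output_dir,
# ngpus, nodes, timeout, partition, use_volta32, comment, exclude.
import argparse

class _EchoTask:
    """Minimal picklable task: submitit calls the instance on the allocated node."""

    def __init__(self, args):
        self.args = args

    def __call__(self):
        print(f"running on the cluster with args: {self.args}")

def _submit_demo():
    from dinov2.run.submit import submit_jobs

    args = argparse.Namespace(
        output_dir="/tmp/dinov2_demo/%j",  # %j is replaced by the SLURM job id in the logs
        ngpus=1,
        nodes=1,
        timeout=60,
        partition="gpu",  # placeholder partition name
        use_volta32=False,
        comment="",
        exclude="",
    )
    submit_jobs(_EchoTask, args, name="dinov2:demo")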
+ +import logging +import os +import sys + +from dinov2.eval.linear import get_args_parser as get_linear_args_parser +from dinov2.logging import setup_logging +from dinov2.run.submit import get_args_parser, submit_jobs + + +logger = logging.getLogger("dinov2") + + +class Evaluator: + def __init__(self, args): + self.args = args + + def __call__(self): + from dinov2.eval.linear import main as linear_main + + self._setup_args() + linear_main(self.args) + + def checkpoint(self): + import submitit + + logger.info(f"Requeuing {self.args}") + empty = type(self)(self.args) + return submitit.helpers.DelayedSubmission(empty) + + def _setup_args(self): + import submitit + + job_env = submitit.JobEnvironment() + self.args.output_dir = self.args.output_dir.replace("%j", str(job_env.job_id)) + logger.info(f"Process group: {job_env.num_tasks} tasks, rank: {job_env.global_rank}") + logger.info(f"Args: {self.args}") + + +def main(): + description = "Submitit launcher for DINOv2 linear evaluation" + linear_args_parser = get_linear_args_parser(add_help=False) + parents = [linear_args_parser] + args_parser = get_args_parser(description=description, parents=parents) + args = args_parser.parse_args() + + setup_logging() + + assert os.path.exists(args.config_file), "Configuration file does not exist!" + submit_jobs(Evaluator, args, name="dinov2:linear") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/engine/pose_estimation/dinov2/dinov2/run/eval/log_regression.py b/engine/pose_estimation/dinov2/dinov2/run/eval/log_regression.py new file mode 100644 index 0000000000000000000000000000000000000000..cdf02181122de72cfa463ef38494967219df9cf3 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/run/eval/log_regression.py @@ -0,0 +1,59 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import logging +import os +import sys + +from dinov2.eval.log_regression import get_args_parser as get_log_regression_args_parser +from dinov2.logging import setup_logging +from dinov2.run.submit import get_args_parser, submit_jobs + + +logger = logging.getLogger("dinov2") + + +class Evaluator: + def __init__(self, args): + self.args = args + + def __call__(self): + from dinov2.eval.log_regression import main as log_regression_main + + self._setup_args() + log_regression_main(self.args) + + def checkpoint(self): + import submitit + + logger.info(f"Requeuing {self.args}") + empty = type(self)(self.args) + return submitit.helpers.DelayedSubmission(empty) + + def _setup_args(self): + import submitit + + job_env = submitit.JobEnvironment() + self.args.output_dir = self.args.output_dir.replace("%j", str(job_env.job_id)) + logger.info(f"Process group: {job_env.num_tasks} tasks, rank: {job_env.global_rank}") + logger.info(f"Args: {self.args}") + + +def main(): + description = "Submitit launcher for DINOv2 logistic evaluation" + log_regression_args_parser = get_log_regression_args_parser(add_help=False) + parents = [log_regression_args_parser] + args_parser = get_args_parser(description=description, parents=parents) + args = args_parser.parse_args() + + setup_logging() + + assert os.path.exists(args.config_file), "Configuration file does not exist!" 
+ submit_jobs(Evaluator, args, name="dinov2:logreg") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/engine/pose_estimation/dinov2/dinov2/run/submit.py b/engine/pose_estimation/dinov2/dinov2/run/submit.py new file mode 100644 index 0000000000000000000000000000000000000000..4d1f718e704cf9a48913422404c25a7fcc50e738 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/run/submit.py @@ -0,0 +1,122 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import argparse +import logging +import os +from pathlib import Path +from typing import List, Optional + +import submitit + +from dinov2.utils.cluster import ( + get_slurm_executor_parameters, + get_slurm_partition, + get_user_checkpoint_path, +) + + +logger = logging.getLogger("dinov2") + + +def get_args_parser( + description: Optional[str] = None, + parents: Optional[List[argparse.ArgumentParser]] = None, + add_help: bool = True, +) -> argparse.ArgumentParser: + parents = parents or [] + slurm_partition = get_slurm_partition() + parser = argparse.ArgumentParser( + description=description, + parents=parents, + add_help=add_help, + ) + parser.add_argument( + "--ngpus", + "--gpus", + "--gpus-per-node", + default=8, + type=int, + help="Number of GPUs to request on each node", + ) + parser.add_argument( + "--nodes", + "--nnodes", + default=1, + type=int, + help="Number of nodes to request", + ) + parser.add_argument( + "--timeout", + default=2800, + type=int, + help="Duration of the job", + ) + parser.add_argument( + "--partition", + default=slurm_partition, + type=str, + help="Partition where to submit", + ) + parser.add_argument( + "--use-volta32", + action="store_true", + help="Request V100-32GB GPUs", + ) + parser.add_argument( + "--comment", + default="", + type=str, + help="Comment to pass to scheduler, e.g. 
priority message", + ) + parser.add_argument( + "--exclude", + default="", + type=str, + help="Nodes to exclude", + ) + return parser + + +def get_shared_folder() -> Path: + user_checkpoint_path = get_user_checkpoint_path() + if user_checkpoint_path is None: + raise RuntimeError("Path to user checkpoint cannot be determined") + path = user_checkpoint_path / "experiments" + path.mkdir(exist_ok=True) + return path + + +def submit_jobs(task_class, args, name: str): + if not args.output_dir: + args.output_dir = str(get_shared_folder() / "%j") + + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + executor = submitit.AutoExecutor(folder=args.output_dir, slurm_max_num_timeout=30) + + kwargs = {} + if args.use_volta32: + kwargs["slurm_constraint"] = "volta32gb" + if args.comment: + kwargs["slurm_comment"] = args.comment + if args.exclude: + kwargs["slurm_exclude"] = args.exclude + + executor_params = get_slurm_executor_parameters( + nodes=args.nodes, + num_gpus_per_node=args.ngpus, + timeout_min=args.timeout, # max is 60 * 72 + slurm_signal_delay_s=120, + slurm_partition=args.partition, + **kwargs, + ) + executor.update_parameters(name=name, **executor_params) + + task = task_class(args) + job = executor.submit(task) + + logger.info(f"Submitted job_id: {job.job_id}") + str_output_dir = os.path.abspath(args.output_dir).replace("%j", str(job.job_id)) + logger.info(f"Logs and checkpoints will be saved at: {str_output_dir}") diff --git a/engine/pose_estimation/dinov2/dinov2/run/train/train.py b/engine/pose_estimation/dinov2/dinov2/run/train/train.py new file mode 100644 index 0000000000000000000000000000000000000000..c2366e9bf79765e6abcd70dda6b43f31cb7093eb --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/run/train/train.py @@ -0,0 +1,59 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +import logging +import os +import sys + +from dinov2.logging import setup_logging +from dinov2.train import get_args_parser as get_train_args_parser +from dinov2.run.submit import get_args_parser, submit_jobs + + +logger = logging.getLogger("dinov2") + + +class Trainer(object): + def __init__(self, args): + self.args = args + + def __call__(self): + from dinov2.train import main as train_main + + self._setup_args() + train_main(self.args) + + def checkpoint(self): + import submitit + + logger.info(f"Requeuing {self.args}") + empty = type(self)(self.args) + return submitit.helpers.DelayedSubmission(empty) + + def _setup_args(self): + import submitit + + job_env = submitit.JobEnvironment() + self.args.output_dir = self.args.output_dir.replace("%j", str(job_env.job_id)) + logger.info(f"Process group: {job_env.num_tasks} tasks, rank: {job_env.global_rank}") + logger.info(f"Args: {self.args}") + + +def main(): + description = "Submitit launcher for DINOv2 training" + train_args_parser = get_train_args_parser(add_help=False) + parents = [train_args_parser] + args_parser = get_args_parser(description=description, parents=parents) + args = args_parser.parse_args() + + setup_logging() + + assert os.path.exists(args.config_file), "Configuration file does not exist!" 
+ submit_jobs(Trainer, args, name="dinov2:train") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/engine/pose_estimation/dinov2/dinov2/train/__init__.py b/engine/pose_estimation/dinov2/dinov2/train/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5f1752922d04fff0112eb7796be28ff6b68c6073 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/train/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from .train import get_args_parser, main +from .ssl_meta_arch import SSLMetaArch diff --git a/engine/pose_estimation/dinov2/dinov2/train/ssl_meta_arch.py b/engine/pose_estimation/dinov2/dinov2/train/ssl_meta_arch.py new file mode 100644 index 0000000000000000000000000000000000000000..3ccf15e904ebeb6134dfb4f5c99da4fc8d41b8e4 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/train/ssl_meta_arch.py @@ -0,0 +1,400 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from functools import partial +import logging + +import torch +from torch import nn + +from dinov2.loss import DINOLoss, iBOTPatchLoss, KoLeoLoss +from dinov2.models import build_model_from_cfg +from dinov2.layers import DINOHead +from dinov2.utils.utils import has_batchnorms +from dinov2.utils.param_groups import get_params_groups_with_decay, fuse_params_groups +from dinov2.fsdp import get_fsdp_wrapper, ShardedGradScaler, get_fsdp_modules, reshard_fsdp_model + +from dinov2.models.vision_transformer import BlockChunk + + +try: + from xformers.ops import fmha +except ImportError: + raise AssertionError("xFormers is required for training") + + +logger = logging.getLogger("dinov2") + + +class SSLMetaArch(nn.Module): + def __init__(self, cfg): + super().__init__() + self.cfg = cfg + self.fp16_scaler = ShardedGradScaler() if cfg.compute_precision.grad_scaler else None + + student_model_dict = dict() + teacher_model_dict = dict() + + student_backbone, teacher_backbone, embed_dim = build_model_from_cfg(cfg) + student_model_dict["backbone"] = student_backbone + teacher_model_dict["backbone"] = teacher_backbone + logger.info(f"OPTIONS -- architecture : embed_dim: {embed_dim}") + + if cfg.student.pretrained_weights: + chkpt = torch.load(cfg.student.pretrained_weights) + logger.info(f"OPTIONS -- pretrained weights: loading from {cfg.student.pretrained_weights}") + student_backbone.load_state_dict(chkpt["model"], strict=False) + + self.embed_dim = embed_dim + self.dino_out_dim = cfg.dino.head_n_prototypes + + self.do_dino = cfg.dino.loss_weight > 0 + self.do_koleo = cfg.dino.koleo_loss_weight > 0 + self.do_ibot = cfg.ibot.loss_weight > 0 + self.ibot_separate_head = cfg.ibot.separate_head + + logger.info("OPTIONS -- DINO") + if self.do_dino: + logger.info(f"OPTIONS -- DINO -- loss_weight: {cfg.dino.loss_weight}") + logger.info(f"OPTIONS -- DINO -- head_n_prototypes: {cfg.dino.head_n_prototypes}") + logger.info(f"OPTIONS -- DINO -- head_bottleneck_dim: {cfg.dino.head_bottleneck_dim}") + logger.info(f"OPTIONS -- DINO -- head_hidden_dim: {cfg.dino.head_hidden_dim}") + self.dino_loss_weight = cfg.dino.loss_weight + dino_head = partial( + DINOHead, + in_dim=embed_dim, + out_dim=cfg.dino.head_n_prototypes, + hidden_dim=cfg.dino.head_hidden_dim, + 
bottleneck_dim=cfg.dino.head_bottleneck_dim, + nlayers=cfg.dino.head_nlayers, + ) + self.dino_loss = DINOLoss(self.dino_out_dim) + if self.do_koleo: + logger.info("OPTIONS -- DINO -- applying KOLEO regularization") + self.koleo_loss = KoLeoLoss() + + else: + logger.info("OPTIONS -- DINO -- not using DINO") + + if self.do_dino or self.do_ibot: + student_model_dict["dino_head"] = dino_head() + teacher_model_dict["dino_head"] = dino_head() + + logger.info("OPTIONS -- IBOT") + logger.info(f"OPTIONS -- IBOT -- loss_weight: {cfg.ibot.loss_weight}") + logger.info(f"OPTIONS -- IBOT masking -- ibot_mask_ratio_tuple: {cfg.ibot.mask_ratio_min_max}") + logger.info(f"OPTIONS -- IBOT masking -- ibot_mask_sample_probability: {cfg.ibot.mask_sample_probability}") + if self.do_ibot: + self.ibot_loss_weight = cfg.ibot.loss_weight + assert max(cfg.ibot.mask_ratio_min_max) > 0, "please provide a positive mask ratio tuple for ibot" + assert cfg.ibot.mask_sample_probability > 0, "please provide a positive mask probability for ibot" + self.ibot_out_dim = cfg.ibot.head_n_prototypes if self.ibot_separate_head else cfg.dino.head_n_prototypes + self.ibot_patch_loss = iBOTPatchLoss(self.ibot_out_dim) + if self.ibot_separate_head: + logger.info(f"OPTIONS -- IBOT -- loss_weight: {cfg.ibot.loss_weight}") + logger.info(f"OPTIONS -- IBOT -- head_n_prototypes: {cfg.ibot.head_n_prototypes}") + logger.info(f"OPTIONS -- IBOT -- head_bottleneck_dim: {cfg.ibot.head_bottleneck_dim}") + logger.info(f"OPTIONS -- IBOT -- head_hidden_dim: {cfg.ibot.head_hidden_dim}") + ibot_head = partial( + DINOHead, + in_dim=embed_dim, + out_dim=cfg.ibot.head_n_prototypes, + hidden_dim=cfg.ibot.head_hidden_dim, + bottleneck_dim=cfg.ibot.head_bottleneck_dim, + nlayers=cfg.ibot.head_nlayers, + ) + student_model_dict["ibot_head"] = ibot_head() + teacher_model_dict["ibot_head"] = ibot_head() + else: + logger.info("OPTIONS -- IBOT -- head shared with DINO") + + self.need_to_synchronize_fsdp_streams = True + + self.student = nn.ModuleDict(student_model_dict) + self.teacher = nn.ModuleDict(teacher_model_dict) + + # there is no backpropagation through the teacher, so no need for gradients + for p in self.teacher.parameters(): + p.requires_grad = False + logger.info(f"Student and Teacher are built: they are both {cfg.student.arch} network.") + + def forward(self, inputs): + raise NotImplementedError + + def backprop_loss(self, loss): + if self.fp16_scaler is not None: + self.fp16_scaler.scale(loss).backward() + else: + loss.backward() + + def forward_backward(self, images, teacher_temp): + n_global_crops = 2 + assert n_global_crops == 2 + n_local_crops = self.cfg.crops.local_crops_number + + global_crops = images["collated_global_crops"].cuda(non_blocking=True) + local_crops = images["collated_local_crops"].cuda(non_blocking=True) + + masks = images["collated_masks"].cuda(non_blocking=True) + mask_indices_list = images["mask_indices_list"].cuda(non_blocking=True) + n_masked_patches_tensor = images["n_masked_patches"].cuda(non_blocking=True) + n_masked_patches = mask_indices_list.shape[0] + upperbound = images["upperbound"] + masks_weight = images["masks_weight"].cuda(non_blocking=True) + + n_local_crops_loss_terms = max(n_local_crops * n_global_crops, 1) + n_global_crops_loss_terms = (n_global_crops - 1) * n_global_crops + + do_dino = self.do_dino + do_ibot = self.do_ibot + + # loss scales + ibot_loss_scale = 1.0 / n_global_crops + + # teacher output + @torch.no_grad() + def get_teacher_output(): + x, n_global_crops_teacher = global_crops, n_global_crops + 
teacher_backbone_output_dict = self.teacher.backbone(x, is_training=True) + teacher_cls_tokens = teacher_backbone_output_dict["x_norm_clstoken"] + teacher_cls_tokens = teacher_cls_tokens.chunk(n_global_crops_teacher) + # watch out: these are chunked and cat'd in reverse so A is matched to B in the global crops dino loss + teacher_cls_tokens = torch.cat((teacher_cls_tokens[1], teacher_cls_tokens[0])) + ibot_teacher_patch_tokens = teacher_backbone_output_dict["x_norm_patchtokens"] + _dim = ibot_teacher_patch_tokens.shape[-1] + n_cls_tokens = teacher_cls_tokens.shape[0] + + if do_ibot and not self.ibot_separate_head: + buffer_tensor_teacher = ibot_teacher_patch_tokens.new_zeros(upperbound + n_cls_tokens, _dim) + buffer_tensor_teacher[:n_cls_tokens].copy_(teacher_cls_tokens) + torch.index_select( + ibot_teacher_patch_tokens.flatten(0, 1), + dim=0, + index=mask_indices_list, + out=buffer_tensor_teacher[n_cls_tokens : n_cls_tokens + n_masked_patches], + ) + tokens_after_head = self.teacher.dino_head(buffer_tensor_teacher) + teacher_cls_tokens_after_head = tokens_after_head[:n_cls_tokens] + masked_teacher_patch_tokens_after_head = tokens_after_head[ + n_cls_tokens : n_cls_tokens + n_masked_patches + ] + elif do_ibot and self.ibot_separate_head: + buffer_tensor_teacher = ibot_teacher_patch_tokens.new_zeros(upperbound, _dim) + torch.index_select( + ibot_teacher_patch_tokens.flatten(0, 1), + dim=0, + index=mask_indices_list, + out=buffer_tensor_teacher[:n_masked_patches], + ) + teacher_cls_tokens_after_head = self.teacher.dino_head(teacher_cls_tokens) + masked_teacher_patch_tokens_after_head = self.teacher.ibot_head(buffer_tensor_teacher)[ + :n_masked_patches + ] + else: + teacher_cls_tokens_after_head = self.teacher.dino_head(teacher_cls_tokens) + masked_teacher_ibot_softmaxed_centered = None + + if self.cfg.train.centering == "centering": + teacher_dino_softmaxed_centered_list = self.dino_loss.softmax_center_teacher( + teacher_cls_tokens_after_head, teacher_temp=teacher_temp + ).view(n_global_crops_teacher, -1, *teacher_cls_tokens_after_head.shape[1:]) + self.dino_loss.update_center(teacher_cls_tokens_after_head) + if do_ibot: + masked_teacher_patch_tokens_after_head = masked_teacher_patch_tokens_after_head.unsqueeze(0) + masked_teacher_ibot_softmaxed_centered = self.ibot_patch_loss.softmax_center_teacher( + masked_teacher_patch_tokens_after_head[:, :n_masked_patches], teacher_temp=teacher_temp + ) + masked_teacher_ibot_softmaxed_centered = masked_teacher_ibot_softmaxed_centered.squeeze(0) + self.ibot_patch_loss.update_center(masked_teacher_patch_tokens_after_head[:n_masked_patches]) + + elif self.cfg.train.centering == "sinkhorn_knopp": + teacher_dino_softmaxed_centered_list = self.dino_loss.sinkhorn_knopp_teacher( + teacher_cls_tokens_after_head, teacher_temp=teacher_temp + ).view(n_global_crops_teacher, -1, *teacher_cls_tokens_after_head.shape[1:]) + + if do_ibot: + masked_teacher_ibot_softmaxed_centered = self.ibot_patch_loss.sinkhorn_knopp_teacher( + masked_teacher_patch_tokens_after_head, + teacher_temp=teacher_temp, + n_masked_patches_tensor=n_masked_patches_tensor, + ) + + else: + raise NotImplementedError + + return teacher_dino_softmaxed_centered_list, masked_teacher_ibot_softmaxed_centered + + teacher_dino_softmaxed_centered_list, masked_teacher_ibot_softmaxed_centered = get_teacher_output() + reshard_fsdp_model(self.teacher) + + loss_dict = {} + + loss_accumulator = 0 # for backprop + student_global_backbone_output_dict, student_local_backbone_output_dict = self.student.backbone( + 
[global_crops, local_crops], masks=[masks, None], is_training=True + ) + + inputs_for_student_head_list = [] + + # 1a: local crops cls tokens + student_local_cls_tokens = student_local_backbone_output_dict["x_norm_clstoken"] + inputs_for_student_head_list.append(student_local_cls_tokens.unsqueeze(0)) + + # 1b: global crops cls tokens + student_global_cls_tokens = student_global_backbone_output_dict["x_norm_clstoken"] + inputs_for_student_head_list.append(student_global_cls_tokens.unsqueeze(0)) + + # 1c: global crops patch tokens + if do_ibot: + _dim = student_global_backbone_output_dict["x_norm_clstoken"].shape[-1] + ibot_student_patch_tokens = student_global_backbone_output_dict["x_norm_patchtokens"] + buffer_tensor_patch_tokens = ibot_student_patch_tokens.new_zeros(upperbound, _dim) + buffer_tensor_patch_tokens[:n_masked_patches].copy_( + torch.index_select(ibot_student_patch_tokens.flatten(0, 1), dim=0, index=mask_indices_list) + ) + if not self.ibot_separate_head: + inputs_for_student_head_list.append(buffer_tensor_patch_tokens.unsqueeze(0)) + else: + student_global_masked_patch_tokens_after_head = self.student.ibot_head(buffer_tensor_patch_tokens)[ + :n_masked_patches + ] + + # 2: run + _attn_bias, cat_inputs = fmha.BlockDiagonalMask.from_tensor_list(inputs_for_student_head_list) + outputs_list = _attn_bias.split(self.student.dino_head(cat_inputs)) + + # 3a: local crops cls tokens + student_local_cls_tokens_after_head = outputs_list.pop(0).squeeze(0) + + # 3b: global crops cls tokens + student_global_cls_tokens_after_head = outputs_list.pop(0).squeeze(0) + + # 3c: global crops patch tokens + if do_ibot and not self.ibot_separate_head: + student_global_masked_patch_tokens_after_head = outputs_list.pop(0).squeeze(0)[:n_masked_patches] + + if n_local_crops > 0: + dino_local_crops_loss = self.dino_loss( + student_output_list=student_local_cls_tokens_after_head.chunk(n_local_crops), + teacher_out_softmaxed_centered_list=teacher_dino_softmaxed_centered_list, + ) / (n_global_crops_loss_terms + n_local_crops_loss_terms) + + # store for display + loss_dict["dino_local_crops_loss"] = dino_local_crops_loss + + # accumulate loss + loss_accumulator += self.dino_loss_weight * dino_local_crops_loss + + # process global crops + loss_scales = 2 # this is here since we process global crops together + + if do_dino: + # compute loss + dino_global_crops_loss = ( + self.dino_loss( + student_output_list=[student_global_cls_tokens_after_head], + teacher_out_softmaxed_centered_list=[ + teacher_dino_softmaxed_centered_list.flatten(0, 1) + ], # these were chunked and stacked in reverse so A is matched to B + ) + * loss_scales + / (n_global_crops_loss_terms + n_local_crops_loss_terms) + ) + + loss_dict["dino_global_crops_loss"] = dino_global_crops_loss + + # accumulate loss + loss_accumulator += self.dino_loss_weight * dino_global_crops_loss + + student_cls_tokens = student_global_cls_tokens + + if self.do_koleo: + koleo_loss = self.cfg.dino.koleo_loss_weight * sum( + self.koleo_loss(p) for p in student_cls_tokens.chunk(2) + ) # we don't apply koleo loss between cls tokens of a same image + loss_accumulator += koleo_loss + loss_dict["koleo_loss"] = ( + koleo_loss / loss_scales + ) # this is to display the same losses as before but we can remove eventually + + if do_ibot: + # compute loss + ibot_patch_loss = ( + self.ibot_patch_loss.forward_masked( + student_global_masked_patch_tokens_after_head, + masked_teacher_ibot_softmaxed_centered, + student_masks_flat=masks, + n_masked_patches=n_masked_patches, + 
masks_weight=masks_weight, + ) + * loss_scales + * ibot_loss_scale + ) + + # store for display + loss_dict["ibot_loss"] = ibot_patch_loss / 2 + + # accumulate loss + loss_accumulator += self.ibot_loss_weight * ibot_patch_loss + + self.backprop_loss(loss_accumulator) + + self.fsdp_synchronize_streams() + + return loss_dict + + def fsdp_synchronize_streams(self): + if self.need_to_synchronize_fsdp_streams: + torch.cuda.synchronize() + self.student.dino_head._streams = ( + self.teacher.dino_head._streams + ) = self.student.backbone._streams = self.teacher.backbone._streams + self.need_to_synchronize_fsdp_streams = False + + def update_teacher(self, m): + student_param_list = [] + teacher_param_list = [] + with torch.no_grad(): + for k in self.student.keys(): + for ms, mt in zip(get_fsdp_modules(self.student[k]), get_fsdp_modules(self.teacher[k])): + student_param_list += ms.params + teacher_param_list += mt.params + torch._foreach_mul_(teacher_param_list, m) + torch._foreach_add_(teacher_param_list, student_param_list, alpha=1 - m) + + def train(self): + super().train() + self.teacher.eval() + + def get_maybe_fused_params_for_submodel(self, m): + params_groups = get_params_groups_with_decay( + model=m, + lr_decay_rate=self.cfg.optim.layerwise_decay, + patch_embed_lr_mult=self.cfg.optim.patch_embed_lr_mult, + ) + fused_params_groups = fuse_params_groups(params_groups) + logger.info("fusing param groups") + + for g in fused_params_groups: + g["foreach"] = True + return fused_params_groups + + def get_params_groups(self): + all_params_groups = [] + for m in self.student.values(): + all_params_groups += self.get_maybe_fused_params_for_submodel(m) + return all_params_groups + + def prepare_for_distributed_training(self): + logger.info("DISTRIBUTED FSDP -- preparing model for distributed training") + if has_batchnorms(self.student): + raise NotImplementedError + # below will synchronize all student subnetworks across gpus: + for k, v in self.student.items(): + self.teacher[k].load_state_dict(self.student[k].state_dict()) + student_model_cfg = self.cfg.compute_precision.student[k] + self.student[k] = get_fsdp_wrapper(student_model_cfg, modules_to_wrap={BlockChunk})(self.student[k]) + teacher_model_cfg = self.cfg.compute_precision.teacher[k] + self.teacher[k] = get_fsdp_wrapper(teacher_model_cfg, modules_to_wrap={BlockChunk})(self.teacher[k]) diff --git a/engine/pose_estimation/dinov2/dinov2/train/train.py b/engine/pose_estimation/dinov2/dinov2/train/train.py new file mode 100644 index 0000000000000000000000000000000000000000..473b8d01473654182de9f91c94a2d8720fe096a5 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/train/train.py @@ -0,0 +1,318 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
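# Hedged sketch of the per-iteration schedule pattern used by the training script
# below; an editorial illustration, not part of the upstream file. It assumes only
# the CosineScheduler interface visible in build_schedulers/do_train (keyword
# construction plus integer indexing); the constants are placeholders, not the
# released training settings.
def _schedule_demo():
    from dinov2.utils.utils import CosineScheduler

    epochs, epoch_length = 10, 100
    lr_schedule = CosineScheduler(
        base_value=1e-3,
        final_value=1e-6,
        total_iters=epochs * epoch_length,
        warmup_iters=1 * epoch_length,  # linear warmup over the first "epoch"
        start_warmup_value=0,
    )
    # do_train simply looks up one value per optimizer step: lr = lr_schedule[iteration]
    return [lr_schedule[it] for it in (0, 50, 100, 500, 999)]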
+ +import argparse +import logging +import math +import os +from functools import partial + +from fvcore.common.checkpoint import PeriodicCheckpointer +import torch + +from dinov2.data import SamplerType, make_data_loader, make_dataset +from dinov2.data import collate_data_and_cast, DataAugmentationDINO, MaskingGenerator +import dinov2.distributed as distributed +from dinov2.fsdp import FSDPCheckpointer +from dinov2.logging import MetricLogger +from dinov2.utils.config import setup +from dinov2.utils.utils import CosineScheduler + +from dinov2.train.ssl_meta_arch import SSLMetaArch + + +torch.backends.cuda.matmul.allow_tf32 = True # PyTorch 1.12 sets this to False by default +logger = logging.getLogger("dinov2") + + +def get_args_parser(add_help: bool = True): + parser = argparse.ArgumentParser("DINOv2 training", add_help=add_help) + parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file") + parser.add_argument( + "--no-resume", + action="store_true", + help="Whether to not attempt to resume from the checkpoint directory. ", + ) + parser.add_argument("--eval-only", action="store_true", help="perform evaluation only") + parser.add_argument("--eval", type=str, default="", help="Eval type to perform") + parser.add_argument( + "opts", + help=""" +Modify config options at the end of the command. For Yacs configs, use +space-separated "PATH.KEY VALUE" pairs. +For python-based LazyConfig, use "path.key=value". + """.strip(), + default=None, + nargs=argparse.REMAINDER, + ) + parser.add_argument( + "--output-dir", + "--output_dir", + default="", + type=str, + help="Output directory to save logs and checkpoints", + ) + + return parser + + +def build_optimizer(cfg, params_groups): + return torch.optim.AdamW(params_groups, betas=(cfg.optim.adamw_beta1, cfg.optim.adamw_beta2)) + + +def build_schedulers(cfg): + OFFICIAL_EPOCH_LENGTH = cfg.train.OFFICIAL_EPOCH_LENGTH + lr = dict( + base_value=cfg.optim["lr"], + final_value=cfg.optim["min_lr"], + total_iters=cfg.optim["epochs"] * OFFICIAL_EPOCH_LENGTH, + warmup_iters=cfg.optim["warmup_epochs"] * OFFICIAL_EPOCH_LENGTH, + start_warmup_value=0, + ) + wd = dict( + base_value=cfg.optim["weight_decay"], + final_value=cfg.optim["weight_decay_end"], + total_iters=cfg.optim["epochs"] * OFFICIAL_EPOCH_LENGTH, + ) + momentum = dict( + base_value=cfg.teacher["momentum_teacher"], + final_value=cfg.teacher["final_momentum_teacher"], + total_iters=cfg.optim["epochs"] * OFFICIAL_EPOCH_LENGTH, + ) + teacher_temp = dict( + base_value=cfg.teacher["teacher_temp"], + final_value=cfg.teacher["teacher_temp"], + total_iters=cfg.teacher["warmup_teacher_temp_epochs"] * OFFICIAL_EPOCH_LENGTH, + warmup_iters=cfg.teacher["warmup_teacher_temp_epochs"] * OFFICIAL_EPOCH_LENGTH, + start_warmup_value=cfg.teacher["warmup_teacher_temp"], + ) + + lr_schedule = CosineScheduler(**lr) + wd_schedule = CosineScheduler(**wd) + momentum_schedule = CosineScheduler(**momentum) + teacher_temp_schedule = CosineScheduler(**teacher_temp) + last_layer_lr_schedule = CosineScheduler(**lr) + + last_layer_lr_schedule.schedule[ + : cfg.optim["freeze_last_layer_epochs"] * OFFICIAL_EPOCH_LENGTH + ] = 0 # mimicking the original schedules + + logger.info("Schedulers ready.") + + return ( + lr_schedule, + wd_schedule, + momentum_schedule, + teacher_temp_schedule, + last_layer_lr_schedule, + ) + + +def apply_optim_scheduler(optimizer, lr, wd, last_layer_lr): + for param_group in optimizer.param_groups: + is_last_layer = param_group["is_last_layer"] + lr_multiplier = 
param_group["lr_multiplier"] + wd_multiplier = param_group["wd_multiplier"] + param_group["weight_decay"] = wd * wd_multiplier + param_group["lr"] = (last_layer_lr if is_last_layer else lr) * lr_multiplier + + +def do_test(cfg, model, iteration): + new_state_dict = model.teacher.state_dict() + + if distributed.is_main_process(): + iterstring = str(iteration) + eval_dir = os.path.join(cfg.train.output_dir, "eval", iterstring) + os.makedirs(eval_dir, exist_ok=True) + # save teacher checkpoint + teacher_ckp_path = os.path.join(eval_dir, "teacher_checkpoint.pth") + torch.save({"teacher": new_state_dict}, teacher_ckp_path) + + +def do_train(cfg, model, resume=False): + model.train() + inputs_dtype = torch.half + fp16_scaler = model.fp16_scaler # for mixed precision training + + # setup optimizer + + optimizer = build_optimizer(cfg, model.get_params_groups()) + ( + lr_schedule, + wd_schedule, + momentum_schedule, + teacher_temp_schedule, + last_layer_lr_schedule, + ) = build_schedulers(cfg) + + # checkpointer + checkpointer = FSDPCheckpointer(model, cfg.train.output_dir, optimizer=optimizer, save_to_disk=True) + + start_iter = checkpointer.resume_or_load(cfg.MODEL.WEIGHTS, resume=resume).get("iteration", -1) + 1 + + OFFICIAL_EPOCH_LENGTH = cfg.train.OFFICIAL_EPOCH_LENGTH + max_iter = cfg.optim.epochs * OFFICIAL_EPOCH_LENGTH + + periodic_checkpointer = PeriodicCheckpointer( + checkpointer, + period=3 * OFFICIAL_EPOCH_LENGTH, + max_iter=max_iter, + max_to_keep=3, + ) + + # setup data preprocessing + + img_size = cfg.crops.global_crops_size + patch_size = cfg.student.patch_size + n_tokens = (img_size // patch_size) ** 2 + mask_generator = MaskingGenerator( + input_size=(img_size // patch_size, img_size // patch_size), + max_num_patches=0.5 * img_size // patch_size * img_size // patch_size, + ) + + data_transform = DataAugmentationDINO( + cfg.crops.global_crops_scale, + cfg.crops.local_crops_scale, + cfg.crops.local_crops_number, + global_crops_size=cfg.crops.global_crops_size, + local_crops_size=cfg.crops.local_crops_size, + ) + + collate_fn = partial( + collate_data_and_cast, + mask_ratio_tuple=cfg.ibot.mask_ratio_min_max, + mask_probability=cfg.ibot.mask_sample_probability, + n_tokens=n_tokens, + mask_generator=mask_generator, + dtype=inputs_dtype, + ) + + # setup data loader + + dataset = make_dataset( + dataset_str=cfg.train.dataset_path, + transform=data_transform, + target_transform=lambda _: (), + ) + # sampler_type = SamplerType.INFINITE + sampler_type = SamplerType.SHARDED_INFINITE + data_loader = make_data_loader( + dataset=dataset, + batch_size=cfg.train.batch_size_per_gpu, + num_workers=cfg.train.num_workers, + shuffle=True, + seed=start_iter, # TODO: Fix this -- cfg.train.seed + sampler_type=sampler_type, + sampler_advance=0, # TODO(qas): fix this -- start_iter * cfg.train.batch_size_per_gpu, + drop_last=True, + collate_fn=collate_fn, + ) + + # training loop + + iteration = start_iter + + logger.info("Starting training from iteration {}".format(start_iter)) + metrics_file = os.path.join(cfg.train.output_dir, "training_metrics.json") + metric_logger = MetricLogger(delimiter=" ", output_file=metrics_file) + header = "Training" + + for data in metric_logger.log_every( + data_loader, + 10, + header, + max_iter, + start_iter, + ): + current_batch_size = data["collated_global_crops"].shape[0] / 2 + if iteration > max_iter: + return + + # apply schedules + + lr = lr_schedule[iteration] + wd = wd_schedule[iteration] + mom = momentum_schedule[iteration] + teacher_temp = 
teacher_temp_schedule[iteration] + last_layer_lr = last_layer_lr_schedule[iteration] + apply_optim_scheduler(optimizer, lr, wd, last_layer_lr) + + # compute losses + + optimizer.zero_grad(set_to_none=True) + loss_dict = model.forward_backward(data, teacher_temp=teacher_temp) + + # clip gradients + + if fp16_scaler is not None: + if cfg.optim.clip_grad: + fp16_scaler.unscale_(optimizer) + for v in model.student.values(): + v.clip_grad_norm_(cfg.optim.clip_grad) + fp16_scaler.step(optimizer) + fp16_scaler.update() + else: + if cfg.optim.clip_grad: + for v in model.student.values(): + v.clip_grad_norm_(cfg.optim.clip_grad) + optimizer.step() + + # perform teacher EMA update + + model.update_teacher(mom) + + # logging + + if distributed.get_global_size() > 1: + for v in loss_dict.values(): + torch.distributed.all_reduce(v) + loss_dict_reduced = {k: v.item() / distributed.get_global_size() for k, v in loss_dict.items()} + + if math.isnan(sum(loss_dict_reduced.values())): + logger.info("NaN detected") + raise AssertionError + losses_reduced = sum(loss for loss in loss_dict_reduced.values()) + + metric_logger.update(lr=lr) + metric_logger.update(wd=wd) + metric_logger.update(mom=mom) + metric_logger.update(last_layer_lr=last_layer_lr) + metric_logger.update(current_batch_size=current_batch_size) + metric_logger.update(total_loss=losses_reduced, **loss_dict_reduced) + + # checkpointing and testing + + if cfg.evaluation.eval_period_iterations > 0 and (iteration + 1) % cfg.evaluation.eval_period_iterations == 0: + do_test(cfg, model, f"training_{iteration}") + torch.cuda.synchronize() + periodic_checkpointer.step(iteration) + + iteration = iteration + 1 + metric_logger.synchronize_between_processes() + return {k: meter.global_avg for k, meter in metric_logger.meters.items()} + + +def main(args): + cfg = setup(args) + + model = SSLMetaArch(cfg).to(torch.device("cuda")) + model.prepare_for_distributed_training() + + logger.info("Model:\n{}".format(model)) + if args.eval_only: + iteration = ( + FSDPCheckpointer(model, save_dir=cfg.train.output_dir) + .resume_or_load(cfg.MODEL.WEIGHTS, resume=not args.no_resume) + .get("iteration", -1) + + 1 + ) + return do_test(cfg, model, f"manual_{iteration}") + + do_train(cfg, model, resume=not args.no_resume) + + +if __name__ == "__main__": + args = get_args_parser(add_help=True).parse_args() + main(args) diff --git a/engine/pose_estimation/dinov2/dinov2/utils/__init__.py b/engine/pose_estimation/dinov2/dinov2/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b88da6bf80be92af00b72dfdb0a806fa64a7a2d9 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/utils/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. diff --git a/engine/pose_estimation/dinov2/dinov2/utils/cluster.py b/engine/pose_estimation/dinov2/dinov2/utils/cluster.py new file mode 100644 index 0000000000000000000000000000000000000000..3df87dc3e1eb4f0f8a280dc3137cfef031886314 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/utils/cluster.py @@ -0,0 +1,95 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
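As a small, self-contained illustration of the per-iteration schedule application above: each param group carries lr_multiplier, wd_multiplier and is_last_layer entries (filled in by get_params_groups_with_decay later in this diff), and the loop only rescales them. Toy values below are illustrative:

import torch

params = [torch.nn.Parameter(torch.zeros(2))]
opt = torch.optim.AdamW(
    [{"params": params, "lr_multiplier": 0.5, "wd_multiplier": 0.0, "is_last_layer": False}],
    lr=0.0,
)

def apply_optim_scheduler(optimizer, lr, wd, last_layer_lr):
    # same logic as the function above
    for param_group in optimizer.param_groups:
        is_last_layer = param_group["is_last_layer"]
        param_group["weight_decay"] = wd * param_group["wd_multiplier"]
        param_group["lr"] = (last_layer_lr if is_last_layer else lr) * param_group["lr_multiplier"]

apply_optim_scheduler(opt, lr=1e-3, wd=0.04, last_layer_lr=0.0)
print(opt.param_groups[0]["lr"], opt.param_groups[0]["weight_decay"])  # 0.0005 0.0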
+ +from enum import Enum +import os +from pathlib import Path +from typing import Any, Dict, Optional + + +class ClusterType(Enum): + AWS = "aws" + FAIR = "fair" + RSC = "rsc" + + +def _guess_cluster_type() -> ClusterType: + uname = os.uname() + if uname.sysname == "Linux": + if uname.release.endswith("-aws"): + # Linux kernel versions on AWS instances are of the form "5.4.0-1051-aws" + return ClusterType.AWS + elif uname.nodename.startswith("rsc"): + # Linux kernel versions on RSC instances are standard ones but hostnames start with "rsc" + return ClusterType.RSC + + return ClusterType.FAIR + + +def get_cluster_type(cluster_type: Optional[ClusterType] = None) -> Optional[ClusterType]: + if cluster_type is None: + return _guess_cluster_type() + + return cluster_type + + +def get_checkpoint_path(cluster_type: Optional[ClusterType] = None) -> Optional[Path]: + cluster_type = get_cluster_type(cluster_type) + if cluster_type is None: + return None + + CHECKPOINT_DIRNAMES = { + ClusterType.AWS: "checkpoints", + ClusterType.FAIR: "checkpoint", + ClusterType.RSC: "checkpoint/dino", + } + return Path("/") / CHECKPOINT_DIRNAMES[cluster_type] + + +def get_user_checkpoint_path(cluster_type: Optional[ClusterType] = None) -> Optional[Path]: + checkpoint_path = get_checkpoint_path(cluster_type) + if checkpoint_path is None: + return None + + username = os.environ.get("USER") + assert username is not None + return checkpoint_path / username + + +def get_slurm_partition(cluster_type: Optional[ClusterType] = None) -> Optional[str]: + cluster_type = get_cluster_type(cluster_type) + if cluster_type is None: + return None + + SLURM_PARTITIONS = { + ClusterType.AWS: "learnlab", + ClusterType.FAIR: "learnlab", + ClusterType.RSC: "learn", + } + return SLURM_PARTITIONS[cluster_type] + + +def get_slurm_executor_parameters( + nodes: int, num_gpus_per_node: int, cluster_type: Optional[ClusterType] = None, **kwargs +) -> Dict[str, Any]: + # create default parameters + params = { + "mem_gb": 0, # Requests all memory on a node, see https://slurm.schedmd.com/sbatch.html + "gpus_per_node": num_gpus_per_node, + "tasks_per_node": num_gpus_per_node, # one task per GPU + "cpus_per_task": 10, + "nodes": nodes, + "slurm_partition": get_slurm_partition(cluster_type), + } + # apply cluster-specific adjustments + cluster_type = get_cluster_type(cluster_type) + if cluster_type == ClusterType.AWS: + params["cpus_per_task"] = 12 + del params["mem_gb"] + elif cluster_type == ClusterType.RSC: + params["cpus_per_task"] = 12 + # set additional parameters / apply overrides + params.update(kwargs) + return params diff --git a/engine/pose_estimation/dinov2/dinov2/utils/config.py b/engine/pose_estimation/dinov2/dinov2/utils/config.py new file mode 100644 index 0000000000000000000000000000000000000000..c9de578787bbcb376f8bd5a782206d0eb7ec1f52 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/utils/config.py @@ -0,0 +1,72 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
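A hedged usage sketch for the cluster helper above (assuming the dinov2 package from this diff is importable; the expected values simply follow the branches in the code, not an external source):

from dinov2.utils.cluster import ClusterType, get_slurm_executor_parameters

# On the AWS branch the helper drops "mem_gb" and bumps cpus_per_task to 12.
params = get_slurm_executor_parameters(nodes=2, num_gpus_per_node=8, cluster_type=ClusterType.AWS)
print(params["gpus_per_node"], params["tasks_per_node"], params["cpus_per_task"])  # 8 8 12
print(params["slurm_partition"])  # "learnlab"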
+ +import math +import logging +import os + +from omegaconf import OmegaConf + +import dinov2.distributed as distributed +from dinov2.logging import setup_logging +from dinov2.utils import utils +from dinov2.configs import dinov2_default_config + + +logger = logging.getLogger("dinov2") + + +def apply_scaling_rules_to_cfg(cfg): # to fix + if cfg.optim.scaling_rule == "sqrt_wrt_1024": + base_lr = cfg.optim.base_lr + cfg.optim.lr = base_lr + cfg.optim.lr *= math.sqrt(cfg.train.batch_size_per_gpu * distributed.get_global_size() / 1024.0) + logger.info(f"sqrt scaling learning rate; base: {base_lr}, new: {cfg.optim.lr}") + else: + raise NotImplementedError + return cfg + + +def write_config(cfg, output_dir, name="config.yaml"): + logger.info(OmegaConf.to_yaml(cfg)) + saved_cfg_path = os.path.join(output_dir, name) + with open(saved_cfg_path, "w") as f: + OmegaConf.save(config=cfg, f=f) + return saved_cfg_path + + +def get_cfg_from_args(args): + args.output_dir = os.path.abspath(args.output_dir) + args.opts += [f"train.output_dir={args.output_dir}"] + default_cfg = OmegaConf.create(dinov2_default_config) + cfg = OmegaConf.load(args.config_file) + cfg = OmegaConf.merge(default_cfg, cfg, OmegaConf.from_cli(args.opts)) + return cfg + + +def default_setup(args): + distributed.enable(overwrite=True) + seed = getattr(args, "seed", 0) + rank = distributed.get_global_rank() + + global logger + setup_logging(output=args.output_dir, level=logging.INFO) + logger = logging.getLogger("dinov2") + + utils.fix_random_seeds(seed + rank) + logger.info("git:\n {}\n".format(utils.get_sha())) + logger.info("\n".join("%s: %s" % (k, str(v)) for k, v in sorted(dict(vars(args)).items()))) + + +def setup(args): + """ + Create configs and perform basic setups. + """ + cfg = get_cfg_from_args(args) + os.makedirs(args.output_dir, exist_ok=True) + default_setup(args) + apply_scaling_rules_to_cfg(cfg) + write_config(cfg, args.output_dir) + return cfg diff --git a/engine/pose_estimation/dinov2/dinov2/utils/dtype.py b/engine/pose_estimation/dinov2/dinov2/utils/dtype.py new file mode 100644 index 0000000000000000000000000000000000000000..80f4cd74d99faa2731dbe9f8d3a13d71b3f8e3a8 --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/utils/dtype.py @@ -0,0 +1,37 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
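A quick worked example of the sqrt_wrt_1024 scaling rule above (the numbers are illustrative, not defaults from any config):

import math

def scaled_lr(base_lr, batch_size_per_gpu, world_size):
    # mirrors apply_scaling_rules_to_cfg above for scaling_rule == "sqrt_wrt_1024"
    return base_lr * math.sqrt(batch_size_per_gpu * world_size / 1024.0)

print(scaled_lr(4e-3, 32, 32))  # 0.004   (effective batch 1024 -> unchanged)
print(scaled_lr(4e-3, 32, 64))  # ~0.00566 (effective batch 2048 -> scaled by sqrt(2))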
+ + +from typing import Dict, Union + +import numpy as np +import torch + + +TypeSpec = Union[str, np.dtype, torch.dtype] + + +_NUMPY_TO_TORCH_DTYPE: Dict[np.dtype, torch.dtype] = { + np.dtype("bool"): torch.bool, + np.dtype("uint8"): torch.uint8, + np.dtype("int8"): torch.int8, + np.dtype("int16"): torch.int16, + np.dtype("int32"): torch.int32, + np.dtype("int64"): torch.int64, + np.dtype("float16"): torch.float16, + np.dtype("float32"): torch.float32, + np.dtype("float64"): torch.float64, + np.dtype("complex64"): torch.complex64, + np.dtype("complex128"): torch.complex128, +} + + +def as_torch_dtype(dtype: TypeSpec) -> torch.dtype: + if isinstance(dtype, torch.dtype): + return dtype + if isinstance(dtype, str): + dtype = np.dtype(dtype) + assert isinstance(dtype, np.dtype), f"Expected an instance of nunpy dtype, got {type(dtype)}" + return _NUMPY_TO_TORCH_DTYPE[dtype] diff --git a/engine/pose_estimation/dinov2/dinov2/utils/param_groups.py b/engine/pose_estimation/dinov2/dinov2/utils/param_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..9a5d2ff627cddadc222e5f836864ee39c865208f --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/utils/param_groups.py @@ -0,0 +1,103 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from collections import defaultdict +import logging + + +logger = logging.getLogger("dinov2") + + +def get_vit_lr_decay_rate(name, lr_decay_rate=1.0, num_layers=12, force_is_backbone=False, chunked_blocks=False): + """ + Calculate lr decay rate for different ViT blocks. + Args: + name (string): parameter name. + lr_decay_rate (float): base lr decay rate. + num_layers (int): number of ViT blocks. + Returns: + lr decay rate for the given parameter. + """ + layer_id = num_layers + 1 + if name.startswith("backbone") or force_is_backbone: + if ( + ".pos_embed" in name + or ".patch_embed" in name + or ".mask_token" in name + or ".cls_token" in name + or ".register_tokens" in name + ): + layer_id = 0 + elif force_is_backbone and ( + "pos_embed" in name + or "patch_embed" in name + or "mask_token" in name + or "cls_token" in name + or "register_tokens" in name + ): + layer_id = 0 + elif ".blocks." in name and ".residual." not in name: + layer_id = int(name[name.find(".blocks.") :].split(".")[2]) + 1 + elif chunked_blocks and "blocks." in name and "residual." not in name: + layer_id = int(name[name.find("blocks.") :].split(".")[2]) + 1 + elif "blocks." in name and "residual." 
not in name: + layer_id = int(name[name.find("blocks.") :].split(".")[1]) + 1 + + return lr_decay_rate ** (num_layers + 1 - layer_id) + + +def get_params_groups_with_decay(model, lr_decay_rate=1.0, patch_embed_lr_mult=1.0): + chunked_blocks = False + if hasattr(model, "n_blocks"): + logger.info("chunked fsdp") + n_blocks = model.n_blocks + chunked_blocks = model.chunked_blocks + elif hasattr(model, "blocks"): + logger.info("first code branch") + n_blocks = len(model.blocks) + elif hasattr(model, "backbone"): + logger.info("second code branch") + n_blocks = len(model.backbone.blocks) + else: + logger.info("else code branch") + n_blocks = 0 + all_param_groups = [] + + for name, param in model.named_parameters(): + name = name.replace("_fsdp_wrapped_module.", "") + if not param.requires_grad: + continue + decay_rate = get_vit_lr_decay_rate( + name, lr_decay_rate, num_layers=n_blocks, force_is_backbone=n_blocks > 0, chunked_blocks=chunked_blocks + ) + d = {"params": param, "is_last_layer": False, "lr_multiplier": decay_rate, "wd_multiplier": 1.0, "name": name} + + if "last_layer" in name: + d.update({"is_last_layer": True}) + + if name.endswith(".bias") or "norm" in name or "gamma" in name: + d.update({"wd_multiplier": 0.0}) + + if "patch_embed" in name: + d.update({"lr_multiplier": d["lr_multiplier"] * patch_embed_lr_mult}) + + all_param_groups.append(d) + logger.info(f"""{name}: lr_multiplier: {d["lr_multiplier"]}, wd_multiplier: {d["wd_multiplier"]}""") + + return all_param_groups + + +def fuse_params_groups(all_params_groups, keys=("lr_multiplier", "wd_multiplier", "is_last_layer")): + fused_params_groups = defaultdict(lambda: {"params": []}) + for d in all_params_groups: + identifier = "" + for k in keys: + identifier += k + str(d[k]) + "_" + + for k in keys: + fused_params_groups[identifier][k] = d[k] + fused_params_groups[identifier]["params"].append(d["params"]) + + return fused_params_groups.values() diff --git a/engine/pose_estimation/dinov2/dinov2/utils/utils.py b/engine/pose_estimation/dinov2/dinov2/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..68f8e2c3be5f780bbb7e00359b5ac4fd0ba0785f --- /dev/null +++ b/engine/pose_estimation/dinov2/dinov2/utils/utils.py @@ -0,0 +1,95 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
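For intuition on the layerwise decay above: the multiplier is lr_decay_rate ** (num_layers + 1 - layer_id), so earlier blocks get smaller learning rates. A small numeric check with assumed settings (decay 0.9, 12 blocks):

decay, num_layers = 0.9, 12

def lr_multiplier(layer_id):
    # same formula returned by get_vit_lr_decay_rate above
    return decay ** (num_layers + 1 - layer_id)

print(round(lr_multiplier(0), 3))               # pos_embed / patch_embed / cls_token: 0.254
print(round(lr_multiplier(1), 3))               # first transformer block: 0.282
print(round(lr_multiplier(num_layers + 1), 3))  # params outside the blocks: 1.0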
+ +import logging +import os +import random +import subprocess +from urllib.parse import urlparse + +import numpy as np +import torch +from torch import nn + + +logger = logging.getLogger("dinov2") + + +def load_pretrained_weights(model, pretrained_weights, checkpoint_key): + if urlparse(pretrained_weights).scheme: # If it looks like an URL + state_dict = torch.hub.load_state_dict_from_url(pretrained_weights, map_location="cpu") + else: + state_dict = torch.load(pretrained_weights, map_location="cpu") + if checkpoint_key is not None and checkpoint_key in state_dict: + logger.info(f"Take key {checkpoint_key} in provided checkpoint dict") + state_dict = state_dict[checkpoint_key] + # remove `module.` prefix + state_dict = {k.replace("module.", ""): v for k, v in state_dict.items()} + # remove `backbone.` prefix induced by multicrop wrapper + state_dict = {k.replace("backbone.", ""): v for k, v in state_dict.items()} + msg = model.load_state_dict(state_dict, strict=False) + logger.info("Pretrained weights found at {} and loaded with msg: {}".format(pretrained_weights, msg)) + + +def fix_random_seeds(seed=31): + """ + Fix random seeds. + """ + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + np.random.seed(seed) + random.seed(seed) + + +def get_sha(): + cwd = os.path.dirname(os.path.abspath(__file__)) + + def _run(command): + return subprocess.check_output(command, cwd=cwd).decode("ascii").strip() + + sha = "N/A" + diff = "clean" + branch = "N/A" + try: + sha = _run(["git", "rev-parse", "HEAD"]) + subprocess.check_output(["git", "diff"], cwd=cwd) + diff = _run(["git", "diff-index", "HEAD"]) + diff = "has uncommitted changes" if diff else "clean" + branch = _run(["git", "rev-parse", "--abbrev-ref", "HEAD"]) + except Exception: + pass + message = f"sha: {sha}, status: {diff}, branch: {branch}" + return message + + +class CosineScheduler(object): + def __init__(self, base_value, final_value, total_iters, warmup_iters=0, start_warmup_value=0, freeze_iters=0): + super().__init__() + self.final_value = final_value + self.total_iters = total_iters + + freeze_schedule = np.zeros((freeze_iters)) + + warmup_schedule = np.linspace(start_warmup_value, base_value, warmup_iters) + + iters = np.arange(total_iters - warmup_iters - freeze_iters) + schedule = final_value + 0.5 * (base_value - final_value) * (1 + np.cos(np.pi * iters / len(iters))) + self.schedule = np.concatenate((freeze_schedule, warmup_schedule, schedule)) + + assert len(self.schedule) == self.total_iters + + def __getitem__(self, it): + if it >= self.total_iters: + return self.final_value + else: + return self.schedule[it] + + +def has_batchnorms(model): + bn_types = (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d, nn.SyncBatchNorm) + for name, module in model.named_modules(): + if isinstance(module, bn_types): + return True + return False diff --git a/engine/pose_estimation/dinov2/hubconf.py b/engine/pose_estimation/dinov2/hubconf.py new file mode 100644 index 0000000000000000000000000000000000000000..d3664e2cc4846b065a99eb5080fb598b7b6c9319 --- /dev/null +++ b/engine/pose_estimation/dinov2/hubconf.py @@ -0,0 +1,15 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. 
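A usage sketch for the CosineScheduler above (assuming dinov2.utils.utils from this diff is importable; the values are illustrative):

from dinov2.utils.utils import CosineScheduler

lr = CosineScheduler(base_value=1e-3, final_value=1e-6, total_iters=1000, warmup_iters=100)
print(lr[0])     # 0.0    (start of the linear warmup)
print(lr[100])   # ~1e-3  (first cosine step, back at base_value)
print(lr[500])   # between base_value and final_value on the cosine curve
print(lr[5000])  # 1e-6   (indices past total_iters return final_value)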
+ + +from dinov2.hub.backbones import dinov2_vitb14, dinov2_vitg14, dinov2_vitl14, dinov2_vits14 +from dinov2.hub.backbones import dinov2_vitb14_reg, dinov2_vitg14_reg, dinov2_vitl14_reg, dinov2_vits14_reg +from dinov2.hub.classifiers import dinov2_vitb14_lc, dinov2_vitg14_lc, dinov2_vitl14_lc, dinov2_vits14_lc +from dinov2.hub.classifiers import dinov2_vitb14_reg_lc, dinov2_vitg14_reg_lc, dinov2_vitl14_reg_lc, dinov2_vits14_reg_lc +from dinov2.hub.depthers import dinov2_vitb14_ld, dinov2_vitg14_ld, dinov2_vitl14_ld, dinov2_vits14_ld +from dinov2.hub.depthers import dinov2_vitb14_dd, dinov2_vitg14_dd, dinov2_vitl14_dd, dinov2_vits14_dd + + +dependencies = ["torch"] diff --git a/engine/pose_estimation/dinov2/notebooks/depth_estimation.ipynb b/engine/pose_estimation/dinov2/notebooks/depth_estimation.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..717739a3c93d3077426c14e6af86c1aa45365053 --- /dev/null +++ b/engine/pose_estimation/dinov2/notebooks/depth_estimation.ipynb @@ -0,0 +1,482 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "# Copyright (c) Meta Platforms, Inc. and affiliates." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Depth Estimation \"Open" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "\n", + "INSTALL = False # Switch this to install dependencies\n", + "if INSTALL: # Try installing package with extras\n", + " REPO_URL = \"https://github.com/facebookresearch/dinov2\"\n", + " !{sys.executable} -m pip install -e {REPO_URL}'[extras]' --extra-index-url https://download.pytorch.org/whl/cu117 --extra-index-url https://pypi.nvidia.com\n", + "else:\n", + " REPO_PATH = \"\" # Specify a local path to the repository (or use installed package instead)\n", + " sys.path.append(REPO_PATH)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Utilities" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import math\n", + "import itertools\n", + "from functools import partial\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "\n", + "from dinov2.eval.depth.models import build_depther\n", + "\n", + "\n", + "class CenterPadding(torch.nn.Module):\n", + " def __init__(self, multiple):\n", + " super().__init__()\n", + " self.multiple = multiple\n", + "\n", + " def _get_pad(self, size):\n", + " new_size = math.ceil(size / self.multiple) * self.multiple\n", + " pad_size = new_size - size\n", + " pad_size_left = pad_size // 2\n", + " pad_size_right = pad_size - pad_size_left\n", + " return pad_size_left, pad_size_right\n", + "\n", + " @torch.inference_mode()\n", + " def forward(self, x):\n", + " pads = list(itertools.chain.from_iterable(self._get_pad(m) for m in x.shape[:1:-1]))\n", + " output = F.pad(x, pads)\n", + " return output\n", + "\n", + "\n", + "def create_depther(cfg, backbone_model, backbone_size, head_type):\n", + " train_cfg = cfg.get(\"train_cfg\")\n", + " test_cfg = cfg.get(\"test_cfg\")\n", + " depther = build_depther(cfg.model, train_cfg=train_cfg, test_cfg=test_cfg)\n", + "\n", + " depther.backbone.forward = partial(\n", + " backbone_model.get_intermediate_layers,\n", + " n=cfg.model.backbone.out_indices,\n", + " reshape=True,\n", + " return_class_token=cfg.model.backbone.output_cls_token,\n", + " norm=cfg.model.backbone.final_norm,\n", + " )\n", + "\n", + " if 
hasattr(backbone_model, \"patch_size\"):\n", + " depther.backbone.register_forward_pre_hook(lambda _, x: CenterPadding(backbone_model.patch_size)(x[0]))\n", + "\n", + " return depther" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load pretrained backbone" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using cache found in /private/home/plabatut/.cache/torch/hub/facebookresearch_dinov2_main\n" + ] + }, + { + "data": { + "text/plain": [ + "DinoVisionTransformer(\n", + " (patch_embed): PatchEmbed(\n", + " (proj): Conv2d(3, 384, kernel_size=(14, 14), stride=(14, 14))\n", + " (norm): Identity()\n", + " )\n", + " (blocks): ModuleList(\n", + " (0-11): 12 x NestedTensorBlock(\n", + " (norm1): LayerNorm((384,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MemEffAttention(\n", + " (qkv): Linear(in_features=384, out_features=1152, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=384, out_features=384, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ls1): LayerScale()\n", + " (drop_path1): Identity()\n", + " (norm2): LayerNorm((384,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): Mlp(\n", + " (fc1): Linear(in_features=384, out_features=1536, bias=True)\n", + " (act): GELU(approximate='none')\n", + " (fc2): Linear(in_features=1536, out_features=384, bias=True)\n", + " (drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ls2): LayerScale()\n", + " (drop_path2): Identity()\n", + " )\n", + " )\n", + " (norm): LayerNorm((384,), eps=1e-06, elementwise_affine=True)\n", + " (head): Identity()\n", + ")" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "BACKBONE_SIZE = \"small\" # in (\"small\", \"base\", \"large\" or \"giant\")\n", + "\n", + "\n", + "backbone_archs = {\n", + " \"small\": \"vits14\",\n", + " \"base\": \"vitb14\",\n", + " \"large\": \"vitl14\",\n", + " \"giant\": \"vitg14\",\n", + "}\n", + "backbone_arch = backbone_archs[BACKBONE_SIZE]\n", + "backbone_name = f\"dinov2_{backbone_arch}\"\n", + "\n", + "backbone_model = torch.hub.load(repo_or_dir=\"facebookresearch/dinov2\", model=backbone_name)\n", + "backbone_model.eval()\n", + "backbone_model.cuda()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load pretrained depth head" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "load checkpoint from http path: https://dl.fbaipublicfiles.com/dinov2/dinov2_vits14/dinov2_vits14_nyu_dpt_head.pth\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Downloading: \"https://dl.fbaipublicfiles.com/dinov2/dinov2_vits14/dinov2_vits14_nyu_dpt_head.pth\" to /private/home/plabatut/.cache/torch/hub/checkpoints/dinov2_vits14_nyu_dpt_head.pth\n", + "100%|██████████| 160M/160M [00:06<00:00, 27.2MB/s] \n" + ] + }, + { + "data": { + "text/plain": [ + "DepthEncoderDecoder(\n", + " (backbone): DinoVisionTransformer()\n", + " (decode_head): DPTHead(\n", + " align_corners=False\n", + " (loss_decode): ModuleList(\n", + " (0): SigLoss()\n", + " (1): GradientLoss()\n", + " )\n", + " (conv_depth): HeadDepth(\n", + " (head): Sequential(\n", + " (0): Conv2d(256, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (1): Interpolate()\n", + " (2): Conv2d(128, 32, 
kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (3): ReLU()\n", + " (4): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " )\n", + " (relu): ReLU()\n", + " (sigmoid): Sigmoid()\n", + " (reassemble_blocks): ReassembleBlocks(\n", + " (projects): ModuleList(\n", + " (0): ConvModule(\n", + " (conv): Conv2d(384, 48, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (1): ConvModule(\n", + " (conv): Conv2d(384, 96, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (2): ConvModule(\n", + " (conv): Conv2d(384, 192, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (3): ConvModule(\n", + " (conv): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " )\n", + " (resize_layers): ModuleList(\n", + " (0): ConvTranspose2d(48, 48, kernel_size=(4, 4), stride=(4, 4))\n", + " (1): ConvTranspose2d(96, 96, kernel_size=(2, 2), stride=(2, 2))\n", + " (2): Identity()\n", + " (3): Conv2d(384, 384, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n", + " )\n", + " (readout_projects): ModuleList(\n", + " (0-3): 4 x Sequential(\n", + " (0): Linear(in_features=768, out_features=384, bias=True)\n", + " (1): GELU(approximate='none')\n", + " )\n", + " )\n", + " )\n", + " (convs): ModuleList(\n", + " (0): ConvModule(\n", + " (conv): Conv2d(48, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (1): ConvModule(\n", + " (conv): Conv2d(96, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (2): ConvModule(\n", + " (conv): Conv2d(192, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " (3): ConvModule(\n", + " (conv): Conv2d(384, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " )\n", + " )\n", + " (fusion_blocks): ModuleList(\n", + " (0): FeatureFusionBlock(\n", + " (project): ConvModule(\n", + " (conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (res_conv_unit1): None\n", + " (res_conv_unit2): PreActResidualConvUnit(\n", + " (conv1): ConvModule(\n", + " (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (activate): ReLU(inplace=True)\n", + " )\n", + " (conv2): ConvModule(\n", + " (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (activate): ReLU(inplace=True)\n", + " )\n", + " )\n", + " )\n", + " (1-3): 3 x FeatureFusionBlock(\n", + " (project): ConvModule(\n", + " (conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (res_conv_unit1): PreActResidualConvUnit(\n", + " (conv1): ConvModule(\n", + " (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (activate): ReLU(inplace=True)\n", + " )\n", + " (conv2): ConvModule(\n", + " (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (activate): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (res_conv_unit2): PreActResidualConvUnit(\n", + " (conv1): ConvModule(\n", + " (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (activate): ReLU(inplace=True)\n", + " )\n", + " (conv2): ConvModule(\n", + " (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (activate): ReLU(inplace=True)\n", + " )\n", + " )\n", + " )\n", + " )\n", + " (project): ConvModule(\n", + " (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (activate): ReLU(inplace=True)\n", + " )\n", + " 
)\n", + ")" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import urllib\n", + "\n", + "import mmcv\n", + "from mmcv.runner import load_checkpoint\n", + "\n", + "\n", + "def load_config_from_url(url: str) -> str:\n", + " with urllib.request.urlopen(url) as f:\n", + " return f.read().decode()\n", + "\n", + "\n", + "HEAD_DATASET = \"nyu\" # in (\"nyu\", \"kitti\")\n", + "HEAD_TYPE = \"dpt\" # in (\"linear\", \"linear4\", \"dpt\")\n", + "\n", + "\n", + "DINOV2_BASE_URL = \"https://dl.fbaipublicfiles.com/dinov2\"\n", + "head_config_url = f\"{DINOV2_BASE_URL}/{backbone_name}/{backbone_name}_{HEAD_DATASET}_{HEAD_TYPE}_config.py\"\n", + "head_checkpoint_url = f\"{DINOV2_BASE_URL}/{backbone_name}/{backbone_name}_{HEAD_DATASET}_{HEAD_TYPE}_head.pth\"\n", + "\n", + "cfg_str = load_config_from_url(head_config_url)\n", + "cfg = mmcv.Config.fromstring(cfg_str, file_format=\".py\")\n", + "\n", + "model = create_depther(\n", + " cfg,\n", + " backbone_model=backbone_model,\n", + " backbone_size=BACKBONE_SIZE,\n", + " head_type=HEAD_TYPE,\n", + ")\n", + "\n", + "load_checkpoint(model, head_checkpoint_url, map_location=\"cpu\")\n", + "model.eval()\n", + "model.cuda()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load sample image" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAoAAAAHgCAIAAAC6s0uzAAABamlDQ1BJQ0MgUHJvZmlsZQAAeJx1kL1Lw1AUxU+rUtA6iA4dHDKJQ9TSCnZxaCsURTBUBatTmn4JbXwkKVJxE1cp+B9YwVlwsIhUcHFwEEQHEd2cOim4aHjel1TaIt7H5f04nHO5XMAbUBkr9gIo6ZaRTMSktdS65HuDh55TqmayqKIsCv79u+vz0fXeT4hZTbt2ENlPXJfOLpd2ngJTf/1d1Z/Jmhr939RBjRkW4JGJlW2LCd4lHjFoKeKq4LzLx4LTLp87npVknPiWWNIKaoa4SSynO/R8B5eKZa21g9jen9VXl8Uc6lHMYRMmGIpQUYEEBeF//NOOP44tcldgUC6PAizKREkRE7LE89ChYRIycQhB6pC4c+t+D637yW1t7xWYbXDOL9raQgM4naGT1dvaeAQYGgBu6kw1VEfqofbmcsD7CTCYAobvKLNh5sIhd3t/DOh74fxjDPAdAnaV868jzu0ahZ+BK/0HFylqvLiAv9gAAQAASURBVHicrP1psHVLlhiErSEz997nnDt845vq1avqGrqGrpJoIfVUomXUjSSwDRFYAyAJQ8jwx+AfKMIORzjCxhgThiAc/mEby9gNEcYOhMMggxAt1JJopJ6kVnd1qau7a3j16s3vG+50pr0z11r+kXvvk2efc+/7GpxR9b1998mdw5pz5cqV+IUvflZVzQwAzExE8jMR5YfxfUrJzMAoxph/ijGqKhEhIiKamZmNz6qqqmeL6uzsjNAuLi6++PnPNU2z3qy6bSsiqppUzMwUFCF/nqRNUdu222wTGANQSl1Vh1deeeQDfeYHPmkmwXHqYkoJAJNKHk/uNA/SOdd1XR45IjRNc3Z2MpvNiIh9x+xjp++9+/Tn/tovv/rKYyMmDADgvd+0681m45wjItXEzITmvQeAPLMMECIa/1VNZOOzMnMGDg5lGAbml1CU8f3hT5M/i/cMAIg71ADu4WvE2tAHImIenoGOLY+Iy5+UYy5byB+OL8cZTcY5/GsTEA0TpLG+Y6+q5Ricc7nxZNp13bvvvvv9d578uT/3p1999VUzQ63UkDxGXfmaJBlTDagEgMaQfHCVwopdUlUwrw5VxHGFyKaYadlQAcBMzQRMCBQNyAAN0gFeiunrBBd5LiMQ8kMGo5qNcC5bI3JjgwPSdmAc+WVkIuiHatbjWjN6mDG1navqrk1J7T/4f/3Fv/W3f/HBo4d/+p/4Iw8e3COPH3548Vf+i59HqjtJ83lzOj996fEnTk7OPvWpT3zms59su9Vq0zbNiaFmBul7J2LmEEJKaT6fr1abDz/8sG1bYHLOzWazGr2BMtu22xDDYrGoqxkmapctdCrruF1vnj1/8vTph8+un3z07KMPbp7xAIBNlMW9xz/2tf9OMz89Pz///pvf/gv/x//DT/3DX/vH//v/WD2rzh8++ps//1/9O//7v/DoweKf/Cf/2OuffDl1aTF/8OGHl//6v/Vvf+ozb3z7zbf+mT/7Tz9+9OrV1c3rr762Wi7/7X/r33z51ddubm6IiBmlLzH3d+/+WdM0koyIRCSE8Du/8+0/+U//s5//wc89OL+3Wq3+6l/9L9rt9vr6crvdMiMRrdfrq6urjIWU1MzEehbOxJmpdMRsxh0zM3OWEgR2yCCIqAqDGFFVHUnLIIKRqmYBWPLL2EKWt1ks7whj6D2/qaoqf+6cy59kVkLE2WzWNE1Gca7jvUfEGKMqZAnZtu1yufTep9SllEQkN66qMUYRy882lPyMiISWTAGAAMEMEIHQJEsaBSa6WX1ufu+HX//U9eVFx+rZzefz0/Oztm1TSllEjMQ/Ch8zBDUAAEK0QiJp/8BEZjKOZ8dl0rNqKbVwmHgPW8D+vwCg3VE5OXm5E4OKAABkvTggbHyYOb8A9/rpvfNqVjE1909WKL/z4TuX3fLvzWs2QIP1ev38ydP19RI7AQCtwjWqgJ1yU3fmgbEJN6l143x2o79V9GehaZlAMyKJKJP72Eimj8m/psk5d3NzU9d1pl1EFBEFy7gfMUFEiAZAZoYAZpJp4v333//BL3y2
[... base64-encoded PNG output of the notebook's sample-image cell omitted ...]
OtiCHW+/iTGGIJT1RRVEohiF9X5ysAjkrFDCGgEIN4xIlCfYtkxRFVtmobQ2nZbVRURx5Solaurqy4KEQVfA0CMURWa+XnbkSk555Jy05wb1uwrDKRmSJ6wZqpDqJm9mTG36/VyvVIC57gKDk/ms2ZWBefNpGlmROxvfCdpu+2ePrmseQzPYWaOra5WKyBCpzkTuPMUKica0aByHkBms5mk7vHjx3/8f/AnfuEXfmE+P+m6zrlexL/+6mu/9mu/8Xt+z+f/h3/mzz54cG+7WTa1r4N3bKAnMcamqZGormvvXWZA3hlLhI6a2puZSAemIbiU5JOvvvTaKy+xr/7cP//P/ev/xr/5iU+98eTZJRGJAlqEfQV2lCBHuWH7q+EJC5S6c8IXE516h4KcjKEUMoelFD6TRo42ng2jPulAnsjg+J3NGibqj0bnDByG46UyqpozGEYV6+966kXoXpkYH1b+8kIL+rLaIax0uF1mgpoXafm27so/CS0BgEM28wrMuG63l+tr8ucJBWeniUG6tkIyBCXcKeBxrKNimMxBBfJdrWW1En87OwuRiPJdpIumSSmZKDomopSSqXniJAkRzSCf57m6uql98N4XopCK2IEjTuZ9ENzlyRm/MrOUJN8YNZD73tp3v00t/bdW+EiPUj8CjWFZh6PF2ze2DxUwTqiwLx8flpXbK8GFByvyu6F0UO7q1I7tkUwoe6zZv3wBpT42i4g5XxIR0X5YnEgqYYiI+a7M8u4mAtB9Bdyjr1B4uP9sZtmkG13ck5olX9iQzgURx8srx8n2kdv7i48JZm9DRL5ApmxQVfPWJiHlvrouJxMFMyR0wbvZLERl81VlMxVm8AAJsSVQSdbMAqI5i13cNlUdY5vv+4sxsguG/OZ336pns4cPHnddUoXZbHF+fk7Y3L83V1iJxM0GiWryC/YOPBoA+UDcMAXvmn7XKcX5fO4cEapncwyzOhCaiHjiUAXvKgDYbDYiutm0rk751LuZVVVF4NsONpsthx5L+f4VMzM1IGZmVTGzx48f/8k/+Sf/w7/4H77y6idyhXtnpx98+N5bb7715/75P/3P/FN/crNZffD+eyCxqfy9s5PNZlP7IBIzagiRmfpVRy+RgQjVJDgPgOg5hNBt28XJSdd1qRNXVX/kH/mpX/21v/dXfvbnzh48kISikYpjdYeIPvrnRJVOvi1fTjh3FBGHIuuQx0cZPmlnUm7TaqUpUPa753MqvkspnZ09qJwHXQH3/MjUHxGcaIrd4G8ZWKGAP2bVCwW/YwEKVYUD8PZgx72Rl/L5xb3Qh1I0z9EhrS2xoxABFS3Jqt3UTTg1BRdwVrWgZubVhDESuHHTF4aAdSuChMfJ5Jjn0i9txdYvIuZwc0TMRzKy25mIYux6t7MomAV2NgTp9a5mAyLabDYpaVU5LW6NLYmgnGQJ04kTckJMmWJK26LrOqCdtDW7TbvsqdKxhXFgRDSxzsqPD78t9SscLI4PPzywBl7QRpu63w8bv+3XcbQlig+HMal5u/nyuyuH9gdkX992O5721iHLmKqOmQszq9udAzhQnHt1yvma9UuUTLRHsWajWw939zTcdpzjgFz3UFBSVP7JMVm+H2w4+AuDe6nPtq0y5KsBRHAuSDIRA/bNLLAycBIQUK/JHDk1REiEsFgsVqsVk54192K39d7PmyqlVFWV85UC1YIu5HSP/VGue/ceVO4kySq4mXpZzE9FquVagm/AKXnyYWboVdgoO3Bxu4lV7WusVTqE5L2r6xoNVGLlfAbs+dn9qlpd3dxkQweGS2dDCHVY0Eq2sdtsNiMMey+xAQKqRu8r5zjG9tVXX/03/rf/u//wP/qL2db/Wz//X/1DP/m1f/1f+1/96B/4/Wcnzc/83/9vb731Fkv85Cc/+ejRo0ePHl1ePq/rEGPsdUnGT5LsvcjIpgHLdfDOOU3RMbqmmc1ove3Oz8/+/J//849eeu3f+Xf/vS988YsffPDBQJl6iGXYF/0lvZUqzQ5s+qMavfx18Ngd8/AVsQvwAqLjNla93TTMnrE+OwJZf7Kgi312MxHNAlZEhBmGXZ7iiqRdnuO9ZgekTETBoQFxtwQbBr8z9UdhW6q2yaR20viFl8ITUTniJaGhZxOBKKrQASjC1dXVvLqPjAnFI0OrQNQROpPhaAfu1Fs+CNF2HRHxsBIlhiRS3siAiESYI7vZ7SKfm2Y2n88JMKWEhDq468wgmYLsLu4FNSInIt5Vl9fLerZAU1AhMAUFtbxrZ7QXXFoSaJSWmcflEWUjVoSdi0mZ2Po4DjSA9Wajlq+N3MUCDDIO9j2u/XWbfUa8Xd+KiGB9CGyPNhwucbIjtF6OdqKY9xC/I50jmB5HNXEGTD4cl+zjSjDfcA491Y5u0mIXZ5+GsDB7tThnxbDjBBp9OAMIYJ+ID/XNdKDjOAx67wUAqOXDQmKm0K/s2m7bdtvFyTylRAD9xhLuw7nXqLvHfvwA5dxg3/Ewni0egTD81F/5rHqYXltFabC2p/Y77q8YDrFTtHM8aBwRO0U0pLzm1/H05uCyYxSzpJoXwapgpMiU1iYiPjhLPio7RnJElSdgwEQQiQ2MT/yp4bXExKFCxBRjZx0BgHGSTkQMVbT2XKmCsfNNUzUu2JmZRTFfLSAmB9uTs1NiL5Kv/nYcGADUoqk1FZsKqqgqEde+IfRmkZmNLFTeIYgIkENq1us1QiRgM1Yx75k5VTO/7eDiaq0KCBUqBPTOjInq4H01BwDPDh3MG/rj/8RPf+1HfiiEIGIfffQ/evXll1577TVJHar+o3/0j/zar/3a3/75X/rlX/36zab7o//oH/nEG6/E2CEqxOQUZy5AEmMfKa1Th96ZI2L2VcPMaEDOzbkGteB93LZnVbXeLn/gpQf/0p/7M3/rb/zsb3/zm5//whuXF2ndboN3XYpkYEm8c5SMGWOMhqaMHLhLHQcvItXWDIG9ayVFNQwOBjdkdk9npjCzzG/EBakP5iHcntytPNde5oef1B+59VCu5nIYA9TzlDfr1CdA4OhwE1hAZsS4hZeAXdcZQYpdxUFUraKmqlMXe21NJGDMrIOuHR1oo1vYBl0Mo1trSHZ7VOmWpozkBILj6nHwNpdb4LTnWOrbINrBwcpL1VEVdremYoGHQwMoj8EZztSwVQW89kpEta/Wqy7N2b90/6rhpQeOMvfEiKEVd2iOAfRyNQck6xDukXee8rKViHIMQp9RnXMuuui9Pz09ndWNqiZJtJ+8wsyw1FsAyATWC8G8W5ZjWqzIvGGmarsgrwlZ5GV3eboLEZk5J/RPpmjgve/atGm3iIyDUiktrPLPscCoY/oyvT9rRNILulXhmMC9o/4tivkuA22fRves78lPhyOZ9GiD8wCL4JH/NuU25XR3UdVxyZLTM8I+/CfD3h//1A8xeX8bV98xztu6e5Fy+O0I3h1N4u7UbzESw7zgHd6MFVLqRAQJnHPENYmjJIb5Pk1wzrFj75iZwFiVAFSCAACIdsCOGMDUkhqpahXCbDZjRnNc1/XlxXXui4iYMptjU0MI
DgCzkUqEkN2wotBPRAHROefyRiAjICMg9Ulb8qiaEEJd1+160253N4oyMzsP5MKmFsmHNwJQHzSeUmrmM+dcCN7XfbLYT3ziE03TAMDrr79uktq2JQJGeuWVVz796U//8O/5fb/0S7/4y3/3l//Sf/L/+Yf+0B/83Oc+651j5wNy3LZ105hZt10HrpQxocyRAhiqEDlPlKfGDtnXm83m/ORs1XYPX3v4F/79f/eP/4k/9dvffeul+y89enC22m4MwAAEjQKmNiqRenVVUEliYmYSk5mBY2TqRIBdHVhMxQBUmcjASodqThG072f9eJesFWViW8MB49xNq2WFwmy1jEYYbv/jYe3iiM0MCBlZVPXgEuKRbPGW3u9QtLeNdjw3u+vilg3vw5d7fPe7LBPg2OAWlXwsy0wBVNVl9zCoI0xoqprViGTRSsN1hGZmYIUVAKOjIMWoqtkllZ1yo75k5nxVXFbMs1zqGhFUxWwMVJoOFwavGhENTAv5VEDlhmkoE4GIHO7DT1YPGfwFKBkRzKKA5ajRbdt1KdroOTzmGJw0u5OMWSoecy1iv/b9eG16WF6kcmkTFAO7q/J+y7m+4oFyzbkdJoM5JP2SH363FHpU+cHvktbNTESyF1qPff6x2vc2NQyFs640uY6O/7axHbWQ7tDch+OfaN/hWwWEEkGTMo7XDABVrUM0ZnSejTwQR4lVVTEFR55YgRQgB1sgszNDh2RkYIbBEaGkzp8szIy9qyuuqjqJtW13/3zRJQBA772IqQEZUyBGMkMmU8k+WzMwQFNTRMsmBCM7T86RIwZCxwigRIhggJrzeIcQGGabzTMcxFYOggvB523dHOadZYUiKlA7hB9777PrOl+HRUSO0NAxs3ckklLqUkpf+vIPvvzKo9c/+cov/MLf+rmf/dlv/9ZvffGLX3ztlVebe/e5mWWBdlKHCNJJ8iAhaiAEJnYuBK+QKcSiSn3StF1nZCnFL3/5i3/pL/3HP/MzP/Mz/6d//+IjiAqvvPp4tVl77wyFKlIDDk3bdWJa+5D1FqhGBgQEdiKSr9F0zPnixd7KHG1+UkMwJcAjCRRvo7dR+95Be6Uyvk0DHb7BYSUNwDYoURNDl+UkmFm73aKqcwyM5Khp+sgA7V3XvQs63waGxYChGHNpN4xvSqf6HXr6timPLyepN+9q5M7zpbdhQQAYnJkoWDIjMBFZtVK/dF8dgRoJIKIwqijbkHg532uB45lFgLw3bP01y2lMzQhGRKSQRhBkX+5isZg3sxB8tltxl89v3we7PwEdcoPlP5fLZbjXkGOQVM6KAdMxFVBS20BJmHKQNiEBS5L1er3tWudcviDlUJWM6qeUwmOzvbm3X7mvf1uiihdYJ31shcPKL/jJ3fxZlL2Agxck6zuGN3meDON31ZrtuR1w03apuDF0orFKaQLFlHAgNhxcxjt+Hv+H2SMDk/Fn7O/rRSyavFXBl7O448+jb0oUqxmTQp+JFqHnUAPrvWk4jAlQzFRARBJxMuvDXrz3hB6AMK+eGRmZHAciE3HOEYHEmhgcYZJ2ubxeb5an/lRSt5FYN3PvsGvbyi/A0DsfyVJUYiQ0NEEgUckpbchyP4ZgqCagDOCcD5z30Y2ySZzv+yUiZudc1qCp5TIiRETU0IUAAFnU5LzfUYUYVPOpIUZEUGvmDQ6R7c65nPpfVdvtmgjzshhRHj++/7Wf+LFXXn70y7/wi3/3V3/l1//Orz5+/Pinfuqnf+j3fJW8FxH2rtskIiBgrLw5R86hD+KdmolGRgpca0xEGtTYaHtx/Xu/8OV/43/xv/4X/sQ/941vfOM/+U//v3/jb/78J157ZRO7dRfJcQIwIGIvUQmAiTEpM1kF+egX5cjhptGYum2bN7mTGYEqgiIo7kIcJ/R5hxq+g+/KyhOuOSxlcBkWxSzpEI0xaE5IMd6bN7OqlvWyD7hSAyDnwuASn94qcbSUCvLwp8N52cGe8ajLj04NC4/mx8i6j8vucGjW538FzAGimhIYghF2KsnspfOF1Z4InKo5TAiQFA1c3ugygEnyZzPbrNfZ7lABZk9EKaa8KmXmEDiltN1uq6q6d3Z+cnKSUsx3X+dwzTILyajPxtWkDbsUjjjXDCHc3NzcO6+Zmcip9tEZhLur5svW9vWuDdtlBgCm/VUEy+Xy6mZdVR6Ru5Scc6bpbmiWxWyaam5ED9EgAF/MV3lY7UVU1KFiuKPyYeOTRg76Gons+GJrj2P1SINld6UKnwz1Be2GQ5qGIg6LmaGwoyf97j782J5usRjKoe4PGDMV9L183FSmqvdOW3tnIhwYCqMRUFqxAJYvERnIH0SiWlKVJF1AREQmxryF3MsipczfiETsAI3JsWNCR+wYY2w3y9X77767Xi/jw80bb7zhqiCSCCD43r2MiIGZLCGyAuVAcTRM+dJfBEJyDIqQ1NQUGdkhM+brPBlzKgJiRnaIiPkaQUSXpwDgGIfU6wqIKKmfYTYmvPfMfRxo27aLk5mZaRLvmcAq70QsJwiLsfXeE2EIzsy62DpzIfhPfuLV6ms//tqrL//Wb/3OW9///n/2V/7zbYo/+KUvnp6eAgCaVgigRrU3x+CCOU4EZgDks9L3oWJ0rqbUJVDU5aZ24Qtf/sKXfvj3RI3/8X/2s89/+7sAAAytAHtIER4+fFjXMxM4nddx2zoXNryKMYpI1dSaZLPZgCgTMaAhjPKyJ4yBZEYqLbXvhGhzoWOZ5HH/gpwDCj9SbrMv8wCSict3RJMCcOq6ew9fbkK1sZtdTYOsCMbxEFF/p431uvuw3xcXcWX98Sschoujs3v484Vb1hc/fVk2i30ctalaPr6at1PblKwJdDYXT6jqECOAECioF3A9dAih2DIUkZwoChEJHYAgIhMpYj5KFGNUQ1ALzs+b2WKxAOhPEo8HvxD7M7o7YTosdjMW8x4zEIL2B7TX63VK2jQVUZrYdIwoB9LVDo5LISJYfyT3erm6Wa2z60IBEVFEx9jl0qyD2wVxEUuX/Xj5p1vXlxPhfludo7g8fDkxMqC3k17kMNK02TtZrk+qONoxL0KsExU70b7ln7d9dbTBnjULsm7bdgj7PKI1x696KO03eDjrEphjtVvG2TvJJhO5zVY4+nKUPrdNfGKyGAgAgHE+TqmqSIRoaKPw7f14+XNVHS+vDJWD5MWQEIgIwZnlc9eWcSJqgbBfQQJ6XzHBzc3V8+fP5/M5ETx9+rRt28zjr732+v1HDyU5M+M+INwhIpEDI0SMTjiBmTH3R3pEwPoIMvAMjokIHCMzsiMy9SEf5hY0QBu3p8wsEaP33hEDucp5GlxozDyb1U0dUC2l2LbbGLuT07lzBKDOVYNHmjNrMDPVtWNyjtq2VVQzCVUI7p5HntXzs5Pzxy+9/Pd/65t/5+/96lbil7/0xUenp/eqGmMkFEnCiN5bPnyeNIs+I0QwVe1A8aSpNzdr6NYQXIddQPkDX/vh//df/AvPn15cX19fXF2/9+4HT54/f/rh02/8xm9dPH3KQJfACqqA1Ul1fn6eUuq6aGbEXM1
mses0X/lqeeFpgOZyhuRej+zswrt586jRX1LsC+rgw14ynogQCA1BEQiAAQFUElSOLYmJAOe03gzZDZOjJo/lnzpuSYyMM478YF5lfSnS+PdC+5YZHe3uYFQvKlrtwPTPA2AANCVCQFMzUWtTnJ/fh8WsAwVVBFAyGVDpYPBu5dSSvRdINWfSoPG6JLPMDL0mVkWDummapqmqyswy32ZmUFUDIUaAne++9NdOhSYCDHHq2+22rutCN+7uPmLAMX3f7tseBNQfKDJS1ZR0025vbm5UwDvqkiCnqqpEpL+Adl9Wjsg79qZM8XjrFHZtGtmxJI6HYvoFGeBQ+u/T2ADeY8cS7mjkcL6HemJk1JENjn5etjBh79K4+VilXo5hbCQr4PHukY9X53d2VMLBBsVb9ljU3DsCvntPmm8NOSxHZVYZZXrbJ5NRDb+YmmGOdCpsVlWgfsAAQxBKtpjrakYuRAEgIGLCAEBmEbkjNgRnyjWDc44BmdE77rotIodmBtqpViLN1dVV27avv/7G48ePRfJWKwzWT0oiAMQOvXecLPXxksiMqpgQmDiRMZJnch6CQ++ZGFElJwf2TACEZFk6j3lnnXOzukIKYpmS+/fM2DRNCM5UnOPtetPF7Xq9NpPZrAYAHxxYBqAxO2Zuu62KInIIQRG896lr2WDx8FFVNWqwODu79+jhr//mN7735ncDWPXGp0/Oz3C15ZSa1AIzzGpoZuA5IAAh5LRfIVRpC2bw/KqpZ9DeAHT1jCD5z33y8edefwxJbLlG53Xbpgg3N6v/9C//lQ8+fLoRebpcRoTltn3zu29/8NFHbbtlx2Imqst2Q0SOyCx7E4AHi4pskNTDnSojrZbUUuyoghXxqhNeLnnwRXQw7LPk8I4Q8+2YWeQaEZHBLFSklnMZJUNvrEmsj+LGcufVjl0OsRv/gaTNSelxn0/v5vHDCneYv8Wvv4uFzfj5pOWALJiP/CQTVdIO7bWXH3FTiQnlFaMqOgJEBXN588YMRgU8psTCIb+P2zvIJQAQQlgsFjnBpIjE2I15ngEABycZWaF393E5UkDv5QZrY0dM2+12jJ0bz2KOn1C/QNghLG9OA4zKz0Ssjd3V5TURkSdRBQMREzHngsm21A0jeib3Iheq6wjcj1pzxc9ke8Hsu0+OLgrHCodLqLL+LfbveGEOwD5B3MF+h50W9kT5bfnVXZqv1L5HLYyP1b6wTxjlm3zl5aEBUc50WBvimL79KDzhgGcOFXj2OUPhKTmoooB4R5DUpMdDWEGB3+knQ3CfWWHQKO7ufUQAADJgAAGIMcYoXdclZeccuoZEyTEAIHgzzAYbkiAykCeMZqJEgYMa3Nys1ttN8JUops1mPj9rmvnZ2b1PfepTTD7GGIfc6X3XpgjK7IiAuT84k/3JIqAmhLk2ODZHyI7YgSNzVQDQ4Jzz/dk/7312UeZ5M3Nd16bcdqISU9upRJO8YaRqAmYhcGw5VA5ARaJZ1XZb59k5BwjM/XZyHaq2bc3MN40omIFbnIAabDtf1y+/+oq7qKrTxexk8fZb37t58uS5c9WHTy6/99b66QU8edfM0PnQ1L4K7H2oK++5qioOHhGb+UwU0Dk1m80WN5V6YlCr2Klq8HXbdcDO++pB1fyJH//9T5Y3337ywRXK1tEG5Pqd3/cLv/iLz2+ufue7b7774fMHjx+YSYqCngAAGMkILV/ibACGKACAtuOOUgEj9hvFt1H75M1t8uEoKRZdFBJbzTwiEaCBKBo4JAUI5CCp5WWQqHOuuNUnB+RiWaZd3yK1DocxKfvSph/nqOBLru8ntZ9rtgDLNJPd3aU8wloihfKNvcygyNluqP3py4+4DtpusU9IoGxgiIbmRDQnl8kNjfFWlpO0cW/sF9u01ycni9PT0xz/LCKI/eEzAMBeFAJAYbgdYrHAQX5Q1WxrP31+Uc/mTdOYRiJGTNlMztPTnBNkmPagnkdRBV3XrTbbzXrrnJOeihiHqxe897C7FzOPYg9nh8QB+zQxGfYRlbn7mwAywU41SlluY5vDaG3bj7/Y/zwfz5nmbT46i5JoxvmOGm5fMUyJtfyk/HMyi0NbodSdR+dbVEA1s8HmzVGUm80mh8XCcBx5zFTVG1+a3ej5oiY4HNIEMpNBqtrOBC8+nwCthB6gmAECH1bOZXJA4hA+JUx2kCysn/78/ZhmtuvqyqkKM4qIATTB3yw37TZtt6mZ195XjEG7iJRvBKzMMEUwMPYOwYuAd8M1Pkxt23YpqiEQq+Bicdq27Xx+8vjlV12YiQi5CgUwxy6bAhl7YofeIZERYgh+HB4GpvmC87YTGgAwYx3yhXQphBA85wwq+ZPK+ZyKi4hAgcAIDT1FEecZUBkNUKvai4iphOBEpMoNJqHKqySJ1G035itEJEDnHBMCoQtsZqCC2UtqBExQM8RESWfzE64CMc4cdc+fd88u//63fu28U726CesrIgKjJM+3amjAjERUeSYiX1fsnBiAY/LBzMQRIrr++nPF4FzwSqyAzXz26mc+88l7p87Lr7775k1MEPjx/Uf/3T/204Lw9d/6rW+9+b3/9K/83PnD01A5SYqIsU2g6l2FO7MSEFFMzSz7F/OpvEwbw1l2D7tz7UfEVylGyiUNFOeDx2ojWZY5K0bJg4QxxpwVAA2cd13XOYZ5Vadtm/mHmJxzdRVms1nBQTZ6iHvtVdoNsrfwG9mkjwUr7ikYJ1Iy3fCmdwWMnpWjYgcKaVb+VOqmnfFQxKONSVcy5CfV+r6Seu87FQKtnH/v+bMv/6EfnZ2ftilmR2KURAQgZkiC6gBgu92GELLNCIPII6Ku65i8d06Hg//L5fL09GQ+n4cQMkTyrYImfQJu2xluuxlORNgEfJNqzNy27Xw+J+/IwDmXV95jI2yTADXKcWSEbivtcr3pr4CWBNb76kZ6UlXal5WD/aiHqDrUxLfJ9MOaMNLWx12XhLdYdoctj5ryjsYA4A41fHScR8uoAyZKYiTZkvgm2q7kH9hHNBT8PPlXRMYwuv1x0mazyWfecD+WxMxAhy2gMVq499XtBa2UD+Mg+z9HY3F/hV12dIj33RvMh5OPKNoJtMs6E1Yva8aYhiz//fH6PBJiqus6dtE5R0SiyTF47wlDTLpZt/MTns9PLq7bpmlUlZ0LHIicBAIMzAboVdjzNncaYzJTlw/jthtml5I1zfz09NS72hQQnJoQGaIRoRk4RwjknAuBRrz3QRio+V61RdUQkQ+MiCl1ZuYcMQcmzCYUIjqCIVgEYoyxjZmoz8/PYydExM537SqlrUNwZDmkwyQKQBOafCFSpoeUutUqet9VVTUCmSinsjBVTQgA4AwICciACYPj5Be1d55rU6srQP87X/+t0NkMXPPwtZxXC4EY0bMLIeQDwVA5qCtwDD6AY3AOmHqCyxWYwBEwggI4BzFCK3px/WA++8T5/fXFB1GBHRDQvbPTL3zuMw8e3PvMZz797/0//oPnT69f+8TLMYqRAvaBS8w+gXkyVUECRx4GN2RVVaWuYsY+W2sf0XaEDif6ZuTcCe
VP6Lz8tSdaA0TUMW++GZgF5oqcRxF2CQXNpIuhqdk5tbTPDsWpyHIAQx06SD932/PIREflZ38Gp5jCHWJ2zHs40UaTr0rVcFtrKKoMhkBEmmRxdhJO5sqImrICFgBQc2AKIISu67qqqjJeEXE+ny+Xy3y01zlnpiLRORdji2rnJ4vF2UleqgIYY39bXjrwiw6rQTt0rOfRl2HupVBzzq3X67OzMyKyJDlX7WhxjIuEEii5qW3XbjYbEUEgRUCcBs2qJpFIfOSY9m2aadJXWaaNDPuC5e1/ow7O7ugJTZdKohznhP5uM+LGnw7otdyhzNuIY53+JRyw5SEopjCxPR1Wdj2pXJqQdyihiUY0szEtaBmERUQ5Kf9g1fYg6jNY9fo2p4EeFSpAv6jFHF6MiHmNS4RjzHHvnhncq7AP8zG4D4sCR/ECAkZHpdhoMRzirmwfCnTkrk1Ri+s+EbEPna0qM0sphRCSgHO0Wm43m6hqMYqZNU2jgN7n25TZsQdE6xN6O/QOQbK/Kp9lmM1mjoAZU+QY3WzWnJ+eO+dU861R7BgA8n4KkmRxD/kIQL6qKN9tAUDEQETeATMG74ihQ46xQ0DCfHu3EaEr9vLz1nXOY0VEdV0BtECoCsGhafQOZ3VVV5zaTpWapmJkonx/AuRs7Zm5ttttSimlKoTgnOtDxUGQggII5aQhDGzZ182MlXdBUl3Vb333ncsnz05do9GeWmoUa8gXB3BNXjlERmCq64rqAI5hVkOowHmoA+RU0maAAExA0F9UbwD1AtZrSqlSeqU+vcTLznDpsUvx5ubq/vni/fff/eEf+qGX/uV/+W/8V//1z/31v0UEL7/8yrZL6+3Gh3obW3K8XW+qKqctaxExeKfKkhLRbovLzBA1ryLKxMsjx9m+NXyUII+WifaFQab3NGygAGZaVxWbSkyIOZabsrvRe9/FvZUoAIw59idqrK8zSA8isnGxcexsRSktx3fl4HtV8QLadzJlK8dTdnewIQgHNUMIQgoWEamN3fnLj5uTedtumDnf7m69VuzD2RwCr1dbZm7bGEJYr7aPHr4UKvftb39bVYPzANB1nSc+OT+ZzWah8v2dvoNPGPZu/526745KYTiQxbvJkOu6brPZjFZe7zNRmWArF2ZKKW3buN1uU1JyzmzcGAYYwkTHZ8W9HgdouhffA9ipT+DBU3qr/Zi7MD1+aHgEwgQUWCz1Drs+MpJbl2h0WGdQyQQgh12XK2wscpZP4hhHmi41x/i+5PmJLBjxAvu8cVv0tZqJaoySUkop5esBc+IFzP7nYx6zQ8YeNzLHzZtjDHzXaqD3CA6GIJQ2BBrartmj3H5oXZUMsjM4OK+/cyMkYjmyV7U3Rr2rEM1MPMP52VlK+uSj51c3q5Oz16qqikkQgRicA+cwMBB7A1UVQyYM3pmZiRgaMNJmsxHmqqqYIIQwn8/qplIxRHPOMZtC5jtEBGNCRO/IMSIaABLkHeJe9BMRo4JqTMrGiBqCA1S15NghGQNkNZwnKyKqJiJtG4kBTSrnqoq6mDS1oB2gIQmZmgkB1qHyHHRIi911nQ/syBmIar6+SVXVV67HshojG5mCKapDov7+b1BVAVucnl0+f+ut9947uXf/wzffac4fbCSiVqCKIKjaIVZMDMjMrRBGQXAcEUgdm1MQAoeEwABgjoRJGJkZuhQAoPa6arvL67O5f61ePF1dX9VxNmvW7RZBX3np0Qfvfv8zb/zAyU//4a/84Bf/9t/+xV//+jcWp4uH52c3m3XlsY2x4j5VQ55yjBH65Z0AEJjpPrFNgmbysw7Z1KGQMIfEfwcLjD8RIBIKWE7RSKYmOp83KKpdzBsJSAySvPeGRzgasj1ng/OgfI8oYGXYUK5jhGi3iDjcezk+939+vJkxvQCwtFF2DH7QRTnmUsQlFUMFNSXbWHzp/vnsZLGG2B80YcrBHWC96e9ywPPDhw/Pzs6ePn368OFDRLy4uMh9Z2dIXdfnJ6dN06SUVJOqDGIaywXW5N+7jawx1+jEKDMzNVyvt1VV5agK55zmq5OgPyMIBQLUbL3ZbLcdIpJzImqmzCxSWgO91BMRz3TbkOBO4turpphZ7jZhfdjm5P1IQ6Xhkn8qHybAxP2MMHd0PTGGJvba0GnevYPy5on9h8I2tD1MlTQ3aXlUeEcHedQaG4Yx0MDOcDc0E5H1ZgMAzjnE3UnH3Fke29AIFibsXhclO+2hYz8oGocEbeMeUo4hGMxfsyGVaWlJAIDtn3ovUJBt0/GgV/9veQtICZLRq5439kREwDz1m2FDMMRZ6qIIPHhw/63vv/f88uqb3/r2H/xDf6zrOkQKVZjNAzMzee8bIgV0ZkLkmRqRm9gJgVBANJPYqvdMII68d3XlEfucVpo6ov5EHyJyTqlB5D2HfPefiVl/KaRDYgbnCBFU1USTtMzs64qIU0reczYtBlxA7gegT85x7/RMNTnvAGCTWiQ9v3cmIsGx8xz8PIdkAkC+oxBRJUbPtfOUd2qj5FteOiDrt5ZNASIQGogZKwMroAqqmeq23XIIGkJiqu7f3z65uEYy6NS6pIyWEJkEOCqYU7G4WfoquLqyzYaC16pOzLFxwCEgow9QeeYqESljWDQgAg4ocVo+r/38fvCyxg8Iu3ZTB59ieunB/cunz5aXFy8/fHjv5PTzP/DGr//6l37hF3/5N7/zPQN48PD83r2Ttm37BQ8iGBmSgA3EDJkBSiN4jGDFodj+dYQTc3xkipJoD6sd4eKcsBpQVRezuSOWQUQgIjB578tUlKUQu83HWzLs5M/JeCbj3I1qMpGipYO+pgvfnXw4ZhwjoulUJR/222kEZs8cRTRwOJkxoweOIoKgKAYGINrH1ZlDZBG7f/++c+7evXuqenFx8e6774WKicjUmqa5f/9+YJddwSL9gZBMFkMIxpElOQGq7RJZjCIVCh1wOJls3nZdBwDe+1xNkiEiSL/vkK/gyG60zbbLVmHOyJwbFN1B3kwGIJqqIFSI/X1U4xhGSV7iu0TMHtYHoQPHytFJmRnktOIHODskr/xmslKcfFg+j/9OKKxvLV+FsSP5nTOgaCrbKP3dVkWn00UbHDBqSf0ldd5mH5Q7+oeKKr+X3iGNZiaW2rZ9/vx5G7umqvPlWn37uZbqcC8wAgx50IZNJkJSG5J1W39XRzlUy8ftinVD/jNvMebbL8r6uAsJ3mNFYhhV6hiXlNE7YvkQfRNwIeZUkYaI+ZZ44N7cQCQRqes6MwgRVRUuFotZ1TDTn/kzf+aNN944OTtLwgpyctIAAJFz+XrjnFmLPSEzBHQxmpkIIgbnoRIRMOOsfVNKBCIqURIzo/MAQGiI5gio3+UkdghiqgpoaIagBH3uD0eokLMIGEOoQlDHfQCLKCAhIRowOwD4vb/3927Xm5TSw3v3Ea2qPDNfry436yWhKCRHOKsrVWAkx0TIxOAcg1rKOZYlAlDTNDnlsg2XrWVq9yBArKoKioCIxAaAmFQR6GqzOX386BNf/OLXf/ZvdIRgct4nvM804RKYAyEV02QpcV1Xw
N57p4omQFY7hu0GlMA2UNdwD9xiJmjJkgMA72A+q65Dt7qpUR/V9W/GNG+aqCKpa5rm9ddffffd92bzmgjrwH/wx37ksz/wxne+853f+tbv/MbXv/H06WVz7ySE4GufVLquQ0CHzsyi5uvMAXWaLOhQZB1y6yEFHpYJrdouwHa/msGsadCADJIlA1CwWVU1TRPpeOOIqCZ47Oo/xN0lCjAE7gDkGw73dK2ZKQIX4zmc+O1Z+o9E0ux08G0rXTguA8tOqfYkFkU6jaePH87vnYGopAgehPKaHgDAENQMzFze63327OLhw4cicnFx8ezZs6ap81Gi8/Pzs7MzMkgSiXFi0RRDF0S3G9BO6BzRvoeDLkEA0B8niFECu+xvHFsYxbyZxhjbtt22kdmTR0mmpgCITChjs+N9PkDQny8BAOz3L16o7OlU2wUnj0Rpw2oSoF8nlaqxmOBd1mVZ2e64Q3sIrh4HdgjAssEhSKlYE9Oui33FD6qWUgc77OxANN6AZEVE4mQKhwEUx6ewP9qhqV1figBG+dpX9lzXddd1KSU3dykNzmei0dk9JA4iAGDyh1BFxPGYWWmvAID3NC6txlkMKplxvwAAueMevEzc5cSz7EipD+8sf7JhrxdzYP/wa15fDnbtxsw4eO+9akqp88xXV1cAcHV58+FH73/2s581EzD68le/8iM/8iMPHjyIYuzwwf0HSKlpKsaAwGrJLAIAogfzyhbZtUigZqzqnIEnMHaAaKBCkIwsE7LPl/4SMTNxPglljtgzMpH1fkREyycgANTIIREyOCY0BGIMITDnsK+YqY6zC5KRge7fvw/npqrterVer2dNM5s3zXUwSMzYthFQmbndrDpD5xxYp5acYzShbGIw9+FdJpI9ZAkE0fodFiIGU4uS0NjYkSEgSkrIJIorkUeffuPs9dd/7be/Y/PTE7MYBUlcxYgoZqrqSYNzyYTBlMlVFToHroK6hjlDZ5AANp0st1zVsHAIYESqQm2ChL6epydrZqwUz2bNto3eO6v8ZrtanC3sfeikRQUfnMn2wel8/sXP/+CnPvkHf98Pv/3227/07e8/e/ZstVo1TTOrGwVLKeUFCQ7eGkPLdjYcOGZKeivJfqw80SKHmru0EUcZrmBmgAYMaAZ1XesmDR2RmVVVVVVVZ+k2CWA2zSIJd4qLcTzjQ57x5BMr4rmgUMD71XYJ/MuyG8zBknrizzuU3mNJYJQSqiXUlz/1+smDM0T0SC2aEgAAqRmAIOQ04/1tSO+8887l5WVVVdvterFYrNdrE3n8+PHZySkAmCTchUGBSMxeIxx81GVEFfbycMDW/jjHoY+OxEOjjIi6lLquq6r+guHdbNHyBn72nHdd55xXNVVjdqoaRQDA+9Cr7SFyC8Bwoq6KgQ17Kofj1L0/C+07vhxD7cZPj0rnF7Q0dyReHGIpPy8CE/bW0+O9zqVqRERCNwEyEUFxz2DZvg43cIwNj09cKNcSjHlrCgbEHeWokv9F03gJWTmqIt4CkTDnWjKzrHE3m8319XVwvm23OUjYrA97M+tzvauCmXVyJNUoHDDwWETioQJm5hh3rryyBbE0GbkNK3tJVmIhn+gzw7x7nT1G5Tn7IWxVcoBV7quqvKoC9EfvTs7P5vO5SPzwww/X6+XpYtF1nff+ZH567/yBiHzxB7/4+c//4Hy2wLxoZt+225PTivMSHhEVDfqjuaZ5m0acc03TOOeYIEay2ofgYrvqOgFgSck0IZGBMFf5Ol7HObMeVJ699yZiLGaEQ7AYoxFB1KSDcAAEMwFQ5oCIKSUzUQWRPkpDVVPqunYTQkhti4iXlxer1fKt77/1/vvvM/Nytb64eLbeLFMSRE5tJ0bE0HWdSfSeM9yqqkpJUkqa00SAgSAAG0gXE0MQ0qgRknMBBAjzaYvUGUKn2qp96ff98NPvvfPeb3/nldqJilF2DpIBJsCE1CEmgFW3TZt15x2pOtVFFZYKtWPn/n+U/XnQbEl2H4adczLz3ltV37681++97vd6ep2tMTM9AAhgBstgIUFKIglSAiASoMQwF1FeRNqhoGxG2PqHfzpM0iFbdthhB2WLIsV9B0hRAAhoCBAzwKzAzPQMeu9++7dV1b03M8/xHyczK6u+7w3pGx2v66u6S97Mk2c/v+PEy/l84QY/8bwAACIjOEELxkC75eDUCEjv7RRb6xigMTZCZObD44O79+/dunEzjB5ZCJk4TK3Zeur60dbs+KUP371//+7d+w8ePHj8+PFyuVTdxUYSwiQUEfiqBoU16daCFitnTynn25C1deV9vfHpKhnZWhOjt5qUZ42EFHIe/WgvbcMrOWHN60SkDnilp1fJjP82AhtzcArXzKQnHmvbvNgqG00gYHPXXz4Cx46obewCYHawB4biGJzBESu7HlITZWTFOwNoW+fHPoa+bVvmKByefeZ2N2kBQCWZHga13ZLJ/r/an5zEs85fWlPmVQk5rDyriBiEjTF+HDXb0HuvVVwkpJtzuZwbg9ZaY4nQAIYQOTI6ZyLjxXy56EdrNStS1C4jgFblgYTGADOTIaE11Y8lIqD+VwYfo6esLSGiyifhkEyzFbtcWSpFGF9yyqSF5IxJAgAsTEiXAxKICJElSR2VXibBpa6ymWJKsFRdLAt4wVVWBSIykjJ9JkwtGtNTogpHA6lgWidcb63PSfSEgs7peuUBpFbQpYG29uktSTTK7Fa/rqctlNQPZhZCIvIhGCCkTb1tbRigsNMRCRDA4oSRY4yf//WvQFa8VJjt7+/v7u4CwPn5+b17984vlvBkMJsNx1L5c8Ne4PVL0pdSfSNrv0pyDGhezxW+K1VnDCWaK+7uollqWRFkM30xPyVSxEciogfvv3c3RgBummbiGmdsM3Wz2fbWbMd7/5GPvPKh7/j43uHRdGvPWjuOy8ZBY5sOqQVjjSGDzEaCrs4QYxyFu46MgWEIxoBzLo4g0vTDomm66MPgewBo2yY5ul1wDo2JavsSESEbZHQoorkgCtUFIhAkkmUi8r43xjjrQCJH70c2xuhnjh4RQwwShEP0fiCivl8YY+aLReea0Yflol+czR8+fIiIGPgDT98+Pj4+vThXpI6maWIcW2cuLpYxemvM+dlZ2zbqIOPUdkWbyWA0RuaLtm2ZpZcRBQcigyAiDVGcD865JeOZxf3v/thXxvnjB4+3ukmIXnreNVMAsMbGRU/OTdEaMeHRPAzouha7tu/vb5020DTgLDLvyjiePCSCrb196AWUgsMIEu12F+6/a2P83nfOHmxPX7N4vrPP6Cy0h3vte/cfP17OdydTs/TiuUUcyJ/359jAh+fjK3sOu+vj8f69hw8fjOPd4L/63vsLsfPBd81kcX7BHJYwYiuRYhNsLh/PibFCUIVFVL+E7N2dTPeXywtCNhZAIggBWmayzoYwsvKVGJxpxjEQ2aENzRidCLdm0RqcL2+ayc4AYwgLii3SxANZ4xsKjjpxIY6g8a3sSWYAFKAU80qKuyAmlxGy1ggLpHbjoE2xrDGwinsyBwmKV1Gn0RjdrpINcRZZ
gVIIAKBGJOEqs2f1vfr5RFILoo0Ez/Vkz5rvIeJexNHw+/7i+MVnp/tbPURvg3POMmxFjIJBQNAQIAGiIxtC6LoOAHRjaFXSs88+i7zCplgJ2ooBQZYi5TNcpRcksQeC2SbWq0gS+EauiNA8FzREGmxWDmWttcZpIQoANE0TQjg9PfM+KrTk5brYWr/bcORittLq2UcUFCrWmy5Afbt1OYEFpbaUHpVfoXbDgto0VEayMch0UJqTYnqmUMvKhU5ZxAIAmFxvI2tjxATcTcS4qh/FHFMkDX+iKYPJsVguYyk++TI5V0xX1lE2FOqNl9I/i2BGRAIEFgMrK7kutAVYYbmICNUVONYiAArEEFX0qmlojBn9cHZ+quj8TesOW2eM4bip7BfK3Bheea5cOtL3VbS3XEJrCsqKKiCHHuqdqdNaf5lXrEZRSPFRnW1ntgsNpFZuCETphtpEyLl2Op0eH19/+eWXt7a2ptNp13XOpfRgPZDUKYxEBAaYAzMSkVWYaLSIEmOUgKNAiGPXdWEc8lCFiFQz0NAAIlprnLECrHgXABCj5p2t3C0iwhKJSLM3SumU1haqgqiTMI5jytUK43K51BkIIYzGak/xj33sY9/61rcAoGmaBw8eWGvB0KNHj0Bs27bOudmkBQBj0CNCbkyOiMZa56JG0Ms0EhEjM/MwDDpORyalbRsTmE9PT4HwA889d/q1f97s42w6Hfs+tA0YiILQmqWEaWuD97OmOZV+C6EfBufckgLw0nJnJ620nSfyDTrDgRAIgNlg7BzKxMynXVxetAajDe10etbPp7SFw6KdNB++fefzX/tS99wHiDiid5bkIszExXkfGIDBIDJIO50c7u0v5/OLb3zz0dLPdve3t7cf3L3nnCFLghw8G04iShs7aiS7JlfJUBJpEQN3bUsmjMNiOp1ChCgQPABBN5tECTF6RuDIYz80DbJlA+CMDcYgIVrbQGPJDBJVz2YOBE6BIrQpdc0uyj6qOfC/8dA7bBo6TzD3Nz5vPGvjqkp4r10CcMWw68HAOtsvD/LRR6LlMMy2Jk3TDMgGDTNj6h+l57K2EIMYrLE4jEstN/Lez2azg4ODpmk4+FoA6/95vRZo4/GXX1hEhDBZ71n0lhMcGcUAYQ79sp9MJoQUY5xOpyqVZd1Ism3jfbg4XwzDAECaRXJ5jq6UDeWbIgDKQSBIq+oUQMYVWhYI5M6XmS+rby85xmWt9wgiGlv479VpwBtjQxREWElZZlSAP+HCoDGpLExEsNISrtDCVM7XM2Jg1XGvFifqACwaKCTFcVWzVLgq1iqIAc2NKu9VMqAuU0UtY3ScxV8NlaZSPmN2fNUvxcIGgQyhEAoRiDMpBxA4cvAcI4F0TYIpmHRNTRJlbFr2etlJnlPBV6JXD9skDOSiPOmSmMrxUB6hYEmYzdZCvVgJ6TIYEQHk0iimrK+e0GoHA4CC4KbbUJv8AEAz6SaT2a2bzzz//ItPPXWjaWdb06nLoezij6lj3kiggRIy0JAmeZEIETAYax2EQN77GHokcc6KaEe+9OaISATaKYElEqCGpdVBIhIRTaEQFXKleykiavy+73tr7TiO6jTu+17LRt988/Uvf/nLzrlbt25puslsNgOAtk1NC5bL5ePHj5um6WZT771wCCFYa8d+IZKSthBga2tL599YG2OZVUTEpmmiCJIw8zimmQ/OaVoDR2Dm0/PzcQgHBwdwdPzw0cMWYH8622o6huhBFmOPrVvEHhHO43K77R6fn+7v7SByjKExjTOAEAWgIRfjOCzObDtBayR4BBAeYz8YCZG5QeYxmtZM7MQZhCBxuTzcnR7tbT948P7u7r5rnfjYNE046XdsO/e9lyhEfYyRDFo3BP/m+4/NxG07C4QX84vdne04BkBsmoYXXjddCnVEMIYpJxWqUlWc1d77rmsJ4+nZOYdBHA2DFzYhctdNQYCE0ZAACEForSEzDEtL1jkHCMGIM3ZiHCQVzRIhIRhnt7amidozFULZ1xWLKCRas1C8KpflSgFc7n+ZzZbtrE8tN6Eqkn2lzNp4BDwhYnylbBYRtIaB9w8Pjq9f9zEM0auotVhKTvS2KCzCbJ2xYBJ7Ojo62traIkL1J4cQUjY/JB9sMTfrx9evUQu//Fktv82ImkJcIco49k3TtG07juP+/j4AAbAawSLIzISJepajPz8/Xy60IzeGXA5Yz93GnNbctnDStbUXXrN9L1EGVnfeuKGIIDKAQBJCQGhQp6jkAlyhWzEA1D5wSGL4ishoEZklRK0TWzwKkHdaUWwBAKKaTRpGreuJWQpgCCKiFMEPKKQ/8Zr0XR9DynqFgtcIpGgMiKiA+ADAOdaAWZYnAc9c0HopY51i1gak8o3XpBLyAjG7EpcqR3FxK2cJIZCsBOoGyWFWv6rl2wx91TMPAOo5rxfdXioZV+5Wi+oYvZp6iEhka6Uz6zlSLP4i3fUc64wxhjDlYKvvR6df33FvZ//6jVt37nzg5s1bCsk+nU67pmEARVdW49WkPqzRAIn6Y1AZMTILEBm0HiVZgyiE4kfbda0x4L1HEkCOHJ1rjMFiLNIKSpDKshKBNhYUEUWQrr3rfd+r3HXOLRYLTTIe+9RgA4AQzaNHJ8Pg9/b2Jk17+/ZtZj47uzDGhcAh8DiG8/P5cvTGmHEIalL3qOtOTdM4a5WDuaZpmkYkOw+YrW0EEbwn0rIMaRouxIOMMQ7GWUTT+xERD5575lt33zt7680XDq+1LDvTyQTdlNw4Bh/D9vY2Bx8v+sOdbV4soWmO2EMTIQQwDhoL1gP1YBtoe7AWggdhGHs7v9jiAJGgj9dnzXyU5Rad+ovp1lY/7xugZ28//YWvfLXZPVQMNjLWbs0u5vNROyMxL2McBZfjeLHsBSAKt207DIOoZTIE72PxlIiAgu/6MZYNqFqR7jIt8hzH0WD/znvv/tiP/cDuzuTN17/VNZPzswWQGXrf+zEwEMEweCLLnufzBQMbQ5OmRWTrgLxMuglLCCEYZECMAlakaZrAXkQE1kTdk47Ccyout3lOEdIbd9v4Rps31Jdc/lD/W/O3+syrRXKWoJcj7mkXG3p8evbCCx/d2ds9GZagSKXWQlzz1KohxVoHrIA4W1uzyWQiIkrKmeEGrQ8pPOhK0Xv5uPTmVGQwrFymWuxv5vP5dDp98cUXvfd9P969e1ckTieTcRwRcTqdRu97P56eno7jiMaAOu6sDSEQriqI6uxcIkCUHNRLFjwRaH0wZdTGYrhUXv4rvOi1TCrWEkmWooAIyYqO6y5Q1Z8q/zMWEP964XXAtTAoBoS1lCcQiAiFuHI/l8EUnpIXGEgbrGq7kix4mKPaCVKl9ulp6XJcLdzGrqgXFFe+hHwOAggIpxsYJACI6+738rmI0jL++lf9yeiE0trMl9y0Issx54Jaa6Vx7BkuESrkkrZqXdI5dT26VIezLs1MBhPQACgBiph6rxKhITSGkCDdA4EwBS8AIWd1iOL/MAsIWGdEqKxCGbAlJSQEQQC0ZKjRiWmZeTrdun79+rPPPnt0dNw0zaS
bta1TdytVMh6AraUYhRmRJEethAiQ2BqVT8aN2knFM0fmiCjWEpLVxAXnDCJaJGutPgIJtDk3V2gk+c+o0bioLlC0HGHpB/VDav6ztfbi4mIclsYYbbBBRDHG4+NjY8zFxcVisbg4PRORra2tYRhUbBtjlsslALTTCQCAUAihbVtMRUdJqEy6zjnXtG3XdW3rm6ZRwgDiafa+FDAcYwxz0JcKnjkKEI4+EpE/PmjvPPPul7968s1vvf76m4ftZKvrms4Z5wD5dDKZts04LOddN51OmtZOPRtjmq4DY6B1YCwAwHQKMcJkmur7QgA/ABEEDzw1/XB2eiEv79v9yfkYxhYX47Kx7njv4OH7927demY+Xw4QwcGFBCMIgnHwDy/mQHaJ+PDiomnBE+3v79977x4ieu+NMWJoGAaMIAK64erNhSmlMa2FTqn3oTk0xsBHPvzSzrSbOoLIy+VAtp1fLBf9cjksGGG5XHIkS4uxH4JA27lu0gByRGATZtMpRIYYqDFACDHFQTxHIOHAULHUQjUbm7EwYf39ail4ydITUQflplsY1mVTrUBfKWWvlLX1wZeSRWC9v19OQoeI4IFnuzueIxAZ50ShbNJzUy6UZioTohVha5u9vd3JZFIonkPI0iuDHlSW++Wjfp/yhuWqJ70Us1hrl8vlbDb74Ac/eHh4eHFx8eu//vlhGJqm8WMcfa8xv3EcF4sFABhy6ksTkRAV1GYtEAuV17SMZ004cUJw0e+KIFlJFFzJmPrydHOFKKpy6MuF+YnJQKzl08Ygy4WISIBcA06lyynrEJtajqIulkmuBVJ5aAmmiohi39dR4Y33Qo3+qi8aasCZtfXdIADM7tZLw1O0tZIypu51jXESAao1B0gYA+VMeCpBUN2cqtGkiV05GABRkGKMrMlo+dWSDyAGETG02mz1PGuSQcnkB0hy0dkGqh1Y5lMQStuAkolmjJHIUtUppBlG7YIqiBlbWhvRGxOv2tsiAsCYsgFTcEhymIYIjDE6hzrtZI1qydeuXTs6una0fzSdzgw59TwXzQElipBumTJ7CKAiWc1fQjGGjCERocYyE6Iwuxi9MSayFyBrLaJ0rlEtJ1mTMURMIXMGMDkNU13KnLsiarkqAGhsPn0Zgs7e4mLe9/0G2TBz0zRqkMXRP3jwgIgWi4WyI8n55BGEiPwYjTHOOYPSNI1G01UIOefacRyGoW3Htm0TQzetJrprLjcg65nOuXEctZYFEW3TCaHn+Mi66XPPHRt3/vrbD87OTvoBF3MOIwoQx9ZYI7w9myEwM9+9/37equSce+rGjRu3nzbOoXVgiKyNPvi+Bz92REZ4fn6x9WD4tYfvffef/lk72e+DB2sbsRhH7/3+8bV37/3W1nJprGWOw7znxgyLEZgvlv398wvXdgPi3QcPyLqunbimuVheWEvjOBpHZIjQhjjWqmqRPWUJoGLOk0nX9wuOcHSw/+Du28C+a9vowzAsgX3bmMg2CotrRgRlL8TYNE1jXYQo7Id+aI9szKxYibGZtDXTyDsaoRJUVx5YsVolEJE1hoSV6rwhNZ8kaxBRsg8Z0/684niynFp7dBmAqdzS5aUQsedwfOOpdmvaD4OZtYP3iAgYSUBNWU5wAbr7we7s7Ozt7SV3hDHWWh8GwlVSu+6Bcmy8dq1xXJaFAKD26aZDQSICdF336NGjg4ODT37yk7u7u3fv3n377bcfP368vb1NRMuhB5YRRt2H3vtJN2PmwNFao6UdugM3pmklStc9G3n8azHXIn3TQSl9biW2BRR0LU19NdflQ226aY0jZAM0jSGLzDKWPC6BnJymNZVaQY24AphLYk+ThGGVaJsXhAGk2PqS2H8SP8y8YXDX2kZKRUbQeDJU+kqtgkAl6VfzlK3qEt1XQiy/gkCJcZZZKhMrsspwghzdQBBeR8mIMZJhnUx1JOgoSUiEjSEAieyJyJosrtarkOq71YSxmtg8zeWE+tq8rNn9gCAG8ySvyF5lmz6t1lEQsXVNflYswYJae8sTmpimIbBkDBkiEkAdjrW2nXTHx8c3b96cTrfUB9s2LRG1raIfEzNo9nsUQazrudUtxAAogmTB2OS4KQVdiCgxGhTg0PtAAiCiyI7GGB/GEvwudVaK5uhj8N5HSTll2lFjGLyIeD9oAYUKYCKyZEpFVln0yB4RZ7NZqjQzdhzH+/fvq26h5hoAjOMoC3TOBc+IGMZeRLquY1Y/Nsxms6Ksj2PQZCtmtk0XQqfGrjZs0Lyw6dZMX9y5lpB2dnbu339wcXGxFOrJnk8mXz4/McMIPFpDwmwBJcTj6dQv+8evv9401jjzqR/8wZs/+CPPPvvsCy+80G5PYTqF6QSGAawDQxACCEAIMA7gYzw7PXn4aPLNd1+etv7WtW9cPJ4hXpyczWwzXw5RgMldu33nm2+8dePadfReIvjFeDF4CHEeoneWmi4In1zMwTrbuPl8vlgsuukEBaKEGAJUATUNAOuREyAsc0qCExEtRTt79PiTr36UQE7PTizhcn5OQAaFowcE4AjCkUPkpGyxsNP0LoYw+sV8rn6/GGOMQGgQ1P+8WmipuOXlQ0QQCzcT3auXxaGyxvo+NZuC9Uv4Epcu5wsSyqb3uGYFNX+AJ4xcOcrlQSLi2bB86cUPdFuzniIIa3TMkhvHkYiRCFcFuoiI9vj4OMQxxFFARu8dGQMYOdbaU1mzWsquz+Bmm5rCXyA7zSuNJhXevPfee7dv3/6u7/rk9vbu6enpe++99xu/8ds3bh6quEVEMql0xxCZ1oUQGET3rbIegMsO402hu8FwUzXjhtzVS5DrNuwGECF5IDdmQ8+vpRQiAotilcFGuBEFqWhem7HVeiIREDk1Flib6hysBQCOa29UBBjlpo2QpUIJTJZpKSFzJAABSjJy5aC+kqRgJcsrgVFJtXImlfkgUpu0TFT5ULwFyGvPMoQZALnyRcegmhIpfhIgQEJQKin0IQRrbHl9Mmv+4bXhVXpSeVO6VI++IYPLVG/cs9CD3rZpmnJVzA09MSfrIaImVRGtLizTWM+/IdU2WASJyDjrnGvbdnd3d3d3dzKZaf2uiDRN07YTSsUukDUwAWbdQcYYS9opICodCiAJWtT2gkBkY4xB40NRK3QLvB2pxkAIwXuF6ylTkR0qwXs/Bl+M4BCCMU7lrvex70cRidEzMwGyc8qni2gPIYy+F5HFYoGIXdephylt/5wokFKmCfu+j2N0zhWEgBiZiBFxPp9rWYRzTl9ET2gFRUQDELouercQQtO1bTsBAO89GaugtkuPCGYJ5rfffe9wZyd4v1wu2Ydp20GIv/mt39nu2t/z+378h37sRz766nc89dwH5gc3Ly7O3pnPHz16dPbeO/P5PMZ4eO24H4aLi4vpZHKwu3ewt3/t2vH09u1DY+F7llND0LbTk9PQ+5N337n3+utLfx7HcTTQtrPpZOedd+5ut62fn2OMS+YY2BtyO7tkXOj7k/NlcDR1TruEoXWISZ8tAhgROa5oTJmW5B5fLi9EjJEFbt95uu97ACADbevCKN
4vGkuL0TOz9h8Ucc45BZtMNeHa4zWCMwYDIwkjCILJnBmznl1v8w05V2+rFZu9yhiVZJusabeQARC//YHF5aWfNaiaIfPK/de58b/50BdMsNiFmVua7e0004nnfgjBOKuYnZz7ahOqsZH2vo1+wFx8KlRVHK2/bUkYqXl0/ZmqFK3aFNZtQJS+JJS+HyeTyenp449+9MPPP/9813Xz+fyLX/ziV7/6zZs392MV3N64P1mTLWlkZszmdRlqLRgAVtU4kF1klLMB60NEBKKAGLIqGFik3IyvGo/qNYhIICW1llEQBVCK6xJyftbGMl++YSbHivvnZGgAILMmEspGgsqy1FSdIsDqhybBkw1tRCLFY1lZeGtrWi8955a0vIIdTUVTiRh4RbhlMMKCAiuZpNDH2eiFGEBWrUWS9hD8msjkiIjGWhExxtauMx22ISOMhpxpnIhwZIRUnl7evb6hsvWcVAiFmFE7ylVuZahZQ8EcLUsWN3Ht19ncanKUwWnWC2Twc2NyeQYAgFibtCKVYcYYa0lEDFlmaJqmaScAcHh4OJ1O9/b2VPoiYucaADAGXWMEYvSivHIcg2Y5AUDTWiCjGWGrJLsU0F1F0HXAmmJNZEXGxJqZrbV+TKbkMAycC7tLBpn3YRg04yzJ4Pl8yVkDGMdRJyqEkUMsFc/MrFIQAFjiMAwa8RURXOXnpzkp9XV6gkETwigSnXPLhBBOml9NOdWobuwR5nPNmi5jVt/49vauCC4WvbPNYrH4+te//vDhQ+fc6UXcnU2HIYDYCGaIEtG0O9uvv/3OzWvXfvd/8B/86O/53R/92HdwS198+82/+w/+zr/85V//7Gc/+/o3X1fyUQX+r/yV/+O3vvWtb3zjGzdu3PjkJ1791X/1r156/qXWNU3TvPDCC7eff/GZ55+f7B9agOPj4xdffPG1r3zxX/3aZ9956+2Bw9O3bv/6G+8t7SLGES30Q2xdE9EMHJcxPDybDxyRcWd3V839aduIgGuafhyCj9qZWFO7y2wrDygLlwxTEVWqnnnmmcg+hHE2m549PgGxAKAAbs658+UcyIzLUR046AgRQwhRwtnJKQEAR4msedExxqbtJrNpLDE4TIW8ab9kGVf2fuJKa5tIHUUpgCfMSpA2FyBQTr3U7coZ4qIW6jWPLdwA6/QFxb6Vq/F2VgK+hPPWA6waANYUUUhprxhjPLh5/fDG9T70Htk4FzgSYNO2uislSyJDAAIxRltrH1AdNXOp2dnGOeWEWn1Yu5ZF23+qm+7i/GJ/f//k5NErH/7Iyy+/TM7O5/Mvfekr3/zmN69d21VaWU2H/iMgqOBnZQAJ+L6U3OC6Q698LmlZUGwgAzn2BllAAgDanLYll16hFjDlA6KgbEaglcJqP2c9UfUdrjyQ1vCE0wvnkqhMfIar3iZFXtZvnVEfknEMKJLdw5T68D1R3SsWYflG/6TKq68V9MmRCKa+VU0JxX+QrxKVvlIZf2n0HDduUo+QL9WjX35c+VfpRFgAFXQ4XWiMQb1PmToEuCqjEtYXeuOD5tRh/jI1RzIUNductPcfsLAgAa01PFcXtN5HK9qZIUZNRUxTEWMktMaYrmtns62mnVhrd3f39/f3Z9tbGtrUbriNsdZa7VXHzHEQjhzCGGMAYEuALFzwvTE5VLyPIbA+Li+TQUAEgxg0CBVjjDGM4+C9NyQxV9sPw2ASRDbku/lhGNRRXF5QWfw4jn3f67UcPCKqmZXXN0YOepr6pRNHXieMmgKVG6gTtWjVWSn09Y7QoFUKjphIAGr8cggxyXXq+zGEAIDzxcJa+9prr/36r3/+M5/5DI/RbG1Nm8YgkYD3cWt79/W33/l3fuIPfupTn3761u3ej3/jH/7Dv/rX/9pvfe01IOgifODOtU+8eGdnZ2cymXRdd+upp+7sbXe3nvpdH3y+bdtXPvIdi/fe/sKvfdYY85UvfeU3Xnvz+PrRD//u3/sTP/VTr7766s1rh267e+G7X927dfw//Itf+Ht/7x/MpvuvvPKJf/rz/+Tg+v7Fcjm1rQGaj+NiuSS083HoJpOzxXLazS7mZ0SgoNAQgoiQMguhGsIP1gyqVXCKiM7OztoWXn3147/5+V8dx3FsnG2bcRHQOB4HTpCTGGNEQ7GP5Z7MPPpRYrQZoUj1PzBUdGUAMER8SWrUA7t8lB3KnIS1FIcirFgx5M9ZCpfygkQ2V3I3ufxcMli1RalJ7srhrc4kBAADRpGIwhgFMTBfu3bEhJFAAHzw+jq5qEcrjxLpqi86FQ+sxr3OgGBdBj/puJKb61WtM94rfDwE7/f29u7de/+jH/rw888/H0LgcfzlX/7ld9+9f/Pm9X4YADbhs0XWovAVf085LMBQPABFSIgIgpqjOXEcktmE2fivb1gDRGwI3dohWf2krDzdrLZHCwv5t5yr8tCN08pylLMlV/hsDL6cqa7I4txIzihtD57D2JgQlLVdo2QFpoqMMgivEQNkR2t+/RQmh4paNt5iRTCXpqEIpNXECnPlw6lvhZXbnHKSZG2OlzNpBSZFKgOwShPT8Rebr1ZZNDVZTWBELLRU5v2SSI4bFjCiIbI5prPK8c7LR1XEXwRSPld5tZQcpGCWMZIFa23TdNPpbHtnr+u6rpvu7u7u7Oy0k05tay3bazs36yattcoXyQD7VYy2FCIjChGZXF4sObpE2nU4Jnk5DEMJ0KpYHYYRUYLv1RLK4BsRAAY/Nk0DqZNgEsBFvSgCWO+Joq24V0UQmheqI1R39BrN5GUtArVeaOYVo9X0bBGJEbuu012IqUY8+/85AnCMTbGhY4wcARE9x/l84b3f2dkRwZOTk69+9as3nnpmOXdE0E1sHwdx+ObDuzc/9MJ3/ugP3Xruhde+8dqf+7N/Fhhe+MAHfvxHfmQYljHOmXmxWDy6uNgCjgYne9uDxKObTzln2qY5Gy4enJ0sOZw8enBw66l/74MvhDG+9sXf/Mn/5r/57u/9rp/9j372B77/Ux/+8EtHt5/+d3/6J5/9yCv/+f/6P/8Pf+Lff+4DL37+C79+9NRxJIgMaNxiPA+hf/Dw8Rh5MpkAwMXFRSEkANCmfcJrKnuh9HpudQ4nk8np6fmP/Z4fMIbeevuNndnMe+/HUcj0fa+o+iwxMHvvXbMVQupIBoaEpe/7EGTmwCLlUgIxxri2SeSn5F9pALEqnoSkAQNeJVjShtkQTLL2FnlLrqzbWpA9oQHsGvPPw0hR4RVPuGRwVtt/PS2XUEIQRCBikKZrd4/2hjgEQDEozM45Z6zkmFRMzXG0TWxEBAtXsJg1/r4xlMvvAxW7vDz05XKpSf9+jG3b3r373vd813c/99xzzLxcLj/72c+enJ7duHFtvliIYF0r8qTH4ZOtlg3XazlKXMQYI1U1lChEVMVxC8FuzPilZaByTwUqlBxbrQX2k6bridPIULZKGQZWRGoAY4WJWt+NcnS5zEMaIQBQvb6ss7V+uQCUdUQREcWLW19TZiYCIrMmgVJswsG64AHdYJdISOfHUEp45tyob4MIgQyQQWMJoebdR
abW754XEOsxSJU4VkQOrG+hDQ1Gx1zr2OXm62tVO0Jq6OwrVBCsjDkiYgbMHRULMEhhHMaYtmsnk0nbTra2tre3t7tuOp1OZ7MZ2ZTLZq1tWts0rm1b1xiNxVDOBjdIgiknI7/1iiYRUbR0HRHQsMQiOFXuqt9YD+8HRByHQaVpSilAg4aYeRl6FWZ+GEP0Ne9Tq1fvxiHWJTHMrNK32MorOtdZrXyJV+7lMvnFk4/WqE6mgWoiCxCBAImAeRh8jNEYBddLDtgYhJm7bjIu++U4LBeDRD4+PHrz9Tds027NWj/2ghBiFEP3zpe/71PfNVh56/Te//fv/k03mzz3zJ2pbfyiR+DBufnFcrZ3aBDtZBKJ3nx87r75+gdfenFvunUe/cWDR3Zv188mInJyvnj4+J0H7zyQpf99P/KDxtn/7D/5X/3wj37vT/3UT33qM5+58/zLt19+6TM//uOf++IXv/Ojr3zxC7/Zn8yJrDHufL4YOS764Xy+dLYliwiwnC80HmGMCVEQRdOsCimKCOR2XnmeVyyu67qzs/Pj48N33nqDQ2waywjaVV2QWACI2TMzB46dwYSunwGox9Ezg+ucMcargm5EK4wZFGBXgNfwk+sNoswBEuup3cWc9wPIuv8S1py1kn8qrql/g82K64k1tahiAbPe6ejKyzcpEAQF1IuFhsahv3X92tbujgdGZ4yhGNNDNQXdpyQVSnSuAXW914adV4/427/P5c/1iyFi2zrvvTXGNM3Z2ckPfOrTWvZ3fn7+K7/yK48ePd47PGRmaxvKTVivnIjLqkD9dKzM3zQ72UhSll2dkKWSDjCL3yu1nifs/1UhPwAIgUKO1ukGV87bhlx/0pRipYtdeRROd/nmtJ5cRkSSa50Li1yNfO0pIpcqkKQ6dFJL0UUlXdSi5XwCAIBcyvYvk2nNFeJz40zI6hQRQZWyWL8Xracj1GOG9dSqsr68Xr2aDy6qV4HVybtaTxa45JWp5kf1oQQhUqa3yBsNjEnO1cJ8oWb5csocRgAwxjRNM5lMtra2Jt10a2tne3t3Mpm0besyMrNWKOlBhMl7sD4zBld/6vhDCMxEBokIOBUFITIAMqb8t6ZpNCEZkmwbQwjAwiGy8DAMAGCN08UtFJUyqkIEUHBbLMIVWCSqjEwGrvfehwHUigIocr1eeszYnMWrtPp13dtR4sRK8p4j50aNxqx8/jYBehFAgiAWQEGIzGM/WGvjxcVicS4is9lsGIYv/dZXb926hYTDOHKMAWTLwQdu3nnq4Pjk7OKrX/jSjRs33r93d29n11Mcx9G1bnu61bkpGGIP2Jjo5fT+6fSVaWfb8+X49jvvPLx3goEmbnpw63Dn1kvPHN/Enn/1s7/2G7/5G9/36U++/NwLf+ev//d/46/9jT/5v/zPnnv5I5/+vu//Zw/OTk5Ovu93fc/f+ft/54P7ryAasq7rphfzIQr3ff/UU09ppMA5y8xROEYWQdB8Bg3YIWg0RHQ/otae5qQHgMBRELrGPXhwb2trJiJawuRHL+q4iRgUkFgABYIfCESAWERY202C8m1dlAgRDWkFNqoKyAK0yQxrFpd+qnivyFozoppXFO1hfQ+WnbhqonCZLdQE9gSufrXVu8GspLIoBESj61HYOjfOw8HRERv0gUmEpY7BV/fUXRlZRKAI4PoBl0n/yumAdRF1pWiEhA7Yee8fP374qe/53jt37gjw+++/90u/+C9jjIeHB0IUY7TWaZi6dufWNy98Od9YIahQIU83GOsGyy6BK9Q6I7X/Kl8xZizly2+9sSSYRXi5UICRAAQFAXMR6priVpuzlx6xmlJZaf1YmWsAIAICa8blxlNEUlFsHam9PNoNCKeNYUjusJ0kFiSPH2QXdG38rYyqdb0HUQSv0DEvL035qZ6W+sX1S/Ux1g99knaip5kq7qv6nGKkSORUAl5SNtQBJmnPb9wxTaMk39fqKSxIleqZNJNIhgRQtRgEgBL+KG6IvDqV7iI6sQp5gYht67qum0wmO9s7k8msm06m021FkCg57SlrEHMkqUyFglaSGDD1REn23OpKRZCCcCcizIE5BAmpwj3V6QmwRB9i9MLJnAUAYQhZfjvXapq0+nsBgIAAQYE4gAutJlTOGGOIow5YUt1E0GSCPOFlZdUs1gVd+68i58TiWduMqk4GZa+tXp8lcgaHEQkAqvQwMy8XF3qVqgKTtrVEF4vhnffuHu/uo2lDfzHb3okPTmkZbu8/dWPLv/L8B7/0pS/dfPppsLBgFke7QdoxOvRNO/E+OsB24B3bHraz/dkuzpfLpqN+wGWPUfZ297e3Dz7w3IvPP/38H/7pn/2dt976u3/7b3/9tdeOZvunj89++id+6v/xV/8/n/j4Jz/1qU/9rb/+3/7QZ77/6a987t779z/yI6/cvf/gnXfvIZoQWASn02kYeomBbFsIIAqX5j9Z3Szw8sV2TBqJiFxcXBgDB3u7F/MTBDYWz06WCIaIvO/JNFo/Jjl8E2MAAOcckdXcdUQo7hMCFhRjjG2avJEtc2RZkyZpJAjFn4xVnW7mk5kSUNvelByueu+vSYdEutXOFRFG2Ci1gHW2CRVLYQGUKwTfkzi25pogERoSkCjSNM327s7JOEbNG2Ox1goIIam2pJcrjJLGCqhUY9avUX/O31DCVlp3uG1M65VH2zTz+TyE8Yd/+Idv3rxJBt95551//I//B2Pp4OBA1zhEWS6XTdMFXnMn1ny5epCiJa9sPsolN9USrkmjejY11aqcU3bylVdVm30Tr6NMWn0OQNY2r1rFzTP1G4HSbad+6yvXBSryqqWXwk6V8dcjrGdg456bgxQGluIYLMuKWSjWM1OsiuJHBQAghIRH8cTEvXW6WjPd6s9lNaGCktZ/Sw5nPcJy/8IRlO+XP+t3qefn8suiZrHJ1dEpQJZLuKp5qqVUGdUTrlFeWh1aRBtUArWtm81mk8lkMplMp9Odvd3tnd2dvf12Mm2aZjqdNk1jTaM30WxetYPJACCr4CweXSKyVCGZiyhQM1EG7DJGEKJwXF3ExWkcgvZI0L6BQa1zhU8aB59BJSUEHsdR87k4w6CWEiM/qAfb6211rVOSMyKkyuCRU8+AtH/1wMqGromnpg3IrvgCval3yLAkq5XVz6UCW8eDLMgCkSVGAOi6zhm7WCym0+nW1tak2377nfd9hBt3buNsNiAEgK987evz+XLabv2hf+cPjsueWFrXQeQxxGEYfBz39/d2dna2dncOjo/0Xx/D6fnZ6P3J6SlZbDrnOjfGEE7Gf/H3/8Vf+Uv/l3/wD3/+6Onbf+4v/IV//z/8o7/++S+EED/9vZ/+hV/8xS/+9leneztb+7uvv//W7/53f/zLX/7ywf7hJz7xyU9/+tPXr19/9OjRZDIxiBcXF6oV6StrhEJj23mXrbb/hqzSmRnH8fr168vFBcfQNBYiK+jeGEPxzWiE3lob/QCRDQo5G0E0vU4FcFkvzKBpNXvZ+Hx5P218eXn7bHCMb3NcCSJ09Znrd8NLUnbjKCeoo1v/jZlvEFEI
4fD4qOs6IFxRcvZ+rXo+UqLM8naWqqzXmi1WXInqbzY+f/tB6+6dTrtPfepTu1vbAPCtb33zl3/plw/3Z1pIrgg4AmKMGbw3xqAk9orrUfHLU7bBUgFWwcLC0OEJMgAgIqpWAZl61kRjObOWZPnDE5enunIzu6Bm1nLJhkO5Agpj7Zyk1lcmQ82VVkAfKxGr/yrmUS2oREQgZx0XaGjRO2OukIGMz5FmsngmNh5kyIhIFBaoYzlreJBrL8IxeYqIBFfGbj3JK3JnXva91vsys/ZdFxFnUiNYTS2DvFEBQJHMdRRE2RwVRiJhRkKkUnQkACLrOnLunYubS7+KXSFiWdyVY6Y6X0RWy1GKoQFE43P6CkWQUAWUv7W1dbB/OJ1Ot7d3vNeC11Z5HBloGqv5zwrkVOCvAQDBlAEohJap+jRjlXh4WSut6HMVl5VsN3NklZ2afh84lQsXT6MuVO4Bh6zdEkLKcI7R6ytruJpTchwoYhfzCgs23Somhn5525bxcy6JJKIIqe5LRAp3Y0bJIDAhBGEG8CDEzFqLFQLHGPu+nxlDxuzt7T1+/HjsB+fc/s7+g/uPv+a+tb+///RLzePTkw9ev/Z3fu6f2tn2/+xn//gf+SM/8/TNZ/7y//kv/doXf/13ffpTs92t/a1uMpldf+oGI4Uo1tqu65qDnTnCCOGcQ3u0G95taXe7Abr2zDM3mr1HuP35L3zxz/9v/gv48//FX/5//t9fuv2Bwxs3ljGejcFubw0k5753s/bn//nP/Zk/86f+0B/4g/fv3fvIx24+c/vZb37zd5RUjo+P33rrd6DUWAIAFiC81fJtHJTru/Iq0K1bt9rWnZ49mrY2hMFae3F+MY5BxDBzyU43hvwYAFn5jPd+uRxCYLJgIMmbyGwzCHl5hI7kSnqDq7Tn+ifdoEVjAMgohOunFW/H+pd5A15N51cPA1d5MFcfK5mYWUQU9jE42/oYDg8PydnAI5G1iIpeRyDOudEHRCREQtFyCUYwgIRkJcPyiLBI2Hg8VbE0HWXQ7OMCWMG6W8A6G0ZPRIQSOcYYp7Pu4uJib7t59dVPTBsLGF977Vu/8sv/0+7BITOHSLaZahhKIxUtMghHZV7V9OmfRIRmpTuwiPIsRwCpHTWgcI4+orFG1MAHuyb8GEujLsRVg8lsZmGSUOl0AggiK5Q15tK+t0rewczHEaIE/RVWsPVX0AcWJd2sYTMBAGU/dnJoszrNs8cymf8QYhQAay0hAkjTuKKI6ODhkggUIUXk1g5lepaWGUSFuhHWSF7Nr436QEmBwdEYkzy6GkCXiAAIomA5mIv/kFhkDaUsRWvQ6rCipLWS7DoWiVzaSitxMYOFMQYUQsQYBiLbuYbYCqeUuiiCBILAEq217IubsnKHGIoxGjIIyBEgI7OJAHDZkwwiJWGtFldlsUQktZNOc8z6iixA0JTZFiT1g2hLUbUJMtezSfALlt6Cwctk0rVNu7tzsNVtdU3njDVIHOJift51nYAwY4yCBoDQAI0hSIqtaNOFAIQGTWRdwbRYyiiSeBOJIThHKCDZNAVmYJYYMbcDYQ6DHwc/avXw6L3GHY0xBOjHlHtF4q21SjTGWWbQMus4LJRlSowoASWqjIxZshKRMdqzNgWDEYkEOYYy81SFooyydXKJnnG1UxCxLf08kFQYJGWICABCjAQGyGSPCEbFJGEWgcmkA5DOuiEO25Pu/sV5A9DZ5bV9e3Lv9fmDt3Z3d3cij8v5d71w5+/91f/39Vn7R//YH/u9f/D3fO+Pfvqf/NzPf/W3vnb3wf2j/b3t7e2jo6P9/f1O8aibpus6fbXucL+xpy99wCzm88PDw6eeeoqI5hfL7/zMD//eP/yHv/Clr/zN/9dff3x6cnRw8K8/96u/67s++YPf9eqt/a233njzjd/+rcdv3/udL772nZ985X//X/4f/vSf/jPf/4M/Qjxw9DduXds+3H3vNx+2WzshemcaAzAOAwkBMAoRyhCiNUaECS0A6j6IkRGdCFtLtjX9MBzfOODhpBHfuMnFuWduBDvAASzxyIEpMkV2o48nZ4uegSEe2E5G5hCdM/0Y290JAEDk1tgYuWtai8ZLJLKDjwasyQ5BRDAKWwEgUYhWYI5J2uqejCCCBIYrD02iYxRhzl5lLUFZDxHK6oZKBowgGTay0kefEMDKPuz6no6BCSMhE5BoaZ+yaTHOoo8zoDj4ZtY0zx692fXOuMDMITpjJEYhHPwYCTh43YYWEuIQA48oBd0mSm7CWo/vCj2UpPS9QRFAReIpUS5gFmaezrq7d+/evn37u179iMq23/qtr/3qr/7r3f09Vf8LUEtiW0mkrrG/zQHkXlJYOUUBRJO6AVawK7iWKFRmFiT73CErj8VXmUWXlgVLqcxZG8l669ONWSoPrcdfBPBlz3bRpy4TxGpaKnoqjy4+1Zxykht/XuVyX/9wZWetK4pANsYpIFq5VI88DymZmIIIG4nohLSuVJaXvfwsEcmbamUPGQMBilJISeLntrLMrC0IYmQ0ZIyr71rP2KVXvvy+a3Ar5QWL7xqyybXScpABVh2NkFJ/aEQUrJPUVngX2hmJV/nJlKk0kYf3Xp17URI8coxxuVxq6xsAkJzJWYK46l/FZElbU7lk9UUKqSidhOBNBRbGIWjOs+IW5mlPJyOgmvyiPuScVBUz9FV+X5Xj6vbAGDXImlRkjSIr0EcRqxp7wmy8wiW2WDwEZp2qKxUTNta3zLOsI9VA7umpjIKZARkgl6UZbNt2a2trPp/P5/PpZNL3/bTtQggnjx6rKc8MTz9947/+r/9vX/ryl//4n/qTv/8P/MSf/lN/8v6D+2++/dbd9+4hUdu2BwcH169f397ezq2IJTtpGFgK1IkIvvvuu+/fvX98fPwDP/RDb7755l/77/7bn//5f/qf/Kk/8f2f+r7jo4PHDx/88i//kvf+5Zdf/pVf+ZXnX3z2zp07v/Irv3L3/uPf/MIXEWEymZyfninWpjFU6vQk5XAoVnmpNw1qVIQgzrkYPYKMQ3CNBYDrR8eBl4w0DsFH7kc/9N4YZ10zX56enZ09OjkVER/5/PyCGWazzhgTh4GZIWYfSXJXFBFAEkP6DHQpCFvv97XP6d/cTa4sHxHh5QqLouxWfBI22PUTjsuP/jaHinAQSG1V8lUSotWWNs6eLi5uPffcdDoVqwmPBcwBtRg9xljoNovOxIltXcC8McqNV4VslmlCUAVXrKzKI4AxbhiX0+n05OTRhz/8wVdeecWhMMjXv/71X/u1zx/s7zOCMTZ4rrcbYoHTW83Ihh6QZRKuym9Tli+IlrIkj+mGyMkWqt4Wr/AzlE27fq1kd+Maa6j/vHLxTEbaqqXmhlSuPxdGU96xPqEWBuWhXKF6FV5ZBIOev5pPrGdyxbA2XMq1mMH1w6SGASvZUzivSggdYlmR+rnVW6QpLTMgeTkxqzzlcnU+paEKScouWa2UihxmtmTJIDMSGmuspMjCaruWOdwgjGqe1aWzKtQpLLsoE+V9V2rfetPoJEfLCiJDJsXSGV5
5ZZn2pmmSBWwbItJsZ11N/V4xNzTIahuXKCHzu2oR1wLzxiaczoSAkU82xsREKiu4OqzW1fuyFmmhBYFDauFMRJrtzMy6dNkvrTcpsF8E1XMBkHElcQtt1MyUmQ2g0GqBag5Q43LUv9bbSv91+QAAdfWnG0p9laj3K28Eq0O1zm5vb8/n82EY0A+OjJ3OOIRhGKRpCNDHgIgvPXf7i1/4jT/+s//xH/2P/tmf+JN/6qUPffATH/9o/Cg65/q+7/veOddNW8xMBsDplou5O7K+wu3bT9+8efN//IVf+KVf+h9/4Ad+4I//sZ/9Yz/zk888ffPBvXtvvfGtz3/uX4/94mBv9/T0NEb/hS98YWtryxjzm7/5m69981vb21snJycFYoyyD09EdLa8H5umaZoGSfwwxghEZMhZhyH4SdedX5x3ne3ny5vX91rrlotz10w9y+CZxdiu4yhvv/f+3ffvn5ydL5fetdYYY61xzu3t7QFRP44bxUXGGEFRusVUAqfuxv8/pKBcchqv2PIl8VQYSP0NPsE0etJzN26IV45KKwUg6QRZUlayn2iM4eDaMRoSCSWkYoyB7Mssq5/uXyX9rFJMazGzwZTXBsoRAUrJRvVmkYwZxmXbuocP77/00ksvvfQigCyH/qtf/e1vfOMbx0dHYwzOtgpAUz8OYBN/Y+OgquYEMUW5CInQcOU23zABKXmFy+zptqu70BOuGSLp/hsvXgQbrlu0hZnWwZUigOGStN545Y0T8AolYFNRAFjVtKyaxJVDz039HLJ9sApqRsko07CaGN1JDDk0Srl3AiISaIn5KvuMiASiXkoGkAR5LRiPmh+Y1qK0mk+iF6tYcv36mP29hCvISSV0demqOqR/RxTMfeyJCDDpBYgIYqxN5l2FRLGp1tTTWxSgmoQ2zKyy29XmRpIye2vUggxgdB3K97kfMENWznTtNGOobduunTRNs729PZ1OjTHTadc2E9u4DV3nsodDkkd3NVpmRmaN3ikrLItSLi+fKefQaSpUcfSV4F8IIcSQ8vtyVzST4V2Lb1Dzisv86IgFUd3vSdYi1wJ4tdxE7Evm5VXcsywZsgYnmAOsSvdXYYIi5IqSVG5a9qbOgTFGiCREdVfpDE9mk+3d7UW/WA590xhmjmQskaaZNc72w9A05umnjl96efvx/ff//J//cy+8+OJP/uRPf+KT37O3t9e1rutS143go4ItJ/IAgNwOQccfY3TOfeaHfuBDH3zBGHN4tP/m66+fnZ+89eY3vv7bX2tbe+PmtbffeBtRgGRra2s5jETaPR3HcWybJoSgQQqofHi6b7a2tkTEjz0KlZZQEgQRgCD6Yda52db04YPTj778kf3tnbcf3tva2rKOupl5cP/hu3fvLeb9N775hnMEZCazDrIC0batc24Ioe97TG7t9Gi0jiAUARxjZEABErmyGdi3O/CSsrWm2ta0sX4VVMJ749cree/l03g9iyvtesr2gZ6TwmZgkFAEED3w9sHebG9njCFCFEmShSFhJBQDANeVBh2SLX/UXObbTEr+JrHUPEfqtkIRPD8/f+WVj374wx8mon5YfO1r3/j6a691k6lpHPYyjuOkmy2Xy9pcA4BkrlcR0PIv5QQrqQymDcZavlkJyEpJwzUht1bFW35nliIrcZXvvnKtiAjKinHXHFwrRGJG7KuFCqyPoR5qfc5l2sKNUrnqPibD35TLDa5w2i6rTTpVtXKpnLRMsjLe1J5W3afCIAB1ZrIBAM7NmhAVaCYyc0orQETQUQgAANFmhVt6ZWHJCvLqe0FWTHMCRKOyJamZCYOUURIISXGXJI6WhWj0TESxAnet6aEs2ZWzVBNSkVUiUq/p5T18mfBAZRUmwqj3Xn1btVGIqG26qR6TyWw20xQeY4xwYtkFmqaWvpIdy8ysqMhZWktkFhHNzKJ15PMsdNc8WytRGyNzEureR81hFpEQ9SeOMUru4Ou9Shctg2TEVblaAYopQjpNtZBUzptyThGNkk3VywSfdp8BhDL+zbT/WkEhspSLkq21xV+CiMZQckQnj1jy4GgGXN/3i4vFMAwQoyE0xgZhSXmtGDgOw0A82d6aHB3vv/ve23/8P/6Z2ezwJ37iJ1599dWbzzx9584H9vf3Z7MZAGstcnkJEBCJzIIGHBmBSMhHh/u//du/9Y/+/t/65V/5hf/rf/VfPX3j4Gtf+cJ0MhmE9w92kECrVcg6opTcNI7j4eHhw0eP8v7NggeRIwPzbDr13vdL0pdtrAUAcRBjnE27xWIRQpy69jHA93z3d4tIRNNMd+7fv3//3sN7D+5/+cvfAICtnWkpCmVma+1sNlMF0S+XMUZLJIRGovd+jGwAmNk4q9F9ZhYNVa7vF0aATeb0b3sUTrghwMqOKB/qzX75hCu/ufzr6qEIKKBR5FgBEaIwAYo1Q/DXn33WTrsRIwGJoHEFu14jkivkYAB1Pq/GZuuH1a+6wfo32FYt/5KGQtD3g4h8/OMfu3XrFjP7MHzuc5/7nW+9eXx8HKMsl4NzDoSisDGmBG7TFCDmckuqZrL6sXpuOUSSN2DDcEFEqBoT1R/0voUV6vlZl9+Ur7gBbZF5BFa6zOZz19fvSjqQVcj537T8lzzSBbhAU2mIyOAqDrQxRXmQq9BvuU/hmNUANHq0qtiphsKa/ZJ/EhIUAGYJvFa4vBqtJhowQCqN3fSorzaJIGeu6llEvCRDTlMftUsVAkat9DSGhBBJrFMSSq8fIwMgc6y9x+VlN8zHy5O/8WvZM/VK1b7l+qrN1UwnlFJ1otyPVt+LiLSpatO0bdNMJxNNH81JSUg2eS6MISJLtipkr5QDff0yWilxx6ywQrahjTEg2gl41dhK5Z92G1XDlzO6hWQfz2q/AGnsSU2uMrHGGOSCKorF9V2mUf/M+qKvYx+Sazc5Fa2uU3sV6YCiKUrRO9cAQYs36PL6Js9NRXgxAtoSKvIiEcB2Xbe/v+/74fHjx8OAyNHHaA1a65jZumY5DFuzyda04zD2izhpm1c//srZefxL/6e/bBsaBp507od/7Efv3Lnz9NNP37lzZ3d3d3d3vwQXtJrrYnF2dnZ2973333zzzc/961/9J//4nwPAP/u5vzmZNpNm93t+13d99rOf3dra2Zf9vu/39/f9GGaAfT8Ow2CtfWpnr+/7Bw8eKC2JgHBM4XYiIOrnvTGpV0eIXlGIrbUNuXEcp9Mp+7BYLAHg2TvPfeWrX5pM9+7df/yrv/q5N954FwjaiXZKDipKRdg5p9LXEg5DLyKtdRZVGmEy9A2NIW5bi4aCcF53JcsnYg/Um6hmxYU46i1Wi6Sa112Wo1cK4NU52ZdZX7K6FgBljRUEBIsIrE3kQSglKwkLGeOFg8G960cjcETgyFEAOblkqscyEQGnyrDEWABAxBY2XfPiy6OvhEEJfa0ZyprR8LGPfeypp65ba733v/qrv/b2228fX3sqxohETTvp+95aS2LJuhx6xgIslbRaXj2Oi8UDQITa+9OYtTJHNfkVhQLK9akcZc1ihrS3aWPSy8uuPkglxm
poi7z2xWVXhoqlYbtsqgtry78+z1eOpKazWgCnR69cx+mhtS+hnFk9IoqYssqJmiEKJNC3emCp1XplCGa5VU8UI6JmAwGLtlcmJO2LJxmBIXnEZOUSqFVjWI0EWRiAggjzGDz7GKqMHlFMfGOxaWxLljKukSBGEVtlKSthICXtqhYDG2tRb9ryE1bKFmiLOv2TknwSEeGV5yC/l1KvxFimKIKsSNSsGg1RZprSNM2k7WazmbVOrd5CP5pf5pxDwpw/5QxSiak3NunX+qcxTi9Mi5hzOLVIaU10YUlaXFM7ykJrnnD+yxojunMBoGmacRzHKtZTrNsoTEyY+InEXBmMqeYey5QSEWIT0ZchQfZ41RskW+rAFUkXKqK8UrUAJiLNwS56Q07eQpZowNZ7RERQc4UFwRqDgCxEMJm0TWOnbbO7u728mDMzR68o8TF6RvCRfYx9GC/Ozpnw4uJivlz03n7iE9+hrtdFv/yNz/36z/2TfxICMGi1N+QZA2YggKgWlYGXXnj21s2nPvMD3/WTP/WHv/OTn/DLCzebfecnX/2Xv/SLltA6QsSdnZ0QOQSez+8tl0sR8d7P53NDq3wCVYlKJZsfxmiN91Gz8QBABT8wWGv7vm+a5vTs7Md+5DO7u7tf+fJvvf7eu1/72jcB4PBwV/tiIJoQehGZTNrZbK8xVoAx48gRoHOOBERCYxteDkm/YdQEwOi1DIRAAqJVwAn4tkehRhLgdQEJ+c/CDKFi1BvSFyu6+vYPWvMHrt8TN/mDVuWoUMhsNv2JYww71w7t1nQeRjGoilq9PQs3xnWPbJmRTQFcztj4c+OzJNVbTdgIAJPJ7JVXXtnenk0m00ePHn32s58dhv6pp55Caq2VYRhijJPJrECrgMFc10WlBGJj3OXAhFRl9L+1OcUouMIbAi2IJAFSPNNNZzWuoMPzXNDKOYbZdF5x5EsdGOtf9UtepaGuurwVUQ2Vybsy7DJLqqVsPfMb35efpNL+lPOW8ZdnbQx4bbokgSgRkaBItl027lxujoiSKs7XZJh+YEJDmpcLmWeqAqsdN4XWzXdgRlqh80iqrcIIPPTex6Dd9AKnxsyIaAEBxDlDBG3rrLXGqhkXOARsGoOuxAVjZGNR8/xLWlDJSoWrjloC1XRORIqaJgVRq0qtz68Q60lbzUxFcta6pmmKGarFPJO208xblRlN4xCxaZwawWqJWuIyKl3Ry87wMrFSEj0w9dCVbL5Tjok6qwpirO+g18bKs0JkCyVLBlhT5h5CABEAzB2TIjODgJCq9cLMMfhC9iogEMFUHTUAILIvVEpk5BKrQUSQBBO+Ypq6HXLuIWaVXc/XpPGiGeirqM9Ke60UzPaiZjEzolWtSPtKEdHObLq9PTs/PR2GwRmLKHH0jBACR+HlMDw6PTt7+Gi+WAzBD6MX5MXYA0BgBoD93Z2joyPNz0j7kUEyFEPbtmAwxmhQZrNZ29D2bPqhD76EwvOLJS0uDg/3P/3pT//iL/zS7s7hdLqlsCcXFxdjCA8fPjTW7e7uOufm87m1FgEsGbSQYgcSRAyiMUgGLQrEGNR9aI2LMbIgIkUGH+H9B/f/+t/6W//sF/6lIMxmLSIul4PO3tnZ+WzW7e3ttdZZa1kiBzAIIhzGIUYiAAkxQiRrmFm7twChtTZxyxJGTFSmla+0hih3iS9R3ZQwrqKnG+KppvmaVSYxuZ5J820OvXKD8ERytmgeIavhq/+peEYAAG2A1gf/zI1rdtYt/QJyY3XMmXFKAzFGYKxLMov0FZHssq9eb/1tVy11ESnl8COqXYAIiGnHPvuB55qm8T6+++77b7zxBgDt7x/FGNE4Q+TaiRY8YAYGGscRlICyta7+CsnAXd57nUWNQMznc32ZgirCzIjQOac737WNoteW4Chj0DysHP5RayBWvCZlbxbjNVFPZQFDlagMVfiqJiM1wHXq6tBsuUNZ3WoYm/J1/YYrwOR6aZxzHDblSuG/xWeY0z1U0K5hreSkGA0crOyh/JTkopRKz0CwgrG8IDNoSVJ+btI8vPfOOYgJaYFZxuidc4imVA1Za8cICVdTABEi4OjHcQj9OHgfx5ygJxKUtQeO1prJpG0mHSIu+qVB6CYNEbWuUYkSIxMJgAFYyR5jk7OoSKCVjlUr3UQKarGmGyFzBm0oLLvcuQQyizclhKCe8LSaVS8EY4zihwCAwmhM2k7bGek3inXlyCQIIcIY07K6xhljAMRaGzjqOBEMkVWmpp19IXuDEVENynWwSQaAEH0M2imd9UEFcaUQXkWTqbzNGIMp8xqtbYhGjtE5F0JQV65IcjXVjOySuoOQSoHjhrJSlqB8X6aaI2cAlqjFwzqZcb1qvxwhhDKr2p1Jb6hLAwBAChOw2tcAGitYQ4NputZYRBIJkZnHfvCI4xgY4mLeL8O4XA4X88XFog8cRURwzJtCWLGA+l4Q1CNSXk1EVBKDIRF2xlw74ofL+ae+73ehwDvvvDPt2q5rYtt953d+56/+q18fgw8hLBf9GOJyHB49etQ07WI5NE0DQpPJJOZDN1eZfGIIQ7CkMSIQQEKKMQoZZuYYDUg37b74la9+/gtfPjjYkaAhCBn6ZWS4fu3w4OBARJrWhdGLSNQacQGOPox94KbVjB8QMmZkvri4uNZO++U8CPR9r1AqwsEYU0BcGSH7op9QwCkrLiQisCLFFakUh1ZNReX1iagU9NQUWBZ7nSZXFnPZ4zUNF0ZB6irJNrgWRXGMIjEitFvTo5tPXQw9NCaE0LomO2BWfJ4AgdZCn1DFgOzlYeVNQUUw1+dwTHI3jZWUgumtt9557bXXZrPZYrEIIUxn2ypu+/k84+cZss6wiEjw0ZBlZhYQiZGVvwOSBQaOEgOrLyvG2LYgU9HdqKpukdmqYmjkzFqNmRnKkYlhiEHEWtsk7AdhZshpKSTasAERUyir5hg144b1eFWZqDI59RReppsNnWbj11oklHPKTdaZRaoBlcpNV86Rysiun5KsWAnlNGYGMCEEY0AdWdk+c8aYYVgiYgGMzcRtYgyVPmG0GIyrhNtJ0wJAIFIDzhjTtq1IgtYqHdEFJTLHKJ5jjNGPcRxHdTuPIShcA5BWHwlAtBZt44Cw7/seuLFmNpu0bSsiWDoPZk8E0Uo6EpEmOoGgCqSyXrVipB+KjYuprg2u9O3Xa5fns8Rc084nIjK27F51MkPutWCQtABJe8c2TdO5xllH2UEdJCFUZCPKsBbRxhRuICISkASBbqDiR4mw13XE9CsUHXHlsGEuqFUCFd4Z5QIBa63COOqrWdtE9FzmKqf45lk1AECmSr5bgeuSZqdzqo8SyaXPtIaGtrqw7JpMnJR3be0DkFKppGpBjeiO6zGFwhNXW4y0/mwFxWWMiTwaa9uuG4eBhzEK+8jLoT95fPbw5HT0/myxOD+fBwYkG2JgGVfzn+uMGXQzrhSgSr9Juv6jR48MyjiOr7/x1qNHD4yl60fHW7tnz975wPd///f/d3/9by0X/enpeSRYLoZh8MbanZ226zrXTkCo7+dN0xBB0zTL5
RBjNE1jrSWhKJ5FkMGQ03xARoghkHECEBlijJPp1DoaxtEJ9/3oAxwf7Tz//POz2azv+8Vi4b0HYBSWGGP0JqOpK9Yqx2icYTJgCI3r/Wgbp50KQYS0XX3kyAIEvKZuMa47aQAKDLnIJf24HIV9rU/m5jmXvxS57G/OP111cwBtiZaflYu8CHM1lPJV58boD566JoYEOAhrEjivaxjF7wKQQZDWY7cW1mRMcdUm3pr2QKyzqAv1r4r5EPE8nouI9wl4Pca5Ep+PzLxUKrTWttbVWiFUSJBq0pUaf8yBw2EY5heLJCok8Y5KNiTzy3uJUaw1TesQcVj2SuipYyhHAC1cTzPOzBGEDBEgc0A0qsIWNgbrId7LmsqGrN34UP6saWXjnMt3ftKZqxMQ0BBV7SXqD7UeB5XXVATVOVyJWwNVqk4xpJTrl+GVERpymQAYADjJHTJEhFYYQxJjSLYxxmijGxZGY5i5H8dEyO1UBPzovY/9OAzDEAIDgIYOY4yCACERKyJGgZgcL65tu+mkbRoXomhL+SAskY0xgCxCxhikytWZUoV9jFzmeWPeDCJIhFyZpvKAhQ2VJJ2KL8Cqt29elyI/MkK1rBXD6KHqSNu2KKAeIM3NaYy1joxFShiZLKztGaxmH0jxXrpGvXwp22iFf77KC2NmoGjJqQOZV2DIK40tLVulVqr1JoKENmZguPQNWRFGjCqcEpREPoEo9dVh1nToTdVTNakyUcYki5CZER0Bc5UcpyPN67IqdC4b8ErVs7y75p2okV3fs+yyy1uJsr/UGisiDIKGhEvvIGCECLJcLh89ejRfDvPFIop4Hxf9yILGQYzR2PSIwKBIgiIiKVgYlaOuOAALWGAfIhEb6hp77/6Df/Wvfu3wYGdnZ+v+0f3trZ35RX/9qZuz2ezrX/tm03Rny3ME04+DiDx89Gjv4Gj58PHJycn2zkxn7+TkZGtrR7u+2tZ2ronSDMPQ+545MmgXXgNoQghd18UYY/QX84u9ve2+772HO3euHx8fX7t2bWdn5+TkZL7wSGJQkDT4xHUJBTMbskICxo5h1Al31o4SqOSHrqL7EnBVlZCZ2xVwQFcy2EKicFVWs1Sa9L/xeJK0ri+/UgYn5QBQCIHVbBNEDMDBwLWnb4ozRIhxBEOBozGuHvx6p0UUwQioQU4QYEHN0U/F6QqGpUPp+z6GlWairxpjrMtLmLmIc90YJ49PNeI1DEPXdYvFoptNC6YoEfWQ9lXbtmX6Qk7VwcpYVIUCDI0hhItzdd+p/CjutTGMMyLtPFqgdkJga61xTUIzA5QEkqg8K8sJJAZAIPVcciqcrUEtLsHBX6V84fpR5qo+U55AQJdp4vLnWiQjonAqgC7krrujMKNKUaNaUwEgESgcllY5rqhmhD6rbSfFTDHGZJcqxRgUNRNN0utVvG1Nt8vrGNeEEATAjx6RgAyk5GcwrtmaTETkbOl94MGHcfTjGAavCbcmISMSYq4gKwtBRIImCJP3gxr01jBHsYgCSFg6JjWtlewJUKpIr2c2J7b8q7l6sq731HGEei0wx3c3vkTEZG0DEFrjLCJqsCbbsrZtW21ib4xxzjhnWpds2jL5nAu0iEhNVc6wX03TCELxTGDGkCKikqAeY2QQkBKCzWrHSrBtciLMDgBmNo0z5CJGyjk+OR06wU6nkh1IdIVEyJxnb0Xq9bTURF1WEwAMoirqNdfDGkknX5KpMamPtnEb4y/vVZLRyp/0hA6n+apU1FTtBWFMHUmAkAXHIVwsFidnF4tF3/c9EEVhIuIoIYQIAlE40X9mnoYIU/fcvDUQIAMlhjh6b61FsD7iO++89/DRg+vHR7t729dPj69fv3F6fvH0rWevHV9/7bXXnnvuBUC8d//+m2/e3dvd+uk/8tOPHp783M/93PHxsQ+DAAzD4JwLYWya5uLiYjKZbO/uhDBGiBRJkERYAiBB27YKeWYMWkez6dbjR+fPPHN05/q14+NjFcznFyf9MIcYDIjnkBN1RTcycwRISpxBYkQQQKC+783uLoXsM0s7GRCJDMUMC19mXkQAUtFj/eVlUVrTqnL+WqSVdS8yq+a3T7pP/aC0kdfJqZbBAmC03APTeSJikJiwF99sT5vt2cIPgQAIrLXIAimNjAGAUvlSsveQJcUFKk+nbZpOt24IofQwEc7XrFRIAyAxMtHKyVNgbhAREJb9UkCG4IlsM5mOIaB1mrwHZftl1Pi+H4kIzBpAFySGKMw8jqMyNRHROsUSpi2Q35IBoepAiOTqCMq1uYipW8gYfNd1HJmQnDPJKSqSkGYpUo4qAQBA8jqsNu1qR63lK615ugRS2zvJnekAEHI0ZN2FIiKApJwHktTJT5RyeqEVRCRAKblXNQlKPntDMNTMWtNq9CfNGjUmRdAhpwhJNUIoBTaETTORVXMhBkCNLrum9d4zRx9j51xk75wbRu+ca9oOEcdxJGO3trYmk9nJyclyWCz6cbHsc+2pPirqU0hMkb56GGcZwMdA1ESDMYoPjGjU4QagidcUw6qhng6ytoestVI5wnDNGl7To2vtZGO7lmuLBVZOwOy0J7REZJxViGZEdM4pMnDTNM4Zidw0ToVxcTYkVQMw5jyUPP2gd0gaJ0IZm8Vi5IF6YuuFK+JEZ8DYNRlPVXKsnqZ83Dk3mUw452nrvsPsCtDgse41QEOGKLtUyhRpTUOe4cw9AEuFYdlNZQ5FsKp8W81zoVvMcZPVU4ALGZe7KX+gHPYuF9abVKSUYNHaXqv4hsIEkG0s0Dws5svFYtGP4xglCIIPIcbYdV0fxuVi8Bx7DivFQrtBVN26Ms1IMYUjR0SMMfYSQwjG4jR2IvL49MSHwILXjq6/Gd8koqOjIwB45533nn322f/0P/1fPHXz1uHh8d/87/+2gNjGLYeFc24Yg+KcD0P/wQ9+8Nq1a+++8c7g+/nyYrFcoDWFV4z9gIgIiDE0lk4fX3zvd3/k4x//2OmDuzHG4JdAOGlbDv709FSjy4hQqsMDw+hjZLDWFjvfOee67vT87MbWFhGhNUCp/CEyEwkLbdi6l0VjDdgAG7zxqtjfBoWUM/WrK/fslRfW3xRS2aAHADCAEQEIGQQQTCbS5Tg8dXhTnBEIIQTbatQpjTx3FlfvlFHfkHZdgHVDy56enpZNmzRchnoo6pstjIYzzwdgqdKMNfLnnNNmKbp7DTmyOS7CzBwZVk4nxgyKDwWJ0MYwKjXH3K+bsjtRlUrdWqVUMYSgTjnN6moyXw4+giChiRx93r0xsslWkXOt6cwwDGH0WrRCmnKLSGswX5t2ajlq7rnBwespLuZIWVd+crMXuRTKrXkW5shi/Q2XjmB1dDNTVWQhIkDRjFUG0bwMIBAkBpGsKUTWVuVJwCf7CVMeQfCxGjkSkXPOOEttqxFZ670xZmLdwcGBIZeYNcflcqkh2HnfPz6/WGhPu5BwHrQ1ukQkopIQVuakxEGYXdO0hhoGGkcOYRDGGMWAIAhRFIkGKQYFFU8amxKSThERFdlQT3i9UmnzrPseih6Ttqg2c0KmyqOb
HoEWcE0qEFHXdV2nzQTJIIElhZl0K3ONtWgDQC0DKAMmImMbk+E4SkXchoEOlVogOX1BBVJKZiRXjDyoEgY5leus8gfbtu373ntyzumG0twLGT2RdS7llqv2loXZSvri2n4xuNL3RYV9wQXR6WRmIgsrmwaUdMtCFJmq+8IAph5wUGI3qzJfZtbWw2WnhBCs1fa0VKdbUwVjV26u9GZMy9HHKMAIQCKpW3CIASDp165BMU4kttheXCwAILAAxxITv8zimVPDKJY0WkOEhliQBS8Wiwjt/UePAQjRXCyWbdO++uqrP//z//yZl57/8/+7/+3NG0+/++57IvLgwQNllcr0ACDGOJlMFovxox/9qLX2F37hF7vOOefaaatSn1mUSUqMztDp+bC/0/3wpz/58U+8QgbeW56XBHJjMcSROTRNO/QjIwROrZaCxOTDxGQqxBiZwDnrvbe2GXNyqzJELvLskijM2yr7tXMUoxa6m9dUro7SImlN9F51/rf5dW2bZ5K7PEgAUIMg6iUiiEQCg0RG2D08EEvWNH5gR0ZbT1POty9j1kTgRMYqQzUwDAAIVi3UNClVBwIRASC8otlT3Hj/2gIYx7EAIzSuU9rd4HG8XkErIpACkFB4WmEKukuh0oyKyNHj9PQ0/ZrzMn3UBjUcQRhBCMMQOCdP+jEU69nkqg8Ug7gKL4mkLn2Y6vTTQZX3o7xCEcOSTcYiSq9Y7HW6QVwX9VfR34acwAzCUIRx4SDF4MuzuvKHa3i1BAUBQPvcxYwvRpgSp02FRKb4DxwFAV2TEoWMMdpsQB/atNPZbIaIk9l0GAZE03UdEXnvl8MwXy7m82UIYblcnpyc9H3vgwTPHFNbQhHUZGxZ12dRzRPmfmBrrWVczPuL8wVIbJpm2rXnpxfWUtfYprHOGuecdRhjLJ3qpUJgWN1zfSaJNId3Te0tZCabbFTpUKEZDeQ4JeVmtNY4FY3GNfpo51zXNZPJRE+zZIiobdts++qjCVjAFAJIXFVJ0RA0TRPCiGhcxo5QdsnZgi9bBhROj5IapzLSh9FyivcjoiFiEFulC1hrp9OpKjq2TXa5AaPjTyQtoJnPTdOU9OkNJ4G2ZBDJ7SwRU8m4CGQNfOW5eQIDpcpsrVcNAEiADAXJbSjrq4gkwZatHN2FAErYpa4OwKoLeBSxWdJb6zyzSEQg7cdcqrRZgnqrfPCRGYDJ4HRrpr4E5hTaZAABhe+Hy2EsaywzW5d6SorIOI5NawVosVwux+HR6cl1186mWzdu3BjH/oUXXvjQhz40DsFae3Ex/8KXvoiIi8Ui5lbNzrnFYmEs/eIv/dI4jrZBsqQFQj6MCMY5S2j9MEjkZT9/6dnb3/nJj+3udNuT7uTxQ4vQOTuEiEQhjDHGtm3n8yUYyqIOEbWHMhrbiIAxBoWJwHNARBAYY1A3j5pqpVabn4DCkRmdRknXw736Z8UJlVquvM/GsaFb/9sfGxxg7TdOCxlBNJDFzGMYj29d3z7YG2OQSl0WQgxJ/CbmAKAoqvUwQQu31AVdv3mOcK1YsP5UyoQ2hJ+eqwJoHEdKPVgUI4nHOMQYrSGoj3wtMyfmxwiU060RrLqF6zglaAGUmNzqmUGMNcwcmVFS1YFORN8PAKglfeM4Ztc05V0HgUNrDUfxY1hhKROimAKry8ywsibraC7W87OhWOjhzKr74ZUCdbUOaqnw5jkbOxYqeipPKVm7JfxZbNYN7X6D6VCVwFIyk5mZcjPto8MDABjHEQi3traaptFqCw1ShMBj8EPfl/7qOIR+9OM43rx5c7FYGGPefvfd8/NzRf8ZhvH0/EwdFSdnZ4gYomg3+DF4Zja5zHTtrSszKNFS8DCOzIwAzIqcQMwKy8EJoSyGvu8n07a2TaWqRLo8/3kxr3BFqh+lnFaynYuaT9oBqVT3Zn8fERlrbW6o0DS267rkjjZWc6Gdc0RpyYqZrvdX8IqVV4NZRJbL5WQyq85J6CiYvV6Iq7cREWHR1RzHsYjJ5NIILCImK9YirEggWqmvsepxHInJ2p6ISiNCSfplsFW/h1pGMqDGnct4KNd8g6wykEWSwkVExrSFVjcOyhW9mI31NYGNa3sk682ImOJTms9ljFENvmgkxVYuG0EnUdcuZewbhw0BRw5xNpttbW3NZrNFvxzGMcaIJAaMcZYMBmYDa7AEXM0MigCk7hcEmmmDggRgFCw6MIMh4OjQ1VEDdV3cvn37Z37mZ/7FZ/+nX/qlX/r4x151zn3wgx/8mZ/5mb/4F/+i9jVaLBbOub4fdna2bz3/zJe//NXptJtuzUSUBwsRCcOy78fBH+7tb8+mYex/9Ec+g+JBAgS/OD+nlC0I0+n09Oy87xdN056enkNu/xyYKfuiNSmkQRM5NI0VJBtZxvj48eMXnrvTdd3YL2KMWFdaXuXdzb/C2pIioqRe9084XzbOL6fVxJ837NVmdPleLnkp4Co+zCxAVCCgEVHTMg6ODq1z8ziatN1TaZ/TEqHcZz2/e3kFTNs62/0WTaP4+TqaKKmc2biGmQVABFBLCABE2BinFJZtd43iAKIG6jnlgIigRGesQCzSmojAoD4YbXZhqedM+WDwsWJIwpyVKYgKPgLIIhxigssQIOf60Rtj9AWHYdDhTSYT9Z5phDILVtEE3RHIs3hWexpiDCIiPjrnjCF1WVlHgOjQaoI1ql0iKeAN6itEEmE/RjWmiUhAJ020t55EFpFkZ6/AGVLWJYOmD4PCU6hygVSFbBMeMhtLxhoAkGg1HhCDLC4GkV6fO5s1ZDoAjMJIRp1xzmoFqnTdtKPZMAy9H4XRNY1pOkLjrG3bhgTazh0dHezs7KAFY8zh4eFsMjk7PX/33fcfPz5FMcMwLAZmhkeP5gwxRL9czgFgHKIGOE8enwJACGGMQUTato1RxnEcfUY+Atcve2Nw7AfvvSEyJtVh29wxt+yryiEVOTKHokpT7Md+8LPZrEHgkWnoAcU5N5t0k8kESGzjYoxxHAVIGFkEjLLpXI1ndAGj5O5nG4oOpNLelX+SiKytHb9iCLTECBGFMNXCOTLktrZmerfZpLOua5vWaJUdogFstdECCwo11jCv8JKMazBijNG1beLRng15ay2iAEcRIxij2nmIhkxgTasmEVYMQsnpSH0/OmvaZhsROaQIQmudzcRmEtAlEqBYGjnEMM5mM/VYIBhnnTQYQiCLDTTeDxJtMGiEIghZ4wk4QkSMMXXIoJVzeOVjKKKUiGIMmsekHSyJbPSsZb4ojAgirLpN2zQxRjDZYY6YE0FAmNBm5zmh52hNoxJPWYkWzVlrR79UyapjgCoklKxkAEOk5VkE4KW3hhBhHAO0NDHbs9HvLpfj/ftOUMbRimBg771hSyIBwRpqTaPhEkREYwMCgikqVLHIY4ygdaKCKOjQmIAT18HAZGS61RmmCTnxfvtgOmnaH//RHx6o/RN/4n/+B/7A7//EJz5x584dY+Un/tC/94/+0T9q2xZJYmQOsLez+8zTNy/OT+/duzdGi1GcwMy2w2Ieot+zsw889xIYMo7Eih+
Wk6kFi9LK6XiCbuKZp9MpsqcQYx9GJg7Ode18OI04glXuyDM3FTYNhwjeta0QYpCG7ID9eRxt15rArSAZA4QBwINwY02MUnzRjLmd+spIYExdc0CdKFE3JAAmONvMGBNzkHw+V3AFABoRXRmNZUdr55QivS/r+peFcdI1FWNbJIIEQh/ZogEfW+NYxO7M5lbAteMwzshCHwNHsqWpSZa4mnkAbIyBbNkjQsqrMGSxQhioFYwQEu5jicJyxomNVZZBbQ4m1UNV4azImKrStH7z8tCNN69duGU8vN7Wqr5EIQPrGdRhlBoqqLTpYtlojFBDI/qscRyJNGhku651ziFJjBGtWdWwqn6affrMzCgGTfYoErMECZpGqQ5bMBTDyver86CR10SCVQZyKZtUHaJEMcs3iEjkmNlHGf2or6N+/hjj9vY2ORd9qrtl5n4cuqZlFtc2xpgIMnW2cV3giGhu37499kPTNIeH+9F7AD46Ojg4Pjw/Pz86OHj++efffee909Pz9967u1wMQwgc4ezszHs/hiFyWCx6AI4BxnEchsEYQ86q81nHI4JerWZmEfFeMztoYwU3CONKzXTjkJRhAM65xjUmBVYb55xzKfjHEQjFZHRlqHamCkIW3ujgXYhnQ7nWzzGXnmfzbnWAIQ2EO+e0VggzlFjKgTZpHVFSrlCSOKJV9TnHitWsXcURIvsYLRVhI6s0OqxiEIXe1NmgrfFK2lQh+HJysbdYW/dlYaxUrbdqmibFeiXk0F7q4yRIiMgIJuheR03YEQRjV1USRGSyiyMlWhsj4op9r56C0ZBVNRyiQUpYXSQWyfNqzJmrUclHhTIsIp38ZBuwgo3wcrlUTMcNAijcH7N5vfITjIFcS7ksGxG3trZu3rzZNM2Dh4+0FaBr0GRsZwGOMWVfmKp2HBFFACKUWm09PHsiwxytNSTiGock1prd3d3t7e39/d3Dw8PpdArAOzs7t2/ffvbO2yDw2c9+9u/+3b8PCN/5nR/f2dnZ29vT0CERta158823b926dXR09MYb70yta12zPD0f4vx4tvvy8x8/2tv33ocY+zicXjw+ffyIzO7E2OXF3CKJIUOmMTaEcQweM16KzuswHyfTbhiWjWkhAgGaxjkiH1gdPRKDc82iXyo9pKkusx032f6Ve1lkjduvtmT+jDlUl+XUFayjZhn/Njzk8jDqP7MMBc2R4BgNkrD4GJj5+Knj2Ww2qJTUVEHVM0VM5tir+6y8TSt9opygkDqy8ldXbKg4M9fcbpma+SqYDqh4q2IaJM/Lej1l/S9UPG5jOsqI1VG0Jubz3XTvacKIVF3qajdsLYkXi75kSKqcMDkzhDkiovdeIxkKOuNHrcqwCMwECIYMSK4PY2ayhIQMSTfRBDgWQFYcAlSLmDCFVIgIGGJMDdqEtJ4SlSUSETMwJ4Aba/PkMDAyIqp9GatD+WbQ9qWaJkBQ2CWgAZIQmIHato0x7h8cMPMw+Pl8fnRweHJyYozZ29nyfjg/P+8m7umbN7qus5aOjg9ffPH509PTt95652h///W33prP52Pwi8VCJAppklRERPCgHFDttmEYxjHotik1JLoEpVFPvd8u773LdJX/TF/2fR88juM4adxk2iFiL+y9b0xnjDHkwEaogqmFMwKoPVy2WLWHq01b6K0oQJB9v9kgtkXEYkJ5lKZpDDnNWy5ezbZpjDHqiJboK8EZJTfc0/cXbSlhCKt+R5E9QdL/SLTZ7apWG7IuWLg/5paCzKx1+TbHbnJP+LU06bSp861EhAw0rZ3ECQCQARpAC5yImkgQomXFogqEiGKFBWOMMZoojLnsBBGNSbB3qv7rXgOQEjR1zlnrQNOvIrMIJeB3EhHGFejVapyU+1VobiaIEQEhJFRAGE3VbFL02teMq17umgAgl2yVb4qzGhHNtmm7rmnbyXQ6m83Oz8/PF/NxHAEZ0KT8waQipzk0xnnvEamkSpUJx0jWmhilaayITGdN0zSzSbe7t3N0dLC7t91Nmq3t6fXr159++umt3a3XXvs6AOzubI3D0rXN5z73myKwvT0JqU9i104nw+PTk5OzO3fubG+/xj5cnJ9c3z/8nk9817XdgziMi4v5GKIxYKNM2mY2mWzPppOt9vHj+yLSWOca65z1fhiGQQgDR1LMwSF0XQeRu6brz/zOZCt6AAgCxMgchQgFCI2MQ9QaxdQFFZEEMXVUA7gkETfYu6wfcOm4/H3NOuCSQr/+iKtjw4mvwro8r08jRAGyNI59tzVZDL0wttPJ45OTjzz7Ids2S+aSjFm8d2U8+W0QL5Ue1c+yimpWhpu14xU1F5M374G1F5b6OdUHqMRq/efGhfWgIdsZG3sDAACoZGIrp6vdlWXWlPWoWNXMl2L4FmaaszENV4UNeqb3wVrSyGV5kA+jtbYhxJxuo09RwC5kzf1acTTn1FHJLOIjYyoix5hj8+qxDywhsnb00+hYjGUOEz6RtVYrNzBjRIiIrggzKwK2yrmu6wBw0S/H4BvrmqZR6KuUhsMSlsN0irPp1jAMbdvu7Oy8/fa7jx8/vnZ03Lbu0eMHT117aW/vxsNHD6ylGzeu6yscHu4S3bn/4N7du3eb1no/bu/M7t57AABDiFpRLVUG6TAMyv5iFO8HrEHE0upLCLGmjVr3uiwIRQpqZjkh3S1Gb01rjAnCi/kyhLC1tdW29vT0vGkaTQkBAEKwGzbZOvD4Zdagh8bhIGufUil/tfQtNq56SmfTbQ39lpBEjOBcggWOMaIIIlprFcokP50ZElKuEJoqyz2fsyIwARCJAAYgNcF11jELYMoY11HpS8XodS9DTilXMqvridWo9d6D90UkM7OKYcxgIJpHEg2qMS25sEtEAFPRMDPHuIKBVE1lFRpndtaKiDFeWbxzLhuxinxhgTRuqlsSi+nIIpBr3zHD2arvwZrGWjsMA+RQbko3M8Y5p98Xo6RoP5cZVP7e5Y3slF2EEAwJEXVdt7u7e35+/vDk8fn5xXK59N6PmUOmaRHRObStZl+brA+pJCBoEzIzERKZSdtsbc/293efunFtf393Z2v76Ojo2rVr16499dRTT/XL8XOf+9zOzvTs7MwY0/f9wcGuMWa5GJxtdcL7vnfOvPfeezdv3rx9+/Y3vvJbf/ynf/bZp58hz+cPHz9ePJi5FiM/vjgZwzDbnm5Np421ljCGsbGmdYphRcwwDAOCYQ5gLIMYY7wP4+gxwMR14KUxzcAh+sG0nfcewFhnJXrTOHIWEEsEFxFJNEC5GeOXHP5cbfknCKdaPJdvNkTDiodU8FVrD0KgJ5iI3/7Qp3pm51wcvANyzo4cZwfb7e7WGEMkMM4YIs+RRHnLWvenwq/gKlajn5O7TPetCt0itNQzxlxAWbWtZhRhkZjS/RJ7vIKBYnbvwLr0vTytG1N2eabqN+Fckp95YvHwqFHFMQpzFOkr/6263xmANfNwGIZSv6RPUatlHPthGJTDqpPHGROjeK+IAakpVYyiyVnqlYYYMTvPgzACSqk/Vk+7AOb2mDEyANRVqgWjSi2rGD
kEVvaULBIyGoBUCzsK++C1KFQIfQixXxKRRQomDjSY3jTW6eVbW9s20un5qY/h9rN3FovFvYcPDg8PX3jhubfeeut33vjWSy8+f/ro8cOHD9uueemF57d3Zl3rRCRGD9IcHux9x3d89K233nr3nbvTaRdjwhk2bLz3xtgQRl5lyoAKYFwlsUu93CUkVmtda7voyd6qfKuImErJsxVlpm2n2tLgQ4s2BvBjAEHXWLIWMYOBW02I0YGpKFxBbhVKKA7SDU5RJFYRw/UBRNY0mmCltJTtYKfbylrLYdSUB+ZAK+8pYeJchIhtjncAgCYx5QemNH+QVb+Bsr8A1IKMnmMDHWSU1pIYrLNHRIrYWmabM26tpjeHEASx6To/n0ulpxIBWWsMstWwvQqbHKlHBEgoWsxrog4rt5lqK3q+cZaDZhulDCmFpTcoRSmps/oxJcVzjOotV0UWJUKElCJanCvMHEXcOuOT7AMDgJJvtXHomDWUq+uY58ob22i22vb29t7hwXK5nM/ni8WiHwfvfZ/TEtUA0PTP7KJSgFV17BsfvG3c1mzmXLu/v7+zs3Xjxo1r14+Ojg6Ojw53d3d3d3e3ZtvT6XR///Ab3/jGb/zGlz70oRfv3bsnIlYTo/yKsXjvQ2DnmkePTl77xrdCCLd2j+/cuNWBOT052Wu3mj28f/++YZg1Xdva6e60aRpE4RDYh2nXIWFjHZB478cYGKz2BuUYh8G3rkGU6XRytHu0PPPzs4VpKSYw97SmEsa2bcmYIJwqqRmF2QACIlcMX13Ul/k/rs9/zQdqHlL9tHkaXLrD+glrGvaGwo3VhZfldwAxZMI4OmOI6KI/v3bjaXHGaycaQ0QkMWZFonbxUi10nyQibZFkhWUoGeX6S4pRtACj5puFDTEzovbAMYU9lWdcqbAUXnB50ms5vTEdhVfW61czoJpRIqLK0cLgNqZVv1QOrqxKt1bfu9PTx4mbD6MxxpENPsagLWnTzZU1cOqzjQAACvyEyDGIcAiRmZumsWRjBkzXPROqUDoiGgPFqWAUIJPZxyiCDBQFBRgRhExkDizLYSSiKBDCWOZQ330EdoQGTBxH7TvWtu3i/nh0dHR4cPTWu29/643XX375ZSG89/DB0f7BnTt37r7/7sXFRdu6Gzefun79+OBwr3VWIjetG4RD8G1Lx0d7164djeO4HIdvvvaGc2ZQt7MQkiXS9BZQkBBVlDCL5EItUGHZXCllL5P+6nNuGlHTiYiQMc65prHF0afSAIA0jV1SlxgBACR1zq+pBapUQWWuYZWBXAw4zGaTMQYzEMpaiNdaYxxmJ3Chz5TlZC0iNo0NY3ZIBo858FHtgss6slp7VvdmNohZYojZ8xkjaAGC/tkYixJZUOlNEZLrPY7ExpSNnDoc2IxWLTmOoF5iAOAwihCiBY7W2khkrSHCEpYSSWo4EYhYAaqXqdZpysoaYxAtExtjrCjAPRbfBnDIJB1KmR5ljPGmadVkN8bE7BqBnGsiOVSviPF+GEzVoxAqxldLgpqf1GOGbG0rPVgHrjHdpJnKVETGcVQdXRlCEbolWpE1Ei5ajrW2mbhJ23VdBwC7u7vGmIODvYODg27S7u3tTCYT51pEdE2Dzr797jsGgShxKkSMws62KuZFEIAmk3Ycx8Y1d+/eXSwXr954bnFy9ujsYoLN/s1rNsiy6Xgcjq8dcgNmim3rppNmOVxw9NN2ykRkIEZW6MMggRGIRIIoHY3L8OKtWxM3a3l5uLX/YPmo92M/+MY6BJQQYwjUNWBImAMLQOprpFWlxU2ywYTXBdJqV9aiF6pv1oRZVTmyJt4A6qyO8rgIYq6Cg15d9aSfRMhQCMGRQcBx7IPBZm9bOofIxpDRFk9ERCQAEiJYFEFmKMhol1527VgLJhX0PsnKr16m8kPftCjFhS5jAnsTRFFl97Lc3ZiUS6wnfeB1fWRjLhBNSY8t816SxYqaH7P1WT4X3qpCN3NMoyq/xo+1V3Hbtnt7B/oUlc2MEHKxrIhof1AdmIKCpigekQAyCwuIQAic27AQs4QoajiuNJtMXI6hMEfF3gqBVekpHkWENFTd4SokhmEAQoVessYKQIgjIlprI+Dgdcmw7XYenZ5cv378/PMvvP32W8a8dnx8PAzL999/9+bNmx//+MdPTh5tz6aHh/uHB3tEgAQscRwjAnAcln0gtB/9yMuTyeTRyWnfL2bbOzxfRkQlGEMacYeY+U7hg2WHlOXeUDPLn4ibcnG19JfoCFEAxFrLIuM4IopzDiElVPsITeOstTEKQAsA1pnW2ci+uoOiKLLm0q9/v8Jw2LBxlWZi9MYYzX92+TDGKbSqntN1nWSXUiqj5MABYoxt2yoiSE3DQGU3rbUxlgTNjcZqp9SkOJqU9UFoaBzHDD0hpfMVAWjAVS3yElcCgGLYcUr0WxXf65hVXE0mE91cbDThaORA1loy6HL/MZ0TBb8sLugQk3FWThARbQsfYySDxpjStCCXDClXZQAtZzcALCHYLMZEJKU8Zv9cLSxNASpZmQSpO6GeRjkurusYc1FlUQtqJkOlHVNVuac7V3WgKEIixpjpdLq1teXDoEWPJdFa9Z7i6YEKgxMRnTNd1ym/0jCZtXZ3b2d/f1ffZWtrZmwDgODcwwePE6MRnk6ni8Wi67rgmXKmCGS9dtJ2Iti4bm9r9tpXf/v3/+7fe23v6P033qXJNu/7O88+03M/2hiMH2VgDkO/MMZYawCtRRrjOO+HwDFEDchDCKHrJjIG8fz+u3df/cirN3afev/du0c7B4u+X5hlJOj7niP0i2F2fGSthRCBkIAIUJPcrxQ5Za9tbPwNAbxx5pUfNmQEQK2ob8rguhLx8h14PSJZjpHjtGniondNM0Rud2a7Tx1Hg6JsigUIFS9BEuB8BdpTveCT5iGVx+B6sR0ilkLPBIWTZZtSUnlAlr4rfUTvVPq61yReyHFDSCMmALN67mqWXU1QcrKXzWZy9xVeQdcSrbtApepJUl5HgfdijH3f932v6RVqtmrqStdNbOp7KsxRsSw8AFHaUbiqd0yPKwCwIfIw+hA5Rt0t7H0Qgcg8jl6yNRxBtN0GIjrQlHkJHKMwIvrIDEjWCEIIPAyembuuG4Pvx2H00RgjlDQMffQoMfBcEf8Z8OxiPgM3mUy++a3XP/yRDz733HPzxflyOd/d3YXILGF7e3bjxrWuscJhMmmZI/tgLUmI5GzkQBAZ4mw23d/fPdjb3d6ZXcwvRAiAWYIhkxkN6/xns2wTgLcIFayKki9vg43VB4D1jbM6VBERAO/jYrGAyNrXz6JhjhcXc2OIQabiBCwRGFQJp/4PCzmb9jILwBybKZZQYe6YgSFVADeN+pxbk/ElCKDw37zxBFFMJhIiIMCgDY6ydlwEVQjaFCTNGOfUd4PkYzAm2mxzIyIIEpIKfoVVYWaFwDTGEIBI5NQzL6VEQQKHsikomwuRnbMxRjKgqp5kVCxmNs4hCoBl7cTO2HQtCpTWoupYjCEJ4NEnARxjqk+TjHijqfIlVTvG2DTOGlfMREAxG
cBcJSURQQi6QYhBRCw5Q4bQAiGU9Agi5WMlY0v1JzXlRfOTcxpXHdovZHmZAnUVSv6atZYIwKARAMCU126gsemcpm1KzKjW5JTj2dIemHDWTWKMmqZARCF6ZAEAJScyjsiga2GMX//6a7o/nHPz+bxt2+VyaWj1FABQyF6Fo9rZ2fnSN770X/7Zv/Dic8+f3H0IIshysL27DEPTNUyD61oSEeIQowKMWEuAGIN471lwDAEpoa/E4Ofni9s3n/nM933mrddef/vtN566dmPWtg2a7clsMQ4P+1HInA/j/s6utRYlRJPS/ZGFBK+c3nqXpQ/rTohyJtbm2VVOsg0Jl/bpJRmcNrJ6Ktf5jeQsqbXnrtmErHmUMfrA8eatm2bSRofRB8dSOtcyM4g0zgXh+vIiWDcmoUg3W9ILKcFopNzgckbJa4AcOykvnJXo5BmmVc4IFhFbJ7XXw+JcC8/ZWkpUW42vWANFpgIAom6JFZwNXcpTjRk4Wp/IORxbdnWtpOtO0O/7vldTFRGHYej7vkiUusmoMUYkGGMMoBCTQPBBWQBETXVJaWuL5VK3X2TmnK/BCOyTLRvB6D63NoVIx9HHGJs2KdExyjD4GOMw+HEc0QVhJLRNg8W+VwsjJ0ABkmR/XfPo0aPj4+PpdPr2229/5MMvuwYXi8W1o4Pz8/P5+YUIO4POmZ3tnXEcptOpeIVuEIneGsMAo+8bZyad+8AHnnnnnXfOLpYX8+X9+w/dbOo5ep9c+sEvJtOtcRz7vteaHFNVoClhGIvaWWtjv9X7ZENgX0m1UPKosxbY+9EMBhGbySRGD8QxhouLC5HJDDoRmbQNAY0+zCYTXWIA1HJkylnEqneqkMBsS6nAS9ZPjM4l16tzrkhfzKlMxhhnrUF0bVvkAbAqYSGhQ1CpW0NEVMeG7jJmFlwFZWuFVaVm27Yh46JrbpdWkBcK935ANEXEeu+HwZMx1lqNxrmmKfudstNI51zbqDRgQwiuMUhORPwwT470idG0I82lN9pcVygnK0QHNsZo7CpvGVcR3JWSWpQVNcQBoG3btLlQRZ3x3gOTCNfcSV/HUVJurGmUzRhjojAaijHSqiYQ1WbVSR7HkYxxTUMAirKpqyw5GFTUoKJpxVy11Y+jawy5FdIc5FYxTdPEcSjcj7J/Qp9bOJ661pQjlUmA7HIwls7PzyeTVgMZzGKdBbTL5cVvf+3rXWfm87m1VgTG4J1tRZLVNY5jY1PlFQK0rXv06MFPfs8PfuLVjw2LQSL7fljOF8aRm5pFHFxnlrwEAzH6yN5a0zQNAAlgjHHox74fIxpCCMFLZA5xXI7TZvrgvbth6T/9PZ+an1/ce/z+/mxnCN4i4c5eCGOcbVtjWusWywtrLQcfosLcgcCqy4WyALjK9sUK0lgqYQyVoC0coPxUU9dlXlGzlMs8pPyKlVV9+XzJbb/H5WhbOyzD9tE+O+yjB8rqBYLW5VskqPWASyl+ZQDlJ2a2hdnVb6W0Xrin5PQZzJDLUNWzX5qCFdaSao7l13q3p0FUw6KMUbdhc9SkXN1BX2OFUlnbWCqN6jvAuhEGWU3GrHmUc1S1L4fkwkGucnN0inSbaVcJ55y1FGMcYwiRJazkB7PmrBGzjJE1DVhEWIAIfRiNNrcWiCEyhygsCD6yRVIcHWYOMfoYA3NYDLLuqxBMxK6BpWEYgLnUIhvjHjy4d/vO087Zi4uLl1564bd+6ysxxmdu3VosFpZgd3fbWbLWgESO3mYMkBAic8g8BQ8P9oyxw+Dvvv/gm6+/ARKF8ezi/HQcmFkQnXPCQV1/OocbIrasPKzrm3CVR2hjw2z8WW8bREQ0iEabx/XjkEBanA0cLy4WzLyzNevRb00nKms14YiFJ5OJgtqXrGCp6n1V6Cp5qK6jDRUwZ+gUXq8EUKLCAIAiVHJ5rUVM/N0ZlZpKrqJ5uxylxHGj1D37sIRLOO/HenvX01IkXNmelDKQA4AAcILQzEdRl4sBKjlAWxaodRGlSSKfTNd12tdBNQBlEYqGmLTJGE2ubyYC1d51r1lrcyQr7T5RbFckdeEyM5m1sFF+nZUrWEQIyFpLLtU3E5FxVlsbQqkTy6q5jk3nQRer73sOoWg/mCsFislhqp56StjGrjoxl1dQ8dw0DWWLp7ARzAXBOleJ/kER/ZCQiMhaVYNEvfrGGOYUt5pMZ2GM1sDpydkv/uK/vHXrlrroABJeehTmMairU1U37ci5s7X1+NHDVz/5CWvN6cVjDuN00sahDyJN03gJ1NlhHJhiiAHzhChTHAbfjz4woElbdblcNmh87F987sUXbz+3OL5YnJ6fnT4+2Nk9OzvnwV8/OHREQHL37d/pjKMnpBYXUwcq8bnOE1b1vhvi9tt8eeU36XvcNHMvX1Vvn8T5ZQWKuXF/ZnaNGzkcXD+mxvoQwJIET8Y4Q2IopPeCLJXWhi1XSeUyOVYFKud6X6x0kLItdZYkpS9teo8hEWvJh1h73uWnls9cjaPcSo1RrsryNpSIWh4DlHDsKmOiXLUxgHJ5YbJZaRURNqb0zRVeLwosAg8y49AdqDAFekLpfJcyLxQjvsoPjyBlSmMVfCqCX+3XEIIWcqiDKITUli6E4H2Ikb1PgZ8igIEREVvXiMh0Ot3b2wtx5BCZebFYaDT04f0HH/zgy0QQ/fDSC8+fPHp055lnrh8fTqfT2XQKwAia02SQtQZV63eTqo4i1trppHvm6Zuta0Y/7O9uLZfL33mj5zAdQ1wsFq2zY4gc+tTt8pKNKxCZSRGCNuihXrKNnwg3d9PmyflBMUoIbFJOLAAQcxhDqgTd3d6aL/vWNYFZvFfPx3I5L/Nf5GihLsquVKrKylV6aWzVWkuUWLOzLQBYa4phR0SKC6UyW0QsGUCW3AJPR41o1GWt4jaOa+2Y9HH6U2lRXMac7llRfmZ2bAwaQ5qcXM8nc9A2vuqvrvcjIubWNBiZtW2pSkdNaWzb1jUpubrrOk07UtVBR1s8BEVRKJ5eIts0jUQum0s1YCCcTDoyCctWsr86xhhs4BB1O+hPwCyMtm2q/OTk4NXXSeLTgiYMI6K1psj+tIuJWudqgc0ZFavwh2RdAJhcE1wUGszVjDqxpcit1uOLlmNyK5oimDUburoVEpK14L3fbRpDFoAABMjevXv/7r2zV24+c3J2Vqi9OPyUewELACuAyXx+bg29f++dew/uPn74cK/Z2t3dba3r4zDi6JrGtu4snIGB2EdVIAkJkVjMYtkv5mNkRrIQgoQo0RvXbNnZ8x94buK6eTj147g16Zilde7mzZsnZycT24CJwLy7s2MACZLo04bKinuAVCV/ZGQrWOfqV4q9jc9rX16K3D/p5Ccd8m3t5rWDBRDBmn7on711o5l2FxJFgAAtGWMMUzYagUoItn7BJw1MbeuE9lKkC+ekKkSsVJOVKKpxPSBJPsJc6VE/r1AMVCZLoXLO8qMIVL1n0THpUleDPGgsjGbjWbpJIMd664konKtIBRW9pYq34mxrXAmyJ7y+Vn/yIagJwyLBewBwjanfEQiNgPaE
4JjcCfq2unt9DAZtDBxiDAmmAIBQWAQhcIwjlwFwjquu81PtVwIDS9/3muYzm0w1SSTG+OabbxuC7e2th4/u3376FWvttaOj46MDlHh0dAQSLy7O9nZ3rM3IGAJFuYbUbM6w9r9jmXbt0eGuwG0RefDgQQz9a8P7h4db77///nzZEwgawzFoV4+yQOnOCKgo9RX91IoOrruDVq+ZP20oUuU0zqGQEIKivBlEh8ZYi4iDD+Pj0+3ZFqXpN9tb0+Vy6f2guMf5ZZPVi4gls68Qj3bZ6rpO4aWKBayletakvpnWNCUhKLsksSQikYEiaJWThhCisLOuyOCijBbdrpA6M0tkFFBMaUREQ6n2MWd11bsJAIxBa1faAyKa7J0GgOQSBzRgTZPiBSmXzfu8oaBEcwySZjKp+SvZFVT2uM5bHnDMWy7rCtaleSBSWJLlctmPHnPOVMzgdHora20YfQgkEo0xKBKN6cexbZ1zTkenyYwbzLTMA2Znvogsl0soQGZEtoLxazkyAAEAAElEQVSCofX+Y1i8gGqLAxVjugTg9a0RsW26UjGli2JyOUmRsvoglchCBhEtJqNcq8JYoraCbVzs+57F2Bi/9a3XAaEfR2Z2rvEx6BprmXhaIMBxGJrGGoMPHj76nu/+5G984fMo8KFnX7Zb+ybi9t4uzM8uzi/spDGO1GDxcbTWGmMJUARjEC2nCkRalCEik8nk7lvvffqTn9qezt75nTfjcphM2tZZLbjRbmZ+WG7tbt1++plp29BaaRCCkIAm21VySDbF0tX7vTDt+rRK6OJVrt36UPF/5bFBKvV9Ll+BiBAiOBOEpTGz/V10FpghRmMsEWmbQuYg2gtsXfo+aYQigkioNMBVtl61gXV/rialfIgxKoPOs7b2RoVhlXHUAv7KoeQBZX12vU6gKKq1a6i+Q81uoGJYsL6i5SmyitZIgaOqA8aIDLBym8RcXK9D4owx1HadMQaQfVAwZywKR63+q+ELV4kcBgEBzIUxJcpeLLB0K17LMhDOWNPrMQx9i4uLi2EYtmYTzc05Pj7+vu97+uzs7L133tyZTRfLi1dufuja9aNhsZzPzyddY61dzi/CbGqMMyDCApBqKpbLJTM3Ge6YhZmDJZl27eHudpDgx8nTt64NA04mk/niXESiQD+OMrLJHl3YyF3MevEGXdViuKzUBuFtfC4uSsml/ukmyAJamC0GVIlEBnr73fduXL82m836fgnArTPGuMViQdmHjOtHTTDM3HXddDq11sboi/TVihFEbFyj9GmrTtX1sJmZo6eMyaU2WSJmScJVSdG5piY2FYdY6aP1dGlLNxFBBkjJj2spJDVFYTbxcaW/MuQMlGEYY0zu34JUhYgqXfSNfPQckj+AmQVWykQx03FtfblWXjWqrQPQVt9d152cnQOAZkR674dhwCr+ahKsm1M/ADOjMaXlqDGGYdXUyObSZwAAQgNkrW1bTYlPHuaCSaLDLtdSToouNi4AxJQWh8LkbGtNDuVCgpoXkcbYevNWAnhVKb7GXW2XSM5Ak9zARAaZeTH03vvu/8fbf8ffkl31gehaa+9d4YRfvjn0vbdvB3VutQICARZBZNkYDBjPGPzx2GSeH8nPHo0ZB2SC8bOxn58BYwQ8j2cMjDHRVrQIEhIKjVpSJ3VON/3iCXWqau+15o9VtU+d8/vdRp43n6mPPq1zz69O1Q5rr7y+yyXCCAzPPPecCNS1B0BBYWZrEkCJsTMRAWRjME+TEIIz8NVf/VWwc+2dP/dvV7/xvz+1cfz45tbu9t5otJ/kqXHEwETgJUhgYywKCDNS4ut6Op0F0TYnrDrBbDZFEQP45BNP7F/bkdqvDAZXR/smzfrDgfd+PB6vrq7OZtPZZNrr9QBAsRh18xFa3P5uBZos8+SuaIjHGTpy4bAAWrpu9v2RDORVfrt0f/ynyqOxL1eOr1Oe1BxQGAFNe4i46YnS+vxgeeRL04xxM6UN25FVpPloyv2wY/DBYhGwyIKHtvu5y3bjN4cnzG1lXtyeSKm2FbR6RV+OdCBboWPsLokraFNwY3x3ibk751oUEYxP6PLc0EUYaK/48GiLIAmLJ6E2lNLcEENKSzvdcL3WbdBuGwCAtdQV+WQ6SI1CAFEMgAhAiwYsi7qI+sOzLFMFuSzL69evTyaTe++6+w2ve6i+/55nnnnKIKaZO765Qcfo2tWrxpi1lSEKh6quOCSJgvwZ7/1sVhXTMviqdi7P8yRJENBZAnaGQCT1AathniVnevmx7d2dYa/PzNOinExqRHSJWezABXETjjwPUeLionY1P5Cd53TFc1e6I6I2zI43VMGHqjbGZGkCIbxy9RoAbKyvIsYcXQtAGsKUFoUtz/Ner6fuBNGWi0miixBC0Fxr59I5jCLMM5O7seSWzInZQ4ec5npDaw1DC2nunANyvtPvL8435i7pn0ILrWxd0l0rRPU+NNnaAhwTYQAECSINxzHokLTDnfde0VTUMaAKR1xhDQZbtEFrtBQnlyD27VanqMy1c/G+iocrBshZPAAba/Jezzk3nk5CEDWCbdvVtBH51DQVdm19VEweBoCm6KUj4aiF47DGAUCaJM7NE6S50xcBEW2nMxi3ZVSRsWDbc9e2ja00qVt3JE1TpQfdS2P07GNUBfTOVgRDVHpAiUTRUAEBwFhKkoTZl76elYVN0iTtF9PxY489ludNV2Z17UjrSIsMJ7BX2LDxaHb7bbe87rUPlDdeeej1rxWSk6dOVKNSK5d2Zwfr62tTnhljpG4PE4tBNGlSjopJUQJQCCI1G4PBVwS4trJijbl+9ZphROErV19ODFVc7c9Gg3yY5nkAvr5z/dZbL2VJithkqnsR4Gizzt0MrWy66bUka6V1wB7mGK8iUw9fi8IPvPBSTfASz1n6pyVTBh8gbJ46gc5UvhYSZwywaL0zEpJWHwUJHKTTzvXIB0I34QlAQw7UMZrnhw26p7q9uOP4jT+JB6D7YmEUgXlHsqPsUeqcAYiJD3o8rNUDqdaG5mZ3w8PxkHffK50GfEcKYGMwBFx8iCgKtMjcfo0YkNjGjFslmtVxrac6psiq1465Qdo6vCZHbgMihlADmKgKkIGl8bdcUqGDlSg5yuC4sNba2WyWJgkQeO97vd5Kf5Akyf7B3t7ezi23nLtwy5nr16/NJtOiKE6eOGENSuAQQq+fI4u1pOaWdihqx09qDBE1aURInoCy1DEjr/SZc2Uoe3t7s2dmU56ladpPkhDY3+S03ew4RU/GYRncXbEl+on/ZEEOEDDUoVKvIxlDaC1aAOYAKBCCbG/vhlCvDAe9flKWhQqqrgqly652Uky86vf76lFAROW8xmgxkhXRBgQNrSKiMKpYANBC2A6YaOutjbCFSp+al9e4KzsJR9JpaRxrDeLEW88NiiCRgkO19zfg0tzVcbuLGR+r7hxoU6CttRLYGWuwyVYra4xOMr1BTVXFsaK2RiDmH2kmc7xfJNgWtrORmuwBmsQoAlRf9Kwq9R7VG2LX4Yqa0n8bQ+Yx98oYBT40xmG32gJN9GlJe8o
0e7woCuz4/3HR/4xNgRw7Z7T1nkvTBjYgccYY08ldh9Y3pi6ExgcDhIAIqPmA1hitWVSEFmYGxcTC0IB7o4hXeB8wxvT7eVV5731/NdndufaJT3zizJkze+MxgzCzs6nWlEXGZQiYrSUaDHoHe5MvfcsX33bbreOhe/MXfdFtpy/WocqypK7LWJgwmo2MMVyyMU0deeIcubQoivF4wgG8DwSQYcLM0+n02GB9dXV4bOXEeHt3Mhqvrg7rshhzKCbTSVH085yIzp8/X5fToihWVvPu2RQREpGw4AM78vh3f7J0w+cuaI/gKjdPxer2Xzr826WMMFWSsn5vZWNdDAEzahMXZFRJQJ9Tu+JWWgF2ev4CgFXVGw+lhoe2pjMKIem4drvSBRdjvSD6tGYO3UBeFEXYqYtfMpEXvcEYeYTW7Mbch+4443mIY+jaCl35h4jq4+r8pMnGard/mQLilEWC7k7D1BAUb0HRcDRnqigK26aHyOLVcKgYYep0i4pj03HNNXHUJjFqly/EU0XafLuOioANzEKTd1qWZb/fB4CXXnqJQ33vvXfffsflelZUVTWZjPI81xZ1eZJq/ovaMXVdK/dJ05QQQ6h12RNjE2vRSl2XAcVYyrKEmVfXckQcjUY7O3tlHSrfto5eJBWZR3ReLfehu2vN56MUF+iIokgGHr2IoBNkLIqCiHpZvz8cWiJflYBcjMcAUNf19vZ2mh3P01SbV0onlhlCKIpCA37qb9R8Kw259fv9mBU1v2Cem9NIKdO6c6zVViqR1KV1gcTd9HXtvXbvkaIoXDoHgsDW8I0jjNbbEsPqDked4WU1Iz/vu4eIRGAMxnWL843F64hoyQAZoqBtl7IsSxJblqW0Dd2oLVlmotj6mspOq10/VyygdYA3Fa5EoS6rKjjner2eiEyn07KujTGpS8g6WoS/QNT4sUdE1zj2Q1HWIqJqk2dVXBpckYbRy7wyOIQAYNI01QIBlabGmKqqsizDRc7QCle1R8FaCx3w0SbkL00EQe9PndM4sXR8CcYYQ069hJFUIqNzxhmLbQ26xM56jNDmlpcQws7Ozic/+eidd96mrjJhREPsa41qVbOSiAiFCKuqstYiwe23356mqVlbe+1DD9T7lS/9lRdeLssySeyptRNk0HuPdq66JWSyJC9FiqKYTqchEDMTGGutJ6p9ubW1debMGS4gz3MUnk5GYACRXJqIyNXr1we9rJ8n1tper9c4TUHzPAC5YaVIHaW5c4QXueOyz1b+LIH9f/pqhNpRXm5Y5D/Nf1nQYH9laPO0RrDWBhYRsYYQEebRHDRIhBhAlp7TFUDxLZGJWUdNXA4A1MspqAxinnHXvkOkI267YgM6rmBtiNu8iZp/YEvg0HIf00LZ6Z3c1MwYibUE7X8BwHuvJbl6RFUkq74MJBp20oAut/g71lrlLMYYYQxBw1SUtGUSnhkAjUlEJIQ6Fu+2BmjdfjbRJlYYBwXb8zXXFIiEiKaTma85TVMEoy2JOpbrPCyNRAZA1fDUJTGQM19k8TFzBREBGVqNmki154Z5KrnHBaQWC0zFlTI7MDSZFf2hXd84gch7O9dX+3k/y/tZKhxC8GgEECupyVEQ0U7PmkCLmOjycuy7bihAQIvARGwRMUttWZarPQvebm4OBTxDoMRUPpB1ZTUjixaJmUnaJLugR3PuWhARQ0CGAJqkXBGRpv81ICBQE/mGjuiFjuBpBVKog3hGB45R66HDdFbsT0Zp6hLr8jwP1mxPJjXi6nCwvTvt9/vOOGiTaxAxTXNW81TAoinrYMP8XWli08TapIdtvNNaiygogOCtsdaGVmQCYtMRT4JBMHVTXd1AO0WqUEFNgOwDAyIAWcPMdfDxGEV5qf7bpszJkDEEhAQgornNtiuhSSi1qa5Yk6vsnLWJYAN4p4PJsqwsy/F4rCpXfJ0gsARBFktSzeWTYnwCkCWjXkYi6OV9EFHUCy84xy0X0Xob3XBmBrIuNSJSBY+IYAgCDIdDAJjNZtx2C20dwt45A5hCwxMEMSFH4/HYZTkRZTYXETRW1aAQAvtAWIuIFm8BgDPiMmec7fVXZrWMptWlWy9c3jz2O7/zOysrK2dOnwRfAQfkMnXWew/WeAbnHIukLk2y/ODggBkIUAL38lwkSPBpmqZO2UPGzMxTdUc7g9Y2yEhcV9qjiTkQoXOpRfJkkUBBhFFAIBgQEBTPiFYYfR044MtXbmhxLAdAocQ5riur7XIZCBPSNqYSmH3qEmS4686LKVUmT89ePPfRD37s0U8+Wo6rOy7dngx703IcHLPUEEoKdT9zPnhMXUW+mqbjcU2WPJeMYJNs2BuOr+zVBbzm3GtWMSvDJE3dWr4xSpMbN26kltmjZ05X8okvn73+UpaYy9mt7GtHKRAJ+AAcCD0wGiOVRp6gExud66AiQkdJ2CalC0EAtJYEFWGqVQS5ibo2Zm7DBxbt2hiko9YabKRS+18UYLW+AISQmQOIQhuatqi3zuHG7va5E7dZawpfVCRCaBMLZBT7kAAIDAAIgu9Iv66UjUy+HRpr9YSIzJNlmv+2BnVMelrSFCI7wBiFboGljljIzjvj5y4bPXxpmAQP+aK7qnEMCdd1LSgIDVpImi74OqJNTG0Ol4iQIWi8EHMUAuwknULMmmEOIVibSCeVIN6jJiO1eHIROAw73oKo/GLbn1hZnmIvcJu60l2QloMvRKe6Q5qTUXsntfjyOuXEGUWB1lwhAJhMJpcuXFhbW/Xe9zfWkqRFmYBWUdDNZY7L1Ro6dv5SEdXU9a86ZmutsNWkGPXSp2mKxGVZGsAQyWbBXbyA6Q0ADE1jhDmFkAC3QC7tlLs3SMew7lIvdNQ7HdJkMhmNvEHK8zxLnTF2PB4bBDPsHxwc9Pv9rN8YXkRknLOd3AXbroPWJqVp6pJMl1T5e+OxNDZuemeQCzkQyAvaapwFM2tpLnbygLDNZuIWqDxOf+kJ8UR0l0IXIcsyavGh1AaNyXTR665iWA3E2WwGALF6XkeufmlIG0rO85wZ1CWAEilW6rqWCJ3d+GnnDf70ip6thh+hQRJrrd4fkbySJJnNZnqcgU0w1vqYxc0AkGWZemiiv1qQyrJEBCJyqY35WSBCRD4UarL3hwOT9j72rnd/9qmnv+O7v+dbvuVbfu3Xfu3RRx+97+67EXxVcl1Vg8FKURXWWgUbAWBgSawzBhVbxjOjQOAAZem9sdbmwx4AkAbCEY1p+BURSOsJiPF7ImpaNJJBREIBIKPHEFiQRLD2vL+//+STT8a1ihH7uOPRT5alLk3d3t4eIZw+fTpNUwMhT7Jer/fYY49dPHtpWs5mfnb8+EblZxiBGUzTLgURa5bxeMygDa9IREajESISgG5KAEEiJMqyDK0JvtbD4qvaGBNCfePGblEUfdeUYy2xdfwc/M+vfi2Ip5s/Qal26YaGRJce2H7DrVIALZs12OBYR1Dk6aw4duLEyupqLA5ClSaHBqJqgVn+ev7XeISpDdwws1UQbVlkcnIokbh7zrHj8o2/i+cNOrJkaSGgw0eWFlS6RD
bP4Gji0MpPI3tSDqKdwtA0HNwYQ2SlTSiVFgNERLSAR9mcD1XkldjaT4hWpIbONscbQtuWNW5eXJ9onVMbRmJm7JQtxZk2N7fBWn2CpmlohaUuIMu81V13QbhzmcUk27gFIYSYHcptIDPP81OntlZWVkRECzqn02mer/tQ2xZjL4SgqB3YUkH0diLOsVY4BO3d3lWGAAAwtbNSgKhBXCFoUIuthjuFMWq/LeWTdADb9CQ02e8K0quhEqWiNua1TCTtFXWymAClqoBzDgidcyEQCk8mkxCylWG/rnnvYD/LsmE/mUwmg3ytrINzLkkyRARmp54VAGsb6asiR3WaJYZChHG/GgnUStMmu6pigXmvOtPCIcXQgzbJio/lpk29bXh0x6hdOMwdHE1jFkpvsU0p0i/NYl69IItABGdOkkQbZ2V5GkLQVCwiQmpgtDk0zKJ9UVuw0MJoEzXFao2ArypQtwBhXIo4i6i66UOsSVyaqeuV2iLgtgK+ArbM7Ni0rEYLmgmAqqpSraipRwKFgZtnROp8idCA8TWjIWvtaHv7J3/yJ8sa/uvvf/AnfuInvuO7vudPPvyhj3/sT45trp05dbKqZjVznucamIilRN77eEhDCAJANhFNb6FGsjawZawJIrr7cwCvdgsYDKWuTc7ScD8LIgAKAvq2jUTtw6OPPpompJUIcTpzmidBQWvtdDpdXeldu77/li98rZYd1nVtUE6cOmmc3T3YTdO0rIqt42tlPWNkYTGuGZgEIMLJZHJjd6euPAiJoEFbl3VRFBdPXxgOh5NZwXUNgWd1Za1NsjSpuJjNgMg5F7huFFPnDknellARsUXwP3zDkeZvh8iXv4G2MIekhZZcTDxa+ufCb1/1QhZpS5YjJwSAoq7OnTye5FklrAPS3AUNDiu/Czd5dlcOxs/RZFJX7hxIGRZFZpxP/NAVSNDRuOMTo0iDQzJ4STYfHmiXwzbSSwTadBVsWt7OXWHUwqsG8czsWduuLSRkmTb3QcTHQF1VNyZdNOK74+yyeGwq7oPcfP/iE+KooJMl0X0FaeFYC6uEiNba8XictNCAzCwwN4ij5A7tFQVtfHh3XzDiMRFMp9PZbHbq1Knjx4+fPn1ybW21l+XW2rX1FUvGe5/lGaKIvlQLlMNcIrbT12K1Nlcc0fvGuOxqGN6DNOk/hojqwMF7RGOQg4BK3/kCHpWwINzUCwJoY7tGx1fL2XYwmbvMaEmli/9UdqB5vCEETQIUEUNUlmVhqdfrVbNid38vz3NBM55OBoOB9h4oy3LQy/Tnti32VVvL2oTIBsEkdv5ocR6wUzYTdbUw71BiW61ynhJIHfBL0QYMKthagykK4JiRt3R2OgQ8tzWpE7UpikKNYNOpJwaAJE20zIzbyKUW/6QtdqaaxR1NjojmeXmIoKptYN++tLHyyRgkMq1+6UMdf0VIsbYi6vGw4GuRfr+vvXsVKtIZqy4qIxgFsO6/5jzmWb8l/CYLPRJn0wAqsDEmHwz390aqnd+4cYMD3H//vR/4g99/45s+7yd/8ie/57u++7Y77/j997/vyWeev3D+rE1Q/GxlZaUbJhgOh/ohz3NmJmpczCp02QetODLGMCJKaCXuXP9Qfc4LE4JAAEDlcSTQVJJ1LJPBYDAaF08++eSJEyf8Ys/EVoVtKjh8Xed5vjZcOdgdvfGNb1xdXa1mE936Y8c3H3jowQ+85wNJkgSuKTXFuBCEADx3RQAAwGRajCfTWltd+UBECLg3Hr/5wTf2er3J7n4QDhKKcgYVeu3DaqisKkCsQ+ONF0SZK9ntqdQt1l7OS0e+I4+XqPrwbdj4k+fS99CaHP3zV/k+mgLdG0hAtHaq86XJkqzfK4MPiEgIIMYY7Y+kmAZ6uI98b7wOqwWRsduuIQsdUdQYap3F0rC6b9VVQ4Qxl0R/iE1fvYabtipMlBALy9r5piueY31kI9dbiTgej9UWgQ5uV5IkVY3UtitnmaPeR1HKTTILCoS6boAC4jCi3aB8ZD7Zlj0FXli4eE5ijFntTr0/yzIQ0czhGJCey7DWn9kOyTTBWog8tOFN8fzro7TPStxIXa44weiI1gQiZynLMoWYePHFFwd9szocti0ErMEmP8s5M0/zlyYLggCbNGv2nUbL2PBONFps0JV5pa+DQFHO6roWMglSVdXSQvguED22tL94iQiwhlIDgu0SIcAc+AU6ihp0yDLuo0pBBSvWXPRYMYmIXgCFD0YT55xLs4ODA5ErZ8+etYmtKj+dlWsrwzRN1X4e9HNmTlzinNOkWiASNCHIaDRaW1uz1oa61kImgsYMjUatDql1wyw0n8COdgUAIAQEiETS9DLCjoIcabU73+VTY7WcHIgICYVESMgQ18zISKjtcoUEjMbawbX9kaKVjJo/aG3Ub+IBVOxu2+I9ec9NLayxLSlKPI+RMpnZWIo1C8pP2qLeOfFQB37ZdCAhjTGJbfDnBRpYXC1MqOsmOTRPMzRqnTeP8h2MSZ1XYt1sVuR5Xge2abJ3sF8zVHV97Nixzc3NH/7hH37/+9//0z/9U9/wzd/y0T/58Mf+5COXLl1aHw7qshKRmIntEmOMmc1mvV7POSdI1lokQy2Iguafq6JDbbVVF8AEEYXQSIsN3hwDpV7QpvXt9hIAXLt27eGHH97Y2NrZ2YVF9t2QgYCA5Hm+v797/vQJALj77rudc8BJEDaG0zS/9/77fuu3fjtgWN/aSPNkVs+AQmS6eqQBsKirOjCi8dDsXV2WFmBlZaUOVR38eDohASasfOnFN70fOBR1RYSBeVJXdV2niV1kkoKIfOiwLx/8m4vMKD6OFNULD+n848gbjhSPUcouHMkg0h4xLWLuba5kw77nwEyYaEMTQBZBAQGznFrVPHiJ9UEnzzfOTv/ZSI4F1oYgAL51eeuXUUgvcZkjlyPe0PXEdt/dGfGCasCdjmPze9o/Rd08OqxC23ShtXQ7Xs5OGlRkBDG1u8u7oZF5IIuNLEIIQdQ5umBjxeONEZ+25SPWWtNKWd/2YYVWQVmaqTGm3+9rvWmTAobzCr8uE+/usVkESIoCXkTUKYfAyj7W19fX19dD8NPpdH19XSsx0FCWp977umZEBNKGlnO+Kajlzgtp6ogInY2TToy8DrC9s3Pl2o0gnCSJr7UsB4WbPAndfm4IvgGMiGQ63wgmIGHhrmXfJdYuaWFbJtclNr2oddhmWWZtMp0V6lEEUHhCv72zt7624tLsYDzZ3t4WHp47fSrP86KsEHHYzw01fQOt9hVoZKI1xrLI/v7+YDBQzUlNH5RGW6U2GK8DU3m2FKmRjp9ZGqVWf7hQzh5CYAG7WPcSCQARpS1mUJmKi5Fg7JQX42IxXuikZFs7z9synfIeafMnrLXS9tdTDaOqGqQqaHAQ1VOC2AYCIpKdwXnqRjPyNnk4imGDJEh1XavXQaWv9z5JktTZVgA3KNOIKBJUE9CuTYlNVUx6P0+5j153nVRu8tmsUsZ59epVROjl+TSfXb1+7fLtt/3u7/3u7/7e7/6bn/+5b/3Wb73jNXe+613vG
u2NTp06hYHR2ISoqiqQVGPhiJimKWhowFhjTM2B2BO0jjSSNoNFrG26d2CjNDdRkkjw1PpFYqWDQRJBY8wrr7xy/fr+qVNn5rYjCQTQ1VBsGSWVft6rqooILl++RAR1XSeWxLksTzePb6LBG/s797/u/pKrSmqLQA2emAUAsrYu6/3RuK5rJFuXM2sSYJ5OpyePnayq6sqVK+W08GUFAEJYlDMk8hyq4I21UFdB2CSOiyrSfCz+PywJl0SJrsCR4hdlLh3jl0ufETF6oeNfsXNDFHiIC163bg1SVILmY1ssakXE4eZ6b22lCr4MdYYpWlDPyjzhSwgJWyAsEBTEI9BzdSig6Y0grA5tQsvMgoDU2K/Scsa69vHQdscUJV9Xr++u7JKsZZ6z1KWd6P4zypulvKTmMLct3vR4Qxt1Zubo8povsQh2NgA6AM7QWs9dnggdoxMaySCa7ImAIgvSOnK6EIJtkQuJSEuSmiPaPlDt17rtp6bP1wwszQ+KYk/dEUgtCG0HHR4RtUcUd/qN60ZEtkhE2lIG2lKZsiyLotja2trY2BCRxJKv6hI4HfTVv6oOPWCNQbWphjB3D3AniQYAXGvlRwmhAafJlF988cVr164R2sSZupoigDNJ1cbaIXqQlFYB2rJJEGnSGpuFZ0D0ImTAxrlzJwDWlTGwaBB3JYoOr83LS1V1Y+YgbMiWZbm3P9pYX88yur69QwTD4dCmSdKCUJpE85sJWYQEQZPVyDqHaNbW1rIsS53z2LB7bOsF0DeiqKFhIgQwHds3Jg/GvzIf6vvUVtFokHVJADfZSZq51qZK6Q1L9Eyd8Ef3vMRkybhc6qtXvEZo87T1Zu89YeMJV5ePvkuFXAQfjGOL4fC4v9ERHVqniIggWiKDiIDIoWb2IcyVgKbCPnEYyGDDOkLbBhuEmFkyLIqCoMlvsrbp76s/l9YXjYjWZkQzz4GI9nZ2EwuI0u/n1tr9/f3zt9zS7/f/h7/5N9/1nnf/k3/yT/7iX/xLj33qYw8//PCJ48fznmMJa4NhMRmvra0dHBw00R8CACSDSODAaBqu4maT+i1gTgPUtiCkNhWr9hUJEDSxBmMMGASiUJZ14Mp76/2zzz4rAFVVe+9V3h827QxBWRb9Xra7twMMa2trXDcpGsEKAPQG+YnTJ55/4fmtE1uzcoIGBIUIvBciEDaINJ3Ort3YqTyTRQ7gMqetnQeDwfbuzoHsGgHNPiuralxMbeIIzLQoXJYCYV3VxiIaAkLGuTjsysV4WmFR4YZFpr1wAjo/OfKv8b8EGFHwuj85LLY63zcQQV0xHAUStTZsAAFCEVjZXGdCFkShmoPDRMQTkQTWrW0GrLNu25cffnUcQFf8IaKNqnQ8uvpfs4hIFf/atTzil3ogO+7cheZF3AkwR2dsWMSKWro5Kub6QGwNTeh0MdLAT+zONGd8rZ82+oEj1wgRb3kRpLqxvJdGK5o2NZe+Xb4fBbymHOuMVMONz/Te196HFrSI2zJTADAtmm507sGiaIn81BiDnX2J2kyTqdPqQ9PpVM0y9RH1ej0VyXVdb545672vfZkm/aqq+oOe95WC7yj6IADYBjSdDS1kq849H7TQrCJqGLu7+9t7+7PZDBEVz8QY4+sQbxNpIrua4n+UcjyfPjQ4DtxkGR3CVosbgZ1aWOiARjGzwgSGELxnTeqx1o5GIyJilCTJyrLc299fW1sj43b3RwAvO+fOnDxBCEQNSqImQgrXhA5JEJsONidOnEiSxKAgumZ3pPFnGDLQuuixcwS6c9RIJiJKU8CqE2liS845Rpu4lIiEG2VUJ6WCk9omvl1lX2+LeVLYVhDE2AR1miVAR1lRW1YfqM16deyq2DWOKJmbLNyGTpSAWbkfd6AhjEEzBx5hZuNtqL1m2EcYyKhGAIC6bYqiDG3TCw2gaLxDo61GLBpCRAnsa0ZELT3I8xyNAobYFj0XNbwdrU/n3Pp6XtYVCF699sqZM6cMQuKMcy5JbFHOXnzx+dtuu/zrv/7r/+F/+9X//T/++tf/+a87e+HWP/qDP3z2xVcuXjgvgL3BkAB7vZ5OyhrLIJYQQNBZVuMbxQAimXblKS4ImDncnmjqpbrqpCnfJ0AIwSQulLWI9yyvvPIKAJRVRWQZFqSRai0AyMxpmg4Gg5dffPkv/6WvW19fZ2YCqaoK0dShXt/cuPeB+z/79JNksRzPrCUB3/qHDIsww6SY7o8OBCmI8hMnzJZMWZbT6TQ1FgUqDiEEBgkI3teZobSX18GTNZnNx+MD9FUQ1vU/LHWOEoEAqvG3N3Rl0kL1nX4gJDlapC2tDB41gMOvbt6DINBUMXVVhJZnCRCKoXTYn1QzAUBr6rq2iSOywIBoluxctciX3hJnJ23osGsZIuKCw7PL3XDRBu2K5/mEO/JcWvU5fhPfB61J131F15LuDkg6/dHiN1GJjmJbzRrtx6JBzbqui6IYj8cq2KKrDVvPm7KPaIDGYXNnBJHXd3/bHR52bFYi8u2l50FHNRqNdnd39w8OpkWhA9MRaoNhaPWYyKSw1YWxxZoOMeW91RviGJSxKtvS+s5oHKidZzrwYQAALMVsAo0ZUdd1XRWzaOU08+Vmbb33jXtL055D0Kn6ui6rSjtGBAZAw4KeQ5AGN1j56Xg8NsZNxoVqP1XpG/ZvIITaIogE6qxhV2PrGIJH02SXTiKxRfsvKmcRRlGXSBffOdfv9yP1E1kAULAOANjbO7h2fVtxoVfWNoy1mkCumJT6Fk1oIiJABvb6jUZDMbafMyZNUySSRVBoRKy911qOKBSjgiitUhuNNmriKUBtIjS2vYOazP9OrZosmhQqWfUgJEkyGAyoTaEioqaCqMMRvPfr6+s6Ta1Qj7kL8ZA26WztgncVTQAga5IstYlDQzFNVC9NjyBrjHVJmql5oCujaqvepruT53nUDHRVtdeyMSZNcmuS5l1J4torCqfBYKC/AgAgNM6iIZcmZE2e54gIguPxeHt7e9gfIAr7GoGF/cqwf/rUib3d7XPnzly8dP4vfv03fPf/42/t7Y+//Cu/8s577nv8s8+Op5VLcjIWjdX6KO89AhNK4gxIHQGuI2xLnudojRCSs2jnASNsLYQAQs7aNG2UaSAG5CBAuDJcG4/Hjz352VvOn1aVKGqZ2mIongXdEeccItxzz12Dfu5DHfVy7/1w2P+CL/qCO++6oz/se66BNF/dO6dkRsxcTMvZrPLMIQTvg2nwxCUf9D2HikNAKIOvONTC6IzJEnDGc7CJCyKjYiqEo+lEWr4ZWt3wSBNriZVFil1Sprt/7V5L33R5cpctdH/bFYf6VwMYXTe46GGSwADgElP60mT2oBidu3Au7/XEkBAaY7Jezsyh9twJrHSxJw+PBFu3a3Q4LTGxJtcx8sHDArU74SiBusuEh0yTpSd0b4PF2HD84ZHL132L7/Q7hJZ9iCZedZIdlIM0/q5FV3OXicdxSpsiv7S1HKtvQZ3QjccjrlKcrDL9+KHf6wU1LTvZUkuzW9KSoo0iLNH7Zw+51uNvu9sfH5VlWUTB
VBuuW8SVpmmaptaQAIcQqAbFxInPiXPXuUCnq4Q+RNnBAj0ICWMIYWVlpZzVk+l1731ZHlhrG4QpCCIkEkC0VwUaQQAmRIg9czpTiyuPiCKh8Y2jxUP+pSUak06kQ0Qmk4n3fjgcalxgNpt5740xvV6vKArVW2ezCtGEEHp5miTJyy9fybPkjkuXEDHNeoRCsZsNWWOcUhOisZ2kMOX4QXxM/wGhbgcCADDWRrhH6Lh2vPeIauAis6/rmtDqDqoGmc6T++YAxbE2RgGfuY2nQOcA6kwBQGVwK8xckiSKErN0BtVnEGmA2zyMRvADR/WCiEQCAGgVUDNBM0fN7Jbs2w4sdnwgtXXntQ/WWjJW+xAIBkdonBWRRgBLLK9oAbEDigglFkPTNpurajAYQBuOCSEoRIWuPxGhMYDkA2dZtrd38PSTn906cdxXdZLYEHyaJlVZEsDm5ube3l4I4e677/j//st//XM//wu/81u//dYv/ZJz58798Qc/eOPGjfvvvTvLsn1E1MZNVps10aCXV7X0+/26rqu6draJlEmbb4WtgyGuQ1NmGgQNC6HClkIIDM35OBhPPv7xjydZWo0mkWwO8RBOkmQ6nSbJSRG48847dYVNA32KNk0ms+L0mZNf9hVfXvqSCKwlYRBRc9yCmGJaT6ez8XTKLM7ZNLXMPJ1OxUtZlmnfCYIHlsBBGK1R7Kfa+1q4qquKfelrAkaiOoRgSRb5MwDQTYqAu9FfWbKAj7pUqHc5ORySeZ/j1Qyj/VFQJzJACCFLXFlXnsGlSR180ssH66ulBAbpRHkx/pYRcDExG1kOS0zoolS1VzQvaa45doZ42CrtSrjuWe3aqd0nHOaYc0Wjdc9GYbOkAXQleqRdgIU9ix+ido+dqKHaFr5jR3ZZdqxpCO344RBCfZxddOIhomqg3FYxWWvV06tq76DfX19b444FHx8SZ9olncjUuksqi6gg0rEUoaPrRPuVmqbrpsnYdM61HW2NMXme5700WupElFpHBKqjNHtH88VU7h9HG9pLl1TNr7hx0OpnmUuGw6HG3rz3NnFdHwOi1oxC1OABmFAAGWkOXrNEHhIgeOEwP2ZdGlvSFJcWSrXv8Xg6nU7VNJHA1axMrBv0+qqpQFvQ5T1r9cvjjz35/IsvaFtyAVIVSrhBsVB9zhqM3gXqgLJF2S8IS764Lg2rCdvustFvQltXFjqpgvpYbvyZNtYI6bCjMwkAqqpSCzX6RVRIqEBSTS7GnrsL1aYWQ13XJkn6/X6v14spBdA6h+P0I+liiyqjwyYiPQLqNkjyTNcnSZI0y2LjZL3i86lTuo1tApeaxdY4QmMTp/8zLjEucWmSZKlLEx1YlvX6/aExLst6RLYsa0R0aaJOizTvuTRDY4mMF8gHwyTvXbl+7aUr20mSCIS6rICFgFNnnHNZkpw6cWJ1deXKlVcuXL68trbxlV/+5X/n7/6PVR3e8qVfevHSbR/9+CendX3s+Imy9npGsiwjAZJ55DuuhrUUXSPY9nJoKAT1vGEQDgKCJEgM2BSSoiGX3rhx49HHns2yTHGhYeFS4TFvvTybTRHg8uXLujVkQETKulb4rcFweP/994/G+7UvDQERWUOmKVU3lQ8Ho0kITEQNniVi7b1zrg6+qMpJORsX04NiMq3KikPNYRbqMvgauKgrAbCJS7IszTObOCV+XCwzg0UB2ZVMC7M6JDu6x+fwrz53oXv4XU1g6xD4ZcyPAyIhnFbl+rEtl2fciZ8CAMHCBPW/DAvjj4M8PM7D/NySYnK1BuJ82k2mCC09IoQFGKPDi7L01shflnYisq3uE5a0m+6Zj0/ARSUoyrAlbVHHqUr+AveBALDghV9619KH7qQaDisSQlA+rr4751wvz0PbTTn+sPvN0gpgx8uNbYIMdxpIRKDKwxQZ1yq67CJH08pVjYsry1Ojx1qKJguoABMRBIGAgIAMzPrsSAaRUJq3t7EA7mT2ImKaJNpWBbTXG1rvmaHB6CYCZKzYYwMvhyCMiEG0ABLJorTRu+5GxGuJTg4T3tL3RFZHbUxDBqLAhNZOJpMkSRTfeDabjcdjAG0abUDQOffss887Y19z150ba6uhLhGFxSsnBfaIVos7TVtf3oRdsTXsFMsAkcgZ40T7OrfIQYgYYwSIGBNZm3UT0jQWzQDTJ6s3whjTcgZaOvzcAVOL4lnfwm0cVBUv/cY5FzyDIAKJsDBY40IIIAjG5VmvqirtI1RVlSEr7JVsuhqzptN3N0IVGmNMmmWCELQwwVowxiSOOGiOknT0zhiB9hJwnqo6r1wx1Eg1lgYgVumwxkpaM13Dvc39zooIoQFLdZhlWSZAZVm6pLe3t+e9f+nlayGAs2lNARNVFmsk6wxWofbeJ9adOnFyZ1Q4Q3fcdddP/PiPv/e97/3//Iufed1DD527cMsffOC/nji2tTLoAZl+rz+djPI8B0MJJeqTNxY1/KHIJtwpzdJZq9MCCEkMCAmACBISs7CgMNRcO4Bnn3tBBBCIEdA4kHlTnMgGENH7ajgcXr169fy5E2vrq3VdW2rqP9XmTvOMma2lsiyMIQ5ACBZNzaxOv3JWT8uyLEult5hwYBKHRJWvK4BQe/UeJRJkKt57lQtBOMmz1BpB2B+PRtNJ0u8BaMnInyEdDSwXDccJLsmIbqKD4BGoHdJJWu7ef7NL2gxyFomGbEuW4JmBkKzxEhhk4/gWJlakAXkFAGyhrfWwQ2fAjWwS7mREzM0J7iQ/wqKUafqSRjAsJRFsQv3zFM3ub7C1Nf+s+S54p7u/wi4SUEdAHumv1g9ROEVB1PzEOO81ZBW6GkM3AiEiwm1REAIgw6HNOrT9ulMSQhR4miIHSIgCRVHWdbA2SdMUkavSF0VRFAWQKHuS1tDnjr89zi5q/caYaHF2VwMWTt3c/F0yyuN6qhiOZpAqdGVZEiUxXaudZktOyADQhX0xIKFDN93xYKeRQOhcg14/TdOdvf08y6Yzz1JPpzNmVq2QmVFYQkgS6yyJoIgVEWL2LA3Q/CLqOLRIsEvE09VC4p1LixNJJbI/ajCJbFmWaeqcs0UxIepvbW1pHuxwMEiMnUxGK8PBlWs3er3e2XNnksTmaYbWtHhPzIxkWfMuYi5C3AIUgPZjszduXnASdzx+QETvg7UW0TCAaE5WB+VfhbzST+vPoIgtiohomih+nvVDCMEHT8EYg0CExjpXV15EOEhd+UZpQO1YTNx2wxWRpmC6qkySqCqpf4oJz9SWdcW8BC2x02fGHYl8MHTyHKE9s0GCECLP14HItlifJmp6UVVVvVIT2dBLhJfSAWs6BVkjCJ6DSxONNaRJVnMIIQAaJOvrWkReePnq9evXT586VQcWgKKs1JEDqI3L2Ht2xrjcMbMwrg8HdV3vXr92+223P/7oY29845t++qd/+ru+6zu+6mv//COf+PinH3/iwQfuQ3JCDslWPgz6TR2EsVhVVVnMdMuiYtSYUKZRnoCsswbmffpINPUv7TnvgejZZ58lgtF0Utc1BzAuei8XsuXVsJ7N/Jd8yZesraz
W1VitEEEyxhRFkeYZIo7HY/HBWWIP4kGb9AKRAJW+nhblZDIZDAYaoqqKWQghCAeQUDddWDSFghGYocEr5eC9p7HJsiR39tr2XuVrMkYIiYgXE/LjKV5igN3T0T3US990T/rccXzor69yLQlmlGWDFQGkBTRkZvFeDKb9Xm9lSIkDaBJZoq9OFO9ZAADYSNNYUrkWovDyjOKUu9I6CojG/0xtHS1AxP+fL1xXElCnh273hihmuq+Ejuq6dFa7Zk1X3C7pFEuCX2Jgtb2/qqqo/sfi964CAgACQdr8tCagixIzyLv/bSnjCNN8gYFKo0PoAKbT6YE0xkeSuWgEL7mO48+jFw4AmgqZTqFUFH5HqjjYuborX5aldpjRA4Nt0fNgMMiTtFVQmhcDCCIawNDurf4/AmDToP1QDlQ7wrhWzXyNW19du3LlWp7nZlR4X2i2iI5VUdOtpX6/r4UuzCyC3vtZVYXADIK8QD+ICDI/vV3KiYt55OnVy/umULUsS+89AGsWNDPPZrPpZOKcI8SqLPu9Xi/Pi/EEEdM0HU3GSZLsj0dlVQNQEDbBAIprdoENAkqTfBfVx5bAwDZAZ9S1U9v5ivYcgzZsPD8s2EbHTePTjlayaRsgaqRDpWZk6NAW+8ZoBXY8Il1x1dR/a3Z669nWOyOZVVWVtf+kTp5I/ImSurWWeZ5E3R7tJh4RQvB17duIdXSVzcnYWI2siw9xK5GIWrSymP/VLC8iiATTsEydIbZJiLECIi7CrK6IyLm0mFXGuP3x5Mknn0zSzfWN43l/9fO/4Iu/8Iu/8AMf+IP77rkDABKX+VBRW7vMQQ1HMSBo4NjWxng83dhYu3Tp0g/90I+8//0f+Ef/6B/c/+Brz5w/9+EPffD8+dOnT51wrlFiWuBoyQYDZ+xkMuoalDpTTecGAABCS0CEbXjFNGmqgZKkmEyefvrZlbUhMyQuq6GWTm/ydl8ak7rX6xkDDzzwQL/f368n3vvUIQAqRr0Evnbt2t7+DhmoiioxtvJeyYaMCYJlWY1Go9Q6CJznPUdmXFVKpWVdhRDYh8bVYSwJCIgQ1nXtOZS+5qosqoL7PQ/QIHHOozALEaXu1T2/3UPdCJebthAERORDbRWWHnL4WhI6N3mskIBmriFiQJjV1ebxk/mgF7BFOG/6VzV6AMauRwCK/6xcKx6f7mSpU4XbFQdK8PP4UCNZ4k43L1qQgnOVf/H7m11dsb90fzz82Lpouo/qDkavrsEHnbB2aBMCAUDa7TFH1VAxKhEdsXl6tQdGVSIN2EBTudo+p+WvbQ0fgLJXYFGndOXLaIEdXh/qlH/oXzUo2IrquX4ThTcsUg91wp9dU1jNI0SM+Fz6pZoyaZqmaWKQQNR13Ew8+rjngni+JkGbY+kAYqw9rkOkhPXVYb/flxs7kUgAAEhQEADTJEmdVYuzqme+ZkGo6wDT6Ww2E2bEbq2RIVChND9qkdjixKWjrMRvIg2ofabAgQpu3Ov1DJEhqpnLsoxByl6vVxWzqqpCYF+HrN976rMvHT/+pBZ1cMLaB7bZcYsCgXmesh4DwHFZdFQhqB8BiCi0uxn7fupoiUyYKyrNA+u6dm2Sl28LSU2nGCk6inT5lQI12TgSlfqKo/ta5XTD4Dq97tWYllZyx9TFqCjodPR7NZSNMdIiV3Obvd8V2yEERaeSVgluIxGEjf1tiYiVQgwBNshrTc00IQW1702zlwjGzLUxJILgo4terWGFEXU2MQ5DEJcku7v77/2vH7hx48bZs2fPbN4CgPvjydNPP7t/MM6yZDya5L1Mm29yEMGgS1FzAEALgYmqqt5cW72xu/fcc8+95q67fvt3fue3f/u3f/Gd7/zv/spf/qqv/bpPfPxjn3n0ycuXL62vrYj46CwkRJMmqU+lEwVDRKB5ziaLwp4TGAUUbkkgeADc2d77+J8+vLGxMZtVIQRmwJv3U3fWGoCLFy9CUxXWrLPUda/X994/99wzk8kksa4UQY1YEyGBINZ1PRqPd/dHvV5vMplo0Ho6naI1AWRWzjQjWhAEKOpt3vtZVTbdwBIHElRhH08mx4dDZpaWRAGw245lzmMP41JG7rpo4iIe3Wd3SUzc5GFHrFj7DRJg1wjWFzVH2Bg0EDxvnTjm0mQ0m5pUYWOWbUtqAB87cF0CBKLma2Sb+k/E1q4J3OhAgJqBarv5QdTJ72XWBuzzKsx43rpyOhLWkqyFjhA9Uk4vSdwohGJdbDx13VfEVYhvdG2vVh1z/FW3r4u6WVmDH+3Sx4WTxaxsaPW4uI7dMZsWp1eNqha1J9HDVlUVmiZW2iWCKDDiE6BTGdVdT+4UoiwtZnfdoqiLCSA6HtdmvOR5rmiU+qiqLK1Bm2bOOq1GkE4UpLMOEqBriIe4t3pbXCiKfWHFuNytrKxUVaUQHzwrGoMeUAXhyqC/sbEByMUYC1TB4DVgBsxkAETpyqA2AtMpCwCCwLzuXBZxauKydMUwGqPIRCrwlOPXVWPrrKysjEajyWSiK3P+/PlLly4988wzauTVwR87tvqZz3zmtksXT546YZkIU11hQjHGIAvZVtIs1qp1vehemEJTjRNaexE6XTqIyAsbMtbY+LRoZaryFCl/NptpkTdAGhOaNJ8NEauqSltbmTqpUmr4TiaTEEK/3w8hTKfTbvmQCmwkMgDWWnU2Rukbj6G06RpR1dB3adrOkkKmU22+VMbV5Ao052Xh4Q2HbXcTCc3CIQQRQARNCmPWz2DIAPRazOfo8SYiRnDOfuITn/i5n/s313e2e/lgd/e/PPCGtzzwwANnz56uAn/i4U8cP7Z1fWf7hNkaDHrAYlzjVGDU1l6IVWUNpf18UszW19eTNL9x48b58xdM4v7aX/v2d7/nv/zo//T2B1/70M7OtY995MO33377+TPHpY1tcVlCi4AmrR/LWottDL7VhFpzaT5dVTf56o3rDz/8yD13372390pd18YlItyVF93tm0wmPsCtly8yM7JkWSahDMErl57NZru7uyjBe58Y6+tCJFibAmPFXJb1ZDKZTqfe29msVnYxm83SXu59VRSzqPwRoOfAlSoEbBJHRJWvLaIPrKyhKIoAogBP0HCnua68IIDh6Esk9gs/+sKOO2EuAgTkKIDow7+Nn0mIsUkN6t6gerkHCSG4NF3f3PAcyBhGAGCA6PECUSYM0iLbK5g9iIR5O5SOQIxvjzKOOt5fylzmS49ssnQQPCIlPggQkkWwAhhabAHDDG0K58LQoztrSUxyU17mo0s26tfcSeTh9upKVmZWhh5dUvFXOoAmO1eECJwziNJiGggAxFJgzd0NXkCI0CJZBKP1M0smJsxjh6waCrAACwro//SfBOiM1VyewWAwHA6TJMl6edbLBYFsk40850fKf41JkyTPsixNnbUgErwP3nMIhoBDLewJhQD1fxKAwCAYYZTFTnZqbxjjjHGIBoDQWJcmikeT5UnWy12aAEHlK0ExxjBCkiSGkNhbEAzeAlpACkgBJYAERgnAnqUGoBCkqrwWqXKbh2W0/QCAIUIAQnTWOmvTnh
lNR2U92zq+aa0t6wrEBi8oBL7majbM6MRmby0LfZqtJGU/l7Vhsrk66GUpBjFsDWTAhjBBUJBFBhKyQglQ0jRXsC3+KLQJBIs0YwCIGUJomkPrWWeQIIDGuiwNIIDIImmWiYhayVeuXHnpykv9fp6mDtEIE6CtK3nqmRfqylubADV1fsZaQ86luTNWAoe6yUKae3Sd9cJ1kKKsy1ldzurJuKjqoDm63nMIWn+stiBpDq2EOtS1tdTrZc6ZJLGWnDNJdPi3QouMIQCoqorIAGCogzOOgKjtPN8NZOioNANZa+XViJlMJjrsLMsaQDQEdNYLm8QlecYIVfA1By8c480qy9W/Yi0hirVkLRmjLwIii2gQjUtzgcQHBHQcJNS1AXSGEouWxJKQMAQvogEgAkqQcqQcIAVxABmYDEwGlAvlYnpCuUAu2BMzEOqLZCA5SCouhyQzaTJYW03znKwDl1My+PjHP/UTP/Uzd7zmvrd8yZff98D9b/6iL/yv7/qP/8/v+fZf+1/+LdTTb/+2v3rt2o0kzZ9/+eX90SQgKXtFFCsIdSAObC2T8SBk0VJIDJ8+vooynexdveeu2/79/+/f3XnHa37tV399dXj8y7/i6/cP/Mf+9JEaia0Dl9VMFUsp4rKcXJJkuU1SEQw1UzCGLQZDKCIBgIVQUBUQEkGgBMS+/MIVZPDeCwahGkxl2ZJ3BhIQB5igSwMCGM5yc+3qK5//pnvXhgnzNKD3XJsssSYNAYioqv1nHv3swRiu70gp694cq2h1hrayOPZlTbB3MDsYVWAsGETrDiZTMBaAtrf3RSTLMpckSFSLn/lyxlUwgqlJRYJ4tk0m8biYlgD70zGVwZYB6qD2sQNh5NKyALFgYNC+ogGEkbUVkK48oijLZRXhXQAAiH5Y7H4/l2ewIIw0+NU1afQKIFpRwQgj6ysACph6skwB7YzMjAy6jAM5csW0PHvrBcmTifgpBgY0LgFCr2lsBIBCBgnEiOgyNOAdSEymCcBRAzkJwABMBCHU3lciQeUSsycCIrAnTpxQ5Xc2m2VZNplMbGJ8HVBlfIPmi4iNA0rlXFcr0c/RH9sVaarRN/xwMQd4yXbpSsEoiZdesfQh/rcrubtvOXxFKyFu59Lb4VAcWjk1tTgPWlSqfM0Ys7a2lqbpeDzW20Kn8KnrmbTGRNDK+ApqABbqOQEtvrrjDV6YoI4/FnVgx0+o8JZJkigWjzHmxvbuqVOnWMTZJEmTclZYmk+WNStTFMNWRLC7/lHGwGLUP/6JiMjY/qAHAEVRVFU1mUwIEzUIhNCRdTax1hpniQBJyiLozdPp1IdQB0TWvIOldPR2X2jhOHUT1vDm1jBRE7NRTQ6Ae73ebFpEDLWqqmazWb/f39vbc845cqurq3t7e2q2KWxWFPNgVO+ZBz6YGTqVYCEE72sR0ZrY+FsJXPs6UoW0rpG4cYhobQMRHIOmXdGu+fZZ1ti4qtHGGnHWvM1WqTVti1ydtTGm1+spZUobOlUhrfbrZDJJe7ltzx22fpS4pOp5jtOMjnTNWNErbgEz12WJaU+JvtmLlmhJw7qimfHYpposnFbpGHpHnl/dZmhwkcBoJ+bGjIeqKH73d//zyZMn3/e+9z38yU8kibv//vvvuOOO0Wj0z//5z1jr7r3vvixL6rpOkmRnZ0dE1jdWQbDf782mhQaDjTGK56zHWbt3b2xsjMeT3d3dCxfOZ1n2Td/0LT/wA3/rB3/gh++7775XXv7sxz/28K233nrs+CYzW7QsRsjY1HBdG0QyNoTgywrAkDOIhjoTVwcdIIUQDLmXX345BI1QkNIVQgM1jYgsrBEk732aJMHDF7zpTZubm2UxdWSCtqO1xgZTlmWWZV/wBV/w9r/3DzZWN48d23zNXbcP+jY1iTFgSPZ2b9zY3rMmLauq1+upl6Usa2a+fPmSHpDJdDqZTHTHIyWEAILIwr6pNTAAMBqNAggtRglFYmrv4hWYowc0Rt9ayP0uSXQZwud+LcmaoyioHSEAABhAjf2RSF3XxtmV1VVjjCUIPIcw6z4hnpGl74+4E8R0PNIL6oIIANi11eHjj48GgxXmCgCSJAES1L65QdkfIOkC6Tk/ejJ6dJfsYOg0RThS2sVxxG9CBwO5O+HuvnY/LEnrrpDojjOKje7Dl5bjsKSH1lMXrXxuQ85FUQwGg+l0qs7MLMvKsgzd4tfFUUWGFbW57uC74idqCYfHozcigvZuipwXUYiIfagDKqC0CHnvq1kpfccBqsqPYCrBW0JjHHNTT9AO0iBKAOJOPfThZYmyPxIlUYOkT4Dsg3oyk6bqkRV+3yaO0IKgICEYY01dh8ms1NpNIgoihMTMgCysQSUQIUQyiwH4eC2tD3SIO5a9QaduyvvKGNPv94uimEwm1pgsy6qqGo/H6t2UBB2mq6uru7u7w+HKpz712OsfenBzY127E2kPrq6wEVw4hMyMqg6R6Y4qiq5erxdjFlHUtbpCx38bYwqUqDyoqgo72okxBmAhGCwtIkcU25HOo/8pwndgm8wsbQCVGnHYrNtcfQFodraTVKIXL7Zp6pJ3CCFpGkuo6DXc6usSGYdunDAKgkWFt9Hv2xsWoOOPuBABDIB3zolYaNzd8Nyzzz/11FMf+siHn3rqqROnjs9ms//4H3/73JkTly9ffs1r7rx2ffvTn/702traeDxOsySxTt3yJPD888/feuutCinKIoigQrqu67ouiWxZFFmWalHfjRvX7rvvnn/6T//ZP/2n/+xd7/rPX/blX3Lq9PlHPvnwyy9fue++ewC5nFTWkATP3Exb2AsJs/clkx00k9AmVghq5hmTcF0/8sgjCFCWVVXVRMaH0HTlAYn15QawCqHfGwJcu/ee+6xJvKmBQT3xVVUCQBA2SfLmP/eWd/zj3ld9zTce31r9yMc+esedFy5eOru2tmZcsrdfjMYzJpempqqq/f392Wx26dKFtbU159z+/v6s5WaHGakICAi34hMANNOiy8dac3XOvqhFedRHEFHXCyxtbHCJz+v9N6WEm1+HZaFeBAjAHKFi1QYBqWufoKmCXz22tnn8WIni24gJwLIqsMR8Dl+RsRMY6PRHWpI7ImJvvfXChz704V4vE5FZVaRJPqtKYxDBKARn57GsxRaHlwluYoDiIcf9zRboSOF3WEx274kcpzuAeA+1YdSlARw5fuy0XJU2LoVtsFPtS9vpwgQNCBwaY1ZXV4lof39/Op1KG8SVDjaIDu7IPYviFhaUDNV6kDq1QNCa79DGXxGF2TMDmsbe8r4mIpeYqqqyLAOG3d1dlPozjz5x/szJrY113FhZX13xzD54RJSG9gBAi3F1leY1VLKoOUVeHA1BY4yi0KeZU0FFgJrm0zBVJBH0LLPae1/Vs7IMZjIrppNZECayKILAIiGwnvbWNyBNzTu12T1xAXWjwyGguyWyIUBuRVcIsLu9Y48d03TicjbTwYcQyrJOEkHEqvQrg97a2tr29nVmuH79+uXLl/I8oc7lvQfSXtwLgGjMXM/qxGVgQ2ihv00LQaU2RBx/t
CeWKJA6SFIsrLFe7iRRd685wbekq7+NECiKlhxprOueif/Vd1Gb7RUpVmkeAtskMW37amojQfo59qiP3+id+jpAJLICXnHQWFowgHkDhwBikGVecKAllgDwKqK3uQggABDZhATqukZrAE2e5w8//DAiXr58+WC8DwCnTm2NRqOXX35lMBisrKxYa0ejUb/fr+oZIq6urn7q04//6v/6y3Vd/7W/9tfvueeuQX84no5EEZsRY4GsMhNrjUiysbFx7dqV2267OB6P3/rWr/y7f/dHfuiHfujeBx66euXKh//kE7dfvrS+vooEQMZaAQniq5o9aCiJBH0A05JpwwMFQmAOo9HosUcfv+OOy7Pah5pdklgiCJ51h4m1LwiiEBhEQoCzZ8+Gmi1SAAIRYHTOAGFVhOlkDCY7deasIKxtHZtNx+9670fCuz6ysYnDwTpLurM92R9Pw9gDwC0XLujRmE6nV65eLYoi5tC0vMXHDRRp2l9y0HI1mBVVWVUOxAtHSH0AIOE2zb/JOZpDQHew4USE28gRLeYGwc1F6dIlh0AYu3+akw6i6noBhAGQAYhJwBrFIeETmxvk7KwcC2GeZhH+83McTFcG6UFAoKiQHL7fPvDgfR/4wAem0xkRWKS27hCJbFMnqu0+tFtxp49NPHtdQdjVl6NCFPlOVxB2pwQ3MUAPv+XwmnbFavfOw4L88GYsLVxnC7uWq/4TnTPWzrv0JEmSJMn6+mqaJtevX9/e3o5+0a5NsDTH0IExaY2GOo4ZEQHmvLXr0F6aS+T7oNYriUHC1h0NAOyDBv/GBwc7O3u+LvM02VhfraqKQIxtgCOg6yMSQkAADxFNsdMH/mafAUB8UBxsEen3+0VZEaVVWYowB6o8l1VNRHXNRen3xuPJuCjKytehVgiCBpNStFkhAEhgBjVzF5D8uprBzTYx6ihEhDiXN8aa7e3tlZWVwWAAIorrKyIaTQheqtKLyMqgd+zYsenkuclkYq1N0xy48foqdwgy32UzByxDbusIQwcmDBHLslTgRlr01lLb+I957p5ihrKuvK80ua9JL7KEwta6IBwCE5vUOmetaPGinYOp6WBUkB8cHGh2dFQHAUATbfQ2hQmbzWb9FsEDEY1zgAgimrIOrc9fpbv+V+9sugKLRKVcpKmAJyJRCBxEEBBCCHFrCAwhIgRg9kYk9twCaDJK/yzxSw2dCGkrRqgbVWwymTz22GN33PWa0WiUZdlsNmMW59ze3t5gZZgkyfb29mAwmEwmWZ60VQNwz/333XHHnVtbW+94x48//fTTt9xyLrSI4oioLuhGX0EkZ+u6Hg4H4/GIiO65544f/8f/5D/9xu/821/8Nw899NDGxtYnP/nwzs7exYu3EKFwAG4gwwRA2IcgiGmzFEqZAMDCAiZJr7x85YN/9Ee33X7ndP8gSRIB8r42pMloonlACIBIWZbt7+wmDm69eBkRi9ksTWzpgzFiEyqKstfroUlmVX11e8cDHExnvvTHTx0H9FVV7Y1muzs7XuyxzZNnzh87ceIEs7927dr29rbCuGInGigiAqHJzQfQyglqsSIYhAj29/crX7tOJg2qkI515ND2i4zM9iZJpoelLx71/ZGXHAp0HhZ7WsjEINC2b1KUGGYOEFyWnjh7GhMLVZs+fOjtUa4dOYYuM0dsq5easu8j4p50fGv9Da974PqNvcR1geKabD0AaMEyAZDb7k9HrMhS7HZpUaLmHj8fOYHul5HhHvmnI2fe/VPXH760cIdleRxkd6i6Ggp3tbq6OhgMer2eYt1F4Pi9vb3PfvazL774ovI1aMEfonkUWgz9sJhlFpmOcnIA0vQcbAMh1AGmiGOLU4uezKZGJbBGf3W/lDNqaN8Ys7W11esN0rTpEdQs1HyROK6VLGqR0jHBpZMuFxPrmNkgGWPW19fzPA91qUabQhSxYM1hVtWTWVlWde2lrP3+wXQ0mU5nZRXmbScEoc0tYJDmydqpKXYsgJuc0sN0Im2OnqKrAwAK5Hlf66TVPRBhF421aZrqsHd2dp577oXBYGVjY200KTS4QERBvb4g6vaIC4WN5efIWi101h1R5cy1/Q1jdFbBEwCg22+ja9Tq2qoYCy2m1eGDpj3+uAUsi7Zv63tAadsiqS1bFIU2AtEMwa6aoiPRp81ms7IouK6BWaMJ44MDha3g1h0XY4GRhk0jF1oNW2P2jEHZDRARobGCBGTaxFGIe7SkXR0+nkddBEBoDIABISALQCAYQqhC4x6vy8ZiS9N0Oiv29/f7/b76n2NKh/ceUc9A+LIve+sv/uIvfvM3f/OHP/xRbNXrxkPQxJ48EYgEInAGj21u9rLs6iuv3H7nHa9cvfKGN77pn/3zf1HV4aHXvSHrDT/y0U+Mx1NAQ1mfyHoOujKCwOKFfSfHockaAparV69WNSdJIj5oo1hrLSC3eDUCLUBBv9+/cePGmz//TVub6+wrYKnrmkTEB+1LFli857LyQejB1z70/Auv1EIH09m4CNs7k+vb043jp9785i96/Rs+7/Tp07u7u4899sTOzs5sNmPmCCmvohdwIfwXqyQ0Z95amzpzY3e/9HUgEgSVGk2ZLItmWi1tH3bYNTN3EWcPn2g4JFNe5Xp1+tF1BoBu80QEQBZnrOeQ9nuYuv3JOIAYt4DhuiRfbj6CBXHTKtbLGWQAQMAEbAnkgQfvf+973xtCLRIEKNQMpCGHaB+1HEfgSA01HnvoyIwlSXPEWrS/PfzlkffIooLTffXShyNvjhYqHNppWUwN796GnVZF0NGqRGQ8Hk+nU83JirVARy5O5FnUPrljYs5LPBux3fnh4b3XxtwIdmltI2QgIoaay2KsbD1zybHNLfHK7LxxibGK0QFBQI0JEVF0uCPJV1p7PQpj37ZyasQn0LA/OL61+dmnnkNqHJ7CDIDeh8l4GkKoqwDIk8m00LZKQQAZ0DRhJPW1tLD7ul4CLIC1zAu6pKOOHHkS4tZga/bpZ0acTCabm5uIUpWlJkExs7UkQMH7ybhQC48AnnnmmbNnTo3H47JU1zGDLOB9RgpBRGZg9oKooBBqK8e8AQBQS1G7RENbCAsLxySawmAMEMFCyjcBtACTtkUVZmaQpuFxkKZ3QreWSQVwDOsSUZIkyl7j4iguSiTsJEmKolANwFpLSYJtFa8+MCZCd6mXmkIvABBjUPvew4JmGZAMwTxpHBo3YCPC25vbDRXqMJh4JDt6PwA2p9vo984lzIwhbGxsPPjg/ZpIZa2dTscEWPkaWvAyPacAkKfZ8ePHPvzhj/93//1fOnfuXAhhOpudPXv2R//e//z61z/0N/7G37hw4cLx48d3d3cBQFc+SRJ1irQq5sxa2thY29/fz/P83nvv/cEf/MH3vOc9P/ZjP/bgAw8eP378k598+MSxjePHt9I8y4mqalJVJTcwuAKBAT2gATKAiCSA8sQTjwPAwcFBXddC2HBaRABBIq6DMVrBEQb9/o0r8JVf+VZjaDYdGwuhqrVFROU18b4QSq5cvz4r6yzvi4gA7e1POISzZ07ff/+DKytr
+3vjl1566bmXHgcAIqjr2hkbs3HrujYWDbXYLyoKEMigBqPbjumGSCoJ06LooQleQFvNg+YLA3SCuBR7qzMCYPOnDs/BjoH0Z4u6m19HSormFdyUlUSFHQVAoJLSg5w7d9KkySyUYJCI6rrWGu7IabtDPXKQOnzEiEbU6JrYeDeDroxIE1MjQH9sa+2+++/d3x0hSDWbmgUUi7lLmTkwL5S3LkysVYsiv8ZFO106JlRXqMTryAd27zliNY9IEJgHLLFjOHILoLj0HFkMdi6NJ97crafitjOBZqloJ5nIZLkT21COqdq08tPQmjhdVhjXpJlSZ7O7UieOFhGtm5dm6Ru1101ZltDud57nSZKsr62WZUEEWeIcGYOg+FAAgMCEiMAN9KNoWcSrORigY7voB9U5ksSur68Tga9q4JCkVg0jZh4X073d/Z29/b39yd7+qKqDViZwm2Gopqqwn092/lrqvjGuW3f7lsYZ3Q9dTUvvvHbt2sHBQWylYC0BACFmWba1tYWdFkA7OzsxXdlzUJ6OiEE4ejh0QyvvZ1U1m800CTyWA1VVVfsycO3b/vCm01NIJaIxRseLbR57pNtICKb1D3OnykgFTORW0Uusz1TC0wAEtggt0TMRCRixgUxSXGZoDVxEnM1m3KYWqh9bWnyPSAxxL7qeLYzoBIhEDq1BNIDq5DPz4C4RWWscUdvvQbWqRZ2yqxBT+z9o2TnNv9efWLO+tfnQQw9ev36118uYfRPHJZO6pCxmvTwHkcS5xLlnnnnhwx/++Ld8yzf8w3/4DweDoajL3fus3/uGb/hLf/AHf/DmN7/5ve/9w1XNhrWNWNJNrKpKJDhnADhJ7OrqkAiuXbt29913/97v/d5DDz30C//2FxDM617/Rs/46KOPv/TCy4GBbGYoSdIeM0vgwLWEIOKBgx5GP5s98cQTg37K3NRTGLOQNCqCwg27cM4gwKWL521mAIP3FRnWQhf2QmhmRfW+933gqc8+u7u7/0d/+IdJr3/9xvU77rjjq7/ma970BW8eDodPPvnkww9/4srVF7WGUz0odWhS9uq6zntpzN1zxioVGWNs4rr0ryFLBJjMylld1aGhikb68EK9SbOzrNKkbQfUYvxKm+oRWV/XP3eYI/2fu5rnY7eldhAIYMjm6bFTJ9lgDaylmzFTcvnni2K4+1cBBmzF0BzoqOuUBm0ppt9bX1f9fv7aB+7//d//+HA4rP0Bs0+zvCxLBS3RsxU5o+80so3PjRZJFN7dtx6Wst0JHFZVDg/6ZuoMdCpDlhZFWo07Lln3+66c1g+LezxXoCJGKLQuO24L6pm52ygm+vei1zE+U39SlqViJoQQTAcIsFlDY7CNoJtOg8W4CPonMg2/01dErjeZTHReFWKv18uyhIgcGSIY9LJBr58kCaCEECyxYoxIo1QBsycBJLJELPOG6vFIxIkbY9RpVntvrBWAEGoEBDD9fp5nGZGaTTFvxXrPAQNPdc0xhLm6Y9uG8FU1Q4J+v1eWpQTWxGACIIG607f4sOilQ5BhcacOU5QxpihKDmE4HK6srADw9rXrYJEZva96vZ7msWurhmef3RmPx9rKIknTmht5zAg2TUQkiEC7UxovjMwiSZIsT5QkuM0ZNh2YbmiDtXErEVEVuJj1cyRVKzVaawN7DqIMURXB0LbpVXqbzWaq+enY1Iyz1moTRmOc9yyCxjhhRWaG2awismSt1oIniYZROUmSEIJWtijojYJP6XvjUY1TA1AvIwAY0Z4CGshnQLKIGhBEIgfNqWy9drBcmHT4uEcHX2sgsp4PDj7Nsj//tV/3i//mnRfOnZ8cjICZBGpmAAkhVNVsMBjUdf3MM8990zf9xe/6ru988LX3r6ysAECSJFXprXXKIe+5/8F/8A/+0V133fNjP/ZjW1sbKysr6htoe2+LMQ2EPjOXVbG6NiyK4vr1q5cvXzLGfcd3fMe73/3uH/2f/t5r7r5zNj7x8J9+oqqqs2dO5YO1YjpF5Dp4ErZAImwNoTEgvLe/89hjnzl79uz+6IDZ63Fn9tLQj3qVBLQ2zAckuHjxFjCSpqYqq8BMloDBmaQsw2/95u8GTE6fX/3V/+1XyZjbb798/Pjxfp7Wdf3Uk59V/RKJfV0G8apeMHOofRS66ieLHovIjljbZDEHgipUBI3CuLu/d3x1RQjZBwnBGFPXtbM2tFkRqmIBgFpxXfbeFbpdsw3bMDnOBzA/FF1u0JURSwS0xPY1okRErNjaPgiAS5Kd6ejCrbdTlpQcRCRNElEsfbvQeHuJscAhGax3NngOiPqWufuQQ2iLVrQLJYVQC4eLl275/Dfd/9LL1/r93BijgYQlyafIglH36c68a2d0xWdX6OLiddMjdihKejPhfeRPoGPU3uztR65gN3bbfS91etdr8Wg0VlZXV/v9fnxO7MHQyrZ5AxlpISfVbmbmsqrqtteYNBHZmPZFh4fa3X4iIpx7MqM9qla4tVar+hwZ51zqjK9KJMgSCywCjVOxLot5HBfQGAMs2piv++potCGiQFNNX9f1aDSazWYiIj4453yoE2eGwz4g93uZtfPEHGbWcJT+zzgLhAySZClig1uSpmmWpc5YS+bUqVO9XpZaZ62tfRk6gC3yOVzQpnt0d18nEhHTiqKo67Lf7589ezZ1NlRlP0vrsshTl2UJIiZJUpYynU51sxR3BQCybD417sTCTVuAG6MVuilxETQPIMZTl44Joio/DUxptFl12dW0jWEOaU1e6Cgc0poLOtkQQp7nKysrmqusg+n1evFpatjppLR5M3daC1Pbj2s0GmHbJ7E72m4uoUQHnSJ8KWyhDxwCiCAYIAQ02n1hiSdAR/rqLh368GdepEkqZAyAfNEXf+Hf+Jt/7aMf+3iWp+pqUuVjOByKiPdVliXWwrd/+7d98Z/7cysrKyKi4jlJ06ZSmQgA1tfXv+t7v/ed73zn2trGJz7xp3qs1ItgbaKOBFUfB/2smI6sNSurg7IqdnZu3HffPf/hP/yHu++5+3//9V/P+oPP+/w3EyWffOTRa9d2iZrSCWMTEXFJgoZCXQHR7u7uh/74j5LEeu/REgCweIZARMwSQlDz1xjjnNnZ3T59eu3kqS2QAMhq8YdQG2Py3soLL72ys3Owubl1sD++fn37bW972z133Zk688ILz33mU49sb19XXIhpcVDV02pWSmBnbJ5maZpaR2SaFqUNMQvoeyNlAoC6EzlAEPbeM8DBaFRz0AwAIlKakMDMvgEeiRZtG9DU/WsweBftDVgUbHJI4i5dS4e9+6sjrwBzeUxENQdx2FtbqSRUoXbOaa5W1JVf5VFHDqM5+IvJz+p2buMpbRAntcYArq+uvOnz3khqMkIwBttWfsvMgjrZQNgmX+BNkpvi57imR8j1Rfl95D1yc+l7WKbehC/fVFc4vHbxUiE3mylccNDeq8qktPoIEV3bXFY6LpQl6ai8IDqiI6cGRDIOcSHHKq5DnKAseGysIkZFMdD1zfq69t5bS44aB/Xx41u33X754i3ne71MObjOJU1ytYcAoK7
rspiFEFxioKMzxsXpcnm9X61DREwzV5ZFPSuIKM+zXpptbW1meYIN4g+IovW3h6+qZlrE7KuamdUoDCE4k4jIcDiEwI4MEa2trUAHeaM7nu5Wdj+0FHKEw0rNcR18XdevvPLKtVeubG1tnT17Ou9lIpLnaZ7nlgyhECECPP/880QUm9XbNBmNRiGEEOq4QTGgEFOKugsV5bEOvjvObm0uM2uSlBoiEB3pjWuWbKeT7pwwaH4A4xy7VDEcDtfW1qy1a2trk8lEIxR6T8zYUqqGaByEEOo6Ko5VVU2ns6IoFe5K1QiFW4lvnO8LIiKG2lOn0yIighCgAbJCyEgBMGgXkCXh+//f5eval+VwbeVv/+0f1lBIXZUgUhQFIuZ5tre3N5tMX3nxpY3VtXf8o3/07375nTvXbyASBDbG+lY5A0AQAGMB4Mu+4it+9ud//ju/8zs/8pFPhBCSJOEmfd0RWREMIRTFJE2dr8s8TQ3i5sbas888c9vlS7feevEbv/Gbvvu7v/uJJ5685eKtd73m3mefff7G9r6xibMpIrregBlAUUSEX37llb39SqH0vPeCTbkas3CAtiebCd6nSbK7e/DmN795OByABCIAQpukgK72XAcQpnvuvd/Z9MaNnc3Nzf29nU9/+pHHHn1kd+cGh1BMpzvb2wf7u76qEAKSGItJatPMpZmLZKl0lVinUHTR7SwATVZpCLHvggHY2d+rYhZLW5jOtW9CxYgAmmvV8UW/anhx6bqZCFhim68iKY54GoshQqJZKPvrq2vHt5iQmbWDuDKoI5+zJHG6z+wKEVkq5pw/rfXCh5p8XWslzOXLl974hvsO9vcVK7g7H0XxEhFoO+keFpOHBeGStFtapvjwpQ9HCvLu3I7805Ey+8i3LP01DnXpm/hBgyLq3HPObWxsnD59+syZM2o9hAbnYSE8pkaGhhBipBZaS0hakE6a9ywzEaEwTiSy7CVxuLSqUTjpf7Ms00emabqysnLq1KnTZ06dPX9ufWstgHgOCEbIkEtKH0IQPeTOOTQkMHf2dl+qGUChg4XCbRo2AGg1S5ZlzpnzZ8+urAymxRgFrIl9fvQ8EwApfoWmgAGAMaYs636/f+bUWQAoigJZNJ+8LAtr7cqg312EJd0oXksy+NDuN+FDIsqSVA0X9RO8/PLLly7e8toH7l9dGeZZWs6mgZtcp82NweOPP3pwcKBror2V0M4hMKlNyGp3qlGqpNW6WiPYdLWuJc1JFpMhoM02iIJtrrl0ph9nLdDNRp7DteoiKIyRNnxtOhY41zoDF+IL2FrSkVDVfI8+bSUS5nmcRVpfd9SQoO2JhETGWMQWWRWwVRZMR300cV/a63M0fzV9YfmS4K2lg93dD/7RH9x15+1lWeR5bgz1B/nm+npdlgd7u6PRyDnXH/Q++ME/+YVf+IXf/M3feO7pz4LBxhMugkTBe0AEEQ7Mge+48zU/+vf//r/5tz8/ns5u3Ng5fvykThY1kGRcah0JpNZUs2maORE+efL4ZDKazaYPPnj/z/7sz917772/9Vu/nfb6b/i8z+cgzz3/0mhc2KwfvBcBFhBEQPvMM89A60XTGXnvNSaCDXY/G6Sqqvr9vgg88OB9RFSMx6UPpQ+eyQsxuCzv5/3BI5/8zIf++KO/+qu/+ulPf/ozn/lUVRYh+Nm0GI32W79OAGRjsN/v53meusQZa5AMkiVjySTWxa6g2Gb5AUDlS+89BO6ewSxLdvf3yrqSGLFWodtaI0sGRowkdE9r95slUXKkaDiS4d/s53PqUbhKFoWQREQFqlw7viWprSGgIUvGIiHiUo0fHiV3l8YAi8yzM2yeUy8iSGD2IQSLiNooI0vSu+6680Mf+WQ/y0fTQmGuVR/EpkbWwKKE7848vnVpfIfF3uHvu99wJzIaZ9j91c0W9/AaSRtR6P6cFpF94m+pi3PU+VOaupjCurq6urq6ysyK5q9YQlo5pz/XEC8tQhZEY0g9kCq5lRtmWVYU5eKSGk1JCR2giS7XjmmuUe7qDXmeT6dTa0wIdWpdr9fr9Xp5LzPOTafj0Wg06OVIGAQFaFqU1lpC8kFCCKgRJqEQvJA2oiFUziQsLBwAUDl1A0ZtbSKCdR0yh3VdlqVHhNOnT549c2o2q7aLfWst4qxLGAwiLEQg7IXRGGcMeu9PnTgtEl586flemhVFked5VVWE6Cz1+pnsLWS5t6S4pOotqJ+iaPdzAlO6EgAvyD5UzjWvnkwmzz/z9P33P3jm1IkXX3z55ZeuTCaT3d3dUIfBoPfss1efe+6548e3wEjmMmau6zrP06h8wDzpgZjZGAvAKqJUirfjmSPZQluuo7IZEYi0ooZ1cxVgIXhFSmHEBpPctF2SFKJEeaIE9tJ0n9U4a2iLQwBgd3dXkwRv3LiR57mmN4cg1s7rK0yykNgclcK6rplF/a7SwZdGxCSZd0XUiUSvOzPXNSuAr4kdjdpdAkRAZdGaTsUi8rn7mufbDTwX0nNnNoYQfvmXfvEHfuDv3nnnhWG/H0IoAgtCVc3UykeCPEv3d/cuXzo7GR988k//lJn/wnC4sXXcWMshkLHGWl/X1jlq01nyvPdXv+3b77vvvp/6qZ/69//+1970ptcVkwkzE3EIAYHr5lCr35iZYTDo13X97LNPX758kYj+wl/4+re//X/8ru/6rnMXL013rz/+5JOXKMn7vSTPy3KWplkI/tEnnkSEoigr77Ms99yUTosPgQHVnYuMiFmWicDtt98+nRW+LrPEMZks75ezenRw8NGPv+93//O7/uSjn7i+vV/XtUttktrr16+BBBAiIWa2htJeL0kNAFuTtRreHBQIWlaJTYKCQYQQfFVVnj2yWGvZmIAizBCQiEbTalaVmGZxm4iIyHiNeREAg0hooL9uIi+6rD7+87Ck+G+6jvy56jQaUq5DhVly7MypUkIgICGuPTUlzssdgw4/c2nAHYDrprpkqf8TERBiaBpHBSsigCDALnF33nnn+XMnbuzsMiAain2UGgkBRoEaunI+joAOpY0cXoXuf28y+mVDvvunpTsPP6qrqoQOpGX3mZFxLA0P53H+uUlhjMmyTG24LMtUyI3HY0WqU4yFmGIAi67aJU1F+XKLaG+1dyE3aUq+u3rMqKhtcfBRP4gT0VMhoro7EkFRFNooxxg8duxYf9BDgl6vV1ZFgMFkOgXgPM0CiPhQliUD6tv7eWaMCUEAJU3zWeXjglAHZFgFiGZLImKSJF64qEpHxMCImCbJ3v44TZPTp0+maXrl2g1smxiKCGKDiQPiiUyQYAyGEBDgxIkTBwd77DldSatqFkKwxiDA+vr69evXj9SNmBdqE7sWUUtFoWNICTatc9FXDf5lVZVCxjk3noyee+6Ze++9v9cbnD17dndn/7HHHtvZvTEYDI4fLx5//PHXvvaBJElmxUzDYG3cF1Gdq82QuA0wB02GiiFD5nltld4cK3+IFvI7pFlhihNUItRE+sOKo/5Xt4M78JD62yUbWrPJmJnIxpuZWaPBAJAkSUxiIGOsiA+sDsn4fTTioT
Vr4rDjhxCC97V1Sed4UbtBCk1mpLEGFmA7P8drQfrOL0bEyeTAWtrczLfW165f357NZknqyuCbvlhk0iSdzWYbG2u9Xm9/fz+EcOP69YcffvhLvuyt0HaCAiDrUtUqal86Y421wvzAg6/9mZ/5F294wxt++Id/5O6770rzfDoOzjkjnJgksHCbKp8YCiFgatfWVqfTCbO8+c1v+pf/8mc+/vGP/8AP/MBb3vLnHnz95gvPPbM7Gp89ezZNeyy8vzf6k4987NLlC5NpQWSqqgoMaJp141AbmxmDLD7Lsv39XQA4efLk+tax7StX0mxA5F5++cYnPvHIe9/zX//Vz//C+tpwffOEMWZ3f89W5MOsKqcigoxZ2t/cXE+SxIsHCDZFX85PemunNr5T02BgcdT7Qwja/lajvERIQBCCMUYAyrpWkzESDLIASXygiGjVY3NQP7fNX5LKS9dhCQKvGrWNbe5IAABFQhX82taxleObu9UEEA2gr2tLBhGEFhSFVx9J+3aMLGg+UYkwf5rQpk6jAAA2c4mgKcqKLB7b2nzj573+P/zqb28eW5vOZtLABTd02ZVwR7346EU5rNEcOY0jn9z9U7Q5uo9aevvSuw7vxxLzWhpz/NK0Rb0a3yUiLZrc3t6ezWbOueFwOJvNRqORdBAPoC1O73rzou+lLEt1SnOb1NMCZbh4TxTbzA34X+hUnkjjM9QTgjpkbrrgoUI/aCKG+ldF5GBvP8tXfAivXLsKgfu9DKVxchZF6TlkWXZ8a3Njcz1PnDVYczP+OJK5GtFRU4jIJBGcXZIs98FXVTUYDDY2Nm5s76VpurayMpvNikLBvxo1QUQAhDnoINkLAu7t7W2srq2trpVlaQxZY9QFiAgKrhkRGZdUtI6C1T17S3QYITalLCtLxlhbVVVijLX2ueeee8sXPhR8vbt949SpMydPnpyenCLJY4/J+fNnv+Ebvn4yHu3u7ua9lExjGjLrjjRQ8lEgKl50fHsMTBhQ8jBRgsYUpyggAQCRYsBCSVf9DdHL59umIFGcSwc9Jv4q5s9r8QwAjMfjjY2NKK3j/sZjAqhi0cxhRlppFJ8Z/fZEFN+/9Gr94HQAElFsW6hn/Qyg6Lu6P4gInxsX/jMugbouE+c+//M//y+87eve8+73nj592hkbgqDYLMskNJpHlmVExOLzPB+N9vv9y88/99y1K1e2trYIHQAANonoAGDIAZKvS+tcOZttbm197/d+32233f6jP/qjTz752bvvvms0GkFg7WyRJUlZ19i2V2EGlxhAi2CeeOLxc+fOvfLKS29965f/xDt+/Du/8zvPnr+4u7v76OOPr62tnTt77vr29gc+8Eeve92DV65ctTaptK0nzFkBcKP79vPs2rVr999/5+233Snep3n+/PMvferTj//n33vv//Lvf31lZeM191wuptVoMi5nIc3SsizGk1Evd47McLiepf3gUVisTcAEDrUzCKZpe6XHxDMDoL5uNptphwZjjPbPDqWA98AiJMbYxBoH3lrEplWXepsNgkCrRELbXl2lOzY46hqVmPs7GUTgaOTneLrx5sHdRY38VYlFYQ+YDZECTW6dPOGFa1ZfDjWBIgRqAVxv9tj4ZecIdLVSIVIgv5YNtb9SNgIAtq5rm5gkSQSNQXfXXXcB/LaeUlFHRKO/iMYh0Mwd3PHUvcrIlsTk0s1LE+gyl6WJHbnWN3t7/L77zK5cPHxFG5TaosYmTwoEAKqqUsDxwWBgjNnZ2YkAvxF7wbctYpY0DH11nucq+fI8VyCtqqqGw+FoNOna0E2mgwB0gPUhIqYelRSgq6UHxloyxly9epUM3nfffZvrGzv7V+u6NgQbq2uJ086ptqz9dFbs7R5s71wnosuXLt57910bG2uz2ayf9cs2OzpG+AAAae5UMG1fHSIKwU+nU+MSAHYu3djYYHmCmdfXVxUcuywrzVrS3iiGUASIqCiKfj5YXV390z/90631jV6vd3BQZVmWOLezs33s2LHJZLKzsxOtrrgacU/jRhPNtTEtczpqicQ5B9xoM8wNYvMTTzzxlrd8qbqXy2I2HA5f//rXnzx5/NFHHx2PDu6//17FWrHWBgiqk7XO/2ZG1lpFjosh1a6ZyJ6jitbdQWOMMTYegShotQ93m7rFOjC9TXOhlwhMjVRp+9JoyVB8bL/f10TuLMtawazR3HZU1gIzEIEauEkCIuy9EnaM+EYyBgBjGpDkKKTjkRGRJLGmGybohA/m29GeUYhoBZ2DeOTx/DMvItrf27vvvnu/7/u+zzn37v/87n5/qOZ9XQdgqapKBJxrVkmdCvv7+8a4F1547tixY/qcKH2h5QnWpcI+zTJ1TX/113zN5cuX/9W/+lc/8zP/6vWvf9BUXjwgUVXPnHNVPbPWhRBSa2Z1pZ0ch8Ph9vaNPO+9/vWv++mf/n9/+tOPfu/3f/+DDz2wsrr6xBNPXL16/fnnnxeBsiyNcTE7BKTRyJMkKcuakAAgz/Nr1/mtb31rr9e7dvXKf3nX7737Xe9/33v/gGx+6dLttZdr118QoboSFjOZThH86urqyjBPrQOwCEZtiQC1IDvnjMQs2qZ7h6JFjMdjrfgwxqkHRcPPWZb5eqbtOIkocYlhQGQGmFVldMgBWWRwiGWjLrfQpIBCWlADACBIAgEIheVIVQzbQHL3myXuCjeXAq9yiYhBZBAgXFtbK31FzhKgQZNoIxMJgPIqT16yBKARN82fmr8iIHZNvgXkJYFgIc2ZKJSz1JkQZme3Vv/i17zlP/3O+/vDXkBb1N4YgxCCVDYxhAKQdIVrfHfEi+8OrnsPdmxWakGUutKx647rTlJax1pkxNym2/FcyZpbI5H3dX/eVdK7axdfpEzWtHaAJrs6Z7Is1/RUIlKAe0UqjyIzQlBFedxVJuIK6Ocsy7IsU6m2srJycHBQlgWiAHBde2MMAKoPM3Jz2+nniI03A7nFL20NkaYcs6x8YEiGyYmTJ4uyevr55zc3eqnLAKDXW7EuvXbt2gvPv7izs3ftxk4M6n/26WtVnb/2wfsHg/WZnwQOta8bdzrCwXjU7/eRUYWurm2eOF+DBE8m9d7P6jK1Vkj6GV44e/L5F69Yl2VZtncwMsZlmWlWjLD2lCRJXVVElKQWWbzD8WRPnfzMMC3KldUNsMnVG/s1GyLRAF70hx91DJqgrwg4l2uOiVKiOoJaFgNIElgAAIxlMi53z764//E/fezr//zXvfLKSxtrq+trfZRw1223bAzsI488stJzd955JxjrvW/6SSNECYcElqymExORSOu3TLK4g5X3FsBaK4JERlt46FZ67dlgE2bWfCWv2Jkiouts0BlLbUU1AGjb8DRNiSgI13VthH0dEJEDDYfDuq6zJPXeJ9YRkYIqKIRTbzgIIEDOZvlsNkvyXjWbQVkDQJI5Zfb1dJplGRrw3tvcQYm9vCleKoqizZ5rrHmV7vEUswRjSeqiKn02GABLqGv1b0uQMHeJN4VnaAx7r/sGAMLACDbNoVliVAUFQFrVVoPKKBJQ8dGaXQ8gIuzH4
/HW1tZd97/2+/7WD545f/Hhhx9+5JFHELLEZkhgbeJ95atZkiSDPEPEUIUQxJpkPJ7t7B1sbh1jYSLT8hANVAMAACUAYKzxvrbW3n777f/4H7/j/vvv++t//TvvvP2W1dXV8Xhs0sR7n6R5XdfOpcycmAQEHBnvOU9T4HDtyivnL555/qVn/+e///a/+lf/6tve9hdec+edo9HosUc/S8bUHopZZa21lpi5yQMAI0BIhBhE6iAeCJjsr//mf/nZn/3ZD3zgD2+77dLayTN1Xd8YXauqCqYsUCOII0QDvWEPDbnUoqG6Lg0GoBIZDaO2o0UbEDUhIDCAZx5NJsruAgMLAggSIVFgnkyn/d6m6Rsoyr41Dg0R+IwSl5xLNw9u7MBFIbIWQkpmgn6KnsAxq5izQtxq840zDFhABAEMkgiD2sEdtkzzAOiC2IvXf6vozb0pLZTkhRBJdg8OLtx7B673ggkuSB9MZkyNUBEHsoyQhIW3RJa+JFY642myMSTUzB5QBAJwYAmk3BuBEAgFJCCABQDl+MyMCCsrK3fccUfyrvdnWbY3LoxmlyCkLmEFFqeF/jxxTF0pCIfEbXcC0Z24JJi7i4uH3NR46IKO/D5yG7pSVm5iPsaL5qkxGLEAiUgrN1QpjlW88fkLer2ItJCWh230uq5XVlaMMWVZ9vv9EMK1a9dUOy7LUpGJqNODdmlrD2tb0rlaNyYqawaAyWTinDt+/Hiop3Xte73elVeuPv/Cc+99z4dYYDiwvcFKVXn1hwPAH33wD4rp+E1veuOwD63ma5i5LAut+tVqnHYr570EvPdgyKGpva9rn+f5mTNn0KQ7O/tpmooPLkmjia9aWgjBOgciRVH0+/2tra26rouiadZrjPHCXlPbDDTy8qj4xZG7qY2+Y6V61N5a1RBjkZIO6dTJrT/95GeOH9/6ojd//mR0UNd1P0/zPD179mySJO993/s2NzcvXLh0MBkbi71eT3gOwykiHOZJnrogKi+jHmaMcU0eeAxJNB5sSyRtVBgIETBiPc416PaD4jodNkl1bdWovXbtWr/fD577/b5WCoUWJEtEFIvDBwEtdC4KdSyrEFVTu0GYkSbLV6dmrTXO2bqWxtynuvG1Bl1YPTWGTKslkzLW0ODHiffedAKBiELSNDCdzKbWWmcT1I4F3qO1gFjOSmO8a1pbzndQHf8M0qyd1la2XoTJZDJYW7t8+fK3fuu3vv71r//ABz7w2Sef+9SnH5mOpoNe7gzmw2HqbF1W/X7fGDy2sXkw2hsf7I8PDjY3Nwkb51Or1TWHjZlVZugYqqrKe71v+7Zve/DBB//e29/+u7/77je+8YHxeKxgOwpDhvMOVJRlSWjBRK9evTYajW+//fZf+qVf+uxnn/7mb/7mCxcvarK9LniSNJsFbUxKWzurOnVwcLC6svJbv/k7v/Irv7K2tvaa19w+mUy0E4zSs0F0LjMQjLXkjDUJQxiPGhivWmoQyVwGIKEO1oohQgTFGS0mxd7eHrZpntBCeEa+5L2fTcZrg6EYGu3uFNOynydefO0ZPKxaKOvZ5tp6OcKimJC1GbmqnPtjuM3mg8iZjzrLh5n//4VXUGZpgIgCSG/Q3zp2LEmSaT0BkQDc3IBNh7ju1RVPsCiVuwNurNtoCkOUUE31kbTeMotkjTGhqo1z7BWiIVy+fOn+++/58Mc+lfV7DGDA+FBpboghw0eZ/FEwdA2UrgnYvXlJtCyZsF2hvnQd/vJIOX14sbosW72FrWo7/94YBbcS7xlRrDWa5VuWRRwh80JuVFzu7l/NUVhgiDgcDlWqWWvH4/H+/n5ZlltbWzFeCwDRK8CLmTvdRY4SpTue6CgmoiRJNOx37Nix7e1tg4xodnf3n3zs8SeeeO748dV+v2+tDYLT6Ww6K4wxzH5vb+8DH3h/mrpbbz2l+oGqGnv7u8gyHA7TNIdWndIjhC0oBBFaa30I4itr3drqsK7D9es3EGUw7KOxo9FERADRJUkTGTVGAFTRBoCqmMXpA6gTtAXe6mzrEt0fSRtRBEbtBNuCvC7haUCWmUfjybmzJ9/9nt+/ce3a2772q4qiAPb9fj4crCZJ8ra3ve3DH/7w6ur6+tZmCCH4yraAzE12UuuMUe1HRJBE2pYPiGiJjcHWHI96J2vcWlrXDggIamLd/MjoW2yLOgnx5jaipqTi2/Y13nsOgsCRZYcWlBQRNVRpranKUpdIRWld12mWkSUlHjBAIcq8ubJrm1ytTDUJZo5olyqYjclFhGJHZEQiG0JA9siiHQViRanimTAzBxFqdk2xqIAZjEOegWnOQpR8AADIOvuuZ1KaPaXxZDJYXUXEc+cvnD17/vLl25966plnnnnqmaeefuzxz7zw7HMEYowJ7PNeNuwPDMLqYFjVszxNAAWEEWNcgKXtEk8UM2AIAJIk4cBkzL333vuv//W//uVf/uW3v/3vP/DA3SKh1+tpKDQ0/XHnaP4AAILGmIODgyeeeOLixVvf9a53jcbjb/iGb/jQhz504cKF7e3tNHVVVakaba2dTqcxASKEMJvNVldXtfPYieOn1EtczCa6uYiYJIlxhkFclgNAVdXFbGKdeuZEpBYRZ0xVjkMIzpg8zx2Y6XRaFMUtF2/NGV544cWslyNiXQVQ1K1WxdcplNPR5vmzxf7IrQy++lu+ae9gdzIZISKEUI9Ga2trla8rDoKAxpRVpW3+mm26ienyf+fFCCRgAEWg9NVwfW11a6MMAQU0GcIzc1t/JfBq8uimr+DW0FcKF1U4Wi9dixmALEJio+auVF6W1WAw+IIveNMff/RTvTzdPxgbl1hrDRnQJI42vtW9DsunyDKok70Z/xSlS/xSn7Mkb7qviDcvCfVX2cjun6ST1rt0RdYWBxZzd5lZPWbQxobjPTHb5dVHCx2hxW1F6Xg8ds4dO3ZsMBhoWR408cvGmokGVhQhkRFElhdPRdw75bPj8Xh9bWU2mz3yyCP9ft8i7e2Otm9cL4ri+PF1AHAutdZCCFVVXbp0qZoVL774okEBg08//Vkkf+zYseFwmKTJeHfv5ZeuTqfjixcvrqysEFmNtoo0TT9DCGTmjndrbc3MwSfODHq5s7S5uTmdFD5nY63CqwEh+Sbl0jpXVZX6GKLxF3wd6eSww/lzueKCx6U7fIpESAT17zu7e+fOnnziiSd+8Zeuf8s3/aXhubPXrt0QCZsba1manjhx4o/+6I/e8IY3BBANH2DbhAoAQEg6kQJuW1QphcRzCIsqQmRAKMIQQmh7plJQf3IMCZsWFaHbVFik6WyGHa9ACEE1pyzLFHMjSRKyJp5NFjDGpM7NZjMEk2cJABqkLEk1RUSYq7J0rkEOARZf1clKAkTSmkEhBOZ5JldouzW0klIfA+yZ2rYTyPPav852EIAQUa/Xo8YhSUgEIhwCobHWOusAKVRl8N5lGQhICE03QwAwijHdPNnmeaLNFhEBKHhvkuT8hQsnj5+44/ZLL7zwwue94bXPP/vco4995sXnX9jZ2Uldkjh7Y/v6Lecv+HI2m46BPbCWepju
wuKC2ifMTAhkDIcAAKfPnv2hH/qhu+666/u///vruj537hwA1XWtuCWIouea0DibVFUVUPJ+b3tnz7oXH3vssbd+xVdcvXr1qaeeGg6He3s7IqIK9GAw0EJH6fQUcc5573d399S/FfkGIiYuS9M0hACIs9nMzzCEoF6TWRGMicmDAglUsxJAOLWhCDQF7/1kUhRFoURLRFXZAG1Cx8uoqaPXt7fvufO2u++4/eyp071ePqtKz3WaphD8bH/kKn7m8Sc100IQEjLaLHLJMINFztx8AIAOz5SjPv+38gFYlBeISMaIsBUIBGVVnT22afK0qgqyZJEAkBlV8wJE4QUOHylhib3H7xEX3DzzBIi20SoRBU2/ahF5bSi9dY3b0xjjEisQLl645Y7bz734yvW8l1alBzR1qI0xwgEtLk0pjuDw/CP1dN22XYkCi2zxVdSNeP/SPXjUXsIim+vezLz88Ibvt+WM0MJaqVyJIpt5DsSvwibK1MNLETcJWyQKRcLSxnDGmOFwmOe5pjZ03GvzvNalOmBY5OBdDUY6ziJEHA6H3vurV6/2+33vPQQajUaps3XtN48dK4oChPL+cLq9XZbl5cuXP/yhD4YQkqRvDD766KN7B/sXLly49fIl9uHxxx+dzWaJM3VdhzbTuLt3zJ4oaVYM2VjkigF40MuPbW6MDsbTma+qKgm29t455zmwZ9Q6DQYiYhEW0cXsxvWpzSqHo9LdjyQP6Xibo7Yki+qaLKreIkKJTZD2Rwer6+tJkrzzne/8G//DXz935lRVzaaTGSCfOHFib+/x559/9uzZ85ODkSbvhMCtdBRjHbbmL6OkLlOhonHTcVUfJg91C0cfbxyzhIUGmsASkbb0vboyQbjLGrROXRgITc21iGRZpp18XJqIiNpkNk2IqCgKNXxdklRl4ZxDJIjIxmXZ7+cuy5DZOYuI3nubJMrr54tGWvdsotpKbfK/ALJ2UNYSKRbt8dHmdM5dZSJzrI/mHLVCXZEMQwimxRrzvtK3G5POiYEaDA3lbcPhsCzrela5LDPGQAhe2Do6febM1vradHrr+XNnz5w+9eyzT//xH/9xnqbGIAqMRwdDXP3Mpx4ZDofrJ06ICCKDlisDz9s0NRMHarQELyLGOg7BJunXvu1tt95220/+5E/+yq/8r6973f1BpCgKRc4KLC5JVUNKe3luXF3XJ0+erKqqLOuHHnro1KlTTzzxxOXLl9QhpMxB6xt9qDiApp4o8vbBwfhNb3pjURQPP/zJtbWV0HZD8d4XRWGMqYOvah8EjDGzskYAEJqWszRNtzY2xpOD8fhAhFdWB0TkfRUCaT+u0WhE1JSA6zeKW9A9SnVdG4Gv+9qvOn382PRgFELd6w8FmZkN5qtZVo2nWT/zdSk11mUFhOZQaFLJ5Ujd+mZi9lVk8JFyZOmGhXcRgg8WjIhQ6tZOHWNCAEAWtCgAAZtRalNrkPng4SgudJg7tZxfZxRIABANEQCjQFjEurCIyEEA1bqq0NgQ6pXV3hvf+LrHf+U/bh1fDyFw4Lr2iGiTVP0YS7ONMrW7yvplFCTUyc86zAeXfgWHBHPk/ksL0b0ZO5vNh+ov48O6j+2erk6kEKLsZPZdvql/jZHa6Ps9ckhdbUOjv5PJRFOgtTkrd+pZocWjhracqbtWcUbQke7xr2pB1nXdCHXgLMsmk4mI+JpIoK6LLEnX1ze8vwagvj2s6/r82XOfHgxefvnlJNkqisnFixe3dw8efvhTo9Hk2LFj48lsPDro9/Ms6xERGNKqEWxS4bz3vqql18saKYJIBhyhWBr0063NtSs3dpLElnVd1TPrUikbx7LaUmrVGWO49TFGhh5zdujPOmDd3QQA5lqTyzSIEEKjTbVm1oL6AgDMQJldHwx293ZEZGNj6zf+029+6Ze85bZbLwqCQbuxvn7bbbc99dRTGxsbsQ9090AagwpqjSjRXTxP1OpQhUHSqnpsE1yJEoPohWPuRpehcIu1wm2ontp0iqiriQhqwxRHIYQ0TYuiWF9fVwd4dPg754y1IYSyrPM8r+u6gfNtH26TxBhTVTMdKyBqbn9RFINOWxF1VCBqWlmD7BbPEREhGeg0ZQEARNEoI7ZZDtYZYOGACBi/j1YyAIAEIipmhXPOEKAl7oS6u0emqWhChMBZ1svzYjQardsErQVCiwhcAYpzbu34VuJsv59vbW0w84vPP59l2WDQCyGwhJdffvmxz3z6wTzPVjZBAFCCZ2NtlMGRCWjOFxmnCUNkTPDeGHP33fe84x3/+PM+7/O+//v/1okTx06fPr2/f6A/uXHjhlLa9es3BEAErCXv+W//7R92zt19970//uPv+JEf+X/dffdrnnvuOY2vg4izFDzoPmpMQf3bd95552g0evjhT6ZpqqWM3GaA1nVdBs57vdFo1MsyS64oitQlp06crMvi+WefRoRLt14ofVlVlTEogkROKaSq6iTBJEmKWaXdCZVsoidJ4Uvf8sWvG/ZzFFhbW6nrUoCLqkwHmQQGZxzRytrKaG+/1+uVxsymxWE2johgUKuBGZvk58On+1WE7s2O/+dyv8JhokDw1XBzLV8bFqH24lE7TSEIgpV4xJZfsfQZD6n42mmYmiwCERHtskoEzJrYzuJDpChrmr6hAIQchOsSrXPO3Xv3a245//tXrm67tEfWGONCUBlw0wAwHKUgRGkfKTh+v/SNXpG7dYXx4Sd374Gj1JAlKbj0EDxkV3Vt0K6K0BWi3UeFDkZxnODSq7mN1DJzbIcuIlpMHJ8cZU+M23GbOLO0zYdnF1eDmTU4ZK0Fgclkokc3hADGEODBwYGIrK+vv/DCC+pkruv6ypUrd9zxmt3dXSI7m1Xnzt0yWJs++uijz794ZTSaqA9mOFx1SabdAoAlJjEp/629d860yV+MwoRS11Xey1bXhnsHo2NbG8ZNKu+L2YyI+v2+TjyE4MmjgAhqZTAzizbrbNe5K42O3PfDatySt5OZAZYhWbr3z8rSOtdfGRpLk9EBg6TW/cZv/MZXfPmX3nXXnavDFZGwurKyubFx4/r1CxcvarpKdy/UceK9N8aqjzoCjka6ApaOMi0iAoG11Sgzm9bvEomhVT7mEFdKG5EC4wQJG7Eda9YBoKqqJEk0vgsAaZqmaRqgKZkTkTRNqcUVgVZeSgyseK8b5JxzLsXW8aM2lrpzYt2dtKAN1KQpgTHzWE/cINMCdQEwQdIcmcRi2WbyN60QbHsb+LomROecZgUze0SrxBd3vYEjbu3ywWAwns5QmzJhDBsbtAY828QNBivrm+V9991njKmqCoCstZPx1Fr7+ONPBoG77n3t2tqa9kkEEcAGClCnrMNmNeqRlGOLCCAK85kzZ77ru7/nrrvufsc73vGe97z/llvOrqysOOfOn7+gRbQnT54EY9dWVpn5q77qqy5evPjv/t2/A6Bv/MZvfOc73/npTz966tSJ6XQavE+SJp8rJjAi4sHBuKr8Bz/4x6dOnbpw4YI6ORR+PrqjyVBRFAYFhYOf3XLm5Hh88MqLT4cAf+dHfvCBhx5
[base64-encoded binary blob omitted: content is not human-readable]
2sV6DxPy7qp9lvhIep/8hw0fbCchiXOzFpjR/+hlNIGQXvHjngymQCjdJLA3kTz3iNa6qgVpCIZjDFaYQjB+9aYyFpL5Im9pMqOj4/v3//oZ3+2SpLERkYaKojmFhTVcGMM5L3nnq4IRmEo6A1q0TQD9Ab6CmNEZALnXFU1cdytusFyMsZIW+hhEUoomKBjwJdrVVV1cXFBREVabDYbpVQxmyVJIjXBRnXwe0R0zpk4JedEB4rvUkwLubftdjuZTLI8h15OVU0D0NkHWiMzS1/3ui6lAnwymdy+fbuua6O0MHsT0eXl5Z07d6R2VqqnnHOKu+AB97MwjjkL9GzYaNT3gKrqUoKB0i9EvHzxROXkiAhKO+c0dgFkCcbk+UQaBgBAksZKqbzIJIgqU5kkkaTPETG4jl1LVLKETLBzc5Ugsck7udtuOhDEWpJIHhHNZpP9fu+p8/7rlos08cGFnoAzBG4cPzu9QMAoigTOHfqyiLFBP/w7HhPZoUN7Ourb5BljADuZw8xKgYgXVBxp471vWw8ATbMNIfSVVJeLxSJLupDA8mBR70sZcKnGjqIIKIwFo/ftoO8FSCioCBmT7373u//tr/3qfD6v63pfVVmSir3IzB0GCBUieudDF4Z0TVAt4Gpb7lrvWAFrDsoAPHn05J/8k3+CSpHGfVuuyp1VelPt3rz5OkZGRxYCIUFAAgi1IzKKGFQvZvsGRy9JF14T+GN9TETKXIXZwojMrks29WUsagSn7byFPhpxzVfkPnYtPzcAIEVjAPzo0aP9fh9FhoH1OMMPahgs7roLIoy8Q0m7cFcjFbSV/g/BWtsyHx0d/fRP//S/+TffPb6xuLhcM7NCe5V5HNQYC6z6JePCPc77pVrtJyjpFx1r7oNdMCpD+jSPeTxwn+ajw2g5DrPY67brce9B7Q06Q+x+idaKXSkcEYvFwhjz9OnTPM+Pj4/Pzs7Ksrx3715d1z/+8Y8++9nPHhwc7Pf77XYbGx3FRtZ9VYHWKDWXffrBD5eGDht2fYhGEXLFTMBXeMvhuYS+v+vyy9C0VRTZxWLx0z/905eXl9XBYru63G22WinvW6t0E2i7X1trjdbBeW2sUVrCL0opIoZA5CloywEAua5b5xpj1GazIeDFwYG1ejKZFEUxnU4lj0V4ZbjgqNHkMIPMjICMCMBKqSSKFUaMCo0FNuW+stYmCQjGEhDbtk2zJLRBR9paXVdOHO44TgCgrusQuGmqKIoQ0Fq7220ns2nbtpK5t2BEcmmtRA0Ma0OcOWbWRg/3TD3Didy5eFFDUFf0sfTBNcYYbeM49p4kwTboJO2VqAEAIPYSiAYA733jWq11kiSNc3mec88jPTmeCL1UaNvdrpEWzoiYZdnQQkBcg912W8zms9lss9sysw9+t9tZa1Ep17ZCPElE6/V6eTDfVbvj7BARHz16dPPmTWHyCr713jd1zcxFUbz++uuLxcI5F8fx48ePX3vttTRNPYXtdlsUhXOu3u8HXTWgsZRApdrOuRTyLAk4n52dJWlsIzOYNWkaJ0lUVZVMSp4UEl7uo6+tFOP2+omsjQcrRwZWKDv6G0ClIM9zra1v3ZCPF26psakXQogig5xyn86QObJxJKqIIex2u8mkQMVN28oybpxUi8AQaAwezi42dV1LVId6OK6Mw9XOff7FsObHokvkCTMLvECcLejqdLtB5sBKEVBodzvnXJopyYlMp9MHD7YCX4+MNcacnk3SKAYAiQdsNpssy5Koa2sh8Z7Q0wyoHu4g2iRJku9973u/8iu/cpDnTdME54UZewBLMwIRueA9hRCcQLHqtvGQEOJmt2dljbXIynufJ8WTJ6cffXA/yqNiOn369Gnj2uniqC2r5cmRZ9LS8cwTMpjYaAgehjgTIHQRM8LrQv7TxD70Tq3WV8a672k1oS8cHcn55yC9zB0sGfG5LtEAIExKzrkQHACY+XJ2dnoaxxH5drNa/dm//ZNf+ou/mMSxcy11vUS8jSIiUFbyvBqkOoUBgIBBIaJGZiZg1EZrYAJm0oAhuARbaNovf/71n/7629/+0x/kxXSzKy0RgFLAChhQ2NkUK2LUml9SgDs8J49sYRi5BWOdByPOv2HJjj/lT4UuXynjkXUJ4zsZ6+/hPGNN3F8xDIpYEqgA0IGe0AxBSyJgbqJoJs9kjAohIKrJZIKIjx8/ns/nx8fH2+0WEQ8PDzebzeMnj27fvn3jxo3Wu/OLVRLFFdJkvths95NprrUhF5IkETfQe+8YAMCorhyNgGRlDncbrti/IYSAEAiIIQytrxmQmClwZFMmZuG7QQ6+ZubjV6J3fuaNH//w3Vmhq3Kfx1kbtNOqQeNszMYE5w0YTco4iDHe8d77oJVF1FZ3YpTYMwTPtWvDyc0bf+Wv/JUf/uDHf/Jn3z0+PAKtnODqhW2RSEmLWaIo6sJZgzmlFAYwzKwsaO+ZKYqgABt8HQrVLFPn08ePN+IqEaPW2ijPOvjGpdEEEY2OvKO6WgsBp3ei94kDWWPAEQGvNuvVZnvjxg3nHGq01go3staaCJxzWhlrDBHJthfNSsAu+MCEgBxoCLcqwLqsFAOhY+YojTh4F6h0exvFWus8z50LEjciIgosgT7nuniJiFrvvYznfruz1paBIqWaUhBGUDZlkmeEoNhD8HmatG0bJWlVt03rPVO73U8mk7Yn4UEGo0wIfLg4Qa1XZxd5nqdRhqQm2TSLc8/e6Lhuw3w+ny0OsmKqtH7y+LFVJOgSEceqmFobr1arxWKx31dxnB4f36jrOk8L731sE41KVKCoTFHw3vsoBF1b8ezTJBOVBgA3b7xSVVWUxEQkcePpdLbbbbTW3nccpQYNBEiTPIQQ5xNmRmhEhUMfw5c63TTJJL6dZ0YplWeFBD+applP50Nso6r2XYi+l8uC44vj2KIAXjqBoE0ku1sW56RYeO+NTgYb15ggRWXAKG5oMZ1vS7eryjt379rY7HYbIp/Ett6tgcIArR+E0pDO6KWTAhB7EpUyTNL5EwHAWp2mKQA1dQ19oTARtY2iQETsyTvC6TR9erZd71xs9OVlNZ/PkzRezheb7bM0TSk4IsrzPIpMHMeZtpPJxMRRMZkgorJWaQzMIfjYGtdUmnGZZd/94z/9R//N3zuK0jrsQwiBg0JQyhCwiSwRUaDWUwgBFEraZr3deu+PP3PvwcdPgofIxj4wa3DoPFa1gvPd5nM3P1etQ3Xp8mS2Wq0+89nXF9MJIvqmJvIsHTydB4Csz2IEBV4cdIYsIIFIZg0Anq7kOQtdHxBAYCZiB9jaiHrIRZ+TVUjMiOgpoELul5Po2izLtYkREVg63qBSAziuVVIcFFwIDUDQiojISBgHgMvSl2X55NnTjz/++M6dOwKxNMai6ksbxZJ6mcf5nOYfCXdRQNqY+Xz+uc997lv/9gfGmCLLyrLUqAhIouRwVaz8EpNkfLmxgnyp8TLWsvy8q8oveK5jlflpb7746f/fx2AlDApPKS1wx0EoiDAKIazXa2vt8fGxc+7s7Gw2m0kTUERcLJZZlomY2G22
s/lEmrtNZ0XTNGkcib3GEABAwcuCB6wAwp/n6YZBE+9S2ijByLLRiK/de1UFfPdH759uz4ok11pv9zsS03aofkZAoynQEDF+7hLMwftpMQGgd776tePj48v1dveH/0oCetzn4Qaje8BYvji8amSxCQAtijDLsrpx0+k0BNjtyu2+0lpbbZumMWkswlHgS23jhzVclqXu07dxHEuyYCjJlU0RRdZYy8ELR0rbdkT2vu8NIP2bmdm7roGg7FoZFuccUNeJgZwnGpXhKiW/leBqXdeIXfcIOXmfdmVJXgoWTOIo4o6UZamNlZzFbDarquro5k13vgrODaUyQvOZFdNOD+U5M8uGFSWqTMevaZOoKcs4y1B6TzkWsJjYlxLFNcYkkXLOQaCiKACgglLEU57nRVEkSSLO07DaifxQcSfZR2aWZ9lut0Qk3BTyREqpKErW63WWJdvtloEkQq61yvO8aZxgtSZFGscxog4haKMElmz6btBSBibZ2bZt8zyXTXd6epplmUABkv6grs1tRETCjSNrI89zWYrsWmZWXcyPiFECGLLHQwht20W/uzWvsGkaBWiMCcR13RKox09OEVEwHBJHES+WwnWWoWvSbxC2V+vfdN8n8kpFcWyTOObJZKh08k7CbGL/QV21zGsAiKIosZFSUFWVsfr06bMkSWazSQgBOEg3pziOMxt/8uRZPimWy/l0MTfGoII4jhWDzFGaZPfv3/+1X/s1RGQI3hMAKmUUP++xsHKuSZJkX9WIWNZ1Xdd/+2//7T/8wY+++ft/OpsWbRu0tj44UBTHsWvrjiHE8263y9MIgI6OjmjEz4Wj3hIvSrCXSraxTGaQNDAx+aZpQnC27+F95e/Cc2puOBWiMsYaY7g/p3zSz07XR9mHrplKCEH6uBlrrfN+v98DoAAT7t+/f+PGDWszZgrBB2IA8Cz6l+2ofAheptW4w9x2HCDMVFUVmOidd975nd//g2enF86DUgqF46EfHEQE1OOhuqYRXxzNsTJ+8SfXfNNhEQ/vjNXSp2l9fpm18dLv/+Tj2mRzF+IwYrAPrUxN37q5LMsoiu7cuSPVLwcHB8JO55y7deuW6J66ruu6ttrUdb1areaTom188O0kz5gZGChgXzIGBNgng68P6TVTZrzPBeo+1sEveXwOsdXLxSzPkktUgFzXZRcxluJCQM+sELVBR2CUllwGMHKnMhkZtTWJjap6n8ZJU7Uh8OlF9VmFQ4kIPg9nuBa0GI1qR2QhORoiQqWzDJvWNy7EcbxcLgNfVlUdxV2Ji+i/xrWTeOaw678rAjdJkrraQ0+2J7pESlnk6TuYaPCSWRQqDAQ15Fa1UQPMigOB6uxlqYKFnoSWiNIozrJss98455wLIu4l0di2XmvNHARtK0oF0RKR1kaS03Kq4VdEVBSFjWJmrqrqxs2bH3/0oC3LIewcx3HjWkQ8Oj7e7kpR23GSNHUjiQbJ4hN5cHR6+nQ2m1hr1xcXs+V8u9kopSZZ3rQ1eS//aa0Xs5lWvq7LkrwxKkmi4FpjEq3RWjObTfI8zbLEWmutFqqmg4MDMVmSJNFdYVVHviGFMUmSrNfrvuCnI4ObzWZ5njvfeu/X6/VkkhPRZKJFZxdFYa0ty9oYk08y7MvDxJjrLadICtIE9SboCqVUXZUdh1FPw8LMURQL5FsWofhDkjM2kHrv0XSYMmnZKSuKu3ZbrbUaqA/dAUam6+LnXeAYC0at9aNHjz739ud96y4vL1XPGgajNn/XdO3437GY8j2LbRrHSRRrpay1WZbleS71V865umqhqxFwAOQdIWIbat+0QyR5RzsbGWH+iuO4bjqemVleOOfcg4cnt07SNI1jm+f5fDqZ5IWrymkxef/99/+b//evtG1ttC7Lcsw0DADAHUSrDd5au6/qKIoI4OlHp/+n//x/d/vOKzfPL5Zzq40Ngff7fZzFxkY+VERgbay1rvZ7pUAW8Hw+HYsCHoVCr8U4h6OTbEwAoKGjEEBEg4q4L0noE+eRMdjPo5xQAXq+Ak5qrRV2FRndsulvYLjcIF9l8HuYDqDEQvf7fZIkTJP9fu+apq7x2em5wC5CABecBIihr7mUpML4eV6U3XClfbvsSBN4uVz8zM/8zP/nV//RyfHR+fk5GoOosMdzjW4XP+3846v8hBuA53Xt2IS85vDhp3u6196//nSf4oKPvcOX/nx02x3YpKqkQh+Z0ZiIuWMrlHJM4dyReiQJBAkm4smTp1Fkm6ZxTXvr9g0AMMZcXFxMiowApFBSATAh6G4oCEExACgAYrxKBBPCtTt87sXzT3TtQETNgAoneTGZFIAUx5Y4uKYObcuoOASWfrcOjDFsABn7TD8CsJa8v1JplETWKOLN5WZ5eNS0rQdoQwd8GESz7AQ9yo3BaNnIY8ox6EtglkBuYAyBdvsqjsu2dUajh679izIaWxiysyKjBZhjNMpEiKoQFbjdrmVU+42qnXNaW22lOQwRdXaA1h3LLgzaNLTi6ok0V9YOgr6qhPI6I6IoinygAR+Q57kAXgZzJI5TRK6qZlhmovUlNCIu+/LgUCn1/vvvv/7Wm8aY8/Pz2MT7quq6dxgdx/Enjx7Nl4ddXBQ6P0b+NMYg6t16c+PGDfEFo8js1mtxDZu2Fk6M5XIZRRFzUNawc0MqFxHbuhHbX6ZA8sTyc+ec8C/KaAjjo8R1ZSjEjEiSZL/f654qPEkSaalUFIXSnTMqP0HUQ5G3MWY2m9R1WxSFDL7wpVDP6yTuvgymQCClG1IcWZGMggZo29b7VpLuqqd8EhoQ7721No463soudsUDrERYhY0xhnxXcwIAngKi1Vq3rQ822AAEykbm4cNPvvbOTyulBHgs2m4AFasRewG8IBXHJmlkrNY6MlYSt2I4zmYTRKyqrGka50Kbt+xD0zQldo0lEMC5QNRI3kqQB0aha721tqnb9YqZuSiKs7MLoelwIcSJtdYeHywvnj2b5sXhweLJw0/+6T/+h1VVRVpyvcoHZOLeL1QMFEJog29bL6j1xrn33z/9P/wf/5f37t1brVavv3bvF3/xL/zDf/i7BwcH3tvERh5ahQbRGVTMvNlsoiiqm/LVe7ens4nCK1ztYKm8VF5dczOwmyRkRA1ICthLcg4b7wE6JHzomViGH3Ig8tKBDXqcAaNWoNBTMPpavPY5lSQLT3aZMkmWpiaKIgH6+77/1yeffPLDH/1oNp9baymwMRqNxqEJGo96JwC/+HgBrjAdzBwncQgcglNgvvqlL/79X/1HPnT2l1Zdf0ViFuY/jdez5MzPBWEGzfcTlNz405ce43Neu3l+PoT+k0/y0ndenObxffZPcEWbLpJuMNKJ2JgoTWNEfPjw4cHBweXlpQiLKDZa6+VyKWFDAHj06NFsMk2TPM/zy8s1kT9Yzuu60QiIEWNn8nd3RchjHCArHjV7vhoBwudQ0AhMV0MtKIbBzGRmA7yvGw7u+Pjw+4p21S6O7W69IeeNMY4YEEGpwAQU0GjjmEExMnZAaNBoNDIEYhdibc+fPju6eauqagbYlB1HgQh07qE
Qwz0PVtEwawMa4gpMS4GZ4zieMNZ1M5nU0j+HmbM0DkzAyIQS5hXfBXrOo7quZ9NC0odlWWaLJEkSccJCCEojszLG2DhumoYZuYdWaaOhh/lI03txqUMIQyshiXNExkrik5lD6xrvsO+4HicpdKPk2raVNpRKKaHolozm8FsxGsRAke43RLTZbF599dWPPvqI+7hu69rJZCLsyovjo916LTHhtm13u51oEanTjaJIRimf5k+fPi1mhQJIs6xtmsvVBSLKUFDHKd1orX3TGKOSJGniWErpRMdLKlcYg+V1HMeTyUQC0aL45VmGahxhxpYQMfUlmMYYiVvKn0p34WUJIyVJ3CUpfAcBSdPUKI0MSimrTRLFglSPbSThfWRYrVY3P/MZqYUNIaRJR7uNiIK68r6rMIEe7Cr9IcqyTJIEoQNIy3qLdNR3wiVmRmCtQFnFfeMEjcq7LiBpTFY37XZfiVB+4403tNbvvfeeVI03PmSZ9qEjUBMDdNiwagRClGMA+0RRlCRRHMdJGsVxPJnk4gFz8OW+EsS4UHtOp76uvZgszF3TeEFRKaVaRBdI/Gnx2/ZVHYDD2Xkcx9vtdjmfzRfTttwr5sV8fnl2+uMffr/c7ZFDU4uQB4Ar6Q2gQiBHHAKbyCJi3bb3Pzr/z//P/9s3P/PWbreL08SH8PV3vvrs6eV7736UJtFqc3l4uNyXDRKEwHXZkG+jyLZAr7xyyxiFNFYEL88zvnA813MPGST47L3XGolC0zQAbLV2rpUmjIMAH4Wjh+noIJ8KzeDXDIJ+eK1GRB/MLNCNKI7N0dHR6elpnERZll1cqNXFRV3vHz9+en5+fnx8jChdz3ygIKCcl2OR+6vS0Fu1Jy/dbss4TuM4rup2uZj9R3/tP/hnv/nbi/l8tVoR9C0QAIbhetG4u6bq/p16cXjga98fyeiXeLEvauUX3x+0Dv9ED/ilh+rK456DuYvcFJGNPQmwRCaFzE/CdKg4io00+Iyi6MMPP8zzfLfbnZ+f37n9inz/0aNHN24cK2XKssySuG09AE3tRDRUlykBUF3TeQZEIBQdTAjEDCO2EAAQNczdPX/qRLimLfLUGMMKb948mc2Wt26+8kd/+EdPHz8LgZABFCqlGcGzN4hEoIABmVVARA1KaTaMGtCiYoTtend+drYt98qAjuKiKLqizP6i9HzrqmE6xEfRiAQoeGmtFREFT31LGU6SZD6bEcH55do5n6Zp3TgKrZxKlCX3drQxRiuQtOjx8fHjRw8ZyBiT5ykA1HWtTUd35b0PvYCGHgMoR9043/dRkAxlmqZ1XQsmdjKZkA/OOfGkjTGKgrz23mvT9VAKwTmHgxskBtyAyRR5yl3As6PYFdfn6dOn9+7dM3GUpilie35+fu/evfV6nRTFarWqdjsp7hTwqvh5oo+ZuW27ZUlE8/lcBPdsOtFGLZdLrTVq7du6rmulompfim1xeDTf7/eXl5f7/V5MJWW0UmoymVhr15vVZFowsw9OG6WNWuSL0BO2iJfMPUI76rssD08kBk2WZWK+LKeL+Xz+5MkTIhB+LkkkswUJYAxJXLGrho3cVRmFMJvNVquV6FTRmmLADbwTiFf9gngUZsiyjCFEkSEfZM1IJl5rjYQEGBgChUB+2DVKgRTptqEhBcgIAOQDKiF6g/Pz87/7d//u7/3eN+/evbvdrCJtxgIHnpeNA2ER9JJWls3gYMlDLRaLxXQSW7Ocz8QQFLj4drsVempB/ldV1TSRWPbDdBCB9w1zrfuGMZvNLspSY9T56tmsyKuqaqoqz6LEmNjoRx+dri4uybe9rvKISKELzBJRCK4NwoEFgLjZbh9+svsv/q//+1dffbVpGpRSe6A7t2995ctfePTo8YOHz/76X//r9z/68I++9W0ACK7D5DvfLubT6Wzim1YSE/3kPhfyfKkO5lEbVpZyHgDuSIoYqNtW0jnGOSctfsfC5wrJAcDcN2FUVoxjelkkVebFhzb0NJZKdQBDc+PGjbOzM+eca1rvvYki5vDhhx+++urd5XKZJImHIEE8ZtZGsaPReQHwallL9I+pR0j391q7VgdK4sQTvPPOV3/913/buVbiSyToPdQdRjfAmItyUJbjh8FRuPhFLTh8X41qLn/C2V7898Vpe+mnL9W+nz7rnQEoOYJh20jm1zlHBHFsh9uuayf+hwzR4dFyaOZzdnamlNrtdpvN5uTkRpZlaZp6R23bZllxcXkZWS3UxEqpwMN4oqQ9WEkgGgEkUYxMPU4K+FqFEo+0bq+GiXoyD/mC1qr1jpCm88mXvvLF8/PL26/c/No7X/nTb33n8vzSewpMnhiM0gxEZBig7+0FCIxBgVIMEIiAlY1CCBfnl2dnF86DiSPRvtdso04cvwwTIGMoT4vIWmttggHjXIiiKM+xqqo0jQ8Ws8v1tm0boXIU8Ivuq3LlJE3TTCe5OI5ZEgskVWuM49goHUIA7GJKItOZO1xSWZaoQCKxTVsPbJQS7SiKgrljZtZa+1bK4nm73ZqeSdRaC4BlVQ2hYPHGiEhYheWQiCv0wCL5phhkzjlQKK+ldcFkMvvkk0+kluzB/fvL5VLKQrbbPSgT+oZ9Q7rUuTaNY0bcbNaL5XK33RqjQaneQwKQUmBjOdBmsxl8fTE44jgWCpEkSZxvnW/zIju/OBPnT1pEdFsYaJIWIkm10hKm9t6nWdK0adPWxMEYo01n3Ij3/ODhx9vtVjLHQtUihfJZlq3Xa4SuRY9EjFXfbk/sXWYW6qsoigTeKMOuNCJDZGzbtuI6W2MUdJwegphDRPJBxzZL0hCCVtboCPDK5AohMIckEbKtK7NVbDWhTlNKUQBZPFmcrNdrIvj1X//1X/mVX5lO57vdJjI2KLdYzLRGIgiBhsIK6LxhCTJ2gVBR4cxdb5WORCLSy9l0mIg0jfM8RZ577zebzW63q6pqt6/btt1udV2bssSmUSJnqqrSGgG0uONijyql9lUTyE3zLITQlGUWWXb1jdde22+3q4tLDg4ZpCXMWFRKVrXtmngzA3rX1m3zf/kv/vbbb799sVohYscJ63iz36wuzi/OTk+Olj/37/3MgwcPFEIc27b1iKgxtG198827aRpLDTr3GVJRlNdkxUs9pUFWSN5XDCyNSnSktdboXmD2Xu+LEn6Ao/e9jzSiQtD9F8L46j60QgZHCBpVFEVRlDCjeuXO7bzIpIhNcjA2TgHg3XffX6/XnruqlcEWvpLLowNAyDavVlsn04nECpalZjUeHiz+4i/9zH5bRpGVrThWq3TNCRsN1nDAC6r0xW9eS5mMFTa8zDwZT9jw5/jprl1l/IzjFy+edjiEwu3ad2BEHivWsYRYRfrLfp7P51mWib0iom273TJ37OdKaQB47733kjyTUsu6bpnZu4CIbdsSozQJEueb6Srpe3WrhF0/BsKXxjj4akCQGUkKzbo3wHunjPbkloeL88uzJ8+eHB4entw4SpMkMtaiZuc1gVYKKCCNUVTCHB96gAxaaxXqpmkuLi4AII5tud01ZYV0RbgxngJ8mWveLYK+xm
AAwQ7FS9PJZLFYTPI0jWPZe3H/QnDmoj+SNGrbdrPZ3L59S/xCRLTamF5JDEakfH+320m5rWgd4rBarQSxLN3RVV/SIHgfSdbuq5L7NnyiM8qylKSvhGrl0m6Ui1I9/WToyR2HKJlQuMj4iP+32+3e/MxnyrIUNN/FxYW1Vhrdi0iN49g1jVxCRO16veYQiiyTi06nUyYyxqxWK1eXcWzbtq6qymojnC4qig4OF0ka+dB672NjZ7PZdDqVm0+zZLFYKKMXi4VYIeKqDtECeXZx2eWGhabD9JwGWZZJnjhJEkEvCxOk4HIFLSHxIdk4om/kTYmvyr/Ula4p2XQS5e74QDQy0BAeF19WqY5XQUL6um8NKT6rWGZDeFzMOGEmEWfomtTiHgM8wM24Bzd89NFHiBBCuHHjhlKQpmkgx8zDHI1l1zB0g6wbw7yNMcw9esuHYT0419R1DcRZniyWs1u3b9x79c6t2zfu3b1z6+aN5WK+XMwPD5aHh8ujo4Plcr5czqWATTqOdBhypTwJfwisVitr7bs/+uFyvjCoHj96iMRATHRF3snMoDAwOefa4OVPRiTg9++f/6/+N//rtz//+aenp6hVYEJk5xqlVGKjzXbVtPxX/8O/bBDfe++9yFgg3O32RilZsSc3jq3VSj+35cdibfzn+HUAJnxOhovSIe9E/LJ3kemyV4gI6qpWk0eRNgBQSkvk2RgD2gRgz8QI8t9zt4RdRxDoUwlXYao4jj/3uc957xeLxcHBwWw2Y2ZCeHZ+9uP3P6C+pGFQDGpUit4ZsH30Y7zaZAVYa02cAIAxpq0rBJrk2Ttf+TITxNaGEJTWopnatkUG4DCEjoeTD/gFWayy3AebYDz6wxgN7u9YH48nY6xf5TvD2Yb5G4/1tTm+puDHFxriFeNPB+sYRwGl4ULDBMsDDqPHzJJmExI+yTmdPTtFBvJBzCit9bNnp0SklT07Ozs/v8zzvK7rqm2Mttyn/X2QzS9kx3JpBABQOOSr5LqEEAC5M0evXE/9HL0LAAAjoFYB2CYxAbXegwZW/L0ffDeKzOHhYZIkRimtdRLFGhV5pzuub2RmDcg+eOe8923wsj+dD9qaJ8+eSi5wUhRCysPMEAhC1xu1u1XJpsAVtdt4VIfIoTFGASKyNQqRJQg8n0+LoogiIyCdMbGA8M4rpabFhJlPT5+1db1cztM0tlotl8uiKIR3Xvf9y8qyFPyUvGOMqarKe5+kcZ5mg2gW4LrcngCFJJ1ZVZXAkeQMzLzflxcXF2JmyYIZoLldFpy7BS/VUyLTRQoPpNO678+6327n87kEurMs2+120+kUujqfzvwvyxIRJ9Ppk08+aaoKtUalmEMU22enT54+e6w0tHUpxoE89fn5ORHty21TbmUAiSjSV7ZCnueCb8rzfLlcRkmstRbTKoqMpIZFwgghsyRT8zwXD0zOkGXZYrGQTEQURdLgIUnjg4ODwZkWfPJyuZxMJrKetVHSjVgGpHM+iJxzci2RsFEUFZPc+VY+or51kmhi8a1lbGUnyp8Sc5aFlKapqASZ+i6uaEzbtjLISZIwo9bWmAhAyaTIZo/juCiKsql/53e+eePGiawE4ZySMxhjImMVoFFaS9kpMRAbpSNjNSp5f+iWbVSHVRweeb1en56e7na7/X7vmrZ1dQghTdPJZDKbzW7fvn3j5uGrr73yuc995rXX7t29+8ort2/MZ9Pjk6PpbDKbTSR/rBS0ba0UFFlijdKA5X6rAO+///HPfP3ri/ns/fd+rAGDb3nkTYkeAqCmqaR/r6dQNU3j2sdPTv9n//P/cWD+8XvvVk1du7bxrg0etAqBZdiJ4M6dm5eXl+cX59K+ekCeHx4tDw4OYmOHortBNOELjtk1twc7Ru7nGGGhr67E/sUgt+u6Dtw1BRlc4c7oUR1tqokTYwywEshrd87eAGMIRDSQhxMRaGVMhKiVMgaADg4W8+nk/Pxc6i6MMdLB9/3333/11bs3b960JgrkJSVjQI8fDAGVQnF/e6n83POrPvmstVaAWqvbr9z60pfe+s6fvZtPsrJqxI7APrQyILzHylXeH2vEQTePr4UvUJa/eLz40YszN9bEf56TvPS3z38fAV5CgYY9jBZ6+2j8WiSytXa/q7z3RVFsNpvLy8uOOj9JhEZU9MdsNnv08GMJgTaNI/Ii4j0QAFlrAwGRl6BNUAqCwAUpjHivBuOAELjLJPTYgd5YAQClDLPrhk7pxreeiZCY4K233vqv/sv/+u23furuq68++PBRua8N6rquA3Icm6Zt0caIyCGwAqUk40k+kLJIWgNB27jVamWsUgi655EZDg3oej7RsVGlADvxpK6GVwNKoTmqHvRvCLlhwCLPiyK7uFgFCuKhyvlV3+4Ggeq6zrLUKPXw4cdf/fJXis/lgmYSTgal++BSH3odkLpiNAj2VbKzkgeVHwogSyDWAvvyFKQ0paqqELpuwcYYWWTiKIuhEHr+QsFJKaXEbxtwuYNrJTsly7KHDx8e3TiZzWYSaxWltdls0jQt66ppmsvLy5OTk8Plcr1eA5G4pxdnz5aHh4jYlKVBtVgsgg+ixWfLZdy25IPJ9Ga7ElTgsLZ133QPewCa0ijRzsVioa0ZUpVa68lkQrSFPgYoFonEe2SUJDA+EDTGcexciwrSNEVEAXuLoS+eMTNPJhOx3kIIcRIxdRB0sWCGS3f5aY0arWCq79279/TpU9866FprsDjZuucg6xLJquNvH07LcOUPDEm+JEnKslbKtK3P87xuXJBm6FoFckqrEJiZojg5O72QK9K+GiwAo3VAXi6XdbUbBNRgo4voE/CabHN11RAexeqmbiqhLMvNZpOnHfJcggFWK6sVK1RFFkKIIpPliYQW6qr13jOHNuvwWdNpIcbE6elpUjf73Sax5sP3n/3yL37l5snxg4/uh+CAJQzTbUxPwjLEjWtZaVTYtq0nUsbuy/Lnf/EvBKaL1brxLoqiPE9lqJVSs3zuAi0Ws1demd2+ffuDDz5Okqgsa0RtdOS9RwV3774ipluSJHV1BckcAF8wAr1eE9Sqlw9qCJ6RSERFvh1S/uKm+L61BnCXUR00jqhSVH18ArTWDEoam8ogXNEqh76KSV4ncaatbb0HAMOBYhu99tprFxcXYsV33OJKXVxcfPjhx/P5vCgKCWMqpRRc0ZPKlXgEvHrxUKgBIYSgATl4Yj5cLr725S/92XfejSJbN25gMZWdr/WVlhpueqwbdEc78pJS9JHC+1QsHI5SAmMJPpz/pT8Zv3hxXl/88vMnuR7UfVHf9y71ONOsETtoq8hNjvHyYi1Fn2VZCsGCFGZEUfTkyZNnz86Ojo6a1u93m9lsttnv0ig2ehS5VayUFUouYlIEwsg9UF4xM7DcbYBRWoG571149WhKvgMKtTYAxMHb2B4cLX/u537m7/zK3/lbf/N/8errrz1+/JSA4zQryx37oKQXvQZmkDIs2T1KqRbAKqU06tg+Ozs9euXWyVGWWCWRXkm3iIcRQqAXqEmHqbm2FBWDABaN0gEhMlYpFVpvr
Z5OJrvtlr3WSjV1LZEehUghiGdrrTFKxbF1TXtxefb666+fnT8TYIRMk6xYa+1kMpHqiCFNINAq6jGTsdYMXbt1USqCCpb08GazmRYTIWCRsK2YHW3vk8luH04OUpLfITOAemb8wQ0FgP1+j4iD15hNp21ZE9F+vwcAa2JRQsaY2WwmZc1SCDSZTEJwp6enAj9Oi8JWVV3XRVE8evRosVi4ugZiH1pEFNfz7Ozs1TfeyJnFzJfHFCACAAw5LBb0k1ZxbCU+HMexMZVMqzyySOG6rgUjLbcU+poc6PscyNMNOHBR3qKfQgjL5XK1Wg3EKcPaGIaRuqLnrrXRycmJgLaKothttlL9LOteWiQJOkk2I/UYwN73YoSuc6LcHiIiagBkDkVRNK1nQCljdc4xi/3KiEAMu135ne98x+gOp8Pdwu64nY3VxpnBouI+ATE8juT4qesPLfEzNYCBHQVscbfbaaPm01kc10SR1lWSJDpLRX9boCFnLzmItvVt206mOQW4WF1WVRUCI+Ljx48ZKIs0xvHDBw//4r//lVdu3vjk0UMFrICIrspbCZAIHAUi8iSgNmJUAbiqqp/6whems/mzs3O73rjg0zTN80wGUGu9SqosT4rp7Bd+8ecBebPflFV7eHh0dnZmjDFRVNhCcg1aYdt0EPFBIL9UMvPYxepfYG/Yd7n0oeWzueJgDu5Kuw97WfWdRplZK2VMhKABAPv+gZKPQJRef1f4OOxJ8UTDhiCzqYHY37l7+/j4WPIiEhYryzK07sP3P9iu1q5uDCpyfmhdNzxPAOY+u9nHNq/009Wi4d6GcC0CvXrv7t17x9WIBlaeaqx9eXTQp9Shf9rxYoj4mrAefzpc7toXXvrbF0/1k38yBt3By7R7vzgYsfNaxpFqItputxLTW6/Xzrk4Ti8v13GcnpzclEZ1Av/55JNPQKmqaqTxHwBsNjtWWgpm6rr13iNoyd3KSvJM48y0ZHa7+wSNoJ+/0ytUeb8GtFKGCbWywAjE5W5PPnztp79+ufK//lv/3DEtjo4n06nERn3rImtr117tFqIQAoJmbVQUB2MmBwe1awlgt14vJpN7t2/pvgJY9aQZURQZvH4z49UyHtju1rua4KA0WKt9cE3TCNe0aHellDVGd24Eaq2Pjg6zJGma6vjw6M7d27vdjlx7cnhwcLhkoCSNh2izMpoRRJxR30W4LEtxyGDkF4rnKjReYnF27FFJIgTCklMQSSTHsIMG10r0R+j5dABAztnn/zpS5SRLy7JcLBYBeL1eP7h///LycjafC8Wmc04KhABgcNnrus7zXJJQJycncRznecHez2az/X7fNM1isVitVk1VC32b6NfdblfXdWhb6OqIGumvp7UGrQiBEJIsl4uKI5Wm+YC0AqAQHHMAIOZgbffQbVtPJvnwjjHqxo3jqtpPJhPpyzmdTg8PO1JrIhL+CpFxw46WKZCR5B4QIEecRChgZmZZVNK7Qpxk6XokeOD9fj+k1QGvetLhKOVEfRkSAFgby2LIiykxAigGrY11PrROSNDZ+RCIAZQy0SefPPEBhqbIIqNDcHmRMbN4RHL/sgsAQJYKAEgQW9bSIDTG+oaZ27bdbnZnF+ebzUaSIE1VO9fliUXgJ3GUJnGapnmeTyb5bDY7ODg4PFoeLOcnR8eL2TS4drdZG6PSyPimfOfLP/XanVdOn3zi27Kq913JIhAReQreexc4eA4EgZFANc4H4jaE+eKAUF1st5tttdlWZVnvduXFxfrycrVeb3a7/Xq9/ejjh0R05+7di/XFw4cP5/OJ63tje+/FMkuSRPC71yTAT5bS177GvfuLiCG4q92KChkEoDLedMOkq6H3kVLKGgAIDEzABAPRL/Tqz3cdLVlsxDybpGnqfRfAkHI0Y4xZHsyfPXsmAn2S5+v1Wimz3W7LsgYAhdoY4bmNRFUQAisckAbDKDyvYxQBKUattQ9OK2TG0Lrj4+Ovf/3rv/r3/2kxj7333ClUZiJ+XrPC8/pyrNqh94Z5xHA2/OSaTXSlZkZR6+Gc+Hx2czxVL33zxfdfvDcABYDXPr2mp+XemZlZDyFo7qPQiLjfV0RdHx5BZMh0Hh8fC0HBdDpl5rqupRJhvV4vFrM4jtfrtYgPDCwZiDzP43hgDPYA0Guxvu2g3CSP1isrwjC6bWS+gkD3to5p2xqRkUADRsbo3P6Vv/YXfvPX/+Xl5f7G8c2WyCZxFJumKr1rJ7MU+cp3jGwCCltiAlTGPFutPvjoozhNLlf7r37lC7m1AG5sfg0GCjxfgz6Mm5he462IFBC70mtJxyqlfNsqpfM83ey93ElVVVmWNU2FiJE2vm2tteStNphG8e0bJyGEy8vLu3deQcTJZCIlrSL9nXPS+0xSBszsnZdtRX3LlNCzZMgMihkkEyoJzqqqFGDTdFUcEoxFRAnGAsDggcmFAKBpmjiJ5CO5GQFFW2vbqhQ1KbQJADCfL1erVZokSqnlQbJerxeLhRTaOuekwENZS84JFXld7ZOiQNIAcHLzlquqxfJgu948fvz4zTff3O/3y+VSbl5k4na7Xa1Ws2KCiMzoXCDuWhQwc9s2zLxcLh89eiQDiIjSFFnqYYaEujGmJ7kEAGjbVjiZJdIjrrBQZBwcHPzoRz8CgLIsd7ud9L9LkuT8/FwGf8BGiekjug17MKnYplVVEZHgtuI4Ngrrul4ul7KDkiQpy1LaCXNfDDIg9cb7fdjaoiPzbLIr94jaRqauW2MibXXgChTHcaK469z86PHTP/ved+PUtr2OJ/JK2XLfvPqZN+fTyWq1kiEa37zp+6eJ6cDM8oV+yfc7mhUhMKP3/uzszOoO8N+4VqL3svKx7+WsFQVtyFJgqqrGDSuhTtMsFk7QenXxyvFBmsSPH37c1iUCIYVAQhHRbcYAILUPzEyMgQmVbVxQJvYEDx4+KaYTWaVKKa0rjSDza61tm8skibJJsjw42O/3H358f7FYPH16DqwI+E/+5Hv/3f/4L6ZpulqtiqLwZXtN9o7/vCZye00BHXRr9B0F0IYgFcFSNR4ChRBQCaqcCfiaApYLdVEH7pYBMwdgrQxxV1XonUTC/JDbEnCl1AEppVUa2UgrDfj6vVenRXZ5fo7MYqgS0W5XfvDBB1I9JsBarTWiHtMn8Qi9/PyBzHzlSJEMB4TgsiT+0ud/6vh4QUTSR1MAsX7Uzhp7gN9YOz4vaq/arI6PYVcMG2M8Ny/V7vDpVtK193/CGZ7/+ZW5wC+zy4a7vebvDi90TwSBqBF10zilDIASDG0cx+fn54gocFbxtwTeIr7FwwefhBAuLi4IeF921ffee+9IFlPnSAHD8wM4/pOIuJ9pRLzKB4/WYmRiZKVIYWANuNtsEfHWrdtlgJbDH3/vz5bHh7PDJSg1mUwk6pjm2eHRclZMDCpjTFpMDo9PoqJ4dPrs3Y/u69gG4Jsn86986QtFEtneJ7o2L4qHu3puB9IISI/4XB5Io0LEyNg4tsYYq7RYNrKdvPdRZKIocnXTuiZN0yyJJRl5cLA4ODiYToo4slVVDVhc
AJDuv5JFhr4ZGXReL0r2XfVwMAkID/FnZhbCZEE2xXEsDX3FtRWqBNE98jU5uepRr0IHwQjD/cuy0X1PdZmgpmkODw+rqrJRJGtgu91Ksmn4/qC/gVmqWS7PL5KiAO9907RVtb28bNsWmI+OjqRc+PDgmAldG7yjcl8DK4VGK7tYLrnPARsTDdkvImHviiXR6D1JVxwZFtPXSeMIFXF+fi4wLtE6Dx8+rOtaMGiSWZeG2ev1OssyURXiVZv+GMIn41iCUgoVMLOE+tI0PT4+bprm6dOnMhRZlklMXsZQMvdaa1QsoYJhNY5NQ+79JAG+BSbvKU7Sum7PLlaPHj8tyzpKUmD1yaMnP37vgx+/98Hjp6f373/04YePJI8+AHyAQwhwfHwo3GGimXAEJhW3WIwGa+3h4aFEZQFAWwOq26qD38bMu+1+vV5vt/umaVwbmqYR6nLXeoXamshoO6AINapJns0m00lexHEss7BYzibT/DNvvFGk8eXps7rcIXhkr1WvegAYgVABK2YMjJ6AAD0pF9gH9ARn56umdU8en16sther7WZXlfumrNq2CeW+WV1unYf1dlfXbVrkp2dniOjImyhKi/zi4mI+z2/dumWMyfNJ6Fs8vShgh7l+Uaoz86iE48pJC32XUnlD9iCOGr3wdXXeQWsRNKLuKB1Ft/cjIj/0Pfek937o7a2UaluHiIa7UJKfz+dvvPHGZrPb7/dSoSi39dFHH73xxhvHx4dSwuhc8Ewc3HBPw7K+pqWGsUBAEC/TB+4dlMPDw3fe+eqv/+Zv58WE2SvotNYQYR/+HXbmNaU4/tr4oi8K6/GUfNp9jifjmkJ98Rg/3fj9/iTqz3OeF+9HZlS2zSBGh8Rhd04AgXcCwMHBgXNuu93t91vnXFXViLjblZvN5vT09OBwUZZlpNXl5eWkyJlZSpucA/IOAIRHArokCA0rZrifLls2zCMidrRlV0dw3hrjmzq2tm3b2EbOuVdfe+0XfuFrH9z/OCkmZ6vLL/3U2xdxtFtdTorcWbvfbBEZlYrjWJlo37YX+/3j09PT9Xo6LRrXQAi/9Eu/uJhO27qM0gheZniFEHCUaLimcccjT0SskDuAFRmrkiRpGiciuI8ikjEKACZZftlcVlU1n89nk2KzXZXbndaago/6WlURiyLioQdZVFWVxgldlQwGgUwYYwclJD+Re67rWrrSivImIgF7dw60923b+j7RKz7c+PFF8mqtCYmARSGJSiaipmkCk/c+sl2u94tf/CKaCBG3m41c4uz8bD6fZ1kh95Pm+Xa9dk3TtjUzt65+8vBh0zRFPpUkSFmWn/nMZxBxu91uNhtjzOnp6WQykfyroHVkoQCAtTYAG6UlzC6uvIgUIVHKsiwEJzR8kmGRSRzYReR5B9C4+Kky72VZ3rp165NPPrlx44aMQwhBjIyiKJ49ezabzQYnVWFX1S3DK1kMIdOWLSb7Qu7ZGNPUVVEUZVlKfF68lu7R+tJP6lvDit2APQJLroiIwKpt26Ojoz/61rf/9b/+1mxx8MYbb83m2NTtbr//5MnjcrdfrVY/+tG7Hz34GACoQ4op6oECSsFisQAkyY4P7ChyLbFaZLrltQQGyrLsGuAoVNKJltAzadIAtNvtjDFZluR5Hlndd2BURVGYni50kJNN4zrpCjjJC/LBKO1b9+jBR/fff9capaSgUCid95UxJjBQYEIVCHrdD40PxkTOt57Z7cs4zVab0lobyqbk2hgTGaO1jkzXPzGKoap3X/36VwDg+9///nq7revgnFPKNK37uZ/92tHREYBsKNW2tdYvl7FqxAMxVhwd1mYUP5PtDyNop9ipDEG4EQZ3BUZ9f6HjobSICICAKK1/uzAhAPQFC4KzkTmVsLOsQ+8DABgf3ID7v3v37mazu3//PgDUdS1mclVV77///mSSx1HknVPKQLi6oUEuX9OOzBJX7l7IYvWhRWJrrSPK8/ydd975x7/x24KzECyJVkB0VUAybBJ8PqgIz+vgF2UuvwwgPZbUw4vhh/hC1Hp8vGhPXbulQT0wMwDDp1hh4+8P2oJHOZvB5go9vcOgj0NXId4VTUrc8vT0dACGyCbcbDbetyGEcl/nBW42mydPnsSv3kVEof1zjsg7pRTgOHDUO46Mw3KUdwKyen4Rj0dHaw1ErDUFZ4wBol3V2iT+6Z/9me/98MdtQ+9+8P6jhx/fWC535+cnx0cUxfvdLo5MW9Xr9br1dL7db7zL8mK6mG82qyKL/8O//NfeeOONui4To/3zhpSIe6VUVVUarheYIUoXsKthl0dQ0hmqr7oeAsJ5msbGrp3XgJMsZ+LpYtq6ZrPZXF6ec/DLg3mRJgKzkvzTYrEQRkkhpBQ+W9lpUu8oSpQQASCKIgGwDPMrtpTkJq216/Xa9+2VhCJDdJLWOo5jVErKz1arVRRFQ/aRR7V53e7rneZBMaBWTdMgqFc/89ajjz76+OOPj45vSM5vt9uJBVxVleq7DAXnxOdOkmi1Wkn0Vf6VFTibzaIo2m630+lUQFJCjn1+fj6bzeq6XiwWcRw/e/JEaodOT0/jTHrldh6tjWMOQboCFEVhTMockiTZbDYC8wYAwRseHh4OtUyDMyqIceo5MrXWkuSWCi6l1H6/l/Tw0LjaGMMEAzhcBlBrrRjbtpXyYtFDUoHdNA0FEfdKbkDISURCXhNxcn7xZriHj4mG1oqyYvLN3/+X//X/8++89dm3f+r2nSwvmLlp3NnZmTFR3a6sjYtpUv34RzT060VRBgwcFrNiOp3utuvOJdVdd1HodYBkKCSiAABZls3ncyLaVzV13Eyd6ybmgjGqqhu93ZblzDnnHUlqA5HFpx+FQyTljDL7EpuZTCZpmt6/f/+D997Ns4Sc994L+KquS62RiIS43RMGAk+Ku2QWbne7KIq9awUTrpRC1K0P5EMI7FxADho7wLlz+8XRfLPZ/tG3//U3v/mdycwAR3Gcek8AcPfuXaVUZHXb1CIeQ0/Vee3A52G8g5LqgqZAvXoKzEzcg8+NQkRJEnWjN8I/D/5eL4girTWI0hUO5i58HbB3eQdrMoQgXb+YWaku+MTMRmvtfGusJiKtk6Ojo48//jhNchun5+fn6/WaW3//owevvvG6stHT8wuBpEsRhfdeGS3JMCbu2bkYERUDsKBkiRkImAKDBJqIkAMQ37u9/Ku/9PXf/t1vz+cTF5CC0TYKVDKSNjqKjAIwBq2OAKiuW++JEAgUSDlczzSKiKHXUkqp0BsynW4GoaRoBDEI/JzDNBbT4w027LFr0nysQXlUbzrYR/IhACDC6PQSs1UDFaVcQsyL8boZbmOYNjFOBxhIHKVxlGZpYXQErIBVWW4nk4lARUBpRnWx2p6ent545S7U/qNnz+q6vkXaMW92++kMdUDn2shaAxgZFQAUk9LCJNxqrZGR2BMHYkKFzBCAQYrlUCEHEAeFSDFrpVkDswGN4B0QRRpCvbm9TP/m/+i//89/63e+8/0HJydHD1e7VRk+/uisDdUQv43jCAwQ8tKa1pXr02evvXr8F37
+5165dULcRHlEDFbrwYPsHCOFgCpKkxCCD61VVthIiD0CMiMRKIUAKJFypRRzUEyefRyZummNxiSKfaC6bpeL6XZzuds1JoqICJDv3Lnz8OMHsU0QcXW+evPN1zlAZOM4SrJk61qPoChwVXYt3IU7aVpMkEFMHI0qoHYueE8E3HqnrRnSJQK3aZomjZMospfnl7du3XKtMzrSYCezuKqq1epSa9U07Xw+j6zVWofet/beR7EV4ghA9o3P8xw1ogLR1lrrNE1D66QUlZt6fXmRZ0WkEIFbdtoqAIgiE0UT71sABlDOubqutdZFMZUAWBSnl6vNfrueTqe7sp7Mpp7C7HDOSEpDWsT7Z6ssSdJYGwWtJ6WM7FC5rlJKMTVNY7VRgaUZW2RtbKO6agJxlmebzSaxRuqtkzipqmpQdaJxxfXsIUgmTXPR/YKH2O12AFAUhTRBms/ndV0fHh6enZ0N8GBAFgexbVsfnNZa+MtUTy0OAJJdXq/XzJzGkcTGxeJBxVmeEAVjFZHxPQ95CCFJohCcQeXbJooixeIWIysbFfGDJ2f/t//yv3rlzt2DW3fS+UKnKdqo3O1L587Wa1IKIovaXK53DOA8AQAQW60U8m5Tf+HtNzV7amvyNbLj4IwCYdsSmZblSZraLI5m06TcV0qpkEXIEx9a13ijFLJiQqstMnrHHChJjHfh4mJVFEUcGaWBObCriiSKzDyJLGpjIqva1jknDR+thggQQz2ZJH/0h3/wZ3/4jXlmwAOwUoActIKIAZkheO9cg4jOO9AKjQpMddNAyBTG231j40hbZMXONaCRfcNAzqMxkdKWAKs2hNAyh+0n5b/+v/9xCDwpJrvtbpojcFPudm+8fnz71pKpdQ0qpXzTEiL1SJbrx8u6ISmliFlpBM8hBGQCJPKtc41CQSmzxJ+ZCBg1GmZHwVFwXTAXkFghotKxSVJQxnuPUmnCpFCR90ZrAPQtBecpgAYLoJgxsZlV1nsSEJWojyuSVWMMM8zn05OTk8efPGWE4+Nja+2zZ0+I6MGDBwKPFOada97btedkZoDnFNiLOsZ7r5R+8803f/f3vo0Kql2lbcQhKK0UdplR1dMYIV5VFkq1jGg3pZQecV90GxUR+pQM9jlz1VcP08vysi860AAv8XqvHWPlPX60l41GtwDgBVd7fOlrpxrsgMGylgfxXtpVBkSUkkdxTSSWQESPHj2SlJjgHjebDQBst9v9fl9MMu98XZeQptCnRV1wyo9CuABSTjuu9Hn+Ka4WN125+zDcLXOoqurO3dv/6X/2N17913/yD/7JbxPAwWKZTdKyDCdHh0R0dv5sPp+G4Nfknj07txp+8Re+/qUvfH6+KPI04eDJtfp5Cl/oMxr8fNzihTGn5xhfRzZN0zQAPY4R0NqYLrcHBwdt2zZ1k2SpMWZaTD73uc9997vf+fKXvxxbW5ZlPJu3bVsURZZls9nMGCNG5yDlBYcs8C7ucchyXRtZARmFEKTPgXxHnCdrrfAx1XWtdUtEeZFI4dBms8nzQlLOSikIoQfZWgDwPfMwWJTXQgomzfsE3d00zZMnT4Qsoq7r7XYL0p/c+8ViUe5rZi7Lej6fr1aryWQi9eVlWR6fnDR1HUJYLucW1fn5eZJnZVnO53PvyRijDbZ1fXl5mZycVFU1my32+0oS3oBJFifCZjWZ5GdnZydHx01by8OK4b7flWdnZx27qnfaGgDQ1ljqaKq22+18Pld99yGlVFEUy2VHk9I0zXq9Pj4+rqpquVwKMRwz13WdZZn0DRwgV0opQQ9hDzUSxw4AJKRERGVZDlKiF01KKdUza1byc0k9jBLVqLVxvo2SuK0bpZSyUZREzkNVVb/xG7+htT48PJRRdc61upVypv1uBz0E9/Hjx+J1yfoMgW2sjYGTkxO5eVlsRBRGnoN8lGVZbI0Q9bSNs9ZGxobAGlBAzmK5ympUPZtmWZaXl5c20nNgxgQV7na7NC9MHCmltO6cPNe0kzzjQBrZNfUf//G/+cY3vrGYz325ZxDpKs14gqSqGMharKoKtW6dY6V2ZX10cmO/qeI04TIkaVrWVeucsbEwmDrnEXXwHChoha1z5b7WhqyNQuDEGtdUn3njTeea1fqSCO7duRPHMbNHfYV1/8nHS+KUAETMfdgZ+iIF4DCW5xLG5L5UgYgUdnXk1BPFc1+/M9BJDX72EO4aPIehrSpRp93kOwb6lAai8j5MJpPbt28/+PgRKzTGLBaL7Xbtvf/w/sfz2fLzX3jbWi3LkUb4+5+gdYYP8Hmwj1JaGfPGG2988Yuf+bPv/riYzpwHCoA9uTQQg0YFoBQopSUYgojovQsMHACQhOCwH8fxgNJQsccKQWt9rajm5ccgqcdS+9O+OQz3WGvC8+p8eEcpM14Ww1y+9Pv9T9QwVUMGCLWSNBgAOOfW6zUgTadTCU+KjfLee++9/fbb0q3Bt43Qze/LGhFdGzarSwCIbeIgMDdWG0QgwK5tGPRkZAqRtJCojZ+REKQIuF9qwKgQCQGpB8VrrZNUr9eXJsp+/i/8zGd/6u0f/fjd73znu+/d/xABzs7OZD08efxMXvz8v/flL37h84fLhY1QAVutbBTXdVDYtWIaNhIrVHxFNEEv2Lm9TUMAelhsIA0BEdq2BVQhsFKKgY0xBwcHkn9tmsa3DoiVUlmWvPXWW48fP37t3p2iKNI0btu2bqoo6mDPAqyw1gJ2yNiBlMd7z6FrABDH8a7ci1uMPWRBQrtZlq0uLqUJj9a66z7ZlB9+uJZ2thL3Pj8/Fz0hudWhGlV8uBBC8CQnB4YQgoBxJKgo0CQZooODA0R88uTJjTu3hRJLvhACJ0lS162olvV6vVzOQandbpfnqbXWIgj302q9XSyXnzx4eHJ4oJSKYjufL87PLpIkefLkyXK5lGfpo7IQRYaIFovFan05m82ISLJaRIS6kxhpmjI5MQ6Ggl1Zfrbv0SQ5csm51HVNBBIRlfpp4UuXsPZ+v5feUNLweEicy2RJYFkC3YKjFiSwxPzFrwghxFHXAENSA4JVLorO6xDDS1aL1Exba5um0pEFgKZpojhN0/jDH/34n/7Tf3p08/ZsMpd5z5K0ruuy2onXLnlxBZ0B3VcPo/eeGb2HLgqqukkXHcA9CMD0jY2F59laW+kqtAEZPLHwp3pP2GeLlFIS3/YeqrK+uFjpnjeNgEzUVFXFqJIkiUyslErjJDivUTH6xNg//va3fvM3/lmRp841AJYloAeMipkDqxDIu+BCCI4DUqSUubjcfOlr7+T55Pzyyb5uojyqW4ceU5s2rTc6CZ4QbJrkdV03da2U2VdNmqaXF+cA+8+//WYcJQJAuzg/OzttEOHOnTvMzExoOqdRv0j8NBIEg7Adv6004v+Xsv8Ktm3NzsOwMf4w04o775PPuaFv9+2AbqCbzSASIMCCGC1aJMyyGGxSpF0upwe/qCRLJbv0JOnBpQeXA+WybMElkaJFUmAACAIgiNDoRgPd6EbnG849ae+zw8oz/GEMP4w55157n9O0verUqR3WnmvOf/5zxG
98H3HsJj5ijDKApLtmVozXiIl6g6aVMcYCgNKmAz+LlioiasRrTHzUEZSKTdBKSfQmv++9LzO3g+oxRoVtSLi7uzvdGV/MFvLHxiR1XQ0Gg5OTk0ePHk2mg9BJh8pjL9BN+bq/2k59dsvlbNlHeSo0qtFo9IUv/NjXvv69JEmaesMoiZfSqLQITGi5PqWNknRNKYXe+whEMqvVfqAMyCBK5txWd40MDygFum2mokZmVnzlrfl1Sfz2av6wX93IzGCrjd8vsdx0xGtvVh24TFa4/6Bt9w/XEdrbziYy1XXDzKBwXW4o+t3d3TzPV6tVmqZnZ2fL5Uqg0XVdu7paLpfybMfgZrNZVZeKIUmSPM0ALEBIjAUgQgghWHXF+C27BV5hsSZgYa6+WkOFGDUigUKlFAGz99Yorahu1vu7w4N/7Q/8gS/86LMXz10dmqa5uLzURgl/8sHBwcP791CxBlYaNQIFFxkTa4kCxat4BQBknKhfYaWu4d0Q8aqZDddunPxIKeV8qEov58/Mk8lkuVzu7u4y83q9DsEhsnS8fFPNZrPJZHT7+AiQgVi67DYx2iji6EM7u7ndfE3TNPrQ81K1hIKdOCt2DJFFlm/sWh5UmYASwzoYFDJlIaRFArgjIt1RUfb7Rw5OkbXWxNGgkchMnG6SJED8/vvvv/nmm1VV7e8dJElydHQkMzZSShU3IAkftxguZYwJTQNAMcbNZjMdDvZuHS1nc2MMKHXv3r2njz/a3981WgOAsGAOBpqIlFUIuF4tdyfTy8tLcWwMJDNFkv1L+IIKhN1iMBhokzFjVTVJ0tLRa23TNHcuCOGadGFb6cYkOT+/HI/H/XTQYrGQkrXoMYgPXiwWWZZhN0axXC6LohDfDADi1EU9AqAl1ZIdMpvN9namPcZNuERCCBTlKUYJIxBRkFlStFfKYDcdJKv6S7/0K86527dv7+/vj0ajLEmdc7PZzNWNwNpl2TWqz33uc++9/4HEbVJMkih4drm4fXTonfjdthvFzIL66a2NTPFKlr9ZrbMsQ90iEpbLNSLaRMWgULHQcAkPVDs3UXvvomcfF0ttk1GIUqUAYhe81YojpUnyld/60j/5uf9uZzqOMWrAAIqBCSVVYgYIkTx55qg1Fkle1X69Xv7kT/7EYDh1keyAiWi5KkPEsvbzxappYl3XChQRzedzeTg3m02e5xeXF3/mT/7RH/mRzz19+vT733tPwI8A4Gp37/bO/v4+cDuLJTYffjgB1GtfzL1Ovbjy7YEOWdK2ZtD+vh8NJQR1lXTJXVAtJTtAl4XGGAXw5L33zvUHkUhCa+1jVOrKoKGgoLuJ+LaBXBTFm2++uf7G74t1m0wmZ2euqprz88unT58PR2/0/qN1J4wAvTzTttV7zer0uyfEiJqSxNy9e/djH3vwwfsfpfmIGbVuhxqtFYAcalRaa0BSSi7bKwU6aBdC33nFLZ42onain7uWe4/u8963mrhXjBNbBvoVKDWAYv5X1Tp+uHtWr17+9vF563UVtWz54+13bjtvABDYiDgDZgbEqq611s778Xj83nvvPXhwXwj5mqZZLVY7OzsAcHp6mhfper1yzqWZhTnovT1tDUVo2AFQu3RoALAHRbdrxYqJYAt9RQhKKlH9qWppo2jGCNxqu/rg8iKt63WCaZbaN9+8Q06YkKNE8YvFIkmM1qi0UoDeN8RRZ4nWGH2gGAFN/4kAoK/mj9tib796/Z7sfhj7mj8AWGtr79I0JYYQKq3Re58kWQhuMhnF6Ou6FN75GKPSkCfp/fv3naurTel8s7+/X65X4/Gw8UGO1qdWsvcEgwNbp6e2wPliK/vSkbX27OxMMm+ttbWJtDyVUuv1SlyvuGS5oizLQge/cs6JKqI88IlNUQEH7gFWAt8V7Yf79+9fXFwMiqHEAdPp9PHTJ2pnt1xXWmuVJNYkTdNQ8Aqyy/MzoRR+/uzJ3t5emiQxhBC9rqrFYpEXxWa5HIyn0+nuarVZLzebssysybKsrusYXbX2QhO23KyFu0NpLIrCN63fBQAh1Kyauu/1uhC0NcVw0CKiqyDM3rIC3ntjEgkpmqYRpyjrEGOcTCZC0SAWczgcLhaL4XC4u7srKyBDBMJtqZSScUrokki5iVLglUWeTCZEoLUWGFdfNpeHLqEEABlI8mCllGTS1lrJ/r2LWtunz57/o3/0jx48eLOp6m984xvCyeW9967O89ymyc7Ozs7OzmQ0Dq0WOCBioBhj1KiUUqghHxRorPceUYhZLPjIHcmDnFiixU7qJEmM0tEH59xkMhITH2MsyxoRlb56KBARFIZItfNl3eRNvTMeOeeqqsmyomm8bELf1KlNbKq/9jtf/X//vb+7tzMBAI4hxsigIjIzR9EwIAqBQmynkIKrzs/Kn/5TP/nojYerdZkQUZMwIytDYEashqOdqnSz2eLpR0/Kat1tnpCkNs2MAviTP/1TaZr/7m9/JfoGiKy1HD0RfOITn5iMhtqgUhC8k9w+Av//5YMRsUcQM7Nw+XWFxnaVbjhgwWf1ET9DG/t2FU2FyEDc0hQxijxgcF64ccQCmCSJzNDV7baturlu6DlGRtR37tx5cXr24YcfTqfT6XQqhLF1XS8Wi2pdFUXRY3g7b/N6oK9cNLT+5tpCiLm3Cvd2pp/77Ge+973HO4NMYJnG6NQaa63psl5tUKmWiJUhctSkyCiIzDFE0ADMGtEqrZSKDAyMgAaBgYwRVe3w2ip073e3zq9vybympNz/Fb6u+4iI8pC86piv/Nn1v2IE6AQ0GJiZenYTUP3MDwvRiiDOyrIUxKAU9JLECqx0Z2eHQT1+8tHh4WHjXVlX2pp1VT5885FSaj6fo5o0TU0hliXAjhq5oJSPJmqtEcgYiMRBUVtWae+ekK4wM0sQ2PdOhC9adYGouJ0+uNtsNmLgKLg81QTBNbW1FiFxjbPWxlA1TXN4MF0sFkxkbIqKkRUCGGMUMGhtjKk9CZtSv+B9DLcdrGy9CLrR8+5GKADSWlMDSqFwF6zXZeN8lhWuaXu37QwPB22QfNibTo72987OXw6LAQBcXp7v7+6JKWSAPkmSRqO0EqU43DSNAkw6FUXJXUIIshrGGO99YqzAiGTMpigGMlWCiBKDyz4R1yIo6yRNoROek2cbpB/MxFHGCp0UtyXDE4aWs7Oz6e7OcrEKIUwmk9FolOcDkRxM0zTPBhITiMylgOOIg2jxVlWVpjYxuizLg4O9k9Oz4XBcV5X3fjrdjd6F6PZ3987OTgWVaa1umkpquUphMcjlTLxCcXISGaR5llRJX/IlItAIjJLtKaUY2QVPdS1cmNIFz7JsPp/L3RTNYACYz+cSiQpZtCQZ8/lcay3U3JKhSp9YTgYABHMu7WTuBHR7pPT8cjYej72PSlFVNcNhIZtNGskAoLWRoWelIhH5QBQ9gqbY2oTf+I3fuLyYh/j+V7763xJDmtqm8QBgDYQA8gw9fHj31tHx3t7eel0SQU9Fjq32DhweHgKAMSYE1ydb2KpcC3k+28RQB3MBIMmqMVCWpJPJaLUaOeeIr
hkiIopRAcSqrGWWLDFWadhUpUksIobgBlk+nYw18u9+9bf/Xz/7/9zf2Y3BOddYbaJ3yrQGS4BKgTCSiYQKCQBenp//m3/hz96+c+dyPssGhWFWxUFdN2lh6so7zxXSixePP/jg8aDIINLlxXxvd6hssrOz8+3vPv4rf/W//+Yb93/lV36VySskCo2yJkssEdy7e0drbYyK3jnnZProh+c/N7u/ANfyTgDiSAykuK0IwxY7AnRlZCKKdKW+oLUG1FpbpYwyRhj+ZVNRIADQSgFGZmaIDFFuDIKW4LLLr/ozZ0RlUJBNW2JJzFQUxd27tx8/ftyjD2Kk1Wr10UdPbh/v37t3j3zUiRUO6xsO7CqfQ+ipK0HyEb7mh0Q9YzAYPHx4/+ho6urKKtRaJdKdUGhFklpxoltCVMUQdVQ6WFBaGSIVDIYQiKJWGhUwECrQSrepWaRikChlXFWjQWMMU+R+SIlec/+2zxC2fG1/XTdcae8PoC0hXOsc3/hD3Grjb2+I7fdsp7x9CsVd7bofpWiH1ZiZWczN3t7ecrn0LsYYe2IgYRiuvVNKiWhPlmWL2Xw4HK3Lqna+yFKpFiSJURrAsVJKdZygmju0XddsYbq2sxkiAxGwBlTKKBURNQBkWSZZV11XiEBERZaFEBRGjREZNPJonK/Wi7xIiQg5GjSYWu9cWZZJYqw2IQRWGl4Bovfrpq6PgIOMmeNNdQ0p4kntFxGLori8nIvEV1nWodN4v5ydxxgzmxwc7FlrB8NC4aHMGibGNq5K0pHRaYwxyVIAkJIjdUy8sk4a2/GPKExkiAJL7oPO4DxHgg5O5b0HYFFPkqRKeBBXqxUiWtvikhBR2FQYSEPrhntkjTHGBS9eRBJoGe+R52t2ORciSTnscDgU39PqIw0GElsPBgPhlVwsFkqpNLWnp6eT0dB7v7mcTae7xiQEMB5NbGo9YJYWQusxnU6NMUarxWKxu7vDTKtNLWqGqqNOlBhFvCAzE0cxO0meRSI0WobKEkoBgIEq76X124/6COxodjGHAbSTF8xZlsmyiMKj1nqxWMj6j0Yj4YcS/kUAKIoCWgBzJjVkqf/L8K6YSIE3yoeGEJwLWtt+GFequ2ISiagJXljZNXNVVcPx5NmzZ1/96u8WRfH1r3/9Yx97O8vz2Gp7iKCQ8jHUdX1xcfH4g6cAoDXkeSqb2RijlWxRkETfGENUx8iBSOJIeWdfbpFk12pjBoMkSazV5+eXznOWpLvTSXB+tVl7H3GrXhpjVMrW3i1Wa9RGaxyNRkxYFB4AFIM12ij4wfe+95//7b99tL8XfCOskqyiUoohKFYBgBl9BCIgBkBT1vXJyeVf/Wt/8f6DBxfzC5sXngEAi3xnZ7c4OXl5cnry0UdPv/Pt78ZIWuFsVh8f7n7hY2+kWZKm6WK9GE3yT33qE7PLi/nlxWa91AiE0DRVCO7Rg8PpdErRR0Dnm95UKqVucP1u2/EbIXtvE3oQssxcAgB0kx2xYxPjbtIvhKDQSMONiNQWM64cWClJc1utoH5aQXXCQiK8G9sKFm7bduZour5LW1i31ngfmePOzs7+/u75+eVwOJQxD631ixcvzs/vHR8fczvsqLHTq3mty9kmzAIAZtXVLAEArDaMCED37t375Lsf/9V/+aWjo0MJ+a1WLZlNp2MjzREiMAhkdQs8AaAI6/W6qoICRm7pS6xNQEPwjhmEBGCx4EQbgyo2NQMI17loPW771OtnKwv2+vuLXVWt96Pd19emmG68offBsOWktz/01R/eOFTjnTIaFIowJ3fqLuJgXr58maZpDKyVGo1Gq9VK4uXZbLYqq6Ni73xzvtls0jTdVKXw/jOzatCHZjIcDAYD753WWpGUZYBVF88BsuoK99xTvcQ+SpCiNCqDQADR+6iUCcGnaUYUU5sE55VSDDHLE++9UkjB5YmlGDQKWUsUhlxjDIDykQDV9nJtB3k3FqrTkHjNr7gFYbEMziGiuENZtLIs15vl4eGh0gMfmqbcrNdLREyzxCg9Gg3mc58YOxi2fExNaKSA2avXiR0XxBAiiq6RbOMsy5r1Stiv2rFLbhn4pPgsWJ6ObbHFum82m/V6PRgMkiTxPlRVNR6P+2dYGy3QIW6J+7Gu6zRNszRdrleilyftW2bebDYcWJrKgoVuscfOFUWBiskTxdiSQocmy3tK6pYAJwSntdlsNkfHd7XWTGyt5AE6y7Lzs1OJ8HxwVqeIbT9oNBolxgDAYrFI05QpZFmxXK5NYo1CuWTnBQOVCTI8xtiUtQCjhsPRcDhEhh7CvVyum6Y5PDx0tUeG1WKZ5q0aWAhhOBwKM6uIJgkeYjabiWcVDmrJMtM0FdkMZpYAUcyXDB3190WWSCpn1toYua5bJhBxxkJSprUWUGTtGkSMMZ6evHz33XefvThdrsrBYLBar2Pky8tLrZQxyqACq40xezu7O5OpAMcAFCBCBFDIHAF5Z2eEiForIlLKRK7bR6xV41AAUKRt61d+LvWbLMt2dnYkXtnDPYnRQ1h3tUh5gpAVAmPT+KqqqiprmmYyGk93xuv1Wg8HTdN8/Qff/9n/8v9x6/CAog+NQyUT7Z4jURR0MIQQQwACBEZGWJer/+n/7H98eOt4U5bFYBwYfAzEfHa+vrh4/NWvfvUH7z0eDnPnKTHw9ttvfOrdjw0HRZraxXJGzGmxn+f50fHB7Pzs4vKcKPgQgY0xWJWbhw8fjocDxBbqmCRXOMTXW+cfngHL3DAyMERmVsiISnXNq96adQ6StWrHuxERlTEmSdPUJKm0KvqEqm85SUAsHydbSHVg/qsuQGeUiKhF0wEAtl3otg1ZFMWtW7cuLmabzWYwGAyHQwHX/eAH7+/u7j969AAAYozGtMIm/anDdX92Y40QNQBFYGmcFMNBVW2GxfiLX/zCr/zKlzSC1soolKqg1T36l3uMVZIkhowEF4hI2lxNhmidWsvMWsFgMIi+qV0wWk2n04vzc2u1czHLk7quESBGKRpc6aV0vcNWNxfauab2g27Yd74uyNP/8EahG69XqrcDsdavbIlZ9ner8xlx+8R0p6EmwbtSSqgKVKfpm6apOFd5UCnCelUS0XA4jKElVzo/uzw8PMzSwjVhtS4V4sGBdS6G4Oe8Bm0MMKMCikmCioECowi+SJ7L16ITAEXsumttGx6olSIj90wpwwxKafkfABAhBH/lIpHQXMNzAQADiN4RvpLy3lhbABANzr4HIrDp3jn1R5bkcjAY1I0Lng4PD8/OL7TGTbnabDZJkkwmk1uHB/O5DSFU5WYyHtV1OR6PdyZT4SCUeoPQcQTnffCi/S6oH4kIJUOCniQWwGgrfjo4L7uXWUTLvZRMe7EdqaxGptVmPZ6Mm6aZFNNmvkzS1IcQvAfmxFpjNBBzJLOluSRhNHd1MymXiXRuURTCEBJj1NpW1VxOVWZnlVLO1z40aWYR8fT09M0335Rjys4hitbq3d1dOXObJOVyJewcgtOu69JY7Tx2KooWsYkx6g51pbWOQIJEk/lvof3RWooFwMzWJnW9FMSZ1ros
SwYqslzwxnIySZK9ePFCyAVGo9FwOKyqarq7471/+fLl4eEhdQgpY8xgMPjoo4/29/elPH55eSmOE7r2geAn+rp9nwpLl1reIOwrsj2Y215An7GIEJk8ic65xNjlcvn06VMi+oVf+OUv/IHPP31xcnE5994boxRiCC41ViWptXYyboFp3vu6dkprREYAVKrclPdu3RqPBvOLc2bJfVsLQB0xgJwJAHjvB3kBSMaqPMmA2DlnrQ5BFYP8+PhYyDtd8N57IjE7rYyj9977uF6vk8T4xiVW3z469In58PnTX/nlfz4qcooeIimNHMmTBwDURkXtfCRgIGQmpdGH+P6H5//ev/e/vnX7zmq12ju4VdYNAz59/vIb3/jmN772zdPzS2MQANbr6qf++B/5xLtvH+zt1tUqBOe9n0xGIbLW+u6DhzHg+fm5jHjEGCXZJArGqDSzTVNFCn3mo1sShZtGWF6vVhmhz2sjyc5H6htqLNtSorEewNz7L2OsUirEmCSotYyiteW0NlFhkvkiDex9E0KblIqMtOrobtQWO4qc8NVsDGCfkZOoJL399tvPnj1bLtfU0e8BwNnZ2WKxqKomTW1mk0hR0ArtEQCAX5N/XCVJQuYPbfjJzBSjc/Xd27f+6B/+3Je/+rtHh8cCP8mSbpLSWufqG4mjrKzWmhk1qmC1AqO1RgYiVsDj4cDVVdPMjQKjQGs1Gg6qSi3L0igNXd+aiJhCL+u4dc78qowgvJIrv+6HcVs4aPs9/ZnDdY8OW/57+/9tb/3DToBbAQsQltGXL1/2aTF3QhzcUfr1BcY0TZ0LzoUsy/JNlaTm5fn5/v5eWZZFYvtPaXuxMjnH/TxSO+TenslWkaO78wqUBgakDuvG0AO4mOM2boK29HsJFUOPXsZu2Pv1bXjsas6v6k/D9UexX7G+kFjXGwA4Pj5u+ZyVWq1WVuF0Or1169blxfl6vZb2CjIYY5LUWGsF9ZpngxaZz1Jyb++12aLm76kkZGooS1MKUQZmiqKo63o4LLz325OIfUQsFcvpdCqMsMZq77qRJ4AQvDSr+odZSLWEOwIZeo6tvb09aXxuNhtkmEwmSpk0TafT6YsXL8QJCWQ6hCDCA9IdXy6XWusksZKgr5cLBLXZVGVZEgFqc/r8xd17dxTgcrng6HsZKJumiFhVFaJ6+fLl8MF9RJRyOgAQYD4oFouFMWY0GnnvBaZnklS0jzjEiiruNCrWm41I88oQV2wFiJS1reiylN+rTTkYDC4vL5l5d3eXiGTCuCzLo6MjSUeop+iJUdZNugNiZOXgACDXy5E6aEXSlwZDCKJ0Kzhz7khMvfeCkDZKO+cI8MGDB7/wi/+3e/duP3v27PTsXJtkMBgIu6e11sfIde29r8q11O2HwyHiZrOpjLVVVU2mo3IDDx7cGwwGl2cvjb4ixbuxpdsOZYyo2BjDsW1J7EymQLwyq7JuiGg6nYYQFouVcy3Qkpl701eWJbIb5MXuzuTi5VluzWY5++qXf8vXFTAjsYj69nkeEUWHxKqsK7DamMRT/PDJ+X/wH/5vHrzxaDFf5uOxd7wpm1/9tS/95pd+a7kqJ0WKAAr4z/yZn/zMZz41HY+8r6xRm7UbFDlRGonLjdtUtVbZetWcvTwvNxWiTmwWY2SO3vt3P/lxRFYaMaI0SyX6v1kGu24f+tPeNuwUhPoqEJHItAAS8E1bzR3/sboiEkcZRZLJyxtGBoV7KpJzTjEopsAt1g8Aul7htYJra2OZAK5kaAFaDBEliQGAR48efeUrX5GqFCIWRbG42Hzrm7+/v79/eLjPzLGrfQO+xjMBAAN1Lllxm04CMGitbWIisbXau9rY9LOf+5Evf/l3E6NcXSZJgqnlljPTSQVve+fxFXMyIGJitBZWemZRVxoUmTk6WC7nTVVOH94/3N+NMRZZErmt0ccYfWz7Z9hOR1xNjjKzkHZsb328Xkm+Yd+3XvH6t9cc+Y1sGF7xFn3Itu19u1PCPrJTIHsHiICoJSY7Ozu7deuWVBr7OEu0YqxWRFBVlfdxMtmpqkp6bHmee29ms1lR5FprqzRx7IPEtvUkSISuMAOAQLG7in4KnAGA2pvAzExAwLrdF53/Bo3MhIgEijtYWS/zAF3bAhEQlOo896urjYjA6sauYwSNugsErjDJcnDd6qoqIorRF8MBM08mo5cvT/amE2YmjkWe4d4eUajrejQYSjQDjE3tg6cYWNywVAUECie8KAcHB5vNpi4r+SxjjNamKWtlTc08GBT7+/svX76ULG13dyqiwiE4ebhaUb8YJZCSKrQUq5fL5cHBAfXMoLGVLZE0t699SWTknJdnZL1eyzulW/nRd77DjEdHR9bazWYj4ID1ej0cDtfrtQztiA8uikI8d5pamTM2hXnjjTfOL2aj0eRyPh+OBtZaBTwY5qfPL40xi+Vca31+eSnCSmIrqsYZbRtX90RUUugGAGk2yygR+WCMcTFIs1aYIrTWRqtNvTk6OpIBJJkvEopmCU1ms9l6vZYO9GAwkHkkyTkkngCAvb09UYLqAXHUzX1J4iuBab+eEu5JM1hgaPI2CTL60qK4agkUGlcpzEySeu8Tm6Rp+nM/93N377/xg/feL0ZjbRJpPwNiCF5rTYwxRmAWwLZsoSQxIfgiT43SyLC/twMxABMAyaiqpHr9Zoa+xkMtiIxbtmGlDRaDTCIPTpK9/V3V0cs33kmspjtSbiQ2SjdVfclxfzp5/uTJy9MX1WoZfNOOI7YDv8AMPsQYIwd0PqKxEQAYvvnd0//0P/n3f+RHP3dyehZJfe+77339a7/3C//8NxRCIFAIB/vjv/DH/tjxraPd/R1EDr5WSHVV7u6MyBNo4wKPx8Xpy8ff/c77xLiaXSqd1k0ZAqXWVFV1997t4bBgIABCbPMNAUD9/+KAO9fWxhzEgbs5XcXU5WzkvetXcvsIXVEHiSGxqTySwriw/VkKiOFa/Vkz2SRJWunf7h5tSfbK35rONnYetM1iZReG/YPddqJAFIaJANTTp08ff/DhaDQqigyBqeMiaM8GbriT3m72C6GwU0P2rjE20RpjcLduHX3yU2+vlm39oXNCLV1c7xclq1OAgAqIkVkBW2ut0VKUSES3xJg0Te7dveOcd3WVJbqsQ5IkjBKrcgiB6hqNlkgWABRyBOpCkNbbXbPvW752OyN/nQ8GuHIbXXADLGOs/fuZWaPGroCKAO3X3H3dFQzEFSMgiWDW1WS1PHsBEdfrtdggoQiQ3ybaGKVWm00+HSNiCFSWy9lsJnCMshQ9OCw3dVlWaZqGkLYblFViGRUTAEdWCgJfdYEFaQXQovS3VoClcMPUzjXJTFNX1cfIxKCuj88rgKsFQUQmlF4VXV/b7ToBgga8+pNeyVjAEX28gpJhI0GHdCWGLMu8i8vlcjAY7O/vLxaL+Xx+7/5dYb3Z39/lSL5xaqS2uwzSOe6ewBhCIE3CJSuszoJ/7rYNpmkKsdWbahonh0rT1FotbERCCqE1TiYTCbfLsizLUhCwAtNN0zTLU2ttXdfGCPwV5TQkPJf
JYwBSCpVKkyTJityYxIhAggunp6fHx8fHx7eHg6FR2lotok91XU4mo+Vy7lw7DSxPQZ7nQuUYAhljd3d3m6Z1ikmSHB4enr548fjxB2+8/aZbOSLSRtVNzLPicr5IkjRJMmvteDxFxOne3uLyMnTCLXLCAuGWHupqtWICqb3LMhpjgElWWHihEVHqAZKy70ymXZdaD4fDljI6tnNEfXF1PB5XVdWX5WXPSK1OrrQXWfLeS1AiE5j9gy+ps9xoYxQzCysyIsrayt4rsjzG6HwtDT/nXFW3A9bQMchKzAdSbwNDFNMkqeuSmXcn08FgILFXjNEYnaagtfauUcAaryCuzG1zMMZosB+GUUREIUJHER8E+JOaJEmYG0gz3pl4751z3jcILZpXEg9mbuoyTVPyrt5svKtDXRmNsYnMqp9FZOYYObRQkEiatdYuhG98//n/5f/6n73x5tsfffTRD95/7+d//ue//OWvEwMCRIbPfPqtH/+Jn3h0Z6/FXmjwPhgNhJxkKTNHjUoZbWxejD5KXv7LX/31j7/7yXK1joSARisCpZfr1R/44ueLovC+kYQYW2UfRFAKFfGNbAe65bpZZeQtgQBmRoqw9fN+pPCGAe8ro20Wqm2fHXXvou6pDyF4lmY+oun0yuQ4JDZd1nOLosDAa1/IWiut0+kU33jz4a//2m8OhxQjl+UGAPJ88MEHH9y5d3c6fcDMEaL3vsfN8hbO6LVL0546sgQZTVNnWW6MmYzxs5/59D/9+V/c3993zmHn1/M8l8pVv4IoOZh0RLRCpVihUlY4NiAxWmMxSNI0d3V5fnmJwGmWNE1jtTLDrCyBmYM2FKIQR6xWKydMoZGVQYrILUTr9RNWN3LTHxaIve5X/Q5oDwM3de+vPgK2Ng1ejSJcvfoFl+ExmfGX90vM3jRNojBNskt/SQTGtOMxy+VadWORBwcHaWrn8/nR8YHQriFqWVsCZk8oOCzqTltqwm0NX2HLyI0R+obK1aW2O0HAbgwAIDxWwtnCzAqVpLgdRSgwAwMBKARAgPDKRtreY5KJ8hVGGxBFOUKcM8q6CW7EtxUnNRwWzHx2fhljNhwOHzx48OTxh2VZHh0cCmAqMVYUC7q5uJYEQ4DK2BWmutQzikUmImGTkBQqOl/XNWg1GAzEjssoi/e+qipBOwvFxGjUUjIlSfLi9PnJyYn0OOUEhsMhKqjWZV/2lGdBHIOkbT19TwhBeyM0I/J+EVEQGgrn3KDI14ulBvSRUmNNroINRVGcnp7iaOyqer1YjooBMzPxzs4uxAiw1soOh1lVVdraJDFHR/fPT07yPD2+dSRsl9bawWCQprl3UaRqARSHEGMMPmqjZHhdtnTajVR57xWCQlBaxeCNMQrR2NZNyvpI/C0IZKEGk+oOdzWwvb299Xq9WC03mw0iStm/v1ntViTCjgxEng6pEPS7yHTiwY1vFQ+ZRZjIM5O4f+qQ7dK8VxqIgmIQVFSa2TzPl6sFAsjskI+RGVErqdqBQmABqWmJrsgHpRGBB4Miy1Kt9WJ2kSbm7q3j9WqJiqvNCgC0QWYOwWvdspD2+RwiamWZucdFokKlVJIkMlwgzendvR3hQ/Xes49GIZCiEIPzaDC6JjKtLs+1AoyhKTdZkkZqlWEjU4zsIxMjoPYcSCsX3NMXF/+n//P/4eDw9n/5s3/nn/zCzz998txqEO/7p/70H/uxH/vcrVtHSqlYL+t6ba3VSjNEbTQRYjtBp7wnYm2tPTo+eO+DD27due0dOReAEFFtyuqNN964c++uYOalfqok/AYpuirm0FvC3urCVsmw96DUKQy2llf2BpLQcUhAA0KZd4VxaZErAO3gOLSCLrYzRK3ICzMIYhERGQgRkySR97de4BVEmHzxGgfMHQKoaZrEprdu3do/2N2sqyTJ9vf3Y+PKsry8vDw5Odnb2xkMBpEjXPes25/VW97+yCCLh9g2YKrKeyfUufcf3BWRcESUCBRbbLaVH8qf966IiEArZtYoinNtNpwkCUfa25l63zDHLEtWq2i0Go2HISqWkk4CzJEZldEhyGwsAjEapbSMmcuG/qFAu9e+bkQeryZw/de4JeXU/2Tbs8J1HcpuQ1yrrPZfyIONqInAmMQo62rvm3B0+3AwGCyXy/39fd1KTPLJycm9u/eZmvlsQUR5ns/nS2DlvS/retD2TkKMkUJUGhCRGKX3qwhk2DwCSxlcLlNJE1d2JBIgAYicFyDxFaD6leXajiTgKvIQ960QXpMEI/RsOO0I8tbWkqOB9Fa69UFQHIFDCNoYY9KiKAaDZr1e2rQYDwef/OQnnz17lt5NsyxJbZIkien0dDuPa5TSROA7vSMRNjbGrNdrRA/dEy7bVdyztVYn1ph2iE4ybGGTmM1mcnzpTG82q7JEpcytW7eIKM8HMcbFYiFvFi4z1en+dgvFAEkPzZPjMHP0QWVKHhnJ+c7Ozm7duvO9733PGJMPW0VSKUHv7u6en58PBoMsy0JwAHB5ebm/v98KCiEuFisiGk7yl2fnOzs7yqr9/X1UEKN3DkaDQZ7nlxez3V29u7svYz/WZgiCQVPr9bqXju8pmkMI6/VaUmGZhxb/Kj5PKVWWJSIKsbNS6uXLl5LDpWlal5VSSrBRQid5eno6HA6tNmVdiRChjFRh25Bu4dxifCXX7Pu+ki4LYhwRy7KU7UStTGEFACJ+JZQRbeXZKq21AgMdUpI5IippbL/zzoPVRiBm2vvoZQAUWCkMMWigGCkGZ60OTajr+vT08s/9uT+plPoH/+AfGwN/62/9rcOD/eXsAiIVebp0NXTiekQBEUNoC4RiIQWTJVvCao2mNYxpmnoXy7JEhNFgOJ2MmXk+n8slKIUx+roOidLeO41cpAlSdK42SjPIdBEwQ4zsAgWKQkbdkGaGF2cXP/2n//T33//g3/n3//enLy/zzDKAj/C3/sb/8NEb94+PDrRG55uq2mD0CjmGoBATa61JjTGqawqsNpXzVFfre/du/czP/Nm/83d/7tbxUZYlWZrkebpabf7gH/qi1jpSZI5iUrpQA5gZIt3Ij3o7sOVBr8YCmVlJF68j2IOW+ipuJ8e95+5GrttSk9Y6MvTGavuzZGYthoAAMURltLXWIEQiYECtsEu02vgA29TObNu+7ogy12jSNA0+Tqfjz3zmM1/+rd8uioyITF6s12vv45PHH+3uTu/evQsajDEMV67i+nJwZ0O5/xREFiyaMUrYDARBsLe39/nPf/7Xf/3X8zzvIZQdX8zVg6Q6Pn1ZBQUEUquhiFonqRkV+Wg8YIjj0QAAZBi/KGpmHuQFR6pdI96orGuttHAphxbl34bPgAxAQgByw03CVpzV/9/FH9fZifn/C5Krv983fMyrR2Zm6kSstk+pfxsRaVQcSQEaY+bzed/bOz09/cQnPoGIREBEgkEV8+e9F8xeCCF0xPRGqdC9LBttWtFC6Iso0H1PBlCoOEhw/EQkHHUA1L4FWbV/xQCma950ECTuMlaGNjMW2Bag7NE+1LhaH+6kNaQPfHMBNQDFrmnaAsuBki
SRPEy6d8Nh4ZzbbNYHe3tJkty5c6euy+l03AvHhhAEZBUCKdVilIjaaVHZnzZNeqciZyvPuXM+0SZJEkKQiR256slkUlWb9XotIWZVVZPJRMaQAMBaa6OR8rIMiYlnGg6HFFqwhZyeUkrrNrqXeRjotA6dc6ZpEHG1Wp2cnEhBWwZnjTGr5bKqqtFoJO5Zpo0FbUREIqslTe6iKJaXl8IZXG4qcZl37t8h72YXc5lYe/Lkyf7+vsK2FoqgnXN1VW02m+FwiFobkxDRfL4YDAtZmfV6XRRZjL6qNkme+KaVHRSnKDIJzDydTsWDqm4WCACkwSwpoLV2PB4XRXF5eSlZKTPPZrOdnZ0QQlVV0i2Wa1Ft+78lzZWFgm6wRwrFUinVSltrV6tVkrTzEfKkyLNgjNEG+za8Usomyns/KgYAal2Wd+7c+Zmf+Zn/3X/0n967d/tisWx8S8BCTBpVCFEnbQ0gBGdtC5V/6623zs7O/tSf+smf+smfKNLk9ORF4yql2r0UY1QaVMcmJLUr7krlMUbgdgjFGoOGRZYqSZLhCETnA0BlWba7g9L8dM4xgFTUd3d3QuNicE1dYgxW6UieYwdEIiaSP+JAHJlcNAB4dPvhv/i13/iv/t4/jgxpCvnA/s//F3/zi1/4vMIYvStX6xAJQixsGiFoQBeJImitQxOQEI1BrWOIe3t7T58/U9oQN5//wqc/8cm3/87f+YUffO97g0G+Mx3tHuzv7+8zM+BVVtr6FFTMSipf/Loa5LbR4A450X8NnbYgEfeLLH/Yt0tk5AygjZKNSbaPDABCkoiKgSDGGKKTCAkABPOPiJ3YK/eJ6Q0v+WoG3PEcUUBUMicwnU4PDw9PT0+ttS6CMN6dnp7KCHyWZa8txPev/oP7aipAK/dLFBBbmWvxqg8fPvzKV76yndDIiJExSQjysAQFKKS1iKiR278EZqWMVUWaDQaDzCYy0rdarYxJd6fjixlba/NiIlmLMjpJksistc2KvK5rF0KMZNOEmX2nO/baVx8BbbtSefVVr/ZXyL23uOFZtw/AW4kgdDH49sdhh+jro13eam6BmBJ3RctnrT0/Pz88PJzNZswso2KirC6HEsBLXdflpgK4sFm6rspiNKzrOkkSMkbqBDFGozT8EL43RlAAzKrrVSF3mi0MEbDDFyCQTAtey1YVcpR89l+91Nvr2dUI2la6eqXZwVv4tRuBjtYmSRLXTcvkeb6/b8LL8+VyOZ1OR6MRUByNRq5uWk+52bSOLbTADcRWekjW2TkXmYgoy7I0TYG4S2oTGf+IMRajYQfEaDNXGRXDrocnvkHKrZvNhhGyLKuqlshpOp2K2o9RWrgYdafPiFtgYLHyACCUyFLjFZN9dnYmNXBr7bNnzz72mU+E6LVRqUqCj977yWTS7y7pOneOPDSNn4yHMUZU6vDw8L333jOp0cA20cwk6XWMUWtdVdVkd2+13GhtQ2jDuNViMRqNmqba2dkBbHdvCCHLMpEOTPIsekfBByaNygcnrdk+GwYAoZTpe67y1MtT8OzZs/F4vL+/f3Z2lqbp4eFhkiRnZ2fC7y2NZOzqQ/3DQkQy6CK+HDqx4a4+FIno7Ox0Op3KYva+XNjKANu2sbzT1dFaO5s5YwwrTQ188YtfONgfP3/+fLp/AMoJQN173zQ+z7Pg2+EWIjLaxBj39yff+MY3/uSf/Ol33313vVo8ffxhCMGgqlvhrNiiq3TbY4IODiYmvmma1CZKKWw5OgwACGFPkiRCy9M0fjQaSRIvebx464P9fYOwbkrfuFQrF2MIPksTwZ8TERETcWQKRD5SDKSz6WKxeP7hhyIn9Mf++Bd/6qf++Gc+9cnEIkdvtd0sHAIr1MCEoJW1McbUWGZUoEEjMxhlmCjLsrpc7+/veorz1fLBw/t/9s/9uR/84PL3v/Xto+HwydNnDx78wSRJlAHXBOywygwRQXeKBuo1ooNdC6q3GL2pRESKBB3CtLeitCU53xtlbvtcVxrkzIw9M4HCrcpujLEFrmJXsm0HaIkRFW1hXLbDAhQu6CtrCiAtSQSwqJxzEDlEt7s7vXP36Nnzx4NhGryKDajE1iH+3je/c7B/S2ubppY5RgpJmroYlEKl1GZdpmnK1BacAVipK1iWkJnJ8GjslIuUwfEkf+fjb3z9618fDEbIqtqUWZYBiZBcKzjoyQMicaBIrGKWZaiQGRONibYcKTqXaDyYjGezWW61VmCSdDQYKGO0Joplkau6rnJtNhjzNB1mqR8Nzy9neTHcVOVwOCpMslqtHMcGyJjEew+gEZGYrdUhBNWFkCEEZkzTtGk8IqKMr6BsAGlCXvPW2HdzQZQnr+okvZXvvXX/5v7Pow+olVjhPgqWNwSKSZY23g2LQYzx2bNnt27dMlbXTeW931Q1Ac4WS2MSZdLlpsyyzGbpR8+evvHGG5nNUpMrNrWrq6ZJoqCWWuSI9zFJEiBUqoVECileJIrgmBlJPKlsWYUsmkURwDCzgh5KBmJ+iBmQ0CgWQUMEifw6/9265Qh0IxBRSkn9WZ4w6DZ0v0S9SoQ2PTUHKNSKFQdWhJow+pCnWVR4Pr+4dbh7enpqNKfWhADOOWF5DYEYtUmy5XKeZZnW7Qi/WGH5rKbxWmNwsShSCq16eYyxKAxiJv7Pe5emCYAWam4AsDYtimFVNXXtqqqZTrUQ8AKonZ095xxHCMGbTAfnow9G6cY7tDYfFM45oxVT1EYrrUFpIgZQ3sc0TbMsaZrKGBN9MMZkSVoMcpsY7/1qtciK1CQamHYOD777ne/cv3+fG4eJMXk6n8/TNFVyklUteJ8mlDrREZt0kDrnZpdnh7sjt1ocHh6WZTlfLr33t+7emc1mLf4oNMpyuV7funUkZJ8HBwdKKVgtnK8ROUksEI1Goxi5KIYIajKcnFcngpNLErPZxGpTxhiZqNpUSmGWZb5xvnHj0Wi2uEzTdDabjcfjNE+YeWfv9gcffPBwOBhNxhKbS1l7tVxXZQ0AWpl0YCXgdq4hClprwdykacocm8alaUrEWZaE4NI0Dc5rjUdHR+J3xXAbqxh0iG5TksxAS9S1XC4pQpqnq9UqYTAGqrqaTsb/4X/w7/4v/1f/Tr1aWGMDEVIcZHnVlM45rSwAhRiNTQmJUSmjf/mX/wUiI9B0NI4+aK0j6wimcrWKrBgUAxAhamY0xjTB195prSVcixDIBW0QHCm0xhjFZK2unc8G6dTszudLrBtQOBoNNAdD1Xx29vDBvRBWy7MSmQ0H7wISoUbvvZgsT+SJQ0TWifN+1XibJh++9x4R/I/+6l9cb1Z/9I/84c9+5lMAZI1ZzuYhBIKQqyTaxDmnDDpXR+10qryL2liZ+k4S7bi0RtfkTJYwJhmm1fPyc+/+RNyMfuNLv8lARCFGeOvR7TxB750FFVgBM7CKCIBC0U9aaxMVMzMxIffhrHizXnFHI6BWgYkoIkVBFyGicHGIkWCGlhmVaLuwHxQaqyExESAyKESFioEVMqGwzwZuQ3NixtKFPM9tMgokY2+WiKC1m4CAgJ2FV4jYa
Ry+GkFIVaqHUN+/f3+xWDx9+nQw2MnznIiWy+V8Pn/y5Mlo/HHvwRglCBHq0OH91EHv+fvI4nVRS/ueQZF98t13v/2tb2VJWlUNIlaN64HKfeDQH4eZBWJnUUm1pK8ODYb5bDG31mpjQ4zCJCDXLl0+ijDMiwhgrCqKIt2USimCfDAosKx8lsUYU6UBFKHywQu4NLT8JphmNkaPqBDBOSfD2gKH6SMdWd7IhIg9nmt7BW5keDd+jluveF0Cc9s3S4Cc5/mTJ0/yPJ/cur3ZbDblRtCeWuvp7q5rA+oWTHR2dnZ8fGxtKnq0qkPrOefqynBCzrm9nWmMEYi1QSJSgC3sqe1HIm61sZmZkTsG6WulY2pFSNrrfO3d78lPbqyD2uI67fatQkTZZn3IKb+iDuS/vbA3Ahoi4i6XEqDT3t5eWZZZWrT9HlR6S8VIkk75IFlM2XJ5nitlBG8lbYvYUYRKYsrcEkwKukqGU+X9y+Uyz/OqqqTaLOcpCW6vNg/dwH6e55JnQ9c5U1sEnMa275RMMca4Xq93d/fb9oG16/U6S3ORAXjy5Ek+myulRoOhEJMtl0upn0s6ldpkfjnb399rmqau6+Ccc36xWAiSq1ytRYhX0GRS0m+zf4ByvWHm3b2p1vru3btSAFgul87VSWq8b6IHKarLWpWbtSTf8u1gMMjzXODWy+VyOBwKh2I3rxWsTb33wvQUhFUZYDweS5tZYMyS6DPQer3e39+fzWbGptL3lVsjyZ94azltmUSSDl+fDMnXPVeBMKX0QFmpeAtuC0Gfn5/LXDW00oH0qU996j/5j/+jf/d/+x8MhyMfKAKv16s8zwJFYVuSQCHNrGS3u7vTf/bPfvXD9z/4a3/l38rzVACAANAXtHirKykrJjwq8kpNapQOIdgkres6SdMkSWrvBBDTNM1oUCxjKNerncmkKZej0Wg8yhFhs1oJvS93aQADERNE8i4oY4N3AXSIvnYeUc/nS0/wd/+b//yNB/fXm6USZU7X1DXZQlMTys1Ga12F0qa6LCtWMRI2VW1MotAQIyJGGaDXCTObpNiUze/+ztc3pb/74P6/+Bf/8gc/+N5bbz56efJsMFDHx8cA7XPQ18kEgdv7ixvls956yAPfPlx80+QyM0Ps1ZDkT2krR+rfbIzR2iqlcEs3AbuBXaKglBIYslgGC5ilxXZeRERK6R+G1r5WHd2+Bt0xOsnGzfP86OhIOgre+93dXUFLnZyc9NtUdckHEQUfJQPoV6o/+I2XnIBGQCaOoWmaw8N9Wfo8z01imZkIQiDnggxgGVSKQYsMI0H0oschr8hA3jfaYJqmwtndd4CkGmy1LrJsOp1OJpO9vT2DKBMI4+FgOBzmWaINpmkyHgzGg4HR2miUsp5SQBSyLBNRZLl5SrdmRameS+v6JeOV8+iXmrZyX8Rri9Pvku3lusII4DWnorqXTLAopURDZrlcTsYTsfjr9XoymVxeXkojUE5bLF3vAIQbYT6fbzab1WrVUiSGAACoVQzbW1MpZRA1XvHC46vnDwBbshZI1x0hwM3u+Gu9L27BoOT2CbqnL952s7Cm+3cTKN4eQbfGS0pDEs0YY8bjsRAeCYPjcDgUSZm+iithX/+UiuGOMVZVJbo64lzFrG8XaaRmJYGjiOGI3RdXJ/kTdCAggebKJ4pXFsLIPM/X67XAtWLHLB06JcG+BCK46MFgoDri5RCc/Lkk6yGEJ0+eyBTy048+KrLMKAUA5XojHvdw/8A3bn45k8pqnueptRzjzs7OZlMlSSZJpE7s+fk5EWXjcYzx6Ojo4uVZWZaypHmejwbDoihm8wu5rqapvG/kbMWWee/lkRQqShlAEqSVnPx6ve7763KxMsUu7DHCG8WMo1E7uCWXL4HLZrMRmBsA+OA25VobtVgsxJ+FEOSw8kHiUyVI6kk5ZHLJd6JV0mKTjrJEQi1yWynq0BKxmxKWCofgA5bzBRH92I/92Hw+z7IEAJLEuk6fToB1o2GRp1kIYbNZN01zfLzzg/ee/OzP/uyzZ8+sSUxilTKyMfpnQcRm+g0pJkhssgQlRJTlgxijCH+Nx+OiyJIksUblWbK/O/VNfbi3X+SpQlzO5jFGoACRiAIzE3bWO5ILsfER0CKYzbpp6uA8XczcT/7rf2g4nax9oxIblbZ5Bkbp1AZkD5CORhHVaGeqktTkqUpTgoQh0Tr3gevGodKAhlh7wiQfPnt++g/+4T/+z/6P/8Uf+aM/bhL7j//pP7HWVlW12bif/umfHg6HssidGbmCXm4vyw0bAtfcJDBfUxhs7Zhqr7T7+dVvb5ggYxKtdWesrsyLfCMEz3ILuGsR9jgGvN6HvnFk5ghA11Tit9/Rd02yLGuCB4CdnZ3j4+PLi6XoDVhrsyx//vz5k4+eJW89ShIjZDHCjBijqA+prc+72atTV5PULUM0EWmlAeDdd9/9tX/5G0mS9V0cQUgpogylHB+ZeyHC3sNda5dKdBkoWrFKPmhttGZrW7EUbdRkMhFzyUpF8sFT01QU/M50UrsmSQ2vy82myrNEKVVualSYZ4n3XhdZjNEY7Zz3oTE6kYKt0KNf7QOFyG0EwAoFMdTfDL5ySCQl7mt/261J/5Pt3dZvMnktl0vJRXp0lTBWLv1mOB4bY4QhSGhyEVFwWMJGVBRFUeS1c3VdWyvThIyIi8VyOh55HzmGNKbKtuC93ooBQN8JISJgxUg/hJHl2ohSf99/2PbYXoHW9eI1F97faEmdsasEbNcJrr+z/VZr3THpQJZl+/tWHHPwJOvGzCKzKtxVYoulDyS/HY/H4kTrupbYS6BPYt878tR2HkYsuFRHBccroYA0ibMskwybmdM0FYsvwj4iYi9uwHsv0AQJaqWTKgGQNP6FUjHP06ra9BGblNGm02m5qV6+fJkPivv37z97+sQ3bnd39/nz53lWNHUzGY5kgjxJkvViebi3rwAkRGDmhw8fXlxcnJ+fj8fjpmmE3jKUpTiqnZ0diQwkKZTudYwxku/vCEPMssxV5Wq1SlOLiFW1ljl1MVsXFxfHx8fC/iEbSSiOY4wXFxd379598eJFmmebzUaGjJ1zQrUhpFqr1Wq5XEoqPxgM5B7JYkoHV7JbIRqULwTbJfGorKf4s6qqhB5TeuqCh3C+ZmYZA+Ut9aQu6cH+a2qx083jDz/6pV/6xfe+9/2jo6PVep0aDS1/IfbmWM4HEVNrxdgeH+28ePHia1/72qfe/aRzrnYNgJL6fGsxkABU7MAZs9lsOCxQschhybXU3iEq8l6wWs43o2FeA2imi3IzLNLVctnU5eXZS+IAHQRSAUdmiTMjcSTWNnWOXCQCDoSbym/qZlikv/DPfnO9/o9v374Vox8Ni8EgHxa5UtooLc+RVVruYFWXw+FQI65K76IaFgNlzLp01tpiOFBK1w6//OXfOz2b/8hnP1E37ktf+fJ//d/818LSCgDvvPWm3BcOIjbfd5Su4MTMLM83MysGuqKaAKSWSY9ksWIkIoYIzKgYAPuubas6uO2e29vURf/bk6LdwCMRAQsWvR2g
lwNp07Lj9cZHKdWfWPdD7gCt/EPmgAEkVO9ngRBxPB4/evToyUe/URRDIkqSNM/z995777333rt3/440UZQ1wAyMSmGMUWvbpvvXYWnIxMywVUnGDvqfGNP48OjBw2/9/nfOzi4AEJQJ3ksSrBR5ra3Aawk6B8eIul0qrZUCVByj564dq7VmREMSSQWZfLdWUmhMUlOVzd7eHnnfhODDwDm3uzudz+d5mhSjqfC7AqgiS2SXKyatFVGUjKquGyKwRik0Pl6NS4kh7F3FdgjU34wt/yr0xVcE3zf8zY0bdGO79K5Xqm0hhKZpFotFmimZ/b0CbigVYtTGrDcbIprNZoPBYDQaJklycnJSZGY6nZZlmVh7fn6+O52EEIoiY+YYGC2zkvJvNxDFvSBa25AliAJHEu/4Ci/Y1k7Y2gCvvqe9TLrypjcCFEQNwIgE1731aw/Yv7TWaDS0sCyd54n40egajiQgIOZYlmvvG2u1gJLko2U7yUpaa50Lkr3RdUwHAAjATWhg5VbKfZEsULLM9XottluyMWut+Ly6rvsgSaRERqNRWVf9Lup3lPc+T9twWzI86pSJldKIKJQag2J4dHQkT7So9R0eHkYfGq6L4cB7X603QOQaNx6Py7L0PpnNZg8fPpxdXM5ms/V6eXBwsHd4ePHyJfkgrJxKqXQ4HAM8e/bs0RsPy7LEdmDBXF6uhVtbarNJatbrpdVChwtSEN7fP1ytVj44gUmOx2OOpJQRwJp47t6/yhUZY6qqypJUyJum0+nF2flgMHB1Y7Upsvzy8vLlyelkMkmsTYxdrVZgLTJQiI4bClFrHX2oNqXUnxtXK0CjtEZFIcYYkSHSFehJggy5+977xWIht6NVv8iyEII2arVaSxIsVaUQ3Gw2WywWSgMzZWkaiVwM3vsksc5509loo1BdDckwcZzs7vzar311NBrtTKZKKZNY2ED3hsgd6IGZyYfFYnF4uG80QpLWdSux3ESa7oxVVN77ED3EYJRSSBpJcyyr8sXzj1xVE5FWKgTPMkjI7UcIVolBOUeECEotF+uXF0uTZUmSMurJcPKbv/41Y74RogMZzAUZBVTIQF2YPR1PrbWHh/vjkZW4anc6KYoCkMejyXR3J0ky1Pbb3/+wqd0X/uAX/+Vvfulv/9//C23MZDw8Pb34sc+9WxTFYjmbTqflehFCkGf2VRsIrxjGG3aGe7AqEpOYC+yWNPS573aeo5TCjurEmlRr8RQaOnYBbgErLFEOUQtIVFprayKTUgpbGiJW2rLzEthhm3NeAb7Mq2e8/bWEe2iE9hoODg4ePnz4+PETQSIAwHA4fHby4gfff/9Hf+yzoA0zM3FvjLb/V1sk0b2BfDUtFotmjPrYW2+/fHlubBpcIALU4j67sAKuhKGgs85ad3AfJbMfSZYneZ5aa13ljLHOOcAoD0CMEbEllDfGMESb6DRNNcCq3GjFWvFoNIqcNJvNs2fPkzw72N/13jdNEyMORsOyXHvvh6NhkWXz2SIw1U1prJWT6aOq7SulK8rOdt/GTlVCXIhS0Dcyu/rqTQq6Gz6mu/Zuup9osVjInlgul2PMjTHCpibLJT4AANbr9c50ulg0q9Uqy7LRaKSU2myqvb2DGAMmyWKxjgwiqYSJCJmliC0XsXwibLnGdq9HCdX77aS36EeuzwuBlnfy9b5++x4Z+INrV7r1tb4Rmrwao2y/lEYZh1NKGdvSjCulbJJkSRpCsCqRKoJgYiW0ksqeXEg7dmJtH8rked7i8kOQunGPUe/upuo9t/yJHFP6OxI2yRt6gQ3xRr2PF8oIIgLiJE+hGzKWWyBZoEzEKgVyGkopgU8zo6jTF/ng8PDww48eX15eHu4feO9d3SwWi92dPUmApBPx5MmTg4OD+XwuSW1ZlvPlYrprJFw4ff48s0mM8fHjx5IOfv/bv//2u++G6N977z3RbiKi0WS6uzt1LpTVWhusqs2Lk/kwz/IiC8Gt10uhBHn+/KmUmmWdvfdWGykMmE5pijqtX0QkYGPM7u7ucr6QXq9oa45GI4EoywmIpxQ6676xLT+UtZJ9K9/2YHIpDkuE1H90GyqR55aRo31tmfUgzSPpHcROzoEjPXv+tFxvZMgCgLXWhTFGlHe6UjxR1B2xQ5IkGkFCqOPjnW9+85s/+qM/6l3ANt8N1IF4EUiBEkb9EEK53ihkq42sJACAwhgYFRDHRCtQmoIbZmkD/MFi9tEHH27WS2BWHXkLbVE9BxLOWWRENLqpmrwY372/7+DZYr0OBMogU/Pxd97a352mqc2y5Pzs5Wq1evny5WK+BoAEVWBSAMvlHADml2eRwRrwr8yUiEXY2dltvH968nNEQVsDQHmerRarj7/ztrE6SYZltWmfF+5Jaq+Moer6wdglwdLv0x0DgBDVgdScuwGN7npjb6L7HtN2nK2U0tYkSQZd2n3FIwmsACMAd40ARCQCmbO4cZJEgSj0lFid/WpFIAyLjtx1Gye2SXohSinoHOp4PL5///77778PkEm3Q8YnXrw8vZzPR+NBe3rtfLpmGdLtLOtrTeS2wWJmjWCUDt7dun20t7d3cTlr6zCxzf7bNZL/ARGUVt3VIhFBCE5pwxxBoUiLt+sOEQC0Aq1gWAz6VebokCm4RmttjLZej1XB0Q+LbDws1hXlWTLI8zTPpqNRVVUaoBhMhuPxpswuLy+tUeL2XOO98VHGh7fkCpiZOtT8FkbpqrKE1+dcmVsys35z9D3sbde7vW6SRuzu7kp5TWplAGCMqap6sVhKtO69x05RWDqX/bCs975pHDOs1+uW9yfj2WzmvW98kGoeBa+1RrQh+h6gd+OssFW9BdWJKMglwhZPVn+x3XpcRX5XO4Sv+sfbW6VzaZpfV0WArR/eeGmtgbGv3ELLaKM1KjHTRqM0yOVb1VH2iy3u/6QdBtWaiIxp59SFMRgA+uZl7+D74RY5N1l5uaei+dqPADBzjF5rXZZlT08hHX0pqybGNr5V7uu3ltA/yYlJ5bavhJdlLf74B+99P8+Ksq6qqhrm+eXlpaShbe+Q4mKx2N/fjzG+//77R0dHQtS8WCxms9l0d08ptVwuxUlPp9Nnz54Vg3wyHp2fNfOzM9FJm81maZoWRSGjt3t7eyLikue5Bh6Px42ry3KNiMfHx7PZ7OTkZDAYlWWZJe2sc57nClTfDy7LMssykRGU+rOcUtNIH1fLJSyXS4FGy7pJAi1Ttk1TaY1ya/oevCyaYLWkni/tA6lzwJbukOyi0I0jyi2T4yBi01RVtclsomwiaUOaWkQuy9K72DTNbHaR5kVZ1kmSAkDjXZsnMShUrDTatlsh+QYQtyhRo+raffOb33z77bclu+p3dW8k5YEFgKZpDo/2RZILEXVudZKaxFoDCFaRNwiJtUDxm7/3/nd///cQkb2LMXrnjNXIDAwRKHaZArW6AMjMkeDO/Xs2yb/2rW9VVZjsTgD1cn55tPfgL//lP5dlCZP0GbmRvkPkxXwVQnj58mXjKlHImM1DWa0Xi9mmXIpcZAgACEbBaLxbu8Cs6k0VyKe
6viVmSJEgUBRAsQhGnoe//RxvyARgsYskeaJkcSRVEki7Xdm3fJ5azvFosvZvPBwv3EycwqUpoXhVsn3zXCw8OWxx57bBhjXfGx9yTTOi8366qqBGB5PIb4+PTULKtF5RbOVWAsGYfGAfHZ+ekm8nLZhjC+evVqv98L8zffvYvRp5Tauj4cDgo7PXr0qGSuiJhUP6CqUnqAlus+QUSESWMghKkND/LcxrnlL2ZHM8Fy+5R8QP8ugjNjRrbKZzGTJSGrAnMmPJc79J+YPhYHrD+dUgIQmklQAeqBSTHmmFtnDbk5J2nK1hiKrdCqh766WCwIp7qVet/yEf3Cj/qXD4+2LON7T/J7dC0iASizmCYWtMp9FawYp//l3wYCSKhsW7Kn548Oh8PxeGCERV03zaLve5NnKb93fA8yd4bc/iQiUjfVMAybzebHP/7xf/ovf9XULQCFEMdx+Pbb7zer1fn5qWaiqhx2/z0FI3oIJs9/WkqTXIF8dfYWTOOAdLlLNbeghWrjtFakyZOhB2M0Yi62VVU1+ElxsBCsypVjZuaoqCDllSz0GcoqxKP3gMhT3+eDKk4pe2PmBdR1HTnkGILLMatvnlIuFjIoIsCChCmFlCBGzwxA1CCiq9CQWsbsvCcEKaXkIDJLSkwElCQRIoIBC4QsbMyUuK9WK+UyIEld12bov//++7/8y78korZ2Lz/9ZLFoOUVXV8LRAJXAc70+UcgBEfu+f/func6NYWYE0x0H9QGKCY/j6NwkN01EgqAVAQ35dfgPAHASQmMNgqAhSbkhu+yEQ3f0MdT1lff+xYsXX37x1TiO6/Xae694RUopBH88Hi8vL1+9evWrX/1KB9qsVqvD4ViIG/3QzyNlfcgMktHdqEjJnANvLMZ0Lw+uF06hlOnLOVSuQUQRdo5CEGutal9obLff79WVqrttmmYce02C67oOYSoBElFVNX0/3tzcPH782FpSKNtaawx679frE/2UQj46CEunWm23W2ttVVXaufs7v/M7f/d3f/cnf/Inbduen59/9tlnd3d3P/zwg3ZnXVxcqMIXZRWX/X6vk4b1Qi+XS4XlS1szAGjjk/7K3d2d9365WarS1nG3v7m5qWqnReWQZxH+7Gc/CyH80R//MwDo+973w+l6s14uvPd3d3enm5Pr6+v/9Jf/909+8pP/5X/506Z2t1fXr998H1mudrf77TYmvNt1MXGMcX/s+m4cx1EgNU2zaOtnT59+8uxRWxlhD5xYos7w0T5g3RjKMBiGkQWJJCYJIY4hRQYRlBhFo1wkBggJU2JEUb6tiJgkpvfMTMaISDeEDRg0VUhp8NGBjZFRIhpbUSVOrDP/4k/++Scvnl9fXl1eXgI6FE0ufYy+73tFIObJKOdRXYR2GA88TQ6Y5uvqttSdE2PQuE3HBZaSB8wEegEAWZKIErYpC9JpwctUThIyAiMUB6+vFjOoi6ajnROP7+UkLIFyvew3PeS+vsOc50Po3cHMnBJLRERLxmc/WhywMcZUzhgjAkj2HgwAZhA7HbWqAhszmw6g0rZ6g6c4WWD5wLN8eKi/5fny31KtVzoUT549s6DnnywlEB0xxDo3V4nXQspF2pyexhiBJUW/WCwOh52wMNyrqxQ0A+G+U2hCtgUAQZs1DQsRvXjx4tffvOqOfV23oWlE5HA4XF1drVYLoumKltBs+raHihzFzb9nCucv6RU1mTmlZTO1IHPQRj2Z6hYpNG3JFKNZuj5KxEdZ6S2lxCCQoV1rbQhlSBarvIDeMyV7S3neiNrWqq5lkg2BEhZInmgkmaFj82xt9ZqUJ79O2kYcDN+TxjX/0DquxSrGOAydMa4beiKqKovocrjAtasAEmh4KT5GYQSWSNGi0Z40jHHCSGMIVVUJoq2ruq6//fbbf/jFz5ft4g9+73cuLi4c4fGwjTEGP6wXS72NdQK82lb1KwUX4gRNM52XVuXVTA/jWFJ2slNUG0K4vr5OKSGQQu5TKgwwjkOhcUoGugkNIh4Oh6ZZHA6HkxOjQGvTNClPwlCBHm2AaZrm22+/Vd5TLtN6a+3JyYlOJ5xHSBqeS8aa5mUFmnFkaMafNMaoQIearQJKExHnElSM8fLyUg8Ac2uyZo2an3HWmRmGgVkKKU+da9boULF+p2okxhjt/1GGbVVVl5eXJycnWhLWftyU0vn5+Q8//PAv/+W/1ARXa8l6nLqxEfHu7k5rvc+fP9/v96qRqZdJk2BrraZKBWnX5jpdSZ14qIJZWs1ZNi2gFHp/VU18i88+++zVq1ftavnVV19tNptut+u643q9fnzxaBzHd+/e/fznP/uDP/zDZ08e3dzdbtbL08cXVVv9+3//7199+83F6dnheut9Cine3e72XV9VlW1bS2azWT0+23zy/MnJquY4IBtnDCFau5jIOBJjpo+llAY/cgJBEsbej+M4xshJmADTmJCIkUUkCSNSSAIpIQunQFkGpzLOObdo165eVFXDYGMSEhhDcGRC3ycQ5xxDaprqk0+enGwWj5+cnZ49Oj3dfPfdd9fX1+v12pLhxHc3t/SIlA+v1iYGtkaMmUa9AbAxxs6SOZoJE8WswsHMKcn9PZiryJyp0WVLFytXOkJTHnOiF70gjpNxiFGnoQAwABa2l3yA3MIslQIAFc/SYpC+yhKnPF7dl/YIqDJE+dRM0FcXvHzzfZasB5B1D/V1rTMq5wk+yCFlclcfeZSzKG+efus3++MPk2P4EIImmCrPGk7cfzsYRAGAqm5TSqvVZuz7ytrb68umadq2Pe4PAvdlsOlnpsOaHUQ+KxWjZxBJcnp6+tOf/vTP/+z/cq5WQDLGeHV19eTJk/V66b0XTjMXi7P/fuRCQubczx1wRnc55aKsyfM7V6uVrl2W570fJ8LMyCjGKFxTVdXYD+oY9LPAUth9SVi7QdRHmqzAAACWJrl85ZQ2da3NV4gomfKjkU3f9ynHlepoS2SqVl4zHkVZq2oSVeCsDj9xJcgLYykoQi5DhpAwU3VijEAYI6kAljMWgQVSSt6AgaTRJEpSpgZjMkmk67q6ahHRj6Oei60MC2uy+Md//Mcc0+//5Ccnm7PucFdXzcnJye5uaq9SKX+dGjsPvRHRmkpoShzVb3Vdt91uz8/PSy2g4BN6z6tHscZtNhuVfdaXtDk15Xm3inAYZ0Wk0EBSnnjhvedJaFoQpw6/x48fv3jx4nd+53d2u93hcLi+vtntdkprgqwWq1c2m7CUcr9gvuGn4Tld14lAjBFGjnnoJGf6qJ4FlGIeSYxxsWq7rgOg7XarXcVlCv1qtdIWNU0Qq8oVt22tKaM+9JC0X4g5+jyWmIiGYdhsVoj45s0bY4xiAG3bvn37FhHVg6aUVFhDZ0Lo5Am9Ww+HQ9vWxpizs7PtdtsPHSJ+9913xpgffvhBm4wBSPuGU0pPnz5VRQ7v/ZMnT47H49OnT9+9e7derwt9eowj5D7pEL3Kdsbor6+vnzx50vfjH//xHz9+/PjV998dDoff/Z3f+/zly9vb29vbW50ecX19/a/+1b9aLBaJAyQ+dt2x
6/7bX//V3/ztf//kk+e77giAArTdH33iullUVaW//uTx2fnpqqnsOA4WuLJkDRqDaie0vuhj7Lpuv9+HEJJAisKAwqjMwZA4RamcExnJGEYMmYQREy/aVmIaJ4pc1bZtu1ysVpuqWRBaJEtoRTD45GPfCAROIYTFogGAkXtrrXF0erpxbq1zsdq6uarqt+ltiOMwdt4vtcCv0Zgfx0IBKYkpmEmLI3gik0TEOZtSUoxdphx3LHcWFNQtTb1z0z3CrF9LRIGT5g8pC9GkPKClZEra7LRctjpCTe0c831UGvz9/TK30vNn5r6qxBAE9+GCTAqGRIhpJuNIuT6dWCxN38AIquwRY4yjR0MlhyEiQxpSq4vRL5kw/JLozXO8D93Ne294L8h475/lU4hoCUszMrxXcRZIgEJGXeY0yyWMY10tVysZjt1V37fLdeJwenYxjsH7AYmQhYURUTXHAVgZC1O/LikIPc391UKUtbBa1k+ene33e0SzXq8Ph8P27rC925+enPsh6OTElKIUrF9Y1T6UtlpO6b309/5U8v9iCgCAJGqSlBeqtKZCEF0ul0gy9r1Aaqq1iMTIMSZjEhByYh+DIITkyWpndwQQHdnNMXCMxtkwDilMIwKVS6V+paoqHwKoknX2LpoRskRABpy2O9/zelSTkkSQE1nTZN4Nls/qkWugoHExMwOKgCQOMbCqJRz2u8dPn4z9sa7bMYSqbfV2Ekvoas/goBJGThhFOMWqIkZGY8Y0TslK3TX1YhzH3e52tVq1ovUt/OlP/vDTZ08vLy9fPn9urbW2qttFE8P13XZ/OKwXyz54ZgYi61zQ0T1dl0Qis6q+aH+ZRSBDzXJBZPZdZ63RPElEJDGwqGBnShx9GIdeVktDiIYQrYjEIDHGpl3psiB5AVXt0GgmphS6bicSYrIikqIPeT6MZv4+eB+obu0nm6fMT38kX717d/W3f/u33vtjfySYxtRT6cHI95gyhyGDz9N1txLCCDh1Oqm+lfKojTGlEcI5N6RhtVpaWy2XRqHa29tbnfGne14zTm3EBABm0OnCGmfkbNgrtKO3wzD4sm7r9clx3wFL3dSnm5NDNw1A1O7bUvw+7PbOWBQI/ZBGf3t7vd/vtVTvLDlru67bIiKiM265XGoRQfOzlBJa55xrlosQQuDkU7TMKSXlYJ+crA+H3Tj2682iac+HYWhSdXd3l0A2m03fg2bkFxePu25ghpPzszeXb376J//sy9/96tWrV79+9Usg/PLLL59//tnbt5d///d/uz47Oz09j8Px8cnpYXvz3//mv/+X//SX29u7L168cEM0QzgSdv4Q07Be1WcnG2uwtnB+tjhZW4KeGAgFEaOIRAwRPHHcH8cxBZ9CiOMYvDcpgU+eiFjEpzQmStSEGAOnbozWWgLgEJy1RBVHttYS2v3QA0KzWImtzGJ9+vhJvVxW66VbtmIocnCsA+oNxBFCMMzxmMjZgBhDAMYQoo3bBvpPTqsFnH1y0vztcNhumZHG292Ipl2tFam1TZUMDpAgMVmXQhROyhdrnLEGgUhiUJsZI0OCunHGWlVCxZkG0WQ5UwSAwGkKR4TV15I1Poa6rjkwh6h63eohmKVxFbIMw3B2dqYODMkUl2TyzETrppqUyL34hL6JU7TGsIAwI0JKkWNCFGAxiCKQEicRJMeSQggmRSF0xgmCCFlyxrUhJOtqouIRmYSIMKXYxZERUCSE1LRNu1gxA8ukKqqRgogAiTALJIQHSMDcv96j6IYAQOdWysSfz643RwwiksdSGRFWfgACvp8Bf/Qx92eaIDZNs1qtbu+uj+NoHS2Xy8VyqcC6yVDblOHJ/ZdMrhAEBBMnxRA0JtpsNhovL+rNRHtJ8fXrN+fn503tVDhQZpJVzDofSAOH+7CinG355xzfKM8gTt296hpNFqUq4dWU4OZFDyHoODCeFYPn05QhK6QUcrXM6IhEpDJGNIlbcWEk3c+ByJUzZpbE5c2SxapERAn9iKiOVmnYANC2rWaWBXcNs3G2nCVbNTXvus57zwzL9VrzRZhY/hMbkIhk0oOZjmqSlXC11HUIwZq43++vrq4eP378yYtaf265aC7fxbZtT09PUxj1BJXYPI59CEFFx0Ie2KdRajUNbEbmVFxL6fdHRGuNZmY2z7SIs2+YrqWZBCNjjEpKE5m0QpmZp+qRFEqdtaQLlVKKYSy5Y5xpw67WJyEEEWya5quvvmqa5he/+MXbt2/bui1yB5JrGZLrXu/tNJXroywnrvun4GPKJgMArTcr8A44OOdUAUOTeBV40V44veL6bZq8QkZQJM9+KdMsIFMRdcry4XAIIeikXn2n5OrGMAyPHj3Sfi3KQ59sVamSxpMnT7755htd6ZSSHlvf9y9evNBUe7laKUp/eXm53Jzo+Wrr0Wa5gsz5SimN43hxcVEqiyml5WJ1enp6fXOl4V0B3n/0ox/99V//9U//+R/XtfvVr371+eef/6t/9a/2+70fkw5revTo/OXLl49OTzkG17avvv36//V//p/fff31yXqpAzG9RDAkzE3T1NYtV4vNagmSDIgWa1AHv5Mid1NPiwcax7Hvwjh6icIJAEBIRCCmEEIafBxDjAlSxBhZaOKj+WHgqiqFEiJqFq21tF6vV5v1ZrNZbtbL5dItFnpZoYDA4T7ULlXYAqKI913X3dzcxJispRefPq/r+t31TW2ciKTg0VWo/ashihiTU6uyRSVNNdTGVSZPQ7Jtjbl1uFjLchgwQ5gpE611U/kYVPBL98NUwkMEgLquE3NMHkmWy3axWHjvAR/Ueh/mRQ9dw8zXcNZyKcbc5upy8YLFmMPsjtMDVj2i8ouISDT1F5U7fbFYrDcnkIOA8kNlBfIH72UQ4YNc9r1T+8deZYD3ByH/4w54/qXl/jTGLNar05Nz730KAQDX63XfdcPQTSobQAkSiJgHTV36LQgwbbU0FVZHa6tHjx4VY1HXdc98dXX17t27zz7/FFnN7j1nj6eOaSH8bVc0W8npSSLCXA/XCpN2dKjvlDxtEGfSHAVprGtXiHOSuzmLi+LMtbZ54N20vtbqz4WsF1hY05i1XdTZiEiMXus6OIu29DH37mUr+BCWy2XMVcwiDc15ejxkYFZElNO0XC7HMJGENfnW1imkaYiK955oKuTU2CZEmqq/lfejQVM1raZxWuEjlOAHIvr6229/9rOf/d7v/a7kiUZ1XXs/LJfLw+EwdMeJdcmsVXCDmEJoqgoAJIpVXyXgnENASIxGauuEJqZJoSmWs1Nr0g39YrWkrHMSQyr3jLFkxTBPU+QkjwFOyRRgX5UiYgYbmXmz2azX63EcnXPMcHV1NQz+5OTkyy+/HIbhuD9qOEIPCepQtMSd01BAuWalDAH5mmoiq1vl9PRUZUk0QEGceAlacFUBLIWp1V9SZq0r4aWAxiYrljDfL5H27VxfX/JqrS2/uoB6iyXhwiHQbF55eVqhUNjZ1XXTNCpKZa1Vec66rk9OTohIZyowcz8Mel7L5XJRN+M4QmJI3HV9W9XH43G9Xt/e3lZVtdv
d6SAmLQG0bXt5efnys0+//PLLm5ub7777LoRwdXVVVdXv/d7v/fSnP/3hh+9evHjx1RdfXl5fhRBOTk5WS4d4fnd3d3NzzSk2TVVR/cO3r/6f//v/vmrqk9N1fzg2VV1uLrU86/W6XTTH45FQTtermLhyNqYgKU4ZXpyIOX3gEELwMcZJlJeIyBgyEKP0Y+hHH6IIYxRi4eDHyMkZK4REhERAAICL1XJlN1Vll8vlcrM+OTlpl4tm0VIu1evtViLy+6QqD4fXgMwBiODhcOj7ngTbtn385AIAhhAFgVM0Bo04YI4pSgSyNWglUUTSpFkLwI2rYoyEoFBfcata1Jg74PJHyfxkknoGAKjqSjdeXdeVc5oSRGZESQnQ0GazUQ7NFCib98ulH/XB5Q2IlFIqMh2TAc9HpVxofYVZ/wYAHYmGRGiMM8aokjMAiYBSf9WSRx8gz5RrmoXW7IhseYO8l87NuEEfHjbOIGi8T3PvG4hULWT+qupCijBmaNviDIV/74+P/xjqHQ7O1aenpz4M29tbH8eqahbLpVpwk1NDEZn3Ms8CDEbQ7nsGox3i4WS9+fLLL3/xs6/NNNY0cUqXl5fnF6fGoGb5RASEOclP71WCP4yk3vNhACBJM7z7UkGJieZOV+eoz6MeZaPAjM+FOOvxyg7Y5VEzuk01DVKFv5QZ+TCrSatbLamnZlFN2/phhBy7xMhqHL337UyqsPy6DpQtJUYNbJXPqT2g+k69bZSXq26AiKqmFm2+0y3InhGrqrLOEIj3gzEGgNiYlFLTTBzRZ0+e1M6tT040pQshiPAnnzz/5JNPUNLhMMToNchwzp2s1ncxWGsr6wKMmvSnlBQCTfGeFYKImo8uFgsFoAAmDovOl9RyrxK4NHja7/fr9bo0B8cU4T7wstkZM5HLWUUPkhBAo4QYY9/3h8PhcDjo+kguOI3j2PcjIt7d3V1fX0+/2I8xT4Yuf8x5AyWG00tWKvcmd5+XLKeQqxExBI9YiYi19TAMqnWsV2q73eqXaFjAua6v26YMb8huOOUvnAQCNRLS7afWVt8sqMNcJxvd971GwOVC3N3dbTabm5sbndq03W5BZLVa3d3dqZAkZMGQzckJZ1376+trdbFae67rWruJNIZrmkoZDO8u36kQtHXm66+//vTTTzebzb/5N//m+++/v7u7+7u/+7umaX784x+/fPny7u4uxvjpp59qSt22SEiPHp/fXt9YZD8e+3H4//yHP9+crEI36JZ2dcUwiQKKj8um7Yfu7vZmuWzPTjfkLBGFmHwIwzBEH9UBKxOwT4GjFiy1cgjGoBGBxD5E71OIEhlUUJdZsvlCYw1aA4aqtlosFquzk7Zt69rVbbNer5vFwtU1zqZ3TNEbyNzOMDPHadSKWpsYp3BzGAZkMMbUzj5+dH7ojmNIPgbhhAkMkNPyvx+JyICwsOgkO1ZMaNrVzjmJSctVi8WC4X464dwRwkN9x3KEuluKIkJVVYkDMCplOsWo0u6S29yL3vI/8XHvdDOXQuJkdVNKmoOVNIansQrTdCPrnLXWkNPcdX4KzBz8MIx9jGzMBBQVa+y9n6t4lltgjk1+9Gg/9MH3Lz2kb5VoZrrQwAYQFIL+R33ww3+icQ5SNGDqRVtXLZqdg3r0/XK51mvDIJWZusJVs+q9xRUQIEECAuLsGq2lL7/8/Ltv36SYNNbuU3rz5u3JyckXX37GHBEF6AHrrOiEzWOrstXgnox9f3WJCIREWN3VMAySEQ9NSkQEkEuC+N6VKBdDcwJtxCyvzn9UH3pv5wHvU0+UOn590EyWoapdzBOcIGc2eK/pKoVtqIAeGac4pFrVQpctX6L2SLJ4NTOXUQdd19mqappmikJCBANEZIAIiWS6fzgmgxTS2PdHaytVZvbeV1Xz6NEjANWdEUQ4Ozs7PT3d7e98PxhjlovGGGOQNHPSjixJ3B0OhUemf6CdRE7mIdE9xYOnKwWzZm7N7ZT4pjHf5H1DCjGUG6muaxUVQdGiAHMKQ/R916nWLgCoVoFkpVx1wMMw+JAWi8XZ2ZkOHnj16jv1IkWKudQXmFlTihy6TTSxElW4PAtSRDSxVt+fMw+x1sY4dW2ykLVWSzwqnqyu13uvCtKr1UovK2V9IpgBhirMp3KbmDuL1L5on1vXdSqOIbnPhGe359u3b5XZVD7lnHv06FFKqeu6y3fvTk5OlOesxQslKJXVu7u7I7qnt+jPac69Wq2229tp8rElANhvd68Ph6fPn2mAojPWzs/PT09Pnz9//vbt22+//fbpJ0+/+OKLruvC6BfLhV7ZpqkksSN88viisvT9m5u7uxsDOHJUzJyZBz91RdfNcrff7na7xaI5WW+Wy6VIisI6M2MYfIpq2UGRkm6IGu+mKMCoEWRV28QcQggh+hSTgDCKpCRoKwBDpnK1dVVdt8vFZrNar9ebk5PFYuEaV9f1arWqmlrzY93VxZPlMpoUEA5mdBb1KzGmxWLZtt2ibqwzvveLRcMciUaU5GOCyMZWxlAC6UPSuh4gxhmkrEZKdx0AVNYoUTT4WCAcmSVwxf9RfuhtUvBCtWmLphIR6ywCGmelgCh5vMp7X/uhm3/vMZ04GIAp6xBmmhSN7kWsJs8qgIRaR7euqpvGuRrQIEKWcQTIchExxhBGjYybZqFt03qmpTF17vKKy/yoi51HJLnhSyD37n6IRL/vnhWPLhnwPPItf8wjgvlXTAft6haxWS7qfR1jHIbBVK5pFkd7KHf1lFQhIBqRe/xWpSiJCA3o2Ao1Ioum/fLLL//mb/5GMTEN2G+2dxf7s5OTNQCzpFkwgu+d1Tx+KYBbiTz0mhFMrSwlaymEQKWkMzMSFXQuBYUNJ+11mAkYAQCi0VI/zAAcdRgySU8nxQnxXl9p8uhp1jWvX9u0057Qvkx5qOFljFksFn2ekQcZSyknmzL1V1Gm+a9XrtGESZd9GIaUpjEVYfR6KyILERlBsiTAyUcC8d6nYPfHbhzHs4tzRdRTMuPYq8o5WgQAi8QS765vvv7668ePH3/22adNXWujqrU2ELVtiwKd7xSh1fTOOYdgdMqbzfoSRKRpbsrM55Tp3HrPlCDmeDy6qq5cPQ7eGudsNYyDUq5U+R1RnLEpJQFBEiQBwdyfMymreD9oTFOEVmwOMpi5aRgATk5OXrx48e2333777beVrWLWGSixEeZhG8Ul00xCqNxWJvcOmSySJXLffV6MS9/3WtxV9pkx5u7uTt28UqB1DbWiX2K1/EMSs074OI6qwau1Vc5F1onxEKJS8bWBTbfHpJLx+o3yzx8/fvzdD9/qaOHtduucu729VZ96dnamtWSlTOrcJO/92elp13XB27apCHDsh7ZtU4hDf7y9vfXe73Y756Y5UcvlEghPT09/9atfPXv2bLPZlHaDzz77bBz7qm289zqzSETImMghhcgST0/Wh92dAUh+5JTapmqqcz8E732MvNvphOn27vbm7u5utVq8eP5J0+r+hxBC34+j9+MYUoQkEINm8BjRJUwJSIi5wLCMIcQQoo8xBp4a9cAgQk
qpbVvtFmva9vR0c3Z21iwXm9NTHcdiK1O3ja3c/GZHREFgUMXeiaAw+aeHI1Zd3TaL1dNntl0uFk3bHfZX8YpjwgkKFJSQEhokQoMgJACJRWV8EAkRRECkqethGBTEUgPVj8F7b3VibkbjijGRh3EA5BzUUqZDhxCjF3G5bjXxMIzG3QZEuNzCHz4+6n3nz8/T3+KMOHNilFdIRMyJiGRqf5pEqnMvbe4ABInMqqWoR27vB4Yaa9FaVwoW5UjeCxfgAx8MH40tpi7e9xnNH4YjIsIIFoRUMro43fd+uzwDSvEiSswKj1PVbDanwzAcdnd120QfmqZZrja7u62PAeW+PUsIULAYI4FJ9ggAeALsJxzvyZMnm83Gj6HObmy327+7ullulgYmZbD5Qnx4IecBC0+krdn5E5JAyUHVKZYIaAIV89wummYACqJTvowaVjVwGU1CmIz+pEejJ1JwwmJwS3o3NRrmfVzAzHnVyhiDxtzPfMwUf6XwKLZpALXbUpupigPQZVffXL5NEyOlYSfJcHcOCzQmMICiglrMIYT+2N3e3tpqyrOdsZU1MUbhsW1bUjQixsVicTz2q3YBp3x2dvL55y+bpgERPTXCqeZ02G2nZ2jqVlSQHHLApMZXPYHC5lpmhsSJU6b+Tgs7juMwjJsTo36Fck2rqo2OLSuBPzMLJB3PlbJis3NusVggyvEo6tXUb6kbDiEIqKyjaM+JmuLlcukHr4ehGKzNI0PUHZZoScFhyE4XHk5T0bAvpajDEvQCqYokGdE3aBdQXdc61lcvro6vUI1J/Tma0fq0xbbEKDHGtm2Ln9YATnNWXfkw+sVqWQrVjx49+vWvf73f76umruv6u+++u7i4WCwW33777VdffbXOmffp6amO7tZNpRVi3ZOnp6dEpH0+RFTVoodxe3t77PZXV1eIqElw3/dKeP7hzetPP/30Jz/5yXa7VXOptAbdCQDQLBdlV6SU0EAIoXaGgJ0xd1eXX//615vlKoy+63rtD/LeD35smubQHffX15++eHF2cVpVVVM5ROzGIcZ47LpxDKNPMfIYxPsYEwMAk4rY3NN/TNA+LgzMiYEREAgQiIwxar6t5rXW2sV6dXJ+pkMyqqauqooItH5GRAwgyAKkFrGYrLJD1EThNG+YEZEF15tTWMvFk8eSwu21CyHsdjstXtQVG6TICUBIGHlSWUdlqBIRWk4chfvBi+io4EoEvB8BoK5rI6JEVHWiupfmqRfPK6MIKSWDFEU5vxMbRqMHZraWqmoifmJmQf6mNOk32O1pHRInSAwAZPQwZsL+KR8SMggwCAEQWZyS/ik4BgBhIATBpGpiiOJ9tLZyzunYRFWd9Dx+1F/OkecSBOguLeclIjDzwTM3KjARv97vX5r7VqtOCQB0kPJ7Pvj+rfeHRYiCRCnGlGLdNOfnjzSs3t3dGbM8ERm6vuvvK8HTyRDOfbCmXzFGR8anmFIwZJl5tVo8f/78F7/4BSIG7/UOv7y8fPHiubFIBIIwwYogmQn94KzmIUlOxCcHM+GEIFpmxtw7qxZK9z0gA0yUlrquIa+1doDMU5kHW1Mw8yegZEhqR+btp+UaqNUrWRfmGXDzKK+qKgaGNFGi1MFQZgPqlywWC5XzNVkMRPFYyb2wpZmY7xXdAFiAQCUt27Y1SCUWQZauOw5jN47j4L1SnU9PTzenZ9puYA0liaXYpglWXTvmarl8fHKyttZS1kw2SCklS+3xeNwdjtEHZ6YSuKbmzPexC2T/qpm9xtKgfa4iJNMQKl2ud+8uE7Niejq/HQAWiwXgNDmuAAyQ80v9Uc3bAKCqrDHm7Ozs9vbWWntzc6NCiZq4sOBmswkhpZT6fry6urrnoVRV2VqK4hYhi1JNVC/OzIvFQouvKv0PWb67aWpF0dVnqxE3xixXJ4onKVDBme+t0YnS6JQXPd+QunvVs6aUtBJMBOM4haFarNVhCSGE1WqlIULtqhCCohFlwUMIZA0aCimq7LNKbXzzzTeT0jjiixcvrq+vh2FQMOPu7u7i4mK5XMYQVHJSg4bE4bDvlNb++PHjR48eNU3z61//6vd///f7vr+9vWWQv/iLv7i9vf3n//yf64lrpzUREEHf96pYovepMWYYOgO4S8EhjEP/3//6r7c3t7fXN1dXN977FCfFVmOMNe5wOHz1xWfn5+dVZRnBGhNT6rrueOxj5JC49yH45IOEwJGBiFIWM1DDKZICgFoufZ7IgCaXhNaSAUQWA9hUddu22tZY17WtpvVEM1knIrJEWZP/Xlu3FFymez8loZkpB1u3duyHum767rBYLR+b57aqbq+vBYGsQUZt8WdmMlKRC1ndDyGDNJz0SgFQ36sCT1WywCI4XyL4cidKpsjofrCEkthYBXKM9/7ArDiN5iOleQE+Nh+iOJTf5IOnnSwikiCxql+p+9a7jwABkEm0RM+MzjkQIjJkjDFGeJpxwoI6RVgkMacQAkcPIkQ2q/hpAjZZG4YpAynY7Xs+5aOZ3nuxxT/6+NANTzG7iDIO7mcUcylf61aYOTZmZE5IZMhyROvc6enpD98eXF37YWiaZr1eD2NHeI+yqul3ZACA5Z7KpHe7JmQCbJ1x5J48efTq1avgg3UuxWiMOR6716/fvvzs+dQHxumeFJo91nzV5os4P+H5gjJI8L4yVu1m8aksUeFoxVhCSMYYH4PmCpirgGqpC5Y4obgT5xDmIs9ElAf3Ykr3mZym0Yq0qLvt+tHMuo805xYRlSrVEHvIxFdrrbVuSvsytqzphebHWuNUk80J9DZT3hMApMRK0tHESw33MAyHu1vVuFCk8fHFRVvXVVX5oY8xrpnJuhDSydlpSgl9YJ4G6mVKoRJ8UN3/xMPMIQVZk+IYwiQeklKKMEls6nVRbQHOzG1N1Mpsvma50Jl92kvz+s0bRJz8JbP6QlfZ3W632+1evnyhlWYAEEZmHsdR2XDaHXRzeUVEq5ON4uS73e6Xv/zlarV68eLFarXph+Hy8vLk5Gy1WnnfAcDXX3/9q1/++ssvvnj69Kl6cWOM5g0Fai4Bsl59yaiGWiI9u+KeNTLTOYCqd8HMfX9smgaAvR8uLs6urm5U2qVEabqXAEqYr00mIpnobq1ljogQwlQHdcuVRgY+DNaZw7FzlTHGuMqwRB+G1bJVGrAxCMDff/9tXbsY/d3djY7g7Pt+t91qTKkiG5eXl4fDgXJPRFXXSjlMo18+faKhxjAMh8PhZHMWY3TVxKU/Pz+/vb1Vq13X9en52f/2v/1vf//3f/9Xf/VXT548efbsWdu2zOycWa/XyrM7Ho8/vPkupbRer8nZ4XjAFB3hd6++eff63auvv7m5uem7AQBc3bi6SSm1VaPjGp8+fmSMsZWr6+o4DLe3t8M4qJizH0OKEhKPMTFTYhjGMWZfiyjGGIMCAIZQZgUFvRPJIIA4snpp6tq1ldtsNicnJ+qA1f4QWiJCysQUKeT5qD54/oA83UtomvBhrPGRrasjJ2OrekmCuElJZUwO273eEcPgFVJy1q1pdeiOamGAqK7rMXi9u
2OMrqkBKbAgiIQIMg19UVuhVf/5pkVDCpVPtzBMt7b3XFWVRhO6DarK6ohPETFIcaZtWczv/Ez1TpE5JMssia0llUPXTQ44pTopTMSrxKmQHoTRGFosFsZaZk17EjMDmpSSjkn0Y4Q0YUKrzUlVVcIoMOGCMUZTOUwydxzvudu57yhp8X2m94FzLfl3OVN+WJDCTMV9MDxSREAIcioMADJNlLj/TH6zmhtAxKpqQKRZLvbbMcbojKGpxSiVNBRRrLUxJZhpM8G98AowgDBwEk7h0ePzH/3oy//8n/9qsz5BMHXVhji+e/dus1ktVy0RVlVT0DZ6mKnPz/83PTAXUJ1zkNnnlHuTrJsOr641fewR74V2dTtq/prymG71vjIxKSDGqAyjcuVsnmNYekYh16cVrNPdVmjM+igHPPpxukwpsfZozoZ/lY9gboPTlVHfw1rnpqndebFYaBwQQ5qIiyk1i0UIYbc73N3dGRQVf6jrOoQxpSBS7be3tq7aZrnbKROenHOwgsS2IichUZwghAnbnwqWrI4ZAKz1GiWwyHLZhjCNwuUEygtzOr+TH9yuuixd12nOFwbVjMRx9Ij45MkTrUoiouoX7rs++n6/3282m6pqvI8aPBpjQpigC/3+EMIR0SIc+k4PWyWL/9t/+28x8hdffNH1vfdxGLwxLsZ4d3e3vdt98803P/7RjzR0KLcQ4gRCao1No4GShWgqr1lyuToAE8KhbCmf5cl2u127WKpO9d3d3TiOVWW1RVX9XBHhSpk4phdROVwFP9BjWywWGilqdVYVdXTraspewsdJSiwrsj179kyLIzp2sOs6QvTee+91pq9elHEcz87O9MtVwS2E8PTi0f5uqz4bACrXqCi01uzLibTtYhg9CxyPx9PT0z/8wz9k5levXr179y6ltFotdP6SMs91Wtzbt2+///77tm3PTjfG0N3V5evvf/jFP/w8xhhDQjSApu9GXJimaUMIZ2dnpycbJHCVRcK+77e73fF4HELs+pGFfOKQOCRJUZIkTsAsCQURRFggiQjTPR+nYG9kkCaxaGhr65xraqe5r+4ENSyUKVdqWxBIBZmKoQcARClumDMQLSLIAMCAGDGJSEI9DCJydbsAoNVmc3dzE0IMIQx4z2gB2wKAJSMIxiCQIaIkHKYpHZPZSSl5DymliqCgGhqllfMFAIaJy1uO0JhpziYApCSAoridtVakMGMQEY0gsDxEdj/eklT+KyK5FYtVRUudUUqRADmrvKmGBE3qs0hkBAjR6JUjnXYALMwMmFKIo0956jmhRbg/TtFhfSUuQKDZmHn5WNY7/3sKu2dv0JFEepYyK32Wu3Ju35jZFtc7WxqCLEWoW6O4ekTUnYNotKUJ0FgLItXJyUkcfXc4KqzXd4fdbsccjXEx+lL2A2BDRmaZ6PTlCZSRTyAi+NmnL7/+1TfM4uqqruthNNvt7vZ2u1qtlPlcEg75ILWFf8IjewvLzFHY0dQe4JyrG6fZkjFNyiPnyk9wZt/ZPPCVM9trCtPSfS8TzGKfwszSA9BGmmI09Z0TKyonT6U623ejujGFXbQNNIRAhrLXn6IHTWfnv6L3FeEkBFFCsJSSNi9578cQqqo6HDrd90pTUt3NyjpNzpbWucocD4ckEyTunLG0nKKQzDIzpflYASijwBeMRAoq6KFq289yuXS2rqpKZzyUi4OzB2T4brIvWXPj0aNHSCQiqvjofSRnT05a5ujq5vT8whqnPj7GKAiuruZBqC5jQDTG1HXlvV8u13/0R//s5OTs5z//uff+s8+/XK9b5+q+79+8eXN1ee29/8lPfkLTXFVUzFaPsOhsUO5Bmu+WIiGSI7Z7MqeZRuGyNm9QFhvXTEJXXrHcOaWucKzUQ+Ms+S5IDGf5wPtqS2U0CizTL1JKmspTHqmEiF3XqYrvZrO53W0v3NnFxcXxcFAmlEpYqOt9/Pix8qWVUj6Mox6Aum1FLPzYpZQWi8Xbt29VaFpvpO++//78/HyxWGx3dwAwDIPq0eZgC2bLxcYYhQ2dc99/+9312zePH51vb2+v3r1DRE5ibcWSDv1QVdUYk43RWVivlqeblSMGRO/Dvjtud4d914cIwxgT4Oh5DMmHGBliEmZJnCLqwLskwmzYEpIBJFvlblEiIoNE4JxxzjhrNJBq21avGiICTd7XGKO9pBMXVDWOM2iXExvWNqGYVXSKOyCiycRMVTelEdiqbgVS3S5X6zCOgXe7kLxycVMMAGCdUeekI3ZU2oysQURk4RRTCkk3SdvALB0qWKB6ow/9jVobzt1rYAznTEMht2LVdRPKQ/bu/Nve+6P8BPNESgBk0Pw3prImzMwJEA2CEUYyRCZPUyjGbZJDSIjT/SUiKYohnO5Z0ryVMLPQtW1o5kk+4jJ+05Oz+iyUy4eIlGnC/Bs+jpiVsN5fGiGgKXbBGUowX9/5NmKGqmpOT0+7455jqKpquVwej3uJKMjlzVCacWeDoJmZ073EFRk8HrqmbT/99JNf/vJrsCrnCyJye3v77NkTg9LHVNdO8wbJNMLftHKICLnZrkQS+stEucJRTwNlm7bKbbv3cMQ8RtHLaa01SKWkJ7lSklKKccotKCt7qL3WZiHJqluYs/CUG4L1DDArPJRFNnmIIeTCDOasHRE1/IRJEPje+BbwRxUbCHOHVWacHQ5djBENpZSin+ToFosFRx9jtNamFLz3iYMBXC1bREk+dP3BkGNAZl4sFk3TCFskU05Kj19/y+ZJUN77yNMUSAZBYVtVq80mpeRTJENRGK0RxDRxA1EErbE6Y1Xdm/oeMvc5ZcoXaL8/jvFOaWjr9Xq9PlksVoe+SyBrsgBQ1xOyjYiAUy1ccgCrbClF9b/88stHjx7917/6b//HP/wff/qnf7pYrN6+fXt3u/3hhx/evn377Nknn718mSOtqYdhEkxwrgDplLvA9T2Uh1FquKjvCSHoXATKc6a1kO/zOPr1er3f73XjVZWdGAD59gGAAqARkTFINCVbZTfqj1ZVJYZVe1VduG4bnbyrjl+zZG272m63IjIEf3p6Ktu7xWKxWi5/td/r7EJtiNJDVWO93W415/Per09OMPPJAycbuPRn667zMbhYPXv2yevX35dUHjPxSk+trp2p3LJdmPzQk1VKwcXJ5ury7e31zZsfXl++fff06dOhH1+/vtSIGtB4Py4ae7I53ayatrHJh93xeDweu3489qMPEmIKCUNiHzlEiQlj4iisN2LUwEkiiQgwojV4v7f1ahpEg2gNVs6BsLaNtW3brpbW2slvISIRPmxfFJE4y4BJwJDJc5Ow7Mm5KU4pMDNylmTiiIgGAcUsVxs1DsPQpZT8MDIzR2+MaawRxBg5pURgLBk2IBxDCghcltTmGWicx3MpcpN0rkzOfYv1c87pXBPmpGrJRJSSEHDZ6jSjPWc7d+9HPnQiD/zOtE4TwANT98q9hSyfLQklGWdtpYOMYGpMtQKJp/6CqUDDzCDSNMvJQs4MrBDC7DAYp5G9H/qR944ffoNbLadzH3mwotEyTamC4unpQf9T
8U8AMJfo/PA9ZRVYOd95SMB6vb6+fHfYHYdhQBYfQgg6YwcSR0AAuRc8K17z/r/MUVJVO5H06NGjb7/9oTv2lFkq2+3u8vLyyaNHIlELt5VzaTraCTD/yKoBYJHLQiVjE8A9tVhv8uRDSql1taYL6/Va31Dl6evqVxRMhtwuYozRklVOlKdx65wpr5RZ7/OrWBwkEalIgq6AvpNyh5J+7TwAUrKbOiT18VrzmzOHS1bNnBvpRFIMy+VSMjNIj1Dp0Jxxy2E4dl23qBtjwFrr/VDXtXNOqa3Hw0EA4hjMwqjaFEvCPDm1QLvz/WetresWgGEPiGjrytaV70YA2Gw2q9Xq5uZuv98TkTF27lp4pgkq8T5k0QjDp3uULKXEDPvuqJXCq6ur05PzR48eKYROZBE7AxP4bG0FACrzKbP2bhFRoUdjzPHQX5w//tf/+l//X//h//2zn/38yZMn37767vXr1wC03e4vLsJyudQrVdjU6gXn+Ws5VJOHnTEzEeZ8YIIKSgCnSaqiDjEmzTKtMXVVCTMCOGtZczQBROSYCJAAkSik5KZ5UBOaVdJrZXKFEFDAGaMdnOodFYWuspZ13/cFo+u67uzsLOZG+eurKx0JvN1u70U/fBi7XloRkbquj8ejOvvg/fmjE2PMzfbO4SRpon1TT58+fXd1qTmic+78/NFut9tsTkc/lAFTerv9/Fe//OSTT4wxjaskdxirtNNisVgvKkK5efd2e3vz1VdfffXVV//p//ufY0qmqtZ1s9/vK+fW6+Vms2ob54djN8bDoTt0Qz96H2JgGCMOIYWYIkuM7BNHTpr+MnPiWTMJgFaCLRIRoSj1ChHRqLdX/o+1VV27pm6axlaOjIGHyRDkHEiY0yytLDaQEVkR8Jl+rb4hlvKTISVOExFZS4RtVTlLBrBtqtur66vry+P+MCZhjgREhlAEWRPj2DqTGEIIWCA3DiKSYBoQrrBKabaEmcsp9q2qKo7l1ZlIMpAWGYmspi4lPH3PKpcFKb5m7lbLe1gi5GI8PHAX98VjXShrK2stgKp6a4GMAdggGEPeez90IsIshHaxWAgil3G7czchggTKfsffnLB+9Jn503PHnL+ERMDY+wmMJcJA/JgWNCICqdH/iIwnTbPZp+MjIsiKyiys+VY/HFMKiNj1RzHGWmNyWJRmrW9TiglUgDhmZmAASjGdnp4+ffrkH372y/Pzc7Udw9hdXl4/Oj+vKhdCQJW6/UD74rc8RCacoVy/st2jMKSJkauVp+PxOAxD1SxwNqNXrSciGqScfDgRUbTMe79c1mrO1FvrgRUVLZhpJigKulwu9fudcyFOey5lWj/n1tj57qTcCpVm6owxjwNTU67ZmOQ2YtV4AgCbRyHp7dR1HWtOzKxAKBBpIXO73T559Pj8/BwRt9tbAGBBAIkxcorWLgxIOcf3/Nl0ZYmqqmKOxlnt6l4sFhIGEdHxt5l/1DtXqd8CgLmEuu6UlO6jPY0VxtHrFlQthdvddr/fq79Zrk5X33/Xtu3zp8+CT6v1YhoaYxU6SxyToogajhBa51zlmpTS6HtV1Q4h/O7v/v5f/MVf/O1//zsdP1zX1RdffPHv/t2/K5lficn0D72jUhY7041UNMhE2JgpPJKcv5bGD31/ZuNzSrGu691upyy5HC7cDx5Os/ZKybF8KWRQ1pXU5RqGAQUAq2EYdNxWeZtuDN0VNzc3zjkfgp7vbrdTT6m5uCpZajChIi0hBA10Hj9+TEQKZd/c3EDik5MTZ+vVatV1nX6wbZc+JmZomoVz9vr6erVaHY/Htm1PTk4uLy8111eG9n6/V03Ki9Mza62SAQ2Sc846Gg7bm6ur2jnnzOefvwwhHI/9s2fPru72d/udcfbkbL3erE7WbQrj9u6q48Z7nwRCTINnBvRRRp9ikgQYGGLiKJI4ZQ01A8CGrDWAJDqKFBEnQVsWRjFAYBBYOCXU+oI1RGScJYW+CBl1rifDVMecbGaCe/cjs+EHzExyH4PevwoCCMwMESSbaxGJKREYRGzbtnLGEsY0orAdY9d1kkYha42xFpk5sAQfjDHWSF1RVU08UCWyUJY9KNCOcRZnyGI5zpIS4NQMYktuuly2xjmY+Z7pbb8Buf2oxUZEgcQS83JN/AfITN5y+yCiISI0zmmc/X4DLUtEER+G6SMsVVMbchGo/Hq5KOWQJvSxOIt/7FEc8MM3U8ZcQb05PKQGl0UzxnxcAYQzrP3RtSvnWQ7aWrtarXzfaUq3XC5ROI6+bVuqJo77fI30Mf3z/glENBbTOI4Mxjn37Nmzb1/9MAyDyDRTqNsf7u7unj59zDEZcpwZ+791dd6PwuYhHuXidErJIpWX9GLHGClGay1aizmsUwdMgG3bKl8A8t2SZoIyOGvJlRlcnzIRWpNRLejqS5pHKnRZsrTyd474JiaFiPR9p4RtnU93v4jGqH3XuFV/WtMXyDKzGb2YmmpSSqt2sT49OR6PmsbFGNvlAhFvb69TSk3ThDgS2uQHAGLmw+Fw7kdbteoUERGA8qpPeBSIiCRtuo0pVFUVqmqi5ywr5+oQwm63VywhO/J7uRIASCGGKSjKdzvhMAzjOCbmcQxayDwej/v9vmmaYUzv3r1j5u+///7NmzePHz9++uzx+fkpCTStzo0YCRCQtRvV2brAsADw+vXrYRi6biBrQghff/316empYh9/9Ed/VMImKPzBrIkmmTKmF0jDIEQcx9E5WwwZ5aYDRTIKn3m6cXN9wTnc7Y6aGU8S3zxhgMxcCPbq/nM0cG8NMc/g0ktjkNSXF+6eRgYFBLbTzIxxGMeiW6kNxGnq2znqTkPEw3ZnrT09PT0ej8aYV69eaRBGRMvl8vGzp5wghHA4HFTvsKoaRAwh6PxKXTdGWJ+evL28rB1pQ1QI4Xg8LpftZrN59erVzc3Np88/UdFpzYGMRWMMRR/GISavvLw3b944Z+4O+/1+f3d39/nnn9d13TQVEuzvbvtufxQOISWBIUofIguNIUYwkZMgJeEkKECs8AIAASGRtWgNAHIGZpRPRAIISRimtCzGiM6CIWMMEBqtAVnDIIiQhJkFldSTpgLcXKCwmAv9p0Vb7sriIcgwCIEwAAkmVfBV4+I5gaQURj8M3g+IaC2R5xSDiBhOSMZUDkEcUWVcXddKrtYNE0VAEtIk06bmgma9Q/NETfIsE0sT3bK8Wa3TYrHSNEyPvfi2uXEuf5eQ/UPTPfOywiw8C1CmBE1vE0EQJEPWVIQEhjjrbLBEFW9QssJ0kNYo4RHMnIElItM4pmJw1P1hht8/GiiUl+ZvmL4EzPwT974g8TxfKo7m48MYEFDgwW+X3+OHpVzRuTMqEQBy2G+JYNku/NBbZ54/fx6Et9vtmPWbEJHwQS0TckhIZEWSSg9GAQ2xX758+fd///ebzWmMXu3amzdvTk9Pm2rSNE8cP3oKv+VRjBTkjT61vlRuHvFpf4uPk3+lHGqoWdRukJREE52pUoukfXXFIqtfKYzoKb5hzqo0jR6DOjwlYKtjLlc3pYRoSuBZhoCqjTs7O9P
3KEdGtdHhHq6YKEtK8Q2zUe3KhlH+yMQeqqpuf+jHQXs/NpuNDpvr+6OqAacQTe2cseQqRNnv9957O2H4DgCiMIAQkcUKgBEx5SjE1RUdp4MREZX7j5GV4GqyCOh9wSafgjoYkIkuzsxjnCSXD8fj4dCN47g7HiSLeo7HYCypNb+5uTndnDx99vizzz778Zdf1E2l11oSJw53d3d3d3cpSl3Xz549897f3Nwojv348dPbd3c6Qc85V1XNZ599dnFxsVyuCVN6aBQAYE5lL5cbZ6V6NV564Yhovv3KG5RspV5TYQ9NNLVDrK5andNQSiEppXEMSA9ief0h/af2NenhqZNTVlqYZEAmXZdSplXy13q9ZgS9Rsvlkn1QhtR2O8EMtXU6bFjF7+q6VtT6Xk7LpxcvXjCz6nWISN02ZI21tu97772geffu3bNnz5xzdW3X67XKbCFi1TYhhD/4gz9QApqoRJ11zByT7/ue0mh0fDrL69evLy+vbm9vb/fdEEWbjL0fandxc3N1vLtCSKM0x2OXGEJI3nOIEiKjqYCMKJmUUpnnIgiEhASWcOpAytaPhQ2SIAgI8PQkpBzr1NXU/ltXNLXJznp5mVlYVW4EHqRf82RoDj6XQI1TIGsACIAtTvQLSUkAvB85hRTGoeui98agtTaFmIIXkZQQgJwkss45Y61VebXJ2AoYiyTgc+WiCNcXVycZiqOZWBsZG1NEcz+DS3ERLcHw9P736Uj3nuUhQlv27XsLov9SInNJG4oDM0Qg0/KW5CRN+mQsLACgrYYhBBVFds4uFqsQwnsOXwP6clTlSN87tg8fMgPSH3zkg0Ko2m2YuXyZNcJYAHr4foWFwSDNdbRzlq3tOro5QNFXkQRILNgsV82itYdqCIMYNHXVbhb+Zh99zEgdSEwMIILMTGhwmoSspGsmkBQtEhlgoIh2/PSLi1c/2N7vrVtwhAR4uw1v3l1/9tmLxIMxaEQ3h8xdznsLhLNznAcWJdYTQeeqwGzJsCB7T0TeD01TseLM1ipCqDaxqiyQVM001n4MXWtbsmDpXldL38+5WKurH/KUQO+nEJ5ncpgxsI5BNMZYUw3DUFXOGlL+i0YJMU8Q894Hn2LgcThY61KUEHzKKgQpirYeseBisei6zllLpNXiJoTUdVv1gmM/qFIPSySDBoWjB4Dlcrnf7gjtcnEyjj4laZpFSokQLQiliJjefPfNi+XG1hVzNMY4xJTEIhEgGcPMzlqJxiBFEAXbfX9IzLn5uPJ+YE5V5WIMVVVrQfRwONR10w+haZqqXijbDpH8BPNy3/ddPx4Oh64bkkjwSrJwwSctSalgZBjj9fXtfr/fbQ8pis5s0AiGiPrRHHuMzFfbu9dXu7Efrq6umPnt27ft4lsO0Rhsmqaq3cnJql1YH459d6sBZ7EUKomsZc4QvHOOyBGZvu/ruhrHkUH6cdD5PMWqQuLd7V1VVRITIqI1ImCtC0GLGtb7YLJw1bRXJRqLRBhjcFUjkFxlrIj3WhuWYeiMMcvlUoR02tJi0YQQTjar3W5XN64UmyH3hqlpU+0LnXDVdx0tl411EuKiqkM/KEBijFF5VEQ8HA4n9uyw36/XK2a+uDg3hk5ONiEERLCIV7sbY7FpmsRhuWpBaOiO7XJxc3OzWDRNYzeb81ffd2Tk2fPHlz+8QYGnj59c3Vy/fft20S6bptntDqt2sd8fnRuWyzZY27Y1EazXLe/7umr3t2nfj30/Xl7vuwi2qs+Wrm3r/d27L774zAe+uQvMGwDYe/Bc+WHkJCDGGSBizwlRhtEzCANzZBC2gI2ryUUCdAaJCNWeACVOVNUiKACCFAVClIZMVdlGyNoKwbSLFbnK2ioFLZyAAULVdRKwaAQhJSGaqsFlCzGzCEICtBhziwdl3NKiKUPgRKKgQZHInIIXjr477rd33WFvhEUSjwOiqet2HMdxGC1SjEzkq2VjoUbf13UtkGKMlogcsYE+hiScIleYGDD6gII6iobQkMWUJAZOjIJmiGxNYhA/ekIchr5ta5F0dn5SO5QUwBAo9k5ZQOJjYgyTrRaQSfp6KpATgCbaiEiIkVOMUbc3p2SNCTHGmMAigBC5drkaORqDwHGC+qKQFnNDkigkBEApidssgjA6CxO1a1aNZkEEi4RAkLuS+CGgrfCewq2TqybSHrLsXCckwMzmAxVPxFkXDLJMU8H8/0nzgPXbIMtq5a4TnL0E1trgh+V6fTwe9vvtcrls65qInBsQFb5NAEhTWqBybg8iDMkV55QSWSKiEONysfj000//7m9/vraLuq5FIKW4vdsfL45Nay2i8PsRx4dhy3vPzP9ZtohkvrH+HSdtyEn8uXxEWzUQp6KIy7NdUUexuupwOCjWZ3LzEs1knzUQ0XusFHcL+FN4iZoiKwTEuUZYpKNKbqTDg9PUF+9L3K2f0lwqxbTb7aIPi/PzCWm3FhG1Wl8qQCU6izGqSIJkzMN7j3Qv11XqnYjY9/3+bltbZ+oa7qUhAAhF7nF+k1kCmqnf3t4OY7i6utLiQkFH53m/LniMUV0RAKA1FGW/3/d93w9e82+YsdAlAwwZWZkeIaTr6+v/+B//o86A0u5YXclxHFeb9eXl5XF/UGWDCdT1aAAVJNY65fPnz1eLpfe+rhclrqfM5C/VgYxYYOE0hBQ1q5jHvwawkPsCp3AM1k4SuwDQdR1kH6mAMM5YCAXwN1lFVQc86OWYAzD7/T7GCCKr1arsap3EELJe1WKxgHvGXL3ZbDQTUt1pbfw9Ozuz1j569Gi73VZVdXt7ezwe9VXN2pVTreT/Q98R0c3NjeqeIiKhFZHI6ebmpu9r3T+afOeK/jR+EQDevXt3fn6+22+/++bV+flpSmkcus1mdXZ2drJeDsNw3trr6+sffvhBiyzA99rXqqFtjNvv91pE67qu85BS4pRABCdrCMwcszVDRDIoMIG0VSMkQICAIrmlkBA5MaIAGkQkFAJkjimCCM1Df8nCAJAZJ/pQUrEG0DCnXzFzlmvwabIbBSMRkcBqLgwiCiHwNGgQAYZh6A6H3W532G2Jk7XWEDhnRCoiEEmSEksMPqmTQWNCCAKTilFidpXBpD3HYhBB5/6hECIwylS9ToiTiShcOY0gMCMlkDFRflhYfc/7zs0vzsrD89WQLMkAOeuVov8sgkjOkTEG4N5q5W7Z4p4m06G2Tos4Zho3IEAfOaQPH8UEzY5ZmH+jH8FcSQWWD95wn2GXR0mLP3TApEkwZCB6djRT6IBTVbWklQigQl5msVhdXFxUzux2d3c3V865/jhq9qYron1dRHb61Lx8jaKFrmK7RaSqqk8//fTXv/o2hLGurHM2pbjf74/HY1Wvx5EtZaMPH3GrH1up37gnjKEQJoGFUuEoiO7cpw6Db5oasjWciriJvUwykJQbc0vEJLOGXcido7p9FXM2uesXZzCUgpnF+G42m7mXWq1WJcMumIa+VFpgU9KMOY1jn1KI0e/3+/1+P6kBh6C6IurFjTGKiqcUvIfa6Y0aMPdITOl1ZmQE73f7u83JSv0E0d
Reh4hETjiKCFlntAjqJMa4Wm2Wy/V2+0PXdZJ4vT7Rry0eSJu11E6NKQ19f3FxIQBd14lICKEfBuVJpZR8ZH0zM6uZmJG5JpIdEWk5cxxH/XL13FPl9YdpzTXsULTWh6G2zjnjnPvkk09+9KMf1bXTuQKcqSt6HRXSUEKcIm+chVmmmDdMhLsStVTGusrpHOLyzrOzcyX9UW7/VZhEL64xpsy4LI5cffzkunJsx1kZn3Ot2hozzSAyBhGLGgYAHI/H1WqlI0/01bqu3717p3MP9/u9Ntjc3t6q2OezZ89ub2//4A/+4PLykoj04ynrKKn+iSodhuAVQB7HcRg71ZcG4N1ud3Jy0jTN69evv/7m1y9fvnzx7NnFxcXPfvaz5XJ5PHSXl5ftomHmk5OT3W737MlTkLS9ueUQx/4IAIe3++3uMI6jTIrlY9M0WkMJIZycrFNKu2OHLDHGQ9cPcVIqIjACwopRitLJ0Y8jxwQsjavWm9WyaQEGAjQGUYRjUEZCYhCBwIkTQ2JtpKjIuYmfL4bAEJAwc1ROkCa+ADCJUchUUi07p/gbLg02U5svyazJDbI/SyCIaMgRADMfdvvDftvvd34YUko+xFqEXGXt1DIQQvBlzCJzZB7GDhGV8WCsnQinZIAlCaAICguLACGhCBPZpCcPLIQAGEJCU4lM7ey67bWEREQiSU2vPKywwMwrF1OsxfCSMOgf6iYg92cWVz1teGbK1l7vBQFQlVDQuEezUpHEaV7SXiwWWW5TiIx8kJd/6BTKM+XX33tpfmqQidDqGefuVluNMbdEl2udreXHWNCzByHwewN9Z4vI2n01tUawOOdS9IvVer1e6x0bhrFdLprFsusOFEgEVeQEc9fze/QonMh1hiVpmMzMy+Xy888//4ef/bJyCxEhA97716/fnpyuY4ymumfD68aef+F84X77Q2ZKNPhwBlEBitWJ6rZT46K2rK7rFKaRghr8vrfzEO+tcMzzBPUP5URQntwwv+SYBR21njEMg9Zi9SALHVcdTFGchhwWAMA4jqoJoA5Smc8x+vK7AGCdUylEzck0L18uW+UsaBRZ1GLLT2CWyO6P3X67a6raLVb5QuQrQlY4EpG1lYgwiKu4qqqLi4vtdrvbHaq6UWqPplO65toRBDmmLnqZ+mTX930/pvvHvf5XSlyqsDTN6J1QiqnrXoehzmIpLRMo7A9ayjVgHRmsY4zONS9evHj27Bkijv2Q2Sv3WkI4U2KJceo+wiy8WpAGNyOLppR8GFiiMXaxWIzjOHq/Xq8BQDUFFWLRrmLdcnE2fCnNpgsrdKHvwawXXSIwXUYlChwOh9Vq1bat0qZKaFLyAxUzGYah6zotOWt5IuWBmABwOByePn1aVdXhcGia5uzsVKVU3r59q5fJ54f+il5ZZW+1bdv1/fn5+fX1tZKzNifrz7/47PXr13/2Z3/2ox/96MsffbVcrbd3O508r5Hr2A8/vP7u+ZPHyOl42LVN5f0I/dEQIKKxdHd3h4iUby5tRzl2ndrx4/EYYiCoJWcLggQiavy9HwVBFR4MgTVYW2MNWkRnbFVVliDrP2BMaRyDNY7tBDpaY5xF1XuujJ3DMFotFSkmDvSPwCnGqE1xORG6rwEzAjHHCIhSLiIRqYaiZPEiIQRm74dhGPb7/f5uJ8ELpxR8CuKtJzuR7WHy90F3aEpBoNVe+cPhQMaoHKkhQuGJ55aSpBQZkOyE1KoaByAJaKN0qozSOzCbLGOxAIFqnHQW73te7T0HXAxIfpIBJKXJxhafp3VlUcma2SxCY6YDoFkXzMQXASjdgJqHuLomoikO+k0tM4LveboPfDDOvOz98/mZDE3f56UPvFsCEZ4FW3lN/lEIevLB5ZimXqu81lLGE5ERTgKk0ler9cknLz7/5S/+oW2WTdN0XYdkcOLyCTMjlFXO1GjQ3FGrtsjMxrqUhIhevnz561+9CnEEIWvtOPavXr365MWT09MNgwAIIX7Yb1ZY3B9f8dlbIUs2auRlcCrTFk+m94n6FcW7ikAg8PQn5QfmUZow00KSGWuOs2xk4b9A7grgLIlOeTiu5NECeoQKBWvgWQ54jhiXW1cFIpbr1Xa7PT89U7KMhsY6R0d/zoJTP9G2rXJ8tECbUrJkyACzykXV+isxD05QHzMO/dXV5cRAsTYrDxTiNwoAGWOhVnb/OFTtav38+YuUvhvHMaR4dnb27bffIpIm3zFGdSSQgX21p0TkQ+j7UWU+mTnyFC5MNwcBMBblVNKmo3zhFAeGPJ8KihABsoAhA4Bc15XVc2jcctnqYC7n3Gq1CuNQ9DrUExdGVeEkxzyXMGWTMQUEufOtqipkSTwdwH6/b9u2seZ4PA7DqJ05egzM6Xicahlu0u6GAgMSESLEGESkbRd932MeqasJsapZaQvQkydPdAawEvROT09Lm5xyjDGPWNc9r/98+vTpf/2v//Xzzz8vJQkiur6+VuBht9vd3Ym6apVc1m387t27Fy9eaMB3PB51Zy4Wi8Visd5sjsfjycnJ7e1tCOHq6mq3233xxRdffPb5n/3Zn3/z6tt/8S/+xcuXLwHg7bs3X3755c9/9veffvrp7u5GRAildtXQd0QEICGE7rDv+56DT2JEkMwk4RRC8F5Skq4f+8Ejqi8AhIlelRiSxCjCAmoQCcRZVztjDSLEReUqa5qmsoYgaYYqPrKzlmVi1amTqCpbV44sGnvf/ldNd2sAoLmQBWMmMc0GxZc6RQJRr1a2K+QkL0zj1CYPFySFcdzv9/3xcNgfjocDxIAgKXhthWyayk56t0EkqUWaInWZPJOIVHVNpkZE4MjMkiKrHYsJBFmCjrlBRgDQHivNQUUm6XulQRVTlpUj1di+32I7977FUsF9fiIyw58xd/HpqzGlGALlQBC15T3LfMJksYmIUggKwinJlCx578/Pz7MhvS8TzC3/dEj5mEXedySTe0ICfHAKc89SztHA+4oI+ig7p3xqcg2/xTPdL2ImRau/fNjcdV/qYwREY7U/Fc365HRzcj4cdovl+nA4iKRh6FOMSOjIBF+avWB+ZFP4hkBEJADGgPBy2b58+eIf/uFXp6enzOycG6359a+++ek/+0NL+cAQROShiMnkg+UDSOQj5zhfXELFb/VTmpiq2ZWsCGNtO449sEB2qDyTCFZvavIAYMgaGphHdBV6YUm7JQPXugjqXEuMrNBoociq+F8prGq2Wr5Z7zHNJEqWXFQqldJlDBrjgCZasjGo6GZV6Vh6l1KKPrBEANhsNsWrFRR0wlRJuuOx744xxrpmIs0GMIEAEqckZIxhETHkwE0M3vXpycl+f3t7OwzDsl1opylk/LaqqtxuQTp8Asn6kPb7ozYgdeNARFrMSymFkFRmz1YmhTQPlfLdPnHLNbNUr4mIbduyxPv8PkQyVu/Ek5MT59z5+fnJ6Sb6cHNzZ7Mom65DYcPpTaFfYvJIjxLaa3miGFyC+9GEGmckTuv1+njsdF7N8XjILhZVuFtEFEDmmb5B+TkNQVSjVMMg/VFE1M4iyMlrlx+6x1TMe
SKvMqvzBgBrrU4GfPHihW5gTZu0+6Cqqru7O+bJPprcl6UI+ZMnT0oRerfbrtcbPQVFGhSvbtsWUPb7/TiOf/3Xf/2jL3/0//hf/9fvvvvuz//8z3/v937vs88+u7q+FJGnT5/ut7c//vFXu+12tVgedts4DtaY7fXNOPphDPvtvq7r4zAS2cSMiCEJpSiM4+iP/ShAIIR5MyBZFoycIqcYuMRJhOAM1c5WjhpXLRs0xjSODKqsLwCQtclVhhOExOVWrSpbO8cWUEBSSCmUUFjDaJj19SYQ/WTKQWGJqhOIjv0p9Sk93mKOmDlOYwgYwYx9v91ury/fpRiTH0HEgAhLDEldqNaSdBcBC5J47wERgA/bO0RcnWysMWGMKYqhaQqBJPWFQATCwEpCloQMjBPpR8dJ5SMkbQDR/TMlQJND4glyeNh3VJ7BrBGtS4K5TUAyDKnRTMk9EjPgpPVmjHGuss4ZY4hs0riAYO7Y5lehaRrMCsrzAvADRyD4oXOdO6bpX7MnP3DA+TvzLxQLICKcO51EhEgXSX6LA74vA+d/gio/684AAER5b4CiiBiyCSKiqZuF9x4Znj5/cfk9pyQ3Nzcioj6gqCK/54AFytlycScqt2KJvvjii++/fx3z7PQTc3p5fXN5dfPs8Wm5bCJSKt55aWQejv0m7zudZ57Lq2Gz/lMhUMrjZtX9TArDifV09JkJEtW0+GHaHbMqgkIixXCXKK9cVMyj3VXjXkFj/d3CgFAQby7jUDxK+VrNj5UgU8Y3abVGfflud2gWrQLmzhljJqnk+y3IYq2tap2jN43SK/Mkyha0SDHGMPTBj7JYGCIBAFNmbAkAsHUIYkQQcblYxZCE4eT8zFp7c3Nz6I5nZ2c8tWZN4oVFL0WZPupvuq47HA7dOCDi0PuCpqapCMJ6vfUuhSzPKZIQTYE0ShFOoVE/dIMPRLReTVNsa1V5MEbroyigjk3tgMtqA+oLtcpu8ozIEEKRtkh5ZGSJXUhA97N+dhxG5xwYYua2bWJMTdMcDvvj8ajoMc6I0ylLfBQ0RXeaTrCJIWgKq5ZL/9ATbJtGkeq+71VZWk2SVnlpmqweiUgh8fLry+VyuVy8fv1Ge7E+//xzjX4WizbGoBtewwgAUDGs58+fFyAaADRNDyEsFktrrQ+jBlhN06pjXiwW/bH/5S9/+fu///tnZ2f/4T/8h5efffpHf/RH//W//NWXX35uCYa+/+zTF91hTxKvrq6O/VBydCLQ9DpFgcQ+Dygb2R/7LiSuqipFtqAlCsuAKSYfg4/MIMPgAdhZWzvT1nVVO4NCKJU1iACcEiTtahdEQiQBIXFkjKn0QoAkQEY0MUbvvSQGZIkpwiSVYwC1YUTFN9Q0lMlgc1uh6S/NOjhwmrY7Uee03VEdcHc8brfb47HnFJIPwtERogBL0lBARCrrXNNUlSNzL4xKRHVVIREy6vVq21ZytKfFS2dMEkh47zpUvDwqXZ+suoaUEiEgio5+YmZD77mD9F5/zdyrlSeZWbX9U56jyrnpqMSsIveYPCIiqrSTUR+uU+cllwuZo1YGAUBljjT8h8nJPnCf04EBlURoHiU8PB0pDvg3nQtM067eP+UEIjlDE0mQZ0ROVgg+/njPBwPCVIRQeSyShy9nFjGAHb1HEGOcCK7WZ/7sGHQ1ARQGJEI/jPBeyo+TTnmSaKzhIU+pjIlZjKlOTzZffvHZ3/z3vzs5OWGGlFJdNTc3d48fnWKagGsBUSXPEijALDaZL/qHi6tySxNvhUBr+MoIFRGbhcQ05B/Hceh6zYnV1UlGY3AaTG3nm0bTX80VitvgPB3BZkVWyQU/Ilqv15pVaAcnZ4hbIdlyPCKiB5CyckiJ+/S/iUPwQ5CpdB05GWeBDJqk+GEI4/n5uea41lpdLudcCjHEsV3UdT1diKL1EePkJoloDKPePLrvFQBW6Kzsdd0YAsjM1Xqt+VDi6MiIyLt373CBFxcXb9++DaGvqjqEkKLEGMmaoh+itUy1ZcJT3TfGyKWEoXkDCE0zgrzI5K5UvNcZgwWOy9Cc5mR1Xavecl3XL168UJvS1HXlLAAobZiIhmFomikB1ek3fd+JcKFwcyal67YxeZCtiEhMHrzm9wpIhDixl4/HY13Xx2OHiGdnZyoRtd/veQZ4lr1a8O1yh3vvEzPR1K6mj/1+r5vTGhNj3O12t7e32tut37nZbPQ9IhOYvNlsvvnmG/3apql/+OH7i4sLa+3hcCiV+zdvXispAQC0qKwMF3XGfd+rgJfubc22JY+P1G1mG6uM65TSkydPtne7GOPf/M3ffPXVV//23/7bv/jL//vx48c//vFX//W//Jc//uOfhnH0w3B2cnp3dcnB+6Enoq7rdR1Wq1U3euesj52uvHGWxyGEBCrUQGSNJVsBYYrsYwohRS5SdEKknUJRkkspYSVRm2EECDABWNKSWU5GhY0x1pIxBsSIMAMWXJcAE4eQRA1dQjRgokxlv2mA7UxTgu8pOVBsBQCUacEppcGHNOlTMydIyR+Px9vb293dVhJLCs5YbCtk8cOYUlo4A4ySWKNIi2SNFeskJu89WVPXNSdhBeRYQAiztgZIEiRteGEOCIxowBhgTgwibEya/AKKUtU2m01du3HsER1o2fMhwIso73likntrrA5YbWOp6cBMkUbvUCJSmi8iTt5UUEEryHNEFAFSBXtOCQA0iHzPncnDNBfhnkJUEoa5f33Pjzz0LFJO9p4FPXM3iMiooiAP6r5ze/VbIOgPfPD0w7kY/N67c0tGyQZYgAVW6xPv/dPnL9788B2S1dKkMUYY0mymmw4JBgDlFuUEgojAioxxJLSPHl1U1TTOr2kaBnl3efXs6cX5+WngRJKnyaZ7/4S5HvnBWcz/mBZEPRBkcSjNXTRt0mi98Kc4dxoo4leAR90KahyVeVt4ufpzWopT6Xnn3H6/n1OfirXVbFWf1xG2BcQLIajIH+Zh2qUBSdMvVeTQg08p+eCJqG1a770Sntt2qftguVz2fX84dJqOKIsVgE5PT2P0eo7jOFpL0QctA9vKFXhHWUJo7DiO3eFYGUoxxBiruo06hCfDGDF6EkBHIggRT09PD8c9cLJIACySDrt90zQnJyfqy6uqOh67lEPjw+FQtU3vRyGUKMJYeqwpz7rALCWhYycUEwaA2plhCGoXiIBQGMUSMGLfDW3bbjarcRxFUtuuN5tNXdfLxUI1WFKaGKQ6kff0dMJaFJ9gTsz3dQSadWdNehQhAIDydYmIrE2cdHiGunznXIqqoKvKG6hXQc+6FFa1+cdaq9VWEQlZRBoRm6Y9HA4mT7BWg6WUPQ0RttttjPFwOJycnHAGsYdhUDaynp2Wh+/u7oZhOD8/0/3TNM1qtRKBy8vLq6srY2i5XLRtu1gs9Evu7u60xfzi4kLvAj2v/X6vHC4tmuhl1YXa7/fH/eHdm7c6r+XRo0fm/Hy73b548eLb71598sknf/qnf/qXf/mXnzx7+tOf/uF//k9/+eOvvrwmub56yzEddnsQ8XEMKfb9iGhEg4BuiDHu
9we9fK9fv0UiABQkLZwZY3TcOlBgAA2kUkpNXTXOgSSCydYDwBiiJTRgCEQk+ShE1pALHFgAyDCIIBARoKQk1li9SZVgr1ar8Awi+6T2p6S2M/BvZosnkh3OKmV6o8UwQbJ+HFOUEIIqoozjaJBEkMOE1kJiMjAMHjEsl23fjwbRuQXHVLvKkglTNQSSJBU86LpheVLFMWgUG1m0gCuErErEyEAEQgAJgJjBuYnr4Ed/dnZCRg1d5ojNfEzhAzNrM8hUUFMdlbKNtYxSwmJDqAIakqk5+ocaUufqyrlMITUJBDPIRMAhjDFGbQSKnJ48eUJEISX1jsYaziL8IpNI6NyhftRHzgIjLGXK+acwV2b1JcoENAXlJauQZqhyijAy8vEbM+DJq8L7ie69r3rQQfTwZZiR01y7bBf9YrVqlouUQoqeowECmi3u/IOzibDZMSJYJEE8Ozt7+uzJD9+/Xq1WSbhtlyLdd999t1wum6aBxLknpFL663srm6M8AmR4GN3MVwSyXLMCmIZcSql0cUwdfokz8Y+UC1O4AwVH0ncOwyAi8z4Q3W2IqCKUzjll9+jxqE0sfUEKDyq2JvcgDD7ovWOO0/yipGq6i8Wibdvr62sAUA2scRxt5Q6Hw2ZzOoVmzNfX10qftrbiPLcuJdlut4jS1k27qJmnozWWWMdlO2uMqBQrEQ1DbwBH33ddt1hvNPVhBGOcblIisNYScIwxjL5y1hmqjO1RiKCp6kVb+7FXT//o0aPXr1/f3NwITB3SYwxjDOGYFFxNcRrhVzjkAMAhACtB3RBZThOl2eSRVs4ZFGGB4u1SSrUzbe1A0nq1Wi6XT58+Zeb1em2tWSzamJWcOYs1quNMSccqzLojchNFwRU1gBvHMca4bFr10CGOumKci38ioli3BnlF+KlpGlVL1m1TUkzIMMncNili7DMdT79TU3bdS+M4Xl5e/uhHP9LNoM1FiqPorivV8RjjZrNer9c//PCDOtr9fs8sT5483mzW33zzjc4Jvrq60ux5tVo9f/785ubm5z//+bNnz5Quvt1uz8/PiWi73epB3t3dqRypzktfr9dt22632++///7nP//5F199eXFxcTgczs/P7+7urLV/8if//Ifvvv/ss8+cpW+/+Zq9XzTV7u7WjyOiMMhuezDGubryY9BlUQK5llSQyOC9nuJEWWAZtfALyfuo1Rm9ag5JB2eklPrRA9LoGQkMkiVl23FMPTMLEmO0nFQ7iYgYFJEoqLIACmOC3OQmOJlgKAlTvnYzu/eg6AgA6hw4F5s1rgKhGMfD4XA89MEnAPI+eu8JJjiKY2LmBiauCTPoDInKOv0JSyayIDKRTSlJSonh8voGEStXu6ZGNIlZRKlXyrBFEEIDBikyA2p70uTDtIm81K0gO+APXVp5mIfyxpq35BNMCBBj0g0JWROQiNRLUJ7wONk9IGUWExEgGzKagWAu8UzrrEeLoLRwmlS0AOD9Zcd7SjbM//jo42H6iwVnVRQW8lqUSCsnhAL3Pht+ewasj4/oZH34KIeeIzgDwADaus5Vu1iv16vlxvfDgEcR4SiO7qHwh6dK+fD1DHSoF6QUm6Z6+eLTN2/eDH60ecbq7d3usO+Wi3UUn5IYMwl9vA9xzxYOBCEPSSzXoGx30JBKQU7MfaViCsaoG13PuniC4gsLNEpZZbqg1pq/zk22opH6B89UrkpxVy+epst68IfDQRPfUhhWP1SCht1uh1nlQxnjddWqUIuaHi2pLhfr73/4thS5Y+SceTcxRknRGjQe9YobS8aYBImZaZLNswXqsLYax/Hu7u7k/MK6CGhM5Zhj0b9GSQBgEK01yFrRScMwHPZbPc6maVI6ej8YcovFou+Hw7FHRO9jlAgAEwzg74kIMmOV4yy9IEAlZAEwkWnbtrLTiEYlEyn1mrIA/Xq9zl1AXV3XOudDiwXqUGWSUuEYU4xJ0WubNc7KBqZMsFTk32aRZ8kc+JRiidKU/XQ4HIzxNS41fhKR69ubtm6U/AUAx+NRT03TWfjAUoiItVNbtk53ds5peU/RF91FmiIoKU/jOW1sK2URDd2Ox4NiM/v9XudkxOkRlsul7vCTk5NhGK6vrzebjSbWIvIHf/AHKaU3b970ff/JJ58on2u5XFaVHYZhvz/o1jJ5auH5+aOvvvrKWvvmzZvdbvfLX/7yyZMn2tSuv/748eP99vaTTz6RFG8vLzXhQxRI3IcwBt8YyyH4FLtuGH0EQ4nDarUqAKYulCGHZhqkHaOPkkTE+4FZqspWxjprJIYYGTE6axigG0VSBABrsDLinADQOPqQpjoiUdDuJGtV7QkTiPNpHMMwDFVdM07gBCIia2stwoREzigveilJlB4wN1YiUJosQgTvYwpRO8q2N9vtdtt3HSfRtBhAnGNjDIfIzALROQAIAigiQ/DLti6FDwAt0SAzhBhSSl0K1loGDCxI5JwDY9M0FeYjRj7GaCyFEBFFsZDJWKm9/QBwnp3sg6pwoWJoMEqIMpc3nmkw6N0HAMY4Y60xjogETQaFWUAgMVhKk1q1iTE2i6WaYlENEwQAEO2ZmSe/v4GWO/NlxSu/9897nHnuxRIKyEMG1qzWUODY4nf+6UpY+qDCxiqUp/nh5j+wmEU0brFYnF08HschxDGEMaXEMclDZLwcbrnYIiIMUxcBijEGSJ4+ffz555///Be/3Gw2jGAqB1K9vbw6OTmzlkTYGDcOnVahAN7fQ1P1YHaoSgGYHUAiIk4gE2wCMevsAAAAS+KUe+pposJbPVg9ZU1ACxqpLaea4uiYOcWW1QvGmTiwXqrcdWBEpOs6zdgUSwQAdZ/MXAScC70LMmtMf3G322kCpPGYPgMAx+PRmgqEvEQQenTxpKotIu73ncL7iKaqKpfFjY1BEE6CVHj/LELTHaK2u2msH2Pf9+M4WleRcWZWATII0acxeVV29YMPIWy3N7c3V9vtrZ71OI4AE4CvQ5Oub24QzTB49foK7A/DqIm+iABQjCyStKUKs/iUc7X3AwBU1iaAse8AoGmazWqxWq18ilVtrbXDMCiA2TaNAoCKympEpcXUYRi03olIysmKWYLjw8BRMg0EcxMaqTmTzAWL05bQ0ym59eCjiJyfn5M1N3e3mr8+fvyYmfUwNC9XTUrNocvP6TeX3aW7SA+73FB1XeuRFy84YQkpaYatn72+vloul1dXV8+fP9dpgFpz0a5fTZHVMatMh2qSvHv3rsDOb9++ffnypY64IKK+PwKAxg1VVWmUo4Lb33///XK5/uKLL87Pzz/55JP98aDp8unpRnfR3famtk6LNcft1g/dOAwcPAAMgwcgIOt9AKDRx77vGYSI2rYeR9W0mWzl1JadZ46LiAoMaEeBxlJpaqgjAWRA7yOzEAJFHmBSmQhRYowM6tdDSgIAdWUBAFkYwXl/PB5dXS1EyFmbBe9oagZWeqAI6LRrlqKICJNvmMeRMsl1QdKWqekeCV03DMMQQkyRtZah1CAfIuaR9SiAhIIJQ0qp73rQi75s66qq9BSsrRJzjNNoc2bwPhK
xtvoQkgATWRQERFYaNCcQVNhTt/fpZu2cE0jamASIgDouk2HWijO/R8okipJs6IMQ0SAzabJRbo2yLCYLPpcMWGZJtp64bmnJyLY2iajNR8Qi5Dh3MbPVnnck37/6T3nMTQF+cFTlgQ8f5W3/ow4YlFsPAIisFbwPD1eyoqlotkd2tVqdnp4d9tvjbjv0Joye5P6gkUAHFhcdGQBAZXrdRyISoq8b99lnn37/+odhGGzdEFpBc3V5fXZ6/uji3BIwA0ie1SzAHyzjdKgZiIbsiYmIOepCGWtKNKD3cIwRAay9j9HKKRtjACaDq752uVwWPipq9yeiWiI1yiUgUuaqFqUw6wmUXEqRZ7V9+j2qNVh2m8m9cSKi9drSKOy9196eMu9BmVM6fW8Ivq7rdrVcrFcqva1nNAxdjNGSodxN6JyTrEWr5h6BBKdbSN0Y57G4mjNZDuJR5S9QMIWQOAAniSmE4bA/3t3dXV5ebm+ufRj0V/w4GmPIOGNM5dzJ2cnrN28Oh6641bquCS20VFVV13VFbCjG2DRVWXkAWC6XXTe16TtjjTG1M4vF4uTkRMURRQStUWLzer1G5q7rTtcb3mzUmUGOujR0zas3TUeoqrnmwP0NXLLhgn9MliImZjYWBWxKSUmbGnhpMmpczcyXl5frk83Z2VlbN8YY9WHaI0SZ4l7yqkJEpyzr2PW9FvJV2Fkyd4GIJNd91UOr4gpneou6Un1n0zTb7VZDQ6WAIaLCjACgeh0ppbOzMwBQTvXZ2ZmIfPfdd13XKbqgua+IaBiEiNfX14rGr1ar8/Pzr776KsZ4OHQ3NzcppfV6KQjr9RKzbILm8ctFO/R8eTzWdb1NaRxHizAMQwJkoJQ4CWt9FADGcVwul8653e4AkkTyZEYDgpBiCtrsB8l7LwB1U1ljJkrPJNZWMcCh9yyGGREYJUmKVeC8E5REyAgwYrLjpJljBRkhhHQ89sZWSLYCNMYxgwEWxEkMGqYeGJkGEj6wSIJQBrbONxUzA4MkTklU12gcJ9EYnXmnKURKPPl4VEEroSQYIqEgsBBWlpJwnRgAhNE5Dpx0zAwz9/0IAIvFwk4SQyNaJxwBDYBB5shqGoGIyBiWREQ6BgaRNGkRvucJF2+gKY1qY8HMVxXLLxmb1UeJTZVYXp601slMSVDovktCREiAcpslsKjMftFL0C5jzhIZH3rWcjwf9xS/4YEfKGTlJz/SHDz3u+W/k3/5Lb/x0YdgHlYtBJAm7UEQ0GHGXEKMyXIRGQBp2+VmsznZnPWHffBD9CH58NtOVXLUktHklOnp6/X65cuXf/ezf2its8YoNeD6+nq9Wrm2neDZKA9rDR9hQb8XrWCefUJEtavVPJXklaZ6ktUdM5m22eBen1Hx4nphtsP0t1QhSO+r0iejtcZyIUsVWd9T17XWtzjPdnWzgXSSk2ZNdAqnWuf9aXiLuRaiKV27XJKzEPzl5dVyuTDGcEwxxexpzHK5TMF7711lmKO1BMKaCOb9JIk5xolwp0iZkrNUUoqZgWOMSjHlmLykCImPh9319fVhe9hubw+HQ4ijiPDk3cV7P4Yjka0cPX36FIF+9rOfd12HIZBA4yptJVIuBpFVJ6cPzntDo5m2bStLzFxZt1g2i7qx1m5Wa2MppRhCAIFVu3BNzcyQ++IcmejDYtnoJkhZY08yNT3OGrvL5sGZJnOcFDFxHqpHiSmlaY67gMA0Vuvm5kZ9xrEfNd1Ugl4RxNAsSuO23W6nImhlJ8z27dRVrEwCk3VJNXABgHEYttvtl19+WTQ+NapLWesgpRRjiDEqg+/q6qp4a8jVSnX5L1++7PteSdHK84q5vU0/2/f9zc3NZrM5Pd2cnZ2pMb24uDAWmbnvRp0RWdf12dlZXbdEpCC2j0HDgnEcz85OPn3x/HDYEcpytUjWftP3dV1DiiGEu25cLBaREyeIkb33vR9FRDvZ+v5YDNx9DjApLoOeqUZmjgwhYq62RgZ9p5DlBBwjCFtEFg5xavBDnMiuKs2BGI0xaEhYYoxd16E1VVNreAcAhFTEnhGV0/S+nbs3x7MmmeJ9U0oi1vs4jiOHGEIchsEPo/qslBLzZBg1jRRmNtYzo4BgJBCDgqMXcQwxyWQK0HtEk1ICwuijprDj2CNK0y6NMTF4IQOQAI0AAbPW7RBAA26lCMQYXWUm1s5seLw2/pVz0dOeKzRwJkOllHjaqNP54qz1jt6Tec8BaEkCdS6qEpB1K+qbF4uFyQqGuaI7lbPx/R4ZmQcH8yvy/qX6DY8PPpiLpyAfrdiW9+tP/w87YAQzrfVM0GR6Scvw+dCJcns3J+GkVmaz2Qz9sT92HOLD457+y+kBsF4e1hlKkkAqsp9//vmbd5ddN6BFImtM3G73h8Nh1bbMrDMrLOUkunRYA+CDlX0/6rmPSsqoCmtTSi7Xm40xRSxNV7CQYjjradg8MclmrfCCWyqhpvTvzjNdyFJckqWmNK0sOxKyvhpnwvN7sIwmLgpW63D1lAfF13WtAlIpTnu6aVqiqSAXRw8ArrJ6gZqm6VKRYxVmTjEUi6wuk5kFJznDpHJUzqpFbhchhGARx3E0OphMUhzG3fb29evv3715o1QYMtA0TUoqFjYll5YopHS1vWoWy88+++xw6P72b/92bkmdMcLcuAYRVaIPSSrlmQMYxEobujhYu6zruqlqg1LX9bJd1E0VY7TG2KysqQYXAXQsVYyRzJTs+r6vmkZ3RYxxt9srCc5aq5KT8NDEq7mkzL8rXrAYmhAC8P19jrMJaIUSpcoYF2fnu91Or6ky+LhIScyGTMwtFCIqLFzIAbrlNCvl3K8c8xgW3Y0mi1lqYKFPqoaGioNqSq156pMnT7QfSSM5ZXhpGKFK97pjNWRZrVZZLU6MMV1/ePvD1cXFxXLVXlz8rmIYNzc3b99eAsDz50+fPHnSLFpNekTk7u7ueDz++EdfvtrerVart3c/6G7cbQ9d13Wd32xOlIvn0zDGoAZX3f/xeMxrMt1cOqyFIVPYEOrKEYGxSICYYU+KMQn7GI2rJWEIgsJgDabkJZbeayJw1sYkMHqJxjrCqmZmNFESo3WL9WhDNZE8lAbLDCKsV5w+kgO85w8KOKFmBIHKM9lbUCb8SSr7gRAREogBM8Zgsw0UwjF4IEzCDKLtJ8wBIIFQSgyYyhZFBeoEmRmR7xV2AAEAAElEQVRFEqBI1PkTBkiYQSSyENFms0HExMGB0WM2mHnAD1ncAADAuXfmfuuWu2bukDirN/OsK1dVGTT4JnrgsBim6UMxS15rKjKXxpyWPYnIJNZUFlwP470766NOoRw8fpD45q968KmHZ4c5gLtHucsV/x+HoAHeY0cXvzV9qcDMB1NVVcH7JMnZerlcxtPTcej6Y+f7YX5AMKXLFNN9zj4HigEgpoDGIuJqtXr06NHPf/5LJOsMpSR9f9xutxcX5wbB+1hVVpf3vUBGr8FHH5LrcyUg0mdWq5VkgfuqqogmMcJCsyqnqWGv5i6ais13Es40sABAR9Drjao9RSLinCvKi3oYWgVUCF
oTDmWypDzoQ49Tf1pV6ZlZtX+nBGgci5zyyeasaZrd8YhZCNMYKyYNwyCQ9Ncpq1gYY2JUJhpUVTUMnbWVMUaQEdFYq/mKD4mZyVkWDCH4GKDrXEoxRj/0IuIIx+54c325vb0NwVuqOAWOkCQwc1Xbqq5xmnOc7naH/WH73XffxTAB7KULSwRVk6RyTbkHOM/50mOuqipxqK1bLBYEaB1VxjrnEsftdrter7XCqj44hGABl5tlEYRylen7Xkv4zpijnwSot9vto0ePnHMghO5jtD4AvQrqFG2etIGIpOK0fmrr0g3g6kq7jNTkOee6rtNrquIYCg7rcWr9W51r2XJ8L8AyTd3Q/TYMw2q1mjMMVEpTadX6Q/pVuje6rvPeP3786OTkRH/RGHN2dvbq1Svv/e/8zu/s93sR0Y5hFdLSm0JvAV0rXYeLi4u7u7vT01Mi+sUvfrHb7c7Ozl68eHF6enp+fv7u3bu3b99WrlEg+uXLl1XVpJT2h9ubm5sk/OjRo/Pz8xBGPbA3b96oZ339+rVzrj8Mh8Ph7u7u4sXnZO2xPxLZu7s7/d3FYmGt3e52fd9bWwEAwWRkuUj4oRJ5JmAfETFJDmEdGdf13eFwbJeqh8oIgsgpeGBumqnVxFpLmESAEzOijRYFGAQNpQi2csGnzFGAYkYUvyaiD5PgElrBNDHpXnFdnUrwvZKEvQ+KyoqI9179RUbLILEASEoySkgpgTM4KTZgYLYpJY5A2BiLBiFN2kwxRmNFmcO6LCpT6pwTQIgxCRAJGgfIwsDMYlCp7DkiKX207/EhCk3p48Y2ZR0SnFJeKBVcZZaU7zd5HknZ/PkWAEBQuEgr4rog2hqq25JZ26gmqDKB2JlOtbyfvD7AROcvffQU4EHw9D6Dujzm7K35t+nf/xMZcPaIeD8NEQrgAAA4JZr3vofImpqZTd2aekluWS/WJ4/g8u0bZraEWr+ckkhKAACCAIRWOGphn1AQwRowKUVCefn86d3N5d3dHbiGalzWi29evXr69Ol6vSKj8uWACIlZWBAJhADRGCfsP4xENK4EEGNc1w1KpCJHCQOHaK3VwrRx1HWDc864yfkRUYyTb2bRKOw+G1Z4Wcts3dCrur0gGVdVAmhsDDGlmGSCE0JiTEzGjt5rf7rqFfgYXF1t9ztbOUsmZdRU7bIiQpRJXn3f28qhIUGIMfqYtvvD/tARGc1sQEtGzKtFzRzIgKuMcyal6P1QW5Oir6oqjN6SQYE05XZmHMfVct2ulrvdAUUOxyORNXUzeC/dcPHkBBHjODZV1e/3fuxDCBy8QOqPh6Hr6roy5iQlNoE03THOkXGAtFmvY4zBSTOCsc2b12/RVMMwHPuxG7wyco2hkCSEBBSttQnEEJjK9OPYVLZpqpRCVVlCU9f1yWYFKiu9XMQYnKmatvVxrOtasAphJAPGSp05q0Tkw2Bl8p0K/HL0yA0Yevz4UT8Mo/er1coao2Pv9CqrzLJzbuyPrq3Bqn6vb5oGOEoKtq7HcXR1FWMUgKqpp3PPPtUZgcSOjBXsuq5ZLg6Hgxo4RfU1KiqFCcWQ1TG7qWjHkrUkFcglIuFECJxiXbUpxaYyzgBBEtGONS8iy+V6GHrESflP+4zVzbdtq9od2luvp7lcLsMwHg4HizSMncaFYRyaplk0Vdcdjsc9QfTeX717IyLXcVg0VkTWpyeff/5SRDTL96Fzzh27XgTbtiVj9vv99d21T75d1G1V19XSIVwfj2/fXu52O4N4t+/6hJtHz87t2g/DkuXY3X36aN2uGmsqMHbsjsd9Z6lOEYlIleeZBYAiM6KxYMZ+Z42zYityECOiMAeBFABDgMDgxUhKWkmxSBUYkIqFExsUiDEsDCZAw0DGjJIMM2uphSGluCbyQ2fxBFIEI2ytgOgcRJxucRKWWVIx4aeAEGMkAS2XxiTBx+CFGdMQhq7z3vthHIZhGIaQfBImSxwlaT1YpszaEQbW9DdFFkCSBAbNwUtTWYiIlivnhBgB0AAJ+JCQLACMPsao3ZUkgoQWkHS8vR9HIFwsFn4MCMNmsVxUmvjqzxKiUUAKp7OS7D4tswr6YgIGYcwDCdRyphiIQERCHJUokOKIACAJEIypiAwasq4mU5GrBAyQ+hrUONVLlzjF6AEghFEEz5bnZDOBgywiSmJCEGEDIHCPac8j6fdy6/IQgYmenf1qyZUpd76ommZ2JRocKF6gFdj54Pn3+27+5zLgcnTKUb6Xg84HLQ/PjViStXa5XBJwfzzeXL1Tllrf94Kk+SIAxBiRFL4AzfXzF6tKBmmJ2xCdnK6fPHlydXXVLivtrzDGvH37tm2b2hldJ8pAHyKKTGHXbxkHibPGwTgJQRuCadw6AJSGXUWHiMh73zQT4hRiFJG6VunRKdPVtAZm6qb6Ks0ajYo2J+TxdgDAnEqzpvbyppSWy2X0YYYGM029RvcDWZumsZVT7YiyXYio67rtdnt6eqovtW2rUw0g91NpKqm45Wq1OhwO5+enXdeRwXEctfWzaZqUYVXnXNcN6CaZsNvb27Ozs6qqxmHw3ofgmTmGIKAJgdA0lIlcnl6nMIDmbVVVcQwqhXh7e7vcnOi5FHflnFMemWKz1t5rWunxT3/DNCdqtVqVoFiX12WN3Iw0+Bjj0A9KEs4lNwkh1Dkpt9YGTpJbHkVElbl0q6SU9NgUtuUsXJUL1VRwXcy1BgVIlB9XUjH9IRFp27YfBskzkfRJzr1tZXOW7VrqrwCgmPAwDGQsEQVmxZB11yk4fDweUURR6JTSfr9/9OjR119/fX5+ru88PT29vLx89uyZ3lB6XbS5XLeuwvW6mBr5Tbw2RF2Btl2GEM7Pz8/PzxUWChxSStvt9tNPP/3iiy8UudEeVvXHifn8/NzWlTEmcRjHkQAPfdc0zW63SykN41jX9SeffDIMQwxehRsfPXq0XC+EOPjUDXG/3w/DwDzpyOadX0TLweeUaCJw3Hc/TDZKr4XS34wxBKiorA6z0u8JySAJI1iAXDodERGAjXlQlnrPDErpw8EHUCeyMN6jbswSY0yBY5hgZ218UOzheDxmvbn7PVA+q//NqhDvG7fCCWDmyjm1P3Gm/wO5CqZvbuoFWZNSZABjHIN478lQEjVHSUQgVy4+xG/nSY5+fTnUgqiHEDglRDKGECYxLJW4AgDCSSOz3LPGGMjTlwt2IAIKDYoIkdXq7/RDOgvwn/AoeeqHDyJKOaEtp4YPIesPU+fyfvngShQf/D+ZAX/s8KddNXfA5ciisCECEbKVMOMiPXnypD/uL9+9McZZq0O6YjE0CUSERXh+6JKvYUoJCQWgaZqnTx//6leVWsBx9FVVXV5ePnp00V6cfYTahqy+77ecB5HJzZoTTxXzFB21Gmrlyz4u9Vc1hcrqUDMEGkwgagl2ds8Q0aTXoUgL5hKv5K6nlCfZ6am1bQsAKuVagsfiyHXpVBFQj7YIY00koMjW2tPTUz2F3W5XVVXbtoiIJJqHEZHKZcQYow9NW9n8gKz2lVIiNCJye3urUV5TL47SE5pJgzkrY6iLi
jFmhQFU95mXyuo/tWNVsw1TO3LWVHV3HF68eLHfHb7//nsVsun7vnRhSW6BcM4Zg1lFxFbOEJFKL7VNpQNuc609NU3TVDUiWjf5MA34jHEA3DQWAFhi0dzGTOgoiHGMsXKOdRilNUVwivJ8w5QSy8RpKoOWlaUiualRr6nOJFZKsMZzkOUIFNhYVU5R3xJ86NG6rA5t8kSQwpm3eQYiaBHXVXrMx+NRBaeUf2eMOT09PRwOBnG336s5896/fPlSG96UKL7ZbK6urhTH1v+GEFTuQxtt9dbabDY6DfDx48faqrTdbpXi8OLFy9VqFZmNRQ0K9XLc3e2urm50OJKSpTXRH8cxpuRTjDEai8mH6IPGOspAVJhHm8dqsE3TLNbLunZAkoQZRQcphpB0s8E9PIcx5yIhRNAZ7zAxegSS3Ava3+clej/6FJNPbVsvVsuxHyJ7AaXjkSHtuhGRaO1UEXDunvAoIkmEpnqWwL0Y371ZK2/W1xBRizhhjH6MKSXvY/KhP/bH41GHRWqYqC7qgYFDFpkaDmVK1SYdDcnCvAAUgp8UoetapWoBzSToOENTOQ900fHGgmq/pw1WN/VivUogLOzIpBSIJk06eYjKyuyhI0pJoEyiwPzIe39KAHwc58eDiIZUVVN1Ix54BGYGRj9OV9baarFY6PoQEeDHBxOVb4AHruojr0KJaWavzt0nPiy/6uXOlcd7q/5h+lse/3854Pufzz6YkFgehAaIBkFQyDpK0RPak5PT589f9H1ft1ci6XiMmCexO+dE8YH7b6ByDYVZJBFVMUYg3KzXn3/++T/84pv1Ygk1E2Df91dX16frjXVE0/mrx5qTHT7COC8PEckmWIimIkTpxDB5JqtaPWtt0zTeD8YYlfLRgy2FZMozc7z31lalcyDl0WlqczU4xTw9Pv+QalxYhRkLaGlw6jwpbrhwahRCVBelgbkmUsMwOFsXiUrV4WvbFmEap1O6jZnZWaqqagy+XS609xQECQkEm6bxMTCDtWa/OxLa2lW7rtMEq8pjfTUIAAADSI4IzMgJpgIzWKst2qBZ49TW3FRgqK7d6enpanPSd8Pf/cPPb29vT09PJ3XurDcigswRYOo1REQCbpql3r117RCgdF4RkTFUW6c5t7VTn7TWjdTJNbXVLL9EQrqY2+22aRrB+/GiiXmxWAQ/ljJ5Cc8VcNZ3asW93GwaBvk8V6PwqPXSmCxAqIs2DAM5+/jx477vtfyv0U/MSiwFNSnHoHoahUVVEIIinJkyr14DvkIX2G63jy6eODJN3eBsDrx6xNPT07u7O0UFjTE6llEjHh01qLXkvu/10ihun1I6u3hsnHv99u3l5aX2KbFEZtaZpFN2KNS27Xq9rut6tV4sl8vFYrE2Jgpz9DFGP4zEy8u373TMnIpja69wCP7Fi09c47rhOI5jSLy92++PR/Wy8DDDQNCgW1iRAKAQY0opMVtEdbo8CZgzsKCII6uBfooSUjppFsvNOoSQAoMWT9PEZbcEIlQsBk50EAvZ7PI9wwhnrqj0z6RSONTsTYQlQQwcY+TAcfQ6MKPrumHwIUUtjKp9nzvg6WvhfoI95GGv+t36vDEOMUrmW2mtIbCq64C1FrIDBoBkQvTJWgtkYoyCwAwxxkfPn5WMIvuVBA/1JTAzlZh57ndkht967wl1tAmH4GPu4i3nVarLNM32ZmDt96ayjDHGFMZ8GMoFmaaOqv7Ge95K/292CWZedvZemaVuE4IIyB+kl+VrP2RgiaiSp37zpA0AH/hg/CcoYX34KBTwh1nvVPyd/JyIIAEikLbzMgChqypEkUS2qs7Pz7d3N5d+LLG8Mfj/Y+5Pm2VJjuxAUFXNfIuIG3d5a+ZLJJBAAQUWq1lksWV66lf0D22Zr90zX2YoI8KekWkhWeyuBU0UUIUll7fdNRZ3NzPV+XDMLPy+lwlWCatAukAS990b4eFhbq7L0aNH53kkd7okJkHxRFWlzNw2NmY2oqZpvvjBD96/uweaF+fgvb++vr5/8uTy6lxNsb3r1SEhM9TuHwtEL+4HL37joNlW20MrnItkHdAi9k1OixPN85EfH6oaQnKNr6GflCOVAQx1EajIJ1mZG58eKz+w5OmHcF2w7wgRkLLUEiacKz4UDmC73Z6dnYHLejweMQwA/kM1k/iHvkWqsVqtDuPxbL3BBR+Px8vLy8N49EXjCZ94fX2zWq00mRP/cHO/Xq+vrq4q/O48u1yzwcRDx14a8SGE+Tih++U4Ta7p1u2aiPrVcHNz9+LFi4fD8S//8i+/91m3Hobd4YAK9zzPzolID0/Qtm3XNCBRqypZIvO+yQduI2agVvoSF4VR5IiqGtMc0zy0A3qmsVyAE6Q8HbjLc8gDQup54LGQ0Q5d44vyFPIMKhEY2E94ZeXoAQY3zLayGWXdaZrI5TnzyCZTSogn6jaTxUxTLgiNqqJUXN1wjBHOmwqD1JeOdtDd8TOmPrRtW0eYAMTD9WCr4EoANniW+/v7aZqatjk7O4sxXl1dXV9fv3v37tPPPsMK/OY3v/nlL3/58uVLkB7mOXz55ZfjON7f31d62tnZ2Xa7ffbs2bDqhmFQs81ms1r3ZtZ1Xev8WCzDuD8ue6LWm+Hy6eXd3V0IiZ0/7u6Ox1HYO8ciIcRkKUG3Ry2RcjIMss3uCyMszUwlE7XyYhZQIUYTNTIRoaZp2r5ru4G9m2JCt7w58EWY2HMRaRcRItZ0UkqpZqTa5PJ/Btf8yHgax3kyYwjazMcZHUfTNI3jPI4z5NtwKIR2a+q2SKk/sO+W01smkkAgZkvSZClFnZtkzjlxqgDuFwR7YXYZJXJBY4pmnKtpqFZU0K58KGzsqTtu4UpPV1Vh8LxRhQAJ4HstHXCpyNYbxESmifgUyagQk8bj8ehcg3C271f52WRvp7WlTE36aGL8Bz9861G/oyBc+yhRXp5nGUDQIhz5+HO5NFP9l2TAWRwqf5hJaRAmZhZ3kscUccSaohJJ03RK3HV90w2XV0/v7u6a4zFZmqcJRkqVxPhjRL6Gk/HUYpvOztZf/PAHf/7nf14Txxjj9d3t9nwDFQgRYYY0HbZjLOfN0FA9/yIgYipy4cysmtDinXmzhUBrhSOD/AaFVSKCq6s4ITOjZZYWgyq5DH7A40SlBFW3YEoJGXDtKqkwuInh/LUsB6NcDXFKaTWsKyAZQmqaxjTXILnQbkMIu/094OK+75vGWy5/NtW5tm3b9p1zDkrLIek8R2YOM/TnaJrCdJwcu13cxxjX67X72olRnGcRKlIrdW3Fe5/qrXRiLCGlcRwxRZjFI2DYbDbf+973fvazn/3v/8efv3r5yszmEsqkxXjj9TA0jRNp2rYVzpJPIQSE81lxuQzVqNVT3KBMs1I1yoTJWAZJVdy76zpyAt8D0D6lZJpjJtyyAq078DBR5YWeF+6mLwIpWHlwmjATEM08T5482e+OlmvqB1aHsYAYJYRviqxaFybSCrKNop0VWdMYIzQIURjGlQOTV9XLy0tMShjHcbvd3t0+IIXF1m2a5v7m9tnLFzc3N3Cu
qorixd3dXXbbRkBuBt/BQzw8PMzz/Mknn8QYv/rqq3fvrr33//yf//PtdvvmzTfwx3/1V3+FkQ8XFxeQ6bi5u337/t2bd2//+Kd/2Pd97fRT1eP+cIiJLDnmN2/eIEoAbDNN0w++96lavHu4bZrufvfw/ubO+46V5hhFlPkReolHKSqFEM3YmEQoKcFWOBYzi5rQ+eica1s/J2Vmx2rM8DrzPLPINM9t4zSZkDgmgb6/NE4YfQq5GtK2qKP5R20ndjJi/GgwO2filapSjGEep8PDYbfbH4/H6ZgpGjHGqKe6ciiAGc6vduqKPVkzy2wjZmeM8LeFSeFSnMoWyfLU82jqNXuLpDpNU99zjd1xzVdXV3UmDRXRAi2qBsClTqHGwhUR5XGMVMJZ59hU5znAaNX7ld9evVeR3PHeE3GOPFThTs2ylU0ptW0PdIdMIqkj9wHwkFfsowZX5g+TseVfa82lnsT4w7cs31sf0nryb7lFJjUg+C+vAUtNhY201Mdpia4wsznREE2TiJi4s+3F3d3ddnvx5MmT43G/32HymrRtO84nHe0cOjFzGXqTUkqa4BLgeJ4/f35xcXF3fZfDZLPb27vd5dXZdk1Qglsww8p1fctRPVlZaEGPuBeuOWiFjlVV45KwE2rXB7ZjrgJQ9rKyINDjS81laFL1x1qKT4gHm8bVr1whTbZTdzLiUxQjcfHIvSo6ut/voZkXQmByEEjalx4k9GSD5oNTocjUNA0x+6bhsvnmeQ4hvXjx4uHhIcbopBGx43G6vX9omgYPJFhdSAgOh0PTNDEGjz5EaIOMCkfinIAd5l3jfXLOYVz0erMl49vb27u7ByYR33zxxRe3t7f52uaZSp8Vnn+AV4QuXjZX5hyP+0NqGsgcVaQBrzc6jf6uTA14RHzxuhQPDw+N78hl7Ktt27bNRKoYc8aDlYHnG4YBEDQO5Hn1r1oEmXF3EMPhN8fjEaUBVW2a5vz8vB16NEq1bQt5Z1wDl9oHAj64c6xGTdxpgT9hGyCx6LpOSFOZKYJUGLyBWKShEf/t9/vz4+iIj8fjxcUFesDMbL1e73a7u7u7s9X68vLy7du3zIyWLUDlNzc397vdOI4/+MEPNpvN/f3927ev3759OwzDp59+Oofx+vr6hy9+cHV1Ja5BDA2ggpnbtn3y5ElSFaG+7+McLKaubV/vfou68sPDQ9t2h8NhGAYWubu/JyIS3u0OKanzUhtXIJEmAuadmVmMSdXmENCDK96pqjJ1Xdc4bn1jZlNITEyOWt90jamqsakFVdvv90oWwmRmhLG7hS2Fpe67PPRptRrQRl/dWw0plg4YLpeym2HNjfUkIhZtGsN+f3h42B/3hxDyFNeUkoJLbZKMVT9EMpOZWwCkxT3wMkMD1cOMjIXEJbN5DmbmnHnvvSNVTY7bwtqt2zXvKFXv5enTq5oILZ2NLsRhqum2chEn+1uubZ5nLW27XOaJ4ZHJ/S+MTho8wlkil4gcERPFhZIgZ5W6LHatiRJhVv23+8iPzX79Ov/ZF+NlQqzlhn7wLkMVgai6v0UQVl8ktJhj9I9Bwnp8CYS1I8LHALEUIxIhEVCuXNtcPrma5+nqybPXr18z7duhD+OxbVuxxHLaTFT2sRSyKxroY4xCLMR927z65NP3b9+tmzXs1MPDw7ub64vLbf7uSkmUSyr2GIh4lARb4Tfhn/B8TdsK51QDuwRKQEgFfBlfWCk5ISSwEsAsQGLaNE1apC84OZKnap2ZeUlbJSI0FyGdmscpzqF2+sKZ+YUOMO40AoIQQtN0ZhzCXFBTwl8rKWwcR99kz5RSGseYUipjCY4ppc1mM42jaocRDre3tyklJ839/T1YkbvdbjVsjofpcNxhKQ6HA+DKeZ5FyPsVvkjTdJpsF47DMFByMaT6lbfbi/1xfPebm9X795eXT0qrcXbbZ2dnWB8v0noPsHe9XjtmTN0IITjBpmNWjdPU9x2GgFbmDhbZKNWaOhVVbdgXOAy4JeAKH7ca64nXhlKfVCgYP8SUqhAPsN9aoQBAgiupjn8cR0wBgqYjAhcUxeEO651FBRpJMC4eoZ4vsti8ML7zPEMWeLNe1+zEOWfpJH1VRyxjbAP2fGVcw6nf39xePn3S9/1vf/vb58+fY5NcXV1NhyO46yC9o+QfY7y5uUlml5eXMc4PD3cpRTPbnK1++9vfxjT/0R/90e3tLaS++mE4OzvDVe33x4bp/Zu3Dw8Pcwjb7faz733qvW+7Xpi//ur1q1evfvGffg5soO8G51zSwMzs/M3N3WGcNpvt/jCT+Gk6xpSYIemI1EdTSkoOyRezoCMST3Hf962X6KY5hqgzUgcRaZzOlMEzRzrP42Hcp5Scd957UhUm45NnK8kfNU3TD20HOqH7kP5T0jvO/GEieBQzy3OJgh4P093d/f39/cPdbhxnRFFaFF7NLFFNYcH05ApApg9yuGVDCpEx+SYXIGLI882I2Xkfw5GZieDBUwRcqZjdMq9Wq65rYDeapgGUUreKFZWY4lzzN6oHl2tgtbRIf/HYUq3jZnRdSVHJrIdzC6n85XriJgLmQS9A0zSaiBwJu2WdOL/3MRpsCzmO5e8/Pmpc+2h51ciJfeR9qcYi6Dwv0bAupGo/eMs/igMuSTA54tyVjxhhuXbFionzrWncrLf77T6E8Mmnn+73DykG37Uppg+yey384WWAn4ouKDPHND99enV5eXl7czcMAzg4b9++e/78+dWTrcU8+11NXdYsTKiKl9U5lVeliBpaKQhVjIWINBFoJqhkWFLYxJqX1DIGF2IUiKw4Q0wRrtHKnHaAlrX/pObW5+fn9/f3oCUz83pYgaSKBem6DilmXRARwWRfKV35IaSURhCOcAH7/R5tKqgp4qbEmP1xSqnr2lCm2IYQoP6/Xq2wn9q23T0c5nnebrfim2meiWgaQwz38xymMQCl/MUvfoFBAsR5xvvZ2TqE5L2cnZ2/f//++vZus9kQZqGIMzUSPd9eTs/Cmzfv+n51cXEB0kfTdBAfvtxeog0MJhvA2cWTJzHOh8NhNXSQjxCzi4uLGCMwQC5lKgCYiFRCGaOLbADO7O3btwg7drsdKGzr1dnNeAdfiMGOwG+JKIS5xuxmhsCrRlTVSaMUwgXtqHToys9CXnt9fd33PVJJaPsR0WEa7+/v1+s1mEdIT2tkgGtOKcGZWVH/wAcBNF6v1uM4Qjrj6dOnKKB0TQOeF/YDNE0rw5EK5+X+/v729pacSKHHg4Q/juN6vQ4hjON4dnaWihYNFvb+/n4YhourKwDmRunps6t371Qt/vjHPxahlMKrV59Upt7t/V0MiqDkbL25vLxsh56Z265rfGsaRcRUpfGfffb5//a//btV1xJRbvRiipqmaRrnuWm6aY5kAsrh8rnGo62qlLLljXmuO+ERaNt2PXTBY1TwYXcMSVWoqOVQcGQkogUUaRvXNF7nEGNo2wYTcEval5+R7Xa7Xq/JiQg5x8UauGJVOKWYlZPNVGdmDiHNxzEli0Hv7+/v7x8Ou+MEoWly8xQrkzelFBZAdIG
mgsbHUt9MhEBJgYLGhkgEIpwiHOtkIap0oBE7MaetkEnKpCICaZ7F0QhWvbFlgPW3hcBdJtOrRAXi06WrjrPNKB3nv0CcfGhkpGDgn4mu12d3e3efbsmSWAqZSQchUbxAviV8zO2dkZUEi1ANXiCpvmg84Df1DTeNXcLLnve3jbuI5lxE3OYsBjjNY3PjSu7UKTuU7l4uLCHfC3EVSUXMX3zG2yBQwNqhUzMPQfZpNLlrQp1CjMuZWFZU+h16G2IbkwX1dXV2CyhB+PgMFvfvObv/u7v/vd7353d3eXUmLy3jWkzjmCTDfrx5QoEfngjPupaUPTBjxGCGGaBhFZruaz2Wy/3yeJ0zQZP6VMUSRbnFTcOPzJqubGcYzjZCYaZ9czx34xoXgqKGAMFxUDzqS23qO1p3vuF99jovi6jV+PgIikNKU01eK46PjgXDAjDIsEM2g5Bc4Y5iORoiVmG0LwFWELEcU0Il719Nnjb337w/mi2+7umtavVqssZIiICPZojKPh523l49nMK/UFKiWl9M5xKJu3ZWb4aFkFMsGY2+/3SsLMbdfNEH92Tdd1KfdzI1Xl4E/y8Sf2zYNK0b5+8kX7FcvYl7ZaVOnLkwven/Gv/+spJyNiziXyfGorPKiD7WpGk62qBy+ejLqMVETmy8VXX331i1//ou93ZlnAZVPUwxDg2lKe79/zOHnhPC6wwpxvVEi5Yd8u1+d/9Vd/Ddyvc040W7sWX7pvnuDztg3rs+UHH7yHlbdYLN69e3dzcyeJxhSp6r5wcoX6Z6nKiqAt8uMqpXhYiOUKB/6dU41LD9sKWlIs+TGQ8Q3MJcfGhfO5wiIdSl1ToZQbhsFsRiIy75BYYJvbLWynceG0wq/QWFQwYuZb4DTQ4FmgHiODFF0IYYqD5SNhIEMNmK+GSDIyvub1UlHhwGoSARdAPkO7E5euOPt+W3r9skXqDOdssowLirgWAQh9I+fNxUGBmry4uEB8GMML0DgRWa4Bf52mKTRusZyh13qhbojjOCJuHNNo2vrVq1cVLJmpoLfwmiapbT9S6TcAKbnb7UxK6sFUYnjneE0LurqC28LPyIkgqR9CWK1Wy+UysEMsFLr/7du3v/jFr/7hH/7Hxx//8e3b6xijxHR7e3t7ezsMg3PkPcMi4SpXqkkccRuaeTfrmtazAy3XMOzbtl2fLWOMd3c3tlaRa9R4YE1n79gXHndVLSlYxCfMLjTXDYETdAVdLpfAE2DwbdtiSZtdVe9fU/P3pfOJxDjapJWrJHIUX7RQc/m6Yz40PIbyo2ImImKENzVvAUV0cszSbK1EkNNxgUProUSXy6ULYX1+/v6HH67Pz51zxKKUc1LMvF4viQQgidm8RUIHpboYHBD1WLzBtobNglmxJsegfYkIiQwiwvWdc6Lqm+CbA+bGGkkxHzo612P7oMqszZcTYUuVH6IV8Wc9L/enrF5UJ+q8OvHEX5WHalHvXf9QDa719VU12DrLt88EooUqlihxQt+33/3udy9evPj2+98yZpny6iwqzE7pX0fQ+Oc73LHudzUqW4i8a7yPXTf/0U/+8uNf/fJv//ZvITrVMeyjdMz76JRSGUTTAT/4wQ+++uqN936cJhH9/PPP1+tlaByXpIcPQap+pThsleBjLvVIy+Xy9vZWVYnv4+Dx5JlOjh+y/qyKsaw9IUI7xlP/+PjK+VtmR8PdKa3lAlS1L+AjiDYIBXaMUOd8PteYc72pFPxhiSFMDUsWBQZTaelj6U9wSkhFjODpcB0pGCs8hnWaQ4wXltM0TRcXFwh6U9lprrQqmnVhNptzMdtjjGjGTMXmgERoGp+zrf0A0zU4z0oS0367G8dxuVxqkpREnVPnsjL2wXtfN0uwnDER4cPtdptS6ppWVdETArH0JNM05Xoq0aia0EiKrCSUKMZ4ff0W2Wu8DnhFuLAccHHcY6HjNq1MRdaEEJA7lFL7a8HVEEKSyTw/C2Oa78uldTE0lojEKYv72Wy2iPHdzV3g0LZtCC1K1D799NPr6+tHF5fTNL1+/bqbz/q+T0mJWCSHPYJj56xLjwKp4D1QI55ZoSpa7ylFp9Q1gZIoTVKkUz3LENmYEdsFzgiqfPZ9NZdKN7b2pBymuWOhu7LrUyW77cx6E51sxiOZrsCkHGlfV8q+M1vZURw7B2/NSG3RgrEEhJi5aTJbSFMIYplBNp2f0BYh9qxvsjGBNtuz5cyhlJ8lNOH5i6eff/bl/u7OM09x6Lpu3i2wMC4vL53PRYz7/c5nkNShdUSJNww2Jg6YLqIGxAZlcBLlIUgpbfY7jMahTN8FpJwQJTIwPwJpaD98Ek007+XrLJ5qSA/a2pVE8lFE7XjWvuE4EeMPfuVB7fsnXjnvQT4OndvfhPLjYjiiJGb+xS9+gXo+d1wZ/ae8z//6Q05+YDjBHIhIybXd7Ozs4q//+q/Pz8+zXKgKzvQ+3K4KK0AAvf/++3uUkLYtyATwdSv+e0hTPhBVbpoG3eLkGGOpquBROjyCgfHc4Wr1TNnhwxEY0hVQHyBCMF2hDFTVXDc4f9gJOEIptzffxTnnfIaY4XMEJKlEPpnZ/pRTvKIG9qaK4U8KwaT5KJC8WSVwRo4YJM38M0uZh0L5671fLBZW1ws9DVAuVCwQXihJAkBMRM7Pz83U2O/3KNqhkqXmwkMJz/gkgtr3/Xa7ha6yyCcUHgwFBMlh0/R9v9lspmnqZg16GzS5t3xCAc92u93tNpxdvQnJYC3xA5RtIMSNvJ1oxuXivhgQeMPmt5k6ASTHl7pYoMfxp/Pz87b0PsLTIu6Nx2tLu2gseLOH2i6XbGHXiAg8LWbe7/q7u7uXL19++eWX4zje3t4CVOGcC87P2m7WdkA/VQkIh4wcq7Kmrmm6ptE0TdOELIMBc6jSea6C+JofRsXuqaWtK1lJ3zZwf433TauAbR1W5QqlcXJrvnec7PGTD7lKNJh3bs9vk8LsbV/YK+AKrtTm2RYzgD2VOu+madCiwlV1g7b34egji2KTi+LyF+89e/z4MZcEAVqwnJ+fX1yehXIAjWxurnMOtoLmiqwD+QbeCI8H/vlaa2JjknccrDV4wpKzuv9+GiE3fMGN1sLNZBp9zfHgdFAlb91x/tgm69+gg+9P94ne/Lqn4ipFeHK1zBZ25GMXrdCGTJ7Cgcf9OD+bf/nq5a9+9asQwvn5OVDHqooqoP9z6OBs31DZaSLiPQvegrzz4Uc/+tHPf/7zX/ziF33f90MfOOuhGI8qrJxSYiK01XMyTSlO6cMPP3z56ivbTm/evFmtF03j4UaP48jHatgmpoiGQ/pquVyC+kCrCnEisnk8nVQmyVb2IU1F+Xb5W2Vliat2SEy5jS4+gbvfdXPViAKGVJXhwqQdqpoHLtQ5JqH2+32/z6gi761uXLtuxqU1AvQEbuqctzWaUgrB416+1NKgiYrFxuu5gwMHu8FUiHNuW4iRvfer1Qpb3aPoM9J2u3306BHaFALZi+gcld3oCmDNwKtcGJG4cAWnCixjxkqWWW2bUgKztBkECNzBR8d0I4SLB0bm
GObF7e2t934xn49FNKvqNA2QcXhms9mnaVqtVvv9HrFfnI9wOluCrZiAroyetb+EN1xPfUq5azIMC+DIVPX8/HxfTetms4GG8452u90Yp/l8ziH84Y+fwjaKUTSJd2E+D455HKcYQTUzeHbOM96diocXY3LONcU9pSKkABqapqFt1/N5572PCX0nk6o3s9hViAHbUFw5N6kg8LlCooWubVxTS2Q9ziyatrbn1OK20rECth/oa/wwZq4Fb1N6KmuJQtvCMDuJS/Qblg0Xx302mwEnhdUFfxFLiKuYloiIJkeEgcLnSeTy8vL8/Nw54nAIdUDJUQEkamlq0vf9crl8/OSyK1RrIlkgeO+J2HzrlHQYJq14bTEDbemaUBkrhJdC/IaIuq4L3QF5AABE0zSJNO5jCKFtAxFNKXrf3Fdm9VvXh6qSHmb2xAyqw7RmUZ3o46NLlS/qccTbVsv9c/7Nx2HV5Uc/QUFLdM5NKSIWABJdlEn8+te/fvv2dYzw+dS6N9ODHCH/YcfBCcY7et+klGDy//CHPzw/P4cM8lVPITtse1PJfMBvWywWP/zhD+FszZeL169f73Y7AENgvllw7/6hhxYFAoIIQCH0GMpBxwEK+1zKn+z8Oj5mtmpBM2Zz2NxN2NGWvEHMmcpKgokNYwWft21rqFouWWToG2gpQ0ia4YLglSvtZrHi7crwA0QEpRTIpseK/GEqfIrIB/vCGgH/korhj0ZA8HRjjJAgVNLw8FCRtNaCUUdlFyZxu93CCQMI1gbQJBreJVSUWzbsNoZENJ/PUeIcS89aWACuMDP4wuZvn1NV84aI32w2G8feUrZgZqASMMCYYKhFBEgZLsF8uBHI1OJeuBGkKiIc1gsZoT9L4E2FbMSVJjOG+kanJqwZKcWyIkKiWDO20haLBYbdF9pk7/1ut2N2yIZYeEktzyoHnLnpnmmavM+KEAi7VFoaU+VhEB35NKFwRJhOMk/RFe5VLFf7luWDzeqiY0Vei+YTEWw/u3KcfG6vU2/2pnT1waO640wzDvvV5ANRJmAPBaoNGnmAD4z4zFARMUY0jsT1z87Onj9/nlNsqohkEEkIObCx2+1ms9nTp0/L8/CzZ0/atkW6pB7SE7vBXlBVicQSIrCbbRzsTAuTqCrMaDgqSOLAK0BZHdbSlCJMDb0X8H/Q7rm/QurPqSTRTr7C1WEfnlzh/qXoWEPfP7M+vvmEkyV3oK9CZIcicWLPrSYiYfvSPu7Ep8gTz/x/+/u/fXNzHSUNwyRCmoSVWapaJBKhlIj+jN2AidzX8FkbGs0ReSJPuTg9EUV2SWliZhENfi6p8c3KN4vzyyfL1dp5HtLoA4RjIAoqjaoXdoWsQ4Jy571T8Zzazp2fLYFGdtyw666uduOkvplPouI0SmSkGZiVKImgPCDlPnFTjCOzek6ahrPVbBq2UxyTxGox5c6RzjnA2Z0LRE4jaSRPAZCNspPJswTHjQ9NaJk5ijI1yg0555umH0dXChMRs5rN27YLSQYi8T7brVFkjJGcIxeiSN0LLAMoukXXzoNvxyEy5f0D3UPCcUzBNZvbOxKN4zT2g6oOw6CUAJzGHkOnh5iEnVfi/b4fx4mZhelut3XOkTqpYvLjODrKYChElQ9VQMLz2XLWLfBId7db75r5bDmfLb138/lsv991XTubdc4xOD5jjKIR5TF9v4MB4b1HyaCQuuCFcqc/y0mnlKYUUVCYVMgxTkNGRktJj0XRN5tNCOHp06ftrJtS7MdhGMfNduu8x8IYxnE2n7ddd3V9PZ/PHz16oqUQC7q1aRpVjlFilK6bt+2MyAm5aUx4NtMuMOOwPGBlTklvN5mOCq5Gzm0zr9fraZqWy+U0pv1ukETQ+svlHP2DAc1l5mlM/X6U5FLk9eqyaTrnHDj1Yxzi1IuMba7V9podC0kpNo3rZk5jdMVeydol+ChJKLnGkXPtbIbsb4zjbNYuZt1yvlivFiAX8I7P1ytk6Kdp2m7vYM0IEwcvTI48Kix9CaKa74Wbto0PntGH20mynTWO42azQcDARF+dQMmSUIjJkbKkA/y+eHvZSqtzK+TzP2FKLKKJWNuuwQ6FqQf1Y5YTgvBEglQPesd6zyBgSZQSpbPLs9AF8tR1DU5A5TQ4+EJwZ2ertg37/dY56rpmPu/YpcWyff/9Fyizbppm0c0W3YySBA6UiCUjnEVENJ5frB8/uXSLtll0QsKehURZk0rbtt5z03hEePI+SNM0DUSFE9s7IcdN69qZuCAuTKLCbkziSSnFze1148mJnzeL1WzduEadd00bVSZJkyQ0SkJZpip7DnRg2DyKT0iBjOQ5qtrxulJ5YV6yL0R1XKH/qBhPKDbJdcOV3rVMhK0ErvIFcshOOlUmKf/uQYDhkebnvJfW1Mp9wg+hNjHIzAo5ctVVATDIf91ut7/5zW8e/5f/a2gaTSIiTYNQ9pHmd6jF/ZouEP+eh1nBMDzm8+WLFy/6/fbt2zdd28YB4HuHGBIxkzGAElFuP4KgqK5Wq48++ujll69CaLuue/Xqy2fPnnRNGMZ923kRoWMgAFeBL18gM7E0sn3vvff++OXrtm3jdEBv1REwKm6ZVkf9XhhxVZ2mKZB3zrEDBCOnME2+wMsxK7vfj01pFeAL4JlKXwE4QOAMQh7LlR7PVPzaTL9XGvCh0D6V8kekLbnqawQf9MBH0TSxNFSxpnjjNOIruQTCOwSQTQ5qIZSw/J81fLV+iBhntdYLRFZdA2cRrrOqWkMI45u0lCoXSLAjRUrVwE24fm1Ep5S22+3jx4/Pz8+HYbi7u3vy5MmLFy9ub293uw2iAubdwrIBH0vXzTHgfd8vlzOj/kD4Gg8AH9TyvrYkXMapZdrwtg3OueV8XqJTeRi7pkVgHClzV0DU4ziZOJjP58ZzSTmccKAvbpqGHItI286wJIZhCr61WbD3YvLeq2uCq1iZWMmRiqi63HUbkQNwbsRxCo17+uy92Ww2DP1qtXKO9vs9KrhsfWrpOnWyr/kYOQXFRpXP5IMHTyf8fl9lgsUKOkoPA+99nLLXgF1r1zlxwqpfD0nKWkk3VVraJsuVsjoD7kEQ+8I+a7GBUOhoANHAvrNWXev1erVagTslxgiz79GjR6C0sxVSe3uuCXhQrCJYBimlmOZRhVMO8iO04BkWo/MFrcmcMZiimYheRJzLCESLWuPkerkyETqcxtLnBhNhDjEC47H0abUJslkwAX74sPpTnc7QKgptwecT1XaiYk9udOIHa5Xpe3AZ3D9Orl/bE1Ai9cWJyKFo7yDcWVT1Adi0HLGC/dMvf/H7P3wc41ipAKjpIxjwf7juzeq0Xos+fPitb/30pz978d4HWPrNrDvMBOcCczu7jhdhgX77298OIcQ4Yuu++uplDnyRAzizXhk4LHBnZg3ibM+ePctKmg6Z3drmMuVt66MO0VC1MqyqSgrwkotPoKowe7kiyrBsouGbqFLzyOCaDQjRgHwhFZpG7BwtojaUakW4cVMcqLSXJyJEm3Hlvu9TzOVbOFC0w8wIsqWUwBBies4V0N+
Uu6YwBNNUuiabBWBjVdvOvlQBaWnnpwXB5AtrJsBN9i5mNdsI+KpqhUoNNxVzBEoUd9lsNsAqwu+PMW63W+hRLCEAxNbr9W63g1uP2jaI2hhHNG5DM2DElm0JceFssjmCgwXN3XWZscQVpnu8CGLLsWrmWKb6CNbkChmvL8l13EJKkQzQ0YiRwlhRVXSutdUOTZMOjBa5sWs25uIApzD3e2Bp2/bs7Ozi4pyZ4bpdXFz88Ic//OijD87P1zgzFLA3lwhnLctwZA9GD0VKKSVAAVLp9GzfxcvbJJ4IVj32XWyR1zeqRbNtxtoaMNPWfrXntwi5rdXaA8PKhP2B3TorB1YsqD0vLy+fPXu2XC7BXbVarcx2rPcyMFB2F/PFc5HSYqVCY0wmpoiobv2biggyFLfFqH2BX9VBCCy5zCmrCsCpZRa89wZZlULXpVVD+lpWmyrhrzmoAkjbTNm71x/qPe17ctg5h3H7xtNODmauUcD3H6B+5qN1hW9qdRincT2RRETqVDK5AzP/H//H/3F9fU0kLvhxHNXAQSTEdXXUUc3yf8jB1Qw6F1brsyfPXvzkJz95/vzFANPYqUhECAg1EoGdY2S4nMihWFA0Pnp08cGH78cYVVMI4fXr17e3t9aOu9ZkdGw6aeW3QYyGEC4uLqxWx5VU0Mnk1e9STx74t3GLUGrwRSSlCYrW1mgoVXpowioVYzAeNRY2SniENU2uK/RVyKlga9UVqHAcl8ulFpJkvIVdn0s0qe/7aRyncZymqe/3KWWnGRSY1rEAMtcXtKcpPGbOTIoyuZLiqo19izGkqpSZS39GKkCqQqARjZs6h+JjBDwEuhzI5Fo04ItascsaygY6CZ7uer2GRr+5uYFSt6QsgE6QO3d3d7NZHjfv/YsXz6jgZZbLTIwMve6Oc422DCw6IhIxMnWwznA9rqBqIS77vkcZqC0eo+7SUg1slgTeMRYKp8VisVoujZ4slO5VNkQ27MXUyzZKhhwHnrcdxhZjsljMu64dhn4cx8ViFkJwwXddBwqO+Xx+dna2Wq3mbRdKJxjoaROmemyVqmZvAXOEDD0GwUwl2KM2pHg2KTh/+noFXImRU+pgV+VKTRHev47WwfnyA1V9IGzFNqUJJhQYoK/4mUp36hDCixcvAGrJkTPvyDvXBPIukSY6LBvNdVktXh+L/1ACJyrESPRwsdJERCkJUw2UYWZhchxC2zl3oHKyBQAjG78CdpOq+nKL8PmCULFB81WjmgeF3n15eP9zc0JM+pl94NypS3ii7Gt9X59zei8+tQmgNWwFisR6WKAXH7ygLSMxOBUzEx/oLE5eMsmUKC0Ws7vt5h/+6X9ESSlNyjCCqng35zB59e3/eTX8P/X1PEDsYxTn/Pri8tnz92azeQYr1swmsEOVvDq8kMtYvgay4zvf+Q7iKlhPv//972OhqLXDlpGZP7YbTQGLyItnz9MUPTs4wVjo8bhvJVUTUU9zvVxqZxHSH/tTKoZqV9ibpRBEU8UmQaWzEBXf1LwuLYCgYRhAlEhE+/3eTrb9EwprYyjE2q6w+BowBEvUML0ikpsJpqwIa92ghSaTSk9Tw3OJsdFyLjNFbcNJFMukCZmTVH1YO6a4SKpKXABvtv4tqRQCwVmEZEGrOyRogZdR1dvbW3xdhIAUw60xgKDXhnJl5vPzcyLa7XaXl5cY9vl83vc7CyavVitzKLmwWTWF02oce8QnVBUsJXCdrUrKPHUMGiIBehxuSUm99xhbtJZS1fl8HkJraoMFOHmP+ERN2srMua6mdF4yqZR/ddrNmrPl6tGjR1ADs67p2jCfzy8vL1erhXPZvnn69Ol77z3H6JlZAyNv3qI14ZGws211tPsSxUniJLYNzVCAjjE1aU6hbVV6SMrXF68/qR1o+yKuTJX0p2IoGCzIFphFVmq9broZvjIsZtjBKDRAWgFBiLY0LLFpLZ5D3mWI+tg2x70koyCnaToQwrPPKE7lzGeHnTKmSN467mVbtqm6i3LFNdRP4zRN6KyKMIzV1AEySSXcZZDsIhkOgaWTKTb3RkrNdK1o63nBX79Zg94/jrXpEQ73XzrkfhsCyiigulnRUYjbpjvUf2AWotI7KYejT98B+mM39JeX5x9//PF7z57/5Cc/8Y4lF4GpqhA7okITfe9xiejfLTitdJqZJhdY3Gy+9K559OjJ7d27/X6nIsrkiQnEXnrwYh2zKwRsrsmtXpfL5Yv3nv/h4z+2bcuObm5uNpvNer0kclQIwughremq+mksu9VqkUvsk/DxsJzIAvxaxfqEi0/gclMTUNIc4oEhBPgKxhAEIzTG2IQuHpM9ncAQDGFr8VgqUqwt7d+naQrOWwbXZwyRIvsLR9Y5d3t7u9/vsQO9z1E4HHjmLjQmTcxxgWxyhx5qmdiISyN62/ZUegumQgjlCpmiK/HnpmlCk01sNBbE6yMC1hT6IYPXQkaHtoG8cyUr7Ats3lWgaCJCu4JpmvDW6GeA57+7u4MrzMUGB9KYiIZhWC7XuBrQyxiQ5XKJEYMMTym5MkFUrIcSVj0YFsE5cH3YbOJpfekAEQvjN5fcPJbPfL4048MxxHEqjmMgct4zkweCHUHjFAfnXON8Oo7rmnMDqZj9dUrz+Tyw67pusZj1fb8CKZKIalRNAM7P5/PHTy6/973vrFYr5Ok3m827d7foN4VXSGlC3K72LKlSkFJql+0EC5VbmIeq8ABV+jKEIEkN1FO3uz9RpXbfVPqnHSRGNUdSxWZPPEUtPpkWs5uOVQWmDxh4VcU6NDTDYrEwHWwBAOdc1EM5kFah3ax9HYsqer2wd7FPSWTc7Yru1/l8HjpEsIJvguihUQSzJlVyHsRVuSYiitKBvopLKgcmOBzcWPpCTpIrSvBX5CBsiGonW4+tZ33IG6lf0ywPM3G0srnti/d96/vHg0qX77EcKmWGiFrInzwwMxNaOPMDt8bDhwctuPxuWDrCoF2inEzN14oxusB/+/f//dmzZ5fnFwx8HlQIuth+k/kg/1E62Dk/xtS13aMnT77/ox/e3r57+cUXwixpIvLscopfVYkcq5IjrqwwIpI0Oec/+uijzz/7IoSQJIbQvn1zvVwuQ3Aqp1uUKqqdelcQcifET58+fvPmSlVFEpogPWRPZcIp55ryQ236cb2H7U+uEKybswhZk0oHNFc6k+BzUzwnprqrqmsgBaCJh2FolytYylAq0GRU/C08Ule6uyNAHQ7cN7kmFR4SJYILy+Y2MXOpkgQoCRqdszcjjoOBywwb7O5l1ChzWedILJw8Kv4Hro/gMCgIiMhrbqOGQIIrOWBXZfFDKQIOIWw2G9gfVnqBnGss/MnDsO/7/uzsDCwEGEMQo+KRwM1ydnYWS6PJkBsaVkwmJacbS78pvHiMcda2WBWwJ8ZxBI4GsYcudn3fo5gQUvVktWA2XWkbjHQduEREhMn54Cx3uF6u4pTlhs8F2VCKBHp5LgBU51zjOfiwXq5EonOcUloWqs7tdstOfGA8cD/sRC7wGE3p1LRcLs/Pz733b968AYxAK1ep1mcnMg
7jMysMD8i8+HKkA4lb5Y6EEDVV6f9SpckPQMDy1tZDGpKrIx2TS9dR0FrYQq9TkTBcU3qBQoszsTmXbILdCzYcQlNGaQe4nBkB+CGpTCkys6PDG+EdJVGSOMWelYyk1nFhCHAuZaSkBWCIiIJvQ1sXWB9wbUhR4TlXyzVeB0V0BvSrw3L181BJpd2XfkeSkA8a8URVpwKpqxXhfS/5644TwVs/2MlqUVVlcQVgdHKRWmErC/O9Lj3lcCYly1XEbL1irSUSzZRQovgbM08CWTb93T/8/RgnuxmC3TiZ5Bte+F+bG/63x5+1qlFWck3TTFG8a548fX7+6PHZxUXWr5RUC6M2M+thKDGpOA2C/vHjy/c/eO9uc4sT3r59O00TuPfoIQ+YjhP4rjDys9KLFy8gK0UTK02F65wqewhKl0pkw/Y5UbbEypk5OGNGPaqTqSClufBppEK1b8I9VUw6zjn0yvWlnhUOHCKuWhwLqCstzdeQxHU+32sqvU65ECPg/FAo5uH7do1XSoAlU9mclg+GKYAxh/lMRFBgKSWlHMRG2sn8Zrjp5k9rQReqKkh229wbrkfqlAtWHCoc5BWI6Zk2wuvARbYrW1DaIplQqERkFJj45OLiESg1bm9v4e0hvqqavvzySwSxx3G8ubnp+/7q6iqWJuSIahRldkSf1HWdxOiIWHU2a03tee+JxDoB932fVHwThDQ0mZXFDB1mRjjBl8JuLDbMoPeeSguK1Wq1Xiy5tNCJcYoxUsraAtq5WFdgwnJdg4x+07ahaZq2aVKM3oPjkIm0cd5pXo3z+fzx48fwjYjQFyhi2TDzfr83BhLTavd1sAlNLoGWaZq22y2yAFjwyHAbjqGWFbZnza2v/1QftlNcWVy1fsW6rZ9TjmGYriAbzBQ4ERou9/hziKlAq8EhNj+eCmILIEcjCc+X8s43wYWcIZKSPcFmiaWtSNd1TH6ME74bY4QTjJ07odqwgoUSe98E5wIRgUWHS/xZSioEV0YzaWwliI5Qiu/R9CVWHO8Ws3EVyuFEeNZzXUtIWxWp8B/USlcrW/Ob9MQpwqaODZ8aWDkaf8/HrJeTqvXXOSRNTk4+8MjIIVlylLEw58kCHSawVHWM0xdffPbJJ5/sh76ck0Qju6P6mW94yj9NE/8ZMFw2Ac433gfy4fz84sc//vG3v/XdZy+eR0EDdmFmPLwrcJK8LDw7z7A54jSIyJMnT1AGAKn9+WdfwluKhaYYhy2FVDinuLTtDCEQy6ztLs8vEMLVijr8JOplurPW6/XJahG/CnEQq5ZzVBS/BWnt5zzKJRximDIkQQGshXYBxyxeEELZIrGIdm63W7i/TWm0nAq2K8bYNZ4kBkesybO2Xc45ERGAV+ZWmgbCOEBemHXCBXetlJLkAqqSesy8u/A1mRl+J5KmeB5waSEMAG9MS4/0/X6PTC3khVFjYqyg5pkZYOCavQvzvl6vYymjspSVqr579w6ujJTKLvh2zrnnz5+bVQQbQlWb0h/QpKeIjOPYhjANmVN6HEfQgjLnHRpyjVZ0pX8iAFOh4MMxyEBWw6SAjQUK6yZ0mGJQafZ9P8WIZdDNmru7u1DYTL07xEi5OHzGeUk5IQKCs7BcLmddJymqymq1ZGaAw0MI2BltGxaLxeXlJXiwx3F8+fLlP//zPyNluN1uP/vsM4yAKw0Wcbtatmap4hg9Yn0Tmg5FUwMzlNHB5AAAxcRJREFUr9dr8LfUaHAq4WULzutxWpeLi19LT3dIcuevq2rjfOAD8EfvOU+molwph7PnN8yHKZvDxncMUJVrAoqhF+tV6FpURQPhPKaYSNXxmAqughSdZM0CNmtAi5GdcmE9q/JsNluvzr33gEOa0MZqTynFWEJovjGZJqUPqb3dMAxQwOr4/Pwcd0cauO4OUktFLl3UUiZLSPUSOjFKHrSHpKBBjdNeS7TSl9IvOU4fmAllM9uULia1gYujXir22CZ40WbYJs6uaV1DTPLXV7bTDgRvWgVMTOHlNc1HWXGT10OcvPdC+rd//7fv3r1jn1nFx2kahp6d6r+OiuO+Gv7zI6gxYeS4CV03Xz5+8uzZe+933axr5/v9fogTIn54RzCN2LAws/OlukDTe8+fffTRR0Xu+6+++moac9wpFEIZLhUyphHN+DgYKJSWy2VKkyu4ifu2kn0iFULBOWfPIyLGKERoDEwpyVSvHqrN2EpnmGlVbyGD+SB6bB4DVIWhSe06MM/B9W/r2NSniFhjJPwKJzUUnvqzszOL9HIhmLUAHR0bwly4M6m0XPWBfWAiSjL5wKg8sbojV4gMIfiAakGpNCqCUlUxHEr7ZLikwzAA2YTk1m63gwQ3OWLbCTisGCOUHFqfmnOsqvv9AIPAOQdXeJqmJ0+ezOeZJhfIPvtZK7zeOPZUfH3vfXBOSikXvFVYGFg80FsiAjPC9BNewWIDqooYO6LTpcY3e4fDsOcS53SevPfOk3Ou9ZlWpSmlbvg/xiiS1/l8Pm9D8N4vl8uu69omiORU9Hw+98RdaBbdzGlOVcCNRlOp/X7/+vXrf/7nP3jfXF5eorHxYrHQJG1oRASsnIBDwtLCynHBs0euxzdNF0KrmtP2TWko6QoW2hRt3s7HuparECLRqe9rnx/EQtl3eJH6oGMvKlQV6r5Cftntailvu5JLUROOUPFVaeU+ElHm69BDXBPPZtsH4ihOkqKmqCUVldEP3ntJih1hRzHug/cZKtG2M/IulX7AqfRSA0VPDhTNOteEUFjtTrah7X2p0vAPeqj1ONSf1J9jX0jVoOlB4UnHMEzbvydx73rW7s8+Mzt3pMgJuGifs1TG8lFfn/n0kXCE+pb1n1VTnaZlJ0TevplKkzgY9dfX17/67W9Qi6aiIQTKeqh5MOp+X8FUx//igiUG0oo5eErh7Oz8g/c/+uLTT17Pv2KnEtM0TUTOs4ceVRIipyrIIzOgGUD9pPj08ZPf//Z3iO+No7y5ertafAAGqxit4uhQgK+Zt9mpAuDGoJlZrRcXZ+fDFL0LMabStMMRPYDOoEqbIr3tCoTE1rFSotK4m6riEDWyGMnxXhNDXGzJWDrtWPADiVVbUjCKsXWD81Lleoly14Ha4EDMDD/4cIgrxMLzALyS1RCbeoMpYDrbHAtEX2GeYx9mPc0CgNh2u8X0oaVPkgkIUsB5iOjs7IxIYozTMKIKT0TgKFPh94fva3aSxDQmWZ+tkS2GqrOkMu4LTg+E9WKMjx8/ns1mNzc3CG+0bSuSu1asVitVvbm5KanfVYw5g4vhNZcFasa+qyVQT0Rd0+y3dym14ziiyfQ0TU3jxzEX9cJxgfsrhYLUlwoxuMKH5pjqRKZaN7RtzixMMaPZ/dwzEzsF8tM5nqZJCH1syGX+yzSbtUQSQrdczl2BxjSNlxjXy/m7aeiHHRGtz5azWZjNZtM0XV9fE5EPQVU/++yLi4uLn//85xcXF3d3d9vtdrVYpim+efMGjbDAvoJXMNnlfKNVnAmjgTCGryD9+JmrSlYuCQhfIdeo8tWohI5qOWli2lRm3ghJhXOdi
t4Tf7h4LD03bQPa2jZrqSiYA5+zKV3TOlKRemp2Q8s+JfBE5WRwqLqOSsq5RBHBqiNkDUpzXyqeKPpV10a/sGvbzreNLUL7AVFlZlblxWIB1Jh1WVBVH7xJm5oo1EbJdOpDYvt0MLUKx2KB+aouuday9TVdBYOtdap8TY7/vu5nPhhMJ48KNWyYofrh8ff7yCenpc2FHdVfpS4RJhZiUREub9g0DerMFqvlJ5988uvf/bbvexc8ERmFSv7Wv5T9/nc7lBKVnlQpavDtk2fPPvjWt99//wPvmr7v+2kkIp+reo5GP1+hGqWzs9VHH32Etq/Oua++enN9fW1bwibbdtdBVZR4hfcsU2ya5sMPPzRwkyV46Lhhs6aa8fxA3wH318BNztPJ3e3h65yCGYNSQnDwrrgEl+AFVmGojJuIVVN61FkCLsTuQAVXCy/mg7SC82R2sWWIU+kNp1W+g6rEgRSya3AUWODLSpax/eDjIrZvJRxmHSNZQEQItVGBjmuS7XZr1zmRdN57Rwcha3BiSwBvNhs8KgID1iDh3bt3u10P0Wl2DEThbreDYzcMA/K+yJty8V8RsgO0DRfs+957hrFiE8E5SZxTazGOMF8MLjuVjs5gngqFg0lVAbpOKc3nc4CcpbBYw4tdLpdN0yyWM9guuQg7JcRambOxpRG44kOhC8rNi4YLXdetl/PHl5dmssPfWq1WoFdj9vP5/O3bty9fvvzii5f7/f4nP/nJ5eUl5ne73bZt+/Lly7OzMyREsGCkpDlijEmPgkNS2kalCluLw5ScpfNtJZhetCG6/0WuNGutOepdxnLI+0oVP7MlzVVkyHSwXaqWM3rMFmI7i+9Z5LZTqk8UWJxU8AqqhyJpjA9Tzg2P/VCK7KPkYvgYY5Qqdk1FlxNRilaDfjAcMYYwqYkI8WeoSTM4TAzWo0GVM/CgstDjwz6shapl02w06h/qX22KqdL69qeT8+0gIqvOrf11Wy1SwDf1dajS4pbetSNAtTKx6lGsrzqEyLMokxJpVClUdiM2T1LlJKr8m9/8Zr1Yfve733XEItKEkFJkdCNhoQxF/g9mx1ISRx5YZ+ccu/b87PJ73/1Bv918+sc/KtNquYxD3O12bTtrfJjSUZck1UTklVJKcbGcxxi///3vf/HyS1eqXa+vr8HDoAUWDxlxfwHZHI/j6MWfn5/P5/OhH0MudnRCykfLUUWV3MESdM55Z83Uyq5mCXyIqAC1US+4+z9oSeFwZU1nY7nqRSNVNDgUNoM4TmCL5BLiq3NpJTJ2UGbTNMToEWKFFYwgMBfXBDXBWqKFcIJNJbsqdopXq/Pu8PB2ux1YnIimGGNMY0NzKQjY8naTSOucc+xMl0MAERHeCGobXwSc0O6OhJMZTDYa6Hu4WCwAqkop9f2IWLdWprRJwGEYLGqnqqrDYrGCILNIeG36MDNYedHgj6qQhqlwe522bVUZFhJVAiKW1guQEYg5e9eM49h1naXu4M76RiDXxrFnVpQhnZ2tk/L19Q0RKehKyiLx3ofQTNPUhNB1jc+tojpN6fz8fLfbMGcYY0rp7u7uyaNLOHnztpvNZo1vb27ebjably9fwp16/fq19x6tP8/OV8G3q9Xq1atXME0w4ybB7QcEUWF2uIqpzUS/jXAdX4kxepe9QDnOGtoP93exbY1KTB+Z6Xqsp6HAptISw07gY3ew/sQ0DQ65l5ex/Vvfzt40t1zDQ8lRRnMYBmJlJXM2NIesoohoKrg28iqsxIA4olirnEyIWsGoTSldXl7iE2wNRIy896kY5VrBleuXrcSs1iN58kN9ZgiADSbvERlOqlK224kuwzVNeNZjlcOTdPAWbEZqqgwyTepKIxMqbkYZt7wI3XE43Wa26Nn815xII2ZGoPUegpeInBIVJ8YMZC2Na/BqsJF//bvfzmaz73zr2/APQghqdkouB/r3K0B68HDkqAToQ2hUxfvmyZOnf/mXf/XpZ3/89S93MUbfNihA2A99iXFhOhWTAtmFoMdsNnvvvfe++OKL2WwGjgUD69qg+9JBHc+glXdIhz2sT58+/fif/+DCgcaZH4q95E9YSQ+WeAg5HSsaVb0Zm0BIUiU+ygUPMourylH8GgoVrelRLuFWEfFVkApiummapj00UdGqcsM5RkdCPjjH0XvnPYO7qe8Zop9LKtF8d4OQmBOsJYkFLos8p6UDnSsVVmSkInyAnnrvRXICGE12nXPTmAFcQOEi14sQWSxNjpk5jrm6CTXERomVvQQR/ElVz87OgK66ubl5/fq1c24cI4rBEOythYuNGBxWhIKBjjk/P0dVMV5kt9uBMwtirmmatu1CRbQSY5zPO+gkUye1/Y7cymrZadWYa7FYTNMkieqQPkwHSM/Nbg9FBWSy4Vwuzlbb7TZGWS5nXdfd7e5chs5lBbOYd23bEqmIOKIxpWkaYGWO4+gcte08pXR7e7s+P/Pe73a7x0+fb/e77XY7DMPnn3/+6tWrVGp2r99eLZYzrGfj/LJAcdM0yq7G74hIInVNaHyoV765pLbsa1cspcR0+FWqksIT7Ws/87H3Y5dlUetlY0IZqxeBh6mwqNZb++SatU7FYXK/ltK1ZrL4U1FyZKexkkpuJ0gFBj8NO6ZMU5wlQxL1uUpeExwnBpugc67tZuy8FOi73RfuBy67XC6JnMgEOzhW/GK2Z40crQ4AmNS6J7YP43Ni4iC7zKUu+b5jzVXgoR5V+2t9/dqEKr/iHbMDef+79kkt9E6MCVsz1ZUVtUlHRGI46sgJEfmscg7PZKuNiMYhqrAD1X4T3r59+/vf/34/9CGE6R6YqJQD/RmzvP8GXS5K2cChMr7z+fzpi+c/+9nP3n//fbxdkpjSZEgfOg5rEBEiqPP5nJmfP3+eoqQoXTu7u9u+eXOVkobQAraQkoqQcwG5YWZfYAeB6Ih78tGjRxC4zjnBUFcFFYBY1RvetqJIVvkWGaaCSZbSmtfihya5cAVX+tu7Cp6Ai2iBO3Gh1oLKMayKbUIkQfEtCwKb8WHpZF8alWhxIlNKItHyvr6AS/FeU+E6Bt64vi+CkLiFhbK1pKIt5OUrXLT5taj6hYjhAvmOpbKWmdfrNRS5lByblrpMjLl5wKlQPELtwb/81a9+dXt7++jRI4CcYRZAr2A0LAHGzCADMpeUSiwUj2flsCZ6kJxmZu/ZAF/MjGyRmU1Q1UB0m6wHgcl+v7+5udntdre3txCaln3AWxiEmwrxC7OCi5iL2+ScQ56vmzXL1dwVzqa2zY2lIUwNLjSfd4hz9Ls9TKXlcn55eYn4v6peXb373e9+94//+I8xxh//+C+6bu6cQ+nRF599/sEHHyDJvd/vr66uDCtQQiyHccNimORQpcMHry7ngAsu5hCY4eK1mNyzRWjfrUFJdtqJ76v3DrtU/Rj1n7RS8CfHyZPY3jw57Z7OOGK9rmOhIiIxpSlKTBJTSmm1WgL6h72JbYJaXpxgL+i9b0PA2kD5TSqMJSBfw8+r1QoWGMJCCMBogSVD6QLaaavdXlDuhaDv6a0jxYb1gGiNzR0f
Vx+djHM9p/cnq1aC1dRHE7ZcGVJ2L7lX1izVgU/ccUCbiKAHnSpX961/PlpApoOxUXMAwTWuoCup2JJffPHF559/joezapx7y+t/Xgd/XXfCf+FQUpXoYNeoEnMIbbdYgXV2vV5Dpx5G07G6o4GzIYaAaJrmyZMn7733Hnpop5Sur68NpsSl4shVyYl6cWgxZbqum7UdIDn1A3OVmThZsu4YMV+rUtMZ1XyTScmmcCNrwaGYjysVVSF2jmUNqWgI2zxSUi8m96FdEBqpF5ztDV9KI+zilAP7ZMo+ltKXUPryclUrYpeyIDDEoitlV1Bph+xgVf1lDwbPz+pKMSnb7RZKd7fb4UNoO2vpisrpmgykPL8ys+W2X7x48erVq9///vfTNH300Udo/IwXjDGCluvs7ExVsX6mQkBtMxhCuL29tZcFKxYe2CBUVCivcXEIUBFB20cMnZbIsFlgUCQAbFMhciIiXBlSEgXKGDoIOMrxA1FVVJWgjApIb1vbzrmm6WCNYdDW63XbZghY13Vnq3Xf9/1uf319fXV1dXt7Cz5C6IDr6+thGFTzAuj7ETvoL//yLx89voDZ9Pb1ayosKJhiQ4OacVkZpke0UFTMF1gttZzFEtJ7bB52nVpe10pajrm0TsSmXcF+loKuOEiAe+fb3qR7asO2s21J07L2qFxVvKCmgwsFdBO6UFqGw8FNVXvm+hWkYnzEfRvvg28tmaJ8qJ4ahmGz2UAsIPuLlaCl+RI2JhW3uy1t1kyN2d0fPE5evDZlICWga2Cufd0V7DpUfI8ayfGgI6oGGi+rS46Lcqky+2xz1WKKiu1iIporD0pVHWlSyZgBrXoZ2VWiCki9hUWdemKNOcmXZFImoK6jiDKT4+To//d3f/v29l3TteQB90VUgUU1SlKipA8qYHfv3zcc/2KF0sNfZ3LOBSVlJmUVndRT1DQJf/jtHz569v7F5RPvPbNKGpkHp+SJnQtMwUnDKagESSxC3qHwY1wvFz/+0fe71pPGMJu/vbm9ur2bkgplLZXSlGLvOJKOKfaO1HtP6iSRJ3bMjnUa9sz64UfvMSWRSDJqmoIrIRRRkZyByMpVXeAAnFTbzob96Nmxkgp714xDRPHufN6NY4+uO4tu1vpASZzS2O9Yk6Y09j2JdE3jmdFkh0oUlEQ8M4k03jfej7vtsmsXbRPHyRGTaJqiaHSeYhp91cKdmRvn0YuUhWRKjQutb1g0ihNyU9JhmJqmAwXxfr/v+13T+EmmMY0lQBASk2ucb/2YxijSj6OVTPmmAR6x6bpuPp9SIufY+xR1vxscByrJG2ijruuGIaOXVbXvx7addd0cAmWapsVs3oYG7DHTMA77fuyH/XY3DeN+u4NbFWP07BofWEklekdIPjWND8ENwz5qJE/bfitM89VyTPHTL/7oGvrpT/9ivV4y636/jdPknQveLxcLlTj0u7YLT589RtleHKeuaWdtF5zfbbZpys0ejOYajwHHIqWJXeqHjdIUGodaXi2wZyvu4EKALBL7YVit1/PFYrFcxpTarpti9I0bpt43zjcuyjTGYbGaN13wTZgvz0I7HyPfbccxctKgHMh7Cr4f903nmoabhp88OvOc5p1vWm5avz5bXlyctW2bRK+vbza7vh+jKs8Wy24+c8F3TTsNo8o4jbt+fydpaoP3zMv56vz8fL/r90N8e33b71PbzL/48qv/8Y+/Hvr07nqz2+2wJjUlVu2aRkS8Z5KkKaZpnKYhpcmTNo4bx7UWUVVk0PHvoHqjqBApqzipmr2yb8k1SV1SJ+SnRMMUpyTKjrxTx8LEPrAP5LyyE+IoOiWJouTz3p+mCTpahJDoN7BbrVqqvs4co6iycwHZ8dr9wjO7KgHJzGgAjH9UUYiE3HPJtW3wnmezdr5oupmfzUPTMnFMMiQZlCZ2KTQOAaYoOqY4ibIPyi4pJSUlv+0n33USVROlSTwH7xv4bEDJDcNgbbZB+paSDnHwrU+UosZUWLhFxHwAX9VTMbOy3PtH9g+due0FreACs8ylkrAgKg5mxOH6xwlWPXiYB+eKKrcHVK+4CDQxslQppUOooHAHjeOokkhFUozTOE1jSjGlGOM0DH2ME36OcRJJIinG4ZT40J7y5EP7AWRmZIlJFqSpnXPI5Klq3/f/8A//8F/+5j8vFgslDT7ElIIPnl2UyESe3cktik79DzvgCH744YevX345n893d7eztoOpVHtyqopAeghBNbePHcdxPp9/73vf+/jjj5tupqpvX795/vQZEY1DnM27GEfvDtZuzvNzslmHx0pEXdddXl7e3N5571XJ0pCADNS2vD1M1sf+4IWYywi3htmHwEQkTOqYks3yoTwUUKbFYsHeI8/XtoeGr/D1tbAMuqoJNvZSKMwehmqeJInIvO3MAUVGU1WdC/PuUPsBQhx4eKG0t0OwtN/t2xCcossxtwXlOE6TaxpNqZvP2xBYtQ2hDcE7NxKF0p94nHqsSWwYy0jFGKdhbHxAs3pXcZLYS6GIAuatRXFxHThecDJCCWBAx8dh3BasFnBD68X63dt3aUwvXrxwzn36ySeqauTGxoiJaGrbtmkk+MRJpela3+ToQv5rga5Iod0Yxky8JSlHRLT4/arkvYfT7wr2Sgv+Cy8ohRWk67rNZrNarfp+3O+Hpmnu7raPHj168uTyzZs34zjMZt1qtez7/TSN2Pqucibgj8YYte+bWbdezAE43+23KNR2jq6urlKavPfL+WL2uCUikJOoslPyTViv11PKyIbXr1/jgf/rf/2v2+32O9/+9jD0+/0+OLJiKu+9FICFCfE6LEFEVDUyh3QtPp8/+J9iktfx4TdVjVpgFlJV68GqyZfVU+FJVXawfJ6Qfso3uyfu7ev2a61fiWrRka98DIA4OIgn6BMTFL5UN4RyuEx/cQgDMOdwPSmpKmilscGlSkz4Qi0upZU1EPshBIQPY4xgVEXSAZkmc8qbprFOprUErn16uuf643Al4UUlnmSS2b5iLkp9cdNrdYTjwXPqP9VxOzxA7bJTld23vI9VfDx4i5M3pQMIq0pWSwXMoWrtHnx/JtA0lhcWVUqUSwKmGJum+fjjjy8uLn7+Vz+LKQYfnOOYJu9D8CFnglnBw3VPDf/7HTavhNYfbfv+++//8dGj7ebWueCC18Ql/X7UIhDPjFUFcdl13fe+973PP/9cVOfz+d3d5urq+unTJ5SgBZnoEBAjFqCpiRgthL33zrGyND48efLk+t1Nob9Ilne0W+eLOFZVdgfSXSpBZsr7UI181QJuRARsiAutVCBerK2mabxvUsq9eIloEmFVazMgxFQqX7GdMIr1Jjc7VFUntJhwLEyoWPPECZ0vSqO6OE4SE4KxXTd36pzLaKxpmrh09XFVZZeU3sZSShtN7qBoBwp4GnMsdLvdslIck0h0xMO+x/RZ1BcESWiiYFAOC+YjqFtbPxDWGPYmBDjx8FDHcWyargsNxBZaVcYYP/nks/VytV6viciy6V3XISI97Ps0xZTSGOV2c4elhVkwdYLwcizcQzB09n0OpvU
0amn8rDnAdVilcDvMOEulPQ4faLc9kev7DMBW1Qwy7wcWnTUtizqlwG5MY5GG3ATvXYfMcdM0w9gbFC6mKXg3jnG1WnRNM47jfN5N0+SIk0x9LwhXxnHyTUDGFkzX2+12ktR1HTg73727nc3a0DQ3N7ch+GGSMMb5fA65M45j23ZtO9tsNkQOAMYiLokL+zcdQFgEm8FEqohA5RKRalSMmmrJSWbpJLnCpEQRuRjBQnQsZ2tjnY7USZIKjWUnnIijWuMejP7KFDaJj2+dKGCqtO+JbqCKdQQLgIgQ55cK+qSqkoSIVPLDeBeICOmGJAfrH1dG4BeuCEobiChvOmaoagdulnZWZuHAVl2PADOf9M6pDaB6NCzf5AoXZq3I7EXq0ZBSAVWPFVW6lioPx8bfYkhcumJjSGGRa+H5qQf5wZklOsLi2edHTFgnx4MfpvIZYtZ2uZSS85RkUk2qiUh++ct/evn6q5TSvt+XFzvkZogIBFv6kI3w5zj+RH/6wK1PRIvF4sMPP5zNZt1i3vcjGMyZmUishKveXdM0TXEIjRONi+Xs2fMnXAIjr1+/Bv1CVn6a8YfMzFbVwwfKOi3m1dnZ2XzWAR1TrwM6pnFhZrDNWY5WSLlCDOEhx9KRlyoGf1JQtCuC+IjljjEOBa1jebi2bbv5HJfNXfkkJZVUmq6gqkFLgs0CSpBxBv/JsKzix6eUSB3i2MzsfeM5eA79bp9TcTHO2rYrrR1SBYf23sOCphzeT7APYG7DDwuFZGfWdpqEROFhU2mDA3JaVxqe26jaFkKB0Ha7Rf2r7f+2aUJxg+xk59x8NkPh73w+B/lD4/zZcoWatHEcZ213d3f3+qu3y+Xy8vJSq9IRpJbn8/ms7eA3SIF64YGh1RC+CyGkBL422u02GHOcg2A7FQ+JCwGh+c1l67kYBf/GMU5TYvaIHyLjC62PHoib7a0PHBqnlPphFxrnWbvGo62vFmAdBPqs7dgRk27ubhE8d44QJSaSWdt1behmzbt3767evJWYgmNLRavyOMZ3tzc3NzfMHmL95uZGHYPKn51D6gpB+N1ud3Nzc3t7y8yL1TKpUJHFVAmuOpdZhKw3cnw7x9awHfjVqbiC1zSRYir2/vn3/1rLdKr0sRwf9cl1svDkqGV3fS9XHfUX69uZHEil5h5byQoCbedKLhgmVUVdFtxWSLNUSGqx11BHhz0FLxkLA8sVuef5fL5arVDDjXPuD+DXHXamnW+BHNuzcpzslwIFkHuQugevT1Xs8GS06xQGVbAbvH5TCCx9oSfzFQ+gaW77wW5ktu8JE9bR85mdYg+qqkSZIUsptwmy5bXv++C9C77v++VyeX3z7he/+B9/89f/GcHM2Wzm2CWJeIByUYEyqG+LQf5T5ubPcjCzqguhJecuzh99//s/fP3VV3GcPt1/kuPt1bsnUs5gZkbkMBaCXxH54IMP3ry9IaIQwtXV1fPnz5um8QSjL4mI87nJ2cHC1oMxrqSSUtvNnjx58tvf/d45J5JizNhmTIccs3uyd6G07WRmUPGZ1c/F2MxenTCpU9FUynC1oCSYGUQNkAVTaTwwmzEzA+Hnco0pYyNDBzvniD07j3SNSnJlqVm8mo4FB/Zw17a7YT+VVgcG7ph3s9lijmfo9+N81rahiaX9eBbrxHGKwXnrEQJCq/P12W4/mHTYbbfjGE0E13QfkOCr1SrFyETB+/ls1oQwORe8V9Wh70vsNFdKxCmzbBIRZyTagXOfiFRkuVzCFEAQe7vdIrZ8dnZ2++6Gmefz+cuXL1eL5cXFxdgPIwm0JpL0zjlhmi0XGMB5E5rSAwp0mNvtHaYVFU2bzcY1bpwyjb5wbiIZQuDgmRNKcUQ1qrTzWVQkF7NDrIWc0lrJwnrAyCB5iTKe3bA/Pz+HboDOU00Id7SzNqVpHCkEF4KL0UGRLxaLlCbWzI/ovZ/NZv2wY+Y2NMjCijglPw7DlDID6G67H6OeXT7aD31SGaZxsVh0iyVWLEDd4xS1tI4fx/Fusx2nOI0wpFyKkgpbggpKHrKiPRGIZdbysuR7RljZZOoJUZzKN6VMg4PqlFq/mhow8VIrG9MWdEz2XsWBT6FGqqcObi27Tm7BxTeVCpRrl8WRCsLfOQd2yRLQPjwPLhKTzLpGVTP4ufRP4xKV7fse2H7n3NnZmXOu73vg1bHNJwS5SacUoygzBx9UFLQShL1E1c+Hl8scPoffS+bLFTK+kLnED7bswYaoOMLqI1XtI08miO/FHmxeqLJ48HMo6bBUoI550vVwvh4nI06ugB+OiZbuJSRw3LMQRQi5+SQa8UO+NPx0ygUkf/jkj3/4wx9EpO/77XabJKWUHHN5PAzAg1gqoX8BZvXNQK0/NZ3MnLUpM08phbZp57PnL16sztZt00U5QqsnstFIIqj+PmRWmqZ59uzZ8+fPb25uoNvevHkTYxyHWEqPju5rwHIuoGuPUjOVp0+frpYLyqBBdc6BXd1WA5dYinMOj2/BJaHSNhH2vgvON843PrQZyFDR4kD+GhpwGIZhGqMc2LKQ4EE0KalS6drrfe6Php1pfpsqKzl0o7SdKYVD38zSaZqSSEpq6EFzSbMCSxQnQTcVrrpQyAFNk/B40HC+kHjAD87OYtOIqpBC3eLWsDPm8/nFxUUWxt6zc/PFwofQdh3+zReL/HVsJOd8CE3bIpKYioeqVe3EOE1EhOsDFwo3GkLKgsMhhJu727fXV77JoKrtdpstht0OkgU2EzNHlSFO/YTIsMMFaykADxKNvVOpzPaFT9t7tvKtkPs0CExGqjKaPnd9VufCYrEahmxmSUnwW7jeZBxek4im0lsphLBer+FLuYL8cqUCGwh/nB/ahsgBrFfc+pZcduU3m80fP/0kRe33o/f+4tHlarVCpwERud1u+mnsp3GS1E/jOI53d3fX19fyUG7PRKEcY3pNmt1XkydHHfL1xE4PTrMklXQU5KyPWiXXHk/t9Z7I2yy5Koe4PqcW5fbJfZdXDgmI/AC+4o7mwrIOZlYYMaUe7ygDmoFpVPBoaPmQDlH0lNJ+v8e2YmaQmlkNvRWAYMehQMAdsNkHuX1f9RQw2unUmF600avjw7UStQVpt7PPv0772jVPZi04j0AdicLVcMSshLia/dMk+ZPjK9evKQ9VsgWnpIe88cGIO1mgZCFsly0WOyeTGrOGphFJMaUQmjFF8m4cx9///reXl+fn5+cxZj48OmyMYg6Q8jepzH8td8e/QvsWS4SJvHOBNHnXfPTRt7fb7Waz++qrl/1u75QcH00hHQwlBRVLcUP9j370o3/+539GkcZXX311cXF+cXEBEkEiIrX8hzI74MNVkW1hU1Tz+fzFixdffP5l27VRcq8bqGEqqRd7BhGxOm1TBt57KR0gTO5AIAJjgpihCXEp1IbkfOMDeSo2bKZdhJOENZ1K/kMLNZUZxXyw79wUo6j6nKoprZmILfymRCG02P+Zj8l79i5OkmLOQ8dIMSmzI/aiTOyRshVlZZpSbFRYBQSo+6GPMQdd4fJ2XRfTiL5sSYW9Q7ecNC
ZV3Ww2s3kb0zgMw2KxGMZ9TNElUtVulsfkKKpJxv4jqioG5mdWotVqtdvtFovFZrN5/vy5AdRhaU0pojJn32/Pz8+LOlQ0n7+7u1PVdtbCDLIAsqXnu9A0jUeVRSyNdFQV8rNpDl14MZj+mNaYSrcoVYUKh+lTNKWqctOE7XZ7dnZGJYg9DAN713Vts2vevXu3XC5Rv4S7IHnRNg3OBN6NCllH3/eqlBQF1n6xmG02GxV2jdvv98M0rs/PyPlh2HbdnDmvjcVidbvdvXr5ehpT0zTPnr7wgZUp+MDMrgkhtN43u12PspOmaVpRDo6ZNSllQgkAZLKpdlBIjpyrGKPwAmxzeJpDrV0cO8cTkeTwLB27NQ9ehI61u1YZx+Pz889S1ZvKMVdUfbXjOOfhXqbdLUKLM1NpyMOFMLWA6g92CVcGLg5EzlB3p6qTJkeHFClqynFBtIwUEdi1QHo751LK9nERszlo5CsQuI0YEdExNXIlonNEVlVh6+PZsLkszGbfqqO+Nhf3b6cVsMsVrlkqwWciQqWWFtClzYKdcHJZd0BjMRQrnkKPebIO80jgg9YjY5AeUuD1kqrGq2SYC7rSOScS8QF5ent99cknn0BAT9MwTcNmu3HsWDGT95XlgynhPx0g/a+DUpd94bEmoshssXj+3gc//OGPnz57FprONpite64yLrZD8O4ppbOzs5/97Gdv3lwBYnN1dQ1q3/urgZnBMFyvD0RXSNLZ2dk4DSKCFS8SQ8V0SsdM6DY7WNymC7lqg4PD9uFUegMjJ2r5UZIDCSJyxrbVy2LiYZrGNJrTLAXdIwUS1TQNWlBM46iFcwNSYIhTImrnM/yqRGPMxWzkeIyZcMosaBjacCJ3ux1cXnBOIXUKfQyvzns/SUqUaTowp/CxsJNDCHjgKGmYxotHlyicyPZNhZIw+msEb9Bbl4iEkrKwp9B6HwI7JyVUMAzD48ePnXNtG7bbu9VqsVjMJpmICOUKU4r7oR/H8dWrV9vtFrpht9tFSaFtFqsFNC7aMKuqtUlYdDMiMYIqrfjCMqh7mjBKmCOzshEwTGlSTdM0hODQkvkY4p49IVdSa5hEU/bOORBQw0dPKfXjmPKtxbnQtjMiN46xaXJuHv7QYrFgn8nxnQtTkmEaU9TFfNWELk4SJwEwJxayNiyVcRz/8Ic/INYdfOucU2VwRKCr8d12s9ltxzhBCmNO4RBLwa4DGVAXcULeHP7d8zG+Rjgctptz6IPrWBA5899wkVp0mMQwELIlC+0w+5UqxfzgU9Xat9Y0VBGPpML2bBQ6Ji5iKcFHGMPS8LE0SDZljOFtMhTDWcQ0FSSmJXqpFALM53OTeFpMdqq8eTv/XzzULN2CnYMANK7Wk0CCVOW5J47myWVx1JLzxEjC1w3CUo+wzbhdvP6TVOlns4SKhHTMrrbHnKt6JXpiX+gJUyHfsb/WN6gvnY5q7DJVoZBapa9z7je/++1nX3weJbmcmRCkgkQE7D2qIlqnNvVr1HB93I9OPxCvZjqF1R39Mf/LT4pAsfd+jNNyff6XP/vr1flFKn2fpALRmblkmowKjY5I/OC990FAwszv3r27u7tj78k7osyTDIkPAqAEzWMdreMInbGcL87Pz8dp8KV3EFEmF5Qq/ELFWcHX8bSYHXg/mZgwJk3S+AAGHFca7lIhzqUClXIlYkwk6JAISl4qmKApDo1np3k/zOdzsELGcqSUNMk47Lu2RXbTOWcEI1CcIYT9OEQV4CeZGQ0tiCiWbsrb7fZut0XtkAlBqLrZbMZOU0qIr0IH403xJNt+75pA3t1ub6eUFosF7AzYMah6Qj8iM0Hs8cx4hz5D9whfSi9sv1UDRVTISQAdguTabrequlwuzx9duiZzRvb9br5YPHr0SApIjT2KoMQ5t16vNab6H0DIWqDgIQRweC2XS8xa4/y87Vg0sEvjJFOkJPjnlPDrcjYHhjkOoxZ4M0IaIgL1bEJNVff7PV4Bvuzd3R2MntlstlyfS+705Zh9jHGz2ex2O/ZuSrEfB1XyPjRNG0IDUdt08/X5JWwLIqfsbjdb4PgMNz6OY+vD+dnZNE1Q9t57kF5Bi+z6fYwiIkx+HKIkGofoXeN8I4k2d7t+P45D3PXjdrtHB4Htdt/3Yz+NiOFv+/22H/opjkn6KSbiqDJJAs9BVIkqwqdRXxNrQIlnQUcaD4KQ8KeUlMhNUzr5OpAEphW4xPzdcTy2socOQEszo+vDXLFaMZgioVIKAeYy3HosjYchse12uMVsNuu6A7WOlGgZBEipGsrRFCxvVP3e3d0h43B5edl13X6/X6/X0PemkywA7qoaOdNb9cua8jtRhzhMvhmqtOigTJCABVmmyeaL8Sf7h/MLp4Kv6rM5JQWfFb6FL0pVgW36zoaRi5MgVfiaHtLZU+mrZgcueChDUgUYltXIH6plVGstOlZpmH8YBvXJaJU3ycTMf/jDP3/44fvksuoax5GJunaOp29CQ0RJonen8ZbqeDAQLdX//+Yjp/6VqGk6rAFm9+K9D/7z3/yXP/7216iPhHahwht8Egc+DGOS2az9m7/56e9+97vlchVCuLm5efbsmST1pZHImNvFtzFGuoeQ9MxE1HXNsydPP/nsU9GkBWvqfV6IZudCkSfNhUZExFq0iEYRYVKzeX1VeRILUahtRUfkmb3n7NRCx8SoKTUhTMMQQtA0pdJsJwEctO8lJlby7NCaUNKUCgraLDPEk5um2e52SdM05U4+iNJA9B92F/nsv5KHnnakKWfBE6tw4GGaYoyzxSJ4H4eBvQ9tq6WVUMjtGYbFYtGGoJmsTZxr8TCWeyZNq9UKIWiMBgJucNmJCLFxV/rI7vseaCAprCPz+Rw1WlRaX9zc3Dx//hyFNJMk56j1zs9BeMnB+xDc5eXjvHunmFRWqyW8ZCQsoPsNz9L3u+KFEFUmMkQhEUHOAo+KWcYEWQ6eigvShkZVp2lMzrehCc6nlBazOYlev33Ttm0b/Hq5iOMAgNs4jo6DKjsOcTqY/LD5gJQGdADvjjIwX6giuy6nitFZHovfOTebLbz3wxR32zsf2ta57W6vfFjebdtut9tXr15hVEkTSsViUrP2siRhnqYJo+e9d0QgJMlemgDrnoiobTMLW3HLmMjFOKkqLLkYo0aB8cHMUPmqTJTLL6XQczKh+JiIs/0KteecQzCACvRMSjMuqFs9LkAynar3POlagvOxQ6z3Qqn3/3rwsY75lqkC98YYgaAPIcC49AXkbOG9tm1T0hAOnZWhYtHxkwtJOKSBVDx6uAiw0/YWrmSsnHP3n/nkXewTe9lU8Py2qr9uHB684P2Bqu/18If/yiKdUvB96Id48grlFvmyR0zlOQcsJFVCvj6YWbOqOn2Nr3tVIfLOvXz91T/96pf/+8//Nx9y0eo4TU3TEBMlLX0aSFQySeTDxzfo4P/Jo1TTes9E7JvAfHn56C9++tOf/vSnv//97+tK2domwpc9MTGD5zxRYub333/xySefuNJd7u7u7uLiTCh5crY0cQWm+6tEWHkYhvPzdftVC
+UEJy+RejoU+TAziUbNLATTRGmKJJqJ0VmYmelANJOfXHWE9sUOV4VsHitaY2w2iHUc49gzdzHmQdDS8G6ceqDw2DGxsFOR3JIssEtTlCkS0STqOkpKpCIxjaUm0pHEmOBJqyogVICvYojiMGpMiSaNyBtJiimydk3DqvgyiDgaz85RYOc8zbuGJJJIcC5lc54RNYD27bpOJDJR23WQ45vNBpYKgrSLxcKiglxYTVS1bcPm7saxSpriNM1mM+9cYibV7WZDqpeXl2frZRPcNPZt41tuh90+C68QgmeROBSeMrihr19/tVosZm139ebt27fvXrx4sVrOiWi/3wfPd7udpIRiz3EYEHZMMcIaYKLN3R26MJEqMrJxmlQEZlrbNPvdbhxHSakJYbPZIDzLTrUUW7tSPj6O42Kx2O/3Z2dnXdcQIcjRpZSa1o9TH9g1zivntBkzhxDOzs4+/fRTick1bSZUKeXLgZ1Mse/78/Nz7z1YrJnZs5uGEep/uWzYhZvbjWtyQ1+0q/W+6fv+448/FpHlfDGbtzFG5xubCyLCBcdxhD4Aq6UvfklKKVFaLtYpTsMwMGWzQBJJ6mezmWgOoppnI1M0BYxbCGAqTN45UjcMQ5/Ab8MGfXDOqWjf98ZwwoyxaYlIJFGVZcwS5/gH1L6f6Ax3j1CCisVsGRNmrhkMbcOa/nZVGYL9VQpFvHNwu3N5Xizskr7wB8xmi1RY5X1zwPFZ9vfs7AzWs4HssiNRcZ7XD1//zJX7UVsVJmDrUZJCuYxrIhxygh07GSi650A+qHTvf/dfPMx7sesf60GnCmTOw5rUjlDr53og1H1dVoNNt9NxfvvhO0DTOPrFL35xcXHx4x/8UCShvAeADucpxtH7xjsvmmFkXx83rtXtn4rMQruP08/q9yJy7JNGIieanG+YOSVdrFb/+T//l08++ez167cIGxp7sEWgnJJ5sarKzpHT8/8/c3/eZMty3ImBvkRk5llqufe++1a+B3DpjU2im1S3KKplJjOZTBrNZ50vMDYSZ8ZMUrNNYBMk0ADZALE8ENtb7lJ1lsyMcPf5wyMi81Td9wiA7O5Ju1ZWdW6eXGLx9ec/v7n5yle+8p3vfGe73YnIZ5999vTprYiJFmoFsxIMaI+1XkZmGYD6vr/a7T978XnoYoCgCITEtesLASItfLYAwFgoJAtzBQMidsy+ZSVnWZnVLY0EAGYiqRCOZ0/jqamJYEY0UJnHBGbOFGFmfewAMTCaZgBmQBA1ADEoKSk1yzJDaZDSvLe1dYxqoWNV0CxANo+TquY5xRiJSUH8QQkRzCRnFem7QBhM1L9CgOPpmKYxhACq03kMIRCaiUnKAqBZpvM4z2PrWIAGjWoQzJBck86MgKaRCY0kzfM8S5qxdl1kZkcYI+Lr89GzXK9fvyZARpCU7+/vb29vA7FJns6n6TzmYQMAm80mqxBGETocDsc5eVcJH+FAPM8zEsQQ5mkSke1mc4inu1evY4y73U6zzOMUiOeamPcefKboeuWMU+EfFfULBmLxLAOgiWZLQ9cTYBeiieY5BeIkcyDuQsycmAmBc5oIu6v99tWrV+P5OE/TNI7DMKR5nsfJSTo3/YAGMZCBTPME83kYhtPhLue8Hd7vIwdGBlTVjnk6nebzmYhOiKYqIsfDnWtKrLzEXqx4fP1qnmcVePHyReyG8+kwnaec1USnlM3MJOec0zRv5y0HPI9HrVADEZvnLCLn48lN0mlKMZ6ZOWCp8OaOJZun8B0ZkJKEIF545Snt6+vrtrWdpCyooKmaImLr/DONpVv2NCWzgjhDVH8vv4WI7PfQDwWnJisSRBEJgVqkCi51w9qlazIZa9lMkwztZ0s0IuIDJ6TJ5PW3uJZr2ypMbWY56zQVDpli5Yv4n56noMCS1JPmIOIBNg/SIOJ+v3dT1bd5Q4T4aDt3W9PBD8In8AUqc5HRl3VBUsFQAOAVlYilqyA80qy/jEL9onMuFPMXOIQGttRI4Uq76AONS1/uIl5QUV68sC4e9FrbP07O6gq60uR7e0NES56ZS/KNb3zj9vb23edvOyOdqFKFw6lmZCbEv3vYVnf+5U9tGv2RJi5/i2mLAYdAZpxV2LoPPvjgt37rt77xjW+IiKutGGMpbzNDvZhDRPSI8XY3fPDBe3/xzW9dhaBmd3d3p9M4DJ0SNkHgKxUWpBxatZrcyjOz29vrz1++sFXVOaiheXMw1JrKKIwWgaiyKkLhikoYa5w5LyUE/gzMaLIkkv0gA4rBGRtTKnkXL/UpiXCylAv9YUopdARVs7acltsHCqKSiQjBTCXNU84ZiTypiQCEJYSOSHlOAOBvx0hl3Zs6T5ZqFpHsTSJFAREAYyjV2ItwUfXB8Tz3dth0XSDsd7vd8XgMSDpPoEpiJBaBGJnUcpo6YksZmDvinHNEIjUAy3nSuUC+Y4zb3W46n+5fvuy6LiKdTic0G4ahCzSdj5oTBMqFQxtE5P71q367ub+72/bDs5ubw+Fwf/eakZ49e3Z8/fp8f39zc5NF0jjtnr99d3d3d3jVcZim6f7V6+l03m6358MxqYQQTocjIprANCbVyd/3fD5PR1RVoQK3yVPp/utBxZyzzAUtrKrnY8nmikiex/FcuGMbxUeaprs57Yb+8PoV6lUkPB7uTLPjnNPMACDzBCp937vhcvv0GahpFgXEThHgar83sxDCbjM0petc1mrZzPb7/X6/924T7z3/rRDCmPL17c3pPH3+6tXru8M0zshsFUMjIuM45nlWVak9wVTV0wSOU3WTxUOjxSs16LrOEr548QoArq+vc9a7u4MHObqu++yzF+fzebfbVHUuInI8nJ8+3YBRStl5ctzTEjUzOxwOd3cHfwWn2SLC4/H84sWL0+ngcAFEjKFXVdECY2zKJoQhV+KXR3KyCJALqXV5Gq4M/dUXEWCpgn2D6FvFQh+rHF9LLSHVUF2+nYftzlr+i2LO2ds1uvZ1wKDnIPyXBlJZgny6vuliWDwUwpfu74M/YYU6ZvL+YFpNmTeE7h/88qsej+/++Pg7TzBrGvrLdPAbqCiLiVSh9hfaF8BpQh/HRtZntqshMJApgqmEPrx8/epb3/rW03/z3202G8mZQ7Hjuq4T1ZRSF0u31BaU/s9ykIMPClCtkLVqDD0ivv/++3/0R3/04sWLTz75xLWLiMxzDqEQMiy6s+i2DIbTNF3dXH/44QevX931fT/N46effvrBB+/FLsqcRORLnqYwS6TJrJDLZJWcs9cBayoQqlIKohI5xC6mlIgcWV22ESKoapqKJxcqpaXvQ6Ka/S0qHIjIEImB0czMU9GSMxNxjF6RDM1UN2BH0mYBszwnRDRRJWFmNQk9ARakYsOmSu1aaGaMkJgdFMhY8tCgJik1MQMARFp6V6iqB6AK+BMIkJDIYDyeiAjRck6BOmYmQFOxZHNOInIWOx5eexLXLRv3OBGRHP203bVM1VnUwGKIIqLehRzRAFBN5uQ++nvvvEtEh37wqw1dH2O8v78/H0+scLvfRyTT3HPI5ykCoYGkHDk8ubqe
5/lwd9f3/ZTz8f7e18/Pfvy3r+/vd7vd4XD0iN/5dHD00zAMr44vFEs80EuBDeDzzz83s03gvu9zSi7ZUwWET9UhM80pJXSBJUsRsEeVY8eqStyllJBsf7U1RffRmfn99993cBwihsH7+8LVdjMMgwuHlAqk452333KP8PbqFlbMySEs+jLn7IXaXRc6ppv9jpnVs3qAt9c3XTc9f/rs9av7EGbHfAHAOI5o4sFMEckVKOo/TRQRGyGRR1YLYk5EEXKtMlfVly9fnk7nrus2m+3Lly9fvXq13W43m507r1w7nVfXFlVxmuYQAoeSufzsxcv7+/ubmxsDPJ5O8zynefKbdt2w212FUGjAC+YomF0ykUklglh7O02RrJVlkQZvcuZW0sZV7/Ld5uPimxoXtk+a0d+KOJoR1p6qG3pPCbfUtV/BJ5GZ3ZBypet1/FpKjxZ2xnZ9rJgyrQiGL3qp9Z9NDrhSbxL4jSPzwFhZ/74ez/UtHj8G/HKaGx+1/n3gytplEOKLrrmwlovIeiLfYCyViy1ucQlyP7JBwKgofzStWPYkOgzdxx9//B/f/d7v/bPfbef7yJbfTQG+BIf1n+4o5hoAAKKBgbrdFxHx6urqvffe++lPf+oRFQDwpuKIi+faFj2UZgA2DP1v/uZv/umf/qlDM168ePHkyc0VX1mjD8Wgaspmq6az6+lUNTHr+z6djqKJQoTWZ75VI9jC70oEVD9HRGY0M4Il2uPX70OEuBBG6gOeHVGpxScroxhEUlsYUvlozAyp1BhwIPV+36AAMM9zIHYHpWwhESbSLGIZ0TgEA8hmjJBhYe/K85Q50mVRUMHdzMnM+r5XrBninM1sms9tTHRWh4q4vHZffzwd7u/vveedZ6pO8+RFWQTIzPNYOvW6jGBir7tHJNXSLOx8PH02frq/2f/Lr33t5z//+cd/+7dd181TKfCQlL2vsEzT1dXVSY4uL8QyqKZJZjPNgohMgQOnaW6idxzHw909Ef385SsOcbvdHk8nKTyUvc/4NE1m5OyYPjtSqTQR0aHm7s66g6KFzhqHYfDiKBfN1/2Vj1WLE/oo3d/f86qNgXvMMcbAV45UBwAnWACAGOM4pXmeI+Ew9PvNMFWD5nD3uiF7U5VEagUe77M2jflwf1/sjPMRgFLOigTI7hYj2na7ncYkIpuuFw6iyW/NsfOQSYhct0lZYI6/k1aLoepKxSttxnE6n8++SFJKXnjtI/bq1V1K036/Z+abm5sYo+PIfIH52kuSP/308/v7++12O/Sb169fH4/HELrA0LKSKSWAuN1GL1ggovE8z/Oc8sTM7qATLTuxSZ+1DCFayJHW0mClqpejnmZtv6zPeXCFJjTW51At0M+P+nw7LKN8XTGr8/JC4yvd7XauaK+vr7Fysze9zkv5xmJYrB9s7bDBm9TeWkcUnW2FZ9dWbRXWsveL7BV40/FGZfzLaN/H17HquzZaEACw5b8I3pADNQBY+D/9Tdfj9fj1zAyc/A+XwDcCgtnir1rJ+T8w8RDQmaS++c1vvvv87WdPn4pIIAbAnDPXgoSu2zy2Fx45xBdx3191vN54EJKBiEjwtCgxgaFpCOGdd975wz/8w+985ztWi8+cushhTO0d/TohhJTmwDGl9OzZs3feeed4OAGAl5P2fR+dkUPNyS6MlrHyTHWzbZmjitze3mYVr38VEbZSsedOLYK1BIwHxtvXYyy1CmRAjj90/kvLzYq6NNO8PoGCmyJqSEiArnCpchshFv7eUNoAo6O4YJWjcr6YJhGoQnNDCJZFLYcQu65D89ImBdGApcNuzpmQAodsmYgwFBu8Ge99H/0TV7GA6j3AHQMMVX9sNhvnomJmlbQdNiGEeZonYg/ZhRBCZKeiHCufrSt7j8qEVfu/EAICmOr5ePjFz37+4vPPx9OZACVlrjUhDnb1PuYm2nXd8Xg/zpMrgDbFiAhAMcbj8eh+AyICguN3xmm+v78fx3G73f7is09PpxMAcQjPnj3re3j98t71opl13TBN09XQMbNl6ULcDRt/fQ9Hm1kIFJDIQGYvsA6Oo3Gj8HQqBVdebnQ8Hq+urohoGIa7uzvPjHYcuEPnwkQtdtL5fPaCqxjj6/GkqlOac86n0wmEu64DVF2xT5NBswBUc5O/8zzHoe+6DimM47jZ7omw6wNMcJ5mrKBZEUFCN+YQ0Kr8ldrDys2FZjW2hIinIQFgmmafRHfp7u7uAMANppcvX97f3zur3X6/v7q68aFwQy3nHLqY5uRAaxf9TrxFFOZ5xo5zzk4slWtTJpHsc/Tq1au7uzu1fHV15dFpoqV0uMpq37C2VktvVCR1ny76tV5qaSSwzjG3JQ2rRCGsRDrXXpx1akotjcuWzWaTUoFfmaGDLu/v77zx+TAMV1dXHtVwPzjVMMwKXGKq1uDfD4yM9Ut9idpbdBstCFartRvr6zx+wcfXWUcF3vjdX/L4wvPNi30fts95dC9XohBEEsBDCwUuJwzAauYF3dpcrx7Xrh6a9g/KpbBoajBz8ypSHHW8O9//6Z/9u//lf/6/cQwp5U2/ned5CDHNmTGslGtBTr2plvdXyHI/HDgIq78UqiFhoAjg2tetFzNgDDlcb5/t3jf63d//2l/+xZ/FEOQ8dmSsAoro5Qo+WAwAQGoBGEQBbLuJv//7//xP/uT/41WwP/35L66vb5ljH+M0nRHRTEEW9GOSjGjIaKLMmPMcQ9j03fMnT37wgx9RYAJKBB5bzmlmiyGQSU7zGREddYXG4KGHWbuuMxFDQVAECrxOEHiSv026geNioRQIVd3pFPfEzAYaA7P3ObCslojJFFyZNUcE1IBVxOUvEIEH4QAgpYnRIcMoaYYqRNRDYWYixohIIipIYCCoYllFsjdYXXvGeZo1z2ZGyKZzMGMAjKUzgWnuuxB4A2AYXGPFGL0eMQIAB8o5D0NsJguA2wrEyCIZlVQVRJzwwlQ75pzzN/79n3kb+dPhHgCg71JpzdYBIRq9ePkq50JfoGpyHLdbWofmAPRwPnchTnNaGyieywSAm5sbM3v69K3tdjyeTqfT6ec//4SZQ9+dT5OLyCdPnnQxdj0TURc3zBwJAAAVttvNNBEiAhMipnH0unxmnl8fvXFhqCSRYDTlhIgDhfHuMKeUs2aV+1f3m81mPJc6H69qExAASCnFGEKISTIRgREzj+dZMnpwyHeWqNxe32y323Ec53nuiUQTVeKIopBMN5vNq/vD7c3NsNn1TH3g1/eHly9fTnNK3ge2fiVDRhNCBARDZKx0wZojcxc6rtwUZqYCquqGAq6alLjdFkI4Hg9m6qQuwzCcTuPV1Q1gTmnGAAr26tUrojBn+eyzlwAQu36a0/3h7Ljx8/m83+/3cdhut9M0ff7ipZm99dbb0ywpiVq4v3/16tWrzXbQjNvNnjAwRVSBqgnQ29EAEaEVyjBrm8KXunmtAC6BZQBXk2tMFqkCuqDEhepLa2itaeK1JvD/YkAPxXggkgKHGIhos9t7TbNfRDRxoCzz4XQEwhjD7dMbBRFJw9A5SapqZvZ
mxqvGaIiqbhxQlTxLUvwN8hnRkwhuiyMwU0wpmSIRcXC9jmaAuLBrQfEDG0z4QVWulXVS3Qw/fOj8OS/iyev2BHihXNY6vpn77buICCBiuSgxK8qECL2k2C7j6uQUsg8uDav4e5vLZbwuVaE9wnBdvt6F3eFqGAB+/vOf/+W3vvn7//xrHQcvih3Hc99t1kEMA4U3qN7/rAcippyJ4ebm5g//9b/63ne/Q6izr13XEi3bAQaqYorWWNDIzLbb7QcffPDJJ584Nf/d3d17771zOp2GoWMsuEcPJNZVW0qMtBL6OJq/7/spzYFjMGDGwI1RhwKBWVCA1lupFCkW9COqLi0lHk/W+kOA0jMclu2xbNry0zePgoICgOaLokMAAFD3mB+EvgEAzYzddzEjXUfb2i0YqiVRA+BZMhhhxlzbP/hpIRbmprNoCIXwq+uGFntsstiqw+TjSUQGCmIhBCdPtwrOXINFRZOZ5VR7TgAgYuBwe3vryrIVpzFz3/fO1hQCevGuiHmfA4/m+ZyGSsi8HTZl8KmMsru2CCQihoUw6Pp6L2LTNOVSJ2Ov7NU4jgRKoJELfWmLT4iI12tRKbgsLaScfzTnjFaQjg7Pzaqq4NhXVUs5E1HhFEtpmqa+2/gsxNAzs4DzMzjLKYcuuuqNMd7e3qa0QHPNjAD3+31rtmOwtLTC2jSMwUTEOb+6fvDWFEMX/bQIaqFLlQ6QV8yaD8R3qq0yVgp4LRaXpewhd3fftRbpAsDNzY17cuM4dnFoAZXPPvvs1avXTg8yTZNIoZJIKe33ewD4+c9/Po7TZrt1hlGnNnv9+jWAulvstBjMzAE1KQAAk1c3FRLaVTB2vReaEqkU1G+gPAQAs+VbRAyXekJXdDEPthsiGjV51QAt2KAS/mfLWzszvFvbIQRGSmZOleMFhFA7Hvp9Qwh6WYfTXu2BslgfLdzlCtyr5taFYWa6UpwX2KP1XVa3+LKEehvP5eugHuuGqra/+Mrw+M/Hx+P3xerxBqlq+uIfIpTedghLOBsAHKbrPMbrpC/ACg7elHd7ZzcxMkAXIgCkcfrGN77x/Nnbv/3V30xJ+ti1FJFezJD9chHmX80J/mXGqP0+DJucE1L8yle+8o/+yT/+xp/92bbvz8cTxaBYeFFLDbN5N6PWWgQc/fHVr370ySefOJrm5z//+XY79DEi4vF82vRDLkQBrb6tMEy52WjOgBDCzc3NZy8+BwDmoicCEjuNPiMiJvXWvW4kKSNqMc2syZ32auuF2Kap/tlKm0oFhUu0lGeoYauqXNVo6YeBaGhoZs7CcUF22BYVIkBhkHCzuA1+OwgJYKHIKdYlGSFHQwEnsClMmf4iWTJRaFuomXGl8mgluQputnKKDcMA2rzSwtJX5EjVZ2Xc0DlMIIbQ971U9sSCr6lC30k8CEsvBBE7jUcf6kAsBrlC8K6u920KtBY4tr1jamkWMaUy26hqXR+HYfvs6W3bUzlnAuFCfex4JmW4QN8gIhB69YiCdTG0AfFJIQEFCxBVlVMwBGelKKdB3G63HlImAo8Yz7P3hCCn4M5J3UZEZHc3AUAL90UHbulHUr1ADxUQHCEiehJRK7FDjPFwOt/dH8ZxNERG0Lpi2+Z6IEl9vfmqKG/tfgwta6Au7MVlkUoKUVpIIY7neRrTZhdTSiL28uVnr17feXbMfUQz8xZewzCM4/jp61fMvNlu53l+/foeObjbbWaRKcaIBLvdhpmdu630DK2v0BKZLfy7qMbytGjmab92kJtPqz8Xnb1qC3Shg5uWWt+l7egKs2Ko3FgODmiTxZVF3L3/7XZwoR1C6PvekXruszqzx8OH+KUPrHa8VMauNYRt9daKeNHK71INE9JaU5ZeT+u71KV44ZAsgw+KRbK9QWGvtduDTx4sywefP9CJABfx2Dcc63laa1YzXevgdo/1L+2WRIv3nVUIsN9ujvenb37zm8+ePXt681SzhBBSypFDU7j2q8Ghfw0d/CXnF/vDJZCqEuJ2s/9v/ui//d5//O48jb7NENGbNHjRrNuSXu3TXFgyePLkydOnT+/u7kIIp+Pp1atX7zx/LlIae1kJjLRdsehLIhIo6+/29vZwOp5PY+BYEW4AoGRFX2rO7iAys/vjiJ7BXUg/pDYz4dpFuO2RqrlNxar0XiWTGFsEqfkZVZELFBWLTbVj5fxqsqbdCBEBtAC2Vx4qVQwaEcWqONfGe3kmD7Mhdl2XZTazLsTS6KI0j/K2P0ugqckXD4sBljjkgylvtyvuYy0gWZSiGQB40iuE0HVdzjnLjMCeIJznmTkiE6J3iQzOzAAACDYxt8ZTADDNY60IX14/5yyp+O4iklVcPBIRguV5OswTAsUY+9iFEJiIeBVjFBURrqlT7njwbBwVglhYCFMVdGEz7WXwgVKwnKrgc9Ej0A8REafpXDxsU0S8uroKIYiomAbuAGCeZ0R26EDzg/2Vs8yH47LwoCpgZk45AQDzjCH6xrnabU7jfHV1ZQ3+CsSsYutt8gZJ9+AT73XSFFs7p3lXa2JRM/OQ+OvXd33fp5Ry1tPp9PLFK7dIpmnKeVFjPuDH41EVVCUlOZ5P+z1ciRyPxxcvXtze3h7ujlfX+2fPnt1e33DAYfAapAtGqmYQrBVwexEFMwEiAvOI4HJUzd3M37LvZMWSu74RrPKG69GAWnpERIgEhK5TqaZpm4ngnKDeHejZk6dutVxfX4vIOI5cCZ9jLI28isqEN2imL9HLflrDcqtoiQy9AbH1sKEFPNJEl4cPyNKqdf1I60VVhFVpAP+Fz4kP3ZiHpNPrK6+fp51mZuucK5T7uqeL2OYIV9GJdovaRwEfXPqxMtZakwCeXzRAxO12+/0f/uDtb7/9X//X/437TSEEUWVaGMldIQHSL62D4ZdQw7+knl4MnNBFwkAMH3z0lX/5L//w3/4f/3sMXUopeN2yYanDA9Pa0oSIUC3GmGYhoo8++o3vfOevzazrwosXL95+6y0iYo5mYoZ6Qa5WynXAUzhMvr13u90wDMfDqWxaQjfrHMZUgC1qTET1yQMR1XwnrqjVfVK5srO2EGhRMLXfcJHptcTwcqmtVqotA4Urnltyap/VUePtFz53E4jtjlS7u7SgSL11gUG6+pzT6HeJMfZ9iSUGZisrtfQAWOtUJOj6+OCmVmOzUGNfZpZSovpIrrbNzFdOSjpNU9d1iJZzRgBmy6K20BlqDCHnPKZZREULYrmBuczMeRsCMVe6hhCCdxfW7Ao4oLezIHTuMGRylisCRMQYOASOMYqVxCEBWmPWBfCqemamwIgopu4F+BqQVRNTVTUszpmZcaAQAjL7SokU3cza7XY55ynPHux1trXT6ZwkSy6lokTB9XpZYwEBwOexGTFUGxKU9ROC62kwmKbpPM5Q6oty3/eANM/znDXnjKrgnrg3CYWSwGtLca3Pyk/F0HdNhZQZfxT7cXiRE0qcz9N2ux+n6Xg83r2+R8QQw+Fwwgq59xudTicz2+/3U5pTSiF0TgT94sWrcTwx8ziOn3/++f5q1/de/MZEvviXPV
jkUdW+RKSwKOP6Up7vxKLJ0InyqXbiWtKCJWO0ymW+Uc/5dtZ6QAsyITrco5UsUkPAIbpb7/Gh/X7viG7PjjXmcP/p1O5Y+W4BL9ThpWx/w7E2QVQ15eT8Zf7h4/dqlv2j63yZnMcLT5IfPM9jlflFjwoALvyXb32BwgZzV2t1IwQzuyDigEerE1Zm4wOTAYqWreOrZnjhXPsvhEFEOGCzzohonnPXERH9h+98+/33f+O3v/qb8zyb6RrDvbbUAL+EG+vB8WuGo79ouHPOIfA8nUF0M+z++e997Sc/+cn3v/fdSAyIompWextIsixgZQTIhCioTZLk+fPnP/nJTz7/9LPNZnN3d3c8HjebTZNKWkqelniUiHjnhhoGAWmOrChERETiorEcYWCEAEZUGzNYWZpcyoUXDhrfVNM8MnOwErltM8tUhp0882BgqnYRnFkOgwJwaDNlDTdQxUz9XOp42hJeRg10Adp02S2AoMUEbnFmrEa9v3uHndeJam6hYiFEWnVtohVKAglUluaJrgBEJHJApIbaaMIx9j3VgD/UUDMiBrKh6wEAwRrTnmd/nU36cDjsr68dWbrZbJLk8/kcQuhCseJT6YVeIntY21B6mtAkNQRpUZAVKFfScsBW7RJGQw4pJWf+ai8utY6oxTytrm7HWykY1TXvA9L3G6ieR92AgIhe2OoPM2x6TAWA6hfvum6g4XyeqGSdDWEBb/rzt2yiR3G9YIlqT3WyQuwAHA6Hw939cRgGhQKJJ0yIGBkZec7ranvvvSoGBAB6WQlZBlARAPKqofoDbd3ElKput9uc8+eff06lznW+vzvc398TkRi0YqRlOVU1nMTEEFSJWQGmacqljeYcY3zy5MkwDMTQdUEqj3Td4NCuVuMTS5S4TYQTaPtyXkv39vCLkPQLropzHiin9Vds5XZXy5gM0QGM6+HS2pbDW/8S0e3trX9+dXXl67lxDLT2D+tHWh8P1M3jo0kqWAzo5TUfq9vHKrk8+aMr13MWU6DeSB3PtRZiX/KE7UnhclH9qoffKDz4qF1OL1DQS4Duwfub2fpl2xXWJo+HUFQAGtNKoJTSsN0cj8dvfvObb7311m63y9NMtZ3ZWgd/8YR9ERXlP0BKuB0hRAQgjkBAQE/fevt3//nvf/vb38aenda2pLIYQUBVA5fla62jCEsXh9/66m++/PyFiPR9/+Of/GQYBm9ow8wP4kiuxVTFd8kivMTcjQvEMcbgHcIBkYwoAFrKJW3v1FhNvNoq+IyXeL+mcpYxr/mCpor8/BazXVtIAOCp4vXz55xNBOqeJHLASLUkVt1Vm4I0W9wm/5aBEmIr7ViuD6pmqjXAVYYZiWgzDDHGqUJem+fqOkBN1u/VpKEDjgBANTdJ5BYVY6mgaEPEzB5ctpIjcB3DHIMH6Mxsmqa95kBAgGgSCE2ygmEMANa68Pb9pvn3WtFqqhqQHshorgVXiKgpI5l7O+7HIkSvWacV0KYBxJr2xTqA57k0mVi3uAYA73zVEhOqCtb8WjJQ0QxYRiPnvNlscs7unM1z9jWB5qX8ZSV4+H0ZT2bXtQ7YcXxyx+SkxKEfEHG76Yk9LB/S+ZxzBqucR5pNtRu2DS3c3pcMs0pbnD4CORUTYX3mA1nZArYhhM8//3yappub28PhmEXO57MZns8ThRxq57sWQ3ItdT6fQ7d14zCESATnaQJ1qKm9/8F719f7foie1c6SEFGlmJJlf6GbuugW83ozFtxZdUrMDL6A6uHyvcgMRUxX2eD2uk3CrHd3TcaTIYVVd3CtIGo3m8ZxZKT91VXf907n4nXhVpuC+0Jtir9okEfC+5dUV1K7ATKzWgYAp7VvV1iG5QuOx4oDEWtA98EY+qJdwKrlBAOgB7fQB9+1B4bOo+dZi8fHzxa+fDgeWBz6pgbRy11sgWK1e/hUlVXbORdr6rouYDifz7vN5m9+8L2PPvroX/6Lf0kxCAhfqlVEBDB8qGv/ThboX1kHv4kvGgBAVLDkPrvpdHz+/O3f+72vfesvv/k33/9uxCL11ATEnEZx3bUGJHdd7+Lj/fff/8lPfvLjH//Ye+mcTif3n7JqJDJb7D4v6nIxDUXBc9NbiMjMHQdmzzoQOY6RChLDkzleHWSqUt2sFglk5tiFZk6tzdXmYq6X+Prw12pmlpk1LtZFfLsFUR3QqoCLjFgnX0t2VkoMFqryaCaCe/j+/K5TwQgRY8ceBTUzj+TnnLUy7+AKKdZekythSDsHADabjXdwyjm7n79EZc0yqNd0uI9YHgnQcSgeS0gpEUmP2IVIRPv9vvFM+QmG6MSHfpfG6JRSCkTBO6WbBSJGlDxj39fn0RgjMIHbNJ41iCES5xVxYK7PRrWsyN89pUSBS95Hq/seQqQyMj6kaBCIgex6f+Wvad4QhcArpbWix0MIqTb8JqJxHJkLoYe7j81DgtXeNzOPSfozO5WNr20OoUO0NPd9v7u+aasuiwGAGA7DsNkVHtDz+Xw8nc3McLGr2tI1M6Koqv6m5RMGooiIwQPXK7DFep1jDUucz2fvqQcAajbP2RtL+GDCyl5kZq+c3m63p1lCDDlnGcdC9Qx6Op1ub69/4zd+g5kDsVeuU6FwqluJ0GzZd5d7amm7S7iA5tblMet3L7u15KSWWqa2Z1cyvJhubYMvA0KIsGycNrweSc45S8oYwvX1dc6ZGZ3DJNcGcU1ViyTPxUxpZmcXXkmYN2qjB4ef46aq04aX4D9pIX0qrGGERBdtbNuButb8b7zXY+Vdo3SGWOwhQFBQrCvnwdfXT7v8+Xc30r3Qqgt2dD00b7QdWoSwrdoHchlrAhnW0998xNo/1S9CaETkdYr//ht//vbbb7/33ntmpqIxRMdeGZiqepOGL+2S9J/w8LuDISBhiAD25Nlb/+P/9D+//n+8fPXqlUmOzrwYwpSyf6UJO99ELogR7YMPPvjZz37GzH3ff/r5y+3++mq70Txl1cjFd6RaTxJCyFbJF0PIKTvJbRteM2diJAATEarJeN+O6OlpAKvdQF2SlvFX9GUtlx3roArrokGh5rNX66E5hdUaW+ri2+u3n75YVC9aHzYYjsvxd99+x3uLNoOg3U5ECAp4xxTFjMgQcZ6yyGRu0RMzF24QUYWK/9IKNzOHrXaF6cXhnbkSSA1d72om59x1wRtN+rdi12nKrb7FZTFSeZFmnrtg9Qyiqg5dn1Ly5owxRozFiW9gzj5GF99uKYVAIQxE5HA5H0XmCGRJFNW57EFECU3VQs9ew5uzMkcF8VZIzW9uIUSFUksaKnFpm0oiAjWVMlzezkGz5DkZApfOr2cz87ygquZqfPvy6LouZ6l5wdLDrsUGmsZ1DshxHLuuG8ex7/vtbldkqxkzd4Rjym64nE6nLEYpHY/HKRVyxKwFDYBgm82gBgQaqBPrfY05CMjdF5/KMjUU1w6l+w1YWVPac7pCffXqFZXmhvTkyRPXvj6YYuqQfdXF46xDNJNXqa5yw5s+TtP8ztvPY4zX11cxkKoSF6iH5EaSXK7jYAm3ydqKtdY3V2eHiHspT
nvslhfwETBFD9UWfvVqAVOF97eNvP5WEwhZJc+p64b9fu/r3P+3WWDH+wMROfa76zrV3Pe9QxlcQ1drmyhEMdWc6DK39WBrr/XWA93mjTo8g8OVHxRWSq6tYURE0qaCynTjYlis7/vAhXigyFrElxoNRDFryu10ZSqZmXmRG1SaCrW2MB5E0f1bvkOx8oW1kNXfgYJuL9xGCr2m6BHYqiUEDd/8XUCFVeu9pLmL0Snaz+fjn/351/+Xt//vABA5aC33LC0HTAnXbvcvqYl/HScYHpkw7U9VAyMI2PWb26dPP/zoq5999vW+j6qiSV1GxNi1iS+/eA8RAxF5+5233n333Z/85CchdC9fvnznnXe8VaqZZSs5OVtFh4hJcgmseYNYx0z55kQgTyu69+zIFCICMy1wAMw558qbShXi5OtAsppemFC2An9BBWUAlEJZXpV/rIPYzWXUFRVRoCWoDuDtr1voplQueRLUzM7TmOfUjID1ICAiEoiIZFOdFaHJFFRDMkRU1qBMJHC5rx6Emk8nC5GdIsoHyv3103gOIWhuIKnCiEtE42ix44DUmtWnlIAx1O4uWL1zEWkdghHRG8ECaM5zCNy2IpoRADNvNpuc5/Xgt7eWmmJs6WEvVDueT+M8oYFbKl497y5aMReiLwbzQiAHZFBF2rskRcQuRI8AFw9mTm4suucHQODbT8vc3Z9OTivmr68Vm2OVkS2lFCO5unLN19a/y27/0KkfRVWPRyLq+96jAknNGypP0xRjnNPskNpNKKHjyKGFHMwslla+viU1ibk1U8qfKsjLrDLNXEp8MnAW62WD1w3rCzjGbhzHEKNfk5lz3YN+ZsmQ1Au2UEeeJ2Puh2hqT5/ebDabrgshUmAKIXjBoq7KgQjQCqtX/eSSLqOoQA4V9IdlfxG2ZEoxc3WxbgEQgBB19cnymlgISbBeXtzG8gXvZc1QgRdcuSTH09mVsfeUVFXHK0jFZ7VUDgVe2+sPNOvjT954wpL/vtSaD77VYuyPL2WXXunlXLfo2pL/WjRxA0hdXrZdvI3nsm1B/ZpEREi1n99ynSYn20pbG0ZfqIDXT3+hgAHAldujCidHbcDl+i4P+qi5YQ0DKnAgwB/+8Iff+Ms//9rXviaCAOJ9NA2MkAzs1+3N8Oskgx/3TSIkq6BYU6AQr29v//E/+6ff/e5fz9M5pbQbNvf390M3MDfExMXTBjAjUtWPPvroRz/60WazM7Of/vSnV1fXPmimgLSQV7iMozZzCIfDwbUgMYYQmJhKsYcQQdd1nmFyIIeZgaF3RF0bd3CRZi5lcKYOtSpt11xSPD5ZKwMcAISwBE6av9u8DSJSAM3Z19lax9QoHABA28Dn8xkNvKqn+cfNwCeDUovllsUqaU1GgQwRBdEE61x7N+x1cJJc/XSxJ2RakVe4JvBgAJ1L94K2/4kIk2c/ikQLIWyGjYfdPJrqnYWY2RMrTklotvBROKswVUy4WwCIGAKtJUjbnFZVb4lPxFJX2nUdc8m6EZGXxKiCi+ZVvME8WBOrxmVmMW3+d7Pk8pzWVbNYCp2xiXUFawFYDxucTicirozQc4PLSm2p4jl7Bxh61w1V3Ww2/tPNQV1R6jNzrK3gp2nq+s2w2fkJChWOTihEre7L5ZunjUVEKlTe064istS9VI+krecHOng98m2Rm4Gq7q+u3CO3Wge/3kRmbbF5hGlUVSbigGRwOJ6ePX1vt9v1fR+Im1Rs56/3F3lvswciqK4KEQHL53MR2T4dMUaveijq0xbv0Jrx3UYACqtdEyxQS9F8qfsC8w/LRlgVILkmPh6PABBj9DCPT67ViJ1HlTx0hJfady0GH8zCWne0t15EH5Gvn7YmHzjB7Wh69LEyXo/z4xGGSy2LWLzYx+c0sXZxwaURwOI5MLOAYGHT8DMXPgMXXMTY4ClgFmh1v7XF0R7NqS2xPWgdCAanqsLLL5rb3u2aBFXQ48VAMLNKoWtwuM3Xv/71p0+f/vZv/CYimglhMDAPPv86ILNy/PqArLppSp1NeWZVVem7zVd/87f/q//6X/9v/6//5367TSp9N0ANZbinCQ/AAvM8me2vtk+fPhWxGOP5PH722WfPnt36ZAiUrkzgeRdbaM2nlI7HY6w+Qc45RALAnBRMuu6iIF0BTMyDKLD4oBdczeUFHxmPZubfQ0QEUikMAL4TvGKpiST/ihY/g70qtf0PhcBv6sfSbkoURCZXXa3MwGqErT2PAznWmWM/jevhRcNQY0dS6vfbQl5m3/UNG/uSq2dWFGhgFPTiLuZitbiTx3UiiGickpVE9UREREGzHu+Pt7e3xGAIWYVg4a1t7OxN61jlnmxSo9k3ROQlxFCFgrtiTtarWm7ti5IDcwgiydRUFSrIkS0Q0TRNLXjerJZAXDAnWYAshOCE+9OUCoWIo9BLH1wbxxEiT9PUdX2MMcauScmu61CNmWG7EykhhzHNmnMW9dVLtdE9EXkPiZQzM4fQufp0XS61q4RP6zzPnuhthVtee+3RSA6xsYnVERMXRDkrAKjkquSW5ddW17ILaqCyoSv8mjlLSskAnMH7/v6+3wxeO7cW8R7FMbMQmGgIxNN8TilNKcdI77//ftcFKl6viUjzrVp8qz1PW70E2NQnVepA4AAAKUkIS4rKc3Nl6V4Kx9pQpu0gAJSmg9dLrokm3zWNOWe9UN0wcsU/DEMIwWnbXTe3xEe7oxUoUPnkgQ6GS427/uSBnmty5MHT1k8uUFHrkay3M1vBUwCWW+AqlXnxLTVoWeEyiFUMrgxlddoDKiq2bQe4kGyLTdB+b/q7AF/Mn5AWaCu86Vi988NRgBofX6/LMg1grvkREWtu0v05jx8jotX0gxtiXur+53/+Z29dPX329JmoKGTCICpAsBq1XyMT/KA+uE3el7vUTcHMTKX8g4kQsxnGoX/+/Pkf/MEf/M13//rjH/7I84giogJqSkRQuFmLIHCy3M6Qmd97772//uvvurz77LPP9vuNl9Mse3WVS/AhOp/Pac59N6hTwKc5Eg999ECjm8bmBpUZIlm11hERiNfmNxAZmpk1vOta+6JRSgkJAVHB0MAKlzoBADLBKg1WRACYqnpPoTKz5s8f1HHgdRVqLXZcC6DmW5RXqCKAaymOLSIpIKJ5fGVlqyYxVLXKDdSqIDhwCEG10D537glR1QpgABhil3NqMrGBsxAxBPL6CjMZz6Vj+TzPpsULCSEcj8fD4RBCqKwRJWAgVrC1QNiwWsUfrcoYVpGotc/NHNKqubKpAcJm08cYS3Ew8zRN7lACgLfcaJrG3f0Yo7cwajqm+anQrDGxptgASvt6Mc2pEHYiEwDM4+i+LwB4xrfv+92wOR9K211EdFYZrPlFM9sNmzHNbWavrq62260BNd5HrlU97777rqre3d3FGPu+V1UFuru78wdzGyJ0vY9Y65DhHNp13LLjZgooXavFaQVO0RbShRlK5bR1wNNKZLaAG2NkdwoBgDmuT6tXo9jTdrPp+/5wMJBOTZ4/e+v6ek/Q7CoQEc8cwZs0kB+lkkfVdz0j
gVrOmbD0SXQSrjWexsxM0eOfzXD3Jii2CmgjItBD4mJbFSX6OvR0mO+ONpUxxrtXr11ceJ7eZ9OyTGluU1mK8d4UpcTG51NTqlUWwVpvQjnFxJa90FSyLoQ8Cit6RLskyVkP71pzLQ/zBS4yNPVMi1Zun2PxdwEAfHVdvGC9xYOxffBUTVHSyjdbFDA80sHrP7/ordZO8PpzMgCsvApVd6L7x1UfO0Wcb40YgsNYPvnkk69//et/9Ed/dHtzKypWu87BP8Dx2BW2L9bBi5pnYnAn3tyhD0CC3Kvm3f76j/74j//mu9/rQkwqIXRrdiFbwQEAHV8zK+AHH3zws5/94nQ6A4B3lysFoCDWrJNKagNAqnA4HJqmDCHEELbb7Xa7BckqCaD0CnRa2TYR5AIpdrYGa9TEVQ1hFK+9hV7dciDCNd0dIkqt0mnWp8fBgNhDlq4adUUap1WaBKw1hZbd+XMLer1GG8CnqQoXR+1qhvkiyq0aiL1Ol4i4RoRcVWBt2YSlMgpSksYBUlCdHjiNYQHRkFFtN9vCRDmrGELSk4yqioE77kRkmiYAvLm5LdYJYheHrvf+xyNUfJZCibq75iCiLoQKLXlTZkilSBm3CpAJycyc3dB7PIzjuNlsXNe+fv2aaryuWjUjr3DjIgqAMfYevnGaiALnyQIAkkE1RdMQAiHHnmPf+Zv2fX8TgpnlnGPsobb8i8TTNPV97zHtaToTATMxd9OU3Kve0nCaxtbWt4uDd0zy+Z2myWfTex5vt9t5nj/99FPy3JPbMZW9BHkJCfjcTdN0Po8NsqBibTnV8L+qLO351odfiowUS3l32yBQMyMG6oP85MnN/fHU3MEHkhAATDWlaRi650+fbbaDpHx1tZOU+50/sxTCHC0prfWM14tc+DZtMTAzY+nP4duBKv4jhKACXqwIq7wpMxtgizpQhX3QygiuKYZFGHoGN8aYs7aKXhcL3sONiHa7nacSfLFN09lyqW90y4mIagnMG1xeuzQ71nb/+kwXG01LtTVcJarYJQ4cEVfpHVvd60IftdsVQfIo/uzuL1IJ/q0Vp4K8sZKqTVazyaACS3HlFrejjWqTjf5nWD/KA3uhfdLWx/qhv0gvls/dbri44MWIt4o6IgyRp3H2ufze97632+3+4A/+wLGdwffeG+8E8CtGmP9+4ehlYooO6rruo48++q/+9b/691//s6vdXkUDRcCFOIIrCT4ipjmHEE7H05MnT/7xP/7HX//6nw3DMM/zixcvvFVZ23iImE1B1RQBLKX5fD57Es53b9d1wI4kR7u068HnmBqgrHzSZqFFrlyg+3+pas6zW75dN9hS8w2NmctJ3q3mIPxgZkNq18kqWPEIbUuIiFfzmBmqQdAmC7Bmd0REoLTqaxYMM/k1PefWljiv6mKxUCN5/toAdbPZrDeAXRQ6MzMxs9c2TNMEq5BgE3lYE2YucQAgxmjZ2Sp6DGxm2QxDqMXaJZwwz/M0n0UEsaBkVTV0g+ODuFJRmhlUZDIRtUJqf+zopSyK4zypqs/1NCZVVbCGg3Vyj3mc+r5HRP8JdYc7IEsrJar/l+eeb25ums/nTs/5PN3f34sp59JrEgMfTmeHLquqtzNypPrxeNzv97th4+uQSlK/hOtFxMPLx+Mxxniaxvv7++vr63nKt7e3p/HsVP5uVHla4e7uzvFfx/M0jtNuFzzw4FmJ7XZrFd7cHrsIfZGUS3Q9pblC2Rd4oF6CdJbd0RwsWOz79WmuS7KknAvoV0R8EtvSwpqT4oDM3Pf9zfUVAAhSjHG/33d9aMuvisSSgbbKKrnWCs2FbcvVF+o4idRiZau1wo5r09olF6vvDgBq0BSwv2aMkbA0t7Ba0uZyHQCICQAcrN4ew58qpXR3dzfP883Nze3trQckrq6u/b/amVgBnlaG8Q1e5mPDZT0jTZmZmdrCTtNEIhTte9F3aG2vND210utLpNNWx/or62d4QHKytg/Wq2g9QevHfqwiH6jOte6HlXr+MhT0+g0fjNTyiG8MTTfdi+iQh3a0q3mYK6U0DMM0TV5F4JmGb33rW0+fPv3df/a7iGBgIuLsFv9Jj9b68Es/p0aA0veDd8T84z/+4//wzW+ZWRc7EcM6x+X11wB3oidPnqRZnj17ttlsxnHcbvaff/7pW2+9xcyl79VF+KKwGaSUutj7iHmv9WkapxD3m8FA5nFiRjOzChL2b5e9KotG9L3XftdaLgUARIE5Orqq7X8iB9sZGjiXRd0D5rQVRGR14qByPEmILZYLK3fcd6mkiVZYM6i2NjNrlSaV1KK08VlrZddkjT2DABHLsiYDAyEMHodrd2++bA0Fe+Q2Oh6q76I2sJJIoAW0bJVhIOfs1O7znGDFnuHSzVUv9l3PMcTi97SLpJw9w+IYopZgcz/YNynWoLSqQk5uXQ0YRQu7NQxgiq6QRGToehFx+6Aben+SvOp472PlOsM1mWvQyGGkPM+ZmYeudzpJANpd7Zl5PE/39/cUw253BYix63b7fRdj3/f39/ebzcabJF5fX2vKjhDWUrKcXevv9/uUxJsZA8A1wle+8hXJriNT7EsNdIxxt9t13XB3d8cxvv8b++PxeB37d955py3L0+kkXoyn5rUurrldXLj5SFzKx8/naUoppaQKIlL41VWaun0gfH15F4DBQ0kqIcQ5JwA4n8/jOMZ+aDuo7Q6sztl2PwzDcLXbxhin8+jJgq4PDlDPeW6ryzNSbkp6pxQEEM/8rDRNyxRACQjPPg7rDeULSSuHRsvXiAgaeSSrvbjDozyM1zZC2zUIhohd183z7L2KreYsfNbcWnW7NhLv9/vj8dhablDFczFzVVmLi/l42OGx5rtUeL6GW4TMHwMRfY21gaKK7izFgSsFvJ5Tfx7/nj5KgTXBQoie3H2gets5689phSTFVclGO81WbvF6tdgjK8TMSrcsM2NGAFQtVrZHl9vZRJ5iAQ9TNBnEWKPj9YpF9il4wi3pRcibiDxH6Dk2l9pIJKoiSkYIyh19/c/+dNh0X/3Kb4lITQCvPfpSCmAgBG8kBvmi4wtTyF8UjF5xgGjbAoAxq2z2z7NGDK/++L/97/6//+//tbveqY7IT0SS92Qmg3kuqSyKIavYLF03KHT/4g/+xf/2v/5J1/d9t//ZTz+7vn6CgGrZEKb5TBTASNW6fvOLT17OCbo+UmAF3HRMBn3XMVE2DcgcQx87EVEtBXNJhYg4sCiUeGlKMfbICEDjNCES8RA78jkLCEYllRu5qOc8Z1fPzIwMWdKyGRDNIIvKnGLomNkJOCMHDEsjowaWJlrYnZyZaKErUWeepFZI5zetVdSmurCIq6oR1jBmamnssrgdE0SYRUUSM6MYMfQhNo+hbcWUxhi7YRgCFq6Ymu0rzmvVjsjMWWVKiZBjjFDDfb5xzRQAQ+AQghE6dlfEiFBExnEkRSJCAZmFiWOIIjLlZCaY1WkxkNTLCwr+M3QioiabzSAiYjj0Wx/8eZ6975OrVQCgUNriAmFQB3aByBRj3Gwc2iaqGiP2/T6EkNK0HQp7xqt
+      "<remaining base64-encoded PNG data of the rendered example image omitted>",
+      "text/plain": [
+       ""
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import urllib.request\n",
+    "\n",
+    "from PIL import Image\n",
+    "\n",
+    "\n",
+    "def load_image_from_url(url: str) -> Image.Image:\n",
+    "    with urllib.request.urlopen(url) as f:\n",
+    "        return Image.open(f).convert(\"RGB\")\n",
+    "\n",
+    "\n",
+    "EXAMPLE_IMAGE_URL = \"https://dl.fbaipublicfiles.com/dinov2/images/example.jpg\"\n",
+    "\n",
+    "\n",
+    "image = load_image_from_url(EXAMPLE_IMAGE_URL)\n",
+    "display(image)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Estimate depth on sample image"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "<base64-encoded PNG data of this cell's image output omitted>",
"iVBORw0KGgoAAAANSUhEUgAAAoAAAAHgCAIAAAC6s0uzAAEAAElEQVR4nMz927LkOJItCC5VkGbbPTKrsupcep5mXuY/R+ZzR0r6dNfJiou7GaHzoIBCcSXN9o6sUgnxsE2CAEiCWFh6A/1//t//3z8OAHhGPAUCRJFDYCKCKADSKfsNgHIZJgyFiQhgAhEYIAJTVTSKpCZGbQGIqUsQ5D9L4XQEQEx/pR8xdTBVspCmPwwCYMcY4xvzF/UtWOvNqZhuQZAfoB6RXC4V0IOQprZB5133KP/WgwwionJH+U4Zerj6gfwG8yXlHu3NlvrdvVN9xB5mPzByW+7ayZjxcvb2Kon1nzwu9ZXiW7SuRv/tWEl7xdKe6q9yl0tzSge/iERBhByS/nvI8cDxwPMnHg96HHgc9HziceChn0vEASB/OqD8eBjB/vS/h2KXW20RhyAe8oh4RImCI3Utt0LEgXbGHmjfcN9x30X/3XfsG3hHCMQ7MYMCEVE1qu0ef8bjdzz+g379D/q/f5P/64/j//rj+e8/n38/4h8x/iFyQIcobYE/tvD9vv/zx/a3X/i//5P8j7/JP/0tfPx1D3/Z+Xuge8BHwD1gI7kzAkkgbCQ6Rw2nMnKvqx//+bjYWStfPg0tr2VyeXIXkqtBL+dcmIbHXUOpBnK1pWvLcd9JK9AcLPfVHWkkTxdCJIGFSEKIYYvbFnmPYZNwF9pAW249PwiywcX5T6b8m8ozZSpH7K2USurH6vvU/74y0VjZCZLJ+PsUREEUeQqeUX5G+RHjT8QfEh+IPykeJAfFI3db748FAO+y/yUiYgOwMz4C/jjwM+K3pxyCp5taPBACBpDpYOqwAN0cTQAgZVKmhMdeApEAERJ0CiMQpbmMQIb1BEgGEiEJ2g0iEQRCBAJIZyUtJEKKW/Yxz2CsgViPScA5EmtHm8pZSFu0yz2QMFGU/O0hDWStQe9CD1qd4XL/8+qhQmIPvXpHfpFh6Ouht5xN77EgNEbgusBdVM8w/anlqSt5KsOvABnMwujU9eovYr1f0vkW7Wj/0qMgEGkZvd/o1o4qk+dAURD0Q0qLMwRA9HXFcnsRHEUiwpYXqBGRwYIQM3CKWzDobwJHHDx6cuzmtmEBxeCIeOARaFekjziEouBIvUcI2Ik4YA/Yd9x3uQXsir43hADeKQQi/Y9BNhRT06JDNAbiTThI2GgPtDPtgW+Bb3oXgigSmTaibQsfe/jlHv7pg//5F/nbL/L9O90+QvgIfGO6B9wd+u4sgRBIOCNcP87zC2oxNRdrsdljrf1pKNv8tj973NVKeqBtSq5xt0dcX9LfzinoNkIkTEJkP8rl1CzhmAa4i+p5EZM9HfLP0UHvAHfdHOR7lrsx//rnlDH9qOeavrREARECgYUUCSPkEHoKMThAGDgAFgYkav9FfwAIuwCgDdtfd/njoEfE7890+4AcwOHW7IK0spWyxJWGnejMrt9PJrsgISZ5AhuT6KvqViSZubrfmYg7NlyxAXFziUhZnQx5sGecVsxw63BnKJcriJVP9qNzyG7Ln12jTRn/+ApfN+o8ob++57mkgV9C8XQLIJ0F9S7I3VEZ57qeIUDWhLhaRxsqH+lZ4cjrCStgGhQmP0GVCYioBfuZvESCFzIcdb3E8eF5+fr4gvKiGYTV8WbwjNuNTutziBg7fMhxIP7E8QM/HvR40I8Djwd+KCge8hBEwRFFYfiwCgmBiXUWAcAIRNyQY9f64f9UzDMGLDiiPEWOiCgSARAxg4kCgZn2QPtG943uO+477nd8HHLbsUWRTXgjPhSDhfRbsw9Z7/GHPH/g8ZP++IHfHvLbU35EeSKR7A2IiV4Qa9+iPA55POjHU46HHD+P8AcjELOu+AU3JoEcQoGg/9md2+B0YCyHpO/CxrZHakW1Q8CEQ/FS0vyvxY98XwzkqhIMH0JMcoitdKWuRBShiOQQYtKhpZcQE0inGvfBSqq5PEY7ZWfzQale6iui/U/0lyMLbTgiERHHEGkTipAnwPV8XYGrIIOxcEZiyXQzgW4pKR3cOlwXdypdXily3T0D87X8TAyZXOtZ+5oZ8DPiKfIUiYgHpNyyUO6e/jh+BPohIYJ32RR9D0EEGIgivi1k/JvdiJeoIwYSiUJ+GgRsTIHycsGtb3OnEEWCO6IkmECBEhJXDBiSeID2k4gnGoJ0+45xpiMv8KLc1eX7GqIv8idq5zy1JaJ2CWNtXdM5L/TP/kejiEaB3kKCh9DrFdHleKeIHrLeRrHfwPMrOqGrhX2Lvd2hPztUTkA/ga7yl9B3oW12R9zyq6u8Ad18sCxAs+VCsU70iCe4Mf8WGa8oCAGAoq8e8ehr0OvV0QHsVdCU/1REF4mGvqmY4EAkxEB7lAcTi0ShGLsH7IeoOwiRdI+qYP+Jx4N+PuT3R/z9eeh/f4g8FYnT/dJTb5lp2/j7g34c9ExvHEQZawNhY/tdVNBApYWm+ZcIQNzAjhlT8xRNsJGQqxUhovQ+2F2CwW9xlUBVgbo0iaJTqEQhLS+o/hyK+4pbQnxFhjVTXVVmw6AtchAigFXDjGSGtAs5PwJkFTSQ2BuSyg493x2g72hqMK1sA5YmPXG8Lk1tTIOvl8ngVgSIZPTXihyRwpEI0nZkuI2iylscgkPk6MyuRxRjlt7Omm9cSgfSWKedORAOwc4IVEhwumTyWJy6W8wGDIXe2gbsDcBlVsrEN1Xi7Kw912yfnoJQxutGnXsqw2obE6//IZD+YEWUR5TMcRSbuXRxTUfS2DsNvIgep4y7OmsrN2VQJNEfR2LDifUeevuSGksTR/7qYn5EE53N5PESHZIWU4ddK62a+qL4b2dt7G/ODhGuPT6htrM6G7htLDW+/rrFcbcboO1/R5EDIiJPxAPRrL8P+vHAjyd+HHh47istfz0IIUosrw+AJHV0xFMXYEN7sHHfiMchjyhP/VekMgAj0VOO9GTa0gJfQMQH7U85CM+AIJBDIlOIEBaKJPb1HSJPiT/l+IHn7/jjN/6P3/DvP+TvP4//eBy/Po7fYvwjxh8ot0aEIPwEEPj+jL8d/E9PHPaVJYNjxl1T1AGIIBaheuqW7qv3nKpXPh/O6AQYUoIpfz6iC1+Jk6HeLGcJ4jXSqGDIVvni/zQN8xVFdNX0iypoMwNjhGI0Qd+mTK7LDvXN5B8eWZmAtOjIZ0clm99DEVlh8PqsrbY2qAoaAl1h8g4dA8cTiNrbth45KDK2mBC3sM8ARKJIIqAoYEqqqx7vG0WrKkhVOZYQK0bVNrIUxb5ZAYOvh8r0xA6DS3frAx7VbBJT9LVeRYjhrvdsmsFwpbZVtpdMUPDHG+ns2oNOWv1NgWS9zmAZ6x8AJpbNqsIrVLjppLcKd10STu990XIrDccddHX0lMpMl2/3wACMTxepw8pxhsoLmemNh3X2hZvPxD4cZXV+ATocPCJZ2abOBJkAVb8poSgLCYjADObEXxVlmBHysgnDCZ+y15XSX2TEHRp9y/1qOYEgEiIlYzM3GH8qZmb2Lhemj0lTn4BBG3jDtsm208eDPgLfA/
+M8Sn0JEr29HQ7tBFtREzETHvAviVLM++MjWkjbISdsRM2xkaiAMHOY+OiKPVMz0QqaHTwqTeod1egdGYSrq9N5c0JqzreWYIX0Gsk1X7nrr52y430mE0sRJ0N+LoIQOlpJnw1jcFa+hUK5rfX6OdWTyGdGiuSXK+IYcxSBIhApHhQ+rcUEwCSaXE8ZHtESDfL6JFDlUqAiDxFJFuebPWNjGEeyRjEIAJFIaG0nGUCRRAT0erjnlng0llJM7Wp43ruq8RXoVdx14Nu7WbctnaUCajg08DI6oRBmJGYpf4KjR7yrPBCesOwP5hAXRc3WdmjVlvPgw9nDI7J+40g2RyVqfBRVoSS16MrKuwO+s++fLjUlZeMValAmddeni0qTWAnax/msqxcIi7qb6dXMjf017e7pr84Y8D9xabdZQRBFIqmNW5Mv/lHZfrlbAxGh8FmAG6+GiYeqdCTDVg/zWyPOhd1w/Ssg0m9fCioPxc4YA+0B75FfgoigaNspqBJflj8beNvO3+/4/td7jcKO9POtGX6O9SpartGpdZWwmzftclBHEMjb69MBSboW9dZkdpe1irxmXj07W+hL3lF1KYntcFeBBJJJMGVRBAj0TjXqh63KY+yLj+Zzd0aNhXg/FZswd6T4JlK4UyW6HtZmMCiayLaiH6KzrkccAg4lNtXAN62SFvU6biooNGNuZhjHgx9nxIN3p6ZpirOGXvT/4IwiCER4CAUpdhCUlfyD0E201LBCX3aOjGYDZgAIYmCAIoAK1dTw7AgKa8IUYRB0WoDJJUUTy7J2bR6pa6uIfyRxfMvDg4eZpaXxMqENFiUNL7Q7qpBsVMZwjAMXfRI5liebJlGGsiOLrVSukFipnJjjhN4jm4Hqx72ztJ24eyS9h4ns0ewt2Ml89NolmBXPDP6MjRTuH9OPOKWthKHJlBxkzRhMAsLhYiYnIpocJu522P0nThFq2r6IDBnvknCgoOSG1ckBG9yFonKRJWdp2KUGuL0HxFIA+YsDAlIN6irGP0Y9eCGnYiZNqY9hJuqAEie5tSdzvIWaAvYg2wBYSMOqaWJegbFK9CIZrmRod6rUZ6lq+DpqfPqusR9UX0y5RJ0I3/27bT3NdczD9cHb0jyrlG+6o9nlAXXS+qsUy1no2oaa0B9VXr0Ha0vqr8mmrrUyebS3L1phRtTujDSjegQiglFzEHMiK9eKiyItD0jAkMERxTKPn7PqM6HONIPiZCHRMNdA11BzLQyQqcAUEBIqmAKrMWy713MYGPTdB5/CYNVR2cYTGkWzqYUIVWJswCUYBjZdOQxuHmC0a0VcpyGL3MAqvv10Repw9xx39YVedLcrPzwyFCaMKq+J0P6u2hr6PDSyJU6G/GhSv2VUaTnr2std6IjIkqXr/TG13YKpf6Nea/DvP5u+SsmKDt0Zu5ZLybEd9jPnvhW/Ux2ZanVPDHqf5Q8k5OHlERHXkt0kP5Yq5obA7AgZhhGnjxDo4Vm3mN8NG/LenJ9TC3WnYnEKq7TjsSlYlo98JYjlO4b3YPsGzhQUYtmnxKKpMozNdCmCBHOw3IIunCD1nPfK3d0HX1LDLHW/y40XpdXm/Aurf5aEcpuRyKSpqpCtJIz9wCDASQXJM4YLJUuGmZcszfUTB++nuG00s0/K7AfeldNMFvMvMSEjSmCbkJPUBQ6VAeQeI8OMmJhFB3AFgESRL2F5HYIAY6IIyroQomvoe8T0aA3JjAz98ioph1FMhbKVuHWrEv9KgxpSEbRxYF/pAMMhpIYIo/BhyO7R/cZ9ysGp7XjmKmegCI4L70lO2CTrwqXQbQvP7T89eBq4oOD+8jmWL7Yqtphc+WgHfFu0lSVyQWafqKUTFNGQt8hVb2iOp7Zfc1n3tvSZtd6tDPjkUosPD9rXCBpqZdppSssdknzPoZ4OXRp7hXOuAy96NDX1VZcHBR9zcPIAnAjYhyZY73xx8DV098mEYfZaGPml8gwnByqKJIwk2rfAiHqBAMpmmqRSBQion93LMM3CfXDonZayBop4YBtp48n/Yj0EI56DXFQnzLmjYgD3ze67fR9w/0u9x0aZ5xmyGbSJuQYCqowOPVzNA/7GT5bYVM/7SqPyuZ22iCuKy/+qiYO2NVfzqK+dia9a7Qpjf1d2EHrcB01spLk2p3DgKOQRBIhRAhlf25UbLjHYEkFphg8u71iKp6cXfTceVl3cI4RgZ4s6gs5ZqKNBMxA8hpKlxCQY8OChglJuEd5ZiesQMm8ekAXzyXDTvJ5LujVom9EFDL0Tf47z4xkDGKhR1QfD2YFMoYo2UVRMHrm0XtEqzu0OmabI7TlA5KcFWhoAG7IuvYZgHbb5rgDB4OiJDcWdQPmHBM4pLBH/ef49XSwXe62q7AqAyvrZu2lRroJUjqsA1LOHgVrE+6mKF5FuJHXTxRojEAyveuFAhSTsDBBAxm9Cif/1olj8HwKn8iLSFvImiHQL6K9Q4ZPKtFXOzxpvNZ7I/f0Nz1ASgoYX8/Qzk3Zje6it/MaelGPf1eb2C1IXV7fNQkTBeBp+TfUq4rAIhEIhsolyqhG3zzUx2bg3M+CwXqVkuAym4MjHrlReBU0En9tiLWIiBD1XtD5s7XbZPUvC7RvfC8Yn4GfaSMKGyX6u+O+IWzEO9PG6n8CqrUphQRnDPZugDM9SsxWGO9kUBuAk/8zCOmmRnAIgLtUG2vobVyxBp5WqI4P3a/KAqLG3eb3dRhWMf2qTri6vNVZptFIDzE43ykhT3p+NskFnJoCNQa7AK+2Z0ztR4XiV5VH22opky9JZXrIz4rZ5BHNDOAgBv0QpYmVXVxDMTdhYBM3B6nBNaYEW9m4Wyu7evT1hDLNBnnGf4IIFMDJgUsoiogkA606AZ3ft3uMsSyq8kNxWUE8+h6pky3rHfW5tABSGC5HGBHZC9rPHQs9rT++COql+vIZ/bX5HR1L9g3pnXp+nJ3I1GW2UmIzSESI8m8DJwKnyH5E0mKKiy6xQLqwQsr6i6ifRp7P+oflS2byXf40A3A7Z6MqCbRv0Ytf3nF2mf1a6fzzqy6toXew3nJw2/DgYUoZ5FdvBxmJcZquWJ9CUAqrJBVARlDfevajbg42DlmJ4DKCkeBD1DzMkcDYBSkXQLbL7uRI9lDMTuF5vwU65w4HBgfsB/ZAkZgDbWb8IgpMW6B9w8cN3+/y7Y59Zw6M4D62vNZPpi5CcjZMGNwZlLzqnO1duIFlMNxjsH9oIzPlmM5+FfoOn/Mp+jYHexj2FNkuTDNwpPL0slfARQyu7MHpz4E6mlA9+pYHV4v0s5hgreiy4XldjJgEnNJubMRMEg5wBIs8RYPVRcABEsE3yBNCsh1RQiBR9LJUzPkhKqdU5fMDx4HjicMY5DOlhD1K4L/eOgGCJ0DgA/GQ+BAi4RBFw5QDoBh8SOX931ATT38lsWFp6K8x4Myey0JB/ztweNyNiNrhSGUFXdRuyoCJ9VOHkmAxTuAtxHyU38V4vMDI5ohUM0tVbMZ6ZyC9LuAc34onmmIw0PpkxZw9VFfu+qPB4NzWWCcM+E+mTKxNz
9bqJSvzSS/FCq48FupZR38bmdNof1yag/UAbs9i+Qa9ntlV1aJvPtsoeKKqoC0/s/8XwCwjxxUx5TMyGEcAtVs1ISroqvHI7lJzQRMxU0Jfr9m2BQRBGPSUqNpmo/s6+RyI+hWrjl1JMIBIHGVP+mdizXzJtO34+Ca/mP7Zf1w6jTyigpjl/6EIbCSHILppdrRgyuGUIqawMbNxj8EaYMmWgkPIlM+SObGDiZxbo+TcwDDYSUvmta/+1iaIT6eIonai9wzMHpLXlRSH9ugwuOvNwB68xmCmavZZ6KL9FNNMN87bfkht/UHvflW1wjX5AJLONM2hxHzQFhMAx/TKRMA7yQ45ZIuAOls9Y1IkCsQS3QlwSNRgf0Xfg55m+m2QDEgfHiFydvOKIEVHBc4oiCLqE5Gm/mp6Sk8GaG3GMeufXWFpFgr2PUvFfRP6PvVGESNFcXYyW0EDENL5hTmN/xCTXojgPkgG+6vgULxhxr2WuJ+C2XFcf0n/Y+LQ2krTE/vhlbfloMBTYXg23PtFO6do48FOXSyUY4jtk0kdb245u1bZ2jRT7UKsY5rC0Hxu7kHVf9aN9NA70zz7whd277iirJpfPrf0N6ZoGqjNBhKdU6G5NazR14cJFb2d0eWqtwH1qIsJCKMvcyBXmBvyv7sOx/pPIUi2vybEgvuiTxedPqpK807vuO+43bHfOGzZAAxAgKdhgKqekZY1AXhqMCdhmMEwNzFdNRpMklMse2FUpl9yvHatf+7TUK8ZcHtwpH+e0V/Pm2NOG4IadF9VULe460kwLizDF3J6bQOWXiZO0QtC3EJv9yMtrXSA74E4EpNshzyrbUrkKXQjAuQntiMCkCPiKUJCzygReEQzAyf0fSIq+iqMQb/zjkRGiiwakh8iYgRTNuQcIkeUI03rIiDKqVObR4q0PNRVcOG+yQwsedcViDmIHRIbK/WQ+D7pqRNTXkDohHWgpNrIljDigJ0lMvjAEZUQ0+GR2GAYYE6ubCDwggH3s8lsfnFp/3RVUdzchuW9ODNeFV5F4CbEOZEDly3LzvqEWaLDNf9Qq3BS3GWTcHoQycVD13+pP0cC9Ny9zr05YbaCYtZFewyGTZj23CYMFTWINs5Qk7Xd6RQ/fklUr5zasznbaIPZvRtdIwuQ9rIuk3yhpYzwthW9mpC3ZHAMJHWDY4oOCFbnaa96ETmk/g4ixShRgTymaEXzu6RhHnVKFu3QK7HTdCQAPViC+naQJCdKH4R2mMUxxdUlDI6piUSFZyTSlpX5vlrX2naGz5FIDUesPbCm6Hu699Fa+XwFfdd7Ipn0WDskwVFKxLeBbkOC60qcB2XvkPUZ3VdDptH9Rvv+PLdrwo3sz+SPvehY8nLKF6pGeiN6ijyjIYf8jHRjABJisgFrzg0iyyaXGKQFHR0ZfR/0w+MWHCQUEinbEweTqEb3AB8SCQhChxCLDuMyt3oGbyTMIfEl9FWj7zN91fLE06D3wMNYb/7sD38XTUCwwnBEZAtblOR4EVMSXbGrSNh8vxkMHH6yMeOxwfPwxTXU1nHfCneFztEXwKEGXMA6htS9eOQlQu6kFBjOk3KjlObsIKOK6PJDyS6ySdjBpFFhL82HMIxNWntMLzhuLpA/lRp37UjjbFUppedwNjtz0fp7xfSr0rPeIRjPMnjUF4Yjhz5YFO+icOMFPTMG95XEShedIo8BnUdhmzF4H2wW5smH4NU2Po2d/0IDdkFMgJlUJEH10gH7B/7yIb/ccb/RlvY3zJQ6UtpBxCZoiYikOedF+YAqhHznypj0y6m07vTBS0UFnYqjUkEDpnMmj8FeyZyezwjXT9G3l5nvlUugURTIPh+1lwZr/SmvzUadkQOKqXk0FDyutc3eGFxaqbQUZ1roPJvAfadjnyy/EPbOn+ma8nSGD9OK5dZrOO8rN6iOIkHoxunzjkI3wUYAsMVN99aIObG65dxQ+quEUtmkou+Bx4EnRnJkHkzELEfMAKC4SCAlwSG7IyK5KkznPkn/Js9nAI3dV/L32fhbeeLr0ffAs9nKND/zyiUuaVYRYzcHocxNBIDEz1aVv3SDtTPo9WdnGuYh+k7MvXm9T2Vx4C4BZ7eyJuLZV+3SoST/rAUGA5VbltRU2N1gu9jqVTgz9PW0w45Uj2JOee1Io21eQ+/sE1wbgKsaJuh7hdqazLTQyq37QDvvlwC37Mtn25Hs9z6ySxro9QV8KDCcT/WBmFJipRm3XKL5NziF7W5WP7rvxaRxejBzUrpKWLVT2jmRCOwAmDZVPt/k2zf5uGPbiXemQGTrPF1EG2+gqu1s6dOnZoMzjx/ScOHOHIsBtbsqTc6svp7JLghdPRPTr5mHkYBD6wRqDIYD6XhZsRzzZ3XKVMeW4AyQhsGCeuEztATnhtMar3fIAmChwx6Gm776Z2G9fFuSX8DkQdj4sx4+IzYGE+3HluOOcMS0hInZ89nx4OOg54GHbnNmBlTuIgvz4iZGxNB98Bk7SXKiacPg9BDSwznhJc3UY15FAICQXpRsaopm8CGPiKhZeLz+ueQooEJPlQ1s2AnMwgF7StkjPt0uufJFbTuT2bT72nQsfMqA2wqzztw4MTIM5xiixIa9UjodEdgWDiVUqdu/IaKooydJKwFbjLss0/b2yyt2H4hGg3g3vdl+R53PanV8hru99bc53kivaq7A2HHfHnpfBV2g2lI6heXklHCeE1NyWUp6F4++tr7szbHUesLU91W0JoNFZLNGzGiVZjhC0NxY5mud002H7IEVGIFTkD1nxbJu35ISB6kIkJPZcYQEhIi4IQiOgN2n+kKmxTvud/l2l/uO7U7bTpz2YQPULYbznmy6GxJT5s6U1MJZY4ysDU6dSUGVqS0tVu2eZAHB5FDTJ8NK9czVvN4DK0KyV5c9DJRNkJL6XKedfHkaNu0pWw33MCwjE6/frSHW2Nxgdk8CJzdG4GTP997OQOcUnUizB+kOg5HX4zMqjPJjkKhyuPDHieZtoHPuEbcnFn1hzq/reWDfwITtueklSnmBvM8oRJ0Plf6q8vmg54GnbjKqGXaSziqtow+L0I84lAQLhSgaN5yig5Re695fRtD7udVMv+V4ntp0iWAlxQUqNNoqI8HpT4+43TyCDL0AFujbRAZfRN+ZW9ZFa5/dzrASOE31SPJbJ52jCxIjT9zD/F/pTykaaU+FgRKnNEta2bvXcVHElRsoiqP6jTduesUTbYK4zdk19FodS83zCnTb5tKfn0LftbwXRvUS+jpfwiPH+J6wOyKNmQcTR4lJ1ZywKm16+GqfVRgUCRBzIeRDsVziRrt1GAbAct9x27HX9BeKu3SBp/bo69ya0m1Vs26Hvla4ycVRfr/3LIa9bcKWlmZgVOA6qK3fKKnZ+qzRWovzVXJG5Qv9dvr/gfNzLrDC4FmdtrhYUGFf5oKZ+RL6NqfsdwF7BhFtrL8lMN0UgMN2JF//jGrJY9kn3FAL6+OJR7PFtxlBNaOsOVJS0XMKJZsrF42xkKjJpeJDFeuVMlEW/TPydNbrn6VTQUdc8r0aPMNORTZE3wZ6
afRl2TZHcBNx/6cjNOPEvc7bC/7aBneH29HU1al/WWxgmBFjPTvl70LhFpTjiAoGZ+j1scIWQl3ljtYKHUdkZ0ax1T1qMkpZYYgMww2Q9Ug0dMuaUd6hMnkmC9xNBdLBL0Zfff6U8z+/Uds6+qgBV1tSvyr1tkv5WGklpGjggTWHuNEET5tIyUaYRMCQDZS+CwYzQpDtjo+73O/YNuItJd8AEw1RIQoI6T8mRKFQOQZXrDffSJ57c+5oz6Yo/XDL6xqqBzd1OQrINVRQNnf1HH2H0Dt0hG7qFykHDYbF/bjoxtVKg8GoSXBWUAsmGFyTYBjEtkBeYHi8Z4MzYl9bO1zAXTviKa8aYbagx2ljhIAtIByb5DyUh+5hYMmkXIihD91RJ61DHoJD1xW6wmUB3ENoclYUpExZQYikDOiZzlkK3Cb3K8keWPnf6D2fL7pfYWJtVcuWJGXX/qTnJngCW7GvDwyoAHwckSHxerOjPmOGllEMduFDBXobFbRH38VOcDl0Uk13ySTPIKEYxaCdeRJGNXhKI+ep6taUK1ClIstTA6AqR8qqvJxtY17P6KbOvnTKQGus0ZbFesr7TLwkc423TlJ6UGblB12tJztXc8X43SqtNcG8JFWk0JzgVg5ZZrygSGkzhphBtxp4qoL2f2olgXadkAK2IFvArl7KyN+L1z9bXovmDlVrLYgRgQHIRsQByaVrx22X2x37TmFn3pk1A2VwYZkRCICkpSR0JtffIc/Pfo3gYFU74ECuDGZp0JGpHOQa26j+YTXrwcy8u9xY7s/SgYkLdC5zFX07llwOSk4LajDcY3DucFLIp+0Im2/WaaFLB/IPBVO4fZNShkQug57s/5NEleNkWO6GWwzu9NXF1F/LmHb3yufmT6ccABMp9Ca1c0AUbIwtIG6b+q8eMWe80ifmgmg1wF8NwFGeeaPvhMSUnC8egDoQnqh5kg3YrTjWmoBCJrL7FZzyGUqRFadRKZxfkkbhxnnecek4CgM27utnEDhMzXUW6R9KdAkxmuPmkJULcAbOd25NxWB4jcEx353vZP+7vxFPgmd3beh7obfl47LvosWnCQA1h3udbcOAZ/R3zXpjdbwivqakuSjvOX/QxTDh09bzmO/B2KNvGoTCGnmoidPN6mR6YEHsM06bi3VvADYGrOjrF3bJqRgkSPvKRDBDovBGiGBCZDkoV5jQF2En3olDsvWSx4PaHFsxYMU5hmmhxdaIyJTXamigl2roRbkKPhE0dT/yU3rtPfZsLeac/24xR9ldt+Wp6NDXHW/PZqBN7TaGYV8zkRB331rxZ84Y7Dqdbz9jMPKGDR6G9bhR4eSqMsdgdDrtWYFXVdBrGO4lwUW2guwbstoZQGLAUTYRiZqbHgBwaMIWg7pKo3tEHB59AYhEQgR20EO/4aSOQ4wpSSw1ZuBIRQvtVJFFJGmhJc9oxfoLtMpn+0+vbTNezelvo4L2+reYvT1Fcyf3z/YMff2RmJNB+rnea7Bj1lQbKsNR4bXisee+M5zmvGV6tAMAZwzOtUWX0msFlg3W5ulpXP5Uu9PfoYdJTt77V69Fh7sz/fN19H1Vvggfi6ThcWYGpvL6gqIiakX06RK5arR28m+GlpFgImaBblbYFLB00wF7QN6QV3hDKB+Rx1331eQOEwsJSMF4wxYlTyzZP2PDtmML4KDoyxQoq6ABdfPc8n4MoWhf0n8JhgFFa4Ykz6zMLD3fRcWMC9w20ItOr+t/5D/Hj72BhubP/ms6ZcCNsbbvGNHgwth6SicMhlNHa+GyxOmjmIq/FXmvkLKwjkVf1cLwMFk0u7R8CwyGQ9leF+3FP98ruuiZDbiBakPfLSS+e9tAnBTRW0DYEOMGQPKmCwvdY7GMEh/STvrqQx0opP2WEM0PS9eqB45nGt7EIgxhASJZMoZmwBn6Dqy/Nf0tVzn6a/km+9hfj8FthqCco1r/jXQE7FEig9XYGWTzmzR4EPX7ly3ySjYS6/7XV/kAkjT3pagn8s71ZVqU5Jo+nWGpzgVo5m1XgB0jyT/IVhtotk6yH3bQ9M+N8vk9Mbx8FcsaoJrpn2eZNLwJycRjqndVs4Qb+VReQjnN1qvSe5x53c9FWaTBgkNiH25nGTmg05cREO1DGwh31KO0ihJem5NjTgMJFO5mabCQVdDRVu3dayLhkBrigLCBdedB1TzbHEjdbGlzsmfA4vTPjaEXmdqe4i5q6GVX2Npv6C/Gf1bHmz9LnWfW3yqj1oT1Vo+0VkePdM6tOtoXTuje8eAiXe7DcYRSKb9KVNlv2ACXM7JopNt5FbAp2txMTLheOJO3MWc2UBPrpre5Hk5XKdyGALP7bkFhWLYNwNYv9omIdBvBRJssy0QItIvEQLC8rLBp3T25vHF3RYJ1uyHdoJDT7kwIoAOaCX0wT3vrL0aWVPdYK89nRd+Dnoq+6jvmM/ONLcGiCJSW7RH7gScRb9ifeG6yPelIGx7LBqTfR1G1TdnwqXhH7vxUk0OWKaJL4fTokl80I+SrVmrqNitW2fepdS7r0ddDr4fbBfS6RTHyHZEdyRRkjM4y+nOIPQtrdHcqP+H8Henvoc2+6bl9Iw2mco3B5SP1Rn3XiyvoOTP94kX0TSN87oTVGINj0nkkdbRf4kV/C25Fe70zbdMUkS1KlPQ9SQl0eHeQjL72Z18VI5FpAgVic3v21l8/DjwjckdBmjsXGYnd6KWijm7NsUPctT+dkjn92eD3bP/BvolmAwYr+Tb6Do2+/rj/U6lwj8FWwxDUB5mfVUqQ9WQ8N65SVlVvEs6fbQXDqJEYWSnh3rol4K4iiRtxYJx5fJ1tw/8eXq7bdxnuaqT6FoQ5MWBg05s6RETSdlIi0kxGZQs/zWzXfdUefXNKj5YEqyO0/afp6SjjjMWJ2s5u4j7xmA1tM/1zcrnKeaptZ/IGfT305gx2sbkLNYqDcOCRXhwOBtQnK+rLpGdONYUmtcXQsgunZ+7fVD/FNHkoMVcsXxe3VXMyAFu2LDgnrD9PbF+m0iWRZn5cYK1fX7rFqKe5o2c70kX3ta+0FPWZiyD4kh/WrPyQ9UoqI8jdXmNh7+3fcFOL2XUJKV/2hfYpt/xBchsdAkmn7TzwF91uNxVFreVCXj4iLxwpLRApbf+lUe2SYs1tVpcu9RQTjuxpS0DaJBgUymo4oa8P7fU0FzV0UXeQxry5quEl96uFvOCN/I+SMRjXSumvlAE9rQl0/pEIbgPSHZzroFntPQxHjhtRHrzvCX1ve1JEKwNGIpryRNykJCls1puNYtPTX3YR9/lsRPL3UeR+GAnewCJyIOfxiQATS84KQUXzbKEdxWI30j+bs5iPO4oUDzx0ne7RVzE1+Zz11tPkGHlofE2gXcEbhl4EzUkZETiH4eRACO5jagdvamZ09PPvEn39b/OLNrVnRZRbi13FgN+Q4YUN/b1eW3E/nBdITeS1ZoOFjavjKUFcWHYXWZ0bwrQ27g7YlWMtL6mjF+j7nyuWDIvdv+qPibxGt91/7RLKqS4XJpKZ+K/DfBLtSKN/grqwCIWsrkifRs5
DqX0WKdCXUMDhAWer8BPYKI0Nn9uZC2dwT6bxaaqPG+UtR6zkBJtnNffSODzng/AHZzQXzp2q2bT4emKsd6RB35r4yihL5YmIe+inzlm9+AKoNWA9Bs9U0JUiOtPffVMGLLcdYSuKaGBLzLKbG9gl1tHhnmzKwkwMcJSoW4DZZ6ZQx4JIadcwACBEMDRdg+SDZZwD6qRECYbtUXi7mngX6M5umszDpMzbLL5mAC7oq9CrxcrmFEWeUVfoxJSdohnhoEfATm12aE5bkIJVu57tqQeDswvfa3NND732/JuE27lYCec1jbS7asBj/NaKJdC5Nv3qwUDsI0OSumKueUY1oQB44cOxgg0vGf6+IsO0WU7tnNryK91607rBRFZKOkC1W9agZ7FswMOZy+rp1d059RUyxl8J/CUQIARW1wRNUsHEgpDUyEuO4TcGRg2Trglu/uyT2Jgq3teJPAItsoDAL6GveXq79WWz16f/FpJuTJPdHgCrf2l5fenZ5mtBEYFoIzwYgbAzDqFA2FmEKBAOwq4q+USpaWNJy17KO4M5GDb06hnwUOGMGnrRMOP6kivRR2vxKGtVuSgD/bSlB906HfRZK5EoDFyvi7gRWXW+Hho29UxbclB3aU9fp6zoa8hHXLFG5+YxGPVae9R0soU45XNC39sOJoRN7cRbjuGBBf4OGTBUbZtBq9dCO80z1BeaAUF84rGl1HHxoOcmQb2xIEb7k9ZXYTjV1kVV6otPS1XnlGEMtVE+F+KbNc9GfPUuRI5er5uXGrq/6UEIgXYSFkSmTRA1DR6DhUIEA09FYuPEAGJJWnn0rNHMbP07Gz52zPXPzXR20Xe3yVw9RN8qmmiJvvbKrqCvFIfw6rk0f66luc2JTiiRHv13TY572tp7rQ9L9mzY74DUd7XUswTX3vr7n0h8m/CkZtya41W/T4Ohr1dBR8RmVag7bOV04tXkU+K7Lj8A1UodQtrxUMNbqi3Pq0wUSJ6EILQTDsEhCIRD6GDcWIC0cwxDyBmGATQMuDf3pmJDZNUjdZmWHLu21rv/Lvx2M2qQNedTThrQcnYiNBhePN66fsrXps0YolDQLmn5XvPcrQdfy5PW+2p1BuP1tdpvwH2iPpsHOgyuSxa3sV5Z7ekvZd9/Juwb9l3U6HvbEQKY1QMLhQFnhwjpDKtI8GZsMsFYp3OGHlTAQ/ZBC9ifeIAAAYOfOAh8ICYVtESAA/KKsrHYZehNnUnJLKvUV7FsveDjjhL6Hnh47tugryQ2nL1RwGlnU0S1nlumEdVjCcWII2DXZB0ADImLahqssT1ZUdaODp7Q3HzwfVvvdXrh0bc7NUjMa9Jz32GBhUi9WrW0HvYs+txVo0pyc0s8vui1NGxoqJEeqpeTTwref3Ozbg435itnUXw13hs2Zv19SXqjcg6ZMz/qwUYOKu2SsYuGdz5oxQCMYmmavlCbDUToSPNgeq7q/GTOJU5EXaY3xoOxC2mSfEXiKBRZ1Lqt0c1Moo5aQVNP6B2NYLjB3eqUHh8RX18s9S9bq08xOMId9AUyNMyQ1TG6K5tsAbVe2kh/FCJd91jnhyT41AOra2w40UwTVfq1/FALjU4RXXtQO/fpXLg2G+vQbLyvy1lPiz0D1v+Y1PRrN7WhQ4I87ss+9opqghg1C0fGLQEDj0C76XWBByEIHSRpgzBFLABEfMgjIBzJVircLd7q2TNN9LohvA5zERT78VwYLAjKxUEIskcw6BH19Qh6A3AjtpmERVhFHKGQ10tpcrWeftJx/WznnR5EP+9+1UizC43R365daTA4dvsgTZuYn2z8sExX7NTCqu4bbGleejJ/+Y35Zti3rHMejKGX8j+XMuXUYPWwJnBLi3LF3Ydq2Fh5K74gDfRed7xSsqu5OCSlxDpc9piidu6r9WER6jARIQw8ETnlEi9fhF9nD3vi/QpDxuwj7WCBA5SJXVGhNU9KRA7CIXhGHA6GH4wj4BBSMD4EB5NANhJwBCgCW8Zgqmnr0NA7y8M8QOi6En+q3HhtqfUHhyk4el3pcEfCr5Uq1aU+hRp0NdYCSM+o+u2U+OVUPusKk7s8PTh/YTleX16ZaUsnRyX7wkTj7BxsoeVMG6fY331LEUdq+g2bmi6znx9vyiyRB3Ejs6GfvnkBETv0ReKXYEJkYggYIeuaGYSAg4Xt05JkAs6FuruuIkDS56TLufQ3w6Ji+cDBwoczhepeacNbmN5Uvi+RIzVCqpvaPbP3pi/WhZ0YDWImv/tQi8EvyWeuHdVmgFfcRy2piNFfJH5cGVObGCRk0y/KLANcQN9lAcDB8FxOWPLadaPRJPs/F95Yb8ta4VyHFw+SP9tC4aL3HCNExDI+lxf5RaT99sbaoQ3YFM4N9+1x1wzAPcDrhKP3GFwWmivGby95QZzQlyGHQFSZrIPNZXEZWN9BTEJEP6MSYtmZdoZumn4P+Ml0Y/oIcghuDAEfJDsDwEZiSaS9ybanuYt0Vx5KT+2YnrP0iugGg9Hp3/rVP9XlX5IC9pDh90oVsrboa/1wZerrJ4+jWjp69F1LP49eQd9FZ4ZeKkwIGmXkQo/2PcUdGfpq6Azz1qd1HLaewV2NowPy53cis4NGFpVPc3ZXPnBEsH5+JAhUMvvDGdLSfUm/72lxykg73gvXu7GtxDOGHBmcnbpJWa9au5kQA+2GwaC084TOK+YCCofBtgmjuCRTQxcqvGLcWsjMZm+nGnGzJDk8vpQTf9KBq6I24Fr3o5NUTsgyEiqXvyCnVBge2y6nhl7Q33E3zmy9fcliFapV0L374T9MNHwotl5+HF0my5n3lqEvu6wvyZECEUnPVELUel/LUzELGovoXCGmMHDebU18lyEQCxHJ0SDxU3amW8Cd6RbwEegj0AfLR5Abyx7lHuQgCSSSMngIEwWKjEFAUfs862FZ1gclNICGmH1q4un2ACbfAW8P9hZiV/+lx25XNdkoS4EIalZcXxd01NuAB05YV/TPMzeui3bl3kvLhLMNWOnvln+o+1Uqw2DeUHs2PRE38IHD9gDWUFpNBC1mH7FkWJJACx7GEICnYCNEBS1QWlYfeDAxCz8RNS+HGPoKxWSuKPym09jkzqcksckFNCWKQkCaEXQxfhWSiYLIQW7IqMv3jH1a4oIIhLqM4EgkOOFiob+vctlJPNj4Exxps0/QtzlVKEi3/VEpIOSngOw7U93Ymn2OMBgGw7mG6k9xSPlVskDct0nw9d2QXopHGlZ1ShNN92N+yzOLSe2rHHoWa5sxWC5oVIroaeWWwKcvYIEVi1sYVdgmpTGJEIY8M7aalcTW7r1DQIQwEQsdSNbLvBgnTQH/FHlEegb8iHhEekQ8Av2I9BHkzvIQ2Ul2lp0lkGwkgSQSMyFA95fXMPfCg3vRs+belaMnqq42TsvZB2ighW5k6PZcH287tjAVrwuonHTpTwr8vWADRu9j5U22w2IqjBVa98t85uz/zLhtNf3N3NfJJu7bVnvSE3jmPYCf9Dzw0G2AdRuGKE81A3uFbfq/45EEDnQw7YLDSCQApsDCTMTCagEipd1CcD
k+UQ8OIngzcMoVCdrAT0TJ26QALGAIJEHpfuChZmAWCCITDjnsSMzzuiBqn+25KPqmrHbEFu7cyCuWsynyoV6Sdxe+SUz7Ovv5zuapQZaMZicGtzWK6iqq9R/A1exwFYPT4s/BcOlAxn5KnS8lruyMe0UphRdJ8EvW30EcwddNPjP0ZXBMK92khUY1mYwi0zqYNM6q4XbD8j3lzR1Lzom+xRK2l3c3md3U26M9Xw7FYCI6ZODblXtVxpV3vbaNrmMUBh2CB+FnlJ3px4HfGR+B7saGg9xZbiJ7TDC8k0SSQAJmUOvvbdLgaw1aU1WwIW7jOdMEIy0gcIHZQx48dJ++KGPfvkYL/ZW2NScjDK5kSXCXuw6fBSUzlbASCz267dhvwly4L5ASReZEHMnr6gCeOBii6JtzSJUkjm1fM+IOMt5RcriAICISRRF16XoQMUkIaZvhyJKzzEMaS3BRyORo+tm0y+DotNDZABY5B0Rm1E5+2l1nWfXk9me5QbI3ejJe/BQz4pcrS/CarabOvOhrc8qVF5OdSKG6xVE5Q2+Fwen7px6DV30rUf96a6DRalKb9h9UQ4WHqHkRej8vV9D31cxZTQyS1GdnV6moG4T+NhvwDCkxcZUaWn/NkmIkeAjDyombCnsUN/cIU0HDlmXujmTuftWLU/wUl64qQzuNh33zORiSRc1XKBDCIXhEeTD9jLgd+BHo94CPQDemG8udkcCYo8KwIIKZEUHiUmi1hlIL/vESOoxcDCHR/PXzYKTGx8o5PJuOGug+mTrwt8DwTDSHEvUbEWoN9Z5IuKBIf0+kmVBmZ006R+hByehI8BUxVywlvvuOEETTbuh62BiwjwM2V8ODngrDBx4HPZ9KfKGU9xCJIodIjI4E+9w3uKxWijmowP4N6rXYPaLTj9C0zQzKe4WqJppjPkvEkCPpqImRHaHNaD3stmfASJNaGCTYQ+gvt2TL3Y1PyW53UydHMELl9zjEbOMjeyEVUXA/zDPfYzDcN9ZPH5TVOeLW+5x70HhsGga3t9lt9PuqvH2hSr8auoi+vY3gIsykiMvsIbHQk3MOk8NSBd3AJJzSOP1Zj2HblBDZHtwEtS+SeMw0QP5peDXMNDHZme5defDh4Dy1mAlxpLYPTYw73Is7IIcAECI8I/0k+sH0x6HmYfoIuDF9BNwO+gjywfQtyI3jXShKRABiDAEECaxjte559801Iz0BYdY5z2+ZeOVqQE3ljky3btIqPdacqqaBtA2DbglclTkDhFep8Kx8Qc1RZ53r8mvNXRXFXSApnzXv1bbJ7VbZfe1HjGDeROSJKHkf+ycOwfGgnxGHaZ6f8uOQxyE/nvFHlEeUZ4xPqbM5kiITJXOvWnyZtkA7Yw+0b7gH7Bv2LTtlp3BSlHeoHtGWRahRSRlU+xTt6XZAnMKLOVtn1VczkeADUa1iTBwl6ZxVIxLrt5WsU5QQl0i3Ew8efQP2nDaoToyV6a/fZehr3Zgb6WfVIb93d9fygCEhvp5UcrYu1CX5S0pXSb1N4qLbdfJyI8EH5/Rq4Wur1QX6ejPwdZzuSw6fQL9Cesnvdxit6TVAGghgHwIAzDXPdnn+4RTRbgxTDsnzVFhPRVrF43kIX+ufu+6ROI9odkleGa2mytwJzZ/fjrt7rEEX8Blm9N8ybCz7qXsvz8w5DsEj4hHl50EbJy+tGye99AfTR5DvgR5CH0wR2Fl2xEARBCLxYBnVAOR4agWlpJv1AZkZeyAcX1KfWsus2Cke12fFNgPut0KqPZYL/R24PeffPgfWSQBSPj6A3nxkgLtD7+WmcNOr7nhlMC6FNTmkc34OQTNv6Nnyb/6hTlgSIbqPkGZLtm0M1O57yCPmfz36ikS3p0rkHO9b3RLCkCCaqBmY8t4M3h0aI3LQBAH7yUt9oX08EufcGr3YOtjGjDlhUcLgZPpV9A20G/H16BtkazTPhr7D6WbBUNfKg4uE6bIGorW3vWeBW6DdDH1zyEQiwZY637RHJlRpiV4A8yszCHcBwZ/lxJ+49uW2JmPA/BAb628vzeBsA4fyGKaKJVcwjBqJF63YPpinA8zlSht8+Dkuq+l5tUb3TNrT617z5NF3U28Dl1GV3ORgergIiTHZpX4yMSQwNiKNXLol2zB9BHwL/D3I9xC/h/ARjo9w3Jh2iYEFHE3PzElfNHib2gHdEKKMfSWsZYUhvvAwI/SfJ1T2XBKiSseuG+pV0qMv2t9WYNpiW6ceLHoC4DL01n9OoXcplbJaDcA+8aTS323DtqUNxgr6phlwi3VKKcsklVI5prjelE65NGymppE6gEjZ8NRBqQmocDuDVtqDWYCNJo72X9eRP3VNl0MIDMRsCVZGDgIJe81z8sSisurPSeTTBotvEF+kiWY6R1Q3MnnJ63lq4ca1lp4QNIzhHyOvZop+Kaj3k+JQeeALPWS0iwRC19cMpyTYtzL3wFIrwDhzeFe4/Tx7tbOhr0dcKlbbo6m/2aooV1vtwUAlUSvqa4s/4OmdulZmppOMTM5cwi7FG1NCX/22dRtNojLSAhAluyUXJxHRXEAR8tAp8EAgYlBQDE6qafoW6PtG3wL9ZeNfAn/fwvdwKBLvAoSo6bSQd2Ltcpq6eyRptNBcl76irmpMuU2mjrU0KZ5Qf7wJU80AzB30viEtUo7ONty3vvBPRN+h6d5Mv6p/vt8S91X6y1Rq9IDtM2HFDLR+43ov5AwNfdSv6p+JmGlTzXNhjaSbEe8b9iBbwB4kNFqj+t6LI/TiC+SUgiMtitWrjISZIpw3intuKyemJriZs9033RqYM5U/VTsPV/ojC9z0u7nIRPuweju+dqieaeqyJcxj86tdKvd5ZV5YY78ppf8McunrJKxm+vcw/iX1uw9UbfwNL3pgqZj6JxJCt3GCl5ZHul060KEmygColIZNmrbQgXpj+vXoy9lhdNbD0d2tRvVQevf+qj+EBn194QyQCFDH6vRA1V865kXRU4RIHgcekf4A7Uy/Mt0D3QO+b/S/A/2yKRsOv2zxe4jft+PGx85x59hN44nCshQ22Vh/R0tA6L2kkp23c6nBnfJhsacw3Iz/Zk1MLPbfup5KRvpnL42aepr9Cnmeuo6+tQz0yT4MCbW7ljkGw81xrECXNM/q/Jzpr3pgxTqwx2fC0o8fR074bNv2xSPnnhQ5ojxV52z6Z/QeWJQQiMBMu25moMSRwYq+aq9NjHMOG/bKJS88AUi2AadTly1nM/EuKkZ/Z9w3YLtCfDN0vaZ/fkP929MmH1F24u1Zr37sx8z621TovS69H9bVnk8KDnFuVrg/LJPj5/FFl0ueynVr7quX9MWi8xPmksk8+SGi828wafTGDVv1R2ajqBxYShlX3TfyahDwrBujxe58DEuZNN3gL+jb2ASRFmeSVSJEpHoA0QDiCIki4hVyBwWiQLQTf2z0EfjbRt8C/bKFXzb+yya/hPB9ix8cP8KxcwwkG4u6Scf0Vaq/cNHZ4sJ6zgoUJO7O2qk+aWV122fRSn5nQ8r6Z2u91LOA1ZEMrL8z0291WXXqJe57Lv0WD
jONHGX6e8sbDpr1typWHVEbcN5xIW1poH8eRhmr1FEOd0sBicqGc7LGmBZWBWhLLror23GXO037NDR3Wvbg4MrSw875M7DEw+UNEIrqCJ0IDeWeu4lgYfRV9PUWX+g80hFfmkxe49uclJkdv86Hhu7Wi2qv9HYY7TAbjcMy1Srwq1XfayJ7RYZk168Fz2swU+WLPmgvyXDpaeqf4oc4/9a4Hvb9wQY7B32geDpm/LXrUHhMnC7hHul6MK+H03CEq1K6QV8r5DwDEmhpqgGk7VlSYQY9JQI4JGZNhmhquf0IO/GN+WOjb4F/2emvO/2y8S9BftniB2/fQrxxVDYcSBSMA8XAiJR3faiTYFzPGcldgskmj4fH4FklQ/9nEYLD4Obsmk8Ptkg6k+vlP+PqPParGoop5TWFiumfk553R1A/4y3FBDOn/Ex1X8G8WbCd7bhQ9alkfD3MSioSNcNGtfcn7cybEt9AOxHr1rn5P1bo8t5JPNoGIFsUdCUoQ0bSWN08EWRwFPi0lAyWnBVokRraxRq1u6KaE9nC+2Mor7o1vUqCPd9dl+E6125/1o7MiKyp7FT6Ir0DS7rw8upzxj5nkb7mMNCcLw4r06YGhZuGykZDnwDRK/Tl1YjhVDMIJdnTsEDIflhTe/Ca71KtlB70fNLhvnyriHaKol4H465KR2zQzn0pSCB+UeI9ud5e5/XrOdatdyTFMkZA9wbmFELJkHjkrVEBPORgoT2GP2L4g8PvB/3+5G8b/bLRL1v4HuRb4DvLR4h3lp3jRmJgvHMMLEGBmd/Zb8PQ1w8tT4VnGDwEXb99IZDQxxuAiWAereceWNz9ntBf5whNdhCol0vpqqpAuWT250imMLw4woQt0BZw2/Bx09yTsuXMz0PxiTh0rEjecBCA/zfdLQVCZN4RQUFNv1tVIW3Me4o7om2jj4B9o3vArm5hDFbr71Yl5kpuEbmVyZ2eb3+kUQqDjdI0LaX+Z4ZeXb74MCrl7sm2/QnlWCPXMfh6sRNvHbfCsBvx6Gueoshz0xCbS4s12F5fJbyk6ellTPK0w1kbtMas5ixNjjdAUjkBdTNXOu6rpcsbuTXtwk951cH1RoQq3iuYs6YsCjIJjjqYr+AuDBpHhLX5vT7oa6sbrTB4YPtYWWdOCmCCuMNLNOVAdHvAmO+nz842bEIgHoM3pKkTlHkwMQs9cAB4Ij7wBPADFGL4LYaPY/vtGb5v/GtIMPwR6PsmH8w3lm9BNpJ7iPeMxPcQd463cIggZGxTXjv0BFLxumgkE17zCc83bxhtstQf8cW8AZjyji6ogHP8RF9A35rXDt2eW/S9gqBzWeXDGlZr+uewpTAk1T/73JPlLsqHs2EiNo9b8ueUhSO7Q/vNRPvQo/Nuu413yM3y3jATJ1q8su2588PK13KEasZY0m/buzfoFp+JDRObIhq18llNvwFeBc0B+8L0e6p8ftX6288d4m7TM9eO1A60iP7Pfsrzx/1WSMjvpXisOB7M3geij99wLft7eZWUNEEpM5+a+eXjg35ULXys/Ck/GNlh8OfR97TYq03o+McoDGmtZJ6B7ku6mYWiaPj79IU2/gpDiWfjirvR3qz73xBKjqkEQSTZVGdHBIHmun+CDhwREvE8cBwSfz7Dz7h5Nvzrkz5SXi25M27MO6e8Wvrvt3B8bMfGMXA0P9hkLV4OjOzTp4sMu9lqA6VGZvrtPv6YKYUheQPwWCYpoF/WRb9UuLfUXg/nvywZ9TX11YagyTcC9ht064WTtngKwOdtO/TN6aLSEcEB7AB0h7LQcOVXNt4pYSF51uQqNr/SoB6p/hL7a9HA4pTPBSYJAEeJnHzH9ibRlf67KRKf+TzP0PeiFfZUquwQldY9wfCs/uumaO//3J2a1jlTPmM0If4ZIU80J4tDdfR1NDtlwMMNgAc9XNqDh/T3VSmgK8k6Gyf7FixwdwG619VCCw+pati89V00xa6kFfPQm8bqBHIbp9r+NRgJRtZ6a5UseAoEIOJDRG3KRwm7OJ44nohPHA+ERzx+xu1n3B4bfkb6Eej3nFRLA5l2lhvjznxj+WD5y8YP4Y9w3PjYQ6xSS45yYMXuSZrx2PytNIFlc5CWWuihqdhif03/rJX0+TemcnHroYvia5sFTp3U0G2jtBZFP9U/33aEgG2TLdPfVOfIgqCJOPLqUlEHnGLtdbefEPHIWxAeaiV1uVlQxc7WUb+CI2atr21ESIgBwQJ/I0RXcJLTCgaiHLEHIOmUvf7Zptq13k8zcvjNkbS3SuHUgUJyRCMTGp/nxvHKo++GsPZ5Pl3vT/t8YQLydLCZcRYT0EyMjJZZTEA01UVfl08qn9dSuj3K3TG/aiqnpt+Z8nk2AodmggaDT00JFyUvv0rEkbpBnHpINeg7g95PhqrPEHd2+fWBN/M6nNV2sWaqf2uNlSIkr4NVFy2SdHWqjqbcFus8lpWyeSaUCHngGSGIiE85YjiEHpEekXamnbAxNK3HRnQP8sH4I9JfIv91o++BBEfQJKCJemlWEFDnyTwMXjqyg4d3tE5kuquhr6TsMZzzbwBV7sm3I4ATHY+irm7qWwvOLDk5PSVwLRsfAaD0raa/mMon6jtzhf5ed7/yl2iM7c3v+xughlbOG/ACKe6oXJhtwFlLPWi7CQUuIb/5zvosHM2KWyRGOvzUkGsWs8SY9ZGyMSbppUnfCkFD4twW5Q33dfBczMBZOw1/ihFSyJGzlGaXkAH3fRV93XO4urqf4e7skljrYKXes2jdVn92jfpGFEznbL+Rh+uC/na1vebQ5Pv29haBpzJE3zekubtGLaFiNjkrcyXRRITocrMv74+YJVi3/Th91A3uLkB37BlwQdk7RNyTZcF8DPWul6if8CmoWxaOruaxreLKaNBpSjGYs4UkAYLiGoiFAvhAfKat0AU4fgdiFADxyQfzI2JnCUQ7I2gEJ2uKafyI/Ih2C7IzmEAiTOIBVWXoeOWPJ+KruJZ5MDqsHVYC942T7r7AwrSMAP4z9h8ctPIVS/4+2Pda07Qx9h23TbaQ8l5t25XFiDBvDAp5u2wfNZiCkSQKjigPDU/S7Y/8RoQ9BjtFdETlS9yWFIhGtW/CfYZ0ZLWzxSMd7XQ2CEZqcJedGVjRN1nvNE2BzyIySjPZaJ6vGH3Xi+7rcHtlwd6A8aurz6ENWH9YpQxq0JfzvIIafc97+7kv0afpbx4jXZsrX5KZ9bdq98z9aqgvbWD4tV451LFsya5+5jTOhbssGa6SE9A9XT6egGj3LXgIXFx4UjPVeyVlLxD9c01/h7aVoUyweNJbQpQKg0Ne3BPhyJxB/92EA+IBjjnz4AMHIqIEEexCz0iB5WckgmxMgXAL9MeBx0YCbMSBQiARoY0jE7RlZNLongzQM2B7UNJ6Wg2Nwc4s4meYZgYW9vS3f3B/DvrONl2oNMZfqNZuxOBZw9c4x/7m5M9J/4xCc61/6aB3wgrEnBQL6jnJzv3K8ClAEkTq3vU+q5TftwBJtctRYnCplSlhmM2h0ULOnctimt8531oUR2k7GSbnO43M
obxNW2rSM2CHvqnkcodBk36iuUg0F2g9m6oWQ3oRqbDOB2Q/vEe60d/m+5wpqBvfqzqj0LxnWud4W7C2TF9z6uFJ9XrVuPBQu9ifKgeXz/lVyQS31k7Xux17EmxHkJdfUtadisosWBmA0SDTkuy+rcjpx9WVSj4jr6r0XYSYgCjpPp1q02SxcFz47ukHd+ScWQyKhE1Y+6lvLYADcb3LCPRKiRIJURADmCgQfTB/HHJnVvqrfWciUo0sqj2IYrcbowscSLuI9iQYPW9uV5DEkmptk28MJyaldPlU2qDl9Tjgi9JuIwivGTi7WEvWJDhVaDsSDq4iUA7/1eQbyfOZWvRtusrqgMTbTrwJR82fnNJoHI3yWTEpEgiccm44nuktwaP78tuFSoBGPTHV83j7qefZ/BBo5udhGJIuJJ2LbF465O75IMhMf3XjwhBduJFHXztyMc9z3/+Fp8ngTge8f0kvCi6eKJ/9fLQ2v/Wz5KmslTSFA5E/OBDpiv2p9mOc0eVTXfdqe4MzHXulMZ5slFS/tfx8Ogz2hY1mIaFywuBFT9ZGk/UQfWk12VDPt9+tfw5W80IpPbt9AphaJ9A43C7qgs6mszso9yURMQwWgIg1VVbM79w6qTmzglvsaljHIXgAgRREJRD9dtDHwR+BAwlBjQwERMXvmHfmBhCFFGXdjVT0l4milI2HE6zm33ZVbxJuMmxcTT859IJ+yfeqD/CdSOs5dX2xbIC9wOBakvNxyn6Vd19Y6J8zJEvNgCmACbLlqMFxXD8FFkTk0KPhaqDro0dlUYequeiHoUsK8zYqWmj9eJB3gQXZay3JwswMllMCATvL8aRnLpAS5EYcoV37V5pnvZ0mnqpPpNcrb9ONv6LE65HvlCXodFzKD4b3uSPVTEdHmf42amevi64D86YB3KX++XHf90UNr5qQG7l46TAI2KnjWmlBse7/iECPiVpPggHEPAWKw+BUQCx6W6tt3nhY0MErNt1T0F1XUvqZR5S/r1eleQ7pYP0JNOqB0vMuvnEoPhT4VCpfyKyFhjlkla8pvSDKmu1+xZySVnLJxqXVat6kA/KICEQ/D/yM+BHpx8F3jgKSxJMHaZ8VfYsxGGT3xYRDKIpsjENVKETqyeXduGbBSOZ+lfc+OgtA+rT8eVzZNVED9qzFxrdLX5iy3rT7wuZ2X+hqqXchLE5YgSgQswgJB9oZPxlBc0Gnvkqy/jYmpWb3ApF4IBIxq1WCApwTlmQvaF18cUJBQo6FR74pohJbFYWQttcGgwRJy6YfZIPBSMOdGXHD9pSnpeXb8tKB0yYTPGIAbJuhLiKOMP+Sh+h7Mq/NZ4f1RNUu4bvSF40vvW6Qmj+vTZh+h0E4jfdL3gxt33z92qUOg+3Pr+LN602QbBlXnyVMPKKrT9u6alQ12Q6r1JUVjcu6aHLYkwuTw2BNfFg3PfLYWtz4GnGvYDZG43kIvS9roXPxRjPvi0yzXtc9afYAdj13l5Tm1p1KMGx8KS1D84zUwzDyQAr549qUATMIFHJPBQCLZ2IROHKuL4+Op/hn6KvgeohiJ0URzpGdVXjSxIerf0Tk02CtpxtLj+Xf0SmyvsqPtYmv9cY6bZfUbX3X7FfJ/cqwtldBd8C8BaY98gNxk60kipqnbExZOFD+NdFrFWgFh4A1Ekl12kQcCpMwV5Hqs0wGYJ/RM3Hf5AtNAGdrZpRkWUHrjpEwWM0oTBTBhMhyGIPn7u7SjWfoRYLkFn1fkqleejQZ9S98RhdirRuSfjLCOJ3kaSdtkrp4LSZLxkYF3VTnzb2Y6ISp+/PiSnv47fRY3vTHT7U+zsT62WThSG35JuYwPGwlX1WBrkP6dGRIhXPhgsHWGf8uvI561quqM0uaOzH8Ty/5Guh1V9nTQPcJAAijPXWrCT8rdQx9s76tjlntkHgxG0c5gWEAuuFR8Oq6XBsRBUIgCoSNifIgESAIDoVq+uziMpZZ1w1doaRehVC9eUPR99RIrxm1QgLdlKHae2ARtzrqIQ84JbUvs95Xlvl9cuNVCuhZzUxF/7zvuKXsV8n9qr+kwd1aBY1AFIUCQtSdBojVUCo4NA3WxXsDqvRY5SBiRAw5OJgpQji6bYABEJA35lRVjN5mIcFCgJAg7UOi1ygGg5hFM1qb6XeQnA8ZhuFsw+WZqH6l3mUBgDf6usml+GR5LXTv/9n8XjBOXEa+2mujnYl6PD6VZq50ikTXT/d7FoB0cdV4vT/XpbZ4ta2Iq5PcwaZXQ28sA87ee7lH4uG6p2hHXStD0I3ZlcYfQX4vQ410ntYrGDYJLz7Lta1kdo8de3anFjqes641ixmvv/VIPGx31oHTRvvBc8Us0i/arKGCQUQROXt/OlChrybv1gJHJJ0GOUUGp8TLwbs+4WwfhfxIJDvFpOTVKCTYG4/zE6Dm8qahnAlrbP21VJRf7wLdxAHDfdjXMHi8tUA+VeuiS4XjjBzs9j6y9Moh775wWTYmMGhn3g6WDD8wppv+PdQRGsmhoHKWVpkF7A+00CkoiDPlJSLaiPNwLCtBSgOaDkhA2gjMyBDZNKS2YjHfLvXyJ4IEBEGMIlHzgRChygTSBiinr/Rs6zQPukPoXZh414zzOnb6qbkcHKZRvyCzeeoN9CVXDBW6X+rJ55b744aGdfZIXFHeEQZjBMOoeefQTow5DHvQ9epoO4KRRhoG4frwJxPLOjhnJldAty/WlJhB73X1yrCkPRPUSDyT2WrS6O/V3nQ32JhFGsNw6mEdQRBcTIFafBV9NwYDwQ3cQClVJBPthI2wMzYSRWVC2SLJQ483AHvlc/R0lpJjNoGQkNjfVOmDs++kqkKNwUMpTs5mFahH56kX9BU36QEG5+OY0NkF+lZ1jnR6LQYrTVSL757Df9NW1/p5x9WdxGgkeCMkzAvgZ9r4lvPmfRluqezOrVpcw+AGmRr6qxm1VAv9xIOIWYwE1266hC3vC6bkPtsnEgYLCYlemdxeolAkkfQvAtEhEiEieX8niEBi6raeinCzkufBC5o7owVNitreA6W/ticHNVTjugQLRujqfC/F0qvQu8DdLwHdoUNTyB/de3UuSjYwjIoKSGMX9G+qB+Ohyujing297dYfbCKU7JRhvB9OL2U0e1XDXMr4s0PbSn3Fe1po3549n7KVQl9q1GgzmE35bA5Qs+E6W8AZBmO0bsNoDHv0Zafto0rth0AEneQJO+Mj4MaykxgDFiHL+RFFBwYW6OvjgBWDJS0dvDrBd9V3XZA/vT7PZSqh7trqqt/gTnbVqQIaq0RXAFfJsLoC7iA6Hmy9n3PWU/S9Kgyg9X/W7Fcl/PcV2XQQQGgDPxIlTf5TpxcPWW9JdyUxrVEkqgrEk2A4shhAnB0CyZAp/ZtYnb4dhWGxZaygcTKMeXdFUQBOudYsWikYNmMEVL1NOt/pigE00PsSM/jU9ERjrOUJDW5m5N5A6GcodBMWJug7DDpqu5SVqFdua3ith93in9wV7p72uM6BOdb99hMrlm4vnuL0RmLM7cTGnLh2woJRWxfZ6anw2ipc3ftbc86roIu
5CqdZzF1r/aSAZ8Con1LVpXk3bCT7YezR1+aTts56tke9dGuocOpt4wfnVqtZw5xy6NsPImRvLIQMwPeAW1JEt9+2KYeLwjmjr+tGOeK5TXBLB19hc+PJSwuaju0C9+3lGga3VQ0x+B8m0ZJANXM01frnkJNJOufnC+gJADFulMai2pMu3WGDzUqIqyMTdXTqP5gk6Z81FH0jVldAzuaQqjxRA8MgiIYUC6lrgCjIqy8fKfQiQkCIIpLBxpNjdJ9HI/0eLJ6dDJXMvVkXZ9PT2wo6lTEJWAKzs05VQOt7OJuzMEHfhcK5un0Hww2nrPrp+zNUfs6vndWzOHUKxnXhanqtTtnxC3biFCqqn/kccZvj5QstT6DQQX8fLzni9TImxH2xV6D3E91xfXCVNGC8KLkexnXS0/JvubxrpGa9/YrNapuOXkpNJ3digmqhS+tKfwPhxrjl/ZE2Es4hQH3NFn1kbs+mfG7YrWF27BJjXReZbHOEDEDEtUbaMnJMoHqQEbq5PUeOYUz3RUvw8o4ubMCgALxvfvsj8c7PPfo6nXM5AiDbgJOrnnoeBWzQdBwSdQtCLXrIs2SjdEkckZRyKYbH9VPV5DvTlpJ15CSOarPeEQLxTrwxBabs1j5efvrbnyagQVJH64+kpNXhLjl6Q2mfFBgeiieLM48qTLTNTf+zx39b7fDIMC2cn4uHvaVRVodhscWf1tACfWeddAFI5VtAVmD0PXFnuz6M0PeUOuvp9z6+/ournaJL/agn2TYpd56Um4czDGEqW2o6Koz8ir2luTk+c5NuujpEprVt4iLoolsVvQ29lYX72ttr7FYq0tkF+r6t0ZdHUDnC3fK0rUB0hlOZXOjF8LWhv5S9rhoA3hk7QfXP+p9NsikbZYuvVQ4NGY+EC1vGXhPpGjDslMYSOgfsqbyUr6PqQ4HSgf7ZHxl8//NGOYUepe2PNA5Ydx70EHt5KVA2CnR4wwAEGkoUzVBadgVG2hLYjMFagCiY+l8HT8BOxIzACBt2Fg7Yg2w7NkPfQLQRaYZx/xn4mU5qzYlahfM6CSwA5ZTF2TfMJiPb8gFA1B0gRNSLwDZl6p9LPzp7FLwyYV2nCFd00YtZdc0M/D1e0ULbwWbagpu5UNPf/sUtxPFj7+uh/74Ao07vN23itM4e3WfKRpV+ku3DUeC+8RpN89mOCsOxYTjiO9RLD2EYTjXtpaweroHcFdDFZNjMji+aHg/17hXMClcagu5s0c3UY9gGsB+9s6HbLjjqdWHDhq+Ioi/V9NcmQPuR0JdlZwkk22iV0CS9Krbe1EMCBsvc/vKLPNhswFEomOk3knlED83A/s8qIeXMEtx0skD4BNhGpE1yrFV3D63O/WTOsgK6VtoYW9DwX2wBxEkLPaz8TDYDNi+NDtn4rqGv+UhrLKLjwaz/stsk2P4L2IMETVm9U9iJd+KdaWM1ADv7aP1AKGkCWw8UAikGI0MvWVfStpfKd5NeWjkx11ndhwNv8RRfxV2cTUnD6eMFVUqzZVhtFfMzdS+n0Isl+jaa5+uYh45GNJf0oGikub0FN/01By/2ZBFv3VQ1nGTF9aHxna6uyn82hLjKLdoh8VQvTdIf7DvnmfGrMt00t1UCDU7N8Pg9BeHakN/0py88HMDN2nE4CNt63O+8CCtIvDZqLO7C10+Z+BoA1/9Vmaf6L0LRtAQdZfQtWyHVO/7G2oZil5yKLvOqbzQSQoW7n8r5PNNCz2Te6wEGL5SoqUDzp/++CBvTHnC/YQvYd9z2ceorldE9UIyWjbIwYHNfIgROYyAQYqBdcEQkp+hmrZEDkplpJ+JAO9MW0m5a1VZImuNCM4Vs4ADy6EtIS8JMuXL/Rw8q43GlySRTb+R4O3MKjSjoa0hslTd43MgVv6EF3OanNDjb24DrF93U6bpUV54cg10qvmp0KTZMGHAvQ9xFB71Vya7zjYyW7auZrkp4lFdd+pbXMNwdzI962rW2V8PL+7UCKjtf5Ybjw1H8VQ0hRsJOIL/QxkULNSHGCHT7xVbrBX3tITf328t6SDcFhqP9HH4uaD7a5M/LK2rzUOlYj76uwnGN/VE94gwTCnWDcdKLFq5Rv/TNo+/GCCQKvRur9XdAVb2Hc4O+jXp4HTf8kljmLIqgIHCWKSXBcJZgFL8qAsuQ5k7Fs+T6oOvN+CMfOz8POXH1bdsCud39NzFKZcC3TZiTAThdGAG85AudrjRvYaGoyuBAe053daiaNxJSbuR8tx56Gcy0M+2BdnQcOvV/FPjLXaqXgQoiPY30PGt8oaL3y5Ac0lUJfX0aGtjHIwWcmnwFfVbei7K2dTVl+rmpnzKGNYeuk8hW/HxtdS52GukVWJoSou+wQ99eGquKm4zIfx3d2qCVHpsbDLaD03uYNLEmUk1h38+hC3dtDB67wg4jUnrVdA/DqAkxRpy4cdRCN/wWPpULbF6vz9aGktnZGSt9VU7TjU3ilUvfXqXg6+JDGMZk1e4fuP0aJuHy6LsRdkoBSATYXk2SHCrFfHWBCn0dKruZ4Qx9NT1Wf9D/KUKZAaeB783APeW7Ftc7j0d6SdZXLYjvaXNmJFD0VeWz5t8wA7A6W/UuVxOhGDcoQZQyrNXoW9ysEFijkwWgTcBCyRWLKGgQedYfs+DIGdR23fkAACME7DvuumHEjmCmX6pXo4Muqo9YpqqzNU15So5tmFUYMC+zop1GdhUeBNTWnZnx49k6bkEXFqDLo/JN4b6T9jSCh2SqNkVuKDLqeUqkC1+pu+HmharzQ2wbzjJdr6tLVKKUZdbounJV5sFiTKIXmbfVPFsZfXex9yuZQH50zsy5WPlzGJHSGo/t0c1XVNddpusaBk9ybZLo5coIX6DyQqlTX3JV4rKeYYXDFWRv+u3f7+nqbe4fMO3hadfNYkdZBb1lEjyDT0Pf/Cc1p/4MeSui8L+wVEx3Usb0z7r1b0JfbtcXLzHgKClzhf0HwLJcMTHy4iCvtI6o8wBgxFd/6yWCgxAkb/bHCAHbJluQYJ7PtgmXMuCQXRJMFfO2zB2k06erc1kfgtmIB9cm1MfweGboWpxa4G51Sd2fGelchMH0QyBOnux6BXCKvs3kNcPXQc2u2+ImspADLBpFNLqDqCfQXGDajeED6PF4XWFV2JHjrH+249XlGL3E2etLPXc3Gss7ykdttp0Ewg5ReVb4VE5V0H2ZoV4HS6w97dtwYF+pbQ29GA1gL7PBvBgn/TzUw3lzvxaYS0kXnXA3EJiE03F5YXZ/RSw79LXCACCRFpFIL0nOXgnYY3mJ/p7OO3Z2uSRpY5B6/bNy39uWNM/btfwb8wKb8glxean8rgMKpYF2/ZyjRKGDc2INFUl6/aiwTYkQB077FN93ue+43XG/Y9sp3JjvzM7xqkLfNvQL5WAUWExw/lOPJ8+Xyhuw4KUdKWVi0eRMH5o/1QDqzKR60ffkVM2LOejOZL3UvRIW3ux1pX07ii4hKYEPKROZ72Lz1fRAqwd7T6tBQrU0GaVWrWYdAI0WxBZtHjtPF/7zXBnldw/GvnuovnpD3gqGZ5dba/
WNlD+i1DeV+9k7TmM2Y45iwS8G+g8qO4PbdPBsnC8qnFXi5YWs9M3aa2T0na0dxzUsn1yjR+k1KH2F7mBZ2to3pfMhw5xxUviv68/LsHeqf14onzln3RoW89Jrm4ndca4Jz5+0mnhVzsC+7NCnmLbnnQe37P/8uvVXZZPsfqUorPrniOPAQ5AigO0HEx9y9Jk3jDGrIjooS6dvO+673D/k+x33b9hvFO4cboF3hrk9k5tDuZsBM3NNQ9yFJEEnYklTs35grYJ6gb49o21kQWq91voK6M6Mu2vcvWCznOYrbhYWw3ExnM6GtmffvYZAeD1ws673ywvOj8AAUt9j3t25zH/aq0CDBVDOh0NDL/Uehvuzw3vsc2X0FTbSEFxCuR2eR0Y10hBle/XDkGJvKp7Zib30hv8FV1Y5JaCLIB8va4vGoNphJY1bRl7/vSR9jrZmmTizYiygdzha/GtCNzxO62wKs1HeBMkpBTTndepF9P2k/nkI2LoPsQj5jQvfWNb9SRv9nsgbT8SNFZQYpPzf4jaUIJ7d5xYFh8gh8kR84jjwYPCBxyGPKA9A/ZyLP7My3UMe+lu10GoMJoRA+0b3Dfcd97t8u8nHDfsd+422O4WPwJt6anGLuyhzfbVwBRLWSXan0iGuDCnPU5JzqlmCU4i6JyTEdY9lmYvAZOGr7PuWCg+m+/M56BR0h8N6OGsPt8/Di3aa62YzTx08n5i9TT93QD0tigd7S/U3DMSU0rorKkAaDDew241ykgzXBFX9Up21fvYlm6sKUtYYnGpYPn+rePZOhy906LHlujSwNGOEx5i4bqXLJ5Pq6VB3NfgyqxpSmeU0/qpCqG+L6oMz4tuReHeqKVn/6UeLOChthOb19yUZwpCQknUI1XPXYpgNPbCQiewVPfYbztJ2L2+w8//KUnKEtvpnt/vvZcerVKcyZnXCgk5tOA56Roqc345kbywyguvwXEmw2oBTABL2QPuG+x3fd7nd5EPVznfaPjjcAt+YdsbOFKjVRqb66z+9R4NuYVQyW7mBnies9A+la4kyDAdAoFsSCoSic0h6abuYuV3TIWh7anz5LH/yejHeiEvlauSpJJKNUqprkm56X/FZW0Ojr01e9mcDuj2l4E693NyCW+mnfzdOLYopLdLWmPSIeMbW3NAqTvQeJ7zfrS2qu/XW5T5pxlAM7M0A7Gk9lu/u+lp8qNvwMFx3aQzJPR5jOdSH3ThtqxS+Xq9WdUGh/RJ7mX1WF4+ns3P0HbQIoFu0zcxYdkkKjBQwkMhDPWMQQRNPzhYK/rH4XY9ObVJ2R5ZOqyowD1gS3e9BKHYbEr7Na4cZoSdlRm6T9tyGcYqnQ2dYZ2MAtvwbISff0JTKr0qtrN4OEREckCeOZ97910J4lemaVdioMIOJdwCB7kys0KupNjz6fsP+nfePwPfA90A3RuAUI4w8wzbPR+rxZBJ02yzFXSAAMcOozkpZOQkzEpPBMFLCrKN5FCM2vE50lZ5D9zHNqNUboNv5AZzOkpXt0A7N1Jt9E8PxmXJ3poxjaYT7gWo2e3KmBJyxTJMtTS4lz8Ce0837NcQhJMAz4hD8jNoExSihW2b7fJ9A5RoymPscAZL6SBRpJtAh1/E1m33EMNiqahsuHUg36Pvg5cp+iKdff+8z3OPx8O1ni/vg1BXK6wq31bYF6uG/WLKs+zzRYw8KXHEYnKmd+1MqhfJq91xhcQoMX6GViSKhDJiy6LSSnBmwIiLRwO5wmntyzWivYLBm+SApS/y6gS9jvT4MaQzMKfQK4Mm+hO8q36e5oJN9npLdVx2vwlBb17HhJUhvOj4OiQc99ZB6JWv+jXwk+TwrAHNONql5nnd8BGy73AP2XXZlvR+0fQ/bt42/b3QP9BFsek1uBcZsbMD5H3qq+dP/FgGIYqlBoEszQCShNQ92UvKbN4BqS/DQ6DvLyNhIYyvFcmYZzgv5zwGDNKlWKl0fph7gIyR29bTf83Xu26CvVz73t2YtbyXHHm6MO8vGuLPcWTaSjUW3HY0CAT0jPYV+RPoR6fcDvz0pEPCkn5CjI4C5b4RudTVcx1DVr1rjIkU9OzOoN/Ms147Qs0bTtXVks11VA3ObV6uB4Zk0eOB720SgoX9NWWqb5XTFOZM19Pbj8FQVpJIf7KTR7kiPu75YMQPNHoKvfP7MW80zgIkJg+sfdnaYnJlQlMbTtrOMXTrchewcqbhWFPfo21zrV2MRpKHA1zcf/8+x+L4uE/TN05zugJT9n9Puv3H04C9rpLcUuJ0DkHKLyfkuUOAcKKQcF0DAzmklsN/l213uO7bk4Rz4I4RvgT42+h7oHvAR8BGws+zJl6+aoWKCwwScBqhQg26G4QakvWYyHyHVUrqDai8k+9MsxAKpt1ECcqxRA8ND3PVY6/0mTIZMd70Yx2hS8M1h5GnZVMJOjzoE42bpapO5lR9OguwUy/mBtDrnvrfNvZtuORA2wj3IB+MjyAfHjxC/hfgR4k5x47ix2IwThQ6hQ/iPI/z2DH9/hn9/8P9+ciD8qhjs3JTINaouAl+2Jp9IM88aBqMD9cG1GeaHVNjVQ7meMVgOX3QPFX0Az8WA2rbbo4OLal7C3Su9uVJmoUN6D3r7Yn1aXCszY8Pn6gq5dnu+PLRFykdo5hHl5pz3PwtB2dPaD7zKmHOt/is65/9MqcdQCUDagiqfsYV39M8mGaE3TpGXov7PQTbNhMUIG90ZgWnziLvJtuMWZNsQ7tg/aP8I4dvG3ze+B3zf6FsCXdEttPImHunFN6YLQ1lkFQdgYGzYWYH0Mx0smkkBDts7wiBZKJcUERwZiZ9R/NbCoFQzUFJXIq/41psQQJ3C3Bw787pE9QGQP3U6ESArrJoCY0WQzdfdV9DM1Be/Qt9JM/fmR1HUzqXarHCzJTPltD4b4R7wLcj3IN9D/B7it3B8C8fHdty3585xC0dgIRduEfNC+3HwzyP8x8/b//px+7cfWyAGCKBHTGntOL8p62ROCXQSkfl54Q6D4alqV3745D1ye5Ln6mlruugY7H240oUT9XVEOe5X9S8FEZWrPgG9pzflZaYqn+WEqZl9PjipvMfd4W84c8MQhmcVjuqpuuu5aRQCCUsVBum3HfQ5Kesmqqou4kaTQivrFMlcoKXedmlBcz+VF/ofIJW5tzpTdjMI2QCs+TdM//y5e9uIEIgCeMsqkCDhjm8gMHiX+y73XfYNQZNY3Sh8hHBnvgW6MX3b6PuGj4BvAd+DfIT4wVG371DoZUoaRRUFTuS3q1Ca8c9GEpDx2BA6/ZmxOe3KBDrE0JcMhiWZD/UUqevsU3CI3JgyTqsnrXFosVGFGt566G2WR/1jbXB3CLqLj7//Std7hw3gtsPFZulDNRLMkud5/b/Cm1rTh4o1e1AKzGbZVT3zR5DvQb6F43uIf9me37bnx/a878/bdmwhcogczMpV3VQUkkjHk3/5+fhlf9z4W6BblKDeZ4+OAzJl7W02FV00CXna2iuih9LF6aaDC6o6a8UQl/JZoH1rdW/PYaq3L0QZgLGKH4fvGXQHl5yZe
GfQ6A62R2c5QRdWhr7+l76+WeV9gRkMn5gMhhUm5RN4/qavK4FzJ8eP7lQyGEtIGDxuWicM3QY4HeHy758kYzPw6zLdCdhew763+md/q/1VF2S7MTHhdoRdbgwKOO643WSP8k+NYvkj0Eegbxv9suFbgM6nHyF+sNxDvHO8cdw57lmF2OtLbY0W66ThdhA1QtuPiFS+IHENzEcGY0PiZ/5h/z1jOmI0+oiIwBFbnTYmU7bntVcer8fdhkSm42fL4UUjFXxWK+LSRO8MghHZWuTltxWDp7+NRtrcmIOD3mzilQ+W70G+b/GXcHzfnt+349v2uO/P+37s+3PbIgfhIOA2SrXtVKTbz+dtf6qaOuIOBPwEyn5q1XMLlNxKOcMwJpr20oLDyIx/A02jl+rBdma/Ct56INQyHQw3VuHh5QupdINuBDWMuWfGF+VLcBdL1rtGu1MsPOXTPfpOFdFLUJduIdsMmAWW59fdljC9EREYQnPCWjTPI+Vzp4FIq9s+knhoAG7bAlFGOqeFHj2cPx9xuxY/gbo4G9A5AIksBXQIaWshYI6+V5/C9rHREenO4X7sAHZs3+lGwM7Jmvt9o1/2BLqKuF5/eAvHLcTAUcmu6Q8zmyRdLiGb7pPxVagonN2uHYn7dht6RIFnzDJC5WekTHwpQnXOcggeGYYfkQ7BIxZIPoieAmE1GCf66726Ri8rYc/pi2s+clPbprOlwlEr82qd82T60dO1l8J/q3Y7rd1Q+QyHvmHkyexx93s4voXj+/b82I5v++O+P/f9CFvctsh75AAioTqdqu3jndpKQ114l+0Wt+0IrL76d0HAT/w4Bh4oNpGlzN+OEM9vP/2YoeNQPNfp1z2NLJZBjVXYd+OKzKB6qMTumTFq5B7KiL+ed3DoWDAr89LeJ11nzpr2GD88OG99eILcv+75DxZt/cJ6kTXduh0IWzbKsNsHaUF87SX2mvlZZJGOsiEMSzVmFIMRSO2D9R19DgRfk08i7qIqHhzMOyD5DRhCVabRQi9sw51z1vZPu8JSiD8/1H/4L3tQpvvXHb9s+GWTf96OX7bjL9vxEY4bx3s47ttx25637WAWJiEWiTp9qNEu/QCyG5TQEdm5PllhYITBa1T24C3ZVSdyy5LV7psJMT0SDOMp9IxKkZMV+RBy1mXJncwPbcRjBuQVg1N9tgpfcljPUHrPjl5jaTN4UYTmyxsTct6ewf6czztdh9X0q0FEBrqBcA/YSD4C7pxMvL8kvvtU3L1tx3Y7DHc5CDHa1HQahKDLR1OjaxkAkLAT7w9TVh9yB8K//8TP6FXWJaduoKTe8IS4kRl4x2qbo9eo8Kzy3qVu6Dg924q46WRNdisZ4nEPxsP9mk7lDdzte9gXO00McEUWfDcV6I6/0W7z8IHBS+zl+u2wZuFQF+jlhVWWobpLQ32Puj3pjoEepJtIJA+93t8zurkdgMRp1wZ7EQL4otzRXyOnI94bgDX95BZK8g2Tt/l+jNv//JBDKBDvvAtwY/y3D/rnXf55j/+8H6Y2vIXjviVzXeBisSOCBmNLpCOSxGQeOA42JKYEtJGERIjU4Jqc4AhASL51Os8SygwoANTqAJKMx4LCfRWMJWQk9lrr7Hg1BGMyMH5E6H9b0ktX0U2SVnzdq7GHP5lBGpbce3JdekGdDalhWr2NsHj/+mc4rL3uRlZUVOK9nSmrlz3ZVb57Y3zj+C3ItxB/2Z4f4fi+Pb/tz/v+vO/PFne3om3ubSjlTt2HWsrvQkG+h0dgyXd9jxL+/igYXM/pGX2NE5S926p79BLdta9SYV/zonID7AUM++Ym9tHBwcYNGyMwXvh2oUZlkyu5MoZyirv5CC3Kr+U66DZnrzS6sAyqNMug5tnNbj97uqza1SSUGgGs6PjGTB9THL/pkGWBwQCoTi5p/AdAhAQq2Gww3ObjSAdf7+snpX/W9nWdXtpSYYLjvnTbcNtw22XLmw8urL9XxBJx/D+/Px+R/rrxv94ZwC9B/o+Px9/2x1/3h7qnKndJOsOQc5/YE48EXQexBOBIRySEeBxMyRdGjsiBQSJHZMXdABGIy2qbOHEPxgLKam13kHIuDklMyZhxTP7NlbLawPgZSSCH0DMppeWh+ZU6Wpy5dfWp+G1u21dYvTtgDrovLYS9eA+aNrBkUmmgokVopLmj1KLVl0G3IbsedDV498ZRne++hcNcq9TEu+8x7JF3Mb5LXXrUfvfQ+p7bHbyJEe5yx/Nf8VvWg90DhV+f+BkHd6p5tYzO2ozjN49qhGsMRqcoHkrjgNPLzGMLHQzjQnOzPsyMzV7WwDm07V2x487kincVVaeu1esvv4a4TdP9N/uGNJP8xQXHojAAhq101ZUV6/yRNWRWtZoD8zDbxrqqdKTN8iF2kLLrBnGyJQ2lHM/A4bc/av4kx12aYpUkM6vNC81ZGv/GQDuf91owFaWhLygwNsYtb0G47wjbIADp3ZCk7f/1y2+PyH9/bH8c4cbxr/vjv3374/v95+32VI4bNqEt2epydwHoBg4kJJmZkujL5oTBfnMrSvZ/AkdJnJiQXVWRcNzmJkLS0RnL0cei5FgjdyUKqV4x6CPVTuWNGbKTMwlJFNpIt3CSnQoeR1bzMKmp+BHpIXhExeOExJJ3BIou+0cvvVbZbJC58++9IPgnEPwHVC/A127Sjc4ZKE5JZR7MHId1GxauNMw7YyPZGXcWNfHeQ/zg+BGOj3B4q4SaeANL2CMF4SC8jaHXy3g1mdZWBEBYiLLmJGPwf8evqe90Z+Jfn/TjSI/IP2/KyqQGhhsFQ89ce1U/5qzl1F93aC0ewjAuY5tJD9i9Hjsfn1aeKxmf/QzcYvR8cIa7r1oWF8adGd43y+WXpArWmnCtBYGeETNOWW7EdiFcdM9H3GG0eGpMvz0JnlQ8uB3diWHUiXqc1+SwXXC/OxNmMHZQMZMTv6pydubAlfI/q+l335L/8y17QVd0KndFMXiNxN3Z7f/xz38Xod9+7EfkPRwft+f37z/D/Qibm+pHPjKjTosc/t6ENbjYvRHdyy/k/TQYqhKkoL51JBFKggm2x1Ge8RZIrFw7CkIZkWJb93gk1n9TByBZO003kYNVNS2GxN5pywKWsFQcYfRtv0pBmvsNDtE9ri8qab+cZQGrM20Azggku449ShFlFlq2Z3f3e4i3cGwcb9k2sd0OxV2wVN5V+fv0ZqHSt5VblFudRRLvKc0Id/ng53/HrxrqttP93yj8SvQz4pnrNDOYXtdkYUuQbK9psotUxAAarxiyhqGiDQz3SfyHus1R5VMb8Iw9LyC5r2Td9EW5aF6tJrRPgG5/+SnPftUhY2hfKNk2yi4jq04CA6tWU95vRHiFuSKjb68Dj6KMaKCIxsxUnGux6FC48tpQDjpq0Xd+V5fu4h15S39SoW/fbSYETv7PqoJWG/Ci/us82BJx/PVf/iDG999+HpHUULfdkpXODzWv+SRQO2nqnq5ICzclwSJjDA6kFlzh5r2q9TeZi90zyUictdNAjtl1+7BmGIZt26CDLGmteyRWL6un0CYw
JL4pG2Y6hB6x0k5beLGkIZgfiD2i5pHMVSDD8r3oF4hM4IzGEVJOMXNLHnr99Hy3F60hEDaSQDCgvXFUrL1zyk61cdz5CCyBY2BhjvpbvQECC4eiKSkrNq7MvUOZGVMKJuXRpRicBwpSoPpfn/+K3wLLRhLo4/+k8OtBfxw43DtSrKU8P3qE80caDb9/SsgwfAWr1hS5cVb3jtOtpf9siMwmzRlZ7/nxsJ7PyBv+RxfR93RuW/gzD+fniRvHtP4mcGs4QvoaVni03B7e0Jez/zMpM3HSH5GaBDOJ5NsfKqKNBMcuVQBq9NUfTS7onkCv8lJ8oQdWk0JL3Dvu/b/fFWLCFnALCAFhw22v8m8MenWWgbL3gr7/LYIR7lGeVKx0GwCQiwY5MTmPfOFYTXdBEY+iul9Jxk5PhbV8uumWECNBL9lZZFqc124VDKubQM7Ir1eVU9krR9RCbD+SuRcSBbtmQGR6ZjacqTA1bHgxmhZ4PCzWKE655G4U83vaSNObJADOqT0TGFcvxH40lM4BtqVJCY7X3rZDk1JtIW4hhhCJxBzuOOQ1b/ZeVtsPvJrEzDwN9M66uF416yCcY7Dy4LD/uoXjFo7779/+7cf2d6LfnvTMS6X80tMT6DHYP5xmnlWRvDX1Fa+O5lUPUdA7q6PjUi+xQB/aZDLUY/uenCrM13LdOH0RejG564ucYu0L2bQ1coSua3O/7aE0+NQjcVmozfvW1Gujsb/E/J9P6W8UnxqhgsRZ6NHw+GJU24br656ofMY5aV1tvxvSePuEhXL/uqj+OTBCwG0T24PhVfcrSxY9wuaNv2VuGMpMkKy8yL8jwKDRk9WzokCnC5xISoIlki3zGyoMklgswclJ2vywPCEOOey7xKS4s1k7XXBX9dLZ8bXopTMhhqqm4WZSAhSJQ43Eh9Az0k5yZFr8dGk9khW5273EZOF15ZXJzdm01HWuT70SWJFyI9k4ck43hlqN6eO4XDcSjgaOO0flsoq1HOK2RbX3K9D2gNpKHSaEbqKpYHWg4RnX2rB1Gw3i1NEiZQTQBma5heOfw+/7fnz8+/HB3//tx/7vzL8+6WfEI5alkmR1gue+jTJjqAHWkbxwPl/IZDanJs1Wg8Tdk5jX7xrowXimpsZkZfBV0te1mP3WLlSrVs5wt2l3Ab2z3vWvr/W6ct8d6s6vCf2QMeezEgiWVmH4QBr2Kcm1pTipMNROR84dpyiipz2zCmv627f+Xlpp71Bpq5UZb34hZbTMX+EborpBJsobAGsGys/mf7Yf5gVNGwGgvZ5nO8vtFSGlKR0GAyJCHoMBaN2qQtEMzBHiYNJzAoKppkHpEoXwPPTTnxpEqtBT82MlxA0SGydGRmIAkpH4KbRrpg7QM9JH9p12mbY8Gx4PRrcAl+agLXtNjawKfLP9KNndOd45MdR7iDsny+seDlUFp2CwYYi9M9VouLbSWfOQCrvQJhwABjEo5J7ZIizm/trujzGPihp8Vqqn7nhfUvy0gdRQVT5mdXQnxKAbblsM+499P+7b8/uv3/9/f9z/F4W/PxlIhvzkreZ48MaVmrrRRvTzbGO4RT17XgfjXD/lCwdIjKufHeD6M6iknpUWeAzXt0Y+M7OtWccb0Nuj2qlX85CS9tB7rn8GcDY8+n5eZ2jWH85uz6Z/nnYsVUgxo2/JkEUCoZAxmHLTV3ig12wPveI/KePpwgPtaUevoPJcBgbgTIw0+ihtwHDTPRB27HtKP/lVEiOALVHeDare1VNiNNP7XiHjJCcSidhZIGQQmm0YDF2R5bdr67KMqQkmvUGlwdreQqynegyO7gj8ZDdCYgCkzl96i5IctaLQrpzY+U77kGLbMeIp6fav+kjnVERU1MJChI1ASLirfPce4o0P9Xi6hWMLh+Y/CSEyC3EVjd20kn5zSvSYeO0mHEAbwKBAydveD0FfT/JzA0Ck0EtqIcjK4TpfVXlzo5tfqJoHeh2eYDAAR4IFyS+aGOGbfNse/3P7j/v+/P4fv3z//ePffmz/68G/PUkjlDzxbRhwb9/tCXFZ8I3u9WS66JDPs+FcpkLiRhYa4p4395Us7MqnqPye9G3Nbu0NPTOWquZTJXCDvgt91WAYdMeHZoumoYVkulX1QaE3KaKzDa43+gIQtbEBNkdpu5w9XazDQ81z7wvtk1xaE3qbgdp5hmh84waxJ7sVdDhaUeQvlNmb4K4MZ/2zJt/Yd9k2bCP629zY6/x4yyFQkhUWAFLkTReReVKXQq9lxdLfelwxGAyJBnuFB5uYfxZqGDYb8MxRSxNXjRA3DZc6kkehL2mnkcEYGZ4NjBWJdRAfboH5nKSkzosGWFdV2PGKxm3KjLgEqD55I6iGWT2NP8LzFo7bduzboQHZIVT5k1PeqO67ajyhaMscl8kQCwl9p+M9vSpNTaKIGLNlXiPQapiUybL0SlS+lWnZsMf4GQYjqaPDho/w5P23++15+/dj529ENwLLkx41Bqc6kXhwcxxuIm6m2t52e0VX3OuE/UNvOLHJaWJhL70S2/ett2r3ffuT5CXcHfZzBr09efVapfqSrqH6+OwZ9MPglA03LfaIzoSj2yDSXZg8PDzalShQEoPMbLZLDNiyHQDQiHDn6jw2Bqcu9cFL9mMcFL5aKf6nSa+FnuilqRkuKG8rnSIq/s+We5KXkZSD/syXHuYFPb041r/zWka/h2kkEpUPpoQPpSFAgFAQUQalblk5rkz1m9nbOQWc6YJr4Ac/hGFS+3GFuG5G84NPrCEgmYr1klCDcXBRxUGKu5bSYgVjU8d2qTSnw73ZJRsAQbb8yYUMvTc+btvxsT0NevfbwUEoCHFO5YixVlZfgQNaJbsj3F1+RVmhnwayvt4EhKC8Ki6z+Quf5HBk1nFKA2tExnvJKofikJVnBLrjvsWw/whb3FgHyE2QooR7RXRmG3jGsbZwONUuXKhO7ts1sfCpnkHyUIbUeYjH1smLcN6UfMVPa4SCrxSeVlKj7wx6F35VTQEaHfTSIG7FekdeWotG7RKvsqYc3aD/kmrIEg+Ws1Bd/UK9fo6YhIk0HsTrn9trV19hkWHc+Fox/vV+WDHRtZH1yvlhSf1K3hMmSrv/ctI/axbo3N4LVWmPh/GXAIBN1uYI54Fe2k3ZryBC4hiJ575XJOuWK6WKre+SUlrgjb7+8uwXI55r9lZhADLgeGM8bsAYZYBKk28r/y4fQKx9FhYuDHmmKES/cUi+h7hxvIXjY3/u2yClFCVv5KqLSRxoDaB3o4K7TDPuW0ZF1IYouzGROiIaDKvWpCw9QjdHz4Zr714wcvGrRjs7s0hdWPsgDEr3he2v8sv2k+nvTML0C3BXVd8j2uKswj/JR5ruNy4CqKfghQvVQoYm5HTqDJKHMowvmlHeGUVuZIiIb5Ce9TO5XuFpnlerbUY9FwC51gEM9c/DgXGlZrtkuFZIRCt7YPVPz3Lsuwqzcg5kk5IBpKKv/Tt0Sau656qFUz4PHxEVB8wTPH47M+XL1w474nrf0l/uXoO+g5BScMhtH+R/fkOYix9WlsSAe1CXOPg
0RQr6LmSGxMlCOzJjmGUUNYa1WsEpGA+OFxtz904K981N6f+8phqOHAMp31ZJEpLKl70UrWkppyrxrvx5BilfBUF2jikiaDtU7WyJlMMtqsMUWZDYZDxYowl0jQdvmfVSGmrEXfQDjIPqWy4Rso4Kw2C4rEkzEqdbM2cCmUNs3/QIWQcYjNyKNhEz+tbPhHciku/0+J/h73oHIrcoAYDHYKuH3JSaavbNzqdgdIS4l8ZDZwGBL0FyI0PSPMTjRWcuypoFXNEtt5dcg+IG2E6hd2HcRfe0T52wGtDt2fCs/mb11jgfIA8//R0IG2GjNqvooFfZABzTFHSJ+9nWhMMK048/wfHq8zKd9IbBSOMaJuhrZ3P6ySr/RggDAJ71plra87gws+LxlpHERVlB32eLssX2lnSA1HHiAUOFMd0alfXPrOxNFRoMR+cXwGitwt4zy0jwzDt6/Iy0Kge3+k1ZeUMQuLHoKTJlxtOYjVF/jb754KAXDnd1nRtYATjewrGFaNsH7XsM98hBeFcXqsUNFXHJV6lHX2pmLy+VvtfBcKbCcDCMHDQmvlEPxh6JZy3OOoCJRrouVqMNhB2mbhR+wTd+/o/4H2ohewoJWKcqj8E2jXJ2zoojvVvPe0wvvXDDSZ0ZnVrw5gVLror55WnXW0zw+A0/r0bec5saV/UW9Po+ePRdQ+/wYV7yk3KLLT8GpDvblG/qzivvilNSXZ6BQEIA4UT/jLz0l6SZo8rCojqfbqE/rarUWbiv3VQUMJH5glHWjQ93Ymjkbfo76ajL5/5ezWM/lTyMOOffUOfn7WwD4FNyPL//DVGUQyCK6JZDZBBblzXNcxwY7VETX/3dKoQtnQLKxDOEYcNgVUczqNJFI13dY3CuFQbDM2nguXOg8Bev9NWURrzSeol5+yaF2z4SN9efukrJ4SIGksCy86FpHT36hl0oiHpRKanFcK5sDA2Z/r6AvtUjSHWSZXPPU04+ApteytLNwDj9zgR60ucT+2JtXPVI3MbIxbQUIgBU5jbawN/o278+/nv8j2fkh5Bgj8KCkq6ytFaTkt440/OeIRPqq/VSa3QG5de+VOvKfSuTBcTggtNtJD4pn4ww9hcviO916J09tEUv7aVx7TZP3dl10351TtlkaZoY/Z03YEhpsBZGQlU+GwOudeCCBLoJfe3UGS154WVV0ZVuyM5+j6o4aWJgaLtGdrt6uoUbu9ET81fHTf6NrdI/zwy6ymit2OUebknVHEWybVMdW3KsZ5U8zMP/GKQ7bZsd8TzYdNH2OJJ3tDMJd3+29mCiEi0FVIHC6F5rP6qaqb8ZlH7Q1wY2XzJFEucaRXIk7WlSoTxliGXSCJruMRxbiPsQfbfcZ1UgN+Syu2e1hiJZfwmhht6FrrPqqKPCgMdgFCqcGsz/k+QLbTBpBLp2PGnEQLofQuAKgojLv0Dlep3V4CJJTw7aEL7hl//2838efz8iPeIvj7gfxX20Sq23nu/sPj3vGSqlFzIsM0Plt2OCF6bHUnnp0vsAORzqX5Inq6sz/9ALqTpOdRlfc2/je6ldX75571x7RC/67I/0+O27qjnvFIbtpQ93TYjFQEvZERo+yQbVvlcvwdZ8w46BvERwv3KRd7Hd658lU5V/Y99V/yzXI4CrQdb1r9+MATH7Nlt25ha7tGeXlVOldUeIawy2aj0MewyGDytyumhklW+mv63amfN00Nigg/Pa7yV2o815U5NX2jQzS94QwkRyc9OFZOK+GYADKwan1MqaAHLbjm2LpPs/evTtl/GZXJYemC6USemvct9i8Z2NpH6hnp8FkKlwXcDbXRIYR31GYs8iqQUcBq8leTL3jLlmw9UlxoazqyQYFCXvbwraKHyTX/728388f31EFtAh299zcPAhhYigVg/2GkXUSDZTSrc9nN9vo8O0JvzTuChr6jzjYbMuXZQ11n7VZPsS+g4Y85wiX5HGDO/tDkPNx4Jt+0t8DW6VoFnNxwbgURBwsQR71LTPfXGvcfVVlQ4DlVeXa0IH3Gq8fLHy+fPS0N/uVPJ/vmXn5+3M+rvQP1/wH8sWRfNk8VjkQSz/vuKDXYKRchwwagxGp6MmSmFxHLU70JHT6KK9a7R32fGGS/3BQw5K0mOtSIMsqbepHlSnqjzmyWw8ewwetnP7WS+kTSj0EiFwVPQNLOrzzEE3gkx2XwPUqgEHbNaMxeEngzE7bFl2t5J+XtHRCULtnFWehemlc6+8tqbt6jUZK6gXvlpuHCS7hTppM3in/a/yT/JHjHQIP+I3IBgG93c/c/tEfR8N9x0qfk/u0f0eajKvP7Mr1LnnxzNN+38FqZAs/1ionYeErwfp4Q0uHnNj4vUv3dssZlc1y7Vau1ZdQvofqQ24XtrXJDg64is5La43AOf6y4RTrnUzXjOdN57P/VXxRR11L22ejbftuPW1az+s6anmOJv/847bVvyfp5d/dn2xAc565zXMrQ5QEMmlJ0xTXsJa2w0ps95K2Vil43CQFrLTcmcYzkkrUztA8r2ahSc1imsMKRTq1jNaU02Ok0G67GRc6mlwujHoNmIOtpWZxG1soqmvguZzZmFOmx9sW444ugnvCX3VjmuPPTehT60bQ0jWYvi0apyH10IWK3mzkTTOWaj00gMMxkAX/Ya0L7SB4ex7lbOMgtBiMO20/yX+0/F7FDoiET48BvegS/P+ns7Ib97ji+yz6c+pHhuj1cuMfH9G3jLSzWvLP4auzgt260suis28Jt/vcD2K/O/ZIDEezITNyOsoEYcXZwCunvgq54Y6+Yw3Yxijr1Vu9hrL2w8AkRDmzc1J4PuxSZWnRoXBVqRB3Ktu0pTzb2yMfUfYwFQlwGoIqObTuE6C+92Q3DmYU+8pzU2U1IeIjDC4dENaGAbGSmmgJI5GpsKp5BkGw9HZhsgOnow4PTPK6tKAHGhh2C6EG+KzJkK99nSKdFEYpmz6JZLAkVlU8xw24V3CLhxAXNA3JdBA8XgyHe9Acv6NpHze0vddKY1n0wBG03BZ7ee3Yvp3m/5HGGyXFAwe1n9Nmrjb2jUDMI33BIP5Tvd/jn/Db5lPfCMKf3/Qz6ipDLIbV+47YYAlPYOxGVyW9zecAZqSPQycLgKGR4Zg3NPi/kP/WuwEpvTmYjqDBfG9Ar3o0Le5SrqXaMcHnZnEjjdlrIA/Mhwk0l0SnBPWqAtAfnQpACmZftuFOLvJJ5FXaUZImeiACsLtjvrQSgu8vC5VttpI0F3yKJ3KQYz170k2ygrOvC802nE2RdwF4U75n2v985+sQ6+DWsy9KyMxgF7FP0jb2xZo44DXSmkooSxK6TRrL/JW9hiMbBVOvT678ypVpFmOayQeLhKbkcFnq1SF+Tx9JNxV52fdSoFZmMT2I+LdmX4DaCPabIVcmgIqd6fmnlPODXXnKMhdv5TOgu3vSu+2PdhhMDyQ9zy4oezFdXlOFa8xsh5U8uUVFfa6aAC0ITB9cPwX/Jpf1jcg/P1BP2JqjamaqbX9BhGbDtaKgEGZhQyfNC1bvF6t9TBVm+iLK1ZfdREUz1v/ojIqn4
FeX7ivAe7hDLUa6JB4hqZVhycdW2j7rVfBtgGuSzbW38JKgZwW9x1tbkOdXeiRW6nXBfq7kAgKFTrOUz/lAiMMviJjDMbnVNk6lbHzf96CJOXzpwF4yfQHUaViblkqZ05YA5/VM+k11WMf6bk6utlGCQPuO9eKdHtbDi3HfSqRIST3DQ0yP5vaeQK9RAgh7QbIQWiTZL4tqmO9/wx4mWIWDM4PSNtLns9ZBW3Xtp2/SIjTE/EOJHBvJS+ael00mgShNQZfkTOtblnbGSQ7wp2yZmafLF3H8Z0+/iX+Db9p9Rt/MLZ/f9DPvGlS7xdN3UTcd9BKYqnJHBaYaUSvVDUTu7CFFndZ8+X+eav9oavgxUt6TTI6HG1GUwOBeqG/xF+Y/YyAkQ5j+CLsqn5gNB1rqLaNpeHCS/f8DpPMkY14AzDy5xiRUvjNLuH5doS98rmAQDHwpT+HDq1t6ohmO59YSDB6Y3APVU53dXJL12VUVcn/bP7Pmn+jGSvTji7FSjZpsNJuSJNL5KgPDfY4KlroxhIMD7G1RnpmGC5UmH1ziW/NPLO0qiEMl44PApBKgUbhXKnHHWr0F+amB+PYTRwV7kKRMW8gGDgSi+5oFFg4JO7Lt7RbEQUqsb9uEJQQoH6ZmYly5fk8TzlZdXtSpgBzAzVDc2KDwflgcZLyGHxRLmBw86fk9cEAg9MWwvSB+C/0Gydvl2/A9u8P+uE2D0Y9mfZU2HfQyxVXrKZAz3d7AHhV/IU9L+/BuJHhLgizYlW7F3p80bu7h9sGWXucW1/IVFVihc0JgOpnVWny8w9xV6HG4Bn6WrFhK3mRDKa0G6nPCNRyX8U/+AjgpBaeaW6RtdD252yEGr6iQ99yremlhV51yBpicEuCh1z2CsF9iwSTnzM3HuR/7pUkeBGDB131mzEw6fZHVbcI4gDVg6sr0yWVdNbckia0VjujhuFTKmzFnBkYC8+s9Hxqp6rxQ6gdC6u1F2yQjVHWnlIjBrr+NxOoKJ9b6CUW1TlzEE4uVzmHBo++5vRMyJksq3vw+SZrLVh1h+3xCSiuNNXu2nG+Dl0ldAj6Gga/bjBWDE6LuHZZDQGIwd/ozvGf8Xs+8S1ikwf9dLroVFs3dfazjp+UL67I+8l9AcNDudLQjE8PddReLmLklWJ9P1cDyhUeEtnmyKwVf63/GjJGOl2XMxc2OoyZgQC1AbjvRtPzvpJeqDIAA53auRczAEeZKhVsuA4twXVtqRg69JX8m+rjUBIVygZNbeujaIUFD7avdbCYyPg6JMpNmRPpH4EugkJI+Z9N/+xV0K+ybcy18LnaDbzs9OXw30oRPULrWeGh47QdV5OwZEi2NFhwnlmls+5DiHPUxFALLWPrbx+21EtvBp5Bb0qwnnXOlPboLegLrpXP6VHM+7BYM6I7OI31O73FvuYmnAKzfB2eqVcW67V82hm35cGoslQCoI0C4/6vCYMF9IzfRMK/P+jZKGYnGbL8n29MuH3JGQx/Rlr9cz7+ta28KusWZ3jml5QN11xc3iGxoH4OuuiX0QudaQ5QL8uGvZ11z1ZyHr8pf/E8oX/pWtcNcQbgvokr0l/YoK9K82fMDGphR5AzOyAAn+WpXyXnmW2Meac25kba9H/DajcGU/K9GiZ/7i/8jIU45YI2hfNsZqyTYXnxC7TqZVzYE2mBwaNY4aSLBsDRwJKAQoWbjvVawbp16co7MJ4cP7ujFnr1R0N8iYSDwrDurBAd2U37+5LtO6mQpkv0RUcq9ys35XzOMWHUkJuiWjhKLZopvtLNeUV06me9Or6GuAsl26TD1aIs51rNvd0o/EIfFIHfo9Aj0lO+PYR/fVKDsv19lxbq2dnTo4X0s3Yzs1+X9fNoKO/atPlnyHvg3uuQ7d8eeodNVPNkt939sJ89650tidZOWH0H+ib6sWQMOJtFBvOx+T+j1gzHTBWCD7MUTR+QJlpyJHgovT65c9HqsNnyKdmRChSmtDjnja+cos/nq1mBDlDJj5VTSYSHyNB30w2A570ZpqJ8XTZ1kElG+SEGT1JRvioL1+j+rGHwUHzsb+VKPSpsmSx7cd6hUhc2+ntVmnCjIfE1tfMAfUlyxNGnIHNhx/Ueyxd3DpmKX/bPVKW5jCfB+VT3ZC+jr/173dAorjmbKWwKoI34O98l/u34/fEMP47w+3F/xKSIxoiNxbIZBTChknZDsxvrSZV/qG+/nhmrBloy9/m2vlwWyuch9J7iLmqttaJRU0l++LpoJHvFpzA8G7O+/v7CmbB64JKEvH6eqqtcDK7k9JMXWljV5kXmp/xVbxiA4TXMNQYPT7We0qntMkMug4yv7o8EAEwULAHWlrJfvWHirUbwpcuzExarjvdSK7Ybkj94RQW9xuDhkcYvutJFl+VVWoX1wUjTJdgIlb0JGa/Q3+IO1rFeg17SP+foCxa3j4IqAK40DqwB1SYYN2F8DQx7KUt6Zw9GzeYWk9brMqbCM72HYbAnwVGAtFVU+Eb3fzn+5edvv/68/f0Z/oj78aCjSxOo0NvcR98Lr8ycPeLhbH5dfd3f6+LsEEXWKqLrTV+05l6UfrkzpMINPF+pitwquSHHKG9WopsZhjCMmsX6Ad70itxxceX7Hmr3lAFvtboxc9aaGEjqRixIPH0O5n4lFQkePLiLQyDOFJ9zMbAgkgEG+895wnHfwOCV9FospioAaZiBsq3kxYbH+wFr5l5Onin+eb4RX+Qam2Kw/hhuGFxVUI+SIQYjw61tZei3VYgd+vpx0DQ30FoPPfVHNRju2r/n6Fu0zaL6ZyLAfuASD05f0szQdCavLRJn9fc+0r5MB4Fjd+gZjV7KuTpa1zFR4HiwCHAIQIjQIC5iohuHX+K3vz3/9bff/u8ft//9CH8c4WesajIPs2F/h4RpAXJDYjp0t7wis/I9rg+XC2+0eNr0dWkqsAqvQ++sCw2EV5e7YlXOGBfK2N9a/377R9qT9QanrZh/O0xgko1EN2jxll2XiKpQAkHlgTUTD7ruLl5D377zyCQYtf75K6UzA88w2DrkX9gr9BewBFi2AQN9LgL48qJgS8GakKH+eYbBg5zgfTG/lJvDbePqPCjQbCTsMBhOHW3nrT/9mBvirlXbHB/SERotoi9CLwBDX/9wWvrrpQy+KViO6WxjFJn6MnyaCg9wtyXBJ37UZ7L09QC8OnrtpsjJVgYAJPIkbCD9kgl0o+0v8Zdffv7t15//9HP/+5MfMTnm9PO4UeG2hRE17Elng8RNgdmjeuMtdYrWQff+lNnzLVlDb/8nJp3vCe6M/qYjDoNzEwWG4d5Ls8zq1SFDVflCw+HLh7QTz+iWssSy7y/1ZpylImygz1t/k54CUcbyfCGJQCbRwKmMrrAnOkhfptdCn5RPt4SxU3R3G4sKS7IEC0ByGzAIM8VIMUoDxl+XHmuznAkWudE6juf+D5/iVatwR4hncAsHySNvrIoHA/BUGBkFPSFO7U8WgH3YUn98+AatgIPk5KQ/RF/HegU1/YWHkDEMQ2OfUYNlo1We4qjnbrmv/vyXaaR7RTQ6nvg6E
p9qYk6oMJO6o4gUIi4HCIJIiCLPtGcUf9D9r89/+t8//vr7x7ew/XCWYLgJVyVK+0U0RIGdjyvmjjx9gYvv4eLbGjLsvpXrmuSvMyMMKl9D7/BIX8mi5LQbGYOBAQz7V+8/pgaDe+BvLulJMLKVmjsX6LyX1/hxSy4T5QVXlVdfXYPB4zojzdJBGwYPj7cH3VfcmoHd4mLFg9drkJko+oaAEEoAEgCAssZ4gMHp2qkJ+rINmJLmUzDJjzKSPgL4XF89x+CeBM8w2E4VoK1hGE7zbDCsMgRjzMlxrmF8V7UKWhlwgl7tbY++fQ3m/JwfCJof12WOviNQH43UBYRfpbCfnJuHBE07YF/mpAVnQxp9hBmD0eiik/NoUkRjo/Atfvv4+Zf9+T3svx70iNXsY0g5o6q+4abYDIYXBWY1vySOtYxnp6Fr90I+v05bV7vA2sWRYVWNrDtuGsAZDMPBbY/BfSs9Aya3HjqkKkxI2wC3qrjRrJzQt92AIYWENLcvnxg8qDHYJAoiKMq5JbERm1FbS7CPtRlm5Jhh8OTIJdFVT9I/M25bCUBiVvp7rZ6ube2NHZ/Xs4GJBIJ8eyI9CfbVDgOrr0q16YJefgmDobDaJc9qYBg1IW48qnqS1O9xZHKaz9Jf2BBf/d2jb0N/vdAw+q/qkOlbKn/mr5U1BluZ6fXXCO6lrFgTMH7fLyFPqz4+GLEoopWf8I77x/OX/fER7jsFrlWCvYFwcbt+tp2R3bprpcCVW7kuQx+it2tbNLGQhTLcZEhqL6LvusKL3pQ27cO9lAyx4qmwV0fbi14sCxpQl7owAUEjFZ2bla/Ew7A+wOwLPb4FnM1dL0mDwSkJl7MBvydjHjz3hfZDp8QBfzLS0pL1KvpqAqw/aY05ElNB25ZE83ikidgcekpTktQwvMZg4IQKw6md9Sw6Qox6ODq7b9u1NUtujtvlDfFFxlpTStsRdL5XeiS3qq5YdbfKt14wePRYl3JZObP2zPoU6rtB9VpmyhrX534JYxJc1e+V5HpJTIpoIKXt3D7it/3xPcSdJVi8uXv1fj7yXevf2xCDcQGGh8/gbRlS7U/K2yj+BvQODy7Qt2mh+5bBNAq3rH8YDA+psFc7D13bvHEX3TBo0JfTLoRCNeXTAr56HwHss3NoPC3lRJvtXsIYuGKtpRmiPQ+uoqGijfSBcrR0o7MKi8/ZXn3CFQmuXD1qGDYMdnFN1yDZoLfonzd5LwCp2WSwv3zk/5zOVEmX1mHHtaZ0scK6lCqh8kKqqKQ/4g8WxunKexSknHKZWKwMdxt7Wcn+FrRC/5+vxFNeY70DtXONvo3pd/I0vtCun3p8UiCKraWbMxJF//uatjhnpR4W1rHE6b9VJR5TX8GQFt251YXo7iOpZkbYZd+Oj3DcuBs5pQ7pV0p9l/WSPH0PcIJcAbhi/X/Dqy7+11T+JTKs/41rfQ2+e6cHMfrd92ei36r+Swfdf0Axx1oHPAyTO96O6NECwo7776D6nVTQbW+dKx+N0dct8b92Cqm7Ud2NOmH5aODFGtq2By3/ej5txzMls6psQ7P0p0+3+VX6P69/vu0++uhE/7zeA/iybGDShMPCQgxhgrxi1v+MDDdvGDlFL0zCqIky3BLM56CeMdrZMmLBkv2FBr1IYJx6PkTffKct/f1i6J3NiEMSvBzHiw2uz6XxhXYHlQi4GKSKGdcdaOq87MN1nfGnWQ1GgnmX++15D3EnCV2DhHPyZ1y211LOaK6vo7/DGcEyWT+Vnod9Uvpn8hmZ8dc1r+3lFHpnD6n3TTMS1RuGF+roppKmb75w0yIp+kKcrqLMWl5pH2WQjorz8GKCRjFNbvQFWVSRFy6qhYZE6jVSDYj6D9l7ZjX2YEQSFqKUO7bwWh/4Wyu6Xt3NsJK8ZVxJPxk28DIB1kVp1OuK5SNE1/YEuuu7YklMy79TIHdQN/WdLl0avtIzDEZG0CEG21lrcaaU9iUx+jibJd6pd3QDvdaBBfqeYG3WP3+VB9bg4HteggCuK5/nGNl7clUYjIHVw6xBff2fC1InipKcsBiIIiCU3FgIW/wIz3uQnfDob8QZBfvJtPltE66eWUBgYyeedPy1403lfan3HuFL8D+7qpeGrA+PY/SQu2tfuy0rTaNPpNdIo8NguIfQ0+U0X3UYLJlJZwNw2zGvNPamXwC2CyGgoEv5x1TP8SXAbK0L6IivJcMa7D/ofLJg33utiC4QC5jlCGi9o5NMprixZc30z/uG+01ubgekVFu0eKRyyRuiKuhhIg7aSMAUIceR7pAHmyM18oY31nTGrDdQGm7M0NRQOce3UcIDKmwl9cfQc28GyUOKvEZfa847P1eq8kbd+uX651flE8CchGtbqNlZbUgUDlhIMJph46MLnLQwPMHgl/TSvsXaEgww9j3et+PGUVMDHv2OliPoasDAsLZxmsUE9maoNnwzi3tdV/5J8nql59dleN1F6J3JKczMMINJxGFww6kYrVW4945uus31Yr0hzcjoq//mbYBTSeeNNdhftQqkTP+K/7ORT6ooCurnu1CFUURFgvuYkRHigqjih61JGHVM8Ny+O05UOer9cNxQXqpk/XPOfnWa1fnKZH15QmckWzQoEBn9qp1y1UeXCH2+CP/ETxvtbatXZG0PbgqgnojHht5sIfam4utyir4W8psvMEJcw/mfoX928me4Sa9kMjuSW5BXe5KUg31Vw3pcSbt2MaLWtz+3BBOB9/ixPz843ln62zKKk5Kr1JzDilOZiH2Xyyn/36iDAyqzKH+x8k+aga904HrfGmkeI01ODQvng3/imG+swpTHACfsrO6r74kZj+FeLhM453/2d+QvTmk3JG0AXNc5qE3d/F8Va6I/3nRJcmFxvtAzMcOtrZXTn0u2ttBpf415NEFeTr5x2xA2EL9PcN+VrILemCKKO/RSC70gu2Vdc66+BvyDzrrooQv0MGvHmgf7yr1G2nVVmh8mPseWb665doG+pdhXxPieygtJJS+T3VnGj5Vw7Rva1ui9KlsejFbVrL3te3U5e+Ur31KxBDPApH5Y38JxYwmEY741yIQKrxSPs3pekrUSe1i5P3lRZb0QX8Hp4Fi3trb1TrSpg/LVFzrpWB1OU1WiXLMhwehWg4WJ1QOwR1xrqYx6p7hGpr8EMIoLtE82OXxuUn8WvX+yX+VeFF/DlUSV6hF2xBQKPItKuChpT/dZdG+f8WoSH/yaeP1zCKJm4FEGyqR/HqzEX3nGkzweW3IDQ5QnCCR8poUuSGmMYeB3PphSR1JpETsM9gWG2ukGg1GrlzsVZfljvWrzeS77U742Q19r0X4Mlc+5EuB15v2+vOih0GN5EwTcF5gidBfPVizBNQbjMgxXxWYwPLiFkTt0bQnWq4hBm9xuz4/tuAcJz0HFzr7bT38rxePpsmEt66lmbZqdYdJLDQ17PgPjWWcXd0EXil23JQ+XRyodoiQMBpREnkzs5XvKZRbvlEYYnJXPsmUb8MxrzAP5EB1tELLbBRX19PVVuoFMf0X9sI7IqUuTZFjjSjpFdCXNHoX+
whqDX96cNIsFIJEqn83/2afO8H+26pd3ddTdhRttLM+YlgPPSEqIeaDF0MVIvVVRhcGD9Fiukilp9mzVYTA696sZBsN7NY88s9Atx9ZR6j6zh9ZWq8Fb6OX6SEHfTvns9ahJ/8xOw2/yIks+ob+VLasZSc37OhnO56myZiTYTGfSu0bXYb6XYXgQ3dvd0fSSkSTfS4B37Lfjvj1vHHeSn1Tted6YA4epghzLaT1ml328KosaFnHJuEy1Z5/HKfPuz1/Hy9klVwDbK3iHBU7BwRinmV2jA8DZF9kQ4l6axy4VBgsB6n61zU0D1nNVQfswJHW/ijnvPasXdH15c1Nd985HhO+AKgmi0CEkyQZcIpEoTGq4zoaHqPwShTgtyYTN7T94v2HfEbY39/RN/PhNNqxhSCRCxCQb42dUpTRNUkPPxG9IgBHFHC4LSvwlOQzG1CdrqKDGGQxb+dy383sZem+5/M/V8eGRXvncn/oquaqC9mXmNPc9WWCwJXtpMbgP8ii19YQVQAXDLQav+lb9GAzpmEa7FeM9fmzPD447Cx/t5OX7Qm74eRgwxrnwmH1VXnrJTSuz40MZtvMSKr9aeV/PRaKMt+jdEJgbDG7U0f47bohXte9M2zetsLQrbrnAlDNQQgjFYeVK0ox+PUc5kOkiGjQK+fVrtMwbhsHPSEfMTliR6DIDRuefNd03dpF1cojKp2PRrOUb0x6w77htyf95dO10G4ZT3L0GyVvqTSRsRE9gYzmOFBD8ZpACkLFqrelFPYeu1dGz2CS8AsPW0EJ8INPovqznA+6LpfLZj6Sx+9XnmdHFer6qoVrGKaM7LfT0bD1XjQlrDcNXMqH2z7kyJHtTSxSBhsUjbMkMfGcJhOeIuTIAtynscMakDoP7el6V9dVO21kdv6AjaEuu270+O5ze7in0LmoocQejGobI13sR9+IxuO1M3z2naWuGm19han8ca0/hv4FS+NBCvAdWRQ+yA0NxhJ4j99q+6wh/e6rK/iFEgDLgiNcikdqmr+dwfi/SdyTERIFxC7ht+NjT/oNvJMB6VXwwUobnDQCYaGMRwaYaURJWD+HW9yxpoVkIdIUZL9KSlTKOlMzU0b3meZGIYwjDKldSh8+11qVp31CDvrkTLf1NMaaN9ffKS7fvcl5yRV47z+H/TBkqohtqW1uFJ5miL2Hw4tnaqCNLDR2LyocIfJP77flte+6s8yNZZ9vZtt6YnfJNqHh4zvtBvLOwHb48FyQ6RhSpCpffZx7ig4Pr5CFvyHXvqsnlVYc8qjUVjjwEqkOnEDK0Bw8Vpb2mbT66k1t10B0BTjpA9qPLXlOgl5fJsPrbHI6C08GZXSaU/qZIJC9EeY9gnW9fUfh9CQiW+bBfQDFhy+5X9xv2PdHfEKac9dQAfEp2RxHAutnD5vQXKRducsvL/33yM7vo+jSmwlkd3WzDANVgLM29/hsbgvFQZlrr5nIe6aLL7zrtxjDhZSU8aOtN8fPXTOE8QeIXXKmLC+KswMVdGRxj5hcxuKoH8OaMy98wMQStv6Ee5CC325G10OFnBLrZjWx6oZbZ2A15Vu/dst6WIc+eRrF3Ja9Q8OGre483Xx1QdTH3JE9G0ZD1ooY9ewWz1c9wruhlxsFKfgJXEjUx8Kf8jGvcl7P/66DdHGrcyzXnh4G8tAT09FfybyGJQIPBY02y3+moFucQc9ajgb7w8g2US4iYENi5X23Yd7gtCKfip5Wh/jn5v81QfHy8ALB6Y9HG2AQSkxn4wk1dl/4p16ZW4BoVRqeRRofEflE2JArjHo7osj/YQK+d7dHX33JPf2nytbXRShP6+8kt7pO858HPo98F/MYday3EmTC2GAw3a754g1dxt1OJFxKM9EyIQRvC/fh2e36E487bH/ljoAyuyJ0lShjcNOJviM801W/I7M1liB2DCo0KN9IPiiuv4qWhNCt8EXr7q4cfytD6QUv4efXtpA+/Wf0DGMFw3z0mMIQwCDdXHfhpgFCz7DhxL+2aGBbr/Qo9+gI4hFjoED6Ej0g6VYpbgxJDjkVHcn/e20bQWrk85DTXRXK/2hj7hvue3K+ubH/UmH69XKG/81OK/BFIq4O0DKM0NIhFsLThfU6GjtNjq3A6W1hvG+xbI/FQkzzzA+wtQ77C5tpeFz1E34Hyub7cH9QG8o++jwN5AYNnxkC8jsGzvvVTHbehrwsMBjCgwsWH66X05hf6P9Rg11MjBWw3ue3Pj3DcOO5MMS/ItKAOztLZhTqgw+C3ZQg8s+RWUspMNa7Ddz/iiFd7OPP8OpWay8rw+FBWDuGTXq3rvPKCbAw00rPeWJf0zkekccDZCSt3ldQ7Gu5NWZfW3hRD6W9nCK6+hgr1rZ6s/bYyUfCMKRJpbN3r9oCfSb9PcNfpyR0OPacmyrxEf2+BPnZ8v2f3K03B0dDZP9kenGUDUcJgJmyMR8rOZZkp05Tx4gx4fdLs3bUGGDzZwRAjvjtMytGU8dIEbi67WprwRyzoyB1Jt1ah75D+es+sBbyVMv5B0WCoMTXFVlIXe0ELPexkLL26dMrB0cucfoF4w8KLyy0gGIBNAUy8y+3+/L49P0Lcn/xAO6gKBndg7IvZKa411W/LqcvSMOJoiMdDDjRT1b7RtyvSXPFJ9B14uL94J1fuYO0SVMVJ1r2yOYQBdXU1A/B0YxitwWWB7ssM1VJtn0ew6iuMSw2laYg0iPcAHUJHsQQPmxw9y2wYHqSldLsyDAsM7+oqBiuldPRX7nfsO3i6+4Lw5dxY706bG5KLPSd4CEybSDwop8T6Wi30TLrIn6EHbLVzA+YG4GHa52a5+lIq/5q2XkVfuxdfoE+JNTI26yXXFo9vo+YbV70WkDfI/1SFJGGCwRfB1fdkwUPXy5p6Q2JToxGDb7jdj2+7moERosUJF+kx2J/yP9KoufbITw17qxnJXTWL0H2VHH/JJHABTa9C79c6FJ6q3E89sBjt2Fhonq0A5x2QZkv/VmO87AM6CB/klbSaR2DMI2hvtAiaN10/1gTAwmlXhkhCCQ7P2VcHsS0J7tNxXJx8crF2YlSGqfT345aSPyv9/c8QihExbln9WpiTbIQngUAbJJK+q/6BnnLcVzWHMwxuw5NUunwduZKWDTcFfMlGXOpKt+FGdUcTLfQEfUvajdZHuv8x1z9P6C+82nYmLw6tT4YCXyHB7sgAgz/VdN/QldvP5NSTYD1OAeEWP26P79vz9tgCBacmzwpDkB8t/arOCJCH4dPP4orPLUbqH99iX4+PyTGpbnpki2nKf7kskjji01h7/okMVafDgwBG84YOBk6KwgEMo1HRUbqKkhZa+hYvGmvrtla32kPvKBh+cq0ph3TXEiEAh+Bp6Bs5Hi+HAg+dsyq+mws0JLj6c+yyP1gikSbfUPq7b9j3Qn+Z1Se56onR4iFCf5GOegPniVAdUTZGFNpEOMKTYEcXjBbP0hp4kvdVGIxmojljwz0MY4LErvXxb1/DgAp3Y6iN+p0bhusm6r//AVFDb6dRfU96eO4weJCoEmoWA64sq62hrxDaiG/ycX9
8hOPOsrNI7LiRRi0RNIF6m5+h/tHA8Ju96vUlo6Vkrx1APaZ6MO5p8WdCPNcy43ydOvpTrXzeT3EhhqP+oP8zdjYLXyw5P+ufy9usYnCnnZne6pr1LrK4Nqcy99XfiJK2REx+WKYhj/XUp1Pu3BF6KGu1c3PqlMHaxke4Bbpt+H7Hx21q/Z1JSUt5uWcXROOAk64QADimOOCN5IHkOSbSqvhqo+xCvgqD0TOMzjCMuVLal5mJdDbm2T5L5fgIXO33+HU0o+dE1+br7HjJl/hCf600KDvSQmPZ8xF7nu/ZcCazL6JJdUmAoEo4SQwBeM9a6HA0ZmDrl6kfTxXnDQx/RqZ2Pu38XP+5AOMFEldNTHYo+aT0j+TL0fcVI0DXmdFvg9shaPFcscMp2HPAfRtp1kCDDIN9BNpoKPbEdzZz+OPeY0GPWz6QKBQhR8qHxcfBuvHRl0VUviSjhAp577X8b2DcnPPz0vqbqz27mU/z4C3PIkyIElOOLhwMjhRSXminVW+aF0RqIHaVeOjTMoDVJQyjm4xWlfvUkostDkfo60pmrtwpn7sQo/MureUq+i6Y7j+ABHOdeqAJWFoooms/KR8ZjG4ovrr0bMekz7oaBVFDAMA7wi1+7M+b5qSMae1fLAaGwUTeDwsTjWWfKUll+H2s72ns5+8q7JNCNHU2YLzgxFWv/uQwKt+lr5W3q2y86tqzNPg9+zS1ACkG520NMXnXca4uzv0ZN7NQOM/wdVyPNH9qMmoAYEK2AWc/rEiIJNmS3PIlI8G1a3RvBm4VzldcsYrleJwzhTaiW6A9aOqrRH/Dlq6tcbRXR7f0dxgBvCg/7HCbiAMMlkTViWhj4YgtM5gjURDRcRwvQZrJSxjc+EWPJ9xX2DCucY7hHY2hF2jQtzH99mVWoFu/vzd8Aip3g3+wYrmXoSXYEPSCMXjsjdU6c322m4Mx2TB1Jtqw3VMwkqbE8k9a+9tjcC/FZlwPxIWTTtvb+Ssdehd6JL5YpgePps3++/jycXZ95HpT9xc2d3FYFWfmSYcXN6Loq/vdqAH4vYQLpzvK6I8eeg13X1Kf1YmjSdf/UegRczSw1X/9rcxV0+/vyhDNEUwo7Xaf3K8K+obNUl8p+hriijsILCeaaeasF+YmD8AgJmEGi2mhcRA2IIpMxrvOYld0g1WejSvpjc5g+A02fNbiovMt9AIdr21ybvRluPtd4/cXGC/lT9DlVVW9VnyNwQNt8xCDUUPEe9HPJh2Klzjg3s2Uwbvs+/GxHXeOO/FR74w0xOC2I2569d5beEsXvVBsIjuFqTQoC3d/szIzJE5XvdzZS7IYnqfDrel5X+2aiV5vy4cP5R+1ksz9nr0hck1/UlV7BX2vQO9LZv6K0AoIaU+kx8Ex8nFwiHKabXDU269RXM9YMm1MeyBVPn/coFv/zvb9NSlqrgvWX7zCCVyFaoWOadXADMoBwQdjEzxFp5akyC1T55QEX7SfV8bdRTrfziRsrbgywBkMvypDG7BfrC0ijpqo3xN5Fx0b6PpUCO+XVgJ0YFagd8KDOyMxWvCeK0zHHVhM6u3lBsPErVKOGHTDfjtu4bhx1NT55lzDVX6rAQYTTc2EM2+d0s2lBbFc24xQ54vbr1B7ML6IxP9IeW8VOiPEo5H18n016NvT36a+RfXJIpmtv4sYpElP2sIzc28fPuT/zL+vPohmN5HUEBCBZzQtNERAr2wMDExdnd8TXwllSzs2SlsvmPV3C+r5fFKd79BMC/3SYOr3Ay61xKx/FjUAM5IKmrBBngBAOscgwzBBXlQve2m8lmZMerjFYY/E7XRTByy93rf+q62ILy6g70X6+4a85nv1ilL6yzG4tfW2muQpCW7P9pit8t78OoLhUeg5pWCk/XnnuLFQJHQ24B6Dh61ZMS/2SS26eXofswqHPHvIjK/w5nEHJsf7WLA37NyvyhCGLw2H7sgo3Kgc96/s+tdi6EvLXROui00njdHXnBnSQTUbvgW9a4mCHArM8WCJESFH9y6mqMsZsuY16L80SZvlPtGNkaKPbiX1Vbb+mlRg/MYE+G6okjphKaRH6JQHTki8ESJThBwHbUCEmIOdOa04P6xPOlv1ITpDJO5TVzYlx+bhV2Wyj8eb6LuUlf7Z2TwGoFu7EXnsfAFHO3j+03nw6VltvNFF95fYhafdUDlVU/YbYDOIEXbZt+MWjl1TtRY+JBE0xuDkL1EaIXdnJj0ez+Ti+/AVNtBuPLt3zvJPaHH2Jemv+lqsvd704LVPLpyBbnPWey/bFf26CqNTNXJXc1nGlKvzFde/h1NvA70L3PWt9oXaNL25uCDR30fkI/IRifPeOWe9z3rKd2PyVgTGLZBpI9qYPnZ87LjfEdLGR1P6W9Qajulep78v+kW7XNCRwTG5Q28BUfDUrFhET4ImhVbXN8U8XdHXGbe/NqnIENEXhHjKhk8bapTYWEEv1ug7lNmp7v29/wCjw+BP+2G9hsErMgrEjuaekuB0omAwbP0xm2mmfWs7M+1/lH68ERM2oU32/blz3DkGYr95UoPBdnND9O1Vx3aguf8rc0tTlb9QqtsqXa2uGqmpm7XKnxFS8tIneb38Qi4ad2flfYEmcGiGvsMjdjnlfQEWJU9lcVMN/T1F3/4B25HhBf7zjYIIPIWOyM+D94NkA0WSpQaxT6z0WTOw07dIRFJSEZIH1i3gvmPfZQu47S18ztZKuIy+Z/RXHZ69c7XBvzph6RbBlDAYyRKsmwRTBDaBAE8BBNFtHaNPcxSMZB37ZADSwr2rR+KFXvpSW13hflg0Llfo0XdOf79K/5zkyoO94jQ4kasYXC3vp0jcKqJT+TNL8FAdPTP3XZEFfmfk7NXHvMu2xS1E3TyuqzJhMJJnSvsV9+jb/Bnn4SCvvgGP69KdRYfHM1p8XQX9qrw68k+UmWcN9dfOOjAku6gXLv5hLtB3Uv/LhgbblahygbjUGlBHGTX7OvgGr6wC/U6azbgSoUPoGfmIFA+ORySqYzW7aJS33+nLs9lGtAfsfufBC/v+XtEnj1dYgwtbt+qmg+VKBedkpkiu2xRFNsGN0wuMgo3wFGk2aZjo9PtgzU9aixdIfOqr9U67n0DfIlcGzXpBO9OptbTyT1bzNZVfdHQaGoMvkmB37M2UI59/JqqF3mTnGEgCaawi4Kb4kouj2/LIo28/BdtVsKc1hYeT2++rGk6sM1o8TNv0kgq61tyfFHhJejZ8HcXXJa+ALiYrqq+S4aJtJmst28ydv99VSUZ3cdoFc8VqeqvpOJ5HOOJxHMzhoIlu2SjNDIPHOwq/J4ZiH3vxvXpPx/jphBvKgPs/6zhgRAhBN8QKAVEsMyU2EsDCgikmDE4XXXhiX6KdXiDrwkL8nsyyi7SpNkya3B0vNb3GCTu7mMO+Ovy320ukPn2RmtWcdYDB11TK/6CEX0wE0ffuk+tTAG0xcGHATdZSKb6JlTo61wpMENTT0O55v3bLfVWLxBq9qmKtoJ7JMAfIvIdW+ZvySb3RsIdD3H0JcS9+ed4tIAIQhC7+d2EAnluxqr17LW
yECVHS3iL9/r5vP0jD4PwnIhCFnpEfkZ8Hb5HiQUTnTfQY7P1wJ95VF/kMaQQwqf/zvuG2ybZh2yooPYmkqBXO/uCi/IvinMH0WeoP3RxpC4iSFNE3qJcWEAFBTDw4qewihMWe+To79KzMSzIj00ML8dtNLOrv7ygXeKG2lTSfdbem1f9bXHUp5py2rmbneBW8XzUPOyr8AgbPvo5XR86MF0z7bGY0pb8EJg7YQtxIqLLAJLGppFFHo0bf4diQpRf07Kqh+HUAMEDZBRI3JU+jpJq+nSax8WU+Y0O4KKdfW+NRlX6PNMxfLsYCr2wP3WNnc9aX4eRGJ+z27p14N74gp35hAjyFngc/D34+A7NwONB4Y4048VoXLRGEEy+tliZ5hGZANx9UBhxCgrb/JGmzawGonbAEzIiCtA0DSSRsDEAV0QRGlByVJLQRnkn79p8iV/TSXoqpuEnxUZccIndTZoC+o8RYwPlM8E4Kjm7h9iUhdFflVPOMroeeCmeAUpuSnbqa1/rtwfY5/3wOEjgGTjn0Y4clXh2Ncpdaprje9D0amioXoNu/Z7uzpg8Yceu181dfEpiuDxabDQzlDc32qy9tUWfTw4u4e7rU1Kf0Bgm2ywkURvswnn4MHheHGIxMgrMXY57xzns6bsW35XbsQBQ6hB6RH0fYjqgkmINcxOBGvkYLTcDGFJjM9HtR//z5mdTsuRekYcBeo8RggbBmwkKUtF1kTIgnTwEn7Vsiwcg4BH3hlzJyqHxJ/NK6nhZEZ/uxLN99E7sMYBYfPL7Kn/q0xngUt/rJKpOc6J/bRrsbGag4Myv06mjY1GW7feRB+A9ROU+l1beDGMwSKPq50r/VBk3bNEmjuE+vthgnGX6xsz2i9ybemXa6EZmTY9fuwH7cny011A5fV+xWa658neYOu/QZ3G1KpgmSKrXHUFLOY/WZEYQ5FTa2OiTBQ1wEkHcOy1mZawyGsweva/MQe1petdCPyOqKdRzMLEAM28oMPDx+roW+LMRZ/7wFUeXze/R3eNV6zXW5oewFDRQnrMgJR5UEM2MT3QeydcjKlD8n83vfCOoTcSzkPIHlqIYrOTLXzY39mXGGvt083hT+h8qXGIkvxlv0Z/3MlEdOwWCoLeOMBP/DdS2mxqcgQb2goUGQ3YToOtgAzDpvsFuTVFUtLhlKU0/ThwUnxghcT5XV/qpmO/ohzvVByYuvYLjD8UVZgy66G6H5qVNpnlsf+TMWIgC64xqDpM7p4bGtTz6V+3kCjQrDhsHqRMH0gga6aWKsGsmbGWulh9DPI2zPbQsxhEhMdIAxSmo0kul40NsYEGVNDFVOFARhgAkbJefn+w0hYAtXurGSP0F9XScE8WZgZBIc9WY46Ro2wY3lZ9qngTaSTII1JtgCk0Y3UKsjanlDPXCRN7+9klpxWVxF34WReBk6PBmPtfX3T5LKfvxGJ2clDYYbDEb9Wccr1rE/R5wfli1ZyBgwS7K6leLpRx8hVXv3zN2ba081f/zFjucuT/ow48TDthYUuYHkWFc7q7OZwdc25r7bQ1kq6qf4US5vLnllrbOQ02GbLidikQgQkmXP8KHnvp4Ec6ev1vWgbzdpiYFIEtSsRogplQP1dzEcAIsRm1uv1gqH8DPGZ+THMwQWALwfImgV0a/I2zyYNkJgzf8st+z/PPJDHssXYe1ptsucCxoAGPEAMgnGkSzBACRzYosMNmPwT3VLh8T6e+gxeJZX+RM5ydbv5oX9lyb1tC5OXl5F38X8OoHq/+rSUonJe5TuQy8ft1QYDDknwVdkoC39HPVnAguxBI4EcNobrC3SGIZ7GW56k9WDA+3lS5vkZH/XFs4bCzE6JO7FG7DRPc6GHDeWKyxVu+IKrB2wSw0dGM8Dec9Bd9YxnMHqUBZXnDjY6hAiIpSITuuYsVjDYF9Zrxym/O8Qg1FR4QTDgtbK1PewbuLk6QgQBYfQ4+CNw3ZEImEWopizJX7Fgno5Q1pQqDpOku6AdNuwhdb/+cvl4oYNWVwijqoWqgKCo9sneAt6nCyGTI3BUSQSjpEv68WEn/XeCdOzr6cP/TyeDWro9mNoS/ZGYndwsWb/TG+ra8uC4Es13QtGg/WN1cpQeJRIkwahxmDgHRK8mPM+rX6nDYElxQFPysxc0FAr69qaYbEi7fHXeohqYu1XAzMk7mWtr7bKqQbUujOuts4LF7WNucXvOTkeZa06Yd5fKO/B7dBA7nQAxMk9Spjsv9RgA72zLlkDjYq5UUcbFc6VL6u+IJ0hI7li7ZGPSCFSjHRECgAFEYfBi7xXA2PfSP887xPAhJDRV3dfCNtrw+LPgep+m2FhzgCsPDha89EswYApolnxmqLIrTgeEJeY4LwrMqCft6HmaH/7NvXjJP1yOTg8NeTZr7Lq11dnp0bfFfpmxWZzyWeF8+z7501CqaHL6NuXEUf0XCRSwd3/gsIgSo7QG+sUSQ5UzAxmSsJJNaPjUQp20iscbl3VwA3Ol9Sez+v3LLmHEHt7/RqpHuOD33YhRvg9BONBByY+zI0s9OfvjbNXh2fTcz/jHSBACIjurZNbqzGRd12uu5H00qna+k57KgxoUw6GMXg6r2qOPOeKQkJyCB8ij4N/PjcmzbgsQnIFcRtt89Tt5oIQE24M3X/Q/J/XsDq81StIfK1n80xYnDdjsH40nqsAbUGeByIQkp2CnpQAWA3DTx0y9TigjMHuCOoCpX91yRdkAdiN9Kj8VotTI+589NSN/sno+GfLGfo2zLvb7MgtvqqSQtDF/5wE91oWky9Yz2s9s7OkuTgIkqfIVsWw5qxMFf/zx+Gw0x/M1bbS8B7HeKqqhsR6aG+uCmjl/i1N/JnNWN+Icx4Yc1mP38Nb6Ho7eLCzz2h4uL/+v8JKzzyiVVT5TBAXH6BuRnJkGPbD0zTMcGA8kwaGgRTU4jpzdWaauZlGoShy5MyUj4OZOXljxdxeJM9oF4h7Ku3EY1ouBjZKAUgp/8Y8/WS6jVGHhomgh+VHZWaI2/Bgvx2hMt0DTGmqi2YeTlp1iQLzFXgybYJN5CHgtKwyo2n6QQNf0KHMIrLXV72WUPQC3L6UWGDl8Nwd/9M9n698PQtNbHfqjT2RhuX9Bk3uqHvf/ZrvUkDwq2Rkcu8DT/WaiRKIiVg4ILBoLg67rUxWTnrE1HIULzJ5e7OnT5M/PaFsEH3IzocdnvlwpXpqZtwYgHvo9SlKSru1udpuoe9Oq05/5Sn1Ffb3u8gxklr8hCvggv6WBL5AJIrddMMASA6hBn2bLAUHctjkyEWrl8YwDFdhv/5cA+Iw82AUiml3wvg8ODCHZyASJkWHc0vwJyOAiUGmf963lH9jRH/nuyGNjp/u/tttvTAsVeWFZkaMLhGHNTD0qWAGkM8FAHQLIoJnpI1okxwHDDjHpZkb1NCZ+Q2Iuh7J8GpVp51ZoO/X+FINAeNPXbdfMZRW1KwqfIrWLRL7xU72yUpOWP8wjfT6TTl3TzCIJHAkSrk4jg4VLjy8dM0wL6B0R5qrZtIkWGhgOJ86AeO6woHlONVTM+PYvXYPv
f0lVqbHYHS601J+vQqfn3oDcat2P+eI30RnrbM+KdNVmsOwbFZFu6zo2yfEU7VjUoo4fJ0Nm8ow7ABPch6PU/EY6X9L+o+iQHXRR+QjxnDwQVBvLHscw+S+b0qzBuQcgPSxJ//nT9p0Z+jb0F8AGG29oL8nkLy5KjQRtDWTSbBkn6z0v4zBm9CTcWM8hZ4iz0yCUX21/bAbxva8FFRzpfzFSAZcwNphB4os0PfKe8+50wYO9z5op5b3Y5DWKLvmGl5eRN++cCHEKZlyho8OOtSkWRHof7gCUSORiKChwO1ZAEskoHpCbPMYCFklXi5uDdvgeg/DQINz02q91rpXVns3ruE31WTcHJLpNsb3GgZfMU++6gl1Kn3iqlelcWeb7kCQu6eJTnWRp9vOGf019PW3KaBDQLozOwFCbLR4yYkrKqxN5yTSjczQ0V9bK6IhQs9IgUhJMJMQyxFpY+lJ8JehL+suj6CNaA9030/2PmoqaReSA3CtZESzBqz6PAyp1SLVabSYiiI69TJfwIRbwBGx1SQYFQ9uwpNm7PAiBr+XU/oL1b/dBgzXrrIOLIFqHPX0SbDp05ePNY/UlskZ505bGJc5myArQuwwWL1CV+beucz2z6g6UClhr75BswFzdoTuYez0SU39fjMpwWi6XL+BevVf6mnWBKeaZxVzycZQF2Y12KqpfwhUlQRee5MX0dcOvoespbmGG/Rjdo7Bs7c5LpyV9vm5uSbaPlRbXqquxZQcWtjeUb6WABzq0QhhAgs12un1Ng9DDF5AY+dg6m+HInCoIjqRYOFDWNE3aMizqUmuTm7FvXeJicSEFH1U+14ZqCnAnZiEh1poU+mM7cSnIb9D2VzVjCjA4drLYFxihe1c1O2SaA+4RfkZiSFM3hJ8Sdw6ao3BC+RW+arEFLPEln9iGtF6VM3Sbw17danRKzDTm/JsVdhf7iaqq+jrq20qU3brrIU6vqo8WaOuNjqD5rFMY7ib1e7i4RS3juQ5QpyVhNkyZ8bdPl/gIpfQcHGyKL+QHgZ6GMbI7auXHFwkGDlw9e9hoNzq0De1eIE9z+QkoLZW816UWQfWccapxe7sFY1SKqkwTNT3QQeSaqHNC1rEpuNSW4e+VQEmOiTBMCwfVmf1mMkVRPSeX7GzAQeSQ5gRjQQzCZGEEI9I5LXQXpW9NFleFSYEohufb/37Qp2can772qVsA8qblM+Sq4iIUmGwMYktIAr2QLcoj0jPRIKJiyLaw+opVLyX6Mpf+1U5pWdHKvkSW6/KCAbWAPP5+ueFC0S94Yp1MlhHMFwwWKeKFAspC+pkmwauZZVKxaTXo42Ecjroma54mSAQqLGtLzA0Dqw9q31Vnb9SgWHU7HxWo6+h76pHeseZukrqwlbhRR7cqKC/XKucWjm79KvCi3u9Eq4tGrLJpRTTpzccPKjwNYF3cuwisFCs07e11667ct7PqkuS6xQYCZYQRXdJkkhCoNB8/u2PK9Kslkj1z0wITHvAR97+qJHL2ySM4fOU/i4tvr3UNmCmsuKqDEH6W4tlN2lNm7UxbYyNaWc8D4kEkTKjXZn+cPnrvCBvw/DL5PXU8eor4PnqjVyZHq5UVZSMIzRIMJLn9Je4wLqkgm4ZbNKSYPusAbz+flv7+tjB7eyVMej/z97bbkmO6tqikuyIzOq13/89z9h379WVGUb3BxgLkATYjqxe516NGt2RNgZsYyZTXxAjZmeZHjkr9QgZSrOat3KMqj08BqyPVgiyBcP7VbW0/FjCMDRIbJkIWvTN5aH8yiUIOU/SOnVR86y3NRPsdLKJPCSAAxavbO8D5DAkSKZcrzb5ovcM/XnDz/gnRwwGSG7PuaG7UtlqGzkAAm4BEHBB3BC3gLTn5aCFWYyne1xWCRP6rojPHX3b5M9C+dxJRanKVF/HKt9twIQp1UZgoHAonxMhjl1HgN3fPb3nuFUDwZPwC3lFfDFvED3zOPTe84ylaCopqFryhH7blBn0HbVAT5FUp+krUnFB22OrgIsRyls31CjgQI6ZnQTb+udZGcJgW5AQKSmfEYo44J3+6hdWz6kCtqN+DZWtahVtcElYVRjer1VqbDXVjSdcgcQZgys5zGRaeUdK1O8sPX7Ak1l97E6rso1uMWrC7vbfAiARAI4AJNgnSP9Fay1iwuDdzzkqftW5cMQAPDjfMGBI2yXEqCRBgjUWP02cmtkSCZL+Oe7++3xwTj8p+ahjALYmU92gptqJ7cdj7FEobMBEKQg4eV0JRbS8PuqiafeXDmnbRV4Jl5C3reCMwbA/KYnHaj+FPTjKrHe0L6o69wyAnea+3Rl/TBdqVF4f8LXH0qm4LrajoKYSxaL8CehVT7V4HFsnTwtd9GtAVz+FwYfTh1hGI8KCoc2Gb8zXaa6UscIVFEm97ngaLOvhqbMzQNVi0YofENXWIBNnaosok7g7vLkqAz0MvuiZbDWtyiDUzxYjLIzAzqtXt95qdSetD4Ec3WGvg0qUdaTdlHfGhHWorjbGhXELBwkOG9KCjAyUmrBca0enVkIkAEJcER4LPhZ4rrr+uSuDnLhbzILnCoOJVnGXWwLUooS0/OwIuUXKv38vTLASPgm+CFfml7Eko/2//pR6o3LEliGteJTQHPHrqeD5hAprGIZPrB7UDLD5oEhgaHbgBvS1Cx8keN+GK/kTtLsfaDJiEvYwuPGGQ5JwlFXQEF1mSt6j91Cib6pGXU8bNmD1JajfqAXDVYuqA/Z+6ngL4Oqlq2illvjmO3LM3u+Ti3rj9up7FdGBo79Vaov2BAoA0QW64sHJOWvjOu3J3luW9guVW8P+mtzJrJYKg2fdSDCuYBm3gIQFCY5O0UUHy715Trjj4Ir4XJL++bEmF+h0JzN8qw3hra61CpwSsRlD1kIfObAEYh/vc3eWJoy5cDggrAQLwZPwO+DK/NL2ixmXQQxuGPM9Qu6fQq7Yfe9xXTavTZPibML15PgxMt/MzklO+ZKg7RiMRTxSr7VxTUnHLUutJy9QEFKywKO/h9uqJbSTD6t6Wez40ympdbYLw1Ur7dBQ44mjZN0p7GS95dnqUkNidlcRrcqg+tqX9l7a4yPVWlk5fakyWlcq6LikazrM0Z+569kndSf+HkeFP0+nw0mk1hom56Sshd4YKRSWYA4IARkZ2y16BzbdabNG44qwIKyY9M/rwqQlfy6v9MzAM8jKwhurL2UclNikaQtJC11foCmbGDMnxhWYF3wGYIav6AvNKYy8WXmw8Q5rG8AIUa5+ZAlasSkXpAHpBgTX3PfeVbTsQKaPg01oU+9c2xahs5S6I1w5cD0zkXDFEr4lp83Auqga4eLs/ngxMuB9S+DS/7Nzi82PQWlNcVzaBVM3y260ttt0rUGLUyW261bLiSUGg4a+uZWKNw+KVDCMD8+RglYZTf3QH2nj+vCipNjMYz/A1m0SMkYyqtmU5Acx4rgnYdgskzqcpFVHQ6k7sSoLjFsAJNw3aYCFeNtoCwE3JG2HHoByO3lZxtA/7/SXMOufUwwS7jbgq5uhieYm6S/uOxQZFGEtisog4LQl
MB3uV/k2wl4GsloK4blgYH5u+H3sUdi2OxLse1iORc8nMkyNGZhHazM66dfgo69Ov6oeur06lw/kqiB29M/q9DAoQglbkOB8XGx56cS2TTsNzKnKeaFAabpMGBG3Z7DGV+6dmjtXPe7HYhbJ/4xEvhbNVRNSHl0tr2q10/GGJaaql+d6W5+jrpRwclRS3oV3+UWpAHXKhKTmvc8bAVXJOONRiqmvRvtmTGC2VsMSp6Q0Q8E+tFQMjiI7XypReMEUjxSYt4CImHZoeBEh04Ic6XsKshc7CNhb+BRNY0y+AYCAT4LPVdE/FxdYfLfSPglGm8wAzYU7+tb0N2OYGlzVcOXKCzpqQ8otgdteHs7S6W8gBljgydESzIGTO3TdaRtThRwwvJe/TS5U5aFmVfIK9+1R/wZ6NforDs5qoU0arUKvY12Edlj7q+5ilYoRXVZAIA5bzBGNFMMyIi22+p9+DDm9q0bX8kIOjEtUImPUFi7EVgXW+MqTlzmLtQzDqApEB61Evi0SF5dr8Amub22m+xmDmwKxzkKwwWBCAIag5eKQCaLLfjYlx5TDp8UKbrZEvA6vtjY5LkF2KVDym0Jc2+WXArCJ4emYIQbFmhgqK3JuLmJw1mBmTyvRujKiAsMWAIiIAQNsyCHQthERL4E5cOHkmMFrPFsLAayIT4LHkvcfTPrnwgWa2qfDqonXa6s2A+voq9ZmUE+ZCatqbI8JVv2nM0VOlSMDwGPFjw1ejK/kDl2+n/2/FqaWY8rkyj/D9kbeSJf4gvJs+0A+aHU+EFGWkIUx/5erydLQu44qsUkzWznXjhuVC99NTN5YK8FXqAv3FhZ9F3pzqWGugZAA9w3jopUOkg6sxsJUvoy8jDLlCKPu39BWVdFo3/Di82PLibrCYGien/o0pSbZ0kJn7KSGUvs7ASvN3aRohJlpZrCkfBeIQJx8r7p644jQG2NMFekov6a0P91vqDgbl6ByN8Mdg7ORJudS5eNPJsQXRwM4IuBGmLcpDBviChwAc3JKSk1VCYyP24sFCIAAEaKSLKafxOcS9x8sEmCNp90AODBbPiNopuwKfY9H1BsI1ZSEBADrsXcS0WEGppIEU+uNtccs5aMEEAI/F3gF/NpwS9sznMFLKmepC25W5/JyDHHuG9F3TBz0zXvjqY164oDQVA3nvGukZIokXLKFIlrSqNEhoWKw9wqsU5wuROS8t1pUisfZE/FweFA57rj9BDQmIQ+2SGzBsNWWyo99J2oLg7OIiaBWXFuSjWN757UyJQ86ga/VE5hd0p92q/Bb2YdKvxUrF4e64Bu52XMSbzApSXcYJgG6oMHwxhgYVoIXAzIS42ujZSfBWwhLiHAt0JeKuCs//UOMPoIFccW0/+CywLL2X1s3HXQrd7hrtbLCjuQYtmQGPkCXPeyiPWJYxgo/Vvxk+N7wK8TtGZTRbZnLqjKe6XpIpD5gaqeHvtyLvgPtjqLvUd4dgsLgOnRJ1+LrwPCUtbiOC9qjgXen6CJh1r3eWFVntMr3XBxJZxg0FU8dI3RUWXs2DW521HStRuIuDNc1qP2sdyWq2XCLwbDDsIq+Tbc1G0WJvu3KaiBn5JwMrH704xZPbZNmV2KFZIZk1kjXUtpn+qi2DRgLmiHW2sUITpHjrqNcYM4w7Oz3cAxO5FfAWJ6YiOEVOJLgdcOwIS7MAWp36Dx1NwcL+1GkwivCY4HHCs8Vng+hutmJ5XF7ly2aFv2tCkykojQ1zL3ZIQYsbcXow5WYV3i88DPg906CR0TF6VD+mJHqOVuJGi7mwEqV3Kf4UmUCfYXauayhMQY76Ht6iA6Coo/0wk0zbkee/qTdFQvmSDCc8MzahQNgiGTtgIqYDjp7pRIACXzVLQz73DQCuv5oytdXiF49EsvY7G3A3iyi1Fgm6f6N5XG1xZYuAxRGPrmFw0X70pT2pyvnsoGqXXIyI7QNtU67u9ZRH/XVux5MtdHKiJt6LBN2Y0KR5FLbjCQHMWxMFEJKThkoBAobhS1Edyvm9KFB9c3mYVoqkbL7FS6IzwU/V/x8xPwbvK7FjJnJ7o2+0LcIB0Bq4oAHJU+U0ncaANYFAeBzhS3g31vankF+0ggQQfm0bvlKgGzsw3mAaaqyXugI/R3rhmMzbjZD19F3oAkNfR1vLIf7thh8YtxLGA4IOR0HAOAZzzIp3iLYGFoiUUnUQoOVfrLNTQ8u6J6YDyT1rFoc+aS61uL2BY5E8Y6z+eSHtWNwgb7N43AXjffLSDx3JW0I0L6xV4VGzb3sV2H6kSusnZtkMqx2eLZv3OHEs2KptFKGS8jDMcEANiSYYO99ACRExo3pFXgLuAXaNlpDCBssa8Ps91uVkas1/Y3uV2n7o5j9qnxC7uRzJh107FLjjTUEKrYTlmDNhFB4Qe9K5hw+XGydVFmC94MfD3wFfL4iCY7KlAqGFQzWpr9zWujbHKfdenT01d73iaghDbBN7Vg+66CvhVse93WmnBb7xzHYh+QCfaObR6GF1hu15bwVo1l3A3FMBx0/f+nmn3HRIYJTouW4F6qmvblcMs93ue+OtHM0lRgMhld7pYju3kITv1RGD7voax20ZISwTslI65bawN9OqtvQROBWvkQc9N/voDhu0hGDEViGplJDvqPXNDCmTF6MK+Mr4Eq4BdoCMkPUQstNgi3BXUlS09+PNYX/rnv6yQjat5uoTqC1kQL6OG820LKc1kMs/kMqWNFK+FzhueDngg/ENZWsguE60V1Z56J18+1C+z9bBtE3halVlasVUvGvrlZqnlvlc4u+2i2oqmm9Y1XJWcXylRlF1lDVSWdIkM13vS4dV5XFYi6O6AgdH3Cxrels5wB2RGf52/LDqk45GmAy/h3XIrfJfqshkB92xrZjGIqeTBmzc9i07NI+lg87qPytVtL+G++DKnk+y/9mr1X76RTO1t+qQN7XqKyq85zb9zvbbdlbOeoDY/6nnd1/NPUzIzMGgMDIAIHxxciAzBAYQyBm5IDMwAE4AATTa1Kib5wK8Ek7/V2KBFipNzbsjRhoa7A7/jxHmi3Z44ABmAhzGiwqSXD1VAq6QxBTV3I4XuZK8PHAjxd+Btw4qpyUJ5tz+lWRl6zBm1zCuVroGzXMZhNK96Z1zgNhTq2KM1/bQ19D7DBft11HnOxXDsINkuD0G5Pisq3kZyRbpxCImKAGyJGsQNCdQGvFYy2q9deiwmYrANB4bEGplK4Mw1Uqq/LljL4CQYKLakGg715SPswZzfatMl6feC8Aw6OywOBcFSOLzKZaspRax1sXiKcAoOHBFgkeUyQpkjcaqXiwXT52GwJHDIZtI+YNAnJg5sPx1rK7HaFHK8KC+Lngx4p7+O/Jqf/00GmRWM2T5ZJgEQccdrjNjs3VDkhG8Gh6sxKnlwWfzM8Fn4R/I6zAL4a4STRr+Foh08gDuWwJbtvtynmdc9nWWJhTpeOVl9foa5VMf3YXfLdp7JsI4wlV2sDSdV5U/fPxhbfrgGpcBWCxBse8HwMAthOci39d1TT3Jn2JtVVzLQxbIjXVltKyOutjsJTKziA
7ExgrRbTFvWRt4271jlTF1adzBb6b95J+VA9NuzDFs0Hz1qpRlKN6qoNtyXTKwOCBe/Fe8ch6ywlzF0Mdc4R3YOS4RVIAJt59Hu0eEuAK+EjoC58PiO5Xa2sAHo41apNVqadGLlcrsWU1raxtXmhqYoLbfsS5jTCR4K8NnxtwtCKX7UTeFhc+rphm4HMYfAp0dVKuljfWbl4Bf0ZR1cgl+vaCjlwMHnwgIYZ0957CFQx2pXZNGZAh669q0lbKJKqStgSOClgtpqjbTaslTatyoFdVzILh7iNyDMaqf1byRdUwGErfu267FQZDupFiu8bmqk61J4Bz5JLxseZvISWTjuU7lQXi2dqNuSnZ9lwdAKrI16opHIuugrZiaJ+Gb26qNOEEsIfOc968JHtVJhU0Q3gBrcjIR6amlpjR4XsVc0/iryd8PE9uPlh0+p1mThs0V70Q5TjRyG45wWCLwem2RbEoSPhc+bng5wIBOHCqsNkl6QhIaB73OWfp67FeuQPq70LKtz6ndu5PWtW1e3k0jjeX9I/XxeYHsfL93YfB4tr5nZ06Zz1faEOQgBZeKByvQiQRdJTGVoFKo2jN+y0SS94MO7y1x9XCsnz6M1YeCxswnBFXuhf5ZgRxa8ddRHNgVVvuZ1cN4Mu50OorUuFfy4ZbN+nqckLl68wQaPFXrMG+huFq4nQ0HLmrULL27oerpjosCuQMrOJOUyI5YX5OmyNx/MaPXcCL7kYt1Aq4Ij4IPxf81xN/PeHXR9I/t1LliZyd3GodJMHdBmBIAJymoo2JMN68Y/RVRUJ16muA5xLdofnF+AqwRvU/JC10pLYwNDsnEtwOnFsU0bKhqXfko+8s9A4abvPPyvR78TmoONpITIUxWpvPj7rhSeqoG4b2i1lcRIvi2RIgwkKclIcZtI4eKdOxFDdVhckzWpurbEI1AKtPVtVUq1FMLQxbVHhE5I1k7OdyXGDZ7Z+H0r3dicLVe1FhuF2OZsHJ5rrO521oeOpnbK4xCcuzuXttb03bo7ywOYJ7xsoFmZAX5Mj1CXkhXijIzY85IIeUGQ0hXnlQ4fTfFXHFpHz+9YCIvln5rC6o3efb34sQSwhXyzhHKtGmJCMRBwhMhX1DpFCS4EOCMtUSQWB8rrxt+PsFLwIO/GKQlHfHYEsRrZPgeU7sy3QajYv+VmPeUvuFFdstKjkR8qvI7e4rsuYr+ueoNcm/4eb3XrelLzh2u0p2EVoDUVj3eCTYaSsKzLA8ZUSBud51baswZgCW5UFDbtVFq9JequRVFXvDJV1regV03zSK/TwbzTqzhmEWxcpqFf4aRY7EMrrpCHKbguGiXc0kXCFx/Gp9PUflFe+gL4nUrSvykv4FQl4oIDHuGx/xhvGDw8BIgIAcMTje+AJAu+n31wo588ZDRB81D8J5SkqZHoLqgH1N3dok4oi4W5Fg6Y2VMVjKoYiGQjv9XDA84OMFL+YX44r8ypPpvvccFdtfVHB4HJ8hNHdpoQsxXuecv9Uw+irg6qCv00qU2YwGTgHT7bl8+7meizbgwGkEqLpou/5p+jtgDEYEWmChOIMoBaqDodlQvVlQHWfjZKeiWhtOKif91g/LkdZ1C8rwZZnMq1VKSxjuvljrXnYDV0nX3Af1Y2KtKro7K6taigpoUftNzalbpFpXyUWVk7oyYzCMrbGgh74IgAAr8YJhIV6JH0tYiNclEPE+tIAZ9j2AmQGRAaOybd15MCGuiCvh54KfK0bNc1Q+E5ZeVDEy1varGnGz0vTP5p8XZN0Rd0+bWU+gQrEcBcuIo2wSzi+tcL9meK7wXOB7wy/kbwNH+yruw0O9Fpmx7E0yDr1lTwbRd4jIXkFf2J/wXbOao1vuBZ7bdbouiwHAMgOfxXh9ldZUxfsaPOnB4so9JaSk1rsExOxsqR8NFxtbnd7wMDnpq5hqiaOvzqfULNMVDAco5oDqBkF5kMe9ZBdouUC5a2xWr+D27BxVtd09HJ3bojL/83wfYO9DX1r7AmgG5srzDuaX7hX6UtI/hwfxg8Ia/y3bQmFZQk4qxyGafiOh5mOJ90J8MACk0KMnYdx28Lln3sjRRxTTURhIeXp4vdU562DAcSukGA2c1M4g4pF2dXRo2CiVbll5yok/om3844GvAF8BvwKLjL7qDFj5G4/uCtnAsLnl8E1m4z+IvlaLeoFbJED6zAnqSf6IAWwSpfWrLUlnvjYqnwND3DF0/y/A0GRjct/KkCFKjutLYiTSQiFOMZuq6BOzs0N5oaR9I7mlKsurBPgR5SSUyk9bZVqbElv+JA9O6f8ir1JdqZuumqRTLTB4akQspwW1oRH9hChg2nEZMAAv+yc1bnVRq1OXU1VtFhJ3X64sqVYIu1FmEWrnlcJz2R7Llulv1j+DwOA4DpkZEQn23GkEsMTEkwt+PuDjmZTPOn99L2reKGvxF2npoFvvaEtUmrUu+Fz5+wVPwifhN0NgLuObKktwxuBR9M2iwXAXgzshRtoA7KKvV8lp9O2KrzceuXCASrYzSoHBvqhqXoeaR/1znooOk3BT59E9tR7tiLtcS/WERKcSOBEipVwcsRgW/+UKUKvZWVjOlOc8Er+bK7GI1wgMV8pqKjdXUI93MXhcuibGQSXwvTJuLmw/ETXzMxyDnSs1e4XBiMrbSqvNJhnWiUfgY3DuQ/zRJgkfESWl2uEYkUy/DwoPSui7Lon+Zinm/JB0OUzMHCGJMe77u+xpn+PGR0vh/MyRBCdCPDxvnjBYSnVdvrwbdqzngs5K5qRbJgbAKgJYimoAloro1kS3EjxWfLx43XAN/Dr0Mta9T0OvFG1uhQo1S6geD/Ot64GiHr3MFCvNs2N70V6P7vnsYJgvve4dns+xqkWDzxH9tvSo8tcKJf0toNfrZ1VP7wKbENcVHisMhj0XR54391m1rn7XQJqUV7Q8kCulZqu+rtt8XF2sHcHg2c4397Kr0owFyo1yu0pI5SCqT7jQBjYvfR8zP2PqHsHgKI52untJrj83SjvxfVCIuCvQl5HqjHIcEGmnYRtiwACBMshGG3BM+xwDf+tdDK7J1CbBKgY7hQ3JKmiEEHFjS39KLTQJd+hUWLQNBgbHJ0J4xAQ/if/ed7YZf16i80c8kv/Ra4Zh/ymNYPAg8fXQ16G/aPwekfMWDqz/tKAuL2uYOYgOnmu6wmC1Uel1FcHYK1N2tZEKXOtXaRsmOGWdLyJkosUXxJHjv7LKKtzWmKpAa1wd4DV5EvO+r85VaziNwWqQUvtE3W7UGHwCff+Uo5b6qI2tG6/6IBod6G+GUdFly7RvXt7zSPc2nN59r6LyeSV+rttz2Z7rti4hBiBZky0nVyxgBloYw7H/N64EzyXZfdcFljKJRfxvdL9K0EPHf2flRo+ZKHICyh5XHIxEHFIKQ1Pj6Zrtwam74uVK34ydBOP64hUrhrrnPdnTdLwt2mRO2VC+gKvE14Lewbfs098b5cSwc0hwFUfUXqXWdlh8ubiQSz+sFn2bkWPZgxU81kLdDptILomw018mrDU11NRhUd6utPqadLxHhUF72J
W1+BYe3HpHW/2vpA0pbnv78zK4BjrK2zAcxYpcqhWI4lrroRFCVEXl99KOqxaSHVsDDLwgXyzim9taiVfkR7T7UnhE7kuBlkDLsWDNUn1LgTGSwSU3sCKsCzz20KNSOOOufktv2Ay4ZcxdDt0UWEsWvxPMwyk6c1zhhEVUQKzlk3VQ4T0m+Lngc8GvgCvz6+jEoSXGa8rn+m6VUVZg8LBDVndHo0HohSn0HRwxd9HfrghXrJoES7+R1qJssmqxRGsuKfTPrQG4h75TYUj1yqzi0wQQDhRGKnYlwvTf/U+hxGthvZLBrdRbylKl0VBhuK6kNEyKawE0n53qeC5fdebE7rP5sZRdmqoDwP12/cc+W78zlByOK/29q0tyAamwHexVRW1Ftbr531pLOdJ9YtUDKSe/wusqej4/1y16PtNu+h1ZQBy3kA3AC+lpN7I62ofh98nZyFfhBS3svskXOuOrdIduTbyw40+rjpYlV4LnCs/vwxUrz6TnPr6RGfa0z7PoUgmuRalOYucp6B14CAr97V5lzRAzz7xOgBXYxGBo4FbjrHUHqjKC/kr9c6uF3oeQdlCtvJW9G1NfEMa8emVmVaq8QJGZ6yp9zZ56tpdBUELpjv32ys7yD0INa6GZvkGD4RPqSnm5vlzwa+zJveoha22WzjarxyyD+cJmM5BYGAzN62vLqxhcdaAL0sXrKy/PmucYd7S7PW/rktC3Nf1WzUUttHSQTi7QcaPbdcfgOOdkTkl4DgJFK29SKhoSAhBViTh4h1vRJzW1Qht3BAKG5cH8Y1nwscDHyn/vrlgDNKVyTz2TXPBa3NHxTu+CXijRdxgI78l7dV3aBVO1OGth2DXf1hNY634VX3osLp2Tu+g7Yn8TfZb+elxybqa0cQgSxDS2+za07Vx2VFzZSke2aG3FyiC4dy02quilVan2unGosHq8OjXCEcEm+tWSYuoztR7mLB2fFceAqi4ppXKi3YwBtb2f7TVz/VKitGUrBQloCylHRvKpwf4oSHhxx9+7z3MkviEFHUXXRaprTnlJQ/3WOCAsDJC+OFwRVtrRV860BCByVB0UZXw1Pao0xhDekws6C+1hSJUOOaIy7MpqB4NBWoXL/ZQIYV3gueCT4IvwxfziSp24l9wtcPswv5rZysFg9dRhcFWuuA16qwuVhoyzP7tQK2TXx8ZuJBI8Dr0qA1YnGy7Df9Pvo/w96CsLi6ct60ljL2RjFADo82Ylx9cjSk4too5JfDRzgq6ElKLG4I7DcHUKoEbiov9WH7T9HtQyJ+T0hZX4QO5zYmg+BQm9Uo1FAAgpXIfKcVLMqcCE2Oot9rMAtl5axeCLUlHe/OdyRBwl029yvFqCfC+y/xF6d2cLhP0NRk014p6CIypQl+UIPZIr+Ox+9Z8m6wG6pTARqumxirBgA4PzkcqBK7pifaz8FfC171EIxbWWGfiexPozomqeT9p6YQh6pzlu6YmbfqgeOJPi9SQvWaQi2jGF+cdb1iBxFwCi5pmLMhX6avnRbApoubvbpsiUEfpQ9KQUem0Wp/gjk56LOot2PSNnsZYWSxhWpTUYOxppsJXS0JwFDfidRS9oq4px7PRrvkWqVcJgZxoDAYCm6wHjFrD0rkdkYoT9vah6i6MwABhseMRluitsFCMRj/egsCTHK5HxSrg9V0xX4m4+y4BIfNwKcdyGoTAAC2I6wUrvT1B8teYVQNMzA0BlCQYxmqQTlsRg2MfakTJaqKwBAAnXJe1R+Aq87WrG2JycGQdNvNclI6tQo6hlbgguUiufwl2rD2/WTjc2YADIns+lMRiaWQdsOGxhO6JvSv6865+D+PMC+mYE9ULOKgV7bU4+fiIW1i/ChMqHN5Zm/xsPtmk9qmqHWyN5gusxJBmtAsONb1fsyXEtDCBxFmveb9241AIn5Pr8Wj29WfO2CsbWbIEpni1uV1C7QLckWKbwhPKBWy9lr6pOGH5ivmhmsjzOIUblxXyTC4bnEp7L9lxfWflcVSW+xEL/vK8zmANi0j8zLQArwmPBp7bvQqS/1CShvCgnVNmzhQGgdMISns+FGRgBmu0Z8qnK30pqrbk5SHs80keAr4AvhhAgMAcR74HnfaFlB51AzxGFdmX6FX/eqW2eo0hWny1vpgsS3Z2QMPs9SQcoTFqTvDLbTfVQThuKz1RViU2OpfuViD5SNc/1te3tNMfUnT+8GvZFxu5oyDmXgqZxSDh3LdPvfsuab3NLi7s5jMQH3YHhpgzsPTlOgTbpg1ESNDA+MV9OOStZ2zxYxapeDa7/LZ1ErioINkHlVdYTqH3aAaAOKG8Vjh29dKvekNcaHUmiPsnMeqM/xIq87Nmeo+PVQrwIfyv5DVaUN9cfjycFSTxIDAS4ID4XWEuglSE9zoSuhgbdsrnCHZDfxDJnqy0RhHAooo9impOz18VyLwdCfK7MDL9f+CLYOO5ReF3UNEZ1bEmLwb6LVrso1Y6f8LHqQ6//ctXLb0LfLHruiwzP8hunHc3KS6waQGB8fdCmvy36jjg8O4s5eaoF43RWzJ119DByhbAyG+VgyIcUVZ1rZXjYS4rOuCrTClpmYRh6lLfs81EMtMKgQvJN5tup2qxVwuDkKkdES5ezG7wKvTlhRTYAE0JlLQ5wuOBZrynfyFQM0vjTrkpW0Eu73XdBfiwx6Oi1LmFpsj1DSXkr3BUfIxbRBAQY3a8WJfNzHf7buogaMudOpaL4YOasCijLP0snLF+q3YhUY/BIJStjWOBjhVfgr4A5r24AIE09OFCxk2/BwuBxcYKAb2a951ZUw2PuXkl66UyF/f0ijQ2frfxWZvTRpOZ5XJVSgXF9YXACP8w6r0950rF5JLHDSFSuNO+UxKjIqyVMTzrlrW6mRTLZj/bO/afxMwO6ewtd8b2xVA12aUdiq3CzzUNWI5vpz0ZikNr+WPepvh6EYq2Qie+CYV1iyskto69sLkOvhbtHtxmW7NS8AC57Co6MtZldWTyEUEdHS/k5nITydkdoF4BbWK32A24J7ohkd+jXil+BvwPEGI/9/Kz++XCWruhI+bKS7Bg87VkdX7aFvj3ovUp53crPcF/b56h/1S7xD8zfeObB6XSvV+rWV6FMu8El/Y3HrhHfrmTuy7s3ftaXMBeOJO1sCOW0eJHVVQAJDRUGDYPVqhynLQuDATyO5eBriwQ+GMNPIa7TqAPGjvikufbJKivPVLJrqvBdYqoBoKqjTzzz9iosiS8BZLXzg7aFuELfTH+ZMbNeFXfl4ETkbCGihXFFjLsQrpqfs6qRto6ckHMJolU6uh/MSmUeYcBig0JjDLhmvzptFiEEghXgscIz4JPwi3Dd+KvXjR4P9u18BQyf4sGV3Iy+f0pG2LOD7tFvJ3CBweP1UMM0pf9z2KnwcRagGgmtmdmfyipbnDxoyIHBqfAffpV+3n/vwkZHTdBRR4PBsaAEHrB9f9qSMIa11rUjuDh41cgtdGXWzi2hV/6g9Gckl/V2Sbx7J7Uk2OqVj8EnBrHos4K+6xIeSyAKC/FCxRclFc7MuC+zD9xVdDbxKRHjy
vgkeCyJAfty4F+zR2pzbYfLthPjXklxYa65s9PAAb1Vow0AZ5crMqgtGqrGii47gEkIIbpDr/B44brhivySiyK9/5bjzDHntpMjcQXDEoOPEM9JPB5H338u9GZRhpo7BflhTlZw0WBVgSGAdL+KBzk06DvFem1nZkeORvf9GHIrOSwEhLcU7Pxg/32PUV6bTE9iMDQ66hF1tFp5F1xbtWdgdCKVVYw8pz+4aP2d+mLbBYeU7NYrD2JZnuoIYAWDWzmHwWqxtsNOnbG3EX0XDAuGhUJE33XZECGib+S+lXtzRF9BgiMwQ/69G8j3QYhAC+BDemDt60FCIDrArN0TUJ3WDteJU/sJ7qJjcP5TlYb+RlnTDkikRwObht7rs8tK8CJ8LvC54O+AtG8SbFlzuf5Ri0pN4kHiY7/Jczk9hP75wFRsCggZhd7ZzljVthrDcRm8yrDXAsTABgQwslHOdkMEp+kuV+PEdziYzXQj4KJ+DoUKesrT6rS+X8Xg1B8XJlVRvaYHqTCUz95xrXKQ2ALIi2tV69bbatuSTjSzLz5v9qEXB9YKXfVyLgbNZ+Esd9q++fVL+guQNiNZkKPdd6FAFCL6HmpnG32ZUeJuEEMxG1AQGdeAT8AnwXMpUnCoctm+q1wS65SKZScrlrm3bgO9GYz3OOCILo3yWln3EoRgkmBHtCV0jAmGrwUfG64BX6Wr+u7ymoOUutq/tlM730VJhT0MJjyuqgKQ6qqLS0SLWidvMdu73j7Fj7vdofU6Vc1BV6fdodfC5WrsLjyd8zjTNTtTHcE4GgfzHcr4uxNSeUIZM6MOk5ZYXtMVFYYShq0mrBAs0JB4VmbdyMeLOpA8y7n92+TmVKWCBvGE/Xcn7QXqCgmMFVj1Rk47JWTtDgEQwkq8UMTgZPetNM+y0RZ9mbG1/qYZZc+ERQvQA/FzSdsfWTia55yMlxChyqC86rW++BhsVc414mq7IXWFiAGKjBzpuLEHsDWBJuX23hUkWBkeKz43fhJ+E74Cf+fVU9ONmCRFw05nMSB0zghVPPi45rmlvw36apuejFU+3oHxs3dpP6OE5rnJ31SS4P1PpVc96AWAPQW0uftCVc/96KtelfcoDcfEkSVOiPJtt+Eip8UJCpIymPRf9iek9egBw5IKt81ZnLuV1vvMwVEfD+4NTKpEVTv77WFTxqfO2BSrDuapZSqDpm8sAONTax9md55AKLJ2ROsv4qF8XijEYF/crbaxZGv3zei7BQwJhusnJtF3WQItjJ+IH+uRgkOi16noo75USmxFuW1jcC7cYFJLf6NoAGyqo2cigOX92JISYz0JvggoFJkLLEvwJPeuKzGyIKnHO8rqRJfLy1xuXYs//P+4/Vhlve0RNdBrVK19/MzoW+6+cBiAK7lZ7ayNN+Za7xI24HBMH8mFoJhezRvvbhZriRoUBDWE1HyoalqNJJ6FYdBusBujPIKjpwf76aWm+iRHetHCMIzdY4XHCX1nEnCOGOyhwSalKqx/WI8R9+Yk/SWMNmDOaSbjXUjrjIO+WyD9443sBnmhQAsvH0wfK3yu8Fjr8dFOzRUG3+ICbYmjix7Ep72GHYCjJRh2JXPeCfgElypIsNsbwhhejY+FpStWGesZJ8FB9O2nuBK6CvmjIy39rdBXraQfemQM//HZyDcJtzX7MkBSnWsZ9iRZ/iNV0bTm2VyVPDavjH+qPT29OCuHnNoER410iBMKQIKiY9NfhDKRbzktVrg1mCTLCQqCMXRUT6nVdmE4iuZ4pTR6IhV2F9pVubJM7Xp3OzJImqEEdQm9YL+v2uSfHaGHMVi02OvfXiyrLlodT/5x0F+s6W/b/+zzHH8Hhoy+oeltzgeByLQEegT6APxrgY8HrEtBQx2graJOx5lx5S+d/jRIcNuueiQfzz9yK/vxFYhgi0rzGayVAcFFGsveFJjL7Dn9cKVIgvGB+NpdsUrPZ3kESpe5SpQEWKndcvq7d20kaxtRF/uOxBPtDlw1/lqvoW/sjMjOMSEJfXf66+mfY/k70Fe4dDVHNPSFOIlsGDaS+wfEbEfqaLySilLWoObHyOL72jjV5pp9GIby0fqMrVVZZ4m+5OOdVCsZEXXdY6UxiTJF1gejbEGDXijRN3oUV3sw+GJh8DtE9jlmq870N7lc7bssVN2Qwb67whm2QBF9N6Z8STKKC98rQqaF1yfToX82cm4Myo1sWMPguewcigo6w1E1dWaUBeKoxXa00OopB5KzJ9e6QIxHem74zfgSiX+TEvJ49NJiP7KJyu5Old+uPGhdNCC68tn9Uz11r6X2HQ7S832od27oyYG+xdFbOzbUDfE7t757HsT/Rv1zKM1XbfKpbgzrvKvtAcPVo+1yOMdR2ao58PFNVUicCthddSJuq3gtu45psVzEuwe7O16ocsLRrEJfv4fd4aFicGuDmN35Q5Jg0fNUVUt/KWueuUge2aKv+AcBsvsVACQqHK2/cSFCxOsalg/GX7v+ufXA8j2qKu/l6+grNaWzqTkidBo42MYBizjZ8pqUFzq/pWK7hTJaCQIEUiKJqwV8vCViWPZ4pK+QSPDBTtIM6Hlala+/qj6ib7bvWh7O5vY4qRuVpVOgncK29boyLLGYNVNt4zLiY3zd6faEiGeSbxB3WtwWV0A6HEHAANAmwHJbP9XnRiy7cnS/goBbwBAw69biM8M9PhJKfZ2axb790xc/E1Y7ggYiW2p9cpcQp7Nl5FK/570ujc+L/uu94Lpc8+PupzMSFKT2rVU7R/o7KL7XuqN4GKp812CXTn/HWUJe8r/o/0xh71JM3YpyJVpl24jK58AYoM6EJZ1vCJkorOu2PAP9QvprTfrnpdkEaVai4npQ83kXYx5wlpK7IWkm1oyy1d4MOR4pup7LN1aO0APIad8hGOjY8gGiFnqPR8ok+MXS9BuFGasO5udpYXAqhkXhZoMjUTRxuGTZ42TiK/WruT9Z3Wq8LIsI1kg8uPwenxtaGL5Of8meBeUTKJt21MhFgaaYmv/ZvIsr6CvsIU3+54P+QloFYthQwurG2TGAoZn+buF5rTeymoUD7MdjkWbZw5ZFtaNyJMmlqsEeEd2iNAn5UbrDwVmC1B3QHvXUcidfFSXnusqnsEjeIpY7Gq+27PT+UsDqpK8LkQuFJe+4EPNvUGhnoxZ9c6qNrHyO3osilc2heUaM7lf8eIT1V6C/CH/t+uf6KYwZX/+xcoQnzeaCTsdVS5TcGHgnwQK8vRoIYVnwc4XfG34F/CqiOrL5Tb0RicHgqqP3tgAs9NVqHt03qZERNazc7w/AnkenluX+wStSeeaoLeZb6PVZ8brK9Heq424QGpyz91fbhofdmXPLgYwK4bi4RvdFZTmO76tb1fG70mD7KTjAGFPtRzcVVOPIYD3OprytVOPFt6Bbnmvj35ZKfNt+xrNtN6wH3lJhx3nK7lvdaNVJ2pvL6BtzPiOmfzG7mWSxkvtC8sNKGqONSaJvLEbHzp6JWC8f2/IL6Ney+z9XymfR29b4OhOPdFxeqaxvkbFYoRKAU1YsDy+VDQphckhW
lyABMa7EG8GT8En4zfCdNlNNqj+Do8gEgQkvoyZkTxaibmQ0MSPLoBpJgrG0d8Zi5bufNYWKTpbU463z+l3SPKghNi8pu0BfDlzpn033qHIkqvpqFYbbrR28hKZxHL5oC7httIVC/7zfUH/8t5ssJZ98Lv6smxZNqMbOqYimSqr30IV5h0xf6YAv3VYqi3WnxVxt0Y1CE6C3Uu5U0e1V5RJfNeRLbGh+TjWtHv5V+XdOFwoA2TtswbCkXY/iLr+hXJMlDJZH4ovY9oC9qHyW6Btv8HgswDH6aH1u65OXX4hZ/6yqxXfpZ8OQMhL0ojpCD14rK1EPNpXETFhU8NfoS6Vk3ihTUbbbM6i7J8EGlajXrgsExo8NXgF/B1wwJqfM6Fs7ZFEyP6R28JhnJQZHsWJ/87d4nNXmg10jzRATTQCk6HQLg61VmPbp1yS4KD82sd7FdC/WIzE4i0WIK225hr5HtU6LQnxrcTEeGv9nUWfDfXf6GxlwNAAHmJ7mkh1Ee6W+/4GEbVXT2M3C4fsfQW3501mR70X8VnFaqcasarE2q4011G0VNbp6Xe+LaYlvW7kU6SI+i76VPdgqE6VrTc+sl/Zdj4TmOQf+ek+pSneVlc8V+kqJPtXrEh6PsPxi/LXix4rrsOk3B/ZUCbDeJE7EkbbrQ/E3S6NtEKWt5YMP+87Z+BTSf7ViwvQRLcHwXOCx4INwPYbMkQ8h4BHoHfD4BzE74HF3hzUi6ISmuAMXfZs9AHbdaVoQNMNdGDVtZC078Gc5bnZ6Gio8efbwqGLjn4m+Nf0NxZJrCn1zmdabr6C/0t8+HGMp0t8YgBStWbkY4fHvetCRJRKh1RkWgeO/9ojVq/YUla1UDXUrvCiy/vG28sNXKkRW/9U1dOo/c7O6v1XliLd3Uq1hnPQP9cf4XZWhnfVm4ruSgr7JQmw7BLToG5XPqYBA32oVSEtYPsIS3a8+9+gjC2L8iaudEY41vT1ZVPmqggBLvc50pEh05e5FWFSY4oCnMlu1WmjVHdqvVifQCMuCz4WfhE/E3wzfnDz8BfRyCahIfGQKpESnUm2htiUjiAEY1dEG+uovvVJE7zyYQUtAEZqYnFC2PsiDz8lsbSPlR4bJzFA6PLAM6B2p7XxmtAzwGvrmHxyANwwbvl702ign0oMB6nmjVDw4/hjMieFVq5l7W726H+fjPIS7IqG7DY3HEyhhY+5AG8x3AeUU4uicLQhsa/b9WiqHLOsWlCiNSv1eVpiJ75KNsjE1FfKSIkqUdYzwwNLRV5p+2xtZKKzL9niE5SPgJ8HHAo817b7AAWDZU3DcaqNVU07eKiiBXJNkA95DjAi2sHtRAWjBSIXM7o/klIl4uRKvhE/CB+HC2c2dd6ubIsXu6PFeOGmk9+YQMB5MN9t71PXZAGLvwqSILjC48ogufZFMY3CDwTDgNtyX232vUrVDpUrv5V5PeL+kgt7GQOvT304/9bWUqLzqlETf3fdqCwf9lVPJIMB0gtzaLmueQa0JeTCw2InNTTW7MNxt6C5O3FX/TunSHVExGOZWj7W06Ks6W41IUrEJSHNDPJQkWd12qaQDGXpj59dMeUvi6+Au7NC7/zgyXrWm32zPTQ8KIgCH5RGi+xU+l0L/7GqVI8iNpsIYNwMX/20swVpqaLVjRdPF9BgAaLcBw46m6k5HuAf1ztNlgBJ3i9+lpZkQiGBd8LHgc6PnFn7HV35w33ZLYIYjA3gEYzQwGBbOd5N+aPTX31a53E54R1bHIavC4IolNx/NbTB8r3iexlpXhZbevJeD5nKFu80GCSb6jm2lAACmEayivxX6hi0pn7ed/m6B2ix6WXT98Bj6KlSpTfiAos/2tdKVptucFWfchiFVDc2mE5lV5w621Zqo27m6HYMtBsOwoVQ21EKDRF9/ppewV0nVNx+Dj/50S2iFK+KbvK5K9F2OXEZCIbGH+UJUY6UfmGONKu5rBFZFAzAva1iegT4QPxdIGwBnhhoAxF6EHCBAwV9Vj2gp4y5aZ7cvnMiHJTB0VbyaK8+p8eRWg9J+2Ue0McJC8Fzwc8G/N/p7oxW2F8COvuk7TAuodDlvmOa4pIJGjEWIY6b89IkyQEbQAgWH0PeQ6ps7VghuMUs05ZHKmLtbA71Lmu5564MWekN5qqjnqF/XJLt3N4S+9tm2xZr7BoCA2wtfL3q9lte2vLZl2/XPMIYoPvoOR5EWvlfFAlSroK227Yal0HZyU9ROmbdms/LFz+cFPYdwlRmrGJzKz2Bw240qhmespiQnPugqR8fstZn4SvRdk8455KAj9XKJvhJ6Y8INX/McJVrHFwox+xX9WvBj333hinMMUp01Wi0VUVPxEi1TQ4M2n49zX1tyIg5M1RexvFAwVCFMpGxQOCI9WEJCfq74teHngv8OuBxrv8AoDcBbdGSnY8an6O1MHCE5UmEIwGmEISycOeiO2fvyXzWVeDdiW3y7xmC1od4ru99OPCJX0Fdoj8XldeWsFasb7SqfJ9eEre9Vi7684fZN2xbRl743CmUQsCO34G57iaKVzYYVo0qrJy2ZbjN+gM0s7xqJp8OQdC19z0Z7zrX4hEj0dZ6bQ39z67QfVDzIxMLCWU+oUqmdK+47gr5QQi8k+juEvruHboo+IuLlY6NPiPQX8+4L/wRp93tQ9mKaJMG7rFBpzx2C64xfuTeDJUNbNSAQwcIQt2d4Iq5MC/PGsEV1R73Qy2MDETZAJC5gOCAQR4UxwP5VRR3irtJQY4UHpRPsO+UQf90Mda/o9tEe+uZi0nbbgC40uCuRoMIMFVrO+17V/VGVDcnxagtJ+fyKBmCNhrbdszBvjDF7ZjYna8Ssmbm6qiLEqpF7ZCfgd3j0O5bdqaApaOawEdAa17RXpl9Vw+ybNQ9H3V5bpcv6gcHQUOHqHvMAy9ALiYYqmmef+/JBeWviCwAt+uZXUzQNvBCv67asvPyK+udy94UQgJb9PgPAfqrExVnk02XWFetK7o59N6QTOTTKzM9OXg7ClA9LHulUjkjIK8V4JHoEWmH7AkhaaIzZ8EVxBgCkmI2FIaCEYRa4hgwEACtjBOQqOFh7jPcuwrqpK1M3/gkYPIK+6rDhuoCuZ9Zw96jDGo9zLtbpR5tVrW1Fdbx6vUiib7v4S/UL3ygHBS307aoo2ywTs4bYoUyEJSFWva78nYVgjM5aMuhgBdq4U1OU3IjBgzUfTYi28lUwsIIZlPZaeVNqYjJJoKteWehbhaW1wb5h3+h3C+hDb9EZ8QCjARiR4+6/+LHgx4qPJfk/w9ia7vZ1XxdW7b0ZZkmw2A+YAoOR5Qrc7YEtI3H+0Tp2HfCsdYooxiPh54LPDRfGhXE7ZgT5waQKNiRkBkRiROYNGRH3ZB20AAbENcCe45kAcAUmwBCvOhrvg24bVhTNwD0tdK/eXL+JNLUW+pwe0PGVHHe2stGXCyp8VFuhr4mylsz4XlWJrnJW0RERUb8YTb9boDjFxAJKJuFJhfO5TI0qv7liiPVvZM/2pYD9iNtzleronHSzcVURU+pyQa2hwmCwbaiT6F7
jdlGt/EANABVVkO47mgU95u8Vny4sifWHbc3gDIarirvA1x8WfT7waaNkJUYSwrH8Nc4eCu7cNnRART8NCCQDg9yoROJlH5FOHJstN8JuQHTiCwriM0QsLeprBjPJgg5fQRP/e9sXsWgb6PhAbMSD0B/Pk+VzkJq9s2OUrwbFJxM4M+q1nZLWG7zhgSIirLzS6PmlCSqH8XxcIM2h6aNmk17X9c2ZroubCcNc7onJPEsoUmtnHW9SjZYHjbANYTD2zvb/DMTJxxuyPVlq6gXv/CusC2I9/nYMi6wbYfj9w/5yVImyO9FV5nhBZrNWt0UXSnDWHay+IiUWPpZCr++wv7St7eL3jtqtjyfr06DoKa/GTq+PlUYRocBNFMj0x2j+0ePdBP+prvIKj7jiC0JRXatK9IgrtC8rYY+yiT64iq4QRmEIP7JM0XEprThd0oVBjxVhbs8H/L689IBV1jXJqaa53F/3C/9xs2eONslNxrK/Xy+CBmj2pl4hE6mGzXL/mTq8LwyXWFLc9n/DP6bKdV6ANiFqLIEv1Eu37xLEbeRcOX3swj3pA7keyz+NzGLr7BlDFjhwzY3r/ba/n3f97/9y9YVli3E1k5+uJKIy73oIdD3g98rn+POgHgt38fouD2mY7WeRuJjcO+kcfdAmeuMfhca7HP9gqwLactGgDgp5s3wX7oLsQqX1aZixErFzBvylVe6U81ojRrlVQe58c/IsF0es+5nOSyJGLTi3dD7tvU66hVo4hkZGKT+XYd1dprb+ZoEaGXj8F5bxQbbIXC+RWCMeno59v6s0y4BpyCozmAW5lIqDP/EiExYm7YQt7f39NYn/jq+TccFEVxcwoq22fGTvsH8Tmae6qJGseQ20L6mAFeK6w/8J/f/wdMEgOeZ2qS9AAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib\n", + "from torchvision import transforms\n", + "\n", + "\n", + "def make_depth_transform() -> transforms.Compose:\n", + " return transforms.Compose([\n", + " transforms.ToTensor(),\n", + " lambda x: 255.0 * x[:3], # Discard alpha component and scale by 255\n", + " transforms.Normalize(\n", + " mean=(123.675, 116.28, 103.53),\n", + " std=(58.395, 57.12, 57.375),\n", + " ),\n", + " ])\n", + "\n", + "\n", + "def render_depth(values, colormap_name=\"magma_r\") -> Image:\n", + " min_value, max_value = values.min(), values.max()\n", + " normalized_values = (values - min_value) / (max_value - min_value)\n", + "\n", + " colormap = matplotlib.colormaps[colormap_name]\n", + " colors = colormap(normalized_values, bytes=True) # ((1)xhxwx4)\n", + " colors = colors[:, :, :3] # Discard alpha component\n", + " return Image.fromarray(colors)\n", + "\n", + "\n", + "transform = make_depth_transform()\n", + "\n", + "scale_factor = 1\n", + "rescaled_image = image.resize((scale_factor * image.width, scale_factor * image.height))\n", + "transformed_image = transform(rescaled_image)\n", + "batch = transformed_image.unsqueeze(0).cuda() # Make a batch of one image\n", + "\n", + "with torch.inference_mode():\n", + " result = model.whole_inference(batch, img_meta=None, rescale=True)\n", + "\n", + "depth_image = render_depth(result.squeeze().cpu())\n", + "display(depth_image)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.17" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/engine/pose_estimation/dinov2/notebooks/semantic_segmentation.ipynb b/engine/pose_estimation/dinov2/notebooks/semantic_segmentation.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..60dd3ef5505be2ad9bc3a9d98f8b3dbe1db99c5a --- /dev/null +++ b/engine/pose_estimation/dinov2/notebooks/semantic_segmentation.ipynb @@ -0,0 +1,1476 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "b470389d-a897-416e-9601-aeacb39cd694", + "metadata": {}, + "outputs": [], + "source": [ + "# Copyright (c) Meta Platforms, Inc. and affiliates." 
+ ] + }, + { + "cell_type": "markdown", + "id": "eb5c8577-7dff-41b1-9b04-2dca12940e02", + "metadata": {}, + "source": [ + "# Semantic Segmentation" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "febdf412-5ad0-4bbc-9530-754f92dcc491", + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "\n", + "INSTALL = False # Switch this to install dependencies\n", + "if INSTALL: # Try installing package with extras\n", + " REPO_URL = \"https://github.com/facebookresearch/dinov2\"\n", + " !{sys.executable} -m pip install -e {REPO_URL}'[extras]' --extra-index-url https://download.pytorch.org/whl/cu117 --extra-index-url https://pypi.nvidia.com\n", + "else:\n", + " REPO_PATH = \"\" # Specify a local path to the repository (or use installed package instead)\n", + " sys.path.append(REPO_PATH)" + ] + }, + { + "cell_type": "markdown", + "id": "efdf378b-0591-4879-9db6-6a4ab582d49f", + "metadata": {}, + "source": [ + "## Utilities" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "90223c04-e7da-4738-bb16-d4f7025aa3eb", + "metadata": {}, + "outputs": [], + "source": [ + "import math\n", + "import itertools\n", + "from functools import partial\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from mmseg.apis import init_segmentor, inference_segmentor\n", + "\n", + "import dinov2.eval.segmentation.models\n", + "\n", + "\n", + "class CenterPadding(torch.nn.Module):\n", + " def __init__(self, multiple):\n", + " super().__init__()\n", + " self.multiple = multiple\n", + "\n", + " def _get_pad(self, size):\n", + " new_size = math.ceil(size / self.multiple) * self.multiple\n", + " pad_size = new_size - size\n", + " pad_size_left = pad_size // 2\n", + " pad_size_right = pad_size - pad_size_left\n", + " return pad_size_left, pad_size_right\n", + "\n", + " @torch.inference_mode()\n", + " def forward(self, x):\n", + " pads = list(itertools.chain.from_iterable(self._get_pad(m) for m in x.shape[:1:-1]))\n", + " output = F.pad(x, pads)\n", + " return output\n", + "\n", + "\n", + "def create_segmenter(cfg, backbone_model):\n", + " model = init_segmentor(cfg)\n", + " model.backbone.forward = partial(\n", + " backbone_model.get_intermediate_layers,\n", + " n=cfg.model.backbone.out_indices,\n", + " reshape=True,\n", + " )\n", + " if hasattr(backbone_model, \"patch_size\"):\n", + " model.backbone.register_forward_pre_hook(lambda _, x: CenterPadding(backbone_model.patch_size)(x[0]))\n", + " model.init_weights()\n", + " return model" + ] + }, + { + "cell_type": "markdown", + "id": "a5724efc-b2b8-46ed-94e1-7fee59a39ed9", + "metadata": {}, + "source": [ + "## Load pretrained backbone" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "2d51b932-1157-45ce-997f-572ad417a12f", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using cache found in /private/home/plabatut/.cache/torch/hub/facebookresearch_dinov2_main\n", + "/private/home/plabatut/github/patricklabatut/dinov2/dinov2/layers/swiglu_ffn.py:43: UserWarning: xFormers is available (SwiGLU)\n", + " warnings.warn(\"xFormers is available (SwiGLU)\")\n", + "/private/home/plabatut/github/patricklabatut/dinov2/dinov2/layers/attention.py:27: UserWarning: xFormers is available (Attention)\n", + " warnings.warn(\"xFormers is available (Attention)\")\n", + "/private/home/plabatut/github/patricklabatut/dinov2/dinov2/layers/block.py:33: UserWarning: xFormers is available (Block)\n", + " warnings.warn(\"xFormers is available (Block)\")\n" + ] +
}, + { + "data": { + "text/plain": [ + "DinoVisionTransformer(\n", + " (patch_embed): PatchEmbed(\n", + " (proj): Conv2d(3, 384, kernel_size=(14, 14), stride=(14, 14))\n", + " (norm): Identity()\n", + " )\n", + " (blocks): ModuleList(\n", + " (0-11): 12 x NestedTensorBlock(\n", + " (norm1): LayerNorm((384,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MemEffAttention(\n", + " (qkv): Linear(in_features=384, out_features=1152, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=384, out_features=384, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ls1): LayerScale()\n", + " (drop_path1): Identity()\n", + " (norm2): LayerNorm((384,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): Mlp(\n", + " (fc1): Linear(in_features=384, out_features=1536, bias=True)\n", + " (act): GELU(approximate='none')\n", + " (fc2): Linear(in_features=1536, out_features=384, bias=True)\n", + " (drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ls2): LayerScale()\n", + " (drop_path2): Identity()\n", + " )\n", + " )\n", + " (norm): LayerNorm((384,), eps=1e-06, elementwise_affine=True)\n", + " (head): Identity()\n", + ")" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "BACKBONE_SIZE = \"small\" # in (\"small\", \"base\", \"large\" or \"giant\")\n", + "\n", + "\n", + "backbone_archs = {\n", + " \"small\": \"vits14\",\n", + " \"base\": \"vitb14\",\n", + " \"large\": \"vitl14\",\n", + " \"giant\": \"vitg14\",\n", + "}\n", + "backbone_arch = backbone_archs[BACKBONE_SIZE]\n", + "backbone_name = f\"dinov2_{backbone_arch}\"\n", + "\n", + "backbone_model = torch.hub.load(repo_or_dir=\"facebookresearch/dinov2\", model=backbone_name)\n", + "backbone_model.eval()\n", + "backbone_model.cuda()" + ] + }, + { + "cell_type": "markdown", + "id": "c1c90501-d6ef-436e-b1a1-72e63b0534e3", + "metadata": {}, + "source": [ + "## Load pretrained segmentation head" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "d0bf0b7f-ad98-4cfb-8120-f076df8f8933", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/private/home/plabatut/.conda/envs/dinov2-extras-conda/lib/python3.9/site-packages/mmseg/models/losses/cross_entropy_loss.py:235: UserWarning: Default ``avg_non_ignore`` is False, if you would like to ignore the certain label and average loss over non-ignore labels, which is the same with PyTorch official cross_entropy, set ``avg_non_ignore=True``.\n", + " warnings.warn(\n", + "2023-08-31 06:29:03,743 - mmcv - INFO - initialize BNHead with init_cfg {'type': 'Normal', 'std': 0.01, 'override': {'name': 'conv_seg'}}\n", + "2023-08-31 06:29:03,744 - mmcv - INFO - \n", + "decode_head.conv_seg.weight - torch.Size([21, 1536, 1, 1]): \n", + "NormalInit: mean=0, std=0.01, bias=0 \n", + " \n", + "2023-08-31 06:29:03,745 - mmcv - INFO - \n", + "decode_head.conv_seg.bias - torch.Size([21]): \n", + "NormalInit: mean=0, std=0.01, bias=0 \n", + " \n", + "2023-08-31 06:29:03,745 - mmcv - INFO - \n", + "decode_head.bn.weight - torch.Size([1536]): \n", + "The value is the same before and after calling `init_weights` of EncoderDecoder \n", + " \n", + "2023-08-31 06:29:03,746 - mmcv - INFO - \n", + "decode_head.bn.bias - torch.Size([1536]): \n", + "The value is the same before and after calling `init_weights` of EncoderDecoder \n", + " \n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "scales: [1.0, 1.32, 1.73]\n", + 
"load checkpoint from http path: https://dl.fbaipublicfiles.com/dinov2/dinov2_vits14/dinov2_vits14_voc2012_ms_head.pth\n" + ] + }, + { + "data": { + "text/plain": [ + "EncoderDecoder(\n", + " (backbone): DinoVisionTransformer()\n", + " (decode_head): BNHead(\n", + " input_transform=resize_concat, ignore_index=255, align_corners=False\n", + " (loss_decode): CrossEntropyLoss(avg_non_ignore=False)\n", + " (conv_seg): Conv2d(1536, 21, kernel_size=(1, 1), stride=(1, 1))\n", + " (bn): SyncBatchNorm(1536, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " init_cfg={'type': 'Normal', 'std': 0.01, 'override': {'name': 'conv_seg'}}\n", + ")" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import urllib\n", + "\n", + "import mmcv\n", + "from mmcv.runner import load_checkpoint\n", + "\n", + "\n", + "def load_config_from_url(url: str) -> str:\n", + " with urllib.request.urlopen(url) as f:\n", + " return f.read().decode()\n", + "\n", + "\n", + "HEAD_SCALE_COUNT = 3 # more scales: slower but better results, in (1,2,3,4,5)\n", + "HEAD_DATASET = \"voc2012\" # in (\"ade20k\", \"voc2012\")\n", + "HEAD_TYPE = \"ms\" # in (\"ms, \"linear\")\n", + "\n", + "\n", + "DINOV2_BASE_URL = \"https://dl.fbaipublicfiles.com/dinov2\"\n", + "head_config_url = f\"{DINOV2_BASE_URL}/{backbone_name}/{backbone_name}_{HEAD_DATASET}_{HEAD_TYPE}_config.py\"\n", + "head_checkpoint_url = f\"{DINOV2_BASE_URL}/{backbone_name}/{backbone_name}_{HEAD_DATASET}_{HEAD_TYPE}_head.pth\"\n", + "\n", + "cfg_str = load_config_from_url(head_config_url)\n", + "cfg = mmcv.Config.fromstring(cfg_str, file_format=\".py\")\n", + "if HEAD_TYPE == \"ms\":\n", + " cfg.data.test.pipeline[1][\"img_ratios\"] = cfg.data.test.pipeline[1][\"img_ratios\"][:HEAD_SCALE_COUNT]\n", + " print(\"scales:\", cfg.data.test.pipeline[1][\"img_ratios\"])\n", + "\n", + "model = create_segmenter(cfg, backbone_model=backbone_model)\n", + "load_checkpoint(model, head_checkpoint_url, map_location=\"cpu\")\n", + "model.cuda()\n", + "model.eval()" + ] + }, + { + "cell_type": "markdown", + "id": "2dc1b106-d28c-41cc-9ddd-f558d66a4715", + "metadata": {}, + "source": [ + "## Load sample image" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "44511634-8243-4662-a512-4531014adb32", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAoAAAAHgCAIAAAC6s0uzAAABamlDQ1BJQ0MgUHJvZmlsZQAAeJx1kL1Lw1AUxU+rUtA6iA4dHDKJQ9TSCnZxaCsURTBUBatTmn4JbXwkKVJxE1cp+B9YwVlwsIhUcHFwEEQHEd2cOim4aHjel1TaIt7H5f04nHO5XMAbUBkr9gIo6ZaRTMSktdS65HuDh55TqmayqKIsCv79u+vz0fXeT4hZTbt2ENlPXJfOLpd2ngJTf/1d1Z/Jmhr939RBjRkW4JGJlW2LCd4lHjFoKeKq4LzLx4LTLp87npVknPiWWNIKaoa4SSynO/R8B5eKZa21g9jen9VXl8Uc6lHMYRMmGIpQUYEEBeF//NOOP44tcldgUC6PAizKREkRE7LE89ChYRIycQhB6pC4c+t+D637yW1t7xWYbXDOL9raQgM4naGT1dvaeAQYGgBu6kw1VEfqofbmcsD7CTCYAobvKLNh5sIhd3t/DOh74fxjDPAdAnaV868jzu0ahZ+BK/0HFylqvLiAv9gAAQAASURBVHicrP1psHVLlhiErSEz997nnDt845vq1avqGrqGrpJoIfVUomXUjSSwDRFYAyAJQ8jwx+AfKMIORzjCxhgThiAc/mEby9gNEcYOhMMggxAt1JJopJ6kVnd1qau7a3j16s3vG+50pr0z11r+kXvvk2efc+/7GpxR9b1998mdw5pz5cqV+IUvflZVzQwAzExE8jMR5YfxfUrJzMAoxph/ijGqKhEhIiKamZmNz6qqqmeL6uzsjNAuLi6++PnPNU2z3qy6bSsiqppUzMwUFCF/nqRNUdu222wTGANQSl1Vh1deeeQDfeYHPmkmwXHqYkoJAJNKHk/uNA/SOdd1XR45IjRNc3Z2MpvNiIh9x+xjp++9+/Tn/tovv/rKYyMmDADgvd+0681m45wjItXEzITmvQeAPLMMECIa/1VNZOOzMnMGDg5lGAbml1CU8f3hT5M/i/cMAIg71ADu4WvE2tAHImIenoGOLY+Iy5+UYy5byB+OL8cZTcY5/GsTEA0TpLG+Y6+q5Ricc7nxZNp13bvvvvv9d578uT/3p1999VUzQ63UkDxGXfmaJBlTDagEgMaQfHCVwopdUlUwrw5VxHGFyKaYadlQAcBMzQRMCBQNyAAN0gFeiunrBBd5LiMQ8kMGo5qNcC5bI3JjgwPSdmAc+WVkIuiHatbjWjN6mDG1navqrk1J7T/4f/3Fv/W3f/HBo4d/+p/4Iw8e3COPH3548Vf+i59HqjtJ83lzOj996fEnTk7OPvWpT3zms59su9Vq0zbNiaFmBul7J2LmEEJKaT6fr1abDz/8sG1bYHLOzWazGr2BMtu22xDDYrGoqxkmapctdCrruF1vnj1/8vTph8+un3z07KMPbp7xAIBNlMW9xz/2tf9OMz89Pz///pvf/gv/x//DT/3DX/vH//v/WD2rzh8++ps//1/9O//7v/DoweKf/Cf/2OuffDl1aTF/8OGHl//6v/Vvf+ozb3z7zbf+mT/7Tz9+9OrV1c3rr762Wi7/7X/r33z51ddubm6IiBmlLzH3d+/+WdM0koyIRCSE8Du/8+0/+U//s5//wc89OL+3Wq3+6l/9L9rt9vr6crvdMiMRrdfrq6urjIWU1MzEehbOxJmpdMRsxh0zM3OWEgR2yCCIqAqDGFFVHUnLIIKRqmYBWPLL2EKWt1ks7whj6D2/qaoqf+6cy59kVkLE2WzWNE1Gca7jvUfEGKMqZAnZtu1yufTep9SllEQkN66qMUYRy882lPyMiISWTAGAAMEMEIHQJEsaBSa6WX1ufu+HX//U9eVFx+rZzefz0/Oztm1TSllEjMQ/Ch8zBDUAAEK0QiJp/8BEZjKOZ8dl0rNqKbVwmHgPW8D+vwCg3VE5OXm5E4OKAABkvTggbHyYOb8A9/rpvfNqVjE1909WKL/z4TuX3fLvzWs2QIP1ev38ydP19RI7AQCtwjWqgJ1yU3fmgbEJN6l143x2o79V9GehaZlAMyKJKJP72Eimj8m/psk5d3NzU9d1pl1EFBEFy7gfMUFEiAZAZoYAZpJp4v333//BL3y2aZqUOk0xS20RdY7NVERhh0tTVe99pnIzTSl1XfI+eu/JMEXxvj45OTGDNkVmIKdZsXl23aBBAcgMAW2iJku7ZAKx20A3QXNZ/2M/OdoCgN3yflomFkD5XL6cjKqUBXcYDXeP86i23uMfNEAzUECwXkYBEYnAZrORZCLi2BCJiBidJGFfAziQpCbBhyQEyGZkCEmsCnXSLTsPBiklQoeIznHXCTJm8QVmqIioCAB4CMjjACznVc5lYq8cgug2vEwo4WPHoKohBCByzhEgMxNR27ZZcGf6Z+9MMctuVb28fH59vbx//xQRU0pVVaWUkCF/O6AAcwshBJHe/hYR5zhzEHkCxJS2zOw8ISKYLW9uPnz7Q29+c3mzXi43m5uL5dXF1cV6vaZBU2U15IgREQnYkZmopcqHzPu5L9XU00lPMBBjzLOezSpmFhGH5L1XTYC9QlJNAKSqRMAciEg0AoCIbNstGA1CBnxweSLvvffOer2GQWSlFOu6Xq1WXdeFEFSBmVNKh3gcCxRiepAtBnicXwZKP+B3cqaD0VYo5gm67xARuXJeYJSLn1Fhr1artm3rup7NZsPiIePUZbMgV67rOstJKExwIvLee78zEXJlABhH248qazVEA8j9GpCqWo9WVQBksmEllusA6Dj3o7xTdnE3j/QDxp7cSjBOZFQpM2/jt3IwAwbHARjk1swAQPrZ6eX11ezMhXmz3XbhtH7l5ZfjR+9XCCYCTE3TnD+4P6/q7XKzWq027ZZrbwZd1zVcgWGMkZjcuPwtx30ImpEOEGEknRH3E0lUmjYZi2BWVdXlzfW9e/eY2QYDPNsviJhXaoMC1swkYD24VHW9bruuu76+ZkZH6OvKzMyiYf/5CNuMcmYex57NBRHznsCUyIHRYnHy6U+/cr3cDBMxM/Heu+iyCBhnNMxryocT4jiqhkswTrj0d6t69xv8ePHd/2rFV4OwuIPuD6fwux1eCaIJfBBRTQABEAx2smz4FoioqioAWK+2GYNmZpZim5DVBYfKMabZLMRuk1Kaze5fXl7OT7iNMlucbtbqXAU2GP6MbdsyGpKicY9PZEQxIwEj05F1J5i9bXYlmR0aKLcZNz00jrF/SQlmRjRtsySYLkZJhuyy+dt1HTMbQkrJCL33SZ1tLdsXiDifz/PCCJhiF5vmJGnMBnTfCyIAMHOMMYTsXMFRQ5sZqKBDBSPElFLbtqxudX3zvW9/59HpI4/k2UkIzhMwKEivgFERiIicJyQjopRSSgkBFycz5xwzpZSWy6VzLlQuhEBERgAA2+0WALz352entQ9qiYgcc7dtzQBUQEVFjBIAIBGiIUDlgyNmJEesqoRmqmDQNI1DWi6v33nnHSJSiQCQUnKON5vNdrsdBOCoZnc+oVEYjoJuQrGqirxzhpXIAgDVZJZ17c6biIjIYLr7Kvc1trmD/KDYSioa36eUMq
byJ/nzrGVTStvtdrPZbDabqqq893kFzMwpdXkBX9f1fD4Xka7bdl3Xtm2MMctqZkbk0jgYSXR8uQNUroNgZjrIP96tYXoTYWykVDeHjHBM6exxEBQrCtjjsh0HmRlOpLftPicoRfp0AEeGRAXPA4CaiESARLRN8Waz9p4JjVsMjhe+Poe0hCSgVLkFzjDUm2ptjOubK1FtfO0UYkqCDEAE5Eq75nAQpSzAnlf79W7GfUppdNGUCngsbRu3221VVc651Wq13W5ns9lhR/lzRBQxRMpOHlPKHRHjem2bzWY2WzgH3XajqtnDs95uCp00kv6eK1jVui62bcfsAMExKSgCv/zyy+/86q+//PKrmZhym8zctm2hP2x4PgKW/qFUcreX0li5o84h5A8qFLyxY8+9AZRYK4h1WvMFmaGsf6h1Pm60oLpTxocUVXSBiOi9B4PLy8teE6DFGAM7QU1d8q7y7LebjXeokjabVQgBICG59Saa1qImEr1jJmBGZlTrHDqRpAhgBAAK2c2lar3n4xgoFF6s3I3QCRjvwO8I21vAaCJCxOhQAUMIZlZaikTEzIacFzF57di2raq+9dZb27Q+P7u/Wq3I4ei6JGYAcM7N5/PNpp3P513XmZlRT6ghhOCCorKyoaZos6ZidCj46PTBaT0jo1W8Wa1Wq9Wqi3HdtVx7QjIwREPspcQowbOg6LpuXs0IcLPZ1HXtvQ+VIwJBI6IknWPIDnDnHAMKGSLGGJ2jZlapzQaWtFH3AJiqqGbJwYhoJt7joqkR7fnz58vl0nnadFl2oXPuyZMnZpYNPkQUESIa3a4l8I9yUwajQD+GiaYclU3WO1mM4LDyg8ITW6J+pIpye2hCSPkht5OHXe4hppS899niWS6Xy+WSiEIIdV03TQMAzL63yQDMLDurswUWYxzleYy71X/ZL4Ie2JEEIBlkeUEeQjAc5kWY18EfJ/TQwHDYpTmUFappv35pGeiIrp7MCmQhIvQ/a7kX9oJlpFsAyE5wUBUAIesUbrrNXJsGg3UpuOrx2b3Hm6V27bUkY0MyV7m5X2jlaNFcPr8ANVDYpsSsjM4ROttn+ENSO4TUqG73AbS3YTbajKox+0My2pbLZV3Xw8p21x31i2EYpYNn6VQBgIiIkAiWy9VisTCT1c2NqjZVbWaIBqDZa02UeaGHO6EDVFVVgdhJ55Lj1DSLruuc8009m89PMiLbbp0dMgac+UhEnAsDdi2L6RHl48SxWLsfwrDQ4sd/mvw6KYefjygo6typJ6xnltJimHRRUtjkzYGZ+TFlokKK5yOLQigYqRdeeQ+IHCI8efKk65L3PsYNEQMAKgRfmQJzYEaRTQhVu41qTIAU6hg5NCcACpSINMZ1jG1weW9JACCvsCSbyJYFLdItPujSWDmKwaNmxN2QmYDoEL9mdrQtRDQDM3PO3Vyvtl2s6zrXVzBiQEEAEImA2csH83lzfvZoXp+IyIcffmgs7737wSuvvC6WYoze+7quiTnL64cPH8YoALDZbEqLPMa4artNt4mSkkYzO52dxjZuLm8oQVy1cdtera8363WULqEhIwAQg1nv4mZGMzEThyBdzFYCmqCBqm7Xm1kV8kso9u0Wi4VDcs4hjStCUUtni/nZYl57B70Y6geZFU/XdZr6LrL/7NH9B1Xtkezy6rlojG3XdR0RzGaz6+vr7XabPdUiUkrwUXEWIn4PFxPMlub+WEYNOtXfYzuU9Vn/YTaYSt0/ipry5fhwGMAx/lquMvNz9h2GEJqm8d43TdM087zkFemthxBCXj0PW78dFBy6m4IRoQJov6wcZYUqDIsfYh73j8V2m4O7HUkEU8uwGOBs2UOU+9tB4Bg77ANHDlGD/ZbwbvE9oguKTssPJ1bXDr+Z9wDQALMrwyyKdiygwNFu2paCrx1XQI7Dq9W83azW0kUFAAQyF6qT2s9OF7UP1x9dXG1W5J0hbbrtHBo36f5wyhOayHvAAP1WzRiwcBRWiIjsVcHM1ICZb9ars3jmnAMyZiYVIlIxVUXILmdEBCIkIkAZF5fzefP06dNnz56dn59670VSpte8h5E9cmbEzKomIpJ2M8rIMENVWC23dV1fX61+65vf/fZ336zremA2MVOR6JgLs/qIippITCK8ex/xNgV2FOCHRHCgAAygkOC3UKiZYWHu3aF9J+3fRgZ3fzUUGpxSZQu7l6PRltmml6Aw7EcOu1CzGT17dtF1XVVVkTFU89W6VSWPDObVGBCIbR3NV/OuTSnidrMFcu3VMjjq2vUs4IMHc41LAI2pJXREDEYKRoYGJnkDmizT3IH9tGdTH+rgo2xyiMqSz80M7EhrR02xAV9QvkEiRFwul0+fX+RVjoJleeecU9hsYxd8MIQoycxijHzK6LiaVR98+M7f/Bs//6f+qT9T1f76+rrruu3g5vHeLxan19fXeblshKCQ15FXV1e/8/VfUwBXB/a+ruvH919eYMXG3XIdqrlsu9S1hrbtutVmqYyOiIjMBJHZqXOuD64BaLtNVfm85MpaYb1ez2azEBwzDjuF0nXdvG6c54orAADQ3u8X42KxYEcB/AiTfqvKzDkadSH3CkAXJ3Pv/Wq1urq6MkmqiQjMYLvdXlxcOOfyTmcWaxPD6BBBEwI4tKjKT/IqPJfSqwy4Y8aseNDocEcP7iyjMoNh5TN+O8QEiA12bR5hXuPmxtfrddOszYzI5aio0W0wOA/Mex6De1Q1pdTvLVjGJ4EKINoQk4hZrZqK5oi2pGBagGIcuWE/ZkPIITYlU0z5qAfadJmxk8myW/j1sosMe6bWrIOh9DyNu8VYeg+nEnKCUzNDyHvBaGaK0Gnyvlp13dPLixBcCCEtW3T8IFTPyD+PqGZKaGgJkiKE4B+cn8/Jz66X17Hbdi0mirFzh6jND6Pvvnxv5WbJvqtk9MCMNW2IqkiqbRtDcM65GNNms7l3756pMLMTVlLVZD1W89o662AiIuuNOAWA5dKefPTs4cP7i8Xi5uY6SqorD+LGeIFxdQ4AsYtEhMhmkJJ03abrutVqs1rdXF3dPH361JAQ8eTkZLNZzWazbEOoqvdV8LzeJABQscE1jvuTKmmFMtfAx+kt2JfdH8tmd7Rzx7f9qAztlkCtkVennxw8H/75AoVKD/mh9s2/7s9CcswdDCJ1Pp9fXV21bXtycpaYPni+/JVf+sY2dpvtTahO0GpVFVtG3RhSVZ2hr9Vwm1QSe1WTzUsPT/7IP/ITc1+rrAGRGEzNVAFYLa9vjnv2RuMgl1E0TEAEx5wZR8tRUJfi5rBy8dN07eWcq+s6L2EREQb9gQ4BIKVU1UREMcau69wZ5QhHInrw4MEPffUreYlzeXnpvM9k0DSNjXvJO/Oot4RSSufn5+y9EgiCCaCicyzb5MGhaOy23WZ7095c3Vy3Fn1TUR9FnJe/fbglDPuUi8Uia0dVRdWU0mxez2bBOWcmnhgARr+0p4oARQTREFS6tpnVjpAc4+DLJSI0DwCttnXwAzwJwJJI08yI4IP33l9eX42utfV6fXFxEWO/Vi6pNFu0I
xwO8VWW8adSZY4Po9YxEAOjHFxaLMtgzzPPoxPb9stt5FQOsixd1/Xh2UMIdy40xGPn2L2UUsZO/io7pcsBZCsKhgX62CAacD93MlCF3mnTr2vBACE3i4gZqlm7i4hqUtVscJQe+2Fevb7EApKICKhm063xF5FL2K9dd/iCfY4uXdKT1nZ/Imr25xQ/IWIUkQDIvOy26223mIkCLkIlYg9cdSUhQtcxKkLSZIqs5J07efDw/N6Dd5599N5HT8gxKriRCEZLCvblwoQOSkRmfO9HPE3nkJIg4qZtQ3DE3mLsui5jK/MPI8kAdURkQBlgxEjmQA1VEQDB4MmTp5+4edUTMrsYOykOtORIvTxCMAIQREbElHS93q7X667rRKSNSVVnixPvq/V6vV5vnSNVzSrfhv0h0C1oVmEEhdIqOa0Ey2iRlWiGA99vieZJg3cr74m4779Cm+zH9L9m+/Kgr7tbLud1N33f8WvmTAAYvDWHumo3/uLXHAzVv/S+evLkZr3edl0Hc67m82dX8Qtf/MKrn3z03e+89Wu/+q0vffErX/ihTzVz++7bb/7CL/z6K6+9jBS4jTfddQMBQJ8v26TcSouq7FiSgAHYsIkDCqZIAmajD3AYocA+lx7C55BHflc2yoRCjrZwVPKqSIzROTfu1QFAjiIezGVlRudINSH3qBSR7XbdxvbevXshhPV21cXIrmd8731WwKvV6vT0VGA4fmOoYoT84ME9dHy92ZKpgBKxI5+27er6Zq3WpTZBEhAj886TQ5JejiMi0bgQtBy1e7pY1D70ER5m6/XaOVdVFRGpincVIopIVVWI6EMwM02JiDNv1qHK7uW8OBt6IUTM25xmliOZVVXVP3hwjwkuLp/F1GaplePIttttCGG73eZApBhThuqEVUtxf4zO0cxGhtnDGqoo5q2zvEc0WZ+UaEVEQyu3J62IHL6NHnrDa5DeY8miOP+6F0w3iO4MumzlZDVsgw98XLWP7WTGHIUbEYEaQrZ78lK4dzIjIhECAhD11iFAJr8Rhv1qpRiz4Wif9wp73ITpwTusfengmOVxKTRdK/dKtpBFL6SDdyMc+spLTzLIK3siutms51XN4C9urhofqpMzjWlW+QdVc2nblSQ1a5kQwRmSmUOIErmqzk5OP3zytJWubk4cmiGAiqAZEanpMFsFsEwMAECEZiAiaoqI7DAfdhjPumW1WiI7P7N3XRcBgM/P23YDADc3q7OzNSJmVjEzZmaGdts55wCIARIYMzun2y7m4AgRXSz8t7/99pe+9MXZ/RNZx8yrvq5IRVtSlCw4EC2l2Elq15t2G7uuizEN+Cbvq36JPPDbiM5sSeTVQCZiot6MHck9P0PhBzCzDJMSzSNxjIxXVhgJ+pCAjgrl8mFPpR2SX25zdOUfHB7oqXIwXMp/Rz7cp9fjCzU4YICJZjpqzyEiImlhNu2RCnPXxSw4zCAjIskKbX7v5MErL73yyVcfgegv/vyv/+jv/5rS5vvv/8aP/ejvv7lp33zz6vzBq1XlTlGdzoi6q+fvXm9Tc6+O6y0SgvWbqBlkaGAgJmIghQVzBCbl1KxYHU7Qp6pIe5GuRxFalhI+UBAGjZ9OB7C3CBhdhWbmmGPbhhAQMau07XadN2VExDk6OZtXXWD2beyyYx8RY4w5HLJt281m03Xdzc1Ns5gT8ma7nd2b55W0bluqPAAsl+t5s0ADjQkNmKCqKlZcrbZAaAjOuS7F4GoAIHKZm1Q1qagqO0wp5R7NjIiW642IuirM500GRTLNGrRpGkk6W3giYnbtNubWvPfsMpBGP7PliWeF6gbDIoTwO7/zOz/5kz+pqpeXlwDQdhs13Ww2FxcXGfje+3EfbXSAqSpl3BogYHZZEhJosR88SGUc4pAniDYFZR1nOrpw83qjR2/2HQ5LPsVeyORfR71VMtG4nD1kt7EmFQeTsoAd/8zPOc4OEdu2xRzzCJBlaekrLgXXOE1VBRiOE4jm4KsegEnEIFRO8oagI1BznjzvjiPbICdHrX9ozmYdb2YGMp4SxH038kj5o7qeiE3cX96UECj9W2WxwrXZBwFkh3OeL0A+WwEABqZmCQDQWkmg2AGst5s1+wDEqI9PT9ak737wzJ0GY4pq2iZWcFXVgoApml1fXswWJ9ebm+E40Dj6PXtkqiRwZ2sPdh8Z2J4YLfckEFElAwk3bUsAjsO6XWcvNIKNMQIiggRJIsCgAIaoLOpLVde4XF5+9NGTBw/vee9VRTVtNi0zV1WVophZjHGzaa+urm5uViJiigDj6SY2056DFMEIgQHEDG2IEB615kCXU7PoUCmW+DuKWhj4cALqEuVlF+OvJXUeUYR4RLKP2vdoX4e9HNZ8wXJ0vocvD5U0wB6rmPX+ZzNVAQDI8UEAkGPRMa1rODtr6p/5v/yFP/8//RfuLcLq8oknfP+DD/6f/9f/8xc+/7/81Gsv/ebXv3/v5NGs4eDdpq28c6KQxNabyIaqhsNeOWqmxkMz+cVnvbeUOYr0El82SpCPb7k/JnUrbAu4ZZ0BCI44n5UPngkgdREIY4wfffTR/XsvbTabk7NPfvKTr333u982hEePHsV4+tprr43yPaXknHMuqCo6NrPNeuu9z8kZ5vM5ejOmCnC2ODHJK0u5urqaz+dE0MW4bNc37TLzRh0aRkZCJjBEtlFWYM6P4fvFbn9mRkQYKYQ6T4cZVXvDq0spa9YYYwgBBte0Yz/K01Eb5TKbzfKh3qqqvv/97//kT/7kl7/85b/za1/P4sXM2ra9urqKMWY3/oAX2kmzAbwvSA9wIPrHB0mmlEYxmEUYIgbnsSz5QB4CoJbSoBQXpRDA/WQvk2HfMfKy5fKrMgVHXurkdg5X2IjAzGrSi1BCNCQkMwU1ZtY+INSC92TIDm0I0h6GR6Nn7i6ZgwpDtNRRLjsK8/Gl2d63t8Fh5LhxEVzw4FS4jkod8nYFgpgRMjkGQ0XXqW3atmI/r2rq0im71xb3vq/rTdsBcSCqnEspGaCrwtPvfT9u2lQ3QuBKbi9HMJn2qI9HYtXhjHaBzVtgBJxUlsvlvGnq4G1rowNtNPpU1RGnlHL0S84tRZy3LnqfErM/OQnPnj27uV55NiKK0VwI3nsw7Nrl8+eXOcCk61J2RCPCGOljQ3Yb0711ZKnqAMBMmSlUrmsTohuttqOqd3imSSqWo3AoX040621lHOdUBx/s8N7m49pv6mMUJNxC8XcPL5fbPrlTVWeOVQB2znWph2HeQ1qtVszcSXLED+6dB0cXF09effXVJBEkLU5mGNzy5no+q2eh8o5St1HcIM3W65vLy8sYI9NMokaTxjsTNRDEfMw2u5GyvaKTUX0sUg4nNTEyjuArV7s1DcBey0Wzez9laZi37pqmyf6qtm1zngBm3m7XSdl5r5ZibK+uLtv26ee/8BlmvlktY0yqimQppTEgNgvcHPAvYM+fXWxgmw0gRLx//77XGThed5GcJ0FntF6vzSSpIWFC3VoXQbliMWEAYk9oyOTIDASp32Xcxs7MmqZB1y83x/1pX1fkuGtb
TyQisZMqNJq2VVU751PsyLsUdb1eT9ykWf+W8koVmqZ5//13v/KVr/zhP/yHr6+vnz9/7oi7bUtEXZueP39e1/XgrivDXBRA4UgI4ceUYW13zK1lBEN0iA6R1dGAhqjvXf3s98CdU2S3HQsyrrgBFam3wzzlrXQbz/hlcgOwvHF7OJ5DGV6Of2T8bCtkUVmug6lPkTT8iQQEaEpIeaQMRqaEGILDTskAVJDGDWnVPjx5T2Hks0cFwPf5sXjY++ro7jgMboO9H/dOH5WCd09nl/7F4vPhlASW40SAqKJgm9Y6M3LVmrqa3AkgErHCeWg+cfbw+fX7a4hC7Im6dYs+oPMpxXffffvRo4fX222Y1Q4OZC4iosFhCA8MhpIVpX9Pu5QapXvBzIB6az3GlCpFJu/9drtt29YxjU7IEfGSbBQ0AMAppdTnLNxutyHUH330dLlcPnp8r9vmrJMhxvTRR0/ffffd6+slIlZVZWZ5dZu1b6ZGzAeTQA1kl1DQ+i3t8jwoIoYQujaZCQ4T2UEpb8kPKQMzAA5hdXcZW5u4Io/WKf/MxHBb5YPB7ALEytlNPrxNl8BthL5nr/TAnNS5pQudrPDGh5T6rAVEUNcBAC4uLtq2bQ3dzG9iWpzfW3cSkX/4R37kvadPX3n94T/wB34cw0wiLbfta3VlapV3to31ojo/qeJmTTgzYASVZJjjn01y0tN+YIowXZF/zNbvhG/vAObABVOIHULp8Kuj9DDyBTPnc8AAkHeOQM2SxBiBCHOiCdY+dyxADrqpqgodry5XH330UV3Nzs7Ozs7O5vM5kQshKMLzJ09yJofVanV2diYiKanDPpxeRBi9im7WN11qQ90I6Hq7TaQcmByTUCA25HzIlQiVHDMTubzqErOqagYLGLddZGZH2YDOme8ogzY0ddpaUgGAnOkCABC5qup8PiKDohREiJh911dXV5/73A/+9E//tKq8/fbb19fX0B/ZoKx9MxRHaFux4WrDJsXdpZTXZqOfbA9pOKjWfUHRr+/LPaDepziQZc7RcdhjSRg4nicu3hRElXUYly8nn49wGxfEo4nQr1KGsy17egERjbKPngwQNKchATCJKW+fOOcq522zVlVFzdHU2dyx4UiSqmbNXqYoGWdaenrgTudcqcrNbMjNhVOhfXsLJcv3b2zP8sXhnETWwaZKBooQnNO8FFZIAKt2q9tORFz9cKYKwg1i3WrNsJGUAKqmXm+7um6ePn1uZjmLM6ruKeBSsuj+jveIvIkHY0IHJWjySzVERzgk7sgcfnOzOj8/n89OsqcrR0CUvmsARTAi4N7jDabg2DPT86fXm81WxACgqqqLi6vvfve77733Xs7toqpg5Difbx79/gAACLwLi0PNBIqIBiBiRLv5mhkRIJloBOw320a67EW2YfYaTeAzioOj5HL4ZvyqbORonR1FHCsFJU4XZEU7Rz+9ayH74gPb/Wm4o3m0g8r9sX0AQBxNUXHOg0JKKSVJKYUAV1dXqpp4HpW5ab76B37/vZdfWyv+gT/4k7/5m99zp/yP/GP/+Gpz8/Vvfmt2erI4P5GlkoeX56ypvZ6H64un9Oo9BPPEKpHADERg4p76GH8A7OPoBbXvPtDUeoxk1t4T0uW/1vNgyYlTlBX0SVniqCqqeeeSKAGq9LuYOeipaRoRSSqnp6di2G67s7OzbInO5/Ocf4qZU0r1fPbo4ePvfOc7mSlijHkflzyJal3Ptl0rInW9SN4DmqKtus1HN88jClXeTDzmgEpHDISATKyAyAoAxEQOgHKPAJDjgEIIla8dh8y2+XgoswN0NSCTFwVVUIUxczJ7R8hUHMIZOY6dv7y8fP2NT/7UT/2Uc+6dd97JWXq6rqvr2Ztvvrlerdi5lLSqXLnyKyOe9g+nHKeB29BtRgAIk8QA1q/MeslW5NvPLfT6ifaD8obYwGPDgXHiA41NVWyuAog4qGEsChT6CRF3ef6HlkfHZK6/W46bEXBWwAzGgEaIYIBEDAhECN4xGogIgnKRSX7sbrSddjxyi694wnF3F8QjknGQSAXcwI5ss97eRU6bZ9ivo/NoyUCSJDAFQ0TxIECdQQvapVj7ugJ36twrszORzUfaqndRzFehbdvVahVVTFJKXWWNywH4tm/pHLFKdr/mN/37cRtmPF0ABUUSkaplN5GYdl3nGavgus12s9mcnZ5k5s84yUTA7AF0yA8NiP3uqST13jNxXbubm5ubm3lT8eXl5S/+0q/XdX16ck5EKgBAOeR92HPBnDM2o8HMALU0u4gop8BlRrP+MICqIBIzxrg7Rd4/aJ+t5ZAhJ+VQPx19f/h5ydXHlNxxQhlsveNXIAzNHmnztnIb3Y9cUUBgX0JByTlH9fd4VGnIoZMDRpiIyDl0zs3ns2fPni2Xy+r+Irar07PqP/qZf+/t974Yk4o21zebX/67/+Ubn3p5s9ls1upD8yu/8jcRsZ7Nnd54hG69bG8uSUUNwAxEjWDwByJkIyEnizjYAL4bp7CfBLSUEYdCcALDjwX7HeaOiBDBaPPlGIW6aTar5fPnzwU1JnPOxYT5coLFYuGcOzk5yTUfPXqUFLzzT568LyJNPX/27NlsNnv55ZdtyOveu7VVQwjMHhH7nJGSTBWMGLHdbq6uLg0AHNwsV5eba62IHWkCj+SBzAcGY4dEAApGKNIfJNWcedh63RljXMxPK3a9ywoRgFJScgzAlWvm83keWx2alLTrkvfk2A8r4OxotVGJinQPHz788R/7CSK6urparVYfffTUkqWkz58/f/LkSVVValjXHgcHOOwvlV4QTbfhbrTqxn2rocVdHQeYw3lKgWBmKnvyIafYMjPivUGOBDaO/3DA+1S3E3Rl5VEpwr5uHoE5tjl+OESomRJkQxEwZ8tHylIa0UBCCGqJJBGgp/7w+qHVMrzpp1uOeaKbByV6XEnj/lf9v3rrEqgPxdxfpUwMqcNeco6QvLDI6zZHCJpaSaq67lokD0CrFC+vl4yOkSoXHlWnF6uOokJF6Mihw06++Y3fruaVmCLiV7/05f4mDUa0iXVPU5DtTcN2yc8G0Q8TIE5Ms+xKrkMgdFVV3dzcnJ+e5V7Mdhc8FJC1IfxKAMjXIcdN1PWsbSMYSbLvfe8tImZ2VJxaVtUhO1uOvRqVUz8wIgNA09EcQ1Xps4Wo5cw4ORn9mBd+QhylsTJM9lb8lR8eAgfuFLslEdzd/rHuxlXUXYvXSe9He3lxI+OFhdf0fLBznM/AqPaZ4q+uLlJKs3YTqurhif2zf+q/V83r5aar63MACBW27cb7ut3K4vQEIHWdsq8tXc9mM4updowaISX2PBhfuTs1RNCszWB6EP7j5O/E/oB9B9Jt5tQdbR42CAUkJx3hsF+Tt0Jns9mbb765XF0DgSDHtvX1XBFSyokFYwghk/Gmax0HcpyzT5jZZrMZjOZUzZpnz5699+775+fnq9Vqs9mcnJzl/MAUPANvUwIDIt6s1l3XzU5mrnKtdp1FMJ8kBSRU8C4IM8Nwls8AgVRVkuYbMHIAdt4FyMFQDrhqahl
yM6aUENj7SqXzvjLD8eqCEMJsVpfaYiQeGy4b+PEf/1Hv/bOnz5PE9957b7lcAlDctu+8805d15LMBTfKmd+teXQUd7f8OdG//ZEexD5pz5j4D4a1rBZpdKFXe2KKWsjkUm1PtFo5hr1RoWSf9lh54liGwg1eNlU+lCHZ1isxBQMANUIk0rwnzSqS6qqCXv+hmtEx4QO3CLQ8+PKD3Rz3/QovIg/xwDLeNbgPJTuovzeYYUAKMG5dSBQDdc6JqYJ1JqoCLazAZlU985Vv+TRUJxAqoBVCMmWw5dV1U7vmZNGKXrx38alPvuHgFsordcwOGUOyMETMWeNHzMVODkevCjnqxVchdtvcTD7/E2MXY6yb4cIT5zAfAxg0hyOSPotKr5WZvXPUqeTQza5bi0hdz4gmygyzY23wMmXJxcNVBIM/EBVgyPYyhGVZsXQbT6nfCp//hrpnr5RhnP9tykBYuzcDOo4YQ5M6R998rOYoShHmMCQAGcaDhRdaRiv7sNMcTwPQX+IWQlitUtd1Z6Fer5+fhub0jYfPLy/nJ43zLJaQ4qn3KmDzoHKNpLV3kkwDWdw6wCYE7SKqaRpOIvbOHoDhkjEwArx1JXFbOaosp5ryFoD/Nyi5BSIiJBhkdLYvEfH89PTBgwfk6Onltarm/BJEtNmszCyfOanr+sOnH15evH9+73HtcbFYIOInzj4x5PSoZDhhnN2PdV2v1+t8cVlKiRwjqpm2bVsxP3r06EL1nffe/ujJk8126zw5QnasKaEaGBoTEzvHVuRPUATvfeX6/cukkp3kOJxFtv7ETkJE55xSXn+zcy77KnI++XEDGBHHBLEAIBJfe+21+/cfrtdL7/1b3//ezc3NbDa7eH79rW99y3u/3XSLxWLTbvNx4QnfFfja/XkUdwdqe0yBsBeGqYV9P9DDuD7OfewaLNWODasIRNxJ/IMBTCgQCpU86uD+TVb5Y0ahYlTjmzENy/hhGR47wqqM9FTLM0EwI8QcPatJfOMRkYnytYY25LaEQdMTUS+TR8gUi9cJUnYL3P33h4bCHkyKw77Tj/uZ4JigDfpV2XQ3elKy71rVcmJv6K+FRVOLKsGz8559yIfuVDW2XfBN46uG6hsTJVTVi2fPvQudpG1Mjx+fzuvGKY2iiYZgPCTi0uDaxcSraSFomBmBh7RnNi4is5LDfPDLkBBTF5k8mK02W/an3rm2Ta1Axd55U2g1JtU0xiLkiRkAMzd1SCmJdPm4cEq63W6///13r6+vL69iVVVmgLgLIiihqTpkIwO13ozivPwyRQPF4USHCiB4IhIBIq+KGSAqgLRLUkNsRJjdmjBOEwHRZezRflQtwAiKPeukqIOIexw1uvRHJtkL1BpSJ4781reTgyGpJEczM8r+9tyjIo0DyPtPRbqM/EYHAdS/298yMdqxzWCy5AmOB1t78pBxzLtNTQTAbddrCFIUk2wtIZIqgCIjq0FwFaNDgOvr6/a1R62tzQSjnp6eqoBqvLy5RkRy3HWd976LknM2+ao7PWky9rvt2lWVQBQAJBxDXE0NwAgN++iYPY/xyLSjn3OCTeohnP1YAxPuxOmeK2wPQ4WhXaJ7ovVz3h0ENsvGIgMaASKgGLAREZr2LNluutNQzVnBmQdCC0Shs613LioK6cX1h8mWqmeQ6Fu/9Z17955z7Srvuu32q1/9oYvnH7399tv379//0g995fR0kVICpKTivXeORLskVs0aSpXvfAVXVH20Wn+gjr/15O11wqvNsxC6GkS6YEbKcI1L1plzXoGNfHCMwJCiM4FETX2CiNfR4aWmp8+vr9Nift/a6Cgw+U5iAmxFqaqU2LFjF6KYAiu5KBTmZ0yEzGb9FWjeEaK13YZMX3nl1YeP7l9dP0+qz549u7y+Ruab6+v3PvioS+rZVU0dJbF3bcxpv5TJq6qqqKHjCvMuGCaEQQ1Dzuo1JDvs8UkDbec0F5NzpZm3+uO/NuzyjvLQmEu6GJmv6HQMOjEzFe0Zdhev2vdBo5jNRnYftoN78sfMsngR7fl5Qn6gRyIoR8kzUfZm5jXGXgiREgsCADgBVnHEV5tN9fhR5Xy32TpHkUg15YxaiEjDNZSIyOx2XRRXOkLRb/5v/zxo0pH98kDVdhkaUBH6o60AoKPgykeKc5YuHDrJP/V3GZY3CWG/2s7olt5gt2E1BjpsChAQCDB4BZWoqcbk3WobP7y+qkJ4cDJ30L3EuFyDbe26DteA335ygVx5RWhXb3zhB8KZuQHcezIIRsfIXmDU3ipztCtGnTfu3fYHl4eYOhxjp1UBKMaoSt77y+urugl+iD7IiMmKpFRdmYKzG8Q5F0Louu7p06fjAUHcWZcvVHYqp6C5vE3lnEPCkWeq0HTbiIVUncDk0Gg6apGVkn18Mwr6yTlUHAzP2yyyo532PCMAOOWZfX1vk68mL/PNB4efFBp3L4b5aPtl72VrI2CzfCkXAQhEhCJqhGNCgJzyafyTmbMPdrFYAAA5Pjk5SSktmLsuLZfL+XyePaiOeLFYtG3rBl/IHdM/BH6B2cOA7ReltMPGyzKB6u7hzvqjQB9hx94pZj8HbjabRTWr61osqapzoamrtm1F5OTk5Kd+6qdijM+vnnddV9+/r6p5k9i5QEQ/8aM/9ld/7q9fXl1XVZU5sWs7QCOqurZtqrrt0mp5dXHxjGkGRO+/976qOnQ5pDl2gog+oMTWPAP0m5QuEPZit8vc2q7bd6+u8/n+qqq7KDAYnYOJQ+xcN5zTpSGlYvaLiKJYRMQ+XTxo0zSn89njx4+TxK7rxOyjjz7K0Z3vf/jBk2dPfRUyNznnJKbF6UkOjQ4BKRuomkSimTGzGIQQ8kUFoxIdV35jXrydvJ74Ng9oHvbZv0zjM2lhbOcolU7I6Sjz2uA+LIdU9ntYmDzsCfYj4nHSr1l/+etYb1SToMpIo4df1VyZ2Kcwba0o5QqYDsCCt0djlVi4m90m5RDI4/vDmmWdyaisWAhlNxI417axbVtZNDF1wbmXz+/frG8uJL7z5ElM3byZicRO0qc+9amTZu5G0XQ4GihQeBu5lDWzmlRN4w0NOfp8lGIGYGYiggiV5+VymR7cc3UwM/aOUxaUNhnASP35spDsKMtR01VVldu05SA/Hh+o+QbLUZX2oVvqgCylBKMlqDQuhA55Y0ocxblh21spHocw9MT8MY7o0RaZTK180/fV21L9G7Xd8syKCNve6ZSrFnlZR0adoL5PhQM2upTLIcHgJjl6qqrkE3ZccvhYM3/OTDkdEjM2tVsul8OGRRQRlhQ7UVVFCCGkLpLjlFIg2qyXb37nu4jYxfVms3n88NFXvvIV7z0CxBgd8e/CQNsbs5THOW6ps6dHp3WGmrRPD4fScyijiwJKH2QJKCLK6xkRaRZzZIqivqpcFYwwJRFT7/3Nzc3pfCZiMYqqzucNeXeiJ6vValbVNzerT7z62quvfsLMTHE2W3zitdcvLr+RL6ECgHyPrJEQUdQ2pY4cKUhq265LT549nc0rRIyChM6AzUAF1TonnofgTgBDE5HI7G
ezWUrJETnnRKJjTClh3WUDazz64pxj5/oTaRkUogKQE1RVVYXgEVFVwMTMHHNo6sub6xC89/69995r2zYleXZx8fbb75hyXdfZr25mIbj18uZkPhMRANu22+Vync24DN5ZPV+tbnISR0Rk50UE8kICBn9uj469jFRjGYc9ckepgEtsTuRqyU2HCn6PRO7YQP04sTdp+WgQ4sc0sauW/2+q/YIhJfPEIoL97PbmOE6ZittuYF+I3VEOdVD5VVbktw3+tveHNxTuhoEAOl01TcZcfqKqbdu2M3aO265bpxR8CM6fJjuh4MjWcSNmEIXZzOGDx48Y0JUTyGUCmpEsxoj/yTTHvxHRTLIas2HrPrt8s7uPiJB3YijfV+p978dzznVdVwqmkkydc4iSPxyze9+BtjsIdzcHtCzgiKjrurx/1u85keVOVTeIaEpQeHfNdhedTruA3Zgnv45/jvMqEEmTOwAO96gm9Fc2cnQ8++1PwTJaqSWKJxpl/DefK9CewHfXj+TSnw9htt662uXnO0REl/oAOcoOtIH8cxSo91Uy7drkvX/w4MHy+iaHzmqR7qAPF8ioV6t8iCkBwKd/4I0stNEgC1AeRnW40T4xm47g8dgeGxwTuIfNHv+paIoOJM4OSlM3Sc6SvcuF2yNLFQBijK++/gkf9KOLZ7DumL33Pllyzm03cnN5vT49cc6Rd+T1ankzm81OT89PTs4CU4ptzgZM5LabzV/+y3+5jSmEOmtrM6yquuu6lDpy3G1adAoJVOHZ88vvvPl9RGQXzJIBEPkczaYqxCASY1TnAImYsWnq89NFG0VVHYV8gnmzWrZti0ghBGayvMo0MkPvKnKu9p6ZQVStlyd9ZmOzUAVmbtutab4FOa+wU0oRAJbL5fX1zdXN9d/5lV/13oPzffJ5s5S64Jv1arlaLu/fP7+5uZnP55/9PV/5zne+4z3HKPnqpLqe5b1wZhdjQkSRMd39zlbeccc+O5f0M0rO8WW51i+/LZuaUNRRMjv86ahKhluod3woA573Ce84GetgBxqWd4aBmWlMqFD7YJ0QoiI4R6MpMxnennQqrqKCCUseM0TKKZTuSdxvHw8+NNslzy8VzQRKuzcHqXMOJzJWzuV6u3YULjcbXN/cq84acSfKD1wTUvvmm2/ev//Qd3q1uvriV7+wOJ2n2O3dgGFFutExJ/AomsctDYBSjU0cdFO3ZF+pPwECqhZjZCZAJKLlalVVnpljbK0/VkXlhzCQqQ6ZWcrBHIXFBF4Ax0F2KH/zgSgtAvdHX7qZgaKYGefLy/Qo4QIAFsEXEwwd1cdGaZ+ddEIZk/qwTytj5TJbdVmNbPohFJx/qIAtX6d+4Iwq6QQActSqITLQqIBhWEAfXs6xG1iWtuP2FQ20p/0Ik0FKyswnJ/P+grwB3dJFMSV0OZeTmkWV7DD07F59+ZUQwmp1U+6ebDYbM8sJaSegG58PeXsPQZajUUb9N61WgvRu3Tx+qEPsyWF3ZX0s9LSZGQiCG+UmAKSUyLuzh4uO7Hp9sd5uwuwEkFJK3oemaXISjHyI1jknqkzkkNq2rUJQVQL03pvicrm6WW1eeumlq8ubfBwsxsjMqmAgQAYGHz199uFHz957++mzp8/Pzl9yzhkpYb7xDfKoFDoUZGcGAqBMwIzMPCO/WW7ynjoR1fUsm4A80B4Nxy6qqkIO5CgzGgM6T6paVYGZc+Ck995MVYwdIlqMMXukr6+vN217vbz51b/7a+PgQ6iyk7mu/JMnH33h85/92te+9jd+7q8Fx1/96lfPTs+/+c2/X1XNYt60XVosFuv1OoSgqobALmT9EWNEMgNA2+0ZYeE5LBn8UCVP9GspHkclfUg/I1fuqasDorrjzUTEvWA5ZIq9dmh3jSANmeSMUEQYoPaB8k2XBg4Jh6MsAJDPOpeTLXnwkP7v+HNXuZBUhRG7wwzuryUmO0iZDe/sd1wX9UddBsNABsTuQswQSQ03KaEKromIZjWeVYsFufb5NXaGkjO/us9/9nOni5P24sq1bVt2POJ79AjtqzrUHMx0i5MkLzgGjsrb+5iVygB6i5K8OOc9M67X63vnp6Gu2naThbgiqO5mfQiO0sNzWKd8eYtEy00PYfdDLFy+qS2pxE0anOfAzFQoJwBQzddj3b5y0h23lBVKBisNC6Pdm6HscmvfXUoitj79BRYvrfh3WsYZTQCV94BJpygeDbJy5IiA2reQUZ+rjSGXh3CYGgGD3mfvxsFUtZdk2SzbbDb5+tg8KUYih6q9uxvVUtvlXfvl1TUzr7art956K6X06NGj1197DQDcsV3tCZ2Uem4y4BIpk5x2vSlyi949pMDJ9K2E52G1/XdqCaAPDylft13nqvDyK6/4cP/k5CTftB2TSpL1cuU/4Z1zGpOIOF+ZmSVB7xFgu91eX17MZrP53EJddQkANMYoKS3Ozohovd6261Uzh+ChCgGNHdfLm+3TJ5f37993viI28gERLWdENEG0qBYGH6NIJJo7ZlVhxhhbAFqtblIXZ7NZ3pv3zjNz5koRAcAQakNSGGW0AgAaOOcco3cNogXnCbVtEzMyo5nmfCNdih988ME3fuM3va/yKtaH2szYuSw2QOF/8i/9jx8/vH9z8XSxWLCv6rr+gz/x43/9r//XdV2fnsy3263jQWugiykCQDLtUy8CACnuDqeo2XHiKfm3fJ7EG5d6Y3weWXgkzvFhJJ5Dehtp5zaSG8uLKONDhdcPuKyDAJa3GJWJNcmicjU50w0O03HDCjgT/KT9w39vW3GWkIEDD9ZEBRyVeLfOFBFv0SYAvalRNl5Wm8jzfn3oXQuwza6+JBG7pgn32L96MlfE5fpmcTJ/7ZWXtusVOHDjHirulwJS/f9uHeLBIMqNwAyNok0b4xqEKXWxS3FGDXmHWizjdgtR2JubGeIegQ6YOAKdPSjftghGzWeQENGwDwHg4abM7OgeW+jbNwKQwbyYei2Qj6xpEJFsF22Ixa9U1CmGemsEVtndAIfpWnY3OzM7ym+IiHh4RA8HA2Wc2lGCK+YLcIsX7u5/dw/9P0OCfrAhQ68y82q1atv2/Pwciqg0iclUVIzJ5WsDRqrLKY4/+clPEtGsrkfX36G74jb5crdsQtyTAtO5HJSjOji/V92LFYCpEaBjRt/xV+oztu6NX8BEBJjq+ayqKmZGUuJKGNerzfLmalY3iEYEXbth70xwvV5KSsHxdrtdrVZPnz51oZrNT4IPZhaCG5Jm8PPVattC5VNdhSp0AO69d58sV9398+CcQ69VTSpgygAQo4bAHmdVVTkmU+y6tNls1ut1XmEjMpGtVzeXl5fOufl8Pp8388pxzzIEgMw8n8/FIGlCBtEoEhHBh3CyWCAaYYVos3mz3dp6vXTe+yrE2BHCs2fP3nnn3d/+7d9erVbMPoQakVNKVe1BzTv46IMn//P/2b/yhc//wLtvv3P/3um98/sigux+4sd+dD6f/5X//L9IAghw/9GDfAZ6UTVt2/q6sbZldmoybkNl7TuifkJOJS0dStQR6RPJProYx/qlv
/ooRR3TDVMyPuz6kBoPSfTW9m33vtR/igJgp7M5m5ooMiGapt3tDgpWbs+NczGzvXTQtw/mEIxj78NIdhV039s8UdhHG79jvnd/UjKydN2W2TEEUUZHRNF07sIbJ/c+M7/3reVzI3v11Ze92axulqtrN/EWjpNMKcGeq3kXB7/r3giGYyflYa9RAZc6UlWxSPc4LqxzlGZOf5E2icjZ4OSc2EP5vGBKKaVdR3ef3xp188F7G52KuUjvSchaqY/czsLdeiWBuDNsyUCOIhX37bKJjD7Kh8eoqt81L789+jwq4CNsZgQIBjvXdGlFjqx+KAgAcp7NPR4bU3znMmZUpuKUBRxbuE8gMOKrJwzAfLaciATME+WADiQLITx9+nS1Wo2kwsP9pvkWATMDsOAZh6tPzYSZz87OYLhZ0rMbDb7buHdv4rd4uj5W105+OorWiSSdsO6oa8sW+jrYe3p7OOMOZcRIzOvtarPxzrl5M2tllZI6HzTQW29+jwnAjBEVCVREzDunEtUSaLp49vTDp0/e+t7bP/wP/v4vfPHLzlF27zNj3KZPfepTTHR9fS2pNYGrZ9fvvvPk1VfeqKs5gkOvzmFUcYGZKgCrG5+kcey9d0SwXK/ef//DZ88uiIjJf+pTn26a+YMH905O5tfX1ym1MbIBGgiZkDlkCr72vlKgNm6jtG1nqhqcn81m2tQMJgLk3GzexNh2XccOnfB6vW7b9p133vnt3/6dm5sb771zIR9qYIdmFrttUy/A4GTeMGEV+OxkETz5WSNqi9Ozn/7D//APfenLzy6ev//eBz/7sz97fn5fVW9urmbzk67det9fyjJw2s5veYft1TNIQXhWWKsTqhiZcWy2FCOHlScqsGyv5O7x7dGDAAC7Xc47pGhZBMyUoGf/3WnaqBIMah8oqYoSEZCpabkBXM500uyhB2giWo9ykxX7viXvlI3s9YJ7iBvbVwB3zKzJ9Y9CuxQIJX49h9V20zSVqq6ub9b3qnkTJLYvh8Urrnlf4Tq2n/nMp5uqXq9uANFNsD5IZBiuMdhpUBjShI57sTpkmMsybrILiIiikdDlmsSQLdy6rmZ1YyDOOTJYLpf3z88dh5TWHLylnmrNdhd9WHGursTohExLiVaA8gi+i8QRNHrUmf3gKKbxZq5ds7TzGQLskUzZaTmGQ4k8CRUeXbWjfTMoEhsnOBn33W+OUS2WGncc2EQlH7ZGB77xfaj2upYAy5ke8thkwOUhrnEMipAFnJkR9ZlE224TKpdD7UbgjL2UXhYzG3MFm+V7HSylxEjjGmKiaMehjtfKjuMp8XIoOvcZb4zPl8kn48Bu0+hH3yOiakLniy72UCYaY8S6rtfbdkS3pM4754nvnZ1vW5nXjaoHCwyRqDk9Pe267vn1M2S3Xq83q3Xbtpv18urqan1zfX5+fv/+/foLn3v+/GmMLQCIxHGXh5k/+donVqubpx+9f3X1/m/8xt8/O324mJ9V4XTdbnNiEF85FYfgzs7OiRD4gUhkRwA6axZMftuuzayuZjF2MfrFYoG4aGZhu11fX1/HiAAz733btrPF/ORkYVYZ4bbzzy9bREQytcREJ01dVZWIpZQePLi3Xi9Xq9Xp2eLi4mKz2Xz44Qe/+Zu/uVyuYNB53jMREvkQHGpczBoC+Obf/8ZPfu1HGe2N119DRERG9s4HF6qH98/F0Azb7eav/rW/+YnXXt50sYsCJp4qEWGHXdd5cl3sqhA0e+xu2SraCfdCahUCdupKseJehJFNxugz2Jdvt8kHMyNk00EUJ4HdftBhSg8AOH5sAY5Jkvync0HNpN/5zfdXqwKGUK2fXZ+9vKjZr0Tz4skzxxi99zkhDOxLhrJxHHVNsXiDQgEdjrwHxbHxIyIC2v6945gXgXbQQgHw3ee36ePhIV/wXL7J44wx1k0wkXUnN0IX1VIcnbBbAH/5ldff315bCPcfnHexRQREdCVcEBH6wAKbSK4REKMKMd2L1DWzMbVN74VGRcQkXQ9Ngaqq5vO5cywigJrHLKKr1Wo+n/e5eQugmOmBbZTLXj7nstiRVemeIYmFthvRk+9dKUDcX9ozvimF74BCHhPzHv476XFs4ag+m4x/eJyeDz4sR1vY9YIKiIdXY96m2l+8i72+dNraoVC4u5EcV1nAZ3dQ0jm3XC7zUeAx8A0RD6dQGEZ7+MIxdhpp7GsyzsN2Jjw/qXCE2wtJesi9Y5tla7f1TkTYb1eO7gQdj0LVdY3AQ4ACiuRUcagiRHR6ehpaQRKRlqmZzajr2vfefqdZzC8uLp4/v3z6/Nnl84vLi2fvvfde6hIAfOlLn/uBH/iBR3UlSiEEk+ScS6mrQ1BL333z27JdLxazs7P617/+3baNr776CbRaDOtqASTkmNkxNbWvnXOqaZsqR957JlQD8T4458Ds7PQUzFJsTetmVp+ePFJL772H3hQkieRbEV3la8TGiKsmXN1cACgjee9DcEREDIgUQp1ThWy32/fff3+5vlkul9/85jevrq6bptlutwAAJsxMSKrdO29/yAgXTy9/5B/80h/+qX9YU0dEs6ZCRGIfQs2+QiZTEIOmaf7YH/npn/trf/PJRx+K2b3ze/PZ6Xa7VRATZIQY27qquq7znu3YquuQwm1/x+E2qru7jILxUM4c1ilvUyh7v63cNuyx2ZGGy9tnaWA2wJw/BjxCXG00pURKQIwunxrtL7Ya+FFEypgSAFDoM4/g0PUdorJkqANpvwcNLNxyfSN6vPLkPNLHCsnbhsTOI4BFQXbqaK3Rp67qukWoH81PHs/m1YP798/PkUklSkw5tdsuEWh51ggOEDNC0Mzy1mkJqVKXq6X+kABSpoaqqZum8cxgoGbOsZkRg5qtVuvFYpHPJ4hGLIzEEo7lMLJuxt3Cbrpcux2C/QlFM1Ppta+IEPVOCMSdhD2uPHbXYjLuZ70oP5x+REc8Ckepp5g4f6wOBoDD+KCPLXcqxV7xK5YHr/Y/p35lfdjgHdLhaBnvA9wHnSG6HOyajwKPzhUcor1Gis1f7bTyLnOr2TCBMRSc8m0aBb6I9nz4owYtt1om7uuRILXonfaSrxWwKkrZ/iSZ/gjPThIwmaEBUH+ZrjrmfNW8iKSo+U6hfCFYt25FMiVD08wNOqPk0RPWhN5uLv/e13/j7Ozk4uJqudqsVivTVFXVZz/9KedZkzx++Ojy6iI+1+ub1euvv+49b9uN956Zl8vlW2+9dfXko7px3ss777x1//794Jsk3rkayRkZe+e4cjzzrnbMItHNGjNFNJNWNNXO1XXtuHcPtG17eXkZY/Pw0f3FYvHo0QNrN8wwOHiZyFXVDB1vWk3SxRhFUuXDvXv3PDETqCp75wNrSh9++GHdhKfPnn3wwQfL1XVd16pahyoL/cCcUry6uPizf/qP31xdr5eX//Q/9ace3DtfLpfzWR27WVZLVeXrumbv8/0T683285/99L/9b/6rv/Qrf+fb3/72r/zqbxKCITx8cJ+IrldLxyQSmfs1EBxwwYSjD82v0lyDA0lVyLQpnx5V3mUvh88l
BcKxchurjsOeSKpeQSBkgcPYY87M2EHjg3aRwBQBqU/mrweL2sO+yu5KljkUQWUjeSA7iNleCzBYHoe63PZ9CbfJQ9s/2jSB20RN9L14RhVNAuxak+u4rbvKgnRd11T+wXx++uhhFdw6dY7IkXMT7VtelXWIp17zag5c2qnq0c0y1h+D/QDMOa6qajab5Y3eESiWJF8csl6vRdS5PoB2zP1tfcz3bV674xbQbap3NAazPM0nB7NpVkjbKR0fZyoAyqMyKnO/3VYODYhigjh5CXvYpcMhHW1zOL9kwwDHWrcq5tt4ctDlH2M1D2OYAv+oWPnYMsJ5+ETJwOHOvTyiIz+Ml2gBgJXJT2QHsN5JbgAAbdv2piHujWqXaBB7M7mUOGUUTOmpzupWFMZPbL+UICpfjo3nZAX7kAQAqBwXCrjnTSZKKS0WixgjAr/2+idvVmvovYvoXaXWqdh8ccpBFBKgEnqmaj5vRLddl4Kv792bnZycSdqqauVYJeVj9++8846KrbZt3jRtQuWZVbVt22fPnsm2ChWmuHKe7p0/XK9kMTtnXxsCEpEPCAHMAwYkZsy3HQiCiDClrUGqgmuq0MxqIrq6umjbNqU+dO7hw8ftzbMubkQUEVNKW+2ck6aedWmTAZtSQrST2ZwQU+q8Z++9C66uw3J1/fx5ev/D90Skrmb5pJD3AURPT+br9frJk+f/4r/4Z/+xP/rHzKTdbpoqrDfLR/fvxa67d+9ef9sEWkqdmRgLM8+aoKo//A/83q9+5Ycuri5Xy/U3vvGNn/0v/9pv/Ma3BODRw/OYRERcFTablsjJXsDpNNinlNGlPDnUwUc5ZZ8jdm8+lo/gFsH1saWsf/TbnoyN+tyQZgSoqAKGABU56sQ5FyEB9S7SpP2uZXaR7k4l7c9UcW+mh0w0+WnCUOOfh+pgfGNmeMupVDiWlOOOMnJ9+QYAokoFgGqiukld1Hi/WSAREIQ6vPbKq/bKA1KIkgg55Cs/RxlRBlKNToxJl8PVBTsH9bgHvKtJRoNeB9WmaZqmweIsr1qCIkRNRUXEe++QE+okjvo2BOzPfIq5QzYo0WCKSUxMB33MZeWJaXMHGsyGW8RxlyligpURVWXLL9h+MbU9Pr/jkxcsR6lwn9U5X3gyqXAI/Mm8Dlv+2GlO6iBml1avaDebzRjqPLHzylYPsYaIPLzM9ExEsL+cLRKY6ISQcIjtKkWGFft5+ch3WWEiFHD/OMquwhTUOxh6yifTyIZACoD+VuzcmgosTs+ykaqqzJ7Za9ttY6pnJ9XMRV13XavCAK6ZVcg1otW1z9kZTavKc9u2jjGl2Lat9z4m4VBVVROjiKiIIHBdze7fe3ja+CRrH6yBJlTz4BugxofGmIAQyCEEU88UmD0AAMYYW4kbICTvGN18Xi1mc++5aZp53VxeXq62q6dPnhO6R48fiHPbVnOqLBHrtmukxoUagAiQcKeuLOe98tx2m5Oz06Zpfv3Xv/7GJz+VIQAA3vuTxWK1unnppUff/Pu/rQZ//l/5l374q182jefn513lLy4uKu+32+1iPo8xOseUL1NzOSMvMBESRBGT9mQ+a6oH+NLjz/3AG1/7iR+/uLzquvSv/mv/GwAgom3b5tNxMGGHAaElEQL0ocmjKii1wqEOvo3Ny2bv0o6Fbsj/Hiq8ox/CPhPdVmfUU2b56hVTBDH1BmhgKpTz5BCpKg5ZVnDwBiOiEUo+Slh0SkSGBrIzec0M9iOZbxs/lgs1AMgrQJjuAU+V8YFA/t2WESMj+3eiQgiEyRRFCFwraSsRPSrYw8cP9MG9SzMPBGqQ89eUevQA1jC+6esIlNq3NO3HdbCZZf+M9362aJwbjmKq9gYTIgMqEYAykQBst9smVMw8JtfOsBmobW8ksEcxVDwf0P3BRBBYkiRNRcoIGg7+TrecS3d02WyvIAq/NwLZgbt4opwmKM+goyLD0dEBD4VKHfwi5SgDj11/bP2S+v//WHDMlblfpgbvYNXVPnhiBkTmMRJ7oPgpCx0+c4G+UaFaIaGgQBMiEvWp8MeArxKJtr98McjngnqHynSm2aU8JHejAr9H28/VomhWwFBKWyLnqOtiCHW+/iTGGIJT1RRVEohiF9X5ysAjkrFDCGgEIN4xIlCfYtkxRFVtmobQ2nZbVRURx5Solaurqy4KEQVfA0CMURWa+XnbkSk555Jy05wb1uwrDKRmSJ6wZqpDqJm9mTG36/VyvVIC57gKDk/ms2ZWBefNpGlmROxvfCdpu+2ePrmseQzPYWaOra5WKyBCpzkTuPMUKica0aByHkBms5mk7vHjx3/8f/AnfuEXfmE+P+m6zrlexL/+6mu/9mu/8Xt+z+f/h3/mzz54cG+7WTa1r4N3bKAnMcamqZGormvvXWZA3hlLhI6a2puZSAemIbiU5JOvvvTaKy+xr/7cP//P/ev/xr/5iU+98eTZJRGJAlqEfQV2lCBHuWH7q+EJC5S6c8IXE516h4KcjKEUMoelFD6TRo42ng2jPulAnsjg+J3NGibqj0bnDByG46UyqpozGEYV6+966kXoXpkYH1b+8kIL+rLaIax0uF1mgpoXafm27so/CS0BgEM28wrMuG63l+tr8ucJBWeniUG6tkIyBCXcKeBxrKNimMxBBfJdrWW1En87OwuRiPJdpIumSSmZKDomopSSqXniJAkRzSCf57m6uql98N4XopCK2IEjTuZ9ENzlyRm/MrOUJN8YNZD73tp3v00t/bdW+EiPUj8CjWFZh6PF2ze2DxUwTqiwLx8flpXbK8GFByvyu6F0UO7q1I7tkUwoe6zZv3wBpT42i4g5XxIR0X5YnEgqYYiI+a7M8u4mAtB9Bdyjr1B4uP9sZtmkG13ck5olX9iQzgURx8srx8n2kdv7i48JZm9DRL5ApmxQVfPWJiHlvrouJxMFMyR0wbvZLERl81VlMxVm8AAJsSVQSdbMAqI5i13cNlUdY5vv+4sxsguG/OZ336pns4cPHnddUoXZbHF+fk7Y3L83V1iJxM0GiWryC/YOPBoA+UDcMAXvmn7XKcX5fO4cEapncwyzOhCaiHjiUAXvKgDYbDYiutm0rk751LuZVVVF4NsONpsthx5L+f4VMzM1IGZmVTGzx48f/8k/+Sf/w7/4H77y6idyhXtnpx98+N5bb7715/75P/3P/FN/crNZffD+eyCxqfy9s5PNZlP7IBIzagiRmfpVRy+RgQjVJDgPgOg5hNBt28XJSdd1qRNXVX/kH/mpX/21v/dXfvbnzh48kISikYpjdYeIPvrnRJVOvi1fTjh3FBGHIuuQx0cZPmlnUm7TaqUpUPa753MqvkspnZ09qJwHXQH3/MjUHxGcaIrd4G8ZWKGAP2bVCwW/YwEKVYUD8PZgx72Rl/L5xb3Qh1I0z9EhrS2xoxABFS3Jqt3UTTg1BRdwVrWgZubVhDESuHHTF4aAdSuChMfJ5Jjn0i9txdYvIuZwc0TMRzKy25mIYux6t7MomAV2NgTp9a5mAyLabDYpaVU5LW6NLYmgnGQJ04kTckJMmWJK26L
rOqCdtDW7TbvsqdKxhXFgRDSxzsqPD78t9SscLI4PPzywBl7QRpu63w8bv+3XcbQlig+HMal5u/nyuyuH9gdkX992O5721iHLmKqOmQszq9udAzhQnHt1yvma9UuUTLRHsWajWw939zTcdpzjgFz3UFBSVP7JMVm+H2w4+AuDe6nPtq0y5KsBRHAuSDIRA/bNLLAycBIQUK/JHDk1REiEsFgsVqsVk54192K39d7PmyqlVFWV85UC1YIu5HSP/VGue/ceVO4kySq4mXpZzE9FquVagm/AKXnyYWboVdgoO3Bxu4lV7WusVTqE5L2r6xoNVGLlfAbs+dn9qlpd3dxkQweGS2dDCHVY0Eq2sdtsNiMMey+xAQKqRu8r5zjG9tVXX/03/rf/u//wP/qL2db/Wz//X/1DP/m1f/1f+1/96B/4/Wcnzc/83/9vb731Fkv85Cc/+ejRo0ePHl1ePq/rEGPsdUnGT5LsvcjIpgHLdfDOOU3RMbqmmc1ove3Oz8/+/J//849eeu3f+Xf/vS988YsffPDBQJl6iGXYF/0lvZUqzQ5s+qMavfx18Ngd8/AVsQvwAqLjNla93TTMnrE+OwJZf7Kgi312MxHNAlZEhBmGXZ7iiqRdnuO9ZgekTETBoQFxtwQbBr8z9UdhW6q2yaR20viFl8ITUTniJaGhZxOBKKrQASjC1dXVvLqPjAnFI0OrQNQROpPhaAfu1Fs+CNF2HRHxsBIlhiRS3siAiESYI7vZ7SKfm2Y2n88JMKWEhDq468wgmYLsLu4FNSInIt5Vl9fLerZAU1AhMAUFtbxrZ7QXXFoSaJSWmcflEWUjVoSdi0mZ2Po4DjSA9Wajlq+N3MUCDDIO9j2u/XWbfUa8Xd+KiGB9CGyPNhwucbIjtF6OdqKY9xC/I50jmB5HNXEGTD4cl+zjSjDfcA491Y5u0mIXZ5+GsDB7tThnxbDjBBp9OAMIYJ+ID/XNdKDjOAx67wUAqOXDQmKm0K/s2m7bdtvFyTylRAD9xhLuw7nXqLvHfvwA5dxg3/Ewni0egTD81F/5rHqYXltFabC2p/Y77q8YDrFTtHM8aBwRO0U0pLzm1/H05uCyYxSzpJoXwapgpMiU1iYiPjhLPio7RnJElSdgwEQQiQ2MT/yp4bXExKFCxBRjZx0BgHGSTkQMVbT2XKmCsfNNUzUu2JmZRTFfLSAmB9uTs1NiL5Kv/nYcGADUoqk1FZsKqqgqEde+IfRmkZmNLFTeIYgIkENq1us1QiRgM1Yx75k5VTO/7eDiaq0KCBUqBPTOjInq4H01BwDPDh3MG/rj/8RPf+1HfiiEIGIfffQ/evXll1577TVJHar+o3/0j/zar/3a3/75X/rlX/36zab7o//oH/nEG6/E2CEqxOQUZy5AEmMfKa1Th96ZI2L2VcPMaEDOzbkGteB93LZnVbXeLn/gpQf/0p/7M3/rb/zsb3/zm5//whuXF2ndboN3XYpkYEm8c5SMGWOMhqaMHLhLHQcvItXWDIG9ayVFNQwOBjdkdk9npjCzzG/EBakP5iHcntytPNde5oef1B+59VCu5nIYA9TzlDfr1CdA4OhwE1hAZsS4hZeAXdcZQYpdxUFUraKmqlMXe21NJGDMrIOuHR1oo1vYBl0Mo1trSHZ7VOmWpozkBILj6nHwNpdb4LTnWOrbINrBwcpL1VEVdremYoGHQwMoj8EZztSwVQW89kpEta/Wqy7N2b90/6rhpQeOMvfEiKEVd2iOAfRyNQck6xDukXee8rKViHIMQp9RnXMuuui9Pz09ndWNqiZJtJ+8wsyw1FsAyATWC8G8W5ZjWqzIvGGmarsgrwlZ5GV3eboLEZk5J/RPpmjgve/atGm3iIyDUiktrPLPscCoY/oyvT9rRNILulXhmMC9o/4tivkuA22fRves78lPhyOZ9GiD8wCL4JH/NuU25XR3UdVxyZLTM8I+/CfD3h//1A8xeX8bV98xztu6e5Fy+O0I3h1N4u7UbzESw7zgHd6MFVLqRAQJnHPENYmjJIb5Pk1wzrFj75iZwFiVAFSCAACIdsCOGMDUkhqpahXCbDZjRnNc1/XlxXXui4iYMptjU0MIDgCzkUqEkN2wotBPRAHROefyRiAjICMg9Ulb8qiaEEJd1+160253N4oyMzsP5MKmFsmHNwJQHzSeUmrmM+dcCN7XfbLYT3ziE03TAMDrr79uktq2JQJGeuWVVz796U//8O/5fb/0S7/4y3/3l//Sf/L/+Yf+0B/83Oc+651j5wNy3LZ105hZt10HrpQxocyRAhiqEDlPlKfGDtnXm83m/ORs1XYPX3v4F/79f/eP/4k/9dvffeul+y89enC22m4MwAAEjQKmNiqRenVVUEliYmYSk5mBY2TqRIBdHVhMxQBUmcjASodqThG072f9eJesFWViW8MB49xNq2WFwmy1jEYYbv/jYe3iiM0MCBlZVPXgEuKRbPGW3u9QtLeNdjw3u+vilg3vw5d7fPe7LBPg2OAWlXwsy0wBVNVl9zCoI0xoqprViGTRSsN1hGZmYIUVAKOjIMWoqtkllZ1yo75k5nxVXFbMs1zqGhFUxWwMVJoOFwavGhENTAv5VEDlhmkoE4GIHO7DT1YPGfwFKBkRzKKA5ajRbdt1KdroOTzmGJw0u5OMWSoecy1iv/b9eG16WF6kcmkTFAO7q/J+y7m+4oFyzbkdJoM5JP2SH363FHpU+cHvktbNTESyF1qPff6x2vc2NQyFs640uY6O/7axHbWQ7tDch+OfaN/hWwWEEkGTMo7XDABVrUM0ZnSejTwQR4lVVTEFR55YgRQgB1sgszNDh2RkYIbBEaGkzp8szIy9qyuuqjqJtW13/3zRJQBA772IqQEZUyBGMkMmU8k+WzMwQFNTRMsmBCM7T86RIwZCxwigRIhggJrzeIcQGGabzTMcxFYOggvB523dHOadZYUiKlA7hB9777PrOl+HRUSO0NAxs3ckklLqUkpf+vIPvvzKo9c/+cov/MLf+rmf/dlv/9ZvffGLX3ztlVebe/e5mWWBdlKHCNJJ8iAhaiAEJnYuBK+QKcSiSn3StF1nZCnFL3/5i3/pL/3HP/MzP/Mz/6d//+IjiAqvvPp4tVl77wyFKlIDDk3bdWJa+5D1FqhGBgQEdiKSr9F0zPnixd7KHG1+UkMwJcAjCRRvo7dR+95Be6Uyvk0DHb7BYSUNwDYoURNDl+UkmFm73aKqcwyM5Khp+sgA7V3XvQs63waGxYChGHNpN4xvSqf6HXr6timPLyepN+9q5M7zpbdhQQAYnJkoWDIjMBFZtVK/dF8dgRoJIKIwqijbkHg532uB45lFgLw3bP01y2lMzQhGRKSQRhBkX+5isZg3sxB8tltxl89v3we7PwEdcoPlP5fLZbjXkGOQVM6KAdMxFVBS20BJmHKQNiEBS5L1er3tWudcviDlUJWM6qeUwmOzvbm3X7mvf1uiihdYJ31shcPKL/jJ3fxZlL2Agxck6zuGN3meDON31ZrtuR1w03apuDF0orFKaQLFlHAgNhxcxjt+Hv+H2S
MDk/Fn7O/rRSyavFXBl7O448+jb0oUqxmTQp+JFqHnUAPrvWk4jAlQzFRARBJxMuvDXrz3hB6AMK+eGRmZHAciE3HOEYHEmhgcYZJ2ubxeb5an/lRSt5FYN3PvsGvbyi/A0DsfyVJUYiQ0NEEgUckpbchyP4ZgqCagDOCcD5z30Y2ySZzv+yUiZudc1qCp5TIiRETU0IUAAFnU5LzfUYUYVPOpIUZEUGvmDQ6R7c65nPpfVdvtmgjzshhRHj++/7Wf+LFXXn70y7/wi3/3V3/l1//Orz5+/Pinfuqnf+j3fJW8FxH2rtskIiBgrLw5R86hD+KdmolGRgpca0xEGtTYaHtx/Xu/8OV/43/xv/4X/sQ/941vfOM/+U//v3/jb/78J157ZRO7dRfJcQIwIGIvUQmAiTEpM1kF+egX5cjhptGYum2bN7mTGYEqgiIo7kIcJ/R5hxq+g+/KyhOuOSxlcBkWxSzpEI0xaE5IMd6bN7OqlvWyD7hSAyDnwuASn94qcbSUCvLwp8N52cGe8ajLj04NC4/mx8i6j8vucGjW538FzAGimhIYghF2KsnspfOF1Z4InKo5TAiQFA1c3ugygEnyZzPbrNfZ7lABZk9EKaa8KmXmEDiltN1uq6q6d3Z+cnKSUsx3X+dwzTILyajPxtWkDbsUjjjXDCHc3NzcO6+Zmcip9tEZhLur5svW9vWuDdtlBgCm/VUEy+Xy6mZdVR6Ru5Scc6bpbmiWxWyaam5ED9EgAF/MV3lY7UVU1KFiuKPyYeOTRg76Gons+GJrj2P1SINld6UKnwz1Be2GQ5qGIg6LmaGwoyf97j782J5usRjKoe4PGDMV9L183FSmqvdOW3tnIhwYCqMRUFqxAJYvERnIH0SiWlKVJF1AREQmxryF3MsipczfiETsAI3JsWNCR+wYY2w3y9X77767Xi/jw80bb7zhqiCSCCD43r2MiIGZLCGyAuVAcTRM+dJfBEJyDIqQ1NQUGdkhM+brPBlzKgJiRnaIiPkaQUSXpwDgGIfU6wqIKKmfYTYmvPfMfRxo27aLk5mZaRLvmcAq70QsJwiLsfXeE2EIzsy62DpzIfhPfuLV6ms//tqrL//Wb/3OW9///n/2V/7zbYo/+KUvnp6eAgCaVgigRrU3x+CCOU4EZgDks9L3oWJ0rqbUJVDU5aZ24Qtf/sKXfvj3RI3/8X/2s89/+7sAAAytAHtIER4+fFjXMxM4nddx2zoXNryKMYpI1dSaZLPZgCgTMaAhjPKyJ4yBZEYqLbXvhGhzoWOZ5HH/gpwDCj9SbrMv8wCSict3RJMCcOq6ew9fbkK1sZtdTYOsCMbxEFF/p431uvuw3xcXcWX98Sschoujs3v484Vb1hc/fVk2i30ctalaPr6at1PblKwJdDYXT6jqECOAECioF3A9dAih2DIUkZwoChEJHYAgIhMpYj5KFGNUQ1ALzs+b2WKxAOhPEo8HvxD7M7o7YTosdjMW8x4zEIL2B7TX63VK2jQVUZrYdIwoB9LVDo5LISJYfyT3erm6Wa2z60IBEVFEx9jl0qyD2wVxEUuX/Xj5p1vXlxPhfludo7g8fDkxMqC3k17kMNK02TtZrk+qONoxL0KsExU70b7ln7d9dbTBnjULsm7bdgj7PKI1x696KO03eDjrEphjtVvG2TvJJhO5zVY4+nKUPrdNfGKyGAgAgHE+TqmqSIRoaKPw7f14+XNVHS+vDJWD5MWQEIgIwZnlc9eWcSJqgbBfQQJ6XzHBzc3V8+fP5/M5ETx9+rRt28zjr732+v1HDyU5M+M+INwhIpEDI0SMTjiBmTH3R3pEwPoIMvAMjokIHCMzsiMy9SEf5hY0QBu3p8wsEaP33hEDucp5GlxozDyb1U0dUC2l2LbbGLuT07lzBKDOVYNHmjNrMDPVtWNyjtq2VVQzCVUI7p5HntXzs5Pzxy+9/Pd/65t/5+/96lbil7/0xUenp/eqGmMkFEnCiN5bPnyeNIs+I0QwVe1A8aSpNzdr6NYQXIddQPkDX/vh//df/AvPn15cX19fXF2/9+4HT54/f/rh02/8xm9dPH3KQJfACqqA1Ul1fn6eUuq6aGbEXM1mses0X/lqeeFpgOZyhuRej+zswrt586jRX1LsC+rgw14ynogQCA1BEQiAAQFUElSOLYmJAOe03gzZDZOjJo/lnzpuSYyMM478YF5lfSnS+PdC+5YZHe3uYFQvKlrtwPTPA2AANCVCQFMzUWtTnJ/fh8WsAwVVBFAyGVDpYPBu5dSSvRdINWfSoPG6JLPMDL0mVkWDummapqmqyswy32ZmUFUDIUaAne++9NdOhSYCDHHq2+22rutCN+7uPmLAMX3f7tseBNQfKDJS1ZR0025vbm5UwDvqkiCnqqpEpL+Adl9Wjsg79qZM8XjrFHZtGtmxJI6HYvoFGeBQ+u/T2ADeY8cS7mjkcL6HemJk1JENjn5etjBh79K4+VilXo5hbCQr4PHukY9X53d2VMLBBsVb9ljU3DsCvntPmm8NOSxHZVYZZXrbJ5NRDb+YmmGOdCpsVlWgfsAAQxBKtpjrakYuRAEgIGLCAEBmEbkjNgRnyjWDc44BmdE77rotIodmBtqpViLN1dVV27avv/7G48ePRfJWKwzWT0oiAMQOvXecLPXxksiMqpgQmDiRMZJnch6CQ++ZGFElJwf2TACEZFk6j3lnnXOzukIKYpmS+/fM2DRNCM5UnOPtetPF7Xq9NpPZrAYAHxxYBqAxO2Zuu62KInIIQRG896lr2WDx8FFVNWqwODu79+jhr//mN7735ncDWPXGp0/Oz3C15ZSa1AIzzGpoZuA5IAAh5LRfIVRpC2bw/KqpZ9DeAHT1jCD5z33y8edefwxJbLlG53Xbpgg3N6v/9C//lQ8+fLoRebpcRoTltn3zu29/8NFHbbtlx2Imqst2Q0SOyCx7E4AHi4pskNTDnSojrZbUUuyoghXxqhNeLnnwRXQw7LPk8I4Q8+2YWeQaEZHBLFSklnMZJUNvrEmsj+LGcufVjl0OsRv/gaTNSelxn0/v5vHDCneYv8Wvv4uFzfj5pOWALJiP/CQTVdIO7bWXH3FTiQnlFaMqOgJEBXN588YMRgU8psTCIb+P2zvIJQAQQlgsFjnBpIjE2I15ngEABycZWaF393E5UkDv5QZrY0dM2+12jJ0bz2KOn1C/QNghLG9OA4zKz0Ssjd3V5TURkSdRBQMREzHngsm21A0jeib3Iheq6wjcj1pzxc9ke8Hsu0+OLgrHCodLqLL+LfbveGEOwD5B3MF+h50W9kT5bfnVXZqv1L5HLYyP1b6wTxjlm3zl5aEBUc50WBvimL79KDzhgGcOFXj2OUPhKTmoooB4R5DUpMdDWEGB3+knQ3CfWWHQKO7ufUQAADJgAAGIMcYoXdclZeccuoZEyTEAIHgzzAYbkiAykCeMZqJEgYMa3Nys1ttN8JUops1mPj9rmvnZ2b1PfepTTD7GGIfc6X3XpgjK7IiAuT84k/3JIqAmhLk2ODZHyI7YgSNzVQDQ4Jzz/dk/7312UeZ5M
3Nd16bcdqISU9upRJO8YaRqAmYhcGw5VA5ARaJZ1XZb59k5BwjM/XZyHaq2bc3MN40omIFbnIAabDtf1y+/+oq7qKrTxexk8fZb37t58uS5c9WHTy6/99b66QU8edfM0PnQ1L4K7H2oK++5qioOHhGb+UwU0Dk1m80WN5V6YlCr2Klq8HXbdcDO++pB1fyJH//9T5Y3337ywRXK1tEG5Pqd3/cLv/iLz2+ufue7b7774fMHjx+YSYqCngAAGMkILV/ibACGKACAtuOOUgEj9hvFt1H75M1t8uEoKRZdFBJbzTwiEaCBKBo4JAUI5CCp5WWQqHOuuNUnB+RiWaZd3yK1DocxKfvSph/nqOBLru8ntZ9rtgDLNJPd3aU8wloihfKNvcygyNluqP3py4+4DtpusU9IoGxgiIbmRDQnl8kNjfFWlpO0cW/sF9u01ycni9PT0xz/LCKI/eEzAMBeFAJAYbgdYrHAQX5Q1WxrP31+Uc/mTdOYRiJGTNlMztPTnBNkmPagnkdRBV3XrTbbzXrrnJOeihiHqxe897C7FzOPYg9nh8QB+zQxGfYRlbn7mwAywU41SlluY5vDaG3bj7/Y/zwfz5nmbT46i5JoxvmOGm5fMUyJtfyk/HMyi0NbodSdR+dbVEA1s8HmzVGUm80mh8XCcBx5zFTVG1+a3ej5oiY4HNIEMpNBqtrOBC8+nwCthB6gmAECH1bOZXJA4hA+JUx2kCysn/78/ZhmtuvqyqkKM4qIATTB3yw37TZtt6mZ195XjEG7iJRvBKzMMEUwMPYOwYuAd8M1Pkxt23YpqiEQq+Bicdq27Xx+8vjlV12YiQi5CgUwxy6bAhl7YofeIZERYgh+HB4GpvmC87YTGgAwYx3yhXQphBA85wwq+ZPK+ZyKi4hAgcAIDT1FEecZUBkNUKvai4iphOBEpMoNJqHKqySJ1G035itEJEDnHBMCoQtsZqCC2UtqBExQM8RESWfzE64CMc4cdc+fd88u//63fu28U726CesrIgKjJM+3amjAjERUeSYiX1fsnBiAY/LBzMQRIrr++nPF4FzwSqyAzXz26mc+88l7p87Lr7775k1MEPjx/Uf/3T/204Lw9d/6rW+9+b3/9K/83PnD01A5SYqIsU2g6l2FO7MSEFFMzSz7F/OpvEwbw1l2D7tz7UfEVylGyiUNFOeDx2ojWZY5K0bJg4QxxpwVAA2cd13XOYZ5Vadtm/mHmJxzdRVms1nBQTZ6iHvtVdoNsrfwG9mkjwUr7ikYJ1Iy3fCmdwWMnpWjYgcKaVb+VOqmnfFQxKONSVcy5CfV+r6Seu87FQKtnH/v+bMv/6EfnZ2ftilmR2KURAQgZkiC6gBgu92GELLNCIPII6Ku65i8d06Hg//L5fL09GQ+n4cQMkTyrYImfQJu2xluuxlORNgEfJNqzNy27Xw+J+/IwDmXV95jI2yTADXKcWSEbivtcr3pr4CWBNb76kZ6UlXal5WD/aiHqDrUxLfJ9MOaMNLWx12XhLdYdoctj5ryjsYA4A41fHScR8uoAyZKYiTZkvgm2q7kH9hHNBT8PPlXRMYwuv1x0mazyWfecD+WxMxAhy2gMVq499XtBa2UD+Mg+z9HY3F/hV12dIj33RvMh5OPKNoJtMs6E1Yva8aYhiz//fH6PBJiqus6dtE5R0SiyTF47wlDTLpZt/MTns9PLq7bpmlUlZ0LHIicBAIMzAboVdjzNncaYzJTlw/jthtml5I1zfz09NS72hQQnJoQGaIRoRk4RwjknAuBRrz3QRio+V61RdUQkQ+MiCl1ZuYcMQcmzCYUIjqCIVgEYoyxjZmoz8/PYydExM537SqlrUNwZDmkwyQKQBOafCFSpoeUutUqet9VVTUCmSinsjBVTQgA4AwICciACYPj5Be1d55rU6srQP87X/+t0NkMXPPwtZxXC4EY0bMLIeQDwVA5qCtwDD6AY3AOmHqCyxWYwBEwggI4BzFCK3px/WA++8T5/fXFB1GBHRDQvbPTL3zuMw8e3PvMZz797/0//oPnT69f+8TLMYqRAvaBS8w+gXkyVUECRx4GN2RVVaWuYsY+W2sf0XaEDif6ZuTcCeVP6Lz8tSdaA0TUMW++GZgF5oqcRxF2CQXNpIuhqdk5tbTPDsWpyHIAQx06SD932/PIREflZ38Gp5jCHWJ2zHs40UaTr0rVcFtrKKoMhkBEmmRxdhJO5sqImrICFgBQc2AKIISu67qqqjJeEXE+ny+Xy3y01zlnpiLRORdji2rnJ4vF2UleqgIYY39bXjrwiw6rQTt0rOfRl2HupVBzzq3X67OzMyKyJDlX7WhxjIuEEii5qW3XbjYbEUEgRUCcBs2qJpFIfOSY9m2aadJXWaaNDPuC5e1/ow7O7ugJTZdKohznhP5uM+LGnw7otdyhzNuIY53+JRyw5SEopjCxPR1Wdj2pXJqQdyihiUY0szEtaBmERUQ5Kf9g1fYg6jNY9fo2p4EeFSpAv6jFHF6MiHmNS4RjzHHvnhncq7AP8zG4D4sCR/ECAkZHpdhoMRzirmwfCnTkrk1Ri+s+EbEPna0qM0sphRCSgHO0Wm43m6hqMYqZNU2jgN7n25TZsQdE6xN6O/QOQbK/Kp9lmM1mjoAZU+QY3WzWnJ+eO+dU861R7BgA8n4KkmRxD/kIQL6qKN9tAUDEQETeATMG74ihQ46xQ0DCfHu3EaEr9vLz1nXOY0VEdV0BtECoCsGhafQOZ3VVV5zaTpWapmJkonx/AuRs7Zm5ttttSimlKoTgnOtDxUGQggII5aQhDGzZ182MlXdBUl3Vb333ncsnz05do9GeWmoUa8gXB3BNXjlERmCq64rqAI5hVkOowHmoA+RU0maAAExA0F9UbwD1AtZrSqlSeqU+vcTLznDpsUvx5ubq/vni/fff/eEf+qGX/uV/+W/8V//1z/31v0UEL7/8yrZL6+3Gh3obW3K8XW+qKqctaxExeKfKkhLRbovLzBA1ryLKxMsjx9m+NXyUII+WifaFQab3NGygAGZaVxWbSkyIOZabsrvRe9/FvZUoAIw59idqrK8zSA8isnGxcexsRSktx3fl4HtV8QLadzJlK8dTdnewIQgHNUMIQgoWEamN3fnLj5uTedtumDnf7m69VuzD2RwCr1dbZm7bGEJYr7aPHr4UKvftb39bVYPzANB1nSc+OT+ZzWah8v2dvoNPGPZu/526745KYTiQxbvJkOu6brPZjFZe7zNRmWArF2ZKKW3buN1uU1JyzmzcGAYYwkTHZ8W9HgdouhffA9ipT+DBU3qr/Zi7MD1+aHgEwgQUWCz1Drs+MpJbl2h0WGdQyQQgh12XK2wscpZP4hhHmi41x/i+5PmJLBjxAvu8cVv0tZqJaoySUkop5esBc+IFzP7nYx6zQ8YeNzLHzZtjDHzXaqD3CA6GIJQ2BBrartmj3H5oXZUMsjM4OK+/cyMkYjmyV7U3Rr2rEM1MPMP52VlK+uSj51c3q5Oz16qqikkQgRicA+cwMBB7A1UVQyYM3pmZiRgaMNJmsxHmqqqYIIQwn8/q
bxlV/+5X/n7/6PVR3e8qVfevHSbR/9+CendX3s+Imy9npGsiwjAZJ55DuuhrUUXSPY9nJoKAT1vGEQDgKCJEgM2BSSoiGX3rhx49HHns2yTHGhYeFS4TFvvTybTRHg8uXLujVkQETKulb4rcFweP/994/G+7UvDQERWUOmKVU3lQ8Ho0kITEQNniVi7b1zrg6+qMpJORsX04NiMq3KikPNYRbqMvgauKgrAbCJS7IszTObOCV+XCwzg0UB2ZVMC7M6JDu6x+fwrz53oXv4XU1g6xD4ZcyPAyIhnFbl+rEtl2fciZ8CAMHCBPW/DAvjj4M8PM7D/NySYnK1BuJ82k2mCC09IoQFGKPDi7L01shflnYisq3uE5a0m+6Zj0/ARSUoyrAlbVHHqUr+AveBALDghV9619KH7qQaDisSQlA+rr4751wvz0PbTTn+sPvN0gpgx8uNbYIMdxpIRKDKwxQZ1yq67CJH08pVjYsry1Ojx1qKJguoABMRBIGAgIAMzPrsSAaRUJq3t7EA7mT2ImKaJNpWBbTXG1rvmaHB6CYCZKzYYwMvhyCMiEG0ABLJorTRu+5GxGuJTg4T3tL3RFZHbUxDBqLAhNZOJpMkSRTfeDabjcdjAG0abUDQOffss887Y19z150ba6uhLhGFxSsnBfaIVos7TVtf3oRdsTXsFMsAkcgZ40T7OrfIQYgYYwSIGBNZm3UT0jQWzQDTJ6s3whjTcgZaOvzcAVOL4lnfwm0cVBUv/cY5FzyDIAKJsDBY40IIIAjG5VmvqirtI1RVlSEr7JVsuhqzptN3N0IVGmNMmmWCELQwwVowxiSOOGiOknT0zhiB9hJwnqo6r1wx1Eg1lgYgVumwxkpaM13Dvc39zooIoQFLdZhlWSZAZVm6pLe3t+e9f+nlayGAs2lNARNVFmsk6wxWofbeJ9adOnFyZ1Q4Q3fcdddP/PiPv/e97/3//Iufed1DD527cMsffOC/nji2tTLoAZl+rz+djPI8B0MJJeqTNxY1/KHIJtwpzdJZq9MCCEkMCAmACBISs7CgMNRcO4Bnn3tBBBCIEdA4kHlTnMgGENH7ajgcXr169fy5E2vrq3VdW2rqP9XmTvOMma2lsiyMIQ5ACBZNzaxOv3JWT8uyLEult5hwYBKHRJWvK4BQe/UeJRJkKt57lQtBOMmz1BpB2B+PRtNJ0u8BaMnInyEdDSwXDccJLsmIbqKD4BGoHdJJWu7ef7NL2gxyFomGbEuW4JmBkKzxEhhk4/gWJlakAXkFAGyhrfWwQ2fAjWwS7mREzM0J7iQ/wqKUafqSRjAsJRFsQv3zFM3ub7C1Nf+s+S54p7u/wi4SUEdAHumv1g9ROEVB1PzEOO81ZBW6GkM3AiEiwm1REAIgw6HNOrT9ulMSQhR4miIHSIgCRVHWdbA2SdMUkavSF0VRFAWQKHuS1tDnjr89zi5q/caYaHF2VwMWTt3c/F0yyuN6qhiOZpAqdGVZEiUxXaudZktOyADQhX0xIKFDN93xYKeRQOhcg14/TdOdvf08y6Yzz1JPpzNmVq2QmVFYQkgS6yyJoIgVEWL2LA3Q/CLqOLRIsEvE09VC4p1LixNJJbI/ajCJbFmWaeqcs0UxIepvbW1pHuxwMEiMnUxGK8PBlWs3er3e2XNnksTmaYbWtHhPzIxkWfMuYi5C3AIUgPZjszduXnASdzx+QETvg7UW0TCAaE5WB+VfhbzST+vPoIgtiohomih+nvVDCMEHT8EYg0CExjpXV15EOEhd+UZpQO1YTNx2wxWRpmC6qkySqCqpf4oJz9SWdcW8BC2x02fGHYl8MHTyHKE9s0GCECLP14HItlifJmp6UVVVvVIT2dBLhJfSAWs6BVkjCJ6DSxONNaRJVnMIIQAaJOvrWkReePnq9evXT586VQcWgKKs1JEDqI3L2Ht2xrjcMbMwrg8HdV3vXr92+223P/7oY29845t++qd/+ru+6zu+6mv//COf+PinH3/iwQfuQ3JCDslWPgz6TR2EsVhVVVnMdMuiYtSYUKZRnoCsswbmffpINPUv7TnvgejZZ58lgtF0Utc1BzAuei8XsuXVsJ7N/Jd8yZesrazW1VitEEEyxhRFkeYZIo7HY/HBWWIP4kGb9AKRAJW+nhblZDIZDAYaoqqKWQghCAeQUDddWDSFghGYocEr5eC9p7HJsiR39tr2XuVrMkYIiYgXE/LjKV5igN3T0T3US990T/rccXzor69yLQlmlGWDFQGkBTRkZvFeDKb9Xm9lSIkDaBJZoq9OFO9ZAADYSNNYUrkWovDyjOKUu9I6CojG/0xtHS1AxP+fL1xXElCnh273hihmuq+Ejuq6dFa7Zk1X3C7pFEuCX2Jgtb2/qqqo/sfi964CAgACQdr8tCagixIzyLv/bSnjCNN8gYFKo0PoAKbT6YE0xkeSuWgEL7mO48+jFw4AmgqZTqFUFH5HqjjYuborX5aldpjRA4Nt0fNgMMiTtFVQmhcDCCIawNDurf4/AmDToP1QDlQ7wrhWzXyNW19du3LlWp7nZlR4X2i2iI5VUdOtpX6/r4UuzCyC3vtZVYXADIK8QD+ICDI/vV3KiYt55OnVy/umULUsS+89AGsWNDPPZrPpZOKcI8SqLPu9Xi/Pi/EEEdM0HU3GSZLsj0dlVQNQEDbBAIprdoENAkqTfBfVx5bAwDZAZ9S1U9v5ivYcgzZsPD8s2EbHTePTjlayaRsgaqRDpWZk6NAW+8ZoBXY8Il1x1dR/a3Z669nWOyOZVVWVtf+kTp5I/ImSurWWeZ5E3R7tJh4RQvB17duIdXSVzcnYWI2siw9xK5GIWrSymP/VLC8iiATTsEydIbZJiLECIi7CrK6IyLm0mFXGuP3x5Mknn0zSzfWN43l/9fO/4Iu/8Iu/8AMf+IP77rkDABKX+VBRW7vMQQ1HMSBo4NjWxng83dhYu3Tp0g/90I+8//0f+Ef/6B/c/+Brz5w/9+EPffD8+dOnT51wrlFiWuBoyQYDZ+xkMuoalDpTTecGAABCS0CEbXjFNGmqgZKkmEyefvrZlbUhMyQuq6GWTm/ydl8ak7rX6xkDDzzwQL/f368n3vvUIQAqRr0Evnbt2t7+DhmoiioxtvJeyYaMCYJlWY1Go9Q6CJznPUdmXFVKpWVdhRDYh8bVYSwJCIgQ1nXtOZS+5qosqoL7PQ/QIHHOozALEaXu1T2/3UPdCJebthAERORDbRWWHnL4WhI6N3mskIBmriFiQJjV1ebxk/mgF7BFOG/6VzV6AMauRwCK/6xcKx6f7mSpU4XbFQdK8PP4UCNZ4k43L1qQgnOVf/H7m11dsb90fzz82Lpouo/qDkavrsEHnbB2aBMCAUDa7TFH1VAxKhEdsXl6tQdGVSIN2EBTudo+p+WvbQ0fgLJXYFGndOXLaIEdXh/qlH/oXzUo2IrquX4ThTcsUg91wp9dU1jNI0SM+Fz6pZoyaZqmaWKQQNR13Ew8+rjngni+JkGbY+kAYqw9rkOkhPXVYb/flxs7kUgAAEhQEADTJEmdVYuzqme+ZkGo6wDT6
Ww2E2bEbq2RIVChND9qkdjixKWjrMRvIg2ofabAgQpu3Ov1DJEhqpnLsoxByl6vVxWzqqpCYF+HrN976rMvHT/+pBZ1cMLaB7bZcYsCgXmesh4DwHFZdFQhqB8BiCi0uxn7fupoiUyYKyrNA+u6dm2Sl28LSU2nGCk6inT5lQI12TgSlfqKo/ta5XTD4Dq97tWYllZyx9TFqCjodPR7NZSNMdIiV3Obvd8V2yEERaeSVgluIxGEjf1tiYiVQgwBNshrTc00IQW1702zlwjGzLUxJILgo4terWGFEXU2MQ5DEJcku7v77/2vH7hx48bZs2fPbN4CgPvjydNPP7t/MM6yZDya5L1Mm29yEMGgS1FzAEALgYmqqt5cW72xu/fcc8+95q67fvt3fue3f/u3f/Gd7/zv/spf/qqv/bpPfPxjn3n0ycuXL62vrYj46CwkRJMmqU+lEwVDRKB5ziaLwp4TGAUUbkkgeADc2d77+J8+vLGxMZtVIQRmwJv3U3fWGoCLFy9CUxXWrLPUda/X994/99wzk8kksa4UQY1YEyGBINZ1PRqPd/dHvV5vMplo0Ho6naI1AWRWzjQjWhAEKOpt3vtZVTbdwBIHElRhH08mx4dDZpaWRAGw245lzmMP41JG7rpo4iIe3Wd3SUzc5GFHrFj7DRJg1wjWFzVH2Bg0EDxvnTjm0mQ0m5pUYWOWbUtqAB87cF0CBKLma2Sb+k/E1q4J3OhAgJqBarv5QdTJ72XWBuzzKsx43rpyOhLWkqyFjhA9Uk4vSdwohGJdbDx13VfEVYhvdG2vVh1z/FW3r4u6WVmDH+3Sx4WTxaxsaPW4uI7dMZsWp1eNqha1J9HDVlUVmiZW2iWCKDDiE6BTGdVdT+4UoiwtZnfdoqiLCSA6HtdmvOR5rmiU+qiqLK1Bm2bOOq1GkE4UpLMOEqBriIe4t3pbXCiKfWHFuNytrKxUVaUQHzwrGoMeUAXhyqC/sbEByMUYC1TB4DVgBsxkAETpyqA2AtMpCwCCwLzuXBZxauKydMUwGqPIRCrwlOPXVWPrrKysjEajyWSiK3P+/PlLly4988wzauTVwR87tvqZz3zmtksXT546YZkIU11hQjHGIAvZVtIs1qp1vehemEJTjRNaexE6XTqIyAsbMtbY+LRoZaryFCl/NptpkTdAGhOaNJ8NEauqSltbmTqpUmr4TiaTEEK/3w8hTKfTbvmQCmwkMgDWWnU2Rukbj6G06RpR1dB3adrOkkKmU22+VMbV5Ao052Xh4Q2HbXcTCc3CIQQRQARNCmPWz2DIAPRazOfo8SYiRnDOfuITn/i5n/s313e2e/lgd/e/PPCGtzzwwANnz56uAn/i4U8cP7Z1fWf7hNkaDHrAYlzjVGDU1l6IVWUNpf18UszW19eTNL9x48b58xdM4v7aX/v2d7/nv/zo//T2B1/70M7OtY995MO33377+TPHpY1tcVlCi4AmrR/LWottDL7VhFpzaT5dVTf56o3rDz/8yD13372390pd18YlItyVF93tm0wmPsCtly8yM7JkWSahDMErl57NZru7uyjBe58Y6+tCJFibAmPFXJb1ZDKZTqfe29msVnYxm83SXu59VRSzqPwRoOfAlSoEbBJHRJWvLaIPrKyhKIoAogBP0HCnua68IIDh6Esk9gs/+sKOO2EuAgTkKIDow7+Nn0mIsUkN6t6gerkHCSG4NF3f3PAcyBhGAGCA6PECUSYM0iLbK5g9iIR5O5SOQIxvjzKOOt5fylzmS49ssnQQPCIlPggQkkWwAhhabAHDDG0K58LQoztrSUxyU17mo0s26tfcSeTh9upKVmZWhh5dUvFXOoAmO1eECJwziNJiGggAxFJgzd0NXkCI0CJZBKP1M0smJsxjh6waCrAACwro//SfBOiM1VyewWAwHA6TJMl6edbLBYFsk40850fKf41JkyTPsixNnbUgErwP3nMIhoBDLewJhQD1fxKAwCAYYZTFTnZqbxjjjHGIBoDQWJcmikeT5UnWy12aAEHlK0ExxjBCkiSGkNhbEAzeAlpACkgBJYAERgnAnqUGoBCkqrwWqXKbh2W0/QCAIUIAQnTWOmvTnhlNR2U92zq+aa0t6wrEBi8oBL7majbM6MRmby0LfZqtJGU/l7Vhsrk66GUpBjFsDWTAhjBBUJBFBhKyQglQ0jRXsC3+KLQJBIs0YwCIGUJomkPrWWeQIIDGuiwNIIDIImmWiYhayVeuXHnpykv9fp6mDtEIE6CtK3nqmRfqylubADV1fsZaQ86luTNWAoe6yUKae3Sd9cJ1kKKsy1ldzurJuKjqoDm63nMIWn+stiBpDq2EOtS1tdTrZc6ZJLGWnDNJdPi3QouMIQCoqorIAGCogzOOgKjtPN8NZOioNANZa+XViJlMJjrsLMsaQDQEdNYLm8QlecYIVfA1By8c480qy9W/Yi0hirVkLRmjLwIii2gQjUtzgcQHBHQcJNS1AXSGEouWxJKQMAQvogEgAkqQcqQcIAVxABmYDEwGlAvlYnpCuUAu2BMzEOqLZCA5SCouhyQzaTJYW03znKwDl1My+PjHP/UTP/Uzd7zmvrd8yZff98D9b/6iL/yv7/qP/8/v+fZf+1/+LdTTb/+2v3rt2o0kzZ9/+eX90SQgKXtFFCsIdSAObC2T8SBk0VJIDJ8+vooynexdveeu2/79/+/f3XnHa37tV399dXj8y7/i6/cP/Mf+9JEaia0Dl9VMFUsp4rKcXJJkuU1SEQw1UzCGLQZDKCIBgIVQUBUQEkGgBMS+/MIVZPDeCwahGkxl2ZJ3BhIQB5igSwMCGM5yc+3qK5//pnvXhgnzNKD3XJsssSYNAYioqv1nHv3swRiu70gp694cq2h1hrayOPZlTbB3MDsYVWAsGETrDiZTMBaAtrf3RSTLMpckSFSLn/lyxlUwgqlJRYJ4tk0m8biYlgD70zGVwZYB6qD2sQNh5NKyALFgYNC+ogGEkbUVkK48oijLZRXhXQAAiH5Y7H4/l2ewIIw0+NU1afQKIFpRwQgj6ysACph6skwB7YzMjAy6jAM5csW0PHvrBcmTifgpBgY0LgFCr2lsBIBCBgnEiOgyNOAdSEymCcBRAzkJwABMBCHU3lciQeUSsycCIrAnTpxQ5Xc2m2VZNplMbGJ8HVBlfIPmi4iNA0rlXFcr0c/RH9sVaarRN/xwMQd4yXbpSsEoiZdesfQh/rcrubtvOXxFKyFu59Lb4VAcWjk1tTgPWlSqfM0Ys7a2lqbpeDzW20Kn8KnrmbTGRNDK+ApqABbqOQEtvrrjDV6YoI4/FnVgx0+o8JZJkigWjzHmxvbuqVOnWMTZJEmTclZYmk+WNStTFMNWRLC7/lHGwGLUP/6JiMjY/qAHAEVRVFU1mUwIEzUIhNCRdTax1hpniQBJyiLozdPp1IdQB0TWvIOldPR2X2jhOHUT1vDm1jBRE7NRTQ6Ae73ebFpEDLWqqmazWb/f39vbc845cqurq3t7e2q2KWxWFPNgVO+ZBz6YGTqVYCEE72sR0ZrY+FsJXPs6UoW0rpG4
cYhobQMRHIOmXdGu+fZZ1ti4qtHGGnHWvM1WqTVti1ydtTGm1+spZUobOlUhrfbrZDJJe7ltzx22fpS4pOp5jtOMjnTNWNErbgEz12WJaU+JvtmLlmhJw7qimfHYpposnFbpGHpHnl/dZmhwkcBoJ+bGjIeqKH73d//zyZMn3/e+9z38yU8kibv//vvvuOOO0Wj0z//5z1jr7r3vvixL6rpOkmRnZ0dE1jdWQbDf782mhQaDjTGK56zHWbt3b2xsjMeT3d3dCxfOZ1n2Td/0LT/wA3/rB3/gh++7775XXv7sxz/28K233nrs+CYzW7QsRsjY1HBdG0QyNoTgywrAkDOIhjoTVwcdIIUQDLmXX345BI1QkNIVQgM1jYgsrBEk732aJMHDF7zpTZubm2UxdWSCtqO1xgZTlmWWZV/wBV/w9r/3DzZWN48d23zNXbcP+jY1iTFgSPZ2b9zY3rMmLauq1+upl6Usa2a+fPmSHpDJdDqZTHTHIyWEAILIwr6pNTAAMBqNAggtRglFYmrv4hWYowc0Rt9ayP0uSXQZwud+LcmaoyioHSEAABhAjf2RSF3XxtmV1VVjjCUIPIcw6z4hnpGl74+4E8R0PNIL6oIIANi11eHjj48GgxXmCgCSJAES1L65QdkfIOkC6Tk/ejJ6dJfsYOg0RThS2sVxxG9CBwO5O+HuvnY/LEnrrpDojjOKje7Dl5bjsKSH1lMXrXxuQ85FUQwGg+l0qs7MLMvKsgzd4tfFUUWGFbW57uC74idqCYfHozcigvZuipwXUYiIfagDKqC0CHnvq1kpfccBqsqPYCrBW0JjHHNTT9AO0iBKAOJOPfThZYmyPxIlUYOkT4Dsg3oyk6bqkRV+3yaO0IKgICEYY01dh8ms1NpNIgoihMTMgCysQSUQIUQyiwH4eC2tD3SIO5a9QaduyvvKGNPv94uimEwm1pgsy6qqGo/H6t2UBB2mq6uru7u7w+HKpz712OsfenBzY127E2kPrq6wEVw4hMyMqg6R6Y4qiq5erxdjFlHUtbpCx38bYwqUqDyoqgo72okxBmAhGCwtIkcU25HOo/8pwndgm8wsbQCVGnHYrNtcfQFodraTVKIXL7Zp6pJ3CCFpGkuo6DXc6usSGYdunDAKgkWFt9Hv2xsWoOOPuBABDIB3zolYaNzd8Nyzzz/11FMf+siHn3rqqROnjs9ms//4H3/73JkTly9ffs1r7rx2ffvTn/702traeDxOsySxTt3yJPD888/feuutCinKIoigQrqu67ouiWxZFFmWalHfjRvX7rvvnn/6T//ZP/2n/+xd7/rPX/blX3Lq9PlHPvnwyy9fue++ewC5nFTWkATP3Exb2AsJs/clkx00k9AmVghq5hmTcF0/8sgjCFCWVVXVRMaH0HTlAYn15QawCqHfGwJcu/ee+6xJvKmBQT3xVVUCQBA2SfLmP/eWd/zj3ld9zTce31r9yMc+esedFy5eOru2tmZcsrdfjMYzJpempqqq/f392Wx26dKFtbU159z+/v6s5WaHGakICAi34hMANNOiy8dac3XOvqhFedRHEFHXCyxtbHCJz+v9N6WEm1+HZaFeBAjAHKFi1QYBqWufoKmCXz22tnn8WIni24gJwLIqsMR8Dl+RsRMY6PRHWpI7ImJvvfXChz704V4vE5FZVaRJPqtKYxDBKARn57GsxRaHlwluYoDiIcf9zRboSOF3WEx274kcpzuAeA+1YdSlARw5fuy0XJU2LoVtsFPtS9vpwgQNCBwaY1ZXV4lof39/Op1KG8SVDjaIDu7IPYviFhaUDNV6kDq1QNCa79DGXxGF2TMDmsbe8r4mIpeYqqqyLAOG3d1dlPozjz5x/szJrY113FhZX13xzD54RJSG9gBAi3F1leY1VLKoOUVeHA1BY4yi0KeZU0FFgJrm0zBVJBH0LLPae1/Vs7IMZjIrppNZECayKILAIiGwnvbWNyBNzTu12T1xAXWjwyGguyWyIUBuRVcIsLu9Y48d03TicjbTwYcQyrJOEkHEqvQrg97a2tr29nVmuH79+uXLl/I8oc7lvQfSXtwLgGjMXM/qxGVgQ2ihv00LQaU2RBx/tCeWKJA6SFIsrLFe7iRRd685wbekq7+NECiKlhxprOueif/Vd1Gb7RUpVmkeAtskMW37amojQfo59qiP3+id+jpAJLICXnHQWFowgHkDhwBikGVecKAllgDwKqK3uQggABDZhATqukZrAE2e5w8//DAiXr58+WC8DwCnTm2NRqOXX35lMBisrKxYa0ejUb/fr+oZIq6urn7q04//6v/6y3Vd/7W/9tfvueeuQX84no5EEZsRY4GsMhNrjUiysbFx7dqV2267OB6P3/rWr/y7f/dHfuiHfujeBx66euXKh//kE7dfvrS+vooEQMZaAQniq5o9aCiJBH0A05JpwwMFQmAOo9HosUcfv+OOy7Pah5pdklgiCJ51h4m1LwiiEBhEQoCzZ8+Gmi1SAAIRYHTOAGFVhOlkDCY7deasIKxtHZtNx+9670fCuz6ysYnDwTpLurM92R9Pw9gDwC0XLujRmE6nV65eLYoi5tC0vMXHDRRp2l9y0HI1mBVVWVUOxAtHSH0AIOE2zb/JOZpDQHew4USE28gRLeYGwc1F6dIlh0AYu3+akw6i6noBhAGQAYhJwBrFIeETmxvk7KwcC2GeZhH+83McTFcG6UFAoKiQHL7fPvDgfR/4wAem0xkRWKS27hCJbFMnqu0+tFtxp49NPHtdQdjVl6NCFPlOVxB2pwQ3MUAPv+XwmnbFavfOw4L88GYsLVxnC7uWq/4TnTPWzrv0JEmSJMn6+mqaJtevX9/e3o5+0a5NsDTH0IExaY2GOo4ZEQHmvLXr0F6aS+T7oNYriUHC1h0NAOyDBv/GBwc7O3u+LvM02VhfraqKQIxtgCOg6yMSQkAADxFNsdMH/mafAUB8UBxsEen3+0VZEaVVWYowB6o8l1VNRHXNRen3xuPJuCjKytehVgiCBpNStFkhAEhgBjVzF5D8uprBzTYx6ihEhDiXN8aa7e3tlZWVwWAAIorrKyIaTQheqtKLyMqgd+zYsenkuclkYq1N0xy48foqdwgy32UzByxDbusIQwcmDBHLslTgRlr01lLb+I957p5ihrKuvK80ua9JL7KEwta6IBwCE5vUOmetaPGinYOp6WBUkB8cHGh2dFQHAUATbfQ2hQmbzWb9FsEDEY1zgAgimrIOrc9fpbv+V+9sugKLRKVcpKmAJyJRCBxEEBBCCHFrCAwhIgRg9kYk9twCaDJK/yzxSw2dCGkrRqgbVWwymTz22GN33PWa0WiUZdlsNmMW59ze3t5gZZgkyfb29mAwmEwmWZ60VQNwz/333XHHnVtbW+94x48//fTTt9xyLrSI4oioLuhGX0EkZ+u6Hg4H4/GIiO65544f/8f/5D/9xu/821/8Nw899NDGxtYnP/nwzs7exYu3EKFwAG4gwwRA2IcgiGmzFEqZAMDCAiZJr7x85YN/9Ee33X7ndP8gSRIB8r42pMloonlACIBIWZb
t7+wmDm69eBkRi9ksTWzpgzFiEyqKstfroUlmVX11e8cDHExnvvTHTx0H9FVV7Y1muzs7XuyxzZNnzh87ceIEs7927dr29rbCuGInGigiAqHJzQfQyglqsSIYhAj29/crX7tOJg2qkI515ND2i4zM9iZJpoelLx71/ZGXHAp0HhZ7WsjEINC2b1KUGGYOEFyWnjh7GhMLVZs+fOjtUa4dOYYuM0dsq5easu8j4p50fGv9Da974PqNvcR1geKabD0AaMEyAZDb7k9HrMhS7HZpUaLmHj8fOYHul5HhHvmnI2fe/VPXH760cIdleRxkd6i6Ggp3tbq6OhgMer2eYt1F4Pi9vb3PfvazL774ovI1aMEfonkUWgz9sJhlFpmOcnIA0vQcbAMh1AGmiGOLU4uezKZGJbBGf3W/lDNqaN8Ys7W11esN0rTpEdQs1HyROK6VLGqR0jHBpZMuFxPrmNkgGWPW19fzPA91qUabQhSxYM1hVtWTWVlWde2lrP3+wXQ0mU5nZRXmbScEoc0tYJDmydqpKXYsgJuc0sN0Im2OnqKrAwAK5Hlf66TVPRBhF421aZrqsHd2dp577oXBYGVjY200KTS4QERBvb4g6vaIC4WN5efIWi101h1R5cy1/Q1jdFbBEwCg22+ja9Tq2qoYCy2m1eGDpj3+uAUsi7Zv63tAadsiqS1bFIU2AtEMwa6aoiPRp81ms7IouK6BWaMJ44MDha3g1h0XY4GRhk0jF1oNW2P2jEHZDRARobGCBGTaxFGIe7SkXR0+nkddBEBoDIABISALQCAYQqhC4x6vy8ZiS9N0Oiv29/f7/b76n2NKh/ceUc9A+LIve+sv/uIvfvM3f/OHP/xRbNXrxkPQxJ48EYgEInAGj21u9rLs6iuv3H7nHa9cvfKGN77pn/3zf1HV4aHXvSHrDT/y0U+Mx1NAQ1mfyHoOujKCwOKFfSfHockaAparV69WNSdJIj5oo1hrLSC3eDUCLUBBv9+/cePGmz//TVub6+wrYKnrmkTEB+1LFli857LyQejB1z70/Auv1EIH09m4CNs7k+vb043jp9785i96/Rs+7/Tp07u7u4899sTOzs5sNmPmCCmvohdwIfwXqyQ0Z95amzpzY3e/9HUgEgSVGk2ZLItmWi1tH3bYNTN3EWcPn2g4JFNe5Xp1+tF1BoBu80QEQBZnrOeQ9nuYuv3JOIAYt4DhuiRfbj6CBXHTKtbLGWQAQMAEbAnkgQfvf+973xtCLRIEKNQMpCGHaB+1HEfgSA01HnvoyIwlSXPEWrS/PfzlkffIooLTffXShyNvjhYqHNppWUwN796GnVZF0NGqRGQ8Hk+nU83JirVARy5O5FnUPrljYs5LPBux3fnh4b3XxtwIdmltI2QgIoaay2KsbD1zybHNLfHK7LxxibGK0QFBQI0JEVF0uCPJV1p7PQpj37ZyasQn0LA/OL61+dmnnkNqHJ7CDIDeh8l4GkKoqwDIk8m00LZKQQAZ0DRhJPW1tLD7ul4CLIC1zAu6pKOOHHkS4tZga/bpZ0acTCabm5uIUpWlJkExs7UkQMH7ybhQC48AnnnmmbNnTo3H47JU1zGDLOB9RgpBRGZg9oKooBBqK8e8AQBQS1G7RENbCAsLxySawmAMEMFCyjcBtACTtkUVZmaQpuFxkKZ3QreWSQVwDOsSUZIkyl7j4iguSiTsJEmKolANwFpLSYJtFa8+MCZCd6mXmkIvABBjUPvew4JmGZAMwTxpHBo3YCPC25vbDRXqMJh4JDt6PwA2p9vo984lzIwhbGxsPPjg/ZpIZa2dTscEWPkaWvAyPacAkKfZ8ePHPvzhj/93//1fOnfuXAhhOpudPXv2R//e//z61z/0N/7G37hw4cLx48d3d3cBQFc+SRJ1irQq5sxa2thY29/fz/P83nvv/cEf/MH3vOc9P/ZjP/bgAw8eP378k598+MSxjePHt9I8y4mqalJVJTcwuAKBAT2gATKAiCSA8sQTjwPAwcFBXddC2HBaRABBIq6DMVrBEQb9/o0r8JVf+VZjaDYdGwuhqrVFROU18b4QSq5cvz4r6yzvi4gA7e1POISzZ07ff/+DKytr+3vjl1566bmXHgcAIqjr2hkbs3HrujYWDbXYLyoKEMigBqPbjumGSCoJ06LooQleQFvNg+YLA3SCuBR7qzMCYPOnDs/BjoH0Z4u6m19HSormFdyUlUSFHQVAoJLSg5w7d9KkySyUYJCI6rrWGu7IabtDPXKQOnzEiEbU6JrYeDeDroxIE1MjQH9sa+2+++/d3x0hSDWbmgUUi7lLmTkwL5S3LkysVYsiv8ZFO106JlRXqMTryAd27zliNY9IEJgHLLFjOHILoLj0HFkMdi6NJ97crafitjOBZqloJ5nIZLkT21COqdq08tPQmjhdVhjXpJlSZ7O7UieOFhGtm5dm6Ru1101ZltDud57nSZKsr62WZUEEWeIcGYOg+FAAgMCEiMAN9KNoWcSrORigY7voB9U5ksSur68Tga9q4JCkVg0jZh4X073d/Z29/b39yd7+qKqDViZwm2Gopqqwn092/lrqvjGuW3f7lsYZ3Q9dTUvvvHbt2sHBQWylYC0BACFmWba1tYWdFkA7OzsxXdlzUJ6OiEE4ejh0QyvvZ1U1m800CTyWA1VVVfsycO3b/vCm01NIJaIxRseLbR57pNtICKb1D3OnykgFTORW0Uusz1TC0wAEtggt0TMRCRixgUxSXGZoDVxEnM1m3KYWqh9bWnyPSAxxL7qeLYzoBIhEDq1BNIDq5DPz4C4RWWscUdvvQbWqRZ2yqxBT+z9o2TnNv9efWLO+tfnQQw9ev36118uYfRPHJZO6pCxmvTwHkcS5xLlnnnnhwx/++Ld8yzf8w3/4DweDoajL3fus3/uGb/hLf/AHf/DmN7/5ve/9w1XNhrWNWNJNrKpKJDhnADhJ7OrqkAiuXbt29913/97v/d5DDz30C//2FxDM617/Rs/46KOPv/TCy4GBbGYoSdIeM0vgwLWEIOKBgx5GP5s98cQTg37K3NRTGLOQNCqCwg27cM4gwKWL521mAIP3FRnWQhf2QmhmRfW+933gqc8+u7u7/0d/+IdJr3/9xvU77rjjq7/ma970BW8eDodPPvnkww9/4srVF7WGUz0odWhS9uq6zntpzN1zxioVGWNs4rr0ryFLBJjMylld1aGhikb68EK9SbOzrNKkbQfUYvxKm+oRWV/XP3eYI/2fu5rnY7eldhAIYMjm6bFTJ9lgDaylmzFTcvnni2K4+1cBBmzF0BzoqOuUBm0ppt9bX1f9fv7aB+7//d//+HA4rP0Bs0+zvCxLBS3RsxU5o+80so3PjRZJFN7dtx6Wst0JHFZVDg/6ZuoMdCpDlhZFWo07Lln3+66c1g+LezxXoCJGKLQuO24L6pm52ygm+vei1zE+U39SlqViJoQQTAcIsFlDY7CNoJtOg8W4CPonMg2/01dErjeZTHReFWKv18uyhIgcGSIY9LJBr58kCaCEECyxYoxIo1QBsycBJLJELPOG6vFIxIkbY9
RpVntvrBWAEGoEBDD9fp5nGZGaTTFvxXrPAQNPdc0xhLm6Y9uG8FU1Q4J+v1eWpQTWxGACIIG607f4sOilQ5BhcacOU5QxpihKDmE4HK6srADw9rXrYJEZva96vZ7msWurhmef3RmPx9rKIknTmht5zAg2TUQkiEC7UxovjMwiSZIsT5QkuM0ZNh2YbmiDtXErEVEVuJj1cyRVKzVaawN7DqIMURXB0LbpVXqbzWaq+enY1Iyz1moTRmOc9yyCxjhhRWaG2awismSt1oIniYZROUmSEIJWtijojYJP6XvjUY1TA1AvIwAY0Z4CGshnQLKIGhBEIgfNqWy9drBcmHT4uEcHX2sgsp4PDj7Nsj//tV/3i//mnRfOnZ8cjICZBGpmAAkhVNVsMBjUdf3MM8990zf9xe/6ru988LX3r6ysAECSJFXprXXKIe+5/8F/8A/+0V133fNjP/ZjW1sbKysr6htoe2+LMQ2EPjOXVbG6NiyK4vr1q5cvXzLGfcd3fMe73/3uH/2f/t5r7r5zNj7x8J9+oqqqs2dO5YO1YjpF5Dp4ErZAImwNoTEgvLe/89hjnzl79uz+6IDZ63Fn9tLQj3qVBLQ2zAckuHjxFjCSpqYqq8BMloDBmaQsw2/95u8GTE6fX/3V/+1XyZjbb798/Pjxfp7Wdf3Uk59V/RKJfV0G8apeMHOofRS66ieLHovIjljbZDEHgipUBI3CuLu/d3x1RQjZBwnBGFPXtbM2tFkRqmIBgFpxXfbeFbpdsw3bMDnOBzA/FF1u0JURSwS0xPY1okRErNjaPgiAS5Kd6ejCrbdTlpQcRCRNElEsfbvQeHuJscAhGax3NngOiPqWufuQQ2iLVrQLJYVQC4eLl275/Dfd/9LL1/r93BijgYQlyafIglH36c68a2d0xWdX6OLiddMjdihKejPhfeRPoGPU3uztR65gN3bbfS91etdr8Wg0VlZXV/v9fnxO7MHQyrZ5AxlpISfVbmbmsqrqtteYNBHZmPZFh4fa3X4iIpx7MqM9qla4tVar+hwZ51zqjK9KJMgSCywCjVOxLot5HBfQGAMs2piv++potCGiQFNNX9f1aDSazWYiIj4453yoE2eGwz4g93uZtfPEHGbWcJT+zzgLhAySZClig1uSpmmWpc5YS+bUqVO9XpZaZ62tfRk6gC3yOVzQpnt0d18nEhHTiqKo67Lf7589ezZ1NlRlP0vrsshTl2UJIiZJUpYynU51sxR3BQCybD417sTCTVuAG6MVuilxETQPIMZTl44Joio/DUxptFl12dW0jWEOaU1e6Cgc0poLOtkQQp7nKysrmqusg+n1evFpatjppLR5M3daC1Pbj2s0GmHbJ7E72m4uoUQHnSJ8KWyhDxwCiCAYIAQ02n1hiSdAR/rqLh368GdepEkqZAyAfNEXf+Hf+Jt/7aMf+3iWp+pqUuVjOByKiPdVliXWwrd/+7d98Z/7cysrKyKi4jlJ06ZSmQgA1tfXv+t7v/ed73zn2trGJz7xp3qs1ItgbaKOBFUfB/2smI6sNSurg7IqdnZu3HffPf/hP/yHu++5+3//9V/P+oPP+/w3EyWffOTRa9d2iZrSCWMTEXFJgoZCXQHR7u7uh/74j5LEeu/REgCweIZARMwSQlDz1xjjnNnZ3T59eu3kqS2QAMhq8YdQG2Py3soLL72ys3Owubl1sD++fn37bW972z133Zk688ILz33mU49sb19XXIhpcVDV02pWSmBnbJ5maZpaR2SaFqUNMQvoeyNlAoC6EzlAEPbeM8DBaFRz0AwAIlKakMDMvgEeiRZtG9DU/WsweBftDVgUbHJI4i5dS4e9+6sjrwBzeUxENQdx2FtbqSRUoXbOaa5W1JVf5VFHDqM5+IvJz+p2buMpbRAntcYArq+uvOnz3khqMkIwBttWfsvMgjrZQNgmX+BNkpvi57imR8j1Rfl95D1yc+l7WKbehC/fVFc4vHbxUiE3mylccNDeq8qktPoIEV3bXFY6LpQl6ai8IDqiI6cGRDIOcSHHKq5DnKAseGysIkZFMdD1zfq69t5bS44aB/Xx41u33X754i3ne71MObjOJU1ytYcAoK7rspiFEFxioKMzxsXpcnm9X61DREwzV5ZFPSuIKM+zXpptbW1meYIN4g+IovW3h6+qZlrE7KuamdUoDCE4k4jIcDiEwI4MEa2trUAHeaM7nu5Wdj+0FHKEw0rNcR18XdevvPLKtVeubG1tnT17Ou9lIpLnaZ7nlgyhECECPP/880QUm9XbNBmNRiGEEOq4QTGgEFOKugsV5bEOvjvObm0uM2uSlBoiEB3pjWuWbKeT7pwwaH4A4xy7VDEcDtfW1qy1a2trk8lEIxR6T8zYUqqGaByEEOo6Ko5VVU2ns6IoFe5K1QiFW4lvnO8LIiKG2lOn0yIighCgAbJCyEgBMGgXkCXh+//f5eval+VwbeVv/+0f1lBIXZUgUhQFIuZ5tre3N5tMX3nxpY3VtXf8o3/07375nTvXbyASBDbG+lY5A0AQAGMB4Mu+4it+9ud//ju/8zs/8pFPhBCSJOEmfd0RWREMIRTFJE2dr8s8TQ3i5sbas888c9vlS7feevEbv/Gbvvu7v/uJJ5685eKtd73m3mefff7G9r6xibMpIrregBlAUUSEX37llb39SqH0vPeCTbkas3CAtiebCd6nSbK7e/DmN795OByABCIAQpukgK72XAcQpnvuvd/Z9MaNnc3Nzf29nU9/+pHHHn1kd+cGh1BMpzvb2wf7u76qEAKSGItJatPMpZmLZKl0lVinUHTR7SwATVZpCLHvggHY2d+rYhZLW5jOtW9CxYgAmmvV8UW/anhx6bqZCFhim68iKY54GoshQqJZKPvrq2vHt5iQmbWDuDKoI5+zJHG6z+wKEVkq5pw/rfXCh5p8XWslzOXLl974hvsO9vcVK7g7H0XxEhFoO+keFpOHBeGStFtapvjwpQ9HCvLu3I7805Ey+8i3LP01DnXpm/hBgyLq3HPObWxsnD59+syZM2o9hAbnYSE8pkaGhhBipBZaS0hakE6a9ywzEaEwTiSy7CVxuLSqUTjpf7Ms00emabqysnLq1KnTZ06dPX9ufWstgHgOCEbIkEtKH0IQPeTOOTQkMHf2dl+qGUChg4XCbRo2AGg1S5ZlzpnzZ8+urAymxRgFrIl9fvQ8EwApfoWmgAGAMaYs636/f+bUWQAoigJZNJ+8LAtr7cqg312EJd0oXksy+NDuN+FDIsqSVA0X9RO8/PLLly7e8toH7l9dGeZZWs6mgZtcp82NweOPP3pwcKBror2V0M4hMKlNyGp3qlGqpNW6WiPYdLWuJc1JFpMhoM02iIJtrrl0ph9nLdDNRp7DteoiKIyRNnxtOhY41zoDF+IL2FrSkVDVfI8+bSUS5nmcRVpfd9SQoO2JhETGWMQWWRWwVRZMR300cV/a63M0fzV9YfmS4K2lg93dD/7RH9x15+1lWeR5bgz1B/nm+npdlgd7u6PRyDnXH/Q++ME/+YVf+IXf/M3feO7pz4LBxhMugkTBe
0AEEQ7Mge+48zU/+vf//r/5tz8/ns5u3Ng5fvykThY1kGRcah0JpNZUs2maORE+efL4ZDKazaYPPnj/z/7sz917772/9Vu/nfb6b/i8z+cgzz3/0mhc2KwfvBcBFhBEQPvMM89A60XTGXnvNSaCDXY/G6Sqqvr9vgg88OB9RFSMx6UPpQ+eyQsxuCzv5/3BI5/8zIf++KO/+qu/+ulPf/ozn/lUVRYh+Nm0GI32W79OAGRjsN/v53meusQZa5AMkiVjySTWxa6g2Gb5AUDlS+89BO6ewSxLdvf3yrqSGLFWodtaI0sGRowkdE9r95slUXKkaDiS4d/s53PqUbhKFoWQREQFqlw7viWprSGgIUvGIiHiUo0fHiV3l8YAi8yzM2yeUy8iSGD2IQSLiNooI0vSu+6680Mf+WQ/y0fTQmGuVR/EpkbWwKKE7848vnVpfIfF3uHvu99wJzIaZ9j91c0W9/AaSRtR6P6cFpF94m+pi3PU+VOaupjCurq6urq6ysyK5q9YQlo5pz/XEC8tQhZEY0g9kCq5lRtmWVYU5eKSGk1JCR2giS7XjmmuUe7qDXmeT6dTa0wIdWpdr9fr9Xp5LzPOTafj0Wg06OVIGAQFaFqU1lpC8kFCCKgRJqEQvJA2oiFUziQsLBwAUDl1A0ZtbSKCdR0yh3VdlqVHhNOnT549c2o2q7aLfWst4qxLGAwiLEQg7IXRGGcMeu9PnTgtEl586flemhVFked5VVWE6Cz1+pnsLWS5t6S4pOotqJ+iaPdzAlO6EgAvyD5UzjWvnkwmzz/z9P33P3jm1IkXX3z55ZeuTCaT3d3dUIfBoPfss1efe+6548e3wEjmMmau6zrP06h8wDzpgZjZGAvAKqJUirfjmSPZQluuo7IZEYi0ooZ1cxVgIXhFSmHEBpPctF2SFKJEeaIE9tJ0n9U4a2iLQwBgd3dXkwRv3LiR57mmN4cg1s7rK0yykNgclcK6rplF/a7SwZdGxCSZd0XUiUSvOzPXNSuAr4kdjdpdAkRAZdGaTsUi8rn7mufbDTwX0nNnNoYQfvmXfvEHfuDv3nnnhWG/H0IoAgtCVc3UykeCPEv3d/cuXzo7GR988k//lJn/wnC4sXXcWMshkLHGWl/X1jlq01nyvPdXv+3b77vvvp/6qZ/69//+1970ptcVkwkzE3EIAYHr5lCr35iZYTDo13X97LNPX758kYj+wl/4+re//X/8ru/6rnMXL013rz/+5JOXKMn7vSTPy3KWplkI/tEnnkSEoigr77Ms99yUTosPgQHVnYuMiFmWicDtt98+nRW+LrPEMZks75ezenRw8NGPv+93//O7/uSjn7i+vV/XtUttktrr16+BBBAiIWa2htJeL0kNAFuTtRreHBQIWlaJTYKCQYQQfFVVnj2yWGvZmIAizBCQiEbTalaVmGZxm4iIyHiNeREAg0hooL9uIi+6rD7+87Ck+G+6jvy56jQaUq5DhVly7MypUkIgICGuPTUlzssdgw4/c2nAHYDrprpkqf8TERBiaBpHBSsigCDALnF33nnn+XMnbuzsMiAain2UGgkBRoEaunI+joAOpY0cXoXuf28y+mVDvvunpTsPP6qrqoQOpGX3mZFxLA0P53H+uUlhjMmyTG24LMtUyI3HY0WqU4yFmGIAi67aJU1F+XKLaG+1dyE3aUq+u3rMqKhtcfBRP4gT0VMhoro7EkFRFNooxxg8duxYf9BDgl6vV1ZFgMFkOgXgPM0CiPhQliUD6tv7eWaMCUEAJU3zWeXjglAHZFgFiGZLImKSJF64qEpHxMCImCbJ3v44TZPTp0+maXrl2g1smxiKCGKDiQPiiUyQYAyGEBDgxIkTBwd77DldSatqFkKwxiDA+vr69evXj9SNmBdqE7sWUUtFoWNICTatc9FXDf5lVZVCxjk3noyee+6Ze++9v9cbnD17dndn/7HHHtvZvTEYDI4fLx5//PHXvvaBJElmxUzDYG3cF1Gdq82QuA0wB02GiiFD5nltld4cK3+IFvI7pFlhihNUItRE+sOKo/5Xt4M78JD62yUbWrPJmJnIxpuZWaPBAJAkSUxiIGOsiA+sDsn4fTTioTVr4rDjhxCC97V1Sed4UbtBCk1mpLEGFmA7P8drQfrOL0bEyeTAWtrczLfW165f357NZknqyuCbvlhk0iSdzWYbG2u9Xm9/fz+EcOP69YcffvhLvuyt0HaCAiDrUtUqal86Y421wvzAg6/9mZ/5F294wxt++Id/5O6770rzfDoOzjkjnJgksHCbKp8YCiFgatfWVqfTCbO8+c1v+pf/8mc+/vGP/8AP/MBb3vLnHnz95gvPPbM7Gp89ezZNeyy8vzf6k4987NLlC5NpQWSqqgoMaJp141AbmxmDLD7Lsv39XQA4efLk+tax7StX0mxA5F5++cYnPvHIe9/zX//Vz//C+tpwffOEMWZ3f89W5MOsKqcigoxZ2t/cXE+SxIsHCDZFX85PemunNr5T02BgcdT7Qwja/lajvERIQBCCMUYAyrpWkzESDLIASXygiGjVY3NQP7fNX5LKS9dhCQKvGrWNbe5IAABFQhX82taxleObu9UEEA2gr2tLBhGEFhSFVx9J+3aMLGg+UYkwf5rQpk6jAAA2c4mgKcqKLB7b2nzj573+P/zqb28eW5vOZtLABTd02ZVwR7346EU5rNEcOY0jn9z9U7Q5uo9aevvSuw7vxxLzWhpz/NK0Rb0a3yUiLZrc3t6ezWbOueFwOJvNRqORdBAPoC1O73rzou+lLEt1SnOb1NMCZbh4TxTbzA34X+hUnkjjM9QTgjpkbrrgoUI/aCKG+ldF5GBvP8tXfAivXLsKgfu9DKVxchZF6TlkWXZ8a3Njcz1PnDVYczP+OJK5GtFRU4jIJBGcXZIs98FXVTUYDDY2Nm5s76VpurayMpvNikLBvxo1QUQAhDnoINkLAu7t7W2srq2trpVlaQxZY9QFiAgKrhkRGZdUtI6C1T17S3QYITalLCtLxlhbVVVijLX2ueeee8sXPhR8vbt949SpMydPnpyenCLJY4/J+fNnv+Ebvn4yHu3u7ua9lExjGjLrjjRQ8lEgKl50fHsMTBhQ8jBRgsYUpyggAQCRYsBCSVf9DdHL59umIFGcSwc9Jv4q5s9r8QwAjMfjjY2NKK3j/sZjAqhi0cxhRlppFJ8Z/fZEFN+/9Gr94HQAElFsW6hn/Qyg6Lu6P4gInxsX/jMugbouE+c+//M//y+87eve8+73nj592hkbgqDYLMskNJpHlmVExOLzPB+N9vv9y88/99y1K1e2trYIHQAANonoAGDIAZKvS+tcOZttbm197/d+32233f6jP/qjTz752bvvvms0GkFg7WyRJUlZ19i2V2EGlxhAi2CeeOLxc+fOvfLKS29965f/xDt+/Du/8zvPnr+4u7v76OOPr62tnTt77vr29gc+8Eeve92DV65ctTaptK0nzFkBcKP79vPs2rVr999/5+233Snep3n+/PMvferTj//n33vv//Lvf31lZeM191wuptVoMi5nIc3SsizG
k1Evd47McLiepf3gUVisTcAEDrUzCKZpe6XHxDMDoL5uNptphwZjjPbPDqWA98AiJMbYxBoH3lrEplWXepsNgkCrRELbXl2lOzY46hqVmPs7GUTgaOTneLrx5sHdRY38VYlFYQ+YDZECTW6dPOGFa1ZfDjWBIgRqAVxv9tj4ZecIdLVSIVIgv5YNtb9SNgIAtq5rm5gkSQSNQXfXXXcB/LaeUlFHRKO/iMYh0Mwd3PHUvcrIlsTk0s1LE+gyl6WJHbnWN3t7/L77zK5cPHxFG5TaosYmTwoEAKqqUsDxwWBgjNnZ2YkAvxF7wbctYpY0DH11nucq+fI8VyCtqqqGw+FoNOna0E2mgwB0gPUhIqYelRSgq6UHxloyxly9epUM3nfffZvrGzv7V+u6NgQbq2uJ086ptqz9dFbs7R5s71wnosuXLt57910bG2uz2ayf9cs2OzpG+AAAae5UMG1fHSIKwU+nU+MSAHYu3djYYHmCmdfXVxUcuywrzVrS3iiGUASIqCiKfj5YXV390z/90631jV6vd3BQZVmWOLezs33s2LHJZLKzsxOtrrgacU/jRhPNtTEtczpqicQ5B9xoM8wNYvMTTzzxlrd8qbqXy2I2HA5f//rXnzx5/NFHHx2PDu6//17FWrHWBgiqk7XO/2ZG1lpFjosh1a6ZyJ6jitbdQWOMMTYegShotQ93m7rFOjC9TXOhlwhMjVRp+9JoyVB8bL/f10TuLMtawazR3HZU1gIzEIEauEkCIuy9EnaM+EYyBgBjGpDkKKTjkRGRJLGmGybohA/m29GeUYhoBZ2DeOTx/DMvItrf27vvvnu/7/u+zzn37v/87n5/qOZ9XQdgqapKBJxrVkmdCvv7+8a4F1547tixY/qcKH2h5QnWpcI+zTJ1TX/113zN5cuX/9W/+lc/8zP/6vWvf9BUXjwgUVXPnHNVPbPWhRBSa2Z1pZ0ch8Ph9vaNPO+9/vWv++mf/n9/+tOPfu/3f/+DDz2wsrr6xBNPXL16/fnnnxeBsiyNcTE7BKTRyJMkKcuakAAgz/Nr1/mtb31rr9e7dvXKf3nX7737Xe9/33v/gGx+6dLttZdr118QoboSFjOZThH86urqyjBPrQOwCEZtiQC1IDvnjMQs2qZ7h6JFjMdjrfgwxqkHRcPPWZb5eqbtOIkocYlhQGQGmFVldMgBWWRwiGWjLrfQpIBCWlADACBIAgEIheVIVQzbQHL3myXuCjeXAq9yiYhBZBAgXFtbK31FzhKgQZNoIxMJgPIqT16yBKARN82fmr8iIHZNvgXkJYFgIc2ZKJSz1JkQZme3Vv/i17zlP/3O+/vDXkBb1N4YgxCCVDYxhAKQdIVrfHfEi+8OrnsPdmxWakGUutKx647rTlJax1pkxNym2/FcyZpbI5H3dX/eVdK7axdfpEzWtHaAJrs6Z7Is1/RUIlKAe0UqjyIzQlBFedxVJuIK6Ocsy7IsU6m2srJycHBQlgWiAHBde2MMAKoPM3Jz2+nniI03A7nFL20NkaYcs6x8YEiGyYmTJ4uyevr55zc3eqnLAKDXW7EuvXbt2gvPv7izs3ftxk4M6n/26WtVnb/2wfsHg/WZnwQOta8bdzrCwXjU7/eRUYWurm2eOF+DBE8m9d7P6jK1Vkj6GV44e/L5F69Yl2VZtncwMsZlmWlWjLD2lCRJXVVElKQWWbzD8WRPnfzMMC3KldUNsMnVG/s1GyLRAF70hx91DJqgrwg4l2uOiVKiOoJaFgNIElgAAIxlMi53z764//E/fezr//zXvfLKSxtrq+trfZRw1223bAzsI488stJzd955JxjrvW/6SSNECYcElqymExORSOu3TLK4g5X3FsBaK4JERlt46FZ67dlgE2bWfCWv2Jkiouts0BlLbUU1AGjb8DRNiSgI13VthH0dEJEDDYfDuq6zJPXeJ9YRkYIqKIRTbzgIIEDOZvlsNkvyXjWbQVkDQJI5Zfb1dJplGRrw3tvcQYm9vCleKoqizZ5rrHmV7vEUswRjSeqiKn02GABLqGv1b0uQMHeJN4VnaAx7r/sGAMLACDbNoVliVAUFQFrVVoPKKBJQ8dGaXQ8gIuzH4/HW1tZd97/2+/7WD545f/Hhhx9+5JFHELLEZkhgbeJ95atZkiSDPEPEUIUQxJpkPJ7t7B1sbh1jYSLT8hANVAMAACUAYKzxvrbW3n777f/4H7/j/vvv++t//TvvvP2W1dXV8Xhs0sR7n6R5XdfOpcycmAQEHBnvOU9T4HDtyivnL555/qVn/+e///a/+lf/6tve9hdec+edo9HosUc/S8bUHopZZa21lpi5yQMAI0BIhBhE6iAeCJjsr//mf/nZn/3ZD3zgD2+77dLayTN1Xd8YXauqCqYsUCOII0QDvWEPDbnUoqG6Lg0GoBIZDaO2o0UbEDUhIDCAZx5NJsruAgMLAggSIVFgnkyn/d6m6Rsoyr41Dg0R+IwSl5xLNw9u7MBFIbIWQkpmgn6KnsAxq5izQtxq840zDFhABAEMkgiD2sEdtkzzAOiC2IvXf6vozb0pLZTkhRBJdg8OLtx7B673ggkuSB9MZkyNUBEHsoyQhIW3RJa+JFY642myMSTUzB5QBAJwYAmk3BuBEAgFJCCABQDl+MyMCCsrK3fccUfyrvdnWbY3LoxmlyCkLmEFFqeF/jxxTF0pCIfEbXcC0Z24JJi7i4uH3NR46IKO/D5yG7pSVm5iPsaL5qkxGLEAiUgrN1QpjlW88fkLer2ItJCWh230uq5XVlaMMWVZ9vv9EMK1a9dUOy7LUpGJqNODdmlrD2tb0rlaNyYqawaAyWTinDt+/Hiop3Xte73elVeuPv/Cc+99z4dYYDiwvcFKVXn1hwPAH33wD4rp+E1veuOwD63ma5i5LAut+tVqnHYr570EvPdgyKGpva9rn+f5mTNn0KQ7O/tpmooPLkmjia9aWgjBOgciRVH0+/2tra26rouiadZrjPHCXlPbDDTy8qj4xZG7qY2+Y6V61N5a1RBjkZIO6dTJrT/95GeOH9/6ojd//mR0UNd1P0/zPD179mySJO993/s2NzcvXLh0MBkbi71eT3gOwykiHOZJnrogKi+jHmaMcU0eeAxJNB5sSyRtVBgIETBiPc416PaD4jodNkl1bdWovXbtWr/fD577/b5WCoUWJEtEFIvDBwEtdC4KdSyrEFVTu0GYkSbLV6dmrTXO2bqWxtynuvG1Bl1YPTWGTKslkzLW0ODHiffedAKBiELSNDCdzKbWWmcT1I4F3qO1gFjOSmO8a1pbzndQHf8M0qyd1la2XoTJZDJYW7t8+fK3fuu3vv71r//ABz7w2Sef+9SnH5mOpoNe7gzmw2HqbF1W/X7fGDy2sXkw2hsf7I8PDjY3Nwkb51Or1TWHjZlVZugYqqrKe71v+7Zve/DBB//e29/+u7/77je+8YHxeKxgOwpDhvMOVJRlSWjBRK9evTYajW+//fZf+qVf+uxnn/7mb/7mCxcvarK9LniSNJsFbUxKWzurOnVwcLC6svJbv/k7v/Irv7K2tvaa19w
+mUy0E4zSs0F0LjMQjLXkjDUJQxiPGhivWmoQyVwGIKEO1oohQgTFGS0mxd7eHrZpntBCeEa+5L2fTcZrg6EYGu3uFNOynydefO0ZPKxaKOvZ5tp6OcKimJC1GbmqnPtjuM3mg8iZjzrLh5n//4VXUGZpgIgCSG/Q3zp2LEmSaT0BkQDc3IBNh7ju1RVPsCiVuwNurNtoCkOUUE31kbTeMotkjTGhqo1z7BWiIVy+fOn+++/58Mc+lfV7DGDA+FBpboghw0eZ/FEwdA2UrgnYvXlJtCyZsF2hvnQd/vJIOX14sbosW72FrWo7/94YBbcS7xlRrDWa5VuWRRwh80JuVFzu7l/NUVhgiDgcDlWqWWvH4/H+/n5ZlltbWzFeCwDRK8CLmTvdRY4SpTue6CgmoiRJNOx37Nix7e1tg4xodnf3n3zs8SeeeO748dV+v2+tDYLT6Ww6K4wxzH5vb+8DH3h/mrpbbz2l+oGqGnv7u8gyHA7TNIdWndIjhC0oBBFaa30I4itr3drqsK7D9es3EGUw7KOxo9FERADRJUkTGTVGAFTRBoCqmMXpA6gTtAXe6mzrEt0fSRtRBEbtBNuCvC7haUCWmUfjybmzJ9/9nt+/ce3a2772q4qiAPb9fj4crCZJ8ra3ve3DH/7w6ur6+tZmCCH4yraAzE12UuuMUe1HRJBE2pYPiGiJjcHWHI96J2vcWlrXDggIamLd/MjoW2yLOgnx5jaipqTi2/Y13nsOgsCRZYcWlBQRNVRpranKUpdIRWld12mWkSUlHjBAIcq8ubJrm1ytTDUJZo5olyqYjclFhGJHZEQiG0JA9siiHQViRanimTAzBxFqdk2xqIAZjEOegWnOQpR8AADIOvuuZ1KaPaXxZDJYXUXEc+cvnD17/vLl25966plnnnnqmaeefuzxz7zw7HMEYowJ7PNeNuwPDMLqYFjVszxNAAWEEWNcgKXtEk8UM2AIAJIk4cBkzL333vuv//W//uVf/uW3v/3vP/DA3SKh1+tpKDQ0/XHnaP4AAILGmIODgyeeeOLixVvf9a53jcbjb/iGb/jQhz504cKF7e3tNHVVVakaba2dTqcxASKEMJvNVldXtfPYieOn1EtczCa6uYiYJIlxhkFclgNAVdXFbGKdeuZEpBYRZ0xVjkMIzpg8zx2Y6XRaFMUtF2/NGV544cWslyNiXQVQ1K1WxdcplNPR5vmzxf7IrQy++lu+ae9gdzIZISKEUI9Ga2trla8rDoKAxpRVpW3+mm26ienyf+fFCCRgAEWg9NVwfW11a6MMAQU0GcIzc1t/JfBq8uimr+DW0FcKF1U4Wi9dixmALEJio+auVF6W1WAw+IIveNMff/RTvTzdPxgbl1hrDRnQJI42vtW9DsunyDKok70Z/xSlS/xSn7Mkb7qviDcvCfVX2cjun6ST1rt0RdYWBxZzd5lZPWbQxobjPTHb5dVHCx2hxW1F6Xg8ds4dO3ZsMBhoWR408cvGmokGVhQhkRFElhdPRdw75bPj8Xh9bWU2mz3yyCP9ft8i7e2Otm9cL4ri+PF1AHAutdZCCFVVXbp0qZoVL774okEBg08//Vkkf+zYseFwmKTJeHfv5ZeuTqfjixcvrqysEFmNtoo0TT9DCGTmjndrbc3MwSfODHq5s7S5uTmdFD5nY63CqwEh+Sbl0jpXVZX6GKLxF3wd6eSww/lzueKCx6U7fIpESAT17zu7e+fOnnziiSd+8Zeuf8s3/aXhubPXrt0QCZsba1manjhx4o/+6I/e8IY3BBANH2DbhAoAQEg6kQJuW1QphcRzCIsqQmRAKMIQQmh7plJQf3IMCZsWFaHbVFik6WyGHa9ACEE1pyzLFHMjSRKyJp5NFjDGpM7NZjMEk2cJABqkLEk1RUSYq7J0rkEOARZf1clKAkTSmkEhBOZ5JldouzW0klIfA+yZ2rYTyPPav852EIAQUa/Xo8YhSUgEIhwCobHWOusAKVRl8N5lGQhICE03QwAwijHdPNnmeaLNFhEBKHhvkuT8hQsnj5+44/ZLL7zwwue94bXPP/vco4995sXnX9jZ2Uldkjh7Y/v6Lecv+HI2m46BPbCWepjuwuKC2ifMTAhkDIcAAKfPnv2hH/qhu+666/u///vruj537hwA1XWtuCWIouea0DibVFUVUPJ+b3tnz7oXH3vssbd+xVdcvXr1qaeeGg6He3s7IqIK9GAw0EJH6fQUcc5573d399S/FfkGIiYuS9M0hACIs9nMzzCEoF6TWRGMicmDAglUsxJAOLWhCDQF7/1kUhRFoURLRFXZAG1Cx8uoqaPXt7fvufO2u++4/eyp071ePqtKz3WaphD8bH/kKn7m8Sc100IQEjLaLHLJMINFztx8AIAOz5SjPv+38gFYlBeISMaIsBUIBGVVnT22afK0qgqyZJEAkBlV8wJE4QUOHylhib3H7xEX3DzzBIi20SoRBU2/ahF5bSi9dY3b0xjjEisQLl645Y7bz734yvW8l1alBzR1qI0xwgEtLk0pjuDw/CP1dN22XYkCi2zxVdSNeP/SPXjUXsIim+vezLz88Ibvt+WM0MJaqVyJIpt5DsSvwibK1MNLETcJWyQKRcLSxnDGmOFwmOe5pjZ03GvzvNalOmBY5OBdDUY6ziJEHA6H3vurV6/2+33vPQQajUaps3XtN48dK4oChPL+cLq9XZbl5cuXP/yhD4YQkqRvDD766KN7B/sXLly49fIl9uHxxx+dzWaJM3VdhzbTuLt3zJ4oaVYM2VjkigF40MuPbW6MDsbTma+qKgm29t455zmwZ9Q6DQYiYhEW0cXsxvWpzSqHo9LdjyQP6Xibo7Yki+qaLKreIkKJTZD2Rwer6+tJkrzzne/8G//DXz935lRVzaaTGSCfOHFib+/x559/9uzZ85ODkSbvhMCtdBRjHbbmL6OkLlOhonHTcVUfJg91C0cfbxyzhIUGmsASkbb0vboyQbjLGrROXRgITc21iGRZpp18XJqIiNpkNk2IqCgKNXxdklRl4ZxDJIjIxmXZ7+cuy5DZOYuI3nubJMrr54tGWvdsotpKbfK/ALJ2UNYSKRbt8dHmdM5dZSJzrI/mHLVCXZEMQwimxRrzvtK3G5POiYEaDA3lbcPhsCzrela5LDPGQAhe2Do6febM1vradHrr+XNnz5w+9eyzT//xH/9xnqbGIAqMRwdDXP3Mpx4ZDofrJ06ICCKDlisDz9s0NRMHarQELyLGOg7BJunXvu1tt95220/+5E/+yq/8r6973f1BpCgKRc4KLC5JVUNKe3luXF3XJ0+erKqqLOuHHnro1KlTTzzxxOXLl9QhpMxB6xt9qDiApp4o8vbBwfhNb3pjURQPP/zJtbWV0HZD8d4XRWGMqYOvah8EjDGzskYAEJqWszRNtzY2xpOD8fhAhFdWB0TkfRUCaT+u0WhE1JSA6zeKW9A9SnVdG4Gv+9qvOn382PRgFELd6w8FmZkN5qtZVo2nWT/zdSk11mUFhOZQaFLJ5Ujd+mZi9lVk8JFyZOmGhXcRgg8WjIhQ6tZOHWNCAE
AWtCgAAZtRalNrkPng4SgudJg7tZxfZxRIABANEQCjQFjEurCIyEEA1bqq0NgQ6pXV3hvf+LrHf+U/bh1fDyFw4Lr2iGiTVP0YS7ONMrW7yvplFCTUyc86zAeXfgWHBHPk/ksL0b0ZO5vNh+ov48O6j+2erk6kEKLsZPZdvql/jZHa6Ps9ckhdbUOjv5PJRFOgtTkrd+pZocWjhracqbtWcUbQke7xr2pB1nXdCHXgLMsmk4mI+JpIoK6LLEnX1ze8vwagvj2s6/r82XOfHgxefvnlJNkqisnFixe3dw8efvhTo9Hk2LFj48lsPDro9/Ms6xERGNKqEWxS4bz3vqql18saKYJIBhyhWBr0063NtSs3dpLElnVd1TPrUikbx7LaUmrVGWO49TFGhh5zdujPOmDd3QQA5lqTyzSIEEKjTbVm1oL6AgDMQJldHwx293ZEZGNj6zf+029+6Ze85bZbLwqCQbuxvn7bbbc99dRTGxsbsQ9090AagwpqjSjRXTxP1OpQhUHSqnpsE1yJEoPohWPuRpehcIu1wm2ontp0iqiriQhqwxRHIYQ0TYuiWF9fVwd4dPg754y1IYSyrPM8r+u6gfNtH26TxBhTVTMdKyBqbn9RFINOWxF1VCBqWlmD7BbPEREhGeg0ZQEARNEoI7ZZDtYZYOGACBi/j1YyAIAEIipmhXPOEKAl7oS6u0emqWhChMBZ1svzYjQardsErQVCiwhcAYpzbu34VuJsv59vbW0w84vPP59l2WDQCyGwhJdffvmxz3z6wTzPVjZBAFCCZ2NtlMGRCWjOFxmnCUNkTPDeGHP33fe84x3/+PM+7/O+//v/1okTx06fPr2/f6A/uXHjhlLa9es3BEAErCXv+W//7R92zt19970//uPv+JEf+X/dffdrnnvuOY2vg4izFDzoPmpMQf3bd95552g0evjhT6ZpqqWM3GaA1nVdBs57vdFo1MsyS64oitQlp06crMvi+WefRoRLt14ofVlVlTEogkROKaSq6iTBJEmKWaXdCZVsoidJ4Uvf8sWvG/ZzFFhbW6nrUoCLqkwHmQQGZxzRytrKaG+/1+uVxsymxWE2johgUKuBGZvk58On+1WE7s2O/+dyv8JhokDw1XBzLV8bFqH24lE7TSEIgpV4xJZfsfQZD6n42mmYmiwCERHtskoEzJrYzuJDpChrmr6hAIQchOsSrXPO3Xv3a245//tXrm67tEfWGONCUBlw0wAwHKUgRGkfKTh+v/SNXpG7dYXx4Sd374Gj1JAlKbj0EDxkV3Vt0K6K0BWi3UeFDkZxnODSq7mN1DJzbIcuIlpMHJ8cZU+M23GbOLO0zYdnF1eDmTU4ZK0Fgclkokc3hADGEODBwYGIrK+vv/DCC+pkruv6ypUrd9zxmt3dXSI7m1Xnzt0yWJs++uijz794ZTSaqA9mOFx1SabdAoAlJjEp/629d860yV+MwoRS11Xey1bXhnsHo2NbG8ZNKu+L2YyI+v2+TjyE4MmjgAhqZTAzizbrbNe5K42O3PfDatySt5OZAZYhWbr3z8rSOtdfGRpLk9EBg6TW/cZv/MZXfPmX3nXXnavDFZGwurKyubFx4/r1CxcvarpKdy/UceK9N8aqjzoCjka6ApaOMi0iAoG11Sgzm9bvEomhVT7mEFdKG5EC4wQJG7Eda9YBoKqqJEk0vgsAaZqmaRqgKZkTkTRNqcUVgVZeSgyseK8b5JxzLsXW8aM2lrpzYt2dtKAN1KQpgTHzWE/cINMCdQEwQdIcmcRi2WbyN60QbHsb+LomROecZgUze0SrxBd3vYEjbu3ywWAwns5QmzJhDBsbtAY828QNBivrm+V9991njKmqCoCstZPx1Fr7+ONPBoG77n3t2tqa9kkEEcAGClCnrMNmNeqRlGOLCCAK85kzZ77ru7/nrrvufsc73vGe97z/llvOrqysOOfOn7+gRbQnT54EY9dWVpn5q77qqy5evPjv/t2/A6Bv/MZvfOc73/npTz966tSJ6XQavE+SJp8rJjAi4sHBuKr8Bz/4x6dOnbpw4YI6ORR+PrqjyVBRFAYFhYOf3XLm5Hh88MqLT4cAf+dHfvCBhx545tmn3vve9z751BPT6bSXZeVM1FOtgj/PcxGcTKfdEyQiSduO8E/+5KM/8RM/9d//lW+99dKFYS8XYJc5Zg4QnHFZbzA+c+qlF14CARQgIuA6cjktwekydkSN5i/ZSP8XXEdKBEQM4g0ispR1dcuZU5LYQryIOGMQUBAEGuezMCPNveVdHrLEjrqfueWQIsI69/hX7iLLNu4627iSJHjvrUaMCMpZcfzY5lu++M2/9Cv/qd/H6axESkLgxGUssziZ7ji6E+6KohgrXbq5K3u6Pz8sRA9Ptftl1xxZenX3h9LiBiw9ZolBxAFEA7ebPhN9m9yp0I3DPqxMdOc1t/ZaZEpl5b4F0IkUj4di1bh4Hbk+0Z9Z13XimubEzrkTx49Pp9Mb164MV/qPP/7YF37hF25vb29vb/eGgwsXLr7vfe/f3NxcWVkrxhPn0jzv744K70OSJEVZTkb7RTG54/bbNLjIzMhRmEkbpmy839Y0xVFEgACpNWsrw/r06TowPvdSWVezslxZGTBDWZba303rmkJVa+kktnIIOhH3VyeApVWSNvIqHRcIAMRNPPwoY8zu7u6g39/cXN/a2prs741G4+PHT3z26WfPnz8fQsjTbHVteOzYsStXroxHo5XV9YgxGXNEdcvqto0BdsCWEVHtXcQYrxQEIyKWrOYbWGuZfQhojHFJqrSH7fFR8tAN9d4zCLTKHyJmaR7TApxzRVH0ej1m1ixolyaatywiLE1PzBDq1fVNRayOOOe6gGmaa884NIaC6ff7iDZw8MK5cXoirLVETU1E0tjNlU55Nps1+KWslcQVQGN0aI4zgBA5tAgszMEwWed8XYuIgCBpcqrUtZp0ipc7JwYRhUVUtsjzPCwEL0xg07w/mhQA2HBPACAS75kheF/54JI06/WPnzxV+fDMM8/MitJYns7Kzc3NoiieeOIJk/QvXrx48tQp1CIoEYAAYKKn11pSLVnHFFiQ7HQ6Mxb39vZCCPfef9/3fN/3fsEXvnllZUXjqa0iCFmWFWXlnJtMJte3byRJ8sILL7z97W//y3/5mzc3NwGgKAoOQauHdVvTrDebzYxx3gdr7WAwePzxx2ezmbV2f3+/1+up36sdm61DRYh5r7+xvloW02eefgYB3v53/tZXffVX3nLLLd772289uzJI//AP1x7+5Cf39vaybKgsyBiovdegW7/X087lIuKsjbE5EUmS7I8/8siHP/x3vvNv/pUv/ZI/d/z4FqIB4DRNpfIuTTa3tgaDfjGalMU0MUkpFZIQQIM62vp16BC3/L/n8sIWCQRqDhvHj4EzFESssdaiMUItBDMHZkYwQHNx22VHXU9MlHqI+H+Q9ufBtqXZXSC21jfs6Yx3fnO+l/lyqCErU1WVmkolqSQQIKAFkk2rhd2EgogmuoPoCLfbbv6wTYcbg7HpaLvpwCbARAsKJAYLEEKiE
WhEqlJWlaSSqkqVVTm/8b47nXuGPX3ft5b/WHvvu++59xXl6B0ZL8/dZ59vf+Oa128RuVahEkT3AIAIFAIDnyvGALJkjYwDQrkCACCxtdYHuvvcnTQG4GCV9sQA4Cn0YbSx53PuByX1e6x6FVvX1AJuLXUdqe0+8HmlsyOmfQbfZ/lr/LvrT99sCw0b1tQLz+mrsJ0m2r1Ia414DnW5G3VfDsJeDcHuvWvD7JQM6ZLUiZNIY+qqiPR0uK5l7Mlc3DNHC3Hv2LkYi0IPRkCSqXyoh6Mkia65unr88P79B++//JEPffnLX148XKDR1trlcknOJ0ny0gsvekf37t0TXptEkRqNtrc3R6ORMWYwSEWa614NAGmaascAUNc1aRRNRc5tVfkoine3t04WK60x+HqQpUVVAgFwSGILAHXlOZzZFUUDhgvCU7d21PPldFOxtt+6f9sw1GYHiuVmbYsyszI2juPT+bws8zSO/sD3fWr/0YOvfuXLs9nJe++997GPfawq8uVyKbr7crkcjaeI2AWdydKotpKgcEG5KeZoRFStBAXQ+DW0aosiaM3cCHYhMCI24dMAZVk61+S+i0NOdFxpShCzsYHVrMU4KXw6TdPj4+M8z3d2dgJTt6tDa2aIogjIh0Dit5ZNGCdJE8WNjU0QEauqRpTcmCgAo2DTay0JvmJlaaQwAImyVlo556qqTNMMWq4AvRqMAm5AKKlEMgnoXUCjxc2IiNoqBkoH2Wq1iuO4LMs4jooiZ04ik0lAKEloS6NFKQpOW1TaGGPLokjSFBiJWSkkVGLkt1GCqIcM1iRKR1UdHj16NJ/P87IGdRrH8b0HjyqP9+/ff/nll+/cff4MbYacVOrk1uMjQmRVlovlMs9zgQo4PT09Pjmcz+dlWVZVtf/4YD6fLxaL1Wq1XOaLxaIoiuPZSVOWKoQ//Ef+yKMHD4fD7Cd+4u8BwNbWhgQkinOKCIzREjJC5LVuAjM3Njbee++9P/Nn/sxHPvKRn/mZn8mybLlc6hYlTQFOJ9O6Kt5/930m+Mv/5//qj/7gDzz/7B1APjw8VEpNh+nHXn050oqDf/vttw9PckaApu6eo8bKEhRi1O7kRmpEZkZt7O7eIPj6b/y///7Dh49/8I/+wPPPPRsnSQjBRNZTGE1H2XCwOp1nSeyqJpkKAFAxBR/CWRJgu3ZnDIWoJ1R9c9dFIiBXnx/16YlSigMUvrx5+5l0lC2DJ5BcLBRvVSBAYk1gUHMvELpPPS6SaDyvC3GbvqwYQJHwB+9C57LsSLoRQZuaadBNtiJTHNm9ne0/8oc+9dM/80sbm6Mqr6xNA0GfD3UE7htoJ6Gt0HeBEar+fbiMrfa5Y58DwQXOhBf00f5Xly7b2v2u5X4fmLkzufe5+1q3+1efWa413hFr3SKK9CO/+q/u9/PinU5uwJ4JIc/zKIrEFExtrqq1lsBbm4yHaV1WHKp3331bAT/33J39/YPVsqhDgEDGmMlkulwu33jjDReCVWo1X1zZveNcFUd6d3c7S2OtNaBAxQlOVlfXyCBi8ME5LxANjRYeRZX3xGAUTsbD6WhINEvS4cnxQkgJERurEBtUJmZu0a3PTW+nUCKeM2wSnWPV/fstvTibHACA8xjC3ZpWVRVFpigKawbehd/93S99zyc/MUizL/3eF//5v/iXd+7c2dnarF2ZZZm1drGYd/KHMaarT6d6gVRdV2VRjDGS44uI3fw0HUMAUOLkRdSN8AHQWD5bDds0ZWhNU5w4ibvawFKNtXtpFzKzubl5eHgYx3HgJnrLe58NBwDQRM/2pAdpvyN8RKAUozFU18vlUmubJEm3Rot8tRVFzMwEcRy3cdHUpRS7du+hVkkSe2+qqoriyFd1t8M79R0RQWsi9kxKKl0Gb4wxOgKlAEqlFCMwQu29MdbaCJXmZgUVaGgsmMRKaUQVvFdKeU8AipiUUsQKlRKvs/eeIWgTRZmKar+zd3VVVKTsZhRFUTQcjD788qsysUXt3n//fWb23hdlKXWl8jw/PT0Vhjqfz2ez2Wq1EhZ+3F7OBQDQGkNgqaoiNAwRBYTHeSc48Gma/quf+/nxeGytvn371nw+F8nVGMMAqJSx1hojVcNbasmSByUC9zPPPHPt2rVr166dnp7OZrPlcrlarTam4/3H+5GBv/Tf/B++/1Pf9fyzzwyz2NWlc1WaqBACU9iaTD7yoQ+yD1kaf/lr7x4dnuR5jtikoXvvEY3ULxeC15IdBQCesKi8RrW7t/G7v/eV995778d+7Ee/49u/NYnTQN7XfjqZ7l29ks/mZR3AB09n+QgAIPQMEeE8xYbzIvU3uL6ZZ+A8Ie3/hIgIVV5XH7h1w6ZJWc5BQwpnqc+yKxGbOLw1Mv60bnQ/BwANyNAv/QvM1PqfAnMgBFbIzAHYQOPDuKBsebexOX35wx/8Zz/zS0lky9rVdaklX/6CSRkucI7uq04R4ctcdGszCz3fcJ+S9leoz+rwvFq8xp6/mfXoM91Lx9U1uPZwR9+7/vTb6a/Hxfb7H9bavLTDa7PUn5BO0JOa4d0kqBaWazBKr17bI1eXFoGmCum99945ONjPhqMoNsqpuq6dq9/6+terqhqNRkkaVVXlwRvFHsLW1vZ0MopjiwoEq6Z5vVJKGUQdSOJCtfdn8WhKqdqFg4MDQJMNRtvbWwAwmg0Wi0WxKpVSFASSOuRUkKdW1WA6cwGfyVuqB+TSelDOYvrgPBu+eGB63zbJAP1NYq12zg3SbLFYTG/ePDw++s3Pfe77vve7lYJ/8wv/+nOf+9wnPvGJ0SATypgkafd2bCv1AkAITmakKw69ZlmRdelUYWY+y9NnDsymp7h3hRPwfLIvETGCsGFjDCMobBg/t5o3AOR5LlYQ7/1kYyowL1rroii0tkkSlWUZPHW8sxEIxNTTFkmMQcnonPOdhaYpQBK8TH2XCNAtOjOjanqbDodVVVVVLUMwcSRp04yKUTWItoi+7rlmsKlXCIhAxKg9gatDHKVEZJLIxgmhblzAzAwBUDzAYAVEk8DYWBKU69ojYu2rqqqCb2CKfe3yfCXZB+lo/NyLH5CJLcsyz8vZYnlyclKW5cnJyePHj4+Ojmanp48ePXrw4MGTJ4e9/SYdlFEDERijsiyT6ssdFQ1tMr0A6ctbBOmJiJBhe3tbKcjzXBR91cKeE5HUTOunWQtH1Brkz7/9t/82ANy8efP4+Hhra2s0GglW7ttfe+PP/6d/7j/7T/+TF+7eRsWrxVGeL8URwYG9FygC2Nvd+ehHXy3L0kSjN9544969e6uyMDpKIlsCKCJkHS4LAEYw3rsqhDSyeVlHUfR/+j/+pf/df/mff++nvjsbpjqOV1V57dbNd974epYlhXNVUEpdwoPPLoVAvXN62Vn+Bhz3m2HG0DNbamSCoAfJaHerDI6IrNaglWQVIANKgj5yVyviUlbSJ0p9nqiaP5uzrZRgOcuq+j5rYOYQ2FwYgAJoYrRcWe3sbH3bay995nNfHU4yTRRFUfDUNylf5GSXDn6N5z2NW3ftdNz3rFuXVYa62OYa
G1trn1sLWyfZiV+q12TzVd/T2h9gx+H6RLzPRNempX+ze6zT6vrIRxd/uDaH/ceoh0CEiFpr0cYkpiayWiyiiFhXVVGsdrY2ktjeeeZWmRcHBwePHz+uq5IIlNHO18hw7fqVJIkUYCDHzIPBIE3iyKo7t2+Nx0NjtQYGRKUghNYaLIS1+Z9RSraUY0Zj48OjJ2+//XYSD67fvOEqF1t188bV4+N4tRTEKw0AtSdmJr/qT+/FHdL/jJdZXPorvrb6XYOtxtystSwaIgQi4ZraRG+++faHPvDC4eHxG298/WMf/bg15ld+8Rd3d3c//tFvEXdmX+QSdTNJEkT2nuM4lrxoaKu6qvMwosxMCEohY7fQCqW4D5+NXcICxPCYJDEzS6KaDFlb4713ztm4sXawbwzX3Y6S/FFBrBSftLWWuSmBrBSIgRM7/xEhIgZHznltsDOhAyhr4yTRiCiuCuHZRVEMh0MmZkaBxhQLecNHkeM4Pjo6mkwmSZLWtQshACitldSfVsoAgOB1EHVmD/SBFAUAIFSRUaiM1RD8fO7zyWTiibVWtQ+rYgENdHZT/M85J8MXbhdCKMv66OhESjjXvhL7sHOuWOUnJycns6OiKJbzxeHh4SovZ7PZ1772tXv37jUMFRsDhNIqBGIGrXEymVy7dkW1ZUaFmjIzMNfOdRHvq9UqnMfubz1TZ3AcWmupL1mWpZRO6fguAFRVJZK0c8F70toKjnrfXhiC07rRpw8PD6Moun///nA4tNZubGwE4vFkdO/ePaVpe3MyGqbAMblSawuxIjAAwOSYcWd7+2Mf+5btvRub00kS2a+/9WZZ1qxBI2jbhNRRAyNxxjZQ6xBClsVVVbhVzSHcuHH9L/+V/55Rfdd3fyKObRZH8TCbbGzk4aisK7GQnR1GhUgAELj5cIFin6elTyOGa99efJjP63jdA5Gxh4vZjbvPqizK69wojQDGGPTE2KiqJLWSWFxU64J+n0+tUZj2LYQAzEExKK2AiIlCaLyN3WNEEEIwUriNpSFGRGBWQB4VEvnNyfSTn/zkZz731Tixnn0datXjfH2i+bSr78Dnngp78cmuzW69+9OnzsclrQ3+Ihvr+wv7z3fNXrqE8lVHZy8+Br3lXxtIXyZ4mhywxra7wa5tlP4rLvaTWmygjtAL0cTW3ymBuFJ8SRKTlFI7O9vPXL+WZVmZF/fv33/w6PH+/r7RVoCxJpNJlqYhePJ1lmVpmi6X8zi2167sjAYZcxADqYgjItoLNVGgEVEr3QwFgSTWQJvIxnVdv/nmm9Pp9ObNm9qYUFe7O1tFURBBXXnnlrE1IUkAoKrPDCRrU/q0dezmufv34jxf+vPuGURUqBBRGN50On708PHmxvTNN98cZukrr7wyHA5+/dd+5caNa9evXlssTweDgcg93a4Wa39XjSOEEvGMpHZ8rr9hemu9rrsHJgXMTQJ6Y1WWAFpuAyagocKNP6ku6gZeP4jxU4cQkiQRdouuZsY4TqUnaZoyc1eVCwAkHAwRdRyHEFztg2Jhw9BWekDERqBra1EQkYD0ioW8Q/IxxhR1iai0NgAS3G6NiZRSRkeYmG6LCqBeoGCUrqrKBUZUJLhYAZiUC2UIYZnndV2v8gartSiKukZmZghE5GsnuTFVVZVlnuel9342my0Wi/l8nuf5YrV88uTJ48ePHzx4MJ/POZzRBIlxli1jjJlONwejoVKKg4QjNEVXvfeVq30I8+WCiKSF7uTKBuW2JBr3DGOdJY9bYB9s0hobF2AURVVVZFkmz6RpKskR3tN0Oha/cghB6cbq1G370KDEN1AbzCy5Z2Ih39nb+qt/9a/9lb/y1z7w0jN//I//0Y98+EPP3bn9/AvPxTaysQ2Ur1YrBk1M3vvd3d1V4e7cfma5WlR1+fDR47KsGYIGCxrEHEVAjZLCwMyuKpIsXa1WUWQ0mKp0C15tbY3+m//L/+PPL+bf/we/D3GURNH2ld0vv39/srF1slisycfiVe0Oq7Cc7gjgeVXtUpfwN+DN/95LARRluffMDbJaswmhlsoQjMDEKLgECKHN1Vxj5Gt9WLuPiJ21GQBQNYr0GutlZgIOxIHIaMTABKCaiP5GGNdEIYqisqyfuXXj1Y88+/tvvWfjbFVWSVu7quMcl3al36e1zxfJ5cWx9QkW966unYvN9j9wz/S99nDfd8gXFl74WZ/ZX3z+4nr027l0+H3e3MW1ivDeRV9/42npsxzoMfiOLIoSIIccOAgP1lovl8skSbyjjckkjuOtjU3c4itXd+8cHRVl/eTJkzzPt7a2NiZTQNJagwuPHj/Y3BhZw1mWDdKEqYbzs91eWmsjCKaBKATWViU2rrFelVWWZbu7uw8fPn733XdfffXVmzev13UdWX316s3Hj/YPDg4eHxwqQGEty7yQF1wcfEfcu6vTJLpJOL8cqr1/Dvr7aXOLbfxwHMf5ajHKBnmeZ0n06NF+lmV37z7PPvze73351o2bsmSmyTjSLSJHQEQBn2qc7gSqrYIgLXcm6G4zMDOiBpDD3UQFMgJ5dr5q1aammrK1NssysSFLkXBrbeBz5X6FVcuOCi2MzGKxmG5uiEAg1pFsMCAiFQJqTSEEAkBghMAEdaWt0VbUUwVKeVdVde0qPxg2bNtGUVmWou9qra3WnQeks8lLBYUoSrwnpTSw8j74UCrlEVEy5eq6DswiKwQEBp2lqVIqACql8qKYna7KsiyKoqhKwTpeLpfkw8HBQVm50/aazWbHRwcPHz7c399v2VJjkxfDLwAAkrF2OBxev3pN5FTBtmvjH4GIyrparJbz5cIYY3VTxlFbwxyE8UstKaUUABJ7ELaAgABa2y6hH3uuH0kDa+HNrRzzDiIUAKqqEOwqbuLdKq31arXa2bvy4MEDY+1oNCryCtGhBoYASIhnaGsAIK4Kib9rQK/K8vj4aGdvO03Tt969/3/7v/8NpYAJ/vSf/uHv+a5Pbm5On7/73ObmFFQgBlYIgHs7O8zsXIUMg8Hg/oOHp6endQhAyEyd06k7L1kaVVUZx1Zr7arSAC7mq/FkeOP61v/zr/+dJydHn/ruT96+enW0sRENUiodItJ5kt4Sk8YfxP1EnW+CufYf+AYPr1H47qrLarq1OdzZLIIjBA2YxEntPQAAEQdCEJkckYSIXA6u3KfD5851+0ExSBy+GJ87507DfUMQDdF0whUwsELFoEAzBgXIITDzeDx8+eWXf+tLbw8nUQBGfza8/iysccGL07TGw/gpOmLH8Nae/2aWRH7Vb6HPLPtTtsbR+5P7NF67tpaXauTdk33GgBe4F7SKLJ1PZ7p0b1282cUWCWnoOLoQDuHI3TNaRavV6nQ2q/Z2t7e346ThB6L43nn2mbIsFbBSCsQJmFf7j9has7ExGQ6HcSIJQqyUkoKd2MbvAIjr7owNWIyiKCJUEaExkXnGbm3tXLt27bnn7kTGUvBXr+yFYDcm0+tXr73z3rvv3X8wO5kj4ngwnK9yZj4nbYmS1FP7Ot1ibeGesmoSDb5egWNt42Hr8pAkV++9QlBoZrP
Z8fHxcJB+8IMf/L0vffFrb3797nN3uq0luT1EJHUpZPIlLMt76tRTpRQo027DPoLEGd4LtVWnkBUid/UEoU0t894fHBxYa5MkSbKUBKyUG9dvNGzgtQXrH1tHMiJmWZamA/FSO+dG00nwXhtTtVjQIVC3c4wxyhhgDt4zMxApZbJseJwfMaEIE+LgODsCWmtthbhII4gIoNLBsKqORVQ4OnowGk02phsIioiUtYCgTJNIXRRFkVfz+byu69lifnR4slwuH+0/efTo0cHh4f7+/qNHj/b39+/fvw/MLfqjAgBtDJFnImvteDze3d3t0qCFDIb2ZAVfi9F+Npv1HFskZnlZelA4GAwAQHy02naxFDrLmmJ8TeycRQNRc3IZUKEPQXiqSD/cuh4kMbddkYbcmSbYmIkaDt3hq0RRdHI6u3v37te//tb/5r/4L+69/+Cf/ON/vHflSp6fdPQKFPXy5NkY7T0JtJm4BrIsi2JcFflitZpsbGTJgAJ47//BP/hnn/77P51E+tu//bXv/e7vev6FZ+/cvrG3t6O13tiYpGmssZFB5ZQdHZ14RSogo/jYz5w4vi4QgAIxMDJorXWSOOdKD8/evf7pf/DP7z948J/8x//xVpxON7cevv3ORYJ28eof+TVmjOcf+2Zae9orZBR1Xd964QUVm9yXwftBFEXGOgqBSQFoRGBUqAICK0Q6Z+7t892n9aQlhl5L0Bo01RfWNWAiZkStDCIjNJ6xM26DSIEAII5jD/Dyyx969jc/93D/UNuoi2qG7iffkDvShXjdi8/0mRD10O37nLWL3lob9kXKi60ZeU02ObfMF9A2Lo6i/+Raz5826ouCRX8Und++E4Ww9cP1Xfpnm+8pr+hrVF0kqjj8qClHIzUSGr1nOBxKesNqVUzGQ2YWvN8QQprG4/GQSaJtVaidM/FHP/ZqVVVFsRoPM3FTSYQReU9MwI1bqzFFthEWDR8CNsZkma6dH47GOwGuX786GAwC+WvTK97XAWIUdIXIbm5uv/XOe298/c2qrEU3opZGUi+Xt7+yHa1fm5O1z72lR4AzUKqzRUQpQIpMyhjjqtpYZW3Egaqqevz48YsvPf/gwQOjMY6jV1999fd//8ve0XgyzItKcmclU1Pqy3rvJU+3c3zIt8wcQggApgl9l1JIDXCHJ9Kd2AdKaQUARmsJEdJaG6NlWh49evTkyZO7d+/euHWzOYBSAMN7qyy1xQr7uexC+uPhsJjPBRYRlAp1rbQWdtJNaSN2tDjMWmvQGgAwBGXMeDxOkgSs8mUp+NLSt7WZ71KiDSpf1wi6qqqf//mf//Vf/8wP/y9+5FmG2Wx2cnJyMl+cnJzs7+8fHR1JLMLXv/rGW2+9VTsPqIiprUDTCCjG2slkcuXKNUmRstYSQqidTGxdl8wMzPP50vtazOPWxgJZH5oaTdCtfqd9dp2XDaxMkyI4HA65BTYXriaDEg2YyMv2aTiul/Bv2xHWnmPiDFxWtnOfiBujAFRnQAYA6Xmappub2xS+9qM/+qNvfPXrP/3TPz0ajcryFFtbCPTST7GBFjBy9DroFcLaGIOgFvPVaulCCFrZja290WiQL+ef+eznfuVXP6s1vPTC9e/91He98sorH3zuhStXrt28eTMbjofDoYlsNhhG0XsPHu8DQ2tlV93xsVYj6tp74hBFkWAaI6tA7uR0dvXGxq/82udV8P/hD/4xE9koTny17NO0iwStI3pr9PbiA5d+ftqFT1NsAk03Nz0FZTSFYLV2VWWsCa5mhQoQQZNCAhCeKdGCfX7ErRbU50e9lwIzIzGohl51Q6MuykDGi6hQmQAMyAhKkkzEN8MIqC0hcCBlo52t7e/42Lf81D/6l+PtaE7OGFt7p5X1xErCOxUAsWoyCKUzihmlaknHXfi8agjn2We3ifvjubhC/elYY8PQ8vvOldgRcdUCaKjzMP391eoYW58x99+4xiPXeH/3rjUe0G+//VMjNnZyWei19vtDxvMB4aDQBS8odEqDwPkao4FJISmNRN4jaK0ZFIOq6xBqr0aD2WLpgg9MHHxsFQIoraxWSjI7LYfg0OhskJQlMLMyBrXRyiIgEzMoHxw3VebEJcxEBDZRSqlaExGamKhJ/bRGR9boRGeDOLKxhJvZOKnqcjg0UZzGiR5kMSIbq37/K28wKqNtUVTz1VICymRCLDIze+oMjFDXXik2xrjaAwCaLoVAUj76yKAq8LlQgHaSFYIAUwEAkQ8CH1Z7p5RCwKWrf/fLX/3Ed35HAP3G174+/Ogrt27d8FxXVT4YDBExBAfAiNzhqAgbq6oqihKxEJRlDQDgJZdGEzU2A5TCK61HsKHCTK52zEzBJ0lMFFSLkzUaDa5du/LgwQMAAGKrDSrFBFmcLd1SqloggtgzJSs0MI0mU+fc44cPppNNgiodjQEtsascg4qUipxziFqryNUUpyNgBoVF7lBBliXMTEorVEnMwABBGZ0cL+c7e7tK1wDknNMG0GiSqGylfXAKwTlnFCqgxWLxT/7JT//9n/zHP/H3f/Lx/sHF8w4ACJAN0snW5mg0iW3UN9OJfVWsCVW9you5qNrqfBx7Q2sURJEBEN7mrUWCYNp6vswMAtfZGj+74y9F3lozhiLyiskqAGAKXnA+EQgBQ/Ai6QIiB+qEZg8OlFgQEYCJSZJ3uCm8RkTBKCXB4QDAOhBTZGMissYIpAYCMIvR0QPDaLhx585dJqNVhmDFEqxVjKCMafxW3JaGRMVKgY2kdjJ6b4CAOACrECpQSEh57fOjJSIONrZ2ssx7/+b92Rf/X/8Q4B8+c3X3+77v+z7ykQ/fvn07Hk1v3H7WpqlNYpvE+/v78/lclDRXByDSiAEVUQCFBgwzMwIYxUppshg4zMrndnd/41d+Bwv4tte+NdvcVYVXHpRRWHsgx4EYNaJ26Eix4EYhMirWAEjAfchY4rPPPdrbp5NegWLQBIoBAAIwAQcx5oZgQWmliDkQsUJQuLg1jG9uOleRq6MoUSaSmpnIipgrBERutH4FBGBBEqfaTcvY4EmS76IfJFBOUGYDOQDQBoU8ivXFe6kfr5mZPDEphVZS/wxc5nhDRGIKPkRxUrlqMBi98MILSfIvi3IFoHtKalNLuAPfWmMhcJ6J9lkLnGfDF2WfNeZKF4Kq+lLJ2k/gvITSl0EuXchvIFKtyQrd82tDgAscd+1mx0GVauRfUeO6ca1179KRIjYxtC27FkxmCgGNsWLzCCEQSHk4pbU2gFEUjQbD4TDTGkMI2JMwOsaPnW7d5bOeH533XinjnAuBnPOy/xAx1E4lcZqmnTdLrGrYhiYhKHlSLKVFWdR1LcCDxpitrS3nwuxk/vDhY9BCSSPdVhMKIYgPRqMy1kRJLLZiKdlmTBPW76UOT8+I0hA7PjP7nJeWzlZf2GczUqQzIk30la985fs/9T06jd59993bt27keb61Me3mTbUVkUMIeZ4DgFhBJWhZwpe01kR16yNQPa2IiCg0II5n0XPdthS/vkRLybfb29ubm5vyxjiOFXYkWBtj6rpUUmLB2s
4N6b2PbCKBeHVdpyYyxrgQsixjoizLvPdKIXCLuQggvZYBamgSBmZHR+ONqbJ2OBxSkxAMopqfIVkSs2fpW+3dcDh89/37RVEoBMlYFVNNJ+N6XzvngvdlVRHR8fGhRIlCSw10W3WxObrYZH9p7KTYdWm1L8XqLnlBr2kqomJ0+yEwn5mUiAi4lYb1WZhWR506OV7aUkqpNpJOrMMImrip7MsctDYS2NUtrkxX7Vzc2hKY2VpbVX44HEqdtNFgGMJZ/Wxo2LkSQUEkti4eG0ErdaZgyLfcxrfLpwAhiiLvgyQxa63jOJ5MJmmazg/2f/Inf+rvfRom4+SVV155/oXntNZRlEynm8yotV0sFlXpENGYqF0IqeXJ3cS2xjDQ1i7z1c1ru599/Xd87V776McmsQleedfOOSJw6ErMra9gTxmTPzu61z2z9hMJEYNergoCqMbaocE1ZBYRozg+mB3vvfisMtrnVScBO+ccneEXXezV+feueyrP/jxvqcaeitj+udYgAogUA2dBNn2WprX2wUEgCu7ZZ29/13d9xy/+4mdG42SZF50zo+OOqvOoI8J6JeOzxi+9Dxcm+uIUPI3/Pa3NbiCqi6e/zPn3TV5rDz9tQzxN/sCeZ04pxYyd/fbSpi7ORvdnf0V1AxXcgPrWdS2/TdM0Gw4UGqWUorDKl8ZcvbZ3ZZBmGlWUWGwkPFQdTBM0RrPAQax16rwZn6A5coLjo7VWChotzTX1KkKL+y8c3DRhOrrjqUQ0GGbL5TJfFcwcRVEUuygyV67uLhaLynljjK4q9k6ECaUUMrcRuEYOjEqboJg2BzfoFnvZe0ZtOsxtvOC5WJOQsHcBAMpytFvr9PT0l3/5l//A933Pzs7ebDbb2BznZTEcRNDaGGV00LO7IAoCQ2P87zfeX8T+snIb96faGn/dmsoAJas1TVMxDIgIolVzAK3VQs6E6IczdMkzf7pSarFYpNlQa62MUVoXy2XzldahjUCBVgrUHM6cbwiny8V4YwpEjKCjSFRebN0B2Hp8mrA4rY2UvdX6X/78z12/fjUASIwun/OEEQAoRO45wpVWGs+KIzXrhKwUMjZiAVyoZnbx9PUnnHyjUjfSFTayZrPKIBoPISuUv7QC0ZFb9UsabNTfrn2tgm8r/lJHRDW3YGfYOMWDUiqIoigtKA2smM/BpzTCq2qyqpI08seeKIgY14RDK2DGxs5vjGqTp9vN5mS3KIEyBtZaBUVMioiQ2VU1IioADt6RD8HVdXl6epJqPZlOh8Ph7PTkV//db/7Kr/2mMfDMMze2t7cBIE4y58mHvBGCmCVZG0A4EZ8xewSlDHMgorKuru5tfP53vnzr1q2N7S1ltFLgkRCZVSOoocKWvypxEjXHAdYY3hklv4T7nv0L53gwCsqjwpYFBKLARES7V/ZYcM7jSGstN885JrqGLrwLoNFWO5Txbq9qVIy9ACZog+R7xAfa895ub8SOAUMDd9JuLxIVSndeH6XMax/7ll/4hc9orTVgCEFpJQIXEYmlBkC1MS9dm2fOgz4rujib3VcX7198oBMx+g1enLI1Ji2UqJ+o193vmrqUWF/sw7/32343utXtXESi/vZf1M3S0xrpOtZ4C5iZWasmBFqooTCDEILYtbKhydJsEEfbW5vXrl9J03QwGCRJhIK4xISSoidjb3Phuz7oHkSXlH/vMIC01hsbG4hojNHaOudC7YTuy9PCHkKLuyZOyibpGRMKTIHLskZjtje3Vsv80aP9yWTy5MkTa5p4lkirEILRWqGyVlsrgMYS9AQatfektQJ1Xn0BoHNavYLzoNDt9K6nmbXPM7dxH1prhXB0dPS5z33h+7/ve3b3Nsm7th5Jc3WksGNC1Kuo4b3USBdQaAbw7YxppZQnUj0VvOtb15T8SW1lgp2dHURswnYQBbqkrmuiQtArnXNZlq1WK7FAiEA2HW2enp4i4ubmJpNHE8mbJNuYmUGCuqMIGABRk2qgMwF88FpraAtcRpGVGV6tVt7XWZbFcWw0FmEVQtDGGm1BLLYE2pg8z6uKoig6Xa6q6kyTa4epQPKgFSMq4C75mJG5cZMBnNmQWZAlAfnfE/y4RvKU7vTYcz8MLYh3QxOhSSLwrtOkJUtbKCbLqdW9RHDhoMzouwTfNgW8l0sNSoOxtjtQWlmBC10tl5KdPBqNOnmOiF599VUASNNUKR2CYw6IrHWjrnUBXzIECbdWSjEbhcZ5h+ps7BoVaACRffmsThc3lnUG5hqInM+rGpEnG2OllHP18Wx+crqQsCytjY0T5xwRMCCFmhEVIzO3iqYiIGb23idxWnOOiJ7Aajg9PQ0bUwIQXSEwQ2fkOMfnlFRMYXXOHntxNftrfenq9/8M3mtAozUi1hTyIt+7ckVmGxR21BIRpVDYRXjqM1batY+MTaZ4Qz365fIApU5oQ4T6XZRx9Ck5AGEDON60fi6vg8iH4ACQgk9sFACfvX3nez75yq/++henk/EiX4FIW09nmULcFT5VG770h2vsp3uGenl10NlLn67F9hnw2Wx2M9Wjv0/ryVpr/Z/3e7LWz0vZJ7akFgDEK9BZm7kNwqKnBG+vTY5CrbRyrmq7pLmtCtdlXjUQHGVFPtQrtbO9sTXdmG5MjFUhBKPQGAUkmCQkiX7MjK2Dl3qBHqBQbHbOubKs33zz7TzPn3nmGTRWR7G2NtaR0Q2cJBrdmdahF0+n2yxJrXWSxsPhMI7j09PFk/1HN2/dvvvcs/lylef5PDK1p9FwwMy1ayDsjVZABEJ6tCaiyrnONAfMClg3iowCUqU/hxfdqVzqfNT62QA7G+fZKQLFWimllUqjeLlc3n/4YO/KltJ6tVqlyVApZe2Z+iszJrWnhIXAuf2poHdMWnG7eWOfB/T3ZNdPcQZEURRCY71ExLKqBMpK/L6mhctvZWUlMXda6zzPs3R4truoJgBrrYljaNyZgZnZe9QaQjBag3RPVPi61gyj0aisqwiz0Xgc6tpa29AWRGzNEthGWXoXrLWukKQsMHFUHBzaKFmzBwigaWjtQAqlwG47Jz1BuZkWpP55uHhgL06dXCGsu67ajptLf6h7mO2dGQCRiVhrrdsaGyEEsR1RQKMsACgNYvsRIxAARJHx1CQy1XU9n8/zvGJqgCojY2/dulVWeVEUaZoyB6P0fD6/du1GkiSuehLJZW1dVd02xTaQpd/bLvpSa625tWTgufFqQZkA1gYpYM9Ng0pp5z1i479UGlxVGWPysoTFIo5SiY4GAKWtMU0mAjnfxEm2uzeKorwsFICnsDnd2NvbK6pyVZfAhFqh0RgCMxOw1lrSG1piCkzCe4W9Xc5c8bwYfSbMwRnb5vYEJjby7FRbB0UZXcyXH372dpQ00XygFAETUQDm80Gd3ABxnIvB7hPkbs80fRDOCoLR689innrPwHly1PQcBQkLdf/dAAASX45IREXlUHtPPN0Yf+I7v/1Xfu2LcRKVZckAYvkUCgvELJUQzxmJSCBv+p3g1izwzXC+p/Hj7vM34MH9aTovepx7phNw+q1dJIv9n1zKZ
eGyReq+7f4MLU6h6knTT/sJtvpQR5vE/SSHUHRKZmzDf6LhcChn0hgTx/FwOLxxZXc8HGqNaZxExkaRia2pinxtZqjZukTAgr4rPAQb5RKcC8aY2WyGRo83ptZECMq7oLmJQvJMulUOZZ9JgJJgPgj3jeO0ripEzLLs+vWrABAZa6197u6zp6enIbiTk1MiqD2FsLJJXBSlHHilQNASlYLIKAWWmX27m/tzWJPrWF0ncvZ3Dp+XlOVju+2bFVEKBZ2GFdo4evTwcfHSi9eubMtjjdSsm2aF/IkuIpyyTcgB7+soytaWmAgIQr8KVn+n6TazFs/jXUvaWKf0aHVWByJN066yltT6FT9fWdaJVgxBaysoknEWh7oC1KF2iKisJe+t1UWxyobDqiriJAEAliq/HIoyHw/GADCMhvliIZZ/AGiqTzIDnWeTADUFhcZaWznvHIQQkiQpKye+yZbuNwxbIXr2AEA9AZdbkXTtoLbLd3Y8105l/w62/aHzJ6vbDNiGFp+Ry8YHfCY/Kd0aNFsU7wYgpa3AyMxIChGLoljNV9Kao0DUQFQCgLbADCHA1tbkYx/72ObG9vPPP/+d3/ldv/fF3/2L//VfBICPfexbDg8P49gOBgNAYcZc13VZNviU0+k0yzKpBhHH8Xw+z4tCZrKLEqCLITLE1CsdK1YZZgYGVKAQtLZKqaomYtTWMLMPgcgrUlEUMSAqE0LIy6LxDWmtNVmttBh+dLOg3teeCRGrokzjiIiGgyxK4s3trb2dXWLuDP0MsiCEqACor+3ieZ34IqO9+Cc2MrQ03+qYCAAgKDHinicilpNtMRumrFTwHuWUCRFuQ+r6U3dOecWznl6Q8yRqBAUZppnq3iha+US0m7OIaNnqSqlzrtz+VVVV2tSXVb52BMzB7+1sv/TincePn1hrKx8aTFZC1K0QIJu/19gafekmsT+V34ANrzGni7z8ovO8v+fgfEWHi+0/jTGvtfa03178inukpP9bOfAd623YHpHqAIIv4+v99e7a8d6HwFmWZFmGTSFhmyQouYBd/E4URUmSjAaDl1/+0LUre0lq4zjWSM5V0nhfHAOFopQgyn/N/mj72dzZ3N5JkmSQDVVbt7itZda4Qk0Lyl8UlcQEIWoARQRKGaWc/OT4+FhicwAgz/Odrc07t5/RRlltDo9PnHOx1aAMAESKmdloo9pzId7SEAjII7NWipUSRCcAQPQ9a+eZebnPzLr5ZD6TuM8dQsQoipSC4N3m5vZ0On706FGaxVd2tsX2K/ura7OLfDHGIIIoPZ2/Y40By2HjM9t4kMSkTtiCFq2l3Z/AzBL7miSJBHkBB+HHnf1ZXieADDKcjY0NZU1RFExo4yiKonK1itJEcBDlyTMzkvB+KYOoFCrFzFmcEFFd14PRKM/z2vvxeIzM1rboKAEcBcHzFGBqYGRCNrBarRQ2ZnmBeVo7dkJzGbmFSg4az0pu9DnK2fFEQjh33vuclVuVpXeemENjSGyeaT27RKwQdesRwDaqThndfGgd+VVVee8ePz6MDDgH0FJYY8D7M1KXpdGdZ5+9e/fu1atXJ5PJzs7OaDoZDAbZcLC1tbWxsTEajQbDsUY1HI6J6E/8iT/xh/7QH/rLf/kv/8y/+Jm9vR2lYDQa7WxvDwap1N2yxgbvsyx5++13ZMc1paUJbt26ESfJycmJ974oCmaWbYmIwgvPBJD26mIkQSEQEoVAnplRRRIzIaUgpdCkmOjEAy1YJYLzY4wpvDPGRNbGcWyVRkTQiWYGIBH+mIMydjweJ1nqKdhRFmrnnQ9MrBC0wnOyQgvHgSB1l/vU+FLae4EFNqu/Rjp97Rp8R0RSXFTVzt6uTuNaKLDREo7aD3Npt9g5rkTnNfJOgFNttLZGBUghtKlobWshePHu+xC4VwFJ6FIj4OnWBN18rc48CFITjTlYLYmGUbFa7u3tffITn/hbf+fTG5sjqisGsMbULijQAS4x9cAF/tcei8sFnP7MXqSV3dr0hdb+r87NYy/zuP+5c752/34D7gsXln9tGdaeWRtaf2mVUnVdd3FY1BrfiJoRwfld1YEqdJ1sHUVNkJF4klxdc7ONmqqxkRYkDm2Vjo0FoDSOBsPMamWtVqC8J9SNs+pMg2zLdbTaNiGeCxDTWldlffXqVQEZUMqAQhPFVZFXVZUkmRQlBcAQQlnWZVmKdYSIBBLIOVcUhRSHz/M8z/PJcLS7u2uHg8VitbU9mc2Ow/bW8fExkR8MBmVZmjRlV8o+VkppVMxc1JWA13cBKbqNRkY+K+QsfvbOOnp+sfpLw91qUltogZkDMBCmafbo0aNrV/a2t3cRNKFqVYGuQGzdmRxbZNAGeaPjhf09xqi1wj7nW5N1sLXVS5vMrHVjUaiqSsr/BW586kdHR8KGhVaKO3AwGKQhW61WVVXtbEyl0I38PIoiJAatkiTRxkjZPgohTVP2XmzXIYTBaAQARBRnGYDS1jLz9t5eWRSy8YB8CMFYiwzOOfYhSRJldHCktC7rapBMZ7MZaBBYEh8KpXS7w6Uue6MREILW2C1uqys06pAC7IKn5BjRU+wZRNRTVBr7HraJ8l0QXzfzwlGYuSiK+Xy+XEqZcxAP3zpJQPjIyy9sb29bpaPIMJFSam9vzxijlU3TdGdn56WXXvrQhz60u3sljmMbR4AIqNvIGOkqdj4POUTf+q3f+rf+1t/6kf/pT/7ET/zEw4f3f+AHfuAXf/GXB1mmFBCR8244HH7pS597+eUP/PiP//h3fMd3RlH0mc985m/+zb/5pS99RSpAXL9xI4qiPM/rui6KSmsl+QVtv6nVwFqxgxFCSyoVMnHwXmtN7AGadEdjmrzwjmdL/LMcK4Uox7lD9WpBSuOyypVWVid1XS/zMkmK0Whk04yh9HnhQ9AIqBX6y5EVoNkA5wKk17jjRcZxkXojAyhEpZBE4GBiXpbF3ZsfzsajEirJoZfBOmpqeTUcpI+80diC1wWC3t5r6wzJzUAKGXohKUKDOrNKfyG6oUntkXZ4BMxNiJdkvCmlQnAAEFytteHgX3rphQ+8cOvNt9+Pk9QzOOe0tq32oJh8R8WUUoLxQT1Qhc6a2tdI+mepo0Rrc73GYvmCV2BtkTomx2cGyR4dbImdbpEOhat16909Sb2gm4u9FbqvWpT2tX52b4GegiULQ2ce37MS9C2L1SEE55wYoEKL8SszU9clAMex7SyNXfvWWgikIw0AzteA2Xgyuvvsc2maEpGKlAzAKB34XA+b8TY5C11Ab2ePNcxVURSnp6d5VU6nU4kejHS0WCyW84VzbnfXjMfjPM9FWJa4zSiKAhMjxGkyGAzmp8vffP3zRuOLL75YlnVRFI8fPASAa9euWWu1xiovtNZFcePho8fLvBB8Te9cN89aa++91M/p7gh4vTBpiQMXpRwAmAlaS2N/+VoSf07EptbzTUSECACT4YiIrDHGmDhNhsOhHFRhVNikMUCaRmL+FZYsKyuhy6qtU2SMUSZyznXVCwDAGCPOWuyZELv9Rg2mCjKLotxYERExiZPgSVAh
FX1S3DK1kMIdOWLSb7Qu7ZGNPUVVEUZVlKfF68lu7R+tJP6lvDit2APQJLroiIwKpt26Ojoz/61rf/9b/+1mxx8MYbb83m2NTtbr//5MnjcrdfrVY/+tG7Hz34GACoQ4op6oECSsFisQAkyY4P7ChyLbFaZLrltQQGyrLsGuAoVNKJltAzadIAtNvtjDFZluR5Hlndd2BURVGYni50kJNN4zrpCjjJC/LBKO1b9+jBR/fff9capaSgUCid95UxJjBQYEIVCHrdD40PxkTOt57Z7cs4zVab0lobyqbk2hgTGaO1jkzXPzGKoap3X/36VwDg+9///nq7revgnFPKNK37uZ/92tHREYBsKNW2tdYvl7FqxAMxVhwd1mYUP5PtDyNop9ipDEG4EQZ3BUZ9f6HjobSICICAKK1/uzAhAPQFC4KzkTmVsLOsQ+8DABgf3ID7v3v37mazu3//PgDUdS1mclVV77///mSSx1HknVPKQLi6oUEuX9OOzBJX7l7IYvWhRWJrrSPK8/ydd975x7/x24KzECyJVkB0VUAybBJ8PqgIz+vgF2UuvwwgPZbUw4vhh/hC1Hp8vGhPXbulQT0wMwDDp1hh4+8P2oJHOZvB5go9vcOgj0NXId4VTUrc8vT0dACGyCbcbDbetyGEcl/nBW42mydPnsSv3kVEof1zjsg7pRTgOHDUO46Mw3KUdwKyen4Rj0dHaw1ErDUFZ4wBol3V2iT+6Z/9me/98MdtQ+9+8P6jhx/fWC535+cnx0cUxfvdLo5MW9Xr9br1dL7db7zL8mK6mG82qyKL/8O//NfeeOONui4To/3zhpSIe6VUVVUarheYIUoXsKthl0dQ0hmqr7oeAsJ5msbGrp3XgJMsZ+LpYtq6ZrPZXF6ec/DLg3mRJgKzkvzTYrEQRkkhpBQ+W9lpUu8oSpQQASCKIgGwDPMrtpTkJq216/Xa9+2VhCJDdJLWOo5jVErKz1arVRRFQ/aRR7V53e7rneZBMaBWTdMgqFc/89ajjz76+OOPj45vSM5vt9uJBVxVleq7DAXnxOdOkmi1Wkn0Vf6VFTibzaIo2m630+lUQFJCjn1+fj6bzeq6XiwWcRw/e/JEaodOT0/jTHrldh6tjWMOQboCFEVhTMockiTZbDYC8wYAwRseHh4OtUyDMyqIceo5MrXWkuSWCi6l1H6/l/Tw0LjaGMMEAzhcBlBrrRjbtpXyYtFDUoHdNA0FEfdKbkDISURCXhNxcn7xZriHj4mG1oqyYvLN3/+X//X/8++89dm3f+r2nSwvmLlp3NnZmTFR3a6sjYtpUv34RzT060VRBgwcFrNiOp3utuvOJdVdd1HodYBkKCSiAABZls3ncyLaVzV13Eyd6ybmgjGqqhu93ZblzDnnHUlqA5HFpx+FQyTljDL7EpuZTCZpmt6/f/+D997Ns4Sc994L+KquS62RiIS43RMGAk+Ku2QWbne7KIq9awUTrpRC1K0P5EMI7FxADho7wLlz+8XRfLPZ/tG3//U3v/mdycwAR3Gcek8AcPfuXaVUZHXb1CIeQ0/Vee3A52G8g5LqgqZAvXoKzEzcg8+NQkRJEnWjN8I/D/5eL4girTWI0hUO5i58HbB3eQdrMoQgXb+YWaku+MTMRmvtfGusJiKtk6Ojo48//jhNchun5+fn6/WaW3//owevvvG6stHT8wuBpEsRhfdeGS3JMCbu2bkYERUDsKBkiRkImAKDBJqIkAMQ37u9/Ku/9PXf/t1vz+cTF5CC0TYKVDKSNjqKjAIwBq2OAKiuW++JEAgUSDlczzSKiKHXUkqp0BsynW4GoaRoBDEI/JzDNBbT4w027LFr0nysQXlUbzrYR/IhACDC6PQSs1UDFaVcQsyL8boZbmOYNjFOBxhIHKVxlGZpYXQErIBVWW4nk4lARUBpRnWx2p6ent545S7U/qNnz+q6vkXaMW92++kMdUDn2shaAxgZFQAUk9LCJNxqrZGR2BMHYkKFzBCAQYrlUCEHEAeFSDFrpVkDswGN4B0QRRpCvbm9TP/m/+i//89/63e+8/0HJydHD1e7VRk+/uisDdUQv43jCAwQ8tKa1pXr02evvXr8F37+5165dULcRHlEDFbrwYPsHCOFgCpKkxCCD61VVthIiD0CMiMRKIUAKJFypRRzUEyefRyZummNxiSKfaC6bpeL6XZzuds1JoqICJDv3Lnz8OMHsU0QcXW+evPN1zlAZOM4SrJk61qPoChwVXYt3IU7aVpMkEFMHI0qoHYueE8E3HqnrRnSJQK3aZomjZMospfnl7du3XKtMzrSYCezuKqq1epSa9U07Xw+j6zVWofet/beR7EV4ghA9o3P8xw1ogLR1lrrNE1D66QUlZt6fXmRZ0WkEIFbdtoqAIgiE0UT71sABlDOubqutdZFMZUAWBSnl6vNfrueTqe7sp7Mpp7C7HDOSEpDWsT7Z6ssSdJYGwWtJ6WM7FC5rlJKMTVNY7VRgaUZW2RtbKO6agJxlmebzSaxRuqtkzipqmpQdaJxxfXsIUgmTXPR/YKH2O12AFAUhTRBms/ndV0fHh6enZ0N8GBAFgexbVsfnNZa+MtUTy0OAJJdXq/XzJzGkcTGxeJBxVmeEAVjFZHxPQ95CCFJohCcQeXbJooixeIWIysbFfGDJ2f/t//yv3rlzt2DW3fS+UKnKdqo3O1L587Wa1IKIovaXK53DOA8AQAQW60U8m5Tf+HtNzV7amvyNbLj4IwCYdsSmZblSZraLI5m06TcV0qpkEXIEx9a13ijFLJiQqstMnrHHChJjHfh4mJVFEUcGaWBObCriiSKzDyJLGpjIqva1jknDR+thggQQz2ZJH/0h3/wZ3/4jXlmwAOwUoActIKIAZkheO9cg4jOO9AKjQpMddNAyBTG231j40hbZMXONaCRfcNAzqMxkdKWAKs2hNAyh+0n5b/+v/9xCDwpJrvtbpojcFPudm+8fnz71pKpdQ0qpXzTEiL1SJbrx8u6ISmliFlpBM8hBGQCJPKtc41CQSmzxJ+ZCBg1GmZHwVFwXTAXkFghotKxSVJQxnuPUmnCpFCR90ZrAPQtBecpgAYLoJgxsZlV1nsSEJWojyuSVWMMM8zn05OTk8efPGWE4+Nja+2zZ0+I6MGDBwKPFOada97btedkZoDnFNiLOsZ7r5R+8803f/f3vo0Kql2lbcQhKK0UdplR1dMYIV5VFkq1jGg3pZQecV90GxUR+pQM9jlz1VcP08vysi860AAv8XqvHWPlPX60l41GtwDgBVd7fOlrpxrsgMGylgfxXtpVBkSUkkdxTSSWQESPHj2SlJjgHjebDQBst9v9fl9MMu98XZeQptCnRV1wyo9CuABSTjuu9Hn+Ka4WN125+zDcLXOoqurO3dv/6X/2N17913/yD/7JbxPAwWKZTdKyDCdHh0R0dv5sPp+G4Nfknj07txp+8Re+/qUvfH6+KPI04eDJtfp5Cl/oMxr8fNzihTGn5xhfRzZN0zQAPY4R0NqYLrcHBwdt2zZ1k2SpMWZaTD73uc9997vf+fKXvxxbW5ZlP
Ju3bVsURZZls9nMGCNG5yDlBYcs8C7ucchyXRtZARmFEKTPgXxHnCdrrfAx1XWtdUtEeZFI4dBms8nzQlLOSikIoQfZWgDwPfMwWJTXQgomzfsE3d00zZMnT4Qsoq7r7XYL0p/c+8ViUe5rZi7Lej6fr1aryWQi9eVlWR6fnDR1HUJYLucW1fn5eZJnZVnO53PvyRijDbZ1fXl5mZycVFU1my32+0oS3oBJFifCZjWZ5GdnZydHx01by8OK4b7flWdnZx27qnfaGgDQ1ljqaKq22+18Pld99yGlVFEUy2VHk9I0zXq9Pj4+rqpquVwKMRwz13WdZZn0DRwgV0opQQ9hDzUSxw4AJKRERGVZDlKiF01KKdUza1byc0k9jBLVqLVxvo2SuK0bpZSyUZREzkNVVb/xG7+htT48PJRRdc61upVypv1uBz0E9/Hjx+J1yfoMgW2sjYGTkxO5eVlsRBRGnoN8lGVZbI0Q9bSNs9ZGxobAGlBAzmK5ympUPZtmWZaXl5c20nNgxgQV7na7NC9MHCmltO6cPNe0kzzjQBrZNfUf//G/+cY3vrGYz325ZxDpKs14gqSqGMharKoKtW6dY6V2ZX10cmO/qeI04TIkaVrWVeucsbEwmDrnEXXwHChoha1z5b7WhqyNQuDEGtdUn3njTeea1fqSCO7duRPHMbNHfYV1/8nHS+KUAETMfdgZ+iIF4DCW5xLG5L5UgYgUdnXk1BPFc1+/M9BJDX72EO4aPIehrSpRp93kOwb6lAai8j5MJpPbt28/+PgRKzTGLBaL7Xbtvf/w/sfz2fLzX3jbWi3LkUb4+5+gdYYP8Hmwj1JaGfPGG2988Yuf+bPv/riYzpwHCoA9uTQQg0YFoBQopSUYgojovQsMHACQhOCwH8fxgNJQsccKQWt9rajm5ccgqcdS+9O+OQz3WGvC8+p8eEcpM14Ww1y+9Pv9T9QwVUMGCLWSNBgAOOfW6zUgTadTCU+KjfLee++9/fbb0q3Bt43Qze/LGhFdGzarSwCIbeIgMDdWG0QgwK5tGPRkZAqRtJCojZ+REKQIuF9qwKgQCQGpB8VrrZNUr9eXJsp+/i/8zGd/6u0f/fjd73znu+/d/xABzs7OZD08efxMXvz8v/flL37h84fLhY1QAVutbBTXdVDYtWIaNhIrVHxFNEEv2Lm9TUMAelhsIA0BEdq2BVQhsFKKgY0xBwcHkn9tmsa3DoiVUlmWvPXWW48fP37t3p2iKNI0btu2bqoo6mDPAqyw1gJ2yNiBlMd7z6FrABDH8a7ci1uMPWRBQrtZlq0uLqUJj9a66z7ZlB9+uJZ2thL3Pj8/Fz0hudWhGlV8uBBC8CQnB4YQgoBxJKgo0CQZooODA0R88uTJjTu3hRJLvhACJ0lS162olvV6vVzOQandbpfnqbXWIgj302q9XSyXnzx4eHJ4oJSKYjufL87PLpIkefLkyXK5lGfpo7IQRYaIFovFan05m82ISLJaRIS6kxhpmjI5MQ6Ggl1Zfrbv0SQ5csm51HVNBBIRlfpp4UuXsPZ+v5feUNLweEicy2RJYFkC3YKjFiSwxPzFrwghxFHXAENSA4JVLorO6xDDS1aL1Exba5um0pEFgKZpojhN0/jDH/34n/7Tf3p08/ZsMpd5z5K0ruuy2onXLnlxBZ0B3VcPo/eeGb2HLgqqukkXHcA9CMD0jY2F59laW+kqtAEZPLHwp3pP2GeLlFIS3/YeqrK+uFjpnjeNgEzUVFXFqJIkiUyslErjJDivUTH6xNg//va3fvM3/lmRp841AJYloAeMipkDqxDIu+BCCI4DUqSUubjcfOlr7+T55Pzyyb5uojyqW4ceU5s2rTc6CZ4QbJrkdV03da2U2VdNmqaXF+cA+8+//WYcJQJAuzg/OzttEOHOnTvMzExoOqdRv0j8NBIEg7Adv6004v+Xsv8Ktm3NzsOwMf4w04o775PPuaFv9+2AbqCbzSASIMCCGC1aJMyyGGxSpF0upwe/qCRLJbv0JOnBpQeXA+WybMElkaJFUmAACAIgiNDoRgPd6EbnG849ae+zw8oz/GEMP4w55157n9O0verUqR3WnmvOf/5zxG98H3HsJj5ijDKApLtmVozXiIl6g6aVMcYCgNKmAz+LlioiasRrTHzUEZSKTdBKSfQmv++9LzO3g+oxRoVtSLi7uzvdGV/MFvLHxiR1XQ0Gg5OTk0ePHk2mg9BJh8pjL9BN+bq/2k59dsvlbNlHeSo0qtFo9IUv/NjXvv69JEmaesMoiZfSqLQITGi5PqWNknRNKYXe+whEMqvVfqAMyCBK5txWd40MDygFum2mokZmVnzlrfl1Sfz2av6wX93IzGCrjd8vsdx0xGtvVh24TFa4/6Bt9w/XEdrbziYy1XXDzKBwXW4o+t3d3TzPV6tVmqZnZ2fL5Uqg0XVdu7paLpfybMfgZrNZVZeKIUmSPM0ALEBIjAUgQgghWHXF+C27BV5hsSZgYa6+WkOFGDUigUKlFAGz99Yorahu1vu7w4N/7Q/8gS/86LMXz10dmqa5uLzURgl/8sHBwcP791CxBlYaNQIFFxkTa4kCxat4BQBknKhfYaWu4d0Q8aqZDddunPxIKeV8qEov58/Mk8lkuVzu7u4y83q9DsEhsnS8fFPNZrPJZHT7+AiQgVi67DYx2iji6EM7u7ndfE3TNPrQ81K1hIKdOCt2DJFFlm/sWh5UmYASwzoYFDJlIaRFArgjIt1RUfb7Rw5OkbXWxNGgkchMnG6SJED8/vvvv/nmm1VV7e8dJElydHQkMzZSShU3IAkftxguZYwJTQNAMcbNZjMdDvZuHS1nc2MMKHXv3r2njz/a3981WgOAsGAOBpqIlFUIuF4tdyfTy8tLcWwMJDNFkv1L+IIKhN1iMBhokzFjVTVJ0tLRa23TNHcuCOGadGFb6cYkOT+/HI/H/XTQYrGQkrXoMYgPXiwWWZZhN0axXC6LohDfDADi1EU9AqAl1ZIdMpvN9namPcZNuERCCBTlKUYJIxBRkFlStFfKYDcdJKv6S7/0K86527dv7+/vj0ajLEmdc7PZzNWNwNpl2TWqz33uc++9/4HEbVJMkih4drm4fXTonfjdthvFzIL66a2NTPFKlr9ZrbMsQ90iEpbLNSLaRMWgULHQcAkPVDs3UXvvomcfF0ttk1GIUqUAYhe81YojpUnyld/60j/5uf9uZzqOMWrAAIqBCSVVYgYIkTx55qg1Fkle1X69Xv7kT/7EYDh1keyAiWi5KkPEsvbzxappYl3XChQRzedzeTg3m02e5xeXF3/mT/7RH/mRzz19+vT733tPwI8A4Gp37/bO/v4+cDuLJTYffjgB1GtfzL1Ovbjy7YEOWdK2ZtD+vh8NJQR1lXTJXVAtJTtAl4XGGAXw5L33zvUHkUhCa+1jVOrKoKGgoLuJ+LaBXBTFm2++uf7G74t1m0wmZ2euqprz88unT58PR2/0/qN1J4wAvTzTttV7zer0uyfEiJqSxNy9e/djH3vwwfsfpfmIGbVuhxqtFYAcalRaa0BSSi7bKwU6
aBdC33nFLZ42onain7uWe4/u8963mrhXjBNbBvoVKDWAYv5X1Tp+uHtWr17+9vF563UVtWz54+13bjtvABDYiDgDZgbEqq611s778Xj83nvvPXhwXwj5mqZZLVY7OzsAcHp6mhfper1yzqWZhTnovT1tDUVo2AFQu3RoALAHRbdrxYqJYAt9RQhKKlH9qWppo2jGCNxqu/rg8iKt63WCaZbaN9+8Q06YkKNE8YvFIkmM1qi0UoDeN8RRZ4nWGH2gGAFN/4kAoK/mj9tib796/Z7sfhj7mj8AWGtr79I0JYYQKq3Re58kWQhuMhnF6Ou6FN75GKPSkCfp/fv3naurTel8s7+/X65X4/Gw8UGO1qdWsvcEgwNbp6e2wPliK/vSkbX27OxMMm+ttbWJtDyVUuv1SlyvuGS5oizLQge/cs6JKqI88IlNUQEH7gFWAt8V7Yf79+9fXFwMiqHEAdPp9PHTJ2pnt1xXWmuVJNYkTdNQ8Aqyy/MzoRR+/uzJ3t5emiQxhBC9rqrFYpEXxWa5HIyn0+nuarVZLzebssysybKsrusYXbX2QhO23KyFu0NpLIrCN63fBQAh1Kyauu/1uhC0NcVw0CKiqyDM3rIC3ntjEgkpmqYRpyjrEGOcTCZC0SAWczgcLhaL4XC4u7srKyBDBMJtqZSScUrokki5iVLglUWeTCZEoLUWGFdfNpeHLqEEABlI8mCllGTS1lrJ/r2LWtunz57/o3/0jx48eLOp6m984xvCyeW9967O89ymyc7Ozs7OzmQ0Dq0WOCBioBhj1KiUUqghHxRorPceUYhZLPjIHcmDnFiixU7qJEmM0tEH59xkMhITH2MsyxoRlb56KBARFIZItfNl3eRNvTMeOeeqqsmyomm8bELf1KlNbKq/9jtf/X//vb+7tzMBAI4hxsigIjIzR9EwIAqBQmynkIKrzs/Kn/5TP/nojYerdZkQUZMwIytDYEashqOdqnSz2eLpR0/Kat1tnpCkNs2MAviTP/1TaZr/7m9/JfoGiKy1HD0RfOITn5iMhtqgUhC8k9w+Av//5YMRsUcQM7Nw+XWFxnaVbjhgwWf1ET9DG/t2FU2FyEDc0hQxijxgcF64ccQCmCSJzNDV7baturlu6DlGRtR37tx5cXr24YcfTqfT6XQqhLF1XS8Wi2pdFUXRY3g7b/N6oK9cNLT+5tpCiLm3Cvd2pp/77Ge+973HO4NMYJnG6NQaa63psl5tUKmWiJUhctSkyCiIzDFE0ADMGtEqrZSKDAyMgAaBgYwRVe3w2ip073e3zq9vybympNz/Fb6u+4iI8pC86piv/Nn1v2IE6AQ0GJiZenYTUP3MDwvRiiDOyrIUxKAU9JLECqx0Z2eHQT1+8tHh4WHjXVlX2pp1VT5885FSaj6fo5o0TU0hliXAjhq5oJSPJmqtEcgYiMRBUVtWae+ekK4wM0sQ2PdOhC9adYGouJ0+uNtsNmLgKLg81QTBNbW1FiFxjbPWxlA1TXN4MF0sFkxkbIqKkRUCGGMUMGhtjKk9CZtSv+B9DLcdrGy9CLrR8+5GKADSWlMDSqFwF6zXZeN8lhWuaXu37QwPB22QfNibTo72987OXw6LAQBcXp7v7+6JKWSAPkmSRqO0EqU43DSNAkw6FUXJXUIIshrGGO99YqzAiGTMpigGMlWCiBKDyz4R1yIo6yRNoROek2cbpB/MxFHGCp0UtyXDE4aWs7Oz6e7OcrEKIUwmk9FolOcDkRxM0zTPBhITiMylgOOIg2jxVlWVpjYxuizLg4O9k9Oz4XBcV5X3fjrdjd6F6PZ3987OTgWVaa1umkpquUphMcjlTLxCcXISGaR5llRJX/IlItAIjJLtKaUY2QVPdS1cmNIFz7JsPp/L3RTNYACYz+cSiQpZtCQZ8/lcay3U3JKhSp9YTgYABHMu7WTuBHR7pPT8cjYej72PSlFVNcNhIZtNGskAoLWRoWelIhH5QBQ9gqbY2oTf+I3fuLyYh/j+V7763xJDmtqm8QBgDYQA8gw9fHj31tHx3t7eel0SQU9Fjq32DhweHgKAMSYE1ydb2KpcC3k+28RQB3MBIMmqMVCWpJPJaLUaOeeIrhkiIopRAcSqrGWWLDFWadhUpUksIobgBlk+nYw18u9+9bf/Xz/7/9zf2Y3BOddYbaJ3yrQGS4BKgTCSiYQKCQBenp//m3/hz96+c+dyPssGhWFWxUFdN2lh6so7zxXSixePP/jg8aDIINLlxXxvd6hssrOz8+3vPv4rf/W//+Yb93/lV36VySskCo2yJkssEdy7e0drbYyK3jnnZProh+c/N7u/ANfyTgDiSAykuK0IwxY7AnRlZCKKdKW+oLUG1FpbpYwyRhj+ZVNRIADQSgFGZmaIDFFuDIKW4LLLr/ozZ0RlUJBNW2JJzFQUxd27tx8/ftyjD2Kk1Wr10UdPbh/v37t3j3zUiRUO6xsO7CqfQ+ipK0HyEb7mh0Q9YzAYPHx4/+ho6urKKtRaJdKdUGhFklpxoltCVMUQdVQ6WFBaGSIVDIYQiKJWGhUwECrQSrepWaRikChlXFWjQWMMU+R+SIlec/+2zxC2fG1/XTdcae8PoC0hXOsc3/hD3Grjb2+I7fdsp7x9CsVd7bofpWiH1ZiZWczN3t7ecrn0LsYYe2IgYRiuvVNKiWhPlmWL2Xw4HK3Lqna+yFKpFiSJURrAsVJKdZygmju0XddsYbq2sxkiAxGwBlTKKBURNQBkWSZZV11XiEBERZaFEBRGjREZNPJonK/Wi7xIiQg5GjSYWu9cWZZJYqw2IQRWGl4Bovfrpq6PgIOMmeNNdQ0p4kntFxGLori8nIvEV1nWodN4v5ydxxgzmxwc7FlrB8NC4aHMGibGNq5K0pHRaYwxyVIAkJIjdUy8sk4a2/GPKExkiAJL7oPO4DxHgg5O5b0HYFFPkqRKeBBXqxUiWtvikhBR2FQYSEPrhntkjTHGBS9eRBJoGe+R52t2ORciSTnscDgU39PqIw0GElsPBgPhlVwsFkqpNLWnp6eT0dB7v7mcTae7xiQEMB5NbGo9YJYWQusxnU6NMUarxWKxu7vDTKtNLWqGqqNOlBhFvCAzE0cxO0meRSI0WobKEkoBgIEq76X124/6COxodjGHAbSTF8xZlsmyiMKj1nqxWMj6j0Yj4YcS/kUAKIoCWgBzJjVkqf/L8K6YSIE3yoeGEJwLWtt+GFequ2ISiagJXljZNXNVVcPx5NmzZ1/96u8WRfH1r3/9Yx97O8vz2Gp7iKCQ8jHUdX1xcfH4g6cAoDXkeSqb2RijlWxRkETfGENUx8iBSOJIeWdfbpFk12pjBoMkSazV5+eXznOWpLvTSXB+tVl7H3GrXhpjVMrW3i1Wa9RGaxyNRkxYFB4AFIM12ij4wfe+95//7b99tL8XfCOskqyiUoohKFYBgBl9BCIgBkBT1vXJyeVf/Wt/8f6DBxfzC5sXngEAi3xnZ7c4OXl5cnry0UdPv/Pt78ZIWuFsVh8f7n7hY2+kWZKm6WK9GE3yT33qE7PLi/nlxWa91AiE0DRVCO7
Rg8PpdErRR0Dnm95UKqVucP1u2/EbIXtvE3oQssxcAgB0kx2xYxPjbtIvhKDQSMONiNQWM64cWClJc1utoH5aQXXCQiK8G9sKFm7bduZour5LW1i31ngfmePOzs7+/u75+eVwOJQxD631ixcvzs/vHR8fczvsqLHTq3mty9kmzAIAZtXVLAEArDaMCED37t375Lsf/9V/+aWjo0MJ+a1WLZlNp2MjzREiMAhkdQs8AaAI6/W6qoICRm7pS6xNQEPwjhmEBGCx4EQbgyo2NQMI17loPW771OtnKwv2+vuLXVWt96Pd19emmG68offBsOWktz/01R/eOFTjnTIaFIowJ3fqLuJgXr58maZpDKyVGo1Gq9VK4uXZbLYqq6Ni73xzvtls0jTdVKXw/jOzatCHZjIcDAYD753WWpGUZYBVF88BsuoK99xTvcQ+SpCiNCqDQADR+6iUCcGnaUYUU5sE55VSDDHLE++9UkjB5YmlGDQKWUsUhlxjDIDykQDV9nJtB3k3FqrTkHjNr7gFYbEMziGiuENZtLIs15vl4eGh0gMfmqbcrNdLREyzxCg9Gg3mc58YOxi2fExNaKSA2avXiR0XxBAiiq6RbOMsy5r1Stiv2rFLbhn4pPgsWJ6ObbHFum82m/V6PRgMkiTxPlRVNR6P+2dYGy3QIW6J+7Gu6zRNszRdrleilyftW2bebDYcWJrKgoVuscfOFUWBiskTxdiSQocmy3tK6pYAJwSntdlsNkfHd7XWTGyt5AE6y7Lzs1OJ8HxwVqeIbT9oNBolxgDAYrFI05QpZFmxXK5NYo1CuWTnBQOVCTI8xtiUtQCjhsPRcDhEhh7CvVyum6Y5PDx0tUeG1WKZ5q0aWAhhOBwKM6uIJgkeYjabiWcVDmrJMtM0FdkMZpYAUcyXDB3190WWSCpn1toYua5bJhBxxkJSprUWUGTtGkSMMZ6evHz33XefvThdrsrBYLBar2Pky8tLrZQxyqACq40xezu7O5OpAMcAFCBCBFDIHAF5Z2eEiForIlLKRK7bR6xV41AAUKRt61d+LvWbLMt2dnYkXtnDPYnRQ1h3tUh5gpAVAmPT+KqqqiprmmYyGk93xuv1Wg8HTdN8/Qff/9n/8v9x6/CAog+NQyUT7Z4jURR0MIQQQwACBEZGWJer/+n/7H98eOt4U5bFYBwYfAzEfHa+vrh4/NWvfvUH7z0eDnPnKTHw9ttvfOrdjw0HRZraxXJGzGmxn+f50fHB7Pzs4vKcKPgQgY0xWJWbhw8fjocDxBbqmCRXOMTXW+cfngHL3DAyMERmVsiISnXNq96adQ6StWrHuxERlTEmSdPUJKm0KvqEqm85SUAsHydbSHVg/qsuQGeUiKhF0wEAtl3otg1ZFMWtW7cuLmabzWYwGAyHQwHX/eAH7+/u7j969AAAYozGtMIm/anDdX92Y40QNQBFYGmcFMNBVW2GxfiLX/zCr/zKlzSC1soolKqg1T36l3uMVZIkhowEF4hI2lxNhmidWsvMWsFgMIi+qV0wWk2n04vzc2u1czHLk7quESBGKRpc6aV0vcNWNxfauab2g27Yd74uyNP/8EahG69XqrcDsdavbIlZ9ner8xlx+8R0p6EmwbtSSqgKVKfpm6apOFd5UCnCelUS0XA4jKElVzo/uzw8PMzSwjVhtS4V4sGBdS6G4Oe8Bm0MMKMCikmCioECowi+SJ7L16ITAEXsumttGx6olSIj90wpwwxKafkfABAhBH/lIpHQXMNzAQADiN4RvpLy3lhbABANzr4HIrDp3jn1R5bkcjAY1I0Lng4PD8/OL7TGTbnabDZJkkwmk1uHB/O5DSFU5WYyHtV1OR6PdyZT4SCUeoPQcQTnffCi/S6oH4kIJUOCniQWwGgrfjo4L7uXWUTLvZRMe7EdqaxGptVmPZ6Mm6aZFNNmvkzS1IcQvAfmxFpjNBBzJLOluSRhNHd1MymXiXRuURTCEBJj1NpW1VxOVWZnlVLO1z40aWYR8fT09M0335Rjys4hitbq3d1dOXObJOVyJewcgtOu69JY7Tx2KooWsYkx6g51pbWOQIJEk/lvof3RWooFwMzWJnW9FMSZ1rosSwYqslzwxnIySZK9ePFCyAVGo9FwOKyqarq7471/+fLl4eEhdQgpY8xgMPjoo4/29/elPH55eSmOE7r2geAn+rp9nwpLl1reIOwrsj2Y215An7GIEJk8ic65xNjlcvn06VMi+oVf+OUv/IHPP31xcnE5994boxRiCC41ViWptXYyboFp3vu6dkprREYAVKrclPdu3RqPBvOLc2bJfVsLQB0xgJwJAHjvB3kBSMaqPMmA2DlnrQ5BFYP8+PhYyDtd8N57IjE7rYyj9977uF6vk8T4xiVW3z469In58PnTX/nlfz4qcooeIimNHMmTBwDURkXtfCRgIGQmpdGH+P6H5//ev/e/vnX7zmq12ju4VdYNAz59/vIb3/jmN772zdPzS2MQANbr6qf++B/5xLtvH+zt1tUqBOe9n0xGIbLW+u6DhzHg+fm5jHjEGCXZJArGqDSzTVNFCn3mo1sShZtGWF6vVhmhz2sjyc5H6htqLNtSorEewNz7L2OsUirEmCSotYyiteW0NlFhkvkiDex9E0KblIqMtOrobtQWO4qc8NVsDGCfkZOoJL399tvPnj1bLtfU0e8BwNnZ2WKxqKomTW1mk0hR0ArtEQCAX5N/XCVJQuYPbfjJzBSjc/Xd27f+6B/+3Je/+rtHh8cCP8mSbpLSWufqG4mjrKzWmhk1qmC1AqO1RgYiVsDj4cDVVdPMjQKjQGs1Gg6qSi3L0igNXd+aiJhCL+u4dc78qowgvJIrv+6HcVs4aPs9/ZnDdY8OW/57+/9tb/3DToBbAQsQltGXL1/2aTF3QhzcUfr1BcY0TZ0LzoUsy/JNlaTm5fn5/v5eWZZFYvtPaXuxMjnH/TxSO+TenslWkaO78wqUBgakDuvG0AO4mOM2boK29HsJFUOPXsZu2Pv1bXjsas6v6k/D9UexX7G+kFjXGwA4Pj5u+ZyVWq1WVuF0Or1169blxfl6vZb2CjIYY5LUWGsF9ZpngxaZz1Jyb++12aLm76kkZGooS1MKUQZmiqKo63o4LLz325OIfUQsFcvpdCqMsMZq77qRJ4AQvDSr+odZSLWEOwIZeo6tvb09aXxuNhtkmEwmSpk0TafT6YsXL8QJCWQ6hCDCA9IdXy6XWusksZKgr5cLBLXZVGVZEgFqc/r8xd17dxTgcrng6HsZKJumiFhVFaJ6+fLl8MF9RJRyOgAQYD4oFouFMWY0GnnvBaZnklS0jzjEiiruNCrWm41I88oQV2wFiJS1reiylN+rTTkYDC4vL5l5d3eXiGTCuCzLo6MjSUeop+iJUdZNugNiZOXgACDXy5E6aEXSlwZDCKJ0Kzhz7khMvfeCkDZKO+cI8MGDB7/wi/+3e/duP3v27PTsXJtkMBgIu6e11sfIde29r8q11O2HwyHiZrOpjLVVVU2mo3IDDx7cGwwGl2cvjb4ixbuxpdsOZYyo2BjDsW1J7E
ymQLwyq7JuiGg6nYYQFouVcy3Qkpl701eWJbIb5MXuzuTi5VluzWY5++qXf8vXFTAjsYj69nkeEUWHxKqsK7DamMRT/PDJ+X/wH/5vHrzxaDFf5uOxd7wpm1/9tS/95pd+a7kqJ0WKAAr4z/yZn/zMZz41HY+8r6xRm7UbFDlRGonLjdtUtVbZetWcvTwvNxWiTmwWY2SO3vt3P/lxRFYaMaI0SyX6v1kGu24f+tPeNuwUhPoqEJHItAAS8E1bzR3/sboiEkcZRZLJyxtGBoV7KpJzTjEopsAt1g8Aul7htYJra2OZAK5kaAFaDBEliQGAR48efeUrX5GqFCIWRbG42Hzrm7+/v79/eLjPzLGrfQO+xjMBAAN1Lllxm04CMGitbWIisbXau9rY9LOf+5Evf/l3E6NcXSZJgqnlljPTSQVve+fxFXMyIGJitBZWemZRVxoUmTk6WC7nTVVOH94/3N+NMRZZErmt0ccYfWz7Z9hOR1xNjjKzkHZsb328Xkm+Yd+3XvH6t9cc+Y1sGF7xFn3Itu19u1PCPrJTIHsHiICoJSY7Ozu7deuWVBr7OEu0YqxWRFBVlfdxMtmpqkp6bHmee29ms1lR5FprqzRx7IPEtvUkSISuMAOAQLG7in4KnAGA2pvAzExAwLrdF53/Bo3MhIgEijtYWS/zAF3bAhEQlOo896urjYjA6sauYwSNugsErjDJcnDd6qoqIorRF8MBM08mo5cvT/amE2YmjkWe4d4eUajrejQYSjQDjE3tg6cYWNywVAUECie8KAcHB5vNpi4r+SxjjNamKWtlTc08GBT7+/svX76ULG13dyqiwiE4ebhaUb8YJZCSKrQUq5fL5cHBAfXMoLGVLZE0t699SWTknJdnZL1eyzulW/nRd77DjEdHR9bazWYj4ID1ej0cDtfrtQztiA8uikI8d5pamTM2hXnjjTfOL2aj0eRyPh+OBtZaBTwY5qfPL40xi+Vca31+eSnCSmIrqsYZbRtX90RUUugGAGk2yygR+WCMcTFIs1aYIrTWRqtNvTk6OpIBJJkvEopmCU1ms9l6vZYO9GAwkHkkyTkkngCAvb09UYLqAXHUzX1J4iuBab+eEu5JM1hgaPI2CTL60qK4agkUGlcpzEySeu8Tm6Rp+nM/93N377/xg/feL0ZjbRJpPwNiCF5rTYwxRmAWwLZsoSQxIfgiT43SyLC/twMxABMAyaiqpHr9Zoa+xkMtiIxbtmGlDRaDTCIPTpK9/V3V0cs33kmspjtSbiQ2SjdVfclxfzp5/uTJy9MX1WoZfNOOI7YDv8AMPsQYIwd0PqKxEQAYvvnd0//0P/n3f+RHP3dyehZJfe+77339a7/3C//8NxRCIFAIB/vjv/DH/tjxraPd/R1EDr5WSHVV7u6MyBNo4wKPx8Xpy8ff/c77xLiaXSqd1k0ZAqXWVFV1997t4bBgIABCbPMNAUD9/+KAO9fWxhzEgbs5XcXU5WzkvetXcvsIXVEHiSGxqTySwriw/VkKiOFa/Vkz2SRJWunf7h5tSfbK35rONnYetM1iZReG/YPddqJAFIaJANTTp08ff/DhaDQqigyBqeMiaM8GbriT3m72C6GwU0P2rjE20RpjcLduHX3yU2+vlm39oXNCLV1c7xclq1OAgAqIkVkBW2ut0VKUSES3xJg0Te7dveOcd3WVJbqsQ5IkjBKrcgiB6hqNlkgWABRyBOpCkNbbXbPvW752OyN/nQ8GuHIbXXADLGOs/fuZWaPGroCKAO3X3H3dFQzEFSMgiWDW1WS1PHsBEdfrtdggoQiQ3ybaGKVWm00+HSNiCFSWy9lsJnCMshQ9OCw3dVlWaZqGkLYblFViGRUTAEdWCgJfdYEFaQXQovS3VoClcMPUzjXJTFNX1cfIxKCuj88rgKsFQUQmlF4VXV/b7ToBgga8+pNeyVjAEX28gpJhI0GHdCWGLMu8i8vlcjAY7O/vLxaL+Xx+7/5dYb3Z39/lSL5xaqS2uwzSOe6ewBhCIE3CJSuszoJ/7rYNpmkKsdWbahonh0rT1FotbERCCqE1TiYTCbfLsizLUhCwAtNN0zTLU2ttXdfGCPwV5TQkPJfJYwBSCpVKkyTJityYxIhAggunp6fHx8fHx7eHg6FR2lotok91XU4mo+Vy7lw7DSxPQZ7nQuUYAhljd3d3m6Z1ikmSHB4enr548fjxB2+8/aZbOSLSRtVNzLPicr5IkjRJMmvteDxFxOne3uLyMnTCLXLCAuGWHupqtWICqb3LMhpjgElWWHihEVHqAZKy70ymXZdaD4fDljI6tnNEfXF1PB5XVdWX5WXPSK1OrrQXWfLeS1AiE5j9gy+ps9xoYxQzCysyIsrayt4rsjzG6HwtDT/nXFW3A9bQMchKzAdSbwNDFNMkqeuSmXcn08FgILFXjNEYnaagtfauUcAaryCuzG1zMMZosB+GUUREIUJHER8E+JOaJEmYG0gz3pl4751z3jcILZpXEg9mbuoyTVPyrt5svKtDXRmNsYnMqp9FZOYYObRQkEiatdYuhG98//n/5f/6n73x5tsfffTRD95/7+d//ue//OWvEwMCRIbPfPqtH/+Jn3h0Z6/FXmjwPhgNhJxkKTNHjUoZbWxejD5KXv7LX/31j7/7yXK1joSARisCpZfr1R/44ueLovC+kYQYW2UfRFAKFfGNbAe65bpZZeQtgQBmRoqw9fN+pPCGAe8ro20Wqm2fHXXvou6pDyF4lmY+oun0yuQ4JDZd1nOLosDAa1/IWiut0+kU33jz4a//2m8OhxQjl+UGAPJ88MEHH9y5d3c6fcDMEaL3vsfN8hbO6LVL0546sgQZTVNnWW6MmYzxs5/59D/9+V/c3993zmHn1/M8l8pVv4IoOZh0RLRCpVihUlY4NiAxWmMxSNI0d3V5fnmJwGmWNE1jtTLDrCyBmYM2FKIQR6xWKydMoZGVQYrILUTr9RNWN3LTHxaIve5X/Q5oDwM3de+vPgK2Ng1ejSJcvfoFl+ExmfGX90vM3jRNojBNskt/SQTGtOMxy+VadWORBwcHaWrn8/nR8YHQriFqWVsCZk8oOCzqTltqwm0NX2HLyI0R+obK1aW2O0HAbgwAIDxWwtnCzAqVpLgdRSgwAwMBKARAgPDKRtreY5KJ8hVGGxBFOUKcM8q6CW7EtxUnNRwWzHx2fhljNhwOHzx48OTxh2VZHh0cCmAqMVYUC7q5uJYEQ4DK2BWmutQzikUmImGTkBQqOl/XNWg1GAzEjssoi/e+qipBOwvFxGjUUjIlSfLi9PnJyYn0OOUEhsMhKqjWZV/2lGdBHIOkbT19TwhBeyM0I/J+EVEQGgrn3KDI14ulBvSRUmNNroINRVGcnp7iaOyqer1YjooBMzPxzs4uxAiw1soOh1lVVdraJDFHR/fPT07yPD2+dSRsl9bawWCQprl3UaRqARSHEGMMPmqjZHhdtnTajVR57xWCQlBaxeCNMQrR2NZNyvpI/C0IZKEGk+oOdzWwvb299Xq9WC03mw0iStm/v1ntViTCjgxEng6pEPS7yHTiw
Y1vFQ+ZRZjIM5O4f+qQ7dK8VxqIgmIQVFSa2TzPl6sFAsjskI+RGVErqdqBQmABqWmJrsgHpRGBB4Miy1Kt9WJ2kSbm7q3j9WqJiqvNCgC0QWYOwWvdspD2+RwiamWZucdFokKlVJIkMlwgzendvR3hQ/Xes49GIZCiEIPzaDC6JjKtLs+1AoyhKTdZkkZqlWEjU4zsIxMjoPYcSCsX3NMXF/+n//P/4eDw9n/5s3/nn/zCzz998txqEO/7p/70H/uxH/vcrVtHSqlYL+t6ba3VSjNEbTQRYjtBp7wnYm2tPTo+eO+DD27due0dOReAEFFtyuqNN964c++uYOalfqok/AYpuirm0FvC3urCVsmw96DUKQy2llf2BpLQcUhAA0KZd4VxaZErAO3gOLSCLrYzRK3ICzMIYhERGQgRkySR97de4BVEmHzxGgfMHQKoaZrEprdu3do/2N2sqyTJ9vf3Y+PKsry8vDw5Odnb2xkMBpEjXPes25/VW97+yCCLh9g2YKrKeyfUufcf3BWRcESUCBRbbLaVH8qf966IiEArZtYoinNtNpwkCUfa25l63zDHLEtWq2i0Go2HISqWkk4CzJEZldEhyGwsAjEapbSMmcuG/qFAu9e+bkQeryZw/de4JeXU/2Tbs8J1HcpuQ1yrrPZfyIONqInAmMQo62rvm3B0+3AwGCyXy/39fd1KTPLJycm9u/eZmvlsQUR5ns/nS2DlvS/retD2TkKMkUJUGhCRGKX3qwhk2DwCSxlcLlNJE1d2JBIgAYicFyDxFaD6leXajiTgKvIQ960QXpMEI/RsOO0I8tbWkqOB9Fa69UFQHIFDCNoYY9KiKAaDZr1e2rQYDwef/OQnnz17lt5NsyxJbZIkien0dDuPa5TSROA7vSMRNjbGrNdrRA/dEy7bVdyztVYn1ph2iE4ybGGTmM1mcnzpTG82q7JEpcytW7eIKM8HMcbFYiFvFi4z1en+dgvFAEkPzZPjMHP0QWVKHhnJ+c7Ozm7duvO9733PGJMPW0VSKUHv7u6en58PBoMsy0JwAHB5ebm/v98KCiEuFisiGk7yl2fnOzs7yqr9/X1UEKN3DkaDQZ7nlxez3V29u7svYz/WZgiCQVPr9bqXju8pmkMI6/VaUmGZhxb/Kj5PKVWWJSIKsbNS6uXLl5LDpWlal5VSSrBRQid5eno6HA6tNmVdiRChjFRh25Bu4dxifCXX7Pu+ki4LYhwRy7KU7UStTGEFACJ+JZQRbeXZKq21AgMdUpI5IippbL/zzoPVRiBm2vvoZQAUWCkMMWigGCkGZ60OTajr+vT08s/9uT+plPoH/+AfGwN/62/9rcOD/eXsAiIVebp0NXTiekQBEUNoC4RiIQWTJVvCao2mNYxpmnoXy7JEhNFgOJ2MmXk+n8slKIUx+roOidLeO41cpAlSdK42SjPIdBEwQ4zsAgWKQkbdkGaGF2cXP/2n//T33//g3/n3//enLy/zzDKAj/C3/sb/8NEb94+PDrRG55uq2mD0CjmGoBATa61JjTGqawqsNpXzVFfre/du/czP/Nm/83d/7tbxUZYlWZrkebpabf7gH/qi1jpSZI5iUrpQA5gZIt3Ij3o7sOVBr8YCmVlJF68j2IOW+ipuJ8e95+5GrttSk9Y6MvTGavuzZGYthoAAMURltLXWIEQiYECtsEu02vgA29TObNu+7ogy12jSNA0+Tqfjz3zmM1/+rd8uioyITF6s12vv45PHH+3uTu/evQsajDEMV67i+nJwZ0O5/xREFiyaMUrYDARBsLe39/nPf/7Xf/3X8zzvIZQdX8zVg6Q6Pn1ZBQUEUquhiFonqRkV+Wg8YIjj0QAAZBi/KGpmHuQFR6pdI96orGuttHAphxbl34bPgAxAQgByw03CVpzV/9/FH9fZifn/C5Krv983fMyrR2Zm6kSstk+pfxsRaVQcSQEaY+bzed/bOz09/cQnPoGIREBEgkEV8+e9F8xeCCF0xPRGqdC9LBttWtFC6Iso0H1PBlCoOEhw/EQkHHUA1L4FWbV/xQCma950ECTuMlaGNjMW2Bag7NE+1LhaH+6kNaQPfHMBNQDFrmnaAsuBkiSRPEy6d8Nh4ZzbbNYHe3tJkty5c6euy+l03AvHhhAEZBUCKdVilIjaaVHZnzZNeqciZyvPuXM+0SZJEkKQiR256slkUlWb9XotIWZVVZPJRMaQAMBaa6OR8rIMiYlnGg6HFFqwhZyeUkrrNrqXeRjotA6dc6ZpEHG1Wp2cnEhBWwZnjTGr5bKqqtFoJO5Zpo0FbUREIqslTe6iKJaXl8IZXG4qcZl37t8h72YXc5lYe/Lkyf7+vsK2FoqgnXN1VW02m+FwiFobkxDRfL4YDAtZmfV6XRRZjL6qNkme+KaVHRSnKDIJzDydTsWDqm4WCACkwSwpoLV2PB4XRXF5eSlZKTPPZrOdnZ0QQlVV0i2Wa1Ft+78lzZWFgm6wRwrFUinVSltrV6tVkrTzEfKkyLNgjNEG+za8Usomyns/KgYAal2Wd+7c+Zmf+Zn/3X/0n967d/tisWx8S8BCTBpVCFEnbQ0gBGdtC5V/6623zs7O/tSf+smf+smfKNLk9ORF4yql2r0UY1QaVMcmJLUr7krlMUbgdgjFGoOGRZYqSZLhCETnA0BlWba7g9L8dM4xgFTUd3d3QuNicE1dYgxW6UieYwdEIiaSP+JAHJlcNAB4dPvhv/i13/iv/t4/jgxpCvnA/s//F3/zi1/4vMIYvStX6xAJQixsGiFoQBeJImitQxOQEI1BrWOIe3t7T58/U9oQN5//wqc/8cm3/87f+YUffO97g0G+Mx3tHuzv7+8zM+BVVtr6FFTMSipf/Loa5LbR4A450X8NnbYgEfeLLH/Yt0tk5AygjZKNSbaPDABCkoiKgSDGGKKTCAkABPOPiJ3YK/eJ6Q0v+WoG3PEcUUBUMicwnU4PDw9PT0+ttS6CMN6dnp7KCHyWZa8txPev/oP7aipAK/dLFBBbmWvxqg8fPvzKV76yndDIiJExSQjysAQFKKS1iKiR278EZqWMVUWaDQaDzCYy0rdarYxJd6fjixlba/NiIlmLMjpJksistc2KvK5rF0KMZNOEmX2nO/baVx8BbbtSefVVr/ZXyL23uOFZtw/AW4kgdDH49sdhh+jro13eam6BmBJ3RctnrT0/Pz88PJzNZswso2KirC6HEsBLXdflpgK4sFm6rspiNKzrOkkSMkbqBDFGozT8EL43RlAAzKrrVSF3mi0MEbDDFyCQTAtey1YVcpR89l+91Nvr2dUI2la6eqXZwVv4tRuBjtYmSRLXTcvkeb6/b8LL8+VyOZ1OR6MRUByNRq5uWk+52bSOLbTADcRWekjW2TkXmYgoy7I0TYG4S2oTGf+IMRajYQfEaDNXGRXDrocnvkHKrZvNhhGyLKuqlshpOp2K2o9RWrgYdafPiFtgYLHyACCUyFLjFZN9dnYmNXBr7bNnzz72mU+E6LVRqUqCj977yWTS7y7pOneOPDSNn4yHMUZU6vDw8L333jOp0cA20cwk
6XWMUWtdVdVkd2+13GhtQ2jDuNViMRqNmqba2dkBbHdvCCHLMpEOTPIsekfBByaNygcnrdk+GwYAoZTpe67y1MtT8OzZs/F4vL+/f3Z2lqbp4eFhkiRnZ2fC7y2NZOzqQ/3DQkQy6CK+HDqx4a4+FIno7Ox0Op3KYva+XNjKANu2sbzT1dFaO5s5YwwrTQ188YtfONgfP3/+fLp/AMoJQN173zQ+z7Pg2+EWIjLaxBj39yff+MY3/uSf/Ol33313vVo8ffxhCMGgqlvhrNiiq3TbY4IODiYmvmma1CZKKWw5OgwACGFPkiRCy9M0fjQaSRIvebx464P9fYOwbkrfuFQrF2MIPksTwZ8TERETcWQKRD5SDKSz6WKxeP7hhyIn9Mf++Bd/6qf++Gc+9cnEIkdvtd0sHAIr1MCEoJW1McbUWGZUoEEjMxhlmCjLsrpc7+/veorz1fLBw/t/9s/9uR/84PL3v/Xto+HwydNnDx78wSRJlAHXBOywygwRQXeKBuo1ooNdC6q3GL2pRESKBB3CtLeitCU53xtlbvtcVxrkzIw9M4HCrcpujLEFrmJXsm0HaIkRFW1hXLbDAhQu6CtrCiAtSQSwqJxzEDlEt7s7vXP36Nnzx4NhGryKDajE1iH+3je/c7B/S2ubppY5RgpJmroYlEKl1GZdpmnK1BacAVipK1iWkJnJ8GjslIuUwfEkf+fjb3z9618fDEbIqtqUWZYBiZBcKzjoyQMicaBIrGKWZaiQGRONibYcKTqXaDyYjGezWW61VmCSdDQYKGO0Joplkau6rnJtNhjzNB1mqR8Nzy9neTHcVOVwOCpMslqtHMcGyJjEew+gEZGYrdUhBNWFkCEEZkzTtGk8IqKMr6BsAGlCXvPW2HdzQZQnr+okvZXvvXX/5v7Pow+olVjhPgqWNwSKSZY23g2LQYzx2bNnt27dMlbXTeW931Q1Ac4WS2MSZdLlpsyyzGbpR8+evvHGG5nNUpMrNrWrq6ZJoqCWWuSI9zFJEiBUqoVECileJIrgmBlJPKlsWYUsmkURwDCzgh5KBmJ+iBmQ0CgWQUMEifw6/9265Qh0IxBRSkn9WZ4w6DZ0v0S9SoQ2PTUHKNSKFQdWhJow+pCnWVR4Pr+4dbh7enpqNKfWhADOOWF5DYEYtUmy5XKeZZnW7Qi/WGH5rKbxWmNwsShSCq16eYyxKAxiJv7Pe5emCYAWam4AsDYtimFVNXXtqqqZTrUQ8AKonZ095xxHCMGbTAfnow9G6cY7tDYfFM45oxVT1EYrrUFpIgZQ3sc0TbMsaZrKGBN9MMZkSVoMcpsY7/1qtciK1CQamHYOD777ne/cv3+fG4eJMXk6n8/TNFVyklUteJ8mlDrREZt0kDrnZpdnh7sjt1ocHh6WZTlfLr33t+7emc1mLf4oNMpyuV7funUkZJ8HBwdKKVgtnK8ROUksEI1Goxi5KIYIajKcnFcngpNLErPZxGpTxhiZqNpUSmGWZb5xvnHj0Wi2uEzTdDabjcfjNE+YeWfv9gcffPBwOBhNxhKbS1l7tVxXZQ0AWpl0YCXgdq4hClprwdykacocm8alaUrEWZaE4NI0Dc5rjUdHR+J3xXAbqxh0iG5TksxAS9S1XC4pQpqnq9UqYTAGqrqaTsb/4X/w7/4v/1f/Tr1aWGMDEVIcZHnVlM45rSwAhRiNTQmJUSmjf/mX/wUiI9B0NI4+aK0j6wimcrWKrBgUAxAhamY0xjTB195prSVcixDIBW0QHCm0xhjFZK2unc8G6dTszudLrBtQOBoNNAdD1Xx29vDBvRBWy7MSmQ0H7wISoUbvvZgsT+SJQ0TWifN+1XibJh++9x4R/I/+6l9cb1Z/9I/84c9+5lMAZI1ZzuYhBIKQqyTaxDmnDDpXR+10qryL2liZ+k4S7bi0RtfkTJYwJhmm1fPyc+/+RNyMfuNLv8lARCFGeOvR7TxB750FFVgBM7CKCIBC0U9aaxMVMzMxIffhrHizXnFHI6BWgYkoIkVBFyGicHGIkWCGlhmVaLuwHxQaqyExESAyKESFioEVMqGwzwZuQ3NixtKFPM9tMgokY2+WiKC1m4CAgJ2FV4jYaRy+GkFIVaqHUN+/f3+xWDx9+nQw2MnznIiWy+V8Pn/y5Mlo/HHvwRglCBHq0OH91EHv+fvI4nVRS/ueQZF98t13v/2tb2VJWlUNIlaN64HKfeDQH4eZBWJnUUm1pK8ODYb5bDG31mpjQ4zCJCDXLl0+ijDMiwhgrCqKIt2USimCfDAosKx8lsUYU6UBFKHywQu4NLT8JphmNkaPqBDBOSfD2gKH6SMdWd7IhIg9nmt7BW5keDd+jluveF0Cc9s3S4Cc5/mTJ0/yPJ/cur3ZbDblRtCeWuvp7q5rA+oWTHR2dnZ8fGxtKnq0qkPrOefqynBCzrm9nWmMEYi1QSJSgC3sqe1HIm61sZmZkTsG6WulY2pFSNrrfO3d78lPbqyD2uI67fatQkTZZn3IKb+iDuS/vbA3Ahoi4i6XEqDT3t5eWZZZWrT9HlR6S8VIkk75IFlM2XJ5nitlBG8lbYvYUYRKYsrcEkwKukqGU+X9y+Uyz/OqqqTaLOcpCW6vNg/dwH6e55JnQ9c5U1sEnMa275RMMca4Xq93d/fb9oG16/U6S3ORAXjy5Ek+myulRoOhEJMtl0upn0s6ldpkfjnb399rmqau6+Ccc36xWAiSq1ytRYhX0GRS0m+zf4ByvWHm3b2p1vru3btSAFgul87VSWq8b6IHKarLWpWbtSTf8u1gMMjzXODWy+VyOBwKh2I3rxWsTb33wvQUhFUZYDweS5tZYMyS6DPQer3e39+fzWbGptL3lVsjyZ94azltmUSSDl+fDMnXPVeBMKX0QFmpeAtuC0Gfn5/LXDW00oH0qU996j/5j/+jf/d/+x8MhyMfKAKv16s8zwJFYVuSQCHNrGS3u7vTf/bPfvXD9z/4a3/l38rzVACAANAXtHirKykrJjwq8kpNapQOIdgkres6SdMkSWrvBBDTNM1oUCxjKNerncmkKZej0Wg8yhFhs1oJvS93aQADERNE8i4oY4N3AXSIvnYeUc/nS0/wd/+b//yNB/fXm6USZU7X1DXZQlMTys1Ga12F0qa6LCtWMRI2VW1MotAQIyJGGaDXCTObpNiUze/+ztc3pb/74P6/+Bf/8gc/+N5bbz56efJsMFDHx8cA7XPQ18kEgdv7ixvls956yAPfPlx80+QyM0Ps1ZDkT2krR+rfbIzR2iqlcEs3AbuBXaKglBIYslgGC5ilxXZeRERK6R+G1r5WHd2+Bt0xOsnGzfP86OhIOgre+93dXUFLnZyc9NtUdckHEQUfJQPoV6o/+I2XnIBGQCaOoWmaw8N9Wfo8z01imZkIQiDnggxgGVSKQYsMI0H0oschr8hA3jfaYJqmwtndd4CkGmy1LrJsOp1OJpO9vT2DKBMI4+FgOBzmWaINpmkyHgzGg4HR2miUsp5SQBSyLBNRZLl5SrdmRameS+v6JeOV8+iXmrZyX8Rri9Pvku3lusII4DWnorq
XTLAopURDZrlcTsYTsfjr9XoymVxeXkojUE5bLF3vAIQbYT6fbzab1WrVUiSGAACoVQzbW1MpZRA1XvHC46vnDwBbshZI1x0hwM3u+Gu9L27BoOT2CbqnL952s7Cm+3cTKN4eQbfGS0pDEs0YY8bjsRAeCYPjcDgUSZm+iithX/+UiuGOMVZVJbo64lzFrG8XaaRmJYGjiOGI3RdXJ/kTdCAggebKJ4pXFsLIPM/X67XAtWLHLB06JcG+BCK46MFgoDri5RCc/Lkk6yGEJ0+eyBTy048+KrLMKAUA5XojHvdw/8A3bn45k8pqnueptRzjzs7OZlMlSSZJpE7s+fk5EWXjcYzx6Ojo4uVZWZaypHmejwbDoihm8wu5rqapvG/kbMWWee/lkRQqShlAEqSVnPx6ve7763KxMsUu7DHCG8WMo1E7uCWXL4HLZrMRmBsA+OA25VobtVgsxJ+FEOSw8kHiUyVI6kk5ZHLJd6JV0mKTjrJEQi1yWynq0BKxmxKWCofgA5bzBRH92I/92Hw+z7IEAJLEuk6fToB1o2GRp1kIYbNZN01zfLzzg/ee/OzP/uyzZ8+sSUxilTKyMfpnQcRm+g0pJkhssgQlRJTlgxijCH+Nx+OiyJIksUblWbK/O/VNfbi3X+SpQlzO5jFGoACRiAIzE3bWO5ILsfER0CKYzbpp6uA8XczcT/7rf2g4nax9oxIblbZ5Bkbp1AZkD5CORhHVaGeqktTkqUpTgoQh0Tr3gevGodKAhlh7wiQfPnt++g/+4T/+z/6P/8Uf+aM/bhL7j//pP7HWVlW12bif/umfHg6HssidGbmCXm4vyw0bAtfcJDBfUxhs7Zhqr7T7+dVvb5ggYxKtdWesrsyLfCMEz3ILuGsR9jgGvN6HvnFk5ghA11Tit9/Rd02yLGuCB4CdnZ3j4+PLi6XoDVhrsyx//vz5k4+eJW89ShIjZDHCjBijqA+prc+72atTV5PULUM0EWmlAeDdd9/9tX/5G0mS9V0cQUgpogylHB+ZeyHC3sNda5dKdBkoWrFKPmhttGZrW7EUbdRkMhFzyUpF8sFT01QU/M50UrsmSQ2vy82myrNEKVVualSYZ4n3XhdZjNEY7Zz3oTE6kYKt0KNf7QOFyG0EwAoFMdTfDL5ySCQl7mt/261J/5Pt3dZvMnktl0vJRXp0lTBWLv1mOB4bY4QhSGhyEVFwWMJGVBRFUeS1c3VdWyvThIyIi8VyOh55HzmGNKbKtuC93ooBQN8JISJgxUg/hJHl2ohSf99/2PbYXoHW9eI1F97faEmdsasEbNcJrr+z/VZr3THpQJZl+/tWHHPwJOvGzCKzKtxVYoulDyS/HY/H4kTrupbYS6BPYt878tR2HkYsuFRHBccroYA0ibMskwybmdM0FYsvwj4iYi9uwHsv0AQJaqWTKgGQNP6FUjHP06ra9BGblNGm02m5qV6+fJkPivv37z97+sQ3bnd39/nz53lWNHUzGY5kgjxJkvViebi3rwAkRGDmhw8fXlxcnJ+fj8fjpmmE3jKUpTiqnZ0diQwkKZTudYwxku/vCEPMssxV5Wq1SlOLiFW1ljl1MVsXFxfHx8fC/iEbSSiOY4wXFxd379598eJFmmebzUaGjJ1zQrUhpFqr1Wq5XEoqPxgM5B7JYkoHV7JbIRqULwTbJfGorKf4s6qqhB5TeuqCh3C+ZmYZA+Ut9aQu6cH+a2qx083jDz/6pV/6xfe+9/2jo6PVep0aDS1/IfbmWM4HEVNrxdgeH+28ePHia1/72qfe/aRzrnYNgJL6fGsxkABU7MAZs9lsOCxQschhybXU3iEq8l6wWs43o2FeA2imi3IzLNLVctnU5eXZS+IAHQRSAUdmiTMjcSTWNnWOXCQCDoSbym/qZlikv/DPfnO9/o9v374Vox8Ni8EgHxa5UtooLc+RVVruYFWXw+FQI65K76IaFgNlzLp01tpiOFBK1w6//OXfOz2b/8hnP1E37ktf+fJ//d/818LSCgDvvPWm3BcOIjbfd5Su4MTMLM83MysGuqKaAKSWSY9ksWIkIoYIzKgYAPuubas6uO2e29vURf/bk6LdwCMRAQsWvR2glwNp07Lj9cZHKdWfWPdD7gCt/EPmgAEkVO9ngRBxPB4/evToyUe/URRDIkqSNM/z995777333rt3/440UZQ1wAyMSmGMUWvbpvvXYWnIxMywVUnGDvqfGNP48OjBw2/9/nfOzi4AEJQJ3ksSrBR5ra3Aawk6B8eIul0qrZUCVByj564dq7VmREMSSQWZfLdWUmhMUlOVzd7eHnnfhODDwDm3uzudz+d5mhSjqfC7AqgiS2SXKyatFVGUjKquGyKwRik0Pl6NS4kh7F3FdgjU34wt/yr0xVcE3zf8zY0bdGO79K5Xqm0hhKZpFotFmimZ/b0CbigVYtTGrDcbIprNZoPBYDQaJklycnJSZGY6nZZlmVh7fn6+O52EEIoiY+YYGC2zkvJvNxDFvSBa25AliAJHEu/4Ci/Y1k7Y2gCvvqe9TLrypjcCFEQNwIgE1731aw/Yv7TWaDS0sCyd54n40egajiQgIOZYlmvvG2u1gJLko2U7yUpaa50Lkr3RdUwHAAjATWhg5VbKfZEsULLM9XottluyMWut+Ly6rvsgSaRERqNRWVf9Lup3lPc+T9twWzI86pSJldKIKJQag2J4dHQkT7So9R0eHkYfGq6L4cB7X603QOQaNx6Py7L0PpnNZg8fPpxdXM5ms/V6eXBwsHd4ePHyJfkgrJxKqXQ4HAM8e/bs0RsPy7LEdmDBXF6uhVtbarNJatbrpdVChwtSEN7fP1ytVj44gUmOx2OOpJQRwJp47t6/yhUZY6qqypJUyJum0+nF2flgMHB1Y7Upsvzy8vLlyelkMkmsTYxdrVZgLTJQiI4bClFrHX2oNqXUnxtXK0CjtEZFIcYYkSHSFehJggy5+977xWIht6NVv8iyEII2arVaSxIsVaUQ3Gw2WywWSgMzZWkaiVwM3vsksc5509loo1BdDckwcZzs7vzar311NBrtTKZKKZNY2ED3hsgd6IGZyYfFYnF4uG80QpLWdSux3ESa7oxVVN77ED3EYJRSSBpJcyyr8sXzj1xVE5FWKgTPMkjI7UcIVolBOUeECEotF+uXF0uTZUmSMurJcPKbv/41Y74RogMZzAUZBVTIQF2YPR1PrbWHh/vjkZW4anc6KYoCkMejyXR3J0ky1Pbb3/+wqd0X/uAX/+Vvfulv/9//C23MZDw8Pb34sc+9WxTFYjmbTqflehFCkGf2VRsIrxjGG3aGe7AqEpOYC+yWNPS573aeo5TCjurEmlRr8RQaOnYBbgErLFEOUQtIVFprayKTUgpbGiJW2rLzEthhm3NeAb7Mq2e8/bWEe2iE9hoODg4ePnz4+PETQSIAwHA4fHby4gfff/9Hf+yzoA0zM3FvjLb/V1sk0b2BfDUtFotmjPrYW2+/fHlubBpcIALU4j67sAKuhKGgs85ad3AfJbMfSZYneZ5aa13ljLHOOcAoD0CMEbEllDfGMESb6D
RNNcCq3GjFWvFoNIqcNJvNs2fPkzw72N/13jdNEyMORsOyXHvvh6NhkWXz2SIw1U1prJWT6aOq7SulK8rOdt/GTlVCXIhS0Dcyu/rqTQq6Gz6mu/Zuup9osVjInlgul2PMjTHCpibLJT4AANbr9c50ulg0q9Uqy7LRaKSU2myqvb2DGAMmyWKxjgwiqYSJCJmliC0XsXwibLnGdq9HCdX77aS36EeuzwuBlnfy9b5++x4Z+INrV7r1tb4Rmrwao2y/lEYZh1NKGdvSjCulbJJkSRpCsCqRKoJgYiW0ksqeXEg7dmJtH8rked7i8kOQunGPUe/upuo9t/yJHFP6OxI2yRt6gQ3xRr2PF8oIIgLiJE+hGzKWWyBZoEzEKgVyGkopgU8zo6jTF/ng8PDww48eX15eHu4feO9d3SwWi92dPUmApBPx5MmTg4OD+XwuSW1ZlvPlYrprJFw4ff48s0mM8fHjx5IOfv/bv//2u++G6N977z3RbiKi0WS6uzt1LpTVWhusqs2Lk/kwz/IiC8Gt10uhBHn+/KmUmmWdvfdWGykMmE5pijqtX0QkYGPM7u7ucr6QXq9oa45GI4EoywmIpxQ6676xLT+UtZJ9K9/2YHIpDkuE1H90GyqR55aRo31tmfUgzSPpHcROzoEjPXv+tFxvZMgCgLXWhTFGlHe6UjxR1B2xQ5IkGkFCqOPjnW9+85s/+qM/6l3ANt8N1IF4EUiBEkb9EEK53ihkq42sJACAwhgYFRDHRCtQmoIbZmkD/MFi9tEHH27WS2BWHXkLbVE9BxLOWWRENLqpmrwY372/7+DZYr0OBMogU/Pxd97a352mqc2y5Pzs5Wq1evny5WK+BoAEVWBSAMvlHADml2eRwRrwr8yUiEXY2dltvH968nNEQVsDQHmerRarj7/ztrE6SYZltWmfF+5Jaq+Moer6wdglwdLv0x0DgBDVgdScuwGN7npjb6L7HtN2nK2U0tYkSQZd2n3FIwmsACMAd40ARCQCmbO4cZJEgSj0lFid/WpFIAyLjtx1Gye2SXohSinoHOp4PL5///77778PkEm3Q8YnXrw8vZzPR+NBe3rtfLpmGdLtLOtrTeS2wWJmjWCUDt7dun20t7d3cTlr6zCxzf7bNZL/ARGUVt3VIhFBCE5pwxxBoUiLt+sOEQC0Aq1gWAz6VebokCm4RmttjLZej1XB0Q+LbDws1hXlWTLI8zTPpqNRVVUaoBhMhuPxpswuLy+tUeL2XOO98VHGh7fkCpiZOtT8FkbpqrKE1+dcmVsys35z9D3sbde7vW6SRuzu7kp5TWplAGCMqap6sVhKtO69x05RWDqX/bCs975pHDOs1+uW9yfj2WzmvW98kGoeBa+1RrQh+h6gd+OssFW9BdWJKMglwhZPVn+x3XpcRX5XO4Sv+sfbW6VzaZpfV0WArR/eeGmtgbGv3ELLaKM1KjHTRqM0yOVb1VH2iy3u/6QdBtWaiIxp59SFMRgA+uZl7+D74RY5N1l5uaei+dqPADBzjF5rXZZlT08hHX0pqybGNr5V7uu3ltA/yYlJ5bavhJdlLf74B+99P8+Ksq6qqhrm+eXlpaShbe+Q4mKx2N/fjzG+//77R0dHQtS8WCxms9l0d08ptVwuxUlPp9Nnz54Vg3wyHp2fNfOzM9FJm81maZoWRSGjt3t7eyLikue5Bh6Px42ry3KNiMfHx7PZ7OTkZDAYlWWZJe2sc57nClTfDy7LMssykRGU+rOcUtNIH1fLJSyXS4FGy7pJAi1Ttk1TaY1ya/oevCyaYLWkni/tA6lzwJbukOyi0I0jyi2T4yBi01RVtclsomwiaUOaWkQuy9K72DTNbHaR5kVZ1kmSAkDjXZsnMShUrDTatlsh+QYQtyhRo+raffOb33z77bclu+p3dW8k5YEFgKZpDo/2RZILEXVudZKaxFoDCFaRNwiJtUDxm7/3/nd///cQkb2LMXrnjNXIDAwRKHaZArW6AMjMkeDO/Xs2yb/2rW9VVZjsTgD1cn55tPfgL//lP5dlCZP0GbmRvkPkxXwVQnj58mXjKlHImM1DWa0Xi9mmXIpcZAgACEbBaLxbu8Cs6k0VyKep1YmOy/LgYHzv3h3naqtaAyhhKyBu9zTF+25bHhCIFiMjq57jiBiQOi6K3gRR73QBiSEyRInpW8eiEJTI4EL7oS2fFcunCD2w940cR6YnBLMph6B+OCWEHlDFbR/hylgZABDqrG0Vo94lSGDedzhijAcHB/fu3ZvNFqPR0BhzcXERYzw/P3/58mWeP9CmFVeMMaapAVAcw7bN3fYc+EomBy0TOqRpOkb11ltvXX7lt41VUqOLMTJgjJGUFB4UYLRKb+1LIOYQyCa68eKKjAKMHBKjahcTa5PUxhhRo9TrtVbeY88xFrzzTS0pSJINlVIKYl1u7t29XbsGgUbDYrNeHh7eTZJkMsqHebapK0QNxPOwNDp1JE0HjgAycUURWRqTvO01SRB8fQbfo41kfyjVSkTIjewxRzf8CkoVnkSdW/WdPwGYWGsbV5+fn7/11ltN09RNyz0EHYrHex8JmLmuaxmd1NqWZZnnedP405fn63VJBOvVJs9zH2JC0bARTNaVL2RpaVxlw22HmOn6yepuNGtrC4IGvBJ7afcDvyYi3PqJlrZK76G3Pe62ndp+A2I7s9+7RtWBuXqnKAxQ4hFNpz0n7UB5p9hfKS8z83ispWUrFlDavfL+Pl0WAyrGvXfnPaS/LMvpdJqmqQjmhBAmkx3p+wpNlex8wRtrrWMZ2LTUV7LI4r28b3Qnk7e3t9c0zXq9LorhbDabTCbj8Vgr44IvyzJPkocPH6426+nuDjA23onYaD4c5MOB2PFVuVbGpHl+fPu2jP3IZGo2HhSDfLGcK8AQwt7eHhAnxjLzZDJCxKLIAvFsdnnn7t2Tk5MsK/amOzQeEceqLpMkidGfnr4oiuHezu7FxcxaO8h3ZTWKohDyegGfS7N8s9nIBcqqeu/H4zEiLpfL8Xg8n8/Ozs4ODg4Qcb1eS/08hLCzM9lsgvSDh8Ph5eWlbGmJOfqlkzsiiXiPVpNgSO4acSDqlFo6sBuKpmxEIto4nyQREVNrhY/FGLVer4dFbozxjUvTlAldDBrVcDiM0RtliCjLEiJqQyWtfeN6w+29z7NssVg9e/Zif393yzh00XxXG0CGvmDQb3hpbUamPDWRIE2MQv793/vmz/+jfzSdTuezhVx4DyaIPjAgMRMjgQJEZgF5qeM7t533AeDP//n/3tls/o//6S9lmeYIn3jnPtDGQFQGlA6jwVBN8xACooa7e0qZGN8OIUj2eXpJztXaKOfKsizrul4uS9fE1cZ99zvvzRcbZY0ymGR2NrtYr5fz2eYP/6EvTEZDYB+8x44SR7UTHK/vLt1wJb3FkLXijvmHO4ra/iUCbF0i1FkP4Y8HrZWVjQDcpcVtpochRKVbrm9jTF27LEuGw2HsKAqkoIKI0QeTtRPn0JHW9ybLXJWLpVnL13IRyRj6SI2ZR+PBo0ePTk+/JLZJsP5lWX3zm9+6ffu2N
m3u1TPXKLjiEFdb2rd0HTDMHfSamJij8w0zPnx4/8mzp4tV6Zrgl0vXNEmSrNfOjIfGJiFcMarLfqLAWaKVbstBQkN/997tb3/nB4nNE6MQAIk1oELFtp00EDVNRKwqxxCHo8JYpU1irWFmq3E8LCY7k/V6PRqNzs/P93amk9Ewz/P1ZnXn9q2L2eVmXea7U46xLMska9t+67JEVFVVaQUk089dVtdb5L5HztdZNeRy+gJ7X4Dq91Z/j2KMgjUQcR7oaDt7X96TiBljxAz1+ZaMmsh4jPd+MBhMJpP55QURbDaVMSZN04vzWTHIWOuqcQq4aXxqEwBgwi41JyR5AIQ8JF75P6UVXLMa3aPSPgxdBPpqBvz6SknvXElS7O5P1Bajp8Qo4iklgO2XV7DNtEUjzJ2os+SUtCVOR1ukY9JYDVvcLPJOiU2FvMJ0ZML9TZQagxhWOY4IDcnTZK3t1ACdMUprK9PAUksUCaPeVYhIgFQ4jTHIIGoisv8BSJhJ0jQVmgUJv4xRSSfUvVwux9PJcDhkiqBwPB674IExSZIkz1arlQ2peHrn3HA4bJpmOp3+1m/91p3j24k2Njcffvjhyakej8fIEGIQzPzx8XHTVNxZrhCCtuloNPro8WMkRora6npVLlcLuZaqqoaDwXg4KtcbYWEz5lAmmsqyRFZSVxN3KyM08r8My0nxxjkn1BxyRwRv6JybTqcChdtsNsJZIbhlRBRabHm/rLC4LmlP5nkui7+9bSQ+k/JGO3Ek3jFJtyVVYksdHOUIJk3SNB0Oh8UgXy3XzIoYrNXOOQUMqBBYKSUzG6bjt9/eNvJZAHB6elqW5WQy2v4tbpEMy/mEEMbTVt7RWhtbVkDerNZ5ql0dv/vtb/13//DvH+7tV5uSpcQMSimFADFGiiwMjUQQGRgxIjJAVhSBYuCAEWxqHjy882/8+T8xHo9vTbOjo6NyufJVWRTZYDAoV5skSZiQOQAorQkEA0UASmWp2CUyOhmPM9eE+/fS4KGq+OJ8tlyu7966nQ2yYpgnySf+/j/8bxHgjTceed8YjQwUg+sDWdzCfhpU3CGZt+Pvzn+xdBNCCJIYSH1dAwYiafwRtSz3sePA6iyMkDS07b8oRmYLriL/IWII7SCMjyFJksFggKhJZq06IQPuZEkRRbtuKz0gZt4q9LUv9UOLePIKIRwc7u3v70sPRuCa0+l0vV5/8MEHRCTlcnGBEgT9K143LGxvqZk5NbrI07u37+xOxnfu3tIG5SFE5hBCXddZVmyLFbXPA7BwONd1fXJycn5+bozRCEYrrTGSD52Om0El40wCHKdu0FPoGwURY4wpy83OznQyGkxGg+ib0SDfP9hVCNboYZGnqT3Y2x8UuUY43N8bFvm9O7fv3DpOjD7c3RkNiiJLjWmnvrfmvqFzJDd7I1tr0tE6XidU6/+XH4q+lQRDYnRiN/LYuwoAWC7XNslkTkMeezHlMuICqC8vW5JYaaEhovcxRp4vF1Uts2exrpqqqmrnJXuOoSuGKySE2AanV11PRESlsWvV49brmkTjdr4LGl+nTnE9VrtSDdsuG2A7KIkCRO8/cfsIsFUYR6lFd6NN7UdAFCJruVnMHMKVflk/DAodkFVCYFk3uRFaX8Ho5KNVi9WnvpDTD+kWRWY6NUzJ/ABA4iFJAeWY/WCopGh5nhdZrkAoFb1YKDkNAd8xc9LJ9BKRRFcSmqRpWtaVSaxkjSZN5LHKirwYDr793e8Q8MGtW8ropmmaphkMi9n8UoDfm9XaCpuYc0RUFJmw8cUYd3Z2pLm7nM8VQPTuwYMHq9Xqg/feE3R0XZdZlo2GQ6XUycmJOMXZbJYk2XA43myqsqwByFqtNXrfbDabzWYj3rTvBcjmnM/niOhcc3h4mGWZQJqFoB4AlsulUkbaBKFj1LKtmnKb4MrGkEep3cwxyiMj9FjMrDSIneGu3ouIEKlpWpGl7T0gTWXnnKvqJDH37t/Z391DRKO0MUYKYNRShRMAqH7m4eqJ6HZpNxoqJyzBhDzy2xayqVqKzYuLC6lQxhibponBVZs1x5BabTR+4+tf+7t/57/aGU/Wy1VLp88tEydFpsjEmgl9oECAaCMjMRibglZR2szl+uLypW/Knelof39nZ3roHYWICpPNplmv60DKO/aemTWCpagia0YbGV0TtI51tWzqleKGXKUwlptlDPV4lO5OR++//72Xp8+eP/1ovVp4V3/m05++c3c0yFPJO+TpphaA1nlA3n6iCV8Zurjhv/pFY46oWnq1/vZR97p6tBUi6CRJimJobaqUUqj11rAlIiAycYt5jDESARqN2nYkU1uFcb55bv3XhEAIV7HVlelHfvXd/StGn2XZm2++KfHFer0GgCRNTWI//PDD1WpV17Vs0HZnMxCgcFj/K3zw9rfMKMmEMeb27ePJeLS3M/6RT32y2qxSq5UC55y2Zl1ulNG8ZVJluoSIIkHTeJGZY45vvPmQyBkNiVW7051WPbtLyDRgL2Nutij45YFXyKNhoRUeHOxba5LETobDLDEhOGNVltrpeLQzGQt7eFFkuTW3jg6OD/d3p+Pjo73d6cRqHBaFoLMRCIEUolbKCGL7ldfWkxb6cAQ6JGTs2Fq2/0RidpmZa8cNsZ3iB1CIerFciqmS5iJ3wXuPVTm/uGRGra1CPZ8t6qqReuZisaqqqm58Xde1d96FGGOgKL7k1S0FXYn4ygWCEqf8qhvu/5YJt13vtV/dDFlkNa6BJq6ix46poGcL6X9F13EWN/5WbT1g/SdG8tSRuki6JpmsZNL9cWJHL6w6xnbZPGLu+1smX/RVjSRJAKhpGhl2ktMT9xk6tkvvvTxfEiBKTL1t/TW23Rn5OMEZyXQZM1dVxVtw+hjjYDAwiZWfhxD2Dg/kbahbgcUHDx7M53MKfm9nV4q6QFEQEkBxb2+HOSoFiIzYti0EjLbZbGaz2Ww2K8tyb28vhABJUte1MFFLtdMonWXZs2fPdnZ2AEDIpaVK3IYmTD3TQJ6nWZbleU7AgkCW007TZDAorDUS4ud5LkAz4V6Vju9ms8myQpCr/VB73xSQTPfGVuStTChJEqXbIoesz2azKcvS14242/7p6wPi9qEDalxNPoxGox/90c+en10oBeV6ZYwBIo2oTTuFIh+67YNvPEqqE96QaKzfuu1GJQapLyrVVC3pqRS0OHgDYBGA+Cu/9eVf/uVfvn37toQIRC3cJ3Yd0AhMBBHa9JcYGFDZRNuUSWSqEBHLzWq1WoTgNuv5pt64GMqmrppaae2jK+uNpxghEgAhsWIAYiRGYKRUk+YQ6o3VSiNoZIhOMTH5nelwWKTL+fl7P/jON7/+tcePHwPx5z73I+PxUCspskXdvaDzvtteo/NU14xhf0Ov7BIF5qswX7Y6X2dMkrqCSRL5LCHfqFwDXUpxw2chcXQNUpsGyBAKM4uGLLKSL7atOrCScF++kH+qDwGuuUC4ntfDtQMRhTt3b+3v78rmq5tGItMXpyez2Uz4KMSf2U6PCQAIUDxx5Nd44quzVJpR6A89cxwN
i9E41wj37t75/Od/dDavJRx2LiRJJhPJ1ErBAzNShEAQY8s1AwDVpjzY293dmbimUhgFagjEVrf6YogtNkcKHT14Ukxhkpg8z7VGo9Eak+d5lifMzNELRZwPTTHI8jx1TT0aDsajIk/M0f5ultjbR4cP7t2ZTkbRuzy1qW2tczcwfa2neyNq6+5I7JxxmxBfu6Pq6iAyOCh2VopsumOEwI5tX8wubuXTvVIeIvRgH/HQddV4F6qqipGlp9g03pOUaqEde70u8QFtlR2lgd07XYVaUuHtH/b258aTc+O1/Su4FqhdA5n3v+2/kMeyP4LYStVNLOIrTaOtE8M+cRQfzMy+aThGuX09NFcgP77jvepvUG8yiEhAbWVZSswnPU7xwtsdGbH+kgQLDFsSOxHNFSctHyH3t8+kJfaStrRgiwS9JW8W8gpmrptK6h/S7zw/Pxe0lDCEK0DpTSBDlqSubpxzt27d2tnZQcT9/f3Hjx9fXl7GGIPzTVUnxkowJxGD7qZsV6vVZDIS8mpwTkoyOknSNL1z6+5ms7k4nw2KESI2jd/d3d3d3TXG7O/vSz2fmUWZeLlcGmPquhR9X631ZrNZr9dFPhBt8r7MIy7q5cuXh4eHsnrT6XQ+n0svf7sTL1msrLnpqLwlnOpvPQAQB5u0/f4eAeCc87VUBCpJPEJHyXIV3gERUV3XZVki8YMHD376p3/q6dPnRVEE10jIAvGaXb3+OHQ/p5ZvC7dkicUFtO/vJkHkjovtveoVeg/BI9Nvf+W3fukXfzG1SVNW6/VaKQPSJFJaKQOgPHEIgRCUMgQKGAkUo9XKRgZQWh5z+YiqXLu63KxWl/NLRo4c1tWaFHny6/WyrFeoKXDjqSb0rAKxA3TGAiLneYrMGjHLMmTI0tQarMvlm2/c+9f+yBfOL16Ox4O6Kj/64MN6U77z9ptpZsU+9Y/zVSgPAAACAgd6DbdUG8p0iSVyz7YRO0PFMbbqv0xb6NGu5gzcMvaAMsAKWt68/kXIwpB7pRKbZVma5H1adMOpvXp6W6+t5tm/8n1bf6HbYOETn/iEMXo8mciwo8BYnjx7GjoZTmNfz78FAL0bpq3G540zCdEbhdbqw/09a1TjNvfu3npwb3e58kqpxvvZYuWCrCz1/FAi4suE3kUiUIBlWV5cXDx6cG+Q29DUrmN1FyYE2f19GZC3hn/kogaDgbXaWk1EWZYkVndSEMoYxRx9XSmGxBir8WB3b2cyAgrDIptOxqkxh/t7bz58MBjmqU20RqtVb5pf9R/b67C1MtSnwt1NuzLcttOHlxczSw3NGBOZtNY2SaiTWusHMJTRQigtoNP5fD4cDlfr9XyxcDHYLCUE0KoJfr3alGXpfdyUdVVVdV27xm8XY5mwH9Xtr+WGZ+0uDGW0vbc7r/W4N3+uZHD+RhVaUsB2QmPbPdNVp7l9G26lFzeO37eUuCtob6Od20PJI9u1oLAbwJUvhAaOOrl1cY3yAMvBBZ+V57npRIUlIevhvtILEF9O3dST/FBamLDV2O46jq1hEqkf7IrPsSMhkd2iulExGctBRHFLSimh5i9Xa8FYFUXx8uSUQmya5uLiIhsOL8/PX56cnL548dHjD5aL2fNnT7LU1tVGCC6kCNwScyIwc5Jko9FkMJqs12WapsvlUgQPmLnebGR59/b2Hj165L1//PixiDEMB6O+n9LveWm7zmYzieDlb2/dujUattNZfT12d3cXESX5lhU4OztDxKZpTk5OxHv1ioTc0ZiEjslEHBh0fdx+Y/T8U0KApToONex6DTJrIBjybbIquQXEUWscjQef/exnP/9jn5PxMIm6+k342le72bZ6K3DD9hN1tE4k8YE449VqJY9/CKFIE6Pxa7/9ld/57S8f7O2FENbrUmsbA/kYInfdeu7ruoqYvQ87e/sPHr2xu7uf5gUxoNIKdXtzQIdAvgnRB+d8WVYU2/K4lLWlaOE7QjEJf9vnhTlLkizLmtorVhBJAyoN053RcGDfeeeR95BanWXJ7PJys9kMBkX0viUJESx2aDngrqwKX5mCV53Lq4Zi27Iqda3oG6/on69gJdamyiQAILq/8l75J74/Rt9K/zLD1Xjhq+fyiglsXyj/AK4PIF3bDXgzP5CXMUZpZI537tw5PDwUFM9oNHr06NH+/v7Lly/Pz89lF1JL2Xr12dsngdhmw70zlt8SIOEVDxcyTcejYZ4BRQT64he/ePfOTtM0IRAodD50Jb6AqBkRsatkKi0UOT64cr1ipnfe+dhgmMUYpfDbNI3GdoJItrukMgLDCd20SZpZVDIoyXmeJolB5CJPtVLWaIWQJImEzuPxeDgqLEKWWGTanY6txiLP9nf37hwf2URLez0xQhtLnRd5Pa7v+g3jPhWGrXQQOqshtUdEDCHIVGL/w74Ttlqt5BoDRcmGWyHk7iW2qaoaIoix7SCwwtq51WoFADHGunYyGiEgoL44xnTVu2Il5/ZDt9a2/7tWrN6awOve0pHPIfUk5v3+gTaOidtryB3ACgCkQ6k6MpYriwZtZ11ttQBuOOy4NeLc4wa4S036uQDv/Wg0ih3AcNvzcdc57uO5/ucSz7WCZV0vQL4YDoe6o8wUxyz5JXa4RdUNz/QQ7v7IoRtKlkoGEQlzqhR7JSbQqDQq4XA+Pz8nH6zSGjA6HxqnGJqyqpfL5XL5zd/7RmqT4+NjcboSNFxcnk2mo52dHWGuELIRMT2ReWdnZ7VazefzyWQCAEVRvP/++6enp/Wm/vDDD09OTtbr9cnJye7u7nK5vLy8PD8/n81mTdMISEo2pxiZ9rkG7ldG0PsyLyS2XmhGpNhjOjpuOZpzbrlceu+lhyqZHHQ2VyDl206u/78vLcg7Jb9ULTD4Kp3ol7ofKcYOOIKIkjHnefr5z//Ym2++mWVZcF7qUjc8rt6q8WxvbPmJghZ+f839XMEdenhwy1aNiBz97/3OV3/rS18C4s1m4zrUt7AFMqEL5AIxodLaJonW+vnz5x//xCc+9ZnP5Hk+GA19iNamRECACi2CQlahifWm8U3wNW1WpasdkSIHvooU0DdUl54CcwQKDIRWJ4lJFWitrEIzHu16H5smKtUyBlL0iHz33q2/9Jf+9WfPzwDJB3777XesUcQBiJFa4pEbl4+vTw6vyl3bNrO/rf2vYqtLeDVb0dsc4a7Zenhv2i6llNLtQfrQjfkKb6Ve6WRt37Xu59iXnAHwKhy48fphwYVSINbHGPPGG29Iz0asvzC7vnz5UrY7bWnnbdu4G4FJG9wBEiCjQL01K5mgciE4rXG6M0mMBoDJZPTOO++sK44xxkCr1apq6k7UNvSNeiJWSgVP8/l8NV8AwLNnTybj4cMH95RSVVUhgGCsuJsY2a5E9WfYP1qCl5NrlIkFxPbOZWmSpWmWpKNBkdqkKLJhkSVGaYQsS4xRCmEyGu9Nd/I8FxXFRF+xFsMPCeX6B3Kr7Hxd5kFfWXzokqS+jyUv1QrytAgRtSWcEFsYtha70GbG3fCrAOtjjEVRAOB8Pvfe+0De+6bF/rWgku2T77f
a9sj8ttXov9j2rz8sFe5/e/VXinviju1jMkfm2NfoWqjw9ZRX6yvijhuv/qb3z5VqtTIJImVZ0r9BVlvw4XIXuknBG4NSV1cq1lxKyhLiCEiCOuyrZGOy/tKnlyYIM8tAMCJKiN3HENTCeUB8qnigvtAqXkfeLOsglCy9U5FSc1PVt2/fnk6nzPz8+XP59OjDw/sPfvC97x/s7X/60592deNc3TQVcyvYIJXnzWZzcXGx2Wx6dxW7jTqdTnd3d0VgO03T27dvA8ByuTw7Ozs/P1+v15/85Ce/+93v5nk+HIzkQkajUS+xUBTFcDiczWayzjIWpTquMa11CE6gN6I5KPwnPRuRBNDD4XBnZzdJ0uFwlGWZ3EopDscOzNyvKnfE3dvrI9V1ub+LxaKVppCSz5ab5K590G8bazUA9Dyjx8fHD+7f//jHPzadTqMPsquvIRC3TSuA0OG1G5VbuH7/QdtPijEmuEZaG8xc16VUYr7/7W998+u/l2gFFJsug3eNB1AKTYQusEZgRufCk6dPf/wn//hbb73lnIuM1iQHB0figKUGFFxsGkfUNvh87eUfNUGBZk/1pmzKKlRNqJpYO2o8+KiIDaBFSIzkt7lRab1xWglNm8qyxFiYTofvvvvOT/3UH0REhXCwt99FRVeVUWutZKI3HrFtb7dtZ7ZdTO8FdfeGvrLVrioIhWJLzQSsjEmkg9alBzefa6IWfiWf0rMCICIASl9S8bV7vGXqe6vOkgdsSdlcf8ErPrjfc609gnh0dHRwcCCTDzJgx8wnJyd1U8n1bG/ZV48P130PXo/vxLsAMTLsTMZJkuR5Gpw/Pj7+8R//A4tVxcyESkCJ3nsf+2y4bSFLIPzy5cvNZrVaLp8+fXp4ePjmm28Kdkx8khi+HvbCAJHIWCsgUjkZQWEILREiMkR5znsrn6bpYJi34yhaMcWdnR1A4kgcY57n+/v7R0cHk+koT7O+OLkVB9003NChMGgLOtQvF3WDNLiVcvVVUMl+oCU9pj7HFSckiALqJF98bIvbQifkvZ9MJgL8KYoiy4rNZiMNudVqI4Wmvsq0fRNfa1AED7/thl/ng3H7DTd+dWNNtt92fVu3JyP4Z4kOZQG3A51tj4tbfJ+qo3vty/jS5ZVvZcQIOk4G2SpC+ydoLCkP9FDV/oPEKfYTX1JfESspe2+5XMoYvQQHksTIx3WJshVP379H7nLP0iXGV+5gf2k94le+XSwW0+lUyiGSBwNAVVVlWbq6Wczmq8VyOp4kxp6/PLPWLhaLy/ML37iDvf2yLEPjNOAwLzTwerHc29s7Ozs7Oz+VZU/TNMaWkE9Ct8lkcnFxwczC9syMO5Pp2dmZ3AsBasms+TvvvCNqVHLykvVK6NAy2zgnTPrSVWmaRghhlFLOOQASRSkAcM6JGJRwmXnfYsW3w53tkqmsP3Q9C1nbHjItorkCiRCybtcpN8hZ9TTOsNUP2v6VHFDoKqfT6f37999999033njDN1f16ldf17wsXz3soRPe6O81d8Ie/VaRM/zggw9+53d+xxjV9zhkuWSILgIDKACF2sgtu7i4+Df/wp9/5513ZLuOx2NrbYyMWiNoJnlwFEUwqCFSvanrarNZLaty7aoquBqZrFHAMfimKtdVuXZNFXzj6iq4RiEASB+ErUm8jwBoTSqF3DzPy2o9mY5++qf/xK1bR9Pp6Pbt2yG4Vuh+ywj0T9YNE6HU6xPIV30NbgVMiCjEkNsWtR+1tdYStHSERG0TsP1cbjF3IjECSKqDX0FHw7Bto9Qr5qvbckzMV0K0AhBoYQJbpwXIApXu/xIAkDMEE2PUVukkvv3O/dqtZvPz9WrjHQyLPdfgt3///eCZXETibQu49VJMmqKiqICN1laqfJECUFQgOsYIoLRNCFAZWxQFEnP0mcaHxwc/8vbdzWxpm9V8A3W0LiaRNIOOrBiBsC1GVY2rA53P1w2p+dqfzcp7h4Mf/8OfmRQQ6pniGtkBh9QYqcUN0iJFy02EhprVxjJOB/bhvcP7tw+zFClWITZKoXPNYDBwrlYKtEaOLtE6MUpzaGKweeYppsYO8jTVMMmNxSZV4e7R5PhgHJpllts0Ta21xqYWIdWKKRgNkZw2rWAkR0A2CqwCC4REFMgHarr9p7HD1Ck08rQAK2jj67ZrRcDBxTwtsiSfzRZEZJQOQkxKzJGM0pL4Gq1d07im8XVlFQprIADFGMuyDMQuhJcX55GIGUFpY4y2hog8uQiBFbGifsodAEArmU2KbX26c5bY/hOMJaNiVII07ELO/sGjLenDq84FIBEHpoBACllCNKO0URa5TWShUyWK0QvcafsIBlVidGK01QoVM0RUrLagAAJoSNO0GI6rxgt1SE8gIumUiAQDUJKYGL00jgHIGMXMLW9GjHmei78cDosQnHN1CE4wR2LcpY/bND5JMmE1EQ/de33dcYD0dRqx9fLt5XyxXK6JpEFQt6osIVitY4zzy5nojE3Hk8XlpUJIspQRJjvTQHG2XgTkZbXZPTrYPTq4WM51lizXq9F0+uLly01ZHd+6/eLFWVX5yWQvMCzWq9PTU601U8s9fn5+vpjNgTjUTay9JqydL4aj2vnZ/HIyHXtXNa7Mi+Tu3duf+cxn3njjjc1mE4K7uDhDBUfHhxcXZ7PZRbleBdeU6w0yxMhV1eztHeSDYVU1TGBskqRZVuTK6MFo7ELUNml8SPPscj4rikLCIHGiVVU1Ta21Wq2W6/VKIIS640in4Ci4xIiwmUOOgzw1CppqQ8EpoOiiBq0YpCZfrTcGlcwO1LUTR9UnQACAyCIeQJGZAFgDi3iaNQryVD96cPvurf3jg/GbD299+t03j45300xpQ8gBORjFBmKiGKMDaMkrQCtMDGktc+UGMTE6hIDIARiMrkMkQFAaABKF2JQFuWff+d1n3/pqYL8q14EjI0YGH8mFWPvACkN0ja9rVykFlWsCwd/4m/+TT/3oZyPGwC7NjFV+VOidSTJIKctI66ZuVhEa1OCZSCnPXFNomMsQykgbHx2oTU2elIvkiQOTp9gEHwEJVe2i46YhF1UY7OT5UK/Wc1RsTIKk2YHlRIUwyfVf+jf+xL/91/7UZLC0kABpIhVBR9ABtGNmhawQOoRxRIrAATEgKmUIVRCiSq3AICmOFFp9cYhIUTFoVMhIIRLFPnMmDsThqomDSiEqpRLTqvRqrSlahAQAW7AR+0gNQ0REijAeT/Mk9y62uTJ6VhwhBg7U+e0uUDNSNWRmAFYICgEVXHFBM7Ow2782oLhyzBAku+XYVpzu3bv33e98fzqxWVZYa5u6vLi4WK/XO+OR1ipuKbYCckd6xkRtwfP09HQwzKfTcV3XqK45++00azKZuOZcdnxRFJ/+9KeVUt/67kfpGABgtVplB3vOOa0SbRImTxEIUGM7n+OasF6V6/VaPbh1586dL37xi4+fPHn6/LlipRRQ9MxY183FxYVw1kyn03duvb23tzecJHmer5ab5yenq+Vmtlwsl0vhSeAOVKwAYoxG0FsGQwjcJpatio6AuUyaRF
LL9aZ2cTQoytpHAux6tEQkwbtE6HLm1wMovVUPIYCWmRMAEEErJGIAJBLV+3YCWEyGUAtJxi9GPG4xIPLWmBN0YB/5VjgQ+vr2arXx+75pmiJr+W+VsgIduRFjbQee12PA14SEcqtvpLxdeMt9qLsdz/JVkaY/4DWqy+2wDzvMmupeVx+N7Wf1CDVB+ggUWeEVuatUKaVH22fD2LHF9R8ti9bnmkIlQUQxhr4NCQAyTVuWpaS8dd3mWFmWScW4qqrDw0NpB8hmk2xvGzAodWMEbJqmr16Kbw4hiMazc248Hi8WK0mmpWwrUGEdwsXFxcOHD2ez2d27dyU0sdbevXv3q1/9qqyG1no4HFbVJjWWBoPLy0sR3SrLUqopeZ4vl0tmRqOapsHEGGOGebFYLJg5T7OPPvposVjcunVLLvyzn/3se++9V9f1l770pSzLBMyCuoVlrNdra1Pn3Hw+19ZYaxObLhaLNM/G4/H5+bngoiX1l9u6Wq2KohC9QmZer9eIKLRuQk82m11I58h7X2TtmK/cL0ku+9yFiKpq0z87MV7Bf1arlfeNMJ/0xeq+MdHvLu7my0PHViaFdBmUstaO96lcby4vL2VkK4aW4VJrTSAj/qiYA7ST+uBcjISKpUKQppn33lhljIoxuroMTW4zM5/Pnz59ulqtyqqSAyZJSq1QfGBudy8qk2XZbLmwJv3rf/2vHx8fXy7nshV93TKYpsDT6TTEGRER+T4ilPVpmiDFv0Y1q9XK10ma2DS1UhkiagufoeNflFVqmhbu6n0TY9b/KkmSsnJVVe3v7+dFcX7xMrHJDYux9aTf/CEIu/5W1ayrEEQiput48rb7SwydbHxrZ9BIfGaMMV15srdLWhu4ElnqOjgtwiZR7TRHW4ICvLK6r6adN9Jx+V+s/LVhkit3S4BXajT931+DCQyHw7fffvu9H3ygtZbYXyl1evryg/cfjz/9iTRLpOqCqrOTKGoNaBLNhGdnZz/4wQ/2D3bH4yERZWkKgXnLkvZJvaBRrNVK6Rjjzu7krbfeOjk5KQGrquJIl5eXeZomSUIIwBiYXCCrGT1WZaOUNIn5B35T1/W9hw8++clPolbvf/h4uVyPJzuTyeStNx/t7Ozcvn1bazufz40xHKmuNyGEyHDv3r08Gzx5/uz73/9+PxYyGAw4xrqurUm1dBGU4hCZ2zFcpRQzxRgHRW69jZEPDg5ens98oG14pGIKwVtMEDHLsrpynXCHbpmjWnGt1+CN+29Vx2rJXQdFnn+xC9LolWyAO+BAv1F6/h2x78aYi4uL6XQqnH99p1PgPCFIsNHOugD1Owh4C72HiKLE0O4r1QsXXXVrWs+KLHw121uruyyN6oqTZNvTd9t1+4G8ItaWC+xXhrvWybYDbp9hIjGOYjj6epe1VgY3qdPlVB3kTYIY2QbQ0Wb1pnwbrtx/nHyE/DzPc2lbqm7cM3YDLfP5fG9vTyk1mUx8JwC8s7OjlJLWgOto9xHRe1+kiYixJ4mVT1mv1xrbmEh6sXLCaZoyglJqOBwuFgtrbQZgbdIHH7u7u7Jo4vvH4/F8Ph8V6cuXJ8651Wp1fHx8gbP9/X1pVI+H03xQ5HnKzE3TaKOyLKsbNzu/ODo6MsaIJJH0a6pNOUfFzM+fPy+K4uLi4v79+/P5XOoBEnYAgASLABBjnM/nUmQm4tls5py7uLiYjEeC3mgZCJJE4P0A4L3f399/9uxZnuej0Wg+n8vKS29Y9r8UoqU63Ud1fV1Bhm773cIdfbRkn/Lq73jo5uB7q9ofUG56CK2llvsi0AHPfm9vL0kSETCuqir4KK1cuVNSsIkU5dMNQJoVQohNRBQcEI2KSV2VRWqtUUmSeFfPlpdCXcKoQohZlvc80jGyUhhCcD5OJsPT87PJeOdv/M1/e29vb7FYIKKEKS5QnuchEjgFoEaDISIyVwK0jJ0KBcXofTTGS5jPSUQFSWIaH63elqBXYnzEPYpTE9tSlmvEkcQoTWjk8Vyv1+vNJrEZAMhw7Y3oGa6/+gBacVujvXo/tQpHKIG1QkAC2uZX6KfzQUgLtNYxUpa1BKgEiNB/OjNIceMKw4WIRCyhs1IKu0YGsOJXEg9EFMnUGw5YCC/Nle28/urefE25iJkRW/4mxPYsj4+P33zzze9//z1EzNJUkH5Pnz59482HMkVAREy0BecDRHbOeRc3m408QjL13zS1RdODVPsPbdtpRoVNsFYjcoxxPBm++8mP//KXfi/PE6M0M66WG3EViVXW2kgspYPIdWKsUqapHIzT89llWuR37979+Mc/dv/+fULY290/PDwcTabee+/iycnJYnEhnf8005FEQ43TfPDGG2/s7Ox85StfEehZ0zRK6B5bCjSUUb+ON7QNPDnEfDRC5fJIt4/TxKbPT84pA6u0jEmkSWqMFvxdaw5a73hj56kO79f3Dq4CK2aEG+9WSlslnTBZf+54QNuIuNu4fZwoEGbpt4nGuwTUeZ4ZncQYvPfGjLCTNOg2hwJAuK4MCFc+uBvp0zJSdPXb3qJB561vHIGZUbCi6gpPoLaQKYAEW+RZ/eOqO33Wljv6FXlH7Pbidq1CslVmFr5DrbV31zjFwpacgyAQpUMpbcsOkXHFfiWpABEh8jY1tBxHSil9mzN26kbyHhmKret6NptNp1MAEK1vY4xsGwmbRJFQ4FdZltV1PR4OpTslGgaSNe7v759fXigF6/VaSjKw2ZDF/qKkM9c2oYVFtdNPFNzyarVyVd00zWAwmM/nxOHi7NxqE6Pf39+XYRgp5CiNHKmJTZKYR48e/dIv/vOdyfTWrVtZUbx49my+XJycnBwfHx8dHb3//kqq/RJqyI0TIhG5wM1mUxRFVuSqm+gVrQhZhLqufdPSWT958uTevXtZlokwyXQ6lSdUgNOC1ZIuai9YIpCcHkFNV4BH6G86Ikqa3peLpC/DzOKcVCdSKV9LBCZdZGtTeQzlitI05fkCETnmGiG1SZMXzEzA69VmtVqtVqvaNURkTdIWMLXxMciEjNxfoLhczHcmYwWkASj6qtqcn58vZrNBnkduWyRJkihlyrIUoxsJ9vb23n/84cffefcv/MxfFJULybllqxSRvffOhxCCV7EYZDIm0xMrUScw07bPo1VKpUYzAREQeSIdmGI0vQMmIp20VN7e+zRNB4PBbDbLssx7FKelO4Vm35H0AYDqtI/6h5puUDBjddAAAQAASURBVE725oVZ99JCghNrtQIjMCOyZMiRYh850RaEZft/bRKttXg9MSMMIANRzIwcIgVRx5Jd2nPV9Xa1zxC2vxacQN/Fb6P/buTJ9H6u/x8AgAWCJ01fgCuVDJnNkhgtaKV8iEmSfPzjH//Od763WCzyoyNmttbO5/MXL15kWZZlCXGkuGVYsb0Atvzy5cv54vLOnTvSvGmXHW90jkkoM4uiaGpR222nch89enS2rL/97e9hmsxmc2tN7oK13iSFI1QomRNYRuTIXPvGMa0PDw/ni0UxGEwmk4ODPaGfLAapa8rZbHZ+cTmbLXxoBGbcuBjKOsuKf
DBARGYYjca7u7vf+c53bt062qxWkj/5RqafUem2DakUWm0AORKPRiOtdZrYpmnyPDdJen5+rnXujPPBNU3DEaT5L+GLtbaJ9XW05DXkpCyO2poVQ0SALk0Ehn6aELVE/arrcfYbug/oiAg61LdSqBO7Wq2Gw/FisRK1K9kbVmtWILZMWgl4/QWgRXgErteBiUCpHkOgRO6sfwb6F3f1FeZrVZb2nFkjwI2IpD/C1b6/Wo0WFgHXMZP9O9vcFJS010EpxaABkRiYo/NIDJqQOUkSIojel2U5nk6xK+BLHC3BjbjPjnv5CpsjTMXM7FzTe2WpPIeOY4uZrW3H4WSHCGxK0keB+0pIJAl0URQywzoYDGSXeu9j9LJzpN2AiM45cdjQEVMnSeK9U4pFeFgplSeZjCH5ujk+PhbaSFmZuqnGk9F6fnF8fCwZMzMPh8PNcjUcDmOM6/V6MBiEEIzSwTUi0zuYjJBhMZuHEC4uLgaDPEtSRMyyTAZ2lVJ3bt0u15vT01MprsiJ9QUYAJIUf1gMtNZ37tw5OTlhZmOT0WhEMTjnpL5trQ3OqzS9uLiQAsPp6enh4WGapk+fPrXW7u7uWtvydYi5TK313s/n8z43oG72Wu4mbFGHSltGVkMKD9SBsfu17QsSsp+lyMFdTycEJwhcyey11lOCEILVbYMpSYz3EREHeWETk6bp2cV50zRaIwdSSjFq55ysj9ZaARPTIE1cuXGJ2r1/u6nLJx8+Zgry0Qo1RTImAVCXl5fD4ZAiuOAHg8G3vvv+j37uM3/xf/Azo8m4rmvUqi9vZlnGqBeLhfB+MLMxRgI7uZAY2ip0ZHZNS8ypG12bYJ3TRiXGAkRpJoFWOgb0zjCl7RB/ysyJNX3DQhQ/qSs1JUkSuqJR74MBgNVr0t/tx18uQDGIOHGPbKIOtYaAxCHKiPZ1vinoHS1DB9tUgaNChYgi/UAkI4IchQOwI0Qq8sJaC6BCF22o6yCszl223/agAUZA1RphJjB9xsB8RbN5w8BJdAyAsauGK6VC5NYHRNrf33/77bcff/ChxAKDwWCxWDz+8MntW3ezLOkjAkmdmYEgIigievOtR2V5JOedpqkPLgRChP5UVDsQHJ0Lw+FQKbWYL13TGGOyNM/T7JOf/OTFxcV8vhwOB875zaZUSpkkQdTWmAgRmCOjlgY48sV8I82V4XCY5QlDZAqoOGlMVVWbzarcrENw2pgQnPd+sd6EEPZ2D6rGHxzo8XgCAI8evTGfzwWK1TSNb5wxCTNbY4UlBQDSxGLHsVUUReMqlSTaKABKrVEIWoMCk9qkq1RzXhTC119XoqbAUpntDPq1ebh2G3U90L781QaqCACKGVFhjDFQ7LVRxUxLqQ26wjUDSPhZuTAcDquq0lomtWxVlQcHB9AVtHVHtcivjEbIjobODfY/V0oxY0dtAwAInZD21dOlEAmYFTPhVtEJ8SotRmyLLlvBmbypTYIRr/SDu482vUe/dp7Q4sEEbSqroXqqd1mKqkrTtMPDtwPHXfGKVMfALGmiLH5sx8CU6vCZUt3tJ/fKsuwWhGlrUL5vJ8sYtyj6iXqMVGgljZA/6SfKYozWtBgCocfpvQURFcOh/CTPcxHsE8WOnsxZvKxSajwei7buZrMRJ6c0imuZz+fvvPOOMF0Yo2IMRPHi5enh3gErkRTD5WItSkd5nvrGiS27uLhAJquN6EkDwN7h4dnJSQjhxYsXg8HAGDMaDE9OTuq6Ho1GMsQoIQtzKzWxWC1ha9rq5fnZerna2dmR0V5hDhmPx4LAOjg4ODk5EUEk6FQN0tT2zXjvfWJUX2/o73i/K2KMAqPTWjeN630zAAgxGXZzoj3GuK8DiS/fzpKpmwKXDFtrRDRZYqLGLDGC+dLalnXtvUfU4+FIGtUXs8sYY4xBKRMVJFnOMYiqoGvqLDHROwXx7bfeCL756IP3KQrggKXcB6AUQ13Xk8mkqT0oTNP0+cnLL3z+c//WX/nLNk0uLy9NkimFaHSms7quCUAebW2UJetiIAJtMKUkZpLoOxIGS9FhiRxCqJouyldKFUqj5iBzJBQCIToiEkUQ7xtua4QtOl1aNp5iTycn/bgb3lGJBXzdq41yWtqHq+xWSBHF5yhWxKFzfq195F5ceasXa9MEEQNTS7aBgNAaE0RGJk/Bh1bmBDsl757Vsd8GvbPrsohrxeeOKR9b2mQAA4wdy8drLnL7FAHAAEZBR0vGTZJLcWL1xz/29sXZuXNNlmUCVJnNFs+fPy8GD7VpWRGI+rF3zUxa48HBgVBOAkCIHuJVCYiI+vY6ESnVzXV0X0TyAHDn1tEf+uIX/t7f/6fD4bCqGx8CMSzXmyTZSZRGxEgBIwUOTEprrY2dLdaBgZm1waODQ7LBN/Ui+Kpx6/W6dlVVlbVzq021Wq02m2q13IzHz4fDsVJ6MBwOB4NHjx4B8K/+6q9G7aWWm6Z59MEY45pKIdokkdqdGEqFrdhnniaoTVn7wSBnRq1M7QJz1NY0Ph4dHKDWs9ksBmc6pRTZKgCglFAY9tFM31Bteyx9wtdLGXJXE1Ndn08cRl9x5a1xpsRapZSIPgHAer0+OjqSSloIIU0TBkgSKyq2PbNP72K7TaIACa+XkTt/AwAM0HfOWmfde6NO3VP1eXB/gCsfzFpQ4q/sVJIiwVU02T4M7dVJ9t+/5EHrI+J+6FPspowP6Y5ysj9anqf94quO1FB1lAjQdYl6Rc4Yo1UttssYS0TCnIWIPdOkFGBWq43q5HqSJJH+a57nwhwpiITxeCw3UcB0ACDMFamVedYougXWWjZG9IK4444Wy24SK/XJyWRHittnZ2dqX8GyBev54BSiTdPpdCpXaq2VfH2xmBVFJhiCpmm8b5h5Ot0lZqVhPp9zK31qN6v1YDCwWt29e19mFKuqOjo6unj5UrjilVLf//73pVw8n88PDw+TJCmGg/V6PZ/P0zSV2ul8Po8xboiSJDk7O8uyDIhTm2xWa6nV++CZWdpYdV1Pp9PDw8PZbAYAEr40TUMUet0qhrheryUbRkStMUbqKUL7JEYeIkE+I7bFQ9upUnKr4Guoo1LZftD6WSaxUfLEwRUikkejgUwrtbwoGncmI+9j470xqizr8WS02WzW5UY2YU1KAlpjTLWpE40UfFOufuKP/VED/OHjD6tyLS7NKtM0DZMGrRsfmXC13BDwaDR6/8OP/tAf+cM/8zM/44Kn2ueDERH5GGyasHfQicAaIV9TontRypZIjM6SlJk9+OBbraUYI3pkZuTaJrq93oS1RiDtr/rrKGhERARoGReyLCnLWsbfpd8xm81cE7KsqKrq1ZZo3+XtXdKWZRAc5VVULXNrwNxqOQBRuAbgajMHAO4ySa2NMlraz62xwit4ilGAHMMWYRYiJjYzxgQmJXq+3Dop6Pti/axKRzPO3KF4EIGRO6nWK+WNq+vprvO1L8BWQqTfrPLze/fuHR8f1nW9Xq8lpI0xvvfeB865GAi30nAxgNDSJnCe/38Y+89nS5JkPxBz9xCZeeSVpavFdPdMjyTeGzwA
nt6l4sFhIGEdHxt5l/1DtXqd8CgLmEuu6UlO6jPY0VxtHrFlQthdvddr/fq79Zrk5X33/Xtu3zp8+CT6v1YhoaYxU6SxyToogajhBa51zlmpTS6HtV1Q4h/O7v/v5f/MVf/O1//zsdP1zX1RdffPHv/t2/K5lficn0D72jUhY7041UNMhE2JgpPJKcv5bGD31/ZuNzSrGu691upyy5HC7cDx5Os/ZKybF8KWRQ1pXU5RqGAQUAq2EYdNxWeZtuDN0VNzc3zjkfgp7vbrdTT6m5uCpZajChIi0hBA10Hj9+TEQKZd/c3EDik5MTZ+vVatV1nX6wbZc+JmZomoVz9vr6erVaHY/Htm1PTk4uLy8111eG9n6/V03Ki9Mza62SAQ2Sc846Gg7bm6ur2jnnzOefvwwhHI/9s2fPru72d/udcfbkbL3erE7WbQrj9u6q48Z7nwRCTINnBvRRRp9ikgQYGGLiKJI4ZQ01A8CGrDWAJDqKFBEnQVsWRjFAYBBYOCXU+oI1RGScJYW+CBl1rifDVMecbGaCe/cjs+EHzExyH4PevwoCCMwMESSbaxGJKREYRGzbtnLGEsY0orAdY9d1kkYha42xFpk5sAQfjDHWSF1RVU08UCWyUJY9KNCOcRZnyGI5zpIS4NQMYktuuly2xjmY+Z7pbb8Buf2oxUZEgcQS83JN/AfITN5y+yCiISI0zmmc/X4DLUtEER+G6SMsVVMbchGo/Hq5KOWQJvSxOIt/7FEc8MM3U8ZcQb05PKQGl0UzxnxcAYQzrP3RtSvnWQ7aWrtarXzfaUq3XC5ROI6+bVuqJo77fI30Mf3z/glENBbTOI4Mxjn37Nmzb1/9MAyDyDRTqNsf7u7unj59zDEZcpwZ+791dd6PwuYhHuXidErJIpWX9GLHGClGay1aizmsUwdMgG3bKl8A8t2SZoIyOGvJlRlcnzIRWpNRLejqS5pHKnRZsrTyd474JiaFiPR9p4RtnU93v4jGqH3XuFV/WtMXyDKzGb2YmmpSSqt2sT49OR6PmsbFGNvlAhFvb69TSk3ThDgS2uQHAGLmw+Fw7kdbteoUERGA8qpPeBSIiCRtuo0pVFUVqmqi5ywr5+oQwm63VywhO/J7uRIASCGGKSjKdzvhMAzjOCbmcQxayDwej/v9vmmaYUzv3r1j5u+///7NmzePHz9++uzx+fkpCTStzo0YCRCQtRvV2brAsADw+vXrYRi6biBrQghff/316empYh9/9Ed/VMImKPzBrIkmmTKmF0jDIEQcx9E5WwwZ5aYDRTIKn3m6cXN9wTnc7Y6aGU8S3zxhgMxcCPbq/nM0cG8NMc/g0ktjkNSXF+6eRgYFBLbTzIxxGMeiW6kNxGnq2znqTkPEw3ZnrT09PT0ej8aYV69eaRBGRMvl8vGzp5wghHA4HFTvsKoaRAwh6PxKXTdGWJ+evL28rB1pQ1QI4Xg8LpftZrN59erVzc3Np88/UdFpzYGMRWMMRR/GISavvLw3b944Z+4O+/1+f3d39/nnn9d13TQVEuzvbvtufxQOISWBIUofIguNIUYwkZMgJeEkKECs8AIAASGRtWgNAHIGZpRPRAIISRimtCzGiM6CIWMMEBqtAVnDIIiQhJkFldSTpgLcXKCwmAv9p0Vb7sriIcgwCIEwAAkmVfBV4+I5gaQURj8M3g+IaC2R5xSDiBhOSMZUDkEcUWVcXddKrtYNE0VAEtIk06bmgma9Q/NETfIsE0sT3bK8Wa3TYrHSNEyPvfi2uXEuf5eQ/UPTPfOywiw8C1CmBE1vE0EQJEPWVIQEhjjrbLBEFW9QssJ0kNYo4RHMnIElItM4pmJw1P1hht8/GiiUl+ZvmL4EzPwT974g8TxfKo7m48MYEFDgwW+X3+OHpVzRuTMqEQBy2G+JYNku/NBbZ54/fx6Et9vtmPWbEJHwQS0TckhIZEWSSg9GAQ2xX758+fd///ebzWmMXu3amzdvTk9Pm2rSNE8cP3oKv+VRjBTkjT61vlRuHvFpf4uPk3+lHGqoWdRukJREE52pUoukfXXFIqtfKYzoKb5hzqo0jR6DOjwlYKtjLlc3pYRoSuBZhoCqjTs7O9P3KEdGtdHhHq6YKEtK8Q2zUe3KhlH+yMQeqqpuf+jHQXs/NpuNDpvr+6OqAacQTe2cseQqRNnv9957O2H4DgCiMIAQkcUKgBEx5SjE1RUdp4MREZX7j5GV4GqyCOh9wSafgjoYkIkuzsxjnCSXD8fj4dCN47g7HiSLeo7HYCypNb+5uTndnDx99vizzz778Zdf1E2l11oSJw53d3d3d3cpSl3Xz549897f3Nwojv348dPbd3c6Qc85V1XNZ599dnFxsVyuCVN6aBQAYE5lL5cbZ6V6NV564Yhovv3KG5RspV5TYQ9NNLVDrK5andNQSiEppXEMSA9ief0h/af2NenhqZNTVlqYZEAmXZdSplXy13q9ZgS9Rsvlkn1QhtR2O8EMtXU6bFjF7+q6VtT6Xk7LpxcvXjCz6nWISN02ZI21tu97772geffu3bNnz5xzdW3X67XKbCFi1TYhhD/4gz9QApqoRJ11zByT7/ue0mh0fDrL69evLy+vbm9vb/fdEEWbjL0fandxc3N1vLtCSKM0x2OXGEJI3nOIEiKjqYCMKJmUUpnnIgiEhASWcOpAytaPhQ2SIAgI8PQkpBzr1NXU/ltXNLXJznp5mVlYVW4EHqRf82RoDj6XQI1TIGsACIAtTvQLSUkAvB85hRTGoeui98agtTaFmIIXkZQQgJwkss45Y61VebXJ2AoYiyTgc+WiCNcXVycZiqOZWBsZG1NEcz+DS3ERLcHw9P736Uj3nuUhQlv27XsLov9SInNJG4oDM0Qg0/KW5CRN+mQsLACgrYYhBBVFds4uFqsQwnsOXwP6clTlSN87tg8fMgPSH3zkg0Ko2m2YuXyZNcJYAHr4foWFwSDNdbRzlq3tOro5QNFXkQRILNgsV82itYdqCIMYNHXVbhb+Zh99zEgdSEwMIILMTGhwmoSspGsmkBQtEhlgoIh2/PSLi1c/2N7vrVtwhAR4uw1v3l1/9tmLxIMxaEQ3h8xdznsLhLNznAcWJdYTQeeqwGzJsCB7T0TeD01TseLM1ipCqDaxqiyQVM001n4MXWtbsmDpXldL38+5WKurH/KUQO+nEJ5ncpgxsI5BNMZYUw3DUFXOGlL+i0YJMU8Q894Hn2LgcThY61KUEHzKKgQpirYeseBisei6zllLpNXiJoTUdVv1gmM/qFIPSySDBoWjB4Dlcrnf7gjtcnEyjj4laZpFSokQLQiliJjefPfNi+XG1hVzNMY4xJTEIhEgGcPMzlqJxiBFEAXbfX9IzLn5uPJ+YE5V5WIMVVVrQfRwONR10w+haZqqXijbDpH8BPNy3/ddPx4Oh64bkkjwSrJwwSctSalgZBjj9fXtfr/fbQ8pis5s0AiGiPrRHHuMzFfbu9dXu7Efrq6umPnt27ft4lsO0Rhsmqaq3cnJql1YH459d6sBZ7EUKomsZc4QvHOOyBGZvu/ruhrHkUH6c
dD5PMWqQuLd7V1VVRITIqI1ImCtC0GLGtb7YLJw1bRXJRqLRBhjcFUjkFxlrIj3WhuWYeiMMcvlUoR02tJi0YQQTjar3W5XN64UmyH3hqlpU+0LnXDVdx0tl411EuKiqkM/KEBijFF5VEQ8HA4n9uyw36/XK2a+uDg3hk5ONiEERLCIV7sbY7FpmsRhuWpBaOiO7XJxc3OzWDRNYzeb81ffd2Tk2fPHlz+8QYGnj59c3Vy/fft20S6bptntDqt2sd8fnRuWyzZY27Y1EazXLe/7umr3t2nfj30/Xl7vuwi2qs+Wrm3r/d27L774zAe+uQvMGwDYe/Bc+WHkJCDGGSBizwlRhtEzCANzZBC2gI2ryUUCdAaJCNWeACVOVNUiKACCFAVClIZMVdlGyNoKwbSLFbnK2ioFLZyAAULVdRKwaAQhJSGaqsFlCzGzCEICtBhziwdl3NKiKUPgRKKgQZHInIIXjr477rd33WFvhEUSjwOiqet2HMdxGC1SjEzkq2VjoUbf13UtkGKMlogcsYE+hiScIleYGDD6gII6iobQkMWUJAZOjIJmiGxNYhA/ekIchr5ta5F0dn5SO5QUwBAo9k5ZQOJjYgyTrRaQSfp6KpATgCbaiEiIkVOMUbc3p2SNCTHGmMAigBC5drkaORqDwHGC+qKQFnNDkigkBEApidssgjA6CxO1a1aNZkEEi4RAkLuS+CGgrfCewq2TqybSHrLsXCckwMzmAxVPxFkXDLJMU8H8/0nzgPXbIMtq5a4TnL0E1trgh+V6fTwe9vvtcrls65qInBsQFb5NAEhTWqBybg8iDMkV55QSWSKiEONysfj000//7m9/vraLuq5FIKW4vdsfL45Nay2i8PsRx4dhy3vPzP9ZtohkvrH+HSdtyEn8uXxEWzUQp6KIy7NdUUexuupwOCjWZ3LzEs1knzUQ0XusFHcL+FN4iZoiKwTEuUZYpKNKbqTDg9PUF+9L3K2f0lwqxbTb7aIPi/PzCWm3FhG1Wl8qQCU6izGqSIJkzMN7j3Qv11XqnYjY9/3+bltbZ+oa7qUhAAhF7nF+k1kCmqnf3t4OY7i6utLiQkFH53m/LniMUV0RAKA1FGW/3/d93w9e82+YsdAlAwwZWZkeIaTr6+v/+B//o86A0u5YXclxHFeb9eXl5XF/UGWDCdT1aAAVJNY65fPnz1eLpfe+rhclrqfM5C/VgYxYYOE0hBQ1q5jHvwawkPsCp3AM1k4SuwDQdR1kH6mAMM5YCAXwN1lFVQc86OWYAzD7/T7GCCKr1arsap3EELJe1WKxgHvGXL3ZbDQTUt1pbfw9Ozuz1j569Gi73VZVdXt7ezwe9VXN2pVTreT/Q98R0c3NjeqeIiKhFZHI6ebmpu9r3T+afOeK/jR+EQDevXt3fn6+22+/++bV+flpSmkcus1mdXZ2drJeDsNw3trr6+sffvhBiyzA99rXqqFtjNvv91pE67qu85BS4pRABCdrCMwcszVDRDIoMIG0VSMkQICAIrmlkBA5MaIAGkQkFAJkjimCCM1Df8nCAJAZJ/pQUrEG0DCnXzFzlmvwabIbBSMRkcBqLgwiCiHwNGgQAYZh6A6H3W532G2Jk7XWEDhnRCoiEEmSEksMPqmTQWNCCAKTilFidpXBpD3HYhBB5/6hECIwylS9ToiTiShcOY0gMCMlkDFRflhYfc/7zs0vzsrD89WQLMkAOeuVov8sgkjOkTEG4N5q5W7Z4p4m06G2Tos4Zho3IEAfOaQPH8UEzY5ZmH+jH8FcSQWWD95wn2GXR0mLP3TApEkwZCB6djRT6IBTVbWklQigQl5msVhdXFxUzux2d3c3V865/jhq9qYron1dRHb61Lx8jaKFrmK7RaSqqk8//fTXv/o2hLGurHM2pbjf74/HY1Wvx5EtZaMPH3GrH1up37gnjKEQJoGFUuEoiO7cpw6Db5oasjWciriJvUwykJQbc0vEJLOGXcido7p9FXM2uesXZzCUgpnF+G42m7mXWq1WJcMumIa+VFpgU9KMOY1jn1KI0e/3+/1+P6kBh6C6IurFjTGKiqcUvIfa6Y0aMPdITOl1ZmQE73f7u83JSv0E0dReh4hETjiKCFlntAjqJMa4Wm2Wy/V2+0PXdZJ4vT7Rry0eSJu11E6NKQ19f3FxIQBd14lICKEfBuVJpZR8ZH0zM6uZmJG5JpIdEWk5cxxH/XL13FPl9YdpzTXsULTWh6G2zjnjnPvkk09+9KMf1bXTuQKcqSt6HRXSUEKcIm+chVmmmDdMhLsStVTGusrpHOLyzrOzcyX9UW7/VZhEL64xpsy4LI5cffzkunJsx1kZn3Ot2hozzSAyBhGLGgYAHI/H1WqlI0/01bqu3717p3MP9/u9Ntjc3t6q2OezZ89ub2//4A/+4PLykoj04ynrKKn+iSodhuAVQB7HcRg71ZcG4N1ud3Jy0jTN69evv/7m1y9fvnzx7NnFxcXPfvaz5XJ5PHSXl5ftomHmk5OT3W737MlTkLS9ueUQx/4IAIe3++3uMI6jTIrlY9M0WkMJIZycrFNKu2OHLDHGQ9cPcVIqIjACwopRitLJ0Y8jxwQsjavWm9WyaQEGAjQGUYRjUEZCYhCBwIkTQ2JtpKjIuYmfL4bAEJAwc1ROkCa+ADCJUchUUi07p/gbLg02U5svyazJDbI/SyCIaMgRADMfdvvDftvvd34YUko+xFqEXGXt1DIQQvBlzCJzZB7GDhGV8WCsnQinZIAlCaAICguLACGhCBPZpCcPLIQAGEJCU4lM7ey67bWEREQiSU2vPKywwMwrF1OsxfCSMOgf6iYg92cWVz1teGbK1l7vBQFQlVDQuEezUpHEaV7SXiwWWW5TiIx8kJd/6BTKM+XX33tpfmqQidDqGefuVluNMbdEl2udreXHWNCzByHwewN9Z4vI2n01tUawOOdS9IvVer1e6x0bhrFdLprFsusOFEgEVeQEc9fze/QonMh1hiVpmMzMy+Xy888//4ef/bJyCxEhA97716/fnpyuY4ymumfD68aef+F84X77Q2ZKNPhwBlEBitWJ6rZT46K2rK7rFKaRghr8vrfzEO+tcMzzBPUP5URQntwwv+SYBR21njEMg9Zi9SALHVcdTFGchhwWAMA4jqoJoA5Smc8x+vK7AGCdUylEzck0L18uW+UsaBRZ1GLLT2CWyO6P3X67a6raLVb5QuQrQlY4EpG1lYgwiKu4qqqLi4vtdrvbHaq6UWqPplO65toRBDmmLnqZ+mTX930/pvvHvf5XSlyqsDTN6J1QiqnrXoehzmIpLRMo7A9ayjVgHRmsY4zONS9evHj27Bkijv2Q2Sv3WkI4U2KJceo+wiy8WpAGNyOLppR8GFiiMXaxWIzjOHq/Xq8BQDUFFWLRrmLdcnE2fCnNpgsrdKHvwawXXSIwXUYlChwOh9Vq1bat0qZKaFLyAxUzGYah6zotOWt5IuWBmABwOByePn1aVdXhcGia5uzsVKVU3r59q5fJ54f+il5ZZW+1bdv1/fn5+fX1tZKzNifrz7/47PXr13/2Z3/2
ox/96MsffbVcrbd3O508r5Hr2A8/vP7u+ZPHyOl42LVN5f0I/dEQIKKxdHd3h4iUby5tRzl2ndrx4/EYYiCoJWcLggQiavy9HwVBFR4MgTVYW2MNWkRnbFVVliDrP2BMaRyDNY7tBDpaY5xF1XuujJ3DMFotFSkmDvSPwCnGqE1xORG6rwEzAjHHCIhSLiIRqYaiZPEiIQRm74dhGPb7/f5uJ8ELpxR8CuKtJzuR7WHy90F3aEpBoNVe+cPhQMaoHKkhQuGJ55aSpBQZkOyE1KoaByAJaKN0qozSOzCbLGOxAIFqnHQW73te7T0HXAxIfpIBJKXJxhafp3VlUcma2SxCY6YDoFkXzMQXASjdgJqHuLomoikO+k0tM4LveboPfDDOvOz98/mZDE3f56UPvFsCEZ4FW3lN/lEIevLB5ZimXqu81lLGE5ERTgKk0ler9cknLz7/5S/+oW2WTdN0XYdkcOLyCTMjlFXO1GjQ3FGrtsjMxrqUhIhevnz561+9CnEEIWvtOPavXr365MWT09MNgwAIIX7Yb1ZY3B9f8dlbIUs2auRlcCrTFk+m94n6FcW7ikAg8PQn5QfmUZow00KSGWuOs2xk4b9A7grgLIlOeTiu5NECeoQKBWvgWQ54jhiXW1cFIpbr1Xa7PT89U7KMhsY6R0d/zoJTP9G2rXJ8tECbUrJkyACzykXV+isxD05QHzMO/dXV5cRAsTYrDxTiNwoAGWOhVnb/OFTtav38+YuUvhvHMaR4dnb27bffIpIm3zFGdSSQgX21p0TkQ+j7UWU+mTnyFC5MNwcBMBblVNKmo3zhFAeGPJ8KihABsoAhA4Bc15XVc2jcctnqYC7n3Gq1CuNQ9DrUExdGVeEkxzyXMGWTMQUEufOtqipkSTwdwH6/b9u2seZ4PA7DqJ05egzM6Xicahlu0u6GAgMSESLEGESkbRd932MeqasJsapZaQvQkydPdAawEvROT09Lm5xyjDGPWNc9r/98+vTpf/2v//Xzzz8vJQkiur6+VuBht9vd3Ym6apVc1m387t27Fy9eaMB3PB51Zy4Wi8Visd5sjsfjycnJ7e1tCOHq6mq3233xxRdffPb5n/3Zn3/z6tt/8S/+xcuXLwHg7bs3X3755c9/9veffvrp7u5GRAildtXQd0QEICGE7rDv+56DT2JEkMwk4RRC8F5Skq4f+8Ejqi8AhIlelRiSxCjCAmoQCcRZVztjDSLEReUqa5qmsoYgaYYqPrKzlmVi1amTqCpbV44sGnvf/ldNd2sAoLmQBWMmMc0GxZc6RQJRr1a2K+QkL0zj1CYPFySFcdzv9/3xcNgfjocDxIAgKXhthWyayk56t0EkqUWaInWZPJOIVHVNpkZE4MjMkiKrHYsJBFmCjrlBRgDQHivNQUUm6XulQRVTlpUj1di+32I7977FUsF9fiIyw58xd/HpqzGlGALlQBC15T3LfMJksYmIUggKwinJlCx578/Pz7MhvS8TzC3/dEj5mEXedySTe0ICfHAKc89SztHA+4oI+ig7p3xqcg2/xTPdL2ImRau/fNjcdV/qYwREY7U/Fc365HRzcj4cdovl+nA4iKRh6FOMSOjIBF+avWB+ZFP4hkBEJADGgPBy2b58+eIf/uFXp6enzOycG6359a+++ek/+0NL+cAQROShiMnkg+UDSOQj5zhfXELFb/VTmpiq2ZWsCGNtO449sEB2qDyTCFZvavIAYMgaGphHdBV6YUm7JQPXugjqXEuMrNBoociq+F8prGq2Wr5Z7zHNJEqWXFQqldJlDBrjgCZasjGo6GZV6Vh6l1KKPrBEANhsNsWrFRR0wlRJuuOx744xxrpmIs0GMIEAEqckZIxhETHkwE0M3vXpycl+f3t7OwzDsl1opylk/LaqqtxuQTp8Asn6kPb7ozYgdeNARFrMSymFkFRmz1YmhTQPlfLdPnHLNbNUr4mIbduyxPv8PkQyVu/Ek5MT59z5+fnJ6Sb6cHNzZ7Mom65DYcPpTaFfYvJIjxLaa3miGFyC+9GEGmckTuv1+njsdF7N8XjILhZVuFtEFEDmmb5B+TkNQVSjVMMg/VFE1M4iyMlrlx+6x1TMeSKvMqvzBgBrrU4GfPHihW5gTZu0+6Cqqru7O+bJPprcl6UI+ZMnT0oRerfbrtcbPQVFGhSvbtsWUPb7/TiOf/3Xf/2jL3/0//hf/9fvvvvuz//8z3/v937vs88+u7q+FJGnT5/ut7c//vFXu+12tVgedts4DtaY7fXNOPphDPvtvq7r4zAS2cSMiCEJpSiM4+iP/ShAIIR5MyBZFoycIqcYuMRJhOAM1c5WjhpXLRs0xjSODKqsLwCQtclVhhOExOVWrSpbO8cWUEBSSCmUUFjDaJj19SYQ/WTKQWGJqhOIjv0p9Sk93mKOmDlOYwgYwYx9v91ury/fpRiTH0HEgAhLDEldqNaSdBcBC5J47wERgA/bO0RcnWysMWGMKYqhaQqBJPWFQATCwEpCloQMjBPpR8dJ5SMkbQDR/TMlQJND4glyeNh3VJ7BrBGtS4K5TUAyDKnRTMk9EjPgpPVmjHGuss4ZY4hs0riAYO7Y5lehaRrMCsrzAvADRyD4oXOdO6bpX7MnP3DA+TvzLxQLICKcO51EhEgXSX6LA74vA+d/gio/684AAER5b4CiiBiyCSKiqZuF9x4Znj5/cfk9pyQ3Nzcioj6gqCK/54AFytlycScqt2KJvvjii++/fx3z7PQTc3p5fXN5dfPs8Wm5bCJSKt55aWQejv0m7zudZ57Lq2Gz/lMhUMrjZtX9TArDifV09JkJEtW0+GHaHbMqgkIixXCXKK9cVMyj3VXjXkFj/d3CgFAQby7jUDxK+VrNj5UgU8Y3abVGfflud2gWrQLmzhljJqnk+y3IYq2tap2jN43SK/Mkyha0SDHGMPTBj7JYGCIBAFNmbAkAsHUIYkQQcblYxZCE4eT8zFp7c3Nz6I5nZ2c8tWZN4oVFL0WZPupvuq47HA7dOCDi0PuCpqapCMJ6vfUuhSzPKZIQTYE0ShFOoVE/dIMPRLReTVNsa1V5MEbroyigjk3tgMtqA+oLtcpu8ozIEEKRtkh5ZGSJXUhA97N+dhxG5xwYYua2bWJMTdMcDvvj8ajoMc6I0ylLfBQ0RXeaTrCJIWgKq5ZL/9ATbJtGkeq+71VZWk2SVnlpmqweiUgh8fLry+VyuVy8fv1Ge7E+//xzjX4WizbGoBtewwgAUDGs58+fFyAaADRNDyEsFktrrQ+jBlhN06pjXiwW/bH/5S9/+fu///tnZ2f/4T/8h5efffpHf/RH//W//NWXX35uCYa+/+zTF91hTxKvrq6O/VBydCLQ9DpFgcQ+Dygb2R/7LiSuqipFtqAlCsuAKSYfg4/MIMPgAdhZWzvT1nVVO4NCKJU1iACcEiTtahdEQiQBIXFkjKn0QoAkQEY0MUbvvSQGZIkpwiSVYwC1YUTFN9Q0lMlgc1uh6S/NOjhwmrY7Uee03VEdcHc8brfb47HnFJIPwtERogBL0lBARCrrXNNUlSNzL4xKRHV
VIREy6vVq21ZytKfFS2dMEkh47zpUvDwqXZ+suoaUEiEgio5+YmZD77mD9F5/zdyrlSeZWbX9U56jyrnpqMSsIveYPCIiqrSTUR+uU+cllwuZo1YGAUBljjT8h8nJPnCf04EBlURoHiU8PB0pDvg3nQtM067eP+UEIjlDE0mQZ0ROVgg+/njPBwPCVIRQeSyShy9nFjGAHb1HEGOcCK7WZ/7sGHQ1ARQGJEI/jPBeyo+TTnmSaKzhIU+pjIlZjKlOTzZffvHZ3/z3vzs5OWGGlFJdNTc3d48fnWKagGsBUSXPEijALDaZL/qHi6tySxNvhUBr+MoIFRGbhcQ05B/Hceh6zYnV1UlGY3AaTG3nm0bTX80VitvgPB3BZkVWyQU/Ilqv15pVaAcnZ4hbIdlyPCKiB5CyckiJ+/S/iUPwQ5CpdB05GWeBDJqk+GEI4/n5uea41lpdLudcCjHEsV3UdT1diKL1EePkJoloDKPePLrvFQBW6Kzsdd0YAsjM1Xqt+VDi6MiIyLt373CBFxcXb9++DaGvqjqEkKLEGMmaoh+itUy1ZcJT3TfGyKWEoXkDCE0zgrzI5K5UvNcZgwWOy9Cc5mR1Xavecl3XL168UJvS1HXlLAAobZiIhmFomikB1ek3fd+JcKFwcyal67YxeZCtiEhMHrzm9wpIhDixl4/HY13Xx2OHiGdnZyoRtd/veQZ4lr1a8O1yh3vvEzPR1K6mj/1+r5vTGhNj3O12t7e32tut37nZbPQ9IhOYvNlsvvnmG/3apql/+OH7i4sLa+3hcCiV+zdvXispAQC0qKwMF3XGfd+rgJfubc22JY+P1G1mG6uM65TSkydPtne7GOPf/M3ffPXVV//23/7bv/jL//vx48c//vFX//W//Jc//uOfhnH0w3B2cnp3dcnB+6Enoq7rdR1Wq1U3euesj52uvHGWxyGEBCrUQGSNJVsBYYrsYwohRS5SdEKknUJRkkspYSVRm2EECDABWNKSWU5GhY0x1pIxBsSIMAMWXJcAE4eQRA1dQjRgokxlv2mA7UxTgu8pOVBsBQCUacEppcGHNOlTMydIyR+Px9vb293dVhJLCs5YbCtk8cOYUlo4A4ySWKNIi2SNFeskJu89WVPXNSdhBeRYQAiztgZIEiRteGEOCIxowBhgTgwibEya/AKKUtU2m01du3HsER1o2fMhwIso73likntrrA5YbWOp6cBMkUbvUCJSmi8iTt5UUEEryHNEFAFSBXtOCQA0iHzPncnDNBfhnkJUEoa5f33Pjzz0LFJO9p4FPXM3iMiooiAP6r5ze/VbIOgPfPD0w7kY/N67c0tGyQZYgAVW6xPv/dPnL9788B2S1dKkMUYY0mymmw4JBgDlFuUEgojAioxxJLSPHl1U1TTOr2kaBnl3efXs6cX5+WngRJKnyaZ7/4S5HvnBWcz/mBZEPRBkcSjNXTRt0mi98Kc4dxoo4leAR90KahyVeVt4ufpzWopT6Xnn3H6/n1OfirXVbFWf1xG2BcQLIajIH+Zh2qUBSdMvVeTQg08p+eCJqG1a770Sntt2qftguVz2fX84dJqOKIsVgE5PT2P0eo7jOFpL0QctA9vKFXhHWUJo7DiO3eFYGUoxxBiruo06hCfDGDF6EkBHIggRT09PD8c9cLJIACySDrt90zQnJyfqy6uqOh67lEPjw+FQtU3vRyGUKMJYeqwpz7rALCWhYycUEwaA2plhCGoXiIBQGMUSMGLfDW3bbjarcRxFUtuuN5tNXdfLxUI1WFKaGKQ6kff0dMJaFJ9gTsz3dQSadWdNehQhAIDydYmIrE2cdHiGunznXIqqoKvKG6hXQc+6FFa1+cdaq9VWEQlZRBoRm6Y9HA4mT7BWg6WUPQ0RttttjPFwOJycnHAGsYdhUDaynp2Wh+/u7oZhOD8/0/3TNM1qtRKBy8vLq6srY2i5XLRtu1gs9Evu7u60xfzi4kLvAj2v/X6vHC4tmuhl1YXa7/fH/eHdm7c6r+XRo0fm/Hy73b548eLb71598sknf/qnf/qXf/mXnzx7+tOf/uF//k9/+eOvvrwmub56yzEddnsQ8XEMKfb9iGhEg4BuiDHu9we9fK9fv0UiABQkLZwZY3TcOlBgAA2kUkpNXTXOgSSCydYDwBiiJTRgCEQk+ShE1pALHFgAyDCIIBARoKQk1li9SZVgr1ar8Awi+6T2p6S2M/BvZosnkh3OKmV6o8UwQbJ+HFOUEIIqoozjaJBEkMOE1kJiMjAMHjEsl23fjwbRuQXHVLvKkglTNQSSJBU86LpheVLFMWgUG1m0gCuErErEyEAEQgAJgJjBuYnr4Ed/dnZCRg1d5ojNfEzhAzNrM8hUUFMdlbKNtYxSwmJDqAIakqk5+ocaUufqyrlMITUJBDPIRMAhjDFGbQSKnJ48eUJEISX1jsYaziL8IpNI6NyhftRHzgIjLGXK+acwV2b1JcoENAXlJauQZqhyijAy8vEbM+DJq8L7ie69r3rQQfTwZZiR01y7bBf9YrVqlouUQoqeowECmi3u/IOzibDZMSJYJEE8Ozt7+uzJD9+/Xq1WSbhtlyLdd999t1wum6aBxLknpFL663srm6M8AmR4GN3MVwSyXLMCmIZcSql0cUwdfokz8Y+UC1O4AwVH0ncOwyAi8z4Q3W2IqCKUzjll9+jxqE0sfUEKDyq2JvcgDD7ovWOO0/yipGq6i8Wibdvr62sAUA2scRxt5Q6Hw2ZzOoVmzNfX10qftrbiPLcuJdlut4jS1k27qJmnozWWWMdlO2uMqBQrEQ1DbwBH33ddt1hvNPVhBGOcblIisNYScIwxjL5y1hmqjO1RiKCp6kVb+7FXT//o0aPXr1/f3NwITB3SYwxjDOGYFFxNcRrhVzjkAMAhACtB3RBZThOl2eSRVs4ZFGGB4u1SSrUzbe1A0nq1Wi6XT58+Zeb1em2tWSzamJWcOYs1quNMSccqzLojchNFwRU1gBvHMca4bFr10CGOumKci38ioli3BnlF+KlpGlVL1m1TUkzIMMncNili7DMdT79TU3bdS+M4Xl5e/uhHP9LNoM1FiqPorivV8RjjZrNer9c//PCDOtr9fs8sT5483mzW33zzjc4Jvrq60ux5tVo9f/785ubm5z//+bNnz5Quvt1uz8/PiWi73epB3t3dqRypzktfr9dt22632++///7nP//5F199eXFxcTgczs/P7+7urLV/8if//Ifvvv/ss8+cpW+/+Zq9XzTV7u7WjyOiMMhuezDGubryY9BlUQK5llSQyOC9nuJEWWAZtfALyfuo1Rm9ag5JB2eklPrRA9LoGQkMkiVl23FMPTMLEmO0nFQ7iYgYFJEoqLIACmOC3OQmOJlgKAlTvnYzu/eg6AgA6hw4F5s1rgKhGMfD4XA89MEnAPI+eu8JJjiKY2LmBiauCTPoDInKOv0JSyayIDKRTSlJSonh8voGEStXu6ZGNIlZRKlXyrBFEEIDBikyA2p70uTDtIm81K0gO+APXVp5mIfyxpq35BNMCBBj0g0JWROQiNRLUJ7wONk9IG
UWExEgGzKagWAu8UzrrEeLoLRwmlS0AOD9Zcd7SjbM//jo42H6iwVnVRQW8lqUSCsnhAL3Pht+ewasj4/oZH34KIeeIzgDwADaus5Vu1iv16vlxvfDgEcR4SiO7qHwh6dK+fD1DHSoF6QUm6Z6+eLTN2/eDH60ecbq7d3usO+Wi3UUn5IYMwl9vA9xzxYOBCEPSSzXoGx30JBKQU7MfaViCsaoG13PuniC4gsLNEpZZbqg1pq/zk22opH6B89UrkpxVy+epst68IfDQRPfUhhWP1SCht1uh1nlQxnjddWqUIuaHi2pLhfr73/4thS5Y+SceTcxRknRGjQe9YobS8aYBImZaZLNswXqsLYax/Hu7u7k/MK6CGhM5Zhj0b9GSQBgEK01yFrRScMwHPZbPc6maVI6ej8YcovFou+Hw7FHRO9jlAgAEwzg74kIMmOV4yy9IEAlZAEwkWnbtrLTiEYlEyn1mrIA/Xq9zl1AXV3XOudDiwXqUGWSUuEYU4xJ0WubNc7KBqZMsFTk32aRZ8kc+JRiidKU/XQ4HIzxNS41fhKR69ubtm6U/AUAx+NRT03TWfjAUoiItVNbtk53ds5peU/RF91FmiIoKU/jOW1sK2URDd2Ox4NiM/v9XudkxOkRlsul7vCTk5NhGK6vrzebjSbWIvIHf/AHKaU3b970ff/JJ58on2u5XFaVHYZhvz/o1jJ5auH5+aOvvvrKWvvmzZvdbvfLX/7yyZMn2tSuv/748eP99vaTTz6RFG8vLzXhQxRI3IcwBt8YyyH4FLtuGH0EQ4nDarUqAKYulCGHZhqkHaOPkkTE+4FZqspWxjprJIYYGTE6axigG0VSBABrsDLinADQOPqQpjoiUdDuJGtV7QkTiPNpHMMwDFVdM07gBCIia2stwoREzigveilJlB4wN1YiUJosQgTvYwpRO8q2N9vtdtt3HSfRtBhAnGNjDIfIzALROQAIAigiQ/DLti6FDwAt0SAzhBhSSl0K1loGDCxI5JwDY9M0FeYjRj7GaCyFEBFFsZDJWKm9/QBwnp3sg6pwoWJoMEqIMpc3nmkw6N0HAMY4Y60xjogETQaFWUAgMVhKk1q1iTE2i6WaYlENEwQAEO2ZmSe/v4GWO/NlxSu/9897nHnuxRIKyEMG1qzWUODY4nf+6UpY+qDCxiqUp/nh5j+wmEU0brFYnF08HschxDGEMaXEMclDZLwcbrnYIiIMUxcBijEGSJ4+ffz555///Be/3Gw2jGAqB1K9vbw6OTmzlkTYGDcOnVahAN7fQ1P1YHaoSgGYHUAiIk4gE2wCMevsAAAAS+KUe+pposJbPVg9ZU1ACxqpLaea4uiYOcWW1QvGmTiwXqrcdWBEpOs6zdgUSwQAdZ/MXAScC70LMmtMf3G322kCpPGYPgMAx+PRmgqEvEQQenTxpKotIu73ncL7iKaqKpfFjY1BEE6CVHj/LELTHaK2u2msH2Pf9+M4WleRcWZWATII0acxeVV29YMPIWy3N7c3V9vtrZ71OI4AE4CvQ5Oub24QzTB49foK7A/DqIm+iABQjCyStKUKs/iUc7X3AwBU1iaAse8AoGmazWqxWq18ilVtrbXDMCiA2TaNAoCKympEpcXUYRi03olIysmKWYLjw8BRMg0EcxMaqTmTzAWL05bQ0ym59eCjiJyfn5M1N3e3mr8+fvyYmfUwNC9XTUrNocvP6TeX3aW7SA+73FB1XeuRFy84YQkpaYatn72+vloul1dXV8+fP9dpgFpz0a5fTZHVMatMh2qSvHv3rsDOb9++ffnypY64IKK+PwKAxg1VVWmUo4Lb33///XK5/uKLL87Pzz/55JP98aDp8unpRnfR3famtk6LNcft1g/dOAwcPAAMgwcgIOt9AKDRx77vGYSI2rYeR9W0mWzl1JadZ46LiAoMaEeBxlJpaqgjAWRA7yOzEAJFHmBSmQhRYowM6tdDSgIAdWUBAFkYwXl/PB5dXS1EyFmbBe9oagZWeqAI6LRrlqKICJNvmMeRMsl1QdKWqekeCV03DMMQQkyRtZah1CAfIuaR9SiAhIIJQ0qp73rQi75s66qq9BSsrRJzjNNoc2bwPhKxtvoQkgATWRQERFYaNCcQVNhTt/fpZu2cE0jamASIgDouk2HWijO/R8okipJs6IMQ0SAzabJRbo2yLCYLPpcMWGZJtp64bmnJyLY2iajNR8Qi5Dh3MbPVnnck37/6T3nMTQF+cFTlgQ8f5W3/ow4YlFsPAIisFbwPD1eyoqlotkd2tVqdnp4d9tvjbjv0Joye5P6gkUAHFhcdGQBAZXrdRyISoq8b99lnn37/+odhGGzdEFpBc3V5fXZ6/uji3BIwA0ie1SzAHyzjdKgZiIbsiYmIOepCGWtKNKD3cIwRAay9j9HKKRtjACaDq752uVwWPipq9yeiWiI1yiUgUuaqFqUw6wmUXEqRZ7V9+j2qNVh2m8m9cSKi9drSKOy9196eMu9BmVM6fW8Ivq7rdrVcrFcqva1nNAxdjNGSodxN6JyTrEWr5h6BBKdbSN0Y57G4mjNZDuJR5S9QMIWQOAAniSmE4bA/3t3dXV5ebm+ufRj0V/w4GmPIOGNM5dzJ2cnrN28Oh6641bquCS20VFVV13VFbCjG2DRVWXkAWC6XXTe16TtjjTG1M4vF4uTkRMURRQStUWLzer1G5q7rTtcb3mzUmUGOujR0zas3TUeoqrnmwP0NXLLhgn9MliImZjYWBWxKSUmbGnhpMmpczcyXl5frk83Z2VlbN8YY9WHaI0SZ4l7yqkJEpyzr2PW9FvJV2Fkyd4GIJNd91UOr4gpneou6Un1n0zTb7VZDQ6WAIaLCjACgeh0ppbOzMwBQTvXZ2ZmIfPfdd13XKbqgua+IaBiEiNfX14rGr1ar8/Pzr776KsZ4OHQ3NzcppfV6KQjr9RKzbILm8ctFO/R8eTzWdb1NaRxHizAMQwJkoJQ4CWt9FADGcVwul8653e4AkkTyZEYDgpBiCtrsB8l7LwB1U1ljJkrPJNZWMcCh9yyGGREYJUmKVeC8E5REyAgwYrLjpJljBRkhhHQ89sZWSLYCNMYxgwEWxEkMGqYeGJkGEj6wSIJQBrbONxUzA4MkTklU12gcJ9EYnXmnKURKPPl4VEEroSQYIqEgsBBWlpJwnRgAhNE5Dpx0zAwz9/0IAIvFwk4SQyNaJxwBDYBB5shqGoGIyBiWREQ6BgaRNGkRvucJF2+gKY1qY8HMVxXLLxmb1UeJTZVYXp601slMSVDovktCREiAcpslsKjMftFL0C5jzhIZH3rWcjwf9xS/4YEfKGTlJz/SHDz3u+W/k3/5Lb/x0YdgHlYtBJAm7UEQ0GHGXEKMyXIRGQBp2+VmsznZnPWHffBD9CH58NtOVXLUktHklOnp6/X65cuXf/ezf2its8YoNeD6+nq9Wrm2neDZKA9rDR9hQb8XrWCefUJEtavVPJXklaZ6ktUdM5m22eBen1Hx4nphtsP0t1QhSO+r0iejtcZyIUsVWd9T17XWtzjPdnWzgXSSk2ZNdAqnWuf9a
XiLuRaiKV27XJKzEPzl5dVyuTDGcEwxxexpzHK5TMF7711lmKO1BMKaCOb9JIk5xolwp0iZkrNUUoqZgWOMSjHlmLykCImPh9319fVhe9hubw+HQ4ijiPDk3cV7P4Yjka0cPX36FIF+9rOfd12HIZBA4yptJVIuBpFVJ6cPzntDo5m2bStLzFxZt1g2i7qx1m5Wa2MppRhCAIFVu3BNzcyQ++IcmejDYtnoJkhZY08yNT3OGrvL5sGZJnOcFDFxHqpHiSmlaY67gMA0Vuvm5kZ9xrEfNd1Ugl4RxNAsSuO23W6nImhlJ8z27dRVrEwCk3VJNXABgHEYttvtl19+WTQ+NapLWesgpRRjiDEqg+/q6qp4a8jVSnX5L1++7PteSdHK84q5vU0/2/f9zc3NZrM5Pd2cnZ2pMb24uDAWmbnvRp0RWdf12dlZXbdEpCC2j0HDgnEcz85OPn3x/HDYEcpytUjWftP3dV1DiiGEu25cLBaREyeIkb33vR9FRDvZ+v5YDNx9DjApLoOeqUZmjgwhYq62RgZ9p5DlBBwjCFtEFg5xavBDnMiuKs2BGI0xaEhYYoxd16E1VVNreAcAhFTEnhGV0/S+nbs3x7MmmeJ9U0oi1vs4jiOHGEIchsEPo/qslBLzZBg1jRRmNtYzo4BgJBCDgqMXcQwxyWQK0HtEk1ICwuijprDj2CNK0y6NMTF4IQOQAI0AAbPW7RBAA26lCMQYXWUm1s5seLw2/pVz0dOeKzRwJkOllHjaqNP54qz1jt6Tec8BaEkCdS6qEpB1K+qbF4uFyQqGuaI7lbPx/R4ZmQcH8yvy/qX6DY8PPpiLpyAfrdiW9+tP/w87YAQzrfVM0GR6Scvw+dCJcns3J+GkVmaz2Qz9sT92HOLD457+y+kBsF4e1hlKkkAqsp9//vmbd5ddN6BFImtM3G73h8Nh1bbMrDMrLOUkunRYA+CDlX0/6rmPSsqoCmtTSi7Xm40xRSxNV7CQYjjradg8MclmrfCCWyqhpvTvzjNdyFJckqWmNK0sOxKyvhpnwvN7sIwmLgpW63D1lAfF13WtAlIpTnu6aVqiqSAXRw8ArrJ6gZqm6VKRYxVmTjEUi6wuk5kFJznDpHJUzqpFbhchhGARx3E0OphMUhzG3fb29evv3715o1QYMtA0TUoqFjYll5YopHS1vWoWy88+++xw6P72b/92bkmdMcLcuAYRVaIPSSrlmQMYxEobujhYu6zruqlqg1LX9bJd1E0VY7TG2KysqQYXAXQsVYyRzJTs+r6vmkZ3RYxxt9srCc5aq5KT8NDEq7mkzL8rXrAYmhAC8P19jrMJaIUSpcoYF2fnu91Or6ky+LhIScyGTMwtFCIqLFzIAbrlNCvl3K8c8xgW3Y0mi1lqYKFPqoaGioNqSq156pMnT7QfSSM5ZXhpGKFK97pjNWRZrVZZLU6MMV1/ePvD1cXFxXLVXlz8rmIYNzc3b99eAsDz50+fPHnSLFpNekTk7u7ueDz++EdfvtrerVart3c/6G7cbQ9d13Wd32xOlIvn0zDGoAZX3f/xeMxrMt1cOqyFIVPYEOrKEYGxSICYYU+KMQn7GI2rJWEIgsJgDabkJZbeayJw1sYkMHqJxjrCqmZmNFESo3WL9WhDNZE8lAbLDCKsV5w+kgO85w8KOKFmBIHKM9lbUCb8SSr7gRAREogBM8Zgsw0UwjF4IEzCDKLtJ8wBIIFQSgyYyhZFBeoEmRmR7xV2AAEAAElEQVRFEqBI1PkTBkiYQSSyENFms0HExMGB0WM2mHnAD1ncAADAuXfmfuuWu2bukDirN/OsK1dVGTT4JnrgsBim6UMxS15rKjKXxpyWPYnIJNZUFlwP470766NOoRw8fpD45q968KmHZ4c5gLtHucsV/x+HoAHeY0cXvzV9qcDMB1NVVcH7JMnZerlcxtPTcej6Y+f7YX5AMKXLFNN9zj4HigEgpoDGIuJqtXr06NHPf/5LJOsMpSR9f9xutxcX5wbB+1hVVpf3vUBGr8FHH5LrcyUg0mdWq5VkgfuqqogmMcJCsyqnqWGv5i6ais13Es40sABAR9Drjao9RSLinCvKi3oYWgVUCFoTDmWypDzoQ49Tf1pV6ZlZtX+nBGgci5zyyeasaZrd8YhZCNMYKyYNwyCQ9Ncpq1gYY2JUJhpUVTUMnbWVMUaQEdFYq/mKD4mZyVkWDCH4GKDrXEoxRj/0IuIIx+54c325vb0NwVuqOAWOkCQwc1Xbqq5xmnOc7naH/WH73XffxTAB7KULSwRVk6RyTbkHOM/50mOuqipxqK1bLBYEaB1VxjrnEsftdrter7XCqj44hGABl5tlEYRylen7Xkv4zpijnwSot9vto0ePnHMghO5jtD4AvQrqFG2etIGIpOK0fmrr0g3g6kq7jNTkOee6rtNrquIYCg7rcWr9W51r2XJ8L8AyTd3Q/TYMw2q1mjMMVEpTadX6Q/pVuje6rvPeP3786OTkRH/RGHN2dvbq1Svv/e/8zu/s93sR0Y5hFdLSm0JvAV0rXYeLi4u7u7vT01Mi+sUvfrHb7c7Ozl68eHF6enp+fv7u3bu3b99WrlEg+uXLl1XVpJT2h9ubm5sk/OjRo/Pz8xBGPbA3b96oZ339+rVzrj8Mh8Ph7u7u4sXnZO2xPxLZu7s7/d3FYmGt3e52fd9bWwEAwWRkuUj4oRJ5JmAfETFJDmEdGdf13eFwbJeqh8oIgsgpeGBumqnVxFpLmESAEzOijRYFGAQNpQi2csGnzFGAYkYUvyaiD5PgElrBNDHpXnFdnUrwvZKEvQ+KyoqI9179RUbLILEASEoySkgpgTM4KTZgYLYpJY5A2BiLBiFN2kwxRmNFmcO6LCpT6pwTQIgxCRAJGgfIwsDMYlCp7DkiKX207/EhCk3p48Y2ZR0SnFJeKBVcZZaU7zd5HknZ/PkWAEBQuEgr4rog2hqq25JZ26gmqDKB2JlOtbyfvD7AROcvffQU4EHw9D6Dujzm7K35t+nf/xMZcPaIeD8NEQrgAAA4JZr3vofImpqZTd2aekluWS/WJ4/g8u0bZraEWr+ckkhKAACCAIRWOGphn1AQwRowKUVCefn86d3N5d3dHbiGalzWi29evXr69Ol6vSKj8uWACIlZWBAJhADRGCfsP4xENK4EEGNc1w1KpCJHCQOHaK3VwrRx1HWDc864yfkRUYyTb2bRKOw+G1Z4Wcts3dCrur0gGVdVAmhsDDGlmGSCE0JiTEzGjt5rf7rqFfgYXF1t9ztbOUsmZdRU7bIiQpRJXn3f28qhIUGIMfqYtvvD/tARGc1sQEtGzKtFzRzIgKuMcyal6P1QW5Oir6oqjN6SQYE05XZmHMfVct2ulrvdAUUOxyORNXUzeC/dcPHkBBHjODZV1e/3fuxDCBy8QOqPh6Hr6roy5iQlNoE03THOkXGAtFmvY4zBSTOCsc2b12/RVMMwHPuxG7wyco2hkCSEBBSttQnEEJjK9OPYVLZpqpRCVVlCU9f1yWYFKiu9XMQYnKmatvVxrOtasAphJAPGSp05q0Tkw2Bl
8p0K/HL0yA0Yevz4UT8Mo/er1coao2Pv9CqrzLJzbuyPrq3Bqn6vb5oGOEoKtq7HcXR1FWMUgKqpp3PPPtUZgcSOjBXsuq5ZLg6Hgxo4RfU1KiqFCcWQ1TG7qWjHkrUkFcglIuFECJxiXbUpxaYyzgBBEtGONS8iy+V6GHrESflP+4zVzbdtq9od2luvp7lcLsMwHg4HizSMncaFYRyaplk0Vdcdjsc9QfTeX717IyLXcVg0VkTWpyeff/5SRDTL96Fzzh27XgTbtiVj9vv99d21T75d1G1V19XSIVwfj2/fXu52O4N4t+/6hJtHz87t2g/DkuXY3X36aN2uGmsqMHbsjsd9Z6lOEYlIleeZBYAiM6KxYMZ+Z42zYityECOiMAeBFABDgMDgxUhKWkmxSBUYkIqFExsUiDEsDCZAw0DGjJIMM2uphSGluCbyQ2fxBFIEI2ytgOgcRJxucRKWWVIx4aeAEGMkAS2XxiTBx+CFGdMQhq7z3vthHIZhGIaQfBImSxwlaT1YpszaEQbW9DdFFkCSBAbNwUtTWYiIlivnhBgB0AAJ+JCQLACMPsao3ZUkgoQWkHS8vR9HIFwsFn4MCMNmsVxUmvjqzxKiUUAKp7OS7D4tswr6YgIGYcwDCdRyphiIQERCHJUokOKIACAJEIypiAwasq4mU5GrBAyQ+hrUONVLlzjF6AEghFEEz5bnZDOBgywiSmJCEGEDIHCPac8j6fdy6/IQgYmenf1qyZUpd76ommZ2JRocKF6gFdj54Pn3+27+5zLgcnTKUb6Xg84HLQ/PjViStXa5XBJwfzzeXL1Tllrf94Kk+SIAxBiRFL4AzfXzF6tKBmmJ2xCdnK6fPHlydXXVLivtrzDGvH37tm2b2hldJ8pAHyKKTGHXbxkHibPGwTgJQRuCadw6AJSGXUWHiMh73zQT4hRiFJG6VunRKdPVtAZm6qb6Ks0ajYo2J+TxdgDAnEqzpvbyppSWy2X0YYYGM029RvcDWZumsZVT7YiyXYio67rtdnt6eqovtW2rUw0g91NpKqm45Wq1OhwO5+enXdeRwXEctfWzaZqUYVXnXNcN6CaZsNvb27Ozs6qqxmHw3ofgmTmGIKAJgdA0lIlcnl6nMIDmbVVVcQwqhXh7e7vcnOi5FHflnFMemWKz1t5rWunxT3/DNCdqtVqVoFiX12WN3Iw0+Bjj0A9KEs4lNwkh1Dkpt9YGTpJbHkVElbl0q6SU9NgUtuUsXJUL1VRwXcy1BgVIlB9XUjH9IRFp27YfBskzkfRJzr1tZXOW7VrqrwCgmPAwDGQsEQVmxZB11yk4fDweUURR6JTSfr9/9OjR119/fX5+ru88PT29vLx89uyZ3lB6XbS5XLeuwvW6mBr5Tbw2RF2Btl2GEM7Pz8/PzxUWChxSStvt9tNPP/3iiy8UudEeVvXHifn8/NzWlTEmcRjHkQAPfdc0zW63SykN41jX9SeffDIMQwxehRsfPXq0XC+EOPjUDXG/3w/DwDzpyOadX0TLweeUaCJw3Hc/TDZKr4XS34wxBKiorA6z0u8JySAJI1iAXDodERGAjXlQlnrPDErpw8EHUCeyMN6jbswSY0yBY5hgZ218UOzheDxmvbn7PVA+q//NqhDvG7fCCWDmyjm1P3Gm/wO5CqZvbuoFWZNSZABjHIN478lQEjVHSUQgVy4+xG/nSY5+fTnUgqiHEDglRDKGECYxLJW4AgDCSSOz3LPGGMjTlwt2IAIKDYoIkdXq7/RDOgvwn/AoeeqHDyJKOaEtp4YPIesPU+fyfvngShQf/D+ZAX/s8KddNXfA5ciisCECEbKVMOMiPXnypD/uL9+9McZZq0O6YjE0CUSERXh+6JKvYUoJCQWgaZqnTx//6leVWsBx9FVVXV5ePnp00V6cfYTahqy+77ecB5HJzZoTTxXzFB21Gmrlyz4u9Vc1hcrqUDMEGkwgagl2ds8Q0aTXoUgL5hKv5K6nlCfZ6am1bQsAKuVagsfiyHXpVBFQj7YIY00koMjW2tPTUz2F3W5XVVXbtoiIJJqHEZHKZcQYow9NW9n8gKz2lVIiNCJye3urUV5TL47SE5pJgzkrY6iLijFmhQFU95mXyuo/tWNVsw1TO3LWVHV3HF68eLHfHb7//nsVsun7vnRhSW6BcM4Zg1lFxFbOEJFKL7VNpQNuc609NU3TVDUiWjf5MA34jHEA3DQWAFhi0dzGTOgoiHGMsXKOdRilNUVwivJ8w5QSy8RpKoOWlaUiualRr6nOJFZKsMZzkOUIFNhYVU5R3xJ86NG6rA5t8kSQwpm3eQYiaBHXVXrMx+NRBaeUf2eMOT09PRwOBnG336s5896/fPlSG96UKL7ZbK6urhTH1v+GEFTuQxtt9dbabDY6DfDx48faqrTdbpXi8OLFy9VqFZmNRQ0K9XLc3e2urm50OJKSpTXRH8cxpuRTjDEai8mH6IPGOspAVJhHm8dqsE3TLNbLunZAkoQZRQcphpB0s8E9PIcx5yIhRNAZ7zAxegSS3Ava3+clej/6FJNPbVsvVsuxHyJ7AaXjkSHtuhGRaO1UEXDunvAoIkmEpnqWwL0Y371ZK2/W1xBRizhhjH6MKSXvY/KhP/bH41GHRWqYqC7qgYFDFpkaDmVK1SYdDcnCvAAUgp8UoetapWoBzSToOENTOQ900fHGgmq/pw1WN/VivUogLOzIpBSIJk06eYjKyuyhI0pJoEyiwPzIe39KAHwc58eDiIZUVVN1Ix54BGYGRj9OV9baarFY6PoQEeDHBxOVb4AHruojr0KJaWavzt0nPiy/6uXOlcd7q/5h+lse/3854Pufzz6YkFgehAaIBkFQyDpK0RPak5PT589f9H1ft1ci6XiMmCexO+dE8YH7b6ByDYVZJBFVMUYg3KzXn3/++T/84pv1Ygk1E2Df91dX16frjXVE0/mrx5qTHT7COC8PEckmWIimIkTpxDB5JqtaPWtt0zTeD8YYlfLRgy2FZMozc7z31lalcyDl0WlqczU4xTw9Pv+QalxYhRkLaGlw6jwpbrhwahRCVBelgbkmUsMwOFsXiUrV4WvbFmEap1O6jZnZWaqqagy+XS609xQECQkEm6bxMTCDtWa/OxLa2lW7rtMEq8pjfTUIAAADSI4IzMgJpgIzWKst2qBZ49TW3FRgqK7d6enpanPSd8Pf/cPPb29vT09PJ3XurDcigswRYOo1REQCbpql3r117RCgdF4RkTFUW6c5t7VTn7TWjdTJNbXVLL9EQrqY2+22aRrB+/GiiXmxWAQ/ljJ5Cc8VcNZ3asW93GwaBvk8V6PwqPXSmCxAqIs2DAM5+/jx477vtfyv0U/MSiwFNSnHoHoahUVVEIIinJkyr14DvkIX2G63jy6eODJN3eBsDrx6xNPT07u7O0UFjTE6llEjHh01qLXkvu/10ihun1I6u3hsnHv99u3l5aX2KbFEZtaZpFN2KNS27Xq9rut6tV4sl8vFYrE2Jgpz9DFGP4zEy8u373TMnIpja69wCP7Fi09c47rhOI5jSLy92++PR/Wy8DDDQNC
gW1iRAKAQY0opMVtEdbo8CZgzsKCII6uBfooSUjppFsvNOoSQAoMWT9PEZbcEIlQsBk50EAvZ7PI9wwhnrqj0z6RSONTsTYQlQQwcY+TAcfQ6MKPrumHwIUUtjKp9nzvg6WvhfoI95GGv+t36vDEOMUrmW2mtIbCq64C1FrIDBoBkQvTJWgtkYoyCwAwxxkfPn5WMIvuVBA/1JTAzlZh57ndkht967wl1tAmH4GPu4i3nVarLNM32ZmDt96ayjDHGFMZ8GMoFmaaOqv7Ge95K/292CWZedvZemaVuE4IIyB+kl+VrP2RgiaiSp37zpA0AH/hg/CcoYX34KBTwh1nvVPyd/JyIIAEikLbzMgChqypEkUS2qs7Pz7d3N5d+LLG8Mfj/Y+5Pm2VJjuxAUFXNfIuIG3d5a+ZLJJBAAQUWq1lksWV66lf0D22Zr90zX2YoI8KekWkhWeyuBU0UUIUll7fdNRZ3NzPV+XDMLPy+lwlWCatAukAS990b4eFhbq7L0aNH53kkd7okJkHxRFWlzNw2NmY2oqZpvvjBD96/uweaF+fgvb++vr5/8uTy6lxNsb3r1SEhM9TuHwtEL+4HL37joNlW20MrnItkHdAi9k1OixPN85EfH6oaQnKNr6GflCOVAQx1EajIJ1mZG58eKz+w5OmHcF2w7wgRkLLUEiacKz4UDmC73Z6dnYHLejweMQwA/kM1k/iHvkWqsVqtDuPxbL3BBR+Px8vLy8N49EXjCZ94fX2zWq00mRP/cHO/Xq+vrq4q/O48u1yzwcRDx14a8SGE+Tih++U4Ta7p1u2aiPrVcHNz9+LFi4fD8S//8i+/91m3Hobd4YAK9zzPzolID0/Qtm3XNCBRqypZIvO+yQduI2agVvoSF4VR5IiqGtMc0zy0A3qmsVyAE6Q8HbjLc8gDQup54LGQ0Q5d44vyFPIMKhEY2E94ZeXoAQY3zLayGWXdaZrI5TnzyCZTSogn6jaTxUxTLgiNqqJUXN1wjBHOmwqD1JeOdtDd8TOmPrRtW0eYAMTD9WCr4EoANniW+/v7aZqatjk7O4sxXl1dXV9fv3v37tPPPsMK/OY3v/nlL3/58uVLkB7mOXz55ZfjON7f31d62tnZ2Xa7ffbs2bDqhmFQs81ms1r3ZtZ1Xev8WCzDuD8ue6LWm+Hy6eXd3V0IiZ0/7u6Ox1HYO8ciIcRkKUG3Ry2RcjIMss3uCyMszUwlE7XyYhZQIUYTNTIRoaZp2r5ru4G9m2JCt7w58EWY2HMRaRcRItZ0UkqpZqTa5PJ/Btf8yHgax3kyYwjazMcZHUfTNI3jPI4z5NtwKIR2a+q2SKk/sO+W01smkkAgZkvSZClFnZtkzjlxqgDuFwR7YXYZJXJBY4pmnKtpqFZU0K58KGzsqTtu4UpPV1Vh8LxRhQAJ4HstHXCpyNYbxESmifgUyagQk8bj8ehcg3C271f52WRvp7WlTE36aGL8Bz9861G/oyBc+yhRXp5nGUDQIhz5+HO5NFP9l2TAWRwqf5hJaRAmZhZ3kscUccSaohJJ03RK3HV90w2XV0/v7u6a4zFZmqcJRkqVxPhjRL6Gk/HUYpvOztZf/PAHf/7nf14Txxjj9d3t9nwDFQgRYYY0HbZjLOfN0FA9/yIgYipy4cysmtDinXmzhUBrhSOD/AaFVSKCq6s4ITOjZZYWgyq5DH7A40SlBFW3YEoJGXDtKqkwuInh/LUsB6NcDXFKaTWsKyAZQmqaxjTXILnQbkMIu/094OK+75vGWy5/NtW5tm3b9p1zDkrLIek8R2YOM/TnaJrCdJwcu13cxxjX67X72olRnGcRKlIrdW3Fe5/qrXRiLCGlcRwxRZjFI2DYbDbf+973fvazn/3v/8efv3r5yszmEsqkxXjj9TA0jRNp2rYVzpJPIQSE81lxuQzVqNVT3KBMs1I1yoTJWAZJVdy76zpyAt8D0D6lZJpjJtyyAq078DBR5YWeF+6mLwIpWHlwmjATEM08T5482e+OlmvqB1aHsYAYJYRviqxaFybSCrKNop0VWdMYIzQIURjGlQOTV9XLy0tMShjHcbvd3t0+IIXF1m2a5v7m9tnLFzc3N3Cuqorixd3dXXbbRkBuBt/BQzw8PMzz/Mknn8QYv/rqq3fvrr33//yf//PtdvvmzTfwx3/1V3+FkQ8XFxeQ6bi5u337/t2bd2//+Kd/2Pd97fRT1eP+cIiJLDnmN2/eIEoAbDNN0w++96lavHu4bZrufvfw/ubO+46V5hhFlPkReolHKSqFEM3YmEQoKcFWOBYzi5rQ+eica1s/J2Vmx2rM8DrzPLPINM9t4zSZkDgmgb6/NE4YfQq5GtK2qKP5R20ndjJi/GgwO2filapSjGEep8PDYbfbH4/H6ZgpGjHGqKe6ciiAGc6vduqKPVkzy2wjZmeM8LeFSeFSnMoWyfLU82jqNXuLpDpNU99zjd1xzVdXV3UmDRXRAi2qBsClTqHGwhUR5XGMVMJZ59hU5znAaNX7ld9evVeR3PHeE3GOPFThTs2ylU0ptW0PdIdMIqkj9wHwkFfsowZX5g+TseVfa82lnsT4w7cs31sf0nryb7lFJjUg+C+vAUtNhY201Mdpia4wsznREE2TiJi4s+3F3d3ddnvx5MmT43G/32HymrRtO84nHe0cOjFzGXqTUkqa4BLgeJ4/f35xcXF3fZfDZLPb27vd5dXZdk1Qglsww8p1fctRPVlZaEGPuBeuOWiFjlVV45KwE2rXB7ZjrgJQ9rKyINDjS81laFL1x1qKT4gHm8bVr1whTbZTdzLiUxQjcfHIvSo6ut/voZkXQmByEEjalx4k9GSD5oNTocjUNA0x+6bhsvnmeQ4hvXjx4uHhIcbopBGx43G6vX9omgYPJFhdSAgOh0PTNDEGjz5EaIOMCkfinIAd5l3jfXLOYVz0erMl49vb27u7ByYR33zxxRe3t7f52uaZSp8Vnn+AV4QuXjZX5hyP+0NqGsgcVaQBrzc6jf6uTA14RHzxuhQPDw+N78hl7Ktt27bNRKoYc8aDlYHnG4YBEDQO5Hn1r1oEmXF3EMPhN8fjEaUBVW2a5vz8vB16NEq1bQt5Z1wDl9oHAj64c6xGTdxpgT9hGyCx6LpOSFOZKYJUGLyBWKShEf/t9/vz4+iIj8fjxcUFesDMbL1e73a7u7u7s9X68vLy7du3zIyWLUDlNzc397vdOI4/+MEPNpvN/f3927ev3759OwzDp59+Oofx+vr6hy9+cHV1Ja5BDA2ggpnbtn3y5ElSFaG+7+McLKaubV/vfou68sPDQ9t2h8NhGAYWubu/JyIS3u0OKanzUhtXIJEmAuadmVmMSdXmENCDK96pqjJ1Xdc4bn1jZlNITEyOWt90jamqsakFVdvv90oWwmRmhLG7hS2Fpe67PPRptRrQRl/dWw0plg4YLpeym2HNjfUkIhZtGsN+f3h42B/3hxDyFNeUkoJLbZKMVT9EMpOZWwCkxT3wMkMD1cOMjIXEJbN5DmbmnHnvvSNVTY7bwtqt2zXvKFXv5enTq5oILZ2NLsRhqu
m2chEn+1uubZ5nLW27XOaJ4ZHJ/S+MTho8wlkil4gcERPFhZIgZ5W6LHatiRJhVv23+8iPzX79Ov/ZF+NlQqzlhn7wLkMVgai6v0UQVl8ktJhj9I9Bwnp8CYS1I8LHALEUIxIhEVCuXNtcPrma5+nqybPXr18z7duhD+OxbVuxxHLaTFT2sRSyKxroY4xCLMR927z65NP3b9+tmzXs1MPDw7ub64vLbf7uSkmUSyr2GIh4lARb4Tfhn/B8TdsK51QDuwRKQEgFfBlfWCk5ISSwEsAsQGLaNE1apC84OZKnap2ZeUlbJSI0FyGdmscpzqF2+sKZ+YUOMO40AoIQQtN0ZhzCXFBTwl8rKWwcR99kz5RSGseYUipjCY4ppc1mM42jaocRDre3tyklJ839/T1YkbvdbjVsjofpcNxhKQ6HA+DKeZ5FyPsVvkjTdJpsF47DMFByMaT6lbfbi/1xfPebm9X795eXT0qrcXbbZ2dnWB8v0noPsHe9XjtmTN0IITjBpmNWjdPU9x2GgFbmDhbZKNWaOhVVbdgXOAy4JeAKH7ca64nXhlKfVCgYP8SUqhAPsN9aoQBAgiupjn8cR0wBgqYjAhcUxeEO651FBRpJMC4eoZ4vsti8ML7zPEMWeLNe1+zEOWfpJH1VRyxjbAP2fGVcw6nf39xePn3S9/1vf/vb58+fY5NcXV1NhyO46yC9o+QfY7y5uUlml5eXMc4PD3cpRTPbnK1++9vfxjT/0R/90e3tLaS++mE4OzvDVe33x4bp/Zu3Dw8Pcwjb7faz733qvW+7Xpi//ur1q1evfvGffg5soO8G51zSwMzs/M3N3WGcNpvt/jCT+Gk6xpSYIemI1EdTSkoOyRezoCMST3Hf962X6KY5hqgzUgcRaZzOlMEzRzrP42Hcp5Scd957UhUm45NnK8kfNU3TD20HOqH7kP5T0jvO/GEieBQzy3OJgh4P093d/f39/cPdbhxnRFFaFF7NLFFNYcH05ApApg9yuGVDCpEx+SYXIGLI882I2Xkfw5GZieDBUwRcqZjdMq9Wq65rYDeapgGUUreKFZWY4lzzN6oHl2tgtbRIf/HYUq3jZnRdSVHJrIdzC6n85XriJgLmQS9A0zSaiBwJu2WdOL/3MRpsCzmO5e8/Pmpc+2h51ciJfeR9qcYi6Dwv0bAupGo/eMs/igMuSTA54tyVjxhhuXbFionzrWncrLf77T6E8Mmnn+73DykG37Uppg+yey384WWAn4ouKDPHND99enV5eXl7czcMAzg4b9++e/78+dWTrcU8+11NXdYsTKiKl9U5lVeliBpaKQhVjIWINBFoJqhkWFLYxJqX1DIGF2IUiKw4Q0wRrtHKnHaAlrX/pObW5+fn9/f3oCUz83pYgaSKBem6DilmXRARwWRfKV35IaSURhCOcAH7/R5tKqgp4qbEmP1xSqnr2lCm2IYQoP6/Xq2wn9q23T0c5nnebrfim2meiWgaQwz38xymMQCl/MUvfoFBAsR5xvvZ2TqE5L2cnZ2/f//++vZus9kQZqGIMzUSPd9eTs/Cmzfv+n51cXEB0kfTdBAfvtxeog0MJhvA2cWTJzHOh8NhNXSQjxCzi4uLGCMwQC5lKgCYiFRCGaOLbADO7O3btwg7drsdKGzr1dnNeAdfiMGOwG+JKIS5xuxmhsCrRlTVSaMUwgXtqHToys9CXnt9fd33PVJJaPsR0WEa7+/v1+s1mEdIT2tkgGtOKcGZWVH/wAcBNF6v1uM4Qjrj6dOnKKB0TQOeF/YDNE0rw5EK5+X+/v729pacSKHHg4Q/juN6vQ4hjON4dnaWihYNFvb+/n4YhourKwDmRunps6t371Qt/vjHPxahlMKrV59Upt7t/V0MiqDkbL25vLxsh56Z265rfGsaRcRUpfGfffb5//a//btV1xJRbvRiipqmaRrnuWm6aY5kAsrh8rnGo62qlLLljXmuO+ERaNt2PXTBY1TwYXcMSVWoqOVQcGQkogUUaRvXNF7nEGNo2wYTcEval5+R7Xa7Xq/JiQg5x8UauGJVOKWYlZPNVGdmDiHNxzEli0Hv7+/v7x8Ou+MEoWly8xQrkzelFBZAdIGFuaTTygyEncwoiXp2zCUxNtZkzufGtpgSaNwhRCPeHw+b1Xq1WhkYKt7DR65WqymGzomIzGH/+fc/G1bdGAOeo2qfqQQTcuLLnnhYIiIIfkuhJ4TAnGdAMWcJd+fcPE3omG6aBma5bVvvIW6TdemXhZjjcd+2fpryoPSmG5IZVqvG92amC8Zi9Sx1k+Cfy8umx2nx8jXLN8aS9Fsh+dpizN1iE9pin3zo1OgfPwMuh4CGl9l4ZOWyFLAEO2navlutVsdPP/3szevXr19/1XfdYd5VDvUySKkpSD65kRWNSRHp+uaTT16gTDUnG8cjE93e3m63GxYBI8Y3ng08WKv4QL0X+Z+A5gt7ra5p/WfTNKgd4sCugtMSkbbNeY8VlXzsA6SnUOSBI0kphTkxixNpW48kCddpZWzcdrsRkfFw5MIDhPUEDRUBIMa+DsOQ9Te8DyF1nUf+pEXZ3Hs/DANKaK4IGO12O0yagoEW6Zsi0IhowHufVKf93nuP4RpEdBinGKMpT2MYwyxJmF3bdyJyOEzv378/HA7v3r3bnK3nef7Rj34EYabNeoAt+tnP/uanf/SHWAeNSiRt08eWnj59/u7m/vrufrO9/OSTT96/v8FlCHsU/5BTmhk6WGKc8cVzSM6sRYmCc2ODom0GfnGcDm2Z/EjF2UiR19HCynbOoR8xFaI77IVbDFqA78GWwCgheNBU0hTAvFT4z7gLtYSMZYQvwY3uuq5pupQSckQ03SIthnurZemcypuh5KyqdUrg0l4gCAjzhLIrZU5WwNaCwx6GYRiG/e6IPp+U0rNnz6Kpa5toOYmHonhKabfbAa9GKoaIpKWsTNk0DfjMyYyZLy632JarVX91dYErRyMTFnC1WkFWDCuc5sTM7B3a6GOMwux9++b11y9evPjtr3798PCw7p+3bRNCOh6PZ4c+ET/sj+M4xcQxakiUUGshNSOzXOxnEs6tECTixbIibKkQKZcObGY+jDcWIztHwUS4dU6EY9JoViYLEqkRKYuZJVVLeXB47gJH5GfCwibipOhFUO6vzcpPZKkU71NKKU7z4TCGOYUQjsdxvz+O0xjLICASRsxnmKJUHQNTgqHkTH2txVcm/NdZIR4vD3oMADJzjIlIphD9HL0XDC9g71zilBL7LPm36geEp54laeZSJeL6NNEiBay70cxYrc5hrB/qnLOi/0ygX5T4FfeL4NSz8T8lwdUjqmY6d0hpGNau6conEnMuTlvp1tJCff3ASS2vih7nxx/8hj7Kkj2Llt/jc/IXh7KK8XfVOj841T86BP2o03wJ+HKm6YsSsfHhMJ5dXM5hDCG8ePlyHA/7/Y4eJ820uKn8EUoAdW/H1DX+s1effPPNm2mcnXPMzix9+eWX5+dnZ9u1JSNSsqyNgD79cnIq1HTWRESPKiu4DJhCPLRhTr7JcVA/DAiywLIhIufc4XCAj
UbCykXbMqXEwoBwmVyKyQotCIZVSqsoHAaUNzJJJMaaedeSLUg9CFQhhjCOY9v2+I649zXJizG2Q09EIAHt93uwiGs6Xmswqup87jQFnBBDcNLsj/uma7O+TKLd8XB9fT3PoWmah909hGG1CHJdX1+DWPuzn/3s4uLiv/vv/jvne0l2HOfbu4f/82c/f/r06eXlE8Qi0ZScd2334x//+Pbm7ubm5vLyCfPtX/7lX755/a7OFAOJF4s5zyM2yWro+r4XsxzipGDqyBrKzPMAV4puVKBAqZg2y5OMPanNMSxgfIuWaeRYdnyv6oBd0XlGOou1mudZyGqCVW86/lldOMKI6+trdNxeXl7udrtxHKNTMMhSSvf398+ePUMLLBEhLQZ6jOADZ5Mi5HJ61hZBvZQDF5NSAiUScwPPz89hL549e/b5558HTfv9/v7+3srsQkRvqfDdnHMXFxcovkDO5erq6ub2Pfwx1Ch3u92vfvOb8/Pz84uzeZ5jnNfr9bNnzzabTdO4yiLU3JeyVCPiEGKcRi1bkZxLZNMYQkivX78dhoG92+33qpo0BbUwx+NxnGZVpXGOqhYTKLjpcQbMaPwTIyF2LOqIiBonuN1937a+SSklo91hTEqJyDfKSYhVE4mSK3oOQiykxOacNE3jhBsvXd90Xbdarc7ONuvNgKiiVKDJyvw7M44pQVmdTEUIFdY4xXEcdw8HDBmMMR6PE4ZemxmEPpBu6gL0ttIYkm8355ZkHMuJ98ZEBeRIKbHQAppWlPVCMuAWzrm1W0VNzNw0jr2PMVIZdg4dtJQSsTPj0hybXb6gQWvBUa7207uM+VMBnGOcNWc1eZ6plumElEnLGYA8TcYDZ9hEF3qunFmZBjVyW+Sd+a+PYfml/1762vKmDyHl332ICGmWArOF8jN0IH/XO1kXwyT+0RzwiYqVY7EqjwW2Wv6TEAHphVMJF1dPRfjp/uUcxr/75S9jbGghz43DHk+AWf6eiMQxkazX688/e/XXP/s5M6Oi9rDbX9/eDcPQ9S2XgXpmj1zsx079g5MT5UKvL9LHMWXRZniy8shlEjJseoWpkV5gByd0yttpTjCVHBoEIuwC7NSu68I0enHwuFRKC7WbBTAaHk4wb5umU9UsAyIMSLwpc2GRk8GOIyHDOSvJogaJTeNSCkTUNI2mJOydc8m0d00I4/EwKdM4Tilp12d/D54w2legbYTca57n+7uHu9v7P/mTP9lsNt51ZO5nP/tPP/hBvL8/PH369PLpk816413z9ddfHw8jSPI3Nze73e5nf/2fbt6/f/HiBU6OO4XUP6UwDEPrG3HZ06jq9myNdDAlEzayJMyNF+Ksb+XLbAwu84twj+BsXOfqk6ll3kCNpWpjMe7OPM/7/f7i4oJKTUtV2zbzeGMR6HZZOsDg+HGXAXXA3ByPRwDy0xi4ttKSvX79GtA3lUo24gmwAeqmlSJm5MpkLSolSS58b1Q69vv90DVYwN3hYMwhxl7k8ukTKFt1Xffb3/52HEfkqUhtUYDf7/ebzWa73SIguLi4cM793d/93ZOnl0tAG5vncDhAh45Mp8Px+u27+5vbpu9ECMj8arXqugEkLOfc8XhcdZu2bUV9VMRGgi8yF1m01bCZpxjmxMwxqCmHlOaQQtBMb1YQU3NZquATTGTMTjjVgbtN06QUmsb1fd80ruu6xruUZLNZXYZzdvKwP8YYiZKQeSHxgilJROaFWcwZN65ZDb330rd+s9ls18N2u724uNhuN03feO/RBIX7Dg8RMbYIslbHkZktpePxOI0Ba344HHHN8zzHUAZ4aGJ2OdO17EKS6SmnMaEyxI0pW9fCdiFHLsc6TJjex5r7zrNRZQJ2qqoxap8omWpUYVAlPHYdgDHQU8wM1f1lgqSnKqkxiWmeSM/E6XEhFts1hEC5YhJQO5AlzJlneDTlUiFn6KCQiAMVYzMjS1KMpJk556sPriiClP7Dil3zR7Tn+uzXr7b00x875voMsprJafJEjYAfOxH58CR5mtF/aRvSdx7MbCacP1WZWXIqXG2HJ58bNLfb85cvXx53D/9n+D/nGFwR/louwcfed/ENlYVJ7enTp5uzr969fb/ZbGE9379/f7ZePR+eQmFQclxcz7yg7SWSRd2+fgtsI3AQiKhpGnGEn2FGkSRxEVIAD77mqeM44gVmxuIhlaWllRN+1JWRDFYEynElFUauWwqnxS9RF4QHrRbfFt3lqgtUpHC+qChqIVyQImMpIt53wDzZIlh6U1ayNOTWzDxPcT8eVfXu/gGZNwpFt7e3AAnCQph6vz/e3t6HEP76P/387fXNdrv9V//qX7189dk333xzd3s/Hid2vl+tt2fn2+3F4TD++3//5//hP/yHH/7wh9uziz/4gz/4gz/4g39/e4uGmarUSETeC/jOYZo5WRin+h0Rc8BZVr+F+buuDM/Q0mgkhULFfGJLNU3DxDWvraFSbX1eZp+1/ydvDMldUrEkDb7MgqwFMAgrIoq6u7tbr9coMzunKMAfj8du6BFJXF9fg6yErNeXCV24++U5El3UompOzAvaRCX61Q0TQthsNs+fP4fI9vX1NdDvTz/9dBiGN2/evHnzBtd/fX395s2bruu++eabzWbz5MmT2/fXKFff3l0/efLk7u4OXwrhxTRNaGLebreQRDCzOGWi1qzz4ZAnGPZ9f3Z21rZt7JSRvjhBTUFESG0cZ9907J2SYYQcWGnTnOYphmBhTkE1BmUnTqDWZOzY8MVRjhfnPVNIpNGIvXMaQ9M0m2FVvbX3nsWNcxznME9xmiYmA6+eiJI5cDtaL0LGruu7dr0ehq7p+36zWZ2v19vt9vzibLVauRP9YKn4nxu4p2l6eNjP4ySWB9/O8zzPYZ5DjBENfiklU0YIaWbMJwZMPmBHTTgzshfOIJtEhlNWJnlcwvxge+AHcT6GgGvWRKqa1GFvowJS4ss2F+MSJSv9Kdl7IebLPnjpaYQoRq08ifqccuHna2EnLQ1+xpmKyuzSC1qenjmHEDBHtR/Wtd63LMcsLefSv36Xz/rYsyxd6be+ILvq0ln2wcL+5z+Dlf5RHfCjEUmnT8lCXlarrcpkRkKy2Z4d97um61599lmM8y//9m9+9au/8/l2/q7FOt17oqSR2TG7fmi///3v393eI/Z0zt3d3r8d3l9cXAhZjKnJM8VwBqFHIU/ZppQ++Kzq4Uq7fRZ5DhN6UxkVL+R8KI+5omEJkwFCY5tnq58+FycPMcPXSLKxIx2LQixGRIrgA0JROBLgn3D5qup9Vo1GVdKYUBK2MssWc9phylUVOhsphBrf4CFJKZkGfGUwfaLobrfrhpWqBk0iEmP2Meybph8a7zDYR4sWBOwEkrkUZk30+pu3/8v//P/85OWrTz/9FMICJu6Xv/y7h4e9c83d3cPP/+aXr9+8OxynP/8P//vr16//x//xf4RUIRazhiwpBdVMjksatptt33ciQ+MFqaHI6YvXpw60eSoA8tIk1eTYypyyxGRqpeh7uk1W2M6IMIC6L2M1Khp+S5sCqzcMAxqL
qaTdqrrdbuGKzMy7Fukj+rOB62IA8H6/v7y8BEn77OzMew9yFvabFZFnvAvxQShy4iICah7+CQe5Xq9jjFdXV0+ePEHxkpnfvHkDX/uH/+ynX/zoh89fvkDQGUL48Y9/PM/z119/TadWFtvtdiICte3dbodiOcKF92/fqipuxzi6lCyEyTk3DANmKB3SYQp6/XB98+6mWw1PL59uLy622+36bNMOfYxxf/9wf3s3jqNjoI7knJtCnENcrVYhJDNmcoliipYsmzDnnBKCzhRjEmq8cw4TC1iZWYyRiK+GoR/a9XrdNFltmHLLLA+rLlpE311eQ2Z24oVXXeuceO9Xq/78bL1arVarfrVaDU2z3gx930uDqO5U4IBbIWYiSSkdDuP9/f34sMfGSEmRyYUQQkgoKpmZknA0IlIyjDGsvkeppJvEy1C77kBmpgXlR4nkZNzyhqygESwyvGzjvcGNgSBpaZ7n8/MzBLUwI2GarW3IvCmpWi3YsWBk8LeAlOXyhGghspGj0lz/ouKMa93EucZ771wZKWhSk1cywYqlEKf5KEWDaPl4ghDwcTL6gYemhZuEX6g+dfmuj7/RB7/PX8pImfmR2/r2pPGD8/zjZsDf4oOZiEiJJauUElLvRCRhDmbmvYthfv78+R/98R9/8803NNViQBXV/JZ4hEuBmdVMDADU8+fPnz9//v79jW8bMwthur2/e3h4uLrYmolZ7V7P58+LZbknvV5uXR0iUjsV8GOMUDOFi0JUWJMtq3LW6YR4o5kP5KCQVZdzZY6QK1OqkXit2MVy1GGrsK01wNYi38GlE6bKg6SUQspSlHhZ27bA4HGRvsg66pSVpZ1zFQ5qfKZm4PeasqO6v9u9v34fY4opzx8Ei/sYRnPknHfM3vv9eJznebu9uLu7c9KMx7nrOlX64z/+Z//T//R/++KLLzDD6uz8kpn/9pe/+l//7f+XnbRte319Pc/zZrV++fLlX/7lX6aUzksPUg2KMeWbiMBWRbIrC2FI7xwoGFCWwMp771HZkjIo8ERNz0phkUjgNVWzhfI+N/5qKTtVhwrkBlZjkQczF2XKeoOYGfScu7s7IPNokcJmgJxySokswkXtdrum70Tk5uaGiLbbLTwZhKNLFJKqF88M8PL7ygyoqAA25zzPOmQpDzM7Ozt79erV1dXV4TCCJffHf/zHv/rNr1EAJiLMbcQJX33yqar+5Cc/2e1219fXf/eLX0J19c3bb3796183ZWS1c+77n3769u3bu5ubcRznOaoSKBH4XCwF1gpyaS9evHj27MVme7HZbNjJPM/3+93d3c18zHN/+rZryzwSM0NEO03BOSeNlzkxk4g614iAxpGfQSr2N7eCmDrnoAANdUxw0NrGqUboSokjSNBII0CGU8pK0czsKMvXdH2zWvXb7Xa9Hvqh67pONFVwohr9GorBCqvq8Tjt9/vj8Tgex9pnmEphuLpSJcoekDO8wfKofYUX/njhMDKwhx1qC8jQPsrJWEwYYprk2JxroKtrZmrmnYsxkLiY5nHMHUcppZubm77v126DacH2OLe2BCutVUN4GRzgSFlQKFp5lLBzUoh1QZawhIiA1k52UmA1MxQ+cKMB2lXviykOqloJETXnMTMjZRamR405ixzsQ+/7u4+6AvX2uYwsy98r/S3HPzoEnXn5H+QH9asVCRNJFJ1z1DRMiuTp6dOnn3/++d/+7OfL4KWmaMt7+ejzRFLSxISpGk+ePPnqm9eDd67xZunh4eH6+nqz6kWEDDDsI1i/Ro4pM1Q/5J3TIhogInEZk4xzgF/UMpUBGSoRoT3DzLquC6GUeEWQK9RqMf4LfhBIrRW01DINVEsXEKxYRY/hA5C+WIGac15bIHEtZV3nHJU9V7HcuoFyGUYYoHffod5pzLzb7SolJBb5C8tSIZrScbVasTAGKMWg6GHtuqEqdzZNA75GjGm1Wt/e3nnf7veH9zd32+2267rDeASAaWZdl/UgIWiF+iUzQ4wJSTyA/ZSS5I4kLUkMidA8xRAz/xnLiyuBkBDCFy2letzlojUW+tVQ9908z02Dtc2mP9euknZ9pkHBEQKNcM5pnJexgpbp9Lhll5eX6DzGP8Haw9CFtm2nMQA+6ft+TpnZh13BzAh0oAaFyfPLjYcyx8kKlL5JMPaZGTPqvfe47N3h8KMf/WgYhnfv3u33x4eHh6ZrLy8v/8W/+BdU5MCI6ObmBtvPS8YMzs/Pt9vt2Wp9e3v79ddfX19fIwYCQfrh4QEf9Pn3fvD6zdchhN0ud4e3rVfV29vb+/v7EML5+fmPfvSjP/zDf/bjH//hxcWFkkzT9P7du3Ec5xTHcZ7HeTweiQjaUszcdd39/T22+vF4bPtVjZQRlYpQslPYISLCkp8LMlVtnActdRiG8/Pzvu9jmokGfDUUp41kHGci9YP0rYf3hYQcdj4c8DB0fd+CQmilizqloOzMEnOuDRVUKUstHo/Hw+EwjfPxeAR0wcyZllUclarCXyZLwqfUvD6tJ2atLX3byftWq2UEIb9c+nts03hpYPPjg/KEiEFBloWIjsfjdrvBa25vb8/Pz4f1SlMWIlx6r5oPQAPDzJgQzpIagt1UAsdE5fXpsQJdNcXwmks3Vr8gTF9KSZidcwhntZTkcA22SKN4kfWqaUrJl6FVH5yf6EPv+22v+XZe9AcREv3Ogx+Du/9EbUgAWok5573CuUXAEyWnZiYkrBZZTFmazTzN7C6fvvzJuzdvbm5uICO9Xq12Dw/C3mISZuUSYwqZkQmrGqsXpq5ppimwpxfPn7x98+TNmze+bbyTVT/87d/+6urq6uLiQmMkdmqnlWVmxmNGyXk42rp8p5YyM0tJ+76D5BCY2zWCY+arq6v7+3vsoTkkI0ma+SS+kVR4fvWJpbIzzCyFSESOxbEc94fG+RSiFxejIug+Ho+wQap6PB69NKYkzmk0Ed80zTjOMNZt37nGhxRrTszM8xTJRCSDbG1XdK2N29Y7x0LWODGzw25/dnZ22B2H9cqMTZmE5ziGZDrvphiSWtN4nQKzi+McY+SeSU2U58PUdV1QI6JpPDZNN7TD/f191zTTNHVNE6YkjrpG5hi7YVifnSHCYKNVP4jIbrdrO/GuaxrPYkZpTnO02JCQotkDnX8ts0UNq64fhl7L2Mezs9XhcDQ7jVSzBdnNkjLzPE7DegVumm/bOcaQUr9eT9NkrBjbzJaIqGk758RM0b1zf39fm4IqhsEl3yUINZN4n+0RuniBYWy3m3keh6GbpmPFNqpmHnt3PEyYOVHCFxDNUrXjGGqEt+x2O8t6FxGWCyk+LgMKKkTERTUspTS03evdPqw3fT+gr3q73r59+/4Xv/jFN29ebzabTz755M37dxDF7LpuaDtAC9icQWczizHPrjh/cnbxdPvq+5987/Wrb7755ssvvxzDOIax7fvb29vLy8vj7vjs6QvIeP31X/+1SR5adXl5+a9+/M9+8pOffPrpq8urK+h/3T7sj/e3Dw8PN3e32A9UYgXxfn88PH35/OzyYhxH1ImjKs0p2bF
tW9/wFKITp1kn22I0pnxHiJmFRGia1FhUoxNpO3+2atjmtt10XROIiF0iU05t22gInevCkCVdQwjTPJux916ZmsaBi+C8SOvMUdBgYk4kUSIlnkkbT6wmrKqtlzkGL03jWo6H6f5wvD3EY5iDi6lJKVm2iEykIk5VWTyKqI7Z1BJKtmAaAs1KhUqZYo4OhZlZE2tSNDp6TiwiTpOZGHvvmUkLPTPG6Jz33qc4iwiZtCQtiUSVJvu8pHScQ991+8PRSdhuVtFGtvTNb399db7idpNSIiNVB8MlIgq5y2wtofKhzMRkKQJwtnmeiVTYYopFpMgfj8dxnrz3LA0zO9+osLSdSRuVRYTNERGLixaJKaY5hInZQgzi3Gq9Vm40BCP2vlHVGGYuzMdl+svMjj+kYVL2nbKErKoLyz9brtMR1zYnZmYTSpZwL0oKTsZU1CDzUU54yh5xhvqvf6o+4NOxaEyqzh8ZpxiJiLE5Zurai4uLV69excOPv/zyy9evX7e+AfHSKEkjlE5ZtekpfxXhpIqQf06x7/unT59+9dVXQO3MzHv/7t27q6urSJSSfVCnsI/A7Y8PyZ1RBEcojkIIM83olYTaBi2aRD9wAEDhwKbhxag7wCnCecoNTGelQ9fP9UXwSMp0TPzpYb+rMluhSA0TAljnUkpF+jHnambW9Q1CLpclM8U5t91upeguTdMUYuBxNGWiRJK3ct/3t/evp2kaxxForQgNw9C2rVmWSEwp7ff7+h3L3RGc/+HhobZmYVWRVqLnGNfsF2MecPR935CEFCU5IoLMFpfBFTHG1nllldKXxcxEAqVlrFuVREaUDQEKEUHXuIgAJsW61QF8WLC6mGZ2OBx4oZlXN8YSsSCXi3+HwwFqFavV6nDYQa/x8vLyq6++ORwO2JkIj7BD7u/vqdSxsIaVRlvBj6y/xgyoIy70TZHoV3Qdp62vD0WfCz7+8vLyzZs3/+kXf/Ob3/xmWK9+8pOffPb9zw+Hw/v373ExzAyUL4QA/SzLdUSDqHXb+bZtv//973/++ed3d3c313dffvnl27dv37x5g2vD9Oj1ev2jH/3o6tnTzz777MWLF2B4gbswTdM4He/v79+/fz89PIzzVAvYJILVcM6FaQJX6+bmBpRpZk5kKQTvfdd1MenhcFAqkx819+O40io2TRNLI46NhEUAjaBqWBcQzqDiWGyKJ11VhxxmNSacUqi1G5+1SFOM0VlOtZk0mRLl0eZTCnVf4eMOh0OMCgpFTGEBhFp9uku4n8wwSJAAnAozRgwgI8/7kBnZpCmDLI0zSGlzF2JxJJwvA18Q569xpEOOKEtSFccYNVnuuYixb3vTeZ7nm+u7s6e9LtpqlZHxLng5pW9a84BtTSkLE4kI02mWNh4lzi0YudDDhclMxKpqaszsPNLfzD5JSZkZaJ/JaTyJftRB8/c4vqtSq7+jmwh3rnzBBflZl+n3h9nw0tfUUOCf3gFT9sEfpN6c2edeLWEK73q9fvHixf311fF4HMfx/vauxYABBPXCVOsKzMQGxUpNSS3Nc6aGtm378uXLL7/8Eo8rbMqbN+9evXoF37CoBOd1FKMqZbK8amUSo1ovNDPvXX2oVqtV07XoDEG7USlWnQokdaMvyzxwh4CwpBCeudRr8eLa18HM8K+pMKWR4CrZ4XAAoWa9XkP8Ada2nhyGO0YlomEYhlW39BbOOUtGaiAzcynRIf8wZVVlJ6o0hcn7drfbpQjtgr5awxCCamZlE9F6vfZF2b82HCOrw0Sg3W43xwjKNPxu5WP3fe+bCiZntlTf93GanXPzPGKA3Zs3byD7db69BOyPT8dtSqX3gEvFve/7OkvDOTeFWYs+ZTZ2MeJi0HqExTk/OxvHWYrcFSYr4CNqRXMZD+Fee8n+soozH49HZvfmzbuLi4sYFb/HoEPn3DzP0Oh+eHjINzcEaFODFI3ACL+kYsorZ5vL0ZRZT205tPBdY4ydb3D9IgI62G+//uqbb77puu5f/It/8ezZM3wQtM+ur68dYShQDNMsRY3EMleARSSlVhOFObVt++zpi+fPXv70pz/d7Xbv3l2rapoDVgx5ebcacP5xHHe73ddff/3w8IAOguPxuDvs0zRi88/zTCKo32MjYfGfPXv2N3/zNxBt5qKyEmPsuh4RcJhDDb+szN7AhmRmdtw6SGhp3/fr9dB1HZFCzVEtpTKJlpk9C3tXAE+KhathZuadNJ6ZvbAjphTJzHmfThPpg2hjlnA7jLRpmhR0nuZpmsa5BK/MSYhKrweR1nYMZmMmNjVSNHcwE7FUbydmKlalQai0GUXNAQRlwV9yzmHWkwid0PhFFab6SKIk3qUcEBDMYwixaVLXtFHjNE3roTNpx3G8vr5uzy6YWcSfxvAwm2Y8jyiPqbBMV464pymFfMl2soHAEUt6k90YGi5OroOSGbE6M41xjnEmopSsaZr1+gzMUV/mYWupLrvCoKZFsZw/TLQ+6g76HR6sftPyei0MEioV9/Ip33GGx4W/erbfiwMmInQlUWJmZ5TIvPeMRFbJRImIHa1Wq+fPPr25vjs7O9/tDgjrykVbZgIixDBGzIdodBqhxk4xhqZxn3/++V/91V/BpDLz8Xh88+bd97//vaTqHi+QmUGouq7s8pboQpcU9lHktJt3ux06i4BqornicMxj1LhIAxKRL2N9q4eoVFU8OfC7VaYY9pfZiTA8YowRRVBwSmvt04ooHQmjrcFKySp7uNbN89y0zhWdEHyoxoAfYNxxebhTYQ4iEjUBTpnnGUPcYI6plG1sIcqIiAExaQih71eS+/lcSgkOBvOF5jKyIhW1GnDNqHRcLDIMa9u281m7/+7uDh8hIhfn53AMY1JIOxWHmlsm4MNWq5UvI6d8UZUCdRy2u2ma8XAA1I+0T4uB92V4H3wD7uCjaLc8eKCjs6XjcZZSAIZTRxPa+fk5M6M/py511qhqMssUZCvcZe89PBCksmKZdbF0q1h/bEgt7AF8KEqzNzc3mZx/HBEvgrE1p/SrX/26bds//df/PaBvZK4QtJrn+Vd/+3dN01xsz3GFEIiIMYgI7iBaxy8uLqCQFUJCOf+zzz4zM0shlYPZXr9+XZ+Xm5ubm5sblLStNN1pnL33h8MB+rKgbWfJra4zs6urK2z1EgogCEjjNDnnVqtNSPfH47Ft+hrIprTg3HHWP1Gyvm+992ZJxCWN1Zcj/GryyGqpd1nVkKYBPxCRMjeaMEFJ0PyUYTlWi2CDEhERa6IQ4zRNx3ma5ziHlKm2hf9RuTKaSUYkC1svxMLCjr2DopeYGS27a4jo5MrzZVTfIywsp9owESHPkUIC5VxvfjTnw8yIGE3nXdNqsnmOU0iOkzEdj+M0Td633i8AWxOg3NloFjpxzuOZIe0gQiIuxYw/4zWqynyKJ5xzkGFPycBKKeuTiDSmuSadUKmsMpbVolrtdHrcvLSw7R8PiyT6KFutf+XSaf3B75fOvi6yiNRTPj6h8EKfanmef3IHXK/DzKBRRYVYmArB2LWdmc42i8iLT169e3f9/v3N1d
XVN199LQaOe1qcsMz4BUdRMpmeiMQosTrnPvnkBTorKor7/v37Tz75BHtsGRkt14I+CojMiBdcvlAnAJrCXp+dnYUUSTDpTCpAilPB/YBPpIXejPSr5r7jdGAIkzP5tlFVY2q6VpxTMmGOmqA8d5zG1WZtAacNq1UPSdUYIwkDwaumuQa5VtiwqWjxZ1+SiBatftjBaKOC/45TmqbJmM24aZoUFQkQfHzlYx+P+e1UUNzqWVFV9d6Dj4NRDURZQGaepr7vNUYv4kVSyI0NTeMcsbAYmWdxrSPSh4c9Hmm0snRtix6SNIccOjgHdTpcG9xYzrnnGUqc6LKFuZmmCT05tF7Xij7wcDjalHUhGCkplRLDMmCXBVUEgqBwpTc3Ny9evICCNwb/3d/fV04ZYjWUV7z34mi17lOK3vm285wy3A3WFe4ObhO2lhXyAc5DRNBnBioLABz7zRWufpnP0UzTxN6LyNWTJ03TPHv27P3tjSZt23Y8jjc3N9vt9rPPPvv666/fv3+POAYD4xDaz/OMqMV7f3f3AKi8aZo3b95UrbHj/gEELqxPSml7cXE8HjFFuOqfMzOGEDji0ebDNDZN470wOxGXkjkn7Jp5iiKe2cWozA7WWESS2hxGRE7on8aTVYHcYnAYIaxZYifr9SBC8zz3QwcRIxESz413w9D1fY8wBUUGnCTEHDQjrlJLzjkv7MWpKllafpyppSQ1J17ESXHKD2Agl6sYZicMo1ib3P5LRGxIfbk6VMM83MJRSuk0HbdiuWZcSp8CxwEjZ6dMhuD/3Kn7Not2mZlmaStOZNMUui4JsxpN0ySkTC4phZCYs6o2M5MJySkmEBFTEKVPc1dV6wDZfKlL8JkowxVq5rxHnk8Z1qbs4k2ThhgjxlF477tuSNGYHD1awJMY1rcaeWauA33N8qC86mi/1X99OFWjvLfmvvWNxd5++6mqKf7gwn5fGXAOJbLzc6eEncs1iaomU2b55NVnX3311d3dHTuJIYLIUzLXGr9IZhiaZiQzmVnquk4Tee9fvnwJDQF824eHh5ubm+fPn1PZi/XCHsH2Hx1LZ6ZqqlmbVxwjXq4RfTf0+/2+bfqlq6PCv4exWJ4ZLtm7lkzUTlnaojqF8klOaoHpmZl4N88zO4kxMuvusMfTXnsGVLXIa3hmxgTWlFLbtuM4juOYUuiQFDuH2ThauMHH41FVk5EqxRiNXYxxtz9gKUCmKDvc4MmaplmtVtD0AQDLJXfE0TQNKuWp6GOjZxR/bdsWCvU4aVMk10tAY7jsly9fXl1dzfPctW0IwTlp29ZiludsGmeWBTcqA7xpGlxSyCqkWcwhpWQpoS+WMa6x2LUaQDjFnJYE3FgWBdf6+FWih5c8PhlBFaoSm80GE/Tu7u7wsiriDR+PPI+yVnMsVToPJRMogmHCHbJ2VDqA7lZPhs2PrBG/CSGAt4UYqB9W0DNBu/YcQtO25+fnZ2dn13e38zyDjI2vs3vYa4je+3E+3t7eqqpncBrMe6/MYZxi02CYwXQ4mhkesbvjGCGw6vPezlJiZvO7d+/fv6+aSkiOEZvO8yziQYmAdydhp863DYB0MKGaprm/vcM9UqOkWf4+JTNW733frVAdmBfDwYoDJmZjobZrhmHwTR6NwJyf667rzs7Ozs+2eL4Am01TqLs3Ft0uzoKUQONIRFiKcc/dHdnvLpIkTkbTNM1zjEbR1J1onmLERpKpzyRmGg0ds1QGwhFnzC8P3GUWcebUjSHQwvviPewyP0O4mFa1ooicg+Z5ns2oKVM9vM9aeImMyz73rp1DAOTmmWPUlELTOmdW+yHNTNiTfIsfquLPIhLClLLwJMHK4WJqTe3kHpida1DldQ5IW8CdMkohhJSCqiayrhtc0wVNIs5/NGXhg/T3Y3v+3caevk3K4lsy47Tgln/gbr/LkSMJ/ijB+71B0ERExMRGaAImKiFDKn3TROycN+2ePH3+2efff/36tbCPNimTc4xWs5z3gmlvQmYspyYwVXUJI9b1/PwM48crDeqrr74+P78YOlnerbIOj7Ca5VEqK6fBFxlzUOgSTJVwxKV7hArLLC2m9qKPs+zFjBsjQYkxCnuludZ6a8ewqmLWiohAuqHGvFRaD2OMUISvQWgNMFMpYqVFw8wwDN7LYZcJU3BU+O6Hw2Ec5zmGkDSEEJIB3kYyB22jesJhGIgULZUist/vwXjabLZEdHd3l2nY8xxLnZULUgRjh1wNjjzTlc1gi6lIoHRdAxS3guqHw6Hvuwz2FpkU5OUQlkLeACQfADvWGR5XC91p6Lq7hwcvsl6vK1YBUjECaE9Sycb4Lm4hg1XjWV+0f/EFod14fX395MmTw+GwGjbCPqYZVOplzR47E7MKwAo+HA5kJ3K1K9+u67q2bff7PaKoCmLXgGOaJkikQe4be8/M1uv1Ybcfx/H8/BxAepxnhAg393dd12F+Bk6IlQdkzUQYPeUcxxgQ3tU2OTM7HA745W63Q8CR11CDL0MsUH1YynhVB4y/IqTDpprCPGVUeYWVqejRxcXFl19+ebE9T0VemwvohbTbDy2CLTw4tTDvnCOnRqlru9V68A2krfOzLiIIDbfb7Wq9xsbufEeEeWJJT97B1UfSTFNS6E0yiVECMaraE13UBZGJqtkcQ0yJWELpYVVVE3Hg4Bo5YlU29MsRqsEOdkUI5LJHMgtcRv3UTKZxvusa77I9zEkqqVlW8pFSMZHSOF5tCIkQZdDYSdM0zTSF/XESEWuaBuF40EoJVFUmZ1yQZCYxIiMhc87JIskrBAIyo5hiWsTl5SLZSLxz3rfMED07kaqYmciSphAmPHfONYWpKmS54mOLCmvFqOjv1yC0sPzf3un6sde04oDtMdaoqszfmjPjjfxBsxP/nkhY9fOQbmY5BRURYUmMu57jVrZhPOx+8MWPDofD/3rz3lt3OOw674wJCmtmAgcsxKmo80zTpAn8vdnEtZ3vQw8gGtG6OP/6zZtnz59/79MXRJlrUC9MzZZkuOXjlHKnQHV4WV5KRHa7nXPnXAZbYno89oQvsla+SBBXlpBbyDJXZ9Y0jXctSxYWh6l1mfDsoTOwXq9BB7XcRFgVAHLdtKbdcDDImVwZqujLSL71eh3j3PadLzMN8V7E/qi7ayL09XrfAAYKITVNR0Sw70ibSsW3n+c5JUP7CrzCkrymqqjt1c7OOoJis1kdDgd9rLEOYBNiT8wMl4NQY/fwsNls8BrPkiA/ObQaExxwN/QaMOgtYD5SpVzldWgaIATOOV/S7lAG7hLkVswwkf58cwYomIosfk3c8fPQNdrkev84jpeXl9gGEN9o2xZY7vE4Nr4jE7guzJQkos1m8+LFC6wGbsEcs9w0/to0zeFwQAaMYAJ7qdaki1ew8/PzGGPlc8UYz8/PHx4e0BI9jiPEq1fsVN/OScdxfvv2/TyPxjTOkxCHEK6vry2mtm2dyLI0Ps9zOuSJ164wtFer1WEaU0rvbq5xi0MIzLZerwGPz2UkF3z8EpUB361t2/3+6Jw7jEcUYkUEhQBQEOYUN+v15
dXV8Xh89uzZeHfvfN60JEjC2ElmXwKMQaSFHcgZdaO+71b9YJbMyPtGRIh0tRrOz8+7ruv7FZ4aUASSUUhxCjMqoDFGZpBdjSgRqXfinIfHEhGUTlCEqgEKAiPvG2aHByfG5JwLsCRmSuLY+SYHfDFEVbJEBCVlccyORaBGycR1CDG2nwgXnDx7Mu9917QimY/GGC+wyNKsDAjBX3DxoThmPKosnhoVn2Pl8eiIiIceny7sVdWUTRnST8rkiN1iahwzk5ASVcGNalRTSilFRGCU6S+OnTDlR56ZXek7pxK+GGmMoPikGGPb9t57VVwMVexnmVZ9V5pbX/Rtaeu3e98PDrgDcLOpBBknRmdK3n+7Ay6fLNX18O+VhFUvAi1VzJyHChosrxEJpIjMSPzZxeUP/+BHv/3tb372l//RuyZoaoSNTMRrSmTknKiSF0lF2URPI9MdZkBeXV19+umnb968EZ/v7vv37188vfTeQwEYmVmMs3s8QHuJYOTkGJAQZ48CgwsXQpShFc5jtlhE0AFVA0Ba3CrgmUgRYPRRumvbdre/xzmrM0Du1XXZDlIxNM41qmm1GsB+wmXXwkyJHPOaLxJlBsjsvQfIM4XZe48y8+6w9+JEZDzOZ2erKQaMVGN24zijbFklYbuug0oict/66UXXItasd7/fuyIULIt5QTHOzPzwoPj6MNkYAyWlcYuKXYBJrZ+ILAe+v8piOOeQYvZ9D2V/K2M84B5qaAJ8GCsfgx72Ywn8u7bpYzgIe3EU08xiKUyOjdicYySRuNEpTK3Pg5aPxyMqyjc3N+v1+vb29smTJxgJhccSF6ZFSmW322GhkJtSMdnn5+dTSIfDAem7KxLWoBHBc6/Xa/RQaaHvwfFAGQO5I6rI2AMVpkYIgg358PCAvRRSngQXVTWlpmnGOez3ezJDdihiMcbdbgdO2fG4b5rcJYWaLsKm3W7HzH3fq9Lt7b2Vfneku9j8UiRlEP1g3UIIFZStRRCsybt377qmZeYnT55gZnPTtXOI7CRqGlyPjI1K3gMaP/7bNI1RMkr+dEjbtuIhmmZnZ9vNZoOupDLOFnOFHTNP43w8jlNM43G2gpeGEBrn0D3vve+71jkXUnCuqFASZR1mTfDlZgwpHvEtOZlCikDInBCLa9rtxWXXDsfjceJpfNiJE8d+ToGVnRdVc9jAxCklw3xfO02+sWx2qO+7Kprhi8iMK2CYFG2W0mKneE0IwdjhdphxSurbPGMxatKU5hTTLjVNI2TmhZ04yRJpFYjW0sFlBdibppmZpcSyzMbs1GJN1k/+T1gYYE/jvVclYnYklhV/jZk0RA0xzmNU9d6DhqmqvnNe/BIUrE9Eze8/sOdmxo/7UOuVqH67zgY7Z4t6Nlh5NUOTcrYlKkOLpHnpa8uJhTKzSH/fDpiI2Kg8b5Lh9MeHiIj3FGl7dvGDH/zg7euv37z+0rtc8U2ln8zMVE/3kov4Wg4JldvOO7d9+fL5+5trKqW7h4eH29vbp0+fEgmMKdXb8y1gP/zZqWaMONeIiKzxzWq1ymWkZn0c9znrTY/uYt1z+BnlxppLwRWB1XI8HpkcSl+WVbQCOC+Hw4ja0nq9ZnbeP4q/cG3AJE8Lwqyq6D9piqyglISvgISqqs41ZhRjilHN8e7h4Jw7ztOIOQckFme83fuWSEIY8ZAfj8fXr193XQMP4UtLBhHB6dZNqUX9kYiOx2PXNWdna/iYWA6k/jWmwf1qmmYYcmNxvV+g1az7IVevLaZUJlIwIfoRkcTJOQe/iIRyGIb7+3tImEHBu21b77JAAWq9h8PBipa4lYka8zxvNhtNKXf4WCKN1foQ0dnZGRqualn37u4OnzhLDCEYpZiCc86Lh6PCxYcQvvzyy+xdmuZwOCRjxAoAhIBb6EIOSYueKBYKP8iCfAQ4pLK37u/vu67DgKPdbvfm+vaXv/zl5ny72Wxev36tZKvVCrkUbp9rG4mCimyM0bmsZ5myzHWDBamcr4onE9H9/T3kIGpSpbGq5EsIAUxsLvysfExaIYqHw/758+fS+N3d/TAMpBZNX758+ZM//MMvf/vb7Xa7Pxxx75hHxCVJY5hyqT4jKJ5FRJqViPiG+77HLiUiUmta13bdZrNZr9fDMKDvBckcp3g8HO4edrcPu8PhMI1hHGcS1mSN99M0ElHft6vVaiCZOIkk52X5GNZcCvHuNE1mnCgLyeFOpZSYpO1X2+327PxKRILqFGN/tgnjFEIwopCMk3loSBkZmpUJT/oppE4pOSfO+bbxwiQibdPiMrggvVZk7Mp25Wo9lGmegtlpxHXxLtb3/XSckRGllNDuQQXBzl+EjI1VDY25C2d2+icDQ04paQwhaIw1CqSTnBnujmCYn1ImiovktHJBk1zDTxuRKmEkYPWIUljTfz9fRHTylN+Z/la4a+nmiT408gsf8Zhg9B3FTcuzMtPvwQFnccr6b/jgogwtYAOePJyx895Mu9X6Bz/84dfffHlz/S6FmZw42Ahjcii0GJEjYyZm1pr/4eNSUmJ69uzZ5Tev3717B2MRUnzz5s12uwXdt2ka1fgBHJGPrBle+uQ/Glk1xVAVMACDIG+DdPjHd4tr53VpjAE1RiodehwR3KFACMYsgMfb29sYNcYDeD1chPEArsL21VIrcE4mp4m8d8bq25ZTSmbEnFQ1BBeCX3TN1lKxqipT33fH4/jw8OCkMUag58/OzlFoFPHAguq+xNdkNlyA93k2ADDtWISf8Fh6L3DVAKXxsy99RylTHLvqD4KmRhyVVi4rNJAad0+z1gwPvhPdVjX7pMJ3AyuemQ/7Q4W4w4zhaw3gVvg2YM7A6C4vL3FHjsdjV5jMVKYFVxD+7OzMCjRCRIBAuq4TkZhmz74uNYhCVpTDsX/Oz88xVug4BSxj7W2tSXzFTrQ0fSKzb8rU57rNqMxu4jIkA58+z/P79zc3N3d/97e/nsZglIb1CmcIIUXNVDjnPRaBiaLGkKL3nsySEZnOYWbh4ziFmOW6kAcT2zzPmh5V3dCkYGYu5rFxMc7gE9hCk0jKeOPGNxcXFymlfr3yLDHGVT90Xfdnf/Zn/+7f/bubm5v11t8+3G82G8+CEIqIpBej5BuJKQs5KFF+QFrrVx1qHwhN1uvh7Oxse36+2WyapmPwj5gxG+F4nK5v795f305TGMdxnoJIVm8FD6jrujmmaeha33jvV+u2YfFQyiilQWYPszZN03KorZmy95oSkXjvu35wvp2maX8Yxym0bTvH4zxNIhyDKlnrG5YIRlZIZhYVDGPGlo6gEHdd1zXOg24GUUliYkF6I4W9D1dVjWQ0ZXKsyVjqM8JOvPcirvFdkEjEyqqq1DjnHKZKFJ9HdQ5xNsCOlIw0mz4+DZ4hM4tBY4wp5DYkIhYvpmxs3nt2jRKJeAPmVUcHln7iFI1IMCB8DiZlfG8Ngisw9rt98Ad/NbPfDT5/8OLHkda3vuwR6frj1yydHbP/vWXAshSIXuTBjy4IQZlzTs1RoO3ZxRc/+NE3X3715vXXbOoE5TeEaQircx4sH2mJ
tc6PYR6G4dWrT77++uuu61LUlNLd3d3NzU3FMWwxPAQhWb6Yx1dfT1tvgIgc58k5EWLQBXEqNEzVW1U3R/2BCnahpTW+hoTOuWnM9lcXIxxgIpmdGc/zDN4pF0QReg5mRs4rsVrlbOs0HkWoOtq6yPM8RxHkhexcDZBTSqrGLNM0MTnE/uhhSGZzjK5pHHOMuTEDHqiqdyGMRTVU0LCkeTIxxBkArqLIV7esiGD18JsaWNTgnTwVtimZWds2634AJbimy1JkMVIRHur7PqGc6RzOX/ug4ONRSY0hh88okKMWReV53u/3gC9rzlEf+xpIwc8h1/RlEsN6vUbzFdxtXV6kswCiY2FZIxypgRRYCzVNwcIiOKgO2EqfJa65Pu24nmmanjx5gp83m83d3Z2qjuMIReL9fv//+bf/9qd/9Ec//vGPwRIHEzglXxis5iiXzGMSsGNijEzSdR0e3XlKY26+B517zvmTOCLCvpHCh8JXW61zDVu8M6aQNKTCIipNcavVCi3aIrLenjli773G9OOf/qHv2v/H//y/UFn/n/zRT3/+s59TIbEXHDX/Nal2fdP3LTvruq7xDcKstm036+327HwYVn2/KttMiSSlME3h5vb+9Tfv3717H5OO4zzPOahl71gtpjDOYY6p79t+6FarlXLqe1ux856NxXH2gng882I6NwzDMkc0s5hQNU+7wwjC+XrQ4zRqMm82p4wYNY6iy29R45RSLJ27zNx13dA1beu9hxCzsYoYLVlCZgayfghL4QiJoQi62aM8EiB8zTpqbITojggONZs4gfZy2XspJbZTsmhmzuVefzsd+R4W0+q8b4GBM3NKj2QTEzQ2jVW1G/raiy8iJhZVaWFGvK/jrb6T7fwR5voBIXf5yuomHzUOLW/it35E/ZOV9Hfp9ZeOmX+/JKzsg/N1nLaIEKV6oU6cWXLixTnftZ988ukXX/xoOo67hzsic+JNc8G1fM8MqfMJ8SgejtsQwtOnT6+uru53DzAcYU5v3759/vy5923KwuKPApMPlmkJp5iWpFgy14lZSHiep5Vf7Q8P3vsUT+Jkyxit7m8AepV0A5Mas8RamqapaXPCh7omRr/BDSDDNuV5iiGEeYqoM7EQe+ecg+dzZQTsPM+ukd1hL4Qn1jVN470jYrBQiWiacpJqJFMI8zzv9/vD4dB2A2L2lJIxSwE/uSCfxXhBzYrm+UiY9EzUdd00TUO/BqK7OVshkuDFrAItwiNamutRKq4MGiwjmpospuLjs95T0zTH49Eowaeie2oYBlNufJeiaaKk5JyYcd5Z4pldSqkFH0qZRcwSas+1Sgokv+szxgC/BeAUy0sFmAIPGZ9eDRaADZCVcM0oAdS3wPdjQCHId078r3/12/V6jZ7dym2GeORSh6taw/xEFV6blWzyBA/mQntsxCFDRWp+e3uLIuvPf/7z169f/9Ef/3O1eHFxAVW1GiUnMshusHcQrupX7nAY56TRKMxpCgnhRQWoU0rish1k5qSaQpbOxjCQ42HE3nDisc7jOLJRSsk1GdDWUq33pSPufHNGTIfx+PLly//Ln/1fHx52P/zhD//iL/7i8vJydba6fX/rFr3RmgVe6gB7W69W2EJOHIB9KGLWx1xZhDMeHmI8juH27v72bhdN5ymmlMe8N9YSUYymKc5Rj1MYQozKMRzx8K548J45V9aUyEoknef2ZDOi6piT2TxNNyER0RwV0dXBLITUOCE6qekFTZQ8M8P7BgWSZQaZmsaJCJsC0MS9q3hJ/vQUvWvk0RSg3IIB48PMYrkuUNewRO0mOe15NANYVYOmRvMgh/r7GCMccH2ZCB5AW3ogIlIyTw6blvOgMyXK3C5hsM1TnELIc0f8ZrMx4xhUfIPPTSktUIdT4eNjb7o07JSN+Lc0EX3r63nRwfzBt/j4PN/qlb/1l3jL77kGfCLSP06CHREpZ4ZeSkGYm6Yjs/XZ5uXLl9fv3s7TcZomKJdWd1XfS4vuJhxm1nUNujN/9KMf/fs//w/eeRGxGO/v71EJNjNw8WgR6SzPQIs4iKzK1BCZAHg8Ho8wemOYpxB4wYl3C5FCs+qwuf4JuQ4MB/6bmztdi9b1WPSY6rgbDAJCRaRmQt77RNaImBlwdXg77z2zmDHUKIlI2JRMmNS0OsK0GGN3PB5jTIfDYZ7iau3RMpBSIvFd15kdUkpeBJ6SiFTjOOZWq2L5PUDy9Xod5gDmjikp5YosJhphJgxSfClT5ev6pzLBmxbz5AuE6Jbg6jiGql6E9enaASuWha7GEdwTePSKTmNjLAtah8MBWRTCgo4aIJxaiNwVE8aBGjYXiTRmhhaHL3MRwDZCCl7xYXw67o6ImPJ+3GMBd7vdixcvGidop4EqNTqJa+8W1gSLT2W2lRbuiRVCZm0rQpiCyAzlgPXqDNi7c+7+/v7nP/852qV2u916ve77nkgdC9h2t7e33LZ5fcbpt19+3TSN9y1uX822s+EmJhMNSVPE3TFKOY3Gik1j6zx4dvvdIWdjTESkMUV3mhEpC7Wmu93Duh+6rttut//yT//V+3fXZKaqv/jFL7quC2EahjzhkYiI1CyRa1zmf6W+XzVNa5a889i3wzA0TQPjY8ZZ0VZ1DlHVEvGc9DiFlCyZMgmxc75NyaJqiNFiiEmjmpIlpTRIUhL2IrKSwZosFwKya9P4aQKfLkfkLOScY9V5nlI6ag4Nq2q0Mjsycs5lLN045uYrheYB7rL3vnXee2cpqqlYS2zeOdQF6svYyJRce6LuoyaYSsgiWeSSkKJKqRCrgWSan0foMFNxqykl5Tx6qAIPmiy7z4X9TKVzLMaI2abFl+cWD+eA7ZlZYWKXntJ5nsM0hTirUtM0bdurqpL6oqakqtWh1DNbGTP89z/4W1DrytXKj39Fv77VZ398hu96Wf3Zfn9a0I+OE4WPiKo0Ry7tkqopCauaeOfVC/v12fbp0+e7+4evv/lSWKTwTZICHaokN4xcOq2CE2maJik9e/bk4uLi7u6u8S3M5evXrzebTdc1y9CMymUxuSpJg8PM+IPlU674G/pwXBGXWUaLtEim8QKYUdivSk2iUqLLFp+sRnO5FLoofNazhSKmOoaZcgwhGGMYYgwAihfEBNi1ygfJF0AnwcVpmkScKYt3fd8fDiMzTyE5l5M8IppCYMvAKaw2lyo4EaE8jO9YJTW89yEG9HTWqYLgbcEVwRsB6cWXWnrZEAJGcRBR37Xr9dqzYMbONB+rX0R/VJ2by4XzgmOJgcO3YR2OxyPKt1XFE75wte4RSQAcxmlxd8D3wdth1PC9NPcp5q6kWvIMYTKzpmmsjFIBVB7mBA36lNKzZ89+85vf3NzcrLdn2CQwsnhltblaCHp1m9VmMzNDH3YNzuD7UcvArEARmaZJ2K/X3WEcieju5vZwODjPV1dXL1++DCHAAXvvd7vdL3/5y6TyxRdfbLfbL7/88je/+fLVq1dM893dHQoNNbGu5HDnqW42BzljhrRCph/PKcZ9JkJ777VMEcC3oAd58eJFTWIKT6pFDeLs7OxwOIz
H6U/+5E/+zb/5N5vNJowTWGZZ2hN7qYSDkEtjZpGmbZuaATdNo5R9ADOTMepT+YmLFkKaY0BjUdTYkoc7RxIUNEbTZDbOUQNVlpxj8X6QjLqTc5ZKRRz09Xm+ERIWVvGmyQyyQCQo6BAxexZz5Jxr8oqRmBFy32UG5r0nVvyrESfCTFo42Cfl+ZrAVTy82jBAu2V9xGJO8ryvMTH+asUNh5SSLyZODOOKT00oSyNvljizZdHtXVYPPpJJyoNQ+F+naq6qCmUOWUpJExHTMKyLIeWa/hKblQHetCBM+TJ09ePjW/3i4xd/SJOu6S99B8/rg3N+10fUXHmZRv/+HfCjg4kzFp1/EDNtpEkUSXM437bt2dnZ+fn5N6+/UlXEkmgK9t6nKCCesWEYZc5SUSxpmkbn6Jx7+fLl9fX1amiiqnPu5uZmHMfVqk+lCpWvB3nwouJYpU3h+6kUA5DGee+nqUwDbH0IwUkGxPDK0/4206IqTEQ1B+JFRxNuEqSb8XrYOBGB2lGEZEepdleHwZGPxxHA7DiOsfhsZm4oY9f1UYxF/QO7qmkaFkfMRIxRZcIe8u6xaIwALEVm1ratqaGNFVBejLFpcqnPTMFgcs6Z8skxsNYUjQqXEhWdKhBWuVr1QQUIbGYsVknjzjnHp1UlIjB+YdDH44yVR+dPLej2fY9GHSICMI7ssGLLKBDiMpD/1TQXgUtTRkMCTtQiaFVzX4zHqOl4KY0bTm5m4whF/qyXtFlvUfJ88+bN27dvt9st8H/kiIgV8O2GYbDSw2alXVUL6Rc3lEuNoO46xHNI93HfQwgeBWNymnvw4n6/74dWRB4eHt69e0ekzjkhnuf53bt3zve73a5pupRsvV6T8es3r1Oy/X5/fX1ddyyzobJ+tl0hoc/+w8g5h/2M7bfbsSuw4fF4jGHOG0l4nufbh/svvvii7/s5hMvLy816nYvlZFiNy8vLX1z/Isb4+eeff/PNN8MwfPXVV+fn59gYmY1PZmaQPsYTDdeLIC/TviiHy+JOD7VzzknjnMtgEVFUjTGGkNjlx1ONY5yT4ekji4p74Vi61ndd0/qsmUXMUwxE1DTNer2GevYckqD7tfPOTBOFpBXDYHNqsXGu70AqTI6cqqaikkHFBzCzatJovhHvnXcuhCwDoItZC3hXfdir6H11EiEk7/E0ebNQI/4YY+vzs9M0zjmnKWjhXS5tOJYu0UlKwRZNodUSpnQKIGqeXVPw7MUTkYN21+k6RYSF27bVEH3bV5EwPPJckPOShj1uc/rY45SrtO+YL//h6x+722p27COhj/LP34Vpf3xhf38HXK/3u+Y3/f2PzIvOBVViM3OnkYVqSR1zkiaJuWHdrs7OLi8+iS+/+vJv5/EYNbJQYmfej+adm7EpyJEQWpLQ6K7euda7OM0k9L1PPtnd3719+1aJxDtN+re/+s1qtWrbVuMsDlOShEwNYRUJsRAJ0cT5sDJXEVFkrrU0TTvPQcTFCG3nCfU/Ii4+VZglz3XwfsYcG++M+ThPq27tHHJTVTVIBprl+mKmFzl3//Bwfn6ezFzjpnmGTQHLJTGzNExK7MZpClFVTTV6703YSRvmCOJCmNV770QO+2kYhhRVxGti5/w0TSye2Hw/7HY7Jbt9uEcW1aqIU3FKHJPacYT8chQRz56kTXF2JIcxpyDHaRqGYSoV3xijFwFjHNmb5uqUxjlA2w+CGHGc2r53LCHOmlLXtGbWqKGVZWgada5v2sNDnsM4TVPfrUIIfdfM86yJxjCbMDkZd/sandTQ+6wMIUbyCjvR9T2y7SokiTd61wZKm01ujqok85QCdMHwysPhUAH5J0+e3N7eQmUTUs9w8CKeSJzzTWOV+o6o6OHh4XA4dKvV5vz8/fv3xhznsW3RbSXMtt1ujsfj8biPMZ6dnfV9v99PIXjUwp1z43g0S8zmvXSNsxRqU5D3PpKa2TSPcxjFkVoMFslRtNivNiEEEWrFU5Lb9w87f5ymIzp6RWQYBmaX4vz6m69u3l/vdruU0v7h/nicsBrM5nJ3SktEZrbfH+sgL1yeb9x6vVZixzQMg6mF2caUVLXvXQhRyVT1YXqAB3LOTXO4ejIktcN+f3d7e7k9B5ZOjczj5Ho/DMM3d9/0fdc03jlZrQZNsWsbCIAzU++b8TQXlpmtbX039CbMrVfnGgSOIRERJVNNItK3gyUahnXXDeM4GrOJTXE0sWhRAjEzJtWLiCabFBrsfJyNHBkdnO/6YS0SmtaLOEvaNF2YY9tymNPl5Xaanr59+3aOc98NFkInLmhoGifiVJNjMkpE1nr1PgkZM88qKBh57+cQNCXv/TAMKcVGGnYkLEouJiX2c1B0iPg8gjAqqThJpkgkmC0ZJ7O5CBWwZ2m8MsLoQcQ59vM8W1LyOYf2QjGmtu3GKbRddE5CmPvWqyqnBGl75/wcJjYi1pgSi5Hzc5rF9OHhQVXJLAKsooaM2ERV2rYn8TFGCP4YqST1zs1hjqqHww4eYugH8g1TM8fU+Nah/TeGROpZrOihYu8herZHA+4M5yHm30F5ZmaiRyLPZkxlJEf2AYtWWDoVQBFAkNmpDeljX2sF9qdFxfO/cgb88SF00suGm+n7frM5a4U/+/z7f/fLX7gkIYyJ1Mg7byAXZH2PIpfFIs4RMhjnnAiTycXFxbt375rGT9NEZofD4fbm/sXLZ+wgXL7kOVPJqe1baeVWFIiW6VqNjGr5qtwkTmQxZG5z/priRDCFbXQnYXTKQsoL/XekONBj4sILq75ZVa2I24EUE7OCR2Y+jzpiQwCdc2UIYN0NMFsYHlwbWlDLbMrMRF2IS8NzwGmp5jEZKDFamc+DAmRYaCzXsndayEejY8F7bynh0y1r1ZbFTLnIjbcglj87O0M6DgYvCO1IZNu21TC3bTvygUr9qWtbVb27u1uv11SS40pIhnOtnDJfupZjjL45jTaq2XnXdWW6OJkZZJ6QYez3+6YMgqxxBhUrME3TbrdD+ymwEFW9vLzc7XagT19eXv7qV7/abtdYfNT7h2Gofcy45lQamVwRqqzEMVmIMFihHYSizpiy9GNu+sJNtAJOYtgUwMJ6BmKd5/mwH/FPbIYQsgHymM4hYhagScTMULjDExHiPIecKKyHFWB/X+ZbI1KZ42RmSUNK6fz8/MWLF5vN5t27d19//fXQd8+ePTNhACHYrnfXNwBFReT8/DxO8/n5+TxNWS6tiDAgAthsNkh8Ye+atsUgSFsoiKVy5IiN8kxPY9aULKVoKqIiTTEOVrN5VlIm6GmHMDUtP3t6yZvBe28xQf5XRDxL27ar1QozH2kkznV6dJwj0SQvjtiJGaq/pGgOFCm6PVpyvnmeIRT6wcHMaQEFwyWonmBns2rBTiljRaq5EGvqfyu3A2GoLOplVvro8ANhAUtjUmnipAp9V9xO8pzFPFiTiq5DzTXxubvDgYquJ6TEmLky+PDRSGrqVX2AD9t3oNDfdSxfX37mFFWTfZAElxfI0skuv0tNkb8rHa8n/G/GAZsgvzxdrxGza3y7Wq9JL95Nhx
cvXtzdXF9fX0vjJSR2nEJUZ8VXGlNFjzMKB/kIZWpaevbs2Zs3b969u27bdhxHUX13c/302RXQlcWyEkF61YzzxMTfRWPD01vjIzwhtKCtR1OJBuMuZQAfQKeUlOikIC2FoTNPse18NTooW0LjECajuI0mhGApEWc6VSVUW0FYKgOrXm2lsNVoAO/NXbBkRFRmJxt6f2nBM6zVWSqqk3CQmuexGAqlMasEOJgPKlN+iXKzbEoJ9tvM2DklYudSeRfOX63nZrOh8gTiK8ApIhvY7Xb4K6BaKw24VqR5sIwAkIHW1splbebG0uHbgcQEPYf6INXlSimIeNwCxBNIoBEnIabBAAxA31DJtiIgig9C7fz+/v7i4uLiyRPMtEcUAv+NdUMNopJKrMDy8LU4Ldw5ETnm+lctZVRwr6yoi2ANF7s9Q47r9TqEQHSaKo3L6NpM16/SXVzEw80M8ZWqEiXnBH2iOTp0rHr6vinkkjlWGPvNe990pzZZxEl/93e/DmEitRQy6+fVy0/wLIzjeCijR/b7B/Tadl1XpcGYqMkTpvM5m6bBSK6maXzToKl36URxhZTUEXuWtnGWAhsoi4nYKKqwmCVhUTNTJs5yFmqmkcjswEak+/0+pJhSCjP5MpeemYXz4Jbz84imhhCC+CZGoGuWUmp945wjVkckjogI1XGy/MymotIqgnlBj8fqnSq7ysx1zo+eOojgX6OU+ugyYahGtzrjJg9UNkjmLR+B6lAX5WSAy8hcgDOfDPnS0zPnPknxTdu2wt6IQNta+s5YxGIRg3ZdBwMEpxvLTG4rygpc2uV5kSN9q8X+Lq9sH7XtAq6vXNelK2WGDuiHld0PvvIHH03f5uP//g54qQT+j3zkdUERgVNNSEWEyQ3DmkmP+/2w3jx9/uLNu/ci4DxripFJUD2uC0FEtTmMmc1STMbMm/XwyacvfvvbrzabDWDA6+vr9+/fP316lZS8q2+1soEMMPLyOmmxlPWW1KWvoV/d8cpEajWiTGWeXbXFvmnrGaTE70ubi9RWMVklVTVXRME5xWGOSZfQx6LlmnPGzEWAd3lyuE9AzZCpm1M0M3gyvJ5KLm6l77YyurEA9Xu5Ih7JZZ5EXaJQtOu8dwgyzAxCpFzCYVwwfM9cpiKGMNdqcdu26/UahhjXj4SmdnON45jIKClYOWbWNnm+Cjy3LRqvcQZk9vVJRq7fdR2LxZhwZi06UERkhvHP7FxbH34rVGS8DNxgLBeIUbAdaEdG4p7K8IP7+/um71arVVwoOYP5BXd+d3eHcRe1/xs3cZqm/X6P7ZozlcLAqoRVkJggTAZMQhwRa8lcgdIzM4sQ5jku9ycvhlOlModVJA/lxUeLsHdCZh5hExszNc5jMVFO9uIqP99KyoU2JNqpc86YcHmvX7++v7//9NOX+4fd8Xh8/uLZdrtt29Ys7Xb3iJ9qq3SMcb0Z8GTt93tEIUisyUkIEZqL6DsXELFK1qWqppRS0hA1L1fpOUnKbExKiomnxswFGSOyxMQsj8T3ERNA94pzl+qjtgwWKGY0wD9ub2+ZiCyRspGSnuyVYIKTESH4S6eguQamzjkxIs41XVyaLZJIeuQmi+til9Io3tfUPJW5hKnMJtLScVB0abIJ0jI9MN8+gd7tY+cqRI/xVTOLaVaLZqTLC2NCWaqYvpM5TWrOucNuR8VFbTZb731IEci/2SMJoKXZqQ6YuXKBHnmZb3c/j/9azpAz+wqNPH77yRt+sOzLiGd5I77r0/9byYBPl1t+41kiKTnviTpbPXn27P3bt0+ePd9sfn13dwfjJWSkmVoqIi7PtszmQ0troJkitH/+9Nnl5SXa/PHA/Oo3X15cXDh3ajHKV8KGlr7vWjhdyBgtExQ0kmfKFguXSjBC+GzcmeGi5nnum7YmiAClkStbGVeHx+N4nLquQz5UIURkEviv82314jVXYz59egWNa++KFRkHdAfllzVdjJFMYghk4l3LyIo0aCJhn6K2Tb/f71NS01DBzArFw9TC0uFKclWmFGVhx1erVZhmYLaolcYspOW18M6apokWkXth7BI45yklQKbQgsZniQgcM84DB1zZZ7vdDuOkQH/r+36apliKBWmhxkUFbq3gbVwMDUQR18xUY0ph+fzjq4ErVHnIVXhkt9uBjYWZCqvV6v7+frVaz/McDinGuFqt8U0xbeJwOBwOB7S6aaE0V7oZtlA1YQSsr1glKiT5GkIhY8ZqZwKgIwiq+MUcrerO+TGHnAosiY1ULHVTyXS4s7U4goBJiJ331XngVFwSaFx8BvOTBst76eHh4eHhbLvZ/OQnP/n8+997+fJlmsP79+/gMO7u7uZ5rnmkqjrPuAA8YjFG9m7VDhiqs3RawzD0fR5cqKoUrT5EbDmAC9OcUmIzU1JV40woIbTYUh5RW9yepZQl1q12B7Vt2/gQ69ROxtAiYW6apm09ZpbEGJiZHZmW+MmRw5BDIY2plAxO0u6PDTusVHW0jzhBdRvX39S6ABekFPe0tjg65wBQ60J5jbJHyZF9LdCwd1VMA5+hqo7Y5IS7qqpqBD6UzbIIsSOmrA5mkjJUp1Zm1DIzZFBxf/t+BUhDwXnU3L5cgr8svnECpWsm+g/En+mUWZ2ih7qAZd3rJL1HfuGDZf/W4+M/cakBn5rDfr/Ht6fUbGRsGG9VfCE71/h29fTly81m9ebNm2+++f8Nw2Ck4qRO4kQUiJNo2RNShB5FOKXU9+33v//9//gf/yMAt7Zt7+/vj8fj+flZSkE+CpIcc7JHEC59tNzV/uKf4hq1U8PcsnBSSylVGAGhepxD7eysSkkxqFqELySSaZo2mw0eDF8m99WG4BBCRy5qcguBYnxiXAgn1TpurVLjOSmqnAquNS6yKku7IrSEeBAVyvpw4uPQiIXvhbPBwlYedV0rLVVtQisFUVrMKkBcEtPsve/6Ri36RoZVB/cjjszIKLFY2/mUklEKMTRNA/6vb8Tm5AAgO4dMuilS2HCcWBMRcd7HlJxzqLbi2gBrJw0gFtUrRDSgpQsIC3U4HMySmTRForI6aSl9WVSyfzMDxgD7a2ar1arvO+ccJNumKYuoYA/A1Q3DUGXCXGkKgBOtaXRFGpoSAYDBh2twzm2329vbW2TGw9A1jRNhIlFVcYmIyYRLNh/KcAusA5v3hVGV8eQSAiJpzlPvSJyTlIySEJtkLgbVKjiXlBq7K5Xm+NrCx2yW1EkTY7y9vvnhD77//Plz7/3bt2/fvf5GRNbr9eFwAHa92+36tuv7/mjWlKIMIhtlspjhitoglOMPycPtKWkKIYaEGCXFmKJhh4zj0bE455Sg02g1r7KFEa3P+9Lud90gIk4ahO7KxCRaZvaxsPd+1fewPCdRF1Jx5IWEUMJmM42YQJXSGLPx0dxg47z3aMp6lGNBRPk7uD9Uqr8iwuwWaA1VW+G9h4NJeQ5bHgVopROk2rqgqVlMj9MSnZQsnCC8oho1nAYAL0Fp5xw02E0Y7b9c4wMizIIzs5QMyBCYz0wCDnNF42qlqVqzb12Bug7f9afFIfU+l
ywlu164yOqPv/X89FENmIrz/sAB1zPUDFj/Hj74nxCFfnzApxYRL3HEqe2GF5+8GndnX/zw5q/+6q/CNHoWJk7MRGymlDTxaYl5cQhxxdxevHh2dXWFXISN53n65s27YRiYyVilgCqCjgMT+ygJrveg/lA3ARGxMBu040B/yNErimexzD7CMwCMcU6xlz7GKGW4R8kpLUUjZ0QUy1T5uYyUd86fQlp7pPemas5lNkQsjfkZ8S6zWivyXMi9GYOqfbT1yy4fmwq1LZNFKbNQlj/jOinDUCkVuWYia9u2poNipqqr1aomdiFOFSQ/HA596+Dg4d3RAgvDPY4jfCoEpIoKWK5MO+eapjkeDlTA2IpF7/d7KzB+9akVXVeL3uVKIQwBAnAsETK2mhpyKbdrEc6ErUderqqQ/iAiuHArJVvcRMyKCIeAq/Lee5G5FMIRqzVN0/c9IkWgr8yM1dDC9cP9pWKGioPEeB/G6zFUuO2aOrfAezFDHKNwD5XLdjjuoNjVuhaiIkTUtr5p3DiODNVw78QRhtILkfNsRMy+yoDwQnhkCeVxoSPUpMo551xjlhrnN8PZ8+fPr69vLy4uDsf9l19+2Qj/yZ/8CQjnfd+2rVfVFGJu4CZqO0809H0vDaZQYG10DvnAxaRkMudpKPOUXS/qBWFOKQt7pSzHppqMzDQ/zsC36KSLxMziXN+3qANj+2UjwOiVlER5/6SUvMvtcPv9fuia6XBk1CYkz1cF3kmsbDRNR0A4qpUAxd671nko8holoHo1/YDnEEaJV5aai5BytDLDA9X3EFLNrctjbgsYI4hk7UJw3XMsLqxF7I9qks3MQojza1EmzWjhOxnP4iAdwJho6sgTmaq6Oro0hTJiy9BUhq3iXVPWlivG48oMD1vESfXr2EdJ58e/efynk/elUjFcXvkHr//gl8xcpaWW3ve7PhHH0un+fZzrP26iLN9ywm/zdpYfdGna4ftf/OinP/2pc67rOnQfyuPUlUsxtSJ1eAYAE63X6x/84HMYpnmeifirr7562O/zI/vhDXqU/n7wKR9cJ44cUxfuH2Zn1rQPbGG8Xc1iSnCHVtjtVPzco69vRkR1Km11xjnmYl9pCMiKymbKy1s3MVYDV+gKhxaTBggagflRPynSxWKhkB8DZLMsK5GLxFxIVUTUNA1ALfxyWUVG5g06T03j2OWpCTlTpwQH0Pc9jAWuDZ0MzFxmKTok4jVZx3fHb2LRFcGgPYQL4HTgqrgUlfFFVqsV9CyZ2TdCRBidW/FJVZ2m4/G4h6rANE339/f7/Z6ZUYSu1VPkuJSHPnXwx6jgwpWu12u4EJDPzezh4QFZOBLxGCNELKEUZoVovVqtuBDTuEycxBuX4f+iVp0rZMAhqo5KmoNjIk1pDmLUCGMafCOu9QJN/8axhuhZPIsI9X3b9y0oz3V7xBjxRjESI1ZzxI64aVzft0SK16cytbqqbFZPUDXC4PhrcoYg9fnz57vd7n/5n//vRPSnf/qnzHw4HFIKqjoMw6tXr4pPTVMIbd8Pm7VrG+yc1WoAwgFkBWu7BGPQLIBNm1KKQeM813KPb0H+9ySSh8OTMLk8B1dZE6WUNMe+JzIjFGa0iKZlC1IIBNV/ZCIIZTJ50zSoPcUY5zBOxxFWAre17zvvnXPSN23ncyMAHhMpWa9Z1fNY0m4lxwF8guhckeZOyUpQ1eJG8Gl+kqpWM5LpC86d9O+WbszygD6louqd36U54q/xK5W4xDnXNr00vp5GF4B5jSwRdiuZb1qmXAmqOYYUIT9aJMQlIDb77jz1u49T5mqlQlfv7NIaL/9ZP8uYoC1SoZF6I+prvvVUv3/k+eNDHv+vcPiIXMljsT+6YSVNe3F19Yc//aOLq0vLuGh0zllMuAFaBsXUL58/JKmq+sYx6bMnT58/fRbnIMUiv3t3HZWgWZqSirhKT1ieZ7myy99LIU9V+8tFuD9DPYnIBAzVECOLjNME55EHys4TF5IUFfYvnmQiSmWYIMZ+TdMcY86hUzTgclFT/UQqVcxqqbGhXSFcIPH13kPtAcQW+FfUbKqTds49PDzM8wzV3zoKAn2ucC2ulP2oOABkn/isCmsTkXNumsI8x/on55xrfEjRmAChwybWF1TNECQEKaXdbmdm8Hz1FsDB4LIBngPprcApZDSk0LtqtIFU1XuP6b9mhkqz5o6jVDV2uJBmIZShhdBLJebDN8XeQL2ZmdfrNXYFANLD4TDP82azefLkEonIarXqugZDc9+9e6eFkd41TYcLixGXXUOQaZqW3G/k00j6uWh24qZ770OYDoedajRLIoQapPfeeYkJVt5UI6gJrowyrGQlLJ2Wao4WyYhyzck57rqGSIm0bb1jHg8HS2noOi/ElhybpeDYGserru+bFv5+aLvWebFs4ruu22w2zjWXl5fbs4sY469//es/+ZM/+R/+h/8hpfTLX/4SV+W977puOo5EBLeqhYkjiwFfTdf0Q+e8YOtW4BoH5hAjwBrHcTwcMGMR/0R/edu2eaS9eFXFpMtafPHNSU3aMbVeLi+2IoSYiRcNEVZAXXGkqrvdDg+d977tfNc3cxgZIQwlfBCj/aPvuqFf98O6HwY0RXjfdZ3zHOegMSHHrYZIiYzd8nn3ZYBYKvRDKVp43vvaFl8fhK7rmjz1iNyiAal+BCxM27aA30QkFDIjDmas2cmBIaxMRYWD2fm2dW2TUt5RVhT6YOWO02hMJG61WferwUkGk/A419PCwnAZ7SqlPaS6zI8dIX1HBnxyosWYU+kWqetQnXHN6Or3JcmyXqcVWHzidx31BR+QsP4+QPTv5TABtiJGkLBQFkfWdsPd7f788slPfvLT//gf/j0zp3FmPs2eW5qJ5aIQkWCUh6PN2er586dv377dbM6meW6d++abbz777FNlEhIjRVIIixbtUTJ6usDFgd8ghsXABuccLZ6NlBIJp5DENbljpIRvH5y2Qrip6NG43HQUjsex73uiii1HTZknkkwtCYYNVCZFBXVP6XihLTjnjscjCJlpQUSqCRMc3jAMEAjkwvKnMkyibu5aUq0xKZw0iqmwNU0pk1dYsusG7wXA8jiO2+02hOAcx1x/yhEMqq14GJCk1soZ8tp6L/QkAlUajmMWQgK8vNvtcvHbzGKEF4eYJYs5v0KiVu0XvuYwdPihamPhmyIyCFkmHjTpAMIzUr39fk/l6QXCgcey65qUMjV6s1nFqOM4dsNwfn7OzPg4iC9uNpv7+3vU/uH18b2kdIKFEErvUGZI1d2IhLvrGmae51SjByzjNE1QDIVRTtEdj0dwyphb1chMZsksqcZVv25a13dnWHDIefqmQZABMSysBirNq9UGN73vc14FDMAyzSdBD6T1HlkSkqqmadhnu4/NM8/Tn/7pf//DH/7g6snlb3/1a3Cb+35A9fTm5gZxZw19xnEUaZhZF437eCSLWEq/ZKePxynGqOjwKdXcCkc550RT27bsJETEHJ7I0OWMD1Whpmk4Ru/9ZrVerfuz1dqduGZtSrkP3kqzU6VVUuFnMDOeFCH4
ktIF5DrULFLMXTdExGJqkRT5X67pyoJiUh9zJiJS7FbnuIDVJCKWzUKeYwhr4xb0pabJtBVAvOiJAojinGuHvppWIIY5S3E5BiIii5kZynLakGj/5VxryE4XWGU11whVQ0jZv1Jm8Djn0E7JZdpE9nJCKf4uVHnpLKuX/cA7Mmc+RH1Zzd3/s360KIqfPugflHTTf1US1ncdWcqbIMpBBh/cNq2RieOuX10+efK9z3/w1Vdf/fbXv7JSMa3B73KhlwGLGRGbhtn79tmzZ33/t2A6tG0bk379+s0nL55Hp60TQ7Ko9sHNqwcvSImnHVZ2bXUGWgSW5xhg35k5hGRmXdcxn2a+lvN8VGnIsRVnr1AaQpg5RZsj6IWY4TWfngoWMgohemMjTTHg8hJlnlooilFhSuTEhF3bzCk6yghVnnVfFI9rRF+3fiqTEKvH5cKvqbk13KcrMlII4YkIaKorEh+86LzCy6xkgZB3xgtc4ZdZ6U4GGGuPkSJmwhmWtwbwbyZjExFRLJG78y6WJssSpOdGWO+FCp9ZizoHAgIqqSe8YMqTBEfnGi06X7SgnlopD3ddnvWEaIBIh2E4jCPy+2EY+rYF5ozvBaQhqp6dnSEomaZptVphMfEtUpnPYQXJr/sTV47fIxdkIy9OiNkohSjEZNp43zZNDGHo+xgCpD/6vj8eD943UD5BFWCz2YAXttvtri4v1HJ1f7Uenj9/vj0/O+wn4Mmr1QozHphZpD0es7qIFD52jLEf2vGYdVc8MwbtqdI4js+ePdtut7/+9a9/9atfXZ5voeXeti0uWx+TbsAJMCYztlIRrKmM5TrLrsZ2pgRVEDIDf0KL5UUY2nY+aPLes5MUJy9OMI2FcmJArEIszKuh35ytzs/PL842F5fnjRPnHBffP03TNGEv+ZTS4bgLYSJSEWoah6CncT6F2bVt07imW1coFd/Ou5O0JABvMwO8zwtGW73daDOjYv0K+JRODtgi1qFt/TAMIlRdWlro/5Rw3IsQE6umUpNG7YOYeUm0WeaRmkJKCeIqYGYJe2ZHJM41OUcyTkosTFQmWVkapyN4YV3XAc8vptWHECBUbJaZrUYqfHJb9gFaWar1/BjCPHnKk3N9pONRH1guAubfdRhnzvTHDuI/e9TXf0zC+q+eBJfJwaiRgOZjkigXEvrV2gk9ff7y1Wef397e7m6vzSxpNCNfdFCpZAPLn3F4783Sqm9fvnz+5s17sOyapvn1r399dXXRS5OSOfAIMEnXnZrEl3cO266aflnIzC5fpmRRUyoNSKc9quqcOxwO3uWUV0mlPLfVH1RiKh7LaZqIZJqC996yPCxXeIdEtFxMvTyW0/XUZBcpHR6z2iCrpUwOILqWMBEMLoHN+h1dERVypccJThQFVwS2bdtqjCg1wROk0lE6TVm9EhmhaqzPDGDDGON0mIdhQAUuqQ39ep7nGJRMyEQ1693Xp2WeZyeCCBreaIm8xRil9IowczTt21Pii1yktMeMw5AVhvE1UVCsj2iF19A1VB0qHCQVg+4yWYEPh4OqmiVUf5HUHo/HaTrOc0CoYZZHs56dnSEbQH+2EB2PR0wRoJL911p4xVTwSxhKIoUXr9Qq59wwDPe7AzM3TbNa9Va6nzebdhiGtm2eP3/mvcOFrdfr9XoVQthuz1TVbJ1SatvGeyci6/Wq7/uXz1/AxaaU4hy8uKFvha1pmouL7TB0eCQB/qdM+2J8Bdz6tstjlVNKIUVVvbm5mefRzP78P/77/X5PpNvzsy8+//6nn758eHiIc6hwcYYfvWNm13jLGFh+BOp2hQOOUSsPbhrn6rcgCk0YEGTWOF71HbNNIbTmQmIm9c5V7CF3XLss8XE+tBcX2/Pz8/OL7aofnHNSZOoRswFBBU8e85gBJGy32xyyS2Zrp0XBEc9vNNU5my/nWYij5SxcRHypelQHTCeJxNNJJFPD1AwOJqLJ1TnXth5SG1x4mnh7RV+dcyzmGJapBrX57FIOZgZ9VVWZjBa8xSVaRrlS4JISpkUhT2FmIwj5jUTinOv7vm264gUbO/FSFUwx97jd44Nklx75V/r4l9/lfZeOowYW3+qlHvOt9OMT/j0PX1t3/qG58z/RYUQMH1y/OJh+RkTkXDPPwTfd5ZMn55dPnj1/eXy4I9AKLAvTo2T1wQ2oyROZet90Xfe9V68eHvb7w9GMVVMM6ebm7tUnL4MGImcanXPK5D4KcJY3pm6C/Eh4D90yEWEnJCetllrfpYLcIslrVn09M3YtlU7WlMezlIJKxqWtaCg2qpRSVFVjZ2QaFTSHlLJIr6q2jbdSGNbFXL8lRn04HFBdc86J5IyzpuxcCFlEVDHtuQyfqYtTM0Ucrg4pU4Vrr+wheIjVahVC6LoWzl5VYwz4ofE+YkSxnVYD4x/g24pNoYoUccn+/SIGqrapeiDvfVIdxxGgqDKlxLW/qIDJKS20gWQxI48X8qJcukvdoge6yYMlTqoFtCin0aKIxcxo8PXe63hExHB3d/fk8hJZ9W63Q6IMn4HIpiIBsczswxurSeJiFiHMJKVJtyZMeeSl96vVCoPwzKzrOmidotiGhrfVqh/H07RmMMiGYUDX0N3dnZWBE/gTcsqrq6u7uzv4177raGtGFONMZf7mPM/OcUpxtVqlFEERypxeteIs47/5N//GObda92dn6z/40Y+fP33SNKCm8+EwPjzsMWOKhB27erPqrpPCx8GjZ5Z5JHWX1kdYMWC+PMVd48/WQ9O6OQY+jBLT7B2T+K5t0L+Vi8Gy2Z6dnZ1tWn95ebk5W5+dnYkQ8OEa7s9zRJQwTdPd3d3Dw4OIbDabpmk2m03f5/541J5LWTqlKmUc4xTGEihw/ZqZseZc63312vUuf2CslvtTSyGz7m1m67omRq2KMRWOymNv6JTGqGrrvHNOSmE4g0ZCvmmZjVnM1IoRyM8mlZEJJX7RdJIEMCNxYMahvT6z0CkTWfKUbiquUSST/CHRLNzU77o0+B/Y/8W/PhxqUJfL6qTFRSH5Wx35R6da0J6/+9XfepxqwPb3gLx/P0cOsbLfNTQ3NOzUuWSJvdOovu1ffe9719fX3/zmF4fDAQpzMYUaClUTSR/SpqDLYdvt9unTp2/++mdt23vfishXX3319OlV13pNyiW3O63sd0ANy93ctu3hOClZbgwqtqCWdZsifAhT6Aop0cwQZGjh9VHGyrIzm+c5xNg0Tcxz0ySlOYQQs1rsiUaYf2B0BRA4R9UHVB/PpTYM1hWIOQjza1swyFlIkadpwgWDMwluVKUf15CZiMBEvb+/r3JaVvSKUR/1RRWEmdfr9TiOXdfVWbypNCwNQ8/MGiLoIdX3Q1HSFboWvhc+IoSwGoZqZPGnWnVTVSkounMumvpcK+2qvwQ7Zp5HcM1wPamwqX0RCcFNR3VZSvMuNgBO3jS5lbYG78wcwjTPhpU8Pz9HJ9V6vT47O7u9u364319cXACf3+12WHkEOkTUMvummctMCKlUZBEsDui+1bsDBMHdxC0Yx0MI4WK7RQyUUjo0Y9M0SM1XXQ8
txlXfDl2j0Q9dx2aNc957sAGSMTODRFbRJnw6lijGGMKsmsx0mkbvm9xXfXHxsNut14MZ39zcrNfrh4cH3BdkhyEEDKALMSCquLy8PBwOP/zhD//lv/yXz58/nY6Hruum6RjnNGfBczHTpEqUx1ItH8lyj5SNWt9QmVEmC2rI8qEGi1VEukbE9U3jQ4wicpwCaSKStm37pjUzNRRu/eXl5Xa76drm/Gy7WvV91xJR0ipqm9Em7I2HhwdAI+v1+ur8wjmHTjFEGzWRxRpOMeEejcd871iq5KG5MpI1e8jH1RYkn2KPEuISE2hKMIanyVoA1GNpTUxF2KdyKp3PS+S9x2XUha6WJPdTORERSzqlFNOsqkJszCIO9JSm6ZzzRiKSQ4Fssoym6VjKXtQ+HqFti+FyzIzWB2ZeSl39wz3Xh96XFkla9Re/M/39x+nIXZCwWIn+YXOM/ymODxFwbDviZGrMMSXv26iJm+7V9753OBz211/9xV/8hUguJTpZ0soqqH5KWdar1ThHMnXOvXr16s3bd7vdwXs/hbjb7d6/f//qk0+hIlB86CPJSXp8z2TRDlu8gsH+I9/Ce13hONTnBKEWdrnmsi4ps5XcS1XJLcgOJ+2qRCTwMDElmKFlYGtMRgRRZcCwrmg5UcFFmblOF1imWRhoH8qoRFqg5fAWsQwwxgvqVNe6GlpQaHg1fHod8wD6Lvw33htjbIeevZsPsW+aenJk5KpaUytcFQA9V2YhWylfxSJwDTq05EFVWWupcomP47her1FDNU3OOccG86dFGmUYBkQ+9bZyQafxFayQ2/FLLSOB8UrcTch11YgnlDnB+MF7j/TX5Z7pMM9z23kWu729PT8/B1DJzIiKiOh4PF6uVggIuPBWcJ6mjKlwRZLJzI7HLJqGzEYkA9d1Mzvn2s733YCvibCp6xvfCMKyYRi6rkHYkUOcMCcvMc7oMopxFmnBhMcFI9wZhuH8/Hye53me4GOcc1jzaZrOzs4AY+x2u7Ztj8eJS43fe6+czXrb9n/2Z3/28pPnInJ/f++YiMi55ubmq/1+j6VLpqbZhdTtrarMxuygMKyFv51Kbdg5lxpKqkrGJK44bBGJRL5JHI3ZbdYrZu66uB66lMx7P3S9mRGpF9cN7eXl5bDqHdMwdGhbIFZiJ0YpxXGekNSO4wjvu91uz86362G1Wq0cZcVQZhbHxCzOc8g9CyauAg8pq5d4LroIuIU1nk6lvTA//tmrUbVUvDhSyrIhvihC415jpmqNQblwnQz8LGJVRUiq6BVZGEYRkVyTzuuvVXzDCOnv8lGtaWXMjWEUQkCPe0qhaQY0OKSoeKaWtqviPcQZ1Tthwf+A40N14fKFHhl2WjCKPjpOhc7lbz8IC37HUd/434oU5UdHFf2ARzYiM5LWt0mja1pxEubxiy++CPdvfvaznznnUKh3kkV2zDBhkOmxRsw0TY33Xbd+2B26rnv27NnNzd+Id7DX79/fPHvy1Hm2lJwXTURFhm3pfbmUQquXSqWLF7VVKr6NCoKKbR3ydKasp+G9Z6qYZE55qyEjygNt6oi3aQpTiAYtV1UzhsIXyA5d10G0hktPcNd1je9sMaCJy8wcV6BaK+MTaushrvns7AzODIKIeM7BjKjPfI3xiahyfXHB2+02la7feRzhhlUhMpClNkTk/fv351eXsALrvqMC2LZtTj0pnYDcrutC0WlamhV6/CRUWLgGzlhG7/2uzKWPMUZNMGdlAQ25yHo9EOnDwwNIQzXVbtv2cDigks2FlebLRIQacKDeeXl5Oc9zjCoi0MtMKbUt6sSERiZfurcR3xBRSgmQwH6/r3IxcI2Hw2GaJjQNX1xcII0Wkdq/SIWIkMpcDVirojPF8zyv1+thWNdqCG4Zcui2bdVyDo0MntmyhxBBtRvl5PUwmBkuL6WE4YyQUoFTGYZhs9nc3Nwg0np4eNhuz9cit7e3ZnZ5eT7PWb6RiPq2U8or3zbOpQjeWdM0P//5z//iL/7ik09fnJ2tv//Z99zaIQ7josEpJiKSUiimHMEum4gxB02UdB7D4XCImjUogMljEgDyP3PiaNE/yqISSalrnPaY0LWOMTbOY34XMzvPXdduNmvvvUgORhka0cRJU7UGRAQH3Pf91dXV+eWFpZNxqD1slvlfRcW26ExBhs8556TGFrXySgUgO/HRqFZnsrFSjEaoAbpqcA2kbCRG8LkE0PfC7GTQDsFl0zRQFuOCLNYwbnk450pLcDaMGcs9MUPzF0+KJ7SZ5yL9FqYYI7qqwM0Wdom0AoRSmN6udB+BY8XMuhDcxoGl0O9IXuFK+HHGDMvMC6Fy+l1Z9bfnviX++Ycd3kgY+o/ERqSmjnPYeIoUlqf9J0ap3YcYPUpExJwcGVESYSKXjNr+LPrh1Y/+5Q//2W9+9lf/x6rzjZqQztPkXEOkRsI+E3AcGZNN0yRtP8fYOmuaRhr7/uevvvztb+Dh+mb1/u372+cvXr58HtNsrMbqxdetzx+IXpWH1jRTJJm5a4i0maYpxSRdO88RaK0ZE7GIM8sU39VqhXqYiIQY+4I3ipmGICJt0x6PUzRl38/znIynaQIhIgR1vks6x6RJjcXN0+QaFyeMCfJCxqZxngIlJJ1g8VQXG+YkIn23imHnXdt3q4f7t13XsZOoabPZhBSBgtZ+wUonwTMA8amUDOPom6Yzyzhz27YpBPCMnHPqPZ7nGDRqwoONGnDf9xpiTNo6CfM8m3Vd6/0aw+1jjE3b1mGfcGZWgGIpLchEhEUGJqyqRjRNUz8MWNsJBM4QXJ2iqNp1nWchs+k4rlarmCK8zsPDw8PDw3q9lpOwETMzPI2ZZcg0Qxcmgvl6jJmpgEJg3WA3cFVI+pkd6MFN0xBldNr5BqwrM/Nt+/bt22QWcoMm7FdsGqdxvthuiAS3A5pK3gsA/KZpztZruNvVanV/f+uK4iOpCblVv2YT1ZjJt01zcbGNMarRat06xyGQqnnvdrvdixcvMtQvjbBnZuHkxDG5J1fPzOzrb74chmF7dkZ6v9/vvdAwDHuhqMZs19fvvPdE3HUN80bEmkbOt2f39/dNi3o8N00zp+hY+qYdD0fnXOO7w82hb9pE9s03XznnXrx48XD78Nknn65WG2FR1TlOTZflnFzuDmBmdHvner8lSykLyJT6DtW0DNtGY1BVpkRJlBkoiAhZ53hOnsn7tvWuPPirSomAx23btm3EezHWpmUiRazHzNx6Iw3zHJJOczwcJ+/aJ1fPzjbnjnzQMM4j7iCi3spUCiFMh2Pbtm3bUyKOJFH7oZ3nGX0NRMTsjEV824jTpNG0aVpSBcwjIjEEZhdSUE1t550Ren5UKWpyTSfiSXia1cyCkSVOieFYkdw24tqm7ft2xmhzYhXpVisTicrJHJtrfKOkjojYNM1dO5DOTiSlUJqzTVWIxAk715Bw369EXEyGx98sECkLJY2qcTpClyauV1tTUSbnGpEcM6Ghywr9G0+fcy5pIvdxDvktHStLD6OPmia4BsGxqAmd3iKYzPFBGzGKosA++dHvzd
zi05ZZwbcm08zsMcQ+xaLwDhKU/DfCiP7OgwvDfr09+9N//a//5j/99fF4HBoX55nBPzKzQnAXETa1wvIlIkR2QrZer1+9evXLX/5tBU7fv39/fn7mG0lkXj78+t+BSJwOX+amVeB3GZ5bKbrwgvSPZAi1OqQsGVxSHcdZGs8Efo045zA4BWhPfXSlKF0gjSMiS/lDnfO1VrpEjXzTIoPBezFXp+/7w3i0QsZGNuOLeiVwNrwMalDH47HrBhSDRQT8CHzNYRju7+9T0QAyM9gIowSClWZmEyHN8s7VmcfInJCaw9vhN5WyjvvYdR1UEs/Pz2sBu/KngNbCqyHcqSaYi1ijmfmivaeqc5hQt0aMD9rz8jK4qF5475HyhpDwey30DVy5FEFKwOn1IxBMbLfbm5sbEer7fr1ej2EGULxarXBa6B5fbLfv3r3ZbrcoVKMP5/z8koiQZ2c/5BxK+MhoITmy2WzQuNl1XQoRsQuhQmEmIufn5wCB06L/u5ZFUFHGrTw7O7u+vsa+ur+/x9oiEgIe2Pc9MuDVasWuIdKu6y4uLm5vbxFj1S61PMFJabPZtO18PB77dn08HlHgSE3yjRwO+6brNpvNdrt9+uzqi8+//8knn6QU1uvt/f09vgj+ezwe8aBhGjH2OfZtNbJYcFloeAEgWSapH2Ry2RLSqU2OmdFfDgeMU+GV4ls2DprqaWOI6G7f7/fTfOy6brs5g2rmfr83ygLUJ5y2pK3ee22aEAJ6XgHLTyGzr5NGvMaXcZNUSJE1abOFBAK2BxdSVTohx8kU6s4WNRmxlhiFSu8v9kBZMVe7kwtPU1JKYqRiwtw2Tdu2pLE+6bQYoCQCxnWG3Kgwm+wkLx8OhwOuMw8FL9jS6eIXk4lpUfjjRUH6H2qoeQFqVlta/5R/r48A7g+caDWntqBP/Wc/94PDGyVM7PnujJuyNvN/5eOkRI0LdkWb/rPPPvvX//pf/9v/9/+r845dM+4f2t5559U4lRKmJtUFtTWl5D2rmnPus88+/fLLL+ETx3H++utvzs/PPn31UjXFRI3PTUf1OuoSn9adlcoMDVoQH+Z5AofeFSIiEUE4s9423H7n3FiG3sDVsQg0qRrxRCnGSNCATXmXM51GE+L8aExTVbR7Qg4acRmVxv+mzM+B5QLNCncfE2oBe1YEm4jQNoOmCy4oa8Ukiej/z9x/NUmyJGeioKqZuXvQJMUPa86ARqPvXMzOvu2uyP7o3feVnSsysgIMgEHzxuk+tOpUVWZlZhAnZqr78JlpWETmOWhgeoDrUlKSGenhxIjSTz+1uqmSn2Lv/e3tbdu26/X65uYGeUoiauetCEFjpZSAdVJVR9zNOmi11Wqphbt/HMfVfIFyGl8A26CSQGyz67rFYgE2sa7rbm9v8RacG54nDKb3HooqVL1TMHSzbjGWBhjeezBYQZLisZ1zB7iyCCwSKJIQApwGnA9vBjhhpDml5JUx+8AA49YgUsWTQHtBa+LVRARVK4g0wBKComqaZij9dC1sCDMuThOX9J4UNtBxHB3lwZzNZmOcELHAUECNIaSxXq9Nxi0WC0v5X11d4ZFQmR3TSCxoCIjwNV4WTrkL7Wq1iDECTY3XQQxguVwy+wu6vL6+/vyzL0A24sjH6LvubD/0AOjd3t52i/mzZ8/u7u5Wq8Xl5flqtej7fpwG0QSWEhhtsFaZOcaprjKwbYgJpQqfyBWTMzMf17JyCEEq/H+K3iJGQGtj/5o6dwV1DNBTKlx4MFamaYrj5B3P5m3bZTrYcerr5IgpLdy9Xa3QV1Hk0EkCh2ncrH5KNJgKDQsflwg65wzFgvaRuBES51yuiZ/LO6r3ITCczoyBEhFQIWGucw2FRsqMQFRsWTbTp7YtmDmRzpvGey8CccScg6wsKuMYd7tdcN42V6r6YKbKOMBmNLVHX6+ATVN+k1Kr5LkWiIBWTIJCSkrBebum/X8oHao+5Hs+7r+ojFU1ZEIDtOWgA+zI5j07wf+h+GhoOS06WIsTCT9jtlz81V///De/+uWrl58t2rabLzH5zpEoi0jjvFQWCt4upZREReTi4uL999//+7//h8vLSwivq6t3T548CY2LmprqMWrVS8cjXs8E4o2lWOiwCFJhfZMqdWp7RvTQHJcqfgkp6SL2mlIiMageoWciF8SEjUlKiZSIvAilQt8PbwnYq3EcEVesVTKkiRUAOOeYGNBElC2CVISKaOu6rtS2HuHUIEqoUNvAY+ZCDW1uNF6NlZqmSTK5DOByyxJHpSSBc5MJaCAAtrHDERA20Wl44Fga5NlOtoQxZgE6iUp3BKRLEcLl4qXBKURMIoSA/CvMc6hhTIqqet9QCY8jomjcmUXOugJC7h8/fvz27VuERhBg32xu9/v9qm2apoHCG/rcKge1QLlcSkQK/R5eAXXDtvZyFKHEKlzp+ofnbEODafXeN5yZScCqvVwuEUXAvDAzDBoYK7PZbBpkivH8/Pzq6qpp/eWjc4y8KxA86BsAr9q2jUKz2YxI1uv1+fn59fW1QZBCCC9evFiuV8z++vr6yy+/7Pu+3+2vr6/v7jbnl8+++93vPn78eLPZ3G43v/rVr2az2bc/+rCbtft+1zTNNA3gb7L9K6WonSjD2Ww1IpWI+bL9a+rK+Atty9hC0tLdiJmZkyTrcEo22vVyMg1NSZLmuFGcpmkYSRR2IdAPQ9pPMTNOm4oSiRbECiG0heYFjJhT4WxPhUvSl3oK8EJT1WWcCybDlJ+ITEikUa4vF6GkSTUrXaekdIh31mLN3IOUEpWUcAnUO8lo6lwclVLyhZxHyoERTpQbcjA7FXHemVby3ktMsDaEqG1nXYEEciFuK8ObOY64os2y0+ghRVsL5Pq4ryNtf9WfGMUVPXRwTgcckT7RPRDWn6L+D1g4YmI6yuf/Bx1f23OJ6WDvHMyith3H/vHTJ//b//6f/t//ry+IHXswgaemaUpJePTeM2HdHyIMjeOJXNu23/3udz/77POp9H99+/bt9fXj5y+e0qEB5OFQlcMTHR5OLA0MTAoieFIYgyGggZs1fsdin2a0S2gaMEWLCMoBhYlSmqZJiF2FabRlB4CixV2ZWUlUSz9iLt2+yrS6gghtZzPnHEqA4EbgPdCnFsrp7uZWRFarFW5hEU4pECpVZT4kSqnUX7pSfopyF2Phhz7z3msSDtT4kPXEmJsIjeMAWv8kMqVxsZxric8j9wOpagFqCKnMrjAMpdNqHNHsvQT3oO1s0EIpX44xto0HJKrrujHmlhWYJqheOHNEZMqVqy6zUjq7afHRDfA1DHlvo/NH3/eLxcKV/pgYzPl8ycxXV1ewHg6mYUqr1UpTSmlC6B48Vs65EHLQgogQL8EdTZHgTccxe/+YBUwcLLBhGLqmbXyYpmnsh3k3izFOw+jZpSmSqMTMqtH4MD9rb29vldL5xRowtNVqpapw1C4vL9+9e9c0TTNrbu5unj9/7iYiorOzM0ABEGy/urrabrfPnj3z3i/ni+V69fTp45/+9C/6fpQ4pZRub2+TEkB2t7fz9l3zX/6v/
5e2bVfzWQHkS9P43W70pdTNpHMsyPyiSA7Njy1kassS00F1zR6LEh/WcG75m/sCYlegH/BhhTunTPhnWgpSG8+z3W4R5gFrmIXHUegsOfU71bPGzDGOTsWeM6XJOXLB4eYgDAHQSVWdb5CAmiYwciNQnJzzJ7pESmk7kApCKiKu5BmdJ3bk2dXhAxFhl5ciETlm75rgW8eHun+qbBcR8Z7tRqZ9MVneNc4FmBzeAaWf5RXwz0QUJV0sFjCas44vcQJcvyklEidekGVha7V131M6FuCHD22ITKgmFcjV+188+bqpTrWQAxH9a0PQKSWEYSTPPREJVpZNx797JvhQPmQH5/pgr5TqFUDMxL6dLX7wo598+5e//Piff7eYdcxOidSxL8h45xyzwwI1+857L5xiGs/OVj/84Q/+7u/++2q18t6rys3N3cXFhfPkg6+NklofqwpXXGi4pwjKQBvn0GZMmA/ysVTu1QZ7JpQwjeVciLFwQBIqLA/RKpKkyknFKxMROyI5wOhtNfgup6kcBXOVKiCJM/PZsI6ptNtzhTICO0FVEXCGHoLahhTDr+aVWl5AVXe73Xq9RugSA4i7pyk2hTQKGUQ8Sdu2nnjRzVB2DC3Ytq13uYM9XC6oHITHiw/qDawLrUYFLYlhR7rUpgDBiVQo6amA2CE1zJTRTBXUcsX4Q4easRxpgE2A13RVhxYqplIsbFxE9O7du/V6vdlsZrMZgOW73S4lcj60TTuO4+1w613uJey9v7m9nc1a6M6bm5sY4zAMXTfB5sBdjAYrpTT2vStoLxhVWDbB+1A6R0E5LZdLuLygN8H4WO2ZlEoqPAZC348ePRqG4fr6GpmI7XYLM2V5tsyVIUR93zdhvt1um8YjZ+G9Pz8/f/z48RSHlBLot7Dq9vt927YUqGkWl5eX4HAehqFpfNPkjpYxxq5rYAk551arVQittVLQQo9KRCllYI4vBNQZZT2bmU8G9YDv8nH4SnWiRN4F55mZnWOD2RLlzLHtHVsMlWw5dMs2U+koQZuiJ3KNnyQRSQgOaCBbbFh7UclWEW6qTBjeNOUOwbUFmcpq9Me8N1xFp4tSJMTaiIkUVkbmRbD3ynrIMWsWUKqaVLk46JC3CB2qYyK2eooYp1haj5tCcs4p+TonYjIKoma33SOO4pxruxk7n6KYyYLZkUwf1J58/UG1+uBR/8msNLuUjRiDSFwq5VKp9tPrHxMV8734Mx3r7697vPDHP/7h/fffX6/P7MmIMs6rvtt/BBrr1BU2HUxcNaQkt1wu3439xaNHP/9Pf/Pxx78Pbcd6QDy2PtgQ20uxI2ZOEj25MSXH/Pz584uLC2RGVenq6urx48sXL16kcUf3TKHK/JHsiJbDNmTbtmnouTQ5MKWbjcriNtkGEKEYJSVr5cvKTqsidEiNNE1EpOxchUSwzZMKiw2VIBPRoWTQFe4k7GHEhxGchBIFfgfw7Gma4Boa7AgulKkfk2Jl/SURMtcQEhauMDS35RpDaY/oMzXV1Di/32xVc9XvbrcjzplR+DGmg7VQpZtjmo6xbLV1ZeQYWgUnDRyLKDoCABlTzZnA0u6CJwRIBOa5FJpcX4hBTP3DLLAsIBcmJshTfB3DiGgzGJFgNGy3W4SdodGhuuxbeHIAeQ6SscQkLLMAA8UVNkH7rt2xbVvnqeW2tqjA7Ii5VtXFYrFarZAwRgQbhtRmt52maT6fX19fX1xceO+HYb/ttwBzTdP0wQcfbDabu+2ma9ppmgDBY2Y8VduFUkgap2mIcfTe9/3OZXs0EkgIvcdcw7OHnAMzzDRNohG9qtCmwrQUJlo1Mw+bAvbe48VTBbmy/UjHUhi/hoOTZxL2QDfL5aCifTVJ4hx6RWB/ktxa23vvHKkmp6yHLaxmDNWWAc6nEp+LpbNZlOwRTmXh2Xamom7retkTzyxlFGSWWuSY1ZGrXWRhZqnezTliPfU1YdGGEBJejVg1OddaAmsqPVipJLBcqen3vokxogakOLUkIoCSYL6WixWXqLIhS2zKAH1AgUgthKWUOdnxoJ6778jWQ2QSgxyzHhITfC/CfHoFUXKH9YDpcPQvKN0TdR7+8R//UVW/851mPp8rsSbxOWvyfwYI9KkrXOngvDeY3Cixmy1CCN/9/vd+9Bd/+fHvfruYzxWUOsRcKyrvY8UpCNHQNF5El8v5hx9+8E//9IvVatV13X7Yf/Xm7WK1XjaeSgl89RRf/8TFa+y6bj/0zDyOQwjwkA4JG4MFgZsmhLYW5c6FpJEyyBnUnGQqnFyewixWCg6IyobMJrmqqsYUga2FryOlew8WOlwZAyiiPMb0CrW5Zx9VKTcqlc3YFdfXN5rzx1QrJx+yG4fXRC4zpTSbdyEE1TSVdoepH5rZTETatoPEFI2OHYyDGGOSqe2aOAlYosxLgzeMXHuuwYjRwJMmpAD0pcI45tzBocEB0gnn3DDEQgeYSTHNlYH3CecPvwLfa8MOGQT1kKrCkvV60bYtCJNDCH3fz2azq6srmBcZDFViyESEc1CVBLjZ+fk5PGC8EYwG8+nt7k3TcKnRwkLCAzelnZRzbhxHHVPbtk3r4dqGEFarFTSo5T5dqQfdbDZdhxqz5nx9drfdMPM0DSG49XopvPSladgwDFdXVyKSxG3jDv0Nt9stbA5m7mYNGPaHYWj6vmlbGGegVESOJqaELvGppGmZPXN28pqmoSkXkePxpGTiY+HotuVqu+P29tbEuq9IGalKE3KFzEppMryVaCo8DwdnyAQovuKYx2HY7ndSOohIBnZpCDlVmSoiHaEjj8o5x47YESl772WKMca+3wH3Z7pBCkW8pX7SMa8tlwSw7Q7OrTjIQE9C6jRbb2TQLXdQBswKsW8DQs75Mkql+oCUknOeORdfqKYkU62A61ECyeA0Re8a1/ixB/zCpwJYCyE49svlkohUGY2HYWFbMMPI1QlK8muOWuHdV5n3/dH6/Iym4SoZcXxl+6RW3k4NeFq886+549cdQUR+/etfM/MPfvCDEIJzOfucLYtyRYRivvlaqfSSw2GG+f/04U51sGpRgi7JGEJQbUTj2cXlj3/yl1dXV/vNnXNMokpCxM4zKcfSkEdVRZIUpiQWRSzlgw8++OKLL2OMorqYL6+urp49e7a8PFOilLRpApEAo5FSygNFR7NCRM7n+iIIytvbW+99jKOBQfAYeTdK7majhVyJ2Y9RCuyCVRWtri28k1QoEfg3cqBGmb0b49Q1rUVQEdNGdz/IX6uUxaPC2UVy0XsP8PBut2PvsDfW63XXtJvNBvIxhHB3d8fMCJ865zabDTiqoKucO/BoOk+oDGkK1RdsnfV6rSlO04Dx8blIxogsciMUdrpcLkMIwGgA+QwBfXH+CH1woSRSYfvCF+HYYTNjVPf7PXKrdYUDdooJcYyMKde3b9/iBU2cURFzBmlOKdlIElEIYbPZZIoAYGS6Dk4eKH7gGUO5qiraL2Lf9n0/Wyygq9brNbw9PP8wDM+ePTFwKTYUbDjU7WQ+isJ1akuraZqm8YhAiIjz3vhhiBjt+aY4
DOMwm10iQ4zZn8/nu90OVoL5QyJxv996z03jkcmGtH3+/ntdO9/tduMQxyHutj0zJxUR8f4CC2+z2dzd3TVNsz5bnp2dLRYL8IqIRPYuScPqrKQKkEmLVboDL0RAJSjGBA2qsYZhcMAUK6o3a6lpyvUtZlhoYXcyhW3epBbgffEhPXy+cZgKaN8juoAlpwpdxSnRbrfrh54qakYiCiHbASJCqaJd5AM3+DRNSUqNosvPYynk5XwxjiPOh9kRqtr3OE3OOS0oB+xQKe3LHAp/Y77vMI1MnohcOGp25AojfYzjvJvh7iIym81kmohywQLqCIjIez8OPaCRIfimDTHGGMeYxqn0q7Yrjyl2XeeCTyrImpftgxro2Pe9UjaRmVmEnNNuMR/iFGMk76IKE8+aljxmDVf2Y0pOMxUaP+Rx1v7ryV+lAo1i8dh6S8doTSqgqtoPrk/IRkYSrnDBUlPT3zNHyBDU5SaBPb27vX57/ebb07enaVouliopN4Gk3C/qTzx8oYOHJv4zaV8c98LRSkSkTN4FJhLfBJ/a2fzi0eNHj5++HocUR+Uc2wkheGfcp0RETM4QXaKJhITdcjm/uDj/9LPPUQAqSa+v3j17dA5lpqoi6pxL+TpHnbDs5yzmNEehAUytRbMFnczYtKETISHRnI90RJoKkU2KQsSSEw+HalcVEhKKYrLvRHPs9/vLy0uLiMKXevToERWJYJU2UIdjnJxzqAegIo2YGU6hK1XwKEiQUsAeY1wu5/gkpeQ8kSLWN7Zt56oaTZJko9G2rZI12WWkNodx/+jRIxT5sHdmy0OMbnd3+31vShcjzMV0BYrHqp6gI29vb4GJQ+mqxQA8io6mARAtDIjxYGNwIE2g9ixKaYKbmUPFlAs5iPPxYdu205QMYwz/DMoe/mglLsUV3LVJxrwOReD+YkVN04TBMZh3jBGtnUOh6mRmaHQQoZCoZUxhPyFyQIW7GCYUXhnJ49KPYYGYOTb1WPJ8i8VqHGPwbdd1r1+/hlbe7ncwyHB3hGctwO4LGxfutVjMgobIGffpXZMoTSnG3F3Dp6QWziEiVR7HadgPuIU5oHSPDrZM3FRSPHpf8rpjOE99FMP3wM+KGzUNOEzYGJUBoBn7aYxTqtpZEh1lBFVPyRJxTWQ3VBUtHWOMoQn7zfb29jYW/oCmafpxkAKT5gJcmqbJ4xbHwp0Oaobs9YUUSV+qlEFtKzCz6oEhEoMQi9cOAjWkIaxAIAQXQkD43YbIhlpVE2nwbRM6yCh7a7spSuxUFOGxpmlEEK/O7DE2uVwKGejIp3TgA7QPtXJSv871xDOY9rVBA+jpwa+YDPy6v+Zzij6qZ+Hk1g9+MQgTeffq1as3V6+fPXuBp/HmAR8d3+QE1+/vqqjvNz/3v+Z4AB3NSkKaDQ0f2mb2/Pl73/rWt/a7ze72WsVRkhhH24cCkBklZobxVdYNiQqz++CDDz759DPz+d5eX93dPbFSEJeRXNFe8P6Rl7g6IgI8x3rIU9U43axFoqwMcnCMOUZhZnJMSmnEQ2o6dKo/UEgyOefYOoUZ3sf2qqoCw6wl6ASv1Hu/2+6x3M0RJCIrs3GZxDjHlIDrloprwuJCb99eO+eIsk3NFhAryWYpAAeEl7smt/+DoyYSNTNb5ZPv7u5wr6ZpttutJZm09BoCI3zTNEBrw5PebDbwICHl7dZwi9H+CNyNli12VWE0rAp7d1+oQ/GrPQMGGQrVPsHwGtzMl66IsbRbh1q1X7fbrX3XllbJuDMiz6BxeO/582HYY+XAacYkGmIOZi5GCQFwGDpc8Lq4slVNjOPow8E5QFLf8KUQfNDQzAwRGULO3XIGt3fX19dtu1uXwPjbt28/+eST/9v/4/9+QZfTNL19+/bly5d4nn63h6Zfr9fe+zjJMNwy8zAMZ2crBPCZPZMn2mMZDNOI9Y8xb5r8dMw8DXkYTVWkAqeoP+TSBdkCS1pQyrY4bT+64/ZBzEzkY5wMWGAnWATYdlZM0zAMKZroEyJJJFxqjE0mqGOn5gEfiJHzTgGFgB56bGNhoHgMCrhEL3ImaJomBuNN0SWmVOy0rMZIQdZo8pmZqaC+64BlUuKSUTWnECdgDcQ0DsPQzWZEwuydZ8cOdJhWspzNZSYWbrqmbdsSQM3lGD73pBp3ux3qNbCFmZk8uyY78cUOYxia9vx20CEi/QBc+Rv0jo2D6fhvJpO24X3wIqcn68N8Gd+gv0OUNJvN9uPwu4//+fHjx4laVRUWR8UJrl09Ev4aHVwZU1lO3d7ewtP6sx/1UnPslJWV1ZEEOTu/fP/Dj95dv/3D3btxmjypZxdjFM5doUmV2YN3VHMciYi1YdeP6cmTJ+9/8N6rl1+FEJJK3/dfffXV97///ZSSiDZNTlaxI5VTa4AIWw2PKCq5AysSbFREQ8niJBTwuENRLDxiH+PI3jE5IMnsZV3V+JOZJamSkmOz3VJKSaW1PJAmVe26nKjb7/fILMKCHkp/YsvS4SKIpubhlay2obRMgTFzHfJCMphL9Ck0zjnPwbCaaZrEvDpXmrXhzGHQ0LWI54/jGBp3eXkJiDWCY5Z5nabJKovY6TQmRK7wtKY1YR+YR+IL2QjSiogtG3tzjLFtOgv9mVWEkTFj3AxnaD6t0F4Gb+FCZ2gOAefa69zzxyK3cJHNyun3o2uC903T5CJmLezfUJng4Qql+1BKcblcQhBni4QZyLW2oFcWiwXRDG+tqsFnZw7Xz6iotsVce+9TGrG0EMRu2xYAmVqTed8E14hMIjKfz588eTKfz2OMZ2dn3/3+97ASnj592nXdV199hdqkYZ8Ds8a3FdOIB8bchRD6fowxjkPMzm7hqooVKTHzUXMCG/C8EcqftGq7aZAFjNg0TcweI1m7QffFKBcHMZsyBWZjJbnsqHGNiEiUcRwB3fGeibwwcTrShXZ9YVLHdcGCeXikwP/krsnL5TJG6TpqQ1MHXbSg9E2SMDPda9fGzEQOYV5mRn9755xkBj2Hwg1V1MsclLeICLErpNO+YACdJx94ioPtPlUFiBVBeNEYY+RijHKARZVbqkxJRA++NTOz0jgMMeWmcLkPW4ohtN41tY3lnLOk4X0FfDJl9Yf8kM9af6KZfDtZIZld4b5y5eN4xskP/6Lu/zptjSOopjFK0zSffvrp06dP/+JHf0FqauRffdhIaYU7/bdd6qGjIsM6XFbNVhKR5XL5wQcfDdvt7dXb1199SSk2IaRxwuoSEaennrRzpOocu27mU9Qf/ehHL7985YKPY3LOvbu+RRkGESEshi+xOuJyHT0MF2SW9x44SrD03d3dmVFZr0UqBSRI91KBSmpkg7DS8RRKATmDZr2G8iXj3WSmggmAJ0cl5wooVt/3CGZCN8xms7u7OyKaz+dRMuezcw63QOYG4hgaCKm11WoVMwdTrhqC/a7ETdOgeyuCrqhphispBTEEfYlCZJBUd6o+MFoZYsBRXEjHeyPLQc4uOOSCqiKyGmMEC6OWkmVgmgq+XREB1tJ6YZom5L/HcRzjCGPc2MG44vnDgYuYbkZ
7GUuoGzataRpEQWGaYKih3pC+hQZS4ZSSuoyyadt2NltDyKLjAhE5525vb6EA8I5Ugd5VFWhqeyQg6cZxtEGwP+GVU+EncaUpyDRNKaq1fICJhkKpvh/7foQcjDGiHOji4gJ8KSGE7//wB+v1eYzx7du3wzA9ffr0Zz/72ccff7zf79m7NMlqdSZW00meWMZxvL293W49kaAxAzMH3zZNI0lEJucCgufFsJvieID51JuCC0Cp3lO1vM6BJVXnMpOwCfdacGvlROL8GKPKQc7gpv2wB8hAM4BDUqVx62ueiD58oo5rhwtWqRbm15TSrGmZeb8fnHPBB7SvqIW+RWio6tZ1/5VTroZo4Gk452MczTqkyoJROTw2bJj6jsSCEB1c0q7rlMQH3zRNcBxjSjJZ+iZ/g3LG3fsGErvYo/nzGAcEBbE4D/DMWfDeD+Petrk/biJ3okr+DcrFvpJSipJExZjQvu58eggL/XU/f8N97eeTk0MiTRJdIiH9zW9+8+j84oP3PiLLKv/JDFj2HDBCm6Z5/Pjxn1X74jhVn6KiAjvVOeeoac/Pzz/44IOvvvxgv7vbXF+r0+A4MQm5KKllV+g1AKnGDLgUk2+7OE2r1eqjjz76/MsvICJ3Q//ZFy+/9+3v+OAhcTJlTM71PGBBnyjL+Xy+3W65IopLCWD+TFFJpZt9nmzHmnJYUotdSYZDJoc9bMKiLOvsQ0D71moeYUBQCiN6bKpUVdvSaO8wxM4hQjuOB64AxHJjwZDjylAJ3jOirJKTo5xSYlEklZk5JQNMZmCzqkIQp4wE9k3TzFfLxjOgXs45FGwgr8klZwxHNmc6I4POUFVvbm5SVa8FJW0JXSICDSResCl8UtM0dW1QVaSNjTc4laoVEy54AOCVqIT4iGgqtFMYXtwRf/W5SVSGhYsIKIJRZjOOI9iGl8ulMO33e/j6ULFQt1OhpcRdcuGsITALDA11Vt57QIuhZff7DG4fhoE123nb7fbZ8yfr9VpEAMXCkGI6TBMQ0X6/Xy6X3vu+PzRTwiufn59TacsYQkvkPv/8c5SKgf/kvffeWywWm81mv99fvbnu+16V0Z0CK8e4P6lqXTWOoxscu9A0Tdu2zlHbBu+ZSKcpNw4yU3sqXMr4omW+62WPAxFdq/yxdWuzXCskqmSlCo1xpKImY4zOc4rI6Ac9YHYK6JryTlFV5SI55dQPK9vnoDixDgFmbEOjyk0jzExKsDk4owQOKpaZHeD3ptrtT1VRu1RRukTKjoUz2SGXONnJiOV3L+NcdttEmQnSSNSTSIZeYzs7szAyZLrLDyZm4iRmLwKi7L2qKkvTzLz3QKJhkE3omRTCIrEUw32Fd18dHtkQx0d+Zknf4B/e/26tgx/867/ZOAgxpeD9EKdZM7u7u/vFr391dnYx71BfwcAc/ynXshVQv+qf8gT/k4fjQJ4kq1Kvknnnnz59+tWrLzbX16AyZnYq6ryXZLRQzMqSbdUMwCHicRy/853vvLl6C6E29f1XX331+PHjxxeXVKW3v+EFDYwmKoi/GZrGl6oVlCQxc4zoq5NUVcmJIOp1YGOHCW+IGzNjzXLTEvPXQmZpxriWVNN+v99ut+v1GuW/XdfFKdoSn0qLtxij8oHyTQr5O5VCYaQqkWsEvtf7BgIRDzCbzZwnzRYKee/HcRzHfjZbMHNKKbQNe5fbunkPiuN4aDPcEpFpCCq8nng2V6g21uv13d2dSiZfRH0Lnjlklg8KIWy3W1eYN2BOGW5W9VCOCVwPWAOJCJ4oV+A4kwhmFWGiwRmCnWw9FoFVgRey3W4XixV6HnRd9+TJE0BsoFaHfiKi+XzumnBxcQEbwgSNwaSHYYAuLMipvMtSoSvRUhMcSp0SMwOWRURGZA1X2PjC8BVzgrU46Fwi7bFQfCNYstlsYEm8ePEihLDZ7HDC3XaDWEgJrcfXr1/jkRB+B/CKcthTVDWlvLadc8OQEXMoAybOva2mwjumpQTAFIbFmakiYvQF6iwVeIcqPiaMABEB7FO0leoxFQPl8XVKh850RBTTxBMREXKUWo4QHBFFyqF+73xSMZkpJCpqOXh7fugwswiZGbPWOH8gN6VDrtqshEPSulI5eP4iGQ7JFMlL91DYyiWiYy/rOIiIkFQeZ4nne5qilBUu3nuh/L4pJWL1gakyUkWEXSAi5wJ2EBosmshSTTEmbDfvGdEgZhamru2YGTUC9r6W03EVzs7E7H19bJPydWoVf0qlgborWLxvUFV2TboXi/5mBXfyYPQ1+sKllJIIBBl79+mnn37yySex8Pb96cfJKKSUXr9+jYqRP/dxFB7PWThRImpC03Vd183ny8UHH3zw4Ycfnp+f+3BUJ6dStkHegnjm5EOOtDjnZvPu8vLSWrtP03R3d6eqEExAGH7dk5mbS2XC4BzYJrGBMtmd5amqyRrbTrV5axc04Vj23gFzwXzg5jJRBZAqXhfGLCBCqcCm6p/hnRThEsC5H2NErNiX5riWUzQ8rYF3pKq+hWKGMwqvLuNjOcPfkOBxFUNIyv0JclGEvTs0H145hPDkyRMiQu4TzXF3u10qtJfACcNr18KWhfOByIVl0xTajbOzM1cA3tDWNuwIYqPAiQpSCT9DQHBBdeLJ9/s9EGEYEBFZrVaLxeLq6gqlSqHqhY4xBAQdLrj9Wts9yEkvl0s8MBaJRZJFZD6fo9rbpsbMLyCu8XjosWgOqBW3tKXJMcLOCHhAWyNAglBhU3UXxjMggH9+fv79738ffSy2/f7q6urdu3f7/f7Vq1evX7/mQlVNlfephSKRDgnXSXIpbQ/bDssGizPJxE7RSquWs8yM/WX6gypFhTnypXkRUgM4zZdaIMs1ZJ2kOUpsis32YypHoaNRYA8ngRAV51xo8l3qnW7iAclsswks1LFcLpFHCKFFAV4Be7MZCieK09ZP/ZAme80DziqHDl+sh04ta56IjkFnU0VvZ7LIvuK9JxYp7RSxVqXCwYUQAPWqn1kLrzu25Hw+n8/naL6EGUTsyhdomNmFtXn0oDKr1SQd6+n7R/1UXFL+33D+1x3/WhX54OG8cwl7SSYiYU+//M0v395eCSVmntKklIhECSkcAtiv/KuPQyhDS6bt6xTVv+l4+L5sxCXqVEgjswuri4vu/PzyxfPZxTkKBjWpSxomIY7MyqyiCkY1Uq/iUpTGcdu54NOs88+fX6JtAXnvmubt1fXtbjclikLQxOyUOJV/gnywFOgpESklx+odeUfn52t0YIVSEXJTYuVmjAiM05QSOR6mMUoyokQuLL5UonbTNIkmH1xom6SUlIiCbdT5fC6aphSZWYUc++AblezQPHr0CDYmaHWnuA+NY6fOk6bUhtCG0O92++2u8SGgiVgTphRv7m6HafRNSJo9eNtCntlzIomNZ0eSpqFrmnk7DxyQL4+ZPbFjTrNZIIopjiox+Awn3u/3Z8uVJ6Yks6ad+mk+W45D3G4GpmYcpGSRJ4sZomthjPHxk0sfmJ3GNLZdaFrvPDlPSaaYRigeWA/QNCICEyGE4AN3s4adsif2NK
WRXC46opLIxwJD8RL+Tyk1wc26Bu3Zm6bLIf1EpA4Rhf1+P00Dq6Q4jsM+TWOaxuD4u9/9NpFcX7++vn797t1V3+980KZl15Jz9Pr1K1Awdk3ThuCZNSXn3MXFI1UWoSjEvunHyCWTjcpmLvweIpLSBECQL11JQYvYdd2jR4/Oz8/RniiWzg21nde2rWj0gWfz1kaVnd5dv4nTfr/bpDS1bbtcrF1ob+52zofZfOFDk6K0TUfKV2+vX3/15u76SiVKmiRNq2W3WLbE0QcV7Z2PiUb8U5Zh6qc07keepBmin6RJ1IyRU3RMwXEzTUmitqFhPZiYDbuxH9DqgERhK1jfBTavl50jzsu4dMEqulaZVSS6qjbBQkdKQqxKwo6UJKYJPWstXt22s7adNU0n4pgb733rg0xx1rTnyzPvAm7tiFlJk5AYjZQLwRmGIId8Go9/7KhpAweepgFWYz9OY5xCcClNRGLs0wifjjG6EDLqojL3J1Fmzy5odlQO6TDYNzFOKUUihTpjp03r2y6oaowjUSHFcmrW+WzWtW3rm8zinqbISt7lVEu2xphAQE/k2rZtu1lMiZjVsTqeJCVS7/04Dbt+F9pGyQffReF+1OA7Vb7b7MhlfwMRIyn1yrUPaju0Vre19qXKNNESu9KCYx2GQWLynElHqaQY7FsnWkc1MStaYOVPzEhzjB+ENGkUSkJJNf8zbYVfjT/k5DlD/QsMjd1u88tf/nL9X9atl1k7YyKDsaVjqo0jRVhZJBB8MKsfPPnPeogV2uWxJqcULi4uLi4uuq5bLc92220XWkJN7Qkkr1hVWTVqDpw+f/78+vr6D3/4gw9dnNLt7e1utwved10rcdR7tX3ffKjqYrEYhgNgoSSQsr8opK4KhVkc1Tzg/KoiGgFNPBTM4XMr22hD45xTyS/lPCG4p6qgXUUOEsFhOEzmre73exjwWrUE5hJ9JXQ7rzp2hRAWi5l5BlDMNdAUF48xDsPeXF4ztFFM7JSYyUjpbDvhgvt+sOpAXMGi33g2bNRYmiA1TYPs5vX1NSyPcRwfP36sqqBTJgjcxp+Y7Tb48BFD4YUvee4cxtzvNs45dh4+fdu24KtarRcFuEvZMdUcRrq9vd1sNsv1ipmfPHnW9z18XMxI5wK0CCLD8Dh3ux1oMTAp9qj4FjLNiFrFUjPKpcM0XplKfpqI0GcQlx3GPfxydFOwhDQ4QKgUaEpJBg/D0M5nIrLZbLxvLi8vu6774x8/efT4MXIHzPzpp58i/OC9b5u8Nna73e3thoibNhCR8w2IruAbhaYRpmmMyr7vRyJqW7/Z7DabzbxtnHO73e75i2ery9V2exdCaLV99+6dGWGWzYXIxtS40oM5paQpC2gEn7SEMU0ce+9TEmu1WQc5bYWfOH9UfERzyMzZ6Lru/Px81s1Sn7e2rSURQyEhA8KAucFxn+KI4oKsZuTQoFAzLaVndIdwYt5tidudhkNFJCWlh/iiXCFzRtc1OibFu+9fuqILV6slUlcg4ENsuXbK7TGAt2iaLrRNNJ7aAnDB1Exjbp05m88Wi4VrQow5ueCIE6HkGoHM7mQY69epn/b+YY44GYb8mKPDxq1W7fX/9fnlkyNs3UOq+uE/VR8e7CQTdEGKsWDhhZjSZ599dn5+/tc//Ssh8VRiofS12vebh+Df7WBmInbOE/vlcn1x8eiDj7717up6HAYiDY2LU94MdG+S8D9aLwDG8vjx408//ZSdc15Soi+++GL1wx+mlLxzKaXgj15NDdd/PAJ25fV6PY7XsbRutZAvGqQzsypDjRGxhUNPhpGZiXRKsSkRdNwwpeR9IKIqB2ydeXK+M5U6HNDZE7mua6FsGpdrYX0B8cJOHGMGX4DFiQv+hYhypJQ5FbpmpBKx1kuqm6bC14iS6Bijq6QSosRpnKgQ8WPfclVBhKijBe5sz8OAkOPUHTiNI4gpUkYehdLHFyAmSyuqatu24zhJSXWnUlyLYAMko+WlELoEd7SD/auKbkUm/iDmMLlJJhERFkTyiQgNjJ1ziPSiuno/THiS29vb1epMYjRusqZpUGTVtu1QuCxSaXJsbr1m+AJpmiAEMQ5QwFY9rKp930Ov+9KeAVFrQAS4NLRAyhYK1beNc2E+n/fDdHN3u1ydMfPdZvPHTz6BiEQgEXFs0TQME+YxpQQoK7FDPyhm8r5hdvv9HpislGQ37jabjWd3dXV1c3PTNM16vf5OjN///vfX6/UwjW3bvnv3Tikhlw+zoECBtMxCsmAp1p5SDjITWNlTkoolquu6zWYjQkisNJnZKieMcRRzKpfpWxzbVq8riS0RAX9tKjQ7dQrPoilYzxYqh7Eou0xHijtKUo2p2K+orIumJEyXgKj1ZPFTVnUZj5pEDplfLc5JTCLqM7FU9CVzTsdgLiomjvce/GVIUPrCDUmGGy2ZVBHxrokS21nXNM00Zq4xDh7byint+r7ve3akkpl8mDjMZk0h+iZhHw4Ziiot/TD66RsOM5ukSvPVGrdWvX+yDlbm3KbowYc5+YodnB2tw1/tzMDMSTPPA1ANzjlN+stf/vL95y9evHg/sXjnRcWxIZAtC/u11Bz2kua5/687ytsagIIdBXJ6tr746KNvf/KHP4zDIFMchwm8P8xKxCezkucpdxEYUpwQuHt7ddu2rXp5+/bt1ePHT58+cR7waTYc9TevDFzfNFBKyQWDYh14AMxSdpxjjOZoUrUIfPDDBDF0MNKnaRJxFh9umsY3jom99ykdcrq+UBJaQpeqqAbgOfC6UICrTOBCktLphZWsrklVXeF1MpUZQmbpgpoBMJjgmqPjLBFSXJVCFWZGltrESiy8oaKN3utIUc8dLADz41En413ThMyhA1JitBwwayCzCnSdZT2h0hDLdZkWNMdpQ6mvnaZpPmvbtmXyUVAufAC3Y7WjdZX3HkE88GI6l6fDXHnLRyxmMyESkeVyzcybzQYVPs65J0+ewFVC2QbCD6k0scB8ceHKiDGO0wDiF1+ayIrIMAxv376FGr65uZkvZq4gS11hpWjbdrfbtW1OQHIBCrVtO/ZTjHEYR+/D2fnlex9+4H3YbDZPnjwRplevXr25vprNZl1oLi4ulqsFEbPzzpMSi9IUk4wTdic7H1U0ST+madtvt9v9fv+HTz6FxlovVxjkb33rW4+fPSUiwAW227uz1Tqm7OJLSSRn2cpZApgTWSKWhGFvZ52pZ0akXWSz2bx7d5stQudcYSIzFetKzborNR1YbyFYq4YDtgjx0nEch34UzqsUe9+2nu3xlCaDg5l4NGBBigfeSpFJq0bxltl1zikR5TajoGomZpj1SqSWZU+VJkCWR6tULu7rSiyBmakUQ4LrxnvPTMANiMisDU0T7HlihVimolR8IUDEroFERUEETHZo/fl8jhXLzMtuTgVUD+Zpk4q1lVOrtFpuf4OPV/stfExEmF3qUoRkgrr+OaswUrqnUOkhPf0vHQ/rwcwCH0tEdEyppeB8GMfx7/7h7/+fj57Muhmeh5iSJO/8g3r3ZCy+YVz+Fxz187jSqkFWq
7Pz88vvfe97m9vbzXQ3xVFJ0VaTjmdRS+XZFGO2TKex67r333//9m6fUlKREMJnn332+PEjIR+aIKruIVrw+/Nhc4lKmL7vuyb3x4UtUCT4AakYK2Yfi73k65RODEBu56nBwaopR7qYWZM4Zs2MiblE1VxYBDBRbZnGDIhtS1OdXH+pAheQiGr4T049OscVlxOu2bYtEdA9cT5fx9KhJZXmQt65pmlADAnTwZWYGJf+EBB8XOJ7bWmibH0XqCDLYC6IyN3dHWp4YoyOA3iSLS9oggxqlVhKWi6MY68VksXkKdoncOltbH5PTs+TJMU4ZFuqhlYhkuFD/qIvTQDblJsb4hX2+/3FxaNhGLb7/dnZGSpKkWwmIlB39X3/6NGj29vbpuvMCnFV4rbvezjEphWM29mVejOMG2wRzDgX9JYtLawHIoJDbN7zdrt9+vTp3WY3xu1H3/rO2dnZp599/urVq6cvnq/X5yGEly9fvn37tu/7x7vt08ePvJt1XSfC4yhELilNEX0pRmQHbm5ugEfb7XYhBGJ3cX6RzR3nu1mjql999YZFpzheXV1961sfkuM0SfBeK0AsZSzukQClop4TEaLQMBwRGAAITkRev36tpZbPssu2YbH86mwLvmWIFrsjNikiAX3fT2N0DWDAhxonLpXrpoNrIAXQUXmz60EiQb1NU7LbpUPPaZ9SIgXdOpsmSKUTmqsKBe1RDZoXQoCS5mNwXPn/4BQ655wDA0x0jqwoCF9MMtVLCNbkfLmAiVk0KMuUw1dYVBgN9Bexu2MNN03DrCJIbC1c6RdHx7Unf/qhlT9qIqi+Tm2O6HEA2f6vuK2UiJSJ7pUF/ylHbQ3YCDNz1QBZNSCApqppCiG8evXqF7/4Hz//+X9SYu+CqFTurOm8Qkzx0NP8+6phOxyzY2rbRh9dPvnhD3785qvX//2//x07JtI0HQXSTchChqaIUncOzlPbPnr06Mmjq08//dx737Rht9tdXV09f/4cmk/A+MxE1XwUvXiKmE8pmQK2E0SkEDiHGCdVsEseuvqcWKzKZIsYCHaoDTiLeACE1OI4SYVBKKGmjG+EYkB3hGEYrm+voZ5jjAiHwG/r+36YRpAjzufzfrfHrSGszXg3hV0UmB8GseAnZWU/SklUyyEfptM0Ns5775GNw1tk1IynmEZAqPDwoarF2mw2CCwjomhNchbz1dXVlUGI8SdXYL3Y9icmRVPa2WoJTZsTifBgKoDz+XzuWKcxEWej3jmCEUAl2GgZa9Q4htB6P+AVZsD4FbE1n8/v7u5CCPOu2+8HGDdd4wHexgXzCfO5C8Eyx/BrIaGMUmoq3Qh8hVJOVfdJw3irKuaXSp1VSgnV6haDxSoiophUyc1ms9318Prq7W4Ytrvd+aPH19c3RG6xWL148YKZb25upmn646efPbp80XWTRQ632927d+/6vgenBNLwTdc2TdPO5vDgkafIGZBucXd3t1wu73bb7W7z3e9+dz7vMFzjNDnv9ZgXxfbdVLo5UbGBIN+HaUS0YLvdws1KKSHvGAvHu5YIZyr9IrHebLTtdrVaNXPNFVpvLbmAes/az9gyi8WqaIK8kr0LRDqNhRG2+KYxRlWChceszJ5I4enm4HORLSZMastVVT07rrqJSEm9Za2GShDyuZ0aIxzCzjnnqfzscQEEz7VwnptTYRLecSAnbTtT1RQP4D4ihfRDHYH3vglh1nUp5npRItJ46J+BkBsfe5Ynv9aa9UT03z+ND9nxHAg50Z33tW99X8qfOCISephqkpm18DvpsRtdTvCmF06e9hiHoprHWzVKapvmn371y0ePnrz3/MVs5vSUIUsejEU/8A7/vgczE3mipM5fPHrkPP3s53/96tXLr16/1MRJUlFgR0+I/xvno2ZT1xGHEN577703b94A2tC2zcuXL58+fQpcHDMi0XmRIZ4BuHg997YfmqZFCDRVdQKiBAaObIlLzlxaZkWZ2DEpqahjlySCdgTccloK5rz3znPwwXuepklLk5/QGD19SqWwGBsVyd3N3c45Z3SPVBgzoOqQ3kOTA5gFWcyJtG3LnLkpfMF2WiGBL3zalMUHO+dUZCoQXCiJOthbr5wpDkrZOoTcrAVZjNGAUXgqPGff97NuEWNcr9eW88ZF0ECesjGU84iQm5Ap+MQYHmCjpEJzISJAJzU++IJDTikBLwLrgTLtYrB8YYqaUmrbmY0SlTDmZrM5O7vwfnj37t1qtfro/fdfvv7q9evXZ88eSQGewLKB3nr+3nvIDeMKqdR+YEwQYFdV0Bma4QU107bZEZ9K/2Cwl2jpBo25ADjIvovPV6vV3d0de4eE9Fev3zx79uwv/uIvNptN3/e7fg+0RChdk1NK2+1WVXf9/u3bt+/evdvveghW773zYd52rqAfHPskcT5fqqpzASUA69ny+vo6xvGvfvbT1fmZTJlnY7fbTdPEmhcMDlJFINGktoggh2LRzpTS9fU1eE5UNcbctKOuN8OqMwUmpewYA2KmJE52JTcERxn2ECz5WIBvtVThii/TeFqggHHEeGgg7SqAlfeckpprS8W2yIqkNMpxDrkJdS5YXklVPcDg5TCf0jkmValexCL2zpH33odsBCNKlLNULMy50hL4Bns7sF8hwCYHCwlQMPbe73a7GCMTuRLjQfitbVtQf7c+oITkQFNTFT0bNMRm4UHJb5+faGgpRGMn80L3Ur/117NKJkecWVXAL3j/NK2UNGuqyatUs81UP7xpBFV17N2Ucp5/HMcB/LGOiYQcp5R++ct/QhcU7xwoS6pDqjsd4jP18eBI/RkPJS0851qNjiP2TF6F16vz7/3gR//pb/5GRKaU4fJm79i3DD+Jxw7Bec+a4vn5+fPnz13pYbDdbne7neTvHkUViOXkfet1UC6bwT5SUG+mWlJKcUrmbMUYxzhZ/xkLkLqCgLDoroXLgsspxn63JaLZbCaSXIXD1BzCYuRKU0q7ba+qFxcXXAgf4KlDhSwWi/P1Gbgh6xRUSmkce81c0x16uZsfPOY+x4mZ1+s1wneKzqnjCMFXxKVAAUA9IOKaUlLKreZRhbxYLKBHDXUFIQtDarfbXV9fo9gmhHB7e4tB3u12EJ2wA2az2XK5nKapH3au4KW7rst9D8vw4gmpSo1DkcOksLQfFDMKokD9SIVgC+KVi7/unFutcuMBVU1TtFAY6D+fP3+eUnr9+vWsaZ8/eYqIH5E0TWNNEXAj3Nc4NW3lUNVmAy64FH4o6GYp5RwiecBhSBERRtJSofaC5s2Dp9MxDC/39u3b6+ubvDYuLxDUEWLE/8/Ozrr5bErx9ds3v/3tb3//+9/3+2G9XkP7YvU2TeN941xoms77zNMJyQujB+v5xz/+MVbjECesLi4uqS1FvL4j5uO0hRarFAP47t07LLlpmlLKbTO0eKtUQhdaAPzYXxZeshPub2p7HqNnsYRFqto/mDLDMpbSKxpR2frrRKR6ABhTwbTHqhjBvHBSp7kp0FHG1L7ODjFqco6d4xC8c2xJYkKwhIhUSQz8nEMgYL+ZzdrsMGjm
EUKkgJLwMcmXqnbzTANQzwLGzWocMNGqlqhmmLxExKKQJHzMQynHMY96Fu5rTbtvrYyx4+zB7HMTy7Z4anFN36jLvs4IICJWqmDqD1CIaHW4dK+MWqAAVPDQX3716ne/+90wjfAdlURJk6SYopISCcotmPleZfC/x8HEJZV+GpRn5zk0yrxYrL77/e9/+NG3pVDwjylCAGHDaIk+WdDJFACxfPTRR8vlXDIQN3z66afTlNBoWjX3YqJiZ8lJyraCDsFvW6/XIGqwhQXRgP0Pr1SZfHOgIKeyN6hMk7O2oCUUzLkKJUd34e7gfPMCEeRh8qSO1MUph2UsXGnkUzYsd3d3qOq5ODs/Pz+fz+fO03I1R+wXD4C3Q4d5V2oH8eTb7ZaVvHPmQ3PBWCK5i8qo3JfeOetyiGewrFtTMUp2XbderyGv4XWh1OTm5oYLM9QwDEBvYqAQXAW18iH7rgrOegRgodgsDM6l4YQWojEE3qVAT+FxzmYtDA7ouRDcNE2PHj3CyGCU8OJgs/KllQXnCO0WEX7Qd1iPhHEcU5qur98SEQQWjACDowMjjQeGerAcG8bEwOqXl5dcCGEwMrE09oE3hjFBTh25c4CoYaJheZydnY3j+ObNm/1+///5r//f//bf/tu+H2fdAgHDlFJSWi7XZ2dnEMFg4Lm8vOxmLfaRmbyI2ztHKMadzxZwidq2XS9XMGt++tOfgholw569w894eCoRVxOLqFy3jW/JFyJ69+5dmW72PtNiQ+cB90dFFuNDE9ZT6ZxBBRJsQl8KqhkBBqwNLUHgVNp12M7F5zB9sGymCUSeuZ0J1f2LRKjwoJnZgX2Bqm7nnHcNnE4iUqK+1DVxwUJKKeZBcCiVtlH2PGOxMjFgtusxWev1GtRXZ2dnRMROrawUbULMZBERMKqGEJwLIlI4P7J14oPrhz0mLidTCsHOcrFGAgtXg8rH+ncn7CiVRK1Vr6omFVTiCqlV5ZJj9i6pCOmU4t12048DOSZ35IZyhcyaCtM4VXq91tZ2a6o0dP3hyZ9YnSu02Cd/rTW9I1FNUn+TEIv2PlECMeGvfvurly9fSgm0EilMKjqAxOwh/gN08IMAM3hrksg3Mx/ay8vHP/v5/7ZYnznPy9VCSfop812YdYbD6dFQQOVcXl6itUvbtq9fv76+vh7BgOqciMSSmHT3qrRsRs3hWCwWq9UKGhdzn5UxGL0o+9Un6yzPNJ9M9mGCncEpE5risSfWmEkZuQAu4nQgU8S7x1KWkwoFUt/319fX4DMax7HxoQ0N9BNoxaiE5rz3cA+AGeHSrY9yI7nG84HvFzscQqGMTTbbYSJkx7HJs9k0DfwqmBRALMcy7AYppwr3RET7/R53vLu7w32p6LD9fr9arUAfzczohAi9BbGIi0ipKqayUSER4ItAa+LdIYyAssa7YA+jpAfTvdtt0FIQbyEiElOx7pP1vUcKAJOCSqE2txBmpHtrAx+DbENqTGQQW1DMl5eXuALmxTJwWjpJmCOupWLbF6AAUF2IOW+329VqtdlsfNuklPp+XK/X//D3/+Ply5d937fNbLlYz2fLtm1d8HBd4NtJSYcTq2hq2tDNWiNTs0XCpKzUNW1wHtGXy8vLtg3mgGaDWA7mLBV1lQolXJ59OXQ10AK/ijESOe+b2n1ERMR0pBZaLlsAtvuyTDiuR6i/dXKmzVEqXGkYSS6l1ekY8GVfx1SKUCwdCYkI9WBYNtvtNue5KWeRtPjczJxEtKAgMX1t2zZtCxt0uVxaU86UEujWNSvjTGHrnFNNTdPENIoItp4lUwCipiS+9ByDqMRjU6Xdi2JjVQUm3JhZsc5xwa6dGzse7sLBWwTiRL25P62URqtDCmQyVUyl9/VlEaRHavLBc+ghxUwP6WBVNpVkAlvv+cF5aclxMTWVhTVlwJgo093d3T/+4h9vbm6mFFOhsXYM6gjjuvoPUb10rH0Pz5DNEHYIcC5XZz/6yV+8/8GHgIFYxAlhw3qC+fjwTI0Pl+cXtujbtru+vo4xJlItwP1UhAUfe+EnzwNDD1zHVNgoo6SYBEacnXyyEEUE7TNPrn+QEXTY/1xC4jgnWbdwyWlXLmgsbHUt9MhEBJgYLGhkgEIpwiHOtkIap0oBE7MaetkEnKpCICaZ7F0QhWvbFlgPW3hcBdJtOrRAXi06WrjrPNKB3nv0CcfGhkpGDgn4mu12d3e3efbsmSWAqZSQchUbxAviV8zO2dkZUEi1ANXiCpvmg84Df1DTeNXcLLnve3jbuI5lxE3OYsBjjNY3PjSu7UKTuU7l4uLCHfC3EVSUXMX3zG2yBQwNqhUzMPQfZpNLlrQp1CjMuZWFZU+h16G2IbkwX1dXV2CyhB+PgMFvfvObv/u7v/vd7353d3eXUmLy3jWkzjmCTDfrx5QoEfngjPupaUPTBjxGCGGaBhFZruaz2Wy/3yeJ0zQZP6VMUSRbnFTcOPzJqubGcYzjZCYaZ9czx34xoXgqKGAMFxUDzqS23qO1p3vuF99jovi6jV+PgIikNKU01eK46PjgXDAjDIsEM2g5Bc4Y5iORoiVmG0LwFWELEcU0Il719Nnjb337w/mi2+7umtavVqssZIiICPZojKPh523l49nMK/UFKiWl9M5xKJu3ZWb4aFkFMsGY2+/3SsLMbdfNEH92Tdd1KfdzI1Xl4E/y8Sf2zYNK0b5+8kX7FcvYl7ZaVOnLkwven/Gv/+spJyNiziXyfGorPKiD7WpGk62qBy+ejLqMVETmy8VXX331i1//ou93ZlnAZVPUwxDg2lKe79/zOHnhPC6wwpxvVEi5Yd8u1+d/9Vd/Ddyvc040W7sWX7pvnuDztg3rs+UHH7yHlbdYLN69e3dzcyeJxhSp6r5wcoX6Z6nKiqAt8uMqpXhYiOUKB/6dU41LD9sKWlIs+TGQ8Q3MJcfGhfO5wiIdSl1ToZQbhsFsRiIy75BYYJvbLWynceG0wq/QWFQwYuZb4DTQ4FmgHiODFF0IYYqD5SNhIEMNmK+GSDIyvub1UlHhwGoSARdAPkO7E5euOPt+W3r9skXqDOdssowLirgWAQh9I+fNxUGBmry4uEB8GMML0DgRWa4Bf52mKTRusZyh13qhbojjOCJuHNNo2vrVq1cVLJmpoLfwmiapbT9S6TcAKbnb7UxK6sFUYnjneE0LurqC28LPyIkgqR9CWK1Wy+UysEMsFLr/7du3v/jFr/7hH/7Hxx//8e3b6xijxHR7e3t7ezsMg3PkPcMi4SpXqkkccRuaeTfrmtazAy3XMOzbtl2fLWOMd3c3tlaRa9R4YE1n79gXHndVLSlYxCfMLjTXDYETdAVdLpfAE2DwbdtiSZtdVe9fU/P3pfOJxDjapJWrJHIUX7RQc/m6Yz40PIbyo2ImImKENzVvAUV0cszSbK1EkNNxgUProUSXy6ULYX1+/v6HH67Pz51zxKKUc1LMvF4viQQgidm8RUIHpboYHBD1WLzBtobNglmxJsegfYkIiQwiwvWdc6Lqm+CbA+bGGkkxHzo612P7oMqszZcTYUuVH6IV8Wc
9L/enrF5UJ+q8OvHEX5WHalHvXf9QDa719VU12DrLt88EooUqlihxQt+33/3udy9evPj2+98yZpny6iwqzE7pX0fQ+Oc73LHudzUqW4i8a7yPXTf/0U/+8uNf/fJv//ZvITrVMeyjdMz76JRSGUTTAT/4wQ+++uqN936cJhH9/PPP1+tlaByXpIcPQap+pThsleBjLvVIy+Xy9vZWVYnv4+Dx5JlOjh+y/qyKsaw9IUI7xlP/+PjK+VtmR8PdKa3lAlS1L+AjiDYIBXaMUOd8PteYc72pFPxhiSFMDUsWBQZTaelj6U9wSkhFjODpcB0pGCs8hnWaQ4wXltM0TRcXFwh6U9lprrQqmnVhNptzMdtjjGjGTMXmgERoGp+zrf0A0zU4z0oS0367G8dxuVxqkpREnVPnsjL2wXtfN0uwnDER4cPtdptS6ppWVdETArH0JNM05Xoq0aia0EiKrCSUKMZ4ff0W2Wu8DnhFuLAccHHcY6HjNq1MRdaEEJA7lFL7a8HVEEKSyTw/C2Oa78uldTE0lojEKYv72Wy2iPHdzV3g0LZtCC1K1D799NPr6+tHF5fTNL1+/bqbz/q+T0mJWCSHPYJj56xLjwKp4D1QI55ZoSpa7ylFp9Q1gZIoTVKkUz3LENmYEdsFzgiqfPZ9NZdKN7b2pBymuWOhu7LrUyW77cx6E51sxiOZrsCkHGlfV8q+M1vZURw7B2/NSG3RgrEEhJi5aTJbSFMIYplBNp2f0BYh9qxvsjGBNtuz5cyhlJ8lNOH5i6eff/bl/u7OM09x6Lpu3i2wMC4vL53PRYz7/c5nkNShdUSJNww2Jg6YLqIGxAZlcBLlIUgpbfY7jMahTN8FpJwQJTIwPwJpaD98Ek007+XrLJ5qSA/a2pVE8lFE7XjWvuE4EeMPfuVB7fsnXjnvQT4OndvfhPLjYjiiJGb+xS9+gXo+d1wZ/ae8z//6Q05+YDjBHIhIybXd7Ozs4q//+q/Pz8+zXKgKzvQ+3K4KK0AAvf/++3uUkLYtyATwdSv+e0hTPhBVbpoG3eLkGGOpquBROjyCgfHc4Wr1TNnhwxEY0hVQHyBCMF2hDFTVXDc4f9gJOEIptzffxTnnfIaY4XMEJKlEPpnZ/pRTvKIG9qaK4U8KwaT5KJC8WSVwRo4YJM38M0uZh0L5671fLBZW1ws9DVAuVCwQXihJAkBMRM7Pz83U2O/3KNqhkqXmwkMJz/gkgtr3/Xa7ha6yyCcUHgwFBMlh0/R9v9lspmnqZg16GzS5t3xCAc92u93tNpxdvQnJYC3xA5RtIMSNvJ1oxuXivhgQeMPmt5k6ASTHl7pYoMfxp/Pz87b0PsLTIu6Nx2tLu2gseLOH2i6XbGHXiAg8LWbe7/q7u7uXL19++eWX4zje3t4CVOGcC87P2m7WdkA/VQkIh4wcq7Kmrmm6ptE0TdOELIMBc6jSea6C+JofRsXuqaWtK1lJ3zZwf433TauAbR1W5QqlcXJrvnec7PGTD7lKNJh3bs9vk8LsbV/YK+AKrtTm2RYzgD2VOu+madCiwlV1g7b34egji2KTi+LyF+89e/z4MZcEAVqwnJ+fX1yehXIAjWxurnMOtoLmiqwD+QbeCI8H/vlaa2JjknccrDV4wpKzuv9+GiE3fMGN1sLNZBp9zfHgdFAlb91x/tgm69+gg+9P94ne/Lqn4ipFeHK1zBZ25GMXrdCGTJ7Cgcf9OD+bf/nq5a9+9asQwvn5OVDHqooqoP9z6OBs31DZaSLiPQvegrzz4Uc/+tHPf/7zX/ziF33f90MfOOuhGI8qrJxSYiK01XMyTSlO6cMPP3z56ivbTm/evFmtF03j4UaP48jHatgmpoiGQ/pquVyC+kCrCnEisnk8nVQmyVb2IU1F+Xb5W2Vliat2SEy5jS4+gbvfdXPViAKGVJXhwqQdqpoHLtQ5JqH2+32/z6gi761uXLtuxqU1AvQEbuqctzWaUgrB416+1NKgiYrFxuu5gwMHu8FUiHNuW4iRvfer1Qpb3aPoM9J2u3306BHaFALZi+gcld3oCmDNwKtcGJG4cAWnCixjxkqWWW2bUgKztBkECNzBR8d0I4SLB0bmGObF7e2t934xn49FNKvqNA2QcXhms9mnaVqtVvv9HrFfnI9wOluCrZiAroyetb+EN1xPfUq5azIMC+DIVPX8/HxfTetms4GG8452u90Yp/l8ziH84Y+fwjaKUTSJd2E+D455HKcYQTUzeHbOM96diocXY3LONcU9pSKkABqapqFt1/N5572PCX0nk6o3s9hViAHbUFw5N6kg8LlCooWubVxTS2Q9ziyatrbn1OK20rECth/oa/wwZq4Fb1N6KmuJQtvCMDuJS/Qblg0Xx302mwEnhdUFfxFLiKuYloiIJkeEgcLnSeTy8vL8/Nw54nAIdUDJUQEkamlq0vf9crl8/OSyK1RrIlkgeO+J2HzrlHQYJq14bTEDbemaUBkrhJdC/IaIuq4L3QF5AABE0zSJNO5jCKFtAxFNKXrf3Fdm9VvXh6qSHmb2xAyqw7RmUZ3o46NLlS/qccTbVsv9c/7Nx2HV5Uc/QUFLdM5NKSIWABJdlEn8+te/fvv2dYzw+dS6N9ODHCH/YcfBCcY7et+klGDy//CHPzw/P4cM8lVPITtse1PJfMBvWywWP/zhD+FszZeL169f73Y7AENgvllw7/6hhxYFAoIIQCH0GMpBxwEK+1zKn+z8Oj5mtmpBM2Zz2NxN2NGWvEHMmcpKgokNYwWft21rqFouWWToG2gpQ0ia4YLglSvtZrHi7crwA0QEpRTIpseK/GEqfIrIB/vCGgH/korhj0ZA8HRjjJAgVNLw8FCRtNaCUUdlFyZxu93CCQMI1gbQJBreJVSUWzbsNoZENJ/PUeIcS89aWACuMDP4wuZvn1NV84aI32w2G8feUrZgZqASMMCYYKhFBEgZLsF8uBHI1OJeuBGkKiIc1gsZoT9L4E2FbMSVJjOG+kanJqwZKcWyIkKiWDO20haLBYbdF9pk7/1ut2N2yIZYeEktzyoHnLnpnmmavM+KEAi7VFoaU+VhEB35NKFwRJhOMk/RFe5VLFf7luWDzeqiY0Vei+YTEWw/u3KcfG6vU2/2pnT1waO640wzDvvV5ANRJmAPBaoNGnmAD4z4zFARMUY0jsT1z87Onj9/nlNsqohkEEkIObCx2+1ms9nTp0/L8/CzZ0/atkW6pB7SE7vBXlBVicQSIrCbbRzsTAuTqCrMaDgqSOLAK0BZHdbSlCJMDb0X8H/Q7rm/QurPqSTRTr7C1WEfnlzh/qXoWEPfP7M+vvmEkyV3oK9CZIcicWLPrSYiYfvSPu7Ep8gTz/x/+/u/fXNzHSUNwyRCmoSVWapaJBKhlIj+jN2AidzX8FkbGs0ReSJPuTg9EUV2SWliZhENfi6p8c3KN4vzyyfL1dp5HtLoA4RjIAoqjaoXdoWsQ4Jy571T8Zzazp2fLYFGdtyw666uduOkvplPouI0SmSkGZiVKImgPCDlPnFTjCOzek6ahrPVbBq2UxyTxGox5c6Rzj
nA2Z0LRE4jaSRPAZCNspPJswTHjQ9NaJk5ijI1yg0555umH0dXChMRs5rN27YLSQYi8T7brVFkjJGcIxeiSN0LLAMoukXXzoNvxyEy5f0D3UPCcUzBNZvbOxKN4zT2g6oOw6CUAJzGHkOnh5iEnVfi/b4fx4mZhelut3XOkTqpYvLjODrKYChElQ9VQMLz2XLWLfBId7db75r5bDmfLb138/lsv991XTubdc4xOD5jjKIR5TF9v4MB4b1HyaCQuuCFcqc/y0mnlKYUUVCYVMgxTkNGRktJj0XRN5tNCOHp06ftrJtS7MdhGMfNduu8x8IYxnE2n7ddd3V9PZ/PHz16oqUQC7q1aRpVjlFilK6bt+2MyAm5aUx4NtMuMOOwPGBlTklvN5mOCq5Gzm0zr9fraZqWy+U0pv1ukETQ+svlHP2DAc1l5mlM/X6U5FLk9eqyaTrnHDj1Yxzi1IuMba7V9podC0kpNo3rZk5jdMVeydol+ChJKLnGkXPtbIbsb4zjbNYuZt1yvlivFiAX8I7P1ytk6Kdp2m7vYM0IEwcvTI48Kix9CaKa74Wbto0PntGH20mynTWO42azQcDARF+dQMmSUIjJkbKkA/y+eHvZSqtzK+TzP2FKLKKJWNuuwQ6FqQf1Y5YTgvBEglQPesd6zyBgSZQSpbPLs9AF8tR1DU5A5TQ4+EJwZ2ertg37/dY56rpmPu/YpcWyff/9Fyizbppm0c0W3YySBA6UiCUjnEVENJ5frB8/uXSLtll0QsKehURZk0rbtt5z03hEePI+SNM0DUSFE9s7IcdN69qZuCAuTKLCbkziSSnFze1148mJnzeL1WzduEadd00bVSZJkyQ0SkJZpip7DnRg2DyKT0iBjOQ5qtrxulJ5YV6yL0R1XKH/qBhPKDbJdcOV3rVMhK0ErvIFcshOOlUmKf/uQYDhkebnvJfW1Mp9wg+hNjHIzAo5ctVVATDIf91ut7/5zW8e/5f/a2gaTSIiTYNQ9pHmd6jF/ZouEP+eh1nBMDzm8+WLFy/6/fbt2zdd28YB4HuHGBIxkzGAElFuP4KgqK5Wq48++ujll69CaLuue/Xqy2fPnnRNGMZ923kRoWMgAFeBL18gM7E0sn3vvff++OXrtm3jdEBv1REwKm6ZVkf9XhhxVZ2mKZB3zrEDBCOnME2+wMsxK7vfj01pFeAL4JlKXwE4QOAMQh7LlR7PVPzaTL9XGvCh0D6V8kekLbnqawQf9MBH0TSxNFSxpnjjNOIruQTCOwSQTQ5qIZSw/J81fLV+iBhntdYLRFZdA2cRrrOqWkMI45u0lCoXSLAjRUrVwE24fm1Ep5S22+3jx4/Pz8+HYbi7u3vy5MmLFy9ub293uw2iAubdwrIBH0vXzTHgfd8vlzOj/kD4Gg8AH9TyvrYkXMapZdrwtg3OueV8XqJTeRi7pkVgHClzV0DU4ziZOJjP58ZzSTmccKAvbpqGHItI286wJIZhCr61WbD3YvLeq2uCq1iZWMmRiqi63HUbkQNwbsRxCo17+uy92Ww2DP1qtXKO9vs9KrhsfWrpOnWyr/kYOQXFRpXP5IMHTyf8fl9lgsUKOkoPA+99nLLXgF1r1zlxwqpfD0nKWkk3VVraJsuVsjoD7kEQ+8I+a7GBUOhoANHAvrNWXev1erVagTslxgiz79GjR6C0sxVSe3uuCXhQrCJYBimlmOZRhVMO8iO04BkWo/MFrcmcMZiimYheRJzLCESLWuPkerkyETqcxtLnBhNhDjEC47H0abUJslkwAX74sPpTnc7QKgptwecT1XaiYk9udOIHa5Xpe3AZ3D9Orl/bE1Ai9cWJyKFo7yDcWVT1Adi0HLGC/dMvf/H7P3wc41ipAKjpIxjwf7juzeq0Xos+fPitb/30pz978d4HWPrNrDvMBOcCczu7jhdhgX77298OIcQ4Yuu++uplDnyRAzizXhk4LHBnZg3ibM+ePctKmg6Z3drmMuVt66MO0VC1MqyqSgrwkotPoKowe7kiyrBsouGbqFLzyOCaDQjRgHwhFZpG7BwtojaUakW4cVMcqLSXJyJEm3Hlvu9TzOVbOFC0w8wIsqWUwBBies4V0N+Uu6YwBNNUuiabBWBjVdvOvlQBaWnnpwXB5AtrJsBN9i5mNdsI+KpqhUoNNxVzBEoUd9lsNsAqwu+PMW63W+hRLCEAxNbr9W63g1uP2jaI2hhHNG5DM2DElm0JceFssjmCgwXN3XWZscQVpnu8CGLLsWrmWKb6CNbkChmvL8l13EJKkQzQ0YiRwlhRVXSutdUOTZMOjBa5sWs25uIApzD3e2Bp2/bs7Ozi4pyZ4bpdXFz88Ic//OijD87P1zgzFLA3lwhnLctwZA9GD0VKKSVAAVLp9GzfxcvbJJ4IVj32XWyR1zeqRbNtxtoaMNPWfrXntwi5rdXaA8PKhP2B3TorB1YsqD0vLy+fPXu2XC7BXbVarcx2rPcyMFB2F/PFc5HSYqVCY0wmpoiobv2biggyFLfFqH2BX9VBCCy5zCmrCsCpZRa89wZZlULXpVVD+lpWmyrhrzmoAkjbTNm71x/qPe17ctg5h3H7xtNODmauUcD3H6B+5qN1hW9qdRincT2RRETqVDK5AzP/H//H/3F9fU0kLvhxHNXAQSTEdXXUUc3yf8jB1Qw6F1brsyfPXvzkJz95/vzFANPYqUhECAg1EoGdY2S4nMihWFA0Pnp08cGH78cYVVMI4fXr17e3t9aOu9ZkdGw6aeW3QYyGEC4uLqxWx5VU0Mnk1e9STx74t3GLUGrwRSSlCYrW1mgoVXpowioVYzAeNRY2SniENU2uK/RVyKlga9UVqHAcl8ulFpJkvIVdn0s0qe/7aRyncZymqe/3KWWnGRSY1rEAMtcXtKcpPGbOTIoyuZLiqo19izGkqpSZS39GKkCqQqARjZs6h+JjBDwEuhzI5Fo04ItascsaygY6CZ7uer2GRr+5uYFSt6QsgE6QO3d3d7NZHjfv/YsXz6jgZZbLTIwMve6Oc422DCw6IhIxMnWwznA9rqBqIS77vkcZqC0eo+7SUg1slgTeMRYKp8VisVoujZ4slO5VNkQ27MXUyzZKhhwHnrcdxhZjsljMu64dhn4cx8ViFkJwwXddBwqO+Xx+dna2Wq3mbRdKJxjoaROmemyVqmZvAXOEDD0GwUwl2KM2pHg2KTh/+noFXImRU+pgV+VKTRHev47WwfnyA1V9IGzFNqUJJhQYoK/4mUp36hDCixcvAGrJkTPvyDvXBPIukSY6LBvNdVktXh+L/1ACJyrESPRwsdJERCkJUw2UYWZhchxC2zl3oHKyBQAjG78CdpOq+nKL8PmCULFB81WjmgeF3n15eP9zc0JM+pl94NypS3ii7Gt9X59zei8+tQmgNWwFisR6WKAXH7ygLSMxOBUzEx/oLE5eMsmUKC0Ws7vt5h/+6X9ESSlNyjCCqng35zB59e3/eTX8P/X1PEDsYxTn/Pri8tnz92azeQYr1swmsEOVvDq8kMtYvgay4zvf+Q7iKlhPv//972OhqLXDlpGZP7YbT
QGLyItnz9MUPTs4wVjo8bhvJVUTUU9zvVxqZxHSH/tTKoZqV9ibpRBEU8UmQaWzEBXf1LwuLYCgYRhAlEhE+/3eTrb9EwprYyjE2q6w+BowBEvUML0ikpsJpqwIa92ghSaTSk9Tw3OJsdFyLjNFbcNJFMukCZmTVH1YO6a4SKpKXABvtv4tqRQCwVmEZEGrOyRogZdR1dvbW3xdhIAUw60xgKDXhnJl5vPzcyLa7XaXl5cY9vl83vc7CyavVitzKLmwWTWF02oce8QnVBUsJXCdrUrKPHUMGiIBehxuSUm99xhbtJZS1fl8HkJraoMFOHmP+ERN2srMua6mdF4yqZR/ddrNmrPl6tGjR1ADs67p2jCfzy8vL1erhXPZvnn69Ol77z3H6JlZAyNv3qI14ZGws211tPsSxUniJLYNzVCAjjE1aU6hbVV6SMrXF68/qR1o+yKuTJX0p2IoGCzIFphFVmq9broZvjIsZtjBKDRAWgFBiLY0LLFpLZ5D3mWI+tg2x70koyCnaToQwrPPKE7lzGeHnTKmSN467mVbtqm6i3LFNdRP4zRN6KyKMIzV1AEySSXcZZDsIhkOgaWTKTb3RkrNdK1o63nBX79Zg94/jrXpEQ73XzrkfhsCyiigulnRUYjbpjvUf2AWotI7KYejT98B+mM39JeX5x9//PF7z57/5Cc/8Y4lF4GpqhA7okITfe9xiejfLTitdJqZJhdY3Gy+9K559OjJ7d27/X6nIsrkiQnEXnrwYh2zKwRsrsmtXpfL5Yv3nv/h4z+2bcuObm5uNpvNer0kclQIwughremq+mksu9VqkUvsk/DxsJzIAvxaxfqEi0/gclMTUNIc4oEhBPgKxhAEIzTG2IQuHpM9ncAQDGFr8VgqUqwt7d+naQrOWwbXZwyRIvsLR9Y5d3t7u9/vsQO9z1E4HHjmLjQmTcxxgWxyhx5qmdiISyN62/ZUegumQgjlCpmiK/HnpmlCk01sNBbE6yMC1hT6IYPXQkaHtoG8cyUr7Ats3lWgaCJCu4JpmvDW6GeA57+7u4MrzMUGB9KYiIZhWC7XuBrQyxiQ5XKJEYMMTym5MkFUrIcSVj0YFsE5cH3YbOJpfekAEQvjN5fcPJbPfL4048MxxHEqjmMgct4zkweCHUHjFAfnXON8Oo7rmnMDqZj9dUrz+Tyw67pusZj1fb8CKZKIalRNAM7P5/PHTy6/973vrFYr5Ok3m827d7foN4VXSGlC3K72LKlSkFJql+0EC5VbmIeq8ABV+jKEIEkN1FO3uz9RpXbfVPqnHSRGNUdSxWZPPEUtPpkWs5uOVQWmDxh4VcU6NDTDYrEwHWwBAOdc1EM5kFah3ax9HYsqer2wd7FPSWTc7Yru1/l8HjpEsIJvguihUQSzJlVyHsRVuSYiitKBvopLKgcmOBzcWPpCTpIrSvBX5CBsiGonW4+tZ33IG6lf0ywPM3G0srnti/d96/vHg0qX77EcKmWGiFrInzwwMxNaOPMDt8bDhwctuPxuWDrCoF2inEzN14oxusB/+/f//dmzZ5fnFwx8HlQIuth+k/kg/1E62Dk/xtS13aMnT77/ox/e3r57+cUXwixpIvLscopfVYkcq5IjrqwwIpI0Oec/+uijzz/7IoSQJIbQvn1zvVwuQ3Aqp1uUKqqdelcQcifET58+fvPmSlVFEpogPWRPZcIp55ryQ236cb2H7U+uEKybswhZk0oHNFc6k+BzUzwnprqrqmsgBaCJh2FolytYylAq0GRU/C08Ule6uyNAHQ7cN7kmFR4SJYILy+Y2MXOpkgQoCRqdszcjjoOBywwb7O5l1ChzWedILJw8Kv4Hro/gMCgIiMhrbqOGQIIrOWBXZfFDKQIOIWw2G9gfVnqBnGss/MnDsO/7/uzsDCwEGEMQo+KRwM1ydnYWS6PJkBsaVkwmJacbS78pvHiMcda2WBWwJ8ZxBI4GsYcudn3fo5gQUvVktWA2XWkbjHQduEREhMn54Cx3uF6u4pTlhs8F2VCKBHp5LgBU51zjOfiwXq5EonOcUloWqs7tdstOfGA8cD/sRC7wGE3p1LRcLs/Pz733b968AYxAK1ep1mcnMg7jMysMD8i8+HKkA4lb5Y6EEDVV6f9SpckPQMDy1tZDGpKrIx2TS9dR0FrYQq9TkTBcU3qBQoszsTmXbILdCzYcQlNGaQe4nBkB+CGpTCkys6PDG+EdJVGSOMWelYyk1nFhCHAuZaSkBWCIiIJvQ1sXWB9wbUhR4TlXyzVeB0V0BvSrw3L181BJpd2XfkeSkA8a8URVpwKpqxXhfS/5644TwVs/2MlqUVVlcQVgdHKRWmErC/O9Lj3lcCYly1XEbL1irSUSzZRQovgbM08CWTb93T/8/RgnuxmC3TiZ5Bte+F+bG/63x5+1qlFWck3TTFG8a548fX7+6PHZxUXWr5RUC6M2M+thKDGpOA2C/vHjy/c/eO9uc4sT3r59O00TuPfoIQ+YjhP4rjDys9KLFy8gK0UTK02F65wqewhKl0pkw/Y5UbbEypk5OGNGPaqTqSClufBppEK1b8I9VUw6zjn0yvWlnhUOHCKuWhwLqCstzdeQxHU+32sqvU65ECPg/FAo5uH7do1XSoAlU9mclg+GKYAxh/lMRFBgKSWlHMRG2sn8Zrjp5k9rQReqKkh229wbrkfqlAtWHCoc5BWI6Zk2wuvARbYrW1DaIplQqERkFJj45OLiESg1bm9v4e0hvqqavvzySwSxx3G8ubnp+/7q6iqWJuSIahRldkSf1HWdxOiIWHU2a03tee+JxDoB932fVHwThDQ0mZXFDB1mRjjBl8JuLDbMoPeeSguK1Wq1Xiy5tNCJcYoxUsraAtq5WFdgwnJdg4x+07ahaZq2aVKM3oPjkIm0cd5pXo3z+fzx48fwjYjQFyhi2TDzfr83BhLTavd1sAlNLoGWaZq22y2yAFjwyHAbjqGWFbZnza2v/1QftlNcWVy1fsW6rZ9TjmGYriAbzBQ4ERou9/hziKlAq8EhNj+eCmILIEcjCc+X8s43wYWcIZKSPcFmiaWtSNd1TH6ME74bY4QTjJ07odqwgoUSe98E5wIRgUWHS/xZSioEV0YzaWwliI5Qiu/R9CVWHO8Ws3EVyuFEeNZzXUtIWxWp8B/USlcrW/Ob9MQpwqaODZ8aWDkaf8/HrJeTqvXXOSRNTk4+8MjIIVlylLEw58kCHSawVHWM0xdffPbJJ5/sh76ck0Qju6P6mW94yj9NE/8ZMFw2Ac433gfy4fz84sc//vG3v/XdZy+eR0EDdmFmPLwrcJK8LDw7z7A54jSIyJMnT1AGAKn9+WdfwluKhaYYhy2FVDinuLTtDCEQy6ztLs8vEMLVijr8JOplurPW6/XJahG/CnEQq5ZzVBS/BWnt5zzKJRximDIkQQGshXYBxyxeEELZIrGIdm63W7i/TWm0nAq2K8bYNZ4kBkesybO2Xc45ERGAV+ZWmgbCOEBemHXCBXetlJLkAqqSesy8u/A1mRl+J5KmeB5waSEMAG9MS4/0/X6PTC3khVFjYqyg
2xWQuDE9bm5WX8yobygW6ZjBYkEUlkQcPDuW/j0oYJRD13fXFmI+5Y1cC0IXgQ07j4fekK8HCrlIFbrfs2Cg5h8IjL4+LgMmy/7PH3rw+2UkANYAiBO+oyXl+pPb/56e2HH354uDt++unngaNVzu6AwcsMmPVwPIXQjafzOJ5C6CwLAExcvEDNs6/7sm2XPEeNMyN6Ez3z5rcc27xhsWE8AmY5exSXiEqyNmdtxePNkm1WoRU7FBE5xrJgZpmZo6iims2ZmVWzqvZ9zyHkNOWc3RUsBPRZPfboOewGqEYiDkFVc9X6xPz/6+1aeuNGjnC9mkONNJLsxIcksI9JgGwA//+bk/wKIzk4wMpY2CvJK82L7KrKobqbPWMBCfYQniQOh9PdbNbr+6oKqk0KAIjs6AYgJGZ5fzgQ0SDBfO7mTgSFTEYRb49qaSjkYGNpgpuFaBgkEER3n+ecmIRWIkJghFGwoiBMYVMDkKrqPGFXDwgRLy4uIuYWaamFZIQQ2O0IzIBB/vTOTqUojOmRqueSKOu03e1/e7vR+fjLtN/v9xBJLD4d52m/PwaEjJzGcVyv19FxITyPSJW5urqKYWAtSQ01th/G1vF4REnzrC44DGPO+fkpsxC6hcrVki+rOWdwYmbTSPUp7YSLjqx2Rgv5hgoJgeWVat6eMhLFbgqHbxgGp9JCICSymSFWIUgcSivnPGt5sof9VECK0oDBEXGeCivneJhVS0HKbKrZs+Yi94gaqAlVHDVp1ivm2O0shXj88PBwfX399u1bItrtdtEsKPq2BTY/jqO7U1poO3Dqny33XEq/AdakgNBb85TjCVrWCgHH67oQJjqx8oJURDAEdFgUSX9N+0qbePsj53xzczMMMk2Hy8tLZg4oOkYbbBKOvAAkwIqqVoHbbts4JcU0kSEy6OL84/3Dz1++Xl6sY0DUFSd2XxbKqoWHTP002yyIFo5V0yNYHN/vj24RTj/wU2e0rLa71GdkqtBpOq8XAACV4Z9HUt0dgRwRIo+pwB+d2iIEAKvNU+uTQgAUzRnqb7ewTLsvg2khKlvtbc7dCNRtKVvYvtiZcWAdcNu2ftO4vQdcttR3Criq9gVEASgJ19ZRtc0M0YlkCQgAtXS683X/78evdoVLVKQExBwQHTmRytXV1Zs3b3bbp9/94fc//fQFulK3h8NORLLO7969+/L15/v7x/1hGyGjDIiIprVxJixThu4Nd/eUBijNbhdbL3rz9YtstbrbmT3YbK92JRHVrpzY2irExe0dU3Uib4ywlBJ5QX+bFk8pEUfakrnmCC2iAwEyEpjPOpsW5dRC1mY2z5oSA6i7VWNrQkRXrSV0HDueIRFlc0eOrHmL0ElfWI6qTUoUP5eYEHGVhJmj35SZo3sU6Y58vrLrHBFxGMdpmrCABSWeFoBfkaTgh+motUXgPAU9G9Bdc/Y8MxETJWEuLRGBmdc0bi7Xqnp5kQCADYery3jXZlOikWApasg1WhglhYl4GEpTxShdFOJsnudxHEWG/X4fYPnz8/PFxWqz2Vyt10/fHi1PLLIa0nQ8lMpcJUYS0EOk4gzuTmbC4DY7kkE4AohM7tmyWml4HDSVKF2HxyD2E3HFGpxw0uzuTtVNLOQpsyp8wlAAqF3Qkc2MUMxsHEcR2c/WrsFWrTPP7qBVDzphFGzRDmrxLoOlOQC9LikKpuV2E93d3YXzejgcEP3m5iYaT03TJMRBFEcpnIkWTHZ3hJIX19jj7c3SyvYnommaorQZAOCptGHgcKWYXqaw9DITEQnBcSm72F9TRmUOvGyhCD9cXl5KaTqeWkgMggx4cVEEwmmFpap/l+BHbC2sTKgIxYUxqqqfP3/ebreRq5JSivOECIa2aPPl0PD4TxUtALhjvIFtJPXz8+HVT8Pjr+5ovRGC42lvRKjvtnshPkWKVPx4BMBjAIbQkGDQEzpwfN2yL9EZWryjVs8STkEcABBTgPhAQuZiKPm2ynFlpzWhrT40yNP7mRcPuPzrEYw9mbPXBMTvQs0vKOB6Tz2Dh7H+yjJPxGjFUzQ9LCoKwLDUevvf1fCvD0djhQYRIYo5TdOUBrn9zev7h6+vX7++uro6TBMDM/NutyOC6+vru88/3t/f//mPf/rbP/7+/PwsIikNiZiITOcwWiNTrT2CNvEAxgDArBSqtdLwbikisyxyhAtOc9KhSrT4u4E6TX6Fw2oGLTxjZsSCxKbuDuYmtUdzxFTdslk2k+gjEz3FqmfpsblVNZtS7dxQ9T3EXNw1R9KRFBpX7JxBavkhpN6YUKec8/F4PE7zAlIyDZLafFNKq0FExLImLm0kqFatAUBH0zxHRg74AhMKi2kJsjEzuGvXqh0RgZCrPcTM4zhqPoZ/HLHiVqCfKCqBgEN0NWZGkMTRAztMn5BHaK6WVZURgGie5xUtMOQgAGYNtkL3QeR4PK5SSoWLh+ggjAD4nCfV4du3p+3TkxCv12tGByCRJEgi7O6as2aL9LCW5sEslBJVbldY1UmkiVrBItmneQ+EjiWYm1JS8NnUEAhxHEer60OlTUjRhVjCehrRqmnK7i4ipsDMk+ZwmFYIl+t1EJJZpHDgwc0q+uYGZqoaVTlSbRzrnb3eZxaFW992vqSheMxIz8/PHz9+jIG9f//+1aubKMC52Wx0Lo+JZQHg2ssYhKZ4Jb2ifaGh1S06HGMX/3O1YJHiidgrbBt4SR6eCRx3B1fAcw9quRcRtDxss4hRjeOoqkE3i+4jgWXEeSKKSHuTCW0bn42njSo2beAOAPD09HR3dxcudQTAYlNN05SEEMCtFEe10lWdorUBIvJ306x7/OWlaLZI+xcRAbiLY73ghp05If3f1WAq3CNwr/3SEAAUAap6pgofNOnq7mjgrQnjEq44P6SZHlAKcbgZRopIUeTmVlUylooafYWs8ymF8l+QjKKnl1BAvwrnahugrxXcL1y790vnF2+42SNNtTdLAk6B4f/jgWZGSdxnEXn16tW3h19ub2///enHzWajpZ6RzHnabDYfPnz44S9/vb6+/ufHfxFKe0WJKN4KxpISE0oXamoK1raXEQ5smUgtPtZvUFxA1s5M6/YoFlQeAgmGDiOp10SlN1ytmIgOTAyVzMWYnPtoilpmS4CFb9x+sd1TCJlK89d5ngGKZS0iYcOqKqCDRX6UunviMRL2Aw32BWspbDLmal8jIYC6CUq0T+iTtdBLLDSKArCQlHZjORLoAMDNIKhtCIpLqepoHTPXjC+v9jvXRM9QISHdgtjFgmqac2ZZ58ofBIC4mJkZTESC7WJmhKyg0/4QrRGGpryhZUl6GCU5z09Pk5Z8MyCiAIkb8dXdh2GY5ywi06x72+ec0RTBmECQVqsV1uqhka5aPFd3SgUpsFKCzil4q3U/ZLAIIzbaESIYQq6FsokYK5ZhlQXNnB4fH0ttMl7abanq9fW43W7N7JhnYFrJKhof+UkHqqJF9tNxCn1rnnM2yG0DNOPSOu5IYOfx7jTmRFyWD4dwBw+Hw+3tbXjDZrbb7S4uVsGLVFXLGujpbBN2mQje4+Jdx
ktg8KqaVkNU1gxuV3w35yUhcJEaiACQ8wydoOsDVO1YziBxpSzF9b3AbHo3Tq7X63Ec53m32WxivmNEdxCJaLvdrtdrGla9wHT3XKGNs2FIaZ9VhFWQxj99+pSzpkQAHoVIm4eNZ/ks7oiVyxMBAzyfKRG2JS3SrIqyM3XQi7uTk2aR6wwdJHomA5dVNUciL1BygVcAnDr1HFdqjRPAaR+2ZUitFCVCo23G8R9WkirDTZpHzwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import urllib\n", + "\n", + "from PIL import Image\n", + "\n", + "\n", + "def load_image_from_url(url: str) -> Image:\n", + " with urllib.request.urlopen(url) as f:\n", + " return Image.open(f).convert(\"RGB\")\n", + "\n", + "\n", + "EXAMPLE_IMAGE_URL = \"https://dl.fbaipublicfiles.com/dinov2/images/example.jpg\"\n", + "\n", + "\n", + "image = load_image_from_url(EXAMPLE_IMAGE_URL)\n", + "display(image)" + ] + }, + { + "cell_type": "markdown", + "id": "7e3240cb-54d0-438d-99e8-8c1af534f830", + "metadata": {}, + "source": [ + "## Semantic segmentation on sample image" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "49226d5b-83fc-4cfb-ba06-407bb2c0d96f", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAoAAAAHgCAIAAAC6s0uzAAAXVklEQVR4nO3dXXbiuraAUdcZaViaRtPStPPAHi6KgPGPpLUkzfl0x7m1EweIPi/ZgT+3BWAKP7cP/+D70z848TU/2vim5774963AUdVw4uFN4vfjWeRn+V+BrwEAHCTAABBAgAH+E7JzW/yb5tx/5jcBBqCFkc4MivwsAgxw0khFaeMn6w1iG+od8J9qXxkgkf3L6J4bXI8uynvuTP79fbtr1SFd3BS9/RRc/BEEGJhCkQAf+iLrP376gvtLPHaAV2lLvOfxv3LwtqCB8R0q2ct/fHrv9PcCnbY3URKeZ7TZKjcBA4M7vZLeS3niP1//w+3WvvvKs03Aj7Z3C2qfu5Q6x9pJgIGRhTRsZ4CXTw2eMMA7FS/xxYf63PF8XfqeAInlD9i7m7PyH3msd9fXz32RKAIMjCl8ed3JsHvF6RKXfcDvX+3oMdiCBgYU27PTY5kMX7Tzka/3OB966k3AAAzi5ccWnRtPTx/A/m/kz5AAGNaa5IRvwiXAwGiyrbMEWufR7yOzaRsCDJBFtkL07v54/k5vksfZNWCARPa8a/T6Lx+Z+58kqewGEzBASdfX/XVue/f/fbmbmr83LUU9Goe+rwADQwkfBIscwEZ9T/xX5CTAwHTeDZFlBb4L5uR6eRC8EQcwlI3sFfmcwZ2u/PnpiU8OPvoVxrbnMc/wXhwCDIzm5doauCgfavC5O7CufKkhnfscqgbf+pEtaGA0p3cgK21d7nwLiITvFMEJ+59EEzAwoCufIxuyOXn6U4c/mjnqgR90sefZEWBgWPWuwp728mBOfzsNXiX8UKmPz44taGBYp2917uU2Wh6lqu8eJmCAt2rflnXx65uAk9t+grwVJcBblTY2WxZRfdMSYIAPEl5f3KO7A56NAAPssv9jEgLlP0JWbsIC2CvzzVn+jLg7AgxwQKoGr8WV3h65CxrgMMFjD3dBAxT29HdEZd9eg3reFTHkyTIBA9Siwak0/oSGj1crBBigLhkO1/4dS/Z8RzdhAdSV6r6tCbV//Hd+RwEGqE6Du1DkafJ5wAC5aHCIzA+7AAMwphP1vRjsQ/+5AAM0knkaG0/aS78rAQZoR4PbuPI4N/sMaQEGYCi9nOUIMADjaHwn85VvKsAATfUyn1GbAAPAeafPqAQYoDVDMIsAA4TQ4BoKPqoNniABBoihwWV193j6NCSASD4r6bp66f347Fz51iZggEjft9eLeHfzXIh3j14Rtc+Nvup+eQB2+L79t9zr7n7hj9XFAxBggBTCc9KXAR4uW9AAcNj1MwABBqAzA4y/iy1oAO4eq5b53uwM9S1yDCZggKRalib/p+de+a9OaHAKIsAAeYVMe2nH3wyz71LuMAQYgP+i+3NrV9+0mV9aHZt3wgLILnOrmkmy+VzwMNyEBQDL0vxExxY0AARsMwgwQHZJbj4aWMgmvwADQAABBoAAAgxAB8a7FVyAAehDpQZHpV2AAejGSHOwAAPQk5Zv1/Wk7O3oAgyQ3UhjXymlHpPAx9Y7YZHC93J7+b//vPnfAX5uBUbS77h5+kyAH9fKPevju7X1EAvxkD6+NtZ/4AXAtIy/G4o0OMrnAG8vke9iXCS6ew7Dutypo6+Q+7/3dMPw1qAOf+ax9WlIxSOagRU8UKlXlCeReQwfodXvQXb/z359CN75vcpO228n4CHru/z6uSzlVVV6FX0vN08c0LvXE/Co9d1mTb+u8SvHU8bY5hl/l8s7z22G4LoT8JzpvTt6cxl3ga8ZozAMI//ZRvG7vf5Yv7Z5fJ4kPEXzHDGq/E1K5WIgPz7axQPs74A/yPlnMNsVPHGoCbO6nzkYrnjsStfJ7+5PkkzAh8U+YkdL+fJou87tO17JjKdqDt+1qusG353L8PYPXiPtAnxe44duyGqW5cXMYEq18Gg83n3fK38p1F6pn/rEl9pJgKso/qiq735e0gzjet4KzoI9Tsz7f/z29V1cA67kRC/fZUN6j3JJGIo3o69rq3f5j1mAsxDagjSYadWozvbXDPwwgw2lHoeqFRdgxqTBTKhILZ5qmn+OvLtynFEnEP+L+bZQn00FuKiX+l4ROL4LMCPTYGggvNOnD6D9nx49EmAGp8GwX8KruR9Vqm8DrgEzPteDYY/wIFWV8KczATMFczDdaRyMhH3aY+f4m/OnE2BmocHwTs4+fdR1fRcBZioaDL+l7VOsBneWCTBz0WAoLiTh4bdeXyfAAAwr83z/v5/l9uMeUWZiCCa/Qx+HUEnmEbP2sbX52f9OwDLMPDSY7tyT0CyKRT5HqJLMZwaH2IJmUhpMp1LlJ/MG73L28Jo9ws8BNgQDxGpZtQx73dN6DrCxgHl4tdOjBnnOPNeOdHJgC5qpaTAzyxzal47W90StWwZegAE6UDyW2
1+wuzYX0Xi8/ifApgEm5GVPR1TzqMxb1iZggD6c6+vP7cV/2GOqx/jb30cCDIZghrWG9p7hQ939+I97qXjaIViAAcb0MpCHMtxLYq8LifTfABsCmJnXP7w0T4PbMwHDfzSY3j2OcQXDmeEqctpt5Cu+og8AgAJqJ2rgUTiq7v98GEPMIUAahmB69H17TsjAsRzJP1vQGgwaTF+G3Jt9MurP6BowQK9elsn4+2T7AQmsuwADJCKf8/BxhPDMLjQh1puNd85ko27MzsMEDJDLlSG45QDtDOCiP7dX/6sJgMnZCiKJjaD+7l9UfYt/3+Jpf3eEsecQJmB4wTko7R19r+ZAVbvVrL7hXgfY6T9AiNNv1NwyM2mTdkj4Frp3wgJIapjPI9ovPIot2YIG4LDx2t+eAAOkcB/+1hFwwsJNNf4uAgyQx2wFmpwAAzCdDOc6AgyQUYZCbEh+eF0QYACOqVHfCYv+9s+Qfpab9yIAYDVMI5P8IP4OGCCp7wTvjZWkVUOyBQ1QRXg7L/q+Vfxcpv1f/Lqnb5TnlGIrwN6QEuCKThtco46PX/P3F68dxfW756nvYgIGKOvpMxW6a3ClRGX4qIlU9V3efRzhI7diMSc7QFx07831Rb+Xj/i9eJzZ6tiACRiglr6icvFo+/phM/g8AS+GYGZlCCaJBkNwqXxeOdTZEi7A8JYAk0rxDNe73HvaVA3eFeBFg5mSAJNQF5daNXgP14ABetLyL2hDhN8s3czeABsFmJCNH9JKnuGLxzZJg03AAL1KnuErZmjwgfeC9vEMAP1qnOr12x1N6ainFL+ZgGGLk07ym6dYgxFgABKZYfP57liA3YoFkNDHIThqSp6npieYgOEDu9B0YSOx3e1RT5LtwwE2BAPk9Pum6IFvkx7AgbugAciveHEf59FmOf+Z4NRBgAH4z8e931Kfschy7hqwXWiAwfzcDlx5bXONdvgrwW7Cgs/chwVPNPi6kwE2BAMM41znmjV41AybgAE4qVkah8zw+QAbggFo2cWN73UvdF+dNgEDcEnjBj99u76i++hSgA3BACxvKlivi+u36zS9dyZgAMpYc9hmKu26vsv1ABuCAbpWNmP9bgi3ZwKGXfwpMHSho3fpKhBgQzAAHOW9oAHmtX+7+N1kacP5NAEGmNSedn7c0f120fesMteA7UIDdOfjpwXvvJ7a0WXXVEzAAFOTzyjuggbgKhU/oViA7UIDzCxDgzMcw34mYADK6Kt/4QQYgGICG9xd/ksG2C40AOxkAgagpPaT6Me/p8pJgGEXGzywX8sc9pjeu8IBtkgBsLTqYr/1XWpMwBoMwFK/jl3Xd7EFDUA9vTeyqj+3Ol/Xh6cyGFs7cFHZz2wYIO21JmCrFYNxTgkXdXqvcj22oAFoR4ZXFQNsCAbgJQ1eTMAAEEKAYS+XgaEgQ7AAA0CAugF2GRiA4saYnk3AAAQo+2fBPRJgAAggwHCA+7CgiCvj7xj7z0uDALsMDMAjm893JmAA2rlY32HG36VNgA3BAPDEBAzHuAwMFNEowIZgAK4b6fpxuwlYgwFg1XQLWoMZg11o4DrXgAFIZ6S7nd9pHWBDMMC0dl7Bvdd3+AabgAEgQECADcEAc/q+FZhrh7kR2gQMhzmJhCuKZHgAAgxAAA2OCbABAoCX1jAPs9X8jgkYgBiT70V/RR8AdMb+DZT1ssHDj79L4ARsFQPgpe36DjM024KGA5w4AqVEBthaBsC0TMAAEECAAejGMBeAFwGG/Vw0AQoSYAD6MNL4uwgw7GT8BcoKDrBFDYBHL/8IeMj3zPJOWPCZM0WIMl53V/Fb0JY2AO6ext+B67uYgAHIY+ziPomfgBdDMLl5fQI1pAgwAMzmzy36CFbf5gzyMf5Cbet136n2nxcTMABRfm7/3HU1w2cAP0o0AS+GYJIx/kJt7/7qdwa5ArxoMJkIMDTzu8TDZzjdFrQljyS8FKGl4XP7W7oAA8AMBBheMP5Ce7MNwRkDbO0jllcg0EDGAAPA8JIG2AhCFK89oI2kAQaAsQkw/GX8hSRmuCEr3RtxPPKmHDQmwEAzqSdgqyEAo0od4KVVg5WexcsAaOsr+gCCrWvu+n/Y9waggdTXgFeVovhx4hHjqZiAgZZmn4C3Pa3IegxQ1vohSDPc9vwk+zXguySjyc9yS3IkAPSujwBXcm6ilWEAruvjGvBd1JXgbfalh+G8Cmhp6gn47mJB7wOxtRuAQwR4WQpNsTIMwH49bUEv0fu9O/tqU7pHTp6AxjqbgGNXye/ltieulnIAPuoswL3QYAC29Rfg8Lbt3GF2SRiADf0FOIP9V3lluAueI6C9LgPc3XIpwwA86TLAS4IGn7jVWYYBWPUa4Ay8k+UYPB1AiI4DnGHdPP0nvxkOHoBAHQd46TxjRuEMPAVAlL4DvPS/gPZ+/F3z4AOBug/w0v8y2vvxd8rDDsTq7L2gN4S8A3PZRdybSLchvUAGI0zAdwOsqq4KN+ARBpIYJ8BL87W10rdTCIAZjLMFvWqzkdsgk3aki3NyA+QxYIDvqtar5Touw6WoL5DKUFvQj4ZZbV0YLsJjCGQzbICXaukKWcr1A2Aww25BPyq4ixseQjvSJ4Q/awC/jTwBr0qtvxnW8QzHAMB1U0zAqyvjY6rymYP3S/XEAaymmIBX59bihLdBZTseAI6aawJe7Zwgk3fOHLxH8icRmNZcE/Bqz1Br4Qagnkkn4N/u02R30TUEf9TdcwpM4iv6ALKwTAPQ0qRb0MNw3gDQKQEGgAACDAABBLh7dqG3uU8NyEmAASCAAI/AEAzQHQFmfHahgYS8Ecc4ZGabfQKOevk75YVEKd6IA+Af2+eyj/9fMeYKE/BQDMHbLJe8c/F3x0uLEwR4NN01+HHlanDwFkpWNV5vXmDsJ8ADyt/gPYtUvZ/CEjk553kkIcBjStvgEwuTMYVSWv5eeI3xkT9DGlPOX/5zR1XjZ0l7gkI9jZ90rzE+MgGPLNsScCWllX6WnGcqlBX7i+A1xjsCPLg8Db64DFX9QSyRo8rz+l+8zPjFFjR9qLp4pVqmKSXb0/q93LIdErFMwFPI8Gt/vaBuXmWnDC/4j7zYEOBZhC9JXQT4zsrYtfCX+lFeb9MS4Ln0fjeKBrOtu/quvOQmJMDTCVyhOgrwnTWxL/3W94kX3iTchDUdv9v7DbOgD2+w+5sG+3F4xwQ8qZBf7yLtb3/kTlkyGz5UXn4DMwFPym/1fsaRnCZ5Xmb4GaclwPNq32BLCUVMkt7VVD/sVAR4aj3OwVHHbBHMYLb0rub8qYcnwLNr3DPrCOdMm96VR2A8bsJiWTr8mLZ+byLjEMl5yUtxDALMXx29zUXUumzha0Z6P/Jq7J0taP76WW69/Er3cpycYK91J49S70zAvFb7d7vTIVj4q1KUc7wsO2UCplcWHbhz4tIpAea12nmzZPDES+IKj16PBJi3al8Svr5kGIJhpcHdEWA+0GDohQb3RYD5TIMbf6M5eXiL0OCOCDC7JF8cGxxe8kcAVhrcCwEmXpH1omog1Ze++FvqLggw46iU
SfVtxkNdlgYnJ8DslfxK8F3xg5QEumYUzsw7YXFA1d/ksqnr6N4unghGJV7S2QgwB+R/f8pHp4/WOhVOg+vx8s7DFjQH9PWre+KNRDr6OAo4x8lNHgLM4HY2VXqZhwYnYQuaYzq6DPzb74MX3bREogGv/1gCzGFdN5heCHADft1i2YIG0lHfNjzOsQSYw5w1wzA0ONBX9AHAP77dDDWBp0X/6RmXBCbhGjAn1VslBXhU26+Z+/OuviH80oWwBU06luAhfXxavWkisxFgTnLKzH7KmpwnKIQAA3VZ3LvgaWpPgMnIWgAMT4A5zy40HzmX6ognqzEBBoAAAswl9YZgJ+MD8CR2x1PWkgADQAABBuAvQ3AzAsxVbsXiJes4bBNgAAggwBRgCIaR2L1oQ4ApQ4N5ZAWHjwQYAAIIMMUYggH2E2BKKthgOW/G5wDym5dEA1/RB8Bofqzm/Xh8ph7/74tnP14AsMefW/QRMKorq7Dxt4GdT9CJ50KAh+E3sSpb0NRy+lfX73wq9w3q/U1VX9jJBEwL+xdl9W2j0v6E+o7Hr2Q9rgHTwv13eHt19nveC5WFIkzAMB0F5RAnx5W4BgzAFmdslQgwAAQQYJiLaYYTvGxqEGAACCDAAHxmCC5OgAEggADDXPxJCSQhwAAQQIABIIAAA0AAAYbpuAwMGQgwAAQQYJiRIRjCCTAABBBgmJQhGGIJMMxLgyGQAMPUfpabDEMIAQZkGAJ8RR8AkMW9wXs+9OZdrX1gDuz35xZ9BMB4lHhUdkoKMgED5T0t03oMvwkwUN3LsUmVmZwAAzHWKisxc3IXNBDMPdjMSYABIIAAAykYgpmNAANAAAEGYBe7FGUJMJCF9Z2pCDAABBBgAAggwAB85gJBcQIMZOEtsZiKt6IEYsgtkxNgoCKVhXcEGKhCekfiAnANAgwUJr2whwADxUgv7OcuaKAM9YVDBBgoQH3hKAEGrlJfOEGAgUvUF84RYOA89YXTBBgAAggwcJLxdxLehaMSAQbOUF+4SICBw9QXrhNg4Bj1nYr953oEGAACCDAArxl/qxJgAAggwMABLgBDKQIMwAv2n2sTYAAIIMAAPDP+NvAVfQDAvH6v8q4xMw8BBg74WW7XG2m6Ss4T1IYtaKCdn+W2vbhb+pmHCRg45t7IQ3OwrHbEk9WMAANn7Myw1RzesQUNnLfR14+7zSTkKWvJBAxcYsmGc0zAABBAgAEggAADiXgjDuYhwAAQQIABIIAAA1nYf47lhvbGBBgAAggwAAQQYAAIIMBACi4AMxsBBsAdWAEEGGB26htCgAEggAADKRjConjkowgwAAQQYIB5GX8DCTAABBBgIAvTWHv+/DqQAANAAAEGEjEEN+YBDyTAQC6SwCQEGEhHg5txDTiQAAMZaTDDE2AgKQ1mbAIM5KXBDdiFjiLAQGoazKgEGMhOgxnSn1v0EQDsZLO0Buc3UUzAQDd+lptaMAwBBjojw4xBgIEuyTC9+4o+AIDzdjbYxWMSMgED4zMuk5AAA7PQ4N88JoEEGJiI3pCHAANzsR1NEgIMzEiDFw9CNAEGJiU/xBJgYF4aTCABBqamwUTxYQwAy/LpzTqeOj3AO3s48wgnwAAndZ1hAQ5nCxrgJA3jCgEGgAACDDAds3sGAgwAAQQY4KSub8IinAADzMX+cxICDHBGp+Ov+ubxFX0AALQgvdmYgAEO6278Vd+EBBjgGPWlCFvQAAf0VV/pzUyAAXbpK72L+qZnCxrgM/WlOBMwwAd91Vd6eyHAAFs6qq/09kWAAd7qor662ykBBuiS7vZOgAFeyzn+6u4wBBigD9I7GH+GBNAB9R2PAAO8kGr/WX2HJMAAqanvqAQYIC/1HZibsABS0NrZCDBAMOmdky1ogEjqOy0BBoAAAgwQxvg7MwEGgAACDPCC2ZTaBBgghsZPToABIIAAAwQw/iLAABDAO2EBNGX25e7PLfoIANIq+6GE0ssjEzBAddLLb64BA9SlvrwkwAAQQIABIIAAA0CA/wMijwbhSpVtFgAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import numpy as np\n", + "\n", + "import dinov2.eval.segmentation.utils.colormaps as colormaps\n", + "\n", + "\n", + "DATASET_COLORMAPS = {\n", + " \"ade20k\": colormaps.ADE20K_COLORMAP,\n", + " \"voc2012\": colormaps.VOC2012_COLORMAP,\n", + "}\n", + "\n", + "\n", + "def render_segmentation(segmentation_logits, dataset):\n", + " colormap = DATASET_COLORMAPS[dataset]\n", + " colormap_array = np.array(colormap, dtype=np.uint8)\n", + " segmentation_values = colormap_array[segmentation_logits + 1]\n", + " return Image.fromarray(segmentation_values)\n", + "\n", + "\n", + "array = np.array(image)[:, :, ::-1] # BGR\n", + "segmentation_logits = inference_segmentor(model, array)[0]\n", + "segmented_image = render_segmentation(segmentation_logits, HEAD_DATASET)\n", + "display(segmented_image)" + ] + }, + { + "cell_type": "markdown", + "id": "de40012e-a01e-4e73-bb71-3048f16d41c8", + "metadata": {}, + "source": [ + "## Load pretrained segmentation model (Mask2Former)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "ff2cbbbe-c53c-4e5b-977f-c2a7d93f4b8c", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/private/home/plabatut/github/patricklabatut/dinov2/dinov2/eval/segmentation_m2f/models/losses/cross_entropy_loss.py:222: UserWarning: Default ``avg_non_ignore`` is False, if you would like to ignore the certain 
label and average loss over non-ignore labels, which is the same with PyTorch official cross_entropy, set ``avg_non_ignore=True``.\n", + " warnings.warn(\n", + "/private/home/plabatut/.conda/envs/dinov2-extras-conda/lib/python3.9/site-packages/mmcv/ops/multi_scale_deform_attn.py:209: UserWarning: You'd better set embed_dims in MultiScaleDeformAttention to make the dimension of each attention head a power of 2 which is more efficient in our CUDA implementation.\n", + " warnings.warn(\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "load checkpoint from http path: https://dl.fbaipublicfiles.com/dinov2/dinov2_vitg14/dinov2_vitg14_ade20k_m2f.pth\n" + ] + }, + { + "data": { + "text/plain": [ + "EncoderDecoderMask2Former(\n", + " (backbone): ViTAdapter(\n", + " (patch_embed): PatchEmbed(\n", + " (proj): Conv2d(3, 1536, kernel_size=(14, 14), stride=(14, 14))\n", + " (norm): Identity()\n", + " )\n", + " (pos_drop): Dropout(p=0.0, inplace=False)\n", + " (blocks): Sequential(\n", + " (0): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): Identity()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (1): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (2): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (3): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, 
inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (4): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (5): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (6): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (7): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (8): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): 
Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (9): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (10): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (11): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (12): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (13): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, 
out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (14): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (15): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (16): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (17): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (18): Block(\n", + " (norm1): LayerNorm((1536,), 
eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (19): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (20): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (21): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (22): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, 
out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (23): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (24): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (25): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (26): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (27): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " 
(w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (28): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (29): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (30): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (31): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (32): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " 
(mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (33): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (34): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (35): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (36): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (37): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " 
(drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (38): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (39): Block(\n", + " (norm1): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): Attention(\n", + " (qkv): Linear(in_features=1536, out_features=4608, bias=True)\n", + " (attn_drop): Dropout(p=0.0, inplace=False)\n", + " (proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (drop_path): DropPath()\n", + " (norm2): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (mlp): SwiGLUFFN(\n", + " (w1): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w2): Linear(in_features=1536, out_features=4096, bias=True)\n", + " (w3): Linear(in_features=4096, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " )\n", + " (norm_pre): Identity()\n", + " (spm): SpatialPriorModule(\n", + " (stem): Sequential(\n", + " (0): Conv2d(3, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (1): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " (3): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (5): ReLU(inplace=True)\n", + " (6): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (7): SyncBatchNorm(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (8): ReLU(inplace=True)\n", + " (9): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n", + " )\n", + " (conv2): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (1): SyncBatchNorm(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " (conv3): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " (conv4): Sequential(\n", + " (0): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (2): ReLU(inplace=True)\n", + " )\n", + " (fc1): Conv2d(64, 1536, kernel_size=(1, 1), stride=(1, 1))\n", + " (fc2): Conv2d(128, 1536, 
kernel_size=(1, 1), stride=(1, 1))\n", + " (fc3): Conv2d(256, 1536, kernel_size=(1, 1), stride=(1, 1))\n", + " (fc4): Conv2d(256, 1536, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (interactions): Sequential(\n", + " (0): InteractionBlockWithCls(\n", + " (injector): Injector(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): Linear(in_features=1536, out_features=576, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=288, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (extractor): Extractor(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): Linear(in_features=1536, out_features=192, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=96, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " (ffn): ConvFFN(\n", + " (fc1): Linear(in_features=1536, out_features=384, bias=True)\n", + " (dwconv): DWConv(\n", + " (dwconv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384)\n", + " )\n", + " (act): GELU(approximate='none')\n", + " (fc2): Linear(in_features=384, out_features=1536, bias=True)\n", + " (drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ffn_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (drop_path): DropPath()\n", + " )\n", + " )\n", + " (1): InteractionBlockWithCls(\n", + " (injector): Injector(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): Linear(in_features=1536, out_features=576, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=288, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (extractor): Extractor(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): Linear(in_features=1536, out_features=192, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=96, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " (ffn): ConvFFN(\n", + " (fc1): Linear(in_features=1536, out_features=384, bias=True)\n", + " (dwconv): DWConv(\n", + " (dwconv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384)\n", + " )\n", + " (act): GELU(approximate='none')\n", + " (fc2): Linear(in_features=384, out_features=1536, bias=True)\n", + " (drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ffn_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (drop_path): DropPath()\n", + " )\n", + " )\n", + " (2): 
InteractionBlockWithCls(\n", + " (injector): Injector(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): Linear(in_features=1536, out_features=576, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=288, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (extractor): Extractor(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): Linear(in_features=1536, out_features=192, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=96, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " (ffn): ConvFFN(\n", + " (fc1): Linear(in_features=1536, out_features=384, bias=True)\n", + " (dwconv): DWConv(\n", + " (dwconv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384)\n", + " )\n", + " (act): GELU(approximate='none')\n", + " (fc2): Linear(in_features=384, out_features=1536, bias=True)\n", + " (drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ffn_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (drop_path): DropPath()\n", + " )\n", + " )\n", + " (3): InteractionBlockWithCls(\n", + " (injector): Injector(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): Linear(in_features=1536, out_features=576, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=288, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (extractor): Extractor(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): Linear(in_features=1536, out_features=192, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=96, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " (ffn): ConvFFN(\n", + " (fc1): Linear(in_features=1536, out_features=384, bias=True)\n", + " (dwconv): DWConv(\n", + " (dwconv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384)\n", + " )\n", + " (act): GELU(approximate='none')\n", + " (fc2): Linear(in_features=384, out_features=1536, bias=True)\n", + " (drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ffn_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (drop_path): DropPath()\n", + " )\n", + " (extra_extractors): Sequential(\n", + " (0): Extractor(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): 
Linear(in_features=1536, out_features=192, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=96, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " (ffn): ConvFFN(\n", + " (fc1): Linear(in_features=1536, out_features=384, bias=True)\n", + " (dwconv): DWConv(\n", + " (dwconv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384)\n", + " )\n", + " (act): GELU(approximate='none')\n", + " (fc2): Linear(in_features=384, out_features=1536, bias=True)\n", + " (drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ffn_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (drop_path): DropPath()\n", + " )\n", + " (1): Extractor(\n", + " (query_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (feat_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (attn): MSDeformAttn(\n", + " (sampling_offsets): Linear(in_features=1536, out_features=192, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=96, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=768, bias=True)\n", + " (output_proj): Linear(in_features=768, out_features=1536, bias=True)\n", + " )\n", + " (ffn): ConvFFN(\n", + " (fc1): Linear(in_features=1536, out_features=384, bias=True)\n", + " (dwconv): DWConv(\n", + " (dwconv): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384)\n", + " )\n", + " (act): GELU(approximate='none')\n", + " (fc2): Linear(in_features=384, out_features=1536, bias=True)\n", + " (drop): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (ffn_norm): LayerNorm((1536,), eps=1e-06, elementwise_affine=True)\n", + " (drop_path): DropPath()\n", + " )\n", + " )\n", + " )\n", + " )\n", + " (up): ConvTranspose2d(1536, 1536, kernel_size=(2, 2), stride=(2, 2))\n", + " (norm1): SyncBatchNorm(1536, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (norm2): SyncBatchNorm(1536, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (norm3): SyncBatchNorm(1536, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (norm4): SyncBatchNorm(1536, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " (decode_head): Mask2FormerHead(\n", + " input_transform=multiple_select, ignore_index=255, align_corners=False\n", + " (loss_decode): CrossEntropyLoss(avg_non_ignore=False)\n", + " (conv_seg): None\n", + " (dropout): Dropout2d(p=0.1, inplace=False)\n", + " (pixel_decoder): MSDeformAttnPixelDecoder(\n", + " (input_convs): ModuleList(\n", + " (0-2): 3 x ConvModule(\n", + " (conv): Conv2d(1536, 1536, kernel_size=(1, 1), stride=(1, 1))\n", + " (gn): GroupNorm(32, 1536, eps=1e-05, affine=True)\n", + " )\n", + " )\n", + " (encoder): DetrTransformerEncoder(\n", + " (layers): ModuleList(\n", + " (0-5): 6 x BaseTransformerLayer(\n", + " (attentions): ModuleList(\n", + " (0): MultiScaleDeformableAttention(\n", + " (dropout): Dropout(p=0.0, inplace=False)\n", + " (sampling_offsets): Linear(in_features=1536, out_features=768, bias=True)\n", + " (attention_weights): Linear(in_features=1536, out_features=384, bias=True)\n", + " (value_proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (output_proj): Linear(in_features=1536, out_features=1536, bias=True)\n", + " )\n", + " )\n", + " (ffns): ModuleList(\n", + " (0): FFN(\n", + " (activate): 
ReLU(inplace=True)\n", + " (layers): Sequential(\n", + " (0): Sequential(\n", + " (0): Linear(in_features=1536, out_features=6144, bias=True)\n", + " (1): ReLU(inplace=True)\n", + " (2): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (1): Linear(in_features=6144, out_features=1536, bias=True)\n", + " (2): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (dropout_layer): Identity()\n", + " )\n", + " )\n", + " (norms): ModuleList(\n", + " (0-1): 2 x LayerNorm((1536,), eps=1e-05, elementwise_affine=True)\n", + " )\n", + " )\n", + " )\n", + " )\n", + " (postional_encoding): SinePositionalEncoding(num_feats=768, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06)\n", + " (level_encoding): Embedding(3, 1536)\n", + " (lateral_convs): ModuleList(\n", + " (0): ConvModule(\n", + " (conv): Conv2d(1536, 1536, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (gn): GroupNorm(32, 1536, eps=1e-05, affine=True)\n", + " )\n", + " )\n", + " (output_convs): ModuleList(\n", + " (0): ConvModule(\n", + " (conv): Conv2d(1536, 1536, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (gn): GroupNorm(32, 1536, eps=1e-05, affine=True)\n", + " (activate): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (mask_feature): Conv2d(1536, 1536, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (transformer_decoder): DetrTransformerDecoder(\n", + " (layers): ModuleList(\n", + " (0-8): 9 x DetrTransformerDecoderLayer(\n", + " (attentions): ModuleList(\n", + " (0-1): 2 x MultiheadAttention(\n", + " (attn): MultiheadAttention(\n", + " (out_proj): NonDynamicallyQuantizableLinear(in_features=1536, out_features=1536, bias=True)\n", + " )\n", + " (proj_drop): Dropout(p=0.0, inplace=False)\n", + " (dropout_layer): Identity()\n", + " )\n", + " )\n", + " (ffns): ModuleList(\n", + " (0): FFN(\n", + " (activate): ReLU(inplace=True)\n", + " (layers): Sequential(\n", + " (0): Sequential(\n", + " (0): Linear(in_features=1536, out_features=6144, bias=True)\n", + " (1): ReLU(inplace=True)\n", + " (2): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (1): Linear(in_features=6144, out_features=1536, bias=True)\n", + " (2): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (dropout_layer): Identity()\n", + " )\n", + " )\n", + " (norms): ModuleList(\n", + " (0-2): 3 x LayerNorm((1536,), eps=1e-05, elementwise_affine=True)\n", + " )\n", + " )\n", + " )\n", + " (post_norm): LayerNorm((1536,), eps=1e-05, elementwise_affine=True)\n", + " )\n", + " (decoder_input_projs): ModuleList(\n", + " (0-2): 3 x Identity()\n", + " )\n", + " (decoder_positional_encoding): SinePositionalEncoding(num_feats=768, temperature=10000, normalize=True, scale=6.283185307179586, eps=1e-06)\n", + " (query_embed): Embedding(100, 1536)\n", + " (query_feat): Embedding(100, 1536)\n", + " (level_embed): Embedding(3, 1536)\n", + " (cls_embed): Linear(in_features=1536, out_features=151, bias=True)\n", + " (mask_embed): Sequential(\n", + " (0): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (1): ReLU(inplace=True)\n", + " (2): Linear(in_features=1536, out_features=1536, bias=True)\n", + " (3): ReLU(inplace=True)\n", + " (4): Linear(in_features=1536, out_features=1536, bias=True)\n", + " )\n", + " (loss_cls): CrossEntropyLoss(avg_non_ignore=False)\n", + " (loss_mask): CrossEntropyLoss(avg_non_ignore=False)\n", + " (loss_dice): DiceLoss()\n", + " )\n", + ")" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import dinov2.eval.segmentation_m2f.models.segmentors\n", + 
"\n", + "CONFIG_URL = f\"{DINOV2_BASE_URL}/dinov2_vitg14/dinov2_vitg14_ade20k_m2f_config.py\"\n", + "CHECKPOINT_URL = f\"{DINOV2_BASE_URL}/dinov2_vitg14/dinov2_vitg14_ade20k_m2f.pth\"\n", + "\n", + "cfg_str = load_config_from_url(CONFIG_URL)\n", + "cfg = mmcv.Config.fromstring(cfg_str, file_format=\".py\")\n", + "\n", + "model = init_segmentor(cfg)\n", + "load_checkpoint(model, CHECKPOINT_URL, map_location=\"cpu\")\n", + "model.cuda()\n", + "model.eval()" + ] + }, + { + "cell_type": "markdown", + "id": "53c0309f-df2b-4912-bca5-e57d8b3875b3", + "metadata": {}, + "source": [ + "## Semantic segmentation on sample image" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "f4abb13b-0e5a-4a40-8d44-21da4286ba7d", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/private/home/plabatut/.conda/envs/dinov2-extras-conda/lib/python3.9/site-packages/torch/functional.py:504: UserWarning: torch.meshgrid: in an upcoming release, it will be required to pass the indexing argument. (Triggered internally at /opt/conda/conda-bld/pytorch_1678402374358/work/aten/src/ATen/native/TensorShape.cpp:3483.)\n", + " return _VF.meshgrid(tensors, **kwargs) # type: ignore[attr-defined]\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAoAAAAHgCAIAAAC6s0uzAAAfKElEQVR4nO3dO4xlR3rY8WqBomFvSueKjA7W4KJ7Q2Kzuduhm1Lq1JMZAwECpgWGlIeAAGPgbJQ6lcy0tydbKNwZgLCCgSMqNlM5GCy2FZz15eXt+zjv+qrq9wMDPoY9px9z//erOo+LXz+mLN58OuY3fv7x4v7iv8xyAP/8p29m+Tgs5JcfL3IfQnl+SP+u+5t/uPiX7m+ePw74Mr65ePzy0z8/8Qv+18d/6Pmh/ufVdfc33/7Tn/U/gD7u/vI/nP4F/+ObV/P+jozz280nuQ9hJX9/9Vfj/sc/mfc4AIA+sr1Def7xYtwQDGQxYvyd3e3Pv08P3+/9y8vNZvcf/+vLO0Mwa/qL9387bggubIngzaePKT3PfRQQ1Gfp/21XoUd4/njx5b/5cuIx7NX39uffz7UKffvz7w/++w8PD7v/uNdjCKuwAM/FBjAs4eDsu23wtqBDk3wsvQd9eHhIyQTMqoYOwa/++/9JzQYYmN2Jlee9gp4O6m6eB6UXitDVNzUSYPMuLG3GfV/RpUQjdoJrDrDu0pqeG8BvLv54/uPeRUpvLh7TycuQjlnurKsRHt6/y30INGpog3NehvR8+IWePf+Xf/7TN+oLB23rO5dQ9YW8/uL93579Ndtr2cu7Dvjm8e9O/wLphRFmD3MWxl8KUuQS9M3j3z29H5buwvqMv7Cnz0J0NwQXNgFv792xOwdbcIb+jt2cctBNK2My/sbRzn0op8gc4BHbwHukF4Z6/njR/TXx4xh/4aA+O8Gp0CXojvRCRtHqa/ylOPmXoIcOwW8+fez+Wuh4oAJ//vizY//pzcXj9q/RH1994bQ+Q3D+AAOrqeNU5z3qS6EEGNo1rsehxl/1DcgZWD0JMADM7+wqdIgATz8Xmvr8zjb/cP/p+nL718E5de/M591/7DkNG3+hv9MNDhFgYDVTLkBSXxjqRIOt1BPX7z59/KXVkQlO9PLAYxgmfDRgBAGGSnxxfXX6F1R5CjTROAOrP0vQUL+JV/0CUxxbhfZWhdCsQk/0xfXVf+69V/rlqIcBl2VzfCHdpjIrizIBOxEaIrv9+fe5D+EnlojliTbDEkzAULn+J099+U9/tuSB5Hc2sZura3MwqwkU4OcfL9zhGaKJNvuO03+63f5KJWZpgQIMjHb2FOhxWqvvwf9LiVlIlD1gILu93Eau75r7tfaGWYgAAwdErm9/m6vrWfKpwSxBgIF9RdT3dBTnSu9yHxDsAQM/KiK9W+sX0WnSzCjWBOxqYCA4czBziRVggPgsRx/jRtCDCDDAGDJMT39/9VcH/70AA4ynwYwmwACTaDDjhAuw87DY8zs3KCU8DWYEG+YAM/CgQw46tgGcBBhgaW4rzUHhlqCTVWigUnWvVLsG6akT42+KGWDY9UtvyM5Z6FFILKHuBrPrdH2TJWiAle022KJ0y0zAANkYiFsmwFA268+lq6PBNoD3nF1/TpagoWjqWwenSbcpaICff7x44/YL5XjxH+d8C//6f3sNolF707Ae1y1ogIlj3rjG/B0LZfytXtdjGS5On/XnlNLFr0POmcbfeVWQtH989z73IUShu22KnGEbwHsEOKUqwgPQCdtgAd7Vs76p4iVo6QUqs7m6Dthg9R0t4hdu4vgrvQDEV9t1wOoLQBFqCzAA5NJ/AzgFXIKesv5s/AUYqs+tuB7e/fsf/+HifsGjaUm4AI+mvgCDjLwL5uPNtsHOwJoi1tdu3PgrvQCDDE3v5vr/pr05mMliBXgo6QUYZMqzH37M8M4QzGiBTsJy9yuARc3y5KUuw+nxZvqHqsygM7BSnACPqK/xF2hKnLtw/LHBTBMlwACcEKe+HQ3eM3T8TQIMABONqG8KEmC7vwBLm2UDmBmFCPAINoCBdkRbf+64CHii/AE2/gJQrnHrzyl7gN15A2AFS6w/f/3D57N/zKbkn4ABoFCjx99U4p2wjL8A/Tn3KiwTMEC11HdRU8bflH0Cfv7xYtA2sPEXoA/pja+kCVh9AfpQ3yLkD/DzjxfPP16c/WXqC0AcE9efU/Yl6K2uwXvL0aILMJTxtxRRAtx5/vHi315f5T4KAFhcoADfX/z10395e+WZzw359v3+E0b9AEDM+1Ay3YIB3g3qzeN/O9jXs56+IicvyjU6+I3u/r1vN4T19Q+ff/XZd7mPolQXv558J+ZxZV2CV+qyHIvuQb65tGnE+LvyHnCbAZ5+BlYaMQHHye1TFj
DDGtTaYx/BdxOoSa8AR47uad3rvhfuvKbXF+jJKdArmGX8TWcDXG56d+0GQIyXs3RovZ2iNU6/Cmiu+qaUPqkjsf05q+ugvQXeyDPr9th814CiBboMKaPGX9O7Tz9ydA8yEANFy38rylC+fX9TXIcmau3zBRhtxvXnZAI+qNaBuMrWOjsaKJQAn7JXrFAv9FXWdBxr0UCJLEEPEGeBOshhAMtxCnT1BHiw7BlW34N8WcBFwIuadwM4WYIeLcvqtMacZi2aahh/o5m9vkmA5zIljT2Dob49OS2L0qlvIyxB59dnTVt9B/HlAuIzAUdxbPlUS4A+bAAvZ4n15yTA0cjtXOwHA8FZgqZm3tAAYQkwAGQgwFTOEAzEJMDUL/u9U6A/1yC1Q4BphQYDIyx0CnQSYADIwmVINMRNsthd4K3pwtmaPpd2mIBpi4Xolu1trz68f2fDlYwEmOZocJuOtVaDOWG5DeAkwLRJg9llFOagReubBJhmaXBT+vS13AbbAC6UANMuDWZPuQ1mdkuPv0mAaZwGUzrjb7lchgTwo+0QLGwszQQMcEARZ2Z5l1A0AaZ1VqE5IX6D83p4/+5XD7/PfRTzW2EDOAkwJA2u3cSIrtngQntfZYNXIMCQkgZz0jrL0YXWtz5f/OYX6/xGAgzUbMaqCWQLVqtvEmDYMgTXZ/ZkLtfgEute4jH38eLu2Tq/0Sd3L//w6hsZBiqxdBUe3r9r6tzjnl/PXz38/rebGq5rXa2+qbsO+O7lH1JKMgyUbp2ZrPprhWsdbaP5MbpdhgEKtX42irhWeKjRn5FzoYf6ydSrwTTONjAj1NHg7s1EHZ/LaOtc/rtVw5I9QF7BN4Ybz2pY+/u+hmCAEcKOjzGPinTwMiQNBkoUYQZds3Z9Pl/17W/l9ed0bAnatUlA9Z4GTK5Y09E9YA2mNbdX97kPgcy2SZ5S4jgXKXk/0d8Xv/nFi7u1f1OJhZTUtz2n6zhLOxfdFc5e98qsef+NLQEGOGCuwuUaQ42//a2/+9sRYDD+NqdnXAttcNjzsdnjOmCgHpur62PtGVfT7v+a3rPuIyy6biy6xTk1AbseCajA5up6YvkCjsJ7H0p9R8u1/pxMwEBlFpoy5xqFDxr3YYPffouzzgTYxUgAW9OvU5q3mhkH393Potz5O8vVR1viCjDY9GXtTqEnTO197kUP4q9fvc31W58PsJ1ggBl1xS00vanw3O764je/yHsAJmCAkUbPwYWmNx2v7+bq+rcbJxUNI8DgMcBMUs1EeNpcq+7RZLkHVscbFkgppW/f37gdB6OduP64LFUmNiwTMPyROZhmddPtxPp+/cPncx3PCrJvAKeeE7CLkQDqYMbdlXH9OfVfgtZggGPCrj/L7Ql565ssQcMuq9CMELO+Wc6ZKmsVOrsBAXZBMC3QYPoLey2vwbcIzoIGOCVmYk/LeJvor3/4/KvPvsvyWxdnWIDtBNMClyQVpMQ6kl2EU6CTCRgoke4yWpD6JgEGCqK7/XVfqywL0VahexJgOMAqdByiO8XeV8/JWXHG3zQiwLaBgf7kM5Sn346Fkvyrh997NsNZvkDAIqS3CAutVG+uroM3OPtdONK4G3G4IJgWuCCYdni3lEXctycArOZgg6dMxgH3m0NtAKfRt6I0BNMCQ/BoJqo6dLf6Gn3Dr7B3poyw/pzcCxqAPsLed3OoIPVNlqAB6G+186hn160/x6lvmhJg1yMBHNMnS3UMlEUkOdrub8cEDDCbQe3Z/uI6SryV8SZcZZkUYEMw0DiZOabL8ENKca4GDrX+nKZPwBpM3dyTsm5l5bPQiflXD79PWTMcc/05zbIErcHArnXyUFY7T9tcXZ/+ou19sgc/9+BVDn5jrCx8OYDZrNCAmrrbU89PuftlwTOcUbT15+Q6YGAuS7/0b66u1Xf2X7+mbi06i4D1TXNNwFahoWWj0xu5FuXa+6qGmoktRO/yhQAmGff6Lr2r2f1SR4jxyg0OewZWsgQN0I4g73syrkWHMluAPZ4BGhRhomKQprbSI4+/yRI0QIMKvaS4MgIM0K5cJV5hJzj4+Jvm3QO2Cg300c4SaEGaWpoOwklYwHhesivTZbiyb2vMi4DT7EvQLggGqMCJBs+1WL3oKnT89edkAgZgkBnn4xWuRwo7/iYBBmCoUhocub5pibOgrUIDx1S2ubi0yF+uaDfY2lXE+nNa6DIkDYY2RQ4Gy3n6fc+Y5FLqm5ZbgnZJErRGfUeLNkFOF+FU6uDrz8keMDBR9zqb/dWWgCJkODJ3wgKm8iI70ebqur4heOv0j8dXn3034+9V0PpzMgEDkNHXP3y+xIeNv/6cFg2wbWAAzlqowfGZgAGoShHjbxJgALKbcQgupb5JgAGowxe/+UVB9U1LB9g2MBX49v1N7kMAKrT4BKzBAJw2/WKksi5A6liCBiCnGev7+tXbqUezIjfiACCPee/CUZw1JmCr0JTONjDMbq76lrj43LEEDb1oMMxoodm3rLOgLUEDsJ7lbv5cVn3TagH2hGAAztb34B05jv1f2/oWl96OCRggs4ofhbRr9Ox7sMr37y6mHU5+602lTsUCaFaf+o67IWWh429a+SQsDQZoUOOXGx1jXxaABfWsb4MPJVw7wIZgAEgmYACWY/H5BGdBAzC/QeltcP05mYAB8qryGqSl61vBNUgpS4BtAwOQUvr6h8/bnH07eSZgDQZINY6/X332ndOee7IEDcAM+qc35ajvwUcF531+cLYAG4IBTthcXec+hAHin3IV8IZZJmAAJsl4rdHErOatsgAD5FHHBvDQ+k4ff+s4BTrlDbBVaICirV/fmrgRB/Rye3Wf+xAglsinXBUh8xK0IZgiqC/syVXfatafU/YAJw0GKI3Zdxb5AwzBGX9ZQh1nYJ02e31rGn9TkAAbgoGmnK1v5IuAPeBoLk7CAljQw/t325pWMPVafJ6RAMMp1p+Zoivu0O6GHX/Vd14hlqAB6lPBvDuO+vYUJcC2gQnI+Mto9dU3+zOOKjsDK1mChmPUl9HarO/6g2/A5ysMIsAAc6qsvs55Xk6UJehkFRog8BlYpy09/j5dfy59/E2hApw0mDCsP0MKsO97TAX1TdECnLzwAcTQc993hfrujb911DcFDHDSYKBY1WwAh936raa+KeBJWJebzYeHh1k+1NOQf/v+ZpaPTN28BaRxYVeeKxNxAp7FwdfQ26t7r62c5icE+litvvVd/rtVbYBP8ArLMX42IOYlv1WKGODLzWbp06GNwsASCr2IaCvs1m+Vwu0Bz6JnXLe/zN4wyfhLAEX02/g7l4gTcGfNa4K98gLZZa9v/PG3plOgU8wAv3719vWrt2ndLmpw4/wAkNHm6jpvfb/67DtnPq8vYoBf3D3bvs3RYKBuBQ2+69d39xToysbfVOse8Gi7DbYx3A7vvchCehsXOsCjb8rx7fub6S+pt1f3GgwMtbm6Png/rOy53RO/vhVfAdyJuAQdh8EImIX6DlV9fVPwCbiTdxK1KA0M1eV2OwerLwdFD3C3Ct1VMHv/Dg7E2Y8KiClad
zvqG4cl6KncVAsoRSn1fbr+XN8p0KmIAF9uNt3fRO5c5GMDSOXUtx3Rl6ALstdgS9NAHAXVt5HxNxUxAaedIbi/7P2zNA0E0f8uV9nr25QyArw1KGnZGwyQXVn3mNwbf3dvjFifwgI8VPYGG4KBjOI/X+GEitPbKSbAl5tNtxBdXNKsRQNZFLTv22nh5hu7ignwrhKTVtwBA+Xq/3SjFKa+e6off1OhAR4k+yr0lgYDKxi07Byzvo0oLMC7p0P375kGA42oo74tjL+puACPpsFA9Yqub2sbwKn0ABd6VZIGA7MbtOkbrb67Ghl/U+kBTsU2GGBGRV9ulJocf1MFAU7DGxwhw4ZgIIvIs29qafxNdQQ4De9ZkAbLcAS+C5Su9DtNtjn+pmoCnMpscPLqD0xT1p0m2VXe05AuN5sPDw8H/9Pt1f2grA5t8EKxHHrYAIOUUt+m1p9TTRNwZ9GBcrlMmoOBEfqMv8Hr2+z6cyo0wCOeThifBgODVFDfxpW3BN2ZcSE6jr0GF/pZACuor76trT+nQifgs5abJteMopkYOKi++rbp4uXLl7mPYbxjQ/DW7L3MFUXT8EK8y6E4NdV3uwHc4Pibap2At6q51raaTyQUX1KKU1N9KXUPuHP/7uLm+jH3UVAk9aUs9V3v2/j4myqYgPucwj7XS232l2xzMLSpvvqSSg9w//dNE9MVqnxxjqRcvoZUJuxtJjmh7CXolNKLu2cf0plTsba6l90gN8CaotxLrbIL+N2EE2rd9G35/htbZU/AnaH35ajjJbiOzwKYooLBt9kN4FTBBDzOiVteFBQ2c/AgBX1noVP6U345rdEA7yn3pVmDzyr3m0vjzta39NmXGpagGycwJ/jiUCj1bUElE7ALgnlKfSnU6fpKbzVMwDVQmqd8TahSZfVt+QysVFOAGz+pXW+gDsfG3wpOeN7qXq4br2+qJsC+kezydoTKVJPe1PywtKuSAANUwHVHTRHgehj7oGgH61vTynPyAIafqifAL+6eDb0lFkBkNaWXp+oJMEC5no6/6lu9Sq4DBqhGrel1+tWe2gJ8udl8eOj7cKT6RLsz5elt6VCHCnl9/cPnX332Xa3p3WMDuFNbgFPzDQ6izxlh8gy71Lc1Fe4Bv371tuWFjmrOhb69uq/mcwHu3128uHumvrsqnIBf3D17/ept7qPIKftC9Izh7D6UaZjs9n4IvTtkugon4OSSpOpeHUzDZPTt+5unbwG9KRyk5VXJEyqcgKlV9sme1pz+edv+V+8OGUeAq1Vlrnp+Ut++v/GayCBT/rAo8WnG32PqXILu+K5buYWzDq4wj/5Qs3wcGlFzgJMGAyfNnkwNpr/KA5w0uNVlMa+DnDbj4LvaRy6aC5Ceqj/ApLoa3P9z8SLIU10dV/jZkOGOEegEAW5FTfvBGsw46/88+AnsGH8PqjnA22+5t2BbDTYYOrlaaBTmmMovQ3JXLFyS1Lgg8WvzUiXDz2k1T8Cdbg72c7C1wp//dV7yBi1EB3kVZjWr7fUOFfCQFuJV96z6A7zlp2GrqffgNCh45IIf3uxsAB/TRIB9+59ausEBX2ICHhJLKOIbHXM6n5GBp4/K94D33L+7uLl+zH0Ucc376KGAm69PP7VoR8hEZVXt4NH6mWxHKwHuhmAnZO06dl/lKm8ifYxnzFWjmh/ayk7XsgB5QhNL0BxT+p/w2Y+/mhdxKlDuT+N2/Vl9T2srwC/untmZ2HOwYaWHeYrqN+eqVOu3rOifRvU9q60Ac1CX273oztLgFV47FnqvEPYiFhpU1o9iN+Sobx8tBtgQ/JQ5+JiCXvioWxEZ9uo6SIsBTn5KeqvpDtIAoTQaYAaZ0uByV6EpRfy5cEZNfbLVE2B6mTIKl/6SUfrxU5mwa9FOfh6quQB7RNIUYRtsCG5WzBStoNlPvCbNBZiJwu4KxzwqgGNaDLAhuFYLNVjaw2p8Coy5Fm39ub8WA8x0YZsU9sBgIaEarL6DNBpgQ3DFukXyuUqs6MQXocFeS0doNMBMF79M048w/ufYsgjVoaO+4wiwH52VFPdyqb4UJOZ+MKe18jhCljDv84OX8DSinsBKxbI8hNvlv6O1G+AXd888HrhBWluHyG/78srSYMZpegna+7VZ9P/T7kWTWfhBOs3XpxRNBzj9/wbbBl6NlwYm8iPUx/Z5mqt9ucwzI7S7BA3Qgm2Dl1iavtxsLu9m/6itaH0C3jIEr8YEA1n4oxeNAAO0wtVKoQiwu2JBMcRjFnNl+HKzmf5BWibAzGDo3pKXUUbwYzMvX8/sBBgogFpEc//uwq0UJhLgn7AKPZohGIrjj2FeAkw2/vBDduP+GJpVZiHAKbmEfCZugMdCvFdblC9vLgK8zzu7NfmTDzRLgMlMg6FQ1g4nEuA/2v1JMgSvzM0BgAYJ8I+8m5vONjC04MXdMy+Y0wnwYYbg9RmCgaYI8E94T5eXtWjIYtCfO/PJXAQYCM17snX4Oq9PgAFIqV+Djb8zEuCj/JwBsBwB3mcbGOKwLrqy019wY8m8BPgADQY4xivkXD7JfQCh3b+7uLl+zH0Uhbm9uje1QH0uN5vLu9wHURcT8GHe4k3hdhxQroNXA15uNlkOpm4CfIY9j3Fur+5lGMrlovwVCDALkmGAYwT4PEPwRIMaLNhsmcCCsP68EAFmDT2zqr5sqW8El5uN+i7HWdCsRFwBdpmAj/KEYMjF+EsLTMB9uSYYlqO4Mb365k/uXv4h91FUS4BPeXH37PWrt9t/1GBq0rN5y+0diC6NswR9hjty0LjuetB5Y+ka04K8+kYmluIrC/QyVzKlFzqWoIexCk0dxlVw+3+NWJfWXdgjwOft7QQDagrTWYIGgAwEuBfXBAMwLwHuy+nQQJucCL0QX9YxDMGUzp1BITsBHsAQDDTIzbAWIsAjGYIBmEKAASADAYZG3V7d2wmGjNyIA5p2sMFP77Nxe3Xf/ctjzV7i1hx7v5e7f1CZi5cvX+Y+hsLs3hXLbSnhhCnJfFp6Ac7FSVgLMQEP5s6U0FPP8brn/wiVEWBgVaPjul0Ghzo4CWuM7QXBLkaC1agvlRHgqTQYgBEEeCR3xQJgCgEGgAwEeDw7wQCMJsAAkIEAT2InGKibu3AsR4Cn6hpsFRoW5Rok6iPAs9FgoDLG30UJMABkIMBzMgQD0JMAz8CpWAAMJcAAkIEAA0AGAjwz28AA9CHA87ANDMAgAjwbt4YGauIi4KUJ8JzMwbAEt8GiSgK8CEMwAKcJ8MwMwQD0IcBLMQTDLKw/U6tPch8AwGHSS90EeH4v7p69fvU291FAUHtZvb26P/troEoCDCzrdE21lmYJ8CK6Ifj+3cXN9WPuY4H1qCn0J8BLsRBNO3QXRnAWNDCJ+sI4ArwsFyNRN/WF0SxBL85OMFWSXpjIBAwAGQgwAPs8CmkFArwgDygE4BgBBuAnjL/rcBLWslwNDKc9vRWl07vyUt/VmIAX1y1EW4WGXbdX991fx/7T+ocEKxPgNXhIMGz17KsGUz1L0OtxQTDNUtNSWH9ekwCvSoNphOLCWQIM
zEx9S2T2XZ894JXYBgZglwkYGMOYWxPjbxYm4PW4HgmALQFelYVoIBrjby4CDNAu9c1IgDOwCg1EoL55CXAeGgzQOAFem21gAJIAZ2QIBjKy/pydAANABgKcgVVoOMtTgameAOfhphwAjRNgIBzjLy0Q4GwMwXCQ+tIIAQaADAQ4J0MwkMurb7z+Z+YbAAAZCHBmLkkCaJMA5/fi7tnlZpP7KABYlQADgTgFmnYIMABkIMBROBcaoCkCHIgGA7RDgIFAbq/ucx8CrESAYzEEAzRCgKNwQTBAUwQYADIQ4EAMwZBsA9MMAY7lxd0z28AALRDgiDSYxhmCaYEAA0AGAhyOhwRDSun26r77K/eBwFI+yX0AAKcMbbDHOVAKE3BEhmAYzdxMKS5evnyZ+xg46sPDQ+5DgLIZiI+5e/mH3IfQOkvQQM0OTsOqTAQCHNrlZmMIhtntVlmMycUeMNA0G8bkIsDRORULlua8LbKwBA2Q0hyjsNVsBjEBF8AQDEUwSTOIAAPMSYPpSYCj84xCKI4G04cAF0CDAeojwGW43GxyHwIwQPAh2G2wIhBggLaobxACDAAZCDAAZCDAAA2x/hyHAANABgJcBs9EAqiMAAPML+Z9oa0/hyLAADOLWV+iEWCAJhh/oxHgArx+9Tb3IQB9xRx/1TcgAY5OfYGJ1DcmAS7AzfVj7kMAeok5/hLTJ7kPAKASQeq79xwIj3IJS4ABZpClvmefuaS+kQkwwFTr1zf44w7pQ4ABxos5+FIEAQYYaeX66m5lBDg6p0BDTGvWd3R6Pzw82AYOy2VIAIMVUd+OR7mEZQIGGKCg9BKcAIfmrSvEsVx6lw6theiYBBjgvIXqu9qMq8EB2QMGOKP0+hKTAAOcUk19Pzw8eLhLKAIclw1gyC7I7Z3n4rLGUAQYoCHe2cchwACQgbOgAdrS7QS/uHuW+0Bq1melQYAB1vaz9DeblO0U6Jvrx/t3F7l+9/qMXtUXYIBV/Sz9Te5DSDfXj6/fvTUEH7TaNrkAA5DZweadvnNIBWeTCTBAi26uH1PWW2OdLWgFiT3NWdAAjcpVuA8PD9XHtQ8TMAAr0d1dJmCAdq1ZRPXdI8AAR81+x+YIp0AThADH5dlhEIFnFs3C+PuUAAM0bYVHJKnvQQIMcEb1Q/CiDVbfYwQ4Lj+1EEfFDV70GYVex05wGRJAL12Dd58QfLDKhT5C+PWree5MuTdM3+jvcQIMMMDZUXj3F5QS4+7xDBOfkrTCXnJlLEEDLOX26r77K/eBDDCuowf/r0UXtysgwACQwb8Cfzi08Mu2voAAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "array = np.array(image)[:, :, ::-1] # BGR\n", + "segmentation_logits = inference_segmentor(model, array)[0]\n", + "segmented_image = render_segmentation(segmentation_logits, \"ade20k\")\n", + "display(segmented_image)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.17" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/engine/pose_estimation/dinov2/pyproject.toml b/engine/pose_estimation/dinov2/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..da67abd8ceabe6d427a96e5d9d4f04b25aebcd32 --- /dev/null +++ b/engine/pose_estimation/dinov2/pyproject.toml @@ -0,0 +1,29 @@ +[tool.black] +line-length = 120 + +[tool.pylint.master] +persistent = false +score = false + +[tool.pylint.messages_control] +disable = "all" +enable = [ + "miscellaneous", + "similarities", +] + +[tool.pylint.similarities] +ignore-comments = true +ignore-docstrings = true +ignore-imports = true +min-similarity-lines = 8 + +[tool.pylint.reports] +reports = false + +[tool.pylint.miscellaneous] +notes = [ + "FIXME", + "XXX", + "TODO", +] diff --git a/engine/pose_estimation/dinov2/requirements-dev.txt b/engine/pose_estimation/dinov2/requirements-dev.txt new file mode 100644 index 0000000000000000000000000000000000000000..5cad34c34cde3a182b616d68b168588827eb9b7c --- /dev/null +++ b/engine/pose_estimation/dinov2/requirements-dev.txt @@ -0,0 +1,3 @@ +black==22.6.0 +flake8==5.0.4 +pylint==2.15.0 diff --git a/engine/pose_estimation/dinov2/requirements-extras.txt b/engine/pose_estimation/dinov2/requirements-extras.txt new file mode 100644 index 0000000000000000000000000000000000000000..ac75fb3eace10f58fa6b5bc373f24358394141e5 --- /dev/null +++ b/engine/pose_estimation/dinov2/requirements-extras.txt @@ -0,0 +1,2 @@ +mmcv-full==1.5.0 +mmsegmentation==0.27.0 
diff --git a/engine/pose_estimation/dinov2/requirements.txt b/engine/pose_estimation/dinov2/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..04c159c443b89330ff3c84257c41b011f9791257 --- /dev/null +++ b/engine/pose_estimation/dinov2/requirements.txt @@ -0,0 +1,11 @@ +--extra-index-url https://download.pytorch.org/whl/cu117 +torch==2.0.0 +torchvision==0.15.0 +omegaconf +torchmetrics==0.10.3 +fvcore +iopath +xformers==0.0.18 +submitit +--extra-index-url https://pypi.nvidia.com +cuml-cu11 diff --git a/engine/pose_estimation/dinov2/scripts/lint.sh b/engine/pose_estimation/dinov2/scripts/lint.sh new file mode 100644 index 0000000000000000000000000000000000000000..b91acaf762c4be3a0c9d2a162210bfebfaacba08 --- /dev/null +++ b/engine/pose_estimation/dinov2/scripts/lint.sh @@ -0,0 +1,28 @@ +#!/bin/sh + +if [ -n "$1" ]; then + echo "linting \"$1\"" +fi + +echo "running black" +if [ -n "$1" ]; then + black "$1" +else + black dinov2 +fi + +echo "running flake8" +if [ -n "$1" ]; then + flake8 "$1" +else + flake8 +fi + +echo "running pylint" +if [ -n "$1" ]; then + pylint "$1" +else + pylint dinov2 +fi + +exit 0 diff --git a/engine/pose_estimation/dinov2/setup.cfg b/engine/pose_estimation/dinov2/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..a2b19d6b2045143e056628b09fb9a5db357a6e07 --- /dev/null +++ b/engine/pose_estimation/dinov2/setup.cfg @@ -0,0 +1,8 @@ +[flake8] +max-line-length = 120 +ignore = E203,E501,W503 +per-file-ignores = + __init__.py:F401 + hubconf.py:F401 +exclude = + venv diff --git a/engine/pose_estimation/dinov2/setup.py b/engine/pose_estimation/dinov2/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..daa9d6322fc3a451d2a07038ffdb9eea709e96bf --- /dev/null +++ b/engine/pose_estimation/dinov2/setup.py @@ -0,0 +1,88 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the Apache License, Version 2.0 +# found in the LICENSE file in the root directory of this source tree. + +from pathlib import Path +import re +from typing import List, Tuple + +from setuptools import setup, find_packages + + +NAME = "dinov2" +DESCRIPTION = "PyTorch code and models for the DINOv2 self-supervised learning method." 
+ +URL = "https://github.com/facebookresearch/dinov2" +AUTHOR = "FAIR" +REQUIRES_PYTHON = ">=3.9.0" +HERE = Path(__file__).parent + + +try: + with open(HERE / "README.md", encoding="utf-8") as f: + long_description = "\n" + f.read() +except FileNotFoundError: + long_description = DESCRIPTION + + +def get_requirements(path: str = HERE / "requirements.txt") -> Tuple[List[str], List[str]]: + requirements = [] + extra_indices = [] + with open(path) as f: + for line in f.readlines(): + line = line.rstrip("\r\n") + if line.startswith("--extra-index-url "): + extra_indices.append(line[18:]) + continue + requirements.append(line) + return requirements, extra_indices + + +def get_package_version() -> str: + with open(HERE / "dinov2/__init__.py") as f: + result = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", f.read(), re.M) + if result: + return result.group(1) + raise RuntimeError("Can't get package version") + + +requirements, extra_indices = get_requirements() +version = get_package_version() +dev_requirements, _ = get_requirements(HERE / "requirements-dev.txt") +extras_requirements, _ = get_requirements(HERE / "requirements-extras.txt") + + +setup( + name=NAME, + version=version, + description=DESCRIPTION, + long_description=long_description, + long_description_content_type="text/markdown", + author=AUTHOR, + python_requires=REQUIRES_PYTHON, + url=URL, + packages=find_packages(), + package_data={ + "": ["*.yaml"], + }, + install_requires=requirements, + extras_require={ + "dev": dev_requirements, + "extras": extras_requirements, + }, + dependency_links=extra_indices, + install_package_data=True, + license="Apache", + license_files=("LICENSE",), + classifiers=[ + # Trove classifiers: https://github.com/pypa/trove-classifiers/blob/main/src/trove_classifiers/__init__.py + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.9", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Software Development :: Libraries :: Python Modules", + ], +) diff --git a/engine/pose_estimation/install_runtime.sh b/engine/pose_estimation/install_runtime.sh new file mode 100644 index 0000000000000000000000000000000000000000..22d1a32d044636ec7efd653942519604e8eaad93 --- /dev/null +++ b/engine/pose_estimation/install_runtime.sh @@ -0,0 +1,3 @@ +pip3 install -U xformers==0.0.22.post3+cu118 --index-url https://download.pytorch.org/whl/cu118 + +pip3 install -v -e third-party/ViTPose \ No newline at end of file diff --git a/engine/pose_estimation/model.py b/engine/pose_estimation/model.py new file mode 100644 index 0000000000000000000000000000000000000000..c859e99718ffc21043340357751a57a5f4061ace --- /dev/null +++ b/engine/pose_estimation/model.py @@ -0,0 +1,810 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. 
+# CC BY-NC-SA 4.0 license + +import os +import sys + +sys.path.append("./") +sys.path.append("./engine") +sys.path.append("./engine/pose_estimation") +import copy + +import einops +import numpy as np +import roma +import torch +import torch.nn as nn +from blocks import ( + Dinov2Backbone, + FourierPositionEncoding, + SMPL_Layer, + TransformerDecoder, +) +from pose_utils import ( + inverse_perspective_projection, + pad_to_max, + rebatch, + rot6d_to_rotmat, + undo_focal_length_normalization, + undo_log_depth, + unpatch, +) +from torch import nn + + +def unravel_index(index, shape): + out = [] + for dim in reversed(shape): + out.append(index % dim) + index = index // dim + return tuple(reversed(out)) + + +class Model(nn.Module): + """A ViT backbone followed by a "HPH" head (stack of cross attention layers with queries corresponding to detected humans.)""" + + def __init__( + self, + backbone="dinov2_vitb14", + pretrained_backbone=False, + img_size=896, + camera_embedding="geometric", # geometric encodes viewing directions with fourrier encoding + camera_embedding_num_bands=16, # increase the size of the camera embedding + camera_embedding_max_resolution=64, # does not increase the size of the camera embedding + nearness=True, # regress log(1/z) + xat_depth=2, # number of cross attention block (SA, CA, MLP) in the HPH head. + xat_num_heads=8, # Number of attention heads + dict_smpl_layer=None, + person_center="head", + clip_dist=True, + num_betas=10, + smplx_dir=None, + *args, + **kwargs, + ): + super().__init__() + # Save options + self.img_size = img_size + self.nearness = nearness + self.clip_dist = (clip_dist,) + self.xat_depth = xat_depth + self.xat_num_heads = xat_num_heads + self.num_betas = num_betas + self.output_mesh = True + + # Setup backbone + self.backbone = Dinov2Backbone(backbone, pretrained=pretrained_backbone) + self.embed_dim = self.backbone.embed_dim + self.patch_size = self.backbone.patch_size + assert self.img_size % self.patch_size == 0, "Invalid img size" + + # Camera instrinsics + self.fovn = 60 + self.camera_embedding = camera_embedding + self.camera_embed_dim = 0 + if self.camera_embedding is not None: + if not self.camera_embedding == "geometric": + raise NotImplementedError( + "Only geometric camera embedding is implemented" + ) + self.camera = FourierPositionEncoding( + n=3, + num_bands=camera_embedding_num_bands, + max_resolution=camera_embedding_max_resolution, + ) + # import pdb + # pdb.set_trace() + self.camera_embed_dim = self.camera.channels + + # Heads - Detection + self.mlp_classif = regression_mlp( + [self.embed_dim, self.embed_dim, 1] + ) # bg or human + + # Heads - Human properties + self.mlp_offset = regression_mlp([self.embed_dim, self.embed_dim, 2]) # offset + + # SMPL Layers + self.nrot = 53 + dict_smpl_layer = { + "neutral": { + 10: SMPL_Layer( + smplx_dir, + type="smplx", + gender="neutral", + num_betas=10, + kid=False, + person_center=person_center, + ), + 11: SMPL_Layer( + smplx_dir, + type="smplx", + gender="neutral", + num_betas=11, + kid=False, + person_center=person_center, + ), + } + } + _moduleDict = [] + for k, _smpl_layer in dict_smpl_layer.items(): + for x, y in _smpl_layer.items(): + _moduleDict.append([f"{k}_{x}", copy.deepcopy(y)]) + self.smpl_layer = nn.ModuleDict(_moduleDict) + + self.x_attention_head = HPH( + num_body_joints=self.nrot - 1, # 23, + context_dim=self.embed_dim + self.camera_embed_dim, + dim=1024, + depth=self.xat_depth, + heads=self.xat_num_heads, + mlp_dim=1024, + dim_head=32, + dropout=0.0, + emb_dropout=0.0, + 
at_token_res=self.img_size // self.patch_size, + num_betas=self.num_betas, + smplx_dir=smplx_dir, + ) + + print(f"person center is {person_center}") + + # set whether do filter + def set_filter(self, apply_filter): + self.apply_filter = apply_filter + + def detection( + self, + z, + nms_kernel_size, + det_thresh, + N, + idx=None, + max_dist=None, + is_training=False, + ): + """Detection score on the entire low res image""" + scores = _sigmoid(self.mlp_classif(z)) # per token detection score. + # Restore Height and Width dimensions. + scores = unpatch( + scores, patch_size=1, c=scores.shape[2], img_size=int(np.sqrt(N)) + ) + pseudo_idx = idx + if not is_training: + if ( + nms_kernel_size > 1 + ): # Easy nms: supress adjacent high scores with max pooling. + scores = _nms(scores, kernel=nms_kernel_size) + _scores = torch.permute(scores, (0, 2, 3, 1)) + + # Binary decision (keep confident detections) + idx = apply_threshold(det_thresh, _scores) + if pseudo_idx is not None: + max_dist = 4 if max_dist is None else max_dist + mask = (torch.abs(idx[1] - pseudo_idx[1]) <= max_dist) & ( + torch.abs(idx[2] - pseudo_idx[2]) <= max_dist + ) + idx_num = torch.sum(mask) + if idx_num < 1: + top = torch.clamp( + pseudo_idx[1] - max_dist, min=0, max=_scores.shape[1] - 1 + ) + bottom = torch.clamp( + pseudo_idx[1] + max_dist, min=0, max=_scores.shape[1] + ) + left = torch.clamp( + pseudo_idx[2] - max_dist, min=0, max=_scores.shape[2] - 1 + ) + right = torch.clamp( + pseudo_idx[2] + max_dist, min=0, max=_scores.shape[2] + ) + + neigborhoods = _scores[:, top:bottom, left:right, :] + + idx = torch.argmax(neigborhoods) + try: + idx = unravel_index(idx, neigborhoods.shape) + except Exception as e: + print(pseudo_idx) + raise e + idx = ( + pseudo_idx[0], + idx[1] + pseudo_idx[1] - max_dist, + idx[2] + pseudo_idx[2] - max_dist, + pseudo_idx[3], + ) + + elif idx_num > 1: # TODO + + idx = (idx[0][mask], idx[1][mask], idx[2][mask], idx[3][mask]) + else: + idx = (idx[0][mask], idx[1][mask], idx[2][mask], idx[3][mask]) + # elif bbox is not None: + # mask = (idx[1] >= bbox[1]) & (idx[1] >= bbox[3]) & (idx[2] >= bbox[0]) & (idx[2] <= bbox[2]) + # idx_num = torch.sum(mask) + # if idx_num < 1: + # top = torch.clamp(bbox[1], min=0, max=_scores.shape[1]-1) + # bottom = torch.clamp(bbox[3], min=0, max=_scores.shape[1]-1) + # left = torch.clamp(bbox[0], min=0, max=_scores.shape[2]-1) + # right = torch.clamp(bbox[2], min=0, max=_scores.shape[2]-1) + + # neigborhoods = _scores[:, top:bottom, left:right, :] + # idx = torch.argmax(neigborhoods) + # try: + # idx = unravel_index(idx, neigborhoods.shape) + # except Exception as e: + # print(pseudo_idx) + # raise e + + # idx = (idx[0], idx[1] + top, idx[2] + left, idx[3]) + # else: + # idx = (idx[0][mask], idx[1][mask], idx[2][mask], idx[3][mask]) + else: + assert idx is not None # training time + # Scores + scores_detected = scores[ + idx[0], idx[3], idx[1], idx[2] + ] # scores of the detected humans only + + scores = torch.permute(scores, (0, 2, 3, 1)) + return scores, scores_detected, idx + + def embedd_camera(self, K, z): + """Embed viewing directions using fourrier encoding.""" + bs = z.shape[0] + _h, _w = list(z.shape[-2:]) + points = ( + torch.stack( + [ + torch.arange(0, _h, 1).reshape(-1, 1).repeat(1, _w), + torch.arange(0, _w, 1).reshape(1, -1).repeat(_h, 1), + ], + -1, + ) + .to(z.device) + .float() + ) # [h,w,2] + points = ( + points * self.patch_size + self.patch_size // 2 + ) # move to pixel space - we give the pixel center of each token + points = points.reshape(1, -1, 
2).repeat(bs, 1, 1) # (bs, N, 2): 2D points + distance = torch.ones(bs, points.shape[1], 1).to( + K.device + ) # (bs, N, 1): distance in the 3D world + rays = inverse_perspective_projection(points, K, distance) # (bs, N, 3) + rays_embeddings = self.camera(pos=rays) + + # Repeat for each element of the batch + z_K = rays_embeddings.reshape(bs, _h, _w, self.camera_embed_dim) # [bs,h,w,D] + return z_K + + def to_euclidean_dist(self, x, dist, _K): + # Focal length normalization + focal = _K[:, [0], [0]] + dist = undo_focal_length_normalization( + dist, focal, fovn=self.fovn, img_size=x.shape[-1] + ) + # log space + if self.nearness: + dist = undo_log_depth(dist) + + # Clamping + if self.clip_dist: + dist = torch.clamp(dist, 0, 50) + + return dist + + def get_smpl(self): + return self.smpl_layer[f"neutral_{self.num_betas}"] + + def generate_meshes(self, out): + """ + Generates meshes for each person detected in the image. + + This function processes the output of the detection model, which includes rotation vectors, + shapes, locations, distances, expressions, and other information related to SMPL-X parameters. + + Parameters: + out (dict): A dictionary containing detection results and SMPL-X related parameters. + + Returns: + list: A list of dictionaries, each containing information about a detected person's mesh. + """ + # Neutral + persons = [] + rotvec, shape, loc, dist, expression, K_det = ( + out["rotvec"], + out["shape"], + out["loc"], + out["dist"], + out["expression"], + out["K_det"], + ) + scores_det = out["scores_det"] + idx = out["idx"] + smpl_out = self.smpl_layer[f"neutral_{self.num_betas}"]( + rotvec, shape, loc, dist, None, K=K_det, expression=expression + ) + out.update(smpl_out) + + for i in range(idx[0].shape[0]): + person = { + # Detection + "scores": scores_det[i], # detection scores + "loc": out["loc"][i], # 2d pixel location of the primary keypoints + # SMPL-X params + "transl": out["transl"][i], # from the primary keypoint i.e. 
the head + "transl_pelvis": out["transl_pelvis"][i], # of the pelvis joint + "rotvec": out["rotvec"][i], + "expression": out["expression"][i], + "shape": out["shape"][i], + # SMPL-X meshs + "v3d": out["v3d"][i], + "j3d": out["j3d"][i], + "j2d": out["j2d"][i], + } + persons.append(person) + + return persons + + def forward( + self, + x, + idx=None, + max_dist=None, + det_thresh=0.3, + nms_kernel_size=3, + K=None, + is_training=False, + *args, + **kwargs, + ): + """ + Forward pass of the model and compute the loss according to the groundtruth + Args: + - x: RGB image - [bs,3,224,224] + - idx: GT location of persons - tuple of 3 tensor of shape [p] + - idx_j2d: GT location of 2d-kpts for each detected humans - tensor of shape [bs',14,2] - location in pixel space + Return: + - y: [bs,D,16,16] + """ + persons = [] + out = {} + + # Feature extraction + z = self.backbone(x) + B, N, C = z.size() # [bs,256,768] + + # Detection + scores, scores_det, idx = self.detection( + z, + nms_kernel_size=nms_kernel_size, + det_thresh=det_thresh, + N=N, + idx=idx, + max_dist=max_dist, + is_training=is_training, + ) + if torch.any(scores_det < 0.1): + return persons + if len(idx[1]) == 0 and not is_training: + # no humans detected in the frame + return persons + + # Map of Dense Feature + z = unpatch( + z, patch_size=1, c=z.shape[2], img_size=int(np.sqrt(N)) + ) # [bs,D,16,16] + z_all = z + + # Extract the 'central' features + z = torch.reshape( + z, (z.shape[0], 1, z.shape[1] // 1, z.shape[2], z.shape[3]) + ) # [bs,stack_K,D,16,16] + z_central = z[idx[0], idx[3], :, idx[1], idx[2]] # dense vectors + + # 2D offset regression + offset = self.mlp_offset(z_central) + + # Camera instrincs + K_det = K[idx[0]] # cameras for detected person + z_K = self.embedd_camera(K, z) # Embed viewing directions. + z_central = torch.cat( + [z_central, z_K[idx[0], idx[1], idx[2]]], 1 + ) # Add to query tokens. + z_all = torch.cat( + [z_all, z_K.permute(0, 3, 1, 2)], 1 + ) # for the cross-attention only + z = torch.cat([z, z_K.permute(0, 3, 1, 2).unsqueeze(1)], 2) + + # Distance for estimating the 3D location in 3D space + loc = torch.stack([idx[2], idx[1]]).permute( + 1, 0 + ) # Moving from higher resolution the location of the pelvis + loc = (loc + 0.5 + offset) * self.patch_size + + # SMPL parameter regression + kv = z_all[ + idx[0] + ] # retrieving dense features associated to each central vector + pred_smpl_params, pred_cam = self.x_attention_head( + z_central, kv, idx_0=idx[0], idx_det=idx + ) + + # Get outputs from the SMPL layer. 
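+ # pred_smpl_params holds rotation matrices: global_orient is (B, 1, 3, 3) and body_pose is
+ # (B, nrot-1, 3, 3), concatenated into a (B, 53, 3, 3) tensor here. The SMPL-X layer consumes
+ # axis-angle, hence the roma.rotmat_to_rotvec conversion below. pred_cam[:, 0] is the person
+ # distance in the normalized (focal-length scaled and, with nearness, log) space, which
+ # to_euclidean_dist maps back to a clamped metric distance.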
+ shape = pred_smpl_params["betas"] + rotmat = torch.cat( + [pred_smpl_params["global_orient"], pred_smpl_params["body_pose"]], 1 + ) + expression = pred_smpl_params["expression"] + rotvec = roma.rotmat_to_rotvec(rotmat) + + # Distance + dist = pred_cam[:, 0][:, None] + out["dist_postprocessed"] = ( + dist # before applying any post-processing such as focal length normalization, inverse or log + ) + dist = self.to_euclidean_dist(x, dist, K_det) + + # Populate output dictionnary + out.update( + { + "scores": scores, + "offset": offset, + "dist": dist, + "expression": expression, + "rotmat": rotmat, + "shape": shape, + "rotvec": rotvec, + "loc": loc, + } + ) + + assert ( + rotvec.shape[0] == shape.shape[0] == loc.shape[0] == dist.shape[0] + ), "Incoherent shapes" + + if not self.output_mesh: + out.update( + { + "K_det": K_det, + "scores_det": scores_det, + "idx": idx, + } + ) + return out + + # Neutral + smpl_out = self.smpl_layer[f"neutral_{self.num_betas}"]( + rotvec, shape, loc, dist, None, K=K_det, expression=expression + ) + out.update(smpl_out) + + # Return + if is_training: + return out + else: + # Populate a dictionnary for each person + for i in range(idx[0].shape[0]): + person = { + # Detection + "scores": scores_det[i], # detection scores + "loc": out["loc"][i], # 2d pixel location of the primary keypoints + # SMPL-X params + "transl": out["transl"][ + i + ], # from the primary keypoint i.e. the head + "transl_pelvis": out["transl_pelvis"][i], # of the pelvis joint + "rotvec": out["rotvec"][i], + "expression": out["expression"][i], + "shape": out["shape"][i], + # SMPL-X meshs + "v3d": out["v3d"][i], + "j3d": out["j3d"][i], + "j2d": out["j2d"][i], + "dist": out["dist"][i], + "offset": out["offset"][i], + } + persons.append(person) + + return persons + + +class HPH(nn.Module): + """Cross-attention based SMPL Transformer decoder + + Code modified from: + https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/heads/smpl_head.py#L17 + https://github.com/shubham-goel/4D-Humans/blob/a0def798c7eac811a63c8220fcc22d983b39785e/hmr2/models/components/pose_transformer.py#L301 + """ + + def __init__( + self, + num_body_joints=52, + context_dim=1280, + dim=1024, + depth=2, + heads=8, + mlp_dim=1024, + dim_head=64, + dropout=0.0, + emb_dropout=0.0, + at_token_res=32, + num_betas=10, + smplx_dir=None, + ): + super().__init__() + + self.joint_rep_type, self.joint_rep_dim = "6d", 6 + self.num_body_joints = num_body_joints + self.nrot = self.num_body_joints + 1 + + npose = self.joint_rep_dim * (self.num_body_joints + 1) + self.npose = npose + + self.depth = (depth,) + self.heads = (heads,) + self.res = at_token_res + self.input_is_mean_shape = True + _context_dim = context_dim # for the central features + self.num_betas = num_betas + assert num_betas in [10, 11] + + # Transformer Decoder setup. 
+ # Based on https://github.com/shubham-goel/4D-Humans/blob/8830bb330558eea2395b7f57088ef0aae7f8fa22/hmr2/configs_hydra/experiment/hmr_vit_transformer.yaml#L35 + transformer_args = dict( + num_tokens=1, + token_dim=( + (npose + self.num_betas + 3 + _context_dim) + if self.input_is_mean_shape + else 1 + ), + dim=dim, + depth=depth, + heads=heads, + mlp_dim=mlp_dim, + dim_head=dim_head, + dropout=dropout, + emb_dropout=emb_dropout, + context_dim=context_dim, + ) + self.transformer = TransformerDecoder(**transformer_args) + + dim = transformer_args["dim"] + + # Final decoders to regress targets + self.decpose, self.decshape, self.deccam, self.decexpression = [ + nn.Linear(dim, od) for od in [npose, num_betas, 3, 10] + ] + + # Register bufffers for the smpl layer. + self.set_smpl_init(smplx_dir) + + # Init learned embeddings for the cross attention queries + self.init_learned_queries(context_dim) + + def init_learned_queries(self, context_dim, std=0.2): + """Init learned embeddings for queries""" + self.cross_queries_x = nn.Parameter(torch.zeros(self.res, context_dim)) + torch.nn.init.normal_(self.cross_queries_x, std=std) + + self.cross_queries_y = nn.Parameter(torch.zeros(self.res, context_dim)) + torch.nn.init.normal_(self.cross_queries_y, std=std) + + self.cross_values_x = nn.Parameter(torch.zeros(self.res, context_dim)) + torch.nn.init.normal_(self.cross_values_x, std=std) + + self.cross_values_y = nn.Parameter( + nn.Parameter(torch.zeros(self.res, context_dim)) + ) + torch.nn.init.normal_(self.cross_values_y, std=std) + + def set_smpl_init(self, smplx_dir): + """Fetch saved SMPL parameters and register buffers.""" + mean_params = np.load(os.path.join(smplx_dir, "smpl_mean_params.npz")) + if self.nrot == 53: + init_body_pose = ( + torch.eye(3) + .reshape(1, 3, 3) + .repeat(self.nrot, 1, 1)[:, :, :2] + .flatten(1) + .reshape(1, -1) + ) + init_body_pose[:, : 24 * 6] = torch.from_numpy( + mean_params["pose"][:] + ).float() # global_orient+body_pose from SMPL + else: + init_body_pose = torch.from_numpy( + mean_params["pose"].astype(np.float32) + ).unsqueeze(0) + + init_betas = torch.from_numpy(mean_params["shape"].astype("float32")).unsqueeze( + 0 + ) + init_cam = torch.from_numpy(mean_params["cam"].astype(np.float32)).unsqueeze(0) + init_betas_kid = torch.cat( + [init_betas, torch.zeros_like(init_betas[:, [0]])], 1 + ) + init_expression = 0.0 * torch.from_numpy( + mean_params["shape"].astype("float32") + ).unsqueeze(0) + + if self.num_betas == 11: + init_betas = torch.cat([init_betas, torch.zeros_like(init_betas[:, :1])], 1) + + self.register_buffer("init_body_pose", init_body_pose) + self.register_buffer("init_betas", init_betas) + self.register_buffer("init_betas_kid", init_betas_kid) + self.register_buffer("init_cam", init_cam) + self.register_buffer("init_expression", init_expression) + + def cross_attn_inputs(self, x, x_central, idx_0, idx_det): + """Reshape and pad x_central to have the right shape for Cross-attention processing. + Inject learned embeddings to query and key inputs at the location of detected people. + """ + + h, w = x.shape[2], x.shape[3] + x = einops.rearrange(x, "b c h w -> b (h w) c") + + assert idx_0 is not None, "Learned cross queries only work with multicross" + + if idx_0.shape[0] > 0: + # reconstruct the batch/nb_people dimensions: pad for images with fewer people than max. 
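+ # (Bookkeeping inferred from how these outputs are used below: counts is the number of detected
+ # people per image and idx_det_0 re-indexes each detection into that per-image grouping, so that
+ # pad_to_max can build a [n_images, max_people, C] query tensor plus an attention mask that
+ # marks the padded slots.)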
+ counts, idx_det_0 = rebatch(idx_0, idx_det) + old_shape = x_central.shape + + # Legacy check for old versions + assert idx_det is not None, "idx_det needed for learned_attention" + + # xx is the tensor with all features + xx = einops.rearrange(x, "b (h w) c -> b c h w", h=h, w=w) + # Get learned embeddings for queries, at positions with detected people. + queries_xy = ( + self.cross_queries_x[idx_det[1]] + self.cross_queries_y[idx_det[2]] + ) + # Add the embedding to the central features. + x_central = x_central + queries_xy + assert x_central.shape == old_shape, "Problem with shape" + + # Make it a tensor of dim. [batch, max_ppl_along_batch, ...] + x_central, mask = pad_to_max(x_central, counts) + + # xx = einops.rearrange(x, 'b (h w) c -> b c h w', h=h, w=w) + xx = xx[torch.cumsum(counts, dim=0) - 1] + + # Inject leared embeddings for key/values at detected locations. + values_xy = ( + self.cross_values_x[idx_det[1]] + self.cross_values_y[idx_det[2]] + ) + xx[idx_det_0, :, idx_det[1], idx_det[2]] += values_xy + + x = einops.rearrange(xx, "b c h w -> b (h w) c") + num_ppl = x_central.shape[1] + else: + mask = None + num_ppl = 1 + counts = None + return x, x_central, mask, num_ppl, counts + + def forward(self, x_central, x, idx_0=None, idx_det=None, **kwargs): + """ " + Forward the HPH module. + """ + batch_size = x.shape[0] + + # Reshape inputs for cross attention and inject learned embeddings for queries and values. + x, x_central, mask, num_ppl, counts = self.cross_attn_inputs( + x, x_central, idx_0, idx_det + ) + + # Add init (mean smpl params) to the query for each quantity being regressed. + bs = x_central.shape[0] if idx_0.shape[0] else batch_size + expand = lambda x: x.expand(bs, num_ppl, -1) + pred_body_pose, pred_betas, pred_cam, pred_expression = [ + expand(x) + for x in [ + self.init_body_pose, + self.init_betas, + self.init_cam, + self.init_expression, + ] + ] + token = torch.cat([x_central, pred_body_pose, pred_betas, pred_cam], dim=-1) + if len(token.shape) == 2: + token = token[:, None, :] + + # Process query and inputs with the cross-attention module. + token_out = self.transformer(token, context=x, mask=mask) + + # Reshape outputs from [batch_size, nmax_ppl, ...] to [total_ppl, ...] + if mask is not None: + # Stack along batch axis. + token_out_list = [token_out[i, :c, ...] for i, c in enumerate(counts)] + token_out = torch.concat(token_out_list, dim=0) + else: + token_out = token_out.squeeze(1) # (B, C) + + # Decoded output token and add to init for each quantity to regress. + reshape = ( + (lambda x: x) + if idx_0.shape[0] == 0 + else (lambda x: x[0, 0, ...][None, ...]) + ) + decoders = [self.decpose, self.decshape, self.deccam, self.decexpression] + inits = [pred_body_pose, pred_betas, pred_cam, pred_expression] + pred_body_pose, pred_betas, pred_cam, pred_expression = [ + d(token_out) + reshape(i) for d, i in zip(decoders, inits) + ] + + # Convert self.joint_rep_type -> rotmat + joint_conversion_fn = rot6d_to_rotmat + + # conversion + pred_body_pose = joint_conversion_fn(pred_body_pose).view( + batch_size, self.num_body_joints + 1, 3, 3 + ) + + # Build the output dict + pred_smpl_params = { + "global_orient": pred_body_pose[:, [0]], + "body_pose": pred_body_pose[:, 1:], + "betas": pred_betas, + #'betas_kid': pred_betas_kid, + "expression": pred_expression, + } + return pred_smpl_params, pred_cam # , pred_smpl_params_list + + +def regression_mlp(layers_sizes): + """ + Return a fully connected network. 
+ """ + assert len(layers_sizes) >= 2 + in_features = layers_sizes[0] + layers = [] + for i in range(1, len(layers_sizes) - 1): + out_features = layers_sizes[i] + layers.append(torch.nn.Linear(in_features, out_features)) + layers.append(torch.nn.ReLU()) + in_features = out_features + layers.append(torch.nn.Linear(in_features, layers_sizes[-1])) + return torch.nn.Sequential(*layers) + + +def apply_threshold(det_thresh, _scores): + """Apply thresholding to detection scores; if stack_K is used and det_thresh is a list, apply to each channel separately""" + if isinstance(det_thresh, list): + det_thresh = det_thresh[0] + idx = torch.where(_scores >= det_thresh) + return idx + + +def _nms(heat, kernel=3): + """easy non maximal supression (as in CenterNet)""" + + if kernel not in [2, 4]: + pad = (kernel - 1) // 2 + else: + if kernel == 2: + pad = 1 + else: + pad = 2 + + hmax = nn.functional.max_pool2d(heat, (kernel, kernel), stride=1, padding=pad) + + if hmax.shape[2] > heat.shape[2]: + hmax = hmax[:, :, : heat.shape[2], : heat.shape[3]] + + keep = (hmax == heat).float() + + return heat * keep + + +def _sigmoid(x): + y = torch.clamp(x.sigmoid_(), min=1e-4, max=1 - 1e-4) + return y + + +if __name__ == "__main__": + Model() diff --git a/engine/pose_estimation/pose_estimator.py b/engine/pose_estimation/pose_estimator.py new file mode 100644 index 0000000000000000000000000000000000000000..89aa6c8da859495e898163eb86aa70539ca015ff --- /dev/null +++ b/engine/pose_estimation/pose_estimator.py @@ -0,0 +1,228 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Peihao Li +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-11 12:47:58 +# @Function : inference code for pose estimation + +import os +import sys + +sys.path.append("./") + +import pdb +from dataclasses import dataclass + +import numpy as np +import torch +import torch.nn.functional as F +from PIL import Image + +from engine.ouputs import BaseOutput +from engine.pose_estimation.model import Model + +IMG_NORM_MEAN = [0.485, 0.456, 0.406] +IMG_NORM_STD = [0.229, 0.224, 0.225] + + +@dataclass +class SMPLXOutput(BaseOutput): + beta: np.ndarray + is_full_body: bool + msg: str + + +def normalize_rgb_tensor(img, imgenet_normalization=True): + img = img / 255.0 + if imgenet_normalization: + img = ( + img - torch.tensor(IMG_NORM_MEAN, device=img.device).view(1, 3, 1, 1) + ) / torch.tensor(IMG_NORM_STD, device=img.device).view(1, 3, 1, 1) + return img + + +def load_model(ckpt_path, model_path, device=torch.device("cuda")): + """Open a checkpoint, build Multi-HMR using saved arguments, load the model weigths.""" + # Model + + assert os.path.isfile(ckpt_path), f"{ckpt_path} not found" + + # Load weights + ckpt = torch.load(ckpt_path, map_location=device) + + # Get arguments saved in the checkpoint to rebuild the model + kwargs = {} + for k, v in vars(ckpt["args"]).items(): + kwargs[k] = v + print(ckpt["args"].img_size) + # Build the model. + if isinstance(ckpt["args"].img_size, list): + kwargs["img_size"] = ckpt["args"].img_size[0] + else: + kwargs["img_size"] = ckpt["args"].img_size + kwargs["smplx_dir"] = model_path + print("Loading model...") + model = Model(**kwargs).to(device) + print("Model loaded") + # Load weights into model. + model.load_state_dict(ckpt["model_state_dict"], strict=False) + model.output_mesh = True + model.eval() + return model + + +def inverse_perspective_projection(points, K, distance): + """ + This function computes the inverse perspective projection of a set of points given an estimated distance. 
+ Input: + points (bs, N, 2): 2D points + K (bs,3,3): camera intrinsics params + distance (bs, N, 1): distance in the 3D world + Similar to: + - pts_l_norm = cv2.undistortPoints(np.expand_dims(pts_l, axis=1), cameraMatrix=K_l, distCoeffs=None) + """ + # Apply camera intrinsics + points = torch.cat([points, torch.ones_like(points[..., :1])], -1) + points = torch.einsum("bij,bkj->bki", torch.inverse(K), points) + + # Apply perspective distortion + if distance is None: + return points + points = points * distance + return points + + +class PoseEstimator: + def __init__(self, model_path, device="cuda"): + self.device = torch.device(device) + self.mhmr_model = load_model( + os.path.join(model_path, "pose_estimate", "multiHMR_896_L.pt"), + model_path=model_path, + device=self.device, + ) + self.pad_ratio = 0.2 + self.img_size = 896 + self.fov = 60 + + def get_camera_parameters(self): + K = torch.eye(3) + # Get focal length. + focal = self.img_size / (2 * np.tan(np.radians(self.fov) / 2)) + K[0, 0], K[1, 1] = focal, focal + + K[0, -1], K[1, -1] = self.img_size // 2, self.img_size // 2 + + # Add batch dimension + K = K.unsqueeze(0).to(self.device) + return K + + def img_center_padding(self, img_np): + + ori_h, ori_w = img_np.shape[:2] + + w = round((1 + self.pad_ratio) * ori_w) + h = round((1 + self.pad_ratio) * ori_h) + + img_pad_np = np.zeros((h, w, 3), dtype=np.uint8) + offset_h, offset_w = (h - img_np.shape[0]) // 2, (w - img_np.shape[1]) // 2 + img_pad_np[ + offset_h : offset_h + img_np.shape[0] :, + offset_w : offset_w + img_np.shape[1], + ] = img_np + + return img_pad_np, offset_w, offset_h + + def _preprocess(self, img_np): + + raw_img_size = max(img_np.shape[:2]) + + img_tensor = ( + torch.Tensor(img_np).to(self.device).unsqueeze(0).permute(0, 3, 1, 2) + ) + + _, _, h, w = img_tensor.shape + scale_factor = min(self.img_size / w, self.img_size / h) + img_tensor = F.interpolate( + img_tensor, scale_factor=scale_factor, mode="bilinear" + ) + + _, _, h, w = img_tensor.shape + pad_left = (self.img_size - w) // 2 + pad_top = (self.img_size - h) // 2 + pad_right = self.img_size - w - pad_left + pad_bottom = self.img_size - h - pad_top + img_tensor = F.pad( + img_tensor, + (pad_left, pad_right, pad_top, pad_bottom), + mode="constant", + value=0, + ) + + resize_img = normalize_rgb_tensor(img_tensor) + + annotation = ( + pad_left, + pad_top, + scale_factor, + self.img_size / scale_factor, + raw_img_size, + ) + + return resize_img, annotation + + @torch.no_grad() + def __call__(self, img_path): + # image_tensor H W C + + img_np = np.asarray(Image.open(img_path).convert("RGB")) + + raw_h, raw_w, _ = img_np.shape + img_np, offset_w, offset_h = self.img_center_padding(img_np) + img_tensor, annotation = self._preprocess(img_np) + K = self.get_camera_parameters() + + with torch.cuda.amp.autocast(enabled=True): + target_human = self.mhmr_model( + img_tensor, + is_training=False, + nms_kernel_size=int(3), + det_thresh=0.3, + K=K, + idx=None, + max_dist=None, + ) + if not len(target_human) == 1: + return SMPLXOutput( + beta=None, + is_full_body=False, + msg="more than one human detected" if len(target_human) > 1 else "no human detected", + ) + + # check is full body + pad_left, pad_top, scale_factor, _, _ = annotation + j2d = target_human[0]["j2d"] + # tranform to raw image space + j2d = ( + j2d - torch.tensor([pad_left, pad_top], device=self.device).unsqueeze(0) + ) / scale_factor + j2d = j2d - torch.tensor([offset_w, offset_h], device=self.device).unsqueeze(0) + + # enable the full body contains 95% of 
the image + scale_ratio = 0.025 + + is_full_body = ( + ( + (j2d[..., 0] >= 0 - raw_w * scale_ratio) + & (j2d[..., 0] < raw_w * (1 + scale_ratio)) + & (j2d[..., 1] >= 0 - raw_h * scale_ratio) + & (j2d[..., 1] < raw_h * (1 + scale_ratio)) + ) + .sum(dim=-1) + .item() >= 95 + ) + + return SMPLXOutput( + beta=target_human[0]["shape"].cpu().numpy(), + is_full_body=is_full_body, + msg="success" if is_full_body else "no full-body human detected", + ) diff --git a/engine/pose_estimation/pose_utils/__init__.py b/engine/pose_estimation/pose_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0e00f25534403ce61e8171f5205076956543134a --- /dev/null +++ b/engine/pose_estimation/pose_utils/__init__.py @@ -0,0 +1,19 @@ +from .humans import get_mapping, rot6d_to_rotmat, get_smplx_joint_names + +from .camera import (perspective_projection, get_focalLength_from_fieldOfView, inverse_perspective_projection, + undo_focal_length_normalization, undo_log_depth, log_depth, focal_length_normalization) + +from .image import normalize_rgb, unpatch, denormalize_rgb + + +from .tensor_manip import rebatch, pad, pad_to_max + +from .color import demo_color + +from .constants import SMPLX_DIR, MEAN_PARAMS, CACHE_DIR_MULTIHMR, THREEDPW_DIR, EHF_DIR, SMPLX2SMPL_REGRESSOR + +from .training import AverageMeter, compute_prf1, match_2d_greedy + +from .rot6d import axis_angle_to_rotation_6d, rotation_6d_to_axis_angle + +from .render import RendererUtil \ No newline at end of file diff --git a/engine/pose_estimation/pose_utils/camera.py b/engine/pose_estimation/pose_utils/camera.py new file mode 100644 index 0000000000000000000000000000000000000000..effd6699fd603179b9066d0d03ec95322c4f1264 --- /dev/null +++ b/engine/pose_estimation/pose_utils/camera.py @@ -0,0 +1,95 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. +# CC BY-NC-SA 4.0 license + +import numpy as np +import math +import torch + +OPENCV_TO_OPENGL_CAMERA_CONVENTION = np.array([[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]]) + + +def perspective_projection(x, K): + """ + This function computes the perspective projection of a set of points assuming the extrinsinc params have already been applied + Args: + - x [bs,N,3]: 3D points + - K [bs,3,3]: Camera instrincs params + """ + # Apply perspective distortion + y = x / x[:, :, -1].unsqueeze(-1) # (bs, N, 3) + + # Apply camera intrinsics + y = torch.einsum("bij,bkj->bki", K, y) # (bs, N, 3) + + return y[:, :, :2] + + +def inverse_perspective_projection(points, K, distance): + """ + This function computes the inverse perspective projection of a set of points given an estimated distance. 
+ Input: + points (bs, N, 2): 2D points + K (bs,3,3): camera intrinsics params + distance (bs, N, 1): distance in the 3D world + Similar to: + - pts_l_norm = cv2.undistortPoints(np.expand_dims(pts_l, axis=1), cameraMatrix=K_l, distCoeffs=None) + """ + # Apply camera intrinsics + points = torch.cat([points, torch.ones_like(points[..., :1])], -1) + points = torch.einsum("bij,bkj->bki", torch.inverse(K), points) + + # Apply perspective distortion + if distance == None: + return points + points = points * distance + return points + + +def get_focalLength_from_fieldOfView(fov=60, img_size=512): + """ + Compute the focal length of the camera lens by assuming a certain FOV for the entire image + Args: + - fov: float, expressed in degree + - img_size: int + Return: + focal: float + """ + focal = img_size / (2 * np.tan(np.radians(fov) / 2)) + return focal + + +def focal_length_normalization(x, f, fovn=60, img_size=448): + """ + Section 3.1 of https://arxiv.org/pdf/1904.02028.pdf + E = (fn/f) * E' where E is 1/d + """ + fn = get_focalLength_from_fieldOfView(fov=fovn, img_size=img_size) + y = x * (fn / f) + return y + + +def undo_focal_length_normalization(y, f, fovn=60, img_size=448): + """ + Undo focal_length_normalization() + """ + fn = get_focalLength_from_fieldOfView(fov=fovn, img_size=img_size) + x = y * (f / fn) + return x + + +EPS_LOG = 1e-10 + + +def log_depth(x, eps=EPS_LOG): + """ + Move depth to log space + """ + return torch.log(x + eps) + + +def undo_log_depth(y, eps=EPS_LOG): + """ + Undo log_depth() + """ + return torch.exp(y) - eps diff --git a/engine/pose_estimation/pose_utils/color.py b/engine/pose_estimation/pose_utils/color.py new file mode 100644 index 0000000000000000000000000000000000000000..3713657855a136e771a95da2a561c60141118da3 --- /dev/null +++ b/engine/pose_estimation/pose_utils/color.py @@ -0,0 +1,22 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. +# CC BY-NC-SA 4.0 license + +import numpy as np + +def hex_to_rgb(hex): + y = tuple(int(hex[i:i+2], 16) for i in (0, 2, 4)) + return (y[0]/255,y[1]/255,y[2]/255) + +# Define colors for the demo +color = ['0047AB', # cobaltblue + '6495ED', # cornerblue + 'FF9999', 'FF9933', '00CC66', '66B2FF', 'FF6666', 'FF3333', 'C0C0C0', '9933FF'] # rosé - orange - green - blue - red - grey - violet +color = [ hex_to_rgb(x) for x in color] + +for i in range(200): + color_i = list(np.random.choice(range(256), size=3)) + color.append((color_i[0]/225, color_i[1]/225, color_i[2]/225)) + +demo_color = color + diff --git a/engine/pose_estimation/pose_utils/constants.py b/engine/pose_estimation/pose_utils/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..a93150502473f82d91ca6cd98821a46b6911b0c5 --- /dev/null +++ b/engine/pose_estimation/pose_utils/constants.py @@ -0,0 +1,16 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. 
+# CC BY-NC-SA 4.0 license + +import os + +SMPLX_DIR = 'checkpoints' +MEAN_PARAMS = 'checkpoints/smpl_mean_params.npz' +CACHE_DIR_MULTIHMR = 'checkpoints/multiHMR' + +ANNOT_DIR = 'data' +BEDLAM_DIR = 'data/BEDLAM' +EHF_DIR = 'data/EHF' +THREEDPW_DIR = 'data/3DPW' + +SMPLX2SMPL_REGRESSOR = 'checkpoints/smplx/smplx2smpl.pkl' \ No newline at end of file diff --git a/engine/pose_estimation/pose_utils/constants_service.py b/engine/pose_estimation/pose_utils/constants_service.py new file mode 100644 index 0000000000000000000000000000000000000000..1fd92ef69552f980fa6aa295fce840cdc7937c3e --- /dev/null +++ b/engine/pose_estimation/pose_utils/constants_service.py @@ -0,0 +1,14 @@ +import os + +current_dir_path = os.path.dirname(__file__) + +SMPLX_DIR = f"{current_dir_path}/../checkpoints" +MEAN_PARAMS = f"{current_dir_path}/../checkpoints/smpl_mean_params.npz" +CACHE_DIR_MULTIHMR = f"{current_dir_path}/../checkpoints/multiHMR" + + +SMPLX2SMPL_REGRESSOR = f"{current_dir_path}/../checkpoints/smplx/smplx2smpl.pkl" + +DEVICE = "cuda" +MODEL_NAME = 'ABCGSUR8' +KEYPOINT_THR = 0.5 diff --git a/engine/pose_estimation/pose_utils/humans.py b/engine/pose_estimation/pose_utils/humans.py new file mode 100644 index 0000000000000000000000000000000000000000..421a9f4bad7260b2c7c531730de2f05c95dab86f --- /dev/null +++ b/engine/pose_estimation/pose_utils/humans.py @@ -0,0 +1,472 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. +# CC BY-NC-SA 4.0 license + +import numpy as np +import torch.nn.functional as F +import torch +import roma +from smplx.joint_names import JOINT_NAMES + +def rot6d_to_rotmat(x): + """ + 6D rotation representation to 3x3 rotation matrix. + Args: + x: (B,6) Batch of 6-D rotation representations. + Returns: + torch.Tensor: Batch of corresponding rotation matrices with shape (B,3,3). 
+ """ + x = x.reshape(-1,2,3).permute(0, 2, 1).contiguous() + y = roma.special_gramschmidt(x) + return y + +def get_smplx_joint_names(*args, **kwargs): + return JOINT_NAMES[:127] + +COCO17_JOINTS_NAME = { + 0: 'nose', 1: 'left_eye', 2: 'right_eye', + 3: 'left_ear', 4: 'right_ear', 5:'left_shoulder', + 6: 'right_shoulder', 7: 'left_elbow', 8: 'right_elbow', + 9: 'left_wrist', 10: 'right_wrist', 11: 'left_hip', + 12: 'right_hip', 13: 'left_knee', 14: 'right_knee', + 15: 'left_ankle', 16: 'right_ankle' +} + +OPENPOSE25_JOINTS_NAME = { + 0: 'nose', 1: 'neck', 2: 'right_shoulder', 3: 'right_elbow', 4: 'right_wrist', + 5: 'left_shoulder', 6: 'left_elbow', 7: 'left_wrist', 8: 'MidHip', 9: 'right_hip', 10: 'right_knee', 11: 'right_ankle', 12: 'left_hip', + 13: 'left_knee', 14: 'left_ankle', 15: 'right_eye', 16: 'left_eye', 17: 'right_ear', 18: 'left_ear', 19: 'LBigToe', + 20: 'LSmallToe', 21: 'left_heel', 22: 'RBigToe', 23: 'RSmallToe', 24: 'right_heel', +} + + +def joints_smplx_to_coco(): + smplx_joints_name = get_smplx_joint_names() + joints_idx = [] + for k, v in COCO17_JOINTS_NAME.items(): + joints_idx.append(smplx_joints_name.index(v)) + + return joints_idx + +def joints_openpose25_to_coco17(): + idx_list = [0] * 17 + is_found = False + for coco_key, coco_value in COCO17_JOINTS_NAME.items(): + is_found = False + for openpose_key, openpose_value in OPENPOSE25_JOINTS_NAME.items(): + if coco_value == openpose_value: + idx_list[coco_key] = openpose_key + is_found = True + break + assert is_found, f'{coco_key} is not found in openpose keypoints' + return idx_list + + + +COCO_WHOLEBODY_KEYPOINTS = [ + "nose", + "left_eye", + "right_eye", + "left_ear", + "right_ear", + "left_shoulder", + "right_shoulder", + "left_elbow", + "right_elbow", + "left_wrist", + "right_wrist", + "left_hip", + "right_hip", + "left_knee", + "right_knee", + "left_ankle", + "right_ankle", + "left_bigtoe", + "left_smalltoe", + "left_heel", + "right_bigtoe", + "right_smalltoe", + "right_heel", + "right_contour_1", # original name: face_contour_1 + "right_contour_2", # original name: face_contour_2 + "right_contour_3", # original name: face_contour_3 + "right_contour_4", # original name: face_contour_4 + "right_contour_5", # original name: face_contour_5 + "right_contour_6", # original name: face_contour_6 + "right_contour_7", # original name: face_contour_7 + "right_contour_8", # original name: face_contour_8 + "contour_middle", # original name: face_contour_9 + "left_contour_8", # original name: face_contour_10 + "left_contour_7", # original name: face_contour_11 + "left_contour_6", # original name: face_contour_12 + "left_contour_5", # original name: face_contour_13 + "left_contour_4", # original name: face_contour_14 + "left_contour_3", # original name: face_contour_15 + "left_contour_2", # original name: face_contour_16 + "left_contour_1", # original name: face_contour_17 + "right_eyebrow_1", + "right_eyebrow_2", + "right_eyebrow_3", + "right_eyebrow_4", + "right_eyebrow_5", + "left_eyebrow_5", + "left_eyebrow_4", + "left_eyebrow_3", + "left_eyebrow_2", + "left_eyebrow_1", + "nosebridge_1", + "nosebridge_2", + "nosebridge_3", + "nosebridge_4", + "right_nose_2", # original name: nose_1 + "right_nose_1", # original name: nose_2 + "nose_middle", # original name: nose_3 + "left_nose_1", # original name: nose_4 + "left_nose_2", # original name: nose_5 + "right_eye_1", + "right_eye_2", + "right_eye_3", + "right_eye_4", + "right_eye_5", + "right_eye_6", + "left_eye_4", + "left_eye_3", + "left_eye_2", + "left_eye_1", + "left_eye_6", 
+ "left_eye_5", + "right_mouth_1", # original name: mouth_1 + "right_mouth_2", # original name: mouth_2 + "right_mouth_3", # original name: mouth_3 + "mouth_top", # original name: mouth_4 + "left_mouth_3", # original name: mouth_5 + "left_mouth_2", # original name: mouth_6 + "left_mouth_1", # original name: mouth_7 + "left_mouth_5", # original name: mouth_8 + "left_mouth_4", # original name: mouth_9 + "mouth_bottom", # original name: mouth_10 + "right_mouth_4", # original name: mouth_11 + "right_mouth_5", # original name: mouth_12 + "right_lip_1", # original name: lip_1 + "right_lip_2", # original name: lip_2 + "lip_top", # original name: lip_3 + "left_lip_2", # original name: lip_4 + "left_lip_1", # original name: lip_5 + "left_lip_3", # original name: lip_6 + "lip_bottom", # original name: lip_7 + "right_lip_3", # original name: lip_8 + "left_hand_root", + "left_thumb_1", + "left_thumb_2", + "left_thumb_3", + "left_thumb", + "left_index_1", + "left_index_2", + "left_index_3", + "left_index", + "left_middle_1", + "left_middle_2", + "left_middle_3", + "left_middle", + "left_ring_1", + "left_ring_2", + "left_ring_3", + "left_ring", + "left_pinky_1", + "left_pinky_2", + "left_pinky_3", + "left_pinky", + "right_hand_root", + "right_thumb_1", + "right_thumb_2", + "right_thumb_3", + "right_thumb", + "right_index_1", + "right_index_2", + "right_index_3", + "right_index", + "right_middle_1", + "right_middle_2", + "right_middle_3", + "right_middle", + "right_ring_1", + "right_ring_2", + "right_ring_3", + "right_ring", + "right_pinky_1", + "right_pinky_2", + "right_pinky_3", + "right_pinky", +] + +SMPLX_KEYPOINTS = [ + "pelvis", + "left_hip", + "right_hip", + "spine_1", + "left_knee", + "right_knee", + "spine_2", + "left_ankle", + "right_ankle", + "spine_3", + "left_foot", + "right_foot", + "neck", + "left_collar", + "right_collar", + "head", + "left_shoulder", + "right_shoulder", + "left_elbow", + "right_elbow", + "left_wrist", + "right_wrist", + "jaw", + "left_eyeball", + "right_eyeball", + "left_index_1", + "left_index_2", + "left_index_3", + "left_middle_1", + "left_middle_2", + "left_middle_3", + "left_pinky_1", + "left_pinky_2", + "left_pinky_3", + "left_ring_1", + "left_ring_2", + "left_ring_3", + "left_thumb_1", + "left_thumb_2", + "left_thumb_3", + "right_index_1", + "right_index_2", + "right_index_3", + "right_middle_1", + "right_middle_2", + "right_middle_3", + "right_pinky_1", + "right_pinky_2", + "right_pinky_3", + "right_ring_1", + "right_ring_2", + "right_ring_3", + "right_thumb_1", + "right_thumb_2", + "right_thumb_3", + "nose", + "right_eye", + "left_eye", + "right_ear", + "left_ear", + "left_bigtoe", + "left_smalltoe", + "left_heel", + "right_bigtoe", + "right_smalltoe", + "right_heel", + "left_thumb", + "left_index", + "left_middle", + "left_ring", + "left_pinky", + "right_thumb", + "right_index", + "right_middle", + "right_ring", + "right_pinky", + "right_eyebrow_1", + "right_eyebrow_2", + "right_eyebrow_3", + "right_eyebrow_4", + "right_eyebrow_5", + "left_eyebrow_5", + "left_eyebrow_4", + "left_eyebrow_3", + "left_eyebrow_2", + "left_eyebrow_1", + "nosebridge_1", + "nosebridge_2", + "nosebridge_3", + "nosebridge_4", + "right_nose_2", # original name: nose_1 + "right_nose_1", # original name: nose_2 + "nose_middle", # original name: nose_3 + "left_nose_1", # original name: nose_4 + "left_nose_2", # original name: nose_5 + "right_eye_1", + "right_eye_2", + "right_eye_3", + "right_eye_4", + "right_eye_5", + "right_eye_6", + "left_eye_4", + "left_eye_3", + "left_eye_2", + 
"left_eye_1", + "left_eye_6", + "left_eye_5", + "right_mouth_1", # original name: mouth_1 + "right_mouth_2", # original name: mouth_2 + "right_mouth_3", # original name: mouth_3 + "mouth_top", # original name: mouth_4 + "left_mouth_3", # original name: mouth_5 + "left_mouth_2", # original name: mouth_6 + "left_mouth_1", # original name: mouth_7 + "left_mouth_5", # original name: mouth_8 + "left_mouth_4", # original name: mouth_9 + "mouth_bottom", # original name: mouth_10 + "right_mouth_4", # original name: mouth_11 + "right_mouth_5", # original name: mouth_12 + "right_lip_1", # original name: lip_1 + "right_lip_2", # original name: lip_2 + "lip_top", # original name: lip_3 + "left_lip_2", # original name: lip_4 + "left_lip_1", # original name: lip_5 + "left_lip_3", # original name: lip_6 + "lip_bottom", # original name: lip_7 + "right_lip_3", # original name: lip_8 + "right_contour_1", # original name: face_contour_1 + "right_contour_2", # original name: face_contour_2 + "right_contour_3", # original name: face_contour_3 + "right_contour_4", # original name: face_contour_4 + "right_contour_5", # original name: face_contour_5 + "right_contour_6", # original name: face_contour_6 + "right_contour_7", # original name: face_contour_7 + "right_contour_8", # original name: face_contour_8 + "contour_middle", # original name: face_contour_9 + "left_contour_8", # original name: face_contour_10 + "left_contour_7", # original name: face_contour_11 + "left_contour_6", # original name: face_contour_12 + "left_contour_5", # original name: face_contour_13 + "left_contour_4", # original name: face_contour_14 + "left_contour_3", # original name: face_contour_15 + "left_contour_2", # original name: face_contour_16 + "left_contour_1", # original name: face_contour_17 +] + +LEFT_HAND_KEYPOINTS = [ + "left_wrist", + "left_index_1", + "left_index_2", + "left_index_3", + "left_middle_1", + "left_middle_2", + "left_middle_3", + "left_pinky_1", + "left_pinky_2", + "left_pinky_3", + "left_ring_1", + "left_ring_2", + "left_ring_3", + "left_thumb_1", + "left_thumb_2", + "left_thumb_3", +] + +RIGHT_HAND_KEYPOINTS = [ + "right_wrist", + "right_index_1", + "right_index_2", + "right_index_3", + "right_middle_1", + "right_middle_2", + "right_middle_3", + "right_pinky_1", + "right_pinky_2", + "right_pinky_3", + "right_ring_1", + "right_ring_2", + "right_ring_3", + "right_thumb_1", + "right_thumb_2", + "right_thumb_3", + +] + +COCO_PLUS_KEYPOINTS = [ + 'nose', + 'left_eye', + 'right_eye', + 'left_ear', + 'right_ear', + 'left_shoulder', + 'right_shoulder', + 'left_elbow', + 'right_elbow', + 'left_wrist', + 'right_wrist', + 'left_hip', + 'right_hip', + 'left_knee', + 'right_knee', + 'left_ankle', + 'right_ankle', + "left_bigtoe", + "left_smalltoe", + "left_heel", + "right_bigtoe", + "right_smalltoe", + "right_heel", +] + +KEYPOINTS_FACTORY = { + "smplx": SMPLX_KEYPOINTS, + "coco_wholebody": COCO_WHOLEBODY_KEYPOINTS, + "left_hand": LEFT_HAND_KEYPOINTS, + "right_hand": RIGHT_HAND_KEYPOINTS, + "coco_plus": COCO_PLUS_KEYPOINTS, +} + +MAPPING_CACHE = {} + +def get_mapping( + src: str, + dst: str, + keypoints_factory: dict = KEYPOINTS_FACTORY, +): + """Get mapping list from src to dst. + + Args: + src (str): source data type from keypoints_factory. + dst (str): destination data type from keypoints_factory. + approximate (bool): control whether approximate mapping is allowed. + keypoints_factory (dict, optional): A class to store the attributes. + Defaults to keypoints_factory. 
+ + Returns: + list: + [src_to_intersection_idx, dst_to_intersection_index, + intersection_names] + """ + if src.lower() in MAPPING_CACHE.keys() and dst.lower() in MAPPING_CACHE[src.lower()].keys(): + return MAPPING_CACHE[src.lower()][dst.lower()] + + src_names = keypoints_factory[src.lower()] + dst_names = keypoints_factory[dst.lower()] + + dst_idxs, src_idxs, intersection = [], [], [] + full_mapping_idx = [] + unmapped_names, approximate_names = [], [] + for dst_idx, dst_name in enumerate(dst_names): + try: + src_idx = src_names.index(dst_name) + except ValueError: + src_idx = -1 + if src_idx >= 0: + dst_idxs.append(dst_idx) + src_idxs.append(src_idx) + intersection.append(dst_name) + full_mapping_idx.append(src_idx) + # approximate mapping + + mapping_list = (dst_idxs, src_idxs, intersection, full_mapping_idx) + if not src.lower() in MAPPING_CACHE.keys(): + MAPPING_CACHE[src.lower()] = {} + MAPPING_CACHE[src.lower()][dst.lower()] = mapping_list + return mapping_list + + +if __name__ == '__main__': + print(joints_smplx_to_coco()) \ No newline at end of file diff --git a/engine/pose_estimation/pose_utils/image.py b/engine/pose_estimation/pose_utils/image.py new file mode 100644 index 0000000000000000000000000000000000000000..18df524f59fd0b801ff86fbca7046bee2b6c0188 --- /dev/null +++ b/engine/pose_estimation/pose_utils/image.py @@ -0,0 +1,105 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. +# CC BY-NC-SA 4.0 license + +import torch +import numpy as np +from PIL import Image, ImageOps +import torch.nn.functional as F +import cv2 +import time + +IMG_NORM_MEAN = [0.485, 0.456, 0.406] +IMG_NORM_STD = [0.229, 0.224, 0.225] + + +def normalize_rgb_tensor(img, imgenet_normalization=True): + img = img / 255. + if imgenet_normalization: + img = (img - torch.tensor(IMG_NORM_MEAN, device=img.device).view(1, 3, 1, 1)) / torch.tensor(IMG_NORM_STD, device=img.device).view(1, 3, 1, 1) + return img + +def normalize_rgb(img, imagenet_normalization=True): + """ + Args: + - img: np.array - (W,H,3) - np.uint8 - 0/255 + Return: + - img: np.array - (3,W,H) - np.float - -3/3 + """ + img = img.astype(np.float32) / 255. + img = np.transpose(img, (2,0,1)) + if imagenet_normalization: + img = (img - np.asarray(IMG_NORM_MEAN).reshape(3,1,1)) / np.asarray(IMG_NORM_STD).reshape(3,1,1) + img = img.astype(np.float32) + return img + +def denormalize_rgb(img, imagenet_normalization=True): + """ + Args: + - img: np.array - (3,W,H) - np.float - -3/3 + Return: + - img: np.array - (W,H,3) - np.uint8 - 0/255 + """ + if imagenet_normalization: + img = (img * np.asarray(IMG_NORM_STD).reshape(3,1,1)) + np.asarray(IMG_NORM_MEAN).reshape(3,1,1) + img = np.transpose(img, (1,2,0)) * 255. + img = img.astype(np.uint8) + return img + +def unpatch(data, patch_size=14, c=3, img_size=224): + # c = 3 + if len(data.shape) == 2: + c=1 + data = data[:,:,None].repeat([1,1,patch_size**2]) + + B,N,HWC = data.shape + HW = patch_size**2 + c = int(HWC / HW) + h = w = int(N**.5) + p = q = int(HW**.5) + data = data.reshape([B,h,w,p,q,c]) + data = torch.einsum('nhwpqc->nchpwq', data) + return data.reshape([B,c,img_size,img_size]) + +def image_pad(img, img_size, device=torch.device('cuda')): + img_pil = ImageOps.contain(img, (img_size, img_size)) + img_pil_bis = ImageOps.pad(img_pil.copy(), size=(img_size,img_size), color=(255, 255, 255)) + img_pil = ImageOps.pad(img_pil, size=(img_size,img_size)) # pad with zero on the smallest side + + # Go to numpy + resize_img = np.asarray(img_pil) + + # Normalize and go to torch. 
+ resize_img = normalize_rgb(resize_img) + + x = torch.from_numpy(resize_img).unsqueeze(0).to(device) + return x, img_pil_bis + +def image_pad_cuda(img, img_size, rot=0, device=torch.device('cuda'), vis=False): + img = torch.Tensor(img).to(device) + img = torch.flip(img, dims=[2]).unsqueeze(0).permute(0, 3, 1, 2) + if rot != 0: + img = torch.rot90(img, rot, [2, 3]) + + if vis: + image = img.clone()[0].permute(1, 2, 0).cpu().numpy() + if image.dtype != np.uint8: + image = image.astype(np.uint8) + cv2.imshow('k4a', image[..., ::-1]) + cv2.waitKey(1) + _, _, h, w = img.shape + scale_factor = min(img_size / w, img_size / h) + + img = F.interpolate(img, scale_factor=scale_factor, mode='bilinear') + + _, _, h, w = img.shape + + pad_w = (img_size - w) // 2 + pad_h = (img_size - h) // 2 + + + img = F.pad(img,(pad_w, pad_w, pad_h, pad_h), mode='constant', value=255) + + # Normalize and go to torch. + resize_img = normalize_rgb_tensor(img) + return resize_img, img \ No newline at end of file diff --git a/engine/pose_estimation/pose_utils/inference_utils.py b/engine/pose_estimation/pose_utils/inference_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..222f4aa734b9148fa55f585e3661bf06888d2246 --- /dev/null +++ b/engine/pose_estimation/pose_utils/inference_utils.py @@ -0,0 +1,22 @@ +import os +import torch + +from pose_utils.camera import get_focalLength_from_fieldOfView + + +def get_camera_parameters(img_size, fov=60, p_x=None, p_y=None, device=torch.device("cuda")): + """Given image size, fov and principal point coordinates, return K the camera parameter matrix""" + K = torch.eye(3) + # Get focal length. + focal = get_focalLength_from_fieldOfView(fov=fov, img_size=img_size) + K[0, 0], K[1, 1] = focal, focal + + # Set principal point + if p_x is not None and p_y is not None: + K[0, -1], K[1, -1] = p_x * img_size, p_y * img_size + else: + K[0, -1], K[1, -1] = img_size // 2, img_size // 2 + + # Add batch dimension + K = K.unsqueeze(0).to(device) + return K diff --git a/engine/pose_estimation/pose_utils/postprocess.py b/engine/pose_estimation/pose_utils/postprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..bf17bedf95f7fa9eaea94570bcbbcc53d3f605d8 --- /dev/null +++ b/engine/pose_estimation/pose_utils/postprocess.py @@ -0,0 +1,165 @@ +import time +import torch +import torch.nn.functional as F +import numpy as np + +def get_gaussian_kernel_1d(kernel_size, sigma, device): + x = torch.arange(kernel_size).float() - (kernel_size // 2) + g = torch.exp(-((x ** 2) / (2 * sigma ** 2))) + g /= g.sum() + + kernel_weight = g.view(1, 1, -1).to(device) + + + return kernel_weight + +def gaussian_filter_1d(data, kernel_size=3, sigma=1.0, weight=None): + kernel_weight = get_gaussian_kernel_1d(kernel_size, sigma, data.device) if weight is None else weight + data = F.pad(data, (kernel_size // 2, kernel_size // 2), mode='replicate') + return F.conv1d(data, kernel_weight) + + +def exponential_smoothing(x, d_x, alpha=0.5): + return d_x + alpha * (x - d_x) + + +class OneEuroFilter: + # param setting: + # realtime v2m: min_cutoff=1.0, beta=1.5 + # motionshop 2d keypoint: min_cutoff=1.7, beta=0.3 + def __init__(self, min_cutoff=1.0, beta=0.0, sampling_rate=30, d_cutoff=1.0, device='cuda'): + self.min_cutoff = min_cutoff + self.beta = beta + self.sampling_rate = sampling_rate + self.x_prev = None + self.dx_prev = None + self.d_cutoff = d_cutoff + self.pi = torch.tensor(torch.pi, device=device) + + def smoothing_factor(self, cutoff): + + r = 2 * self.pi * cutoff / 
self.sampling_rate + return r/ (1 + r) + + def filter(self, x): + if self.x_prev is None: + self.x_prev = x + self.dx_prev = torch.zeros_like(x) + return x + + + a_d = self.smoothing_factor(self.d_cutoff) + # 计算当前的速度 + dx = (x - self.x_prev) * self.sampling_rate + + dx_hat = exponential_smoothing(dx, self.dx_prev, a_d) + + cutoff = self.min_cutoff + self.beta * torch.abs(dx_hat) + a = self.smoothing_factor(cutoff) + + x_hat = exponential_smoothing(x, self.x_prev, a) + + self.x_prev = x_hat + self.dx_prev = dx_hat + + return x_hat + + +class Filter(): + filter_factory = { + 'gaussian': get_gaussian_kernel_1d, + } + + def __init__(self, target_data, filter_type, filter_args): + self.target_data = target_data + self.filter = self.filter_factory[filter_type] + self.filter_args = filter_args + + def process(self, network_outputs): + filter_data = [] + for human in network_outputs: + filter_data.append(human[self.target_data]) + filter_data = torch.stack(filter_data, dim=0) + + filter_data = self.filter(filter_data, **self.filter_args) + + for i, human in enumerate(network_outputs): + human[self.target_data] = filter_data[i] + + +if __name__ == '__main__': + import argparse + import matplotlib.pyplot as plt + import numpy as np + from rot6d import rotation_6d_to_axis_angle, axis_angle_to_rotation_6d + + from humans import get_smplx_joint_names + parser = argparse.ArgumentParser() + parser.add_argument('--data_path', type=str) + parser.add_argument('--save_path', type=str) + parser.add_argument('--name', type=str) + args = parser.parse_args() + + fig, axs = plt.subplots(nrows=2, ncols=2, figsize=(10, 8)) + data_types = ['rotvec']#, 'j3d'] + observe_keypoints = ['pelvis', 'head', 'left_wrist', 'left_knee'] + joint_names = get_smplx_joint_names() + + + data = np.load(f'{args.data_path}/shape_{args.name}.npy') + fig, axs = plt.subplots(nrows=2, ncols=2, figsize=(10, 8)) + for i in range(2): + for j in range(2): + x = data[:, i*4 + j*2] + print(x.shape) + axs[i, j].plot(x) + + axs[i, j].set_title(f'{4 * i + 2 * j}') + axs[i, j].plot(np.load(f'{args.data_path}/dist_{args.name}.npy')) + plt.tight_layout() + plt.savefig(f'{args.save_path}/shape_{args.name}.jpg') + # for data_type in data_types: + # data = np.load(f'{args.data_path}/{data_type}_{args.name}.npy') + # fig, axs = plt.subplots(nrows=2, ncols=2, figsize=(10, 8)) + # for i in range(2): + # for j in range(2): + # # todo: something wrong here + # filter = OneEuroFilter(min_cutoff=1, beta=0.01, sampling_rate=30, device='cuda:0') + # x = data[:, joint_names.index(observe_keypoints[i*2+j])] #(F, 3) + # print(x.shape) + + # x = axis_angle_to_rotation_6d(torch.tensor(x, device='cuda:0')) + + # x_filtered = x.clone() + # start = time.time() + # for k in range(x.shape[0]): + # x_filtered[k] = filter.filter(x[k]) + + # print(x_filtered.shape[0]/(time.time()-start)) + # # x_filtered = x.clone() + # # a = 0.5 + # # for k in range(1, x.shape[0]): + # # x_filtered[k] = (1 - a) * x_filtered[k-1] + a * x[k] + # #theta = np.linalg.norm(x, axis=-1) + # #x = x / theta[..., None] + + + # # f, n = x.shape + # # x_filtered = gaussian_filter_1d(x.permute(1, 0).view(n, 1, -1), 11, 11) + # # x_filtered = x_filtered.view(n, -1).permute(1, 0) + + # x = rotation_6d_to_axis_angle(x).cpu().numpy() + # x_filtered = rotation_6d_to_axis_angle(x_filtered).cpu().numpy() + # axs[i, j].plot(x[..., 0]) + # axs[i, j].plot(x[..., 1]) + # axs[i, j].plot(x[..., 2]) + + # axs[i, j].plot(x_filtered[..., 0]) + # axs[i, j].plot(x_filtered[..., 1]) + # axs[i, j].plot(x_filtered[..., 2]) + 
# #axs[i, j].plot(theta) + + # axs[i, j].set_title(f'{observe_keypoints[i*2 + j]}') + # plt.tight_layout() + # plt.savefig(f'{args.save_path}/{data_type}_{args.name}.jpg') + \ No newline at end of file diff --git a/engine/pose_estimation/pose_utils/render.py b/engine/pose_estimation/pose_utils/render.py new file mode 100644 index 0000000000000000000000000000000000000000..f97ba5ed6a56b3317633398a0046b36fc54f7c39 --- /dev/null +++ b/engine/pose_estimation/pose_utils/render.py @@ -0,0 +1,378 @@ +import os +import imageio +import numpy as np +import torch +from tqdm import tqdm + +from pytorch3d.renderer import ( + PerspectiveCameras, + TexturesVertex, + PointLights, + Materials, + RasterizationSettings, + MeshRenderer, + MeshRasterizer, + SoftPhongShader, +) +from pytorch3d.renderer.mesh.shader import ShaderBase +from pytorch3d.structures import Meshes + +class NormalShader(ShaderBase): + def __init__(self, device = "cpu", **kwargs): + super().__init__(device=device, **kwargs) + + def forward(self, fragments, meshes, **kwargs): + blend_params = kwargs.get("blend_params", self.blend_params) + texels = fragments.bary_coords.clone() + texels = texels.permute(0, 3, 1, 2, 4) + texels = texels * 2 - 1 # 将 bary_coords 映射到 [-1, 1] + + # 获取法线 + verts_normals = meshes.verts_normals_packed() + faces_normals = verts_normals[meshes.faces_packed()] + bary_coords = fragments.bary_coords + + pixel_normals = (bary_coords[..., None] * faces_normals[fragments.pix_to_face]).sum(dim=-2) + pixel_normals = pixel_normals / pixel_normals.norm(dim=-1, keepdim=True) + + # 将法线映射到颜色空间 + # colors = (pixel_normals + 1) / 2 # 将法线映射到 [0, 1] + colors = torch.clamp(pixel_normals, -1, 1) + print(colors.shape) + mask = (fragments.pix_to_face > 0).float() + colors = torch.cat([colors, mask.unsqueeze(-1)], dim=-1) + # colors[fragments.pix_to_face < 0] = 0 + + # 混合颜色 + # images = self.blend(texels, colors, fragments, blend_params) + return colors + +def overlay_image_onto_background(image, mask, bbox, background): + if isinstance(image, torch.Tensor): + image = image.detach().cpu().numpy() + if isinstance(mask, torch.Tensor): + mask = mask.detach().cpu().numpy() + + out_image = background.copy() + bbox = bbox[0].int().cpu().numpy().copy() + roi_image = out_image[bbox[1]:bbox[3], bbox[0]:bbox[2]] + if len(roi_image) < 1 or len(roi_image[1]) < 1: + return out_image + try: + roi_image[mask] = image[mask] + except Exception as e: + raise e + out_image[bbox[1]:bbox[3], bbox[0]:bbox[2]] = roi_image + + return out_image + + +def update_intrinsics_from_bbox(K_org, bbox): + ''' + update intrinsics for cropped images + ''' + device, dtype = K_org.device, K_org.dtype + + K = torch.zeros((K_org.shape[0], 4, 4) + ).to(device=device, dtype=dtype) + K[:, :3, :3] = K_org.clone() + K[:, 2, 2] = 0 + K[:, 2, -1] = 1 + K[:, -1, 2] = 1 + + image_sizes = [] + for idx, bbox in enumerate(bbox): + left, upper, right, lower = bbox + cx, cy = K[idx, 0, 2], K[idx, 1, 2] + + new_cx = cx - left + new_cy = cy - upper + new_height = max(lower - upper, 1) + new_width = max(right - left, 1) + new_cx = new_width - new_cx + new_cy = new_height - new_cy + + K[idx, 0, 2] = new_cx + K[idx, 1, 2] = new_cy + image_sizes.append((int(new_height), int(new_width))) + + return K, image_sizes + + +def perspective_projection(x3d, K, R=None, T=None): + if R != None: + x3d = torch.matmul(R, x3d.transpose(1, 2)).transpose(1, 2) + if T != None: + x3d = x3d + T.transpose(1, 2) + + x2d = torch.div(x3d, x3d[..., 2:]) + x2d = torch.matmul(K, x2d.transpose(-1, -2)).transpose(-1, -2)[..., 
:2] + return x2d + + +def compute_bbox_from_points(X, img_w, img_h, scaleFactor=1.2): + left = torch.clamp(X.min(1)[0][:, 0], min=0, max=img_w) + right = torch.clamp(X.max(1)[0][:, 0], min=0, max=img_w) + top = torch.clamp(X.min(1)[0][:, 1], min=0, max=img_h) + bottom = torch.clamp(X.max(1)[0][:, 1], min=0, max=img_h) + + cx = (left + right) / 2 + cy = (top + bottom) / 2 + width = (right - left) + height = (bottom - top) + + new_left = torch.clamp(cx - width/2 * scaleFactor, min=0, max=img_w-1) + new_right = torch.clamp(cx + width/2 * scaleFactor, min=1, max=img_w) + new_top = torch.clamp(cy - height / 2 * scaleFactor, min=0, max=img_h-1) + new_bottom = torch.clamp(cy + height / 2 * scaleFactor, min=1, max=img_h) + + bbox = torch.stack((new_left.detach(), new_top.detach(), + new_right.detach(), new_bottom.detach())).int().float().T + return bbox + + +class Renderer(): + def __init__(self, width, height, K, device, faces=None): + + self.width = width + self.height = height + self.K = K + + self.device = device + + if faces is not None: + self.faces = torch.from_numpy( + (faces).astype('int') + ).unsqueeze(0).to(self.device) + + self.initialize_camera_params() + self.lights = PointLights(device=device, location=[[0.0, 0.0, -10.0]]) + self.create_renderer() + + def create_camera(self, R=None, T=None): + if R is not None: + self.R = R.clone().view(1, 3, 3).to(self.device) + if T is not None: + self.T = T.clone().view(1, 3).to(self.device) + + return PerspectiveCameras( + device=self.device, + R=self.R.mT, + T=self.T, + K=self.K_full, + image_size=self.image_sizes, + in_ndc=False) + + def create_renderer(self): + self.renderer = MeshRenderer( + rasterizer=MeshRasterizer( + raster_settings=RasterizationSettings( + image_size=self.image_sizes[0], + blur_radius=1e-5,), + ), + shader=SoftPhongShader( + device=self.device, + lights=self.lights, + ) + ) + + def create_normal_renderer(self): + normal_renderer = MeshRenderer( + rasterizer=MeshRasterizer( + cameras=self.cameras, + raster_settings=RasterizationSettings( + image_size=self.image_sizes[0], + ), + ), + shader=NormalShader(device=self.device), + ) + return normal_renderer + + def initialize_camera_params(self): + """Hard coding for camera parameters + TODO: Do some soft coding""" + + # Extrinsics + self.R = torch.diag( + torch.tensor([1, 1, 1]) + ).float().to(self.device).unsqueeze(0) + + self.T = torch.tensor( + [0, 0, 0] + ).unsqueeze(0).float().to(self.device) + + # Intrinsics + self.K = self.K.unsqueeze(0).float().to(self.device) + self.bboxes = torch.tensor([[0, 0, self.width, self.height]]).float() + self.K_full, self.image_sizes = update_intrinsics_from_bbox(self.K, self.bboxes) + self.cameras = self.create_camera() + + def render_normal(self, vertices): + vertices = vertices.unsqueeze(0) + + mesh = Meshes(verts=vertices, faces=self.faces) + normal_renderer = self.create_normal_renderer() + results = normal_renderer(mesh) + results = torch.flip(results, [1, 2]) + return results + + def render_mesh(self, vertices, background, colors=[0.8, 0.8, 0.8]): + + self.update_bbox(vertices[::50], scale=1.2) + vertices = vertices.unsqueeze(0) + + if colors[0] > 1: colors = [c / 255. 
for c in colors] + verts_features = torch.tensor(colors).reshape(1, 1, 3).to(device=vertices.device, dtype=vertices.dtype) + verts_features = verts_features.repeat(1, vertices.shape[1], 1) + textures = TexturesVertex(verts_features=verts_features) + + mesh = Meshes(verts=vertices, + faces=self.faces, + textures=textures,) + + materials = Materials( + device=self.device, + specular_color=(colors, ), + shininess=0 + ) + + results = torch.flip( + self.renderer(mesh, materials=materials, cameras=self.cameras, lights=self.lights), + [1, 2] + ) + image = results[0, ..., :3] * 255 + mask = results[0, ..., -1] > 1e-3 + + image = overlay_image_onto_background(image, mask, self.bboxes, background.copy()) + self.reset_bbox() + return image + + def update_bbox(self, x3d, scale=2.0, mask=None): + """ Update bbox of cameras from the given 3d points + + x3d: input 3D keypoints (or vertices), (num_frames, num_points, 3) + """ + if x3d.size(-1) != 3: + x2d = x3d.unsqueeze(0) + else: + x2d = perspective_projection(x3d.unsqueeze(0), self.K, self.R, self.T.reshape(1, 3, 1)) + + if mask is not None: + x2d = x2d[:, ~mask] + bbox = compute_bbox_from_points(x2d, self.width, self.height, scale) + self.bboxes = bbox + + self.K_full, self.image_sizes = update_intrinsics_from_bbox(self.K, bbox) + self.cameras = self.create_camera() + self.create_renderer() + + def reset_bbox(self,): + bbox = torch.zeros((1, 4)).float().to(self.device) + bbox[0, 2] = self.width + bbox[0, 3] = self.height + self.bboxes = bbox + + self.K_full, self.image_sizes = update_intrinsics_from_bbox(self.K, bbox) + self.cameras = self.create_camera() + self.create_renderer() + +class RendererUtil(): + def __init__(self, K, w, h, device, faces, keep_origin=True): + self.keep_origin = keep_origin + self.default_R = torch.eye(3) + self.default_T = torch.zeros(3) + self.device = device + self.renderer = Renderer(w, h, K, device, faces) + + def set_extrinsic(self, R, T): + self.default_R = R + self.default_T = T + + def render_normal(self, verts_list): + if not len(verts_list) == 1: + return None + + self.renderer.create_camera(self.default_R, self.default_T) + normal_map = self.renderer.render_normal(verts_list[0]) + return normal_map[0, :, :, 0] + + def render_frame(self, humans, pred_rend_array, verts_list=None, color_list=None): + if not isinstance(pred_rend_array, np.ndarray): + pred_rend_array = np.asarray(pred_rend_array) + self.renderer.create_camera(self.default_R, self.default_T) + _img = pred_rend_array + if humans is not None: + for human in humans: + _img = self.renderer.render_mesh(human['v3d'].to(self.device), _img) + else: + for i, verts in enumerate(verts_list): + if color_list is None: + _img = self.renderer.render_mesh(verts.to(self.device), _img) + else: + _img = self.renderer.render_mesh(verts.to(self.device), _img, color_list[i]) + if self.keep_origin: + _img = np.concatenate([np.asarray(pred_rend_array), _img],1).astype(np.uint8) + return _img + + def render_video(self, results, pil_bis_frames, fps, out_path): + writer = imageio.get_writer( + out_path, + fps=fps, mode='I', format='FFMPEG', macro_block_size=1 + ) + for i, humans in enumerate(tqdm(results)): + pred_rend_array = pil_bis_frames[i] + _img = self.render_frame( humans, pred_rend_array) + try: + writer.append_data(_img) + except: + print('Error in writing video') + print(type(_img)) + writer.close() +def render_frame(renderer, humans, pred_rend_array, default_R, default_T, device, keep_origin=True): + + if not isinstance(pred_rend_array, np.ndarray): + pred_rend_array = 
np.asarray(pred_rend_array) + renderer.create_camera(default_R, default_T) + _img = pred_rend_array + if humans is None: + humans = [] + if isinstance(humans, dict): + humans = [humans] + for human in humans: + if isinstance(human, dict): + v3d = human['v3d'].to(device) + else: + v3d = human + _img = renderer.render_mesh(v3d, _img) + + if keep_origin: + _img = np.concatenate([np.asarray(pred_rend_array), _img],1).astype(np.uint8) + return _img + + +def render_video(results, faces, K, pil_bis_frames, fps, out_path, device, keep_origin=True): + # results [F, N, ...] + if isinstance(pil_bis_frames[0], np.ndarray): + height, width, _ = pil_bis_frames[0].shape + else: + shape = pil_bis_frames[0].size + width, height = shape[1], shape[0] + renderer = Renderer(width, height, K[0], device, faces) + + + # build default camera + default_R, default_T = torch.eye(3), torch.zeros(3) + + writer = imageio.get_writer( + out_path, + fps=fps, mode='I', format='FFMPEG', macro_block_size=1 + ) + for i, humans in enumerate(tqdm(results)): + pred_rend_array = pil_bis_frames[i] + _img = render_frame(renderer, humans, pred_rend_array, default_R, default_T, device, keep_origin) + try: + writer.append_data(_img) + except: + print('Error in writing video') + print(type(_img)) + writer.close() diff --git a/engine/pose_estimation/pose_utils/render_oldversion.py b/engine/pose_estimation/pose_utils/render_oldversion.py new file mode 100644 index 0000000000000000000000000000000000000000..b4c936a426e8e01f231a9fe575fcb0b9ceab59f3 --- /dev/null +++ b/engine/pose_estimation/pose_utils/render_oldversion.py @@ -0,0 +1,264 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. +# CC BY-NC-SA 4.0 license + +import torch +import numpy as np +import trimesh +import math +from scipy.spatial.transform import Rotation +from PIL import ImageFont, ImageDraw, Image + +OPENCV_TO_OPENGL_CAMERA_CONVENTION = np.array([[1, 0, 0, 0], + [0, -1, 0, 0], + [0, 0, -1, 0], + [0, 0, 0, 1]]) + +def geotrf( Trf, pts, ncol=None, norm=False): + """ Apply a geometric transformation to a list of 3-D points. + H: 3x3 or 4x4 projection matrix (typically a Homography) + p: numpy/torch/tuple of coordinates. Shape must be (...,2) or (...,3) + + ncol: int. number of columns of the result (2 or 3) + norm: float. if != 0, the resut is projected on the z=norm plane. + + Returns an array of projected 2d points. 
+ """ + assert Trf.ndim in (2,3) + if isinstance(Trf, np.ndarray): + pts = np.asarray(pts) + elif isinstance(Trf, torch.Tensor): + pts = torch.as_tensor(pts, dtype=Trf.dtype) + + ncol = ncol or pts.shape[-1] + + # adapt shape if necessary + output_reshape = pts.shape[:-1] + if Trf.ndim == 3: + assert len(Trf) == len(pts), 'batch size does not match' + if Trf.ndim == 3 and pts.ndim > 3: + # Trf == (B,d,d) & pts == (B,H,W,d) --> (B, H*W, d) + pts = pts.reshape(pts.shape[0], -1, pts.shape[-1]) + elif Trf.ndim == 3 and pts.ndim == 2: + # Trf == (B,d,d) & pts == (B,d) --> (B, 1, d) + pts = pts[:, None, :] + + if pts.shape[-1]+1 == Trf.shape[-1]: + Trf = Trf.swapaxes(-1,-2) # transpose Trf + pts = pts @ Trf[...,:-1,:] + Trf[...,-1:,:] + elif pts.shape[-1] == Trf.shape[-1]: + Trf = Trf.swapaxes(-1,-2) # transpose Trf + pts = pts @ Trf + else: + pts = Trf @ pts.T + if pts.ndim >= 2: pts = pts.swapaxes(-1,-2) + if norm: + pts = pts / pts[...,-1:] # DONT DO /= BECAUSE OF WEIRD PYTORCH BUG + if norm != 1: pts *= norm + + return pts[...,:ncol].reshape(*output_reshape, ncol) + +def create_scene(img_pil, l_mesh, l_face, color=None, metallicFactor=0., roughnessFactor=0.5, focal=600): + + scene = trimesh.Scene( + lights=trimesh.scene.lighting.Light(intensity=3.0) + ) + + # Human meshes + for i, mesh in enumerate(l_mesh): + if color is None: + _color = (np.random.choice(range(1,225))/255, np.random.choice(range(1,225))/255, np.random.choice(range(1,225))/255) + else: + if isinstance(color,list): + _color = color[i] + elif isinstance(color,tuple): + _color = color + else: + raise NotImplementedError + mesh = trimesh.Trimesh(mesh, l_face[i]) + mesh.visual = trimesh.visual.TextureVisuals( + uv=None, + material=trimesh.visual.material.PBRMaterial( + metallicFactor=metallicFactor, + roughnessFactor=roughnessFactor, + alphaMode='OPAQUE', + baseColorFactor=(_color[0], _color[1], _color[2], 1.0) + ), + image=None, + face_materials=None + ) + scene.add_geometry(mesh) + + # Image + H, W = img_pil.size[0], img_pil.size[1] + screen_width = 0.3 + height = focal * screen_width / H + width = screen_width * 0.5**0.5 + rot45 = np.eye(4) + rot45[:3,:3] = Rotation.from_euler('z',np.deg2rad(45)).as_matrix() + rot45[2,3] = -height # set the tip of the cone = optical center + aspect_ratio = np.eye(4) + aspect_ratio[0,0] = W/H + transform = OPENCV_TO_OPENGL_CAMERA_CONVENTION @ aspect_ratio @ rot45 + cam = trimesh.creation.cone(width, height, sections=4, transform=transform) + # cam.apply_transform(transform) + # import ipdb + # ipdb.set_trace() + + # vertices = geotrf(transform, cam.vertices[[4,5,1,3]]) + vertices = cam.vertices[[4,5,1,3]] + faces = np.array([[0, 1, 2], [0, 2, 3], [2, 1, 0], [3, 2, 0]]) + img = trimesh.Trimesh(vertices=vertices, faces=faces) + uv_coords = np.float32([[0, 0], [1, 0], [1, 1], [0, 1]]) + # img_pil = Image.fromarray((255. * np.ones((20,20,3))).astype(np.uint8)) # white only! 
+ material = trimesh.visual.texture.SimpleMaterial(image=img_pil, + diffuse=[255,255,255,0], + ambient=[255,255,255,0], + specular=[255,255,255,0], + glossiness=1.0) + img.visual = trimesh.visual.TextureVisuals(uv=uv_coords, image=img_pil) #, material=material) + # _main_color = [255,255,255,0] + # print(img.visual.material.ambient) + # print(img.visual.material.diffuse) + # print(img.visual.material.specular) + # print(img.visual.material.main_color) + + # img.visual.material.ambient = _main_color + # img.visual.material.diffuse = _main_color + # img.visual.material.specular = _main_color + + # img.visual.material.main_color = _main_color + # img.visual.material.glossiness = _main_color + scene.add_geometry(img) + + # this is the camera mesh + rot2 = np.eye(4) + rot2[:3,:3] = Rotation.from_euler('z',np.deg2rad(2)).as_matrix() + # import ipdb + # ipdb.set_trace() + # vertices = cam.vertices + # print(rot2) + vertices = np.r_[cam.vertices, 0.95*cam.vertices, geotrf(rot2, cam.vertices)] + # vertices = np.r_[cam.vertices, 0.95*cam.vertices, 1.05*cam.vertices] + faces = [] + for face in cam.faces: + if 0 in face: continue + a,b,c = face + a2,b2,c2 = face + len(cam.vertices) + a3,b3,c3 = face + 2*len(cam.vertices) + + # add 3 pseudo-edges + faces.append((a,b,b2)) + faces.append((a,a2,c)) + faces.append((c2,b,c)) + + faces.append((a,b,b3)) + faces.append((a,a3,c)) + faces.append((c3,b,c)) + + # no culling + faces += [(c,b,a) for a,b,c in faces] + + cam = trimesh.Trimesh(vertices=vertices, faces=faces) + cam.visual.face_colors[:,:3] = (255, 0, 0) + scene.add_geometry(cam) + + # OpenCV to OpenGL + rot = np.eye(4) + cams2world = np.eye(4) + rot[:3,:3] = Rotation.from_euler('y',np.deg2rad(180)).as_matrix() + scene.apply_transform(np.linalg.inv(cams2world @ OPENCV_TO_OPENGL_CAMERA_CONVENTION @ rot)) + + return scene + + +def length(v): + return math.sqrt(v[0]*v[0]+v[1]*v[1]+v[2]*v[2]) + +def cross(v0, v1): + return [ + v0[1]*v1[2]-v1[1]*v0[2], + v0[2]*v1[0]-v1[2]*v0[0], + v0[0]*v1[1]-v1[0]*v0[1]] + +def dot(v0, v1): + return v0[0]*v1[0]+v0[1]*v1[1]+v0[2]*v1[2] + +def normalize(v, eps=1e-13): + l = length(v) + return [v[0]/(l+eps), v[1]/(l+eps), v[2]/(l+eps)] + +def lookAt(eye, target, *args, **kwargs): + """ + eye is the point of view, target is the point which is looked at and up is the upwards direction. + + Input should be in OpenCV format - we transform arguments to OpenGL + Do compute in OpenGL and then transform back to OpenCV + + """ + # Transform from OpenCV to OpenGL format + # eye = [eye[0], -eye[1], -eye[2]] + # target = [target[0], -target[1], -target[2]] + up = [0,-1,0] + + eye, at, up = eye, target, up + zaxis = normalize((at[0]-eye[0], at[1]-eye[1], at[2]-eye[2])) + xaxis = normalize(cross(zaxis, up)) + yaxis = cross(xaxis, zaxis) + + zaxis = [-zaxis[0],-zaxis[1],-zaxis[2]] + + viewMatrix = np.asarray([ + [xaxis[0], xaxis[1], xaxis[2], -dot(xaxis, eye)], + [yaxis[0], yaxis[1], yaxis[2], -dot(yaxis, eye)], + [zaxis[0], zaxis[1], zaxis[2], -dot(zaxis, eye)], + [0, 0, 0, 1]] + ).reshape(4,4) + + # OpenGL to OpenCV + viewMatrix = OPENCV_TO_OPENGL_CAMERA_CONVENTION @ viewMatrix + + return viewMatrix + +def print_distance_on_image(pred_rend_array, humans, _color): + # Add distance to the image. 
+ font = ImageFont.load_default() + rend_pil = Image.fromarray(pred_rend_array) + draw = ImageDraw.Draw(rend_pil) + for i_hum, hum in enumerate(humans): + # distance + transl = hum['transl_pelvis'].cpu().numpy().reshape(3) + dist_cam = np.sqrt(((transl[[0,2]])**2).sum()) # discarding Y axis + # 2d - bbox + bbox = get_bbox(hum['j2d_smplx'].cpu().numpy(), factor=1.35, output_format='x1y1x2y2') + loc = [(bbox[0] + bbox[2]) / 2., bbox[1]] + txt = f"{dist_cam:.2f}m" + length = font.getlength(txt) + loc[0] = loc[0] - length // 2 + fill = tuple((np.asarray(_color[i_hum]) * 255).astype(np.int32).tolist()) + draw.text((loc[0], loc[1]), txt, fill=fill, font=font) + return np.asarray(rend_pil) + +def get_bbox(points, factor=1., output_format='xywh'): + """ + Args: + - y: [k,2] + Return: + - bbox: [4] in a specific format + """ + assert len(points.shape) == 2, f"Wrong shape, expected two-dimensional array. Got shape {points.shape}" + assert points.shape[1] == 2 + x1, x2 = points[:,0].min(), points[:,0].max() + y1, y2 = points[:,1].min(), points[:,1].max() + cx, cy = (x2 + x1) / 2., (y2 + y1) / 2. + sx, sy = np.abs(x2 - x1), np.abs(y2 - y1) + sx, sy = int(factor * sx), int(factor * sy) + x1, y1 = int(cx - sx / 2.), int(cy - sy / 2.) + x2, y2 = int(cx + sx / 2.), int(cy + sy / 2.) + if output_format == 'xywh': + return [x1,y1,sx,sy] + elif output_format == 'x1y1x2y2': + return [x1,y1,x2,y2] + else: + raise NotImplementedError + diff --git a/engine/pose_estimation/pose_utils/rot6d.py b/engine/pose_estimation/pose_utils/rot6d.py new file mode 100644 index 0000000000000000000000000000000000000000..e3e3bbe57ebe766e691f40a2245e4a8d3e7c5b04 --- /dev/null +++ b/engine/pose_estimation/pose_utils/rot6d.py @@ -0,0 +1,425 @@ +from typing import Optional + +import torch +import torch.nn.functional as F + +def quaternion_to_axis_angle(quaternions: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as quaternions to axis/angle. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + norms = torch.norm(quaternions[..., 1:], p=2, dim=-1, keepdim=True) + half_angles = torch.atan2(norms, quaternions[..., :1]) + angles = 2 * half_angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + return quaternions[..., 1:] / sin_half_angles_over_angles + + + +def matrix_to_quaternion(matrix: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as rotation matrices to quaternions. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + quaternions with real part first, as tensor of shape (..., 4). 
+ """ + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape {matrix.shape}.") + + batch_dim = matrix.shape[:-2] + m00, m01, m02, m10, m11, m12, m20, m21, m22 = torch.unbind( + matrix.reshape(batch_dim + (9,)), dim=-1 + ) + + q_abs = _sqrt_positive_part( + torch.stack( + [ + 1.0 + m00 + m11 + m22, + 1.0 + m00 - m11 - m22, + 1.0 - m00 + m11 - m22, + 1.0 - m00 - m11 + m22, + ], + dim=-1, + ) + ) + + # we produce the desired quaternion multiplied by each of r, i, j, k + quat_by_rijk = torch.stack( + [ + # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and + # `int`. + torch.stack([q_abs[..., 0] ** 2, m21 - m12, m02 - m20, m10 - m01], dim=-1), + # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and + # `int`. + torch.stack([m21 - m12, q_abs[..., 1] ** 2, m10 + m01, m02 + m20], dim=-1), + # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and + # `int`. + torch.stack([m02 - m20, m10 + m01, q_abs[..., 2] ** 2, m12 + m21], dim=-1), + # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and + # `int`. + torch.stack([m10 - m01, m20 + m02, m21 + m12, q_abs[..., 3] ** 2], dim=-1), + ], + dim=-2, + ) + + # We floor here at 0.1 but the exact level is not important; if q_abs is small, + # the candidate won't be picked. + flr = torch.tensor(0.1).to(dtype=q_abs.dtype, device=q_abs.device) + quat_candidates = quat_by_rijk / (2.0 * q_abs[..., None].max(flr)) + + # if not for numerical problems, quat_candidates[i] should be same (up to a sign), + # forall i; we pick the best-conditioned one (with the largest denominator) + + return quat_candidates[ + F.one_hot(q_abs.argmax(dim=-1), num_classes=4) > 0.5, : + ].reshape(batch_dim + (4,)) + +def _sqrt_positive_part(x: torch.Tensor) -> torch.Tensor: + """ + Returns torch.sqrt(torch.max(0, x)) + but with a zero subgradient where x is 0. + """ + ret = torch.zeros_like(x) + positive_mask = x > 0 + ret[positive_mask] = torch.sqrt(x[positive_mask]) + return ret + +def matrix_to_axis_angle(matrix: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as rotation matrices to axis/angle. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + return quaternion_to_axis_angle(matrix_to_quaternion(matrix)) + +def euler_angles_to_axis_angle(euler_angles: torch.Tensor, convention: str) -> torch.Tensor: + """ + Convert rotations given as Euler angles in radians to axis/angle. + + Args: + euler_angles: Euler angles in radians as tensor of shape (..., 3). + convention: Convention string of three uppercase letters from + {"X", "Y", and "Z"}. + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + return matrix_to_axis_angle(euler_angles_to_matrix(euler_angles, convention)) + +def euler_angles_to_matrix(euler_angles: torch.Tensor, convention: str) -> torch.Tensor: + """ + Convert rotations given as Euler angles in radians to rotation matrices. + + Args: + euler_angles: Euler angles in radians as tensor of shape (..., 3). + convention: Convention string of three uppercase letters from + {"X", "Y", and "Z"}. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). 
+ """ + if euler_angles.dim() == 0 or euler_angles.shape[-1] != 3: + raise ValueError("Invalid input euler angles.") + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + matrices = [ + _axis_angle_rotation(c, e) + for c, e in zip(convention, torch.unbind(euler_angles, -1)) + ] + # return functools.reduce(torch.matmul, matrices) + return torch.matmul(torch.matmul(matrices[0], matrices[1]), matrices[2]) + +def _axis_angle_rotation(axis: str, angle: torch.Tensor) -> torch.Tensor: + """ + Return the rotation matrices for one of the rotations about an axis + of which Euler angles describe, for each value of the angle given. + + Args: + axis: Axis label "X" or "Y or "Z". + angle: any shape tensor of Euler angles in radians + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + + cos = torch.cos(angle) + sin = torch.sin(angle) + one = torch.ones_like(angle) + zero = torch.zeros_like(angle) + + if axis == "X": + R_flat = (one, zero, zero, zero, cos, -sin, zero, sin, cos) + elif axis == "Y": + R_flat = (cos, zero, sin, zero, one, zero, -sin, zero, cos) + elif axis == "Z": + R_flat = (cos, -sin, zero, sin, cos, zero, zero, zero, one) + else: + raise ValueError("letter must be either X, Y or Z.") + + return torch.stack(R_flat, -1).reshape(angle.shape + (3, 3)) + +def axis_angle_to_quaternion(axis_angle: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as axis/angle to quaternions. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + quaternions with real part first, as tensor of shape (..., 4). + """ + angles = torch.norm(axis_angle, p=2, dim=-1, keepdim=True) + half_angles = angles * 0.5 + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + quaternions = torch.cat( + [torch.cos(half_angles), axis_angle * sin_half_angles_over_angles], dim=-1 + ) + return quaternions + + +def axis_angle_to_matrix(axis_angle: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as axis/angle to rotation matrices. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + return quaternion_to_matrix(axis_angle_to_quaternion(axis_angle)) + +def quaternion_to_matrix(quaternions: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as quaternions to rotation matrices. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + r, i, j, k = torch.unbind(quaternions, -1) + # pyre-fixme[58]: `/` is not supported for operand types `float` and `Tensor`. 
+ two_s = 2.0 / (quaternions * quaternions).sum(-1) + + o = torch.stack( + ( + 1 - two_s * (j * j + k * k), + two_s * (i * j - k * r), + two_s * (i * k + j * r), + two_s * (i * j + k * r), + 1 - two_s * (i * i + k * k), + two_s * (j * k - i * r), + two_s * (i * k - j * r), + two_s * (j * k + i * r), + 1 - two_s * (i * i + j * j), + ), + -1, + ) + return o.reshape(quaternions.shape[:-1] + (3, 3)) + +def axis_angle_to_euler_angles(axis_angle: torch.Tensor) -> torch.Tensor: + """ + Convert rotations given as Euler angles in radians to axis/angle. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + return matrix_to_euler_angles(axis_angle_to_matrix(axis_angle), 'XYZ') + +def _angle_from_tan( + axis: str, other_axis: str, data, horizontal: bool, tait_bryan: bool +) -> torch.Tensor: + """ + Extract the first or third Euler angle from the two members of + the matrix which are positive constant times its sine and cosine. + + Args: + axis: Axis label "X" or "Y or "Z" for the angle we are finding. + other_axis: Axis label "X" or "Y or "Z" for the middle axis in the + convention. + data: Rotation matrices as tensor of shape (..., 3, 3). + horizontal: Whether we are looking for the angle for the third axis, + which means the relevant entries are in the same row of the + rotation matrix. If not, they are in the same column. + tait_bryan: Whether the first and third axes in the convention differ. + + Returns: + Euler Angles in radians for each matrix in data as a tensor + of shape (...). + """ + + i1, i2 = {"X": (2, 1), "Y": (0, 2), "Z": (1, 0)}[axis] + if horizontal: + i2, i1 = i1, i2 + even = (axis + other_axis) in ["XY", "YZ", "ZX"] + if horizontal == even: + return torch.atan2(data[..., i1], data[..., i2]) + if tait_bryan: + return torch.atan2(-data[..., i2], data[..., i1]) + return torch.atan2(data[..., i2], -data[..., i1]) + + +def _index_from_letter(letter: str) -> int: + if letter == "X": + return 0 + if letter == "Y": + return 1 + if letter == "Z": + return 2 + raise ValueError("letter must be either X, Y or Z.") + + +def matrix_to_euler_angles(matrix: torch.Tensor, convention: str) -> torch.Tensor: + """ + Convert rotations given as rotation matrices to Euler angles in radians. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + convention: Convention string of three uppercase letters. + + Returns: + Euler angles in radians as tensor of shape (..., 3). 
+ """ + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape {matrix.shape}.") + i0 = _index_from_letter(convention[0]) + i2 = _index_from_letter(convention[2]) + tait_bryan = i0 != i2 + if tait_bryan: + central_angle = torch.asin( + matrix[..., i0, i2] * (-1.0 if i0 - i2 in [-1, 2] else 1.0) + ) + else: + central_angle = torch.acos(matrix[..., i0, i0]) + + o = ( + _angle_from_tan( + convention[0], convention[1], matrix[..., i2], False, tait_bryan + ), + central_angle, + _angle_from_tan( + convention[2], convention[1], matrix[..., i0, :], True, tait_bryan + ), + ) + return torch.stack(o, -1) + + +def rotation_6d_to_matrix(d6: torch.Tensor) -> torch.Tensor: + """ + Converts 6D rotation representation by Zhou et al. [1] to rotation matrix + using Gram--Schmidt orthogonalisation per Section B of [1]. + Args: + d6: 6D rotation representation, of size (*, 6) + + Returns: + batch of rotation matrices of size (*, 3, 3) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + + a1, a2 = d6[..., :3], d6[..., 3:] + b1 = F.normalize(a1, dim=-1) + b2 = a2 - (b1 * a2).sum(-1, keepdim=True) * b1 + b2 = F.normalize(b2, dim=-1) + b3 = torch.cross(b1, b2, dim=-1) + return torch.stack((b1, b2, b3), dim=-2) + + +def matrix_to_rotation_6d(matrix: torch.Tensor) -> torch.Tensor: + """ + Converts rotation matrices to 6D rotation representation by Zhou et al. [1] + by dropping the last row. Note that 6D representation is not unique. + Args: + matrix: batch of rotation matrices of size (*, 3, 3) + + Returns: + 6D rotation representation, of size (*, 6) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + return matrix[..., :2, :].clone().reshape(*matrix.size()[:-2], 6) + + +def axis_angle_to_rotation_6d(axis_angle: torch.Tensor) -> torch.Tensor: + return matrix_to_rotation_6d(axis_angle_to_matrix(axis_angle)) + + +def rotation_6d_to_axis_angle(d6: torch.Tensor) -> torch.Tensor: + return matrix_to_axis_angle(rotation_6d_to_matrix(d6)) \ No newline at end of file diff --git a/engine/pose_estimation/pose_utils/tensor_manip.py b/engine/pose_estimation/pose_utils/tensor_manip.py new file mode 100644 index 0000000000000000000000000000000000000000..1f09c3083a0057783f2e546fe2f00ffbceb18693 --- /dev/null +++ b/engine/pose_estimation/pose_utils/tensor_manip.py @@ -0,0 +1,45 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. +# CC BY-NC-SA 4.0 license + +import torch + +def rebatch(idx_0, idx_det): + # Rebuild the batch dimension : (N, ...) is turned into (batch_dim, nb_max, ...) + # with zero padding for batch elements with fewer people. + values, counts = torch.unique(idx_0, sorted=True, return_counts=True) + #print(idx_0) + if not len(values) == values.max() + 1: + # Abnormal jumps in the idx_0: some images in the batch did not produce any inputs. 
+ jumps = (values - torch.concat([torch.Tensor([-1]).to(values.device), values])[:-1]) - 1 + offsets = torch.cumsum(jumps.int(), dim=0) + + # Correcting idx_0 to account for missing batch elements + # This is actually wrong: in the case where we have 2 consecutive images without ppl, this will fail. + # But two consecutive jumps has proba so close to 0 that I consider it 'impossible'. + offsets = [c * [o] for o, c in [(offsets[i], counts[i]) for i in range(offsets.shape[0])]] + offsets = torch.Tensor([e for o in offsets for e in o]).to(jumps.device).int() + idx_0 = idx_0 - offsets + idx_det_0 = idx_det[0] - offsets + else: + idx_det_0 = idx_det[0] + return counts, idx_det_0 + +def pad(x, padlen, dim): + assert x.shape[dim] <= padlen, "Incoherent dimensions" + if not dim == 1: + raise NotImplementedError("Not implemented for this dim.") + padded = torch.concat([x, x.new_zeros((x.shape[0], padlen - x.shape[dim],) + x.shape[2:])], dim=dim) + mask = torch.concat([x.new_ones((x.shape[0], x.shape[dim])), x.new_zeros((x.shape[0], padlen - x.shape[dim]))], dim=dim) + return padded, mask + +def pad_to_max(x_central, counts): + """Pad so that each batch images has the same number of x_central queries. + Mask is used in attention to remove the fact queries. """ + max_count = counts.max() + xlist = torch.split(x_central, tuple(counts), dim=0) + xlist2 = [x.unsqueeze(0) for x in xlist] + xlist3 = [pad(x, max_count, dim=1) for x in xlist2] + xlist4, mask = [x[0] for x in xlist3], [x[1] for x in xlist3] + x_central, mask = torch.concat(xlist4, dim=0), torch.concat(mask, dim=0) + return x_central, mask diff --git a/engine/pose_estimation/pose_utils/tracker.py b/engine/pose_estimation/pose_utils/tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..e2c12f6262ebaa50df5ce2f22c2ce92d0cafcf11 --- /dev/null +++ b/engine/pose_estimation/pose_utils/tracker.py @@ -0,0 +1,105 @@ +import numpy as np +import warnings +import torch + + +def bbox_xyxy_to_cxcywh(bboxes: np.ndarray, scale=1.0, device=None): + w = bboxes[..., 2] - bboxes[..., 0] + h = bboxes[..., 3] - bboxes[..., 1] + cx = (bboxes[..., 0] + bboxes[..., 2]) / 2.0 + cy = (bboxes[..., 1] + bboxes[..., 3]) / 2.0 + new_bboxes = torch.stack([cx, cy, w * scale, h * scale], dim=-1) + if device is not None: + new_bboxes = torch.tensor(new_bboxes, device=device) + return new_bboxes + + +def compute_iou(bboxA, bboxB): + """Compute the Intersection over Union (IoU) between two boxes . + + Args: + bboxA (list): The first bbox info (left, top, right, bottom, score). + bboxB (list): The second bbox info (left, top, right, bottom, score). + + Returns: + float: The IoU value. + """ + + x1 = max(bboxA[0], bboxB[0]) + y1 = max(bboxA[1], bboxB[1]) + x2 = min(bboxA[2], bboxB[2]) + y2 = min(bboxA[3], bboxB[3]) + + inter_area = max(0, x2 - x1) * max(0, y2 - y1) + + bboxA_area = (bboxA[2] - bboxA[0]) * (bboxA[3] - bboxA[1]) + bboxB_area = (bboxB[2] - bboxB[0]) * (bboxB[3] - bboxB[1]) + union_area = float(bboxA_area + bboxB_area - inter_area) + if union_area == 0: + union_area = 1e-8 + warnings.warn("union_area=0 is unexpected") + + iou = inter_area / union_area + + return iou + + +def track_by_iou(res, results_last, thr): + """Get track id using IoU tracking greedily. + + Args: + res (dict): The bbox & pose results of the person instance. + results_last (list[dict]): The bbox & pose & track_id info of the + last frame (bbox_result, pose_result, track_id). + thr (float): The threshold for iou tracking. 
+ + Returns: + int: The track id for the new person instance. + list[dict]: The bbox & pose & track_id info of the persons + that have not been matched on the last frame. + dict: The matched person instance on the last frame. + """ + + bbox = list(res["bbox"]) + + max_iou_score = -1 + max_index = -1 + match_result = {} + for index, res_last in enumerate(results_last): + bbox_last = list(res_last["bbox"]) + + iou_score = _compute_iou(bbox, bbox_last) + if iou_score > max_iou_score: + max_iou_score = iou_score + max_index = index + + if max_iou_score > thr: + track_id = results_last[max_index]["track_id"] + match_result = results_last[max_index] + del results_last[max_index] + else: + track_id = -1 + + return track_id, results_last, match_result + + +def track_by_area(humans, target_img_size, threshold=0.3): + if len(humans) < 1: + return None + + IMAGE_AREA = target_img_size**2 + target_human = None + max_area = -1 + for human in humans: + j2d_coco = human["j2d"].to(torch.float) # [joints_smplx_to_coco()].to(torch.float) + + # compute bbox + j2d_area = (j2d_coco[..., 0].max() - j2d_coco[..., 0].min()) * ( + j2d_coco[..., 1].max() - j2d_coco[..., 1].min() + ) + if max_area < j2d_area: + max_area = j2d_area + target_human = human + # if max_area / IMAGE_AREA < threshold: + # return None + return target_human diff --git a/engine/pose_estimation/pose_utils/training.py b/engine/pose_estimation/pose_utils/training.py new file mode 100644 index 0000000000000000000000000000000000000000..288186b13c979245a82fd6fd03eb86f2caea5284 --- /dev/null +++ b/engine/pose_estimation/pose_utils/training.py @@ -0,0 +1,225 @@ +# Multi-HMR +# Copyright (c) 2024-present NAVER Corp. +# CC BY-NC-SA 4.0 license + +import torch +import numpy as np +from itertools import product + +def compute_prf1(count, miss, fp): + """ + Code modified from https://github.com/Arthur151/ROMP/blob/4eebd3647f57d291d26423e51f0d514ff7197cb3/simple_romp/evaluation/RH_evaluation/evaluation.py#L90 + """ + if count == 0: + return 0, 0, 0 + all_tp = count - miss + all_fp = fp + all_fn = miss + if all_tp == 0: + return 0., 0., 0. + all_f1_score = round(all_tp / (all_tp + 0.5 * (all_fp + all_fn)), 2) + all_recall = round(all_tp / (all_tp + all_fn), 2) + all_precision = round(all_tp / (all_tp + all_fp), 2) + return 100. * all_precision, 100.* all_recall, 100. * all_f1_score + +def match_2d_greedy( + pred_kps, + gtkp, + valid_mask, + imgPath=None, + baseline=None, + iou_thresh=0.05, + valid=None, + ind=-1): + ''' + Code modified from: https://github.com/Arthur151/ROMP/blob/4eebd3647f57d291d26423e51f0d514ff7197cb3/simple_romp/trace2/evaluation/eval_3DPW.py#L232 + matches groundtruth keypoints to the detection by considering all possible matchings. + :return: best possible matching, a list of tuples, where each tuple corresponds to one match of pred_person.to gt_person. 
+ the order within one tuple is as follows (idx_pred_kps, idx_gt_kps) + ''' + predList = np.arange(len(pred_kps)) + gtList = np.arange(len(gtkp)) + # get all pairs of elements in pred_kps, gtkp + # all combinations of 2 elements from l1 and l2 + combs = list(product(predList, gtList)) + + errors_per_pair = {} + errors_per_pair_list = [] + for comb in combs: + vmask = valid_mask[comb[1]] + assert vmask.sum()>0, print('no valid points') + errors_per_pair[str(comb)] = np.linalg.norm(pred_kps[comb[0]][vmask, :2] - gtkp[comb[1]][vmask, :2], 2) + errors_per_pair_list.append(errors_per_pair[str(comb)]) + + gtAssigned = np.zeros((len(gtkp),), dtype=bool) + opAssigned = np.zeros((len(pred_kps),), dtype=bool) + errors_per_pair_list = np.array(errors_per_pair_list) + + bestMatch = [] + excludedGtBecauseInvalid = [] + falsePositiveCounter = 0 + while np.sum(gtAssigned) < len(gtAssigned) and np.sum( + opAssigned) + falsePositiveCounter < len(pred_kps): + found = False + falsePositive = False + while not(found): + if sum(np.inf == errors_per_pair_list) == len( + errors_per_pair_list): + print('something went wrong here') + + minIdx = np.argmin(errors_per_pair_list) + minComb = combs[minIdx] + # compute IOU + iou = get_bbx_overlap( + pred_kps[minComb[0]], gtkp[minComb[1]]) #, imgPath, baseline) + # if neither prediction nor ground truth has been matched before and iou + # is larger than threshold + if not(opAssigned[minComb[0]]) and not( + gtAssigned[minComb[1]]) and iou >= iou_thresh: + #print(imgPath + ': found matching') + found = True + errors_per_pair_list[minIdx] = np.inf + else: + errors_per_pair_list[minIdx] = np.inf + # if errors_per_pair_list[minIdx] > + # matching_threshold*headBboxs[combs[minIdx][1]]: + if iou < iou_thresh: + #print( + # imgPath + ': false positive detected using threshold') + found = True + falsePositive = True + falsePositiveCounter += 1 + + # if ground truth of combination is valid keep the match, else exclude + # gt from matching + if not(valid is None): + if valid[minComb[1]]: + if not falsePositive: + bestMatch.append(minComb) + opAssigned[minComb[0]] = True + gtAssigned[minComb[1]] = True + else: + gtAssigned[minComb[1]] = True + excludedGtBecauseInvalid.append(minComb[1]) + + elif not falsePositive: + # same as above but without checking for valid + bestMatch.append(minComb) + opAssigned[minComb[0]] = True + gtAssigned[minComb[1]] = True + + bestMatch = np.array(bestMatch) + # add false positives and false negatives to the matching + # find which elements have been successfully assigned + opAssigned = [] + gtAssigned = [] + for pair in bestMatch: + opAssigned.append(pair[0]) + gtAssigned.append(pair[1]) + opAssigned.sort() + gtAssigned.sort() + + falsePositives = [] + misses = [] + + # handle false positives + opIds = np.arange(len(pred_kps)) + # returns values of oIds that are not in opAssigned + notAssignedIds = np.setdiff1d(opIds, opAssigned) + for notAssignedId in notAssignedIds: + falsePositives.append(notAssignedId) + gtIds = np.arange(len(gtList)) + # returns values of gtIds that are not in gtAssigned + notAssignedIdsGt = np.setdiff1d(gtIds, gtAssigned) + + # handle false negatives/misses + for notAssignedIdGt in notAssignedIdsGt: + if not(valid is None): # if using the new matching + if valid[notAssignedIdGt]: + #print(imgPath + ': miss') + misses.append(notAssignedIdGt) + else: + excludedGtBecauseInvalid.append(notAssignedIdGt) + else: + #print(imgPath + ': miss') + misses.append(notAssignedIdGt) + + return bestMatch, falsePositives, misses # tuples are 
(idx_pred_kps, idx_gt_kps) + +def get_bbx_overlap(p1, p2): + """ + Code modifed from https://github.com/Arthur151/ROMP/blob/4eebd3647f57d291d26423e51f0d514ff7197cb3/simple_romp/trace2/evaluation/eval_3DPW.py#L185 + """ + min_p1 = np.min(p1, axis=0) + min_p2 = np.min(p2, axis=0) + max_p1 = np.max(p1, axis=0) + max_p2 = np.max(p2, axis=0) + + bb1 = {} + bb2 = {} + + bb1['x1'] = min_p1[0] + bb1['x2'] = max_p1[0] + bb1['y1'] = min_p1[1] + bb1['y2'] = max_p1[1] + bb2['x1'] = min_p2[0] + bb2['x2'] = max_p2[0] + bb2['y1'] = min_p2[1] + bb2['y2'] = max_p2[1] + + assert bb1['x1'] < bb1['x2'] + assert bb1['y1'] < bb1['y2'] + assert bb2['x1'] < bb2['x2'] + assert bb2['y1'] < bb2['y2'] + # determine the coordinates of the intersection rectangle + x_left = max(bb1['x1'], bb2['x1']) + y_top = max(bb1['y1'], bb2['y1']) + x_right = min(bb1['x2'], bb2['x2']) + y_bottom = min(bb1['y2'], bb2['y2']) + + # The intersection of two axis-aligned bounding boxes is always an + # axis-aligned bounding box + intersection_area = max(0, x_right - x_left + 1) * \ + max(0, y_bottom - y_top + 1) + + # compute the area of both AABBs + bb1_area = (bb1['x2'] - bb1['x1'] + 1) * (bb1['y2'] - bb1['y1'] + 1) + bb2_area = (bb2['x2'] - bb2['x1'] + 1) * (bb2['y2'] - bb2['y1'] + 1) + + # compute the intersection over union by taking the intersection + # area and dividing it by the sum of prediction + ground-truth + # areas - the interesection area + iou = intersection_area / float(bb1_area + bb2_area - intersection_area) + + return iou + + +class AverageMeter(object): + """ + Code mofied from https://github.com/pytorch/examples/blob/main/imagenet/main.py#L423 + Computes and stores the average and current value + """ + + def __init__(self, name, fmt=':f'): + self.name = name + self.fmt = fmt + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + if type(val) == torch.Tensor: + val = val.detach() + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + def __str__(self): + fmtstr = '{name} {val' + self.fmt + '} ({avg' + self.fmt + '})' + return fmtstr.format(**self.__dict__) + diff --git a/engine/pose_estimation/requirements.txt b/engine/pose_estimation/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..95db2f2bb274b81de59ca3116067060aeddb1487 --- /dev/null +++ b/engine/pose_estimation/requirements.txt @@ -0,0 +1,25 @@ +torch==2.0.1 +trimesh==3.22.3 +pyrender==0.1.45 +einops==0.6.1 +roma +pillow==10.0.1 +smplx +pyvista==0.42.3 +numpy==1.22.4 +pyglet==1.5.24 +tqdm==4.65.0 +xformers==0.0.20 + +# for huggingface +gradio==4.18.0 +spaces==0.19.4 + +# for training/validation +tensorboard==2.16.2 + +# for ehf +plyfile==1.0.3 + +# for smpl +chumpy==0.70 \ No newline at end of file diff --git a/gfpgan/weights/detection_Resnet50_Final.pth b/gfpgan/weights/detection_Resnet50_Final.pth new file mode 100644 index 0000000000000000000000000000000000000000..16546738ce0a00a9fd47585e0fc52744d31cc117 --- /dev/null +++ b/gfpgan/weights/detection_Resnet50_Final.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6d1de9c2944f2ccddca5f5e010ea5ae64a39845a86311af6fdf30841b0a5a16d +size 109497761 diff --git a/gfpgan/weights/parsing_parsenet.pth b/gfpgan/weights/parsing_parsenet.pth new file mode 100644 index 0000000000000000000000000000000000000000..1ac2efc50360a79c9905dbac57d9d99cbfbe863c --- /dev/null +++ b/gfpgan/weights/parsing_parsenet.pth @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:3d558d8d0e42c20224f13cf5a29c79eba2d59913419f945545d8cf7b72920de2 +size 85331193 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/requirements_lhm.txt b/requirements_lhm.txt new file mode 100644 index 0000000000000000000000000000000000000000..51bb15f775d1e695b644a7db22898d564db10924 --- /dev/null +++ b/requirements_lhm.txt @@ -0,0 +1,53 @@ +einops +roma +accelerate +smplx +iopath +# gradio +wheel +# chumpy==0.66 +decord==0.6.0 +diffusers +dna==0.0.1 +gfpgan==1.3.8 +gsplat==1.4.0 +huggingface_hub==0.23.2 +imageio==2.19.3 +jaxtyping==0.2.38 +kiui==0.2.14 +kornia==0.7.2 +loguru==0.7.3 +lpips==0.1.4 +matplotlib==3.5.3 +megfile==4.1.0.post2 +numpy==1.23.0 +omegaconf==2.3.0 +open3d==0.19.0 +opencv_python +opencv_python_headless +Pillow==11.1.0 +plyfile +pygltflib==1.16.2 +pyrender==0.1.45 +PyYAML==6.0.1 +rembg==2.0.63 +Requests==2.32.3 +scipy +setuptools==74.0.0 +taming_transformers_rom1504==0.0.6 +timm==1.0.15 + +# https://download.pytorch.org/whl/cu121/torch-2.5.1%2Bcu121-cp310-cp310-linux_x86_64.whl#sha256=92af92c569de5da937dd1afb45ecfdd598ec1254cf2e49e3d698cb24d71aae14 +# https://download.pytorch.org/whl/cu121/torchvision-0.20.1%2Bcu121-cp310-cp310-linux_x86_64.whl#sha256=304937b82c933d5155bd04d771f4b187273f67a76050bb4276b521f7e9b4c4e7 +# https://download.pytorch.org/whl/cu121/xformers-0.0.29.post1-cp310-cp310-manylinux_2_28_x86_64.whl#sha256=e213ff8123e20602bd486739ffee4013338b02f9d2e0e4635a2912750854fdbe + +tqdm==4.66.4 +transformers==4.41.2 +trimesh==4.4.9 +typeguard==2.13.3 +xatlas==0.0.9 +imageio-ffmpeg + +https://huggingface.co/spaces/DyrusQZ/LHM_ZeroDebug/resolve/main/wheels/diff_gaussian_rasterization-0.0.0-cp310-cp310-linux_x86_64.whl?download=true +https://huggingface.co/spaces/DyrusQZ/LHM_ZeroDebug/resolve/main/wheels/simple_knn-0.0.0-cp310-cp310-linux_x86_64.whl?download=true + diff --git a/requirements_real.txt b/requirements_real.txt new file mode 100644 index 0000000000000000000000000000000000000000..be1b83ed0f99b6d160517c53cdf1ff7ed94f30ff --- /dev/null +++ b/requirements_real.txt @@ -0,0 +1,48 @@ +einops +roma +accelerate +smplx +iopath +# gradio +chumpy +decord==0.6.0 +diffusers +dna==0.0.1 +gfpgan==1.3.8 +gsplat==1.4.0 +huggingface_hub==0.23.2 +imageio==2.19.3 +jaxtyping==0.2.38 +kiui==0.2.14 +kornia==0.7.2 +loguru==0.7.3 +lpips==0.1.4 +matplotlib==3.5.3 +megfile==4.1.0.post2 +numpy==1.23.0 +omegaconf==2.3.0 +open3d==0.19.0 +opencv_python +opencv_python_headless +Pillow==11.1.0 +plyfile +pygltflib==1.16.2 +pyrender==0.1.45 +PyYAML==6.0.1 +rembg==2.0.63 +Requests==2.32.3 +scipy +setuptools==74.0.0 +taming_transformers_rom1504==0.0.6 +timm==1.0.15 + +https://download.pytorch.org/whl/cu121/torch-2.5.1%2Bcu121-cp310-cp310-linux_x86_64.whl#sha256=92af92c569de5da937dd1afb45ecfdd598ec1254cf2e49e3d698cb24d71aae14 +https://download.pytorch.org/whl/cu121/torchvision-0.20.1%2Bcu121-cp310-cp310-linux_x86_64.whl#sha256=304937b82c933d5155bd04d771f4b187273f67a76050bb4276b521f7e9b4c4e7 +https://download.pytorch.org/whl/cu121/xformers-0.0.29.post1-cp310-cp310-manylinux_2_28_x86_64.whl#sha256=e213ff8123e20602bd486739ffee4013338b02f9d2e0e4635a2912750854fdbe + +tqdm==4.66.4 +transformers==4.41.2 +trimesh==4.4.9 +typeguard==2.13.3 +xatlas==0.0.9 +imageio-ffmpeg \ No newline at end of file diff --git a/scripts/convert_hf.py b/scripts/convert_hf.py new file mode 100644 index 
0000000000000000000000000000000000000000..301ac9c81899b2133b0ee84fcd5c973a572bbbb1 --- /dev/null +++ b/scripts/convert_hf.py @@ -0,0 +1,111 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import pdb +import sys +import traceback +from tempfile import TemporaryDirectory + +import safetensors +import torch.nn as nn +from accelerate import Accelerator +from megfile import ( + smart_copy, + smart_exists, + smart_listdir, + smart_makedirs, + smart_path_join, +) +from omegaconf import OmegaConf + +sys.path.append(".") + +from LHM.models import model_dict +from LHM.utils.hf_hub import wrap_model_hub +from LHM.utils.proxy import no_proxy + + +@no_proxy +def auto_load_model(cfg, model: nn.Module) -> int: + + ckpt_root = smart_path_join( + cfg.saver.checkpoint_root, + cfg.experiment.parent, + cfg.experiment.child, + ) + if not smart_exists(ckpt_root): + raise FileNotFoundError(f"Checkpoint root not found: {ckpt_root}") + ckpt_dirs = smart_listdir(ckpt_root) + if len(ckpt_dirs) == 0: + raise FileNotFoundError(f"No checkpoint found in {ckpt_root}") + ckpt_dirs.sort() + + load_step = ( + f"{cfg.convert.global_step}" + if cfg.convert.global_step is not None + else ckpt_dirs[-1] + ) + load_model_path = smart_path_join(ckpt_root, load_step, "model.safetensors") + + if load_model_path.startswith("s3"): + tmpdir = TemporaryDirectory() + tmp_model_path = smart_path_join(tmpdir.name, f"tmp.safetensors") + smart_copy(load_model_path, tmp_model_path) + load_model_path = tmp_model_path + + print(f"Loading from {load_model_path}") + try: + safetensors.torch.load_model(model, load_model_path, strict=True) + except: + traceback.print_exc() + safetensors.torch.load_model(model, load_model_path, strict=False) + + return int(load_step) + + +if __name__ == "__main__": + + parser = argparse.ArgumentParser() + parser.add_argument("--config", type=str, default="./assets/config.yaml") + args, unknown = parser.parse_known_args() + cfg = OmegaConf.load(args.config) + cli_cfg = OmegaConf.from_cli(unknown) + cfg = OmegaConf.merge(cfg, cli_cfg) + + """ + [cfg.convert] + global_step: int + save_dir: str + """ + + accelerator = Accelerator() + + # hf_model_cls = wrap_model_hub(model_dict[cfg.experiment.type]) + hf_model_cls = wrap_model_hub(model_dict["human_lrm_sapdino_bh_sd3_5"]) + + hf_model = hf_model_cls(OmegaConf.to_container(cfg.model)) + loaded_step = auto_load_model(cfg, hf_model) + dump_path = smart_path_join( + f"./exps/releases", + cfg.experiment.parent, + cfg.experiment.child, + f"step_{loaded_step:06d}", + ) + print(f"Saving locally to {dump_path}") + smart_makedirs(dump_path, exist_ok=True) + hf_model.save_pretrained( + save_directory=dump_path, + config=hf_model.config, + ) diff --git a/scripts/exp/run_4gpu.sh b/scripts/exp/run_4gpu.sh new file mode 100644 index 0000000000000000000000000000000000000000..ea90e0647db9b1180dc27c5959979095c91f3ab4 --- /dev/null +++ b/scripts/exp/run_4gpu.sh @@ -0,0 +1,16 @@ + ACC_CONFIG="./configs/accelerate-train-4gpu.yaml" + 
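+ # Usage sketch (the config path is the default below; the port is only an example):
+ #   bash scripts/exp/run_4gpu.sh ./configs/train-sample-human.yaml 12345
+ # An optional $1 overrides TRAIN_CONFIG and an optional $2 overrides MAIN_PORT.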
TRAIN_CONFIG="./configs/train-sample-human.yaml" + + if [ -n "$1" ]; then + TRAIN_CONFIG=$1 + else + TRAIN_CONFIG="./configs/train-sample-human.yaml" + fi + + if [ -n "$2" ]; then + MAIN_PORT=$2 + else + MAIN_PORT=12345 + fi + + accelerate launch --config_file $ACC_CONFIG --main_process_port=$MAIN_PORT -m openlrm.launch train.human_lrm --config $TRAIN_CONFIG \ No newline at end of file diff --git a/scripts/exp/run_8gpu.sh b/scripts/exp/run_8gpu.sh new file mode 100644 index 0000000000000000000000000000000000000000..f6f65a66db97c04adae59f8c0ca3018ba8bd606f --- /dev/null +++ b/scripts/exp/run_8gpu.sh @@ -0,0 +1,16 @@ + ACC_CONFIG="./configs/accelerate-train.yaml" + TRAIN_CONFIG="./configs/train-sample-human.yaml" + + if [ -n "$1" ]; then + TRAIN_CONFIG=$1 + else + TRAIN_CONFIG="./configs/train-sample-human.yaml" + fi + + if [ -n "$2" ]; then + MAIN_PORT=$2 + else + MAIN_PORT=12345 + fi + + accelerate launch --config_file $ACC_CONFIG --main_process_port=$MAIN_PORT -m openlrm.launch train.human_lrm --config $TRAIN_CONFIG \ No newline at end of file diff --git a/scripts/exp/run_debug.sh b/scripts/exp/run_debug.sh new file mode 100644 index 0000000000000000000000000000000000000000..6aa6233149e06b19da6bcd288d04ef91e3d961c0 --- /dev/null +++ b/scripts/exp/run_debug.sh @@ -0,0 +1,15 @@ + ACC_CONFIG="./configs/accelerate-train-1gpu.yaml" + + if [ -n "$1" ]; then + TRAIN_CONFIG=$1 + else + TRAIN_CONFIG="./configs/train-sample-human.yaml" + fi + + if [ -n "$2" ]; then + MAIN_PORT=$2 + else + MAIN_PORT=12345 + fi + + accelerate launch --config_file $ACC_CONFIG --main_process_port=$MAIN_PORT -m openlrm.launch train.human_lrm --config $TRAIN_CONFIG \ No newline at end of file diff --git a/scripts/upload_hub.py b/scripts/upload_hub.py new file mode 100644 index 0000000000000000000000000000000000000000..52fba14d95d367a776c45a63fbfef8054b2e1406 --- /dev/null +++ b/scripts/upload_hub.py @@ -0,0 +1,43 @@ +# Copyright (c) 2023-2024, Zexin He +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
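+ # Usage sketch (all argument values below are placeholders): convert_hf.py saves a
+ # HF-style checkpoint under ./exps/releases/<parent>/<child>/step_<N>, which can then
+ # be pushed with, e.g.:
+ #   python scripts/upload_hub.py --model_type human_lrm_sapdino_bh_sd3_5 \
+ #       --local_ckpt ./exps/releases/<parent>/<child>/step_000000 \
+ #       --repo_id <namespace>/<repo-name>
+ # The script wraps the registered model class for the Hugging Face hub, loads the
+ # local checkpoint, and pushes it to a private repo.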
+ + +import sys + +sys.path.append(".") + +import argparse + +from accelerate import Accelerator + +from LHM.models import model_dict +from LHM.utils.hf_hub import wrap_model_hub + +if __name__ == "__main__": + + parser = argparse.ArgumentParser() + parser.add_argument("--model_type", type=str, required=True) + parser.add_argument("--local_ckpt", type=str, required=True) + parser.add_argument("--repo_id", type=str, required=True) + args, unknown = parser.parse_known_args() + + accelerator = Accelerator() + + hf_model_cls = wrap_model_hub(model_dict[args.model_type]) + hf_model = hf_model_cls.from_pretrained(args.local_ckpt) + hf_model.push_to_hub( + repo_id=args.repo_id, + config=hf_model.config, + private=True, + ) diff --git a/tools/metrics/compute_facesimilarity.py b/tools/metrics/compute_facesimilarity.py new file mode 100644 index 0000000000000000000000000000000000000000..5e5474a164707120a8fabba666406699d25e801c --- /dev/null +++ b/tools/metrics/compute_facesimilarity.py @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-03 10:29:00 +# @Function : easy to use FaceSimilarity metric + +import os +import pdb +import shutil +import sys + +sys.path.append("./") +from collections import defaultdict + +import numpy as np +import torch +import torch.nn.functional as F +from PIL import Image +from prettytable import PrettyTable +from torch.utils.data import Dataset +from torchmetrics.image import StructuralSimilarityIndexMeasure +from torchmetrics.image.lpip import LearnedPerceptualImagePatchSimilarity +from torchvision import transforms +from tqdm import tqdm + +from openlrm.models.arcface_utils import ResNetArcFace +from openlrm.utils.face_detector import FaceDetector + +device = "cuda" +model_path = "./pretrained_models/gagatracker/vgghead/vgg_heads_l.trcd" +face_detector = FaceDetector(model_path=model_path, device=device) + +id_face_net = ResNetArcFace() +id_face_net.cuda() +id_face_net.eval() + + +def get_image_paths_current_dir(folder_path): + image_extensions = { + ".jpg", + ".jpeg", + ".png", + ".gif", + ".bmp", + ".tiff", + ".webp", + ".jfif", + } + + return sorted( + [ + os.path.join(folder_path, f) + for f in os.listdir(folder_path) + if os.path.splitext(f)[1].lower() in image_extensions + ] + ) + + +def write_json(path, x): + """write a json file. + + Args: + path (str): path to write json file. + x (dict): dict to write. 
+ """ + import json + + with open(path, "w") as f: + json.dump(x, f, indent=2) + + +def crop_face_image(image_path): + rgb = np.array(Image.open(image_path)) + rgb = torch.from_numpy(rgb).permute(2, 0, 1) + bbox = face_detector(rgb) + head_rgb = rgb[:, int(bbox[1]) : int(bbox[3]), int(bbox[0]) : int(bbox[2])] + head_rgb = head_rgb.permute(1, 2, 0) + head_rgb = head_rgb.cpu().numpy() + return head_rgb + + +def gray_resize_for_identity(out, size=128): + out_gray = ( + 0.2989 * out[:, 0, :, :] + 0.5870 * out[:, 1, :, :] + 0.1140 * out[:, 2, :, :] + ) + out_gray = out_gray.unsqueeze(1) + out_gray = F.interpolate( + out_gray, (size, size), mode="bilinear", align_corners=False + ) + return out_gray + + +@torch.no_grad() +def eval(input_folder, target_folder, front_view_idx, device="cuda"): + src_img = os.path.join(target_folder, f"{front_view_idx:05d}.png") + if not os.path.exists(src_img): + return -1 + + head_img = crop_face_image(src_img) + input_imgs = get_image_paths_current_dir(input_folder) + if "visualization" in input_imgs[-1]: + input_imgs = input_imgs[:-1] + + to_tensor = transforms.ToTensor() + + head_img = to_tensor(head_img).unsqueeze(0).to(device) + src_head_tensor = gray_resize_for_identity(head_img) + src_head_feature = id_face_net(src_head_tensor).detach() + + face_id_loss_list = [] + + for input_img in input_imgs: + try: + input_img = crop_face_image(input_img) + input_head_tensor = gray_resize_for_identity( + to_tensor(input_img).unsqueeze(0).to(device) + ) + input_head_feature = id_face_net(input_head_tensor).detach() + face_id_loss = F.l1_loss(input_head_feature, src_head_feature) + face_id_loss_list.append(face_id_loss.item()) + + except: + continue + if len(face_id_loss_list) > 0: + + return min(face_id_loss_list) # return max similarity view. 
+ else: + return -1 + + +def get_parse(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("-f1", "--folder1", type=str, required=True) + parser.add_argument("-f2", "--folder2", type=str, required=True) + parser.add_argument("--pad", action="store_true") + parser.add_argument("--debug", action="store_true") + args = parser.parse_args() + return args + + +if __name__ == "__main__": + + opt = get_parse() + + input_folder = opt.folder1 + target_folder = opt.folder2 + + valid_txt = os.path.join(input_folder, "front_view.txt") + + target_folder = target_folder[:-1] if target_folder[-1] == "/" else target_folder + + target_key = target_folder.split("/")[-2:] + + save_folder = os.path.join("./exps/metrics", "psnr_results", *target_key) + os.makedirs(save_folder, exist_ok=True) + + with open(valid_txt) as f: + items = f.read().splitlines() + items = [x.split(" ") for x in items] + + results_dict = defaultdict(dict) + face_similarity_list = [] + + for item_ in tqdm(items): + + try: + item, front_view_idx = item_ + front_view_idx = int(front_view_idx) + except: + print(item_) + + target_item_folder = os.path.join(input_folder, item) + input_item_folder = os.path.join(target_folder, item, "rgb") + + if os.path.exists(input_item_folder) and os.path.exists(target_item_folder): + + fs_ = eval(input_item_folder, target_item_folder, front_view_idx) + + if fs_ == -1: + continue + + face_similarity_list.append(fs_) + + results_dict[item]["face_similarity"] = fs_ + if opt.debug: + break + print(results_dict) + + results_dict["all_mean"]["face_similarity"] = np.mean(face_similarity_list) + + write_json(os.path.join(save_folder, "face_similarity.json"), results_dict) diff --git a/tools/metrics/compute_facesimilarity_video.py b/tools/metrics/compute_facesimilarity_video.py new file mode 100644 index 0000000000000000000000000000000000000000..a51ab0eb1e4908c8bd2e2e79d949489e2555520e --- /dev/null +++ b/tools/metrics/compute_facesimilarity_video.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-03 10:29:00 +# @Function : easy to use FaceSimilarity metric + +import os +import pdb +import shutil +import sys + +sys.path.append("./") +from collections import defaultdict + +import numpy as np +import torch +import torch.nn.functional as F +from PIL import Image +from prettytable import PrettyTable +from torch.utils.data import Dataset +from torchmetrics.image import StructuralSimilarityIndexMeasure +from torchmetrics.image.lpip import LearnedPerceptualImagePatchSimilarity +from torchvision import transforms +from tqdm import tqdm + +from openlrm.models.arcface_utils import ResNetArcFace +from openlrm.utils.face_detector import FaceDetector + +device = "cuda" +model_path = "./pretrained_models/gagatracker/vgghead/vgg_heads_l.trcd" +face_detector = FaceDetector(model_path=model_path, device=device) + +id_face_net = ResNetArcFace() +id_face_net.cuda() +id_face_net.eval() + + +def get_image_paths_current_dir(folder_path): + image_extensions = { + ".jpg", + ".jpeg", + ".png", + ".gif", + ".bmp", + ".tiff", + ".webp", + ".jfif", + } + + return sorted( + [ + os.path.join(folder_path, f) + for f in os.listdir(folder_path) + if os.path.splitext(f)[1].lower() in image_extensions + ] + ) + + +def write_json(path, x): + """write a json file. + + Args: + path (str): path to write json file. + x (dict): dict to write. 
+ """ + import json + + with open(path, "w") as f: + json.dump(x, f, indent=2) + + +def crop_face_image(image_path): + rgb = np.array(Image.open(image_path)) + rgb = torch.from_numpy(rgb).permute(2, 0, 1) + bbox = face_detector(rgb) + head_rgb = rgb[:, int(bbox[1]) : int(bbox[3]), int(bbox[0]) : int(bbox[2])] + head_rgb = head_rgb.permute(1, 2, 0) + head_rgb = head_rgb.cpu().numpy() + return head_rgb + + +def gray_resize_for_identity(out, size=128): + out_gray = ( + 0.2989 * out[:, 0, :, :] + 0.5870 * out[:, 1, :, :] + 0.1140 * out[:, 2, :, :] + ) + out_gray = out_gray.unsqueeze(1) + out_gray = F.interpolate( + out_gray, (size, size), mode="bilinear", align_corners=False + ) + return out_gray + + +@torch.no_grad() +def eval(input_folder, target_folder, device="cuda"): + + gt_imgs = get_image_paths_current_dir(target_folder) + result_imgs = get_image_paths_current_dir(input_folder) + + if "visualization" in result_imgs[-1]: + result_imgs = result_imgs[:-1] + + if len(gt_imgs) != len(result_imgs): + return -1 + + to_tensor = transforms.ToTensor() + + face_id_loss_list = [] + + for input_img, gt_img in zip(result_imgs, gt_imgs): + + try: + input_img = crop_face_image(input_img) + input_head_tensor = gray_resize_for_identity( + to_tensor(input_img).unsqueeze(0).to(device) + ) + input_head_feature = id_face_net(input_head_tensor).detach() + + head_img = crop_face_image(gt_img) + head_img = to_tensor(head_img).unsqueeze(0).to(device) + src_head_tensor = gray_resize_for_identity(head_img) + src_head_feature = id_face_net(src_head_tensor).detach() + + face_id_loss = F.l1_loss(input_head_feature, src_head_feature) + face_id_loss_list.append(face_id_loss.item()) + except: + continue + + if len(face_id_loss_list) > 0: + + return np.mean(face_id_loss_list) # return max similarity view. 
+ else: + return -1 + + +def get_parse(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("-f1", "--folder1", type=str, required=True) + parser.add_argument("-f2", "--folder2", type=str, required=True) + parser.add_argument("--pad", action="store_true") + parser.add_argument("--pre", default="") + parser.add_argument("--debug", action="store_true") + args = parser.parse_args() + return args + + +if __name__ == "__main__": + + opt = get_parse() + + input_folder = opt.folder1 + target_folder = opt.folder2 + + valid_txt = os.path.join(input_folder, "front_view.txt") + + target_folder = target_folder[:-1] if target_folder[-1] == "/" else target_folder + + target_key = target_folder.split("/")[-2:] + + save_folder = os.path.join(f"./exps/metrics{opt.pre}", "psnr_results", *target_key) + os.makedirs(save_folder, exist_ok=True) + + with open(valid_txt) as f: + items = f.read().splitlines() + items = [x.split(" ")[0] for x in items] + + results_dict = defaultdict(dict) + face_similarity_list = [] + + for item in tqdm(items): + + target_item_folder = os.path.join(input_folder, item) + input_item_folder = os.path.join(target_folder, item, "rgb") + + if os.path.exists(input_item_folder) and os.path.exists(target_item_folder): + + fs_ = eval(input_item_folder, target_item_folder) + + if fs_ == -1: + continue + + face_similarity_list.append(fs_) + + results_dict[item]["face_similarity"] = fs_ + if opt.debug: + break + print(results_dict) + + results_dict["all_mean"]["face_similarity"] = np.mean(face_similarity_list) + + write_json(os.path.join(save_folder, "face_similarity.json"), results_dict) diff --git a/tools/metrics/compute_facesimilarity_video_anigs.py b/tools/metrics/compute_facesimilarity_video_anigs.py new file mode 100644 index 0000000000000000000000000000000000000000..8622ef53e19ee87e1814b58097fdd8597802f477 --- /dev/null +++ b/tools/metrics/compute_facesimilarity_video_anigs.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-03 10:29:00 +# @Function : easy to use FaceSimilarity metric + +import os +import pdb +import shutil +import sys + +sys.path.append("./") +from collections import defaultdict + +import numpy as np +import torch +import torch.nn.functional as F +from PIL import Image +from prettytable import PrettyTable +from torch.utils.data import Dataset +from torchmetrics.image import StructuralSimilarityIndexMeasure +from torchmetrics.image.lpip import LearnedPerceptualImagePatchSimilarity +from torchvision import transforms +from tqdm import tqdm +from tqlt import utils as tu + +from openlrm.models.arcface_utils import ResNetArcFace +from openlrm.utils.face_detector import FaceDetector + +device = "cuda" +model_path = "./pretrained_models/gagatracker/vgghead/vgg_heads_l.trcd" +face_detector = FaceDetector(model_path=model_path, device=device) + +id_face_net = ResNetArcFace() +id_face_net.cuda() +id_face_net.eval() + + +def get_image_paths_current_dir(folder_path): + image_extensions = { + ".jpg", + ".jpeg", + ".png", + ".gif", + ".bmp", + ".tiff", + ".webp", + ".jfif", + } + + return sorted( + [ + os.path.join(folder_path, f) + for f in os.listdir(folder_path) + if os.path.splitext(f)[1].lower() in image_extensions + ] + ) + + +def write_json(path, x): + """write a json file. + + Args: + path (str): path to write json file. + x (dict): dict to write. 
+ """ + import json + + with open(path, "w") as f: + json.dump(x, f, indent=2) + + +def crop_face_image(image_path): + rgb = np.array(Image.open(image_path)) + rgb = torch.from_numpy(rgb).permute(2, 0, 1) + bbox = face_detector(rgb) + head_rgb = rgb[:, int(bbox[1]) : int(bbox[3]), int(bbox[0]) : int(bbox[2])] + head_rgb = head_rgb.permute(1, 2, 0) + head_rgb = head_rgb.cpu().numpy() + return head_rgb + + +def gray_resize_for_identity(out, size=128): + out_gray = ( + 0.2989 * out[:, 0, :, :] + 0.5870 * out[:, 1, :, :] + 0.1140 * out[:, 2, :, :] + ) + out_gray = out_gray.unsqueeze(1) + out_gray = F.interpolate( + out_gray, (size, size), mode="bilinear", align_corners=False + ) + return out_gray + + +@torch.no_grad() +def eval(input_folder, target_folder, device="cuda"): + + gt_imgs = get_image_paths_current_dir(target_folder) + result_imgs = get_image_paths_current_dir(input_folder) + + gt_imgs = gt_imgs[::4] + result_imgs = result_imgs[::4] + + if "visualization" in result_imgs[-1]: + result_imgs = result_imgs[:-1] + + if len(gt_imgs) != len(result_imgs): + return -1 + + to_tensor = transforms.ToTensor() + + face_id_loss_list = [] + + for input_img, gt_img in zip(result_imgs, gt_imgs): + + try: + input_img = crop_face_image(input_img) + input_head_tensor = gray_resize_for_identity( + to_tensor(input_img).unsqueeze(0).to(device) + ) + input_head_feature = id_face_net(input_head_tensor).detach() + + head_img = crop_face_image(gt_img) + head_img = to_tensor(head_img).unsqueeze(0).to(device) + src_head_tensor = gray_resize_for_identity(head_img) + src_head_feature = id_face_net(src_head_tensor).detach() + + face_id_loss = F.l1_loss(input_head_feature, src_head_feature) + face_id_loss_list.append(face_id_loss.item()) + except: + continue + + if len(face_id_loss_list) > 0: + + return np.mean(face_id_loss_list) # return max similarity view. 
+ else: + return -1 + + +def get_parse(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("-f1", "--folder1", type=str, required=True) + parser.add_argument("-f2", "--folder2", type=str, required=True) + parser.add_argument("--pad", action="store_true") + parser.add_argument("--debug", action="store_true") + args = parser.parse_args() + return args + + +if __name__ == "__main__": + + opt = get_parse() + + input_folder = opt.folder1 + target_folder = opt.folder2 + + save_folder = os.path.join("./exps/metricsanigs", "psnr_results") + os.makedirs(save_folder, exist_ok=True) + + input_folders = tu.next_folders(input_folder) + + results_dict = defaultdict(dict) + + results_dict = defaultdict(dict) + face_similarity_list = [] + + for input_folder in input_folders: + + item_basename = tu.basename(input_folder) + + mask_item_folder = None + input_item_folder = os.path.join(input_folder, "rgb") + target_item_folder = os.path.join(target_folder, item_basename) + + if os.path.exists(input_item_folder) and os.path.exists(target_item_folder): + + fs_ = eval(input_item_folder, target_item_folder) + + if fs_ == -1: + continue + + face_similarity_list.append(fs_) + + results_dict[item_basename]["face_similarity"] = fs_ + if opt.debug: + break + print(results_dict) + + results_dict["all_mean"]["face_similarity"] = np.mean(face_similarity_list) + + write_json(os.path.join(save_folder, "face_similarity.json"), results_dict) diff --git a/tools/metrics/compute_metric_ssim_lipis.py b/tools/metrics/compute_metric_ssim_lipis.py new file mode 100644 index 0000000000000000000000000000000000000000..d9d00c016428e19712f2c332cb43906c9b076b1e --- /dev/null +++ b/tools/metrics/compute_metric_ssim_lipis.py @@ -0,0 +1,284 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-03 10:28:47 +# @Function : easy to use SSIM and LPIPS metric + +import os +import pdb +import shutil +from collections import defaultdict + +import numpy as np +import torch +from PIL import Image +from prettytable import PrettyTable +from torch.utils.data import Dataset +from torchmetrics.image import StructuralSimilarityIndexMeasure +from torchmetrics.image.lpip import LearnedPerceptualImagePatchSimilarity +from torchvision import transforms +from tqdm import tqdm + + +def write_json(path, x): + """write a json file. + + Args: + path (str): path to write json file. + x (dict): dict to write. 
+ """ + import json + + with open(path, "w") as f: + json.dump(x, f, indent=2) + + +def img_center_padding(img_np, pad_ratio=0.2): + + ori_w, ori_h = img_np.shape[:2] + + w = round((1 + pad_ratio) * ori_w) + h = round((1 + pad_ratio) * ori_h) + + img_pad_np = (np.ones((w, h, 3), dtype=img_np.dtype) * 255).astype(np.uint8) + offset_h, offset_w = (w - img_np.shape[0]) // 2, (h - img_np.shape[1]) // 2 + img_pad_np[ + offset_h : offset_h + img_np.shape[0] :, offset_w : offset_w + img_np.shape[1] + ] = img_np + + return img_pad_np, offset_w, offset_h + + +def scan_files_in_dir(directory, postfix=None, progress_bar=None) -> list: + file_list = [] + progress_bar = ( + tqdm(total=0, desc=f"Scanning", ncols=100) + if progress_bar is None + else progress_bar + ) + for entry in os.scandir(directory): + if entry.is_file(): + if postfix is None or os.path.splitext(entry.path)[1] in postfix: + file_list.append(entry) + progress_bar.total += 1 + progress_bar.update(1) + elif entry.is_dir(): + file_list += scan_files_in_dir( + entry.path, postfix=postfix, progress_bar=progress_bar + ) + return file_list + + +class EvalDataset(Dataset): + def __init__(self, gt_folder, pred_folder, height=1024): + self.gt_folder = gt_folder + self.pred_folder = pred_folder + self.height = height + self.data = self.prepare_data() + self.to_tensor = transforms.ToTensor() + + def extract_id_from_filename(self, filename): + # find first number in filename + start_i = None + for i, c in enumerate(filename): + if c.isdigit(): + start_i = i + break + if start_i is None: + assert False, f"Cannot find number in filename {filename}" + return filename[start_i : start_i + 8] + + def prepare_data(self): + gt_files = scan_files_in_dir(self.gt_folder, postfix={".jpg", ".png"}) + + gt_dict = {self.extract_id_from_filename(file.name): file for file in gt_files} + pred_files = scan_files_in_dir(self.pred_folder, postfix={".jpg", ".png"}) + + pred_files = list(filter(lambda x: "visualization" not in x.name, pred_files)) + + tuples = [] + for pred_file in pred_files: + pred_id = self.extract_id_from_filename(pred_file.name) + if pred_id not in gt_dict: + print(f"Cannot find gt file for {pred_file}") + else: + tuples.append((gt_dict[pred_id].path, pred_file.path)) + return tuples + + def resize(self, img): + w, h = img.size + new_w = int(w * self.height / h) + return img.resize((new_w, self.height), Image.LANCZOS) + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + gt_path, pred_path = self.data[idx] + + gt, pred = self.resize(Image.open(gt_path)), self.resize(Image.open(pred_path)) + if gt.height != self.height: + gt = self.resize(gt) + if pred.height != self.height: + pred = self.resize(pred) + gt = self.to_tensor(gt) + pred = self.to_tensor(pred) + return gt, pred + + +def copy_resize_gt(gt_folder, height): + new_folder = os.path.join( + os.path.dirname(gt_folder[:-1] if gt_folder[-1] == "/" else gt_folder), + f"resize_{height}", + ) + if not os.path.exists(new_folder): + os.makedirs(new_folder, exist_ok=True) + for file in tqdm(os.listdir(gt_folder)): + img = Image.open(os.path.join(gt_folder, file)) + img = np.asarray(img) + img, _, _ = img_center_padding(img) + img = Image.fromarray(img) + w, h = img.size + img.save(os.path.join(new_folder, file)) + + return new_folder + + +@torch.no_grad() +def ssim(dataloader): + ssim_score = 0 + ssim = StructuralSimilarityIndexMeasure(data_range=1.0).to("cuda") + for gt, pred in tqdm(dataloader, desc="Calculating SSIM"): + batch_size = gt.size(0) + gt, pred = gt.to("cuda"), 
pred.to("cuda") + ssim_score += ssim(pred, gt) * batch_size + return ssim_score / len(dataloader.dataset) + + +@torch.no_grad() +def lpips(dataloader): + lpips_score = LearnedPerceptualImagePatchSimilarity(net_type="squeeze").to("cuda") + score = 0 + for gt, pred in tqdm(dataloader, desc="Calculating LPIPS"): + batch_size = gt.size(0) + pred = pred.to("cuda") + gt = gt.to("cuda") + # LPIPS needs the images to be in the [-1, 1] range. + gt = (gt * 2) - 1 + pred = (pred * 2) - 1 + score += lpips_score(gt, pred) * batch_size + return score / len(dataloader.dataset) + + +def eval(pred_folder, gt_folder): + # Check gt_folder has images with target height, resize if not + pred_sample = os.listdir(pred_folder)[0] + gt_sample = os.listdir(gt_folder)[0] + + img = Image.open(os.path.join(pred_folder, pred_sample)) + gt_img = Image.open(os.path.join(gt_folder, gt_sample)) + + copy_folder = None + if img.height != gt_img.height: + title = "--" * 30 + "Resizing GT Images to height {img.height}" + "--" * 30 + print(title) + gt_folder = copy_resize_gt(gt_folder, img.height) + print("-" * len(title)) + copy_folder = gt_folder + + # Form dataset + dataset = EvalDataset(gt_folder, pred_folder, img.height) + + dataloader = torch.utils.data.DataLoader( + dataset, + batch_size=16, + num_workers=0, + shuffle=False, + drop_last=False, + ) + + # Calculate Metrics + header = [] + row = [] + + header += ["SSIM", "LPIPS"] + ssim_ = ssim(dataloader).item() + lpips_ = lpips(dataloader).item() + row += [ssim_, lpips_] + + # Print Results + print("GT Folder : ", gt_folder) + print("Pred Folder: ", pred_folder) + table = PrettyTable() + table.field_names = header + table.add_row(row) + + if copy_folder is not None: + shutil.rmtree(copy_folder) + + return ssim_, lpips_ + + +def get_parse(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("-f1", "--folder1", type=str, required=True) + parser.add_argument("-f2", "--folder2", type=str, required=True) + parser.add_argument("--pre", type=str, default="") + parser.add_argument("--pad", action="store_true", help="if the gt pad?") + parser.add_argument("--debug", action="store_true") + args = parser.parse_args() + return args + + +if __name__ == "__main__": + + opt = get_parse() + + input_folder = opt.folder1 + target_folder = opt.folder2 + + valid_txt = os.path.join(input_folder, "front_view.txt") + + target_folder = target_folder[:-1] if target_folder[-1] == "/" else target_folder + + target_key = target_folder.split("/")[-2:] + + save_folder = os.path.join(f"./exps/metrics{opt.pre}", "psnr_results", *target_key) + os.makedirs(save_folder, exist_ok=True) + + with open(valid_txt) as f: + items = f.read().splitlines() + items = [x.split(" ")[0] for x in items] + + results_dict = defaultdict(dict) + lpips_list = [] + ssim_list = [] + + for item in items: + + target_item_folder = os.path.join(input_folder, item) + input_item_folder = os.path.join(target_folder, item, "rgb") + + if os.path.exists(input_item_folder) and os.path.exists(target_item_folder): + + ssim_, lpips_ = eval(input_item_folder, target_item_folder) + + if ssim_ == -1: + continue + + lpips_list.append(lpips_) + ssim_list.append(ssim_) + + results_dict[item]["lpips"] = lpips_ + results_dict[item]["ssim"] = ssim_ + if opt.debug: + break + print(results_dict) + + results_dict["all_mean"]["lpips"] = np.mean(lpips_list) + results_dict["all_mean"]["ssim"] = np.mean(ssim_list) + + write_json(os.path.join(save_folder, "lpips_ssim.json"), results_dict) diff --git 
a/tools/metrics/compute_metric_ssim_lipis_anigs.py b/tools/metrics/compute_metric_ssim_lipis_anigs.py new file mode 100644 index 0000000000000000000000000000000000000000..b8f77362bf1fe3d89c7196df420b9fb6082dfe65 --- /dev/null +++ b/tools/metrics/compute_metric_ssim_lipis_anigs.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-03 10:28:47 +# @Function : easy to use SSIM and LPIPS metric + +import os +import pdb +import shutil +from collections import defaultdict + +import numpy as np +import torch +from PIL import Image +from prettytable import PrettyTable +from torch.utils.data import Dataset +from torchmetrics.image import StructuralSimilarityIndexMeasure +from torchmetrics.image.lpip import LearnedPerceptualImagePatchSimilarity +from torchvision import transforms +from tqdm import tqdm +from tqlt import utils as tu + + +def write_json(path, x): + """write a json file. + + Args: + path (str): path to write json file. + x (dict): dict to write. + """ + import json + + with open(path, "w") as f: + json.dump(x, f, indent=2) + + +def img_center_padding(img_np, pad_ratio=0.2): + + ori_w, ori_h = img_np.shape[:2] + + w = round((1 + pad_ratio) * ori_w) + h = round((1 + pad_ratio) * ori_h) + + img_pad_np = (np.ones((w, h, 3), dtype=img_np.dtype) * 255).astype(np.uint8) + offset_h, offset_w = (w - img_np.shape[0]) // 2, (h - img_np.shape[1]) // 2 + img_pad_np[ + offset_h : offset_h + img_np.shape[0] :, offset_w : offset_w + img_np.shape[1] + ] = img_np + + return img_pad_np, offset_w, offset_h + + +def scan_files_in_dir(directory, postfix=None, progress_bar=None) -> list: + file_list = [] + progress_bar = ( + tqdm(total=0, desc=f"Scanning", ncols=100) + if progress_bar is None + else progress_bar + ) + for entry in os.scandir(directory): + if entry.is_file(): + if postfix is None or os.path.splitext(entry.path)[1] in postfix: + file_list.append(entry) + progress_bar.total += 1 + progress_bar.update(1) + elif entry.is_dir(): + file_list += scan_files_in_dir( + entry.path, postfix=postfix, progress_bar=progress_bar + ) + return file_list + + +class EvalDataset(Dataset): + def __init__(self, gt_folder, pred_folder, height=1024): + self.gt_folder = gt_folder + self.pred_folder = pred_folder + self.height = height + self.data = self.prepare_data() + self.to_tensor = transforms.ToTensor() + + def extract_id_from_filename(self, filename): + # find first number in filename + start_i = None + for i, c in enumerate(filename): + if c.isdigit(): + start_i = i + break + if start_i is None: + assert False, f"Cannot find number in filename {filename}" + return filename[start_i : start_i + 8] + + def prepare_data(self): + gt_files = scan_files_in_dir(self.gt_folder, postfix={".jpg", ".png"}) + + gt_dict = {self.extract_id_from_filename(file.name): file for file in gt_files} + pred_files = scan_files_in_dir(self.pred_folder, postfix={".jpg", ".png"}) + + pred_files = list(filter(lambda x: "visualization" not in x.name, pred_files)) + + tuples = [] + for pred_file in pred_files: + pred_id = self.extract_id_from_filename(pred_file.name) + if pred_id not in gt_dict: + print(f"Cannot find gt file for {pred_file}") + else: + tuples.append((gt_dict[pred_id].path, pred_file.path)) + return tuples + + def resize(self, img): + w, h = img.size + new_w = int(w * self.height / h) + return img.resize((new_w, self.height), Image.LANCZOS) + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): 
+ gt_path, pred_path = self.data[idx] + + gt, pred = self.resize(Image.open(gt_path)), self.resize(Image.open(pred_path)) + if gt.height != self.height: + gt = self.resize(gt) + if pred.height != self.height: + pred = self.resize(pred) + gt = self.to_tensor(gt) + pred = self.to_tensor(pred) + return gt, pred + + +def copy_resize_gt(gt_folder, height): + new_folder = os.path.join( + os.path.dirname(gt_folder[:-1] if gt_folder[-1] == "/" else gt_folder), + f"resize_{height}", + ) + if not os.path.exists(new_folder): + os.makedirs(new_folder, exist_ok=True) + for file in tqdm(os.listdir(gt_folder)): + if os.path.exists(os.path.join(new_folder, file)): + continue + img = Image.open(os.path.join(gt_folder, file)) + img = np.asarray(img) + # img, _, _ = img_center_padding(img) + img = Image.fromarray(img) + w, h = img.size + img.save(os.path.join(new_folder, file)) + return new_folder + + +@torch.no_grad() +def ssim(dataloader): + ssim_score = 0 + ssim = StructuralSimilarityIndexMeasure(data_range=1.0).to("cuda") + for gt, pred in tqdm(dataloader, desc="Calculating SSIM"): + batch_size = gt.size(0) + gt, pred = gt.to("cuda"), pred.to("cuda") + ssim_score += ssim(pred, gt) * batch_size + return ssim_score / len(dataloader.dataset) + + +@torch.no_grad() +def lpips(dataloader): + lpips_score = LearnedPerceptualImagePatchSimilarity(net_type="squeeze").to("cuda") + score = 0 + for gt, pred in tqdm(dataloader, desc="Calculating LPIPS"): + batch_size = gt.size(0) + pred = pred.to("cuda") + gt = gt.to("cuda") + # LPIPS needs the images to be in the [-1, 1] range. + gt = (gt * 2) - 1 + pred = (pred * 2) - 1 + score += lpips_score(gt, pred) * batch_size + return score / len(dataloader.dataset) + + +def eval(pred_folder, gt_folder): + # Check gt_folder has images with target height, resize if not + pred_sample = os.listdir(pred_folder)[0] + gt_sample = os.listdir(gt_folder)[0] + + img = Image.open(os.path.join(pred_folder, pred_sample)) + gt_img = Image.open(os.path.join(gt_folder, gt_sample)) + + copy_folder = None + if img.height != gt_img.height: + title = "--" * 30 + "Resizing GT Images to height {img.height}" + "--" * 30 + print(title) + gt_folder = copy_resize_gt(gt_folder, img.height) + print("-" * len(title)) + copy_folder = gt_folder + + # Form dataset + dataset = EvalDataset(gt_folder, pred_folder, img.height) + + dataloader = torch.utils.data.DataLoader( + dataset, + batch_size=16, + num_workers=0, + shuffle=False, + drop_last=False, + ) + + # Calculate Metrics + header = [] + row = [] + + header += ["SSIM", "LPIPS"] + ssim_ = ssim(dataloader).item() + lpips_ = lpips(dataloader).item() + row += [ssim_, lpips_] + + # Print Results + print("GT Folder : ", gt_folder) + print("Pred Folder: ", pred_folder) + table = PrettyTable() + table.field_names = header + table.add_row(row) + + if copy_folder is not None: + shutil.rmtree(copy_folder) + + return ssim_, lpips_ + + +def get_parse(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("-f1", "--folder1", type=str, required=True) + parser.add_argument("-f2", "--folder2", type=str, required=True) + parser.add_argument("--pre", type=str, default="anigs") + parser.add_argument("--pad", action="store_true") + parser.add_argument("--debug", action="store_true") + args = parser.parse_args() + return args + + +if __name__ == "__main__": + + opt = get_parse() + + input_folder = opt.folder1 + target_folder = opt.folder2 + + save_folder = os.path.join( + f"./exps/metrics{opt.pre}", "psnr_results", "anigs_video" + ) + 
os.makedirs(save_folder, exist_ok=True) + + input_folders = tu.next_folders(input_folder) + + results_dict = defaultdict(dict) + lpips_list = [] + ssim_list = [] + + for input_folder in input_folders: + + item_basename = tu.basename(input_folder) + + mask_item_folder = None + input_item_folder = os.path.join(input_folder, "rgb") + target_item_folder = os.path.join(target_folder, item_basename) + + if os.path.exists(input_item_folder) and os.path.exists(target_item_folder): + + ssim_, lpips_ = eval(input_item_folder, target_item_folder) + + if ssim_ == -1: + continue + + lpips_list.append(lpips_) + ssim_list.append(ssim_) + + results_dict[item_basename]["lpips"] = lpips_ + results_dict[item_basename]["ssim"] = ssim_ + if opt.debug: + break + print(results_dict) + + results_dict["all_mean"]["lpips"] = np.mean(lpips_list) + results_dict["all_mean"]["ssim"] = np.mean(ssim_list) + + write_json(os.path.join(save_folder, "lpips_ssim.json"), results_dict) diff --git a/tools/metrics/compute_psnr.py b/tools/metrics/compute_psnr.py new file mode 100644 index 0000000000000000000000000000000000000000..99d57c3870ec415b45da9c2868a07a8409fe6421 --- /dev/null +++ b/tools/metrics/compute_psnr.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-03 10:28:35 +# @Function : Easy to use PSNR metric +import os +import sys + +sys.path.append("./") + +import math +import pdb + +import cv2 +import numpy as np +import skimage +import torch +from PIL import Image +from tqdm import tqdm + + +def write_json(path, x): + """write a json file. + + Args: + path (str): path to write json file. + x (dict): dict to write. + """ + import json + + with open(path, "w") as f: + json.dump(x, f, indent=2) + + +def img_center_padding(img_np, pad_ratio=0.2, background=1): + + ori_w, ori_h = img_np.shape[:2] + + w = round((1 + pad_ratio) * ori_w) + h = round((1 + pad_ratio) * ori_h) + + if background == 1: + img_pad_np = np.ones((w, h, 3), dtype=img_np.dtype) + else: + img_pad_np = np.zeros((w, h, 3), dtype=img_np.dtype) + offset_h, offset_w = (w - img_np.shape[0]) // 2, (h - img_np.shape[1]) // 2 + img_pad_np[ + offset_h : offset_h + img_np.shape[0] :, offset_w : offset_w + img_np.shape[1] + ] = img_np + + return img_pad_np, offset_w, offset_h + + +def compute_psnr(src, tar): + psnr = skimage.metrics.peak_signal_noise_ratio(tar, src, data_range=1) + return psnr + + +def get_parse(): + import argparse + + parser = argparse.ArgumentParser(description="") + parser.add_argument("-f1", "--folder1", required=True, help="input path") + parser.add_argument("-f2", "--folder2", required=True, help="output path") + parser.add_argument("-m", "--mask", default=None, help="output path") + parser.add_argument("--pre", default="") + parser.add_argument("--debug", action="store_true") + parser.add_argument("--pad", action="store_true", help="if the gt pad?") + args = parser.parse_args() + return args + + +def get_image_paths_current_dir(folder_path): + image_extensions = { + ".jpg", + ".jpeg", + ".png", + ".gif", + ".bmp", + ".tiff", + ".webp", + ".jfif", + } + + return sorted( + [ + os.path.join(folder_path, f) + for f in os.listdir(folder_path) + if os.path.splitext(f)[1].lower() in image_extensions + ] + ) + + +def psnr_compute( + input_data, + results_data, + mask_data=None, + pad=False, +): + + gt_imgs = get_image_paths_current_dir(input_data) + result_imgs = get_image_paths_current_dir(os.path.join(results_data)) + + if mask_data is 
not None: + mask_imgs = get_image_paths_current_dir(mask_data) + else: + mask_imgs = None + + if "visualization" in result_imgs[-1]: + result_imgs = result_imgs[:-1] + + if len(gt_imgs) != len(result_imgs): + return -1 + + psnr_mean = [] + + for mask_i, (gt, result) in tqdm(enumerate(zip(gt_imgs, result_imgs))): + result_img = (cv2.imread(result, cv2.IMREAD_UNCHANGED) / 255.0).astype( + np.float32 + ) + gt_img = (cv2.imread(gt, cv2.IMREAD_UNCHANGED) / 255.0).astype(np.float32) + + if mask_imgs is not None: + mask_img = ( + cv2.imread(mask_imgs[mask_i], cv2.IMREAD_UNCHANGED) / 255.0 + ).astype(np.float32) + mask_img = mask_img[..., -1] + mask_img = np.stack([mask_img] * 3, axis=-1) + mask_img, _, _ = img_center_padding(mask_img, background=0) + + if pad: + gt_img, _, _ = img_center_padding(gt_img) + + h, w, c = result_img.shape + + scale_h = int(h * 512 / w) + + gt_img = cv2.resize(gt_img, (512, scale_h), interpolation=cv2.INTER_AREA) + result_img = cv2.resize( + result_img, (512, scale_h), interpolation=cv2.INTER_AREA + ) + + if mask_imgs is not None: + mask_img = cv2.resize(mask_img, (w, h), interpolation=cv2.INTER_AREA) + gt_img = gt_img * mask_img + 1 - mask_img + result_img = result_img * mask_img + 1 - mask_img + mask_label = mask_img[..., 0] + psnr_mean += [ + compute_psnr(result_img[mask_label > 0.5], gt_img[mask_label > 0.5]) + ] + else: + psnr_mean += [compute_psnr(result_img, gt_img)] + + psnr = np.mean(psnr_mean) + + return psnr + + +if __name__ == "__main__": + + opt = get_parse() + + input_folder = opt.folder1 + target_folder = opt.folder2 + mask_folder = opt.mask + + valid_txt = os.path.join(input_folder, "front_view.txt") + + target_folder = target_folder[:-1] if target_folder[-1] == "/" else target_folder + + if mask_folder is not None: + mask_folder = mask_folder[:-1] if mask_folder[-1] == "/" else mask_folder + + target_key = target_folder.split("/")[-2:] + + save_folder = os.path.join(f"./exps/metrics{opt.pre}", "psnr_results", *target_key) + os.makedirs(save_folder, exist_ok=True) + + with open(valid_txt) as f: + items = f.read().splitlines() + items = [x.split(" ")[0] for x in items] + + results_dict = dict() + psnr_list = [] + + for item in items: + + input_item_folder = os.path.join(input_folder, item) + if mask_folder is not None: + mask_item_folder = os.path.join(mask_folder, item) + else: + mask_item_folder = None + target_item_folder = os.path.join(target_folder, item, "rgb") + + if os.path.exists(input_item_folder) and os.path.exists(target_item_folder): + + psnr = psnr_compute( + input_item_folder, target_item_folder, mask_item_folder, opt.pad + ) + + if psnr == -1: + continue + + psnr_list.append(psnr) + + results_dict[item] = psnr + if opt.debug: + break + print(results_dict) + + results_dict["all_mean"] = np.mean(psnr_list) + + print(save_folder) + + print(results_dict) + write_json(os.path.join(save_folder, "PSNR.json"), results_dict) diff --git a/tools/metrics/compute_psnr_anigs.py b/tools/metrics/compute_psnr_anigs.py new file mode 100644 index 0000000000000000000000000000000000000000..f1f23d781c3f8dd1a1b4aba71a56bf6fda0bcac3 --- /dev/null +++ b/tools/metrics/compute_psnr_anigs.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# @Organization : Alibaba XR-Lab +# @Author : Lingteng Qiu +# @Email : 220019047@link.cuhk.edu.cn +# @Time : 2025-03-03 10:28:35 +# @Function : Easy to use PSNR metric +import os +import sys + +sys.path.append("./") + +import math +import pdb + +import cv2 +import numpy as np +import skimage +import torch +from PIL import Image +from 
tqdm import tqdm +from tqlt import utils as tu + + +def write_json(path, x): + """write a json file. + + Args: + path (str): path to write json file. + x (dict): dict to write. + """ + import json + + with open(path, "w") as f: + json.dump(x, f, indent=2) + + +def img_center_padding(img_np, pad_ratio=0.2, background=1): + + ori_w, ori_h = img_np.shape[:2] + + w = round((1 + pad_ratio) * ori_w) + h = round((1 + pad_ratio) * ori_h) + + if background == 1: + img_pad_np = np.ones((w, h, 3), dtype=img_np.dtype) + else: + img_pad_np = np.zeros((w, h, 3), dtype=img_np.dtype) + offset_h, offset_w = (w - img_np.shape[0]) // 2, (h - img_np.shape[1]) // 2 + img_pad_np[ + offset_h : offset_h + img_np.shape[0] :, offset_w : offset_w + img_np.shape[1] + ] = img_np + + return img_pad_np, offset_w, offset_h + + +def compute_psnr(src, tar): + psnr = skimage.metrics.peak_signal_noise_ratio(tar, src, data_range=1) + return psnr + + +def get_parse(): + import argparse + + parser = argparse.ArgumentParser(description="") + parser.add_argument("-f1", "--folder1", required=True, help="input path") + parser.add_argument("-f2", "--folder2", required=True, help="output path") + parser.add_argument("-m", "--mask", default=None, help="output path") + parser.add_argument("--pre", default="anigs") + parser.add_argument("--debug", action="store_true") + parser.add_argument("--pad", action="store_true") + args = parser.parse_args() + return args + + +def get_image_paths_current_dir(folder_path): + image_extensions = { + ".jpg", + ".jpeg", + ".png", + ".gif", + ".bmp", + ".tiff", + ".webp", + ".jfif", + } + + return sorted( + [ + os.path.join(folder_path, f) + for f in os.listdir(folder_path) + if os.path.splitext(f)[1].lower() in image_extensions + ] + ) + + +def psnr_compute( + input_data, + results_data, + mask_data=None, + pad=False, +): + + gt_imgs = get_image_paths_current_dir(input_data) + result_imgs = get_image_paths_current_dir(os.path.join(results_data)) + + if mask_data is not None: + mask_imgs = get_image_paths_current_dir(mask_data) + else: + mask_imgs = None + + if "visualization" in result_imgs[-1]: + result_imgs = result_imgs[:-1] + + if len(gt_imgs) != len(result_imgs): + return -1 + + gt_imgs = gt_imgs[::4] + result_imgs = result_imgs[::4] + + psnr_mean = [] + + for mask_i, (gt, result) in tqdm(enumerate(zip(gt_imgs, result_imgs))): + result_img = (cv2.imread(result, cv2.IMREAD_UNCHANGED) / 255.0).astype( + np.float32 + ) + gt_img = (cv2.imread(gt, cv2.IMREAD_UNCHANGED) / 255.0).astype(np.float32) + + if mask_imgs is not None: + mask_img = ( + cv2.imread(mask_imgs[mask_i], cv2.IMREAD_UNCHANGED) / 255.0 + ).astype(np.float32) + mask_img = mask_img[..., -1] + mask_img = np.stack([mask_img] * 3, axis=-1) + mask_img, _, _ = img_center_padding(mask_img, background=0) + + if pad: + gt_img, _, _ = img_center_padding(gt_img) + + h, w, c = result_img.shape + + gt_img = cv2.resize(gt_img, (w, h), interpolation=cv2.INTER_AREA) + if mask_imgs is not None: + mask_img = cv2.resize(mask_img, (w, h), interpolation=cv2.INTER_AREA) + gt_img = gt_img * mask_img + 1 - mask_img + result_img = result_img * mask_img + 1 - mask_img + mask_label = mask_img[..., 0] + psnr_mean += [ + compute_psnr(result_img[mask_label > 0.5], gt_img[mask_label > 0.5]) + ] + else: + psnr_mean += [compute_psnr(result_img, gt_img)] + + # Image.fromarray((gt_img * 255).astype(np.uint8)).save("gt.png") + # Image.fromarray((result_img * 255).astype(np.uint8)).save("result.png") + + psnr = np.mean(psnr_mean) + + return psnr + + +if __name__ == 
"__main__": + + opt = get_parse() + + input_folder = opt.folder1 + target_folder = opt.folder2 + mask_folder = opt.mask + + save_folder = os.path.join( + f"./exps/metrics{opt.pre}", "psnr_results", "anigs_video" + ) + os.makedirs(save_folder, exist_ok=True) + + input_folders = tu.next_folders(input_folder) + + results_dict = dict() + psnr_list = [] + + for input_folder in input_folders: + + item_basename = tu.basename(input_folder) + + mask_item_folder = None + input_item_folder = os.path.join(input_folder, "rgb") + target_item_folder = os.path.join(target_folder, item_basename) + + if os.path.exists(input_item_folder) and os.path.exists(target_item_folder): + + psnr = psnr_compute( + input_item_folder, target_item_folder, mask_item_folder, opt.pad + ) + + if psnr == -1: + continue + + psnr_list.append(psnr) + + results_dict[item_basename] = psnr + if opt.debug: + break + print(results_dict) + + results_dict["all_mean"] = np.mean(psnr_list) + write_json(os.path.join(save_folder, "PSNR.json"), results_dict) diff --git a/wheels/diff_gaussian_rasterization-0.0.0-cp310-cp310-linux_x86_64.whl b/wheels/diff_gaussian_rasterization-0.0.0-cp310-cp310-linux_x86_64.whl new file mode 100644 index 0000000000000000000000000000000000000000..45d1bed59b3cdd8f7ab54a3a9db70052c7067f03 --- /dev/null +++ b/wheels/diff_gaussian_rasterization-0.0.0-cp310-cp310-linux_x86_64.whl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18969ebb896d921bc4c54630e5edf990898ee9505c2cc46c4feb3486a959ce54 +size 3373299 diff --git a/wheels/simple_knn-0.0.0-cp310-cp310-linux_x86_64.whl b/wheels/simple_knn-0.0.0-cp310-cp310-linux_x86_64.whl new file mode 100644 index 0000000000000000000000000000000000000000..55fa721f7349d9819b137be2150a946e7998999d --- /dev/null +++ b/wheels/simple_knn-0.0.0-cp310-cp310-linux_x86_64.whl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21ffecc42d12fe431e71ded0297c2b3ab4586b668a432d41e58d7440a15b274d +size 3130569