diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..efc57177347a87f71011ab9aedea1ecaf08667c6 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,20 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+EvalDataset/clips/bear/output_video.mp4 filter=lfs diff=lfs merge=lfs -text
+EvalDataset/clips/bear/output_video_gray.mp4 filter=lfs diff=lfs merge=lfs -text
+EvalDataset/clips/boat/output_video_gray.mp4 filter=lfs diff=lfs merge=lfs -text
+EvalDataset/clips/cows/output_video.mp4 filter=lfs diff=lfs merge=lfs -text
+EvalDataset/clips/cows/output_video_gray.mp4 filter=lfs diff=lfs merge=lfs -text
+EvalDataset/clips/dog/output_video.mp4 filter=lfs diff=lfs merge=lfs -text
+EvalDataset/clips/flamingo/output_video_gray.mp4 filter=lfs diff=lfs merge=lfs -text
+EvalDataset/ref/goat/0000.jpg filter=lfs diff=lfs merge=lfs -text
+EvalDataset/ref/hockey/0000.jpg filter=lfs diff=lfs merge=lfs -text
+EvalDataset/ref/horsejump-high/0000.jpg filter=lfs diff=lfs merge=lfs -text
+EvalDataset/ref/motorbike/0000.jpg filter=lfs diff=lfs merge=lfs -text
+EvalDataset/ref/surf/0000.jpg filter=lfs diff=lfs merge=lfs -text
+examples/bear/video.mp4 filter=lfs diff=lfs merge=lfs -text
+examples/cows/video.mp4 filter=lfs diff=lfs merge=lfs -text
+examples/flamingo/video.mp4 filter=lfs diff=lfs merge=lfs -text
+gradio_cached_examples/13/output/003c3114319372a78bf2f812ebaf0041afa280fb/output_video.mp4 filter=lfs diff=lfs merge=lfs -text
+gradio_cached_examples/13/output/7969adca8ae38cb3b38ff8e7bb54688d942c7bc8/output_video.mp4 filter=lfs diff=lfs merge=lfs -text
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..6a5b7bb32ac229eeab8cead3a765a2d91f5b9406
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,137 @@
+checkpoints/
+wandb/
+.vscode
+.DS_Store
+*ckpt*/
+# Custom
+*.pt
+data/local
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff --git a/README.md b/README.md
index e4715eb76d75c0d1ef4dc50d21e869234a70ba14..cf98d63d5fb06482fac1aba313ae5e3f220c715c 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,6 @@
---
title: ViTExCo
-emoji: ๐
-colorFrom: gray
-colorTo: green
+app_file: app.py
sdk: gradio
sdk_version: 3.40.1
-app_file: app.py
-pinned: false
---
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
diff --git a/UI.py b/UI.py
new file mode 100644
index 0000000000000000000000000000000000000000..033046d4e8709d171221bc145df3422cfeed9e64
--- /dev/null
+++ b/UI.py
@@ -0,0 +1,81 @@
+import streamlit as st
+from PIL import Image
+import torchvision.transforms as transforms
+from streamlit_image_comparison import image_comparison
+import numpy as np
+import torch
+import torchvision
+
+######################################### Utils ########################################
+video_extensions = ["mp4"]
+image_extensions = ["png", "jpg"]
+
+
+def check_type(file_name: str):
+ for image_extension in image_extensions:
+ if file_name.endswith(image_extension):
+ return "image"
+ for video_extension in video_extensions:
+ if file_name.endswith(video_extension):
+ return "video"
+ return None
+
+
+transform = transforms.Compose(
+ [transforms.Resize((256, 256)), transforms.ToTensor(), transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))]
+)
+
+
+###################################### Load model ######################################
+@st.cache_resource
+def load_model():
+ model = torchvision.models.segmentation.deeplabv3_resnet101(pretrained=True)
+ model.eval()
+ return model
+
+
+model = load_model()
+########################################## UI ##########################################
+st.title("Colorization")
+
+uploaded_file = st.file_uploader("Upload grayscale image or video", type=image_extensions + video_extensions)
+if uploaded_file:
+ # Image
+ if check_type(file_name=uploaded_file.name) == "image":
+ image = np.array(Image.open(uploaded_file), dtype=np.float32)
+
+ input_tensor = torchvision.transforms.functional.normalize(
+ torch.tensor(image).permute(2, 0, 1),
+ mean=[0.485, 0.456, 0.406],
+ std=[0.229, 0.224, 0.225],
+ ).unsqueeze(0)
+ process_button = st.button("Process")
+ if process_button:
+            with st.spinner("Từ từ coi..."):
+ prediction = model(input_tensor)
+ segment = prediction["out"][0].permute(1, 2, 0)
+ segment = segment.detach().numpy()
+
+ st.image(segment)
+ st.image(image)
+
+ image_comparison(
+ img1=image,
+ img2=np.array(segment),
+ label1="Grayscale",
+ label2="Colorized",
+ make_responsive=True,
+ show_labels=True,
+ )
+ # Video
+ else:
+ # video = open(uploaded_file.name)
+ st.video("https://youtu.be/dQw4w9WgXcQ")
+
+hide_menu_style = """
+
+ """
+st.markdown(hide_menu_style, unsafe_allow_html=True)
diff --git a/app.py b/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..79efaf4e23a61a2f6b7eb9318b89b04c58d96b73
--- /dev/null
+++ b/app.py
@@ -0,0 +1,215 @@
+import numpy as np
+import shutil
+import os
+import argparse
+import torch
+import glob
+from tqdm import tqdm
+from PIL import Image
+from collections import OrderedDict
+from src.models.vit.config import load_config
+import torchvision.transforms as transforms
+import cv2
+from skimage import io
+
+from src.models.CNN.ColorVidNet import GeneralColorVidNet
+from src.models.vit.embed import GeneralEmbedModel
+from src.models.CNN.NonlocalNet import GeneralWarpNet
+from src.models.CNN.FrameColor import frame_colorization
+from src.utils import (
+ RGB2Lab,
+ ToTensor,
+ Normalize,
+ uncenter_l,
+ tensor_lab2rgb,
+ SquaredPadding,
+ UnpaddingSquare
+)
+
+import gradio as gr
+
+def load_params(ckpt_file):
+ params = torch.load(ckpt_file, map_location=device)
+ new_params = []
+ for key, value in params.items():
+ new_params.append((key, value))
+ return OrderedDict(new_params)
+
+def custom_transform(transforms, img):
+ for transform in transforms:
+ if isinstance(transform, SquaredPadding):
+ img,padding=transform(img, return_paddings=True)
+ else:
+ img = transform(img)
+ return img.to(device), padding
+
+def save_frames(predicted_rgb, video_name, frame_name):
+ if predicted_rgb is not None:
+ predicted_rgb = np.clip(predicted_rgb, 0, 255).astype(np.uint8)
+ # frame_path_parts = frame_path.split(os.sep)
+ # if os.path.exists(os.path.join(OUTPUT_RESULT_PATH, frame_path_parts[-2])):
+ # shutil.rmtree(os.path.join(OUTPUT_RESULT_PATH, frame_path_parts[-2]))
+ # os.makedirs(os.path.join(OUTPUT_RESULT_PATH, frame_path_parts[-2]), exist_ok=True)
+ predicted_rgb = np.transpose(predicted_rgb, (1,2,0))
+ pil_img = Image.fromarray(predicted_rgb)
+ pil_img.save(os.path.join(OUTPUT_RESULT_PATH, video_name, frame_name))
+
+def extract_frames_from_video(video_path):
+ cap = cv2.VideoCapture(video_path)
+ fps = cap.get(cv2.CAP_PROP_FPS)
+
+ # remove if exists folder
+ output_frames_path = os.path.join(INPUT_VIDEO_FRAMES_PATH, os.path.basename(video_path))
+ if os.path.exists(output_frames_path):
+ shutil.rmtree(output_frames_path)
+
+ # make new folder
+ os.makedirs(output_frames_path)
+
+ currentframe = 0
+ frame_path_list = []
+ while(True):
+
+ # reading from frame
+ ret,frame = cap.read()
+
+ if ret:
+ name = os.path.join(output_frames_path, f'{currentframe:09d}.jpg')
+ frame_path_list.append(name)
+ cv2.imwrite(name, frame)
+ currentframe += 1
+ else:
+ break
+
+ cap.release()
+ cv2.destroyAllWindows()
+
+ return frame_path_list, fps
+
+def combine_frames_from_folder(frames_list_path, fps = 30):
+ frames_list = glob.glob(f'{frames_list_path}/*.jpg')
+ frames_list.sort()
+
+ sample_shape = cv2.imread(frames_list[0]).shape
+
+ output_video_path = os.path.join(frames_list_path, 'output_video.mp4')
+ out = cv2.VideoWriter(output_video_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (sample_shape[1], sample_shape[0]))
+ for filename in frames_list:
+ img = cv2.imread(filename)
+ out.write(img)
+
+ out.release()
+ return output_video_path
+
+
+def upscale_image(I_current_rgb, I_current_ab_predict):
+ H, W = I_current_rgb.size
+ high_lab_transforms = [
+ SquaredPadding(target_size=max(H,W)),
+ RGB2Lab(),
+ ToTensor(),
+ Normalize()
+ ]
+ # current_frame_pil_rgb = Image.fromarray(np.clip(I_current_rgb.squeeze(0).permute(1,2,0).cpu().numpy() * 255, 0, 255).astype('uint8'))
+ high_lab_current, paddings = custom_transform(high_lab_transforms, I_current_rgb)
+ high_lab_current = torch.unsqueeze(high_lab_current,dim=0).to(device)
+ high_l_current = high_lab_current[:, 0:1, :, :]
+ high_ab_current = high_lab_current[:, 1:3, :, :]
+ upsampler = torch.nn.Upsample(scale_factor=max(H,W)/224,mode="bilinear")
+ high_ab_predict = upsampler(I_current_ab_predict)
+ I_predict_rgb = tensor_lab2rgb(torch.cat((uncenter_l(high_l_current), high_ab_predict), dim=1))
+ upadded = UnpaddingSquare()
+ I_predict_rgb = upadded(I_predict_rgb, paddings)
+ return I_predict_rgb
+
+def colorize_video(video_path, ref_np):
+ frames_list, fps = extract_frames_from_video(video_path)
+
+ frame_ref = Image.fromarray(ref_np).convert("RGB")
+ I_last_lab_predict = None
+ IB_lab, IB_paddings = custom_transform(transforms, frame_ref)
+ IB_lab = IB_lab.unsqueeze(0).to(device)
+ IB_l = IB_lab[:, 0:1, :, :]
+ IB_ab = IB_lab[:, 1:3, :, :]
+
+ with torch.no_grad():
+ I_reference_lab = IB_lab
+ I_reference_l = I_reference_lab[:, 0:1, :, :]
+ I_reference_ab = I_reference_lab[:, 1:3, :, :]
+ I_reference_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_reference_l), I_reference_ab), dim=1)).to(device)
+ features_B = embed_net(I_reference_rgb)
+
+ video_path_parts = frames_list[0].split(os.sep)
+
+ if os.path.exists(os.path.join(OUTPUT_RESULT_PATH, video_path_parts[-2])):
+ shutil.rmtree(os.path.join(OUTPUT_RESULT_PATH, video_path_parts[-2]))
+ os.makedirs(os.path.join(OUTPUT_RESULT_PATH, video_path_parts[-2]), exist_ok=True)
+
+ for frame_path in tqdm(frames_list):
+ curr_frame = Image.open(frame_path).convert("RGB")
+ IA_lab, IA_paddings = custom_transform(transforms, curr_frame)
+ IA_lab = IA_lab.unsqueeze(0).to(device)
+ IA_l = IA_lab[:, 0:1, :, :]
+ IA_ab = IA_lab[:, 1:3, :, :]
+
+ if I_last_lab_predict is None:
+ I_last_lab_predict = torch.zeros_like(IA_lab).to(device)
+
+ with torch.no_grad():
+ I_current_lab = IA_lab
+ I_current_ab_predict, _ = frame_colorization(
+ IA_l,
+ I_reference_lab,
+ I_last_lab_predict,
+ features_B,
+ embed_net,
+ nonlocal_net,
+ colornet,
+ luminance_noise=0,
+ temperature=1e-10,
+ joint_training=False
+ )
+ I_last_lab_predict = torch.cat((IA_l, I_current_ab_predict), dim=1)
+
+ # IA_predict_rgb = tensor_lab2rgb(torch.cat((uncenter_l(IA_l), I_current_ab_predict), dim=1))
+ IA_predict_rgb = upscale_image(curr_frame, I_current_ab_predict)
+ #IA_predict_rgb = torch.nn.functional.upsample_bilinear(IA_predict_rgb, scale_factor=2)
+ save_frames(IA_predict_rgb.squeeze(0).cpu().numpy() * 255, video_path_parts[-2], os.path.basename(frame_path))
+ return combine_frames_from_folder(os.path.join(OUTPUT_RESULT_PATH, video_path_parts[-2]), fps)
+
+if __name__ == '__main__':
+ # Init global variables
+ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+ INPUT_VIDEO_FRAMES_PATH = 'inputs'
+ OUTPUT_RESULT_PATH = 'outputs'
+ weight_path = 'checkpoints'
+
+ embed_net=GeneralEmbedModel(pretrained_model="swin-tiny", device=device).to(device)
+ nonlocal_net = GeneralWarpNet(feature_channel=128).to(device)
+ colornet=GeneralColorVidNet(7).to(device)
+
+ embed_net.eval()
+ nonlocal_net.eval()
+ colornet.eval()
+
+ # Load weights
+ # embed_net_params = load_params(os.path.join(weight_path, "embed_net.pth"))
+ nonlocal_net_params = load_params(os.path.join(weight_path, "nonlocal_net.pth"))
+ colornet_params = load_params(os.path.join(weight_path, "colornet.pth"))
+
+ # embed_net.load_state_dict(embed_net_params, strict=True)
+ nonlocal_net.load_state_dict(nonlocal_net_params, strict=True)
+ colornet.load_state_dict(colornet_params, strict=True)
+
+ transforms = [SquaredPadding(target_size=224),
+ RGB2Lab(),
+ ToTensor(),
+ Normalize()]
+
+ examples = [[vid, ref] for vid, ref in zip(sorted(glob.glob('examples/*/*.mp4')), sorted(glob.glob('examples/*/*.jpg')))]
+ demo = gr.Interface(colorize_video,
+ inputs=[gr.Video(), gr.Image()],
+ outputs="playable_video",
+ examples=examples,
+ cache_examples=True)
+ demo.launch()
diff --git a/checkpoints/colornet.pth b/checkpoints/colornet.pth
new file mode 100644
index 0000000000000000000000000000000000000000..5a11ecf6fde0aade0a82c4c412145681a168a863
--- /dev/null
+++ b/checkpoints/colornet.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5257ae325e292cd5fb2eff47095e1c4e4815455bd5fb6dc5ed2ee2b923172875
+size 131239411
diff --git a/checkpoints/embed_net.pth b/checkpoints/embed_net.pth
new file mode 100644
index 0000000000000000000000000000000000000000..0439349777f69682d5b01e03b96659ad64c817c9
--- /dev/null
+++ b/checkpoints/embed_net.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fc711755a75c43025dabe9407cbd11d164eaa9e21f26430d0c16c7493410d902
+size 110352261
diff --git a/checkpoints/nonlocal_net.pth b/checkpoints/nonlocal_net.pth
new file mode 100644
index 0000000000000000000000000000000000000000..ee2aae80a5637970c868178d95a16d491e3b4f7e
--- /dev/null
+++ b/checkpoints/nonlocal_net.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b94c6990f20088bc3cc3fe0b29a6d52e6e746b915c506f0cd349fc6ad6197e72
+size 73189765
diff --git a/cmd.txt b/cmd.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b080a831c60ebd081c316bd8f1ac1709173272d5
--- /dev/null
+++ b/cmd.txt
@@ -0,0 +1,21 @@
+python train.py --video_data_root_list datasets/images/images \
+ --flow_data_root_list datasets/flow_fp16/flow_fp16 \
+ --mask_data_root_list datasets/pgm/pgm \
+ --data_root_imagenet datasets/imgnet \
+ --annotation_file_path datasets/final_annot.csv \
+ --imagenet_pairs_file datasets/pairs.txt \
+ --gpu_ids 0 \
+ --workers 12 \
+ --batch_size 2 \
+ --real_reference_probability 0.99 \
+ --weight_contextual 1 \
+ --weight_perceptual 0.1 \
+ --weight_smoothness 5 \
+ --weight_gan 0.9 \
+ --weight_consistent 0.1 \
+ --use_wandb True \
+    --wandb_token "$WANDB_TOKEN" \
+ --wandb_name "vit_tiny_patch16_384_nofeat" \
+ --checkpoint_step 500 \
+ --epoch_train_discriminator 3 \
+ --epoch 20
\ No newline at end of file
diff --git a/cmd_ddp.txt b/cmd_ddp.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0d8e8d82c32b11f646692aa97bd2a76e9a3d51c3
--- /dev/null
+++ b/cmd_ddp.txt
@@ -0,0 +1,20 @@
+!torchrun --nnodes=1 --nproc_per_node=2 train_ddp.py --video_data_root_list $video_data_root_list \
+ --flow_data_root_list $flow_data_root_list \
+ --mask_data_root_list $mask_data_root_list \
+ --data_root_imagenet $data_root_imagenet \
+ --annotation_file_path $annotation_file_path \
+ --imagenet_pairs_file $imagenet_pairs_file \
+ --gpu_ids "0,1" \
+ --workers 2 \
+ --batch_size 2 \
+ --real_reference_probability 0.99 \
+ --weight_contextual 1 \
+ --weight_perceptual 0.1 \
+ --weight_smoothness 5 \
+ --weight_gan 0.9 \
+ --weight_consistent 0.1 \
+    --wandb_token "$WANDB_TOKEN" \
+ --wandb_name "vit_tiny_patch16_384_nofeat" \
+ --checkpoint_step 2000 \
+ --epoch_train_discriminator 2 \
+ --epoch 10
\ No newline at end of file
diff --git a/docs/.gitignore b/docs/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/environment.yml b/environment.yml
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/examples.zip b/examples.zip
new file mode 100644
index 0000000000000000000000000000000000000000..f3a62a9625eab85ad8a0d79f6b954a3fec8bd98b
--- /dev/null
+++ b/examples.zip
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd4531bd3abdec6df90efb0d19fadd54284bdc70d5edfff19752a205159eb4db
+size 6955837
diff --git a/examples/bear/ref.jpg b/examples/bear/ref.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..20e3da29f586ff4adafddd1e5bd7e4a8da4ba9b0
Binary files /dev/null and b/examples/bear/ref.jpg differ
diff --git a/examples/bear/video.mp4 b/examples/bear/video.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..2e3f77af2d48b9ab40bc5b1818c2bc816c703ac4
--- /dev/null
+++ b/examples/bear/video.mp4
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb4cec5064873a4616f78bdb653830683a4842b2a5cfd0665b395cff4d120d04
+size 1263445
diff --git a/examples/boat/ref.jpg b/examples/boat/ref.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a824073c110425a5d071317722fe63833294beea
Binary files /dev/null and b/examples/boat/ref.jpg differ
diff --git a/examples/boat/video.mp4 b/examples/boat/video.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..77d209492bcea9cb76ece5be5979b55b6f98da6c
Binary files /dev/null and b/examples/boat/video.mp4 differ
diff --git a/examples/cows/ref.jpg b/examples/cows/ref.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0e5bd05c7878ed97be25c87fc733e39e12226a96
Binary files /dev/null and b/examples/cows/ref.jpg differ
diff --git a/examples/cows/video.mp4 b/examples/cows/video.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..7809e9e5d985dfd63967e74d2a2d109028aec8c7
--- /dev/null
+++ b/examples/cows/video.mp4
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1ac08603d719cd7a8d71fac76c9318d3e8f1e516e9b3c2a06323a0e4e78f6410
+size 2745681
diff --git a/examples/flamingo/ref.jpg b/examples/flamingo/ref.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0a10d911be10a92e54a669cc4ba917bc3d683db1
Binary files /dev/null and b/examples/flamingo/ref.jpg differ
diff --git a/examples/flamingo/video.mp4 b/examples/flamingo/video.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..8b0e90ba000fa95f8915d28770bd5c9e7606bad0
--- /dev/null
+++ b/examples/flamingo/video.mp4
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a103fd4991a00e419e5236b885fe9d220704ba0a6ac794c87aaa3f62a4f1561
+size 1239570
diff --git a/gradio_cached_examples/13/log.csv b/gradio_cached_examples/13/log.csv
new file mode 100644
index 0000000000000000000000000000000000000000..fb995e3b2badc50a4ca445b09f4718bf1ccd0826
--- /dev/null
+++ b/gradio_cached_examples/13/log.csv
@@ -0,0 +1,5 @@
+output,flag,username,timestamp
+/content/ViTExCo/gradio_cached_examples/13/output/003c3114319372a78bf2f812ebaf0041afa280fb/output_video.mp4,,,2023-08-15 09:45:37.897615
+/content/ViTExCo/gradio_cached_examples/13/output/e6d6153dedeb9fec586b3241311cc49dbc17bc85/output_video.mp4,,,2023-08-15 09:46:01.048997
+/content/ViTExCo/gradio_cached_examples/13/output/7969adca8ae38cb3b38ff8e7bb54688d942c7bc8/output_video.mp4,,,2023-08-15 09:46:34.503322
+/content/ViTExCo/gradio_cached_examples/13/output/74c76e483235b7e80665e32d7fcdcc3da2be7644/output_video.mp4,,,2023-08-15 09:46:58.088903
diff --git a/gradio_cached_examples/13/output/003c3114319372a78bf2f812ebaf0041afa280fb/output_video.mp4 b/gradio_cached_examples/13/output/003c3114319372a78bf2f812ebaf0041afa280fb/output_video.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..ece03d2d27a95954b0d80802385976754e90505c
--- /dev/null
+++ b/gradio_cached_examples/13/output/003c3114319372a78bf2f812ebaf0041afa280fb/output_video.mp4
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b5ab666998e14fb00281a90f8801753eca001a432641ae2770007a8336b4c64e
+size 1213824
diff --git a/gradio_cached_examples/13/output/74c76e483235b7e80665e32d7fcdcc3da2be7644/output_video.mp4 b/gradio_cached_examples/13/output/74c76e483235b7e80665e32d7fcdcc3da2be7644/output_video.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..96013646f3a76c9161c88f081db6dbec0eff5ff1
Binary files /dev/null and b/gradio_cached_examples/13/output/74c76e483235b7e80665e32d7fcdcc3da2be7644/output_video.mp4 differ
diff --git a/gradio_cached_examples/13/output/7969adca8ae38cb3b38ff8e7bb54688d942c7bc8/output_video.mp4 b/gradio_cached_examples/13/output/7969adca8ae38cb3b38ff8e7bb54688d942c7bc8/output_video.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..1eb87f5afa098667ef5ccb41aabbdafe2b54d211
--- /dev/null
+++ b/gradio_cached_examples/13/output/7969adca8ae38cb3b38ff8e7bb54688d942c7bc8/output_video.mp4
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7c367dab34e596f7f0fed34c7e2384525de2ba1824b410d0770bdbd17bc9e72a
+size 1793060
diff --git a/gradio_cached_examples/13/output/e6d6153dedeb9fec586b3241311cc49dbc17bc85/output_video.mp4 b/gradio_cached_examples/13/output/e6d6153dedeb9fec586b3241311cc49dbc17bc85/output_video.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..92b58e5f1f571227c24fbc04291189de1d4f86f1
Binary files /dev/null and b/gradio_cached_examples/13/output/e6d6153dedeb9fec586b3241311cc49dbc17bc85/output_video.mp4 differ
diff --git a/inputs/video.mp4/000000000.jpg b/inputs/video.mp4/000000000.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..4cc97d06e4db87df257e66be4e35f703188cfbca
Binary files /dev/null and b/inputs/video.mp4/000000000.jpg differ
diff --git a/inputs/video.mp4/000000001.jpg b/inputs/video.mp4/000000001.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b6f6d43bc315ae3b7a6536058b394e68f1e4d131
Binary files /dev/null and b/inputs/video.mp4/000000001.jpg differ
diff --git a/inputs/video.mp4/000000002.jpg b/inputs/video.mp4/000000002.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..112bb9a5194fc47550310c22601cdc8acc5324aa
Binary files /dev/null and b/inputs/video.mp4/000000002.jpg differ
diff --git a/inputs/video.mp4/000000003.jpg b/inputs/video.mp4/000000003.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..91b0c77d598bba7334bf6ee5714d5a2748e71891
Binary files /dev/null and b/inputs/video.mp4/000000003.jpg differ
diff --git a/inputs/video.mp4/000000004.jpg b/inputs/video.mp4/000000004.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..91f8dfe650925ff56ee28a2582e849bb044620fb
Binary files /dev/null and b/inputs/video.mp4/000000004.jpg differ
diff --git a/inputs/video.mp4/000000005.jpg b/inputs/video.mp4/000000005.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5913826729f81711e929c4555f82f20150b05d27
Binary files /dev/null and b/inputs/video.mp4/000000005.jpg differ
diff --git a/inputs/video.mp4/000000006.jpg b/inputs/video.mp4/000000006.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ee8b1e30810dfb65d1b9c82b990b22f78f1e4525
Binary files /dev/null and b/inputs/video.mp4/000000006.jpg differ
diff --git a/inputs/video.mp4/000000007.jpg b/inputs/video.mp4/000000007.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..af964d773c8a0932bb1c9339cb782067f21a3305
Binary files /dev/null and b/inputs/video.mp4/000000007.jpg differ
diff --git a/inputs/video.mp4/000000008.jpg b/inputs/video.mp4/000000008.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6af0fb9c7735eb8119b8b329d16883afa75663d8
Binary files /dev/null and b/inputs/video.mp4/000000008.jpg differ
diff --git a/inputs/video.mp4/000000009.jpg b/inputs/video.mp4/000000009.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9a0e45801d3d57beb7fca1337e7f23e95c64039d
Binary files /dev/null and b/inputs/video.mp4/000000009.jpg differ
diff --git a/inputs/video.mp4/000000010.jpg b/inputs/video.mp4/000000010.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..fda58171652b283c51adb2868cdb731da0b74e5a
Binary files /dev/null and b/inputs/video.mp4/000000010.jpg differ
diff --git a/inputs/video.mp4/000000011.jpg b/inputs/video.mp4/000000011.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c73537a50ed92aca64d1828f9174d45d90898472
Binary files /dev/null and b/inputs/video.mp4/000000011.jpg differ
diff --git a/inputs/video.mp4/000000012.jpg b/inputs/video.mp4/000000012.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7b710e7c7b7e2cb8e35b0417020c4ae5772ea5a4
Binary files /dev/null and b/inputs/video.mp4/000000012.jpg differ
diff --git a/inputs/video.mp4/000000013.jpg b/inputs/video.mp4/000000013.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..bb34aff550b49adc1e4cc174581a08c13d0b4492
Binary files /dev/null and b/inputs/video.mp4/000000013.jpg differ
diff --git a/inputs/video.mp4/000000014.jpg b/inputs/video.mp4/000000014.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6bb2f0c8b367da7ad427677cd4d8eb3e8f43e117
Binary files /dev/null and b/inputs/video.mp4/000000014.jpg differ
diff --git a/inputs/video.mp4/000000015.jpg b/inputs/video.mp4/000000015.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..12040c05ed1b713d522e62f8838de3225e28d4af
Binary files /dev/null and b/inputs/video.mp4/000000015.jpg differ
diff --git a/inputs/video.mp4/000000016.jpg b/inputs/video.mp4/000000016.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..efbc2dd46d9fec13f4a5bf7958e751c23ffdac98
Binary files /dev/null and b/inputs/video.mp4/000000016.jpg differ
diff --git a/inputs/video.mp4/000000017.jpg b/inputs/video.mp4/000000017.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..1eb25e126212f109ec29909e57447c6f49952717
Binary files /dev/null and b/inputs/video.mp4/000000017.jpg differ
diff --git a/inputs/video.mp4/000000018.jpg b/inputs/video.mp4/000000018.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..42381e6e1460e5e88f05ce68fc3f68c69765627b
Binary files /dev/null and b/inputs/video.mp4/000000018.jpg differ
diff --git a/inputs/video.mp4/000000019.jpg b/inputs/video.mp4/000000019.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..566c8f9b7ea64cf1b6242111884d01a4e454db3e
Binary files /dev/null and b/inputs/video.mp4/000000019.jpg differ
diff --git a/inputs/video.mp4/000000020.jpg b/inputs/video.mp4/000000020.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..bee6c3f5458d99336456933d018b204b7785c091
Binary files /dev/null and b/inputs/video.mp4/000000020.jpg differ
diff --git a/inputs/video.mp4/000000021.jpg b/inputs/video.mp4/000000021.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d7d2a61c8a02e8057c4edc88bb3e9fb3dcca51b5
Binary files /dev/null and b/inputs/video.mp4/000000021.jpg differ
diff --git a/inputs/video.mp4/000000022.jpg b/inputs/video.mp4/000000022.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..da9c6514e545a6ff9e9b12a5bb04e38eef6b7304
Binary files /dev/null and b/inputs/video.mp4/000000022.jpg differ
diff --git a/inputs/video.mp4/000000023.jpg b/inputs/video.mp4/000000023.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ee8aab46fdb09d1a0ab95f3d9b21f4183f21dbd8
Binary files /dev/null and b/inputs/video.mp4/000000023.jpg differ
diff --git a/inputs/video.mp4/000000024.jpg b/inputs/video.mp4/000000024.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..1873ebfa50ab03e60c9e7c7e3e5844f0ee5e2e24
Binary files /dev/null and b/inputs/video.mp4/000000024.jpg differ
diff --git a/inputs/video.mp4/000000025.jpg b/inputs/video.mp4/000000025.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..e778b7c95d06c59017b8abe7a5ac846fc89363b7
Binary files /dev/null and b/inputs/video.mp4/000000025.jpg differ
diff --git a/inputs/video.mp4/000000026.jpg b/inputs/video.mp4/000000026.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ae32acdc1fe9a5d3615ce3bdc753b6cc64c79c68
Binary files /dev/null and b/inputs/video.mp4/000000026.jpg differ
diff --git a/inputs/video.mp4/000000027.jpg b/inputs/video.mp4/000000027.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..907df4995041d7223bc6615d943329e367ee7867
Binary files /dev/null and b/inputs/video.mp4/000000027.jpg differ
diff --git a/inputs/video.mp4/000000028.jpg b/inputs/video.mp4/000000028.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0aa11d05d704ac7351bb7162b529fafd20c5b78f
Binary files /dev/null and b/inputs/video.mp4/000000028.jpg differ
diff --git a/inputs/video.mp4/000000029.jpg b/inputs/video.mp4/000000029.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..efa8771390695e90c440a773b59171d9047bc3c2
Binary files /dev/null and b/inputs/video.mp4/000000029.jpg differ
diff --git a/inputs/video.mp4/000000030.jpg b/inputs/video.mp4/000000030.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3923d3700b136d6d4579d48f52e5ab0c0300a245
Binary files /dev/null and b/inputs/video.mp4/000000030.jpg differ
diff --git a/inputs/video.mp4/000000031.jpg b/inputs/video.mp4/000000031.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6afc4b41ae471e648a721762e7186f75f7a43b28
Binary files /dev/null and b/inputs/video.mp4/000000031.jpg differ
diff --git a/inputs/video.mp4/000000032.jpg b/inputs/video.mp4/000000032.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0fd34d50f09ab35e807256fecdcaefef27b44d25
Binary files /dev/null and b/inputs/video.mp4/000000032.jpg differ
diff --git a/inputs/video.mp4/000000033.jpg b/inputs/video.mp4/000000033.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..03232e86e8609f96c38c7d328033369e51d3d88f
Binary files /dev/null and b/inputs/video.mp4/000000033.jpg differ
diff --git a/inputs/video.mp4/000000034.jpg b/inputs/video.mp4/000000034.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f97f5675959149ce18bc7f54a0cdda29c02d1468
Binary files /dev/null and b/inputs/video.mp4/000000034.jpg differ
diff --git a/inputs/video.mp4/000000035.jpg b/inputs/video.mp4/000000035.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..09d805df2de7d99283cf0a07192baa87cd10450c
Binary files /dev/null and b/inputs/video.mp4/000000035.jpg differ
diff --git a/inputs/video.mp4/000000036.jpg b/inputs/video.mp4/000000036.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5cd30a2c04127ce8825261153c203556cc90f07a
Binary files /dev/null and b/inputs/video.mp4/000000036.jpg differ
diff --git a/inputs/video.mp4/000000037.jpg b/inputs/video.mp4/000000037.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9344ed2b97c59dc1681f78fd23eebe54af87366f
Binary files /dev/null and b/inputs/video.mp4/000000037.jpg differ
diff --git a/inputs/video.mp4/000000038.jpg b/inputs/video.mp4/000000038.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c0daadedffbedd7a23743ad17fd7081d89003b37
Binary files /dev/null and b/inputs/video.mp4/000000038.jpg differ
diff --git a/inputs/video.mp4/000000039.jpg b/inputs/video.mp4/000000039.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f1e3e7bfd9315b18641a6a616f2f69de1749ad8b
Binary files /dev/null and b/inputs/video.mp4/000000039.jpg differ
diff --git a/inputs/video.mp4/000000040.jpg b/inputs/video.mp4/000000040.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..02f677b5587e23ba970557d62617fe547362bd01
Binary files /dev/null and b/inputs/video.mp4/000000040.jpg differ
diff --git a/inputs/video.mp4/000000041.jpg b/inputs/video.mp4/000000041.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..181eaa715d53bce66d2afd22beb9a20af1e64e6b
Binary files /dev/null and b/inputs/video.mp4/000000041.jpg differ
diff --git a/inputs/video.mp4/000000042.jpg b/inputs/video.mp4/000000042.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..11c824ebc4cd07e0961c0c1109c8e6efbe82eb99
Binary files /dev/null and b/inputs/video.mp4/000000042.jpg differ
diff --git a/inputs/video.mp4/000000043.jpg b/inputs/video.mp4/000000043.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..bd05645e9aa42d95f1adcea4d3d9ba96d066f656
Binary files /dev/null and b/inputs/video.mp4/000000043.jpg differ
diff --git a/inputs/video.mp4/000000044.jpg b/inputs/video.mp4/000000044.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b5e62559a8b28a0d49bd432ba52f0633eb6d35a1
Binary files /dev/null and b/inputs/video.mp4/000000044.jpg differ
diff --git a/inputs/video.mp4/000000045.jpg b/inputs/video.mp4/000000045.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..fab42e8efb1a691b41d0a4ebe5c624ddefa795ee
Binary files /dev/null and b/inputs/video.mp4/000000045.jpg differ
diff --git a/inputs/video.mp4/000000046.jpg b/inputs/video.mp4/000000046.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..912d88e420982b0e538af19356dee2663f492383
Binary files /dev/null and b/inputs/video.mp4/000000046.jpg differ
diff --git a/inputs/video.mp4/000000047.jpg b/inputs/video.mp4/000000047.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..22e05f974fe69f63c7fa4806a01292fe13aa98dc
Binary files /dev/null and b/inputs/video.mp4/000000047.jpg differ
diff --git a/inputs/video.mp4/000000048.jpg b/inputs/video.mp4/000000048.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3e82d5c7434437cf04e025ab5edf28eb304bbc86
Binary files /dev/null and b/inputs/video.mp4/000000048.jpg differ
diff --git a/inputs/video.mp4/000000049.jpg b/inputs/video.mp4/000000049.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..baebb51c5bc15f2886d1802614d5c54af4859476
Binary files /dev/null and b/inputs/video.mp4/000000049.jpg differ
diff --git a/inputs/video.mp4/000000050.jpg b/inputs/video.mp4/000000050.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a996b5e82f47ded7955f9657408fb21b22273b31
Binary files /dev/null and b/inputs/video.mp4/000000050.jpg differ
diff --git a/inputs/video.mp4/000000051.jpg b/inputs/video.mp4/000000051.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5e240319908f48f00fa3d2461f00f896f7533067
Binary files /dev/null and b/inputs/video.mp4/000000051.jpg differ
diff --git a/inputs/video.mp4/000000052.jpg b/inputs/video.mp4/000000052.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c0f303c8b34bc2ae23df6e8086b2d4eede04ba36
Binary files /dev/null and b/inputs/video.mp4/000000052.jpg differ
diff --git a/inputs/video.mp4/000000053.jpg b/inputs/video.mp4/000000053.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5622e5da0ee7f0924cc4d02f3221c659949b7ed8
Binary files /dev/null and b/inputs/video.mp4/000000053.jpg differ
diff --git a/inputs/video.mp4/000000054.jpg b/inputs/video.mp4/000000054.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d6a1fcd16c2cbd4e39958a484f96f02bf9c2076d
Binary files /dev/null and b/inputs/video.mp4/000000054.jpg differ
diff --git a/inputs/video.mp4/000000055.jpg b/inputs/video.mp4/000000055.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7927a7194f76a09b2404d26bb6c1349f243ccfe7
Binary files /dev/null and b/inputs/video.mp4/000000055.jpg differ
diff --git a/inputs/video.mp4/000000056.jpg b/inputs/video.mp4/000000056.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..e938edd8e6432eab4455fa37f2649fda3680e196
Binary files /dev/null and b/inputs/video.mp4/000000056.jpg differ
diff --git a/inputs/video.mp4/000000057.jpg b/inputs/video.mp4/000000057.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7d833168bd8cc9172cc7d0deeb2d1d814b01627d
Binary files /dev/null and b/inputs/video.mp4/000000057.jpg differ
diff --git a/inputs/video.mp4/000000058.jpg b/inputs/video.mp4/000000058.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..97b5b45abd786421f448e67d07778f6261df3afe
Binary files /dev/null and b/inputs/video.mp4/000000058.jpg differ
diff --git a/inputs/video.mp4/000000059.jpg b/inputs/video.mp4/000000059.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c8f37fc6e7d86389d2011727db202c7964b25755
Binary files /dev/null and b/inputs/video.mp4/000000059.jpg differ
diff --git a/inputs/video.mp4/000000060.jpg b/inputs/video.mp4/000000060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d3c66d1c1b3f151939342d383beb164db2b10d83
Binary files /dev/null and b/inputs/video.mp4/000000060.jpg differ
diff --git a/inputs/video.mp4/000000061.jpg b/inputs/video.mp4/000000061.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f3f78930f5d7b883547b93762649dbf6ae86497f
Binary files /dev/null and b/inputs/video.mp4/000000061.jpg differ
diff --git a/inputs/video.mp4/000000062.jpg b/inputs/video.mp4/000000062.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ab763c94bb69bc648a159e45e1d4c58001358cc2
Binary files /dev/null and b/inputs/video.mp4/000000062.jpg differ
diff --git a/inputs/video.mp4/000000063.jpg b/inputs/video.mp4/000000063.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..722ea065f58d5f2ac413cdf0008e45e7e40311b8
Binary files /dev/null and b/inputs/video.mp4/000000063.jpg differ
diff --git a/inputs/video.mp4/000000064.jpg b/inputs/video.mp4/000000064.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..34397f9173ca48037cbceda2fd51e850af2d6be1
Binary files /dev/null and b/inputs/video.mp4/000000064.jpg differ
diff --git a/inputs/video.mp4/000000065.jpg b/inputs/video.mp4/000000065.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a8ee4c73736120debe6f6d9cd75288bffb388d5f
Binary files /dev/null and b/inputs/video.mp4/000000065.jpg differ
diff --git a/inputs/video.mp4/000000066.jpg b/inputs/video.mp4/000000066.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ae6fcd88c7d87b84cd9d3e63ff8c23f7972c4cd6
Binary files /dev/null and b/inputs/video.mp4/000000066.jpg differ
diff --git a/inputs/video.mp4/000000067.jpg b/inputs/video.mp4/000000067.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f8bb0780ae13173d52b8209f3f8316009668be00
Binary files /dev/null and b/inputs/video.mp4/000000067.jpg differ
diff --git a/inputs/video.mp4/000000068.jpg b/inputs/video.mp4/000000068.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f9f485376354d20c3330f3776db91b43366e71af
Binary files /dev/null and b/inputs/video.mp4/000000068.jpg differ
diff --git a/inputs/video.mp4/000000069.jpg b/inputs/video.mp4/000000069.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6d92d4b320630af838b48041acfde8352cd9cfd5
Binary files /dev/null and b/inputs/video.mp4/000000069.jpg differ
diff --git a/inputs/video.mp4/000000070.jpg b/inputs/video.mp4/000000070.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6c8c651e36b94f67b6478f28616b917d1be3d499
Binary files /dev/null and b/inputs/video.mp4/000000070.jpg differ
diff --git a/inputs/video.mp4/000000071.jpg b/inputs/video.mp4/000000071.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..dcb7a91e3ebf28fe475d132ac71001a8d8600e75
Binary files /dev/null and b/inputs/video.mp4/000000071.jpg differ
diff --git a/inputs/video.mp4/000000072.jpg b/inputs/video.mp4/000000072.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..76a9fae73d7656f48500a3f33e8ec38310762157
Binary files /dev/null and b/inputs/video.mp4/000000072.jpg differ
diff --git a/inputs/video.mp4/000000073.jpg b/inputs/video.mp4/000000073.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..8a7b91f122c517b660f68e24aad3a5f0d375a37d
Binary files /dev/null and b/inputs/video.mp4/000000073.jpg differ
diff --git a/inputs/video.mp4/000000074.jpg b/inputs/video.mp4/000000074.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..34e15aa4967beb7054a660cbc80bd597eb9e8fa9
Binary files /dev/null and b/inputs/video.mp4/000000074.jpg differ
diff --git a/inputs/video.mp4/000000075.jpg b/inputs/video.mp4/000000075.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..14f82bf5a78fb5a38884e7a8c05fc9194f19a497
Binary files /dev/null and b/inputs/video.mp4/000000075.jpg differ
diff --git a/inputs/video.mp4/000000076.jpg b/inputs/video.mp4/000000076.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b2c71a14dc9db8e891e876b5f66c6713c5fb735e
Binary files /dev/null and b/inputs/video.mp4/000000076.jpg differ
diff --git a/inputs/video.mp4/000000077.jpg b/inputs/video.mp4/000000077.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..bdc946208c0eb04e74ac69e853dc89917025c8e3
Binary files /dev/null and b/inputs/video.mp4/000000077.jpg differ
diff --git a/inputs/video.mp4/000000078.jpg b/inputs/video.mp4/000000078.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..16ed47aea63e56bb4c74fe19a4c05a48927fa72b
Binary files /dev/null and b/inputs/video.mp4/000000078.jpg differ
diff --git a/inputs/video.mp4/000000079.jpg b/inputs/video.mp4/000000079.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..cfd370710874df663e90c7c39b6012c0d20e6979
Binary files /dev/null and b/inputs/video.mp4/000000079.jpg differ
diff --git a/notebooks/data_preparation.ipynb b/notebooks/data_preparation.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..b2bd1c1a7183b249d2b43097208c12e73fc9992f
--- /dev/null
+++ b/notebooks/data_preparation.ipynb
@@ -0,0 +1,2313 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import cv2\n",
+ "import glob, os\n",
+ "import matplotlib.pyplot as plt\n",
+ "import time\n",
+ "from tqdm import tqdm\n",
+ "\n",
+ "annot_folder = '/home/kuhaku/Code/FPT/VideoColorization/dataset/hollywood/annotations'\n",
+ "vid_folder = '/home/kuhaku/Code/FPT/VideoColorization/dataset/hollywood/videoclips'\n",
+ "saved_folder = '/home/kuhaku/Code/FPT/VideoColorization/dataset/train/input'"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ " 59%|โโโโโโ | 278/475 [01:16<01:23, 2.35it/s][mpeg4 @ 0x562a0e578180] ac-tex damaged at 18 8\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 250\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] P cbpy damaged at 13 11\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 332\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 13 11\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 332\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 4 9\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 265\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 8 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 66\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] illegal mb_num in video packet (448 420) \n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] illegal mb_num in video packet (448 420) \n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 8 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 66\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] illegal mb_num in video packet (0 420) \n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] ac-tex damaged at 25 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 83\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] I cbpy damaged at 21 11\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 340\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 21 11\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 340\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 4 9\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 265\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 8 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 66\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] P cbpy damaged at 3 0\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 3\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 20 13\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 397\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] ac-tex damaged at 26 14\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 432\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] ac-tex damaged at 25 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 83\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] P cbpy damaged at 1 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 349\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 1 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 349\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 4 9\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 265\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 8 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 66\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] I cbpy damaged at 9 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 67\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 20 13\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 397\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] illegal mb_num in video packet (448 420) \n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 24 6\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 198\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 16 4\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 132\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 8 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 66\n",
+ "[mpeg4 @ 0x562a0e578180] I cbpy damaged at 24 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 82\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 1 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 349\n",
+ "[mpeg4 @ 0x562a0e578180] ac-tex damaged at 4 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 352\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 26 8\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 258\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 1 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 349\n",
+ "[mpeg4 @ 0x562a0e578180] ac-tex damaged at 7 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 355\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 24 6\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 198\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 8 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 66\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 16 4\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 132\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 16 4\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 132\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 16 4\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 132\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] ac-tex damaged at 25 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 83\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] P cbpy damaged at 9 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 357\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 9 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 357\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 4 9\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 265\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 8 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 66\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 20 13\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 397\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] P cbpy damaged at 1 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 349\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] P cbpy damaged at 1 12\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 349\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 24 6\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 198\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 16 4\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 132\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] P cbpy damaged at 25 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 83\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] mcbpc damaged at 8 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 66\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] marker does not match f_code\n",
+ "[mpeg4 @ 0x562a0e578180] P cbpy damaged at 27 2\n",
+ "[mpeg4 @ 0x562a0e578180] Error at MB: 85\n",
+ "100%|โโโโโโโโโโ| 475/475 [02:41<00:00, 2.95it/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "vids = glob.glob(vid_folder + '/*.avi')\n",
+ "\n",
+ "for vid in tqdm(vids):\n",
+ " vid_name = os.path.basename(vid).split('.')[0]\n",
+ " vid_name = vid_name.lower().replace(' ', '_')\n",
+ " vid_name = vid_name.replace('(', '').replace(')', '')\n",
+ " vid_name = vid_name.replace(',', '').replace('?', '')\n",
+ " vid_name = vid_name.replace('!', '').replace('\\'', '')\n",
+ " vid_name = vid_name.replace('-', '')\n",
+ " vid_name = vid_name.replace('__', '_')\n",
+ " cap = cv2.VideoCapture(vid)\n",
+ " fps = cap.get(cv2.CAP_PROP_FPS)\n",
+ " total_frame = cap.get(cv2.CAP_PROP_FRAME_COUNT)\n",
+ " ret, frame = cap.read()\n",
+ " index = 0\n",
+ " while ret:\n",
+ " index += 1\n",
+ " if index % 2 == 0:\n",
+ " os.makedirs(os.path.join(saved_folder, vid_name), exist_ok=True)\n",
+ " saved_path = os.path.join(saved_folder, vid_name, \"frame{:06d}.jpg\".format(index))\n",
+ " cv2.imwrite(saved_path, frame)\n",
+ " ret, frame = cap.read()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 27,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import shutil"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 32,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|โโโโโโโโโโ| 475/475 [02:45<00:00, 2.87it/s]\n"
+ ]
+ }
+ ],
+ "source": [
+ "frames_vid_folder = glob.glob(saved_folder + '/*')\n",
+ "for frames_vid in tqdm(frames_vid_folder):\n",
+ " frames_list = glob.glob(frames_vid + '/*.jpg')\n",
+ " os.makedirs(os.path.join(frames_vid, 'gt'), exist_ok=True)\n",
+ " os.makedirs(os.path.join(frames_vid, 'gray'), exist_ok=True)\n",
+ " for frame in frames_list:\n",
+ " image = cv2.imread(frame)\n",
+ " gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n",
+ " cv2.imwrite(os.path.join(frames_vid, 'gray', os.path.basename(frame)), gray_image)\n",
+ " shutil.copyfile(src=frame, dst=os.path.join(frames_vid, 'gt', os.path.basename(frame)))\n",
+ " os.remove(frame)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "vid_list = glob.glob(os.path.join(vid_folder, '*.avi'))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Casablanca\n",
+ "# Double Indemnity\n",
+ "# Forrest Gump - 00776\n",
+ "# Forrest Gump - 00785\n",
+ "# Its A Wonderful Life\n",
+ "# Lost Weekend, The\n",
+ "# Naked City, The"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAigAAAD5CAYAAAAN8M4mAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9SawtS5aeB37LGnff+5zbvCYyXkZmkCkCBYKkAAlQQ40FAZwKnHBIcJ4aKDURJ2xGmnIgaUYQqAFBgmMCHChRDSCQUEFUQahCESQlNpkZ8fp772n2dnczW6sGy3zvfc699zXZRTzwWMR9p9uNb3N3s3/961//EjMznsbTeBpP42k8jafxNH6JRvhFH8DTeBpP42k8jafxNJ7G4/EEUJ7G03gaT+NpPI2n8Us3ngDK03gaT+NpPI2n8TR+6cYTQHkaT+NpPI2n8TSexi/deAIoT+NpPI2n8TSextP4pRtPAOVpPI2n8TSextN4Gr904wmgPI2n8TSextN4Gk/jl248AZSn8TSextN4Gk/jafzSjSeA8jSextN4Gk/jaTyNX7rxBFCextN4Gk/jaTyNp/FLN36hAOV/+B/+B37jN36DaZr483/+z/O//C//yy/ycJ7G03gaT+NpPI2n8UsyfmEA5e///b/Pb/3Wb/HX//pf55/+03/Kf/Af/Af8hb/wF/j8889/UYf0NJ7G03gaT+NpPI1fkiG/qGaBf/7P/3n+k//kP+G//+//ewBUlZ/+9Kf8V//Vf8V/+9/+t9/4XFXlZz/7Gc+ePUNE/jgO92k8jafxNJ7G03gaf8BhZtze3vKTn/yEEL6ZI0l/TMf0YKzryv/6v/6v/NW/+ldPvwsh8F/8F/8F//gf/+O3Hr8sC8uynH7+vd/7Pf7sn/2zfyzH+jSextN4Gk/jaTyNP9zxO7/zO/z6r//6Nz7mFwJQvvzyS1pr/PjHP37w+x//+Mf8s3/2z956/H/33/13/M2/+Tff+v3f+r/+//jqy8TNzT3/4l/8f/h//2//dz799F8QwkyIlWYGFghEMMW0YbbQ2gHKEdoKrYIZAphVQCFGJA2kNCAh0Vqjrau/6cbYPP6qCkMmpcRGSqWYTghRVZGQCEEopSISaKq01kgpESWiGBIh5wFBaFVpTU+fV0TY7XaknBCEWivLUpG848c//dN88MmfYrx6xocfPOfZTrl59bvU9YZWjgRRxmHi7n7lMMPzFx/x7/+5f5/Dmzf8b//k/8n86lOyLQQqaUys1gAIAQRQDRjQtGFACEJgY68CIgZBKa2CJJAr1jWzLJHVBob9h3z44a/ywcc/wSQzl4Xjck/KlcPNZxzvvubF9cfsnv+E9Ow3+ODjX8da4X//p/8P/tX/95+QR8W0ggTGlMkx0KxAAAVQAQ20asQIISgSQEUI4xVp/4xpd8U4TkzDxP76mml3zTAMjHLgzWf/ki9/758zhSNTUEQVNaFqZdWGNqPWxjIfaaZ89PGHXD17zv3dHWstxBBIKZJy5vr6mmfPX7K/vma32zMMI3kciXHw6ypnhnGHhEhKmWG3I6VEiImUB0JIhJCIIZFCJo8jIUVSigzZnx+j/7xdYyEEJARijIiAiCE0YhJyShhQamVejxyXI4f5wKvXX/L6zVfcH16zLLfc3L3h7uYr7m++Yr6/Yykrh3VlXlbu3xy5/+qW+9cH5sOCqTCOO55d7bna7ZjCcLpOQ79Ww+N7xAw1A8wvKgPhbQb0xIr2hwWR7eEg4l+D9Je0B1/fOy6OYRvbc97Pwp6PE87HtH0v4t+e39u2//trbv/625oZIoKoXbyW9Xnyv4UQqaanwzTT82upbU/yQxMBM8wMBSwIJqDaCECUgNj5s5mBIfgM2qOZ7/N48THF5LQ29hPxcO76XJzmT+zi8/bPanqawu38nWbQ7HxuL563XT9cPG97bp/y0/UlQDD/RMp5rYS3z+u7rhER+dbHiciD311+LxfH/a7H
Xj5n+3c5VPX8XH30HOw0j6fpvni9t67bfm+JgOn5/VT1fAz4tVRVL96nH39rSOD0HBHxS47zZ1K21/Xj2w6s0vyx6o8BA/VLwrT5Ot2U9mqFFV7zmmfPnr01T4/HLwSgfN/xV//qX+W3fuu3Tj/f3Nzw05/+lJ99/jWHw57ajCYBi5k0XZPTiITWF8PQL2TDtKA2UZdIS5FgBSsFa83/VQGrQCSESJDgt3OIaExYa0iIfqOIIBf0VGsNkYCIb1QAtVZKKYQYCSEQg5ByxhCGYUBVOR4XvylTRFAMRRBEAkFAor+P4BfpNE3sdjtqrdzf3aOmaF25v7/hWV3Yp5fU1ljWxrxWylKoyxGRRgqJaRqJQ2IYA0bh+sWeq+dXhHpFqIEy3/lGSUBVCcEcZFmgNgUc6DX1xdAMQug3i/ULWgIhJAgJixE0EuNAGveEtEMZCGHH1f4FeTAkJpZVsXyFpR0WRsbdS549f8Ynn/4ev/ev/0+wG5odfW5DxPo59U1CMAmYCHHbF0LbvoE4kvKOcf+Madqx3+3Y7/06SSkTtNLUF5eYEjEaUZWqitZAUGVb0NUgxEAeRmIMmBhmSm1Ks0rTSkiBNGTCEBnGAcJIytmfM4ykNDKMEzEmQszkaSTESMoOikMHJiFE/z5lJCVyHhjHkZwdCKeciSH0Y5a+wfnXlATRSkqRPCYQQc1Y6spaZu7nI6Wt3B1uUG2UVlFrNGu+SYUAIRJCxlDUEkZGydRaoF+P5g9GNpB0sYlsAOVy0ZaLRXrbaB6PS4AS+GaAAtvC+80g5fJ93ve4b0sXP/7rux9vPHj5i432DMb6Am9+XQVwsN/vofjgHeMpyDFV5IRPDCGg6qBPxCCGDpgS0oFFsD73JqfNKYg68OgA6YS9xMH+6fAvsCQC8higICegtIG17Vz3vxL6fXMJNLZ5245R8Pc9/Z4LXMjD8x4v5jyIIMYpUFJ5eF6/C0B532MvH395/b4TGHyH8RgobOMSoGwfWOQMaKT/7xLcPAQu1tdhOc25n+nze4lsAM46hgwP5nv7GxghCKq+byIQ7OE5hTPw8evK+n1qb184dICy3cjmAeM3zfnj8QsBKB9//DExRj777LMHv//ss8/45JNP3nr8OI6M4/jW7//l//EvOc57RBJv7m+wGMjTjkAmRENMMQt9kzWwjFKwYCTLRC20sqLLyjofQRpo7cvCdntdDBFCjCfUHaODlQ1tNozWGjHG0wWpqg8eE6Nv/DFGdrsdrRnH4xE1JcZAa0qrle3cxxBPy5q2RllXhpzJKTGOA1Ubx2Xl9defE6dnDLsdQ4YVZZ5X7m/vqOs9wQpBYdo9Jw0Rkcqb26+52u2JY2b34jlTvOaLn6/MzUgpIaFh5si4maLWCME3fTM9B5OnEAuQhEgG2TYtwdq2SBm1KZYCxEwcB6Z9otSVKl8yt8CgAcyoQBr2XD3/iN31R5RFURSJfWF3aI71c2S+UvaL3jALIELrqN6AGBPDNJHHHYREMxiCR5mlFNSMIBGzguHnr7WGtnY6n37DxxObtP1OrWFNaRqxA36dRGc88jD6NZUc+G7R34n5sC0i9TkWEyRkZ6mCsCGji/jRrw81CPKAhTgvftKvHd91LNCZr0CIkdYq8/HI7c1rXr9+xbzcsSx3LMcjZS1UVUwFSAQzghgpQQyG6oK1RomNUiq1Npq0i6i5L+L98+lltHYJUH6fCz2PnrctoO97vcfg5HHU+/gxl1H195XoeZDy9vFuYK2p8mAtxzfngGBqqBgiAZDTvKlaJ2PkgjVxsNwInfUw31jEHqxaxuV104+RDihOc9kZmL7DncGBnG9recf5Mt46F9v8PQChnD/vA8ZiY2fe8fwHzzm9lbx9ruXh877rdXD6CO94/Lte5wSevsO19L7xLsD0gHE543K03yfxrWO+mLONMTFhg4TWfx1Or/T2eHys1oFL61/Z1iPe/dkuz80f9fiFAJRhGPiP/qP/iN/+7d/mv/wv
/0vAkeRv//Zv85u/+Zvf+XU+/fxT5mWHmqCsxJzJ454UDET7xAsSPKJuWkALOQRSUkIrtGWmypF1qRAqaGNjroIaFvHoRJw9CBEHIMoJrIQQUNNTdLgBkJwzFR7cxLVWMKXW0oFXZl1nYowd2DioqbWnWLIDJTBiCpRSuLm5IaUEBikExJTl9jWvPv8ZLz/8kOvrHbfrwv1h5ubuHtqMNN9UXpB4Me0ZhsQ8HwkpEXY7JCgfffyCxZQvP/8SZ57MmSfp0ZsIKQW25I7hEd+24CqBLAFJE8pIyIlkkVA7gGtKLZWQA0amtkTafUBeFyxdsVajqJAIng7LIzFfM+xeMk5wdyfUckvTgmwIzrddrLNMIQaC9WhQAmL9tUImDSPDtCONAzFmhpgZhwGdBa1rZyMCWhasg5NmzcHYFvF2lkVSdFDT52WjVEutGJCWmWHdsZZCrc6siJizYhcR0oZETR3chhBPrAxihACpg52U4uk6CSEihM6y+HLkmOBig5QIGGp+fNUaa1k5LAdevfqar199yVdffc7Nm1e0ulDqAS0F6+DEJJFzxibDSiGuM2U0buVAbUdaMWqp1NKo0i7YNvUN+RGlfY7C+qIXwgM6Hx5uQu8bIo8WyG1Dew/geNfP79uYLo/h8rHvGu87whjCdpJRZaOKTsyPEDrA3q4BMPF7KorDi1IKtQkxJ0LylK6YnjYQNQUirXlqw1MjnUnB16oTdO3MQrAz4+EHKg+ZlP7Zz1sTbLzPN2HJ81w9nP/Lp7x3U5PH8On83O2cnlJcnBmeePGcLWkF9uAcvuscPwbH35ai+TYW5puuo3elih4/79vYPL1gHs8zqAh93emfPWxBywlhfjOEePAZL36v4OuP2MNz3nN/wXiUSPujHb+wFM9v/dZv8Zf/8l/mP/6P/2P+0//0P+Vv/a2/xf39PX/lr/yV7/wa93c3VBNqa0iCkDLDuCdgHvljvpUGA3HdR4iJmCM5gbQCFqlFsTCAVAieZmnaL4Jtwd/QqnhUu6WRt4sshuhsQV90UkonBsXMumbAp1tVOR4Op43djE6bny/a1hygtNigp4RCcIZlnudOwwnDkPEVq9GWA3WZiQL3xyOvb+65uz1wPQpDnlACpRZKXblKzzHgMC9YHljrwtXHv8KHarw6rLR1pq2FJEKyTqMGIRiEkC5uNumfUwjiKQFkQpggJEiBaXK2qZXCsi6MkyI5onEkpGvi8BwLe1pdqYbno2NA8kDME2ncsxsyEpSb25W6LBc3LhA9JSadNQgS+oIfMRPCsCNNeweveSLlkd20ZzdMpADLAqgSoxACtM5ObItI6MyWdr5bOjAta+mpPcFMaH0DLrVSanWA07Sn+vxfDIbEfo5rBZzRiJ1pcK1SQ62hGrFgiCgxZmJyBm4DxTHGDgT65i/Ro29xSZTGbSOEZsZaK/fHI3f3t9zd3XB3+5qbN6853N2gbaG2GWuV1ozSFLNMShNjiuhQ0DGS8kwMA9UWaIZWcwAWIpIERQn0tAPne+Sd44INefdC3bfKt1iTtyPby2d/EzNzuXE93ojeFSW/S6Pw1iGevpzpdAcGvllYP18QPSKWcPr4nseXDiYd2Kg2TIz9OPHs5QumcU+zRisVs3ZKB5nCvBRqLbRWsOZfW10wrTSMYD10kC2g2M7L9lm3z3FOxbxrzrYMlD/0/DkvaY4H8/b+GXvneGuOTwBHzkTOxbk7/QMPQnEG4J2vxbvB6yWQ+MZU46PfPdaSfFf25DE4uvws7xsdvyKypXwuri3OWOT0dQOgEjpG6cyKcGJazLbruuddvmEOLo7knazWYzbwXVD0D8K4/MIAyl/6S3+JL774gr/21/4an376Kf/hf/gf8o/+0T96Szj7TeMwHwhpoqhBUdKQMd0jahjaI39f4E0qWCZFCKESrKKyYqFRbSYNVzQRrAqmK9oa1VqPeA2rFThHvr4QB1JKLMvClvIRcfHqJZOyUbXrujKOoz8OpZbSxWFG
a7VT+uEUJdNfw5kMIeUBiUapSllnQJDgrwNQDrcc796ANcyE47yiKsQ88vLlNbtpJISINo/YJQ00CexevOTN8Zb0/AUf7yY++/o1690N6y1QZzAlSSAGj8hFAel6gyDoKdwRYhgoTQh5QsLk+ptkrFVZ1wXykdQqcQykPDJM17SyEMKeakpTodTqzENTQh4JceTq2TOef7BDPm28+bJAXTqYMGJIhBhxIQmkYcBCpqlvCtP+OdfPXzJMV6Rxz7i/4vrqmt04YHXh0FZnUARqWUnxrDMyFUJQzOppYUgp0lQpTana87a2pZWcZWnNKKVyPB4ZxompFHbNaE2JGVCjWkVNkBiJHdCCCwdNFYsbWK5EIlhCesqNZh2w+uTHrnParicJgapGii6IK62w1pn7wy03t6948+YrXr/+mru7Nxzvb0EXsIZapTRlXRpqAWu+gQqZGIWUZ1IaWdotzZoLh5eFITiA8mvWTsez3TPb/dEu0j2XC+L7yg0fp3R8YzwvstvfN9AGPEgpvfP13vH+375RvB05h7BtApzu95RTT/+Zs1wxdg1axAggAVWordGzPb70a0ItoBhp2PHhr/yIDz76iKvnzzET1rKwrCutFlJfI1qr7ErFtKFaaHWlzEfm4z3LcqCuM7WsmBoRSAgEZ2tMtWtKNgATTwzh+fPKA/biXRvPN1Erj5mRt/4WwtsbrkiP1j0t5rqYt8/nAzZu+89jkPQN4Pfx476JNXn8+3eBlG96/OUxPE5xfhuDYsH3sIeSVqDz2P0seUBglzMekLABukgS15Y084DJg0lhE2H7fZQcHPdr+/K4Hdv2AO2UWtIH9x9Yf81L9sr8Ovu+iLWPX6hI9jd/8ze/V0rn8ahlobUjzSAkISLENBAluYbAWr9onfJUGhINo9DqjDXDZEDCRBogRC/sqcVA1WlUFcyqh6TBo5ta+2USHbKaGWspZHqOH6F2TYP0zUbVF7C56UngN8aIXgAZAaw1QhrY7XaYGqW2voi5lC6FQEqNELQ/LztI0Ma6No73N8yHA6U4BRzTRB527K9e8MHLFzRt3N3fUVrDYZIw7K+okqgh8cGPPuD5xz+n7PaUPHB8/TmxLNS1oqYuwgxOsCr0xbHHLiFgURy6SSSmkSgDbS3U9UArM6Es1DJDq4whkNOIDtfktEfL4tUryW8eDGLKIBkl8MHLH1H1yHK45fCmOJ0tfgx0UCfiG77EhFkipIH91TOePfuAZ88/ZJj2TNPEMO0YhshSjxyPt7Q6k1FP57CJxXqkauIVV6oeFIsvCK1qXzz8b8363/v1cDh4Cm3c7VnXlbWs5GHyajIJBMJpcVDtgNoM1YYQsKZYUJo2ai1+bUlEmhKTnZgsMU/HqTl4lChb5o1VHVwdljtubl/x1Vef8fVXn/LlF5/y1VefcX93g65HtK0Yns5qCrUapRa0VYJF6gKlGM0EkwjWQZQ6gNkWyiAdpMEJlDyOHH1jt2+loR+Py6jtgfbAX/yd7Mjjr6fnfGO0eH7M+yLlUyrqYtMOW2onhh6dBoxAM0HX5o8VaA1Kr9BrPQdkKEWFabfjo48+4Vd/+ifZP3vGWiu3NzedKanUUj3FSSBGIYdICpk0DAzjnnHaM11dczweWOZ75sMd6/GIakMFginaUwGBfpH0mTXeBdLsweZ/GWlLZ+8w4+Ls9Ii/P8YuZ4gzkOxsQDi/y/k5/d4/bYiPUyMX878xKA/f5f3n8buc9/e9xuUx/GE+/+G8nmgqEHE9UcCDHx5d06fUllw8tweMhFO6hs6WhNjBRQcivm4HX8PU5QuuHexBgOrjW62/90bVnNNvfk54VDXWRblIv7q+//hBVPG8b6xlpckMMWEagYhIJMTcH9EI0VM0QQwTj0jVFt8QqIgMpLRDDWIzoNF0weoKPVW03Sj0KgXtJ65pAzxadG1JL5sT8SqXTZ9g0ErpzAuAITHSUgLCaWMtpSB4HjrnoYtMDSRiCKqQc2YcAqoejU7T
jmH00sLXr98wH+65v79nNQcIORopDqQ8cvXsBaUuvH5zw3GeCdNClcT19ZVvZrVx/fwFLz74CN3vaePAp8db6rr0vcQrigwhhEwQoWmhWj0J9dDGUiPFGlMK7HZXxFy5Ox5cazMcqeVI0uJVTSFSQiLFgRoGUkzklHpBgrIfR6ZpR2kL43TFr/z41zncvma5v8N0RcQcOHXBrN8sXtUjMRKHkWl3xW73nP3uBWnwyh3EweHhcMv97WtECyGpp8pMMen5/b4+b6K1je6utdJaPUXz2pdIU8No6LoSU2YsXsm1rCvH45FxnMipi4jFiBqw1qi1OutRq7+HBoI0NDjIrj2yVIUomawbQAkQWgc72q+/gKIs68JSZo7HW7746ud8+unv8OWXP+frLz/lcH/DzZuvWY73iBVn3dRLs2NIiOCC5roSLNGKsCyF1s5pF+ugDek/m7FFbtvYNqTH1DycmY73UvL2KPD6HpvLN0Wnp9d/z3t/22u+KzLfBM/EgEUHjstaWJaFeSmU0kASMfi1JyGgRmdbHVTmYcdHH/0Kv/Zrf4KrZ885zDNfv37Nzc0dx+M9pazUWljLgqmRgiufhxSZxpHdODLtMnnYcf1iz/76BcfjHfPdLXWdsbLSSkFrdUbwNL8BCRtgvIjuH+av3p5X6wJc6aLbnovZov0oDli3+yf4C5zn/RLsXJyLIFtgYA9omNM1xAVAuTi2bzqn72JW3nVuv+kx73vN7wJ+33Utvp1WOQveN2Dg68oJIXYQ6Y/Wzph4xaeDYtdJd9bxFFAF1+YFf1zSRquVUlcX9jeXF/i+ZidMugUdlxfAeZ7OzMrpnLB9L+eEkLw9V98HqPygAUqzRtHiFJZ6hU2KXn5pAmjrC8e59r/ROm29YJaQMJIGt9GggUjFbKXSMK29JE9oYnj5rC+2tSmlVrIqMSeo5QGVHTFa15+EENyTpetKwE9vrdWj/eAbobYGZjTlotRLiOICUwkw7UZSjhBGlrVAjFxdTTRdubu9RbWyrAthesbu6opEZrpKpGEkTxMU4bAurFohJ1QSz188I2ijzTNTTlxd7YlTJl5NHL7+jJvjgWCuZyhUQq/SGYYrhgBSFuayUNX9Uqq5PsMGZT8O2FJZ14WlKHla0LYiuhK0IWZorcQQGVImp8FFoUAQ5fmzK54/u+Lrr27BEh9/9KvcvfqCV5//nFaspySc4ZLowtFt3lLKDL0sexr3jMOeYdwRUiJKpdQDh8MN8/0bxFYSnvu35umXpv1Gl3P6QXpqS62x1HI6m9sCpHh5e60NWQLhmBkPO8bdjmEcKaWQ80qKLhBtWqEFB799QQGwcI6Ionllh4qitUIIpNTfc4tiQgCUogXMqLXw9ddf8dWrz/n5p7/Dv/03/wdffPG7LPMt83xHK4sLwrV5ClFcquVhti9MWrtwl4bWiOJzEXoKjKZnvwVciEt8e1GDt/Pb7yq5/K5DtnDt0XhMnX8TxX/5nPdtRo/p+O3r+fGC5L45dPGrIqylsdbC3d2B43EDKAoSiDGTx4lxd0UMGSViGAl4+eI5n/zqj7m63jEf7rl584bbN2+4v7/n7v6esi60WtC6enRrRmueSsops9tNXF1fcX19ze5qx5AnputMHHbYOlPnmbIcKfOBdZmdGd7i8F567J/Kz1642Hjs0Tw+qujdAv7TnnSaUXkIPriYv9MsXoAOuCDIeur0TJvB5XcPz+v3q975w3rc4+vnXdfMu66hLYV23ue7ycTFOQCv4kx0xjJ4gYIHyoHWNl8Tr/oyo7OafT8R3ApDnCn3+CEhyUhBMUloqF271lBdXPtmPTBnO6/bdeBM38Z7Cdu9b13Y37//fbCj7xs/aICCuDiVFLs/SSCkDCETkp/yIFspp1dGCBWsEmVAoxKGiEmkokiCFCtBCmtQSpkRa06zSffA2FgUq65DKIWczxHh5cW4XZApJbQU6gZQelXG5UIqQQg5IerK/1orRiCnAUNoJgwxMU57
UhpJQ8Nu7yh1pVSjlIWqjevdyH4/Evd79+mo90xTZn/9jGHaMa8rh3lGUNKYMEkc76/R5Ug53kOrZHHMPqTMOO0IOaNVqVpxRJKRsGOaPuT5iw8xlNe3b3h1e8thUZSIykCxwNKMZV1Z1pnSjFJnWluIAcYcEVMCwjROSJu7OVlA60pdj0w58pNPPmGZbylrZUgTL19+zH53zaH6+bEQToZyG5QQHCgOw8A4TAx5ZEgTu/GamCLNDtzdHLh58zXz8Y5svQqFiBJYm/oGvJ3rXmosKNoaeqq06exBpz3NOAHTtRZyWViKOyGXsrKWhbS6R0xKQrDQhbaVmLOXlKrSaFB9WXCQG4khQTz7jUBnd3oKs1plKUeO68z9/S1ffP4zfv6z3+H//Ff/nN/73f+Tu7uv/H3EhZZD9NmqTYnSU3fmwlfddEYhEfD7S6K5niJ6ZYm2ziaqoU1Pi7NJj5/6Pbctt483gMf08Xcd9uiJpwTDo3vvm0SI3zdafutx/bz797AJ25dSuTuu3B+PHLuYvTahNqU2Zy5GC4Q0IGMmDRkQcs7s9hO1Vl6/fs3xuPDm9pZXr15xe+dpmtZ87UK1gwqPVFvXlHn1nzKvyn5t7K527KeRNOyJecc4LKzHA3O/fsq6YFYQvJhAt5Tp5fR2cCDh4TyGLQXD20zVJcNBP84N5LyLMXnXORDpwk7e3uBP318Cqv5e73qdd73+5fgmBuS76ky+C3vzrsdYT4E4XfIIhNkGAra/OzOM+RplMXb2Vk6XokkPEiT5OfJ3hBCIyfWQ6zKj2txUzfzshygu4G/V1zj1IMxQNiLLX6mnmuxcObUBk/OB/+GNHzRAEdkEOC4uCwbg4sDcBazSoxZXpDVUBZOBEDI5mZt+hQBWvYQqNkQaIl7m2dYFoUEQrLnvRUruCNuaUdYVbCtz1F6NQ9dreK41p4TsdhzF0NaQkByQdJMtp0Dxkl+cfSi1gWx+JJ4KSjGT88Q47hlHQTVwON5QWmVZV1JOfPLJj/nxJz9iZcSsgU5cv9gz7XauLVBjWQtluWfaZ1KemO/eUI53LHdvqId7UhCWWjiuM4Knleq8YoJHfRqoNWNckdKHjNMVpHs0vMFu71kPC+uxonMjH47UeaWqOuqv1elmLcSgtDKDNXKOzOaAb11W7m7ekCTxYn/Fy2fXvHh27cLl0pjyjjGNHPpdnEIgEhA1QgoXN5N46gNnVoRAiiM5J9ayUpaFw73rT/bRmSrBNUKJiGr3Omn1LO4UrxrTWtmcFFX1ZNCmJ6BiqLpmoHSANs/+LwQ3m9t0Cljt0U1wTRLmxrhxM/Qy3JknIiREGk2V0BpBKzQX7C7lyM3da17fvObLrz7ni89+xmef/lt+9rN/xc3NF5jNBBqhCyWb+JyYGVXBrHuniM9dipmYRsQyzYQQmrvVpoCkBLUbg+G6mW8cxikPLe/YsOB7pF02luki1WCP0kff9jrvovS/LS10WoM7ra1ILwsOaPPS4PvDkde3B5Z1BRz0SYAYO5gLZ/+blOPZuDEPKMKbm1vuDkfmeeXucM/t7T3z/YFSV4J1wX+rhICvHRuAUMVqY5WChRViRlImZzcGTClgIRObMmpzEl6grpuBpT1gTexi8z/pDDYhc984N93DOVX3nnm+PFffcl629zy/tzO32722gZ3TuT4d5+b38u7XfxeLsf3+uwHUy+tCHuDTd73nQ47ITufpTC7K5QMfHceFCNfOYv1euOliV1xjqbjXk+G2FzGlXsmY+/riIwTfD0tZWe8OrOvqwZ3DGw/m2awyBJHooBVlkzmIbEFHnwPwIEb6/J/Oj5w/Yj9tp0SVnCbgO40fNEAB15s2dXv1FmCtyjgkXLfu1JdnZ+KJ9woWyXFHs4BS0Ah5vMJaIMbQF+lIU0GL0Vov621KC64R0KYuzmpGtaVnAVxYqeoqeYAoETEYh7Gnhlqn3BxcealodDMts56rNYI1Uk6+sJiz8cESkZEoeyxE0qDs
xDC9JUjk+voFv/En/iQ//dVP+PL2ntYiMPLs+TUhZFqBYJkx7yjzEaqxv07EUKll5vbNG15//YpyOFKPB+abNxzujr6INQEVpEVKgbUuNLmnpSPPuCbEj3jx8a8Q9yv65RfcH35GnWfaMVDL0XPmOQHCMi8cj3ccj2+IY6JqRdPCwowVI84HtAG1YesVASWPAdVCLWuvEhkAnztBCQZREljAWs/TdhbEI0sv81X1GyxLJKoi68wgxpgCMXhZhQQXnQIspUJtSOyGcKFTq7bFDy5idUffbSF1z4mggpbKfDhwHCbGNDEM975YxMyQhlP0o0FPN3xICU1Kq6VHSGBS3G1YE8ECGhoaiqeZtNHmwu39K77++gs+/+JTfvbz3+Xzz3+XN2++5Ouvv6DWmRQ9kqpqxKB9noJHQwqmEUsBkQjS59ZcP1WK0coKVn1BSgFiRMXTl4pCgBDlZOXu5Z/hpHUw2zZAIwZ/nFws0NvYKOKtUsNCBDlvdhsAPMX624vLadk8v9Y7FsNt89h0Fpve5fKx23uJnAGvPyZ0x/kOTMyBbCmN46Fwe7+wLN2ZuJsAGs6Q5lFIMblAO4/kPBKz6+VC9Cqushbm48zd/YHj4UirBelVOCL9esPbeJiKJ4fMwDw97JUbrWtVGkuFPLgzcZRMGrc57j4/taBa8Py2slXnuZWKnZx8TeQBc3WpJNhm7XIT3kCob7CbnX/fSB+fC7k8a1s6oe+6cgZKCqT3MCTn8/r+8d31Ru+p0JGNOdC+Tp+Zi+39TzqaraT3YqaCdC3bNofujtZbsmwf1UX+GFSrlL7WmPZqJjGMQMPZlGYu4ids11lvidEdp4P4mivibudNjTSMzGVBm3Ymxav3xCqBbomxsfyqBIuYOrNySkWxsbecHMW395H+PydbhNY6E4QLtL8Pa/qDBijajGAKoXkfigANpYkgzRe/tDlopnPJXAwDFiFYoHaxogTc6r4lxCI1QAgFiQWrpYtUPKKWFvrN6zb6ulmVBoDuBirQqvc2aDUw7AdGJjeH46SxPi20YuLlv9BdRr1ENARozR0Fk0SSTISwZ62KkRmnPYe7W0ozPvn4R/zk136dl8+ecb8cudp7L6FpnNxdl4GcGh9//AlDiuyHzPPrPfv9RIxwf3/Hl599QVlmrMzcvH7N3Ztb6tyFfBbRClqdhblvrznIxAsbmXYfcP3sBfsXEz9Kzyhr5fXXv0M73qFa0VaRPCEI87xwf/eG6fAVlgtVGi3dY7mwlEo8CloKrc2Y3bPfDUhYqcuB432mrispZy+7BsQq1gkOM99wgwHNdUQbQFnLynGZCaEbyhUl1IXUDdF87nvpYzfnawal4exGF0d7Zif0W9VTGmqcRNGnu7dBo7IcF47DkWnYkYd7hmEgp5mcMimpV/Sot2vwcukAQZHkgDnl5OxMMEICsgOWta3UtjLPd7y++ZIvv/gZn3/xcz77/Od8+dVn3N6/otQjS5mJ4puZgS9qXe8SghIsYQrVCV0vga6N5biwLjNlMbRBK9VddbU5c9kp5lIb7D3V5BVQGygJiHZQ0jcav40MCcZZTPeOmMo2wNYBSF8I/RxfAoktwj898cHLvC9FcEF+n3+2R+9nnJ/X2Vm1TqmrgzglUmrjuFTuj5V5hap+z1orCM6WpV5lllImRzcKzHkgxMRWUdNK9XtrWTjeHyiLa0TGFHtJshfcWvCKxGbnzUDCxTVfVlrK1GaUalQLNPMigpivPO22OVa3QpAGXbvQL/4+Mdp5RzqIvgz+L4DBRYm4mZ0kW2e8Ek7n//F4CDB84jsxw+OztZmzvcV+BXmQhnjf+D4A5a0rcjseudBZSAeNerpk3OdmYwsvjkl61cvJ5Vc23mIzyuPEPkh3BzYTSmu9ArQff/DXMkIXyQOEDiDtJLgPYcElEMrW18uAZV1Za3WdXA8kFE8wYK5HiuBtQ7qoFtzlHK0ovV+d2rmAYAsU6Omo7T5CAGdYrDsMhq6V+67j
hw1QtPeGkW7DTkMkUdsKkgBz9bK4gE86Og0Eopg3eBNnBFoNSApIS1QxSjkSY3YKtmXUiqdMOqV/WU6VxHN41gGQmXUhoQuOmjVK9ecr1dNRIdK5dr8Aqz82bvl9db8L7ULNPGRi9n4tEhJrnTESaYiEYULqjp/8xp/iwx99Qukb6DgMDGNi3F+hLVALxHHHJ7/2U55d7wi68OL5NdfPJq7318z3R/7tv/k35OhKjFevX3Nzd+ebjPUFcmnMBW4P99yutzyvwlojw1QoOvDi5cj18xd88mu/zjy/4vbmU0pbsN64TpJHpms58vrV5xzLLRqdBQss1NqoQGszxzVSdaLUzGH2aqK7g0FbqboQkp8Drb4oVq2IRISKENBWWNeZZT2SlyMhzki8dxCS/aYKmzkW4HtzTxWKxz4xpN7XB0TcuE918w/oi2qnmzdcsjVSVFVn2kpgXo7cHW6RFMnDwKas309XhDS4biNYb4EjJ0o2Jnfv9RWwg3FbOc4HjvM9N3ev+Pqrz/j009/l8y9/xldffsbt3RtKXVjLTIjWBcTOgBnSo/rNByOco2LV7qdRuD+sHO9WjkdoxSuHrHUDuVLcsA8hCOToC5kvaL5A+a1wNpY6U+s+0ZfshL7Fe5zTP+/bVC59U75PJc67xmNR7fkgOpPTTf/aia7u14xx0qHNx5ll9qiUZm5iZw5yowT/FzZjPdeMoGdhZFWlrG4m6WnOilY3WHODxK6xCwmJA2qN6tSaBy8XLTHMXOBclso61B749GqPITEmmE2JdSXWFesMitoFo+Xo9XRP9Jm6mLR+luziHIk9OJPnc7/9591zf/mibwusL87F6Vt79Kd3g5/3v8+3ja2k92IjvWA+Hr7euarGwbCcvj/dVxfpkVP6o7+mPnqtzh2d22xo66yvZwNCf7IL2zeNlT+3eZSE1upBuhm1M/4pDrTWnIHuerqmntbZGo+iiaaVWpVWGtkgBb++Ygiue2nh4vR4FHZ29ebMipoHOieZtfVZtX+HqnhOm7uo286bnTw5JPhGU/tsqrqQMkJv0hZQrfjOMxJTAEtQF7BKKiOtjGgdsJTRFtEQLhaBLgC0Laccu79Qv8j6gheD+0Acj0caivaS3DwMpDgQgzuQEjixNEF8c1IRmqk7iA6JOGSaePXQ2iCEiecvn3P9cqLqzK/9e/8XWsx8+eVX1LUxpYn9bkfOO0qEpa2kOPLB9cT1LrHcv2aaBsZx4IPnL7lpt6zHmTSNXL98xhADHzx/6XS2NtdUtEZp8FENfPnqni++uuHmdmZ/fcDEDdI+/tFHfPzjX+Xmzae8ev0pasa43zFdT6Q8YGEkZjguNxzrDdLbCZR5RVS6/4uxtMZhFnZ3CTEXch4PC3U+cH/3Ncf5DUYlmJFiopn30gkxu6cN3t6g1JWyrsxhJsSZGCJ5n3rpnbdm2wp2TluqBRfCivTFwSOREIViRlNfIPoe4VU3PX200a30RUDriswHr77Jid1uIufEMAwMeWDM2dsx9A15M9AyXFgpujo/bIm7w8yyzNwdbvjyy8/44suf8fVXn/Hq1Rfc371hme8xq4Qk5+aW+IbYrPVotn9m9WvYpGEqSGtYbRwPC3c3M/OxsS4O3nMavaGmGWhFmkfdKQb2w8iQOksgbnV/2jTk4nMJnaPZ2ArfkoI8jMDhYVR++ht9Cb8QCcM3bz7v0xic2JmLnzdQdJGp6IBETtGi0W0MxF2Zj4v73dzd3bEupV8Dfh3EXv7pTGtwjVMPcEoprMcZDd7XqZSK6dYLCmpxndPJjKuzGhIDMWVvvhlc6yZbtBv6JhYzhIRVRddGXRttgDQE4jiiLRBaJbWur6ortEYU6x3DfTakVyxu9P1DJsrOFMmJ9Xg4z45hLv7+jnPzTeftu57Pb9MufZsW6Xz2H77Hg0N4B/uGyGbUfPnA05fTNQWd/rPT88225oHhlAgyc20JvYrzdN77lWfW+2MFD62sNa+p6dYP2i0AJCra
DTQ3qLW23pKi+x2drpmu1UzJ19hSK4u6Z5O2huZAToGIsBkNOhsSeqfqM1AXNsZFPOVN8BYjElBco6byAKJ96/hBAxRrDaSBBFe4SyUGF0t6eZXiFTBdL2JCQ4jBfTw2T48hB4SG1uLGqUMh1xHVHa0d0ZqJIaExIX2xV+sllrHfxGq9twVAoGjtkYde0MzilG+n7zYnya0FvYnQemn0OO5QiTRwFicINRj3y4xKhjix21/x41/9KdN+xUJh9/wFr+8OHI8rYxq4Ggdy7CLbmEgFbzI4ZQaUoE7vigm/9slP+OmPhSkPPH/+jD/x05+wm0akL5ilrDRtNHNnwuNc+Zf/x7/ht/9v/5h/8S9/RrPMsHtBGK65fvmCF8+fM129JKSRIQZ21xPjbqSJcFwX0OobuTSwQgxu/d6KsXJDM1i1AcoxRsYhs8uJ9a6xHA7c33zOvNwSgldAWOjl2VYIZKo5YGxoX3TNGZzQwWvs6ZTeHMeCMwIigaDBGQYRsEqrSi3NX0vctE219vPodKvRtqSds16qp0oHq8a8zJgIw5gZ7ydiDOScGcaRmAeGGAGlOl2BiXrlZwV6BVW5KxwOt3z16gtevf6Kr7/6nNevvuBweMO83HXvHiUEX+AkJGi+IFhfSa3TwR6KbRUCijbBiqFFWeeFVpXc0xGtBswCq63UdUFrcWdlU6+6ioEhZZLE06J8WWl0YkL6MWyskzMtclr8H/hrPKLxL8dlA7X3VW2c1oj+dYuFwzse55VHD8HK6TvZ9BcuI5SQkJhoTVmWwps3N9zd3lGW0iualEj0tIMIFhzpaHNgG2PqpegNoaDSvJeY5268KzUC5sByA7ldJIQFIQ5ePp9S3ILTzlj1tglx8DYTMYIqy7yQciakTJYIaUeahCgJwQF0bcUND2lbosWvd+xk3HWR33kwUSfW5RGAObNg757zt4GDr5HfZft6l8j1+4iiLz/EBlbfV6b+ruds4HpjBE9sERuo3X7LGfDZVgq8vW84Z0J6VYzWRtuML7f1KJwrRJ0l6X5PrXoz1C1d1CUC3iuu0KJ3PfdrGFpptLV04GtM44CcSoYdeI9xT0yVsMzUslBxRtB6NauYQA9C/MD9+DaHZDUHRtZ7hcW0ae0UamEJ932l/G7jBw1QWuvN+4KwtorqjBtXuZEZnb3QprQumEV6ZUdIPSLOQEOComXFWm8qGIdus55JKVNjJKjjUROvSNmi3s1p1EXMD0tAW6suUIvRhVNCrwIIHqWbO0pyov/8ATENfqE1w6SXQjdF10IeA7v9c569/JAXL3+Fab9SOXCYC2VZvGtvGshxQLpYMw2JmrI3nNtq6sW9V1qBD1++4Grcs8sjV1cTP3r5gXspjOMJITdttP7ZWvUUwT//5/+Cn/3eZyz1yLoeWNYDb968YT+5i+sw7onxyPXzawiR+Tgzr0vfGM01Qrp4Xr015uNKLY6+VbyLbhVhEVi6nf1yPDDf3ngDviEhSUD9JnN/CHUH1B61xiAMKZJzcJAShZOGOvp5dHDSr43ex0I62m+9z06zRi12Srttxkba4clZed9TA/i3jYYWd5rNyRvwSRDX0Qyjp3JixMiE4KnJjW1QLRzuD7y+e+3N/b7+gi+/+Jyb26+Zl3vKeqC1FTEXscXel8js7DPZCX6gL3S4i/DmaOxuxuai8KbENPDig4n9/gU5XaMtMi+F169e8+b119zPByirR3IxIv34TyymdcNC3K7cS1Qv40fYlvMNoDzYPLYNh4eR9Kn651HE/O0R8mX5+cO/nTYi2+bq8gF0jVkv7+w9npbamOeZNzc3vHnzhvk4e1oH/H4T33zoQNEauIGkRyShr0n15MCciDJ61VSIDkRYPQXQ3ADSzC3trblM1gvB/Dl0BlfCeV1z5iyAqrOH60qcRlIzdtOeEAcsRLQurJP38KrLQiCxgRT/7H2r/RYgeB7W753zZJ5YhHc8/12gYUtZfJfxXVI33+Ux7wMvj/mR7bcn
YPPW619+rv7JNwRJBDE6adt1M77hiwRqU68MtXpeS9hewoPiXh3Mxmid2iWIF1KABwfLEnrblM6YdnfepluVjleg+ev4C8QwkAdvT7KrhXm+53C8Zy0Hb9ERjChdHLztVeLCXBNfg60FLLgDeM4j47BzdrsVdwrPCaXwXccPGqBsC4xqY3MUNfOcXWvFGQ8LJybDjN5Pxrvb5pyIGZqulLZAzIQ8uctpmtC6ENKIxtxp8eApGutCwI3HA8/fbz+Kb0rSNSbbL72zroOSQGLrgtzaeoo6g3aqrykSvD+HqSAxUKqRRJjSxDg9YxqfoRoRGbA6sy4rdHfZGLukrCugMoGxi/WCKbXnI9f5NXevVgYCX6kQFMYs/OvdiGLuzdEj4IpR+sY9TRPzYWGdb9lPUO9XsJkYG6UufP3qK+7v7kkpE6Lnt+fjgbvDPcdlJYbk2gWMQEXLQl1n2rKyzpXNpjmKEFJETTnUyno89iZ9vflj2BFUkWiMOVDoSn9zC6wsQg6BYUjsp4G0y0jwEj2J4tUostm7dcpWN3AhZyFid8Ss6oLhFHN/XKdr7cJ4zOxUxeWB5wYAGre3vlnX4uLdtRTmeeb6+prnL14yjhOhuteI4mLV17df89lXP+eLLz7jq6+/4PbupldeuHYgiBFCXzI0nCvKmuGksSvz42b0RLdc0HPM6iBUkJDYX+94/uJHfPDBrzCMz9AWuLs9EvOICpS2st4XBCWMA3kYnAkKXkYeOn2cTinRx1tU18I8iqy3KPESMFwOT2EY1vQBQ3O5Frx3rfiWNWQDP1u57em4xO8/AJNA1cbxuHJze8vt7S3zPPu57lUvSOxeNdmvWz9wkISQQUaEkWaZoBOht6LYjc9JYcBQluUe1ftTusjPj69pTStWjaJHprZjmq5O7sghJWc8+9yaumiy9caVpVaqtm7ulonDFeNV6czxkcPhjogSzTcjr7xxdtAn5O202+O5vPx+q8T5Jnhwed7eViJ9t/GtZcuPwNC3lpOfnuNas0uQcgLF73hOb017Yk9OBRCdLTG8W/np/c36/hBOHIxZr7ZprWuYPBW06dla07MWqqffrAP9rRothHgG+b2S5uyCbogkUq8glN5gVJAu/o+M445pd0UeBhRY2+opyO1k9v3MQbuzz2ogIXpT0xwhCfv9lbcyaY2yNJo4jv/u/MkPHaCgPYdqFNw3pAanzs0Cra5IFoKkEz3mvilehiy9j40I5BCwVrHslGfME7Ue3Qcij6wpY0VoZRNMbsJbOsXmw7o4KPQSQ68KMQewoicvnpR8A7L+2LBFmwg59qqK1dDgPWVKCwxhoLXUSxsjpRj3dzN5CJ1daaTkFu8WfLPO3Uk3oOyi58CbVZb5nsPtDbdvPuf+9g3tMNOOK1aVQCOKd++1ECAIjc5kqLNA0zBgRTncz0QqEYW2kKOx3w0cD3e8uXntOXKDr776mnlduF8XmipXO4/ichDaulLmO+b5wLIstNJTtsF9YIImtFbK8chyOPR0XiTEAbATa0Kv5de2EMhoE1qZmY9ui59zIOfo/X2C0/oxJkKIJIlorQ5o1bqbYm97v5VkivY+K87+WLtQ9LeN1O5sWAcmvlqddRfLurgR1+HI7d0t027P1dW1u3/ur3h2/ZyQIrW68Ps4r14+fOPA5Hi4p1klBohZkGCuYSIQAzTo5cN94dvSh63RThU1vQw+yImmtv65k0T3zojO5ojTJKRh4IOPPoLgYt7DzQC6stsN5HF05ueyN0en9s+29xeL/MVPPj1nzcoGPMTeTgO01nqF2zv0Kd+y4XzbeJ9Phy/KTlsXrRyOKzc3t9ze3bEcvfsz1mlyCR4xppE8TMTsaWSJ0dcgicS4I8Y9IiMxXzHuXnB15UyVNUFbZc63zPEN8/yGVu9o9Z5WZ0qtKKtX8Mx4U9NaGXd7WmsM487dore5j77BmlVKmbGjYDEyxD37/RUpDkyDMyb3hxv07hVWGuZcJCC92rAXALxr
ik9ojnN0djGP0n0y3v3kt+f7rFD47ufz8tr5pvFO3dK3vM27wI9nuroHjp1dkR3TbEUZ0DY/pJ7XSb29Rs/cnSwPoFFVWeaVpayu86tuidDrtjpAbV1n0o+ra92sM6DbHIicxc70v9WqXW/nAnk3Ld2dWsFsjUq9AAAQuuPxnni8RYtQSsOi0XoRQIyJmLzHDyEyDDtymlhbpQUj5glDOK4zh8OB5f6WdS3Eb57yB+MHDVAwF4lZdaSJNOq69o0+ouLRg9tRd5qro824LcBBSCFhttJkQWIXoOURrRM6jphOrMvIsrhh2oYBvYOk9dKvnrvp5SCbYGgT3TXzKMSiR1mKdxwGV1hL17HE6JuBg5DspnJxh8gAckWIV2ATpola4HC/cv1i8gtkHNwDRuDQViKBKfUW79b9MrSxrAfuXn/Nmy8/5+vXn1Hub7GlwNKgKTEoUQBRdEvpYNQYWYhuCiYu0qIJOWRiNFq5py73pFBYWGk6uxX9MnN/uGMuCw1jGEemYWBKCa0z6zKzHA+s85GyulCWIEgO0MxZi1JPAq8g9O70dmriZ61hoUFYO7PhEYfWxLrccrx7zd205zom73NkeEPDMLgBXhxcYG2evtIuKl3XlVprr/0XEBeG1db6Tf7weqTf7NbOcYJ0Ek2hLzLaTb0OIK+I0dM+KWemaSKl1PU0jdoqRQvK4joBUyQ4Tatt62/SywR7EWMIfh06q+JRlBspab9PtsaH8iDSVQ1gjeNhZl2+4uZmJsQdKU9ARKt/5qurPWMCWmXIgV2OJFHXFUGvZuO0V50EntipN0uEByyJwel4zvP2jrSCvGejec94V/T8TY/bjuXBieu/2Do3H+e5u/8K3h3c5z4PE/url+x210zTNXkc8D5awRssMpDTNUO+Jg7XjNNLht1z9vuX5LB3+r4W5uWOw/Frbm8+5XD/JfPs3iYEFzHWMmMYrRqruXYrTQWCkKeRGCEGT0mVUqh1pR4hYUjMsFdnuLL3BRr1Bdcf/gqtzty/+gxdCqWpOw2bz4iney7n6azd8EcY79ztt3kP385anKb9Ha/0TSmh7zreDTYesjff6fWFDY1wmf7aGIqNhTsLjP3hKniKrtUTW+hmkF2cupbe5XpjTWBLIm9vvIGJs4svnCqHZBPaNnIXTG8fOah5oYN5MAJCC9UNI3t1n6rbMrTmlV+b78owDJgWyuoMXDBz9jsm8jAw7d3TJ+Ud2oTl/o55mTkeb7zgYV5Z5gNtcf3a9wGfP2iA0uqKmZfdikgv9cVztb1My2hI9K6wQQURPaHaZnjJcQrU2qCL2yS5X4HlAc0DdU3E5OW/kmK/6KSXnZ0xv8BJUIfKpmXyKHK716MDm6IVMe+xgDnqbjRCygzDHmRgtRFhIA3PSHlPjNfsxh/5wpavEYnd60CZ9olgCbKL22pz8zBKRWyFGrG1UMrK/f0NN19/wd2bVyx3NwStpAAhJ1IEKMRId7DtGgUUjYkUM2t10XHF2ZXMwFgax3Lg/v5L7u5eMB8PlPWOdelf55mqK3nIPNsNXE8joo37w8J8OLLMC61UpBmxN/rzRXgzQasuQEy+wUlIKJFqBtqIlrzUXBtmlWgF0cTcvUzevJ5w10UhxMSu9/wZhoGaRyIrkgqtWl98lFqUtTrz4FCggXWR6SZ443Ix6pStyXs2QjulfFpzQaKZsVw8PoTQ3XDdldbZWUOSd5KO0dk2wbGGtY6LDTdIExBzYBI66bytp6fGils3YrpegF4doEItlVrvaXpA9TXaIjGOpwacMcCQAlPKxCGTYiRHgzrT6taqXbqxnZxTMb1sG7soM5StPFsIlzn3d4xTdQ0XKZmLjeE0w+95je+iU+ln6HR8p/MqTmebVQcItbrHkXiVHiqkPHB99QHPn3/E1dVLxmlPTIN3MraIhEyQHXm4ZsjPGcZnjPuP/Ov0jCFeEUioVbQdOS6vubp6zutXV9zeJA6HwLw4G6K6orrQ6uLN3pJiYiwp
M44jOT9jyJ7qMaILvFtF15WxFObjgWXcoSRCAgsD49Uzrl5+RCtHFl3czKWnvbC352nb0B8yEXICKluazjZBynfY+E8pHhEen61vq9z5LjqTd77no2vnO11L+Bpf7WGyQkwegJOmxuVDDG/OV9Z6Yl22cuJWz5ok020N8bSzsmnKAOHUD841IM7aboylM2bQ2plNUnW9plejmWcPdOsy7ppKRDCtVC2EKlR1aww1dy1OKaItOVQSQVImjRPTbuLZ1TNCyrQG87yylMZxLpRlxWqjzgtmC9G8bD7xzUzX5fhBA5RSF2B0MzQ1Eu5H4gJF65UMvTzL3CWRnuMrZSXEQMzepwd8UY0xQ8qQM6YjWkdsyb20L7pupPcf2fLWXNyjEoRAPLEj3gSwG+0EV9jXpogmhjwQLJzcGkPKpDSQ80TKVwzxGVWuSOMLYmdOdtNH5PyMlAZiFGL0iHfYRRdfRneEDM2Q3rejrF04epw5Hu64vX3N7ddfM9/dQK3k1I3goxAFYhyI0Tcaw83pmipNYOipAyQSswOkIIlpFIo2jvev+OqL33GL9/nAutxxONxQrRBzYD8N7KeRKMqyzhwP98zLTKveiTelSJREiM4yeRWOpwEQCHEDKF5Z0nAnxs3yS7USJbgfBQVplbUWDDc9qqrEIOSXH5KoDCKsIYJFTLJXFUVPgTRbu6jMAUBtztJ5O3jrEdMZ8NK/mj7s3EoHGpsnwFuVB7rBnM4INhdhe0L5zBqctBqte6Zshk44KNkAMYJX7XShfTgZOPiDTeys5wCwgPSoCoNWK+vSWJbar52u0Onn52o3cbXfeRVACGhV75LbqjcV6KBk64PV9XnfuEk90AX0RV4u/rZ9/baN69t+//3H9t7es2hdiwOxrYuzevfn3e45z59/xPPnH3N9/ZIQBy+zVDASMe2JcU9O18TtX7iGsMeYIEykNHQPnGtGvWKYBvI0MO0SX3zeKPWAxiPZsndS1+qFg71UuKzORuZhJHU/pTRkphhZttSAKnXxdeC4OhhGKqZCSCO73TNkPaKLeZdrT5q+U3PxeL43n48LBMo3nYZ3MmT27pLlbfxBU3nfNOTifn3488Nj0UeHsOlV+g/dGLLRmp7YEOsaIlWvCDyZOuLgwYPp7e3D6bW63NoTPb3W3ZfgcMFZdWCoFyzWBdNiJieg6AJ/Z0LVqmsYQ+6WN0ptM6G6Rq+UhVpW6L2+Qgx07hOL0ffJ4G0wytpYizIvjdoEteD/1JNU3vTQsweyaZq+w/hBA5SmxVMXKKTYFcucxGGk+OA6F3ETLKSXBXO+CEMMpDTQpNFkJcpECsWFbMuOZXYRmtd8ycWGcc4/qjm1KhKxxhYwY0RvyqQRtUSrDlZqGBg3oaiAxO5yS0bD0Gngj0njB4jtaSUT5BnCDiEy5MCQG3d3r8hT4qNf2ROzNyZbtSBaUSssdeZYFtbbO25vXnO4fcN6OKDrQhYhBTkBLi926l4cvQu0W5ZLzw54PRQhIZKdpZFATpCyUuuB168/9YXGlFIPrOuRMER2U2Y/JaI1jvczd7c3HHobebHOEIhX0njk7zlTrzsRzibLTq1uN/T2NQSh1a2Uz0FZawVdF5a1MK8LVb0Md5/h2Q5iaA7ENEEcUOtVE60bYQW3e3errthtqcVfX7RXwOjJzGpbIpQNOZyjGl+A9S2GM2zVNHiq0kGvz4frEs/btfZFyCNbRWI4XWjCxiz4HAmcQAo96vP55OS0fIqQHV+dI1+tvWFjc1bJQzvWIEi7JoXug1m9PNp0JaDOMm4AJYSTp0vP4TxY6h9vNieQchFxXwKzHqTzNkx5+Bp/eMM9cMxCByfeW2mbp61CZxwGrvYvePbsI66uX5KHnZckdw0aMhDiSIgDEkeUhFbB1kaShkpFQiUOg5sxjgOJxDAFhl1kmox5ecP94SuWWbAWSCH1KsIu0Fbv0VOXhWM6YoiXIo8jKWRU7NSuw1phng+spkgvvU9SoVp3uB1p
9eiaPIO+KDxiS/q3b2l3/PydnFYf8FEPz9NbAAXpG+/3kVH+UYxvB0GnQomNTe1H3bRSSqPUdhK7ttY8JbwFM739hnAhbr14XS6AOuD3jW3MzXZfbKvguQT80gSt1np+vc1r5SI11GqjSEERhhBcLyVGa4sHJQKtevdsRE9+O5s1h1e0RVY12rzSmgGRiiBpYBAIIWO5YTmTYyXpwnI3o2v9zmfiBw1QtpXVBZLhglLsF4BZr/aIkOPJbteBCqfVWUJwHUACjeb9dgalVSPLzLEcWJYdy7qj1YVVzKNSw3PD/TWtdcFrL3XGesGeZNBEbZFmzpAgLridS2E3SBdvJiRk1CJoBDJ5fMYwvkTbjkBGZEeME0NOXO0TKd/z2WdfY2a8+PDXCElIGEkrbTlSlgNlPTAfbri7eUM53rMeDlipiNmpBYB0dsJvgoaKnPpnEIKzGpJorZFCQsU7aoLn5pt5iWvTiurSQUJzx9cMu93AfjeSk1DrkePBqwZKKah6Jcq29Vo3EFO8OkJUuqcI+I7rHjf0rq7Wmv9T78VDiATZbNs9p7qWQlVnR4LAPhnycgQ74kXCEclD76+kGMH1NxJo5l4VIqGzY32xD+djfuC2eap22Ey7wunvfannFPVwkdro7UctnMtpTV0I65dZYOsquxlEiblh3AkZWb82pRuoG46cg7u+0n9W8fL8U48Ug5NVpngjumEEo7ovSmlYLZjAukSOR/ceWlMmJ2FInJuVRXdMDSdPjzPlf6kl2CLPDY9sm9ZGfz8YciG8/f0tFv1lzu9/+buLs9fPnM/Hxo6Vdu7ldIqYu7HaNOy5unrBfvecnHfbWe2Xwvlcq5q3BQiVZisprERJtF75I01QzcS493YI8ow4NNTumPbPGIYdB6IzaM5tgnlfqhAE0UYpBZYFghtLTjERUgf+QU7asrUuzK1CdZZrjEZSB1VizgI3uokkXb5n7waab41tM93moJ+/x+fg+4LJ9wmZt7/9/sDp9wdDJ0+r/n5V3VCxVa/Kc1By1sfV4tq/7Y5X692o8fv7sqT4EqA8BnFBxKtxOIOOLZhx8L4FOvTAsV/r6sUR0q9n689trXqhQIq0FkldZNx09ePQhuDH3lrt+6RXCKWcMQmsCtYKaoGcByy69ikPmTQo0ipBJ5IUdLmjpHTaN77L+EEDlGU5kuIOIVNrITfXTvhZc7tzM+86LNH6AiyEMBATxKhIaMToG+iQAjYqVkav9dbEIa5Qb1nKFUVnt7wX10Rs3ZPPzRS0u6IKtay9o2RkrYZqII8vefHBJ/zkJ/8erTa+/PJnvH79cxoLxOgt2CViMSMyYTYSwjUSnlFLxBhJTIS4Yxgi4xgp5TVffPFzikZ+bX5JjpmcIFthOd5yuH3NMh+4ufmK4/0twRr0Y4+4pX1Qp+22zV5poKELqnrJs4BEL2FrFlhrpTSlqlssF/UIIkR3rGytMc8rao08JIahmwaxNa0qXn0pnnIR1Bsvqkd4p54V3QtRmwu2fKq7YZEoSoXqBbfS0bs3WuqRjYJY801aK4f71+QU+SwU6jwyxZm5GxcNIVExb8bWG+aZRFpzC3MHDtvN5a6sQQTiVvVzXkBVz+K2jYb19I92lu1CQ+E5HGL0svNNdK12pkKtsyv0Z/gv7WJhbn1D9cqNy8jVtG8XYht/cuqoG0RAIxYEbWDdI2jIQgwjmOscBK9ioTWqFtZ19SBgB9O0Y9olchByF2XLKVjg5EiKbH1dLiJG7UCMCwblfeMEYh7++n0b0zdtaN/2Xp10cqsABat6Kg3fjluI5Dyx218z7fbe+C8ESl177YWXlHr/kgiyEmNG1JssWp0dAMdAqYHSIlRjsgHC6P1T8kCMCdnAX05IDdRWQStD6oZuKN5aoLGWxQFP8nL1LPHUiiCnTG2KtdV78QQHLkP2a8g6KHHNdb9/uLSwv/zudFpwoA1bsmMrdd/8vP6gWpE/7nFBXrxFqDxmAUstLIuX4q7r2vlO
D0pcs9R6s1T3aRIudCSP80Xg7Kk8ZK1OlUrigdLGUjWasysinXn08xUkuoatbaBney0HRhv7Y6bUUtwaYBzJMXgpcDfJlCDo2mi1uRwiBCQmCF4s0bShCnnckae9a65aIabIOq8gobuDK1WhNr4XQ/aDBigJ38wkZTz/v3kwCENWQliJKZBiQEJhTAM5QUyFISfGobKbMrurxLQPBGk93ZHRpqxLQNhT254WX2IDkAVJMN/fOaVaaq+OEIpWjMCQEskC1s2Y1Jyyma5f8pNf+9P8xk//HPOyEOMzjmul1NeEMVO1ECUxDNeY7EjxBdg1tU60lhGyg5NxZByMWo988eWnzPWAhT11namxUMqR5XDDcnjNcrjh9s0blsMtQVZPJyXIbLbk9IqWnn4InR0wZYvTJHq5qW/WSlVvQLZiLGp+4alg2vvGNNcklLoiyTv3jjl7d1xxxmpLNmTxfKf1JnSttlPO1b0AYFv5tkjXWVXvLO3tdn0zLAoyCDVBTiMhTISUgdo3SNe1HJYb0l2Blhmo0D1RVmAt6n2TFDSMqK5g4v0rqgvKmrnINQRPhTQ7a0js0g+F89q2pTziVnqoemqR4NqKQIjZr6Ut8pTNodZ/50DRo2DtOCn2bsS6+aBsBndbeq7nsU+i7SCd3HN/FM8TgzbPLwdCNwuLxCgweLoy4KxYXdfe+NaYhsw0ZnZjdI+dgLNaG9Mhm5CYLdbDHXr7SVRBxIgXS76K9fnswl/6omzb8+0B2/JN4xKEvCvCftfzL3+j/Xnuq1TRWvw6aF6Iiwh53DFdXTHuJyRHqlVvWFodEKo6XR57Y8HW1r7I+xxYCFiLiI60aux2A7RIXZWqK+syc3d3cIZyiMQxE1pEa3JhsW4ePIpQUVvJaSCIUsvSnW8zQ47EIUMMfg/XcjLtS9k7XEs0YgDNmVIDVDzNaxtgCx3gnoHIdj9eXvsYhAbWr4OtFPa8KV6wBY/G5rnyznMjZ33T9vzvXBl08XoPn7OZwb89zKwnm3nA6p20I1VZ1pX744FlWbvmhG5f4I9trbn7siqtOkjcgpUNgJyuZdUONjZgdA40dDMA3T57EL8/++ZPfz3bFPObBUZPKTtQpuvPhJTd/8QUrFXWxaileUowpS6s9fSUL52pP168YEQS1ZxpDikRw+CWHICKUM07J7eqhFKwckS723L4HoXGP2iA4vmxgtYFDcK6uqArWyOExjgOpGSIVHJKpKQMozGNgTQaeTDGSdjtI+MUGXJiGLz3SC1CisoaK2ovkKCEFMlD9AZKQViO96i6d0aI3jFUOrIM5kIhJBPzjpA/4IOXP+Xq6sf883/+e6zrzLBPjLuXlMNC0dCRdcJCJoSJmHZIyIh4ORchMw2JPACycHv7Oa9uPuXZix2f/OpH5ARlPqDrgcPNK+5uXnG8u2OdD5hWcnK9Q+z5cXp+0g1VZdNj9hJX60Y+LpCE2M28zuKsqlDMusUxznqrotXdYpsqeRwYcu7Uvedhy7pS1oVaZrde1uqoH9cISTclMm3nXapvxJ0n8a1YBO0KTNOtz1FxTZcF8uBWyylmkiqtdx0uZWVejGCJKk62hBSoqTsthkQcE0NMzMvsrJZ6+so3dadvPc+rJ6aA8/oBXC6EW07eTgK7cwpnU+OrAy1/IoiDIcN7TW2gpbvqdFOlzg7Kxg76Bi7hvAH45uB/dmmK990JXUPhDIvHznRG0J2Ps0dd0bugmtD9YLqFfvDoPOXkAvIUcXnX1kTxQiPUr40zU9LnQM5z04O/R3tFf+IfYHwrK7M97tHPZ04UsF6dhN8tDd+MY8hM055pf+XWALhz9GZu5pev67msM3rN1FvYixsJqnnVWdVCbH49SS/PanVmne+Z53u0rYQI45Td9mAt0CA081L0spDMyDEStbp/iSnrshC6MVcIgdjt8b3UtaJtpZkgyRVftRWa9pJ6vMmh4BqqLayg36vbvJ2KYPtJ3Kxv/JHy1uRu5+T3U3H1vsd+
p3Ms8hZQ3dI1bw9/va2799lMztnRpZTexXpx7ybVzpoGJMSTsZpXxXVRddreaWOTfDU7GwX2yjo/sA5I/PFyfpqna7tGMEjwFm5bsyikk8ddd3LS23pgErbOz+JmiibiIt1aaWJsPi4xOHjetDJDd4wOIRGj2+cH9Qq8GHwNSCmx9mpLq5V1WdB1IdYZKytWNpv9f0dEsqruDeB9XSK1LURzb4IQnSmI3VgqZSMPMI7CMAkxwzAK0y4y7T2PvttndqNrRWpxE5qUjRgqMQjjOLKfRjc/A24RWlHWeUZVvOMridoMCYMbickIesXV7sf8+OM/xX76mP/93/6/WA43fPzTXyHma1KaqW0mpwjRIy2TADEiKZDH2AGAIbGw1nvm42tevf633N59wYcfRa6ngbocKIdb2nLgcHPDfLijLAdMCyH6Be9N4zwqNXPZQ8+14NubIhvTAT1f6XRks9At3z2t4xe2Upt1LwgvlV7XwlorIXX3UjPWdem0f6OsC8tydP1JWdlS+oCnLZpHEsGEtkXbp/wpHtWJb9Tu0NhBCga1uaiRgusWRnL0CqwUusxWPC+7LhWL4u3si2++KUV35ozB7b9NWBv9M1pXvivNvISdoKdQMjz4IJfDTl/Mzv4Rps4YqLnINlk5GSZJMG/6hQMVnwY3DDsFUuILqIRN1dIXvCan6h6lA5j+d21etoyciXtTvBRWA4TkdG2cwAKDKWkckbvUe8Z0/Yv4deGmh4CE3jARRB4u6P20ApsG55wy2FI7G8/y3rk7gT95z+PeP37fJah0BkA5gajQNQCmEFJiyBNj3iGS0EYXwINZt5qXiEgihAEJgzfUtNS7nbsNWsOIWlGr7hbbZjCjLHfMxzfMxzess/daihg5JbAMUbysfnENRK0VSiGWlZhG1/Obb5KtVMpayDGRU4YU0Gasa6HWRk5gQyCYV3i4O27Pz9jltXRZFXZxii5Bp12yKw8ByiWQ+DZjtT+s8V3Ytnc95zQugiBTY62Vw3HmuCzMy4qpEqJv3tpvPO/fVWildgbDP+slQD/PmZzm6cTWCL2fEyeBqp+LcMr06tY5uL/Yxlr2JG5/vW4iGDw9Y2lLvXY2S+VBFZHZQgjGOI6keE5JpWEgdIDiFhDSW7Q4PM05OZm9LiyHO7QV2jxjtSB1RnQlWCHSiP+upHh8chpYXxVCIyWYpsS08462EhppCKTcGAZjGJSUGilFxgHGKTCO7jC62w9MuwFrEW2ZPETKGNhNkXEcOB7uWXZXhJhozajNqEVpVSilIJKIaWC3G93oTTNrEbAdV7uP+ZWP/iT7qx8xDhNlOTBOO7fPbgtqA8jqXqiyI4Vdd0p1jYuIUuuR26VQlzeU5Wvu7j5D2z11Drz58lNyXGnzgbocWOc7ap0RGilZTy/4jYP2/gwCvqN0nUcXp0o8b3dqQlWlbVqTKhSDVdV/37ykrlbtG7CXzSF4HlXduVZr6ZbbhboutLL0dACAnZxDrfclOu9L4cwqtF4tExy8sdlGny8I6B18A27e1npPntQXZl8/PCpYykItIKO7sMYYyPs9EgPzcuTm5o6749IXcddfSO/U62W953d+sP5JnwinSR5Ali3K3BZqLwV08FXZaHLrtHoXv3XXW98gvax6q/zZNgQ13OfHJ62nIYRuVdMBjW2HhIZu6yaC4nobSQM579hfv2Q3XAEBbY1l8a67azVqg1YqtQmlmvcP0d5Ir38+r8L2eBCRs3TGtqrpTptvuG0DM6fP9DgNYGw9rjwV+f0Ayh9kbGydA5Se/hJFgjvujuOelEe6cyBBArUaItF7fUlGwuBC1jQhYaBqguDpFpPe50ndFbuUmXmuBCms82uOh1cs969Yjt6pupUVOemYEoKQsmExoN2S36P3Rs5eVWjaWI5HMCV3oazhvX3W5UithRo9tT0lTxloM3eVDpuBO1sdwvvnSi6BMuf79g9Jd/J92JLL8W0mfd/2fttQoLbKcV24n2dKaR3D
uV7PvXE6EGnN2QeR3nakL7OXKR3O4ESQHqxsJfYbS+XX/0Ykb9b3pl3av732CQR1jVb0dTF2vcpWgRNO/lJ6qiY6sUqmXt2zmHcm3wTvwZno0F23Tdw3zB1vFSPRSmWxA8f7W+bDjev+avE2JG0mUBFrpPD9QMcPG6BQgUaI3lNHsjBOkWmK7HcZMyVI866x2RiykmJ1DcqYmHaBcfQbfNxF8pjIY3Jq2yIxBcoQoY7sph33445DGjELlLmyzoVWlLoKTY9AIMaRadqTxh3HRYmWyOma/fSMnAaGPHD17Jrds8RHHz7nMN9RykRVukV2ZhyuSPkZQ96DCVrVUxB15nB/Ryt35KCM00BZE8vdG77UG3ZThTpT5iNaZkfCU/aNre9koV+orm9wUNGsOcrfNlXw3HhnSkpzD4iqwlqgGidRLMFV7GtZPe8p0fu+JDfP01poptRlpdbVKfDmnXA3Z9HWHxM9D9BN2XxDCDHxsEoGtLW+8EVS9P4jDfr7K1m8o6cGcWChHnB6lYkLX91IySHA2oyB0L1XAutauL8/cHN7x7J6BRCledUTEBF0o1a3BaEPM324gG4BpJ0X7Mfrq5xCTtcSgD/G1DiV6/QVSu1M0z4IXa1HuB2z6wYGFMyLRM5MRT+GrYrIDQoTMU3k3Z7p+hlX0zOCuPg8zEdKa6xrRQ0O90e0Vq9UaDio7oJhP1w9g7TzNHjS4EF+x04ApE/NCeS8bxh2fo0/htFxXa9KSt6kT9xwahh2jPsrQhdm+8d18XGK3UAyZCAT0khIAyqZKJFG6ClgPKVpbkq4rnP3u1hYjq+4v/uKu7tXzMcbynpA24JpdSArfv/kPJAlU7VXlWvv4K2NFIXSKktttFYQjFpWVCvz8cDtzRu0VcYhIYzkKbvRZU//Chebbj8/vY7urXGqbuF8rbeORB+LZL8NLPx+QcV3ec0/yPNqa6zFjfr83peTR4kzGnZqxzDG2NO4Xs2DnMXgl/9C1wKedWvnAO0BYyi9Gsy8DYbCuTXECfT42nCqGuzs+HntOa/xm0XC9ritIKKUQlgWstm5CejWWBBOj1nX1Xtk1pXD7a3LHtZ76nwPWpCN39ZKDM44Bzy9+V1J0B82QLEG0ojRyAmGMTINMA7CkIG+4UVWkgjCgllvuDfsGAchJQcxISiINx2U6HqHRL8A2kiKI4QBwcuAy6qoCsPgPWXi61sAJESGcUc1z/lN08QuX3G1z8zHV6gW9jsjTxMxFabJWGuitJH9bu/lznFE4kSS6CjdFiK9lXuITLtrnl1/gPCMz35+5M2nv0solahGpjAEheymWqrq1J6qp8QsEAyCdOo/Sqfd6b4KXXNgQlXfvJfWWSKDpTgwcWjoepXWGloLZsY4RHJ3saxty0c26tr7lvTTkmLyhbbWLu22U2m2E2Pq+dOejtjahmtraK0bVYOZOegABzbrQhOv6w8heCpKvAFaKYUYIzknt3pvCTOldBqz2kq9d++UUvzzOG3uhnf0MnVfu7fw/4IsoS8+J/BwesjpG49YtrnuBGwHOtqPwzrNselW/Ll9AxTpollHPobTt0FcV9QwgqkL3XqZb9Jwim5jkM0VwRdVcXfSINkBXB7cFXJLxkgESUicGPfPaU1oLbrwT2ApjV2FNhhRH28CPitnlks6qOufr8/n5WJ1Ilve2kzOIAaRP9SN6/3Dl1gJetp0YoyIRDdAu3rG1f4ZOU9ueAWYCDnmnsJxY0YRT/dKHjAGIom1uaMnwe3mVd1IMKxKWQtW75mPX3N/85Uv+nXGrGA01JyNdEo/dLTg0nJVpeqCiWshpDeia1VpumC6crj3a63MM/f3t5gprQyIVqQmkqhfQ2xVa9+dkjdPfJ6Ftdvv36n9+IaZ/0M+v9+3DPmt0t/t99vP3ehQzQXR4Mfc1J2rc2cettcRka5T7CAu9m7D4jq/Vtv5tfv7NLaqRS9Y4HQ8HgTFIKTeF/AUsPS/1Q6gAE/J9Oe2LW3HJVDq
f8N64z9lWRfUjGGaGHr1pQMYN6BbOiOuBqUZytrZ0UZmS3urm7Ohp3hFu0Tgu44fNEDJQySlSB4jwxSYrjLTlBiSEcUFZz7jXsWhzSs/zCIijRgcnAiVEMyN3rZIU9y+mpihKCKZnSWsRcwCrZiDmJT9Pcgs6+LlZCnTWiHnSBIhpYraa16/+dddE3GP1sJyCDQK2OzNCnXpG3JylKmrO3TWFdZAVWGMI1f7F3z44hkxFo43NxzD7zDEIzkU0N64j4YoXpUkzkBsy02UrQ5f+ya5RT3SKyikp3BgbbA2oTbr+hNcg9ITEFodMFhrSAyu4QiBZSknsyDUgQrQex8JQYyq5mxIH+eSu56T6DbQ4OyHRNd/G2ClQO9rk0a/DmoIfjytUltBJFDNNxe3kd7mwRddM/PGVs3Q3lgtFjwytV5yXVcXbqjiZc3K1rm4q0I5d6yGLT11WjQ2psJ6ukA3BsM62LhgYM6BUscf5/mwfn78LVyY7JS7nVgHz1dvLrHi4m3zeetbrTeBE0+juTdHFxe3ivbeQ7e39yxJGfLOG4yFzLS/IuZMDJkURtddteXcLbVb/2Nymo7QgckWWT8mfU7n/QRf3k7fvBV5X3z/xzEuI9Mgm/YJQorewTmPDuZrO5Vpx7iVlDdEFEne28qFjZFA8rJui7jhoe8yenJ9PlLXO5bDPfN8BAox9UtLtUfQrhHZikukv+dWNWj9/AYJxDT47CuU2SuRFKOunm5FjIJypGItMeXAED2IMT2nMiV0/cPG6p0m6e05C/2r8fB8Pn7cH+Z5et/4ttJ1f4F3P++xyPpkPhhwzxiRzkQbdvLG8lSQqP/s7FvsLTL04mrvbPaJBbW33t9vCTvdR9bTgQbd2XV7rPb3F1qrlOo2CFta9Ay45ORnIv6BADd2e2CTYEZpjdhaF9D3IE675lDPqW73o9rsNpRIJcj5RrfTa3o6Wb4H4P1BA5Sr3YTExDAldlcD49VITpEUDdGVVgtCTzeoodU8z9cKUD0y6p5qKQaG3PN5p7ygp3mI1vUgiVJhL5FazdmNZtRqzGuB+3vqWiitYdacVrdCq2+4vVsoyytCGAEvdy2LosHLAVUDy+Jt2odhz273ghRXhBFrkVYiRmYcr5nyFcGuGEPkVz78U6xf/hvEfhfT1dulRyMGR+sibrsfJBKil9u4c632JnI9b+B1j14WZ7BUpRIozfvXVDOaelRG14moWhfgeSPCNCRS8JRNWWfM3OK4dR+NM4ARVC/cBC9pf+Nk4+zK276Ri2taQnbhbfVSI8ScjUm7PWupHOflBEYlnjfG1jfy2hqh34xbJU5pFU0dpK2NHANDdCYkId2UroOsHm1sIuNz9QxnUcUGsOgl26f70braXsF6xNTB8Gn7Fbz8NgDdu8fXjU7fcmEnfrEob5/vQZ6+ekR0aovOBmA8mvNO0/54NZ8LoSA6Y0MiyMgwZHbTxNXVtXvbTEeW/YHD3R3z8dZL87ML0zcLa+vivU20d/LVkI3698hfOFuoX0aOW+7+8l58/P0fz3AGy1ORkKIrUkIMjOPIMEw0M8qyUoozqilmFmbMHA7WZoRa0dw7iycHjDmPzrrE7FVRtikOrDeTXFnWI0pFxBjHAZFMK+4CWrs5oWzXVnVA0rRSFWIH9mtKjL1rde1loy72Nhdw9nOgrTDPFauRsBvIU+qpOuCSRREXyYpcsBKq59Kd88w5SL0Qwv6iPFC+6X23IuPHEOZBCmZrb9KZk5QyYS2U5utICGDanYU3pqKTn7GLUzdRv/Y08VaRaL1/z9mW/m3m8JyK7Tb6rTk73L96heT2Qc6iVxHvP2ftDFBOrG0/L84I+rq/VSttbA/iBoWhVlJKDrxq9XR/LbRWSUFAm1fySPAgT7al3FNG9MCwme8l9s4E4bvHDxug7EdvdLUfvBInB6BR1+JeBK12xfngtrvuOtTtzHsZa/dl2PLa3lUY6CdJJHq/ChqmMO2MmFJXaXuqoWrj6nCg
mXHQey9BW2eGFFBbsdYoEml1QshMwxUqRhR3kNWhcjw2lmXtVuyZ4/E511cfM04vSWlHkh0pjuQoJElYiUja8WL/CcvHf4I3r7+mtRvyEEgJogK9DCxIZKvJ8UUWFzF1uK+Ib1Aq1CYsVb2xMbA2o6jQr19/fKf6aimU1RtghZjIKbkgb15czCcO+CK4yZlACu5L08xoEmkh9uZ2fZPebtCgmPbqopD6v4jERAiKVNzZ1HwDTsOIpIFjvANzmlWaQvDPruLN99ZS/OU3P4LmLFBBTqXW4MBt7A0j11rO5cE9yBeCRxUbQDkxI+fUy/ZKvYNlX3xi34wu2IJtYTu9uZ03cqe8LpSu56eehXGXVLR0P4Zuxifigrno3gyWXEOzGUFtaQwvLwwMKTONO/a7K66unrObrthfXZFzZp5n5unAMs0MeeJ+SNQyk2Jz08MTfbx9ej+eE5NyYurOxxp7l1s7tZ5/TLH80YCSbys/vmSAQhCG0TtMh1CZpoGr62tiTqzF04HaPKAxNe/YupkEdh3KkBdGa4w7I4mLF92tNzuQ6WuOYCzNgYZ1MCk5YS2hGnsVkfRrjX7teAsIEy9zrr3CDhFyGoj93mnV9VTar9VaCiJCzsnZZVOKNUoCHfyaMTilAM6C5R799zncriJ7eNrOxsS/IGDynUYnQc/83DfbiG2l2t4eQ7tv0ZkFPA3bAgK8iaf1YKEFT93IWUu2iVXVNiamcyyypUUu/FDUxf/+2A7xpTOzmyeSNr8eFGp1F2HBAdTWg6vW6uc+BWIMpJQIIVBO+iNOHijDOHbPHDd6DDg7iDUC7peUex836+0dVJuns5RT2q/z+ryTrnrP+EEDlOvrPRYm0m500xp6K/t1BdMupoTuLEVUz81vsWgUIeIncNM2aAiuQ+n5W3+89Px8JpuXF7Zau5/HSj4e2T97zroWjscFo1NmXfMgBqWtrKvrS6zNDn5CJsjAmAQbwKxhZWZdbmn1QK0H9uWW/fWPGMbGkBOBoz9/uCZKJA1XfPTRr1HKv+FYXuGmZJ7OuPQbOJkLqV9gGzvooDtQqrEUBynFhLWLYL1yw9GviF+YrTXWsjIvx+6Y6wyUmLEuK+s8n1Iim47i5DdgdtqYw6YtMa8U2ERa7qQaT9Ry6AryDaQEjJihasAa3gdCIiFHGCeoFSx4JUuIpGEg5gEzKNXLhIecu/0z0Dds1/9slTDuexNTho1lArw8z+BkI99/LWDWyxy2HIdtVHhgu9UkdBBsPj9C32C85tjPiPQzI3ZmaVK/qXsDRwkbUPI3P+fL+4KpRlShiRJC8/RlCuQmhBSIUXuHXmdlogiYW/kPeWA37dmNe8ZxIg0jIUaGXQfsZF/YI7Q2E6nEesCK+x9sMakCoW9gZ37FP6PxgAA6sUhiAZFv2iL+8MY3i3H7f625xUBOlKKkHCElcs6YmYsFF3O33g76ai8TdY8sQUJkLQeqrRRdmabCODSmnBgHj2Jr8/NaWwHpna57l9kQgvup0B2AYyRa9magnKvnVJzFCfj9Zr3UNcYVJFKqs2RBcOFmraToLrPSGUPTirZ4uo4e7yVb6uYUUABsUbw8YvAuvv+lBinQb7vzdenAtJfoXnyOaMY0DNRp9IaovaUKZpzExLqJT32nCQIaBNPet8q8e7hbPmzgvK+RZhdrth+YA5SAWdf7aetyuEBlA9sOJLdjiJ253ES6XHwWZ0zOYv4Ykq/Fw4CoVx9tXZZNDdQ1MN5MN2BaERqiRoiNMTsr3iMP4Jz6eTTF72Srvmn8oAFKnrwiI08ZUs+p93boAUPV+yMEtyM45XAf08oiQqkrqaZ+szVijN6xMRjaexaQAgk3PMrjxLDfMSxXDId7xumKPB3931rQcqQVb0SHKWVe0bpig4s1x90IdcSOlRATQxqJcWCowr0cWOYDh7tCKzOtzeyvPkSohBRYl8Dzq2fkvCeFyLR/zrj7gEVH1GYHFGp+IefYt4p+adimPRA0dHdY
DcxFOa7qVsUSKNqN2NpmLnbu3FvKQpkXjxTViDkSCWhRyrJAKf19enoi9ufi0YpFL9X0l/WbWE1o2tjc3SX10hMESZkwZK++EdwfoBkOPCMxj6RxTxMhXz2nLCv0jsBpmBjHa+I4UNZCWZtbfGvPJ5sQLRKJ3Q0x4m3HK1TBujgals66+UKmvUxG+oq2xf3eK+e8UNkWccpWOUXfuXo5MYaYouJpDb1gtnzzceDgvZn6OdwAiYLF7bEb4txAYNfvdLAU0ubY6htLso2Riah5M0tZGqIzwe4xzdQqjEthWKqD8p4S09pQiaRpzxBHcqiEJVCOynqsaKsXQmGfGV9Atzuvl0yzPWabO6eI6V1Qt3vzjz+142PL/TvZFhhyYjcaaw00VW7v76g1UipESaRNKBL8XKm6FXgzI4bEUg/syoFWFso0YzjzNAx7rwQSoekKUpAuiq/Fm7W1or1KJJJChuhl/s7++vttrIuam+mFENAm1MVQWhdIuugXlCjOqOYUkOiC+rpqN4/rHbV76euWrjspJi7Wzg0p/9KDkPeMt466k5nKwz0CcE1hTLBzhuxwPFJLI6izp62p+yz1hp0mwVOs5p5Nzla6cVtreJ8tc0Dg4KSdNG7W2fxTZWCnZDaQeAlmzMT9jbbj7fuOt1lNnsqKcirlh86Q1IbGSojRfX1iotHc7j64FYXVCulcqGDdcT0kIcdATsn7/PQCAD0VZGxLna9l7njb/t0xaks5s5YeaYpblK9l9QU5nDeFkLpiOp7zgfQoQ/tCHmPojIt7HDi9WmkxEoO75UlwkNJKZGyV1jwvvx5nDnf35DwyTdes80ork5cj4kAhpokUpLe2dvdHWfsCrEYLLrmIMfH86hn3ds/hsNDKPceDouqOq3UwhtSo7SWqibUp81KollAGkOGU33dnDRdTbQZgOTpSa2ZUdQCwVliqsTQvIRYJlGY06yK83o8EM1qtbkBU66nsFj0vgK3WrlHRTj0Gd8ftiLp2lC2ylbl1lqSnGrz5Ys9Titfdx+QMiDfuc3M4E+8+7ODGy1wlDsRxR2uefglBiCG7+2HItIi7POJ5eFG3Lafb9AdxbwuRwdsYiNBKwWIXS9cCGFsRzRYlbh4DvoaFcyO/7ULtLIgRug9LcNpXNt2IsykesWnPOhvIpnnpq2XYAIudvzbYmgye3tH647dYxTrIVPdKCSpUk94J2Q3XtHn5dl2PaBFKcSIqDZU8r0hOPXpTgkTGHMm7kWEITLFBNmiLV2vpo/SJub31ue3IBo7sgmXqf+kLr/R49hcJULbj8ftJGIZINaHNymGduTscKFWAxJBHpmHnVHprqDVqW72btnqp7loPLOuBeT0yzncsZWZZD+x21wy7a0LM3d11waioFkpZUVavWOvY1W+evt31dc1o0AKxCdrTmiebqEC/jyYP1Nrs6+Q230CKEVJE6wJ01iNsn5/zOTpHdw8j4QcX/Hnufr+lvd82Hl8Pv9/3uVRDPPiI5jzfyeV1S2f171OMXE17ggQvv1c3cyy1oWqkrdBOpG/egkju+4HgyRk9p+hMUS4az56Y5s33xI/M+vm/FI1v4GWrAnT4uT3XqLW4bmYTxvbPEXtRgetVPDW0td7AjNR7dW3BxJYUTgJTTFjvBO8yFGd2zLS7FDeXBaiXGTdcjP99KsLgBw5QYkpoVUpbe+61oWZM08RwAhzBHWW7FW+MmymZl5PWuiLFSzelR+cxDf7ardJqJE3Zu/5ad4YMgcEmV0uvC8M0Mu32jNOOYV7Iw0BbJpay0NTz0SnuvBW9CLUtyMkq3WvJzY6YBnbD3gV4cUSzG2HV5UCtK6qNNSnTsPJliizLG3KMzIfXzMvRS8DEqBipawqUDZFvKQD3ylAJVBNqhbk0luaApZqbR9Xm8jkjELuUrLZGWecu6q0nV1BrjVZddHrmSBV6LtLaKZmK9dvwfIOF0yYQxE6l0P1kuHthGohxhK3PjxbfuPuG7SZykELCJGFU
QnRTLZHUqxrcxyIPEyaBYOramR79BfF0W0o7p/GpLDGyLIU8GTSlzgdo1T0meph1IiLsLGY7Nw7stKe4xsPz9Bs49mvYCQ7vcbFRwr6w9TRQs25I6mmEk1h+SwsBWO+QuUWzp+R/f9+eGiOkHlknB28pE6ObiCEZ00gwf7yq2/4HM9Q8BkspElJkN+3Z70ZSEkIohHZkXu9owolW7gmtE23OCY9sgDT0wKJ//lOKanNP0ItN6BcHTtw3yM3aYu9/IgJrmd2QjaFXnLplfYzZ78O6UtaZUv1ekYD31llnlvnIMNyzLgvH+1t2++fsr18wDBMSArWu1LaCeGqZKrBF39rBiUQkuth90zJY4GRDrhoJcWQYrohpRx4mF+ZaY1m+Zl7vAVBt1FoYhuypHug+KIoQN4LTr281rFcFbteacWa57NHc/VEBlPd5qfz+3usSfT38KVjXkISLtBVbUUUkjJ5uy7l15kBYVxexNj3f42rWwWV0ICEVMw8eTRvaWg8GezHBKf3+SDRrbwP2c/ppC5h60ES/+0x72tBI5B4Q+YoeQqBFBzauSQlYb3wK5xR8zkMHWkqKiZAzwZylt9Ne6ukgwUXeZlsPK0Vaw6RB7+gcovBdSZQ/dIDyN/7G3+Bv/s2/+eB3f/pP/2n+2T/7ZwDM88x/89/8N/y9v/f3WJaFv/AX/gL/4//4P/LjH//4e7/XVkq3FKegQkrsdyO7YSRJoCwrtdQeifnmaNJQKlUXSp2Ja3SaTbYSMo8mUVBpWNqi1t67gIAFF8qmnEljZtiPXD3fUcoV2o4EveYglRtdONx7i/bUzW5EnClx9buhvStsLY0YMquqC0xj6F1+BUqhlIXVAkyRdV2pbeFw+LJXIR2o5VOCHsmpQevdTHuNvDbvjrl5mxj9ZirKUpW1ejlxVW+xbuaak00AR0+NaatdOKWnPcOxT3ckPNU8CsTckThY22rypYOUvunKFgE6cDMFS73QJQRPoaRIyAlS6lGAQRNaMEi+GZs4g+JZF2fMojl7Rgxd7OzpmJi8JNxZpILETsOmkTDskGHnWharJITxutKys2iz4CAFoAsM5ZQ2q51yvVg0O7XrYMv9RM724V070PtfgLmfgm25jw7Qun7HbfWDi75PHUrNK33U6OY2nHpyiBAYkJiJcXSQEiMxRMbsAHyaJoZxIqbBPX4kIV3XIyESh9H9PvYTu93OU5G7Hbvdnjy4qmRdD8z3R+7XlaWq9+aQSLStqXuX1rDpELYCdzsbA0pnTGxLCbHJF/r153N23o++HbBcLuzf1/9iO4DQN2LZOK2gDMnI2Rii0VJ39O2pFWmJmF2UulilXlQABTGC1V5N6GLUu1rJ4x2tHrF2oAzuHm29ciuSMCZidMPEasWj8JDYSsVTv4eaAlZhc/tkIOZrQrxC0p5p/4xxmFiWO2pbaPoGdAXcA6q2wBAyKUKWQBYjByGJp2MJDla3YgIV3wD7jvcWONnm8g/CfD1Or1yW/b7tqXL5733j7WNR2bbyUwh3foXOflwey6lCUAIhCjlk126EASyzLspaGrVWZ5S1OasiYNWDFgldf9I3dT2ldIVNIO+90HzdlnM+9MHHONX+iZxSUluQ3WE/FUVrt0yQ7XO66HZbn+lpwVpLD0V7M1QRsK2IwUHZmDOlViwlrHqhyablM/X7Wjtzc6p8wlljevARvukUPRp/JAzKn/tzf47/6X/6n85vks5v81//1/81//Af/kP+wT/4B7x48YLf/M3f5C/+xb/I//w//8/f/436OW3NS/9yHnj2/Dn7cUQUlnBklZmibmrkJkcVrStrcXASoiBRGCRQgzdK0yC00HrnUu1dZLvxRKfTRQSJkHJgnDJX1zu0XmFtZkwwSEPLwrosfuI35sYMif6+QaSLCCtKJVhBi4MnaZnNu6IXJxGsYHZETX3hShPrck9pX5HklklWcvBmYyF5z5lu2tovzu5fou4AWotSqtIUauulxJcMgLnYyjpC
LnWlaT0xMtYj+csLEegUZDhRlP47zpH9KaT2x212ylvpXVGvKJANNPb8aIjJXTdjIqbmhj9mXSvkr5FShN6c0PO9bjUdQ1fAa6NpJYVMyNkZHRNayMi4I0/XDENE2wqh62JSoEZBtfQ8P1Bbr4TxkvKtodypQkfOLJDXMSUgI9LO4M57ofuSeFkB0+ncjV43tNdDCtbCCdyeFq3QX7PhjEl0kLDLe2KafNMTd+Q1hBASSSJRRnLcEfOONIzENJLCgDVz4BQjOWemaWA/TYQ0MEwTkryTqbbKMq/c3R85lurljybErd/QBlD8w3ZzwB7td5O2k13CxrBsz+pR+2nh7pfMxrv9QcbjlNHjyPv8N+tAARQlBWPIwqSJ3UTvwF0ozX2SYpgYY3SSoyVqzL3iAkLoxohWu+jQfSZqW1FdqeXAbrwiD3skeJoxSiLHq85ArmAzIgMSCpibqfli1LVzcWRIiZh2mEwQnyH5mjQ8d0M5SSzLp5i8cS+L2hh7xF3b6seGMsTAECCq9nJTTz2GB3PzEAqIyB/wrLz/XF2OS3Dy9nmDR796NN4GPNYvqofr1/ZeZzZjux9PGh9Tt6wAQhrIaSSGPSkGcmmUUmnqFgahVFppFC3+0irYZjPQD3xbczd3740d1Q4mHnws83skdOYx9GPdCiPACVM7k8zOTFdQGu72nXo6sqexglc52qZjUvU1VzxlGUJgGgbGcaTU2lnC5uyg4WuG2qmh6NbCA3p9gTiLvQGZ7zr+SABKSolPPvnkrd+/efOGv/23/zZ/9+/+Xf7z//w/B+Dv/J2/w5/5M3+Gf/JP/gn/2X/2n33Pd+onSCJDGntp5BX7caStFSs+UXVxOtwRoXT9x+o0qXgHYrSr5AdPDbghUqOJn4Tawqk8M0anW2OMpJQYdxNW99SyUNYVMaOVwn4+emnmvJBiPlFhXnuunm5qDcGNxlx82UWcsqLqbdpzzL2eXlArSBzY7Qc+/PAlr79u3L9eCMnlUEWMnLzbpQqEHKD1SEDd+2JdSxfACtqkS0a2m7YLnWwrTHZ3xP8/ef8SY0u2pWmh35gPM1truft+xDknTmZWZt7iikcLGiClUkAvEUokxCM7VULVACR61UlVpyRQqVopARIvlWjQARoI0QGJDhKiQydBqHpcELeKS5FkZZ5z4kTEfrj7WmY25xy3Mca0Ze577zgRWZnihq6Fduzty9fDltl8/OMf//hH151Ud2/tG5C9xjfYrsVw59otypcOPuzoTqnQ86UCajqhkCCsq0WEDmAk9OjZitRiFOubo8palRAiyUvlrCrIxsamZ+iRcLDvsnojw5wzUazaIYbIEAcO48Q4ZmrNLO6PI+1AuYxEtWBybg8uRF7opkO6pVd6JCRAgJhABoIMCAkk20ZFF7cZ82QLU2Ofo9XOhKozMa1rc7rYFq47vIKXoOdotOxpOpCHg3ckDlvDQ6WxLjaua1vJJTK2zHiwCp8QjcYP0cEjUNaVQRLrZabMM6WutLpQlgvlsqClQWtevt6eLKlWrnvd9J/sZH8WuxpsLp7fdHwsBfEUuGz/AmdRBHNoHnOg5ML5slBbJaWBPGTikJG1kUOixkwLrotzMFrx0nffjFot1PPKsqxcLivDWK3L+pgYcmbIk2kD4kCoIyEswGobnABRqSKoDkgamMY7TjevyPnOGpGOrwnpFmpgnWdieiQsmYB5CXXvjlJWKiYClWG0KYulLq7Ml17nE/yZZ96epzM+7hPy4Wu+W6qnfQBqrsvTDhw7Cy890PLXdLG6ac+iG3cqwkqpru9qwcqRm4vAK6hGgiSapz56v6c+p5+OvQ6m9mfoAQ722R4ymr2CP2W7dmJrdq32vkHaJgBuxVLxSb03206cbjb5VmzSvIt9w8S1xYtRarGCCOlNEl3TqdqslcMWgDkQ+i63hj8jgPK3/tbf4pd/+ZeZponf/M3f5Pd+7/f4tV/7Nf7m3/ybrOvKb/3Wb23P/Yf+oX+IX/u1
X+P3f//3PwlQZm9p3Y937949/RIpMk0Hbm/vOByPBG+kZC595sVhVsHiPiduvx7Ny2NZFtMjGgHllFrY1NRW5lUx0aYtfkGsgyPTRC0LdV0YD0fy45nL+UJMo7div2wpnRgzKTUzd1oXyz0Wo8hisOi6at84GmVtJEmQlBRH67FRrfnfkCO3pxN1WXlzf7DsSWvUpkStFEwTspSKVjOAE6z/TGmRtSqleKM3p5R1P/F67xestLi01RwMO+2K7Y212caEquVjxxHA/FG2DdvuwHY4om7gJbdOoXt00nyi9ntFLTQJECNBxZxP1ZXidofsWtZi1VeKg0/ZxMFW828AZplnaq0MeSANGUlYpZBTnuLNBTUdjH3RgoZo2ZRSWS8rxBXa4tG+aQKaV1NYRJsgZSQPRBlQErR93Yr9f7Od17qJtK8hRgc7cBUDCOo9XLbwzunmIEIkEDURWkZLgijexMyiPtSamplIemVpSpgrea6Mc+U4zYxDZhwHYhrJOW2uk+syG2DtERmVUBtRldgZtc7T9D3Mz118/vWx8+E6pU/++q7H80j7OUPyqVTDp6Jx9fRTx3+i5hiqYoArpkDOwrIqOUem6cQ0HgmS3VLeVpMYIilGai3GyAZFveml4p21NSECVQNNDcTiTQYJgTgpEi0VI4tak84ihFDMjDBGREbG4SW3tz/g5vialA9UHUjDS0K8ZZkbbX1A0gnE2j1oy8TozhRqJapRDIBZabONz54GkI9c5z+r42PC6G9ivez3H39uf/7Hzv0DgOr/t6rZHZXyJN0kG2Nk0oCEYA7jphVSzMBC3b05oFJIqad1CiFES8Vpf69e7VOfzgFnS9qT09SNbRTtsnpPpXpll9kh7Cvh2MCmoleXbwS0sVb1JpL2/iHEzbTTXq88PDxwmWeTGJTi7snepsTZkcY13d9D3H1Pse86vf/UAcpv/MZv8B//x/8x/+A/+A/yx3/8x/z1v/7X+Sf/yX+S//l//p/5yU9+wjAMvHz58slrPv/8c37yk5988j1/7/d+7wNdC/RNPzIME6fDkdPphsNhos0LWpTVuzGaCt8j9ma9E1JWEz4uKzHMVt0QZpZgbeNTEGrN26YXUcTb3gcX96UUkZZZh4EyTNRj5TCvXC4LebyQD0eG6cLlspovRUyknNCqXOSR4uhTW3VXTxdUaUPVzeQ1+MKGTxJL4Vh34MrxcOLu9jPq/A5ZrdNkE0XbxdgJlV6AQ6uNZYZSArXCqpbuaQqt6nWGb/PRBpkpsPtGejUnMlPU9gR152QVO1U+Ymis/hnP/qg/1wBW2yZJqz3NVtGwIh5ZaC1oW12sauVPZV3MpTN0Nir4JCsoDQlW6jkOA8vl4o6IhZgy4zAgMYE2lsvF2gEkW4iimH9KzBPHG2G+LLx7ew+XRy+DtoUghOwCxeQbS7IFJyRjTpqXGkqwMs7enlQx8ABIqF7i2cs19pt2X1yDqfSk/22AKoiBiFaUZVWKFMplJaaLVZF5GwckUsWckRsQqlKp6LoYM1NXahmMefT+RUtamfKISDLQn+LOS2OlrgtaFqjWvdTkMFsv5m1x3GJAkQ9WKtsDdt/5Oxx7weAv0ivsn9+f98ljw9a2YQWiOTJLIqdIG2FdhSFHpsMdOZ/MXLAtiBqbFYYDKVqlXG3rFuU2IiqZnKwMPucXHKZXHE+vyeMNIY6kmJ3By2i8mgguayVU68MSEqTDRMgnpsMPOU2vaS3x7mHl/vE9wxi5uZkIDKiMaMgoVior48A4KDkbkI9BGIZETJ21vDIWtu7931dN1Y89Q/JUo6JPnvP8+R877+dsi+z+AFeLeJ7Wnuw/LvjcCxJB3fixqrU9aMFN4OzeicsDWoxIS9Yjp5nHTSds9hlN+458fEps5309R/suXhyCulfStRS56g6o9anYA3K8stSvU9eQtBA3t9mm5g2lKqzrSu1O33qtxtTNbG6XntpAVvNWJd8+x/OnDlB++7d/e/v3P/wP/8P8
xm/8Br/+67/Of/Ff/BccDoc/0Xv+1b/6V/nd3/3d7ed3797xq7/6q941FE7HE3e3L5jGg13wmJFgpX3WIjphqmm/WK15eakxEnVZqRIpawQJRlAHIZeBsi6kFNEWjVZtK+qivhhAUmQYMuV4QEKkNmWZV+q6UkplOi9c5kJdi4OpgbpWBm3ElNA2sM5nzucHWl0tZ+2b02aHpB4Fh4bEBq2yztbZ+Ob0ks9/9Od4fP+G5f3XaP2Kqr6BYzoNbVZ5vsyVeVZasUWqNMsL1nYdYDhaDj6YzfzJVea7FMS+DM532W3CXxe1DRY+2WQ3hb+IiTcxUyHx3GdrjV4X20qlpkaQ4ufgbplltaaBMaNaqXWh1eztcQSRaldPdHN8tFRgZEiJuS5bGsoIMwEapSzMsxKLlZWHIFb+PWRSjAyHE2kcmd/1723XLOTE4XBDHk6IDBSFpRQrfyaZSZNG7wtUrjoPBygAleosh6d8+nXbh0/ipj4x9BbFvmJGd5ksV/EaCyIFWiFG9SqcRBgGi/ziRExCGjLDNJGTddpN0UDmfJmZmZnDTB0PiDsSm5OmghZ0vVCXR9oyQ1kRV+p30mc7877ZqWwL4rfd6n6R2HL/++6f8vz3fdztn9+f+7H33xxbMWvw4N2/e9fmYRgJEiklITIwDLfAwZw4yYYjxVoAUL3aq67eQ0eIcSTmE9PxjuPpFcfpNYfpJdPpJXk8IWEEVWpZaWuiBSzf3yoxV7JmxiEwDiPTizvG00vy+BnoyMO7R96+e+DN27fc3GRSuuMwTuQ8kLKlr4NAGjKnUyJGWNeZIDDlgZyTVy06Q6mOhf9vBiefOq7lvx///adYsv3v4Ck4+dTz/Tds9Inz7q2ZB1drljY3cGJjPYhAsAo3QiO4I3BowRh7ZAMnunON3U7mE+IaAyWN4JWXBnIMHNRavUGfM9NeUdN6l/hgmQW8rUQIliai4do6DIBINZ+kBqlVkprzca11Czi29FP/Dtv1Y0vvtG7a+R0DkD/zMuOXL1/yD/wD/wB/+2//bf6pf+qfYlkW3rx584RF+elPf/pRzUo/xnFk9NTB/uj16OM4MU6T9wvoGgoA00TElKxck26Wg2VU1UFKa5R5QSRsFzjkSFlX7zmQqS0Sq1W5hCAWLSertGk6eVQyUKryeL6Y/4FahdFSG8vjhVIqtVmGXuLAkLJFRSI8Pj6aI2oFf9A3qi44qoTWiFppdWZZHnl8eM9puuXu5iWDDFxk4nKGuRTWOpuvRbHJW5uaK2wTR+1QzfrR/S2gy6u3mMHz5JZP9BLgXb8J9Z4yNmHDtuhfK3rUUzhsG2nXqOAbgPjE2BikXcoIxIz31sUcRmt3b23WxK8UECyPv64wVqh6Vd6LsjX6U8yuKDRyFrQl01pgollbTAZUrQ+K5f1Np4IIIQ2koIyHI8PhxEMMsFZIeNll8FL2EQnZrmfDmZwBNHqKp2/eygc7dK80U0/5bADKKVJ/9Xb9UvT0t9G2WzF51wyUgjbTLLRlpUYIMTJyMvCVEjGOHF3knXO28yRZI0fvl3RZLpTFr1EQYo6mc6IgZYFyJlRLu0Wsy6q1nX+uRnkWHu6/ej/3TySAvs3xPLK2SyFP/v7Yc58DmL5gW2WZbtS0iKWAo0YkDgQGDuOAhAMxnljLaC6bAWLImOR8NZpfgzNkkEjEfGQ4vOBw85qbm1fcHF9xOr1iOt4R4kglUkplmc8snbmMK3kMNk5b4XQ4cHd6weHujuHmBYUjDw8LlWaWAVU3K/7TSRjGkYfHSK0rtEbKVkYag6ItMKTEOCSGlMgRevsGkWB6bP3ImP3/oeNjQOT5vf0Fb7ABgeevCyJb+a12cC3BfJYk0ZpQ1kqtJmQ3ZtXWq57YXX0tNCI5fPDHT+LpRv6JS97BiAU413G7gYMe4G65Vl9benpoCxhNq9Vq23qgibK56DbXzDQXS9RqKcFuvNk8NR0l0ivx1L1dsNaku+tpgey+IOkXHX/mAOX+
/p7//X//3/lLf+kv8Y/+o/8oOWf+u//uv+N3fud3APjf/rf/jT/4gz/gN3/zN7/ze9e6MkyHjW62Q6516L4h5pwtneL5MUO1G3a1ni5RWWcflFEoc6IMF9YlMI7ZKNJWLCVRPUZ05B5yIrYKITK1xu36Cm2N8zwznI6M88LjefbmfUIeB2QNrGVBtXE8nFhOM+/fmGhRmpX1iW9OTQtRArVeaKt9j1ky7979jDFbdcWYb0k3maYz88M9Ko+srVJqY10rSDD9ieMJMzcz5C7R0hR9i7P9UJ8Mrt7fIYZgAKGzHBv4ABXvKeFCWu0NfEJAognExClDw2His9cjWL83at23DAy13vhK7bqosV7qiyytmoldNSCTxazpFWirRYBRhKC9M3FjStF7avYJZOWiHRzQYNVKaNYsXIL50eRkXX2PN7c83twwL/dQV7xmGlVhnhdrtyMJiG7gZgI6gkXhaRPB9QjMFpImRss2r2Kyxcrzue7uacxHvHaojqC+meZgMuLaZnCbdEzcAGrmSZGB0FZCLSStZJQcrMN1ihAkoTIQmrK0SC3mx2Gp0YqIudAKgSEowkLQlaCmn4moMwfOzClI6HPNgVSnndltKv7vXilt4MwqR2KMfSTyseX6uV7h2+pPftHm1XZrSt+oOxOY4kgKB5iExoGc78j1SAqV2lbm9ZF5faSVs6Udo4HZYTwyjCficEPMt+bGO9wwjHekcAOMSJigBWqbkTCS8tGunyjogXB6wTAmXt284tXph8TTgTVE7h8b69uvaWQ0ZhNohwABUo7ECEKjrBev1rA5WWslSiB7JZytBddrGp3x+jYC1T+t4xfdm4+l7Z4Dk/177BtQfsOHfvB+HwO7pk8xViKKaQupcUuzbOtlc42eWAuDtVhlz1OW77pdB7dLsMhG2NpW7NbiT12H63cNPlx1xyxZIB9CMN1h3X3+JtgH3D3Ygsh+TRyUROs/lL3MeDO7FO/vo0/vmdIZymt1V/P1/DsU8fzpA5S/8lf+Cv/sP/vP8uu//uv80R/9EX/tr/01Yoz8xb/4F3nx4gX/6r/6r/K7v/u7vH79mru7O/7yX/7L/OZv/uafoIIH5rIQunDTKAEaZiBDMJOjppHQrPy0OTK80oF2cyzftiLOJDQfEDFGYgyUaaDWgei0mOXnbaDFaOY7KWWCqPXYmCYuhwNpHInTRDpMpHG4DpgYHWgIIWbykDgeblkvM+dWzCjJWQQRxRoIL6ayD4HVeBzOjyPvB2sieDq+RiQR8g0h36H1wcS2dWFdi0XZKta7AfHGf7Kt95aXNMO6PWy3BaxYNO3omD5xnYHiyYZgOpK2dRfEWaorbblVWASPljd6c/e+/twr29kV/YGymLMmIbhAELtmZaXGZ5GxmJssrkiPIRigjNaUrcHOiKm6L5pAs94xdbES9pgq05Q5HE/c3d3y/ssDc4x2HlWppTEvC6oQknnQqPEnV7OvblRAH392zc0GW7wUVawPFGL+PM1EwV3siz3dgAwGwlWueWUJzdoEiCKSkYoBr1qNFGuNcllo7dHGX4jUnGkpoiGgebBxnyNRG6UEyrJCMb2JGXs1cx4VCFIIUi2NJHh/K7Zoc78l7CM82T+2j1q/ZSrnY7/bf87zxz527D/74++rHnnGrWeTaqBpdKYzkvJECDcM40tifAWMLOXC/f07Hi5vWdcHFNNPpRiZDieG8ZaYj6hMhHxkmm7IaUTCgNbIignctQlmDDmQsgHIGEyUezhO3J1ecxhfWUPLdaEyUyUxN6GRGQ43TMcbbm7vmMaBZbnn4f4ty+WM6EKtgbJCjpBjIAbI3f5guxxXQSjItvn9WR4f0xTBh5/7FJR+/D0+/vOH598r/r7NYTDZqgZjGpFozKMEE6jqDtw0rwZdatm6SNvHeADd2c5v+uiNVNRPPy7+HTxYpC/tGxHjbBjP58ce1FloZ2lIT3tL2LQsW2q+tS1d5HZV9Lfa3rN/btvI391nffvx86cO
UP7wD/+Qv/gX/yJffvklP/zhD/kn/ol/gv/hf/gf+OEPfwjAv/Pv/DuEEPid3/mdJ0Ztf5KjFJv40S15O87sEVdTNbMjzQw0ylK2gWCioWbusM0jM1VqOz/pwplSZB0GlphsURrMwVbD7jkxkCWj3tCr1Mq6zjyeb1lrsfbo88IlRcqyUtay2YZXhRJhGA/cvnhBjJVlgbJYqbKZJAVoxcZjsw2pLHCOkfwwENOBGAdyPiIhI+GI6gF0dY+TRqs2mmvDy9ri00kt4uDOJxngORdasVK47akK0p5GF0HEHqOg1XQPPRLwQvzrKuIbahM3v9sYm866OMbuJkU+74KXL9sD5o8SY/Cy5YIX+vu57ZQ8SWklUtXKiSVkP5eej7WoY15nq5IK1iRQMIpTm5XcljZxGDLj4cjhdMt5OlJmEznDddrVaq6MIVoPjOiNtFpz8zL3NEEC1mCwL8jRN01fY7yDsZLc0A20WzAGG99VvZmYeLAsisFMBTXhLyGhLYFW6+lSZ2SprItpo9Q7Ux/XwniKjIeBlBMxRKJkA/6u41E1nUsr1a/hAs0FsurGZXRh3m6A7SMscEfdD48/jb3vu2yg3wR4bFXYCW93pe+9dYBIYhiO3Ny+5ubml5nGV7TW+NnPf8JXb37G+fw1c7kHLaZnUqv2UKL5zgwTeRhJkl3LYFqvrkQIEhing83flsjZ7s94GIn5hhImahXmIpyXwnkpqETG4y2nu1tevXjND159RlsX3n3xJV9/+VPKeiaHSi2NZWmkKZNiIodA9NRcH82CXLv9yrX9wDeBhT+t42P38WNAdHcW+2eyfYfngPWbscpH399wgD3ZNmYhx4GURiBT6bq6vu7sur57g9WqjV7bYle2C1RtjdRN08I1CLQT2U50r++7fhf1KrN+pkLQKwBvu+8rGGfTPri29qSmzdZP8RokT91sQFW6Z0uvjgxPAo9+mayA4nrNnuGWb338qQOU//w//8+/8ffTNPE3/sbf4G/8jb/x9/xZ2nRbmBGL7iUaW6IoQRNGNllJ5LW78fVGb0IfR5jaIK4rxTeuZbywjNm0JsXKgtMwktPoNe/dG8XsxmuOHA4Dl3nkcDqxLgt1XiiXM7TGuVbqsmyDcF2t8+4YI4fDCXS26F4bc+2K6ELvyyJiYKWxsC6PnC/vGIevuUxHmphRztoCrQ1omHDrNS8Rdnt0wTcbYZsf/h+BJ74cm7JBLXcozyJj3W7AlbXo+pM+OCWEriezK622+QZxGt0Bihmodjt4/zv0Wv5u3e8lcMGjbdeZWGPI1bbmvsh3szRrcoSGSAuRkKznUG2Y06r3qdDFSuhajEzeg6lVu0chBMpaacNATgOH6cQ4HWlloWpnwgbQRFWrgghRtg17i1zEF5cejdAnt+z+qP8+uMHfhmfQbgsvxqDgzEgfw+L3o7nAVzUgMSHVWgCoC8OpXoFTCrVV5mVhLYVjtZ4iQ10tVVUa8/lCWRZzl6RZ6whV1L1g2nIhdzND7VvDVRPTF88tktoGz8cYk93CvMUN6tmqT4OJX8yG/MmO66k8BVtNG6UVQigMU2CYjty9+Iy72x8jEjjc3PLi3Qu++OIP+OkXf8jl8p61rOi8UgpMLTLGiW70Z6yZpQCDqAmVsc7Z4xBZV0U1kodEGkc0JuZqnk5F4bIU5mWl1Mo0HXj18o7jceT25pbDEPjjv/szfv7FH3D/9idInQnu4NVKA03EEEgeDe/nbk/NoS7q9uNTVTHP78s3/fxt0zd/r895+rnXMfIxrYoPOQ+sdPeK62t64IMkVJOZXhY19r0Zg2+eQ2577w0Aq5aN2eggZSfNcADcr/OH1+rjwNvO+Ak3oXp9bweY4MHWR95vA0lbYHENLmzdMmPR7Tqpehl621xx7fP2rBHb5+u2Imy7zLc+vte9eETatr71XD0edUoIpAQEWFmRghmjqVwFQM0atKn/u5dE1VqJpaJroVzOzINZhAeJxDhQl4KovXmMzX3I
eq7PHPdOxyOXmwtlmVnOZy7jwMXdSIPv6eqVAb11doyJYZiMDSiVuvScpbVeF3CxLyiNshaW+cz58SuGaaIIaBjMeI1Ma4mlCKXK5ndiJan2Dr6l+btZjYJ1LzUgI9RN22PASK9syBN0z3Vg9t91mlGMKmzg/e2sKqinONaybkIyUaOXrQMoaN/YHKDojgrtlKz1B/HKpnVGVkCVahSEbdYIVYQaopUDD1bmWYkMQ2KYDuRhcEMy0wVJsEoNqc2MydbKMq/oYbKNRAUl0rrBHcLWW0DtnyEKHYJYNCweLMnWe+caG/VB7UuOgATfmAO+2fc0jzMmCDlFokZfAGtfmxC/hq2vfmLCPaNezImUZmZ+81lYmjV7LA2W0siXR/OHa2rjcC0IQo5ClIQ0MeBfjYHpLedVlRZ8GdrGxhWT9O/b/77m+T/c3JQPH7/Se3/2h25gy+5Ul1zV1rgsM0UjIQykYaF4yjHlgRgTr/Jr0tB4fPwS+UJZ5jOlzJgoEQjRu4nbWE6TkAIIkZiUmIOPF6PRo5rzp4pwWatVgMhKbEojMM8zta5MOSJETscDt7cnTseJ5fENX37xf/LVz/8/rMuXTLISBQjGBKQUGXIiBUidAUO9hDYgGujixy1wf5Je+TTQ+BSb9Yuqsr7peJ76+Tav+eT5fWQw7UHKh+9rzIeBk0BZldXbhazFmqvWZqZotVrHaVvzrl2hm3jxAG0TYdvnPgclO4D8/Dxki/f6j7YH0deXZ+BwFwb1Nw7+3Ou3fnpYY1DZKrjUmZO2BQRXwHKtNOwifxzk7GiTDeB++3v1vQYoIZpB1VUAfb0JMVgppq5QnOHQZFSYRK/RVvEBdFVaBzxV4ZUo67JwOV9IKZPTSI3VxbImmI0x0lubx2g+AimZl0qpposolwvzwz2X+/cEgbWt7vHR0GLN2LpuKcTMmA/IINRUmOsFVKha3K0veM19RKs5fM7rPY+XN2geSfnWd8fs+oHGWtRxTSCGHYuyu2aGnhu9RXzAy4vdiKeXHW8q7Qo9VdOHeOulK31z8l5CXXPSNyRrnuY3TS2tY9qP4IZ5fcP3WMU7U3eUXrsxHIJWS/k0rOytufFdC17CHIO7K7owOhRCw0zV0shxGDjd3DJNE+cQuJzvWZeZGCPH44FmnAGL93Va5sI6W9puixJUKK2yrAshZBMfh0DEO2CrAZUQd8CrRy1+H4xMsB/6paEXnCuo2KYRoznphhhJaQCCWas3ZVln5hmWVpxqNgt+LyeCFI1NUgvXOnNciwnzHgUKwrwWJPUy60DQQPC3iENGkwvFm7WZ9zaI9Mi7MzzgC+AOoIBvMB/ZUPr33+d5PliYPwJmPlWp8yc9npYtX3uidG1abeYPU2RB1zNN3hG//hqVn/JwDzklms68f/g5P//ypzye3zEvD5R1MUGlg51lrZRakWDgpMWRkEYigSCFGJILMo1VaQjzWlnWGUmFMQ7k4OxLK0RtBDHbhDILSxLq5T1f/Ozv8Af/5/+Ld1/9HaS+IU6WborepmMaR8acLL0T1MaOBxC9EsUvMB+L7MOOQf3YtXx+/KL79W10Rn9SHcxHSTuuQNo+nyu7vA8hnhExtWIM1gprsT5nFkht5QYWLKlunkB9Q+8moE9PDtNOemsBtBv5fTNzpdsE2325zqLsTtqYzN2XkKtwXTuDvV0nY0EIECW4HsV1b5t4d8fYGOq2890PE+3XQuFPcM++1wCl24uro9GmSnTNRvBou3mviZizrZ+tI2c7OrnVlch9QTDqtVHXlVYqy7IQwyOtYT1JtKFYR9dUrfJEg6d6QmDIgWk8cjicOBwfOd7cMp8vnB8euDw8orVQl4XlslJCpqZEakLMdr4hRkIeTICoghQbFFtG0M9T24pqY1ln8noxxX8IEKL1qhFzqI2Yit1E3rviz83J0H7eWmbjfWtq2VVIsQ1m82tmt1dsUB+bmldR63b0DYje3wSv1b++jt336zeodwA2XGJoXdyd
UIunlFIEDWjxzqLBgKi0QMEqqKsEJDQCgUokh8w4JG5OR8/zL2aepOoA5UgLmcfZTPEqjWWZabWR0mCRVIzgdtW1LZ6HjjaJFZIE20A0kSRTtVhWRvrVlO17Sl9ogg3URt0Wmu7JMo4D02FinA4cTydizL5pVh4fH3n3/g337xuXc2Ftwfu4CGQrg84IJRRj4nwjarLa2qgzoT6iC1ADVa1EP8SMiPnVaAuuQQENJgCMKBJsgRMHlh8sv08W916muYvCYdcNoYt+ryvdNkZ2gPj6dp/eBH/R8am00Pb4toZ4w0ttfq8b58W8YuT9wsNj5cuffk0IBwNyEZb1nvvHL5mXd7R6Aey182VmWS6EfHZGtSF1YTrcMITmzs6Wpm4aCNE8NipKbYXSCrLimp9qPk9USnnk8d2XLPOZuk48vms8vP+Cn/7R3+LNz/7fBN4wTIt5KYkQQ7T+Kt4M08hn9X6VVy0EsG1U15D90yDjyR757J78vYLI7e7v1p5Piaevny+7c9INpNg+uytXAYwBCE9BTP/KthtjFSomFi3F/mgL7jHSixt9fYJNXydeodPZlNDXtuANOi2PuUv9PqFQtgvbx6ax/w45el+m3u9mdxGuWOFJwwk6V0YXsoYeH/RUzZUp2YKPnpLHqyPBHZv83JprVHg6dzfA9h2P7zdAIVCaGrVNIwSrqpEoXoEqxJSILVnvEdgs0DvLYkIl/Pr7Aii2Ube6EuLAui7ENRODlQqHZE3zaIUxWGVMFKF191AxG+xhmBmnW6abhcNltcZq9++JX33F45t7dFktkmwL62qAYsA2SJLCFKmPuE7W3Gxtca/GTgSAwDoXYlxZx4WcVhOBIoRoXWpDWGlt3UZKzwnSzJ4eEUTdnFkCquayqpsw1mp7NsYl9BHn1U7gv21bqq2XeEuwBV4Fr7T6UC9goMM2yxgCEbZUTwvm9dGaGa8Ffy6teT8+Y0hYPZffp2N11iSKdwbFr53SaiAEGLNwnBKHQybmiGphLTPzOls0FDI5T+RxMq+UAFUacTAXyDyM5HWkrAWRSpLKYTLdR6l2n6wiIiIavR+SAZEt7SvG2Fl+uBtjNZoWu87ughwxqj3GkePxyKvPPmO6uQGx77euCyFBY6GVC1ouSM0wBbQuDDExZWGI0JaJZX7k8fHR5k0SJDamUTlMEEJBJVBVzA9G1HQnMZGidfRN0e53X8gtHdCjMY/AuHrStG1x3BApbm17jahhcw7eykJ9fBhgsed/ap17Pqb2kfp3jbj758foQDs0pBlrpFijzbKstDrQVmW5XPi6/AGtWkoxpkiQioRKjEqIFhS0dgGiBRZrYX4oJKnkUEipMkwRkYSSrHw8ONubhNDM7h6smqqVTKEYW4vyeHnH2/dfQL2wXBaW89e8++oPefvzv4OWt+Sxksfs98nWrDGa74kE21Crqjnm7i+GXNOUV1SAryI7ZoBeytvvaRdNf9eNSfgQhj5lPnbb7gfPu97rJ7ydn09nbp3k1e4h5H5DANtqt2MGt0fAKrmuAWIrSrMmEzsBqf2JURCsRYeJZK1oobRiEgXt4ZwFOk3NkLIbG+5TQHK99HaWur8Gvjf0MuPWNvNN4TrPOnQQCfbeTZEmxvx46t1qD/16aLD+XdHX9abk6GxiUzOFVOi7AKhdn2Yap/3dt9jyyiZ9m+N7DVC2G+dRm20GsqFA2W5s3ywjXa0ZQiDl7A6R0FyQGoIJEy3fqx02U9aVc2uEspJS8g28mEeAmHGbJLPy7QxNTObQOR1PHG4urPOZu4cXPN7e8vj2a5bFXTcd1KhCrdZBdFsSOoL1m3tFun3iVE/FFKgF0UoMlge30uerlfG2pHgudLtO7rtxzTF2Qe414tDqGh/ZzRZ/jm1EurFZdm92givftOgRSGdBwvXt7PmmdA8SXT4iO1Oy7OyIg5nmevigz7IC3ssHNcMkn7A9WlAHKTFFxjFzGDIpQKkL63qhrDO1FDfpK+QxMI0jpcxo
XQ3kYixazKNZ5DcxNiJWQsQe643C3FFUUXdmtMWhm/DZoq5EcRdZ8YVTmrmRRl+qWyMNA9M0crq94ebujul0Y1yfNuI801plXWYuaeCMWOmoWAPMaYxMY+Q4RMpyZl1GjqfRKo5wG+qcmbKVm9bemRSQ0AwlSyPlQMqJNAQSIK2RJBKaLfO1tW0B6qXPVwHiNarab0CfAg/PQcZ3jcCfi2c/dnxjmmH7Y3Ojbdu0oFiH1xCalfIvZ+parPN5SiCRFoQcHFSpembNfI0ahdCsfD8GBzBRrat0TNu4Uo2W4gy+bYuxvKWZJUJxRu6yLjw+3rOWBcoj5/lr3r/5Qx7f/F3W+StyXEgxkoIJYqPghnrXO3ONlu0O2V+7a/fRjb9fL3t8K0Xd1oK/N8bkU8fzzfrbjYs9f9D/9Uy2uQ1RYyP6JmuswRWo9IKDWs0jqWH9lKydmbL5SknXnrkCuXnK2lXvwSv5bG8KBI3OJNprn3ORfRnt1TRP5lHzggrY+mfZC3Rb/yxO7On2nuBxpuYDLCnOAvW13Nbmtlvre+reUhNhexlBel3H7pr2f3/7MfG9BijBI26ALggQMSpSOy7eDMUczXfPiRjtjy8gEmwRskWnV0r0um+1br61ElXNat3fM6dMGgZCykjKZmgjxnikHMnTwLhMHE4HlvnIzd0tr37wGe++/pL58XET6lqpa48Sow/CSJREbzQpXp9uQYq73qqibSa1xRGwEEIixcF0MzlbVKfWZwXYBpWiVs2DmQL1ihp7ThfC+qjd8ox7OG9/tnzqM/HstlixAxHybB5s74tt2gbaDf33fGw0w7Mq9Ur199QH/ZT273MFYv0+Bn8P9XsfY7B7lxKtVubLwuXxEW3VQGe19J62Zk0FY2Qp5jDbfJFR8S6iqpgjraBBGKeJ0CKlgAahSfOFwMyb7Jwa3oHIwAiVGPvF8c1MBIK1R0/jyOFw4nRn4ORwc8MwHakiVnl0Wc0lWCIxZ/I4EHRlyoFpGhnHQE6BISo6BqQNtDK6FkpZV1ugYjRGca2WWgxYRZAGkBjI40CaBoYhkiXSFiWo+tjSHYjY+JKnQa6DFv/ho/O637c/DU3Jn/R9NvYG33U6sPZ1JUY1w7oeALipXEggsRKTrTcxeB8TapcVmB065pjYWmWejcWKQ2Y8nAgp0yQySIAkxgaKbF1ku9FX1YJiIs3zxRpgjtOIlIWHpbLM7ynlgWGAHCM5JIvzQyBFE8daasfphE9Fttvk/SbAsbvn35Gt+s7H/hS2Kb8Dxftff+LxfnzSNkyvLE7fu68faABsXi5czo1lDYQ4GbsV3Pxxszpws01nq2tzhkISISoJcyNftVG8TUQTZ2FUEInO52w+sA5crnuUCZlNK9U/84lG1kHT9XL5Ly3v7WDqCvocjV2f667WrTUk9gxDF/16SulZurYD1g1S+brWH/22x/caoKBXxsQ2Dq866VGrCCa9DN5bskdiYQMF/Y9pQa8LrKUG7QLXVkBt84nqav1ifh/mhhrNGhxLN8U0kLLZpocgDOPA4XRkuRw5nw7cvHjB6e6W92++Zp5XExqGa969Vd0cV1M0kzHzXShGu4nVqPcb3yOyXs5mwzeRQmYYEusqUNWrZ3pEuZ98punoqu7kFR/qaMjsH2Q3gn0j3WIK9hSGPWOXL1VkW9zthW7hvotOrn/8eofgjakqQU0Toj0699kkPQKBzRbekqI2pQXL01bUO9AmOisFurnUrpcL5/sHzo8P1j5cLBKpqznP5mxAb7k8Umsv1zZWKLiPiWiAIoQWGMJo3anp4jMluJ6k0f1fjJ3byuD93w0TDItEiNBEkJRI00icMpIii1bePj4S10ZpyuWycH544PHhnuV84VIaKhGJgRgDp9ORII0UlcBKHtP13Jua11xxFlGs/0ZqgVKhSKRKokogH0am08QwDMQkxs40778T6q4E/Sq8694hzwbHp7bCJ5vbx3UFnwYcv0hc+an327/+SVqofx4e
AYuDkGBC5ehRZYyBNNhrQjOAEqIFStIjdB/bm4GdYkyo29+XlljXkXk+ElIGsSanquo9u6y60EwTKxBoVNa2WvVILeQ0cJwibW2c7xswE+JCykoS15g0sO7p1tE4Z+sP1bejXuGnqDcF2eH+J0AFnofcT3xDNhHo/vnf5vggjL/ejz8L4NMXoP7WCjxNcF2fBnRbgOaB6rIWLnPz3m8WFJp79FNQ0OfCtX2FpZlTsGogWwcatQLNOt6rdGii29jpwaVdD2f6vSlhF7q24LLUXvnJ9V70W93xQm/Arp0w+sRl2ooTPKDvVatXHCJb8PXhezi4/xPcv+81QOm5aq3VqmGqsSW626B6+asN7t73QJ5soJv9vbMxHcRslKf6ZlaUKgUVYZWF9bKwzIv1urhZWNbZSlbzwHA4QLBFJuXAeBgZjwcONzfU+czty5e8+/pr2lp36ZNuWoabz2VUZvvsrt+Aa1dRZwZs1bQy01ptwYshk4eJuCRjD8TygkYVB6cH1Vk5F2E6Fdl1Hd1op5f02sj2Ef58rMmnFqodq9E/U68poSvU9xy3Nmppds7Z0zHRxMet1m3j2D5HdhUhDhpADeBs/VQM/Sf/nKoFrYFWF9bLGVXl/PgApTDEaBUztTDPZ9b1QhpHUjCWaVlXom8WIiYO9ZWHprBeVspYCXlkSOILfSOqUZ5rbVsE3Zq1NmhaPWIxFqYAcRiQMCBxIA0DS0qmNdDK/cN79PGMpAEJ1iRwWVaWupqQMkbWPCBtZKGZLqpVE1y7mV0ISggJolhqIeIGVI2UzQtlLbA2KEQWCRxOR4ZpIkS2eC6EAM3YqWvq7UoL91u/sV37MfFsrHzs+JOyKB/7jG+TSurP78zpPksRpINLYRiEogq1IkFNa6CB2qwJqBC2BXvTAohvfRY3ecQNEhZqPbOu75mXA2kYCSHbPKyNYRitLNnXKWNTCk1Xa42hwpgSmiM5Vc71LdoWAgtBzMCwSY/sQZu50o5DJPcu1z6nP2D4uTKVPXr/2PF0zl/3ou9++z5kaj4YAx8hcr4raL3+HlTFgxIH19t18O++Hxt61TeNh4miK8t6YV5mYKXWgZDilloXcbmx6OY/GYP7dIml7qIYixKB1d19EUGwNeVqinkFe5Ztd1fjFrZ759waEopLAkwndl3re2Dkabi+mvp12K/Gz9OqrTWkta2Z4AZQdoy8+l55LYDoTNIVJH0XmPK9BijWZ8a0AslBSPUoUFxsqqpWiqodhbarUMqPfWqnHyHYhr6usy0IfqERcWt8p9dC4vzwyMPp3spVjyem45HhciCPIyEPbk0uDIeJ482JOp853txwOB253D+wPl4oxfrtBK/6MAtzbOHXzmjIZrndz1tp1vRODHUXbWQgSCaFYuLakAhik6KX/doK0jZipHf2NWqxgz5rvy1NfWLiZbOfGGZ7VL1/mCtluaNctlXMAbiDywLe96eVTHRRlsSIpkxJ7gLrhkjeUciFpjvA4ymzDlCqOD/RLGecJFFr43I5w7xwuVwI2shRKNpYlgsP9+8ZxxMHvBdNLbSyMAQTFeahknJhnY3lkaaUZWGZL0x5JA8jlWYVGGpmgZe1sFY1t+Fmi4hGA4Yxnwg5E4aRfLolH4/k6cDp9pbj6cAwTcRhQIlIzj62rMNwXVaWy5nl8cz8cM/68EC7f6A9vKNIQdSqsaYU0WZOtyaYEV+wGjFY2sl6jATrGxgSIhmJyfr/CG5a2MvR9RoEdB+cndHUd1mMPsZmfAzsflfQ8ovSPB8F1Oy/gbMqGBtmhXpKEBNmq0aCeN8bTejqwsHuTtgwEBqwqFZAo4WvvaJO9cK6Zsr8nnWYSHFgDdFE4X0d8D45QRqlragW34CSNXoMCbBUT6srKQolGvCNKREQaisM0ToWDzm5iFO9QdxTlqJflXDNK3/s6n30eu85h2+63h89epDxDBxs7+1Mzp9GCrATfFdjMmOm99oU2X8VsTsmMTLlA00Tj/crTRfz
IipKwjyzgggpRCDa9oOl/YTk5brqsZQ5VcdRkdXYJ61ilgDYGl/6Br+BxkAMA0HsD8367KxlobULIRqDYv5SBhC06RP5h11h9e9vP7fdfeuVmCY/8KC2tc0ioqeY+vxvzhA1L3gwv6j+udcy5u8CUb7XAKW2yroW1lrJnp8NKbG5jjb1BRO6wEy4poXAh6I7n3ZxbLehL6Wwug6hN1gSoPhnldIo2hiGicPxyN3dC25u77i5uyNPR4bjcdtUzDgucjgcWI8HTnc3HE83vM9vmDnb4HUAZQDBkG5MZklfPcWh1CvwErdTblYSbGZdZhIESmnOSzgK0d1AxJXmrfWy3h5FNEpZzLCsVRdx2ZWyl7XdmnOlhp8f+3RZ7yuEgzqaCUOJvuiL+89pdXtXn5a1QF1RVVIMBBlYo6UbLE3UhcN+D/3+dgo0BKvLZ3e/W/Vmhs0a8C3zhVINWJhN/0pdC00Lc3rg/v1bVEx1X9aFsszEDCko+ZDJc6LM2ah4AtIiZa2s1e7FQmOpjbKuBniXSouBdDgQxwPj8cBwOjEcb7h7+ZLxeGK6ueV4e8twOjHdnDgej4xjZjiMhJzQEEnjQBjyBsRqqSyXC/P9Aw9fv+X+67ecv3jD27/7h1x+/kcs739OWApZK9qU5G0D+iYsYo4vZtDHNmaMXQsgkbVWpBSEQPC8kNRGKNXZqx7ptW0M+M1kN4LYyh03Zu4ZMNiNHYBPAZVve3zXkuP+76dFmf269PJKT/e4W7EJtgWqYKIx8Sg0ULQAHhy5xkfE0r8xGmVuJnsXSrlQ10dKnqy8O0RKESQm89WRHic3UAsiUEXygNBYy2qd1L2KQ3xTtUttYz+mkWlMDDl6GrtX31xFxb/4mn3898/vzfOfn1fwffKtN1L0Q3brF21xv4gx+/Cc7UOfn00P6Pq/+8lJCAjRK0XZ0h3XdaURozMNqt5s0dluzGW6OntxxX2CyIBIcdABTToAbSTp/In4OpfIww1jvmFIN4QwMF9W3j+847K8oTNi6oy/XTW3Lejz2iPDnsp/Kl6V6987O3v167q50u5SOn3+G0Dxz/f97LuFKtfj+w1QamEtK6WsZpK1mEGaYhP/mvOD0qynTIr5SktpVzdb9UFoAmqujKuLYpuXtM7z4sInb2G+WBO+Ui06GaaJ+7fvuL294/bFHePpxOH2BcfbG043t6RhcMZFkBg43JyYbo7EIZNyQkujtWIgoWCbd+9Zo50OjF4m7MyHV/MUa/+KV41RakOSRQUiFmGXZgLTor3lu72+98gxIZeBk+Uy04oJattO2NQraLbDx/CThWa/UamaMLWjIlVjZixB7wJnjzZbtbJm9Y7G6BZdmssvJm5NCfLg8F+dGLOpE4JV/9SO7L2LlfgmvuVBm+XsWzWAcrm4jXuvVvFoY50feXx4tzXma2Wl1ZWW7PxTHhgPRyiFpSrSzESv1IauCzONJQY0jTDcENPA6Thy8+KWu9c/4O71Dzi8fMl094Lj7QsONzfkaWKYJtI0EIeBNCRiipbyS2L280HQJN6U0DQ42pS2VupcmB/PnN+85/1P3zC+esnDH73g8tP/C77+KfP9G6uGKiDRBHziZfpK2xZTK+G3lgDNCCvb9EphbRDqamzXuhLqSo67IdCuRlSuBKITf9fCTX1i1ravuNn/+xfpRp6zLvvN7+85PSRsC7PgKR4FoVf8CTlHWki0GikmHvLWDteajyiJFKKlFsXF0ji171URpkOqaJtRXWh1RnVFdaHWHjX73Ggr2qxhpoH4hoaKhECpq6cfo7PIARpWkSaBmFws7exJLySQ4EwK8YPrtgmGt2ttwGt/rX7Rfdo/9ovSMdeX7ZmY3e+fPPrtAOgnNSyb26lv2p4oUZxt2NIh/XMCASsDX9fGWhohZHKG2maWXkwRIyklY2aC9QDr3X/FDSx73KcOclQd3JKMaQuBiHUI19ArJYKvZ5kQj+T8gsP0iuPxJSqB4c1X/Pyr/5N1Lban
VPNTSrk3/Ws7ICJXqyE1lm5f+WqHsffXYpKu+XP+Ra9MVwgO2Hq2Yt+/bbtz3+34XgOUta6sZWVZVtKwspaElGAOotZO1o8epfHE3h7PD/eBu7ZCq43LPHOZL3aTPfqfzzOtVE/7WN8SAyjmKJty5v7tO94fv+b09Q2HF3fcvXzN3fkldVk43JzIebDopprrqUQhTRYJL+t5Q9tN3Ao+ChKM1bj6SPg38p8lREJISMgovUzQ0EuIdi1Uzf2yV6W31tBgDaX60WplLSvrOlNWM3/rS0EIcQMoHxtiH1ukZPe7vrjaNb8yKgHL6ds6sM+zXu9b8+oiVdOipJStyqGUKzJXvaZ0gqXBnkdRijrgNHdVacYArPPFjPNUGcZI9DJbYztmUr6wLtmuUVksXRNXQoqkIZFub0g0pKzUuUJstFghK3LKDDd3HD/7McdXn3M4veSzz17y2Q9ec/viJePdC/LxhjgdiaOzIU73hiTeewcKShUbC+YpY9SvOOAzx14gKC0H4unIIY/I4UQ4jrz8/DXrF5/z8Hf/gPd/9w9YvvopZbkn6koOzc1mnTGIcSO7mgpr81qj3hPJUzlUK4vujssadhHUlvLprKV6lWXftD+Msn8RsLjqWT4ca3ud2T5d2zVIf9Kj72dB9pujC2ODbEZVKlBKZV2sv1WrywaKbRMQovU+AKm+wQXM70SgV9CtKyUu1DIbUGkzrUa/743SbGzXWswwcDXDvBiyCctr8HYR0QBKGAgho21BWyMOkWnIjEPcUjvN72GDjWHu96DhJbLa2bT24fVxhuZjrOq+muv58c33Ra5v4yBhz8T9SY6neqjdG6mDaH98g2DS/+cVdc2k7kJEJUPLpokjMQ0RkYwSaCrGrreGluqlpubLZWPSwEhwzVbbMZZqpXIo3uRTzYQvBUHFWh1IMo3g0gRtkflikzXnzItXr8jjLUt95PKwsCywLg+b83Y3xfQuF/bd1B28/T514NQlDFG8GCGaRcemy8SsKcJ2XZ0l8nm3gRPdLjKdk/wuM/L7DVCWwjJfmMczaUkMU6bVSKuFYg1yrqZPvmmZoHAxdNtz5uAaA+vwOq8L87xQil/k1jg/nqnLQvPOr6UZ62LKZiv1nC8X5suF8/mRw+MDl8dH5sdHlvOFu1evOJxOIFhZaGvkYeDm9pb54cxynkm7hmGGVkHU2nrXpdCV4CpebuybRkoDIQ0metS+wBvFmLxUORIhZnM7bcbO7AdMdfaklGJUoFf8bDS87kDHJ+7HBpp2z9E+Izrdz1VItUVTsqMCnf0IPqC12fWupRBj3nxqWtfRqLppXXQY1IOhpxsYzfvzoFvZudbCUirzciFIIA+2uIcglLaylIV4ebRyzGThQQ5CFEgpMg0n0mniMSZqqZzjBQ0JmSam16948au/yuHHv8LN57/O6Qe/wnTzkrvTkbu7E2EcqGK6mjUEFo/wgrgIunkJspvRCXKNbjwCQw2QKV2yatc35MQ4DsiQyVMm/Pg1/Llf4vGXPufd5z/i8Sf/F/c//UPOX/2UdX5ExKqkOmBVvMs2gSrGEMSUkSjEAKE2RCtBjXExXVLb2DOrqOsApQNQjxq7/Gk7W7Y5+JxBeT62Pnb01/T52+dy72i+F77/vRyddRVVBG+xIQpUWsF7r9jCXlshhuxCV19iY/Zy3uZspVkVaPHO5jRCa0S5cMkPhDhCHIkKMfs8aNBqodaV9XKmrQ00Mg43bhyZCDGQ00DKEylPSDWH5BRWhiFwPFplH5imCgco1RmhLoC07yzXL++BT88QXG+HXEGK3ak/8TV+ql3BGaynp7AFMfIhk/axNOCH40afPNYZvt0LebqLCtqsUkYlESQhbaDWwLoqQRLTlEgD1npCIvNyMUZWAjFmq+hzULIHbX2zb3Tn3kAMmXEQomZUqxG6iu1lDhaar2FlbZSyUuaZWt6R8pHbVze8evFD3tZ3BF0sGFsLTRdHJU8ZI19lPQD1x7b74H5hMRFC
dNM4KxbYshN+9WydD75+ts2LanfVNyfe7zJGvtcApSwL67CyLuuWkpFSiKGYyVEzgWzTAjRaVaOgaL7hRUpfIFSZZ6vKMT+QYALTGJEklFTRqrS1IUSzfY5KTNdyRK2F5XKhrquBmGWmrSutrGir1HVGUuLy+GgD2GnAOGRizgaiTF3kYiqbPjEkhmEwm3Q1t0fUtCjSxGzUyaQQvKmh1cVLDOQ0cZyOlOXI46VQ1xmlWuNA9R47IlZJVJu9bquEMiJqm9QeKT3bOrZVpFN72/DrQKCzHFyft0XNT6IvO6RHrD6Ba7P7G+OwY4jEq6SsgNy6a7puZWOXTIfUgcwmMHaAtMyz3f+5IDlZuZ4kqprmpBZzaK3rTGtWOZGjRTSH6cCPf/CS43Hk7fs3tHygvXsHaWS4e8XLX/lzfPbrfx/HH/4S42c/5vDqc4bjLSE2mBKLuKrBEIlTytXHki0cRqIZAGh9U1eQ5ve3QSttSwGaL1AkxWBCzpiQ6QapJ/TuhvHlLTc//Iz7n3zOT/7WifPfhstXX6CP7xmakgUSijalqlAarH4fRARqJSmE1kjqpdOtIq2hOzpOm1CLLURWCn7dLPYRbGcxt9ft2JH9hvP83x97bM/CwJUpff6+3/bYb2HXTe16/lEgp0hr5lsUJFKi9ddZV2utEdOEElECKoGUgjsbF+pyptSLlbW7NYJ6KmadL8zxEUImVCVWRVJmLcbqlXmmLBdahRCymQa6h0aKA+Nw4Ob0AspLlnBPW2ZyCJwOmdvTgWEwN9naNROtolhAkpN7t4TrtWrYemjp8y2G2K7O8/vx9JDtFtuvZXv8U1f8+b2SZ2vEftx8ilH72N/7z3nC2PWT0+u43NhmxZgNEiIjQmYtgfNjZS3N1u+YGXIkRhOTz8vIuq4IVkYcJDnIcTigphu0easeIOo1OIuJrNYt2MaFos0DOazNgooJry3VH1iWhbdv35IPgZwTEhIi5oelLbklhvVka5vdJBtoaMIW3G6XW4QYMyllC4KaA3DFbDWam1Goi8AVpKfs+1rbb5yyY/i//fG9Bii1rGipVNeELJfZoicCze3YrZ28ub7WptCEptXBRzCRbVlZloW1GAhJaeBwmDhMB1fH24a0LDPV9QoIGDNuA60UY3PKamBkvVxopdDblceY0KZMp6O7OAaOpyPl7o7z45lyWdClUpfFNlNJV2QKWG7YwJFodTTtRnUtEiWTYvLyZBPWJhFUIkMaGIeRZQ4svrj48KR7dZi1f7NrpWpUfqc8xTwzesrh6bHnS/bnewUsT6lhG/3dS0T06evBe1Tgi4dP0FKthPqa2vJoRCx/3nZsGF7xgHQhmPJ8ZqgqpViPnZ7b31cgiWAbq1+bIGJjSBQYmaaRzz7/MXevbpkur7kcJ9av3tDSgZtXn3P3+S9z+OGPyXcvGW9eMBwOxDETB0HHvvz1SLC7yRoT0fPUm8gXdcbLKXjExpViVGrDNEuhp1TskmZRwmD7X42ZcXrBeDMRTwce6sqsyvHlZ1x++seUd2/Qdaa1BQWKqplHqVpKshSi65RqWwit2nhU3TYr1X5v+6J0/d3T0bLf9OGJvqGzdj4quhmULXDX9+33cC+k/Vh13seAzrcST3YqW42dMkukdi1EE0vFpSFYNdWQqSvkFRqRGEfyeEBkoLZAVSWlSEiwrhdjc7VQ6kqnrrQoNTbWUojrDPPZLNIvK6RkrO28UHydCRIYpxuqNDPTE+uIPk1H6u0dtDtWuUXWlSkVDpPZHSjBU2J78eKONhBTo3jg7vdCrz/4vZY+0D64dHswuZ/f8uT3Hz7v6b178px+Fltg8zHw8fT1HwOzz1+zvfs2/MTjH6vmsY9JICNBB9CBulbWZXWzThPbpyEQ88DtkJnKxDLP5plUMat4rgC3ueW9ql4LF5xB7itQVawPk+AMqZ1tT/EqEKzD5NbY9Xx54N07kHDGGHgzIrUmJtdvu7Fez+5et6/YdFe7NGkp7hLt
bFuXRjR6I9l+HZ+g0etjfR3+jsf3GqC0Vim1mED2ciEPmVQraCDKSmvKuqy0DlCK5Yi7iZCIWGfSUpjnlRQj03Tk9vaOFy9ecTrdGEBBqGWlrIW2LJbWEXGznkpphXmZuZzPzJczZTWRaa2VdS483j+Q0+D2+pE0ZIZhIMkLokZDOkW4f/vWRE1t3ZBs0+aVNFZC3VzsunXFbdZROYVEFO8tBJ5XDv4aNeCi7joou7wnhnpFrO8IBGozzQG1WaVGCCbua2FbuDeWg6v4VDp7srEmnrIJshucfdP3igK5pi46M3JdjOw8mza0FFotRjP65MPV5+Il4duxbVbimhPdWIAgQgtuoFWrd0PGv3/015rIurhQWZtVXwQxUDwvF1Sg0qgpEG9vuY2/wuXFKxaZmO5+wPDyh6QXnxFPt6TjiThmQhZkEFqyyRuDgWX1aD+603G/FuYlIG7Y1LwbqT1nwwC9AVk146hqggkSMATrkFECaHaKeDgiraKvfsTxVxrT61/h4fiK93/4d1je/Ix2aV7m3StTggudcTBVTX/ypG9Q7cvvk8Xf8tHypHnZ81j22kxyVw3AdZHsa9t1RXWwsDv6vdszLx+rJtmnkPavffLcK9q6XmNfXLWPdTGn6KCuQ1G7Gq0JWRLDeEfMBw7TLRJHahPmpSAxEGKj1gsaGpWZ2hbXMgRvEmiAsJRCuzyia6W0BNFE9lortXiFXc7EJLampERKAzkPSFTQO6TeUuREqiuHVEjRgG/ZbqanwUUsIAqmWQspeqW417E8jyG2a7q7bPSNf7thT67x/u/veux1ZPqEkX16Xz8GfD4cB8/f19ih/bn1Edm0R/zePV4TdRVKEZSEoNQqFG+BccjmTA2WihcKazOLgVIbOFDpKWhFN9t46zXGFpB1t4TS2UGEq8A2WPUWmD6pmrHlWuH+oZJzcdF/1ycGomSgbkUO/f23NL946OMBQi/K2Lykqm7gxLCU0r34euj0dJY/nemiHQLrbqb/4uN7DVDWWsnF0KzIxapphpFalCDBLOmLGWK1WqnFmzO5/qFTVor5mYyHE69efsbdq1fc3r3kdHNjXWub0JsHarWKBxSverF8+2W+mHblcmZZzpSl8HB/77m4yLoWlnllmVdbSOJkIlk5oG1gXTNLgXktUK5lmiJinhl1oegMstJ9JppGAiZizTkTo+lQVI2aK7U4iGqU2qgbI+Ilx61amaGoi7XcVbcJFHserZmNee9l1MI2kINPmoZ42VnXmnTU3HZj1ae9R6KmUyiI7qzyUZBgcyiKsxYNKRXxKL61RqV6ddG1HDXEaMyWi0zFNSrXyiC3+DYbULNyh023oG6KVJpHLmkgaTUH3loIeUAQSjWN0vv7e37+1RvqkIm3Rw7HWz6bbilpQsY78s0N4XBCDkc0T6Y3Ua8CESUnKzcNTdzx1X17uNLLxqBd9VN986y+sAqbrpZWG6UW801uVrllXYadfQig0ap/ypDQ44np81/jtELKd4Q48VaEyxcrtSwUrYToLJ59EgYaZQMqtRkobV6ybtoiV1B5WwHtzcvAHH07+NgB1muKZje5n/xbN3arsyqbWaEfzxmU50BFnYXZxZBPXtv7k4gD6s39Xey9mvSEq28WWCpEq7B6VZiqdbFWhBAzxISbpnhgANbXJ5DGkbHdULVQVkErRCzf3897LZVSFksBB2NHQkomWHady5AHcgjkYClpDYMZyeUbljhaSboIMTs4czAsUWwuC1iKSjYgKn59NyoTtoifDli4ahdCX1N87vcWZoJL47exexX6i7Ddc7v+V/BgrUKu2qL9/Wx6rQzZ3+uP/d3vbf/7OiZ2j+GMybOhZ+yAj1/11EYxz6OlgITMkIwZm5cF9UINEfMHMqax21EUK/l2BqUD7144UHRX3t96iqQzEdfzCl74AJGlFdZ1tjGBpYOCDpSitLpc2WYJpDRQSqVW108JJubXXknn5dRqrtsxxc1kNMZsQaF4lakaWEHd
ebzp1eZedPN3EcTX/7ZtAaH5vvVMbP1Nx/caoJRmxjQs5uAIkWUo1zyaNreHtnIr2SmLVW1BQCx3eLo58erFK16//sx6nZxOjMcjQxph9RJLXc0uracdSmWZV0KIHA4FDdazp9TCcpl5+/UbLsuZ0swwTVVNM1DheJgY8kQbAR1Yl8D945mHx3vWOpODMb9LP39tRu2J5Zqt+XsEuUZPKU/ENGwusLVce3fU5mY9mKdLit05FqcNlZDEVf+Ntdn1aiK+a1uHYlpHy+HK+HoUcBXDgi9pfh/8ovcfFFOwNyuhu+4zsm0I9rZepYJpi7Ss9t2s7OTKzjjit0qXqxjtmmK6nmffyIKD1B4GtLZa1VYIIO6lE83bYPNZccZlWWfeP7zj3dtb4mHilBP59sSL04lwuEHHG3S6oQ6ZCiytQVOimm4kej56i0x9rNoVMOYgdLMzL7HuoAXwKApw068gQFBS8PJPMfalaIO227QxcNNEyIcbhjQwtUyQkdrgfH7g4eEtZT1v9HNKgZhHQhwYUiDUSFgqUtYrO0VPF1z9D/ru/iSWeka7m0eR6xt889vSMj0JKW7Rvr1F7zwru6idDah+6uhjULezerpp9aNxjQXF9QGIP+6bW2vXodyqmkneAusqlBpZNJPXxmUpxDwZuHVPE8TYyZgS43igaWFdEmVZzW04JhrRZ4+d2ziODOOJHAcCSl0utHUmxMQ4jAzJhN0WKAQ0joQ0mqDTgwjbFvqqoVcDuO269eTCDkTsH+kXQvs8tfvcgWdnUPtru03+dm37OOnX18dIwBnZJ8TLjr36COj49D3+8DnPQWy3nbg+eP2e109nCxYaNvfXsrLOhbIIawkG+FN048eVSKSUBUuf1c1AdNM4Nt0SLXbd/A77yTQ108ltQXBauYPuDZjt2AtjSQoa1g1QlSKoLuDd6ANW5aXh2sOp7x7d06qLo4NbUlifBgtIJQSaSx/QXnJt+2n0buS+YG1Xr4PW2oPWrXzoOma+7fG9BihaKsuyoE1YQ0Er5jfiPXRa663b7YYGrgM2hICkgSFHhmHk9ubI7e2J03FiyIFIJbSFSLSNSrx9tYBI8/yjt6KPymEYyeOIRIs021I4HU+8e/+W9/fvWOuKFjP7olSGnDkdTxZ1SWZeKnfvvub+7YF1vmdZFwZPjfSoRyWaeY8j3hgTIQzEfCAEt8cOiVqs7LGWhXl+tA69dTXUGywt1JpaDtMXmiFn62JbC16OACVQKdRoLAQuHraL7+yEWJxkM2bHhNAXln63Wl/ltwfNhdPt96Od1457vG5WzZxt67paVNP88wR6B2uCsRAV8w8wd3fbNK3rsqd0sPLqoGb013ebVqqVEKdEiJiOB0HkWuW1pZFa4/LwyNdffkE+ZI4vb8gxmJHaaaINI0wjc4jMKMKKkEyEXCOymMmTGblGk8to93vp7r6u1PB+HFs1iJe3ItdNOtCICDkm51F123TMjdg2qbJWE6/OwinfMI0HYhNaWznownR5T7r/irWeqY/vqWqpwZyDzZPg9mRBtvWoO8hKX1y3Dql2E6X7JXRKe5+Ka27YR68iMINCbSZsj97gEU9xbvS+2IK+9bWRp1vMk43KF8Z9KqlH9NJfuwM7nfUH37h2Ebh0FsDP2b6PdRNe18ayNJYSYQXimTyeGQ8npumGNAxI8zRUM/sCkUBMmVoKoadddeczkSIpjxymE8N4YBwmpDVmhaoGcqI3KiVYarJ1BsetFKyapG7sloLNb92DPw8k+jXrzNF+K/Ex138XFDcb6/Pdrl9fY9V3on0zZPzebfdqB0T7R1na3QKm/b18nq55rlH5VNVXB6BXcPK0JD3nzmzu9BfOCjZ8k21KWQvrrKyLmh09Z2JIlGqGnuM4UlZYC150YZ2tu27DAH+kw/b9HNn0KT1y3l33fWWauqm4FY4GhpydLRar9pGGNvNqUl1BrXdbEHPiFrWu44hlBNSBg6oxRRKzOUNLQLHqT5PBGvvS
XKjbE7rbdd/mw/UePkWBz49PBxLPj+81QGm+aQmRGLD+OMU6fFZ3VhV1XwRJpBiJnq8ehoFxnJjGiXEcvWlWoZYLZW6IFoJUQquGfNUXUC/PW9fGslZqg4GJcZwYxsGMxATIxl7UZpqF8rhS1oV1vhDuGhFcbT1SNXDzovDy4TUP737O/PiW9TI782NRmpeyG2kQAyk4a5JGQjpYRHw4IWGkYM6l8zqzrBfK+khZL2grbpkl24KFC1+HwQDKMjdqsHRIDYpoRFoyJNyd4AR6vyMbnHvmZBum18d6yNk3oh3zoq26LTLXCap9U74O5VZXWN0syAW8hLBNdvBOvB6ZPVnQFLQ2al0Rd6pt1cArrUGKfppKsMrNLboMKXnTQqWVLgwVA5vnR8IyM7SVsS7kOsOSKGoahfGY0WFEUyAldZ2CbtFIaTifbSBCqwGTEMDd781/QGwBVcNhDkDs2kQHMimYUZLimh3/6kXb1j+xLI22CPXcoAjpEM30dBqJr16THn6Z8eufUi7vmGujzmeW+cw8XzikyWze60JuCwlrEqhq0Xjnd3zZ+giJ68txB/lcKwZcz749b7+B9NSX7AcDfLBZ7St2PhBYql3TzqJsYKOnMOnBXc/1ezmno5Wtu7l/N1VjJ9dSuSyNeVaWVVmLdxyulbZE1nKhlJl1nRnGyaqqglgvo1aprdCKpYlLMQ1CitEbhSY36pvM+ydYfx/BS+olQUjUJuZgLJbabBjwVlWvRjMGMnu39s6EXAEfT65VB20dwPU7p8G1Kn1K+2ttTMr2vK7P62/8ROjvY3qbkx1wIuD6G9y8kSDbue1TPR8DIr9I+HxNIT4dW9tY2YCSbOdkjIRdERGFYAxUF6yWqpQirGsBAmkQ1tWeb+DGLd71Ot6NYXx2Xh/hEz6mnbkCe/HUSyTFZIwZrg1RxXxUVszM01hQdR+ZEKKNEbHnhlbR4sUKwUBMCN69XE2D0wWxTdmqh+jjod/XPkZ215Mn36Ev8BZOfWx1+NTxvQYoQYyCb2LpFW1GyVV3U0SEcRjIw0hO1q8ipUROiZSzd2W1mzLPZ97fG7Icx5FxGFjnkRSiGdm410OpRtutpbHW5vS3IHJjVFmQLf93Fa9aDeF6WQBYTherOLpt5BRIOTMdj7x+/Rnnt685v/+aR62UefFKE7Aux+wiHxdASSLng1HAwxGVkeAOn62ZE2WrM2U9U9czSmWrIVFjGcyWuZsydboetnDpKhuDDeUDPSKTdh2YPhl6SW+PDix4anR1vO0V5kwqJqKAvRfKNuGs/4m2RltXgzjqVS4CwdSq17O1sNEj4eaLjwGrWnv1U/BFqG1ARLxJIvSFA0JKRDwVvy5WNUYj5EgWuB0SR2lMy8x4OTOOg0X7IhzCidvjwHBzQ42RInCpjYfVnFibwFKhmKYbRb37rRBELQ0QMOGzlyEjeDmgs4EObPq9S8nF1Fqpvug2tTmx1kZZGnoJzPeFtjT0qEiKtCnT5IjcvOLww18hlTOEyPuf/4TH+y8o50ceGwwSyKIcojBmIe11QN1DoS+GfkN02/bZNi31PSn4DieG0TY/CDMr/DBlc23p/uGi/oSZ6c/qwObZ7zc9Q7/XfkIbQ+KApI+RLRrvokARaoN5qVwuhXlR1hVqM9/PWgtVhdqsgWicHxjyREiZGCN5NJ2IigdRHjRZx+1ISCPDdCTmI2k4mA+FeD+XpohEZ08zvYyZ3qtrp/dQH9smmbsCRBPY7zZoP542S5VnTr+W0g3b1PfNNVzXh9bn28ZKOcjYAgaQCmztOmyQKD5+RAiqH2zi3/Z4/tyPgZnn33vTS9hP9HURFdfnCUogJcijEoMSqiK1UddqzRrJROnl2v45re10NR0Y7s4FtnX2aTD19Byfpi6v4MQ6XZuVhnW4xu00BGVF3OzTsKKVLodkTtt9XDRVylK3qtSYBEnJx66dW6nWvLV6aux6mr5GhrCxj0/uQ5/kHRT7ve7X+Nse32uAkmNk
Wc3TBBot2kWtxfrnpDww5pHj4WgdhoeBnKwUF7+wrVaKrEgQ0rqwzGbI1daV5XKxRSMmRNh686yrl5hJ4HC8YcgDl/kRorhbbLXSzlrppXyijbosaK083N8zHd5zvHlBiBMhRsbDgXZ7x4sXn/H+7ivKZaZcTG9ilLDtZNLzfh51IBFCRklUNXt/1Esgk7JgzaPQGWkmsBWCN3/DhLop2vLmjFO39IdrpPtk7lu4aZG/9AHZro8L9FbxvRqDTmN65GQ29cEbOTpv6TqGPrgtdSAI1Z7bI1z6uI/2PnLF5PosQrpObUVr2QScVlFhi4+EuC3sva9GiJGcB2uwtq7U5hOZxpAjx5w5DYmbFDi0yqlWjrUyRGE6Trx4ccPdyyPjaaIG4f2ycL8W3tO4BGGWxoPCmcCqYr2IYrDgUc1no4kZgIUEGrzRWDDNEMEcILdr7NSrbDjB7mM35FPF/BgkGoB3MWuMQkiR2AaOL16Tl1+jRmHMk7WCn1ce18L6+GAus0Al0oJQHSAhtlDuQUgTfVK906EusLE/29am9mDsdApeIn99O3+dbGPoo8dH1r2+QW4VCv5o6wzg7vGeFtsbd/VqMAsM8N5XyroWHi8Xzo8ry6q0FqiaUKKJzIM5gmqt1LZQlkfzt8gDtY3knN0VVEg5WfQqYiAmjaRhIOSBlPYmX1Z2HiShUYgpQxpMHB/T9h0kBpqL3GMPsHxeaVOK2/p9KCbVTTvy/Heyv4E9EHHqaUsd9dfueM3gAYmxrpaWe7qWsGMG2cq4+6b3XCP05HZ/Iq3zTXqkj4lm+3l8+Nz+D0jZWGWtpn0camCZCxKaiU+t+mLH1OwZGn+zHdNj/7/2wurs8f57f5jS6i7J0YJlM1bxq922cNCoWF9D6e6uAzHq5rsUnTVesxlhIhBygJSoVTb32Xp5YF5XylqN/eqCsHq93x+7eE/YE1+15ckg+nbH9xugpMy6zhtNG3oPjGYXZBgGbm9uOZ1OZO/mKR3x4f/zvGpOmZzMS0R84ddmXTNqW2xRWhZmd5P1uWnRi4OGZVk5nE7+3uKNuwqilYiCVupauH/3lpgHhsMNIU2Mp1uGnGC64e72M07TF7yL76FdqKUimhwh++DcPrsvphFVNy2LyRC+W4EEGkEKMVbrvdLphgCEwDBFcgZR6+shLnjsQAfCNuHVNz2nJ7BR7ZGAXreTj+YgdTd5UAclFbxy4Ooku9OxaLUoQY1FQasBkS0qNFHmVrHj98xe3oUEzhd1EAWe+4/GdmAGVF1si4+jmBJpGIgKdZ1ZS6GUxQFjIEpjyInDODDFyKiNY1A+O028ev2Cm5uJITRCuyAhcggrd2NjPSYeVHlbGl8VJWjgUZOJWdU7gnq0IarUFWIGotKiu31GA44axCl99SWq9aHodtri11mNym2RVoV5MSPBS2mMowFtgjBOJ8bXP4acuJlO3IwThyB8nRPL+7fosqDr6pU7V1YtSzCXXlfpd/3BfjzsNHRPFn5RtgqbfmwbTB8z2xDSJ3/vN6LteR9E3308sBvHbMD5uUBy22S1C7zVWYvOxJg4/jLPXC4X5nllLdA0olpQEk0CASvrVN99q0RqjVjjgkLTgaiJ4N5F0XtThWQCRTzCVQdFSQxYU00jYNqqhMRMTJkUsqUMYy8QMAAvMYFG20BFgEapPQXgrIX4uui9WD5WHSXB9UzPInzVXvraN3q79z2Q2OuENPTbEa/P9d/h96WLg/tt22sGPwRUT499Fdc+vfdknOzPR7ueyOeTj1mxJ7CW4sDCmN80JIIGYlNiFWIQahXmS6W1xYOBsAGUPWtneqZwZU0cTFy/k3bi64Ojp6ZEguvUeuFElzJY4mmbLdLrzIIH2GHT5YUkhBxMrxYCMVbWZCXLYYgQE6UEtFm37odlYV3rJqgVrJ9TSAKl+lxiO/HnqTO2X/U08Ce+5CeO7zVAub25NefGuaDC5t5Ha6Rx5Hg88uruBcfj0QRLZUWpFoEAwzja
n2HkeHPD6XQDYDlfv7mqyryuJFWQGZW8DfhSVmptPNzfs64rh/Mj83xhmg5ITOab0q4mYEmEeVk4P9wTUjYB3emWw91LogTCnJmmO46nV5yO77ncr5TlbMKnNjMkw8gxGjBKccDszjOSki1GXiqspVi5qKd3khSmKW6l1QRFcmTKkSEbqFtVSDEwDAMB77yLmcNprXTudas+dCBj5XM28UIXQuwBygZo9kZaxmgQsr1PF9p2atCfY2xOo0nZQqu+kKpC1Qops63Kq3WO3dZLX/ysYsTewNgRq7BZq4HbKKbh6E2+wMZTbYXLfNnEasGrq7pT6fFw4PWrl9y9vOF4d8sPXr/k9csbSl2Y3zxyc3vD7cs7PrsdkDGyBuUM/ORh4TA3hhXelMYlBkpT1ob59biwW4gWoWUT1Wo0PRIVtCohKCvNUwe9TsB3AlVEA3VV6iKUC5S58f7hgcfLGTmO3KVb1qUg1aJdzRN6uGMMgUOORC1MQ+b81Rfcf/kll/fmTKoSIAY0NJbtnnXWylOBrYDYte06IxHM9l4tDVR94VI8x60WJRaP4LY4TK4loU/i9ecR9m6D3WsTzGW4Pt18AYkGwE1If03pbADFQ3zbIC19u+6rM7SbVzVrNQGm23I6wIZ+IERA1OjyOrMUY1Oip5olRiCyXCqNlZiVOAi1RcbRRItBKs1alJutQLIeXDEkq+Sj995ZqVWJISHjhIQDqg/uVyPknFmK+Tl1IfIG3hyUSQik3sX2SbBxLbfuAXGvcOrXfBO69/m3Wx/YPff5NtXFtU/ZLn+fzhRuDMVVk/Ipoez+tc91Sdff7Zk9efKaK7ti7ISRJGpsNqZrGodAq7AsxbxOiFdXa72OZ5BuQWLnv/v230Y/0wFaKYWCmWmuLiV4fg17Cj8EE9GGaAFXCKBia7ZGkwiEGAktISmRDwPDcKRWAyjLfE9I75mOR8rlzLrMdu7+vXKIJralM+O7a72BE6U3rNLeOfk7sCjfa4ByujnxcJ4p68OGgBsQc2Y6HLi9uWEcR+A66bIEQsoM48jxeOJ4OnE4nKyseDyYw2zoOV+zJJ7LSqmVcbH68uQT83I5cz4/srgTrWrbqkpCsgZz1hsjkiKoVspqLExFGA433L5+5GWr5DTCCLx4wWeffc7l/ky5wBDPnC+PPD6+RVmQboyloGq5RYvynJwQMTBWvSplnc1iX2ZSdGOpGAgpELMwJIjBIjVLRfqX88VJgtG+QaoNbo/qrgSiLyofWxye07n24JN/t52Adjv2tKvY9w2t2SDHKmvExSGiXVwarLKgFNtY5BqZbR2VW7PUXjTGoDVrvicRux7RGgWC+1qsK+syM1/OhFoIEcYUmKbENGRuT0eOp5MBkB++5nB7ZDxk1nLhcj4TgBfjHT+6m7wSTCkBLgjxNBKkeLM2gdq4FNmt7aYrsBtjEVKt0CKEhHlHNbxjs6DJN//EJjCUKgQNrItSZ2izst5fuP/6PZeycH+8J3SPdWumS6srYxoYhxeIrtz94MckLfy8LLx785ZFG2W1FGZQ9Z6cFvHWYgt5jtEXQ2PGbNkWK6N0d8wNrNjlBmnOBJlvTwc7ffz06LTf2ydDZR+ofXD0CFw++L3i1RHYxtP2LR5E3I7cxp1V32CeRA5QbHzXjf0T743UEHA9gu2AmUBfyM0NppWKNhN+lyWTYyakZAFQHEkMSKxIMsZGdTWKfnPrhdaEpIJocDLS05WqlKKczyuxBW4OJ6Q0LqVS1cThnSUMfa7tNv/OXlVfz2SzuFfXGu0ZCOH6Fva6INsqsmO2LI3bwNtr4M/rzIylNPdpQXi6eX+MOfsmUPKp3/XfX0EoDgLsrPvnXBmZ/l42D5FmbJeD1mVegUYMNj6cbPjYpzqja+B8L2a3jd/ZJwcBNq2cmfCUUSl9HOkmU+iXN+KAm4ioAeCQkgmuczKA3FaaNjPjrELTRBwSeRiZDkfycERkQlDefK3EmLm9fcGcEo/3UJoFiloK
VeIWxO/TWn5jrn+LJ/3Ey5m/AZA9P77XAOU4HTiOE+eHi99GAxU5RU6HA6fj0RxBS3HvD3NxHceJ29tbbm5vOZ5uOR6P5OFgdGvKiHujtKqUVsleFVN9UYohgDTOj/e2cD5a3bu2wrpcuJythDmlhEhzM61OL5vg6HJ54N27N7x4/451XTiMBytFPCi3L15zc/Oey01xy+wRpTHP700f0ywPHUImD6OZ93TKWgOiEW0BbbKh2toKLQg5mREU0evuXZmtWjdk3AV3KsY6yJZGCV4bqx5VfBip2nFF1NepuqNfN4LTS4afcP701deomi3w8qoK2YnYonVvNcGWpwRa8bcSq+X3VIlgWhXcsTXEgFDNC8fLcJtWmpeBW7Rs3a3RSozCNIzcnkbubk68vLvleDowTiPDNBCnTKHy9t1bck5M48QPX73mB6/uOA1mR42YV0ussKhwqoGbCrNG1qo750pbQPpGhPpi2VX01UsxcZdhFFkbtQoSFQ2+uVSgJpZzY31ocKmc373n/qs3zGUmamV9f0+O0SpBAAkKQ2RJgVQbEjPEzNpgLivn84Xl8cwaIzFZnyrcTlub9QKSGHtNBDifYoG1d9AWE1FXhBA6CAkOgNnusX31K/oI/THdDRlk++E6Aj9C/8e06ZO2UfosGr9qqbysHDcwFNuMSmmbBm0tlVoKvbIPZEsXJIHaygb21ZmLq5arOaMjbgS4UmMm1IGYhJxMlxa8qsL6cinq4xKsx5Q0pVa1QEWDRcSSqDSW0ni4LEzSiDcTKRnT8/j4YEFHuKbRZDeHN1CFsVtVTajNxkLIFs03nB3w5xuAeTrbLWV0Fd82rs/ZC5gFtrFh5cvX99zfr32H6n3a5qNMwrbmfvhe27jwTu3ic0zEqgR7cKVeMRe2NcQ5HgnG/o6CnkZUFuZL9XvkgdBztmab0FdQtAeFuo34/d/9C/VR7Y8L23jrTxDpmftICHlrIpvSgCRPQXrvMsS0M+YDNhLzgTTcMoy3TONLgjTevv2Keakch8ThxQtSCLx99zWlLNun2vx4ylCFEDbWevsu6mmo7wBO4HsOUIbRyntDCBuKlRDI6cA0Hkgpb+W5QQIxDaRxYDwcOZxumA5HxnEiD/19EmkYUcQaJGlgrdYvI4aIZo8oBLvZunI+J3ho7jPiAsR1IQajbEMwEWoeMsOQyWNifZxZ55mH+3vev3vL+fzA4XTHMCRaEw6nE8N0oBGZF7NJzuNAbdm0BFgpIiH54mkGO6U2gxaSSXGyxmH5gAwjWhdCbITUJ06vmfdqKJ+QTfoGEwlqxmQqQmx18wWAtjFW26LuA7Hnl20c7oDHs+1j+6kjmSeTWbkawm2PXJ/fwY26ULRVp179dRtYY3OkNFAmzhzZfYmI+9a4/qMUWiuo07G1VBNPB4gxcZgyr17c8sPPXvPi9sTdzYmchbXMvH/3hkolp8TLFy95+fozfvyjz7g5TgTt98U3aYSsMGpgasqhKuPauNRmjrOYWLf1zdy/t/R/e1WGqnrJoKWeilrTPg3WUynURF0L54fC8n6m3j9y+fpLHn/+c1pbefPua96oMsRAQAkxMEyZwxhZYuJYF9r919x/+SUP79/z/p39WR9nAzU5EXIipoQ2qw5LydKIMdgYC6KkaHSKYJUNofkGEzoj1hd9i7a6duS5FgIHof4AncN7tjVt46bT6B3z9lJWdXagA5bqqR91Txd1JmV1r4icBpLPEWuN4aXE3RtHm82XreS+D9gO3iqtderddAideeiAQLuZnwSmYeJwuiENR3Ka3NvIUlRNXTMi0fVoRimFEMlxQNWp+zCwFPHAIzEcb5AozN5MNYiL+GEbV/uNXNkHVf1xjxe0axycce3pVWFLhTw9fC5LB6i+kYm4gzVb8BH81n7TNrZtgn480Xp8MGaeMi/733WG7soC9VSUtVNoPmb28LczTsXZtRQnbm4GVBbW9X4DDVsa0oFFr1J7KpJ9Bq607V55BSj9rLWf
U79GuyVT/bqLB4fBaGFCzEhvk+Bi6RCU/l8MmZwPpHwkhiMpnRjyDSINrYnz4wxl5UefvSS9EC7zA2W9EHMmNHNrZwsyduxWv6bqgeFm7PcMeP2C43sNUFJMDK50ny8rOKJPKZOz6TNqLbaJx0QeR9I4EYeJkAYkZLqqsLXmJcfRBIYx0lSIEokp0FLa6MvWKmX1NuWtoFqpZaVRSDHCOIIbhEURNJqwKKauXSkUFdI6czk/8nh/z92LhXTIpCkxHCbGw4kQEpd5YSlnlsWaD4oK7BiSpoG1NmRdkbKSJBNDJMaBIR/R8YjoCa2VGGa3SrZUSCmFWqDVSBVYipe9ihm+STAQYytOAVmvEdaGhJ8xJ+hu5nxsIF7Zk+tzPkHFqi/4fZH0RRCwxaxHFR4F65YGYpsg18liG4ylccT74BijZSky9y+onao0HUeK2SCXWt+gISdujhMvbm+4uZkQUS7zI5Iax2nkRy9f8vkPf8Trly85DANdjNl841q0ocF6ncTaSOvKsAonhYdlocVsi0qylgObYFbUUzd6bejINUoMnurqVQNShVaUsihrKazrwvp4z/ntG5Y3X7E8vGU53zM/PhBbIwYlDolpyoxD5CZmDqKwPHJ5/yVvfv7H3H/9NeeH9+iloDEja0KGgZQhaKJpI4aCjplhTAQJ5geh1suor1PiYCuoGDDSpwD0Src/HRlC2CLH/r3bp4YO1yGqgNQrDV21bl4hwFax0Bx0iBjr5m4WpP5+YEAqBIuyuW7uduoKar4YAmZ+paDOQDR3st43+rTvERACUQJDHjgeTtze3pHHW1Ka0CbMl5VZV1gNdjVnJpKXLlvZcaRoYMhHDqdbVCLv7mfORbjLEyk08vlAKw82B7w9RE9/7q+H+Bq4fa/OBvlnhw1A+vf3i7Cl77b7oBuDRgfcYteg+l01TZl11K368VWjX+vrMHnqf/JNlTsfe/32swO+GHwt2AGRvbutK6D8T+R6lmL70CBM40oplqJ8Ep85EHoShD0DTaq6G+tXFgX210Mco6szp/vPsLHZtPsh+auCsd7aAb6LeBHrvWYO5CdSnIjxRJAjWgdCMK1mrY3H85l1PnK6mbi5veFyedg+M0gH3Nf51kzouH2/zvjQQeD/vwCUrqncXCL3EYBHRq2ZqNTGYqQSWJtynheQ4E6AjXEsjGoVDiFlhqEZeBFlyFae2VpDmvUkmC8PvH//hvv7e+blwroa7RVTRJbElDKxJdTNcXLO7sMSGMYE1RbhVivz+cI6X5gOB/I0crw9cffqJae7O8IXP2U+L5QyW+6uBZ8gAdQG0LIsFC6kENE0WKO/FghxIKaJnEaK24mXnTNhWRaj3cjm39ACiEVfpAHRCs0iy4YBIpsJO7AgNpkton/qKPDx47qoaX/iE7oeUEuhbU/f7zb93dVEt926X3dR1D767lipbyQCWxVWqdVL9cTSYc2jqt6QMCa0VCQFkghDCowpcZwmTjdH4hiRMTCeJn75xz/ml3/0Q37pBz/k7nhLTAY0XM9Kp2CbKKUtPDxeuH/zwHpWtNr94rJQQ6QOI5JHIBEku5uqVelYFO0dmJ1Js5y2aWrE6WfbADMhFi5Ym4ZaL8wPbzl//TMev/wp5zdfsrz9GtbVWIAsHKbMccg8hkRSL4+vj5zvv2Z5/x6dV2i22Vbr3U6pQhYxti3CQi/VNhalihngmZ6p3wvTq8TQvUSvu4GqEpyJ26/pm3iyg5cdOGAbmlYG3LbI1DddemWFUfC1NppcwUithdqqsW4OhKL3PgnJ+uk0aVsatEfxoV1LRWmmo7F2A9E3Au3kwTYnolhvnhii91exCrw8jGZdPwyMw8Q4Hkhp5HIxRnboxV6YZiAPIymat5OZtwVEA8MwMU5HmgTePTzy7kH5wWcHhunI6a5wubfO1AG8GaqxKZayUkIpzspdN88nIKUZwIgx7DQ7ntbZGGa/KnJluPZzsb9vZ1Ogp5h0GwqfSt08Pz6mR9m/
bi+u/eA9NmJON1by+vr+fEubo82L/QLESMOKEta1Ukslud6j6fpsvfJtedOmdBfk/Vb94Yq5v04Nux+mXcHT333NewrjfVF+xmYYo2+TpDNnCcIIZJSMkEEjtYiteWlEJLLOC5flwo+Or4HPuL9/y+PjI1rjE93OPmXX53QHWnsW6pt2h+fH9xqgPD4+cP/wyDwv9NIG20Sv9sIivUSrscwzSy3oWqnrwuVyJoXIMA7cnG45LAtVIeeBcTqQh4GUIpBoLjyV1ljmM2/ffM3br77m3lM0y7q4ONANk4L1c0EjgqVJQo7EMTPoiC6VlK2zcKsXqCtBTXyVx5GbFzfcvb5l+tnE/VlYLxb1SVUbODET0ggSKaUhzBAzgYVGRcQax4lEmgaWYkZnOZn3RWuNtS6msRBFYkYlunNpsw6tYqClFTc5c4vsDaR0bwUvbbXFvqP8/Z36ONvSBbZ2bKuXbcj733X03+ehT8zWurLezkNcZ9B3NXUPEDfMtb4SYG7Dvhhvjpj+5r0/UfAWByLGuoxD4jiNHMbElAPHIXI4Tpxub/jBj37Er/8//z5+9ce/xGkYSf79C+b+bz4tJmS8lJV3D4988bOv+PnP3/EwK+/nyptSKSFzRpjHCcYbJB9I+YQMg+lKmo1B9U1QepQn4hXhBlQMz/gGWRttvSDrmdRmYrlHH99Q3n1Je/Ml+u4NrGfQRo2BdYxcUmSprnlolVIXyvqIetNNaULhav3fFHPpFXPzrW1lWY01TDFYP59ki6MJr51jaJBT3BZSJ4l8dJg3j/bItVPGPe1A31R2Czk214unXjbX0L4tNNcZdctzrr4THbgErK2FjR+edNsWT/30ii7x8dJCTzP42Kdflx51u3/RbtPzJyLSWdqBnM01FlXaWmip0DTRXLweUyQ2c5FNMRNIHlBkf38AAzwxBmJUWpk5PxZUX3I83bj1QGFdzlBtUx1S9uClbOtcbRWqM8Slt2GwcuPmETOhg3n/agHrJyR4MsG7vvh8MABgDFoHMr3yWLiWEtd27dnT789GCOh29XxpuIKT5+mT7bx4Cpro91x1q9jrhMU1oaMmdJawjT+wgBWuYva1FC7nhcvaqEU9pSPbWN/eS83ht4ttRXthsL1ft/bfmvftaCn1SaYdDCtYmxUXGdOrr+wLa//sYGuprWnBzA9VjSVvailC8T/Bgvfa3G1dYEiBGBuFhSbK8faOPE2MP/spj48XZ4WtMsdcyXdEgezOvQNCEbcU2FdzfvPxvQYob9+/5es3byjLgvFNWKvxAEstpGUh52QLRSnIOhNTos4zKZnwLcZIypn3794ZyzGOTNOB4+nENJkI8jLb5G/VOhqf7+/56uc/4+uvvuTh4Z5lWQgSKEnIqUJWlrVAWFCSOanHgIwj+eZAFkVjddfZC2V5pK0PUF4g2SKzfMq8+vwVr796ycPjl9TlnjavaLBNIAyZOB6QPKIx+5hdWPWBHAfGMfqsj0jIaBXmy0yLzcqNMWDRinI+XxjHRppGc+VlBUZiE+bSrNOqCDXoZtBjE8VnqbKFR41rfvopldd1KrsF5PlKsh/I24zDw7NdlNC9YNouXdQXqL6BqVofOq/YqT5pI96tueefEVtGRU0gqrbphBjcGThuVWEvXtzy+tUtN4fM3XFkmjKHaeLmdEvIB87NxIvJLfhpdg61NB7nmfvzzMNl5c37R97fP/D4fub+4ZHzsjKvK+cQWYfJqrwWJR0CIhMxq3c8Va8yy5bOaUJr5ney1ELKgZQDlGbVJvPK5XLm7Rc/gft70sM969svWN78lOXNz2kP7+DyQGTBSmCVVc1LiFotJdEN9FpPf4TNsK3nbLSuVvLqC2cpylos4rO2EplxGuxWtu4vIqwi5JQt/RmMcjaGxTdCfGN2qUXT6iyKLVt7lsS6jXvn7p22pLRuSta74O5y/KrUUjfPClVLyXZ2xMCDQrSS8hQhBag94vYeKKHbs9NIMYFayXj/zCDZBK8S
aS1ccYq77za1e5nUuDAtlfl8hhpZYqGRKK1XQdj7BB0QRkKYEBLiGhcTzCsiBeFCiqu1LDg/wMsTh+OJGIQ3X/2ceT0jNaDeniPnAZkiw5iJMTBfLjzev+dy/0hZFwK9pYJQtCBE26Q8gqZawIViIAm2jXcTSPZ57nOvl+RKn8aYMLenwXqqRbuQ1v8TCZumw/pjGrBUv659tQg7d2i751dYoLjIs/sjIVfRvVohQmsNCV3EatOhiSWumhaWqsylmBdOkyvgWQtVmzMlNqasFcdV5BrADRPbblSyNV0UrOdSg97uZgPFT8GU9KHkuNdYxNCtCpKl64RAqyaDaGCeWe5ULSKUBqMkUgzm39TOCGeIheEwMJ1umCRyunvJ12++htjsnnXZVevrrzG4VxiPW+67XxXf/vjOAOW//+//e/6tf+vf4m/+zb/JH//xH/Nf/pf/Jf/8P//Pb79XVf7aX/tr/Ef/0X/Emzdv+Mf/8X+c//A//A/5+//+v397zldffcVf/st/mf/6v/6vCSHwO7/zO/x7/96/x83NzXc6l3leqKtF9hLY3PG6HmFdV2o1e/SqgAaSsxzgUZlXI8QYvcPoyOFw5PbmhuPxxOF0JKRsaYxWWecLD+/e8+7tWy7nCzTXKcS4qadjHhnGyRt5RR/gjaSZoU3My0r05HlthWU+c5kfWJcLw2i9aYYpc3p15O4Htxy/mljOibkoWtTNyUwMR8gba2Pz3+pFgy/yQWyT7akA2zi9GkRssJRq3YJDS96XxtJY6iWhlWvt+xYZeFXJDujT/2mT7Pnd6k96xpj4Q3INnZ8ylvsnffC6bVX78H3okUuDYPe3U+mqav1w6AuXb7RNUarRuc2uo0hkGkdev37JZ5+95gc/+IzXL+84HCaGaSTFyHyZ+eLLL3l/WTgejxyPo3mlrCu6FspSeDzP3D9cWC4ry1pZ1pk6n1nnR5bLwrKslDSAJGIS87YIySZosw7E6sxO87rMnvIodTXqtxQWj3rP50fO55n58sD926+Jj4/Et2+4//kXPH79JXV+ROtCFMU6LVX39QgUMcvx1sygSdwHYrsLnZYXcZDXBbxdrW/PrbVdNR1u5tVqdfMo00uUuiKrsXpDTuQYSFFYi5fseyWX6UHj5tPR0zWt9Y7lxQBLuzYRNH8SMxK7blBXMHJlWdwbyL9hz6tH12s1tX5DvUGh6Wp8NG5s3U5ouXFylobsEbF4yqi1rnvrFXK2QXegZKXn1XyWFqW407D59whDGokpkdPg899SCxITRPO7aGo+SFoXtDWW85l1nrl5+ZIUA5fzmcvjzPmyIBpIOUGw6R9j4ub2htPNDSkEailuo640XRECWdI2Jc3h1tN3u7mnXCuvOmjYp+VEodUrJSJ98/KNWELXgXhw4czqxpr09aivAS5Af7IeKVdg5EyMsTXeZLR/Cd0FOVzXsd5bTdXWXRUXXzvzlRRSNjakVv+OCqxeTCDGjjRfd01/1demrtC5rm0fbt+6sdLoVXvVz083SkWulFPfC3p6x12IhUiIigQzE4zRZAcxT4Q4EsNowFxXLpc3PDx8hWrheHvDi9evyccjh8MNv/Lnfp0vfvYzLvfvfR3YXTC/xp35VDDGVPupCR98xW84vjNAeXh44B/5R/4R/pV/5V/hX/wX/8UPfv9v/pv/Jv/+v//v85/8J/8Jf/7P/3n+jX/j3+Cf/qf/af6X/+V/YZomAP6lf+lf4o//+I/5b//b/5Z1XfmX/+V/mX/tX/vX+M/+s//sO51LzgYqyrKAYJMJX5hqoUqgFlMRl2bRUkfUm/FWso0rpWTMRc5M08Tjwz2Hw4HTzQ3DaOXKQZR1njk/PFJrswaBNy/IOaMhUJsyDCOHmxsOL14gORnR2Va0LjSt5DSQ00BZrcyrtspSFublwlIWDjTSODCFiWM58eL1C+5e3fD2S2VpC8EdFWziR4tiGlahErzUMAq4cLd3XI5RvIeDd3fu48QHV1PTp0iw
zrStrrRm6R6C7hZwfC7IZsJ0ffwZyNgmz/74KPrYicR277k99ROoZVvNZHv+k0/zDYOmhGQLXKdEW2DLSzfUCSCFFnwRAlpjHCPHceTVq5cGTl6/4uWrW46HkTxOaEzmhfOzL2nxDSknpnEk0oyFKOb6WNZGW4p1E1WovfOorhQtEAIhjgz5SBxvkOFAjNmYKwTxXiiWOlKgkbxrd20W3ZZl5vFy5nI5c5nN/baWC21dCMvCej5Tzo+0ZabNM1KrMSNYTxGqlUdqCFZi680U1TfYXlLatC+ugPcwkt3Gr4IzUGFjMubLtaGfAWr1c3c9TcB0LSmSvT9WECFFF62ru9GqiThX71N1BRlPSxj3ugPdjSkTQF//3o+3vodVGuIBhM0Jc+KsbshWXX8TepdlB2zXsl0DHmZoaOwTaj/jVXdWDWPVTa0ZSGkeVKVUKGGl1QtLERqRhjeuDAOSLRrvBmCCvWdKmeBdycu6crlcDBAirOvC5XLhplVb40ZLJ90/Xmj1kWmaGKeB2grzunCkMuTE8fbA4zyhcbXxasWSJAme4bVzKb1RGFaNVMXSPNvGxH5Tbdfp6+OhM1bqqWNbW/a6MttwTZ/UK4zY7mFnTuoGAsRBkfo96ePC/95AULSAr9HfbBtHwe97c/bOmAx1qwLzy9IQ0BaQ0FhXT4UVvGTeBf679/Td+8oo7Zawzqw8fT7X77ljSejAT66vZ9Ob9Ctu5x+89F+aadmCWKfiGAdSHolxJMQBVfOB0nbm/f0XvHv3c4Zh4Ac/+hEvX/8QyQNpGvnRL/8SP/zDz/mj84zW1fVUXonVfMzv05m75fzjq/+nj+8MUH77t3+b3/7t3/7o71SVf/ff/Xf51//1f51/7p/75wD4T//T/5TPP/+c/+q/+q/4C3/hL/C//q//K//Nf/Pf8D/9T/8T/9g/9o8B8B/8B/8B/8w/88/wb//b/za//Mu//MH7zvPMPM/bz+/evQPgcDgxHQrr+ZE2L1QyZFPEt9JsE1CltEZpSitWHtujLvAOuH3xFfMJWcYL8/nCeRx5fHz0bscDg1fytFKZpiOH6cDt3QvG8YBt60LKI2kcCONgfgTrxZq0iYGpYRyZamEphdoWz/GvXOYz5/mRU6sccyIOgbXN3H12y+3rW9IhUaWamRHqfUsiiFkWe5U+XW2uWqnVxLWtLU8af7WmW968oa47Mboz4vSlOwQSKmb1bJR2XzN6VKI8xSXAFnF8u+M6bG0O2mwTdu8h8uFLnmCW3Q99onLdMBCP5Kt9F3vLQPT+JaX1FEZ3y71OdFETch7HgdNxYjpO5MNIGAfCYL1XtCllXphXS9UMKTL4bmopr2g6A4VWC7UULlq5oMy1UEQJ05Hx8JJ6vEMPt7R8gGB+Iq2Zz0XfPNXTCTFgnbXnM+ob0uP5zMPjA72nELUyBWGIPj6bEmpBlwtaCtQVpeAufwZI3GNlq9EXbKHrnYWbay60h6PG4jR3YrUS2N43qrEubqUdrmmzFs1bx4Sd1VgirZQaqdUiu+AmcF0LYhuyubHOc7G2E736pPtVBCFKt0X3zaA2L4W8MiidObGMQ6A3sNsDcTPl8g1DfN3on+dRYuvjrW9uKGlXxtndaWnVUv1iYtagrgkQ2+RrrbCuKLOnsDIhCqVAk2T3rqedPJiyIW+C3BAzQx4JQ0aiASKcSa6rUpaVdb6wLAvTNJHSwDQeeIyPLOuCBLHyewSVmfPlghwPTMcjp7sbNBbqGtC4Ui42RiLiaTyf8zQ3P4Swmdb0YKZzSd5Dhq6jApyR6uzmtgfvom4JAWIghbhN9X6LQ0heCdRYiulouh+I4ZO2ve+2UvS9s6dQ0Kt3CzamCUrTYqkJt89WB4Rd8CoqzmBZ93pjwApb2liC6fn6+JDeHXhbSOmcTS8RVrkyEDua5LpW9rQRHejvl0YTYAeJ7optfigSBgvYFUQSgUyIAyFMBlCC6ZCarjzOb/j63U+Y13tOpyN3Lz/jePsS
UqYA080tv/pr/w8e39zz7s0XqFa6/lCvF/a6djhQFP8uH1omfvr4U9Wg/B//x//BT37yE37rt35re+zFixf8xm/8Br//+7/PX/gLf4Hf//3f5+XLlxs4Afit3/otQgj8j//j/8i/8C/8Cx+87+/93u/x1//6X//g8dPNkZuHwuP9Pe1yphRhGI7GMtSGiKH6ZS3GrFTdbMo3gd+G/C0KqilZ7rqszPOFeZ6JMTJNE9M4MuTMOI7cnG549fozXn32Q3IeqRIZhiOSBqoqVQuXckbvoQQzeEshkELmfL5sDISV1lXmdeEyzyzrzKRHhiFzPB25e/mSm7tbjrcnwhApc0NaYfSobdvc1VIxUbxXQyu0stDaQq0rpdhiHtxoYBOIdtrQZ2eUYCZh0l1xTWBsGo2dGKs9pSY/OGS3HHwjWNmvNrJbma5iyOdP3ZMm1xSNbJulTd4r0LDFupq5mW+2OSdyTjaPSvXuncKWEkqRHBK0ypACp8PI8TAyjrZxlrZSV1scqUKjUIttRLUszK1ZGkcbaRgZUjbWqxg4fdDGEhMXEus4EaYXyPEOxhtKGmkSadoIazEGji7stCgeKTRp1Hnm8f49j+/e2P2NkSElxsNEQKmXBiXY8958vfXUsTy3bBeyi1TNS2YXuQYTuKYUd4tpZyOaA4BKKb2ltBibWNuW4unC5T7nQqjeIC9ZisP7FjwRLQchaKR6OXjEgolWG+tcmFervmutbmOhu1pK6MZpbCSb4amnjSSv0bduQ1R9LIoYs9EZlLgrw+3nSbPz7J9tX99ZIoEmxvps7I529ih6vx57viqUZmMjSKGsKy0XRMzgrZQZDYnkgUmK3bLAekkFB9shm+W9SkGb9QDr51tLoSwL63xhHEckRtIwMA4DrRaWsqLnR/KYySEzrysHOTLenLgVS2vMlwcqC22daaV4qlmcjvD1lOal39G7lDeCGExvXMGkblG/rcHdKG3fc2eDDCKbpw2oaXCCfW8DMMHGqUKSlSLFmS1nUYKZk2mtW9VRB9CrMyPBx4v0cSRsGhLVZr5TvgapWgl/o7JWpRRr6llrY12LO/86c9OXIRxs77+Je8AIbHq66zjULUEoviztu63352w/91jOLq+J00P0SrGMhMECB3XfJ4moBraK4NCIoiyXe96++SO+fvdTmhRevP6MVz/4IWkcqGIi/1ECP/6lX+X9l1+znO+5XFYHkT7G9fp9+jXt3+rPnEH5puMnP/kJAJ9//vmTxz///PPtdz/5yU/40Y9+9PQkUuL169fbc54ff/Wv/lV+93d/d/v53bt3/Oqv/ioxDRwOBw6HI+X+HpTNDwAV6mptpEtpFO0D4brQyLaJslG0qsqyWjomFnONTBJYLheWaeJ4ODAOI+M4cjrdcDrdEoejm8CdsM7CwlwuMGdqMUq6IGhdaTTG4cg0WmMxFStxvMxnzucH5vlMbTdImshM3L14yctXn3E83ZLSSCurVU34RtVFZUjDZWk0LQSdKXWm1sVy0c0aATrjaYNHr6JA3DLbPBGKLbhRDNTFZur5BrVfs93qfxVH7R1lv+1Q3FMhPTK4/vT02A31PehxWnhzbwz/X/L+pEe2bcvrBX+zWoWZufve+xS3CAiCRwTxeC1SpEQi0QDR4SPQowUSUjRo8QFCfAJaNOnwHehAHwlI3ksyH6QCorjnFqfYhbub2VprltkYYy7zfe69ESek18gjTNrn7MLd3MzWXHOO8R//oqf8tv3ZbgclWK8HoxqFkbNyZpyErXXymHwj3sEYHPMYcA5SishPjNK9FfGjscHjjZAcY5LPHqDFRkqRHAtrTmzWsllPsgN1OsL0AMfXlOFE9CPReApGCp6SsLFCc+TW2FomlUwqGyVHao7E65XlemYcB47jyOF4ZBwG0rqwpI3t6ZHHn/2Mr//wf/D0i1/Q4sagDP/mgqyZKhyWl5+6sVaCxUK4eWZUUXC46nDNibRY5bV7sWPlV81VlF+dp6HFZKmN
lopY61jpqoWQKj83uULOqGeNI3lHqF4yqGohlyafTxOSsLO3zd9UkcHuDshGVAa13DgBH6+99uL3v3q59e67W7hba4lRgzyNZjfp81p9P631AuLFMyva4owQaWvrmCf0MURrskflHKnNUpulNTlQ5Ns7CVED/JoUhDKqlhA3caveZGynaGZMEschye/gnZhH+mHAqxllzkXOLmNZt0yuhjDOvB5nqJYPybLZK84jo0v1daHzfoyhGsmOEiWjFCUije2EUZSXYXY0oh/+VccPpnRFVB9Fm936PZeKN47RBUFAjSjkGoA1OBfAZXIplCbWZP0+Lk2eZydiG6O+K1Xlsloovzgj9lXSC2eglUqO0FpmTY0UK6Wxy41raWKH09e8HDzC49kR3rb/PP1CoH38c1uTEVJrgMWYJqRtbveT1EFNt+Ru4mf2tdqUeO3sgCFQqtlRo747GtvwrtLqyuX6FefrV2AT96/ueP3JGw53dxhvSGkjRkOYj7x69YYf/YW/wNdf/ZSUL3R7ho9GTS/WvqXtHjnfXcPzPVHxjOO4Z+q8fAQfBII8HlnOM7UmMWnzg5hw7bJCcWCkyaHcuhGRlYsOtwNPWNtqppYSyUa89bvFNa1xOt3pDSkdjHcDYT4xjHdYP4B1+LyII2uTTXbbBuJ6JUVwLjGOM80YUsm6NhslJ5blwrLcMYwT1nnm6Y67uzeM0x1hmEmxYVSm1cm3rQk06jwKE1ZqidS6UvNCLZs4fWo5Lwe1ZKZg7U2SDdg+i22VZjLWVryBqpuhsVYg848e/Sar+yZ7+7vvAud9fNOZ3XXw13357eftDyNLvzPUb1/a9lFNqy+KGKRzyxVyEsdcbePwthcnDe8twVnmceAwj5I6bZo40BojXUjv5rTTSyazlZWaE61kXBMORa7wVA1xPNCGA21+wJw+wc4PtMM9NUxU63VDhrismGbwpWFqFpOxElm2lXVdiNsV2yqUzOAD8ziJ1L01SlyJH95x/sUvePzZn/Duj/6I9z/7KduHd5icCE5QpqLdLqaKmZnJHYQS/x4nBMxu8NfHZrZ/f85iTtXsfk+5IGaBuQlb5mPfiZ72qldJbQHonaUiLeJfJKMMbx2lVoYQdIP3eng5GXnVFyZaVlpIw8tOU4uTvkP2zlZb255z0hR+3k0bjcVZIdY7J4GZe9aWMeT2wklW5xziLeHkoK9ICOCLhWytlcydKmPovu6tbXinCgpj5NAnkasBEwhBDMF6Rhj09yBPUWnSBTtxQKYJegoyFk5ZD20db1vv8MFLYeMdtlUZPS5JRopuY1sipViOhyOnY+P6uBKXhHUF4yM5iWJKikDxCilZ8mKcNTRnkBSmpq8RhMja03W1kexdd5GohKbqsWosxjmaVQfrVrHBy+fnBjEL9B6KkFkr4EKTqIqcaK2p4kXG8pUkHDv1nimlUIlIIKoUy5TcP1JBcBTt6QVA5wzlXCT4dctkFbtJEyTFGKYb4CmKRdMcNS3e0HFIRz90JGfkI5L7TyMOejHz7cbtRkfvW6DRr+vFuRYDDZzxiqJIMVdbpbSMsRmMkr/LM9v2hoJ9lAABAABJREFUFszG3asTd6eZ6XSURriJgtBgJfAyNO5fv+GTzz7lev0gqF9ShEh5jy/VWf2FWt2Lv+vj/9IC5Yc//CEAX375JT/60Y/2v//yyy/563/9r+9f89VXX330fTln3r17t3//d324IBLh+XBkPp1Yrxesk3mkZI93xES+vpSCg5uk0InRjNEu42XOQ9MDreZCc2332thCkFn/9crlcsENF4ybCbPBh4EwHURhk2Uj8VbY9us6cjWeRQOmKoZqDMS4y8ZKLcS07t2Z9zPTcOQ4PTCPdwzDkeQrBq+EV+g5CGLLXCXzpSVqjdSy0lqi1rSz8EHWizNOjbQkq0gkllbPiiLwddGK1zZcheocrVhKMS8QlBedR+ugJLcZDPBtspt810uYcgc69bNXOWv71vO87DY6atJfgpGA
um/HsstGrtLZ/rqM2U38cqnSCe9DISkoJRStqWxTR4FNQGfnBTottRBrpmCpWE00LaQS2eKCyQVXK6YApZGa4xImohsJh1f4u88xpzfU6Q7GmWQMxQn8YGvDlYZ14BvYUrGlUHMjlkgypvefDONEsGBr5fr4yGOO1BSJX3/N88+/4N1P/pCnn/2C+OE9rmTmMdAVN+Jj4GjI4Saze+XpWC04jLl1v9wOaKltpMu37ZYE7dT8S2r4W1IuQG1yv+ulYd+C+1zeygZacsJ7KS6TMeRaqFO7uT1jxXG3czxao7WsQXQ79NF/yIslKX/elRAvxlb78yi47pwleE/wHmcMWYt47xxDCLsbc0oZ3zzWDWLGOA5Yayml6bqQ57PWYpxIUU111Cqus/L+6y4hNr3jLpFSxfCumz26EAR90FbUGYPzRmoi2/ChUvJGTldK2fSgspQmXLwYI9u2ScNnDVhzUz/VRk5JDjQXWK4b65I5zJ5xOnE6PQjnKVpMaLA6UkxkdaCWFGfhRciksGlInYyxatP7KwgHZgiD7D8pUdNt/FZMBCuHuTFe1JklkdJGaJ6i4gCMw5lBwo/CiHOWlLIgR96Jp1QQ3mCKidiuUnyGQBgnbM20HGWUn3QErvtXxxgEvDDqiiyvv+d2rdvKdUmkfDNXuO1WOkoy6lBdC7fRuu6ADVppag6ooyWQgoGOjHTUqe33rBQYL5Hn25lVW8E0HfGZuqPm8hy3Ah2kOLFOUKqYVtL2llI+ME6W0/0d0zRhfKDUgm0G7y0eaVC3mPCj55PPP+XDhy95/+49xkkBJ7fRDQ1q9fYe0SLvuz7+Ly1Q/vJf/sv88Ic/5N/+23+7FyRPT0/8+3//7/kn/+SfAPC3/tbf4sOHD/zH//gf+Rt/428A8O/+3b+j1srf/Jt/88/183rxOYwDh8NROiPnbjRC/ZBarXrD3IhJwMfFSLsZ69Ryk9S2BqZI5kkphRgj5/OZ4f17XJhobiDMR8ZatItx2BAIBg4HKMPMMIyEYZBZ6CJFVbuI4si4QK0JaYQTOa9Kbs1MxoAdGKcTh/mBcTgQfUTCDDWHx3mZbhhLbULKNS1i6karG7SI4I16w2m/IPP6Fx8ihlbERh2skPZaB0gLzRSR+fmgn2cvIvr23h/9MzYf/S0ffdXLyv9jKNCYb3Xcep32r98Lk/ZLX9OVH7XDtIg0E3szW+pwbVW2ubxPnTEb2V6NOrLSGiF48YSIG+fLmUri7nTE2EKKkdQaG4ZUGzFGUc5UhcqVMFpbJVeI1hOHI+XwCnd8Qzt+Qp0eqGGmOS+FMDcoeZxmXAGHIGUlZ2xyNCNz/ckDpTA4S4kry+XC0+M7Hj98IF8v5Ldvufz8pzz94qeU8xlTEiE4vLfCIdBPvuivSlNpr9VisyMbUqh3yXAH2UyDHhZmtONzzimiLa6b1lt8COoBUhUxlPVlVfrekMNT4AyjPBjIregrFEVRqwUfBvw4SuHgPMY4GXXVDC+dgHudzEu1x8fLcP8r07kB9aNlJYW8eLP0zda57nB7G2mWKqOs0Ve89xwOB+b5SAiB2gzP14XLJUnnWQuuFZwLmhWmvBknVvfd76LWrN43Dhvk/Xr95ZzfxyfONDBFDhubsFRKOnM9v6OWFW+1CGyGlApbymzbtvt19NFdKWpN0CBtGWMil/PC+XnhdEwE5zneH0jlyuUSMf7ANB/YYmE5Z1grpvUkeEGyaku4aWScZsCSYqVhGYcjQxg5Hu8Y/EhKie26ssWNuK24JvL30hq5aYq6SbTmWVPEtcYhOIwVWb71AwlBCTKV5geMkRBVPw0STGlXKYJKhnEiHE8MxhBKxFyvrNczOW1YCtbJ6Lwnx1sjZO6mKIILBhcKOAuukVLWUYrcvbJCiqIyjtYqKUtMiLGe0hq16qgOBW51/955Ugp/7Fy6j1fsLz+avF5B3gylJElkr5lWko75DezxE1HI1EhBG9OZHN/iXMSPA2EIembZXeFoGyrO
sKSaCN5w/+YVD68fuFyeaVRyFsuPlzyZtjey7Hvbd338uQuU8/nMH/zBH+x//sM//EP+83/+z7x584bf/M3f5J/+03/KP//n/5zf+Z3f2WXGP/7xj3evlL/21/4af//v/33+0T/6R/zLf/kvSSnxe7/3e/yDf/APfqWC5097SP6OQIXDONKMdMRY1Z43SSFOMQl7Gehud/1A21GFfeRzI/Oh8sluxp1zYVkWrUStcBXCyHS853D/RqVoooQJIeCsoYQs83gLxTZaMPhtoDrpXux6YVsXlS8mce0skZgjuVYcgXE6cjzdM4xHjL2KR0YYsNbTZX4CnFZqTlBWTLuS00rJG7VEqPk2EjFGoUiBs/tc2rlOVpfKXdIoDYUbAdJ6L2qYlDHGYYwSFTtk+WuLkl/1535E9hLlT6msP0JSvv18ZkdVaspyU5Qi0t0uEe03vUK1xtndBXN/b/b2554k68cRNwRyqzxfL2Ark/ekWqk1kQzkJr4dKSVAJJiyYTlyNcTmSSFQpxP+9Q+wD5/gT5/AfE8NB4pzWlCr5XgTBMsZ2VytU6xWid+ThzANVNvIcWO9nLk8vmc5P3N+95bL+3ewrpS370gf3uNyYp5HTPHQIjEnyDoyopKbHFBkGYf2BFdACoPW9nA8qUxu2+ZNGQOtyWFHybtKxnmPU0PEUozAxaoG6xyhWttHFYRBiI27QgcN7muVnBsDDu8GfPA448jWEtOqYwBVn7XbkrlZqf/yCnvZ6clqah/92/79irD2z0EKLCm+2ovCN5bMVOV7QgjgRLbdWFivWcJES8JYUSqVoo1CySh9USItdlgdhhAYQtiLExc81suBVom0ZqF5jEnUunF+/pqnD1/S0qIFSqNV4UhsayROkXma9uu3bRumNgbvKbkI2pEy2xa5Xi6sW8QdRnwQd+BUMqY1DocTp+PEMmSWpdKKVQK6uGc/nZ+xfuDhzQ85HO7EtoAALdCaE/fu6YC1lnXduFzOPJ+fgIhpmVwLqWbW9UrezrTmSClTixCNwzBRxQBL0KyUMUZtFmgUY8Tfw1kCFhdlTG/CiBlmQnDUzTPZAedHtuUsppmtECy6TsvOXzPGYoZBivhSObQT7nBgimIIWGIkxhUQJ+5WZW+sFLByP1hv8DYID6saGQP2ZOFaxJpAi311oPulrXAXBrTbn3dgBGmMGlniLWrGtogpen/IEBNjE84I2p7zhZQueJ+xweKcqm6MUUWQOirrCnVOEu2rLYTJc3d/x/H+RHz3SG7ri5JK0ch2+5uP77A/+/HnLlD+w3/4D/zdv/t39z938uo//If/kH/1r/4V/+yf/TMulwv/+B//Yz58+MDf/tt/m3/zb/7N7oEC8K//9b/m937v9/h7f+/v7UZt/+Jf/Is/70uRi1gMzg84P2CycERSETlg30iq/j0aA9+M0dGOkuhaf74byfJl0eqUoFZzEYfYZZG8nunIdHji+PDE3XLhmDaG1uXLKrVsVhxbXaO5RnUGPw6YwWGDxTzL5ldqotRCyitbWolpoypBc5onjvd3HE53LOcrRueJNEk/9tYLQaxBbo2qCp6cF/HBKGJ/L0Qt1DQLaEWs2HVJVeWYlCKs7Pqi+u2VdO2Ai6orbiXeDR1BvuX2+DUzxxsg2mfx6rK5u7yZF1/74um+/UQdJdslst8qOvvYR3kiPmhOjm48tbWdLv8yv8I6RxgGqjFiW6/chW2Nom5CHEZtrZATthS8E9vxUgzFeNZqWMOEOb3GnF7hXn2CO72C6UQNIv3Lch6RU2ZLi3RbueCaw2The9QWqdtGTRGXEy1n8rpwuTzx4d1brudn6rayPj3hU8KXzLqtmBTxVDEo9F5ym2rWoXlT8qLExJMqtEwxBedlTNPHMS+Vb/3zEci6W+97Ssm8VMp0GCOnpMUu+u9VjbKsoHTGKOGTm4liLzpr3dGQnLNwyVyUUYqzBCeddGuZVjKVrsrQNWOg9XlI28Wb8GJt7zlMFZFE9noX
dJxidkRW1oplOhxkbdTK5XrlfDmLxPV6pVZDjInjaWOYRgkqHRylVEgGURcWjPH44OhBlS/RzF5gOQfeGYKXokws8aU4q1WycKytOFsxROJy5cM3P+P6+BWUFWuVF1Gla6dZLDeUa1tXUoyMQZx+1+WKs45xHGgls20L67IwT8L98OMo/KKUKbkwTpa7uxPTbDAEwjByOByJaWP54guer5G75Pjs9DmvHj6jtZE1Qcrg/cg4HZjmmVOrzJczw/lRlId5lTDTmjimK5fze96/+7lkwNTMFjNDKWIlIz7RWCOmakXVVy54SjNa1DXcMFKNJYwzYZyE41Qtg29CrrWdn5EwQZRrhkbOibhFWjNkRY2r9Ux3I69PJ4wJxBQ5Pz2xrguWKunXJXG9nlmXlUHMgRjmmfFwojbD+XxlOS+UKFw1OZOcWj30QuQl9eC2C0pRooihNmlGiw+w1CJxBnJWOIy3mJq0hSxYozyfslHLgrMJTNExuRQk3lpR74kkjdocCWi2YH3DlEithTAGxmnEuIb1Qq9o6pDb937J4zIvb+fv9PhzFyh/5+/8nW/N+D9+GGP4/d//fX7/93//137Nmzdv/tymbL/q4b0nV7modgjUzWCcoyTRpLdS986ntobTjvrlQdR/7URRhVplUK0zdzSdtArUnFJiW1dR3qxXrpcL5+dnpuMzbpiQ75qkC3ee4A1Fi5NkGy5Z3GDEll8Ri2290IzIV7e8spVILBGnybaH04nD6cjzPFEStCZFyuBGUuk6dFl+tWYxhuvQXpEwuA63tXob94DREYi8Zxl1dW8YVQjRO021l1a7c2rHzsVH4qPHn7JG/sxHq9I99EPqxYhHRjV9BPGtMY9u8AKlCrnROM16UajfeQlac87vBU1sklurE+Z99mucwwTPcJgI8wBOOrg1w3H0WCPqEIcctsbLZ1MrpNxoIdDGA+70BvPqMzg+4O8esNNMCQPJeAriuCpFaqbEzHq9sC0reUuQCjUl0rZQ1gWbNmyO1LiyXJ94Pj9yvT6LIquBTYU5BGyrpJoJreG9WNOX1oilkEvG1wpFC4ZahUSgSpdmZR0ZI+7JZR959uLtBXyLzHr2cUcpt220K9TWKoTxbgBXC91V1TSnplZNCxdFMcuvQuPkZ2RFApy1uDHIId8GcY3OebcTby/W4b7Bt7181XFQ/5IOuUuWEc1ofXTzPrGKigzDwPF04nR/T2uVd+/f84tf/ILn85ktRlLMrIvsD9Nh4nQ64cLANA8EbzRfR7BJZ2XM02Wy4F6gPzeUtzUlLXtFUJzbP0NnCqYlamk8P37N11/+hPX6hGtFYH/tYJ0LMiJzEjKacyZuGxZDcI60Sbr6OIy0UkhJUI3L9ZmDRjtMo4yvllIFvWtgzUAzgeOdZFON44zdHPPdPe/P3/D4tPDJp43p+ArrTrjNEItwl8ww0oYRax2jP3I331OKqA5zimzblVYWwnQg5cj56Z2g4jkTY8L4hvEGjJM1Y4TLVxEkKpVCjYIEltZwPjBMB8I4YoxltJLfVbyn0UglQ9mwozQnxgApUd1GTsI3KqmSq2UcJo53bzie7ljXFeMC47rgrWEeA60kHt9/4PnpUce+lvnunofXb8AGhg+PPPon0rKRl5WaElWFGArS04uS3YH3122rLzq42gotNYTraGlGzA6NqCjANKzJ2FypbaO1hPOV3DKpSOZSqxnbCoObwDnWrQiVuBayzYRWwWzEuAjZ1nU0cRGSM0ilU4S7J82tjpF/zVv4VY/vhYrn1z2cd/hq2TaR3hoj0sxuc1+TfMjdJnm3O0en6zpTf8l56LbANKPf53ZL5FoKJUsnt22b2ImvV+K2sKzPnM8fsF46yXq8ZxpntaB22BYIFkYK0YizaaVIB9qEARDTJrBmjlQd9zRmjLdMh1m8LZyn5QqaTlmb7j9VZH+1JvE/KRslJ2pJtJbp4eatY4HmtjF3B1qz80YMteeIWOHKtAI3DLqreWSc0uWO
e3XwHR+//KV68LX+e3ktO9Su/ygkSaMkw9tj7+y1KLUdMUO7UWPVuEhks61WsstQFLKUVlNSbr3BhMB0f8+rH/2Y6dU9DI6tiutpyBXbEq1VinHKYXGspZJjo5kB4yb8w6e4Nz/EPHxGmQ6YcYIgVtNFr51TE6haDfM0Qi3YBksuXM4Xzu/ec3n3jnw540rEpIW0PLOtF9Z4odSEtYbReSYbyNGSLwvL8xMxrgxOiqBiGjGKY2xXtbUOv/YAL6Ok4D5Lb5VW8/4hGnMrZHf/BZVo7hCzXouGjhBLxlZRZIC8Fuf8R+OVj1hJxoCpKgO/rQOBvcUuP6aEi44hOAYjRPTs5WATVLnuo0qxrXf7CpPRXtsLkLojF1qYG7v/3W7qBjuidnd/z6effcqr16+ptTJOk/CerOXp8YmWGilHrtdCygu0wuFeSO7TONPawLo2Shbya20N15wazOl7VpjfdUkxXZJrd88VazupuWFMZlsXvv7yJ7x/+3NyvAJZ6dsiye1rvzbhS3Un3mEcsM5wuV6IcdsDH02SkUVKkVwqDY/zE0OYiHYhx5U1nVWSDIWCn4IEWzY4no6c7jZSTLx7/55Xb86c7iaakZTcWiGWRl4zLoDzE9M4aYOVyTnh45WcLsKNsyMpw7ZsDMGyris4hx8bGCcRFcjaLa1KErne+7kW4haZplkiKoaBWpua+FnsYDEl0YxEMHjjwAdSzqSm8mlTJXagQqlG8ndw+GHGN4sLF2osuOCkAKqZaRb0JaYNSYCWptWGgel4lOC+Q6ZukfX5zHI+k1O87YcfTXg+PtpfBiX2guZlA1pKgrRh3VWiCLpDs2svTDkTrQpvrllRedmWyWmjpBHqDFbzpVS96Yw0waUsaouRZYwWpNhru4uyNC9y06mSVpvA7/r4Xhco0zhSS2GrlZoyeUuUFEX7ngt1izRn8UEgS+scSbtmQS6Mzq1hHCYhQFmRNrYqSZetidG0WmjgnRd3xyLujCVtxHQhrk8sl5FxdDhb8UHY9c4cGMK4Ezcdnmk4EqNwDEKYGKfIFqPmqFhSLmxplbFPSwQ3cDzecTre462nWWGEl9ZINSlsVqAmMAmszKIhKQFXyHSSISG7mqRZ3jpig8xYdwO7ol2d86I2QiSFmEqjYPQQp6qDoGgIv3WFPsI9fsW/v3js7G5z+5b9Odr+P6MFljUCmd6Iz5ZcdDZrLBUhW7ZaMU5Iwd5LyqsUhI5iDNU5jPdS9Tf17rCWZjzVDQyffMb8o9/APExc0oovhS1Xrpcrh7YxOov1DUmAliKvNI873GPu3kiB8slnmNMrohvYrKFqEJ6MD9UNwliClcjzaQps68xlGsFalvNzb3xopVJSoqRETSsubdgmhWI1ls044ppY3j+SrhdMKQQcKUYJtytgGKRA84Ic2uIpdRP1js3UvO7mTVKZlh0pEwKrjkV7S1eNmnMpcdTcitX9qjezB7F1HnUxhU4srKXuJFshcIq5mEjJXhS/un5rK6ScSNkTqid4J51/y6Q1y3KqneTYoK93+/G4pnEz47JWgugadU9hDs7hrfBIvA0MfiCEcXdznaaJSmPLiZgjcd3YyqqjLEOKlcv1THOC/I2nmVbBD4ZmlIxuBSWVukkkolVl0dYGgp0Ibia4CW8HGWVYhLTejCBIJnN+/IpvvvoTLue3tHLFmSLeMUZI7n4QWTEOYl5JcRMFkA2UGFmWi/DNbB/vynXZ83KaZESN04Hr+UxMF1LcuF6vFCxhfiLMM2NOGOMZBscnr+94fFxY12fefv0LUd26mZgaMYmUeBhmbBgYD0fGccYHgx08xoHzR7alUI0jZnUEV4+r7GR/x1iJg7Cas1YF2SytUuICRiILUk54RdCbqvBKHwlbQ2mOXCHFjKlVxj7GYKy8lrolaJbgA6aknULQjPgJVRcoGJZcsCkxWPbQ0eADtUKOhet1wx+snE2HiTmMuNZYHp9495Xh/PioyktxsYa+X0hH8/KIN70C
681A7UWEw9RKyxsmAiZjbBDOTjU42/10hDeVo3h0ee/BQ06RJV4xMTBadaw1BloRXmYr1JiI60qOm/L3elOhWW8t674hbtrWNqwq/r5rI/u9LlC8C8yjp+bG5elZoLeUBSFJQgR0w4G7uxPH05GUM8u26Sz7lh0iSZOeeZ7lAjWR23kMLminV0VLT4MQrEpNZQaf08q6XnBnj3OGWjLGOzHuKh5fvW48ImGttYIRWaIPI2GYGadIKkIGLKVRYiLnjVIy3gW8GxiHCWctuSVyjeJYW62OPAoiL95oTYixpWzqKKnjHdMQC3y1NrIyvLLKAehpqTmJg9YQBN6jgXEVciZV0c23vY7Q3tcYRWA+7obla/rf/ymPl/9ovl2kgHkR627g5mj7rZ9E5xDB3iGZ1mjW7F17yZnSzM4vMM7L2Ve6hE+kt+N8z/yD38R+8iPyMZCXZ9y2ktrGYBMTjWrb7oFQa6LZCRMm6nQknF7jX73Gn06UaUDPdorYiyKDKymQm7FS8DWwxhEYmKzh1KBskYP1rI8fyMuFvAyk0TNcDHmxUGUWXBSyL9eVuG16wDl6Jon4hShx2EvIJK1iS8DZIF4rZF2fopq5SR1vaIRUuHqdepGiuSPdkrhbxgtaIT4ZfcRo2g2ubi8l6K13gzeeiFTfcl074l308OxIZggea4RDEEKg5UxrSoxvHS0R1EjsuNk/e7Gzv73H7obpaHhj95GOEGSlsClFuvFSCt57Tscj8fUr1nVhu668S4W0bbRSqbYR44a5ag5UAdxErYHKjB9GwFFiL8LlILDGY53HDRPTfM803TGP90zjCTsEmqui4AN8gLRd+ObtT3l6/BLLyhCkebAOxskzzQE3OCHtIiit8NyyNHlxJemhm4uMMqwT9KjmQsmyfw3TTKsnzk+P1CqI3LJGmjEsi/BVjLGEMBLCyGGy5OSotbCt7zk/GTADyxa5rpFUIAwzYThwunvN4e6e+TAxjh7vAi044iYcshwT2xbJKRFjQ9RLGp7pPdUk4VppId9KImUhTZda2XLCGMO6XPCD5KE5L9yggqiGUiqUNRGtYRwnxnnEBx2V+My6XSUHKiX8IOtU9mEpnmIRJ2+3VfAy0mxG1uowTEiiQcK1iTAMWD+IQ3kzOGNYlgvrciYljebQ26yjgS8Lk6po7m0Ars6+iv43KqYpb8uowlHvOGc91omvkC0SWptTFc6kl9y3Fi0hz0xG7A5KyzjTGIKg53FZWK5XSs0EZxicxdqusVQVoGky5pFoa00+/p8EQfFeEI+clWeiOSdyyDRMGDidTrx584ZxGlm3FQysqwZzIRui955pHjgeZ5UUQrIWiljDy4ZY9ufdH0b8NM7nM8YHYhYYqzYDYcQOA17NpaTyV5BACYsuBBoTQy0MRUhfNW6ULHLmuEXaLKyiaRLymbMCQzurQYFFOSct0soq5LK0EtMiIXHWUlI/PPUAx+GCHFDOeknsbGLXLC99JAyjsOSBLWVailRrRarGjiXSmlW+ZD+8XiIlHy/EPwND+eWv7SdS09/Xfph3aXKPKVAp8c5VURdP5UVQhDGfTaG2TK6KAnmRqdrcICZaTko8g2YDp89/zOlHv0V79WPiaCh2pPAE9YqvlVQtcbsSt1U29uZwx5Hp/oHhk8+p9w+UcSIbGTmYweMQv4NCV1H1w1xkmgKVCLHaucbxdMf4I0e9e8N2fmZbnsnXJ9bHd6wfviFfnijblZwj23WhpWdSTTQkmTvQsLngqlwvPzgchiGMgkC1SlC1WiuJmK/SAdVITSvsHCU0Abr98kVs+h8tLG7jn456/JoNqb0gA/aDWUexwzBQi3DJpO22FEkUFHMu/b5YMltKOC9+LS4EhlKIsfAy16XqoSW37YuRrnKTXtJnjcJVDdlXRD3jwBZqK8S4ssVRGh1gGEfu7u7Yto24rGzLxqWIos8YMbbalpVaGuuSsG7CmAljE3cPoxRYk4QPxk1HS/pr9AOH+zvmw4n5
eGScjvhxkPvBNlrL1LLy9Piet1/9grheCF58TFoJeFc5HCcOpxnnGnE7U2KhFsluenx8lGJWugwRkuRKToUwVAyO1kRdJt5RlnEcmaYJay0pJXLJWOfleqVIyYHgxROllpXgMs02anliXaXAvZwXHp+eWVZRNE3zifjqU/L6Cel4x3Q8Mc8njIOyXUjLheV6Zr1eyMsFVz05OXwYIGbcGDQ5XpAo0yopbqQtibqyNdacyKXw+umVoO/NAk4M91oTC/+4UqPIsOu2UZ0oQskFVxItCaevtUqKkev1yrRcyA2Cl2iQvG3EYsSVuhXlZmU1hquQIjlujENgGuSzqjFBzUJQdY5akhK2VcLf+lbYG5sbQX0Pq7Sm05/pjY9I0SXMEKPkdGcZhoFh8BgLPghyvi5PmmheCIPFBCfjvRwxNuCt3CO5bKzXJy7XZ3JeEcK32dEijeYWz6JaSX1E324sge/6+F4XKM5ZmnW7i2f3tSgqcex29Hd3DxjbWONGd7GsTSBfayVnZ55nsX42Lz0alImck0KxCDzYBMLKrbBsK+0snXguKkWz4i6KtxgrxFfjByFQGrX1Dh7XBuU+FMacxUOjNS065AZorWKdwfvA4XBgCIGlXmhkcILi1JKgiTFbyRs1bZQsVuvWZLpaybqeJyGz9OClO/TDQMmVlIpsimFQN9BAzBmSGJCJokNQlmrqbkey8xhuWzzfLlL+7OJknxm8+DbzrcNNn8W8fC79Gu28hTrT5IDpqc1VuvtsC7YK3Nuw2NJwTiv8UjGtYG2QDck75k8+Y/z0R9TTZ2TfqAQGBpo706znujwSa2TN4k1jwsh0uIe715TTK9rpgXY4knygmgaU3WDPArmK3Xcnj5lO9GmNXDOVxhA888MD9lDY7g5cLwfiZWIYHfPoqNeZ7fxIvF4JxkGqtC3jamMIDl8bjY2gBDfrwTbD4EewnmYMo/cEaylxw0dPbYUSDVsWSJtOSN7HcP2jf4F+6Lbz0k9Iio4u9Tb7pezopVUZ5S5fRjhPYq1vBemKUSTj3OBspy6kVo0WS7+fnUTK4x2mOB3VyoG402bb7TVK7SQp5f3ndyOsUivGZk0XDuo7JMhXeSE1TilhrDQ5x9OJu/s7pukdy+VCzere3FTJVCrJJLARzIpzkeADh09H7h4emA5Hcqwsl8TzdaM2x939PdM0yf0YnPjYDCPOy15S88J12/jw/iuWywcsEWsLtjX8MDCNnmEaMcC6nDmfM9MYmKeRWivX5cq2rExh3LlD3UZfEpL9LjJIJROTDBjGacQruuzUNn8InpoTcdtwxtBKFhS3FeHJYLXJcFgWSnrH+cMHUioM45F4+Zp4+YzD3RuOD58QTyes92zrlaf3X/L89A3b8oytiVYb27KJDN5ZBkZs1lFja1Agp40Uo2RC6Riu5MzT+7cMQ2AYZpp1tK7abKL4TFvE06gpkhcZkZlayetKSauo7XLGuoGSIyVF4TOlhGvCuUol09Q8sRZxvKw1Y3yQojInnGnMU6CVyvPzI0/v3nE9P0PNe2ECEnDbow564QFGaIi6ZuUvb5rKvh1b5zCuicLGNXBWQitlMobzFm9GUir4sLIsC6lEphIwRhSE1jpOdw/in1QK23JhWZ6pNUqKsfJOfLCEMSh6KEh1txCQHqQr9L57ifK9LlCwqJW8blpdjVPFeCuMI2EcqEDaItfrQoxx79gaUsQ8PDxwPJ4wCJEyRrGIttaRiyAyRccEzUFOMpuPKcJqpSO3lqzdcMqVNSWqdmG5FqbTHS4M4r+BzH29nvChVcaiXhNFTX10I9xnit5yOh2Y5okPqOeGStda26h1pZSVkpd9tENTS39jZQZuLc4PDIMWZNoJuRCU1a85EuqRkkshJdmkc0rknHE+YK0T6W0pkMqtU75RUl9cpBdw/Z/j8RHHQTf57sH+kSvsi+dtrYmLMKh9ec8KESKcKYK6dHWXkIzL7YcZS7WNZhvDceb42WfMn3yOuftU
xmNmBDthw5FiRrZcWV0meifhg4cD9f5T0uk1Zb6H6Y42HYjOwCDqEI+MD1rr9W730JDuSGy+ZXMagsdjxVfFObKpYsCXPWYaCHki1xW7qquwEt+G4HHHo3BbchYEyQjMDg1TLS4MYAMYxzCMDM5T7ArWkGsmtsa2LKgX/stP+XaJO5ooEhN9E98a8O0E5x4Xd5uXG3eTSvZH56DkrNfSysz8JbriFflywYvopTWKpuv262h9wOakxYsSYV8UTk3/LFJ0gStuhFkdC6WMc5lxkKahk377vZnVuNE6eZ3jNIna7nDg/PSEhMxpcZRfPG8WS4EseBx39wc++/wv8Vd+53e5P77mw4crX375njVW5sMDlRFjZqZ5YD4OTIcJ5z2tiZLl8rSwnt9i28IYxAup1SiHIpZ1u/L0vFFqIQTPNLxhmsZdcSXESSkgvRMkyhrphg03onDKGW9hUNv93oxN08gwjnhn2daFbdso6cAwBnywWFfBiAfJtj0RwkAIjuOh8fT+wuPzey7PjvX8FdvyntPlU+L2RNoeGOeJuG28f/cFTx++Im1nDpPFmkqMGzRBQott0jTp9TVVeBRx2xS5gFQytWaeH9/ineV490CYD7hpwFnJ56pVxArWVNxqZXRaBF3NaWNdFmLOpFqxvrAuF+JylaTvHCELylLIsrfkTEpRVUQ6Em+FcZ44jgOHceD58Ymn92959/XX1JQwCPekBxvuk1S66qzq2r31caKkq7f9xKhTuhcHWOktmiI0ViAZ0+kNjnEYON3fCRcnbRiMGE8+ZkGknOUwz5QcuV6fRHWq9hUilID5OHMfhR+3amRAf60o0bzquPe7Pr7XBUotRcz8lJ0spC45cPw0MR8ODMNIypnrurFum8jIjBzWYwjcP9zz6aefcjqdWNdNRkQWXFElEOBDwNVCVbth06ymXTY5vLVD72FRuRRSTVLh9sXjLYNp+HHEKBEXpHquvuKHik8TIXdFkt3n3aVIPso0TfjBq6dJodVN1KF5odWNViOSfJvxztIItJaUdyAd5zDM4nR5PDEdZ6Z5Fs8LVUDFLXI9i43/smys68a6StdQGwyTx7hGNUhwYan02ZVRTsOu6AE+LlhubJQ/tVzph0mfGPVH5ydoJ6tsX7me9O7PqFdLRZPAbq8jy5hO45ppXmF97wVpyeIVg7OM9yfuPnvD9PBAng+0psFjxlHdhDGBiqPaA8RNxjenO+zDJ7S7T2jHV7TxSHGyeVrXszek2DJFns8hN+1+aKrRjHOWwXkx60JC8ewQ8IcRnyfaNhDPyDVaFsq2kdYNcsFbiw3CHaoVyWjywsWptWKbxfsR4wLWB+ZxJlhPcSPWa9FWC9ZfKDnKk5iXhehHF+t2rX/VRdXR3w1l2+/efkl5OR6Cm7JHYGshqMqy0LwoK5uudZaikudUCi2Bs00RVStFk5WiuyBjSJoUrzQ0y0pGbvva6sWSKmpqbTta0uxNTr1tG9u64rzHB08YBylQDieOxwPjdJP/g1xDcbjVAlSLpm2pnB+/Ybs+chgdv/3bv4Vl5HxJbEk4Zu8er8RksW4ijAfCOIC1lFw4PxXi9p4cP+DsQisLKV9I8cJShTxdWqaVwjB6PvnkE8Yx4JwgaqVkconUqqjJi9r/llUmY+ScEptpEvPQxBUXA+Mw4kOg1sK6rCK1XhfuXz3g/BHnPSkJ/y/GJGnwpyOHo+f+1czT0zuenx7Zrs+kuHC9PnK5vOX18hkHHZ19eP8zLtdvqHWhFEeM4tdUm4XsqC1KtpjuH6YZco6ComQlketY+vr8iDOSIxXiwpCFnFv2z6uSSiGWQkmNdbmStitNw2NzLZQmZm3r9ZnreWIcJ2pcKXGjZjHGFC8qcXStteKtKAbtMDAGx8PpgLWGXzx94MO7t6zLhcG5XanVnNx6/Xr0wuOl5P8GMO+boFxDB2EQSTrW0CiUKk/SoysavDBDNAxh4HS8o9UJayogUR45b2zrmcFDShvbdiGX
DWfkHKpGYlKmeeKhQa0F7xzb8zN53WR4qty13ZPoOz6+1wVKKQ1pXty+cQC62ciHsMZIiyvLeiGXJFJSRI1zOMw83N9zf3/P8XhkmCfiuuJjIOeCXRYGM0m6sQFiVC7gbaMttUpwWM40tv3vcitYL1JXP3iGeQJn8IMX8pq1VH2d1ngJHPQj0W04m/fF10dS1jimeWScR5w3RDW/kgUmih1jimzQ1mHGQCuGuMlGYqpU1tZ5/DAyHQ+c7u+ZOjHYGrYlkrOMcq7XK+fzlW1dySnJFmtFfuisSE3lEDAyBjC788SvfPxy1NWf8dg72T7WeYma8KJIkf8L+mVoPSyv9AQv/Xm1QUvymmsnemrn70VmvFNaxoHDJ695/dlnjMdZlALKnDeACQMtjLgwMZ1eY5o4B4dpwk4H2nSgDgeqc6ISMAVMkM1Poqj3GAbxr5EfT2Yfe5jaqFoUiKmZrBk/TIRxooVRTODWje2aMLFiahNiobPElvUeKFgXhEGPIHwGyzTM4ALOj0zTkWCcFCjOkMpKzhvODxQXZJRT4fYB8fG16NfrowP+JVrRuUofo2u9qLRq0tYRilr7SEfGkq2JqqePiuiKGyfcmWLQDJbKNHjlHzmadeC8omlV+FNqJIdCz8LrVQRKFSDG2hfOmbdN3HshU9em2S4x4raNcRbCox8GhnEUS4BpYotp/96mRHYH4PrSFML684dv+KM/+K+cDgfmYeJHP/xL3B3veR1mUrW8ev3AdcmkamjWY0yltMi6XinlA9v6DbU8sa3vWM/fENcntrgoOmgw3nCcZw7zxDyKwd2yLlyXs1qiF2IUnw/FiTDWELxV5ZvEQOSSYCt442WMrp0zTQQEsUaWZeH5+UzcNokgOd1hTRAUq8peFPNKyp5pnHjz6SvO52diXLlcVs7n92xx4en5G1J65rS8ksyb8zeUdKaZSG1OlZSVlitNHa3N4GQCogSekrOo3WqW4qsZWs7E65VzE7Jq2I4cc4JT3k0euzHjVgttS6S4kWMkrtfd2kDUyZ7L+YkxDLSDjIZa3oRsniNxW2QfQqwpisnSJDppTExrXJ6feffN12zLFW8NIXj18ZGxs2xx5WPUYZ98v0ACXzSEnUIQBjHXq61RkBwvGftYCVXUMzPnrMh5Uzl+FZ7KODFTRPDRKtt6ZVme2baLqA9N5aUflbGNYZp4ePWKaZjYhoHlciVGkVrbYrDF7k3ad3l8rwuUpheoH429A5KDtPL49Mjz9YwxotGXSb9kzFgvF88Gj3GWrSSMs4RpEtlpa8zHAzEl7LLQLfZqa9iqKZzKOZECSbNGkvBVco07fGa9Ezb4GIibx/kBbwy2SfBYq5VaHMMwsW4bxokJlfcB6V4KfgiMh5npMBIGR0xm9znJdcXbhrWNYRrws8PbI7RMKieu1ytx29QFU9jbzg0MwyxMcmdxNrBeJWfo3fu3nJ+e2NYoip7SMN4zhFFY8hiabczzRLKGHKMkWSqS1A/zm8dFH/18e1W+HAm9/IUUPZoF1ORJ9ABp+5958bxNCb708NgmNyNWzlfTGqbUm22/N5jRE44T0/2J67YRP4iD7nj/hh/+pb/Mw+efEaaRNRcmJ7B2MRO5BIr1hHFkbFIg26AEPQwE9dtxMAyGqrLZiih4Gk2CCXKl1IZF4WVUHpnF1GkfjqSCa2IA15phGGaqGxjDzGG+Z7hr+DmxmpHzhyesL7SyUZNsPKVVSYk1AtU66zBuwI8zLkyEaWZ0AzUkbDCs6YJdroThQN4uNMV5aHqNMXzk9qvt3UvcrCOaMspS3Kw1geT16yryunzwss4we9FtrWUcB4yFbUVyiKxVmXgl5YhRj6NuHlhrYWuV4Kza9Ygc14yOsq2yNFVYYD3EFLHWaOCscMF6+pTtfkgqSy2t4fv7qqKFWJYr8+mgLsZSFIcQON2dON6duCwLNWYtMq1+TlU7atm7nA3E5Ykv/vi/k2Pk+nzmf/krv8MPfvgXOT28Jowz
0+GekgvWBNKSyGSwhfPje37xsz/i5z/7rzx++ILr5SvS+khOCyUnue4u4PDMh5nXr18xz7O8V/VAGccAteoIV9KafXA4HQs4bwmD17GaIZfMtkk0QveMyjWT40pKhcv5TNoiwVnithHXxGAcNWXisrJsC4d50vC9hh8CP/qNHzOME7/42Zc8Pz6zXB8JeeDxveF6+cCWM0/Pj9S8iqlpKeTWx3JC3K05SVOoh27Lt8Rs74R8LmNrGQ01pECZc8TVhq2V4AcsgpgbazFhoGaxgMjbJoGpRRBWWqGkwnpprNOERT/DTVLMc0qSd6UGbcZCSeJXZFqlpcj6/Mi7t295fv8O0wrzPOKspcak4/ZMrYoiNi1W1Hm5aXHd9p1F8GlrwAYrKFmwQjMoPbakYqolICoequxFBidcEl+pMQkRNiVomWkelZ6TeLo+sq5XvK14bylJxljGyToopYARNew8TKRx4NlbrtdN7u0o+1vt3ijf4fG9LlBSyhjSXqV2W3PheVRNyYxyQlElVV4NZQ6HmXmesdawbistCdFN5FdO5soNwjAo41kDlIKY98hFl45YGnH3EWS2riu8f7t3gfPxgPNirz7NJzVgEhM4Zy3VeozJOCeFgMy5Vean6ZY2WI53B+bTTG2RoXiWDQkn9AbfKq5Gao54bxmDkL/WuLJuG9fLIlVyGCRU0SDJnj4gEeIyP4yryFRLTIpACNExDFqVW4thwFtDnibStrItCymulJx03qgx69/iGHx8jJkX//910N8L6FLdSEsuKvn++HlqraoY7SOHfiOYnesgUQYGgmc8zBxfP3C4P8H5mXQJNOsZX3/C3Wc/JhwfBMo1OwVUot+BPDiM9TTrwfp9eCWbesN7g/cokU3g5bKPLoSoWtRB1ci8UEYKxZA1/bgUfT8YMLIWtnWFtEoKc23M0wFvHSZukBs5VfL5Si4bNRZMqYr4KWyMSFiHYWI8nAijdO3BeMxYcBtc1yPjepGN2ge9h7gVff269b/7FZfs5Q4k7/Fb+JqxelgrWrKb1cnYwflRPH9o5BjVcVe6V++dKoqgKaer1aKeRg2DlzOkO2m2hnFexxYVb5W35py49xZBhkqt5FLx3olja4+r0BFTKQWn7024ZsLP6k6zNBiGgfl45HA8MQxPIhe1DYFNinhQGOEjyShXkIi0Xfjy539EyRvL8kRrid+af4cwwPPjQskifU8pseUrT89v+eM/+SP+5I//hJ/9yR9xff6aEs9QN0CQVGcc4zgyTBN3pyPTOOFVam+MUWXgQXNs0gtOgxhtOe8IIYgyQzNVeh5SJ/12hcq2RdZ1I6W8f1bCRUkUVUTWlCjbBuNArUUcaK1w67zzUCrBGh4/PBLXK+cmjVEuIp9vWfbxZlB+nyyzSiWDkHEVOafeUEpZt3J9aY1WDHmLlFTwDdYGrjX88cRAY/BOvZ8aJSVpwHQvlOeTSAfTDLQi6rks3js5ReGslKxruXzrHpHXs60L33z5Fe/evydtm/DRtAGoVVx+c07E1DC2EPy0h0S+HKHvgHK3iXDC0zJqVEmDaqpKjG98ohvyIlwjZx0mZwbvoWVyQhSPaSNviVoLUR1+mzU0taqwzquqsglK2pruz3UvYq0XUUZpmRaVFvAdH9/rAqW0IlbGCr8OwyC69lp15tWhpLofcs45RrXHri1zuVxY1w2s+VYxMkjX2aTwGSdhzg/DqCiJSERLraRYtKDpcLZ0eJflgnny+GkgTCOq9GRwgRYGDWVz7Em8aozmwwCtaqGli8/KTPHh1SseXt1hTBIyYQMTDM42fC3YWqhxpbUsm7B33FtDzJnnD09sMTKNIyaIzbp1AT+MrMvCsq5cLhe2GCV0r/NLrBzu1lrCOOybd/AeUxvburIEz3K1wodIef8sXsAd3LrtG8fg9s/fQlB26339Y1O5N6hxklE0rOnPkhvXCVavP079OmrbgwGbNZjgsfPEdHfi9PoePwTa5ZnmAvb4wN3nv8Hx
0x9BOLKlBt7tmTWVSrXQrGcziLeK2ttbY/C24Z2RYljHXqVWOWD7GMyghmDyvvvwqJRKTFVJ2Ig0ucksujnYFDa2WWHnHOXQk5ZfNiQn/ICcJG9nsI5gxem2GGHkhyAS8mGYGacjh1k5KFVg6HGeSdtMGMWno+RFr8Gf3vncDgO55/qIklu6CKqJlK+vMqrY7e0xQh4tmdCC2rm/TKDWMEM8tklmicHRXkhhqepiaRULUfVDCAZn5RAIzkGT8MGyZW42+JX5cOSHP/whQwicz2fSut3MAE1VKaugn1UPzk4g74qXaTrw8PoNHx7PnK8rJWdZl73YtoIqWeXlyEFayPHC4+OXvPvmQE7/C598euT1mzek2NjWwnLd+PKrb/ij//Hf+OlP/5Cf/uwn/OIXX7Iuz5iWsE0OEIv4Y4zDwDRP0pA0yXqy1oqhoDG7fLrzbPo13Jli+z0s3XxrylNHxgK5ZEptlNLjMaSAKbngfc96ydRkaTmJQmZdqYeJkhIMktLsrMUWw6u7E6aI6eY368q2rhhrhdtXJXm3Ow+gcRx972gVCuoCjaFHFQiq2/bMrY6yt1owtZHWTcNPC6Qk+2+rwktrVZQ82wZRCjiMGggawUFNLcR1oZZECJ5as6h6ivgSlVoxigj2fayVynpduJwvPD8/C9qn0vmiasnu8tvXvasi2rh59sj7fjkmtdaIKic4ifQIA60ZrNdCsjUwdkfD5B5sqtiSxt1ZoBm2FtVBOJFyFHd0Cs5JOG2HIq2x8jloYxhjYluEn1XWlW25kksjZ20edq7Xr99HXj6+1wVKM0bDAOWN++AlDyCnm19J37yDwMND8ATnKDlzOZ9ZzIKzMmN1ITCMw16ohBCwxjL4gAueaZoYR7Hqrs3QFHaPUW5WMbYq5Jxom4wcaq1czmfc27fk1uRilcqrN4X5cCKMkywQzTIIQbqL1orA2WrBbpzMFU/3R+bTzLI45sPIdDgy3R0wpmFSpKVEXC5czhJcVVplng6c3BE/jsQtYhCCr58m3BCowLoJGXaLwjmRz6/JZuotQ5D47fkgYyFvpUChNU1XFW8XYzzbtlKTeC20XHjJO/jlYuQlCrK3A9yyePSblHHc0MgC6ylOkZTcuRHCFbEW8KN0G1UOoZYSYLAh4KcRfzzip5FqDdeYWGKBMBM++yGv/sJvMb36jNS8eAw5Q9aNtyMI1nuKgYgejBgG69QlVjps+fKmZk3iHNoU3qxoim/ts/RGjll4QCnJW9bpV3ENY8RcK60rg2mUtBK3hfT8SEwRV0R9kLKYWVGrZKxYL92pEkGdHxjCzDgKMXCaDszHO8liiRsxXvHDSJhm5uOBnI/QVtKalRtykzn+yk2mz1FeIGedaEm7FaQ9OVqKD8n/oApPxNJAU5VrSerzU/fRSrIVZzwtN0x1okbqn1cVD49iK85pVo2VAj74nswKJQn0n63TTdrycDrxV3/3d/nrf/3/xvnpif/6f/6ffP3VV4IslkzD4IunW+7TIK4by/XKMIx4L6MQ4XgdGY8HrCKuphemrR8rcnE7amGNGNLVknh8/MCH928JzvDjH33GPB25nDf+8L//Ef/vL/+E//Zf/jM//dkf8fz8nmW5oquJZipW4f55GDjMsxx8RkZkl+uVLSWmUhmDByTzqBYNFCwVp/dZbfLn0p1kqypAlAi/bZuqHWX0WrMUMKX2cbscyNK3CdGy1UKKcnh15EzGnBlnLa9fPeCtI26Ry+XC5XKlJHl9zva8MCF7y5xOfYiLcmF6nEeTBkAXlzYwt8UqOKI49lIzJVY2HT+GMGJDEI8QB1srmCKmhcagcQ1GrdyhtkpKIqVuVQpRKSz067UwbQ3dB+QgL0nVPUlQn6aj2FqLkHA7kdsY/VwzpliMIlmmE8G7hYQWKd4avHUMQdSaMlGQzykXNWlTeX5rvShVUzcjDUBWIuy6iJ2F8WL4V4vYUBiEl+S8xahbciuVuGycr1fJobpeaar+LLWK
IXQxhG7S+B0f3+sCxVpHzZplaWV2GAYhZHmko2w67sGi1Z9wReImhNYektfh3HXpm4yMdKy1YpZ0ODDMo/4MzzAcCMOEc+JIuK4rOScxzIorbg3if2ItMQtS07Ba3HSvhcbcUOjO43yQZOass0fNRWnI4ja24QfPOAVRdVIYRsurTx4kBv1y5fr4SFnEkXYrGWvEC2M6TQyHmeV6Ja5S4VvnyAbytvLh8ZHHxw+cL2fVsYvltfeB+XhkPt1zur9nPh6kEEEWuuLUmhCt72MZyKsYzSU2OQ1ad+z81kXcfTVe/N9YbhKotvN/rBeIEyOYg7MO07JElSth1mAlEG2apIiohehXgVGNww8BNw4M04QfB1wYuC6Jwgj3J04/+ou8+gu/yfTwmmYl68hUQ6mCmImxl6NlQ7VN84/E06PWSsuG1ArZiMmfVXm39U74EM6oGszIDLhK9ELJhZSiICPrJuodJaBl28CJ8qPFVdZCThATeVnIl0VsrWslx0TeNkyp2CqRALbJJmSN2rWHieBHxvHANEuhYo2MwfwwYL1jmEbmuxOtrZi2cMmryOp7IdnRlF5T9o6svfhLbl9+K0V1AbTbpZdzpOiMXd5z0fl7TmnPFpEvqCq1FMI2Td5nT22lqYdJ6yM3S/NGeADOyohGHXL7oeWc+I381d/9Xf7m3/x/8Nu//dv89z/4A/74j/8Eax0pL3qN/b7Oul9KilHsvlOilUJzDucD0zRzOp2Y5wPbFnd+SzNyyCq3VFEJqZpKa1Aal/OVL37yU37yJz/hx7/xGzjn+PKrn/Jf/st/5P/4z/+Bn33xhzw/vienK96UvdO21jIoj2IMI4dpEoJ0kKDLWCq5RrYtMmgXfTlfSDHp+A/xfLESRGjUpK4jXF4blZKqHrCJTuKPMep4x2gg58A4jtptq9FbLdSmdgZVCw8nxmrWikstFa73Vx6fjnsR5L2MwW3fLxSJNEYPfe3m+6i/E64BdRKW9Wn0H1tTdM9CyoXqrIQ3ro1aMqFNhNnjGHHIfVhrpVlDbVlGG7puaymany3ZN0b/ziCBmK2hDScYb/Waqzoz9yRwef+lZGKKYoWhqIu18jlRMs06HZWKgVsHpOVXVWBSXksn1TvvsUH2Z/dCM9AbA5qhtEKLhdYiW7wSl4VtubCtV4ytnO5m7BC6DZiMD50V0nFtkCvrdeXxwyOP52fdg6K8ZkWPDQZbLb457P8sKp6ehmq0i5/GkdPxyBC8hlsBzoks1942a1O7CQ5AZ0rLBtHnfy4lYvRY64huI+fEmCaREh6PYgJ3umcIsxQeVUYiWxQU4rpeWbZVnRZlFnm5XMitkjLkUkm58qrCOM2EYQKkKh6GkVzYDehqq+SasepjYZ1wEnISdVIsicN0wA2e3CrXdRFJXxFr4jUtjFkq6jAM4rdSxWyIZWO7Xvnm3Vvevn/H9Xqh1YL1nsM8M80H7h4eON7d48cJ453wKap0vcbIfD8M4u1ifcD7keQ3vF+5WkuORv096n4A6RW8/X6HwLUAsVYUR34Q111VGvV5f6uVkqTwFBtlJSwbzzjMzKcH3BhIpWDDgtuk2xvnSSLj5xE3TFg/YseB8dMT9u6eh7/wl5g//ZQaPLGIAiZlkRzSIBgrZm81SyxMExMnNORxyUmk463hg2cYAj54xmkkjAEzSACcwKINqtW8k6qmZBstr9QUqdqRNWcxIezZKy0mTGqMzWMYiOVKiRnTLLZYQnWYmqUe1BGdweJckDwXNzLoiCeECe8GuY6t6Zg04Acpym09UNOB9fIk12oHvL5dWMql6D4igqPcnFx/5UOJf+3F8/XNu/Mk+ujBWpH2VwArh1JT2SQ6fxfHTNdrCKrpxl0Gh8ZgOOGCdKjZWgsWfvjDH/E3/sb/nf/1f/vfqLVxvix6gMoIJKdEcYYtRpU5e5q1FCMKvhQjOWd1iB6YDwdev/mED49PXC4XUtRMIaNKLZoohtTIznWeRjUsl8hPv/iS/+d/
+t/Fb8MZ/r//7b/x//l//Rd+/sUXrMtC2q5YKs5I5ozw2SytCh8u+FE6XWP59JNPOL56xfN14flyJa2RtG1s28b5+UxKmTEMWCMFSlBemnfKyUNRJgzBe0ps5CQE+hgj2xpZVzErM+bGf/DDIBYMiKnlGtPeEJZc2WLBeUlSNsqvyaUwDAN3dydBp1ZJl5fiQvY/iS0Q8zVTURNERTfabYvpY11dbIKwKbnUWSHpRkVvbJHRfCmCltkAYQg4I0pR48BYQ6xZOEx7Q9RddkX5JC9F5eW1ijt3U56ickdaQ1G5IqarrYpqzDhodi9+ZY8Fa5qOBFXLY8F7WUPKrlMloCA3tWSx1FAU3novLuLI/WONVU6WkVFzg5gy23ZmWZ+Jy0WQyxxxzlDqgMPhnYxgU4oYAh7Ia+bp/TPPj2fO5wuX61X2+iKWDR0tbM2qgvbFCP47PL7XBQogBkNoDPo44Ba3k6FCEPvj3CR/AmSG2olU3WehbxKyCb4IQquadtyKkEflu/B+oCJF0Xgc8WESAqexpCJa+TWurKs4812uF5Z1FdIjjhwjz49P0CylNk73D9zdy4gJdY0lVoVfVULpeyKxjJBKzqQaKZdGePcNrb3BG63SrSWWwhYTQfBOzs9nprkSFN6jirQwbQvnxyfefvMN56dnaq7YQQzcTqe7PVZ+GGeMc+Qq8je0SGhawpu+mdlArRK2l43FNyMW5Uoco+Q9grtjQ3S1RD/OrARp2WliOB53MzkQqFpka3qQG9SxV9RU4+HIfH/P/etXuCGQa2U6HiRbxDiGMaiWqxGMYXIO/+qe+9MnDG8+5fTj32A4vKI2p47NldTUzhur/i9G7Z81ZiBn4nblen5mu15FDYFhGAfG6cB0mEjHA9NxZjwdMM7eQIgCNTfaVoSUnBItb5S0YdWXobkgninOY6uhpgqxMDiPmw6060ZO8ukNbuQwHtmyoRSRVxYjsvoQRhnVuSDFqh+kY9biJxlBIYdhgDpAmaEc2ZYDzg90SVSfe7cOofS1QEdQmo7J21547n9GO2D9Oq0fbkhcq7tTq7GK6nhPTxRuFIl3N31EJmOfZgquOSFKqk2mHGzKDauGXJt2dRmDhC7GmJgPMw9393z+2efcnR54+/Ytl+uVXDLeO7yzlGzUyj19JPvGGPE+ipEcoyCvSsA9HGZev3rF8+MHnh+jjCOBaoSDUhWWFxm7Wg9UgfPfvf+a//Sf/gP//Y//gJIS7959w+OHD5SUCUrIt+ok3bvqnpsT9pGeZToc+PTzz/nsBz9giZHzdeX8dObx3Qcenz4IV6DcUn8tjeAdQ/C7WqRQ8LXSihx+cY2sy0JOWbyTonT9phk1rdPvq5I6ba3yq6IWMYhFvt1WZjPLGKwm1mWhlYLznsN85DKdpUhXf5ZaBG2qWqA0JBxPxuCykGoVInutlUrdJea1Nen4m6aH6xruZnS0JjYUWYoprpZhmkQpWcteeNdS98gQYwyuGUiZUgUddTtc2E3eFD/QYqDmRm5iFtmEuIa3ju7hlbN4phgjlASDEY5MMzLe0UJG+G3SqMrQCopwCsT/pWbMKnspzontvnphCcIVFPVFSc4rKS3E7UpK686QK02MQ0MTyoOrst63ZeV521ifFx6/fmS5LpRcaVkMQnvnYQ1qztb0/JV9uosp/6zH97pAcd7hjFfIqJHTxrIsBLW0997LELQagTqd2clKH0dV3whj/d9vIYJODgajo4Mw0FN/KwWcIUxB/DecZ7IzQ4MDAvVt28blcuF6ubJtmxDHFN5MW2S5XBiGgTiODFZUNRYHpZKrOMuawE72ctZQUiSlhdYydc08vpfcksEHDHC6fwXN8PT4nloLpsB6XSTCfBigNcq2cH1+Jm6R8/Mzy/mJmhLWeY6nO06nE8fjiePpxDgMurwEKTCtEZS3Y4yjIH4JtSKOtVnMwMgVM6ocriTIGyTtp3OkZ/eIssYK/G2E6EkIuOOB8UG8WpwLGAwlZdr1
SsqVZhY5MCfPOE0M08Q4HJjv7jne3zOMAestORdKazgfaK1xPp+5Xs6wbdgUOb2acJ9+gn/9GcP8QEmWuBZcy7JxmErJiabOvTV1lv3GFq/kuJHXlfX5mfVyJq8rBrEDnw8n4unEuhw45VekVgmT5KKIyZ0ierXdZIxVOAWSySE3vEXSmE1rVHfrsvww4aYDJGHOuwDH+YivhmtcsM7Je3eeYZokjdd7ISd6zzCOjPNB1bSNTQt96oi3J7wrrMuzFOEhqIFHpneyxlqakqJfNqx7ka+GZzc+CiCMCUwVbhWgadLqEtwkIVbuW6/3oKMoauL1/ssICbGUqAWu+Pw4JV4K4t10/l6x3IzdDIVWhVwZ/MA0TbRW+fD+A9+8fct1uWKsYRwG5nGiFZnNO00Y7sZrBig5sW0rx3qSgslUnLdM08Cr+xOPpwPr+QmZgBiV+4trcaXhDQQfyCmxbJJ/VGPi57/Y+ObdlxjUR6No8eWk8LAWBi+F6Lpt5M7FskYPJYtXFY8PgU9PJz7/zPH0fOX86ac8Pz7y0y9+wtdffUnetv2zl/GpqPac1wiRmknJ4C2s60qMSUa8reGtx5uMcxKiOkwDwziQSsLnoM7AYmApCcKiDnFOiuJak6iiUoIqh9jxcOQ6HzBUvDVYoyMUo5+FHu7o1axNuIE5ydgkm0JuYi5ZsMIP1IOyVChVkp4H54C2o8IZMKlRLxcZZ2Ux9hQbIDVBbJ1lVRX9gFyrWC30wkVHV603w9ZiNZepZBkZ9TPHWLGQL6qwESmx2CxgJGOnN25GybBoY96Lk6Y3Xc0VU0RRVLWBbMaqxb2YCg7jgGp6pNGN3bMl0VohWKtFZaGgjW5IIhqpTQrU65Xnpyeuzwv5KpLqrlK7NZ9dcQQYKSQbL9Kxv8Pje12gWF0IwQ0YDPXuTqWIjZSi3Ayt6c02EgYZE+wBYApFWmsJ6n3SXjx3t9Y2ehC77p2i4wblMYnjvHfSjQ4B5wPBy6Fes2we67KwrislCYmx5CJqFGMlZXldMcEzDYHBB3KDUlZ6LDzascpGLt1fzgu2OGwYCM5xSYkUxVExOMfpcFAoNrGtV9JyxQaPs4blfOHp/QdR7yzyuoZx5HA4cX93x93DA4f5oFK1druBihAKnfJCjHHaMSGELSuVejNJblbvcIyYKp1VQb00mtW7HlHWSDAEuIBxDhMG/DBJ7kgYGcKAMYaI5JiU1lRqagnTzOnVA4fjHdZ4pvnI6f6B+/s7DscZsGwpsm6RlNO+mVsDTqWTndRW+3tNWT0wpEBpVRULsRHXzLotpHhlWy9sy0JeV9L1QrpeSeuKbY00zZRtJaWVQ7nHB0c2jaFWpjBQq/BCXIf8LTI+cJaKwxlDCE68a3wgWIcfLCVGYm2UVHDGCqk1RBFbNkcZlOARnHBeahPS3Djirfxf7gfxSnDBEZzFtMyUJw7phHMVimMcDMv5g6i3xpmaRO4p0mGkYFHi3stHR1eadrj7+I5ODDUCyMDOYeqkxluCsmyynccg83qze6y8/GnUXkQ2LZxVG2WFJGppEljXeS5qPeCDyOFzTjw9PXE5r/zxFz/hm7ffUKo4D/cmpo+fnJOOd9C9QfyPZKPvHATnZbx3PMwcj0fmaRJFS+l5svq+9f5uRlDg2vSgNjKncHo4TdMkjZRa/QfnBeZvhXVbibkgZo0yGsUYNQmUAv16XfT+PoIJvH79CfGzhVoyz0+PnGOkalMkqqR0a86s00MSMXi7XmitMY0TbaiYanbUxhixQ6itEuPKNMy7tLWP+/qhaI3dc2qaysGddYxe1JjzPCs3SRAvY6v6SlklbOvYQkcnpVQJ3itNMsSA1LJwlJTcKyO2Dl9WukCro3+ltZ23sq6rPn9TeS/K50LRcB1pyrB7V8m0ihJepYA31qqNQ9YRk6FbN/S11N2Kd8XYi3Unfphuz4MyxogVPuj76Eq3qo1Cu6GUzahNhTQ9VkhY4nJbkp4PotihiQNsCJq/
ZK2kvmOkYd42Ukycn595fn5mOZ8pa9Q4jLajJn1t29ZHOW0HxwXy/u6P73eB4l5kc2Boh8N+sa/LhRg3Yq77JhOGYQ/2GseREMJexfZCBNifo7Py941VUYNmjDKjhRtS+rzNojLmgTCMe+c4zDOH00ncZrNsKCWL50JFZqdNUQRjRBrsMLttvuztOofUrIyaxBGxuSaZQ3f3WGP48OED18uF0+HAYZoJBnmd28b1etnHX9frwnK5KhKQ8U7MnI7HEw/398yHg/BvVEK5z1GNUw8X6Yw7nJ7UgbYUtf/XVGlrrCTNIjlE2Tay0Sa86KxSs2ZwQhQ2bgArMegpZpyV3CFjJB+ip8jiA9Z75sM9h+MDh+OJdZF4ce9H3nzyKZ9//gOstTw+PfH49Mi2RS7nC9/Ur4lRYuXNuuLWBRNX6rZgw1UJy07iwVuRLi9lckws143leiEnIZMtlzPpukCK1BRpKUJrrCVqJkrDObh4Q2hFxoOHAwa3e6jUIvkszVRJqm23UaPI8nrGCpRtI14XtutC4EXonRVTwOIydoBZfXBqQ1VYkxxsQ8B6g/XC8BfAShxbQx6ZTgeMzdRiGELjcDpyOB5Zzk9sOfbBzP7ZKL79q29Swc/psnF2Bcx36KI6z0VHPwJ9Q8/N2X1t+li237ulULTgcV6g8lwLtgmlsrWINTL/l8wTyVR59/YbcoGvvvqK63IlZUE0RK3ycWG0F0qyM2AxxC2yXK8EZxn136d55v7+nqcPH2T0u677Jv4SxaXdkpOlA/cyHpxmvA87WdVbt+eOGWMEPTKGGJPwXKooDHNuNBLX68I3796xbJvwuHCkXHh4eCAcjwzjSIySFeOHwBhu5PKcBIf3zjMMo3hGXTeu60qulcF5rIU2yWsvvXO2IskXtF9s43NKgBLrEaQwxcgWN/WdGuT9qZDBW8vhOFNbIm5CMrWK1IoZ9I3HIKCjHOwh6L6Ue2OJIktIvpMuK/EwaTv/sK8zo2uoVBE+9KDEfbXeFhr71LLJGhBRUB/DQK1NCLVWxoNNEXljjE5Bbuv45e97YKb3kjYsS9zqdZfPT0i1igop56u/tiYkG+Gv2JsvitEJQquZVBOxKzaVNO6N3dWZ1uqe3LyinY31srAocnK5XEQZWbTZtLfC4zb+1Y+qc9d6kfJd7n19fK8LFJmPNpGA1ooNjuk4c98esMERo4x8NiVIwm1jcU5MiIZhuBUi3DaNvlCEnNk5A6Lk6NSkojwO6zyDVwfGKuZSRqE1gwagDYM8f2scNOUxJZGU1daw3kvo1jDoTWwJwwDqoFlbIzexU85RGNI5ReJ1w7nA528+4fXDA/l64fndW755euJ4OMh4phWWy5Wnp0eRHpdCXKP6FYgx3TiO3B1PHOcDU4+6j5KwXLvhjrX6+nqHptAlFpBCZ1tlHt0PE+ssrjkha5lAGBx58JQ80EomRR2dGI9xAYnbdTQj/gfXy0rJDevlOUtO4qrpPF6zlsb5gPUDFUephfNl492HZz75rDAd7ri/v2M8njjev+Lp6YnrsvCLn/2MbZPwSLOtuO2KWc+EYcS6UfwQrKEVT2yZ0kQyFxcZ2S2XC3W7kvNGuV7I2yr8kZQwWXj9vnlqNOTlmc1WvBPJdjTgWxNJuXViMlVFll00ir0VkdYO3uOdwXY+Uk4sH55Yns9slwtZkRbRL/ZkZCmmTTd0MuD8gPEeCUQU86ZmFEmwEtznnByQYRipbRY3Tpvx08Tx7o7nD+9I2yIz5SKohfgf3H4OfQZvzEcbfwfF5dHTx80OicsNdusedw8OC06/XrxEmkjXq1j798QClNuCOkW3UjTD5+ZhUlsRRKcVsJmaK6MPTKPsAcv1Sq4N5w3zPAlaVEURhzXSSLw4SOR+vnXky7JQDZyUvxH03joej9zf37Ntm4Z8KvG387dA75fb3jRNIgEPw62RstaphwsvzLYCwwiHYyPnJsq5XGlrZJwGlmXj66/esj1kXr3+FOdX4WNZ
xziIPNu6gPcD4zBK5Mc40owlRvGA8SEwTeN+kKWcpOOugjx3Gb1EYAh6kVJknEYhECeJBdg9MGDnWjRjsLUy+5FxGOhuwc4Yxnlmyxspb6pqUTTM3txU+0XohapVxCk0Qw0WjCBZYOTzj/3zF2SitRcrsxcougZzkVToTsrvyN6NLadeKx31E5xuRwFrrUr4vaPWyrpdb+ubSm1ZlquiZoIWyl4sCiinr7HqbdTpB7qG+62lAGxXXjlrJdHbSUHng3xfVRlzrTKSKbq++5Ao+IHgxTeoG/VZnFzPbeP8/Mz1fOaqEmLZWITYK7M5t5OD9cLoHd+LOv2s+J+kQIkxYnKjWOF1OKTAkBtqwuqsvtZGaWJt7JHqsBsrwQ1K60TZXo27F+qRfWOi+1xUcoyaVmoVuZKKvEthJQNFXGn7DNIawzhOpJwJQ4fj5CYJITCGgG2iPrDO4Vrb9/qmc9ptFSlYXjdqTTy/f8/zu/c8zCc+efWK9fnMFz/5gvdff6PvsbGsV57Pz2xxpSGzytbE+fIwzxwPR46HI/MkkOwWI6W0/TMJIajEegSjBEUjI4qikGdKWd0kFWpHvRP8IHbZztFqouZB/q9psD02p1Tb+bfSpVf0xlUvEZUHCnRvCG6SvJjmJFiNQmpSOH14vvDFz79kvr/nL/vA4e6eu1evubu/Y4srX3zxJ6xfrcS4YbYr5vpEOx8gTFg7koyVzck7tiqQey6JuG7kdSGtF+p6peSNvC3UGLElY/UAbE069JYbeRO1k1cvm9YqtmTyOAmbXtdzLZESN0lF7bLh1rDCzKXmSl5Wlg+PLE/PxGWhWMfognRuXbKsKjVftLHpa9tpZLsVyXozmYb8wgjfwA+ewIj1DVMccSli6X88cPfqNaZVrjyJAkwfxtjbpmP2v/z4/7oeWlOJZO/8+r/3z6CPA5ykLxttDPrj49HOi4dC9jJzr6AqqVarjMVp2mn2VCi5vkMY+I0f/Qa/9Rf/Eq/ffMKyJoxzzHFicJYaBT1cloVWZC6/rpsUINrgWB3zmNbU7jySvBeBmREU5e7ujuv1yrZurFV4Erx4693ev6MooqbyH73//vvbvWUxLihHzhG3JIigMQzDwGef/YD7hztyKRyORx4eHri7v+PD+w+iVFQjyBACRdU7wzAwjpPm21h6fEcugVbFJj5nMe/atoI3lpxEUhuCp5lKSpEYN0IYhEC7CerZr1/OneQsZpRhCLthm+yngsj5IM9prMU0NWajc4i0mTTS9BlE3tzXT1Upj/eOEKQADVfPhYtaTOgapB+at/Ul5FUZP+2FSZe6cxs17t9bZXRYX5CwKo1xHHjz5g0//NEPqaXwxU9/wrZt+2fRr3spgsJ3LmQI4Ya0tPqCdnBb6rVZVXP24p69ODHeYyeZFjhnscg1qTmR4irFexN0RxApjYawcj51d3Ov52SO6ktzPhPXlZoyPRy2q8dKFd6LbVb4r3uKcb9a370oefn4XhcoW5QD2rsMDbwV+GuNkUVvJOc90zyxrFGsl+uNPtwP3r4xvIRc+0awk4+4fdamQSmJdWukWhhKpTSJ8m46QqnVCDdgGHAE6f4souBoDucM3otsFiPVOuVmBmaUFNnUBdc6K/LVkoSDsC6SL2INKW989eWXjH7g7njHcT4yDyN53VguV2otYsC2CZRrnJNiwwfu7u6kOJmPMgOvlWVZiCmJ6kMN60LPp1BY21rt2nUUlWIixbyPgzp3wAePcY5h8IQgmUW0QDc2khET5FSJsZKSWGnXapAsHqsFZtkdD3t8gHcD1g8YLxb11VisF7fbYRq5bhs/+/nPcd7xgx/+gM8++5RPf/AZMa78jz94xduvviRuG3ZdaOdHsvMUY4UEu0giqxsH1prETrsKipJToqWVmiN1i7SUcdoBO7Er1Xk6yCgkU9NKXIxIjWlstVDiJhJAIxBwrQVKxtSCQRwr7TgJhJ0yedkozxcev/mGp7dvaaVSfIBhxBsvIYlV
5JKtipeI8e52sCGFnjENYyutZWrdqGWgVCf8huAIBEJreDzOZMZpZjoeeXjzRkLCEB5BXhG/nJcz5hfIye4U/PKxH7BFCaW3VNb9XtPx6u1bXjzLRx3rLz05/dTva9CYIsu0h1oC4KS3M41hGLm/f8Wrh9c8PDxwOhmO2z1rWrgeT0xekmq//PLnnJN4JyUdV7w0c+xoCK2Jd0cIBGMJQbxDpmlinmfGYZAYCQperctr99Lp3Dgdg0i6rvT3O3lfE5Wdmjs2FGGvdX/f1ntevXnF7/6vf5Uf/uhHXK5X5nnmN3/rN/fR7VXHu51nYZRT5lxgGmfhtQ3SydMEmSlFGpCiVuXbuvGsflLzNOG87gWaR0YTXkjOZZeOp6TcFivGlM4GvBtuY4g+HjCNMMhrsN6JUV5H2NptRGh70pOt+4i89kRw/cy8Ez5LsCIiOLfOB0EPVfmUP1pXOip0pic83wqjj9eb7IWmNjVhbPuY/uHVPZ99/gmvXt3z4cOH/frs3BJjBNVTxH03+uvkWWOozd5GSVqMdGm2VcTEqAJSJjsO4x14aRpKSarWXMW2vkQ1ndMID2MEobWaNGxE1kyrpFTY1pWrIsY5RhmLGq/Fj7uNlzW402gQbi4JI3I1bLMSfLo/Pv4U/7TH97pAkbyHiGHD4CRTplWW65XL5ZmcM8dpZhpHUs48X1bYZIOZpglgR0xqrdJJ1FvTV5o6+nUoULMYcqlkhQmdD2zDxrCO5G2lpEhVC+hRE1Or5vUIYi3hWkatmvtGKiRUuWkGP+iIwYKSWp0VpnhcF1LaME2guVoLOTce3z9iquX+7p6SMlGlb7UaZa1DQ6ySrbHM08Td3R33dw+M44h3npQSi5JmRbY9Ms0zYQh007he39nOSi8iJyz6+msr+3jHecPQiZheflnbN1tHa4FQKzlX4lawLsuvVMkZPVBU2mr8buEsvBZP8JOQR8NEmCZcGMUPwMB8mDjdnTDB8/7DB0orWGf4weefcToJ5O68oxRJuy6XZyKW0iwtNcZZEkn9PBFbJRbxJZFup1LTRkmRkpNcCysSQ6N+LCBFlBR0stm1kknLVQ7pXCh+040EcslQhaBnmpByaeBqw/lMua6U85X49Mz1/XuuT0/iexEGbKm4MFGrGodtmzh/1kKrHpxsWiYJATeXCG3EWpGt15ZobdCZtVikO+NxttDawjjPzMejqMDyRtw2luUqpL7WhEdkzbfmyy+VOy8IffJP8ucdPRHEsQcQ7s3Bi1/98W142NyIOvse3oPVyp4CYLFG8lUcTk3pEuAoufH0eObLL79mTYnT6RXTYSaMcv+VmLhezwyDFOu9cKT1gL1MCAVbxW+ChiSiO8dgLUV5K845xmFknmfWdaW1RvAe7x0xRZqVQmbPmyoy/pWML+EL6fQAZwVVwJjdwr2UTK5y2Dln+fzzz/mrf/V3uH+45/HpiWmeOBwmhkFQ2se48eHDB57PZyGBOiuO0M4JYjbPzPMRP0+St9I6tyzJCMY5Uk5cr1dBhYdhH//IOEfec9nzimRf2LZNRsU6wnD+lkTfaqVZKy7dKBnUB7EuaFEnM715lPVkrRj3GYeo4ZzKWBXV6yOfVmEYR+baSLnQ1oWUMs50fw5pBGsfLTZ1XO3LWtexKHiUeMLL9SmOxbnKz5ymkdevXnF/f8e6Lnz99dds2wp6feAWjGkMhOB3D5phEFWZ956imU9FeTRocxZCwA0D1iqhtQOWclMoSXkjbRvr9UKKC0YFv+KbIueAeDHdPIeaMaRagUzRVPvr5UqKGZBCRBgPvajRBqI7Dxajn5nblW51v8fbbRL8HR/f6wLFO89mpEihodV9ZlmvrFfpEExtjOPIPI48O8uq/IiXm9+NOd3zMfRPCiE2hO1dNbwvq6KlIbLGri1fr+LUui0rd6+a3GilMkyyidYiwXCt8dE4SRwEkyzElGljYxoNwzhiFBKXkYgaQsWIoQkEV6oS9BJff/0179+9xxkn
cjsM4zTLqNAHartATXjvmOcjd3ciJ24Ntm3j+flZNhcs4yimdH4ISq6Tz0dmlnogNLRASsIIV0twgVaD8hlkdGBsw9kmG0nnKjTpBPvhAj0XIiPqPy/9oxHJZBg8XlGdIYyi8gkjYZgZ5hnnA0IuLcyHic8+/YRxDGzryuP5CfNzMTiyrXE6HZnniaenFeKGGTwtrWzPj5gmh0FOG0O5oziJmm8135yAsziHdjMiC3gnRRuKVlgjMGj3UCi5EJer5JrkgvUDON1cNGfG0ASCLWKBveRCsx6zRdL5Qjyf2a4X8rYRfADjKCRylc1AEmRXapX03aYwug0W68SIq5HJacFlg6uO2qRQaVi884zeYY2SSZ3HDQODhsodc6bmotyuTT1tdPN8UUjYHv7XN/D+S9dRr4b6CNYoH6Xfdy+5YB/blJv9sHiZ6trRC5mx6gHSms7dK6YId8M6h0XSfI2x1AJff/OWXAvjzyZevf6Ez3/wOYe7I7Wm/cC11nI4HNis2bvrFKPM4xvMM4x6n2RV023OfeS2GhRJGYaBUkQx4Z2/hSA6iYuQDlRcV1uTBGFfvf45aZGiTtlVTP6ulwvrJi7D4zTwgx98xmeff6ojloUtrVyXC846tmXj8cMHvv7qK66Xy47eyEjb7a9T/IcGjPPiNqqnS1f0FDWgdCHs16LzTYZBiqicihRgre5f09EVcXe9PVdHf5oxZDXJc06CLWPM0IoUKVWTr/eEXzFBa6aJckSDBGtrGOzudTVOUmyV2lknm+ylRqNNv3UeCEnVKiouxfBewGgxXbVIF98Sj6FhnWUaR+ZZlFeXy4Xr9bKPtnb7enW+7YhJ0NHe4XDgeDzinGPbEq1tIk/uXEAn8vsQxB/Kebcjl6UUUhK0fLleWK5nSlrFfFMVgjR2VVUrhVRElt0J7EI0z6S4cb1eWbcV26TwEz8Xeoche7Yx3AZRKMIuppJmN2czH+0P3/XxvS5Q5sNBJWbqdVEaccssy0KKAkmeixCV5mliHAY27XoEeXE4GxhCIfuKMRlK/WhDBHaH2ao3ZCmFqg6BXZuOEkiHaWK5LGRNo613D8pv8TQHCdG3S6SDLPZSK9u6yS/noaAmXJOiFJLBktZISsKbqK27XEooVVX+SzUFaz3OyOx1CN3DxLDFjGueEDzz4cQ4HakYrsuVx8dHrtcrwXsOxxPT4cA4jfsBKxV2z6KRgLBcKtsmyoQUoxix0fbOSBAT4VFJH6PExj4d4gZ5DkHk2U0/t5yMhOahs05/m837IHPyMM54LVSsk2TUUhrzPPPwcM/Dm1dMY+B8OXP+8swvvvoFtWROhwPGGqZx4H3OkmGyOZr1gtzghPM1TEImdV6CyFq7fQal4PpNamVoIBuq3uCoHXbrygEj10kD7RyGVrIquJBCpzVMK4K25EJFCtBkHS4VatrEcyUnnGl7Zk1JjTVlTNMCZVmEVBgGigXngn7ODW/BCzmGViOmBSxaaBlR+wg3IUuYpfXgvCRmTxOHVqk1c76cWbdFRp+2QYxoBbBvwk6df8sL6STKL7FakAivq0kRrKOub49cX8LiTeX9Is3Vz/9PebQqXg/WOPE9QYA5Z4PM2v1AafD4/Iw5n7kuG1vcuH99h7VwfnpkXVectbhx1IIUspqTXa/X3UkUa/H6uqJaCqDoibi0OsZpYBoDJUeNm/plx1xrLU2zbbqstrSiCFlSFWATabiic4uGdPak9tevX+Gs5fn5iffv3+t4O+voT9LWr8tZ0DQ132taJOcS6RJs58VxtROTHWZPdO7hdD2gddsEXZODN+j1a3vwXW/Iejq0KADFY8VkjRvRAqU0VQCGgflwkn2+FR3vyPd02W7Jon6T7t7STMKFAkZGSc5Kxtk0zcyz7MXeDwzLlcvz0/76qjYGRq/ZHpZqzF7A9KK7cy5qq+reK40RKufFiMz66Vn4f5jKt5thYyTmJASPc17Hw5Kp1S4ypl2XyLKsXLuhnjE4RVjm05Fx
PjDP0kgaY1m2hcvzM+v5iev1mZIT3luCd3gr3BLx1JIzLOtIuAs5pMaXsdwWs4b8mY/vwfYtJLMhewUfj2NFKGI+aj7+vI/vd4EyjUoyFUe8dY2sy1Wq+BTxzqnF/Fk6DiUR1do+YoIXRUZSSkKOqn0xSQcmXfOLAkUzFPbuzhhxBBwGwrLIvLZCqwbnAuMgbrOD+rUImiISY4nXLoqkCMFSOjCJ+bZOYOa0RbZ1YblcuF4ulJLwVn1Hmr7e0sB4rG/CJXAeZ0XZ0ppRJcDAMPl9xHU+n8VwZ1lorYmD7PHIOI0CNb88XLih+EVlgoumjsaYFMq2OC/SVdsLe4VDa58P6RxGOmHhSGANHktzGn0eDCVJt+a82+fLMt4xYkhgGrUVruuV0q40JW6F4RXGQkwbxmq3Y+Dx6Ymnx0fePLxiMEKCrqVCK1S3ko0H48lpIcbA4B0lbap4QTarUjUOROfPBnF5Nd2XR+ayzohKIOdMS3lfK9aYPekUEGddA1Aha+R9qepgCaZmmh8lar4WMecaAiTNdilZlE/OyGgxbrQY5XlCoEPKSs+i5gROPkexSm9YKj3LQ3gQQuyuiDQ/jCN+FCOz2gpjmjk93JPUEHG7FkqK8vm025gG8yIYrLe6+uiuqf2A3vlfL+7Lvtm9VLjs67BveB1/f8E96Qh830SrFikGR9X12NRZs9IkY8gFjseZ+TBTaubLr35OjpGSM9u6klJEwRPhnSDXdlHSY86ZmDMhDOqVdN0J6PM86b5zO6Q6kvDy0OpjjlzFCBDlpPhh2BVEIOPAuK3Ebd3zTkAKoOA9pjWeHh/5wz/8H3zxxRc8PT/t6cPoUrPWaSDfhW3bGMOwv6aO+vTRXCmiyik5Sx6YcwwhMM+zoIVVU3hT2r9XCq0b0tWfN6nfUh9lyaSmO73efG76Ht2MODJvMVKy+MFMqnTMqiZalpWSE9Z3WbEUJAYhfLba9tGJMZZ5hoZhmAacN5yfH7mcr/K6nZWxhJJiu1knOq74eIyp7089Sbpxn1cO0fP5GXO5iGQ7548Odedu4+7++XQflK72KkVC9sqO2GuhYJWD914yn6bjzDhNYAzrsnK9njFZIhmCEy8l73VkWQulGR2jQVMeobEe74Ii4UXtJcRrxjlP/Rb6c8NCO3JpFLFC73ttMKykZ2PyTi7+ZWLar398rwsU55zYciPkS+8l4dWpFK+oPGTd1EgHg0Wsq53O7oC9OLFVTMeqhrfJTS2+FeJYqZ2tEmGBfTMVuLVpUJRc8BDE+GyeD4Qxw4BI5Zq5dV0NjLF4N+C9blQVai7EdcNaQ0oSdne9XHh6fOLyfGZdVoJrigZtooQpDWcDZjRU68VOWQmbpjVRCDnLPI7YJu6y5/OzFjxFqvJp4nQ47G6dTeekN9hOYNVcsvjMaNeUUxJ/D9crdeHNOCcokYzJXrgi6eFVuy+GAUxWNNAIu79bMztJkDVqiT+EQUZrXgh1OSeiGt8FN4oHQ4lsydGsegt4Ry6F5XzG1Mrr04nD4cA8zazbFVMqpiTpbGuikWgmU1qEZuneH73b7BuzrRVvbim5DunysZba5/BRFEtON9xcJNq9HzjCoG87ObZViWg3pUHOSqJVFZO3uMHDJlJf6YbUcr1koBKcbBpWAjp2qWIriWogx0b2jTB4Sk5CnFMIX/iFDvHfE/fdcZrI9cQQHKYWKIm7h3vSuhK3KzUP4v8SU5/f7QRYQO3Jb8WpoRclHzcKcJOQvhzDvhzn9EPjpQJIBqgvOzSZq7fuGmuE1OutIDydfFqbRMjnmnCjwY2e4SCBo+m6sG4L27KR46pW7rciq+a8Fyai7JEuV/KXJBH9/uEBf3diCEEKGEVcJFg0031VREJt8aqySsrbMM7KeMeMagdgGSc5jIz5wPVSNVRRXUmN1TyeyjdffcXl/MxPf/pTYs7CD9OvC17M+tZ1IWcZ+Xp/45lI8J8UE0YRsJxEmmqbkWJmnmlVUoZzkr0ydnIs
wqmQIkGQk+6NVGtlmuf9vcu100+178c0qob/1So+KqnzDZ1nng+c7u8AMeSclkUKqNJJ+gbvgvDfvFNTNbNn9VjnOByPWHvkeByxrsk92aT4Sqtw8W4FcrutLm0yDK5zVhWFkIXmnOd4vON4utvXRffKEjPIrqjTdamFiawl+ex6dIAobeyN/6KjFEmfTtSykdYz17PHh6CBj6rg845pGvHB7fVAVr5fyZ0naCXwVgMiW5Oz5Hq9yvV+kYklkRECfYusnF/DJzEf/V56d+Ec1h3x/O5oyve6QNnShrWeYXA7WWhbV4ZhYLlcWJZFYHZ13bTO05Rg2W3tO1u6dzMdQYlRbtBUxNNihwB1HtklXzsjG73RapWKOSZ5Ddcrx5iYX2y01vobrNuUyGWkcs9RJKY5JjZdbDGtxOuFt998wzfffMPj0yNpPWMR6+jtmqAvIAfFOBJGA6Tkr71zzOOgxlyWuK1cF9ksS86SHTLPHJXlX0qh5rZLqmsTR16BMQ05iclZyWnf8HcPA2/1UFSzJiOHX+9uTVdTKPeioSMfe2OzO4c44oZBoVvHOEwMo/wSC/SBgmVNRUPVPMMwM88zp9OB+TAzaCJ18AFTGt989RVl21iuK8GPzNMsCcLalbYQxJY/bdg8QcvYlpHSg/2zqKVgsly/5sQRWLgrDaooMKqiRk0LvQYSYJfzfpDLZqeoQ8liyd2ayBpbo+bODrE6EpCgwqqFkYzgKiUVchY3yK60kTGwFlQvEnzF4l0UQy2rJ0IuVC/rSRRlRrzXvGOYJlJdwVtyXGl5ox5PlPuVtF4xRUzQ1naRk+YlItB5IR1pRJCNW3OQPy429PcyRpWAvZfxFE27NVRxcPs5HyN9XbMk4WtVc3kUxbMVYzQrJkecswzZU2oi5Q3rxFhvCI5t0UN726DpmKII72rb1v1Qrq0Rc8Lazt8ad7fq3gBcLhceHx935cxLg0hnJJOl6GssRRCfmBO+yGjW+4FBlYHruhLXjWazuoQKKusGy+l04v7+XmXBUbx/1kXM2IKYrg3juH92tq+hmlkWach8CBjrJALAWOG+pSTcN+8Z2rAXIzEmLucLuRRSzsJ18OKdkmyV1HQt0Prea3T8hxVOg6B8Hx9cMaedBLznHlWRjXvlXlgtrkQwsbKsi3AEc6Y2cfv1Sl42ahfhg1i+j2Og1ln2qtqk6QuBHCR7K64bBvHUwXQjT0X6UKNAFFGoYgo4hIF5PjAOI+u67sh4z397iQx2yXU/b4Sjcht/SYEC9NhN027NzL78JTKiJTGv7ATacRAeWS2FXJsi/v2+UMXWMHA4HAlhpNXC8/Oj+Fit285RkftO/rOjnP0eM705uN1zfW83/RbV+9QYI3zKj57hz358rwuUxw+PTPOJYZyxzjKYgePpxHw48Pz0tHdha1klyySMuO4e++IDr1oBG6MbkEKdPfOhvHSu7BV0Lzac01n2zTPFO/lYY4xcF+lSzG1/xntHKQajdtbilRKwWDZ0A0xyaDQKcVu4PL3n6y+/5Kuvv+bD0xM1XmhViJrBiNW9sTLrRtGekjPFWs3GmLDWaMeocGvcMMDxMDOOEw/3d8zTpNLHulfQVR04K03UJVZcdMWdXnwMXHOaPiwzaavz596Zd3uenmYKOt6F/d+s8jjCEBiCGEeN06yIlGEcDxyPd8zzjA/igVJ1Xl2NpWIlAM8NhMFxOBwY55HgAw/3lYe7Bz775FOe3r/j8uGRa44M04S9WGrVDiZnXKtYB5WsPgh1v+Nqk26rpIQrWeByKtYGTNHwN/tClaA3um3IaAeFaI0gJ9JiyAdR4yreHVoAtdbAifEa1mMN5CT5TSVGvB8JTowBU8qaqC1kwuCcohUV2wP+2s3joSuFTC+EqqiIrKofUmrivdIMYZiYykx1lvFwkCTXUkjLkbv7OyHCVfEFqlrQmxeb8D4GfbHBOdeh/LrzE36JuK4FSutEJtANvO0bufyQsv8s+Zq+9mRl9cRb
qw69tCJMgSbNhLGgA1dyjliNNujOptaKaWJRh+BWq3qBCCm81KrTPy1YNRhv2zbevX37EYzfDbl2Z1j17kBh86LjnVILzjudVjUd08r7ri2T9R6nilhAJ/2M48Bnn33Gb/+V3yZuG8+Xs+45flfhpBi5Lld1rbU4nf85a2mlEdZlPzQPxyMhDPLza/fNuHFmQlDugyIOYuNgd6TABbMXQLlk4aZpc1jRFF5jdD3c1ocxZvcLcd5zOB6kmNGivEvHJT1dvGiGQUi9FcP1/ERMWQ05bzb8zjp1/BbTwmEcef3mE2JMPH54ZAiBViVtPueiTadyyvbRo45/tFbuLtfjMHI63RF8YNtk/C0KJjEctO623jvyFjViQCZKbUdZnDrBdnM828fD5obmWCfOr95rhpxpOOMwTty+S6cRqCrK9ZyqbvznHMEPWGPJrUqjFsXA09IbAiFqv/Qrsh+NeZoipfoXpu/mfGuUo+XUd69NZN3++b78/78eH9594PRguQ8D1nict8zHmYeHB67nZ66Xs8RGl0JtHsg0DJsR6+rZOIqtWiioGMpqJk8naQGSgq2VITcim1P1TlC3WWe9ZM/4wDBOuGkWwpfOM5seUgStkJUe4Y2w50c74FrYoVdDI6fMtm5cni48vXvk6f0zKWbKJuZh0hk6vBPVjZarUjQ4ix+kWwijE+mZ5tE0W5lPMyEMjOPAYZyZ5kmUIUBKlWpltCUTA0OjqAVBoZmGHwzOTwzzDUqXz1BNlXrqaDPs/a0uWrHS0oNIbZx9cByPB+Z5ZjocxUXTy2w8xogfRmwIGD8wzBOtiVGQs+JrUTEMYRSScPB4P+L8IHk+znA4Hrl/uGO7fMrXv/gZX/4Mnq/PXLYrdUtstdKaOCs4wDVwiMV8dVJC5VKoqdByppT/H3n/EmrbuuV5ob/v1fsYY8619tr7nDjnRFy9kBmpXi9XLaiIltSQjAitaGQlQTBVMCExspAKgllLFAPRigpqTbKgCIIWREgRNBUyA9EEsaAWMm4kSXLPiXP2Y6015xij9/492i209n29z7XXeezEey+b2yPm2WutOR798T1a+7d/+/8V9q+uEryVf2rDWysrruk1OqE67c4JvdOgZrOMt+4DL1pekka0um4QhxMzqUsNLxBqJeSM3BfiDNPsqTSymEx+aDamhcDKFE84t+k5qc0r3kWQot0QUvFWtqFmEPWL2YoKuDURUpyR+UyNjrM8chdhaplL3qDphl6lclvvbNcrzulYD2JCfr1rQoBSdTNPhRBnK5sWCGrv0CwACF4drQVRDx2nbaAO5TZ1qfAPF88xzNrLYMXhRlboHVC1LBS8llai7z/eDGJ1TM4pUeeJVRpU846xT2zibA47E/PUxboi2gFxv40gpNZ6UIlmnFtHhUoTXG2GzGw271QHqDYhOVWXFhG2ZWG53Ww82a018vZ8mvnOdz/j1etX1HrhV3/1V/n+97/Htiy8f3riiy++4N379wrjl4qglht6fzSQLFll7tf7wvnR3MSDqkErdy2xLneTgnfG71EEr9VC8CoOGULQ8nReaE2RlRBQkUAqdVNto8vpxClFXPfzwdEOBOPoNbn0zgK4vPF8uzGlE95HzqcL3sH5rL5dhEgFbrdnbjkzeyXha/yqAc5aGk+3O9FVLpcLn33ne8Q4I60RgwYpt9uddVmsrVYds53rKB/2bHQR1yA2cTpbiTkX87jJFjh0WQZt31WgsaPyMhIZfFd2MWTVFJCDD4aGOkOZNSiZpkm7v4J6pqm4ZqAOB4pAjJMFJUltUZKqEtsoZl21q+v9u7fUbT3oEx0oDJ3/1fk3hoo22yv7a/EyOFGKohtii5X5xZDNX/D4VgcotRRqrsbjcEgQvEQ++fQN27qyLgvPT2/Zima8tQixCZ1shMAkjVITMWqwQSdq4XExESJ7RMi+qERDTEJUrkkMGpzEFHE+EOcTLmkt2jlHNQ0NAdrdGQcmEkMkxUQKkeaEEhuby9S6
WgVEy0Xrou2/rz55zXr/Hsv1RM432lbATfg04c2tV0lVmoF1lMOHPpkcPjgupwdDKhKzIRZ+QOt7ltBhV51MjVyKlRrMqjw4IkNQfEwgTYZ0Ua1F9Ur1/nnEaQdQlUaMjpBU2vvh4cLjqwdOp4vqHwTNMmsVQsraIeIjVUSlN4JpQQwuh9eNx7vREtcdiH1MpOBJc2KeJ51IQX2u4unM9bqQrleu1qVBLriY8Vm7KRAoAnmrSK5aApKiZtlKKSGKuvM624xdk7Hh+qYaBNXKO9I0kGgdoXIN7xXCDbZIeRcItvXlkokGkTqp5G2hOGBKKiJXNmorBK+LoXYiOLwXRLR01DrvR6BJ0OCwNTyGoki1nd9ZSyOEMKlxIDO1NmqdyaeNuT4gjyvkhVI2tlq53hVeL4uWQzqRfByGhQtQc2WKjuADRVQtOIagaKJ3bLUcSIl9F+5wMVrRPJByj3MTcYjfx5saB3ZM3FpPu8CXU8Qz+oR3QbtHTeehN8E6bFPiCNHvytP90LjGOpiMH6eddi9lDbp/GHDgsSiKm8umQmjejAFNVdSbdHlvbS62gYuhV0LvJGk8PT3xBz/+Ax4eHnjz5g2vXr3i+vxEdx9W3ybP/a7dd3RkJHbZAy1r3W43ipWmYvRcLmdC1E32fr+ptH/TDhDli2gQ0sUvu/fQui5asnKqcDvNSt5uUpj8PDZb791o4+3EaPqalQJT8rx/vrMUU/Q+O+LkCaUZU6PgQ+DVq9cQA3wVWG5X1pwRRIU8gz7jnAvvn57J65XvfPqGV4+v+eSTT5WL6ByPr1/z8O6VcVuyrSkWNhji4TrHyXeirIyyjUouGNfjAPZ19KSPp659FFwf27Ineg6i94gzSoB0GQY/um7iaPFGzSFtFa7iTIJhUrPVWcnbvYOql16bVNUNe3pH3TaVNxjnuqOgncZwnM9jToAqOIMFVS8quhaQvIBTfuHjWx2gaHlEF4PTpG2mzTf842v8DxxI48vPI2+/fMft/Y1m5nvOBogWUCDFhpxOhLiLKg242EicHabti0+HyEJUglI0tdWQbGONCbG2WNjh3dBACfqF4BMSzSTMaXAUHJZl63c3vytKvvnOp/zfL/8P7n/o/8r9+R1P77/i6f07yqJ6LzUX88ERqjRCDJzOZ86XEzFFfFN4FQfTFNXV9tDeWUqh5EoTdUKt3RbbOZyLuOBIGEreeulnry9qnbfLJRtUa/CxTmiHzXJCcsaHSaQ5cXm48PjqFfN5JvhokzHgVYEJsVbyLWeci2xbZj4nguvolbpJz9NZkRbr/GlFTIrbIaIcmXg68eaXvs90OfPwyad87wdvuT7d+IO3X/H5+7dct42tVCZx+Fw00yZAbcbX6O3C1bqL9KY0NKOuI8toyltoFSdNFSubjOwEdCLrfawq9ObHLVfU1Ct6VaXRvCNOAZkDq6tQFmRVqDwXDYARXdRUnMwCglooDWhOu31aMOGsYj/NNqZsIm6BnDfSFC3zLDg3seWsAoIhQUrI6Yy7PFDzRi6VV6/u1HXjXoTasmZNh5q7pp86GPoGFEKkmFqwS2GUfQbPRASaU7l/C/56Rwyu0ZwioMfv6UEywIfoyocaK72c4L0GMV2zxluQqdUs3WgauhB3TtqRwGtfPJKX81ldjGOMPD09jXZk59yuzGxljFIKtWhAtxknIRxURVNKI2hd18XQDyvP1TpQXdCA5/PPP2ddV0UizQX5drvy/PTE9fmZ2139eJQE38uJO+9BV1b2EotEnEuaqFnZrDU106u1jvJRv4/9vEXEApTVsvBdLXVs0s6PTT2KNYIdBPuKlc3UEFadim+3m3IOXST4pJo23uO8aaw4VC7hcsEjikB6hxOvIpfm6eOA6/XKcrvy6tV7Xr9WB/cpKYr8/PSe++2Zp6dFS2kxjMB77ECiAW/30iqmrzPmsNtFA/uzHs0CfdOHkQgeyeA9TPZBmzewUqga/1kXlL20j8dgQe3kJ86XiybIQUXg
quxWEpgvz3K78vTuHbfnZ2rO6iZk53Ukr3f08cMgpXNTjuHH8X0DPDjMR8chYvs5xzcOUP6H/+F/4N/6t/4t/spf+Sv88Ic/5L/4L/4L/sl/8p8cv/9n/9l/lj//5//8i/f8+q//On/hL/yF8fcvv/ySP/2n/zT/5X/5X+K954/9sT/Gv/Pv/Ds8Pj5+o3PxxnyPTvkXIQbEC8lPzJP6vzy+OnM6/YQv0lc8P113193W8GKFhtkxnWwxapbVG5GLw6TrverB/HXUhVMVJLUdMGnEGiM+TkiIpGlSGLQ2as605ohR/XsErVeXXHBVA6paTTgo6kKsxQbBec+bzz7jzSd/C+fZU8vG9fkdT+/f89WPP+fdV2/54ief8/z0zHpf8M7x8HDhcjkzn2fdSJu38/F42wyqaMeSssk1exIrR1XjDejCE6zFVxeqWgtiRLQm2mHjTLURm0TKazMJ9RBIacabcqoz5900z4QpKpozn2g0ctGuqhgiHhOIasJWqrUfTjhfOeGZ5olpOil8GWflwaQZF7wRF/U9eatsrur3TJHpMvPqdGJ+9Qm/9MuZsha+/+4tP/r8cz7/8gu++PIrRFQZciuVijp3Sim0Ukw11hRjcSSr5TsBV9rgDfimyrB9YQv0TEOzDWe1bJzXzh1AVS2VcNuFn4ooWTY4T3o8k64z6/VOub4fwllOwItAVRJci1aW69yk4HHiaDXQqnaIlKrZHiWD8xpYe08rFX+emKZJ21N9YmKmzVVbOpsG+t5s29dSOV9vrJc7dSusRcfTOPom7v1APXpbOh+UPV4ee6Dg6YGXPwAqnahon9leQsgdem4v8JAdCXHOHRxcg5Yym26MvW1WF/WXzrNHvks/Z+W6axnk9evXvH79GtCunGJ8A+fcSGiqbVg5ZyuxOBMxs/KyBZqK8FiHj4nASd35TaEHFhZkdBG9hwctz3Sphdv1quTaXChZg+xd26mT3LVcEPt1GechpTiCnRGwtTZk/4+bVif/btumthm9I8Xv1iKj8yiG/bl5RhBkH6rnZ6iws0TqfrsRXMC7qMFBFU6nGe+bttJ5FWR8uJxJHrZVSbKShXu+02omTROns0r5v/vqy3Ger19/omW91jifte38en2i1WLq1jq2gjdl30Og2p+lmjsGYg24zQ3y8No1YuxeOBveozxyOBTnP4Qplhh7C0762Osdlq0pETemZI0EZ3WgDoGGmc2WOjgt2t164+ndO67X58GJCU6Jth8L8MfYt2fjXGc+Mdaf/tz6PNvnsOn8fEMg5RsHKNfrlb/n7/l7+Of/+X+e3/qt3/roa37jN36D/+g/+o/G32djjPfjn/6n/2l++MMf8t/8N/8NOWf+uX/un+NP/sk/yX/yn/wn3+hcxLL+ddsIayK5iegjPgbClDidJh4eHzidX/Hw6i2ff/4Fb796y7osPZGzDdlRSkNkNaM1nUjq3WDCPkldRVOv91mpJFiZwYWuWKgRb0gJHyPBAgG1aTAWtvME+/HK0KO0gtRMLRlE202daX1oxC6KiFwuvPrkwpw8rf0SOW88f/WeLz//nB//6Ed8+ZMveffVW0rOnKdJRXpSsn70MASumvcUgyQrQIiqp+FVrVEXbgtMYlCkI+y+FB0R2rZsbHsduCLgogdDlqY0kaYz83xmsgWuiGaj3ghbxshSgaus5MNSKrUxZJgxVEche20rDyEyTydO54vWWYOW10KYrF1cCc4iSnoTwEXR9t2qkkouzWprL+AeL0yfvOaT736HT378E67PN56vN97f7mwilFahbLRcFCVz3soCDnWjbMpUV6by8AVwQ+vDWn8t4JJm/AgjI4lUXaS1toIL5qidFGEQI9tdXl1o2yu+WG7crlcNkEIkeR1L4hzSPGWt+Kq26d55dTgVT62RUhypJdW4KBnKBj5QyoY45T1483sJBDAPpTTPTNZy2nJCZtVJuTw8cr7cyI8rLVfKql0flopZNulMCVT5JFW6AufuEtxV
NbfNHMitzt0aRJQUuC+ah+DFvww89k4J5b80F8wrRF5sCqrgmYahnDOeR62VLZddj0J28u4xOHnRJm3w++4+7LUFFcZPN230IVCsA6hW4xgczts7P2B1TcR0qc69BbW1F0T/Wisn887Rcd+I5qSr16AbpBvtvQeugFP+Sb9uMAf2pB2PXRW4lMLCYhtd2z2DDu2oPcgB3Suen5+HgurRZ+j4nDrnb2T3XgtxYgFUz9q9Bbd521j8Xe0LSqOsmZLP+OBIJ9WBckGJ2PM8qXp31fVg84VWMz7A+WHi9faKvC48PT3x+eefc7/f1QXe9qzLRYO8+/XZUAQLHDqaYWtos/uzbauitJ24igWohrzuHWvWGOA4IC42jszTB+esfORfdHw16dwPMdE4W18sWU5JqQddJLFhnall12MqeeX2/MT1qurhzUrbPyt+GLjOIcEQYxP2svb+wsMnibyMV17+5Wce3zhA+c3f/E1+8zd/82e+Zp5nfvCDH3z0d//7//6/8xf+wl/gf/qf/if+vr/v7wPg3/v3/j3+8X/8H+ff/rf/bX7lV37la+9Z11Vlte14//49oBNA/Aw+kEvh4ZNHLucHfDCDuqSmcSHOnB8+4fLwivPlJ7x/96SbYC668cbJOiPFOA+N4JuWi5TVNeq2eK2T91qzujrug2tfZPoihv3XkYw8m6YZj6q9djdIqgnDtYp3agblHGwm2tX9FW73K+I2UlSHXIfgUuT86pHvtO/hDPnZljsek4MO3lxetYwkzlEVFyaGYDb2fnQntV7r915bd1Nink9MUyQGNwLiXDLLfdGMzTo4RDRwSzEym1z2q8fXTOmE817NxKT1+o+1VSofRaRajCRa8mpaj9aJrroqMaivTJonLU31RdVpucU3RyPjmgaO3uBFwYiWtjBsJWv7YtD+fO+t9DNNXB4feHx45On5idtt4fMvv+T98413T1fVJSGrZkmf0U1oueGk4iUivmsXGN/CaUCiEtF1TN7eRdI6SmQwuISAcxM+OgiCj44o6oKcgifEE2wXnj733MqiyExIFKwzyntciBqA1WCiUAGpEWmevGnLciqJXDZiWZGccCGp6ynQ8MNjCWf+L1HJt3JWgq0rGVc3pryxZeF8eaCsRctqqy7IZdvGWOqLq8Box+88jg59j0z8wO3AOqk02AhjLg4OgGWSx9LNEe3oh6JavXSrG1j/rGit4vp1WjLbtm0oT/f5/DHkpAfX/ej8jY5Q9mvrwUs/3yFtgPIHGoeyEdaWaaiIQ7sC12WhlX0M9XUB09wBzOhT5eofXr2i1kKctEX5/ft3fPnFWxybtp+KjVMpIwg4cuxAN952v5NNjkC7fww1smd3REf67zuCIlai6b9XUq0F9x0zbLsUfjPxwlr13nlnVhLe/IqcY7lbyf5cKWljXRdCClzazHxK+OjsfnotbXaLERfBZxW6jI7zZebx8ZFlWXh+ftZzaI1luXOeZ06zoojL3dyxxUZSL/kZWtBR5Zy3cf0dlTh67gxZCvdSnVWfpQXO7K3GA9Xy2nHUNNtCWiObbkoTUZTNurlEdG1uxmvSNVZGWa3WyrYs3K7PbKvKWgTnFGE9ng8vkcaPHk0ALT+Dsg7pMRa9TVrVnzvC4n5mGPTy+P8IB+Uv/sW/yPe+9z0+/fRT/tF/9B/lX//X/3W+853vAPC7v/u7vHnzZgQnAP/YP/aP4b3nf/wf/0f+qX/qn/ra5/3O7/wOf+7P/bmv/fvz85VcdRFY1lUHZIVpqpxPJ1IKTPPMmzRzeXjN5fLA5eGB9++euN/vLPd1D/pkJ7bBnvVyGCTO7QiJ762kFgF3Y7gekfTX0uE5m8AamWvHQPARRKPbklUkzB8gNq3hqxhTWTO3lqn1zvMTNCnUlq0dU7MKKZU1Z5pHlSdb043Jewg9wDIL85QIaRqeOT54nOmzlKo+Pl17wJtyZEyRuZP2enaQM+uyjihcBLpZW0qReZ55OF+UbyPCljOxNaqo
E3QQQUohiPbQe+PEIE45H9VaMsXRG5Wd80oYLMV0DxppKpy4IJRxz11nqodoSJQptIoK2FXpeg+a8aQpMkVFyeY08ebTN6xr5nK58PkXX+BFCK3hi7Zqt1ZxVbU2XNAumVoLUt2gUDS01iu1KWXY+ChaRlODxdJUR0O1IDzSggaC4lHWr2j5BSVKBme6EU5IHpO7Vx2E6DyNQBY9D4KnWsbeYkRcYCsBVxw5T/htw68rQsI7LXM0HISoiI7Vtb1PKgQYLDptosq2Ziq2LY3T+ZGyVVourJc79/szJa+2mdri51VXQrkP1qpuRNKOzOH036r3Wrw/YBD6OptbowyhSE+fzM7mbF+M902E8XqNB93YPHsQrGaQlXVdWbZNFTWNOHlMQDpaoNyUl4HQUeyMwwY+z/ML7slRvEzLmG2gSA6GkVvyYXA+tnWzoi/7eYwyrM6J2hrTbJ5V00StllTZe/JWuV2v3Ay9OXIGOpG1u5cf+Tc9yOzXM0oOh023ByigiWX3Meo2FSFoUKImgbqGvLAOQVU/cI5arZRsCJtzShyWpkRg7wKvciFPKjMR5khpZx7aRZFtNCDxzoLa6JhdhDrRmpbJPHtgNYJFQ4ejdf8oAr6XnRTVMk2W1pQnB6ohlAubVx2RLW8aFLSX6MTxftHHk7dQ7XAf+73t6MmUJqo0De5aY8mbnVIPLBVp3UqmrpuiUE2GXkkPmGpWdeRifmIOC27FIx8EDx8GKx8eo9xjVVUNCrU5IeyvGNf5sc/4Wcf/6QHKb/zGb/Bbv/Vb/KE/9If4vd/7Pf7sn/2z/OZv/ia/+7u/SwiBH/3oR3zve997eRIx8tlnn/GjH/3oo5/5r/6r/yr/0r/0L42/v3//nr/1b/1bdcOphev1OhRfS648nB8tQyvaOpt0Yrx+9YbTdOE7n6k+yfV6VVJa1Qh0JCXWHjVsw50DAjFMBjNiXAhnnjPRuA9JXUeD9ukHy2JUNj4MmG+erFSE01JG0S4LXWyCQuDWTle3TPCOKQWQjbJWtrZyu19Z1zu1VW07zs0WNO3EiPNkzHCHjxNxVg0YF638lJL2y3v7u/FGcJbNogv4yESdo2Kbnb0/eHVsfX1YaHLR9jrB00S9LbYGTqpG6FNS1nprOFEine8blFMEqS9i0vZ6NrXZYqkPqbSi7aqbKk2mquJKuLhP8qAaAaGpVXtFcMUyBe/xJiiXfCKKI+SiqrUxMMVAk8RpSkj9lHMMXGLii/QF72Pg6XrltqwqTOW0rbiUjRY0IOy1Ya2jN4WrW8W3Ap2gakS1bj7WAyZSoiD22kJ1jha1fTqGwLqtPN9vOCcEJ2x5gdaIwVNxbOKRriwqEVpTk0Z0v89mobBtizr8xgkh0gWccI6HTz61TNDq4cEzTSeW5abt22kjzSdohbBlTo9QjKdRt5Xzw4VtfbSscjUyriP6hJsmatVuhr0sahB82fAhWKmiUKQHaOp/QnGjk0F3cSMQsiMoY8MV7YhQPRpbrA2U7tc2NB0MdVHRtablnU07ZXonjXNOHYzzNjbuWquKelmg0fWTwFABG9vJPqOTR/sm2EsqGFk0HAKOdV3xMZLXFbxnud7UCLIqyqpclR19CMadKIZ0TN1h2Hg00zxzfnjgcVUeSw+WjudxOp2YZ7W5ELBSo5736XxC0ACsNC2/9PemlLRUZfdpXVfu9zugSEBvVe7P59jlhGhH3uCZgHndOKZZ15lSCjFGHi4nLueZLz//AofjimcJd9KcCNmzblfK9pp5OlnAXfAe0hTxKVIpSlgnQOvO1G4gPHov1bdKvYX2DqRa6+BQdHXZ5OPwlGqlsjn1SfPbtquPA51APTZ82c1ELY99gaj0ADDFpKVrZ/YooqXmMgJLRTelOWoRpFWycb9G8t12Em2thXVRjpIUVbcNQGB3RO7HMWjt59w3yT3Q0ExM/doU0e/7oxXq8C6OJME5ExT9BY//0wOUP/7H//j489/1d/1d/N1/99/Nr/7q
r/IX/+Jf5Nd+7df+pj5znuev8VgAbeVaFry1v5ZaKVtjeyysW+b8cKbUwtnE3NKsMOerV57NINxaxbwX5AUkpyUgndilax+M4E+lzTXKVUfSkCZCiloHDN4EhJK67oYDrFsKJIsmnaAs70qTSqfyde+GvK3qd2JN7cW0N0q5c709c19vanRVVG9iSlquSkknf4wmEx8ifkqDAKntub77YO1ZJRqFx5iG+FSfmL2eWavgQ2GaKqd50udi6IzyQAqbD3o/azGSqXp4hMMk9VEbaJvxEVrtGgOeFiK2+2gJKkbtUmrqg5RzGRtDqYXiTQG1VUKaTLXREZp2rIyNC8vOvNXWm/ISggguaUlFpJBroTkhS8ODuj6fz3zy6hWfPDzw/u1bfvLFF/z4iy94vj7rltdM/MtM/qrouOmENFrTduxWVb21VdWDaFU7g0xR11UhNFMDFoV7JWZaSASn7drNNlnxuplFrwq/ztr59PuFSAJUbM1VKDS8E2r1lOLZNkVU8raB2wDdTH1MvHrzqZJzW1GkxgVT/IwIjpBnfKrEVjidC2UTptNGOs1M5zPzw4nzdmHdFlXVPdatpS9cvYsGoLe2d5a/2hNQvAV51Z5jw9zqB4pHsyDqkO31Pw9RsGrcnC7p3xS9aRI1WDJhLi3ndhuNGVq07hPN4muMcD6NbHldV2RdcfashyuvISdWO1YbifOZlBL3+/3QxbIjMz349r0920r3Uit5XcnLoirE9jsMuu8oECKjFN55PYOYaUcyJDRaUgKHAMdQXee6tQN0l900paGO25GfdVlZ3TqueUef3QjUevDSE42+UXp36Iy0cvnOQXFjsXUHboqIMKVkTr+e6/VKteAvt0hq+vlOPItfCSENhdo4BcKkyLeWzjwmDanoTppGwNafY80qotY7lPr4daN7pvOj+tiGWqoRNRQVrYdOx4H40Wl3blz/Tgk4tBFH7b4Z3JNmitTGR7MhsOOLHaGo2qFKR2gMv9H1M+seVNXB3PfX0QMKPfo5fxQ1Of6bYETZto9XEZuThhx1cFP6d/z/MED58PjDf/gP893vfpe/+lf/Kr/2a7/GD37wA3784x+/eE0phS+//PKn8lZ+2qHBRMY3FRQrtVJKY1k2Hu53HpYH8vbA9lB5eITLyTFP6oR7CRY3+oCIyjUvy2Js7pWwbdQ0DUiz+83UWm0AxSHUNk0TYZpwhpyoDHsXwzkcDbNpX6m+arumCFBpUvCIOSdrtr1tK/frleV+U0O+9UbZFppkWhWmMJHChJtUWrpDyNPU2xhtUbb6Is66dqppJ9ggVzRBFycfeonFDO+ClqO0pCFD4basG+UeyafTWOj6otgzIbfPu51g1YMhUxx0QbtgnFMC5M4f0KHc2xK9TexSCs6g47wVXRykQVVpeVfrYZIrtyD4XZWxqzqGkAgpIHGjpYRMypfxIVAcFCcU479MKRB94nV8zRQjr149EE5JOUrRKVHYrBEG497GjbRu0NYoQLBODalNm1fEyIpNrQS8oOWTWmnFkb3yOCQmJu/YqlBzJrc6+BTea/lHTBLbeVTnxVl9uDmErIq3TlDpi2p7gGNJC00CtQmlTExnbFxq9xlO0Szx6nGCT9SpIa2ySSHOqsirFu2Leddo+/F9ubMsdxXewvX/3+HtpghUcEYVECUi4lSbo8ZINd+iahwnnMf79GKD6xt1Hz/96JtACN64Qc6ky2WUI3uXRShFzfa8dkM4wDUNiELUza8EHVe1VnLMWok0LtGLlszDZhSs5fXh4WFstkdiaQ9GMEi+NaEVK23URt4yt/uNbV1xAsH58Xy6fHvndmybPouUuuDibvzX70cPGMT1zPeYtas+07HUkGbV0dAOuDSQmWVayCUrjwEG8hJjtORPA6M+h1/IN7id8/Yhd0iFFwUnGjTtqsEaHDw8PnI6z7x/ese63ZVj0yLCrCXzIrimLexpUiK+35REmlJiikkTHyt7xZhISRWnu5ZJrT1B3Cjr
9sLmZHSyOCs3Y15tIlqKHsFjG4HzsXwDvAzQ7N+6X1UPTlJKQxvoOLaarR/iDuW1fodGGcASMukrL9qJlLURQ1ovPek4OnROj3Eih8/0HAKgF8ch6HCHaAlGWWeQfr9heQf+vxCg/I2/8Tf44osv+OVf/mUA/sF/8B/k7du3/JW/8lf4e//evxeA//a//W9prfEP/AP/wDf6bC3RKipRWzMHRmFbNaDQGujK7b6xbpXyqvDw8KiiXdPM+Ww+BKLEMv/8PISzqpUUjtmHiLWENpUUTlF1T2KY8CEpsQtFAYKPVpu0XdrtxKht2/CuILGZdLRoVigaNJSspK/79Zn7/TqIYsFHSBOOyDk8EJMfLG9vyEeIu3hUqcWImLpRKTSoNXPXwJkmhw8eaiOERvAVF8JYxAYr3yL4bctDBO/p+cqzf9IAwGBbH1TWOoTAPE3q8Bzj6Nv3XtsnMWgaQHwXyFPflJJ3rYDjwvq1uqzfoWnnFLUQU6f0zpRIQ6aFrnHRhqpvYYNV4dEQE+W0QaukaUJioAXlkTQn5JYRtOzz+OYT4mmmRtWoSfPM89Mz79+9t5KjZk29hNQkWoaIBSKOWhyIqs+2mrXzFiGaT5Sz0lScImEO+KQiVslbN1jT5+dF0QZnBGcRR3UWpkijSNGAx0W64oLzWitvm/KTfIi4+42cGykV5vOF+XIeyFozJ+hS1ezQO93oQ0okOdFaYUsb8wPK66mZWlYNVPLK5X5huV2peTvoRxgJ1CaxjAzeupXEIU0F1FqSPXO0uUmtqlmEx/tosPvuFnxEJcYYcuZ5RTCdB+UA4S1IKVbS8U7J6z4QYsO1Xeehn+NYC2yzH6iAoSbH4MN5zxS1NOcMTZOqpc9+aLCmjtjdKbvVnQibt4379UZeVbl48CVEN3Fwe0naEIUeoMA+z3pg5HAvOkKqBTtiGBxOE57Rrei7m6/e5xi0PTiGxLZmtmUlBLWW6KWejtyEw1pynMuDJGr3svU2VO/NFM/QNI9qmBjK5pzjdD5xebgQogrXtVapoqaxiKd5vXchBqpM+LLpmPLKrzlZoJViHMheX+cUGctsebOkdNOONXseAa+lb5uHzcqGymM7dOPYyu8sWJHDdXvvtZTnPzDKtHm8oydxBGgDxa7d9Vg7lXSchTEGYEdowCw2sKC4bOrOXvq6KbukfWsvAij6+bPHGx87nGOUdY7HQI7RIB/2bq1vcnzjAOX5+Zm/+lf/6vj77//+7/O//C//C5999hmfffYZf+7P/Tn+2B/7Y/zgBz/g937v9/hX/pV/hT/yR/4Iv/7rvw7A3/l3/p38xm/8Bv/Cv/Av8B/+h/8hOWd++7d/mz/+x//4Rzt4ftbR3MvWP13oNmhm/10ypWbOa6b2LhlRKeAUE+eTPgJnA/d8Pu9s8ias6zIMn4Y3CpiCJUMcLMa4C9GMwdg3Uw+isvbVNSRndVUN0Yiu/cFVpGZKXrnf7tyXG8tyRxDmaeby6hXQTGtBv2kw+NmdPwWhSh1eIa2puZaaykEb9UlTYXXqZUNV6LD6RnQwhZmYdIL0BabVxhQStyZs94XNvCa8V/+dedIyl7RGcY6aM1MppPNpLNY9e+56Fn2xHdlvdUAZz7gv9nLoEui1a++CZWrmuFqqBZAg3lrf8LTWM7Q2MtjWzITMAruyZaQU5vMZP0+4OeGmSDNOj3fmhp0Cboq8fvMG5xzn04n3b99xnk/cr1eu14X7toJznBLW7thJ1apAWUuly8zX7MgmDz3FaQQop8uJ0+VEOk/Mpwvp/ECcTjjnefaRr55vSEwUPNK0OIjzCIqMuCY08+/2e8VMgyjbPLshX9lWalZjwhgSyeaH8iUqmER1ro1oJkLeR9J8UjXQav4ydWJ+uFC3B/Jyp24L6/LA8/N7tmUx7Y02Wmh74PtCu+QAsffNrett6KIniN9RiI8GIx+uEx0OZy8LjawdhrdMKQU2h6Q+zxnrivJDDCav5cX3xahk8CP5cZR6
7Hvyto21pKMLx84j3aitY8dKHL2br7VGtvd4eNEuLYdn2TPpnnn3a+/f08suuWQLPHZvGLr+i3Fp+nXhLNjvbcil4pNq/oQ5qFO7zcvL5cLpdGJZFt6bF1rfaPu5fI1U25sJpD8T4wmJteSPZ9td1RspRQtQghrCNlGUsHkdH1MkePCtkQuQvXVJBUU6UyIlc7W3FnPQoEyq0NY6WoaldnXlvTShQY23sooNW2GgWQNhsefKIcB90WhxQI56gJui2nSEQ0LXg492CE56cNfX0X3c7+UmNz5bLRSKleZ0bLZxbvuLeXneP+MYvx8Iub7Z9yDESl/edcTJfv8LfPbx+MYByv/8P//P/CP/yD8y/t7Jq3/iT/wJ/oP/4D/gf/1f/1f+/J//87x9+5Zf+ZVf4Y/+0T/Kv/av/WsvOCT/8X/8H/Pbv/3b/Nqv/RpdqO3f/Xf/3W96KgOi7gZSzpkQTFNBrQ1BrkLOjVo1w2xNaBaslArzfFJY02mpZ57O9AGxlo01b2y3bdhPK9BhEy8lYgOHVxa6E/A73NYHmAaTDUc1+H3volRVUlUBrWVlvV2VYZ03fIDHxwvzNPF4vqiEuWMYb5WqXSDLdmcrd2uJzGxVdQ+aRcXHc3GHvzuvZmSaNQHWHeNs41DSAeqFgnm9iPp3OK/qh+We1TgvBMTaip0XlpwpT0qCPF0emC8PnI2o2zUndFK4F88SVHeGEfUXqqtgEHEwyNO7MMhfTaq1JMvQmxERmhnL9cyrb2qDyChaVqlbptxXyu2Z0/lMOp+JD2fi5QwhqLhd0KBpWdUML8TAd7/zGW9ev+b502c+ffNuuNW+e/+eZVvVJdo7nNMSlMNRS6O6SqkrLW+EvOCzQshn14gu4YhMGeYMU2hMzpPE4UqFEAl5w5eClGrSmwqy6mJ2MHYUENfGPegLp5g+SyldrKvhvCkJ1zIIqKqT0TsVhCJADOqWHAIRqHFSAb5HlQIPTpC8cr89c9rOPK4Xnh8eKMvGvalEu+rbKUlRAzeTW7MNqrVGy1nHSUi0qHO2lDLE2VpTp+yPHS9KBtYd0iwnD60pD+EAj4cQlJiJQC3GIbOOPNHAUkQ5VUqirV/7vr6hHBOmfg4dUfDWFaNqz3sZaC9LuYEkOhOhDN5Ttk1LEd7E+9ze1jwy8Ca0g6p2CGHwXHrXTR/71chne3eiI9PnRWXbsiZeYHwuP8ZP52McdTlU5VkDzKOEP8CcErOJQ+5lCp3r/TO0wrWXMLqFgSD2OXsg6oMmk69eP3B5OHNfbqojVTMini5+6OdAExU/VFRc1828eaQU8qZoRZpnpqkiRsp1/Rx6WcyCCVOhUL5F7Xu6G0GiiMkeOUaZqD8ffZ17cc/DgXcDKvympfmJEHZkpSsb9waEWl6iy34kYjvZuyt5g5bpS1fAztneryTrEcDYONZyzD4mP5xP488fwCDug/8e6zx78vD1IOgXOb5xgPIP/8P/8IuT//D4r//r//rnfsZnn332jUXZPnaoINWucioGC1psiVTHthakrSBXRKDkQl4zy33j+ek+5KDnpOWIKSZSVNb5J/WVudA25V2UVfvfpQ2xpTRP1BoR7+jGdTFGUtDFNfjeiqdBiVqEu9FBUmujSiHXjZxXlrKwlhXvHPNp5nw6cZ7PnOazTp5aWbfV+CNN9cFKY81qMrblrN0O1vfVW351Lun/aZVKIbidIMvojujW4BiMmbespni1UrdCy5noHFMILEC2TDzGQKnqH1NK4Xa7kbeN8PY9p8sDj68eOZ3OzKcTcVIOSEqzQu/OmVIopBho4ihZdiOzvrliypmuB1l+FyJ1aGtya2OBUzJ5R41EM4lSTCNAkRlPJYuQV8e2LUzrwpw3plIIp1nb5cy1NVs7r3OOmE48PDxyPp15ZVoK798/8/btO96+fa8KlGUjLze2LWuQUjLt/kxb7kjNtLJBUQsEtjsuJQiB7R6pz4EyJXKaEBdpPlFC4nYvvP/qPcvzTWvt
PlIFakfk0DZq58D5OLIW55R+Wp2jtUquymUJxk0R1xDzKCptY9vuiAWizfVkK4xFVxxIiEhQvkaImZY24pyYThMln9jWC+fHB+63hfu6glNRP22iUAuDHc3YM2axEquKqCUtS1kpQFqjhwidy8D4e8/O+waiJU1v49kd7kXXm1Bbh4LzbgSU3jlk1Fy0pTSbiKBzKgWAzcdj1txbT+m1fdvsSiksy6rliNKDE29Dto1gvSFUQY0dMbJ50bbswVdw9jx0givxWjCRQiXGSxPWZR3cGO+9BZxapgrGFblcLjjnWJdNuyHLRs6JnJUY2yH81kQ73g2J2bbVghF9ZqVVcsnDf0evdRfe807XZe9UC6mZcm3XnQJGAOWs5OlaZevBnnOUuuG8kFLk8eGRx8dXvH/3bN1WjVYzW9PShXMzkykP951TNRJVANLhVZBwuas4ocjBoVpRvnk+qYN1a8PfytlCLk3L0RXNEbo2VmsVK6IPlMI7r12bXmUUQh+vog0aDusimubR1qxWFN2KoI0EuW/2L9y8bZ3vK3wfff0VtSnHMh+I2WPO6OM7xBwvMZSOaR2DFLFzP/7duU7FdRz9hEbfnAj7GR8Upn/O8S334tFAxLlgEto2eDBRMlABNC+UnHl+elZ+ypK5P995Pj/z+PDA+XLhfDqpb83prD36Iprx++5O/J6n94Ft1da5rWy41REXazGeZ07TWYOKywUJAaIbmgwDrrOWyVKztnZJo0jlttxZlivL/Yq0qhPw1SOneCK4qBBla9rDngs5a+fSlldyruQiB+ElC9AwYpcDEdUV0AVOReBwmC+MU+NDC/RSX1B61tqRDOtKkNpwJtsevSd6vQ7nVdHT2eIXQ+C2bdy3G8vzlfu7d8yXC4+vHpnPJx4eLjAX0pyIaQJjfasFO0iAJEGnu3NKQmsNH3hhQ5CSw0dHaAFp1bggbWSbrmfBCN6jmzF754SghM8mgLNFQG8hoamSZal1KDHqJjCx1QIZTtPEJ5888ng5c5pmoo/ktbHdM0su1K1CabS8EmvGL3fieid6QcgUMg6YEEKt6MakXUY+r1QPLkZaOJEl8fx25fb+DqURiIg3l9fWqNZuW6VAUzVg9U3SrKmIlfyc+vtUVB48zooIpsuEP0WIQpMVmsOFZFmuBre9xRHv8HEmzVDLyiQgNTOfzzy8fk0IwratWuKbk84J741HAeKDbsbG+awH5AHnyVsmRW2Hj9OEODdaZjuHI/iEeDf+rhmqlS+CfpYXXaTpgYohCIImCMu2UqUySyNFdOA1R7J2IWme25a535WQ2e0tBo/C7dwNJZ32tUkDHG+tldrhoYGRR7uDdF1olKpquT1giU4RzpxVRoC+4XkTAQxazuiiZs71clNCmrDcF+PUGO/LNcu43cieZ9NK6WO61GyeQVoar0X5UYRgqCZDFdaHXiYWqhTWbeV2v+O857bcWdcNb7pKtRRcjJzmCwBrLmxbppZGit78pvYybzA0u9Rt17Fxji07cEJMgRQmHs4PpJBYy6YbczN5hLUZ38gRozNdGYeT7qYrWrVxyhlbVg3wevkresc8n4GKTBMtZzqOFJyjFahF144qwlryGEvNkiLl9+iz9yFyeXhUNGmeQWSQx6WplUVMSj7eS5C+d89rUt1bwUXsMzunZw8c/IGLIqirtoiQS2XL2kCiZaEw1tkj2rOjRvvHiv2PuJdBhfTXgyKiFjiJ68ijaBBoa+seMg2m0y90fKsDFN0YHTTp1iUakXaSux0qhFYIDdZ6p2wby3znfNHOnHVdWU4nzstCfthe2KI75zjNM959wpQS99uVbV2tewTylpGLGFdj4nJ+4PzwSHViA6TDl3tnCYiVYMxbI2fW5WbKfjd13ZRmFttoB01WjoW21+axAWNaJViJpNf7dPG0G2AoEN4Gj7fsWi9wENN6VjV1tUvTOSjmP9PJir346kNgOqk2QjGC6TTPzJczF/v9uq7U5YntfmO5PhOfn1iv
z5wfLuRXj5zNL2Q+n0bbdodVncGVfeIMYhmMtmpvm0LDOrlsA3PuIL8NCJphNZOAdhW6yZzGczqpujLttm1wD9o+6iDUYiRkzYAa6l+iWVKDFI3XdOHh0ni4LCzXDXIjCoT5QqTAdqfEiXq/ImXBuaTZX6lcpkmD4xggzMRptmdUcClQ3Ymvnhs3qWR30tZ0p8ULJ01LL6hbtIjVpm2TFOeGcB3IQACdcR588ISosvopeaYUtYumNePh6IAprVrXlXb0hKR2ENXrPXR1JbSMawXqxulyYTqf1XMpJqpfaFm7f3q7e8fGnI3h1uqLGr1zjHKC955lWcZm1ssX7jjhuzcRfZ2V/e8H+FpJr3HIzKuZW6AUQz1tA249u86qteMK+FLGptLbZztvpmHt5O1lS6X0BMJ37kU/X4+YfDzssHs1vY4jYj26ImCYPPdSgXduVzE1UiVOW/Wb3T/X2sjg85gr+v5OzmzSyHljy1FVsr0jjlKG7LyVwzkpQrQMm4Ie9HRuT1/7dI2uQ7L/fD4zmeO7916D/qZ2GyEmcN4CpoqYUWBKiXCJnE9qyLjeFX3WmK0hrVDyai3K/VnbKmBlxVabaUFWFmsn7nYdXdMqBa+kUm98FRzBeWpplA0rjwuxbEN/ZisF11RssHc0qafPhe988obHx0dijFxvz/z4x3/Au3fvLHHaW74/vK/btqpStz0r39GYHqQcOoeOI76XF4f/Dm6MnZ97WFB4PJeP8btevMYgyqGy/qKYcyDGf4Miz7c6QEneU+p+4cEgf/0Bq2uMG9vbVxFhkTvFjAPv9zuXy2WYbJ1OpwFNxhhxITLPE9OUeLhc2NZ1kPYEhw+RlGZimvBhwvtkdfY2JnQfgH1D1A2zsa4L27Ly/PTE7fYepOLixP12551/p7mWVydPqYwBqddznICMa+waD72VcgyIw5g7wsV7XVShX7G6bVfELKVo58FhofSgHQqTSs5HGqfLiel05nQ+76Wi1jj7xPXde673Z/Jy57lk8nI3bYeV5TQbcqUOyzFpa2AzqD72Z9rrpgdSpQ+qz1HVQMiIhO7AcO9wpgy0SDNwy3zrTjTW1j8rF9bKuiyQM1WEyQT/UprwTQhRaFHl5HNRS/YpqrS2OS6Rwkx4CJw//S6vzxOTF+7vv+L+1ees799S8zOtXq3VVpicI00RlyISJnycNKb0FR8DWSa2rXDylVWiaR008EJodwaRs49Lt5P2nOylvB6IhZ5RG/KlFgWqGhxs0x8LptnJu1oI7AqiIaoVROth+HwhmMBc2VZO1zun0wPzSQOVsqrbtpRCdUWlx3sQ2kdqa8p5CoFSMsm5F8TB7gqrhF/d2I+6EjrzD8qxuOM6Ol43nIKDt0Csi7WZYJv3L4S5dj0OdaYWGMJrfTPoc+oYLx05Mft83DcTJZl3qH2Xm2/STJ15Tzg0I939XDq3wx0SDBHVPxH77Gpr2bhuCyJut9sgufe53btwOm9m8BKsk+fYJn3kQvQAxTk3tFgGEbt1DZMVEVjMmC+apsnZlGt9CHhTN22tsnNm9N87wTNvmXZS4bl5njUAa1oebqLBR7EGAb0HgZR6oXPXLVFBPg06+97gTQOmVZUZyFlRC9dR1aAEQp/AOfUpizVZILHhvDdNrmzorpYoJ0NPTqcTj4+PvPn0E06nmb/xN/4GT89PL3RxepCpz2Bh3XYtmyMa3wm0eyFGMc4enPR1f3CiBgqiie3HwoQPg+HjnDr+/kNyt6LVh/LQR2KZv5njWx2gRB8gaL+7826oHsoA5JzV4nuvuBttmkW6zbyKIJVNkZNuU95loru2SA9Y5mniNM9j4uZScT6R0okpngBPLg0xRVlFA8TquFZXT7oB1pzZljvXpyfevf2SvN05zRPiPdt6411V1lUKE8FPxuqfcA7NKspOhhXZgxPVEjAhIqe1yR5xe92NdrKp19bW1L033GERykqALb2DAhSiPwQ2LmjgJt5xNnfQ+XQa6IcHLkmN
BvmyqXrvuqjkfC1I3pjmme10Is0zp/OJ6XQiTpMRYyNhMtKYcUuct8nnXk7IZmq84gDjXnToEadlkoAjRUGatrTitHvK0RU8TSK9txZa+YpacWcVZKNk0ukEbtJna5BdrdA2z/2+kdeGNM8ULnzn08/4wS99h8k3rl99ydP8ivvpS8r6jvv9C/JyNR+fYlmxdVehf9ZSQ6XlTJDAFB+IroAPVNlQMbbeemqBm5FQtTwmQ3G0YwmD2NfMD8gCvGDeS9FrthWMf1Kli2h5GwPaRRdtbLk4kWvFpRmpJ8q28fD4mqenK/P5wuXxkfvN0MuiZQma0KRAPZC4xXhDrVCbxxULGoMfZPgYo523tR/30orltNmDEHBD62Qnuo6ABesiC+azdVhcezavSp8q2d27UVqrFFMgrbZhfti5c8yCfdDg6AU60oPGng07Z3o9e7DTu+Y2UY0R5/xACBQI7W3aqqSMcyRDIrZSKJ18aSjGUZa+lo11W6w8nMf16rwWkyrQACmXDZ/9HqDYOhDizq/pXVbddyfnjKAddb2bWpFiFTFblsXWHm1N7vc2pURIcayTGpCYwZ2VmfXfVKrBWZDdpCBSh45QFe14KVVl6KNXnR0l1jdc8AP9bLWqGmzUoEQDGFi3osioIU5rqwTnCHVvuw4WsHg3aVJU1bqjGfLh2DVKSs5cr1dbQj2vP3nFd77zHQ0Qg+d6uw7ydHeN3raN1RLEX/QQ2kDH+xq+bZuqbHMAPLpmSR/38uHn7Ic7rIt9DH/4Og1UOnfxo6EP8tGQ6Gcf3+oA5TRPZO9xLlHZSVfizDZdOtjgRoYFR10A7UxQddLMNE1sWXU+TsZJKSbeFA6IymmeNZOOkZg8DU9Ms/IEUNfd4CwQsJIE5rvSqkrbt2JkstuV69M7ltuzBhMSxwDo57nJplLxeKagA1Ba9/BoticdBJfMHK6jSM4duDAG7fcFomdUOyN/10v4sBtBP80O51Qqv3e5RPX46YGdq1U/V2yQKTMN5x3X5ydqKeRt4yoyFFe7RkkuhTjPzKczbuqTyoK9vpgH1SPow16qbkp6mkpSQ3rXSt+0Pd6b6qa1TpZQacVbJ5DVqK1Mpp9bWWtBlU/BS6OGagu4StZLTFo3p7AujXVdKbWxboXqHSKR8+U1n7668Pjwmk8++ZT1/Tu221dcrz/h9vyW5f5EXm7kupJpGmCFgAtWPkHMYTgxz4mYtG2y5DvNFYSZWp/NAVsJ285/YJgne8Cu7czenndFpBCiY0qeGNwuw+0dW+9+sgUP27hLKQTMq2makZLJPhLCRPAJHyfm+czl4UH5Xecz6+lEWVbdQKsGfNoF3mvq+yLWRRFL3kCEOCXg2P0hdD8d7JkNFKNnkx9kfH0MO7sGR+9+Evu7qXj27zB58T7wx/jo86KUMSc6inLMPEdNX/ZM9sU5deRiipxOF2KILMudZVkptRCdtbr35+bD4K6ICK0of6kjjLVW2rqOgKR/Tw96BvfIylHH33dyuapP750ktRRy2PA+AUITt6Mbh664dV1HGVi6TLV9rurMFAtmdi+f+TQboowq1XYXeLv/tSoR98iDayJG3t/Io8MpDCTNHzP4ppILqjdlrf61o2HVgnJzl+YlMiStWjwe1CZDzK/G9hUl8qrQnxJ9X8IGSg72tFpZloWaixGIF9Zt4Xw+cblc+O53v0t6n3j79u1QGQYG7+Snbesj0D3QT7/++94izeCT7KdpY1EY//ghYtKDV7UhOXDEfsY5DSOy8SFiOeI3h1W+1QHKmzefsm1OxdnKpjC06xtXZ2/r4qJ79T6IetZdcgHZjZSUD7KwzDOnZSFNEyfbdPsEX+aZNJ9Iaeb08DhaZnudT5wjxIRLwYaOGHG5gTS2ZWXbFm7PT9yen7nfrtSyqZKp1WpV8KyOQEvzPqH0djj7R4dqkIhL1hWhmUEXN2rGoMbKJD4Egknvj8GHbgauB27WlfO1mrPrBlCM7G+aJk6X85DO7iQ8
QQOC3lUwn8+8Cf3eC/fbbSwGtao4ktjz8jkTa8V55UiUWgmtEdNEc54ai3ZDeeUvNKccIIJykqTUkUEMWF20RVEAFzWrkqaEsxbNFM20AgaDvqMywZEdBITkHJIaNXty3QhtwqVGCwlHo3aKjkM9eYCtFKpAOF24pJmUzjw8fEIr32FZvsvt+p777Ynl+R239Zmn6xP3bSVLIzSheSXniQ+IT7h4Ip2Seu6QoFbEXRB51sXeifLFXVBpdA4t7xbId1G9GPf28xQ8KTgCgohlgD5YLb/iTISvSqU2j6+O4rwKyFl5rDYlrp7OD5SSmU8XHi6PPJwfuJ+vrPMzW0rULWuZrqrktnjVouiaNZjacXXexkjRTdqb2dxB56PWpiXVtgcJsGsw2A0wCH/fvPrvVBE64D1fExXrn1fLLt1+LId0DZBhk9EhA/bvGMGMbTV9zsHeTTdNE5fLWdeX9a5zv1iiUvMh4MHWuK+3M++aJnupqP+utxk758jbru/UkWHXrCwkPRvez3twz7xY2bhSqxsl4D6P1WtMSyLdgNHB+H304OieYY377cr795MaZDq4VNVRCSYS90LiXaA5E8arGijfDI11Vp6VUcJncHxETKCsBIhGxMZpWac2TueTrmdi6s+ivKGUzMhV9PWteVX2dYHeEt8E1lzV02ogZIzSzmxdOT1wbjaGNEi58fjqUfkp5zM4c6s2+gDsHKER8Hb00jhWUlG0la939YygMGdEqjYG/BwA40VwYsndcaz2379IenoC7DxYI8JgFFhUNOIf6UHrLx6ofKsDlE8+fWMBSuN6v5NrtpZBFDa2UoSpLJBbh/AsULHFT5oQiGTZBoy4bSrTfTqdWKyXv/siLMuCjzfSfOI1cLm8Jpq+QAwmWW41YTcycgw1WDRDul15fnrP89N7rs/PzPPM4+MD0zyDs66M2qzOPWv274IRkLwqGhpSkPA4c9BtaP3VdbKSfbcOZBUsEtd2ISeDQ/UlL2Pcr9Uj4QX2l0LgdDoxzbNmm4DUwrKttHXby1jrSmkK1aakPJ7gHOuyKFJhui6+KinTNe3bX5e7PrugnTwaBaLqnwhpSqMcoloFVU3HSh1QdC9HjWsdk92ZHo1uiiF4WtcmMHRny5u2n2YVIErBUWIgMOu9DlpGyc3RoqiJXkhMs+N0nlnWTN0qW8u8v185XU9D/yCdLgQ/E18/ci4by/2OlJV1vfL2/Ze8fX7H9fpMWa8EyTiphBbUUiFONOe0vbeB1BPBXchuRkRbzHsoeNS/8dYBYg8U7x0pakBsgB/eW6OiVLxPNKRXKhFrQuumjq1q3N06SbRpg8yU1Afq+nwlxImHh1d88vo1y/Mz15iUdF4raxOKoR+wC3lJ6MRvfRY6jqFuGRcbQ0NlhO27ZHsvc9VSlcMkqKcPulnpXOgCZcrB0RKDIo7dj6UfOxdgHZt/X6hTSqNE0dtra61jzvfrGgu6YOiW/lG7MhThW+6LIhegonablnbKlkcb/WiZrbtmhzTT5fG7e7N33TJjR2wUjdDg6nZ9Hptga2ZkyB6QZUsWeiDbSzitofowh7VhMbHG3krsnDdlb7MTsDV38MhEiH1AAffrjTRNw8G51kqa5lGuAkvATZ1amhJ4b89X7reb8u36vRxaf34EWiCGYNxJc1N1WSvljPLemCOekwuI/U7EEkFphODUFR1Pc82ItqpN0k0lUzLU0/kdabeyYDURucHrq7q+PD4+cnm4IMhAjTrH6shLaUDASi2dX8UOhvQ24zFu0Q6vrpsy7qXTMT/Q4vH6wzrf18yO0Nh6OUi6fne51iTGjzF93CKc3/elPcveA5hf5PhWByiffvc75M1TqjDfb9yXu+pUFCibBhlSC9FksV0tOnHFZL7twQ9VG8H6zqsJ46iMcomJbV11Ipsugw+JUxWcj2xbQZy2pnnvQHQAB3to0iqlbOoiuVxZ7zfut2fev3vL9fkJ5xwPD5o9+Em/674u1Cq2SCRCclpq
8pORgd0gE9aaaU7wtQyScOtGTWJS1tKMtAq4Ru3oySEI6e2He9ayb/Ijo6zN7lcYi2Q7ICGtbKz3O9v1Rtk2yqa6LGIwcMua0Qdb/HLeNIuOQc+lNm3/c4W2LAo5p4Q3fZOa1C6vWbTeheUULdJNty+EiBwkxWUQHZ0hTzsj3q6jkxO9Cj5JK0hXDa2VltUXx6OlHjfNKDxXddMM4Jwwz4nXb17RcFyfb2Qyb5/f0lzhPE1MXv1AUvKEaSakB07TG5JveN94uL3n8v4rbtd3rM9vycuV7X5nvVdC9TivnTulVlpzGqKGM8GfELchqJJth3f3QNN4UK1AaEQxiwEEadV4TSv35UY8JYJM6qDqIAZFg5rXBc6JBRA2tltMzOeTLuQ06rbgzbzydFIo+3I683A+I1sm1IYU3cBqJ2GDIR02xrT+hOAR14ZQogtuwNbeAtRxhVbq6cHO0BN2XR9CDgZ0O5k1Ru1Y0U3CHzJHMf5Eb3ndBcmSdfFMtsH2jbyUgiXy45CxY+5Bcn9Fa43n52e2nEne1JHLLk7W39+h9v5v3ntoVcf7wSW5g8dHUnifv31j6T89uEg+jO9Rvyo3vkPF6VSPxdedbLtZJ07nuHTNp+5b0xyju6qvFapdpJl0rY2ybWz3hdXuYScrZ9tYY4ikEAyhVkXd2/XK7Xpl6UaIIhawekvERlVtL1PVgq8evHbujPtHw/s4CKfBOyVe92BWz34X4jNAupe29HlXDZMdo9FgnjUQn6fJ9hs1Eu2NGLVU5eNtm6rhRq+8msMzOz43O41RhtdS3EHQ7cBxak27+qTtiYo+A01AjmOyB84vvudw//r7Piz97GP6SAjf33UMiF5yT8QkE3+x41seoHxGKYltK5zzA8u6siwL21rIi26GrWSg0cQRt8wyWuAi0hi6AvASztJBrxmMM0fLdV1VE8U8ZlzwpHJiajMiulBjwlZlXchZN8lSNrb1zv1+Y71fWe53btcnrk9P3O439bCYJqbTiQJUcaxFW11DiPg4kSYx3Ym9owHr62+LQqbNdYKoTVQRxMch0NQMjsOrSNEYvgcIukflDVvgQtDygmWSpTUCHteg1cZyv+O3DfpCllVLYeny5ma05awe2j9f5WCEuhXEW826oQFFSrgaca2SUc0YHyK1FWKdNPvsi3SKuNaG0aJ3gZCUayJVhqDd2KRd38TMk8RFegcETvBUzVJaIHtPsJZEWlMPi+xpwYiapWg2EyrVFVwohOAJ0XEOE8IFvI7D97evWLYnHuYLk49MfiLNE3FKxOnMPF24fPop8xzxpwf89Mgnr79Lvr1lub7n+vzEu7dXihSm08LpXNi2opwnFyktUl2koSUxQUmDFdVVUTE8QxGM8KrCVBun85noVTmn1cK63pnzSRWFg44VFY9q6vzTPXWMdFuKQvTTPBNjIOeFUgNp1q6uVjZOlwcuDw+8+fRTonM8W/lLPKz3Oy0Xi0c+rHNrWVQ3GT9Kh8fNp//oJWp3HAbVi9t/3xd8FwwuZyeWC6hnVF+InaIyXYZgXTeTgVc35DRKmjvP40PkxRmCo22kR+6K/XvvnhE975JXfZ61DlKjBiGHMiwHxMk1deq1OapZql5n6GioaAeK7/O8NdLBBDVvG3ndEOPXHbt9xO1Kp4iW+dheJjDOOTW+tOAEEas9oCWqca7mcXR4ts7pPNy2hfsScUGJvrlk9TLzjpYS2b6v5Kxdf8vCer+z3m/UvILoXMbudUdnepmiH7U16G7QDkR6IFAseFLOlxNPqX3N7HuDBuaKWZtUhGjHYPR+ONCLc3ijA0QrAYYYmOcJJ7CuK9frlefrE8ui3UzLurxwdH5ZFgSce8EZHPzKMaYM2bQx3ooK14mtu95AC1MKGOv74Qv4RY7jGH9ZCuo7xzGo+chn7rDWL3x8qwOUedLWz5AaJ868ao1lXclroeSqrZWiEFzOlWVdud3u1qcP25pNKHKvz8LL0kZrTcWt7CGHqMSlWRpp
mqhlQ63uhVYzZWtAZdtuRsKCWjZK3ti2lWW5cr/fuN+v5KIRc4zRFDNncJ5YIa6rShsbwao7WtbWmHU51UU2OsKUcAb1NjwEiKoNpevDsYQTg7ktB0KvZQ9iH0MtsdeABUVI6JF93aF01UpgkG9LrVoeMbfhUZfPWYMTMdjYJjro93SX6FYbMltrYGtQHbWo2JQLkZQzaZ51Qcw6CSMnolM0Cde7LjTjalRa9Rq4ibXBdSt7qxU1ExEaonZ6o6zFcFMYuRTd7POsXVO28IV0wvmgCEATmgtMSQ3sQohcLjPePbIu6oq6bgv5fiPimfxMmmd8mgjTmdevGikm5qKlxIfXn0Gr5POZ8/mB6fQKF59p7sptfWIrKyIrecmUxXEtMLIXEW3zFUer4L1C0Y3GVgtChQIuO9IWkVKQGBUVEuV7rLc7eE+aT7io99eZmF1x2j1Re+bu1QIhF4W61SgxEtKMjwnXdSXOJy6vHl903vgQiN5zv6tXz9Ffx3acMQ+dlFGi6ejAnj1a5t9/DshBKzoPO6py7Fo5ts2CBig5Z/V1ERU8u91uQyVVOKhkyk6KHcaiPeDogmhiWe0hSLIYBW9QvYBlw9Yy7B1BjBTeSfZ9YW/aydOvIcadqzHmOAc4/4OMXHknakbZWiOHOKw7jtl7NeKnlmCV49BkDyA7kTOlNMq/nbPV702TRmDXMRplhI5uOWdk15XlHsY5iED1dZTbipVF7vc761Vbo3ujwSgL+kYrPSDZUaee5Y+ErZ+LCYrVlhXJ8xCj2wM9Bk1+D9hsg3WoZYo6m2jCp+jkjiTnnK1BQpjdhHOzIiqjHK7zbFkWawmXwas6NiiMQPdQhhrFqzGm9Fyc3VuRHdUeKHj/X+GDIOEXD05A1+8jstP3jX7Xx8c7tyvvHsLEn0ey/fD4Vgcoa15V6tyrVgne4VOCC0jrks46MfKmUtPP1xtPT0/cbjfEPSMbtCqkcLC0tvJAMGjRe8wfwuq+Zq+ultybSiU39XMoLbPlm7Gv7UGKRum5ZMq2UvJKrRs4YZ4nHh4emE+6YUVTzqxNVQmz1SvXVUW21BivkmQipkjyCReDKksaiUoDBjfEx1wzbgcqfhRDHCS2EXU3M0Q0SfEuI9+Hl7dFViF8xiQQEbDPGlkaPRAJEJRPMvrunQYb0QTZtlqGY2iWZuRS5YS4ANWyc/HRBOM0CArThANO3YcpeJMfb7tQkDjtspJAy90g0HQNnOYRYucsTZek5mQsHp20pp0aii7kXiYEUoM5BFyIui+VrHBxK3gfidHzcDkxxcj9DrdaWO8ra6nc6k2FykLAxxPX6528bjy+emA+T4qopQB+wk8PnF8lXHrEn+74+Zk0vedyuXJ798zT+ztrBbkr2hCDLcg982wV3wquaT1cW4ozzkPJK6VspBYpeWNZ74T1pPdzVrJitGfubRGuGvWY3LwQY2DdFkot6vDdS5De4adIOp9Iqwr4la7pYAu+N90VFwLL7UZeFku69wzN6qSIL1AdhPiipAO9TNX219s87UiLE2c8m6Ox2gE97OUR0WCrtYqUwu12ZVmWYaKnG4kfnJO+ifRNqTYlgQZrY+5uybTjwm7nLBpMDDzvECB4txPYxdAgZ2VM58zxNsUXML9JH41b1zkkwOA01FoJ7aXx5mSlqmPZZ9s2ipjUejMPLlFvo46k7siVbf5t12053ldkz68Hgnq4970kNDbcWkAaBZDatOR903V7vd8tWNWEo/P9KkKzJgh3CPxGUNIDPcVN6Zu1dt8Uvf8VRnBjz3oIow7MwQK4Uu08d60ldyBLaxKshPJci6KPr5S3NM+zCsyty77GuJdlmjE+bQrofO4aS6OAtQfo3b/ocO8HabrPAf7mjn4f++f0IOVnvd5ulQV0A0j7xse3OkB5//49MTWm6YxvqsDZszcnnsvpzHxKVnNWyeA3y8pXX33FF198AcDz85V12b72cMfkdh2KtIzfNnVtpVOeynK/cl+uTLPe
zm1b1bPDq8hYiMoHqBaclG3DO8frx1fENHG5XJhPJ84PD9SQIERyLbSWWZxjW1flCFhvPQ4qjdTS6NYQd9Q6EXwyZp1l+82yoGAmh8EChD3Khe502zMcjPykfFIx/061ClcYvQ5iVwwB8SrMpvXoqMFJrcYRsUwqBvXhCYFaZ9aS1fiuZzmWHSvvw4ImUJ+YqoqrtVViqar14VQCfxL0c6N2DYGpj+L291lZp5d59PYYR8UyJ2eZfQ8wpWnHQNfRaLapuhDMPC8iIeBCIljQWkpWyJuEB6bkkTohpwK1st5XJQ/nSpaGDyvbVqh5475+wunhzDSfmOeJ6NRkMsbE+c0j6fEz5ocb5/NXvH/1nqevZt59Jfj5ytZ+wv32pAuwlFH/baYpElGuThfiK0WUw5U3nJwGmdAvMz5FLq2Q86abolkhNOfIraB7+S4slaLe69aRPhFciqR5ptVNrSAeLizLjbDNzOgzmFJime6aCDjHUynUbQPX+8UYq5t2khyg9F6SPcxbu8CX77NF3AcL3N3xo3cn8G3bjAplnJycud2UMyDt5QrbuzG69Htv0e1Gd3oqGhS15gZhv2+cx8N7j9S2K+d+8JoRCPg9iOlzrsdaI9O29x7h+L5h9rZVl42MaoJ8fdPswckg27adr9IJm92gcy9hyfiOzu86GtG9PA7lnRfPVUaCpMROMcn+xtaE+/XG9VkDRSdiPx/eH00+VF5dr3sngToz0FMEwnn1OOrliUpFWlfhZgg76tr/8rz7GjB23MOfNYhXtLHVSrENuvOSvKi9QN9besdY50z1sfBhELCXTfZz6Pd83HcLYo8lz+N+9rVjJAG/eOTwsbLNsSQKjKaMJkIAI8h++L5f/Du/1QHKu7fviFPjfG74FAlTJKbJjJl6nc2T5gnnA14c7bEOQluHE1vVQZ+LMudtbA7xpiEIhSEH0pCsA2+yjHK9X3knSliT2hAqIXqrMyucervdWFbtc79czjw+vtI2zMfXPLx6zXQ6U1xQnkfQa5kMNpYqA+oV6W2XQPbq6WLENmc/ffA5891xaBYRYiJNpj0gYqqNHV3YWSidud2RlD0j6bLjBRE3pN/ttgHYAqqGZS5U1Ydpu0EYMeKnRBA45UcqyvWppQ4EyDk0e5Fu+GfnSmbDLAaaLTCtMWzMjUvSycLByne+eCWUGglzX/R3qXUc5tvSkQhF4Cq7lgPeE7YNnxLFefy6KAqSwKc0Ftpay2iV1Y4u4XRKBK8168XBtjaUalDI241lCWxlJT1rG7uPil7MyXM+z7x6rXoqn/iJKSQezidevUo8vA6cX0GR9/zkxwutekqGmrteC4r2Vad+PVW9XbzT7pPn5ycldYdAisE6Hma2bSUIuFKYTyfCrM96E7Vd6MhjiZHNQUoHvoehjM0BwcQAzyrAF+YEwCsfaJcLy8NiqFmklsIdKPlluy6iUL3Ow6oBrTSOAlI4tyMpBw0gJZZqZj3k/w9jVRpmkeBBPDmvA03ppcohfc/eLdGz5cFDafI1hGbMJVvEP8w7u55JL3lAN8PcCbJHpdeh4GvdIrCXe7zby0L7bdsFInvbapO9VTrOp0NJjfHZPgZCC+Mau5JpKdp51EmjMWj/TzU7DOfc6JrqQcievfeA8WUA1WrFixKxUwwjvlxvN+7LwnLfg8TUJRJwiNSBXnjv8GIdKhbcOpOuV3n7gBuU6V24zwdFVnZZgUMZMKBWKoexUmujFqG3rDXpRo7WVea9ok2tqaajY9zf902YloWUIjGFF/f8wzH54hj36iXyt5cM5cVndGn7nxacdN4K0tG5Paj9WceHn6koNYcna+eKIVCHfejFa74BlvOtDlDu68bsCs2vxKaqlmcarmVOU2JbCoGEOCFNatokDWKa+eTNpwgMzYS8rsQC27oCva++aJudGD8Dy+QFlKikXijBy162oU9wxzRdOE0T25Z5fn7mer9SJJNSUEhPKufLA+fH17jpARfPuFpJLjGnEyVu
1FQJtVIkU6Vav74uRsGrczCt6vmJQZ6jJm317i5f5KIpjDqT4kfLAN5IV03INSsRUnb5cO1w8NaSrHoUIQUlhkmhWUeMVEVgcJ40zUbwjcRayK0qaQunvAQfCQFODw+Ig3VZqCa7jXcQ9izCVd2I1HQN6roRYmL1K3hFUNTV2DGFqBmJiJZ3vBgp2IiRStTRSL+JlQF6ya5p1F8aEY/4RJtOuKaoGH2D61ksaLbeqlrTi6Idzslo2aU56qYtnZ5GcI0pOeQciOnEWVnNtKYKt7Wu3J8XbtdnhdEdnM4zDw8XtuI5zYVLOhOniYdPHglBOF0m5lNiyws+ON4//YT3b38M5Y62C0MzF9euGtsK5LJnyx2qn5tQXCCGiSnOTGdt865VeAgaWAQCVRrBoUiQQHKe0jYbH13EbqM2vfaQ1Er+dLlQt0x1ARcLwXniaUYO4mOacd6NT2KBitXzMXQDFNV2Vs8f0bM09CFaZmvBbtDBjhTBwENF3zpfIGp3SamNZdsI3nNKE++bmq1Ns7dNERMhsyDa7Vmrs43SsZNLvbUyv4DJ7UdMgE+TDr2WPlubqKbKHjzYom7GckOorjmckcGbk4PInN6vJurd1URdgjXDcuStkKISzpctk+tRaVcLGqU1+6lGSrdrM66SE0VWU4zQAtKdnsdV7v/pbf9dnbcheNMfCsHTmqKLzrq6NtOjWjcdU8FI3j1pFGkDCRWMB1cE14wYjfFMULdzb5pKmpSZuJ1D+Sf2mVrs2YMnH3qpWhVqjzweaVaic0eehdjF2xrmoJvD5lJoZWNZA2mKpBRN/LBypPK+QD7E1iqnHW/OocrOdq49aLRRZP/e+kal52d7lTNlaOkcPDl0wYkNxiNaZMi5ytcrB7Mj0uMlfVPc37VfA6IBvwXoAzkUz66m9fOPb3WA0olbW66IF3wTlX2ujuYrDeFa7yQRzng2ybScVZI7BE7nE4+vXgFYC/CCQzkTfYA4B/M06eZmGZAI5hsz6XvXXTJa5fEn5umCF2G937ndVSsArxMi10K9PSMNzg8Lp1fGHDeeSw+AWhVaUellDK4fRC0rSTSb4A6TtZa95mgrtQVXypoVW8u909bNUd5hH2S1VXzzJHrbZRiLgEMXP3DDbKsnGdWBeEdMSUdWa0SZrQ5brVSiWXe0Nu/zmcEdqAPetIXeo5ooruLEmw6Cfi6iWUzeMi5s5GllS4nkZyYCLvmxCGFkQkFouWhni21orWrLbmtVdzygdcXKquTKEIKa4jntMohJeTwpJbsORVq8M3fn4Mf5q2GbLTqongLJ04j40IjefJxCpDXzAGmNrehYzlURj+vtzrb9mETicr5wmmaCVG0ndI40n/j0O9/HBcfb94+kKfH2xz9U9M81WmkqGFcLrYltCkoCTVNiWRautysSEviJZ9EF/tIgnWYER7zf1cANR3RKMM6GfE0pUYOKt3XfnmpEyhAjPqmR5Pn8gJSK5KJGjCKEMmu3mFdhw/v1StmyaaR0XolY67xlfJa9dRVlHcQ9ULC/aux3KH/4l/C3IW/N5l0IpofiHG/evOLx/MB2z1yfr9ryb2XeXlf3Pign64BwOOfobIdg6IKDQZLF/n6ExbEAe2SwzkKBj2TSwx+sl5WMW+EMlVDEoI0kpbdFY/PXew3aO1LSRJGgzSwtWmsqN29qs8450w7R7qjOjXHOvH8MSelk3IGMyH4t4+jX3zdv+rNRJKB7//SSSEevxuss2NzLE31NUrQkhI5w63PyTrvQwA1U6sMumbHhHs6zl/NLNnTsIILmnKhx4mjXPZRVxnPTT1ERvr2pQBxq9Lpktk2Dsy1vH0U7elBqACDOd32Sfv0vDQZ7UCBivkEvVnQL2F5+w/isF3jfYUwOynFrhortJcQXx4cGhOMj9BzaB++QD87kZx3f8gBFZd2raDsgwLZsRLzB24ngI5NFlWK1uta0EzukxOXxAeccp9PM/XbjNM9D3GbUhL0fPjXFJq73ER/D
yMBAmLuHSYrQtAX3er1yu9/wKTFfTvjkydvK9fnGuhSm+YHzq095OD1oaUjUzr6K6CRw5v3Qo1u/iwthmVo0VVV1IrWmryOE2nopY8/i+iIoh0WlQ4bNYOsOPTvvcUayFZyqpQ7VWt1QVUjO4VIidThVOrTXyF0Ku6qnRex24mgtXNpMsaww2PrmPEMfIxB0EzcXUmeu0M0muV8WJXQScdJIzD0FMKdfR5pnMkKmon7EsteibWJXK4FUewZKvA5MwTqbUjQFUZX1D2kywnEYm0oP6myQjpp1l4p3QEphEBWj+czMPtGFmGpDS18N1ryx5kzJC8v6zHK/EUMgOQetsi2qq+Bj4ju/9D0un5yYz5EUHD/5yf+L+/0Grej+4CJC0Zp7E7aqQmjrtvL07CjNsRUdB3GaRktqs8B2nk/4kDRg6NmkmDdNdfioom9iZNMeuHUjxpASp/MFUqUaghNFM3aplfV+5/HVI8v1bjFozw0PUPjY6A7Ccz1aOa6FlkFj4z44ZwifEYgtA/W1y5vrvHl4eOCXf+VXeHV54Cd/8BPkh1gQ0UueDFK4x1lwoG3TxxW8o1Lw9e6HHkz0QMU7lVvvnWS9bNPnIDA0V46fdexGGn9vhso4Rsasa6XZXBi6EKMu/1V2ATERwVtnW+esqBmm6t50smlrSkp9UV47XNuHCIrYfXcf/IzyGJA3FcrsGiidX3EUG3R97ZLjdzEQ1GaJpd7b/j8MtOWoBCyiAnJ0FKEPG7fzOWiN5nbOzSijjXLKMeDpDx6754KIKXg7NeOsVXVejqTlY4ntRdnn4DmmENsebO7f9/UfOT6X/v6Bpve/76erl6KlS5FDkDI+wR0e5T7Ad8SND4I8C9Ks8WD8+0ejm599fKsDlFIKoYrByXv3TREdwDFWlXVPKgffTIcjWETcyaIpJfK2qoqrbVBlU0Sks8u7CmmQbmSl8uzKWlfoTZK1j5XM7XalWBfAbblrJ8MlMc2zEqhyYbmvpPQlrz75DudXr3HuDC7gXBhEKh8iEaFWzUp7q1mfAMdMIoRAaFqueLnAOcQ7I6JCT976u4/BjMhuYX+cNGJZHofWNekpKrZ4WXYXzYPC90mO4Iq2dJecVTG7f5ehFD4EwoCBUc8Y7xALkKJX8murAqUot8cZlIpQrKNq83e8t2zbW9eSgxY6r0Unj3fOsitDqFBukbYK1uFw3EtcgHnj7ItlJxxrCcwQrjE6O7sfJHiwVvWWFQUAMJFqOlWvNdVxSdPMHBM+Kgl6zTNLzqxLZrkt1CxsZSE3QYoGKAicThOXh1d8Mr3ik89eczpPkDyff/4T5DmQtwXJ6iasGZnXrq2mlg8Ld2qFkhUK7uJbrUE6K5LhnSOmA2LhdPnajPgdWsQHDf8QLQU6G7Md0vA+4pJ1kgR1f62gJcJNW5y3uzrD3lBEi2q1fmxsjB34sBDbCjiegbx4GhYIGH/D72qYTYQtZ+SuPKOUwgv32a7+260c1HzR+A6HY0cRDomAPmgcX+9+OG7uPViRsVnuScb5fB5EVu/9CBx03ocdRfBahqqlKFpoqIciCErs7mteFzPsSUtPTpZlUYK/O3aBOBsHPeHASkp7YHTc1Ha+zIHbwh4rgNhGOD4NkTbUbbV7phua7pybzpcA9/L+2rWLnUswiEsOM83pBdl93Z+JrkMvmUGqj4V1gLrDeesvndu7G91+R+z/e8BipOlgpfWuE8NLrZ9+3z7kDe3/1XvVRmGnlws7aqKJYk9gnb2+n8PefSSH/92fg92A8XBevMo2ig/5Lx98wviT66Uk6d/6cg70wPGbxCjf6gBlXVaEu/kjBOIU9yw/Vryxs5XMOhmZshOpNItwzhEkaqeKdejUXMhxG4qy0syWuxSqqILn7jKpcLn624BIJefV2hPv3Jc7S15J5Uw6qT/Paq1l99uKiOf16zc8vnmt5nhBCazBh+ERMjwO0OCq+y7sUXpfELzJgr/MLpSYqsO7q2QanfBwN/eM
4Nie1r+nf8cLIS3LPoJJh2NZ2jAmdJ5WLVBAiLbYaTRpQkL9OXTuDCp0p6K3DacCBQTLwFtAgy1DfzAl2R6krNtClUrM2uY3X85DJVQMESF4awu3z2rKrSk5DwZ+j/wHinQYNxZyjSxUPz8YuqXqqrUW2wg1c+pUihgDYkFuk2rqlyoZnfMCzuOTV0PEZIHnNBFKZJoS85RoRViXjfW+UD1ESYhoF1M6zbx6feFNfEOI6jMU5zNffv5j3r/7ivvtiVCVx6Hrj5WVnGWtBWqWAefHuFs8ECJSKoVtjJzefbXc70qylUaSMEwJtVSqC301FKmhol3ROroomROosGKuPLy6st5W7TRqjfV+V2ShibVIC50Dctw9ehm2j9kRjRvaOM7ZNjpFMTTxWNeVdatWQgnc73deXR757NNPefPmDbfr8/ieXuYIXYX4gAgc50tHWUA5JX0u9iz+GPg79iRIkH2zNTuJeZ6Zpmkgtv3ze4LVHdeBURpRhdcDIhOCmkR65RuUUrQEIRqM9s+vhqqB8RyyBg1i6qTj8/x+zT2AOyqgDndbGGNtf/3+LPb2WrtHwVtAt5sZvkBnxAQCPtjkbQFGt8BdP6Zv2F2l1vm94HEsaexjw42SWQ8QelCm47oTpu1r+48lfXs5z+6ZNJwLL7/vp/z5GHj1sk1fcfb1uqMgeg9eXMJQj64fvP7rQd3+O+zzXwb4Ysh7R7F+XpDyMpD7eBjyDQGUb3eA8vz8TMoO5ydcSKQa8c4xxck8MQr4htwhRm+W7A6Z4oCepWkLKqIR4LKs5G0lb3lM9FL2P9fWlENiBl4hRrxLut23StlWcmvkXHl6/8Tz7ZksmROVh+0BvyQTGqps64rD8fz0jvV+pW4rbgqjhOEtQ2+tjaERgpa1+pPusOWHiMfO6nZ4v2cxe5eBM5jfMoauRIcGIdV+mi3mR/EmS/cIfu84GAu2tXrrhuCQrqFhZQ8HqmUiO7kt2KbdeTUx6qat88qCA+nXH8yF94AK+c4NqizrQigZn4OZ5gmz2ZcrIqPoSm6FUsyNNW9UUwp1TUZQ2uvH/Z4WU2bsC5czorI6B/sX6qRwhPVBFyszHzxC2wGEqgJqUkEcy+pwwTFFR0wzPgaaB+/SMGB8bA/crwvblsmLQvOnNDGfTHdk8vzSr/wyJE+cJk7nEzFGvmxKJKyb2jjoCevtdg5arqxlARyn+cT2cGOdZ7xXdKyEgKuBZqhXTMolKnlFmurQiKEoztCA5rRsmbOal9GaEtaddpWd04wjUNZCXSsPj69Z7hulCVtpVPM7MtEifS7KWrTg4+UmxjHDdcqV2tG8DzbFJtScVYwRDdpSCrx7945PX7/hB9//Pl98/sv8tb/213aPq4Gm+KFD0o8XEP1HNr8PNyA9hzbMSF9wZERG0NHbfI+lDw1uIsECglEyjHF03rwgkXbyvHPKHRNtsVUelrbPz+eT6UeFEdSrud3Kcl+0/OucmZVqVxjb9qJ0crzW4F9uwsrR0fXD7b8YiFa/Xa01LRfyMvjrC5nzxp/u9xAOJRAZz17fr5twMGQ2GPI6kr4evLodFVHAxZR7ndu9vQQc+3rcxziISZEYetIDClG0o7VKYBfDkx5kOOs6c3tAtAMYfRSPy3oxvo732+0v+ZoWzT7WjuFB/5xjMC8vXwKH4OTnH/3efYBtfe1fvsnxrQ5Qrtdn/K0R4sx0vhDcGW+eFK02Siv4qND203tFP5w4E7KyhapWai6DIHa7Xc3hWEZbXil5TMChOunF2PTa1ipS2fLKulS2dWNbN96/f2LZ7oRT5CG+Uu2Pli0AqOR1g9pYrleW2zOlbszhQvTaiZJqUg+Jg6tlH2Y9aOjByD65j6CbvlI3eRlw3R7AaI2/aziMVIF9gemfV+0+dS8dvNe9wasuijNvkhijohr2uVrq0aPzeBxoKe2QeXY1zA63iuyLavDBPk8j+mQ2A/10mxOKtRxv90U/32u7d0dEFNnR61rXlXVZ
2MwDpuZM2zJS7B7WCrWNgA46srRntsrA1y6H2FTKvD8T37Nnr7obzZmGBXDd1oM/EKY4GtT/rjbdlJdMdpWHYPwXn0hRV+TNbfgQeHh85M0nbyilcL+vgzfknXYv4RvpfOGz7/4SPng+efWah4cHpjjx1Rc/4entW/K2aoZXKw4ty3Wzxpozy/3O87v3ln3tY2s6n0lTIqSk86jfYxHd6GvGR/PBcRpUc9BsQLR8l5K2/ysJeWI+XShLJk0nHl+9pjbhtir/ptWm5Oa8i1l5K/F0HlWHqjuhcuhgiAX17ETR/jqd36Ym7DTbfXr/xBeff873P/suv/z9X+azzz7jr//1v27ZtR/z58PgBPZNuK8T3TiwSbXS8E76HHPVApFggUXnp/T5d71eX3z+cX7W20LwnvPpNEpBXYSx86Ccc9ZFpUTvhgz9lm3byCJqF+G9ajJNs3YCmdP1/X7n6f0TThx5W8ccEVsjs8gLZOXD+9FXo/7f3Xl3J+u6YPpSKMr5AhVhH3sveTzQUTIHB7Kys/ZgKzfYONDuG/MIaroH+MN7+4q5P5dhUXo8FQ1qrTTdw6z+Hb3E0wPZ47M6tjCPuyJi1AO3//sLtGife4O0+kHw0ce+w9bNjjB17srhePm+/usD+vcBSPIhMvjh8/3w+JmhyN9EnPLtDlCergiZlM6kNbPdb6SkxL5buqlYmIMuvtc3CdouzuScTta+OR2Rgk42K62OB9UjZKOoasujLei1ZIrJo99vm6rVemEOJ06nM+fHB1KakKwLjrRCyYW3X33OV19+xqtPv0ucHwhpIkVPtglXDjXSMWAOwdJxk/dO3TalQZU9+u1ibr2bJzjbPGvFO09pWY0Wq2X/9h21aNtoKaaE2+Fl45tMSe3SezDRs7i+oQMa4Elvw/QUQ31KzqbDYhPOkKLu2NzM3s0HrYGPtjqnKIuI1jpbrSBGVq5qyCVA21ZaXinrwul81mwPyK2q70l/bltBch2TvE9qb8GX934QCXPOrE0DiZBOhGkCF3DSiM4Rg/JRaivaYuqshu2EIkUJzS5ZRqxE0rV3TzhPrZlSheohzDOxRFxJBD8ToioPt9a4rzemdOJ0PnN+OJlXTCZGP77PVw2e3zjhNE9MU+JhPvOjH77my598wfXpLW+/+pKl3si1seaMx5OiHw6y99vNNreZaZ4py2olQl2Et1op60pogo+Rli3jRJEjPR81YctZUapkJTFvgn4laykWAg2P+AghKf8oRtL5PIz+lDNrJUKnre2I0GK0kiO7CCGoH5PsRMdRVrGFNsZIqZneARFwlLzx+U8+5+333vGDX/o+p9OZeZrI66IBauif8xLWPpKGh6otGtyXuicZHe1UIUjz/ClFidaiJZdeCuqBRJ/7L3kMqoQqItzNOO/Dck8v3cSegLhD+/4hMeiiYTFGSs4sS2Y+TYPzNk8z7dx4bo1tXYaUenfGPZa5jhtaa/Wg4Dvu1GEV31EkbyoAXcepE5ePiMLx2X3Ijej9IodLHbYADi0Z96BrnKut8V18xfXPkV1/pgcc43ucKBzD3j49CiN9E24W3Biy2mqlkIc7tDQVnpTadpn6ugffYhfRy1xYV6cCUsq3KaUeAu6+F4mp47IjNS9O7GWQoq87XoH72mt+GvJ3/PMomb340A9Zefbv3yBQ+VYHKPfrDZGNEivbtLHeo3VZzASv7ZAaYPdaHkP7omdQ3qmI1bFTSpna7P8WjGwKMKA5I6V65TMUdFHa1oX7/caybpRWiCEwTTPn04U5zdqmKc9o9tAopXG9Xfnqyy94fPM5frpwfnhQGWvjcWjWo0cTiDgjdBqa00c4HAaZjIkKlkWFaC3CWFDQRqDTDdZGDfaDzEXvlWY5HUURIy76aJCw1W57x4Z03gcmKKQfSC/bOPZ6bc96+0mLdEdoJch6y1z75O7BSm1qIlZqQUphuy/c3r9XLYVpZrud2R4e2B4eCFZ+0tKVTmgpBWq/bvZsLGiwEa00
1fkBW9X24yIry7YSzSjRi26QiuDFnavjOopgbqg+gFinVlVnZkdffCv35c5WKpM0pvPKmifSVInRE1zUlujcKC3jimdq6rA9zYGQeoal1xdCoIZAnGYefCDFifN84nR+4OHhNV/+5CeEOPH0/ityWRXRswwWpxvruq6E69WEA7VzKZWJFio1VqQWtnUh4ZBajIukz8V5OXi8qJJz8EE5LTESYsL7qK3Avg1iZEwTISZcSMR5ZpKGC54AbALZDN/2dfKwYfnd2HAsrA3EPr+1hu+bcy/LOdtILUDOufL+3Ts+//xz3n3v+7Smvlu9g6e3In949PH5YUnCe08QP9Roe2DQEY1s6NK6rqOMDAfeV3c2dtY9MhIUGeXfPie6oGAPPEJQsTXVelEPrspOhNflrXcBajvx/X7n+nylk/IBpCgq3flT3dm51l1u/lj26OWJvnke1xI5vO7D+ye2yLnjBmdZ/scz+APG4XqJZEeIHW4IOo4BY0TXzr3YY4sPyiBj7ePF+nosFf60jbvvLzhHbdpFmA+K1Mcy3ct7wNcOEVHRuY7SfOR7+6W8DDC+/mEf56G8eMV+sR/77U95v/Qt6HCvRvnr56AuP+v4VgcodVtVOjwIbssqc54SKWWcixqSuN7tYvyTpu1bvRW0BxiDE+D2Lg/BJLs53mMZkznFvoGpMNVmRMtWNWuISc9nnk8aoMQZCU49dEweXEQoJXO9PvP0/i3nV5/gvcNzZpCjbMN2qElVN+Xb69JVOTH0TX4fHJ3otAcdXQvAOhiqKkC2qgNzBDJmQT7qxkbAHUGCfX8x/Rfv9Pf1oDQZLKNFmjqS9ohpwJE9nOq9+4zMAcQY4ViT0KE23xdjaUOWPpdC2TbW52e252ftDIkLbVttnBTm85mQ0tCKoTV8Xwx7J4QtbB4GqdDHQBBF0raSh+JutftXa7GMzVGjBsli3ivBNkwlLAYKmknnUshlo7UyLNoRtYVvpZoD9srpdMawA1MJ1nvRsM2iFYJ4XBC825Ul+33yITBPMxKTjteYcEHLKefLI6fLA+/fvuLdu6949+4teb3vdhFOF/eSC8v1yjxNbOcLp9NpoIY5a2uoE6GKIZAo2uS8jE24H1OIgMMTtJunS5M7Dz7ggykdx4Tz6ojsgtfv9toNJbVQNmdIynHBVOStw/kjQm+mF3Qg0Xbpeecd0UdaRfkYxhlywPPzE59//jm1ZWYzesN1UrzqAY3g2nXzvjjGtWeHxWN4AMwTx8ZuF0B0pbCsWVVKB2/k8ByljeBdXO9o2Tem/qy7O3KXrO/Z9e12o/NZplmtRvGOycjPGjhrVr6uK9fnK9frdQQxKv4W1Y3Y7lsuBWmV2IUsOfLb+tNgbFbH0szHtrjj5vs1Q4BeuzhsnB8NVEYy1l/Py/uDfbaVTdRM9uX5vghknLYSfBic9ETrGKgc/9vfO4KZw7X19fxjjQgv7sWHAcbhtPr9fHn/dnkokcMJv3jNzwpO3Is3/LxSzvE1A0OyU+6dT/oc3Ch/uY88x593fKsDlA710gpShFJ1kulkMG0KpwGJenD0UsRuk96sQyb4ww13Qiege3Go5Yuz5FIXodOUmKZEDJpVlQLNJvM0TXhUgdJZe2yKE96r+moMKmEeoieKw7umbsfrwrbcWNNECI4qmg0PK2V7rqVUa5nbA6omypXpOE9XXtX/KnIiKG9DPWz0/uVisuWt7e6cqOz0sXSkmZqOutLUVBC3dwb0rKDD004cUrV7oYmYVwRjgxi1+loZa0AThT2daAdp1dbcigZBx6yjdE6DIRulbBqgLMsoHVURNhRmzfOqHBgLmvr3xxDBBUo/Pxg8k9aRK5PQdiER0qQoCC+9YPT+F9y2aUATgsK8xlPp1ywdwUG7jlotiro4MaKythP6VqFstLxqANQazjRGnJHrKpXSCqF6ot+l/juPp7ZKigEfZxBodSLECXGB8/mB08OF8+MD7798ZP7JBfGBd199gTfLhmKEyFaF
bS3crjceXi0gqh9RamVbFpbbDbL6RAloy3fOeO9GgKKIRUCmBuKZEtCd2FxQiNppmQfnVS8CiDFpmSglkgNKRbaiJUxzT7ZJe3gWHyzE/Xn7XU6+B4zBzk1cxDnl4yCREBx523j37h3TrMjRNM92b3f3307uPpZfNLBrNsX0WTw8aGCXc+Z6u7HlbZyTzg9LbKSb7+m9DJ2bcNgwdk5DpZTGlFTht7fpaqlXzyFb109HVeacNYhyjuI3tSAws8BiWkVdD+WInsYYSSHRWmS5b7TWBro4ArLD/dVz7rYSWPxgfLn+WETMTb4nTrbJeUWX9+d3KB8ckN2Poyky/twLDKJN7AfeB+b4rZ/XBfB7SFMPn6+BRfsaCiAf+e9AhgCT7aUbVvZgefCwPnJ8PIDo6+Z+Cg1RPmV39e7nKA2kG14eEcaPB4U//XhxJfZPbuxBXy8RdbRqvw53+ASNU9w3PQngWx6gOCe2qCnkSamaCdiPO8hRayYQ8URi2rNab3C2k3ro29cn0cl0Y4J4teMearHzZCUiRS9CCMwGBWcKbNqOFkPShTZEalbuRSkmP+0gRN00WyuUspG3O/ebIC7oZnuAAkcdFiNwmmQ+xUyvTMM+xclq5Z0YGEapCsewFe+Lw07W0g29w77Heu8gaWEGZKUQcyZu24vXShOq32HdRvcBMV+hHkSaYFEAZdXLwXHYKfkVp7oO/bp75qEt3/uEzyVrqa0Uir2utGauw7uWoQYapn5Jr01jpT9duLQjSM+9iBAs68I7vLU8C26gHsdMtpRCNCXa4ne9mgGnhzCefa7KWUIqodhCaagOvbtoW3G1jtZrqb3lptGksG0rjobzOu6kszWaiRKaanIIEUIgiCPNRcssKXG+PPDm0zc8vnnD+eGBH/1w4vb03sjj+QVkW7IGgWXbaKdJuUHmxNksuKqtmTGdjoli5cDOZaj1hHNadhBQQrUOGt00sc4dUYJ1DBFPQGSDONEuF8q6DZLy6N/uiIhtKgZp0flax81TbFx0ZEAFtAq1uYMJoQzHYh9Og8S6rgutZo5EyCMvpBNTe7fL3kIrI9APIRAljk2wf19rx6Rj34hHmeUw/ltrUI30KUpyPYq1iQVq1daNHlDt+ikaGB11Ro7mc/01PeGa0jR4YPM847zwcDnjgOv1Os75w82rdWSjx5EjoNwTlj22lHEf+3043o/xXA/rYH/0/diz+uPG7oZezygl9+8zLpM+THnxffSh37/78B0f22sHKnw4ocFRcTuJ/mPB1U8jwHbEwRsvr79GUStNGvu6JUOO4iWq+LMjg2MQov8zSjP2Z6Sf13jRyzFK5yzu992hnXOVPbn/mzm+1QHKFBIVlV/PRUZ0V0vBBdt4AecCXrowURsQmAqhddLYPrHFat4+7ASlvlCkuLeKBctWh5mW1Rr9NAGe5gopKpFxSifmdDb1wEKrGVqvAzuzdKk7bE4bCLZm2laWMpKX6mBpvb9VGePwCDc670lR1SdjSBAsM5VCV930zhFDGOq5wICqp5gGCtU1F4pUKo0iKj8vtCFL7b1HmqPkhnPbeF9txdQoi04HE7nLJQ+yXUEQE7ATqUPv4qgiOTJHUW+LUcuVOspd1SZptcVc0QlVy6wOqnNMKWoZix2VwVvLNZrVDQE+Z+JZ3uEI+JS0Q4i+DumCpN1c3e1an2W24NKHfaH10SGZYX2xbSt125hjIHmnD9xMFUNtuKoOr2KmcCE6ophsf9HyoPeQWlRUpbbBQXROy1BFmk10Q4RCwPnI5ALOB1WMnU+cLxdef/KKLz//Ie/fP7Fe70hRET7VSJHBT0jzRDqpymyKEXGBmiu1bNStUs09Oov6U+lGocha9JFtzixrJrlgJR4lD4uVIL3TcoSSMJUE7WMiXR5x5nJdS7EOHD28IXqKeomZNPoXQcKLEoRzZtwWKdnRysZGwTn9rNF1I4zAI28bHMovIId1Qwb3A5Hx3a1Vnp4YhPY8BB5lby0+rGtH
VduBTLq9y6MTcH0IRHcIkp1JJ9RKs+v0MLpbejm0k2m9lRajVzJ4VzrucxLZO/xKLcQQ+e53v8sf/tVfZZ7VFuDHP/oRv//7vz9sDT4sf+Dh0FD89TKIvKxdHLNx4bgRuq+/H148T90cDxuyHMXznAEm+myUcC1EOgLU56jNed0pXmyu/R43C0I+3HP7te/B2uE8rezxtfPuqq/ykjN4/D4QBWSsy0w94GRHp+x8+97VCdT09GvsDcdnMPCOD6/gcI/djpyMl7gXQeLxrfqYXs6vHhzijqPgFz++1QGKi+qYi+gDFDO9ozZEsnEVPC3oJEsJvOqeDVGtzi8NLowIu4v8eHRR7WUB9V7xNljMh6ZatmzCYG4EMQEfTvh0Jp1fEaczPkQ8nmAmTlhHTatCzsJyz2zLhg8rsuYhQtZ6/dR70xnztpDryAte4d4mnYPgCL7hXeB0OjNNMzHOCtvnquWkslAKqoXyQWQcQiSGRPBpoEi+aaAXuvdMiBbR7zB1KwVVOZCx+HinHR3ZyKVY5le2jW1bNNCwenbNmVo7AVIDFOd0AR06L7YwdE+ZUaevQivNiHy2IOBeKAGDlnTSNBPjpKTfYi3GteFc0yXOM1Q2nWXO3ntowmSbjoh5zFgQF0JUGXEftc0vq0OrCDTUXdt7JfxGr0qlNSWiC7RcyMUyudqUqW+fI+K0nFUyIc7EkHBmLtg3k1q98gWaisR5lFdVq3ZmOe90ner3UNqLcddq4HR6xfT9M6fTI+fTA5cvvuD5/Tvuz8+stzt5W6nFqcrrsphfUaQVgQpznNnqytoE1yXnWxviiA4Pflc3ldooWwGXdV6IcnO1ZOrA6/11DlxTe4MYA9RCaZl5uXG/R0OgVA+lCwTS1LSy55LahoxxePYyjA9e21udioOlKVFapmQtM245s5UNvBsEUzm8dzMHX2BsFsHrZ4YeONjGoe3gdw1QTE8FQ1hKzjj2zeiDPfgletpnag9Gemm6oxBjk9HrVf2XpvyWIw8kKDrrLcAs3pO3TYPqWsda2KzbrtXGm9dv+Nv+b387f9vf/rfx+PjI9fmJ3/3Lfwn3137fZP8ZZRUcVrrakYkPjw+5Gz0g+1D8rpcOP7Z5v0AcBup03FN1gXcW/Pf705sMWtiJx84N3MVKEl9Ha+zSNECEwVEcZY0PkBFdCt34/q+V6von9nXsI89+oOZNhkJz50g6bK3qZfGROelI0fP6WGjw4g6N17z8fgEnhzGlqOHLcPp4U/oYsOnv+/Uor0tcf9b16+//Kce3OkDxU8Q5geJwRTSzFRtstkBqz7rJ3wMRUedUWwxBw5FRPwZ6uBm6j0IIzKeZ+aQqr2Kb8bqu0Ap5W1nXBe898+mkr/eJFCbc6YHTwyf4dIbm8RVtaa3W+ounFse2Na7Xlfnpma0J+Mh8PjGfEy441dYI3uSXGfB1a8qT6f4OraKbpHVtpDgzTWem6Yx3no0M1bHKRqlCLp39r4NOoXC1chc83ifdOSyoS2mitT2TcL2bqeuYjA6EnsbrkB0qvVXNGHPeVFF3uas5Y6222ciQmfZOOUV9EdDLNnntvvjZfot5y7jm9LmhpNEOctCE4ALzdGJKJ2JSLRVyRPDIqn4jutib+SKYnL0iBa41vIS+vyvZeT5Z14kGMgELYIqaVTZDqGyVxHkxzsCJlgtlmlmz6rdkabhWOU+RiiNPM76i5NNacDIj4pDmcT4C2sEloqUcBdQEcY1a1W5huW/qn3NSF9xaFKGLflJuydLIWTifzlzmyBQunOIDl/lTvpz/gPfxc27xHU/v3+qYXxa2ZWFdNpxPeIl4V4lxsqDMyktNRee8GVQq8Vs9svKayVsmrxlpMM0ykoXQMy4rp/ngcbXQXMMTKFlIU+R8PrGcZ7ZtQcrLNlcRGSrHFf1zSJEpJVrfOJ1TATz03iJ7l4+iARrYXu83XufXg2/R25Kb0zb+0urutwMD8fDJm2x9G6qsu0R9U4Vo50YQ2cm5
x2ThQ8Snl3BGmUbPUrlm9poevKi+jQyBQ7Edvm+0tRSaZbVtnlmXhfv9Ttk2a/n1YyNutZLSxOMnr/mVv+X/wh/6I79KTIm/9v/8PZ6vV0NGbcPuJSfvdQMdmfzX0Y9+fK3cMVqDuyyCbXqiVhDHosXxM33XV+nxWg/aWg+aXnbC9O8Sb51GfWPvSIDsY6V/1wimGF81NGA+PHrAoUHEjnIcP0tRk64N0w5X9vL6Oh/PWbNEcMpn689oD8p695AGJgqA7QJ2P+3o7/168DEuwH7cz/oYe7kizq2pwzZS7U7pfKz//xKgTPOMi9C2RkZhTUG5BR4GtN8DjqNOwVGTQAegwcA9kzGY13mF70MMpi+giMZibYG1bNZRUwmavBF8IsQZN5+ZXr3h8uo1MU5I1Y6Ikov5ihha4zy1CDlX1jXjYybMNj2dV26INz+NDt+gWifN6kAiDmX26oI1pZlpnqxVVss1KhvvqL4ORr50/sABQRn+DhZBOzRjjGkipo1uGCYwCHYdAh9E1oOyZDQF2Qpg5McmbZRBSs6WkWvnQXQO76xtt0OyffMBFXJCRcl07jmi13OUEAaPpKFOrNpO6qyNVe9HnBLOSh0dWh3CXuxZnE8RHzU/9AaDO+dJUW3T4zwTQhxzVjujTNRP1OjQea9omLcyhxGt59NMXU+UpVBqQ8oGreCameiVQmzVnJW7boot1NJLK3tpcmRBaOnndlUFUPCkOCOSWbeVmiuNTMvq2JrXxpQ8zs+cTxNzmDVgOc+8f3jg3dufEELg3du35FK5Xu9crndTuY2qO2Lf3EyFt5+P9350iCGYVPvG/X7Hh8TppDwG7x2tFX220TxjYoBWaRsgGU8gtsg0nThfCre7tkCX1gZi91IwTEtx3qwUpCMUTRGXLuvuDa0bnAzraBFRFdXr9crj4+OwNaitjGBhcBr6RvZBJuyO4+LAEwniTCRRDgmCO5z7fvRA5QUfTPYM+YhAHJU/nXM44/i0qtddrJsp9yAnhEHiLkY678iO3rudCB9iNJHEwLKu/MHnn/Pl27eUqiXEvVvDzu8XxPSP5/+R3/KxD+qoxfj74d4cS8KtVuOUyfi0cW/AGhDakP/Xpf/wfQPFwX75UpH4p533cY9paODWPZ2O5318Xf+s4587kt+v+Xj9fqB0Fmi1XT24lwakR2ofCQ4/Fmd8DS1yHx+TH7zra58oh18dBWp72PmLHt/qACVEFdAiBpLzasaGGnp5i1i1NOGsNPP1SLgfH4NXFVZjkDLVSCyRUmA78E90UdQaZ+8siT6SphNpPjFPZ6Y00daF+30BlLQavNbccQFiMOKqnksKkRQTXQW2d+T09iKxVjn1OdFaspgi49BdmS5KhvRR0QjRYGGeZ9Y64beecehi12Fdm4f0Oi1ul9Ku3RzPBmy0LoBh5uXc8AHZyb3O2jp1k6jOjWxTpHtkaObV23tdjDgrrbxYoEWIh/e21oZ0fnNQi36vd856hW2RPk4wrbdoWcbUFvu1d05L5yT1a3OGjFAriKrZplkN3IRdy4IqI0DpyYY4R0JoRKJPNCe4oIv9dJoJcQFgyRuuZKj6rKeqQZuSsLFsVqXftRQXXkLKTseD2igU7neVwo9+Zg1GWq1KgJFWud8y794+s62ZWhoex+V84nx+5PHhwsPlzKeffML7t685Xy6k+Uc8Pb0HpwZ/27oSqqoiB+eU/NuaIn0o0tTHMxg0LLYpNzV3dDaPQvR0/5CUEm2e9T1VnX9qzbRWqAI+qIZKN/O7H57fi02jI3qGetZWd4Jp51iUQgiaiPheyomBkvW167qqpUbq5U6d4+Gg5dMj3GAdWn7AdtBhEZGXwYQIw1NHCcN+aBRhQQLwtc0L2NulnZJ7j2va8bVdawWxzj/vCaLlgWzrVmMPnmprxJQ0aLE53P2NSlM/smy2H/dt5at3b3m+3ayrRK84HMbjqDQc1tiPoSgf/vsLUbef9f4PyiVHFO3470eEwx3+fT92
/RoNsPZ76Psmb6/3B3TgWFr6MGA6tvv37xtdpYdn2/lFX9NE+QCp0bO0m9wRO9fL3ZYUHL7TOXuzIXIfBh37ub+8x/33P+vvP/PowciLZcn85H4W8vJTjm91gOK9w0e9I84Hy6qNwIpTbQYsC+h1Ru/wATWMOvwAhwclUIXqNDoNXslxXf9knmc1dEuBnNUELRjvRFygOY8LEyGeifFCCAnvArlC3pSzMrJLHJGAC9EkqtVL6Hy+ME+n/boMNXCY4JMYuuC0RRqnkuLBB6bpxDydiJbZKtIi0PSFnQA7x0iJEcmbDnpz+o3RTO6spbWhmiTRHGj17+pK650fIlaadWVcFaCMidQDet8DEOcGgXGOE1ShAC14M5CbCCniUyBZ4ON7kAP7Zm1wfHNeW4JrpTh1xnXeE5L6GnnLnktrbKXga8G3pBPZm1uxzZ7a6rA2+LAO3q8RVIhO0SN1HK6L2dO3atwk80dyqP+PFEKL5FCJU2CeJ9Swz+NTJM0zt/uzIjS5kmIibxupbMrNacrTaeZoOlYA2wC7IR+owV6xTqKWM4vcCB5VoTVZ+9Yc1+uV6/OVvBVK1nbv5COvLg+cz4n5NPP4+MDl8YF0OhOnMz/5yY9YloVWYV02fDSukOj1dx9T5xz4DnF3/Ym+iHeovZclCyF4Cwy1yyXNZ7wXqFVLdVIpm1BW20RCYD6fmc9nlnWlds0OQ/ccR5XPl+WRTpqtWQO/lALTbGPugNT1APi+3DjfJuaUwAQNoXPU9P573JD8b5iabUD5QAgiLztkgKEz4pwbqO9ACQ/HCNz77w8IhbbJ75tj77zp5agQtTwZJmE+bFR+2zRANFKxOJXD7908sqrGlM6Jpk7jzrGuK7f7ja0UlmVhK5lqGbpDBnh/DBh+WmnnePy0TVBsro/2cPv3o0bTbs5n9+GDgPBjm7Mcn2H/pl4y7ufTf+wcnFNFZLFn8LOuYSA4h84qREyLKO7JmY2J4ax9QPnH9Yzr2r/j2NnV/6tBkC0HP+Ne/7zn8bMRrQ+uucNO7F+qiW1fpT4Mduiwzi90fKMA5Xd+53f4z//z/5z/4//4Pzifz/xD/9A/xL/5b/6b/B1/x98xXrMsC//yv/wv85/+p/8p67ry67/+6/z7//6/z/e///3xmr/+1/86f+pP/Sn+u//uv+Px8ZE/8Sf+BL/zO78z5Jl/8aPpBh7EyMpOCXfejYHXdQSC9byLRef6QG2x/+Ahi4gRmvRmb2VDHGrJLnunS5pn8qYCS4SAcxEXIs5HYlLux+l0YUonYIfhy1YQUTKqF4fzKuvtURLnaVYEZJpmLVl5GaqXXkBUEBxL1fbsEQ1QYtByjnZgWvtx7PoKvdtA+SRaU/e0rhFjpZDjj36JIhHpNJtOiCcaQbQjKtlId7oFMd7vexryweIZQ6Sa4JUK64nyOlLCpYiLHkLAi7owB+Oz0BU0W6MXF3pZqfVzte/W60xjUcglE0oh1orzRcszthCNdlFfX2xozu3BkTee02D4ew20YpTRSTTQncMi1FurVacsDvQjTFYimxIxBDb2Msi63EmmS1FKGbLd/aHvlD7bTA8u1i4G0pTI68Z9uYKriFx0bviGNE/JeXTCiKxc73ce1401V+Z5IsaZlMKBTKqk7Xfv3qovU1PinvO7g24v4+gYswDZgTQ3OkL6wq1Zuo4WbXvdy4K9nOpoinA4YfNQNj3nPr60dLZ3eo2OmmYtmGNuHBZeGyNSK6VWpBVaq4jMpBSVZyXaDSitUTcluFazOtAuml2ptXal18PC312HvXMDVfDOmfM2Yy3Yx5TO1c6R+PDQ59uGU7mOMWdcj/3o98AHE4mcZqbTrEgKSrh2Dm63G4uRdn0MeJw6JqfE7X5nXRaOJfDgPa0U3r19x/u373DBk7e93bqvRQM40pP82rV8bNP7WknL3jt+fyh9Ob6OkHQEq/VzkX19l9bR4R3F24PkPb4f33y4v9471QG06/AWmFi6+6JcdAwQ
j9fVf0bH1CFAAV4YOx6dqj8M8LoOlQPrCpR+l/fX65cO5PBlqepwwuOPvWLwN9dhcwymXyI97DfVQTsgUB++9ucd3ygi+O//+/+ef/Ff/Bf5+//+v59SCn/2z/5Z/ugf/aP8b//b/8bDgyol/pk/82f4r/6r/4r/7D/7z/jkk0/47d/+bX7rt36Lv/SX/hKgE/ef+Cf+CX7wgx/wl//yX+aHP/wh/8w/88+QUuLf+Df+jW9yOrQq5vnh8SngUH8PDEHZa3duRJb7CD70jJsiKTBKE9jk9KKLbxXT2pBGCJ7Lw5nz+WyDV7souuiQiAcXEfZgJUgf/CrC1JVXEYfzkYItWqJBxpwmDWycdg6MjEF6K6mMhVkRJKe+I2DlHNNAcb1VWgjRsiN7TeenOGyxdaoJEWKyrhRV8+zOpc5BTJOKqYmKaA0DMst8W4wE+46eOTt/mMxjM/E7CIBpipiw1zRN+JQQIxJ6YErJAqxK3jYr5yk3ReiERdXPwLIq15pmxV33JGhJsFrGopm1DG6JZi57hntsu+7nrVojVuO28/feM01plAHB2P1F26q9MPQAYmyIdAVPJdD2AKp3C+kmJJRtI68rJa/UUk1UUIlvPThq1XgNYAQ6GSTm03lmW+4s642np4VSFv7f7b17rCVZdR7+7VdVncd9dM+Tx/Dwz8RkjCEOjqFj5aEwMnGsvMQfloUikliJTAbLyFYUOy/sRBFWIiVKIsQ/SeAvByVRsCOCLRMwOEQD2BOIGUjGYGGD8cw0M9197z3n1GM/1u+PtfauOufeHk8DmZkOZ43udPc9dc6p2rVr77W+9a1vzWcNFosDxJigNUf2IXCrANdXrMAbEyIpVEIQ1tYwZ0cZaGPRNAuszk7R953MjQTve8TIZD+tNKDz4ssIS2Hwy2JLMTFZ11Xb8LgQHV3FvYO0hjiwvGEEHxB9BHQPUgrWOdi6wiALdNEh8eya7z0AAG3iSURBVH7EmoU4X5oKCmIRkJBSkMKy3J6+kefNQDt2cawoPncdp+KyfDyjlgaRQiHY5h2Pu6SLIyXIqNIs10+ApIK2o/lzAnNy+vz8JEym5vgiTTrtTjez/Llyjs1shrqZs1geIlZnZ1ivVmVD1GB9J6s1urYtQnPZOWFNG48bN67j5MYNuKrC0HWSKhujZaUmARMRzp0yxo0bk/u+7aTsoCGYIAYYiaXl6MlnTEu+eT0tdTlbG/k45mUky3eXz6btDvHQmkng08+/wOHafS1fbylBn/wuH2cmabX855a2FJEIy1FBq8p4TpyTi8b9Qod3y1sZuUPn78UtmIAoJUDlTyuBwtdjt+Sg/PIv//LWv9/73vfi7rvvxsMPP4w/+Sf/JE5OTvBv/+2/xc///M/jz/yZPwMAeM973oM//If/MD7xiU/g9a9/PX7lV34Fn//85/Hf/tt/wz333IM/8kf+CP7xP/7H+Dt/5+/gZ37mZ1BV1bnv7fueK2bETk9PAbDDwZ2zNW+kk7b3LKhN4FJegqFJiR5ksoh3nJAkikwSpQOZZBRTABQheI9BSomtYz2SpqpRuRqDDYghAUqX/LhxDZxrYF2NppnBpAhvLcuObykAojw0UTRSKJEgIdztlZRiPopWSD7CaN5IM7ympbiL9R84ytKkCym8pIbyg5U3XigYKR2OMCCVoLWUGBvmrihjACJESOt0xd15M4kOWnRcpD8RXz8DvbxgsJosYkRUmRMwjYggctnsZFWWVTttXbODIotUluRmMS3PeW9ImiBlUTZWmCWJaKHB2iISUfJHjWXHrDHG1zEVqYopMo8kMplyiAnGptIlVplprltxU0pQiY4KB4ciVAJiZAl7rTW8CqhjxSTbpoG2Fl7e56xFENSARda4BNUPnPuPMYIMg/1J+qAUCW8aK6gAgjIGVeVQ1Q56zRGzHzo4exm59UNGLpgHEdEPA7puwODZSYFmMrGrHapamiJqC2tr1FWDtl3DDwMG3wGih0MKqCq+l0la
SGQBQC3cnyL9L3M/o2paZ5VdcPTfzLmlhHMAiJsZugpkNAIAGI2qaVB1HYLnkt+YF3atoVJi8UabuSJRoupU1EJzMEKauVNd10kaWNSAwZ/nvS8CbZxa5N42BFHuTFIRE2NB5HJKICO2eSPIGj1ZPvCZpkF2j1UltQK+58hOgirROlTurMxBRj2boaltcTiymJwGk64haJ/NnDKJ/J01UCCc3jjB1SeewHK5wOrkBNFzBU8JUrC9yeXalN2NGdh2LLY3xaw+q8pGl4OFUo+y4yTsppPYYb/5mOrJ922lgSaoAAkHp3ympIq5l+F5JGc3fVQI0TndNnFQprabhpqO3/SadpEliHpsQZEm791KFeaXp85WOfimQ1TOYVcA8OZvwKSinIMspYQ8nYeX/i8iKLt2cnICALh8+TIA4OGHH4b3Hg888EA55pWvfCVe8pKX4KGHHsLrX/96PPTQQ/iu7/qurZTPG9/4Rrz1rW/F5z73OXz3d3/3ue955zvfiZ/92Z8993utNBKkskU2uALHK8kL574L4EmZpjc+seDY9mTfvgHTnglEhH7ocHbG/BcsOcfMTfgCSLMOhrUs1KYsNy6sXAVEz1GZtnzrRMOEPzoByjABMkSZdwIlKi3vk6obxQtmjOx8Ze+U+/5RgYAzEJnff25aELu7Wmnui6IM548Vw5r8Q6xqqvjzMvJjRPpf6VEHRctHKuL+LygPtSAIEs0QMtt/jBpz5ZQ1XL3hnIN1DrCZQ5MKj4Q3E+GcEBMfg/xk4qPc4LK4Gq1Lo0AgVy4lGDNJx5T875gTjkUF1cCI5kpVVWMDR4FRtTYgRWURgiwqIbILZYg7OoMS15hRRF07kKBPRhwUI0hK9IM4CKyzEzxL+A/DAFdV8vmq6N7wJsWptIwKZUegqiqAiAmtNm+0ojiciMvwh8jKrZIY5ShOSL9geXxtDBZkoLWDsxWausFms8Jms8bq7BSbFBFsACVG5ggJgydYC5A2IMOzEibD3LqMV17AGT0SJEGzKF5V17DOIMYA07ZQzkE5B1M5Lm81Btrl/kdphLgLZCegDDHkn0XQkDeFCaqjkiqS8Frk4qHG/jZcSTWKnfEaogSEHYXNpjwRonHTUuMplTXo6ewidGEa0SuFyaaR1zWUNZCIkEKENwEusHge5D2mcBh4DXCWm5MGacSYReZyqgYxQoHguw6Pf/X3UTmDa08+idD3rGEDnksQBEVDSSXd+WuaOiQXjgHl/03Kpyeb9HRDL8RR+aw8/tNxvtkY7n7uRacxJdiWKaU0jFHF+ZsiHbvk2Dy3M9l+V3Tyop/p+V7kEIzHTdauiZNy4Xt3HSBMhnnyuVNUJ3/WrgO5db7AuQSRypOAxuNvOsh/gH3dDkpKCW9/+9vxfd/3fXjVq14FAHj88cdRVRWOj4+3jr3nnnvw+OOPl2Omzkl+Pb92kf30T/80fuInfqL8+/T0FPfddx+y0BkPIEPL2jKIamSDLiRZxSOmVfZupWtoWThkYdFjZ07OUSdEuZNKlDq7roNSCtFHdj6oxC5gMTN2Uup6zrLQxoCkwiF4hpTLJk0ERQxhI6VRyCovaqTALGjOU2dhNUo8CYpTosYCLi1euxH+RoZ7s/omK2QmpDjqmOjI1RRKqn0g0W3KnT/B6AO2Fpbx74VvQARraVIWzKmx/FqZu0kVXkJMiRGFukZV13BVJTol42eWUkGdQNBIpMvv2LGgkopKinVUykaTuS+OESEi5hdYzT2JMqwaZXFWRCBp3JZTNi5UI5JjWDshBkLWj7FawxkDL5tuoISu66GgUImUevKe70dM6Gsm2NZ1g8o5zOdz9O0Mqe+waTfo+wEmAbReI9kK9XyJ+XIpVRaiVyD5dV4kwX12itMCGFOhdg2McfA+Yhg8+k2LrmmRooEfIrwP8D7COId5s8DB4gB10zDnKgZUnhtvaqVgrcN8thA5BEEv6jmc4zJzqy2IQlEOzh2zWfdA0D7hBWW0yVU1j1FV
SXqWkS52OmoYVwEKqOoZZosF+r5F1dQIYcaoUsuKqq7iiixnLCgxATUpBZUbJCpVmvHlZ6QggBNuDP9Kby25uQttFmOcLuR5/k83x0RRAofsMGaMYxvujxnNwxjRPxMkZYz2t3/HqZqJ4xkSSAcYZ7m79zCwk2tUqTTLaInL/CsfgMhtKLRE3SlFRBc5CIkBT33tCbTrDc7OTmEooxUXnyOBypoxtUIcnZx7eR+dr4oZX6Oy941SCLwWldTn5L3PJF3BYI069zsAJV2qlCr7gNGq9C3LVU676MIuwpPTw9PjpoFR/vtF9z8jzTT59/gnS0LkNiLAhAdDJBmh8w7ZTcfiGcy/ck4qI5NyLjseD8keR0T4epwT4BtwUB588EE88sgj+PjHP/71fsQzttJJdMdyFGHK4ieDRsSS8wCyMiiIJ9t0MiTZiRl+lomdRLgHAm1JZBkoFZJiSgnr9Rq+92jqhhdozSTHykhjMWkuZqsKma+QUkKQxm85EhC0cJy0CaK0CWSNlK3JK9exleek0YtVInA2ZbmDUOSr86Y7eI+QAoCRVJeRCs6Xjw96iaIAgTbH8b8ImtRFr4WkXHdHmkeNC1D5kXNQ1rK2hrOc3iLmjLC2yXZEQKIjQeJUYBKdWWtFS0M+L0cveVFM+aEexyRvXipvHPL7XEZuneOGbHp0dMrGINomVVXDDSzz772HSgnKcuPAMAxASNBEsBuLuq7gLPdpquuaUwZa5OlTRBg8QtcBVYt2s8HQdpjPF9BOkpc5ghlvSLkvICqIXeVqKGh0fYuz1QpaGWjU6DYRfduDYsJsMcN8tsB8zk3ttIYoMXN6QBkNoy1MZYvSkHUsBKi1AsXECrrJIyaPzWaFXg0IKYslptI6Im/QppTSK1TikCptpUpCQykhnusEWzeomznq2Rx+GJgzM/SwdQUzDLDBjRwQNfIWlNJcYZdbUuTKGdlYlJluGBlS52fVE3N0ELkXVJKqFjNppJmjyBwUJNn0t6POyaKNMcJkVHISwU+epd2NlXZe4+fygtRQjqSTBDwAUuDr9sMA3/VokdAPw9ZamNOSfd9L36yxGot7FvF8996jXW9w4/p1ThFhJ8qW70/ZcaBtZwRyX6b/ztc3BmUjQ2J34y7jkjfH8u+tYf6GbPrdFwI8FzhO+RynPKDdcy49kiaI/BShz8eeu/db3zlxdneOmjo+eQ04tz5Pru/87Ly53cxxkgVo/JRv4n0Avk4H5W1vexs+8IEP4Nd+7dfw4he/uPz+3nvvxTAMuHHjxhaK8sQTT+Dee+8tx3zqU5/a+rwnnniivHYrVjYsxUJdpEiIguDYRSkuzSSInkksEP70AeQxFnLdJE1AKe3kkwW+B6MrzOgn+CGirhO0Yaa81lwJUFV1ETHLHXf7vh0npHj+5RxA3IU3ROGhcPoopdG5UjtOFjtko4eec9Bj+RwAUNE84CZrPA4hstNFslCrwr/ZDnp4MZxob+C89z8Vhyr9RRKTNrXSJWLVkjffcm5kjGE0YA1zT2SzQsoNsaLQwVk1NxGx+FSkEvGBxsUvlwC7qpL0gRGUbfT8c5uETJAF5UqJkZvActYo1wagiJHluVCuW1s4B2kDMAH0ZR7GYUD0XC4My2RkV9WYzSq4qkI9a2Ac90wKRIhhAHoLtC1Wp6dYLpZYLJZwVQ1nDKBNqTYDUJoqKrmmoDgFVkljy3V7inazBoUAqxt0G0LfBShVYTZrsFgu0DQ1rDVS2pvGqgLnWPjOGBZYmy1gHJ+/MQYIEQaA9x186OF9z1FnSAhhVLKc9lbaLol1yFVCSWlo46BNFuuz0MT3sm4a+L5BiAPmMYCS5zJsP0ANXG1HiitTDDitZ43B0HlQCEAW7sqTW2mwvDOHKpRLTSOBlDSupFiqAbkKSm9tQNkx1vJ6JlGr7CzlQKI88plMO86djP7lZ2sX7t/lqWRNlum8LJgmFQiU09jSZZuUhnMdSByU/HnZMc8oS4wxe2vF2a8c
p9T6jscbibk8Spo6KqAQhfOGmp22fK930wcX2e7mujsWY1n25D1qRHPLbb3gc8trO2O5++44OZ4DUiprXA7yioOlxvVsml6aIidcsTjuL1to0dOMA4CSEt96LZ8njWRYLjrI+wKd8xPyNWbHJu28hp3jLzqfC+/bhQ4Jo2BExNzEb8BjuSUHhYjwYz/2Y3j/+9+Pj370o3j5y1++9fprX/taOOfw4Q9/GG9605sAAI8++ii+/OUv48qVKwCAK1eu4J/8k3+Cq1ev4u677wYAfOhDH8Lh4SHuv//+Wzp5oigPkYU1mgV0iKBVRhSSdClNQCCWtJbrSLQNvSFJhJdJbYmJjZGAQAFJqggg8G++WfwwByFHRvgQxjRNKXGTXjlZB0FhXMhyikruIYtYsYPC2h8aUIQQeUPW2dmYQM05z85aKWprwUJiYS8CK6umlOA9l5ayZgO3Iudmd0Z8hemUVciVLVNocRotEI15eWDKuMfoLJEqi2dGRRIlbjtgLEzloCoHXVVQlZQZ5yJiInBdVs7lU3EiKMRSbly+T+uCnoy8kCkMmT9WyjZp+1HOUtt5cTEWxdFhCJ/7tBghxIYQYLRstCqr69ZwrkLoe4FaueJm6Dp432OgAGUsZsslZvMDEX6TktCqQt91LNI2eGC9gYJGVTVYHBygnrEAn7X8mRC6MG9IEFY/zzllAOM0mkUFt9YY+g2iH+CMh+81iAysU7BWwWq+a4miNMtLSMQ6LBRyOpOF9LQxcJpRH6s1yA8wCuhag03LC2aM7OBkXoehLFTF8yOnXnO5OrSR1hIWdT0DNGuIZE4UlIFzNWxdoQqNVMoM6DZr9BtGSozOKrAOCqYQ9JIIKObWFbzw8/rBXCcmjI/wOMpcN4LkQT4rl1Fnx6FwEOTalM6L+cTJwPh5U+8/pynLE3fBpnXRhkZETOi/aK/PSIR8c5KUV0wbxBhxQAdC/hfnWa5tyq1RE7QrR/6bzQZ92xZkwaqRa8O8oUmaI1e+7XzORdc4XUt2f7+LQoxrUxodNCLZgSduSw5CdpGX8rK68Hu37kUW85A0bgoRSaetc8lo2e61TdePPD943sRRn4cy/y7zObjirfwQz32i0SEdKwm3+ZFaEEGWOhjT8jwDUBypfL2SVHjGrsMWYrizXm7PdcI0CfeNgim35KA8+OCD+Pmf/3n84i/+Ig4ODgpn5OjoCLPZDEdHR/iRH/kR/MRP/AQuX76Mw8ND/NiP/RiuXLmC17/+9QCA7//+78f999+Pv/JX/gr+6T/9p3j88cfx9//+38eDDz54YRrn6UyLH13qc/iuFfgdiUr/i+QZ5jSThwZAaZYFks6iSDK8WzuZyPUSuLX9JHeaCFpldELyvH4AFJfXajAS44cxOuFoy4jHjXINHN2P0sZl08mobeSKjRBySSSfYyG85UkhnjWnK6g0xmIFxCwOJEgKjZLXOZk8QscEUJbChzxkQb5vgs/sLKBEJJwOTqFk3ZIkRN7cnTbzfrRwEWzdwDW8AWltgADWrcnwKREoxBK9UWBBNJW2y+wy70QpyRFrWURjFNRkFEQyMtoa/F3cKdSUzTPGCCXk3XzfGYVKcM5xlY33cNbBuQraWcxCwLJfoF2tMLRrRK9QSVl3RwmxD/AUYesah22LcBigRQBwNl+gW8zRDz0GcWq7zQbJByyaBpvTyzg6PAJq5m5oLUTw3B8J2SnmNalqHNpOo6odXOVwujmDoYAhDaBUwdgFmtrK2k4s9mcMrNEIYXTwCVl0TKEWB0UpA5UUCDXmyyWrvVJCHwZAEMkQIgbfwxJ3WzbeIlmLlMYUWUnNSiluTsmR1iCVYJQFpcAE7crBNTVi9IjBc5lxxVwV6nrRNeH+TVoBQYTVcqM8ay1sVbHmREhSgWwwMrgmG6OKACJgDIjC9now3dgykjaJsi8qrLwoAp2mBi5KCU2fsd0Nmztln6+sUEqVHjwqoTzjSvh1fb9hJExtp2uDoGUZ9ZgKiPkQ
MHgWd+O1klPBWXunPJ9KFc2mjKTmc5qiC9PxKGM2RTcxpkSmDkESVFvlbBLyesUVg9N7RFv/vPh+bP2eysHIaXNKU4dFmokqlkPIKa+85uT7mb8vp5WNzto8sWj9nHO+OHrLPiPkBMB9hDLKr7Z+iiYPhMN0UyeiXHBBxoAJcpLnCi6aeeP4XTiG04FTaqzbznmMRDvH3prdkoPy7ne/GwDwp//0n976/Xve8x781b/6VwEA/+Jf/AtorfGmN71pS6gtmzEGH/jAB/DWt74VV65cwWKxwFve8hb8o3/0j2755DW4LNaJl+qjB4HgQwRSbtrGegRRuB8hRliikTwpEX7ZoBMk7QHmjYRYFmYlzc+AifepWSwqkz0zVkHKA4orN/wQ0HYdNn2PIQSpRpnkYYnFk5BIWtbzw17aYomHopRCEricvfuMAmVKMJdxQmtuiibEKSIqAk086b00+FJSSsyLGJeqRhZIm6S8xoZZBEoe3EZPlR4sIacWoMu5qcSLI0WWwdbKYYg9/NAjRJFvVznN0UC7mrsnq1zeyRVLOaqwIAQiIHiQD7CJORaDYiJ0knsLAEoTjFVwlStCVhyhBEAZFsdTPPVDCiACjJKoRDBQbcW7NIqVba10b1YKFFjDww8GzhpQapASb04RhLqqsJjP0S8P0G9W6NZrdg7BREM/cCWNbzpsNiucrk5wcHwMU1nMD5cY/DF8DIiaEFcrbM42oNZiZTXWh4fojg9Rz2ropgHiiCYpZZFigveMilnDC0TVzFAvDjBfrtFvWpzeuAEVNYwmNLMGIfUYhh65S7VzDlAJiixCP7BOiiyEtQF0ypGc4WoqraBmDSo6gFeEOnk08yWMvY6UAvzQAcnCKMLQgR2GKgJKw7gKOnhUWvAy4kDYGH6uYmK42FqL2XwOmAQv6ckZKcTBo246VHWLvuVKpxBZMySQRwiRHSBnAQO4usZysZBeRRxHaqhSBZG1adiJZVePnXsDbaRmLqmCimUybCgIaASFwOkp8Eaa0VFSgDK69NdK0iKA05Vh3MRlv+L9JG9iEDRJUqpGGimWzUUhR7NGy1yNo8gdBXa0lFFIg4cynOhykvrMLTqmqYqSzpBnv+jsSIk7P2uMPhvZII0aWzBopaBtdkqAnFiYVr3kNSY3CE1JhBBJ9J52HA1oWRVLzoLks8c0DJB7sJ13CjN6UP6tJEU6tgIGIFIKlLjHl2YUKslNsXWN+176MlTW4bHHHsPZ6hQKXP2oFVfKKWNFNJTvtRYeFp9DXogloJMBYv4hx3S5pUluvcaoM6NURlAeq1nXKVGWmJD5SijFIHnyFARwcu0jqidjMTmnPFZTh2jXsSrvUkoarSnEInrFBSPcJV6XdD9vsc8cV7nlFM8fZE3T4F3vehfe9a533fSYl770pfjgBz94K199oSnwRVMiJMX6FSQ3muvDx4lA4rkqyQ/bSXVHrixQSsqQpcqGF2Hu76IUeGND8WMACJnOe/YhlEEdPPc7QQJnjAidDxgGjxBjBuO3ogmOTgEIeoPEMDtP7FTIoCNRNnvAk+oluZbCk0hjJVBeaDLql4riUy7B1UjgDrgxavZNMrIjXndMEQgJlVJQ2sBolvjn8x9TS2Uy54dRFg4fA3NGiJBSKGiPNU5Ki2sY6T3EKzl/t7NWNgkvHYwBCgHkg/BT5Joms2LKb7DGFAE04jwYv0eaFlLK6FiCMlk7AiL1r8dITo9N1yC/y0hOTvN40f2w1mI+n6NbMOF06FgaXFwlpBgxeI9us0G/adG3G1R1JSJoMxwcHnNfG0VIYUB7eoph8Di7fg2n157C2cllmPkMh8IXSQQkI2JjLB/MeXOtSgqtmc0wmy+xqc6gccZEU0WwrkFdzaWRYeZRsIhgjs5iSkAEej/AWAPr5B6BoIyCgYVZzEGihdL3PZrZHPODJdpugRB7UIrwYeCIV+aOMhauHmCdQUqmpAGJEnfwNgZELAWQEs9fbbmTcQoBfSRoKyrB
2iAhz0kFZViVWWsmhxpreb5XFWzF/CRAc0VeZPqT1tLmAQClAK4ktYiIQvoEryUSGGT8PettJMWpETPZHLncV8T0ciQtyGuZsRnpy0Y5Js0VWjwOOTXAJdC8EbAzMUn1EEk35kmqlVgzyCnhuqQoa8CokJxDroKcCkqRN6YUs4IuMflW3mfyudPIUTPWlmvllzJPSvHXqG0BsoKa0DQIPx9v0/TP7PSVYXxmKZ1zv+O69jEyEeeEEiMgSRDa/JqxFnfefQ9eef934tLxMb74W7+F3/qtR3F6dsojKM7FmOaT+6kurtApr29d8zaGzy+p0iYkj9tFqMaIVE2QtqkjtDOeN0O1pp+1m8LKP/m+ASL5QbwPQ7G0RO4yL951xlQuHoOb2G3di4dAkjLhjqQ5wkgxb+Kavf4JRKjVSMybln5t5zhl8VXsPQdKALYXkLyBas25+hADVAgIMSL4gGHohfXPzkZMubz3fKvpDFVCwLwsJpUSu9RRyKAUhZimRkjP7DSgyhNtSqQt3VdpdF4g6QHexC1IpQIvBgqsq6KY18JE0YgYgGRsXqPkOwAYgebVThkdqCi9phTB1RWEUNqf5+Zs3KCt3BejiwKCJg4mEjhVF3yAmjC8VKn2iCV6tNqNcvqlsaEI2pFGkbYtQmd84Vap0rCNe/nYAlPH7BhqVhLWyvD1ITuZLJ5HwmPIKc+uXWPoO6xOTpiAa5n4Gb3nTrlnZ0w01RaLg0M4x0iBHw6gFWAooVttcNadYr1Z49r1p7B48jLcconZwZIryCqLFBTSEEAUBC0mKSM3MIY5MU0zQ9PM4KoKoQ8IIWEYBsyaCMP9KrmvE/KmJ84KUPrsGKOgjYIFwWlGRI3m8WzqOdSCQCHAdxusVguszxqs1xbRM+eo9wOgNWpjkWJgyfzIqSwlzjJvzBHGGWjDTiQHF4kRDsvCcWno4ZxFM2swaxr42QzO8NiyQrBBVIodWc1ijkZEAbW1qGcNAiVQEFl7BUEVgazgnCF+TvlsOxXQ+VfbJNDpMZmzNI1Cy2uYwvEXbRQ8LzkqFj0n0TXKDk2WyS+OBEkqExO+ApGAPhIspDFd5ayFEQTFDwP3fCLhusm8TkSgKJuLUqOTD4hw2eisaDOKZfImNtmQRA5hvL7JWE6uYcxEbI8Jrykov9se663g/9x3ZG7JrtuTuRi7H5TXY2MNNBE3V4TCcrHAffe9FC9/xXfgzkuXcLZe43e/8mXQ2amkWxP3esvNJOWa4kWSuhfYdqpvvPZM7p86KLtE7fzXiX/yddtFzslWUE2Z/5irqabfecGXl9fHVOozsdvaQUmJoLTwK4Q0xWqSgCLZhCjLBZE83NsluNObUBYHnSErgLkkkvMsD9tIbC0PjKAUwXsMA0Pm3velfDWmsay5eP8YH7KYWKOFJELL554jJ56QURZbRhhKRUrGAfO5TJCTrBGSf5//zOOQtUKU0iAlrQNi5AUCIxErZaY/aUQdAZW4OkppEYQbH6yc7sqiWFpnREKVh0dJOsG4SpqUWSF+CjxNghoF6TtEYJnzELa60nJEzg6FIl5QjLVwtoK1Fay2SKSEk5PHPxONBf1R7LdQAncalvRFcX40kyxjysdz5FVE3RIr2foYYDk/URyNS5cuo2tbtG2LoW2hjEFUgI8BehjQrdfY2ArOsIKvns9gjWW9ERAQAs4WJ1ifnMF3PZ782lWYWYPqcIHl5WPusitKylonWV8jo1RJNjGjYaUB5WK5xGK+ROgDYuR+P33dixZInET7sllIBOy9BxRhkBLqStArVWWyq4E1hFTVmM8X8AeHWK0OsVossVqfok0RIQ4AlDQFDEgpIEh3XKW447M2GqCIRAExKkbuEDltpy1qI5B7rBA7Cy1Ks82sRuxn8NYwEbn3ADF6ZOdWIHueH1zGDLiqxkJbpBDRti1SGEScUOZKnifSpFEJsobp0yZ7blYXzcG4NmM5++TBHNECTH9N5/6+xTWZRqw7Gxi0EsQ1
VwbyHyEGptBonQNYAIrbC3iHjHvGSSfyXF58Eacsp0Yo70bi8IAg68cEfVOj0zTxZCa5hB1Hb3Kdo7OCklqZjNTkvKj8O++QuygAEU0QImALZ2UvHrkXVHYWE5K00OCDiFJBGoy1uOuuu/Cyb/s2XL7rHlhrkaARiNfvfP7bsvwy7y5wXndt9/5ScZTGMVFA0TZJmYvIm9OW03YRYvR0xsdf/PvpuU2/Y5sfxHFfyTUpxWuHLrenoGS3Yre1gwKVt5wEkOZOk5SrXSCqlNKfBee9wpIGsBY0QVqm5CwFBbIj2x4EET4D8tNXiIpEk0ZvHQZpMAagoAgpJdGvoC0HhZ9LEWGjVDYZFZXwTAlAJt9NnCsAWawOGD3rUW5dHmSFresrKSHNaIDSgAEh6giVNEdOiR2kLP2uAUStEXRkcl7uSVP8ru1Ip7Qp11RImLnEGWByrLWM4LisIGst51Ujp+pCjLAAo1Sibpr7zxDy4kjlupQay1atsdwMMJcOQ8aQhCycEqLwKayV/Dc4LWfA0XNUIuCXRAcjI0IyP0LidA2GHjAaTc1PoiGuKpkvljg6PkbbbnBKCb5NRfMlxYj2dAWdFJyxqKsGzjIisFgsUDmL2hiErkd7tsL1/ils1me4fv1JHD/1JC7fdSdmdQPbzOS8eZGIKbBDAeYRuIrHdb5YYlhuMJ+foFtvMAwBQx/QDx1i7OGHFsPQwTQW1hmZ14QQpVLJqlJ2nAneChhlvEVV2VUN5sslDo+OsN4cY9OuQJTQdQlZVA6anZAh9NA9kBCRUHNPrWhgvEEiz0lPxQ6TcxaAfBdFhNbx/TfcC6lzFkFKuUPwMDCi3eLgGgfjHCprEWOAjx51NcPxbAZKhNMbN7A6O0Hse97cUxDBM34WQQQYNc5zcVaURPzspHHTzSgtGnIgYIUEypLyEH4Fq/fmtShXBuV1avrndOPbiq7zmiS/KuR6eSBZk+X8pj2VWACNpNgsXDmNzsd1RpV+Svx76RytmANnJoHftFltSU1NN9gLNueLjD973NQy0FSciW3vpaSZt383ORegoGDZ6QBNosz8HchBI3PFKpEpsNbi8h134J4XvAj1bImh79CHxK0h8riA+VNJZA/KsyIJXkUXVw6N17utD5MRHvGnGE3UQmvI16+xhaB8vTZ1EqeO0jmnePoeyH6iBKlERurlvTojhcjb0C2BO7e1g0KZmBrzputELTUC2jECIFUkCige/m5qh9M04+dq3q3LAhNBzDeQ6CjlShfwpCjkJ0T4wND9Zr3GerPGUfSwtoI2qhDjtJJKoUhlU89xVQIvrt4PCJ6jQE4h8XdaxTnyzJIHMC4A8kBk9CKmrFEyTjRjuHrIGAslOWilmHeilYWOEQoM84aQWeeppEIiPHRUgHLckyZ7gmrbay8Lb45QlHR4HTy8pL4qQUysE6lzM1GHldVIK8P8Bc/E3ix2xykBKmklWIBIw2h2dDLUnM8la4Mwiobi3ClBjbIUv80y9nnBEackiUrl+JmWN041LpaDtK+HUdDJwCgFV9dYHB7g0nAHCISTlNAs5lCIGNYbdOseFDyc0Zgt5qhnLO1e1xXsbA6nAXXvvYhhQOhb3Lh+gvXJdVx76iruuHEPjg6PMa9rJvCBNSmQEqLvwZMYvIFrdgJnsxmWyyWGrocPCYNfI/gO7WaN1eoMi+4Aurawzkrkk5AocJrSK9m0MwmXNUa0Msy/gYJWLFRIaYGDgyOs1mdYrc8QEzsj5D1zSYwCNCHGAYPnOWStQYweGNiZV4KGWceEUwAlNZvCgLqu0cwa+K5D3TSoqhr9ao1hGJgUqhW0AWrncHR0CcujI9RNhbZtcbZew1qL4+NjWGMwm8/gnrRYXb+ObnXGlXLC5WCydpnmW0yB4vTLZprnKwElCOBNULEonFITgcFxAU8UpdniqLWCHLXmypDy7Tub8FbEPdkAJqmY6bHTCDiIponSmh3wSVScLTvjRirqIOvIlhhk+TOP
TP4OYMtjyUHlxOu4KNLfTQHljTN/VPn3FDXY/YzxCuTP89+Te6KNwPbIl4mJVXYzqlPVDY6OL6GZL+BJYTN4rNoea+n8rGSTHvVwuAw7Ee8f6ny3x/PnMx0L+XtGyoHtKqHdi1UKIvB5Hv16Rt/3NI7IrhUnJu+jhCKZkc9baSW8TXX+5jxDu60dlN4PLJkuOb+YAjZtC0ABljfcKTyaybA51aAmkz17rmOvjvF7MqLCkPe42WsoQIuIGhGyKqn3nlUbhx4pBGhbFe/eGCNiZYkj//I9slHGxIJuocfge2gAITID3GiLVK5hdLCSLHj5YeeoahSUyuecryX/mXvKaCXKr1rBmASbEgafRlVbxLLopsQOTSLAAdIawJTPGzUi+PfcqzEJmz3rz2jumlw5VHUDLQ4FQZAKItH3kMZy3otuCzst7BRGIAHOAhDExBgNBe7hoq2USkrUl7sYGyOCcaKbI54H8yqshXXbfCStVTlnNYHLrbEAEjepNGPOfdp0MEf3rmZEYdO16PoOMXQYWkkppoQ49Di9cR2urmCkVYNWSzQi8lfVFZbLJY6PjjC0Ldquw+m1azi5dg39XfcgHRwA4KiNwCRknjuWOTXETp0Goa4rLA8PMPQ91psN1Bl3616vTnB2egMH6yOYuoJ1tnSq1nLdUQi4UAQdGKVxzsEoJq+MYoEOVTXDfHGI46M70LYb+L5HW28QFCMh2nDTNVIRpAIIFkQRlAJiEMQxepjkkCixFomkArUCguUuxnUzxzBnJ29TrQDDTlmSZ8Aai8VijsuXL+Pg+Biudlh4j3q9gg8B1XyGw+UBFgtW0b3RNLj+pMHZjRvoYmCFXCNN/7Q4tRmpBTu1+TkzQqrOz5dSjLrFyP21oMdFh6bPKj/+xXaRk93Negvh3XFO8jHZQeL1YZugWZCOCU+Nm1PGreh5l3OQUcvskIwpcT1Bs3n5zeJmVsqgCwowsfy9YypDzr8Mx5TwOdnkJqhRMRrX83zO+ZzyAbsployIT887n4FS0ncLCr730Frj+PgSLt95F4yrMcSETedxtlphs9mAYoRRACZ8RcUebBm3cu7jKWP7a1X5fXE2idf1zDk81xJh16FQKkPXT+uYXGTTlM3Wd2Ab1Zsi8XmujX5pVkGhcv1K0ujyv1uCUG5rB2UYeoAMUuINIyaJCAhw2kGZ8QZnSPZ8y+vthYArN6g8gHlzG7znXKKQ0hjiHvVU8tSKMRSNhhQDS3IrlPRFRi34O2mSP+b6/pgCQvQYhg5dv4FJibUGlIHVQFJJGNPSbVdxCuhmEyvX4eeqlOwk8SUaOMccFFIcIda1gjEKIQwwYAQkpogYPBRYJwMgWIGnWYzJcToECYqMaMNYXpBJgxIQQy9IVwVro0ChVrQugOwxGm0ZCUseFJn7MoQIP4RSRmy0QSpOI3c4VsZA24pLnw2njyIoh3AgRHGsCEhBur0acE8fgnMWlTOjxD8g3BbFLH416nVEmU9a52Z3PI+mKrsZyQIA6yrUszkWB0u0fYuuX8M6RgZSilCUMLRrnN24Dtc0UnWkOF0RevTtBt57WGNQW4cQgfZshRtPXcPpySmWB0eYLQ7grOPy5BS5MshVSMkgJQ3vU66aZlLpvIGrKiitMGxanJ6eYDm/huXJIdysgnMGCg0AlA7jSRCslFh11Pc9OgCoCVWjYZQ0LFQ8/s18gYPDY7T9BpvNBoMf0HdrKB1gKwttDEJi4qvS7AgnijDKIEQvz0NCChF1VSNFwCgLownOVQAYoTo4PIKFQrdpsRHSsfeMfJnKYXl4iONLlzA/WEIZjblScHWF3nvUdYNmvkDTzFDXDZazBk3DInvXnvwaunbDJfgAdJIKHuK5ESM73hDHVE/WlaI6Kg4w6VyRMyFhQ9I9+TiMAUQ+dop4TP99UTpoqnC7G3GbycayC+Vn52OaXthFQgtalJ0XM2oF5deyzkp2YpI8Q6X654KNqVQbFsRFBpem54oSGPBGqCQY
S5I2UGCS+hhoFgcQwMR1umCN1MW5yXvFFPCJIQLQWC4PcM8LX4Q77rqXS+MVV4CdrM7gY+T2FVpDGw6Ujc7BMaON+RqYorHjCMifim8yo1nIyDjB5jEGk5pjEp0f2au2rkyJrkxMW3OnvPw0CMno81x8zNRRTWnS3T2jUDvGiJeg+4odzASc69v0dHZbOygpcZSvoBHBOyynUKas4zzq4/t2vcHdnO+01w1HxWOJH8QbTJNZPF0MkuIJCYoMh8tncPmd4TQPUilRnSbniBJCZCZ9DNzG3hAhRKCqGu7kqvKGK2XUBfPM3um213tRSosf3Hzd/Bm5E7QiXkyNcQjKi3MREX2EUknY/Aopy6xLZ5bRSdJQZCRy5NMJIWHoIrxPUufPegxQ0szPOFnwLJSWqB0JQwjoNx2GTYdOflLgcmN2gFBUQdmbYHEvThe5SWoKxank4i7We+FTMLywaEZ1eJHfJiVqbZDB8xy87Y6tAkdKkIqULIedK5NCjKhncywPDhCGFt3pSUGXWEfGoNtssDk9Qd1wf5tea6w3Z1ifnqJdrxC8h7MWrg+Ig8fmbIXTGzdw6dKdqOs5O0lKMQyfAvwQ+XhxhCgxX6qua5aWlznb9z00DM7OTnH92jVU8xkTACNXLGVnLkpUVvoTlehfw5gKrKzDZGsoJuZWzRwHB8e4fLmDVsDq7Dp86Lg6RzNKovTIycpzG8QOoASDPGcjIYYI5VhttmrmqDyPM8WE5cEh+vUGvh1gNJeYN7MZ5otFEYFUAIyzaDCDrWo0QkoGOIiYz2eomxp108DWDl+7ehWbFaeoyngIysX7NW9/MSVYjAsxR5BqC8HIAVFOXwLno9SLfrJN0eDp/DuHxkw+K4s4Igdq+XNyqiBv9jhvW2uj5nSeyjyYPPfls3fPLU3Wzu00jNo6vnD7+GzKqe6ex0Ub7ZZDJgjLRcgPFe2qEXnJGz7lxSEfnz9f1jQCI35Hx5fx0pf9f7j7nhfAuRoRQN3UOFgsYY1BCgOAvCcxGp6dueyd7HJmdq8n2+61lvJycDVlEo7iiKJMPmcyjrvj90zTNxfZNGU0PV81vcYpCjj5mfxPtrptns3T2W3toLCXziTOkv+cQF/lhighdRl9Lnd68U0bPyNN88Xl2HH4M7k2P/T5gSMh2Cl5eJiQakoZoEJmeDMvhTQ7HFoppCiVQH0HRwqRuLKBOQZmPD+56YWpPpkjWxuo0qUcOb82PjzCQckPtVIwxPokIQuiSCFUAiFmRU9tkbTwe5ICl/oaUARiBFLUGDrunjr0G6xWawxdB5AIZ2kNpR2MrUTXwQIwINJIgeD7hLb1ODtZoT07Rb8+w9C1oOjhjOFOwLMaWsqeeeF0khN1oAQW7APlEgvmsORo0UQka+FUBWhbNhtGQdIId0v0ONWVyXnr6RiXNNokwmCuD883J31r6maG+fIAp1WDNYCQIih6KIoww4ButUI7mzO6pIBrN57CyfXr6DcbqK4HDZ04IMTk2XUL33MVji6cEF7MUoxIoUf0FjEkxBAxpAQLjWEIiJHndggRgw7YbFrcuHED9WIB5yoWeas47aSVgrIW080wk7FBnagxRwCOe/loDTIOVdVgNj/E0VEQqBro+zVX6agABGw1t+TlS4GEs5V1eoikFDgRoAyMBerZHEOMsNYAMWG2XOLo+BJ0IsxmM6SYuCdWXXMrCh8QAmFmLTslirglgSjLJhNh6pq5VcbC1A7aWXztiSewWfH8M2r7uUd+3sVRKR1u5encqg7Ma47M1yKyNdm4djeA3c1qF00pZcSTNM50bctrQ055KZmj05RDQRoma+HUCcql5JkXk4/PDkpxeCZzY3RKdsisavzOET3JKMe27a7NCiMf5mY2vfayqeZ374TupaO8Qikn50KU7aDVuQrHl+/EC150H2aLJYYUQdA4OjjAvXffjVndYNVteAwysVoqEXOaRmtV9Gn+IEd0ekzBfmSOJZHTKAUW58aCinOcnZKp
c/J0aZ+8Bu6O//RzLhxrAoQEB6HEju/b8ltkr6FvIQcFEMg1A4DEWiJGSZdjzRGdtoa5G0ZfeBOs5aGIoiEAbDmmyMp+ODe4cg5yA41iNUWrtTT+E+jUaCm7IgxdlwsC4INHiIE1FZSCIyD5AaFrMdQN80RMBZW43NYoBW25WoXAC5xWY2phimZmyDP/lNyo/H+E5xQ0aebTEMEoA6u4CsZqiyipHhJtCZAGRSWVqUrKWZk/EkNC8Al957Fe9di0a3TdGuvVGcLQoXIGdcPpDWNYOVbBgGAYefEafedxetLi9OwUZ9dvYHV6Ar9ZI/kBFDnVsTxYwLkZO32RUz0paVhVgZUxCcEnJtKKqkoMUu7tRb/DZeE+DTI5qhWnQ9AhI2OrTObXTJADjFGaEXG5EXWL7JhqK+JmFeqqQWcrNPUM8/kSK1uhJ4UYAKsImojL5AePMAxIANpNj5OTFYb1Gi4GOKnKsFBI3sN3HfpuQAipKH4SJQx9DwNCcg4pBCCxlolveyAQzk5X0mGYnUSrLfwQcHpyimreYD5vMJ81/HmJHeMo1VcK3BIgRUJEBEih7VtECkioocyM5emVArRFXS9wdAhQDKDgUVUOMQ7wNED7XjZNVZBPIq564BJ/EqeAxzamBAyBCbDSbFErIPQ96tkM8eAQRinMZjNpiAdUTcMpmcGXxZ4b2wWRKrAwMNAmsY4KIprDhEuGETpXV3jiq1/FSQylWicSO7CUAyPNatNQuXdPKuCo0VZSubJ5TFMpIARK52LKXWT3ZptyTs0453Yq9yABkPy9EG23bYpuEE0UtidaUblKSyvptJ2d94vOJ8//4pzEMg75ivN3bSMvu0jK+Q1xep7ZwZl+6s3GaPy9lkeXRkV2LXuGUqWLskosoeCsA7RC3cxx970vxKXLdyLCICUSzD6hqRwWsxl8WzF6KsEsTS46I/3T8y+psx3HYdfBmzoYudhhPF6qscjgPLVDJAfGzUDG4sIhQkbSt8dre+wvRE9yoC4Ok5q+/wJfiJBRu5udx7bd9g6KMaY0+VIKhd+REgGGSnRbSJMxFWegbCQ7N2RMC0jjvq0y3vM3eTqJ8kPNER/npauqQhWDVAtF+L7n7stgmfiQIqdVFG+MQ7tBL4qXxhjUDUf1WhEqq4W7IR2ShbioynnkyTFO6BGG+4MWPI4hFHGpnDUOzjoE44DEMvXOaoQhQJGB0QnRJkRN0DohJEIICV3rcXpyivW6Rdtu0G026No1YvCYzStUroKzNeqqESKtAYjRkxAS2s2A05MNrp+cYnO2Qd8OSH0EQmTFVnikBIQANLMZjHXQWqPvBlS1KzwZ7weeE1pxFI5R0VM54lRQmS9cUp3HJ2s7bEHq0lwpTlCSEunovKmKDkyMspHz+621cFWFpmkQfY+DwyMMl+5A7Dq0PsAoheADpywIcNogyf1jLR3CzDhYFblKxTOaEIaAruvgBw+ngeBZi6cVh45Tiw4EA98NWJ1tMGw6tJsWXevFseHGjEMfEWkNc+0aZk2D5XLJjr02MJIGydsS+8K8GHOaqAOToRWquoYmAkhxcz8YJB8wny1hFOCHDU7OroG8giOCoqw6qsXJZb1vVn8lJAqwugIUN+E0jp8BayoopdD3BmHwMK6Cqys4dYB502C1XmO9aeGlCmwYBnbUE1fRhYEr7oyrpeLHANYBugdpCzeb4/K9FrPFDM5YKEpo1ytQ1gnS2/MjpYSoAGfzRjh91ogVWAGoLHUfJ/NtwvMYA4rtNSZ/x+4m5pxjxeJhQCcVJWnikGwRK8t9u6AKRo+yC1v8AvYeyp5ihCMRM7m2pFlQ1s0c3ecKx+mGVByPUtVCU59/65hz0f/OOZfxyYUPerL5ZUSgOAz5DMWRFyco4+GZTwSlCleCoHBwcIAXvehFmC8PEUQllijixlNP4mtXr2LoB2QdkoI0TJ0tilKivbPPiAOpxyO3gp+Mcm85NHrkFU1TJ+XyZK6UrxKH7EJvcvtk
ns57Kee7O/ZTv3PXZc1IPSZ0gjw/nqnd1g6KNUZK6Rh6hHQBRkoI3vOGohSsYijSFPGt0XvNm4z3odSv56qc7OjQzqTZllWfvKQB6wyM5aic9TrYSbFSS5+kw2vO57PEXF4wIkIM6CAbpLFcNmud9MEAb4B5cVNMTJw+xLywTRwquZ5pYzBg+8Hny5KJhNyXRMMYB2drJDtgUJznX3UtlNKoqoDBE/oBqGqCNTV8SOh71ta4ceMM3nN79m4zoO+4fBoAi2pFIAWGT0lpxAQMvUffRZydbHB2ssbqrEO3YeckDgGxH5iDAkIIhMFHNL1HXc9KmaQ2TORVEr0xeKVRNU6a0YmzCoXkpFpIJ0SdWKUWItKGUSlXszQkwJJ0UMSkxqjGqgcmTIKregCpThhTPQBH0pWr4V3DmiSHR1ifnMK3A6MhQXqcJP4mZnlzLtxojmCNBqx2gOUy4BQjwsA6JSpq7psTIoZ+gG/XOFweIjUR2lkozfN9s2nRtz36jonLTdPAwiEOzDXqNy2u37iBxXIJaI3FfMGietOGaLKwMxE0Sn8ehZgccgfhzM0ho7CYL2G1QucsNhvFPZA6jTRwKSIv6swRSikhiTibSgApRmyQJB0qfY9gNCwqpBhYS6eukWYzaOuQvEc3eIRwhs16DUMJfdvBNRYz3wDKYLNew6eEZraAkqos3TC5N6aE1Cc4o9BUd6K2DhaE3/vd38HZ6Qmy9sO0M22QqNkYkTegnOJhPgK0wOEKSCE7EdupmKldBKuPzypbVXGF18HBAbquA8BVWdlJzo4Fo1TjFj39rKlTVIlw29QRKPdbNhwlayClhN2qkrzp5l44uSFw2S+nDtHWVeX3b69R06h911nZWsOy8yIfvN2k/HyKQgnylWR8pgRngJ+/GBNc7XB8fBnHl+6QppHsYFLyWK2YA5ZihDUaUA4xnOcCZQ5bBt/HdDqfW5p8PwGM6ogAH8k5apVKq5LpZ0wwlW0nj7A7wOftJn7C7t6Qf3eTg8cUXXGYtmZYmTeFT/ZM4RPc5g6KURoR7KRwaYECfALFgOQCorEgKQ+1SQMkHVgBcFmmTGLitFCGmhW4NJMSFTl0fkD1VhpzmutUBFgt0aYS3gJxyTBFD0MJVhsAGj4maG5xB4AQAShFiOJhpqEFNgrGWdi6wizORItCmh5G7s+hDef0qERgqtz8TGaL05laYFROX0zncI4kIBFx3lStsUiuQeV6bLApCp3BA4MHjI2wzsPoCj4S+j4gRcB3TKRUEZwKiIyu9P2AzbpDXW+gtUUiAygNHxK6zYD1qsN6vUG76eEHbpyYpMyaS405uvYpIMqV+GFsSJXSAK25AVzO/VpnUPUWVVOhql3ZqJP3iNaycBspREQosOy7hF9QehxPvueCCisIjMA/3GwRyASwEcJOcr80nDEYtIEzDrPZHP7wCIdHZxi6Hu3pipESQYYTMT8lBI/gPVQIoKriCD8pQBuRgE+IySPGATYY7to9MBm57wLWmw3cfIGmnnFHZqUxhAAfPICIpjaY1XM4VaHbDCKkR2jXG5ycnGI+X2A+m3MFmnAQbEoIkOsOfI7QvBAPPhRlWKUyF0XBKIdGL/hRBbc7CARoY+X8Q+mGTDGBYkTSgIYBUUTSHklbThMxMx7WaChtEbwFjEFVz6ASIXYdIPluigQKEUPboWtbEBxzdpRCuzqDDwGHB8eoaiUOv8H84AgpEbp+gyFGzKsKl+68Cyl6nK3OsG43vN5AhKn4hpdNnjW6st4Od0gvEbws4mmy4Bu1XZ0z3ex3bVpibK1F3TDRt5nPQMSoYJRqt1zuntMxZSMmFJG1gpCosc0DgXlyiUgCltxiY0K0z2vHznkTRr4WXyntUD8ox/zY/W1JUWNEeRg1kUCLuPv4dhQ/OjD5v7LYgblC46a4C9OwIyJnXc4MpMSx0nBuhuXhJVT1TEizBIWEYbPGjSe/htXpDWgQnNbMxyq9vabzgb2lsYHfCG9M113K
68WEgM4tJ5joy87RWCV1oZVrFQ9tShA+Z9l1pHPoxxQNynNo12GcOkmkqHwdkZAJ5HZOLzuHec/Ubm8HBYr7w2pejLRX0ElLN2BIfpj5GlrZAmWX3JkMJG82CpxiZ7EpUlEIbCgPzkhGFS0PGej8IGutYaBhwNU6IXqEoeMS2zy5CAhEMJKyUYq1FNj75/9iAgbfo+tbVN0aXb9APZsjRA+bAqAd58IBqczQ4P/4WrTRUMYV54vF5cD9NZgCxn1hNJNgt6KSrbyuhtYWlWuwmLNCLhKjNyEprs6JHugAUI+QgBBYPM2CH3JKSTo0J6SQ4KGwWXcir63Rt3wewSe0bYfVaoMweEQfS5cCPwTAR5GnFy0TlUs0GUIFsUCfAjdqzEvViFoNGLzHPNVoFDfZY+chgpQBFLEAn5EFUSICpYSnMnnAM8cplwGCpA0BeKBzKbvWQhgjErJ0VhxlFGc2n+Pg6Bjr1Rpt2/OYi8OZwCTfru8QvAf1AR00UDPiBGRZ9YSU2HFNUdRPPcEPEV0/YLVe4+DSZRij4Sp2eF1doetaaKtQGYPD5Qy1ncMvIrreoxt6+MFjfbbC6fwUi8USi8USrnIwiUvcucydnWwuP1bwIcBKY0zue0MwrHjIOiLWwNYz1D7C+wgfI1xl4X0PHzsMgUuYs/AgJUKIAwI8I0gQ1d/UIKUABUntGlbmtTVztXxIQEjQ0DCk4KSDNcWIFDRSYO5Jt17DhoQ4eOh6zjwUpbGcH6Dre1Ai9F0HxIjq4ABHl+/A5bvuwrVrT2F9dlZSRaUXjaCXSZzbWBb0MdUxLuo8T7IzcRGJcZrmmVbh5fdoQZAJQNf36PteBA3jqEckSM9WqkDIx/l7M5eGj0kSEIxKt+O8x7m/76abpuklJTvvRcgFsJuqUfIo8ZqY/w5MeDTY5p2MG3sS51mPZyZrX37flk2+k59vcYhkH8jnqXWNxfwQx5fvRj1fihMVEYceT119Ar//ld/FyVNPwfctq4PHODoXBRHhLRmUtgLIc2OaAxpRoJUwOa9ifO3iWCI3vpX9JKMY2ScBmHjODmMJWXfGYPcsLnYabobglbmZURP5riTat5nPkyT4T8VTIejd+/E0dls7KMVo1KjIjnKWlGcxstxQLpNER0JeLj3lvjI0uXHCOMZE8Kw459sw1tSrtNLsTinmDrRdh81mA2gjLc0jglTuWJ0FeMon8XQSYa1BFp2u67AIXjZB1rIwWotk9gCrK0y7k+acd4YjlSrzn6NKeejVllePsriUBQaZBKjgqgqL+QJWmiP6CPQxoQuE0JN08wVIWg4wG14WsEhMpiWOaGOf0K49FLVoLTfh4hJkFv/igiV2MpMPCEMPDD1vSpoYPRLiqZa0XBByqbPgEm4hXZKRxTYNcH4AiTKuVhrOWAQTkBJgiFESghXURImzSFwhA0ilDjtHlMcnL7KJdWymY5j/HkOAkpy+tUwe9YE75FZNhXrWwDiDlIKU6PJcGIYem/UGm/UGaYhInvsqGW1hNEDKgMCaEeW+ibPk+x7B91y2Hj20ZsRhMa/RHS4w9C2sjqirCvPlDJWt4TygqwFpxc7ienWGk7rC4dEhLl2+XFI8xhgkY3gDljbygViYrhIl5xSiPA983cVBURrURATPJNmYNLTT0EEBA+DDAJ3Y+Qk+8NyhiVCVcXBhgA0ORE4cIYumaaASIUQC9R4RA/KN4j5XXP2AmHhORU6DkbKIUZx1IZuaigOA4JnfE4cB87rGrGlwx1134WtPPI716gwpEaOixHvjmDI2W2hlaXY4iTrHNWV7Y9hNuUyJlFOdHf6T08Xr9Zp1ZtqO+UlEheQ6Pd7I9yqpepzyVKbfP+1zNZ6T3jr/p7NpOuWiTfnp3of8romfodQY0ZdjoZFbXGRnxqht/CTvBUrS23xKcrMoOybyfxqrrfJ7SAHzgwPcceedaJoZfEqAMhiGAV/72lU8/thjWK3O4Pueix4mVUmZHpDVYzOABmxv
+mrnd9P7n8QBNpPU2s1Gvzhrk1GHHJ8Vxm8BuDhnF6F6W4HtBKNS4xnImyfPwgUOz9PZbe2gxBQ5Vo6ATgrJk0CvXA2RInMQCmw4mSUFvsoDrwg5VTxFFZRSo4TviIht2RQqVVpKhRMhBI+2ZZEqU9WcBlD8IVldMKlUBM24DoDPMXNV/NDD9y380CH4AckxfB6DEmBOc2t4V0nPCHG8wOTgJEgRP5tjZQHKImjOLTr5AckVK/myjbWo6xpEhBoWNRTcENEZj76PUDEhJeGxeE7HZKeKowJxXpRBDEC78TCWUaoQM1mMkSCVFMh7RD8gDj3S0MMaoLIWzWyG2WKOppmDlMYweGxWG3QhwBiLmIbSs4LHh7gTM3loq1kltfdwjqXpteH8sE0aKTqkxDn4skRnqNyM5EU9iUDzQ5d1UHI0KKNZymhzJUTW/UhELPxlAV1pxI6b0iXFlR1977FZd1ivWyRP6LVHSIR5M0NtHaAY4YI0e8tTlBIjSilFRN9j6NfsyLoKy4MF+r5Ft67QmwhnDIwDXGVYhFVZ+Ojgg0eMHl3XYug6Ti1ONtncYI67qSuG4CVyj9K7B1ohGYluMQL7SYGdFWslz56QKMK6IMRToBOuUyKpTBHkwEfmMqXokaiBBVfg1XWN5ANoGLbEGH2MiJsN5k1VSv9j4DRUDEEcU06nkjFA5tmECN8PjBIpxT21nMXR0RHuvOsunFy/gdXqFBlSH2OXySaeNwid043j/CGigoRsCbhN1pI8Z7LTsEucTTEh0jCqFweWAMh9rbTW22gKpAza8DNW1qAd6D7P7xwcs5OUBc3OIz3537ubF3NrcMHxuwHeuFHnSUJ5zVLnvyuvxfk9+fx2q2Km5F2QEhxiXNdlDx8dl3Ivc58xjcXyAAdHR9DWIoCdxLZtcfWJJ3D16hPoug5aEIKizFu26VSQ82di53g1W9crCFVGWLacxfGaOTa9AKUpQdXWWya3gUZv8ILzupkjnRGc/JGq3JhpsI/xPtyi3dYOiu8HJM2bS0wE6sfof/pAbi0MkGORI1we/DR12ZGhQc65F9xFKYERz0dDRATNTzs/8DFCTxrbOcUCWfP5HNcEwmdybIImLWkelHMnJO5fMgzYtBtUZ2cwpgIlwNkKxjgWo3I1NFVSCsiCWoBCiEAkJi7mTqKKNEqjvszWV9vVKlPbJaUpQLq0KlhdwRoDVxvMaiat+pDQdtxDiNALp4BGFVtSIhzmRCwNiJFLl53O6YpclRShKUFT5B8NNJXF8mCJg+NDHF26hIPDY0ApbNYtrl27jrMbFsl7IBJvYJEAlYQXZDB4j6H1mDckMv6E6KW9vFT5RIqAcCpkd+E+PdAwExTq3KIjY5hLPacbuplI4RvnYKsatqqRYuBovXLQVkNbltwnSkJ8FWG7qEEhwCNhowYoWJCpsCTmeYCUHMvoSYzS70bzOHT9Gn5YY15b2NrCOYP5soExrPdCiICOMM6ggsYsOgydZgSvW2O1PsVms8J8uZgoImNEAIzmMnUAKQT4wTNSR4yCaa2RrJDSPXf3hgK0cdCaALKAMgAMoHmMlPfQxM0ivaAxJkUY7+GGAXXD4wfLTQ2ttczZqiru0Cs/ANB2LAynIFV9ieC7HsF7VAQhpLNoIMDlyEPXom3XUh3FY9EpVj++dOkOHB1dRbvZgGIQ1IrvfXZKMlmdtUNQ1oz8LO3qluw6CBdtylPnuBCvpWpQaw3jKukGfvFmws8VJwxyyXDeFLOzlL9Xa06dF6d0Msd314ibEVn5c3g9urhL8oh+jJsstjbNEvkTkBuy5iaWuS2JEf0fA65ipMk1b6EuNCIQN43kiZdLrTWqpsHRpUuYL5dQ0gxz03W4du0arl+/js16Lc8ABxxKKURK03154riWj781UyhcMxBJwMk8vBx756SXEm9rig1pAFG++OkcBMrvu2AfyK9Pf7/lkNLudebPGY/lw0Z855nabe2g8CbAaYMUAjAkIMUyWkYg
9UwgnSIjOaogZOKYwFQy5pmMxKVnBgokPVdMeaDYMRFSaRxzo0QJRnrDaHEaqrrGfD7DYrlgiNxP8raMwY34iVIihsbVC33f4eTGtQLnVlUtTgpXhTTzA3jfI9ESi8UCzlZwSnMFBDQS5SonEuExAnJEP1FgmKYmygScoANZJVEQRzhXoXFzGO1ASSNEQtd7bNZrrK5fx+AHaAlKlU5lQWHhPH6EYuS0hlJgJVfDXAGtEgwlGBCsUTDGYT6vcXiwwKXjY1y+6y4cX74T2hi0mw6urlG5CpuTU/SKF/Aw+LLw5ieZN37WKFHy/REJKmhBAIKkC7PjxumivEjnlNzWwicDkhf6GKPA64JeJSbqmWYmuhoNqqYHpYDkZ6ga7keUwCXMkYCUFCgqqMQ6MQBXbYWQsGkHkDHoWo++9xj6gK4bUNcVV58oYnE1oxADa6X4voVeLGGMQ9NU8PMZQBFd1yKkASE5OFPDWo2qMnBOwXtCSp5TRZ5TbFpXHJ3L9eYInxKLtnnvYV2ATRaURj0Q7qPEVUckIbMxBgQHEwN4E4P8yPNAI6chowgx+onk/kTWXbH+CKyDlyoU6xyqpsHQ90gpwcp6oJRC8KzfUtUVVwIqXbqat5sVzm6cYHO2Qgo9LAjJVfCDR+UcDg+PcOnSZTz11FMYusj59cTOoYKCdiiICa8vQEbV4kXnju0Fv6QJo0jsZ7QqpZK6UUpxefF8BuccKlfB2rEr8dAP6CVFnDfPUjYs0gpTNGdcU8fKNJndBVko6+KOM7UVxOxsbrlKJj+Cu9H9uGZOxmKCZozGm25Zb2Ut0ULONsaKiF+BRIoswPT6diP5jNRoQbbznpBAaBZLHB4fo24amX8R164/hd//6ldx7akn4aVaSiFJGxRVHFSanna+bLU9biUQvmBMCoqd012Unwv+PiDL+jOOPuXyFCT1Zk7YTSw7odvnePPPyYjNJLZGzhDkFE/eRyeXe0t2Wzso1hiQMghpAIfjmaAkkavIXWshTxFlUdEJ81weaCU3IWZ0BCM6YhXnUTPEn8mzo4qjQIMq541ZYjo7KMYa1FWFMJth1sxgrYGXByPXwUeBkpnUS9Caz88Pg2iDtFit16jrEzTNDFUlIm7KYL44QjNrcHx0jHjpMpbLA1TVXNQyDZKICyVAuiGzM2aMgcGEizNxRig7I1NnBUCI3GfCgEXIqqpGXc3YSYHGEBIWswaWeDPJRGFjpURUKRBYiRaa6af5YawrywBVihjaBBUdVKxg1QxGJTQzlm2v6xqLxRIHB4ewVYX5wiMpztWeQGGtNHejjRGEzEGJUnINVhT1HpRqANJ9NjL0n0LghoyahecYwRRCqmakQAt0zSRVKiWn2UHJC6jWFjFF+J6rijIfg9VNZ/BdD2McqnoGV89hq5ZLriOfZ4wJSBoaFpGlfKUHR4IfEtq2x3rdo+sG9P0A7wPPHWthKgcFjShpwr5tEf0gDorD0Ffoe83CgdEDfQs4QCsDowlVpRGD5dJ8JHZUhh7UNIzKWYsKXOYafMCQOA3ktUYdg1Th8D1gwnjeYJL8iJOvcppRniURN2TQgcpGPDrI4uSCK6dSSkBuSEe8gQWp+DLWYTZbwPcDnAOcNIQkgCujlEJTN9yA0Bi4ivv7dJsNzk5O0J6dISVGUGjJirsggqsqXLrjMhaPLeCHHtF7KFmP8sYw3cjLxreDnECOnWoxjcgASvmplevPTk9d1zDGYDabYSEy/nVdQ9nRkem6DqvVCgnEZcfyPRpgQUCMG8824qG3zk+pHF3fmu2mfLaQlp3fndsAaTy3XJo69XtKKgTshBiT+4mN6xerle5E/fkqaLxmJgKjBCBJ83NNBMzmcyyPDlHPGiQFrNcrfOV3fwe//dtfwNWrT2DoewlgIwdYWfRRrjFzX0CZD3jBOOHmzsTUSdTKlJTc9s1QZe9ih+C8D6h333KBZWL+ud/voF75vKYo2M1s63Jv0VnKdls7KHVVA8qh64OU
9rErpxyLYjlXSbtsqdzRupSLap1bn8tmIqJQaoKyEHE9v7OW5a/l4aWYyoObRJthmjPWmnuvVEoxeTKxM9DMZqw5YTLbAiUynObpcwRTFippCuiHAUPboqtnqKsGxhoobXG2WqFyNU5uXMN6dYa77r4Hly/fhdn8ANAsdU9aMYGQSAiWOXdryrnkCC1GJhEmbpxTNorM3wzBw9qaozVBKKqqgnU1qhhRWQ2ngKqyqGcVbljAVUAI3AOmaWpUdQ2A0ydW0j6Vs6isRooe/WaDVWNxagl+0EhxgK0MmlmDqmHJ+LrhfirQA2bzJeJxhIoRtTPSsHFAjIFLwKFAhnk1fugx9Bz96KRHCDmJLgrxmCchJeUy7ghAE6CsRQRgrMshP4+zZjwqK20iJU4hyf10boDWnIqoqxl8NcC3PZybY9Ys0boVNCmEICmooKDhYLTjLyYW5lNCjA0+8kZ0tkI9qzGb1agqi6qZoZnPUblaKqgi+o4rPGZzhdqxHL2xhvv0eBbAC8rDKJGvVwlVlR2HiK5t0XUbVE0D5ypGqKLn+SRoTRi4kigGjxgsrDFMEFYibkfsXOT+Q1AcMBjDujSKmPuBxNU/pAjKKEQEDInl/EHcbiFRRIgejmq+X8Qqs94P8J4F2FJK3HfIOVhLcJY5OykwWumcgzasT2Ss46aIKWGzWqFdrRF6j5Q8YuMZhbEWQQFmPsfh0TEOLx3j9PQUKXXsoCaCrWzZKAsCqXJKIp5DSqaplWlaIgumZdK7qypY4ZZUVYXZbIb5fI561sh79NaOMFWDPTk5wWazQQgBTnPF2rT0d3pO0xT5tgOjBLnfdjqm61VeDy6S7t96n/x5EXo0RWUYSRlJpzyeudzWwOTmrxCkE6qMNyRyL/wayp3UhbZLo1PCczwJmqKgNQthzpcLzOZzELgx7VNf+xp+57d/G4/9/lfRrtcgQU60fEZeA1JK5Xt53Zykr7auG9ilEJ9L+2XHhkbi7PRGq0nALb9BxiwuQrtuZuzwqQsdmd3PGfknhdGD8kdWiMtI0kUO6LdKikcbXnBsMAggRJmYSnq1WGcZkdAXMNDL5BRnQErqprAnDyzDiMZwxB8C6zDkZ5AdFIEUJ6S2fHNSivC+R0yehbaMZv0GKbnixVWkri/oE5TPgRKYQCkOVQoB1jpoYzAEVt3cbGom84EY7lYW9cwKSS9HFHLNWm1d7+6iUiI9+eHrSVzxoBRC8KChx8w2JTKw1kI7i8paHMwaLA8WaOYNmtpgddaAiErUZ6saKfFGkZUrrdawBhj6FpvTE1gEJL9GpwcMQ4AxBpVzmM1mmM1mcK5GUgZBUlnQFovlEk5pDEOPRBHdpmVnJJdzgtMNfT+gbTvUqHmD0hFJJ5BjBxQkDb8AkWzncTLaIGuq5pLscwstjX1qtNIs4EUkG2KFynA6ZT47QGg9nOlgTAOtG5AP6IeIZojQyqCuGxZ3E3XVlLKuAztR3XqD0xuncJXDfD6HMUtRAGZUK2TEaBhYPl9Skc5VsJZFACNJFKayMB1QOQsPLscN0aNrV+i7JfwwK0gFQMJ5cuitLU0Eg/cI1rIKMc9e2UCkPFYrQIl2TBoX4ekzI5J4Am1zHyFSCcEGDH6A9wN0cIiJGy1mrY4QAvwwMCoq6EvtHJSKhRdC4B48RmnMmoada8tpgji02KzX6NsNkAjW2K1gYRg8KudRVTUOD45QVRU6Y0SOXxSsGQgt68x08ygb2CTFk6ttsqNe1FwFkcmIiatrNHK+3CPJlo04O2b585xz/JxJCw8iwnq9RojcJJMpVmprr9jliWw7F2oLAthNAdyMv3ahgzJxxvJnbTswNEILE+PjxjW7cAsnqEF2Zs7ZFFqYIBN8f1JJw3IlXoStKiwWS07vAPB9jxvXnsKTV5/A2ckJO97IFZVSMaVoK6W1m7IqAM5kTHZdgvzalBCdn7UpClHGe/wyeX7yuF0ApdzExvHiN++mr6f3d3cfJTnBaSJPfKZy
r0bc7NZRlNvaQeGeCKJ5kltvZ9KYiFLljVjqAMt7FURMRiJkJZOgcFMoiyFNHI6YWEo8RNE4GPOhVhYVXmz4RqfIkenQ91wC6yycFTIekYjbcElxfq9SvHokxVAs5NNIejxAscBVioRgWG8iZOSj76GISWN1NYN1DZQxMG4GkOQzs9AT1LlFIv99i28CKihTjElSPIb1FvoBs0Y2GDDnxzkHXQGVAuazBspw34q6Ylnyw8NDHBwcQBvuHaK0EeItl+RRCugMMGxWQGLpOqUAaxVcbTGbN2hmM1SCnsTESAYTT3sozKAp4eBwiZQCrAbOUoT34yMSQ0TXDnCuQ4y8CaWggMSIlFIG2ippEM07NlHu3yGLo5UHOzt5E6eOiBCHASapwoEK3qNbb2B1Da0rEDRSUAg90LUJQ0fwHiBPsCqi77x0z+W5wg7WuGlk7Y0g6Mj6bIPNaoPFbAFnGsybA1RVg36zgu8DurZH13boux5Qlqs9rIO3DilEWK3hlIUfPDspRgGepIw4YNNucO3aNWhbYbHkMbPGQDlWTLZOQw3EomteIRiFYAwULDtrKSJzIYzheaIS6yhnlWSttfSwGuPKFFm6P4aAqBS0H+BSjZAirIjKKSWoF4A+BPRSoaNTnjs217wL/E+wlYNzDWzFaFD0AUiMrq1OT+EHD2s0rGVtIwiHZvAem67H4YHDpUuXMZstsFqteaORFFRCFl9DaZsBeZJH1HUkudZ1XZzupmm4MV9BXVJxWOqakcdM5s1Vcpn7E6QZXe7N0zQNnHM4PDwsZ9BtuOFmpmvkVHYuFN11UMa1cFw/L4qod48vz8FO+mr6/mlKYcvBoYudjHweOXVS1vaySm47TdPPLk6R/E4L6jQ2/pRiCRASaRwfH+PSpWPMmhpM5gtoN6fo2jVS9MyLI7N1fjkFk3+mtWuJEpB2HDEijHL/22NRrisvWpMxlTc/LSpyq66AAiZzYrwf6YL7sP29O+Oe/xw9pfPO1C3Ybe2geO9BWsN7zw+8kk1FK8l7BzjtoKww6sVh2c2Hjg9YriSZMORVJrcx+dF7z0REYv2HlFhTw1S80FRVBQBI0gsnw54hsJQ+FGulEGjUR5jI6ZMsGlueq0B9wAhLsuR3gtKRH/SUkFTgqPGGxWy2hKtrGFdhvrRQygLQTNWBpHlIX7gQ5O/hFXZEVJgQxr1SlFZQMUlzRSFLZr0LAEzmIlSWiYr1MEBrVVI0zlbs5MkEjyGwmJtEwcPAVUBKJWgDAEak6jWqukFdz2FcDZUUXNJwLsDYFo6ZsCynHxZQipBSEBSgl+8gEDxa28PHBKMsqoqbHAbiKNjVFtqNDR5z2aVOmSRNo1AbsLXwZREsaWJQ+BR5HJXA1EOfsF71WJ22WK8GrM56aApAVFC6ZedNc7t3r7l0PXOccnrEKg0DhTQE9Ose4TBydA0LBdaIiZHF0/p+QBw8TGUBAqx1MIrJwaQU4JwElUqq0aRzagrouxakuAmfNhpN3cBa5qMQUukUzutpRpKizLHIPJ8S1CV2gIhl8vthKM+w0grGGtZA2d3wEle2cXqJxemCCLYh5QVbeEDEJFPudI7SWyUTbrVzsHUNYxyQAAoRKkT0mw7r0zOkGOEsVxllEiKjp4S+7zHUFZaHhzg8OsL169fgh14qBln5Vh7kreeJs4ETnSHFfbqapsHh4SHm8zmqqhKUL24dOwY/KOdBYA4F92qalBPL2mGla/OsaZBiQt916Dft1iayi3RMfz8994ICTNeHyfG77ytrBlHhSOxuqKUKbHcNUjlRsa0rwulphYxEjR2W83VMz4FuErTn0mc1VhfmNVU4RsZoHB0d4447L6OZ1VAKGIYOpzduoG83gvQ6kCCGRNKzDecrXaZjkR28bVSJtvagXWeOyv/Gz1T52p6B3YpPMEXun9FnS6Ar/5Dnm8paNwUEoL4FEZQhsNy57zvRn2DIE0Zj8AERPbSui3S5ks2Do2HZLDDx6BPzDrYg
yMlrQbRV8vunD00lOey6Fm5GArRzqCxL30NY+VmsC0DhrkwhQcJ4X7ehNr6GkWQIjvQhk0KuMcUBbbvG6cl1zJcHODi4hKZZMBZJxNGAARRp7pA8zZfLdxaJZlDx+ssP+GFEUlC5lDaN0UlKiQmmsqDHyOTU/LilxGJ1kC61sXT/5M0qJuZMBNHhoMxYN2PkXdcc+Rrt2OGzCtpU4JJbqdBSYMSFGkS/gPcBKfK4xRQxDAlqM8D6BKUCOhPQ9QN6z1U+82WDZtFA2zHCFOqQ3COeG3le5Hx/huWVpAJj9MitU5XS8MMAVwX4SOjaDutVi5Pra5xcX6PrAjQFaChUTUDtLKraYj7nBoNZyTeLS+TUCWICfELqAkI7wCaDNCQm+Sp2KPu+R9t26LoOjZWy9MDy9ZrAyrApMGKo2UHQsrGnFNEPHWIinJ6dCAKYYC2L3WkR8bSW54yRzt1afpKsspSiVCoBcWAHNkaPbugw+EFI1Bj5PMIZy2ObiJv0+ejhg4cJHs4P0NZBC5uSny+DaBnlyw0gox8YRNUKylgoa7mcWdAKJEIMHt16jbZtuYTezeCHDooIIQaQ53keArcQODhY4uDwEHXTYOg73kinYo+UN9OpVs64SE/TuTHGIrJGREwungRK+Rm1ORCT4wKNTn12UPLYee9RGU4bNVWFWd1gU1UY+ngunTO1rQAub5y4+fbydAhKWdt2Pv/pjKP5MW2w7TCN/Jws1z+lQEy/Z9o3MJ8nMEqtawVoqeqilBApgAionEPdVOLYRXQtk6bPTq4hDi2vA4mdCz25diUNYHcdQNxknHfHZZq62jqi5G7kGv7AfX6Kim1/Tx6jiwixFzkm0/3xIrvIkeJ1acc/2TqLZ+6o3NYOCkdRCRQCssqaNtwPh6WaBzR1RCFb0TZ0CUiqR3FknsWOEhGMtcVLJ0lv5Acu8wDypLGWlSzrui4CSSElXgTN2LsiUSokNy5xFXVaujlcN05y+XeWAld83iSRBkcGBhQT+q7HZr3CZrVC167RzJcwjqCUbEjC686RicI40abkOhCdm8hZ1yWlAAx+jJKAiZfMj0BKDEFro6AMk0eHMKBJAcibl1JSxQSoSCAVAcXN4hIxg0GoCLLpMR/IaAutuVIlVZyfd1WD0HOePS/UVeWwWC4KuUzrHn0/YAgRPghCANZZCTnK0QAZBV1ZzOsZhJPKr4cAHQKUMbAml77GMg+mwlopJiRRjKVEoEiwpoWxNYYBGLqBnQjS0mRMIUWGseeLOeaLGSgGVHWFZtZAGw0aBiSVMiWGxbpCYPVWHzGse6SeMLQe3geuaIlJSk47DN7DxSTpRgtn+M/gB1BMwtuSImytoZSgZDGi63ooy1U3Wis4a6CqitEyJTQdo6AVtwIozw8RPPhz8ohTDOygpADvB4QYhEciqTIlIIQ8066qmG9iWKU1b8ohBFidG4aOKRMVAsgHpGEAQHx9WkNbLbo4ijlfxsA6B6M1og/YbNbww4DaWjS1RQzC7bCSupUqvtyYzxiDppmhXW8QPTdqzF3E+YFViJFANCIcRCjHhCBqtTGOGjPyTOTNyjlX9HUyYTsrvQYJBIL3Zc6XKDZyNU/+/KHvQTGKDtRku5huoDvNUbc2oJ3f3YxjsnWMrAsXOSr5uLz+jJyNyfeKBIESsr/RI09HqbHsn5vEjudKiicQQW2tn9mfySmeoiUi48mnQLhx4zq++MUv4PefeBzGOrTrNW5ce4qblYpekxYUBvLeadpqF0m5aEtWFxxbeh1Nxo+mPyV4xnit08/cAqTEHcm/uAkwkr8v630VsEw9vXOyfX009ifDOM7fqN2WDkoZsESgwF1OoRSUYZVQpVhvAImjv5QXQ8VRJb91+8FiIp5nPRXN8DOliBgBnbIqKkeBMXDDvLyAOmvQ1BUqZzm4pQSiCMSA4AcMXYtus4axGil4aEpQMSKGYYT9Yv7cBEyUMEd4O1+8ym5tkVyPFGAUASLu5oeAvqqw
PjvFyY1r0NahauawtuLoymgYW0EZC6cFzckLIBGGvkPfb1hvInJ5aRx6DH2Lvltz0z7SMDFhs1qwuqurJNUj3aKTx9B3fI00IPoOXd+CqIc1CVpF7kAsS+XQDQgDb6Ld5gx9u4YfeqQ4gCjIAq8R/IAwsK6H0Q3IWETP5cEUA3zfoe9F/dT3IIpQSOUe+S6gVzLsMSFgADeBTIgw6LsOSidAJ2gjHYoNE5SJwPOLCKCEAD2mG6yFAeCcBcWEFDz6tkXfD+BKWIOmrrkRXgKACgbAcjbDwXKOs1mDbnMi8zagqSzmTY3WKlgLNJWD1jyvfYrQmoAUkUKPoTUwEWidw0lKUMqiDz181yEFj9AP6BBxdnKKWdUgJWA+X/L9DqzWu1mdIRqHejbnBn4hgmLgsU1cIdMNAdF7UIgIQ4cwdFjMZwBYcM8PXkq7FQwIwVogBe5q3W8weI9AcbLgSq+dvscQWkY7pM8IAHgfEIdBkBN+pggJoQN6zWWXGgqmd3DKSt+nAYoSP4dp7FNEKUpULi0JjIaViiPf9xiUhgdhvVohBg9bV4zwhYCub5FAqFHxnCWu7uvaFqHrYIW87aMQgXMwQdOycz8hxgIgLWtOQAwBfddxq4CqYomCjGQaUxA6EN+H/NlJgdPN4iQzmmtYxiBxI8eUEoa+R4gRfddh6Hv4xFwVM1ljyrldsBeN6QmJgydpgHPrFI2pTBKHKtuUA7K1jk8cHkYzwKRVNdlYaUzJ6JJGpjH1dC74pNIXhyZjBklXkWZUO2+BOV2RiNs8nH3lK3jsiSfhmpqD1UTYrNfoNhsBoyNy65SCnhBXAWYkhbeCPNMvsvH6s+ORYtpC3DLbOmuglGubOCHbzuO2wzk6Z9tnkCZjPh6fCr1g+o4tBGtyrvmz+Q+hSIBTetzrbceBLajazRGlXVP0TI56ntnv/d7v4b777nuuT2Nve9vb3va2t719HfaVr3wFL37xi5/2mNvSQUkp4dFHH8X999+Pr3zlK1ss9b3dup2enuK+++7bj+U3yfbj+c21/Xh+c20/nt8824/lrRsR4ezsDC984Qt3+Dbn7bZM8Wit8aIXvQgAcHh4uJ8Y3yTbj+U31/bj+c21/Xh+c20/nt8824/lrdnR0dEzOu7p3Ze97W1ve9vb3va2t+fA9g7K3va2t73tbW97e97Zbeug1HWNd7zjHajr+rk+ldve9mP5zbX9eH5zbT+e31zbj+c3z/Zj+X/XbkuS7N72tre97W1ve/t/225bBGVve9vb3va2t739v2t7B2Vve9vb3va2t70972zvoOxtb3vb2972trfnne0dlL3tbW9729ve9va8s72Dsre97W1ve9vb3p53dls6KO9617vwspe9DE3T4HWvex0+9alPPden9Ly0X/u1X8Of//N/Hi984QuhlMIv/MIvbL1ORPiH//Af4gUveAFmsxkeeOABfOELX9g65tq1a3jzm9+Mw8NDHB8f40d+5EewWq2exat4ftg73/lO/LE/9sdwcHCAu+++G3/pL/0lPProo1vHdF2HBx98EHfccQeWyyXe9KY34Yknntg65stf/jJ+8Ad/EPP5HHfffTf+9t/+2wghPJuX8rywd7/73Xj1q19dFDivXLmCX/qlXyqv78fy67ef+7mfg1IKb3/728vv9uP5zO1nfuZnSuPC/PPKV76yvL4fy2fR6Daz973vfVRVFf27f/fv6HOf+xz9jb/xN+j4+JieeOKJ5/rUnnf2wQ9+kP7e3/t79J//838mAPT+979/6/Wf+7mfo6OjI/qFX/gF+l//63/RX/gLf4Fe/vKXU9u25Zg/+2f/LL3mNa+hT3ziE/Tf//t/p2//9m+nH/7hH36Wr+S5tze+8Y30nve8hx555BH6zGc+Q3/uz/05eslLXkKr1aoc86M/+qN033330Yc//GH6jd/4DXr9619Pf/yP//HyegiBXvWqV9EDDzxAn/70p+mDH/wg3XnnnfTTP/3Tz8UlPaf2
X/7Lf6H/+l//K/3Wb/0WPfroo/R3/+7fJeccPfLII0S0H8uv1z71qU/Ry172Mnr1q19NP/7jP15+vx/PZ27veMc76Du/8zvpscceKz9f+9rXyuv7sXz27LZzUL73e7+XHnzwwfLvGCO98IUvpHe+853P4Vk9/23XQUkp0b333kv/7J/9s/K7GzduUF3X9O///b8nIqLPf/7zBIB+/dd/vRzzS7/0S6SUoq9+9avP2rk/H+3q1asEgD72sY8REY+dc47+43/8j+WY//2//zcBoIceeoiI2GHUWtPjjz9ejnn3u99Nh4eH1Pf9s3sBz0O7dOkS/Zt/82/2Y/l12tnZGb3iFa+gD33oQ/Sn/tSfKg7Kfjxvzd7xjnfQa17zmgtf24/ls2u3VYpnGAY8/PDDeOCBB8rvtNZ44IEH8NBDDz2HZ3b72Ze+9CU8/vjjW2N5dHSE173udWUsH3roIRwfH+N7vud7yjEPPPAAtNb45Cc/+ayf8/PJTk5OAACXL18GADz88MPw3m+N5ytf+Uq85CUv2RrP7/qu78I999xTjnnjG9+I09NTfO5zn3sWz/75ZTFGvO9978N6vcaVK1f2Y/l12oMPPogf/MEf3Bo3YD83vx77whe+gBe+8IX4tm/7Nrz5zW/Gl7/8ZQD7sXy27bbqZvzkk08ixrh14wHgnnvuwf/5P//nOTqr29Mef/xxALhwLPNrjz/+OO6+++6t1621uHz5cjnmW9FSSnj729+O7/u+78OrXvUqADxWVVXh+Ph469jd8bxovPNr32r22c9+FleuXEHXdVgul3j/+9+P+++/H5/5zGf2Y3mL9r73vQ//83/+T/z6r//6udf2c/PW7HWvex3e+9734ju+4zvw2GOP4Wd/9mfxJ/7En8AjjzyyH8tn2W4rB2Vve3s+2IMPPohHHnkEH//4x5/rU7mt7Tu+4zvwmc98BicnJ/hP/+k/4S1veQs+9rGPPdenddvZV77yFfz4j/84PvShD6Fpmuf6dG57+4Ef+IHy91e/+tV43eteh5e+9KX4D//hP2A2mz2HZ/atZ7dViufOO++EMeYcY/qJJ57Avffe+xyd1e1pebyebizvvfdeXL16dev1EAKuXbv2LTveb3vb2/CBD3wAv/qrv4oXv/jF5ff33nsvhmHAjRs3to7fHc+Lxju/9q1mVVXh27/92/Ha174W73znO/Ga17wG//Jf/sv9WN6iPfzww7h69Sr+6B/9o7DWwlqLj33sY/hX/+pfwVqLe+65Zz+e34AdHx/jD/2hP4QvfvGL+7n5LNtt5aBUVYXXvva1+PCHP1x+l1LChz/8YVy5cuU5PLPbz17+8pfj3nvv3RrL09NTfPKTnyxjeeXKFdy4cQMPP/xwOeYjH/kIUkp43ete96yf83NpRIS3ve1teP/734+PfOQjePnLX771+mtf+1o457bG89FHH8WXv/zlrfH87Gc/u+X0fehDH8Lh4SHuv//+Z+dCnseWUkLf9/uxvEV7wxvegM9+9rP4zGc+U36+53u+B29+85vL3/fj+fXbarXCb//2b+MFL3jBfm4+2/Zcs3Rv1d73vvdRXdf03ve+lz7/+c/T3/ybf5OOj4+3GNN7Yzs7O6NPf/rT9OlPf5oA0D//5/+cPv3pT9Pv/u7vEhGXGR8fH9Mv/uIv0m/+5m/SX/yLf/HCMuPv/u7vpk9+8pP08Y9/nF7xild8S5YZv/Wtb6WjoyP66Ec/ulV+uNlsyjE/+qM/Si95yUvoIx/5CP3Gb/wGXblyha5cuVJez+WH3//930+f+cxn6Jd/+Zfprrvu+pYsP/ypn/op+tjHPkZf+tKX6Dd/8zfpp37qp0gpRb/yK79CRPux/EZtWsVDtB/PW7Gf/MmfpI9+9KP0pS99if7H//gf9MADD9Cdd95JV69eJaL9WD6bdts5KERE//pf/2t6yUteQlVV0fd+7/fSJz7xief6lJ6X9qu/+qsE4NzPW97yFiLiUuN/8A/+Ad1z
zz1U1zW94Q1voEcffXTrM5566in64R/+YVoul3R4eEh/7a/9NTo7O3sOrua5tYvGEQC95z3vKce0bUt/62/9Lbp06RLN53P6y3/5L9Njjz229Tm/8zu/Qz/wAz9As9mM7rzzTvrJn/xJ8t4/y1fz3Ntf/+t/nV760pdSVVV011130Rve8IbinBDtx/IbtV0HZT+ez9x+6Id+iF7wghdQVVX0ohe9iH7oh36IvvjFL5bX92P57JkiInpusJu97W1ve9vb3va2t4vttuKg7G1ve9vb3va2t28N2zsoe9vb3va2t73t7Xlnewdlb3vb2972tre9Pe9s76DsbW9729ve9ra3553tHZS97W1ve9vb3vb2vLO9g7K3ve1tb3vb296ed7Z3UPa2t73tbW9729vzzvYOyt72tre97W1ve3ve2d5B2dve9ra3ve1tb8872zsoe9vb3va2t73t7Xlnewdlb3vb2972tre9Pe/s/wfF6QbL7pSBfwAAAABJRU5ErkJggg==",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "import cv2\n",
+ "import matplotlib.pyplot as plt\n",
+ "\n",
+ "import glob\n",
+ "frame_folder = '/home/kuhaku/Code/FPT/VideoColorization/dataset/hollywood/frames'\n",
+ "img_paths = glob.glob(os.path.join(frame_folder, '*.jpg'))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 37,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "('/home/kuhaku/Code/FPT/VideoColorization/dataset/train/test/lost_weekend_the_01681/gt',\n",
+ " 'frame001716.jpg')"
+ ]
+ },
+ "execution_count": 37,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 40,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "test_path = '/home/kuhaku/Code/FPT/VideoColorization/dataset/train/test/*/gt/*.jpg'\n",
+ "for path in glob.glob(test_path):\n",
+ " folder_name = path.split('/')[-3]\n",
+ " os.makedirs(os.path.join('/home/kuhaku/Code/FPT/VideoColorization/dataset/train/test_', folder_name), exist_ok=True)\n",
+ " shutil.copyfile(path, '/home/kuhaku/Code/FPT/VideoColorization/dataset/train/test_/' + folder_name + \"/\" + os.path.basename(path))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 46,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'/home/kuhaku/Code/FPT/VideoColorization/dataset/train/test_/double_indemnity_01977/frame000060.jpg'"
+ ]
+ },
+ "execution_count": 46,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "'/home/kuhaku/Code/FPT/VideoColorization/dataset/train/test_/' + folder_name + \"/\" + os.path.basename(path)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 43,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'/home/kuhaku/Code/FPT/VideoColorization/dataset/train/test_/frame000060.jpg'"
+ ]
+ },
+ "execution_count": 43,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "'/home/kuhaku/Code/FPT/VideoColorization/dataset/train/test_/' + os.path.basename(path)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 44,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'/home/kuhaku/Code/FPT/VideoColorization/dataset/train/test/double_indemnity_01977/gt/frame000060.jpg'"
+ ]
+ },
+ "execution_count": 44,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "path"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "VideoColorization",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.8.16"
+ },
+ "orig_nbformat": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/opt.txt b/opt.txt
new file mode 100644
index 0000000000000000000000000000000000000000..4d96bb3d4d62a1462fc61abd8ea87c62e12339a8
--- /dev/null
+++ b/opt.txt
@@ -0,0 +1,46 @@
+train.py 2023-07-08 16:16:10
+------------------------------ Options -------------------------------
+annotation_file_path: dataset/annotation.csv
+batch_accum_size: 10
+batch_size: 2
+beta1: 0.5
+checkpoint_dir: checkpoints
+checkpoint_step: 500
+contextual_loss_direction: forward
+data_root_imagenet: imagenet
+data_root_imagenet_full: imagenet
+data_root_list: dataset
+domain_invariant: False
+epoch: 40
+gpu_ids: 0,1,2,3
+head_out_idx: 8,9,10,11
+ic: 7
+image_size: [384, 384]
+imagenet_pairs_file: imagenet_pairs.txt
+load_pretrained_model: False
+lr: 0.0001
+lr_gamma: 0.9
+lr_step: 2000
+luminance_noise: 2.0
+nonzero_placeholder_probability: 0.0
+permute_data: True
+real_reference_probability: 0.7
+resume: False
+resume_epoch: 0
+step_train_discriminator: 1
+use_dummy: True
+use_feature_transform: False
+use_wandb: False
+vit_version: vit_tiny_patch16_384
+wandb_name:
+wandb_token:
+weight_consistent: 0.05
+weight_contextual: 0.5
+weight_gan: 0.5
+weight_nonlocal_consistent: 0.0
+weight_nonlocal_smoothness: 0.0
+weight_perceptual: 0.02
+weight_smoothness: 5.0
+weigth_l1: 2.0
+workers: 0
+-------------------------------- End ---------------------------------
diff --git a/outputs/video.mp4/000000000.jpg b/outputs/video.mp4/000000000.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..19ec53d39120dee70d96204f8e126dec57d710e3
Binary files /dev/null and b/outputs/video.mp4/000000000.jpg differ
diff --git a/outputs/video.mp4/000000001.jpg b/outputs/video.mp4/000000001.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..4d3aff5c7ea6209a2868c5be8eb51aa6a18e2e6f
Binary files /dev/null and b/outputs/video.mp4/000000001.jpg differ
diff --git a/outputs/video.mp4/000000002.jpg b/outputs/video.mp4/000000002.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ef47278965885062a9ffd7f4b9b133825c0ee9c7
Binary files /dev/null and b/outputs/video.mp4/000000002.jpg differ
diff --git a/outputs/video.mp4/000000003.jpg b/outputs/video.mp4/000000003.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b0c3011674fe7083010400398d9f0ac0913741c6
Binary files /dev/null and b/outputs/video.mp4/000000003.jpg differ
diff --git a/outputs/video.mp4/000000004.jpg b/outputs/video.mp4/000000004.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c27de8add41119a03b9f2888aa9e87b68ca9ee07
Binary files /dev/null and b/outputs/video.mp4/000000004.jpg differ
diff --git a/outputs/video.mp4/000000005.jpg b/outputs/video.mp4/000000005.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c3197d1a0d9a53db831103184492a1bbb98432c7
Binary files /dev/null and b/outputs/video.mp4/000000005.jpg differ
diff --git a/outputs/video.mp4/000000006.jpg b/outputs/video.mp4/000000006.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b49b974108a33c6e94045366585609d6e553fa6c
Binary files /dev/null and b/outputs/video.mp4/000000006.jpg differ
diff --git a/outputs/video.mp4/000000007.jpg b/outputs/video.mp4/000000007.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..fac7b1c4af31ed44207ed8d29e95cbd282d615f6
Binary files /dev/null and b/outputs/video.mp4/000000007.jpg differ
diff --git a/outputs/video.mp4/000000008.jpg b/outputs/video.mp4/000000008.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..2a74d0419518d7d0a4eb25b745e5fb9227669870
Binary files /dev/null and b/outputs/video.mp4/000000008.jpg differ
diff --git a/outputs/video.mp4/000000009.jpg b/outputs/video.mp4/000000009.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0f69da59b98032b58545cad2aa6cf67f6182bd04
Binary files /dev/null and b/outputs/video.mp4/000000009.jpg differ
diff --git a/outputs/video.mp4/000000010.jpg b/outputs/video.mp4/000000010.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b1eede07f04b7e139208265f9ce57eb09486b16c
Binary files /dev/null and b/outputs/video.mp4/000000010.jpg differ
diff --git a/outputs/video.mp4/000000011.jpg b/outputs/video.mp4/000000011.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..712e0fceccd6d5f6dec27492e9011f80acd38485
Binary files /dev/null and b/outputs/video.mp4/000000011.jpg differ
diff --git a/outputs/video.mp4/000000012.jpg b/outputs/video.mp4/000000012.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..319ba417642cec8d43ac132364e773ab4a0d4c5a
Binary files /dev/null and b/outputs/video.mp4/000000012.jpg differ
diff --git a/outputs/video.mp4/000000013.jpg b/outputs/video.mp4/000000013.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7794e3328cf6bfff81f27936657531dc27d8850f
Binary files /dev/null and b/outputs/video.mp4/000000013.jpg differ
diff --git a/outputs/video.mp4/000000014.jpg b/outputs/video.mp4/000000014.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0adbef39af14e16e2b9b96a2c06bfa605b20bc5a
Binary files /dev/null and b/outputs/video.mp4/000000014.jpg differ
diff --git a/outputs/video.mp4/000000015.jpg b/outputs/video.mp4/000000015.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6b618fbd36fe689b055d24ba0ff26bbc22c72639
Binary files /dev/null and b/outputs/video.mp4/000000015.jpg differ
diff --git a/outputs/video.mp4/000000016.jpg b/outputs/video.mp4/000000016.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..51223ba35c3d38fc7d418d853fab33b75e8038e7
Binary files /dev/null and b/outputs/video.mp4/000000016.jpg differ
diff --git a/outputs/video.mp4/000000017.jpg b/outputs/video.mp4/000000017.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f832a81397647d65f9b2923653c0eb51c3a11f1f
Binary files /dev/null and b/outputs/video.mp4/000000017.jpg differ
diff --git a/outputs/video.mp4/000000018.jpg b/outputs/video.mp4/000000018.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a3acc34281aa8017c687bb507f90bb8e62fafb58
Binary files /dev/null and b/outputs/video.mp4/000000018.jpg differ
diff --git a/outputs/video.mp4/000000019.jpg b/outputs/video.mp4/000000019.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b0a1d0e3ac999786f6116837d03f2d9e0f914511
Binary files /dev/null and b/outputs/video.mp4/000000019.jpg differ
diff --git a/outputs/video.mp4/000000020.jpg b/outputs/video.mp4/000000020.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..84bd6268ee736dc04efc705de6066b9c174f67b1
Binary files /dev/null and b/outputs/video.mp4/000000020.jpg differ
diff --git a/outputs/video.mp4/000000021.jpg b/outputs/video.mp4/000000021.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ede2dc61e2e8ec7a5512c959881e9ca7bc88f397
Binary files /dev/null and b/outputs/video.mp4/000000021.jpg differ
diff --git a/outputs/video.mp4/000000022.jpg b/outputs/video.mp4/000000022.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..1969e4dce3b36e3e78913e46bd4c1a5b3a622722
Binary files /dev/null and b/outputs/video.mp4/000000022.jpg differ
diff --git a/outputs/video.mp4/000000023.jpg b/outputs/video.mp4/000000023.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..cd7ba8c9a1bc591e04697faa71dd7ab51b63223c
Binary files /dev/null and b/outputs/video.mp4/000000023.jpg differ
diff --git a/outputs/video.mp4/000000024.jpg b/outputs/video.mp4/000000024.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..66646bcb85f86c46c73f37c94a9a7f099bf7a431
Binary files /dev/null and b/outputs/video.mp4/000000024.jpg differ
diff --git a/outputs/video.mp4/000000025.jpg b/outputs/video.mp4/000000025.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..4aeff96345839321f004d9606c9163d6639223d9
Binary files /dev/null and b/outputs/video.mp4/000000025.jpg differ
diff --git a/outputs/video.mp4/000000026.jpg b/outputs/video.mp4/000000026.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0809ee2308763e738a25de7b6812a43f9549ff7a
Binary files /dev/null and b/outputs/video.mp4/000000026.jpg differ
diff --git a/outputs/video.mp4/000000027.jpg b/outputs/video.mp4/000000027.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..153167c612cbf41f4f9c178421a2d3d0893ef067
Binary files /dev/null and b/outputs/video.mp4/000000027.jpg differ
diff --git a/outputs/video.mp4/000000028.jpg b/outputs/video.mp4/000000028.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..333f594a41d806ec5cb3fece37ec9b1654fb2512
Binary files /dev/null and b/outputs/video.mp4/000000028.jpg differ
diff --git a/outputs/video.mp4/000000029.jpg b/outputs/video.mp4/000000029.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..95c517b4b3d204cbe4fa8115f601a3f155395ba7
Binary files /dev/null and b/outputs/video.mp4/000000029.jpg differ
diff --git a/outputs/video.mp4/000000030.jpg b/outputs/video.mp4/000000030.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a032c0a18f7e61a0050bc6bf54b82f742b368748
Binary files /dev/null and b/outputs/video.mp4/000000030.jpg differ
diff --git a/outputs/video.mp4/000000031.jpg b/outputs/video.mp4/000000031.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3c7d72fea69e21401bfc6ff062a002e0d03c051b
Binary files /dev/null and b/outputs/video.mp4/000000031.jpg differ
diff --git a/outputs/video.mp4/000000032.jpg b/outputs/video.mp4/000000032.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..463c3b58ffbf08a4af844ce65ded9788ca4f804c
Binary files /dev/null and b/outputs/video.mp4/000000032.jpg differ
diff --git a/outputs/video.mp4/000000033.jpg b/outputs/video.mp4/000000033.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..2e009493404ac175a71aba81655567f2562ba3d2
Binary files /dev/null and b/outputs/video.mp4/000000033.jpg differ
diff --git a/outputs/video.mp4/000000034.jpg b/outputs/video.mp4/000000034.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..83dfdbeb3608a83d4a34ea1220f421f7f41d9b21
Binary files /dev/null and b/outputs/video.mp4/000000034.jpg differ
diff --git a/outputs/video.mp4/000000035.jpg b/outputs/video.mp4/000000035.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..7f2182ab41fbf0d3d46b2e3b4462362ae814805c
Binary files /dev/null and b/outputs/video.mp4/000000035.jpg differ
diff --git a/outputs/video.mp4/000000036.jpg b/outputs/video.mp4/000000036.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..9bc2285e7de66aac4d84dbd97bbcd006f06304d5
Binary files /dev/null and b/outputs/video.mp4/000000036.jpg differ
diff --git a/outputs/video.mp4/000000037.jpg b/outputs/video.mp4/000000037.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ce0313a81275c7a8270b92a0b21aede7113ec33f
Binary files /dev/null and b/outputs/video.mp4/000000037.jpg differ
diff --git a/outputs/video.mp4/000000038.jpg b/outputs/video.mp4/000000038.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0982fd130f1c7af275b6efc0b131a525a74df5ed
Binary files /dev/null and b/outputs/video.mp4/000000038.jpg differ
diff --git a/outputs/video.mp4/000000039.jpg b/outputs/video.mp4/000000039.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b54985a9e8cbca5c8d0e7b715deccf175788c761
Binary files /dev/null and b/outputs/video.mp4/000000039.jpg differ
diff --git a/outputs/video.mp4/000000040.jpg b/outputs/video.mp4/000000040.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..38ceb4820c9264ef2b2142716ad46ee786d8da01
Binary files /dev/null and b/outputs/video.mp4/000000040.jpg differ
diff --git a/outputs/video.mp4/000000041.jpg b/outputs/video.mp4/000000041.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..12f9df50ec311be69c8cd5b1b1b104c58760742b
Binary files /dev/null and b/outputs/video.mp4/000000041.jpg differ
diff --git a/outputs/video.mp4/000000042.jpg b/outputs/video.mp4/000000042.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..019e66c460607df003d094ea7ce41111457a70be
Binary files /dev/null and b/outputs/video.mp4/000000042.jpg differ
diff --git a/outputs/video.mp4/000000043.jpg b/outputs/video.mp4/000000043.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c1baa20275351a5c4d67d1efed26f0a4c84df770
Binary files /dev/null and b/outputs/video.mp4/000000043.jpg differ
diff --git a/outputs/video.mp4/000000044.jpg b/outputs/video.mp4/000000044.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..095997eafdf2e6e0b61d2fc2f5a151f4d3b72f25
Binary files /dev/null and b/outputs/video.mp4/000000044.jpg differ
diff --git a/outputs/video.mp4/000000045.jpg b/outputs/video.mp4/000000045.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a28d045a2c2df9a7bebc78953fe495a3ac2810d7
Binary files /dev/null and b/outputs/video.mp4/000000045.jpg differ
diff --git a/outputs/video.mp4/000000046.jpg b/outputs/video.mp4/000000046.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..96c994f5453830a3323dc202dbe38b387e5e2e24
Binary files /dev/null and b/outputs/video.mp4/000000046.jpg differ
diff --git a/outputs/video.mp4/000000047.jpg b/outputs/video.mp4/000000047.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..1d92b67dea69f5577a8a9911e2d85e65e54c314f
Binary files /dev/null and b/outputs/video.mp4/000000047.jpg differ
diff --git a/outputs/video.mp4/000000048.jpg b/outputs/video.mp4/000000048.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..36259b7af0301faeb4037cadb2712a5e538f9ff8
Binary files /dev/null and b/outputs/video.mp4/000000048.jpg differ
diff --git a/outputs/video.mp4/000000049.jpg b/outputs/video.mp4/000000049.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..dec0d2defb622750389721eedd603f5db8612b1f
Binary files /dev/null and b/outputs/video.mp4/000000049.jpg differ
diff --git a/outputs/video.mp4/000000050.jpg b/outputs/video.mp4/000000050.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..83e7296ce1af30366dfb239fe2a6758a85dce4be
Binary files /dev/null and b/outputs/video.mp4/000000050.jpg differ
diff --git a/outputs/video.mp4/000000051.jpg b/outputs/video.mp4/000000051.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..e27b49ed0d5217e62cc9726bdd83c8cb09ccbc23
Binary files /dev/null and b/outputs/video.mp4/000000051.jpg differ
diff --git a/outputs/video.mp4/000000052.jpg b/outputs/video.mp4/000000052.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..cecf337d82635a99d81cb39bad0006419f43bdc7
Binary files /dev/null and b/outputs/video.mp4/000000052.jpg differ
diff --git a/outputs/video.mp4/000000053.jpg b/outputs/video.mp4/000000053.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..00e053fde3255e941022b4f4266952948a3be987
Binary files /dev/null and b/outputs/video.mp4/000000053.jpg differ
diff --git a/outputs/video.mp4/000000054.jpg b/outputs/video.mp4/000000054.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a549a0a51d8ad2e35bff3b5a5776a0e07fc3db56
Binary files /dev/null and b/outputs/video.mp4/000000054.jpg differ
diff --git a/outputs/video.mp4/000000055.jpg b/outputs/video.mp4/000000055.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a56b8a3bcbc373c1a939511debb3e02722c77b1f
Binary files /dev/null and b/outputs/video.mp4/000000055.jpg differ
diff --git a/outputs/video.mp4/000000056.jpg b/outputs/video.mp4/000000056.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6b497025c3b63e9b254fb27c0ecbf82a30e412fa
Binary files /dev/null and b/outputs/video.mp4/000000056.jpg differ
diff --git a/outputs/video.mp4/000000057.jpg b/outputs/video.mp4/000000057.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c94da8fefc6af06c269af15df3bbb0c1ef041502
Binary files /dev/null and b/outputs/video.mp4/000000057.jpg differ
diff --git a/outputs/video.mp4/000000058.jpg b/outputs/video.mp4/000000058.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..61c018494a223a1e79e3aa3e6da043152c487adc
Binary files /dev/null and b/outputs/video.mp4/000000058.jpg differ
diff --git a/outputs/video.mp4/000000059.jpg b/outputs/video.mp4/000000059.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..bb012741f2e6e3a4dfbae3ac3cb7cd424da27bd1
Binary files /dev/null and b/outputs/video.mp4/000000059.jpg differ
diff --git a/outputs/video.mp4/000000060.jpg b/outputs/video.mp4/000000060.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ed76570fcbfae12561a66d50fdfb17ca3bee5638
Binary files /dev/null and b/outputs/video.mp4/000000060.jpg differ
diff --git a/outputs/video.mp4/000000061.jpg b/outputs/video.mp4/000000061.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..753eacfc795cd4eec02fbd5dc378f86657203007
Binary files /dev/null and b/outputs/video.mp4/000000061.jpg differ
diff --git a/outputs/video.mp4/000000062.jpg b/outputs/video.mp4/000000062.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..2c57b31862f3add2f227951fc2f84e3dfbca2091
Binary files /dev/null and b/outputs/video.mp4/000000062.jpg differ
diff --git a/outputs/video.mp4/000000063.jpg b/outputs/video.mp4/000000063.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..256497c7673b0a28ec090d746d7b6d44b196c833
Binary files /dev/null and b/outputs/video.mp4/000000063.jpg differ
diff --git a/outputs/video.mp4/000000064.jpg b/outputs/video.mp4/000000064.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..287259d5fb359a2b789df00cf3d039f2affe87dd
Binary files /dev/null and b/outputs/video.mp4/000000064.jpg differ
diff --git a/outputs/video.mp4/000000065.jpg b/outputs/video.mp4/000000065.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ad5943dcfb4fb6a3d651fdd07b4d8ad539f04dd8
Binary files /dev/null and b/outputs/video.mp4/000000065.jpg differ
diff --git a/outputs/video.mp4/000000066.jpg b/outputs/video.mp4/000000066.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..41fa2f1430412d9464d36d9cf02a56bac0afd48a
Binary files /dev/null and b/outputs/video.mp4/000000066.jpg differ
diff --git a/outputs/video.mp4/000000067.jpg b/outputs/video.mp4/000000067.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..a8078301ad450b7d1c859abd8ab207a307f5792f
Binary files /dev/null and b/outputs/video.mp4/000000067.jpg differ
diff --git a/outputs/video.mp4/000000068.jpg b/outputs/video.mp4/000000068.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..3560785587d5c77901e5523b427d43e7f4a1709d
Binary files /dev/null and b/outputs/video.mp4/000000068.jpg differ
diff --git a/outputs/video.mp4/000000069.jpg b/outputs/video.mp4/000000069.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..28287c4fc4be43cedc6d6f42099d6a1713aefb14
Binary files /dev/null and b/outputs/video.mp4/000000069.jpg differ
diff --git a/outputs/video.mp4/000000070.jpg b/outputs/video.mp4/000000070.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..5dbe1902dba31b81e0701d7448046cfc80bf3b0c
Binary files /dev/null and b/outputs/video.mp4/000000070.jpg differ
diff --git a/outputs/video.mp4/000000071.jpg b/outputs/video.mp4/000000071.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..57a52d8bc6037de60829045bc83b06ae49eb309c
Binary files /dev/null and b/outputs/video.mp4/000000071.jpg differ
diff --git a/outputs/video.mp4/000000072.jpg b/outputs/video.mp4/000000072.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..b67e174bd8b889951c9dbb63cc8baaa5e041b35c
Binary files /dev/null and b/outputs/video.mp4/000000072.jpg differ
diff --git a/outputs/video.mp4/000000073.jpg b/outputs/video.mp4/000000073.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..ddda3caa3bae3c62c18a5f842eb7697053eca2fa
Binary files /dev/null and b/outputs/video.mp4/000000073.jpg differ
diff --git a/outputs/video.mp4/000000074.jpg b/outputs/video.mp4/000000074.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f0069adf8b36fccd8c1a943b32195a1084cacd86
Binary files /dev/null and b/outputs/video.mp4/000000074.jpg differ
diff --git a/outputs/video.mp4/000000075.jpg b/outputs/video.mp4/000000075.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..e9aed6377201d2ee9bd2cc6b7384fbcd1c0f775e
Binary files /dev/null and b/outputs/video.mp4/000000075.jpg differ
diff --git a/outputs/video.mp4/000000076.jpg b/outputs/video.mp4/000000076.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..55b91ea6f0912e3ac154a6ada278814fd38d1d9d
Binary files /dev/null and b/outputs/video.mp4/000000076.jpg differ
diff --git a/outputs/video.mp4/000000077.jpg b/outputs/video.mp4/000000077.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..e3e88758f1d5effe787a62abbcc33feae8d2fbca
Binary files /dev/null and b/outputs/video.mp4/000000077.jpg differ
diff --git a/outputs/video.mp4/000000078.jpg b/outputs/video.mp4/000000078.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..80cda26da2ef7967f691951a43afd7b5f37ae852
Binary files /dev/null and b/outputs/video.mp4/000000078.jpg differ
diff --git a/outputs/video.mp4/000000079.jpg b/outputs/video.mp4/000000079.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..219824b7eca8bbce00957b8e227ce1e818d83c0f
Binary files /dev/null and b/outputs/video.mp4/000000079.jpg differ
diff --git a/outputs/video.mp4/output_video.mp4 b/outputs/video.mp4/output_video.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..96013646f3a76c9161c88f081db6dbec0eff5ff1
Binary files /dev/null and b/outputs/video.mp4/output_video.mp4 differ
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b3977df208962d721c7956cc71b459218f768762
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,10 @@
+einops==0.6.1
+pandas==2.0.2
+pip-chill==1.0.3
+scikit-image==0.21.0
+timm
+tqdm==4.65.0
+wandb==0.15.4
+numba
+opencv-python
+gradio==3.40.1
\ No newline at end of file
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/__pycache__/__init__.cpython-310.pyc b/src/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..37827d96d5a1d2d1bd495a5927813f9658acbb47
Binary files /dev/null and b/src/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/__pycache__/utils.cpython-310.pyc b/src/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..599166c08a02a41e282830374647edce8e60e192
Binary files /dev/null and b/src/__pycache__/utils.cpython-310.pyc differ
diff --git a/src/data/__pycache__/functional.cpython-310.pyc b/src/data/__pycache__/functional.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ce5b70c6290bee94368cd6d9822e3482d67036f7
Binary files /dev/null and b/src/data/__pycache__/functional.cpython-310.pyc differ
diff --git a/src/data/dataloader.py b/src/data/dataloader.py
new file mode 100644
index 0000000000000000000000000000000000000000..73512b6e53ffa0774d1526d6570150dd0918182f
--- /dev/null
+++ b/src/data/dataloader.py
@@ -0,0 +1,320 @@
+import numpy as np
+import pandas as pd
+from src.utils import (
+ CenterPadCrop_numpy,
+ Distortion_with_flow_cpu,
+ Distortion_with_flow_gpu,
+ Normalize,
+ RGB2Lab,
+ ToTensor,
+ Normalize,
+ RGB2Lab,
+ ToTensor,
+ CenterPad,
+ read_flow,
+ SquaredPadding,
+ SquaredPaddingFlow,
+ ResizeFlow
+)
+import torch
+import torch.utils.data as data
+import torchvision.transforms as transforms
+from numpy import random
+import os
+from PIL import Image
+from scipy.ndimage.filters import gaussian_filter
+from scipy.ndimage import map_coordinates
+
+
def image_loader(path):
    """Open the image file at *path* and return it as an RGB PIL image."""
    with open(path, "rb") as stream, Image.open(stream) as decoded:
        return decoded.convert("RGB")
+
+
class CenterCrop(object):
    """Center-crop a numpy array to a fixed (h0, w0) spatial size.

    Works for both grayscale ``(H, W)`` and channel-last ``(H, W, C)``
    arrays. Assumes the input is at least as large as the target size
    in both spatial dimensions (no padding is performed).
    """

    def __init__(self, image_size):
        # image_size is (height, width)
        self.h0, self.w0 = image_size

    def __call__(self, input_numpy):
        # Spatial dims come first in both supported layouts, so one
        # slicing expression covers 2-D and 3-D inputs. The original
        # allocated a zeros buffer that was immediately overwritten by
        # the slice assignment; that dead allocation is removed here.
        h, w = input_numpy.shape[:2]
        top = (h - self.h0) // 2
        left = (w - self.w0) // 2
        return input_numpy[top : top + self.h0, left : left + self.w0]
+
+
class VideosDataset(torch.utils.data.Dataset):
    """Dataset of consecutive video-frame pairs with forward optical flow,
    an occlusion mask, and candidate reference images.

    Each annotation-CSV row lists: video name, previous frame, current
    frame, forward-flow file, mask file, then ``num_refs`` ImageNet
    reference file names. With probability ``real_reference_probability``
    a random ImageNet reference is returned; otherwise one of the two
    ground-truth frames is used as a "self" reference.
    """

    def __init__(
        self,
        video_data_root,
        flow_data_root,
        mask_data_root,
        imagenet_folder,
        annotation_file_path,
        image_size,
        num_refs=5,  # max = 20
        image_transform=None,
        real_reference_probability=1,
        nonzero_placeholder_probability=0.5,
    ):
        self.video_data_root = video_data_root
        self.flow_data_root = flow_data_root
        self.mask_data_root = mask_data_root
        self.imagenet_folder = imagenet_folder
        self.image_transform = image_transform
        self.CenterPad = CenterPad(image_size)
        self.ToTensor = ToTensor()
        self.CenterCrop = transforms.CenterCrop(image_size)
        # Pad to a square with the target height; flows receive a matching
        # pad/resize so they stay aligned with the frames.
        self.SquaredPadding = SquaredPadding(image_size[0])
        self.SquaredPaddingFlow = SquaredPaddingFlow()
        self.ResizeFlow = ResizeFlow(image_size)
        self.num_refs = num_refs

        assert os.path.exists(self.video_data_root), "find no video dataroot"
        assert os.path.exists(self.flow_data_root), "find no flow dataroot"
        assert os.path.exists(self.imagenet_folder), "find no imagenet folder"
        self.image_pairs = pd.read_csv(annotation_file_path)
        self.real_len = len(self.image_pairs)
        self.real_reference_probability = real_reference_probability
        self.nonzero_placeholder_probability = nonzero_placeholder_probability
        print("##### parsing image pairs in %s: %d pairs #####" % (video_data_root, self.__len__()))

    def __getitem__(self, index):
        """Load one training sample.

        Returns [I1, I2, reference, flow_forward, mask, placeholder,
        self_ref_flag, prev_name, cur_name, reference_path]. On any read
        error the sample is skipped and a random other index is retried.
        """
        # NOTE(review): this 10-name unpacking only matches the slice width
        # when num_refs == 5; other values would break it — confirm intended.
        (
            video_name,
            prev_frame,
            current_frame,
            flow_forward_name,
            mask_name,
            reference_1_name,
            reference_2_name,
            reference_3_name,
            reference_4_name,
            reference_5_name
        ) = self.image_pairs.iloc[index, :5 + self.num_refs].values.tolist()

        video_path = os.path.join(self.video_data_root, video_name)
        flow_path = os.path.join(self.flow_data_root, video_name)
        mask_path = os.path.join(self.mask_data_root, video_name)

        prev_frame_path = os.path.join(video_path, prev_frame)
        current_frame_path = os.path.join(video_path, current_frame)

        reference_1_path = os.path.join(self.imagenet_folder, reference_1_name)
        reference_2_path = os.path.join(self.imagenet_folder, reference_2_name)
        reference_3_path = os.path.join(self.imagenet_folder, reference_3_name)
        reference_4_path = os.path.join(self.imagenet_folder, reference_4_name)
        reference_5_path = os.path.join(self.imagenet_folder, reference_5_name)

        flow_forward_path = os.path.join(flow_path, flow_forward_name)
        mask_path = os.path.join(mask_path, mask_name)

        # The two ground-truth frames double as candidate "self" references.
        reference_gt_1_path = prev_frame_path
        reference_gt_2_path = current_frame_path
        try:
            I1 = Image.open(prev_frame_path).convert("RGB")
            I2 = Image.open(current_frame_path).convert("RGB")

            I_reference_video = Image.open(random.choice([reference_gt_1_path, reference_gt_2_path])).convert("RGB")
            reference_path = random.choice(
                [reference_1_path, reference_2_path, reference_3_path, reference_4_path, reference_5_path]
            )
            I_reference_video_real = Image.open(reference_path).convert("RGB")

            flow_forward = read_flow(flow_forward_path)  # numpy

            mask = Image.open(mask_path)  # PIL
            # Binarize the mask: near-white pixels (>= 240) become 1.
            mask = np.array(mask)
            mask[mask < 240] = 0
            mask[mask >= 240] = 1
            mask = self.ToTensor(mask)

            # transform
            I1 = self.image_transform(I1)
            I2 = self.image_transform(I2)
            I_reference_video = self.image_transform(self.CenterPad(I_reference_video))
            I_reference_video_real = self.image_transform(self.CenterPad(I_reference_video_real))
            flow_forward = self.SquaredPaddingFlow(self.ResizeFlow(torch.tensor(flow_forward)))

            mask = self.SquaredPadding(mask, return_pil=False, return_paddings=False)

            if np.random.random() < self.real_reference_probability:
                I_reference_output = I_reference_video_real  # Use reference from imagenet
                placeholder = torch.zeros_like(I1)
                self_ref_flag = torch.zeros_like(I1)
            else:
                I_reference_output = I_reference_video  # Use reference from ground truth
                placeholder = I2 if np.random.random() < self.nonzero_placeholder_probability else torch.zeros_like(I1)
                self_ref_flag = torch.ones_like(I1)

            outputs = [
                I1,
                I2,
                I_reference_output,
                flow_forward,
                mask,
                placeholder,
                self_ref_flag,
                video_name + prev_frame,
                video_name + current_frame,
                reference_path
            ]

        except Exception as e:
            # BUGFIX: self.image_pairs[index] performed a pandas *column*
            # lookup, raising KeyError inside this handler and masking the
            # real error; .iloc selects the offending row as intended.
            print("error in reading image pair: %s" % str(self.image_pairs.iloc[index]))
            print(e)
            # Best-effort retry with a random other sample. NOTE(review):
            # this can recurse indefinitely if the whole dataset is broken.
            return self.__getitem__(np.random.randint(0, len(self.image_pairs)))
        return outputs

    def __len__(self):
        """Number of annotated frame pairs."""
        return len(self.image_pairs)
+
+
def parse_imgnet_images(pairs_file):
    """Parse a '|'-separated pairs file into a list of (image_a, image_b) tuples."""
    with open(pairs_file, "r") as handle:
        return [
            (fields[0], fields[1])
            for fields in (raw.strip().split("|") for raw in handle.readlines())
        ]
+
+
class VideosDataset_ImageNet(data.Dataset):
    """Synthetic "video" dataset built from pairs of ImageNet images.

    The second frame is produced by warping the first with a smooth random
    flow field (gaussian-filtered noise), so the correspondence between the
    two frames is known by construction. Returns the same 10-element sample
    layout as VideosDataset, with "holder" in place of a frame name.
    """

    def __init__(
        self,
        imagenet_data_root,
        pairs_file,
        image_size,
        transforms_imagenet=None,
        distortion_level=3,
        brightnessjitter=0,
        nonzero_placeholder_probability=0.5,
        extra_reference_transform=None,
        real_reference_probability=1,
        distortion_device='cpu'
    ):
        # NOTE(review): transforms_imagenet and extra_reference_transform are
        # assumed to be lists of transforms — they are both wrapped in
        # Compose here AND iterated item-by-item in __getitem__; the None
        # defaults would fail. Confirm callers always pass lists.
        self.imagenet_data_root = imagenet_data_root
        self.image_pairs = pd.read_csv(pairs_file, names=['i1', 'i2'])
        self.transforms_imagenet_raw = transforms_imagenet
        self.extra_reference_transform = transforms.Compose(extra_reference_transform)
        self.real_reference_probability = real_reference_probability
        self.transforms_imagenet = transforms.Compose(transforms_imagenet)
        self.image_size = image_size
        self.real_len = len(self.image_pairs)
        self.distortion_level = distortion_level
        # Warp backend chosen once at construction: CPU (scipy-based) or GPU.
        self.distortion_transform = Distortion_with_flow_cpu() if distortion_device == 'cpu' else Distortion_with_flow_gpu()
        self.brightnessjitter = brightnessjitter
        self.flow_transform = transforms.Compose([CenterPadCrop_numpy(self.image_size), ToTensor()])
        self.nonzero_placeholder_probability = nonzero_placeholder_probability
        self.ToTensor = ToTensor()
        self.Normalize = Normalize()
        print("##### parsing imageNet pairs in %s: %d pairs #####" % (imagenet_data_root, self.__len__()))

    def __getitem__(self, index):
        """Build one synthetic sample: I1 plus a warped copy I2 with the
        generating flow, a validity mask, and a reference image."""
        pa, pb = self.image_pairs.iloc[index].values.tolist()
        # Randomly swap so either image of the pair can play the content role.
        if np.random.random() > 0.5:
            pa, pb = pb, pa

        image_a_path = os.path.join(self.imagenet_data_root, pa)
        image_b_path = os.path.join(self.imagenet_data_root, pb)

        I1 = image_loader(image_a_path)
        I2 = I1
        I_reference_video = I1
        I_reference_video_real = image_loader(image_b_path)
        # print("i'm here get image 2")
        # generate the flow
        alpha = np.random.rand() * self.distortion_level
        distortion_range = 50
        random_state = np.random.RandomState(None)
        shape = self.image_size[0], self.image_size[1]
        # dx: flow on the vertical direction; dy: flow on the horizontal direction
        forward_dx = (
            gaussian_filter((random_state.rand(*shape) * 2 - 1), distortion_range, mode="constant", cval=0) * alpha * 1000
        )
        forward_dy = (
            gaussian_filter((random_state.rand(*shape) * 2 - 1), distortion_range, mode="constant", cval=0) * alpha * 1000
        )
        # print("i'm here get image 3")
        # Apply the transform pipeline to I1, snapshotting the raw image just
        # before the RGB->Lab step (the raw copy feeds the mask check below).
        for transform in self.transforms_imagenet_raw:
            if type(transform) is RGB2Lab:
                I1_raw = I1
            I1 = transform(I1)
        # Same pipeline for I2, but warp it with the random flow right before
        # the Lab conversion so the distortion happens in RGB space.
        for transform in self.transforms_imagenet_raw:
            if type(transform) is RGB2Lab:
                I2 = self.distortion_transform(I2, forward_dx, forward_dy)
                I2_raw = I2
                I2 = transform(I2)
        # print("i'm here get image 4")
        # Jitter only the luminance channel of I2.
        I2[0:1, :, :] = I2[0:1, :, :] + torch.randn(1) * self.brightnessjitter

        I_reference_video = self.extra_reference_transform(I_reference_video)
        for transform in self.transforms_imagenet_raw:
            I_reference_video = transform(I_reference_video)

        I_reference_video_real = self.transforms_imagenet(I_reference_video_real)
        # print("i'm here get image 5")
        flow_forward_raw = np.stack((forward_dy, forward_dx), axis=-1)
        flow_forward = self.flow_transform(flow_forward_raw)

        # update the mask for the pixels on the border
        grid_x, grid_y = np.meshgrid(np.arange(self.image_size[0]), np.arange(self.image_size[1]), indexing="ij")
        grid = np.stack((grid_y, grid_x), axis=-1)
        grid_warp = grid + flow_forward_raw
        location_y = grid_warp[:, :, 0].flatten()
        location_x = grid_warp[:, :, 1].flatten()
        I2_raw = np.array(I2_raw).astype(float)
        # Sample the warped image back at the flowed coordinates; cval=-1
        # marks samples that fell outside the image border.
        I21_r = map_coordinates(I2_raw[:, :, 0], np.stack((location_x, location_y)), cval=-1).reshape(
            (self.image_size[0], self.image_size[1])
        )
        I21_g = map_coordinates(I2_raw[:, :, 1], np.stack((location_x, location_y)), cval=-1).reshape(
            (self.image_size[0], self.image_size[1])
        )
        I21_b = map_coordinates(I2_raw[:, :, 2], np.stack((location_x, location_y)), cval=-1).reshape(
            (self.image_size[0], self.image_size[1])
        )
        I21_raw = np.stack((I21_r, I21_g, I21_b), axis=2)
        # Mask out out-of-border samples and pixels whose round-trip color
        # differs too much from the original (threshold 50 summed over RGB).
        mask = np.ones((self.image_size[0], self.image_size[1]))
        mask[(I21_raw[:, :, 0] == -1) & (I21_raw[:, :, 1] == -1) & (I21_raw[:, :, 2] == -1)] = 0
        mask[abs(I21_raw - I1_raw).sum(axis=-1) > 50] = 0
        mask = self.ToTensor(mask)
        # print("i'm here get image 6")
        if np.random.random() < self.real_reference_probability:
            I_reference_output = I_reference_video_real
            placeholder = torch.zeros_like(I1)
            self_ref_flag = torch.zeros_like(I1)
        else:
            I_reference_output = I_reference_video
            placeholder = I2 if np.random.random() < self.nonzero_placeholder_probability else torch.zeros_like(I1)
            self_ref_flag = torch.ones_like(I1)

        # except Exception as e:
        #     if combo_path is not None:
        #         print("problem in ", combo_path)
        #     print("problem in, ", image_a_path)
        #     print(e)
        #     return self.__getitem__(np.random.randint(0, len(self.image_pairs)))
        # print("i'm here get image 7")
        return [I1, I2, I_reference_output, flow_forward, mask, placeholder, self_ref_flag, "holder", pb, pa]

    def __len__(self):
        """Number of ImageNet pairs."""
        return len(self.image_pairs)
\ No newline at end of file
diff --git a/src/data/functional.py b/src/data/functional.py
new file mode 100644
index 0000000000000000000000000000000000000000..14aa7882d3dfca1ba6649d0b7fdb2c443e3b7f20
--- /dev/null
+++ b/src/data/functional.py
@@ -0,0 +1,84 @@
+from __future__ import division
+
+import torch
+import numbers
+import collections
+import numpy as np
+from PIL import Image, ImageOps
+
+
def _is_pil_image(img):
    """Return True iff *img* is a PIL Image instance."""
    return isinstance(img, Image.Image)
+
+
+def _is_tensor_image(img):
+ return torch.is_tensor(img) and img.ndimension() == 3
+
+
+def _is_numpy_image(img):
+ return isinstance(img, np.ndarray) and (img.ndim in {2, 3})
+
+
def to_mytensor(pic):
    """Convert a PIL Image / numpy array (H x W [x C]) to a float CHW tensor.

    Unlike torchvision's ``to_tensor``, pixel values are NOT divided by 255.
    A 2-D input gains a singleton channel dimension.
    """
    pic_arr = np.array(pic)
    if pic_arr.ndim == 2:
        pic_arr = pic_arr[..., np.newaxis]  # H x W -> H x W x 1
    img = torch.from_numpy(pic_arr.transpose((2, 0, 1)))
    # BUG FIX: isinstance(img, torch.FloatTensor) is deprecated and CPU-only;
    # compare dtypes instead. Same effect: cast anything non-float32.
    if img.dtype != torch.float32:
        return img.float()  # no normalize .div(255)
    return img
+
+
def normalize(tensor, mean, std):
    """In-place channel-wise normalization: (x - mean) / std.

    Single-channel tensors take scalar *mean*/*std*; multi-channel tensors
    take per-channel sequences. Returns the (mutated) input tensor.
    """
    if not _is_tensor_image(tensor):
        raise TypeError("tensor is not a torch image.")
    if tensor.size(0) == 1:
        tensor.sub_(mean).div_(std)
        return tensor
    for channel, m, s in zip(tensor, mean, std):
        channel.sub_(m).div_(s)
    return tensor
+
+
def resize(img, size, interpolation=Image.BILINEAR):
    """Resize a PIL image.

    size: an (h, w) pair, or an int giving the target length of the
    shorter edge (aspect ratio preserved).

    Raises:
        TypeError: for non-PIL images or malformed size arguments.
    """
    # BUG FIX: collections.Iterable was removed in Python 3.10; use the
    # collections.abc module (local import keeps the file's imports intact).
    import collections.abc

    if not _is_pil_image(img):
        raise TypeError("img should be PIL Image. Got {}".format(type(img)))
    if not isinstance(size, int) and (not isinstance(size, collections.abc.Iterable) or len(size) != 2):
        raise TypeError("Got inappropriate size arg: {}".format(size))

    if not isinstance(size, int):
        # size is (h, w) but PIL's resize wants (w, h).
        return img.resize(size[::-1], interpolation)

    w, h = img.size
    if (w <= h and w == size) or (h <= w and h == size):
        return img  # shorter edge already matches: nothing to do
    if w < h:
        ow = size
        oh = int(round(size * h / w))
    else:
        oh = size
        ow = int(round(size * w / h))
    return img.resize((ow, oh), interpolation)
+
+
def pad(img, padding, fill=0):
    """Pad a PIL image on its borders via ImageOps.expand.

    padding: int (all sides) or a 2/4-element tuple; fill: border fill value.

    Raises:
        TypeError: for non-PIL images or malformed padding/fill arguments.
        ValueError: when a tuple padding is not of length 2 or 4.
    """
    # BUG FIX: collections.Sequence was removed in Python 3.10; use the
    # collections.abc module (local import keeps the file's imports intact).
    import collections.abc

    if not _is_pil_image(img):
        raise TypeError("img should be PIL Image. Got {}".format(type(img)))

    if not isinstance(padding, (numbers.Number, tuple)):
        raise TypeError("Got inappropriate padding arg")
    if not isinstance(fill, (numbers.Number, str, tuple)):
        raise TypeError("Got inappropriate fill arg")

    if isinstance(padding, collections.abc.Sequence) and len(padding) not in [2, 4]:
        raise ValueError("Padding must be an int or a 2, or 4 element tuple, not a " + "{} element tuple".format(len(padding)))

    return ImageOps.expand(img, border=padding, fill=fill)
+
+
def crop(img, i, j, h, w):
    """Crop a PIL image: top-left corner at row *i*, column *j*, size h x w."""
    if not _is_pil_image(img):
        raise TypeError("img should be PIL Image. Got {}".format(type(img)))
    left, upper = j, i
    return img.crop((left, upper, left + w, upper + h))
diff --git a/src/data/transforms.py b/src/data/transforms.py
new file mode 100644
index 0000000000000000000000000000000000000000..aafd87a4cfad80c2ea0729257ce8d98fe0b9b423
--- /dev/null
+++ b/src/data/transforms.py
@@ -0,0 +1,348 @@
+from __future__ import division
+
+import collections
+import numbers
+import random
+
+import torch
+from PIL import Image
+from skimage import color
+
+import src.data.functional as F
+
+__all__ = [
+ "Compose",
+ "Concatenate",
+ "ToTensor",
+ "Normalize",
+ "Resize",
+ "Scale",
+ "CenterCrop",
+ "Pad",
+ "RandomCrop",
+ "RandomHorizontalFlip",
+ "RandomVerticalFlip",
+ "RandomResizedCrop",
+ "RandomSizedCrop",
+ "FiveCrop",
+ "TenCrop",
+ "RGB2Lab",
+]
+
+
+def CustomFunc(inputs, func, *args, **kwargs):
+ im_l = func(inputs[0], *args, **kwargs)
+ im_ab = func(inputs[1], *args, **kwargs)
+ warp_ba = func(inputs[2], *args, **kwargs)
+ warp_aba = func(inputs[3], *args, **kwargs)
+ im_gbl_ab = func(inputs[4], *args, **kwargs)
+ bgr_mc_im = func(inputs[5], *args, **kwargs)
+
+ layer_data = [im_l, im_ab, warp_ba, warp_aba, im_gbl_ab, bgr_mc_im]
+
+ for l in range(5):
+ layer = inputs[6 + l]
+ err_ba = func(layer[0], *args, **kwargs)
+ err_ab = func(layer[1], *args, **kwargs)
+
+ layer_data.append([err_ba, err_ab])
+
+ return layer_data
+
+
class Compose(object):
    """Chain several transforms; each receives the previous one's output.

    Args:
        transforms (list of ``Transform`` objects): transforms to compose.

    Example:
        >>> transforms.Compose([
        >>>     transforms.CenterCrop(10),
        >>>     transforms.ToTensor(),
        >>> ])
    """

    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, inputs):
        out = inputs
        for transform in self.transforms:
            out = transform(out)
        return out
+
+
class Concatenate(object):
    """Pack a transformed sample into the final training tuple.

    Input layout: [im_l, im_ab, warp_ba, warp_aba, im_glb_ab, bgr_mc_im,
                   [err_ba, err_ab] x 5]
    Output: (im_l, cerr_ba, warp_ba, warp_aba, im_glb_ab, bgr_mc_im,
             im_ab, cerr_ab), with the five error maps concatenated on
    dim 0 and bgr_mc_im reordered BGR -> RGB.
    """

    def __call__(self, inputs):
        im_l, im_ab, warp_ba, warp_aba, im_glb_ab, bgr_mc_im = inputs[:6]
        bgr_mc_im = bgr_mc_im[[2, 1, 0], ...]  # BGR -> RGB channel order

        err_ba_maps = [inputs[6 + l][0] for l in range(5)]
        err_ab_maps = [inputs[6 + l][1] for l in range(5)]
        cerr_ba = torch.cat(err_ba_maps, 0)
        cerr_ab = torch.cat(err_ab_maps, 0)

        return (im_l, cerr_ba, warp_ba, warp_aba, im_glb_ab, bgr_mc_im, im_ab, cerr_ab)
+
+
class ToTensor(object):
    """Convert every PIL Image / ndarray in the sample to a float CHW tensor.

    Uses F.to_mytensor, which (unlike torchvision) does NOT rescale values
    to [0, 1].
    """

    def __call__(self, inputs):
        """
        Args:
            inputs: layered sample (see CustomFunc for the layout).

        Returns:
            The sample with every image converted to a tensor.
        """
        return CustomFunc(inputs, F.to_mytensor)
+
+
class Normalize(object):
    """Shift every element of the sample into its working range (in place).

    - L channels are shifted by -50 (raw range [0, 100]).
    - ab channels pass through unchanged (mean 0, std 1; raw [-100, 100]).
    - bgr_mc_im has the VGG ImageNet channel means subtracted.
    - The five error-map pairs are mapped from [0, 255] via (x - 127) / 2.
    All F.normalize calls mutate their tensors in place.
    """

    def __call__(self, inputs):
        """
        Args:
            inputs: layered sample of tensors (see CustomFunc for the layout).

        Returns:
            The normalized sample in the same layout.
        """
        im_l = F.normalize(inputs[0], 50, 1)  # [0, 100]
        im_ab = F.normalize(inputs[1], (0, 0), (1, 1))  # [-100, 100]

        # warp_ba / warp_aba carry full Lab: normalize L and ab planes apart.
        for idx in (2, 3):
            inputs[idx][0:1, :, :] = F.normalize(inputs[idx][0:1, :, :], 50, 1)
            inputs[idx][1:3, :, :] = F.normalize(inputs[idx][1:3, :, :], (0, 0), (1, 1))
        warp_ba = inputs[2]
        warp_aba = inputs[3]

        im_gbl_ab = F.normalize(inputs[4], (0, 0), (1, 1))  # [-100, 100]
        bgr_mc_im = F.normalize(inputs[5], (123.68, 116.78, 103.938), (1, 1, 1))

        layer_data = [im_l, im_ab, warp_ba, warp_aba, im_gbl_ab, bgr_mc_im]
        for layer in inputs[6:11]:
            # error maps: [0, 255] -> roughly [-63.5, 64]
            layer_data.append([F.normalize(layer[0], 127, 2), F.normalize(layer[1], 127, 2)])
        return layer_data
+
+
class Resize(object):
    """Resize every image of the sample to the given size.

    Args:
        size (sequence or int): Desired output size. If size is a sequence like
            (h, w), output size will be matched to this. If size is an int,
            the smaller edge of the image will be matched to this number,
            i.e. if height > width the image is rescaled to
            (size * height / width, size).
        interpolation (int, optional): Desired interpolation. Default is
            ``PIL.Image.BILINEAR``.
    """

    def __init__(self, size, interpolation=Image.BILINEAR):
        # BUG FIX: collections.Iterable was removed in Python 3.10; use the
        # collections.abc module (local import keeps the file's imports intact).
        import collections.abc

        assert isinstance(size, int) or (isinstance(size, collections.abc.Iterable) and len(size) == 2)
        self.size = size
        self.interpolation = interpolation

    def __call__(self, inputs):
        """
        Args:
            inputs: layered sample of PIL Images (see CustomFunc).

        Returns:
            The sample with every image rescaled via F.resize.
        """
        return CustomFunc(inputs, F.resize, self.size, self.interpolation)
+
+
class RandomCrop(object):
    """Crop all images of the sample at one shared random location.

    Args:
        size (sequence or int): Desired output size of the crop. An int
            gives a square (size, size) crop.
        padding (int or sequence, optional): Optional border padding applied
            before cropping. Default 0 (no padding); a length-4 sequence pads
            left, top, right, bottom respectively.
    """

    def __init__(self, size, padding=0):
        self.size = (int(size), int(size)) if isinstance(size, numbers.Number) else size
        self.padding = padding

    @staticmethod
    def get_params(img, output_size):
        """Pick a random crop window.

        Args:
            img (PIL Image): image to be cropped.
            output_size (tuple): expected (th, tw) of the crop.

        Returns:
            tuple: (i, j, h, w) to pass to ``crop``.
        """
        w, h = img.size
        th, tw = output_size
        if w == tw and h == th:
            return 0, 0, h, w  # already the target size: no-op window
        top = random.randint(0, h - th)
        left = random.randint(0, w - tw)
        return top, left, th, tw

    def __call__(self, inputs):
        """Pad (optionally) then crop every image with one shared window."""
        if self.padding > 0:
            inputs = CustomFunc(inputs, F.pad, self.padding)
        i, j, h, w = self.get_params(inputs[0], self.size)
        return CustomFunc(inputs, F.crop, i, j, h, w)
+
+
class CenterCrop(object):
    """Crop all images of the sample at the center.

    Args:
        size (sequence or int): Desired output size of the crop. An int
            gives a square (size, size) crop.
        padding (int or sequence, optional): Optional border padding applied
            before cropping. Default 0 (no padding); a length-4 sequence pads
            left, top, right, bottom respectively.
    """

    def __init__(self, size, padding=0):
        self.size = (int(size), int(size)) if isinstance(size, numbers.Number) else size
        self.padding = padding

    @staticmethod
    def get_params(img, output_size):
        """Compute the centered crop window.

        Args:
            img (PIL Image): image to be cropped.
            output_size (tuple): expected (th, tw) of the crop.

        Returns:
            tuple: (i, j, h, w) to pass to ``crop``.
        """
        w, h = img.size
        th, tw = output_size
        if w == tw and h == th:
            return 0, 0, h, w  # already the target size: no-op window
        return (h - th) // 2, (w - tw) // 2, th, tw

    def __call__(self, inputs):
        """Pad (optionally) then center-crop every image identically."""
        if self.padding > 0:
            inputs = CustomFunc(inputs, F.pad, self.padding)
        i, j, h, w = self.get_params(inputs[0], self.size)
        return CustomFunc(inputs, F.crop, i, j, h, w)
+
+
class RandomHorizontalFlip(object):
    """Horizontally flip every image of the sample with probability 0.5."""

    def __call__(self, inputs):
        """
        Args:
            inputs: layered sample of PIL Images (see CustomFunc).

        Returns:
            The sample, with all images flipped together or none at all.
        """
        if random.random() < 0.5:
            # BUG FIX: src.data.functional defines no `hflip`, so the original
            # `CustomFunc(inputs, F.hflip)` raised AttributeError whenever the
            # flip branch was taken. Flip via PIL directly instead.
            return CustomFunc(inputs, lambda img: img.transpose(Image.FLIP_LEFT_RIGHT))
        return inputs
+
+
class RGB2Lab(object):
    """Convert the sample's RGB images to Lab and split them into channels.

    BUG FIX: the original defined ``__call__`` twice — the first definition
    contained only a docstring; the duplicate was dead/ambiguous code. The
    two are merged into a single method here.
    """

    def __call__(self, inputs):
        """Mutate *inputs* in place and return it:

        inputs[0] -> L channel of the main image,
        inputs[1] -> ab channels of the main image,
        inputs[2]/inputs[3] -> full Lab of the warped images,
        inputs[4] -> ab channels of the global reference.
        """
        image_lab = color.rgb2lab(inputs[0])
        warp_ba_lab = color.rgb2lab(inputs[2])
        warp_aba_lab = color.rgb2lab(inputs[3])
        im_gbl_lab = color.rgb2lab(inputs[4])

        inputs[0] = image_lab[:, :, :1]  # l channel
        inputs[1] = image_lab[:, :, 1:]  # ab channel
        inputs[2] = warp_ba_lab  # lab channel
        inputs[3] = warp_aba_lab  # lab channel
        inputs[4] = im_gbl_lab[:, :, 1:]  # ab channel

        return inputs
diff --git a/src/losses.py b/src/losses.py
new file mode 100644
index 0000000000000000000000000000000000000000..dd78f9226bdee39354fa8fb31a05e4aefeb9e55d
--- /dev/null
+++ b/src/losses.py
@@ -0,0 +1,277 @@
+import torch
+import torch.nn as nn
+from src.utils import feature_normalize
+
+
+### START### CONTEXTUAL LOSS ####
class ContextualLoss(nn.Module):
    """Contextual loss between two feature maps.

    input is Al, Bl, channel = 1, range ~ [0, 255]
    """

    def __init__(self):
        super(ContextualLoss, self).__init__()

    def forward(self, X_features, Y_features, h=0.1, feature_centering=True):
        """Compute the per-sample negative log contextual similarity.

        X_features, Y_features: (B, C, H, W) feature maps (or vectors).
        h: bandwidth of the affinity kernel.
        """
        bsize = X_features.shape[0]
        depth = X_features.shape[1]

        if feature_centering:
            # Center BOTH maps on Y's per-channel spatial mean.
            y_mean = Y_features.view(bsize, depth, -1).mean(dim=-1).unsqueeze(dim=-1).unsqueeze(dim=-1)
            X_features = X_features - y_mean
            Y_features = Y_features - y_mean
        X_features = feature_normalize(X_features).view(bsize, depth, -1)  # B x C x HW
        Y_features = feature_normalize(Y_features).view(bsize, depth, -1)  # B x C x HW

        # Cosine distance = 1 - similarity, pairwise over spatial positions.
        d = 1 - torch.matmul(X_features.permute(0, 2, 1), Y_features)  # B x HW x HW

        # Relative distances: d_ij / (min_k d_ik + eps).
        d_norm = d / (torch.min(d, dim=-1, keepdim=True)[0] + 1e-5)

        # Row-normalized pairwise affinities.
        w = torch.exp((1 - d_norm) / h)
        A_ij = w / torch.sum(w, dim=-1, keepdim=True)

        # Best match over X positions (dim=1), averaged over the rest.
        CX = torch.mean(torch.max(A_ij, dim=1)[0], dim=-1)
        return -torch.log(CX)
+
+
class ContextualLoss_forward(nn.Module):
    """Forward-direction contextual loss.

    input is Al, Bl, channel = 1, range ~ [0, 255]

    Identical to ContextualLoss except that the final aggregation takes the
    max over dim=-1 and the mean over dim=1.
    """

    def __init__(self):
        super(ContextualLoss_forward, self).__init__()

    def forward(self, X_features, Y_features, h=0.1, feature_centering=True):
        """Compute the per-sample negative log contextual similarity.

        X_features, Y_features: (B, C, H, W) feature maps (or vectors).
        h: bandwidth of the affinity kernel.
        """
        bsize = X_features.shape[0]
        depth = X_features.shape[1]

        if feature_centering:
            # Center BOTH maps on Y's per-channel spatial mean.
            y_mean = Y_features.view(bsize, depth, -1).mean(dim=-1).unsqueeze(dim=-1).unsqueeze(dim=-1)
            X_features = X_features - y_mean
            Y_features = Y_features - y_mean
        X_features = feature_normalize(X_features).view(bsize, depth, -1)  # B x C x HW
        Y_features = feature_normalize(Y_features).view(bsize, depth, -1)  # B x C x HW

        # Cosine distance = 1 - similarity, pairwise over spatial positions.
        d = 1 - torch.matmul(X_features.permute(0, 2, 1), Y_features)  # B x HW x HW

        # Relative distances: d_ij / (min_k d_ik + eps).
        d_norm = d / (torch.min(d, dim=-1, keepdim=True)[0] + 1e-5)

        # Row-normalized pairwise affinities.
        w = torch.exp((1 - d_norm) / h)
        A_ij = w / torch.sum(w, dim=-1, keepdim=True)

        # Best match over Y positions (dim=-1), averaged over X positions.
        CX = torch.mean(torch.max(A_ij, dim=-1)[0], dim=1)
        return -torch.log(CX)
+
+
+### END### CONTEXTUAL LOSS ####
+
+
+##########################
+
+
def mse_loss_fn(input, target=0):
    """Mean squared error; *target* may be a tensor or a scalar (default 0)."""
    diff = input - target
    return (diff * diff).mean()
+
+
+### START### PERCEPTUAL LOSS ###
def Perceptual_loss(domain_invariant, weight_perceptual):
    """Build a perceptual-loss callable over relu5_1 features.

    domain_invariant=True compares instance-normalized features (extra
    1e5 * 0.2 scaling); otherwise a plain weighted MSE against the detached
    target features.
    """
    instancenorm = nn.InstanceNorm2d(512, affine=False)

    def loss_fn(A_relu5_1, predict_relu5_1):
        target = A_relu5_1.detach()  # no gradient through the target branch
        if domain_invariant:
            return mse_loss_fn(instancenorm(predict_relu5_1), instancenorm(target)) * weight_perceptual * 1e5 * 0.2
        return mse_loss_fn(predict_relu5_1, target) * weight_perceptual

    return loss_fn
+
+
+### END### PERCEPTUAL LOSS ###
+
+
def l1_loss_fn(input, target=0):
    """Mean absolute error; *target* may be a tensor or a scalar (default 0)."""
    return (input - target).abs().mean()
+
+
+### END#################
+
+
+### START### ADVERSIAL LOSS ###
def generator_loss_fn(real_data_lab, fake_data_lab, discriminator, weight_gan, device):
    """Relativistic average LSGAN generator loss.

    Returns a zero tensor on *device* when weight_gan == 0, so callers can
    always add the result into a total loss.
    """
    if not weight_gan > 0:
        return torch.Tensor([0]).to(device)

    pred_fake, _ = discriminator(fake_data_lab)
    pred_real, _ = discriminator(real_data_lab)
    ones = torch.ones_like(pred_real)

    real_term = torch.mean((pred_real - torch.mean(pred_fake) + ones) ** 2)
    fake_term = torch.mean((pred_fake - torch.mean(pred_real) - ones) ** 2)
    return (real_term + fake_term) / 2 * weight_gan
+
+
def discriminator_loss_fn(real_data_lab, fake_data_lab, discriminator):
    """Relativistic average LSGAN discriminator loss.

    Both inputs are detached before scoring so the generator receives no
    gradient from this term.
    """
    pred_fake, _ = discriminator(fake_data_lab.detach())
    pred_real, _ = discriminator(real_data_lab.detach())
    ones = torch.ones_like(pred_real)

    real_term = torch.mean((pred_real - torch.mean(pred_fake) - ones) ** 2)
    fake_term = torch.mean((pred_fake - torch.mean(pred_real) + ones) ** 2)
    return (real_term + fake_term) / 2
+
+
+### END### ADVERSIAL LOSS #####
+
+
def consistent_loss_fn(
    I_current_lab_predict,
    I_last_ab_predict,
    I_current_nonlocal_lab_predict,
    I_last_nonlocal_lab_predict,
    flow_forward,
    mask,
    warping_layer,
    weight_consistent=0.02,
    weight_nonlocal_consistent=0.0,
    device="cuda",
):
    """Temporal consistency loss between the warped current frame and the last.

    Each term is evaluated only when its weight is non-zero; disabled terms
    contribute a zero tensor on *device*.
    """

    def masked_mse(prediction, target):
        squared = (prediction - target) ** 2
        return (squared * mask.expand_as(squared)).mean()

    def zero():
        return torch.Tensor([0]).to(device)

    if weight_consistent:
        warped_lab = warping_layer(I_current_lab_predict, flow_forward)
        consistent_loss = masked_mse(warped_lab[:, 1:3, :, :], I_last_ab_predict) * weight_consistent
    else:
        consistent_loss = zero()

    if weight_nonlocal_consistent:
        warped_nonlocal = warping_layer(I_current_nonlocal_lab_predict, flow_forward)
        nonlocal_consistent_loss = (
            masked_mse(warped_nonlocal[:, 1:3, :, :], I_last_nonlocal_lab_predict[:, 1:3, :, :])
            * weight_nonlocal_consistent
        )
    else:
        nonlocal_consistent_loss = zero()

    return consistent_loss + nonlocal_consistent_loss
+
+
+### END### CONSISTENCY LOSS #####
+
+
+### START### SMOOTHNESS LOSS ###
def smoothness_loss_fn(
    I_current_l,
    I_current_lab,
    I_current_ab_predict,
    A_relu2_1,
    weighted_layer_color,
    nonlocal_weighted_layer,
    weight_smoothness=5.0,
    weight_nonlocal_smoothness=0.0,
    device="cuda",
):
    """Color-weighted (and optionally nonlocal) smoothness loss on predicted ab.

    Terms with a zero weight are skipped and replaced by a zero tensor on
    *device*.
    """

    def local_term(scale_factor=1.0):
        # Compare the prediction against its color-weighted local average.
        predicted_lab = torch.cat((I_current_l, I_current_ab_predict), dim=1)
        weighted_ab = weighted_layer_color(
            I_current_lab,
            predicted_lab,
            patch_size=3,
            alpha=10,
            scale_factor=scale_factor,
        )
        resized_ab = nn.functional.interpolate(I_current_ab_predict, scale_factor=scale_factor)
        return mse_loss_fn(resized_ab, weighted_ab) * weight_smoothness

    def nonlocal_term(scale_factor=0.25, alpha_nonlocal_smoothness=0.5):
        # Feature-similarity-weighted nonlocal average at reduced resolution.
        smooth_feature = feature_normalize(A_relu2_1)
        predicted_lab = torch.cat((I_current_l, I_current_ab_predict), dim=1)
        weighted_ab = nonlocal_weighted_layer(
            predicted_lab,
            smooth_feature.detach(),
            patch_size=3,
            alpha=alpha_nonlocal_smoothness,
            scale_factor=scale_factor,
        )
        resized_ab = nn.functional.interpolate(I_current_ab_predict, scale_factor=scale_factor)
        return mse_loss_fn(resized_ab, weighted_ab) * weight_nonlocal_smoothness

    def zero():
        return torch.Tensor([0]).to(device)

    smoothness_loss = local_term() if weight_smoothness else zero()
    nonlocal_smoothness_loss = nonlocal_term() if weight_nonlocal_smoothness else zero()

    return smoothness_loss + nonlocal_smoothness_loss
+
+
+### END### SMOOTHNESS LOSS #####
diff --git a/src/metrics.py b/src/metrics.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b16301da4ed3f7accc0946c6fafd61b82200cb5
--- /dev/null
+++ b/src/metrics.py
@@ -0,0 +1,95 @@
+# import os
+# import cv2
+# import glob
+# import numpy as np
+# from PIL import Image
+# from scipy.linalg import sqrtm
+
+# import torch
+# from torch import nn
+# import torchvision.transforms as transforms
+
+
+# def PSNR(gt_imgs, pred_imgs):
+# """
+# Calculate PSNR for a batch of images
+# Args:
+# gt_imgs (list): list of ground truth images
+# pred_imgs (list): list of predicted images
+# Returns:
+# float: average PSNR score
+# """
+# total_psnr = 0
+# for idx, (gt, pred) in enumerate(zip(gt_imgs, pred_imgs)):
+# assert gt.shape == pred.shape, f"Shape mismatch at {idx}: GT and prediction"
+# total_psnr += cv2.PSNR(gt, pred)
+# return total_psnr / len(pred_imgs)
+
+
+# class FrechetDistance:
+# def __init__(self, model_name="inception_v3", device="cpu"):
+# self.device = torch.device(device)
+# self.model = torch.hub.load("pytorch/vision:v0.10.0", model_name, pretrained=True) # .to(self.device)
+# self.model.fc = nn.Identity()
+# print(self.model)
+# self.model.eval()
+
+# self.transform = transforms.Compose(
+# [
+# transforms.ToTensor(),
+# transforms.Resize(299),
+# transforms.CenterCrop(299),
+# transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
+# ]
+# )
+
+# # Return parts to calculate in FID and FVD
+# def _calculate_act(self, images1, images2):
+# images1 = [self.transform(img) for img in images1]
+# images2 = [self.transform(img) for img in images2]
+
+# images1 = torch.stack(images1).to(self.device)
+# images2 = torch.stack(images2).to(self.device)
+
+# # Get activations
+# act1 = self.model(images1).detach().numpy()
+# act2 = self.model(images2).detach().numpy()
+
+# return act1, act2
+
+# def calculate_fid(self, images1, images2):
+# act1, act2 = self._calculate_act(images1, images2)
+
+# # calculate mean and covariance statistics
+# mu1, sigma1 = act1.mean(axis=0), np.cov(act1, rowvar=False)
+# mu2, sigma2 = act2.mean(axis=0), np.cov(act2, rowvar=False)
+
+# fid = (np.sum((mu1 - mu2) ** 2.0)) + np.trace(sigma1 + sigma2 - 2.0 * sqrtm(sigma1.dot(sigma2)))
+# return fid
+
+# def calculate_fvd(self, frames_list_folder1, frames_list_folder2, batch_size=2):
+# frames_list1 = glob.glob(os.path.join(frames_list_folder1, "*.png"))
+# frames_list2 = glob.glob(os.path.join(frames_list_folder2, "*.png"))
+
+# assert len(frames_list1) == len(frames_list2), "Number of frames in 2 folders must be equal"
+
+# all_act1, all_act2 = [], []
+# for i in range(0, len(frames_list1), batch_size):
+# batch1 = frames_list1[i : min(i + batch_size, len(frames_list1))]
+# batch2 = frames_list2[i : min(i + batch_size, len(frames_list1))]
+
+# img1 = [Image.open(img) for img in batch1]
+# img2 = [Image.open(img) for img in batch2]
+
+# act1, act2 = self._calculate_act(img1, img2)
+
+# all_act1.append(act1)
+# all_act2.append(act2)
+
+# all_act1 = np.concatenate(all_act1, axis=0)
+# all_act2 = np.concatenate(all_act2, axis=0)
+# print(all_act1.shape)
+# print(all_act1.shape)
+# fid = self.calculate_fid(all_act1, all_act2)
+
+# return np.sqrt(fid)
diff --git a/src/models/CNN/ColorVidNet.py b/src/models/CNN/ColorVidNet.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d14f395ae2bb89d0b7432c859179e3891e313b3
--- /dev/null
+++ b/src/models/CNN/ColorVidNet.py
@@ -0,0 +1,281 @@
+import torch
+import torch.nn as nn
+import torch.nn.parallel
+
+
class ColorVidNet(nn.Module):
    """U-Net-style ab-color predictor.

    Input: ic channels (gray L concatenated with warped-color / similarity /
    previous-frame channels). Output: 2 ab channels in [-128, 128]
    (tanh * 128) at the input resolution.

    FIX: the original first built BatchNorm2d / ConvTranspose2d layers and
    then immediately overwrote the same attributes with InstanceNorm2d /
    Upsample+Conv versions. The dead definitions are removed here; the final
    module set and state_dict keys are unchanged.
    """

    def __init__(self, ic):
        super(ColorVidNet, self).__init__()
        # Encoder: three stages, each downsampled by a depthwise 1x1 stride-2 conv.
        self.conv1_1 = nn.Sequential(nn.Conv2d(ic, 32, 3, 1, 1), nn.ReLU(), nn.Conv2d(32, 64, 3, 1, 1))
        self.conv1_2 = nn.Conv2d(64, 64, 3, 1, 1)
        self.conv1_2norm = nn.InstanceNorm2d(64)
        self.conv1_2norm_ss = nn.Conv2d(64, 64, 1, 2, bias=False, groups=64)  # learned subsample
        self.conv2_1 = nn.Conv2d(64, 128, 3, 1, 1)
        self.conv2_2 = nn.Conv2d(128, 128, 3, 1, 1)
        self.conv2_2norm = nn.InstanceNorm2d(128)
        self.conv2_2norm_ss = nn.Conv2d(128, 128, 1, 2, bias=False, groups=128)
        self.conv3_1 = nn.Conv2d(128, 256, 3, 1, 1)
        self.conv3_2 = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv3_3 = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv3_3norm = nn.InstanceNorm2d(256)
        self.conv3_3norm_ss = nn.Conv2d(256, 256, 1, 2, bias=False, groups=256)
        self.conv4_1 = nn.Conv2d(256, 512, 3, 1, 1)
        self.conv4_2 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv4_3 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv4_3norm = nn.InstanceNorm2d(512)
        # Bottleneck: dilated (dilation=2) convolutions keep the resolution.
        self.conv5_1 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv5_2 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv5_3 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv5_3norm = nn.InstanceNorm2d(512)
        self.conv6_1 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv6_2 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv6_3 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv6_3norm = nn.InstanceNorm2d(512)
        self.conv7_1 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv7_2 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv7_3 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv7_3norm = nn.InstanceNorm2d(512)
        # Decoder: nearest-upsample + conv, with encoder skip connections.
        self.conv8_1 = nn.Sequential(nn.Upsample(scale_factor=2, mode="nearest"), nn.Conv2d(512, 256, 3, 1, 1))
        self.conv3_3_short = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv8_2 = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv8_3 = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv8_3norm = nn.InstanceNorm2d(256)
        self.conv9_1 = nn.Sequential(nn.Upsample(scale_factor=2, mode="nearest"), nn.Conv2d(256, 128, 3, 1, 1))
        self.conv2_2_short = nn.Conv2d(128, 128, 3, 1, 1)
        self.conv9_2 = nn.Conv2d(128, 128, 3, 1, 1)
        self.conv9_2norm = nn.InstanceNorm2d(128)
        self.conv10_1 = nn.Sequential(nn.Upsample(scale_factor=2, mode="nearest"), nn.Conv2d(128, 128, 3, 1, 1))
        self.conv1_2_short = nn.Conv2d(64, 128, 3, 1, 1)
        self.conv10_2 = nn.Conv2d(128, 128, 3, 1, 1)
        self.conv10_ab = nn.Conv2d(128, 2, 1, 1)

        # One activation per site (kept as attributes to match the original layout).
        self.relu1_1 = nn.ReLU()
        self.relu1_2 = nn.ReLU()
        self.relu2_1 = nn.ReLU()
        self.relu2_2 = nn.ReLU()
        self.relu3_1 = nn.ReLU()
        self.relu3_2 = nn.ReLU()
        self.relu3_3 = nn.ReLU()
        self.relu4_1 = nn.ReLU()
        self.relu4_2 = nn.ReLU()
        self.relu4_3 = nn.ReLU()
        self.relu5_1 = nn.ReLU()
        self.relu5_2 = nn.ReLU()
        self.relu5_3 = nn.ReLU()
        self.relu6_1 = nn.ReLU()
        self.relu6_2 = nn.ReLU()
        self.relu6_3 = nn.ReLU()
        self.relu7_1 = nn.ReLU()
        self.relu7_2 = nn.ReLU()
        self.relu7_3 = nn.ReLU()
        self.relu8_1_comb = nn.ReLU()
        self.relu8_2 = nn.ReLU()
        self.relu8_3 = nn.ReLU()
        self.relu9_1_comb = nn.ReLU()
        self.relu9_2 = nn.ReLU()
        self.relu10_1_comb = nn.ReLU()
        self.relu10_2 = nn.LeakyReLU(0.2, True)

    def forward(self, x):
        """x: gray image (1 channel), ab(2 channel), ab_err, ba_err"""
        # Encoder.
        conv1_1 = self.relu1_1(self.conv1_1(x))
        conv1_2 = self.relu1_2(self.conv1_2(conv1_1))
        conv1_2norm = self.conv1_2norm(conv1_2)
        conv1_2norm_ss = self.conv1_2norm_ss(conv1_2norm)
        conv2_1 = self.relu2_1(self.conv2_1(conv1_2norm_ss))
        conv2_2 = self.relu2_2(self.conv2_2(conv2_1))
        conv2_2norm = self.conv2_2norm(conv2_2)
        conv2_2norm_ss = self.conv2_2norm_ss(conv2_2norm)
        conv3_1 = self.relu3_1(self.conv3_1(conv2_2norm_ss))
        conv3_2 = self.relu3_2(self.conv3_2(conv3_1))
        conv3_3 = self.relu3_3(self.conv3_3(conv3_2))
        conv3_3norm = self.conv3_3norm(conv3_3)
        conv3_3norm_ss = self.conv3_3norm_ss(conv3_3norm)
        conv4_1 = self.relu4_1(self.conv4_1(conv3_3norm_ss))
        conv4_2 = self.relu4_2(self.conv4_2(conv4_1))
        conv4_3 = self.relu4_3(self.conv4_3(conv4_2))
        conv4_3norm = self.conv4_3norm(conv4_3)
        # Dilated bottleneck.
        conv5_1 = self.relu5_1(self.conv5_1(conv4_3norm))
        conv5_2 = self.relu5_2(self.conv5_2(conv5_1))
        conv5_3 = self.relu5_3(self.conv5_3(conv5_2))
        conv5_3norm = self.conv5_3norm(conv5_3)
        conv6_1 = self.relu6_1(self.conv6_1(conv5_3norm))
        conv6_2 = self.relu6_2(self.conv6_2(conv6_1))
        conv6_3 = self.relu6_3(self.conv6_3(conv6_2))
        conv6_3norm = self.conv6_3norm(conv6_3)
        conv7_1 = self.relu7_1(self.conv7_1(conv6_3norm))
        conv7_2 = self.relu7_2(self.conv7_2(conv7_1))
        conv7_3 = self.relu7_3(self.conv7_3(conv7_2))
        conv7_3norm = self.conv7_3norm(conv7_3)
        # Decoder with skip connections from the encoder.
        conv8_1 = self.conv8_1(conv7_3norm)
        conv3_3_short = self.conv3_3_short(conv3_3norm)
        conv8_1_comb = self.relu8_1_comb(conv8_1 + conv3_3_short)
        conv8_2 = self.relu8_2(self.conv8_2(conv8_1_comb))
        conv8_3 = self.relu8_3(self.conv8_3(conv8_2))
        conv8_3norm = self.conv8_3norm(conv8_3)
        conv9_1 = self.conv9_1(conv8_3norm)
        conv2_2_short = self.conv2_2_short(conv2_2norm)
        conv9_1_comb = self.relu9_1_comb(conv9_1 + conv2_2_short)
        conv9_2 = self.relu9_2(self.conv9_2(conv9_1_comb))
        conv9_2norm = self.conv9_2norm(conv9_2)
        conv10_1 = self.conv10_1(conv9_2norm)
        conv1_2_short = self.conv1_2_short(conv1_2norm)
        conv10_1_comb = self.relu10_1_comb(conv10_1 + conv1_2_short)
        conv10_2 = self.relu10_2(self.conv10_2(conv10_1_comb))
        conv10_ab = self.conv10_ab(conv10_2)

        # ab output bounded to [-128, 128].
        return torch.tanh(conv10_ab) * 128
+
+
class GeneralColorVidNet(nn.Module):
    """Variant of ColorVidNet using PReLU activations instead of ReLU.

    Input: ic channels; output: 2 ab channels in [-128, 128] (tanh * 128).

    FIX: the original first built BatchNorm2d / ConvTranspose2d layers and
    then immediately overwrote the same attributes with InstanceNorm2d /
    Upsample+Conv versions. The dead definitions are removed here; the final
    module set and state_dict keys are unchanged.
    """

    def __init__(self, ic):
        super(GeneralColorVidNet, self).__init__()
        # Encoder: three stages, each downsampled by a depthwise 1x1 stride-2 conv.
        self.conv1_1 = nn.Sequential(nn.Conv2d(ic, 32, 3, 1, 1), nn.ReLU(), nn.Conv2d(32, 64, 3, 1, 1))
        self.conv1_2 = nn.Conv2d(64, 64, 3, 1, 1)
        self.conv1_2norm = nn.InstanceNorm2d(64)
        self.conv1_2norm_ss = nn.Conv2d(64, 64, 1, 2, bias=False, groups=64)  # learned subsample
        self.conv2_1 = nn.Conv2d(64, 128, 3, 1, 1)
        self.conv2_2 = nn.Conv2d(128, 128, 3, 1, 1)
        self.conv2_2norm = nn.InstanceNorm2d(128)
        self.conv2_2norm_ss = nn.Conv2d(128, 128, 1, 2, bias=False, groups=128)
        self.conv3_1 = nn.Conv2d(128, 256, 3, 1, 1)
        self.conv3_2 = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv3_3 = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv3_3norm = nn.InstanceNorm2d(256)
        self.conv3_3norm_ss = nn.Conv2d(256, 256, 1, 2, bias=False, groups=256)
        self.conv4_1 = nn.Conv2d(256, 512, 3, 1, 1)
        self.conv4_2 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv4_3 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv4_3norm = nn.InstanceNorm2d(512)
        # Bottleneck: dilated (dilation=2) convolutions keep the resolution.
        self.conv5_1 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv5_2 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv5_3 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv5_3norm = nn.InstanceNorm2d(512)
        self.conv6_1 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv6_2 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv6_3 = nn.Conv2d(512, 512, 3, 1, 2, 2)
        self.conv6_3norm = nn.InstanceNorm2d(512)
        self.conv7_1 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv7_2 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv7_3 = nn.Conv2d(512, 512, 3, 1, 1)
        self.conv7_3norm = nn.InstanceNorm2d(512)
        # Decoder: nearest-upsample + conv, with encoder skip connections.
        self.conv8_1 = nn.Sequential(nn.Upsample(scale_factor=2, mode="nearest"), nn.Conv2d(512, 256, 3, 1, 1))
        self.conv3_3_short = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv8_2 = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv8_3 = nn.Conv2d(256, 256, 3, 1, 1)
        self.conv8_3norm = nn.InstanceNorm2d(256)
        self.conv9_1 = nn.Sequential(nn.Upsample(scale_factor=2, mode="nearest"), nn.Conv2d(256, 128, 3, 1, 1))
        self.conv2_2_short = nn.Conv2d(128, 128, 3, 1, 1)
        self.conv9_2 = nn.Conv2d(128, 128, 3, 1, 1)
        self.conv9_2norm = nn.InstanceNorm2d(128)
        self.conv10_1 = nn.Sequential(nn.Upsample(scale_factor=2, mode="nearest"), nn.Conv2d(128, 128, 3, 1, 1))
        self.conv1_2_short = nn.Conv2d(64, 128, 3, 1, 1)
        self.conv10_2 = nn.Conv2d(128, 128, 3, 1, 1)
        self.conv10_ab = nn.Conv2d(128, 2, 1, 1)

        # One activation per site; PReLU carries a learnable slope parameter.
        self.relu1_1 = nn.PReLU()
        self.relu1_2 = nn.PReLU()
        self.relu2_1 = nn.PReLU()
        self.relu2_2 = nn.PReLU()
        self.relu3_1 = nn.PReLU()
        self.relu3_2 = nn.PReLU()
        self.relu3_3 = nn.PReLU()
        self.relu4_1 = nn.PReLU()
        self.relu4_2 = nn.PReLU()
        self.relu4_3 = nn.PReLU()
        self.relu5_1 = nn.PReLU()
        self.relu5_2 = nn.PReLU()
        self.relu5_3 = nn.PReLU()
        self.relu6_1 = nn.PReLU()
        self.relu6_2 = nn.PReLU()
        self.relu6_3 = nn.PReLU()
        self.relu7_1 = nn.PReLU()
        self.relu7_2 = nn.PReLU()
        self.relu7_3 = nn.PReLU()
        self.relu8_1_comb = nn.PReLU()
        self.relu8_2 = nn.PReLU()
        self.relu8_3 = nn.PReLU()
        self.relu9_1_comb = nn.PReLU()
        self.relu9_2 = nn.PReLU()
        self.relu10_1_comb = nn.PReLU()
        self.relu10_2 = nn.LeakyReLU(0.2, True)

    def forward(self, x):
        """x: gray image (1 channel), ab(2 channel), ab_err, ba_err"""
        # Encoder.
        conv1_1 = self.relu1_1(self.conv1_1(x))
        conv1_2 = self.relu1_2(self.conv1_2(conv1_1))
        conv1_2norm = self.conv1_2norm(conv1_2)
        conv1_2norm_ss = self.conv1_2norm_ss(conv1_2norm)
        conv2_1 = self.relu2_1(self.conv2_1(conv1_2norm_ss))
        conv2_2 = self.relu2_2(self.conv2_2(conv2_1))
        conv2_2norm = self.conv2_2norm(conv2_2)
        conv2_2norm_ss = self.conv2_2norm_ss(conv2_2norm)
        conv3_1 = self.relu3_1(self.conv3_1(conv2_2norm_ss))
        conv3_2 = self.relu3_2(self.conv3_2(conv3_1))
        conv3_3 = self.relu3_3(self.conv3_3(conv3_2))
        conv3_3norm = self.conv3_3norm(conv3_3)
        conv3_3norm_ss = self.conv3_3norm_ss(conv3_3norm)
        conv4_1 = self.relu4_1(self.conv4_1(conv3_3norm_ss))
        conv4_2 = self.relu4_2(self.conv4_2(conv4_1))
        conv4_3 = self.relu4_3(self.conv4_3(conv4_2))
        conv4_3norm = self.conv4_3norm(conv4_3)
        # Dilated bottleneck.
        conv5_1 = self.relu5_1(self.conv5_1(conv4_3norm))
        conv5_2 = self.relu5_2(self.conv5_2(conv5_1))
        conv5_3 = self.relu5_3(self.conv5_3(conv5_2))
        conv5_3norm = self.conv5_3norm(conv5_3)
        conv6_1 = self.relu6_1(self.conv6_1(conv5_3norm))
        conv6_2 = self.relu6_2(self.conv6_2(conv6_1))
        conv6_3 = self.relu6_3(self.conv6_3(conv6_2))
        conv6_3norm = self.conv6_3norm(conv6_3)
        conv7_1 = self.relu7_1(self.conv7_1(conv6_3norm))
        conv7_2 = self.relu7_2(self.conv7_2(conv7_1))
        conv7_3 = self.relu7_3(self.conv7_3(conv7_2))
        conv7_3norm = self.conv7_3norm(conv7_3)
        # Decoder with skip connections from the encoder.
        conv8_1 = self.conv8_1(conv7_3norm)
        conv3_3_short = self.conv3_3_short(conv3_3norm)
        conv8_1_comb = self.relu8_1_comb(conv8_1 + conv3_3_short)
        conv8_2 = self.relu8_2(self.conv8_2(conv8_1_comb))
        conv8_3 = self.relu8_3(self.conv8_3(conv8_2))
        conv8_3norm = self.conv8_3norm(conv8_3)
        conv9_1 = self.conv9_1(conv8_3norm)
        conv2_2_short = self.conv2_2_short(conv2_2norm)
        conv9_1_comb = self.relu9_1_comb(conv9_1 + conv2_2_short)
        conv9_2 = self.relu9_2(self.conv9_2(conv9_1_comb))
        conv9_2norm = self.conv9_2norm(conv9_2)
        conv10_1 = self.conv10_1(conv9_2norm)
        conv1_2_short = self.conv1_2_short(conv1_2norm)
        conv10_1_comb = self.relu10_1_comb(conv10_1 + conv1_2_short)
        conv10_2 = self.relu10_2(self.conv10_2(conv10_1_comb))
        conv10_ab = self.conv10_ab(conv10_2)

        # ab output bounded to [-128, 128].
        return torch.tanh(conv10_ab) * 128
diff --git a/src/models/CNN/FrameColor.py b/src/models/CNN/FrameColor.py
new file mode 100644
index 0000000000000000000000000000000000000000..68fe374641d92bc95d0f2877d1a854a39c21c654
--- /dev/null
+++ b/src/models/CNN/FrameColor.py
@@ -0,0 +1,76 @@
+import torch
+from src.utils import *
+from src.models.vit.vit import FeatureTransform
+
+
def warp_color(
    IA_l,
    IB_lab,
    features_B,
    embed_net,
    nonlocal_net,
    temperature=0.01,
):
    """Warp the reference colors of ``IB_lab`` onto the grayscale frame ``IA_l``.

    The grayscale frame is lifted to three channels, embedded with
    ``embed_net`` (without gradients), and both four-level feature pyramids
    are channel-normalized before ``nonlocal_net`` computes correspondences.

    Args:
        IA_l: luminance channel of the frame to colorize.
        IB_lab: LAB map of the colored reference.
        features_B: 4-tuple of precomputed reference features.
        embed_net: feature extractor applied to the grayscale frame.
        nonlocal_net: correspondence/warping network.
        temperature: softmax temperature forwarded to ``nonlocal_net``.

    Returns:
        Whatever ``nonlocal_net`` returns (warped LAB map and similarity map).
    """
    IA_rgb_from_gray = gray2rgb_batch(IA_l)

    # The embedding network is frozen here; only the warp is differentiable.
    with torch.no_grad():
        A_feats = embed_net(IA_rgb_from_gray)

    # Channel-wise normalize every pyramid level of both images.
    A_feat0, A_feat1, A_feat2, A_feat3 = [feature_normalize(f) for f in A_feats]
    B_feat0, B_feat1, B_feat2, B_feat3 = [feature_normalize(f) for f in features_B]

    return nonlocal_net(
        IB_lab,
        A_feat0,
        A_feat1,
        A_feat2,
        A_feat3,
        B_feat0,
        B_feat1,
        B_feat2,
        B_feat3,
        temperature=temperature,
    )
+
+
def frame_colorization(
    IA_l,
    IB_lab,
    IA_last_lab,
    features_B,
    embed_net,
    nonlocal_net,
    colornet,
    joint_training=True,
    luminance_noise=0,
    temperature=0.01,
):
    """Predict the ab channels of one grayscale frame from a colored reference.

    Args:
        IA_l: luminance of the current frame.
        IB_lab: LAB map of the reference image.
        IA_last_lab: LAB map of the previously colorized frame (temporal cue).
        features_B: precomputed reference features for :func:`warp_color`.
        embed_net, nonlocal_net, colornet: the three sub-networks.
        joint_training: when False, the warp and color networks run without
            gradient tracking.
        luminance_noise: std-dev of Gaussian noise added to ``IA_l`` (0 = off).
        temperature: softmax temperature for the correspondence.

    Returns:
        Tuple ``(IA_ab_predict, nonlocal_BA_lab)``: the predicted ab channels
        and the warped reference LAB map.
    """
    if luminance_noise:
        # Perturb the luminance channel to regularize training.
        IA_l = IA_l + torch.randn_like(IA_l, requires_grad=False) * luminance_noise

    with torch.autograd.set_grad_enabled(joint_training):
        nonlocal_BA_lab, similarity_map = warp_color(
            IA_l,
            IB_lab,
            features_B,
            embed_net,
            nonlocal_net,
            temperature=temperature,
        )
        nonlocal_BA_ab = nonlocal_BA_lab[:, 1:3, :, :]
        # Color network sees: current luminance, warped ab, match confidence,
        # and the previous frame's LAB map.
        colornet_input = torch.cat((IA_l, nonlocal_BA_ab, similarity_map, IA_last_lab), dim=1)
        IA_ab_predict = colornet(colornet_input)

    return IA_ab_predict, nonlocal_BA_lab
diff --git a/src/models/CNN/GAN_models.py b/src/models/CNN/GAN_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..137111bb8035c8d0dbd26b6b958c4036260b8821
--- /dev/null
+++ b/src/models/CNN/GAN_models.py
@@ -0,0 +1,268 @@
+# DCGAN-like generator and discriminator
+import torch
+from torch import nn
+import torch.nn.functional as F
+from torch.nn import Parameter
+
+
def l2normalize(v, eps=1e-12):
    """Return ``v`` scaled to unit L2 norm; ``eps`` guards against division by zero."""
    norm = v.norm()
    return v / (norm + eps)
+
+
class SpectralNorm(nn.Module):
    """Spectral normalization wrapper (Miyato et al., 2018).

    Wraps a module and, before every forward call, rescales its weight by an
    estimate of its largest singular value (sigma), obtained with
    ``power_iterations`` steps of the power method.  The raw weight is stored
    as ``<name>_bar`` and the singular-vector estimates as ``<name>_u`` /
    ``<name>_v``; ``module.<name>`` is re-set to ``w / sigma`` each call.
    """

    def __init__(self, module, name="weight", power_iterations=1):
        # module: wrapped layer (e.g. nn.Conv2d); name: weight attribute to normalize.
        super(SpectralNorm, self).__init__()
        self.module = module
        self.name = name
        self.power_iterations = power_iterations
        if not self._made_params():
            self._make_params()

    def _update_u_v(self):
        """Run power iteration and set ``module.<name>`` to ``w / sigma``."""
        u = getattr(self.module, self.name + "_u")
        v = getattr(self.module, self.name + "_v")
        w = getattr(self.module, self.name + "_bar")

        height = w.data.shape[0]
        for _ in range(self.power_iterations):
            # Power method on the weight flattened to a (height, -1) matrix.
            v.data = l2normalize(torch.mv(torch.t(w.view(height, -1).data), u.data))
            u.data = l2normalize(torch.mv(w.view(height, -1).data, v.data))

        # sigma approximates the largest singular value of w.
        sigma = u.dot(w.view(height, -1).mv(v))
        setattr(self.module, self.name, w / sigma.expand_as(w))

    def _made_params(self):
        """Return True if the u/v/bar parameters were already registered."""
        try:
            u = getattr(self.module, self.name + "_u")
            v = getattr(self.module, self.name + "_v")
            w = getattr(self.module, self.name + "_bar")
            return True
        except AttributeError:
            return False

    def _make_params(self):
        """One-time setup: replace ``module.<name>`` with u/v/bar parameters."""
        w = getattr(self.module, self.name)

        height = w.data.shape[0]
        width = w.view(height, -1).data.shape[1]

        # u and v hold persistent power-iteration state; they are not trained.
        u = Parameter(w.data.new(height).normal_(0, 1), requires_grad=False)
        v = Parameter(w.data.new(width).normal_(0, 1), requires_grad=False)
        u.data = l2normalize(u.data)
        v.data = l2normalize(v.data)
        w_bar = Parameter(w.data)

        # Remove the original parameter so the normalized tensor can be
        # assigned as a plain attribute on every forward pass.
        del self.module._parameters[self.name]

        self.module.register_parameter(self.name + "_u", u)
        self.module.register_parameter(self.name + "_v", v)
        self.module.register_parameter(self.name + "_bar", w_bar)

    def forward(self, *args):
        # Refresh the normalized weight, then delegate to the wrapped module.
        self._update_u_v()
        return self.module.forward(*args)
+
+
class Generator(nn.Module):
    """DCGAN-style generator: maps a latent vector to a 32x32 image in [-1, 1].

    Fix/generalization: the original implementation read the module-level
    global ``channels`` (defined *after* this class), making the output width
    an implicit hidden dependency.  The channel count is now an explicit
    keyword parameter with the same default (3), so existing callers are
    unaffected while other widths become possible.

    Args:
        z_dim: dimensionality of the latent vector.
        out_channels: number of image channels produced. Defaults to 3.
    """

    def __init__(self, z_dim, out_channels=3):
        super(Generator, self).__init__()
        self.z_dim = z_dim

        # 1x1 -> 4x4 -> 8x8 -> 16x16 -> 32x32 spatial progression.
        self.model = nn.Sequential(
            nn.ConvTranspose2d(z_dim, 512, 4, stride=1),
            nn.InstanceNorm2d(512),
            nn.ReLU(),
            nn.ConvTranspose2d(512, 256, 4, stride=2, padding=(1, 1)),
            nn.InstanceNorm2d(256),
            nn.ReLU(),
            nn.ConvTranspose2d(256, 128, 4, stride=2, padding=(1, 1)),
            nn.InstanceNorm2d(128),
            nn.ReLU(),
            nn.ConvTranspose2d(128, 64, 4, stride=2, padding=(1, 1)),
            nn.InstanceNorm2d(64),
            nn.ReLU(),
            nn.ConvTranspose2d(64, out_channels, 3, stride=1, padding=(1, 1)),
            nn.Tanh(),  # squashes output into [-1, 1]
        )

    def forward(self, z):
        """Generate images from latent batch ``z`` (any shape collapsible to (N, z_dim))."""
        return self.model(z.view(-1, self.z_dim, 1, 1))
+
+
channels = 3  # image channel count assumed by Discriminator input
leak = 0.1  # negative slope for the discriminator's LeakyReLU activations
w_g = 4  # spatial size of the discriminator's final feature map (64 / 2**4)
+
+
class Discriminator(nn.Module):
    """Spectral-normalized convolutional discriminator for 64x64 inputs.

    Input: ``(N, channels, 64, 64)``; output: ``(N, 1)`` realness logits.

    Fix: the original ``forward`` constructed fresh ``nn.LeakyReLU`` and
    ``nn.InstanceNorm2d`` modules on every call.  Those modules are stateless
    (InstanceNorm2d without ``affine`` has no parameters), so they are now
    built once in ``__init__`` — identical behavior, no per-call allocation.
    """

    def __init__(self):
        super(Discriminator, self).__init__()

        self.conv1 = SpectralNorm(nn.Conv2d(channels, 64, 3, stride=1, padding=(1, 1)))
        self.conv2 = SpectralNorm(nn.Conv2d(64, 64, 4, stride=2, padding=(1, 1)))
        self.conv3 = SpectralNorm(nn.Conv2d(64, 128, 3, stride=1, padding=(1, 1)))
        self.conv4 = SpectralNorm(nn.Conv2d(128, 128, 4, stride=2, padding=(1, 1)))
        self.conv5 = SpectralNorm(nn.Conv2d(128, 256, 3, stride=1, padding=(1, 1)))
        self.conv6 = SpectralNorm(nn.Conv2d(256, 256, 4, stride=2, padding=(1, 1)))
        self.conv7 = SpectralNorm(nn.Conv2d(256, 256, 3, stride=1, padding=(1, 1)))
        self.conv8 = SpectralNorm(nn.Conv2d(256, 512, 4, stride=2, padding=(1, 1)))
        self.fc = SpectralNorm(nn.Linear(w_g * w_g * 512, 1))

        # Stateless activation/normalization modules, shared across layers of
        # the same width (no parameters or buffers, so state_dict is unchanged).
        self.lrelu = nn.LeakyReLU(leak)
        self.norm64 = nn.InstanceNorm2d(64)
        self.norm128 = nn.InstanceNorm2d(128)
        self.norm256 = nn.InstanceNorm2d(256)

    def forward(self, x):
        """Return a (N, 1) logit for each image in ``x``."""
        m = self.lrelu(self.conv1(x))
        m = self.lrelu(self.norm64(self.conv2(m)))
        m = self.lrelu(self.norm128(self.conv3(m)))
        m = self.lrelu(self.norm128(self.conv4(m)))
        m = self.lrelu(self.norm256(self.conv5(m)))
        m = self.lrelu(self.norm256(self.conv6(m)))
        m = self.lrelu(self.norm256(self.conv7(m)))
        m = self.lrelu(self.conv8(m))

        # Flatten the (512, w_g, w_g) feature map for the linear head.
        return self.fc(m.view(-1, w_g * w_g * 512))
+
+
class Self_Attention(nn.Module):
    """SAGAN-style self-attention over spatial positions.

    Projects the input into query/key/value maps with 1x1 spectral-normalized
    convolutions, attends over all spatial locations, and blends the result
    back into the input through a learned scalar ``gamma`` (initialized to 0,
    so the layer starts out as the identity).
    """

    def __init__(self, in_dim):
        super(Self_Attention, self).__init__()
        self.chanel_in = in_dim

        self.query_conv = SpectralNorm(nn.Conv2d(in_channels=in_dim, out_channels=in_dim // 1, kernel_size=1))
        self.key_conv = SpectralNorm(nn.Conv2d(in_channels=in_dim, out_channels=in_dim // 1, kernel_size=1))
        self.value_conv = SpectralNorm(nn.Conv2d(in_channels=in_dim, out_channels=in_dim, kernel_size=1))
        # Learned blend weight; zero init makes the block a no-op at start.
        self.gamma = nn.Parameter(torch.zeros(1))

        self.softmax = nn.Softmax(dim=-1)

    def forward(self, x):
        """Apply self-attention to ``x``.

        Args:
            x: input feature maps of shape (B, C, W, H).

        Returns:
            ``gamma * attention(x) + x`` — same shape as the input.
        """
        batch, chans, width, height = x.size()
        positions = width * height

        queries = self.query_conv(x).view(batch, -1, positions).permute(0, 2, 1)  # B x N x C'
        keys = self.key_conv(x).view(batch, -1, positions)                        # B x C' x N
        attention = self.softmax(torch.bmm(queries, keys))                        # B x N x N
        values = self.value_conv(x).view(batch, -1, positions)                    # B x C x N

        attended = torch.bmm(values, attention.permute(0, 2, 1))
        attended = attended.view(batch, chans, width, height)

        return self.gamma * attended + x
+
+
class Discriminator_x64(nn.Module):
    """Spectral-normalized discriminator with one self-attention stage.

    Six stride-2 convolution stages downsample the input; the final [3, 6]
    convolution plus average pooling yields a scalar score per sample.
    Returns both the score and the stage-4 feature map (used for feature
    matching losses by callers).
    """

    def __init__(self, in_size=6, ndf=64):
        # in_size: input channel count; ndf: base feature width.
        super(Discriminator_x64, self).__init__()
        self.in_size = in_size
        self.ndf = ndf

        def down_block(c_in, c_out):
            # Strided SN-conv -> instance norm -> leaky ReLU (halves H and W).
            return nn.Sequential(
                SpectralNorm(nn.Conv2d(c_in, c_out, 4, 2, 1)),
                nn.InstanceNorm2d(c_out),
                nn.LeakyReLU(0.2, inplace=True),
            )

        # First stage has no normalization, matching common GAN practice.
        self.layer1 = nn.Sequential(SpectralNorm(nn.Conv2d(in_size, ndf, 4, 2, 1)), nn.LeakyReLU(0.2, inplace=True))
        self.layer2 = down_block(ndf, ndf)
        self.attention = Self_Attention(ndf)
        self.layer3 = down_block(ndf, ndf * 2)
        self.layer4 = down_block(ndf * 2, ndf * 4)
        self.layer5 = down_block(ndf * 4, ndf * 8)
        self.layer6 = down_block(ndf * 8, ndf * 16)

        self.last = SpectralNorm(nn.Conv2d(ndf * 16, 1, [3, 6], 1, 0))

    def forward(self, input):
        """Return ``(score, feature4)`` for batch ``input``."""
        out = self.layer1(input)
        out = self.attention(self.layer2(out))
        out = self.layer3(out)
        feature4 = self.layer4(out)  # kept as auxiliary output for callers
        out = self.layer6(self.layer5(feature4))
        score = self.last(out)
        # Collapse any remaining spatial extent into a single scalar per sample.
        score = F.avg_pool2d(score, score.size()[2:]).view(score.size()[0], -1)

        return score, feature4
+
+
class Discriminator_x64_224(nn.Module):
    """Variant of :class:`Discriminator_x64` sized for 224-pixel inputs.

    Identical architecture except the final convolution uses a [3, 3] kernel.
    Returns both the realness score and the stage-4 feature map.
    """

    def __init__(self, in_size=6, ndf=64):
        # in_size: input channel count; ndf: base feature width.
        super(Discriminator_x64_224, self).__init__()
        self.in_size = in_size
        self.ndf = ndf

        def down_block(c_in, c_out):
            # Strided SN-conv -> instance norm -> leaky ReLU (halves H and W).
            return nn.Sequential(
                SpectralNorm(nn.Conv2d(c_in, c_out, 4, 2, 1)),
                nn.InstanceNorm2d(c_out),
                nn.LeakyReLU(0.2, inplace=True),
            )

        # First stage has no normalization, matching common GAN practice.
        self.layer1 = nn.Sequential(SpectralNorm(nn.Conv2d(in_size, ndf, 4, 2, 1)), nn.LeakyReLU(0.2, inplace=True))
        self.layer2 = down_block(ndf, ndf)
        self.attention = Self_Attention(ndf)
        self.layer3 = down_block(ndf, ndf * 2)
        self.layer4 = down_block(ndf * 2, ndf * 4)
        self.layer5 = down_block(ndf * 4, ndf * 8)
        self.layer6 = down_block(ndf * 8, ndf * 16)

        self.last = SpectralNorm(nn.Conv2d(ndf * 16, 1, [3, 3], 1, 0))

    def forward(self, input):
        """Return ``(score, feature4)`` for batch ``input``."""
        out = self.layer1(input)
        out = self.attention(self.layer2(out))
        out = self.layer3(out)
        feature4 = self.layer4(out)  # kept as auxiliary output for callers
        out = self.layer6(self.layer5(feature4))
        score = self.last(out)
        # Collapse any remaining spatial extent into a single scalar per sample.
        score = F.avg_pool2d(score, score.size()[2:]).view(score.size()[0], -1)

        return score, feature4
diff --git a/src/models/CNN/NonlocalNet.py b/src/models/CNN/NonlocalNet.py
new file mode 100644
index 0000000000000000000000000000000000000000..69477c9442abe2cdcc2a697ceb9fffa37cc55dcf
--- /dev/null
+++ b/src/models/CNN/NonlocalNet.py
@@ -0,0 +1,741 @@
+import sys
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from src.utils import uncenter_l
+
+
def find_local_patch(x, patch_size):
    """Gather, for every spatial location, its surrounding patch.

    Zero-padded ``patch_size x patch_size`` neighborhoods are extracted at
    every pixel so they can be compared against the center value.

    Args:
        x: tensor of shape (N, C, H, W).
        patch_size: side length of the square neighborhood.

    Returns:
        Tensor of shape (N, C * patch_size**2, H, W); channel ``k`` holds the
        k-th neighbor of each location, so the patch center sits at channel
        index ``patch_size**2 // 2`` (for C == 1).
    """
    batch, _, height, width = x.shape
    pad = patch_size // 2
    patches = F.unfold(
        x,
        kernel_size=(patch_size, patch_size),
        padding=(pad, pad),
        stride=(1, 1),
    )
    return patches.view(batch, patches.shape[1], height, width)
+
+
class WeightedAverage(nn.Module):
    """Edge-aware smoothing of the ab channels guided by luminance similarity."""

    def __init__(self):
        super(WeightedAverage, self).__init__()

    def forward(self, x_lab, patch_size=3, alpha=1, scale_factor=1):
        """Smooth a/b with weights from local luminance similarity.

        Each output pixel is a weighted average of the a/b values in its
        ``patch_size x patch_size`` neighborhood, where neighbors with
        luminance close to the center get higher weight.

        Args:
            x_lab: input image in LAB color space, shape (N, 3, H, W).
            patch_size: neighborhood side length. Defaults to 3.
            alpha: smoothing strength (alpha=0: less smooth; alpha=inf: smoother).
            scale_factor: resize factor applied to the input first. Defaults to 1.

        Returns:
            Tensor of shape (N, 2, H, W) with the smoothed a/b channels.
        """
        x_lab = F.interpolate(x_lab, scale_factor=scale_factor)
        luminance = x_lab[:, 0:1, :, :]
        chroma_a = x_lab[:, 1:2, :, :]
        chroma_b = x_lab[:, 2:3, :, :]

        local_l = find_local_patch(luminance, patch_size)
        local_a = find_local_patch(chroma_a, patch_size)
        local_b = find_local_patch(chroma_b, patch_size)

        # Weight each neighbor by its luminance distance to the center pixel;
        # softmax makes the weights sum to one per pixel.
        weights = nn.functional.softmax(-1 * ((local_l - luminance) ** 2) / alpha, dim=1)

        smoothed_a = torch.sum(weights * local_a, dim=1, keepdim=True)
        smoothed_b = torch.sum(weights * local_b, dim=1, keepdim=True)
        return torch.cat((smoothed_a, smoothed_b), 1)
+
+
class WeightedAverage_color(nn.Module):
    """
    smooth the image according to the color distance in the LAB space
    """

    def __init__(self):
        super(WeightedAverage_color, self).__init__()

    def forward(self, x_lab, x_lab_predict, patch_size=3, alpha=1, scale_factor=1):
        """Smooth the predicted a/b channels using the input's color similarity.

        Neighbor weights come from the full LAB distance (un-centered L plus
        a and b) between each neighbor and the center pixel of ``x_lab``; the
        weighted average is then taken over the *predicted* a/b channels.

        Args:
            x_lab: input image in LAB color space.
            x_lab_predict: predicted LAB image whose a/b channels are smoothed.
            patch_size: neighborhood side length. Defaults to 3.
            alpha: smoothing strength (alpha=0: less smooth; alpha=inf: smoother).
            scale_factor: resize factor applied to ``x_lab`` first. Defaults to 1.

        Returns:
            Tensor of shape (N, 2, H, W) with the smoothed predicted a/b channels.
        """
        x_lab = F.interpolate(x_lab, scale_factor=scale_factor)
        luminance = uncenter_l(x_lab[:, 0:1, :, :])
        in_a = x_lab[:, 1:2, :, :]
        in_b = x_lab[:, 2:3, :, :]
        pred_a = x_lab_predict[:, 1:2, :, :]
        pred_b = x_lab_predict[:, 2:3, :, :]

        local_l = find_local_patch(luminance, patch_size)
        local_a = find_local_patch(in_a, patch_size)
        local_b = find_local_patch(in_b, patch_size)
        local_pred_a = find_local_patch(pred_a, patch_size)
        local_pred_b = find_local_patch(pred_b, patch_size)

        # Full LAB distance between each neighbor and the center pixel.
        color_distance = (local_l - luminance) ** 2 + (local_a - in_a) ** 2 + (local_b - in_b) ** 2
        # Softmax so the neighbor weights sum to one per pixel.
        weights = nn.functional.softmax(-1 * color_distance / alpha, dim=1)

        return torch.cat(
            (
                torch.sum(weights * local_pred_a, dim=1, keepdim=True),
                torch.sum(weights * local_pred_b, dim=1, keepdim=True),
            ),
            1,
        )
+
+
class NonlocalWeightedAverage(nn.Module):
    """Non-local smoothing of a/b channels driven by feature-space affinity."""

    def __init__(self):
        super(NonlocalWeightedAverage, self).__init__()

    def forward(self, x_lab, feature, patch_size=3, alpha=0.1, scale_factor=1):
        """Recompute each pixel's a/b as an affinity-weighted global average.

        Args:
            x_lab: image in LAB color space, shape (N, 3, H, W).
            feature: feature map of the image (assumed already normalized —
                see the original note below); resized to match ``x_lab``.
            patch_size: patch side length for the affinity computation. Defaults to 3.
            alpha: softmax temperature (alpha=0: less smooth; alpha=inf: smoother).
            scale_factor: resize factor applied to ``x_lab`` first. Defaults to 1.

        Returns:
            Tensor of shape (N, 2, H, W): the non-locally smoothed a/b channels.
        """
        # alpha=0: less smooth; alpha=inf: smoother
        # input feature is normalized feature
        x_lab = F.interpolate(x_lab, scale_factor=scale_factor)
        batch, _, height, width = x_lab.shape
        feature = F.interpolate(feature, size=(height, width))

        # Flatten the a/b channels to (N, H*W, 2) for the matmul below.
        ab = x_lab[:, 1:3, :, :].view(batch, 2, -1).permute(0, 2, 1)

        patches = find_local_patch(feature, patch_size)
        patches = patches.view(batch, patches.shape[1], -1)

        # Pairwise patch affinity between all spatial positions.
        affinity = torch.matmul(patches.permute(0, 2, 1), patches)
        affinity = nn.functional.softmax(affinity / alpha, dim=-1)

        weighted_ab = torch.matmul(affinity, ab)
        weighted_ab = weighted_ab.permute(0, 2, 1).contiguous()
        return weighted_ab.view(batch, 2, height, width)
+
+
class CorrelationLayer(nn.Module):
    """Local correlation (cost-volume) layer over a square search window.

    For every displacement (i, j) within ``search_range``, computes a per-pixel
    similarity (dot product) or negated squared difference between ``x1`` and
    the shifted ``x2``, producing ``(2 * search_range + 1) ** 2`` channels.

    Fix: the cost volume was allocated with a hard-coded
    ``torch.device("cuda")``, which crashed on CPU-only machines; it is now
    created on ``x1``'s device and dtype, which is identical on CUDA inputs.
    """

    def __init__(self, search_range):
        super(CorrelationLayer, self).__init__()
        self.search_range = search_range

    def forward(self, x1, x2, alpha=1, raw_output=False, metric="similarity"):
        """Build the correlation volume between ``x1`` and ``x2``.

        Args:
            x1: first feature map, shape (N, C, H, W).
            x2: second feature map to be compared under shifts, same shape.
            alpha: softmax temperature. Defaults to 1.
            raw_output: if True, return the raw cost volume instead of its
                channel-wise softmax. Defaults to False.
            metric: "similarity" (dot product) or anything else for negated
                patchwise squared difference. Defaults to "similarity".

        Returns:
            Tensor of shape (N, (2*search_range+1)**2, H, W): raw volume when
            ``raw_output`` is True, otherwise its softmax over channels.
        """
        shape = list(x1.size())
        window = self.search_range * 2 + 1
        shape[1] = window ** 2
        # Allocate on the input's device/dtype instead of hard-coding CUDA so
        # the layer also works on CPU.
        cv = torch.zeros(shape, device=x1.device, dtype=x1.dtype)

        for i in range(-self.search_range, self.search_range + 1):
            for j in range(-self.search_range, self.search_range + 1):
                # Overlapping slices of x1 and the (i, j)-shifted x2; borders
                # with no overlap stay zero.
                if i < 0:
                    slice_h, slice_h_r = slice(None, i), slice(-i, None)
                elif i > 0:
                    slice_h, slice_h_r = slice(i, None), slice(None, -i)
                else:
                    slice_h, slice_h_r = slice(None), slice(None)

                if j < 0:
                    slice_w, slice_w_r = slice(None, j), slice(-j, None)
                elif j > 0:
                    slice_w, slice_w_r = slice(j, None), slice(None, -j)
                else:
                    slice_w, slice_w_r = slice(None), slice(None)

                # NOTE(review): for negative i/j the channel index
                # ``window * i + j`` is negative and wraps around from the end.
                # This (unusual) fixed ordering is preserved because changing
                # it would permute the output channels for downstream users.
                if metric == "similarity":
                    cv[:, window * i + j, slice_h, slice_w] = (
                        x1[:, :, slice_h, slice_w] * x2[:, :, slice_h_r, slice_w_r]
                    ).sum(1)
                else:  # patchwise subtraction
                    cv[:, window * i + j, slice_h, slice_w] = -(
                        (x1[:, :, slice_h, slice_w] - x2[:, :, slice_h_r, slice_w_r]) ** 2
                    ).sum(1)

        # TODO sigmoid?
        if raw_output:
            return cv
        else:
            return nn.functional.softmax(cv / alpha, dim=1)
+
+
class WTA_scale(torch.autograd.Function):
    """Winner-take-all with scaling.

    Along the last dimension, each row's maximum passes through unchanged
    while every other entry is multiplied by ``scale`` — a soft argmax-like
    sharpening with a custom gradient that mirrors the forward scaling.
    """

    @staticmethod
    def forward(ctx, input, scale=1e-4):
        """Return ``input`` with non-maximal entries (last dim) scaled by ``scale``."""
        row_max = torch.max(input, -1, keepdim=True)[0]
        is_max = input == row_max
        output = torch.where(is_max, input, input * scale)

        # Save the winner mask so backward can route gradients accordingly.
        ctx.save_for_backward(input, is_max.type(torch.float))
        return output

    @staticmethod
    def backward(ctx, grad_output):
        """Pass gradients through winners unchanged; scale the rest by 1e-4."""
        _, mask = ctx.saved_tensors
        grad_scale = torch.where(mask == 1, torch.ones_like(mask), torch.ones_like(mask) * 1e-4)
        # No gradient for the ``scale`` argument, hence the trailing None.
        return grad_output.clone() * grad_scale, None
+
+
class ResidualBlock(nn.Module):
    """Residual block: two reflection-padded conv + instance-norm stages with
    a PReLU nonlinearity and an identity skip connection.

    Note: both convolutions map ``in_channels -> out_channels`` and the skip
    adds the raw input, so the block is only shape-consistent when
    ``in_channels == out_channels`` and ``stride == 1`` (how it is used here).
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, padding=1, stride=1):
        super(ResidualBlock, self).__init__()
        self.padding1 = nn.ReflectionPad2d(padding)
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=0, stride=stride)
        self.bn1 = nn.InstanceNorm2d(out_channels)
        self.prelu = nn.PReLU()
        self.padding2 = nn.ReflectionPad2d(padding)
        self.conv2 = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=0, stride=stride)
        self.bn2 = nn.InstanceNorm2d(out_channels)

    def forward(self, x):
        """Return ``prelu(norm(conv(norm(conv(x)))) + x)``."""
        shortcut = x
        y = self.prelu(self.bn1(self.conv1(self.padding1(x))))
        y = self.bn2(self.conv2(self.padding2(y)))
        return self.prelu(y + shortcut)
+
+
class WarpNet(nn.Module):
    """input is Al, Bl, channel = 1, range~[0,255]

    Non-local warping network: projects four VGG-style feature levels of
    image A and reference B to a common 44x44 resolution, computes a dense
    pairwise cosine-similarity matrix, and uses its softmax to warp the
    reference's downsampled LAB colors onto A's layout.  Also returns a
    per-pixel similarity (confidence) map.
    """

    def __init__(self):
        super(WarpNet, self).__init__()
        self.feature_channel = 64
        self.in_channels = self.feature_channel * 4
        self.inter_channels = 256
        # 44*44
        self.layer2_1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            nn.Conv2d(128, 128, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(128),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(128, self.feature_channel, kernel_size=3, padding=0, stride=2),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Dropout(0.2),
        )
        self.layer3_1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            nn.Conv2d(256, 128, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(128),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(128, self.feature_channel, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Dropout(0.2),
        )

        # 22*22->44*44
        self.layer4_1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            nn.Conv2d(512, 256, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(256),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(256, self.feature_channel, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Upsample(scale_factor=2),
            nn.Dropout(0.2),
        )

        # 11*11->44*44
        self.layer5_1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            nn.Conv2d(512, 256, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(256),
            nn.PReLU(),
            nn.Upsample(scale_factor=2),
            nn.ReflectionPad2d(1),
            nn.Conv2d(256, self.feature_channel, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Upsample(scale_factor=2),
            nn.Dropout(0.2),
        )

        # Residual fusion of the four concatenated feature levels.
        self.layer = nn.Sequential(
            ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
            ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
            ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
        )

        # 1x1 projections producing the query (theta, for A) and key (phi,
        # for B) embeddings used in the correlation below.
        self.theta = nn.Conv2d(
            in_channels=self.in_channels, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0
        )
        self.phi = nn.Conv2d(in_channels=self.in_channels, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0)

        self.upsampling = nn.Upsample(scale_factor=4)

    def forward(
        self,
        B_lab_map,
        A_relu2_1,
        A_relu3_1,
        A_relu4_1,
        A_relu5_1,
        B_relu2_1,
        B_relu3_1,
        B_relu4_1,
        B_relu5_1,
        temperature=0.001 * 5,
        detach_flag=False,
        WTA_scale_weight=1,
    ):
        """Warp B's colors onto A's spatial layout.

        Args:
            B_lab_map: reference LAB image, shape (N, C, H, W).
            A_relu*_1 / B_relu*_1: four feature levels for A and B
                (presumably VGG relu2_1..relu5_1 activations — the channel
                counts 128/256/512/512 match that layout; confirm at caller).
            temperature: softmax temperature for the correspondence.
            detach_flag: if True, the similarity matrix is detached from the
                autograd graph.
            WTA_scale_weight: 1 disables winner-take-all sharpening; any other
                value applies WTA_scale with that scale.

        Returns:
            Tuple ``(y, similarity_map)``: warped LAB map at input resolution
            and a per-pixel max-similarity confidence map.
        """
        batch_size = B_lab_map.shape[0]
        channel = B_lab_map.shape[1]
        image_height = B_lab_map.shape[2]
        image_width = B_lab_map.shape[3]
        # Correspondences are computed at 1/4 of the input resolution.
        feature_height = int(image_height / 4)
        feature_width = int(image_width / 4)

        # scale feature size to 44*44
        A_feature2_1 = self.layer2_1(A_relu2_1)
        B_feature2_1 = self.layer2_1(B_relu2_1)
        A_feature3_1 = self.layer3_1(A_relu3_1)
        B_feature3_1 = self.layer3_1(B_relu3_1)
        A_feature4_1 = self.layer4_1(A_relu4_1)
        B_feature4_1 = self.layer4_1(B_relu4_1)
        A_feature5_1 = self.layer5_1(A_relu5_1)
        B_feature5_1 = self.layer5_1(B_relu5_1)

        # concatenate features
        # Odd input sizes can leave level 5 two rows short after the double
        # upsample; pad with replication so concatenation lines up.
        if A_feature5_1.shape[2] != A_feature2_1.shape[2] or A_feature5_1.shape[3] != A_feature2_1.shape[3]:
            A_feature5_1 = F.pad(A_feature5_1, (0, 0, 1, 1), "replicate")
            B_feature5_1 = F.pad(B_feature5_1, (0, 0, 1, 1), "replicate")

        A_features = self.layer(torch.cat((A_feature2_1, A_feature3_1, A_feature4_1, A_feature5_1), 1))
        B_features = self.layer(torch.cat((B_feature2_1, B_feature3_1, B_feature4_1, B_feature5_1), 1))

        # pairwise cosine similarity
        theta = self.theta(A_features).view(batch_size, self.inter_channels, -1)  # 2*256*(feature_height*feature_width)
        theta = theta - theta.mean(dim=-1, keepdim=True)  # center the feature
        theta_norm = torch.norm(theta, 2, 1, keepdim=True) + sys.float_info.epsilon
        theta = torch.div(theta, theta_norm)
        theta_permute = theta.permute(0, 2, 1)  # 2*(feature_height*feature_width)*256
        phi = self.phi(B_features).view(batch_size, self.inter_channels, -1)  # 2*256*(feature_height*feature_width)
        phi = phi - phi.mean(dim=-1, keepdim=True)  # center the feature
        phi_norm = torch.norm(phi, 2, 1, keepdim=True) + sys.float_info.epsilon
        phi = torch.div(phi, phi_norm)
        f = torch.matmul(theta_permute, phi)  # 2*(feature_height*feature_width)*(feature_height*feature_width)
        if detach_flag:
            f = f.detach()

        # Confidence = best match score per A-position (in-place unsqueeze).
        f_similarity = f.unsqueeze_(dim=1)
        similarity_map = torch.max(f_similarity, -1, keepdim=True)[0]
        similarity_map = similarity_map.view(batch_size, 1, feature_height, feature_width)

        # f can be negative
        f_WTA = f if WTA_scale_weight == 1 else WTA_scale.apply(f, WTA_scale_weight)
        f_WTA = f_WTA / temperature
        # NOTE: squeeze_ is in-place and drops the dim added above before the
        # row-wise softmax over B-positions.
        f_div_C = F.softmax(f_WTA.squeeze_(), dim=-1)  # 2*1936*1936;

        # downsample the reference color
        B_lab = F.avg_pool2d(B_lab_map, 4)
        B_lab = B_lab.view(batch_size, channel, -1)
        B_lab = B_lab.permute(0, 2, 1)  # 2*1936*channel

        # multiply the corr map with color
        y = torch.matmul(f_div_C, B_lab)  # 2*1936*channel
        y = y.permute(0, 2, 1).contiguous()
        y = y.view(batch_size, channel, feature_height, feature_width)  # 2*3*44*44
        # Restore the original input resolution.
        y = self.upsampling(y)
        similarity_map = self.upsampling(similarity_map)

        return y, similarity_map
+
+
class WarpNet_new(nn.Module):
    """input is Al, Bl, channel = 1, range~[0,255]

    Variant of :class:`WarpNet` that consumes four transformer feature maps
    (all with ``d_model`` channels) instead of VGG pyramids: each level is
    upsampled 8x and projected to a common resolution before the same
    correlation-and-warp procedure as WarpNet.
    """

    def __init__(self, d_model=768):
        # d_model: channel width of every incoming feature level (ViT-style).
        super(WarpNet_new, self).__init__()
        self.feature_channel = 64
        self.in_channels = self.feature_channel * 4
        self.inter_channels = 256
        # 44*44
        self.d_model = d_model
        self.layer2_1 = nn.Sequential(
            nn.Upsample(scale_factor=8),
            nn.ReflectionPad2d(1),
            nn.Conv2d(d_model, int(d_model / 2), kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(int(d_model / 2)),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(int(d_model / 2), self.feature_channel, kernel_size=3, padding=0, stride=2),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Dropout(0.2),
        )
        self.layer3_1 = nn.Sequential(
            nn.Upsample(scale_factor=8),
            nn.ReflectionPad2d(1),
            nn.Conv2d(d_model, int(d_model / 2), kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(int(d_model / 2)),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(int(d_model / 2), self.feature_channel, kernel_size=3, padding=0, stride=2),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Dropout(0.2),
        )

        # 22*22->44*44
        self.layer4_1 = nn.Sequential(
            nn.Upsample(scale_factor=8),
            nn.ReflectionPad2d(1),
            nn.Conv2d(d_model, int(d_model / 2), kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(int(d_model / 2)),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(int(d_model / 2), self.feature_channel, kernel_size=3, padding=0, stride=2),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Dropout(0.2),
        )

        # 11*11->44*44
        self.layer5_1 = nn.Sequential(
            nn.Upsample(scale_factor=8),
            nn.ReflectionPad2d(1),
            nn.Conv2d(d_model, int(d_model / 2), kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(int(d_model / 2)),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(int(d_model / 2), self.feature_channel, kernel_size=3, padding=0, stride=2),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Dropout(0.2),
        )

        # Residual fusion of the four concatenated feature levels.
        self.layer = nn.Sequential(
            ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
            ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
            ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
        )

        # 1x1 projections producing the query (theta, for A) and key (phi,
        # for B) embeddings used in the correlation below.
        self.theta = nn.Conv2d(
            in_channels=self.in_channels, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0
        )
        self.phi = nn.Conv2d(in_channels=self.in_channels, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0)

        self.upsampling = nn.Upsample(scale_factor=4)

    def forward(
        self,
        B_lab_map,
        A_relu2_1,
        A_relu3_1,
        A_relu4_1,
        A_relu5_1,
        B_relu2_1,
        B_relu3_1,
        B_relu4_1,
        B_relu5_1,
        temperature=0.001 * 5,
        detach_flag=False,
        WTA_scale_weight=1,
    ):
        """Warp B's colors onto A's layout (same procedure as WarpNet.forward).

        Args:
            B_lab_map: reference LAB image, shape (N, C, H, W).
            A_relu*_1 / B_relu*_1: four feature levels for A and B, each with
                ``d_model`` channels.
            temperature: softmax temperature for the correspondence.
            detach_flag: if True, the similarity matrix is detached.
            WTA_scale_weight: 1 disables winner-take-all sharpening.

        Returns:
            Tuple ``(y, similarity_map)``: warped LAB map at input resolution
            and a per-pixel max-similarity confidence map.
        """
        batch_size = B_lab_map.shape[0]
        channel = B_lab_map.shape[1]
        image_height = B_lab_map.shape[2]
        image_width = B_lab_map.shape[3]
        # Correspondences are computed at 1/4 of the input resolution.
        feature_height = int(image_height / 4)
        feature_width = int(image_width / 4)

        A_feature2_1 = self.layer2_1(A_relu2_1)
        B_feature2_1 = self.layer2_1(B_relu2_1)
        A_feature3_1 = self.layer3_1(A_relu3_1)
        B_feature3_1 = self.layer3_1(B_relu3_1)
        A_feature4_1 = self.layer4_1(A_relu4_1)
        B_feature4_1 = self.layer4_1(B_relu4_1)
        A_feature5_1 = self.layer5_1(A_relu5_1)
        B_feature5_1 = self.layer5_1(B_relu5_1)

        # Pad level 5 when size rounding leaves it short of level 2's shape.
        if A_feature5_1.shape[2] != A_feature2_1.shape[2] or A_feature5_1.shape[3] != A_feature2_1.shape[3]:
            A_feature5_1 = F.pad(A_feature5_1, (0, 0, 1, 1), "replicate")
            B_feature5_1 = F.pad(B_feature5_1, (0, 0, 1, 1), "replicate")

        A_features = self.layer(torch.cat((A_feature2_1, A_feature3_1, A_feature4_1, A_feature5_1), 1))
        B_features = self.layer(torch.cat((B_feature2_1, B_feature3_1, B_feature4_1, B_feature5_1), 1))

        # pairwise cosine similarity
        theta = self.theta(A_features).view(batch_size, self.inter_channels, -1)  # 2*256*(feature_height*feature_width)
        theta = theta - theta.mean(dim=-1, keepdim=True)  # center the feature
        theta_norm = torch.norm(theta, 2, 1, keepdim=True) + sys.float_info.epsilon
        theta = torch.div(theta, theta_norm)
        theta_permute = theta.permute(0, 2, 1)  # 2*(feature_height*feature_width)*256
        phi = self.phi(B_features).view(batch_size, self.inter_channels, -1)  # 2*256*(feature_height*feature_width)
        phi = phi - phi.mean(dim=-1, keepdim=True)  # center the feature
        phi_norm = torch.norm(phi, 2, 1, keepdim=True) + sys.float_info.epsilon
        phi = torch.div(phi, phi_norm)
        f = torch.matmul(theta_permute, phi)  # 2*(feature_height*feature_width)*(feature_height*feature_width)
        if detach_flag:
            f = f.detach()

        # Confidence = best match score per A-position (in-place unsqueeze).
        f_similarity = f.unsqueeze_(dim=1)
        similarity_map = torch.max(f_similarity, -1, keepdim=True)[0]
        similarity_map = similarity_map.view(batch_size, 1, feature_height, feature_width)

        # f can be negative
        f_WTA = f if WTA_scale_weight == 1 else WTA_scale.apply(f, WTA_scale_weight)
        f_WTA = f_WTA / temperature
        # NOTE: squeeze_ is in-place and drops the dim added above before the
        # row-wise softmax over B-positions.
        f_div_C = F.softmax(f_WTA.squeeze_(), dim=-1)  # 2*1936*1936;

        # downsample the reference color
        B_lab = F.avg_pool2d(B_lab_map, 4)
        B_lab = B_lab.view(batch_size, channel, -1)
        B_lab = B_lab.permute(0, 2, 1)  # 2*1936*channel

        # multiply the corr map with color
        y = torch.matmul(f_div_C, B_lab)  # 2*1936*channel
        y = y.permute(0, 2, 1).contiguous()
        y = y.view(batch_size, channel, feature_height, feature_width)  # 2*3*44*44
        # Restore the original input resolution.
        y = self.upsampling(y)
        similarity_map = self.upsampling(similarity_map)

        return y, similarity_map
+
+
class GeneralWarpNet(nn.Module):
    """input is Al, Bl, channel = 1, range~[0,255]

    Non-local warping network: fuses four scales of encoder features
    (relu2_1 .. relu5_1) of a target frame A and a reference frame B into a
    common resolution, computes a dense pairwise cosine-similarity matrix
    between all A and B spatial positions, and uses the (softmaxed) matrix
    to warp the reference color map ``B_lab_map`` onto A.  Returns the
    warped color map and a per-position confidence (max-similarity) map,
    both upsampled 4x back to image resolution.
    """

    def __init__(self, feature_channel=128):
        super(GeneralWarpNet, self).__init__()
        self.feature_channel = feature_channel
        # the four per-scale branches below are concatenated channel-wise
        self.in_channels = self.feature_channel * 4
        self.inter_channels = 256
        # relu2_1 branch — the stride-2 conv halves the spatial size
        # ("44*44" presumably refers to the feature resolution for the
        # training crop size; confirm against the data pipeline)
        # 44*44
        self.layer2_1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            # nn.Conv2d(128, 128, kernel_size=3, padding=0, stride=1),
            # nn.Conv2d(96, 128, kernel_size=3, padding=20, stride=1),
            nn.Conv2d(96, 128, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(128),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(128, self.feature_channel, kernel_size=3, padding=0, stride=2),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Dropout(0.2),
        )
        # relu3_1 branch — already at target resolution, stride 1 throughout
        self.layer3_1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            # nn.Conv2d(256, 128, kernel_size=3, padding=0, stride=1),
            # nn.Conv2d(192, 128, kernel_size=3, padding=10, stride=1),
            nn.Conv2d(192, 128, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(128),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(128, self.feature_channel, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Dropout(0.2),
        )

        # relu4_1 branch — upsamples 2x at the end to reach target resolution
        # 22*22->44*44
        self.layer4_1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            # nn.Conv2d(512, 256, kernel_size=3, padding=0, stride=1),
            # nn.Conv2d(384, 256, kernel_size=3, padding=5, stride=1),
            nn.Conv2d(384, 256, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(256),
            nn.PReLU(),
            nn.ReflectionPad2d(1),
            nn.Conv2d(256, self.feature_channel, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Upsample(scale_factor=2),
            nn.Dropout(0.2),
        )

        # relu5_1 branch — upsamples twice (4x total) to reach target resolution
        # 11*11->44*44
        self.layer5_1 = nn.Sequential(
            nn.ReflectionPad2d(1),
            # nn.Conv2d(1024, 256, kernel_size=3, padding=0, stride=1),
            # nn.Conv2d(768, 256, kernel_size=2, padding=2, stride=1),
            nn.Conv2d(768, 256, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(256),
            nn.PReLU(),
            nn.Upsample(scale_factor=2),
            nn.ReflectionPad2d(1),
            nn.Conv2d(256, self.feature_channel, kernel_size=3, padding=0, stride=1),
            nn.InstanceNorm2d(self.feature_channel),
            nn.PReLU(),
            nn.Upsample(scale_factor=2),
            nn.Dropout(0.2),
        )

        # fusion trunk applied to the concatenated 4-scale feature stack
        self.layer = nn.Sequential(
            ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
            ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
            ResidualBlock(self.feature_channel * 4, self.feature_channel * 4, kernel_size=3, padding=1, stride=1),
        )

        # 1x1 projections producing the two sides of the correlation matrix
        self.theta = nn.Conv2d(
            in_channels=self.in_channels, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0
        )
        self.phi = nn.Conv2d(in_channels=self.in_channels, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0)

        # brings the warped color / similarity maps back to image resolution
        self.upsampling = nn.Upsample(scale_factor=4)

    def forward(
        self,
        B_lab_map,
        A_relu2_1,
        A_relu3_1,
        A_relu4_1,
        A_relu5_1,
        B_relu2_1,
        B_relu3_1,
        B_relu4_1,
        B_relu5_1,
        temperature=0.001 * 5,
        detach_flag=False,
        WTA_scale_weight=1,
    ):
        batch_size = B_lab_map.shape[0]
        channel = B_lab_map.shape[1]
        image_height = B_lab_map.shape[2]
        image_width = B_lab_map.shape[3]
        # the correlation operates at 1/4 image resolution (matches the 4x
        # avg-pool of the color map and the 4x upsampling at the end)
        feature_height = int(image_height / 4)
        feature_width = int(image_width / 4)

        # scale feature size to 44*44
        A_feature2_1 = self.layer2_1(A_relu2_1)
        B_feature2_1 = self.layer2_1(B_relu2_1)
        A_feature3_1 = self.layer3_1(A_relu3_1)
        B_feature3_1 = self.layer3_1(B_relu3_1)
        A_feature4_1 = self.layer4_1(A_relu4_1)
        B_feature4_1 = self.layer4_1(B_relu4_1)
        A_feature5_1 = self.layer5_1(A_relu5_1)
        B_feature5_1 = self.layer5_1(B_relu5_1)

        # concatenate features
        # replicate-pad the height when the 4x-upsampled relu5_1 grid comes
        # out smaller than relu2_1's (can happen for some input sizes)
        if A_feature5_1.shape[2] != A_feature2_1.shape[2] or A_feature5_1.shape[3] != A_feature2_1.shape[3]:
            A_feature5_1 = F.pad(A_feature5_1, (0, 0, 1, 1), "replicate")
            B_feature5_1 = F.pad(B_feature5_1, (0, 0, 1, 1), "replicate")

        A_features = self.layer(torch.cat((A_feature2_1, A_feature3_1, A_feature4_1, A_feature5_1), 1))
        B_features = self.layer(torch.cat((B_feature2_1, B_feature3_1, B_feature4_1, B_feature5_1), 1))

        # pairwise cosine similarity: project, mean-center, L2-normalize
        # along the channel axis, then take inner products of all positions
        theta = self.theta(A_features).view(batch_size, self.inter_channels, -1)  # 2*256*(feature_height*feature_width)
        theta = theta - theta.mean(dim=-1, keepdim=True)  # center the feature
        theta_norm = torch.norm(theta, 2, 1, keepdim=True) + sys.float_info.epsilon
        theta = torch.div(theta, theta_norm)
        theta_permute = theta.permute(0, 2, 1)  # 2*(feature_height*feature_width)*256
        phi = self.phi(B_features).view(batch_size, self.inter_channels, -1)  # 2*256*(feature_height*feature_width)
        phi = phi - phi.mean(dim=-1, keepdim=True)  # center the feature
        phi_norm = torch.norm(phi, 2, 1, keepdim=True) + sys.float_info.epsilon
        phi = torch.div(phi, phi_norm)
        f = torch.matmul(theta_permute, phi)  # 2*(feature_height*feature_width)*(feature_height*feature_width)
        if detach_flag:
            # stop gradients through the correlation matrix
            f = f.detach()

        # NOTE(review): unsqueeze_ is in-place, so `f` itself becomes 4-D here
        f_similarity = f.unsqueeze_(dim=1)
        # confidence of the best match for each A position
        similarity_map = torch.max(f_similarity, -1, keepdim=True)[0]
        similarity_map = similarity_map.view(batch_size, 1, feature_height, feature_width)

        # f can be negative
        # optional winner-take-all sharpening, then temperature-scaled softmax
        f_WTA = f if WTA_scale_weight == 1 else WTA_scale.apply(f, WTA_scale_weight)
        f_WTA = f_WTA / temperature
        # NOTE(review): squeeze_() is in-place and removes *all* size-1 dims;
        # with batch_size == 1 this also drops the batch dim (the later
        # matmul still broadcasts) — confirm this is intended
        f_div_C = F.softmax(f_WTA.squeeze_(), dim=-1)  # 2*1936*1936;
        
        # downsample the reference color
        B_lab = F.avg_pool2d(B_lab_map, 4)
        B_lab = B_lab.view(batch_size, channel, -1)
        B_lab = B_lab.permute(0, 2, 1)  # 2*1936*channel

        # multiply the corr map with color
        y = torch.matmul(f_div_C, B_lab)  # 2*1936*channel
        y = y.permute(0, 2, 1).contiguous()
        y = y.view(batch_size, channel, feature_height, feature_width)  # 2*3*44*44
        y = self.upsampling(y)
        similarity_map = self.upsampling(similarity_map)

        return y, similarity_map
diff --git a/src/models/CNN/__init__.py b/src/models/CNN/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/models/CNN/__pycache__/ColorVidNet.cpython-310.pyc b/src/models/CNN/__pycache__/ColorVidNet.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..12d9ce50eeeebb42ba0366cc16febca38d30cf55
Binary files /dev/null and b/src/models/CNN/__pycache__/ColorVidNet.cpython-310.pyc differ
diff --git a/src/models/CNN/__pycache__/FrameColor.cpython-310.pyc b/src/models/CNN/__pycache__/FrameColor.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8241f80ae48ee657266126e44cc66ae4f599528b
Binary files /dev/null and b/src/models/CNN/__pycache__/FrameColor.cpython-310.pyc differ
diff --git a/src/models/CNN/__pycache__/NonlocalNet.cpython-310.pyc b/src/models/CNN/__pycache__/NonlocalNet.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..aa5b36cbcac4559c8a4ddeb9e9934472a01cc702
Binary files /dev/null and b/src/models/CNN/__pycache__/NonlocalNet.cpython-310.pyc differ
diff --git a/src/models/CNN/__pycache__/__init__.cpython-310.pyc b/src/models/CNN/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..451c134a22f844fbb25e3eeee2ef919f7ce010b5
Binary files /dev/null and b/src/models/CNN/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/models/__init__.py b/src/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/models/__pycache__/__init__.cpython-310.pyc b/src/models/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3428795b1984f04bb552685d67c59ceeaf8b077f
Binary files /dev/null and b/src/models/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/models/vit/__init__.py b/src/models/vit/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/models/vit/__pycache__/__init__.cpython-310.pyc b/src/models/vit/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..82b70e0d92e32dfcd56a027b8b7d42c667c2205b
Binary files /dev/null and b/src/models/vit/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/models/vit/__pycache__/blocks.cpython-310.pyc b/src/models/vit/__pycache__/blocks.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f00084f6c09b78f4c0f9a9d4dc762d8ab447633f
Binary files /dev/null and b/src/models/vit/__pycache__/blocks.cpython-310.pyc differ
diff --git a/src/models/vit/__pycache__/config.cpython-310.pyc b/src/models/vit/__pycache__/config.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..45d58642b7dfbc4407df93b397c546e6eb2370d9
Binary files /dev/null and b/src/models/vit/__pycache__/config.cpython-310.pyc differ
diff --git a/src/models/vit/__pycache__/decoder.cpython-310.pyc b/src/models/vit/__pycache__/decoder.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..85da7a210bff670c757f1f4185fdade2a80afcd6
Binary files /dev/null and b/src/models/vit/__pycache__/decoder.cpython-310.pyc differ
diff --git a/src/models/vit/__pycache__/embed.cpython-310.pyc b/src/models/vit/__pycache__/embed.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..12b9fe151e2afa0135f555caba4596222623a70f
Binary files /dev/null and b/src/models/vit/__pycache__/embed.cpython-310.pyc differ
diff --git a/src/models/vit/__pycache__/factory.cpython-310.pyc b/src/models/vit/__pycache__/factory.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2d7f5e297a19c13c3f05f226227984e538f8873a
Binary files /dev/null and b/src/models/vit/__pycache__/factory.cpython-310.pyc differ
diff --git a/src/models/vit/__pycache__/utils.cpython-310.pyc b/src/models/vit/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f441581eda885fc62fc1faa2156792bb94021069
Binary files /dev/null and b/src/models/vit/__pycache__/utils.cpython-310.pyc differ
diff --git a/src/models/vit/__pycache__/vit.cpython-310.pyc b/src/models/vit/__pycache__/vit.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..99ddfe09f7e968b42b615a5d2f1ce047149264c9
Binary files /dev/null and b/src/models/vit/__pycache__/vit.cpython-310.pyc differ
diff --git a/src/models/vit/blocks.py b/src/models/vit/blocks.py
new file mode 100644
index 0000000000000000000000000000000000000000..305114e2274b360dd4fce3eaa08f13922563fbff
--- /dev/null
+++ b/src/models/vit/blocks.py
@@ -0,0 +1,80 @@
+import torch.nn as nn
+from timm.models.layers import DropPath
+
+
class FeedForward(nn.Module):
    """Transformer MLP sub-block: Linear -> GELU -> Dropout -> Linear -> Dropout.

    ``out_dim`` defaults to ``dim`` so the block maps tokens back to their
    input width unless told otherwise.
    """

    def __init__(self, dim, hidden_dim, dropout, out_dim=None):
        super().__init__()
        self.fc1 = nn.Linear(dim, hidden_dim)
        self.act = nn.GELU()
        self.fc2 = nn.Linear(hidden_dim, dim if out_dim is None else out_dim)
        self.drop = nn.Dropout(dropout)

    @property
    def unwrapped(self):
        # kept for API parity with wrapped variants of this module
        return self

    def forward(self, x):
        hidden = self.drop(self.act(self.fc1(x)))
        return self.drop(self.fc2(hidden))
+
+
class Attention(nn.Module):
    """Multi-head self-attention with a fused qkv projection (timm-style).

    Returns both the attended tokens and the (post-dropout) attention
    weights so callers can visualize attention maps.
    """

    def __init__(self, dim, heads, dropout):
        super().__init__()
        self.heads = heads
        self.scale = (dim // heads) ** -0.5
        self.attn = None

        self.qkv = nn.Linear(dim, dim * 3)
        self.attn_drop = nn.Dropout(dropout)
        self.proj = nn.Linear(dim, dim)
        self.proj_drop = nn.Dropout(dropout)

    @property
    def unwrapped(self):
        # kept for API parity with wrapped variants of this module
        return self

    def forward(self, x, mask=None):
        batch, tokens, dim = x.shape
        head_dim = dim // self.heads
        # fused projection, then split into per-head q/k/v:
        # (B, N, 3*dim) -> (3, B, heads, N, head_dim)
        qkv = self.qkv(x).reshape(batch, tokens, 3, self.heads, head_dim)
        q, k, v = qkv.permute(2, 0, 3, 1, 4).unbind(0)

        scores = (q @ k.transpose(-2, -1)) * self.scale
        weights = self.attn_drop(scores.softmax(dim=-1))

        out = (weights @ v).transpose(1, 2).reshape(batch, tokens, dim)
        out = self.proj_drop(self.proj(out))

        return out, weights
+
+
class Block(nn.Module):
    """Pre-norm transformer encoder block with optional stochastic depth.

    x -> x + DropPath(Attn(LN(x))) -> x + DropPath(MLP(LN(x)))
    """

    def __init__(self, dim, heads, mlp_dim, dropout, drop_path):
        super().__init__()
        self.norm1 = nn.LayerNorm(dim)
        self.norm2 = nn.LayerNorm(dim)
        self.attn = Attention(dim, heads, dropout)
        self.mlp = FeedForward(dim, mlp_dim, dropout)
        # identity when the rate is 0 so train/eval behave identically
        self.drop_path = nn.Identity() if drop_path <= 0.0 else DropPath(drop_path)

    def forward(self, x, mask=None, return_attention=False):
        attended, attn_weights = self.attn(self.norm1(x), mask)
        if return_attention:
            # early exit used for attention-map visualization
            return attn_weights
        x = x + self.drop_path(attended)
        return x + self.drop_path(self.mlp(self.norm2(x)))
diff --git a/src/models/vit/config.py b/src/models/vit/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..9728920e7962562cca44223633fdaaef4c682389
--- /dev/null
+++ b/src/models/vit/config.py
@@ -0,0 +1,22 @@
+import yaml
+from pathlib import Path
+
+import os
+
+
def load_config():
    """Load the model/decoder/dataset configuration shipped next to this module.

    Returns:
        dict: parsed contents of ``config.yml``.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original left the open file to the garbage collector).
    with open(Path(__file__).parent / "config.yml", "r") as f:
        # safe_load is sufficient for a plain data config and avoids
        # constructing arbitrary Python objects from YAML tags.
        return yaml.safe_load(f)
+
+
def check_os_environ(key, use):
    """Raise ``ValueError`` unless environment variable *key* is set.

    Args:
        key: name of the required environment variable.
        use: short description of what the variable is needed for,
            interpolated into the error message.
    """
    if key in os.environ:
        return
    raise ValueError(
        f"{key} is not defined in the os variables, it is required for {use}."
    )
+
+
def dataset_dir():
    """Return the dataset root directory from the ``DATASET`` env variable.

    Raises:
        ValueError: if ``DATASET`` is not set (via ``check_os_environ``).
    """
    check_os_environ("DATASET", "data loading")
    return os.environ.get("DATASET")
diff --git a/src/models/vit/config.yml b/src/models/vit/config.yml
new file mode 100644
index 0000000000000000000000000000000000000000..acdc7f899b3cdd5cb46a62b33cb537e3e6ddb843
--- /dev/null
+++ b/src/models/vit/config.yml
@@ -0,0 +1,132 @@
+model:
+ # deit
+ deit_tiny_distilled_patch16_224:
+ image_size: 224
+ patch_size: 16
+ d_model: 192
+ n_heads: 3
+ n_layers: 12
+ normalization: deit
+ distilled: true
+ deit_small_distilled_patch16_224:
+ image_size: 224
+ patch_size: 16
+ d_model: 384
+ n_heads: 6
+ n_layers: 12
+ normalization: deit
+ distilled: true
+ deit_base_distilled_patch16_224:
+ image_size: 224
+ patch_size: 16
+ d_model: 768
+ n_heads: 12
+ n_layers: 12
+ normalization: deit
+ distilled: true
+ deit_base_distilled_patch16_384:
+ image_size: 384
+ patch_size: 16
+ d_model: 768
+ n_heads: 12
+ n_layers: 12
+ normalization: deit
+ distilled: true
+ # vit
+ vit_base_patch8_384:
+ image_size: 384
+ patch_size: 8
+ d_model: 768
+ n_heads: 12
+ n_layers: 12
+ normalization: vit
+ distilled: false
+ vit_tiny_patch16_384:
+ image_size: 384
+ patch_size: 16
+ d_model: 192
+ n_heads: 3
+ n_layers: 12
+ normalization: vit
+ distilled: false
+ vit_small_patch16_384:
+ image_size: 384
+ patch_size: 16
+ d_model: 384
+ n_heads: 6
+ n_layers: 12
+ normalization: vit
+ distilled: false
+ vit_base_patch16_384:
+ image_size: 384
+ patch_size: 16
+ d_model: 768
+ n_heads: 12
+ n_layers: 12
+ normalization: vit
+ distilled: false
+ vit_large_patch16_384:
+ image_size: 384
+ patch_size: 16
+ d_model: 1024
+ n_heads: 16
+ n_layers: 24
    normalization: vit
    distilled: false
+ vit_small_patch32_384:
+ image_size: 384
+ patch_size: 32
+ d_model: 384
+ n_heads: 6
+ n_layers: 12
+ normalization: vit
+ distilled: false
+ vit_base_patch32_384:
+ image_size: 384
+ patch_size: 32
+ d_model: 768
+ n_heads: 12
+ n_layers: 12
    normalization: vit
    distilled: false
+ vit_large_patch32_384:
+ image_size: 384
+ patch_size: 32
+ d_model: 1024
+ n_heads: 16
+ n_layers: 24
    normalization: vit
    distilled: false
+decoder:
+ linear: {}
+ deeplab_dec:
+ encoder_layer: -1
+ mask_transformer:
+ drop_path_rate: 0.0
+ dropout: 0.1
+ n_layers: 2
+dataset:
+ ade20k:
+ epochs: 64
+ eval_freq: 2
+ batch_size: 8
+ learning_rate: 0.001
+ im_size: 512
+ crop_size: 512
+ window_size: 512
+ window_stride: 512
+ pascal_context:
+ epochs: 256
+ eval_freq: 8
+ batch_size: 16
+ learning_rate: 0.001
+ im_size: 520
+ crop_size: 480
+ window_size: 480
+ window_stride: 320
+ cityscapes:
+ epochs: 216
+ eval_freq: 4
+ batch_size: 8
+ learning_rate: 0.01
+ im_size: 1024
+ crop_size: 768
+ window_size: 768
+ window_stride: 512
diff --git a/src/models/vit/decoder.py b/src/models/vit/decoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..acdb2f83660904423b97f9163bd81a4016dc8723
--- /dev/null
+++ b/src/models/vit/decoder.py
@@ -0,0 +1,34 @@
+import torch.nn as nn
+from einops import rearrange
+from src.models.vit.utils import init_weights
+
+
class DecoderLinear(nn.Module):
    """Project ViT token embeddings to ``n_cls`` channels and upsample them
    into a 2-D feature map.

    NOTE(review): the LayerNorm shape hard-codes a 24x24 base token grid
    (i.e. 384-px images with 16-px patches) — confirm before using other
    image/patch sizes.
    """

    def __init__(
        self,
        n_cls,
        d_encoder,
        scale_factor,
        dropout_rate=0.3,
    ):
        # n_cls: number of output channels per spatial position
        # d_encoder: token embedding width coming from the encoder
        # scale_factor: spatial upscaling relative to the token grid;
        #   upsampling by scale_factor**2 along the flattened token axis
        #   yields scale_factor per spatial dimension after the rearrange
        super().__init__()
        self.scale_factor = scale_factor
        self.head = nn.Linear(d_encoder, n_cls)
        self.upsampling = nn.Upsample(scale_factor=scale_factor**2, mode="linear")
        self.norm = nn.LayerNorm((n_cls, 24 * scale_factor, 24 * scale_factor))
        self.dropout = nn.Dropout(dropout_rate)
        self.gelu = nn.GELU()
        self.apply(init_weights)

    def forward(self, x, img_size):
        # x: (B, n_tokens, d_encoder) token sequence WITHOUT the class token
        # img_size: (H, W) of the original image; only H is used to recover
        #   the token-grid height
        H, _ = img_size
        x = self.head(x)  ####### (2, 577, 64)
        x = x.transpose(2, 1)  ## (2, 64, 576)
        # 1-D linear interpolation along the flattened token axis
        x = self.upsampling(x)  # (2, 64, 576*scale_factor*scale_factor)
        x = x.transpose(2, 1)  ## (2, 576*scale_factor*scale_factor, 64)
        x = rearrange(x, "b (h w) c -> b c h w", h=H // (16 // self.scale_factor))  # (2, 64, 24*scale_factor, 24*scale_factor)
        x = self.norm(x)
        x = self.dropout(x)
        x = self.gelu(x)

        return x  # (2, 64, a, a)
diff --git a/src/models/vit/embed.py b/src/models/vit/embed.py
new file mode 100644
index 0000000000000000000000000000000000000000..d04b4dce8c8406dc9e575ce88a431b5c6863ee4f
--- /dev/null
+++ b/src/models/vit/embed.py
@@ -0,0 +1,72 @@
+from torch import nn
+from typing import List
+from src.models.vit.factory import create_vit
+from src.models.vit.vit import FeatureTransform
+from ...utils import print_num_params
+from timm import create_model
+from einops import rearrange
+
+
class EmbedModel(nn.Module):
    """Frozen ViT feature extractor.

    Runs a (frozen) ViT and returns the outputs of the blocks listed in
    ``head_out_idx`` as four features.  When ``n_dim_output == 3`` the raw
    token sequences are additionally decoded into 2-D maps by a trainable
    ``FeatureTransform``.
    """

    def __init__(self, config, head_out_idx: List[int], n_dim_output=3, device="cuda") -> None:
        # config: model config dict consumed by create_vit
        # head_out_idx: indices of transformer blocks whose outputs are tapped
        # n_dim_output: 3 -> token sequences (decoded below); 4 -> feature maps
        super().__init__()
        self.head_out_idx = head_out_idx
        self.n_dim_output = n_dim_output
        self.device = device
        self.vit = create_vit(config).to(self.device)
        # the backbone is frozen: eval mode and no gradients
        self.vit.eval()
        for params in self.vit.parameters():
            params.requires_grad = False
        print_num_params(self.vit)
        print_num_params(self.vit, is_trainable=True)

        if self.n_dim_output == 3:
            # trainable decoder that maps token sequences to 2-D feature maps
            self.feature_transformer = FeatureTransform(config["image_size"], config["d_model"]).to(self.device)
            print_num_params(self.feature_transformer)
            print_num_params(self.feature_transformer, is_trainable=True)

    def forward(self, x):
        # returns a tuple of four feature tensors, one per tapped block
        vit_outputs = self.vit(x, self.head_out_idx, n_dim_output=self.n_dim_output, return_features=True)
        feat0, feat1, feat2, feat3 = vit_outputs[0], vit_outputs[1], vit_outputs[2], vit_outputs[3]
        if self.n_dim_output == 3:
            feat0, feat1, feat2, feat3 = self.feature_transformer(vit_outputs)
        return feat0, feat1, feat2, feat3
+
+
class GeneralEmbedModel(nn.Module):
    """Frozen timm Swin-V2 backbone used as a multi-scale feature extractor.

    Supported ``pretrained_model`` values:
        "swin-tiny"  -> timm "swinv2_cr_tiny_ns_224.sw_in1k"
        "swin-small" -> timm "swinv2_cr_small_ns_224.sw_in1k"

    Candidate timm model names kept from the original author's notes:
        vit_tiny_patch16_224.augreg_in21k_ft_in1k
        swinv2_cr_tiny_ns_224.sw_in1k
    """

    def __init__(self, pretrained_model="swin-tiny", device="cuda") -> None:
        super().__init__()
        self.device = device
        self.pretrained_model = pretrained_model
        if pretrained_model == "swin-tiny":
            # features_only + out_indices: return the last four stage outputs
            self.pretrained = create_model(
                "swinv2_cr_tiny_ns_224.sw_in1k",
                pretrained=True,
                features_only=True,
                out_indices=[-4, -3, -2, -1],
            ).to(device)
        elif pretrained_model == "swin-small":
            self.pretrained = create_model(
                "swinv2_cr_small_ns_224.sw_in1k",
                pretrained=True,
                features_only=True,
                out_indices=[-4, -3, -2, -1],
            ).to(device)
        else:
            raise NotImplementedError

        # backbone is frozen: eval mode and no gradients
        self.pretrained.eval()
        self.upsample = nn.Upsample(scale_factor=2)

        for params in self.pretrained.parameters():
            params.requires_grad = False

    def forward(self, x):
        # list of four stage feature maps, each upsampled 2x
        outputs = self.pretrained(x)
        outputs = [self.upsample(feat) for feat in outputs]

        return outputs
diff --git a/src/models/vit/factory.py b/src/models/vit/factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab2cad05744bf6ed60ee6278a5b79b92321ac4c5
--- /dev/null
+++ b/src/models/vit/factory.py
@@ -0,0 +1,45 @@
+import os
+import torch
+from timm.models.vision_transformer import default_cfgs
+from timm.models.helpers import load_pretrained, load_custom_pretrained
+from src.models.vit.utils import checkpoint_filter_fn
+from src.models.vit.vit import VisionTransformer
+
+
def create_vit(model_cfg):
    """Build a ``VisionTransformer`` from a config dict and load pretrained
    weights for it.

    Args:
        model_cfg: dict with at least ``backbone``, ``normalization``,
            ``image_size``, ``d_model`` plus the remaining
            ``VisionTransformer`` constructor kwargs.

    Returns:
        A ``VisionTransformer`` with pretrained weights loaded.
    """
    # work on a copy; backbone/normalization are consumed here, the rest is
    # forwarded to the VisionTransformer constructor
    model_cfg = model_cfg.copy()
    backbone = model_cfg.pop("backbone")

    model_cfg.pop("normalization")
    model_cfg["n_cls"] = 1000
    mlp_expansion_ratio = 4
    model_cfg["d_ff"] = mlp_expansion_ratio * model_cfg["d_model"]

    if backbone in default_cfgs:
        default_cfg = default_cfgs[backbone]
    else:
        # minimal stand-in config for backbones timm does not know about
        default_cfg = dict(
            pretrained=False,
            num_classes=1000,
            drop_rate=0.0,
            drop_path_rate=0.0,
            drop_block_rate=None,
        )

    default_cfg["input_size"] = (
        3,
        model_cfg["image_size"][0],
        model_cfg["image_size"][1],
    )
    model = VisionTransformer(**model_cfg)
    if backbone == "vit_base_patch8_384":
        # this checkpoint is expected locally under $TORCH_HOME
        path = os.path.expandvars("$TORCH_HOME/hub/checkpoints/vit_base_patch8_384.pth")
        state_dict = torch.load(path, map_location="cpu")
        filtered_dict = checkpoint_filter_fn(state_dict, model)
        model.load_state_dict(filtered_dict, strict=True)
    elif "deit" in backbone:
        load_pretrained(model, default_cfg, filter_fn=checkpoint_filter_fn)
    else:
        load_custom_pretrained(model, default_cfg)

    return model
diff --git a/src/models/vit/utils.py b/src/models/vit/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..eabc9de77e12d9ed2d1907ca3cedb6424fb16c03
--- /dev/null
+++ b/src/models/vit/utils.py
@@ -0,0 +1,58 @@
+import math
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from timm.models.layers import trunc_normal_
+
+
def resize_pos_embed(posemb, grid_old_shape, grid_new_shape, num_extra_tokens):
    """Bilinearly rescale a ViT position-embedding grid to a new size.

    Extra tokens (cls/dist) at the front are kept untouched; only the
    spatial grid part is interpolated.  Adapted from
    https://github.com/google-research/vision_transformer/blob/00883dd691c63a6830751563748663526e811cee/vit_jax/checkpoint.py#L224

    Args:
        posemb: (1, num_extra_tokens + H*W, dim) embedding tensor.
        grid_old_shape: (H, W) of the source grid, or None to assume square.
        grid_new_shape: target (H, W).
        num_extra_tokens: leading non-spatial tokens to preserve.
    """
    tok_emb = posemb[:, :num_extra_tokens]
    grid_emb = posemb[0, num_extra_tokens:]

    if grid_old_shape is None:
        # assume a square grid when the old shape is unknown
        side = int(math.sqrt(len(grid_emb)))
        old_h, old_w = side, side
    else:
        old_h, old_w = grid_old_shape

    new_h, new_w = grid_new_shape
    grid_emb = grid_emb.reshape(1, old_h, old_w, -1).permute(0, 3, 1, 2)
    grid_emb = F.interpolate(grid_emb, size=(new_h, new_w), mode="bilinear")
    grid_emb = grid_emb.permute(0, 2, 3, 1).reshape(1, new_h * new_w, -1)
    return torch.cat([tok_emb, grid_emb], dim=1)
+
+
def init_weights(m):
    """timm-style module init: truncated-normal Linear weights with zero
    bias, and unit-weight / zero-bias LayerNorm.  Intended for
    ``module.apply(init_weights)``."""
    if isinstance(m, nn.Linear):
        trunc_normal_(m.weight, std=0.02)
        if m.bias is not None:
            nn.init.constant_(m.bias, 0)
    elif isinstance(m, nn.LayerNorm):
        nn.init.constant_(m.bias, 0)
        nn.init.constant_(m.weight, 1.0)
+
+
def checkpoint_filter_fn(state_dict, model):
    """convert patch embedding weight from manual patchify + linear proj to conv

    Also resizes ``pos_embed`` when the checkpoint grid does not match the
    model's configured image/patch size.
    """
    if "model" in state_dict:
        # deit checkpoints nest the actual weights under a "model" key
        state_dict = state_dict["model"]

    # one extra (cls) token, plus a distillation token if present
    num_extra_tokens = 1 + ("dist_token" in state_dict.keys())
    patch_size = model.patch_size
    image_size = model.patch_embed.image_size
    target_grid = (image_size[0] // patch_size, image_size[1] // patch_size)

    filtered = {}
    for name, tensor in state_dict.items():
        if name == "pos_embed" and tensor.shape != model.pos_embed.shape:
            # To resize pos embedding when using model at different size from pretrained weights
            tensor = resize_pos_embed(tensor, None, target_grid, num_extra_tokens)
        filtered[name] = tensor
    return filtered
diff --git a/src/models/vit/vit.py b/src/models/vit/vit.py
new file mode 100644
index 0000000000000000000000000000000000000000..72f8a24daf623c8389e359ac5c0e3257817e4b9c
--- /dev/null
+++ b/src/models/vit/vit.py
@@ -0,0 +1,202 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from timm.models.vision_transformer import _load_weights
+from timm.models.layers import trunc_normal_
+from typing import List
+
+# from utils import init_weights, resize_pos_embed
+# from blocks import Block
+
+from src.models.vit.utils import init_weights, resize_pos_embed
+from src.models.vit.blocks import Block
+from src.models.vit.decoder import DecoderLinear
+
+
class PatchEmbedding(nn.Module):
    """Split an image into non-overlapping patches and linearly embed them
    via a strided convolution.

    Raises:
        ValueError: if either image dimension is not a multiple of
            ``patch_size``.
    """

    def __init__(self, image_size, patch_size, embed_dim, channels):
        super().__init__()

        self.image_size = image_size
        h, w = image_size
        if h % patch_size or w % patch_size:
            raise ValueError("image dimensions must be divisible by the patch size")
        self.grid_size = h // patch_size, w // patch_size
        self.num_patches = self.grid_size[0] * self.grid_size[1]
        self.patch_size = patch_size

        # a conv with kernel == stride == patch_size is exactly a per-patch
        # linear projection
        self.proj = nn.Conv2d(channels, embed_dim, kernel_size=patch_size, stride=patch_size)

    def forward(self, im):
        # (B, C, H, W) -> (B, num_patches, embed_dim)
        return self.proj(im).flatten(2).transpose(1, 2)
+
+
class VisionTransformer(nn.Module):
    """ViT encoder (timm/DeiT-compatible) that can also expose the outputs
    of selected intermediate blocks as multi-scale features.

    Args:
        image_size: (H, W) of the expected input.
        patch_size: side length of the square patches.
        n_layers: number of transformer blocks.
        d_model: token embedding dimension.
        d_ff: hidden dimension of each block's MLP.
        n_heads: attention heads per block.
        n_cls: classifier output dimension.
        dropout: dropout rate used for embeddings and inside blocks.
        drop_path_rate: max stochastic-depth rate (increases linearly per layer).
        distilled: if True, adds a DeiT distillation token and second head.
        channels: input image channels.
    """

    def __init__(
        self,
        image_size,
        patch_size,
        n_layers,
        d_model,
        d_ff,
        n_heads,
        n_cls,
        dropout=0.1,
        drop_path_rate=0.0,
        distilled=False,
        channels=3,
    ):
        super().__init__()
        self.patch_embed = PatchEmbedding(
            image_size,
            patch_size,
            d_model,
            channels,
        )
        self.patch_size = patch_size
        self.n_layers = n_layers
        self.d_model = d_model
        self.d_ff = d_ff
        self.n_heads = n_heads
        self.dropout = nn.Dropout(dropout)
        self.n_cls = n_cls

        # cls and pos tokens
        self.cls_token = nn.Parameter(torch.zeros(1, 1, d_model))
        self.distilled = distilled
        if self.distilled:
            # DeiT: distillation token + its own classifier head;
            # pos_embed covers patches + cls + dist tokens
            self.dist_token = nn.Parameter(torch.zeros(1, 1, d_model))
            self.pos_embed = nn.Parameter(torch.randn(1, self.patch_embed.num_patches + 2, d_model))
            self.head_dist = nn.Linear(d_model, n_cls)
        else:
            self.pos_embed = nn.Parameter(torch.randn(1, self.patch_embed.num_patches + 1, d_model))

        # transformer blocks with linearly increasing stochastic-depth rate
        dpr = [x.item() for x in torch.linspace(0, drop_path_rate, n_layers)]
        self.blocks = nn.ModuleList([Block(d_model, n_heads, d_ff, dropout, dpr[i]) for i in range(n_layers)])

        # output head
        self.norm = nn.LayerNorm(d_model)
        self.head = nn.Linear(d_model, n_cls)

        trunc_normal_(self.pos_embed, std=0.02)
        trunc_normal_(self.cls_token, std=0.02)
        if self.distilled:
            trunc_normal_(self.dist_token, std=0.02)
        self.pre_logits = nn.Identity()

        self.apply(init_weights)

    @torch.jit.ignore
    def no_weight_decay(self):
        # parameters excluded from weight decay by optimizers that honor this
        return {"pos_embed", "cls_token", "dist_token"}

    @torch.jit.ignore()
    def load_pretrained(self, checkpoint_path, prefix=""):
        _load_weights(self, checkpoint_path, prefix)

    def forward(self, im, head_out_idx: List[int], n_dim_output=3, return_features=False):
        """Encode *im*; optionally return intermediate block outputs.

        Args:
            im: (B, C, H, W) input images.
            head_out_idx: block indices whose outputs are collected.
            n_dim_output: 3 -> token sequences; 4 -> 2-D feature maps.
            return_features: if True, return the collected block outputs
                instead of classifier logits.
        """
        B, _, H, W = im.shape
        PS = self.patch_size
        assert n_dim_output == 3 or n_dim_output == 4, "n_dim_output must be 3 or 4"
        x = self.patch_embed(im)
        cls_tokens = self.cls_token.expand(B, -1, -1)
        if self.distilled:
            dist_tokens = self.dist_token.expand(B, -1, -1)
            x = torch.cat((cls_tokens, dist_tokens, x), dim=1)
        else:
            x = torch.cat((cls_tokens, x), dim=1)

        pos_embed = self.pos_embed
        num_extra_tokens = 1 + self.distilled
        if x.shape[1] != pos_embed.shape[1]:
            # input resolution differs from the configured one: rescale the
            # positional grid on the fly
            pos_embed = resize_pos_embed(
                pos_embed,
                self.patch_embed.grid_size,
                (H // PS, W // PS),
                num_extra_tokens,
            )
        x = x + pos_embed
        x = self.dropout(x)
        device = x.device

        # NOTE(review): heads_out is re-allocated and register_buffer is
        # re-called on every forward; the (H // PS) ** 2 + 1 token count also
        # assumes a single extra token — verify for distilled models
        if n_dim_output == 3:
            heads_out = torch.zeros(size=(len(head_out_idx), B, (H // PS) ** 2 + 1, self.d_model)).to(device)
        else:
            heads_out = torch.zeros(size=(len(head_out_idx), B, self.d_model, H // PS, H // PS)).to(device)
        self.register_buffer("heads_out", heads_out)

        head_idx = 0
        for idx_layer, blk in enumerate(self.blocks):
            x = blk(x)
            if idx_layer in head_out_idx:
                if n_dim_output == 3:
                    heads_out[head_idx] = x
                else:
                    # NOTE(review): hard-codes a 24x24 token grid (H//PS == 24,
                    # e.g. 384-px images with 16-px patches) — confirm
                    heads_out[head_idx] = x[:, 1:, :].reshape((-1, 24, 24, self.d_model)).permute(0, 3, 1, 2)
                head_idx += 1

        x = self.norm(x)

        if return_features:
            # note: collected features are taken before the final LayerNorm
            return heads_out

        if self.distilled:
            # DeiT inference: average the cls and distillation head outputs
            x, x_dist = x[:, 0], x[:, 1]
            x = self.head(x)
            x_dist = self.head_dist(x_dist)
            x = (x + x_dist) / 2
        else:
            x = x[:, 0]
            x = self.head(x)
        return x

    def get_attention_map(self, im, layer_id):
        """Return the attention weights of block *layer_id* for input *im*."""
        if layer_id >= self.n_layers or layer_id < 0:
            raise ValueError(f"Provided layer_id: {layer_id} is not valid. 0 <= {layer_id} < {self.n_layers}.")
        B, _, H, W = im.shape
        PS = self.patch_size

        # same embedding pipeline as forward(), minus dropout
        x = self.patch_embed(im)
        cls_tokens = self.cls_token.expand(B, -1, -1)
        if self.distilled:
            dist_tokens = self.dist_token.expand(B, -1, -1)
            x = torch.cat((cls_tokens, dist_tokens, x), dim=1)
        else:
            x = torch.cat((cls_tokens, x), dim=1)

        pos_embed = self.pos_embed
        num_extra_tokens = 1 + self.distilled
        if x.shape[1] != pos_embed.shape[1]:
            pos_embed = resize_pos_embed(
                pos_embed,
                self.patch_embed.grid_size,
                (H // PS, W // PS),
                num_extra_tokens,
            )
        x = x + pos_embed

        for i, blk in enumerate(self.blocks):
            if i < layer_id:
                x = blk(x)
            else:
                # target layer reached: return its attention map
                return blk(x, return_attention=True)
+
+
class FeatureTransform(nn.Module):
    """Decode four ViT token sequences (taken at different depths) into a
    pyramid of 2-D feature maps of increasing resolution and decreasing
    channel count.

    Args:
        img_size: image size forwarded to each ``DecoderLinear``
            (its forward unpacks it as ``(H, _)`` — presumably an (H, W)
            tuple; confirm against callers that pass ``config["image_size"]``).
        d_encoder: token embedding width of the encoder.
        nls_list: output channels per pyramid level (shallow -> deep).
        scale_factor_list: spatial upscaling per level (shallow -> deep).
    """

    def __init__(self, img_size, d_encoder, nls_list=None, scale_factor_list=None):
        super(FeatureTransform, self).__init__()
        # Use None defaults instead of mutable list defaults, which would be
        # shared across all instances.
        if nls_list is None:
            nls_list = [128, 256, 512, 512]
        if scale_factor_list is None:
            scale_factor_list = [8, 4, 2, 1]
        self.img_size = img_size

        self.decoder_0 = DecoderLinear(n_cls=nls_list[0], d_encoder=d_encoder, scale_factor=scale_factor_list[0])
        self.decoder_1 = DecoderLinear(n_cls=nls_list[1], d_encoder=d_encoder, scale_factor=scale_factor_list[1])
        self.decoder_2 = DecoderLinear(n_cls=nls_list[2], d_encoder=d_encoder, scale_factor=scale_factor_list[2])
        self.decoder_3 = DecoderLinear(n_cls=nls_list[3], d_encoder=d_encoder, scale_factor=scale_factor_list[3])

    def forward(self, x_list):
        # Drop the leading class token ([:, 1:, :]) from each sequence
        # before decoding it into a 2-D map.
        feat_3 = self.decoder_3(x_list[3][:, 1:, :], self.img_size)  # (2, 512, 24, 24)
        feat_2 = self.decoder_2(x_list[2][:, 1:, :], self.img_size)  # (2, 512, 48, 48)
        feat_1 = self.decoder_1(x_list[1][:, 1:, :], self.img_size)  # (2, 256, 96, 96)
        feat_0 = self.decoder_0(x_list[0][:, 1:, :], self.img_size)  # (2, 128, 192, 192)
        return feat_0, feat_1, feat_2, feat_3
\ No newline at end of file
diff --git a/src/scheduler.py b/src/scheduler.py
new file mode 100644
index 0000000000000000000000000000000000000000..87a9a1c5fcbf2df2a9263d49a5d3f5ba87ccb48d
--- /dev/null
+++ b/src/scheduler.py
@@ -0,0 +1,40 @@
+from torch.optim.lr_scheduler import _LRScheduler
+
class PolynomialLR(_LRScheduler):
    """Polynomial learning-rate decay with optional linear warmup.

    After warmup, lr follows ``(base_lr - min_lr) * (1 - t/iter_max)**power
    + min_lr``; during warmup the coefficient ramps linearly from 0 so it
    meets the decay curve at ``iter_warmup``.  Updates are applied every
    ``step_size`` steps and stop past ``iter_max``.
    """

    def __init__(
        self,
        optimizer,
        step_size,
        iter_warmup,
        iter_max,
        power,
        min_lr=0,
        last_epoch=-1,
    ):
        self.step_size = step_size
        self.iter_warmup = int(iter_warmup)
        self.iter_max = int(iter_max)
        self.power = power
        self.min_lr = min_lr
        super(PolynomialLR, self).__init__(optimizer, last_epoch)

    def polynomial_decay(self, lr):
        """Scale *lr* toward ``min_lr`` according to the schedule."""
        step = float(self.last_epoch)
        if step < self.iter_warmup:
            # linear warmup, anchored to the decay curve's value at iter_warmup
            coef = (step / self.iter_warmup) * (
                (1 - self.iter_warmup / self.iter_max) ** self.power
            )
        else:
            coef = (1 - step / self.iter_max) ** self.power
        return (lr - self.min_lr) * coef + self.min_lr

    def get_lr(self):
        # keep the current rates between update points and past iter_max
        skip = (
            self.last_epoch == 0
            or self.last_epoch % self.step_size != 0
            or self.last_epoch > self.iter_max
        )
        if skip:
            return [group["lr"] for group in self.optimizer.param_groups]
        return [self.polynomial_decay(lr) for lr in self.base_lrs]

    def step_update(self, num_updates):
        # timm-style per-batch hook; delegates to the standard step()
        self.step()
\ No newline at end of file
diff --git a/src/utils.py b/src/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..0fbbd8d04d09ac623584005374238bd0edcb40d9
--- /dev/null
+++ b/src/utils.py
@@ -0,0 +1,806 @@
+import sys
+import time
+import numpy as np
+from PIL import Image
+from skimage import color
+from skimage.transform import resize
+import src.data.functional as F
+import torch
+from torch import nn
+import torch.nn.functional as F_torch
+import torchvision.transforms.functional as F_torchvision
+from numba import cuda, jit
+import math
+
# Inverse of the sRGB -> XYZ conversion matrix; used by tensor_lab2rgb to map
# XYZ values back to linear RGB.
rgb_from_xyz = np.array(
    [
        [3.24048134, -0.96925495, 0.05564664],
        [-1.53715152, 1.87599, -0.20404134],
        [-0.49853633, 0.04155593, 1.05731107],
    ]
)
# Lab normalization constants: per-channel scale (l_norm/ab_norm) and offset
# (l_mean/ab_mean) used to center/uncenter the L and ab channels.
l_norm, ab_norm = 1.0, 1.0
l_mean, ab_mean = 50.0, 0
+
+
class SquaredPadding(object):
    """Resize so the longer side equals `target_size`, then pad the shorter
    side symmetrically with `fill_value`, yielding a square image."""

    def __init__(self, target_size=384, fill_value=0):
        self.target_size = target_size
        self.fill_value = fill_value

    def __call__(self, img, return_pil=True, return_paddings=False):
        # Accept PIL images as well as tensors.
        if type(img) != torch.Tensor:
            img = F_torchvision.to_tensor(img)

        height, width = img.size(1), img.size(2)
        if height > width:
            # Tall image: pad the width (left/right).
            new_h = self.target_size
            new_w = int(width / height * self.target_size)
            gap = new_h - new_w
            paddings = (gap // 2, gap // 2 + gap % 2, 0, 0)
        else:
            # Wide (or square) image: pad the height (top/bottom).
            new_w = self.target_size
            new_h = int(height / width * self.target_size)
            gap = new_w - new_h
            paddings = (0, 0, gap // 2, gap // 2 + gap % 2)

        img = F_torchvision.resize(img, (new_h, new_w))
        padded_img = F_torch.pad(img, paddings, value=self.fill_value)

        if return_pil:
            padded_img = F_torchvision.to_pil_image(padded_img)

        if return_paddings:
            return padded_img, paddings

        return padded_img
+
class UnpaddingSquare(object):
    """Inverse of SquaredPadding: crop away the padded borders, recovering
    the original (pre-padding) region."""

    def __init__(self):
        pass

    def __call__(self, img, paddings):
        pad_l, pad_r, pad_t, pad_b = paddings
        height, width = img.size(1), img.size(2)
        # Extent of the image before padding was applied.
        orig_h = height - pad_t - pad_b
        orig_w = width - pad_l - pad_r
        return F_torchvision.crop(img, top=pad_t, left=pad_l, height=orig_h, width=orig_w)
+
class ResizeFlow(object):
    """Bilinearly resize a (2, H, W) flow tensor to `target_size`.

    NOTE: only the spatial grid is resized; flow magnitudes are not rescaled.
    """

    def __init__(self, target_size=(384, 384)):
        self.target_size = target_size

    def __call__(self, flow):
        batched = flow.unsqueeze(0)  # interpolate expects (N, C, H, W)
        resized = F_torch.interpolate(batched, self.target_size, mode='bilinear', align_corners=True)
        return resized.squeeze(0)
+
+class SquaredPaddingFlow(object):
+ def __init__(self, fill_value=0):
+ self.fill_value = fill_value
+
+ def __call__(self, flow):
+ H, W = flow.size(1), flow.size(2)
+
+ if H > W:
+ # Padding flow
+ padded_size = H - W
+ paddings = (padded_size // 2, (padded_size // 2) + (padded_size % 2), 0, 0)
+ padded_img = F_torch.pad(flow, paddings, value=self.fill_value)
+ else:
+ # Padding flow
+ padded_size = W - H
+ paddings = (0, 0, padded_size // 2, (padded_size // 2) + (padded_size % 2))
+ padded_img = F_torch.pad(flow, paddings, value=self.fill_value)
+
+ return padded_img
+
+
def gray2rgb_batch(l):
    """Turn a batch of centered L (grayscale) tensors into pseudo-RGB by
    un-centering, rescaling to [0, 1], and repeating the channel 3 times."""
    gray = uncenter_l(l) / (2 * l_mean)
    return torch.cat((gray, gray, gray), dim=1)
+
+
def vgg_preprocess(tensor):
    """Convert an RGB tensor in [0, 1] to a VGG-style BGR tensor in [0, 255].

    Channels are flipped RGB -> BGR, the (BGR-ordered) mean is subtracted,
    and the result is scaled by 255.
    """
    r, g, b = tensor[:, 0:1, :, :], tensor[:, 1:2, :, :], tensor[:, 2:3, :, :]
    bgr = torch.cat((b, g, r), dim=1)
    mean = torch.Tensor([0.40760392, 0.45795686, 0.48501961]).type_as(bgr).view(1, 3, 1, 1)
    return (bgr - mean) * 255
+
+
def tensor_lab2rgb(input):
    """Convert a batch of Lab images (n, 3, h, w) to RGB tensors in [0, 1].

    Implements the Lab -> XYZ -> linear RGB -> sRGB pipeline, clamping
    out-of-gamut values into [0, 1].
    """
    input_trans = input.transpose(1, 2).transpose(2, 3)  # n * h * w * 3
    L, a, b = (
        input_trans[:, :, :, 0:1],
        input_trans[:, :, :, 1:2],
        input_trans[:, :, :, 2:],
    )
    # Lab -> f(XYZ): invert the Lab encoding.
    y = (L + 16.0) / 116.0
    x = (a / 500.0) + y
    z = y - (b / 200.0)

    # Clamp negative z before the piecewise cube/linear inversion.
    neg_mask = z.data < 0
    z[neg_mask] = 0
    xyz = torch.cat((x, y, z), dim=3)

    # Invert the piecewise Lab transfer function (threshold ~ 6/29).
    mask = xyz.data > 0.2068966
    mask_xyz = xyz.clone()
    mask_xyz[mask] = torch.pow(xyz[mask], 3.0)
    mask_xyz[~mask] = (xyz[~mask] - 16.0 / 116.0) / 7.787
    # Scale X and Z by the reference-white factors.
    mask_xyz[:, :, :, 0] = mask_xyz[:, :, :, 0] * 0.95047
    mask_xyz[:, :, :, 2] = mask_xyz[:, :, :, 2] * 1.08883

    # XYZ -> linear RGB via the module-level rgb_from_xyz matrix.
    rgb_trans = torch.mm(mask_xyz.view(-1, 3), torch.from_numpy(rgb_from_xyz).type_as(xyz)).view(
        input.size(0), input.size(2), input.size(3), 3
    )
    rgb = rgb_trans.transpose(2, 3).transpose(1, 2)

    # Linear RGB -> sRGB gamma encoding (piecewise at 0.0031308).
    mask = rgb > 0.0031308
    mask_rgb = rgb.clone()
    mask_rgb[mask] = 1.055 * torch.pow(rgb[mask], 1 / 2.4) - 0.055
    mask_rgb[~mask] = rgb[~mask] * 12.92

    # Clamp out-of-gamut values into [0, 1].
    neg_mask = mask_rgb.data < 0
    large_mask = mask_rgb.data > 1
    mask_rgb[neg_mask] = 0
    mask_rgb[large_mask] = 1
    return mask_rgb
+
+
+###### loss functions ######
def feature_normalize(feature_in):
    """L2-normalize `feature_in` along dim 1.

    A machine-epsilon offset guards against division by zero for all-zero
    feature vectors.
    """
    norm = torch.norm(feature_in, 2, 1, keepdim=True) + sys.float_info.epsilon
    return torch.div(feature_in, norm)
+
+
# denormalization for l
def uncenter_l(l):
    """Invert the L-channel centering using the module constants l_norm and
    l_mean, mapping centered L values back to the uncentered Lab range."""
    return l * l_norm + l_mean
+
+
def get_grid(x):
    """Build a normalized ([-1, 1]) sampling grid matching x's batch and
    spatial sizes.

    Returns an (N, 2, H, W) tensor: channel 0 holds horizontal coordinates,
    channel 1 vertical ones.
    """
    n, h, w = x.size(0), x.size(2), x.size(3)
    horizontal = torch.linspace(-1.0, 1.0, w).view(1, 1, 1, w).expand(n, 1, h, w)
    vertical = torch.linspace(-1.0, 1.0, h).view(1, 1, h, 1).expand(n, 1, h, w)
    return torch.cat([horizontal, vertical], 1)
+
+
class WarpingLayer(nn.Module):
    """Backward-warp an image by an optical flow given in pixel units."""

    def __init__(self, device):
        super(WarpingLayer, self).__init__()
        self.device = device

    def forward(self, x, flow):
        """
        Warp `x` according to `flow`.

        Args:
            x: the input image
            flow: the flow tensor, a 4D tensor of shape (batch_size, 2, height, width)

        Returns:
            The warped image
        """
        # grid_sample expects grid coordinates normalized to [-1, 1]; the
        # incoming flow is in pixels (for EPE comparability with FlowNet2),
        # so each flow channel is rescaled here first.
        scaled_flow = torch.zeros_like(flow).to(self.device)
        scaled_flow[:, 0, :, :] = flow[:, 0, :, :] / ((flow.size(3) - 1.0) / 2.0)
        scaled_flow[:, 1, :, :] = flow[:, 1, :, :] / ((flow.size(2) - 1.0) / 2.0)

        grid = (get_grid(x).to(self.device) + scaled_flow).permute(0, 2, 3, 1)
        return F_torch.grid_sample(x, grid, align_corners=True)
+
+
class CenterPad_threshold(object):
    """Resize-and-center-crop an image to (height, width), with a special
    crop path for inputs whose height/width ratio exceeds `threshold`.

    Returns a PIL image in every branch.
    """

    def __init__(self, image_size, threshold=3 / 4):
        self.height = image_size[0]
        self.width = image_size[1]
        self.threshold = threshold

    def __call__(self, image):
        # pad the image to 16:9
        # pad height
        I = np.array(image)

        # for padded input
        height_old = np.size(I, 0)
        width_old = np.size(I, 1)
        old_size = [height_old, width_old]
        height = self.height
        width = self.width
        I_pad = np.zeros((height, width, np.size(I, 2)))

        ratio = height / width

        # Aspect ratio already matches the target: resize only (or return as-is).
        if height_old / width_old == ratio:
            if height_old == height:
                return Image.fromarray(I.astype(np.uint8))
            new_size = [int(x * height / height_old) for x in old_size]
            I_resize = resize(I, new_size, mode="reflect", preserve_range=True, clip=False, anti_aliasing=True)
            return Image.fromarray(I_resize.astype(np.uint8))

        # Tall input above `threshold`: center-crop the height down to
        # threshold * width first, then resize to the target.
        # NOTE(review): `width_new` is assigned but never used.
        if height_old / width_old > self.threshold:
            width_new, height_new = width_old, int(width_old * self.threshold)
            height_margin = height_old - height_new
            height_crop_start = height_margin // 2
            I_crop = I[height_crop_start : (height_crop_start + height_new), :, :]
            I_resize = resize(I_crop, [height, width], mode="reflect", preserve_range=True, clip=False, anti_aliasing=True)

            return Image.fromarray(I_resize.astype(np.uint8))

        if height_old / width_old > ratio:  # pad the width and crop
            new_size = [int(x * width / width_old) for x in old_size]
            I_resize = resize(I, new_size, mode="reflect", preserve_range=True, clip=False, anti_aliasing=True)
            width_resize = np.size(I_resize, 1)
            height_resize = np.size(I_resize, 0)
            start_height = (height_resize - height) // 2
            I_pad[:, :, :] = I_resize[start_height : (start_height + height), :, :]
        else:  # pad the height and crop
            new_size = [int(x * height / height_old) for x in old_size]
            I_resize = resize(I, new_size, mode="reflect", preserve_range=True, clip=False, anti_aliasing=True)
            width_resize = np.size(I_resize, 1)
            height_resize = np.size(I_resize, 0)
            start_width = (width_resize - width) // 2
            I_pad[:, :, :] = I_resize[:, start_width : (start_width + width), :]

        return Image.fromarray(I_pad.astype(np.uint8))
+
+
class Normalize(object):
    """Center a Lab tensor in place: the L channel (index 0) is shifted by 50,
    the ab channels (indices 1-2) keep zero offset and unit scale."""

    def __init__(self):
        pass

    def __call__(self, inputs):
        # `F` is src.data.functional; presumably (value, mean, std)-style
        # normalization — TODO confirm its exact signature.
        inputs[0:1, :, :] = F.normalize(inputs[0:1, :, :], 50, 1)
        inputs[1:3, :, :] = F.normalize(inputs[1:3, :, :], (0, 0), (1, 1))
        return inputs
+
+
class RGB2Lab(object):
    """Convert an RGB image to CIE Lab via skimage.color.rgb2lab."""

    def __init__(self):
        pass

    def __call__(self, inputs):
        return color.rgb2lab(inputs)
+
+
class ToTensor(object):
    """Thin wrapper over src.data.functional.to_mytensor, converting the
    input into the project's tensor format (see that helper for semantics)."""

    def __init__(self):
        pass

    def __call__(self, inputs):
        return F.to_mytensor(inputs)
+
+
class CenterPad(object):
    """Resize-and-center-crop an image to a fixed (height, width).

    The image is scaled to cover the target aspect ratio, then the
    overflowing dimension is center-cropped. Returns a PIL image.
    """

    def __init__(self, image_size):
        self.height = image_size[0]
        self.width = image_size[1]

    def __call__(self, image):
        # pad the image to 16:9
        # pad height
        I = np.array(image)

        # for padded input
        height_old = np.size(I, 0)
        width_old = np.size(I, 1)
        old_size = [height_old, width_old]
        height = self.height
        width = self.width
        I_pad = np.zeros((height, width, np.size(I, 2)))

        ratio = height / width
        # Aspect ratio already matches: resize only (or return unchanged).
        if height_old / width_old == ratio:
            if height_old == height:
                return Image.fromarray(I.astype(np.uint8))
            new_size = [int(x * height / height_old) for x in old_size]
            I_resize = resize(I, new_size, mode="reflect", preserve_range=True, clip=False, anti_aliasing=True)
            return Image.fromarray(I_resize.astype(np.uint8))

        if height_old / width_old > ratio:  # pad the width and crop
            new_size = [int(x * width / width_old) for x in old_size]
            I_resize = resize(I, new_size, mode="reflect", preserve_range=True, clip=False, anti_aliasing=True)
            width_resize = np.size(I_resize, 1)
            height_resize = np.size(I_resize, 0)
            start_height = (height_resize - height) // 2
            I_pad[:, :, :] = I_resize[start_height : (start_height + height), :, :]
        else:  # pad the height and crop
            new_size = [int(x * height / height_old) for x in old_size]
            I_resize = resize(I, new_size, mode="reflect", preserve_range=True, clip=False, anti_aliasing=True)
            width_resize = np.size(I_resize, 1)
            height_resize = np.size(I_resize, 0)
            start_width = (width_resize - width) // 2
            I_pad[:, :, :] = I_resize[:, start_width : (start_width + width), :]

        return Image.fromarray(I_pad.astype(np.uint8))
+
+
class CenterPadCrop_numpy(object):
    """
    pad the image according to the height
    """

    def __init__(self, image_size):
        self.height = image_size[0]
        self.width = image_size[1]

    def __call__(self, image, threshold=3 / 4):
        """Pad the scaled image into a width x width square, then center-crop
        to (height, width). Handles 2-D (grayscale) and 3-D (color) arrays
        and returns a numpy array (unlike CenterPad, which returns PIL)."""
        # pad the image to 16:9
        # pad height
        I = np.array(image)
        # for padded input
        height_old = np.size(I, 0)
        width_old = np.size(I, 1)
        old_size = [height_old, width_old]
        height = self.height
        width = self.width
        padding_size = width
        if image.ndim == 2:
            I_pad = np.zeros((width, width))
        else:
            I_pad = np.zeros((width, width, I.shape[2]))

        ratio = height / width
        # Aspect ratio already matches the target: return the array as-is.
        if height_old / width_old == ratio:
            return I

        # NOTE(review): disabled threshold-crop branch kept for reference; it
        # references an undefined `height_start`, which is presumably why it
        # was commented out.
        # if height_old / width_old > threshold:
        #     width_new, height_new = width_old, int(width_old * threshold)
        #     height_margin = height_old - height_new
        #     height_crop_start = height_margin // 2
        #     I_crop = I[height_start : (height_start + height_new), :]
        #     I_resize = resize(
        #         I_crop, [height, width], mode="reflect", preserve_range=True, clip=False, anti_aliasing=True
        #     )
        #     return I_resize

        if height_old / width_old > ratio:  # pad the width and crop
            new_size = [int(x * width / width_old) for x in old_size]
            I_resize = resize(I, new_size, mode="reflect", preserve_range=True, clip=False, anti_aliasing=True)
            width_resize = np.size(I_resize, 1)
            height_resize = np.size(I_resize, 0)
            start_height = (height_resize - height) // 2
            start_height_block = (padding_size - height) // 2
            if image.ndim == 2:
                I_pad[start_height_block : (start_height_block + height), :] = I_resize[
                    start_height : (start_height + height), :
                ]
            else:
                I_pad[start_height_block : (start_height_block + height), :, :] = I_resize[
                    start_height : (start_height + height), :, :
                ]
        else:  # pad the height and crop
            new_size = [int(x * height / height_old) for x in old_size]
            I_resize = resize(I, new_size, mode="reflect", preserve_range=True, clip=False, anti_aliasing=True)
            width_resize = np.size(I_resize, 1)
            height_resize = np.size(I_resize, 0)
            start_width = (width_resize - width) // 2
            start_width_block = (padding_size - width) // 2
            if image.ndim == 2:
                I_pad[:, start_width_block : (start_width_block + width)] = I_resize[:, start_width : (start_width + width)]

            else:
                I_pad[:, start_width_block : (start_width_block + width), :] = I_resize[
                    :, start_width : (start_width + width), :
                ]

        # Final center crop of the square canvas down to (height, width).
        crop_start_height = (I_pad.shape[0] - height) // 2
        crop_start_width = (I_pad.shape[1] - width) // 2

        if image.ndim == 2:
            return I_pad[crop_start_height : (crop_start_height + height), crop_start_width : (crop_start_width + width)]
        else:
            return I_pad[crop_start_height : (crop_start_height + height), crop_start_width : (crop_start_width + width), :]
+
+
@jit(nopython=True, nogil=True)
def biInterpolation_cpu(distorted, i, j):
    """Sample `distorted` at coordinate (i, j).

    NOTE(review): i and j are truncated to uint16 up front, so every
    fractional weight below — (i - i), (j - j) — evaluates to 0 and
    (i + 1 - i) to 1; the expression therefore reduces to Q11, i.e.
    nearest-neighbor (floor) sampling rather than true bilinear
    interpolation. Confirm whether this is intended before changing it,
    since outputs would differ.
    """
    i = np.uint16(i)
    j = np.uint16(j)
    Q11 = distorted[j, i]
    Q12 = distorted[j, i + 1]
    Q21 = distorted[j + 1, i]
    Q22 = distorted[j + 1, i + 1]

    # np.int8 narrows the pixel value; the caller stores it into a uint8
    # array, which wraps it back, so 0-255 values survive the round trip.
    return np.int8(
        Q11 * (i + 1 - i) * (j + 1 - j) + Q12 * (i - i) * (j + 1 - j) + Q21 * (i + 1 - i) * (j - j) + Q22 * (i - i) * (j - j)
    )
+
@jit(nopython=True, nogil=True)
def iterSearchShader_cpu(padu, padv, xr, yr, W, H, maxIter, precision):
    """Invert the forward flow at output pixel (xr, yr).

    Finds the source location (i, j) such that (i, j) displaced by the flow
    lands on (xr, yr), via fixed-point iteration. May return fractional and
    out-of-bounds coordinates; the caller reflects them back into range.
    """
    # Negligible flow at this pixel: it maps to itself.
    if abs(padu[yr, xr]) < precision and abs(padv[yr, xr]) < precision:
        return xr, yr

    # Our initialize method in this paper, can see the overleaf for detail:
    # scale the local flow by a first-order estimate of its spatial
    # derivative (forward difference where possible, backward at the border).
    if (xr + 1) <= (W - 1):
        dif = padu[yr, xr + 1] - padu[yr, xr]
    else:
        dif = padu[yr, xr] - padu[yr, xr - 1]
    u_next = padu[yr, xr] / (1 + dif)
    if (yr + 1) <= (H - 1):
        dif = padv[yr + 1, xr] - padv[yr, xr]
    else:
        dif = padv[yr, xr] - padv[yr - 1, xr]
    v_next = padv[yr, xr] / (1 + dif)
    i = xr - u_next
    j = yr - v_next
    i_int = int(i)
    j_int = int(j)

    # The same as traditional iterative search method.
    # NOTE(review): i_int/j_int are computed once before the loop and never
    # refreshed after i/j are updated, so later iterations keep sampling the
    # flow inside the original cell — confirm whether this is intended.
    for _ in range(maxIter):
        # Estimate left the valid domain; return it unclamped.
        if not 0 <= i <= (W - 1) or not 0 <= j <= (H - 1):
            return i, j

        u11 = padu[j_int, i_int]
        v11 = padv[j_int, i_int]

        u12 = padu[j_int, i_int + 1]
        v12 = padv[j_int, i_int + 1]

        # NOTE(review): int1/int2 are the u21/u22 corner samples of padu.
        int1 = padu[j_int + 1, i_int]
        v21 = padv[j_int + 1, i_int]

        int2 = padu[j_int + 1, i_int + 1]
        v22 = padv[j_int + 1, i_int + 1]

        # Bilinear interpolation of the flow at the current estimate (i, j).
        u = (
            u11 * (i_int + 1 - i) * (j_int + 1 - j)
            + u12 * (i - i_int) * (j_int + 1 - j)
            + int1 * (i_int + 1 - i) * (j - j_int)
            + int2 * (i - i_int) * (j - j_int)
        )

        v = (
            v11 * (i_int + 1 - i) * (j_int + 1 - j)
            + v12 * (i - i_int) * (j_int + 1 - j)
            + v21 * (i_int + 1 - i) * (j - j_int)
            + v22 * (i - i_int) * (j - j_int)
        )

        i_next = xr - u
        j_next = yr - v

        # Converged: successive estimates differ by less than `precision`.
        if abs(i - i_next) < precision and abs(j - j_next) < precision:
            return i, j

        i = i_next
        j = j_next

    # if the search doesn't converge within max iter, it will return the last iter result
    return i_next, j_next
+
@jit(nopython=True, nogil=True)
def iterSearch_cpu(distortImg, resultImg, padu, padv, W, H, maxIter=5, precision=1e-2):
    """Fill `resultImg` (H, W, 3) in place by inverse-warping `distortImg`.

    For every output pixel the source coordinate is found with
    iterSearchShader_cpu, reflected back into bounds if necessary, and the
    three channels are sampled via biInterpolation_cpu.
    """
    for xr in range(W):
        for yr in range(H):
            # (xr, yr) is the point in result image, (i, j) is the search result in distorted image
            i, j = iterSearchShader_cpu(padu, padv, xr, yr, W, H, maxIter, precision)

            # reflect the pixels outside the border
            if i > W - 1:
                i = 2 * W - 1 - i
            if i < 0:
                i = -i
            if j > H - 1:
                j = 2 * H - 1 - j
            if j < 0:
                j = -j

            # Bilinear interpolation to get the pixel at (i, j) in distorted image
            resultImg[yr, xr, 0] = biInterpolation_cpu(
                distortImg[:, :, 0],
                i,
                j,
            )
            resultImg[yr, xr, 1] = biInterpolation_cpu(
                distortImg[:, :, 1],
                i,
                j,
            )
            resultImg[yr, xr, 2] = biInterpolation_cpu(
                distortImg[:, :, 2],
                i,
                j,
            )
    return None
+
+
def forward_mapping_cpu(source_image, u, v, maxIter=5, precision=1e-2):
    """
    warp the image according to the forward flow
    u: horizontal
    v: vertical
    """
    H, W = source_image.shape[0], source_image.shape[1]

    # Replicate the last row/column so bilinear lookups at (i+1, j+1) stay
    # in bounds.
    distortImg = np.zeros((H + 1, W + 1, 3), dtype=np.uint8)
    distortImg[:H, :W] = source_image[:H, :W]
    distortImg[H, :W] = source_image[H - 1, :W]
    distortImg[:H, W] = source_image[:H, W - 1]
    distortImg[H, W] = source_image[H - 1, W - 1]

    # np.pad in "edge" mode reproduces the same border replication for the
    # two flow components.
    padu = np.pad(np.asarray(u, dtype=np.float32)[:H, :W], ((0, 1), (0, 1)), mode="edge")
    padv = np.pad(np.asarray(v, dtype=np.float32)[:H, :W], ((0, 1), (0, 1)), mode="edge")

    resultImg = np.zeros((H, W, 3), dtype=np.uint8)
    iterSearch_cpu(distortImg, resultImg, padu, padv, W, H, maxIter, precision)
    return resultImg
+
class Distortion_with_flow_cpu(object):
    """Elastic distortion via forward flow mapping on the CPU."""

    def __init__(self, maxIter=3, precision=1e-3):
        self.maxIter = maxIter
        self.precision = precision

    def __call__(self, inputs, dx, dy):
        # Note the axis swap: dy feeds the horizontal component u and dx the
        # vertical component v.
        frame = np.array(inputs)
        warped = forward_mapping_cpu(frame, dy, dx, maxIter=self.maxIter, precision=self.precision)
        return Image.fromarray(warped)
+
@cuda.jit(device=True)
def biInterpolation_gpu(distorted, i, j):
    """Device-side sampler mirroring biInterpolation_cpu.

    NOTE(review): i and j are truncated to int first, so the fractional
    weights below collapse to 0/1 and the expression reduces to Q11 —
    nearest-neighbor sampling, not true bilinear interpolation.
    """
    i = int(i)
    j = int(j)
    Q11 = distorted[j, i]
    Q12 = distorted[j, i + 1]
    Q21 = distorted[j + 1, i]
    Q22 = distorted[j + 1, i + 1]

    # np.int8 narrows the pixel value; storing into the uint8 result array
    # wraps it back, so 0-255 values survive the round trip.
    return np.int8(
        Q11 * (i + 1 - i) * (j + 1 - j) + Q12 * (i - i) * (j + 1 - j) + Q21 * (i + 1 - i) * (j - j) + Q22 * (i - i) * (j - j)
    )
+
@cuda.jit(device=True)
def iterSearchShader_gpu(padu, padv, xr, yr, W, H, maxIter, precision):
    """Device-side fixed-point inversion of the forward flow at (xr, yr);
    mirrors iterSearchShader_cpu."""
    # Negligible flow at this pixel: it maps to itself.
    if abs(padu[yr, xr]) < precision and abs(padv[yr, xr]) < precision:
        return xr, yr

    # Our initialize method in this paper, can see the overleaf for detail:
    # scale the local flow by a first-order estimate of its spatial
    # derivative (forward difference where possible, backward at the border).
    if (xr + 1) <= (W - 1):
        dif = padu[yr, xr + 1] - padu[yr, xr]
    else:
        dif = padu[yr, xr] - padu[yr, xr - 1]
    u_next = padu[yr, xr] / (1 + dif)
    if (yr + 1) <= (H - 1):
        dif = padv[yr + 1, xr] - padv[yr, xr]
    else:
        dif = padv[yr, xr] - padv[yr - 1, xr]
    v_next = padv[yr, xr] / (1 + dif)
    i = xr - u_next
    j = yr - v_next
    i_int = int(i)
    j_int = int(j)

    # The same as traditional iterative search method.
    # NOTE(review): as in the CPU variant, i_int/j_int are never refreshed
    # inside the loop — confirm whether this is intended.
    for _ in range(maxIter):
        if not 0 <= i <= (W - 1) or not 0 <= j <= (H - 1):
            return i, j

        u11 = padu[j_int, i_int]
        v11 = padv[j_int, i_int]

        u12 = padu[j_int, i_int + 1]
        v12 = padv[j_int, i_int + 1]

        # NOTE(review): int1/int2 are the u21/u22 corner samples of padu.
        int1 = padu[j_int + 1, i_int]
        v21 = padv[j_int + 1, i_int]

        int2 = padu[j_int + 1, i_int + 1]
        v22 = padv[j_int + 1, i_int + 1]

        # Bilinear interpolation of the flow at the current estimate (i, j).
        u = (
            u11 * (i_int + 1 - i) * (j_int + 1 - j)
            + u12 * (i - i_int) * (j_int + 1 - j)
            + int1 * (i_int + 1 - i) * (j - j_int)
            + int2 * (i - i_int) * (j - j_int)
        )

        v = (
            v11 * (i_int + 1 - i) * (j_int + 1 - j)
            + v12 * (i - i_int) * (j_int + 1 - j)
            + v21 * (i_int + 1 - i) * (j - j_int)
            + v22 * (i - i_int) * (j - j_int)
        )

        i_next = xr - u
        j_next = yr - v

        # Converged: successive estimates differ by less than `precision`.
        if abs(i - i_next) < precision and abs(j - j_next) < precision:
            return i, j

        i = i_next
        j = j_next

    # if the search doesn't converge within max iter, it will return the last iter result
    return i_next, j_next
+
@cuda.jit
def iterSearch_gpu(distortImg, resultImg, padu, padv, W, H, maxIter=5, precision=1e-2):
    """CUDA kernel: inverse-warp `distortImg` into `resultImg` in place.

    Each thread covers a strided subset of output pixels (grid-stride loops
    over both axes via cuda.grid/cuda.gridsize).
    """
    start_x, start_y = cuda.grid(2)
    stride_x, stride_y = cuda.gridsize(2)

    for xr in range(start_x, W, stride_x):
        for yr in range(start_y, H, stride_y):

            i,j = iterSearchShader_gpu(padu, padv, xr, yr, W, H, maxIter, precision)

            # Reflect coordinates that fall outside the image.
            if i > W - 1:
                i = 2 * W - 1 - i
            if i < 0:
                i = -i
            if j > H - 1:
                j = 2 * H - 1 - j
            if j < 0:
                j = -j

            # Sample each channel at the recovered source coordinate.
            resultImg[yr, xr,0] = biInterpolation_gpu(distortImg[:,:,0], i, j)
            resultImg[yr, xr,1] = biInterpolation_gpu(distortImg[:,:,1], i, j)
            resultImg[yr, xr,2] = biInterpolation_gpu(distortImg[:,:,2], i, j)
    return None
+
def forward_mapping_gpu(source_image, u, v, maxIter=5, precision=1e-2):
    """
    warp the image according to the forward flow
    u: horizontal
    v: vertical
    """
    H, W = source_image.shape[0], source_image.shape[1]

    # Replicate the last row/column so bilinear lookups at (i+1, j+1) stay
    # in bounds.
    distortImg = np.zeros((H + 1, W + 1, 3), dtype=np.uint8)
    distortImg[:H, :W] = source_image[:H, :W]
    distortImg[H, :W] = source_image[H - 1, :W]
    distortImg[:H, W] = source_image[:H, W - 1]
    distortImg[H, W] = source_image[H - 1, W - 1]

    # np.pad in "edge" mode reproduces the same border replication for the
    # two flow components.
    padu = np.pad(np.asarray(u, dtype=np.float32)[:H, :W], ((0, 1), (0, 1)), mode="edge")
    padv = np.pad(np.asarray(v, dtype=np.float32)[:H, :W], ((0, 1), (0, 1)), mode="edge")

    # Move inputs to the device and allocate the output there as well.
    d_padu = cuda.to_device(padu)
    d_padv = cuda.to_device(padv)
    d_distort = cuda.to_device(distortImg)
    d_result = cuda.to_device(np.zeros((H, W, 3), dtype=np.uint8))

    # 16x16 thread blocks, enough blocks to cover the whole frame.
    threadsperblock = (16, 16)
    blockspergrid = (math.ceil(W / threadsperblock[0]), math.ceil(H / threadsperblock[1]))

    iterSearch_gpu[blockspergrid, threadsperblock](d_distort, d_result, d_padu, d_padv, W, H, maxIter, precision)
    return d_result.copy_to_host()
+
class Distortion_with_flow_gpu(object):
    """Elastic distortion via forward flow mapping on the GPU."""

    def __init__(self, maxIter=3, precision=1e-3):
        self.maxIter = maxIter
        self.precision = precision

    def __call__(self, inputs, dx, dy):
        # dy feeds the horizontal component u and dx the vertical component v,
        # mirroring the CPU variant's convention.
        frame = np.array(inputs)
        warped = forward_mapping_gpu(frame, dy, dx, maxIter=self.maxIter, precision=self.precision)
        return Image.fromarray(warped)
+
def read_flow(filename):
    """Read an optical-flow field from a Middlebury-style .flo file.

    Supports the standard float32 payload (magic 202021.25) and a
    non-standard half-precision variant (magic 123.25).

    :param filename: name of the flow file
    :return: optical flow data as a float32 array of shape (2, h, w)
    :raises ValueError: if the file is empty/truncated or the magic number
        is not recognized (previously this printed a message and then
        crashed with AttributeError on None)
    """
    # `with` guarantees the handle is closed even if a read fails;
    # the original left the file open on error paths.
    with open(filename, "rb") as f:
        magic = np.fromfile(f, np.float32, count=1)
        if magic.size != 1 or magic[0] not in (202021.25, 123.25):
            raise ValueError("Magic number incorrect. Invalid .flo file")
        w = np.fromfile(f, np.int32, count=1)[0]
        h = np.fromfile(f, np.int32, count=1)[0]
        # The 123.25 variant stores the payload as float16.
        dtype = np.float16 if magic[0] == 123.25 else np.float32
        data2d = np.fromfile(f, dtype, count=2 * w * h)
        # reshape data into 3D array (channels, rows, columns)
        data2d = np.resize(data2d, (2, h, w))
    return data2d.astype(np.float32)
+
class LossHandler:
    """Accumulates named loss values and averages them over counted samples."""

    def __init__(self):
        self.loss_dict = {}
        self.count_sample = 0

    def add_loss(self, key, loss):
        # Unseen keys start at zero before accumulating.
        self.loss_dict[key] = self.loss_dict.get(key, 0) + loss

    def get_loss(self, key):
        # Average of everything accumulated under `key` so far.
        return self.loss_dict[key] / self.count_sample

    def count_one_sample(self):
        self.count_sample += 1

    def reset(self):
        self.loss_dict = {}
        self.count_sample = 0
+
+
class TimeHandler:
    """Simple keyed stopwatch: the first call for a key starts timing and
    returns None; the second call stops it and returns the elapsed seconds."""

    def __init__(self):
        self.time_handler = {}

    def compute_time(self, key):
        started = self.time_handler.pop(key, None)
        if started is None:
            # First call for this key: record the start time.
            self.time_handler[key] = time.time()
            return None
        return time.time() - started
+
+
def print_num_params(model, is_trainable=False):
    """Print a formatted parameter count for `model` and return it.

    With is_trainable=True only parameters with requires_grad are counted;
    otherwise all parameters are.
    """
    model_name = model.__class__.__name__.ljust(30)
    num_params = sum(
        p.numel() for p in model.parameters() if p.requires_grad or not is_trainable
    )
    tag = "TRAINABLE" if is_trainable else "GENERAL"
    print(f"| {tag} | {model_name} | {('{:,}'.format(num_params)).rjust(10)} |")
    return num_params
diff --git a/test.py b/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..b8a4a2053d6592ceed9232ebf07bcb1a8fd7770d
--- /dev/null
+++ b/test.py
@@ -0,0 +1,147 @@
+import numpy as np
+import shutil
+import os
+import argparse
+import torch
+import glob
+from tqdm import tqdm
+from PIL import Image
+from collections import OrderedDict
+from src.models.vit.config import load_config
+import torchvision.transforms as transforms
+import cv2
+from skimage import io
+
+from src.models.CNN.ColorVidNet import GeneralColorVidNet
+from src.models.vit.embed import GeneralEmbedModel
+from src.models.CNN.NonlocalNet import GeneralWarpNet
+from src.models.CNN.FrameColor import frame_colorization
+from src.utils import (
+ RGB2Lab,
+ ToTensor,
+ Normalize,
+ uncenter_l,
+ tensor_lab2rgb,
+ SquaredPadding,
+ UnpaddingSquare
+)
+
def load_params(ckpt_file):
    """Load a checkpoint file and return its entries as an OrderedDict,
    preserving the stored key order."""
    state = torch.load(ckpt_file)
    return OrderedDict((name, value) for name, value in state.items())
+
def custom_transform(transforms, img):
    """Apply a list of transforms to `img`, capturing the paddings produced
    by a SquaredPadding step if one is present.

    Returns (transformed image moved to the module-level `device`, paddings).
    Paddings is None when the list contains no SquaredPadding — the original
    raised UnboundLocalError in that case.
    """
    paddings = None  # stays None when no SquaredPadding is in the pipeline
    for transform in transforms:
        if isinstance(transform, SquaredPadding):
            img, paddings = transform(img, return_paddings=True)
        else:
            img = transform(img)
    return img.to(device), paddings
+
def save_frames(predicted_rgb, video_name, frame_name):
    """Clamp a predicted RGB frame to uint8 range and write it under the
    output folder; silently does nothing when the frame is None."""
    if predicted_rgb is None:
        return
    frame = np.clip(predicted_rgb, 0, 255).astype(np.uint8)
    io.imsave(os.path.join(args.output_video_path, video_name, frame_name), frame)
+
+
def colorize_video(video_name):
    """Colorize every frame of `video_name` against each of its references.

    Relies on module-level globals assigned in __main__: args, transforms,
    device, embed_net, nonlocal_net, colornet. Frames/references are read
    from args.input_videos_path / args.reference_images_path and results are
    written via save_frames. With several references, later references
    overwrite the frames written for earlier ones.
    """
    frames_list = os.listdir(os.path.join(args.input_videos_path, video_name))
    frames_list.sort()
    refs_list = os.listdir(os.path.join(args.reference_images_path, video_name))
    refs_list.sort()

    for ref_path in refs_list:
        frame_ref = Image.open(os.path.join(args.reference_images_path, video_name, ref_path)).convert("RGB")
        I_last_lab_predict = None  # previous frame's Lab prediction (temporal propagation)

        IB_lab, IB_paddings = custom_transform(transforms, frame_ref)
        IB_lab = IB_lab.unsqueeze(0).to(device)
        IB_l = IB_lab[:, 0:1, :, :]
        IB_ab = IB_lab[:, 1:3, :, :]

        with torch.no_grad():
            # Compute the reference's embedding features once per reference.
            I_reference_lab = IB_lab
            I_reference_l = I_reference_lab[:, 0:1, :, :]
            I_reference_ab = I_reference_lab[:, 1:3, :, :]
            I_reference_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_reference_l), I_reference_ab), dim=1)).to(device)
            features_B = embed_net(I_reference_rgb)

        for frame_name in frames_list:
            curr_frame = Image.open(os.path.join(args.input_videos_path, video_name, frame_name)).convert("RGB")
            IA_lab, IA_paddings = custom_transform(transforms, curr_frame)
            IA_lab = IA_lab.unsqueeze(0).to(device)
            IA_l = IA_lab[:, 0:1, :, :]
            IA_ab = IA_lab[:, 1:3, :, :]

            # First frame of the clip: no temporal history yet.
            if I_last_lab_predict is None:
                I_last_lab_predict = torch.zeros_like(IA_lab).to(device)

            with torch.no_grad():
                I_current_lab = IA_lab
                I_current_ab_predict, _, _ = frame_colorization(
                    I_current_lab,
                    I_reference_lab,
                    I_last_lab_predict,
                    features_B,
                    embed_net,
                    nonlocal_net,
                    colornet,
                    luminance_noise=0,  # no augmentation noise at test time
                    temperature=1e-10,  # near-argmax correspondence
                    joint_training=False
                )
                I_last_lab_predict = torch.cat((IA_l, I_current_ab_predict), dim=1)

            # NOTE(review): IA_predict_rgb is a (1, 3, H, W) float tensor in
            # [0, 1], while save_frames clips to 0-255 and calls .astype —
            # an HWC numpy array scaled to 0-255 appears to be expected, and
            # IA_paddings is never undone (UnpaddingSquare is imported but
            # unused in this script). Confirm the intended conversion.
            IA_predict_rgb = tensor_lab2rgb(torch.cat((uncenter_l(IA_l), I_current_ab_predict), dim=1))
            save_frames(IA_predict_rgb, video_name, frame_name)
+
+
+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Video Colorization')
    parser.add_argument("--input_videos_path", type=str, help="path to input video")
    parser.add_argument("--reference_images_path", type=str, help="path to reference image")
    parser.add_argument("--output_video_path", type=str, help="path to output video")
    parser.add_argument("--weight_path", type=str, default="checkpoints/epoch_5/", help="path to weight")
    parser.add_argument("--device", type=str, default="cpu", help="device to run the model")
    parser.add_argument("--high_resolution", action="store_true", help="use high resolution")
    parser.add_argument("--wls_filter_on", action="store_true", help="use wls filter")
    args = parser.parse_args()

    device = torch.device(args.device)

    # Start from a clean output directory (destructive: removes prior results).
    if os.path.exists(args.output_video_path):
        shutil.rmtree(args.output_video_path)
    os.makedirs(args.output_video_path, exist_ok=True)

    videos_list = os.listdir(args.input_videos_path)

    # Build the three networks on the requested device and switch to eval mode.
    embed_net=GeneralEmbedModel(pretrained_model="swin-tiny", device=device).to(device)
    nonlocal_net = GeneralWarpNet(feature_channel=128).to(device)
    colornet=GeneralColorVidNet(7).to(device)

    embed_net.eval()
    nonlocal_net.eval()
    colornet.eval()

    # Load weights
    embed_net_params = load_params(os.path.join(args.weight_path, "embed_net.pth"))
    nonlocal_net_params = load_params(os.path.join(args.weight_path, "nonlocal_net.pth"))
    colornet_params = load_params(os.path.join(args.weight_path, "colornet.pth"))

    embed_net.load_state_dict(embed_net_params, strict=True)
    nonlocal_net.load_state_dict(nonlocal_net_params, strict=True)
    colornet.load_state_dict(colornet_params, strict=True)

    # Preprocessing pipeline consumed by custom_transform; SquaredPadding is
    # the step whose paddings custom_transform captures and returns.
    transforms = [SquaredPadding(target_size=224),
                  RGB2Lab(),
                  ToTensor(),
                  Normalize()]

    # center_padder = CenterPad((224,224))
    with torch.no_grad():
        for video_name in tqdm(videos_list):
            colorize_video(video_name)
\ No newline at end of file
diff --git a/train.py b/train.py
new file mode 100644
index 0000000000000000000000000000000000000000..8181702e26b66117a7e2d54b4d07e22308874caf
--- /dev/null
+++ b/train.py
@@ -0,0 +1,609 @@
+import os
+import sys
+import wandb
+import argparse
+import numpy as np
+from tqdm import tqdm
+from PIL import Image
+from datetime import datetime
+from zoneinfo import ZoneInfo
+from time import gmtime, strftime
+from collections import OrderedDict
+import random
+
+import torch
+import torch.nn as nn
+import torch.optim as optim
+import torch.backends.cudnn as cudnn
+from torchvision.transforms import CenterCrop
+from torch.utils.data import ConcatDataset, DataLoader, WeightedRandomSampler
+import torchvision.transforms as torch_transforms
+from torchvision.utils import make_grid
+
+from src.losses import (
+ ContextualLoss,
+ ContextualLoss_forward,
+ Perceptual_loss,
+ consistent_loss_fn,
+ discriminator_loss_fn,
+ generator_loss_fn,
+ l1_loss_fn,
+ smoothness_loss_fn,
+)
+from src.models.CNN.GAN_models import Discriminator_x64
+from src.models.CNN.ColorVidNet import ColorVidNet
+from src.models.CNN.FrameColor import frame_colorization
+from src.models.CNN.NonlocalNet import WeightedAverage_color, NonlocalWeightedAverage, WarpNet, WarpNet_new
+from src.models.vit.embed import EmbedModel
+from src.models.vit.config import load_config
+from src.data import transforms
+from src.data.dataloader import VideosDataset, VideosDataset_ImageNet
+from src.utils import CenterPad_threshold
+from src.utils import (
+ TimeHandler,
+ RGB2Lab,
+ ToTensor,
+ Normalize,
+ LossHandler,
+ WarpingLayer,
+ uncenter_l,
+ tensor_lab2rgb,
+ print_num_params,
+)
+from src.scheduler import PolynomialLR
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--video_data_root_list", type=str, default="dataset")
+parser.add_argument("--flow_data_root_list", type=str, default="flow")
+parser.add_argument("--mask_data_root_list", type=str, default="mask")
+parser.add_argument("--data_root_imagenet", default="imagenet", type=str)
+parser.add_argument("--annotation_file_path", default="dataset/annotation.csv", type=str)
+parser.add_argument("--imagenet_pairs_file", default="imagenet_pairs.txt", type=str)
+parser.add_argument("--gpu_ids", type=str, default="0,1,2,3", help="separate by comma")
+parser.add_argument("--workers", type=int, default=0)
+parser.add_argument("--batch_size", type=int, default=2)
+parser.add_argument("--image_size", type=int, default=[384, 384])
+parser.add_argument("--ic", type=int, default=7)
+parser.add_argument("--epoch", type=int, default=40)
+parser.add_argument("--resume_epoch", type=int, default=0)
+parser.add_argument("--resume", type=bool, default=False)
+parser.add_argument("--load_pretrained_model", type=bool, default=False)
+parser.add_argument("--lr", type=float, default=1e-4)
+parser.add_argument("--beta1", type=float, default=0.5)
+parser.add_argument("--lr_step", type=int, default=1)
+parser.add_argument("--lr_gamma", type=float, default=0.9)
+parser.add_argument("--checkpoint_dir", type=str, default="checkpoints")
+parser.add_argument("--checkpoint_step", type=int, default=500)
+parser.add_argument("--real_reference_probability", type=float, default=0.7)
+parser.add_argument("--nonzero_placeholder_probability", type=float, default=0.0)
+parser.add_argument("--domain_invariant", type=bool, default=False)
+parser.add_argument("--weigth_l1", type=float, default=2.0)
+parser.add_argument("--weight_contextual", type=float, default="0.5")
+parser.add_argument("--weight_perceptual", type=float, default="0.02")
+parser.add_argument("--weight_smoothness", type=float, default="5.0")
+parser.add_argument("--weight_gan", type=float, default="0.5")
+parser.add_argument("--weight_nonlocal_smoothness", type=float, default="0.0")
+parser.add_argument("--weight_nonlocal_consistent", type=float, default="0.0")
+parser.add_argument("--weight_consistent", type=float, default="0.05")
+parser.add_argument("--luminance_noise", type=float, default="2.0")
+parser.add_argument("--permute_data", type=bool, default=True)
+parser.add_argument("--contextual_loss_direction", type=str, default="forward", help="forward or backward matching")
+parser.add_argument("--batch_accum_size", type=int, default=10)
+parser.add_argument("--epoch_train_discriminator", type=int, default=3)
+parser.add_argument("--vit_version", type=str, default="vit_tiny_patch16_384")
+parser.add_argument("--use_dummy", type=bool, default=False)
+parser.add_argument("--use_wandb", type=bool, default=False)
+parser.add_argument("--use_feature_transform", type=bool, default=False)
+parser.add_argument("--head_out_idx", type=str, default="8,9,10,11")
+parser.add_argument("--wandb_token", type=str, default="")
+parser.add_argument("--wandb_name", type=str, default="")
+
+
+def load_data():
+ transforms_video = [
+ CenterCrop(opt.image_size),
+ RGB2Lab(),
+ ToTensor(),
+ Normalize(),
+ ]
+
+ train_dataset_videos = [
+ VideosDataset(
+ video_data_root=video_data_root,
+ flow_data_root=flow_data_root,
+ mask_data_root=mask_data_root,
+ imagenet_folder=opt.data_root_imagenet,
+ annotation_file_path=opt.annotation_file_path,
+ image_size=opt.image_size,
+ image_transform=transforms.Compose(transforms_video),
+ real_reference_probability=opt.real_reference_probability,
+ nonzero_placeholder_probability=opt.nonzero_placeholder_probability,
+ )
+ for video_data_root, flow_data_root, mask_data_root in zip(
+ opt.video_data_root_list, opt.flow_data_root_list, opt.mask_data_root_list
+ )
+ ]
+
+ transforms_imagenet = [CenterPad_threshold(opt.image_size), RGB2Lab(), ToTensor(), Normalize()]
+ extra_reference_transform = [
+ torch_transforms.RandomHorizontalFlip(0.5),
+ torch_transforms.RandomResizedCrop(480, (0.98, 1.0), ratio=(0.8, 1.2)),
+ ]
+
+ train_dataset_imagenet = VideosDataset_ImageNet(
+ imagenet_data_root=opt.data_root_imagenet,
+ pairs_file=opt.imagenet_pairs_file,
+ image_size=opt.image_size,
+ transforms_imagenet=transforms_imagenet,
+ distortion_level=4,
+ brightnessjitter=5,
+ nonzero_placeholder_probability=opt.nonzero_placeholder_probability,
+ extra_reference_transform=extra_reference_transform,
+ real_reference_probability=opt.real_reference_probability,
+ )
+
+ # video_training_length = sum([len(dataset) for dataset in train_dataset_videos])
+ # imagenet_training_length = len(train_dataset_imagenet)
+ # dataset_training_length = sum([dataset.real_len for dataset in train_dataset_videos]) + +train_dataset_imagenet.real_len
+ dataset_combined = ConcatDataset(train_dataset_videos + [train_dataset_imagenet])
+ # sampler=[]
+ # seed_sampler=int.from_bytes(os.urandom(4),"big")
+ # random.seed(seed_sampler)
+ # for idx in range(opt.epoch):
+ # sampler = sampler + random.sample(range(dataset_training_length),dataset_training_length)
+ # wandb.log({"Sampler_Seed":seed_sampler})
+ # sampler = sampler+WeightedRandomSampler([1] * video_training_length + [1] * imagenet_training_length, dataset_training_length*opt.epoch)
+
+ # video_training_length = sum([len(dataset) for dataset in train_dataset_videos])
+ # dataset_training_length = sum([dataset.real_len for dataset in train_dataset_videos])
+ # dataset_combined = ConcatDataset(train_dataset_videos)
+ # sampler = WeightedRandomSampler([1] * video_training_length, dataset_training_length * opt.epoch)
+
+ data_loader = DataLoader(dataset_combined, batch_size=opt.batch_size, shuffle=True, num_workers=opt.workers)
+ return data_loader
+
+
+def training_logger():
+ if (total_iter % opt.checkpoint_step == 0) or (total_iter == len(data_loader)):
+ train_loss_dict = {"train/" + str(k): v / loss_handler.count_sample for k, v in loss_handler.loss_dict.items()}
+ train_loss_dict["train/opt_g_lr_1"] = step_optim_scheduler_g.get_last_lr()[0]
+ train_loss_dict["train/opt_g_lr_2"] = step_optim_scheduler_g.get_last_lr()[1]
+ train_loss_dict["train/opt_d_lr"] = step_optim_scheduler_d.get_last_lr()[0]
+
+ alert_text = f"l1_loss: {l1_loss.item()}\npercep_loss: {perceptual_loss.item()}\nctx_loss: {contextual_loss_total.item()}\ncst_loss: {consistent_loss.item()}\nsm_loss: {smoothness_loss.item()}\ntotal: {total_loss.item()}"
+
+ if opt.use_wandb:
+ wandb.log(train_loss_dict)
+ wandb.alert(title=f"Progress training #{total_iter}", text=alert_text)
+
+ for idx in range(I_predict_rgb.shape[0]):
+ concated_I = make_grid(
+ [(I_predict_rgb[idx] * 255), (I_reference_rgb[idx] * 255), (I_current_rgb[idx] * 255)], nrow=3
+ )
+ wandb_concated_I = wandb.Image(
+ concated_I,
+ caption="[LEFT] Predict, [CENTER] Reference, [RIGHT] Ground truth\n[REF] {}, [FRAME] {}".format(
+ ref_path[idx], curr_frame_path[idx]
+ ),
+ )
+ wandb.log({f"example_{idx}": wandb_concated_I})
+
+ torch.save(
+ nonlocal_net.state_dict(),
+ os.path.join(opt.checkpoint_dir, "nonlocal_net_iter.pth"),
+ )
+ torch.save(
+ colornet.state_dict(),
+ os.path.join(opt.checkpoint_dir, "colornet_iter.pth"),
+ )
+ torch.save(
+ discriminator.state_dict(),
+ os.path.join(opt.checkpoint_dir, "discriminator_iter.pth"),
+ )
+ torch.save(embed_net.state_dict(), os.path.join(opt.checkpoint_dir, "embed_net_iter.pth"))
+
+ loss_handler.reset()
+
+
+def load_params(ckpt_file):
+ params = torch.load(ckpt_file)
+ new_params = []
+ for key, value in params.items():
+ new_params.append((key, value))
+ return OrderedDict(new_params)
+
+
+def parse(parser, save=True):
+ opt = parser.parse_args()
+ args = vars(opt)
+
+ print("------------------------------ Options -------------------------------")
+ for k, v in sorted(args.items()):
+ print("%s: %s" % (str(k), str(v)))
+ print("-------------------------------- End ---------------------------------")
+
+ if save:
+ file_name = os.path.join("opt.txt")
+ with open(file_name, "wt") as opt_file:
+ opt_file.write(os.path.basename(sys.argv[0]) + " " + strftime("%Y-%m-%d %H:%M:%S", gmtime()) + "\n")
+ opt_file.write("------------------------------ Options -------------------------------\n")
+ for k, v in sorted(args.items()):
+ opt_file.write("%s: %s\n" % (str(k), str(v)))
+ opt_file.write("-------------------------------- End ---------------------------------\n")
+ return opt
+
+
+def gpu_setup():
+ os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+ cudnn.benchmark = True
+ torch.cuda.set_device(opt.gpu_ids[0])
+ device = torch.device("cuda")
+ print("running on GPU", opt.gpu_ids)
+ return device
+
+
+if __name__ == "__main__":
+ ############################################## SETUP ###############################################
+ torch.multiprocessing.set_start_method("spawn", force=True)
+ # =============== GET PARSER OPTION ================
+ opt = parse(parser)
+ opt.video_data_root_list = opt.video_data_root_list.split(",")
+ opt.flow_data_root_list = opt.flow_data_root_list.split(",")
+ opt.mask_data_root_list = opt.mask_data_root_list.split(",")
+ opt.gpu_ids = list(map(int, opt.gpu_ids.split(",")))
+ opt.head_out_idx = list(map(int, opt.head_out_idx.split(",")))
+ n_dim_output = 3 if opt.use_feature_transform else 4
+ assert len(opt.head_out_idx) == 4, "Size of head_out_idx must be 4"
+
+ os.makedirs(opt.checkpoint_dir, exist_ok=True)
+
+ # =================== INIT WANDB ===================
+ if opt.use_wandb:
+ print("Save images to Wandb")
+ if opt.wandb_token != "":
+ try:
+ wandb.login(key=opt.wandb_token)
+ except:
+ pass
+ wandb.init(
+ project="video-colorization",
+ name=f"{opt.wandb_name} {datetime.now(tz=ZoneInfo('Asia/Ho_Chi_Minh')).strftime('%Y/%m/%d_%H-%M-%S')}",
+ )
+
+ # ================== SETUP DEVICE ==================
+ # torch.multiprocessing.set_start_method("spawn", force=True)
+ # device = gpu_setup()
+ device = "cuda" if torch.cuda.is_available() else "cpu"
+
+ # =================== VIT CONFIG ===================
+ cfg = load_config()
+ model_cfg = cfg["model"][opt.vit_version]
+ model_cfg["image_size"] = (384, 384)
+ model_cfg["backbone"] = opt.vit_version
+ model_cfg["dropout"] = 0.0
+ model_cfg["drop_path_rate"] = 0.1
+ model_cfg["n_cls"] = 10
+
+ ############################################ LOAD DATA #############################################
+ if opt.use_dummy:
+ H, W = 384, 384
+ I_last_lab = torch.rand(opt.batch_size, 3, H, W)
+ I_current_lab = torch.rand(opt.batch_size, 3, H, W)
+ I_reference_lab = torch.rand(opt.batch_size, 3, H, W)
+ flow_forward = torch.rand(opt.batch_size, 2, H, W)
+ mask = torch.rand(opt.batch_size, 1, H, W)
+ placeholder_lab = torch.rand(opt.batch_size, 3, H, W)
+ self_ref_flag = torch.rand(opt.batch_size, 3, H, W)
+ data_loader = [
+ [I_last_lab, I_current_lab, I_reference_lab, flow_forward, mask, placeholder_lab, self_ref_flag, None, None, None]
+ for _ in range(10)
+ ]
+ else:
+ data_loader = load_data()
+
+ ########################################## DEFINE NETWORK ##########################################
+ print("-" * 59)
+ print("| TYPE | Model name | Num params |")
+ print("-" * 59)
+ colornet = ColorVidNet(opt.ic).to(device)
+ colornet_params = print_num_params(colornet)
+
+ if opt.use_feature_transform:
+ nonlocal_net = WarpNet().to(device)
+ else:
+ nonlocal_net = WarpNet_new(model_cfg["d_model"]).to(device)
+ nonlocal_net_params = print_num_params(nonlocal_net)
+
+ discriminator = Discriminator_x64(ndf=64).to(device)
+ discriminator_params = print_num_params(discriminator)
+
+ weighted_layer_color = WeightedAverage_color().to(device)
+ weighted_layer_color_params = print_num_params(weighted_layer_color)
+
+ nonlocal_weighted_layer = NonlocalWeightedAverage().to(device)
+ nonlocal_weighted_layer_params = print_num_params(nonlocal_weighted_layer)
+
+ warping_layer = WarpingLayer(device=device).to(device)
+ warping_layer_params = print_num_params(warping_layer)
+
+ embed_net = EmbedModel(model_cfg, head_out_idx=opt.head_out_idx, n_dim_output=n_dim_output, device=device)
+ embed_net_params = print_num_params(embed_net)
+ print("-" * 59)
+ print(
+ f"| TOTAL | | {('{:,}'.format(colornet_params+nonlocal_net_params+discriminator_params+weighted_layer_color_params+nonlocal_weighted_layer_params+warping_layer_params+embed_net_params)).rjust(10)} |"
+ )
+ print("-" * 59)
+ if opt.use_wandb:
+ wandb.watch(discriminator, log="all", log_freq=opt.checkpoint_step, idx=0)
+ wandb.watch(embed_net, log="all", log_freq=opt.checkpoint_step, idx=1)
+ wandb.watch(colornet, log="all", log_freq=opt.checkpoint_step, idx=2)
+ wandb.watch(nonlocal_net, log="all", log_freq=opt.checkpoint_step, idx=3)
+
+ # ============= USE PRETRAINED OR NOT ==============
+ if opt.load_pretrained_model:
+ # pretrained_path = "/workspace/video_colorization/ckpt_folder_ver_1_vit_small_patch16_384"
+ nonlocal_net.load_state_dict(load_params(os.path.join(opt.checkpoint_dir, "nonlocal_net_iter.pth")))
+ colornet.load_state_dict(load_params(os.path.join(opt.checkpoint_dir, "colornet_iter.pth")))
+ discriminator.load_state_dict(load_params(os.path.join(opt.checkpoint_dir, "discriminator_iter.pth")))
+ embed_net_params = load_params(os.path.join(opt.checkpoint_dir, "embed_net_iter.pth"))
+ embed_net_params.pop("vit.heads_out")
+ embed_net.load_state_dict(embed_net_params)
+
+ ###################################### DEFINE LOSS FUNCTIONS #######################################
+ perceptual_loss_fn = Perceptual_loss(opt.domain_invariant, opt.weight_perceptual)
+ contextual_loss = ContextualLoss().to(device)
+ contextual_forward_loss = ContextualLoss_forward().to(device)
+
+ ######################################## DEFINE OPTIMIZERS #########################################
+ optimizer_g = optim.AdamW(
+ [
+ {"params": nonlocal_net.parameters(), "lr": opt.lr},
+ {"params": colornet.parameters(), "lr": 2 * opt.lr},
+ {"params": embed_net.parameters(), "lr": opt.lr},
+ ],
+ betas=(0.5, 0.999),
+ eps=1e-5,
+ amsgrad=True,
+ )
+
+ optimizer_d = optim.AdamW(
+ filter(lambda p: p.requires_grad, discriminator.parameters()),
+ lr=opt.lr,
+ betas=(0.5, 0.999),
+ amsgrad=True,
+ )
+
+ step_optim_scheduler_g = PolynomialLR(
+ optimizer_g,
+ step_size=opt.lr_step,
+ iter_warmup=0,
+ iter_max=len(data_loader) * opt.epoch,
+ power=0.9,
+ min_lr=1e-8,
+ )
+ step_optim_scheduler_d = PolynomialLR(
+ optimizer_d,
+ step_size=opt.lr_step,
+ iter_warmup=0,
+ iter_max=len(data_loader) * opt.epoch,
+ power=0.9,
+ min_lr=1e-8,
+ )
+ ########################################## DEFINE OTHERS ###########################################
+ downsampling_by2 = nn.AvgPool2d(kernel_size=2).to(device)
+ timer_handler = TimeHandler()
+ loss_handler = LossHandler() # Handle loss value
+ ############################################## TRAIN ###############################################
+
+ total_iter = 0
+ for epoch_num in range(1, opt.epoch + 1):
+ # if opt.use_wandb:
+ # wandb.log({"Current_trainning_epoch": epoch_num})
+ with tqdm(total=len(data_loader), position=0, leave=True) as pbar:
+ for iter, sample in enumerate(data_loader):
+ timer_handler.compute_time("load_sample")
+ total_iter += 1
+
+ # =============== LOAD DATA SAMPLE ================
+ (
+ I_last_lab, ######## (3, H, W)
+ I_current_lab, ##### (3, H, W)
+ I_reference_lab, ### (3, H, W)
+ flow_forward, ###### (2, H, W)
+ mask, ############## (1, H, W)
+ placeholder_lab, ### (3, H, W)
+ self_ref_flag, ##### (3, H, W)
+ prev_frame_path,
+ curr_frame_path,
+ ref_path,
+ ) = sample
+
+ I_last_lab = I_last_lab.to(device)
+ I_current_lab = I_current_lab.to(device)
+ I_reference_lab = I_reference_lab.to(device)
+ flow_forward = flow_forward.to(device)
+ mask = mask.to(device)
+ placeholder_lab = placeholder_lab.to(device)
+ self_ref_flag = self_ref_flag.to(device)
+
+ I_last_l = I_last_lab[:, 0:1, :, :]
+ I_last_ab = I_last_lab[:, 1:3, :, :]
+ I_current_l = I_current_lab[:, 0:1, :, :]
+ I_current_ab = I_current_lab[:, 1:3, :, :]
+ I_reference_l = I_reference_lab[:, 0:1, :, :]
+ I_reference_ab = I_reference_lab[:, 1:3, :, :]
+ I_reference_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_reference_l), I_reference_ab), dim=1))
+
+ _load_sample_time = timer_handler.compute_time("load_sample")
+ timer_handler.compute_time("forward_model")
+
+ features_B = embed_net(I_reference_rgb)
+ _, B_feat_1, B_feat_2, B_feat_3 = features_B
+
+ # ================== COLORIZATION ==================
+ # The last frame
+ I_last_ab_predict, I_last_nonlocal_lab_predict = frame_colorization(
+ IA_l=I_last_l,
+ IB_lab=I_reference_lab,
+ IA_last_lab=placeholder_lab,
+ features_B=features_B,
+ embed_net=embed_net,
+ colornet=colornet,
+ nonlocal_net=nonlocal_net,
+ luminance_noise=opt.luminance_noise,
+ )
+ I_last_lab_predict = torch.cat((I_last_l, I_last_ab_predict), dim=1)
+
+ # The current frame
+ I_current_ab_predict, I_current_nonlocal_lab_predict = frame_colorization(
+ IA_l=I_current_l,
+ IB_lab=I_reference_lab,
+ IA_last_lab=I_last_lab_predict,
+ features_B=features_B,
+ embed_net=embed_net,
+ colornet=colornet,
+ nonlocal_net=nonlocal_net,
+ luminance_noise=opt.luminance_noise,
+ )
+ I_current_lab_predict = torch.cat((I_last_l, I_current_ab_predict), dim=1)
+
+ # ================ UPDATE GENERATOR ================
+ if opt.weight_gan > 0:
+ optimizer_g.zero_grad()
+ optimizer_d.zero_grad()
+ fake_data_lab = torch.cat(
+ (
+ uncenter_l(I_current_l),
+ I_current_ab_predict,
+ uncenter_l(I_last_l),
+ I_last_ab_predict,
+ ),
+ dim=1,
+ )
+ real_data_lab = torch.cat(
+ (
+ uncenter_l(I_current_l),
+ I_current_ab,
+ uncenter_l(I_last_l),
+ I_last_ab,
+ ),
+ dim=1,
+ )
+
+ if opt.permute_data:
+ batch_index = torch.arange(-1, opt.batch_size - 1, dtype=torch.long)
+ real_data_lab = real_data_lab[batch_index, ...]
+
+ discriminator_loss = discriminator_loss_fn(real_data_lab, fake_data_lab, discriminator)
+ discriminator_loss.backward()
+ optimizer_d.step()
+
+ optimizer_g.zero_grad()
+ optimizer_d.zero_grad()
+
+ # ================== COMPUTE LOSS ==================
+ # L1 loss
+ l1_loss = l1_loss_fn(I_current_ab, I_current_ab_predict) * opt.weigth_l1
+
+ # Generator_loss. TODO: freeze this to train some first epoch
+ if epoch_num > opt.epoch_train_discriminator:
+ generator_loss = generator_loss_fn(real_data_lab, fake_data_lab, discriminator, opt.weight_gan, device)
+
+ # Perceptual Loss
+ I_predict_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l), I_current_ab_predict), dim=1))
+ _, pred_feat_1, pred_feat_2, pred_feat_3 = embed_net(I_predict_rgb)
+
+ I_current_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l), I_current_ab), dim=1))
+ A_feat_0, _, _, A_feat_3 = embed_net(I_current_rgb)
+
+ perceptual_loss = perceptual_loss_fn(A_feat_3, pred_feat_3)
+
+ # Contextual Loss
+ contextual_style5_1 = torch.mean(contextual_forward_loss(pred_feat_3, B_feat_3.detach())) * 8
+ contextual_style4_1 = torch.mean(contextual_forward_loss(pred_feat_2, B_feat_2.detach())) * 4
+ contextual_style3_1 = torch.mean(contextual_forward_loss(pred_feat_1, B_feat_1.detach())) * 2
+ # if opt.use_feature_transform:
+ # contextual_style3_1 = (
+ # torch.mean(
+ # contextual_forward_loss(
+ # downsampling_by2(pred_feat_1),
+ # downsampling_by2(),
+ # )
+ # )
+ # * 2
+ # )
+ # else:
+ # contextual_style3_1 = (
+ # torch.mean(
+ # contextual_forward_loss(
+ # pred_feat_1,
+ # B_feat_1.detach(),
+ # )
+ # )
+ # * 2
+ # )
+
+ contextual_loss_total = (
+ contextual_style5_1 + contextual_style4_1 + contextual_style3_1
+ ) * opt.weight_contextual
+
+ # Consistent Loss
+ consistent_loss = consistent_loss_fn(
+ I_current_lab_predict,
+ I_last_ab_predict,
+ I_current_nonlocal_lab_predict,
+ I_last_nonlocal_lab_predict,
+ flow_forward,
+ mask,
+ warping_layer,
+ weight_consistent=opt.weight_consistent,
+ weight_nonlocal_consistent=opt.weight_nonlocal_consistent,
+ device=device,
+ )
+
+ # Smoothness loss
+ smoothness_loss = smoothness_loss_fn(
+ I_current_l,
+ I_current_lab,
+ I_current_ab_predict,
+ A_feat_0,
+ weighted_layer_color,
+ nonlocal_weighted_layer,
+ weight_smoothness=opt.weight_smoothness,
+ weight_nonlocal_smoothness=opt.weight_nonlocal_smoothness,
+ device=device,
+ )
+
+ # Total loss
+ total_loss = l1_loss + perceptual_loss + contextual_loss_total + consistent_loss + smoothness_loss
+ if epoch_num > opt.epoch_train_discriminator:
+ total_loss += generator_loss
+
+ # Add loss to loss handler
+ loss_handler.add_loss(key="total_loss", loss=total_loss.item())
+ loss_handler.add_loss(key="l1_loss", loss=l1_loss.item())
+ loss_handler.add_loss(key="perceptual_loss", loss=perceptual_loss.item())
+ loss_handler.add_loss(key="contextual_loss", loss=contextual_loss_total.item())
+ loss_handler.add_loss(key="consistent_loss", loss=consistent_loss.item())
+ loss_handler.add_loss(key="smoothness_loss", loss=smoothness_loss.item())
+ loss_handler.add_loss(key="discriminator_loss", loss=discriminator_loss.item())
+ if epoch_num > opt.epoch_train_discriminator:
+ loss_handler.add_loss(key="generator_loss", loss=generator_loss.item())
+ loss_handler.count_one_sample()
+
+ total_loss.backward()
+
+ optimizer_g.step()
+ step_optim_scheduler_g.step()
+ step_optim_scheduler_d.step()
+
+ _forward_model_time = timer_handler.compute_time("forward_model")
+
+ timer_handler.compute_time("training_logger")
+ training_logger()
+ _training_logger_time = timer_handler.compute_time("training_logger")
+
+ pbar.set_description(
+ f"Epochs: {epoch_num}, Load_sample: {_load_sample_time:.3f}s, Forward: {_forward_model_time:.3f}s, log: {_training_logger_time:.3f}s"
+ )
+ pbar.update(1)
diff --git a/train_ddp.py b/train_ddp.py
new file mode 100644
index 0000000000000000000000000000000000000000..3fc255c50f7f7aad1667940e131e351a3e3d1ed7
--- /dev/null
+++ b/train_ddp.py
@@ -0,0 +1,637 @@
+import os
+import sys
+import wandb
+import argparse
+import numpy as np
+from tqdm import tqdm
+from PIL import Image
+from datetime import datetime
+from zoneinfo import ZoneInfo
+from time import gmtime, strftime
+from collections import OrderedDict
+import random
+
+import torch
+import torch.nn as nn
+import torch.optim as optim
+import torch.backends.cudnn as cudnn
+from torchvision.transforms import CenterCrop
+from torch.utils.data import ConcatDataset, DataLoader, WeightedRandomSampler
+import torchvision.transforms as torch_transforms
+from torchvision.utils import make_grid
+
+from src.losses import (
+ ContextualLoss,
+ ContextualLoss_forward,
+ Perceptual_loss,
+ consistent_loss_fn,
+ discriminator_loss_fn,
+ generator_loss_fn,
+ l1_loss_fn,
+ smoothness_loss_fn,
+)
+from src.models.CNN.GAN_models import Discriminator_x64
+from src.models.CNN.ColorVidNet import ColorVidNet
+from src.models.CNN.FrameColor import frame_colorization
+from src.models.CNN.NonlocalNet import WeightedAverage_color, NonlocalWeightedAverage, WarpNet, WarpNet_new
+from src.models.vit.embed import EmbedModel
+from src.models.vit.config import load_config
+from src.data import transforms
+from src.data.dataloader import VideosDataset, VideosDataset_ImageNet
+from src.utils import CenterPad_threshold
+from src.utils import (
+ TimeHandler,
+ RGB2Lab,
+ ToTensor,
+ Normalize,
+ LossHandler,
+ WarpingLayer,
+ uncenter_l,
+ tensor_lab2rgb,
+ print_num_params,
+ SquaredPadding
+)
+from src.scheduler import PolynomialLR
+
+from torch.nn.parallel import DistributedDataParallel as DDP
+import torch.distributed as dist
+from torch.utils.data.distributed import DistributedSampler
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--video_data_root_list", type=str, default="dataset")
+parser.add_argument("--flow_data_root_list", type=str, default='flow')
+parser.add_argument("--mask_data_root_list", type=str, default='mask')
+parser.add_argument("--data_root_imagenet", default="imagenet", type=str)
+parser.add_argument("--annotation_file_path", default="dataset/annotation.csv", type=str)
+parser.add_argument("--imagenet_pairs_file", default="imagenet_pairs.txt", type=str)
+parser.add_argument("--gpu_ids", type=str, default="0,1,2,3", help="separate by comma")
+parser.add_argument("--workers", type=int, default=0)
+parser.add_argument("--batch_size", type=int, default=2)
+parser.add_argument("--image_size", type=int, default=[384, 384])
+parser.add_argument("--ic", type=int, default=7)
+parser.add_argument("--epoch", type=int, default=40)
+parser.add_argument("--resume_epoch", type=int, default=0)
+parser.add_argument("--resume", action='store_true')
+parser.add_argument("--load_pretrained_model", action='store_true')
+parser.add_argument("--pretrained_model_dir", type=str, default='ckpt')
+parser.add_argument("--lr", type=float, default=1e-4)
+parser.add_argument("--beta1", type=float, default=0.5)
+parser.add_argument("--lr_step", type=int, default=1)
+parser.add_argument("--lr_gamma", type=float, default=0.9)
+parser.add_argument("--checkpoint_dir", type=str, default="checkpoints")
+parser.add_argument("--checkpoint_step", type=int, default=500)
+parser.add_argument("--real_reference_probability", type=float, default=0.7)
+parser.add_argument("--nonzero_placeholder_probability", type=float, default=0.0)
+parser.add_argument("--domain_invariant", action='store_true')
+parser.add_argument("--weigth_l1", type=float, default=2.0)
+parser.add_argument("--weight_contextual", type=float, default="0.5")
+parser.add_argument("--weight_perceptual", type=float, default="0.02")
+parser.add_argument("--weight_smoothness", type=float, default="5.0")
+parser.add_argument("--weight_gan", type=float, default="0.5")
+parser.add_argument("--weight_nonlocal_smoothness", type=float, default="0.0")
+parser.add_argument("--weight_nonlocal_consistent", type=float, default="0.0")
+parser.add_argument("--weight_consistent", type=float, default="0.05")
+parser.add_argument("--luminance_noise", type=float, default="2.0")
+parser.add_argument("--permute_data", action='store_true')
+parser.add_argument("--contextual_loss_direction", type=str, default="forward", help="forward or backward matching")
+parser.add_argument("--batch_accum_size", type=int, default=10)
+parser.add_argument("--epoch_train_discriminator", type=int, default=3)
+parser.add_argument("--vit_version", type=str, default="vit_tiny_patch16_384")
+parser.add_argument("--use_dummy", action='store_true')
+parser.add_argument("--use_wandb", action='store_true')
+parser.add_argument("--use_feature_transform", action='store_true')
+parser.add_argument("--head_out_idx", type=str, default="8,9,10,11")
+parser.add_argument("--wandb_token", type=str, default="")
+parser.add_argument("--wandb_name", type=str, default="")
+
+
+def ddp_setup():
+ dist.init_process_group(backend="nccl")
+ local_rank = int(os.environ['LOCAL_RANK'])
+ return local_rank
+
+def ddp_cleanup():
+ dist.destroy_process_group()
+
+def prepare_dataloader_ddp(dataset, batch_size=4, pin_memory=False, num_workers=0):
+ sampler = DistributedSampler(dataset, shuffle=True)
+ dataloader = DataLoader(dataset,
+ batch_size=batch_size,
+ pin_memory=pin_memory,
+ num_workers=num_workers,
+ sampler=sampler)
+ return dataloader
+
+def is_master_process():
+ ddp_rank = int(os.environ['RANK'])
+ return ddp_rank == 0
+
+def load_data():
+ transforms_video = [
+ SquaredPadding(target_size=opt.image_size[0]),
+ RGB2Lab(),
+ ToTensor(),
+ Normalize(),
+ ]
+
+ train_dataset_videos = [
+ VideosDataset(
+ video_data_root=video_data_root,
+ flow_data_root=flow_data_root,
+ mask_data_root=mask_data_root,
+ imagenet_folder=opt.data_root_imagenet,
+ annotation_file_path=opt.annotation_file_path,
+ image_size=opt.image_size,
+ image_transform=torch_transforms.Compose(transforms_video),
+ real_reference_probability=opt.real_reference_probability,
+ nonzero_placeholder_probability=opt.nonzero_placeholder_probability,
+ )
+ for video_data_root, flow_data_root, mask_data_root in zip(opt.video_data_root_list, opt.flow_data_root_list, opt.mask_data_root_list)
+ ]
+
+ transforms_imagenet = [SquaredPadding(target_size=opt.image_size[0]), RGB2Lab(), ToTensor(), Normalize()]
+ extra_reference_transform = [
+ torch_transforms.RandomHorizontalFlip(0.5),
+ torch_transforms.RandomResizedCrop(480, (0.98, 1.0), ratio=(0.8, 1.2)),
+ ]
+
+ train_dataset_imagenet = VideosDataset_ImageNet(
+ imagenet_data_root=opt.data_root_imagenet,
+ pairs_file=opt.imagenet_pairs_file,
+ image_size=opt.image_size,
+ transforms_imagenet=transforms_imagenet,
+ distortion_level=4,
+ brightnessjitter=5,
+ nonzero_placeholder_probability=opt.nonzero_placeholder_probability,
+ extra_reference_transform=extra_reference_transform,
+ real_reference_probability=opt.real_reference_probability,
+ )
+ dataset_combined = ConcatDataset(train_dataset_videos + [train_dataset_imagenet])
+ data_loader = prepare_dataloader_ddp(dataset_combined,
+ batch_size=opt.batch_size,
+ pin_memory=False,
+ num_workers=opt.workers)
+ return data_loader
+
+def save_checkpoints(saved_path):
+ # Make directory if the folder doesn't exists
+ os.makedirs(saved_path, exist_ok=True)
+
+ # Save model
+ torch.save(
+ nonlocal_net.module.state_dict(),
+ os.path.join(saved_path, "nonlocal_net.pth"),
+ )
+ torch.save(
+ colornet.module.state_dict(),
+ os.path.join(saved_path, "colornet.pth"),
+ )
+ torch.save(
+ discriminator.module.state_dict(),
+ os.path.join(saved_path, "discriminator.pth"),
+ )
+ torch.save(
+ embed_net.state_dict(),
+ os.path.join(saved_path, "embed_net.pth")
+ )
+
+ # Save learning state for restoring train
+ learning_state = {
+ "epoch": epoch_num,
+ "total_iter": total_iter,
+ "optimizer_g": optimizer_g.state_dict(),
+ "optimizer_d": optimizer_d.state_dict(),
+ "optimizer_schedule_g": step_optim_scheduler_g.state_dict(),
+ "optimizer_schedule_d": step_optim_scheduler_d.state_dict(),
+ }
+
+ torch.save(learning_state, os.path.join(saved_path, "learning_state.pth"))
+
def training_logger():
    """Push averaged losses, learning rates and example images to wandb.

    Fires every ``opt.checkpoint_step`` iterations (and once when ``total_iter``
    equals one epoch's length), then resets the running-loss accumulator.
    Reads the surrounding module's training state.
    """
    if total_iter % opt.checkpoint_step != 0 and total_iter != len(data_loader):
        return

    # Per-sample mean of every loss accumulated since the last reset.
    train_loss_dict = {
        f"train/{name}": value / loss_handler.count_sample
        for name, value in loss_handler.loss_dict.items()
    }
    train_loss_dict["train/opt_g_lr_1"] = step_optim_scheduler_g.get_last_lr()[0]
    train_loss_dict["train/opt_g_lr_2"] = step_optim_scheduler_g.get_last_lr()[1]
    train_loss_dict["train/opt_d_lr"] = step_optim_scheduler_d.get_last_lr()[0]

    alert_text = f"l1_loss: {l1_loss.item()}\npercep_loss: {perceptual_loss.item()}\nctx_loss: {contextual_loss_total.item()}\ncst_loss: {consistent_loss.item()}\nsm_loss: {smoothness_loss.item()}\ntotal: {total_loss.item()}"

    if opt.use_wandb:
        wandb.log(train_loss_dict)
        wandb.alert(title=f"Progress training #{total_iter}", text=alert_text)

        # One predict / reference / ground-truth strip per sample in the batch.
        for sample_idx in range(I_predict_rgb.shape[0]):
            strip = make_grid(
                [(I_predict_rgb[sample_idx] * 255), (I_reference_rgb[sample_idx] * 255), (I_current_rgb[sample_idx] * 255)],
                nrow=3,
            )
            logged_image = wandb.Image(
                strip,
                caption="[LEFT] Predict, [CENTER] Reference, [RIGHT] Ground truth\n[REF] {}, [FRAME] {}".format(
                    ref_path[sample_idx], curr_frame_path[sample_idx]
                ),
            )
            wandb.log({f"example_{sample_idx}": logged_image})

    loss_handler.reset()
+
+
def load_params(ckpt_file, local_rank, has_module=False):
    """Load a state dict from *ckpt_file*, optionally restoring the DDP prefix.

    Args:
        ckpt_file: Path to a checkpoint produced by ``torch.save``.
        local_rank: CUDA device index the tensors are mapped onto.
        has_module: When True, prepend ``"module."`` to every key so the dict
            can be loaded directly into a DDP-wrapped model.

    Returns:
        ``OrderedDict`` mapping (possibly prefixed) parameter names to tensors.
    """
    # Fall back to CPU when CUDA is unavailable so checkpoints can still be
    # loaded/inspected on CPU-only machines (the hard-coded cuda mapping
    # previously raised there).
    map_location = f"cuda:{local_rank}" if torch.cuda.is_available() else "cpu"
    params = torch.load(ckpt_file, map_location=map_location)
    prefix = "module." if has_module else ""
    return OrderedDict((prefix + key, value) for key, value in params.items())
+
+
def parse(parser, save=True):
    """Parse command-line options, echo them, and optionally record them.

    Args:
        parser: A configured ``argparse.ArgumentParser``.
        save: When True, also write the options (with a timestamp header)
            to ``opt.txt`` in the current working directory.

    Returns:
        The parsed ``argparse.Namespace``.
    """
    opt = parser.parse_args()
    options = sorted(vars(opt).items())

    header = "------------------------------ Options -------------------------------"
    footer = "-------------------------------- End ---------------------------------"
    print(header)
    for key, value in options:
        print(f"{key}: {value}")
    print(footer)

    if save:
        lines = [os.path.basename(sys.argv[0]) + " " + strftime("%Y-%m-%d %H:%M:%S", gmtime()) + "\n"]
        lines.append(header + "\n")
        lines.extend(f"{key}: {value}\n" for key, value in options)
        lines.append(footer + "\n")
        with open("opt.txt", "wt") as opt_file:
            opt_file.writelines(lines)
    return opt
+
+
def gpu_setup():
    """Select the primary GPU from the module-level ``opt.gpu_ids`` and return it.

    Pins CUDA device numbering to the PCI bus order (matches ``nvidia-smi``)
    and enables cuDNN autotuning for fixed-size inputs.
    """
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    cudnn.benchmark = True
    primary_gpu = opt.gpu_ids[0]
    torch.cuda.set_device(primary_gpu)
    print("running on GPU", opt.gpu_ids)
    return torch.device("cuda")
+
+
+if __name__ == "__main__":
+ ############################################## SETUP ###############################################
+ torch.multiprocessing.set_start_method("spawn", force=True)
+ # =============== GET PARSER OPTION ================
+ opt = parse(parser)
+ opt.video_data_root_list = opt.video_data_root_list.split(",")
+ opt.flow_data_root_list = opt.flow_data_root_list.split(",")
+ opt.mask_data_root_list = opt.mask_data_root_list.split(",")
+ opt.gpu_ids = list(map(int, opt.gpu_ids.split(",")))
+ opt.head_out_idx = list(map(int, opt.head_out_idx.split(",")))
+ n_dim_output = 3 if opt.use_feature_transform else 4
+ assert len(opt.head_out_idx) == 4, "Size of head_out_idx must be 4"
+
+ # =================== INIT WANDB ===================
+# if is_master_process():
+ if opt.use_wandb:
+ print("Save images to Wandb")
+ if opt.wandb_token != "":
+ try:
+ wandb.login(key=opt.wandb_token)
+ except:
+ pass
+ if opt.use_wandb:
+ wandb.init(
+ project="video-colorization",
+ group=f"{opt.wandb_name} {datetime.now(tz=ZoneInfo('Asia/Ho_Chi_Minh')).strftime('%Y/%m/%d_%H-%M-%S')}",
+ #group="DDP"
+ )
+
+ # ================== SETUP DEVICE ==================
+ local_rank = ddp_setup()
+ # =================== VIT CONFIG ===================
+ cfg = load_config()
+ model_cfg = cfg["model"][opt.vit_version]
+ model_cfg["image_size"] = (384, 384)
+ model_cfg["backbone"] = opt.vit_version
+ model_cfg["dropout"] = 0.0
+ model_cfg["drop_path_rate"] = 0.1
+ model_cfg["n_cls"] = 10
    ############################################ LOAD DATA #############################################
    data_loader = load_data()
    ########################################## DEFINE NETWORK ##########################################


    # Trainable nets are DDP-wrapped on this rank's device; the layers further
    # below are parameter-free helpers and stay plain modules.
    colornet = DDP(ColorVidNet(opt.ic).to(local_rank), device_ids=[local_rank], output_device=local_rank)
    if opt.use_feature_transform:
        nonlocal_net = DDP(WarpNet().to(local_rank), device_ids=[local_rank], output_device=local_rank)
    else:
        nonlocal_net = DDP(WarpNet_new(model_cfg["d_model"]).to(local_rank), device_ids=[local_rank], output_device=local_rank)
    discriminator = DDP(Discriminator_x64(ndf=64).to(local_rank), device_ids=[local_rank], output_device=local_rank)
    weighted_layer_color = WeightedAverage_color().to(local_rank)
    nonlocal_weighted_layer = NonlocalWeightedAverage().to(local_rank)
    warping_layer = WarpingLayer(device=local_rank).to(local_rank)
    # NOTE(review): embed_net is optimized below but NOT DDP-wrapped here —
    # confirm its gradients are meant to stay un-synchronized across ranks.
    embed_net = EmbedModel(model_cfg, head_out_idx=opt.head_out_idx, n_dim_output=n_dim_output, device=local_rank)

    if is_master_process():
        # Print number of parameters
        print("-" * 59)
        print("| TYPE | Model name | Num params |")
        print("-" * 59)

        colornet_params = print_num_params(colornet)
        nonlocal_net_params = print_num_params(nonlocal_net)
        discriminator_params = print_num_params(discriminator)
        weighted_layer_color_params = print_num_params(weighted_layer_color)
        nonlocal_weighted_layer_params = print_num_params(nonlocal_weighted_layer)
        warping_layer_params = print_num_params(warping_layer)
        embed_net_params = print_num_params(embed_net)
        print("-" * 59)
        print(
            f"| TOTAL | | {('{:,}'.format(colornet_params+nonlocal_net_params+discriminator_params+weighted_layer_color_params+nonlocal_weighted_layer_params+warping_layer_params+embed_net_params)).rjust(10)} |"
        )
        print("-" * 59)
        # Track gradient/parameter histograms at the same cadence as checkpoints.
        if opt.use_wandb:
            wandb.watch(discriminator, log="all", log_freq=opt.checkpoint_step, idx=0)
            wandb.watch(embed_net, log="all", log_freq=opt.checkpoint_step, idx=1)
            wandb.watch(colornet, log="all", log_freq=opt.checkpoint_step, idx=2)
            wandb.watch(nonlocal_net, log="all", log_freq=opt.checkpoint_step, idx=3)
+
+
+
    ###################################### DEFINE LOSS FUNCTIONS #######################################
    perceptual_loss_fn = Perceptual_loss(opt.domain_invariant, opt.weight_perceptual)
    contextual_loss = ContextualLoss().to(local_rank)
    contextual_forward_loss = ContextualLoss_forward().to(local_rank)
    ######################################## DEFINE OPTIMIZERS #########################################
    # Generator optimizer covers warp net, color net and the embedder;
    # colornet runs at twice the base learning rate.
    optimizer_g = optim.AdamW(
        [
            {"params": nonlocal_net.parameters(), "lr": opt.lr},
            {"params": colornet.parameters(), "lr": 2 * opt.lr},
            {"params": embed_net.parameters(), "lr": opt.lr},
        ],
        betas=(0.5, 0.999),
        eps=1e-5,
        amsgrad=True,
    )

    optimizer_d = optim.AdamW(
        filter(lambda p: p.requires_grad, discriminator.parameters()),
        lr=opt.lr,
        betas=(0.5, 0.999),
        amsgrad=True,
    )

    # Polynomial decay schedules over the full horizon (iterations, not epochs).
    step_optim_scheduler_g = PolynomialLR(
        optimizer_g,
        step_size=opt.lr_step,
        iter_warmup=0,
        iter_max=len(data_loader) * opt.epoch,
        power=0.9,
        min_lr=1e-8
    )
    step_optim_scheduler_d = PolynomialLR(
        optimizer_d,
        step_size=opt.lr_step,
        iter_warmup=0,
        iter_max=len(data_loader) * opt.epoch,
        power=0.9,
        min_lr=1e-8
    )
    ########################################## DEFINE OTHERS ###########################################
    downsampling_by2 = nn.AvgPool2d(kernel_size=2).to(local_rank)
    # timer_handler = TimeHandler()
    loss_handler = LossHandler()
    ############################################## TRAIN ###############################################

    # ============= USE PRETRAINED OR NOT ==============
    # Resume path: restore weights (with the "module." prefix re-added for the
    # DDP-wrapped nets) plus optimizer/scheduler state and the iteration count.
    if opt.load_pretrained_model:
        nonlocal_net.load_state_dict(load_params(os.path.join(opt.pretrained_model_dir, "nonlocal_net.pth"),
                                                 local_rank,
                                                 has_module=True))
        colornet.load_state_dict(load_params(os.path.join(opt.pretrained_model_dir, "colornet.pth"),
                                             local_rank,
                                             has_module=True))
        discriminator.load_state_dict(load_params(os.path.join(opt.pretrained_model_dir, "discriminator.pth"),
                                                  local_rank,
                                                  has_module=True))
        embed_net_params = load_params(os.path.join(opt.pretrained_model_dir, "embed_net.pth"),
                                       local_rank,
                                       has_module=False)
        # Drop the ViT output-head entry if the checkpoint carries one
        # (handles checkpoints saved with and without a DDP prefix).
        if "module.vit.heads_out" in embed_net_params:
            embed_net_params.pop("module.vit.heads_out")
        elif "vit.heads_out" in embed_net_params:
            embed_net_params.pop("vit.heads_out")
        embed_net.load_state_dict(embed_net_params)

        # NOTE(review): torch.load without map_location may target the GPU the
        # checkpoint was saved from rather than this rank — confirm.
        learning_checkpoint = torch.load(os.path.join(opt.pretrained_model_dir, "learning_state.pth"))
        optimizer_g.load_state_dict(learning_checkpoint["optimizer_g"])
        optimizer_d.load_state_dict(learning_checkpoint["optimizer_d"])
        step_optim_scheduler_g.load_state_dict(learning_checkpoint["optimizer_schedule_g"])
        step_optim_scheduler_d.load_state_dict(learning_checkpoint["optimizer_schedule_d"])
        total_iter = learning_checkpoint['total_iter']
        start_epoch = learning_checkpoint['epoch']+1
    else:
        total_iter = 0
        start_epoch = 1
+
+
    for epoch_num in range(start_epoch, opt.epoch+1):
        # Reshuffle the distributed sampler so each epoch sees a new ordering.
        data_loader.sampler.set_epoch(epoch_num-1)

        # Only the master rank renders a progress bar; other ranks iterate raw.
        if is_master_process():
            train_progress_bar = tqdm(
                data_loader,
                desc =f'Epoch {epoch_num}[Training]',
                position = 0,
                leave = False
            )
        else:
            train_progress_bar = data_loader
        # NOTE(review): "iter" shadows the builtin (and is unused in the body).
        for iter, sample in enumerate(train_progress_bar):
            # timer_handler.compute_time("load_sample")
            total_iter += 1
            # =============== LOAD DATA SAMPLE ================
            (
                I_last_lab, ######## (3, H, W)
                I_current_lab, ##### (3, H, W)
                I_reference_lab, ### (3, H, W)
                flow_forward, ###### (2, H, W)
                mask, ############## (1, H, W)
                placeholder_lab, ### (3, H, W)
                self_ref_flag, ##### (3, H, W)
                prev_frame_path,
                curr_frame_path,
                ref_path,
            ) = sample

            I_last_lab = I_last_lab.to(local_rank)
            I_current_lab = I_current_lab.to(local_rank)
            I_reference_lab = I_reference_lab.to(local_rank)
            flow_forward = flow_forward.to(local_rank)
            mask = mask.to(local_rank)
            placeholder_lab = placeholder_lab.to(local_rank)
            self_ref_flag = self_ref_flag.to(local_rank)

            # Split Lab tensors into luminance (channel 0) and chrominance (1:3).
            I_last_l = I_last_lab[:, 0:1, :, :]
            I_last_ab = I_last_lab[:, 1:3, :, :]
            I_current_l = I_current_lab[:, 0:1, :, :]
            I_current_ab = I_current_lab[:, 1:3, :, :]
            I_reference_l = I_reference_lab[:, 0:1, :, :]
            I_reference_ab = I_reference_lab[:, 1:3, :, :]
            I_reference_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_reference_l), I_reference_ab), dim=1))

            # _load_sample_time = timer_handler.compute_time("load_sample")
            # timer_handler.compute_time("forward_model")

            # Reference features are computed once and reused for both frames.
            features_B = embed_net(I_reference_rgb)
            _, B_feat_1, B_feat_2, B_feat_3 = features_B

            # ================== COLORIZATION ==================
            # The last frame
            I_last_ab_predict, I_last_nonlocal_lab_predict = frame_colorization(
                IA_l=I_last_l,
                IB_lab=I_reference_lab,
                IA_last_lab=placeholder_lab,
                features_B=features_B,
                embed_net=embed_net,
                colornet=colornet,
                nonlocal_net=nonlocal_net,
                luminance_noise=opt.luminance_noise,
            )
            I_last_lab_predict = torch.cat((I_last_l, I_last_ab_predict), dim=1)

            # The current frame
            I_current_ab_predict, I_current_nonlocal_lab_predict = frame_colorization(
                IA_l=I_current_l,
                IB_lab=I_reference_lab,
                IA_last_lab=I_last_lab_predict,
                features_B=features_B,
                embed_net=embed_net,
                colornet=colornet,
                nonlocal_net=nonlocal_net,
                luminance_noise=opt.luminance_noise,
            )
            # NOTE(review): concatenates I_last_l with the CURRENT ab prediction —
            # looks like it should be I_current_l; confirm before relying on it.
            I_current_lab_predict = torch.cat((I_last_l, I_current_ab_predict), dim=1)

            # ================ UPDATE GENERATOR ================
            if opt.weight_gan > 0:
                optimizer_g.zero_grad()
                optimizer_d.zero_grad()
                fake_data_lab = torch.cat(
                    (
                        uncenter_l(I_current_l),
                        I_current_ab_predict,
                        uncenter_l(I_last_l),
                        I_last_ab_predict,
                    ),
                    dim=1,
                )
                real_data_lab = torch.cat(
                    (
                        uncenter_l(I_current_l),
                        I_current_ab,
                        uncenter_l(I_last_l),
                        I_last_ab,
                    ),
                    dim=1,
                )

                # Roll the real batch by one (index -1 wraps) so real/fake
                # pairs fed to the discriminator are decorrelated.
                if opt.permute_data:
                    batch_index = torch.arange(-1, opt.batch_size - 1, dtype=torch.long)
                    real_data_lab = real_data_lab[batch_index, ...]

                discriminator_loss = discriminator_loss_fn(real_data_lab, fake_data_lab, discriminator)
                discriminator_loss.backward()
                optimizer_d.step()

                optimizer_g.zero_grad()
                optimizer_d.zero_grad()

            # ================== COMPUTE LOSS ==================
            # L1 loss
            l1_loss = l1_loss_fn(I_current_ab, I_current_ab_predict) * opt.weigth_l1

            # Generator_loss. TODO: freeze this to train some first epoch
            if epoch_num > opt.epoch_train_discriminator:
                # NOTE(review): real_data_lab/fake_data_lab only exist when
                # opt.weight_gan > 0 — NameError otherwise; confirm weight_gan
                # is always positive in supported configs.
                generator_loss = generator_loss_fn(real_data_lab, fake_data_lab, discriminator, opt.weight_gan, local_rank)

            # Perceptual Loss
            I_predict_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l), I_current_ab_predict), dim=1))
            _, pred_feat_1, pred_feat_2, pred_feat_3 = embed_net(I_predict_rgb)

            I_current_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l), I_current_ab), dim=1))
            A_feat_0, _, _, A_feat_3 = embed_net(I_current_rgb)

            perceptual_loss = perceptual_loss_fn(A_feat_3, pred_feat_3)

            # Contextual Loss
            # Deeper feature levels are weighted more heavily (8/4/2).
            contextual_style5_1 = torch.mean(contextual_forward_loss(pred_feat_3, B_feat_3.detach())) * 8
            contextual_style4_1 = torch.mean(contextual_forward_loss(pred_feat_2, B_feat_2.detach())) * 4
            contextual_style3_1 = torch.mean(contextual_forward_loss(pred_feat_1, B_feat_1.detach())) * 2

            contextual_loss_total = (
                contextual_style5_1 + contextual_style4_1 + contextual_style3_1
            ) * opt.weight_contextual

            # Consistent Loss
            consistent_loss = consistent_loss_fn(
                I_current_lab_predict,
                I_last_ab_predict,
                I_current_nonlocal_lab_predict,
                I_last_nonlocal_lab_predict,
                flow_forward,
                mask,
                warping_layer,
                weight_consistent=opt.weight_consistent,
                weight_nonlocal_consistent=opt.weight_nonlocal_consistent,
                device=local_rank,
            )

            # Smoothness loss
            smoothness_loss = smoothness_loss_fn(
                I_current_l,
                I_current_lab,
                I_current_ab_predict,
                A_feat_0,
                weighted_layer_color,
                nonlocal_weighted_layer,
                weight_smoothness=opt.weight_smoothness,
                weight_nonlocal_smoothness=opt.weight_nonlocal_smoothness,
                device=local_rank
            )

            # Total loss
            total_loss = l1_loss + perceptual_loss + contextual_loss_total + consistent_loss + smoothness_loss
            if epoch_num > opt.epoch_train_discriminator:
                total_loss += generator_loss

            # Add loss to loss handler
            loss_handler.add_loss(key="total_loss", loss=total_loss.item())
            loss_handler.add_loss(key="l1_loss", loss=l1_loss.item())
            loss_handler.add_loss(key="perceptual_loss", loss=perceptual_loss.item())
            loss_handler.add_loss(key="contextual_loss", loss=contextual_loss_total.item())
            loss_handler.add_loss(key="consistent_loss", loss=consistent_loss.item())
            loss_handler.add_loss(key="smoothness_loss", loss=smoothness_loss.item())
            # NOTE(review): discriminator_loss is only defined when weight_gan > 0
            # — this line raises NameError otherwise; confirm.
            loss_handler.add_loss(key="discriminator_loss", loss=discriminator_loss.item())
            if epoch_num > opt.epoch_train_discriminator:
                loss_handler.add_loss(key="generator_loss", loss=generator_loss.item())
            loss_handler.count_one_sample()

            total_loss.backward()

            optimizer_g.step()
            step_optim_scheduler_g.step()
            step_optim_scheduler_d.step()

            # _forward_model_time = timer_handler.compute_time("forward_model")

            # timer_handler.compute_time("training_logger")
            training_logger()
            # _training_logger_time = timer_handler.compute_time("training_logger")

        ####
        # One checkpoint directory per finished epoch (master rank only).
        if is_master_process():
            save_checkpoints(os.path.join(opt.checkpoint_dir, f"epoch_{epoch_num}"))
    ####
    if opt.use_wandb:
        wandb.finish()
    ddp_cleanup()
\ No newline at end of file
diff --git a/train_swin_224.py b/train_swin_224.py
new file mode 100644
index 0000000000000000000000000000000000000000..31e6069068771f8f3184f58bfea1dc4d0f11bdc0
--- /dev/null
+++ b/train_swin_224.py
@@ -0,0 +1,593 @@
+import os
+import sys
+import wandb
+import argparse
+import numpy as np
+from tqdm import tqdm
+from PIL import Image
+from datetime import datetime
+from zoneinfo import ZoneInfo
+from time import gmtime, strftime
+from collections import OrderedDict
+import random
+
+import torch
+import torch.nn as nn
+import torch.optim as optim
+import torch.backends.cudnn as cudnn
+from torchvision.transforms import CenterCrop
+from torch.utils.data import ConcatDataset, DataLoader
+import torchvision.transforms as torch_transforms
+from torchvision.utils import make_grid
+
+from src.losses import (
+ ContextualLoss,
+ ContextualLoss_forward,
+ Perceptual_loss,
+ consistent_loss_fn,
+ discriminator_loss_fn,
+ generator_loss_fn,
+ l1_loss_fn,
+ smoothness_loss_fn,
+)
+from src.models.CNN.GAN_models import Discriminator_x64_224
+from src.models.CNN.ColorVidNet import GeneralColorVidNet
+from src.models.CNN.FrameColor import frame_colorization
+from src.models.CNN.NonlocalNet import WeightedAverage_color, NonlocalWeightedAverage, GeneralWarpNet
+from src.models.vit.embed import GeneralEmbedModel
+from src.data import transforms
+from src.data.dataloader import VideosDataset, VideosDataset_ImageNet
+from src.utils import CenterPad_threshold
+from src.utils import (
+ TimeHandler,
+ RGB2Lab,
+ ToTensor,
+ Normalize,
+ LossHandler,
+ WarpingLayer,
+ uncenter_l,
+ tensor_lab2rgb,
+ print_num_params,
+)
+from src.scheduler import PolynomialLR
+
def _str2bool(value):
    """argparse type for boolean flags.

    ``type=bool`` is a classic argparse pitfall: ``bool("False")`` is True, so
    ANY explicit value on the command line used to enable the flag. This parses
    the usual true/false spellings instead and rejects anything else.
    """
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in ("yes", "true", "t", "y", "1"):
        return True
    if lowered in ("no", "false", "f", "n", "0"):
        return False
    raise argparse.ArgumentTypeError(f"Boolean value expected, got {value!r}")


parser = argparse.ArgumentParser()
parser.add_argument("--video_data_root_list", type=str, default="dataset")
parser.add_argument("--flow_data_root_list", type=str, default="flow")
parser.add_argument("--mask_data_root_list", type=str, default="mask")
parser.add_argument("--data_root_imagenet", default="imagenet", type=str)
parser.add_argument("--annotation_file_path", default="dataset/annotation.csv", type=str)
parser.add_argument("--imagenet_pairs_file", default="imagenet_pairs.txt", type=str)
parser.add_argument("--gpu_ids", type=str, default="0,1,2,3", help="separate by comma")
parser.add_argument("--workers", type=int, default=0)
parser.add_argument("--batch_size", type=int, default=2)
# NOTE(review): the default is a list but type=int converts a CLI value to a
# single int — the two shapes are inconsistent; confirm which one downstream
# code expects before changing.
parser.add_argument("--image_size", type=int, default=[384, 384])
parser.add_argument("--ic", type=int, default=7)
parser.add_argument("--epoch", type=int, default=40)
parser.add_argument("--resume_epoch", type=int, default=0)
parser.add_argument("--resume", type=_str2bool, default=False)
parser.add_argument("--load_pretrained_model", type=_str2bool, default=False)
parser.add_argument("--lr", type=float, default=1e-4)
parser.add_argument("--beta1", type=float, default=0.5)
parser.add_argument("--lr_step", type=int, default=1)
parser.add_argument("--lr_gamma", type=float, default=0.9)
parser.add_argument("--checkpoint_dir", type=str, default="checkpoints")
parser.add_argument("--checkpoint_step", type=int, default=500)
parser.add_argument("--real_reference_probability", type=float, default=0.7)
parser.add_argument("--nonzero_placeholder_probability", type=float, default=0.0)
parser.add_argument("--domain_invariant", type=_str2bool, default=False)
# Loss weights: numeric literals instead of the original string defaults
# (argparse re-parsed those through type=float, so values are unchanged).
parser.add_argument("--weigth_l1", type=float, default=2.0)  # (sic) name kept for CLI compatibility
parser.add_argument("--weight_contextual", type=float, default=0.5)
parser.add_argument("--weight_perceptual", type=float, default=0.02)
parser.add_argument("--weight_smoothness", type=float, default=5.0)
parser.add_argument("--weight_gan", type=float, default=0.5)
parser.add_argument("--weight_nonlocal_smoothness", type=float, default=0.0)
parser.add_argument("--weight_nonlocal_consistent", type=float, default=0.0)
parser.add_argument("--weight_consistent", type=float, default=0.05)
parser.add_argument("--luminance_noise", type=float, default=2.0)
parser.add_argument("--permute_data", type=_str2bool, default=True)
parser.add_argument("--contextual_loss_direction", type=str, default="forward", help="forward or backward matching")
parser.add_argument("--batch_accum_size", type=int, default=10)
parser.add_argument("--epoch_train_discriminator", type=int, default=3)
parser.add_argument("--vit_version", type=str, default="vit_tiny_patch16_384")
parser.add_argument("--use_dummy", type=_str2bool, default=False)
parser.add_argument("--use_wandb", type=_str2bool, default=False)
parser.add_argument("--use_feature_transform", type=_str2bool, default=False)
parser.add_argument("--head_out_idx", type=str, default="8,9,10,11")
parser.add_argument("--wandb_token", type=str, default="")
parser.add_argument("--wandb_name", type=str, default="")
+
+
def load_data():
    """Build the combined video + ImageNet training set and wrap it in a DataLoader.

    One ``VideosDataset`` is created per (video, flow, mask) root triple from
    the module-level ``opt`` lists, plus a single ImageNet pseudo-video
    dataset; all of them are concatenated and shuffled.
    """
    frame_transform = transforms.Compose(
        [
            CenterCrop(opt.image_size),
            RGB2Lab(),
            ToTensor(),
            Normalize(),
        ]
    )

    video_datasets = []
    for video_root, flow_root, mask_root in zip(
        opt.video_data_root_list, opt.flow_data_root_list, opt.mask_data_root_list
    ):
        video_datasets.append(
            VideosDataset(
                video_data_root=video_root,
                flow_data_root=flow_root,
                mask_data_root=mask_root,
                imagenet_folder=opt.data_root_imagenet,
                annotation_file_path=opt.annotation_file_path,
                image_size=opt.image_size,
                image_transform=frame_transform,
                real_reference_probability=opt.real_reference_probability,
                nonzero_placeholder_probability=opt.nonzero_placeholder_probability,
            )
        )

    imagenet_transforms = [CenterPad_threshold(opt.image_size), RGB2Lab(), ToTensor(), Normalize()]
    # Light augmentation applied only to the reference image.
    reference_augmentations = [
        torch_transforms.RandomHorizontalFlip(0.5),
        torch_transforms.RandomResizedCrop(480, (0.98, 1.0), ratio=(0.8, 1.2)),
    ]

    imagenet_dataset = VideosDataset_ImageNet(
        imagenet_data_root=opt.data_root_imagenet,
        pairs_file=opt.imagenet_pairs_file,
        image_size=opt.image_size,
        transforms_imagenet=imagenet_transforms,
        distortion_level=4,
        brightnessjitter=5,
        nonzero_placeholder_probability=opt.nonzero_placeholder_probability,
        extra_reference_transform=reference_augmentations,
        real_reference_probability=opt.real_reference_probability,
    )

    combined_dataset = ConcatDataset(video_datasets + [imagenet_dataset])
    return DataLoader(combined_dataset, batch_size=opt.batch_size, shuffle=True, num_workers=opt.workers)
+
+
def training_logger():
    """Every ``opt.checkpoint_step`` iterations: log averaged losses, learning
    rates and example images to wandb, write rolling "latest" checkpoints,
    and reset the running-loss accumulator.

    Reads the surrounding module's training state.
    """
    if total_iter % opt.checkpoint_step != 0 and total_iter != len(data_loader):
        return

    # Per-sample mean of every loss accumulated since the last reset.
    train_loss_dict = {
        f"train/{name}": value / loss_handler.count_sample
        for name, value in loss_handler.loss_dict.items()
    }
    train_loss_dict["train/opt_g_lr_1"] = step_optim_scheduler_g.get_last_lr()[0]
    train_loss_dict["train/opt_g_lr_2"] = step_optim_scheduler_g.get_last_lr()[1]
    train_loss_dict["train/opt_d_lr"] = step_optim_scheduler_d.get_last_lr()[0]

    alert_text = f"l1_loss: {l1_loss.item()}\npercep_loss: {perceptual_loss.item()}\nctx_loss: {contextual_loss_total.item()}\ncst_loss: {consistent_loss.item()}\nsm_loss: {smoothness_loss.item()}\ntotal: {total_loss.item()}"

    if opt.use_wandb:
        wandb.log(train_loss_dict)
        wandb.alert(title=f"Progress training #{total_iter}", text=alert_text)

        # One predict / reference / ground-truth strip per sample in the batch.
        for sample_idx in range(I_predict_rgb.shape[0]):
            strip = make_grid(
                [(I_predict_rgb[sample_idx] * 255), (I_reference_rgb[sample_idx] * 255), (I_current_rgb[sample_idx] * 255)],
                nrow=3,
            )
            logged_image = wandb.Image(
                strip,
                caption="[LEFT] Predict, [CENTER] Reference, [RIGHT] Ground truth\n[REF] {}, [FRAME] {}".format(
                    ref_path[sample_idx], curr_frame_path[sample_idx]
                ),
            )
            wandb.log({f"example_{sample_idx}": logged_image})

    # Rolling checkpoints, overwritten on every logging step.
    for filename, model in (
        ("nonlocal_net_iter.pth", nonlocal_net),
        ("colornet_iter.pth", colornet),
        ("discriminator_iter.pth", discriminator),
        ("embed_net_iter.pth", embed_net),
    ):
        torch.save(model.state_dict(), os.path.join(opt.checkpoint_dir, filename))

    loss_handler.reset()
+
+
def load_params(ckpt_file):
    """Load a checkpoint file and return its entries as an ``OrderedDict``.

    Args:
        ckpt_file: Path to a checkpoint produced by ``torch.save``.

    Returns:
        ``OrderedDict`` of the checkpoint's key/value pairs, in file order.
    """
    return OrderedDict(torch.load(ckpt_file).items())
+
+
def parse(parser, save=True):
    """Parse CLI options, print them, and optionally dump them to ``opt.txt``.

    Args:
        parser: A configured ``argparse.ArgumentParser``.
        save: When True, also write the options (with a timestamp header)
            to ``opt.txt`` in the current working directory.

    Returns:
        The parsed ``argparse.Namespace``.
    """
    opt = parser.parse_args()
    sorted_options = sorted(vars(opt).items())

    banner_top = "------------------------------ Options -------------------------------"
    banner_bottom = "-------------------------------- End ---------------------------------"

    print(banner_top)
    for name, value in sorted_options:
        print(f"{name}: {value}")
    print(banner_bottom)

    if save:
        with open("opt.txt", "wt") as opt_file:
            opt_file.write(os.path.basename(sys.argv[0]) + " " + strftime("%Y-%m-%d %H:%M:%S", gmtime()) + "\n")
            opt_file.write(banner_top + "\n")
            for name, value in sorted_options:
                opt_file.write(f"{name}: {value}\n")
            opt_file.write(banner_bottom + "\n")
    return opt
+
+
def gpu_setup():
    """Select the primary GPU from the module-level ``opt.gpu_ids`` and return it.

    Pins CUDA device numbering to the PCI bus order (matches ``nvidia-smi``)
    and enables cuDNN autotuning for fixed-size inputs.
    """
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    cudnn.benchmark = True
    primary_gpu = opt.gpu_ids[0]
    torch.cuda.set_device(primary_gpu)
    print("running on GPU", opt.gpu_ids)
    return torch.device("cuda")
+
+
+if __name__ == "__main__":
+ ############################################## SETUP ###############################################
+ torch.multiprocessing.set_start_method("spawn", force=True)
+ # =============== GET PARSER OPTION ================
+ opt = parse(parser)
+ opt.video_data_root_list = opt.video_data_root_list.split(",")
+ opt.flow_data_root_list = opt.flow_data_root_list.split(",")
+ opt.mask_data_root_list = opt.mask_data_root_list.split(",")
+ opt.gpu_ids = list(map(int, opt.gpu_ids.split(",")))
+ opt.head_out_idx = list(map(int, opt.head_out_idx.split(",")))
+ n_dim_output = 3 if opt.use_feature_transform else 4
+ assert len(opt.head_out_idx) == 4, "Size of head_out_idx must be 4"
+
+ os.makedirs(opt.checkpoint_dir, exist_ok=True)
+
+ # =================== INIT WANDB ===================
+ if opt.use_wandb:
+ print("Save images to Wandb")
+ if opt.wandb_token != "":
+ try:
+ wandb.login(key=opt.wandb_token)
+ except:
+ pass
+ wandb.init(
+ project="video-colorization",
+ name=f"{opt.wandb_name} {datetime.now(tz=ZoneInfo('Asia/Ho_Chi_Minh')).strftime('%Y/%m/%d_%H-%M-%S')}",
+ )
+
+ # ================== SETUP DEVICE ==================
+ # torch.multiprocessing.set_start_method("spawn", force=True)
+ # device = gpu_setup()
+ device = "cuda" if torch.cuda.is_available() else "cpu"
+
+ ############################################ LOAD DATA #############################################
+ if opt.use_dummy:
+ H, W = 224, 224
+ I_last_lab = torch.rand(opt.batch_size, 3, H, W)
+ I_current_lab = torch.rand(opt.batch_size, 3, H, W)
+ I_reference_lab = torch.rand(opt.batch_size, 3, H, W)
+ flow_forward = torch.rand(opt.batch_size, 2, H, W)
+ mask = torch.rand(opt.batch_size, 1, H, W)
+ placeholder_lab = torch.rand(opt.batch_size, 3, H, W)
+ self_ref_flag = torch.rand(opt.batch_size, 3, H, W)
+ data_loader = [
+ [I_last_lab, I_current_lab, I_reference_lab, flow_forward, mask, placeholder_lab, self_ref_flag, None, None, None]
+ for _ in range(1)
+ ]
+ else:
+ data_loader = load_data()
+
    ########################################## DEFINE NETWORK ##########################################
    colornet = GeneralColorVidNet(opt.ic).to(device)
    nonlocal_net = GeneralWarpNet(feature_channel=256).to(device)  # change to 128 in swin tiny
    discriminator = Discriminator_x64_224(ndf=64).to(device)
    weighted_layer_color = WeightedAverage_color().to(device)
    nonlocal_weighted_layer = NonlocalWeightedAverage().to(device)
    warping_layer = WarpingLayer(device=device).to(device)
    embed_net = GeneralEmbedModel(pretrained_model="swin-small", device=device).to(device)

    # Parameter-count summary table.
    print("-" * 59)
    print("| TYPE | Model name | Num params |")
    print("-" * 59)
    colornet_params = print_num_params(colornet)
    nonlocal_net_params = print_num_params(nonlocal_net)
    discriminator_params = print_num_params(discriminator)
    weighted_layer_color_params = print_num_params(weighted_layer_color)
    nonlocal_weighted_layer_params = print_num_params(nonlocal_weighted_layer)
    warping_layer_params = print_num_params(warping_layer)
    embed_net_params = print_num_params(embed_net)

    print("-" * 59)
    print(
        f"| TOTAL | | {('{:,}'.format(colornet_params+nonlocal_net_params+discriminator_params+weighted_layer_color_params+nonlocal_weighted_layer_params+warping_layer_params+embed_net_params)).rjust(10)} |"
    )
    print("-" * 59)

    # Track gradient/parameter histograms at the same cadence as checkpoints.
    if opt.use_wandb:
        wandb.watch(discriminator, log="all", log_freq=opt.checkpoint_step, idx=0)
        wandb.watch(embed_net, log="all", log_freq=opt.checkpoint_step, idx=1)
        wandb.watch(colornet, log="all", log_freq=opt.checkpoint_step, idx=2)
        wandb.watch(nonlocal_net, log="all", log_freq=opt.checkpoint_step, idx=3)

    # ============= USE PRETRAINED OR NOT ==============
    # Resume from the rolling "*_iter.pth" checkpoints in checkpoint_dir.
    if opt.load_pretrained_model:
        # pretrained_path = "/workspace/video_colorization/ckpt_folder_ver_1_vit_small_patch16_384"
        nonlocal_net.load_state_dict(load_params(os.path.join(opt.checkpoint_dir, "nonlocal_net_iter.pth")))
        colornet.load_state_dict(load_params(os.path.join(opt.checkpoint_dir, "colornet_iter.pth")))
        discriminator.load_state_dict(load_params(os.path.join(opt.checkpoint_dir, "discriminator_iter.pth")))
        # NOTE(review): this rebinds embed_net_params (previously a parameter
        # count) to a state dict — harmless here, but confusing; confirm.
        embed_net_params = load_params(os.path.join(opt.checkpoint_dir, "embed_net_iter.pth"))
        embed_net.load_state_dict(embed_net_params)

    ###################################### DEFINE LOSS FUNCTIONS #######################################
    perceptual_loss_fn = Perceptual_loss(opt.domain_invariant, opt.weight_perceptual)
    contextual_loss = ContextualLoss().to(device)
    contextual_forward_loss = ContextualLoss_forward().to(device)
+
    ######################################## DEFINE OPTIMIZERS #########################################
    # Generator optimizer covers warp net, color net and the embedder;
    # colornet runs at twice the base learning rate.
    optimizer_g = optim.AdamW(
        [
            {"params": nonlocal_net.parameters(), "lr": opt.lr},
            {"params": colornet.parameters(), "lr": 2 * opt.lr},
            {"params": embed_net.parameters(), "lr": opt.lr},
        ],
        betas=(0.5, 0.999),
        eps=1e-5,
        amsgrad=True,
    )

    optimizer_d = optim.AdamW(
        filter(lambda p: p.requires_grad, discriminator.parameters()),
        lr=opt.lr,
        betas=(0.5, 0.999),
        amsgrad=True,
    )

    # Polynomial decay schedules over the full horizon (iterations, not epochs).
    step_optim_scheduler_g = PolynomialLR(
        optimizer_g,
        step_size=opt.lr_step,
        iter_warmup=0,
        iter_max=len(data_loader) * opt.epoch,
        power=0.9,
        min_lr=1e-8,
    )
    step_optim_scheduler_d = PolynomialLR(
        optimizer_d,
        step_size=opt.lr_step,
        iter_warmup=0,
        iter_max=len(data_loader) * opt.epoch,
        power=0.9,
        min_lr=1e-8,
    )
    ########################################## DEFINE OTHERS ###########################################
    downsampling_by2 = nn.AvgPool2d(kernel_size=2).to(device)
    timer_handler = TimeHandler()
    loss_handler = LossHandler()  # Handle loss value
    ############################################## TRAIN ###############################################

    total_iter = 0
+ for epoch_num in range(1, opt.epoch + 1):
+ # if opt.use_wandb:
+ # wandb.log({"Current_trainning_epoch": epoch_num})
+ with tqdm(total=len(data_loader), position=0, leave=True) as pbar:
+ for iter, sample in enumerate(data_loader):
+ timer_handler.compute_time("load_sample")
+ total_iter += 1
+
+ # =============== LOAD DATA SAMPLE ================
+ (
+ I_last_lab, ######## (3, H, W)
+ I_current_lab, ##### (3, H, W)
+ I_reference_lab, ### (3, H, W)
+ flow_forward, ###### (2, H, W)
+ mask, ############## (1, H, W)
+ placeholder_lab, ### (3, H, W)
+ self_ref_flag, ##### (3, H, W)
+ prev_frame_path,
+ curr_frame_path,
+ ref_path,
+ ) = sample
+
+ I_last_lab = I_last_lab.to(device)
+ I_current_lab = I_current_lab.to(device)
+ I_reference_lab = I_reference_lab.to(device)
+ flow_forward = flow_forward.to(device)
+ mask = mask.to(device)
+ placeholder_lab = placeholder_lab.to(device)
+ self_ref_flag = self_ref_flag.to(device)
+
+ I_last_l = I_last_lab[:, 0:1, :, :]
+ I_last_ab = I_last_lab[:, 1:3, :, :]
+ I_current_l = I_current_lab[:, 0:1, :, :]
+ I_current_ab = I_current_lab[:, 1:3, :, :]
+ I_reference_l = I_reference_lab[:, 0:1, :, :]
+ I_reference_ab = I_reference_lab[:, 1:3, :, :]
+ I_reference_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_reference_l), I_reference_ab), dim=1))
+
+ _load_sample_time = timer_handler.compute_time("load_sample")
+ timer_handler.compute_time("forward_model")
+
+ features_B = embed_net(I_reference_rgb)
+ B_feat_0, B_feat_1, B_feat_2, B_feat_3 = features_B
+
+ # ================== COLORIZATION ==================
+ # The last frame
+ I_last_ab_predict, I_last_nonlocal_lab_predict = frame_colorization(
+ IA_l=I_last_l,
+ IB_lab=I_reference_lab,
+ IA_last_lab=placeholder_lab,
+ features_B=features_B,
+ embed_net=embed_net,
+ colornet=colornet,
+ nonlocal_net=nonlocal_net,
+ luminance_noise=opt.luminance_noise,
+ )
+ I_last_lab_predict = torch.cat((I_last_l, I_last_ab_predict), dim=1)
+
+ # The current frame
+ I_current_ab_predict, I_current_nonlocal_lab_predict = frame_colorization(
+ IA_l=I_current_l,
+ IB_lab=I_reference_lab,
+ IA_last_lab=I_last_lab_predict,
+ features_B=features_B,
+ embed_net=embed_net,
+ colornet=colornet,
+ nonlocal_net=nonlocal_net,
+ luminance_noise=opt.luminance_noise,
+ )
+ I_current_lab_predict = torch.cat((I_last_l, I_current_ab_predict), dim=1)
+
+ # ================ UPDATE GENERATOR ================
+ if opt.weight_gan > 0:
+ optimizer_g.zero_grad()
+ optimizer_d.zero_grad()
+ fake_data_lab = torch.cat(
+ (
+ uncenter_l(I_current_l),
+ I_current_ab_predict,
+ uncenter_l(I_last_l),
+ I_last_ab_predict,
+ ),
+ dim=1,
+ )
+ real_data_lab = torch.cat(
+ (
+ uncenter_l(I_current_l),
+ I_current_ab,
+ uncenter_l(I_last_l),
+ I_last_ab,
+ ),
+ dim=1,
+ )
+
+ if opt.permute_data:
+ batch_index = torch.arange(-1, opt.batch_size - 1, dtype=torch.long)
+ real_data_lab = real_data_lab[batch_index, ...]
+
+ discriminator_loss = discriminator_loss_fn(real_data_lab, fake_data_lab, discriminator)
+ discriminator_loss.backward()
+ optimizer_d.step()
+
+ optimizer_g.zero_grad()
+ optimizer_d.zero_grad()
+
+ # ================== COMPUTE LOSS ==================
+ # L1 loss
+ l1_loss = l1_loss_fn(I_current_ab, I_current_ab_predict) * opt.weigth_l1
+
+ # Generator_loss. TODO: freeze this to train some first epoch
+ if epoch_num > opt.epoch_train_discriminator:
+ generator_loss = generator_loss_fn(real_data_lab, fake_data_lab, discriminator, opt.weight_gan, device)
+
+ # Perceptual Loss
+ I_predict_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l), I_current_ab_predict), dim=1))
+ pred_feat_0, pred_feat_1, pred_feat_2, pred_feat_3 = embed_net(I_predict_rgb)
+
+ I_current_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l), I_current_ab), dim=1))
+ A_feat_0, _, _, A_feat_3 = embed_net(I_current_rgb)
+
+ perceptual_loss = perceptual_loss_fn(A_feat_3, pred_feat_3)
+
+ # Contextual Loss
+ contextual_style5_1 = torch.mean(contextual_forward_loss(pred_feat_3, B_feat_3.detach())) * 8
+ contextual_style4_1 = torch.mean(contextual_forward_loss(pred_feat_2, B_feat_2.detach())) * 4
+ contextual_style3_1 = torch.mean(contextual_forward_loss(pred_feat_1, B_feat_1.detach())) * 2
+ contextual_style2_1 = torch.mean(contextual_forward_loss(pred_feat_0, B_feat_0.detach()))
+ # if opt.use_feature_transform:
+ # contextual_style3_1 = (
+ # torch.mean(
+ # contextual_forward_loss(
+ # downsampling_by2(pred_feat_1),
+ # downsampling_by2(),
+ # )
+ # )
+ # * 2
+ # )
+ # else:
+ # contextual_style3_1 = (
+ # torch.mean(
+ # contextual_forward_loss(
+ # pred_feat_1,
+ # B_feat_1.detach(),
+ # )
+ # )
+ # * 2
+ # )
+
+ contextual_loss_total = (
+ contextual_style5_1 + contextual_style4_1 + contextual_style3_1 + contextual_style2_1
+ ) * opt.weight_contextual
+
+ # Consistent Loss
+ consistent_loss = consistent_loss_fn(
+ I_current_lab_predict,
+ I_last_ab_predict,
+ I_current_nonlocal_lab_predict,
+ I_last_nonlocal_lab_predict,
+ flow_forward,
+ mask,
+ warping_layer,
+ weight_consistent=opt.weight_consistent,
+ weight_nonlocal_consistent=opt.weight_nonlocal_consistent,
+ device=device,
+ )
+
+ # Smoothness loss
+ smoothness_loss = smoothness_loss_fn(
+ I_current_l,
+ I_current_lab,
+ I_current_ab_predict,
+ A_feat_0,
+ weighted_layer_color,
+ nonlocal_weighted_layer,
+ weight_smoothness=opt.weight_smoothness,
+ weight_nonlocal_smoothness=opt.weight_nonlocal_smoothness,
+ device=device,
+ )
+
+ # Total loss
+ total_loss = l1_loss + perceptual_loss + contextual_loss_total + consistent_loss + smoothness_loss
+ if epoch_num > opt.epoch_train_discriminator:
+ total_loss += generator_loss
+
+ # Add loss to loss handler
+ loss_handler.add_loss(key="total_loss", loss=total_loss.item())
+ loss_handler.add_loss(key="l1_loss", loss=l1_loss.item())
+ loss_handler.add_loss(key="perceptual_loss", loss=perceptual_loss.item())
+ loss_handler.add_loss(key="contextual_loss", loss=contextual_loss_total.item())
+ loss_handler.add_loss(key="consistent_loss", loss=consistent_loss.item())
+ loss_handler.add_loss(key="smoothness_loss", loss=smoothness_loss.item())
+ loss_handler.add_loss(key="discriminator_loss", loss=discriminator_loss.item())
+ if epoch_num > opt.epoch_train_discriminator:
+ loss_handler.add_loss(key="generator_loss", loss=generator_loss.item())
+ loss_handler.count_one_sample()
+
+ total_loss.backward()
+
+ optimizer_g.step()
+ step_optim_scheduler_g.step()
+ step_optim_scheduler_d.step()
+
+ _forward_model_time = timer_handler.compute_time("forward_model")
+
+ timer_handler.compute_time("training_logger")
+ training_logger()
+ _training_logger_time = timer_handler.compute_time("training_logger")
+
+ pbar.set_description(
+ f"Epochs: {epoch_num}, Load_sample: {_load_sample_time:.3f}s, Forward: {_forward_model_time:.3f}s, log: {_training_logger_time:.3f}s"
+ )
+ pbar.update(1)
diff --git a/train_swin_224_ddp.py b/train_swin_224_ddp.py
new file mode 100644
index 0000000000000000000000000000000000000000..b12218dca1bf6a54657613fe51e7116ea04c07f7
--- /dev/null
+++ b/train_swin_224_ddp.py
@@ -0,0 +1,634 @@
+import os
+import sys
+import wandb
+import argparse
+import numpy as np
+from tqdm import tqdm
+from PIL import Image
+from datetime import datetime
+from zoneinfo import ZoneInfo
+from time import gmtime, strftime
+from collections import OrderedDict
+import random
+
+import torch
+import torch.nn as nn
+import torch.optim as optim
+import torch.backends.cudnn as cudnn
+from torchvision.transforms import CenterCrop
+from torch.utils.data import ConcatDataset, DataLoader, WeightedRandomSampler
+import torchvision.transforms as torch_transforms
+from torchvision.utils import make_grid
+
+from src.losses import (
+ ContextualLoss,
+ ContextualLoss_forward,
+ Perceptual_loss,
+ consistent_loss_fn,
+ discriminator_loss_fn,
+ generator_loss_fn,
+ l1_loss_fn,
+ smoothness_loss_fn,
+)
+from src.models.CNN.GAN_models import Discriminator_x64_224
+from src.models.CNN.ColorVidNet import GeneralColorVidNet
+from src.models.CNN.FrameColor import frame_colorization
+from src.models.CNN.NonlocalNet import WeightedAverage_color, NonlocalWeightedAverage, GeneralWarpNet
+from src.models.vit.embed import GeneralEmbedModel
+from src.models.vit.config import load_config
+from src.data import transforms
+from src.data.dataloader import VideosDataset, VideosDataset_ImageNet
+from src.utils import CenterPad_threshold
+from src.utils import (
+ TimeHandler,
+ RGB2Lab,
+ ToTensor,
+ Normalize,
+ LossHandler,
+ WarpingLayer,
+ uncenter_l,
+ tensor_lab2rgb,
+ print_num_params,
+ SquaredPadding
+)
+from src.scheduler import PolynomialLR
+
+from torch.nn.parallel import DistributedDataParallel as DDP
+import torch.distributed as dist
+from torch.utils.data.distributed import DistributedSampler
+
+
# Command-line options for DDP training. Defaults for numeric options are
# real numeric literals (the original used strings like default="0.5";
# argparse converts string defaults through `type`, so behavior is unchanged,
# but literals are less fragile and self-documenting).
parser = argparse.ArgumentParser()
# --- data locations (comma-separated lists, split in __main__) ---
parser.add_argument("--video_data_root_list", type=str, default="dataset")
parser.add_argument("--flow_data_root_list", type=str, default='flow')
parser.add_argument("--mask_data_root_list", type=str, default='mask')
parser.add_argument("--data_root_imagenet", default="imagenet", type=str)
parser.add_argument("--annotation_file_path", default="dataset/annotation.csv", type=str)
parser.add_argument("--imagenet_pairs_file", default="imagenet_pairs.txt", type=str)
# --- hardware / loader ---
parser.add_argument("--gpu_ids", type=str, default="0,1,2,3", help="separate by comma")
parser.add_argument("--workers", type=int, default=0)
parser.add_argument("--batch_size", type=int, default=2)
# NOTE(review): a list default with type=int means a user-supplied value
# becomes a scalar, not a list; code indexes image_size[0] so only the
# default works as-is.
parser.add_argument("--image_size", type=int, default=[224, 224])
parser.add_argument("--ic", type=int, default=7)
# --- schedule / resume ---
parser.add_argument("--epoch", type=int, default=40)
parser.add_argument("--resume_epoch", type=int, default=0)
parser.add_argument("--resume", action='store_true')
parser.add_argument("--load_pretrained_model", action='store_true')
parser.add_argument("--pretrained_model_dir", type=str, default='ckpt')
parser.add_argument("--lr", type=float, default=1e-4)
parser.add_argument("--beta1", type=float, default=0.5)
parser.add_argument("--lr_step", type=int, default=1)
parser.add_argument("--lr_gamma", type=float, default=0.9)
parser.add_argument("--checkpoint_dir", type=str, default="checkpoints")
parser.add_argument("--checkpoint_step", type=int, default=500)
# --- data augmentation / reference sampling ---
parser.add_argument("--real_reference_probability", type=float, default=0.7)
parser.add_argument("--nonzero_placeholder_probability", type=float, default=0.0)
parser.add_argument("--domain_invariant", action='store_true')
# --- loss weights ("weigth_l1" typo kept: other code reads opt.weigth_l1) ---
parser.add_argument("--weigth_l1", type=float, default=2.0)
parser.add_argument("--weight_contextual", type=float, default=0.5)
parser.add_argument("--weight_perceptual", type=float, default=0.02)
parser.add_argument("--weight_smoothness", type=float, default=5.0)
parser.add_argument("--weight_gan", type=float, default=0.5)
parser.add_argument("--weight_nonlocal_smoothness", type=float, default=0.0)
parser.add_argument("--weight_nonlocal_consistent", type=float, default=0.0)
parser.add_argument("--weight_consistent", type=float, default=0.05)
parser.add_argument("--luminance_noise", type=float, default=2.0)
parser.add_argument("--permute_data", action='store_true')
parser.add_argument("--contextual_loss_direction", type=str, default="forward", help="forward or backward matching")
parser.add_argument("--batch_accum_size", type=int, default=10)
parser.add_argument("--epoch_train_discriminator", type=int, default=3)
# --- model / logging ---
parser.add_argument("--vit_version", type=str, default="vit_tiny_patch16_384")
parser.add_argument("--use_dummy", action='store_true')
parser.add_argument("--use_wandb", action='store_true')
parser.add_argument("--use_feature_transform", action='store_true')
parser.add_argument("--head_out_idx", type=str, default="8,9,10,11")
parser.add_argument("--wandb_token", type=str, default="")
parser.add_argument("--wandb_name", type=str, default="")
+
+
def ddp_setup():
    """Initialise the default NCCL process group and return this process's
    local rank (read from the LOCAL_RANK env var set by torchrun).

    Also pins this process to its GPU, the standard DDP setup step the
    original omitted; without it every rank creates its CUDA context on
    cuda:0, wasting memory on that device.
    """
    dist.init_process_group(backend="nccl")
    local_rank = int(os.environ['LOCAL_RANK'])
    torch.cuda.set_device(local_rank)
    return local_rank
+
def ddp_cleanup():
    """Tear down the default process group created by ddp_setup()."""
    dist.destroy_process_group()
+
def prepare_dataloader_ddp(dataset, batch_size=4, pin_memory=False, num_workers=0):
    """Wrap *dataset* in a DataLoader driven by a shuffling DistributedSampler.

    Each DDP rank then iterates a disjoint shard per epoch; callers should
    invoke ``loader.sampler.set_epoch(e)`` before each epoch to reshuffle.
    """
    shard_sampler = DistributedSampler(dataset, shuffle=True)
    return DataLoader(
        dataset,
        batch_size=batch_size,
        pin_memory=pin_memory,
        num_workers=num_workers,
        sampler=shard_sampler,
    )
+
def is_master_process():
    """Return True only on the global rank-0 process (per the RANK env var)."""
    return int(os.environ["RANK"]) == 0
+
def load_data():
    """Build the combined (video clips + ImageNet pairs) training DataLoader.

    Reads every path and hyper-parameter from the module-level ``opt``
    namespace and shards the concatenated dataset across DDP ranks via
    prepare_dataloader_ddp().
    """
    # Shared per-frame pipeline: pad to square, convert to Lab, tensorize, normalize.
    frame_transform = torch_transforms.Compose(
        [
            SquaredPadding(target_size=opt.image_size[0]),
            RGB2Lab(),
            ToTensor(),
            Normalize(),
        ]
    )

    # One VideosDataset per (clip root, flow root, mask root) triple.
    video_datasets = []
    for video_root, flow_root, mask_root in zip(
        opt.video_data_root_list, opt.flow_data_root_list, opt.mask_data_root_list
    ):
        video_datasets.append(
            VideosDataset(
                video_data_root=video_root,
                flow_data_root=flow_root,
                mask_data_root=mask_root,
                imagenet_folder=opt.data_root_imagenet,
                annotation_file_path=opt.annotation_file_path,
                image_size=opt.image_size,
                image_transform=frame_transform,
                real_reference_probability=opt.real_reference_probability,
                nonzero_placeholder_probability=opt.nonzero_placeholder_probability,
            )
        )

    # The ImageNet dataset expects a plain transform list (it composes internally).
    imagenet_transform_list = [
        SquaredPadding(target_size=opt.image_size[0]),
        RGB2Lab(),
        ToTensor(),
        Normalize(),
    ]
    reference_augmentations = [
        torch_transforms.RandomHorizontalFlip(0.5),
        torch_transforms.RandomResizedCrop(480, (0.98, 1.0), ratio=(0.8, 1.2)),
    ]
    imagenet_dataset = VideosDataset_ImageNet(
        imagenet_data_root=opt.data_root_imagenet,
        pairs_file=opt.imagenet_pairs_file,
        image_size=opt.image_size,
        transforms_imagenet=imagenet_transform_list,
        distortion_level=4,
        brightnessjitter=5,
        nonzero_placeholder_probability=opt.nonzero_placeholder_probability,
        extra_reference_transform=reference_augmentations,
        real_reference_probability=opt.real_reference_probability,
    )

    combined = ConcatDataset(video_datasets + [imagenet_dataset])
    return prepare_dataloader_ddp(
        combined,
        batch_size=opt.batch_size,
        pin_memory=False,
        num_workers=opt.workers,
    )
+
def save_checkpoints(saved_path):
    """Write all model weights plus optimizer/scheduler state under *saved_path*.

    DDP-wrapped nets are unwrapped via ``.module`` so the saved state dicts
    carry no ``module.`` prefix; embed_net is saved as-is. Reads the
    training-loop globals (models, optimizers, epoch_num, total_iter).
    """
    # Make directory if the folder doesn't exist.
    os.makedirs(saved_path, exist_ok=True)

    # Model weights, one file per network.
    weight_files = {
        "nonlocal_net.pth": nonlocal_net.module.state_dict(),
        "colornet.pth": colornet.module.state_dict(),
        "discriminator.pth": discriminator.module.state_dict(),
        "embed_net.pth": embed_net.state_dict(),
    }
    for filename, state in weight_files.items():
        torch.save(state, os.path.join(saved_path, filename))

    # Everything needed to resume training from this exact point.
    learning_state = {
        "epoch": epoch_num,
        "total_iter": total_iter,
        "optimizer_g": optimizer_g.state_dict(),
        "optimizer_d": optimizer_d.state_dict(),
        "optimizer_schedule_g": step_optim_scheduler_g.state_dict(),
        "optimizer_schedule_d": step_optim_scheduler_d.state_dict(),
    }
    torch.save(learning_state, os.path.join(saved_path, "learning_state.pth"))
+
def training_logger():
    """Periodically push averaged losses, LRs and sample images to wandb.

    Fires every ``opt.checkpoint_step`` iterations (and once when
    ``total_iter == len(data_loader)``), then resets the loss accumulator.
    Reads the training-loop globals directly (loss tensors, predictions,
    frame paths).

    NOTE(review): called from every DDP rank without an is_master_process()
    guard — confirm whether duplicate wandb logs are intended.
    """
    if (total_iter % opt.checkpoint_step == 0) or (total_iter == len(data_loader)):
        # Average each accumulated loss over samples seen since the last reset.
        train_loss_dict = {"train/" + str(k): v / loss_handler.count_sample for k, v in loss_handler.loss_dict.items()}
        train_loss_dict["train/opt_g_lr_1"] = step_optim_scheduler_g.get_last_lr()[0]
        train_loss_dict["train/opt_g_lr_2"] = step_optim_scheduler_g.get_last_lr()[1]
        train_loss_dict["train/opt_d_lr"] = step_optim_scheduler_d.get_last_lr()[0]

        # Instantaneous (last-batch) values for the wandb alert message.
        alert_text = f"l1_loss: {l1_loss.item()}\npercep_loss: {perceptual_loss.item()}\nctx_loss: {contextual_loss_total.item()}\ncst_loss: {consistent_loss.item()}\nsm_loss: {smoothness_loss.item()}\ntotal: {total_loss.item()}"

        if opt.use_wandb:
            wandb.log(train_loss_dict)
            wandb.alert(title=f"Progress training #{total_iter}", text=alert_text)

            # Log a predict / reference / ground-truth triptych per batch sample.
            for idx in range(I_predict_rgb.shape[0]):
                concated_I = make_grid(
                    [(I_predict_rgb[idx] * 255), (I_reference_rgb[idx] * 255), (I_current_rgb[idx] * 255)], nrow=3
                )
                wandb_concated_I = wandb.Image(
                    concated_I,
                    caption="[LEFT] Predict, [CENTER] Reference, [RIGHT] Ground truth\n[REF] {}, [FRAME] {}".format(
                        ref_path[idx], curr_frame_path[idx]
                    ),
                )
                wandb.log({f"example_{idx}": wandb_concated_I})

        # Save learning state checkpoint
        # save_checkpoints(os.path.join(opt.checkpoint_dir, 'runs'))
        loss_handler.reset()
+
+
def load_params(ckpt_file, local_rank, has_module=False):
    """Load a state dict from *ckpt_file* onto this rank's GPU.

    Args:
        ckpt_file: path to a ``.pth`` checkpoint holding a state dict.
        local_rank: CUDA device index used as ``map_location``.
        has_module: when True, prepend ``"module."`` to every key so the
            dict can be loaded into a DDP-wrapped model.

    Returns:
        OrderedDict mapping (possibly prefixed) parameter names to tensors,
        preserving the checkpoint's key order.
    """
    params = torch.load(ckpt_file, map_location=f'cuda:{local_rank}')
    # Hoist the prefix decision out of the loop instead of re-testing
    # has_module per key and hand-building an intermediate list.
    prefix = "module." if has_module else ""
    return OrderedDict((prefix + key, value) for key, value in params.items())
+
+
def parse(parser, save=True):
    """Parse CLI arguments from *parser*, echo them to stdout and, when
    *save* is True, persist them to ``opt.txt`` in the working directory.

    Returns the parsed argparse Namespace.
    """
    opt = parser.parse_args()
    sorted_args = sorted(vars(opt).items())

    header = "------------------------------ Options -------------------------------"
    footer = "-------------------------------- End ---------------------------------"

    print(header)
    for key, value in sorted_args:
        print("%s: %s" % (str(key), str(value)))
    print(footer)

    if save:
        file_name = os.path.join("opt.txt")
        with open(file_name, "wt") as opt_file:
            # First line records the invoking script and a UTC timestamp.
            opt_file.write(os.path.basename(sys.argv[0]) + " " + strftime("%Y-%m-%d %H:%M:%S", gmtime()) + "\n")
            opt_file.write(header + "\n")
            for key, value in sorted_args:
                opt_file.write("%s: %s\n" % (str(key), str(value)))
            opt_file.write(footer + "\n")
    return opt
+
+
def gpu_setup():
    """Legacy single-process GPU setup: pins the first GPU in opt.gpu_ids.

    NOTE(review): the DDP entry point below uses ddp_setup()/LOCAL_RANK and
    never calls this — presumably kept for parity with the non-DDP script.
    """
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    cudnn.benchmark = True  # autotune conv kernels (inputs have fixed size)
    torch.cuda.set_device(opt.gpu_ids[0])
    device = torch.device("cuda")
    print("running on GPU", opt.gpu_ids)
    return device
+
+
if __name__ == "__main__":
    # DDP training entry point: one process per GPU, launched via torchrun.
    ############################################## SETUP ###############################################
    torch.multiprocessing.set_start_method("spawn", force=True)
    # =============== GET PARSER OPTION ================
    opt = parse(parser)
    # Comma-separated CLI strings become real lists here.
    opt.video_data_root_list = opt.video_data_root_list.split(",")
    opt.flow_data_root_list = opt.flow_data_root_list.split(",")
    opt.mask_data_root_list = opt.mask_data_root_list.split(",")
    opt.gpu_ids = list(map(int, opt.gpu_ids.split(",")))
    opt.head_out_idx = list(map(int, opt.head_out_idx.split(",")))
    n_dim_output = 3 if opt.use_feature_transform else 4
    assert len(opt.head_out_idx) == 4, "Size of head_out_idx must be 4"

    # =================== INIT WANDB ===================
    # NOTE(review): the master-only guard is commented out, so every rank
    # logs in and calls wandb.init — confirm duplicate runs are intended.
# if is_master_process():
    if opt.use_wandb:
        print("Save images to Wandb")
    if opt.wandb_token != "":
        try:
            wandb.login(key=opt.wandb_token)
        except:
            pass
    if opt.use_wandb:
        wandb.init(
            project="video-colorization",
            group=f"{opt.wandb_name} {datetime.now(tz=ZoneInfo('Asia/Ho_Chi_Minh')).strftime('%Y/%m/%d_%H-%M-%S')}",
            #group="DDP"
        )

    # ================== SETUP DEVICE ==================
    local_rank = ddp_setup()
    # =================== VIT CONFIG ===================
    cfg = load_config()
    model_cfg = cfg["model"][opt.vit_version]
    model_cfg["image_size"] = (384, 384)
    model_cfg["backbone"] = opt.vit_version
    model_cfg["dropout"] = 0.0
    model_cfg["drop_path_rate"] = 0.1
    model_cfg["n_cls"] = 10
    ############################################ LOAD DATA #############################################
    data_loader = load_data()
    ########################################## DEFINE NETWORK ##########################################

    # Trainable nets wrapped in DDP; fixed layers (warping/weighting) are not.
    colornet = DDP(GeneralColorVidNet(opt.ic).to(local_rank), device_ids=[local_rank], output_device=local_rank)
    nonlocal_net = DDP(GeneralWarpNet().to(local_rank), device_ids=[local_rank], output_device=local_rank)
    discriminator = DDP(Discriminator_x64_224(ndf=64).to(local_rank), device_ids=[local_rank], output_device=local_rank)
    weighted_layer_color = WeightedAverage_color().to(local_rank)
    nonlocal_weighted_layer = NonlocalWeightedAverage().to(local_rank)
    warping_layer = WarpingLayer(device=local_rank).to(local_rank)
    # NOTE(review): embed_net is NOT DDP-wrapped (its params are in optimizer_g
    # in the non-DDP variant but not here) — confirm whether it should train.
    embed_net = GeneralEmbedModel(device=local_rank).to(local_rank)

    if is_master_process():
        # Print number of parameters
        print("-" * 59)
        print("| TYPE | Model name | Num params |")
        print("-" * 59)

        colornet_params = print_num_params(colornet)
        nonlocal_net_params = print_num_params(nonlocal_net)
        discriminator_params = print_num_params(discriminator)
        weighted_layer_color_params = print_num_params(weighted_layer_color)
        nonlocal_weighted_layer_params = print_num_params(nonlocal_weighted_layer)
        warping_layer_params = print_num_params(warping_layer)
        embed_net_params = print_num_params(embed_net)
        print("-" * 59)
        print(
            f"| TOTAL | | {('{:,}'.format(colornet_params+nonlocal_net_params+discriminator_params+weighted_layer_color_params+nonlocal_weighted_layer_params+warping_layer_params+embed_net_params)).rjust(10)} |"
        )
        print("-" * 59)
        if opt.use_wandb:
            wandb.watch(discriminator, log="all", log_freq=opt.checkpoint_step, idx=0)
            wandb.watch(embed_net, log="all", log_freq=opt.checkpoint_step, idx=1)
            wandb.watch(colornet, log="all", log_freq=opt.checkpoint_step, idx=2)
            wandb.watch(nonlocal_net, log="all", log_freq=opt.checkpoint_step, idx=3)

    ###################################### DEFINE LOSS FUNCTIONS #######################################
    perceptual_loss_fn = Perceptual_loss(opt.domain_invariant, opt.weight_perceptual)
    contextual_loss = ContextualLoss().to(local_rank)
    contextual_forward_loss = ContextualLoss_forward().to(local_rank)
    ######################################## DEFINE OPTIMIZERS #########################################
    # Generator optimizer: colornet runs at 2x the base LR.
    optimizer_g = optim.AdamW(
        [
            {"params": nonlocal_net.parameters(), "lr": opt.lr},
            {"params": colornet.parameters(), "lr": 2 * opt.lr}
        ],
        betas=(0.5, 0.999),
        eps=1e-5,
        amsgrad=True,
    )

    optimizer_d = optim.AdamW(
        filter(lambda p: p.requires_grad, discriminator.parameters()),
        lr=opt.lr,
        betas=(0.5, 0.999),
        amsgrad=True,
    )

    # Polynomial LR decay, stepped once per iteration in the loop below.
    step_optim_scheduler_g = PolynomialLR(
        optimizer_g,
        step_size=opt.lr_step,
        iter_warmup=0,
        iter_max=len(data_loader) * opt.epoch,
        power=0.9,
        min_lr=1e-8
    )
    step_optim_scheduler_d = PolynomialLR(
        optimizer_d,
        step_size=opt.lr_step,
        iter_warmup=0,
        iter_max=len(data_loader) * opt.epoch,
        power=0.9,
        min_lr=1e-8
    )
    ########################################## DEFINE OTHERS ###########################################
    downsampling_by2 = nn.AvgPool2d(kernel_size=2).to(local_rank)
    # timer_handler = TimeHandler()
    loss_handler = LossHandler()
    ############################################## TRAIN ###############################################

    # ============= USE PRETRAINED OR NOT ==============
    if opt.load_pretrained_model:
        # DDP-wrapped nets need the "module." key prefix restored on load.
        nonlocal_net.load_state_dict(load_params(os.path.join(opt.pretrained_model_dir, "nonlocal_net.pth"),
                                                 local_rank,
                                                 has_module=True))
        colornet.load_state_dict(load_params(os.path.join(opt.pretrained_model_dir, "colornet.pth"),
                                             local_rank,
                                             has_module=True))
        discriminator.load_state_dict(load_params(os.path.join(opt.pretrained_model_dir, "discriminator.pth"),
                                                  local_rank,
                                                  has_module=True))
        embed_net_params = load_params(os.path.join(opt.pretrained_model_dir, "embed_net.pth"),
                                       local_rank,
                                       has_module=False)
        # Drop the stale classifier-head entry if the checkpoint carries one.
        if "module.vit.heads_out" in embed_net_params:
            embed_net_params.pop("module.vit.heads_out")
        elif "vit.heads_out" in embed_net_params:
            embed_net_params.pop("vit.heads_out")
        embed_net.load_state_dict(embed_net_params)

        # Restore optimizer/scheduler state and the resume position.
        learning_checkpoint = torch.load(os.path.join(opt.pretrained_model_dir, "learning_state.pth"))
        optimizer_g.load_state_dict(learning_checkpoint["optimizer_g"])
        optimizer_d.load_state_dict(learning_checkpoint["optimizer_d"])
        step_optim_scheduler_g.load_state_dict(learning_checkpoint["optimizer_schedule_g"])
        step_optim_scheduler_d.load_state_dict(learning_checkpoint["optimizer_schedule_d"])
        total_iter = learning_checkpoint['total_iter']
        start_epoch = learning_checkpoint['epoch']+1
    else:
        total_iter = 0
        start_epoch = 1

    # Per-batch GAN training loop (see the single-GPU script for the same
    # structure). NOTE(review): real_data_lab / fake_data_lab /
    # discriminator_loss are only bound inside `if opt.weight_gan > 0` yet
    # used unconditionally below — weight_gan <= 0 would raise NameError.
    for epoch_num in range(start_epoch, opt.epoch+1):
        # Reshuffle the distributed shards for this epoch.
        data_loader.sampler.set_epoch(epoch_num-1)

        if is_master_process():
            train_progress_bar = tqdm(
                data_loader,
                desc =f'Epoch {epoch_num}[Training]',
                position = 0,
                leave = False
            )
        else:
            train_progress_bar = data_loader
        for iter, sample in enumerate(train_progress_bar):
            # timer_handler.compute_time("load_sample")
            total_iter += 1
            # =============== LOAD DATA SAMPLE ================
            (
                I_last_lab, ######## (3, H, W)
                I_current_lab, ##### (3, H, W)
                I_reference_lab, ### (3, H, W)
                flow_forward, ###### (2, H, W)
                mask, ############## (1, H, W)
                placeholder_lab, ### (3, H, W)
                self_ref_flag, ##### (3, H, W)
                prev_frame_path,
                curr_frame_path,
                ref_path,
            ) = sample

            I_last_lab = I_last_lab.to(local_rank)
            I_current_lab = I_current_lab.to(local_rank)
            I_reference_lab = I_reference_lab.to(local_rank)
            flow_forward = flow_forward.to(local_rank)
            mask = mask.to(local_rank)
            placeholder_lab = placeholder_lab.to(local_rank)
            self_ref_flag = self_ref_flag.to(local_rank)

            # Split Lab tensors: channel 0 = luminance (L), 1:3 = chroma (ab).
            I_last_l = I_last_lab[:, 0:1, :, :]
            I_last_ab = I_last_lab[:, 1:3, :, :]
            I_current_l = I_current_lab[:, 0:1, :, :]
            I_current_ab = I_current_lab[:, 1:3, :, :]
            I_reference_l = I_reference_lab[:, 0:1, :, :]
            I_reference_ab = I_reference_lab[:, 1:3, :, :]
            I_reference_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_reference_l), I_reference_ab), dim=1))

            # _load_sample_time = timer_handler.compute_time("load_sample")
            # timer_handler.compute_time("forward_model")

            # Reference features, reused for both frames and contextual losses.
            features_B = embed_net(I_reference_rgb)
            B_feat_0, B_feat_1, B_feat_2, B_feat_3 = features_B

            # ================== COLORIZATION ==================
            # The last frame
            I_last_ab_predict, I_last_nonlocal_lab_predict = frame_colorization(
                IA_l=I_last_l,
                IB_lab=I_reference_lab,
                IA_last_lab=placeholder_lab,
                features_B=features_B,
                embed_net=embed_net,
                colornet=colornet,
                nonlocal_net=nonlocal_net,
                luminance_noise=opt.luminance_noise,
            )
            I_last_lab_predict = torch.cat((I_last_l, I_last_ab_predict), dim=1)

            # The current frame (conditioned on the last-frame prediction)
            I_current_ab_predict, I_current_nonlocal_lab_predict = frame_colorization(
                IA_l=I_current_l,
                IB_lab=I_reference_lab,
                IA_last_lab=I_last_lab_predict,
                features_B=features_B,
                embed_net=embed_net,
                colornet=colornet,
                nonlocal_net=nonlocal_net,
                luminance_noise=opt.luminance_noise,
            )
            # NOTE(review): concatenates I_last_l with the *current* ab
            # prediction — likely should be I_current_l (same in the
            # single-GPU script); feeds consistent_loss_fn below. Confirm.
            I_current_lab_predict = torch.cat((I_last_l, I_current_ab_predict), dim=1)

            # ================ UPDATE GENERATOR ================
            if opt.weight_gan > 0:
                optimizer_g.zero_grad()
                optimizer_d.zero_grad()
                fake_data_lab = torch.cat(
                    (
                        uncenter_l(I_current_l),
                        I_current_ab_predict,
                        uncenter_l(I_last_l),
                        I_last_ab_predict,
                    ),
                    dim=1,
                )
                real_data_lab = torch.cat(
                    (
                        uncenter_l(I_current_l),
                        I_current_ab,
                        uncenter_l(I_last_l),
                        I_last_ab,
                    ),
                    dim=1,
                )

                if opt.permute_data:
                    # arange(-1, B-1) rolls the batch by one so each fake
                    # sample is paired with a different real sample.
                    batch_index = torch.arange(-1, opt.batch_size - 1, dtype=torch.long)
                    real_data_lab = real_data_lab[batch_index, ...]

                # Discriminator step (generator step happens further below).
                discriminator_loss = discriminator_loss_fn(real_data_lab, fake_data_lab, discriminator)
                discriminator_loss.backward()
                optimizer_d.step()

            optimizer_g.zero_grad()
            optimizer_d.zero_grad()

            # ================== COMPUTE LOSS ==================
            # L1 loss
            l1_loss = l1_loss_fn(I_current_ab, I_current_ab_predict) * opt.weigth_l1

            # Generator_loss. TODO: freeze this to train some first epoch
            if epoch_num > opt.epoch_train_discriminator:
                generator_loss = generator_loss_fn(real_data_lab, fake_data_lab, discriminator, opt.weight_gan, local_rank)

            # Perceptual Loss
            I_predict_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l), I_current_ab_predict), dim=1))
            pred_feat_0, pred_feat_1, pred_feat_2, pred_feat_3 = embed_net(I_predict_rgb)

            I_current_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l), I_current_ab), dim=1))
            A_feat_0, _, _, A_feat_3 = embed_net(I_current_rgb)

            perceptual_loss = perceptual_loss_fn(A_feat_3, pred_feat_3)

            # Contextual Loss — deeper levels weighted 8/4/2/1.
            contextual_style5_1 = torch.mean(contextual_forward_loss(pred_feat_3, B_feat_3.detach())) * 8
            contextual_style4_1 = torch.mean(contextual_forward_loss(pred_feat_2, B_feat_2.detach())) * 4
            contextual_style3_1 = torch.mean(contextual_forward_loss(pred_feat_1, B_feat_1.detach())) * 2
            contextual_style2_1 = torch.mean(contextual_forward_loss(pred_feat_0, B_feat_0.detach()))

            contextual_loss_total = (
                contextual_style5_1 + contextual_style4_1 + contextual_style3_1 + contextual_style2_1
            ) * opt.weight_contextual

            # Consistent Loss — temporal consistency via optical-flow warping.
            consistent_loss = consistent_loss_fn(
                I_current_lab_predict,
                I_last_ab_predict,
                I_current_nonlocal_lab_predict,
                I_last_nonlocal_lab_predict,
                flow_forward,
                mask,
                warping_layer,
                weight_consistent=opt.weight_consistent,
                weight_nonlocal_consistent=opt.weight_nonlocal_consistent,
                device=local_rank,
            )

            # Smoothness loss
            smoothness_loss = smoothness_loss_fn(
                I_current_l,
                I_current_lab,
                I_current_ab_predict,
                A_feat_0,
                weighted_layer_color,
                nonlocal_weighted_layer,
                weight_smoothness=opt.weight_smoothness,
                weight_nonlocal_smoothness=opt.weight_nonlocal_smoothness,
                device=local_rank
            )

            # Total loss (adversarial term only after discriminator warm-up)
            total_loss = l1_loss + perceptual_loss + contextual_loss_total + consistent_loss + smoothness_loss
            if epoch_num > opt.epoch_train_discriminator:
                total_loss += generator_loss

            # Add loss to loss handler
            loss_handler.add_loss(key="total_loss", loss=total_loss.item())
            loss_handler.add_loss(key="l1_loss", loss=l1_loss.item())
            loss_handler.add_loss(key="perceptual_loss", loss=perceptual_loss.item())
            loss_handler.add_loss(key="contextual_loss", loss=contextual_loss_total.item())
            loss_handler.add_loss(key="consistent_loss", loss=consistent_loss.item())
            loss_handler.add_loss(key="smoothness_loss", loss=smoothness_loss.item())
            loss_handler.add_loss(key="discriminator_loss", loss=discriminator_loss.item())
            if epoch_num > opt.epoch_train_discriminator:
                loss_handler.add_loss(key="generator_loss", loss=generator_loss.item())
            loss_handler.count_one_sample()

            total_loss.backward()

            # Generator step; both LR schedulers advance every iteration.
            optimizer_g.step()
            step_optim_scheduler_g.step()
            step_optim_scheduler_d.step()

            # _forward_model_time = timer_handler.compute_time("forward_model")

            # timer_handler.compute_time("training_logger")
            training_logger()
            # _training_logger_time = timer_handler.compute_time("training_logger")

        ####
        if is_master_process():
            # Rank 0 persists weights + resume state once per epoch.
            save_checkpoints(os.path.join(opt.checkpoint_dir, f"epoch_{epoch_num}"))
        ####
    if opt.use_wandb:
        wandb.finish()
    ddp_cleanup()
diff --git a/video_predictor.py b/video_predictor.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f591774b11638b71a74577eaf0c3f014c053b2f
--- /dev/null
+++ b/video_predictor.py
@@ -0,0 +1,196 @@
+# from PIL import Image
+# from predictor import predictor
+# import torch
+# from src.utils import (
+# uncenter_l,
+# tensor_lab2rgb,
+# )
+
+import numpy as np
+import shutil
+import os
+import argparse
+import torch
+import glob
+from tqdm import tqdm
+from PIL import Image
+from collections import OrderedDict
+from src.models.vit.config import load_config
+import torchvision.transforms as transforms
+
+from src.models.CNN.ColorVidNet import GeneralColorVidNet
+from src.models.vit.embed import GeneralEmbedModel
+from src.models.CNN.NonlocalNet import GeneralWarpNet
+from src.utils import (
+ TimeHandler,
+ RGB2Lab,
+ ToTensor,
+ CenterPad,
+ Normalize,
+ LossHandler,
+ WarpingLayer,
+ uncenter_l,
+ tensor_lab2rgb,
+ print_num_params,
+ SquaredPadding,
+ UnpaddingSquare,
+)
+from src.models.CNN.FrameColor import frame_colorization
+# cur_path="./horse2_ground.webp"
+# ref_path="./horse2_ref.jpg"
+# las_path="./horse2_ground.webp"
# Paths and runtime configuration for batch video colorization.
weight_path = "./ckp/12/"      # directory holding embed_net*/nonlocal_net*/colornet*.pth
out_path = "./output_video/"   # colorized frames are written here, one folder per clip
root_path = "./EvalDataset"    # expects ./clips/<video>/<frames> and ./ref/<video>/<reference image>
# Fall back to CPU when CUDA is unavailable so the script can still run.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Recreate the output directory from scratch. ignore_errors avoids a crash on
# the first run (when out_path does not exist yet); makedirs with exist_ok is
# robust to missing parents and races.
shutil.rmtree(out_path, ignore_errors=True)
os.makedirs(out_path, exist_ok=True)
videos_list = os.listdir(root_path + "/clips/")
+# predictor_instance=predictor(model_path=weight_path,device=device)
def load_params(ckpt_file, map_location="cpu"):
    """Load a checkpoint's state dict from disk.

    Args:
        ckpt_file: Path to a ``torch.save``-ed state dict.
        map_location: Forwarded to ``torch.load``. Defaults to ``"cpu"`` so a
            checkpoint saved on any GPU can be loaded on any machine;
            ``load_state_dict`` moves the tensors to the model's device later.

    Returns:
        OrderedDict mapping parameter names to tensors, preserving the order
        stored in the checkpoint.
    """
    params = torch.load(ckpt_file, map_location=map_location)
    # dict iteration preserves insertion order, so no per-key copy loop is needed.
    return OrderedDict(params.items())
+
# Build the three sub-networks in eval mode on the target device.
embed_net = GeneralEmbedModel(pretrained_model="swin-small", device=device).to(device).eval()
nonlocal_net = GeneralWarpNet(feature_channel=128).to(device).eval()
colornet = GeneralColorVidNet(7).to(device).eval()

# glob() returns paths in arbitrary, OS-dependent order; sort first so [-1]
# deterministically picks the lexicographically latest checkpoint of each net.
embed_net.load_state_dict(
    load_params(sorted(glob.glob(os.path.join(weight_path, "embed_net*.pth")))[-1]),
    # NOTE(review): strict=False tolerates missing/unexpected keys — presumably
    # because the pretrained swin backbone is not stored in the checkpoint; confirm.
    strict=False,
)
nonlocal_net.load_state_dict(
    load_params(sorted(glob.glob(os.path.join(weight_path, "nonlocal_net*.pth")))[-1])
)
colornet.load_state_dict(
    load_params(sorted(glob.glob(os.path.join(weight_path, "colornet*.pth")))[-1])
)
+
def custom_transform(listTrans, img):
    """Apply a pipeline of transforms to ``img``, capturing SquaredPadding's paddings.

    Args:
        listTrans: Sequence of transform callables; at most one is expected to
            be a ``SquaredPadding`` (its reported paddings are returned).
        img: Input image (a PIL image at the start of the pipeline).

    Returns:
        Tuple of (transformed tensor moved to the module-level ``device``,
        paddings reported by ``SquaredPadding`` — or ``None`` when the list
        contains no ``SquaredPadding``).
    """
    # Initialize so the function cannot raise NameError when no
    # SquaredPadding transform is present in the list.
    padding = None
    for trans in listTrans:
        if isinstance(trans, SquaredPadding):
            img, padding = trans(img, return_paddings=True)
        else:
            img = trans(img)
    return img.to(device), padding
+# def save_numpy(path:str,ts,module):
+# np_ar=ts.numpy()
+# np.save(path.replace(".jpg","")+"_"+module,np_ar)
# Per-frame preprocessing pipeline: pad to a 224x224 square, convert
# RGB -> Lab, tensorize, then normalize.
transformer = [SquaredPadding(target_size=224), RGB2Lab(), ToTensor(), Normalize()]

# When True, predicted chroma is upsampled back toward the source resolution.
high_resolution = True

# Center pad applied to the reference image before the Lab pipeline.
center_padder = CenterPad((224, 224))
# Inference loop: colorize every frame of every clip, conditioned on the
# clip's single color reference image. The previous frame's prediction is
# fed back as temporal context (I_last_lab_predict).
with torch.no_grad():
    for video_name in tqdm(videos_list):
        frames_list=os.listdir(root_path+"/clips/"+video_name)
        # Sort so frames are processed in temporal order (listdir order is arbitrary).
        frames_list= sorted(frames_list)
        # The first (and presumably only) file in ref/<video>/ is the reference.
        ref_path = root_path+"/ref/"+video_name+"/"
        ref_file = os.listdir(ref_path)[0]
        ref_path = ref_path + ref_file
        # No previous prediction at t=0: start from an all-zero 1x3x224x224 Lab tensor.
        I_last_lab_predict = torch.zeros((1,3,224,224)).to(device)
        video_out_path = out_path+"/"+video_name+"/"
        os.mkdir(video_out_path)
        # Reference image: center-pad then run the Lab preprocessing pipeline once per clip.
        ref_frame_pil_rgb=Image.open(ref_path).convert("RGB")
        I_reference_lab, I_reference_padding= custom_transform(transformer,center_padder(ref_frame_pil_rgb))
        I_reference_lab=torch.unsqueeze(I_reference_lab,0)
        # Split Lab channels: L (luminance) and ab (chroma).
        I_reference_l = I_reference_lab[:, 0:1, :, :]
        I_reference_ab = I_reference_lab[:, 1:3, :, :]
        I_reference_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_reference_l), I_reference_ab), dim=1)).to(device)
        # Reference embedding is computed once and reused for every frame of the clip.
        features_B = embed_net(I_reference_rgb)
        for frame_name in frames_list:
            # current_frame_pil_rgb=Image.open(root_path+"/clips/"+video_name+"/"+frame_name).convert("RGB")
            # ref_frame_pil_rgb=Image.open(ref_path).convert("RGB")
            # last_frame_pil_rgb=Image.open(las_path).convert("RGB")

            #=================================using predictor but fail========================


            # I_current_lab = predictor_instance.data_transform(current_frame_pil_rgb)
            # I_current_lab = torch.unsqueeze(I_current_lab,0)
            # I_current_l = I_current_lab[:, 0:1, :, :]
            # I_current_ab = I_current_lab[:, 1:3, :, :]

            # # print(I_current_l[0,0,112:122,112:122])

            # # I_last_lab = predictor_instance.data_transform(last_frame_pil_rgb)
            # # I_last_lab = torch.unsqueeze(I_last_lab,0)
            # # I_last_l = I_last_lab[:, 0:1, :, :]
            # # I_last_ab = I_last_lab[:, 1:3, :, :]

            # I_current_lab_predict= predictor_instance(I_current_l=I_current_l,ref_img=ref_frame_pil_rgb,I_last_lab_predict=I_last_lab_predict)
            # I_current_l_predict=I_current_lab_predict[:, 0:1, :, :]
            # I_current_ab_predict=I_current_lab_predict[:, 1:3, :, :]

            # I_current_rgb_predict = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l_predict), I_current_ab_predict), dim=1))

            # image_result2 = Image.fromarray((I_current_rgb_predict[0] * 255).permute(1, 2, 0).detach().cpu().numpy().astype(np.uint8))
            # I_last_lab_predict = I_current_lab_predict
            #=================================using predictor but fail========================

            current_frame_pil_rgb=Image.open(root_path+"/clips/"+video_name+"/"+frame_name).convert("RGB")
            # PIL .size is (width, height) — kept for the high-resolution branch below.
            im_w,im_h=current_frame_pil_rgb.size
            # ref_frame_pil_rgb.show()



            # Current grayscale frame through the same 224x224 Lab pipeline.
            I_current_lab,I_current_padding = custom_transform(transformer,current_frame_pil_rgb)
            I_current_lab=torch.unsqueeze(I_current_lab,0)
            I_current_l = I_current_lab[:, 0:1, :, :]
            I_current_ab = I_current_lab[:, 1:3, :, :]





            # save_numpy(video_out_path+"/"+frame_name,I_current_l,"current_I")
            # save_numpy(video_out_path+"/"+frame_name,I_reference_lab,"reference_lab")
            # save_numpy(video_out_path+"/"+frame_name,I_last_lab_predict,"I_last_lab_predict")
            # NOTE: redundant — we are already inside the outer torch.no_grad(); harmless.
            with torch.no_grad():
                # Predict ab chroma for the current frame from its luminance,
                # the reference, and the previous frame's prediction.
                I_current_ab_predict,_ = frame_colorization(
                    IA_l=I_current_l,
                    IB_lab=I_reference_lab,
                    IA_last_lab=I_last_lab_predict,
                    features_B=features_B,
                    embed_net=embed_net,
                    colornet=colornet,
                    nonlocal_net=nonlocal_net,
                    luminance_noise=False,
                    #temperature=1e-10,
                )
            if high_resolution:
                # Rebuild a full-resolution Lab version of the frame: pad it
                # square at max(im_h, im_w) so its luminance can be combined
                # with the upsampled 224x224 chroma prediction.
                high_lab=transforms.Compose([
                    SquaredPadding(target_size=max(im_h,im_w)),
                    RGB2Lab(),
                    ToTensor(),
                    Normalize(),
                ])
                # print(im_h)
                # print(im_w)
                high_lab_current = high_lab(current_frame_pil_rgb)
                high_lab_current = torch.unsqueeze(high_lab_current,dim=0).to(device)
                high_l_current = high_lab_current[:, 0:1, :, :]
                high_ab_current = high_lab_current[:, 1:3, :, :]
                # NOTE(review): scale_factor max(im_h,im_w)/224 assumes the
                # prediction is 224x224 and the padded frame is square at
                # max(im_h,im_w) — confirm against SquaredPadding's behavior.
                upsampler=torch.nn.Upsample(scale_factor=max(im_h,im_w)/224,mode="bilinear")
                high_ab_predict = upsampler(I_current_ab_predict)
                # Full-res luminance + upsampled predicted chroma -> RGB.
                I_predict_rgb = tensor_lab2rgb(torch.cat((uncenter_l(high_l_current), high_ab_predict), dim=1))
            else:
                I_predict_rgb = tensor_lab2rgb(torch.cat((uncenter_l(I_current_l), I_current_ab_predict), dim=1))
            # I_predict_rgb = unpadder(I_predict_rgb,I_current_padding)
            # CHW float [0,1] -> HWC uint8 for PIL.
            image_result2 = Image.fromarray((I_predict_rgb[0] * 255).permute(1, 2, 0).detach().cpu().numpy().astype(np.uint8))
            # Feed the (224x224) prediction back as the "last frame" for the next step.
            I_last_lab_predict = torch.cat((I_current_l, I_current_ab_predict), dim=1)
            # save_numpy(video_out_path+"/"+frame_name,I_last_lab_predict,"result_lab")
            image_result2.save(video_out_path+"/"+frame_name)

# image_result2.show()
\ No newline at end of file