diff --git a/.gitattributes b/.gitattributes
index eca6c9ab51e6fcfa77e81826a4fdf47f8f4343ac..a792e665b9528b1e3189c05e49a12215fc6aed9b 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -46,3 +46,16 @@ src/pixel3dmm/preprocessing/facer/samples/data/weirdface.jpg filter=lfs diff=lfs
src/pixel3dmm/preprocessing/facer/samples/example_output/alignment.png filter=lfs diff=lfs merge=lfs -text
src/pixel3dmm/preprocessing/facer/samples/example_output/detect.png filter=lfs diff=lfs merge=lfs -text
src/pixel3dmm/preprocessing/facer/samples/example_output/parsing.png filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/FLAME_masks.gif filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/demo/input/connelly.jpg filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/demo/input/justin.png filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/demo/input/lawrence.jpg filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/documents/BP4D.gif filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/documents/D3DFACS.gif filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/documents/FACEWAREHOUSE.gif filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/documents/FLORENCE.gif filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/documents/FRGC.gif filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/documents/LYHM.gif filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/documents/STIRLING.gif filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/documents/teaser.jpg filter=lfs diff=lfs merge=lfs -text
+src/pixel3dmm/preprocessing/MICA/documents/voxceleb.gif filter=lfs diff=lfs merge=lfs -text
diff --git a/src/pixel3dmm/preprocessing/MICA/.gitignore b/src/pixel3dmm/preprocessing/MICA/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..b680d054868209584883f6dc923d87ceb2c1f87d
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/.gitignore
@@ -0,0 +1,71 @@
+# Compiled source #
+###################
+.idea
+
+datasets/creation/template/*
+statistics/*
+*.o
+*.so
+
+# Packages #
+############
+# it's better to unpack these files and commit the raw source
+# git has its own built in compression methods
+*.7z
+*.dmg
+*.gz
+*.iso
+*.jar
+*.rar
+*.tar
+*.zip
+
+# OS generated files #
+######################
+.DS_Store
+.DS_Store?
+._*
+.Spotlight-V100
+.Trashes
+ehthumbs.db
+Thumbs.db
+
+# 3D data #
+############
+*.mat
+*.obj
+*.dat
+*.npz
+*.pkl
+
+# python file #
+############
+*.pyc
+__pycache__
+
+## deca data
+data/FLAME2020/generic_model.pkl
+data/FLAME2020/female_model.pkl
+data/FLAME2020/male_model.pkl
+data/FLAME2020/FLAME_albedo_from_BFM.npz
+results
+output
+TestSamples
+
+## dump files
+__dump
+
+## visual code files
+.vscode
+render_dataset.py
+shapes.pt
+partial
+images
+*.pt
+testing/now/jobs
+testing/now/logs
+testing/stirling/logs
+testing/stirling/jobs
+
+demo/arcface
+demo/output
diff --git a/src/pixel3dmm/preprocessing/MICA/LICENSE b/src/pixel3dmm/preprocessing/MICA/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..cda4f9927437eb96723916b4d4657129c93aa28b
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/LICENSE
@@ -0,0 +1,88 @@
+License
+
+Software Copyright License for non-commercial scientific research purposes
+Please read carefully the following terms and conditions and any accompanying documentation before you download
+and/or use the MICA model, data and software, (the "Model & Software"), including 3D meshes, software, and scripts.
+By downloading and/or using the Model & Software (including downloading, cloning, installing, and any other use
+of this github repository), you acknowledge that you have read these terms and conditions, understand them, and
+agree to be bound by them. If you do not agree with these terms and conditions, you must not download and/or use
+the Model & Software. Any infringement of the terms of this agreement will automatically terminate your rights
+under this License
+
+Ownership / Licensees
+The Model & Software and the associated materials has been developed at the
+Max Planck Institute for Intelligent Systems (hereinafter "MPI").
+
+Any copyright or patent right is owned by and proprietary material of the
+Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (hereinafter “MPG”; MPI and MPG hereinafter
+collectively “Max-Planck”) hereinafter the “Licensor”.
+
+License Grant
+Licensor grants you (Licensee) personally a single-user, non-exclusive, non-transferable, free of charge right:
+
+ • To install the Model & Software on computers owned, leased or otherwise controlled by you and/or your organization.
+ • To use the Model & Software for the sole purpose of performing peaceful non-commercial scientific research,
+ non-commercial education, or non-commercial artistic projects.
+
+Any other use, in particular any use for commercial, pornographic, military, or surveillance purposes is prohibited.
+This includes, without limitation, incorporation in a commercial product, use in a commercial service,
+or production of other artefacts for commercial purposes.
+
+The Model & Software may not be used to create fake, libelous, misleading, or defamatory content of any kind, excluding
+analyses in peer-reviewed scientific research.
+
+The Model & Software may not be reproduced, modified and/or made available in any form to any third party
+without Max-Planck’s prior written permission.
+
+The Model & Software may not be used for pornographic purposes or to generate pornographic material whether
+commercial or not. This license also prohibits the use of the Model & Software to train methods/algorithms/neural
+networks/etc. for commercial use of any kind. By downloading the Model & Software, you agree not to reverse engineer it.
+
+No Distribution
+The Model & Software and the license herein granted shall not be copied, shared, distributed, re-sold, offered
+for re-sale, transferred or sub-licensed in whole or in part except that you may make one copy for archive
+purposes only.
+
+Disclaimer of Representations and Warranties
+You expressly acknowledge and agree that the Model & Software results from basic research, is provided “AS IS”,
+may contain errors, and that any use of the Model & Software is at your sole risk.
+LICENSOR MAKES NO REPRESENTATIONS
+OR WARRANTIES OF ANY KIND CONCERNING THE MODEL & SOFTWARE, NEITHER EXPRESS NOR IMPLIED, AND THE ABSENCE OF ANY
+LEGAL OR ACTUAL DEFECTS, WHETHER DISCOVERABLE OR NOT. Specifically, and not to limit the foregoing, licensor
+makes no representations or warranties (i) regarding the merchantability or fitness for a particular purpose of
+the Model & Software, (ii) that the use of the Model & Software will not infringe any patents, copyrights or other
+intellectual property rights of a third party, and (iii) that the use of the Model & Software will not cause any
+damage of any kind to you or a third party.
+
+Limitation of Liability
+Because this Model & Software License Agreement qualifies as a donation, according to Section 521 of the German
+Civil Code (Bürgerliches Gesetzbuch – BGB) Licensor as a donor is liable for intent and gross negligence only.
+If the Licensor fraudulently conceals a legal or material defect, they are obliged to compensate the Licensee
+for the resulting damage.
+
+Licensor shall be liable for loss of data only up to the amount of typical recovery costs which would have
+arisen had proper and regular data backup measures been taken. For the avoidance of doubt Licensor shall be
+liable in accordance with the German Product Liability Act in the event of product liability. The foregoing
+applies also to Licensor’s legal representatives or assistants in performance. Any further liability shall
+be excluded. Patent claims generated through the usage of the Model & Software cannot be directed towards the copyright holders.
+The Model & Software is provided in the state of development the licensor defines. If modified or extended by
+Licensee, the Licensor makes no claims about the fitness of the Model & Software and is not responsible
+for any problems such modifications cause.
+
+No Maintenance Services
+You understand and agree that Licensor is under no obligation to provide either maintenance services,
+update services, notices of latent defects, or corrections of defects with regard to the Model & Software.
+Licensor nevertheless reserves the right to update, modify, or discontinue the Model & Software at any time.
+
+Defects of the Model & Software must be notified in writing to the Licensor with a comprehensible description
+of the error symptoms. The notification of the defect should enable the reproduction of the error.
+The Licensee is encouraged to communicate any use, results, modification or publication.
+
+Publications using the Model & Software
+You acknowledge that the Model & Software is a valuable scientific resource and agree to appropriately reference
+the following paper in any publication making use of the Model & Software.
+
+Commercial licensing opportunities
+For commercial uses of the Model & Software, please send email to justus.thies@tuebingen.mpg.de
+
+This Agreement shall be governed by the laws of the Federal Republic of Germany except for the UN Sales Convention.
diff --git a/src/pixel3dmm/preprocessing/MICA/README.md b/src/pixel3dmm/preprocessing/MICA/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1783974f132b2df1b4efac27f31189b8d24ea9f3
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/README.md
@@ -0,0 +1,72 @@
+# MICA - Towards Metrical Reconstruction of Human Faces
+
+Max Planck Institute for Intelligent Systems, Tübingen, Germany
+
+Official Repository for the ECCV 2022 paper *Towards Metrical Reconstruction of Human Faces*
+
+### Installation
+
+After cloning the repository, install the environment from the attached conda `environment.yml` file with the command
+``conda env create -f environment.yml``. Additionally, the FLAME2020 model is needed. To obtain it, create an account on the [website](https://flame.is.tue.mpg.de/), download the model, and place it in the `data/FLAME2020/` folder.
+
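+A minimal manual setup, assuming the default paths from `configs/config.py`, looks like this:
+
+```shell
+conda env create -f environment.yml
+conda activate MICA
+# place the downloaded FLAME2020 files, e.g. generic_model.pkl, into data/FLAME2020/
+```
+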
+You can also simply run the `install.sh` script:
+
+```shell
+git clone https://github.com/Zielon/MICA.git
+cd MICA
+./install.sh
+```
+You will be asked to provide `{flame_user}` and `{flame_password}` for your FLAME account in order to access the file server.
+
+### Pre-trained Models
+
+If you decide not to use the installation script, the pretrained model can be found under this [link](https://drive.google.com/file/d/1bYsI_spptzyuFmfLYqYkcJA6GZWZViNt/view?usp=sharing). After downloading, please place it at `data/pretrained/mica.tar`. Additionally, you will need to provide the models for `insightface`:
+1) [antelopev2](https://drive.google.com/file/d/16PWKI_RjjbE4_kqpElG-YFqe8FpXjads/view?usp=sharing)
+2) [buffalo_l](https://drive.google.com/file/d/1navJMy0DTr1_DHjLWu1i48owCPvXWfYc/view?usp=sharing)
+
+Then unzip them and place them in `~/.insightface/models/`. The `install.sh` script does this for you.
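+
+A sketch of the expected layout after a manual download (the archive names are assumptions; the target folders follow the paths above and `configs/config.py`):
+
+```shell
+mkdir -p data/pretrained ~/.insightface/models
+mv mica.tar data/pretrained/mica.tar
+# each archive should end up as its own subfolder, e.g. ~/.insightface/models/antelopev2
+unzip antelopev2.zip -d ~/.insightface/models/
+unzip buffalo_l.zip -d ~/.insightface/models/
+```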
+
+### How To Use
+
+To use MICA, simply run `demo.py`. It will process all images from the `demo/input/` folder and create an output folder for each subject containing a `.ply` mesh, a rendered image, and `.npy` FLAME parameters.
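+
+For example, assuming the default input and output folders:
+
+```shell
+# reads every image from demo/input/ and writes the per-subject results to demo/output/
+python demo.py
+```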
+
+### Dataset and Training
+
+The MICA dataset consists of eight smaller datasets covering about 2300 subjects, all under a common FLAME topology. More information about how to obtain and use it is available under this [link](https://github.com/Zielon/MICA/tree/master/datasets/). To train MICA, the images from all eight datasets are needed. The repository contains scripts for generating the Arcface input images as well as the complete list of images used for training. More information can be found [here](https://github.com/Zielon/MICA/tree/master/datasets).
+
+When training from scratch, please download [Glint360K](https://github.com/deepinsight/insightface/tree/master/recognition/arcface_torch) for the Arcface model initialization and specify its path in the config as `cfg.model.arcface_pretrained_model`.
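+
+Training itself is launched through `train.py` (this is also what `condor/job.sh` runs on the cluster); a minimal local invocation would be:
+
+```shell
+python ./train.py --cfg ./configs/mica.yml
+```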
+
+### Testing
+
+Testing was performed on two datasets, [Stirling](http://pics.stir.ac.uk/ESRC/) and [NoW](https://now.is.tue.mpg.de/). In the [model folder](https://github.com/Zielon/MICA/tree/master/models) you can find the corresponding scripts to run the testing routine, which generates the meshes. To calculate the NoW challenge error you can use the following [repository](https://github.com/soubhiksanyal/now_evaluation).
+
+### Citation
+If you use this project in your research, please cite MICA:
+```bibtex
+@inproceedings{zielonka22mica,
+  author    = {Zielonka, Wojciech and Bolkart, Timo and Thies, Justus},
+  title     = {Towards Metrical Reconstruction of Human Faces},
+  booktitle = {European Conference on Computer Vision},
+  year      = {2022}
+}
+```
diff --git a/src/pixel3dmm/preprocessing/MICA/condor.sh b/src/pixel3dmm/preprocessing/MICA/condor.sh
new file mode 100644
index 0000000000000000000000000000000000000000..8722a4780fb5fed95fc284bea873df8e5b4f973e
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/condor.sh
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+# bash condor.sh 100 ./configs/mica.yml 1
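+# positional arguments (all optional): BID CONFIG GPU_TYPE GPUS NODE_CONFIG NODE_SCRIPT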
+
+# default parameters
+BID=3
+CONFIG=./configs/mica.yml
+NODE_CONFIG=condor/config.sub
+NODE_SCRIPT=./condor/job.sh
+GPUS=1
+GPU_TYPE=0
+
+# set parameters
+if [ -n "$1" ]; then BID=${1}; fi
+if [ -n "$2" ]; then CONFIG=${2}; fi
+if [ -n "$3" ]; then GPU_TYPE=${3}; fi
+if [ -n "$4" ]; then GPUS=${4}; fi
+if [ -n "$5" ]; then NODE_CONFIG=${5}; fi
+if [ -n "$6" ]; then NODE_SCRIPT=${6}; fi
+
+mkdir -p output/condor_logs
+cp -nf ${NODE_CONFIG} ${NODE_CONFIG}.bak
+
+GPU_NAME=Error
+
+if [ $GPU_TYPE -eq 0 ]; then GPU_NAME='Quadro RTX 6000'; fi
+if [ $GPU_TYPE -eq 1 ]; then GPU_NAME='Tesla V100-SXM2-32GB'; fi
+if [ $GPU_TYPE -eq 2 ]; then GPU_NAME='NVIDIA GeForce RTX 2080 Ti'; fi
+
+NAME=$(basename ${CONFIG} .yml)
+sed -i "s/{errorfile}/${NAME}/" ${NODE_CONFIG}.bak
+sed -i "s/{outfile}/${NAME}/" ${NODE_CONFIG}.bak
+sed -i "s/{logfile}/${NAME}/" ${NODE_CONFIG}.bak
+sed -i "s/{gpus}/${GPUS}/" ${NODE_CONFIG}.bak
+sed -i "s/{gpu_name}/${GPU_NAME}/" ${NODE_CONFIG}.bak
+
+# start node and execute script
+echo 'Executing:' ${NODE_SCRIPT} ${CONFIG}
+echo '# BID:' ${BID}
+echo '# GPUS:' ${GPUS}
+echo '# GPU NAME:' ${GPU_NAME}
+
+condor_submit_bid ${BID} ${NODE_CONFIG}.bak -append "arguments = ${NODE_SCRIPT} ${CONFIG}"
+rm ${NODE_CONFIG}.bak
diff --git a/src/pixel3dmm/preprocessing/MICA/condor/config.sub b/src/pixel3dmm/preprocessing/MICA/condor/config.sub
new file mode 100644
index 0000000000000000000000000000000000000000..22e7f51b217815b7aaf867458d3565f1ac8d9e54
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/condor/config.sub
@@ -0,0 +1,16 @@
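+# Placeholders in curly braces ({errorfile}, {outfile}, {logfile}, {gpus}, {gpu_name}) are filled in by condor.sh via sed before submission.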
+executable = /bin/bash
+error = ./output/condor_logs/{errorfile}_$(ClusterId).$(ProcId).err
+output = ./output/condor_logs/{outfile}_$(ClusterId).$(ProcId).out
+log = ./output/condor_logs/{logfile}_$(ClusterId).$(ProcId).log
+request_memory = 32768
+request_cpus = 6
+request_gpus = {gpus}
++WantGPUStats = true
+requirements = (TARGET.CUDADeviceName=="{gpu_name}")
+# EXIT SETTINGS
+on_exit_hold = (ExitCode =?= 3)
+on_exit_hold_reason = "Checkpointed, will resume"
+on_exit_hold_subcode = 2
+periodic_release = ( (JobStatus =?= 5) && (HoldReasonCode =?= 3) && (HoldReasonSubCode =?= 2) )
++RunningPriceExceededAction = "kill"
+queue
diff --git a/src/pixel3dmm/preprocessing/MICA/condor/dataset.sh b/src/pixel3dmm/preprocessing/MICA/condor/dataset.sh
new file mode 100644
index 0000000000000000000000000000000000000000..71983a72a7872d58e65e685cb3272da04015b7d8
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/condor/dataset.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+CONFIG=${1}
+
+PYTHON_ENV=/home/wzielonka/miniconda3/etc/profile.d/conda.sh
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+export PATH=/usr/local/bin:/usr/bin:/bin:/usr/sbin:$PATH
+export PYTHONPATH="${PYTHONPATH}:/home/wzielonka/projects/OnFlame-internal/"
+
+echo 'START JOB (dataset generation)'
+
+module load cuda/10.1
+module load gcc/4.9
+
+echo 'ACTIVATE MICA'
+source ${PYTHON_ENV}
+conda activate MICA
+
+echo 'RUN SCRIPT'
+cd ${SCRIPT_DIR}/../datasets/creation
+python ./main.py
\ No newline at end of file
diff --git a/src/pixel3dmm/preprocessing/MICA/condor/job.sh b/src/pixel3dmm/preprocessing/MICA/condor/job.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4f6ce1c3066dd6baa2000677b72a855c2cb456a3
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/condor/job.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+CONFIG=${1}
+
+PYTHON_ENV=/home/wzielonka/miniconda3/etc/profile.d/conda.sh
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+export PATH=/usr/local/bin:/usr/bin:/bin:/usr/sbin:$PATH
+
+echo 'START JOB (MICA training)'
+
+module load cuda/10.1
+module load gcc/4.9
+
+echo 'ACTIVATE MICA'
+source ${PYTHON_ENV}
+conda activate MICA
+
+echo 'RUN SCRIPT'
+#echo 'ScriptDir' ${SCRIPT_DIR}
+echo 'CONFIG: ' ${CONFIG}
+cd ${SCRIPT_DIR}/..
+python ./train.py --cfg ${CONFIG}
\ No newline at end of file
diff --git a/src/pixel3dmm/preprocessing/MICA/configs/__init__.py b/src/pixel3dmm/preprocessing/MICA/configs/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/pixel3dmm/preprocessing/MICA/configs/config.py b/src/pixel3dmm/preprocessing/MICA/configs/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..aed4eb64013319b79456ca6d3ce2e8489de20210
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/configs/config.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import argparse
+import os
+
+from yacs.config import CfgNode as CN
+
+cfg = CN()
+
+abs_mica_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
+cfg.mica_dir = abs_mica_dir
+cfg.device = 'cuda'
+cfg.device_id = '0'
+cfg.pretrained_model_path = os.path.join(cfg.mica_dir, 'data/pretrained', 'mica.tar')
+cfg.output_dir = ''
+
+# ---------------------------------------------------------------------------- #
+# Options for Face model
+# ---------------------------------------------------------------------------- #
+cfg.model = CN()
+cfg.model.testing = False
+cfg.model.name = 'mica'
+
+cfg.model.topology_path = os.path.join(cfg.mica_dir, 'data/FLAME2020', 'head_template.obj')
+cfg.model.flame_model_path = os.path.join(cfg.mica_dir, 'data/FLAME2020', 'generic_model.pkl')
+cfg.model.flame_lmk_embedding_path = os.path.join(cfg.mica_dir, 'data/FLAME2020', 'landmark_embedding.npy')
+cfg.model.n_shape = 300
+cfg.model.layers = 8
+cfg.model.hidden_layers_size = 256
+cfg.model.mapping_layers = 3
+cfg.model.use_pretrained = True
+cfg.model.arcface_pretrained_model = '/scratch/is-rg-ncs/models_weights/arcface-torch/backbone100.pth'
+
+# ---------------------------------------------------------------------------- #
+# Options for Dataset
+# ---------------------------------------------------------------------------- #
+cfg.dataset = CN()
+cfg.dataset.training_data = ['LYHM']
+cfg.dataset.eval_data = ['FLORENCE']
+cfg.dataset.batch_size = 2
+cfg.dataset.K = 4
+cfg.dataset.n_train = 100000
+cfg.dataset.num_workers = 4
+cfg.dataset.root = '/datasets/MICA/'
+
+# ---------------------------------------------------------------------------- #
+# Mask weights
+# ---------------------------------------------------------------------------- #
+cfg.mask_weights = CN()
+cfg.mask_weights.face = 150.0
+cfg.mask_weights.nose = 50.0
+cfg.mask_weights.lips = 50.0
+cfg.mask_weights.forehead = 50.0
+cfg.mask_weights.lr_eye_region = 50.0
+cfg.mask_weights.eye_region = 50.0
+
+cfg.mask_weights.whole = 1.0
+cfg.mask_weights.ears = 0.01
+cfg.mask_weights.eyes = 0.01
+
+cfg.running_average = 7
+
+# ---------------------------------------------------------------------------- #
+# Options for training
+# ---------------------------------------------------------------------------- #
+cfg.train = CN()
+cfg.train.use_mask = False
+cfg.train.max_epochs = 50
+cfg.train.max_steps = 100000
+cfg.train.lr = 1e-4
+cfg.train.arcface_lr = 1e-3
+cfg.train.weight_decay = 0.0
+cfg.train.lr_update_step = 100000000
+cfg.train.log_dir = 'logs'
+cfg.train.log_steps = 10
+cfg.train.vis_dir = 'train_images'
+cfg.train.vis_steps = 200
+cfg.train.write_summary = True
+cfg.train.checkpoint_steps = 1000
+cfg.train.checkpoint_epochs_steps = 2
+cfg.train.val_steps = 1000
+cfg.train.val_vis_dir = 'val_images'
+cfg.train.eval_steps = 5000
+cfg.train.reset_optimizer = False
+cfg.train.val_save_img = 5000
+cfg.test_dataset = 'now'
+
+
+def get_cfg_defaults():
+ return cfg.clone()
+
+
+def update_cfg(cfg, cfg_file):
+ cfg.merge_from_file(cfg_file)
+ return cfg.clone()
+
+
+def parse_args():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--cfg', type=str, help='cfg file path', required=True)
+ parser.add_argument('--test_dataset', type=str, help='Test dataset type', default='')
+ parser.add_argument('--checkpoint', type=str, help='Checkpoint to load', default='')
+
+ args = parser.parse_args()
+ print(args, end='\n\n')
+
+ cfg = get_cfg_defaults()
+ if args.cfg is not None:
+ cfg_file = args.cfg
+ cfg = update_cfg(cfg, args.cfg)
+ cfg.cfg_file = cfg_file
+
+ return cfg, args
diff --git a/src/pixel3dmm/preprocessing/MICA/configs/mica.yml b/src/pixel3dmm/preprocessing/MICA/configs/mica.yml
new file mode 100644
index 0000000000000000000000000000000000000000..c1c63542919e69affb4b4e1b7835ded076f94e20
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/configs/mica.yml
@@ -0,0 +1,30 @@
+# Mica config
+
+pretrained_model_path: ''
+
+dataset:
+ root: '/scratch/NFC/MICA/dataset/'
+ training_data: [ 'LYHM', 'D3DFACS', 'BU3DFE', 'FRGC', 'Stirling', 'FaceWarehouse', 'BP4D' ]
+ eval_data: [ 'FLORENCE' ]
+ num_workers: 4
+ batch_size: 8
+ K: 2
+
+train:
+ lr: 1e-5
+ arcface_lr: 1e-5
+ weight_decay: 2e-4
+ use_mask: True
+ reset_optimizer: False
+ max_steps: 160000
+ log_steps: 50
+ val_steps: 300
+ vis_steps: 1200
+ val_save_img: 1200
+ checkpoint_steps: 1000
+ checkpoint_epochs_steps: 10000
+
+model:
+ use_pretrained: False
+ n_shape: 300
+ name: 'mica'
diff --git a/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/FLAME_masks.gif b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/FLAME_masks.gif
new file mode 100644
index 0000000000000000000000000000000000000000..179856af51ad93588afdd95201dae2aadf02caa1
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/FLAME_masks.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0021ad6d1a2e748eb80d2561ff3b9489ff12ffa346083c230b57c037f2f06f52
+size 1039146
diff --git a/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/FLAME_masks.pkl b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/FLAME_masks.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..29f6891837acd88f36a7be9d69c46aa0b7340800
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/FLAME_masks.pkl
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ccefbe1ac0774ff78c68caf2c627b4abc067a6555ebeb0be5d5b0812366ab492
+size 215062
diff --git a/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/readme b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/readme
new file mode 100644
index 0000000000000000000000000000000000000000..ed94d309059f95ca35c40b2cb5ac49e81a651f83
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/FLAME_masks/readme
@@ -0,0 +1,2 @@
+Dictionary with vertex indices for different masks for the publicly available FLAME head model (https://flame.is.tue.mpg.de/).
+See the gif for a visualization of all masks.
diff --git a/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/head_template.obj b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/head_template.obj
new file mode 100644
index 0000000000000000000000000000000000000000..4a6b0ccd66ec2a67b734fa3a9e452b0bea48905e
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/head_template.obj
@@ -0,0 +1,14999 @@
+v 0.063475 -0.009089 -0.050588
+v 0.067027 -0.010080 -0.049303
+v 0.067573 -0.008624 -0.048926
+v 0.064161 -0.008104 -0.050293
+v 0.074290 -0.009254 -0.058712
+v 0.075194 -0.005716 -0.054235
+v 0.074691 -0.003284 -0.053359
+v 0.074395 -0.003310 -0.056271
+v 0.026336 -0.045377 -0.113390
+v 0.011602 -0.045140 -0.117554
+v 0.011563 -0.056361 -0.114971
+v 0.025233 -0.059372 -0.109563
+v 0.044968 -0.082789 -0.090786
+v 0.039546 -0.074675 -0.096078
+v 0.037286 -0.086856 -0.099831
+v 0.046525 -0.090619 -0.090466
+v 0.039289 0.048517 0.039143
+v 0.046469 0.047187 0.033244
+v 0.045399 0.041278 0.036846
+v -0.063531 -0.009213 -0.053226
+v -0.067118 -0.010205 -0.051915
+v -0.067699 -0.008785 -0.051607
+v -0.064062 -0.008181 -0.053010
+v -0.074129 -0.009141 -0.061887
+v -0.075316 -0.005671 -0.057339
+v -0.074778 -0.003285 -0.056444
+v -0.074335 -0.003245 -0.059425
+v 0.038421 0.041884 0.041256
+v 0.090713 0.013058 -0.067235
+v 0.090837 0.010574 -0.068471
+v 0.091486 0.008760 -0.066747
+v 0.091168 0.010860 -0.065282
+v 0.087011 -0.004221 -0.067140
+v 0.087319 0.002846 -0.067991
+v 0.088534 0.005311 -0.068452
+v 0.088712 0.003116 -0.067787
+v 0.086690 -0.014468 -0.064526
+v 0.085356 -0.013764 -0.064337
+v 0.086771 -0.009298 -0.065899
+v 0.088402 -0.009728 -0.066394
+v 0.073091 -0.034771 -0.041042
+v 0.075770 -0.031851 -0.043526
+v 0.075892 -0.035064 -0.044977
+v 0.071418 -0.036632 -0.043217
+v 0.068308 -0.034740 -0.044393
+v 0.072658 -0.035415 -0.048767
+v 0.072240 -0.031877 -0.049960
+v 0.068636 -0.031738 -0.046733
+v 0.086152 0.022057 -0.054554
+v 0.086618 0.018355 -0.053723
+v 0.085485 0.017223 -0.051651
+v 0.084780 0.020845 -0.052193
+v 0.071117 -0.008366 -0.064940
+v 0.069428 -0.014791 -0.063533
+v 0.075563 -0.010181 -0.065801
+v 0.076114 -0.007457 -0.065845
+v 0.085803 0.012357 -0.060127
+v 0.085807 0.013003 -0.063141
+v 0.086181 0.011003 -0.064462
+v 0.085653 0.009744 -0.062235
+v 0.087661 0.014647 -0.068206
+v 0.087801 0.011831 -0.069568
+v 0.071593 -0.025127 -0.040409
+v 0.070786 -0.021652 -0.039596
+v 0.071236 -0.020163 -0.040891
+v 0.071396 -0.021246 -0.042525
+v 0.073815 -0.009267 -0.055419
+v 0.075256 -0.004711 -0.052800
+v 0.083422 -0.025255 -0.060002
+v 0.083060 -0.023214 -0.058997
+v 0.085295 -0.019100 -0.061990
+v 0.085498 -0.020568 -0.063475
+v 0.079766 -0.020880 -0.057113
+v 0.082110 -0.018005 -0.060269
+v 0.083087 -0.017280 -0.061973
+v 0.082216 -0.019502 -0.060016
+v 0.087102 0.014838 -0.061430
+v 0.085890 0.017123 -0.056585
+v 0.087300 0.016050 -0.055372
+v 0.088886 0.013939 -0.059491
+v 0.078245 0.003959 -0.064975
+v 0.076823 0.008838 -0.062891
+v 0.075681 0.008218 -0.062948
+v 0.076764 0.003992 -0.064861
+v 0.087530 0.012147 -0.065924
+v 0.087940 0.009977 -0.067208
+v 0.086594 0.008715 -0.065932
+v 0.077648 0.009674 -0.049150
+v 0.079276 0.010769 -0.051092
+v 0.079385 0.009191 -0.052012
+v 0.079065 0.008014 -0.051268
+v 0.075551 -0.035972 -0.048486
+v 0.068313 -0.023407 -0.057112
+v 0.070007 -0.023056 -0.058797
+v 0.072716 -0.018918 -0.061592
+v 0.071011 -0.017674 -0.061975
+v 0.068485 -0.032590 -0.038934
+v 0.071746 -0.032382 -0.039240
+v 0.072291 -0.033630 -0.040235
+v 0.069156 -0.034869 -0.040527
+v 0.080191 0.014053 -0.051285
+v 0.082215 0.013288 -0.053015
+v 0.080500 0.012514 -0.052557
+v 0.079253 0.012998 -0.051189
+v 0.086153 0.001078 -0.066504
+v 0.085835 0.007283 -0.064036
+v 0.085273 -0.007924 -0.061968
+v 0.085382 -0.007571 -0.064710
+v 0.084900 -0.011634 -0.063269
+v -0.028905 -0.045527 -0.113499
+v -0.014233 -0.045172 -0.117584
+v -0.014089 -0.056449 -0.115030
+v -0.027659 -0.059598 -0.109681
+v 0.084242 -0.011778 -0.060851
+v 0.085548 -0.000104 -0.061528
+v 0.085684 0.000018 -0.064750
+v 0.089263 0.008030 -0.065448
+v 0.089306 0.002331 -0.066606
+v 0.088709 0.008703 -0.066316
+v 0.074871 0.011834 -0.060601
+v 0.075310 0.015008 -0.064798
+v 0.073856 0.009018 -0.066755
+v 0.074784 0.009509 -0.062364
+v 0.071955 -0.016215 -0.041026
+v 0.073060 -0.014007 -0.042335
+v 0.071405 -0.016094 -0.044214
+v 0.070440 -0.018661 -0.043699
+v 0.073569 -0.021151 -0.060445
+v 0.070913 -0.024192 -0.058043
+v 0.071561 -0.024932 -0.057392
+v 0.074193 -0.022869 -0.059032
+v 0.084243 0.013934 -0.054842
+v 0.084429 0.009824 -0.056214
+v 0.082949 0.009815 -0.054957
+v 0.075775 -0.019983 -0.049871
+v 0.076048 -0.018725 -0.051920
+v 0.077469 -0.021460 -0.053098
+v 0.077312 -0.023501 -0.051177
+v 0.079790 -0.031907 -0.054155
+v 0.077026 -0.032061 -0.055039
+v 0.074842 -0.034093 -0.051648
+v 0.077749 -0.034304 -0.051296
+v 0.071168 -0.019824 -0.043153
+v 0.071358 -0.018737 -0.041072
+v 0.080376 0.004177 -0.066616
+v 0.079339 0.010000 -0.063882
+v 0.085444 0.008342 -0.060259
+v 0.085619 0.006142 -0.061921
+v 0.074437 -0.003593 -0.045695
+v 0.072784 -0.003171 -0.047435
+v 0.073095 -0.003989 -0.047376
+v 0.075155 -0.005034 -0.045507
+v 0.074430 -0.003974 -0.044238
+v 0.074933 -0.005545 -0.044130
+v 0.074228 -0.005581 -0.043338
+v 0.073879 -0.004757 -0.043159
+v 0.072078 -0.002994 -0.047911
+v 0.072480 -0.003251 -0.047658
+v 0.072646 -0.002751 -0.047687
+v 0.076348 -0.000562 -0.058520
+v 0.075745 -0.007681 -0.060649
+v 0.075216 -0.009018 -0.060679
+v 0.076925 -0.003358 -0.058105
+v 0.085929 -0.006555 -0.066190
+v 0.082433 0.017459 -0.048223
+v 0.081523 0.020258 -0.048187
+v 0.083836 0.022892 -0.052022
+v 0.070427 -0.017067 -0.054122
+v 0.073806 -0.016916 -0.051788
+v 0.073349 -0.017057 -0.050114
+v 0.070932 -0.017237 -0.052347
+v 0.072370 -0.012051 -0.046928
+v 0.070017 -0.015092 -0.046880
+v 0.073500 -0.013630 -0.044850
+v 0.074566 0.001677 -0.043917
+v 0.073977 -0.002042 -0.046562
+v 0.073998 -0.002869 -0.045327
+v 0.073990 -0.002550 -0.044046
+v 0.086835 -0.015188 -0.065940
+v 0.088084 -0.009933 -0.067998
+v 0.086412 -0.009994 -0.069476
+v 0.085270 -0.015654 -0.067452
+v 0.069901 -0.028675 -0.037952
+v 0.065053 -0.030549 -0.041412
+v 0.068784 -0.025467 -0.038127
+v 0.089255 0.019371 -0.062386
+v 0.087213 0.021190 -0.062713
+v 0.087445 0.019275 -0.064880
+v 0.089936 0.017522 -0.064350
+v 0.074399 0.002694 -0.051439
+v 0.073999 0.000728 -0.051317
+v 0.074507 0.001809 -0.049941
+v 0.074337 0.004027 -0.049955
+v 0.050330 -0.031316 -0.096197
+v 0.058432 -0.031348 -0.077260
+v 0.062412 -0.017585 -0.085528
+v 0.055315 -0.017776 -0.101497
+v 0.080319 -0.028666 -0.053343
+v 0.079655 -0.030863 -0.050810
+v 0.076395 0.009172 -0.042703
+v 0.075694 0.010857 -0.041966
+v 0.076432 0.016345 -0.043831
+v 0.077459 0.013200 -0.043725
+v 0.088468 0.004410 -0.071286
+v 0.090939 0.003639 -0.070058
+v 0.079664 0.014230 -0.049755
+v 0.081767 0.016826 -0.052543
+v 0.080433 0.014736 -0.051512
+v 0.079155 0.013484 -0.050020
+v 0.079351 0.006049 -0.046928
+v 0.080011 0.007320 -0.046910
+v 0.078190 0.008237 -0.047099
+v 0.077199 0.006947 -0.047307
+v 0.075065 0.015842 -0.041271
+v 0.076218 0.020894 -0.044807
+v 0.066255 -0.023670 -0.056726
+v 0.078578 0.007980 -0.044306
+v 0.079292 0.010566 -0.044687
+v 0.080598 0.009048 -0.046795
+v -0.047008 -0.083378 -0.090970
+v -0.041648 -0.075210 -0.096225
+v -0.039449 -0.087339 -0.099946
+v -0.048495 -0.091227 -0.090685
+v 0.069830 -0.020768 -0.037683
+v 0.070954 -0.019161 -0.039534
+v 0.078774 0.013155 -0.049796
+v 0.078093 0.012006 -0.049684
+v 0.076818 0.008120 -0.048134
+v 0.077598 0.007076 -0.050282
+v 0.081301 0.000588 -0.057545
+v 0.081863 -0.007482 -0.059220
+v 0.065535 -0.017125 -0.071358
+v 0.062172 -0.030049 -0.057228
+v 0.077053 0.008691 -0.047378
+v 0.077244 0.009245 -0.047546
+v 0.076685 0.008456 -0.047334
+v 0.076176 0.007606 -0.047736
+v 0.078488 -0.032783 -0.048374
+v 0.081404 0.006644 -0.055325
+v 0.078663 0.004708 -0.053344
+v 0.080248 0.006615 -0.053336
+v 0.088798 -0.005312 -0.066389
+v 0.090396 0.002054 -0.066435
+v 0.091752 0.002246 -0.067836
+v 0.090066 -0.005452 -0.067550
+v 0.066490 -0.033360 -0.043015
+v 0.073830 -0.000019 -0.052656
+v 0.074848 -0.001347 -0.051989
+v 0.076124 0.005531 -0.050635
+v 0.075535 0.006710 -0.048118
+v 0.071157 -0.016147 -0.039028
+v 0.086647 0.015626 -0.063115
+v 0.087229 0.014064 -0.064486
+v 0.085729 0.014488 -0.061900
+v 0.085595 0.015565 -0.060396
+v 0.085103 0.017850 -0.058790
+v 0.084473 0.018548 -0.058331
+v 0.086136 0.017130 -0.061428
+v 0.078366 0.006155 -0.051496
+v 0.079584 0.007114 -0.052260
+v 0.075906 -0.021183 -0.048210
+v 0.076911 -0.026744 -0.048211
+v 0.083650 -0.015132 -0.061304
+v 0.082525 -0.014523 -0.058908
+v 0.083872 -0.004328 -0.070063
+v 0.083878 0.004359 -0.069855
+v 0.082130 0.004197 -0.067998
+v 0.082585 -0.004098 -0.068794
+v 0.076291 -0.001920 -0.065731
+v 0.072704 -0.000970 -0.065686
+v 0.073560 -0.003916 -0.042659
+v 0.072996 -0.000187 -0.040714
+v 0.074851 0.004740 -0.049933
+v 0.075036 0.006448 -0.048511
+v 0.077428 -0.018763 -0.063013
+v 0.073905 -0.025730 -0.055500
+v 0.077778 -0.020899 -0.061140
+v 0.069785 -0.026151 -0.053533
+v 0.069591 -0.027157 -0.051226
+v 0.072144 -0.027334 -0.052761
+v 0.079547 -0.018962 -0.056300
+v 0.081619 -0.017643 -0.058720
+v 0.072671 -0.003472 -0.048485
+v 0.071610 -0.005132 -0.050177
+v 0.070842 -0.003516 -0.048312
+v 0.084261 0.000014 -0.059386
+v 0.084460 -0.007712 -0.060171
+v 0.079642 0.009532 -0.047552
+v 0.076684 0.000174 -0.047206
+v 0.077444 0.003521 -0.045282
+v 0.082488 0.012438 -0.047793
+v 0.081294 0.011033 -0.048136
+v 0.080597 0.010163 -0.047836
+v 0.081424 0.010668 -0.047005
+v 0.066127 -0.012213 -0.050351
+v 0.068274 -0.013942 -0.048513
+v 0.069972 -0.010970 -0.047847
+v 0.084108 0.017730 -0.055815
+v 0.084709 0.015965 -0.056823
+v 0.081180 0.010516 -0.065263
+v 0.079873 0.011014 -0.051952
+v 0.079876 0.009640 -0.053065
+v 0.078492 0.023032 -0.059512
+v 0.078889 0.020436 -0.055768
+v 0.078689 0.020670 -0.054375
+v 0.078511 0.025348 -0.054861
+v 0.068980 -0.030009 -0.047933
+v 0.071647 -0.029765 -0.050608
+v 0.071306 -0.028422 -0.051114
+v 0.069419 -0.028441 -0.049275
+v 0.072808 -0.017180 -0.049443
+v 0.070932 -0.017436 -0.051328
+v 0.084354 -0.013674 -0.064103
+v 0.083895 -0.014596 -0.063084
+v 0.085150 -0.010628 -0.064998
+v 0.085262 -0.009162 -0.065920
+v 0.075313 0.003406 -0.054703
+v 0.074899 0.002372 -0.053528
+v 0.077352 0.015579 -0.057026
+v 0.079722 0.017346 -0.058185
+v 0.079798 0.019385 -0.055103
+v 0.077964 0.017908 -0.055238
+v 0.082948 0.011214 -0.067162
+v 0.079848 0.008152 -0.053129
+v 0.080191 0.008321 -0.054001
+v 0.079703 0.008554 -0.052524
+v 0.076345 0.006063 -0.047609
+v 0.078498 0.004890 -0.047659
+v 0.082277 0.012754 -0.049111
+v 0.084841 0.015903 -0.051680
+v 0.084035 0.016499 -0.052892
+v 0.081788 0.013635 -0.049282
+v 0.077984 0.010822 -0.048062
+v 0.012927 -0.032970 -0.123860
+v 0.014716 -0.022682 -0.131122
+v -0.038361 0.048294 0.038288
+v -0.045789 0.046919 0.032581
+v -0.044612 0.041034 0.035953
+v -0.037433 0.041768 0.040126
+v 0.080224 0.020073 -0.047867
+v 0.080296 0.021457 -0.051267
+v 0.081339 0.022329 -0.051691
+v 0.080802 0.020809 -0.048357
+v 0.085602 0.014438 -0.059984
+v 0.088144 0.011152 -0.065226
+v 0.075934 -0.003306 -0.050889
+v 0.073370 -0.006038 -0.051641
+v 0.063911 -0.006517 -0.050781
+v 0.064244 -0.005382 -0.050508
+v 0.064901 -0.005725 -0.051546
+v 0.064825 -0.006605 -0.052005
+v 0.067959 -0.011793 -0.052874
+v 0.070073 -0.012593 -0.052131
+v 0.069139 -0.014048 -0.050629
+v 0.067351 -0.013066 -0.052243
+v 0.063421 -0.009151 -0.051293
+v 0.063678 -0.007708 -0.050992
+v 0.064434 -0.008174 -0.052482
+v 0.064048 -0.009684 -0.053181
+v 0.083446 0.018313 -0.054158
+v 0.084958 0.018063 -0.056368
+v 0.090058 0.009806 -0.064022
+v 0.090311 0.007792 -0.065243
+v 0.087656 0.014867 -0.062397
+v 0.088019 0.013097 -0.063743
+v 0.073244 -0.011353 -0.042045
+v 0.074269 -0.011177 -0.043623
+v 0.073953 -0.012763 -0.043684
+v 0.091159 0.012333 -0.063736
+v 0.090422 0.015215 -0.065716
+v 0.072216 -0.028473 -0.039665
+v 0.071232 -0.030079 -0.038538
+v 0.045232 -0.100767 -0.023832
+v 0.054527 -0.095484 -0.036558
+v 0.053675 -0.105559 -0.036198
+v 0.044602 -0.110750 -0.024078
+v 0.081349 0.015596 -0.046770
+v 0.079765 0.018408 -0.046951
+v 0.063377 -0.011117 -0.051660
+v 0.063360 -0.011264 -0.051750
+v 0.065971 -0.013006 -0.051511
+v 0.068047 -0.007202 -0.048861
+v 0.064458 -0.006790 -0.050045
+v 0.068691 -0.010371 -0.052655
+v 0.068269 -0.005268 -0.050974
+v 0.067950 -0.004068 -0.049772
+v 0.068702 -0.016859 -0.047965
+v 0.069580 -0.017844 -0.049811
+v 0.071159 -0.018309 -0.046789
+v 0.064408 -0.011774 -0.053338
+v 0.076143 0.004892 -0.052852
+v 0.071394 -0.010062 -0.052362
+v 0.072356 -0.012184 -0.054446
+v 0.071601 -0.013834 -0.053703
+v 0.070723 -0.011727 -0.052448
+v 0.078195 -0.021835 -0.054530
+v 0.078070 -0.020057 -0.054604
+v 0.070588 -0.016272 -0.056225
+v 0.073751 -0.016741 -0.054600
+v 0.076064 -0.000570 -0.050869
+v 0.077572 0.002306 -0.048571
+v 0.073384 -0.003401 -0.049016
+v 0.071480 -0.007967 -0.051419
+v 0.073050 -0.010260 -0.053764
+v 0.068168 -0.006094 -0.048975
+v 0.071336 -0.005751 -0.047614
+v 0.071095 -0.004722 -0.047882
+v 0.068140 -0.005163 -0.049186
+v 0.081525 0.009793 -0.054300
+v 0.078508 0.016392 -0.045221
+v 0.080510 0.013119 -0.045564
+v 0.078760 0.027611 -0.071328
+v 0.076213 0.019579 -0.076059
+v 0.077567 0.019981 -0.063149
+v 0.069488 -0.008800 -0.052367
+v 0.064520 -0.004884 -0.050560
+v 0.065047 -0.005434 -0.050255
+v 0.084118 -0.017920 -0.061953
+v 0.071608 -0.019098 -0.046417
+v 0.075302 0.004179 -0.048086
+v 0.083111 -0.009355 -0.069045
+v 0.082175 -0.014747 -0.067190
+v 0.074988 0.005590 -0.048296
+v 0.075200 0.006613 -0.048279
+v 0.070401 -0.017809 -0.050765
+v 0.072337 -0.017801 -0.048252
+v 0.075544 0.006881 -0.048025
+v 0.074452 -0.011597 -0.045042
+v 0.085517 0.009717 -0.058973
+v 0.075476 -0.029254 -0.055468
+v 0.073612 -0.030698 -0.052536
+v 0.064898 -0.005827 -0.049891
+v 0.067949 -0.013852 -0.049944
+v 0.068584 -0.006915 -0.051408
+v 0.073438 -0.007824 -0.046791
+v 0.073039 -0.009978 -0.046599
+v 0.074634 -0.009344 -0.044792
+v 0.074673 -0.016137 -0.056261
+v 0.071270 -0.014170 -0.058166
+v 0.072414 -0.013344 -0.060168
+v 0.076556 -0.015055 -0.057520
+v 0.080929 -0.019797 -0.064884
+v 0.079213 -0.024279 -0.061690
+v 0.081515 -0.026324 -0.061775
+v 0.083881 -0.021374 -0.064989
+v 0.079583 0.043721 -0.080362
+v 0.079530 0.033202 -0.064518
+v 0.080322 0.050061 -0.068943
+v 0.082496 0.018808 -0.062746
+v 0.082306 0.016588 -0.064212
+v 0.087593 0.017116 -0.066615
+v 0.078054 0.023329 -0.049560
+v 0.078453 0.020684 -0.052890
+v 0.080199 0.020614 -0.051087
+v 0.087421 -0.004832 -0.070717
+v 0.075642 0.005460 -0.042676
+v 0.077767 0.005602 -0.044473
+v 0.076328 -0.017466 -0.063929
+v 0.077917 -0.016913 -0.055076
+v 0.080337 -0.016133 -0.056759
+v 0.088014 0.018442 -0.056047
+v 0.085971 0.016330 -0.053533
+v 0.010269 -0.112001 0.006173
+v 0.010028 -0.119085 0.001712
+v 0.007448 -0.009164 0.058072
+v 0.008429 -0.009369 0.056860
+v 0.006999 -0.007822 0.055729
+v 0.006664 -0.007798 0.056318
+v 0.007972 -0.017734 0.057767
+v 0.007265 -0.016431 0.057044
+v 0.008475 -0.016481 0.056103
+v 0.009508 -0.017416 0.057005
+v 0.005794 -0.007659 0.055888
+v 0.004738 -0.009507 0.058605
+v 0.005772 -0.009335 0.059183
+v 0.006138 -0.007731 0.056172
+v 0.088256 0.021543 -0.056893
+v 0.011814 -0.016975 0.057583
+v 0.010632 -0.015290 0.056954
+v 0.010924 -0.014792 0.058560
+v 0.013118 -0.015870 0.059171
+v 0.004422 -0.015612 0.065299
+v 0.005490 -0.013582 0.065229
+v 0.004512 -0.014747 0.063213
+v 0.004483 -0.016618 0.062821
+v 0.007515 -0.013207 0.064469
+v 0.009434 -0.013153 0.066334
+v 0.011164 -0.013296 0.064536
+v 0.009011 -0.013705 0.062895
+v 0.004769 -0.013325 0.058850
+v 0.005735 -0.015270 0.060009
+v 0.004064 -0.012530 0.061847
+v 0.010507 -0.012767 0.057121
+v 0.010248 -0.013702 0.056057
+v 0.008479 -0.010371 0.055822
+v 0.008714 -0.010028 0.055652
+v 0.048338 -0.083965 -0.023101
+v 0.055913 -0.081667 -0.037769
+v 0.034508 0.005425 0.037909
+v 0.033735 0.002574 0.038982
+v 0.029569 0.003742 0.039700
+v 0.030483 0.006205 0.038719
+v 0.074969 -0.007348 -0.043897
+v 0.074177 -0.006607 -0.042924
+v 0.082298 0.014030 -0.065633
+v 0.086484 0.016061 -0.061934
+v 0.082825 0.020348 -0.060904
+v 0.082321 0.021882 -0.056278
+v 0.080870 0.020588 -0.055611
+v 0.080976 0.018863 -0.059446
+v 0.089846 0.015901 -0.060613
+v 0.072662 -0.020553 -0.046258
+v 0.083040 0.005866 -0.057206
+v 0.081819 0.022338 -0.053842
+v 0.084959 0.023514 -0.054266
+v 0.087098 -0.004821 -0.066299
+v 0.090425 0.014566 -0.062344
+v 0.089244 0.012941 -0.061178
+v 0.083495 0.014392 -0.048597
+v 0.074757 -0.010762 -0.061255
+v 0.080503 -0.010864 -0.059083
+v 0.078684 -0.013536 -0.058404
+v -0.090455 0.013007 -0.070134
+v -0.090657 0.010518 -0.071377
+v -0.091150 0.008701 -0.069535
+v -0.090862 0.010796 -0.068158
+v -0.086695 -0.004277 -0.069972
+v -0.087029 0.002694 -0.070952
+v -0.088128 0.005212 -0.071476
+v -0.088551 0.002987 -0.070749
+v 0.073986 -0.012699 -0.061033
+v 0.074505 0.001241 -0.053236
+v 0.070967 -0.015509 -0.036939
+v 0.078302 -0.026306 -0.051633
+v -0.086732 -0.014494 -0.066942
+v -0.085461 -0.013899 -0.066930
+v -0.086713 -0.009336 -0.068431
+v -0.088425 -0.009757 -0.068762
+v 0.079119 -0.025138 -0.054035
+v 0.029121 -0.032455 -0.118238
+v 0.032721 -0.021359 -0.124716
+v 0.089479 -0.005215 -0.069386
+v 0.083994 0.018482 -0.055569
+v 0.074569 -0.020995 -0.047386
+v 0.074946 -0.026215 -0.045160
+v 0.073402 -0.025526 -0.043567
+v 0.080038 0.008798 -0.053743
+v 0.075513 -0.016029 -0.063835
+v 0.075281 -0.013757 -0.065057
+v 0.065080 -0.005107 -0.051256
+v 0.078294 -0.022471 -0.061207
+v 0.074865 -0.027361 -0.055384
+v 0.058234 -0.078356 -0.052091
+v 0.055062 -0.075060 -0.068165
+v -0.073839 -0.034344 -0.042773
+v -0.076261 -0.031521 -0.045365
+v -0.076428 -0.034498 -0.046748
+v -0.072078 -0.035925 -0.045107
+v 0.055991 -0.086848 -0.065972
+v 0.058217 -0.090980 -0.050820
+v 0.071715 -0.001906 -0.037779
+v 0.072251 -0.008783 -0.039961
+v 0.071849 -0.010016 -0.038384
+v 0.024311 -0.102517 0.000204
+v 0.021195 -0.114800 -0.003790
+v 0.070362 -0.014877 -0.033404
+v 0.069686 -0.015482 -0.021757
+v 0.071166 0.000277 -0.022161
+v 0.070602 -0.005046 -0.032638
+v 0.059838 0.030892 0.014455
+v 0.061620 0.031556 0.009911
+v 0.062182 0.022094 0.010058
+v 0.060067 0.023262 0.013937
+v 0.020309 0.087364 0.031087
+v 0.010264 0.088454 0.033368
+v 0.010081 0.095507 0.027294
+v 0.020364 0.094212 0.024737
+v 0.034620 -0.013000 -0.128756
+v 0.036509 -0.002438 -0.133296
+v 0.016850 -0.005239 -0.140359
+v 0.015711 -0.014997 -0.135333
+v 0.020476 0.111611 0.002285
+v 0.010010 0.113156 0.004985
+v 0.010136 0.118334 -0.005071
+v -0.069116 -0.034538 -0.046136
+v -0.072884 -0.034955 -0.050661
+v -0.072426 -0.031689 -0.051917
+v -0.069401 -0.031796 -0.048504
+v 0.020780 0.116548 -0.007436
+v 0.053145 0.003150 0.028304
+v 0.055244 -0.002650 0.028764
+v 0.051492 -0.003130 0.032418
+v 0.049857 0.002723 0.031290
+v 0.074892 -0.030651 -0.042369
+v 0.073789 -0.029568 -0.041185
+v -0.085848 0.021802 -0.057040
+v -0.086231 0.018130 -0.056369
+v -0.085004 0.017011 -0.054348
+v -0.084381 0.020610 -0.054623
+v -0.072913 -0.009044 -0.067149
+v -0.071343 -0.015655 -0.065586
+v -0.077211 -0.010868 -0.068081
+v -0.077546 -0.007984 -0.068356
+v 0.010319 0.122802 -0.016856
+v 0.021218 0.120859 -0.018814
+v 0.011836 0.012785 0.044638
+v -0.084422 0.012308 -0.063511
+v -0.084873 0.012902 -0.066401
+v -0.085447 0.010894 -0.067602
+v -0.084710 0.009616 -0.065364
+v -0.087451 0.014536 -0.071088
+v -0.087630 0.011752 -0.072539
+v -0.072544 -0.025421 -0.041998
+v -0.071566 -0.022011 -0.041247
+v -0.071882 -0.020430 -0.042728
+v -0.072349 -0.021425 -0.044256
+v -0.073669 -0.009188 -0.058531
+v -0.075251 -0.004696 -0.055878
+v -0.083209 -0.024868 -0.062164
+v -0.083164 -0.023037 -0.061208
+v -0.085338 -0.019011 -0.064282
+v -0.085323 -0.020364 -0.065759
+v 0.012190 0.014623 0.041867
+v 0.015894 0.011092 0.040734
+v 0.015510 0.009159 0.042935
+v 0.008164 0.061162 0.045362
+v 0.009088 0.068856 0.042580
+v 0.018071 0.068745 0.041205
+v 0.016665 0.061166 0.044472
+v 0.020595 0.008600 0.039783
+v 0.019715 0.006650 0.041325
+v 0.010017 0.101918 0.020586
+v 0.020431 0.100590 0.017809
+v 0.020651 0.127386 -0.069060
+v -0.080095 -0.020908 -0.059482
+v -0.082390 -0.018058 -0.062741
+v -0.083292 -0.017363 -0.064489
+v -0.082494 -0.019574 -0.062491
+v 0.021113 0.127532 -0.060582
+v 0.009937 0.130162 -0.060975
+v -0.086262 0.014784 -0.064558
+v -0.085037 0.017046 -0.059714
+v -0.086548 0.015939 -0.058353
+v -0.088101 0.013891 -0.062531
+v 0.011022 0.129384 -0.073557
+v 0.067875 0.000756 -0.093117
+v 0.070776 -0.000809 -0.077150
+v 0.073845 0.011151 -0.078646
+v 0.072147 0.017227 -0.095696
+v 0.035040 -0.095715 -0.009784
+v 0.032846 -0.108207 -0.013028
+v -0.079023 0.003583 -0.067504
+v -0.077395 0.008472 -0.065225
+v -0.076718 0.007807 -0.065152
+v -0.077985 0.003605 -0.067370
+v -0.086934 0.012056 -0.069037
+v -0.087433 0.009883 -0.070278
+v -0.085955 0.008600 -0.068998
+v -0.077148 0.009436 -0.052376
+v -0.078939 0.010432 -0.054369
+v -0.078804 0.008973 -0.055296
+v -0.078642 0.007742 -0.054536
+v -0.075840 -0.035325 -0.050398
+v -0.069941 -0.024240 -0.059052
+v -0.071446 -0.023784 -0.060832
+v -0.074124 -0.019632 -0.063748
+v -0.072716 -0.018518 -0.064048
+v 0.053301 0.052911 0.021642
+v 0.055213 0.057998 0.017300
+v 0.058497 0.051058 0.013604
+v 0.056390 0.046944 0.018339
+v 0.006636 0.048710 0.048658
+v 0.014153 0.048641 0.048433
+v 0.012295 0.042491 0.048626
+v 0.005963 0.042432 0.049043
+v -0.069364 -0.032683 -0.040445
+v -0.072570 -0.032349 -0.040797
+v -0.073082 -0.033403 -0.041929
+v -0.069991 -0.034503 -0.042315
+v 0.060086 0.015749 0.014645
+v 0.062563 0.012963 0.011830
+v 0.061692 0.004869 0.016954
+v 0.058788 0.008628 0.018886
+v 0.055643 0.004270 0.025095
+v -0.079622 0.013760 -0.054633
+v -0.081332 0.013125 -0.056479
+v -0.079766 0.012323 -0.055944
+v -0.078784 0.012700 -0.054500
+v 0.058084 -0.001273 0.025073
+v -0.085880 0.000920 -0.069343
+v -0.085098 0.007173 -0.067042
+v 0.019460 0.078064 0.037317
+v 0.009768 0.078678 0.038907
+v 0.060582 0.041795 0.010798
+v 0.058567 0.039272 0.015808
+v 0.009221 0.016191 0.046957
+v 0.009877 0.017641 0.043466
+v 0.007802 0.022138 0.045696
+v -0.085290 -0.007993 -0.064485
+v -0.085161 -0.007656 -0.067342
+v -0.084860 -0.011682 -0.065800
+v -0.084389 -0.011784 -0.063233
+v 0.004373 0.021962 0.051503
+v 0.004588 0.025944 0.049862
+v 0.007507 0.025775 0.046386
+v 0.034148 0.060791 0.037314
+v 0.035705 0.067703 0.033639
+v 0.043152 0.066387 0.028137
+v 0.041607 0.059875 0.031987
+v 0.020432 0.106255 0.010505
+v -0.085338 -0.000252 -0.064196
+v -0.085389 -0.000146 -0.067488
+v 0.009969 0.107659 0.013299
+v 0.005105 0.034973 0.048817
+v 0.009396 0.035144 0.046954
+v 0.008399 0.031570 0.046243
+v -0.088908 0.007982 -0.068419
+v -0.089215 0.002246 -0.069446
+v -0.088305 0.008644 -0.069332
+v -0.075668 0.011340 -0.062327
+v -0.076086 0.014494 -0.066132
+v -0.075379 0.008588 -0.068322
+v -0.075912 0.009069 -0.064154
+v 0.005025 0.031730 0.048806
+v 0.024557 0.004938 0.040422
+v 0.025410 0.007138 0.039350
+v 0.065429 -0.029679 -0.047799
+v 0.067134 -0.031069 -0.046436
+v 0.073542 0.016091 -0.035153
+v 0.071651 0.005489 -0.033387
+v 0.072096 0.013419 -0.023250
+v 0.072849 0.026489 -0.025663
+v 0.075171 0.038680 -0.029357
+v 0.075819 0.046917 -0.034734
+v 0.077081 0.030677 -0.041659
+v 0.075703 0.025142 -0.037662
+v 0.051565 -0.084013 -0.079490
+v 0.048881 -0.073574 -0.082869
+v 0.004596 0.018908 0.054349
+v 0.004618 0.015930 0.057020
+v 0.019351 0.036877 -0.151327
+v -0.072070 -0.016450 -0.043275
+v -0.073162 -0.014246 -0.044759
+v -0.071557 -0.016277 -0.046537
+v -0.070951 -0.018911 -0.045833
+v 0.018683 0.023316 -0.149525
+v 0.039068 0.027293 -0.141049
+v 0.039471 0.040221 -0.142400
+v 0.064626 -0.009877 -0.089307
+v -0.074787 -0.021737 -0.062543
+v -0.072117 -0.024795 -0.060061
+v -0.072605 -0.025451 -0.059413
+v -0.075192 -0.023362 -0.061156
+v 0.068036 -0.009489 -0.074545
+v 0.009588 -0.011925 0.058827
+v 0.006779 -0.007675 0.055035
+v 0.007460 -0.010656 0.056207
+v 0.006860 -0.007699 0.055113
+v 0.010269 -0.014222 0.060882
+v 0.012833 -0.014068 0.062149
+v 0.007812 -0.014785 0.055551
+v 0.006854 -0.014722 0.055983
+v 0.006827 -0.010427 0.056415
+v 0.007075 -0.007646 0.055143
+v 0.001878 -0.017024 0.067591
+v 0.002530 -0.015462 0.070510
+v 0.006201 -0.014112 0.069331
+v 0.004617 -0.015726 0.067316
+v 0.005989 -0.007594 0.055905
+v 0.005082 -0.010139 0.057469
+v 0.004859 -0.011736 0.063152
+v 0.006577 -0.009177 0.058933
+v 0.006395 -0.007737 0.056291
+v 0.006739 -0.011422 0.062258
+v 0.005689 -0.017259 0.060425
+v 0.008192 -0.011551 0.060729
+v -0.082816 0.013891 -0.058412
+v -0.083372 0.009705 -0.059569
+v -0.082046 0.009677 -0.058334
+v -0.076495 -0.020014 -0.052027
+v -0.076875 -0.018759 -0.054092
+v -0.077952 -0.021444 -0.055327
+v -0.077695 -0.023428 -0.053363
+v 0.076526 -0.017806 -0.053666
+v 0.056822 -0.063468 -0.032739
+v 0.059789 -0.048587 -0.040568
+v 0.059431 -0.049453 -0.046236
+v -0.079553 -0.031316 -0.056183
+v -0.077083 -0.031868 -0.057203
+v -0.075166 -0.033866 -0.053672
+v -0.077768 -0.033698 -0.053292
+v -0.072045 -0.020040 -0.045080
+v -0.071752 -0.019004 -0.043088
+v -0.080759 0.003879 -0.069370
+v -0.079905 0.009649 -0.066493
+v 0.057809 -0.065182 -0.042001
+v 0.081496 0.008308 -0.054932
+v 0.066901 0.058193 -0.004685
+v 0.067762 0.069346 -0.014387
+v 0.073043 0.058251 -0.026015
+v -0.084381 0.008202 -0.063369
+v -0.084809 0.006012 -0.064843
+v 0.073084 0.048052 -0.020696
+v 0.068290 0.079193 -0.022592
+v 0.073507 0.068884 -0.033396
+v 0.076622 0.057092 -0.042963
+v 0.078783 0.036545 -0.056223
+v 0.042673 0.017616 0.032895
+v 0.042823 0.017464 0.032995
+v 0.041244 0.017187 0.034298
+v -0.074543 -0.003809 -0.048517
+v -0.072882 -0.003366 -0.050324
+v -0.073198 -0.004183 -0.050210
+v -0.075274 -0.005262 -0.048252
+v 0.041300 0.017300 0.034163
+v 0.042251 0.017845 0.032321
+v 0.040713 0.017415 0.033400
+v -0.074569 -0.004258 -0.046978
+v -0.075092 -0.005820 -0.046827
+v -0.074411 -0.005878 -0.045994
+v -0.074060 -0.005068 -0.045826
+v 0.043228 0.018168 0.031362
+v 0.043697 0.017892 0.031837
+v 0.043919 0.017678 0.031914
+v 0.024929 0.018630 0.036331
+v 0.025702 0.018987 0.035831
+v 0.028363 0.017983 0.036001
+v 0.027618 0.017945 0.037001
+v 0.024679 0.018389 0.036708
+v 0.027550 0.017665 0.037366
+v -0.072170 -0.003124 -0.050861
+v -0.072573 -0.003419 -0.050568
+v -0.072740 -0.002906 -0.050630
+v -0.075929 -0.000355 -0.062300
+v -0.075962 -0.007711 -0.063969
+v -0.075185 -0.008996 -0.063874
+v -0.076455 -0.003291 -0.061555
+v 0.028879 0.020783 0.030173
+v 0.028346 0.021082 0.030059
+v -0.085625 -0.006615 -0.068922
+v 0.029715 0.020413 0.030301
+v 0.023132 0.020493 0.034063
+v 0.023141 0.021777 0.034040
+v -0.082265 0.017125 -0.050626
+v -0.081451 0.019857 -0.050345
+v -0.083745 0.022511 -0.054252
+v 0.028047 0.022051 0.030037
+v 0.028040 0.021305 0.029975
+v 0.023459 0.020128 0.034317
+v -0.070889 -0.017298 -0.056615
+v -0.074551 -0.017019 -0.054075
+v -0.073869 -0.017072 -0.052349
+v -0.071526 -0.017409 -0.054723
+v -0.072511 -0.012238 -0.049444
+v -0.070183 -0.015274 -0.049287
+v -0.073653 -0.013834 -0.047305
+v -0.074486 0.001457 -0.046768
+v -0.073916 -0.002160 -0.049469
+v -0.074113 -0.003125 -0.048144
+v -0.074131 -0.002852 -0.046801
+v -0.086761 -0.015155 -0.068354
+v -0.088025 -0.009961 -0.070473
+v -0.086475 -0.010142 -0.072213
+v -0.085325 -0.015764 -0.070067
+v 0.021787 0.019785 0.034341
+v 0.020794 0.020068 0.033858
+v -0.070723 -0.028956 -0.039399
+v -0.065970 -0.031006 -0.042780
+v -0.069851 -0.025995 -0.039477
+v -0.089003 0.019183 -0.065045
+v -0.086998 0.020917 -0.065261
+v -0.087250 0.019047 -0.067533
+v -0.089693 0.017389 -0.067114
+v -0.074329 0.002540 -0.054620
+v -0.073899 0.000636 -0.054485
+v -0.074342 0.001644 -0.053089
+v -0.074284 0.003790 -0.053095
+v 0.020816 0.021774 0.034064
+v 0.032809 0.016630 0.037364
+v 0.032998 0.015871 0.037400
+v 0.028212 0.016239 0.037646
+v 0.027826 0.017096 0.037567
+v 0.032628 0.017070 0.037233
+v 0.036101 0.017125 0.036298
+v 0.036161 0.016685 0.036405
+v 0.036269 0.015928 0.036387
+v -0.052561 -0.031744 -0.096723
+v -0.060372 -0.031983 -0.078034
+v -0.064457 -0.018141 -0.086516
+v -0.057469 -0.018210 -0.102212
+v -0.080394 -0.028299 -0.055452
+v -0.079752 -0.030404 -0.052851
+v -0.076363 0.008809 -0.045327
+v -0.075687 0.010412 -0.044027
+v -0.076343 0.015907 -0.045599
+v -0.077263 0.012912 -0.045880
+v 0.024851 0.017190 0.037405
+v 0.024761 0.018045 0.037097
+v 0.036290 0.015256 0.036381
+v 0.033223 0.015205 0.037331
+v -0.088341 0.004362 -0.074342
+v -0.090679 0.003624 -0.072893
+v 0.028407 0.015572 0.037619
+v 0.024822 0.016756 0.037491
+v 0.021986 0.018019 0.036803
+v 0.023161 0.018211 0.036874
+v 0.033275 0.014424 0.037178
+v 0.033211 0.013604 0.037097
+v 0.028308 0.013982 0.037611
+v 0.028419 0.014800 0.037581
+v -0.079067 0.013911 -0.053101
+v -0.080681 0.016669 -0.055997
+v -0.079519 0.014543 -0.054949
+v -0.078590 0.013185 -0.053343
+v -0.079030 0.005823 -0.050065
+v -0.079592 0.007092 -0.050068
+v -0.077759 0.007967 -0.050280
+v -0.076864 0.006670 -0.050472
+v 0.036335 0.014486 0.036107
+v 0.036312 0.013619 0.035996
+v -0.075034 0.015201 -0.042898
+v -0.076028 0.020204 -0.046201
+v 0.045085 0.017732 0.030180
+v 0.045633 0.018215 0.029278
+v 0.045584 0.017662 0.029343
+v -0.067879 -0.024517 -0.058407
+v 0.045217 0.017257 0.029817
+v 0.047819 0.015152 0.026466
+v 0.048857 0.016534 0.025690
+v 0.050426 0.015979 0.024127
+v -0.078225 0.007726 -0.047387
+v -0.078957 0.010284 -0.047531
+v -0.080094 0.008835 -0.049884
+v -0.070479 -0.021274 -0.039291
+v -0.071341 -0.019527 -0.041409
+v -0.078290 0.012833 -0.053111
+v -0.077702 0.011687 -0.052953
+v -0.076416 0.007843 -0.051325
+v -0.077295 0.006758 -0.053484
+v 0.049236 0.014421 0.025389
+v -0.081150 0.000523 -0.060543
+v -0.081872 -0.007475 -0.061941
+v 0.057536 0.005946 0.021759
+v 0.051610 0.011625 0.024770
+v 0.053022 0.013121 0.022674
+v 0.044552 0.017539 0.030979
+v 0.043894 0.017281 0.031910
+v 0.044531 0.017856 0.030997
+v 0.045029 0.018170 0.030064
+v 0.044686 0.016890 0.030646
+v 0.043960 0.016639 0.031697
+v 0.042855 0.017131 0.032985
+v 0.042883 0.016417 0.032877
+v 0.041243 0.016137 0.034231
+v 0.041174 0.016877 0.034306
+v 0.039053 0.016746 0.035400
+v -0.067421 -0.017887 -0.072762
+v -0.063592 -0.030892 -0.058219
+v 0.039382 0.017142 0.035251
+v 0.039134 0.015970 0.035324
+v 0.041364 0.015481 0.033990
+v 0.039246 0.015308 0.035222
+v 0.042916 0.015743 0.032499
+v 0.044052 0.016076 0.031249
+v 0.044833 0.016441 0.030125
+v 0.045530 0.016827 0.029301
+v 0.046024 0.017465 0.028735
+v 0.045163 0.015910 0.029485
+v 0.045803 0.015201 0.028636
+v 0.044542 0.014704 0.030036
+v 0.044243 0.015494 0.030726
+v 0.046060 0.016414 0.028552
+v 0.046868 0.015817 0.027537
+v -0.076595 0.008396 -0.050581
+v -0.076748 0.008957 -0.050768
+v -0.076299 0.008162 -0.050492
+v -0.075822 0.007290 -0.050918
+v 0.043130 0.015152 0.032034
+v 0.043195 0.014408 0.031550
+v 0.041372 0.014034 0.033180
+v 0.041414 0.014801 0.033641
+v -0.078772 -0.032289 -0.050312
+v -0.080739 0.006559 -0.058434
+v -0.078490 0.004462 -0.056434
+v -0.079868 0.006401 -0.056473
+v 0.039304 0.014620 0.034922
+v 0.039374 0.013743 0.034544
+v -0.088518 -0.005262 -0.068929
+v -0.090350 0.002001 -0.069065
+v -0.091628 0.002196 -0.070441
+v -0.089739 -0.005370 -0.070037
+v 0.032580 0.017104 0.037042
+v 0.032254 0.017334 0.035909
+v 0.035734 0.017121 0.035314
+v 0.036173 0.017115 0.036071
+v 0.039451 0.017148 0.035124
+v -0.067399 -0.033394 -0.044625
+v -0.073684 -0.000058 -0.055826
+v -0.074834 -0.001394 -0.055135
+v 0.038708 0.017229 0.034452
+v 0.035584 0.017152 0.034172
+v 0.033913 0.019525 0.030030
+v 0.035261 0.019537 0.029658
+v 0.038318 0.017195 0.033408
+v -0.075977 0.005112 -0.053777
+v -0.075261 0.006388 -0.051240
+v 0.032449 0.017447 0.034690
+v 0.032394 0.019677 0.030295
+v 0.029151 0.018228 0.034800
+v -0.071476 -0.016554 -0.041115
+v 0.030896 0.020017 0.030358
+v 0.026452 0.019021 0.035059
+v 0.034227 0.030160 0.037257
+v 0.034173 0.030362 0.037667
+v 0.039465 0.029696 0.036603
+v 0.038991 0.029357 0.036317
+v 0.049378 0.000858 -0.122737
+v 0.050959 0.015806 -0.128211
+v 0.038427 0.011329 -0.138010
+v 0.041665 0.026887 0.035371
+v 0.041634 0.027890 0.035168
+v 0.044079 0.026632 0.033388
+v 0.043931 0.025668 0.033466
+v 0.041669 0.025795 0.035657
+v -0.085771 0.015540 -0.066346
+v -0.086498 0.013970 -0.067655
+v -0.084711 0.014388 -0.065200
+v -0.084561 0.015461 -0.063709
+v -0.084076 0.017751 -0.062012
+v -0.083522 0.018462 -0.061592
+v -0.085168 0.017043 -0.064713
+v -0.078108 0.005850 -0.054664
+v -0.079156 0.006879 -0.055467
+v 0.043733 0.024504 0.033641
+v 0.038919 0.027053 0.037598
+v -0.076679 -0.021180 -0.050298
+v -0.077236 -0.026555 -0.050302
+v -0.083838 -0.015146 -0.063733
+v -0.083079 -0.014598 -0.061162
+v -0.083894 -0.004486 -0.072841
+v -0.084098 0.004132 -0.072681
+v -0.082479 0.003912 -0.070776
+v -0.082755 -0.004316 -0.071528
+v 0.038948 0.028253 0.037273
+v 0.038866 0.029090 0.036779
+v 0.042649 0.023015 0.029538
+v 0.041900 0.023914 0.030251
+v -0.077579 -0.002330 -0.068329
+v -0.074428 -0.001408 -0.068032
+v -0.073792 -0.004270 -0.045255
+v -0.073202 -0.000583 -0.043133
+v -0.074771 0.004439 -0.053062
+v -0.074904 0.006106 -0.051626
+v -0.078434 -0.019273 -0.065269
+v -0.074710 -0.026136 -0.057669
+v -0.078564 -0.021358 -0.063482
+v -0.070832 -0.026686 -0.055475
+v -0.070549 -0.027605 -0.053127
+v -0.072916 -0.027695 -0.054821
+v 0.042565 0.023708 0.031665
+v 0.043581 0.022953 0.030395
+v 0.043348 0.022366 0.028929
+v 0.044251 0.022267 0.029486
+v 0.038057 0.022248 0.027394
+v 0.037692 0.022583 0.027704
+v 0.037162 0.023007 0.028141
+v -0.080086 -0.018991 -0.058519
+v -0.081974 -0.017651 -0.061053
+v 0.038272 0.021919 0.027207
+v -0.072775 -0.003576 -0.051476
+v -0.071513 -0.005098 -0.053250
+v -0.070939 -0.003627 -0.051260
+v -0.083939 -0.000067 -0.062118
+v -0.084451 -0.007756 -0.062723
+v -0.079134 0.009297 -0.050726
+v 0.038377 0.021560 0.027122
+v -0.076572 0.000064 -0.050288
+v -0.077229 0.003356 -0.048395
+v -0.082120 0.012183 -0.050696
+v -0.080849 0.010774 -0.051232
+v -0.080021 0.009959 -0.050964
+v -0.080862 0.010475 -0.050004
+v -0.066227 -0.012316 -0.052897
+v -0.068407 -0.014058 -0.051047
+v -0.070088 -0.011107 -0.050451
+v 0.038379 0.021173 0.027132
+v 0.038268 0.020843 0.027295
+v 0.060103 -0.009482 0.022776
+v 0.060148 0.001007 0.021108
+v -0.082974 0.017651 -0.059185
+v -0.083483 0.015889 -0.060213
+v -0.081251 0.010286 -0.068071
+v 0.062416 -0.006796 0.018215
+v 0.044279 0.018156 0.030862
+v -0.079488 0.010699 -0.055253
+v -0.079240 0.009455 -0.056362
+v 0.044815 0.018534 0.029959
+v -0.078230 0.022289 -0.060228
+v -0.078638 0.019733 -0.056852
+v -0.078372 0.020020 -0.055668
+v -0.078100 0.024593 -0.055743
+v 0.043792 0.018440 0.030509
+v -0.069758 -0.030226 -0.049752
+v -0.072323 -0.029973 -0.052581
+v -0.072080 -0.028741 -0.053096
+v -0.070236 -0.028755 -0.051136
+v -0.073292 -0.017225 -0.051684
+v -0.071577 -0.017599 -0.053638
+v 0.044288 0.018844 0.029742
+v -0.084546 -0.013816 -0.066692
+v -0.084087 -0.014694 -0.065607
+v -0.085091 -0.010707 -0.067605
+v -0.085292 -0.009289 -0.068569
+v 0.045377 0.019119 0.029086
+v 0.045718 0.018831 0.029016
+v 0.041615 0.017949 0.031603
+v 0.037118 0.019858 0.028766
+v 0.037628 0.020045 0.028318
+v -0.075064 0.003215 -0.058031
+v -0.074679 0.002288 -0.056851
+v -0.077451 0.015086 -0.058922
+v -0.079509 0.016962 -0.060244
+v -0.079460 0.019009 -0.057025
+v -0.077909 0.017382 -0.056976
+v -0.082920 0.011034 -0.070018
+v 0.042415 0.018431 0.030714
+v 0.040143 0.017535 0.032509
+v 0.036298 0.019673 0.029225
+v 0.037873 0.020301 0.027964
+v 0.038108 0.020526 0.027578
+v -0.079376 0.007949 -0.056318
+v -0.079702 0.008134 -0.057158
+v 0.042750 0.018797 0.030024
+v -0.079171 0.008345 -0.055768
+v 0.043239 0.019249 0.029287
+v 0.044662 0.019511 0.029149
+v -0.076111 0.005789 -0.050761
+v -0.078264 0.004688 -0.050797
+v -0.081696 0.012551 -0.052222
+v -0.084016 0.015846 -0.054740
+v -0.083154 0.016371 -0.056111
+v -0.081177 0.013391 -0.052495
+v 0.043742 0.020415 0.028393
+v 0.043803 0.021057 0.028397
+v 0.044803 0.020097 0.028897
+v -0.077495 0.010551 -0.051299
+v 0.044883 0.020830 0.028802
+v 0.043573 0.019861 0.028680
+v -0.015498 -0.033031 -0.123928
+v -0.017202 -0.022749 -0.131217
+v 0.043670 0.021720 0.028555
+v 0.044708 0.021466 0.029048
+v 0.045942 0.018354 0.028849
+v 0.046028 0.019298 0.028650
+v 0.046477 0.020413 0.028039
+v 0.046403 0.018420 0.028312
+v -0.080167 0.019608 -0.049813
+v -0.080152 0.020997 -0.053205
+v -0.081194 0.021902 -0.053788
+v -0.080696 0.020368 -0.050399
+v -0.084305 0.014386 -0.063363
+v 0.047071 0.018507 0.027551
+v 0.046627 0.017200 0.027875
+v 0.047467 0.021249 0.027550
+v 0.048818 0.021776 0.026698
+v -0.087632 0.011092 -0.068275
+v -0.075902 -0.003353 -0.053980
+v -0.073266 -0.006017 -0.054723
+v 0.048251 0.018574 0.026473
+v 0.047541 0.016986 0.026842
+v 0.045736 0.019844 0.028700
+v -0.063801 -0.006582 -0.053569
+v -0.064135 -0.005445 -0.053380
+v -0.064786 -0.005771 -0.054403
+v -0.064704 -0.006641 -0.054775
+v 0.045814 0.020514 0.028423
+v 0.046928 0.022414 0.028805
+v -0.067981 -0.011849 -0.055446
+v -0.070190 -0.012669 -0.054731
+v -0.069277 -0.014123 -0.053150
+v -0.067451 -0.013120 -0.054730
+v 0.047708 0.023407 0.028531
+v 0.046249 0.021686 0.029046
+v 0.045740 0.022568 0.030258
+v 0.046262 0.023430 0.030167
+v -0.063314 -0.009184 -0.053889
+v -0.063566 -0.007758 -0.053697
+v -0.064316 -0.008210 -0.055148
+v -0.063985 -0.009744 -0.055619
+v 0.046845 0.024323 0.029943
+v 0.045371 0.024416 0.031722
+v -0.082367 0.018215 -0.057525
+v -0.083924 0.018025 -0.059635
+v 0.045759 0.025395 0.031617
+v 0.044963 0.023439 0.031768
+v 0.045630 0.021361 0.029073
+v -0.089653 0.009766 -0.066973
+v -0.089979 0.007757 -0.068073
+v -0.086886 0.014817 -0.065535
+v -0.087385 0.013040 -0.066834
+v 0.045140 0.022135 0.030021
+v 0.044366 0.022999 0.031296
+v 0.043303 0.023657 0.033194
+v -0.073405 -0.011653 -0.044545
+v -0.074412 -0.011425 -0.046180
+v -0.074084 -0.013003 -0.046193
+v -0.090776 0.012277 -0.066679
+v -0.090136 0.015138 -0.068575
+v -0.072928 -0.028627 -0.041323
+v -0.072025 -0.030227 -0.040021
+v -0.045716 -0.101213 -0.024414
+v -0.055259 -0.096066 -0.037108
+v -0.054697 -0.106307 -0.036760
+v -0.045340 -0.111288 -0.024573
+v -0.081202 0.015287 -0.049197
+v -0.079768 0.018005 -0.049020
+v 0.041462 0.024751 0.035280
+v 0.040634 0.024657 0.033481
+v 0.038691 0.026026 0.037155
+v -0.063452 -0.011227 -0.054234
+v -0.063428 -0.011331 -0.054242
+v -0.066069 -0.013059 -0.054003
+v -0.068169 -0.007374 -0.051622
+v -0.064555 -0.006961 -0.052818
+v 0.037878 0.025451 0.035137
+v 0.034673 0.031313 0.038434
+v 0.040413 0.030846 0.037160
+v 0.037378 0.025591 0.033254
+v 0.034906 0.023764 0.029537
+v 0.033328 0.023877 0.030077
+v 0.034215 0.025644 0.034284
+v -0.068601 -0.010378 -0.055433
+v 0.023714 0.022808 0.034581
+v 0.024665 0.023777 0.035201
+v -0.068177 -0.005235 -0.054036
+v -0.068049 -0.004159 -0.052762
+v -0.069157 -0.017105 -0.050238
+v -0.070142 -0.018021 -0.052081
+v -0.071774 -0.018497 -0.048976
+v -0.064361 -0.011744 -0.055635
+v -0.075959 0.004580 -0.056090
+v 0.028797 0.022916 0.030537
+v 0.028322 0.022523 0.030286
+v 0.022155 0.022788 0.035207
+v 0.023542 0.023855 0.036049
+v -0.071315 -0.010031 -0.055320
+v -0.072367 -0.012225 -0.057302
+v -0.071665 -0.013890 -0.056377
+v -0.070670 -0.011723 -0.055212
+v -0.078551 -0.021842 -0.056835
+v -0.078575 -0.020072 -0.056831
+v -0.070525 -0.016413 -0.058965
+v -0.074114 -0.016780 -0.057077
+v -0.076014 -0.000637 -0.054060
+v -0.077439 0.002156 -0.051717
+v -0.073473 -0.003513 -0.052025
+v -0.071419 -0.007931 -0.054439
+v -0.072993 -0.010228 -0.056760
+v -0.068275 -0.006257 -0.051816
+v -0.071449 -0.005910 -0.050437
+v -0.071199 -0.004857 -0.050762
+v -0.068242 -0.005303 -0.052104
+v -0.080782 0.009637 -0.057625
+v -0.078291 0.016145 -0.047149
+v -0.079913 0.012950 -0.048137
+v -0.079201 0.026978 -0.071956
+v -0.077243 0.019040 -0.076973
+v -0.077648 0.019332 -0.064054
+v -0.069392 -0.008746 -0.055349
+v -0.064424 -0.004929 -0.053503
+v -0.065143 -0.005594 -0.053171
+v -0.084171 -0.018016 -0.064526
+v -0.072404 -0.019264 -0.048481
+v -0.075136 0.003933 -0.051205
+v -0.083463 -0.009652 -0.071753
+v -0.082517 -0.015057 -0.069873
+v -0.074862 0.005293 -0.051438
+v -0.075021 0.006290 -0.051444
+v -0.070989 -0.017977 -0.053069
+v -0.072817 -0.017865 -0.050453
+v -0.075292 0.006559 -0.051194
+v -0.074596 -0.011805 -0.047580
+v -0.084390 0.009585 -0.062232
+v 0.028315 0.025140 0.035396
+v 0.030453 0.023536 0.030711
+v -0.075726 -0.029310 -0.057700
+v -0.073996 -0.030727 -0.054631
+v 0.029467 0.023247 0.030693
+v -0.064994 -0.005999 -0.052723
+v 0.026504 0.024486 0.035256
+v -0.068068 -0.013924 -0.052465
+v -0.068507 -0.006924 -0.054336
+v 0.027639 0.025650 0.037312
+v 0.025234 0.024725 0.036672
+v 0.034469 0.025950 0.036359
+v -0.073586 -0.008045 -0.049461
+v -0.073166 -0.010176 -0.049205
+v -0.074805 -0.009581 -0.047385
+v 0.020997 0.072596 -0.148073
+v 0.020430 0.057043 -0.151426
+v -0.075107 -0.016113 -0.058734
+v -0.071506 -0.014327 -0.060918
+v -0.072878 -0.013506 -0.062866
+v -0.077013 -0.014934 -0.059925
+v -0.081175 -0.020050 -0.067461
+v -0.079458 -0.024446 -0.064068
+v -0.081398 -0.026168 -0.064049
+v -0.083799 -0.021316 -0.067391
+v 0.039962 0.058835 -0.142481
+v 0.040013 0.072322 -0.139769
+v -0.079992 0.043175 -0.080749
+v -0.079641 0.032498 -0.065067
+v -0.080456 0.049430 -0.069299
+v -0.082348 0.018489 -0.065205
+v -0.082209 0.016324 -0.066807
+v -0.087292 0.016982 -0.069433
+v -0.077804 0.022649 -0.050825
+v -0.078194 0.020118 -0.054353
+v -0.080033 0.020111 -0.052808
+v -0.087261 -0.004904 -0.073686
+v 0.039845 0.024866 0.031845
+v 0.036175 0.023450 0.028842
+v 0.049955 0.073388 -0.131951
+v 0.049208 0.081906 -0.129041
+v 0.040550 0.083877 -0.134631
+v 0.035233 -0.061220 -0.100590
+v 0.025437 -0.072819 -0.107254
+v -0.075468 0.005196 -0.045625
+v -0.077491 0.005384 -0.047603
+v 0.020102 0.022508 0.036480
+v -0.077570 -0.018050 -0.066117
+v 0.021944 0.023849 0.036986
+v -0.078515 -0.016891 -0.057248
+v -0.080788 -0.016084 -0.058962
+v -0.087608 0.018246 -0.058734
+v -0.085135 0.016247 -0.056522
+v 0.018865 0.021312 0.036060
+v 0.018176 0.021155 0.036097
+v 0.019360 0.022614 0.036450
+v 0.020981 0.024083 0.036969
+v 0.024148 0.024996 0.037506
+v 0.023434 0.025445 0.037929
+v 0.026980 0.026859 0.038970
+v 0.027090 0.026006 0.038589
+v 0.034561 0.026818 0.038321
+v 0.034644 0.027844 0.038771
+v 0.026463 0.027809 0.038481
+v 0.022906 0.026042 0.037406
+v 0.025826 0.028822 0.037982
+v 0.022715 0.027087 0.037158
+v 0.034498 0.029793 0.037684
+v 0.034707 0.028978 0.038432
+v 0.021830 0.018893 0.036345
+v 0.019522 0.019276 0.036132
+v 0.022377 0.019188 0.036210
+v 0.019886 0.019551 0.035946
+v 0.023576 0.019555 0.035326
+v 0.019023 0.020063 0.035744
+v 0.018290 0.019879 0.035904
+v -0.010565 -0.111937 0.005848
+v -0.010468 -0.119048 0.001358
+v 0.024443 0.019680 0.034839
+v 0.018609 0.022680 0.035990
+v 0.017232 0.020984 0.035913
+v 0.017471 0.023217 0.035193
+v 0.016478 0.021117 0.035398
+v 0.019933 0.025646 0.035858
+v 0.020269 0.024398 0.036152
+v 0.017287 0.019747 0.035767
+v 0.017944 0.019205 0.035980
+v 0.016527 0.019717 0.035562
+v 0.017202 0.018991 0.035843
+v 0.024574 0.016093 0.037550
+v 0.024308 0.015435 0.037539
+v 0.021206 0.017507 0.036878
+v 0.020579 0.017052 0.036756
+v 0.032974 0.012736 0.036935
+v 0.036191 0.012711 0.035979
+v 0.032619 0.011595 0.036994
+v -0.007530 -0.009113 0.057933
+v -0.008513 -0.009309 0.056717
+v -0.007088 -0.007775 0.055601
+v -0.006750 -0.007751 0.056192
+v 0.036032 0.011540 0.035901
+v 0.027496 0.012113 0.037791
+v 0.028031 0.013084 0.037668
+v 0.032515 -0.116380 -0.014536
+v 0.046591 0.014332 0.027920
+v 0.047635 0.013322 0.027276
+v 0.045996 0.012533 0.029085
+v 0.045085 0.013830 0.029412
+v 0.043401 0.013429 0.031044
+v -0.008120 -0.017690 0.057590
+v -0.007430 -0.016382 0.056904
+v -0.008638 -0.016420 0.055962
+v -0.009648 -0.017356 0.056813
+v 0.043861 0.012053 0.030976
+v 0.041459 0.011760 0.032794
+v 0.041301 0.013035 0.032839
+v 0.039340 0.012843 0.034335
+v 0.039154 0.011487 0.034385
+v 0.049858 0.018152 0.025075
+v 0.050892 0.021642 0.024630
+v 0.051472 0.017551 0.023430
+v 0.053042 0.021282 0.022451
+v 0.015684 0.024282 0.035326
+v -0.005893 -0.007621 0.055766
+v -0.004840 -0.009477 0.058499
+v -0.005873 -0.009304 0.059059
+v -0.006232 -0.007692 0.056053
+v 0.015320 0.021463 0.035078
+v 0.014063 0.025053 0.036278
+v 0.013873 0.022113 0.035858
+v 0.015483 0.027995 0.037280
+v 0.017371 0.027161 0.036071
+v 0.015670 0.019804 0.035476
+v 0.016433 0.018690 0.035618
+v 0.014551 0.019717 0.035761
+v 0.015533 0.018217 0.035965
+v 0.023768 0.014644 0.037467
+v 0.023211 0.013717 0.037510
+v 0.019937 0.016461 0.036876
+v 0.018999 0.015630 0.036926
+v 0.047440 -0.010238 -0.117895
+v 0.051839 0.031575 -0.130880
+v 0.047705 0.034794 0.034385
+v 0.045402 0.031956 0.035222
+v 0.041898 0.033178 0.037489
+v 0.043727 0.036589 0.037577
+v 0.050284 0.038718 0.032108
+v 0.052573 0.035213 0.029260
+v 0.050281 0.032424 0.031419
+v 0.048006 0.030223 0.032848
+v 0.051768 0.024576 0.025412
+v 0.050037 0.024545 0.027050
+v 0.050109 0.026352 0.028396
+v 0.051803 0.027025 0.026880
+v 0.053880 0.024618 0.023184
+v 0.054187 0.027908 0.024814
+v 0.051298 0.029507 0.029017
+v 0.049265 0.028196 0.030460
+v 0.053768 0.031174 0.027074
+v 0.023694 0.033001 0.039632
+v 0.024228 0.031447 0.038416
+v 0.021267 0.029212 0.037194
+v 0.019901 0.029863 0.037814
+v 0.022538 0.034356 0.040584
+v 0.017891 0.030761 0.038611
+v 0.036909 0.037103 0.040506
+v 0.035906 0.034084 0.039466
+v 0.025432 0.029493 0.037484
+v 0.023403 0.028168 0.037345
+v 0.025075 0.029656 0.037396
+v 0.022765 0.028094 0.037135
+v 0.058174 0.092634 -0.111268
+v 0.060389 0.098530 -0.098735
+v 0.043972 0.109368 -0.105129
+v 0.043028 0.102030 -0.117443
+v 0.041683 0.028204 0.035351
+v 0.042362 0.028710 0.035167
+v 0.044796 0.027601 0.033376
+v 0.044202 0.027127 0.033571
+v 0.046080 0.026054 0.031579
+v 0.046687 0.026550 0.031480
+v 0.047832 0.025432 0.029772
+v 0.047301 0.025107 0.029938
+v 0.047919 0.024296 0.028732
+v 0.048423 0.024323 0.028295
+v 0.022836 0.029028 0.037361
+v 0.024891 0.030140 0.037847
+v 0.051260 0.061588 -0.133204
+v 0.045007 0.073614 0.023091
+v 0.051955 0.070150 0.016472
+v 0.049991 0.063741 0.022156
+v 0.043401 0.029803 0.035555
+v 0.045987 0.028572 0.033468
+v 0.047723 0.027256 0.031281
+v 0.048799 0.025880 0.029452
+v 0.049202 0.024889 0.028231
+v 0.071662 -0.012835 -0.038439
+v 0.071868 -0.012742 -0.040079
+v 0.078104 -0.002610 -0.065850
+v 0.075875 0.003910 -0.064821
+v 0.074683 0.007349 -0.063540
+v 0.080269 0.014430 -0.049387
+v 0.082265 0.017452 -0.052206
+v 0.082293 -0.021243 -0.059063
+v 0.078939 -0.023144 -0.055313
+v -0.088015 0.021319 -0.059449
+v 0.082035 -0.020307 -0.059569
+v 0.088470 0.013361 -0.061450
+v 0.088835 0.011769 -0.062831
+v 0.089744 0.011425 -0.062514
+v 0.040505 -0.031774 -0.108067
+v 0.044967 -0.019078 -0.114004
+v 0.089010 0.010043 -0.064216
+v 0.073543 -0.008124 -0.042284
+v 0.079381 -0.003109 -0.066028
+v 0.081108 -0.003707 -0.067792
+v 0.082069 -0.008808 -0.068068
+v 0.081196 -0.008265 -0.067414
+v 0.081325 -0.013844 -0.066329
+v 0.080526 -0.012980 -0.065852
+v 0.083561 -0.017439 -0.062092
+v 0.083653 -0.013059 -0.064875
+v 0.083296 0.017850 -0.053597
+v 0.083210 0.017079 -0.052852
+v 0.084040 0.017849 -0.054034
+v 0.074388 -0.009250 -0.043664
+v 0.078537 0.012253 -0.049445
+v 0.078299 0.010486 -0.047686
+v 0.077983 0.010936 -0.048162
+v 0.072420 -0.004360 -0.047541
+v 0.072196 -0.003552 -0.047682
+v 0.073583 -0.005819 -0.047202
+v 0.075271 -0.007105 -0.045126
+v 0.069509 -0.014589 -0.047403
+v 0.069134 -0.015295 -0.048957
+v 0.068550 -0.015997 -0.048416
+v 0.085927 0.023551 -0.057229
+v 0.078424 0.009505 -0.047274
+v 0.079631 0.010479 -0.047650
+v 0.080221 0.011593 -0.047988
+v 0.084760 0.017162 -0.054194
+v 0.080720 0.012475 -0.048224
+v 0.079685 0.012564 -0.048408
+v 0.081023 0.014123 -0.049251
+v 0.079017 0.012681 -0.048787
+v 0.074476 -0.012052 -0.060911
+v 0.073467 -0.012115 -0.058923
+v 0.073196 0.007269 -0.038909
+v 0.073815 0.005278 -0.040746
+v 0.077961 -0.023894 -0.053119
+v 0.075535 0.013103 -0.057951
+v 0.076767 0.016118 -0.060110
+v 0.085549 -0.008984 -0.066077
+v 0.078577 -0.015321 -0.056719
+v 0.076466 -0.016272 -0.055436
+v 0.080823 -0.013946 -0.058109
+v 0.082892 -0.011392 -0.059541
+v 0.074964 0.003621 -0.051532
+v 0.084172 0.007835 -0.057367
+v 0.084542 0.005668 -0.058631
+v 0.082790 0.008012 -0.056057
+v 0.078025 0.019380 -0.058735
+v 0.078283 0.019060 -0.056114
+v 0.075098 0.016027 -0.057216
+v 0.078067 0.017191 -0.055456
+v 0.071432 -0.011287 -0.047000
+v 0.068997 -0.016749 -0.050867
+v 0.071144 -0.015510 -0.052202
+v 0.070045 -0.017351 -0.051866
+v 0.071627 -0.016474 -0.053483
+v 0.071875 -0.015237 -0.054936
+v 0.072373 -0.013564 -0.056066
+v 0.073753 -0.011723 -0.055998
+v 0.071232 -0.007279 -0.047509
+v 0.077787 0.019477 -0.046715
+v 0.070753 -0.009064 -0.047593
+v 0.072693 -0.005646 -0.047327
+v 0.072608 -0.007357 -0.047030
+v 0.072106 -0.009234 -0.046846
+v 0.074731 -0.019094 -0.049373
+v 0.074881 -0.017598 -0.051810
+v 0.073708 -0.018506 -0.048671
+v 0.075323 -0.017116 -0.053953
+v 0.072721 -0.018102 -0.048206
+v 0.077207 0.013973 -0.057371
+v 0.078111 0.016818 -0.057384
+v 0.071435 -0.009457 -0.036792
+v 0.064442 -0.001753 0.012850
+v 0.065813 0.007704 0.007009
+v 0.026601 -0.049715 0.035669
+v 0.025823 -0.052675 0.036184
+v 0.057050 -0.012012 0.027204
+v 0.052690 -0.014810 0.032089
+v 0.025343 -0.044526 0.046870
+v 0.024422 -0.045156 0.047046
+v 0.023491 -0.044521 0.047357
+v 0.024344 -0.044380 0.046936
+v 0.022273 -0.043645 0.045915
+v 0.023196 -0.044065 0.044977
+v 0.023331 -0.044045 0.046228
+v 0.022457 -0.043630 0.047033
+v 0.028699 -0.029824 0.047542
+v 0.032453 -0.025685 0.046099
+v 0.035448 -0.030055 0.044883
+v 0.032014 -0.033473 0.046426
+v 0.024778 -0.046643 0.037929
+v 0.024690 -0.045079 0.038278
+v 0.023989 -0.044679 0.041057
+v 0.023895 -0.045385 0.040805
+v 0.024718 -0.047658 0.038006
+v 0.023724 -0.045735 0.040854
+v 0.026555 -0.048011 0.035534
+v 0.026295 -0.045437 0.035686
+v 0.034324 -0.050696 0.042893
+v 0.030729 -0.048566 0.044618
+v 0.031718 -0.045560 0.044925
+v 0.035342 -0.046240 0.043246
+v 0.025189 -0.017866 0.046790
+v 0.029072 -0.021488 0.046633
+v 0.026093 -0.026052 0.047651
+v 0.022037 -0.021571 0.047517
+v 0.018247 -0.017021 0.047888
+v 0.021106 -0.013751 0.046868
+v 0.016213 -0.009059 0.051105
+v 0.016958 -0.012780 0.052414
+v 0.016425 -0.013885 0.055825
+v 0.016709 -0.011644 0.054639
+v 0.014774 -0.018025 0.054450
+v 0.017095 -0.015028 0.050905
+v 0.010823 -0.019385 0.055116
+v -0.011930 -0.016880 0.057384
+v -0.010766 -0.015210 0.056779
+v -0.011018 -0.014702 0.058376
+v -0.013192 -0.015755 0.058955
+v 0.010637 -0.018871 0.056115
+v 0.014904 -0.018399 0.052898
+v -0.004493 -0.015620 0.065181
+v -0.005548 -0.013573 0.065087
+v -0.004687 -0.014750 0.063093
+v -0.004624 -0.016648 0.062725
+v 0.015160 -0.019323 0.050949
+v 0.011344 -0.019969 0.053929
+v 0.010069 -0.018081 0.057000
+v 0.012974 -0.017694 0.056921
+v -0.007542 -0.013179 0.064309
+v -0.009417 -0.013105 0.066176
+v -0.011191 -0.013219 0.064361
+v -0.009048 -0.013644 0.062724
+v -0.004934 -0.013288 0.058743
+v -0.005925 -0.015238 0.059891
+v -0.004217 -0.012512 0.061732
+v -0.010607 -0.012687 0.056961
+v -0.010383 -0.013623 0.055910
+v -0.008571 -0.010313 0.055699
+v -0.008796 -0.009968 0.055510
+v 0.014507 -0.015425 0.058330
+v 0.003909 -0.016620 0.065033
+v 0.005092 -0.015230 0.067021
+v 0.009229 0.014054 0.049980
+v 0.011949 0.010477 0.047128
+v 0.009363 0.011435 0.052237
+v 0.011922 0.008189 0.049266
+v 0.013912 -0.013051 0.061674
+v 0.015280 -0.011240 0.060174
+v 0.011477 -0.011854 0.065370
+v 0.012346 -0.009224 0.065050
+v 0.004742 0.029012 0.049097
+v 0.001925 -0.020387 0.060997
+v 0.002279 -0.021010 0.060542
+v 0.001682 -0.019014 0.062603
+v 0.021956 0.095076 -0.135587
+v 0.022554 0.107349 -0.124424
+v 0.021817 0.120345 -0.101286
+v 0.019608 0.124505 -0.088862
+v 0.005630 -0.037507 0.061352
+v 0.005347 -0.034761 0.061396
+v 0.012811 -0.042559 0.045517
+v 0.007419 -0.042695 0.046840
+v 0.007260 -0.043657 0.049864
+v 0.012676 -0.043861 0.048674
+v 0.016362 -0.043770 0.047071
+v 0.016949 -0.042521 0.043850
+v 0.013009 -0.041075 0.043037
+v 0.007471 -0.041179 0.043962
+v 0.012066 -0.044772 0.054234
+v 0.006840 -0.044419 0.056152
+v 0.006468 -0.042678 0.058691
+v 0.011783 -0.043114 0.056312
+v 0.017907 -0.023490 0.049840
+v 0.013848 -0.022989 0.052342
+v 0.009767 -0.022975 0.055890
+v 0.006572 -0.023171 0.058524
+v 0.005826 -0.021150 0.058713
+v 0.008280 -0.020588 0.056554
+v 0.005432 -0.020329 0.058965
+v 0.007748 -0.019890 0.057074
+v 0.007586 -0.019248 0.057592
+v 0.008009 -0.018368 0.057872
+v 0.006326 -0.018335 0.059100
+v 0.006792 -0.017650 0.058953
+v 0.005339 -0.019623 0.059279
+v 0.004484 0.006164 0.065237
+v 0.004398 0.009578 0.062253
+v 0.009970 -0.033214 0.059494
+v 0.005228 -0.032600 0.060865
+v 0.004287 -0.028484 0.060101
+v 0.008635 -0.028649 0.058784
+v 0.013170 -0.029453 0.056317
+v 0.014917 -0.034879 0.056878
+v 0.002685 -0.021781 0.060211
+v 0.010323 -0.035530 0.059644
+v 0.010715 -0.038069 0.059331
+v 0.015383 -0.037247 0.056893
+v 0.015633 -0.039492 0.056438
+v 0.019601 -0.037236 0.053700
+v 0.018475 -0.031300 0.053418
+v 0.024229 -0.033287 0.050242
+v 0.023514 -0.039642 0.050685
+v 0.031418 -0.042618 0.045790
+v 0.026830 -0.044801 0.046582
+v 0.026758 -0.043618 0.047023
+v 0.026457 -0.042535 0.047835
+v 0.030729 -0.039634 0.046801
+v 0.034830 -0.041914 0.044223
+v 0.033925 -0.037548 0.045291
+v 0.028617 -0.036496 0.048206
+v 0.025594 -0.041251 0.048890
+v 0.025459 -0.043876 0.047350
+v 0.024485 -0.044119 0.047199
+v 0.025110 -0.043341 0.048050
+v 0.024219 -0.043924 0.047801
+v 0.024508 -0.042722 0.048957
+v 0.023662 -0.043707 0.048572
+v 0.022510 -0.043140 0.049831
+v 0.023080 -0.041674 0.050435
+v 0.022586 -0.044531 0.046046
+v 0.021442 -0.044798 0.046926
+v 0.021408 -0.044766 0.048081
+v 0.022476 -0.044631 0.046977
+v 0.023314 -0.044360 0.045298
+v 0.023096 -0.044429 0.046415
+v 0.023590 -0.044391 0.041654
+v 0.022830 -0.044123 0.042256
+v -0.048743 -0.084534 -0.023624
+v -0.056365 -0.082273 -0.038349
+v 0.023684 -0.044163 0.044850
+v 0.023725 -0.044231 0.045782
+v 0.023606 -0.044190 0.046909
+v 0.023488 -0.044263 0.047223
+v 0.022930 -0.044329 0.047876
+v 0.021884 -0.044199 0.048958
+v 0.015697 -0.041591 0.055374
+v 0.019301 -0.041291 0.053223
+v 0.019073 -0.043040 0.052057
+v 0.015661 -0.043808 0.053677
+v 0.018802 -0.044418 0.050748
+v 0.017607 -0.041126 0.041254
+v 0.015671 -0.044976 0.051785
+v 0.020802 -0.042785 0.041316
+v 0.021820 -0.041637 0.038827
+v 0.019907 -0.043853 0.044535
+v 0.024309 -0.044115 0.038932
+v 0.023615 -0.043437 0.039302
+v 0.025718 -0.043716 0.036160
+v 0.024791 -0.042540 0.036881
+v 0.018845 -0.044951 0.049222
+v 0.019560 -0.039439 0.053689
+v 0.011147 -0.040538 0.058089
+v 0.006027 -0.040021 0.060192
+v -0.034201 0.005393 0.036851
+v -0.033518 0.002578 0.038032
+v -0.029368 0.003880 0.039026
+v -0.030184 0.006318 0.037938
+v 0.005244 -0.001540 0.071792
+v 0.004876 0.002685 0.068308
+v 0.022813 -0.043936 0.047497
+v 0.010035 -0.124761 -0.001787
+v -0.075155 -0.007632 -0.046527
+v -0.074396 -0.006922 -0.045541
+v -0.082223 0.013818 -0.068418
+v 0.025849 -0.045908 0.046585
+v 0.024185 -0.049816 0.038247
+v 0.023015 -0.046615 0.041329
+v 0.031358 -0.055089 0.043501
+v 0.026328 -0.058424 0.046216
+v 0.024393 -0.054375 0.047649
+v 0.028376 -0.051774 0.045252
+v 0.021513 -0.048502 0.049814
+v 0.024237 -0.047000 0.047682
+v -0.085607 0.016017 -0.065155
+v 0.022925 -0.045707 0.048414
+v 0.020311 -0.046323 0.050690
+v 0.019509 -0.044586 0.051223
+v 0.022005 -0.044551 0.048720
+v 0.017720 -0.042310 0.050711
+v 0.013388 -0.041515 0.053119
+v 0.013016 -0.045990 0.048265
+v 0.017504 -0.046493 0.045989
+v 0.021781 -0.046579 0.042819
+v 0.021069 -0.043184 0.047122
+v -0.082635 0.020007 -0.063271
+v -0.082233 0.021484 -0.058569
+v -0.080529 0.020251 -0.057752
+v -0.080692 0.018518 -0.061682
+v 0.018208 -0.042341 0.051154
+v 0.021137 -0.043061 0.048341
+v 0.013930 -0.041634 0.054157
+v 0.020927 -0.121402 -0.006922
+v 0.021546 -0.043570 0.048701
+v 0.018780 -0.043034 0.051202
+v 0.014554 -0.042637 0.054630
+v 0.015407 -0.044690 0.054879
+v 0.016233 -0.047056 0.054784
+v 0.017006 -0.049946 0.053894
+v 0.019165 -0.057280 0.051296
+v 0.020624 -0.062257 0.050114
+v 0.012919 -0.058725 0.054406
+v 0.014091 -0.063900 0.054146
+v 0.011937 -0.050986 0.057486
+v 0.011498 -0.047489 0.058773
+v 0.010837 -0.044658 0.058518
+v 0.010210 -0.042341 0.057758
+v 0.003306 -0.023429 0.059868
+v 0.005114 -0.018158 0.060362
+v 0.004025 -0.017533 0.062482
+v 0.007418 -0.013663 0.067817
+v 0.001600 -0.018015 0.064901
+v 0.010068 -0.012058 0.067174
+v 0.008378 -0.012515 0.068693
+v 0.008993 -0.010885 0.069508
+v 0.010140 -0.010276 0.068321
+v 0.006952 -0.013106 0.070020
+v 0.008250 -0.011081 0.070575
+v 0.003058 -0.014295 0.071943
+v 0.004122 -0.010856 0.074073
+v 0.005193 -0.005160 0.073842
+v 0.004624 -0.008574 0.074548
+v 0.009535 -0.009261 0.070058
+v 0.010741 -0.007429 0.068354
+v -0.089415 0.015776 -0.063415
+v 0.010899 -0.004377 0.066249
+v 0.012861 -0.006582 0.062623
+v 0.010465 0.129420 -0.045043
+v 0.009545 -0.041074 0.056954
+v 0.008666 -0.040896 0.055304
+v -0.073719 -0.020665 -0.048160
+v -0.082300 0.005793 -0.060136
+v 0.012891 -0.056553 0.041672
+v 0.012925 -0.051926 0.043957
+v 0.008085 -0.052024 0.045487
+v -0.081725 0.021925 -0.056032
+v -0.084836 0.023160 -0.056626
+v 0.008002 -0.056584 0.043436
+v 0.004693 -0.040474 0.056359
+v -0.086860 -0.004830 -0.068960
+v -0.090023 0.014471 -0.065236
+v -0.088605 0.012888 -0.064198
+v -0.083038 0.014143 -0.051425
+v -0.075186 -0.010816 -0.064090
+v -0.080784 -0.010769 -0.061623
+v -0.079111 -0.013406 -0.060812
+v -0.074399 -0.012681 -0.063640
+v -0.074441 0.001202 -0.056439
+v 0.004520 -0.045333 0.050637
+v 0.008288 -0.045487 0.049885
+v 0.004978 -0.040801 0.058162
+v 0.005467 -0.042141 0.059363
+v 0.005968 -0.044581 0.060507
+v 0.004373 -0.052018 0.046118
+v 0.004360 -0.056868 0.044122
+v -0.071443 -0.016033 -0.038826
+v 0.022546 -0.051139 0.039465
+v -0.078501 -0.026063 -0.053773
+v -0.079272 -0.025002 -0.056302
+v -0.031668 -0.032568 -0.118378
+v -0.035134 -0.021500 -0.124941
+v -0.089143 -0.005188 -0.072061
+v 0.023218 -0.054908 0.037392
+v 0.017884 -0.052263 0.042141
+v 0.018276 -0.055877 0.039936
+v 0.007422 -0.064099 0.057203
+v 0.006600 -0.059107 0.056727
+v 0.006508 -0.047792 0.060770
+v 0.006381 -0.051647 0.059445
+v 0.022554 0.115030 -0.113499
+v 0.062593 0.076101 -0.007033
+v 0.061207 0.069559 0.001003
+v -0.082830 0.018392 -0.058930
+v 0.054539 0.076787 0.008088
+v 0.056189 0.082072 0.000678
+v 0.063232 0.084103 -0.015309
+v 0.056772 0.088855 -0.007181
+v 0.040050 0.121671 -0.064519
+v 0.040596 0.120944 -0.050225
+v 0.021619 0.126871 -0.046299
+v 0.034892 0.121777 -0.078510
+v 0.047223 0.113626 -0.087833
+v 0.049434 0.116235 -0.068400
+v 0.049357 0.115957 -0.053069
+v -0.075528 -0.021031 -0.049344
+v -0.075667 -0.026261 -0.047136
+v -0.074261 -0.025655 -0.045374
+v -0.079477 0.008615 -0.056972
+v 0.025317 -0.084686 -0.108524
+v 0.051967 0.044204 -0.132331
+v 0.048056 0.114854 -0.039235
+v 0.039917 0.119035 -0.036410
+v 0.039407 0.115875 -0.023976
+v 0.046740 0.112493 -0.027487
+v 0.021449 0.124349 -0.031824
+v 0.010421 0.126519 -0.029966
+v 0.018035 0.007504 -0.145753
+v 0.004439 0.012965 0.059487
+v 0.046172 -0.048153 -0.091481
+v 0.055442 -0.048369 -0.073616
+v 0.058022 -0.100978 -0.050444
+v 0.068790 -0.025501 -0.054454
+v 0.068340 -0.024712 -0.055095
+v 0.057836 -0.063571 -0.054340
+v 0.058677 -0.049305 -0.055316
+v 0.036764 -0.047067 -0.103234
+v 0.077146 -0.014349 -0.065115
+v 0.077871 -0.010670 -0.065988
+v 0.078684 -0.015363 -0.064749
+v 0.079330 -0.011368 -0.065908
+v 0.079570 -0.007143 -0.066323
+v 0.079525 -0.017184 -0.063864
+v 0.078397 0.018783 -0.054281
+v 0.079954 0.020216 -0.052975
+v -0.076891 -0.016715 -0.066055
+v -0.076846 -0.014472 -0.067261
+v 0.068278 -0.027191 -0.050311
+v 0.068115 -0.026402 -0.051961
+v -0.064969 -0.005144 -0.054184
+v 0.067427 -0.026756 -0.051248
+v 0.067318 -0.027285 -0.049890
+v 0.068158 -0.028194 -0.048702
+v 0.067211 -0.027954 -0.048781
+v 0.077829 0.035864 -0.047475
+v 0.066079 -0.027174 -0.052390
+v 0.067468 -0.025116 -0.054737
+v 0.067630 -0.025789 -0.053261
+v 0.067085 -0.026622 -0.051939
+v 0.066977 -0.027805 -0.049967
+v 0.066973 -0.028782 -0.048296
+v 0.067465 -0.029417 -0.047678
+v -0.078853 -0.022808 -0.063565
+v -0.075379 -0.027614 -0.057619
+v -0.059015 -0.078828 -0.052796
+v -0.056518 -0.075941 -0.068404
+v -0.057517 -0.087541 -0.066275
+v -0.059271 -0.091579 -0.051235
+v -0.072552 -0.002616 -0.039789
+v -0.072610 -0.009224 -0.042342
+v -0.072434 -0.010589 -0.040544
+v 0.080034 -0.018421 -0.064173
+v 0.073034 -0.028730 -0.052790
+v -0.024795 -0.102793 -0.000012
+v -0.021705 -0.114937 -0.004234
+v 0.076771 0.012598 -0.059497
+v 0.076597 0.010765 -0.061135
+v 0.079167 0.012155 -0.062278
+v 0.079220 0.014227 -0.060891
+v 0.057614 -0.009373 -0.105473
+v 0.060066 0.001596 -0.110637
+v 0.011848 -0.082492 -0.113063
+v 0.011711 -0.069818 -0.112953
+v 0.074965 0.033957 -0.095350
+v 0.076928 0.014072 -0.058189
+v 0.079482 0.015877 -0.059566
+v 0.078289 -0.006610 -0.066196
+v 0.074725 0.014065 -0.058524
+v 0.074861 0.012382 -0.059905
+v 0.080789 0.017395 -0.061053
+v 0.080535 0.015491 -0.062470
+v 0.080332 0.013214 -0.063813
+v 0.074985 0.010475 -0.061327
+v -0.071360 -0.015575 -0.034972
+v -0.070668 -0.016213 -0.023076
+v -0.071990 -0.000531 -0.023450
+v -0.071810 -0.005864 -0.034236
+v 0.077794 0.017861 -0.060430
+v 0.079968 0.064232 -0.071776
+v 0.079049 0.058478 -0.085844
+v 0.076637 0.081073 -0.075338
+v 0.075541 0.076171 -0.090635
+v 0.070899 0.070340 -0.106595
+v 0.074125 0.051609 -0.101864
+v 0.078312 0.018982 -0.054749
+v 0.080753 0.021258 -0.053323
+v 0.063760 0.090614 -0.023192
+v 0.068891 0.085791 -0.030468
+v 0.056941 0.094802 -0.014431
+v 0.056773 0.100152 -0.021224
+v 0.063129 0.096803 -0.030228
+v 0.070386 0.090219 -0.042388
+v 0.079077 0.054917 -0.056702
+v 0.078298 0.069520 -0.057617
+v 0.075289 0.073409 -0.044028
+v -0.059553 0.030458 0.013749
+v -0.061386 0.031025 0.009260
+v -0.061888 0.021566 0.009215
+v -0.059584 0.022786 0.013053
+v 0.074658 0.085830 -0.059691
+v 0.066141 0.099337 -0.048827
+v 0.069820 0.095835 -0.063483
+v 0.059041 0.107777 -0.052728
+v 0.061145 0.105963 -0.067435
+v 0.060945 0.103424 -0.083478
+v 0.071434 0.091989 -0.078691
+v 0.061251 0.102331 -0.036458
+v 0.055142 0.104987 -0.026721
+v 0.056161 0.108701 -0.039988
+v 0.052842 0.109039 -0.031315
+v 0.073276 0.074950 -0.036825
+v 0.061903 -0.033227 -0.048127
+v 0.064422 -0.033950 -0.042968
+v -0.019482 0.087300 0.030869
+v -0.009300 0.088429 0.033222
+v -0.009551 0.095605 0.027074
+v -0.019946 0.094377 0.024423
+v 0.068844 -0.023362 -0.034686
+v -0.036942 -0.013148 -0.128996
+v -0.038686 -0.002568 -0.133505
+v -0.019147 -0.005308 -0.140467
+v -0.018136 -0.015072 -0.135441
+v 0.068466 0.000929 0.000365
+v 0.068394 0.013517 -0.002491
+v 0.071436 0.005725 -0.012861
+v 0.069791 -0.009060 -0.010889
+v -0.020227 0.111563 0.002265
+v -0.009700 0.113145 0.004955
+v -0.009844 0.118293 -0.005071
+v -0.020533 0.116446 -0.007414
+v -0.052833 0.002886 0.027320
+v -0.054971 -0.002977 0.027755
+v -0.051192 -0.003400 0.031340
+v -0.049515 0.002517 0.030255
+v 0.067016 0.027339 -0.004481
+v 0.070545 0.019902 -0.014726
+v 0.071296 0.034648 -0.017001
+v 0.067525 0.042005 -0.005176
+v 0.062041 0.065007 -0.121807
+v 0.064145 0.048197 -0.119853
+v -0.075513 -0.030444 -0.044136
+v -0.074461 -0.029567 -0.042905
+v 0.064461 0.035418 -0.117984
+v 0.048461 0.057939 0.026196
+v -0.010032 0.122735 -0.016836
+v -0.020962 0.120712 -0.018759
+v 0.021544 0.086215 -0.141849
+v 0.046940 0.080927 0.015437
+v 0.054501 0.085280 -0.122143
+v -0.001256 -0.068248 -0.114532
+v 0.041438 0.091335 -0.128765
+v 0.070577 0.087298 -0.093830
+v 0.067235 0.082170 -0.107374
+v 0.059096 0.075950 -0.122252
+v 0.063286 0.019202 -0.115934
+v 0.047284 0.108440 -0.019875
+v 0.056441 0.024469 0.020041
+v 0.055666 0.019903 0.019346
+v 0.056700 0.029284 0.021397
+v 0.053956 0.015066 0.021173
+v 0.015279 0.007135 0.044942
+v 0.014921 0.005094 0.046869
+v 0.014576 0.002617 0.048854
+v 0.018342 0.002375 0.044917
+v 0.017741 -0.000520 0.046762
+v 0.011445 0.003008 0.054004
+v 0.011789 0.005601 0.051567
+v 0.013920 0.000180 0.050740
+v 0.013531 -0.002819 0.052513
+v 0.011395 0.000084 0.056337
+v 0.017565 -0.015042 0.048799
+v 0.018536 -0.010338 0.047439
+v 0.015926 -0.006361 0.049268
+v 0.021517 -0.003309 0.045498
+v 0.020244 -0.006514 0.046730
+v 0.016814 -0.003144 0.048129
+v 0.022640 -0.000155 0.043533
+v 0.014435 -0.006166 0.054888
+v 0.015642 -0.009403 0.057926
+v 0.011861 -0.003278 0.059483
+v 0.010275 -0.001094 0.062889
+v 0.009785 0.002429 0.059710
+v 0.018958 0.004568 0.043165
+v 0.023720 0.002341 0.041950
+v 0.023959 -0.009755 0.046368
+v 0.028548 -0.012830 0.046144
+v 0.026124 -0.005652 0.044698
+v 0.030417 -0.008142 0.044311
+v 0.027232 -0.002301 0.042909
+v 0.031702 -0.004542 0.042407
+v 0.032647 -0.016735 0.045737
+v 0.036295 -0.020647 0.044781
+v 0.039549 -0.026184 0.043144
+v 0.039212 -0.015618 0.042812
+v 0.043005 -0.020986 0.040940
+v 0.041089 -0.010459 0.040654
+v 0.045601 -0.015667 0.038683
+v -0.011729 0.012852 0.044231
+v -0.012068 0.014700 0.041446
+v -0.015812 0.011192 0.040289
+v -0.015433 0.009248 0.042473
+v -0.007024 0.061101 0.045101
+v -0.007971 0.068776 0.042371
+v -0.017033 0.068602 0.040894
+v -0.015554 0.061059 0.044048
+v 0.035091 -0.011363 0.043702
+v 0.036689 -0.007362 0.041655
+v 0.032875 -0.001040 0.040545
+v 0.028456 0.000772 0.041199
+v 0.042056 0.000110 0.036867
+v 0.041974 -0.005238 0.038708
+v 0.037494 -0.002991 0.039708
+v 0.037971 0.001353 0.037959
+v 0.047173 -0.008875 0.036646
+v 0.046778 -0.001497 0.035183
+v 0.045797 0.003285 0.033631
+v 0.031837 0.009657 0.037539
+v 0.026796 0.010332 0.038200
+v 0.035458 0.009419 0.036567
+v 0.041763 0.003940 0.035442
+v 0.038323 0.004621 0.036763
+v 0.049565 0.010500 0.027100
+v 0.047356 0.009692 0.029420
+v 0.041691 0.009159 0.033703
+v 0.038811 0.009210 0.035252
+v 0.044671 0.009333 0.031667
+v 0.022192 0.011908 0.038088
+v 0.017445 0.013949 0.037995
+v 0.054258 -0.061592 -0.070679
+v 0.009395 0.005603 0.057266
+v 0.009348 0.008537 0.054641
+v 0.015283 0.031214 0.040063
+v 0.012926 0.028401 0.039153
+v 0.007702 0.028809 0.046328
+v 0.019342 0.035379 0.042648
+v 0.014200 0.016717 0.037862
+v -0.020460 0.008736 0.039278
+v -0.019623 0.006767 0.040844
+v 0.012406 0.019327 0.037745
+v 0.011555 0.022067 0.037907
+v 0.011437 0.025380 0.038382
+v -0.009588 0.101979 0.020448
+v -0.020104 0.100684 0.017627
+v -0.020397 0.127261 -0.068998
+v -0.020834 0.127348 -0.060499
+v -0.009652 0.130072 -0.060936
+v -0.010761 0.129355 -0.073540
+v 0.030770 0.048676 0.044078
+v 0.027349 0.042126 0.045833
+v 0.058332 0.062888 0.010272
+v 0.062014 0.054556 0.006365
+v 0.051051 0.044048 0.028780
+v 0.054110 0.039827 0.025685
+v 0.056030 0.034665 0.023086
+v 0.063783 0.043052 0.003583
+v 0.064249 0.030509 0.003093
+v 0.065217 0.018744 0.004219
+v -0.069702 0.000330 -0.093988
+v -0.072617 -0.001319 -0.078532
+v -0.075405 0.010679 -0.079720
+v -0.073571 0.016813 -0.096401
+v 0.048533 0.092831 0.001183
+v -0.035622 -0.096251 -0.010197
+v -0.033346 -0.108544 -0.013553
+v -0.001279 -0.081403 -0.114392
+v 0.048430 0.098599 -0.005751
+v 0.048100 0.086653 0.008405
+v 0.039100 0.111672 -0.014488
+v 0.039189 0.107125 -0.006011
+v 0.048054 0.103836 -0.012709
+v 0.039669 0.101863 0.001355
+v 0.040041 0.096111 0.008228
+v 0.039621 0.090042 0.015487
+v 0.038874 0.084044 0.022310
+v 0.037351 0.075757 0.029146
+v 0.045341 -0.060680 -0.089190
+v 0.006856 -0.007519 0.055255
+v 0.006437 -0.007611 0.055716
+v -0.052544 0.052430 0.021054
+v -0.054442 0.057481 0.016788
+v -0.057795 0.050495 0.013031
+v -0.055820 0.046458 0.017746
+v 0.006624 -0.007541 0.055058
+v 0.006130 -0.007578 0.055563
+v 0.006465 -0.007952 0.055672
+v 0.006403 -0.016056 0.058302
+v 0.005419 -0.014078 0.057335
+v 0.005865 -0.010376 0.056604
+v 0.068278 -0.029918 -0.037101
+v -0.005574 0.048705 0.048422
+v -0.013060 0.048597 0.047811
+v -0.011256 0.042494 0.048061
+v -0.004935 0.042444 0.048831
+v -0.059511 0.015267 0.013657
+v -0.062272 0.012427 0.010959
+v -0.061487 0.004440 0.016110
+v -0.058427 0.008219 0.018008
+v -0.055306 0.003995 0.024195
+v -0.057862 -0.001627 0.024122
+v -0.018484 0.077898 0.037046
+v -0.008693 0.078600 0.038755
+v -0.060082 0.041253 0.010194
+v -0.058237 0.038818 0.015220
+v -0.009033 0.016233 0.046613
+v -0.009693 0.017680 0.043089
+v -0.007280 0.022052 0.045370
+v -0.003513 0.021782 0.051142
+v -0.003394 0.025743 0.049351
+v -0.006677 0.025628 0.045927
+v -0.033259 0.060543 0.036819
+v -0.034879 0.067422 0.033262
+v -0.042351 0.066003 0.027728
+v -0.040733 0.059528 0.031445
+v -0.020161 0.106273 0.010419
+v -0.009615 0.107685 0.013225
+v -0.003919 0.034977 0.048270
+v -0.008415 0.035103 0.046436
+v -0.007412 0.031482 0.045713
+v -0.003714 0.031676 0.048189
+v -0.024575 0.005066 0.039924
+v -0.025374 0.007247 0.038783
+v -0.066323 -0.030243 -0.049171
+v -0.068076 -0.031291 -0.048015
+v -0.073514 0.015340 -0.036624
+v -0.072116 0.004717 -0.034956
+v -0.072293 0.012702 -0.024438
+v -0.072458 0.025664 -0.026758
+v -0.074651 0.037649 -0.030274
+v -0.075782 0.046005 -0.035339
+v -0.077018 0.029741 -0.042590
+v -0.075277 0.024292 -0.038914
+v -0.053396 -0.084678 -0.079746
+v -0.050731 -0.074269 -0.083116
+v -0.003679 0.018761 0.054078
+v -0.003671 0.015775 0.056744
+v -0.020890 0.036815 -0.151385
+v -0.020488 0.023250 -0.149607
+v -0.040697 0.027169 -0.141199
+v -0.040823 0.040098 -0.142499
+v -0.066630 -0.010362 -0.090338
+v -0.070003 -0.010100 -0.076065
+v -0.009664 -0.011856 0.058660
+v -0.006875 -0.007629 0.054912
+v -0.007592 -0.010609 0.056100
+v -0.006989 -0.007645 0.055001
+v -0.010331 -0.014130 0.060696
+v -0.012869 -0.013971 0.061955
+v -0.007964 -0.014723 0.055440
+v -0.007023 -0.014663 0.055885
+v -0.006964 -0.010388 0.056314
+v -0.007159 -0.007599 0.055008
+v -0.001772 -0.017014 0.067546
+v -0.002269 -0.015367 0.070408
+v -0.006029 -0.014025 0.069170
+v -0.004542 -0.015688 0.067190
+v -0.006097 -0.007546 0.055786
+v -0.005224 -0.010103 0.057370
+v -0.004955 -0.011724 0.063015
+v -0.006661 -0.009133 0.058801
+v -0.006480 -0.007694 0.056166
+v -0.006808 -0.011386 0.062107
+v -0.005858 -0.017251 0.060290
+v -0.008259 -0.011493 0.060570
+v -0.077087 -0.017787 -0.055905
+v -0.058071 -0.064324 -0.033299
+v -0.061941 -0.049879 -0.041327
+v -0.061240 -0.050503 -0.047002
+v -0.059003 -0.066083 -0.042615
+v -0.080858 0.008144 -0.058133
+v -0.066673 0.057354 -0.005053
+v -0.067999 0.068888 -0.014533
+v -0.073215 0.057534 -0.026356
+v -0.072649 0.047009 -0.021351
+v -0.068364 0.078692 -0.022638
+v -0.073597 0.068279 -0.033558
+v -0.076644 0.056363 -0.043299
+v -0.078788 0.035798 -0.056764
+v -0.042234 0.017599 0.031868
+v -0.042424 0.017430 0.031998
+v -0.040848 0.017165 0.033282
+v -0.040908 0.017273 0.033144
+v -0.041794 0.017746 0.031251
+v -0.040387 0.017166 0.032321
+v -0.042747 0.018172 0.030282
+v -0.043239 0.017878 0.030807
+v -0.043440 0.017664 0.030897
+v -0.024552 0.018573 0.035138
+v -0.025294 0.018647 0.034670
+v -0.028018 0.017765 0.034783
+v -0.027245 0.017893 0.035839
+v -0.024291 0.018333 0.035509
+v -0.027156 0.017611 0.036191
+v -0.028450 0.020473 0.029048
+v -0.027869 0.020824 0.028964
+v -0.029294 0.020130 0.029121
+v -0.022561 0.020652 0.032923
+v -0.022772 0.021803 0.033004
+v -0.027599 0.021923 0.028991
+v -0.027488 0.021186 0.028876
+v -0.022954 0.020002 0.033221
+v -0.021271 0.019895 0.033174
+v -0.020251 0.020347 0.032649
+v -0.020362 0.021934 0.032944
+v -0.032427 0.016585 0.036213
+v -0.032607 0.015838 0.036248
+v -0.027820 0.016187 0.036464
+v -0.027435 0.017031 0.036391
+v -0.032247 0.017029 0.036072
+v -0.035715 0.017117 0.035176
+v -0.035772 0.016667 0.035285
+v -0.035868 0.015922 0.035275
+v -0.024449 0.017126 0.036211
+v -0.024367 0.017980 0.035903
+v -0.035870 0.015249 0.035273
+v -0.032814 0.015191 0.036156
+v -0.028007 0.015542 0.036419
+v -0.024420 0.016704 0.036289
+v -0.021584 0.017968 0.035600
+v -0.022755 0.018154 0.035667
+v -0.032849 0.014416 0.035987
+v -0.032788 0.013591 0.035884
+v -0.027909 0.013972 0.036424
+v -0.028017 0.014788 0.036376
+v -0.035886 0.014474 0.034980
+v -0.035855 0.013596 0.034844
+v -0.044581 0.017694 0.029210
+v -0.045122 0.018169 0.028327
+v -0.045071 0.017603 0.028410
+v -0.044721 0.017201 0.028872
+v -0.047407 0.014929 0.025610
+v -0.048364 0.016269 0.024812
+v -0.049886 0.015652 0.023218
+v -0.048806 0.014143 0.024538
+v -0.057273 0.005605 0.020909
+v -0.051187 0.011301 0.023888
+v -0.052521 0.012759 0.021755
+v -0.044058 0.017514 0.029983
+v -0.043404 0.017263 0.030895
+v -0.044048 0.017836 0.029995
+v -0.044536 0.018140 0.029081
+v -0.044222 0.016844 0.029660
+v -0.043462 0.016617 0.030685
+v -0.042441 0.017098 0.031985
+v -0.042472 0.016382 0.031873
+v -0.040838 0.016115 0.033206
+v -0.040769 0.016865 0.033284
+v -0.038650 0.016746 0.034310
+v -0.038984 0.017139 0.034176
+v -0.038726 0.015966 0.034249
+v -0.040937 0.015449 0.032952
+v -0.038809 0.015291 0.034125
+v -0.042488 0.015698 0.031501
+v -0.043553 0.016034 0.030261
+v -0.044367 0.016373 0.029169
+v -0.045041 0.016748 0.028384
+v -0.045509 0.017374 0.027830
+v -0.044713 0.015808 0.028564
+v -0.045376 0.015062 0.027741
+v -0.044107 0.014590 0.029090
+v -0.043745 0.015425 0.029759
+v -0.045578 0.016300 0.027664
+v -0.046386 0.015663 0.026669
+v -0.042631 0.015107 0.031016
+v -0.042704 0.014334 0.030547
+v -0.040893 0.013974 0.032169
+v -0.040937 0.014765 0.032617
+v -0.038443 0.014670 0.033778
+v -0.038516 0.013765 0.033409
+v -0.032215 0.017107 0.035889
+v -0.031968 0.017174 0.034595
+v -0.035422 0.016993 0.033974
+v -0.035795 0.017132 0.034966
+v -0.039069 0.017135 0.034058
+v -0.038361 0.016924 0.033254
+v -0.035205 0.016875 0.032924
+v -0.033425 0.019291 0.028863
+v -0.034637 0.019320 0.028486
+v -0.037920 0.016896 0.032199
+v -0.032085 0.017193 0.033429
+v -0.031948 0.019433 0.029134
+v -0.028795 0.017964 0.033538
+v -0.030473 0.019764 0.029169
+v -0.026073 0.018676 0.033863
+v -0.033539 0.030045 0.035874
+v -0.033434 0.030234 0.036333
+v -0.038749 0.029538 0.035299
+v -0.038335 0.029203 0.034981
+v -0.051381 0.000667 -0.123039
+v -0.052670 0.015588 -0.128495
+v -0.040350 0.011207 -0.138193
+v -0.040978 0.026734 0.034053
+v -0.040947 0.027704 0.033828
+v -0.043349 0.026475 0.032068
+v -0.043200 0.025540 0.032174
+v -0.041021 0.025678 0.034395
+v -0.042997 0.024404 0.032424
+v -0.038266 0.026940 0.036273
+v -0.038284 0.028063 0.035889
+v -0.038208 0.028898 0.035415
+v -0.042198 0.022935 0.028502
+v -0.041474 0.023883 0.029209
+v -0.042137 0.023692 0.030533
+v -0.043152 0.022898 0.029295
+v -0.042825 0.022258 0.027917
+v -0.043742 0.022158 0.028468
+v -0.037537 0.022113 0.026369
+v -0.037205 0.022478 0.026668
+v -0.036692 0.022918 0.027128
+v -0.037719 0.021770 0.026210
+v -0.037832 0.021390 0.026149
+v -0.037823 0.021011 0.026190
+v -0.037709 0.020687 0.026350
+v -0.059952 -0.009901 0.021752
+v -0.059974 0.000624 0.020239
+v -0.062337 -0.007238 0.017269
+v -0.043815 0.018133 0.029845
+v -0.044335 0.018502 0.028961
+v -0.043276 0.018455 0.029445
+v -0.043772 0.018817 0.028753
+v -0.044881 0.019079 0.028100
+v -0.045209 0.018786 0.028051
+v -0.041111 0.017784 0.030517
+v -0.036588 0.019638 0.027642
+v -0.037053 0.019902 0.027255
+v -0.041862 0.018389 0.029644
+v -0.039823 0.017162 0.031310
+v -0.035825 0.019413 0.028068
+v -0.037291 0.020162 0.026940
+v -0.037520 0.020403 0.026607
+v -0.042189 0.018742 0.028982
+v -0.042686 0.019159 0.028323
+v -0.044188 0.019410 0.028224
+v -0.043284 0.020338 0.027424
+v -0.043286 0.020962 0.027413
+v -0.044385 0.019992 0.027935
+v -0.044451 0.020764 0.027810
+v -0.043104 0.019709 0.027729
+v -0.043111 0.021612 0.027574
+v -0.044169 0.021369 0.028054
+v -0.045421 0.018286 0.027915
+v -0.045440 0.019225 0.027692
+v -0.045975 0.020300 0.027075
+v -0.045865 0.018319 0.027373
+v -0.046511 0.018355 0.026592
+v -0.046110 0.017066 0.026980
+v -0.046857 0.021099 0.026516
+v -0.048001 0.021590 0.025550
+v -0.047709 0.018339 0.025516
+v -0.047072 0.016777 0.025965
+v -0.045105 0.019784 0.027706
+v -0.045324 0.020441 0.027432
+v -0.046277 0.022296 0.027696
+v -0.046966 0.023264 0.027365
+v -0.045688 0.021581 0.027984
+v -0.045141 0.022453 0.029144
+v -0.045535 0.023318 0.028990
+v -0.046075 0.024195 0.028715
+v -0.044582 0.024312 0.030485
+v -0.044964 0.025270 0.030324
+v -0.044290 0.023330 0.030609
+v -0.045128 0.021286 0.028031
+v -0.044621 0.022041 0.028942
+v -0.043819 0.022877 0.030180
+v -0.042707 0.023538 0.032041
+v -0.040811 0.024647 0.034083
+v -0.040181 0.024603 0.032355
+v -0.038058 0.025945 0.035912
+v -0.037450 0.025515 0.034237
+v -0.033848 0.031165 0.037118
+v -0.039624 0.030680 0.035891
+v -0.036993 0.025493 0.032315
+v -0.034441 0.023599 0.028615
+v -0.032850 0.023665 0.029156
+v -0.033809 0.025515 0.033449
+v -0.023436 0.022632 0.033561
+v -0.024305 0.023528 0.034140
+v -0.028349 0.022667 0.029458
+v -0.027892 0.022313 0.029226
+v -0.021792 0.022724 0.034116
+v -0.023219 0.023691 0.034894
+v -0.027939 0.024908 0.034396
+v -0.030005 0.023263 0.029713
+v -0.029017 0.022974 0.029635
+v -0.026102 0.024254 0.034187
+v -0.027304 0.025551 0.036167
+v -0.024845 0.024534 0.035538
+v -0.034095 0.026078 0.035513
+v -0.021947 0.072539 -0.148088
+v -0.021657 0.056981 -0.151461
+v -0.041022 0.058715 -0.142531
+v -0.040827 0.072211 -0.139783
+v -0.039431 0.024816 0.030828
+v -0.035717 0.023320 0.027883
+v -0.050606 0.073188 -0.131957
+v -0.049708 0.081728 -0.129019
+v -0.041148 0.083772 -0.134618
+v -0.037450 -0.061624 -0.100803
+v -0.027615 -0.073097 -0.107385
+v -0.019593 0.022525 0.035209
+v -0.021425 0.023863 0.035697
+v -0.018369 0.021316 0.034805
+v -0.017693 0.021145 0.034896
+v -0.018850 0.022610 0.035209
+v -0.020440 0.024077 0.035691
+v -0.023644 0.025007 0.036216
+v -0.022888 0.025419 0.036632
+v -0.026444 0.026807 0.037643
+v -0.026593 0.026032 0.037291
+v -0.034024 0.026760 0.037007
+v -0.034097 0.027683 0.037404
+v -0.025869 0.027732 0.037144
+v -0.022317 0.026008 0.036107
+v -0.025187 0.028740 0.036592
+v -0.022093 0.027032 0.035792
+v -0.033872 0.029614 0.036296
+v -0.034108 0.028752 0.037021
+v -0.021417 0.018846 0.035125
+v -0.019134 0.019232 0.034936
+v -0.021970 0.019143 0.034985
+v -0.019500 0.019501 0.034725
+v -0.023122 0.019309 0.034212
+v -0.018566 0.020036 0.034508
+v -0.017866 0.019856 0.034722
+v -0.024024 0.019339 0.033743
+v -0.018111 0.022673 0.034811
+v -0.016776 0.020973 0.034798
+v -0.016989 0.023204 0.034078
+v -0.016071 0.021107 0.034384
+v -0.019352 0.025624 0.034499
+v -0.019727 0.024389 0.034909
+v -0.016879 0.019722 0.034653
+v -0.017556 0.019170 0.034822
+v -0.016140 0.019704 0.034531
+v -0.016829 0.018973 0.034767
+v -0.024174 0.016060 0.036343
+v -0.023926 0.015421 0.036367
+v -0.020835 0.017467 0.035702
+v -0.020219 0.017028 0.035637
+v -0.032558 0.012718 0.035741
+v -0.035735 0.012677 0.034789
+v -0.032117 0.011618 0.035833
+v -0.035585 0.011483 0.034695
+v -0.027140 0.012142 0.036778
+v -0.027627 0.013088 0.036540
+v -0.033057 -0.116696 -0.014946
+v -0.046199 0.014147 0.027046
+v -0.047291 0.013094 0.026400
+v -0.045571 0.012356 0.028153
+v -0.044692 0.013675 0.028499
+v -0.042991 0.013301 0.030088
+v -0.043443 0.011893 0.030006
+v -0.040997 0.011633 0.031736
+v -0.040820 0.012951 0.031819
+v -0.038482 0.012854 0.033163
+v -0.038679 0.011406 0.033218
+v -0.049233 0.017868 0.024071
+v -0.050068 0.021377 0.023437
+v -0.050766 0.017217 0.022391
+v -0.052250 0.020958 0.021292
+v -0.015277 0.024233 0.034449
+v -0.014964 0.021444 0.034184
+v -0.013722 0.024938 0.035656
+v -0.013575 0.022071 0.035214
+v -0.014936 0.027841 0.036597
+v -0.016844 0.027091 0.035020
+v -0.015342 0.019808 0.034580
+v -0.016109 0.018690 0.034676
+v -0.014298 0.019737 0.035028
+v -0.015284 0.018243 0.035173
+v -0.023428 0.014647 0.036405
+v -0.022886 0.013758 0.036581
+v -0.019596 0.016471 0.035860
+v -0.018754 0.015660 0.036064
+v -0.049625 -0.010484 -0.118258
+v -0.053238 0.031352 -0.131080
+v -0.046901 0.034579 0.033291
+v -0.044519 0.031790 0.034004
+v -0.041023 0.033014 0.036254
+v -0.042880 0.036408 0.036418
+v -0.049687 0.038446 0.031311
+v -0.051987 0.034947 0.028373
+v -0.049499 0.032204 0.030318
+v -0.047170 0.030050 0.031632
+v -0.050913 0.024335 0.024208
+v -0.049141 0.024359 0.025837
+v -0.049201 0.026175 0.027195
+v -0.051020 0.026790 0.025722
+v -0.053078 0.024334 0.022033
+v -0.053470 0.027639 0.023722
+v -0.050491 0.029296 0.027869
+v -0.048426 0.028012 0.029249
+v -0.053082 0.030916 0.026064
+v -0.022833 0.032833 0.038552
+v -0.023470 0.031330 0.037165
+v -0.020635 0.029142 0.035907
+v -0.019228 0.029730 0.036753
+v -0.021684 0.034224 0.039632
+v -0.017182 0.030601 0.037799
+v -0.035959 0.036952 0.039317
+v -0.034977 0.033931 0.038238
+v -0.024766 0.029396 0.036099
+v -0.022759 0.028082 0.035960
+v -0.024409 0.029564 0.036023
+v -0.022137 0.028028 0.035762
+v -0.058379 0.092440 -0.111222
+v -0.060456 0.098259 -0.098678
+v -0.044031 0.109194 -0.105073
+v -0.043252 0.101903 -0.117400
+v -0.040993 0.028035 0.034041
+v -0.041616 0.028554 0.033879
+v -0.044040 0.027444 0.032091
+v -0.043488 0.026960 0.032264
+v -0.045339 0.025902 0.030297
+v -0.045919 0.026386 0.030227
+v -0.046986 0.025278 0.028541
+v -0.046484 0.024976 0.028691
+v -0.047109 0.024154 0.027511
+v -0.047583 0.024160 0.027084
+v -0.022163 0.028934 0.036022
+v -0.024188 0.030027 0.036505
+v -0.052122 0.061386 -0.133257
+v -0.044283 0.073252 0.022796
+v -0.051300 0.069681 0.016154
+v -0.049207 0.063280 0.021710
+v -0.042615 0.029630 0.034327
+v -0.045192 0.028398 0.032228
+v -0.046909 0.027083 0.030042
+v -0.047907 0.025719 0.028231
+v -0.048315 0.024724 0.027014
+v -0.072062 -0.013349 -0.040529
+v -0.072211 -0.013166 -0.042367
+v -0.079107 -0.002991 -0.068457
+v -0.077331 0.003518 -0.067336
+v -0.076170 0.006934 -0.065673
+v -0.079708 0.014118 -0.052688
+v -0.081272 0.017300 -0.055584
+v -0.082395 -0.021293 -0.061544
+v -0.079240 -0.023150 -0.057658
+v -0.082254 -0.020370 -0.062039
+v -0.087753 0.013299 -0.064538
+v -0.088252 0.011708 -0.065891
+v -0.089236 0.011377 -0.065513
+v -0.042936 -0.032004 -0.108330
+v -0.047251 -0.019373 -0.114429
+v -0.088541 0.009991 -0.067240
+v -0.073792 -0.008462 -0.044830
+v -0.080108 -0.003469 -0.068691
+v -0.081610 -0.004021 -0.070490
+v -0.082456 -0.009105 -0.070728
+v -0.081757 -0.008605 -0.070047
+v -0.081852 -0.014224 -0.068953
+v -0.081255 -0.013431 -0.068440
+v -0.083716 -0.017546 -0.064649
+v -0.083819 -0.013227 -0.067487
+v -0.082252 0.017732 -0.056969
+v -0.082330 0.016902 -0.056160
+v -0.083008 0.017771 -0.057332
+v -0.074576 -0.009540 -0.046232
+v -0.078075 0.011933 -0.052727
+v -0.077724 0.010233 -0.050915
+v -0.077384 0.010661 -0.051432
+v -0.072524 -0.004513 -0.050406
+v -0.072292 -0.003704 -0.050590
+v -0.073706 -0.006032 -0.049946
+v -0.075423 -0.007335 -0.047800
+v -0.069662 -0.014740 -0.049905
+v -0.069315 -0.015397 -0.051427
+v -0.068744 -0.016131 -0.050798
+v -0.085665 0.023262 -0.059676
+v -0.077913 0.009255 -0.050470
+v -0.079047 0.010256 -0.050835
+v -0.079746 0.011308 -0.051183
+v -0.083768 0.017125 -0.057379
+v -0.080170 0.012209 -0.051436
+v -0.079194 0.012249 -0.051662
+v -0.080433 0.013842 -0.052518
+v -0.078525 0.012341 -0.052075
+v -0.074601 -0.012064 -0.063848
+v -0.073601 -0.012179 -0.061813
+v -0.073693 0.006644 -0.040817
+v -0.074070 0.004838 -0.043162
+v -0.078307 -0.023833 -0.055353
+v -0.076107 0.012620 -0.059820
+v -0.076960 0.015614 -0.061572
+v -0.085553 -0.009089 -0.068661
+v -0.079033 -0.015203 -0.058981
+v -0.076940 -0.016185 -0.057723
+v -0.081392 -0.013939 -0.060410
+v -0.083168 -0.011378 -0.061932
+v -0.074894 0.003394 -0.054727
+v -0.083331 0.007682 -0.060466
+v -0.083606 0.005629 -0.061597
+v -0.082068 0.007846 -0.059206
+v -0.077924 0.018696 -0.059805
+v -0.078260 0.018427 -0.057483
+v -0.075377 0.015441 -0.059081
+v -0.078156 0.016633 -0.057162
+v -0.071558 -0.011451 -0.049593
+v -0.069465 -0.016923 -0.053267
+v -0.071255 -0.015560 -0.054735
+v -0.070575 -0.017536 -0.054290
+v -0.071767 -0.016537 -0.056018
+v -0.071942 -0.015322 -0.057592
+v -0.072399 -0.013633 -0.058871
+v -0.073735 -0.011705 -0.058949
+v -0.071359 -0.007448 -0.050259
+v -0.077791 0.018953 -0.048404
+v -0.070894 -0.009229 -0.050259
+v -0.072809 -0.005823 -0.050119
+v -0.072741 -0.007552 -0.049756
+v -0.072255 -0.009424 -0.049512
+v -0.075639 -0.019124 -0.051460
+v -0.075676 -0.017650 -0.054043
+v -0.074383 -0.018487 -0.050748
+v -0.075813 -0.017073 -0.056257
+v -0.073319 -0.018136 -0.050323
+v -0.077627 0.013404 -0.059125
+v -0.078349 0.016205 -0.058860
+v -0.072250 -0.010127 -0.038720
+v -0.064524 -0.002295 0.011972
+v -0.065866 0.007108 0.006174
+v -0.026568 -0.049333 0.035205
+v -0.025826 -0.052132 0.035737
+v -0.056877 -0.012375 0.026165
+v -0.052584 -0.014994 0.031014
+v -0.025436 -0.044427 0.046334
+v -0.024593 -0.045072 0.046592
+v -0.023681 -0.044405 0.046986
+v -0.024465 -0.044248 0.046503
+v -0.022407 -0.043288 0.045540
+v -0.023228 -0.043833 0.044631
+v -0.023393 -0.043839 0.045831
+v -0.022600 -0.043439 0.046667
+v -0.028879 -0.029792 0.046785
+v -0.032556 -0.025723 0.045009
+v -0.035461 -0.030037 0.043876
+v -0.032118 -0.033405 0.045649
+v -0.024898 -0.046342 0.037566
+v -0.024795 -0.044834 0.037918
+v -0.024051 -0.044444 0.040709
+v -0.024016 -0.045118 0.040450
+v -0.024808 -0.047272 0.037645
+v -0.023858 -0.045408 0.040493
+v -0.026522 -0.047673 0.035072
+v -0.026304 -0.045164 0.035273
+v -0.034340 -0.050641 0.041924
+v -0.030784 -0.048494 0.043707
+v -0.031775 -0.045483 0.044065
+v -0.035386 -0.046183 0.042339
+v -0.025363 -0.017908 0.045813
+v -0.029173 -0.021565 0.045552
+v -0.026279 -0.026029 0.046823
+v -0.022198 -0.021575 0.046716
+v -0.018596 -0.016993 0.047346
+v -0.021414 -0.013740 0.046100
+v -0.016937 -0.008910 0.050912
+v -0.017528 -0.012643 0.052175
+v -0.016669 -0.013737 0.055540
+v -0.017148 -0.011494 0.054400
+v -0.014997 -0.017851 0.054123
+v -0.017566 -0.014910 0.050637
+v -0.010953 -0.019208 0.054724
+v -0.010742 -0.018722 0.055790
+v -0.015240 -0.018256 0.052549
+v -0.015508 -0.019248 0.050578
+v -0.011503 -0.019849 0.053489
+v -0.010181 -0.018001 0.056781
+v -0.013068 -0.017578 0.056682
+v -0.014551 -0.015293 0.058073
+v -0.003949 -0.016653 0.064946
+v -0.005063 -0.015199 0.066883
+v -0.008989 0.014098 0.049668
+v -0.011853 0.010555 0.046766
+v -0.009156 0.011484 0.051962
+v -0.011863 0.008265 0.048937
+v -0.013921 -0.012949 0.061460
+v -0.015308 -0.011116 0.059898
+v -0.011471 -0.011796 0.065200
+v -0.012339 -0.009159 0.064850
+v -0.003389 0.028864 0.048488
+v -0.001989 -0.020399 0.060929
+v -0.002324 -0.020990 0.060412
+v -0.001723 -0.019032 0.062588
+v -0.022446 0.095025 -0.135570
+v -0.022829 0.107290 -0.124398
+v -0.021739 0.120205 -0.101235
+v -0.019431 0.124379 -0.088806
+v -0.005334 -0.037470 0.061125
+v -0.005154 -0.034765 0.061113
+v -0.012546 -0.042390 0.045213
+v -0.007073 -0.042568 0.046673
+v -0.006911 -0.043518 0.049704
+v -0.012382 -0.043660 0.048397
+v -0.016155 -0.043554 0.046676
+v -0.016753 -0.042281 0.043455
+v -0.012724 -0.040945 0.042729
+v -0.007130 -0.041011 0.043786
+v -0.011803 -0.044590 0.053938
+v -0.006482 -0.044241 0.055956
+v -0.006166 -0.042598 0.058522
+v -0.011609 -0.042995 0.056051
+v -0.018115 -0.023443 0.049304
+v -0.014034 -0.022893 0.051911
+v -0.009868 -0.022882 0.055468
+v -0.006586 -0.023118 0.058080
+v -0.005848 -0.021090 0.058352
+v -0.008351 -0.020488 0.056150
+v -0.005478 -0.020272 0.058707
+v -0.007822 -0.019786 0.056737
+v -0.007660 -0.019170 0.057324
+v -0.008132 -0.018320 0.057663
+v -0.006448 -0.018310 0.058918
+v -0.006941 -0.017614 0.058783
+v -0.005409 -0.019597 0.059109
+v -0.003850 0.006008 0.064927
+v -0.003648 0.009418 0.061949
+v -0.009890 -0.033212 0.059110
+v -0.005195 -0.032632 0.060485
+v -0.004257 -0.028495 0.059740
+v -0.008657 -0.028635 0.058363
+v -0.013287 -0.029388 0.055945
+v -0.014888 -0.034795 0.056517
+v -0.002701 -0.021752 0.059995
+v -0.010168 -0.035482 0.059325
+v -0.010473 -0.037983 0.059039
+v -0.015295 -0.037171 0.056571
+v -0.015510 -0.039407 0.056125
+v -0.019589 -0.037117 0.053358
+v -0.018614 -0.031209 0.053026
+v -0.024285 -0.033163 0.049767
+v -0.023495 -0.039544 0.050273
+v -0.031488 -0.042546 0.045051
+v -0.026908 -0.044702 0.045892
+v -0.026757 -0.043494 0.046433
+v -0.026435 -0.042409 0.047288
+v -0.030796 -0.039557 0.046136
+v -0.034937 -0.041860 0.043457
+v -0.034069 -0.037485 0.044570
+v -0.028725 -0.036414 0.047590
+v -0.025567 -0.041140 0.048387
+v -0.025519 -0.043762 0.046844
+v -0.024563 -0.043982 0.046755
+v -0.025151 -0.043224 0.047577
+v -0.024260 -0.043789 0.047367
+v -0.024539 -0.042621 0.048505
+v -0.023681 -0.043581 0.048160
+v -0.022518 -0.043038 0.049459
+v -0.023101 -0.041593 0.050039
+v -0.022609 -0.044357 0.045735
+v -0.021466 -0.044582 0.046590
+v -0.021383 -0.044594 0.047729
+v -0.022466 -0.044456 0.046609
+v -0.023359 -0.044199 0.044959
+v -0.023110 -0.044260 0.046032
+v -0.023580 -0.044213 0.041308
+v -0.022810 -0.043909 0.041907
+v -0.023736 -0.043933 0.044486
+v -0.023763 -0.044030 0.045386
+v -0.023690 -0.044016 0.046526
+v -0.023526 -0.044105 0.046828
+v -0.022937 -0.044174 0.047492
+v -0.021875 -0.044064 0.048598
+v -0.015557 -0.041492 0.055065
+v -0.019286 -0.041210 0.052904
+v -0.019040 -0.042935 0.051743
+v -0.015556 -0.043677 0.053386
+v -0.018753 -0.044267 0.050428
+v -0.017450 -0.040868 0.040896
+v -0.015506 -0.044741 0.051431
+v -0.020674 -0.042554 0.040958
+v -0.021747 -0.041395 0.038490
+v -0.019826 -0.043608 0.044119
+v -0.024261 -0.043978 0.038569
+v -0.023564 -0.043245 0.038939
+v -0.025710 -0.043489 0.035775
+v -0.024766 -0.042309 0.036514
+v -0.018805 -0.044669 0.048862
+v -0.019570 -0.039364 0.053361
+v -0.010952 -0.040429 0.057803
+v -0.005727 -0.039941 0.059986
+v -0.004565 -0.001652 0.071500
+v -0.004269 0.002552 0.068029
+v -0.023004 -0.043786 0.047156
+v -0.010759 -0.124784 -0.002135
+v -0.025958 -0.045833 0.045967
+v -0.024259 -0.049321 0.037877
+v -0.023149 -0.046168 0.040962
+v -0.031480 -0.055067 0.042749
+v -0.026650 -0.058425 0.045856
+v -0.024705 -0.054345 0.047328
+v -0.028588 -0.051753 0.044565
+v -0.021921 -0.048491 0.049594
+v -0.024456 -0.046952 0.047241
+v -0.023218 -0.045653 0.048086
+v -0.020708 -0.046289 0.050497
+v -0.019838 -0.044521 0.050983
+v -0.022285 -0.044462 0.048429
+v -0.018022 -0.041978 0.050387
+v -0.013628 -0.041308 0.052776
+v -0.013134 -0.045627 0.047955
+v -0.017661 -0.046053 0.045633
+v -0.021988 -0.046034 0.042414
+v -0.021302 -0.042757 0.046744
+v -0.018472 -0.042205 0.050864
+v -0.021340 -0.042891 0.048013
+v -0.014149 -0.041525 0.053858
+v -0.021434 -0.121633 -0.007305
+v -0.021795 -0.043448 0.048393
+v -0.019070 -0.042934 0.050923
+v -0.014827 -0.042551 0.054336
+v -0.015747 -0.044607 0.054631
+v -0.016573 -0.046995 0.054641
+v -0.017401 -0.049903 0.053825
+v -0.019473 -0.057250 0.051026
+v -0.020915 -0.062265 0.049791
+v -0.013200 -0.058693 0.054103
+v -0.014331 -0.063934 0.053793
+v -0.012291 -0.050944 0.057318
+v -0.011885 -0.047397 0.058499
+v -0.011156 -0.044607 0.058185
+v -0.010482 -0.042316 0.057446
+v -0.003297 -0.023399 0.059519
+v -0.005251 -0.018181 0.060245
+v -0.004137 -0.017601 0.062413
+v -0.007349 -0.013611 0.067665
+v -0.001598 -0.018047 0.064882
+v -0.010007 -0.012028 0.067022
+v -0.008254 -0.012482 0.068537
+v -0.008854 -0.010873 0.069362
+v -0.010064 -0.010246 0.068161
+v -0.006721 -0.013033 0.069844
+v -0.007998 -0.011074 0.070426
+v -0.002640 -0.014155 0.071784
+v -0.003460 -0.010757 0.073822
+v -0.004434 -0.005241 0.073494
+v -0.003890 -0.008555 0.074244
+v -0.009324 -0.009253 0.069909
+v -0.010596 -0.007404 0.068183
+v -0.010834 -0.004336 0.066043
+v -0.012964 -0.006494 0.062395
+v -0.010172 0.129265 -0.044979
+v -0.009781 -0.041038 0.056697
+v -0.008890 -0.040809 0.055069
+v -0.012947 -0.056066 0.041380
+v -0.012977 -0.051516 0.043657
+v -0.008156 -0.051699 0.045244
+v -0.008078 -0.056188 0.043180
+v -0.004776 -0.040515 0.056242
+v -0.004651 -0.045355 0.050503
+v -0.008427 -0.045374 0.049632
+v -0.005065 -0.040837 0.058009
+v -0.005569 -0.042202 0.059119
+v -0.006104 -0.044600 0.060189
+v -0.004481 -0.051877 0.046004
+v -0.004431 -0.056598 0.043990
+v -0.022698 -0.050451 0.039042
+v -0.023241 -0.054479 0.037015
+v -0.017994 -0.051560 0.041652
+v -0.018353 -0.055315 0.039590
+v -0.007622 -0.064140 0.056760
+v -0.006842 -0.059065 0.056346
+v -0.006663 -0.047741 0.060393
+v -0.006735 -0.051554 0.059142
+v -0.022650 0.114957 -0.113463
+v -0.062791 0.075846 -0.007107
+v -0.061093 0.069107 0.000853
+v -0.054318 0.076455 0.008010
+v -0.056337 0.081991 0.000606
+v -0.063325 0.083724 -0.015335
+v -0.056837 0.088624 -0.007200
+v -0.039837 0.121393 -0.064404
+v -0.040362 0.120629 -0.050094
+v -0.021338 0.126648 -0.046202
+v -0.034731 0.121556 -0.078417
+v -0.047153 0.113379 -0.087751
+v -0.049272 0.115889 -0.068285
+v -0.049155 0.115581 -0.052938
+v -0.027633 -0.085030 -0.108551
+v -0.053095 0.043991 -0.132447
+v -0.047848 0.114465 -0.039097
+v -0.039685 0.118706 -0.036279
+v -0.039197 0.115560 -0.023861
+v -0.046548 0.112104 -0.027355
+v -0.021180 0.124160 -0.031747
+v -0.010132 0.126423 -0.029929
+v -0.020108 0.007437 -0.145846
+v -0.003584 0.012810 0.059200
+v -0.048297 -0.048677 -0.091887
+v -0.057433 -0.049174 -0.074135
+v -0.059654 -0.101741 -0.050829
+v -0.070054 -0.026159 -0.056358
+v -0.069763 -0.025432 -0.057023
+v -0.059275 -0.064472 -0.054795
+v -0.060456 -0.050363 -0.055918
+v -0.039130 -0.047399 -0.103490
+v -0.078423 -0.014938 -0.067418
+v -0.079187 -0.011229 -0.068403
+v -0.079645 -0.015866 -0.067140
+v -0.080347 -0.011871 -0.068377
+v -0.080358 -0.007533 -0.068925
+v -0.080234 -0.017639 -0.066364
+v -0.078202 0.018257 -0.055885
+v -0.079790 0.019751 -0.054767
+v -0.069399 -0.027725 -0.052071
+v -0.069345 -0.027044 -0.053749
+v -0.068696 -0.027429 -0.052930
+v -0.068541 -0.027895 -0.051563
+v -0.069114 -0.028596 -0.050440
+v -0.068284 -0.028467 -0.050422
+v -0.077789 0.035043 -0.048148
+v -0.066813 -0.027656 -0.053890
+v -0.068992 -0.025911 -0.056510
+v -0.068941 -0.026521 -0.055016
+v -0.068463 -0.027408 -0.053588
+v -0.067799 -0.028269 -0.051559
+v -0.067742 -0.029180 -0.049861
+v -0.068447 -0.029802 -0.049315
+v -0.080534 -0.018778 -0.066693
+v -0.073587 -0.028938 -0.054892
+v -0.077166 0.012166 -0.061597
+v -0.077134 0.010339 -0.063263
+v -0.079321 0.011874 -0.064685
+v -0.079221 0.013899 -0.063188
+v -0.059708 -0.009738 -0.106099
+v -0.061917 0.001288 -0.111134
+v -0.014323 -0.082666 -0.113109
+v -0.014104 -0.069937 -0.113011
+v -0.075870 0.033512 -0.095833
+v -0.077163 0.013610 -0.060221
+v -0.079332 0.015522 -0.061753
+v -0.079353 -0.007028 -0.068760
+v -0.075157 0.013545 -0.060426
+v -0.075481 0.011901 -0.061828
+v -0.080488 0.017076 -0.063361
+v -0.080331 0.015201 -0.064893
+v -0.080647 0.012869 -0.066384
+v -0.075759 0.010030 -0.063315
+v -0.077953 0.017193 -0.061610
+v -0.080069 0.063670 -0.071961
+v -0.079358 0.057995 -0.086045
+v -0.076658 0.080579 -0.075382
+v -0.075712 0.075741 -0.090699
+v -0.071300 0.069984 -0.106671
+v -0.074784 0.051232 -0.102079
+v -0.078194 0.018391 -0.056271
+v -0.080658 0.020815 -0.055337
+v -0.063751 0.090179 -0.023161
+v -0.068882 0.085293 -0.030450
+v -0.056924 0.094468 -0.014398
+v -0.056682 0.099758 -0.021144
+v -0.063040 0.096336 -0.030156
+v -0.070314 0.089699 -0.042357
+v -0.079088 0.054220 -0.057066
+v -0.078305 0.068914 -0.057779
+v -0.075293 0.072812 -0.044132
+v -0.074603 0.085313 -0.059690
+v -0.066015 0.098859 -0.048740
+v -0.069725 0.095388 -0.063406
+v -0.058885 0.107351 -0.052613
+v -0.061020 0.105547 -0.067337
+v -0.060885 0.103071 -0.083399
+v -0.071386 0.091558 -0.078643
+v -0.061111 0.101862 -0.036354
+v -0.054998 0.104558 -0.026606
+v -0.055986 0.108263 -0.039860
+v -0.052666 0.108606 -0.031184
+v -0.073315 0.074406 -0.036869
+v -0.063238 -0.034232 -0.049175
+v -0.065290 -0.034666 -0.043879
+v -0.069889 -0.024073 -0.035997
+v -0.068602 0.000301 -0.000541
+v -0.068475 0.012923 -0.003293
+v -0.071752 0.004938 -0.013884
+v -0.070168 -0.009741 -0.011970
+v -0.066942 0.026684 -0.005193
+v -0.070663 0.019230 -0.015546
+v -0.070867 0.033729 -0.017846
+v -0.067018 0.041169 -0.005847
+v -0.062689 0.064723 -0.121874
+v -0.065047 0.047930 -0.119982
+v -0.065551 0.035107 -0.118226
+v -0.047617 0.057479 0.025639
+v -0.022245 0.086165 -0.141845
+v -0.046528 0.080647 0.015342
+v -0.054856 0.085087 -0.122110
+v -0.041853 0.091227 -0.128735
+v -0.070642 0.086915 -0.093805
+v -0.067486 0.081876 -0.107361
+v -0.059564 0.075689 -0.122253
+v -0.064713 0.018880 -0.116322
+v -0.047129 0.108075 -0.019768
+v -0.055638 0.024169 0.019029
+v -0.054815 0.019568 0.018274
+v -0.056122 0.028971 0.020456
+v -0.053316 0.014687 0.020179
+v -0.015225 0.007213 0.044490
+v -0.014902 0.005177 0.046468
+v -0.014638 0.002685 0.048436
+v -0.018320 0.002443 0.044386
+v -0.017802 -0.000469 0.046187
+v -0.011554 0.003098 0.053766
+v -0.011819 0.005677 0.051270
+v -0.014120 0.000270 0.050400
+v -0.013931 -0.002701 0.052250
+v -0.011672 0.000187 0.056103
+v -0.018195 -0.014951 0.048522
+v -0.019070 -0.010252 0.046952
+v -0.016466 -0.006232 0.048945
+v -0.021518 -0.003255 0.044761
+v -0.020437 -0.006457 0.045996
+v -0.017059 -0.003069 0.047614
+v -0.022566 -0.000066 0.042905
+v -0.014885 -0.006035 0.054663
+v -0.015840 -0.009285 0.057667
+v -0.012131 -0.003178 0.059261
+v -0.010322 -0.001035 0.062675
+v -0.009818 0.002486 0.059491
+v -0.018910 0.004649 0.042642
+v -0.023698 0.002472 0.041424
+v -0.024109 -0.009731 0.045506
+v -0.028718 -0.012885 0.045224
+v -0.026121 -0.005594 0.043880
+v -0.030390 -0.008124 0.043445
+v -0.027170 -0.002180 0.042253
+v -0.031606 -0.004492 0.041600
+v -0.032756 -0.016816 0.044711
+v -0.036349 -0.020753 0.043680
+v -0.039523 -0.026184 0.042055
+v -0.039106 -0.015667 0.041805
+v -0.042925 -0.021060 0.039917
+v -0.040880 -0.010544 0.039685
+v -0.045437 -0.015747 0.037657
+v -0.034986 -0.011395 0.042766
+v -0.036516 -0.007380 0.040727
+v -0.032687 -0.000988 0.039716
+v -0.028367 0.000929 0.040632
+v -0.041701 -0.000035 0.035695
+v -0.041631 -0.005322 0.037664
+v -0.037211 -0.003038 0.038670
+v -0.037652 0.001252 0.036776
+v -0.046831 -0.009014 0.035624
+v -0.046329 -0.001631 0.034103
+v -0.045411 0.003142 0.032553
+v -0.031392 0.009705 0.036489
+v -0.026536 0.010390 0.037347
+v -0.035040 0.009356 0.035386
+v -0.041349 0.003816 0.034237
+v -0.037929 0.004518 0.035533
+v -0.049233 0.010214 0.026196
+v -0.046937 0.009468 0.028463
+v -0.041250 0.009020 0.032561
+v -0.038355 0.009098 0.034026
+v -0.044251 0.009154 0.030643
+v -0.021922 0.011983 0.037314
+v -0.017282 0.014014 0.037300
+v -0.056122 -0.062445 -0.071114
+v -0.009301 0.005646 0.057041
+v -0.009218 0.008581 0.054398
+v -0.014602 0.031053 0.039463
+v -0.012388 0.028215 0.038751
+v -0.006655 0.028694 0.045793
+v -0.018488 0.035293 0.041862
+v -0.014049 0.016781 0.037245
+v -0.012245 0.019366 0.037175
+v -0.011327 0.021998 0.037492
+v -0.011102 0.025225 0.038019
+v -0.029679 0.048547 0.043078
+v -0.026215 0.042096 0.044666
+v -0.057701 0.062307 0.009857
+v -0.061367 0.053883 0.005844
+v -0.050565 0.043722 0.028194
+v -0.053731 0.039514 0.025065
+v -0.055617 0.034362 0.022348
+v -0.063250 0.042374 0.002974
+v -0.064060 0.029923 0.002446
+v -0.065190 0.018161 0.003427
+v -0.048537 0.092740 0.001143
+v -0.048383 0.098381 -0.005730
+v -0.048135 0.086741 0.008292
+v -0.038924 0.111397 -0.014404
+v -0.039049 0.106917 -0.005966
+v -0.047948 0.103528 -0.012638
+v -0.039563 0.101754 0.001344
+v -0.039948 0.096135 0.008134
+v -0.039505 0.090233 0.015276
+v -0.038348 0.083917 0.022281
+v -0.036539 0.075477 0.028866
+v -0.047400 -0.061311 -0.089514
+v -0.006949 -0.007475 0.055126
+v -0.006532 -0.007569 0.055593
+v -0.006707 -0.007497 0.054931
+v -0.006230 -0.007535 0.055439
+v -0.006608 -0.007913 0.055559
+v -0.006583 -0.016006 0.058181
+v -0.005582 -0.014036 0.057236
+v -0.006007 -0.010338 0.056503
+v -0.069173 -0.030520 -0.038090
+v -0.058237 -0.097513 -0.065909
+v -0.054471 -0.094000 -0.079489
+v 0.056545 -0.096814 -0.065562
+v 0.052698 -0.093320 -0.079189
+v 0.020825 -0.127361 -0.009159
+v 0.032816 -0.123428 -0.015733
+v 0.033852 -0.131988 -0.017187
+v 0.021398 -0.135349 -0.011158
+v 0.060075 -0.114720 -0.071215
+v 0.057828 -0.105899 -0.067009
+v 0.054158 -0.102371 -0.081714
+v 0.057383 -0.111079 -0.086845
+v 0.047096 -0.100227 -0.094432
+v 0.050243 -0.109929 -0.100159
+v 0.039538 -0.110196 -0.110146
+v 0.037330 -0.098779 -0.104230
+v 0.025425 -0.097988 -0.111380
+v 0.026901 -0.110589 -0.116509
+v 0.044763 -0.118663 -0.024893
+v 0.053790 -0.113786 -0.037104
+v 0.055057 -0.123113 -0.039668
+v 0.045808 -0.127666 -0.026727
+v 0.058527 -0.109596 -0.051319
+v 0.059940 -0.118872 -0.054936
+v 0.013072 -0.110647 -0.119593
+v 0.012240 -0.096902 -0.114869
+v 0.023080 -0.154948 -0.014625
+v 0.009939 -0.157215 -0.013359
+v 0.010122 -0.146856 -0.010360
+v 0.022272 -0.144910 -0.013054
+v 0.053847 -0.158377 -0.033366
+v 0.067749 -0.151895 -0.046836
+v 0.074210 -0.160413 -0.043568
+v 0.058148 -0.168087 -0.030411
+v 0.095856 -0.157802 -0.055544
+v 0.105281 -0.146706 -0.077385
+v 0.038094 -0.162350 -0.022428
+v 0.040568 -0.172104 -0.018915
+v -0.001127 -0.123198 -0.126942
+v -0.001177 -0.110348 -0.120090
+v 0.095343 -0.141401 -0.080150
+v 0.094620 -0.137615 -0.104287
+v 0.106846 -0.143309 -0.102567
+v 0.053193 -0.159149 -0.143435
+v 0.053505 -0.165999 -0.146214
+v 0.068523 -0.161313 -0.143890
+v 0.064359 -0.154855 -0.140387
+v 0.073762 -0.125525 -0.099135
+v 0.062838 -0.126171 -0.114264
+v 0.070909 -0.133786 -0.119718
+v 0.083986 -0.131562 -0.103190
+v 0.036160 -0.143110 -0.136526
+v 0.054100 -0.137984 -0.130259
+v 0.048258 -0.129585 -0.124862
+v 0.032176 -0.132603 -0.130967
+v 0.085720 -0.136415 -0.080822
+v 0.075437 -0.130693 -0.079197
+v 0.078108 -0.144713 -0.062743
+v 0.069981 -0.137448 -0.061956
+v 0.062119 -0.143135 -0.046210
+v 0.055733 -0.118710 -0.107306
+v 0.043409 -0.120350 -0.117480
+v 0.029102 -0.122075 -0.123775
+v 0.014023 -0.123083 -0.126526
+v -0.000775 -0.172292 -0.144966
+v -0.000827 -0.160006 -0.143020
+v 0.017391 -0.145799 -0.138242
+v 0.015671 -0.134621 -0.133395
+v 0.086526 -0.151101 -0.060728
+v 0.063510 -0.128604 -0.059287
+v 0.057890 -0.133219 -0.043218
+v 0.064694 -0.175161 -0.019770
+v 0.082701 -0.168864 -0.035586
+v 0.044838 -0.179704 -0.008633
+v 0.026740 -0.183399 -0.002146
+v 0.018992 -0.158714 -0.141960
+v 0.018364 -0.170773 -0.144262
+v 0.037588 -0.168014 -0.145189
+v 0.041168 -0.155335 -0.141880
+v 0.050454 -0.148272 -0.032146
+v 0.036988 -0.152189 -0.021069
+v 0.023344 -0.164882 -0.014957
+v 0.063983 -0.118945 -0.092893
+v 0.065850 -0.123161 -0.075782
+v 0.047878 -0.137692 -0.029660
+v 0.035244 -0.141986 -0.019492
+v 0.009841 -0.137681 -0.007350
+v 0.024529 -0.174514 -0.010786
+v 0.095865 -0.147695 -0.123837
+v 0.082815 -0.154983 -0.136908
+v 0.075710 -0.151155 -0.135313
+v -0.001131 -0.146733 -0.139337
+v -0.001047 -0.147560 -0.009933
+v -0.001387 -0.157719 -0.013313
+v -0.000616 -0.138350 -0.006170
+v 0.080264 -0.142597 -0.125605
+v 0.059918 -0.147345 -0.135794
+v -0.000502 -0.131196 -0.002842
+v -0.001247 -0.096216 -0.115730
+v -0.001499 -0.168894 -0.015852
+v -0.001448 -0.180708 -0.012266
+v -0.001277 -0.189959 -0.002130
+v 0.009948 -0.130234 -0.004480
+v 0.011001 -0.178433 -0.010868
+v 0.010222 -0.167655 -0.014648
+v 0.012077 -0.187928 -0.001725
+v -0.001060 -0.134901 -0.134026
+v -0.021555 -0.127666 -0.009430
+v -0.033525 -0.123762 -0.016067
+v -0.034799 -0.132297 -0.017413
+v -0.022329 -0.135573 -0.011343
+v -0.061668 -0.115509 -0.071588
+v -0.059556 -0.106705 -0.067391
+v -0.055979 -0.103076 -0.081990
+v -0.059148 -0.111763 -0.087104
+v -0.049081 -0.100845 -0.094593
+v -0.052183 -0.110474 -0.100320
+v -0.041562 -0.110623 -0.110264
+v -0.039436 -0.099272 -0.104328
+v -0.027594 -0.098337 -0.111448
+v -0.028989 -0.110892 -0.116606
+v -0.045674 -0.119264 -0.025274
+v -0.054796 -0.114691 -0.037602
+v -0.055997 -0.123685 -0.040008
+v -0.046883 -0.128086 -0.026982
+v -0.060147 -0.110585 -0.051740
+v -0.061205 -0.119819 -0.055354
+v -0.015251 -0.110834 -0.119603
+v -0.014602 -0.097103 -0.114877
+v -0.024912 -0.155164 -0.014562
+v -0.012201 -0.157237 -0.013284
+v -0.011625 -0.146873 -0.010455
+v -0.023731 -0.145077 -0.013228
+v -0.054790 -0.159352 -0.034207
+v -0.068762 -0.152648 -0.047563
+v -0.075269 -0.161050 -0.044241
+v -0.059498 -0.168635 -0.030712
+v -0.096972 -0.158485 -0.056203
+v -0.106522 -0.147315 -0.077962
+v -0.039763 -0.162946 -0.022584
+v -0.042539 -0.172802 -0.019044
+v -0.096705 -0.141947 -0.080624
+v -0.096190 -0.138034 -0.104643
+v -0.108348 -0.143761 -0.103002
+v -0.054917 -0.159381 -0.143700
+v -0.055222 -0.166193 -0.146484
+v -0.070154 -0.161549 -0.144210
+v -0.066065 -0.155144 -0.140619
+v -0.075460 -0.126035 -0.099332
+v -0.064596 -0.126835 -0.114426
+v -0.072644 -0.134224 -0.119885
+v -0.085585 -0.132049 -0.103416
+v -0.037983 -0.143355 -0.136662
+v -0.055933 -0.138173 -0.130402
+v -0.050082 -0.129398 -0.124790
+v -0.034034 -0.132829 -0.131005
+v -0.087134 -0.136968 -0.081217
+v -0.076839 -0.131291 -0.079559
+v -0.079367 -0.145352 -0.063269
+v -0.071256 -0.138117 -0.062517
+v -0.062997 -0.143916 -0.047116
+v -0.057570 -0.119181 -0.107463
+v -0.045353 -0.120703 -0.117583
+v -0.031115 -0.122305 -0.123793
+v -0.015962 -0.123725 -0.126594
+v -0.019464 -0.146512 -0.138437
+v -0.018134 -0.133337 -0.133201
+v -0.087706 -0.151779 -0.061357
+v -0.064667 -0.129395 -0.059927
+v -0.058912 -0.133853 -0.043885
+v -0.066341 -0.175935 -0.019867
+v -0.083948 -0.169492 -0.036059
+v -0.046990 -0.180361 -0.008579
+v -0.029208 -0.183659 -0.001863
+v -0.020870 -0.158762 -0.142102
+v -0.020054 -0.170829 -0.144304
+v -0.039156 -0.168266 -0.145389
+v -0.042907 -0.155570 -0.142068
+v -0.051663 -0.148888 -0.032884
+v -0.038628 -0.152615 -0.021125
+v -0.025556 -0.165402 -0.015108
+v -0.065710 -0.119535 -0.093134
+v -0.067399 -0.123869 -0.076172
+v -0.049232 -0.138121 -0.030031
+v -0.036611 -0.142318 -0.019732
+v -0.011269 -0.137807 -0.007585
+v -0.027014 -0.174872 -0.010697
+v -0.097429 -0.148121 -0.124146
+v -0.084447 -0.155317 -0.137195
+v -0.077372 -0.151487 -0.135522
+v -0.081922 -0.143012 -0.125854
+v -0.061632 -0.147686 -0.135990
+v -0.010904 -0.130347 -0.004770
+v -0.013654 -0.178677 -0.010728
+v -0.012785 -0.168025 -0.014746
+v -0.014466 -0.187882 -0.001556
+v -0.006999 -0.007855 0.055196
+v -0.067860 -0.032730 -0.015524
+v -0.067314 -0.040402 -0.022977
+v -0.064107 -0.051653 -0.015179
+v -0.065150 -0.043144 -0.007397
+v -0.059236 -0.061673 -0.006972
+v -0.062098 -0.051662 -0.001005
+v -0.053415 -0.069491 0.002493
+v -0.057767 -0.059358 0.006529
+v -0.048508 -0.074029 0.012738
+v -0.053091 -0.064259 0.015045
+v -0.033816 -0.076665 0.035238
+v -0.032072 -0.081895 0.031411
+v -0.025264 -0.084319 0.039473
+v -0.025928 -0.080178 0.043745
+v -0.017894 -0.086349 0.046455
+v -0.018532 -0.082341 0.050643
+v -0.009842 -0.087061 0.050489
+v -0.009918 -0.083199 0.054895
+v -0.000127 -0.086961 0.052450
+v -0.000137 -0.082916 0.056729
+v -0.000082 -0.073326 0.059714
+v -0.008947 -0.073679 0.058432
+v -0.009472 -0.078594 0.057403
+v -0.000170 -0.078761 0.058823
+v -0.016472 -0.072996 0.054509
+v -0.017881 -0.078048 0.053115
+v -0.023154 -0.070881 0.048822
+v -0.025012 -0.075837 0.046750
+v -0.030839 -0.067097 0.041879
+v -0.032800 -0.072098 0.038938
+v -0.037115 -0.062545 0.037599
+v -0.040436 -0.065648 0.033205
+v -0.009026 -0.090204 0.043826
+v -0.000129 -0.089962 0.046230
+v -0.016593 -0.089601 0.040123
+v -0.023616 -0.087746 0.034004
+v -0.029756 -0.086069 0.026825
+v -0.041923 -0.077066 0.022670
+v -0.036211 -0.084233 0.018437
+v -0.043468 -0.080240 0.009727
+v -0.048454 -0.075929 -0.001436
+v -0.055785 -0.067911 -0.012511
+v -0.062245 -0.056877 -0.022616
+v -0.065985 -0.044839 -0.030560
+v -0.068667 -0.032454 -0.028483
+v -0.015133 -0.068732 0.054630
+v -0.008275 -0.068950 0.057974
+v -0.021284 -0.066914 0.049927
+v -0.028492 -0.062830 0.044135
+v -0.034423 -0.058701 0.040534
+v -0.039307 -0.047000 0.039792
+v -0.038097 -0.053115 0.039498
+v -0.037783 -0.035259 0.042349
+v -0.038744 -0.041035 0.041003
+v -0.045796 -0.028729 0.037158
+v -0.047464 -0.037378 0.034015
+v -0.043195 -0.039982 0.037741
+v -0.041933 -0.032478 0.039898
+v -0.049220 -0.022799 0.034128
+v -0.051477 -0.033232 0.029882
+v -0.008126 -0.092844 0.037854
+v -0.000131 -0.092962 0.039112
+v -0.014874 -0.092564 0.034379
+v -0.020906 -0.091731 0.028610
+v -0.025403 -0.091340 0.021687
+v -0.030381 -0.090362 0.013961
+v -0.035572 -0.088018 0.005078
+v -0.042136 -0.083835 -0.005535
+v -0.051027 -0.075504 -0.017764
+v -0.059807 -0.061693 -0.029025
+v -0.063318 -0.048827 -0.037267
+v -0.021839 -0.098538 0.009007
+v -0.017822 -0.097771 0.015447
+v -0.014282 -0.097892 0.021101
+v -0.011057 -0.097556 0.025875
+v -0.006421 -0.098765 0.025363
+v -0.000063 -0.099655 0.024964
+v -0.005657 -0.101003 0.017530
+v 0.000144 -0.101815 0.018094
+v -0.008024 -0.103541 0.012916
+v 0.000163 -0.104388 0.014322
+v -0.010335 -0.106811 0.009678
+v -0.000003 -0.108933 0.010644
+v -0.041646 -0.055850 0.036044
+v -0.045840 -0.056746 0.031087
+v -0.047748 -0.046924 0.031622
+v -0.052109 -0.044255 0.026292
+v -0.050556 -0.056118 0.024752
+v -0.043485 -0.048068 0.036224
+v -0.055825 -0.052697 0.017076
+v -0.056512 -0.039778 0.020353
+v -0.055320 -0.027231 0.025755
+v -0.059731 -0.033986 0.015212
+v -0.059856 -0.047143 0.009795
+v -0.058716 -0.022971 0.021151
+v -0.061348 -0.018756 0.016981
+v -0.062000 -0.029232 0.010660
+v -0.062499 -0.040499 0.004675
+v -0.045678 -0.068748 0.025027
+v -0.064680 -0.034334 -0.000767
+v -0.063616 -0.014876 0.012266
+v -0.063997 -0.024191 0.005885
+v -0.066044 -0.009441 0.006294
+v -0.066799 -0.018410 -0.002305
+v -0.067384 -0.026595 -0.009198
+v -0.069617 -0.024262 -0.024554
+v -0.000117 -0.068568 0.059393
+v -0.063515 -0.032819 -0.053591
+v -0.021746 -0.028842 0.049743
+v -0.016464 -0.026979 0.052514
+v -0.011537 -0.026205 0.055713
+v -0.007530 -0.025881 0.058167
+v -0.003648 -0.025952 0.059583
+v -0.000244 -0.120346 0.003729
+v 0.000602 0.042237 0.049298
+v -0.000193 -0.113533 0.007681
+v -0.000152 -0.056713 0.044342
+v 0.000155 0.127456 -0.028834
+v -0.000127 -0.063767 0.058661
+v -0.000091 -0.051711 0.046483
+v 0.000404 0.003481 0.069939
+v -0.000133 -0.059369 0.057665
+v -0.000058 -0.048217 0.061288
+v 0.000012 -0.042053 0.059679
+v 0.000659 0.078518 0.039669
+v -0.000010 -0.044521 0.061004
+v -0.000093 -0.023792 0.059913
+v 0.000434 -0.000600 0.073359
+v 0.000011 -0.040644 0.058324
+v -0.001314 -0.044979 -0.118086
+v -0.000064 -0.051935 0.059735
+v -0.000146 -0.045410 0.050843
+v -0.000024 -0.040326 0.056651
+v 0.000239 -0.041342 0.044220
+v 0.000010 -0.026621 0.059849
+v 0.000543 0.020254 0.055558
+v -0.001194 -0.006197 -0.142079
+v 0.000429 0.017184 0.058141
+v 0.000253 0.102500 0.021743
+v 0.000538 0.088778 0.034063
+v 0.000405 0.007272 0.066638
+v -0.000675 0.056625 -0.154942
+v -0.000530 0.073108 -0.151715
+v 0.000560 0.061243 0.045368
+v 0.000082 0.126155 -0.092153
+v 0.000451 -0.004530 0.075501
+v 0.000004 -0.018507 0.065028
+v 0.000165 0.119143 -0.003853
+v -0.000961 0.022185 -0.152597
+v -0.000077 0.117028 -0.116545
+v 0.000135 -0.042689 0.059331
+v 0.000156 0.123646 -0.015789
+v 0.000175 -0.044241 0.056432
+v 0.000185 -0.014785 0.072547
+v -0.001088 0.006427 -0.148362
+v -0.000835 0.035893 -0.154562
+v -0.000010 -0.033068 0.060617
+v -0.000009 -0.029221 0.059874
+v 0.000009 0.122514 -0.104768
+v 0.000747 0.048438 0.048754
+v 0.000038 -0.035538 0.061462
+v 0.000615 0.029159 0.050174
+v 0.000112 -0.037982 0.061781
+v -0.000366 -0.125863 0.000086
+v -0.000288 0.096947 -0.138702
+v 0.000118 -0.040310 0.060737
+v 0.000227 -0.043473 0.050365
+v 0.000464 0.010852 0.063550
+v 0.000235 -0.042618 0.047269
+v 0.000613 0.068667 0.043005
+v -0.000063 -0.021284 0.061168
+v -0.000024 -0.020728 0.061726
+v 0.000520 0.026618 0.051062
+v 0.000304 0.096209 0.028193
+v 0.000698 0.031710 0.049904
+v -0.000019 -0.021957 0.060635
+v -0.000398 0.087772 -0.145160
+v 0.000134 0.130655 -0.074779
+v 0.000142 0.131560 -0.061532
+v 0.000510 0.023248 0.053221
+v 0.000520 0.014217 0.060582
+v -0.001281 -0.054794 -0.116322
+v 0.000373 -0.011310 0.075279
+v 0.000423 -0.008404 0.076114
+v 0.000180 0.113827 0.006372
+v -0.001244 -0.015759 -0.136868
+v 0.000151 0.130799 -0.044326
+v 0.000210 0.108246 0.014619
+v -0.001293 -0.033405 -0.125026
+v -0.001263 -0.023462 -0.132537
+v 0.000100 -0.016018 0.070805
+v 0.000516 0.035002 0.049464
+v 0.000028 -0.017513 0.067686
+v -0.000173 0.109583 -0.127375
+v 0.000005 -0.019692 0.062902
+v 0.006885 -0.007903 0.055308
+v 0.067018 -0.032027 -0.014503
+v 0.065700 -0.039406 -0.022133
+v 0.062631 -0.050875 -0.014255
+v 0.064314 -0.042610 -0.006366
+v 0.058268 -0.061049 -0.006146
+v 0.061436 -0.051169 -0.000108
+v 0.052566 -0.068899 0.003115
+v 0.057170 -0.058900 0.007271
+v -0.004175 -0.014740 0.069849
+v -0.003184 -0.016407 0.067396
+v 0.047943 -0.073595 0.013255
+v 0.052632 -0.063907 0.015667
+v -0.015156 0.020612 0.034363
+v -0.007898 -0.018590 0.057573
+v -0.006992 0.004643 0.062685
+v -0.013934 0.020870 0.035049
+v 0.033361 -0.076283 0.035468
+v 0.031620 -0.081449 0.031576
+v 0.024631 -0.083720 0.039426
+v 0.025315 -0.079724 0.043708
+v -0.010389 -0.018170 0.056431
+v 0.017392 -0.085958 0.046446
+v 0.018079 -0.082120 0.050677
+v 0.009515 -0.086962 0.050568
+v 0.009667 -0.083210 0.055049
+v -0.006534 0.011216 0.057108
+v -0.008239 0.020130 0.044264
+v 0.008615 -0.073679 0.058765
+v 0.009144 -0.078655 0.057686
+v -0.006557 0.007848 0.059992
+v -0.015693 -0.014729 0.056617
+v -0.014034 -0.017601 0.055473
+v -0.006843 -0.008906 0.072320
+v -0.007759 -0.006141 0.071334
+v 0.016108 -0.072995 0.054672
+v -0.007480 -0.011905 0.070249
+v -0.004753 -0.013604 0.070926
+v 0.017357 -0.077984 0.053247
+v -0.016613 0.020361 0.034680
+v -0.027385 0.021548 0.028844
+v -0.006028 -0.010855 0.072208
+v -0.002981 -0.012808 0.072849
+v -0.019895 0.021198 0.032538
+v -0.015958 0.020494 0.034476
+v -0.011878 -0.010311 0.065200
+v -0.006859 0.019144 0.048586
+v 0.022691 -0.070777 0.049013
+v 0.024399 -0.075624 0.046884
+v 0.030457 -0.066958 0.042312
+v 0.032323 -0.071877 0.039321
+v -0.014569 -0.011897 0.060828
+v 0.036862 -0.062441 0.038323
+v 0.040060 -0.065491 0.033838
+v 0.008837 -0.089980 0.043779
+v -0.017453 0.020459 0.034597
+v -0.018055 0.020642 0.034496
+v -0.007619 0.001038 0.065748
+v 0.016245 -0.089084 0.039979
+v 0.023182 -0.087079 0.033864
+v 0.029496 -0.085508 0.026836
+v 0.041620 -0.076827 0.023084
+v -0.022514 0.021259 0.032855
+v -0.002965 -0.018366 0.062524
+v -0.010073 -0.011031 0.067625
+v -0.008645 -0.011544 0.069107
+v -0.003456 -0.019176 0.060666
+v 0.036026 -0.083932 0.018612
+v 0.042987 -0.079661 0.009936
+v -0.011676 0.020727 0.037331
+v 0.047558 -0.075102 -0.001135
+v -0.006517 0.016878 0.052191
+v -0.006401 0.014095 0.055015
+v 0.054799 -0.067092 -0.012028
+v 0.060576 -0.055843 -0.021983
+v -0.002829 -0.017416 0.064935
+v 0.064220 -0.043738 -0.029867
+v 0.067577 -0.031650 -0.027433
+v 0.014850 -0.068700 0.054919
+v 0.008065 -0.068914 0.058351
+v 0.020933 -0.066861 0.050209
+v -0.007966 -0.002675 0.069165
+v 0.028142 -0.062765 0.044535
+v 0.034278 -0.058660 0.041309
+v 0.039231 -0.047027 0.040682
+v -0.005902 -0.018683 0.059148
+v 0.037969 -0.053108 0.040438
+v 0.037688 -0.035287 0.043241
+v 0.038614 -0.041068 0.041837
+v 0.045741 -0.028706 0.038145
+v 0.047226 -0.037306 0.034908
+v 0.043039 -0.039958 0.038646
+v 0.000281 -0.013265 0.073986
+v -0.005318 -0.012404 0.071667
+v 0.041906 -0.032480 0.040897
+v 0.049271 -0.022689 0.035086
+v 0.051232 -0.033140 0.030773
+v 0.008013 -0.092550 0.037769
+v -0.029031 0.035299 0.039722
+v -0.028963 0.032961 0.038334
+v 0.014640 -0.092059 0.034252
+v 0.020630 -0.091141 0.028485
+v 0.025082 -0.090820 0.021666
+v 0.030090 -0.089966 0.013985
+v 0.035085 -0.087507 0.005123
+v 0.041314 -0.083031 -0.005292
+v -0.022085 0.038377 0.043424
+v -0.004404 0.038499 0.048518
+v -0.009745 0.038573 0.047402
+v 0.050357 -0.074777 -0.017300
+v -0.029934 0.028200 0.037141
+v -0.030476 0.025951 0.035897
+v -0.028813 0.030857 0.036996
+v -0.030240 0.026359 0.037220
+v -0.030227 0.027190 0.037554
+v 0.058317 -0.060769 -0.028444
+v -0.028905 0.029862 0.036603
+v 0.061207 -0.047527 -0.036513
+v -0.028870 0.029894 0.036608
+v 0.021544 -0.098278 0.008882
+v -0.027096 0.037461 0.041426
+v 0.017638 -0.097468 0.015310
+v 0.014218 -0.097529 0.020967
+v -0.029343 0.029201 0.036570
+v -0.030635 0.025331 0.034202
+v 0.011029 -0.097225 0.025743
+v -0.031326 0.023525 0.029573
+v 0.006379 -0.098623 0.025300
+v 0.000569 0.038557 0.049391
+v -0.021001 0.048551 0.046172
+v -0.051298 0.047821 0.024808
+v 0.005821 -0.100932 0.017471
+v -0.054468 0.042488 0.021981
+v 0.008045 -0.103427 0.012839
+v -0.048065 0.006404 0.029192
+v -0.050728 0.007267 0.026644
+v -0.018229 0.042387 0.046752
+v 0.010245 -0.106741 0.009794
+v 0.041374 -0.055752 0.036833
+v 0.045347 -0.056592 0.031766
+v 0.047325 -0.046798 0.032406
+v 0.051690 -0.044094 0.027091
+v -0.026180 0.068008 0.037933
+v -0.030616 0.114050 -0.010519
+v -0.024654 0.060909 0.041268
+v -0.039008 0.053471 0.035410
+v 0.050122 -0.055953 0.025452
+v -0.030752 0.109401 -0.001356
+v 0.043220 -0.048007 0.037022
+v 0.055318 -0.052487 0.017866
+v 0.056045 -0.039560 0.021255
+v 0.055200 -0.027001 0.026688
+v 0.059326 -0.033616 0.016123
+v -0.046385 0.051926 0.029230
+v 0.059280 -0.046860 0.010721
+v -0.055373 0.012125 0.019287
+v -0.006099 0.054463 0.047272
+v -0.056705 0.017896 0.016359
+v 0.058601 -0.022657 0.022199
+v -0.014327 0.054464 0.046548
+v -0.027123 0.124766 -0.073239
+v 0.061273 -0.018276 0.017985
+v 0.061555 -0.028811 0.011665
+v 0.061959 -0.040069 0.005617
+v 0.045423 -0.068488 0.025556
+v -0.013128 0.035253 0.044173
+v 0.064109 -0.033889 0.000283
+v -0.038172 0.007097 0.034583
+v -0.041219 0.006659 0.033229
+v 0.063466 -0.014366 0.013242
+v -0.031009 0.124466 -0.061926
+v -0.054455 0.009985 0.021408
+v -0.029421 0.085930 0.027411
+v -0.009145 0.028564 0.042307
+v -0.010959 0.031389 0.042421
+v -0.030261 0.092750 0.020713
+v 0.063578 -0.023615 0.006841
+v 0.065855 -0.008896 0.007230
+v 0.066430 -0.017822 -0.001314
+v -0.030607 0.098837 0.013729
+v 0.066814 -0.025955 -0.008119
+v 0.068806 -0.023572 -0.023335
+v -0.030948 0.118211 -0.021195
+v -0.021239 0.010463 0.038157
+v 0.062119 -0.031828 -0.052555
+v 0.021548 -0.028922 0.050275
+v 0.016280 -0.027080 0.052922
+v -0.056723 0.036294 0.019438
+v -0.057576 0.029665 0.017833
+v -0.031610 0.123758 -0.048025
+v -0.030833 0.008178 0.037116
+v -0.057275 0.023702 0.016665
+v -0.015385 0.126768 -0.081361
+v 0.011418 -0.026292 0.056105
+v -0.052898 0.008333 0.024020
+v 0.007545 -0.025923 0.058615
+v 0.003683 -0.025957 0.059934
+v -0.031599 0.054290 0.040345
+v -0.034703 0.007542 0.035978
+v -0.008603 0.025416 0.041792
+v -0.016600 0.012770 0.038481
+v -0.013164 0.015793 0.038910
+v -0.030702 0.104314 0.006580
+v -0.010805 0.018768 0.039844
+v -0.008993 0.022016 0.041189
+v -0.026012 0.008970 0.037997
+v -0.027988 0.076872 0.033679
+v -0.031162 0.121488 -0.033929
+v -0.044693 0.006425 0.031382
+v 0.000119 0.128078 -0.084209
+v 0.000548 0.054541 0.047354
+v -0.009774 0.020411 0.040674
+v -0.015738 0.038587 0.045776
+v -0.023123 0.054424 0.044268
+v -0.000069 -0.055572 0.058216
+v -0.026877 -0.039159 0.048280
+v -0.028306 -0.041177 0.046907
+v -0.028069 -0.046818 0.045074
+v -0.019457 -0.034316 0.053282
+v -0.023939 -0.036877 0.050363
+v -0.018391 -0.053313 0.052541
+v -0.028752 -0.043073 0.045955
+v -0.012718 -0.054504 0.055684
+v -0.029011 -0.044932 0.045220
+v -0.006828 -0.054946 0.057348
+v 0.000003 -0.031002 0.060277
+v -0.009326 -0.030844 0.058768
+v -0.004535 -0.030578 0.060223
+v -0.014348 -0.032119 0.056263
+v -0.026014 -0.048980 0.046148
+v -0.023004 -0.051382 0.048773
+v 0.004415 -0.014851 0.070001
+v 0.003287 -0.016431 0.067490
+v 0.015485 0.020616 0.035250
+v 0.007800 -0.018649 0.057804
+v 0.007311 0.004691 0.062948
+v 0.014206 0.020870 0.035753
+v 0.010294 -0.018280 0.056701
+v 0.007010 0.011272 0.057379
+v 0.008503 0.020141 0.044599
+v 0.006948 0.007907 0.060264
+v 0.015625 -0.014889 0.056918
+v 0.013938 -0.017751 0.055766
+v 0.007285 -0.008896 0.072523
+v 0.008167 -0.006110 0.071551
+v 0.007742 -0.011941 0.070417
+v 0.005099 -0.013729 0.071117
+v 0.017036 0.020378 0.035769
+v 0.027923 0.021548 0.029922
+v 0.006478 -0.010891 0.072411
+v 0.003534 -0.012946 0.073058
+v 0.020373 0.021085 0.033756
+v 0.016360 0.020503 0.035499
+v 0.011902 -0.010372 0.065385
+v 0.007280 0.019182 0.048911
+v 0.014569 -0.012014 0.061072
+v 0.017918 0.020476 0.035757
+v 0.018534 0.020654 0.035719
+v 0.007831 0.001052 0.065986
+v 0.022982 0.020994 0.033996
+v 0.002894 -0.018309 0.062568
+v 0.010136 -0.011060 0.067782
+v 0.008788 -0.011563 0.069258
+v 0.003353 -0.019137 0.060761
+v 0.011879 0.020718 0.037850
+v 0.007054 0.016935 0.052499
+v 0.007028 0.014166 0.055293
+v 0.002812 -0.017362 0.064984
+v 0.008300 -0.002646 0.069388
+v 0.005803 -0.018701 0.059323
+v 0.005735 -0.012493 0.071872
+v 0.029981 0.035469 0.040838
+v 0.029848 0.033101 0.039576
+v 0.023098 0.038415 0.044398
+v 0.005478 0.038489 0.048854
+v 0.010754 0.038563 0.047925
+v 0.030507 0.028360 0.038520
+v 0.030801 0.025977 0.036879
+v 0.029578 0.030978 0.038321
+v 0.030739 0.026358 0.038527
+v 0.030760 0.027278 0.038913
+v 0.029611 0.029964 0.037980
+v 0.029554 0.030001 0.038063
+v 0.028093 0.037590 0.042504
+v 0.029970 0.029330 0.037962
+v 0.031020 0.025550 0.035077
+v 0.031786 0.023776 0.030497
+v 0.022153 0.048633 0.047113
+v 0.051913 0.048248 0.025409
+v 0.054884 0.042882 0.022563
+v 0.048442 0.006598 0.030202
+v 0.051058 0.007540 0.027592
+v 0.019322 0.042391 0.047691
+v 0.027034 0.068231 0.038303
+v 0.030821 0.114235 -0.010580
+v 0.025612 0.061079 0.041749
+v 0.039902 0.053766 0.036093
+v 0.030939 0.109514 -0.001367
+v 0.047161 0.052315 0.029865
+v 0.055920 0.012536 0.020255
+v 0.007201 0.054499 0.047565
+v 0.057471 0.018309 0.017411
+v 0.015454 0.054550 0.047107
+v 0.027338 0.124945 -0.073319
+v 0.013998 0.035311 0.044785
+v 0.038606 0.007209 0.035807
+v 0.041658 0.006780 0.034417
+v 0.031255 0.124690 -0.062029
+v 0.054863 0.010343 0.022304
+v 0.030098 0.086021 0.027515
+v 0.009847 0.028725 0.042679
+v 0.011696 0.031546 0.042865
+v 0.030540 0.092542 0.021011
+v 0.030817 0.098751 0.013883
+v 0.031178 0.118442 -0.021284
+v 0.021441 0.010347 0.038779
+v 0.057007 0.036673 0.020053
+v 0.057979 0.030030 0.018642
+v 0.031867 0.124016 -0.048140
+v 0.031216 0.008095 0.038025
+v 0.057949 0.024079 0.017601
+v 0.015608 0.126834 -0.081395
+v 0.053301 0.008629 0.024920
+v 0.032551 0.054499 0.041029
+v 0.035074 0.007592 0.037095
+v 0.009035 0.025561 0.042106
+v 0.016699 0.012663 0.039012
+v 0.013262 0.015707 0.039384
+v 0.030887 0.104337 0.006638
+v 0.010943 0.018719 0.040276
+v 0.009253 0.022089 0.041506
+v 0.026152 0.008886 0.038694
+v 0.028804 0.077129 0.033996
+v 0.031411 0.121751 -0.034039
+v 0.045089 0.006562 0.032469
+v 0.009970 0.020412 0.041051
+v 0.016734 0.038574 0.046546
+v 0.024195 0.054552 0.044965
+v 0.026870 -0.039253 0.048828
+v 0.028315 -0.041281 0.047530
+v 0.027989 -0.046920 0.045875
+v 0.019443 -0.034438 0.053654
+v 0.023914 -0.036993 0.050808
+v 0.017851 -0.053311 0.052715
+v 0.028696 -0.043168 0.046631
+v 0.012371 -0.054524 0.055902
+v 0.028943 -0.045022 0.046002
+v 0.006510 -0.055059 0.057692
+v 0.009342 -0.030851 0.059180
+v 0.004550 -0.030548 0.060545
+v 0.014277 -0.032177 0.056629
+v 0.025852 -0.049037 0.046684
+v 0.022600 -0.051383 0.048992
+v 0.031854 0.022156 0.025021
+v -0.031292 0.021919 0.023973
+v 0.030092 0.021398 0.037882
+v 0.028525 0.021398 0.037406
+v 0.027081 0.021398 0.036636
+v 0.025869 0.021399 0.035640
+v 0.030123 0.021716 0.037881
+v 0.028586 0.022023 0.037407
+v 0.027170 0.022306 0.036636
+v 0.025983 0.022543 0.035639
+v 0.030215 0.022023 0.037882
+v 0.028768 0.022623 0.037407
+v 0.027434 0.023178 0.036636
+v 0.026314 0.023642 0.035639
+v 0.030366 0.022305 0.037882
+v 0.029063 0.023177 0.037407
+v 0.027862 0.023981 0.036636
+v 0.026855 0.024655 0.035639
+v 0.030569 0.022551 0.037881
+v 0.029461 0.023662 0.037406
+v 0.028439 0.024686 0.036636
+v 0.027582 0.025544 0.035640
+v 0.030815 0.022755 0.037882
+v 0.029944 0.024060 0.037407
+v 0.029142 0.025263 0.036636
+v 0.028469 0.026272 0.035638
+v 0.031096 0.022905 0.037882
+v 0.030497 0.024356 0.037406
+v 0.029944 0.025693 0.036635
+v 0.029481 0.026814 0.035638
+v 0.031403 0.022999 0.037881
+v 0.031096 0.024538 0.037406
+v 0.030814 0.025957 0.036636
+v 0.030578 0.027146 0.035637
+v 0.031720 0.023029 0.037881
+v 0.031720 0.024599 0.037405
+v 0.031720 0.026046 0.036634
+v 0.031719 0.027259 0.035637
+v 0.032038 0.022998 0.037880
+v 0.032344 0.024537 0.037405
+v 0.032625 0.025956 0.036633
+v 0.032861 0.027145 0.035635
+v 0.032344 0.022905 0.037880
+v 0.032942 0.024354 0.037403
+v 0.033495 0.025691 0.036632
+v 0.033958 0.026811 0.035634
+v 0.032625 0.022754 0.037879
+v 0.033495 0.024058 0.037403
+v 0.034297 0.025261 0.036631
+v 0.034969 0.026269 0.035633
+v 0.032871 0.022550 0.037879
+v 0.033978 0.023659 0.037402
+v 0.035000 0.024682 0.036630
+v 0.035857 0.025540 0.035633
+v 0.033074 0.022303 0.037878
+v 0.034376 0.023174 0.037401
+v 0.035576 0.023978 0.036629
+v 0.036583 0.024651 0.035631
+v 0.033225 0.022021 0.037877
+v 0.034671 0.022621 0.037400
+v 0.036005 0.023174 0.036627
+v 0.037124 0.023637 0.035629
+v 0.033317 0.021714 0.037876
+v 0.034853 0.022020 0.037399
+v 0.036269 0.022302 0.036627
+v 0.037456 0.022538 0.035628
+v 0.033349 0.021396 0.037876
+v 0.034914 0.021396 0.037398
+v 0.036358 0.021395 0.036626
+v 0.037570 0.021394 0.035629
+v 0.033317 0.021079 0.037876
+v 0.034853 0.020771 0.037398
+v 0.036269 0.020488 0.036625
+v 0.037456 0.020251 0.035627
+v 0.033225 0.020772 0.037876
+v 0.034671 0.020171 0.037398
+v 0.036005 0.019617 0.036626
+v 0.037125 0.019152 0.035628
+v 0.033075 0.020490 0.037876
+v 0.034377 0.019618 0.037399
+v 0.035578 0.018813 0.036627
+v 0.036585 0.018139 0.035629
+v 0.032872 0.020243 0.037877
+v 0.033980 0.019133 0.037400
+v 0.035002 0.018109 0.036628
+v 0.035860 0.017249 0.035632
+v 0.032626 0.020040 0.037877
+v 0.033497 0.018735 0.037401
+v 0.034300 0.017531 0.036629
+v 0.034973 0.016521 0.035631
+v 0.032345 0.019889 0.037878
+v 0.032944 0.018439 0.037401
+v 0.033498 0.017101 0.036630
+v 0.033962 0.015980 0.035633
+v 0.032039 0.019796 0.037879
+v 0.032346 0.018256 0.037403
+v 0.032628 0.016836 0.036632
+v 0.032865 0.015647 0.035633
+v 0.031722 0.019765 0.037879
+v 0.031722 0.018195 0.037403
+v 0.031723 0.016747 0.036632
+v 0.031724 0.015534 0.035634
+v 0.031404 0.019796 0.037880
+v 0.031099 0.018256 0.037404
+v 0.030818 0.016837 0.036633
+v 0.030582 0.015647 0.035635
+v 0.031098 0.019889 0.037880
+v 0.030500 0.018439 0.037404
+v 0.029948 0.017102 0.036633
+v 0.029486 0.015981 0.035635
+v 0.030816 0.020040 0.037880
+v 0.029947 0.018735 0.037405
+v 0.029145 0.017532 0.036634
+v 0.028474 0.016523 0.035635
+v 0.030570 0.020244 0.037880
+v 0.029463 0.019134 0.037404
+v 0.028442 0.018110 0.036634
+v 0.027586 0.017252 0.035637
+v 0.030367 0.020491 0.037881
+v 0.029065 0.019619 0.037406
+v 0.027865 0.018815 0.036634
+v 0.026858 0.018142 0.035636
+v 0.031721 0.021397 0.038040
+v 0.030216 0.020773 0.037881
+v 0.028769 0.020173 0.037405
+v 0.027436 0.019619 0.036635
+v 0.026317 0.019156 0.035638
+v 0.030124 0.021080 0.037881
+v 0.028587 0.020773 0.037406
+v 0.027170 0.020491 0.036636
+v 0.025983 0.020255 0.035638
+v 0.024688 0.021399 0.034784
+v 0.022863 0.021400 0.033329
+v 0.021306 0.021400 0.031428
+v 0.020149 0.021401 0.029258
+v 0.019438 0.021401 0.026903
+v 0.019199 0.021401 0.024455
+v 0.019440 0.021400 0.022004
+v 0.020152 0.021399 0.019649
+v 0.021310 0.021399 0.017477
+v 0.022868 0.021398 0.015574
+v 0.024766 0.021397 0.014012
+v 0.026932 0.021396 0.012852
+v 0.029281 0.021395 0.012138
+v 0.024823 0.022774 0.034783
+v 0.023032 0.023131 0.033330
+v 0.021505 0.023437 0.031429
+v 0.020370 0.023663 0.029259
+v 0.019673 0.023802 0.026903
+v 0.019438 0.023849 0.024454
+v 0.019674 0.023802 0.022004
+v 0.020374 0.023661 0.019649
+v 0.021510 0.023435 0.017477
+v 0.023038 0.023129 0.015574
+v 0.024900 0.022758 0.014012
+v 0.027023 0.022333 0.012852
+v 0.029328 0.021873 0.012138
+v 0.025223 0.024096 0.034783
+v 0.023535 0.024797 0.033330
+v 0.022097 0.025395 0.031429
+v 0.021029 0.025838 0.029258
+v 0.020371 0.026112 0.026903
+v 0.020149 0.026204 0.024453
+v 0.020372 0.026111 0.022004
+v 0.021032 0.025837 0.019649
+v 0.022102 0.025392 0.017477
+v 0.023541 0.024794 0.015574
+v 0.025295 0.024065 0.014012
+v 0.027296 0.023234 0.012852
+v 0.029467 0.022332 0.012138
+v 0.025872 0.025315 0.034784
+v 0.024353 0.026332 0.033331
+v 0.023059 0.027199 0.031429
+v 0.022098 0.027843 0.029257
+v 0.021506 0.028239 0.026903
+v 0.021306 0.028373 0.024453
+v 0.021507 0.028238 0.022004
+v 0.022101 0.027841 0.019648
+v 0.023063 0.027196 0.017477
+v 0.024358 0.026328 0.015574
+v 0.025937 0.025271 0.014012
+v 0.027738 0.024064 0.012852
+v 0.029692 0.022756 0.012138
+v 0.026746 0.026382 0.034784
+v 0.025455 0.027676 0.033329
+v 0.024354 0.028780 0.031428
+v 0.023536 0.029599 0.029257
+v 0.023033 0.030104 0.026902
+v 0.022864 0.030274 0.024453
+v 0.023035 0.030103 0.022003
+v 0.023539 0.029598 0.019648
+v 0.024357 0.028777 0.017477
+v 0.025460 0.027673 0.015574
+v 0.026802 0.026327 0.014012
+v 0.028334 0.024792 0.012852
+v 0.029997 0.023126 0.012138
+v 0.027812 0.027257 0.034783
+v 0.026798 0.028779 0.033328
+v 0.025932 0.030076 0.031427
+v 0.025290 0.031041 0.029256
+v 0.024895 0.031634 0.026902
+v 0.024761 0.031835 0.024453
+v 0.024896 0.031634 0.022003
+v 0.025292 0.031039 0.019648
+v 0.025935 0.030074 0.017477
+v 0.026801 0.028776 0.015574
+v 0.027856 0.027194 0.014012
+v 0.029061 0.025389 0.012852
+v 0.030366 0.023431 0.012138
+v 0.029028 0.027907 0.034781
+v 0.028329 0.029598 0.033327
+v 0.027733 0.031040 0.031426
+v 0.027291 0.032112 0.029256
+v 0.027018 0.032772 0.026902
+v 0.026927 0.032994 0.024453
+v 0.027018 0.032771 0.022003
+v 0.027292 0.032110 0.019648
+v 0.027735 0.031038 0.017477
+v 0.028332 0.029595 0.015574
+v 0.029060 0.027838 0.014012
+v 0.029889 0.025832 0.012852
+v 0.030789 0.023656 0.012138
+v 0.030347 0.028307 0.034780
+v 0.029992 0.030102 0.033326
+v 0.029687 0.031633 0.031426
+v 0.029462 0.032771 0.029255
+v 0.029323 0.033471 0.026902
+v 0.029276 0.033707 0.024453
+v 0.029324 0.033471 0.022004
+v 0.029463 0.032770 0.019648
+v 0.029689 0.031631 0.017477
+v 0.029994 0.030100 0.015574
+v 0.030364 0.028234 0.014012
+v 0.030788 0.026106 0.012852
+v 0.031247 0.023796 0.012138
+v 0.031719 0.028442 0.034780
+v 0.031719 0.030272 0.033326
+v 0.031719 0.031833 0.031425
+v 0.031719 0.032993 0.029255
+v 0.031719 0.033706 0.026901
+v 0.031720 0.033949 0.024454
+v 0.031720 0.033706 0.022003
+v 0.031720 0.032992 0.019648
+v 0.031721 0.031831 0.017477
+v 0.031721 0.030270 0.015574
+v 0.031722 0.028368 0.014012
+v 0.031723 0.026198 0.012852
+v 0.031724 0.023843 0.012138
+v 0.033091 0.028306 0.034779
+v 0.033447 0.030100 0.033325
+v 0.033751 0.031631 0.031424
+v 0.033976 0.032769 0.029254
+v 0.034116 0.033470 0.026901
+v 0.034164 0.033705 0.024452
+v 0.034117 0.033469 0.022003
+v 0.033978 0.032768 0.019648
+v 0.033753 0.031630 0.017476
+v 0.033449 0.030098 0.015574
+v 0.033080 0.028233 0.014012
+v 0.032658 0.026105 0.012851
+v 0.032201 0.023795 0.012138
+v 0.034410 0.027904 0.034778
+v 0.035109 0.029595 0.033324
+v 0.035706 0.031036 0.031423
+v 0.036148 0.032108 0.029254
+v 0.036422 0.032768 0.026900
+v 0.036514 0.032990 0.024451
+v 0.036422 0.032768 0.022002
+v 0.036149 0.032107 0.019647
+v 0.035707 0.031035 0.017476
+v 0.035111 0.029593 0.015574
+v 0.034385 0.027836 0.014012
+v 0.033558 0.025831 0.012851
+v 0.032659 0.023656 0.012138
+v 0.035627 0.027253 0.034778
+v 0.036641 0.028774 0.033324
+v 0.037507 0.030071 0.031423
+v 0.038149 0.031035 0.029253
+v 0.038545 0.031629 0.026899
+v 0.038680 0.031829 0.024450
+v 0.038545 0.031629 0.022002
+v 0.038150 0.031034 0.019647
+v 0.037508 0.030070 0.017476
+v 0.036643 0.028772 0.015573
+v 0.035589 0.027191 0.014012
+v 0.034386 0.025387 0.012851
+v 0.033082 0.023430 0.012138
+v 0.036693 0.026377 0.034777
+v 0.037984 0.027670 0.033323
+v 0.039085 0.028773 0.031423
+v 0.039904 0.029593 0.029252
+v 0.040407 0.030097 0.026898
+v 0.040578 0.030267 0.024450
+v 0.040408 0.030096 0.022001
+v 0.039904 0.029592 0.019647
+v 0.039086 0.028772 0.017475
+v 0.037985 0.027668 0.015573
+v 0.036644 0.026324 0.014011
+v 0.035113 0.024790 0.012851
+v 0.033452 0.023125 0.012138
+v 0.037567 0.025309 0.034776
+v 0.039086 0.026325 0.033323
+v 0.040381 0.027191 0.031422
+v 0.041344 0.027835 0.029252
+v 0.041937 0.028231 0.026898
+v 0.042137 0.028365 0.024450
+v 0.041937 0.028230 0.022001
+v 0.041344 0.027834 0.019646
+v 0.040382 0.027190 0.017475
+v 0.039087 0.026323 0.015573
+v 0.037509 0.025267 0.014011
+v 0.035709 0.024061 0.012851
+v 0.033756 0.022754 0.012138
+v 0.038216 0.024090 0.034774
+v 0.039905 0.024790 0.033321
+v 0.041345 0.025387 0.031422
+v 0.042415 0.025830 0.029253
+v 0.043074 0.026103 0.026899
+v 0.043296 0.026194 0.024450
+v 0.043073 0.026103 0.022001
+v 0.042414 0.025829 0.019646
+v 0.041345 0.025385 0.017475
+v 0.039906 0.024788 0.015573
+v 0.038153 0.024060 0.014011
+v 0.036152 0.023231 0.012851
+v 0.033982 0.022331 0.012138
+v 0.038616 0.022768 0.034773
+v 0.040410 0.023124 0.033322
+v 0.041939 0.023428 0.031422
+v 0.043076 0.023654 0.029254
+v 0.043775 0.023793 0.026899
+v 0.044010 0.023840 0.024451
+v 0.043774 0.023792 0.022001
+v 0.043074 0.023653 0.019646
+v 0.041938 0.023427 0.017475
+v 0.040410 0.023122 0.015573
+v 0.038549 0.022753 0.014011
+v 0.036426 0.022330 0.012851
+v 0.034121 0.021871 0.012138
+v 0.038753 0.021393 0.034774
+v 0.040582 0.021393 0.033322
+v 0.042142 0.021392 0.031423
+v 0.043300 0.021391 0.029254
+v 0.044012 0.021391 0.026899
+v 0.044253 0.021392 0.024452
+v 0.044010 0.021391 0.022002
+v 0.043297 0.021391 0.019646
+v 0.042139 0.021391 0.017475
+v 0.040581 0.021391 0.015573
+v 0.038683 0.021392 0.014011
+v 0.036518 0.021392 0.012851
+v 0.034168 0.021393 0.012137
+v 0.038618 0.020019 0.034773
+v 0.040412 0.019661 0.033322
+v 0.041942 0.019355 0.031423
+v 0.043079 0.019129 0.029254
+v 0.043777 0.018990 0.026899
+v 0.044012 0.018942 0.024450
+v 0.043776 0.018990 0.022001
+v 0.043076 0.019129 0.019646
+v 0.041940 0.019355 0.017475
+v 0.040411 0.019660 0.015573
+v 0.038550 0.020031 0.014011
+v 0.036427 0.020455 0.012851
+v 0.034121 0.020916 0.012137
+v 0.038218 0.018697 0.034773
+v 0.039908 0.017996 0.033322
+v 0.041349 0.017398 0.031422
+v 0.042420 0.016954 0.029254
+v 0.043078 0.016680 0.026899
+v 0.043300 0.016587 0.024449
+v 0.043077 0.016679 0.022001
+v 0.042418 0.016953 0.019646
+v 0.041348 0.017397 0.017474
+v 0.039908 0.017995 0.015572
+v 0.038155 0.018724 0.014011
+v 0.036153 0.019554 0.012851
+v 0.033982 0.020456 0.012137
+v 0.037571 0.017479 0.034775
+v 0.039091 0.016461 0.033323
+v 0.040386 0.015594 0.031422
+v 0.041349 0.014949 0.029253
+v 0.041943 0.014552 0.026898
+v 0.042142 0.014418 0.024449
+v 0.041942 0.014552 0.022000
+v 0.041349 0.014949 0.019645
+v 0.040386 0.015593 0.017474
+v 0.039091 0.016460 0.015572
+v 0.037512 0.017518 0.014011
+v 0.035711 0.018724 0.012851
+v 0.033757 0.020033 0.012137
+v 0.036697 0.016412 0.034776
+v 0.037989 0.015117 0.033322
+v 0.039091 0.014013 0.031421
+v 0.039910 0.013193 0.029252
+v 0.040414 0.012688 0.026897
+v 0.040585 0.012517 0.024449
+v 0.040414 0.012687 0.022000
+v 0.039910 0.013192 0.019645
+v 0.039091 0.014012 0.017474
+v 0.037990 0.015116 0.015572
+v 0.036647 0.016461 0.014011
+v 0.035116 0.017996 0.012851
+v 0.033453 0.019663 0.012137
+v 0.035631 0.015536 0.034776
+v 0.036647 0.014014 0.033322
+v 0.037513 0.012716 0.031421
+v 0.038157 0.011751 0.029252
+v 0.038553 0.011157 0.026897
+v 0.038687 0.010956 0.024449
+v 0.038553 0.011157 0.022000
+v 0.038157 0.011751 0.019645
+v 0.037514 0.012715 0.017474
+v 0.036648 0.014013 0.015572
+v 0.035593 0.015595 0.014011
+v 0.034389 0.017399 0.012851
+v 0.033083 0.019358 0.012137
+v 0.031725 0.021394 0.011896
+v 0.034415 0.014886 0.034775
+v 0.035116 0.013195 0.033322
+v 0.035713 0.011752 0.031421
+v 0.036156 0.010681 0.029253
+v 0.036430 0.010020 0.026898
+v 0.036522 0.009797 0.024449
+v 0.036430 0.010020 0.022000
+v 0.036156 0.010680 0.019646
+v 0.035714 0.011751 0.017474
+v 0.035117 0.013193 0.015572
+v 0.034390 0.014951 0.014011
+v 0.033561 0.016956 0.012851
+v 0.032661 0.019132 0.012137
+v 0.033097 0.014485 0.034776
+v 0.033453 0.012691 0.033323
+v 0.033759 0.011159 0.031422
+v 0.033985 0.010021 0.029253
+v 0.034125 0.009321 0.026898
+v 0.034172 0.009085 0.024450
+v 0.034125 0.009320 0.022001
+v 0.033986 0.010020 0.019646
+v 0.033760 0.011158 0.017475
+v 0.033455 0.012689 0.015573
+v 0.033085 0.014555 0.014011
+v 0.032662 0.016682 0.012851
+v 0.032202 0.018993 0.012137
+v 0.031725 0.014350 0.034778
+v 0.031726 0.012521 0.033323
+v 0.031727 0.010959 0.031422
+v 0.031728 0.009799 0.029253
+v 0.031728 0.009086 0.026898
+v 0.031729 0.008845 0.024452
+v 0.031729 0.009085 0.022001
+v 0.031729 0.009798 0.019646
+v 0.031728 0.010958 0.017475
+v 0.031728 0.012519 0.015573
+v 0.031727 0.014421 0.014011
+v 0.031726 0.016591 0.012851
+v 0.031726 0.018946 0.012138
+v 0.030353 0.014486 0.034778
+v 0.029999 0.012692 0.033323
+v 0.029695 0.011161 0.031423
+v 0.029471 0.010023 0.029253
+v 0.029332 0.009323 0.026899
+v 0.029285 0.009087 0.024450
+v 0.029332 0.009322 0.022001
+v 0.029472 0.010022 0.019646
+v 0.029696 0.011160 0.017475
+v 0.030000 0.012691 0.015573
+v 0.030369 0.014556 0.014011
+v 0.030791 0.016683 0.012851
+v 0.031249 0.018993 0.012137
+v 0.029034 0.014888 0.034778
+v 0.028336 0.013198 0.033324
+v 0.027741 0.011756 0.031423
+v 0.027299 0.010684 0.029254
+v 0.027026 0.010024 0.026899
+v 0.026936 0.009801 0.024450
+v 0.027027 0.010023 0.022001
+v 0.027300 0.010683 0.019646
+v 0.027742 0.011754 0.017475
+v 0.028338 0.013196 0.015573
+v 0.029064 0.014953 0.014011
+v 0.029892 0.016957 0.012851
+v 0.030790 0.019133 0.012138
+v 0.027818 0.015539 0.034779
+v 0.026804 0.014018 0.033325
+v 0.025940 0.012721 0.031424
+v 0.025298 0.011756 0.029254
+v 0.024903 0.011162 0.026899
+v 0.024769 0.010961 0.024450
+v 0.024904 0.011162 0.022001
+v 0.025299 0.011755 0.019646
+v 0.025941 0.012719 0.017475
+v 0.026806 0.014016 0.015573
+v 0.027860 0.015597 0.014011
+v 0.029063 0.017401 0.012851
+v 0.030368 0.019359 0.012138
+v 0.026751 0.016415 0.034780
+v 0.025462 0.015122 0.033326
+v 0.024361 0.014019 0.031424
+v 0.023544 0.013199 0.029255
+v 0.023040 0.012694 0.026899
+v 0.022871 0.012524 0.024451
+v 0.023041 0.012694 0.022002
+v 0.023545 0.013198 0.019647
+v 0.024363 0.014017 0.017476
+v 0.025464 0.015121 0.015573
+v 0.026806 0.016465 0.014011
+v 0.028336 0.017999 0.012851
+v 0.029998 0.019664 0.012138
+v 0.025876 0.017484 0.034780
+v 0.024359 0.016467 0.033326
+v 0.023065 0.015601 0.031424
+v 0.022104 0.014957 0.029255
+v 0.021511 0.014560 0.026900
+v 0.021312 0.014426 0.024451
+v 0.021512 0.014560 0.022002
+v 0.022105 0.014956 0.019647
+v 0.023068 0.015599 0.017476
+v 0.024362 0.016466 0.015574
+v 0.025940 0.017522 0.014012
+v 0.027740 0.018727 0.012851
+v 0.029693 0.020034 0.012138
+v 0.025227 0.018702 0.034781
+v 0.023540 0.018002 0.033327
+v 0.022102 0.017405 0.031425
+v 0.021033 0.016962 0.029256
+v 0.020375 0.016688 0.026901
+v 0.020153 0.016597 0.024452
+v 0.020376 0.016688 0.022003
+v 0.021035 0.016961 0.019648
+v 0.022105 0.017404 0.017476
+v 0.023544 0.018001 0.015574
+v 0.025297 0.018728 0.014012
+v 0.027297 0.019557 0.012852
+v 0.029468 0.020458 0.012138
+v 0.024825 0.020024 0.034782
+v 0.023035 0.019669 0.033328
+v 0.021508 0.019364 0.031426
+v 0.020373 0.019138 0.029257
+v 0.019675 0.018999 0.026902
+v 0.019441 0.018952 0.024453
+v 0.019676 0.018999 0.022004
+v 0.020375 0.019137 0.019648
+v 0.021511 0.019362 0.017476
+v 0.023039 0.019667 0.015574
+v 0.024901 0.020036 0.014012
+v 0.027023 0.020459 0.012852
+v 0.029329 0.020917 0.012138
+v -0.033078 0.021492 0.036800
+v -0.034644 0.021493 0.036322
+v -0.036087 0.021493 0.035549
+v -0.037298 0.021493 0.034551
+v -0.033046 0.021810 0.036800
+v -0.034583 0.022118 0.036323
+v -0.035999 0.022400 0.035550
+v -0.037186 0.022637 0.034551
+v -0.032954 0.022117 0.036801
+v -0.034401 0.022718 0.036324
+v -0.035735 0.023272 0.035551
+v -0.036854 0.023736 0.034552
+v -0.032804 0.022399 0.036801
+v -0.034106 0.023271 0.036325
+v -0.035307 0.024075 0.035552
+v -0.036315 0.024749 0.034554
+v -0.032602 0.022646 0.036802
+v -0.033709 0.023756 0.036325
+v -0.034731 0.024780 0.035553
+v -0.035589 0.025638 0.034556
+v -0.032355 0.022849 0.036802
+v -0.033226 0.024154 0.036326
+v -0.034029 0.025357 0.035554
+v -0.034702 0.026366 0.034556
+v -0.032074 0.023000 0.036803
+v -0.032673 0.024450 0.036326
+v -0.033227 0.025787 0.035555
+v -0.033690 0.026908 0.034557
+v -0.031768 0.023093 0.036803
+v -0.032075 0.024632 0.036328
+v -0.032357 0.026052 0.035556
+v -0.032594 0.027241 0.034558
+v -0.031450 0.023124 0.036803
+v -0.031451 0.024694 0.036327
+v -0.031452 0.026141 0.035556
+v -0.031452 0.027354 0.034559
+v -0.031133 0.023093 0.036804
+v -0.030827 0.024632 0.036329
+v -0.030546 0.026051 0.035558
+v -0.030310 0.027241 0.034559
+v -0.030826 0.023000 0.036804
+v -0.030228 0.024450 0.036328
+v -0.029676 0.025786 0.035557
+v -0.029213 0.026907 0.034560
+v -0.030545 0.022849 0.036804
+v -0.029675 0.024154 0.036329
+v -0.028873 0.025357 0.035558
+v -0.028201 0.026365 0.034560
+v -0.030299 0.022646 0.036804
+v -0.029192 0.023755 0.036328
+v -0.028170 0.024778 0.035558
+v -0.027313 0.025637 0.034562
+v -0.030095 0.022399 0.036804
+v -0.028794 0.023270 0.036329
+v -0.027593 0.024074 0.035558
+v -0.026586 0.024747 0.034560
+v -0.029945 0.022117 0.036804
+v -0.028498 0.022716 0.036328
+v -0.027165 0.023270 0.035557
+v -0.026045 0.023733 0.034560
+v -0.029852 0.021810 0.036803
+v -0.028316 0.022116 0.036328
+v -0.026900 0.022398 0.035557
+v -0.025713 0.022634 0.034559
+v -0.029821 0.021491 0.036804
+v -0.028255 0.021491 0.036328
+v -0.026811 0.021490 0.035557
+v -0.025599 0.021490 0.034560
+v -0.029852 0.021173 0.036803
+v -0.028316 0.020866 0.036328
+v -0.026900 0.020583 0.035557
+v -0.025712 0.020346 0.034559
+v -0.029945 0.020866 0.036803
+v -0.028498 0.020266 0.036327
+v -0.027164 0.019711 0.035556
+v -0.026045 0.019247 0.034559
+v -0.030095 0.020584 0.036803
+v -0.028793 0.019712 0.036328
+v -0.027592 0.018907 0.035556
+v -0.026585 0.018233 0.034558
+v -0.030298 0.020338 0.036803
+v -0.029191 0.019227 0.036327
+v -0.028169 0.018203 0.035556
+v -0.027312 0.017344 0.034559
+v -0.030545 0.020134 0.036803
+v -0.029674 0.018829 0.036328
+v -0.028872 0.017625 0.035556
+v -0.028200 0.016616 0.034558
+v -0.030826 0.019983 0.036803
+v -0.030227 0.018533 0.036327
+v -0.029674 0.017196 0.035556
+v -0.029211 0.016074 0.034557
+v -0.031132 0.019890 0.036803
+v -0.030826 0.018350 0.036327
+v -0.030544 0.016931 0.035556
+v -0.030308 0.015741 0.034557
+v -0.031450 0.019859 0.036802
+v -0.031449 0.018290 0.036326
+v -0.031449 0.016842 0.035555
+v -0.031449 0.015628 0.034557
+v -0.031767 0.019891 0.036802
+v -0.032073 0.018351 0.036326
+v -0.032354 0.016932 0.035555
+v -0.032591 0.015742 0.034556
+v -0.032073 0.019984 0.036802
+v -0.032672 0.018534 0.036325
+v -0.033224 0.017197 0.035553
+v -0.033687 0.016076 0.034556
+v -0.032354 0.020135 0.036801
+v -0.033224 0.018830 0.036324
+v -0.034026 0.017627 0.035552
+v -0.034698 0.016618 0.034554
+v -0.032601 0.020339 0.036801
+v -0.033707 0.019229 0.036323
+v -0.034729 0.018206 0.035551
+v -0.035586 0.017347 0.034554
+v -0.032804 0.020586 0.036800
+v -0.034105 0.019714 0.036322
+v -0.035305 0.018911 0.035550
+v -0.036312 0.018237 0.034552
+v -0.031450 0.021492 0.036963
+v -0.032954 0.020868 0.036800
+v -0.034400 0.020268 0.036322
+v -0.035733 0.019715 0.035549
+v -0.036852 0.019251 0.034550
+v -0.033046 0.021175 0.036800
+v -0.034582 0.020868 0.036322
+v -0.035997 0.020586 0.035548
+v -0.037184 0.020350 0.034549
+v -0.038481 0.021494 0.033695
+v -0.040309 0.021494 0.032242
+v -0.041870 0.021494 0.030344
+v -0.043030 0.021494 0.028176
+v -0.043741 0.021495 0.025821
+v -0.043981 0.021495 0.023374
+v -0.043739 0.021494 0.020923
+v -0.043026 0.021493 0.018568
+v -0.041868 0.021493 0.016397
+v -0.040310 0.021492 0.014494
+v -0.038412 0.021491 0.012933
+v -0.036246 0.021490 0.011772
+v -0.033897 0.021490 0.011059
+v -0.038346 0.022868 0.033695
+v -0.040139 0.023225 0.032243
+v -0.041669 0.023531 0.030344
+v -0.042807 0.023756 0.028176
+v -0.043506 0.023896 0.025821
+v -0.043740 0.023943 0.023373
+v -0.043504 0.023896 0.020923
+v -0.042805 0.023755 0.018568
+v -0.041668 0.023529 0.016397
+v -0.040140 0.023223 0.014495
+v -0.038278 0.022852 0.012933
+v -0.036155 0.022428 0.011772
+v -0.033850 0.021967 0.011059
+v -0.037946 0.024190 0.033696
+v -0.039636 0.024891 0.032244
+v -0.041077 0.025489 0.030345
+v -0.042148 0.025932 0.028175
+v -0.042806 0.026206 0.025821
+v -0.043029 0.026298 0.023372
+v -0.042805 0.026205 0.020923
+v -0.042146 0.025931 0.018568
+v -0.041076 0.025487 0.016397
+v -0.039637 0.024889 0.014495
+v -0.037883 0.024159 0.012933
+v -0.035882 0.023329 0.011773
+v -0.033711 0.022427 0.011059
+v -0.037299 0.025408 0.033699
+v -0.038819 0.026425 0.032246
+v -0.040114 0.027293 0.030345
+v -0.041077 0.027937 0.028175
+v -0.041671 0.028333 0.025820
+v -0.041871 0.028467 0.023372
+v -0.041670 0.028332 0.020923
+v -0.041077 0.027935 0.018568
+v -0.040114 0.027290 0.016397
+v -0.038819 0.026423 0.014495
+v -0.037241 0.025365 0.012933
+v -0.035440 0.024158 0.011773
+v -0.033486 0.022850 0.011059
+v -0.036425 0.026476 0.033700
+v -0.037717 0.027770 0.032246
+v -0.038819 0.028874 0.030345
+v -0.039638 0.029694 0.028175
+v -0.040142 0.030198 0.025821
+v -0.040313 0.030368 0.023372
+v -0.040142 0.030197 0.020923
+v -0.039638 0.029692 0.018568
+v -0.038820 0.028872 0.016397
+v -0.037718 0.027767 0.014495
+v -0.036376 0.026422 0.012933
+v -0.034844 0.024887 0.011773
+v -0.033181 0.023220 0.011059
+v -0.035360 0.027351 0.033701
+v -0.036375 0.028873 0.032247
+v -0.037241 0.030171 0.030346
+v -0.037885 0.031135 0.028175
+v -0.038280 0.031729 0.025821
+v -0.038415 0.031929 0.023372
+v -0.038281 0.031728 0.020923
+v -0.037885 0.031134 0.018569
+v -0.037242 0.030169 0.016397
+v -0.036376 0.028871 0.014495
+v -0.035321 0.027289 0.012933
+v -0.034117 0.025484 0.011773
+v -0.032812 0.023525 0.011059
+v -0.034144 0.028001 0.033700
+v -0.034844 0.029692 0.032247
+v -0.035441 0.031134 0.030346
+v -0.035884 0.032206 0.028176
+v -0.036158 0.032866 0.025822
+v -0.036249 0.033089 0.023373
+v -0.036158 0.032866 0.020924
+v -0.035884 0.032205 0.018569
+v -0.035442 0.031132 0.016397
+v -0.034845 0.029690 0.014495
+v -0.034118 0.027932 0.012933
+v -0.033289 0.025927 0.011773
+v -0.032389 0.023751 0.011059
+v -0.032825 0.028402 0.033701
+v -0.033181 0.030197 0.032247
+v -0.033487 0.031728 0.030346
+v -0.033712 0.032866 0.028176
+v -0.033852 0.033566 0.025822
+v -0.033900 0.033802 0.023374
+v -0.033853 0.033566 0.020925
+v -0.033713 0.032865 0.018569
+v -0.033488 0.031726 0.016398
+v -0.033183 0.030194 0.014496
+v -0.032813 0.028328 0.012934
+v -0.032390 0.026201 0.011773
+v -0.031931 0.023890 0.011059
+v -0.031453 0.028537 0.033703
+v -0.031454 0.030367 0.032248
+v -0.031454 0.031928 0.030347
+v -0.031455 0.033088 0.028177
+v -0.031455 0.033801 0.025823
+v -0.031456 0.034044 0.023376
+v -0.031456 0.033801 0.020925
+v -0.031456 0.033087 0.018570
+v -0.031456 0.031926 0.016398
+v -0.031456 0.030364 0.014496
+v -0.031455 0.028462 0.012934
+v -0.031455 0.026293 0.011773
+v -0.031454 0.023937 0.011059
+v -0.030080 0.028402 0.033703
+v -0.029726 0.030196 0.032248
+v -0.029422 0.031727 0.030348
+v -0.029198 0.032865 0.028177
+v -0.029059 0.033565 0.025823
+v -0.029012 0.033800 0.023375
+v -0.029060 0.033564 0.020925
+v -0.029199 0.032863 0.018570
+v -0.029424 0.031725 0.016398
+v -0.029728 0.030193 0.014496
+v -0.030097 0.028327 0.012934
+v -0.030519 0.026200 0.011773
+v -0.030977 0.023890 0.011059
+v -0.028761 0.028000 0.033703
+v -0.028063 0.029691 0.032249
+v -0.027467 0.031133 0.030349
+v -0.027026 0.032204 0.028178
+v -0.026753 0.032864 0.025824
+v -0.026662 0.033086 0.023374
+v -0.026754 0.032863 0.020925
+v -0.027027 0.032202 0.018570
+v -0.027470 0.031130 0.016398
+v -0.028066 0.029688 0.014496
+v -0.028792 0.027931 0.012934
+v -0.029620 0.025926 0.011773
+v -0.030519 0.023750 0.011059
+v -0.027544 0.027350 0.033705
+v -0.026530 0.028871 0.032251
+v -0.025665 0.030168 0.030349
+v -0.025023 0.031132 0.028178
+v -0.024629 0.031725 0.025824
+v -0.024496 0.031925 0.023374
+v -0.024631 0.031724 0.020925
+v -0.025026 0.031130 0.018570
+v -0.025669 0.030165 0.016399
+v -0.026534 0.028867 0.014496
+v -0.027588 0.027286 0.012934
+v -0.028792 0.025482 0.011773
+v -0.030096 0.023525 0.011059
+v -0.026478 0.026474 0.033706
+v -0.025187 0.027767 0.032252
+v -0.024086 0.028870 0.030350
+v -0.023269 0.029689 0.028179
+v -0.022767 0.030193 0.025824
+v -0.022597 0.030363 0.023375
+v -0.022768 0.030192 0.020925
+v -0.023272 0.029687 0.018570
+v -0.024090 0.028867 0.016399
+v -0.025192 0.027763 0.014496
+v -0.026533 0.026419 0.012934
+v -0.028064 0.024885 0.011773
+v -0.029726 0.023219 0.011059
+v -0.025602 0.025406 0.033706
+v -0.024084 0.026422 0.032253
+v -0.022789 0.027289 0.030351
+v -0.021829 0.027932 0.028180
+v -0.021237 0.028327 0.025825
+v -0.021038 0.028461 0.023375
+v -0.021239 0.028326 0.020925
+v -0.021833 0.027929 0.018570
+v -0.022795 0.027285 0.016399
+v -0.024090 0.026418 0.014496
+v -0.025668 0.025362 0.012934
+v -0.027468 0.024156 0.011773
+v -0.029422 0.022849 0.011059
+v -0.024954 0.024187 0.033704
+v -0.023265 0.024887 0.032252
+v -0.021826 0.025484 0.030351
+v -0.020758 0.025926 0.028180
+v -0.020101 0.026199 0.025825
+v -0.019880 0.026290 0.023375
+v -0.020103 0.026198 0.020926
+v -0.020762 0.025924 0.018570
+v -0.021832 0.025480 0.016399
+v -0.023271 0.024884 0.014496
+v -0.025025 0.024155 0.012934
+v -0.027025 0.023326 0.011773
+v -0.029196 0.022425 0.011059
+v -0.024553 0.022865 0.033704
+v -0.022762 0.023221 0.032251
+v -0.021233 0.023525 0.030351
+v -0.020099 0.023750 0.028180
+v -0.019401 0.023889 0.025825
+v -0.019167 0.023936 0.023375
+v -0.019403 0.023888 0.020926
+v -0.020103 0.023748 0.018570
+v -0.021239 0.023522 0.016399
+v -0.022767 0.023217 0.014496
+v -0.024629 0.022848 0.012934
+v -0.026752 0.022425 0.011773
+v -0.029057 0.021966 0.011059
+v -0.024418 0.021490 0.033704
+v -0.022592 0.021489 0.032250
+v -0.021034 0.021488 0.030349
+v -0.019876 0.021488 0.028180
+v -0.019165 0.021488 0.025824
+v -0.018925 0.021488 0.023376
+v -0.019168 0.021487 0.020926
+v -0.019880 0.021486 0.018570
+v -0.021038 0.021486 0.016398
+v -0.022597 0.021486 0.014496
+v -0.024494 0.021487 0.012934
+v -0.026659 0.021487 0.011773
+v -0.029010 0.021488 0.011059
+v -0.024553 0.020114 0.033703
+v -0.022762 0.019758 0.032249
+v -0.021235 0.019452 0.030348
+v -0.020099 0.019226 0.028179
+v -0.019401 0.019086 0.025824
+v -0.019166 0.019038 0.023375
+v -0.019402 0.019085 0.020925
+v -0.020102 0.019224 0.018570
+v -0.021238 0.019450 0.016398
+v -0.022766 0.019755 0.014496
+v -0.024628 0.020126 0.012934
+v -0.026751 0.020550 0.011773
+v -0.029057 0.021010 0.011059
+v -0.024954 0.018793 0.033702
+v -0.023266 0.018092 0.032248
+v -0.021828 0.017494 0.030347
+v -0.020758 0.017050 0.028178
+v -0.020100 0.016775 0.025823
+v -0.019878 0.016683 0.023374
+v -0.020101 0.016775 0.020924
+v -0.020760 0.017049 0.018569
+v -0.021830 0.017492 0.016398
+v -0.023269 0.018090 0.014495
+v -0.025023 0.018818 0.012933
+v -0.027024 0.019649 0.011773
+v -0.029196 0.020551 0.011059
+v -0.025603 0.017574 0.033702
+v -0.024085 0.016557 0.032248
+v -0.022790 0.015690 0.030346
+v -0.021828 0.015045 0.028177
+v -0.021235 0.014648 0.025822
+v -0.021035 0.014513 0.023373
+v -0.021236 0.014648 0.020924
+v -0.021829 0.015044 0.018569
+v -0.022792 0.015688 0.016397
+v -0.024087 0.016555 0.014495
+v -0.025666 0.017612 0.012933
+v -0.027467 0.018819 0.011773
+v -0.029421 0.020127 0.011059
+v -0.026477 0.016507 0.033702
+v -0.025186 0.015213 0.032247
+v -0.024085 0.014108 0.030346
+v -0.023266 0.013288 0.028176
+v -0.022763 0.012783 0.025821
+v -0.022593 0.012612 0.023372
+v -0.022764 0.012783 0.020923
+v -0.023268 0.013287 0.018568
+v -0.024086 0.014107 0.016397
+v -0.025188 0.015211 0.014495
+v -0.026531 0.016556 0.012933
+v -0.028062 0.018091 0.011773
+v -0.029725 0.019757 0.011059
+v -0.027543 0.015631 0.033701
+v -0.026528 0.014109 0.032247
+v -0.025662 0.012811 0.030345
+v -0.025020 0.011847 0.028176
+v -0.024624 0.011252 0.025821
+v -0.024490 0.011051 0.023372
+v -0.024625 0.011252 0.020923
+v -0.025021 0.011846 0.018568
+v -0.025664 0.012810 0.016397
+v -0.026530 0.014108 0.014495
+v -0.027585 0.015689 0.012933
+v -0.028789 0.017494 0.011773
+v -0.030095 0.019452 0.011059
+v -0.031453 0.021489 0.010818
+v -0.028758 0.014981 0.033700
+v -0.028059 0.013290 0.032246
+v -0.027463 0.011847 0.030345
+v -0.027020 0.010775 0.028176
+v -0.026747 0.010115 0.025821
+v -0.026655 0.009892 0.023372
+v -0.026747 0.010115 0.020923
+v -0.027021 0.010775 0.018568
+v -0.027464 0.011846 0.016397
+v -0.028061 0.013288 0.014495
+v -0.028788 0.015045 0.012933
+v -0.029617 0.017051 0.011773
+v -0.030517 0.019227 0.011059
+v -0.030077 0.014580 0.033700
+v -0.029721 0.012786 0.032246
+v -0.029417 0.011254 0.030345
+v -0.029191 0.010116 0.028175
+v -0.029052 0.009415 0.025821
+v -0.029005 0.009179 0.023372
+v -0.029053 0.009415 0.020923
+v -0.029192 0.010115 0.018568
+v -0.029418 0.011253 0.016397
+v -0.029723 0.012784 0.014495
+v -0.030093 0.014649 0.012933
+v -0.030516 0.016777 0.011772
+v -0.030976 0.019087 0.011059
+v -0.031449 0.014445 0.033700
+v -0.031449 0.012615 0.032246
+v -0.031449 0.011054 0.030344
+v -0.031448 0.009894 0.028175
+v -0.031448 0.009180 0.025820
+v -0.031449 0.008939 0.023373
+v -0.031449 0.009180 0.020923
+v -0.031449 0.009893 0.018568
+v -0.031450 0.011053 0.016397
+v -0.031450 0.012614 0.014494
+v -0.031451 0.014515 0.012933
+v -0.031452 0.016686 0.011772
+v -0.031452 0.019040 0.011059
+v -0.032821 0.014581 0.033699
+v -0.033176 0.012787 0.032245
+v -0.033481 0.011255 0.030344
+v -0.033706 0.010117 0.028175
+v -0.033845 0.009417 0.025820
+v -0.033892 0.009181 0.023371
+v -0.033845 0.009416 0.020923
+v -0.033706 0.010116 0.018568
+v -0.033482 0.011254 0.016396
+v -0.033178 0.012785 0.014494
+v -0.032809 0.014650 0.012933
+v -0.032387 0.016778 0.011772
+v -0.031929 0.019087 0.011059
+v -0.034140 0.014983 0.033698
+v -0.034839 0.013292 0.032245
+v -0.035435 0.011850 0.030344
+v -0.035877 0.010778 0.028175
+v -0.036151 0.010118 0.025820
+v -0.036242 0.009895 0.023371
+v -0.036151 0.010117 0.020922
+v -0.035878 0.010777 0.018567
+v -0.035436 0.011849 0.016396
+v -0.034840 0.013290 0.014494
+v -0.034114 0.015047 0.012932
+v -0.033286 0.017052 0.011772
+v -0.032388 0.019227 0.011059
+v -0.035356 0.015633 0.033699
+v -0.036371 0.014112 0.032245
+v -0.037236 0.012815 0.030344
+v -0.037879 0.011850 0.028174
+v -0.038274 0.011256 0.025819
+v -0.038409 0.011055 0.023371
+v -0.038274 0.011256 0.020922
+v -0.037879 0.011850 0.018567
+v -0.037237 0.012814 0.016396
+v -0.036372 0.014111 0.014494
+v -0.035318 0.015692 0.012932
+v -0.034115 0.017496 0.011772
+v -0.032811 0.019453 0.011059
+v -0.036422 0.016510 0.033698
+v -0.037713 0.015216 0.032244
+v -0.038815 0.014112 0.030344
+v -0.039633 0.013293 0.028174
+v -0.040137 0.012788 0.025819
+v -0.040307 0.012617 0.023370
+v -0.040137 0.012788 0.020922
+v -0.039633 0.013292 0.018567
+v -0.038815 0.014112 0.016396
+v -0.037714 0.015215 0.014494
+v -0.036373 0.016559 0.012932
+v -0.034842 0.018093 0.011772
+v -0.033180 0.019758 0.011059
+v -0.037296 0.017578 0.033697
+v -0.038816 0.016561 0.032244
+v -0.040111 0.015694 0.030344
+v -0.041074 0.015050 0.028175
+v -0.041667 0.014654 0.025820
+v -0.041866 0.014520 0.023371
+v -0.041666 0.014654 0.020922
+v -0.041073 0.015050 0.018567
+v -0.040111 0.015694 0.016396
+v -0.038816 0.016560 0.014494
+v -0.037238 0.017616 0.012932
+v -0.035438 0.018822 0.011772
+v -0.033485 0.020129 0.011059
+v -0.037944 0.018797 0.033694
+v -0.039634 0.018096 0.032242
+v -0.041075 0.017499 0.030344
+v -0.042145 0.017056 0.028175
+v -0.042804 0.016782 0.025820
+v -0.043025 0.016691 0.023371
+v -0.042803 0.016782 0.020922
+v -0.042143 0.017055 0.018567
+v -0.041074 0.017498 0.016396
+v -0.039635 0.018095 0.014494
+v -0.037881 0.018823 0.012932
+v -0.035881 0.019652 0.011772
+v -0.033710 0.020552 0.011059
+v -0.038344 0.020119 0.033694
+v -0.040138 0.019763 0.032242
+v -0.041669 0.019457 0.030344
+v -0.042806 0.019232 0.028176
+v -0.043505 0.019093 0.025821
+v -0.043739 0.019045 0.023372
+v -0.043503 0.019092 0.020923
+v -0.042803 0.019231 0.018568
+v -0.041667 0.019456 0.016396
+v -0.040139 0.019761 0.014494
+v -0.038277 0.020130 0.012933
+v -0.036155 0.020553 0.011772
+v -0.033850 0.021012 0.011059
+f 4 2 1
+f 8 6 5
+f 13 15 14
+f 17 19 18
+f 32 30 29
+f 36 34 33
+f 37 39 38
+f 21 23 20
+f 25 27 24
+f 41 43 42
+f 48 46 45
+f 52 50 49
+f 53 55 54
+f 60 58 57
+f 29 62 61
+f 66 64 63
+f 67 6 68
+f 72 70 69
+f 76 74 73
+f 77 79 78
+f 81 83 82
+f 59 86 85
+f 88 90 89
+f 96 94 93
+f 100 98 97
+f 101 103 102
+f 106 34 87
+f 108 115 107
+f 117 36 118
+f 124 126 125
+f 131 129 128
+f 132 134 133
+f 142 140 139
+f 148 60 147
+f 144 127 124
+f 149 151 150
+f 156 154 153
+f 159 158 157
+f 116 164 105
+f 171 169 168
+f 174 173 172
+f 179 181 180
+f 186 188 187
+f 194 196 195
+f 199 139 198
+f 206 208 207
+f 213 211 210
+f 202 214 201
+f 96 216 54
+f 92 43 44
+f 97 246 100
+f 250 229 228
+f 252 58 253
+f 258 256 255
+f 260 229 259
+f 110 112 113
+f 114 263 109
+f 265 267 266
+f 56 270 269
+f 274 249 250
+f 185 63 64
+f 280 278 276
+f 285 283 157
+f 288 211 212
+f 141 92 46
+f 289 175 176
+f 148 105 106
+f 312 170 171
+f 238 42 43
+f 318 160 317
+f 310 308 307
+f 241 325 324
+f 160 240 317
+f 47 307 308
+f 108 315 164
+f 345 86 119
+f 348 350 349
+f 356 358 357
+f 355 353 352
+f 363 32 362
+f 149 150 176
+f 364 253 365
+f 366 368 367
+f 183 371 63
+f 163 161 160
+f 209 101 208
+f 378 165 166
+f 379 381 380
+f 383 3 4
+f 284 386 385
+f 387 389 388
+f 391 229 249
+f 73 397 396
+f 405 407 406
+f 203 411 410
+f 414 413 412
+f 1 357 4
+f 277 131 275
+f 254 57 58
+f 358 415 351
+f 417 386 408
+f 401 192 420
+f 419 127 143
+f 421 182 422
+f 103 302 409
+f 428 368 174
+f 380 359 356
+f 62 266 323
+f 140 431 430
+f 357 351 348
+f 356 379 380
+f 415 403 434
+f 437 436 435
+f 438 440 439
+f 188 450 449
+f 452 454 453
+f 265 181 421
+f 224 64 225
+f 237 213 427
+f 275 128 458
+f 281 460 459
+f 50 79 461
+f 73 282 281
+f 468 466 465
+f 390 352 359
+f 482 484 483
+f 489 487 486
+f 493 495 494
+f 472 470 469
+f 502 500 499
+f 220 222 223
+f 512 143 66
+f 518 511 80
+f 513 230 286
+f 205 31 244
+f 189 451 188
+f 163 532 8
+f 533 225 251
+f 446 447 412
+f 335 540 334
+f 544 546 545
+f 329 291 292
+f 199 262 238
+f 385 416 550
+f 532 191 247
+f 363 118 243
+f 236 237 250
+f 578 580 579
+f 573 571 570
+f 561 562 272
+f 241 259 240
+f 590 592 591
+f 545 595 594
+f 503 155 504
+f 606 624 623
+f 478 480 479
+f 646 648 647
+f 590 689 684
+f 564 651 650
+f 728 184 727
+f 714 716 715
+f 692 575 693
+f 541 581 578
+f 346 248 400
+f 161 230 160
+f 128 94 95
+f 698 704 703
+f 574 576 575
+f 290 401 328
+f 630 725 726
+f 733 735 734
+f 755 496 493
+f 759 481 760
+f 757 762 761
+f 764 466 467
+f 768 3808 3809
+f 759 488 489
+f 492 475 771
+f 474 769 473
+f 773 465 772
+f 771 486 483
+f 484 775 491
+f 774 489 486
+f 495 764 756
+f 495 761 494
+f 137 784 136
+f 325 797 547
+f 165 51 52
+f 369 29 370
+f 730 568 569
+f 90 301 89
+f 243 516 242
+f 800 806 805
+f 817 808 816
+f 816 823 822
+f 306 447 807
+f 815 809 808
+f 829 828 825
+f 726 501 502
+f 274 424 423
+f 879 881 880
+f 884 879 885
+f 219 217 211
+f 906 904 897
+f 455 205 542
+f 898 830 829
+f 903 880 881
+f 906 897 898
+f 377 519 165
+f 923 948 946
+f 946 951 950
+f 3519 739 3517
+f 900 886 880
+f 225 65 144
+f 182 442 422
+f 809 947 824
+f 954 962 961
+f 927 965 966
+f 885 880 886
+f 925 966 967
+f 343 341 340
+f 965 971 968
+f 968 970 969
+f 972 969 973
+f 963 981 978
+f 336 338 339
+f 992 827 828
+f 810 996 959
+f 1007 1002 1008
+f 1012 827 1009
+f 1011 1007 1008
+f 294 292 291
+f 907 920 919
+f 1017 1019 1018
+f 1020 1022 1021
+f 1035 1020 1044
+f 1061 1047 1046
+f 1063 1046 1062
+f 1044 1021 1045
+f 1102 1097 1109
+f 1116 1118 1117
+f 1016 1014 1013
+f 881 904 903
+f 816 1127 817
+f 1094 949 1097
+f 514 167 515
+f 146 81 82
+f 1133 1130 1129
+f 1102 1126 822
+f 822 1116 816
+f 1145 1148 1136
+f 1076 1151 1069
+f 1148 1086 1087
+f 1109 1114 1136
+f 1154 1156 1155
+f 1153 967 1156
+f 3464 463 3714
+f 1144 1152 1151
+f 1156 1163 1162
+f 1164 1169 1165
+f 1145 1176 1147
+f 1114 1154 1171
+f 1183 1164 1177
+f 1184 1177 1185
+f 1069 1062 1064
+f 1023 1194 1022
+f 1162 1170 1169
+f 1195 1185 1191
+f 1183 1176 1155
+f 1201 1152 1196
+f 1061 1201 1202
+f 3825 843 3836
+f 1203 1195 1034
+f 1202 1184 1195
+f 1218 1203 1217
+f 1060 1202 1203
+f 1225 1217 1219
+f 671 694 670
+f 1219 1024 1035
+f 1217 1034 1024
+f 1226 1015 1227
+f 1243 1234 1233
+f 1231 1229 1228
+f 847 1233 843
+f 843 1244 878
+f 1233 1245 1244
+f 1923 233 216
+f 2100 499 500
+f 3914 2135 3900
+f 61 323 505
+f 210 217 457
+f 1299 1309 1300
+f 3863 1231 3862
+f 1066 1321 1322
+f 1322 1228 1229
+f 1060 1321 1047
+f 1325 1323 1310
+f 1244 1332 1330
+f 1290 1242 1288
+f 1337 1339 1338
+f 1330 1340 1339
+f 1341 1245 1294
+f 1342 1332 1341
+f 1343 1341 1344
+f 1344 1294 1293
+f 3857 1345 1346
+f 1347 1342 1343
+f 3856 1295 1345
+f 1349 1348 1347
+f 3861 1347 3853
+f 3853 1343 3857
+f 906 829 1353
+f 905 1353 1354
+f 865 1355 1357
+f 1355 829 825
+f 1357 825 826
+f 1356 1353 1355
+f 3834 1338 3833
+f 866 1356 865
+f 1358 1354 1356
+f 3828 1337 3834
+f 838 1012 841
+f 1363 1338 1339
+f 1368 1339 1340
+f 1348 1340 1342
+f 1371 3824 3829
+f 524 526 523
+f 528 530 527
+f 1372 1369 1371
+f 535 537 538
+f 1367 1348 1350
+f 1370 1359 1369
+f 1369 3833 3824
+f 1373 903 904
+f 555 557 558
+f 1374 910 1373
+f 1373 905 1375
+f 1374 1375 1376
+f 1375 1354 1370
+f 1378 908 1377
+f 376 651 1387
+f 1388 973 969
+f 586 588 585
+f 1390 1388 1391
+f 1391 969 970
+f 597 599 596
+f 600 602 603
+f 1392 970 979
+f 608 610 607
+f 612 523 611
+f 614 616 613
+f 617 25 24
+f 620 622 619
+f 1397 1391 1392
+f 1398 1392 1399
+f 1400 920 1378
+f 1402 1165 1169
+f 636 638 635
+f 930 1402 929
+f 641 643 644
+f 929 1169 1170
+f 1404 1403 1402
+f 1411 1365 1406
+f 1412 1415 1414
+f 652 654 655
+f 609 657 658
+f 659 661 662
+f 558 586 585
+f 665 667 664
+f 1413 1406 1412
+f 1418 3810 3813
+f 1417 1371 1416
+f 1416 3829 3810
+f 1419 1416 1418
+f 1386 1374 1420
+f 677 679 676
+f 1385 1420 1421
+f 1420 1376 1422
+f 685 687 688
+f 1421 1422 1423
+f 528 691 658
+f 1422 1372 1417
+f 1018 747 1425
+f 699 701 702
+f 1424 579 1017
+f 3663 1767 3660
+f 700 711 712
+f 1093 3754 1571
+f 717 530 719
+f 721 723 720
+f 704 3543 3554
+f 19 1426 1430
+f 1429 1427 1426
+f 1658 3542 3544
+f 1439 1434 1438
+f 1437 1435 1434
+f 1426 1433 1432
+f 1442 1432 1440
+f 742 744 745
+f 1432 1441 1440
+f 751 753 750
+f 1438 1403 1405
+f 1446 1444 1443
+f 1447 1446 1443
+f 3860 1443 3848
+f 1415 1445 1446
+f 1414 1446 1448
+f 2173 469 470
+f 777 779 686
+f 780 782 783
+f 1676 1806 1675
+f 3859 1349 3861
+f 789 791 788
+f 745 793 742
+f 652 795 653
+f 1455 1457 1456
+f 1454 1350 1452
+f 610 802 801
+f 3831 698 703
+f 1352 1045 1351
+f 1015 1459 1460
+f 811 813 814
+f 2118 3817 2117
+f 819 821 818
+f 1466 1194 1190
+f 1467 1465 1466
+f 1182 1468 1467
+f 832 833 831
+f 835 837 834
+f 1467 1190 1182
+f 1469 1444 1445
+f 840 712 690
+f 1453 1469 1454
+f 599 845 846
+f 1226 3849 3855
+f 851 853 850
+f 855 856 854
+f 858 860 857
+f 861 863 864
+f 1014 3855 3858
+f 867 868 869
+f 870 872 873
+f 874 876 877
+f 1310 1471 1309
+f 1476 1427 1475
+f 1461 1475 1460
+f 887 889 890
+f 788 892 891
+f 894 896 893
+f 1460 1227 1015
+f 901 524 612
+f 1465 1477 1464
+f 1865 3500 1864
+f 911 913 914
+f 916 918 915
+f 1850 3507 1851
+f 921 895 894
+f 1479 1436 1478
+f 926 667 601
+f 1481 563 1480
+f 269 84 1482
+f 931 933 916
+f 934 614 869
+f 936 688 937
+f 938 662 939
+f 1484 122 123
+f 941 835 834
+f 320 509 510
+f 1849 3505 1850
+f 84 1484 83
+f 1487 1488 539
+f 1757 3502 3509
+f 365 1491 364
+f 461 80 511
+f 117 362 1496
+f 1494 541 1495
+f 926 957 958
+f 1477 1433 1476
+f 1709 1585 1700
+f 119 1496 345
+f 271 504 156
+f 511 189 186
+f 345 1492 365
+f 255 344 254
+f 57 299 132
+f 1501 268 1500
+f 267 1499 145
+f 974 976 977
+f 1678 1693 1676
+f 3846 1807 3840
+f 557 663 558
+f 983 984 985
+f 1443 3849 3848
+f 421 268 265
+f 989 991 988
+f 1490 418 1504
+f 279 309 310
+f 997 676 679
+f 998 999 875
+f 1504 38 1505
+f 1506 1507 1486
+f 360 1508 1506
+f 1506 207 360
+f 939 1006 938
+f 270 1483 269
+f 66 546 512
+f 1010 793 935
+f 367 437 1509
+f 244 363 243
+f 1670 3534 1669
+f 209 1510 226
+f 235 1512 1511
+f 1511 234 235
+f 388 426 425
+f 427 250 237
+f 161 521 231
+f 100 45 44
+f 1025 608 1027
+f 1029 1031 1028
+f 939 1033 1032
+f 41 594 99
+f 1036 783 1037
+f 702 1038 1039
+f 1040 1042 1043
+f 362 369 1493
+f 187 477 186
+f 70 539 198
+f 1511 1521 234
+f 1049 603 1048
+f 613 869 614
+f 860 1051 857
+f 1005 1053 1006
+f 753 1056 1054
+f 1057 1059 1055
+f 293 1522 292
+f 288 1521 1522
+f 292 1523 329
+f 331 1508 1524
+f 306 453 305
+f 100 41 99
+f 462 78 79
+f 330 1524 462
+f 635 1068 636
+f 1485 1526 1528
+f 206 1528 1510
+f 1070 1072 831
+f 942 1073 1074
+f 916 1075 917
+f 440 1530 439
+f 157 1514 285
+f 1529 5 1530
+f 371 546 63
+f 790 663 791
+f 1077 857 1078
+f 1403 1435 1165
+f 1080 1082 1079
+f 531 1529 440
+f 21 1084 1085
+f 272 1531 561
+f 137 396 397
+f 690 802 691
+f 335 3567 581
+f 1090 1029 1091
+f 1092 794 1042
+f 1533 137 138
+f 661 1095 1096
+f 69 445 72
+f 1099 1101 1098
+f 38 1536 1505
+f 1104 1106 1103
+f 852 1108 853
+f 1377 909 1386
+f 291 411 294
+f 316 516 33
+f 539 1533 534
+f 1505 316 313
+f 1110 1112 1113
+f 1539 460 264
+f 522 1537 1539
+f 834 1120 1119
+f 1122 1124 1121
+f 612 1041 901
+f 935 615 614
+f 276 130 131
+f 1686 3522 1685
+f 193 1541 273
+f 231 1540 287
+f 556 982 557
+f 365 85 345
+f 190 318 1541
+f 1810 3574 3528
+f 3924 3512 3791
+f 1132 985 1131
+f 208 102 132
+f 1134 662 661
+f 1491 80 77
+f 429 1542 147
+f 1137 915 918
+f 172 428 174
+f 1139 1141 1142
+f 549 54 55
+f 1517 1518 296
+f 976 1146 938
+f 834 984 941
+f 1543 286 115
+f 1544 239 513
+f 133 1544 1542
+f 506 257 258
+f 420 328 401
+f 1158 1160 1157
+f 587 1103 588
+f 1513 1514 151
+f 607 1027 608
+f 304 1545 303
+f 364 77 506
+f 69 198 139
+f 1006 976 938
+f 700 1112 701
+f 3877 3525 3787
+f 1166 657 656
+f 1167 858 1077
+f 1300 3524 1299
+f 1172 1174 1175
+f 218 200 217
+f 1179 1181 1178
+f 372 97 98
+f 89 227 88
+f 1186 1188 1189
+f 172 1517 1549
+f 1482 81 1498
+f 1090 1193 1030
+f 394 353 1551
+f 526 1198 1197
+f 811 858 812
+f 1199 1026 1025
+f 606 697 696
+f 1204 1206 743
+f 1207 523 526
+f 1209 867 613
+f 914 685 936
+f 1478 1441 1477
+f 1029 1161 1091
+f 1211 1213 1214
+f 3858 1451 3859
+f 844 1216 845
+f 423 273 274
+f 1220 1222 1083
+f 22 1224 23
+f 882 897 881
+f 1519 173 387
+f 1189 1232 1178
+f 349 550 416
+f 1071 1236 1072
+f 1237 1239 745
+f 1221 1189 1240
+f 1510 1512 333
+f 939 1241 1005
+f 1556 5 67
+f 1553 394 1551
+f 404 67 347
+f 1247 1249 1246
+f 142 238 92
+f 1251 635 1250
+f 850 1253 851
+f 1254 1077 1255
+f 57 133 429
+f 443 552 551
+f 1187 1175 1188
+f 1256 831 833
+f 432 382 383
+f 741 747 746
+f 434 351 415
+f 349 383 348
+f 251 1481 1480
+f 395 352 353
+f 1518 1550 1551
+f 1558 378 340
+f 109 314 315
+f 285 1513 407
+f 395 393 392
+f 202 410 1558
+f 417 405 432
+f 1246 1258 1247
+f 3 297 2
+f 1259 1261 1262
+f 403 347 284
+f 955 953 952
+f 462 51 330
+f 1096 687 1263
+f 429 60 57
+f 896 1265 932
+f 1267 1268 1266
+f 423 327 420
+f 37 180 40
+f 20 1187 1186
+f 1256 1071 1070
+f 155 156 504
+f 1188 1269 1232
+f 1236 1271 1262
+f 1272 535 621
+f 565 463 464
+f 1273 745 1239
+f 876 1255 1274
+f 316 164 315
+f 437 503 1509
+f 194 1495 197
+f 1275 864 863
+f 624 631 630
+f 1277 1137 1278
+f 569 567 566
+f 1481 125 366
+f 324 547 326
+f 1279 1107 1280
+f 1525 329 1523
+f 1281 1137 918
+f 171 1553 1552
+f 1488 396 1533
+f 1557 1562 1559
+f 1282 1206 1205
+f 1053 1278 1281
+f 778 607 1283
+f 713 3566 3569
+f 251 144 124
+f 4 348 383
+f 709 707 706
+f 728 45 246
+f 261 1563 544
+f 789 1287 790
+f 74 263 282
+f 403 385 434
+f 1178 1240 1189
+f 1223 1289 1224
+f 158 150 151
+f 1220 1186 1221
+f 296 354 433
+f 784 1538 1566
+f 1181 1291 1222
+f 1269 1257 1246
+f 1566 438 399
+f 136 1566 1564
+f 1563 169 1565
+f 107 109 108
+f 1297 1298 1296
+f 1464 1476 1461
+f 1520 507 508
+f 1301 1303 1304
+f 1306 1308 1305
+f 175 1532 272
+f 1532 200 201
+f 1311 1312 1313
+f 872 1315 1316
+f 1317 1318 1319
+f 3836 878 3828
+f 1305 864 1276
+f 1040 863 1320
+f 1564 399 169
+f 592 1575 591
+f 591 1088 689
+f 1581 1583 1582
+f 1196 1147 1176
+f 710 583 713
+f 285 408 386
+f 953 961 963
+f 1594 1589 1593
+f 1585 1587 1586
+f 1592 1590 1589
+f 3885 944 3899
+f 1600 1598 1597
+f 1604 1602 1601
+f 1399 979 980
+f 1589 1596 1595
+f 1610 1608 1607
+f 298 361 360
+f 551 276 277
+f 1627 3814 3819
+f 859 818 860
+f 1078 1328 1329
+f 1555 1530 1556
+f 572 680 681
+f 772 776 774
+f 977 918 917
+f 404 392 393
+f 1054 750 753
+f 1644 1643 1642
+f 932 1082 933
+f 1067 1334 1068
+f 1540 264 114
+f 597 643 1336
+f 471 762 470
+f 1642 3831 3842
+f 1644 3842 3843
+f 923 950 927
+f 96 548 95
+f 520 522 521
+f 445 443 442
+f 849 848 842
+f 480 755 493
+f 1646 481 1639
+f 311 1567 170
+f 464 3497 3495
+f 3497 1361 3495
+f 234 236 235
+f 190 192 191
+f 153 152 149
+f 231 286 230
+f 1552 1551 1550
+f 760 1648 488
+f 3832 1649 3830
+f 828 826 825
+f 52 166 165
+f 1663 1661 1660
+f 287 115 286
+f 668 670 669
+f 634 640 639
+f 553 559 554
+f 118 33 516
+f 492 491 490
+f 3552 1651 3553
+f 582 584 583
+f 384 392 415
+f 1381 1383 1380
+f 1562 435 436
+f 952 951 947
+f 3529 583 584
+f 1660 1667 1666
+f 1394 1396 1393
+f 1660 1664 1663
+f 1671 1669 1668
+f 1624 1673 1672
+f 1407 1409 1410
+f 1677 1675 1674
+f 1613 1677 1625
+f 1679 1676 1677
+f 1626 3811 3814
+f 534 198 539
+f 176 150 159
+f 803 734 800
+f 400 191 192
+f 65 143 144
+f 3526 1656 3540
+f 402 157 283
+f 629 627 626
+f 461 49 50
+f 758 495 756
+f 796 786 785
+f 406 1513 1560
+f 342 166 167
+f 1694 1658 1695
+f 1692 1694 1696
+f 3557 1652 3552
+f 3921 1703 3923
+f 326 302 90
+f 3916 1702 1706
+f 3919 1698 1701
+f 1699 3927 3918
+f 1557 1560 1561
+f 1127 1117 1128
+f 400 289 346
+f 1706 1707 1708
+f 1707 1599 1600
+f 1709 1708 1588
+f 1714 1711 1713
+f 3919 1709 1700
+f 1704 1577 1703
+f 1716 1713 1715
+f 1559 1549 297
+f 1722 1720 1719
+f 1335 596 1489
+f 410 377 378
+f 973 1163 972
+f 168 399 398
+f 1571 1089 1093
+f 1572 682 1571
+f 749 647 754
+f 1730 1723 1729
+f 1594 1729 1592
+f 1729 1591 1592
+f 1731 1730 1583
+f 1384 1377 1379
+f 1732 1712 1714
+f 1724 1731 1732
+f 1712 1731 1580
+f 1722 1732 1733
+f 1721 1733 1734
+f 1734 1716 1717
+f 1735 1736 1697
+f 1733 1714 1716
+f 1663 1741 1668
+f 1742 1740 1743
+f 1738 1737 1735
+f 1665 1666 1740
+f 1665 1744 1664
+f 1725 1746 1745
+f 1591 1745 1590
+f 1590 1747 1596
+f 1745 1748 1747
+f 1659 3538 3542
+f 3287 3545 3282
+f 1185 1182 1190
+f 1664 1749 1741
+f 1741 1739 1738
+f 1746 1743 1748
+f 1726 1742 1746
+f 1153 924 925
+f 1115 1153 1154
+f 1654 3575 1655
+f 1896 3549 1686
+f 1720 1744 1726
+f 1721 1749 1720
+f 1750 1701 1698
+f 1697 1750 1696
+f 1151 1063 1062
+f 1751 1670 1671
+f 1695 1752 1751
+f 961 986 981
+f 1770 1768 1767
+f 1766 1593 1765
+f 1731 1579 1580
+f 1579 1774 1578
+f 3928 1771 3929
+f 1764 3928 3917
+f 1777 1775 1774
+f 3762 1699 1700
+f 1004 1128 1003
+f 1774 1771 1772
+f 987 1399 980
+f 1578 1772 1764
+f 1783 1781 1778
+f 1597 1770 1767
+f 639 3895 3884
+f 3928 1769 1770
+f 1793 1789 1788
+f 3929 1798 1769
+f 3508 3773 1806
+f 1802 3924 3922
+f 1788 1794 1793
+f 1793 1795 1776
+f 3773 1675 1806
+f 1584 1783 1789
+f 1771 3920 3929
+f 1801 1798 1800
+f 1768 1798 1799
+f 1775 1797 1771
+f 508 515 1520
+f 1796 1802 1797
+f 1800 3920 3922
+f 1794 1804 1795
+f 3532 1752 3547
+f 485 1807 775
+f 3809 1640 768
+f 3837 1810 1653
+f 485 1640 1808
+f 1690 3770 3772
+f 487 1648 1811
+f 3839 1816 3822
+f 3838 1648 3830
+f 3839 1811 3838
+f 1812 487 1811
+f 1641 767 768
+f 465 755 776
+f 767 3823 3808
+f 3822 3826 3847
+f 1809 1815 767
+f 3837 1651 3840
+f 675 3541 3496
+f 1814 1822 1821
+f 1821 3821 3820
+f 1824 1649 1825
+f 1827 1850 1805
+f 1668 1662 1663
+f 1779 1848 1828
+f 1790 1805 1794
+f 1833 1853 1852
+f 3521 693 575
+f 1614 1616 1617
+f 1852 3498 3501
+f 1620 1622 1623
+f 1855 1574 1861
+f 1847 3501 3513
+f 1766 1855 1782
+f 1629 1631 1628
+f 1633 1634 1632
+f 1635 1637 1638
+f 1848 1852 1847
+f 1855 1863 1862
+f 1803 1867 1802
+f 1862 1831 1832
+f 1782 1862 1781
+f 3922 1865 1800
+f 1873 1870 1869
+f 1874 799 804
+f 2045 3897 2044
+f 1219 1295 1225
+f 1615 1396 1395
+f 3884 1877 1876
+f 645 3786 3559
+f 1345 1035 1346
+f 634 3884 3880
+f 15 1327 14
+f 3910 1878 1893
+f 1894 605 1893
+f 3506 627 693
+f 3560 645 3559
+f 340 454 1558
+f 1897 195 1898
+f 1889 1877 1890
+f 1901 129 1900
+f 196 754 232
+f 1705 3921 3916
+f 340 166 343
+f 1570 563 561
+f 1904 194 1897
+f 1781 1832 1780
+f 1905 55 1906
+f 1000 995 996
+f 986 920 987
+f 1191 1190 1194
+f 2094 2075 2093
+f 1920 1919 1915
+f 453 1912 1911
+f 1915 1918 1916
+f 3882 2106 3901
+f 1655 3531 1868
+f 1916 1926 1925
+f 704 3561 703
+f 278 1925 1900
+f 3565 1818 1820
+f 1097 1115 1114
+f 1927 727 1923
+f 3578 3654 3758
+f 1920 307 1929
+f 1929 48 728
+f 503 1497 1509
+f 1902 787 796
+f 1816 3820 3826
+f 1925 1923 1924
+f 1535 1568 1534
+f 254 258 255
+f 1509 366 367
+f 119 35 36
+f 219 293 294
+f 141 47 431
+f 542 244 245
+f 1940 430 431
+f 1779 1827 1790
+f 1701 3915 3919
+f 1661 3515 1667
+f 270 754 647
+f 197 749 196
+f 1727 1212 1211
+f 53 232 754
+f 1465 1479 1478
+f 1284 3854 1293
+f 1951 412 413
+f 168 1554 1553
+f 1486 206 207
+f 506 78 361
+f 994 1007 993
+f 951 963 964
+f 1013 3861 1351
+f 3826 1820 1818
+f 1498 1954 1482
+f 299 543 132
+f 1754 1756 1753
+f 1681 1683 1682
+f 1696 1695 1697
+f 13 738 737
+f 473 475 474
+f 482 768 1640
+f 204 30 205
+f 1761 820 819
+f 611 1125 612
+f 1501 1908 1909
+f 1074 711 699
+f 1960 82 83
+f 120 1960 123
+f 35 87 34
+f 1773 1025 1031
+f 1965 1569 1535
+f 494 471 479
+f 1966 1969 1968
+f 446 1971 1967
+f 174 125 126
+f 1580 1578 1577
+f 1785 1787 1784
+f 1972 304 305
+f 1254 875 999
+f 1973 342 514
+f 321 1973 509
+f 1910 275 1907
+f 509 514 508
+f 1486 1527 1485
+f 1974 804 1975
+f 1977 1974 1978
+f 1980 1966 1981
+f 379 2 295
+f 1718 1710 1701
+f 884 992 883
+f 1680 1678 1679
+f 902 525 524
+f 344 256 299
+f 135 137 136
+f 236 228 333
+f 448 1967 1966
+f 589 604 584
+f 910 908 907
+f 1981 1968 1987
+f 1982 1987 1979
+f 1988 1987 1989
+f 1990 1989 1991
+f 1991 1993 1992
+f 322 320 319
+f 992 830 883
+f 1144 1086 1143
+f 1995 1978 1994
+f 1133 1118 1126
+f 1997 1994 1996
+f 870 1335 1489
+f 805 1982 1998
+f 727 2000 1999
+f 1996 1988 1990
+f 1994 1979 1988
+f 1829 792 1273
+f 1830 941 983
+f 798 800 799
+f 729 731 730
+f 2011 2023 2022
+f 1834 846 1159
+f 402 284 347
+f 2022 2024 2025
+f 1838 989 988
+f 810 952 809
+f 1823 1840 644
+f 1430 1432 1431
+f 1215 1841 1079
+f 149 176 177
+f 1316 873 872
+f 2112 1384 2106
+f 1842 1844 1845
+f 1159 845 1160
+f 837 1846 1120
+f 1971 2030 2027
+f 517 370 189
+f 858 833 812
+f 1471 2027 1888
+f 3551 626 627
+f 2174 770 490
+f 935 1854 1010
+f 1613 1611 1618
+f 1231 1225 1295
+f 1856 891 892
+f 1300 748 741
+f 1150 1858 1859
+f 1643 696 1642
+f 1215 1264 1216
+f 1320 902 901
+f 1956 1944 1960
+f 2031 669 1474
+f 2034 1310 1299
+f 1879 1656 1657
+f 40 242 39
+f 1992 2039 1456
+f 888 957 889
+f 1882 1996 1990
+f 1078 1255 1077
+f 1871 1192 1090
+f 294 218 219
+f 1323 2026 1471
+f 123 83 1484
+f 1324 2041 1323
+f 384 359 352
+f 1324 2038 2036
+f 1883 1885 1829
+f 287 114 107
+f 1096 1134 661
+f 915 931 916
+f 806 1981 1982
+f 3533 584 604
+f 1425 2042 1018
+f 1018 1948 1017
+f 3562 740 3519
+f 1948 649 646
+f 1881 1990 1991
+f 1900 130 278
+f 992 994 993
+f 923 925 924
+f 3878 2047 3876
+f 1643 625 606
+f 2050 2051 2049
+f 2045 1404 2047
+f 3876 945 3885
+f 1139 1079 1841
+f 1037 892 982
+f 2055 2057 2056
+f 1198 718 717
+f 1619 1612 1611
+f 2058 1606 2059
+f 2059 1607 2058
+f 2058 1624 1605
+f 71 179 37
+f 405 1557 382
+f 976 1006 977
+f 322 1972 1911
+f 2061 2063 2062
+f 2066 2067 1825
+f 2052 2064 2051
+f 1609 2066 1647
+f 1515 152 1516
+f 1009 993 1007
+f 1644 3815 2118
+f 279 1916 278
+f 1285 3862 1284
+f 1647 1825 1649
+f 2056 2067 2065
+f 319 1953 1952
+f 667 1913 1914
+f 2070 625 2048
+f 2052 2055 2063
+f 2038 1655 1458
+f 1606 2073 2072
+f 1235 1270 1236
+f 2072 2075 2074
+f 2059 2072 2062
+f 2074 2077 2076
+f 1601 2078 2073
+f 2049 1643 1645
+f 1000 1001 994
+f 1089 684 689
+f 1931 1306 1930
+f 1462 1021 1022
+f 1932 1934 1935
+f 875 1846 998
+f 1936 1937 1938
+f 44 46 92
+f 1534 1956 120
+f 1541 317 391
+f 2081 2084 2083
+f 146 1959 300
+f 1360 1942 1361
+f 985 1032 1033
+f 1995 1892 2043
+f 2079 2082 2081
+f 2095 2076 2077
+f 2081 2078 2079
+f 2055 2054 2053
+f 501 2095 500
+f 2097 2099 2098
+f 2098 2094 2083
+f 2140 3893 3866
+f 2141 3894 3893
+f 2051 2048 2049
+f 592 2101 1576
+f 2071 2076 2096
+f 2102 2098 2101
+f 593 2102 592
+f 1019 746 747
+f 1950 12 1327
+f 725 2096 501
+f 1385 2104 1379
+f 1379 2106 1384
+f 2107 2100 2097
+f 3908 502 3896
+f 3896 499 3901
+f 940 2109 944
+f 928 1170 973
+f 3868 684 3899
+f 1962 1964 1961
+f 3878 573 3897
+f 2039 1968 1969
+f 1389 2110 2109
+f 1398 2113 1397
+f 3757 3753 3743
+f 3911 2107 2103
+f 949 924 1115
+f 260 324 326
+f 1390 2113 2110
+f 2070 2064 2071
+f 3867 2103 593
+f 2105 1421 2114
+f 2114 1423 2115
+f 3892 2115 3903
+f 3908 2114 3892
+f 554 1902 553
+f 972 967 966
+f 1765 1573 1574
+f 1984 1986 1983
+f 2053 2069 2057
+f 3573 724 714
+f 1625 1674 1673
+f 3881 2119 2122
+f 3903 2123 3904
+f 56 1906 55
+f 626 3879 629
+f 2001 2003 2004
+f 3912 2127 3907
+f 2006 2008 2009
+f 2127 1412 2128
+f 2014 2016 2017
+f 2018 2020 2021
+f 1495 1947 197
+f 1379 1386 1385
+f 3814 1680 1618
+f 2029 1884 2028
+f 3902 2120 3887
+f 629 3914 3872
+f 682 943 1089
+f 724 2121 716
+f 1757 3821 3845
+f 765 3844 3809
+f 3851 715 3852
+f 706 3870 3872
+f 981 987 980
+f 1457 1656 1880
+f 2017 2032 2033
+f 1458 1868 1457
+f 1776 1792 1793
+f 1959 1946 1958
+f 3663 3725 3661
+f 3913 2122 3850
+f 1093 689 1088
+f 966 968 972
+f 3852 3881 3913
+f 2172 2170 2171
+f 670 2137 669
+f 3761 1903 233
+f 1429 28 1449
+f 964 978 971
+f 3841 1413 2127
+f 1473 669 2137
+f 3834 1359 1358
+f 2138 1870 2137
+f 1872 2137 1870
+f 715 3888 3881
+f 1881 1992 1880
+f 3843 739 740
+f 1837 3513 3514
+f 253 59 85
+f 1808 3840 1807
+f 1647 3818 1609
+f 3865 671 668
+f 2112 3883 2111
+f 1405 2044 1438
+f 3829 1364 1366
+f 273 391 249
+f 576 3520 3555
+f 3875 668 2031
+f 1430 2140 2139
+f 2136 3860 28
+f 1430 18 19
+f 3893 570 695
+f 1442 2046 2141
+f 1431 2141 2140
+f 3869 3850 2136
+f 3808 1817 766
+f 3866 695 671
+f 2142 798 2138
+f 571 2142 694
+f 3819 1618 1611
+f 2143 2025 2142
+f 694 2138 670
+f 681 2144 572
+f 2144 2022 2143
+f 328 457 290
+f 1572 2011 2144
+f 630 3903 624
+f 2085 2087 2088
+f 2090 2092 2089
+f 572 2143 571
+f 1875 2153 2149
+f 1873 2149 2154
+f 2103 2097 2102
+f 2157 2155 2156
+f 2128 1414 2120
+f 3874 589 582
+f 3872 628 629
+f 2149 2158 2159
+f 2159 3905 3890
+f 698 3902 705
+f 3823 3827 1817
+f 2158 3874 3905
+f 1438 2046 1439
+f 2035 2160 2161
+f 2161 3889 3886
+f 3871 605 589
+f 3873 18 3875
+f 370 61 451
+f 649 413 648
+f 2153 2156 2158
+f 1428 1449 1450
+f 1490 73 1488
+f 3890 710 633
+f 2155 3891 3871
+f 1456 1880 1992
+f 2125 2087 2124
+f 2160 3890 3889
+f 300 505 323
+f 1064 1046 1065
+f 2043 1891 2155
+f 2129 2004 2003
+f 2131 2133 2134
+f 3848 1450 1449
+f 1998 804 805
+f 3879 3864 3914
+f 474 490 770
+f 1164 1182 1177
+f 707 3909 3870
+f 708 2162 707
+f 2162 3886 3909
+f 548 1905 458
+f 1145 1114 1171
+f 705 1650 704
+f 2043 1977 1995
+f 737 554 559
+f 1034 1191 1023
+f 458 1907 275
+f 195 232 233
+f 2040 1969 1970
+f 2145 2147 2148
+f 2054 2049 1645
+f 3832 1639 3818
+f 9 11 10
+f 252 506 258
+f 472 479 471
+f 1865 3791 3503
+f 900 910 907
+f 1941 2151 1942
+f 1921 1927 1919
+f 2163 1898 2116
+f 271 153 178
+f 1904 12 9
+f 282 264 460
+f 1872 2154 2035
+f 1326 1897 2163
+f 628 693 627
+f 1724 1719 1723
+f 1549 436 172
+f 1582 1730 1729
+f 1921 1929 1928
+f 3563 10 11
+f 2152 1950 1949
+f 475 774 771
+f 769 2171 473
+f 2171 476 473
+f 476 772 475
+f 1791 464 1760
+f 2166 2168 2169
+f 595 372 98
+f 483 1641 482
+f 762 2173 470
+f 771 484 492
+f 486 1809 483
+f 297 1517 296
+f 763 2174 762
+f 776 759 489
+f 184 2176 2000
+f 412 303 414
+f 2005 185 224
+f 3811 1682 3846
+f 2164 756 764
+f 1472 2161 2162
+f 950 964 965
+f 2177 2179 2180
+f 227 1510 333
+f 764 467 2164
+f 2182 2184 2181
+f 2186 2018 2185
+f 1985 2181 1986
+f 214 729 1531
+f 1545 1546 1569
+f 865 842 866
+f 849 1357 1362
+f 452 1558 454
+f 559 3188 737
+f 16 737 3188
+f 560 498 374
+f 3193 3195 3194
+f 3200 3198 3199
+f 3211 3213 3212
+f 3215 3217 3216
+f 2187 2002 2001
+f 2169 2189 2190
+f 2192 2085 2191
+f 2193 2195 2196
+f 2198 2200 2197
+f 1998 1979 1975
+f 3664 3668 3671
+f 2116 1903 1902
+f 1859 2009 1150
+f 3904 2126 3906
+f 1135 1087 1130
+f 2201 2015 2014
+f 1149 110 1858
+f 3221 3218 3215
+f 2203 2205 2206
+f 579 1895 1019
+f 1755 2208 1756
+f 3891 1893 605
+f 1211 2150 1727
+f 868 2210 2209
+f 944 930 940
+f 2130 2202 2201
+f 233 1898 195
+f 665 750 666
+f 1167 999 618
+f 2211 2213 2214
+f 2215 2217 2218
+f 3867 590 3868
+f 2220 220 2219
+f 823 948 1094
+f 3225 3227 3226
+f 266 455 265
+f 3882 2107 3883
+f 2221 3519 3517
+f 2223 2225 2226
+f 2207 2124 2208
+f 673 3869 3864
+f 817 996 815
+f 2227 2146 2145
+f 1947 646 749
+f 3900 17 3873
+f 2229 1638 1381
+f 1679 1618 1680
+f 1637 2232 2230
+f 2233 1617 1616
+f 2231 2236 2237
+f 1381 2238 1382
+f 3586 2242 3587
+f 1630 2233 1631
+f 1681 472 469
+f 1408 2243 2244
+f 3236 3238 3237
+f 3232 3240 3235
+f 1637 2238 1638
+f 3237 3233 3234
+f 1409 1634 2245
+f 1380 2247 2246
+f 1628 2245 1621
+f 2249 1622 1633
+f 1632 1408 2244
+f 2248 1631 2250
+f 2235 1637 1636
+f 3245 3199 3198
+f 631 2071 725
+f 515 52 49
+f 782 2251 1251
+f 2253 2255 2252
+f 397 459 784
+f 1456 2040 1455
+f 2040 2036 1455
+f 1401 2111 1398
+f 2041 1970 2026
+f 1535 414 1965
+f 1586 2080 2079
+f 2256 1132 1886
+f 1700 3918 3919
+f 3815 1686 3817
+f 1263 1886 1096
+f 598 844 599
+f 2257 2259 2260
+f 2258 2262 2259
+f 2259 2263 2216
+f 2260 2216 2215
+f 1967 1970 1969
+f 2212 1963 2213
+f 922 2218 2217
+f 1312 1101 2264
+f 1307 1286 789
+f 1548 1546 1972
+f 2266 2268 2265
+f 2265 2270 2269
+f 2272 2269 2271
+f 2273 2265 2272
+f 2275 2277 2274
+f 2278 2277 2279
+f 2284 3617 3639
+f 2287 2286 2281
+f 2288 2283 2287
+f 2290 3639 3620
+f 2291 2293 2294
+f 2295 2294 2279
+f 2296 2291 2295
+f 2297 2292 2291
+f 2294 2299 2300
+f 2279 2300 2278
+f 2298 2302 2292
+f 2292 2303 2293
+f 2293 2304 2299
+f 2304 2306 2299
+f 2306 2300 2299
+f 2308 2310 2307
+f 2302 2310 2303
+f 2301 2307 2302
+f 2312 2307 2311
+f 2313 2315 2316
+f 2318 2320 2317
+f 3748 2322 2323
+f 2324 2273 2326
+f 2313 2326 2327
+f 2328 2313 2316
+f 2329 2324 2328
+f 2331 2333 2330
+f 2330 2267 2266
+f 2325 2266 2273
+f 2329 2330 2325
+f 2333 2335 2267
+f 2336 2333 2332
+f 2338 2332 2337
+f 2331 2337 2332
+f 2329 2339 2331
+f 2328 2340 2329
+f 2316 2341 2328
+f 2342 2315 2343
+f 2344 2346 2347
+f 2341 2347 2340
+f 2342 2344 2341
+f 2348 2345 2344
+f 2350 2352 2353
+f 2339 2353 2337
+f 2340 2350 2339
+f 2347 2351 2350
+f 2337 2354 2338
+f 2353 2355 2354
+f 2356 2276 2357
+f 2356 2358 2359
+f 2356 2296 2295
+f 2279 2356 2295
+f 2360 2270 2268
+f 2267 2360 2268
+f 2362 2364 2365
+f 2362 2361 2358
+f 2366 2358 2357
+f 2366 2363 2362
+f 2357 2368 2366
+f 2366 2369 2367
+f 2276 2370 2368
+f 2368 2282 2369
+f 2372 2374 2371
+f 2375 2377 2007
+f 2378 2380 2381
+f 2382 2381 2383
+f 2384 2378 2382
+f 2385 2379 2378
+f 2388 2390 2387
+f 2387 2392 2391
+f 2393 2387 2391
+f 2394 2388 2387
+f 2186 2402 2400
+f 2403 2327 2326
+f 2272 2326 2273
+f 2271 2403 2272
+f 2405 2404 2403
+f 2404 2408 2327
+f 2409 2411 2412
+f 2409 2271 2269
+f 2413 2269 2270
+f 2413 2410 2409
+f 2417 2416 2418
+f 2417 2406 2405
+f 2412 2405 2271
+f 2411 2417 2412
+f 2406 2407 2404
+f 3569 632 713
+f 2420 2397 2421
+f 2420 2423 2422
+f 2424 2422 2419
+f 2424 2398 2420
+f 2425 2392 2426
+f 2421 2426 2423
+f 2397 2425 2421
+f 2396 2391 2425
+f 2427 2315 2314
+f 2427 2408 2428
+f 2430 2428 2429
+f 2343 2427 2430
+f 2432 2430 2431
+f 2431 2429 2433
+f 2435 2433 2434
+f 2436 2431 2435
+f 2428 2407 2437
+f 2422 2407 2419
+f 2438 2422 2423
+f 2429 2437 2438
+f 2440 2433 2439
+f 2441 2433 2429
+f 2442 2439 2441
+f 2443 2440 2439
+f 2445 2444 2443
+f 2447 2443 2442
+f 2383 2445 2447
+f 2381 2446 2445
+f 2448 2423 2426
+f 2449 2426 2392
+f 2441 2449 2442
+f 2441 2438 2448
+f 2449 2390 2450
+f 2450 2389 2451
+f 2447 2451 2383
+f 2442 2450 2447
+f 2451 2453 2452
+f 2452 2455 2454
+f 2382 2454 2384
+f 2383 2452 2382
+f 2456 2373 2372
+f 2459 2461 2458
+f 2463 2465 2462
+f 2462 2285 2284
+f 2466 2284 2290
+f 2467 2462 2466
+f 2461 3703 3701
+f 2468 2470 2471
+f 2472 2471 2473
+f 2474 3701 3688
+f 2475 2477 2478
+f 2458 2453 2479
+f 2479 2389 2388
+f 2395 2479 2388
+f 2480 2458 2479
+f 2481 2483 2478
+f 2484 2485 221
+f 2471 2464 2463
+f 2473 2463 2467
+f 2487 2466 2486
+f 2486 2290 2488
+f 2490 2488 2489
+f 2491 2486 2490
+f 2492 2467 2487
+f 2493 2487 2491
+f 2494 2492 2493
+f 2495 2473 2492
+f 3690 2474 3688
+f 3691 2496 3690
+f 2498 2493 2499
+f 2500 2499 2501
+f 3700 2498 2500
+f 3687 2494 2498
+f 2306 2278 2300
+f 2305 2504 2306
+f 2504 2507 2506
+f 2506 2288 2508
+f 2274 2508 2275
+f 2278 2506 2274
+f 3633 2489 2488
+f 3620 2488 2290
+f 2507 2289 2288
+f 2505 2509 2507
+f 2508 2287 2511
+f 2511 2281 2280
+f 2370 2280 2282
+f 2275 2511 2370
+f 2512 2489 2513
+f 2514 2513 2515
+f 2517 2514 2516
+f 2517 2490 2512
+f 2499 2491 2517
+f 2499 2516 2501
+f 2510 2519 2518
+f 2518 2521 2520
+f 3616 2520 3621
+f 3632 2518 3616
+f 2522 2303 2310
+f 2523 2310 2309
+f 2305 2522 2524
+f 2524 2523 2525
+f 2519 2525 2521
+f 2505 2524 2519
+f 2308 2527 2526
+f 2526 2529 2528
+f 2531 2528 2530
+f 2309 2526 2531
+f 1214 2151 1211
+f 2419 2418 2424
+f 2533 2349 2317
+f 2534 2317 2320
+f 2535 2533 2534
+f 2536 2345 2533
+f 2537 2346 2536
+f 2538 2536 2535
+f 2539 2537 2538
+f 2540 2351 2537
+f 2312 2541 2527
+f 2527 2542 2529
+f 2543 2434 2544
+f 2545 2544 2546
+f 2319 2543 2545
+f 2318 2435 2543
+f 2514 2548 2547
+f 2547 2550 2549
+f 2552 2549 2551
+f 2552 2514 2547
+f 2520 2554 2553
+f 2553 2556 2555
+f 3590 2555 3593
+f 3621 2553 3590
+f 2523 2531 2557
+f 2557 2530 2558
+f 2525 2557 2559
+f 2559 2558 2560
+f 2554 2560 2556
+f 2521 2559 2554
+f 2561 2007 2006
+f 2376 2225 2377
+f 2564 2566 2563
+f 2563 338 2567
+f 2569 2567 2568
+f 2570 2563 2569
+f 2572 2574 2571
+f 2571 2576 2575
+f 2544 2575 2546
+f 2544 2572 2571
+f 2578 2569 2577
+f 2579 2569 2568
+f 2574 2579 2576
+f 2573 2577 2574
+f 2581 2583 2580
+f 2584 2583 2585
+f 3697 2580 2584
+f 3676 2580 3675
+f 2582 2552 2583
+f 2583 2551 2585
+f 2500 2589 2588
+f 2590 2589 2591
+f 3693 2588 2590
+f 3695 2500 2588
+f 2501 2591 2589
+f 2592 2594 2595
+f 2386 2503 2502
+f 2596 2386 2374
+f 2596 2373 2597
+f 2599 2597 2598
+f 2599 2379 2596
+f 2600 2380 2599
+f 2600 2598 2601
+f 2603 2601 2602
+f 2603 2446 2600
+f 2604 2602 2605
+f 2440 2605 2434
+f 2444 2604 2440
+f 2606 2581 2607
+f 2606 2590 2591
+f 2591 2582 2606
+f 3676 2456 3689
+f 3689 2372 3693
+f 2478 2608 2481
+f 2609 2611 2199
+f 2564 2613 2612
+f 2612 2598 2597
+f 2457 2597 2373
+f 2565 2612 2457
+f 2614 2570 2578
+f 2615 2578 2573
+f 2602 2614 2615
+f 2601 2613 2614
+f 2572 2605 2616
+f 2616 2602 2615
+f 2573 2616 2615
+f 1459 1045 1021
+f 1265 1079 1082
+f 1612 1609 1611
+f 2067 2069 2068
+f 1135 1136 1148
+f 1938 2618 2617
+f 95 458 128
+f 2058 1608 1612
+f 1454 1445 1367
+f 655 1048 2619
+f 722 2621 723
+f 1131 1033 1134
+f 562 366 1497
+f 1723 1725 1591
+f 1786 1122 1787
+f 267 323 266
+f 1325 1654 2038
+f 1468 1435 1479
+f 658 529 528
+f 8 247 7
+f 382 1559 3
+f 788 1307 789
+f 940 929 928
+f 911 2623 912
+f 2621 655 654
+f 2624 2625 2626
+f 998 27 26
+f 635 2626 2625
+f 2627 644 1840
+f 2628 1840 2629
+f 1200 2627 2628
+f 1199 641 2627
+f 1520 49 477
+f 1452 1349 1451
+f 2630 1859 1858
+f 644 1335 1823
+f 1197 717 2632
+f 2632 719 1166
+f 2628 1166 1200
+f 2629 2632 2628
+f 1762 1050 821
+f 1823 873 1839
+f 2634 794 652
+f 1028 1027 1161
+f 1042 2635 1043
+f 93 1901 1924
+f 2036 1458 1455
+f 1527 1525 1526
+f 877 1274 1277
+f 134 797 1544
+f 1328 931 1329
+f 607 1091 1161
+f 1043 2637 2636
+f 2636 2639 2638
+f 1275 2638 1276
+f 1275 1043 2636
+f 1272 2626 2640
+f 2640 638 637
+f 1110 2640 637
+f 536 2640 2641
+f 1472 1474 1473
+f 2042 1951 649
+f 1058 1105 1059
+f 2642 2643 2644
+f 1192 2644 1193
+f 912 2642 1192
+f 3898 3526 3786
+f 1049 2620 2621
+f 616 1885 613
+f 1331 666 750
+f 990 1198 525
+f 1911 321 322
+f 1298 1205 2645
+f 76 1504 75
+f 1502 421 422
+f 610 658 691
+f 1146 659 938
+f 2646 914 936
+f 1280 1238 1279
+f 975 2647 2648
+f 2647 975 974
+f 1453 1452 1451
+f 1514 158 151
+f 835 1843 1842
+f 1271 1173 1289
+f 2649 813 2650
+f 1893 1826 1894
+f 1050 1937 1051
+f 2651 814 813
+f 1006 1281 977
+f 2653 2654 2655
+f 1001 1003 1002
+f 585 679 558
+f 2624 621 620
+f 555 2028 556
+f 1207 1197 2629
+f 1626 478 472
+f 426 419 1567
+f 1030 1773 1031
+f 3898 634 3880
+f 534 138 262
+f 1125 1042 1041
+f 333 88 227
+f 569 561 730
+f 1489 871 870
+f 1857 620 891
+f 1673 3770 3763
+f 622 864 1308
+f 2647 974 2657
+f 619 891 620
+f 261 545 262
+f 393 1556 404
+f 2658 2647 2657
+f 1075 2657 917
+f 1081 2658 1075
+f 1080 2659 2658
+f 2644 1141 2660
+f 2660 1140 1336
+f 642 1336 643
+f 1193 2660 642
+f 679 555 558
+f 1503 1500 1502
+f 1318 1101 1100
+f 2661 1139 1142
+f 2661 2663 2662
+f 2662 2659 2661
+f 2662 2622 2664
+f 2664 911 2646
+f 2648 2646 1146
+f 2648 2662 2664
+f 2083 2093 2081
+f 1442 1437 1439
+f 519 330 51
+f 1885 1209 613
+f 799 805 804
+f 1946 1952 1953
+f 1302 1253 1252
+f 566 224 533
+f 1389 928 1388
+f 683 681 680
+f 836 1842 2665
+f 2665 1845 1303
+f 2666 1303 1302
+f 24 2665 2666
+f 831 2650 832
+f 1845 1304 1303
+f 2010 2012 2011
+f 1051 2667 2668
+f 1522 1511 1523
+f 531 441 522
+f 782 1251 1250
+f 1250 2625 2669
+f 2669 1857 1856
+f 783 1856 1037
+f 782 2669 783
+f 720 2671 721
+f 1055 1930 1056
+f 1333 1251 2251
+f 619 1308 1307
+f 652 2619 2634
+f 521 1539 1540
+f 1838 537 2672
+f 2672 536 2641
+f 1113 2641 1110
+f 1113 1838 2672
+f 1016 1351 1045
+f 3897 570 3894
+f 226 104 101
+f 3564 3827 1818
+f 1036 1884 1883
+f 2674 1304 2673
+f 2673 1844 2675
+f 1334 2675 1039
+f 1333 2673 1334
+f 2676 1039 2675
+f 1843 2675 1844
+f 942 2676 1843
+f 1074 702 2676
+f 811 859 858
+f 752 1055 753
+f 1200 656 1026
+f 1120 874 2677
+f 2677 877 1052
+f 1241 1052 1005
+f 1119 2677 1241
+f 1560 151 1515
+f 1208 611 523
+f 215 736 214
+f 1460 1462 1461
+f 1208 1839 873
+f 854 1282 1297
+f 72 182 179
+f 544 1565 512
+f 217 456 457
+f 2047 930 945
+f 1204 1937 2633
+f 913 686 685
+f 840 1113 1112
+f 1531 730 561
+f 2678 1283 801
+f 601 1914 602
+f 1537 459 460
+f 1819 3509 3527
+f 991 862 538
+f 2679 1073 1830
+f 2680 983 2256
+f 779 2256 1263
+f 778 2680 779
+f 2678 1830 2680
+f 180 245 40
+f 1886 1131 1134
+f 1138 1274 1255
+f 1841 1140 1139
+f 75 314 74
+f 193 420 192
+f 2681 1099 1098
+f 170 1565 169
+f 349 417 432
+f 1121 2684 2683
+f 547 409 302
+f 123 121 120
+f 2643 1142 1141
+f 1542 513 1543
+f 899 907 919
+f 1210 676 867
+f 791 982 892
+f 24 837 836
+f 893 932 931
+f 1860 863 862
+f 535 862 861
+f 832 813 812
+f 2650 813 832
+f 1569 1547 1568
+f 181 542 180
+f 660 937 688
+f 2013 568 2012
+f 887 2631 2630
+f 1524 361 78
+f 520 162 1529
+f 2653 854 2685
+f 1238 2655 2686
+f 718 527 530
+f 1492 518 1491
+f 792 615 793
+f 599 1835 596
+f 290 456 175
+f 312 1552 425
+f 1248 1179 1249
+f 153 177 178
+f 1240 1222 1221
+f 1257 1235 1071
+f 1291 1083 1222
+f 1052 1277 1053
+f 743 2618 1204
+f 444 430 443
+f 23 1172 1187
+f 2687 1180 1179
+f 102 409 134
+f 1283 610 801
+f 373 650 651
+f 1180 1084 1291
+f 855 2655 1237
+f 1173 1917 1174
+f 2689 853 2688
+f 2688 1108 1279
+f 2686 1279 1238
+f 2687 2688 2686
+f 2691 1252 2690
+f 2690 850 2689
+f 1248 2689 2687
+f 1248 2691 2690
+f 2692 24 2666
+f 2691 2666 1302
+f 1247 2692 2691
+f 1258 617 2692
+f 394 1555 393
+f 2693 1259 1223
+f 1072 1262 1261
+f 1257 1168 1258
+f 398 1555 1554
+f 871 1314 872
+f 1178 1249 1179
+f 1224 1173 1172
+f 1010 2618 742
+f 1175 1292 1269
+f 2654 2686 2655
+f 2694 1216 1264
+f 71 1487 70
+f 701 1111 1038
+f 1072 2649 2650
+f 1085 22 21
+f 895 1264 896
+f 1601 1605 1604
+f 1259 1271 1289
+f 21 1220 1083
+f 988 538 537
+f 598 1336 1140
+f 820 1762 821
+f 1050 818 821
+f 1298 1761 2652
+f 1168 618 617
+f 858 1256 833
+f 39 516 1536
+f 2652 819 814
+f 313 315 314
+f 2026 1971 2027
+f 2656 596 1835
+f 814 818 811
+f 2696 813 2649
+f 1260 2649 1261
+f 2693 2696 1260
+f 2697 2651 2696
+f 381 390 380
+f 3818 1611 1609
+f 1475 1428 1227
+f 1292 1917 1235
+f 1551 354 1518
+f 1526 1523 1511
+f 1593 1595 1573
+f 1232 1246 1249
+f 2695 1223 22
+f 2698 1296 2697
+f 2698 2693 2695
+f 2685 2695 1085
+f 2685 1297 2698
+f 562 271 272
+f 2652 1296 1298
+f 1365 1364 1363
+f 1958 1953 1957
+f 585 2210 997
+f 1561 1515 435
+f 975 1146 976
+f 347 68 346
+f 1038 636 1068
+f 2700 780 2699
+f 2699 1036 1883
+f 2701 1883 1829
+f 851 2699 2701
+f 2674 2251 2702
+f 2702 781 2700
+f 1253 2700 851
+f 1301 2702 1253
+f 1519 388 1550
+f 1273 1280 2703
+f 2703 1107 852
+f 2701 852 851
+f 1829 2703 2701
+f 91 326 90
+f 1784 2656 1785
+f 2683 2705 2704
+f 604 3499 3533
+f 1669 3548 1662
+f 2062 2060 2059
+f 621 861 622
+f 2023 732 2024
+f 686 1263 687
+f 857 2668 1328
+f 883 882 879
+f 637 1111 1110
+f 2668 893 1328
+f 2667 894 2668
+f 1039 1068 1334
+f 313 1504 1505
+f 1911 305 453
+f 1936 1964 2212
+f 2707 2401 2183
+f 2708 2183 2182
+f 738 2116 554
+f 2349 2432 2436
+f 2711 2020 2019
+f 2400 2019 2186
+f 2714 2716 2713
+f 2717 2719 2720
+f 2721 2723 2724
+f 2726 2728 2725
+f 2725 2730 2729
+f 2731 2729 2709
+f 2732 2725 2731
+f 2734 2736 2733
+f 2738 2740 2737
+f 2737 2741 2742
+f 2744 2746 2743
+f 2745 2748 2747
+f 2749 2747 2751
+f 2753 2751 2752
+f 2250 2246 2248
+f 2754 1614 2755
+f 3598 2755 3609
+f 3608 2747 3609
+f 1617 2755 1614
+f 2242 1620 2757
+f 2191 2760 2759
+f 2761 2760 2762
+f 3648 2761 3649
+f 3623 2759 3648
+f 3209 3223 3224
+f 1395 2236 2235
+f 1395 1636 1615
+f 1616 2229 2233
+f 1617 2763 2756
+f 2756 3628 3608
+f 2765 2234 1630
+f 2766 3628 3622
+f 3202 3246 3247
+f 1516 435 1515
+f 2768 3552 3553
+f 1495 578 1424
+f 2135 28 17
+f 3526 2773 2774
+f 1690 3926 3925
+f 2778 2780 2777
+f 2777 2781 2782
+f 2784 2777 2783
+f 3555 575 576
+f 1494 9 540
+f 3529 2015 3566
+f 451 505 450
+f 2786 2788 2785
+f 2779 2785 2780
+f 2789 2741 2740
+f 2790 2752 2789
+f 2792 2794 2791
+f 2793 2796 2794
+f 2796 2750 2749
+f 2794 2749 2753
+f 2798 1396 2754
+f 3591 2754 3598
+f 2798 2800 1393
+f 3591 2799 2798
+f 2797 2795 2801
+f 2791 2753 2790
+f 216 232 54
+f 3804 2807 3803
+f 3805 2804 3803
+f 2769 3557 3552
+f 183 184 97
+f 2811 2775 2776
+f 2804 2776 2805
+f 2809 2811 2804
+f 2812 2813 2814
+f 3805 2816 3795
+f 3796 2815 3795
+f 3798 2820 2821
+f 3793 2819 3798
+f 2824 2735 2819
+f 2823 2824 2819
+f 2825 2826 2724
+f 3793 2827 3792
+f 3796 2826 3792
+f 2713 2829 2828
+f 2828 2831 2830
+f 2822 2828 2830
+f 2821 2713 2828
+f 2832 2822 2830
+f 2833 2830 2831
+f 2834 2832 2833
+f 2835 2827 2832
+f 2837 2839 2836
+f 2836 2841 2840
+f 2842 2840 2727
+f 2843 2836 2842
+f 2840 2845 2844
+f 2718 2845 2719
+f 2730 2844 2718
+f 2727 2844 2728
+f 2846 2719 2845
+f 2846 2841 2847
+f 2829 2847 2831
+f 2829 2716 2846
+f 2847 2839 2848
+f 2848 2838 2849
+f 2833 2849 2834
+f 2831 2848 2833
+f 2850 2851 2852
+f 2853 2852 2854
+f 2783 2782 2855
+f 2785 2853 2856
+f 2780 2856 2781
+f 2857 2855 2782
+f 2859 2782 2781
+f 2861 2842 2860
+f 2860 2727 2726
+f 2862 2726 2732
+f 2863 2860 2862
+f 2861 2858 2857
+f 2843 2857 2859
+f 2856 2854 2864
+f 2781 2864 2859
+f 2837 2859 2864
+f 2838 2864 2854
+f 2852 2834 2849
+f 2854 2849 2838
+f 2865 2818 2835
+f 2851 2835 2834
+f 2813 2815 2865
+f 2814 2865 2851
+f 2866 2814 2850
+f 2788 2850 2853
+f 2866 2787 2867
+f 2812 2867 2775
+f 1892 1890 1891
+f 2720 2846 2870
+f 298 256 257
+f 2872 2713 2820
+f 3794 2735 2734
+f 2729 2874 2873
+f 2846 2715 2870
+f 2876 2878 2875
+f 2733 2878 2734
+f 3806 2879 2880
+f 3806 2872 3794
+f 2882 2884 2881
+f 2881 2715 2714
+f 2880 2714 2872
+f 2879 2881 2880
+f 2885 2887 2888
+f 2888 2890 2885
+f 2885 2892 2891
+f 2893 2885 2891
+f 1942 2532 2894
+f 2895 2720 2870
+f 2884 2870 2715
+f 2883 2895 2884
+f 2896 2892 2895
+f 2889 2717 2890
+f 2890 2720 2892
+f 2897 2891 2896
+f 2898 2896 2883
+f 2899 2883 2882
+f 2900 2882 2879
+f 3797 2879 3807
+f 2876 2901 2877
+f 2901 2904 2903
+f 1785 1835 1834
+f 2905 2899 2900
+f 2907 2897 2898
+f 3797 2903 3799
+f 632 577 633
+f 3532 2867 2787
+f 434 550 350
+f 2768 3640 3643
+f 2910 1623 2249
+f 2241 2758 2242
+f 3640 2913 3652
+f 1623 2757 1620
+f 3587 2757 3652
+f 2229 1380 2250
+f 2765 1629 2914
+f 2915 1629 2912
+f 3642 2914 2915
+f 3641 2765 2914
+f 2919 3642 3613
+f 2918 2912 2241
+f 3614 2241 3586
+f 3618 3613 3670
+f 3541 2180 3496
+f 3521 2188 3506
+f 3705 714 3851
+f 2925 2917 2924
+f 2924 2916 2919
+f 3610 2919 3618
+f 3611 2924 3610
+f 2926 2766 2925
+f 3658 2925 3611
+f 1715 1705 1710
+f 785 498 796
+f 1387 565 1791
+f 374 1899 560
+f 3573 2206 3556
+f 2929 2886 2893
+f 2932 2934 2931
+f 1905 1908 1907
+f 2930 2936 2937
+f 2938 2930 2929
+f 1493 517 518
+f 1603 1586 1602
+f 2939 2929 2908
+f 2940 2908 2907
+f 3665 1600 3661
+f 2886 2937 2887
+f 2906 2898 2899
+f 2908 2893 2897
+f 2942 2933 2941
+f 2941 2937 2936
+f 3501 2936 3513
+f 3498 2941 3501
+f 2943 2710 2873
+f 2943 2874 2889
+f 2945 2889 2888
+f 2946 2943 2945
+f 2932 2888 2887
+f 2931 2945 2932
+f 727 1929 728
+f 2903 2947 2948
+f 545 42 262
+f 1480 533 251
+f 2907 2949 2940
+f 2906 2950 2949
+f 1558 215 202
+f 3799 2948 3801
+f 2054 1644 2118
+f 3705 2203 3573
+f 1828 1849 1827
+f 2953 2955 2952
+f 2952 2957 2956
+f 2956 2258 2952
+f 2953 2258 2257
+f 2454 2474 2496
+f 3747 2959 3766
+f 3747 2131 3737
+f 2961 2963 2958
+f 2958 2964 2959
+f 222 2485 2965
+f 2477 2966 2608
+f 2631 2006 1859
+f 2384 2496 2497
+f 2967 2959 2964
+f 2968 2970 2969
+f 3759 2971 3784
+f 3784 2960 3766
+f 2972 2033 2032
+f 2007 2973 2008
+f 3560 2134 2133
+f 1836 1832 1831
+f 3506 2090 3551
+f 955 959 956
+f 672 674 673
+f 633 713 632
+f 2024 733 803
+f 2025 803 798
+f 425 1550 388
+f 2975 888 887
+f 1319 1157 2694
+f 1212 2977 1213
+f 889 2228 2227
+f 1978 1975 1979
+f 1138 1329 915
+f 751 2979 2978
+f 1619 1625 1624
+f 2252 1728 1727
+f 1157 845 1216
+f 2706 1938 2617
+f 2980 2254 2981
+f 2982 887 2630
+f 802 711 2679
+f 678 2029 2028
+f 527 690 528
+f 1839 2629 1840
+f 2983 602 1914
+f 2983 1913 1331
+f 2985 1331 1054
+f 2986 2983 2985
+f 2987 2635 2634
+f 2985 2639 2986
+f 2990 1318 2989
+f 2991 2993 2994
+f 2995 2994 2996
+f 1106 2991 2995
+f 1058 2992 2991
+f 1415 1365 1367
+f 1317 2217 2997
+f 1932 2255 2980
+f 926 2999 664
+f 3001 2992 3000
+f 3000 1057 2978
+f 2999 2978 2979
+f 2998 3000 2999
+f 3002 2209 3003
+f 3002 2996 2994
+f 2993 3002 2994
+f 3001 2998 3002
+f 1103 2995 3004
+f 3004 2996 3003
+f 2209 3004 3003
+f 588 3004 2210
+f 295 433 381
+f 2638 2988 3005
+f 3005 1056 1930
+f 1305 1930 1306
+f 1276 3005 1305
+f 1987 1993 1989
+f 2633 1761 2645
+f 1423 1417 1419
+f 3561 739 703
+f 2671 2704 2705
+f 1473 2035 1472
+f 922 2694 895
+f 1401 1378 1384
+f 556 1884 1037
+f 842 3825 3836
+f 1854 2617 1010
+f 933 1081 1075
+f 1440 1436 1437
+f 529 719 530
+f 1031 1027 1028
+f 777 912 1871
+f 2645 1204 2633
+f 441 1538 1537
+f 790 587 586
+f 2065 1610 1607
+f 1860 990 902
+f 60 87 59
+f 1424 1948 1947
+f 332 1507 331
+f 261 138 135
+f 187 449 507
+f 1719 1726 1725
+f 1931 1059 3006
+f 3006 1105 1104
+f 1287 1104 587
+f 1286 3006 1287
+f 3007 3009 3010
+f 3009 653 795
+f 618 26 25
+f 2029 1210 1209
+f 957 600 2228
+f 1049 2228 600
+f 2631 3011 2561
+f 890 2227 3011
+f 3011 2145 3012
+f 2561 3012 2375
+f 1026 609 608
+f 2706 1961 1964
+f 3013 2485 3014
+f 2148 1267 3015
+f 3015 1266 1311
+f 2997 1101 1317
+f 642 1773 1193
+f 186 461 511
+f 1041 1320 901
+f 1121 3017 1122
+f 2984 2987 3018
+f 3018 2634 2619
+f 1048 3018 2619
+f 603 2984 3018
+f 2679 801 802
+f 2062 2074 2061
+f 2061 2076 2064
+f 414 1545 1965
+f 3016 2683 3019
+f 3019 2704 2670
+f 3020 2670 720
+f 3007 3019 3020
+f 1871 1091 777
+f 3021 1122 3017
+f 3022 3017 3010
+f 1314 3022 1315
+f 1784 3021 1314
+f 1316 1763 611
+f 3023 3010 3009
+f 3023 795 1092
+f 1763 1092 1125
+f 1315 3023 1763
+f 3016 3010 3017
+f 752 2978 1057
+f 2986 2637 2987
+f 3024 653 3008
+f 3020 3008 3007
+f 720 3024 3020
+f 723 654 3024
+f 2681 1268 3025
+f 2671 1268 721
+f 2705 3025 2671
+f 1313 3027 1311
+f 3026 3029 3027
+f 3027 3030 3031
+f 1311 3031 3015
+f 2012 731 2023
+f 917 974 977
+f 743 856 744
+f 1888 2030 1425
+f 2682 2684 3032
+f 3032 1124 2989
+f 1100 2989 1318
+f 1099 3032 1100
+f 1123 2989 1124
+f 3033 1159 1158
+f 2990 1158 1319
+f 1123 3033 2990
+f 1786 1834 3033
+f 1054 2988 2985
+f 2979 664 2999
+f 2623 2663 2643
+f 1095 688 687
+f 3034 2261 2956
+f 3036 2956 2957
+f 3037 3034 3036
+f 3038 3035 3034
+f 3040 3026 1313
+f 2263 3041 3040
+f 3042 3043 3041
+f 3041 3028 3026
+f 3044 3043 3039
+f 3046 3045 3044
+f 3047 3049 3045
+f 3043 3049 3028
+f 3038 3051 3050
+f 3050 3053 3052
+f 3052 3044 3050
+f 3039 3050 3044
+f 3054 3039 3042
+f 3042 2262 3054
+f 2261 3054 2262
+f 2216 2997 2217
+f 2212 2667 1936
+f 3056 2209 3055
+f 934 1961 1854
+f 2013 3757 567
+f 3058 3060 3061
+f 3059 3063 3060
+f 3060 2213 1963
+f 3061 1963 1962
+f 3063 2214 2213
+f 3062 3064 3063
+f 3065 2260 3064
+f 3064 2215 2214
+f 3066 3031 3030
+f 2608 3067 3066
+f 2966 3068 3067
+f 3031 3068 3015
+f 2455 2461 2474
+f 2476 2226 2477
+f 3551 2089 3525
+f 3526 3769 3786
+f 433 355 381
+f 2115 1419 2123
+f 3509 2922 3527
+f 3769 2131 2134
+f 3069 2167 2166
+f 2971 2928 2960
+f 2961 2773 2962
+f 3070 2478 2483
+f 2610 3071 2954
+f 2502 2374 2386
+f 3072 2595 3073
+f 2964 3052 2967
+f 3048 3074 3049
+f 2593 3075 3074
+f 3075 3029 3074
+f 3074 3028 3049
+f 3072 3075 2592
+f 3076 2482 2481
+f 2481 3066 3076
+f 3076 3030 3075
+f 2967 3053 2970
+f 2482 3073 2483
+f 479 493 494
+f 706 3873 709
+f 3561 2221 3517
+f 2013 3754 3755
+f 3533 2016 3529
+f 3562 2222 2974
+f 1171 1155 1176
+f 2586 2565 2587
+f 2982 113 2484
+f 2562 3077 3068
+f 2376 3012 3077
+f 3012 2148 3077
+f 3077 3015 3068
+f 2963 3046 2964
+f 3051 2970 3053
+f 1683 491 775
+f 3850 1447 3860
+f 1986 3734 3768
+f 3079 2546 2575
+f 3081 2575 2576
+f 1983 3768 3765
+f 3082 2319 2545
+f 3080 2545 2546
+f 3734 3082 3080
+f 3732 2323 3082
+f 1309 1888 748
+f 3073 2772 2771
+f 2760 3084 2762
+f 2760 2088 3083
+f 3085 3086 3087
+f 3089 3090 3088
+f 3089 3084 3085
+f 3092 3090 3091
+f 3093 2744 2743
+f 2751 2748 3093
+f 3093 2752 2751
+f 2742 3093 3094
+f 3094 2743 3095
+f 3096 3098 3087
+f 3099 3087 3086
+f 3097 3095 3098
+f 3100 2746 3101
+f 3101 3102 3100
+f 2764 2927 3101
+f 2745 3101 2746
+f 3102 3091 3100
+f 3100 3095 2743
+f 2869 3592 3634
+f 3102 3104 3092
+f 3658 2869 3634
+f 2927 3103 3102
+f 3098 3091 3090
+f 3087 3090 3085
+f 2088 3105 3083
+f 3083 3086 3084
+f 2483 2771 3070
+f 2401 2185 2321
+f 3099 3105 3106
+f 2125 3106 3105
+f 3108 2742 3107
+f 3107 3094 3097
+f 3110 3107 3109
+f 3109 3097 3096
+f 3111 3096 3099
+f 3112 3109 3111
+f 3113 2737 3108
+f 3115 2722 3114
+f 3117 3114 3116
+f 3119 3116 3118
+f 3118 3120 3121
+f 3116 3113 3120
+f 3122 3111 3123
+f 3122 1755 1754
+f 3124 3126 3127
+f 3125 3121 3126
+f 3128 3118 3125
+f 3129 3125 3124
+f 2020 3128 3129
+f 3853 1346 1352
+f 1065 1047 1066
+f 1814 3830 1649
+f 2021 3129 3130
+f 3111 3106 3123
+f 3123 2207 1755
+f 2377 2224 2973
+f 3014 113 112
+f 1756 3782 3767
+f 3131 2530 2528
+f 3133 2528 2529
+f 1753 3767 3775
+f 3134 3127 3135
+f 2327 2314 2313
+f 2436 2317 2349
+f 2018 3711 3712
+f 2534 3137 2535
+f 2320 3136 2534
+f 2185 3712 3771
+f 3138 2542 2539
+f 3134 3744 3745
+f 3785 3134 3745
+f 3140 2539 2538
+f 2535 3140 2538
+f 3711 3130 3785
+f 2558 3132 3141
+f 3782 3141 3132
+f 2560 3141 3142
+f 3760 3142 3141
+f 958 2976 2981
+f 1933 2980 3143
+f 2399 2418 2416
+f 2348 2343 2432
+f 2322 2319 2323
+f 2718 2874 2730
+f 2709 2873 2710
+f 2894 1361 1942
+f 346 176 347
+f 1876 1882 1881
+f 2802 3607 3592
+f 3104 3088 3092
+f 2974 3649 3603
+f 3089 2761 2762
+f 3088 3145 3089
+f 2803 3603 3607
+f 3870 692 628
+f 2123 1418 2126
+f 1365 1368 1367
+f 1581 1782 1783
+f 3569 2129 3520
+f 3555 2002 3521
+f 3146 2551 3147
+f 3751 3147 3750
+f 3742 3146 3751
+f 3149 2585 3146
+f 3196 3197 3195
+f 3604 2191 3623
+f 3142 2556 2560
+f 3777 3150 3142
+f 3646 2550 3593
+f 3150 2555 2556
+f 3778 3151 3150
+f 3788 3152 3646
+f 3153 2551 2549
+f 3152 2549 2550
+f 3781 3153 3152
+f 3776 3147 3153
+f 3148 2206 2205
+f 2183 2321 2184
+f 2132 2928 2133
+f 3684 2204 2203
+f 3719 2197 3721
+f 2594 2773 2951
+f 2595 2951 2772
+f 1870 799 1869
+f 3713 2178 3706
+f 3706 3736 3790
+f 336 3774 3722
+f 339 3154 336
+f 3789 3149 3742
+f 3683 2584 3149
+f 2963 3048 3047
+f 3722 337 336
+f 339 2566 2586
+f 3156 2168 2167
+f 2610 2167 2611
+f 2954 3156 2610
+f 3157 2953 2257
+f 3707 2169 3709
+f 2567 3159 2568
+f 337 2567 338
+f 3730 2166 3707
+f 3764 1983 3765
+f 2579 3081 2576
+f 2568 3160 2579
+f 3709 2190 3764
+f 3065 3162 3161
+f 3161 1984 2189
+f 3157 2189 2168
+f 2257 3161 3157
+f 734 1922 806
+f 3059 2708 3163
+f 3163 2182 1985
+f 3162 1985 1984
+f 3062 3163 3162
+f 3203 3205 3204
+f 2122 1448 1447
+f 3817 1685 3812
+f 139 444 69
+f 1570 566 533
+f 2957 3165 3036
+f 2955 3164 2957
+f 1912 341 1973
+f 3130 3124 3134
+f 1548 319 1547
+f 1949 1327 1887
+f 2969 3078 3167
+f 3167 3169 3168
+f 3724 2017 3720
+f 3720 2033 3759
+f 3168 3165 3170
+f 3170 3164 3171
+f 3779 3171 3756
+f 3724 3170 3779
+f 3171 3166 3172
+f 3172 3071 3173
+f 3752 3173 3749
+f 3756 3172 3752
+f 3169 3036 3165
+f 2593 2962 2594
+f 3173 2609 3174
+f 3174 2199 2198
+f 3783 2198 3719
+f 3749 3174 3783
+f 2190 1984 1983
+f 2196 2767 3148
+f 2119 1414 1448
+f 1464 1466 1465
+f 2199 3069 2200
+f 2457 2587 2565
+f 2226 2562 2966
+f 481 1627 1639
+f 333 235 236
+f 3078 3037 3169
+f 3040 2997 2263
+f 2219 1933 2220
+f 2385 2497 2503
+f 221 3175 2484
+f 2220 3143 3175
+f 3175 2976 2975
+f 2484 2975 2982
+f 2954 3166 2955
+f 2630 110 2982
+f 1547 1952 1955
+f 2091 2188 2187
+f 3733 2178 2177
+f 1918 1927 1926
+f 3563 111 3511
+f 955 960 954
+f 2037 112 3563
+f 3143 2981 2976
+f 2218 2214 2215
+f 672 3787 3541
+f 2152 3014 2037
+f 1147 1143 1145
+f 2248 1409 2245
+f 1615 1635 1616
+f 1410 2246 2247
+f 3241 3242 3240
+f 3177 1383 1382
+f 2238 3176 1382
+f 3176 2230 3178
+f 1410 3179 1407
+f 3179 2243 1407
+f 1633 2800 2249
+f 1621 2758 2912
+f 1622 2245 1634
+f 2236 3181 3182
+f 2237 3182 3183
+f 2250 2233 2229
+f 2912 1628 1621
+f 135 1564 1563
+f 1567 512 1565
+f 1085 2653 2685
+f 3184 868 3056
+f 869 3057 934
+f 1266 1098 1312
+f 1267 722 721
+f 937 2646 936
+f 2146 722 2147
+f 2211 921 2667
+f 1957 507 449
+f 1313 2264 3040
+f 1496 1493 1492
+f 2681 2705 2682
+f 2887 2933 2932
+f 1687 1659 1694
+f 145 1498 81
+f 1317 1319 2694
+f 37 418 71
+f 1935 3185 2977
+f 104 301 103
+f 2101 2083 2084
+f 543 360 207
+f 1934 3186 3185
+f 234 212 237
+f 3845 1822 1824
+f 1976 1874 1974
+f 223 2219 220
+f 3655 3605 3612
+f 468 773 2165
+f 1935 1728 1932
+f 3240 3226 3235
+f 3261 3250 3260
+f 3215 3244 3264
+f 3265 3215 3264
+f 3267 3198 3196
+f 3205 3269 3255
+f 3214 3265 3270
+f 3271 3214 3192
+f 3251 3239 3236
+f 3222 3266 3272
+f 3232 3234 3233
+f 3216 3243 3244
+f 3196 3268 3267
+f 3244 3254 3255
+f 3238 3247 3246
+f 3264 3255 3269
+f 3253 3216 3217
+f 3211 3221 3265
+f 3257 3218 3256
+f 3258 3272 3259
+f 3274 3280 3273
+f 3228 3262 3263
+f 3275 3281 3280
+f 3226 3273 3280
+f 3236 3260 3251
+f 3281 3236 3237
+f 3287 3192 3189
+f 3235 3280 3234
+f 3234 3281 3237
+f 3288 3259 3272
+f 3260 3276 3251
+f 3289 3278 3284
+f 3210 3224 3283
+f 3199 3201 3200
+f 3228 3230 3229
+f 3290 3285 3286
+f 3282 3271 3287
+f 3260 3262 3261
+f 3243 3268 3254
+f 3267 3241 3232
+f 3194 3208 3193
+f 3191 3269 3206
+f 3270 3192 3214
+f 3275 3230 3231
+f 3264 3270 3265
+f 3252 3276 3291
+f 3248 3291 3223
+f 3213 3279 3277
+f 3272 3289 3288
+f 3277 3212 3213
+f 3228 3281 3231
+f 3220 3253 3219
+f 3219 3217 3257
+f 3256 3222 3258
+f 3247 3252 3248
+f 3245 3232 3233
+f 3266 3212 3289
+f 3208 3255 3254
+f 3288 3284 3285
+f 3204 3208 3207
+f 560 3187 559
+f 3209 3201 3202
+f 3189 3191 3190
+f 448 807 447
+f 647 122 270
+f 1833 1780 1832
+f 510 508 507
+f 3210 1887 3201
+f 1887 3200 3201
+f 15 3197 3200
+f 16 3195 3197
+f 3195 3187 3194
+f 3194 1899 3207
+f 3207 375 3204
+f 387 126 127
+f 3189 1760 3287
+f 3577 757 758
+f 763 2172 2175
+f 3583 3580 3581
+f 3581 3579 3578
+f 3594 3596 3595
+f 3400 3602 3401
+f 3626 3625 3627
+f 3631 3599 3601
+f 3636 3595 3596
+f 3647 3588 3645
+f 3650 3584 3647
+f 3653 3580 3651
+f 3653 3654 3579
+f 1801 3656 3655
+f 3651 3582 3650
+f 3644 3595 3637
+f 1767 3659 3660
+f 1597 3661 1600
+f 1708 3665 3664
+f 3671 3667 3666
+f 3666 3673 3672
+f 1588 3664 1587
+f 2080 3666 2082
+f 2082 3672 2084
+f 3674 3635 3631
+f 3677 3636 3635
+f 3682 3645 3681
+f 3692 3650 3686
+f 3681 3644 3680
+f 786 3692 785
+f 3694 3651 3692
+f 785 3686 497
+f 497 3682 650
+f 650 3681 564
+f 3696 3681 3680
+f 3405 3605 3402
+f 3702 3678 3677
+f 3699 3679 3678
+f 3645 3638 3644
+f 1799 3659 1768
+f 3708 3458 3460
+f 3462 3708 3460
+f 3714 564 3696
+f 3615 3602 3600
+f 3710 3696 3698
+f 3635 3596 3599
+f 3629 3627 3630
+f 3597 3599 3596
+f 3630 3715 3629
+f 3627 3597 3594
+f 3717 3723 3718
+f 3715 3660 3629
+f 3716 3725 3715
+f 3668 3661 3725
+f 3668 3717 3667
+f 3667 3718 3673
+f 3727 3673 3718
+f 3728 3672 3673
+f 3726 3718 3723
+f 1576 2084 3672
+f 3727 3731 3729
+f 3203 1387 3190
+f 3190 1791 3189
+f 3283 1949 3210
+f 317 259 391
+f 2000 3653 3694
+f 3601 3600 3602
+f 3589 3584 3585
+f 3577 756 2170
+f 3585 3582 3583
+f 3678 3637 3636
+f 3204 376 3203
+f 1801 3657 1799
+f 3729 3728 3727
+f 1575 3738 1088
+f 3735 1576 3728
+f 1776 1796 1775
+f 1293 1290 1284
+f 1326 1327 12
+f 1294 1234 1290
+f 878 1330 1337
+f 497 374 498
+f 122 413 121
+f 3699 3704 3708
+f 3606 3401 3602
+f 3708 3698 3699
+f 3464 3710 3462
+f 3704 3443 3458
+f 439 399 438
+f 1512 1526 1511
+f 1668 1738 1671
+f 839 1362 838
+f 1362 826 1012
+f 3843 1896 3815
+f 2053 2118 2117
+f 3888 2120 2119
+f 696 3816 3831
+f 1826 639 640
+f 3740 3729 3731
+f 956 886 960
+f 1980 1922 807
+f 33 105 164
+f 1343 3856 3857
+f 3907 2128 3902
+f 1766 1582 1594
+f 14 2163 738
+f 595 99 594
+f 3233 3246 3245
+f 1346 1044 1352
+f 748 1425 747
+f 3193 3254 3268
+f 3190 3206 3203
+f 3612 3606 3615
+f 3624 3615 3625
+f 3631 3443 3674
+f 3679 3644 3637
+f 3686 3647 3682
+f 3702 3674 3704
+f 3625 3600 3597
+f 3601 3415 3631
+f 3739 3735 3729
+f 3630 3594 3741
+f 3741 3716 3630
+f 1284 1288 1285
+f 3698 3680 3679
+f 1778 1780 1779
+f 1792 1584 1789
+f 695 571 694
+f 3638 3594 3595
+f 3741 3588 3589
+f 3247 3209 3202
+f 3292 3294 3295
+f 3296 3298 3299
+f 3299 3300 3301
+f 3302 3304 3305
+f 3306 3308 3309
+f 3307 3311 3308
+f 3303 3301 3300
+f 3312 3304 3313
+f 3314 3316 3317
+f 3318 3320 3321
+f 3324 3321 3325
+f 3326 3328 3323
+f 3329 3331 3332
+f 3333 3335 3336
+f 3337 3339 3340
+f 3333 3341 3342
+f 3342 3343 3344
+f 3319 3344 3343
+f 3338 3334 3339
+f 3346 3302 3347
+f 3305 3347 3302
+f 3312 3223 3349
+f 3350 3340 3351
+f 3312 3348 3305
+f 3352 3341 3326
+f 3311 3354 3308
+f 3341 3327 3326
+f 3359 3361 3362
+f 3360 3250 3249
+f 3318 3345 3319
+f 3364 3318 3324
+f 3314 3324 3365
+f 3366 3301 3346
+f 3299 3367 3296
+f 3308 3368 3309
+f 3369 3295 3294
+f 3317 3364 3314
+f 3370 3317 3316
+f 3325 3365 3324
+f 3352 3319 3343
+f 3296 3353 3311
+f 3297 3311 3310
+f 3363 3354 3345
+f 3345 3353 3344
+f 3344 3367 3342
+f 3366 3342 3367
+f 3347 3334 3346
+f 3339 3348 3340
+f 3348 3351 3340
+f 3363 3369 3368
+f 3294 3368 3369
+f 3293 3309 3294
+f 3331 3374 3332
+f 3346 3333 3366
+f 3323 3352 3326
+f 3356 3321 3320
+f 3355 3325 3321
+f 3357 3371 3325
+f 3322 3320 3352
+f 3361 3329 3362
+f 3373 3375 3374
+f 3374 3376 3332
+f 3329 3376 3362
+f 3327 3372 3328
+f 3336 3375 3327
+f 3335 3376 3375
+f 3376 3337 3362
+f 3337 3359 3362
+f 3359 3276 3250
+f 3377 3295 3370
+f 3277 3315 3278
+f 3371 3379 3365
+f 3378 3358 3380
+f 3365 3315 3314
+f 3316 3279 3370
+f 3282 3370 3279
+f 3313 3224 3312
+f 3378 3284 3379
+f 3380 3285 3378
+f 3379 3278 3315
+f 3276 3351 3291
+f 3291 3349 3223
+f 3313 2965 3013
+f 2965 3303 222
+f 222 3300 223
+f 223 3298 3186
+f 3298 3185 3186
+f 3297 2977 3185
+f 3310 1213 2977
+f 3307 1214 1213
+f 2532 3306 3293
+f 2894 3293 3292
+f 2871 3292 3377
+f 3253 3240 3242
+f 3283 3013 2152
+f 1997 1889 1892
+f 1237 744 855
+f 1535 120 121
+f 1531 201 214
+f 1119 1032 984
+f 946 824 947
+f 163 5 162
+f 3589 3723 3741
+f 1155 1162 1164
+f 427 327 424
+f 553 796 498
+f 3381 2231 2237
+f 3381 2230 2232
+f 2237 3180 3381
+f 3383 3385 3382
+f 3384 3387 3385
+f 3386 3389 3387
+f 3388 3391 3389
+f 3392 3394 3395
+f 3395 3396 3397
+f 3398 3397 3396
+f 3400 3399 3398
+f 3403 3405 3402
+f 3406 3404 3403
+f 3408 3407 3406
+f 3410 3409 3408
+f 3412 3411 3410
+f 3415 3398 3414
+f 3414 3396 3416
+f 3416 3394 3417
+f 3417 3393 3418
+f 3393 3420 3418
+f 3419 3421 3420
+f 3390 3422 3421
+f 3388 3423 3422
+f 3386 3424 3423
+f 3384 3425 3424
+f 3425 3426 3184
+f 2904 3428 2947
+f 3429 2904 2902
+f 3430 2902 2876
+f 2875 3430 2876
+f 3432 2733 2736
+f 2825 3435 2824
+f 3437 3439 3436
+f 3441 3436 3440
+f 2724 3434 2825
+f 3436 3115 3117
+f 3440 3117 3119
+f 3443 3414 3442
+f 3442 3416 3444
+f 3444 3417 3445
+f 3445 3418 3446
+f 3446 3420 3447
+f 3420 3448 3447
+f 3421 3449 3448
+f 3422 3450 3449
+f 3423 3451 3450
+f 3424 3452 3451
+f 3425 3056 3452
+f 3451 2253 2252
+f 3450 2252 1727
+f 3449 1727 2150
+f 3448 2150 1941
+f 3453 3448 1941
+f 3454 3447 3453
+f 3455 3446 3454
+f 3456 3445 3455
+f 3442 3456 3457
+f 3443 3457 3458
+f 3459 3458 3457
+f 3457 3455 3459
+f 3459 3454 3461
+f 3462 3459 3461
+f 3461 3453 3463
+f 3464 3461 3463
+f 3463 1941 1360
+f 3404 3401 3405
+f 3407 3399 3404
+f 3409 3397 3407
+f 3411 3395 3409
+f 3465 3413 3412
+f 3467 3469 3466
+f 3465 3431 3433
+f 3470 3466 3465
+f 3438 3432 3435
+f 3467 3438 3437
+f 3468 3437 3441
+f 3471 3468 3472
+f 3472 3441 3473
+f 3473 3440 2712
+f 2712 3119 3128
+f 3475 3472 3474
+f 3474 3473 3476
+f 3476 2712 2711
+f 3477 2711 2400
+f 3478 3476 3477
+f 3479 3474 3478
+f 3392 3413 3480
+f 3466 3480 3413
+f 3419 3392 3480
+f 3390 3480 3391
+f 3469 3391 3480
+f 3389 3471 3475
+f 3387 3475 3479
+f 3385 3479 3481
+f 3482 2400 2402
+f 3483 3477 3482
+f 3481 3478 3483
+f 3484 2402 2707
+f 3058 2707 2708
+f 3061 3484 3058
+f 3485 3482 3484
+f 3382 3481 3486
+f 3486 3483 3485
+f 3426 3382 3487
+f 3487 3486 1962
+f 3486 3061 1962
+f 1961 3487 1962
+f 3057 3426 3487
+f 3428 3402 3488
+f 3427 3403 3428
+f 3429 3406 3427
+f 3430 3408 3429
+f 3431 3410 3430
+f 3470 3433 3432
+f 3434 3438 3435
+f 2723 3439 3434
+f 2875 3433 3431
+f 2736 3435 3432
+f 2947 3488 3500
+f 3464 1360 3497
+f 1900 1924 1901
+f 1736 1718 1750
+f 1955 1943 1956
+f 1671 1735 1751
+f 1696 1698 1692
+f 1800 1864 1801
+f 2063 2056 2060
+f 1737 1717 1736
+f 1751 1697 1695
+f 943 3899 684
+f 3656 3402 3605
+f 1577 1764 1703
+f 3917 1599 3923
+f 1945 82 1944
+f 68 7 248
+f 147 1543 148
+f 1109 1133 1102
+f 1503 1907 1908
+f 1499 1909 1498
+f 452 735 215
+f 1824 3835 3845
+f 1825 2068 1824
+f 1463 1022 1194
+f 841 1009 1011
+f 1568 1955 1534
+f 1926 1927 1923
+f 1910 1502 1939
+f 1939 422 442
+f 309 1940 308
+f 1804 1866 1803
+f 269 1954 56
+f 2065 2060 2056
+f 3812 1758 3835
+f 280 552 1940
+f 1875 1869 1874
+f 1739 1734 1737
+f 1778 1790 1788
+f 310 1915 279
+f 1909 1906 1954
+f 1789 1778 1788
+f 978 980 979
+f 1584 1731 1583
+f 425 311 312
+f 148 115 116
+f 2044 3894 2046
+f 277 1939 551
+f 1943 1945 1944
+f 1461 1463 1464
+f 2253 3056 3055
+f 2981 3489 958
+f 2209 3489 3055
+f 2254 3055 3489
+f 2998 958 3489
+f 2826 2721 2724
+f 3490 2721 2817
+f 2816 3490 2817
+f 2334 2296 2335
+f 2298 2334 2336
+f 2301 2336 2338
+f 2311 2338 2354
+f 2312 2354 2355
+f 2359 2361 2360
+f 2335 2359 2360
+f 2414 2365 2364
+f 2413 2361 2365
+f 2355 2540 2541
+f 2541 2539 2542
+f 2722 2739 2738
+f 3114 2738 3113
+f 3110 3121 3120
+f 3108 3120 3113
+f 3122 3127 3126
+f 3112 3126 3121
+f 3127 1753 3135
+f 3139 2529 2542
+f 3744 3133 3139
+f 3490 2740 2739
+f 3491 2789 3490
+f 228 91 88
+f 245 243 242
+f 248 247 191
+f 2 296 295
+f 145 300 267
+f 327 210 328
+f 329 331 330
+f 250 228 236
+f 239 240 230
+f 373 375 374
+f 178 176 175
+f 1218 1228 1321
+f 3862 1295 3854
+f 808 824 823
+f 3858 1013 1014
+f 3813 1411 1413
+f 2154 2159 2160
+f 3845 1758 1757
+f 306 1922 452
+f 2157 1976 1977
+f 708 2031 1474
+f 1450 1227 1428
+f 1672 1605 1624
+f 1813 1821 1816
+f 1717 1715 1718
+f 176 402 347
+f 1710 3916 3915
+f 1712 1577 1711
+f 1705 1711 1704
+f 971 979 970
+f 822 1094 1102
+f 2909 2793 2792
+f 2810 2795 2793
+f 2799 2249 2800
+f 3662 2910 2799
+f 2769 2801 2795
+f 3177 3178 3179
+f 3178 3180 3179
+f 3181 1393 2800
+f 3181 1632 3182
+f 3182 2244 3183
+f 3183 2243 3180
+f 3492 2790 3491
+f 2808 3491 2816
+f 2239 3572 2240
+f 1149 3571 3570
+f 1150 3567 3571
+f 2202 3566 2015
+f 2921 3565 2923
+f 3568 2133 2928
+f 1777 1759 1792
+f 1201 1183 1184
+f 3561 2195 2194
+f 3553 2770 2768
+f 3927 1687 1692
+f 1516 154 503
+f 3549 2974 2803
+f 3550 2779 2778
+f 2771 3575 3546
+f 3070 3546 3558
+f 3547 2775 2867
+f 1376 1370 1372
+f 2767 3556 2206
+f 2195 3543 2767
+f 2775 3542 2776
+f 3377 3545 2871
+f 3804 3539 2806
+f 2776 3538 2805
+f 2240 3535 2920
+f 2920 3669 3619
+f 3548 2786 2779
+f 1805 1851 1804
+f 3534 2787 2786
+f 2951 3540 3531
+f 2772 3531 3575
+f 2973 3530 3536
+f 2224 3537 3530
+f 1957 320 510
+f 306 304 303
+f 2770 3528 2913
+f 2913 3574 2239
+f 2923 3527 2922
+f 308 431 47
+f 207 132 543
+f 1879 1881 1880
+f 3733 3525 2089
+f 2475 3558 3524
+f 2476 3524 3523
+f 2223 3523 3537
+f 3522 2803 2802
+f 2037 11 1950
+f 1959 450 505
+f 2003 3520 2129
+f 9 334 540
+f 2009 3518 3567
+f 2008 3536 3518
+f 3877 673 3879
+f 3515 2778 2784
+f 2935 3513 2936
+f 111 3570 3511
+f 3510 2935 2938
+f 683 3885 943
+f 3508 2810 2909
+f 736 732 729
+f 1795 1803 1796
+f 3505 2938 2939
+f 3507 2939 2940
+f 2950 3504 2949
+f 2940 3504 3507
+f 3801 3512 2950
+f 2868 3502 2869
+f 2869 3522 2802
+f 1024 1023 1020
+f 3500 2948 2947
+f 551 442 443
+f 2972 3568 2928
+f 2032 3499 2972
+f 216 1924 1923
+f 203 201 200
+f 1828 1847 1837
+f 1958 449 450
+f 272 178 175
+f 2871 3495 1361
+f 3619 3564 2921
+f 2801 3643 3662
+f 2005 3654 2176
+f 960 899 962
+f 3657 3612 3624
+f 3780 3646 3151
+f 3151 3593 2555
+f 2193 3623 2194
+f 3607 3145 3144
+f 3603 2761 3145
+f 3592 3144 3104
+f 2926 3634 3103
+f 3634 3104 3103
+f 3755 3746 3753
+f 566 3758 2005
+f 3488 1864 3500
+f 1602 2079 2078
+f 2868 3611 2922
+f 2922 3610 2923
+f 2923 3618 2921
+f 3670 2918 3614
+f 3619 3614 2920
+f 2921 3670 3619
+f 2920 3586 2240
+f 3613 2915 2918
+f 2917 3622 3641
+f 2916 3641 3642
+f 2239 3652 2913
+f 2911 3652 2757
+f 3643 2911 2910
+f 2797 3662 3591
+f 2797 3598 2750
+f 3622 2763 2765
+f 3608 2764 2745
+f 2194 3648 2221
+f 2221 3649 2222
+f 2756 3609 2755
+f 2750 3609 2747
+f 3669 1817 3827
+f 3763 1691 1699
+f 2515 3590 2548
+f 2548 3593 2550
+f 2513 3632 3616
+f 2513 3621 2515
+f 2509 3620 2289
+f 2510 3633 2509
+f 2289 3639 2283
+f 2283 3617 2286
+f 2240 3587 2239
+f 3659 3624 3626
+f 716 3887 3888
+f 727 3761 1923
+f 2165 2170 2164
+f 3660 3626 3629
+f 3743 3578 3757
+f 2175 769 770
+f 2100 2095 2099
+f 3758 3757 3578
+f 640 3568 1826
+f 1653 3553 1651
+f 1895 3530 746
+f 746 3537 741
+f 1653 3528 3576
+f 3762 1604 1672
+f 2093 2073 2078
+f 959 995 884
+f 962 919 986
+f 1400 1398 1399
+f 1000 1127 1004
+f 956 884 885
+f 1662 3550 1661
+f 2034 3546 1654
+f 3926 3539 3802
+f 3535 766 1817
+f 3572 765 766
+f 334 3571 335
+f 1868 3540 1656
+f 1684 1652 1678
+f 1682 775 1807
+f 1585 3762 1700
+f 3763 1672 1673
+f 2173 490 491
+f 1658 3547 1752
+f 3556 1650 724
+f 1571 2010 1572
+f 1587 3671 2080
+f 2099 2077 2094
+f 787 1999 786
+f 2000 786 1999
+f 3155 3697 3683
+f 3713 3683 3789
+f 2180 3685 3684
+f 3496 3684 3705
+f 1684 3840 1651
+f 1760 3495 3545
+f 2607 3693 2590
+f 2581 3689 2607
+f 2371 3700 3695
+f 2371 3693 2372
+f 2587 3675 2586
+f 2586 3697 339
+f 2497 3687 2503
+f 2502 3687 3700
+f 2494 3690 2495
+f 3690 2472 2495
+f 3688 2468 2472
+f 3701 2469 2468
+f 726 3892 630
+f 581 3518 580
+f 10 3570 334
+f 1837 3510 1849
+f 741 3523 1300
+f 1299 3558 2034
+f 1851 3504 1866
+f 3527 1820 1819
+f 580 3536 1895
+f 1758 3522 3502
+f 1693 3508 1806
+f 3504 1867 1866
+f 1894 3568 3499
+f 2126 3813 3841
+f 3906 3841 3912
+f 2113 3867 2110
+f 2179 3789 3685
+f 624 3904 623
+f 2192 3788 3780
+f 2177 3787 3733
+f 1364 3833 1338
+f 3858 1470 1453
+f 3135 3775 3744
+f 2069 3835 2068
+f 1351 3853 1352
+f 2108 3901 499
+f 2089 3736 3733
+f 3828 1358 866
+f 2001 3783 2187
+f 2187 3719 2091
+f 2130 3752 2004
+f 2004 3749 2001
+f 2014 3779 2201
+f 2201 3756 2130
+f 3167 3759 2969
+f 3168 3720 3167
+f 3159 3764 3160
+f 3160 3765 3081
+f 3730 3158 337
+f 3158 3709 3159
+f 2200 3730 3722
+f 3685 3742 2204
+f 3722 2197 2200
+f 3790 2092 3721
+f 3774 3721 2197
+f 3154 3790 3774
+f 3155 3706 3154
+f 2091 3721 2092
+f 2196 3750 3776
+f 2193 3776 3781
+f 3604 3781 3788
+f 2086 3780 3778
+f 2087 3778 3777
+f 2204 3751 2205
+f 2205 3750 3148
+f 3909 574 692
+f 2124 3777 3760
+f 2208 3760 3782
+f 3711 3140 3137
+f 3785 3138 3140
+f 3745 3139 3138
+f 3771 3136 2322
+f 3712 3137 3136
+f 3775 3131 3133
+f 3767 3132 3131
+f 1813 3838 1814
+f 1293 3856 1344
+f 2184 3748 3732
+f 2181 3732 3734
+f 3765 3079 3081
+f 3768 3080 3079
+f 709 3875 2031
+f 2774 3737 3769
+f 3786 2134 3559
+f 3809 766 765
+f 2968 3766 2959
+f 2969 3784 2968
+f 2958 3737 2961
+f 2132 3766 2960
+f 2321 3771 3748
+f 3860 1449 28
+f 3905 582 710
+f 3886 577 574
+f 3847 1818 3827
+f 705 3887 2121
+f 3816 3907 698
+f 2111 3911 2113
+f 3810 1366 1411
+f 1812 3822 1815
+f 675 3852 674
+f 3496 3851 675
+f 2110 3868 2109
+f 2104 3901 2106
+f 2109 3899 944
+f 3842 703 739
+f 1680 3846 1684
+f 1639 3819 3818
+f 623 3906 697
+f 2139 3866 3865
+f 1808 3844 3837
+f 3820 1819 1820
+f 3872 3900 706
+f 1646 3830 1648
+f 3889 633 577
+f 2139 3875 18
+f 2156 3871 3874
+f 674 3913 3869
+f 2117 3812 2069
+f 1815 3847 3823
+f 3855 1444 1470
+f 697 3912 3816
+f 866 3836 3828
+f 3864 2136 2135
+f 3585 3726 3589
+f 3583 3731 3585
+f 3753 3738 3739
+f 3740 3581 3743
+f 1088 3746 1093
+f 3739 3743 3753
+f 2791 3493 2792
+f 3492 2807 3493
+f 2792 3494 2909
+f 3493 2806 3494
+f 3508 3494 3516
+f 3516 2806 3539
+f 2948 3791 3801
+f 2105 3896 2104
+f 3538 3804 2805
+f 680 3876 683
+f 1891 3910 3891
+f 2905 3801 2950
+f 2900 3799 2905
+f 2901 3807 2877
+f 2734 3806 3794
+f 2877 3806 2878
+f 2820 3794 2872
+f 2818 3792 2827
+f 2823 3792 2826
+f 2822 3798 2821
+f 2819 3800 3798
+f 2817 3795 2816
+f 2809 3795 2815
+f 2808 3803 2807
+f 2805 3803 2804
+f 1702 3923 1599
+f 3922 1797 1802
+f 3915 1706 1709
+f 3918 1692 1698
+f 1657 3880 1879
+f 1890 3895 3910
+f 3772 1674 1675
+f 1689 3772 3773
+f 3926 3538 1688
+f 3516 1689 3773
+f 3917 1703 1764
+f 1598 3928 1770
+f 3880 1876 1879
+f 1691 3925 3927
+f 3925 1688 1687
+f 1117 1118 3930
+f 1066 1322 3930
+f 1243 847 3930
+f 3825 848 3930
+f 1128 1117 3930
+f 1129 1130 3930
+f 841 1011 3930
+f 1230 3863 3930
+f 1242 1243 3930
+f 1322 1229 3930
+f 1064 1065 3930
+f 1003 1128 3930
+f 1229 1230 3930
+f 3863 1285 3930
+f 1285 1288 3930
+f 848 839 3930
+f 847 3825 3930
+f 1118 1129 3930
+f 1065 1066 3930
+f 1087 1086 3930
+f 1008 1002 3930
+f 1011 1008 3930
+f 1002 1003 3930
+f 1076 1069 3930
+f 838 841 3930
+f 839 838 3930
+f 1288 1242 3930
+f 1130 1087 3930
+f 1086 1076 3930
+f 1069 1064 3930
+f 2410 2414 3931
+f 2411 2410 3931
+f 3617 2285 3931
+f 2394 2393 3931
+f 2393 2396 3931
+f 2281 2286 3931
+f 2282 2280 3931
+f 2398 2399 3931
+f 2465 2464 3931
+f 2396 2397 3931
+f 2470 2469 3931
+f 2399 2416 3931
+f 2480 2395 3931
+f 2459 2480 3931
+f 2369 2282 3931
+f 2460 2459 3931
+f 2415 2411 3931
+f 2416 2415 3931
+f 2364 2363 3931
+f 2286 3617 3931
+f 2464 2470 3931
+f 2469 3703 3931
+f 2285 2465 3931
+f 2414 2364 3931
+f 2395 2394 3931
+f 2363 2367 3931
+f 2397 2398 3931
+f 2280 2281 3931
+f 3703 2460 3931
+f 2367 2369 3931
+f 3934 3937 3938
+f 3934 3939 3935
+f 3932 3937 3933
+f 3938 3943 3939
+f 3937 3940 3941
+f 3937 3942 3938
+f 3942 3947 3943
+f 3941 3944 3945
+f 3942 3945 3946
+f 3947 3950 3951
+f 3944 3949 3945
+f 3945 3950 3946
+f 3950 3955 3951
+f 3949 3952 3953
+f 3950 3953 3954
+f 3955 3958 3959
+f 3953 3956 3957
+f 3953 3958 3954
+f 3959 3962 3963
+f 3957 3960 3961
+f 3958 3961 3962
+f 3962 3967 3963
+f 3960 3965 3961
+f 3961 3966 3962
+f 3966 3971 3967
+f 3965 3968 3969
+f 3966 3969 3970
+f 3970 3975 3971
+f 3968 3973 3969
+f 3969 3974 3970
+f 3974 3979 3975
+f 3973 3976 3977
+f 3974 3977 3978
+f 3979 3982 3983
+f 3976 3981 3977
+f 3977 3982 3978
+f 3982 3987 3983
+f 3981 3984 3985
+f 3982 3985 3986
+f 3987 3990 3991
+f 3984 3989 3985
+f 3985 3990 3986
+f 3991 3994 3995
+f 3989 3992 3993
+f 3990 3993 3994
+f 3995 3998 3999
+f 3992 3997 3993
+f 3993 3998 3994
+f 3998 4003 3999
+f 3997 4000 4001
+f 3998 4001 4002
+f 4002 4007 4003
+f 4000 4005 4001
+f 4001 4006 4002
+f 4006 4011 4007
+f 4005 4008 4009
+f 4006 4009 4010
+f 4011 4014 4015
+f 4008 4013 4009
+f 4009 4014 4010
+f 4014 4019 4015
+f 4013 4016 4017
+f 4014 4017 4018
+f 4019 4022 4023
+f 4016 4021 4017
+f 4017 4022 4018
+f 4023 4026 4027
+f 4021 4024 4025
+f 4022 4025 4026
+f 4027 4030 4031
+f 4024 4029 4025
+f 4025 4030 4026
+f 4031 4034 4035
+f 4029 4032 4033
+f 4030 4033 4034
+f 4034 4039 4035
+f 4032 4037 4033
+f 4033 4038 4034
+f 4038 4043 4039
+f 4037 4040 4041
+f 4038 4041 4042
+f 4043 4046 4047
+f 4040 4045 4041
+f 4041 4046 4042
+f 4046 4051 4047
+f 4045 4048 4049
+f 4046 4049 4050
+f 4051 4055 4056
+f 4048 4054 4049
+f 4049 4055 4050
+f 4056 4059 4060
+f 4053 4058 4054
+f 4055 4058 4059
+f 3932 4052 3936
+f 3936 4052 3940
+f 3940 4052 3944
+f 3944 4052 3948
+f 3948 4052 3952
+f 3952 4052 3956
+f 3956 4052 3960
+f 3960 4052 3964
+f 3964 4052 3968
+f 3968 4052 3972
+f 3972 4052 3976
+f 3976 4052 3980
+f 3980 4052 3984
+f 3984 4052 3988
+f 3988 4052 3992
+f 3992 4052 3996
+f 3996 4052 4000
+f 4000 4052 4004
+f 4004 4052 4008
+f 4008 4052 4012
+f 4012 4052 4016
+f 4016 4052 4020
+f 4020 4052 4024
+f 4024 4052 4028
+f 4028 4052 4032
+f 4032 4052 4036
+f 4036 4052 4040
+f 4040 4052 4044
+f 4044 4052 4048
+f 4048 4052 4053
+f 4053 4052 4057
+f 4060 3934 3935
+f 4058 3932 3933
+f 4058 3934 4059
+f 4057 4052 3932
+f 4069 4081 4082
+f 4066 4080 4067
+f 4065 4077 4078
+f 4063 4075 4076
+f 4071 4085 4072
+f 4069 4083 4070
+f 4067 4081 4068
+f 4066 4078 4079
+f 4063 4077 4064
+f 4061 4075 4062
+f 4072 4086 4073
+f 4071 4083 4084
+f 4076 4090 4077
+f 4074 4088 4075
+f 4086 4098 4099
+f 4083 4097 4084
+f 4082 4094 4095
+f 4079 4093 4080
+f 4078 4090 4091
+f 4076 4088 4089
+f 4085 4097 4098
+f 4082 4096 4083
+f 4080 4094 4081
+f 4079 4091 4092
+f 4089 4103 4090
+f 4088 4100 4101
+f 4098 4112 4099
+f 4096 4110 4097
+f 4095 4107 4108
+f 4092 4106 4093
+f 4090 4104 4091
+f 4089 4101 4102
+f 4097 4111 4098
+f 4095 4109 4096
+f 4094 4106 4107
+f 4092 4104 4105
+f 4103 4115 4116
+f 4100 4114 4101
+f 4112 4124 4125
+f 4110 4122 4123
+f 4107 4121 4108
+f 4105 4119 4106
+f 4103 4117 4104
+f 4101 4115 4102
+f 4110 4124 4111
+f 4109 4121 4122
+f 4107 4119 4120
+f 4105 4117 4118
+f 4115 4129 4116
+f 4114 4126 4127
+f 4124 4138 4125
+f 4123 4135 4136
+f 4120 4134 4121
+f 4119 4131 4132
+f 4117 4129 4130
+f 4114 4128 4115
+f 4124 4136 4137
+f 4122 4134 4135
+f 4119 4133 4120
+f 4117 4131 4118
+f 4129 4141 4142
+f 4126 4140 4127
+f 4138 4150 4151
+f 4136 4148 4149
+f 4133 4147 4134
+f 4131 4145 4132
+f 4130 4142 4143
+f 4127 4141 4128
+f 4137 4149 4150
+f 4135 4147 4148
+f 4132 4146 4133
+f 4131 4143 4144
+f 4142 4154 4155
+f 4140 4152 4153
+f 4150 4164 4151
+f 4149 4161 4162
+f 4146 4160 4147
+f 4144 4158 4145
+f 4142 4156 4143
+f 4140 4154 4141
+f 4149 4163 4150
+f 4148 4160 4161
+f 4146 4158 4159
+f 4144 4156 4157
+f 4155 4167 4168
+f 4153 4165 4166
+f 4163 4177 4164
+f 4161 4175 4162
+f 4159 4173 4160
+f 4158 4170 4171
+f 4155 4169 4156
+f 4153 4167 4154
+f 4163 4175 4176
+f 4161 4173 4174
+f 4159 4171 4172
+f 4156 4170 4157
+f 4167 4181 4168
+f 4165 4179 4166
+f 4176 4190 4177
+f 4175 4187 4188
+f 4173 4185 4186
+f 4170 4184 4171
+f 4169 4181 4182
+f 4167 4179 4180
+f 4175 4189 4176
+f 4173 4187 4174
+f 4171 4185 4172
+f 4170 4182 4183
+f 4180 4194 4181
+f 4178 4192 4179
+f 4190 4202 4203
+f 4187 4201 4188
+f 4186 4198 4199
+f 4183 4197 4184
+f 4182 4194 4195
+f 4180 4192 4193
+f 4189 4201 4202
+f 4186 4200 4187
+f 4184 4198 4185
+f 4183 4195 4196
+f 4193 4207 4194
+f 4192 4204 4205
+f 4202 4216 4203
+f 4200 4214 4201
+f 4199 4211 4212
+f 4196 4210 4197
+f 4194 4208 4195
+f 4193 4205 4206
+f 4201 4215 4202
+f 4199 4213 4200
+f 4198 4210 4211
+f 4196 4208 4209
+f 4207 4219 4220
+f 4204 4218 4205
+f 4216 4228 4229
+f 4213 4227 4214
+f 4211 4225 4212
+f 4209 4223 4210
+f 4207 4221 4208
+f 4205 4219 4206
+f 4214 4228 4215
+f 4213 4225 4226
+f 4211 4223 4224
+f 4209 4221 4222
+f 4219 4233 4220
+f 4218 4230 4231
+f 4228 4242 4229
+f 4226 4240 4227
+f 4225 4237 4238
+f 4223 4235 4236
+f 4221 4233 4234
+f 4218 4232 4219
+f 4228 4240 4241
+f 4226 4238 4239
+f 4223 4237 4224
+f 4221 4235 4222
+f 4233 4245 4246
+f 4230 4244 4231
+f 4242 4254 4255
+f 4240 4252 4253
+f 4237 4251 4238
+f 4235 4249 4236
+f 4234 4246 4247
+f 4231 4245 4232
+f 4241 4253 4254
+f 4239 4251 4252
+f 4236 4250 4237
+f 4235 4247 4248
+f 4246 4258 4259
+f 4244 4256 4257
+f 4254 4268 4255
+f 4253 4265 4266
+f 4250 4264 4251
+f 4248 4262 4249
+f 4246 4260 4247
+f 4244 4258 4245
+f 4253 4267 4254
+f 4252 4264 4265
+f 4250 4262 4263
+f 4248 4260 4261
+f 4259 4271 4272
+f 4257 4269 4270
+f 4267 4281 4268
+f 4265 4279 4266
+f 4263 4277 4264
+f 4262 4274 4275
+f 4259 4273 4260
+f 4257 4271 4258
+f 4266 4280 4267
+f 4265 4277 4278
+f 4263 4275 4276
+f 4260 4274 4261
+f 4271 4285 4272
+f 4269 4283 4270
+f 4281 4293 4294
+f 4279 4291 4292
+f 4277 4289 4290
+f 4274 4288 4275
+f 4273 4285 4286
+f 4271 4283 4284
+f 4280 4292 4293
+f 4277 4291 4278
+f 4275 4289 4276
+f 4274 4286 4287
+f 4284 4298 4285
+f 4282 4296 4283
+f 4294 4306 4307
+f 4291 4305 4292
+f 4290 4302 4303
+f 4287 4301 4288
+f 4286 4298 4299
+f 4284 4296 4297
+f 4293 4305 4306
+f 4290 4304 4291
+f 4288 4302 4289
+f 4287 4299 4300
+f 4297 4311 4298
+f 4296 4308 4309
+f 4306 4320 4307
+f 4304 4318 4305
+f 4303 4315 4316
+f 4300 4314 4301
+f 4298 4312 4299
+f 4297 4309 4310
+f 4305 4319 4306
+f 4304 4316 4317
+f 4302 4314 4315
+f 4300 4312 4313
+f 4310 4324 4311
+f 4308 4322 4309
+f 4320 4332 4333
+f 4318 4330 4331
+f 4316 4328 4329
+f 4313 4327 4314
+f 4311 4325 4312
+f 4310 4322 4323
+f 4318 4332 4319
+f 4316 4330 4317
+f 4315 4327 4328
+f 4312 4326 4313
+f 4323 4337 4324
+f 4322 4334 4335
+f 4332 4346 4333
+f 4330 4344 4331
+f 4329 4341 4342
+f 4327 4339 4340
+f 4325 4337 4338
+f 4322 4336 4323
+f 4332 4344 4345
+f 4330 4342 4343
+f 4327 4341 4328
+f 4325 4339 4326
+f 4337 4350 4351
+f 4334 4349 4335
+f 4346 4359 4360
+f 4344 4357 4358
+f 4341 4356 4342
+f 4340 4353 4354
+f 4338 4351 4352
+f 4335 4350 4336
+f 4345 4358 4359
+f 4343 4356 4357
+f 4340 4355 4341
+f 4338 4353 4339
+f 4351 4363 4364
+f 4349 4361 4362
+f 4359 4373 4360
+f 4358 4370 4371
+f 4355 4369 4356
+f 4354 4366 4367
+f 4351 4365 4352
+f 4349 4363 4350
+f 4358 4372 4359
+f 4357 4369 4370
+f 4355 4367 4368
+f 4352 4366 4353
+f 4364 4376 4377
+f 4362 4374 4375
+f 4373 4385 4386
+f 4370 4384 4371
+f 4368 4382 4369
+f 4367 4379 4380
+f 4364 4378 4365
+f 4362 4376 4363
+f 4372 4384 4385
+f 4370 4382 4383
+f 4368 4380 4381
+f 4365 4379 4366
+f 4376 4390 4377
+f 4374 4388 4375
+f 4385 4399 4386
+f 4384 4396 4397
+f 4382 4394 4395
+f 4379 4393 4380
+f 4378 4390 4391
+f 4376 4388 4389
+f 4384 4398 4385
+f 4382 4396 4383
+f 4380 4394 4381
+f 4379 4391 4392
+f 4389 4403 4390
+f 4387 4401 4388
+f 4399 4411 4412
+f 4396 4410 4397
+f 4395 4407 4408
+f 4393 4405 4406
+f 4391 4403 4404
+f 4389 4401 4402
+f 4398 4410 4411
+f 4395 4409 4396
+f 4393 4407 4394
+f 4391 4405 4392
+f 4402 4416 4403
+f 4401 4413 4414
+f 4411 4425 4412
+f 4409 4423 4410
+f 4408 4420 4421
+f 4406 4418 4419
+f 4403 4417 4404
+f 4402 4414 4415
+f 4410 4424 4411
+f 4408 4422 4409
+f 4407 4419 4420
+f 4404 4418 4405
+f 4416 4428 4429
+f 4413 4427 4414
+f 4425 4437 4438
+f 4423 4435 4436
+f 4421 4433 4434
+f 4418 4432 4419
+f 4416 4430 4417
+f 4415 4427 4428
+f 4423 4437 4424
+f 4421 4435 4422
+f 4420 4432 4433
+f 4418 4430 4431
+f 4428 4442 4429
+f 4426 4440 4427
+f 4437 4451 4438
+f 4435 4449 4436
+f 4434 4446 4447
+f 4432 4444 4445
+f 4430 4442 4443
+f 4427 4441 4428
+f 4437 4449 4450
+f 4435 4447 4448
+f 4432 4446 4433
+f 4430 4444 4431
+f 4442 4454 4455
+f 4439 4453 4440
+f 4451 4463 4464
+f 4449 4461 4462
+f 4446 4460 4447
+f 4444 4458 4445
+f 4443 4455 4456
+f 4440 4454 4441
+f 4450 4462 4463
+f 4447 4461 4448
+f 4445 4459 4446
+f 4444 4456 4457
+f 4455 4467 4468
+f 4453 4465 4466
+f 4463 4477 4464
+f 4462 4474 4475
+f 4459 4473 4460
+f 4457 4471 4458
+f 4455 4469 4456
+f 4453 4467 4454
+f 4462 4476 4463
+f 4461 4473 4474
+f 4459 4471 4472
+f 4457 4469 4470
+f 4347 4073 4086
+f 4347 4086 4099
+f 4347 4099 4112
+f 4347 4112 4125
+f 4347 4125 4138
+f 4347 4138 4151
+f 4347 4151 4164
+f 4347 4164 4177
+f 4347 4177 4190
+f 4347 4190 4203
+f 4347 4203 4216
+f 4347 4216 4229
+f 4347 4229 4242
+f 4347 4242 4255
+f 4347 4255 4268
+f 4347 4268 4281
+f 4347 4281 4294
+f 4347 4294 4307
+f 4347 4307 4320
+f 4347 4320 4333
+f 4347 4333 4346
+f 4347 4346 4360
+f 4347 4360 4373
+f 4347 4373 4386
+f 4347 4386 4399
+f 4347 4399 4412
+f 4347 4412 4425
+f 4347 4425 4438
+f 4347 4438 4451
+f 4347 4451 4464
+f 4347 4464 4477
+f 4468 4063 4064
+f 4466 4061 4062
+f 4477 4072 4073
+f 4474 4071 4475
+f 4472 4069 4473
+f 4471 4066 4067
+f 4468 4065 4469
+f 4466 4063 4467
+f 4347 4477 4073
+f 4476 4071 4072
+f 4474 4069 4070
+f 4472 4067 4068
+f 4469 4066 4470
+f 3935 4465 4060
+f 4465 4056 4060
+f 4452 4051 4056
+f 4439 4047 4051
+f 4047 4413 4043
+f 4043 4400 4039
+f 4039 4387 4035
+f 4387 4031 4035
+f 4031 4361 4027
+f 4361 4023 4027
+f 4348 4019 4023
+f 4334 4015 4019
+f 4015 4308 4011
+f 4011 4295 4007
+f 4007 4282 4003
+f 4282 3999 4003
+f 3999 4256 3995
+f 4256 3991 3995
+f 4243 3987 3991
+f 4230 3983 3987
+f 3983 4204 3979
+f 3979 4191 3975
+f 3975 4178 3971
+f 4178 3967 3971
+f 3967 4152 3963
+f 4152 3959 3963
+f 4139 3955 3959
+f 4126 3951 3955
+f 3951 4100 3947
+f 3947 4087 3943
+f 3943 4074 3939
+f 4074 3935 3939
+f 4480 4483 4484
+f 4480 4485 4481
+f 4479 4482 4483
+f 4484 4489 4485
+f 4482 4487 4483
+f 4483 4488 4484
+f 4488 4493 4489
+f 4487 4490 4491
+f 4488 4491 4492
+f 4493 4496 4497
+f 4490 4495 4491
+f 4491 4496 4492
+f 4496 4501 4497
+f 4495 4498 4499
+f 4496 4499 4500
+f 4501 4504 4505
+f 4498 4503 4499
+f 4499 4504 4500
+f 4505 4508 4509
+f 4503 4506 4507
+f 4504 4507 4508
+f 4509 4512 4513
+f 4506 4511 4507
+f 4507 4512 4508
+f 4513 4516 4517
+f 4511 4514 4515
+f 4512 4515 4516
+f 4516 4521 4517
+f 4514 4519 4515
+f 4515 4520 4516
+f 4520 4525 4521
+f 4518 4523 4519
+f 4520 4523 4524
+f 4525 4528 4529
+f 4522 4527 4523
+f 4523 4528 4524
+f 4528 4533 4529
+f 4527 4530 4531
+f 4528 4531 4532
+f 4533 4536 4537
+f 4530 4535 4531
+f 4531 4536 4532
+f 4537 4540 4541
+f 4534 4539 4535
+f 4536 4539 4540
+f 4541 4544 4545
+f 4539 4542 4543
+f 4539 4544 4540
+f 4544 4549 4545
+f 4542 4547 4543
+f 4544 4547 4548
+f 4548 4553 4549
+f 4547 4550 4551
+f 4547 4552 4548
+f 4552 4557 4553
+f 4551 4554 4555
+f 4552 4555 4556
+f 4557 4560 4561
+f 4554 4559 4555
+f 4555 4560 4556
+f 4560 4565 4561
+f 4559 4562 4563
+f 4560 4563 4564
+f 4565 4568 4569
+f 4562 4567 4563
+f 4563 4568 4564
+f 4569 4572 4573
+f 4567 4570 4571
+f 4568 4571 4572
+f 4572 4577 4573
+f 4570 4575 4571
+f 4571 4576 4572
+f 4576 4581 4577
+f 4575 4578 4579
+f 4576 4579 4580
+f 4580 4585 4581
+f 4578 4583 4579
+f 4579 4584 4580
+f 4584 4589 4585
+f 4583 4586 4587
+f 4584 4587 4588
+f 4589 4592 4593
+f 4586 4591 4587
+f 4587 4592 4588
+f 4592 4597 4593
+f 4591 4594 4595
+f 4592 4595 4596
+f 4597 4601 4602
+f 4594 4600 4595
+f 4595 4601 4596
+f 4602 4605 4606
+f 4600 4603 4604
+f 4601 4604 4605
+f 4478 4598 4482
+f 4482 4598 4486
+f 4486 4598 4490
+f 4490 4598 4494
+f 4494 4598 4498
+f 4498 4598 4502
+f 4502 4598 4506
+f 4506 4598 4510
+f 4510 4598 4514
+f 4514 4598 4518
+f 4518 4598 4522
+f 4522 4598 4526
+f 4526 4598 4530
+f 4530 4598 4534
+f 4534 4598 4538
+f 4538 4598 4542
+f 4542 4598 4546
+f 4546 4598 4550
+f 4550 4598 4554
+f 4554 4598 4558
+f 4558 4598 4562
+f 4562 4598 4566
+f 4566 4598 4570
+f 4570 4598 4574
+f 4574 4598 4578
+f 4578 4598 4582
+f 4582 4598 4586
+f 4586 4598 4590
+f 4590 4598 4594
+f 4594 4598 4599
+f 4599 4598 4603
+f 4606 4480 4481
+f 4603 4479 4604
+f 4604 4480 4605
+f 4603 4598 4478
+f 4615 4627 4628
+f 4612 4626 4613
+f 4611 4623 4624
+f 4609 4621 4622
+f 4618 4630 4631
+f 4615 4629 4616
+f 4613 4627 4614
+f 4612 4624 4625
+f 4609 4623 4610
+f 4607 4621 4608
+f 4619 4631 4632
+f 4617 4629 4630
+f 4622 4636 4623
+f 4620 4634 4621
+f 4632 4644 4645
+f 4629 4643 4630
+f 4628 4640 4641
+f 4626 4638 4639
+f 4624 4636 4637
+f 4622 4634 4635
+f 4631 4643 4644
+f 4628 4642 4629
+f 4626 4640 4627
+f 4624 4638 4625
+f 4635 4649 4636
+f 4634 4646 4647
+f 4644 4658 4645
+f 4642 4656 4643
+f 4641 4653 4654
+f 4639 4651 4652
+f 4636 4650 4637
+f 4635 4647 4648
+f 4643 4657 4644
+f 4642 4654 4655
+f 4640 4652 4653
+f 4637 4651 4638
+f 4648 4662 4649
+f 4646 4660 4647
+f 4658 4670 4671
+f 4656 4668 4669
+f 4653 4667 4654
+f 4651 4665 4652
+f 4649 4663 4650
+f 4648 4660 4661
+f 4656 4670 4657
+f 4654 4668 4655
+f 4653 4665 4666
+f 4651 4663 4664
+f 4661 4675 4662
+f 4660 4672 4673
+f 4670 4684 4671
+f 4668 4682 4669
+f 4667 4679 4680
+f 4665 4677 4678
+f 4663 4675 4676
+f 4661 4673 4674
+f 4670 4682 4683
+f 4667 4681 4668
+f 4665 4679 4666
+f 4663 4677 4664
+f 4675 4687 4688
+f 4672 4686 4673
+f 4684 4696 4697
+f 4682 4694 4695
+f 4679 4693 4680
+f 4678 4690 4691
+f 4676 4688 4689
+f 4673 4687 4674
+f 4683 4695 4696
+f 4681 4693 4694
+f 4678 4692 4679
+f 4676 4690 4677
+f 4688 4700 4701
+f 4686 4698 4699
+f 4696 4710 4697
+f 4695 4707 4708
+f 4692 4706 4693
+f 4691 4703 4704
+f 4688 4702 4689
+f 4686 4700 4687
+f 4695 4709 4696
+f 4694 4706 4707
+f 4692 4704 4705
+f 4689 4703 4690
+f 4701 4713 4714
+f 4699 4711 4712
+f 4710 4722 4723
+f 4707 4721 4708
+f 4705 4719 4706
+f 4704 4716 4717
+f 4701 4715 4702
+f 4699 4713 4700
+f 4709 4721 4722
+f 4707 4719 4720
+f 4705 4717 4718
+f 4702 4716 4703
+f 4713 4727 4714
+f 4711 4725 4712
+f 4723 4735 4736
+f 4721 4733 4734
+f 4719 4731 4732
+f 4716 4730 4717
+f 4715 4727 4728
+f 4713 4725 4726
+f 4721 4735 4722
+f 4719 4733 4720
+f 4717 4731 4718
+f 4716 4728 4729
+f 4726 4740 4727
+f 4724 4738 4725
+f 4736 4748 4749
+f 4733 4747 4734
+f 4732 4744 4745
+f 4730 4742 4743
+f 4728 4740 4741
+f 4726 4738 4739
+f 4735 4747 4748
+f 4732 4746 4733
+f 4730 4744 4731
+f 4728 4742 4729
+f 4739 4753 4740
+f 4738 4750 4751
+f 4748 4762 4749
+f 4746 4760 4747
+f 4745 4757 4758
+f 4743 4755 4756
+f 4740 4754 4741
+f 4739 4751 4752
+f 4747 4761 4748
+f 4745 4759 4746
+f 4744 4756 4757
+f 4741 4755 4742
+f 4753 4765 4766
+f 4750 4764 4751
+f 4762 4774 4775
+f 4759 4773 4760
+f 4758 4770 4771
+f 4755 4769 4756
+f 4753 4767 4754
+f 4752 4764 4765
+f 4760 4774 4761
+f 4758 4772 4759
+f 4757 4769 4770
+f 4755 4767 4768
+f 4765 4779 4766
+f 4764 4776 4777
+f 4774 4788 4775
+f 4772 4786 4773
+f 4771 4783 4784
+f 4769 4781 4782
+f 4767 4779 4780
+f 4765 4777 4778
+f 4774 4786 4787
+f 4771 4785 4772
+f 4769 4783 4770
+f 4767 4781 4768
+f 4779 4791 4792
+f 4776 4790 4777
+f 4788 4800 4801
+f 4786 4798 4799
+f 4783 4797 4784
+f 4782 4794 4795
+f 4780 4792 4793
+f 4777 4791 4778
+f 4787 4799 4800
+f 4785 4797 4798
+f 4782 4796 4783
+f 4780 4794 4781
+f 4792 4804 4805
+f 4790 4802 4803
+f 4800 4814 4801
+f 4799 4811 4812
+f 4796 4810 4797
+f 4795 4807 4808
+f 4792 4806 4793
+f 4790 4804 4791
+f 4799 4813 4800
+f 4798 4810 4811
+f 4796 4808 4809
+f 4793 4807 4794
+f 4805 4817 4818
+f 4803 4815 4816
+f 4814 4826 4827
+f 4811 4825 4812
+f 4809 4823 4810
+f 4808 4820 4821
+f 4805 4819 4806
+f 4803 4817 4804
+f 4813 4825 4826
+f 4811 4823 4824
+f 4809 4821 4822
+f 4806 4820 4807
+f 4817 4831 4818
+f 4815 4829 4816
+f 4826 4840 4827
+f 4825 4837 4838
+f 4823 4835 4836
+f 4820 4834 4821
+f 4819 4831 4832
+f 4817 4829 4830
+f 4825 4839 4826
+f 4823 4837 4824
+f 4821 4835 4822
+f 4820 4832 4833
+f 4830 4844 4831
+f 4828 4842 4829
+f 4840 4852 4853
+f 4837 4851 4838
+f 4836 4848 4849
+f 4834 4846 4847
+f 4832 4844 4845
+f 4830 4842 4843
+f 4839 4851 4852
+f 4836 4850 4837
+f 4834 4848 4835
+f 4832 4846 4833
+f 4843 4857 4844
+f 4842 4854 4855
+f 4852 4866 4853
+f 4850 4864 4851
+f 4849 4861 4862
+f 4847 4859 4860
+f 4844 4858 4845
+f 4843 4855 4856
+f 4851 4865 4852
+f 4849 4863 4850
+f 4848 4860 4861
+f 4845 4859 4846
+f 4857 4869 4870
+f 4855 4867 4868
+f 4866 4878 4879
+f 4864 4876 4877
+f 4862 4874 4875
+f 4859 4873 4860
+f 4857 4871 4858
+f 4856 4868 4869
+f 4864 4878 4865
+f 4862 4876 4863
+f 4861 4873 4874
+f 4859 4871 4872
+f 4869 4883 4870
+f 4868 4880 4881
+f 4878 4892 4879
+f 4877 4889 4890
+f 4874 4888 4875
+f 4873 4885 4886
+f 4871 4883 4884
+f 4868 4882 4869
+f 4878 4890 4891
+f 4876 4888 4889
+f 4873 4887 4874
+f 4871 4885 4872
+f 4883 4896 4897
+f 4880 4895 4881
+f 4892 4905 4906
+f 4890 4903 4904
+f 4887 4902 4888
+f 4885 4900 4886
+f 4884 4897 4898
+f 4881 4896 4882
+f 4891 4904 4905
+f 4889 4902 4903
+f 4886 4901 4887
+f 4885 4898 4899
+f 4897 4909 4910
+f 4895 4907 4908
+f 4905 4919 4906
+f 4904 4916 4917
+f 4901 4915 4902
+f 4899 4913 4900
+f 4897 4911 4898
+f 4895 4909 4896
+f 4904 4918 4905
+f 4903 4915 4916
+f 4901 4913 4914
+f 4899 4911 4912
+f 4910 4922 4923
+f 4908 4920 4921
+f 4919 4931 4932
+f 4916 4930 4917
+f 4914 4928 4915
+f 4913 4925 4926
+f 4910 4924 4911
+f 4908 4922 4909
+f 4918 4930 4931
+f 4916 4928 4929
+f 4914 4926 4927
+f 4911 4925 4912
+f 4922 4936 4923
+f 4920 4934 4921
+f 4931 4945 4932
+f 4930 4942 4943
+f 4928 4940 4941
+f 4925 4939 4926
+f 4924 4936 4937
+f 4922 4934 4935
+f 4930 4944 4931
+f 4928 4942 4929
+f 4926 4940 4927
+f 4925 4937 4938
+f 4935 4949 4936
+f 4933 4947 4934
+f 4945 4957 4958
+f 4942 4956 4943
+f 4941 4953 4954
+f 4938 4952 4939
+f 4937 4949 4950
+f 4935 4947 4948
+f 4944 4956 4957
+f 4941 4955 4942
+f 4939 4953 4940
+f 4938 4950 4951
+f 4948 4962 4949
+f 4947 4959 4960
+f 4957 4971 4958
+f 4955 4969 4956
+f 4954 4966 4967
+f 4951 4965 4952
+f 4949 4963 4950
+f 4948 4960 4961
+f 4956 4970 4957
+f 4954 4968 4955
+f 4953 4965 4966
+f 4951 4963 4964
+f 4962 4974 4975
+f 4959 4973 4960
+f 4971 4983 4984
+f 4968 4982 4969
+f 4966 4980 4967
+f 4964 4978 4965
+f 4962 4976 4963
+f 4960 4974 4961
+f 4969 4983 4970
+f 4967 4981 4968
+f 4966 4978 4979
+f 4964 4976 4977
+f 4975 4987 4988
+f 4973 4985 4986
+f 4983 4997 4984
+f 4981 4995 4982
+f 4979 4993 4980
+f 4978 4990 4991
+f 4976 4988 4989
+f 4973 4987 4974
+f 4983 4995 4996
+f 4981 4993 4994
+f 4978 4992 4979
+f 4976 4990 4977
+f 4988 5000 5001
+f 4985 4999 4986
+f 4997 5009 5010
+f 4995 5007 5008
+f 4992 5006 4993
+f 4991 5003 5004
+f 4989 5001 5002
+f 4986 5000 4987
+f 4996 5008 5009
+f 4994 5006 5007
+f 4991 5005 4992
+f 4989 5003 4990
+f 5001 5013 5014
+f 4999 5011 5012
+f 5009 5023 5010
+f 5008 5020 5021
+f 5005 5019 5006
+f 5004 5016 5017
+f 5001 5015 5002
+f 4999 5013 5000
+f 5008 5022 5009
+f 5007 5019 5020
+f 5005 5017 5018
+f 5002 5016 5003
+f 4893 4619 4632
+f 4893 4632 4645
+f 4893 4645 4658
+f 4893 4658 4671
+f 4893 4671 4684
+f 4893 4684 4697
+f 4893 4697 4710
+f 4893 4710 4723
+f 4893 4723 4736
+f 4893 4736 4749
+f 4893 4749 4762
+f 4893 4762 4775
+f 4893 4775 4788
+f 4893 4788 4801
+f 4893 4801 4814
+f 4893 4814 4827
+f 4893 4827 4840
+f 4893 4840 4853
+f 4893 4853 4866
+f 4893 4866 4879
+f 4893 4879 4892
+f 4893 4892 4906
+f 4893 4906 4919
+f 4893 4919 4932
+f 4893 4932 4945
+f 4893 4945 4958
+f 4893 4958 4971
+f 4893 4971 4984
+f 4893 4984 4997
+f 4893 4997 5010
+f 4893 5010 5023
+f 5014 4609 4610
+f 5012 4607 4608
+f 5022 4619 5023
+f 5020 4617 5021
+f 5018 4615 5019
+f 5017 4612 4613
+f 5014 4611 5015
+f 5012 4609 5013
+f 4893 5023 4619
+f 5021 4618 5022
+f 5020 4615 4616
+f 5018 4613 4614
+f 5015 4612 5016
+f 4481 5011 4606
+f 5011 4602 4606
+f 4998 4597 4602
+f 4985 4593 4597
+f 4593 4959 4589
+f 4589 4946 4585
+f 4585 4933 4581
+f 4933 4577 4581
+f 4577 4907 4573
+f 4907 4569 4573
+f 4894 4565 4569
+f 4880 4561 4565
+f 4561 4854 4557
+f 4557 4841 4553
+f 4553 4828 4549
+f 4828 4545 4549
+f 4545 4802 4541
+f 4802 4537 4541
+f 4789 4533 4537
+f 4776 4529 4533
+f 4529 4750 4525
+f 4525 4737 4521
+f 4521 4724 4517
+f 4724 4513 4517
+f 4513 4698 4509
+f 4698 4505 4509
+f 4685 4501 4505
+f 4672 4497 4501
+f 4497 4646 4493
+f 4493 4633 4489
+f 4489 4620 4485
+f 4620 4481 4485
+f 4 3 2
+f 8 7 6
+f 13 16 15
+f 17 28 19
+f 32 31 30
+f 36 35 34
+f 37 40 39
+f 21 22 23
+f 25 26 27
+f 41 44 43
+f 48 47 46
+f 52 51 50
+f 53 56 55
+f 60 59 58
+f 29 30 62
+f 66 65 64
+f 67 5 6
+f 72 71 70
+f 76 75 74
+f 77 80 79
+f 81 84 83
+f 59 87 86
+f 88 91 90
+f 96 95 94
+f 100 99 98
+f 101 104 103
+f 106 105 34
+f 108 116 115
+f 117 119 36
+f 124 127 126
+f 131 130 129
+f 132 102 134
+f 142 141 140
+f 148 106 60
+f 144 143 127
+f 149 152 151
+f 156 155 154
+f 159 150 158
+f 116 108 164
+f 171 170 169
+f 174 126 173
+f 179 182 181
+f 186 189 188
+f 194 197 196
+f 199 142 139
+f 206 209 208
+f 213 212 211
+f 202 215 214
+f 96 93 216
+f 92 238 43
+f 97 184 246
+f 250 249 229
+f 252 254 58
+f 258 257 256
+f 260 91 229
+f 110 111 112
+f 114 264 263
+f 265 268 267
+f 56 53 270
+f 274 273 249
+f 185 183 63
+f 280 279 278
+f 285 284 283
+f 288 219 211
+f 141 142 92
+f 289 290 175
+f 148 116 105
+f 312 311 170
+f 238 262 42
+f 318 163 160
+f 310 309 308
+f 241 239 325
+f 160 230 240
+f 47 48 307
+f 108 109 315
+f 345 85 86
+f 348 351 350
+f 356 359 358
+f 355 354 353
+f 363 31 32
+f 364 252 253
+f 366 125 368
+f 183 372 371
+f 163 162 161
+f 209 226 101
+f 378 377 165
+f 379 295 381
+f 383 382 3
+f 284 285 386
+f 387 127 389
+f 391 259 229
+f 73 281 397
+f 405 408 407
+f 203 218 411
+f 414 121 413
+f 1 356 357
+f 277 276 131
+f 254 344 57
+f 358 384 415
+f 417 416 386
+f 401 400 192
+f 419 389 127
+f 421 181 182
+f 103 301 302
+f 428 367 368
+f 380 390 359
+f 62 204 266
+f 140 141 431
+f 357 358 351
+f 356 1 379
+f 415 392 403
+f 437 428 436
+f 438 441 440
+f 188 451 450
+f 265 455 181
+f 224 185 64
+f 237 212 213
+f 275 131 128
+f 281 282 460
+f 50 462 79
+f 73 74 282
+f 468 467 466
+f 390 355 352
+f 482 485 484
+f 489 488 487
+f 493 496 495
+f 472 471 470
+f 502 501 500
+f 220 221 222
+f 512 419 143
+f 518 517 511
+f 513 239 230
+f 205 30 31
+f 189 370 451
+f 163 318 532
+f 533 224 225
+f 446 448 447
+f 335 541 540
+f 544 512 546
+f 329 519 291
+f 199 534 262
+f 385 386 416
+f 532 190 191
+f 363 117 118
+f 578 581 580
+f 573 572 571
+f 561 563 562
+f 241 260 259
+f 590 593 592
+f 545 546 595
+f 503 154 155
+f 606 625 624
+f 478 481 480
+f 646 649 648
+f 590 591 689
+f 564 565 651
+f 728 246 184
+f 714 724 716
+f 692 574 575
+f 541 335 581
+f 346 68 248
+f 161 231 230
+f 128 129 94
+f 698 705 704
+f 574 577 576
+f 290 289 401
+f 630 631 725
+f 733 736 735
+f 755 466 496
+f 759 480 481
+f 757 763 762
+f 764 496 466
+f 768 767 3808
+f 759 760 488
+f 492 474 475
+f 474 770 769
+f 773 468 465
+f 771 774 486
+f 484 485 775
+f 774 776 489
+f 495 496 764
+f 495 757 761
+f 137 397 784
+f 325 239 797
+f 165 519 51
+f 369 32 29
+f 730 731 568
+f 90 302 301
+f 243 118 516
+f 800 734 806
+f 817 815 808
+f 816 808 823
+f 306 303 447
+f 815 810 809
+f 829 830 828
+f 726 725 501
+f 274 427 424
+f 879 882 881
+f 884 883 879
+f 219 218 217
+f 906 905 904
+f 455 204 205
+f 898 882 830
+f 903 900 880
+f 377 291 519
+f 923 949 948
+f 946 947 951
+f 3519 740 739
+f 900 899 886
+f 225 64 65
+f 182 445 442
+f 809 952 947
+f 954 960 962
+f 927 950 965
+f 885 879 880
+f 925 927 966
+f 343 342 341
+f 965 964 971
+f 968 971 970
+f 972 968 969
+f 963 961 981
+f 336 337 338
+f 992 993 827
+f 810 815 996
+f 1007 1001 1002
+f 1012 826 827
+f 1011 1009 1007
+f 294 293 292
+f 907 908 920
+f 1017 579 1019
+f 1020 1023 1022
+f 1035 1024 1020
+f 1061 1060 1047
+f 1063 1061 1046
+f 1044 1020 1021
+f 1102 1094 1097
+f 1116 1126 1118
+f 1016 1015 1014
+f 881 897 904
+f 816 1116 1127
+f 1094 948 949
+f 514 342 167
+f 146 145 81
+f 1133 1135 1130
+f 1102 1133 1126
+f 822 1126 1116
+f 1145 1143 1148
+f 1076 1144 1151
+f 1148 1143 1086
+f 1109 1097 1114
+f 1154 1153 1156
+f 1153 925 967
+f 3464 3497 463
+f 1144 1147 1152
+f 1156 967 1163
+f 1164 1162 1169
+f 1145 1171 1176
+f 1114 1115 1154
+f 1183 1155 1164
+f 1184 1183 1177
+f 1069 1151 1062
+f 1023 1191 1194
+f 1162 1163 1170
+f 1195 1184 1185
+f 1183 1196 1176
+f 1201 1063 1152
+f 1061 1063 1201
+f 3825 847 843
+f 1203 1202 1195
+f 1202 1201 1184
+f 1218 1060 1203
+f 1060 1061 1202
+f 1225 1218 1217
+f 671 695 694
+f 1219 1217 1024
+f 1217 1203 1034
+f 1226 1014 1015
+f 1243 1242 1234
+f 1231 1230 1229
+f 847 1243 1233
+f 843 1233 1244
+f 1233 1234 1245
+f 1923 3761 233
+f 2100 2108 499
+f 3914 3864 2135
+f 61 62 323
+f 210 211 217
+f 1299 1310 1309
+f 3863 1230 1231
+f 1066 1047 1321
+f 1322 1321 1228
+f 1060 1218 1321
+f 1325 1324 1323
+f 1244 1245 1332
+f 1290 1234 1242
+f 1337 1330 1339
+f 1330 1332 1340
+f 1341 1332 1245
+f 1342 1340 1332
+f 1343 1342 1341
+f 1344 1341 1294
+f 3857 3856 1345
+f 1347 1348 1342
+f 3856 3854 1295
+f 1349 1350 1348
+f 3861 1349 1347
+f 3853 1347 1343
+f 906 898 829
+f 905 906 1353
+f 865 1356 1355
+f 1355 1353 829
+f 1357 1355 825
+f 1356 1354 1353
+f 3834 1337 1338
+f 866 1358 1356
+f 1358 1359 1354
+f 3828 878 1337
+f 838 1362 1012
+f 1363 1364 1338
+f 1368 1363 1339
+f 1348 1368 1340
+f 1371 1369 3824
+f 524 525 526
+f 528 529 530
+f 1372 1370 1369
+f 535 536 537
+f 1367 1368 1348
+f 1370 1354 1359
+f 1369 1359 3833
+f 1373 910 903
+f 555 556 557
+f 1374 909 910
+f 1373 904 905
+f 1374 1373 1375
+f 1375 905 1354
+f 1378 920 908
+f 376 373 651
+f 1388 928 973
+f 586 587 588
+f 1390 1389 1388
+f 1391 1388 969
+f 597 598 599
+f 600 601 602
+f 1392 1391 970
+f 608 609 610
+f 612 524 523
+f 614 615 616
+f 617 618 25
+f 620 621 622
+f 1397 1390 1391
+f 1398 1397 1392
+f 1400 987 920
+f 1402 1403 1165
+f 636 637 638
+f 930 1404 1402
+f 641 642 643
+f 929 1402 1169
+f 1404 1405 1403
+f 1411 1366 1365
+f 1412 1406 1415
+f 652 653 654
+f 609 656 657
+f 659 660 661
+f 558 663 586
+f 665 666 667
+f 1413 1411 1406
+f 1418 1416 3810
+f 1417 1372 1371
+f 1416 1371 3829
+f 1419 1417 1416
+f 1386 909 1374
+f 677 678 679
+f 1385 1386 1420
+f 1420 1374 1376
+f 685 686 687
+f 1421 1420 1422
+f 528 690 691
+f 1422 1376 1372
+f 1018 1019 747
+f 699 700 701
+f 1424 578 579
+f 3663 1597 1767
+f 700 699 711
+f 1093 3746 3754
+f 717 718 530
+f 721 722 723
+f 704 1650 3543
+f 19 1429 1426
+f 1429 1428 1427
+f 1658 1659 3542
+f 1439 1437 1434
+f 1437 1436 1435
+f 1426 1427 1433
+f 1442 1431 1432
+f 742 743 744
+f 1432 1433 1441
+f 751 752 753
+f 1438 1434 1403
+f 1446 1445 1444
+f 1447 1448 1446
+f 3860 1447 1443
+f 1415 1367 1445
+f 1414 1415 1446
+f 2173 1683 469
+f 777 778 779
+f 780 781 782
+f 1676 1693 1806
+f 3859 1451 1349
+f 789 790 791
+f 745 792 793
+f 652 794 795
+f 1455 1458 1457
+f 1454 1367 1350
+f 610 691 802
+f 3831 3816 698
+f 1352 1044 1045
+f 1015 1016 1459
+f 811 812 813
+f 2118 3815 3817
+f 819 820 821
+f 1466 1463 1194
+f 1467 1468 1465
+f 1182 1165 1468
+f 832 812 833
+f 835 836 837
+f 1467 1466 1190
+f 1469 1470 1444
+f 840 700 712
+f 1453 1470 1469
+f 599 844 845
+f 1226 1450 3849
+f 851 852 853
+f 855 744 856
+f 858 859 860
+f 861 862 863
+f 1014 1226 3855
+f 867 676 868
+f 870 871 872
+f 874 875 876
+f 1310 1323 1471
+f 1476 1433 1427
+f 1461 1476 1475
+f 887 888 889
+f 788 791 892
+f 894 895 896
+f 1460 1475 1227
+f 901 902 524
+f 1465 1478 1477
+f 1865 3503 3500
+f 911 912 913
+f 916 917 918
+f 1850 3505 3507
+f 921 922 895
+f 1479 1435 1436
+f 926 664 667
+f 1481 562 563
+f 269 1483 84
+f 931 932 933
+f 934 935 614
+f 936 685 688
+f 938 659 662
+f 1484 270 122
+f 941 942 835
+f 320 321 509
+f 1849 3510 3505
+f 84 1483 1484
+f 1487 1490 1488
+f 1757 1758 3502
+f 365 1492 1491
+f 461 79 80
+f 117 363 362
+f 1494 540 541
+f 926 601 957
+f 1477 1441 1433
+f 1709 1588 1585
+f 119 117 1496
+f 271 1497 504
+f 511 517 189
+f 345 1496 1492
+f 57 344 299
+f 1501 1499 268
+f 267 268 1499
+f 974 975 976
+f 1678 1652 1693
+f 3846 1682 1807
+f 557 982 663
+f 983 941 984
+f 1443 1444 3849
+f 421 1500 268
+f 989 990 991
+f 1490 1487 418
+f 279 280 309
+f 997 868 676
+f 998 26 999
+f 1504 418 38
+f 1506 1508 1507
+f 360 361 1508
+f 1506 1486 207
+f 939 1005 1006
+f 270 1484 1483
+f 66 63 546
+f 1010 742 793
+f 367 428 437
+f 244 31 363
+f 1670 3532 3534
+f 209 206 1510
+f 388 389 426
+f 427 274 250
+f 161 520 521
+f 100 246 45
+f 1025 1026 608
+f 1029 1030 1031
+f 939 662 1033
+f 41 42 594
+f 1036 780 783
+f 702 701 1038
+f 1040 1041 1042
+f 362 32 369
+f 187 1520 477
+f 70 1487 539
+f 1049 600 603
+f 613 867 869
+f 860 1050 1051
+f 1005 1052 1053
+f 753 1055 1056
+f 1057 1058 1059
+f 293 288 1522
+f 288 212 1521
+f 292 1522 1523
+f 331 1507 1508
+f 306 452 453
+f 100 44 41
+f 462 1524 78
+f 330 331 1524
+f 635 1067 1068
+f 1485 1527 1526
+f 206 1485 1528
+f 1070 1071 1072
+f 942 941 1073
+f 916 933 1075
+f 440 1529 1530
+f 157 158 1514
+f 1529 162 5
+f 371 595 546
+f 790 586 663
+f 1077 858 857
+f 1403 1434 1435
+f 1080 1081 1082
+f 531 520 1529
+f 21 1083 1084
+f 272 1532 1531
+f 690 712 802
+f 335 3571 3567
+f 1090 1030 1029
+f 1092 795 794
+f 1533 396 137
+f 661 660 1095
+f 69 444 445
+f 1099 1100 1101
+f 38 39 1536
+f 1104 1105 1106
+f 852 1107 1108
+f 1377 908 909
+f 291 377 411
+f 316 1536 516
+f 539 1488 1533
+f 1505 1536 316
+f 1110 1111 1112
+f 1539 1537 460
+f 522 441 1537
+f 834 837 1120
+f 1122 1123 1124
+f 612 1125 1041
+f 935 793 615
+f 276 278 130
+f 1686 3549 3522
+f 193 190 1541
+f 231 521 1540
+f 556 1037 982
+f 365 253 85
+f 190 532 318
+f 1810 765 3574
+f 3924 1867 3512
+f 1132 983 985
+f 208 101 102
+f 1134 1033 662
+f 1491 518 80
+f 429 133 1542
+f 1137 1138 915
+f 172 436 428
+f 1139 1140 1141
+f 549 96 54
+f 1517 1519 1518
+f 834 1119 984
+f 1543 513 286
+f 1544 797 239
+f 133 134 1544
+f 506 361 257
+f 420 327 328
+f 1158 1159 1160
+f 587 1104 1103
+f 607 1161 1027
+f 304 1546 1545
+f 364 1491 77
+f 69 70 198
+f 700 840 1112
+f 3877 626 3525
+f 1166 719 657
+f 1167 1168 858
+f 1300 3523 3524
+f 1172 1173 1174
+f 218 203 200
+f 1179 1180 1181
+f 372 183 97
+f 89 104 227
+f 1186 1187 1188
+f 172 173 1517
+f 1482 84 81
+f 1090 1192 1193
+f 394 395 353
+f 526 525 1198
+f 1199 1200 1026
+f 606 623 697
+f 1204 1205 1206
+f 1207 1208 523
+f 1209 1210 867
+f 914 913 685
+f 1478 1436 1441
+f 1029 1028 1161
+f 1211 1212 1213
+f 3858 1453 1451
+f 844 1215 1216
+f 423 193 273
+f 1220 1221 1222
+f 22 1223 1224
+f 882 898 897
+f 1519 1517 173
+f 1189 1188 1232
+f 349 350 550
+f 1071 1235 1236
+f 1237 1238 1239
+f 1221 1186 1189
+f 1510 1528 1512
+f 939 1032 1241
+f 1556 1530 5
+f 1553 1554 394
+f 404 1556 67
+f 1247 1248 1249
+f 142 199 238
+f 1251 1067 635
+f 850 1252 1253
+f 1254 1167 1077
+f 57 132 133
+f 443 430 552
+f 1187 1172 1175
+f 1256 1070 831
+f 432 405 382
+f 741 748 747
+f 434 350 351
+f 349 432 383
+f 251 124 1481
+f 395 384 352
+f 1518 1519 1550
+f 1558 410 378
+f 109 263 314
+f 285 1514 1513
+f 395 394 393
+f 202 203 410
+f 417 408 405
+f 1246 1257 1258
+f 3 1559 297
+f 1259 1260 1261
+f 403 404 347
+f 955 954 953
+f 462 50 51
+f 1096 1095 687
+f 429 147 60
+f 896 1264 1265
+f 1267 721 1268
+f 423 424 327
+f 37 179 180
+f 20 23 1187
+f 1256 1168 1071
+f 1188 1175 1269
+f 1236 1270 1271
+f 1272 536 535
+f 565 564 463
+f 1273 792 745
+f 876 1254 1255
+f 316 33 164
+f 437 1516 503
+f 194 1494 1495
+f 1275 1276 864
+f 624 625 631
+f 1277 1274 1137
+f 569 568 567
+f 1481 124 125
+f 324 325 547
+f 1279 1108 1107
+f 1525 332 329
+f 1281 1278 1137
+f 171 168 1553
+f 1488 73 396
+f 1557 1561 1562
+f 1282 856 1206
+f 1053 1277 1278
+f 778 777 607
+f 713 583 3566
+f 251 225 144
+f 4 357 348
+f 709 708 707
+f 728 48 45
+f 261 135 1563
+f 789 1286 1287
+f 74 314 263
+f 403 284 385
+f 1178 1181 1240
+f 1223 1259 1289
+f 1220 20 1186
+f 296 1518 354
+f 784 459 1538
+f 1181 1180 1291
+f 1269 1292 1257
+f 1566 1538 438
+f 136 784 1566
+f 1563 1564 169
+f 107 114 109
+f 1297 1282 1298
+f 1464 1477 1476
+f 1520 187 507
+f 1301 1302 1303
+f 1306 1307 1308
+f 175 456 1532
+f 1532 456 200
+f 1311 1266 1312
+f 872 1314 1315
+f 3836 843 878
+f 1305 1308 864
+f 1040 1275 863
+f 1564 1566 399
+f 592 1576 1575
+f 591 1575 1088
+f 1581 1584 1583
+f 1196 1152 1147
+f 710 582 583
+f 285 407 408
+f 953 954 961
+f 1594 1592 1589
+f 1585 1588 1587
+f 1592 1591 1590
+f 3885 945 944
+f 1600 1599 1598
+f 1604 1603 1602
+f 1399 1392 979
+f 1589 1590 1596
+f 1610 1609 1608
+f 298 257 361
+f 551 552 276
+f 1627 1626 3814
+f 859 811 818
+f 1078 857 1328
+f 1555 439 1530
+f 572 573 680
+f 772 465 776
+f 977 1281 918
+f 404 403 392
+f 1054 1331 750
+f 1644 1645 1643
+f 932 1265 1082
+f 1067 1333 1334
+f 1540 1539 264
+f 597 1335 643
+f 471 761 762
+f 1642 696 3831
+f 1644 1642 3842
+f 923 946 950
+f 96 549 548
+f 520 531 522
+f 445 444 443
+f 849 839 848
+f 480 759 755
+f 1646 760 481
+f 311 426 1567
+f 464 463 3497
+f 3497 1360 1361
+f 234 237 236
+f 190 193 192
+f 153 154 152
+f 231 287 286
+f 1552 1553 1551
+f 760 1646 1648
+f 3832 1647 1649
+f 828 827 826
+f 52 167 166
+f 1663 1662 1661
+f 287 107 115
+f 668 671 670
+f 634 645 640
+f 553 560 559
+f 118 36 33
+f 492 484 491
+f 3552 1652 1651
+f 582 589 584
+f 384 395 392
+f 1381 1382 1383
+f 1562 1561 435
+f 952 953 951
+f 3529 3566 583
+f 1660 1661 1667
+f 1394 1395 1396
+f 1660 1665 1664
+f 1671 1670 1669
+f 1624 1625 1673
+f 1407 1408 1409
+f 1677 1676 1675
+f 1613 1679 1677
+f 1679 1678 1676
+f 1626 1681 3811
+f 534 199 198
+f 803 733 734
+f 400 248 191
+f 65 66 143
+f 3526 1657 1656
+f 402 159 157
+f 629 628 627
+f 461 477 49
+f 758 757 495
+f 796 787 786
+f 406 407 1513
+f 342 343 166
+f 1694 1659 1658
+f 1692 1687 1694
+f 3557 1693 1652
+f 3921 1704 1703
+f 326 547 302
+f 3916 3921 1702
+f 3919 3918 1698
+f 1699 1691 3927
+f 1557 406 1560
+f 1127 1116 1117
+f 400 401 289
+f 1706 1702 1707
+f 1707 1702 1599
+f 1709 1706 1708
+f 1714 1712 1711
+f 3919 3915 1709
+f 1704 1711 1577
+f 1716 1714 1713
+f 1559 1562 1549
+f 1722 1721 1720
+f 1335 597 596
+f 410 411 377
+f 973 1170 1163
+f 168 169 399
+f 1571 682 1089
+f 1572 681 682
+f 749 646 647
+f 1730 1724 1723
+f 1594 1582 1729
+f 1729 1723 1591
+f 1384 1378 1377
+f 1732 1731 1712
+f 1724 1730 1731
+f 1722 1724 1732
+f 1721 1722 1733
+f 1734 1733 1716
+f 1735 1737 1736
+f 1733 1732 1714
+f 1663 1664 1741
+f 1742 1665 1740
+f 1738 1739 1737
+f 1665 1660 1666
+f 1665 1742 1744
+f 1725 1726 1746
+f 1591 1725 1745
+f 1590 1745 1747
+f 1745 1746 1748
+f 1659 1688 3538
+f 3287 1760 3545
+f 1185 1177 1182
+f 1664 1744 1749
+f 1741 1749 1739
+f 1746 1742 1743
+f 1726 1744 1742
+f 1115 924 1153
+f 1654 3546 3575
+f 1896 3562 3549
+f 1720 1749 1744
+f 1721 1739 1749
+f 1750 1718 1701
+f 1697 1736 1750
+f 1151 1152 1063
+f 1751 1752 1670
+f 1695 1658 1752
+f 961 962 986
+f 1770 1769 1768
+f 1766 1594 1593
+f 1731 1759 1579
+f 1579 1777 1774
+f 3928 1772 1771
+f 1764 1772 3928
+f 1777 1776 1775
+f 3762 3763 1699
+f 1004 1127 1128
+f 1774 1775 1771
+f 987 1400 1399
+f 1578 1774 1772
+f 1783 1782 1781
+f 1597 1598 1770
+f 639 1878 3895
+f 3928 3929 1769
+f 1793 1792 1789
+f 3929 3920 1798
+f 3508 3516 3773
+f 1802 1867 3924
+f 1788 1790 1794
+f 1793 1794 1795
+f 3773 3772 1675
+f 1584 1581 1783
+f 1771 1797 3920
+f 1801 1799 1798
+f 1768 1769 1798
+f 1775 1796 1797
+f 508 514 515
+f 1796 1803 1802
+f 1800 1798 3920
+f 1794 1805 1804
+f 3532 1670 1752
+f 485 1808 1807
+f 3809 3844 1640
+f 3837 3844 1810
+f 485 482 1640
+f 1690 1691 3770
+f 487 488 1648
+f 3839 1813 1816
+f 3838 1811 1648
+f 3839 1812 1811
+f 1812 1809 487
+f 1641 1809 767
+f 465 466 755
+f 767 1815 3823
+f 3822 1816 3826
+f 1809 1812 1815
+f 3837 1653 1651
+f 675 672 3541
+f 1814 1649 1822
+f 1821 1822 3821
+f 1824 1822 1649
+f 1827 1849 1850
+f 1668 1669 1662
+f 1779 1780 1848
+f 1790 1827 1805
+f 1833 1836 1853
+f 3521 3506 693
+f 1614 1615 1616
+f 1852 1853 3498
+f 1620 1621 1622
+f 1855 1765 1574
+f 1847 1852 3501
+f 1766 1765 1855
+f 1629 1630 1631
+f 1633 1622 1634
+f 1635 1636 1637
+f 1848 1833 1852
+f 1855 1861 1863
+f 1803 1866 1867
+f 1862 1863 1831
+f 1782 1855 1862
+f 3922 3924 1865
+f 1873 1872 1870
+f 1874 1869 799
+f 2045 3878 3897
+f 1219 1345 1295
+f 1615 1614 1396
+f 3884 3895 1877
+f 645 3898 3786
+f 1345 1219 1035
+f 634 639 3884
+f 15 1887 1327
+f 3910 3895 1878
+f 1894 604 605
+f 3506 3551 627
+f 3560 640 645
+f 340 341 454
+f 1897 194 195
+f 1889 1882 1877
+f 1901 94 129
+f 196 749 754
+f 1705 1704 3921
+f 340 378 166
+f 1570 1480 563
+f 1904 1494 194
+f 1781 1862 1832
+f 1905 549 55
+f 1000 994 995
+f 986 919 920
+f 1191 1185 1190
+f 2094 2077 2075
+f 1920 1921 1919
+f 453 454 1912
+f 1915 1919 1918
+f 3882 2112 2106
+f 1655 3575 3531
+f 1916 1918 1926
+f 704 3554 3561
+f 278 1916 1925
+f 3565 3564 1818
+f 1097 949 1115
+f 1927 1928 727
+f 3578 3579 3654
+f 1920 310 307
+f 1929 307 48
+f 503 504 1497
+f 1902 1903 787
+f 1816 1821 3820
+f 1925 1926 1923
+f 1535 1569 1568
+f 254 252 258
+f 1509 1497 366
+f 119 86 35
+f 219 288 293
+f 141 46 47
+f 542 205 244
+f 1940 552 430
+f 1779 1828 1827
+f 1701 1710 3915
+f 1661 3550 3515
+f 270 53 754
+f 197 1947 749
+f 1727 1728 1212
+f 53 54 232
+f 1465 1468 1479
+f 1284 3862 3854
+f 1951 446 412
+f 168 398 1554
+f 1486 1485 206
+f 506 77 78
+f 994 1001 1007
+f 951 953 963
+f 1013 3859 3861
+f 3826 3820 1820
+f 1498 1909 1954
+f 299 298 543
+f 1754 1755 1756
+f 1681 469 1683
+f 1696 1694 1695
+f 13 14 738
+f 473 476 475
+f 482 1641 768
+f 204 62 30
+f 1761 1762 820
+f 611 1763 1125
+f 1501 1503 1908
+f 1074 1073 711
+f 1960 1944 82
+f 120 1956 1960
+f 35 86 87
+f 1773 1199 1025
+f 1965 1545 1569
+f 494 761 471
+f 1966 1967 1969
+f 446 1951 1971
+f 174 368 125
+f 1580 1579 1578
+f 1785 1786 1787
+f 1972 1546 304
+f 1254 876 875
+f 1973 341 342
+f 321 1912 1973
+f 1910 277 275
+f 509 1973 514
+f 1486 1507 1527
+f 1974 1874 804
+f 1977 1976 1974
+f 1980 448 1966
+f 379 1 2
+f 1718 1715 1710
+f 884 995 992
+f 1680 1684 1678
+f 902 990 525
+f 344 255 256
+f 135 138 137
+f 448 446 1967
+f 589 605 604
+f 910 909 908
+f 1981 1966 1968
+f 1982 1981 1987
+f 1988 1979 1987
+f 1990 1988 1989
+f 1991 1989 1993
+f 322 321 320
+f 992 828 830
+f 1144 1076 1086
+f 1995 1977 1978
+f 1133 1129 1118
+f 1997 1995 1994
+f 870 1823 1335
+f 805 806 1982
+f 727 184 2000
+f 1996 1994 1988
+f 1994 1978 1979
+f 1829 616 792
+f 1830 1073 941
+f 798 803 800
+f 729 732 731
+f 2011 2012 2023
+f 1834 1835 846
+f 402 283 284
+f 2022 2023 2024
+f 1838 718 989
+f 810 955 952
+f 1823 1839 1840
+f 1430 1426 1432
+f 1215 844 1841
+f 1316 1208 873
+f 2112 1401 1384
+f 1842 1843 1844
+f 1159 846 845
+f 837 27 1846
+f 1971 1951 2030
+f 517 369 370
+f 1471 2026 2027
+f 3551 3525 626
+f 2174 2175 770
+f 935 934 1854
+f 1613 1619 1611
+f 1231 1228 1225
+f 1856 1857 891
+f 1300 1309 748
+f 1150 1149 1858
+f 1643 606 696
+f 1215 1265 1264
+f 1320 1860 902
+f 1956 1943 1944
+f 2031 668 669
+f 2034 1325 1310
+f 1879 1880 1656
+f 40 245 242
+f 1992 1993 2039
+f 888 958 957
+f 1882 1889 1996
+f 1078 1138 1255
+f 1871 912 1192
+f 294 411 218
+f 1323 2041 2026
+f 123 1960 83
+f 1324 2036 2041
+f 384 358 359
+f 1324 1325 2038
+f 1883 1884 1885
+f 287 1540 114
+f 1096 1886 1134
+f 915 1329 931
+f 806 1980 1981
+f 3533 3529 584
+f 1425 2030 2042
+f 1018 2042 1948
+f 3562 1896 740
+f 1948 2042 649
+f 1881 1882 1990
+f 1900 129 130
+f 992 995 994
+f 923 927 925
+f 3878 2045 2047
+f 1643 2048 625
+f 2050 2052 2051
+f 2045 1405 1404
+f 3876 2047 945
+f 1139 1080 1079
+f 1037 1856 892
+f 2055 2053 2057
+f 1198 989 718
+f 1619 2058 1612
+f 2058 1605 1606
+f 2059 2060 1607
+f 2058 1619 1624
+f 71 72 179
+f 405 406 1557
+f 322 1548 1972
+f 2061 2052 2063
+f 2066 2065 2067
+f 2052 2061 2064
+f 1609 1610 2066
+f 1515 151 152
+f 1009 827 993
+f 1644 3843 3815
+f 279 1915 1916
+f 1285 3863 3862
+f 1647 2066 1825
+f 2056 2057 2067
+f 319 320 1953
+f 667 666 1913
+f 2070 631 625
+f 2052 2050 2055
+f 2038 1654 1655
+f 1606 1601 2073
+f 1235 1917 1270
+f 2072 2073 2075
+f 2059 1606 2072
+f 2074 2075 2077
+f 1601 1602 2078
+f 2049 2048 1643
+f 1000 1004 1001
+f 1089 943 684
+f 1931 1286 1306
+f 1462 1459 1021
+f 1932 1933 1934
+f 875 874 1846
+f 1936 1051 1937
+f 44 45 46
+f 1534 1955 1956
+f 1541 318 317
+f 2081 2082 2084
+f 146 1945 1959
+f 1360 1941 1942
+f 985 984 1032
+f 1995 1997 1892
+f 2079 2080 2082
+f 2095 2096 2076
+f 2081 2093 2078
+f 2055 2050 2054
+f 501 2096 2095
+f 2097 2100 2099
+f 2098 2099 2094
+f 2140 2141 3893
+f 2141 2046 3894
+f 2051 2070 2048
+f 592 2102 2101
+f 2071 2064 2076
+f 2102 2097 2098
+f 593 2103 2102
+f 1019 1895 746
+f 1950 11 12
+f 725 2071 2096
+f 1385 2105 2104
+f 1379 2104 2106
+f 2107 2108 2100
+f 3908 726 502
+f 3896 502 499
+f 940 1389 2109
+f 928 929 1170
+f 3868 590 684
+f 1962 1963 1964
+f 3878 680 573
+f 2039 1993 1968
+f 1389 1390 2110
+f 1398 2111 2113
+f 3757 3755 3753
+f 3911 3883 2107
+f 949 923 924
+f 260 241 324
+f 1390 1397 2113
+f 2070 2051 2064
+f 3867 3911 2103
+f 2105 1385 1421
+f 2114 1421 1423
+f 3892 2114 2115
+f 3908 2105 2114
+f 554 2116 1902
+f 972 1163 967
+f 1765 1593 1573
+f 1984 1985 1986
+f 2053 2117 2069
+f 3573 3556 724
+f 1625 1677 1674
+f 3881 3888 2119
+f 3903 2115 2123
+f 56 1954 1906
+f 626 3877 3879
+f 2001 2002 2003
+f 3912 3841 2127
+f 2006 2007 2008
+f 2127 1413 1412
+f 2014 2015 2016
+f 2018 2019 2020
+f 1495 1424 1947
+f 1379 1377 1386
+f 3814 3811 1680
+f 2029 1885 1884
+f 3902 2128 2120
+f 629 3879 3914
+f 682 683 943
+f 724 1650 2121
+f 1757 1819 3821
+f 765 1810 3844
+f 3851 714 715
+f 706 707 3870
+f 981 986 987
+f 1457 1868 1656
+f 2017 2016 2032
+f 1458 1655 1868
+f 1776 1777 1792
+f 1959 1945 1946
+f 3663 3715 3725
+f 3913 3881 2122
+f 1093 1089 689
+f 966 965 968
+f 3852 715 3881
+f 2172 3577 2170
+f 670 2138 2137
+f 3761 787 1903
+f 1429 19 28
+f 964 963 978
+f 3841 3813 1413
+f 1473 1474 669
+f 3834 3833 1359
+f 2138 798 1870
+f 1872 1473 2137
+f 715 716 3888
+f 1881 1991 1992
+f 3843 3842 739
+f 1837 1847 3513
+f 253 58 59
+f 1808 3837 3840
+f 1647 3832 3818
+f 3865 3866 671
+f 2112 3882 3883
+f 1405 2045 2044
+f 3829 3824 1364
+f 273 1541 391
+f 576 632 3520
+f 3875 3865 668
+f 1430 1431 2140
+f 2136 3850 3860
+f 1430 2139 18
+f 3893 3894 570
+f 1442 1439 2046
+f 1431 1442 2141
+f 3869 3913 3850
+f 3808 3823 1817
+f 3866 3893 695
+f 2142 2025 798
+f 571 2143 2142
+f 3819 3814 1618
+f 2143 2022 2025
+f 694 2142 2138
+f 681 1572 2144
+f 2144 2011 2022
+f 328 210 457
+f 1572 2010 2011
+f 630 3892 3903
+f 2085 2086 2087
+f 2090 2091 2092
+f 572 2144 2143
+f 1875 1976 2153
+f 1873 1875 2149
+f 2103 2107 2097
+f 2157 2043 2155
+f 2128 1412 1414
+f 3874 3871 589
+f 3872 3870 628
+f 2149 2153 2158
+f 2159 2158 3905
+f 698 3907 3902
+f 3823 3847 3827
+f 2158 2156 3874
+f 1438 2044 2046
+f 2035 2154 2160
+f 2161 2160 3889
+f 3871 3891 605
+f 3873 17 18
+f 370 29 61
+f 649 1951 413
+f 2153 2157 2156
+f 1428 1429 1449
+f 1490 76 73
+f 3890 3905 710
+f 2155 1891 3891
+f 1456 1457 1880
+f 2125 2088 2087
+f 2160 2159 3890
+f 300 1959 505
+f 1064 1062 1046
+f 2043 1892 1891
+f 2129 2130 2004
+f 2131 2132 2133
+f 3848 3849 1450
+f 1998 1975 804
+f 3879 673 3864
+f 474 492 490
+f 1164 1165 1182
+f 707 2162 3909
+f 708 1472 2162
+f 2162 2161 3886
+f 548 549 1905
+f 1145 1136 1114
+f 705 2121 1650
+f 2043 2157 1977
+f 737 738 554
+f 1034 1195 1191
+f 458 1905 1907
+f 195 196 232
+f 2040 2039 1969
+f 2145 2146 2147
+f 2054 2050 2049
+f 3832 1646 1639
+f 9 12 11
+f 252 364 506
+f 472 478 479
+f 1865 3924 3791
+f 900 903 910
+f 1941 2150 2151
+f 1921 1928 1927
+f 2163 1897 1898
+f 271 156 153
+f 1904 1326 12
+f 282 263 264
+f 1872 1873 2154
+f 1326 1904 1897
+f 628 692 693
+f 1724 1722 1719
+f 1549 1562 436
+f 1582 1583 1730
+f 1921 1920 1929
+f 3563 3511 10
+f 2152 2037 1950
+f 475 772 774
+f 769 2172 2171
+f 2171 2165 476
+f 476 773 772
+f 1791 565 464
+f 2166 2167 2168
+f 595 371 372
+f 483 1809 1641
+f 762 2174 2173
+f 771 483 484
+f 486 487 1809
+f 297 1549 1517
+f 763 2175 2174
+f 776 755 759
+f 184 185 2176
+f 412 447 303
+f 2005 2176 185
+f 3811 1681 1682
+f 2164 2170 756
+f 1472 2035 2161
+f 950 951 964
+f 2177 2178 2179
+f 227 226 1510
+f 2182 2183 2184
+f 2186 2019 2018
+f 1985 2182 2181
+f 214 736 729
+f 865 849 842
+f 849 865 1357
+f 559 3187 3188
+f 16 13 737
+f 560 553 498
+f 3193 3196 3195
+f 3200 3197 3198
+f 3211 3214 3213
+f 3215 3218 3217
+f 2187 2188 2002
+f 2169 2168 2189
+f 2192 2086 2085
+f 2193 2194 2195
+f 2198 2199 2200
+f 1998 1982 1979
+f 3664 3665 3668
+f 2116 1898 1903
+f 1859 2006 2009
+f 3904 2123 2126
+f 1135 1148 1087
+f 2201 2202 2015
+f 1149 111 110
+f 3221 3222 3218
+f 2203 2204 2205
+f 579 580 1895
+f 1755 2207 2208
+f 3891 3910 1893
+f 1211 2151 2150
+f 868 997 2210
+f 944 945 930
+f 2130 2129 2202
+f 233 1903 1898
+f 665 751 750
+f 1167 1254 999
+f 2211 2212 2213
+f 2215 2216 2217
+f 3867 593 590
+f 2220 221 220
+f 823 824 948
+f 3225 3220 3227
+f 266 204 455
+f 3882 2108 2107
+f 2221 2222 3519
+f 2223 2224 2225
+f 2207 2125 2124
+f 673 674 3869
+f 817 1000 996
+f 2227 2228 2146
+f 1947 1948 646
+f 3900 2135 17
+f 2229 1635 1638
+f 1679 1613 1618
+f 1637 2231 2232
+f 2233 2234 1617
+f 2231 2235 2236
+f 1381 1638 2238
+f 3586 2241 2242
+f 1630 2234 2233
+f 1681 1626 472
+f 1408 1407 2243
+f 3236 3239 3238
+f 3232 3241 3240
+f 1637 2230 2238
+f 3237 3238 3233
+f 1409 1408 1634
+f 1380 1383 2247
+f 1628 2248 2245
+f 2249 1623 1622
+f 1632 1634 1408
+f 2248 1628 1631
+f 2235 2231 1637
+f 3245 3246 3199
+f 631 2070 2071
+f 515 167 52
+f 782 781 2251
+f 2253 2254 2255
+f 397 281 459
+f 1456 2039 2040
+f 2040 2041 2036
+f 1401 2112 2111
+f 2041 2040 1970
+f 1535 121 414
+f 1586 1587 2080
+f 2256 983 1132
+f 1700 1699 3918
+f 3815 1896 1686
+f 1263 2256 1886
+f 598 1841 844
+f 2257 2258 2259
+f 2258 2261 2262
+f 2259 2262 2263
+f 2260 2259 2216
+f 1967 1971 1970
+f 2212 1964 1963
+f 922 921 2218
+f 1312 1098 1101
+f 1307 1306 1286
+f 1548 1569 1546
+f 2266 2267 2268
+f 2265 2268 2270
+f 2272 2265 2269
+f 2273 2266 2265
+f 2275 2276 2277
+f 2278 2274 2277
+f 2284 2285 3617
+f 2287 2283 2286
+f 2288 2289 2283
+f 2290 2284 3639
+f 2291 2292 2293
+f 2295 2291 2294
+f 2296 2297 2291
+f 2297 2298 2292
+f 2294 2293 2299
+f 2279 2294 2300
+f 2298 2301 2302
+f 2292 2302 2303
+f 2293 2303 2304
+f 2304 2305 2306
+f 2308 2309 2310
+f 2302 2307 2310
+f 2301 2311 2307
+f 2312 2308 2307
+f 2313 2314 2315
+f 2318 2319 2320
+f 3748 3771 2322
+f 2324 2325 2273
+f 2313 2324 2326
+f 2328 2324 2313
+f 2329 2325 2324
+f 2331 2332 2333
+f 2330 2333 2267
+f 2325 2330 2266
+f 2329 2331 2330
+f 2333 2334 2335
+f 2336 2334 2333
+f 2338 2336 2332
+f 2331 2339 2337
+f 2329 2340 2339
+f 2328 2341 2340
+f 2316 2342 2341
+f 2342 2316 2315
+f 2344 2345 2346
+f 2341 2344 2347
+f 2342 2348 2344
+f 2348 2349 2345
+f 2350 2351 2352
+f 2339 2350 2353
+f 2340 2347 2350
+f 2347 2346 2351
+f 2337 2353 2354
+f 2353 2352 2355
+f 2356 2277 2276
+f 2356 2357 2358
+f 2356 2359 2296
+f 2279 2277 2356
+f 2360 2361 2270
+f 2267 2335 2360
+f 2362 2363 2364
+f 2362 2365 2361
+f 2366 2362 2358
+f 2366 2367 2363
+f 2357 2276 2368
+f 2366 2368 2369
+f 2276 2275 2370
+f 2368 2370 2282
+f 2372 2373 2374
+f 2375 2376 2377
+f 2378 2379 2380
+f 2382 2378 2381
+f 2384 2385 2378
+f 2385 2386 2379
+f 2388 2389 2390
+f 2387 2390 2392
+f 2393 2394 2387
+f 2394 2395 2388
+f 2186 2401 2402
+f 2403 2404 2327
+f 2272 2403 2326
+f 2271 2405 2403
+f 2405 2406 2404
+f 2404 2407 2408
+f 2409 2410 2411
+f 2409 2412 2271
+f 2413 2409 2269
+f 2413 2414 2410
+f 2417 2415 2416
+f 2417 2418 2406
+f 2412 2417 2405
+f 2411 2415 2417
+f 2406 2419 2407
+f 3569 3520 632
+f 2420 2398 2397
+f 2420 2421 2423
+f 2424 2420 2422
+f 2424 2399 2398
+f 2425 2391 2392
+f 2421 2425 2426
+f 2397 2396 2425
+f 2396 2393 2391
+f 2427 2314 2408
+f 2430 2427 2428
+f 2343 2315 2427
+f 2432 2343 2430
+f 2431 2430 2429
+f 2435 2431 2433
+f 2436 2432 2431
+f 2428 2408 2407
+f 2422 2437 2407
+f 2438 2437 2422
+f 2429 2428 2437
+f 2440 2434 2433
+f 2441 2439 2433
+f 2442 2443 2439
+f 2443 2444 2440
+f 2445 2446 2444
+f 2447 2445 2443
+f 2383 2381 2445
+f 2381 2380 2446
+f 2448 2438 2423
+f 2449 2448 2426
+f 2441 2448 2449
+f 2441 2429 2438
+f 2449 2392 2390
+f 2450 2390 2389
+f 2447 2450 2451
+f 2442 2449 2450
+f 2451 2389 2453
+f 2452 2453 2455
+f 2382 2452 2454
+f 2383 2451 2452
+f 2456 2457 2373
+f 2459 2460 2461
+f 2463 2464 2465
+f 2462 2465 2285
+f 2466 2462 2284
+f 2467 2463 2462
+f 2461 2460 3703
+f 2468 2469 2470
+f 2472 2468 2471
+f 2474 2461 3701
+f 2475 2476 2477
+f 2458 2455 2453
+f 2479 2453 2389
+f 2395 2480 2479
+f 2480 2459 2458
+f 2481 2482 2483
+f 2484 113 2485
+f 2471 2470 2464
+f 2473 2471 2463
+f 2487 2467 2466
+f 2486 2466 2290
+f 2490 2486 2488
+f 2491 2487 2486
+f 2492 2473 2467
+f 2493 2492 2487
+f 2494 2495 2492
+f 2495 2472 2473
+f 3690 2496 2474
+f 3691 2497 2496
+f 2498 2494 2493
+f 2500 2498 2499
+f 3700 3687 2498
+f 3687 3691 2494
+f 2306 2504 2278
+f 2305 2505 2504
+f 2504 2505 2507
+f 2506 2507 2288
+f 2274 2506 2508
+f 2278 2504 2506
+f 3633 3632 2489
+f 3620 3633 2488
+f 2507 2509 2289
+f 2505 2510 2509
+f 2508 2288 2287
+f 2511 2287 2281
+f 2370 2511 2280
+f 2275 2508 2511
+f 2512 2490 2489
+f 2514 2512 2513
+f 2517 2512 2514
+f 2517 2491 2490
+f 2499 2493 2491
+f 2499 2517 2516
+f 2510 2505 2519
+f 2518 2519 2521
+f 3616 2518 2520
+f 3632 2510 2518
+f 2522 2304 2303
+f 2523 2522 2310
+f 2305 2304 2522
+f 2524 2522 2523
+f 2519 2524 2525
+f 2505 2305 2524
+f 2308 2312 2527
+f 2526 2527 2529
+f 2531 2526 2528
+f 2309 2308 2526
+f 1214 2532 2151
+f 2419 2406 2418
+f 2533 2345 2349
+f 2534 2533 2317
+f 2535 2536 2533
+f 2536 2346 2345
+f 2537 2351 2346
+f 2538 2537 2536
+f 2539 2540 2537
+f 2540 2352 2351
+f 2312 2355 2541
+f 2527 2541 2542
+f 2543 2435 2434
+f 2545 2543 2544
+f 2319 2318 2543
+f 2318 2436 2435
+f 2514 2515 2548
+f 2547 2548 2550
+f 2552 2547 2549
+f 2552 2516 2514
+f 2520 2521 2554
+f 2553 2554 2556
+f 3590 2553 2555
+f 3621 2520 2553
+f 2523 2309 2531
+f 2557 2531 2530
+f 2525 2523 2557
+f 2559 2557 2558
+f 2554 2559 2560
+f 2521 2525 2559
+f 2561 2375 2007
+f 2376 2562 2225
+f 2564 2565 2566
+f 2563 2566 338
+f 2569 2563 2567
+f 2570 2564 2563
+f 2572 2573 2574
+f 2571 2574 2576
+f 2544 2571 2575
+f 2544 2434 2572
+f 2578 2570 2569
+f 2579 2577 2569
+f 2574 2577 2579
+f 2573 2578 2577
+f 2581 2582 2583
+f 2584 2580 2583
+f 3697 3675 2580
+f 3676 2581 2580
+f 2582 2516 2552
+f 2583 2552 2551
+f 2500 2501 2589
+f 2590 2588 2589
+f 3693 3695 2588
+f 3695 3700 2500
+f 2501 2516 2591
+f 2592 2593 2594
+f 2386 2385 2503
+f 2596 2379 2386
+f 2596 2374 2373
+f 2599 2596 2597
+f 2599 2380 2379
+f 2600 2446 2380
+f 2600 2599 2598
+f 2603 2600 2601
+f 2603 2444 2446
+f 2604 2603 2602
+f 2440 2604 2605
+f 2444 2603 2604
+f 2606 2582 2581
+f 2606 2607 2590
+f 2591 2516 2582
+f 3676 2587 2456
+f 3689 2456 2372
+f 2478 2477 2608
+f 2609 2610 2611
+f 2564 2570 2613
+f 2612 2613 2598
+f 2457 2612 2597
+f 2565 2564 2612
+f 2614 2613 2570
+f 2615 2614 2578
+f 2602 2601 2614
+f 2601 2598 2613
+f 2572 2434 2605
+f 2616 2605 2602
+f 2573 2572 2616
+f 1459 1016 1045
+f 1265 1215 1079
+f 1612 1608 1609
+f 2067 2057 2069
+f 1135 1109 1136
+f 1938 1937 2618
+f 95 548 458
+f 2058 1607 1608
+f 1454 1469 1445
+f 655 2620 1048
+f 722 1049 2621
+f 1131 985 1033
+f 562 1481 366
+f 1723 1719 1725
+f 1786 1123 1122
+f 267 300 323
+f 1325 2034 1654
+f 1468 1165 1435
+f 658 657 529
+f 8 532 247
+f 382 1557 1559
+f 788 619 1307
+f 940 930 929
+f 911 2622 2623
+f 2621 2620 655
+f 2624 1857 2625
+f 998 1846 27
+f 635 638 2626
+f 2627 641 644
+f 2628 2627 1840
+f 1200 1199 2627
+f 1199 1773 641
+f 1520 515 49
+f 1452 1350 1349
+f 2630 2631 1859
+f 644 643 1335
+f 1197 1198 717
+f 2632 717 719
+f 2628 2632 1166
+f 2629 1197 2632
+f 1762 2633 1050
+f 1823 870 873
+f 2634 2635 794
+f 1042 794 2635
+f 93 94 1901
+f 2036 2038 1458
+f 1527 332 1525
+f 877 876 1274
+f 134 409 797
+f 1328 893 931
+f 607 777 1091
+f 1043 2635 2637
+f 2636 2637 2639
+f 1275 2636 2638
+f 1275 1040 1043
+f 1272 2624 2626
+f 2640 2626 638
+f 1110 2641 2640
+f 536 1272 2640
+f 1472 708 1474
+f 2042 2030 1951
+f 1058 1106 1105
+f 2642 2623 2643
+f 1192 2642 2644
+f 912 2623 2642
+f 3898 1657 3526
+f 1049 1048 2620
+f 616 1829 1885
+f 1331 1913 666
+f 990 989 1198
+f 1911 1912 321
+f 1298 1282 1205
+f 76 1490 1504
+f 1502 1500 421
+f 610 609 658
+f 1146 937 659
+f 2646 911 914
+f 1280 1239 1238
+f 1453 1454 1452
+f 835 942 1843
+f 1271 1270 1173
+f 1893 1878 1826
+f 1050 2633 1937
+f 2651 2652 814
+f 1006 1053 1281
+f 2653 1084 2654
+f 1001 1004 1003
+f 585 997 679
+f 2624 1272 621
+f 555 678 2028
+f 1207 526 1197
+f 1626 1627 478
+f 426 389 419
+f 1030 1193 1773
+f 3898 645 634
+f 534 1533 138
+f 1125 1092 1042
+f 333 228 88
+f 569 1570 561
+f 1489 2656 871
+f 1857 2624 620
+f 1673 1674 3770
+f 622 861 864
+f 619 788 891
+f 261 544 545
+f 393 1555 1556
+f 2658 2659 2647
+f 1075 2658 2657
+f 1081 1080 2658
+f 1080 1139 2659
+f 2644 2643 1141
+f 2660 1141 1140
+f 642 2660 1336
+f 1193 2644 2660
+f 679 678 555
+f 1503 1501 1500
+f 1318 1317 1101
+f 2661 2659 1139
+f 2661 1142 2663
+f 2662 2647 2659
+f 2662 2663 2622
+f 2664 2622 911
+f 2648 2664 2646
+f 2648 2647 2662
+f 2083 2094 2093
+f 1442 1440 1437
+f 519 329 330
+f 1885 2029 1209
+f 799 800 805
+f 1946 1943 1952
+f 1302 1301 1253
+f 566 2005 224
+f 1389 940 928
+f 683 682 681
+f 836 835 1842
+f 2665 1842 1845
+f 2666 2665 1303
+f 24 836 2665
+f 831 1072 2650
+f 1845 1844 1304
+f 2010 2013 2012
+f 1051 1936 2667
+f 1522 1521 1511
+f 531 440 441
+f 1250 635 2625
+f 2669 2625 1857
+f 783 2669 1856
+f 782 1250 2669
+f 720 2670 2671
+f 1055 1931 1930
+f 1333 1067 1251
+f 619 622 1308
+f 652 655 2619
+f 521 522 1539
+f 1838 988 537
+f 2672 537 536
+f 1113 2672 2641
+f 1113 527 1838
+f 1016 1013 1351
+f 3897 573 570
+f 226 227 104
+f 3564 3669 3827
+f 1036 1037 1884
+f 2674 1301 1304
+f 2673 1304 1844
+f 1334 2673 2675
+f 1333 2674 2673
+f 2676 702 1039
+f 1843 2676 2675
+f 942 1074 2676
+f 1074 699 702
+f 752 1057 1055
+f 1200 1166 656
+f 1120 1846 874
+f 2677 874 877
+f 1241 2677 1052
+f 1119 1120 2677
+f 1560 1513 151
+f 1208 1316 611
+f 215 735 736
+f 1460 1459 1462
+f 1208 1207 1839
+f 854 856 1282
+f 72 445 182
+f 544 1563 1565
+f 217 200 456
+f 2047 1404 930
+f 1204 2618 1937
+f 913 777 686
+f 840 527 1113
+f 1531 729 730
+f 2678 778 1283
+f 601 667 1914
+f 1537 1538 459
+f 1819 1757 3509
+f 991 1860 862
+f 2679 711 1073
+f 2680 1830 983
+f 779 2680 2256
+f 778 2678 2680
+f 2678 2679 1830
+f 180 542 245
+f 1886 1132 1131
+f 1138 1137 1274
+f 1841 598 1140
+f 75 313 314
+f 193 423 420
+f 2681 2682 1099
+f 170 1567 1565
+f 349 416 417
+f 1121 1124 2684
+f 547 797 409
+f 123 122 121
+f 2643 2663 1142
+f 1542 1544 513
+f 899 900 907
+f 1210 677 676
+f 791 663 982
+f 24 27 837
+f 893 896 932
+f 1860 1320 863
+f 535 538 862
+f 1569 1548 1547
+f 181 455 542
+f 660 659 937
+f 2013 567 568
+f 887 890 2631
+f 1524 1508 361
+f 520 161 162
+f 2653 855 854
+f 1238 1237 2655
+f 718 1838 527
+f 1492 1493 518
+f 792 616 615
+f 599 846 1835
+f 290 457 456
+f 312 171 1552
+f 1248 2687 1179
+f 153 149 177
+f 1240 1181 1222
+f 1257 1292 1235
+f 1291 1084 1083
+f 1052 877 1277
+f 743 742 2618
+f 444 140 430
+f 23 1224 1172
+f 2687 2654 1180
+f 102 103 409
+f 1283 607 610
+f 373 497 650
+f 1180 2654 1084
+f 855 2653 2655
+f 1173 1270 1917
+f 2689 850 853
+f 2688 853 1108
+f 2686 2688 1279
+f 2687 2689 2688
+f 2691 1302 1252
+f 2690 1252 850
+f 1248 2690 2689
+f 1248 1247 2691
+f 2692 617 24
+f 2691 2692 2666
+f 1247 1258 2692
+f 1258 1168 617
+f 394 1554 1555
+f 2693 1260 1259
+f 1072 1236 1262
+f 1257 1071 1168
+f 398 439 1555
+f 871 1784 1314
+f 1178 1232 1249
+f 1224 1289 1173
+f 1010 2617 2618
+f 1175 1174 1292
+f 2654 2687 2686
+f 2694 1157 1216
+f 71 418 1487
+f 701 1112 1111
+f 1072 1261 2649
+f 1085 2695 22
+f 895 2694 1264
+f 1601 1606 1605
+f 1259 1262 1271
+f 21 20 1220
+f 988 991 538
+f 598 597 1336
+f 1050 860 818
+f 1298 2645 1761
+f 1168 1167 618
+f 858 1168 1256
+f 39 242 516
+f 2652 1761 819
+f 313 316 315
+f 2026 1970 1971
+f 2656 1489 596
+f 814 819 818
+f 2696 2651 813
+f 1260 2696 2649
+f 2693 2697 2696
+f 2697 1296 2651
+f 381 355 390
+f 3818 3819 1611
+f 1475 1427 1428
+f 1292 1174 1917
+f 1551 353 354
+f 1526 1525 1523
+f 1593 1589 1595
+f 1232 1269 1246
+f 2695 2693 1223
+f 2698 1297 1296
+f 2698 2697 2693
+f 2685 2698 2695
+f 2685 854 1297
+f 562 1497 271
+f 2652 2651 1296
+f 1365 1366 1364
+f 1958 1946 1953
+f 585 588 2210
+f 1561 1560 1515
+f 975 2648 1146
+f 347 67 68
+f 1038 1111 636
+f 2700 781 780
+f 2699 780 1036
+f 2701 2699 1883
+f 851 2700 2699
+f 2674 1333 2251
+f 2702 2251 781
+f 1253 2702 2700
+f 1301 2674 2702
+f 1519 387 388
+f 1273 1239 1280
+f 2703 1280 1107
+f 2701 2703 852
+f 1829 1273 2703
+f 91 260 326
+f 1784 871 2656
+f 2683 2684 2705
+f 604 1894 3499
+f 1669 3534 3548
+f 2062 2063 2060
+f 621 535 861
+f 2023 731 732
+f 686 779 1263
+f 857 1051 2668
+f 883 830 882
+f 637 636 1111
+f 2668 894 893
+f 2667 921 894
+f 1039 1038 1068
+f 313 75 1504
+f 1911 1972 305
+f 1936 2706 1964
+f 2707 2402 2401
+f 2708 2707 2183
+f 738 2163 2116
+f 2349 2348 2432
+f 2711 2712 2020
+f 2400 2711 2019
+f 2714 2715 2716
+f 2717 2718 2719
+f 2721 2722 2723
+f 2726 2727 2728
+f 2725 2728 2730
+f 2731 2725 2729
+f 2732 2726 2725
+f 2734 2735 2736
+f 2738 2739 2740
+f 2737 2740 2741
+f 2744 2745 2746
+f 2745 2744 2748
+f 2749 2750 2747
+f 2753 2749 2751
+f 2250 1380 2246
+f 2754 1396 1614
+f 3598 2754 2755
+f 3608 2745 2747
+f 1617 2756 2755
+f 2242 2758 1620
+f 2191 2085 2760
+f 2761 2759 2760
+f 3648 2759 2761
+f 3623 2191 2759
+f 3209 3248 3223
+f 1395 1394 2236
+f 1395 2235 1636
+f 1616 1635 2229
+f 1617 2234 2763
+f 2756 2763 3628
+f 2765 2763 2234
+f 2766 2764 3628
+f 3202 3199 3246
+f 1516 437 435
+f 2768 2769 3552
+f 1495 541 578
+f 2135 2136 28
+f 3526 3540 2773
+f 1690 1689 3926
+f 2778 2779 2780
+f 2777 2780 2781
+f 2784 2778 2777
+f 3555 3521 575
+f 1494 1904 9
+f 3529 2016 2015
+f 451 61 505
+f 2786 2787 2788
+f 2779 2786 2785
+f 2789 2752 2741
+f 2790 2753 2752
+f 2792 2793 2794
+f 2793 2795 2796
+f 2796 2797 2750
+f 2794 2796 2749
+f 2798 1393 1396
+f 3591 2798 2754
+f 2798 2799 2800
+f 3591 3662 2799
+f 2797 2796 2795
+f 2791 2794 2753
+f 216 233 232
+f 3804 2806 2807
+f 3805 2809 2804
+f 2769 2810 3557
+f 183 185 184
+f 2811 2812 2775
+f 2804 2811 2776
+f 2809 2813 2811
+f 2812 2811 2813
+f 3805 2808 2816
+f 3796 2818 2815
+f 3798 3800 2820
+f 3793 2823 2819
+f 2824 2736 2735
+f 2823 2825 2824
+f 2825 2823 2826
+f 3793 2822 2827
+f 3796 2817 2826
+f 2713 2716 2829
+f 2828 2829 2831
+f 2822 2821 2828
+f 2821 2820 2713
+f 2832 2827 2822
+f 2833 2832 2830
+f 2834 2835 2832
+f 2835 2818 2827
+f 2837 2838 2839
+f 2836 2839 2841
+f 2842 2836 2840
+f 2843 2837 2836
+f 2840 2841 2845
+f 2718 2844 2845
+f 2730 2728 2844
+f 2727 2840 2844
+f 2846 2845 2841
+f 2829 2846 2847
+f 2847 2841 2839
+f 2848 2839 2838
+f 2833 2848 2849
+f 2831 2847 2848
+f 2850 2814 2851
+f 2853 2850 2852
+f 2783 2777 2782
+f 2785 2788 2853
+f 2780 2785 2856
+f 2857 2858 2855
+f 2859 2857 2782
+f 2861 2843 2842
+f 2860 2842 2727
+f 2862 2860 2726
+f 2863 2861 2860
+f 2861 2863 2858
+f 2843 2861 2857
+f 2856 2853 2854
+f 2781 2856 2864
+f 2837 2843 2859
+f 2838 2837 2864
+f 2852 2851 2834
+f 2854 2852 2849
+f 2865 2815 2818
+f 2851 2865 2835
+f 2813 2809 2815
+f 2814 2813 2865
+f 2866 2812 2814
+f 2788 2866 2850
+f 2866 2788 2787
+f 2812 2866 2867
+f 1892 1889 1890
+f 2720 2719 2846
+f 298 299 256
+f 2872 2714 2713
+f 3794 3800 2735
+f 2729 2730 2874
+f 2846 2716 2715
+f 2876 2877 2878
+f 2733 2875 2878
+f 3806 3807 2879
+f 3806 2880 2872
+f 2882 2883 2884
+f 2881 2884 2715
+f 2880 2881 2714
+f 2879 2882 2881
+f 2885 2886 2887
+f 2888 2889 2890
+f 2885 2890 2892
+f 2893 2886 2885
+f 1942 2151 2532
+f 2895 2892 2720
+f 2884 2895 2870
+f 2883 2896 2895
+f 2896 2891 2892
+f 2889 2874 2717
+f 2890 2717 2720
+f 2897 2893 2891
+f 2898 2897 2896
+f 2899 2898 2883
+f 2900 2899 2882
+f 3797 2900 2879
+f 2876 2902 2901
+f 2901 2902 2904
+f 1785 2656 1835
+f 2905 2906 2899
+f 2907 2908 2897
+f 3797 2901 2903
+f 632 576 577
+f 3532 3547 2867
+f 434 385 550
+f 2768 2770 3640
+f 2910 2911 1623
+f 2241 2912 2758
+f 3640 2770 2913
+f 1623 2911 2757
+f 3587 2242 2757
+f 2229 1381 1380
+f 2765 1630 1629
+f 2915 2914 1629
+f 3642 3641 2914
+f 3641 3622 2765
+f 2919 2916 3642
+f 2918 2915 2912
+f 3614 2918 2241
+f 3618 2919 3613
+f 3541 2177 2180
+f 3521 2002 2188
+f 3705 3573 714
+f 2925 2766 2917
+f 2924 2917 2916
+f 3610 2924 2919
+f 3611 2925 2924
+f 2926 2927 2766
+f 3658 2926 2925
+f 1715 1713 1705
+f 785 497 498
+f 1387 651 565
+f 374 375 1899
+f 3573 2203 2206
+f 2929 2930 2886
+f 2932 2933 2934
+f 1905 1906 1908
+f 2930 2935 2936
+f 2938 2935 2930
+f 1493 369 517
+f 1603 1585 1586
+f 2939 2938 2929
+f 2940 2939 2908
+f 3665 1707 1600
+f 2886 2930 2937
+f 2906 2907 2898
+f 2908 2929 2893
+f 2942 2934 2933
+f 2941 2933 2937
+f 3501 2941 2936
+f 3498 2942 2941
+f 2943 2944 2710
+f 2943 2873 2874
+f 2945 2943 2889
+f 2946 2944 2943
+f 2932 2945 2888
+f 2931 2946 2945
+f 727 1928 1929
+f 2903 2904 2947
+f 545 594 42
+f 1480 1570 533
+f 2907 2906 2949
+f 2906 2905 2950
+f 1558 452 215
+f 3799 2903 2948
+f 2054 1645 1644
+f 3705 3684 2203
+f 1828 1837 1849
+f 2953 2954 2955
+f 2952 2955 2957
+f 2956 2261 2258
+f 2953 2952 2258
+f 2454 2455 2474
+f 3747 2958 2959
+f 3747 2132 2131
+f 2961 2962 2963
+f 2958 2963 2964
+f 222 221 2485
+f 2477 2226 2966
+f 2631 2561 2006
+f 2384 2454 2496
+f 2967 2968 2959
+f 2968 2967 2970
+f 3759 2033 2971
+f 3784 2971 2960
+f 2972 2971 2033
+f 2007 2377 2973
+f 3560 3559 2134
+f 1836 1833 1832
+f 3506 2188 2090
+f 955 810 959
+f 672 675 674
+f 633 710 713
+f 2024 732 733
+f 2025 2024 803
+f 425 1552 1550
+f 2975 2976 888
+f 1319 1158 1157
+f 1212 1935 2977
+f 889 957 2228
+f 1978 1974 1975
+f 1138 1078 1329
+f 751 665 2979
+f 1619 1613 1625
+f 2252 2255 1728
+f 1157 1160 845
+f 2706 1936 1938
+f 2980 2255 2254
+f 2982 2975 887
+f 802 712 711
+f 678 677 2029
+f 527 840 690
+f 1839 1207 2629
+f 2983 2984 602
+f 2983 1914 1913
+f 2985 2983 1331
+f 2986 2984 2983
+f 2987 2637 2635
+f 2985 2988 2639
+f 2990 1319 1318
+f 2991 2992 2993
+f 2995 2991 2994
+f 1106 1058 2991
+f 1058 1057 2992
+f 1415 1406 1365
+f 1317 922 2217
+f 1932 1728 2255
+f 926 2998 2999
+f 3001 2993 2992
+f 3000 2992 1057
+f 2999 3000 2978
+f 2998 3001 3000
+f 3002 2998 2209
+f 3002 3003 2996
+f 2993 3001 3002
+f 1103 1106 2995
+f 3004 2995 2996
+f 2209 2210 3004
+f 588 1103 3004
+f 295 296 433
+f 2638 2639 2988
+f 3005 2988 1056
+f 1305 3005 1930
+f 1276 2638 3005
+f 1987 1968 1993
+f 2633 1762 1761
+f 1423 1422 1417
+f 3561 3517 739
+f 2671 2670 2704
+f 1473 1872 2035
+f 922 1317 2694
+f 1401 1400 1378
+f 556 2028 1884
+f 842 848 3825
+f 1854 2706 2617
+f 933 1082 1081
+f 1440 1441 1436
+f 529 657 719
+f 1031 1025 1027
+f 777 913 912
+f 2645 1205 1204
+f 441 438 1538
+f 790 1287 587
+f 2065 2066 1610
+f 1860 991 990
+f 60 106 87
+f 1424 1017 1948
+f 332 1527 1507
+f 261 262 138
+f 187 188 449
+f 1719 1720 1726
+f 1931 1055 1059
+f 3006 1059 1105
+f 1287 3006 1104
+f 1286 1931 3006
+f 3007 3008 3009
+f 3009 3008 653
+f 618 999 26
+f 2029 677 1210
+f 957 601 600
+f 1049 2146 2228
+f 2631 890 3011
+f 890 889 2227
+f 3011 2227 2145
+f 2561 3011 3012
+f 1026 656 609
+f 2706 1854 1961
+f 3013 2965 2485
+f 2148 2147 1267
+f 3015 1267 1266
+f 2997 2264 1101
+f 642 641 1773
+f 186 477 461
+f 1041 1040 1320
+f 1121 3016 3017
+f 2984 2986 2987
+f 3018 2987 2634
+f 1048 603 3018
+f 603 602 2984
+f 2679 2678 801
+f 2062 2072 2074
+f 2061 2074 2076
+f 414 303 1545
+f 3016 1121 2683
+f 3019 2683 2704
+f 3020 3019 2670
+f 3007 3016 3019
+f 1871 1090 1091
+f 3021 1787 1122
+f 3022 3021 3017
+f 1314 3021 3022
+f 1784 1787 3021
+f 1316 1315 1763
+f 3023 3022 3010
+f 3023 3009 795
+f 1763 3023 1092
+f 1315 3022 3023
+f 3016 3007 3010
+f 752 751 2978
+f 2986 2639 2637
+f 3024 654 653
+f 3020 3024 3008
+f 720 723 3024
+f 723 2621 654
+f 2681 1098 1268
+f 2671 3025 1268
+f 2705 2681 3025
+f 1313 3026 3027
+f 3026 3028 3029
+f 3027 3029 3030
+f 1311 3027 3031
+f 2012 568 731
+f 917 2657 974
+f 743 1206 856
+f 1888 2027 2030
+f 2682 2705 2684
+f 3032 2684 1124
+f 1100 3032 2989
+f 1099 2682 3032
+f 1123 2990 2989
+f 3033 1834 1159
+f 2990 3033 1158
+f 1123 1786 3033
+f 1786 1785 1834
+f 1054 1056 2988
+f 2979 665 664
+f 2623 2622 2663
+f 1095 660 688
+f 3034 3035 2261
+f 3036 3034 2956
+f 3037 3038 3034
+f 3038 3039 3035
+f 3040 3041 3026
+f 2263 3042 3041
+f 3042 3039 3043
+f 3041 3043 3028
+f 3044 3045 3043
+f 3046 3047 3045
+f 3047 3048 3049
+f 3043 3045 3049
+f 3038 3037 3051
+f 3050 3051 3053
+f 3052 3046 3044
+f 3039 3038 3050
+f 3054 3035 3039
+f 3042 2263 2262
+f 2261 3035 3054
+f 2216 2263 2997
+f 2212 2211 2667
+f 3056 868 2209
+f 934 3057 1961
+f 2013 3755 3757
+f 3058 3059 3060
+f 3059 3062 3063
+f 3060 3063 2213
+f 3061 3060 1963
+f 3063 3064 2214
+f 3062 3065 3064
+f 3065 2257 2260
+f 3064 2260 2215
+f 3066 3067 3031
+f 2608 2966 3067
+f 2966 2562 3068
+f 3031 3067 3068
+f 2455 2458 2461
+f 2476 2223 2226
+f 3551 2090 2089
+f 3526 2774 3769
+f 433 354 355
+f 2115 1423 1419
+f 3509 2868 2922
+f 3769 3737 2131
+f 3069 2611 2167
+f 2971 2972 2928
+f 2961 2774 2773
+f 3070 2475 2478
+f 2610 2609 3071
+f 2502 2371 2374
+f 3072 2592 2595
+f 2964 3046 3052
+f 3048 2593 3074
+f 2593 2592 3075
+f 3075 3030 3029
+f 3074 3029 3028
+f 3072 3076 3075
+f 3076 3072 2482
+f 2481 2608 3066
+f 3076 3066 3030
+f 2967 3052 3053
+f 2482 3072 3073
+f 479 480 493
+f 706 3900 3873
+f 3561 2194 2221
+f 2013 2010 3754
+f 3533 2032 2016
+f 3562 3519 2222
+f 1171 1154 1155
+f 2586 2566 2565
+f 2982 110 113
+f 2562 2376 3077
+f 2376 2375 3012
+f 3012 2145 2148
+f 3077 2148 3015
+f 2963 3047 3046
+f 3051 3078 2970
+f 1683 2173 491
+f 3850 2122 1447
+f 1986 2181 3734
+f 3079 3080 2546
+f 3081 3079 2575
+f 1983 1986 3768
+f 3082 2323 2319
+f 3080 3082 2545
+f 3734 3732 3082
+f 3732 3748 2323
+f 1309 1471 1888
+f 3073 2595 2772
+f 2760 3083 3084
+f 2760 2085 2088
+f 3085 3084 3086
+f 3089 3085 3090
+f 3089 2762 3084
+f 3092 3088 3090
+f 3093 2748 2744
+f 2751 2747 2748
+f 3093 2741 2752
+f 2742 2741 3093
+f 3094 3093 2743
+f 3096 3097 3098
+f 3099 3096 3087
+f 3097 3094 3095
+f 3100 2743 2746
+f 3101 2927 3102
+f 2764 2766 2927
+f 2745 2764 3101
+f 3102 3092 3091
+f 3100 3091 3095
+f 2869 2802 3592
+f 3102 3103 3104
+f 3658 2868 2869
+f 2927 2926 3103
+f 3098 3095 3091
+f 3087 3098 3090
+f 2088 2125 3105
+f 3083 3105 3086
+f 2483 3073 2771
+f 2401 2186 2185
+f 3099 3086 3105
+f 2125 2207 3106
+f 3108 2737 2742
+f 3107 2742 3094
+f 3110 3108 3107
+f 3109 3107 3097
+f 3111 3109 3096
+f 3112 3110 3109
+f 3113 2738 2737
+f 3115 2723 2722
+f 3117 3115 3114
+f 3119 3117 3116
+f 3118 3116 3120
+f 3116 3114 3113
+f 3122 3112 3111
+f 3122 3123 1755
+f 3124 3125 3126
+f 3125 3118 3121
+f 3128 3119 3118
+f 3129 3128 3125
+f 2020 2712 3128
+f 3853 3857 1346
+f 1065 1046 1047
+f 1814 3838 3830
+f 2021 2020 3129
+f 3111 3099 3106
+f 3123 3106 2207
+f 2377 2225 2224
+f 3014 2485 113
+f 1756 2208 3782
+f 3131 3132 2530
+f 3133 3131 2528
+f 1753 1756 3767
+f 3134 3124 3127
+f 2327 2408 2314
+f 2436 2318 2317
+f 2018 2021 3711
+f 2534 3136 3137
+f 2320 2322 3136
+f 2185 2018 3712
+f 3138 3139 2542
+f 3134 3135 3744
+f 3785 3130 3134
+f 3140 3138 2539
+f 2535 3137 3140
+f 3711 2021 3130
+f 2558 2530 3132
+f 3782 3760 3141
+f 2560 2558 3141
+f 3760 3777 3142
+f 958 888 2976
+f 1933 1932 2980
+f 2399 2424 2418
+f 2348 2342 2343
+f 2322 2320 2319
+f 2718 2717 2874
+f 2709 2729 2873
+f 2894 2871 1361
+f 346 289 176
+f 1876 1877 1882
+f 2802 2803 3607
+f 3104 3144 3088
+f 2974 2222 3649
+f 3089 3145 2761
+f 3088 3144 3145
+f 2803 2974 3603
+f 3870 3909 692
+f 2123 1419 1418
+f 1365 1363 1368
+f 1581 1766 1782
+f 3569 2202 2129
+f 3555 2003 2002
+f 3146 2585 2551
+f 3751 3146 3147
+f 3742 3149 3146
+f 3149 2584 2585
+f 3196 3198 3197
+f 3604 2192 2191
+f 3142 3150 2556
+f 3777 3778 3150
+f 3646 3152 2550
+f 3150 3151 2555
+f 3778 3780 3151
+f 3788 3781 3152
+f 3153 3147 2551
+f 3152 3153 2549
+f 3781 3776 3153
+f 3776 3750 3147
+f 3148 2767 2206
+f 2183 2401 2321
+f 2132 2960 2928
+f 3684 3685 2204
+f 3719 2198 2197
+f 2594 2962 2773
+f 2595 2594 2951
+f 1870 798 799
+f 3713 2179 2178
+f 3706 2178 3736
+f 336 3154 3774
+f 339 3155 3154
+f 3789 3683 3149
+f 3683 3697 2584
+f 2963 2962 3048
+f 3722 3730 337
+f 339 338 2566
+f 3156 3157 2168
+f 2610 3156 2167
+f 2954 2953 3156
+f 3157 3156 2953
+f 3707 2166 2169
+f 2567 3158 3159
+f 337 3158 2567
+f 3730 3069 2166
+f 3764 2190 1983
+f 2579 3160 3081
+f 2568 3159 3160
+f 3709 2169 2190
+f 3065 3062 3162
+f 3161 3162 1984
+f 3157 3161 2189
+f 2257 3065 3161
+f 734 735 1922
+f 3059 3058 2708
+f 3163 2708 2182
+f 3162 3163 1985
+f 3062 3059 3163
+f 3203 3206 3205
+f 2122 2119 1448
+f 3817 1686 1685
+f 139 140 444
+f 1570 569 566
+f 2957 3164 3165
+f 2955 3166 3164
+f 1912 454 341
+f 3130 3129 3124
+f 1548 322 319
+f 1949 1950 1327
+f 2969 2970 3078
+f 3167 3078 3169
+f 3724 2014 2017
+f 3720 2017 2033
+f 3168 3169 3165
+f 3170 3165 3164
+f 3779 3170 3171
+f 3724 3168 3170
+f 3171 3164 3166
+f 3172 3166 3071
+f 3752 3172 3173
+f 3756 3171 3172
+f 3169 3037 3036
+f 2593 3048 2962
+f 3173 3071 2609
+f 3174 2609 2199
+f 3783 3174 2198
+f 3749 3173 3174
+f 2190 2189 1984
+f 2196 2195 2767
+f 2119 2120 1414
+f 1464 1463 1466
+f 2199 2611 3069
+f 2457 2456 2587
+f 2226 2225 2562
+f 481 478 1627
+f 333 1512 235
+f 3078 3051 3037
+f 3040 2264 2997
+f 2219 1934 1933
+f 2385 2384 2497
+f 221 2220 3175
+f 2220 1933 3143
+f 3175 3143 2976
+f 2484 3175 2975
+f 2954 3071 3166
+f 2630 1858 110
+f 1547 319 1952
+f 2091 2090 2188
+f 3733 3736 2178
+f 1918 1919 1927
+f 3563 112 111
+f 955 956 960
+f 2037 3014 112
+f 3143 2980 2981
+f 2218 2211 2214
+f 672 3877 3787
+f 2152 3013 3014
+f 1147 1144 1143
+f 2248 2246 1409
+f 1615 1636 1635
+f 1410 1409 2246
+f 3241 3243 3242
+f 1382 3176 3177
+f 3177 1410 2247
+f 2247 1383 3177
+f 3176 2238 2230
+f 1410 3177 3179
+f 3179 3180 2243
+f 1633 3181 2800
+f 1621 1620 2758
+f 1622 1621 2245
+f 2236 1394 3181
+f 2237 2236 3182
+f 2250 1631 2233
+f 2912 1629 1628
+f 135 136 1564
+f 1567 419 512
+f 1085 1084 2653
+f 3184 869 868
+f 869 3184 3057
+f 1266 1268 1098
+f 1267 2147 722
+f 937 1146 2646
+f 2146 1049 722
+f 2211 2218 921
+f 1957 510 507
+f 1313 1312 2264
+f 1496 362 1493
+f 2887 2937 2933
+f 1687 1688 1659
+f 145 1499 1498
+f 37 38 418
+f 1935 1934 3185
+f 104 89 301
+f 2101 2098 2083
+f 543 298 360
+f 1934 2219 3186
+f 234 1521 212
+f 3845 3821 1822
+f 1976 1875 1874
+f 223 3186 2219
+f 3655 3656 3605
+f 2165 2164 467
+f 467 468 2165
+f 773 476 2165
+f 1935 1212 1728
+f 3240 3225 3226
+f 3261 3249 3250
+f 3215 3216 3244
+f 3265 3221 3215
+f 3267 3245 3198
+f 3205 3206 3269
+f 3214 3211 3265
+f 3271 3213 3214
+f 3251 3252 3239
+f 3222 3221 3266
+f 3232 3235 3234
+f 3216 3242 3243
+f 3196 3193 3268
+f 3244 3243 3254
+f 3238 3239 3247
+f 3264 3244 3255
+f 3253 3242 3216
+f 3211 3266 3221
+f 3257 3217 3218
+f 3258 3222 3272
+f 3274 3275 3280
+f 3228 3229 3262
+f 3275 3231 3281
+f 3226 3227 3273
+f 3236 3263 3260
+f 3281 3263 3236
+f 3287 3271 3192
+f 3235 3226 3280
+f 3234 3280 3281
+f 3288 3290 3259
+f 3260 3250 3276
+f 3289 3212 3278
+f 3210 3209 3224
+f 3199 3202 3201
+f 3228 3231 3230
+f 3290 3288 3285
+f 3282 3279 3271
+f 3260 3263 3262
+f 3243 3241 3268
+f 3267 3268 3241
+f 3194 3207 3208
+f 3191 3270 3269
+f 3270 3191 3192
+f 3275 3274 3230
+f 3264 3269 3270
+f 3252 3251 3276
+f 3248 3252 3291
+f 3213 3271 3279
+f 3272 3266 3289
+f 3277 3278 3212
+f 3228 3263 3281
+f 3220 3225 3253
+f 3219 3253 3217
+f 3256 3218 3222
+f 3247 3239 3252
+f 3245 3267 3232
+f 3266 3211 3212
+f 3208 3205 3255
+f 3288 3289 3284
+f 3204 3205 3208
+f 560 1899 3187
+f 3209 3210 3201
+f 3189 3192 3191
+f 448 1980 807
+f 647 648 122
+f 1833 1848 1780
+f 510 509 508
+f 3210 1949 1887
+f 1887 15 3200
+f 15 16 3197
+f 16 3188 3195
+f 3195 3188 3187
+f 3194 3187 1899
+f 3207 1899 375
+f 387 173 126
+f 3189 1791 1760
+f 3577 763 757
+f 763 3577 2172
+f 3583 3582 3580
+f 3581 3580 3579
+f 3594 3597 3596
+f 3400 3601 3602
+f 3626 3624 3625
+f 3631 3635 3599
+f 3636 3637 3595
+f 3647 3584 3588
+f 3650 3582 3584
+f 3653 3579 3580
+f 3653 2176 3654
+f 1801 1864 3656
+f 3651 3580 3582
+f 3644 3638 3595
+f 1767 1768 3659
+f 1597 3663 3661
+f 1708 1707 3665
+f 3671 3668 3667
+f 3666 3667 3673
+f 1588 1708 3664
+f 2080 3671 3666
+f 2082 3666 3672
+f 3674 3677 3635
+f 3677 3678 3636
+f 3682 3647 3645
+f 3692 3651 3650
+f 3681 3645 3644
+f 786 3694 3692
+f 3694 3653 3651
+f 785 3692 3686
+f 497 3686 3682
+f 650 3682 3681
+f 3696 564 3681
+f 3405 3606 3605
+f 3702 3699 3678
+f 3699 3698 3679
+f 3645 3588 3638
+f 1799 3657 3659
+f 3708 3704 3458
+f 3462 3710 3708
+f 3714 463 564
+f 3615 3606 3602
+f 3710 3714 3696
+f 3635 3636 3596
+f 3629 3626 3627
+f 3597 3600 3599
+f 3630 3716 3715
+f 3627 3625 3597
+f 3717 3716 3723
+f 3715 3663 3660
+f 3716 3717 3725
+f 3668 3665 3661
+f 3668 3725 3717
+f 3667 3717 3718
+f 3727 3728 3673
+f 3728 1576 3672
+f 3726 3727 3718
+f 1576 2101 2084
+f 3727 3726 3731
+f 3203 376 1387
+f 3190 1387 1791
+f 3283 2152 1949
+f 317 240 259
+f 2000 2176 3653
+f 3601 3599 3600
+f 3589 3588 3584
+f 3577 758 756
+f 3585 3584 3582
+f 3678 3679 3637
+f 3204 375 376
+f 1801 3655 3657
+f 3729 3735 3728
+f 1575 3735 3738
+f 3735 1575 1576
+f 1776 1795 1796
+f 1293 1294 1290
+f 1326 14 1327
+f 1294 1245 1234
+f 878 1244 1330
+f 497 373 374
+f 122 648 413
+f 3699 3702 3704
+f 3606 3405 3401
+f 3708 3710 3698
+f 3464 3714 3710
+f 3704 3674 3443
+f 439 398 399
+f 1512 1528 1526
+f 1668 1741 1738
+f 839 849 1362
+f 1362 1357 826
+f 3843 740 1896
+f 2053 2054 2118
+f 3888 3887 2120
+f 696 697 3816
+f 1826 1878 639
+f 3740 3739 3729
+f 956 885 886
+f 1980 806 1922
+f 33 34 105
+f 1343 1344 3856
+f 3907 2127 2128
+f 1766 1581 1582
+f 14 1326 2163
+f 595 98 99
+f 3233 3238 3246
+f 1346 1035 1044
+f 748 1888 1425
+f 3193 3208 3254
+f 3190 3191 3206
+f 3612 3605 3606
+f 3624 3612 3615
+f 3631 3415 3443
+f 3679 3680 3644
+f 3686 3650 3647
+f 3702 3677 3674
+f 3625 3615 3600
+f 3601 3400 3415
+f 3739 3738 3735
+f 3630 3627 3594
+f 3741 3723 3716
+f 1284 1290 1288
+f 3698 3696 3680
+f 1778 1781 1780
+f 1792 1759 1584
+f 695 570 571
+f 3638 3741 3594
+f 3741 3638 3588
+f 3247 3248 3209
+f 3292 3293 3294
+f 3296 3297 3298
+f 3299 3298 3300
+f 3302 3303 3304
+f 3306 3307 3308
+f 3307 3310 3311
+f 3303 3302 3301
+f 3312 3305 3304
+f 3314 3315 3316
+f 3318 3319 3320
+f 3324 3318 3321
+f 3326 3327 3328
+f 3329 3330 3331
+f 3333 3334 3335
+f 3337 3338 3339
+f 3333 3336 3341
+f 3342 3341 3343
+f 3319 3345 3344
+f 3338 3335 3334
+f 3346 3301 3302
+f 3305 3348 3347
+f 3312 3224 3223
+f 3350 3337 3340
+f 3312 3349 3348
+f 3352 3343 3341
+f 3311 3353 3354
+f 3341 3336 3327
+f 3359 3360 3361
+f 3360 3359 3250
+f 3318 3363 3345
+f 3364 3363 3318
+f 3314 3364 3324
+f 3366 3299 3301
+f 3299 3366 3367
+f 3308 3354 3368
+f 3369 3317 3295
+f 3317 3369 3364
+f 3370 3295 3317
+f 3325 3371 3365
+f 3352 3320 3319
+f 3296 3367 3353
+f 3297 3296 3311
+f 3363 3368 3354
+f 3345 3354 3353
+f 3344 3353 3367
+f 3366 3333 3342
+f 3347 3339 3334
+f 3339 3347 3348
+f 3348 3349 3351
+f 3363 3364 3369
+f 3294 3309 3368
+f 3293 3306 3309
+f 3331 3373 3374
+f 3346 3334 3333
+f 3323 3322 3352
+f 3356 3355 3321
+f 3355 3357 3325
+f 3357 3358 3371
+f 3322 3356 3320
+f 3361 3330 3329
+f 3373 3372 3375
+f 3374 3375 3376
+f 3329 3332 3376
+f 3327 3375 3372
+f 3336 3335 3375
+f 3335 3338 3376
+f 3376 3338 3337
+f 3337 3350 3359
+f 3359 3350 3276
+f 3377 3292 3295
+f 3277 3316 3315
+f 3371 3378 3379
+f 3378 3371 3358
+f 3365 3379 3315
+f 3316 3277 3279
+f 3282 3377 3370
+f 3313 3283 3224
+f 3378 3285 3284
+f 3380 3286 3285
+f 3379 3284 3278
+f 3276 3350 3351
+f 3291 3351 3349
+f 3313 3304 2965
+f 2965 3304 3303
+f 222 3303 3300
+f 223 3300 3298
+f 3298 3297 3185
+f 3297 3310 2977
+f 3310 3307 1213
+f 3307 3306 1214
+f 2532 1214 3306
+f 2894 2532 3293
+f 2871 2894 3292
+f 3253 3225 3240
+f 3283 3313 3013
+f 1997 1996 1889
+f 1237 745 744
+f 1535 1534 120
+f 1531 1532 201
+f 1119 1241 1032
+f 946 948 824
+f 163 8 5
+f 3589 3726 3723
+f 1155 1156 1162
+f 427 213 327
+f 553 1902 796
+f 3381 2232 2231
+f 3381 3178 2230
+f 2237 3183 3180
+f 3383 3384 3385
+f 3384 3386 3387
+f 3386 3388 3389
+f 3388 3390 3391
+f 3392 3393 3394
+f 3395 3394 3396
+f 3398 3399 3397
+f 3400 3401 3399
+f 3403 3404 3405
+f 3406 3407 3404
+f 3408 3409 3407
+f 3410 3411 3409
+f 3412 3413 3411
+f 3415 3400 3398
+f 3414 3398 3396
+f 3416 3396 3394
+f 3417 3394 3393
+f 3393 3419 3420
+f 3419 3390 3421
+f 3390 3388 3422
+f 3388 3386 3423
+f 3386 3384 3424
+f 3384 3383 3425
+f 3425 3383 3426
+f 2904 3427 3428
+f 3429 3427 2904
+f 3430 3429 2902
+f 2875 3431 3430
+f 3432 3433 2733
+f 2825 3434 3435
+f 3437 3438 3439
+f 3441 3437 3436
+f 2724 2723 3434
+f 3436 3439 3115
+f 3440 3436 3117
+f 3443 3415 3414
+f 3442 3414 3416
+f 3444 3416 3417
+f 3445 3417 3418
+f 3446 3418 3420
+f 3420 3421 3448
+f 3421 3422 3449
+f 3422 3423 3450
+f 3423 3424 3451
+f 3424 3425 3452
+f 3425 3184 3056
+f 3451 3452 2253
+f 3450 3451 2252
+f 3449 3450 1727
+f 3448 3449 2150
+f 3453 3447 3448
+f 3454 3446 3447
+f 3455 3445 3446
+f 3456 3444 3445
+f 3442 3444 3456
+f 3443 3442 3457
+f 3459 3460 3458
+f 3457 3456 3455
+f 3459 3455 3454
+f 3462 3460 3459
+f 3461 3454 3453
+f 3464 3462 3461
+f 3463 3453 1941
+f 3404 3399 3401
+f 3407 3397 3399
+f 3409 3395 3397
+f 3411 3392 3395
+f 3465 3466 3413
+f 3467 3468 3469
+f 3465 3412 3431
+f 3470 3467 3466
+f 3438 3470 3432
+f 3467 3470 3438
+f 3468 3467 3437
+f 3471 3469 3468
+f 3472 3468 3441
+f 3473 3441 3440
+f 2712 3440 3119
+f 3475 3471 3472
+f 3474 3472 3473
+f 3476 3473 2712
+f 3477 3476 2711
+f 3478 3474 3476
+f 3479 3475 3474
+f 3392 3411 3413
+f 3466 3469 3480
+f 3419 3393 3392
+f 3390 3419 3480
+f 3469 3471 3391
+f 3389 3391 3471
+f 3387 3389 3475
+f 3385 3387 3479
+f 3482 3477 2400
+f 3483 3478 3477
+f 3481 3479 3478
+f 3484 3482 2402
+f 3058 3484 2707
+f 3061 3485 3484
+f 3485 3483 3482
+f 3382 3385 3481
+f 3486 3481 3483
+f 3426 3383 3382
+f 3487 3382 3486
+f 3486 3485 3061
+f 1961 3057 3487
+f 3057 3184 3426
+f 3428 3403 3402
+f 3427 3406 3403
+f 3429 3408 3406
+f 3430 3410 3408
+f 3431 3412 3410
+f 3470 3465 3433
+f 3434 3439 3438
+f 2723 3115 3439
+f 2875 2733 3433
+f 2736 2824 3435
+f 2947 3428 3488
+f 3464 3463 1360
+f 1900 1925 1924
+f 1736 1717 1718
+f 1955 1952 1943
+f 1671 1738 1735
+f 1696 1750 1698
+f 1800 1865 1864
+f 2063 2055 2056
+f 1737 1734 1717
+f 1751 1735 1697
+f 943 3885 3899
+f 3656 3488 3402
+f 1577 1578 1764
+f 3917 1598 1599
+f 1945 146 82
+f 68 6 7
+f 147 1542 1543
+f 1109 1135 1133
+f 1503 1910 1907
+f 1499 1501 1909
+f 452 1922 735
+f 1824 2068 3835
+f 1825 2067 2068
+f 1463 1462 1022
+f 841 1012 1009
+f 1568 1547 1955
+f 1910 1503 1502
+f 1939 1502 422
+f 309 280 1940
+f 1804 1851 1866
+f 269 1482 1954
+f 2065 1607 2060
+f 3812 1685 1758
+f 280 276 552
+f 1875 1873 1869
+f 1739 1721 1734
+f 1778 1779 1790
+f 310 1920 1915
+f 1909 1908 1906
+f 1789 1783 1778
+f 978 981 980
+f 1584 1759 1731
+f 425 426 311
+f 148 1543 115
+f 2044 3897 3894
+f 277 1910 1939
+f 1943 1946 1945
+f 1461 1462 1463
+f 2253 3452 3056
+f 2981 2254 3489
+f 2209 2998 3489
+f 2254 2253 3055
+f 2998 926 958
+f 2826 2817 2721
+f 3490 2739 2721
+f 2816 3491 3490
+f 2334 2297 2296
+f 2298 2297 2334
+f 2301 2298 2336
+f 2311 2301 2338
+f 2312 2311 2354
+f 2359 2358 2361
+f 2335 2296 2359
+f 2414 2413 2365
+f 2413 2270 2361
+f 2355 2352 2540
+f 2541 2540 2539
+f 2722 2721 2739
+f 3114 2722 2738
+f 3110 3112 3121
+f 3108 3110 3120
+f 3122 1754 3127
+f 3112 3122 3126
+f 3127 1754 1753
+f 3139 3133 2529
+f 3744 3775 3133
+f 3490 2789 2740
+f 3491 2790 2789
+f 228 229 91
+f 245 244 243
+f 248 7 247
+f 2 297 296
+f 145 146 300
+f 327 213 210
+f 329 332 331
+f 239 241 240
+f 373 376 375
+f 178 177 176
+f 1218 1225 1228
+f 3862 1231 1295
+f 808 809 824
+f 3858 3859 1013
+f 3813 3810 1411
+f 2154 2149 2159
+f 3845 3835 1758
+f 306 807 1922
+f 2157 2153 1976
+f 708 709 2031
+f 1450 1226 1227
+f 1672 1604 1605
+f 1813 1814 1821
+f 1717 1716 1715
+f 176 159 402
+f 1710 1705 3916
+f 1712 1580 1577
+f 1705 1713 1711
+f 971 978 979
+f 822 823 1094
+f 2909 2810 2793
+f 2810 2769 2795
+f 2799 2910 2249
+f 3662 3643 2910
+f 2769 2768 2801
+f 3177 3176 3178
+f 3178 3381 3180
+f 3181 1394 1393
+f 3181 1633 1632
+f 3182 1632 2244
+f 3183 2244 2243
+f 3492 2791 2790
+f 2808 3492 3491
+f 2239 3574 3572
+f 1149 1150 3571
+f 1150 2009 3567
+f 2202 3569 3566
+f 2921 3564 3565
+f 3568 3560 2133
+f 1777 1579 1759
+f 1201 1196 1183
+f 3561 3554 2195
+f 3553 3576 2770
+f 3927 3925 1687
+f 1516 152 154
+f 3549 3562 2974
+f 3550 3548 2779
+f 2771 2772 3575
+f 3070 2771 3546
+f 3547 3544 2775
+f 1376 1375 1370
+f 2767 3543 3556
+f 2195 3554 3543
+f 2775 3544 3542
+f 3377 3282 3545
+f 3804 3802 3539
+f 2776 3542 3538
+f 2240 3572 3535
+f 2920 3535 3669
+f 3548 3534 2786
+f 1805 1850 1851
+f 3534 3532 2787
+f 2951 2773 3540
+f 2772 2951 3531
+f 2973 2224 3530
+f 2224 2223 3537
+f 1957 1953 320
+f 306 305 304
+f 2770 3576 3528
+f 2913 3528 3574
+f 2923 3565 3527
+f 308 1940 431
+f 207 208 132
+f 1879 1876 1881
+f 3733 3787 3525
+f 2475 3070 3558
+f 2476 2475 3524
+f 2223 2476 3523
+f 3522 3549 2803
+f 2037 3563 11
+f 1959 1958 450
+f 2003 3555 3520
+f 9 10 334
+f 2009 2008 3518
+f 2008 2973 3536
+f 3877 672 673
+f 3515 3550 2778
+f 2935 3514 3513
+f 111 1149 3570
+f 3510 3514 2935
+f 683 3876 3885
+f 3508 3557 2810
+f 736 733 732
+f 1795 1804 1803
+f 3505 3510 2938
+f 3507 3505 2939
+f 2950 3512 3504
+f 2940 2949 3504
+f 3801 3791 3512
+f 2868 3509 3502
+f 2869 3502 3522
+f 1024 1034 1023
+f 3500 3503 2948
+f 551 1939 442
+f 2972 3499 3568
+f 2032 3533 3499
+f 216 93 1924
+f 203 202 201
+f 1828 1848 1847
+f 1958 1957 449
+f 272 271 178
+f 2871 3545 3495
+f 3619 3669 3564
+f 2801 2768 3643
+f 2005 3758 3654
+f 960 886 899
+f 3657 3655 3612
+f 3780 3788 3646
+f 3151 3646 3593
+f 2193 3604 3623
+f 3607 3603 3145
+f 3603 3649 2761
+f 3592 3607 3144
+f 2926 3658 3634
+f 3634 3592 3104
+f 3755 3754 3746
+f 566 567 3758
+f 3488 3656 1864
+f 1602 1586 2079
+f 2868 3658 3611
+f 2922 3611 3610
+f 2923 3610 3618
+f 3670 3613 2918
+f 3619 3670 3614
+f 2921 3618 3670
+f 2920 3614 3586
+f 3613 3642 2915
+f 2917 2766 3622
+f 2916 2917 3641
+f 2239 3587 3652
+f 2911 3640 3652
+f 3643 3640 2911
+f 2797 2801 3662
+f 2797 3591 3598
+f 3622 3628 2763
+f 3608 3628 2764
+f 2194 3623 3648
+f 2221 3648 3649
+f 2756 3608 3609
+f 2750 3598 3609
+f 3669 3535 1817
+f 3763 3770 1691
+f 2515 3621 3590
+f 2548 3590 3593
+f 2513 2489 3632
+f 2513 3616 3621
+f 2509 3633 3620
+f 2510 3632 3633
+f 2289 3620 3639
+f 2283 3639 3617
+f 2240 3586 3587
+f 3659 3657 3624
+f 716 2121 3887
+f 727 1999 3761
+f 2165 2171 2170
+f 3660 3659 3626
+f 3743 3581 3578
+f 2175 2172 769
+f 2100 500 2095
+f 3758 567 3757
+f 640 3560 3568
+f 1653 3576 3553
+f 1895 3536 3530
+f 746 3530 3537
+f 1653 1810 3528
+f 3762 1603 1604
+f 2093 2075 2073
+f 959 996 995
+f 962 899 919
+f 1400 1401 1398
+f 1000 817 1127
+f 956 959 884
+f 1662 3548 3550
+f 2034 3558 3546
+f 3926 1689 3539
+f 3535 3572 766
+f 3572 3574 765
+f 334 3570 3571
+f 1868 3531 3540
+f 1684 1651 1652
+f 1682 1683 775
+f 1585 1603 3762
+f 3763 3762 1672
+f 2173 2174 490
+f 1658 3544 3547
+f 3556 3543 1650
+f 1571 3754 2010
+f 1587 3664 3671
+f 2099 2095 2077
+f 787 3761 1999
+f 2000 3694 786
+f 3155 339 3697
+f 3713 3155 3683
+f 2180 2179 3685
+f 3496 2180 3684
+f 1684 3846 3840
+f 1760 464 3495
+f 2607 3689 3693
+f 2581 3676 3689
+f 2371 2502 3700
+f 2371 3695 3693
+f 2587 3676 3675
+f 2586 3675 3697
+f 2497 3691 3687
+f 2502 2503 3687
+f 2494 3691 3690
+f 3690 3688 2472
+f 3688 3701 2468
+f 3701 3703 2469
+f 726 3908 3892
+f 581 3567 3518
+f 10 3511 3570
+f 1837 3514 3510
+f 741 3537 3523
+f 1299 3524 3558
+f 1851 3507 3504
+f 3527 3565 1820
+f 580 3518 3536
+f 1758 1685 3522
+f 1693 3557 3508
+f 3504 3512 1867
+f 1894 1826 3568
+f 2126 1418 3813
+f 3906 2126 3841
+f 2113 3911 3867
+f 2179 3713 3789
+f 624 3903 3904
+f 2192 3604 3788
+f 2177 3541 3787
+f 1364 3824 3833
+f 3858 3855 1470
+f 3135 1753 3775
+f 2069 3812 3835
+f 1351 3861 3853
+f 2108 3882 3901
+f 2089 2092 3736
+f 3828 3834 1358
+f 2001 3749 3783
+f 2187 3783 3719
+f 2130 3756 3752
+f 2004 3752 3749
+f 2014 3724 3779
+f 2201 3779 3756
+f 3167 3720 3759
+f 3168 3724 3720
+f 3159 3709 3764
+f 3160 3764 3765
+f 3730 3707 3158
+f 3158 3707 3709
+f 2200 3069 3730
+f 3685 3789 3742
+f 3722 3774 2197
+f 3790 3736 2092
+f 3774 3790 3721
+f 3154 3706 3790
+f 3155 3713 3706
+f 2091 3719 3721
+f 2196 3148 3750
+f 2193 2196 3776
+f 3604 2193 3781
+f 2086 2192 3780
+f 2087 2086 3778
+f 2204 3742 3751
+f 2205 3751 3750
+f 3909 3886 574
+f 2124 2087 3777
+f 2208 2124 3760
+f 3711 3785 3140
+f 3785 3745 3138
+f 3745 3744 3139
+f 3771 3712 3136
+f 3712 3711 3137
+f 3775 3767 3131
+f 3767 3782 3132
+f 1813 3839 3838
+f 1293 3854 3856
+f 2184 2321 3748
+f 2181 2184 3732
+f 3765 3768 3079
+f 3768 3734 3080
+f 709 3873 3875
+f 2774 2961 3737
+f 3786 3769 2134
+f 3809 3808 766
+f 2968 3784 3766
+f 2969 3759 3784
+f 2958 3747 3737
+f 2132 3747 3766
+f 2321 2185 3771
+f 3860 3848 1449
+f 3905 3874 582
+f 3886 3889 577
+f 3847 3826 1818
+f 705 3902 3887
+f 3816 3912 3907
+f 2111 3883 3911
+f 3810 3829 1366
+f 1812 3839 3822
+f 675 3851 3852
+f 3496 3705 3851
+f 2110 3867 3868
+f 2104 3896 3901
+f 2109 3868 3899
+f 3842 3831 703
+f 1680 3811 3846
+f 1639 1627 3819
+f 623 3904 3906
+f 2139 2140 3866
+f 1808 1640 3844
+f 3820 3821 1819
+f 3872 3914 3900
+f 1646 3832 3830
+f 3889 3890 633
+f 2139 3865 3875
+f 2156 2155 3871
+f 674 3852 3913
+f 2117 3817 3812
+f 1815 3822 3847
+f 3855 3849 1444
+f 697 3906 3912
+f 866 842 3836
+f 3864 3869 2136
+f 3585 3731 3726
+f 3583 3740 3731
+f 3753 3746 3738
+f 3740 3583 3581
+f 1088 3738 3746
+f 3739 3740 3743
+f 2791 3492 3493
+f 3492 2808 2807
+f 2792 3493 3494
+f 3493 2807 2806
+f 3508 2909 3494
+f 3516 3494 2806
+f 2948 3503 3791
+f 2105 3908 3896
+f 3538 3802 3804
+f 680 3878 3876
+f 1891 1890 3910
+f 2905 3799 3801
+f 2900 3797 3799
+f 2901 3797 3807
+f 2734 2878 3806
+f 2877 3807 3806
+f 2820 3800 3794
+f 2818 3796 3792
+f 2823 3793 3792
+f 2822 3793 3798
+f 2819 2735 3800
+f 2817 3796 3795
+f 2809 3805 3795
+f 2808 3805 3803
+f 2805 3804 3803
+f 1702 3921 3923
+f 3922 3920 1797
+f 3915 3916 1706
+f 3918 3927 1692
+f 1657 3898 3880
+f 1890 1877 3895
+f 3772 3770 1674
+f 1689 1690 3772
+f 3926 3802 3538
+f 3516 3539 1689
+f 3917 3923 1703
+f 1598 3917 3928
+f 3880 3884 1876
+f 1691 1690 3925
+f 3925 3926 1688
+f 3934 3933 3937
+f 3934 3938 3939
+f 3932 3936 3937
+f 3938 3942 3943
+f 3937 3936 3940
+f 3937 3941 3942
+f 3942 3946 3947
+f 3941 3940 3944
+f 3942 3941 3945
+f 3947 3946 3950
+f 3944 3948 3949
+f 3945 3949 3950
+f 3950 3954 3955
+f 3949 3948 3952
+f 3950 3949 3953
+f 3955 3954 3958
+f 3953 3952 3956
+f 3953 3957 3958
+f 3959 3958 3962
+f 3957 3956 3960
+f 3958 3957 3961
+f 3962 3966 3967
+f 3960 3964 3965
+f 3961 3965 3966
+f 3966 3970 3971
+f 3965 3964 3968
+f 3966 3965 3969
+f 3970 3974 3975
+f 3968 3972 3973
+f 3969 3973 3974
+f 3974 3978 3979
+f 3973 3972 3976
+f 3974 3973 3977
+f 3979 3978 3982
+f 3976 3980 3981
+f 3977 3981 3982
+f 3982 3986 3987
+f 3981 3980 3984
+f 3982 3981 3985
+f 3987 3986 3990
+f 3984 3988 3989
+f 3985 3989 3990
+f 3991 3990 3994
+f 3989 3988 3992
+f 3990 3989 3993
+f 3995 3994 3998
+f 3992 3996 3997
+f 3993 3997 3998
+f 3998 4002 4003
+f 3997 3996 4000
+f 3998 3997 4001
+f 4002 4006 4007
+f 4000 4004 4005
+f 4001 4005 4006
+f 4006 4010 4011
+f 4005 4004 4008
+f 4006 4005 4009
+f 4011 4010 4014
+f 4008 4012 4013
+f 4009 4013 4014
+f 4014 4018 4019
+f 4013 4012 4016
+f 4014 4013 4017
+f 4019 4018 4022
+f 4016 4020 4021
+f 4017 4021 4022
+f 4023 4022 4026
+f 4021 4020 4024
+f 4022 4021 4025
+f 4027 4026 4030
+f 4024 4028 4029
+f 4025 4029 4030
+f 4031 4030 4034
+f 4029 4028 4032
+f 4030 4029 4033
+f 4034 4038 4039
+f 4032 4036 4037
+f 4033 4037 4038
+f 4038 4042 4043
+f 4037 4036 4040
+f 4038 4037 4041
+f 4043 4042 4046
+f 4040 4044 4045
+f 4041 4045 4046
+f 4046 4050 4051
+f 4045 4044 4048
+f 4046 4045 4049
+f 4051 4050 4055
+f 4048 4053 4054
+f 4049 4054 4055
+f 4056 4055 4059
+f 4053 4057 4058
+f 4055 4054 4058
+f 4060 4059 3934
+f 4058 4057 3932
+f 4058 3933 3934
+f 4069 4068 4081
+f 4066 4079 4080
+f 4065 4064 4077
+f 4063 4062 4075
+f 4071 4084 4085
+f 4069 4082 4083
+f 4067 4080 4081
+f 4066 4065 4078
+f 4063 4076 4077
+f 4061 4074 4075
+f 4072 4085 4086
+f 4071 4070 4083
+f 4076 4089 4090
+f 4074 4087 4088
+f 4086 4085 4098
+f 4083 4096 4097
+f 4082 4081 4094
+f 4079 4092 4093
+f 4078 4077 4090
+f 4076 4075 4088
+f 4085 4084 4097
+f 4082 4095 4096
+f 4080 4093 4094
+f 4079 4078 4091
+f 4089 4102 4103
+f 4088 4087 4100
+f 4098 4111 4112
+f 4096 4109 4110
+f 4095 4094 4107
+f 4092 4105 4106
+f 4090 4103 4104
+f 4089 4088 4101
+f 4097 4110 4111
+f 4095 4108 4109
+f 4094 4093 4106
+f 4092 4091 4104
+f 4103 4102 4115
+f 4100 4113 4114
+f 4112 4111 4124
+f 4110 4109 4122
+f 4107 4120 4121
+f 4105 4118 4119
+f 4103 4116 4117
+f 4101 4114 4115
+f 4110 4123 4124
+f 4109 4108 4121
+f 4107 4106 4119
+f 4105 4104 4117
+f 4115 4128 4129
+f 4114 4113 4126
+f 4124 4137 4138
+f 4123 4122 4135
+f 4120 4133 4134
+f 4119 4118 4131
+f 4117 4116 4129
+f 4114 4127 4128
+f 4124 4123 4136
+f 4122 4121 4134
+f 4119 4132 4133
+f 4117 4130 4131
+f 4129 4128 4141
+f 4126 4139 4140
+f 4138 4137 4150
+f 4136 4135 4148
+f 4133 4146 4147
+f 4131 4144 4145
+f 4130 4129 4142
+f 4127 4140 4141
+f 4137 4136 4149
+f 4135 4134 4147
+f 4132 4145 4146
+f 4131 4130 4143
+f 4142 4141 4154
+f 4140 4139 4152
+f 4150 4163 4164
+f 4149 4148 4161
+f 4146 4159 4160
+f 4144 4157 4158
+f 4142 4155 4156
+f 4140 4153 4154
+f 4149 4162 4163
+f 4148 4147 4160
+f 4146 4145 4158
+f 4144 4143 4156
+f 4155 4154 4167
+f 4153 4152 4165
+f 4163 4176 4177
+f 4161 4174 4175
+f 4159 4172 4173
+f 4158 4157 4170
+f 4155 4168 4169
+f 4153 4166 4167
+f 4163 4162 4175
+f 4161 4160 4173
+f 4159 4158 4171
+f 4156 4169 4170
+f 4167 4180 4181
+f 4165 4178 4179
+f 4176 4189 4190
+f 4175 4174 4187
+f 4173 4172 4185
+f 4170 4183 4184
+f 4169 4168 4181
+f 4167 4166 4179
+f 4175 4188 4189
+f 4173 4186 4187
+f 4171 4184 4185
+f 4170 4169 4182
+f 4180 4193 4194
+f 4178 4191 4192
+f 4190 4189 4202
+f 4187 4200 4201
+f 4186 4185 4198
+f 4183 4196 4197
+f 4182 4181 4194
+f 4180 4179 4192
+f 4189 4188 4201
+f 4186 4199 4200
+f 4184 4197 4198
+f 4183 4182 4195
+f 4193 4206 4207
+f 4192 4191 4204
+f 4202 4215 4216
+f 4200 4213 4214
+f 4199 4198 4211
+f 4196 4209 4210
+f 4194 4207 4208
+f 4193 4192 4205
+f 4201 4214 4215
+f 4199 4212 4213
+f 4198 4197 4210
+f 4196 4195 4208
+f 4207 4206 4219
+f 4204 4217 4218
+f 4216 4215 4228
+f 4213 4226 4227
+f 4211 4224 4225
+f 4209 4222 4223
+f 4207 4220 4221
+f 4205 4218 4219
+f 4214 4227 4228
+f 4213 4212 4225
+f 4211 4210 4223
+f 4209 4208 4221
+f 4219 4232 4233
+f 4218 4217 4230
+f 4228 4241 4242
+f 4226 4239 4240
+f 4225 4224 4237
+f 4223 4222 4235
+f 4221 4220 4233
+f 4218 4231 4232
+f 4228 4227 4240
+f 4226 4225 4238
+f 4223 4236 4237
+f 4221 4234 4235
+f 4233 4232 4245
+f 4230 4243 4244
+f 4242 4241 4254
+f 4240 4239 4252
+f 4237 4250 4251
+f 4235 4248 4249
+f 4234 4233 4246
+f 4231 4244 4245
+f 4241 4240 4253
+f 4239 4238 4251
+f 4236 4249 4250
+f 4235 4234 4247
+f 4246 4245 4258
+f 4244 4243 4256
+f 4254 4267 4268
+f 4253 4252 4265
+f 4250 4263 4264
+f 4248 4261 4262
+f 4246 4259 4260
+f 4244 4257 4258
+f 4253 4266 4267
+f 4252 4251 4264
+f 4250 4249 4262
+f 4248 4247 4260
+f 4259 4258 4271
+f 4257 4256 4269
+f 4267 4280 4281
+f 4265 4278 4279
+f 4263 4276 4277
+f 4262 4261 4274
+f 4259 4272 4273
+f 4257 4270 4271
+f 4266 4279 4280
+f 4265 4264 4277
+f 4263 4262 4275
+f 4260 4273 4274
+f 4271 4284 4285
+f 4269 4282 4283
+f 4281 4280 4293
+f 4279 4278 4291
+f 4277 4276 4289
+f 4274 4287 4288
+f 4273 4272 4285
+f 4271 4270 4283
+f 4280 4279 4292
+f 4277 4290 4291
+f 4275 4288 4289
+f 4274 4273 4286
+f 4284 4297 4298
+f 4282 4295 4296
+f 4294 4293 4306
+f 4291 4304 4305
+f 4290 4289 4302
+f 4287 4300 4301
+f 4286 4285 4298
+f 4284 4283 4296
+f 4293 4292 4305
+f 4290 4303 4304
+f 4288 4301 4302
+f 4287 4286 4299
+f 4297 4310 4311
+f 4296 4295 4308
+f 4306 4319 4320
+f 4304 4317 4318
+f 4303 4302 4315
+f 4300 4313 4314
+f 4298 4311 4312
+f 4297 4296 4309
+f 4305 4318 4319
+f 4304 4303 4316
+f 4302 4301 4314
+f 4300 4299 4312
+f 4310 4323 4324
+f 4308 4321 4322
+f 4320 4319 4332
+f 4318 4317 4330
+f 4316 4315 4328
+f 4313 4326 4327
+f 4311 4324 4325
+f 4310 4309 4322
+f 4318 4331 4332
+f 4316 4329 4330
+f 4315 4314 4327
+f 4312 4325 4326
+f 4323 4336 4337
+f 4322 4321 4334
+f 4332 4345 4346
+f 4330 4343 4344
+f 4329 4328 4341
+f 4327 4326 4339
+f 4325 4324 4337
+f 4322 4335 4336
+f 4332 4331 4344
+f 4330 4329 4342
+f 4327 4340 4341
+f 4325 4338 4339
+f 4337 4336 4350
+f 4334 4348 4349
+f 4346 4345 4359
+f 4344 4343 4357
+f 4341 4355 4356
+f 4340 4339 4353
+f 4338 4337 4351
+f 4335 4349 4350
+f 4345 4344 4358
+f 4343 4342 4356
+f 4340 4354 4355
+f 4338 4352 4353
+f 4351 4350 4363
+f 4349 4348 4361
+f 4359 4372 4373
+f 4358 4357 4370
+f 4355 4368 4369
+f 4354 4353 4366
+f 4351 4364 4365
+f 4349 4362 4363
+f 4358 4371 4372
+f 4357 4356 4369
+f 4355 4354 4367
+f 4352 4365 4366
+f 4364 4363 4376
+f 4362 4361 4374
+f 4373 4372 4385
+f 4370 4383 4384
+f 4368 4381 4382
+f 4367 4366 4379
+f 4364 4377 4378
+f 4362 4375 4376
+f 4372 4371 4384
+f 4370 4369 4382
+f 4368 4367 4380
+f 4365 4378 4379
+f 4376 4389 4390
+f 4374 4387 4388
+f 4385 4398 4399
+f 4384 4383 4396
+f 4382 4381 4394
+f 4379 4392 4393
+f 4378 4377 4390
+f 4376 4375 4388
+f 4384 4397 4398
+f 4382 4395 4396
+f 4380 4393 4394
+f 4379 4378 4391
+f 4389 4402 4403
+f 4387 4400 4401
+f 4399 4398 4411
+f 4396 4409 4410
+f 4395 4394 4407
+f 4393 4392 4405
+f 4391 4390 4403
+f 4389 4388 4401
+f 4398 4397 4410
+f 4395 4408 4409
+f 4393 4406 4407
+f 4391 4404 4405
+f 4402 4415 4416
+f 4401 4400 4413
+f 4411 4424 4425
+f 4409 4422 4423
+f 4408 4407 4420
+f 4406 4405 4418
+f 4403 4416 4417
+f 4402 4401 4414
+f 4410 4423 4424
+f 4408 4421 4422
+f 4407 4406 4419
+f 4404 4417 4418
+f 4416 4415 4428
+f 4413 4426 4427
+f 4425 4424 4437
+f 4423 4422 4435
+f 4421 4420 4433
+f 4418 4431 4432
+f 4416 4429 4430
+f 4415 4414 4427
+f 4423 4436 4437
+f 4421 4434 4435
+f 4420 4419 4432
+f 4418 4417 4430
+f 4428 4441 4442
+f 4426 4439 4440
+f 4437 4450 4451
+f 4435 4448 4449
+f 4434 4433 4446
+f 4432 4431 4444
+f 4430 4429 4442
+f 4427 4440 4441
+f 4437 4436 4449
+f 4435 4434 4447
+f 4432 4445 4446
+f 4430 4443 4444
+f 4442 4441 4454
+f 4439 4452 4453
+f 4451 4450 4463
+f 4449 4448 4461
+f 4446 4459 4460
+f 4444 4457 4458
+f 4443 4442 4455
+f 4440 4453 4454
+f 4450 4449 4462
+f 4447 4460 4461
+f 4445 4458 4459
+f 4444 4443 4456
+f 4455 4454 4467
+f 4453 4452 4465
+f 4463 4476 4477
+f 4462 4461 4474
+f 4459 4472 4473
+f 4457 4470 4471
+f 4455 4468 4469
+f 4453 4466 4467
+f 4462 4475 4476
+f 4461 4460 4473
+f 4459 4458 4471
+f 4457 4456 4469
+f 4468 4467 4063
+f 4466 4465 4061
+f 4477 4476 4072
+f 4474 4070 4071
+f 4472 4068 4069
+f 4471 4470 4066
+f 4468 4064 4065
+f 4466 4062 4063
+f 4476 4475 4071
+f 4474 4473 4069
+f 4472 4471 4067
+f 4469 4065 4066
+f 3935 4061 4465
+f 4465 4452 4056
+f 4452 4439 4051
+f 4439 4426 4047
+f 4047 4426 4413
+f 4043 4413 4400
+f 4039 4400 4387
+f 4387 4374 4031
+f 4031 4374 4361
+f 4361 4348 4023
+f 4348 4334 4019
+f 4334 4321 4015
+f 4015 4321 4308
+f 4011 4308 4295
+f 4007 4295 4282
+f 4282 4269 3999
+f 3999 4269 4256
+f 4256 4243 3991
+f 4243 4230 3987
+f 4230 4217 3983
+f 3983 4217 4204
+f 3979 4204 4191
+f 3975 4191 4178
+f 4178 4165 3967
+f 3967 4165 4152
+f 4152 4139 3959
+f 4139 4126 3955
+f 4126 4113 3951
+f 3951 4113 4100
+f 3947 4100 4087
+f 3943 4087 4074
+f 4074 4061 3935
+f 4480 4479 4483
+f 4480 4484 4485
+f 4479 4478 4482
+f 4484 4488 4489
+f 4482 4486 4487
+f 4483 4487 4488
+f 4488 4492 4493
+f 4487 4486 4490
+f 4488 4487 4491
+f 4493 4492 4496
+f 4490 4494 4495
+f 4491 4495 4496
+f 4496 4500 4501
+f 4495 4494 4498
+f 4496 4495 4499
+f 4501 4500 4504
+f 4498 4502 4503
+f 4499 4503 4504
+f 4505 4504 4508
+f 4503 4502 4506
+f 4504 4503 4507
+f 4509 4508 4512
+f 4506 4510 4511
+f 4507 4511 4512
+f 4513 4512 4516
+f 4511 4510 4514
+f 4512 4511 4515
+f 4516 4520 4521
+f 4514 4518 4519
+f 4515 4519 4520
+f 4520 4524 4525
+f 4518 4522 4523
+f 4520 4519 4523
+f 4525 4524 4528
+f 4522 4526 4527
+f 4523 4527 4528
+f 4528 4532 4533
+f 4527 4526 4530
+f 4528 4527 4531
+f 4533 4532 4536
+f 4530 4534 4535
+f 4531 4535 4536
+f 4537 4536 4540
+f 4534 4538 4539
+f 4536 4535 4539
+f 4541 4540 4544
+f 4539 4538 4542
+f 4539 4543 4544
+f 4544 4548 4549
+f 4542 4546 4547
+f 4544 4543 4547
+f 4548 4552 4553
+f 4547 4546 4550
+f 4547 4551 4552
+f 4552 4556 4557
+f 4551 4550 4554
+f 4552 4551 4555
+f 4557 4556 4560
+f 4554 4558 4559
+f 4555 4559 4560
+f 4560 4564 4565
+f 4559 4558 4562
+f 4560 4559 4563
+f 4565 4564 4568
+f 4562 4566 4567
+f 4563 4567 4568
+f 4569 4568 4572
+f 4567 4566 4570
+f 4568 4567 4571
+f 4572 4576 4577
+f 4570 4574 4575
+f 4571 4575 4576
+f 4576 4580 4581
+f 4575 4574 4578
+f 4576 4575 4579
+f 4580 4584 4585
+f 4578 4582 4583
+f 4579 4583 4584
+f 4584 4588 4589
+f 4583 4582 4586
+f 4584 4583 4587
+f 4589 4588 4592
+f 4586 4590 4591
+f 4587 4591 4592
+f 4592 4596 4597
+f 4591 4590 4594
+f 4592 4591 4595
+f 4597 4596 4601
+f 4594 4599 4600
+f 4595 4600 4601
+f 4602 4601 4605
+f 4600 4599 4603
+f 4601 4600 4604
+f 4606 4605 4480
+f 4603 4478 4479
+f 4604 4479 4480
+f 4615 4614 4627
+f 4612 4625 4626
+f 4611 4610 4623
+f 4609 4608 4621
+f 4618 4617 4630
+f 4615 4628 4629
+f 4613 4626 4627
+f 4612 4611 4624
+f 4609 4622 4623
+f 4607 4620 4621
+f 4619 4618 4631
+f 4617 4616 4629
+f 4622 4635 4636
+f 4620 4633 4634
+f 4632 4631 4644
+f 4629 4642 4643
+f 4628 4627 4640
+f 4626 4625 4638
+f 4624 4623 4636
+f 4622 4621 4634
+f 4631 4630 4643
+f 4628 4641 4642
+f 4626 4639 4640
+f 4624 4637 4638
+f 4635 4648 4649
+f 4634 4633 4646
+f 4644 4657 4658
+f 4642 4655 4656
+f 4641 4640 4653
+f 4639 4638 4651
+f 4636 4649 4650
+f 4635 4634 4647
+f 4643 4656 4657
+f 4642 4641 4654
+f 4640 4639 4652
+f 4637 4650 4651
+f 4648 4661 4662
+f 4646 4659 4660
+f 4658 4657 4670
+f 4656 4655 4668
+f 4653 4666 4667
+f 4651 4664 4665
+f 4649 4662 4663
+f 4648 4647 4660
+f 4656 4669 4670
+f 4654 4667 4668
+f 4653 4652 4665
+f 4651 4650 4663
+f 4661 4674 4675
+f 4660 4659 4672
+f 4670 4683 4684
+f 4668 4681 4682
+f 4667 4666 4679
+f 4665 4664 4677
+f 4663 4662 4675
+f 4661 4660 4673
+f 4670 4669 4682
+f 4667 4680 4681
+f 4665 4678 4679
+f 4663 4676 4677
+f 4675 4674 4687
+f 4672 4685 4686
+f 4684 4683 4696
+f 4682 4681 4694
+f 4679 4692 4693
+f 4678 4677 4690
+f 4676 4675 4688
+f 4673 4686 4687
+f 4683 4682 4695
+f 4681 4680 4693
+f 4678 4691 4692
+f 4676 4689 4690
+f 4688 4687 4700
+f 4686 4685 4698
+f 4696 4709 4710
+f 4695 4694 4707
+f 4692 4705 4706
+f 4691 4690 4703
+f 4688 4701 4702
+f 4686 4699 4700
+f 4695 4708 4709
+f 4694 4693 4706
+f 4692 4691 4704
+f 4689 4702 4703
+f 4701 4700 4713
+f 4699 4698 4711
+f 4710 4709 4722
+f 4707 4720 4721
+f 4705 4718 4719
+f 4704 4703 4716
+f 4701 4714 4715
+f 4699 4712 4713
+f 4709 4708 4721
+f 4707 4706 4719
+f 4705 4704 4717
+f 4702 4715 4716
+f 4713 4726 4727
+f 4711 4724 4725
+f 4723 4722 4735
+f 4721 4720 4733
+f 4719 4718 4731
+f 4716 4729 4730
+f 4715 4714 4727
+f 4713 4712 4725
+f 4721 4734 4735
+f 4719 4732 4733
+f 4717 4730 4731
+f 4716 4715 4728
+f 4726 4739 4740
+f 4724 4737 4738
+f 4736 4735 4748
+f 4733 4746 4747
+f 4732 4731 4744
+f 4730 4729 4742
+f 4728 4727 4740
+f 4726 4725 4738
+f 4735 4734 4747
+f 4732 4745 4746
+f 4730 4743 4744
+f 4728 4741 4742
+f 4739 4752 4753
+f 4738 4737 4750
+f 4748 4761 4762
+f 4746 4759 4760
+f 4745 4744 4757
+f 4743 4742 4755
+f 4740 4753 4754
+f 4739 4738 4751
+f 4747 4760 4761
+f 4745 4758 4759
+f 4744 4743 4756
+f 4741 4754 4755
+f 4753 4752 4765
+f 4750 4763 4764
+f 4762 4761 4774
+f 4759 4772 4773
+f 4758 4757 4770
+f 4755 4768 4769
+f 4753 4766 4767
+f 4752 4751 4764
+f 4760 4773 4774
+f 4758 4771 4772
+f 4757 4756 4769
+f 4755 4754 4767
+f 4765 4778 4779
+f 4764 4763 4776
+f 4774 4787 4788
+f 4772 4785 4786
+f 4771 4770 4783
+f 4769 4768 4781
+f 4767 4766 4779
+f 4765 4764 4777
+f 4774 4773 4786
+f 4771 4784 4785
+f 4769 4782 4783
+f 4767 4780 4781
+f 4779 4778 4791
+f 4776 4789 4790
+f 4788 4787 4800
+f 4786 4785 4798
+f 4783 4796 4797
+f 4782 4781 4794
+f 4780 4779 4792
+f 4777 4790 4791
+f 4787 4786 4799
+f 4785 4784 4797
+f 4782 4795 4796
+f 4780 4793 4794
+f 4792 4791 4804
+f 4790 4789 4802
+f 4800 4813 4814
+f 4799 4798 4811
+f 4796 4809 4810
+f 4795 4794 4807
+f 4792 4805 4806
+f 4790 4803 4804
+f 4799 4812 4813
+f 4798 4797 4810
+f 4796 4795 4808
+f 4793 4806 4807
+f 4805 4804 4817
+f 4803 4802 4815
+f 4814 4813 4826
+f 4811 4824 4825
+f 4809 4822 4823
+f 4808 4807 4820
+f 4805 4818 4819
+f 4803 4816 4817
+f 4813 4812 4825
+f 4811 4810 4823
+f 4809 4808 4821
+f 4806 4819 4820
+f 4817 4830 4831
+f 4815 4828 4829
+f 4826 4839 4840
+f 4825 4824 4837
+f 4823 4822 4835
+f 4820 4833 4834
+f 4819 4818 4831
+f 4817 4816 4829
+f 4825 4838 4839
+f 4823 4836 4837
+f 4821 4834 4835
+f 4820 4819 4832
+f 4830 4843 4844
+f 4828 4841 4842
+f 4840 4839 4852
+f 4837 4850 4851
+f 4836 4835 4848
+f 4834 4833 4846
+f 4832 4831 4844
+f 4830 4829 4842
+f 4839 4838 4851
+f 4836 4849 4850
+f 4834 4847 4848
+f 4832 4845 4846
+f 4843 4856 4857
+f 4842 4841 4854
+f 4852 4865 4866
+f 4850 4863 4864
+f 4849 4848 4861
+f 4847 4846 4859
+f 4844 4857 4858
+f 4843 4842 4855
+f 4851 4864 4865
+f 4849 4862 4863
+f 4848 4847 4860
+f 4845 4858 4859
+f 4857 4856 4869
+f 4855 4854 4867
+f 4866 4865 4878
+f 4864 4863 4876
+f 4862 4861 4874
+f 4859 4872 4873
+f 4857 4870 4871
+f 4856 4855 4868
+f 4864 4877 4878
+f 4862 4875 4876
+f 4861 4860 4873
+f 4859 4858 4871
+f 4869 4882 4883
+f 4868 4867 4880
+f 4878 4891 4892
+f 4877 4876 4889
+f 4874 4887 4888
+f 4873 4872 4885
+f 4871 4870 4883
+f 4868 4881 4882
+f 4878 4877 4890
+f 4876 4875 4888
+f 4873 4886 4887
+f 4871 4884 4885
+f 4883 4882 4896
+f 4880 4894 4895
+f 4892 4891 4905
+f 4890 4889 4903
+f 4887 4901 4902
+f 4885 4899 4900
+f 4884 4883 4897
+f 4881 4895 4896
+f 4891 4890 4904
+f 4889 4888 4902
+f 4886 4900 4901
+f 4885 4884 4898
+f 4897 4896 4909
+f 4895 4894 4907
+f 4905 4918 4919
+f 4904 4903 4916
+f 4901 4914 4915
+f 4899 4912 4913
+f 4897 4910 4911
+f 4895 4908 4909
+f 4904 4917 4918
+f 4903 4902 4915
+f 4901 4900 4913
+f 4899 4898 4911
+f 4910 4909 4922
+f 4908 4907 4920
+f 4919 4918 4931
+f 4916 4929 4930
+f 4914 4927 4928
+f 4913 4912 4925
+f 4910 4923 4924
+f 4908 4921 4922
+f 4918 4917 4930
+f 4916 4915 4928
+f 4914 4913 4926
+f 4911 4924 4925
+f 4922 4935 4936
+f 4920 4933 4934
+f 4931 4944 4945
+f 4930 4929 4942
+f 4928 4927 4940
+f 4925 4938 4939
+f 4924 4923 4936
+f 4922 4921 4934
+f 4930 4943 4944
+f 4928 4941 4942
+f 4926 4939 4940
+f 4925 4924 4937
+f 4935 4948 4949
+f 4933 4946 4947
+f 4945 4944 4957
+f 4942 4955 4956
+f 4941 4940 4953
+f 4938 4951 4952
+f 4937 4936 4949
+f 4935 4934 4947
+f 4944 4943 4956
+f 4941 4954 4955
+f 4939 4952 4953
+f 4938 4937 4950
+f 4948 4961 4962
+f 4947 4946 4959
+f 4957 4970 4971
+f 4955 4968 4969
+f 4954 4953 4966
+f 4951 4964 4965
+f 4949 4962 4963
+f 4948 4947 4960
+f 4956 4969 4970
+f 4954 4967 4968
+f 4953 4952 4965
+f 4951 4950 4963
+f 4962 4961 4974
+f 4959 4972 4973
+f 4971 4970 4983
+f 4968 4981 4982
+f 4966 4979 4980
+f 4964 4977 4978
+f 4962 4975 4976
+f 4960 4973 4974
+f 4969 4982 4983
+f 4967 4980 4981
+f 4966 4965 4978
+f 4964 4963 4976
+f 4975 4974 4987
+f 4973 4972 4985
+f 4983 4996 4997
+f 4981 4994 4995
+f 4979 4992 4993
+f 4978 4977 4990
+f 4976 4975 4988
+f 4973 4986 4987
+f 4983 4982 4995
+f 4981 4980 4993
+f 4978 4991 4992
+f 4976 4989 4990
+f 4988 4987 5000
+f 4985 4998 4999
+f 4997 4996 5009
+f 4995 4994 5007
+f 4992 5005 5006
+f 4991 4990 5003
+f 4989 4988 5001
+f 4986 4999 5000
+f 4996 4995 5008
+f 4994 4993 5006
+f 4991 5004 5005
+f 4989 5002 5003
+f 5001 5000 5013
+f 4999 4998 5011
+f 5009 5022 5023
+f 5008 5007 5020
+f 5005 5018 5019
+f 5004 5003 5016
+f 5001 5014 5015
+f 4999 5012 5013
+f 5008 5021 5022
+f 5007 5006 5019
+f 5005 5004 5017
+f 5002 5015 5016
+f 5014 5013 4609
+f 5012 5011 4607
+f 5022 4618 4619
+f 5020 4616 4617
+f 5018 4614 4615
+f 5017 5016 4612
+f 5014 4610 4611
+f 5012 4608 4609
+f 5021 4617 4618
+f 5020 5019 4615
+f 5018 5017 4613
+f 5015 4611 4612
+f 4481 4607 5011
+f 5011 4998 4602
+f 4998 4985 4597
+f 4985 4972 4593
+f 4593 4972 4959
+f 4589 4959 4946
+f 4585 4946 4933
+f 4933 4920 4577
+f 4577 4920 4907
+f 4907 4894 4569
+f 4894 4880 4565
+f 4880 4867 4561
+f 4561 4867 4854
+f 4557 4854 4841
+f 4553 4841 4828
+f 4828 4815 4545
+f 4545 4815 4802
+f 4802 4789 4537
+f 4789 4776 4533
+f 4776 4763 4529
+f 4529 4763 4750
+f 4525 4750 4737
+f 4521 4737 4724
+f 4724 4711 4513
+f 4513 4711 4698
+f 4698 4685 4505
+f 4685 4672 4501
+f 4672 4659 4497
+f 4497 4659 4646
+f 4493 4646 4633
+f 4489 4633 4620
+f 4620 4607 4481
\ No newline at end of file
diff --git a/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/landmark_embedding.npy b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/landmark_embedding.npy
new file mode 100644
index 0000000000000000000000000000000000000000..046c64f7e638b9a4b9a86ffaa0960ac79cad4d0d
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/data/FLAME2020/landmark_embedding.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8095348eeafce5a02f6bd8765146307f9567a3f03b316d788a2e47336d667954
+size 31292
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/README.md b/src/pixel3dmm/preprocessing/MICA/datasets/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..5ece115b6837da2d211007fa4a721689dcbdaa57
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/README.md
@@ -0,0 +1,128 @@
+MICA - Dataset
+
+The MICA dataset consists of eight smaller datasets covering about 2315 subjects, built by unifying existing small- and medium-scale datasets under a common FLAME topology. It contains shape geometry only; to obtain the images for each subject, please refer to the primary dataset.
+
+This dataset contains registration meshes together with the corresponding fitted FLAME parameters. Actors are split into individual folders, each with a unique identifier based on the original dataset. The folder names of the parameters and meshes are the same as in the analogous image dataset.
+
+To obtain the dataset, please follow each link separately and request the given subset.
+
+If you have any questions, feel free to email us.
+
+
+
+Each subset zip file has the following structure:
+```shell
+root\
+ FLAME_parameters\
+ actor_id\
+ *.npz
+ registrations\
+ actor_id\
+ *.obj
+```
+
+To retrieve FLAME2020 parameters you can simply do:
+```python
+import numpy as np
+import torch
+
+params = np.load('path.npz', allow_pickle=True)
+pose = torch.tensor(params['pose']).float()
+betas = torch.tensor(params['betas']).float()
+
+flame = {
+ 'shape_params': betas[:300],
+ 'expression_params': betas[300:],
+ 'pose_params': torch.cat([pose[:3], pose[6:9]]),
+}
+```
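+
+Here, the first 300 entries of `betas` are the FLAME shape coefficients and the remaining entries are the expression coefficients, while `pose_params` concatenates the global rotation (`pose[:3]`) with the jaw pose (`pose[6:9]`).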
+
+### MICA Training Dataset Preparation
+
+To prepare the MICA training dataset, you can follow the scripts in the [creation](https://github.com/Zielon/MICA/tree/master/datasets/creation) folder. Additionally, the complete list of images used for training can be found in the [image_paths](https://github.com/Zielon/MICA/tree/master/datasets/image_paths) folder. Each of its files stores, per actor, the name of the FLAME parameters file (`.npz`) together with the list of all training images under their original names, saved as a dictionary.
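+
+For reference, a minimal sketch of how such a cached file list can be inspected (assuming the per-actor layout consumed by `datasets/base.py`, i.e. actor -> (image names, FLAME parameters path)):
+```python
+import numpy as np
+
+# each image_paths/*.npy file stores a pickled dict: actor -> (image_names, flame_params_path)
+face_dict = np.load('datasets/image_paths/FACEWAREHOUSE.npy', allow_pickle=True).item()
+for actor, (image_names, params_path) in face_dict.items():
+    print(actor, params_path, len(image_names))
+```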
+
+### License
+This dataset is for academic, non-commercial use only. Moreover, it is an extension of already existing datasets; therefore, the license is shared and applies equally to both the original data and the derived data. Please read the license of each original dataset for more information, especially in the context of data privacy.
+
+### Additional Information
+
+Please note that some subsets do not contain the same number of subjects as their originals due to registration errors.
+
+#### BP4D+ Subset
+
+The original dataset contains 140 subjects, from which we selected neutral-pose scans and successfully registered 127 FLAME meshes.
+
+#### FRGC Subset
+
+1) A group that wants to obtain FLAME results on the FRGC data must license the FRGC 2.0 dataset from CVRL using the licensing procedure at http://cvrl.nd.edu.
+2) Once the license form is completed and CVRL has approved it, an email will be sent from Globus indicating that you have been authorized to retrieve FRGC 2.0.
+3) That authorization also covers any derivative work; therefore, please send a confirmation to the mica [AT] tue.mpg.de address to receive the MICA dataset.
+
+### Citation
+If you use this dataset in your research, please cite MICA:
+```bibtex
+@inproceedings{MICA:ECCV2022,
+  author = {Zielonka, Wojciech and Bolkart, Timo and Thies, Justus},
+  title = {Towards Metrical Reconstruction of Human Faces},
+  booktitle = {European Conference on Computer Vision},
+  year = {2022}
+}
+```
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/__init__.py b/src/pixel3dmm/preprocessing/MICA/datasets/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a38c66a8eb9b2f1229a1a7aeb1d4f780bb58023
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/__init__.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import numpy as np
+from torch.utils.data import ConcatDataset
+
+from datasets.base import BaseDataset
+
+
+def build_train(config, device):
+ data_list = []
+ total_images = 0
+ for dataset in config.training_data:
+        # entries of config.training_data are either a dataset name or a [name, n_train] pair
+        config.n_train = np.Inf
+        if type(dataset) is list:
+            dataset_name, n_train = dataset
+            config.n_train = n_train
+        else:
+            dataset_name = dataset.upper()
+
+ dataset = BaseDataset(name=dataset_name, config=config, device=device, isEval=False)
+ data_list.append(dataset)
+ total_images += dataset.total_images
+
+ return ConcatDataset(data_list), total_images
+
+
+def build_val(config, device):
+ data_list = []
+ total_images = 0
+ for dataset in config.eval_data:
+        # entries of config.eval_data are either a dataset name or a [name, n_train] pair
+        config.n_train = np.Inf
+        if type(dataset) is list:
+            dataset_name, n_train = dataset
+            config.n_train = n_train
+        else:
+            dataset_name = dataset.upper()
+
+ dataset = BaseDataset(name=dataset_name, config=config, device=device, isEval=True)
+ data_list.append(dataset)
+ total_images += dataset.total_images
+
+ return ConcatDataset(data_list), total_images
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/base.py b/src/pixel3dmm/preprocessing/MICA/datasets/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d2200dc4249770596c7b7c5ee27305cad1fdd1f
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/base.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import re
+from abc import ABC
+from functools import reduce
+from pathlib import Path
+
+import loguru
+import numpy as np
+import torch
+from loguru import logger
+from skimage.io import imread
+from torch.utils.data import Dataset
+from torchvision import transforms
+
+
+class BaseDataset(Dataset, ABC):
+ def __init__(self, name, config, device, isEval):
+ self.K = config.K
+ self.isEval = isEval
+ self.n_train = np.Inf
+ self.imagepaths = []
+ self.face_dict = {}
+ self.name = name
+ self.device = device
+ self.min_max_K = 0
+ self.cluster = False
+ self.dataset_root = config.root
+ self.total_images = 0
+ self.image_folder = 'arcface_input'
+ self.flame_folder = 'FLAME_parameters'
+ self.initialize()
+
+ def initialize(self):
+ logger.info(f'[{self.name}] Initialization')
+ image_list = f'{os.path.abspath(os.path.dirname(__file__))}/image_paths/{self.name}.npy'
+ logger.info(f'[{self.name}] Load cached file list: ' + image_list)
+ self.face_dict = np.load(image_list, allow_pickle=True).item()
+ self.imagepaths = list(self.face_dict.keys())
+ logger.info(f'[Dataset {self.name}] Total {len(self.imagepaths)} actors loaded!')
+ self.set_smallest_k()
+
+ def set_smallest_k(self):
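+        # track the smallest and largest number of images available per actor, and the total image count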
+ self.min_max_K = np.Inf
+ max_min_k = -np.Inf
+ for key in self.face_dict.keys():
+ length = len(self.face_dict[key][0])
+ if length < self.min_max_K:
+ self.min_max_K = length
+ if length > max_min_k:
+ max_min_k = length
+
+ self.total_images = reduce(lambda k, l: l + k, map(lambda e: len(self.face_dict[e][0]), self.imagepaths))
+ loguru.logger.info(f'Dataset {self.name} with min K = {self.min_max_K} max K = {max_min_k} length = {len(self.face_dict)} total images = {self.total_images}')
+ return self.min_max_K
+
+ def compose_transforms(self, *args):
+ self.transforms = transforms.Compose([t for t in args])
+
+ def get_arcface_path(self, image_path):
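+        # the precomputed ArcFace input for an image is stored next to it with the extension swapped to .npy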
+ return re.sub('png|jpg', 'npy', str(image_path))
+
+ def __len__(self):
+ return len(self.imagepaths)
+
+ def __getitem__(self, index):
+ actor = self.imagepaths[index]
+ images, params_path = self.face_dict[actor]
+ images = [Path(self.dataset_root, self.name, self.image_folder, path) for path in images]
+ sample_list = np.array(np.random.choice(range(len(images)), size=self.K, replace=False))
+
+ K = self.K
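+        # for evaluation, deterministically take the first K images per actor, capped at 200 and at the smallest per-actor image count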
+ if self.isEval:
+ K = max(0, min(200, self.min_max_K))
+ sample_list = np.array(range(len(images))[:K])
+
+ params = np.load(os.path.join(self.dataset_root, self.name, self.flame_folder, params_path), allow_pickle=True)
+ pose = torch.tensor(params['pose']).float()
+ betas = torch.tensor(params['betas']).float()
+
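+        # replicate the ground-truth FLAME parameters K times so that every sampled image of this actor shares them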
+ flame = {
+ 'shape_params': torch.cat(K * [betas[:300][None]], dim=0),
+ 'expression_params': torch.cat(K * [betas[300:][None]], dim=0),
+ 'pose_params': torch.cat(K * [torch.cat([pose[:3], pose[6:9]])[None]], dim=0),
+ }
+
+ images_list = []
+ arcface_list = []
+
+ for i in sample_list:
+ image_path = images[i]
+ image = np.array(imread(image_path))
+ image = image / 255.
+ image = image.transpose(2, 0, 1)
+ arcface_image = np.load(self.get_arcface_path(image_path), allow_pickle=True)
+
+ images_list.append(image)
+ arcface_list.append(torch.tensor(arcface_image))
+
+ images_array = torch.from_numpy(np.array(images_list)).float()
+ arcface_array = torch.stack(arcface_list).float()
+
+ return {
+ 'image': images_array,
+ 'arcface': arcface_array,
+ 'imagename': actor,
+ 'dataset': self.name,
+ 'flame': flame,
+ }
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/__init__.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/generator.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/generator.py
new file mode 100644
index 0000000000000000000000000000000000000000..225800dc076b5c2dc49632d486b54da850bcaa31
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/generator.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+from glob import glob
+from multiprocessing import Pool
+from pathlib import Path
+from typing import List
+
+import cv2
+import numpy as np
+from insightface.app import FaceAnalysis
+from insightface.app.common import Face
+from insightface.utils import face_align
+from loguru import logger
+from tqdm import tqdm
+
+from datasets.creation.instances.instance import Instance
+from datasets.creation.util import get_image, get_center, get_arcface_input
+
+
+def _transfer(src, dst):
+ src.parent.mkdir(parents=True, exist_ok=True)
+ dst.parent.mkdir(parents=True, exist_ok=True)
+ os.system(f'cp {str(src)} {str(dst)}')
+
+
+def _copy(payload):
+ instance, func, target, transform_path = payload
+ files = func()
+ for actor in files.keys():
+ for file in files[actor]:
+ _transfer(Path(file), Path(instance.get_dst(), target, actor, transform_path(file)))
+
+
+class Generator:
+ def __init__(self, instances):
+ self.instances: List[Instance] = instances
+ self.ARCFACE = 'arcface_input'
+
+ def copy(self):
+ logger.info('Start copying...')
+ for instance in tqdm(self.instances):
+ payloads = [(instance, instance.get_images, 'images', instance.transform_path)]
+ with Pool(processes=len(payloads)) as pool:
+ for _ in tqdm(pool.imap_unordered(_copy, payloads), total=len(payloads)):
+ pass
+
+ def preprocess(self):
+ logger.info('Start preprocessing...')
+ for instance in tqdm(self.instances):
+ instance.preprocess()
+
+ def arcface(self):
+ app = FaceAnalysis(name='antelopev2', providers=['CUDAExecutionProvider'])
+ app.prepare(ctx_id=0, det_size=(224, 224))
+
+ logger.info('Start arcface...')
+ for instance in tqdm(self.instances):
+ src = instance.get_dst()
+ for image_path in tqdm(sorted(glob(f'{src}/images/*/*'))):
+ dst = image_path.replace('images', self.ARCFACE)
+ Path(dst).parent.mkdir(exist_ok=True, parents=True)
+ for img in instance.transform_image(get_image(image_path[0:-4])):
+ bboxes, kpss = app.det_model.detect(img, max_num=0, metric='default')
+ if bboxes.shape[0] == 0:
+ continue
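+                # keep the detection whose bounding box center is closest to the image center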
+ i = get_center(bboxes, img)
+ bbox = bboxes[i, 0:4]
+ det_score = bboxes[i, 4]
+ if det_score < instance.get_min_det_score():
+ continue
+ kps = None
+ if kpss is not None:
+ kps = kpss[i]
+ face = Face(bbox=bbox, kps=kps, det_score=det_score)
+ blob, aimg = get_arcface_input(face, img)
+ np.save(dst[0:-4], blob)
+ cv2.imwrite(dst, face_align.norm_crop(img, landmark=face.kps, image_size=224))
+
+ def run(self):
+ self.copy()
+ self.preprocess()
+ self.arcface()
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/__init__.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/bu3dfe.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/bu3dfe.py
new file mode 100644
index 0000000000000000000000000000000000000000..c704bf21242e22eb9d6193fe63e947a43b0137b7
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/bu3dfe.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+from abc import ABC
+from glob import glob
+from pathlib import Path
+
+from pytorch3d.io import load_objs_as_meshes
+
+from datasets.creation.instances.instance import Instance
+
+
+class BU3DFE(Instance, ABC):
+ def __init__(self):
+ super(BU3DFE, self).__init__()
+ self.dst = '/scratch/NFC/OnFlame/BU3DFE/'
+ self.src = '/scratch/NFC/BU-3DFE/'
+
+ def get_images(self):
+ images = {}
+ for actor in sorted(glob(self.get_src().replace('BU-3DFE', 'BU-3DFE_clean') + 'images/*')):
+ images[Path(actor).name] = glob(f'{actor}/*.jpg')
+
+ return images
+
+ def get_flame_params(self):
+        params = {}
+        for actor in sorted(glob(self.get_src() + 'FLAME_parameters/iter2/*')):
+            params[Path(actor).name] = glob(f'{actor}/*.npz')
+
+        return params
+
+ def get_registrations(self):
+ registrations = {}
+ for actor in sorted(glob(self.get_src() + 'registrations/iter2/neutral_align/*')):
+ registrations[Path(actor).name] = glob(f'{actor}/*.obj')
+
+ return registrations
+
+ def get_meshes(self):
+ meshes = {}
+ files = sorted(glob(self.get_src() + 'raw_ne_data/*'))
+ actors = set(map(lambda f: Path(f).name[0:5], files))
+ for actor in actors:
+ meshes[Path(actor).name] = next(filter(lambda f: actor in f and 'obj' in f, files))
+
+ return meshes
+
+ def transform_mesh(self, path):
+ self.update_obj(path)
+ mesh = load_objs_as_meshes([path], device=self.device)
+ vertices = mesh._verts_list[0]
+ center = vertices.mean(0)
+ mesh._verts_list = [vertices - center]
+ mesh.scale_verts_(0.01)
+
+ return mesh.clone()
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/d3dfacs.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/d3dfacs.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c219687e0f121abf9abbf7f50608d922da7ef16
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/d3dfacs.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+from abc import ABC
+from glob import glob
+from pathlib import Path
+
+from datasets.creation.instances.instance import Instance
+
+
+class D3DFACS(Instance, ABC):
+ def __init__(self):
+ super(D3DFACS, self).__init__()
+ self.dst = '/scratch/NFC/OnFlame/D3DFACS/'
+ self.src = '/home/wzielonka/datasets/D3DFACS/'
+
+ def get_images(self):
+ images = {}
+ for file in sorted(glob(self.get_src() + 'processed/images/*')):
+ actor = Path(file).stem
+ images[actor] = glob(f'{file}/*.jpg')
+
+ return images
+
+ def get_flame_params(self):
+ params = {}
+ for file in sorted(glob(self.get_src() + 'processed/FLAME/*.npz')):
+ actor = Path(file).stem
+ params[actor] = [file]
+
+ return params
+
+ def get_registrations(self):
+ registrations = {}
+ for file in sorted(glob(self.get_src() + 'processed/registrations/*')):
+ actor = Path(file).stem.split('_')[0]
+ registrations[actor] = [file]
+
+ return registrations
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/facewarehouse.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/facewarehouse.py
new file mode 100644
index 0000000000000000000000000000000000000000..f953b8371ef54f798da7c164003d9bdac5839f8b
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/facewarehouse.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+from abc import ABC
+from glob import glob
+from pathlib import Path
+
+from datasets.creation.instances.instance import Instance
+
+
+class FaceWarehouse(Instance, ABC):
+ def __init__(self):
+ super(FaceWarehouse, self).__init__()
+ self.dst = '/scratch/NFC/OnFlame/FACEWAREHOUSE/'
+ self.src = '/scratch/NFC/FaceWarehouse/'
+
+ def get_images(self):
+ images = {}
+ for actor in sorted(glob(self.get_src() + 'Images/*')):
+ images[Path(actor).stem] = glob(f'{actor}/*.png')
+
+ return images
+
+ def get_flame_params(self):
+ params = {}
+ for actor in sorted(glob(self.get_src() + 'FLAME_fits/*')):
+ params[Path(actor).stem] = [sorted(glob(f'{actor}/*.npz'))[0]]
+
+ return params
+
+ def get_registrations(self):
+ registrations = {}
+ for actor in sorted(glob(self.get_src() + 'FLAME_fits/*')):
+ registrations[Path(actor).stem] = [f'{actor}/tmp/pose_0__def_trafo_fit.obj']
+
+ return registrations
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/florence.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/florence.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ae6e0d0a7592e09c21180b79ffb7090f117d703
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/florence.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+from abc import ABC
+from glob import glob
+from pathlib import Path
+
+import numpy as np
+
+from datasets.creation.instances.instance import Instance
+
+
+class Florence(Instance, ABC):
+ def __init__(self):
+ super(Florence, self).__init__()
+ self.dst = '/scratch/NFC/OnFlame/FLORENCE/'
+ self.src = '/scratch/NFC/MICC_Florence/'
+
+ def get_min_det_score(self):
+ return 0.85
+
+ def get_images(self):
+ images = {}
+ for actor in sorted(glob(self.get_src() + 'images/*')):
+ imgs = sorted(list(filter(lambda f: 'PTZ-Outdoor' not in f, glob(f'{actor}/*/*.jpg'))))
+            indices = np.random.choice(len(imgs), 1000, replace=False)
+            images[Path(actor).stem] = [imgs[i] for i in indices]
+
+ return images
+
+ def get_flame_params(self):
+ params = {}
+ for actor in sorted(glob(self.get_src() + 'FLAME_parameters/iter1/*')):
+ params[Path(actor).stem] = glob(f'{actor}/*.npz')
+
+ return params
+
+ def get_registrations(self):
+ registrations = {}
+ for actor in sorted(glob(self.get_src() + 'registrations/iter1/*')):
+ if 'rendering' in actor:
+ continue
+ registrations[Path(actor).stem] = glob(f'{actor}/*.obj')
+
+ return registrations
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/frgc.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/frgc.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d4c42e8019c2805aa006f8640d4a68cf9af289a
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/frgc.py
@@ -0,0 +1,84 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+from abc import ABC
+from glob import glob
+from pathlib import Path
+
+import numpy as np
+from pytorch3d.io import load_objs_as_meshes
+
+from datasets.creation.instances.instance import Instance
+
+
+class FRGC(Instance, ABC):
+ def __init__(self):
+ super(FRGC, self).__init__()
+ self.dst = '/scratch/NFC/OnFlame/FRGC/'
+ self.src = '/scratch/NFC/FRGC_v2/'
+
+ def get_images(self):
+ images = {}
+ for actor in sorted(glob(self.get_src() + 'images/*')):
+ imgs = list(filter(lambda f: 'Spring2003range' not in f, glob(f'/{actor}/*/*.jpg')))
+ images[Path(actor).name] = imgs
+
+ return images
+
+ def get_flame_params(self):
+        params = {}
+        for actor in sorted(glob(self.get_src() + 'FLAME_parameters/*')):
+            params[Path(actor).name] = glob(f'/{actor}/*.npz')
+
+        return params
+
+ def get_registrations(self):
+ registrations = {}
+ for actor in sorted(glob(self.get_src() + 'registrations/*')):
+ registrations[Path(actor).name] = glob(f'/{actor}/*.obj')
+
+ return registrations
+
+ def get_meshes(self):
+ meshes = {}
+ for file in sorted(glob(self.get_src() + 'registrations_tmp_new/*')):
+ meshes[Path(file).name] = glob(f'/{file}/*.obj')
+
+ sessions = np.load('/home/wzielonka/documents/scans_to_session.npy', allow_pickle=True)[()]
+ valid = []
+ for key in sessions.keys():
+ if 'Spring2003range' not in sessions[key]:
+ valid.append(key)
+
+ filtered = {}
+ for actor in meshes.keys():
+ files = meshes[actor]
+ selected = list(filter(lambda f: Path(f).stem in valid, files))
+ if len(selected) > 0:
+ filtered[actor] = selected
+
+ return filtered
+
+ def transform_mesh(self, path):
+ self.update_obj(path[0])
+ mesh = load_objs_as_meshes(path, device=self.device)
+ mesh.scale_verts_(10.0)
+ vertices = mesh._verts_list[0]
+ center = vertices.mean(0)
+ mesh._verts_list = [vertices - center]
+
+ return mesh.clone()
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/instance.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/instance.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4fb30aa320c181767a50b46729a95d7cbb37e33
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/instance.py
@@ -0,0 +1,127 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+from abc import abstractmethod
+from pathlib import Path
+
+from pytorch3d.transforms import RotateAxisAngle
+
+
+class Instance:
+ def __init__(self):
+ self.mount = '/home/wzielonka/Cluster/lustre'
+ self.dst = 'empty'
+ self.src = 'empty'
+ self.device = 'cuda:0'
+ self.actors = []
+ self.use_mount = os.path.exists(self.mount)
+
+ def get_dst(self):
+ return self.dst if not self.use_mount else self.mount + self.dst
+
+ def get_src(self):
+ return self.src if not self.use_mount else self.mount + self.src
+
+ @abstractmethod
+ def get_min_det_score(self):
+ return 0
+
+ @abstractmethod
+ def preprocess(self):
+ pass
+
+ @abstractmethod
+ def get_images(self):
+ return {}
+
+ @abstractmethod
+ def get_flame_params(self):
+ return {}
+
+ @abstractmethod
+ def get_registrations(self):
+ return {}
+
+ @abstractmethod
+ def get_meshes(self):
+ return {}
+
+ @abstractmethod
+ def transform_mesh(self, path):
+ return None
+
+ @abstractmethod
+ def transform_image(self, img):
+ return [img]
+
+ @abstractmethod
+ def transform_path(self, file):
+ return Path(file).name
+
+ @abstractmethod
+ def get_rotations(self):
+ rots = {}
+ degree = 2.5
+ step = int(15 / degree / 2)
+ X = range(-step, step + 1)
+ degree = 8.0
+ step = int(144 / degree / 2)
+ Y = range(-step, step + 1)
+ for a, angles in [('X', X), ('Y', Y)]:
+ r = []
+ for i in angles:
+ r.append((RotateAxisAngle(float(degree * i), axis=a, device=self.device), float(degree * i)))
+ rots[a] = r
+ return rots
+
+ @abstractmethod
+ def update_obj(self, path, fix_mtl=False):
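+        # rewrite the OBJ so pytorch3d can load it: drop all usemtl/newmtl lines and
+        # re-insert a single usemtl right after the mtllib statement (fix_mtl also renames the material in the .mtl file)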
+ mesh = Path(path).stem
+ with open(path, 'r') as file:
+ filedata = file.readlines()
+
+ input = []
+ for line in filedata:
+ if 'usemtl' in line or 'newmtl' in line:
+ continue
+ input.append(line)
+
+ output = []
+ for line in input:
+ if 'mtllib' in line:
+ mtl = line.split(' ')[-1].split('.')[0]
+ line += f'usemtl {mtl}\n'
+ output.append(line)
+ with open(path, 'w') as file:
+ file_lines = "".join(output)
+ file.write(file_lines)
+
+ if not fix_mtl:
+ return
+
+ with open(path.replace('obj', 'mtl'), 'r') as file:
+ filedata = file.readlines()
+
+ output = []
+ for line in filedata:
+ if 'newmtl' in line:
+ line = 'newmtl ' + mesh + '\n'
+ output.append(line)
+ with open(path.replace('obj', 'mtl'), 'w') as file:
+ file_lines = "".join(output)
+ file.write(file_lines)
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/lyhm.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/lyhm.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f36a5da822ecbf1592f114943cbce159885ceb2
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/lyhm.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+from abc import ABC
+from glob import glob
+from pathlib import Path
+
+from PIL import ImageFile
+
+ImageFile.LOAD_TRUNCATED_IMAGES = True
+from pytorch3d.io import load_objs_as_meshes
+from pytorch3d.transforms import RotateAxisAngle
+
+from datasets.creation.instances.instance import Instance
+
+
+class LYHM(Instance, ABC):
+ def __init__(self):
+ super(LYHM, self).__init__()
+ self.dst = '/scratch/NFC/MICA/LYHM/'
+ self.src = '/scratch/NFC/LYHM/'
+
+ def get_images(self):
+ images = {}
+ for actor in sorted(glob(self.get_src() + '/*')):
+ images[Path(actor).name] = glob(f'/{actor}/*.png')
+
+ return images
+
+ def get_flame_params(self):
+        params = {}
+        for actor in sorted(glob(self.get_src() + '/*')):
+            params[Path(actor).name] = glob(f'/{actor}/*.npz')
+
+        return params
+
+ def get_registrations(self):
+ registrations = {}
+ for actor in sorted(glob(self.get_src() + '/*')):
+ all = glob(f'/{actor}/*.obj')
+ registrations[Path(actor).name] = list(filter(lambda m: 'model_fit' not in m, all))
+
+ return registrations
+
+ def get_meshes(self):
+ meshes = {}
+ for actor in sorted(glob(self.get_src() + '/*')):
+ meshes[Path(actor).name] = glob(f'/{actor}/scan/*.obj')
+
+ return meshes
+
+ def transform_mesh(self, path):
+ mesh = load_objs_as_meshes(path, device=self.device)
+ vertices = mesh._verts_list[0]
+ center = vertices.mean(0)
+ mesh._verts_list = [vertices - center]
+ mesh.scale_verts_(0.01)
+
+ rot = RotateAxisAngle(-45, axis='X', device=self.device)
+ mesh._verts_list = [rot.transform_points(mesh.verts_list()[0])]
+ rot = RotateAxisAngle(-45, axis='Y', device=self.device)
+ mesh._verts_list = [rot.transform_points(mesh.verts_list()[0])]
+
+ return mesh.clone()
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/pb4d.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/pb4d.py
new file mode 100644
index 0000000000000000000000000000000000000000..2897751edcc6553af931a05f140594b0a1726f14
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/pb4d.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+from abc import ABC
+from glob import glob
+from pathlib import Path
+
+import numpy as np
+from pytorch3d.io import load_objs_as_meshes
+
+from datasets.creation.instances.instance import Instance
+
+
+class PB4D(Instance, ABC):
+ def __init__(self):
+ super(PB4D, self).__init__()
+ self.dst = '/scratch/NFC/OnFlame/BP4D/'
+ self.src = '/scratch/NFC/BP4D/'
+
+ def get_images(self):
+ images = {}
+ for actor in sorted(glob(self.get_src() + 'images/*')):
+ imgs = sorted(glob(f'/{actor}/*.jpg'))
+            indices = np.random.choice(len(imgs), 100, replace=False)
+            images[Path(actor).name] = [imgs[i] for i in indices]
+
+ return images
+
+ def get_flame_params(self):
+        params = {}
+        for file in sorted(glob(self.get_src() + 'FLAME_parameters/*.npz')):
+            params[Path(file).stem] = [file]
+
+        return params
+
+ def get_registrations(self):
+ registrations = {}
+ for file in sorted(glob(self.get_src() + 'registrations/*')):
+ registrations[Path(file).stem] = [file]
+
+ return registrations
+
+ def get_meshes(self):
+ meshes = {}
+ for file in sorted(glob(self.get_src() + 'scans/*.obj')):
+ meshes[Path(file).stem] = [file]
+
+ return meshes
+
+ def transform_mesh(self, path):
+ mesh = load_objs_as_meshes(path, device=self.device)
+ mesh.scale_verts_(0.01)
+ vertices = mesh._verts_list[0]
+ center = vertices.mean(0)
+ mesh._verts_list = [vertices - center]
+
+ return mesh.clone()
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/stirling.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/stirling.py
new file mode 100644
index 0000000000000000000000000000000000000000..e845b0da58c028c4bd8c2e5efa45e5fd34ad4460
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/instances/stirling.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+from abc import ABC
+from glob import glob
+from pathlib import Path
+
+from pytorch3d.io import load_objs_as_meshes
+
+from datasets.creation.instances.instance import Instance
+
+
+class Stirling(Instance, ABC):
+ def __init__(self):
+ super(Stirling, self).__init__()
+ self.dst = '/scratch/NFC/OnFlame/STIRLING/'
+ self.src = '/scratch/NFC/Stirling/'
+
+ def get_min_det_score(self):
+ return 0.75
+
+ def get_images(self):
+ images = {}
+ for file in sorted(glob(self.get_src() + 'images/Real_images__Subset_2D_FG2018/HQ/*')):
+ actor = Path(file).stem.split('_')[0].upper()
+ if actor not in images:
+ images[actor] = []
+ images[actor].append(file)
+
+ return images
+
+ def get_flame_params(self):
+        params = {}
+        for file in sorted(glob(self.get_src() + 'FLAME_parameters/iter1/*/*.npz')):
+            actor = Path(file).stem[0:5].upper()
+            params[Path(actor).name] = [file]
+
+        return params
+
+ def get_registrations(self):
+ registrations = {}
+ for file in sorted(glob(self.get_src() + 'registrations/iter1/*/*')):
+ if 'obj' not in file:
+ continue
+ actor = Path(file).stem[0:5].upper()
+ registrations[Path(actor).name] = [file]
+
+ return registrations
+
+ def get_meshes(self):
+ meshes = {}
+ for file in sorted(glob(self.get_src() + 'scans/*/*.obj')):
+ actor = Path(file).stem[0:5].upper()
+ if 'obj' in file:
+ meshes[actor] = file
+
+ return meshes
+
+ def transform_mesh(self, path):
+ self.update_obj(path, fix_mtl=True)
+ mesh = load_objs_as_meshes([path], device=self.device)
+ vertices = mesh._verts_list[0]
+ center = vertices.mean(0)
+ mesh._verts_list = [vertices - center]
+ mesh.scale_verts_(0.01)
+
+ return mesh.clone()
+
+ def transform_path(self, file):
+ name = Path(file).name
+ return name
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/main.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa447e40ba3bb8847a7395908605521f64234d18
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/main.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import numpy as np
+import torch
+
+from datasets.creation.generator import Generator
+from datasets.creation.instances.bu3dfe import BU3DFE
+from datasets.creation.instances.d3dfacs import D3DFACS
+from datasets.creation.instances.facewarehouse import FaceWarehouse
+from datasets.creation.instances.florence import Florence
+from datasets.creation.instances.frgc import FRGC
+from datasets.creation.instances.lyhm import LYHM
+from datasets.creation.instances.pb4d import PB4D
+from datasets.creation.instances.stirling import Stirling
+
+np.random.seed(42)
+
+if __name__ == '__main__':
+ torch.multiprocessing.set_start_method('spawn')
+
+ datasets = [FaceWarehouse(), LYHM(), D3DFACS(), FRGC(), Florence(), Stirling(), BU3DFE(), PB4D()]
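+    # select the subsets to process here; by default only FaceWarehouse is passed to the Generator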
+ generator = Generator([FaceWarehouse()])
+
+ generator.run()
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/creation/util.py b/src/pixel3dmm/preprocessing/MICA/datasets/creation/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..c3612f100b443ae1d5032b6225bdc1e624a2651f
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/creation/util.py
@@ -0,0 +1,146 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import os.path as osp
+from pathlib import Path
+
+import cv2
+import numpy as np
+from insightface.utils import face_align
+import math
+
+input_mean = 127.5
+input_std = 127.5
+
+
+def create_folders(folders):
+ if not type(folders) is list:
+ folders = folders.split('/')
+ parents = '/'
+ for folder in folders:
+ parents = os.path.join(parents, folder)
+ if os.path.exists(parents):
+ continue
+ Path(parents).mkdir(exist_ok=True)
+
+
+def get_arcface_input(face, img):
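+    # align the detected face to the canonical 112x112 ArcFace crop and normalize it with a mean/std of 127.5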
+ aimg = face_align.norm_crop(img, landmark=face.kps)
+ blob = cv2.dnn.blobFromImages([aimg], 1.0 / input_std, (112, 112), (input_mean, input_mean, input_mean), swapRB=True)
+ return blob[0], aimg
+
+
+def get_image(name, to_rgb=False):
+ images_dir = osp.join(Path(__file__).parent.absolute(), '../images')
+ ext_names = ['.jpg', '.png', '.jpeg']
+ image_file = None
+ for ext_name in ext_names:
+ _image_file = osp.join(images_dir, "%s%s" % (name, ext_name))
+ if osp.exists(_image_file):
+ image_file = _image_file
+ break
+ assert image_file is not None, '%s not found' % name
+ img = cv2.imread(image_file)
+ if to_rgb:
+ img = img[:, :, ::-1]
+ return img
+
+
+# from the original insightface.app.face_analysis.py file
+def draw_on(img, faces):
+ import cv2
+ dimg = img.copy()
+ for i in range(len(faces)):
+ face = faces[i]
+        box = face.bbox.astype(int)
+        color = (0, 0, 255)
+        cv2.rectangle(dimg, (box[0], box[1]), (box[2], box[3]), color, 2)
+        if face.kps is not None:
+            kps = face.kps.astype(int)
+ # print(landmark.shape)
+ for l in range(kps.shape[0]):
+ color = (0, 0, 255)
+ if l == 0 or l == 3:
+ color = (0, 255, 0)
+ cv2.circle(dimg, (kps[l][0], kps[l][1]), 1, color,
+ 2)
+ if face.gender is not None and face.age is not None:
+ cv2.putText(dimg, '%s,%d' % (face.sex, face.age), (box[0] - 1, box[1] - 4), cv2.FONT_HERSHEY_COMPLEX, 0.7, (0, 255, 0), 1)
+
+ return dimg
+
+
+def dist(p1, p2):
+ return math.sqrt(((p1[0] - p2[0]) ** 2) + ((p1[1] - p2[1]) ** 2))
+
+
+def get_center(bboxes, img):
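+    # return the index of the bounding box whose center is closest to the image center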
+ img_center = img.shape[1] // 2, img.shape[0] // 2
+ size = bboxes.shape[0]
+ distance = np.Inf
+ j = 0
+ for i in range(size):
+ x1, y1, x2, y2 = bboxes[i, 0:4]
+ dx = abs(x2 - x1) / 2.0
+ dy = abs(y2 - y1) / 2.0
+ current = dist((x1 + dx, y1 + dy), img_center)
+ if current < distance:
+ distance = current
+ j = i
+
+ return j
+
+
+def bbox2point(left, right, top, bottom, type='bbox'):
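+    # convert a detection bbox or a 68-landmark extent into the (old_size, center) pair used for cropping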
+ if type == 'kpt68':
+ old_size = (right - left + bottom - top) / 2 * 1.1
+ center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0])
+ elif type == 'bbox':
+ old_size = (right - left + bottom - top) / 2
+ center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0 + old_size * 0.12])
+ else:
+ raise NotImplementedError
+ return old_size, center
+
+
+def get_bbox(image, lmks, bb_scale=1.0):
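+    # For each set of landmarks, build a square crop box centered on the landmark extent
+    # (side scaled by bb_scale), clamp it to the image bounds and force an even width/height.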
+ h, w, c = image.shape
+ bbox = []
+ for i in range(lmks.shape[0]):
+ lmks = lmks.astype(np.int32)
+ x_min, x_max, y_min, y_max = np.min(lmks[i, :, 0]), np.max(lmks[i, :, 0]), np.min(lmks[i, :, 1]), np.max(lmks[i, :, 1])
+ x_center, y_center = int((x_max + x_min) / 2.0), int((y_max + y_min) / 2.0)
+ size = int(bb_scale * 2 * max(x_center - x_min, y_center - y_min))
+ xb_min, xb_max, yb_min, yb_max = max(x_center - size // 2, 0), min(x_center + size // 2, w - 1), \
+ max(y_center - size // 2, 0), min(y_center + size // 2, h - 1)
+
+ yb_max = min(yb_max, h - 1)
+ xb_max = min(xb_max, w - 1)
+ yb_min = max(yb_min, 0)
+ xb_min = max(xb_min, 0)
+
+ if (xb_max - xb_min) % 2 != 0:
+ xb_min += 1
+
+ if (yb_max - yb_min) % 2 != 0:
+ yb_min += 1
+
+ # x1, y1, x2, y2
+ bbox.append(np.array([xb_min, yb_min, xb_max, yb_max, 0]))
+
+ return np.stack(bbox)
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/BP4D.npy b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/BP4D.npy
new file mode 100644
index 0000000000000000000000000000000000000000..0cc6554d4eb585bc13a409db3a1d2b2a4ede5801
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/BP4D.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e625d679f72e3aa2e00ac4faed0b07d6957c94986a96746a4f8de29997e77bba
+size 284773
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/BU3DFE.npy b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/BU3DFE.npy
new file mode 100644
index 0000000000000000000000000000000000000000..29a945ee68ef3db15239e572c0706a5aa997610d
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/BU3DFE.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f144740162e25f9067a73f298e8229de60a17eccf5ba5e56ab8f0789688518e
+size 196059
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/D3DFACS.npy b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/D3DFACS.npy
new file mode 100644
index 0000000000000000000000000000000000000000..7eb8b82c8cedf93ffb8b8394e79110c2b83da24e
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/D3DFACS.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0270c09ae15eec72a94cfbc3a1502b970330698e290a10f2c4b8176341f4ae87
+size 207899
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/FACEWAREHOUSE.npy b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/FACEWAREHOUSE.npy
new file mode 100644
index 0000000000000000000000000000000000000000..8a2071ac035ba226fa52061bcec3c5e26a6f13b2
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/FACEWAREHOUSE.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2e7e9f2bce6fce23e4f5861ebe71e0402bafdf6383217a9d20e1130787b01d3b
+size 101433
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/FLORENCE.npy b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/FLORENCE.npy
new file mode 100644
index 0000000000000000000000000000000000000000..6653b3a4d89e7446ee4a7ca03e1f039267894c2a
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/FLORENCE.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:794583b78501cda2658aa1fac4061ac7f75271ee35d71f090127f3935491ce42
+size 500567
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/FRGC.npy b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/FRGC.npy
new file mode 100644
index 0000000000000000000000000000000000000000..63d097ba38b1c3a0229b1e9a51e43f4bb7be905a
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/FRGC.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dfa3e47a66f6f79b2a44091322c5b5680a32018774c2121d7e53f0f72a358c28
+size 1232221
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/LYHM.npy b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/LYHM.npy
new file mode 100644
index 0000000000000000000000000000000000000000..6a169e0b1f681d6914353a778de049fc6bfc763b
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/LYHM.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab68b388abd4b5cf2dbc8cf46058ba8c26a171b8bbbff06d20082ba1b2780974
+size 126716
diff --git a/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/STIRLING.npy b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/STIRLING.npy
new file mode 100644
index 0000000000000000000000000000000000000000..2658999a331d26e13cfb0068fa38d1beb3a50bfe
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/datasets/image_paths/STIRLING.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4fcd43ec4af033f908c660a825a948621d071ac9fa7f0292943c3519dfd6805c
+size 23761
diff --git a/src/pixel3dmm/preprocessing/MICA/demo.py b/src/pixel3dmm/preprocessing/MICA/demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..3dd68fbb684d4d4b18f7c804e26a0fd69ca2a7ac
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/demo.py
@@ -0,0 +1,156 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import argparse
+import os
+import random
+from glob import glob
+from pathlib import Path
+
+import cv2
+import numpy as np
+import torch
+import torch.backends.cudnn as cudnn
+import trimesh
+from insightface.app.common import Face
+from insightface.utils import face_align
+from loguru import logger
+from skimage.io import imread
+from tqdm import tqdm
+
+from configs.config import get_cfg_defaults
+from datasets.creation.util import get_arcface_input, get_center, draw_on
+from utils import util
+from utils.landmark_detector import LandmarksDetector, detectors
+
+
+def deterministic(rank):
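+    # Seed every RNG (torch, CUDA, numpy, random) and put cuDNN into deterministic,
+    # non-benchmarking mode for reproducible runs.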
+ torch.manual_seed(rank)
+ torch.cuda.manual_seed(rank)
+ np.random.seed(rank)
+ random.seed(rank)
+
+ cudnn.deterministic = True
+ cudnn.benchmark = False
+
+
+def process(args, app, image_size=224, draw_bbox=False):
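+    # For each input image: detect faces, keep the one closest to the image center, store the
+    # ArcFace blob as <name>.npy and the aligned crop as <name>.jpg, and return the .npy paths.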
+ dst = Path(args.a)
+ dst.mkdir(parents=True, exist_ok=True)
+ processes = []
+ image_paths = sorted(glob(args.i + '/*.*'))
+ for image_path in tqdm(image_paths):
+ name = Path(image_path).stem
+ img = cv2.imread(image_path)
+ bboxes, kpss = app.detect(img)
+ if bboxes.shape[0] == 0:
+ logger.error(f'[ERROR] Face not detected for {image_path}')
+ continue
+ i = get_center(bboxes, img)
+ bbox = bboxes[i, 0:4]
+ det_score = bboxes[i, 4]
+ kps = None
+ if kpss is not None:
+ kps = kpss[i]
+ face = Face(bbox=bbox, kps=kps, det_score=det_score)
+ blob, aimg = get_arcface_input(face, img)
+ file = str(Path(dst, name))
+ np.save(file, blob)
+ processes.append(file + '.npy')
+ cv2.imwrite(file + '.jpg', face_align.norm_crop(img, landmark=face.kps, image_size=image_size))
+ if draw_bbox:
+ dimg = draw_on(img, [face])
+ cv2.imwrite(file + '_bbox.jpg', dimg)
+
+ return processes
+
+
+def to_batch(path):
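+    # Load the aligned crop and its ArcFace blob for one image and move both to the GPU
+    # as single-element batches.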
+ src = path.replace('npy', 'jpg')
+ if not os.path.exists(src):
+ src = path.replace('npy', 'png')
+
+ image = imread(src)[:, :, :3]
+ image = image / 255.
+ image = cv2.resize(image, (224, 224)).transpose(2, 0, 1)
+ image = torch.tensor(image).cuda()[None]
+
+ arcface = np.load(path)
+ arcface = torch.tensor(arcface).cuda()[None]
+
+ return image, arcface
+
+
+def load_checkpoint(args, mica):
+ checkpoint = torch.load(args.m)
+ if 'arcface' in checkpoint:
+ mica.arcface.load_state_dict(checkpoint['arcface'])
+ if 'flameModel' in checkpoint:
+ mica.flameModel.load_state_dict(checkpoint['flameModel'])
+
+
+def main(cfg, args):
+ device = 'cuda:0'
+ cfg.model.testing = True
+ mica = util.find_model_using_name(model_dir='micalib.models', model_name=cfg.model.name)(cfg, device)
+ load_checkpoint(args, mica)
+ mica.eval()
+
+ faces = mica.flameModel.generator.faces_tensor.cpu()
+ Path(args.o).mkdir(exist_ok=True, parents=True)
+
+ app = LandmarksDetector(model=detectors.RETINAFACE)
+
+ with torch.no_grad():
+ logger.info(f'Processing has started...')
+ paths = process(args, app, draw_bbox=False)
+ for path in tqdm(paths):
+ name = Path(path).stem
+ images, arcface = to_batch(path)
+ codedict = mica.encode(images, arcface)
+ opdict = mica.decode(codedict)
+ meshes = opdict['pred_canonical_shape_vertices']
+ code = opdict['pred_shape_code']
+ lmk = mica.flame.compute_landmarks(meshes)
+
+ mesh = meshes[0]
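+            # Drop the 17 jaw-contour points and keep the 7-landmark subset (eye corners,
+            # nose, mouth corners) commonly used for rigid alignment in benchmarks such as NoW.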
+ landmark_51 = lmk[0, 17:]
+ landmark_7 = landmark_51[[19, 22, 25, 28, 16, 31, 37]]
+
+ dst = Path(args.o, name)
+ dst.mkdir(parents=True, exist_ok=True)
+ trimesh.Trimesh(vertices=mesh.cpu() * 1000.0, faces=faces, process=False).export(f'{dst}/mesh.ply') # save in millimeters
+ trimesh.Trimesh(vertices=mesh.cpu() * 1000.0, faces=faces, process=False).export(f'{dst}/mesh.obj')
+ np.save(f'{dst}/identity', code[0].cpu().numpy())
+ np.save(f'{dst}/kpt7', landmark_7.cpu().numpy() * 1000.0)
+ np.save(f'{dst}/kpt68', lmk.cpu().numpy() * 1000.0)
+
+        logger.info(f'Processing finished. Results have been saved in {args.o}')
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='MICA - Towards Metrical Reconstruction of Human Faces')
+ parser.add_argument('-i', default='demo/input', type=str, help='Input folder with images')
+ parser.add_argument('-o', default='demo/output', type=str, help='Output folder')
+ parser.add_argument('-a', default='demo/arcface', type=str, help='Processed images for MICA input')
+ parser.add_argument('-m', default='data/pretrained/mica.tar', type=str, help='Pretrained model path')
+
+ args = parser.parse_args()
+ cfg = get_cfg_defaults()
+
+ deterministic(42)
+ main(cfg, args)
diff --git a/src/pixel3dmm/preprocessing/MICA/demo/input/carell.jpg b/src/pixel3dmm/preprocessing/MICA/demo/input/carell.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..6d6bff41bc0fa7089082501bfc1d53c34ad8496f
Binary files /dev/null and b/src/pixel3dmm/preprocessing/MICA/demo/input/carell.jpg differ
diff --git a/src/pixel3dmm/preprocessing/MICA/demo/input/connelly.jpg b/src/pixel3dmm/preprocessing/MICA/demo/input/connelly.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..dacf4b44493930ec433447e0cab18428db5e208a
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/demo/input/connelly.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8ea5311249a7af7939bd2b9e750d44b3e77b6d265376df332d520566c73df58
+size 436464
diff --git a/src/pixel3dmm/preprocessing/MICA/demo/input/justin.png b/src/pixel3dmm/preprocessing/MICA/demo/input/justin.png
new file mode 100644
index 0000000000000000000000000000000000000000..2d95beac442763a1f917debaf4ee4e7d3c566428
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/demo/input/justin.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0898e5ddcfbd030ad7a2702184a65376d538563ba0fd65170d1ec8936d2e268f
+size 1504316
diff --git a/src/pixel3dmm/preprocessing/MICA/demo/input/lawrence.jpg b/src/pixel3dmm/preprocessing/MICA/demo/input/lawrence.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..98e525ffc7fe88d4ce23b33936042872d283ef21
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/demo/input/lawrence.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a472971bee1a518b5a930fabe005ff2a44cd5aca446d52281fde5154de988020
+size 510168
diff --git a/src/pixel3dmm/preprocessing/MICA/documents/BP4D.gif b/src/pixel3dmm/preprocessing/MICA/documents/BP4D.gif
new file mode 100644
index 0000000000000000000000000000000000000000..d5f3ce02aa1c30c0d63b1c6291e809f480a27d86
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/documents/BP4D.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2b457204c19ba1810dcc9418d36931a8c4c868b784d972ad3f64ea57717df08e
+size 1819951
diff --git a/src/pixel3dmm/preprocessing/MICA/documents/D3DFACS.gif b/src/pixel3dmm/preprocessing/MICA/documents/D3DFACS.gif
new file mode 100644
index 0000000000000000000000000000000000000000..eda5b84848e0143e346356ba22903e4d43184068
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/documents/D3DFACS.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0cabc16735225cd21131f905fe52fadddd7e8d0774ee1a729afff524a72d7a78
+size 1628262
diff --git a/src/pixel3dmm/preprocessing/MICA/documents/FACEWAREHOUSE.gif b/src/pixel3dmm/preprocessing/MICA/documents/FACEWAREHOUSE.gif
new file mode 100644
index 0000000000000000000000000000000000000000..ba048cc3c18148edaeaa60a5e1d6dda69082b7e3
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/documents/FACEWAREHOUSE.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f836cc88df0fcb0fdf7c69ad6e7f36e11d2c0af3aabb8486e152dc0da8214612
+size 1690483
diff --git a/src/pixel3dmm/preprocessing/MICA/documents/FLORENCE.gif b/src/pixel3dmm/preprocessing/MICA/documents/FLORENCE.gif
new file mode 100644
index 0000000000000000000000000000000000000000..fd8407ae43a59c6e451dc0abb18c727d6db58a99
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/documents/FLORENCE.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7767a5942a061ea153f0388147becd1e2cd84e92c23430d87f882770824b92b8
+size 1773841
diff --git a/src/pixel3dmm/preprocessing/MICA/documents/FRGC.gif b/src/pixel3dmm/preprocessing/MICA/documents/FRGC.gif
new file mode 100644
index 0000000000000000000000000000000000000000..6b45356fba594412b5e78a611ef1ad1b9247239d
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/documents/FRGC.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce3a7a7bb2df1c48de46cb18b65b04baf14e3e903a45eba0591eb5904f84168a
+size 1784016
diff --git a/src/pixel3dmm/preprocessing/MICA/documents/LYHM.gif b/src/pixel3dmm/preprocessing/MICA/documents/LYHM.gif
new file mode 100644
index 0000000000000000000000000000000000000000..fe80f478b72ac26d5eca61471deaf06673b946a2
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/documents/LYHM.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:68cd3aa87a49c782c29e4d6c324aedce693877300cbe62b12f1757823cd24321
+size 1815181
diff --git a/src/pixel3dmm/preprocessing/MICA/documents/STIRLING.gif b/src/pixel3dmm/preprocessing/MICA/documents/STIRLING.gif
new file mode 100644
index 0000000000000000000000000000000000000000..548a1c83f9dd5bed901689696e8e1a9bcac5eca1
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/documents/STIRLING.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8708aa2c6fb3d0b28cf89cca8613760d99f083a4ae89d9d9f7caa7c87df03b8
+size 1794509
diff --git a/src/pixel3dmm/preprocessing/MICA/documents/teaser.jpg b/src/pixel3dmm/preprocessing/MICA/documents/teaser.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..880a6f3e4a6cc0a83590e2242824c510a48a286c
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/documents/teaser.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:98875c6ca3e079fc532a6fb24748a87884bd986ea9fcd959569574de3ae1d8d4
+size 345161
diff --git a/src/pixel3dmm/preprocessing/MICA/documents/voxceleb.gif b/src/pixel3dmm/preprocessing/MICA/documents/voxceleb.gif
new file mode 100644
index 0000000000000000000000000000000000000000..bab88a478fcd49482adab6908b12e20a1b26d119
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/documents/voxceleb.gif
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:27cd3107ef3dcc55edd5890ca6ea8387dac9e7ebce3c19b83933ffafff9b3902
+size 6301645
diff --git a/src/pixel3dmm/preprocessing/MICA/environment.yml b/src/pixel3dmm/preprocessing/MICA/environment.yml
new file mode 100644
index 0000000000000000000000000000000000000000..dd456dde007e1e9290b663bbc83678a2d13ce81d
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/environment.yml
@@ -0,0 +1,187 @@
+name: MICA
+channels:
+ - pytorch
+ - nvidia
+ - defaults
+dependencies:
+ - _libgcc_mutex=0.1=main
+ - _openmp_mutex=5.1=1_gnu
+ - blas=1.0=mkl
+ - brotlipy=0.7.0=py39h27cfd23_1003
+ - bzip2=1.0.8=h7b6447c_0
+ - ca-certificates=2022.10.11=h06a4308_0
+ - certifi=2022.9.24=py39h06a4308_0
+ - cffi=1.15.1=py39h5eee18b_2
+ - charset-normalizer=2.0.4=pyhd3eb1b0_0
+ - cryptography=38.0.1=py39h9ce1e76_0
+ - cuda=11.6.2=0
+ - cuda-cccl=11.6.55=hf6102b2_0
+ - cuda-command-line-tools=11.6.2=0
+ - cuda-compiler=11.6.2=0
+ - cuda-cudart=11.6.55=he381448_0
+ - cuda-cudart-dev=11.6.55=h42ad0f4_0
+ - cuda-cuobjdump=11.6.124=h2eeebcb_0
+ - cuda-cupti=11.6.124=h86345e5_0
+ - cuda-cuxxfilt=11.6.124=hecbf4f6_0
+ - cuda-driver-dev=11.6.55=0
+ - cuda-gdb=11.8.86=0
+ - cuda-libraries=11.6.2=0
+ - cuda-libraries-dev=11.6.2=0
+ - cuda-memcheck=11.8.86=0
+ - cuda-nsight=11.8.86=0
+ - cuda-nsight-compute=11.8.0=0
+ - cuda-nvcc=11.6.124=hbba6d2d_0
+ - cuda-nvdisasm=11.8.86=0
+ - cuda-nvml-dev=11.6.55=haa9ef22_0
+ - cuda-nvprof=11.8.87=0
+ - cuda-nvprune=11.6.124=he22ec0a_0
+ - cuda-nvrtc=11.6.124=h020bade_0
+ - cuda-nvrtc-dev=11.6.124=h249d397_0
+ - cuda-nvtx=11.6.124=h0630a44_0
+ - cuda-nvvp=11.8.87=0
+ - cuda-runtime=11.6.2=0
+ - cuda-samples=11.6.101=h8efea70_0
+ - cuda-sanitizer-api=11.8.86=0
+ - cuda-toolkit=11.6.2=0
+ - cuda-tools=11.6.2=0
+ - cuda-visual-tools=11.6.2=0
+ - ffmpeg=4.3=hf484d3e_0
+ - freetype=2.12.1=h4a9f257_0
+ - gds-tools=1.4.0.31=0
+ - giflib=5.2.1=h7b6447c_0
+ - gmp=6.2.1=h295c915_3
+ - gnutls=3.6.15=he1e5248_0
+ - idna=3.4=py39h06a4308_0
+ - intel-openmp=2021.4.0=h06a4308_3561
+ - jpeg=9e=h7f8727e_0
+ - lame=3.100=h7b6447c_0
+ - lcms2=2.12=h3be6417_0
+ - ld_impl_linux-64=2.38=h1181459_1
+ - lerc=3.0=h295c915_0
+ - libcublas=11.11.3.6=0
+ - libcublas-dev=11.11.3.6=0
+ - libcufft=10.9.0.58=0
+ - libcufft-dev=10.9.0.58=0
+ - libcufile=1.4.0.31=0
+ - libcufile-dev=1.4.0.31=0
+ - libcurand=10.3.0.86=0
+ - libcurand-dev=10.3.0.86=0
+ - libcusolver=11.4.1.48=0
+ - libcusolver-dev=11.4.1.48=0
+ - libcusparse=11.7.5.86=0
+ - libcusparse-dev=11.7.5.86=0
+ - libdeflate=1.8=h7f8727e_5
+ - libffi=3.4.2=h6a678d5_6
+ - libgcc-ng=11.2.0=h1234567_1
+ - libgomp=11.2.0=h1234567_1
+ - libiconv=1.16=h7f8727e_2
+ - libidn2=2.3.2=h7f8727e_0
+ - libnpp=11.8.0.86=0
+ - libnpp-dev=11.8.0.86=0
+ - libnvjpeg=11.9.0.86=0
+ - libnvjpeg-dev=11.9.0.86=0
+ - libpng=1.6.37=hbc83047_0
+ - libstdcxx-ng=11.2.0=h1234567_1
+ - libtasn1=4.16.0=h27cfd23_0
+ - libtiff=4.4.0=hecacb30_2
+ - libunistring=0.9.10=h27cfd23_0
+ - libwebp=1.2.4=h11a3e52_0
+ - libwebp-base=1.2.4=h5eee18b_0
+ - lz4-c=1.9.3=h295c915_1
+ - mkl=2021.4.0=h06a4308_640
+ - mkl-service=2.4.0=py39h7f8727e_0
+ - mkl_fft=1.3.1=py39hd3c417c_0
+ - mkl_random=1.2.2=py39h51133e4_0
+ - ncurses=6.3=h5eee18b_3
+ - nettle=3.7.3=hbbd107a_1
+ - nsight-compute=2022.3.0.22=0
+ - numpy=1.23.4=py39h14f4228_0
+ - numpy-base=1.23.4=py39h31eccc5_0
+ - openh264=2.1.1=h4ff587b_0
+ - openssl=1.1.1s=h7f8727e_0
+ - pillow=9.2.0=py39hace64e9_1
+ - pip=22.2.2=py39h06a4308_0
+ - pycparser=2.21=pyhd3eb1b0_0
+ - pyopenssl=22.0.0=pyhd3eb1b0_0
+ - pysocks=1.7.1=py39h06a4308_0
+ - python=3.9.15=h7a1cb2a_2
+ - pytorch=1.13.0=py3.9_cuda11.6_cudnn8.3.2_0
+ - pytorch-cuda=11.6=h867d48c_0
+ - pytorch-mutex=1.0=cuda
+ - readline=8.2=h5eee18b_0
+ - requests=2.28.1=py39h06a4308_0
+ - setuptools=65.5.0=py39h06a4308_0
+ - six=1.16.0=pyhd3eb1b0_1
+ - sqlite=3.40.0=h5082296_0
+ - tk=8.6.12=h1ccaba5_0
+ - torchaudio=0.13.0=py39_cu116
+ - torchvision=0.14.0=py39_cu116
+ - typing_extensions=4.3.0=py39h06a4308_0
+ - tzdata=2022f=h04d1e81_0
+ - urllib3=1.26.12=py39h06a4308_0
+ - wheel=0.37.1=pyhd3eb1b0_0
+ - xz=5.2.6=h5eee18b_0
+ - zlib=1.2.13=h5eee18b_0
+ - zstd=1.5.2=ha4553b6_0
+ - pip:
+ - albumentations==1.3.0
+ - cachetools==5.2.0
+ - chumpy==0.70
+ - coloredlogs==15.0.1
+ - contourpy==1.0.6
+ - cycler==0.11.0
+ - cython==0.29.32
+ - easydict==1.10
+ - face-alignment==1.3.5
+ - falcon==3.1.1
+ - falcon-multipart==0.2.0
+ - flatbuffers==22.11.23
+ - fonttools==4.38.0
+ - google-api-core==2.11.0
+ - google-api-python-client==2.69.0
+ - google-auth==2.15.0
+ - google-auth-httplib2==0.1.0
+ - googleapis-common-protos==1.57.0
+ - gunicorn==20.1.0
+ - httplib2==0.21.0
+ - humanfriendly==10.0
+ - imageio==2.22.4
+ - insightface==0.7
+ - joblib==1.2.0
+ - kiwisolver==1.4.4
+ - llvmlite==0.39.1
+ - loguru==0.6.0
+ - matplotlib==3.6.2
+ - mpmath==1.2.1
+ - networkx==2.8.8
+ - numba==0.56.4
+ - oauth2client==4.1.3
+ - onnx==1.13.0
+ - onnxruntime==1.13.1
+ - opencv-python==4.7.0.72
+ - opencv-python-headless==4.6.0.66
+ - packaging==21.3
+ - prettytable==3.5.0
+ - protobuf==3.20.2
+ - pyasn1==0.4.8
+ - pyasn1-modules==0.2.8
+ - pydrive2==1.15.0
+ - pyparsing==3.0.9
+ - python-datauri==1.1.0
+ - python-dateutil==2.8.2
+ - pywavelets==1.4.1
+ - pyyaml==6.0
+ - qudida==0.0.4
+ - rsa==4.9
+ - scikit-image==0.19.3
+ - scikit-learn==1.1.3
+ - scipy==1.9.3
+ - sympy==1.11.1
+ - threadpoolctl==3.1.0
+ - tifffile==2022.10.10
+ - tqdm==4.64.1
+ - trimesh==3.16.4
+ - uritemplate==4.1.1
+ - wcwidth==0.2.5
+ - yacs==0.1.8
+prefix: /home/wzielonka/miniconda3/envs/MICA
diff --git a/src/pixel3dmm/preprocessing/MICA/install.sh b/src/pixel3dmm/preprocessing/MICA/install.sh
new file mode 100644
index 0000000000000000000000000000000000000000..351a7491324f77ebcd5696bad4c29be4f0497fbd
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/install.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+urle () { [[ "${1}" ]] || return 1; local LANG=C i x; for (( i = 0; i < ${#1}; i++ )); do x="${1:i:1}"; [[ "${x}" == [a-zA-Z0-9.~-] ]] && echo -n "${x}" || printf '%%%02X' "'${x}"; done; echo; }
+
+# username and password input
+echo -e "\nIf you do not have an account you can register at https://flame.is.tue.mpg.de/ following the installation instruction."
+read -p "Username (FLAME):" username
+read -p "Password (FLAME):" password
+username=$(urle $username)
+password=$(urle $password)
+
+echo -e "\nDownloading FLAME..."
+mkdir -p data/FLAME2020/
+wget --post-data "username=$username&password=$password" 'https://download.is.tue.mpg.de/download.php?domain=flame&sfile=FLAME2020.zip&resume=1' -O './FLAME2020.zip' --no-check-certificate --continue
+unzip FLAME2020.zip -d data/FLAME2020/
+rm -rf FLAME2020.zip
+
+# Install gdown if not installed
+if ! command -v gdown &> /dev/null; then
+ echo "Installing gdown..."
+ pip install gdown
+fi
+
+echo -e "\nDownloading MICA..."
+mkdir -p data/pretrained/
+gdown --id 1bYsI_spptzyuFmfLYqYkcJA6GZWZViNt -O data/pretrained/mica.tar
+
+# https://github.com/deepinsight/insightface/issues/1896
+# Insightface has problems with hosting the models
+echo -e "\nDownloading insightface models..."
+mkdir -p ~/.insightface/models/
+if [ ! -d ~/.insightface/models/antelopev2 ]; then
+ gdown --id 16PWKI_RjjbE4_kqpElG-YFqe8FpXjads -O ~/.insightface/models/antelopev2.zip
+ unzip ~/.insightface/models/antelopev2.zip -d ~/.insightface/models/antelopev2
+fi
+if [ ! -d ~/.insightface/models/buffalo_l ]; then
+ gdown --id 1navJMy0DTr1_DHjLWu1i48owCPvXWfYc -O ~/.insightface/models/buffalo_l.zip
+ unzip ~/.insightface/models/buffalo_l.zip -d ~/.insightface/models/buffalo_l
+fi
+
+echo -e "\nInstalling conda env..."
+conda env create -f environment.yml
+
+echo -e "\nInstallation has finished!"
diff --git a/src/pixel3dmm/preprocessing/MICA/jobs.py b/src/pixel3dmm/preprocessing/MICA/jobs.py
new file mode 100644
index 0000000000000000000000000000000000000000..751eb8fbcb80f554275950861706947a36861f83
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/jobs.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import random
+import sys
+
+import numpy as np
+import torch
+import torch.backends.cudnn as cudnn
+import torch.distributed as dist
+import yaml
+from loguru import logger
+
+from micalib.tester import Tester
+from micalib.trainer import Trainer
+from utils import util
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '.')))
+
+
+def setup(rank, world_size, port):
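+    # Join the default NCCL process group; the rendezvous runs on localhost at the port chosen by the caller.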
+ os.environ['MASTER_ADDR'] = 'localhost'
+ os.environ['MASTER_PORT'] = str(port)
+ dist.init_process_group("nccl", rank=rank, world_size=world_size, init_method="env://")
+
+
+def deterministic(rank):
+ torch.manual_seed(rank)
+ torch.cuda.manual_seed(rank)
+ np.random.seed(rank)
+ random.seed(rank)
+
+ cudnn.deterministic = True
+ cudnn.benchmark = False
+
+
+def test(rank, world_size, cfg, args):
+ port = np.random.randint(low=0, high=2000)
+ setup(rank, world_size, 12310 + port)
+
+ deterministic(rank)
+
+ cfg.model.testing = True
+ mica = util.find_model_using_name(model_dir='micalib.models', model_name=cfg.model.name)(cfg, rank)
+ tester = Tester(nfc_model=mica, config=cfg, device=rank)
+ tester.render_mesh = True
+
+ if args.test_dataset.upper() == 'STIRLING':
+ tester.test_stirling(args.checkpoint)
+ elif args.test_dataset.upper() == 'NOW':
+ tester.test_now(args.checkpoint)
+ else:
+ logger.error('[TESTER] Test dataset was not specified!')
+
+ dist.destroy_process_group()
+
+
+def train(rank, world_size, cfg):
+ port = np.random.randint(low=0, high=2000)
+ setup(rank, world_size, 12310 + port)
+
+ logger.info(f'[MAIN] output_dir: {cfg.output_dir}')
+ os.makedirs(os.path.join(cfg.output_dir, cfg.train.log_dir), exist_ok=True)
+ os.makedirs(os.path.join(cfg.output_dir, cfg.train.vis_dir), exist_ok=True)
+ os.makedirs(os.path.join(cfg.output_dir, cfg.train.val_vis_dir), exist_ok=True)
+
+ with open(os.path.join(cfg.output_dir, cfg.train.log_dir, 'full_config.yaml'), 'w') as f:
+ yaml.dump(cfg, f, default_flow_style=False)
+ # shutil.copy(cfg.cfg_file, os.path.join(cfg.output_dir, 'config.yaml'))
+
+ deterministic(rank)
+
+ nfc = util.find_model_using_name(model_dir='micalib.models', model_name=cfg.model.name)(cfg, rank)
+ trainer = Trainer(nfc_model=nfc, config=cfg, device=rank)
+ trainer.fit()
+
+ dist.destroy_process_group()
diff --git a/src/pixel3dmm/preprocessing/MICA/micalib/__init__.py b/src/pixel3dmm/preprocessing/MICA/micalib/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/pixel3dmm/preprocessing/MICA/micalib/base_model.py b/src/pixel3dmm/preprocessing/MICA/micalib/base_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..dccd387975532cd640eb779afbbee19508904257
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/micalib/base_model.py
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+from abc import abstractmethod
+
+import numpy as np
+import torch
+import torch.nn as nn
+
+from configs.config import cfg
+from models.flame import FLAME
+from utils.masking import Masking
+
+
+class BaseModel(nn.Module):
+ def __init__(self, config=None, device=None, tag=''):
+ super(BaseModel, self).__init__()
+ if config is None:
+ self.cfg = cfg
+ else:
+ self.cfg = config
+
+ self.tag = tag
+ self.use_mask = self.cfg.train.use_mask
+ self.device = device
+ self.masking = Masking(config)
+ self.testing = self.cfg.model.testing
+
+ def initialize(self):
+ self.create_flame(self.cfg.model)
+ self.create_model(self.cfg.model)
+ self.load_model()
+ self.setup_renderer(self.cfg.model)
+
+ self.create_weights()
+
+ def create_flame(self, model_cfg):
+ self.flame = FLAME(model_cfg).to(self.device)
+ self.average_face = self.flame.v_template.clone()[None]
+
+ self.flame.eval()
+
+    @abstractmethod
+    def create_model(self, model_cfg):
+        return
+
+    @abstractmethod
+    def load_model(self):
+        return
+
+ @abstractmethod
+ def model_dict(self):
+ return
+
+ @abstractmethod
+ def parameters_to_optimize(self):
+ return
+
+ @abstractmethod
+ def encode(self, images, arcface_images):
+ return
+
+ @abstractmethod
+ def decode(self, codedict, epoch):
+ pass
+
+ @abstractmethod
+ def compute_losses(self, input, encoder_output, decoder_output):
+ pass
+
+ @abstractmethod
+ def compute_masks(self, input, decoder_output):
+ pass
+
+ def setup_renderer(self, model_cfg):
+ self.verts_template_neutral = self.flame.v_template[None]
+ self.verts_template = None
+ self.verts_template_uv = None
+
+ def create_weights(self):
+ self.vertices_mask = self.masking.get_weights_per_vertex().to(self.device)
+ self.triangle_mask = self.masking.get_weights_per_triangle().to(self.device)
+
+ def create_template(self, B):
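+        # Lazily build (and cache) a batch of FLAME template meshes with the jaw opened by
+        # 10 degrees, repeating it so that its batch dimension matches B.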
+ with torch.no_grad():
+ if self.verts_template is None:
+ self.verts_template_neutral = self.flame.v_template[None]
+ pose = torch.zeros(B, self.cfg.model.n_pose, device=self.device)
+ pose[:, 3] = 10.0 * np.pi / 180.0 # 48
+ self.verts_template, _, _ = self.flame(shape_params=torch.zeros(B, self.cfg.model.n_shape, device=self.device), expression_params=torch.zeros(B, self.cfg.model.n_exp, device=self.device), pose_params=pose) # use template mesh with open mouth
+
+ if self.verts_template.shape[0] != B:
+ self.verts_template_neutral = self.verts_template_neutral[0:1].repeat(B, 1, 1)
+ self.verts_template = self.verts_template[0:1].repeat(B, 1, 1)
diff --git a/src/pixel3dmm/preprocessing/MICA/micalib/models/__init__.py b/src/pixel3dmm/preprocessing/MICA/micalib/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/pixel3dmm/preprocessing/MICA/micalib/models/mica.py b/src/pixel3dmm/preprocessing/MICA/micalib/models/mica.py
new file mode 100644
index 0000000000000000000000000000000000000000..bb9b5f53e32c6f4a8f1b0ba58d1f8c005ae2f555
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/micalib/models/mica.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import sys
+
+sys.path.append("./nfclib")
+
+import torch
+import torch.nn.functional as F
+
+from models.arcface import Arcface
+from models.generator import Generator
+from micalib.base_model import BaseModel
+
+from loguru import logger
+
+
+class MICA(BaseModel):
+ def __init__(self, config=None, device=None, tag='MICA'):
+ super(MICA, self).__init__(config, device, tag)
+
+ self.initialize()
+
+ def create_model(self, model_cfg):
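+        # ArcFace produces a 512-d identity embedding; the mapping Generator regresses the
+        # FLAME shape code and the canonical shape vertices from it.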
+ mapping_layers = model_cfg.mapping_layers
+ pretrained_path = None
+ if not model_cfg.use_pretrained:
+ pretrained_path = model_cfg.arcface_pretrained_model
+ self.arcface = Arcface(pretrained_path=pretrained_path).to(self.device)
+ self.flameModel = Generator(512, 300, self.cfg.model.n_shape, mapping_layers, model_cfg, self.device)
+
+ def load_model(self):
+ model_path = os.path.join(self.cfg.output_dir, 'model.tar')
+ if os.path.exists(self.cfg.pretrained_model_path) and self.cfg.model.use_pretrained:
+ model_path = self.cfg.pretrained_model_path
+ if os.path.exists(model_path):
+ logger.info(f'[{self.tag}] Trained model found. Path: {model_path} | GPU: {self.device}')
+ checkpoint = torch.load(model_path)
+ if 'arcface' in checkpoint:
+ self.arcface.load_state_dict(checkpoint['arcface'])
+ if 'flameModel' in checkpoint:
+ self.flameModel.load_state_dict(checkpoint['flameModel'])
+ else:
+            logger.info(f'[{self.tag}] Checkpoint not available, starting from scratch!')
+
+ def model_dict(self):
+ return {
+ 'flameModel': self.flameModel.state_dict(),
+ 'arcface': self.arcface.state_dict()
+ }
+
+ def parameters_to_optimize(self):
+ return [
+ {'params': self.flameModel.parameters(), 'lr': self.cfg.train.lr},
+ {'params': self.arcface.parameters(), 'lr': self.cfg.train.arcface_lr},
+ ]
+
+ def encode(self, images, arcface_imgs):
+ codedict = {}
+
+ codedict['arcface'] = F.normalize(self.arcface(arcface_imgs))
+ codedict['images'] = images
+
+ return codedict
+
+ def decode(self, codedict, epoch=0):
+ self.epoch = epoch
+
+ flame_verts_shape = None
+ shapecode = None
+
+ if not self.testing:
+ flame = codedict['flame']
+ shapecode = flame['shape_params'].view(-1, flame['shape_params'].shape[2])
+ shapecode = shapecode.to(self.device)[:, :self.cfg.model.n_shape]
+ with torch.no_grad():
+ flame_verts_shape, _, _ = self.flame(shape_params=shapecode)
+
+ identity_code = codedict['arcface']
+ pred_canonical_vertices, pred_shape_code = self.flameModel(identity_code)
+
+ output = {
+ 'flame_verts_shape': flame_verts_shape,
+ 'flame_shape_code': shapecode,
+ 'pred_canonical_shape_vertices': pred_canonical_vertices,
+ 'pred_shape_code': pred_shape_code,
+ 'faceid': codedict['arcface']
+ }
+
+ return output
+
+ def compute_losses(self, input, encoder_output, decoder_output):
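+        # Mean absolute vertex error between predicted and FLAME ground-truth canonical shapes,
+        # optionally weighted by the per-vertex mask and scaled by 1000 (FLAME units are meters,
+        # so the loss is reported in millimeters).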
+ losses = {}
+
+ pred_verts = decoder_output['pred_canonical_shape_vertices']
+ gt_verts = decoder_output['flame_verts_shape'].detach()
+
+ pred_verts_shape_canonical_diff = (pred_verts - gt_verts).abs()
+
+ if self.use_mask:
+ pred_verts_shape_canonical_diff *= self.vertices_mask
+
+ losses['pred_verts_shape_canonical_diff'] = torch.mean(pred_verts_shape_canonical_diff) * 1000.0
+
+ return losses
diff --git a/src/pixel3dmm/preprocessing/MICA/micalib/renderer.py b/src/pixel3dmm/preprocessing/MICA/micalib/renderer.py
new file mode 100644
index 0000000000000000000000000000000000000000..8b6d782d75c03b89ca3d6944d0700397b64a5c32
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/micalib/renderer.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import pytorch3d
+import torch
+import torch.nn as nn
+from pytorch3d.io import load_obj
+from pytorch3d.renderer import (
+ FoVPerspectiveCameras, look_at_view_transform,
+ RasterizationSettings, MeshRenderer, MeshRasterizer, SoftPhongShader, TexturesVertex
+)
+
+
+class MeshShapeRenderer(nn.Module):
+ def __init__(self, obj_filename):
+ super().__init__()
+
+ verts, faces, aux = load_obj(obj_filename)
+ faces = faces.verts_idx[None, ...].cuda()
+ self.register_buffer('faces', faces)
+
+ R, T = look_at_view_transform(2.7, 10.0, 10.0)
+ self.cameras = FoVPerspectiveCameras(device='cuda:0', R=R, T=T, fov=6)
+ raster_settings = RasterizationSettings(
+ image_size=512,
+ blur_radius=0.0,
+ faces_per_pixel=1,
+ perspective_correct=True
+ )
+
+ lights = pytorch3d.renderer.DirectionalLights(
+ device='cuda:0',
+ direction=((0, 0, 1),),
+ ambient_color=((0.4, 0.4, 0.4),),
+ diffuse_color=((0.35, 0.35, 0.35),),
+ specular_color=((0.05, 0.05, 0.05),))
+
+ self.renderer = MeshRenderer(
+ rasterizer=MeshRasterizer(cameras=self.cameras, raster_settings=raster_settings),
+ shader=SoftPhongShader(device='cuda:0', cameras=self.cameras, lights=lights)
+ )
+
+ def render_mesh(self, vertices, faces=None, verts_rgb=None):
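+        # Render the batch of meshes with the fixed camera and lights set up above; plain white
+        # vertex colors are used unless verts_rgb is provided.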
+ B, N, V = vertices.shape
+ if faces is None:
+ faces = self.faces.repeat(B, 1, 1)
+ else:
+ faces = faces.repeat(B, 1, 1)
+
+ if verts_rgb is None:
+ verts_rgb = torch.ones_like(vertices)
+ textures = TexturesVertex(verts_features=verts_rgb.cuda())
+ meshes = pytorch3d.structures.Meshes(verts=vertices, faces=faces, textures=textures)
+
+ rendering = self.renderer(meshes).permute(0, 3, 1, 2)
+ color = rendering[:, 0:3, ...]
+
+ return color
diff --git a/src/pixel3dmm/preprocessing/MICA/micalib/tester.py b/src/pixel3dmm/preprocessing/MICA/micalib/tester.py
new file mode 100644
index 0000000000000000000000000000000000000000..03b1ccb0e5d6ce4986855aeac710497e25565d6f
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/micalib/tester.py
@@ -0,0 +1,334 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+from glob import glob
+
+import cv2
+import numpy as np
+import torch
+import torch.distributed as dist
+from insightface.app import FaceAnalysis
+from insightface.app.common import Face
+from insightface.utils import face_align
+from loguru import logger
+from pytorch3d.io import save_ply
+from skimage.io import imread
+from skimage.transform import estimate_transform, warp
+from tqdm import tqdm
+
+from configs.config import cfg
+from utils import util
+
+input_mean = 127.5
+input_std = 127.5
+
+NOW_SCANS = '/home/wzielonka/datasets/NoWDataset/final_release_version/scans/'
+NOW_PICTURES = '/home/wzielonka/datasets/NoWDataset/final_release_version/iphone_pictures/'
+NOW_BBOX = '/home/wzielonka/datasets/NoWDataset/final_release_version/detected_face/'
+STIRLING_PICTURES = '/home/wzielonka/datasets/Stirling/images/'
+
+
+class Tester(object):
+ def __init__(self, nfc_model, config=None, device=None):
+ if config is None:
+ self.cfg = cfg
+ else:
+ self.cfg = config
+
+ self.device = device
+ self.batch_size = self.cfg.dataset.batch_size
+ self.K = self.cfg.dataset.K
+ self.render_mesh = True
+ self.embeddings_lyhm = {}
+
+ # deca model
+ self.nfc = nfc_model.to(self.device)
+ self.nfc.testing = True
+
+ logger.info(f'[INFO] {torch.cuda.get_device_name(device)}')
+
+ def load_checkpoint(self, model_path):
+ dist.barrier()
+ map_location = {'cuda:%d' % 0: 'cuda:%d' % self.device}
+
+ checkpoint = torch.load(model_path, map_location)
+
+ if 'arcface' in checkpoint:
+ self.nfc.arcface.load_state_dict(checkpoint['arcface'])
+ if 'flameModel' in checkpoint:
+ self.nfc.flameModel.load_state_dict(checkpoint['flameModel'])
+
+ logger.info(f"[TESTER] Resume from {model_path}")
+
+ def load_model_dict(self, model_dict):
+ dist.barrier()
+
+ self.nfc.canonicalModel.load_state_dict(model_dict['canonicalModel'])
+ self.nfc.arcface.load_state_dict(model_dict['arcface'])
+
+ def process_image(self, img, app):
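+        # Run the insightface detector on a single image; if exactly one face is found, return
+        # its aligned and normalized ArcFace blob, otherwise an empty list.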
+ images = []
+ bboxes, kpss = app.det_model.detect(img, max_num=0, metric='default')
+ if bboxes.shape[0] != 1:
+ logger.error('Face not detected!')
+ return images
+ i = 0
+ bbox = bboxes[i, 0:4]
+ det_score = bboxes[i, 4]
+ kps = None
+ if kpss is not None:
+ kps = kpss[i]
+ face = Face(bbox=bbox, kps=kps, det_score=det_score)
+ aimg = face_align.norm_crop(img, landmark=face.kps)
+ blob = cv2.dnn.blobFromImages([aimg], 1.0 / input_std, (112, 112), (input_mean, input_mean, input_mean), swapRB=True)
+
+ images.append(torch.tensor(blob[0])[None])
+
+ return images
+
+ def process_folder(self, folder, app):
+ images = []
+ arcface = []
+        files_actor = sorted(os.listdir(folder))
+ for file in files_actor:
+ image_path = folder + '/' + file
+ logger.info(image_path)
+
+ ### NOW CROPPING
+ scale = 1.6
+ # scale = np.random.rand() * (1.8 - 1.1) + 1.1
+ bbx_path = image_path.replace('.jpg', '.npy').replace('iphone_pictures', 'detected_face')
+ bbx_data = np.load(bbx_path, allow_pickle=True, encoding='latin1').item()
+ left = bbx_data['left']
+ right = bbx_data['right']
+ top = bbx_data['top']
+ bottom = bbx_data['bottom']
+
+ image = imread(image_path)[:, :, :3]
+
+ h, w, _ = image.shape
+ old_size = (right - left + bottom - top) / 2
+ center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0])
+ size = int(old_size * scale)
+
+ crop_size = 224
+ # crop image
+ src_pts = np.array([[center[0] - size / 2, center[1] - size / 2], [center[0] - size / 2, center[1] + size / 2], [center[0] + size / 2, center[1] - size / 2]])
+ DST_PTS = np.array([[0, 0], [0, crop_size - 1], [crop_size - 1, 0]])
+ tform = estimate_transform('similarity', src_pts, DST_PTS)
+
+ image = image / 255.
+ dst_image = warp(image, tform.inverse, output_shape=(crop_size, crop_size))
+
+ arcface += self.process_image(cv2.cvtColor(dst_image.astype(np.float32) * 255.0, cv2.COLOR_RGB2BGR), app)
+
+ dst_image = dst_image.transpose(2, 0, 1)
+ images.append(torch.tensor(dst_image)[None])
+
+ images = torch.cat(images, dim=0).float()
+ arcface = torch.cat(arcface, dim=0).float()
+
+ return images, arcface
+
+ def get_name(self, best_model, id):
+ if '_' in best_model:
+ name = id if id is not None else best_model.split('_')[-1][0:-4]
+ else:
+ name = id if id is not None else best_model.split('/')[-1][0:-4]
+ return name
+
+ def test_now(self, best_model, id=None):
+ self.load_checkpoint(best_model)
+ name = self.get_name(best_model, id)
+ self.now(name)
+
+ def test_stirling(self, best_model, id=None):
+ self.load_checkpoint(best_model)
+ name = self.get_name(best_model, id)
+ self.stirling(name)
+
+ def save_mesh(self, file, vertices):
+ scaled = vertices * 1000.0
+ save_ply(file, scaled.cpu(), self.nfc.render.faces[0].cpu())
+
+ # mask = self.nfc.masking.get_triangle_whole_mask()
+ # v, f = self.nfc.masking.get_masked_mesh(vertices, mask)
+ # save_obj(file, v[0], f[0])
+
+ def cache_to_cuda(self, cache):
+ for key in cache.keys():
+ i, a = cache[key]
+ cache[key] = (i.to(self.device), a.to(self.device))
+ return cache
+
+ def create_now_cache(self):
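+        # Build (or load from test_now_cache.pt) a cache mapping every NoW image folder to its
+        # preprocessed (images, arcface) tensors so repeated evaluations skip detection and cropping.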
+ if os.path.exists('test_now_cache.pt'):
+ cache = self.cache_to_cuda(torch.load('test_now_cache.pt'))
+ return cache
+ else:
+ cache = {}
+
+ app = FaceAnalysis(name='antelopev2', providers=['CUDAExecutionProvider'])
+ app.prepare(ctx_id=0, det_size=(224, 224), det_thresh=0.4)
+
+ for actor in tqdm(sorted(os.listdir(NOW_PICTURES))):
+ image_paths = sorted(glob(NOW_PICTURES + actor + '/*'))
+ for folder in image_paths:
+ images, arcface = self.process_folder(folder, app)
+ cache[folder] = (images, arcface)
+
+ torch.save(cache, 'test_now_cache.pt')
+ return self.cache_to_cuda(cache)
+
+ def create_stirling_cache(self):
+ if os.path.exists('test_stirling_cache.pt'):
+ cache = torch.load('test_stirling_cache.pt')
+ return cache
+ else:
+ cache = {}
+
+ app = FaceAnalysis(name='antelopev2', providers=['CUDAExecutionProvider'])
+ app.prepare(ctx_id=0, det_size=(224, 224), det_thresh=0.1)
+
+ cache['HQ'] = {}
+ cache['LQ'] = {}
+
+ for folder in ['Real_images__Subset_2D_FG2018']:
+ for quality in ['HQ', 'LQ']:
+ for path in tqdm(sorted(glob(STIRLING_PICTURES + folder + '/' + quality + '/*.jpg'))):
+ actor = path.split('/')[-1][:9].upper()
+ image = imread(path)[:, :, :3]
+ blobs = self.process_image(cv2.cvtColor(image, cv2.COLOR_RGB2BGR), app)
+ if len(blobs) == 0:
+ continue
+ image = image / 255.
+ image = cv2.resize(image, (224, 224)).transpose(2, 0, 1)
+ image = torch.tensor(image).cuda()[None]
+
+ if actor not in cache[quality]:
+ cache[quality][actor] = []
+ cache[quality][actor].append((image, blobs[0]))
+
+ for q in cache.keys():
+ for a in cache[q].keys():
+ images, arcface = list(zip(*cache[q][a]))
+ images = torch.cat(images, dim=0).float()
+ arcface = torch.cat(arcface, dim=0).float()
+ cache[q][a] = (images, arcface)
+
+ torch.save(cache, 'test_stirling_cache.pt')
+ return self.cache_to_cuda(cache)
+
+ def update_embeddings(self, actor, arcface):
+ if actor not in self.embeddings_lyhm:
+ self.embeddings_lyhm[actor] = []
+ self.embeddings_lyhm[actor] += [arcface[i].data.cpu().numpy() for i in range(arcface.shape[0])]
+
+ def stirling(self, best_id):
+ logger.info(f"[TESTER] Stirling testing has begun!")
+ self.nfc.eval()
+ cache = self.create_stirling_cache()
+ for quality in cache.keys():
+ images_processed = 0
+ self.embeddings_lyhm = {}
+ for actor in tqdm(cache[quality].keys()):
+ images, arcface = cache[quality][actor]
+ with torch.no_grad():
+ codedict = self.nfc.encode(images.cuda(), arcface.cuda())
+ opdict = self.nfc.decode(codedict, 0)
+
+ self.update_embeddings(actor, codedict['arcface'])
+
+ dst_actor = actor[:5]
+ os.makedirs(os.path.join(self.cfg.output_dir, f'stirling_test_{best_id}', 'predicted_meshes', quality), exist_ok=True)
+ dst_folder = os.path.join(self.cfg.output_dir, f'stirling_test_{best_id}', 'predicted_meshes', quality, dst_actor)
+ os.makedirs(dst_folder, exist_ok=True)
+
+ meshes = opdict['pred_canonical_shape_vertices']
+ lmk = self.nfc.flame.compute_landmarks(meshes)
+
+ for m in range(meshes.shape[0]):
+ v = torch.reshape(meshes[m], (-1, 3))
+ savepath = os.path.join(self.cfg.output_dir, f'stirling_test_{best_id}', 'predicted_meshes', quality, dst_actor, f'{actor}.ply')
+ self.save_mesh(savepath, v)
+ landmark_51 = lmk[m, 17:]
+ landmark_7 = landmark_51[[19, 22, 25, 28, 16, 31, 37]]
+ landmark_7 = landmark_7.cpu().numpy() * 1000.0
+ np.save(os.path.join(self.cfg.output_dir, f'stirling_test_{best_id}', 'predicted_meshes', quality, dst_actor, f'{actor}.npy'), landmark_7)
+ images_processed += 1
+
+ pred = self.nfc.render.render_mesh(meshes)
+ dict = {
+ 'pred': pred,
+ 'images': images
+ }
+
+ savepath = os.path.join(self.cfg.output_dir, f'stirling_test_{best_id}', 'predicted_meshes', quality, dst_actor, f'{actor}.jpg')
+ util.visualize_grid(dict, savepath, size=512)
+
+            logger.info(f"[TESTER] Stirling dataset {quality}: {images_processed} images processed!")
+
+ # util.save_embedding_projection(self.embeddings_lyhm, f'{self.cfg.output_dir}/stirling_test_{best_id}/stirling_{quality}_arcface_embeds.pdf')
+
+ def now(self, best_id):
+ logger.info(f"[TESTER] NoW testing has begun!")
+ self.nfc.eval()
+ cache = self.create_now_cache()
+ # for actor in tqdm(sorted(os.listdir(NOW_SCANS))): # only 20
+ for actor in tqdm(sorted(os.listdir(NOW_PICTURES))): # all 100
+ image_paths = sorted(glob(NOW_PICTURES + actor + '/*'))
+ for folder in image_paths:
+ files_actor = sorted(os.listdir(folder))
+ images, arcface = cache[folder]
+ with torch.no_grad():
+ codedict = self.nfc.encode(images, arcface)
+ opdict = self.nfc.decode(codedict, 0)
+
+ self.update_embeddings(actor.split('_')[2], codedict['arcface'])
+
+ type = folder.split('/')[-1]
+ os.makedirs(os.path.join(self.cfg.output_dir, f'now_test_{best_id}', 'predicted_meshes'), exist_ok=True)
+ dst_folder = os.path.join(self.cfg.output_dir, f'now_test_{best_id}', 'predicted_meshes', actor, type)
+ os.makedirs(dst_folder, exist_ok=True)
+
+ meshes = opdict['pred_canonical_shape_vertices']
+ lmk = self.nfc.flame.compute_landmarks(meshes)
+
+ for m in range(meshes.shape[0]):
+ a = files_actor[m]
+ v = torch.reshape(meshes[m], (-1, 3))
+ savepath = os.path.join(self.cfg.output_dir, f'now_test_{best_id}', 'predicted_meshes', actor, type, a.replace('jpg', 'ply'))
+ self.save_mesh(savepath, v)
+ landmark_51 = lmk[m, 17:]
+ landmark_7 = landmark_51[[19, 22, 25, 28, 16, 31, 37]]
+ landmark_7 = landmark_7.cpu().numpy() * 1000.0
+ np.save(os.path.join(self.cfg.output_dir, f'now_test_{best_id}', 'predicted_meshes', actor, type, a.replace('jpg', 'npy')), landmark_7)
+
+ if self.render_mesh:
+ pred = self.nfc.render.render_mesh(meshes)
+
+ dict = {
+ 'pred': pred,
+ # 'deca': deca,
+ 'images': images
+ }
+
+ savepath = os.path.join(dst_folder, 'render.jpg')
+ util.visualize_grid(dict, savepath, size=512)
+
+ # util.save_embedding_projection(self.embeddings_lyhm, f'{self.cfg.output_dir}/now_test_{best_id}/now_arcface_embeds.pdf')
diff --git a/src/pixel3dmm/preprocessing/MICA/micalib/trainer.py b/src/pixel3dmm/preprocessing/MICA/micalib/trainer.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0c93bddb816a5142f2ae57a90725d7035032311
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/micalib/trainer.py
@@ -0,0 +1,286 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import random
+import sys
+from datetime import datetime
+
+import numpy as np
+import torch
+import torch.distributed as dist
+from loguru import logger
+from torch.utils.data import DataLoader
+from tqdm import tqdm
+
+import datasets
+from configs.config import cfg
+from utils import util
+
+sys.path.append("./micalib")
+from validator import Validator
+
+
+def print_info(rank):
+ props = torch.cuda.get_device_properties(rank)
+
+ logger.info(f'[INFO] {torch.cuda.get_device_name(rank)}')
+ logger.info(f'[INFO] Rank: {str(rank)}')
+ logger.info(f'[INFO] Memory: {round(props.total_memory / 1024 ** 3, 1)} GB')
+ logger.info(f'[INFO] Allocated: {round(torch.cuda.memory_allocated(rank) / 1024 ** 3, 1)} GB')
+ logger.info(f'[INFO] Cached: {round(torch.cuda.memory_reserved(rank) / 1024 ** 3, 1)} GB')
+
+
+def seed_worker(worker_id):
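+    # Standard PyTorch recipe: derive numpy/random seeds from the worker's torch seed so that
+    # DataLoader workers do not share identical RNG streams.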
+ worker_seed = torch.initial_seed() % 2 ** 32
+ np.random.seed(worker_seed)
+ random.seed(worker_seed)
+
+
+class Trainer(object):
+ def __init__(self, nfc_model, config=None, device=None):
+ if config is None:
+ self.cfg = cfg
+ else:
+ self.cfg = config
+
+ logger.add(os.path.join(self.cfg.output_dir, self.cfg.train.log_dir, 'train.log'))
+
+ self.device = device
+ self.batch_size = self.cfg.dataset.batch_size
+ self.K = self.cfg.dataset.K
+
+ # deca model
+ self.nfc = nfc_model.to(self.device)
+
+ self.validator = Validator(self)
+ self.configure_optimizers()
+ self.load_checkpoint()
+
+ # reset optimizer if loaded from pretrained model
+ if self.cfg.train.reset_optimizer:
+ self.configure_optimizers() # reset optimizer
+ logger.info(f"[TRAINER] Optimizer was reset")
+
+ if self.cfg.train.write_summary and self.device == 0:
+ from torch.utils.tensorboard import SummaryWriter
+ self.writer = SummaryWriter(log_dir=os.path.join(self.cfg.output_dir, self.cfg.train.log_dir))
+
+ print_info(device)
+
+ def configure_optimizers(self):
+ self.opt = torch.optim.AdamW(
+ lr=self.cfg.train.lr,
+ weight_decay=self.cfg.train.weight_decay,
+ params=self.nfc.parameters_to_optimize(),
+ amsgrad=False)
+
+ self.scheduler = torch.optim.lr_scheduler.StepLR(self.opt, step_size=1, gamma=0.1)
+
+ def load_checkpoint(self):
+ self.epoch = 0
+ self.global_step = 0
+ dist.barrier()
+ map_location = {'cuda:%d' % 0: 'cuda:%d' % self.device}
+ model_path = os.path.join(self.cfg.output_dir, 'model.tar')
+ if os.path.exists(self.cfg.pretrained_model_path):
+ model_path = self.cfg.pretrained_model_path
+ if os.path.exists(model_path):
+ checkpoint = torch.load(model_path, map_location)
+ if 'opt' in checkpoint:
+ self.opt.load_state_dict(checkpoint['opt'])
+ if 'scheduler' in checkpoint:
+ self.scheduler.load_state_dict(checkpoint['scheduler'])
+ if 'epoch' in checkpoint:
+ self.epoch = checkpoint['epoch']
+ if 'global_step' in checkpoint:
+ self.global_step = checkpoint['global_step']
+ logger.info(f"[TRAINER] Resume training from {model_path}")
+ logger.info(f"[TRAINER] Start from step {self.global_step}")
+ logger.info(f"[TRAINER] Start from epoch {self.epoch}")
+ else:
+ logger.info('[TRAINER] Model path not found, start training from scratch')
+
+ def save_checkpoint(self, filename):
+ if self.device == 0:
+ model_dict = self.nfc.model_dict()
+
+ model_dict['opt'] = self.opt.state_dict()
+ model_dict['scheduler'] = self.scheduler.state_dict()
+ model_dict['validator'] = self.validator.state_dict()
+ model_dict['epoch'] = self.epoch
+ model_dict['global_step'] = self.global_step
+ model_dict['batch_size'] = self.batch_size
+
+ torch.save(model_dict, filename)
+
+ def training_step(self, batch):
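+        # Flatten the (actor, K) dimensions of the batch, encode the ArcFace identity, decode the
+        # FLAME shape and sum all individual losses into 'all_loss'.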
+ self.nfc.train()
+
+ images = batch['image'].to(self.device)
+ images = images.view(-1, images.shape[-3], images.shape[-2], images.shape[-1])
+ flame = batch['flame']
+ arcface = batch['arcface']
+ arcface = arcface.view(-1, arcface.shape[-3], arcface.shape[-2], arcface.shape[-1]).to(self.device)
+
+ inputs = {
+ 'images': images,
+ 'dataset': batch['dataset'][0]
+ }
+
+ encoder_output = self.nfc.encode(images, arcface)
+ encoder_output['flame'] = flame
+
+ decoder_output = self.nfc.decode(encoder_output, self.epoch)
+ losses = self.nfc.compute_losses(inputs, encoder_output, decoder_output)
+
+ all_loss = 0.
+ losses_key = losses.keys()
+
+ for key in losses_key:
+ all_loss = all_loss + losses[key]
+
+ losses['all_loss'] = all_loss
+
+ opdict = \
+ {
+ 'images': images,
+ 'flame_verts_shape': decoder_output['flame_verts_shape'],
+ 'pred_canonical_shape_vertices': decoder_output['pred_canonical_shape_vertices'],
+ }
+
+ if 'deca' in decoder_output:
+ opdict['deca'] = decoder_output['deca']
+
+ return losses, opdict
+
+ def validation_step(self):
+ self.validator.run()
+
+ def evaluation_step(self):
+ pass
+
+ def prepare_data(self):
+ generator = torch.Generator()
+ generator.manual_seed(self.device)
+
+ self.train_dataset, total_images = datasets.build_train(self.cfg.dataset, self.device)
+ self.train_dataloader = DataLoader(
+ self.train_dataset, batch_size=self.batch_size,
+ num_workers=self.cfg.dataset.num_workers,
+ shuffle=True,
+ pin_memory=True,
+ drop_last=False,
+ worker_init_fn=seed_worker,
+ generator=generator)
+
+ self.train_iter = iter(self.train_dataloader)
+ logger.info(f'[TRAINER] Training dataset is ready with {len(self.train_dataset)} actors and {total_images} images.')
+
+ def fit(self):
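+        # Step-driven training loop: the number of epochs is derived from cfg.train.max_steps,
+        # with periodic logging, visualization, validation, LR decay and checkpointing.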
+ self.prepare_data()
+ iters_every_epoch = int(len(self.train_dataset) / self.batch_size)
+ max_epochs = int(self.cfg.train.max_steps / iters_every_epoch)
+ start_epoch = self.epoch
+ for epoch in range(start_epoch, max_epochs):
+ for step in tqdm(range(iters_every_epoch), desc=f"Epoch[{epoch + 1}/{max_epochs}]"):
+ if self.global_step > self.cfg.train.max_steps:
+ break
+ try:
+ batch = next(self.train_iter)
+ except Exception as e:
+ self.train_iter = iter(self.train_dataloader)
+ batch = next(self.train_iter)
+
+ visualizeTraining = self.global_step % self.cfg.train.vis_steps == 0
+
+ self.opt.zero_grad()
+ losses, opdict = self.training_step(batch)
+
+ all_loss = losses['all_loss']
+ all_loss.backward()
+ self.opt.step()
+ self.global_step += 1
+
+ if self.global_step % self.cfg.train.log_steps == 0 and self.device == 0:
+ loss_info = f"\n" \
+ f" Epoch: {epoch}\n" \
+ f" Step: {self.global_step}\n" \
+ f" Iter: {step}/{iters_every_epoch}\n" \
+ f" LR: {self.opt.param_groups[0]['lr']}\n" \
+ f" Time: {datetime.now().strftime('%Y-%m-%d-%H:%M:%S')}\n"
+ for k, v in losses.items():
+ loss_info = loss_info + f' {k}: {v:.4f}\n'
+ if self.cfg.train.write_summary:
+ self.writer.add_scalar('train_loss/' + k, v, global_step=self.global_step)
+ logger.info(loss_info)
+
+ if visualizeTraining and self.device == 0:
+ visdict = {
+ 'input_images': opdict['images'],
+ }
+ # add images to tensorboard
+ for k, v in visdict.items():
+ self.writer.add_images(k, np.clip(v.detach().cpu(), 0.0, 1.0), self.global_step)
+
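+                    # Render up to four random samples (predicted shape vs. FLAME ground-truth shape, plus the input crop) for the training image grid.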
+ pred_canonical_shape_vertices = torch.empty(0, 3, 512, 512).cuda()
+ flame_verts_shape = torch.empty(0, 3, 512, 512).cuda()
+ deca_images = torch.empty(0, 3, 512, 512).cuda()
+ input_images = torch.empty(0, 3, 224, 224).cuda()
+ L = opdict['pred_canonical_shape_vertices'].shape[0]
+ S = 4 if L > 4 else L
+ for n in np.random.choice(range(L), size=S, replace=False):
+ rendering = self.nfc.render.render_mesh(opdict['pred_canonical_shape_vertices'][n:n + 1, ...])
+ pred_canonical_shape_vertices = torch.cat([pred_canonical_shape_vertices, rendering])
+ rendering = self.nfc.render.render_mesh(opdict['flame_verts_shape'][n:n + 1, ...])
+ flame_verts_shape = torch.cat([flame_verts_shape, rendering])
+ input_images = torch.cat([input_images, opdict['images'][n:n + 1, ...]])
+ if 'deca' in opdict:
+ deca = self.nfc.render.render_mesh(opdict['deca'][n:n + 1, ...])
+ deca_images = torch.cat([deca_images, deca])
+
+ visdict = {}
+
+ if 'deca' in opdict:
+ visdict['deca'] = deca_images
+
+ visdict["pred_canonical_shape_vertices"] = pred_canonical_shape_vertices
+ visdict["flame_verts_shape"] = flame_verts_shape
+ visdict["images"] = input_images
+
+ savepath = os.path.join(self.cfg.output_dir, 'train_images/train_' + str(epoch) + '.jpg')
+ util.visualize_grid(visdict, savepath, size=512)
+
+ if self.global_step % self.cfg.train.val_steps == 0:
+ self.validation_step()
+
+ if self.global_step % self.cfg.train.lr_update_step == 0:
+ self.scheduler.step()
+
+ if self.global_step % self.cfg.train.eval_steps == 0:
+ self.evaluation_step()
+
+ if self.global_step % self.cfg.train.checkpoint_steps == 0:
+ self.save_checkpoint(os.path.join(self.cfg.output_dir, 'model' + '.tar'))
+
+ if self.global_step % self.cfg.train.checkpoint_epochs_steps == 0:
+ self.save_checkpoint(os.path.join(self.cfg.output_dir, 'model_' + str(self.global_step) + '.tar'))
+
+ self.epoch += 1
+
+ self.save_checkpoint(os.path.join(self.cfg.output_dir, 'model' + '.tar'))
+ logger.info(f'[TRAINER] Fitting has ended!')
diff --git a/src/pixel3dmm/preprocessing/MICA/micalib/validator.py b/src/pixel3dmm/preprocessing/MICA/micalib/validator.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0856f7dcff36eb6914e0eade2e51cdf92d3f095
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/micalib/validator.py
@@ -0,0 +1,192 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import subprocess
+from copy import deepcopy
+from datetime import datetime
+
+import numpy as np
+import torch
+from loguru import logger
+from torch.utils.data import DataLoader
+
+import datasets
+from utils import util
+from utils.best_model import BestModel
+
+
+class Validator(object):
+ def __init__(self, trainer):
+ self.trainer = trainer
+ self.device = self.trainer.device
+ self.nfc = self.trainer.nfc
+ self.cfg = deepcopy(self.trainer.cfg)
+
+ # Create a separate instance only for predictions
+ # nfc = util.find_model_using_name(model_dir='nfclib.models', model_name=self.cfg.model.name)(self.cfg, self.device)
+ # self.tester = Tester(nfc, self.cfg, self.device)
+ # self.tester.render_mesh = False
+
+ self.embeddings_lyhm = {}
+ self.best_model = BestModel(trainer)
+ self.prepare_data()
+
+ def prepare_data(self):
+ self.val_dataset, total_images = datasets.build_val(self.cfg.dataset, self.device)
+ self.val_dataloader = DataLoader(
+ self.val_dataset,
+ batch_size=2,
+ shuffle=False,
+ num_workers=4,
+ pin_memory=True,
+ drop_last=False)
+
+ self.val_iter = iter(self.val_dataloader)
+ logger.info(f'[VALIDATOR] Validation dataset is ready with {len(self.val_dataset)} actors and {total_images} images.')
+
+ def state_dict(self):
+ return {
+ 'embeddings_lyhm': self.embeddings_lyhm,
+ 'best_model': self.best_model.state_dict(),
+ }
+
+ def load_state_dict(self, dict):
+ self.embeddings_lyhm = dict['embeddings_lyhm']
+ self.best_model.load_state_dict(dict['best_model'])
+
+ def update_embeddings(self, actors, arcface):
+ B = len(actors)
+ for i in range(B):
+ actor = actors[i]
+ if actor not in self.embeddings_lyhm:
+ self.embeddings_lyhm[actor] = []
+ self.embeddings_lyhm[actor].append(arcface[i].data.cpu().numpy())
+
+ def run(self):
+ with torch.no_grad():
+ # In the case of using multiple GPUs
+ if self.trainer.device != 0:
+ return
+
+ self.nfc.eval()
+ optdicts = []
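+            # Iterate over the full validation set once; when the iterator is exhausted it is re-created and the loop ends.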
+ while True:
+ try:
+ batch = next(self.val_iter)
+ except Exception as e:
+ print(e)
+ self.val_iter = iter(self.val_dataloader)
+ break
+
+ actors = batch['imagename']
+ dataset = batch['dataset']
+ images = batch['image'].cuda()
+ images = images.view(-1, images.shape[-3], images.shape[-2], images.shape[-1])
+ arcface = batch['arcface'].cuda()
+ arcface = arcface.view(-1, arcface.shape[-3], arcface.shape[-2], arcface.shape[-1]).to(self.device)
+ flame = batch['flame']
+
+ codedict = self.nfc.encode(images, arcface)
+ codedict['flame'] = flame
+ opdict = self.nfc.decode(codedict, self.trainer.epoch)
+ self.update_embeddings(actors, opdict['faceid'])
+ loss = self.nfc.compute_losses(None, None, opdict)['pred_verts_shape_canonical_diff']
+ optdicts.append((opdict, images, dataset, actors, loss))
+
+ # Calculate averages
+ weighted_average = 0.
+ average = 0.
+ avg_per_dataset = {}
+ for optdict in optdicts:
+ opdict, images, dataset, actors, loss = optdict
+ name = dataset[0]
+ average += loss
+ if name not in avg_per_dataset:
+ avg_per_dataset[name] = (loss, 1.)
+ else:
+ l, i = avg_per_dataset[name]
+ avg_per_dataset[name] = (l + loss, i + 1.)
+
+ average = average.item() / len(optdicts)
+
+ loss_info = f"Step: {self.trainer.global_step}, Time: {datetime.now().strftime('%Y-%m-%d-%H:%M:%S')} \n"
+ loss_info += f' validation loss (average) : {average:.5f} \n'
+ logger.info(loss_info)
+
+ self.trainer.writer.add_scalar('val/average', average, global_step=self.trainer.global_step)
+ for key in avg_per_dataset.keys():
+ l, i = avg_per_dataset[key]
+ avg = l.item() / i
+ self.trainer.writer.add_scalar(f'val/average_{key}', avg, global_step=self.trainer.global_step)
+
+ # Save best model
+ smoothed_weighted, smoothed = self.best_model(weighted_average, average)
+ self.trainer.writer.add_scalar(f'val/smoothed_average', smoothed, global_step=self.trainer.global_step)
+
+ # self.now()
+
+ # Print embeddings every nth validation step
+ if self.trainer.global_step % (self.cfg.train.val_steps * 5) == 0:
+ lyhm_keys = list(self.embeddings_lyhm.keys())
+                embeddings = {key: self.embeddings_lyhm[key] for key in lyhm_keys}
+ # util.save_embedding_projection(embeddings, os.path.join(self.cfg.output_dir, self.cfg.train.val_vis_dir, f'{self.trainer.global_step:08}_embeddings.jpg'))
+ self.embeddings_lyhm = {}
+
+ # Render predicted meshes
+ if self.trainer.global_step % self.cfg.train.val_save_img != 0:
+ return
+
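+            # Render four random validation samples (prediction vs. FLAME ground truth) next to the corresponding input crops for the visualization grid.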
+ pred_canonical_shape_vertices = torch.empty(0, 3, 512, 512).cuda()
+ flame_verts_shape = torch.empty(0, 3, 512, 512).cuda()
+ input_images = torch.empty(0, 3, 224, 224).cuda()
+
+ for i in np.random.choice(range(0, len(optdicts)), size=4, replace=False):
+ opdict, images, _, _, _ = optdicts[i]
+                n = np.random.randint(0, len(images))
+ rendering = self.nfc.render.render_mesh(opdict['pred_canonical_shape_vertices'][n:n + 1, ...])
+ pred_canonical_shape_vertices = torch.cat([pred_canonical_shape_vertices, rendering])
+ rendering = self.nfc.render.render_mesh(opdict['flame_verts_shape'][n:n + 1, ...])
+ flame_verts_shape = torch.cat([flame_verts_shape, rendering])
+ input_images = torch.cat([input_images, images[n:n + 1, ...]])
+
+ visdict = {
+ "pred_canonical_shape_vertices": pred_canonical_shape_vertices,
+ "flame_verts_shape": flame_verts_shape,
+ "input": input_images
+ }
+
+ savepath = os.path.join(self.cfg.output_dir, self.cfg.train.val_vis_dir, f'{self.trainer.global_step:08}.jpg')
+ util.visualize_grid(visdict, savepath, size=512)
+
+ def now(self):
+ logger.info(f'[Validator] NoW testing has begun...')
+ # self.tester.test_now('', 'training', self.nfc.model_dict())
+ root = os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))
+ path = f'{root}{self.cfg.output_dir[1:]}/now_test_training/predicted_meshes'
+ cmd = f'./now_validation.sh {path}'
+ subprocess.call(cmd, shell=True)
+ errors = np.load(f'{path}/results/_computed_distances.npy', allow_pickle=True, encoding="latin1").item()['computed_distances']
+ median = np.median(np.hstack(errors))
+ mean = np.mean(np.hstack(errors))
+ std = np.std(np.hstack(errors))
+
+ self.best_model.now(median, mean, std)
+
+ self.trainer.writer.add_scalar(f'val/now_mean', mean, global_step=self.trainer.global_step)
+ logger.info(f'[Validator] NoW testing has ended...')
diff --git a/src/pixel3dmm/preprocessing/MICA/models/__init__.py b/src/pixel3dmm/preprocessing/MICA/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/pixel3dmm/preprocessing/MICA/models/arcface.py b/src/pixel3dmm/preprocessing/MICA/models/arcface.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e0bced12f02e6156f9a6629e7ca5495a95c22dd
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/models/arcface.py
@@ -0,0 +1,199 @@
+### Taken from https://github.com/deepinsight/insightface/blob/master/recognition/arcface_torch/backbones/iresnet.py
+
+import os
+
+import torch
+from loguru import logger
+from torch import nn
+
+__all__ = ['iresnet18', 'iresnet34', 'iresnet50', 'iresnet100', 'iresnet200']
+
+
+def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
+ """3x3 convolution with padding"""
+ return nn.Conv2d(in_planes,
+ out_planes,
+ kernel_size=3,
+ stride=stride,
+ padding=dilation,
+ groups=groups,
+ bias=False,
+ dilation=dilation)
+
+
+def conv1x1(in_planes, out_planes, stride=1):
+ """1x1 convolution"""
+ return nn.Conv2d(in_planes,
+ out_planes,
+ kernel_size=1,
+ stride=stride,
+ bias=False)
+
+
+class IBasicBlock(nn.Module):
+ expansion = 1
+
+ def __init__(self, inplanes, planes, stride=1, downsample=None,
+ groups=1, base_width=64, dilation=1):
+ super(IBasicBlock, self).__init__()
+ if groups != 1 or base_width != 64:
+ raise ValueError('BasicBlock only supports groups=1 and base_width=64')
+ if dilation > 1:
+ raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
+ self.bn1 = nn.BatchNorm2d(inplanes, eps=1e-05, )
+ self.conv1 = conv3x3(inplanes, planes)
+ self.bn2 = nn.BatchNorm2d(planes, eps=1e-05, )
+ self.prelu = nn.PReLU(planes)
+ self.conv2 = conv3x3(planes, planes, stride)
+ self.bn3 = nn.BatchNorm2d(planes, eps=1e-05, )
+ self.downsample = downsample
+ self.stride = stride
+
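+    # Pre-activation residual block: BN -> 3x3 conv -> BN -> PReLU -> 3x3 conv -> BN, with an optional downsample on the identity path.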
+ def forward(self, x):
+ identity = x
+ out = self.bn1(x)
+ out = self.conv1(out)
+ out = self.bn2(out)
+ out = self.prelu(out)
+ out = self.conv2(out)
+ out = self.bn3(out)
+ if self.downsample is not None:
+ identity = self.downsample(x)
+ out += identity
+ return out
+
+
+class IResNet(nn.Module):
+ fc_scale = 7 * 7
+
+ def __init__(self,
+ block, layers, dropout=0, num_features=512, zero_init_residual=False,
+ groups=1, width_per_group=64, replace_stride_with_dilation=None, fp16=False):
+ super(IResNet, self).__init__()
+ self.fp16 = fp16
+ self.inplanes = 64
+ self.dilation = 1
+ self.block = block
+ if replace_stride_with_dilation is None:
+ replace_stride_with_dilation = [False, False, False]
+ if len(replace_stride_with_dilation) != 3:
+ raise ValueError("replace_stride_with_dilation should be None "
+ "or a 3-element tuple, got {}".format(replace_stride_with_dilation))
+ self.groups = groups
+ self.base_width = width_per_group
+ self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False)
+ self.bn1 = nn.BatchNorm2d(self.inplanes, eps=1e-05)
+ self.prelu = nn.PReLU(self.inplanes)
+ self.layer1 = self._make_layer(block, 64, layers[0], stride=2)
+ self.layer2 = self._make_layer(block,
+ 128,
+ layers[1],
+ stride=2,
+ dilate=replace_stride_with_dilation[0])
+ self.layer3 = self._make_layer(block,
+ 256,
+ layers[2],
+ stride=2,
+ dilate=replace_stride_with_dilation[1])
+ self.layer4 = self._make_layer(block,
+ 512,
+ layers[3],
+ stride=2,
+ dilate=replace_stride_with_dilation[2])
+ self.bn2 = nn.BatchNorm2d(512 * block.expansion, eps=1e-05, )
+ self.dropout = nn.Dropout(p=dropout, inplace=True)
+ self.fc = nn.Linear(512 * block.expansion * self.fc_scale, num_features)
+ self.features = nn.BatchNorm1d(num_features, eps=1e-05)
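+        # Fix the affine scale of the final feature BatchNorm at 1; only its bias remains trainable.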
+ nn.init.constant_(self.features.weight, 1.0)
+ self.features.weight.requires_grad = False
+
+ for m in self.modules():
+ if isinstance(m, nn.Conv2d):
+ nn.init.normal_(m.weight, 0, 0.1)
+ elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
+ nn.init.constant_(m.weight, 1)
+ nn.init.constant_(m.bias, 0)
+
+ if zero_init_residual:
+ for m in self.modules():
+ if isinstance(m, IBasicBlock):
+ nn.init.constant_(m.bn2.weight, 0)
+
+ def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
+ downsample = None
+ previous_dilation = self.dilation
+ if dilate:
+ self.dilation *= stride
+ stride = 1
+ if stride != 1 or self.inplanes != planes * block.expansion:
+ downsample = nn.Sequential(
+ conv1x1(self.inplanes, planes * block.expansion, stride),
+ nn.BatchNorm2d(planes * block.expansion, eps=1e-05, ),
+ )
+ layers = []
+ layers.append(
+ block(self.inplanes, planes, stride, downsample, self.groups,
+ self.base_width, previous_dilation))
+ self.inplanes = planes * block.expansion
+ for _ in range(1, blocks):
+ layers.append(
+ block(self.inplanes,
+ planes,
+ groups=self.groups,
+ base_width=self.base_width,
+ dilation=self.dilation))
+
+ return nn.Sequential(*layers)
+
+ def forward(self, x):
+ with torch.cuda.amp.autocast(self.fp16):
+ x = self.conv1(x)
+ x = self.bn1(x)
+ x = self.prelu(x)
+ x = self.layer1(x)
+ x = self.layer2(x)
+ x = self.layer3(x)
+ x = self.layer4(x)
+ x = self.bn2(x)
+ x = torch.flatten(x, 1)
+ x = self.dropout(x)
+ x = self.fc(x.float() if self.fp16 else x)
+ x = self.features(x)
+ return x
+
+
+class Arcface(IResNet):
+ def __init__(self, pretrained_path=None, **kwargs):
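+        # The block configuration [3, 13, 30, 3] matches the insightface iresnet100 backbone.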
+ super(Arcface, self).__init__(IBasicBlock, [3, 13, 30, 3], **kwargs)
+ if pretrained_path is not None and os.path.exists(pretrained_path):
+ logger.info(f'[Arcface] Initializing from insightface model from {pretrained_path}.')
+ self.load_state_dict(torch.load(pretrained_path))
+ self.freezer([self.layer1, self.layer2, self.layer3, self.conv1, self.bn1, self.prelu])
+
+ def freezer(self, layers):
+ for layer in layers:
+ for block in layer.parameters():
+ block.requires_grad = False
+
+ def forward(self, images):
+ x = self.forward_arcface(images)
+ return x
+
+ def forward_arcface(self, x):
+ with torch.cuda.amp.autocast(self.fp16):
+ ### FROZEN ###
+ with torch.no_grad():
+ x = self.conv1(x)
+ x = self.bn1(x)
+ x = self.prelu(x)
+ x = self.layer1(x)
+ x = self.layer2(x)
+ x = self.layer3(x)
+
+ x = self.layer4(x)
+ x = self.bn2(x)
+ x = torch.flatten(x, 1)
+ x = self.dropout(x)
+ x = self.fc(x.float() if self.fp16 else x)
+ x = self.features(x)
+ return x
diff --git a/src/pixel3dmm/preprocessing/MICA/models/flame.py b/src/pixel3dmm/preprocessing/MICA/models/flame.py
new file mode 100644
index 0000000000000000000000000000000000000000..bae26116b5bf4ed0efda5d04e8707bda8ec36af3
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/models/flame.py
@@ -0,0 +1,298 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de
+
+
+import pickle
+
+import loguru
+import numpy as np
+import torch
+import torch.nn as nn
+
+from .lbs import lbs, batch_rodrigues, vertices2landmarks, rot_mat_to_euler
+
+
+def to_tensor(array, dtype=torch.float32):
+ if 'torch.tensor' not in str(type(array)):
+        return torch.tensor(array, dtype=dtype)
+    return array
+
+
+def to_np(array, dtype=np.float32):
+ if 'scipy.sparse' in str(type(array)):
+ array = array.todense()
+ return np.array(array, dtype=dtype)
+
+
+class Struct(object):
+ def __init__(self, **kwargs):
+ for key, val in kwargs.items():
+ setattr(self, key, val)
+
+
+class FLAME(nn.Module):
+ """
+ borrowed from https://github.com/soubhiksanyal/FLAME_PyTorch/blob/master/FLAME.py
+    Given FLAME parameters, this class provides a differentiable FLAME function
+    which outputs a mesh and 2D/3D facial landmarks
+ """
+
+ def __init__(self, config, optimize_basis=False):
+ super(FLAME, self).__init__()
+ loguru.logger.info("[FLAME] creating the FLAME Decoder")
+ with open(config.flame_model_path, 'rb') as f:
+ ss = pickle.load(f, encoding='latin1')
+ flame_model = Struct(**ss)
+
+ self.optimize_basis = optimize_basis
+ self.cfg = config
+ self.dtype = torch.float32
+ self.register_buffer('faces_tensor', to_tensor(to_np(flame_model.f, dtype=np.int64), dtype=torch.long))
+ # The vertices of the template model
+ self.register_buffer('v_template', to_tensor(to_np(flame_model.v_template), dtype=self.dtype))
+ self.n_vertices = self.v_template.shape[0]
+ # The shape components and expression
+ shapedirs = to_tensor(to_np(flame_model.shapedirs), dtype=self.dtype)
+ shapedirs = torch.cat([shapedirs[:, :, :config.n_shape], shapedirs[:, :, 300:]], 2)
+
+ if optimize_basis:
+ self.register_parameter('shapedirs', torch.nn.Parameter(shapedirs))
+ else:
+ self.register_buffer('shapedirs', shapedirs)
+
+ self.n_shape = config.n_shape
+ # The pose components
+ num_pose_basis = flame_model.posedirs.shape[-1]
+ posedirs = np.reshape(flame_model.posedirs, [-1, num_pose_basis]).T
+ self.register_buffer('posedirs', to_tensor(to_np(posedirs), dtype=self.dtype))
+ #
+ self.register_buffer('J_regressor', to_tensor(to_np(flame_model.J_regressor), dtype=self.dtype))
+        parents = to_tensor(to_np(flame_model.kintree_table[0])).long()
+ parents[0] = -1
+ self.register_buffer('parents', parents)
+ self.register_buffer('lbs_weights', to_tensor(to_np(flame_model.weights), dtype=self.dtype))
+
+ # Fixing Eyeball and neck rotation
+        default_eyeball_pose = torch.zeros([1, 6], dtype=self.dtype, requires_grad=False)
+        self.register_parameter('eye_pose', nn.Parameter(default_eyeball_pose, requires_grad=False))
+ default_neck_pose = torch.zeros([1, 3], dtype=self.dtype, requires_grad=False)
+ self.register_parameter('neck_pose', nn.Parameter(default_neck_pose, requires_grad=False))
+
+ # Static and Dynamic Landmark embeddings for FLAME
+ lmk_embeddings = np.load(config.flame_lmk_embedding_path, allow_pickle=True, encoding='latin1')
+ lmk_embeddings = lmk_embeddings[()]
+ self.register_buffer('lmk_faces_idx', torch.from_numpy(lmk_embeddings['static_lmk_faces_idx']).long())
+ self.register_buffer('lmk_bary_coords', torch.from_numpy(lmk_embeddings['static_lmk_bary_coords']).to(self.dtype))
+ self.register_buffer('dynamic_lmk_faces_idx', lmk_embeddings['dynamic_lmk_faces_idx'].long())
+ self.register_buffer('dynamic_lmk_bary_coords', lmk_embeddings['dynamic_lmk_bary_coords'].to(self.dtype))
+ self.register_buffer('full_lmk_faces_idx', torch.from_numpy(lmk_embeddings['full_lmk_faces_idx']).long())
+ self.register_buffer('full_lmk_bary_coords', torch.from_numpy(lmk_embeddings['full_lmk_bary_coords']).to(self.dtype))
+
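+        # Walk the kinematic tree from the neck joint (index 1) up to the root; this chain is used to compute the head rotation for the dynamic contour landmarks.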
+        neck_kin_chain = []
+ NECK_IDX = 1
+ curr_idx = torch.tensor(NECK_IDX, dtype=torch.long)
+ while curr_idx != -1:
+ neck_kin_chain.append(curr_idx)
+ curr_idx = self.parents[curr_idx]
+ self.register_buffer('neck_kin_chain', torch.stack(neck_kin_chain))
+
+ def _find_dynamic_lmk_idx_and_bcoords(self, pose, dynamic_lmk_faces_idx,
+ dynamic_lmk_b_coords,
+ neck_kin_chain, dtype=torch.float32):
+ """
+        Selects the face contour depending on the relative position of the head
+ Input:
+ vertices: N X num_of_vertices X 3
+ pose: N X full pose
+ dynamic_lmk_faces_idx: The list of contour face indexes
+ dynamic_lmk_b_coords: The list of contour barycentric weights
+ neck_kin_chain: The tree to consider for the relative rotation
+ dtype: Data type
+ return:
+ The contour face indexes and the corresponding barycentric weights
+ """
+
+ batch_size = pose.shape[0]
+
+ aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1,
+ neck_kin_chain)
+ rot_mats = batch_rodrigues(
+ aa_pose.view(-1, 3), dtype=dtype).view(batch_size, -1, 3, 3)
+
+ rel_rot_mat = torch.eye(3, device=pose.device,
+ dtype=dtype).unsqueeze_(dim=0).expand(batch_size, -1, -1)
+ for idx in range(len(neck_kin_chain)):
+ rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat)
+
+ y_rot_angle = torch.round(
+ torch.clamp(rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi,
+ max=39)).to(dtype=torch.long)
+
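+        # Map the clamped head rotation (in degrees) to a look-up-table row: 0-39 for non-negative angles, 40-78 for negative ones.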
+ neg_mask = y_rot_angle.lt(0).to(dtype=torch.long)
+ mask = y_rot_angle.lt(-39).to(dtype=torch.long)
+ neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle)
+ y_rot_angle = (neg_mask * neg_vals +
+ (1 - neg_mask) * y_rot_angle)
+
+ dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx,
+ 0, y_rot_angle)
+ dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords,
+ 0, y_rot_angle)
+ return dyn_lmk_faces_idx, dyn_lmk_b_coords
+
+ def _vertices2landmarks(self, vertices, faces, lmk_faces_idx, lmk_bary_coords):
+ """
+ Calculates landmarks by barycentric interpolation
+ Input:
+ vertices: torch.tensor NxVx3, dtype = torch.float32
+ The tensor of input vertices
+ faces: torch.tensor (N*F)x3, dtype = torch.long
+ The faces of the mesh
+ lmk_faces_idx: torch.tensor N X L, dtype = torch.long
+ The tensor with the indices of the faces used to calculate the
+ landmarks.
+ lmk_bary_coords: torch.tensor N X L X 3, dtype = torch.float32
+ The tensor of barycentric coordinates that are used to interpolate
+ the landmarks
+
+ Returns:
+ landmarks: torch.tensor NxLx3, dtype = torch.float32
+ The coordinates of the landmarks for each mesh in the batch
+ """
+ # Extract the indices of the vertices for each face
+ # NxLx3
+        batch_size, num_verts = vertices.shape[:2]
+ lmk_faces = torch.index_select(faces, 0, lmk_faces_idx.view(-1)).view(
+ 1, -1, 3).view(batch_size, lmk_faces_idx.shape[1], -1)
+
+ lmk_faces += torch.arange(batch_size, dtype=torch.long).view(-1, 1, 1).to(
+ device=vertices.device) * num_verts
+
+ lmk_vertices = vertices.view(-1, 3)[lmk_faces]
+ landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords])
+ return landmarks
+
+ # def seletec_3d68(self, vertices):
+ def compute_landmarks(self, vertices):
+ landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+ self.full_lmk_faces_idx.repeat(vertices.shape[0], 1),
+ self.full_lmk_bary_coords.repeat(vertices.shape[0], 1, 1))
+ return landmarks3d
+
+ def seletec_3d68(self, vertices):
+ landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+ self.full_lmk_faces_idx.repeat(vertices.shape[0], 1),
+ self.full_lmk_bary_coords.repeat(vertices.shape[0], 1, 1))
+ return landmarks3d
+
+ def project_to_shape_basis(self, shape_vector, shape_as_offset=False):
+ batch_size = shape_vector.shape[0]
+ n_vertices = self.v_template.shape[0]
+ n_eigenvectors = self.n_shape
+ # shape_params = basis dot (shape_vector - average) # uses properties of the PCA
+ if shape_as_offset:
+ diff = shape_vector
+ else:
+ diff = shape_vector - self.v_template
+ return torch.matmul(diff.reshape(batch_size, -1), self.shapedirs[:, :, :n_eigenvectors].reshape(3 * n_vertices, n_eigenvectors))
+
+ def compute_distance_to_basis(self, shape_vector, shape_as_offset=False):
+ batch_size = shape_vector.shape[0]
+ n_vertices = self.v_template.shape[0]
+ n_eigenvectors = self.n_shape
+
+ # shape_vector torch.Size([3, 5023, 3])
+ # self.v_template torch.Size([5023, 3])
+ # self.shapedirs torch.Size([5023, 3, 150])
+ # diff torch.Size([3, 5023, 3])
+ # shape_params torch.Size([5023, 15069])
+
+ # shape_params = basis dot (shape_vector - average) # uses properties of the PCA
+ if shape_as_offset:
+ diff = shape_vector
+ else:
+ diff = shape_vector - self.v_template
+ shape_params = torch.matmul(diff.reshape(batch_size, -1), self.shapedirs[:, :, :n_eigenvectors].reshape(3 * n_vertices, n_eigenvectors))
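+        # Residual between the vertex offsets and their reconstruction from the first n_eigenvectors shape components.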
+ distance = diff - torch.matmul(shape_params, self.shapedirs[:, :, :n_eigenvectors].reshape(n_vertices * 3, n_eigenvectors).t()).reshape(batch_size, n_vertices, 3)
+ return distance
+
+ def get_std(self):
+ n_eigenvectors = self.cfg.n_shape
+ basis = self.shapedirs[:, :, :n_eigenvectors]
+ std = torch.norm(basis.reshape(-1, n_eigenvectors), dim=0)
+
+ return std
+
+ def compute_closest_shape(self, shape_vector):
+ B = shape_vector.shape[0]
+ N = self.v_template.shape[0]
+ n_eigenvectors = self.cfg.n_shape
+
+ basis = self.shapedirs[:, :, :n_eigenvectors]
+ diff = (shape_vector - self.v_template).reshape(B, -1)
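+        # Least-squares projection of the offsets onto the shape basis: each coefficient is the dot product with a basis column divided by that column's squared norm (this assumes the PCA basis columns are mutually orthogonal).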
+ std = torch.norm(basis.reshape(-1, n_eigenvectors), dim=0)
+ inv = 1.0 / std.square()
+ params = inv * torch.matmul(diff, basis.reshape(3 * N, n_eigenvectors))
+ # params = torch.max(torch.min(params, std*-3.0), std*3.0)
+
+ return self.v_template + torch.matmul(params, basis.reshape(N * 3, n_eigenvectors).T).reshape(B, N, 3), params
+
+ def forward(self, shape_params=None, expression_params=None, pose_params=None, eye_pose_params=None, neck_pose_params=None, shape_basis_delta=None):
+ """
+ Input:
+ shape_params: N X number of shape parameters
+ expression_params: N X number of expression parameters
+ pose_params: N X number of pose parameters (6)
+        return:
+ vertices: N X V X 3
+ landmarks: N X number of landmarks X 3
+ """
+ batch_size = shape_params.shape[0]
+ if pose_params is None:
+ pose_params = self.eye_pose.expand(batch_size, -1)
+ if eye_pose_params is None:
+ eye_pose_params = self.eye_pose.expand(batch_size, -1)
+ if neck_pose_params is None:
+ neck_pose_params = self.neck_pose.expand(batch_size, -1)
+ if expression_params is None:
+ expression_params = torch.zeros([1, 100], dtype=self.dtype, requires_grad=False, device=self.neck_pose.device).expand(batch_size, -1)
+
+ betas = torch.cat([shape_params, expression_params], dim=1)
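+        # Full pose layout (axis-angle): global rotation (3), neck (3), jaw (3), eye poses (2 x 3).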
+ full_pose = torch.cat([pose_params[:, :3], neck_pose_params, pose_params[:, 3:], eye_pose_params], dim=1)
+ template_vertices = self.v_template.unsqueeze(0).expand(batch_size, -1, -1)
+
+ vertices, _ = lbs(betas, full_pose, template_vertices,
+ self.shapedirs, self.posedirs,
+ self.J_regressor, self.parents,
+ self.lbs_weights, dtype=self.dtype)
+
+ lmk_faces_idx = self.lmk_faces_idx.unsqueeze(dim=0).expand(batch_size, -1)
+ lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).expand(batch_size, -1, -1)
+
+ dyn_lmk_faces_idx, dyn_lmk_bary_coords = self._find_dynamic_lmk_idx_and_bcoords(
+ full_pose, self.dynamic_lmk_faces_idx,
+ self.dynamic_lmk_bary_coords,
+ self.neck_kin_chain, dtype=self.dtype)
+ lmk_faces_idx = torch.cat([dyn_lmk_faces_idx, lmk_faces_idx], 1)
+ lmk_bary_coords = torch.cat([dyn_lmk_bary_coords, lmk_bary_coords], 1)
+
+ landmarks2d = vertices2landmarks(vertices, self.faces_tensor,
+ lmk_faces_idx,
+ lmk_bary_coords)
+ bz = vertices.shape[0]
+ landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+ self.full_lmk_faces_idx.repeat(bz, 1),
+ self.full_lmk_bary_coords.repeat(bz, 1, 1))
+ return vertices, landmarks2d, landmarks3d
diff --git a/src/pixel3dmm/preprocessing/MICA/models/generator.py b/src/pixel3dmm/preprocessing/MICA/models/generator.py
new file mode 100644
index 0000000000000000000000000000000000000000..95f1752a9887777a723e8f1e442057cd687af828
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/models/generator.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as Functional
+
+from models.flame import FLAME
+
+
+def kaiming_leaky_init(m):
+ classname = m.__class__.__name__
+ if classname.find('Linear') != -1:
+ torch.nn.init.kaiming_normal_(m.weight, a=0.2, mode='fan_in', nonlinearity='leaky_relu')
+
+
+class MappingNetwork(nn.Module):
+ def __init__(self, z_dim, map_hidden_dim, map_output_dim, hidden=2):
+ super().__init__()
+
+ if hidden > 5:
+ self.skips = [int(hidden / 2)]
+ else:
+ self.skips = []
+
+ self.network = nn.ModuleList(
+ [nn.Linear(z_dim, map_hidden_dim)] +
+ [nn.Linear(map_hidden_dim, map_hidden_dim) if i not in self.skips else
+ nn.Linear(map_hidden_dim + z_dim, map_hidden_dim) for i in range(hidden)]
+ )
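+        # For deeper networks (hidden > 5) the input code z is re-injected at the middle layer via concatenation (see self.skips).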
+
+ self.output = nn.Linear(map_hidden_dim, map_output_dim)
+ self.network.apply(kaiming_leaky_init)
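+        # Damp the output-layer weights so the initial shape prediction stays small, i.e. close to the FLAME mean shape.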
+ with torch.no_grad():
+ self.output.weight *= 0.25
+
+ def forward(self, z):
+ h = z
+ for i, l in enumerate(self.network):
+ h = self.network[i](h)
+ h = Functional.leaky_relu(h, negative_slope=0.2)
+ if i in self.skips:
+ h = torch.cat([z, h], 1)
+
+ output = self.output(h)
+ return output
+
+
+class Generator(nn.Module):
+ def __init__(self, z_dim, map_hidden_dim, map_output_dim, hidden, model_cfg, device, regress=True):
+ super().__init__()
+ self.device = device
+ self.cfg = model_cfg
+ self.regress = regress
+
+ if self.regress:
+ self.regressor = MappingNetwork(z_dim, map_hidden_dim, map_output_dim, hidden).to(self.device)
+ self.generator = FLAME(model_cfg).to(self.device)
+
+ def forward(self, arcface):
+ if self.regress:
+ shape = self.regressor(arcface)
+ else:
+ shape = arcface
+
+ prediction, _, _ = self.generator(shape_params=shape)
+
+ return prediction, shape
diff --git a/src/pixel3dmm/preprocessing/MICA/models/lbs.py b/src/pixel3dmm/preprocessing/MICA/models/lbs.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e04834e2254e9d94974b4438d9f47734b313378
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/models/lbs.py
@@ -0,0 +1,379 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de
+
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import numpy as np
+import torch
+import torch.nn.functional as F
+
+
+def rot_mat_to_euler(rot_mats):
+    # Extracts an Euler angle (rotation about the y-axis) from a rotation matrix
+    # Be careful with extreme cases of Euler angles like [0.0, pi, 0.0]
+
+ sy = torch.sqrt(rot_mats[:, 0, 0] * rot_mats[:, 0, 0] +
+ rot_mats[:, 1, 0] * rot_mats[:, 1, 0])
+ return torch.atan2(-rot_mats[:, 2, 0], sy)
+
+
+def find_dynamic_lmk_idx_and_bcoords(vertices, pose, dynamic_lmk_faces_idx,
+ dynamic_lmk_b_coords,
+ neck_kin_chain, dtype=torch.float32):
+    ''' Compute the faces and barycentric coordinates for the dynamic landmarks
+
+
+ To do so, we first compute the rotation of the neck around the y-axis
+ and then use a pre-computed look-up table to find the faces and the
+ barycentric coordinates that will be used.
+
+ Special thanks to Soubhik Sanyal (soubhik.sanyal@tuebingen.mpg.de)
+ for providing the original TensorFlow implementation and for the LUT.
+
+ Parameters
+ ----------
+ vertices: torch.tensor BxVx3, dtype = torch.float32
+ The tensor of input vertices
+ pose: torch.tensor Bx(Jx3), dtype = torch.float32
+ The current pose of the body model
+ dynamic_lmk_faces_idx: torch.tensor L, dtype = torch.long
+ The look-up table from neck rotation to faces
+ dynamic_lmk_b_coords: torch.tensor Lx3, dtype = torch.float32
+ The look-up table from neck rotation to barycentric coordinates
+ neck_kin_chain: list
+ A python list that contains the indices of the joints that form the
+ kinematic chain of the neck.
+ dtype: torch.dtype, optional
+
+ Returns
+ -------
+ dyn_lmk_faces_idx: torch.tensor, dtype = torch.long
+ A tensor of size BxL that contains the indices of the faces that
+ will be used to compute the current dynamic landmarks.
+ dyn_lmk_b_coords: torch.tensor, dtype = torch.float32
+        A tensor of size BxLx3 that contains the barycentric coordinates
+        that will be used to compute the current dynamic landmarks.
+ '''
+
+ batch_size = vertices.shape[0]
+
+ aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1,
+ neck_kin_chain)
+ rot_mats = batch_rodrigues(
+ aa_pose.view(-1, 3), dtype=dtype).view(batch_size, -1, 3, 3)
+
+ rel_rot_mat = torch.eye(3, device=vertices.device,
+ dtype=dtype).unsqueeze_(dim=0)
+ for idx in range(len(neck_kin_chain)):
+ rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat)
+
+ y_rot_angle = torch.round(
+ torch.clamp(-rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi,
+ max=39)).to(dtype=torch.long)
+ neg_mask = y_rot_angle.lt(0).to(dtype=torch.long)
+ mask = y_rot_angle.lt(-39).to(dtype=torch.long)
+ neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle)
+ y_rot_angle = (neg_mask * neg_vals +
+ (1 - neg_mask) * y_rot_angle)
+
+ dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx,
+ 0, y_rot_angle)
+ dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords,
+ 0, y_rot_angle)
+
+ return dyn_lmk_faces_idx, dyn_lmk_b_coords
+
+
+def vertices2landmarks(vertices, faces, lmk_faces_idx, lmk_bary_coords):
+ ''' Calculates landmarks by barycentric interpolation
+
+ Parameters
+ ----------
+ vertices: torch.tensor BxVx3, dtype = torch.float32
+ The tensor of input vertices
+ faces: torch.tensor Fx3, dtype = torch.long
+ The faces of the mesh
+ lmk_faces_idx: torch.tensor L, dtype = torch.long
+ The tensor with the indices of the faces used to calculate the
+ landmarks.
+ lmk_bary_coords: torch.tensor Lx3, dtype = torch.float32
+ The tensor of barycentric coordinates that are used to interpolate
+ the landmarks
+
+ Returns
+ -------
+ landmarks: torch.tensor BxLx3, dtype = torch.float32
+ The coordinates of the landmarks for each mesh in the batch
+ '''
+ # Extract the indices of the vertices for each face
+ # BxLx3
+ batch_size, num_verts = vertices.shape[:2]
+ device = vertices.device
+
+ lmk_faces = torch.index_select(faces, 0, lmk_faces_idx.view(-1)).view(
+ batch_size, -1, 3)
+
+ lmk_faces += torch.arange(
+ batch_size, dtype=torch.long, device=device).view(-1, 1, 1) * num_verts
+
+ lmk_vertices = vertices.view(-1, 3)[lmk_faces].view(
+ batch_size, -1, 3, 3)
+
+ landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords])
+ return landmarks
+
+
+def lbs(betas, pose, v_template, shapedirs, posedirs, J_regressor, parents,
+ lbs_weights, pose2rot=True, dtype=torch.float32):
+ ''' Performs Linear Blend Skinning with the given shape and pose parameters
+
+ Parameters
+ ----------
+ betas : torch.tensor BxNB
+ The tensor of shape parameters
+ pose : torch.tensor Bx(J + 1) * 3
+ The pose parameters in axis-angle format
+ v_template torch.tensor BxVx3
+ The template mesh that will be deformed
+ shapedirs : torch.tensor 1xNB
+ The tensor of PCA shape displacements
+ posedirs : torch.tensor Px(V * 3)
+ The pose PCA coefficients
+ J_regressor : torch.tensor JxV
+ The regressor array that is used to calculate the joints from
+ the position of the vertices
+ parents: torch.tensor J
+ The array that describes the kinematic tree for the model
+ lbs_weights: torch.tensor N x V x (J + 1)
+ The linear blend skinning weights that represent how much the
+ rotation matrix of each part affects each vertex
+ pose2rot: bool, optional
+ Flag on whether to convert the input pose tensor to rotation
+ matrices. The default value is True. If False, then the pose tensor
+ should already contain rotation matrices and have a size of
+ Bx(J + 1)x9
+ dtype: torch.dtype, optional
+
+ Returns
+ -------
+ verts: torch.tensor BxVx3
+ The vertices of the mesh after applying the shape and pose
+ displacements.
+ joints: torch.tensor BxJx3
+ The joints of the model
+ '''
+
+ batch_size = max(betas.shape[0], pose.shape[0])
+ device = betas.device
+
+ # Add shape contribution
+ v_shaped = v_template + blend_shapes(betas, shapedirs)
+
+ # Get the joints
+ # NxJx3 array
+ J = vertices2joints(J_regressor, v_shaped)
+
+ # 3. Add pose blend shapes
+ # N x J x 3 x 3
+ ident = torch.eye(3, dtype=dtype, device=device)
+ if pose2rot:
+ rot_mats = batch_rodrigues(
+ pose.view(-1, 3), dtype=dtype).view([batch_size, -1, 3, 3])
+
+ pose_feature = (rot_mats[:, 1:, :, :] - ident).view([batch_size, -1])
+ # (N x P) x (P, V * 3) -> N x V x 3
+ pose_offsets = torch.matmul(pose_feature, posedirs) \
+ .view(batch_size, -1, 3)
+ else:
+ pose_feature = pose[:, 1:].view(batch_size, -1, 3, 3) - ident
+ rot_mats = pose.view(batch_size, -1, 3, 3)
+
+ pose_offsets = torch.matmul(pose_feature.view(batch_size, -1),
+ posedirs).view(batch_size, -1, 3)
+
+ v_posed = pose_offsets + v_shaped
+ # 4. Get the global joint location
+ J_transformed, A = batch_rigid_transform(rot_mats, J, parents, dtype=dtype)
+
+ # 5. Do skinning:
+ # W is N x V x (J + 1)
+ W = lbs_weights.unsqueeze(dim=0).expand([batch_size, -1, -1])
+ # (N x V x (J + 1)) x (N x (J + 1) x 16)
+ num_joints = J_regressor.shape[0]
+ T = torch.matmul(W, A.view(batch_size, num_joints, 16)) \
+ .view(batch_size, -1, 4, 4)
+
+ homogen_coord = torch.ones([batch_size, v_posed.shape[1], 1],
+ dtype=dtype, device=device)
+ v_posed_homo = torch.cat([v_posed, homogen_coord], dim=2)
+ v_homo = torch.matmul(T, torch.unsqueeze(v_posed_homo, dim=-1))
+
+ verts = v_homo[:, :, :3, 0]
+
+ return verts, J_transformed
+
+
+def vertices2joints(J_regressor, vertices):
+ ''' Calculates the 3D joint locations from the vertices
+
+ Parameters
+ ----------
+ J_regressor : torch.tensor JxV
+ The regressor array that is used to calculate the joints from the
+ position of the vertices
+ vertices : torch.tensor BxVx3
+ The tensor of mesh vertices
+
+ Returns
+ -------
+ torch.tensor BxJx3
+ The location of the joints
+ '''
+
+ return torch.einsum('bik,ji->bjk', [vertices, J_regressor])
+
+
+def blend_shapes(betas, shape_disps):
+ ''' Calculates the per vertex displacement due to the blend shapes
+
+
+ Parameters
+ ----------
+ betas : torch.tensor Bx(num_betas)
+ Blend shape coefficients
+ shape_disps: torch.tensor Vx3x(num_betas)
+ Blend shapes
+
+ Returns
+ -------
+ torch.tensor BxVx3
+ The per-vertex displacement due to shape deformation
+ '''
+
+ # Displacement[b, m, k] = sum_{l} betas[b, l] * shape_disps[m, k, l]
+ # i.e. Multiply each shape displacement by its corresponding beta and
+ # then sum them.
+ blend_shape = torch.einsum('bl,mkl->bmk', [betas, shape_disps])
+ return blend_shape
+
+
+def batch_rodrigues(rot_vecs, epsilon=1e-8, dtype=torch.float32):
+ ''' Calculates the rotation matrices for a batch of rotation vectors
+ Parameters
+ ----------
+ rot_vecs: torch.tensor Nx3
+ array of N axis-angle vectors
+ Returns
+ -------
+ R: torch.tensor Nx3x3
+ The rotation matrices for the given axis-angle parameters
+ '''
+
+ batch_size = rot_vecs.shape[0]
+ device = rot_vecs.device
+
+ angle = torch.norm(rot_vecs + 1e-8, dim=1, keepdim=True)
+ rot_dir = rot_vecs / angle
+
+ cos = torch.unsqueeze(torch.cos(angle), dim=1)
+ sin = torch.unsqueeze(torch.sin(angle), dim=1)
+
+ # Bx1 arrays
+ rx, ry, rz = torch.split(rot_dir, 1, dim=1)
+ K = torch.zeros((batch_size, 3, 3), dtype=dtype, device=device)
+
+ zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device)
+ K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \
+ .view((batch_size, 3, 3))
+
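+    # Rodrigues' rotation formula: R = I + sin(angle) * K + (1 - cos(angle)) * K^2, where K is the skew-symmetric matrix of the unit axis.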
+ ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0)
+ rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K)
+ return rot_mat
+
+
+def transform_mat(R, t):
+ ''' Creates a batch of transformation matrices
+ Args:
+ - R: Bx3x3 array of a batch of rotation matrices
+ - t: Bx3x1 array of a batch of translation vectors
+ Returns:
+ - T: Bx4x4 Transformation matrix
+ '''
+ # No padding left or right, only add an extra row
+ return torch.cat([F.pad(R, [0, 0, 0, 1]),
+ F.pad(t, [0, 0, 0, 1], value=1)], dim=2)
+
+
+def batch_rigid_transform(rot_mats, joints, parents, dtype=torch.float32):
+ """
+ Applies a batch of rigid transformations to the joints
+
+ Parameters
+ ----------
+ rot_mats : torch.tensor BxNx3x3
+ Tensor of rotation matrices
+ joints : torch.tensor BxNx3
+ Locations of joints
+ parents : torch.tensor BxN
+ The kinematic tree of each object
+ dtype : torch.dtype, optional:
+ The data type of the created tensors, the default is torch.float32
+
+ Returns
+ -------
+ posed_joints : torch.tensor BxNx3
+ The locations of the joints after applying the pose rotations
+ rel_transforms : torch.tensor BxNx4x4
+ The relative (with respect to the root joint) rigid transformations
+ for all the joints
+ """
+
+ joints = torch.unsqueeze(joints, dim=-1)
+
+ rel_joints = joints.clone()
+ rel_joints[:, 1:] -= joints[:, parents[1:]]
+
+ # transforms_mat = transform_mat(
+ # rot_mats.view(-1, 3, 3),
+ # rel_joints.view(-1, 3, 1)).view(-1, joints.shape[1], 4, 4)
+ transforms_mat = transform_mat(
+ rot_mats.view(-1, 3, 3),
+ rel_joints.reshape(-1, 3, 1)).reshape(-1, joints.shape[1], 4, 4)
+
+ transform_chain = [transforms_mat[:, 0]]
+ for i in range(1, parents.shape[0]):
+ # Subtract the joint location at the rest pose
+ # No need for rotation, since it's identity when at rest
+ curr_res = torch.matmul(transform_chain[parents[i]], transforms_mat[:, i])
+ transform_chain.append(curr_res)
+
+ transforms = torch.stack(transform_chain, dim=1)
+
+ # The last column of the transformations contains the posed joints
+ posed_joints = transforms[:, :, :3, 3]
+
+ joints_homogen = F.pad(joints, [0, 0, 0, 1])
+
+ rel_transforms = transforms - F.pad(
+ torch.matmul(transforms, joints_homogen), [3, 0, 0, 0, 0, 0, 0, 0])
+
+ return posed_joints, rel_transforms
diff --git a/src/pixel3dmm/preprocessing/MICA/render_dataset.py b/src/pixel3dmm/preprocessing/MICA/render_dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..01d73b7d2545c57af514ecac8bca0afa59c8512b
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/render_dataset.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+import os
+import random
+from glob import glob
+from pathlib import Path
+
+import cv2
+import numpy as np
+import torch
+import trimesh
+from tqdm import tqdm
+
+from configs.config import get_cfg_defaults
+from micalib.renderer import MeshShapeRenderer
+from models.flame import FLAME
+
+np.random.seed(125)
+random.seed(125)
+
+
+def main():
+ cfg = get_cfg_defaults()
+ render = MeshShapeRenderer(obj_filename=cfg.model.topology_path)
+ flame = FLAME(cfg.model).to('cuda:0')
+ datasets = sorted(glob('/home/wzielonka/datasets/MICA/*'))
+ for dataset in tqdm(datasets):
+ meshes = sorted(glob(f'{dataset}/FLAME_parameters/*/*.npz'))
+ sample_list = np.array(np.random.choice(range(len(meshes)), size=30 * 5))
+ dst = Path('./output', Path(dataset).name)
+ dst.mkdir(parents=True, exist_ok=True)
+ j = 0
+ k = 0
+ images = np.zeros((512, 512 * 5, 3))
+ for i in sample_list:
+ params = np.load(meshes[i], allow_pickle=True)
+ betas = torch.tensor(params['betas']).float().cuda()
+ shape_params = betas[:300][None]
+ v = flame(shape_params=shape_params)[0]
+ rendering = render.render_mesh(v)
+ image = (rendering[0].cpu().numpy().transpose(1, 2, 0).copy() * 255)[:, :, [2, 1, 0]]
+ image = np.minimum(np.maximum(image, 0), 255).astype(np.uint8)
+ images[0:512, 512 * j:512 * (j + 1), :] = image
+ j += 1
+
+ if j % 5 == 0 and j > 0:
+ dst.mkdir(parents=True, exist_ok=True)
+ cv2.imwrite(f'{dst}/{str(k).zfill(4)}.png', images)
+ images = np.zeros((512, 512 * 5, 3))
+ j = 0
+ k += 1
+
+ os.system(f'ffmpeg -y -framerate 1 -pattern_type glob -i \'{dst}/*.png\' -c:v libx264 -pix_fmt yuv420p {dst}/video.mp4')
+ os.system(f'gifski -o ./output/{Path(dataset).name}.gif {dst}/*.png --quality 100 --fps 1')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/src/pixel3dmm/preprocessing/MICA/test.py b/src/pixel3dmm/preprocessing/MICA/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..b8acff3991de4951610aaca26f07a01c91ef14df
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/test.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import sys
+
+import torch
+import torch.backends.cudnn as cudnn
+import torch.multiprocessing as mp
+
+from jobs import test
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '.')))
+
+if __name__ == '__main__':
+ from configs.config import parse_args
+
+ cfg, args = parse_args()
+
+ if cfg.cfg_file is not None:
+ exp_name = cfg.cfg_file.split('/')[-1].split('.')[0]
+ cfg.output_dir = os.path.join('./output', exp_name)
+
+ cudnn.benchmark = False
+ cudnn.deterministic = True
+ torch.cuda.empty_cache()
+ num_gpus = torch.cuda.device_count()
+
+ mp.spawn(test, args=(num_gpus, cfg, args), nprocs=num_gpus, join=True)
+
+ exit(0)
diff --git a/src/pixel3dmm/preprocessing/MICA/testing/now/now.py b/src/pixel3dmm/preprocessing/MICA/testing/now/now.py
new file mode 100644
index 0000000000000000000000000000000000000000..920f41ef036857f8ef2a6d08dc4c0abc98c8e9c7
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/testing/now/now.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import time
+from glob import glob
+from shutil import copyfile
+
+logs = '/home/wzielonka/projects/MICA/testing/now/logs/'
+jobs = '/home/wzielonka/projects/MICA/testing/now/jobs/'
+root = '/home/wzielonka/projects/MICA/output/'
+
+experiments = []
+
+
+def test():
+ global experiments
+ if len(experiments) == 0:
+ experiments = list(filter(lambda f: 'condor' not in f, os.listdir('../../output/')))
+
+ os.system('rm -rf logs')
+ os.system('rm -rf jobs')
+
+ os.makedirs('logs', exist_ok=True)
+ os.makedirs('jobs', exist_ok=True)
+
+ for experiment in sorted(experiments):
+ print(f'Testing {experiment}')
+ copyfile(f'{root}{experiment}/model.tar', f'{root}{experiment}/best_models/best_model_last.tar')
+ for idx, checkpoint in enumerate(glob(root + experiment + f'/best_models/*.tar')):
+ model_name = checkpoint.split('/')[-1].split('.')[0]
+ model_name = model_name.replace('best_model_', 'now_test_')
+ predicted_meshes = f'{root}{experiment}/{model_name}/predicted_meshes/'
+ run = f'{experiment}_{str(idx).zfill(5)}'
+ with open(f'{jobs}/{run}.sub', 'w') as fid:
+ fid.write('executable = /bin/bash\n')
+ arguments = f'/home/wzielonka/projects/MICA/testing/now/template.sh {experiment} {checkpoint} now {predicted_meshes}'
+ fid.write(f'arguments = {arguments}\n')
+ fid.write(f'error = {logs}{run}.err\n')
+ fid.write(f'output = {logs}{run}.out\n')
+ fid.write(f'log = {logs}{run}.log\n')
+ fid.write(f'request_cpus = 4\n')
+ fid.write(f'request_gpus = 1\n')
+ fid.write(f'requirements = (TARGET.CUDAGlobalMemoryMb > 5000) && (TARGET.CUDAGlobalMemoryMb < 33000)\n')
+ fid.write(f'request_memory = 8192\n')
+ fid.write(f'queue\n')
+
+ os.system(f'condor_submit_bid 512 {jobs}/{run}.sub')
+
+ time.sleep(2)
+
+
+if __name__ == '__main__':
+ test()
diff --git a/src/pixel3dmm/preprocessing/MICA/testing/now/template.sh b/src/pixel3dmm/preprocessing/MICA/testing/now/template.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3194777d2678e02d557d1691e73e798ec23a2afa
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/testing/now/template.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+PYTHON_ENV=/home/wzielonka/miniconda3/etc/profile.d/conda.sh
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+export PATH=/usr/local/bin:/usr/bin:/bin:/usr/sbin:$PATH
+export LD_LIBRARY_PATH=/is/software/nvidia/nccl-2.4.8-cuda10.1/lib/
+
+source ${PYTHON_ENV}
+module load cuda/10.1
+module load gcc/4.9
+
+EXPERIMENT=''
+CHECKPOINT=''
+BENCHMARK=''
+PREDICTED=''
+
+echo 'Testing has started...'
+
+if [ -n "$1" ]; then EXPERIMENT=${1}; fi
+if [ -n "$2" ]; then CHECKPOINT=${2}; fi
+if [ -n "$3" ]; then BENCHMARK=${3}; fi
+if [ -n "$4" ]; then PREDICTED=${4}; fi
+
+ROOT=/home/wzielonka/projects/MICA/output/
+NOW=/home/wzielonka/datasets/NoWDataset/final_release_version/
+
+conda activate NFC
+
+cd /home/wzielonka/projects/MICA
+python test.py --cfg /home/wzielonka/projects/MICA/configs/${EXPERIMENT}.yml --test_dataset ${BENCHMARK} --checkpoint ${CHECKPOINT}
+
+source /home/wzielonka/.virtualenvs/NoW/bin/activate
+cd /home/wzielonka/projects/NoW
+python compute_error.py ${NOW} ${PREDICTED} true
+
+# Plot diagram
+# source /home/wzielonka/.virtualenvs/NoW/bin/activate
+# python cumulative_errors.py
\ No newline at end of file
diff --git a/src/pixel3dmm/preprocessing/MICA/testing/stirling/stirling.py b/src/pixel3dmm/preprocessing/MICA/testing/stirling/stirling.py
new file mode 100644
index 0000000000000000000000000000000000000000..86f60fb3f41689405c10358eb001b9b068a5efe5
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/testing/stirling/stirling.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import time
+from glob import glob
+from shutil import copyfile
+
+logs = '/home/wzielonka/projects/MICA/testing/stirling/logs/'
+jobs = '/home/wzielonka/projects/MICA/testing/stirling/jobs/'
+root = '/home/wzielonka/projects/MICA/output/'
+
+experiments = []
+
+
+def test():
+ global experiments
+ if len(experiments) == 0:
+ experiments = list(filter(lambda f: 'condor' not in f and 'resnet' in f, os.listdir('../../output/')))
+ # experiments = list(filter(lambda f: 'experiment_' in f, os.listdir('../../output/')))
+
+ os.system('rm -rf logs')
+ os.system('rm -rf jobs')
+
+ os.makedirs('logs', exist_ok=True)
+ os.makedirs('jobs', exist_ok=True)
+
+ for experiment in sorted(experiments):
+ print(f'Testing {experiment}')
+ copyfile(f'{root}{experiment}/model.tar', f'{root}{experiment}/best_models/best_model_last.tar')
+ for idx, checkpoint in enumerate(glob(root + experiment + f'/best_models/*.tar')):
+ model_name = checkpoint.split('/')[-1].split('.')[0]
+ model_name = model_name.replace('best_model_', 'stirling_test_')
+ predicted_meshes = f'{root}{experiment}/{model_name}/predicted_meshes/'
+ run = f'{experiment}_{str(idx).zfill(5)}'
+ with open(f'{jobs}/{run}.sub', 'w') as fid:
+ fid.write('executable = /bin/bash\n')
+ arguments = f'/home/wzielonka/projects/MICA/testing/stirling/template.sh {experiment} {checkpoint} stirling {predicted_meshes}'
+ fid.write(f'arguments = {arguments}\n')
+ fid.write(f'error = {logs}{run}.err\n')
+ fid.write(f'output = {logs}{run}.out\n')
+ fid.write(f'log = {logs}{run}.log\n')
+ fid.write(f'request_cpus = 8\n')
+ fid.write(f'request_gpus = 1\n')
+ fid.write(f'requirements = (TARGET.CUDAGlobalMemoryMb > 5000) && (TARGET.CUDAGlobalMemoryMb < 12000)\n')
+ fid.write(f'request_memory = 8192\n')
+ fid.write(f'queue\n')
+
+ os.system(f'condor_submit_bid 512 {jobs}/{run}.sub')
+
+ time.sleep(2)
+
+
+if __name__ == '__main__':
+ test()
diff --git a/src/pixel3dmm/preprocessing/MICA/testing/stirling/template.sh b/src/pixel3dmm/preprocessing/MICA/testing/stirling/template.sh
new file mode 100644
index 0000000000000000000000000000000000000000..bcb16bed66a23dab495a82554aa7573bc3f722a2
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/testing/stirling/template.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+
+PYTHON_ENV=/home/wzielonka/miniconda3/etc/profile.d/conda.sh
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+export PATH=/usr/local/bin:/usr/bin:/bin:/usr/sbin:$PATH
+export LD_LIBRARY_PATH=/is/software/nvidia/nccl-2.4.8-cuda10.1/lib/
+
+source ${PYTHON_ENV}
+module load cuda/10.1
+module load gcc/4.9
+
+EXPERIMENT=''
+CHECKPOINT=''
+BENCHMARK=''
+PREDICTED=''
+
+echo 'Testing has started...'
+
+if [ -n "$1" ]; then EXPERIMENT=${1}; fi
+if [ -n "$2" ]; then CHECKPOINT=${2}; fi
+if [ -n "$3" ]; then BENCHMARK=${3}; fi
+if [ -n "$4" ]; then PREDICTED=${4}; fi
+
+ROOT=/home/wzielonka/projects/MICA/output/
+NOW=/home/wzielonka/datasets/NoWDataset/final_release_version/
+
+conda activate NFC
+
+cd /home/wzielonka/projects/MICA
+python test.py --cfg /home/wzielonka/projects/MICA/configs/${EXPERIMENT}.yml --test_dataset ${BENCHMARK} --checkpoint ${CHECKPOINT}
+
+source /home/wzielonka/.virtualenvs/NoW/bin/activate
+cd /home/wzielonka/projects/NoW
+
+# Arguments for NoW
+# predicted_mesh_folder = sys.argv[1]
+# fixed = sys.argv[2]
+# now = sys.argv[3]
+
+# python feng_error.py ${PREDICTED}/HQ true true
+# python feng_error.py ${PREDICTED}/LQ true true
+
+# Plot diagram
+# source /home/wzielonka/.virtualenvs/NoW/bin/activate
+# python cumulative_errors.py --type hq
diff --git a/src/pixel3dmm/preprocessing/MICA/train.py b/src/pixel3dmm/preprocessing/MICA/train.py
new file mode 100644
index 0000000000000000000000000000000000000000..22ba7f2c5812dc53ca294f43394bd511a44238df
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/train.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+import sys
+
+import torch
+import torch.backends.cudnn as cudnn
+import torch.multiprocessing as mp
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '.')))
+
+from jobs import train
+
+if __name__ == '__main__':
+ from configs.config import parse_args
+
+ cfg, args = parse_args()
+
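+    # Name the experiment and its output directory after the config file
+    # (<name>.yml -> ./output/<name>).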
+ if cfg.cfg_file is not None:
+ exp_name = cfg.cfg_file.split('/')[-1].split('.')[0]
+ cfg.output_dir = os.path.join('./output', exp_name)
+
+ cudnn.benchmark = False
+ cudnn.deterministic = True
+ torch.cuda.empty_cache()
+ num_gpus = torch.cuda.device_count()
+
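+    # Launch one training process per visible GPU; mp.spawn passes the process
+    # rank as the first argument, followed by (num_gpus, cfg).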
+ mp.spawn(train, args=(num_gpus, cfg), nprocs=num_gpus, join=True)
+
+ exit(0)
diff --git a/src/pixel3dmm/preprocessing/MICA/utils/__init__.py b/src/pixel3dmm/preprocessing/MICA/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/pixel3dmm/preprocessing/MICA/utils/best_model.py b/src/pixel3dmm/preprocessing/MICA/utils/best_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..3aacd471e163f6cba422bbf1236d2541e41076f4
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/utils/best_model.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import os
+
+import numpy as np
+from loguru import logger
+
+
+class BestModel:
+ def __init__(self, trainer):
+        self.average = np.inf
+        self.weighted_average = np.inf
+        self.smoothed_average = np.inf
+        self.smoothed_weighted_average = np.inf
+        self.running_average = np.inf
+        self.running_weighted_average = np.inf
+ self.now_mean = None
+
+ self.trainer = trainer
+ self.counter = None
+
+ self.N = trainer.cfg.running_average
+
+ os.makedirs(os.path.join(self.trainer.cfg.output_dir, 'best_models'), exist_ok=True)
+
+ def state_dict(self):
+ return {
+ 'average': self.average,
+ 'smoothed_average': self.smoothed_average,
+ 'running_average': self.running_average,
+ 'now_mean': self.now_mean,
+ 'counter': self.counter,
+ }
+
+ def load_state_dict(self, dict):
+ self.average = dict['average']
+ self.smoothed_average = dict['smoothed_average']
+ self.running_average = dict['running_average']
+ self.now_mean = dict['now_mean']
+ self.counter = dict['counter']
+
+        logger.info(f'[BEST] Restored best scores | '
+                    f'NoW mean: {self.now_mean} | '
+                    f'average: {self.average:.6f} | '
+                    f'running average: {self.running_average:.6f}')
+
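+    # Track three validation criteria and snapshot a checkpoint whenever one of
+    # them improves: the weighted average (best_model_0.tar), the plain average
+    # (best_model_1.tar) and an exponential running average of the plain average
+    # with window N = cfg.running_average (best_model_3.tar).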
+ def __call__(self, weighted_average, average):
+ if self.counter is None:
+ self.counter = 1
+ self.average = average
+ self.weighted_average = weighted_average
+ self.running_weighted_average = weighted_average
+ self.running_average = average
+
+ return weighted_average, average
+
+ if weighted_average < self.weighted_average:
+ delta = self.weighted_average - weighted_average
+ self.weighted_average = weighted_average
+ logger.info(f'[BEST] (Average weighted) {self.trainer.global_step} | {delta:.6f} improvement and value: {self.weighted_average:.6f}')
+ self.trainer.save_checkpoint(os.path.join(self.trainer.cfg.output_dir, 'best_models', f'best_model_0.tar'))
+
+ if average < self.average:
+ delta = self.average - average
+ self.average = average
+ logger.info(f'[BEST] (Average) {self.trainer.global_step} | {delta:.6f} improvement and value: {self.average:.6f}')
+ self.trainer.save_checkpoint(os.path.join(self.trainer.cfg.output_dir, 'best_models', f'best_model_1.tar'))
+
+ n = self.N
+
+ self.running_average = self.running_average * ((n - 1) / n) + (average / n)
+ if self.running_average < self.smoothed_average:
+ delta = self.smoothed_average - self.running_average
+ self.smoothed_average = self.running_average
+ logger.info(f'[BEST] (Average Smoothed) {self.trainer.global_step} | {delta:.6f} improvement and value: {self.smoothed_average:.6f} | counter: {self.counter} | window: {n}')
+ self.trainer.save_checkpoint(os.path.join(self.trainer.cfg.output_dir, 'best_models', f'best_model_3.tar'))
+
+ self.counter += 1
+
+ return self.running_weighted_average, self.running_average
+
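+    # Track the best NoW benchmark mean error separately; saves best_model_now.tar on improvement.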
+ def now(self, median, mean, std):
+ if self.now_mean is None:
+ self.now_mean = mean
+ return
+
+ if mean < self.now_mean:
+ delta = self.now_mean - mean
+ self.now_mean = mean
+ logger.info(f'[BEST] (NoW) {self.trainer.global_step} | {delta:.6f} improvement and mean: {self.now_mean:.6f} std: {std} median: {median}')
+ self.trainer.save_checkpoint(os.path.join(self.trainer.cfg.output_dir, 'best_models', f'best_model_now.tar'))
diff --git a/src/pixel3dmm/preprocessing/MICA/utils/landmark_detector.py b/src/pixel3dmm/preprocessing/MICA/utils/landmark_detector.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa171c1de0abe6cfdd1baf97ab7bb570078740a4
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/utils/landmark_detector.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import face_alignment
+import numpy as np
+from insightface.app import FaceAnalysis
+from loguru import logger
+
+from datasets.creation.util import get_bbox
+
+
+class Detectors:
+ def __init__(self):
+ self.RETINAFACE = 'RETINAFACE'
+ self.FAN = 'FAN'
+
+
+detectors = Detectors()
+
+
+class LandmarksDetector:
+ def __init__(self, model='retinaface', device='cuda:0'):
+ model = model.upper()
+ self.predictor = model
+ if model == detectors.RETINAFACE:
+ self._face_detector = FaceAnalysis(name='antelopev2', providers=['CUDAExecutionProvider'])
+ self._face_detector.prepare(ctx_id=0, det_size=(224, 224))
+ elif model == detectors.FAN:
+ self._face_detector = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, device=device)
+ else:
+ logger.error(f'[ERROR] Landmark predictor not supported {model}')
+ exit(-1)
+
+ logger.info(f'[DETECTOR] Selected {model} as landmark detector.')
+
+ def detect(self, img):
+ if self.predictor == detectors.RETINAFACE:
+ bboxes, kpss = self._face_detector.det_model.detect(img, max_num=0, metric='default')
+ return bboxes, kpss
+
+ if self.predictor == detectors.FAN:
+ lmks, scores, detected_faces = self._face_detector.get_landmarks_from_image(img, return_landmark_score=True, return_bboxes=True)
+ if detected_faces is None:
+ return np.empty(0), np.empty(0)
+ bboxes = np.stack(detected_faces)
+ # bboxes = get_bbox(img, np.stack(lmks))
+ # bboxes[:, 4] = detected_faces[:, 4]
+ # https://github.com/Rubikplayer/flame-fitting/blob/master/data/landmarks_51_annotated.png
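+            # Convert FAN's 68 landmarks to the same 5-point layout as the
+            # RetinaFACE branch (left eye, right eye, nose, mouth corners):
+            # drop the 17 jawline points and replace each eye entry with the
+            # mean of two eyelid points.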
+ lmk51 = np.stack(lmks)[:, 17:, :]
+ kpss = lmk51[:, [20, 27, 13, 43, 47], :] # left eye, right eye, nose, left mouth, right mouth
+ kpss[:, 0, :] = lmk51[:, [21, 24], :].mean(1) # center of eye
+ kpss[:, 1, :] = lmk51[:, [27, 29], :].mean(1)
+ return bboxes, kpss
+
+ return None, None
diff --git a/src/pixel3dmm/preprocessing/MICA/utils/masking.py b/src/pixel3dmm/preprocessing/MICA/utils/masking.py
new file mode 100644
index 0000000000000000000000000000000000000000..06e5b48ed2462bcf9d37e20a665be558b7525c2e
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/utils/masking.py
@@ -0,0 +1,207 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+import os
+import pickle
+
+import numpy as np
+import torch
+import torch.nn as nn
+from trimesh import Trimesh
+
+
+def to_tensor(array, dtype=torch.float32):
+    if 'torch.tensor' not in str(type(array)):
+        return torch.tensor(array, dtype=dtype)
+    # already a tensor: return it unchanged instead of falling through to None
+    return array
+
+
+def to_np(array, dtype=np.float32):
+ if 'scipy.sparse' in str(type(array)):
+ array = array.todense()
+ return np.array(array, dtype=dtype)
+
+
+class Struct(object):
+ def __init__(self, **kwargs):
+ for key, val in kwargs.items():
+ setattr(self, key, val)
+
+
+class Masking(nn.Module):
+ def __init__(self, config):
+ super(Masking, self).__init__()
+ ROOT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
+ with open(f'{ROOT_DIR}/data/FLAME2020/FLAME_masks/FLAME_masks.pkl', 'rb') as f:
+ ss = pickle.load(f, encoding='latin1')
+ self.masks = Struct(**ss)
+
+ with open(f'{ROOT_DIR}/data/FLAME2020/generic_model.pkl', 'rb') as f:
+ ss = pickle.load(f, encoding='latin1')
+ flame_model = Struct(**ss)
+
+ self.masked_faces = None
+
+ self.cfg = config.mask_weights
+ self.dtype = torch.float32
+ self.register_buffer('faces', to_tensor(to_np(flame_model.f, dtype=np.int64), dtype=torch.long))
+ self.register_buffer('vertices', to_tensor(to_np(flame_model.v_template), dtype=self.dtype))
+
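+        # Build a vertex adjacency map (vertex id -> set of neighbouring vertex
+        # ids) from the triangle list; used by make_soft() to grow masks ring by ring.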
+ self.neighbours = {}
+ for f in self.faces.numpy():
+ for v in f:
+ if str(v) not in self.neighbours:
+ self.neighbours[str(v)] = set()
+ for a in list(filter(lambda i: i != v, f)):
+ self.neighbours[str(v)].add(a)
+
+ def get_faces(self):
+ return self.faces
+
+ def get_mask_face(self):
+ return self.masks.face
+
+ def get_mask_eyes(self):
+ left = self.masks.left_eyeball
+ right = self.masks.right_eyeball
+
+ return np.unique(np.concatenate((left, right)))
+
+ def get_mask_forehead(self):
+ return self.masks.forehead
+
+ def get_mask_lips(self):
+ return self.masks.lips
+
+ def get_mask_eye_region(self):
+ return self.masks.eye_region
+
+ def get_mask_lr_eye_region(self):
+ left = self.masks.left_eye_region
+ right = self.masks.right_eye_region
+
+ return np.unique(np.concatenate((left, right, self.get_mask_eyes())))
+
+ def get_mask_nose(self):
+ return self.masks.nose
+
+ def get_mask_ears(self):
+ left = self.masks.left_ear
+ right = self.masks.right_ear
+
+ return np.unique(np.concatenate((left, right)))
+
+ def get_triangle_face_mask(self):
+ m = self.masks.face
+ return self.get_triangle_mask(m)
+
+ def get_triangle_eyes_mask(self):
+ m = self.get_mask_eyes()
+ return self.get_triangle_mask(m)
+
+ def get_triangle_whole_mask(self):
+ m = self.get_whole_mask()
+ return self.get_triangle_mask(m)
+
+ def get_triangle_mask(self, m):
+ f = self.faces.cpu().numpy()
+ selected = []
+ for i in range(f.shape[0]):
+ l = f[i]
+ valid = 0
+ for j in range(3):
+ if l[j] in m:
+ valid += 1
+ if valid == 3:
+ selected.append(i)
+
+ return np.unique(selected)
+
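+    # Grow `mask` outwards by `degree` rings of neighbouring vertices and return
+    # a list of (ring_vertices, weight) pairs with the decayed weight value / (ring + 2).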
+ def make_soft(self, mask, value, degree=4):
+ soft = []
+ mask = set(mask)
+ for ring in range(degree):
+ soft_ring = []
+ for v in mask.copy():
+ for n in self.neighbours[str(v)]:
+ if n in mask:
+ continue
+ soft_ring.append(n)
+ mask.add(n)
+
+ soft.append((soft_ring, value / (ring + 2)))
+
+ return soft
+
+ def get_binary_triangle_mask(self):
+ mask = self.get_whole_mask()
+ faces = self.faces.cpu().numpy()
+ reduced_faces = []
+ for f in faces:
+ valid = 0
+ for v in f:
+ if v in mask:
+ valid += 1
+            reduced_faces.append(valid == 3)
+
+ return reduced_faces
+
+ def get_masked_faces(self):
+ if self.masked_faces is None:
+ faces = self.faces.cpu().numpy()
+ vertices = self.vertices.cpu().numpy()
+ m = Trimesh(vertices=vertices, faces=faces, process=False)
+ m.update_faces(self.get_binary_triangle_mask())
+ self.masked_faces = torch.from_numpy(np.array(m.faces)).cuda().long()[None]
+
+ return self.masked_faces
+
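+    # Per-triangle loss weights: initialise every face with cfg.whole, then
+    # override the eye and face regions with their configured weights.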
+ def get_weights_per_triangle(self):
+ mask = torch.ones_like(self.get_faces()[None]).detach() * self.cfg.whole
+
+ mask[:, self.get_triangle_eyes_mask(), :] = self.cfg.eyes
+ mask[:, self.get_triangle_face_mask(), :] = self.cfg.face
+
+ return mask[:, :, 0:1]
+
+ def get_weights_per_vertex(self):
+ mask = torch.ones_like(self.vertices[None]).detach() * self.cfg.whole
+
+ mask[:, self.get_mask_eyes(), :] = self.cfg.eyes
+ mask[:, self.get_mask_ears(), :] = self.cfg.ears
+ mask[:, self.get_mask_face(), :] = self.cfg.face
+
+ return mask
+
+ def get_masked_mesh(self, vertices, triangle_mask):
+ if len(vertices.shape) == 2:
+ vertices = vertices[None]
+ B, N, V = vertices.shape
+ faces = self.faces.cpu().numpy()
+ masked_vertices = torch.empty(0, 0, 3).cuda()
+ masked_faces = torch.empty(0, 0, 3).cuda()
+ for i in range(B):
+ m = Trimesh(vertices=vertices[i].detach().cpu().numpy(), faces=faces, process=False)
+ m.update_faces(triangle_mask)
+ m.process()
+ f = torch.from_numpy(np.array(m.faces)).cuda()[None]
+ v = torch.from_numpy(np.array(m.vertices)).cuda()[None].float()
+ if masked_vertices.shape[1] != v.shape[1]:
+ masked_vertices = torch.empty(0, v.shape[1], 3).cuda()
+ if masked_faces.shape[1] != f.shape[1]:
+ masked_faces = torch.empty(0, f.shape[1], 3).cuda()
+ masked_vertices = torch.cat([masked_vertices, v])
+ masked_faces = torch.cat([masked_faces, f])
+
+ return masked_vertices, masked_faces
diff --git a/src/pixel3dmm/preprocessing/MICA/utils/util.py b/src/pixel3dmm/preprocessing/MICA/utils/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..7bd8ab1940e15d9f1463b3ceedc102a7840205eb
--- /dev/null
+++ b/src/pixel3dmm/preprocessing/MICA/utils/util.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2023 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: mica@tue.mpg.de
+
+
+import importlib
+
+import cv2
+import numpy as np
+import torch
+import torch.nn.functional as F
+import torchvision
+
+
+def find_model_using_name(model_dir, model_name):
+ # adapted from pix2pix framework: https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/models/__init__.py#L25
+ # import "model_dir/modelname.py"
+ model_filename = model_dir + "." + model_name
+ modellib = importlib.import_module(model_filename, package=model_dir)
+
+ # In the file, the class called ModelName() will
+ # be instantiated. It has to be a subclass of BaseModel,
+ # and it is case-insensitive.
+ model = None
+ target_model_name = model_name.replace('_', '')
+ for name, cls in modellib.__dict__.items():
+ # if name.lower() == target_model_name.lower() and issubclass(cls, BaseModel):
+ if name.lower() == target_model_name.lower():
+ model = cls
+
+ if model is None:
+        print("In %s.py, there should be a class whose name matches %s (case-insensitive)." % (model_filename, target_model_name))
+        exit(1)
+
+ return model
+
+
+def visualize_grid(visdict, savepath=None, size=224, dim=1, return_grid=True):
+ '''
+ image range should be [0,1]
+ dim: 2 for horizontal. 1 for vertical
+ '''
+ assert dim == 1 or dim == 2
+ grids = {}
+ for key in visdict:
+ b, c, h, w = visdict[key].shape
+ if dim == 2:
+ new_h = size
+ new_w = int(w * size / h)
+ elif dim == 1:
+ new_h = int(h * size / w)
+ new_w = size
+ grids[key] = torchvision.utils.make_grid(F.interpolate(visdict[key], [new_h, new_w]).detach().cpu(), nrow=b, padding=0)
+ grid = torch.cat(list(grids.values()), dim)
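+    # make_grid output is CHW in [0, 1]; convert to HWC, scale to 0-255,
+    # clip, cast to uint8 and swap RGB -> BGR for cv2.imwrite.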
+ grid_image = (grid.numpy().transpose(1, 2, 0).copy() * 255)[:, :, [2, 1, 0]]
+ grid_image = np.minimum(np.maximum(grid_image, 0), 255).astype(np.uint8)
+ if savepath:
+ cv2.imwrite(savepath, grid_image)
+    if return_grid:
+ return grid_image