amit-scans simonduerr committed on
Commit fba25b8
0 Parent(s):

Duplicate from simonduerr/diffdock


Co-authored-by: Simon Duerr <[email protected]>

This view is limited to 50 files because it contains too many changes.

Files changed (50)
  1. .gitattributes +23 -0
  2. .gitignore +164 -0
  3. LICENSE +21 -0
  4. README.md +158 -0
  5. app.py +610 -0
  6. baselines/baseline_evaluation.py +219 -0
  7. baselines/baseline_gnina.py +175 -0
  8. baselines/baseline_run_tankbind_parallel.sh +5 -0
  9. baselines/baseline_tankbind_evaluation.py +239 -0
  10. baselines/baseline_tankbind_runtime.py +342 -0
  11. confidence/confidence_train.py +319 -0
  12. confidence/dataset.py +272 -0
  13. data/protein_ligand_example_csv.csv +2 -0
  14. data/splits/timesplit_no_lig_overlap_train +16379 -0
  15. data/splits/timesplit_no_lig_overlap_val +968 -0
  16. data/splits/timesplit_test +363 -0
  17. data/splits/timesplit_test_no_rec_overlap +144 -0
  18. datasets/__init__.py +0 -0
  19. datasets/conformer_matching.py +196 -0
  20. datasets/esm_embedding_preparation.py +88 -0
  21. datasets/esm_embeddings_to_pt.py +17 -0
  22. datasets/pdbbind.py +705 -0
  23. datasets/pdbbind_lm_embedding_preparation.py +94 -0
  24. datasets/process_mols.py +550 -0
  25. environment.yml +102 -0
  26. esm/LICENSE +21 -0
  27. esm/esm/__init__.py +12 -0
  28. esm/esm/axial_attention.py +239 -0
  29. esm/esm/constants.py +10 -0
  30. esm/esm/data.py +493 -0
  31. esm/esm/inverse_folding/__init__.py +8 -0
  32. esm/esm/inverse_folding/features.py +352 -0
  33. esm/esm/inverse_folding/gvp_encoder.py +56 -0
  34. esm/esm/inverse_folding/gvp_modules.py +473 -0
  35. esm/esm/inverse_folding/gvp_transformer.py +137 -0
  36. esm/esm/inverse_folding/gvp_transformer_encoder.py +184 -0
  37. esm/esm/inverse_folding/gvp_utils.py +68 -0
  38. esm/esm/inverse_folding/multichain_util.py +151 -0
  39. esm/esm/inverse_folding/transformer_decoder.py +228 -0
  40. esm/esm/inverse_folding/transformer_layer.py +304 -0
  41. esm/esm/inverse_folding/util.py +320 -0
  42. esm/esm/model/esm1.py +200 -0
  43. esm/esm/model/esm2.py +147 -0
  44. esm/esm/model/msa_transformer.py +238 -0
  45. esm/esm/modules.py +418 -0
  46. esm/esm/multihead_attention.py +508 -0
  47. esm/esm/pretrained.py +397 -0
  48. esm/esm/rotary_embedding.py +69 -0
  49. esm/esm/version.py +6 -0
  50. esm/scripts/extract.py +142 -0
.gitattributes ADDED
@@ -0,0 +1,23 @@
+ *.ipynb linguist-vendored=false
+ *.ipynb linguist-detectable=false
+ /jupyter_notebooks linguist-vendored=false
+ jupyter_notebooks/** linguist-vendored
+ jupyter_notebooks/** linguist-vendored=false
+ jupyter_notebooks/* linguist-vendored
+ jupyter_notebooks/* linguist-vendored=false
+ data/esm2_output_back/1a46_protein_processed.pdb_chain_1.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processed0t6cxvmshf54nq5e.pdb_chain_0.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processedhoiape3r6cbddini.pdb_chain_0.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processedhoiape3r6cbddini.pdb_chain_1.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processeds5c7cs3aj6rvbhy_.pdb_chain_0.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processedtyfuvx9z8y4_86ft.pdb_chain_0.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output_back/1a46_protein_processed.pdb_chain_0.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processed.pdb_chain_0.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output_back/1cbr_protein.pdb_chain_1.pt filter=lfs diff=lfs merge=lfs -text
+ workdir/paper_confidence_model/best_model_epoch75.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processeds5c7cs3aj6rvbhy_.pdb_chain_1.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output_back/1cbr_protein.pdb_chain_0.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processed0t6cxvmshf54nq5e.pdb_chain_1.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processed.pdb_chain_1.pt filter=lfs diff=lfs merge=lfs -text
+ data/esm2_output/1a46_protein_processedtyfuvx9z8y4_86ft.pdb_chain_1.pt filter=lfs diff=lfs merge=lfs -text
+ workdir/paper_score_model/best_ema_inference_epoch_model.pt filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,164 @@
+ homework
+ inference_out_dir_not_specified
+ .plotly_cache
+ .DS_store
+ renew.sh
+ tmux_renew.sh
+ images
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+ .so3_*
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ .hypothesis/
+ .pytest_cache/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # pyenv
+ .python-version
+
+ # celery beat schedule file
+ celerybeat-schedule
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ local_config_inference2.yml
+ .vscode/
+
+
+ *.zip
+
+ .idea/
+
+
+ #################### Project specific
+ .p.npy
+ .score.npy
+ # this ignores everything in data except for the file
+ !/data
+ /data/*
+ !/data/splits
+ !/data/protein_ligand_example_csv.csv
+ !/data/testset_csv.csv
+ !/data/INDEX_general_PL_data.2020
+ test_run
+
+ cache
+ wandb
+ logs
+
+ # temporary files
+ .openbabel_cache
+ temp/
+ bsub*
+ stderr*
+ stdout*
+ !/workdir
+ /workdir/*
+ !/workdir/paper_confidence_model
+ !/workdir/paper_score_model
+ runs2
+ results
+ # this excludes everything in the runs directory except for that specific run
+ !/runs
+ /runs/*
+ !/runs/rigid_redocking
+ !/runs/flexible_self_docking
+ local_config.yml
+ local_config_inference.yml
+ local_config_confidence.yml
+ temp1.py
+ temp5.py
+ temp3.py
+ temp4.py
+ temp5.py
+ temp6.py
+ temp7.py
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2022 Gabriele Corso, Hannes Stärk, Bowen Jing
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
README.md ADDED
@@ -0,0 +1,158 @@
+ ---
+ title: Diffdock
+ emoji: 🐠
+ colorFrom: indigo
+ colorTo: pink
+ sdk: gradio
+ sdk_version: 3.4.1
+ app_file: app.py
+ pinned: false
+ license: mit
+ duplicated_from: simonduerr/diffdock
+ ---
+
+ # DiffDock: Diffusion Steps, Twists, and Turns for Molecular Docking
+
+ ### [Paper on arXiv](https://arxiv.org/abs/2210.01776)
+
+ Implementation of DiffDock, a state-of-the-art method for molecular docking, by Gabriele Corso*, Hannes Stärk*, Bowen Jing*, Regina Barzilay and Tommi Jaakkola.
+ This repository contains all code, instructions and model weights necessary to run the method or to retrain a model.
+ If you have any questions, feel free to open an issue or reach out to us: [[email protected]]([email protected]), [[email protected]]([email protected]), [[email protected]]([email protected]).
+
+ ![Alt Text](visualizations/overview.png)
+
+ The repository also contains all the scripts to run the baselines and generate the figures.
+ Additionally, there are visualization videos in `visualizations`.
+
+
+
+ # Dataset
+
+ The files in `data` contain the names for the time-based data split.
+
+ If you want to train one of our models with the data:
+ 1. download it from [zenodo](https://zenodo.org/record/6034088)
+ 2. unzip the directory and place it into `data` such that you have the path `data/PDBBind_processed`
+
+
+
+ ## Setup Environment
+
+ We will set up the environment using [Anaconda](https://docs.anaconda.com/anaconda/install/index.html). Clone the current repo:
+
+     git clone https://github.com/gcorso/DiffDock.git
+
+ Create a new environment with all required packages using `environment.yml`. While in the project directory, run:
+
+     conda env create
+
+ Activate the environment:
+
+     conda activate diffdock
+
+ If something does not work and you want to install the packages yourself, these are the required ones:
+
+     pytorch
+     pyg
+     pyyaml
+     scipy
+     networkx
+     biopython
+     rdkit-pypi
+     e3nn
+     spyrmsd
+     pandas
+     biopandas
+
+ # Running DiffDock on your own complexes
+ We support multiple input formats depending on whether you only want to make predictions for a single complex or for many at once.\
+ The protein inputs need to be `.pdb` files. The ligand input can either be a SMILES string or a file type that RDKit can read, such as `.sdf` or `.mol2`.
+
+ For a single complex: specify the protein with, e.g., `--protein_path protein.pdb` and the ligand with `--ligand_path ligand.sdf` or `--ligand_smiles COc(cc1)ccc1C#N`.
+
+ For many complexes: create a .csv file with paths to protein files and ligand files or SMILES strings. The first column of the .csv has to be called `protein_path` and the second one `ligand`.
+ An example .csv is at `data/protein_ligand_example_csv.csv` and you would use it with `--protein_ligand_csv protein_ligand_example_csv.csv`.
+
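+ A minimal sketch of what such a file can look like (hypothetical paths; only the two column names are fixed):
+
+     protein_path,ligand
+     examples/my_protein.pdb,COc(cc1)ccc1C#N
+     examples/my_protein.pdb,examples/my_ligand.sdf
+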
+ ### Generate the ESM2 embeddings for the proteins
+ We will soon also provide weights of a trained model without ESM2 embeddings such that this step will not be necessary. Luckily, it is rather easy. First, prepare a FASTA file for ESM2 (for a single protein, use `--protein_path protein.pdb` instead):
+
+     python datasets/esm_embedding_preparation.py --protein_ligand_csv data/protein_ligand_example_csv.csv --out_file data/prepared_for_esm.fasta
+
+ Generate the embeddings with ESM2 (assuming that you are in the DiffDock directory):
+
+     git clone https://github.com/facebookresearch/esm
+     cd esm
+     pip install -e .
+     cd ..
+     HOME=esm/model_weights python esm/scripts/extract.py esm2_t33_650M_UR50D data/prepared_for_esm.fasta data/esm2_output --repr_layers 33 --include per_tok
+
+ And done, that is it!
+
+ ### Run inference
+
+     python -m inference --protein_ligand_csv data/protein_ligand_example_csv.csv --out_dir results/user_predictions_small --inference_steps 20 --samples_per_complex 40 --batch_size 10
+
+
+
+ # Running DiffDock to reproduce paper numbers
+ Download the data and place it as described in the "Dataset" section above.
+
+ ### Generate the ESM2 embeddings for the proteins
+ First run:
+
+     python datasets/pdbbind_lm_embedding_preparation.py
+
+ Use the generated file `data/pdbbind_sequences.fasta` to generate the ESM2 language model embeddings with the library at https://github.com/facebookresearch/esm: install their repository and execute the following inside it:
+
+     python scripts/extract.py esm2_t33_650M_UR50D pdbbind_sequences.fasta embeddings_output --repr_layers 33 --include per_tok
+
+ This generates the `embeddings_output` directory, which you have to copy into the `data` folder of our repository to have `data/embeddings_output`.
+ Then run the command:
+
+     python datasets/esm_embeddings_to_pt.py
+
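+ For orientation, this conversion step roughly does the following (a minimal sketch, assuming the standard output format of esm's `extract.py`, where each `.pt` file holds a dict with per-token representations keyed by layer; the combined file is what the training commands below expect via `--esm_embeddings_path`):
+
+     import os
+     import torch
+
+     emb_dir = "data/embeddings_output"
+     embeddings = {}
+     for filename in os.listdir(emb_dir):
+         # each file stores {"label": ..., "representations": {33: tensor of shape [seq_len, 1280]}}
+         d = torch.load(os.path.join(emb_dir, filename))
+         embeddings[d["label"]] = d["representations"][33]
+     torch.save(embeddings, "data/esm2_3billion_embeddings.pt")
+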
+ ### Using the provided model weights for evaluation
+ To predict binding structures using the provided model weights, run:
+
+     python -m evaluate --model_dir workdir/paper_score_model --ckpt best_ema_inference_epoch_model.pt --confidence_ckpt best_model_epoch75.pt --confidence_model_dir workdir/paper_confidence_model --run_name DiffDockInference --inference_steps 20 --split_path data/splits/timesplit_test --samples_per_complex 40 --batch_size 10
+
+ To additionally save the .sdf files of the generated molecules, add the flag `--save_visualisation`.
+
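+ As a quick sanity check, a predicted pose can be loaded back with RDKit (a sketch; the directory name here is hypothetical, but the `rank1.sdf` naming matches the files this repository writes):
+
+     from rdkit import Chem
+
+     # read the top-ranked predicted pose from one output directory
+     mol = Chem.SDMolSupplier("results/user_predictions_small/index0_mycomplex/rank1.sdf")[0]
+     print(mol.GetNumAtoms(), "atoms; first coordinate:", mol.GetConformer().GetPositions()[0])
+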
+ ### Training a model yourself and using those weights
+ Train the large score model:
+
+     python -m train --run_name big_score_model --test_sigma_intervals --esm_embeddings_path data/esm2_3billion_embeddings.pt --log_dir workdir --lr 1e-3 --tr_sigma_min 0.1 --tr_sigma_max 19 --rot_sigma_min 0.03 --rot_sigma_max 1.55 --batch_size 16 --ns 48 --nv 10 --num_conv_layers 6 --dynamic_max_cross --scheduler plateau --scale_by_sigma --dropout 0.1 --sampling_alpha 1 --sampling_beta 1 --remove_hs --c_alpha_max_neighbors 24 --receptor_radius 15 --num_dataloader_workers 1 --cudnn_benchmark --rot_alpha 1 --rot_beta 1 --tor_alpha 1 --tor_beta 1 --val_inference_freq 5 --num_inference_complexes 500 --use_ema --distance_embed_dim 64 --cross_distance_embed_dim 64 --sigma_embed_dim 64 --scheduler_patience 30 --n_epochs 850
+
+ The model weights are saved in the `workdir` directory.
+
+ Train a small score model with a higher maximum translation sigma that will be used to generate the samples for training the confidence model:
+
+     python -m train --run_name small_score_model --test_sigma_intervals --esm_embeddings_path data/esm2_3billion_embeddings.pt --log_dir workdir --lr 1e-3 --tr_sigma_min 0.1 --tr_sigma_max 34 --rot_sigma_min 0.03 --rot_sigma_max 1.55 --batch_size 16 --ns 24 --nv 6 --num_conv_layers 5 --dynamic_max_cross --scheduler plateau --scale_by_sigma --dropout 0.1 --sampling_alpha 1 --sampling_beta 1 --remove_hs --c_alpha_max_neighbors 24 --receptor_radius 15 --num_dataloader_workers 1 --cudnn_benchmark --rot_alpha 1 --rot_beta 1 --tor_alpha 1 --tor_beta 1 --val_inference_freq 5 --num_inference_complexes 500 --use_ema --scheduler_patience 30 --n_epochs 300
+
+ In practice, you could likely achieve the same or better results by using the first score model to create the samples for training the confidence model, but this is what we did in the paper.
+ The score model used to generate the samples to train the confidence model does not have to be the same as the score model that is used with that confidence model during inference.
+
+ Train the confidence model by running the following:
+
+     python -m confidence.confidence_train --original_model_dir workdir/small_score_model --run_name confidence_model --inference_steps 20 --samples_per_complex 7 --inf_sched_alpha 1 --inf_sched_beta 1 --batch_size 16 --n_epochs 100 --lr 3e-4 --scheduler_patience 50 --tr_sigma_min 0.1 --tr_sigma_max 34 --rot_sigma_min 0.03 --rot_sigma_max 1.55 --ns 24 --nv 6 --num_conv_layers 5 --dynamic_max_cross --scale_by_sigma --dropout 0.1 --all_atoms --remove_hs --c_alpha_max_neighbors 24 --receptor_radius 15 --esm_embeddings_path data/esm2_3billion_embeddings.pt --main_metric loss --main_metric_goal min --best_model_save_frequency 5 --rmsd_classification_cutoff 2 --cache_creation_id 1 --cache_ids_to_combine 1 2 3 4
+
+ Run this first with `--cache_creation_id 1`, then `--cache_creation_id 2`, and so on up to 4.
+
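+ A rough sketch of that loop (hypothetical; the `...` stands for the full, unchanged argument list of the command above):
+
+     import subprocess
+
+     for cache_id in range(1, 5):
+         # only --cache_creation_id varies between the four cache-creation runs
+         subprocess.run(
+             f"python -m confidence.confidence_train ... --cache_creation_id {cache_id}",
+             shell=True, check=True,
+         )
+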
+ Now everything is trained and you can run inference with:
+
+     python -m evaluate --model_dir workdir/big_score_model --ckpt best_ema_inference_epoch_model.pt --confidence_ckpt best_model_epoch75.pt --confidence_model_dir workdir/confidence_model --run_name DiffDockInference --inference_steps 20 --split_path data/splits/timesplit_test --samples_per_complex 40 --batch_size 10
+
+
+ ## Citation
+
+     @article{corso2022diffdock,
+         title={DiffDock: Diffusion Steps, Twists, and Turns for Molecular Docking},
+         author = {Corso, Gabriele and Stärk, Hannes and Jing, Bowen and Barzilay, Regina and Jaakkola, Tommi},
+         journal={arXiv preprint arXiv:2210.01776},
+         year={2022}
+     }
+
+ ## License
+ MIT
+
+ ![Alt Text](visualizations/example_6agt_symmetric.gif)
app.py ADDED
@@ -0,0 +1,610 @@
+ import gradio as gr
+ import os
+
+ import copy
+ import os
+ import torch
+
+ import subprocess
+
+
+ import time
+ from argparse import ArgumentParser, Namespace, FileType
+ from rdkit.Chem import RemoveHs
+ from functools import partial
+ import numpy as np
+ import pandas as pd
+ from rdkit import RDLogger
+ from rdkit.Chem import MolFromSmiles, AddHs
+ from torch_geometric.loader import DataLoader
+ import yaml
+ import sys
+ import csv
+
+ csv.field_size_limit(sys.maxsize)
+
+ print(torch.__version__)
+ os.makedirs("data/esm2_output", exist_ok=True)
+ os.makedirs("results", exist_ok=True)
+ from datasets.process_mols import (
+     read_molecule,
+     generate_conformer,
+     write_mol_with_coords,
+ )
+ from datasets.pdbbind import PDBBind
+ from utils.diffusion_utils import t_to_sigma as t_to_sigma_compl, get_t_schedule
+ from utils.sampling import randomize_position, sampling
+ from utils.utils import get_model
+ from utils.visualise import PDBFile
+ from tqdm import tqdm
+ from datasets.esm_embedding_preparation import esm_embedding_prep
+ import subprocess
+
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+ with open(f"workdir/paper_score_model/model_parameters.yml") as f:
+     score_model_args = Namespace(**yaml.full_load(f))
+
+ with open(f"workdir/paper_confidence_model/model_parameters.yml") as f:
+     confidence_args = Namespace(**yaml.full_load(f))
+
+ import shutil
+
+ t_to_sigma = partial(t_to_sigma_compl, args=score_model_args)
+
+ model = get_model(score_model_args, device, t_to_sigma=t_to_sigma, no_parallel=True)
+ state_dict = torch.load(
+     f"workdir/paper_score_model/best_ema_inference_epoch_model.pt",
+     map_location=torch.device("cpu"),
+ )
+ model.load_state_dict(state_dict, strict=True)
+ model = model.to(device)
+ model.eval()
+
+ confidence_model = get_model(
+     confidence_args,
+     device,
+     t_to_sigma=t_to_sigma,
+     no_parallel=True,
+     confidence_mode=True,
+ )
+ state_dict = torch.load(
+     f"workdir/paper_confidence_model/best_model_epoch75.pt",
+     map_location=torch.device("cpu"),
+ )
+ confidence_model.load_state_dict(state_dict, strict=True)
+ confidence_model = confidence_model.to(device)
+ confidence_model.eval()
+
+
+ def get_pdb(pdb_code="", filepath=""):
+     try:
+         return filepath.name
+     except AttributeError as e:
+         if pdb_code is None or pdb_code == "":
+             return None
+         else:
+             os.system(f"wget -qnc https://files.rcsb.org/view/{pdb_code}.pdb")
+             return f"{pdb_code}.pdb"
+
+
+ def get_ligand(smiles="", filepath=""):
+     if smiles is None or smiles == "":
+         try:
+             return filepath.name
+         except AttributeError as e:
+             return None
+     else:
+         return smiles
+
+
+ def read_mol(molpath):
+     with open(molpath, "r") as fp:
+         lines = fp.readlines()
+     mol = ""
+     for l in lines:
+         mol += l
+     return mol
+
+
+ def molecule(input_pdb, ligand_pdb, original_ligand):
+
+     structure = read_mol(input_pdb)
+     mol = read_mol(ligand_pdb)
+
+     try:
+         ligand = read_mol(original_ligand.name)
+         _, ext = os.path.splitext(original_ligand.name)
+         lig_str_1 = """let original_ligand = `""" + ligand + """`"""
+         lig_str_2 = f"""
+         viewer.addModel( original_ligand, "{ext[1:]}" );
+         viewer.getModel(2).setStyle({{stick:{{colorscheme:"greenCarbon"}}}});"""
+     except AttributeError as e:
+         ligand = None
+         lig_str_1 = ""
+         lig_str_2 = ""
+
+     x = (
+         """<!DOCTYPE html>
+         <html>
+         <head>
+         <meta http-equiv="content-type" content="text/html; charset=UTF-8" />
+         <style>
+         body{
+             font-family:sans-serif
+         }
+         .mol-container {
+             width: 600px;
+             height: 600px;
+             position: relative;
+             mx-auto:0
+         }
+         .mol-container select{
+             background-image:None;
+         }
+         .green{
+             width:20px;
+             height:20px;
+             background-color:#33ff45;
+             display:inline-block;
+         }
+         .magenta{
+             width:20px;
+             height:20px;
+             background-color:magenta;
+             display:inline-block;
+         }
+         </style>
+         <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.6.3/jquery.min.js" integrity="sha512-STof4xm1wgkfm7heWqFJVn58Hm3EtS31XFaagaa8VMReCXAkQnJZ+jEy8PCC/iT18dFy95WcExNHFTqLyp72eQ==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
+         <script src="https://3Dmol.csb.pitt.edu/build/3Dmol-min.js"></script>
+         </head>
+         <body>
+         <button id="startanimation">Replay diffusion process</button>
+         <button id="togglesurface">Toggle surface representation</button>
+         <div>
+         <span class="green"></span> Uploaded ligand position
+         <span class="magenta"></span> Predicted ligand position
+         </div>
+         <div id="container" class="mol-container"></div>
+
+         <script>
+             let ligand = `"""
+         + mol
+         + """`
+             let structure = `"""
+         + structure
+         + """`
+         """
+         + lig_str_1
+         + """
+
+             let viewer = null;
+             let surface = false;
+             let surf = null;
+             $(document).ready(function () {
+                 let element = $("#container");
+                 let config = { backgroundColor: "white" };
+                 viewer = $3Dmol.createViewer(element, config);
+                 viewer.addModel( structure, "pdb" );
+                 viewer.setStyle({}, {cartoon: {color: "gray"}});
+                 viewer.zoomTo();
+                 viewer.zoom(0.7);
+                 viewer.addModelsAsFrames(ligand, "pdb");
+                 viewer.animate({loop: "forward",reps: 1});
+
+                 viewer.getModel(1).setStyle({stick:{colorscheme:"magentaCarbon"}});
+             """
+         + lig_str_2
+         + """
+                 viewer.render();
+
+             })
+
+             $("#startanimation").click(function() {
+                 viewer.animate({loop: "forward",reps: 1});
+             });
+             $("#togglesurface").click(function() {
+                 if (surface != true) {
+                     surf = viewer.addSurface($3Dmol.SurfaceType.VDW, { "opacity": 0.9, "color": "white" }, { model: 0 });
+                     surface = true;
+                 } else {
+                     viewer.removeAllSurfaces()
+                     surface = false;
+                 }
+             });
+         </script>
+         </body></html>"""
+     )
+
+     return f"""<iframe style="width: 100%; height: 700px" name="result" allow="midi; geolocation; microphone; camera;
+     display-capture; encrypted-media;" sandbox="allow-modals allow-forms
+     allow-scripts allow-same-origin allow-popups
+     allow-top-navigation-by-user-activation allow-downloads" allowfullscreen=""
+     allowpaymentrequest="" frameborder="0" srcdoc='{x}'></iframe>"""
+
+
+ import sys
+
+
+ def esm(protein_path, out_file):
+     print("running esm")
+     esm_embedding_prep(out_file, protein_path)
+     # create args object with defaults
+     os.environ["HOME"] = "esm/model_weights"
+     subprocess.call(
+         f"python esm/scripts/extract.py esm2_t33_650M_UR50D {out_file} data/esm2_output --repr_layers 33 --include per_tok",
+         shell=True,
+         env=os.environ,
+     )
+
+
+ def update(inp, file, ligand_inp, ligand_file, n_it, n_samples, actual_steps, no_final_step_noise):
+     pdb_path = get_pdb(inp, file)
+     ligand_path = get_ligand(ligand_inp, ligand_file)
+
+     esm(
+         pdb_path,
+         f"data/{os.path.basename(pdb_path)}_prepared_for_esm.fasta",
+     )
+     tr_schedule = get_t_schedule(inference_steps=n_it)
+     rot_schedule = tr_schedule
+     tor_schedule = tr_schedule
+     print("common t schedule", tr_schedule)
+     (
+         failures,
+         skipped,
+         confidences_list,
+         names_list,
+         run_times,
+         min_self_distances_list,
+     ) = (
+         0,
+         0,
+         [],
+         [],
+         [],
+         [],
+     )
+     N = n_samples  # number of samples to generate
+     protein_path_list = [pdb_path]
+     ligand_descriptions = [ligand_path]
+     no_random = False
+     ode = False
+     no_final_step_noise = no_final_step_noise
+     out_dir = "results/"
+     test_dataset = PDBBind(
+         transform=None,
+         root="",
+         protein_path_list=protein_path_list,
+         ligand_descriptions=ligand_descriptions,
+         receptor_radius=score_model_args.receptor_radius,
+         cache_path="data/cache",
+         remove_hs=score_model_args.remove_hs,
+         max_lig_size=None,
+         c_alpha_max_neighbors=score_model_args.c_alpha_max_neighbors,
+         matching=False,
+         keep_original=False,
+         popsize=score_model_args.matching_popsize,
+         maxiter=score_model_args.matching_maxiter,
+         all_atoms=score_model_args.all_atoms,
+         atom_radius=score_model_args.atom_radius,
+         atom_max_neighbors=score_model_args.atom_max_neighbors,
+         esm_embeddings_path="data/esm2_output",
+         require_ligand=True,
+         num_workers=1,
+         keep_local_structures=False,
+     )
+     test_loader = DataLoader(dataset=test_dataset, batch_size=1, shuffle=False)
+     confidence_test_dataset = PDBBind(
+         transform=None,
+         root="",
+         protein_path_list=protein_path_list,
+         ligand_descriptions=ligand_descriptions,
+         receptor_radius=confidence_args.receptor_radius,
+         cache_path="data/cache",
+         remove_hs=confidence_args.remove_hs,
+         max_lig_size=None,
+         c_alpha_max_neighbors=confidence_args.c_alpha_max_neighbors,
+         matching=False,
+         keep_original=False,
+         popsize=confidence_args.matching_popsize,
+         maxiter=confidence_args.matching_maxiter,
+         all_atoms=confidence_args.all_atoms,
+         atom_radius=confidence_args.atom_radius,
+         atom_max_neighbors=confidence_args.atom_max_neighbors,
+         esm_embeddings_path="data/esm2_output",
+         require_ligand=True,
+         num_workers=1,
+     )
+     confidence_complex_dict = {d.name: d for d in confidence_test_dataset}
+     for idx, orig_complex_graph in tqdm(enumerate(test_loader)):
+         if (
+             confidence_model is not None
+             and not (
+                 confidence_args.use_original_model_cache
+                 or confidence_args.transfer_weights
+             )
+             and orig_complex_graph.name[0] not in confidence_complex_dict.keys()
+         ):
+             skipped += 1
+             print(
+                 f"HAPPENING | The confidence dataset did not contain {orig_complex_graph.name[0]}. We are skipping this complex."
+             )
+             continue
+         try:
+             data_list = [copy.deepcopy(orig_complex_graph) for _ in range(N)]
+             randomize_position(
+                 data_list,
+                 score_model_args.no_torsion,
+                 no_random,
+                 score_model_args.tr_sigma_max,
+             )
+             pdb = None
+             lig = orig_complex_graph.mol[0]
+             visualization_list = []
+             for graph in data_list:
+                 pdb = PDBFile(lig)
+                 pdb.add(lig, 0, 0)
+                 pdb.add(
+                     (
+                         orig_complex_graph["ligand"].pos
+                         + orig_complex_graph.original_center
+                     )
+                     .detach()
+                     .cpu(),
+                     1,
+                     0,
+                 )
+                 pdb.add(
+                     (graph["ligand"].pos + graph.original_center).detach().cpu(),
+                     part=1,
+                     order=1,
+                 )
+                 visualization_list.append(pdb)
+
+             start_time = time.time()
+             if confidence_model is not None and not (
+                 confidence_args.use_original_model_cache
+                 or confidence_args.transfer_weights
+             ):
+                 confidence_data_list = [
+                     copy.deepcopy(confidence_complex_dict[orig_complex_graph.name[0]])
+                     for _ in range(N)
+                 ]
+             else:
+                 confidence_data_list = None
+
+             data_list, confidence = sampling(
+                 data_list=data_list,
+                 model=model,
+                 inference_steps=actual_steps,
+                 tr_schedule=tr_schedule,
+                 rot_schedule=rot_schedule,
+                 tor_schedule=tor_schedule,
+                 device=device,
+                 t_to_sigma=t_to_sigma,
+                 model_args=score_model_args,
+                 no_random=no_random,
+                 ode=ode,
+                 visualization_list=visualization_list,
+                 confidence_model=confidence_model,
+                 confidence_data_list=confidence_data_list,
+                 confidence_model_args=confidence_args,
+                 batch_size=1,
+                 no_final_step_noise=no_final_step_noise,
+             )
+             ligand_pos = np.asarray(
+                 [
+                     complex_graph["ligand"].pos.cpu().numpy()
+                     + orig_complex_graph.original_center.cpu().numpy()
+                     for complex_graph in data_list
+                 ]
+             )
+             run_times.append(time.time() - start_time)
+
+             if confidence is not None and isinstance(
+                 confidence_args.rmsd_classification_cutoff, list
+             ):
+                 confidence = confidence[:, 0]
+             if confidence is not None:
+                 confidence = confidence.cpu().numpy()
+                 re_order = np.argsort(confidence)[::-1]
+                 confidence = confidence[re_order]
+                 confidences_list.append(confidence)
+                 ligand_pos = ligand_pos[re_order]
+             write_dir = (
+                 f'{out_dir}/index{idx}_{data_list[0]["name"][0].replace("/","-")}'
+             )
+             os.makedirs(write_dir, exist_ok=True)
+             confidences = []
+             for rank, pos in enumerate(ligand_pos):
+                 mol_pred = copy.deepcopy(lig)
+                 if score_model_args.remove_hs:
+                     mol_pred = RemoveHs(mol_pred)
+                 if rank == 0:
+                     write_mol_with_coords(
+                         mol_pred, pos, os.path.join(write_dir, f"rank{rank+1}.sdf")
+                     )
+                 confidences.append(confidence[rank])
+                 write_mol_with_coords(
+                     mol_pred,
+                     pos,
+                     os.path.join(
+                         write_dir, f"rank{rank+1}_confidence{confidence[rank]:.2f}.sdf"
+                     ),
+                 )
+             self_distances = np.linalg.norm(
+                 ligand_pos[:, :, None, :] - ligand_pos[:, None, :, :], axis=-1
+             )
+             self_distances = np.where(
+                 np.eye(self_distances.shape[2]), np.inf, self_distances
+             )
+             min_self_distances_list.append(np.min(self_distances, axis=(1, 2)))
+
+             filenames = []
+             if confidence is not None:
+                 for rank, batch_idx in enumerate(re_order):
+                     visualization_list[batch_idx].write(
+                         os.path.join(write_dir, f"rank{rank+1}_reverseprocess.pdb")
+                     )
+                     filenames.append(
+                         os.path.join(write_dir, f"rank{rank+1}_reverseprocess.pdb")
+                     )
+             else:
+                 for rank, batch_idx in enumerate(ligand_pos):
+                     visualization_list[batch_idx].write(
+                         os.path.join(write_dir, f"rank{rank+1}_reverseprocess.pdb")
+                     )
+                     filenames.append(
+                         os.path.join(write_dir, f"rank{rank+1}_reverseprocess.pdb")
+                     )
+             names_list.append(orig_complex_graph.name[0])
+         except Exception as e:
+             print("Failed on", orig_complex_graph["name"], e)
+             failures += 1
+             return None
+     # zip outputs
+     zippath = shutil.make_archive(
+         os.path.join("results", os.path.basename(pdb_path)), "zip", write_dir
+     )
+     print("Zipped outputs to", zippath)
+     labels = [
+         f"rank {i+1}, confidence {confidences[i]:.2f}" for i in range(len(filenames))
+     ]
+
+     torch.cuda.empty_cache()
+     return (
+         molecule(pdb_path, filenames[0], ligand_file),
+         gr.Dropdown.update(choices=labels, value=labels[0]),
+         filenames,
+         pdb_path,
+         zippath,
+     )
+
+
+ def updateView(out, filenames, pdb, ligand_file):
+     print("updating view")
+     i = out  # int(out.replace("rank", ""))
+     print(i)
+     i = int(i.split(",")[0].replace("rank", "")) - 1
+     return molecule(pdb, filenames[i], ligand_file)
+
+
+ demo = gr.Blocks()
+
+ with demo:
+     gr.Markdown("# DiffDock")
+     gr.Markdown(
+         ">**DiffDock: Diffusion Steps, Twists, and Turns for Molecular Docking**, Corso, Gabriele and Stärk, Hannes and Jing, Bowen and Barzilay, Regina and Jaakkola, Tommi, arXiv:2210.01776 [GitHub](https://github.com/gcorso/diffdock)"
+     )
+     gr.Markdown("")
+     with gr.Box():
+         with gr.Row():
+             with gr.Column():
+                 gr.Markdown("## Protein")
+                 inp = gr.Textbox(
+                     placeholder="PDB Code or upload file below", label="Input structure"
+                 )
+                 file = gr.File(file_count="single", label="Input PDB")
+             with gr.Column():
+                 gr.Markdown("## Ligand")
+                 ligand_inp = gr.Textbox(
+                     placeholder="Provide SMILES input or upload mol2/sdf file below",
+                     label="SMILES string",
+                 )
+                 ligand_file = gr.File(file_count="single", label="Input Ligand")
+         n_it = gr.Slider(value=20,
+             minimum=10, maximum=40, label="Number of inference steps", step=1
+         )
+         actual_steps = gr.Slider(value=18,
+             minimum=10, maximum=40, label="Number of actual inference steps", step=1
+         )
+         n_samples = gr.Slider(value=40,
+             minimum=10, maximum=40, label="Number of samples", step=1
+         )
+         no_final_step_noise = gr.Checkbox(value=True, label="No final step noise"
+         )
+
+         btn = gr.Button("Run predictions")
+
+     gr.Markdown("## Output")
+     pdb = gr.Variable()
+     filenames = gr.Variable()
+     out = gr.Dropdown(interactive=True, label="Ranked samples")
+     mol = gr.HTML()
+     output_file = gr.File(file_count="single", label="Output files")
+     gr.Examples(
+         [
+             [
+                 "6w70",
+                 "examples/6w70.pdb",
+                 "COc1ccc(cc1)n2c3c(c(n2)C(=O)N)CCN(C3=O)c4ccc(cc4)N5CCCCC5=O",
+                 "examples/6w70_ligand.sdf",
+                 20,
+                 10,
+                 18,
+                 True
+             ],
+             [
+                 "6moa",
+                 "examples/6moa_protein_processed.pdb",
+                 "",
+                 "examples/6moa_ligand.sdf",
+                 20,
+                 10,
+                 18,
+                 True
+             ],
+             [
+                 "",
+                 "examples/6o5u_protein_processed.pdb",
+                 "",
+                 "examples/6o5u_ligand.sdf",
+                 20,
+                 10,
+                 18,
+                 True
+             ],
+             [
+                 "",
+                 "examples/6o5u_protein_processed.pdb",
+                 "[NH3+]C[C@H]1O[C@H](O[C@@H]2[C@@H]([NH3+])C[C@H]([C@@H]([C@H]2O)O[C@H]2O[C@H](CO)[C@H]([C@@H]([C@H]2O)[NH3+])O)[NH3+])[C@@H]([C@H]([C@@H]1O)O)O",
+                 "examples/6o5u_ligand.sdf",
+                 20,
+                 10,
+                 18,
+                 True
+             ],
+             [
+                 "",
+                 "examples/6o5u_protein_processed.pdb",
+                 "",
+                 "examples/6o5u_ligand.sdf",
+                 20,
+                 10,
+                 18,
+                 True
+             ],
+             [
+                 "",
+                 "examples/6ahs_protein_processed.pdb",
+                 "",
+                 "examples/6ahs_ligand.sdf",
+                 20,
+                 10,
+                 18,
+                 True
+             ],
+         ],
+         [inp, file, ligand_inp, ligand_file, n_it, n_samples, actual_steps, no_final_step_noise],
+         [mol, out, filenames, pdb, output_file],
+         # fn=update,
+         # cache_examples=True,
+     )
+     btn.click(
+         fn=update,
+         inputs=[inp, file, ligand_inp, ligand_file, n_it, n_samples, actual_steps, no_final_step_noise],
+         outputs=[mol, out, filenames, pdb, output_file],
+     )
+     out.change(fn=updateView, inputs=[out, filenames, pdb, ligand_file], outputs=mol)
+ demo.launch()
baselines/baseline_evaluation.py ADDED
@@ -0,0 +1,219 @@
+ # script to evaluate predicted ligand poses against the PDBBind test set (RMSD, centroid distance, clash metrics)
+ import os
+
+ import plotly.express as px
+ import time
+ from argparse import FileType, ArgumentParser
+
+ import numpy as np
+ import pandas as pd
+ import wandb
+ from biopandas.pdb import PandasPdb
+ from rdkit import Chem
+
+ from tqdm import tqdm
+
+ from datasets.pdbbind import read_mol
+ from datasets.process_mols import read_molecule
+ from utils.utils import read_strings_from_txt, get_symmetry_rmsd
+
+ parser = ArgumentParser()
+ parser.add_argument('--config', type=FileType(mode='r'), default=None)
+ parser.add_argument('--run_name', type=str, default='gnina_results', help='')
+ parser.add_argument('--data_dir', type=str, default='data/PDBBind_processed', help='')
+ parser.add_argument('--results_path', type=str, default='results/user_inference', help='Path to folder with trained model and hyperparameters')
+ parser.add_argument('--file_suffix', type=str, default='_baseline_ligand.pdb', help='Path to folder with trained model and hyperparameters')
+ parser.add_argument('--project', type=str, default='ligbind_inf', help='')
+ parser.add_argument('--wandb', action='store_true', default=False, help='')
+ parser.add_argument('--file_to_exclude', type=str, default=None, help='')
+ parser.add_argument('--all_dirs_in_results', action='store_true', default=True, help='Evaluate all directories in the results path instead of directly looking for the names')
+ parser.add_argument('--num_predictions', type=int, default=10, help='')
+ parser.add_argument('--no_id_in_filename', action='store_true', default=False, help='')
+ args = parser.parse_args()
+
+ print('Reading paths and names.')
+ names = read_strings_from_txt(f'data/splits/timesplit_test')
+ names_no_rec_overlap = read_strings_from_txt(f'data/splits/timesplit_test_no_rec_overlap')
+ results_path_containments = os.listdir(args.results_path)
+
+ if args.wandb:
+     wandb.init(
+         entity='coarse-graining-mit',
+         settings=wandb.Settings(start_method="fork"),
+         project=args.project,
+         name=args.run_name,
+         config=args
+     )
+
+ all_times = []
+ successful_names_list = []
+ rmsds_list = []
+ centroid_distances_list = []
+ min_cross_distances_list = []
+ min_self_distances_list = []
+ without_rec_overlap_list = []
+ start_time = time.time()
+ for i, name in enumerate(tqdm(names)):
+     mol = read_mol(args.data_dir, name, remove_hs=True)
+     mol = Chem.RemoveAllHs(mol)
+     orig_ligand_pos = np.array(mol.GetConformer().GetPositions())
+
+     if args.all_dirs_in_results:
+         directory_with_name = [directory for directory in results_path_containments if name in directory][0]
+         ligand_pos = []
+         for i in range(args.num_predictions):
+             file_paths = os.listdir(os.path.join(args.results_path, directory_with_name))
+             file_path = [path for path in file_paths if f'rank{i+1}' in path][0]
+             if args.file_to_exclude is not None and args.file_to_exclude in file_path: continue
+             mol_pred = read_molecule(os.path.join(args.results_path, directory_with_name, file_path), remove_hs=True, sanitize=True)
+             mol_pred = Chem.RemoveAllHs(mol_pred)
+             ligand_pos.append(mol_pred.GetConformer().GetPositions())
+         ligand_pos = np.asarray(ligand_pos)
+     else:
+         if not os.path.exists(os.path.join(args.results_path, name, f'{"" if args.no_id_in_filename else name}{args.file_suffix}')): raise Exception('path did not exist:', os.path.join(args.results_path, name, f'{"" if args.no_id_in_filename else name}{args.file_suffix}'))
+         mol_pred = read_molecule(os.path.join(args.results_path, name, f'{"" if args.no_id_in_filename else name}{args.file_suffix}'), remove_hs=True, sanitize=True)
+         if mol_pred is None:
+             print("Skipping ", name, ' because RDKit could not read it.')
+             continue
+         mol_pred = Chem.RemoveAllHs(mol_pred)
+         ligand_pos = np.asarray([np.array(mol_pred.GetConformer(i).GetPositions()) for i in range(args.num_predictions)])
+     try:
+         rmsd = get_symmetry_rmsd(mol, orig_ligand_pos, [l for l in ligand_pos], mol_pred)
+     except Exception as e:
+         print("Using non-corrected RMSD because of the error:", e)
+         rmsd = np.sqrt(((ligand_pos - orig_ligand_pos) ** 2).sum(axis=2).mean(axis=1))
+
+     rmsds_list.append(rmsd)
+     centroid_distances_list.append(np.linalg.norm(ligand_pos.mean(axis=1) - orig_ligand_pos[None, :].mean(axis=1), axis=1))
+
+     rec_path = os.path.join(args.data_dir, name, f'{name}_protein_processed.pdb')
+     if not os.path.exists(rec_path):
+         rec_path = os.path.join(args.data_dir, name, f'{name}_protein_obabel_reduce.pdb')
+     rec = PandasPdb().read_pdb(rec_path)
+     rec_df = rec.df['ATOM']
+     receptor_pos = rec_df[['x_coord', 'y_coord', 'z_coord']].to_numpy().squeeze().astype(np.float32)
+     receptor_pos = np.tile(receptor_pos, (args.num_predictions, 1, 1))
+
+     cross_distances = np.linalg.norm(receptor_pos[:, :, None, :] - ligand_pos[:, None, :, :], axis=-1)
+     self_distances = np.linalg.norm(ligand_pos[:, :, None, :] - ligand_pos[:, None, :, :], axis=-1)
+     self_distances = np.where(np.eye(self_distances.shape[2]), np.inf, self_distances)
+     min_cross_distances_list.append(np.min(cross_distances, axis=(1, 2)))
+     min_self_distances_list.append(np.min(self_distances, axis=(1, 2)))
+     successful_names_list.append(name)
+     without_rec_overlap_list.append(1 if name in names_no_rec_overlap else 0)
+ performance_metrics = {}
+ for overlap in ['', 'no_overlap_']:
+     if 'no_overlap_' == overlap:
+         without_rec_overlap = np.array(without_rec_overlap_list, dtype=bool)
+         rmsds = np.array(rmsds_list)[without_rec_overlap]
+         centroid_distances = np.array(centroid_distances_list)[without_rec_overlap]
+         min_cross_distances = np.array(min_cross_distances_list)[without_rec_overlap]
+         min_self_distances = np.array(min_self_distances_list)[without_rec_overlap]
+         successful_names = np.array(successful_names_list)[without_rec_overlap]
+     else:
+         rmsds = np.array(rmsds_list)
+         centroid_distances = np.array(centroid_distances_list)
+         min_cross_distances = np.array(min_cross_distances_list)
+         min_self_distances = np.array(min_self_distances_list)
+         successful_names = np.array(successful_names_list)
+
+     np.save(os.path.join(args.results_path, f'{overlap}rmsds.npy'), rmsds)
+     np.save(os.path.join(args.results_path, f'{overlap}names.npy'), successful_names)
+     np.save(os.path.join(args.results_path, f'{overlap}min_cross_distances.npy'), np.array(min_cross_distances))
+     np.save(os.path.join(args.results_path, f'{overlap}min_self_distances.npy'), np.array(min_self_distances))
+
+     performance_metrics.update({
+         f'{overlap}steric_clash_fraction': (100 * (min_cross_distances < 0.4).sum() / len(min_cross_distances) / args.num_predictions).__round__(2),
+         f'{overlap}self_intersect_fraction': (100 * (min_self_distances < 0.4).sum() / len(min_self_distances) / args.num_predictions).__round__(2),
+         f'{overlap}mean_rmsd': rmsds[:, 0].mean(),
+         f'{overlap}rmsds_below_2': (100 * (rmsds[:, 0] < 2).sum() / len(rmsds[:, 0])),
+         f'{overlap}rmsds_below_5': (100 * (rmsds[:, 0] < 5).sum() / len(rmsds[:, 0])),
+         f'{overlap}rmsds_percentile_25': np.percentile(rmsds[:, 0], 25).round(2),
+         f'{overlap}rmsds_percentile_50': np.percentile(rmsds[:, 0], 50).round(2),
+         f'{overlap}rmsds_percentile_75': np.percentile(rmsds[:, 0], 75).round(2),
+
+         f'{overlap}mean_centroid': centroid_distances[:, 0].mean().__round__(2),
+         f'{overlap}centroid_below_2': (100 * (centroid_distances[:, 0] < 2).sum() / len(centroid_distances[:, 0])).__round__(2),
+         f'{overlap}centroid_below_5': (100 * (centroid_distances[:, 0] < 5).sum() / len(centroid_distances[:, 0])).__round__(2),
+         f'{overlap}centroid_percentile_25': np.percentile(centroid_distances[:, 0], 25).round(2),
+         f'{overlap}centroid_percentile_50': np.percentile(centroid_distances[:, 0], 50).round(2),
+         f'{overlap}centroid_percentile_75': np.percentile(centroid_distances[:, 0], 75).round(2),
+     })
+
+     top5_rmsds = np.min(rmsds[:, :5], axis=1)
+     top5_centroid_distances = centroid_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(rmsds[:, :5], axis=1)][:, 0]
+     top5_min_cross_distances = min_cross_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(rmsds[:, :5], axis=1)][:, 0]
+     top5_min_self_distances = min_self_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(rmsds[:, :5], axis=1)][:, 0]
+     performance_metrics.update({
+         f'{overlap}top5_steric_clash_fraction': (100 * (top5_min_cross_distances < 0.4).sum() / len(top5_min_cross_distances)).__round__(2),
+         f'{overlap}top5_self_intersect_fraction': (100 * (top5_min_self_distances < 0.4).sum() / len(top5_min_self_distances)).__round__(2),
+         f'{overlap}top5_rmsds_below_2': (100 * (top5_rmsds < 2).sum() / len(top5_rmsds)).__round__(2),
+         f'{overlap}top5_rmsds_below_5': (100 * (top5_rmsds < 5).sum() / len(top5_rmsds)).__round__(2),
+         f'{overlap}top5_rmsds_percentile_25': np.percentile(top5_rmsds, 25).round(2),
+         f'{overlap}top5_rmsds_percentile_50': np.percentile(top5_rmsds, 50).round(2),
+         f'{overlap}top5_rmsds_percentile_75': np.percentile(top5_rmsds, 75).round(2),
+
+         f'{overlap}top5_centroid_below_2': (100 * (top5_centroid_distances < 2).sum() / len(top5_centroid_distances)).__round__(2),
+         f'{overlap}top5_centroid_below_5': (100 * (top5_centroid_distances < 5).sum() / len(top5_centroid_distances)).__round__(2),
+         f'{overlap}top5_centroid_percentile_25': np.percentile(top5_centroid_distances, 25).round(2),
+         f'{overlap}top5_centroid_percentile_50': np.percentile(top5_centroid_distances, 50).round(2),
+         f'{overlap}top5_centroid_percentile_75': np.percentile(top5_centroid_distances, 75).round(2),
+     })
+
+     top10_rmsds = np.min(rmsds[:, :10], axis=1)
+     top10_centroid_distances = centroid_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(rmsds[:, :10], axis=1)][:, 0]
+     top10_min_cross_distances = min_cross_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(rmsds[:, :10], axis=1)][:, 0]
+     top10_min_self_distances = min_self_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(rmsds[:, :10], axis=1)][:, 0]
+     performance_metrics.update({
+         f'{overlap}top10_self_intersect_fraction': (100 * (top10_min_self_distances < 0.4).sum() / len(top10_min_self_distances)).__round__(2),
+         f'{overlap}top10_steric_clash_fraction': (100 * (top10_min_cross_distances < 0.4).sum() / len(top10_min_cross_distances)).__round__(2),
+         f'{overlap}top10_rmsds_below_2': (100 * (top10_rmsds < 2).sum() / len(top10_rmsds)).__round__(2),
+         f'{overlap}top10_rmsds_below_5': (100 * (top10_rmsds < 5).sum() / len(top10_rmsds)).__round__(2),
+         f'{overlap}top10_rmsds_percentile_25': np.percentile(top10_rmsds, 25).round(2),
+         f'{overlap}top10_rmsds_percentile_50': np.percentile(top10_rmsds, 50).round(2),
+         f'{overlap}top10_rmsds_percentile_75': np.percentile(top10_rmsds, 75).round(2),
+
+         f'{overlap}top10_centroid_below_2': (100 * (top10_centroid_distances < 2).sum() / len(top10_centroid_distances)).__round__(2),
+         f'{overlap}top10_centroid_below_5': (100 * (top10_centroid_distances < 5).sum() / len(top10_centroid_distances)).__round__(2),
+         f'{overlap}top10_centroid_percentile_25': np.percentile(top10_centroid_distances, 25).round(2),
+         f'{overlap}top10_centroid_percentile_50': np.percentile(top10_centroid_distances, 50).round(2),
+         f'{overlap}top10_centroid_percentile_75': np.percentile(top10_centroid_distances, 75).round(2),
+     })
+ for k in performance_metrics:
+     print(k, performance_metrics[k])
+
+ if args.wandb:
+     wandb.log(performance_metrics)
+     histogram_metrics_list = [('rmsd', rmsds[:, 0]),
+                               ('centroid_distance', centroid_distances[:, 0]),
+                               ('mean_rmsd', rmsds[:, 0]),
+                               ('mean_centroid_distance', centroid_distances[:, 0])]
+     histogram_metrics_list.append(('top5_rmsds', top5_rmsds))
+     histogram_metrics_list.append(('top5_centroid_distances', top5_centroid_distances))
+     histogram_metrics_list.append(('top10_rmsds', top10_rmsds))
+     histogram_metrics_list.append(('top10_centroid_distances', top10_centroid_distances))
+
+     os.makedirs(f'.plotly_cache/baseline_cache', exist_ok=True)
+     images = []
+     for metric_name, metric in histogram_metrics_list:
+         d = {args.results_path: metric}
+         df = pd.DataFrame(data=d)
+         fig = px.ecdf(df, width=900, height=600, range_x=[0, 40])
+         fig.add_vline(x=2, annotation_text='2 Å', annotation_font_size=20, annotation_position="top right",
+                       line_dash='dash', line_color='firebrick', annotation_font_color='firebrick')
+         fig.add_vline(x=5, annotation_text='5 Å', annotation_font_size=20, annotation_position="top right",
+                       line_dash='dash', line_color='green', annotation_font_color='green')
+         fig.update_xaxes(title=f'{metric_name} in Angstrom', title_font={"size": 20}, tickfont={"size": 20})
+         fig.update_yaxes(title=f'Fraction of predictions with lower error', title_font={"size": 20},
+                          tickfont={"size": 20})
+         fig.update_layout(autosize=False, margin={'l': 0, 'r': 0, 't': 0, 'b': 0}, plot_bgcolor='white',
+                           paper_bgcolor='white', legend_title_text='Method', legend_title_font_size=17,
+                           legend=dict(yanchor="bottom", y=0.1, xanchor="right", x=0.99, font=dict(size=17), ), )
+         fig.update_xaxes(showgrid=True, gridcolor='lightgrey')
+         fig.update_yaxes(showgrid=True, gridcolor='lightgrey')
+
+         fig.write_image(os.path.join(f'.plotly_cache/baseline_cache', f'{metric_name}.png'))
+         wandb.log({metric_name: wandb.Image(os.path.join(f'.plotly_cache/baseline_cache', f'{metric_name}.png'), caption=f"{metric_name}")})
+         images.append(wandb.Image(os.path.join(f'.plotly_cache/baseline_cache', f'{metric_name}.png'), caption=f"{metric_name}"))
+     wandb.log({'images': images})
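
The clash metrics in the script above rely on a NumPy broadcasting trick worth spelling out: inserting `None` axes turns coordinate arrays into a full pairwise-distance matrix, and the diagonal is masked with `np.inf` so an atom's zero distance to itself never counts as a self-intersection. A minimal standalone sketch (illustrative coordinates only):

    import numpy as np

    pos = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 2.0, 0.0]])  # 3 atoms
    # (3, 1, 3) - (1, 3, 3) broadcasts to all pairwise difference vectors
    dists = np.linalg.norm(pos[:, None, :] - pos[None, :, :], axis=-1)
    dists = np.where(np.eye(len(pos)), np.inf, dists)  # mask self-distances
    print(dists.min())  # 1.0: the closest pair of distinct atoms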
baselines/baseline_gnina.py ADDED
@@ -0,0 +1,175 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # small script to extract the ligand and save it in a separate file because GNINA will use the ligand position as
2
+ # initial pose
3
+ import os
4
+ import shutil
5
+ import subprocess
6
+ import sys
7
+
8
+ import time
9
+ from argparse import ArgumentParser, FileType
10
+ from datetime import datetime
11
+
12
+ import numpy as np
13
+ import pandas as pd
14
+ from biopandas.pdb import PandasPdb
15
+ from rdkit import Chem
16
+ from rdkit.Chem import AllChem, MolToPDBFile
17
+ from scipy.spatial.distance import cdist
18
+
19
+ from datasets.pdbbind import read_mol
20
+ from utils.utils import read_strings_from_txt
21
+
22
+ parser = ArgumentParser()
23
+ parser.add_argument('--data_dir', type=str, default='data/PDBBind_processed', help='')
24
+ parser.add_argument('--file_suffix', type=str, default='_baseline_ligand', help='Path to folder with trained model and hyperparameters')
25
+ parser.add_argument('--results_path', type=str, default='results/gnina_predictions', help='')
26
+ parser.add_argument('--complex_names_path', type=str, default='data/splits/timesplit_test', help='')
27
+ parser.add_argument('--seed_molecules_path', type=str, default=None, help='Use the molecules at seed molecule path as initialization and only search around them')
28
+ parser.add_argument('--seed_molecule_filename', type=str, default='equibind_corrected.sdf', help='Use the molecules at seed molecule path as initialization and only search around them')
29
+ parser.add_argument('--smina', action='store_true', default=False, help='')
30
+ parser.add_argument('--no_gpu', action='store_true', default=False, help='')
31
+ parser.add_argument('--exhaustiveness', type=int, default=8, help='')
32
+ parser.add_argument('--num_cpu', type=int, default=16, help='')
33
+ parser.add_argument('--pocket_mode', action='store_true', default=False, help='')
34
+ parser.add_argument('--pocket_cutoff', type=int, default=5, help='')
35
+ parser.add_argument('--num_modes', type=int, default=10, help='')
36
+ parser.add_argument('--autobox_add', type=int, default=4, help='')
37
+ parser.add_argument('--use_p2rank_pocket', action='store_true', default=False, help='')
38
+ parser.add_argument('--skip_p2rank', action='store_true', default=False, help='')
39
+ parser.add_argument('--prank_path', type=str, default='/Users/hstark/projects/p2rank_2.3/prank', help='')
40
+ parser.add_argument('--skip_existing', action='store_true', default=False, help='')
41
+
42
+
43
+
44
+
45
+
46
+ args = parser.parse_args()
47
+
48
+ class Logger(object):
49
+ def __init__(self, logpath, syspart=sys.stdout):
50
+ self.terminal = syspart
51
+ self.log = open(logpath, "a")
52
+
53
+ def write(self, message):
54
+ self.terminal.write(message)
55
+ self.log.write(message)
56
+ self.log.flush()
57
+
58
+ def flush(self):
59
+ # this flush method is needed for python 3 compatibility.
60
+ # this handles the flush command by doing nothing.
61
+ # you might want to specify some extra behavior here.
62
+ pass
63
+
64
+
65
+ def log(*args):
66
+ print(f'[{datetime.now()}]', *args)
+
+ # load the list of complexes and redirect stdout/stderr to log files in the results folder
+ names = read_strings_from_txt(args.complex_names_path)
+
+ if os.path.exists(args.results_path) and not args.skip_existing:
+     shutil.rmtree(args.results_path)
+ os.makedirs(args.results_path, exist_ok=True)
+ sys.stdout = Logger(logpath=f'{args.results_path}/gnina.log', syspart=sys.stdout)
+ sys.stderr = Logger(logpath=f'{args.results_path}/error.log', syspart=sys.stderr)
+
+ p2rank_cache_path = "results/.p2rank_cache"
+ if args.use_p2rank_pocket and not args.skip_p2rank:
+     os.makedirs(p2rank_cache_path, exist_ok=True)
+     pdb_files_cache = os.path.join(p2rank_cache_path, 'pdb_files')
+     os.makedirs(pdb_files_cache, exist_ok=True)
+     with open(f"{p2rank_cache_path}/pdb_list_p2rank.txt", "w") as out:
+         for name in names:
+             shutil.copy(os.path.join(args.data_dir, name, f'{name}_protein_processed.pdb'), f'{pdb_files_cache}/{name}_protein_processed.pdb')
+             out.write(os.path.join('pdb_files', f'{name}_protein_processed.pdb\n'))
+     cmd = f"bash {args.prank_path} predict {p2rank_cache_path}/pdb_list_p2rank.txt -o {p2rank_cache_path}/p2rank_output -threads 4"
+     os.system(cmd)
+
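+ # main docking loop: one GNINA/smina call per complex, timed individually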
+ all_times = []
+ start_time = time.time()
+ for i, name in enumerate(names):
+     os.makedirs(os.path.join(args.results_path, name), exist_ok=True)
+     log('\n')
+     log(f'complex {i} of {len(names)}')
+     # call gnina to find binding pose
+     rec_path = os.path.join(args.data_dir, name, f'{name}_protein_processed.pdb')
+     prediction_output_name = os.path.join(args.results_path, name, f'{name}{args.file_suffix}.pdb')
+     log_path = os.path.join(args.results_path, name, f'{name}{args.file_suffix}.log')
+     if args.seed_molecules_path is not None: seed_mol_path = os.path.join(args.seed_molecules_path, name, args.seed_molecule_filename)
+     if args.skip_existing and os.path.exists(prediction_output_name): continue
+
+     if args.pocket_mode:
+         mol = read_mol(args.data_dir, name, remove_hs=False)
+         rec = PandasPdb().read_pdb(rec_path)
+         rec_df = rec.get(s='c-alpha')
+         rec_pos = rec_df[['x_coord', 'y_coord', 'z_coord']].to_numpy().squeeze().astype(np.float32)
+         lig_pos = mol.GetConformer().GetPositions()
+         d = cdist(rec_pos, lig_pos)
+         label = np.any(d < args.pocket_cutoff, axis=1)
+
+         if np.any(label):
+             center_pocket = rec_pos[label].mean(axis=0)
+         else:
+             print("No pocket residue below minimum distance ", args.pocket_cutoff, "taking closest at", np.min(d))
+             center_pocket = rec_pos[np.argmin(np.min(d, axis=1))]
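+         # search box: cube centred on the pocket whose edge is the ligand diameter plus a fixed 8 A margin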
+         radius_pocket = np.max(np.linalg.norm(lig_pos - center_pocket[None, :], axis=1))
+         diameter_pocket = radius_pocket * 2
+         center_x = center_pocket[0]
+         size_x = diameter_pocket + 8
+         center_y = center_pocket[1]
+         size_y = diameter_pocket + 8
+         center_z = center_pocket[2]
+         size_z = diameter_pocket + 8
+
+     mol_rdkit = read_mol(args.data_dir, name, remove_hs=False)
+     single_time = time.time()
+
+     mol_rdkit.RemoveAllConformers()
+     ps = AllChem.ETKDGv2()
+     conf_id = AllChem.EmbedMolecule(mol_rdkit, ps)
+     if conf_id == -1:
+         print('RDKit could not embed the molecule deterministically; retrying with random coordinates.')
+         ps.useRandomCoords = True
+         AllChem.EmbedMolecule(mol_rdkit, ps)
+     AllChem.MMFFOptimizeMolecule(mol_rdkit, confId=0)
+     rdkit_mol_path = os.path.join(args.data_dir, name, f'{name}_rdkit_ligand.pdb')
+     MolToPDBFile(mol_rdkit, rdkit_mol_path)
+
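+     # if p2rank produced no pocket prediction for this complex, fall back to a whole-protein autobox below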
+     fallback_without_p2rank = False
+     if args.use_p2rank_pocket:
+         df = pd.read_csv(f'{p2rank_cache_path}/p2rank_output/{name}_protein_processed.pdb_predictions.csv')
+         rdkit_lig_pos = mol_rdkit.GetConformer().GetPositions()
+         diameter_pocket = np.max(cdist(rdkit_lig_pos, rdkit_lig_pos))
+         size_x = diameter_pocket + args.autobox_add * 2
+         size_y = diameter_pocket + args.autobox_add * 2
+         size_z = diameter_pocket + args.autobox_add * 2
+         if df.empty:
+             fallback_without_p2rank = True
+         else:
+             center_x = df.iloc[0][' center_x']
+             center_y = df.iloc[0][' center_y']
+             center_z = df.iloc[0][' center_z']
+
+     log(f'processing {rec_path}')
+     if (not args.pocket_mode and not args.use_p2rank_pocket) or fallback_without_p2rank:
+         result = subprocess.run(
+             f"gnina --receptor {rec_path} --ligand {rdkit_mol_path} --num_modes {args.num_modes} -o {prediction_output_name} {'--no_gpu' if args.no_gpu else ''} --autobox_ligand {rec_path if args.seed_molecules_path is None else seed_mol_path} --autobox_add {args.autobox_add} --log {log_path} --exhaustiveness {args.exhaustiveness} --cpu {args.num_cpu} {'--cnn_scoring none' if args.smina else ''}",
+             shell=True)
+     else:
+         result = subprocess.run(
+             f"gnina --receptor {rec_path} --ligand {rdkit_mol_path} --num_modes {args.num_modes} -o {prediction_output_name} {'--no_gpu' if args.no_gpu else ''} --log {log_path} --exhaustiveness {args.exhaustiveness} --cpu {args.num_cpu} {'--cnn_scoring none' if args.smina else ''} --center_x {center_x} --center_y {center_y} --center_z {center_z} --size_x {size_x} --size_y {size_y} --size_z {size_z}",
+             shell=True)
+     log(result)
+     all_times.append(time.time() - single_time)
+
+     log("single time: --- %s seconds ---" % (time.time() - single_time))
+     log("time so far: --- %s seconds ---" % (time.time() - start_time))
+     log('\n')
+ log(all_times)
+ log("--- %s seconds ---" % (time.time() - start_time))
baselines/baseline_run_tankbind_parallel.sh ADDED
@@ -0,0 +1,5 @@
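+ # launches 16 TankBind runtime workers in parallel, each on a disjoint shard of the test split (paths are machine-specific)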
+ for i in $(seq 0 15); do
+     python baseline_tankbind_runtime.py --parallel_id $i --parallel_tot 16 --prank_path /data/rsg/nlp/hstark/TankBind/packages/p2rank_2.3/prank --data_dir /data/rsg/nlp/hstark/ligbind/data/PDBBind_processed --split_path /data/rsg/nlp/hstark/ligbind/data/splits/timesplit_test --results_path /data/rsg/nlp/hstark/ligbind/results/tankbind_16_worker_runtime --device cpu --skip_p2rank --num_workers 1 --skip_multiple_pocket_outputs &
+ done
+ wait
baselines/baseline_tankbind_evaluation.py ADDED
@@ -0,0 +1,239 @@
+ import copy
+ import os
+
+ import plotly.express as px
+ import time
+ from argparse import FileType, ArgumentParser
+
+ import numpy as np
+ import pandas as pd
+ import wandb
+ from biopandas.pdb import PandasPdb
+ from rdkit import Chem
+ from rdkit.Chem import RemoveHs
+
+ from tqdm import tqdm
+
+ from datasets.pdbbind import read_mol
+ from datasets.process_mols import read_molecule, read_sdf_or_mol2
+ from utils.utils import read_strings_from_txt, get_symmetry_rmsd, remove_all_hs
+
+ parser = ArgumentParser()
+ parser.add_argument('--config', type=FileType(mode='r'), default=None)
+ parser.add_argument('--run_name', type=str, default='tankbind', help='')
+ parser.add_argument('--data_dir', type=str, default='data/PDBBind_processed', help='')
+ parser.add_argument('--renumbered_atoms_dir', type=str, default='../TankBind/examples/tankbind_pdb/renumber_atom_index_same_as_smiles', help='')
+ parser.add_argument('--results_path', type=str, default='results/tankbind_top5', help='Path to the folder with the TankBind predictions that should be evaluated')
+ parser.add_argument('--project', type=str, default='ligbind_inf', help='')
+ parser.add_argument('--wandb', action='store_true', default=True, help='')
+ parser.add_argument('--num_predictions', type=int, default=5, help='')
+ args = parser.parse_args()
+
+ names = read_strings_from_txt('data/splits/timesplit_test')
+ names_no_rec_overlap = read_strings_from_txt('data/splits/timesplit_test_no_rec_overlap')
+
+ if args.wandb:
+     wandb.init(
+         entity='coarse-graining-mit',
+         settings=wandb.Settings(start_method="fork"),
+         project=args.project,
+         name=args.run_name,
+         config=args
+     )
+
+ all_times = []
+ rmsds_list = []
+ unsym_rmsds_list = []
+ centroid_distances_list = []
+ min_cross_distances_list = []
+ min_self_distances_list = []
+ made_prediction_list = []
+ steric_clash_list = []
+ without_rec_overlap_list = []
+
+ start_time = time.time()
+ successful_names_list = []
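+ # for each complex: load the ground-truth ligand (atoms renumbered to the canonical SMILES order) and up to
+ # num_predictions predicted poses, then compare them via symmetry-corrected RMSD against the crystal pose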
+ for name in tqdm(names):
+     mol, _ = read_sdf_or_mol2(f"{args.renumbered_atoms_dir}/{name}.sdf", None)
+     sm = Chem.MolToSmiles(mol)
+     m_order = list(mol.GetPropsAsDict(includePrivate=True, includeComputed=True)['_smilesAtomOutputOrder'])
+     mol = Chem.RenumberAtoms(mol, m_order)
+     mol = Chem.RemoveHs(mol)
+     orig_ligand_pos = np.array(mol.GetConformer().GetPositions())
+
+     assert os.path.exists(os.path.join(args.results_path, name, f'{name}_tankbind_0.sdf'))
+     ligand_pos = []
+     for i in range(args.num_predictions):
+         if not os.path.exists(os.path.join(args.results_path, name, f'{name}_tankbind_{i}.sdf')): break
+         mol_pred, _ = read_sdf_or_mol2(os.path.join(args.results_path, name, f'{name}_tankbind_{i}.sdf'), None)
+         sm = Chem.MolToSmiles(mol_pred)
+         m_order = list(mol_pred.GetPropsAsDict(includePrivate=True, includeComputed=True)['_smilesAtomOutputOrder'])
+         mol_pred = Chem.RenumberAtoms(mol_pred, m_order)
+         mol_pred = RemoveHs(mol_pred)
+         ligand_pos.append(np.array(mol_pred.GetConformer().GetPositions()))
+     ligand_pos = np.asarray(ligand_pos)
+
+     # the uncorrected RMSD is computed outside the try block so it is always defined below
+     unsym_rmsd = np.sqrt(((ligand_pos - orig_ligand_pos) ** 2).sum(axis=2).mean(axis=1))
+     try:
+         rmsd = np.array(get_symmetry_rmsd(mol, orig_ligand_pos, [l for l in ligand_pos], mol_pred))
+     except Exception as e:
+         print("Using non corrected RMSD because of the error:", e)
+         rmsd = unsym_rmsd
+
+     num_pockets = len(ligand_pos)
+     unsym_rmsds_list.append(np.pad(unsym_rmsd, (0, 10 - len(unsym_rmsd)), 'constant', constant_values=0))
+     rmsds_list.append(np.pad(rmsd, (0, 10 - len(rmsd)), 'constant', constant_values=0))
+     centroid_distance = np.linalg.norm(ligand_pos.mean(axis=1) - orig_ligand_pos[None, :].mean(axis=1), axis=1)
+     centroid_distances_list.append(np.pad(centroid_distance, (0, 10 - len(rmsd)), 'constant', constant_values=0))
+
+     rec_path = os.path.join(args.data_dir, name, f'{name}_protein_processed.pdb')
+     if not os.path.exists(rec_path):
+         rec_path = os.path.join(args.data_dir, name, f'{name}_protein_obabel_reduce.pdb')
+     rec = PandasPdb().read_pdb(rec_path)
+     rec_df = rec.df['ATOM']
+     receptor_pos = rec_df[['x_coord', 'y_coord', 'z_coord']].to_numpy().squeeze().astype(np.float32)
+     receptor_pos = np.tile(receptor_pos, (10, 1, 1))
+
+     ligand_pos_padded = np.pad(ligand_pos, ((0, 10 - len(ligand_pos)), (0, 0), (0, 0)), 'constant', constant_values=np.inf)
+     ligand_pos_padded_zero = np.pad(ligand_pos, ((0, 10 - len(ligand_pos)), (0, 0), (0, 0)), 'constant', constant_values=0)
+     cross_distances = np.linalg.norm(receptor_pos[:, :, None, :] - ligand_pos_padded[:, None, :, :], axis=-1)
+     self_distances = np.linalg.norm(ligand_pos_padded_zero[:, :, None, :] - ligand_pos_padded_zero[:, None, :, :], axis=-1)
+     self_distances = np.where(np.eye(self_distances.shape[2]), np.inf, self_distances)
+     min_self_distances_list.append(np.min(self_distances, axis=(1, 2)))
+     min_cross_distance = np.min(cross_distances, axis=(1, 2))
+     individual_made_prediction = np.pad(np.ones(num_pockets), (0, 10 - len(rmsd)), 'constant', constant_values=0)
+     made_prediction_list.append(individual_made_prediction)
+     min_cross_distances_list.append(min_cross_distance)
+     successful_names_list.append(name)
+     without_rec_overlap_list.append(1 if name in names_no_rec_overlap else 0)
+
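+ # metrics are computed twice: once over the full test set and once restricted to receptors without training-set overlap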
+ performance_metrics = {}
+ for overlap in ['', 'no_overlap_']:
+     if 'no_overlap_' == overlap:
+         without_rec_overlap = np.array(without_rec_overlap_list, dtype=bool)
+         unsym_rmsds = np.array(unsym_rmsds_list)[without_rec_overlap]
+         rmsds = np.array(rmsds_list)[without_rec_overlap]
+         centroid_distances = np.array(centroid_distances_list)[without_rec_overlap]
+         min_cross_distances = np.array(min_cross_distances_list)[without_rec_overlap]
+         min_self_distances = np.array(min_self_distances_list)[without_rec_overlap]
+         made_prediction = np.array(made_prediction_list)[without_rec_overlap]
+         successful_names = np.array(successful_names_list)[without_rec_overlap]
+     else:
+         unsym_rmsds = np.array(unsym_rmsds_list)
+         rmsds = np.array(rmsds_list)
+         centroid_distances = np.array(centroid_distances_list)
+         min_cross_distances = np.array(min_cross_distances_list)
+         min_self_distances = np.array(min_self_distances_list)
+         made_prediction = np.array(made_prediction_list)
+         successful_names = np.array(successful_names_list)
+
+     inf_rmsds = copy.deepcopy(rmsds)
+     inf_rmsds[~made_prediction.astype(bool)] = np.inf
+     inf_centroid_distances = copy.deepcopy(centroid_distances)
+     inf_centroid_distances[~made_prediction.astype(bool)] = np.inf
+
+     np.save(os.path.join(args.results_path, f'{overlap}rmsds.npy'), rmsds)
+     np.save(os.path.join(args.results_path, f'{overlap}names.npy'), np.array(successful_names))
+     np.save(os.path.join(args.results_path, f'{overlap}centroid_distances.npy'), centroid_distances)
+     np.save(os.path.join(args.results_path, f'{overlap}min_cross_distances.npy'), min_cross_distances)
+     np.save(os.path.join(args.results_path, f'{overlap}min_self_distances.npy'), min_self_distances)
+
+     performance_metrics.update({
+         f'{overlap}self_intersect_fraction': (100 * (min_self_distances[:, 0] < 0.4).sum() / len(min_self_distances[:, 0])),
+         f'{overlap}steric_clash_fraction': (100 * (min_cross_distances[:, 0] < 0.4).sum() / len(min_cross_distances[:, 0])),
+         f'{overlap}mean_rmsd': rmsds[:, 0].mean(),
+         f'{overlap}unsym_rmsds_below_2': (100 * (unsym_rmsds[:, 0] < 2).sum() / len(unsym_rmsds[:, 0])),
+         f'{overlap}rmsds_below_2': (100 * (rmsds[:, 0] < 2).sum() / len(rmsds[:, 0])),
+         f'{overlap}rmsds_below_5': (100 * (rmsds[:, 0] < 5).sum() / len(rmsds[:, 0])),
+         f'{overlap}rmsds_percentile_25': np.percentile(rmsds[:, 0], 25).round(2),
+         f'{overlap}rmsds_percentile_50': np.percentile(rmsds[:, 0], 50).round(2),
+         f'{overlap}rmsds_percentile_75': np.percentile(rmsds[:, 0], 75).round(2),
+
+         f'{overlap}mean_centroid': round(centroid_distances[:, 0].mean(), 2),
+         f'{overlap}centroid_below_2': round(100 * (centroid_distances[:, 0] < 2).sum() / len(centroid_distances[:, 0]), 2),
+         f'{overlap}centroid_below_5': round(100 * (centroid_distances[:, 0] < 5).sum() / len(centroid_distances[:, 0]), 2),
+         f'{overlap}centroid_percentile_25': np.percentile(centroid_distances[:, 0], 25).round(2),
+         f'{overlap}centroid_percentile_50': np.percentile(centroid_distances[:, 0], 50).round(2),
+         f'{overlap}centroid_percentile_75': np.percentile(centroid_distances[:, 0], 75).round(2),
+     })
+
+     top5_rmsds = np.min(inf_rmsds[:, :5], axis=1)
+     top5_centroid_distances = centroid_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(inf_rmsds[:, :5], axis=1)][:, 0]
+     top5_min_cross_distances = min_cross_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(inf_rmsds[:, :5], axis=1)][:, 0]
+     top5_min_self_distances = min_self_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(inf_rmsds[:, :5], axis=1)][:, 0]
+     performance_metrics.update({
+         f'{overlap}top5_steric_clash_fraction': round(100 * (top5_min_cross_distances < 0.4).sum() / len(top5_min_cross_distances), 2),
+         f'{overlap}top5_self_intersect_fraction': round(100 * (top5_min_self_distances < 0.4).sum() / len(top5_min_self_distances), 2),
+         f'{overlap}top5_rmsds_below_2': round(100 * (top5_rmsds < 2).sum() / len(top5_rmsds), 2),
+         f'{overlap}top5_rmsds_below_5': round(100 * (top5_rmsds < 5).sum() / len(top5_rmsds), 2),
+         f'{overlap}top5_rmsds_percentile_25': np.percentile(top5_rmsds, 25).round(2),
+         f'{overlap}top5_rmsds_percentile_50': np.percentile(top5_rmsds, 50).round(2),
+         f'{overlap}top5_rmsds_percentile_75': np.percentile(top5_rmsds, 75).round(2),
+
+         f'{overlap}top5_centroid_below_2': round(100 * (top5_centroid_distances < 2).sum() / len(top5_centroid_distances), 2),
+         f'{overlap}top5_centroid_below_5': round(100 * (top5_centroid_distances < 5).sum() / len(top5_centroid_distances), 2),
+         f'{overlap}top5_centroid_percentile_25': np.percentile(top5_centroid_distances, 25).round(2),
+         f'{overlap}top5_centroid_percentile_50': np.percentile(top5_centroid_distances, 50).round(2),
+         f'{overlap}top5_centroid_percentile_75': np.percentile(top5_centroid_distances, 75).round(2),
+     })
+
+     top10_rmsds = np.min(inf_rmsds[:, :10], axis=1)
+     top10_centroid_distances = centroid_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(inf_rmsds[:, :10], axis=1)][:, 0]
+     top10_min_cross_distances = min_cross_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(inf_rmsds[:, :10], axis=1)][:, 0]
+     top10_min_self_distances = min_self_distances[np.arange(rmsds.shape[0])[:, None], np.argsort(inf_rmsds[:, :10], axis=1)][:, 0]
+     performance_metrics.update({
+         f'{overlap}top10_steric_clash_fraction': round(100 * (top10_min_cross_distances < 0.4).sum() / len(top10_min_cross_distances), 2),
+         f'{overlap}top10_self_intersect_fraction': round(100 * (top10_min_self_distances < 0.4).sum() / len(top10_min_self_distances), 2),
+         f'{overlap}top10_rmsds_below_2': round(100 * (top10_rmsds < 2).sum() / len(top10_rmsds), 2),
+         f'{overlap}top10_rmsds_below_5': round(100 * (top10_rmsds < 5).sum() / len(top10_rmsds), 2),
+         f'{overlap}top10_rmsds_percentile_25': np.percentile(top10_rmsds, 25).round(2),
+         f'{overlap}top10_rmsds_percentile_50': np.percentile(top10_rmsds, 50).round(2),
+         f'{overlap}top10_rmsds_percentile_75': np.percentile(top10_rmsds, 75).round(2),
+
+         f'{overlap}top10_centroid_below_2': round(100 * (top10_centroid_distances < 2).sum() / len(top10_centroid_distances), 2),
+         f'{overlap}top10_centroid_below_5': round(100 * (top10_centroid_distances < 5).sum() / len(top10_centroid_distances), 2),
+         f'{overlap}top10_centroid_percentile_25': np.percentile(top10_centroid_distances, 25).round(2),
+         f'{overlap}top10_centroid_percentile_50': np.percentile(top10_centroid_distances, 50).round(2),
+         f'{overlap}top10_centroid_percentile_75': np.percentile(top10_centroid_distances, 75).round(2),
+     })
+ for k in performance_metrics:
+     print(k, performance_metrics[k])
+
+ if args.wandb:
+     wandb.log(performance_metrics)
+     histogram_metrics_list = [('rmsd', rmsds[:, 0]),
+                               ('centroid_distance', centroid_distances[:, 0]),
+                               ('mean_rmsd', rmsds[:, 0]),
+                               ('mean_centroid_distance', centroid_distances[:, 0])]
+     histogram_metrics_list.append(('top5_rmsds', top5_rmsds))
+     histogram_metrics_list.append(('top5_centroid_distances', top5_centroid_distances))
+     histogram_metrics_list.append(('top10_rmsds', top10_rmsds))
+     histogram_metrics_list.append(('top10_centroid_distances', top10_centroid_distances))
+
+     os.makedirs('.plotly_cache/baseline_cache', exist_ok=True)
+     images = []
+     for metric_name, metric in histogram_metrics_list:
+         d = {args.results_path: metric}
+         df = pd.DataFrame(data=d)
+         fig = px.ecdf(df, width=900, height=600, range_x=[0, 40])
+         fig.add_vline(x=2, annotation_text='2 Å', annotation_font_size=20, annotation_position="top right",
+                       line_dash='dash', line_color='firebrick', annotation_font_color='firebrick')
+         fig.add_vline(x=5, annotation_text='5 Å', annotation_font_size=20, annotation_position="top right",
+                       line_dash='dash', line_color='green', annotation_font_color='green')
+         fig.update_xaxes(title=f'{metric_name} in Angstrom', title_font={"size": 20}, tickfont={"size": 20})
+         fig.update_yaxes(title='Fraction of predictions with lower error', title_font={"size": 20},
+                          tickfont={"size": 20})
+         fig.update_layout(autosize=False, margin={'l': 0, 'r': 0, 't': 0, 'b': 0}, plot_bgcolor='white',
+                           paper_bgcolor='white', legend_title_text='Method', legend_title_font_size=17,
+                           legend=dict(yanchor="bottom", y=0.1, xanchor="right", x=0.99, font=dict(size=17), ), )
+         fig.update_xaxes(showgrid=True, gridcolor='lightgrey')
+         fig.update_yaxes(showgrid=True, gridcolor='lightgrey')
+
+         fig.write_image(os.path.join('.plotly_cache/baseline_cache', f'{metric_name}.png'))
+         wandb.log({metric_name: wandb.Image(os.path.join('.plotly_cache/baseline_cache', f'{metric_name}.png'), caption=f"{metric_name}")})
+         images.append(wandb.Image(os.path.join('.plotly_cache/baseline_cache', f'{metric_name}.png'), caption=f"{metric_name}"))
+     wandb.log({'images': images})
baselines/baseline_tankbind_runtime.py ADDED
@@ -0,0 +1,342 @@
+ # This file needs to be run in the TankBind repository together with baseline_run_tankbind_parallel.sh
+
+ import sys
+ import time
+ from multiprocessing import Pool
+
+ import copy
+ import warnings
+ from argparse import ArgumentParser
+
+ from rdkit.Chem import AllChem, RemoveHs
+
+ from feature_utils import save_cleaned_protein, read_mol
+ from generation_utils import get_LAS_distance_constraint_mask, get_info_pred_distance, write_with_new_coords
+ import logging
+ from torch_geometric.loader import DataLoader
+ from tqdm import tqdm  # pip install tqdm if this fails
+ from model import get_model
+ # from utils import *
+ import torch
+
+ from data import TankBind_prediction
+
+ import os
+ import numpy as np
+ import pandas as pd
+ import rdkit.Chem as Chem
+ from feature_utils import generate_sdf_from_smiles_using_rdkit
+ from feature_utils import get_protein_feature
+ from Bio.PDB import PDBParser
+ from feature_utils import extract_torchdrug_feature_from_mol
+
+
+ def read_strings_from_txt(path):
+     # every line will be one element of the returned list
+     with open(path) as file:
+         lines = file.readlines()
+     return [line.rstrip() for line in lines]
+
+
+ def read_molecule(molecule_file, sanitize=False, calc_charges=False, remove_hs=False):
+     if molecule_file.endswith('.mol2'):
+         mol = Chem.MolFromMol2File(molecule_file, sanitize=False, removeHs=False)
+     elif molecule_file.endswith('.sdf'):
+         supplier = Chem.SDMolSupplier(molecule_file, sanitize=False, removeHs=False)
+         mol = supplier[0]
+     elif molecule_file.endswith('.pdbqt'):
+         with open(molecule_file) as file:
+             pdbqt_data = file.readlines()
+         pdb_block = ''
+         for line in pdbqt_data:
+             pdb_block += '{}\n'.format(line[:66])
+         mol = Chem.MolFromPDBBlock(pdb_block, sanitize=False, removeHs=False)
+     elif molecule_file.endswith('.pdb'):
+         mol = Chem.MolFromPDBFile(molecule_file, sanitize=False, removeHs=False)
+     else:
+         raise ValueError('Expect the format of the molecule_file to be '
+                          'one of .mol2, .sdf, .pdbqt and .pdb, got {}'.format(molecule_file))
+     try:
+         if sanitize or calc_charges:
+             Chem.SanitizeMol(mol)
+
+         if calc_charges:
+             # Compute Gasteiger charges on the molecule.
+             try:
+                 AllChem.ComputeGasteigerCharges(mol)
+             except Exception:
+                 warnings.warn('Unable to compute charges for the molecule.')
+
+         if remove_hs:
+             mol = Chem.RemoveHs(mol, sanitize=sanitize)
+     except Exception:
+         return None
+
+     return mol
+
+
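+ # writes out, for each complex, only the pose from the pocket with the highest predicted affinity (the "chosen" pose)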
+ def parallel_save_prediction(arguments):
+     dataset, y_pred_list, chosen, rdkit_mol_path, result_folder, name = arguments
+     for idx, line in chosen.iterrows():
+         pocket_name = line['pocket_name']
+         compound_name = line['compound_name']
+         ligandName = compound_name.split("_")[1]
+         dataset_index = line['dataset_index']
+         coords = dataset[dataset_index].coords.to('cpu')
+         protein_nodes_xyz = dataset[dataset_index].node_xyz.to('cpu')
+         n_compound = coords.shape[0]
+         n_protein = protein_nodes_xyz.shape[0]
+         y_pred = y_pred_list[dataset_index].reshape(n_protein, n_compound).to('cpu')
+         compound_pair_dis_constraint = torch.cdist(coords, coords)
+         mol = Chem.MolFromMolFile(rdkit_mol_path)
+         LAS_distance_constraint_mask = get_LAS_distance_constraint_mask(mol).bool()
+         pred_dist_info = get_info_pred_distance(coords, y_pred, protein_nodes_xyz, compound_pair_dis_constraint,
+                                                 LAS_distance_constraint_mask=LAS_distance_constraint_mask,
+                                                 n_repeat=1, show_progress=False)
+
+         toFile = f'{result_folder}/{name}_tankbind_chosen.sdf'
+         new_coords = pred_dist_info.sort_values("loss")['coords'].iloc[0].astype(np.double)
+         write_with_new_coords(mol, new_coords, toFile)
+
+ if __name__ == '__main__':
+     tankbind_src_folder = "../tankbind"
+     sys.path.insert(0, tankbind_src_folder)
+     torch.set_num_threads(16)
+     parser = ArgumentParser()
+     parser.add_argument('--data_dir', type=str, default='/Users/hstark/projects/ligbind/data/PDBBind_processed', help='')
+     parser.add_argument('--split_path', type=str, default='/Users/hstark/projects/ligbind/data/splits/timesplit_test', help='')
+     parser.add_argument('--prank_path', type=str, default='/Users/hstark/projects/p2rank_2.3/prank', help='')
+     parser.add_argument('--results_path', type=str, default='results/tankbind_results', help='')
+     parser.add_argument('--skip_existing', action='store_true', default=False, help='')
+     parser.add_argument('--skip_p2rank', action='store_true', default=False, help='')
+     parser.add_argument('--skip_multiple_pocket_outputs', action='store_true', default=False, help='')
+     parser.add_argument('--device', type=str, default='cpu', help='')
+     parser.add_argument('--num_workers', type=int, default=1, help='')
+     parser.add_argument('--parallel_id', type=int, default=0, help='')
+     parser.add_argument('--parallel_tot', type=int, default=1, help='')
+     args = parser.parse_args()
+
+     device = args.device
+     cache_path = "tankbind_cache"
+     os.makedirs(cache_path, exist_ok=True)
+     os.makedirs(args.results_path, exist_ok=True)
+
+
+     logging.basicConfig(level=logging.INFO)
+     model = get_model(0, logging, device)
+     # re-dock model
+     # modelFile = "../saved_models/re_dock.pt"
+     # self-dock model
+     modelFile = f"{tankbind_src_folder}/../saved_models/self_dock.pt"
+
+     model.load_state_dict(torch.load(modelFile, map_location=device))
+     _ = model.eval()
+     batch_size = 5
+     names = read_strings_from_txt(args.split_path)
+     if args.parallel_tot > 1:
+         size = len(names) // args.parallel_tot + 1
+         names = names[args.parallel_id * size:(args.parallel_id + 1) * size]
+     rmsds = []
+
+     forward_pass_time = []
+     times_preprocess = []
+     times_inference = []
+     top_10_generation_time = []
+     top_1_generation_time = []
+     start_time = time.time()
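+     # stage 1 (optional): write cleaned copies of the proteins and run p2rank once over the whole shard to get candidate pockets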
+     if not args.skip_p2rank:
+         for name in names:
+             if args.skip_existing and os.path.exists(f'{args.results_path}/{name}/{name}_tankbind_1.sdf'): continue
+             print("Now processing: ", name)
+             protein_path = f'{args.data_dir}/{name}/{name}_protein_processed.pdb'
+             cleaned_protein_path = f"{cache_path}/{name}_protein_tankbind_cleaned.pdb"  # if you change this you also need to change below
+             parser = PDBParser(QUIET=True)
+             s = parser.get_structure(name, protein_path)
+             c = s[0]
+             clean_res_list, ligand_list = save_cleaned_protein(c, cleaned_protein_path)
+
+         with open(f"{cache_path}/pdb_list_p2rank.txt", "w") as out:
+             for name in names:
+                 out.write(f"{name}_protein_tankbind_cleaned.pdb\n")
+         cmd = f"bash {args.prank_path} predict {cache_path}/pdb_list_p2rank.txt -o {cache_path}/p2rank -threads 4"
+         os.system(cmd)
+     times_preprocess.append(time.time() - start_time)
+     p2_rank_time = time.time() - start_time
+
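+     # stage 2: featurise each complex and run TankBind on the protein centre plus every p2rank pocket;
+     # poses are then ranked by the predicted affinity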
+     list_to_parallelize = []
+     for name in tqdm(names):
+         single_preprocess_time = time.time()
+         if args.skip_existing and os.path.exists(f'{args.results_path}/{name}/{name}_tankbind_1.sdf'): continue
+         print("Now processing: ", name)
+         protein_path = f'{args.data_dir}/{name}/{name}_protein_processed.pdb'
+         ligand_path = f"{args.data_dir}/{name}/{name}_ligand.sdf"
+         cleaned_protein_path = f"{cache_path}/{name}_protein_tankbind_cleaned.pdb"  # if you change this you also need to change below
+         rdkit_mol_path = f"{cache_path}/{name}_rdkit_ligand.sdf"
+
+         parser = PDBParser(QUIET=True)
+         s = parser.get_structure(name, protein_path)
+         c = s[0]
+         clean_res_list, ligand_list = save_cleaned_protein(c, cleaned_protein_path)
+         lig, _ = read_mol(f"{args.data_dir}/{name}/{name}_ligand.sdf", f"{args.data_dir}/{name}/{name}_ligand.mol2")
+
+         lig = RemoveHs(lig)
+         smiles = Chem.MolToSmiles(lig)
+         generate_sdf_from_smiles_using_rdkit(smiles, rdkit_mol_path, shift_dis=0)
+
+         parser = PDBParser(QUIET=True)
+         s = parser.get_structure("x", cleaned_protein_path)
+         res_list = list(s.get_residues())
+
+         protein_dict = {}
+         protein_dict[name] = get_protein_feature(res_list)
+         compound_dict = {}
+
+         mol = Chem.MolFromMolFile(rdkit_mol_path)
+         compound_dict[name + f"_{name}" + "_rdkit"] = extract_torchdrug_feature_from_mol(mol, has_LAS_mask=True)
+
+         info = []
+         for compound_name in list(compound_dict.keys()):
+             # use protein center as the block center.
+             com = ",".join([str(a.round(3)) for a in protein_dict[name][0].mean(axis=0).numpy()])
+             info.append([name, compound_name, "protein_center", com])
+
+             p2rankFile = f"{cache_path}/p2rank/{name}_protein_tankbind_cleaned.pdb_predictions.csv"
+             pocket = pd.read_csv(p2rankFile)
+             pocket.columns = pocket.columns.str.strip()
+             pocket_coms = pocket[['center_x', 'center_y', 'center_z']].values
+             for ith_pocket, com in enumerate(pocket_coms):
+                 com = ",".join([str(a.round(3)) for a in com])
+                 info.append([name, compound_name, f"pocket_{ith_pocket + 1}", com])
+         info = pd.DataFrame(info, columns=['protein_name', 'compound_name', 'pocket_name', 'pocket_com'])
+
+         dataset_path = f"{cache_path}/{name}_dataset/"
+         os.system(f"rm -r {dataset_path}")
+         os.system(f"mkdir -p {dataset_path}")
+         dataset = TankBind_prediction(dataset_path, data=info, protein_dict=protein_dict, compound_dict=compound_dict)
+
+         # dataset = TankBind_prediction(dataset_path)
+         times_preprocess.append(time.time() - single_preprocess_time)
+         single_forward_pass_time = time.time()
+         data_loader = DataLoader(dataset, batch_size=batch_size, follow_batch=['x', 'y', 'compound_pair'], shuffle=False,
+                                  num_workers=0)
+         affinity_pred_list = []
+         y_pred_list = []
+         for data in tqdm(data_loader):
+             data = data.to(device)
+             y_pred, affinity_pred = model(data)
+             affinity_pred_list.append(affinity_pred.detach().cpu())
+             for i in range(data.y_batch.max() + 1):
+                 y_pred_list.append((y_pred[data['y_batch'] == i]).detach().cpu())
+
+         affinity_pred_list = torch.cat(affinity_pred_list)
+         forward_pass_time.append(time.time() - single_forward_pass_time)
+         output_info = copy.deepcopy(dataset.data)
+         output_info['affinity'] = affinity_pred_list
+         output_info['dataset_index'] = range(len(output_info))
+         output_info_sorted = output_info.sort_values('affinity', ascending=False)
+
+         result_folder = f'{args.results_path}/{name}'
+         os.makedirs(result_folder, exist_ok=True)
+         output_info_sorted.to_csv(f"{result_folder}/output_info_sorted_by_affinity.csv")
+
+         if not args.skip_multiple_pocket_outputs:
+             for idx, (dataframe_idx, line) in enumerate(copy.deepcopy(output_info_sorted).iterrows()):
+                 single_top10_generation_time = time.time()
+                 pocket_name = line['pocket_name']
+                 compound_name = line['compound_name']
+                 ligandName = compound_name.split("_")[1]
+                 coords = dataset[dataframe_idx].coords.to('cpu')
+                 protein_nodes_xyz = dataset[dataframe_idx].node_xyz.to('cpu')
+                 n_compound = coords.shape[0]
+                 n_protein = protein_nodes_xyz.shape[0]
+                 y_pred = y_pred_list[dataframe_idx].reshape(n_protein, n_compound).to('cpu')
+                 y = dataset[dataframe_idx].dis_map.reshape(n_protein, n_compound).to('cpu')
+                 compound_pair_dis_constraint = torch.cdist(coords, coords)
+                 mol = Chem.MolFromMolFile(rdkit_mol_path)
+                 LAS_distance_constraint_mask = get_LAS_distance_constraint_mask(mol).bool()
+                 pred_dist_info = get_info_pred_distance(coords, y_pred, protein_nodes_xyz, compound_pair_dis_constraint,
+                                                         LAS_distance_constraint_mask=LAS_distance_constraint_mask,
+                                                         n_repeat=1, show_progress=False)
+
+                 toFile = f'{result_folder}/{name}_tankbind_{idx}.sdf'
+                 new_coords = pred_dist_info.sort_values("loss")['coords'].iloc[0].astype(np.double)
+                 write_with_new_coords(mol, new_coords, toFile)
+                 if idx < 10:
+                     top_10_generation_time.append(time.time() - single_top10_generation_time)
+                 if idx == 0:
+                     top_1_generation_time.append(time.time() - single_top10_generation_time)
+
+         output_info_chosen = copy.deepcopy(dataset.data)
+         output_info_chosen['affinity'] = affinity_pred_list
+         output_info_chosen['dataset_index'] = range(len(output_info_chosen))
+         chosen = output_info_chosen.loc[
+             output_info_chosen.groupby(['protein_name', 'compound_name'], sort=False)['affinity'].agg(
+                 'idxmax')].reset_index()
+
+         list_to_parallelize.append((dataset, y_pred_list, chosen, rdkit_mol_path, result_folder, name))
+
+     chosen_generation_start_time = time.time()
+     if args.num_workers > 1:
+         p = Pool(args.num_workers, maxtasksperchild=1)
+         p.__enter__()
+     with tqdm(total=len(list_to_parallelize), desc='running optimization') as pbar:
+         map_fn = p.imap_unordered if args.num_workers > 1 else map
+         for t in map_fn(parallel_save_prediction, list_to_parallelize):
+             pbar.update()
+     if args.num_workers > 1: p.__exit__(None, None, None)
+     chosen_generation_time = time.time() - chosen_generation_start_time
+     """
+     lig, _ = read_mol(f"{args.data_dir}/{name}/{name}_ligand.sdf", f"{args.data_dir}/{name}/{name}_ligand.mol2")
+     sm = Chem.MolToSmiles(lig)
+     m_order = list(lig.GetPropsAsDict(includePrivate=True, includeComputed=True)['_smilesAtomOutputOrder'])
+     lig = Chem.RenumberAtoms(lig, m_order)
+     lig = Chem.RemoveAllHs(lig)
+     lig = RemoveHs(lig)
+     true_ligand_pos = np.array(lig.GetConformer().GetPositions())
+
+     toFile = f'{result_folder}/{name}_tankbind_chosen.sdf'
+     mol_pred, _ = read_mol(toFile, None)
+     sm = Chem.MolToSmiles(mol_pred)
+     m_order = list(mol_pred.GetPropsAsDict(includePrivate=True, includeComputed=True)['_smilesAtomOutputOrder'])
+     mol_pred = Chem.RenumberAtoms(mol_pred, m_order)
+     mol_pred = RemoveHs(mol_pred)
+     mol_pred_pos = np.array(mol_pred.GetConformer().GetPositions())
+     rmsds.append(np.sqrt(((true_ligand_pos - mol_pred_pos) ** 2).sum(axis=1).mean(axis=0)))
+     print(np.sqrt(((true_ligand_pos - mol_pred_pos) ** 2).sum(axis=1).mean(axis=0)))
+     """
+     forward_pass_time = np.array(forward_pass_time).sum()
+     times_preprocess = np.array(times_preprocess).sum()
+     times_inference = np.array(times_inference).sum()
+     top_10_generation_time = np.array(top_10_generation_time).sum()
+     top_1_generation_time = np.array(top_1_generation_time).sum()
+
+     rmsds = np.array(rmsds)
+
+     print(f'forward_pass_time: {forward_pass_time}')
+     print(f'times_preprocess: {times_preprocess}')
+     print(f'times_inference: {times_inference}')
+     print(f'top_10_generation_time: {top_10_generation_time}')
+     print(f'top_1_generation_time: {top_1_generation_time}')
+     print(f'chosen_generation_time: {chosen_generation_time}')
+     print(f'rmsds_below_2: {(100 * (rmsds < 2).sum() / len(rmsds))}')
+     print(f'p2rank Time: {p2_rank_time}')
+     print(
+         f'total_time: '
+         f'{forward_pass_time + times_preprocess + times_inference + top_10_generation_time + top_1_generation_time + p2_rank_time}')
+
+     with open(os.path.join(args.results_path, 'tankbind_log.log'), 'w') as file:
+         file.write(f'forward_pass_time: {forward_pass_time}\n')
+         file.write(f'times_preprocess: {times_preprocess}\n')
+         file.write(f'times_inference: {times_inference}\n')
+         file.write(f'top_10_generation_time: {top_10_generation_time}\n')
+         file.write(f'top_1_generation_time: {top_1_generation_time}\n')
+         file.write(f'rmsds_below_2: {(100 * (rmsds < 2).sum() / len(rmsds))}\n')
+         file.write(f'p2rank Time: {p2_rank_time}\n')
+         file.write(f'total_time: {forward_pass_time + times_preprocess + times_inference + top_10_generation_time + top_1_generation_time + p2_rank_time}\n')
confidence/confidence_train.py ADDED
@@ -0,0 +1,319 @@
+ import gc
+ import math
+ import os
+
+ import shutil
+
+ from argparse import Namespace, ArgumentParser, FileType
+ import torch.nn.functional as F
+
+ import wandb
+ import torch
+ from sklearn.metrics import roc_auc_score
+ from torch_geometric.loader import DataListLoader, DataLoader
+ from tqdm import tqdm
+
+ from confidence.dataset import ConfidenceDataset
+ from utils.training import AverageMeter
+
+ torch.multiprocessing.set_sharing_strategy('file_system')
+
+ import yaml
+ from utils.utils import save_yaml_file, get_optimizer_and_scheduler, get_model
+
+
+ parser = ArgumentParser()
+ parser.add_argument('--config', type=FileType(mode='r'), default=None)
+ parser.add_argument('--original_model_dir', type=str, default='workdir', help='Path to folder with trained model and hyperparameters')
+ parser.add_argument('--restart_dir', type=str, default=None, help='')
+ parser.add_argument('--use_original_model_cache', action='store_true', default=False, help='If this is true, the same dataset as in the original model will be used. Otherwise, the dataset parameters are used.')
+ parser.add_argument('--data_dir', type=str, default='data/PDBBind_processed/', help='Folder containing original structures')
+ parser.add_argument('--ckpt', type=str, default='best_model.pt', help='Checkpoint to use inside the folder')
+ parser.add_argument('--model_save_frequency', type=int, default=0, help='Frequency with which to save the last model. If 0, then only the early stopping criterion best model is saved and overwritten.')
+ parser.add_argument('--best_model_save_frequency', type=int, default=0, help='Frequency with which to save the best model. If 0, then only the early stopping criterion best model is saved and overwritten.')
+ parser.add_argument('--run_name', type=str, default='test_confidence', help='')
+ parser.add_argument('--project', type=str, default='diffdock_confidence', help='')
+ parser.add_argument('--split_train', type=str, default='data/splits/timesplit_no_lig_overlap_train', help='Path of file defining the split')
+ parser.add_argument('--split_val', type=str, default='data/splits/timesplit_no_lig_overlap_val', help='Path of file defining the split')
+ parser.add_argument('--split_test', type=str, default='data/splits/timesplit_test', help='Path of file defining the split')
+
+ # Inference parameters for creating the positions and rmsds that the confidence predictor will be trained on.
+ parser.add_argument('--cache_path', type=str, default='data/cacheNew', help='Folder from where to load/restore cached dataset')
+ parser.add_argument('--cache_ids_to_combine', nargs='+', type=str, default=None, help='IDs of the cached inference runs that should be combined into the full confidence dataset')
+ parser.add_argument('--cache_creation_id', type=int, default=None, help='ID of this inference run over the full dataset; several such runs are concatenated to build the full confidence dataset')
+ parser.add_argument('--wandb', action='store_true', default=False, help='')
+ parser.add_argument('--inference_steps', type=int, default=2, help='Number of denoising steps')
+ parser.add_argument('--samples_per_complex', type=int, default=3, help='')
+ parser.add_argument('--balance', action='store_true', default=False, help='If this is true then we do not force the samples seen during training to be the same amount of negatives as positives')
+ parser.add_argument('--rmsd_prediction', action='store_true', default=False, help='')
+ parser.add_argument('--rmsd_classification_cutoff', nargs='+', type=float, default=2, help='RMSD value below which a prediction is considered a positive. This can also be multiple cutoffs.')
+
+ parser.add_argument('--log_dir', type=str, default='workdir', help='')
+ parser.add_argument('--main_metric', type=str, default='accuracy', help='Metric to track for early stopping. Mostly [loss, accuracy, ROC AUC]')
+ parser.add_argument('--main_metric_goal', type=str, default='max', help='Can be [min, max]')
+ parser.add_argument('--transfer_weights', action='store_true', default=False, help='')
+ parser.add_argument('--batch_size', type=int, default=5, help='')
+ parser.add_argument('--lr', type=float, default=1e-3, help='')
+ parser.add_argument('--w_decay', type=float, default=0.0, help='')
+ parser.add_argument('--scheduler', type=str, default='plateau', help='')
+ parser.add_argument('--scheduler_patience', type=int, default=20, help='')
+ parser.add_argument('--n_epochs', type=int, default=5, help='')
+
+ # Dataset
+ parser.add_argument('--limit_complexes', type=int, default=0, help='')
+ parser.add_argument('--all_atoms', action='store_true', default=True, help='')
+ parser.add_argument('--multiplicity', type=int, default=1, help='')
+ parser.add_argument('--chain_cutoff', type=float, default=10, help='')
+ parser.add_argument('--receptor_radius', type=float, default=30, help='')
+ parser.add_argument('--c_alpha_max_neighbors', type=int, default=10, help='')
+ parser.add_argument('--atom_radius', type=float, default=5, help='')
+ parser.add_argument('--atom_max_neighbors', type=int, default=8, help='')
+ parser.add_argument('--matching_popsize', type=int, default=20, help='')
+ parser.add_argument('--matching_maxiter', type=int, default=20, help='')
+ parser.add_argument('--max_lig_size', type=int, default=None, help='Maximum number of heavy atoms')
+ parser.add_argument('--remove_hs', action='store_true', default=False, help='remove Hs')
+ parser.add_argument('--num_conformers', type=int, default=1, help='')
+ parser.add_argument('--esm_embeddings_path', type=str, default=None, help='If this is set then the LM embeddings at that path will be used for the receptor features')
+ parser.add_argument('--no_torsion', action='store_true', default=False, help='')
+
+ # Model
+ parser.add_argument('--num_conv_layers', type=int, default=2, help='Number of interaction layers')
+ parser.add_argument('--max_radius', type=float, default=5.0, help='Radius cutoff for geometric graph')
+ parser.add_argument('--scale_by_sigma', action='store_true', default=True, help='Whether to normalise the score')
+ parser.add_argument('--ns', type=int, default=16, help='Number of hidden features per node of order 0')
+ parser.add_argument('--nv', type=int, default=4, help='Number of hidden features per node of order >0')
+ parser.add_argument('--distance_embed_dim', type=int, default=32, help='')
+ parser.add_argument('--cross_distance_embed_dim', type=int, default=32, help='')
+ parser.add_argument('--no_batch_norm', action='store_true', default=False, help='If set, it removes the batch norm')
+ parser.add_argument('--use_second_order_repr', action='store_true', default=False, help='Whether to use only up to first order representations or also second')
+ parser.add_argument('--cross_max_distance', type=float, default=80, help='')
+ parser.add_argument('--dynamic_max_cross', action='store_true', default=False, help='')
+ parser.add_argument('--dropout', type=float, default=0.0, help='MLP dropout')
+ parser.add_argument('--embedding_type', type=str, default="sinusoidal", help='')
+ parser.add_argument('--sigma_embed_dim', type=int, default=32, help='')
+ parser.add_argument('--embedding_scale', type=int, default=10000, help='')
+ parser.add_argument('--confidence_no_batchnorm', action='store_true', default=False, help='')
+ parser.add_argument('--confidence_dropout', type=float, default=0.0, help='MLP dropout in confidence readout')
+
+ args = parser.parse_args()
+ if args.config:
+     config_dict = yaml.load(args.config, Loader=yaml.FullLoader)
+     arg_dict = args.__dict__
+     for key, value in config_dict.items():
+         if isinstance(value, list):
+             for v in value:
+                 arg_dict[key].append(v)
+         else:
+             arg_dict[key] = value
+     args.config = args.config.name
+ assert args.main_metric_goal == 'max' or args.main_metric_goal == 'min'
+
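+ # one training pass; the target is either the raw RMSD (regression) or whether the RMSD lies below the cutoff(s) (classification)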
+ def train_epoch(model, loader, optimizer, rmsd_prediction):
+     model.train()
+     meter = AverageMeter(['confidence_loss'])
+
+     for data in tqdm(loader, total=len(loader)):
+         if (device.type == 'cuda' and len(data) % torch.cuda.device_count() == 1) or (device.type == 'cpu' and data.num_graphs == 1):
+             print("Skipping batch of size 1 since otherwise batchnorm would not work.")
+             continue
+         optimizer.zero_grad()
+         try:
+             pred = model(data)
+             if rmsd_prediction:
+                 labels = torch.cat([graph.rmsd for graph in data]).to(device) if isinstance(data, list) else data.rmsd
+                 confidence_loss = F.mse_loss(pred, labels)
+             else:
+                 if isinstance(args.rmsd_classification_cutoff, list):
+                     labels = torch.cat([graph.y_binned for graph in data]).to(device) if isinstance(data, list) else data.y_binned
+                     confidence_loss = F.cross_entropy(pred, labels)
+                 else:
+                     labels = torch.cat([graph.y for graph in data]).to(device) if isinstance(data, list) else data.y
+                     confidence_loss = F.binary_cross_entropy_with_logits(pred, labels)
+             confidence_loss.backward()
+             optimizer.step()
+             meter.add([confidence_loss.cpu().detach()])
+         except RuntimeError as e:
+             if 'out of memory' in str(e):
+                 print('| WARNING: ran out of memory, skipping batch')
+                 for p in model.parameters():
+                     if p.grad is not None:
+                         del p.grad  # free some memory
+                 torch.cuda.empty_cache()
+                 gc.collect()
+                 continue
+             else:
+                 raise e
+
+     return meter.summary()
+
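+ # validation pass; for the classification setting it additionally tracks accuracy and ROC AUC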
+ def test_epoch(model, loader, rmsd_prediction):
+     model.eval()
+     meter = AverageMeter(['confidence_loss'], unpooled_metrics=True) if rmsd_prediction else AverageMeter(['confidence_loss', 'accuracy', 'ROC AUC'], unpooled_metrics=True)
+     all_labels = []
+     all_affinities = []
+     for data in tqdm(loader, total=len(loader)):
+         try:
+             with torch.no_grad():
+                 pred = model(data)
+             affinity_loss = torch.tensor(0.0, dtype=torch.float, device=pred[0].device)
+             accuracy = torch.tensor(0.0, dtype=torch.float, device=pred[0].device)
+             if rmsd_prediction:
+                 labels = torch.cat([graph.rmsd for graph in data]).to(device) if isinstance(data, list) else data.rmsd
+                 confidence_loss = F.mse_loss(pred, labels)
+                 meter.add([confidence_loss.cpu().detach()])
+             else:
+                 if isinstance(args.rmsd_classification_cutoff, list):
+                     labels = torch.cat([graph.y_binned for graph in data]).to(device) if isinstance(data, list) else data.y_binned
+                     confidence_loss = F.cross_entropy(pred, labels)
+                 else:
+                     labels = torch.cat([graph.y for graph in data]).to(device) if isinstance(data, list) else data.y
+                     confidence_loss = F.binary_cross_entropy_with_logits(pred, labels)
+                     accuracy = torch.mean((labels == (pred > 0).float()).float())
+                 try:
+                     roc_auc = roc_auc_score(labels.detach().cpu().numpy(), pred.detach().cpu().numpy())
+                 except ValueError as e:
+                     if 'Only one class present in y_true. ROC AUC score is not defined in that case.' in str(e):
+                         roc_auc = 0
+                     else:
+                         raise e
+                 meter.add([confidence_loss.cpu().detach(), accuracy.cpu().detach(), torch.tensor(roc_auc)])
+             all_labels.append(labels)
+
+         except RuntimeError as e:
+             if 'out of memory' in str(e):
+                 print('| WARNING: ran out of memory, skipping batch')
+                 for p in model.parameters():
+                     if p.grad is not None:
+                         del p.grad  # free some memory
+                 torch.cuda.empty_cache()
+                 continue
+             else:
+                 raise e
+
+     all_labels = torch.cat(all_labels)
+
+     if rmsd_prediction:
+         baseline_metric = ((all_labels - all_labels.mean()).abs()).mean()
+     else:
+         baseline_metric = all_labels.sum() / len(all_labels)
+     results = meter.summary()
+     results.update({'baseline_metric': baseline_metric})
+     return results, baseline_metric
+
+
+ def train(args, model, optimizer, scheduler, train_loader, val_loader, run_dir):
+     best_val_metric = math.inf if args.main_metric_goal == 'min' else 0
+     best_epoch = 0
+
+     print("Starting training...")
+     for epoch in range(args.n_epochs):
+         logs = {}
+         train_metrics = train_epoch(model, train_loader, optimizer, args.rmsd_prediction)
+         print("Epoch {}: Training loss {:.4f}".format(epoch, train_metrics['confidence_loss']))
+
+         val_metrics, baseline_metric = test_epoch(model, val_loader, args.rmsd_prediction)
+         if args.rmsd_prediction:
+             print("Epoch {}: Validation loss {:.4f}".format(epoch, val_metrics['confidence_loss']))
+         else:
+             print("Epoch {}: Validation loss {:.4f} accuracy {:.4f}".format(epoch, val_metrics['confidence_loss'], val_metrics['accuracy']))
+
+         if args.wandb:
+             logs.update({'valinf_' + k: v for k, v in val_metrics.items()}, step=epoch + 1)
+             logs.update({'train_' + k: v for k, v in train_metrics.items()}, step=epoch + 1)
+             logs.update({'mean_rmsd' if args.rmsd_prediction else 'fraction_positives': baseline_metric,
+                          'current_lr': optimizer.param_groups[0]['lr']})
+             wandb.log(logs, step=epoch + 1)
+
+         if scheduler:
+             scheduler.step(val_metrics[args.main_metric])
+
+         state_dict = model.module.state_dict() if device.type == 'cuda' else model.state_dict()
+
+         if (args.main_metric_goal == 'min' and val_metrics[args.main_metric] < best_val_metric) or \
+                 (args.main_metric_goal == 'max' and val_metrics[args.main_metric] > best_val_metric):
+             best_val_metric = val_metrics[args.main_metric]
+             best_epoch = epoch
+             torch.save(state_dict, os.path.join(run_dir, 'best_model.pt'))
+         if args.model_save_frequency > 0 and (epoch + 1) % args.model_save_frequency == 0:
+             torch.save(state_dict, os.path.join(run_dir, f'model_epoch{epoch+1}.pt'))
+         if args.best_model_save_frequency > 0 and (epoch + 1) % args.best_model_save_frequency == 0:
+             shutil.copyfile(os.path.join(run_dir, 'best_model.pt'), os.path.join(run_dir, f'best_model_epoch{epoch+1}.pt'))
+
+         torch.save({
+             'epoch': epoch,
+             'model': state_dict,
+             'optimizer': optimizer.state_dict(),
+         }, os.path.join(run_dir, 'last_model.pt'))
+
+     print("Best Validation {} {} on Epoch {}".format(args.main_metric, best_val_metric, best_epoch))
+
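+ # builds the confidence train/val loaders; the underlying datasets may first have to be generated by running the score model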
+ def construct_loader_confidence(args, device):
+     common_args = {'cache_path': args.cache_path, 'original_model_dir': args.original_model_dir, 'device': device,
+                    'inference_steps': args.inference_steps, 'samples_per_complex': args.samples_per_complex,
+                    'limit_complexes': args.limit_complexes, 'all_atoms': args.all_atoms, 'balance': args.balance, 'rmsd_classification_cutoff': args.rmsd_classification_cutoff,
+                    'use_original_model_cache': args.use_original_model_cache, 'cache_creation_id': args.cache_creation_id, "cache_ids_to_combine": args.cache_ids_to_combine}
+     loader_class = DataListLoader if torch.cuda.is_available() else DataLoader
+
+     # the exception is stored because its name would no longer be bound outside the except block
+     train_exception = None
+     try:
+         train_dataset = ConfidenceDataset(split="train", args=args, **common_args)
+         train_loader = loader_class(dataset=train_dataset, batch_size=args.batch_size, shuffle=True)
+     except Exception as e:
+         if 'The generated ligand positions with cache_id do not exist:' in str(e):
+             print("HAPPENING | Encountered the following exception when loading the confidence train dataset:")
+             print(str(e))
+             print("HAPPENING | We are still continuing because we want to try to generate the validation dataset if it has not been created yet:")
+             train_exception = e
+         else: raise e
+
+     val_dataset = ConfidenceDataset(split="val", args=args, **common_args)
+     val_loader = loader_class(dataset=val_dataset, batch_size=args.batch_size, shuffle=True)
+
+     if train_exception is not None: raise Exception('We encountered the exception during train dataset loading: ', train_exception)
+     return train_loader, val_loader
+
+
+ if __name__ == '__main__':
+     device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
+     with open(f'{args.original_model_dir}/model_parameters.yml') as f:
+         score_model_args = Namespace(**yaml.full_load(f))
+
+     # construct loader
+     train_loader, val_loader = construct_loader_confidence(args, device)
+     model = get_model(score_model_args if args.transfer_weights else args, device, t_to_sigma=None, confidence_mode=True)
+     optimizer, scheduler = get_optimizer_and_scheduler(args, model, scheduler_mode=args.main_metric_goal)
+
+     if args.transfer_weights:
+         print("HAPPENING | Transferring weights from original_model_dir to the new model after using original_model_dir's arguments to construct the new model.")
+         checkpoint = torch.load(os.path.join(args.original_model_dir, args.ckpt), map_location=device)
+         model_state_dict = model.state_dict()
+         transfer_weights_dict = {k: v for k, v in checkpoint.items() if k in list(model_state_dict.keys())}
+         model_state_dict.update(transfer_weights_dict)  # update the layers with the pretrained weights
+         model.load_state_dict(model_state_dict)
+
+     elif args.restart_dir:
+         ckpt = torch.load(f'{args.restart_dir}/last_model.pt', map_location=torch.device('cpu'))
+         model.module.load_state_dict(ckpt['model'], strict=True)
+         optimizer.load_state_dict(ckpt['optimizer'])
+         print("Restarting from epoch", ckpt['epoch'])
+
+     numel = sum([p.numel() for p in model.parameters()])
+     print('Model with', numel, 'parameters')
+
+     if args.wandb:
+         wandb.init(
+             entity='entity',
+             settings=wandb.Settings(start_method="fork"),
+             project=args.project,
+             name=args.run_name,
+             config=args
+         )
+         wandb.log({'numel': numel})
+
+     # record parameters
+     run_dir = os.path.join(args.log_dir, args.run_name)
+     yaml_file_name = os.path.join(run_dir, 'model_parameters.yml')
+     save_yaml_file(yaml_file_name, args.__dict__)
+     args.device = device
+
+     train(args, model, optimizer, scheduler, train_loader, val_loader, run_dir)
confidence/dataset.py ADDED
@@ -0,0 +1,272 @@
+import itertools
+import math
+import os
+import pickle
+import random
+from argparse import Namespace
+from functools import partial
+import copy
+
+import numpy as np
+import pandas as pd
+import torch
+import yaml
+from torch_geometric.data import Dataset, Data
+from torch_geometric.loader import DataLoader
+from tqdm import tqdm
+
+from datasets.pdbbind import PDBBind
+from utils.diffusion_utils import get_t_schedule
+from utils.sampling import randomize_position, sampling
+from utils.utils import get_model
+from utils.diffusion_utils import t_to_sigma as t_to_sigma_compl
+
+
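+# Thin PyG Dataset wrapper around an in-memory list of heterographs, used below in
+# preprocessing to feed the cached complexes through a DataLoader.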
+class ListDataset(Dataset):
+    def __init__(self, data_list):  # renamed from `list` to avoid shadowing the builtin
+        super().__init__()
+        self.data_list = data_list
+
+    def len(self) -> int:
+        return len(self.data_list)
+
+    def get(self, idx: int) -> Data:
+        return self.data_list[idx]
+
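+# Reconstructs the directory name under which PDBBind cached the preprocessed complex
+# graphs, by encoding the same preprocessing options into the path.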
+def get_cache_path(args, split):
+    cache_path = args.cache_path
+    if not args.no_torsion:
+        cache_path += '_torsion'
+    if args.all_atoms:
+        cache_path += '_allatoms'
+    split_path = args.split_train if split == 'train' else args.split_val
+    cache_path = os.path.join(cache_path, f'limit{args.limit_complexes}_INDEX{os.path.splitext(os.path.basename(split_path))[0]}_maxLigSize{args.max_lig_size}_H{int(not args.remove_hs)}_recRad{args.receptor_radius}_recMax{args.c_alpha_max_neighbors}'
+                              + ('' if not args.all_atoms else f'_atomRad{args.atom_radius}_atomMax{args.atom_max_neighbors}')
+                              + ('' if args.no_torsion or args.num_conformers == 1 else f'_confs{args.num_conformers}')
+                              + ('' if args.esm_embeddings_path is None else '_esmEmbeddings'))
+    return cache_path
+
+def get_args_and_cache_path(original_model_dir, split):
+    with open(f'{original_model_dir}/model_parameters.yml') as f:
+        model_args = Namespace(**yaml.full_load(f))
+    return model_args, get_cache_path(model_args, split)
+
+
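+# Dataset for training the confidence model: pairs cached complex graphs with ligand
+# poses generated by the pretrained score model, labelled by whether their RMSD to the
+# crystal pose is below rmsd_classification_cutoff.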
+class ConfidenceDataset(Dataset):
+    def __init__(self, cache_path, original_model_dir, split, device, limit_complexes,
+                 inference_steps, samples_per_complex, all_atoms,
+                 args, balance=False, use_original_model_cache=True, rmsd_classification_cutoff=2,
+                 cache_ids_to_combine=None, cache_creation_id=None):
+
+        super(ConfidenceDataset, self).__init__()
+
+        self.device = device
+        self.inference_steps = inference_steps
+        self.limit_complexes = limit_complexes
+        self.all_atoms = all_atoms
+        self.original_model_dir = original_model_dir
+        self.balance = balance
+        self.use_original_model_cache = use_original_model_cache
+        self.rmsd_classification_cutoff = rmsd_classification_cutoff
+        self.cache_ids_to_combine = cache_ids_to_combine
+        self.cache_creation_id = cache_creation_id
+        self.samples_per_complex = samples_per_complex
+
+        self.original_model_args, original_model_cache = get_args_and_cache_path(original_model_dir, split)
+        self.complex_graphs_cache = original_model_cache if self.use_original_model_cache else get_cache_path(args, split)
+        print('Using the cached complex graphs of the original model args' if self.use_original_model_cache
+              else 'Not using the cached complex graphs of the original model args. Instead, the complex graphs at the '
+                   'location given by the dataset parameters passed to confidence_train.py are used.')
+        print(self.complex_graphs_cache)
+        if not os.path.exists(os.path.join(self.complex_graphs_cache, "heterographs.pkl")):
+            print(f'HAPPENING | Complex graphs path does not exist yet: {os.path.join(self.complex_graphs_cache, "heterographs.pkl")}. For that reason, we are now creating the dataset.')
+            PDBBind(transform=None, root=args.data_dir, limit_complexes=args.limit_complexes,
+                    receptor_radius=args.receptor_radius,
+                    cache_path=args.cache_path, split_path=args.split_val if split == 'val' else args.split_train,
+                    remove_hs=args.remove_hs, max_lig_size=None,
+                    c_alpha_max_neighbors=args.c_alpha_max_neighbors,
+                    matching=not args.no_torsion, keep_original=True,
+                    popsize=args.matching_popsize,
+                    maxiter=args.matching_maxiter,
+                    all_atoms=args.all_atoms,
+                    atom_radius=args.atom_radius,
+                    atom_max_neighbors=args.atom_max_neighbors,
+                    esm_embeddings_path=args.esm_embeddings_path,
+                    require_ligand=True)
+
+        print(f'HAPPENING | Loading complex graphs from: {os.path.join(self.complex_graphs_cache, "heterographs.pkl")}')
+        with open(os.path.join(self.complex_graphs_cache, "heterographs.pkl"), 'rb') as f:
+            complex_graphs = pickle.load(f)
+        self.complex_graph_dict = {d.name: d for d in complex_graphs}
+
+        self.full_cache_path = os.path.join(cache_path, f'model_{os.path.splitext(os.path.basename(original_model_dir))[0]}'
+                                                        f'_split_{split}_limit_{limit_complexes}')
+
+        if (not os.path.exists(os.path.join(self.full_cache_path, "ligand_positions.pkl")) and self.cache_creation_id is None) or \
+                (not os.path.exists(os.path.join(self.full_cache_path, f"ligand_positions_id{self.cache_creation_id}.pkl")) and self.cache_creation_id is not None):
+            os.makedirs(self.full_cache_path, exist_ok=True)
+            self.preprocessing(original_model_cache)
+
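+        # Load the generated poses and RMSDs, either from a single cache or by merging
+        # several caches (identified by cache_ids_to_combine) produced in separate runs.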
+        if self.cache_ids_to_combine is None:
+            print(f'HAPPENING | Loading positions and rmsds from: {os.path.join(self.full_cache_path, "ligand_positions.pkl")}')
+            with open(os.path.join(self.full_cache_path, "ligand_positions.pkl"), 'rb') as f:
+                self.full_ligand_positions, self.rmsds = pickle.load(f)
+            if os.path.exists(os.path.join(self.full_cache_path, "complex_names_in_same_order.pkl")):
+                with open(os.path.join(self.full_cache_path, "complex_names_in_same_order.pkl"), 'rb') as f:
+                    generated_rmsd_complex_names = pickle.load(f)
+            else:
+                print('HAPPENING | The path', os.path.join(self.full_cache_path, "complex_names_in_same_order.pkl"),
+                      'does not exist. \n => We assume this means we are using a ligand_positions.pkl from a version of '
+                      'the code that did not yet save the complex names. We instead use the complex names of the dataset '
+                      'that the original model used to create the ligand positions and RMSDs.')
+                with open(os.path.join(original_model_cache, "heterographs.pkl"), 'rb') as f:
+                    original_model_complex_graphs = pickle.load(f)
+                generated_rmsd_complex_names = [d.name for d in original_model_complex_graphs]
+            assert len(self.rmsds) == len(generated_rmsd_complex_names)
+        else:
+            all_rmsds_unsorted, all_full_ligand_positions_unsorted, all_names_unsorted = [], [], []
+            for idx, cache_id in enumerate(self.cache_ids_to_combine):
+                print(f'HAPPENING | Loading positions and rmsds from cache_id from the path: {os.path.join(self.full_cache_path, "ligand_positions_id" + str(cache_id) + ".pkl")}')
+                if not os.path.exists(os.path.join(self.full_cache_path, f"ligand_positions_id{cache_id}.pkl")):
+                    # be careful when changing this error message since it is sometimes caught in a try/except
+                    raise Exception(f'The generated ligand positions with cache_id do not exist: {cache_id}')
+                with open(os.path.join(self.full_cache_path, f"ligand_positions_id{cache_id}.pkl"), 'rb') as f:
+                    full_ligand_positions, rmsds = pickle.load(f)
+                with open(os.path.join(self.full_cache_path, f"complex_names_in_same_order_id{cache_id}.pkl"), 'rb') as f:
+                    names_unsorted = pickle.load(f)
+                all_names_unsorted.append(names_unsorted)
+                all_rmsds_unsorted.append(rmsds)
+                all_full_ligand_positions_unsorted.append(full_ligand_positions)
+            names_order = list(set(sum(all_names_unsorted, [])))
+            all_rmsds, all_full_ligand_positions = [], []
+            for rmsds_unsorted, full_ligand_positions_unsorted, names_unsorted in zip(all_rmsds_unsorted, all_full_ligand_positions_unsorted, all_names_unsorted):
+                # each cache stores positions and rmsds in the same per-complex order, keyed here by name
+                name_to_pos_dict = {name: (pos, rmsd) for name, pos, rmsd in zip(names_unsorted, full_ligand_positions_unsorted, rmsds_unsorted)}
+                all_rmsds.append([name_to_pos_dict[name][1] for name in names_order])
+                all_full_ligand_positions.append([name_to_pos_dict[name][0] for name in names_order])
+            self.full_ligand_positions, self.rmsds = [], []
+            for positions_tuple in list(zip(*all_full_ligand_positions)):
+                self.full_ligand_positions.append(np.concatenate(positions_tuple, axis=0))
+            for rmsds_tuple in list(zip(*all_rmsds)):
+                self.rmsds.append(np.concatenate(rmsds_tuple, axis=0))
+            generated_rmsd_complex_names = names_order
+        print('Number of complex graphs:', len(self.complex_graph_dict))
+        print('Number of RMSDs and positions for the complex graphs:', len(self.full_ligand_positions))
+
+        self.all_samples_per_complex = samples_per_complex * (1 if self.cache_ids_to_combine is None else len(self.cache_ids_to_combine))
+
+        self.positions_rmsds_dict = {name: (pos, rmsd) for name, pos, rmsd in zip(generated_rmsd_complex_names, self.full_ligand_positions, self.rmsds)}
+        self.dataset_names = list(set(self.positions_rmsds_dict.keys()) & set(self.complex_graph_dict.keys()))
+        if limit_complexes > 0:
+            self.dataset_names = self.dataset_names[:limit_complexes]
+
+    def len(self):
+        return len(self.dataset_names)
+
+    def get(self, idx):
+        complex_graph = copy.deepcopy(self.complex_graph_dict[self.dataset_names[idx]])
+        positions, rmsds = self.positions_rmsds_dict[self.dataset_names[idx]]
+
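+        # With --balance, positive poses (RMSD < cutoff) and negatives are drawn with
+        # equal probability so the classifier sees a 50/50 label distribution.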
+        if self.balance:
+            if isinstance(self.rmsd_classification_cutoff, list):
+                raise ValueError("a list for --rmsd_classification_cutoff can only be used without --balance")
+            label = random.randint(0, 1)
+            success = rmsds < self.rmsd_classification_cutoff
+            n_success = np.count_nonzero(success)
+            if label == 0 and n_success != self.all_samples_per_complex:
+                # sample negative complex
+                sample = random.randint(0, self.all_samples_per_complex - n_success - 1)
+                lig_pos = positions[~success][sample]
+                complex_graph['ligand'].pos = torch.from_numpy(lig_pos)
+            else:
+                # sample positive complex
+                if n_success > 0:  # if there is no successful sample, the matched (crystal) pose is kept
+                    sample = random.randint(0, n_success - 1)
+                    lig_pos = positions[success][sample]
+                    complex_graph['ligand'].pos = torch.from_numpy(lig_pos)
+            complex_graph.y = torch.tensor(label).float()
+        else:
+            sample = random.randint(0, self.all_samples_per_complex - 1)
+            complex_graph['ligand'].pos = torch.from_numpy(positions[sample])
+            complex_graph.y = torch.tensor(rmsds[sample] < self.rmsd_classification_cutoff).float().unsqueeze(0)
+            if isinstance(self.rmsd_classification_cutoff, list):
+                complex_graph.y_binned = torch.tensor(np.logical_and(rmsds[sample] < self.rmsd_classification_cutoff + [math.inf], rmsds[sample] >= [0] + self.rmsd_classification_cutoff), dtype=torch.float).unsqueeze(0)
+                complex_graph.y = torch.tensor(rmsds[sample] < self.rmsd_classification_cutoff[0]).unsqueeze(0).float()
+            complex_graph.rmsd = torch.tensor(rmsds[sample]).unsqueeze(0).float()
+
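+        # The confidence model sees the complex at diffusion time t=0, so the time
+        # features are set to zero for every node type.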
+        complex_graph['ligand'].node_t = {'tr': 0 * torch.ones(complex_graph['ligand'].num_nodes),
+                                          'rot': 0 * torch.ones(complex_graph['ligand'].num_nodes),
+                                          'tor': 0 * torch.ones(complex_graph['ligand'].num_nodes)}
+        complex_graph['receptor'].node_t = {'tr': 0 * torch.ones(complex_graph['receptor'].num_nodes),
+                                            'rot': 0 * torch.ones(complex_graph['receptor'].num_nodes),
+                                            'tor': 0 * torch.ones(complex_graph['receptor'].num_nodes)}
+        if self.all_atoms:
+            complex_graph['atom'].node_t = {'tr': 0 * torch.ones(complex_graph['atom'].num_nodes),
+                                            'rot': 0 * torch.ones(complex_graph['atom'].num_nodes),
+                                            'tor': 0 * torch.ones(complex_graph['atom'].num_nodes)}
+        complex_graph.complex_t = {'tr': 0 * torch.ones(1), 'rot': 0 * torch.ones(1), 'tor': 0 * torch.ones(1)}
+        return complex_graph
+
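+    # Runs the pretrained score model's reverse diffusion over each cached complex to
+    # generate candidate ligand poses, computes their RMSDs to the crystal pose, and
+    # stores both to disk as the confidence model's training targets.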
+    def preprocessing(self, original_model_cache):
+        t_to_sigma = partial(t_to_sigma_compl, args=self.original_model_args)
+
+        model = get_model(self.original_model_args, self.device, t_to_sigma=t_to_sigma, no_parallel=True)
+        state_dict = torch.load(f'{self.original_model_dir}/best_model.pt', map_location=torch.device('cpu'))
+        model.load_state_dict(state_dict, strict=True)
+        model = model.to(self.device)
+        model.eval()
+
+        tr_schedule = get_t_schedule(inference_steps=self.inference_steps)
+        rot_schedule = tr_schedule
+        tor_schedule = tr_schedule
+        print('common t schedule', tr_schedule)
+
+        print('HAPPENING | Loading cached complexes of the original model to create the confidence dataset RMSDs and predicted positions. Doing that from:', os.path.join(self.complex_graphs_cache, "heterographs.pkl"))
+        with open(os.path.join(original_model_cache, "heterographs.pkl"), 'rb') as f:
+            complex_graphs = pickle.load(f)
+        dataset = ListDataset(complex_graphs)
+        loader = DataLoader(dataset=dataset, batch_size=1, shuffle=False)
+
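+        # For each complex, draw samples_per_complex poses via reverse diffusion and
+        # compute their RMSD to the crystal pose over the non-hydrogen atoms (filterHs).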
+        rmsds, full_ligand_positions, names = [], [], []
+        for idx, orig_complex_graph in tqdm(enumerate(loader)):
+            data_list = [copy.deepcopy(orig_complex_graph) for _ in range(self.samples_per_complex)]
+            randomize_position(data_list, self.original_model_args.no_torsion, False, self.original_model_args.tr_sigma_max)
+
+            predictions_list = None
+            failed_convergence_counter = 0
+            while predictions_list is None:
+                try:
+                    predictions_list, confidences = sampling(data_list=data_list, model=model, inference_steps=self.inference_steps,
+                                                             tr_schedule=tr_schedule, rot_schedule=rot_schedule, tor_schedule=tor_schedule,
+                                                             device=self.device, t_to_sigma=t_to_sigma, model_args=self.original_model_args)
+                except Exception as e:
+                    if 'failed to converge' in str(e):
+                        failed_convergence_counter += 1
+                        if failed_convergence_counter > 5:
+                            print('| WARNING: SVD failed to converge 5 times - skipping the complex')
+                            break
+                        print('| WARNING: SVD failed to converge - trying again with a new sample')
+                    else:
+                        raise e
+            if failed_convergence_counter > 5:
+                predictions_list = data_list
+            if self.original_model_args.no_torsion:
+                orig_complex_graph['ligand'].orig_pos = (orig_complex_graph['ligand'].pos.cpu().numpy() + orig_complex_graph.original_center.cpu().numpy())
+
+            filterHs = torch.not_equal(predictions_list[0]['ligand'].x[:, 0], 0).cpu().numpy()
+
+            if isinstance(orig_complex_graph['ligand'].orig_pos, list):
+                orig_complex_graph['ligand'].orig_pos = orig_complex_graph['ligand'].orig_pos[0]
+
+            ligand_pos = np.asarray([complex_graph['ligand'].pos.cpu().numpy()[filterHs] for complex_graph in predictions_list])
+            orig_ligand_pos = np.expand_dims(orig_complex_graph['ligand'].orig_pos[filterHs] - orig_complex_graph.original_center.cpu().numpy(), axis=0)
+            rmsd = np.sqrt(((ligand_pos - orig_ligand_pos) ** 2).sum(axis=2).mean(axis=1))
+
+            rmsds.append(rmsd)
+            full_ligand_positions.append(np.asarray([complex_graph['ligand'].pos.cpu().numpy() for complex_graph in predictions_list]))
+            names.append(orig_complex_graph.name[0])
+            assert len(orig_complex_graph.name) == 1  # the line above assumes the name list always has length 1; if that is not the case, check what the names in there are
+        with open(os.path.join(self.full_cache_path, f"ligand_positions{'' if self.cache_creation_id is None else '_id' + str(self.cache_creation_id)}.pkl"), 'wb') as f:
+            pickle.dump((full_ligand_positions, rmsds), f)
+        with open(os.path.join(self.full_cache_path, f"complex_names_in_same_order{'' if self.cache_creation_id is None else '_id' + str(self.cache_creation_id)}.pkl"), 'wb') as f:
+            pickle.dump(names, f)
data/protein_ligand_example_csv.csv ADDED
@@ -0,0 +1,2 @@
+protein_path,ligand
+examples/1cbr_protein.pdb,CCCC
data/splits/timesplit_no_lig_overlap_train ADDED
@@ -0,0 +1,16379 @@
1
+ 3dpf
2
+ 2zy1
3
+ 6h77
4
+ 5c0m
5
+ 4o2c
6
+ 3a2c
7
+ 3b2t
8
+ 1v2q
9
+ 4q87
10
+ 4j28
11
+ 3efw
12
+ 4mi6
13
+ 3k8c
14
+ 3s54
15
+ 2fda
16
+ 2q55
17
+ 5ylv
18
+ 4ozo
19
+ 3fzn
20
+ 3eoc
21
+ 4tn2
22
+ 3hfv
23
+ 5gg4
24
+ 5svi
25
+ 2r58
26
+ 5d1n
27
+ 4yqv
28
+ 1hs6
29
+ 3zxh
30
+ 1ft7
31
+ 4hww
32
+ 6b1j
33
+ 3di6
34
+ 5jaz
35
+ 5ema
36
+ 6gip
37
+ 5am5
38
+ 4bnt
39
+ 1fq6
40
+ 3pqz
41
+ 6eil
42
+ 2q11
43
+ 4nw7
44
+ 4ahr
45
+ 1p1o
46
+ 3le8
47
+ 3nzk
48
+ 1eub
49
+ 1eoc
50
+ 3std
51
+ 3qw8
52
+ 4za0
53
+ 5l8a
54
+ 5teg
55
+ 5u6j
56
+ 3g2n
57
+ 2wp1
58
+ 1zyr
59
+ 1xs7
60
+ 4m6q
61
+ 2jq9
62
+ 4m5h
63
+ 4d63
64
+ 3shb
65
+ 3i6m
66
+ 4hw3
67
+ 6eq5
68
+ 3hqy
69
+ 3zos
70
+ 6h96
71
+ 2gv6
72
+ 5yjb
73
+ 4lm4
74
+ 4lwv
75
+ 3g30
76
+ 5f32
77
+ 4dkr
78
+ 6b4l
79
+ 4j9a
80
+ 5w86
81
+ 4mpn
82
+ 5m51
83
+ 1ro6
84
+ 5q0m
85
+ 3bea
86
+ 1mu6
87
+ 2wtd
88
+ 3sr4
89
+ 6dqb
90
+ 5hh5
91
+ 5q0e
92
+ 2r03
93
+ 4xv9
94
+ 4y0a
95
+ 5ota
96
+ 3u5l
97
+ 3m3o
98
+ 4idz
99
+ 2w47
100
+ 2g5u
101
+ 6cec
102
+ 5wzs
103
+ 3iu7
104
+ 5mky
105
+ 3ibc
106
+ 2qwf
107
+ 3p76
108
+ 4np3
109
+ 4kb8
110
+ 1uyi
111
+ 4tkf
112
+ 2fai
113
+ 2xvd
114
+ 5ar8
115
+ 2lzg
116
+ 3e62
117
+ 1p6e
118
+ 5wqa
119
+ 1p02
120
+ 6gjr
121
+ 1nhu
122
+ 4yyl
123
+ 3jzq
124
+ 4pce
125
+ 4gvu
126
+ 4n7g
127
+ 3s9e
128
+ 3k3b
129
+ 1pmn
130
+ 1ogd
131
+ 2j4i
132
+ 1oss
133
+ 1v0k
134
+ 6cn6
135
+ 3k3e
136
+ 2btr
137
+ 1ppk
138
+ 6egw
139
+ 4zx1
140
+ 4zfi
141
+ 5yso
142
+ 2rfn
143
+ 5cep
144
+ 2xyu
145
+ 3eou
146
+ 4mme
147
+ 4fnn
148
+ 4og7
149
+ 4gkh
150
+ 6d1l
151
+ 6dlj
152
+ 5i94
153
+ 1eb2
154
+ 4qzw
155
+ 3fu0
156
+ 4aif
157
+ 5eec
158
+ 2qlj
159
+ 5cfb
160
+ 5sy3
161
+ 1mhw
162
+ 2flu
163
+ 3e6y
164
+ 4wiv
165
+ 5oot
166
+ 4bae
167
+ 6bfd
168
+ 3po6
169
+ 2wo9
170
+ 1rp7
171
+ 1l2s
172
+ 5ab1
173
+ 3pce
174
+ 2w6z
175
+ 1iew
176
+ 3e85
177
+ 3e12
178
+ 5arf
179
+ 4jgv
180
+ 1ikw
181
+ 6goo
182
+ 4zdu
183
+ 3wcb
184
+ 2w1h
185
+ 4x2i
186
+ 6ggv
187
+ 3h0b
188
+ 5nwe
189
+ 1o46
190
+ 5f2u
191
+ 2feq
192
+ 2jnw
193
+ 3gpj
194
+ 5uex
195
+ 1ndw
196
+ 1zhk
197
+ 4mcd
198
+ 3r4m
199
+ 4cps
200
+ 3gcp
201
+ 2g9v
202
+ 5try
203
+ 4c2v
204
+ 4og3
205
+ 3otq
206
+ 2yns
207
+ 4m0e
208
+ 5tkj
209
+ 2m3z
210
+ 3pa5
211
+ 3iww
212
+ 5o0b
213
+ 3m58
214
+ 5lz2
215
+ 5u6k
216
+ 4yw6
217
+ 3ewh
218
+ 3wc7
219
+ 1rhk
220
+ 3gi4
221
+ 2wd7
222
+ 3jq8
223
+ 5yr5
224
+ 6fnf
225
+ 3hx3
226
+ 4mz6
227
+ 1ol2
228
+ 4aa5
229
+ 1rry
230
+ 1ozv
231
+ 5jf6
232
+ 4mnq
233
+ 5wa5
234
+ 3nlb
235
+ 3f6e
236
+ 2xu5
237
+ 3wk7
238
+ 4p6w
239
+ 2pmc
240
+ 3byo
241
+ 5ehe
242
+ 6chq
243
+ 6c8x
244
+ 3pp1
245
+ 3uev
246
+ 3ad7
247
+ 5aqv
248
+ 3qzq
249
+ 2z97
250
+ 5d1r
251
+ 5ajy
252
+ 4k67
253
+ 2w1e
254
+ 5j3v
255
+ 3upk
256
+ 4gj8
257
+ 1td7
258
+ 5lz9
259
+ 6erw
260
+ 4rhx
261
+ 1hxw
262
+ 6cbf
263
+ 5qal
264
+ 1q8w
265
+ 4mf0
266
+ 5t1s
267
+ 1fzk
268
+ 4tju
269
+ 5t2y
270
+ 4el5
271
+ 2v87
272
+ 1epo
273
+ 2r5p
274
+ 4pis
275
+ 3ehy
276
+ 5erg
277
+ 2r1x
278
+ 2f8i
279
+ 2wi7
280
+ 5wre
281
+ 3dek
282
+ 3hp2
283
+ 4bcw
284
+ 4uau
285
+ 5ap3
286
+ 3udy
287
+ 2ksa
288
+ 5ekg
289
+ 5lh8
290
+ 2uw7
291
+ 3uvq
292
+ 2glp
293
+ 3m3r
294
+ 6bsx
295
+ 2ddf
296
+ 6eqm
297
+ 3ufl
298
+ 2mwp
299
+ 1h4n
300
+ 5f4r
301
+ 4ybj
302
+ 4kz8
303
+ 2z60
304
+ 4ogi
305
+ 5ufp
306
+ 4uiw
307
+ 3igv
308
+ 5b56
309
+ 1bn3
310
+ 2xg9
311
+ 1u59
312
+ 4m5j
313
+ 5oua
314
+ 5qaa
315
+ 6h7j
316
+ 2o7v
317
+ 5v7t
318
+ 2r02
319
+ 2j94
320
+ 5o87
321
+ 1o3k
322
+ 4mr4
323
+ 5am7
324
+ 6g35
325
+ 1ljt
326
+ 3srv
327
+ 4bi6
328
+ 2yoh
329
+ 4zed
330
+ 5tln
331
+ 3rkb
332
+ 5jiq
333
+ 2ll6
334
+ 1tkz
335
+ 2vj6
336
+ 3qrk
337
+ 3c45
338
+ 4pks
339
+ 3nf3
340
+ 2oaz
341
+ 3pwk
342
+ 3ryy
343
+ 2y7k
344
+ 6dm8
345
+ 5ism
346
+ 5i7u
347
+ 2qtn
348
+ 5cuu
349
+ 5n1y
350
+ 4kot
351
+ 5wa1
352
+ 2wyi
353
+ 188l
354
+ 3h2o
355
+ 1sw1
356
+ 1zuc
357
+ 2nnq
358
+ 4l8m
359
+ 5j75
360
+ 4afe
361
+ 5nwi
362
+ 2n06
363
+ 3l4w
364
+ 3ibl
365
+ 4l31
366
+ 4ruy
367
+ 4ufm
368
+ 5sxk
369
+ 4hnn
370
+ 5q0x
371
+ 4zij
372
+ 3fdn
373
+ 5fe6
374
+ 3nuj
375
+ 5h8x
376
+ 5ets
377
+ 4x5z
378
+ 1w3k
379
+ 3hs9
380
+ 3ro0
381
+ 3zi8
382
+ 1nl4
383
+ 5cao
384
+ 4gjd
385
+ 2p3d
386
+ 2z8e
387
+ 3w2p
388
+ 1f28
389
+ 2iw6
390
+ 1h9l
391
+ 4o78
392
+ 1soj
393
+ 3krw
394
+ 5g4n
395
+ 3t4v
396
+ 5drr
397
+ 5o0a
398
+ 4c4i
399
+ 2vwx
400
+ 6miq
401
+ 6guk
402
+ 5c3k
403
+ 5fhn
404
+ 3o75
405
+ 5kap
406
+ 4zo5
407
+ 2jko
408
+ 3iss
409
+ 4eg6
410
+ 4fcd
411
+ 3v7s
412
+ 4bcf
413
+ 2qyn
414
+ 1ikx
415
+ 6f6i
416
+ 5os5
417
+ 1om2
418
+ 6axb
419
+ 3s3n
420
+ 1fl6
421
+ 4luo
422
+ 1qiw
423
+ 5f2w
424
+ 3udj
425
+ 5tbj
426
+ 5k0c
427
+ 5ndf
428
+ 1ek2
429
+ 4zb8
430
+ 3r9o
431
+ 5qah
432
+ 3avb
433
+ 4c52
434
+ 4ybi
435
+ 5mi6
436
+ 6fu5
437
+ 1yc4
438
+ 4ymq
439
+ 1xn3
440
+ 3dpo
441
+ 4z7q
442
+ 5ehn
443
+ 5mri
444
+ 2vcw
445
+ 2byp
446
+ 5du8
447
+ 1i7z
448
+ 1ec3
449
+ 5jmx
450
+ 4yxi
451
+ 3iw7
452
+ 3l4v
453
+ 3wcf
454
+ 5igk
455
+ 1sz0
456
+ 4x6i
457
+ 1wum
458
+ 1d3d
459
+ 3rv6
460
+ 1x7r
461
+ 2c57
462
+ 4u82
463
+ 6cwh
464
+ 2vrj
465
+ 2xag
466
+ 5nw7
467
+ 5qau
468
+ 4ocz
469
+ 3chg
470
+ 5oh1
471
+ 1hkm
472
+ 3ayd
473
+ 3mn8
474
+ 5ngf
475
+ 4bo6
476
+ 1b57
477
+ 5wzt
478
+ 4uzd
479
+ 1cyn
480
+ 3eys
481
+ 4umc
482
+ 6guh
483
+ 2e99
484
+ 2xb8
485
+ 5tus
486
+ 6ap7
487
+ 4ajn
488
+ 4od7
489
+ 6g2l
490
+ 5v40
491
+ 5wjj
492
+ 2bow
493
+ 3kc0
494
+ 3zs1
495
+ 4igq
496
+ 3b24
497
+ 5e8r
498
+ 5a4l
499
+ 4ci1
500
+ 3qd0
501
+ 2ohv
502
+ 3uw5
503
+ 3lp4
504
+ 3ftw
505
+ 6c5q
506
+ 4e0x
507
+ 4hbw
508
+ 2yoj
509
+ 4b2i
510
+ 3mkn
511
+ 1i5r
512
+ 5tig
513
+ 5bqs
514
+ 5j2x
515
+ 1oz0
516
+ 5a2s
517
+ 3fkv
518
+ 3qnj
519
+ 5w5o
520
+ 2cht
521
+ 5dj5
522
+ 5oje
523
+ 5o9y
524
+ 4hxs
525
+ 5t36
526
+ 5gmh
527
+ 4eh9
528
+ 3kr2
529
+ 3rk5
530
+ 2b54
531
+ 4ufh
532
+ 2zxa
533
+ 4mji
534
+ 4e70
535
+ 4huo
536
+ 3zrk
537
+ 3wb5
538
+ 2iw8
539
+ 4ge4
540
+ 6ccs
541
+ 5nwc
542
+ 5lmb
543
+ 4i6h
544
+ 4dhu
545
+ 5y6e
546
+ 5bvn
547
+ 4lxd
548
+ 3b3s
549
+ 5j9f
550
+ 3jzp
551
+ 2no3
552
+ 1ouk
553
+ 5epk
554
+ 1fo2
555
+ 1g7g
556
+ 4dve
557
+ 6au2
558
+ 4wt6
559
+ 3w2q
560
+ 5tdr
561
+ 4xx9
562
+ 1jg0
563
+ 2n27
564
+ 5lyn
565
+ 1p1q
566
+ 1hi3
567
+ 2wih
568
+ 4hym
569
+ 4ew3
570
+ 5ixq
571
+ 1h60
572
+ 3ppk
573
+ 4x8s
574
+ 5nzn
575
+ 3ddf
576
+ 1yvm
577
+ 1kv2
578
+ 4djq
579
+ 3sym
580
+ 4fz3
581
+ 1ze8
582
+ 5ia1
583
+ 2qcm
584
+ 5m9d
585
+ 2erz
586
+ 4g0l
587
+ 5tqg
588
+ 2i5j
589
+ 3ow6
590
+ 1met
591
+ 3r42
592
+ 5tdi
593
+ 3i97
594
+ 1alw
595
+ 1doj
596
+ 3f5j
597
+ 2r5q
598
+ 6eir
599
+ 2ie4
600
+ 5cav
601
+ 1n4m
602
+ 5wj6
603
+ 2o7n
604
+ 2r4f
605
+ 5nkn
606
+ 2g6p
607
+ 1bnu
608
+ 4uc5
609
+ 3kze
610
+ 5dg6
611
+ 1nym
612
+ 6cki
613
+ 4kmz
614
+ 6mjf
615
+ 4ch8
616
+ 6hsh
617
+ 1txr
618
+ 4zj8
619
+ 6ggn
620
+ 4qzx
621
+ 4qsu
622
+ 5ety
623
+ 1o5a
624
+ 4otf
625
+ 1pgp
626
+ 5wex
627
+ 4p4b
628
+ 4h4m
629
+ 3k54
630
+ 4j53
631
+ 5dva
632
+ 1e3v
633
+ 4wop
634
+ 1c3i
635
+ 4jls
636
+ 5mxv
637
+ 4qrc
638
+ 3fvn
639
+ 5ivn
640
+ 4xu1
641
+ 2o4n
642
+ 1m5b
643
+ 3t1a
644
+ 1mqi
645
+ 5n84
646
+ 5ifu
647
+ 5eie
648
+ 5lgr
649
+ 5unf
650
+ 5w5u
651
+ 5tq1
652
+ 3dkj
653
+ 5ye8
654
+ 1br8
655
+ 3tsd
656
+ 6dvn
657
+ 1oxg
658
+ 4kz5
659
+ 5v3h
660
+ 5in9
661
+ 4avi
662
+ 220l
663
+ 4b7z
664
+ 2xbv
665
+ 3twx
666
+ 5zo8
667
+ 2w4i
668
+ 5opc
669
+ 4q8y
670
+ 5acy
671
+ 5ncz
672
+ 3in4
673
+ 3arp
674
+ 4rt1
675
+ 4mq6
676
+ 6ey7
677
+ 3uzd
678
+ 2b9a
679
+ 5o4z
680
+ 2ow0
681
+ 6f5w
682
+ 1x7q
683
+ 5agv
684
+ 5pzm
685
+ 6afh
686
+ 4z1e
687
+ 5flp
688
+ 5xsr
689
+ 1b4z
690
+ 5f1r
691
+ 1o2g
692
+ 1uwh
693
+ 3bmo
694
+ 1p19
695
+ 2ans
696
+ 6fbv
697
+ 4lrr
698
+ 1gi9
699
+ 4xuh
700
+ 2weo
701
+ 5xpi
702
+ 4btm
703
+ 5h0e
704
+ 4gye
705
+ 6bgu
706
+ 5wlt
707
+ 5oul
708
+ 1t49
709
+ 5k1v
710
+ 5lz4
711
+ 1fsw
712
+ 6cvx
713
+ 6c3e
714
+ 6cj5
715
+ 5fqc
716
+ 1cze
717
+ 3uwl
718
+ 5l3g
719
+ 5jnc
720
+ 3ps6
721
+ 6csr
722
+ 6bw4
723
+ 3hau
724
+ 3ud9
725
+ 6csp
726
+ 5uyu
727
+ 5lqf
728
+ 4bfz
729
+ 2xp6
730
+ 3sjo
731
+ 5os3
732
+ 4z8d
733
+ 5un1
734
+ 6esy
735
+ 3kjq
736
+ 3sl4
737
+ 5mgl
738
+ 5dp5
739
+ 3n0h
740
+ 5fao
741
+ 4dmy
742
+ 6dxl
743
+ 4qw0
744
+ 3q92
745
+ 3rwf
746
+ 2xke
747
+ 4gfo
748
+ 3t03
749
+ 3pd8
750
+ 3f18
751
+ 4utr
752
+ 4cu7
753
+ 5n1s
754
+ 3wkb
755
+ 2zdt
756
+ 2yit
757
+ 4cmt
758
+ 5dpa
759
+ 4rt0
760
+ 2y5h
761
+ 3cde
762
+ 5lud
763
+ 3vva
764
+ 2wev
765
+ 5mrm
766
+ 4ypf
767
+ 5ea4
768
+ 3fr5
769
+ 4tkh
770
+ 5tyi
771
+ 1db1
772
+ 4hxj
773
+ 3hu1
774
+ 5ftg
775
+ 2eh8
776
+ 1jiz
777
+ 1gww
778
+ 5nih
779
+ 1rr6
780
+ 4fp1
781
+ 4y24
782
+ 1fki
783
+ 3lea
784
+ 4lch
785
+ 2xk1
786
+ 4qjr
787
+ 4os1
788
+ 4pft
789
+ 5eb3
790
+ 5qad
791
+ 3cpc
792
+ 3c94
793
+ 5jhb
794
+ 6b22
795
+ 3vfj
796
+ 6hmx
797
+ 1kuk
798
+ 2gh9
799
+ 1xh6
800
+ 3eyl
801
+ 5hng
802
+ 3nes
803
+ 3i1y
804
+ 2anl
805
+ 4xit
806
+ 3n2e
807
+ 2n0u
808
+ 5hdx
809
+ 4zt6
810
+ 4f6v
811
+ 5n7g
812
+ 4mc1
813
+ 4yc9
814
+ 3q6w
815
+ 2cmb
816
+ 5g4m
817
+ 3pww
818
+ 5d7x
819
+ 4cpr
820
+ 3muf
821
+ 5f95
822
+ 5o9o
823
+ 5e2w
824
+ 2chx
825
+ 2pk6
826
+ 3bet
827
+ 6cje
828
+ 2rd6
829
+ 5khk
830
+ 4eh5
831
+ 1i9l
832
+ 1swg
833
+ 4q4o
834
+ 1kf0
835
+ 1llb
836
+ 5aep
837
+ 2zp0
838
+ 3zze
839
+ 5csp
840
+ 5eyk
841
+ 3u7n
842
+ 4z0e
843
+ 5fh6
844
+ 5kj2
845
+ 2o4k
846
+ 5llg
847
+ 4b5t
848
+ 3li2
849
+ 3of8
850
+ 3mvh
851
+ 1jut
852
+ 1auj
853
+ 5awj
854
+ 5x73
855
+ 1eol
856
+ 1f90
857
+ 4crd
858
+ 1d2e
859
+ 2oht
860
+ 1ua4
861
+ 2h9m
862
+ 5byz
863
+ 3rse
864
+ 4mp2
865
+ 3hhk
866
+ 5tur
867
+ 2xxr
868
+ 2r3w
869
+ 4u6e
870
+ 4qlt
871
+ 3db8
872
+ 2bub
873
+ 1ths
874
+ 3s8o
875
+ 3x1k
876
+ 3eg6
877
+ 1jaq
878
+ 3anr
879
+ 3ua8
880
+ 2kfg
881
+ 4m3g
882
+ 3d3x
883
+ 5a5v
884
+ 3nuu
885
+ 1jh1
886
+ 1iy7
887
+ 5dlz
888
+ 4wpf
889
+ 4pvy
890
+ 1sve
891
+ 4qmy
892
+ 1pyg
893
+ 5drq
894
+ 4ivt
895
+ 2gcd
896
+ 1azg
897
+ 3wjw
898
+ 2qiq
899
+ 4leq
900
+ 2qfu
901
+ 1thz
902
+ 1y3a
903
+ 5yjp
904
+ 5cgv
905
+ 1oi9
906
+ 3cd5
907
+ 3w1f
908
+ 2v12
909
+ 5f37
910
+ 1e03
911
+ 3b67
912
+ 4q8x
913
+ 5uk8
914
+ 4v25
915
+ 3u8k
916
+ 5l8o
917
+ 5hlb
918
+ 4qfn
919
+ 4li7
920
+ 1dth
921
+ 4gkm
922
+ 6f3e
923
+ 4rn4
924
+ 3vtb
925
+ 5l6i
926
+ 4lrh
927
+ 4jvq
928
+ 5est
929
+ 3gtc
930
+ 2jbu
931
+ 2yfx
932
+ 4yx9
933
+ 5bs0
934
+ 5ea3
935
+ 1vyf
936
+ 5a4e
937
+ 3kku
938
+ 3cyw
939
+ 3tvx
940
+ 5mkj
941
+ 6b4d
942
+ 4bny
943
+ 5j8x
944
+ 5tza
945
+ 1uk1
946
+ 5yg2
947
+ 1d6v
948
+ 5t4e
949
+ 2j4g
950
+ 5tci
951
+ 1z9y
952
+ 2ca8
953
+ 4hyf
954
+ 5mg2
955
+ 4bcq
956
+ 1lqf
957
+ 6b3v
958
+ 5htc
959
+ 2qh6
960
+ 4pul
961
+ 1gj7
962
+ 4mmf
963
+ 3sfg
964
+ 5ap2
965
+ 2oi9
966
+ 4h81
967
+ 3s77
968
+ 1awh
969
+ 3s74
970
+ 3oyp
971
+ 4u5u
972
+ 6euz
973
+ 6eq3
974
+ 4obq
975
+ 4u4x
976
+ 2r23
977
+ 4tjw
978
+ 4anq
979
+ 2a4z
980
+ 2mwy
981
+ 4pzh
982
+ 5m7m
983
+ 4mzj
984
+ 1o3l
985
+ 3d62
986
+ 2xow
987
+ 5hog
988
+ 3rwq
989
+ 2kmx
990
+ 4fpf
991
+ 5ajx
992
+ 1f5k
993
+ 4de7
994
+ 5c7c
995
+ 3q77
996
+ 4pnr
997
+ 5tkb
998
+ 5hz8
999
+ 2vle
1000
+ 5ho7
1001
+ 5xmx
1002
+ 2xpk
1003
+ 6mdq
1004
+ 3tge
1005
+ 5fls
1006
+ 3oku
1007
+ 5jr2
1008
+ 3e0q
1009
+ 2l98
1010
+ 2uzj
1011
+ 5csz
1012
+ 5yr6
1013
+ 3f7i
1014
+ 1ex8
1015
+ 6ft7
1016
+ 2bdj
1017
+ 5l7g
1018
+ 1cnx
1019
+ 1mmp
1020
+ 5j8m
1021
+ 2ay2
1022
+ 6fnq
1023
+ 5eng
1024
+ 1pg2
1025
+ 4a6v
1026
+ 3ivv
1027
+ 2zq2
1028
+ 4wag
1029
+ 3s7a
1030
+ 4is6
1031
+ 1lhw
1032
+ 5tq8
1033
+ 3v51
1034
+ 4w9x
1035
+ 5lto
1036
+ 1o86
1037
+ 5a69
1038
+ 4zg7
1039
+ 4unq
1040
+ 2q88
1041
+ 5c1m
1042
+ 6bgy
1043
+ 4fil
1044
+ 2qi1
1045
+ 2x7o
1046
+ 6exj
1047
+ 3nkk
1048
+ 1q65
1049
+ 2xxw
1050
+ 3l7d
1051
+ 1om9
1052
+ 4jq7
1053
+ 1lf2
1054
+ 4xg4
1055
+ 3sgx
1056
+ 1sdt
1057
+ 3ow4
1058
+ 4kp8
1059
+ 2cem
1060
+ 5nzo
1061
+ 3oaw
1062
+ 2wwj
1063
+ 2v6n
1064
+ 4g11
1065
+ 1lf3
1066
+ 3ljz
1067
+ 1amn
1068
+ 6hm4
1069
+ 3jq9
1070
+ 1qx1
1071
+ 2wi5
1072
+ 4old
1073
+ 1g5s
1074
+ 3tic
1075
+ 3cs8
1076
+ 4er4
1077
+ 5kr0
1078
+ 2pe0
1079
+ 5abf
1080
+ 3v04
1081
+ 4e6q
1082
+ 2yc3
1083
+ 3bys
1084
+ 5jeo
1085
+ 4b00
1086
+ 4bek
1087
+ 4fci
1088
+ 4qna
1089
+ 5eh5
1090
+ 4r02
1091
+ 5ef7
1092
+ 4hu1
1093
+ 5v5n
1094
+ 5ehp
1095
+ 5yr4
1096
+ 3dt1
1097
+ 4cd0
1098
+ 6gl8
1099
+ 3uvl
1100
+ 3v0l
1101
+ 5ne5
1102
+ 4nvp
1103
+ 4qok
1104
+ 4ipj
1105
+ 4zyi
1106
+ 1o3j
1107
+ 1lrt
1108
+ 3iog
1109
+ 5zwe
1110
+ 4hbn
1111
+ 2gz2
1112
+ 5tco
1113
+ 3gc5
1114
+ 4l6t
1115
+ 5mnx
1116
+ 5m23
1117
+ 2etr
1118
+ 5gwy
1119
+ 3b5j
1120
+ 1oai
1121
+ 2ath
1122
+ 2v11
1123
+ 5wuk
1124
+ 2ym6
1125
+ 4mwu
1126
+ 5u69
1127
+ 4i8w
1128
+ 6g86
1129
+ 4f7l
1130
+ 2yi5
1131
+ 4nan
1132
+ 1j37
1133
+ 2xup
1134
+ 2byi
1135
+ 4f63
1136
+ 3m3x
1137
+ 2bpm
1138
+ 2wb5
1139
+ 6fel
1140
+ 3tdc
1141
+ 4z1s
1142
+ 4fgy
1143
+ 3cyz
1144
+ 6bh1
1145
+ 3m8p
1146
+ 2df6
1147
+ 3jrs
1148
+ 2wmv
1149
+ 6fau
1150
+ 5oah
1151
+ 3d50
1152
+ 3aza
1153
+ 1uwt
1154
+ 4nct
1155
+ 1jqe
1156
+ 4nb3
1157
+ 4y3b
1158
+ 1okw
1159
+ 1db4
1160
+ 4n98
1161
+ 3inh
1162
+ 6hzp
1163
+ 1hdt
1164
+ 6aoy
1165
+ 4o3u
1166
+ 5es1
1167
+ 5i9i
1168
+ 6ccm
1169
+ 5nev
1170
+ 5wp5
1171
+ 4bo7
1172
+ 5byy
1173
+ 3su2
1174
+ 4hlf
1175
+ 4c1g
1176
+ 3sww
1177
+ 2os9
1178
+ 4men
1179
+ 2wxd
1180
+ 4qq4
1181
+ 5eei
1182
+ 4hnf
1183
+ 3b26
1184
+ 3eht
1185
+ 3oqf
1186
+ 3dd0
1187
+ 3si4
1188
+ 4v1f
1189
+ 4e6d
1190
+ 2c8w
1191
+ 1jyi
1192
+ 4ob1
1193
+ 5x4n
1194
+ 5alc
1195
+ 2pbw
1196
+ 5eym
1197
+ 4e28
1198
+ 2xui
1199
+ 4qtl
1200
+ 5jz9
1201
+ 3chr
1202
+ 2p2a
1203
+ 1v1j
1204
+ 4o3b
1205
+ 5v83
1206
+ 5v5e
1207
+ 3byu
1208
+ 7abp
1209
+ 5jga
1210
+ 3sw2
1211
+ 5jgb
1212
+ 4dtk
1213
+ 1q91
1214
+ 3cqu
1215
+ 1nje
1216
+ 3vhd
1217
+ 4knm
1218
+ 4yrt
1219
+ 1bgo
1220
+ 5ih5
1221
+ 6afe
1222
+ 4res
1223
+ 1akt
1224
+ 1kyv
1225
+ 1sqn
1226
+ 4oas
1227
+ 5u5l
1228
+ 1v79
1229
+ 3hp5
1230
+ 5ivc
1231
+ 1q84
1232
+ 3avg
1233
+ 3tws
1234
+ 1rmz
1235
+ 4rse
1236
+ 5zae
1237
+ 5w2q
1238
+ 5glu
1239
+ 5b4w
1240
+ 5f62
1241
+ 4y6r
1242
+ 4mha
1243
+ 5etk
1244
+ 5nzm
1245
+ 4o2b
1246
+ 4cwq
1247
+ 2pj8
1248
+ 3lvw
1249
+ 2fw3
1250
+ 1dwc
1251
+ 3zln
1252
+ 3ejq
1253
+ 2f6y
1254
+ 6bic
1255
+ 4p0x
1256
+ 2cct
1257
+ 3dxh
1258
+ 2xhx
1259
+ 1d3q
1260
+ 3qbh
1261
+ 5tkt
1262
+ 4acd
1263
+ 1yt7
1264
+ 5aki
1265
+ 3zy2
1266
+ 4tw7
1267
+ 3b3w
1268
+ 1nkm
1269
+ 5uoo
1270
+ 2l75
1271
+ 4l7u
1272
+ 4iut
1273
+ 1pbq
1274
+ 4dow
1275
+ 3hab
1276
+ 1owe
1277
+ 6ft3
1278
+ 4y2q
1279
+ 2hs1
1280
+ 6gu6
1281
+ 4dpi
1282
+ 5aac
1283
+ 4j2c
1284
+ 1s50
1285
+ 6nao
1286
+ 5fox
1287
+ 3jvs
1288
+ 4ozl
1289
+ 1zub
1290
+ 5wxp
1291
+ 5iuh
1292
+ 5oh7
1293
+ 2c90
1294
+ 5owl
1295
+ 3oof
1296
+ 4ona
1297
+ 4ydg
1298
+ 1h08
1299
+ 1uk0
1300
+ 2fxs
1301
+ 3f37
1302
+ 4xtz
1303
+ 4mww
1304
+ 6gg4
1305
+ 4zz3
1306
+ 5cf4
1307
+ 3blr
1308
+ 4twc
1309
+ 3zsz
1310
+ 4er2
1311
+ 4uuh
1312
+ 1i8j
1313
+ 6faw
1314
+ 5opv
1315
+ 3voz
1316
+ 3d1g
1317
+ 5q0n
1318
+ 3kfc
1319
+ 3qqs
1320
+ 1oay
1321
+ 1uvs
1322
+ 5os4
1323
+ 4abe
1324
+ 1r0x
1325
+ 4jwr
1326
+ 1l6m
1327
+ 3nf6
1328
+ 3qip
1329
+ 1nlt
1330
+ 3zdg
1331
+ 3o84
1332
+ 3ehn
1333
+ 3k16
1334
+ 5szb
1335
+ 2j2u
1336
+ 5aiv
1337
+ 2weg
1338
+ 3dz4
1339
+ 4jdf
1340
+ 1ttm
1341
+ 3wyy
1342
+ 3ifo
1343
+ 5j41
1344
+ 3fl5
1345
+ 1q1m
1346
+ 2y80
1347
+ 3fr4
1348
+ 3o56
1349
+ 1yrs
1350
+ 2gvv
1351
+ 3uxk
1352
+ 2zq0
1353
+ 5uac
1354
+ 3sfi
1355
+ 2wk6
1356
+ 184l
1357
+ 6gvz
1358
+ 6dik
1359
+ 3u8w
1360
+ 3rxb
1361
+ 2qpq
1362
+ 3nwe
1363
+ 13gs
1364
+ 4gpl
1365
+ 4rxz
1366
+ 4qbm
1367
+ 5y9l
1368
+ 4nh8
1369
+ 4rra
1370
+ 5ey8
1371
+ 4j03
1372
+ 2ou7
1373
+ 5tt3
1374
+ 1uwf
1375
+ 4dgr
1376
+ 5xzr
1377
+ 4mw4
1378
+ 3n5k
1379
+ 4umq
1380
+ 2l6e
1381
+ 6drg
1382
+ 6ghp
1383
+ 2ksp
1384
+ 1sps
1385
+ 5hna
1386
+ 6gnp
1387
+ 3dxk
1388
+ 5eom
1389
+ 1e34
1390
+ 2xfi
1391
+ 2itp
1392
+ 2zlf
1393
+ 4iva
1394
+ 2y4m
1395
+ 4o1b
1396
+ 2o9v
1397
+ 4i7f
1398
+ 4ezw
1399
+ 1k4h
1400
+ 2xgm
1401
+ 4nud
1402
+ 3m55
1403
+ 6ezg
1404
+ 4x34
1405
+ 1czo
1406
+ 5fqr
1407
+ 5he4
1408
+ 2h6k
1409
+ 3fmr
1410
+ 4kzb
1411
+ 2g9q
1412
+ 3tpx
1413
+ 4hpy
1414
+ 2bo4
1415
+ 4nhx
1416
+ 4mvh
1417
+ 2nq7
1418
+ 3n5u
1419
+ 3rjw
1420
+ 3emg
1421
+ 2xm9
1422
+ 1kzn
1423
+ 5faq
1424
+ 5f1u
1425
+ 1c6y
1426
+ 4j8g
1427
+ 5lvr
1428
+ 1xuo
1429
+ 2vgo
1430
+ 1grp
1431
+ 4uxb
1432
+ 2rol
1433
+ 5cbm
1434
+ 6hke
1435
+ 5tr6
1436
+ 1uyc
1437
+ 3d6o
1438
+ 5awu
1439
+ 4wz8
1440
+ 5egm
1441
+ 1duv
1442
+ 6b4n
1443
+ 1t7j
1444
+ 5aqj
1445
+ 4jh0
1446
+ 4q7s
1447
+ 5cpr
1448
+ 1bqm
1449
+ 1g9c
1450
+ 4fem
1451
+ 4wnk
1452
+ 4uv8
1453
+ 5hgq
1454
+ 1ayu
1455
+ 6co4
1456
+ 6ck3
1457
+ 3uzj
1458
+ 2qwe
1459
+ 3u2q
1460
+ 1nl9
1461
+ 6bd1
1462
+ 5yzd
1463
+ 4r92
1464
+ 2i0j
1465
+ 4hvh
1466
+ 4ool
1467
+ 3s0n
1468
+ 1ba8
1469
+ 3b4p
1470
+ 2ea4
1471
+ 5k76
1472
+ 2xzq
1473
+ 4yur
1474
+ 1pq6
1475
+ 2y81
1476
+ 3f8s
1477
+ 2hwh
1478
+ 1q4w
1479
+ 4h39
1480
+ 3zmh
1481
+ 1ftl
1482
+ 4ok5
1483
+ 2qcn
1484
+ 3qtf
1485
+ 1g52
1486
+ 3vw0
1487
+ 3i4a
1488
+ 5x79
1489
+ 2am2
1490
+ 2ym7
1491
+ 5odu
1492
+ 3adu
1493
+ 6cea
1494
+ 3t6y
1495
+ 4pgc
1496
+ 1k1j
1497
+ 3d14
1498
+ 3el4
1499
+ 4nyf
1500
+ 2y0j
1501
+ 2w1c
1502
+ 5l7k
1503
+ 4n6g
1504
+ 5cp5
1505
+ 3ekr
1506
+ 2isc
1507
+ 5cbs
1508
+ 5hvu
1509
+ 4n00
1510
+ 5mpn
1511
+ 5zoo
1512
+ 3v43
1513
+ 1t69
1514
+ 1oj5
1515
+ 5lle
1516
+ 5wef
1517
+ 2ya8
1518
+ 4ezk
1519
+ 2c6o
1520
+ 5vrl
1521
+ 1b4d
1522
+ 5za8
1523
+ 1i80
1524
+ 2ccb
1525
+ 6ce2
1526
+ 4cfx
1527
+ 3m5e
1528
+ 1k22
1529
+ 6eea
1530
+ 3s3m
1531
+ 5t4f
1532
+ 2xyn
1533
+ 6br3
1534
+ 3krl
1535
+ 3upy
1536
+ 2rm0
1537
+ 5q1f
1538
+ 4app
1539
+ 7gch
1540
+ 5hx8
1541
+ 4ok6
1542
+ 4zla
1543
+ 5g3w
1544
+ 1h27
1545
+ 1fkb
1546
+ 1idg
1547
+ 5c8n
1548
+ 1h46
1549
+ 3w33
1550
+ 3rzb
1551
+ 4xg6
1552
+ 2rib
1553
+ 2uuo
1554
+ 1r1h
1555
+ 5agu
1556
+ 6dcz
1557
+ 1r58
1558
+ 1nlo
1559
+ 3g70
1560
+ 2ofu
1561
+ 4k6z
1562
+ 4x13
1563
+ 5ihc
1564
+ 2yiw
1565
+ 6bgw
1566
+ 6gji
1567
+ 1tve
1568
+ 1gpn
1569
+ 5tq5
1570
+ 4ujb
1571
+ 4l33
1572
+ 6cqz
1573
+ 4avh
1574
+ 5uga
1575
+ 5tzy
1576
+ 5op6
1577
+ 1m7d
1578
+ 4aia
1579
+ 2bxt
1580
+ 4qgh
1581
+ 3uuo
1582
+ 5vkm
1583
+ 4jr3
1584
+ 1wdy
1585
+ 4o4r
1586
+ 2c3j
1587
+ 2r9m
1588
+ 5ual
1589
+ 3vfq
1590
+ 4c16
1591
+ 6h7l
1592
+ 3tv6
1593
+ 2pyn
1594
+ 2bj4
1595
+ 4qvy
1596
+ 4abk
1597
+ 3m53
1598
+ 3cwk
1599
+ 3d52
1600
+ 1a4q
1601
+ 4rvl
1602
+ 1d04
1603
+ 3thd
1604
+ 1ogz
1605
+ 3luo
1606
+ 2ftd
1607
+ 4ran
1608
+ 3uph
1609
+ 3c8b
1610
+ 2xp3
1611
+ 2vtl
1612
+ 1fj4
1613
+ 2zm3
1614
+ 3rxl
1615
+ 1xmy
1616
+ 3uo5
1617
+ 1q1y
1618
+ 2ql9
1619
+ 1bhf
1620
+ 2jg8
1621
+ 6fuh
1622
+ 2q2n
1623
+ 4aof
1624
+ 3ff3
1625
+ 1heg
1626
+ 4cae
1627
+ 6fkq
1628
+ 4i9i
1629
+ 3ask
1630
+ 1dbj
1631
+ 4zbf
1632
+ 2io6
1633
+ 3d1y
1634
+ 5t2l
1635
+ 3kwf
1636
+ 1juj
1637
+ 3i0r
1638
+ 4gui
1639
+ 2hy0
1640
+ 3i7c
1641
+ 6g9m
1642
+ 3nnx
1643
+ 5u48
1644
+ 2v2v
1645
+ 6f5h
1646
+ 5mwh
1647
+ 5m0d
1648
+ 5fue
1649
+ 3k5k
1650
+ 5edi
1651
+ 4d2p
1652
+ 2cm7
1653
+ 8cpa
1654
+ 3doy
1655
+ 3k99
1656
+ 1vyg
1657
+ 6ckw
1658
+ 6bmv
1659
+ 5m4q
1660
+ 5j4n
1661
+ 4tpt
1662
+ 2jbv
1663
+ 3uqr
1664
+ 4di2
1665
+ 5ue4
1666
+ 5enj
1667
+ 3csl
1668
+ 3m35
1669
+ 4a9t
1670
+ 5khx
1671
+ 5f0h
1672
+ 4wq3
1673
+ 4ktu
1674
+ 2qbr
1675
+ 1sbr
1676
+ 2f6z
1677
+ 6cgt
1678
+ 1t4v
1679
+ 5nra
1680
+ 3fed
1681
+ 5l2o
1682
+ 3mb7
1683
+ 4bzn
1684
+ 1efi
1685
+ 3nrm
1686
+ 5kjm
1687
+ 2cle
1688
+ 1j07
1689
+ 3h9k
1690
+ 4d83
1691
+ 5n4s
1692
+ 5yie
1693
+ 5yz7
1694
+ 5cr7
1695
+ 4e35
1696
+ 6di0
1697
+ 3omm
1698
+ 4w9n
1699
+ 4z1j
1700
+ 6b98
1701
+ 4x61
1702
+ 4x6m
1703
+ 2pu1
1704
+ 4q1s
1705
+ 3n7a
1706
+ 4qbb
1707
+ 6fng
1708
+ 5ya5
1709
+ 3qiz
1710
+ 5awt
1711
+ 3sv8
1712
+ 3doz
1713
+ 6dlx
1714
+ 4rkx
1715
+ 3h91
1716
+ 4uma
1717
+ 1lvc
1718
+ 1uy9
1719
+ 5gso
1720
+ 5gsa
1721
+ 5drc
1722
+ 4j5e
1723
+ 2o22
1724
+ 4loo
1725
+ 5t66
1726
+ 3rz0
1727
+ 5unj
1728
+ 4g17
1729
+ 6bhv
1730
+ 3cii
1731
+ 5qcl
1732
+ 4nw6
1733
+ 4qqi
1734
+ 3r0i
1735
+ 5uff
1736
+ 4lbp
1737
+ 4bqy
1738
+ 4l51
1739
+ 5akh
1740
+ 2yjb
1741
+ 3dx0
1742
+ 3v49
1743
+ 3g42
1744
+ 5xpp
1745
+ 2c4g
1746
+ 2bz6
1747
+ 4fny
1748
+ 3x00
1749
+ 4nat
1750
+ 6f8g
1751
+ 4ppc
1752
+ 3lpb
1753
+ 3tlh
1754
+ 4bcs
1755
+ 3nok
1756
+ 3lfs
1757
+ 3o9h
1758
+ 2vf6
1759
+ 3we4
1760
+ 2yig
1761
+ 2kgi
1762
+ 5llc
1763
+ 2jgs
1764
+ 1fkh
1765
+ 5ogb
1766
+ 5w8i
1767
+ 1a42
1768
+ 5web
1769
+ 2llq
1770
+ 5j7b
1771
+ 2c6l
1772
+ 4u0e
1773
+ 2g9r
1774
+ 1lt5
1775
+ 5ai5
1776
+ 3khj
1777
+ 6hh3
1778
+ 4uac
1779
+ 5nob
1780
+ 1ynd
1781
+ 2on6
1782
+ 6hd4
1783
+ 3msj
1784
+ 5ut4
1785
+ 2wkz
1786
+ 5jvi
1787
+ 3gww
1788
+ 4uja
1789
+ 1pq3
1790
+ 4u5v
1791
+ 4z83
1792
+ 6dh8
1793
+ 1h36
1794
+ 3vjk
1795
+ 1unh
1796
+ 3hv7
1797
+ 2vxj
1798
+ 3l38
1799
+ 1a08
1800
+ 5lgo
1801
+ 1fgi
1802
+ 5fqs
1803
+ 5a5p
1804
+ 5oss
1805
+ 4o0z
1806
+ 1gvx
1807
+ 1xur
1808
+ 5vqe
1809
+ 2j7w
1810
+ 2adm
1811
+ 3b1m
1812
+ 5zah
1813
+ 1iwq
1814
+ 4zx8
1815
+ 3wkd
1816
+ 5mtv
1817
+ 5myd
1818
+ 3t6r
1819
+ 2c6n
1820
+ 2c6e
1821
+ 2gpp
1822
+ 3opm
1823
+ 5n9r
1824
+ 3r0t
1825
+ 6gue
1826
+ 4b70
1827
+ 3gnv
1828
+ 5nyz
1829
+ 1fh7
1830
+ 5wae
1831
+ 2bvs
1832
+ 1tow
1833
+ 1toj
1834
+ 5een
1835
+ 5fog
1836
+ 6e9a
1837
+ 5jek
1838
+ 1jet
1839
+ 5qck
1840
+ 6coj
1841
+ 1if8
1842
+ 5t6g
1843
+ 4b85
1844
+ 2fm0
1845
+ 5eje
1846
+ 1ezf
1847
+ 3twd
1848
+ 3mo8
1849
+ 4rvt
1850
+ 6bbu
1851
+ 4ejl
1852
+ 1waw
1853
+ 4heu
1854
+ 4hgs
1855
+ 6b67
1856
+ 2cmc
1857
+ 3arv
1858
+ 4r4t
1859
+ 1j80
1860
+ 5ads
1861
+ 6abp
1862
+ 3el7
1863
+ 6fex
1864
+ 4m5o
1865
+ 3qgw
1866
+ 5t8o
1867
+ 3pcn
1868
+ 3nw3
1869
+ 5w84
1870
+ 3bqn
1871
+ 4pin
1872
+ 2w3o
1873
+ 5yic
1874
+ 4lwh
1875
+ 3vo3
1876
+ 4unr
1877
+ 4cp5
1878
+ 4b73
1879
+ 6c6o
1880
+ 1xz8
1881
+ 5zo9
1882
+ 2qn3
1883
+ 2g1r
1884
+ 4xc2
1885
+ 1oe8
1886
+ 2z94
1887
+ 5aic
1888
+ 4m12
1889
+ 1sld
1890
+ 2h96
1891
+ 6cn5
1892
+ 3bvb
1893
+ 5x27
1894
+ 3usx
1895
+ 1ddm
1896
+ 3fal
1897
+ 2iit
1898
+ 3fzt
1899
+ 5vgi
1900
+ 5jv1
1901
+ 1py1
1902
+ 5ktx
1903
+ 4b35
1904
+ 1m0q
1905
+ 5kjk
1906
+ 2vey
1907
+ 6dkg
1908
+ 5ei3
1909
+ 6gbx
1910
+ 3e01
1911
+ 1p03
1912
+ 4zuq
1913
+ 4wbo
1914
+ 4rfd
1915
+ 3zlo
1916
+ 5alb
1917
+ 5b5o
1918
+ 3liw
1919
+ 2mwo
1920
+ 4yjl
1921
+ 4ab8
1922
+ 5nau
1923
+ 4at4
1924
+ 1ke0
1925
+ 1zz2
1926
+ 2ccs
1927
+ 5hz6
1928
+ 2cll
1929
+ 4m2v
1930
+ 1q3d
1931
+ 3ozt
1932
+ 4ge7
1933
+ 3pkd
1934
+ 5t70
1935
+ 3ts4
1936
+ 5q0y
1937
+ 5q1a
1938
+ 3ogm
1939
+ 5t18
1940
+ 5c7d
1941
+ 3g2k
1942
+ 1ikv
1943
+ 2j7b
1944
+ 4obp
1945
+ 4k6y
1946
+ 2fb8
1947
+ 3vbd
1948
+ 5ung
1949
+ 4b32
1950
+ 2qlq
1951
+ 3jzs
1952
+ 3tiz
1953
+ 1v2k
1954
+ 6ee2
1955
+ 3zzf
1956
+ 1hpv
1957
+ 5xo2
1958
+ 5e2l
1959
+ 1jn4
1960
+ 5aly
1961
+ 2jjr
1962
+ 4agm
1963
+ 3rwp
1964
+ 3r7r
1965
+ 3zbf
1966
+ 4url
1967
+ 2wm0
1968
+ 6dcy
1969
+ 2brg
1970
+ 5dlx
1971
+ 4g2y
1972
+ 5jzs
1973
+ 5fd2
1974
+ 5nqr
1975
+ 4jpe
1976
+ 3tkm
1977
+ 5ti0
1978
+ 5lxp
1979
+ 3avi
1980
+ 5zuj
1981
+ 3p3h
1982
+ 3d9v
1983
+ 4gs8
1984
+ 4cwn
1985
+ 5q1i
1986
+ 6b7a
1987
+ 1gjc
1988
+ 4dbn
1989
+ 5yp5
1990
+ 5ukj
1991
+ 6arj
1992
+ 4iuo
1993
+ 4ibk
1994
+ 5ali
1995
+ 6fgq
1996
+ 1b5h
1997
+ 2bvx
1998
+ 2y36
1999
+ 3f81
2000
+ 4e3f
2001
+ 3t84
2002
+ 4na7
2003
+ 2ow7
2004
+ 4ob0
2005
+ 1qb1
2006
+ 2iku
2007
+ 5kh7
2008
+ 1eve
2009
+ 1k2v
2010
+ 3npc
2011
+ 2w68
2012
+ 6gqm
2013
+ 1b2h
2014
+ 4dfl
2015
+ 4l19
2016
+ 3dri
2017
+ 5ab9
2018
+ 4qzs
2019
+ 3sqq
2020
+ 1d4l
2021
+ 4dn0
2022
+ 9hvp
2023
+ 2lko
2024
+ 1lev
2025
+ 3aau
2026
+ 4mib
2027
+ 3g2l
2028
+ 1lee
2029
+ 1o5e
2030
+ 4os4
2031
+ 4b34
2032
+ 3m56
2033
+ 4qvw
2034
+ 4i4f
2035
+ 5wf6
2036
+ 3u7k
2037
+ 4j5d
2038
+ 1r4w
2039
+ 5iaw
2040
+ 3buw
2041
+ 1eou
2042
+ 3vbq
2043
+ 6g5j
2044
+ 5t4u
2045
+ 3fuh
2046
+ 2pj4
2047
+ 2zn7
2048
+ 4n4v
2049
+ 2obo
2050
+ 1tjp
2051
+ 6b1w
2052
+ 1ru2
2053
+ 1fh9
2054
+ 4wcf
2055
+ 6hh5
2056
+ 5orz
2057
+ 4xmb
2058
+ 4jfv
2059
+ 6ekq
2060
+ 5q0i
2061
+ 3h1z
2062
+ 4ycv
2063
+ 4ura
2064
+ 4prn
2065
+ 6ev0
2066
+ 1v3x
2067
+ 2pqz
2068
+ 2xjx
2069
+ 3k5i
2070
+ 5dtq
2071
+ 4pn1
2072
+ 5bue
2073
+ 3ik1
2074
+ 2h5d
2075
+ 4jbl
2076
+ 2x97
2077
+ 2q72
2078
+ 2qpj
2079
+ 1acj
2080
+ 4k6t
2081
+ 6b1e
2082
+ 3f3e
2083
+ 2yog
2084
+ 4kio
2085
+ 3qem
2086
+ 3ued
2087
+ 5mge
2088
+ 2bfq
2089
+ 3mxs
2090
+ 4k1b
2091
+ 4p6g
2092
+ 3td4
2093
+ 5mnh
2094
+ 5ien
2095
+ 3lk8
2096
+ 2zx6
2097
+ 1rql
2098
+ 3rm4
2099
+ 5u4c
2100
+ 4kb7
2101
+ 5ap1
2102
+ 4rj8
2103
+ 5tkk
2104
+ 4j04
2105
+ 6i8b
2106
+ 2nxl
2107
+ 3zm4
2108
+ 4dy6
2109
+ 3h0z
2110
+ 4zyf
2111
+ 3e7o
2112
+ 5li3
2113
+ 4qjw
2114
+ 4aq4
2115
+ 3kpu
2116
+ 5oq7
2117
+ 3h2c
2118
+ 2qnq
2119
+ 5jn9
2120
+ 5lvx
2121
+ 1r10
2122
+ 2l8r
2123
+ 5ega
2124
+ 2wi3
2125
+ 3rxk
2126
+ 1upf
2127
+ 6chm
2128
+ 4w9k
2129
+ 1vja
2130
+ 4zw7
2131
+ 5ov8
2132
+ 5wag
2133
+ 1x8j
2134
+ 5a6b
2135
+ 6br2
2136
+ 4owv
2137
+ 5ev8
2138
+ 3drs
2139
+ 6fdc
2140
+ 3tvl
2141
+ 5mlo
2142
+ 2xaf
2143
+ 3h0j
2144
+ 4yas
2145
+ 1o35
2146
+ 5dex
2147
+ 2x52
2148
+ 3lgs
2149
+ 3fkt
2150
+ 3dcr
2151
+ 2ax6
2152
+ 6cnj
2153
+ 4y5d
2154
+ 4umb
2155
+ 3l4y
2156
+ 6fkp
2157
+ 5aoj
2158
+ 1ony
2159
+ 6b95
2160
+ 5ueu
2161
+ 5ahj
2162
+ 2xel
2163
+ 2g1y
2164
+ 6iiv
2165
+ 3b1t
2166
+ 5dht
2167
+ 4zup
2168
+ 4rlk
2169
+ 1axs
2170
+ 5fnj
2171
+ 1izi
2172
+ 4gvm
2173
+ 4xmr
2174
+ 5yhg
2175
+ 3g45
2176
+ 3tao
2177
+ 2q7o
2178
+ 1pme
2179
+ 4rrs
2180
+ 6gl3
2181
+ 4pct
2182
+ 2tsr
2183
+ 3qtw
2184
+ 4uua
2185
+ 5abe
2186
+ 3ozp
2187
+ 4lc7
2188
+ 2bu5
2189
+ 5kya
2190
+ 2q6h
2191
+ 1n95
2192
+ 3k84
2193
+ 3bl1
2194
+ 3ui2
2195
+ 2cgv
2196
+ 4ez5
2197
+ 3t3c
2198
+ 3puk
2199
+ 6css
2200
+ 1ghw
2201
+ 3ezr
2202
+ 5il1
2203
+ 1a2c
2204
+ 5j6l
2205
+ 2gss
2206
+ 1n1m
2207
+ 5f1x
2208
+ 5vt4
2209
+ 4y62
2210
+ 1bcd
2211
+ 1pwu
2212
+ 4fht
2213
+ 3w9k
2214
+ 3qu0
2215
+ 1zxc
2216
+ 2vtj
2217
+ 3dnt
2218
+ 2pu2
2219
+ 4hej
2220
+ 4x3e
2221
+ 5ive
2222
+ 2wo8
2223
+ 4ozn
2224
+ 5i2f
2225
+ 6dj7
2226
+ 4a6w
2227
+ 4q1x
2228
+ 2g63
2229
+ 1jak
2230
+ 5knr
2231
+ 5y7z
2232
+ 5npb
2233
+ 2izx
2234
+ 3jpx
2235
+ 2x38
2236
+ 4knr
2237
+ 3rwe
2238
+ 4aze
2239
+ 4nrm
2240
+ 6er3
2241
+ 6d59
2242
+ 5a2k
2243
+ 4cr9
2244
+ 5i29
2245
+ 4mbi
2246
+ 6ht1
2247
+ 2d1n
2248
+ 1uj5
2249
+ 3ljt
2250
+ 1kyn
2251
+ 5osd
2252
+ 5whc
2253
+ 4qws
2254
+ 3uwo
2255
+ 3th9
2256
+ 2r2l
2257
+ 5n8v
2258
+ 2y54
2259
+ 4m5n
2260
+ 1yy6
2261
+ 6fui
2262
+ 3lvp
2263
+ 2gdo
2264
+ 3hyf
2265
+ 2zva
2266
+ 3zmu
2267
+ 2pt9
2268
+ 5nxg
2269
+ 4zsm
2270
+ 5eif
2271
+ 4qsk
2272
+ 4djr
2273
+ 2ceo
2274
+ 4apo
2275
+ 5gj9
2276
+ 1q4x
2277
+ 4wz5
2278
+ 2vx9
2279
+ 2qzl
2280
+ 4keq
2281
+ 2b17
2282
+ 4kzu
2283
+ 1bdr
2284
+ 4zjr
2285
+ 3ftz
2286
+ 3cic
2287
+ 2c2l
2288
+ 4yo8
2289
+ 1vj5
2290
+ 1w4p
2291
+ 1aaq
2292
+ 2q9n
2293
+ 2yay
2294
+ 4bea
2295
+ 1szd
2296
+ 5acb
2297
+ 5db1
2298
+ 5ne1
2299
+ 1vj6
2300
+ 3n5h
2301
+ 3kpv
2302
+ 2hvc
2303
+ 3arr
2304
+ 3v0p
2305
+ 2fqw
2306
+ 5jf7
2307
+ 3udq
2308
+ 2n14
2309
+ 5mk3
2310
+ 2pj6
2311
+ 4pp5
2312
+ 4c68
2313
+ 5q15
2314
+ 3u8h
2315
+ 4erw
2316
+ 1sb1
2317
+ 5iz8
2318
+ 6iiu
2319
+ 3txo
2320
+ 4nah
2321
+ 4ir6
2322
+ 4ufy
2323
+ 6fer
2324
+ 4zba
2325
+ 1jij
2326
+ 2piy
2327
+ 5td2
2328
+ 4yzc
2329
+ 5qap
2330
+ 5yhl
2331
+ 1h1p
2332
+ 5ni7
2333
+ 4pf3
2334
+ 4a6b
2335
+ 4uy1
2336
+ 2xwy
2337
+ 4el0
2338
+ 2w71
2339
+ 2p8h
2340
+ 4d7b
2341
+ 4yoj
2342
+ 2ms4
2343
+ 4iwz
2344
+ 5ctb
2345
+ 3t85
2346
+ 4lpg
2347
+ 5bvw
2348
+ 3uri
2349
+ 3hfb
2350
+ 4mr3
2351
+ 3lq8
2352
+ 3mlb
2353
+ 4qwf
2354
+ 4ixv
2355
+ 4njd
2356
+ 5tzo
2357
+ 5qa9
2358
+ 5ai8
2359
+ 3o23
2360
+ 4rn6
2361
+ 5i22
2362
+ 5u2j
2363
+ 4o72
2364
+ 4ej8
2365
+ 1rv1
2366
+ 4ixh
2367
+ 1w82
2368
+ 3arx
2369
+ 3k02
2370
+ 3uf9
2371
+ 5wij
2372
+ 4oq5
2373
+ 1z4o
2374
+ 2qf6
2375
+ 3lq5
2376
+ 4yw2
2377
+ 3u4h
2378
+ 4f20
2379
+ 4mg5
2380
+ 5ufi
2381
+ 4n8d
2382
+ 6mu1
2383
+ 5zag
2384
+ 4az2
2385
+ 4fbx
2386
+ 4emv
2387
+ 4bqt
2388
+ 3t2q
2389
+ 5ajw
2390
+ 4q81
2391
+ 2fqx
2392
+ 4kza
2393
+ 5k51
2394
+ 3u4i
2395
+ 5ho8
2396
+ 3lkz
2397
+ 4ciz
2398
+ 1yy4
2399
+ 3bmy
2400
+ 3vf5
2401
+ 2w6p
2402
+ 6fa2
2403
+ 4flh
2404
+ 4w4s
2405
+ 3i28
2406
+ 3oyl
2407
+ 2nn1
2408
+ 5lrk
2409
+ 4e3i
2410
+ 4xg9
2411
+ 4d8n
2412
+ 3lik
2413
+ 4aj2
2414
+ 4jt8
2415
+ 5j7w
2416
+ 5std
2417
+ 3h2a
2418
+ 2pwd
2419
+ 6bjo
2420
+ 3nik
2421
+ 5lxc
2422
+ 3qce
2423
+ 1vrt
2424
+ 4qxo
2425
+ 2y1w
2426
+ 1wok
2427
+ 5lne
2428
+ 5am6
2429
+ 5er1
2430
+ 3ob1
2431
+ 1w4l
2432
+ 6e2m
2433
+ 5wh6
2434
+ 3kf4
2435
+ 4lnp
2436
+ 3el5
2437
+ 1w84
2438
+ 4gm8
2439
+ 3o6t
2440
+ 1q83
2441
+ 6fut
2442
+ 2ycm
2443
+ 3cqw
2444
+ 5ot3
2445
+ 5dp8
2446
+ 1g53
2447
+ 5va9
2448
+ 4wx7
2449
+ 5kww
2450
+ 6bu1
2451
+ 1q6t
2452
+ 1uto
2453
+ 2bks
2454
+ 4fmq
2455
+ 5opr
2456
+ 3ies
2457
+ 6hti
2458
+ 5u66
2459
+ 4qz6
2460
+ 6b1o
2461
+ 4gy5
2462
+ 5mrb
2463
+ 2xml
2464
+ 3skk
2465
+ 1c2t
2466
+ 4in9
2467
+ 4mss
2468
+ 4kju
2469
+ 1hti
2470
+ 5l6o
2471
+ 4wkb
2472
+ 4dus
2473
+ 2am1
2474
+ 3t5i
2475
+ 1dkd
2476
+ 2onc
2477
+ 6hvh
2478
+ 1ypg
2479
+ 6cvf
2480
+ 6bkw
2481
+ 2jt5
2482
+ 3fdt
2483
+ 4wh7
2484
+ 5i3m
2485
+ 1azx
2486
+ 3fck
2487
+ 5u6c
2488
+ 4ded
2489
+ 4k0u
2490
+ 5xmu
2491
+ 3wk9
2492
+ 4kz0
2493
+ 5a7c
2494
+ 6g9j
2495
+ 3iw6
2496
+ 4ddk
2497
+ 1icj
2498
+ 6eyt
2499
+ 4z2o
2500
+ 4e3h
2501
+ 4c70
2502
+ 2y57
2503
+ 3dzt
2504
+ 5e0g
2505
+ 4nrb
2506
+ 4qr3
2507
+ 5ltn
2508
+ 3omg
2509
+ 6bod
2510
+ 4jpa
2511
+ 3fzs
2512
+ 6f5l
2513
+ 5edd
2514
+ 2oyl
2515
+ 2bmc
2516
+ 2xqq
2517
+ 3kqy
2518
+ 1oiy
2519
+ 5y13
2520
+ 4d1s
2521
+ 3ked
2522
+ 5djp
2523
+ 3bxe
2524
+ 4bo0
2525
+ 3oy3
2526
+ 4o2a
2527
+ 5em6
2528
+ 5tfx
2529
+ 4d9p
2530
+ 3tku
2531
+ 4p1r
2532
+ 5nwz
2533
+ 4bs4
2534
+ 3oys
2535
+ 5lgs
2536
+ 4nyi
2537
+ 3l58
2538
+ 2vxn
2539
+ 1y98
2540
+ 3ck7
2541
+ 1nh0
2542
+ 4an0
2543
+ 1mcz
2544
+ 2h9n
2545
+ 3h0s
2546
+ 2ycq
2547
+ 2x4s
2548
+ 4acc
2549
+ 5ows
2550
+ 3rxa
2551
+ 4dhm
2552
+ 5lhi
2553
+ 5l8c
2554
+ 3rxi
2555
+ 3r00
2556
+ 5oq5
2557
+ 1jev
2558
+ 4ztn
2559
+ 4yb7
2560
+ 4a7j
2561
+ 4os7
2562
+ 3e3c
2563
+ 4acu
2564
+ 5hvt
2565
+ 3lf0
2566
+ 4jfj
2567
+ 4p44
2568
+ 5dxh
2569
+ 1d4k
2570
+ 5ewk
2571
+ 1gx8
2572
+ 1x6u
2573
+ 3g2z
2574
+ 1i7c
2575
+ 5opb
2576
+ 4y85
2577
+ 6fii
2578
+ 6c7q
2579
+ 4jkt
2580
+ 4lbl
2581
+ 1w76
2582
+ 5dgz
2583
+ 3run
2584
+ 4qoc
2585
+ 456c
2586
+ 5twz
2587
+ 4a4h
2588
+ 6gl9
2589
+ 6h7z
2590
+ 4fxz
2591
+ 2hu6
2592
+ 2ohp
2593
+ 4ayt
2594
+ 4fgx
2595
+ 1x8b
2596
+ 2y7x
2597
+ 4yb6
2598
+ 5i8c
2599
+ 2aoi
2600
+ 3kgt
2601
+ 3g2h
2602
+ 3gwx
2603
+ 5lyr
2604
+ 3nk8
2605
+ 1mrx
2606
+ 1pf8
2607
+ 6dj1
2608
+ 4jss
2609
+ 3tu9
2610
+ 5cqt
2611
+ 1yds
2612
+ 3hy9
2613
+ 5y5u
2614
+ 6dko
2615
+ 3s0j
2616
+ 5hm0
2617
+ 3s3o
2618
+ 5om7
2619
+ 2j9n
2620
+ 4bfd
2621
+ 5wzw
2622
+ 1d3v
2623
+ 4pvv
2624
+ 4i0s
2625
+ 4pmm
2626
+ 6ftp
2627
+ 4hct
2628
+ 1c5t
2629
+ 5i3v
2630
+ 5urk
2631
+ 5q0f
2632
+ 5xv7
2633
+ 4bj9
2634
+ 1css
2635
+ 4azc
2636
+ 1hqf
2637
+ 3kej
2638
+ 2p8n
2639
+ 5tg1
2640
+ 3cvk
2641
+ 6awp
2642
+ 1e3g
2643
+ 1t4e
2644
+ 2nns
2645
+ 5al5
2646
+ 3cr5
2647
+ 6equ
2648
+ 5fl5
2649
+ 2yex
2650
+ 5lsh
2651
+ 4fgz
2652
+ 3mv0
2653
+ 3l5d
2654
+ 1q6m
2655
+ 5khh
2656
+ 5c85
2657
+ 2f3e
2658
+ 1npw
2659
+ 5w1e
2660
+ 4j0a
2661
+ 4mvn
2662
+ 5ioz
2663
+ 4yve
2664
+ 5tbm
2665
+ 2fx7
2666
+ 4oex
2667
+ 4pjt
2668
+ 1nnu
2669
+ 3c4h
2670
+ 3vp1
2671
+ 1bky
2672
+ 5ajc
2673
+ 1jcx
2674
+ 8hvp
2675
+ 4wvs
2676
+ 2vcx
2677
+ 5hkh
2678
+ 2llo
2679
+ 2jkm
2680
+ 1q6p
2681
+ 5mf6
2682
+ 5afj
2683
+ 4l50
2684
+ 1o3e
2685
+ 6evm
2686
+ 1pyw
2687
+ 3l3a
2688
+ 4mnx
2689
+ 4fll
2690
+ 6hx5
2691
+ 3ivc
2692
+ 6mx8
2693
+ 5wf5
2694
+ 5a1f
2695
+ 4c4g
2696
+ 1w5w
2697
+ 2w26
2698
+ 4an9
2699
+ 5n8j
2700
+ 3aqt
2701
+ 4dho
2702
+ 4l7r
2703
+ 4aro
2704
+ 1stp
2705
+ 6g07
2706
+ 4hze
2707
+ 2cbj
2708
+ 5wbr
2709
+ 5afx
2710
+ 3bh3
2711
+ 4ca8
2712
+ 6aam
2713
+ 1r78
2714
+ 6aol
2715
+ 1gcz
2716
+ 3bjc
2717
+ 1o2x
2718
+ 1b40
2719
+ 4u45
2720
+ 5nat
2721
+ 4qiy
2722
+ 3v30
2723
+ 3kyq
2724
+ 3unn
2725
+ 5elf
2726
+ 1xoe
2727
+ 2oqi
2728
+ 5mpz
2729
+ 4hxz
2730
+ 4rfr
2731
+ 3vp3
2732
+ 6upj
2733
+ 5xyy
2734
+ 4wci
2735
+ 2fwz
2736
+ 6buu
2737
+ 3ocz
2738
+ 3v3v
2739
+ 2yq6
2740
+ 4m8e
2741
+ 4r6e
2742
+ 5ihh
2743
+ 5ngs
2744
+ 2v77
2745
+ 3nzs
2746
+ 5tyk
2747
+ 4l7g
2748
+ 3dne
2749
+ 1m5f
2750
+ 2xk8
2751
+ 4urx
2752
+ 3ft5
2753
+ 4br3
2754
+ 1bgq
2755
+ 3fa3
2756
+ 2y07
2757
+ 4lpb
2758
+ 5k0m
2759
+ 4tlr
2760
+ 1ocq
2761
+ 4j5c
2762
+ 4awm
2763
+ 1ado
2764
+ 2n3k
2765
+ 4mf1
2766
+ 4oho
2767
+ 2xv1
2768
+ 5hh6
2769
+ 5kmh
2770
+ 4lkt
2771
+ 3n2v
2772
+ 3o7u
2773
+ 2ieo
2774
+ 1wbo
2775
+ 5i25
2776
+ 4mhz
2777
+ 5mmp
2778
+ 3qs8
2779
+ 2ykj
2780
+ 5igm
2781
+ 4zji
2782
+ 3zxz
2783
+ 4b71
2784
+ 1fpp
2785
+ 2y76
2786
+ 6eya
2787
+ 5lch
2788
+ 5gp7
2789
+ 3mmr
2790
+ 4qc1
2791
+ 3sbi
2792
+ 4bsq
2793
+ 5yhe
2794
+ 5hj9
2795
+ 4qw1
2796
+ 1px4
2797
+ 1q5l
2798
+ 4i9z
2799
+ 1pbk
2800
+ 1o5b
2801
+ 1ryf
2802
+ 3oeu
2803
+ 3nx7
2804
+ 4izy
2805
+ 5lvf
2806
+ 3hll
2807
+ 4jbp
2808
+ 5icv
2809
+ 2yln
2810
+ 5om3
2811
+ 1e2l
2812
+ 2oyk
2813
+ 2c0o
2814
+ 5w5v
2815
+ 1b2i
2816
+ 3w8o
2817
+ 4x60
2818
+ 6h2z
2819
+ 2a3i
2820
+ 5jnl
2821
+ 6m9d
2822
+ 4twp
2823
+ 5gmu
2824
+ 5wg4
2825
+ 2mkr
2826
+ 5e89
2827
+ 2j78
2828
+ 3elc
2829
+ 3eb1
2830
+ 5wlv
2831
+ 3udk
2832
+ 4p0v
2833
+ 5ahu
2834
+ 5two
2835
+ 2jf4
2836
+ 1mm6
2837
+ 3c4c
2838
+ 5a2j
2839
+ 3ti4
2840
+ 3pd9
2841
+ 4rx8
2842
+ 4yhz
2843
+ 2vr0
2844
+ 6mim
2845
+ 2k00
2846
+ 3fr2
2847
+ 5q0o
2848
+ 5dgw
2849
+ 3tiy
2850
+ 4jxs
2851
+ 5xst
2852
+ 5tpx
2853
+ 4j1h
2854
+ 5xva
2855
+ 1o49
2856
+ 2mlm
2857
+ 4alu
2858
+ 1tv6
2859
+ 4g2f
2860
+ 2xp4
2861
+ 1lhg
2862
+ 5aoi
2863
+ 2bz8
2864
+ 4kbc
2865
+ 1h22
2866
+ 5tho
2867
+ 1wun
2868
+ 1d8m
2869
+ 3m17
2870
+ 1slg
2871
+ 3pcc
2872
+ 1oh4
2873
+ 2vio
2874
+ 3v6r
2875
+ 4n7h
2876
+ 3fv8
2877
+ 4f08
2878
+ 2p33
2879
+ 5a5d
2880
+ 3n1v
2881
+ 4j4o
2882
+ 5o1d
2883
+ 5ewm
2884
+ 3ds4
2885
+ 4b3b
2886
+ 1sqi
2887
+ 2nmx
2888
+ 1gai
2889
+ 1fax
2890
+ 4deg
2891
+ 3suu
2892
+ 4rcd
2893
+ 6f6s
2894
+ 1juf
2895
+ 4zx3
2896
+ 1m48
2897
+ 5ttf
2898
+ 4man
2899
+ 5ljt
2900
+ 1y0l
2901
+ 6hsk
2902
+ 1gyy
2903
+ 3bi6
2904
+ 3fue
2905
+ 1c87
2906
+ 4byj
2907
+ 4r18
2908
+ 6bnh
2909
+ 3qpn
2910
+ 4r4i
2911
+ 4e5h
2912
+ 4ojq
2913
+ 5aqt
2914
+ 3fu3
2915
+ 3oap
2916
+ 3d1x
2917
+ 4j48
2918
+ 5uig
2919
+ 5vcz
2920
+ 5tzd
2921
+ 4hva
2922
+ 3mvl
2923
+ 6b1c
2924
+ 4e96
2925
+ 4d88
2926
+ 4ufi
2927
+ 4uvu
2928
+ 3met
2929
+ 5aeh
2930
+ 6dh5
2931
+ 4mrh
2932
+ 5fqv
2933
+ 5t8q
2934
+ 6f7c
2935
+ 5l17
2936
+ 3zim
2937
+ 3k83
2938
+ 2p3a
2939
+ 4wsj
2940
+ 4cd1
2941
+ 2xk7
2942
+ 5w6u
2943
+ 4zs3
2944
+ 4u58
2945
+ 2qn1
2946
+ 6f5m
2947
+ 2c8y
2948
+ 3vw6
2949
+ 1o42
2950
+ 5uor
2951
+ 2ycs
2952
+ 2i4z
2953
+ 4q1d
2954
+ 5m0s
2955
+ 3qup
2956
+ 1tr7
2957
+ 5xpn
2958
+ 2aa6
2959
+ 5uv2
2960
+ 3pgl
2961
+ 4mk7
2962
+ 5uir
2963
+ 1p57
2964
+ 4ezx
2965
+ 3shj
2966
+ 5csw
2967
+ 3ibn
2968
+ 4ag8
2969
+ 6ceh
2970
+ 2p98
2971
+ 4yht
2972
+ 3k0k
2973
+ 5neb
2974
+ 4ysi
2975
+ 1bo5
2976
+ 1a28
2977
+ 3wha
2978
+ 4nq6
2979
+ 1czc
2980
+ 1ckb
2981
+ 5j6d
2982
+ 3hr1
2983
+ 4acm
2984
+ 6hrq
2985
+ 1py5
2986
+ 3km4
2987
+ 4c66
2988
+ 1sr7
2989
+ 1rnm
2990
+ 3krx
2991
+ 3d25
2992
+ 5d6e
2993
+ 5w4v
2994
+ 1xbc
2995
+ 2x8e
2996
+ 4cmu
2997
+ 4f09
2998
+ 5afv
2999
+ 6bkx
3000
+ 3lqj
3001
+ 2hug
3002
+ 5dnu
3003
+ 2woa
3004
+ 5xvw
3005
+ 3fi3
3006
+ 4yho
3007
+ 1h1s
3008
+ 1w3j
3009
+ 5y59
3010
+ 1sdv
3011
+ 5tmn
3012
+ 5myk
3013
+ 5f29
3014
+ 1efy
3015
+ 4ayy
3016
+ 4ykk
3017
+ 3m6p
3018
+ 1utl
3019
+ 3zls
3020
+ 4ff8
3021
+ 5f6d
3022
+ 4k8s
3023
+ 2mpa
3024
+ 2gfs
3025
+ 1c86
3026
+ 5e5g
3027
+ 3zvy
3028
+ 2f3r
3029
+ 4e5j
3030
+ 3l5r
3031
+ 5l01
3032
+ 2vj8
3033
+ 4b9w
3034
+ 4i7m
3035
+ 4qwu
3036
+ 3rpr
3037
+ 4i7k
3038
+ 2c1n
3039
+ 4ret
3040
+ 1owk
3041
+ 4mxc
3042
+ 2ohs
3043
+ 1akw
3044
+ 5tyh
3045
+ 5huw
3046
+ 5w14
3047
+ 5lb7
3048
+ 1f8e
3049
+ 4ciy
3050
+ 5etm
3051
+ 2pwr
3052
+ 5o1f
3053
+ 4r1e
3054
+ 4ncg
3055
+ 4abf
3056
+ 1my4
3057
+ 4jib
3058
+ 4nxr
3059
+ 2jbo
3060
+ 1njd
3061
+ 1pdq
3062
+ 1qj7
3063
+ 5fi6
3064
+ 2vtp
3065
+ 5dro
3066
+ 1o3b
3067
+ 4jln
3068
+ 2yis
3069
+ 1rt2
3070
+ 4feq
3071
+ 3gjs
3072
+ 5eef
3073
+ 2zx8
3074
+ 3ujd
3075
+ 1fkf
3076
+ 5eto
3077
+ 1c5p
3078
+ 5tys
3079
+ 6ayo
3080
+ 4eh4
3081
+ 3djv
3082
+ 3mhm
3083
+ 2pow
3084
+ 1w0y
3085
+ 4dbm
3086
+ 2ywp
3087
+ 5uhi
3088
+ 1mj7
3089
+ 5am3
3090
+ 1pxp
3091
+ 4uff
3092
+ 2bq6
3093
+ 1n1g
3094
+ 5nxq
3095
+ 966c
3096
+ 3vws
3097
+ 2of2
3098
+ 2ydf
3099
+ 4nbk
3100
+ 1l0a
3101
+ 4u43
3102
+ 5trh
3103
+ 1rpa
3104
+ 5etv
3105
+ 1h61
3106
+ 2vcb
3107
+ 3d1v
3108
+ 1xq0
3109
+ 1b3h
3110
+ 3svj
3111
+ 5fun
3112
+ 4bw1
3113
+ 1kti
3114
+ 4jof
3115
+ 4izm
3116
+ 4phu
3117
+ 3jy9
3118
+ 4urw
3119
+ 5ael
3120
+ 4irx
3121
+ 3avn
3122
+ 5mm9
3123
+ 5twl
3124
+ 4w9l
3125
+ 2ad5
3126
+ 4b4g
3127
+ 4a22
3128
+ 5vfc
3129
+ 3l3q
3130
+ 6fn9
3131
+ 4m14
3132
+ 2gl0
3133
+ 4c35
3134
+ 3iob
3135
+ 6apr
3136
+ 6g3v
3137
+ 4cik
3138
+ 2xpb
3139
+ 3loo
3140
+ 3drr
3141
+ 3e4a
3142
+ 5thi
3143
+ 5mgm
3144
+ 3mrv
3145
+ 2c4v
3146
+ 4i8n
3147
+ 5obr
3148
+ 2qyl
3149
+ 6ee4
3150
+ 2wzx
3151
+ 5j8z
3152
+ 2itz
3153
+ 5t2i
3154
+ 4mdn
3155
+ 5nw0
3156
+ 5l9o
3157
+ 1c5n
3158
+ 3bel
3159
+ 1nt1
3160
+ 4g69
3161
+ 5ovv
3162
+ 1i9n
3163
+ 1hlk
3164
+ 5hh4
3165
+ 2hhn
3166
+ 5v13
3167
+ 2ww0
3168
+ 2rcx
3169
+ 5zms
3170
+ 2ncz
3171
+ 1a86
3172
+ 4uuq
3173
+ 5g2n
3174
+ 1o5g
3175
+ 2gfj
3176
+ 5d3j
3177
+ 3dpe
3178
+ 5ldp
3179
+ 3veu
3180
+ 2vaq
3181
+ 4ui5
3182
+ 1hp5
3183
+ 5g1a
3184
+ 3f39
3185
+ 4owm
3186
+ 1dl7
3187
+ 3ce3
3188
+ 2qmg
3189
+ 4poh
3190
+ 5azg
3191
+ 6by8
3192
+ 5l7f
3193
+ 4awo
3194
+ 6fh7
3195
+ 4ifi
3196
+ 6b7e
3197
+ 5i3x
3198
+ 5lws
3199
+ 5sve
3200
+ 3kn0
3201
+ 3q2h
3202
+ 5eoc
3203
+ 6b7h
3204
+ 5ekn
3205
+ 3rqw
3206
+ 4erk
3207
+ 5fso
3208
+ 5e80
3209
+ 2bpx
3210
+ 1yw8
3211
+ 3sc1
3212
+ 5o4t
3213
+ 5c2o
3214
+ 4u0f
3215
+ 2w17
3216
+ 5nxy
3217
+ 1oy7
3218
+ 6dvm
3219
+ 3k00
3220
+ 1py2
3221
+ 4zx0
3222
+ 4nkt
3223
+ 3s1h
3224
+ 2ym5
3225
+ 6cis
3226
+ 3hdm
3227
+ 2yj2
3228
+ 4ef4
3229
+ 3kvw
3230
+ 5cu2
3231
+ 6c1s
3232
+ 4j6i
3233
+ 6bij
3234
+ 5xgh
3235
+ 5u2c
3236
+ 3m3e
3237
+ 1yid
3238
+ 1b6k
3239
+ 4gd6
3240
+ 5lp1
3241
+ 4ww6
3242
+ 1zhy
3243
+ 3eky
3244
+ 2q38
3245
+ 4jym
3246
+ 2ymt
3247
+ 3gur
3248
+ 3suv
3249
+ 1njs
3250
+ 4na8
3251
+ 4msa
3252
+ 5nk8
3253
+ 1o4k
3254
+ 3l6h
3255
+ 1i5h
3256
+ 3t6b
3257
+ 5e1s
3258
+ 6gzd
3259
+ 2eum
3260
+ 1z71
3261
+ 2yc5
3262
+ 6hai
3263
+ 2a14
3264
+ 4aa4
3265
+ 4ask
3266
+ 4ea1
3267
+ 3pxz
3268
+ 4n4s
3269
+ 5wkm
3270
+ 4jg0
3271
+ 3sie
3272
+ 3pkc
3273
+ 1w1g
3274
+ 5y8c
3275
+ 2c6m
3276
+ 5ou3
3277
+ 6d50
3278
+ 5v3y
3279
+ 5lzj
3280
+ 5tya
3281
+ 4c9x
3282
+ 3eyu
3283
+ 4xtm
3284
+ 3agl
3285
+ 2brh
3286
+ 4mbf
3287
+ 5d4a
3288
+ 2zz6
3289
+ 2gbi
3290
+ 1t5f
3291
+ 5jiy
3292
+ 1nw7
3293
+ 3hl7
3294
+ 6h7f
3295
+ 1zd4
3296
+ 3suw
3297
+ 5fdc
3298
+ 3cct
3299
+ 4pm0
3300
+ 2y1x
3301
+ 5n9n
3302
+ 4gj2
3303
+ 5d6f
3304
+ 4joj
3305
+ 5klz
3306
+ 4jju
3307
+ 2gg7
3308
+ 5hn9
3309
+ 4tjy
3310
+ 4r91
3311
+ 6ffn
3312
+ 5usf
3313
+ 2clk
3314
+ 5n70
3315
+ 2ohu
3316
+ 3eka
3317
+ 5eq1
3318
+ 4qlq
3319
+ 6eis
3320
+ 4acf
3321
+ 5t35
3322
+ 4orx
3323
+ 2hmw
3324
+ 3fz1
3325
+ 3qx5
3326
+ 4zhm
3327
+ 3rxo
3328
+ 1atl
3329
+ 5t1k
3330
+ 3np7
3331
+ 3zt4
3332
+ 1bdl
3333
+ 4u6y
3334
+ 2jbk
3335
+ 5l13
3336
+ 5mob
3337
+ 2igy
3338
+ 5jlz
3339
+ 4flj
3340
+ 2zu4
3341
+ 2w70
3342
+ 5d6y
3343
+ 4kip
3344
+ 2uw0
3345
+ 4msg
3346
+ 6f8x
3347
+ 1r2b
3348
+ 4qvm
3349
+ 2ydo
3350
+ 1w4q
3351
+ 5okt
3352
+ 5op4
3353
+ 3al3
3354
+ 2rvn
3355
+ 3dp1
3356
+ 4luz
3357
+ 3o87
3358
+ 1w2h
3359
+ 1fiv
3360
+ 1uh1
3361
+ 6c8p
3362
+ 4g8m
3363
+ 4hai
3364
+ 2evc
3365
+ 2q2z
3366
+ 4yyt
3367
+ 4od0
3368
+ 5k9w
3369
+ 3fyj
3370
+ 5vh0
3371
+ 5ale
3372
+ 3th8
3373
+ 4yhp
3374
+ 4pvo
3375
+ 4whs
3376
+ 1c4v
3377
+ 5hed
3378
+ 5qbv
3379
+ 2oei
3380
+ 4y2s
3381
+ 1z6q
3382
+ 4px6
3383
+ 2knh
3384
+ 5kns
3385
+ 1m4h
3386
+ 2g96
3387
+ 3cm7
3388
+ 4gcj
3389
+ 1qft
3390
+ 3ztd
3391
+ 4yzu
3392
+ 1vwl
3393
+ 4kiq
3394
+ 6ax1
3395
+ 3zyh
3396
+ 2c1q
3397
+ 6mx3
3398
+ 5o9r
3399
+ 1q8t
3400
+ 4ue1
3401
+ 2y7z
3402
+ 3mdz
3403
+ 4gtr
3404
+ 6fh6
3405
+ 4qvq
3406
+ 3tti
3407
+ 5ap6
3408
+ 4pkt
3409
+ 3tdj
3410
+ 3qnd
3411
+ 4lvt
3412
+ 1xo2
3413
+ 1pcg
3414
+ 5fiv
3415
+ 1g3e
3416
+ 6gx3
3417
+ 5dhj
3418
+ 5ixt
3419
+ 1xhm
3420
+ 4po7
3421
+ 1a5g
3422
+ 1haa
3423
+ 6egs
3424
+ 3iw8
3425
+ 5qag
3426
+ 5o1b
3427
+ 5brz
3428
+ 3mpt
3429
+ 3ral
3430
+ 1j14
3431
+ 5wdj
3432
+ 4io3
3433
+ 2irz
3434
+ 3f35
3435
+ 5jan
3436
+ 6mrg
3437
+ 2oi3
3438
+ 1onp
3439
+ 4bjx
3440
+ 2i0v
3441
+ 4hmk
3442
+ 5mat
3443
+ 4h2m
3444
+ 4agl
3445
+ 4ayx
3446
+ 5iwg
3447
+ 2xez
3448
+ 5em7
3449
+ 5kdf
3450
+ 3vv6
3451
+ 4y59
3452
+ 3ti6
3453
+ 5gn6
3454
+ 4u7v
3455
+ 3rz7
3456
+ 5wxo
3457
+ 1o2w
3458
+ 4r0i
3459
+ 1a30
3460
+ 5nz2
3461
+ 3rf4
3462
+ 6ee6
3463
+ 4oiv
3464
+ 1a9u
3465
+ 3iop
3466
+ 5x4p
3467
+ 3gl6
3468
+ 1hqg
3469
+ 2oji
3470
+ 4ps3
3471
+ 2w05
3472
+ 1tnh
3473
+ 3djq
3474
+ 2c9b
3475
+ 2flb
3476
+ 5itd
3477
+ 5k00
3478
+ 1t37
3479
+ 3g0b
3480
+ 6hdo
3481
+ 2rfh
3482
+ 1b5g
3483
+ 4bdi
3484
+ 4ikn
3485
+ 3inf
3486
+ 1rww
3487
+ 4g68
3488
+ 4djw
3489
+ 5yum
3490
+ 4hxl
3491
+ 3dbd
3492
+ 4hw7
3493
+ 5mys
3494
+ 3p7i
3495
+ 2j7h
3496
+ 2vvt
3497
+ 3r2y
3498
+ 5ng9
3499
+ 1y2g
3500
+ 2vwm
3501
+ 3if7
3502
+ 2hl4
3503
+ 3ffp
3504
+ 4qwg
3505
+ 3g6z
3506
+ 5dhf
3507
+ 4y79
3508
+ 3m57
3509
+ 4oyi
3510
+ 3s0b
3511
+ 1yvx
3512
+ 3v7c
3513
+ 4md6
3514
+ 4g9c
3515
+ 2w6m
3516
+ 5tt7
3517
+ 3pkb
3518
+ 3upi
3519
+ 2va6
3520
+ 2weh
3521
+ 4bao
3522
+ 5gid
3523
+ 2w1g
3524
+ 3mxc
3525
+ 2fw6
3526
+ 3diw
3527
+ 1ind
3528
+ 3i3d
3529
+ 5k0i
3530
+ 4npv
3531
+ 2wf5
3532
+ 2q7q
3533
+ 4wyz
3534
+ 5xaf
3535
+ 1gj4
3536
+ 5hf1
3537
+ 2aw1
3538
+ 3dbu
3539
+ 3o9i
3540
+ 4l34
3541
+ 5j0d
3542
+ 5afl
3543
+ 2w3i
3544
+ 5ew3
3545
+ 5e0m
3546
+ 5jfu
3547
+ 4fm7
3548
+ 1o4h
3549
+ 4w9p
3550
+ 6eyb
3551
+ 4onf
3552
+ 4zhl
3553
+ 3mj2
3554
+ 3k22
3555
+ 2xxy
3556
+ 5m5q
3557
+ 4aci
3558
+ 5kab
3559
+ 4dpt
3560
+ 4rsk
3561
+ 1rsi
3562
+ 5c42
3563
+ 3tb6
3564
+ 6bmi
3565
+ 4myd
3566
+ 2i40
3567
+ 5nhj
3568
+ 5moc
3569
+ 1tom
3570
+ 3ds6
3571
+ 1lt6
3572
+ 3eft
3573
+ 2w8g
3574
+ 4d1y
3575
+ 4yrc
3576
+ 1tu6
3577
+ 5hmh
3578
+ 5nw2
3579
+ 1k9r
3580
+ 3mxf
3581
+ 2q1j
3582
+ 1u0h
3583
+ 4ye3
3584
+ 4k60
3585
+ 5zop
3586
+ 6htg
3587
+ 2fx9
3588
+ 4q83
3589
+ 4y8y
3590
+ 4gr0
3591
+ 3uqf
3592
+ 3vha
3593
+ 2yaz
3594
+ 1k1m
3595
+ 4i2w
3596
+ 5ovx
3597
+ 3db6
3598
+ 1v2p
3599
+ 5njx
3600
+ 1wtg
3601
+ 3uvn
3602
+ 3h89
3603
+ 2q7y
3604
+ 4o2f
3605
+ 3fv1
3606
+ 4eu3
3607
+ 3pwm
3608
+ 4tmp
3609
+ 5ulp
3610
+ 6di1
3611
+ 4iks
3612
+ 4dhs
3613
+ 5hg8
3614
+ 6b1x
3615
+ 5nw8
3616
+ 3tjd
3617
+ 5yve
3618
+ 4uwg
3619
+ 1jd5
3620
+ 5km5
3621
+ 2w4x
3622
+ 3umx
3623
+ 2ony
3624
+ 1bm2
3625
+ 4pku
3626
+ 2pj9
3627
+ 2m41
3628
+ 4z8a
3629
+ 2yjq
3630
+ 6mwe
3631
+ 1prl
3632
+ 5y1y
3633
+ 1c5y
3634
+ 5ak6
3635
+ 1ggn
3636
+ 2usn
3637
+ 2pv2
3638
+ 1w25
3639
+ 1c4u
3640
+ 4ba3
3641
+ 6c2x
3642
+ 4ijl
3643
+ 3gxy
3644
+ 3f3u
3645
+ 4io2
3646
+ 4drm
3647
+ 3cjo
3648
+ 4dst
3649
+ 5c87
3650
+ 5ni8
3651
+ 2ll7
3652
+ 2pem
3653
+ 2x7d
3654
+ 3zc6
3655
+ 1m2q
3656
+ 4gk2
3657
+ 3lqi
3658
+ 3c1x
3659
+ 2az9
3660
+ 1fv0
3661
+ 1gz3
3662
+ 1gfy
3663
+ 5mte
3664
+ 2nxm
3665
+ 4yml
3666
+ 4yog
3667
+ 2g6q
3668
+ 1t79
3669
+ 3hdk
3670
+ 3uij
3671
+ 5vl2
3672
+ 4ufk
3673
+ 4aph
3674
+ 6d1u
3675
+ 4rn2
3676
+ 1zxv
3677
+ 2p4i
3678
+ 1w80
3679
+ 3bl7
3680
+ 2vh0
3681
+ 3kqa
3682
+ 5vdu
3683
+ 4b2d
3684
+ 5jgi
3685
+ 4u0i
3686
+ 3g9n
3687
+ 5var
3688
+ 4lnf
3689
+ 3e9h
3690
+ 2fzz
3691
+ 3cfv
3692
+ 1ws4
3693
+ 5h21
3694
+ 2p7z
3695
+ 4j8s
3696
+ 5fdr
3697
+ 5z7b
3698
+ 6cj1
3699
+ 5tts
3700
+ 6bmx
3701
+ 1g49
3702
+ 1mqd
3703
+ 5ost
3704
+ 2qhd
3705
+ 6e4a
3706
+ 3fur
3707
+ 3v1r
3708
+ 4wwn
3709
+ 4hni
3710
+ 6e83
3711
+ 1gqs
3712
+ 3eko
3713
+ 1dmp
3714
+ 2wnj
3715
+ 4i6q
3716
+ 4d8s
3717
+ 2jb5
3718
+ 4p58
3719
+ 4usj
3720
+ 6eq6
3721
+ 3o0j
3722
+ 5apj
3723
+ 4ty1
3724
+ 5avf
3725
+ 2xef
3726
+ 1way
3727
+ 1a7c
3728
+ 5f2r
3729
+ 5k1f
3730
+ 2c9d
3731
+ 3a9i
3732
+ 3deh
3733
+ 3q3k
3734
+ 4xtw
3735
+ 6f0y
3736
+ 2r3l
3737
+ 4mra
3738
+ 5qa4
3739
+ 4zpg
3740
+ 4ijp
3741
+ 3wyx
3742
+ 1s5z
3743
+ 1y2h
3744
+ 3qi1
3745
+ 1c83
3746
+ 5dts
3747
+ 5e1d
3748
+ 5j9k
3749
+ 5o4f
3750
+ 2wxi
3751
+ 5f04
3752
+ 2qmj
3753
+ 3kvx
3754
+ 4l7j
3755
+ 1d5r
3756
+ 2vyt
3757
+ 4cfv
3758
+ 1qcp
3759
+ 4hlm
3760
+ 5d48
3761
+ 3snl
3762
+ 5uwn
3763
+ 4yll
3764
+ 2brm
3765
+ 4dzw
3766
+ 4q7v
3767
+ 5teh
3768
+ 1c4y
3769
+ 5mro
3770
+ 4ymg
3771
+ 3b1u
3772
+ 2z9g
3773
+ 2rt5
3774
+ 3gxz
3775
+ 6e0q
3776
+ 6f7b
3777
+ 4qz5
3778
+ 5dh5
3779
+ 3v3q
3780
+ 4do5
3781
+ 3k0h
3782
+ 5dt2
3783
+ 3l54
3784
+ 3ebh
3785
+ 2hd6
3786
+ 5hbe
3787
+ 4wri
3788
+ 2qi7
3789
+ 1jao
3790
+ 5ajp
3791
+ 6ej3
3792
+ 3gjq
3793
+ 6f8w
3794
+ 5y20
3795
+ 1rwx
3796
+ 4ibj
3797
+ 5op2
3798
+ 3c6u
3799
+ 2w0s
3800
+ 1c7e
3801
+ 5yyb
3802
+ 3ni5
3803
+ 3t8w
3804
+ 5ntp
3805
+ 4dfu
3806
+ 1o4e
3807
+ 5tqu
3808
+ 2xyd
3809
+ 4pl0
3810
+ 5tl9
3811
+ 2vpo
3812
+ 4ov5
3813
+ 4qps
3814
+ 5wdc
3815
+ 4awp
3816
+ 4a9s
3817
+ 3atk
3818
+ 4nja
3819
+ 4ms0
3820
+ 3fzr
3821
+ 1i9m
3822
+ 4b5s
3823
+ 2y71
3824
+ 3f33
3825
+ 5q0h
3826
+ 5x9p
3827
+ 4p4j
3828
+ 5ajo
3829
+ 3c56
3830
+ 4pnl
3831
+ 4fmu
3832
+ 3uli
3833
+ 5nlk
3834
+ 4cs9
3835
+ 6hkz
3836
+ 2vo5
3837
+ 2fts
3838
+ 3sfc
3839
+ 3pke
3840
+ 4dk5
3841
+ 1lor
3842
+ 2jdn
3843
+ 3jya
3844
+ 6bqg
3845
+ 3p9t
3846
+ 3qkk
3847
+ 6fnj
3848
+ 6fmc
3849
+ 5c0k
3850
+ 1mmq
3851
+ 4fck
3852
+ 5npe
3853
+ 4j59
3854
+ 4eol
3855
+ 5m57
3856
+ 4efk
3857
+ 3bt9
3858
+ 1oiq
3859
+ 5dwr
3860
+ 3wyk
3861
+ 4qf9
3862
+ 4c5w
3863
+ 5tuq
3864
+ 3oqk
3865
+ 4xj0
3866
+ 5mwg
3867
+ 2vwu
3868
+ 6dpy
3869
+ 4ara
3870
+ 3wi6
3871
+ 5owf
3872
+ 3nw5
3873
+ 4fhh
3874
+ 3sna
3875
+ 1uye
3876
+ 2vcg
3877
+ 5j7j
3878
+ 4gtt
3879
+ 5jc1
3880
+ 2ph6
3881
+ 4zpe
3882
+ 2hz0
3883
+ 5l4i
3884
+ 1y1m
3885
+ 5vo1
3886
+ 4y2v
3887
+ 1ecq
3888
+ 6eeo
3889
+ 5jrs
3890
+ 4l23
3891
+ 6f34
3892
+ 4r4q
3893
+ 4quo
3894
+ 6dq6
3895
+ 4e3k
3896
+ 3zov
3897
+ 1cet
3898
+ 5e2k
3899
+ 5jyy
3900
+ 2jld
3901
+ 5jna
3902
+ 4qls
3903
+ 4ksq
3904
+ 4py4
3905
+ 6biy
3906
+ 2hqu
3907
+ 4yk5
3908
+ 4i12
3909
+ 4km0
3910
+ 5ttu
3911
+ 2za3
3912
+ 4o5g
3913
+ 2y1g
3914
+ 4cff
3915
+ 4ztr
3916
+ 4jps
3917
+ 4uin
3918
+ 2q93
3919
+ 2x7t
3920
+ 2q6c
3921
+ 6ayr
3922
+ 1pwy
3923
+ 5lwe
3924
+ 5cxi
3925
+ 4n9b
3926
+ 1fzo
3927
+ 5ukm
3928
+ 1ork
3929
+ 4mk0
3930
+ 1jbd
3931
+ 1k2i
3932
+ 4ifh
3933
+ 4yh4
3934
+ 3fee
3935
+ 1flm
3936
+ 5tqe
3937
+ 1jj9
3938
+ 4li5
3939
+ 4jc1
3940
+ 5i8p
3941
+ 1lah
3942
+ 1apw
3943
+ 3ppm
3944
+ 4kn7
3945
+ 1m5w
3946
+ 3vbx
3947
+ 4ie2
3948
+ 3sur
3949
+ 1nz7
3950
+ 4trw
3951
+ 4qmu
3952
+ 4qnu
3953
+ 4os6
3954
+ 2bge
3955
+ 5xvg
3956
+ 4o6w
3957
+ 1nfu
3958
+ 2zjf
3959
+ 1fpi
3960
+ 5id1
3961
+ 3ccz
3962
+ 2d3u
3963
+ 5tyj
3964
+ 1fls
3965
+ 1ggd
3966
+ 5ofx
3967
+ 4o12
3968
+ 4wf4
3969
+ 3pck
3970
+ 4ayv
3971
+ 5jt9
3972
+ 5nb7
3973
+ 4b84
3974
+ 5ggk
3975
+ 6h5x
3976
+ 1r5w
3977
+ 4zw6
3978
+ 5d6j
3979
+ 5ep7
3980
+ 4duh
3981
+ 4ps0
3982
+ 4drk
3983
+ 1los
3984
+ 3s6t
3985
+ 3be9
3986
+ 4ipi
3987
+ 3wf7
3988
+ 4pow
3989
+ 4y46
3990
+ 5izj
3991
+ 1d4j
3992
+ 4qg7
3993
+ 2avm
3994
+ 1g85
3995
+ 1r1i
3996
+ 3g1v
3997
+ 3l7c
3998
+ 5wyr
3999
+ 4oti
4000
+ 4a4o
4001
+ 1fvv
4002
+ 3uok
4003
+ 5n3v
4004
+ 4tim
4005
+ 3tv8
4006
+ 4a4g
4007
+ 6c0s
4008
+ 4fak
4009
+ 1w12
4010
+ 4jn2
4011
+ 4oyp
4012
+ 4zt7
4013
+ 5emj
4014
+ 2rc8
4015
+ 2ypo
4016
+ 3kmc
4017
+ 4hwp
4018
+ 3q2j
4019
+ 4k5l
4020
+ 3hwn
4021
+ 5i83
4022
+ 1j4r
4023
+ 4fyh
4024
+ 1kfy
4025
+ 5g43
4026
+ 4bnv
4027
+ 5fpu
4028
+ 1o48
4029
+ 6ei4
4030
+ 6ekd
4031
+ 2k3w
4032
+ 3ksq
4033
+ 5wkl
4034
+ 4hzz
4035
+ 2f8g
4036
+ 2uw3
4037
+ 6mi6
4038
+ 4u91
4039
+ 3f7u
4040
+ 3zk6
4041
+ 5n2f
4042
+ 2qx0
4043
+ 3mrt
4044
+ 2f0z
4045
+ 3qvv
4046
+ 1l7x
4047
+ 2oc1
4048
+ 3gbe
4049
+ 5he3
4050
+ 1pun
4051
+ 5m4i
4052
+ 3pp7
4053
+ 5o91
4054
+ 4mjo
4055
+ 4fns
4056
+ 4xnv
4057
+ 3tay
4058
+ 3ejs
4059
+ 4c1w
4060
+ 3njq
4061
+ 6guc
4062
+ 4dv8
4063
+ 4el9
4064
+ 1iep
4065
+ 1ta2
4066
+ 3h0e
4067
+ 1bsk
4068
+ 4pri
4069
+ 3pcb
4070
+ 4jg8
4071
+ 3lbj
4072
+ 1i8h
4073
+ 4a4x
4074
+ 1lhd
4075
+ 2wa3
4076
+ 5tcy
4077
+ 3wth
4078
+ 4baq
4079
+ 1ylv
4080
+ 5bve
4081
+ 4g34
4082
+ 5u28
4083
+ 1qbv
4084
+ 5z5f
4085
+ 4ot6
4086
+ 2g97
4087
+ 5n16
4088
+ 1m5e
4089
+ 3efk
4090
+ 4h7q
4091
+ 2v5a
4092
+ 5vs6
4093
+ 2cne
4094
+ 1pxn
4095
+ 1jqy
4096
+ 5j6s
4097
+ 4abv
4098
+ 6d6t
4099
+ 1t4s
4100
+ 3zsx
4101
+ 1p01
4102
+ 2f4j
4103
+ 2gh7
4104
+ 3f78
4105
+ 5i38
4106
+ 3h0w
4107
+ 5d3x
4108
+ 3zst
4109
+ 2lbm
4110
+ 6fil
4111
+ 2vk2
4112
+ 6ew7
4113
+ 2iu0
4114
+ 3t0l
4115
+ 5ai9
4116
+ 5wbm
4117
+ 4ark
4118
+ 3hlo
4119
+ 1np0
4120
+ 2w6c
4121
+ 6dk1
4122
+ 5xih
4123
+ 5hk2
4124
+ 1qhc
4125
+ 3v5l
4126
+ 1vr1
4127
+ 4no8
4128
+ 5hmy
4129
+ 2jql
4130
+ 1a46
4131
+ 3kf7
4132
+ 4lv4
4133
+ 5hbh
4134
+ 3wf9
4135
+ 4ieh
4136
+ 2vr3
4137
+ 5dyw
4138
+ 4xuz
4139
+ 1mmr
4140
+ 5ar0
4141
+ 4h4b
4142
+ 5knj
4143
+ 5elv
4144
+ 2g79
4145
+ 4w52
4146
+ 2rnx
4147
+ 4rgd
4148
+ 2az8
4149
+ 3c1k
4150
+ 5ick
4151
+ 4lv3
4152
+ 4mwb
4153
+ 5a83
4154
+ 1prm
4155
+ 4gg5
4156
+ 1rlq
4157
+ 1rq2
4158
+ 5fbn
4159
+ 4e49
4160
+ 1f47
4161
+ 4yfi
4162
+ 2v10
4163
+ 3pa3
4164
+ 4j1f
4165
+ 4jvi
4166
+ 4gmy
4167
+ 2v3d
4168
+ 4cft
4169
+ 1epp
4170
+ 2hoc
4171
+ 4r5g
4172
+ 4k0y
4173
+ 5zz2
4174
+ 1d3p
4175
+ 4ng9
4176
+ 6fav
4177
+ 5hor
4178
+ 3qci
4179
+ 4mbp
4180
+ 4msu
4181
+ 5hg1
4182
+ 1pyn
4183
+ 1hmt
4184
+ 2lpr
4185
+ 4yha
4186
+ 5hoa
4187
+ 1sln
4188
+ 4h3c
4189
+ 4k64
4190
+ 2w0d
4191
+ 4lmu
4192
+ 3tf6
4193
+ 4z2p
4194
+ 2hny
4195
+ 5ctc
4196
+ 6cck
4197
+ 5vd1
4198
+ 3qcf
4199
+ 5ia0
4200
+ 3l7g
4201
+ 1eld
4202
+ 1xh9
4203
+ 1h3c
4204
+ 4urm
4205
+ 4j7i
4206
+ 4ly9
4207
+ 1nu1
4208
+ 5sz5
4209
+ 6cdl
4210
+ 1uvu
4211
+ 2wl4
4212
+ 4eo4
4213
+ 5lj2
4214
+ 2qrl
4215
+ 3kc1
4216
+ 5nqe
4217
+ 6gon
4218
+ 2xs8
4219
+ 5gmj
4220
+ 5eyd
4221
+ 6c42
4222
+ 4wks
4223
+ 3ka2
4224
+ 4p6e
4225
+ 4nnn
4226
+ 3zvw
4227
+ 4q6r
4228
+ 5v8v
4229
+ 2jdv
4230
+ 4lxm
4231
+ 5qcm
4232
+ 5e8f
4233
+ 5y80
4234
+ 3ud5
4235
+ 4n70
4236
+ 1jwt
4237
+ 4cgi
4238
+ 2uw8
4239
+ 4ajw
4240
+ 4ivb
4241
+ 4m6u
4242
+ 2xu1
4243
+ 6bxy
4244
+ 4ud7
4245
+ 3k5f
4246
+ 2arm
4247
+ 5jin
4248
+ 3fmq
4249
+ 2nt7
4250
+ 1p4r
4251
+ 3hxf
4252
+ 4zwx
4253
+ 4xx3
4254
+ 4yee
4255
+ 6aak
4256
+ 2xae
4257
+ 5t31
4258
+ 3bhb
4259
+ 5llh
4260
+ 5nge
4261
+ 4b9h
4262
+ 5oxl
4263
+ 5ipa
4264
+ 1q3w
4265
+ 6d56
4266
+ 6df1
4267
+ 3ole
4268
+ 2v16
4269
+ 3g2s
4270
+ 4led
4271
+ 2uz6
4272
+ 4pnu
4273
+ 3nrz
4274
+ 1f7b
4275
+ 1j4k
4276
+ 5ku9
4277
+ 1a4g
4278
+ 4uia
4279
+ 5kv8
4280
+ 2rke
4281
+ 4e8z
4282
+ 5ykp
4283
+ 4z1n
4284
+ 5sz1
4285
+ 4oak
4286
+ 3twu
4287
+ 2r3p
4288
+ 5cf6
4289
+ 1ym4
4290
+ 5l4j
4291
+ 4exs
4292
+ 4qpa
4293
+ 2k1q
4294
+ 4eny
4295
+ 4i0r
4296
+ 2xz5
4297
+ 5fxq
4298
+ 3re4
4299
+ 6cf5
4300
+ 2cbr
4301
+ 5ece
4302
+ 5t2t
4303
+ 1x8r
4304
+ 4n3w
4305
+ 1klu
4306
+ 5nxx
4307
+ 5jcb
4308
+ 5klt
4309
+ 4usw
4310
+ 5ek9
4311
+ 2mov
4312
+ 6bid
4313
+ 1mjj
4314
+ 1tlo
4315
+ 2hb9
4316
+ 2f94
4317
+ 4luv
4318
+ 5gu4
4319
+ 3wf8
4320
+ 5ufo
4321
+ 4n5t
4322
+ 5hcx
4323
+ 5mtw
4324
+ 1o38
4325
+ 5z1c
4326
+ 4w4z
4327
+ 4v27
4328
+ 1k21
4329
+ 3jrx
4330
+ 5l96
4331
+ 1esz
4332
+ 2m3m
4333
+ 5ivf
4334
+ 1o3p
4335
+ 6std
4336
+ 4auj
4337
+ 2lyb
4338
+ 2w87
4339
+ 5j3l
4340
+ 2fgu
4341
+ 3snd
4342
+ 2w5g
4343
+ 2wsx
4344
+ 4qfl
4345
+ 2vuk
4346
+ 4a23
4347
+ 5iu6
4348
+ 1siv
4349
+ 4ht2
4350
+ 2ltz
4351
+ 1o39
4352
+ 4p0w
4353
+ 3p8p
4354
+ 5ypy
4355
+ 3zvt
4356
+ 2c5y
4357
+ 1ayv
4358
+ 3erk
4359
+ 6g4y
4360
+ 4ucd
4361
+ 3e16
4362
+ 4i7p
4363
+ 5alv
4364
+ 1osg
4365
+ 6gw1
4366
+ 5wxg
4367
+ 4bvb
4368
+ 1nc1
4369
+ 5ugg
4370
+ 5t1w
4371
+ 3tvw
4372
+ 5hkb
4373
+ 2r3n
4374
+ 2iog
4375
+ 1h1r
4376
+ 5eyc
4377
+ 1ogx
4378
+ 3er3
4379
+ 5q0p
4380
+ 3rlq
4381
+ 5itp
4382
+ 4nuc
4383
+ 4w9j
4384
+ 2b8l
4385
+ 5khj
4386
+ 4mz4
4387
+ 3zlv
4388
+ 3okh
4389
+ 2epn
4390
+ 1pwq
4391
+ 3gsm
4392
+ 3hzv
4393
+ 6dpx
4394
+ 4hvg
4395
+ 1s17
4396
+ 6arm
4397
+ 5jq7
4398
+ 2v88
4399
+ 5lay
4400
+ 5nn4
4401
+ 4hj2
4402
+ 3pp0
4403
+ 1rst
4404
+ 3qcq
4405
+ 3rx8
4406
+ 5ewz
4407
+ 5mgx
4408
+ 4c1t
4409
+ 3cso
4410
+ 4wq2
4411
+ 4gxl
4412
+ 5isz
4413
+ 2xj2
4414
+ 4y5h
4415
+ 3zcw
4416
+ 1l8g
4417
+ 5vex
4418
+ 4tky
4419
+ 4elh
4420
+ 4ejn
4421
+ 4pgh
4422
+ 2wuf
4423
+ 2xbp
4424
+ 6gw4
4425
+ 3d7g
4426
+ 5ksu
4427
+ 2qrp
4428
+ 4jg6
4429
+ 4eqf
4430
+ 3hbo
4431
+ 5yjy
4432
+ 2gv7
4433
+ 4hdf
4434
+ 1yi3
4435
+ 5edr
4436
+ 5nmf
4437
+ 4w7p
4438
+ 4fz6
4439
+ 1g4j
4440
+ 4a2a
4441
+ 5kx8
4442
+ 5a0b
4443
+ 4qta
4444
+ 4mho
4445
+ 4fn5
4446
+ 1gj8
4447
+ 3d94
4448
+ 1kdw
4449
+ 6euw
4450
+ 6gpb
4451
+ 6glb
4452
+ 2xaq
4453
+ 3vh9
4454
+ 1aku
4455
+ 5j4v
4456
+ 2f1g
4457
+ 2vfk
4458
+ 2r5a
4459
+ 1g1e
4460
+ 3buf
4461
+ 1bzj
4462
+ 4abg
4463
+ 3cho
4464
+ 5t6j
4465
+ 4jda
4466
+ 5ywy
4467
+ 6cdo
4468
+ 6fo9
4469
+ 1g74
4470
+ 3oxi
4471
+ 5t8p
4472
+ 3pcu
4473
+ 1fyr
4474
+ 1fm9
4475
+ 5ddc
4476
+ 5hwu
4477
+ 1xud
4478
+ 5ai1
4479
+ 4e9u
4480
+ 4uyn
4481
+ 5k05
4482
+ 6fgl
4483
+ 5iha
4484
+ 4aa2
4485
+ 1wzy
4486
+ 2w85
4487
+ 3s2v
4488
+ 4yuy
4489
+ 6dud
4490
+ 1xdg
4491
+ 3bgs
4492
+ 4hiq
4493
+ 3rqf
4494
+ 1jmg
4495
+ 2xeg
4496
+ 4g93
4497
+ 6c7b
4498
+ 3zso
4499
+ 4xjt
4500
+ 4rsc
4501
+ 5q0u
4502
+ 5iyv
4503
+ 1qxy
4504
+ 5n20
4505
+ 1you
4506
+ 5c7e
4507
+ 4x9v
4508
+ 5tjx
4509
+ 3rtm
4510
+ 4b05
4511
+ 3tg5
4512
+ 1b52
4513
+ 1w1v
4514
+ 5lg3
4515
+ 2bmg
4516
+ 5wii
4517
+ 5wew
4518
+ 5iua
4519
+ 2g00
4520
+ 1nm6
4521
+ 4wef
4522
+ 1i7m
4523
+ 4hrc
4524
+ 6at0
4525
+ 1biw
4526
+ 4uhg
4527
+ 5hvp
4528
+ 1ugy
4529
+ 3ery
4530
+ 5ll4
4531
+ 5v6y
4532
+ 1ru1
4533
+ 2o4l
4534
+ 2evo
4535
+ 5lvn
4536
+ 2rc9
4537
+ 2fci
4538
+ 6evq
4539
+ 3rah
4540
+ 3g2v
4541
+ 5ny1
4542
+ 1p10
4543
+ 3o96
4544
+ 5vsd
4545
+ 4cxx
4546
+ 5kcb
4547
+ 4g2l
4548
+ 5vqi
4549
+ 5svl
4550
+ 5qax
4551
+ 5h6v
4552
+ 4aa0
4553
+ 6g1w
4554
+ 3ddu
4555
+ 1pzi
4556
+ 3sax
4557
+ 4nmr
4558
+ 4c1f
4559
+ 5d26
4560
+ 1elr
4561
+ 3gc7
4562
+ 3c84
4563
+ 2y1d
4564
+ 3vd4
4565
+ 2uzo
4566
+ 3v8s
4567
+ 5mqv
4568
+ 4zly
4569
+ 4p0a
4570
+ 5dlv
4571
+ 3qqa
4572
+ 4g3g
4573
+ 3h9f
4574
+ 4wr8
4575
+ 5win
4576
+ 4hzm
4577
+ 3rqe
4578
+ 4j1c
4579
+ 4kii
4580
+ 5mw6
4581
+ 5mwj
4582
+ 1xap
4583
+ 4zv2
4584
+ 1ol1
4585
+ 1a69
4586
+ 4btt
4587
+ 5t4h
4588
+ 2oi2
4589
+ 6bbs
4590
+ 6aaj
4591
+ 2y4a
4592
+ 5i80
4593
+ 1hte
4594
+ 6cfd
4595
+ 1o4l
4596
+ 5kbe
4597
+ 3saz
4598
+ 3kqr
4599
+ 2b07
4600
+ 3iue
4601
+ 3bxf
4602
+ 1is0
4603
+ 5a4t
4604
+ 5ktu
4605
+ 4agd
4606
+ 3nf8
4607
+ 2nd0
4608
+ 5yyz
4609
+ 2ay4
4610
+ 6fhq
4611
+ 3f3t
4612
+ 1zhl
4613
+ 3vbv
4614
+ 1g9s
4615
+ 5aut
4616
+ 4u71
4617
+ 1q66
4618
+ 5vb9
4619
+ 3qtr
4620
+ 4nw2
4621
+ 3qin
4622
+ 5mzg
4623
+ 1d9i
4624
+ 5h08
4625
+ 3rv7
4626
+ 3rv8
4627
+ 6ckx
4628
+ 2itk
4629
+ 4ij1
4630
+ 1pxo
4631
+ 5nt0
4632
+ 5x62
4633
+ 4oc4
4634
+ 5qao
4635
+ 3le9
4636
+ 5dtr
4637
+ 4a6s
4638
+ 3bls
4639
+ 1em6
4640
+ 1pus
4641
+ 3asx
4642
+ 4lng
4643
+ 3mvj
4644
+ 4q3q
4645
+ 3s9t
4646
+ 4gfd
4647
+ 2r8q
4648
+ 5wqc
4649
+ 3ri1
4650
+ 5ih8
4651
+ 6g6w
4652
+ 3cjf
4653
+ 6duh
4654
+ 3shv
4655
+ 4pda
4656
+ 3cpb
4657
+ 1bp0
4658
+ 4lmn
4659
+ 1zm6
4660
+ 2kup
4661
+ 3ij0
4662
+ 6cz3
4663
+ 3v3b
4664
+ 3vru
4665
+ 3arg
4666
+ 2o64
4667
+ 6ayi
4668
+ 4qab
4669
+ 4xu2
4670
+ 5wzu
4671
+ 4lh2
4672
+ 1w6y
4673
+ 5eis
4674
+ 1czr
4675
+ 5jzy
4676
+ 4ejf
4677
+ 4ufe
4678
+ 5wqd
4679
+ 5f0f
4680
+ 4aw5
4681
+ 3iqg
4682
+ 4p90
4683
+ 2ank
4684
+ 4n1u
4685
+ 6eku
4686
+ 2h23
4687
+ 1j5i
4688
+ 2y7w
4689
+ 5alj
4690
+ 5jfr
4691
+ 5eak
4692
+ 2gfk
4693
+ 3be2
4694
+ 5hln
4695
+ 5uc1
4696
+ 3ax5
4697
+ 4f3k
4698
+ 5qb0
4699
+ 4llj
4700
+ 5ldk
4701
+ 4eyr
4702
+ 5v3x
4703
+ 4fys
4704
+ 4crl
4705
+ 2zxd
4706
+ 2etk
4707
+ 3ddp
4708
+ 4q0a
4709
+ 3el0
4710
+ 2ica
4711
+ 4yv8
4712
+ 4jin
4713
+ 5enc
4714
+ 1pb9
4715
+ 5jur
4716
+ 5n1z
4717
+ 5ou2
4718
+ 4y8d
4719
+ 4gby
4720
+ 5acw
4721
+ 4r76
4722
+ 5hrv
4723
+ 2jt6
4724
+ 4hhz
4725
+ 3ocb
4726
+ 4qxj
4727
+ 5er5
4728
+ 3hu2
4729
+ 1v2l
4730
+ 3dz5
4731
+ 4j8b
4732
+ 5vqs
4733
+ 5khd
4734
+ 5fho
4735
+ 2f71
4736
+ 3kdt
4737
+ 5gmm
4738
+ 3sd5
4739
+ 5fck
4740
+ 3q8h
4741
+ 4h36
4742
+ 6bm6
4743
+ 4qmz
4744
+ 4ks5
4745
+ 4dew
4746
+ 1nf8
4747
+ 5ap0
4748
+ 3s73
4749
+ 6chp
4750
+ 4r3b
4751
+ 2oag
4752
+ 2w06
4753
+ 5j74
4754
+ 2a2g
4755
+ 4ju7
4756
+ 3i81
4757
+ 5aol
4758
+ 1af2
4759
+ 5hda
4760
+ 3nu3
4761
+ 6das
4762
+ 1swp
4763
+ 5njz
4764
+ 1b74
4765
+ 1sts
4766
+ 2co0
4767
+ 5od1
4768
+ 3pab
4769
+ 5u06
4770
+ 2gnh
4771
+ 3uig
4772
+ 2uw5
4773
+ 2vrx
4774
+ 1ele
4775
+ 3hp9
4776
+ 3fqa
4777
+ 4mh7
4778
+ 2xkc
4779
+ 2w3l
4780
+ 5w73
4781
+ 5hip
4782
+ 4xv2
4783
+ 1fch
4784
+ 2j4k
4785
+ 1o33
4786
+ 2v57
4787
+ 1kqb
4788
+ 1bhx
4789
+ 4a4w
4790
+ 5bmm
4791
+ 3dcw
4792
+ 4i9o
4793
+ 4ydn
4794
+ 5xii
4795
+ 5duf
4796
+ 5j1r
4797
+ 4no1
4798
+ 1gvu
4799
+ 1ycm
4800
+ 2l3r
4801
+ 4wnp
4802
+ 4yzn
4803
+ 4p75
4804
+ 1b1h
4805
+ 5hwv
4806
+ 1s4d
4807
+ 3rlb
4808
+ 1c5f
4809
+ 4fqo
4810
+ 1iau
4811
+ 5mim
4812
+ 5y93
4813
+ 5qa6
4814
+ 3wk5
4815
+ 4mcb
4816
+ 4qms
4817
+ 6b5m
4818
+ 6fba
4819
+ 2q8z
4820
+ 3r92
4821
+ 5mo0
4822
+ 3wvm
4823
+ 4qh8
4824
+ 6dgz
4825
+ 2w66
4826
+ 2joa
4827
+ 6gih
4828
+ 5fqt
4829
+ 2viw
4830
+ 4o76
4831
+ 2j83
4832
+ 3cm2
4833
+ 3ion
4834
+ 4y2u
4835
+ 3fhb
4836
+ 2rip
4837
+ 2q94
4838
+ 6eiz
4839
+ 3ieo
4840
+ 1uyg
4841
+ 1vsn
4842
+ 5fnf
4843
+ 2vgp
4844
+ 2pk5
4845
+ 2f6t
4846
+ 1zfq
4847
+ 5ypo
4848
+ 2nnv
4849
+ 4jvj
4850
+ 2cej
4851
+ 3hqw
4852
+ 2boj
4853
+ 2h21
4854
+ 3bzi
4855
+ 5ut6
4856
+ 2toh
4857
+ 5urm
4858
+ 5ml8
4859
+ 2qpu
4860
+ 6f5u
4861
+ 3t3y
4862
+ 2zx5
4863
+ 5alu
4864
+ 2vxa
4865
+ 1z4n
4866
+ 1c8k
4867
+ 4w53
4868
+ 5cf8
4869
+ 4hcz
4870
+ 1jik
4871
+ 4b81
4872
+ 5y86
4873
+ 5ul6
4874
+ 5l2n
4875
+ 3dhk
4876
+ 5vio
4877
+ 2piz
4878
+ 3jyj
4879
+ 5ly2
4880
+ 6do4
4881
+ 1v2s
4882
+ 4ezo
4883
+ 1iht
4884
+ 3hl8
4885
+ 4q1c
4886
+ 3sk2
4887
+ 2xjj
4888
+ 6cdj
4889
+ 4w4w
4890
+ 1bm7
4891
+ 5lca
4892
+ 3rxf
4893
+ 4q7w
4894
+ 5ceq
4895
+ 5aen
4896
+ 3kjf
4897
+ 2psj
4898
+ 4avt
4899
+ 5nxo
4900
+ 3qn7
4901
+ 1d6w
4902
+ 5ory
4903
+ 3ckr
4904
+ 6eyz
4905
+ 4lxa
4906
+ 2chz
4907
+ 6eqw
4908
+ 3nzc
4909
+ 5umw
4910
+ 1ilh
4911
+ 1o4j
4912
+ 2pks
4913
+ 6evp
4914
+ 2wks
4915
+ 2yc0
4916
+ 4xy8
4917
+ 2jg0
4918
+ 1u33
4919
+ 5iez
4920
+ 2zxb
4921
+ 2qnz
4922
+ 5wfz
4923
+ 3ob0
4924
+ 1g36
4925
+ 4at3
4926
+ 3pd3
4927
+ 5jjr
4928
+ 6eo8
4929
+ 4omc
4930
+ 1k3t
4931
+ 2vl8
4932
+ 4y8x
4933
+ 3l4t
4934
+ 3s2a
4935
+ 6f4x
4936
+ 2qrm
4937
+ 4u68
4938
+ 4o3f
4939
+ 4rpn
4940
+ 2zns
4941
+ 4zvi
4942
+ 5ul1
4943
+ 3fwv
4944
+ 1exw
4945
+ 2vvv
4946
+ 2aof
4947
+ 5vfd
4948
+ 2qcf
4949
+ 3bl0
4950
+ 3gk1
4951
+ 5i2i
4952
+ 5w8v
4953
+ 4ptc
4954
+ 3piy
4955
+ 1nlj
4956
+ 4ddy
4957
+ 2vwo
4958
+ 3kdu
4959
+ 6bt0
4960
+ 5l99
4961
+ 3rtn
4962
+ 2yk1
4963
+ 6f28
4964
+ 1zh7
4965
+ 5uiu
4966
+ 4eh7
4967
+ 5epp
4968
+ 1o6h
4969
+ 1b05
4970
+ 5t5g
4971
+ 6b1k
4972
+ 4k75
4973
+ 5hva
4974
+ 3ips
4975
+ 5obg
4976
+ 5a3o
4977
+ 2q89
4978
+ 5owc
4979
+ 5ggo
4980
+ 4xk9
4981
+ 3cf9
4982
+ 1czl
4983
+ 2ewy
4984
+ 3a1e
4985
+ 1xlz
4986
+ 5k0b
4987
+ 2nta
4988
+ 4gvc
4989
+ 1axr
4990
+ 5fou
4991
+ 1kkq
4992
+ 4u7o
4993
+ 5i4o
4994
+ 4cwr
4995
+ 5fa7
4996
+ 1i5d
4997
+ 5hdu
4998
+ 2zg3
4999
+ 2pmk
5000
+ 3mrx
5001
+ 3s1g
5002
+ 6mvu
5003
+ 1wkm
5004
+ 6dvl
5005
+ 5fe9
5006
+ 5odx
5007
+ 3sbh
5008
+ 2jiw
5009
+ 2r43
5010
+ 4ibb
5011
+ 4o05
5012
+ 2v13
5013
+ 4io8
5014
+ 4z2h
5015
+ 2vmc
5016
+ 5ggn
5017
+ 2afx
5018
+ 1g3c
5019
+ 2cn0
5020
+ 3oki
5021
+ 4xty
5022
+ 1xgi
5023
+ 3mmf
5024
+ 5v1d
5025
+ 2uxz
5026
+ 4wwo
5027
+ 4kpz
5028
+ 4xbb
5029
+ 5up0
5030
+ 2gzl
5031
+ 4r5x
5032
+ 3iet
5033
+ 1oq5
5034
+ 6afd
5035
+ 1b55
5036
+ 6chl
5037
+ 1xp0
5038
+ 2q5k
5039
+ 5ti7
5040
+ 5uqx
5041
+ 6bm5
5042
+ 5w94
5043
+ 1k1o
5044
+ 4lm3
5045
+ 1ek1
5046
+ 4ayr
5047
+ 2xxn
5048
+ 5lsc
5049
+ 1o0m
5050
+ 4ui7
5051
+ 1ppl
5052
+ 6beh
5053
+ 3ud7
5054
+ 4zxx
5055
+ 6gzh
5056
+ 1uu9
5057
+ 5jgq
5058
+ 2q95
5059
+ 5l2y
5060
+ 3v3l
5061
+ 2gmv
5062
+ 5bvo
5063
+ 3snb
5064
+ 5q13
5065
+ 5upf
5066
+ 5tq3
5067
+ 3gw5
5068
+ 4wwp
5069
+ 5wbo
5070
+ 2j7g
5071
+ 3rmf
5072
+ 1fhd
5073
+ 2w2u
5074
+ 4flk
5075
+ 2r0y
5076
+ 2wbb
5077
+ 3da6
5078
+ 3l4z
5079
+ 1v2w
5080
+ 2azb
5081
+ 2bz5
5082
+ 2oxn
5083
+ 3f8w
5084
+ 1w7g
5085
+ 3moh
5086
+ 5m3a
5087
+ 5aqq
5088
+ 3tz0
5089
+ 3zyu
5090
+ 2pzy
5091
+ 5usy
5092
+ 4mx0
5093
+ 4cpq
5094
+ 4aq6
5095
+ 5eld
5096
+ 5gmv
5097
+ 3qbn
5098
+ 6rsa
5099
+ 3u5j
5100
+ 5h1t
5101
+ 1ms0
5102
+ 1ctr
5103
+ 5lub
5104
+ 4h5d
5105
+ 4n99
5106
+ 4cc2
5107
+ 4hzw
5108
+ 5lbq
5109
+ 3ati
+ 3pd2
+ 1f74
+ 1qyg
+ 5dhg
+ 4mwx
+ 5ohy
+ 4ob2
+ 2b2v
+ 3c0z
+ 2gtv
+ 4r75
+ 5w2s
+ 3wsy
+ 1rs2
+ 2r6w
+ 4agn
+ 4gk4
+ 4qq5
+ 6bfa
+ 1n7t
+ 1ocn
+ 1gnj
+ 4u79
+ 3hmm
+ 2vl1
+ 1ec1
+ 5f4u
+ 2zsc
+ 4u5j
+ 4jfk
+ 5kew
+ 4btw
+ 2cbu
+ 1vkj
+ 4jnc
+ 1nli
+ 6bw3
+ 3fyk
+ 3cl2
+ 6f3i
+ 1w3l
+ 3gv9
+ 4h4e
+ 2ylc
+ 2fgi
+ 2ha5
+ 5sz9
+ 5gjg
+ 2q1q
+ 4aza
+ 5yia
+ 2rny
+ 1o30
+ 3m1k
+ 6dj5
+ 1pfu
+ 4awf
+ 5oqu
+ 4k42
+ 2wcg
+ 3oj8
+ 4ngs
+ 4lq9
+ 1jzs
+ 6g9h
+ 6g9u
+ 3axm
+ 4qp7
+ 4mma
+ 1g98
+ 5wi0
+ 5iep
+ 2opy
+ 6eq1
+ 2pri
+ 4y2j
+ 4x1s
+ 1bux
+ 4cjn
+ 2vnn
+ 5d2a
+ 6cms
+ 3g72
+ 4wz4
+ 2las
+ 4his
+ 5ohi
+ 1apv
+ 2v86
+ 5nap
+ 3wdc
+ 3d9m
+ 4kzl
+ 5ixf
+ 1fd7
+ 5f25
+ 5js3
+ 3d7h
+ 4ceb
+ 3dst
+ 4yym
+ 4x7n
+ 4cr5
+ 1gar
+ 4z2b
+ 3iph
+ 5t37
+ 3zqe
+ 6m9t
+ 3m54
+ 3szm
+ 2oxx
+ 4mlx
+ 3uxg
+ 3qri
+ 4l0v
+ 1a07
+ 5qay
+ 4cwo
+ 3r0y
+ 1wbs
+ 8gpb
+ 6bgz
+ 1ecv
+ 1ha2
+ 2kfx
+ 2be2
+ 6cjw
+ 2p94
+ 5tq7
+ 5fsc
+ 4btb
+ 5yov
+ 6eox
+ 5am2
+ 2pvm
+ 3gn7
+ 2ql7
+ 5j58
+ 1msm
+ 2bjm
+ 3rt8
+ 3ds3
+ 1m13
+ 4nxo
+ 2vj9
+ 3fqe
+ 2m0v
+ 3so9
+ 4r5b
+ 1v2j
+ 4bks
+ 4tkj
+ 1jld
+ 2ojf
+ 1z5m
+ 3m1s
+ 4lpf
+ 4zls
+ 4af3
+ 5eue
+ 2jh6
+ 2kvm
+ 5etl
+ 5hn0
+ 3f6g
+ 3fdm
+ 2jj3
+ 2zjw
+ 3sfh
+ 1ong
+ 1noi
+ 3cen
+ 2p4j
+ 4rvr
+ 4az5
+ 2wqp
+ 6fnt
+ 3ejt
+ 5c2h
+ 3sow
+ 3ml2
+ 1ld7
+ 1yz3
+ 3m2u
+ 2wcx
+ 1ec2
+ 3iqj
+ 3ccn
+ 3r1v
+ 5foo
+ 3cn0
+ 2z4w
+ 4ks1
+ 3rtf
+ 4bfr
+ 2pvk
+ 3ppr
+ 3r93
+ 4x9w
+ 2z7i
+ 4hdb
+ 4bti
+ 1eef
+ 6bln
+ 4wyy
+ 4bw3
+ 3mks
+ 3iit
+ 2axi
+ 4urz
+ 3q2a
+ 2w12
+ 3aho
+ 6e06
+ 5dy7
+ 1gi1
+ 3az8
+ 4ew2
+ 3tgs
+ 3zc5
+ 2ggx
+ 4ih3
+ 5u00
+ 4qkx
+ 4oef
+ 6gjn
+ 1ewj
+ 2xii
+ 5al2
+ 1o4p
+ 2xck
+ 3dcs
+ 2g0h
+ 5adr
+ 1kv1
+ 6h33
+ 2wl5
+ 6h3k
+ 1qq9
+ 5vdr
+ 5x4q
+ 2xk3
+ 3p4f
+ 5prc
+ 4gxs
+ 1qm5
+ 4aw8
+ 2eep
+ 5l15
+ 3tk2
+ 5heb
+ 5aia
+ 4nh9
+ 5nki
+ 3eos
+ 4m8y
+ 5u49
+ 5yh8
+ 5ylu
+ 6f8r
+ 3nxq
+ 2x3t
+ 5exl
+ 4x0z
+ 4rj6
+ 5ly1
+ 3zo4
+ 2nw4
+ 3upx
+ 5btv
+ 4n3r
+ 5ek0
+ 3otx
+ 4r4c
+ 1y3n
+ 5cal
+ 4dld
+ 5z89
+ 5sys
+ 1ybo
+ 2cmo
+ 1k3q
+ 2ima
+ 3wv2
+ 3caj
+ 3vv7
+ 1xug
+ 4l2k
+ 3aya
+ 3gs6
+ 5alg
+ 4b4n
+ 3zyr
+ 4hxm
+ 2ov4
+ 3n9s
+ 4n07
+ 2jnp
+ 3zxr
+ 3v7x
+ 1aid
+ 2zis
+ 4u8w
+ 6e2o
+ 5yqn
+ 3oc0
+ 4b1d
+ 3g9e
+ 4qw7
+ 1c1r
+ 1zsf
+ 2nm1
+ 3cs7
+ 2x24
+ 5d7c
+ 1bji
+ 4yl3
+ 4k78
+ 2f9u
+ 4kab
+ 3rl7
+ 4n8r
+ 5etu
+ 5drs
+ 6es0
+ 2g8n
+ 5ol3
+ 2i6a
+ 5gvk
+ 2qrk
+ 3sx4
+ 4q4r
+ 5nq5
+ 3ctt
+ 5kxc
+ 1tyr
+ 4j0z
+ 5z4h
+ 2uzn
+ 4oon
+ 2ohr
+ 4re4
+ 3prs
+ 1f8c
+ 4jvb
+ 3eig
+ 1dbk
+ 4p74
+ 4zg9
+ 3wtn
+ 3l7a
+ 4cdr
+ 4fli
+ 2am9
+ 2jkp
+ 2bts
+ 1u9q
+ 5ea6
+ 6db3
+ 4b6o
+ 3nnw
+ 3p5k
+ 2c4f
+ 1rbp
+ 2xuc
+ 4hvd
+ 3ijz
+ 5n4t
+ 4jr5
+ 6e9w
+ 5f63
+ 4cjq
+ 1b46
+ 4j4n
+ 4kcx
+ 2a2x
+ 4h3i
+ 4k4f
+ 6bky
+ 2bak
+ 5csd
+ 4mgc
+ 4mwv
+ 1mzs
+ 4ym2
+ 4ztm
+ 5epn
+ 5v7w
+ 5hes
+ 3n49
+ 2zlg
+ 2kbr
+ 5fng
+ 2fgv
+ 1f4e
+ 2gd8
+ 3g6h
+ 5u0f
+ 4j44
+ 5v35
+ 1c9d
+ 6g6y
+ 4azi
+ 5kpk
+ 4ui8
+ 1no9
+ 4p3h
+ 3gcs
+ 5ngu
+ 6bl1
+ 1q6j
+ 2zda
+ 3b27
+ 1o2j
+ 3u2k
+ 6mil
+ 1n7i
+ 2wec
+ 1h6h
+ 3owl
+ 3g34
+ 1ec9
+ 4crf
+ 3hha
+ 3qiy
+ 4dh6
+ 4pid
+ 2y55
+ 5hhx
+ 5etf
+ 4xar
+ 2pvu
+ 5ito
+ 1li3
+ 3dya
+ 1xsc
+ 3ti3
+ 1tze
+ 6fmf
+ 1di9
+ 2yke
+ 5f4p
+ 5sz7
+ 1m2p
+ 1uu8
+ 4y64
+ 3rt6
+ 3vs3
+ 3nfl
+ 4bdg
+ 5jv0
+ 3wd1
+ 4pge
+ 5cxz
+ 5ak4
+ 4mdt
+ 2j27
+ 4lte
+ 2ayr
+ 5m6f
+ 5dv4
+ 2qbu
+ 6ciy
+ 3ekq
+ 3c7q
+ 2osc
+ 4r6w
+ 2ph9
+ 3qkm
+ 3hhu
+ 4xqb
+ 5jr6
+ 2uwl
+ 5ezz
+ 1y3y
+ 1i8i
+ 3sxf
+ 2i7c
+ 4m1j
+ 6rnt
+ 3g5y
+ 3hwx
+ 2p83
+ 1mik
+ 1mrn
+ 1sm3
+ 3py1
+ 5fah
+ 4pnn
+ 3tza
+ 3zmp
+ 2wxl
+ 5qaz
+ 4odf
+ 1eix
+ 5b2d
+ 5kdr
+ 3hcm
+ 2x91
+ 3frg
+ 6gch
+ 4qz2
+ 1hvh
+ 1g3m
+ 1tnl
+ 3s7f
+ 6cpa
+ 6g0w
+ 6ffh
+ 3kec
+ 1nj5
+ 6afr
+ 5kup
+ 6ftn
+ 4o97
+ 2i5f
+ 5nzf
+ 2f89
+ 5fsn
+ 3nex
+ 6b2c
+ 4zt5
+ 3gt9
+ 3sud
+ 5btx
+ 4fab
+ 3fxb
+ 2x4z
+ 3l4x
+ 5svx
+ 5u98
+ 1oim
+ 3lpr
+ 4pqa
+ 4pnk
+ 4ogj
+ 5k0s
+ 4oba
+ 6h7m
+ 3aas
+ 4zae
+ 5u5t
+ 4n6h
+ 4dsy
+ 1ywh
+ 4w9o
+ 2qbp
+ 5d45
+ 5lhu
+ 2bro
+ 1uz4
+ 1ugx
+ 6avi
+ 2aox
+ 5nb6
+ 4he9
+ 2oz5
+ 5n24
+ 1h8l
+ 3pwd
+ 1it6
+ 4rvk
+ 6b27
+ 2qu5
+ 4x7j
+ 5tyo
+ 2ym3
+ 5mjn
+ 3h26
+ 5ufs
+ 2wj1
+ 1rt1
+ 1kz8
+ 2zbk
+ 4krs
+ 5j87
+ 3u3u
+ 3g1d
+ 5bms
+ 1u9w
+ 2z7g
+ 4xv1
+ 4ngm
+ 4j8t
+ 5d11
+ 5c11
+ 3f17
+ 4alw
+ 3q3b
+ 3rwd
+ 1c70
+ 3iiw
+ 1hww
+ 5kv9
+ 4wey
+ 4ok3
+ 2ctc
+ 4bcb
+ 6e8m
+ 3shy
+ 4mnw
+ 5ut0
+ 3lcd
+ 5yc3
+ 2vwz
+ 2xbx
+ 1o0o
+ 2vj1
+ 6b33
+ 4hnp
+ 1w70
+ 4bh3
+ 4gng
+ 2q8m
+ 4rrr
+ 4rqv
+ 1hih
+ 3u1i
+ 5ovr
+ 4ach
+ 6c0u
+ 4qiz
+ 4k18
+ 1e72
+ 4ocq
+ 1b11
+ 3gds
+ 3mwu
+ 5ivs
+ 3bux
+ 3qc4
+ 5ovc
+ 1pu7
+ 1y3w
+ 4v01
+ 6e7j
+ 1uu3
+ 5dh3
+ 3tu1
+ 4q3s
+ 4r5a
+ 4z16
+ 3uyr
+ 5wir
+ 4exg
+ 5kby
+ 4iur
+ 4zv1
+ 5xsu
+ 4gtp
+ 2o8h
+ 3wqw
+ 5th7
+ 5tv3
+ 1tfz
+ 2v3e
+ 5ntw
+ 3cyy
+ 1ny2
+ 4q6f
+ 2pjt
+ 5w7x
+ 5jap
+ 3b68
+ 5tyl
+ 2a4w
+ 2v00
+ 4nks
+ 1ets
+ 2fq6
+ 5hbs
+ 4r59
+ 2ama
+ 2web
+ 6e4t
+ 4xm6
+ 3bkl
+ 6fty
+ 5v0n
+ 6ge7
+ 2boh
+ 1ikt
+ 2r5d
+ 5alh
+ 5szc
+ 1cr6
+ 4zzy
+ 2p1c
+ 1phw
+ 5w92
+ 3l9n
+ 3k8q
+ 3eml
+ 3nij
+ 5amg
+ 3bv2
+ 2w73
+ 1h35
+ 3tne
+ 3jyr
+ 5l9g
+ 2xsb
+ 4f70
+ 2iuz
+ 1ml1
+ 3ewj
+ 4mk9
+ 2lo6
+ 2jiu
+ 4es0
+ 6dqa
+ 3kab
+ 5mxk
+ 2i0y
+ 4zy2
+ 6cqt
+ 2i47
+ 2i0d
+ 2i80
+ 1nfw
+ 5uzk
+ 6hd6
+ 1y6a
+ 1vcu
+ 1e2k
+ 4wz6
+ 1nc6
+ 4zyv
+ 1wcc
+ 4og4
+ 3il6
+ 1kzk
+ 4frk
+ 1nms
+ 5kax
+ 1fwu
+ 5djr
+ 3a1c
+ 1hdq
+ 5ml6
+ 5l4h
+ 4rj3
+ 2x4t
+ 5nk9
+ 2xj1
+ 3mfw
+ 3gy3
+ 5nvv
+ 4rh5
+ 4jxv
+ 4zs2
+ 5ivv
+ 4axa
+ 5lli
+ 3q5h
+ 4wmu
+ 2z50
+ 4lkf
+ 1rjk
+ 6g93
+ 4nni
+ 6exs
+ 1nki
+ 3i7g
+ 6ei5
+ 6erv
+ 1z3c
+ 3r5n
+ 1lol
+ 3qk5
+ 1rhm
+ 4jfe
+ 1xm4
+ 4gny
+ 3ti1
+ 4hhy
+ 5b5b
+ 4fjz
+ 3w2o
+ 1oxq
+ 3wk8
+ 6bl2
+ 3oe8
+ 3f7z
+ 4j3i
+ 5ogl
+ 4my6
+ 6fob
+ 4o1l
+ 2uy0
+ 5dqc
+ 1g46
+ 5orl
+ 1uwu
+ 185l
+ 4z07
+ 2w8w
+ 3l3l
+ 3vvy
+ 3jwq
+ 1o2z
+ 5lsy
+ 3iok
+ 3g6m
+ 1sdu
+ 4xtp
+ 4qt2
+ 5fdd
+ 5ia2
+ 1ux7
+ 5jhk
+ 4ca4
+ 3qfz
+ 1qvt
+ 3t9t
+ 4j0y
+ 4x49
+ 3lpf
+ 2bpv
+ 3jzr
+ 1fcz
+ 3qps
+ 4p7m
+ 4hvb
+ 3cp9
+ 5jm4
+ 2q9y
+ 3eid
+ 4o4y
+ 3cid
+ 3nc4
+ 6b1f
+ 3rth
+ 3rxe
+ 1sl3
+ 4ap0
+ 4mu7
+ 1pvn
+ 4jfm
+ 1rej
+ 3ens
+ 3e9b
+ 4hby
+ 6guf
+ 3iiy
+ 2z4y
+ 4to8
+ 5t1i
+ 5upj
+ 3c89
+ 4dju
+ 3pcg
+ 6gu2
+ 2euk
+ 3vbw
+ 3px8
+ 3g4k
+ 6dh0
+ 1ukh
+ 4rxh
+ 2ien
+ 2upj
+ 5m44
+ 3gzn
+ 3eio
+ 2hk5
+ 3emh
+ 1o3f
+ 5ipj
+ 6dai
+ 3su5
+ 3sh1
+ 4iic
+ 5e0h
+ 4fs4
+ 4m3q
+ 4wf2
+ 2osf
+ 2pj3
+ 2r0z
+ 1jrs
+ 1i9q
+ 2zgx
+ 2jfh
+ 2ow9
+ 2aa9
+ 5vt1
+ 5l0c
+ 4ybk
+ 3zmt
+ 5kr1
+ 2r7b
+ 2lh8
+ 2wyn
+ 3ckz
+ 6lpr
+ 1usn
+ 3v2n
+ 5oq8
+ 6bhd
+ 2pym
+ 4xaq
+ 3puj
+ 4ek9
+ 1eb1
+ 5o1a
+ 5k5c
+ 2rnw
+ 5qa8
+ 2zg1
+ 4gtm
+ 3uec
+ 4ncm
+ 4alg
+ 5hvs
+ 1lxh
+ 1y6r
+ 5aux
+ 2qwb
+ 2h2d
+ 4qir
+ 1s89
+ 1qwf
+ 2avv
+ 5knt
+ 4odn
+ 3o1e
+ 3lj7
+ 3eov
+ 6cpw
+ 1qca
+ 1jii
+ 3lkh
+ 3rul
+ 4g4p
+ 1qk4
+ 5nkh
+ 1w0z
+ 1jlq
+ 4ngt
+ 4meo
+ 4pmt
+ 2v2h
+ 4lm2
+ 3bu6
+ 1bzs
+ 2vvc
+ 1v0n
+ 5a7y
+ 3d67
+ 4u0c
+ 4uxj
+ 4b78
+ 4bo2
+ 2jsd
+ 4aa1
+ 3ga5
+ 5hka
+ 3vtd
+ 2gm1
+ 4x6x
+ 6ma2
+ 2ces
+ 4ayq
+ 1tpw
+ 2uxi
+ 3gc4
+ 5ye7
+ 2cbv
+ 1v0p
+ 2uzb
+ 5xvq
+ 3r7b
+ 3umo
+ 3b8q
+ 2y6c
+ 3bc5
+ 2bes
+ 5wa7
+ 4f6x
+ 3uvk
+ 1hgt
+ 6m8y
+ 5twg
+ 4avu
+ 5vwi
+ 1ms6
+ 5xo7
+ 3dcq
+ 5km9
+ 2ygf
+ 1zsr
+ 1f40
+ 5m2q
+ 1bl7
+ 4k7n
+ 4hvs
+ 4z2i
+ 5voj
+ 3ck8
+ 5d6q
+ 3g0e
+ 5y0f
+ 3dpk
+ 1bjv
+ 6bw5
+ 3dow
+ 2zif
+ 1mzc
+ 2nng
+ 2xnm
+ 3f8e
+ 2ndg
+ 4lh5
+ 3e8r
+ 2e9o
+ 3uih
+ 4hlc
+ 5moo
+ 4epy
+ 2i3h
+ 6fby
+ 6ew6
+ 3ama
+ 4l0t
+ 1vj9
+ 3pup
+ 2xct
+ 5fh8
+ 5ivz
+ 3pxq
+ 1str
+ 2xxx
+ 2pqj
+ 5mra
+ 4o28
+ 1d6n
+ 1pr5
+ 1rpf
+ 4tt2
+ 3bhx
+ 2cma
+ 3dct
+ 4qt0
+ 6f1j
+ 3gvu
+ 5yy9
+ 2x81
+ 2cv3
+ 6fa4
+ 5jo0
+ 3l0e
+ 3p3j
+ 1eed
+ 5aib
+ 3qbc
+ 1n7j
+ 5fpi
+ 6bnl
+ 5v8q
+ 6gxw
+ 1rw8
+ 5zw6
+ 4e3d
+ 2mji
+ 6ayh
+ 3lcv
+ 1w2k
+ 3zns
+ 5mty
+ 6c3u
+ 4ks4
+ 3ds9
+ 4hld
+ 3elj
+ 4j45
+ 5v41
+ 2yne
+ 5nad
+ 5lvq
+ 3h0a
+ 3d9p
+ 4tu4
+ 3ip5
+ 1w22
+ 4kbk
+ 5lqq
+ 5wyx
+ 3ps1
+ 5ou1
+ 5j9x
+ 2xru
+ 3t4p
+ 3uzp
+ 2o2u
+ 3dei
+ 3u93
+ 4z8m
+ 3lhs
+ 2f81
+ 3fn0
+ 4iqt
+ 5laz
+ 1xot
+ 2z52
+ 1zs0
+ 1oau
+ 5d47
+ 3o4k
+ 6dcg
+ 1o4q
+ 1csi
+ 3tpp
+ 5nwd
+ 2wca
+ 4ie5
+ 1u1w
+ 2w92
+ 3r4n
+ 5aae
+ 4gja
+ 2pj2
+ 5f2p
+ 5hjq
+ 2yme
+ 3cbs
+ 5u0y
+ 5w3i
+ 5qaw
+ 5d3h
+ 3sb0
+ 5alx
+ 5g57
+ 3uhm
+ 6afj
+ 5jf5
+ 4ccd
+ 5mhq
+ 5etx
+ 1clu
+ 4k6u
+ 6g28
+ 4zyz
+ 1sj0
+ 4eh3
+ 2r5b
+ 4pyn
+ 5eq0
+ 1cil
+ 5le1
+ 5nyh
+ 6bdy
+ 2auz
+ 5vm0
+ 2ff1
+ 3f8f
+ 2rka
+ 3dda
+ 4iu4
+ 5bs4
+ 5a0a
+ 2qn2
+ 4fyo
+ 1p2a
+ 2uze
+ 1x38
+ 4ps8
+ 2g19
+ 3nzu
+ 3gi6
+ 5dtt
+ 1gaf
+ 3nii
+ 2xm8
+ 4att
+ 2vex
+ 4jx7
+ 6h34
+ 4tvj
+ 4lh3
+ 3pvw
+ 1ai7
+ 1kc7
+ 2rl5
+ 6c0r
+ 4rcp
+ 1nhv
+ 5mw2
+ 4mg7
+ 1drk
+ 4mzh
+ 1r6g
+ 5xpl
+ 4o74
+ 3bu8
+ 3cgf
+ 4nie
+ 2a3b
+ 1xos
+ 2wlz
+ 1a37
+ 5lpj
+ 1yvh
+ 4xhe
+ 2lct
+ 3lau
+ 3zki
+ 1g2l
+ 4uiv
+ 6b4u
+ 3jqf
+ 3ixk
+ 4pvt
+ 5lpl
+ 3sdk
+ 5km3
+ 1t7f
+ 4z2l
+ 4tk1
+ 2j9a
+ 3bbt
+ 3tjc
+ 2qi4
+ 4acg
+ 5tuz
+ 4lbo
+ 5uuu
+ 5ckr
+ 2h2e
+ 5qai
+ 1qxk
+ 2wyf
+ 3f0r
+ 1rd4
+ 2ipo
+ 1ukt
+ 2xx2
+ 3hvh
+ 2bmz
+ 1jqd
+ 5d24
+ 4cpy
+ 4bdk
+ 2xu4
+ 2evm
+ 6alc
+ 4r5y
+ 4oc5
+ 3kdm
+ 4gk3
+ 5wvd
+ 4pz5
+ 1x1z
+ 2duv
+ 6fnx
+ 5u4b
+ 4dfn
+ 4agp
+ 1n9a
+ 5mny
+ 3hqh
+ 3o8h
+ 4qem
+ 3l81
+ 4c0r
+ 1utp
+ 5z68
+ 5tuy
+ 5fb7
+ 3wv3
+ 3n7r
+ 3bze
+ 2bvd
+ 2fx6
+ 5vcw
+ 4ibf
+ 1zom
+ 4f6w
+ 2fdp
+ 1i91
+ 1d2s
+ 3ikd
+ 4w9e
+ 3fej
+ 4ntj
+ 4wk7
+ 2a29
+ 3f15
+ 4u6c
+ 1ing
+ 3wq6
+ 4ock
+ 2yb0
+ 3wtl
+ 5yl2
+ 2hz4
+ 2gbf
+ 3r8z
+ 5cso
+ 2j95
+ 5jxq
+ 4ofb
+ 2fr3
+ 5b4l
+ 3ovx
+ 6cha
+ 4qwl
+ 1tl9
+ 3mo2
+ 3c8e
+ 3k5x
+ 1uw6
+ 3fhr
+ 1b3f
+ 5a8z
+ 5bvk
+ 4o62
+ 2hb1
+ 3qxd
+ 3i4y
+ 6g9d
+ 6gop
+ 5ajv
+ 1oar
+ 5flt
+ 2zhd
+ 2xey
+ 4ikr
+ 4ckr
+ 4ufg
+ 5xvk
+ 2vmd
+ 5oci
+ 1syo
+ 4lq3
+ 1dtq
+ 5bpp
+ 6fg6
+ 4cki
+ 5wei
+ 3fl9
+ 5cnj
+ 4os5
+ 6awn
+ 3atm
+ 1ypj
+ 1odj
+ 5msb
+ 4f2w
+ 2h5a
+ 2w3k
+ 5w1v
+ 1pi4
+ 4kx8
+ 5m25
+ 1dhj
+ 2qki
+ 4b72
+ 1n0t
+ 3oag
+ 5dyo
+ 1ql9
+ 5u7k
+ 5mod
+ 5mtx
+ 4u6w
+ 1yk7
+ 3d8z
+ 2pnc
+ 5em8
+ 5cil
+ 3iny
+ 5l2t
+ 3u6a
+ 5fow
+ 2owb
+ 1k1y
+ 5nf9
+ 3ijg
+ 1qan
+ 4ad2
+ 2p09
+ 4xir
+ 4tqn
+ 5q11
+ 3zzh
+ 3qel
+ 5jha
+ 3rxh
+ 2ha0
+ 5xqx
+ 3q2g
+ 5wl0
+ 4bic
+ 6ccn
+ 1mh5
+ 2ai8
+ 1xp9
+ 4ef6
+ 5jzb
+ 2h8h
+ 5sxm
+ 3dm6
+ 2ym8
+ 6fmi
+ 5f1h
+ 4qyg
+ 2qtu
+ 3vfb
+ 1vwf
+ 1ivp
+ 3vf7
+ 5typ
+ 5igq
+ 4h1j
+ 3sug
+ 3h85
+ 2viy
+ 6hf5
+ 2poq
+ 5q17
+ 6afa
+ 4mpc
+ 6c0t
+ 4zsh
+ 5dit
+ 6aom
+ 3h8c
+ 2e9v
+ 2hfp
+ 4u5n
+ 5caq
+ 4m2w
+ 4tv3
+ 3oy8
+ 1pot
+ 3owk
+ 3arn
+ 2dri
+ 4ymj
+ 3qto
+ 5ix0
+ 4jr0
+ 5i5x
+ 1nzl
+ 5u4a
+ 4eg4
+ 1wbw
+ 3nee
+ 5fdp
+ 4cqe
+ 1hvs
+ 2w6q
+ 1n8v
+ 5ct7
+ 5dx4
+ 2fdd
+ 4ydf
+ 5c2e
+ 3qsb
+ 5zk3
+ 5vgy
+ 5ot8
+ 6boe
+ 1c29
+ 2pjl
+ 2euf
+ 6cnk
+ 3as1
+ 3abu
+ 3dpd
+ 5aku
+ 4k6w
+ 4j3l
+ 4nau
+ 3fpm
+ 5w7u
+ 3c3q
+ 3mg7
+ 1c12
+ 3buo
+ 3rhx
+ 5lyh
+ 1b0h
+ 4ynd
+ 5y0x
+ 5enb
+ 2a5b
+ 5mqe
+ 3l5e
+ 1tl1
+ 3way
+ 3gs7
+ 3ex3
+ 4np9
+ 5mxq
+ 2cnh
+ 1swr
+ 3t1m
+ 4eyj
+ 6mj7
+ 5f01
+ 3rkz
+ 4ln2
+ 4jia
+ 4l4z
+ 4qr4
+ 2er9
+ 5vnb
+ 5xff
+ 4prp
+ 2jqk
+ 1gi8
+ 2ynr
+ 1gfz
+ 5nk2
+ 1rsd
+ 5umz
+ 3jzf
+ 4k69
+ 6b5o
+ 4o4k
+ 4jal
+ 3nht
+ 4j1i
+ 5xof
+ 4ie7
+ 4f9w
+ 5tvn
+ 2qoh
+ 2jdy
+ 3wym
+ 1w31
+ 5c6o
+ 3nim
+ 1vso
+ 4z2g
+ 1o37
+ 3lbz
+ 4bqs
+ 3tfv
+ 5i8g
+ 3utu
+ 3msl
+ 1bra
+ 2k46
+ 4mg9
+ 2w76
+ 4wyp
+ 5jcj
+ 3b92
+ 3zha
+ 2i4x
+ 3wbl
+ 5trk
+ 2n8t
+ 3ge7
+ 4unn
+ 2ria
+ 4wgi
+ 4bds
+ 2y06
+ 6bhh
+ 4raq
+ 5i56
+ 3vv8
+ 2xk4
+ 4az0
+ 3ov1
+ 4ycw
+ 5ei4
+ 4u1b
+ 3bv3
+ 5etp
+ 3kcf
+ 6fod
+ 3eq9
+ 1awf
+ 1eat
+ 4giu
+ 3vyd
+ 2pvh
+ 5aaf
+ 2hdr
+ 1lgw
+ 2lsp
+ 4l02
+ 6b7b
+ 3ioe
+ 4c1m
+ 5kxi
+ 5yof
+ 4obv
+ 3r6t
+ 1uj6
+ 1a09
+ 4yk6
+ 1rzx
+ 3cy2
+ 6f8v
+ 1xor
+ 4la7
+ 1qwe
+ 5mxo
+ 2opb
+ 3psl
+ 1aqi
+ 5y5w
+ 2j4q
+ 5vfn
+ 3e37
+ 4fzg
+ 5qim
+ 6da4
+ 1oxr
+ 5hgc
+ 3at3
+ 5ieg
+ 6cq0
+ 2ioa
+ 3lpg
+ 4mzo
+ 1w7x
+ 2aia
+ 5v2l
+ 5abg
+ 5x5o
+ 2xcn
+ 5toz
+ 5tp0
+ 4z22
+ 4wlb
+ 4pd5
+ 3zbx
+ 3pb9
+ 3f5l
+ 6bke
+ 4p7s
+ 6dhc
+ 5enh
+ 4m3p
+ 4jmg
+ 1k03
+ 4pni
+ 4asy
+ 1a4r
+ 4deu
+ 4hy9
+ 4g0p
+ 5b5p
+ 3wqv
+ 2r3m
+ 6q73
+ 5ar5
+ 1drv
+ 4ua8
+ 4loy
+ 5eh8
+ 4wy3
+ 3jzh
+ 2aov
+ 6dj2
+ 5t27
+ 1kdk
+ 2og8
+ 10gs
+ 4uw1
+ 4hge
+ 5qcn
+ 6c4d
+ 1bai
+ 3i5n
+ 6epa
+ 3dgq
+ 5tzx
+ 4h3b
+ 5a4q
+ 1e5a
+ 5n93
+ 2exg
+ 5k4l
+ 1qpb
+ 1f3e
+ 4a0j
+ 5ula
+ 4cc3
+ 2oym
+ 1dis
+ 3ns7
+ 3uz5
+ 3ewz
+ 5q0r
+ 1qbo
+ 1nd5
+ 2qzr
+ 4oq3
+ 6e9l
+ 2n0w
+ 2hr6
+ 5vse
+ 5jsm
+ 1ssq
+ 3l4u
+ 1y2k
+ 2r3k
+ 3e3u
+ 4ibi
+ 3kgu
+ 1tni
+ 5hfj
+ 1dva
+ 5mno
+ 1qbu
+ 3px9
+ 5xpm
+ 5q1b
+ 4zjw
+ 4gqp
+ 3r21
+ 3lpp
+ 4bdt
+ 4ks2
+ 2v58
+ 3bft
+ 1il5
+ 5yco
+ 4p5e
+ 3rhk
+ 5fq9
+ 5ko1
+ 5bwc
+ 4q7p
+ 1ro7
+ 3lil
+ 4mwq
+ 3wf6
+ 1tft
+ 3hpt
+ 5t3n
+ 4oo9
+ 2b5j
+ 2bet
+ 3nuy
+ 5nea
+ 3kdd
+ 5ls6
+ 6f2n
+ 4rac
+ 4j47
+ 3iut
+ 4uvv
+ 2i4v
+ 1a8i
+ 5k48
+ 2hpa
+ 1mrs
+ 6fv4
+ 1q8u
+ 5aqz
+ 4ytc
+ 1ibc
+ 4eft
+ 2v95
+ 3me9
+ 4o0b
+ 4kn0
+ 3g2j
+ 1ai5
+ 1n51
+ 1gnm
+ 4zur
+ 4zsj
+ 3v8w
+ 4tzm
+ 1swn
+ 5ih9
+ 3gen
+ 4r6t
+ 2ykb
+ 5u7i
+ 4e81
+ 3v01
+ 3n4b
+ 1jmi
+ 4dcx
+ 3at4
+ 2c6c
+ 4mzl
+ 5ybi
+ 5tcj
+ 2w54
+ 4o13
+ 5ufc
+ 2xs0
+ 4flp
+ 4yqm
+ 3o8p
+ 3tpu
+ 5yft
+ 5mql
+ 5w6e
+ 2yjx
+ 4pqn
+ 4m48
+ 2iiv
+ 4ohm
+ 4a9c
+ 6en4
+ 3wk4
+ 3p50
+ 3bpr
+ 5gwz
+ 1mai
+ 5wys
+ 4oel
+ 1uy8
+ 4u01
+ 4ibc
+ 1qti
+ 4jpy
+ 1q41
+ 2gvd
+ 5aip
+ 5i0b
+ 4zyt
+ 4yxd
+ 4qt1
+ 4w4y
+ 1c8l
+ 3e5u
+ 3kqd
+ 4uj2
+ 3k4q
+ 4o42
+ 5o1h
+ 3b0w
+ 3t64
+ 1hty
+ 5dgm
+ 3bjm
+ 1b8o
+ 4l3p
+ 2b1z
+ 4yyi
+ 5ceh
+ 3l3m
+ 3ebl
+ 4bo8
+ 4fvq
+ 3fup
+ 5ot9
+ 3eax
+ 3vhk
+ 3cda
+ 5c1y
+ 3r2a
+ 1obx
+ 4tzn
+ 1d7i
+ 3qd3
+ 3wka
+ 3ovz
+ 3ihz
+ 5iis
+ 5vo6
+ 5tc0
+ 3fqk
+ 1dm2
+ 4y63
+ 6mvx
+ 4ucu
+ 5oht
+ 4jff
+ 1n8u
+ 4xua
+ 2ybp
+ 2cgf
+ 3d4q
+ 4gsy
+ 4i32
+ 2psv
+ 2ych
+ 5lck
+ 1xxe
+ 5wrs
+ 5trg
+ 5dp6
+ 2lnw
+ 5kr2
+ 4utx
+ 1a8t
+ 4qw5
+ 6emu
+ 2itt
+ 4m7y
+ 3kqo
+ 5ku3
+ 4n7u
+ 1oit
+ 1w1t
+ 4j26
+ 4nrp
+ 4yh3
+ 3nzw
+ 5zh2
+ 3vw9
+ 2hyy
+ 1qj1
+ 1csr
+ 2we3
+ 6mnf
+ 4ijh
+ 2jdk
+ 4kcg
+ 1kmv
+ 3buh
+ 4ovh
+ 1mxu
+ 4mg8
+ 3lka
+ 4twy
+ 5vsb
+ 4ofl
+ 6cdm
+ 2f3k
+ 5aqp
+ 4tkg
+ 5w4e
+ 3zot
+ 2yak
+ 3qkd
+ 2pv1
+ 5mz8
+ 5gut
+ 3avz
+ 2q8g
+ 3ilq
+ 3zw3
+ 4ckj
+ 1qbr
+ 4q08
+ 3zpu
+ 5u8f
+ 2ydv
+ 3rtp
+ 4b95
+ 2f9b
+ 2uz9
+ 4ehg
+ 4xsz
+ 1bkj
+ 6f22
+ 1tet
+ 3f3w
+ 4abd
+ 3sjf
+ 2byh
+ 5jsg
+ 4op3
+ 6fsy
+ 3twv
+ 5d25
+ 2vto
+ 2oa0
+ 6csq
+ 6dh3
+ 5jq9
+ 4xkx
+ 4tmf
+ 4brx
+ 2ghg
+ 2gvj
+ 4wnm
+ 5ayf
+ 4osf
+ 1u65
+ 3lzs
+ 1i90
+ 3gdt
+ 6eru
+ 5ops
+ 2mpm
+ 4cc7
+ 3ckp
+ 1o4d
+ 2vvs
+ 6ep4
+ 4bkz
+ 4aom
+ 1wc6
+ 1mpa
+ 2pl9
+ 2p53
+ 4zyr
+ 3gqz
+ 4og8
+ 4mlt
+ 1mf4
+ 1xxh
+ 3p3g
+ 3g0c
+ 5dbm
+ 4xhk
+ 5lny
+ 3g8i
+ 5g60
+ 2zas
+ 2z7h
+ 3kfa
+ 4a9u
+ 4gj7
+ 4nwd
+ 4ei4
+ 5yjf
+ 2won
+ 2f2c
+ 3mg8
+ 3i06
+ 2wa4
+ 3hzm
+ 3wkc
+ 4j7e
+ 3dxm
+ 3m9f
+ 1gzv
+ 4clp
+ 5a09
+ 5tg5
+ 4wkn
+ 1njf
+ 3ce0
+ 1tpz
+ 6bh0
+ 3vfa
+ 3su1
+ 2vtn
+ 5yp6
+ 5twj
+ 4i74
+ 5zk8
+ 2al5
+ 3bh9
+ 4djo
+ 4oi6
+ 5m4u
+ 1wva
+ 3uvx
+ 2aod
+ 1pqc
+ 4n4t
+ 5fdi
+ 1lcp
+ 4ere
+ 1ci7
+ 1ykp
+ 2qju
+ 1ftj
+ 5hk9
+ 5y3o
+ 5q0v
+ 6ayn
+ 5os1
+ 2j9l
+ 2qm9
+ 2x6j
+ 5ei2
+ 5izc
+ 3u18
+ 4zek
+ 4s1g
+ 2wq4
+ 1nfy
+ 6bnt
+ 4jai
+ 2jb6
+ 5ahw
+ 4uit
+ 3bmn
+ 4gzx
+ 6gxu
+ 4gki
+ 3ccb
+ 4ezq
+ 5v7a
+ 6dzq
+ 4az3
+ 4ucr
+ 4pnt
+ 2wtj
+ 5amn
+ 4ux6
+ 3t8v
+ 1i33
+ 3r7n
+ 5f00
+ 5nvy
+ 3k41
+ 4aq3
+ 3tdh
+ 3wdz
+ 5nib
+ 6b8y
+ 1v2h
+ 5jpt
+ 1xge
+ 4umn
+ 2xrw
+ 2vvn
+ 3t07
+ 2vpp
+ 1ghv
+ 4ydq
+ 2dbl
+ 3ho9
+ 5w88
+ 2g71
+ 2lk1
+ 2pwg
+ 4ai5
+ 1o41
+ 1tsv
+ 5al3
+ 4fk7
+ 2o4z
+ 4a51
+ 4gwk
+ 3ay0
+ 3own
+ 4e5g
+ 1lcj
+ 1hk3
+ 4rhy
+ 4yoi
+ 3prf
+ 5z99
+ 3slz
+ 4tmk
+ 1au0
+ 2qwg
+ 1bdq
+ 5h8b
+ 4emt
+ 6bec
+ 1g9d
+ 1qf2
+ 2r0u
+ 1f2o
+ 1b9t
+ 2c1p
+ 1hn4
+ 3skg
+ 4ee0
+ 6c2y
+ 4pg3
+ 3uw4
+ 1wdn
+ 3mho
+ 4yl1
+ 6cex
+ 3b7r
+ 4pop
+ 5f02
+ 3d7b
+ 1xbb
+ 2zft
+ 4i8z
+ 4oya
+ 3otf
+ 2xhm
+ 5c20
+ 1zrz
+ 4rqk
+ 5k8o
+ 1tvo
+ 6fnr
+ 1dwd
+ 4zlo
+ 1nyx
+ 5aad
+ 3kqw
+ 3nw7
+ 5ti6
+ 6e59
+ 1ywr
+ 2xn5
+ 4cy1
+ 1yfz
+ 5e3a
+ 4a7c
+ 5ljq
+ 5h9r
+ 1gi6
+ 2b1p
+ 1kc5
+ 3kpw
+ 1q5k
+ 4aml
+ 4ibg
+ 6e49
+ 2w8y
+ 3kmm
+ 2w6o
+ 3hdn
+ 6cef
+ 1u8t
+ 4e8y
+ 4gj6
+ 4hbm
+ 4g3f
+ 4eok
+ 2rox
+ 1t48
+ 2xd6
+ 2fjn
+ 5ndb
+ 4j77
+ 1n43
+ 3ui7
+ 5e73
+ 1h1h
+ 4mhs
+ 3ohf
+ 3kdc
+ 4edy
+ 2wzs
+ 5h9s
+ 3cth
+ 2j34
+ 1h62
+ 4jnm
+ 6bil
+ 3mhc
+ 4zw8
+ 3g90
+ 4f7v
+ 4eh8
+ 5wkh
+ 4ycm
+ 4mk8
+ 2xj0
+ 2tpi
+ 4clz
+ 4ko8
+ 5qaj
+ 5dus
+ 4xm7
+ 2uzl
+ 1k1l
+ 4g95
+ 1c3e
+ 5g10
+ 4nnr
+ 3e64
+ 1fq8
+ 4c71
+ 4b0c
+ 3ft3
+ 1ybg
+ 4y2t
+ 5dk4
+ 4dds
+ 3ggw
+ 5axi
+ 3ueo
+ 2r9b
+ 3ppj
+ 1meu
+ 4z7o
+ 3n8k
+ 2jup
+ 3p9j
+ 3hxe
+ 1e06
+ 3ika
+ 5yyf
+ 4kc4
+ 3su3
+ 4z7h
+ 4x2l
+ 5hcv
+ 5ir1
+ 1f9g
+ 5law
+ 3o99
+ 1yci
+ 3g3d
+ 3aje
+ 2o5d
+ 3b7i
+ 6af9
+ 2gvz
+ 4qmt
+ 3qj0
+ 2y9q
+ 1nw5
+ 5wg8
+ 4bzr
+ 1k6t
+ 5hd0
+ 3fui
+ 1lek
+ 5kql
+ 5d21
+ 1iig
+ 5ewh
+ 2vhq
+ 2gh6
+ 4x6y
+ 4mbl
+ 2v54
+ 5ug9
+ 3hvk
+ 3qts
+ 3q43
+ 4nvq
+ 3co9
+ 4uxh
+ 2gj4
+ 4dwb
+ 5q16
+ 1uyf
+ 5jbi
+ 2il2
+ 1yt9
+ 3t6j
+ 2zu5
+ 5buj
+ 2xiy
+ 2fs9
+ 1al8
+ 4cig
+ 3cgy
+ 4mdq
+ 3st6
+ 4dzy
+ 6fcl
+ 1m0o
+ 3spf
+ 1lkk
+ 4uu7
+ 1zky
+ 5d9l
+ 1zd3
+ 5t6f
+ 1g2o
+ 2i1m
+ 3khv
+ 3o88
+ 5fi2
+ 3idp
+ 2pov
+ 4x11
+ 2qe4
+ 3qtz
+ 5alm
+ 4pli
+ 3fts
+ 2x4u
+ 4o45
+ 1tkx
+ 2q8h
+ 1mue
+ 4eop
+ 2j7e
+ 5akk
+ 5wtt
+ 3h9o
+ 3v4x
+ 5b6c
+ 5boy
+ 5hdz
+ 5nkg
+ 6ajg
+ 4dum
+ 3whw
+ 6b59
+ 5aan
+ 6fo5
+ 4pmp
+ 5i0l
+ 2xk9
+ 5mon
+ 3d4y
+ 2xib
+ 3c2r
+ 5mfr
+ 2ra6
+ 2xl2
+ 4w5a
+ 3zm6
+ 3zm5
+ 3zhz
+ 3ogq
+ 4rrq
+ 4wxi
+ 4asd
+ 5txy
+ 5u0w
+ 4xta
+ 3t2v
+ 2q9m
+ 5lyw
+ 4kon
+ 3sv2
+ 2j7d
+ 3hmv
+ 6g92
+ 1z6f
+ 5a46
+ 3gnw
+ 5k03
+ 2bgn
+ 3ohi
+ 3acw
+ 3hng
+ 4j06
+ 1owj
+ 4fbe
+ 1gbt
+ 4fmo
+ 4g8v
+ 2fzc
+ 2vx0
+ 2o1c
+ 5unp
+ 4o7c
+ 4rxe
+ 5t90
+ 5jyo
+ 2wzy
+ 1tfq
+ 1tqf
+ 3s78
+ 6g0v
+ 2ei6
+ 4r95
+ 3peq
+ 3o9f
+ 1mq6
+ 1k6v
+ 6gfs
+ 6q6y
+ 5m39
+ 3wz7
+ 1nwl
+ 3ard
+ 5e3d
+ 2woq
+ 4gq6
+ 3iw5
+ 2h15
+ 3avh
+ 2azr
+ 5vsk
+ 6ckr
+ 4n84
+ 4eh2
+ 4cpw
+ 5f91
+ 4k3r
+ 4dgn
+ 2yer
+ 5ayt
+ 4cpz
+ 4y29
+ 5tq6
+ 3bcn
+ 3roc
+ 5lwm
+ 3gjd
+ 3k5v
+ 4zip
+ 4und
+ 6exm
+ 5qak
+ 4u0n
+ 5wo4
+ 1rrw
+ 6b2p
+ 5hls
+ 3pjc
+ 1cin
+ 3b2w
+ 1kmy
+ 4ys7
+ 4f0c
+ 1zgi
+ 3vqu
+ 3uoh
+ 1qr3
+ 5i8b
+ 4l7h
+ 2aay
+ 4jlm
+ 3wnr
+ 4en4
+ 5w0f
+ 4lxz
+ 4cjr
+ 4jj7
+ 2h5i
+ 2avi
+ 3ezv
+ 4j09
+ 3hkw
+ 3uxm
+ 4b5d
+ 5vcv
+ 2vtt
+ 3bl9
+ 2y1n
+ 5nin
+ 3uvo
+ 5anw
+ 5nvw
+ 4b9k
+ 1j4p
+ 4io5
+ 4dkp
+ 2jdt
+ 1iem
+ 4lm0
+ 5wyq
+ 5nr8
+ 6f7t
+ 5a81
+ 3lnk
+ 2xuf
+ 5ort
+ 1h1q
+ 4twd
+ 1e1y
+ 5lo5
+ 4anm
+ 1e1v
+ 4tki
+ 4wv6
+ 4htx
+ 1t13
+ 4euo
+ 6gy1
+ 4civ
+ 3dux
+ 5knv
+ 5hn7
+ 6gi6
+ 5vb5
+ 3f5p
+ 2kfh
+ 4b3u
+ 5nf5
+ 5mes
+ 4w55
+ 1sv3
+ 3p7a
+ 6frf
+ 6eks
+ 5afm
+ 5z4o
+ 3cow
+ 5hu1
+ 6f9g
+ 3ds1
+ 6a6w
+ 2g8r
+ 2ze1
+ 5q18
+ 5cau
+ 1s38
+ 1zge
+ 1me4
+ 3cj4
+ 4pr5
+ 4gzf
+ 1q54
+ 5l9i
+ 1z6j
+ 2ez5
+ 3unk
+ 3qwc
+ 3o86
+ 4j51
+ 6fyi
+ 3cj2
+ 2viz
+ 5ekm
+ 3f9y
+ 2uy5
+ 2brb
+ 5g45
+ 5jgd
+ 4j1e
+ 4ikt
+ 5opu
+ 4mwc
+ 1t7d
+ 2viq
+ 3ocp
+ 3wuv
+ 4yay
+ 3d7f
+ 2vh6
+ 4foc
+ 2ra0
+ 5fwg
+ 1o3c
+ 4bi0
+ 1kll
+ 3qtq
+ 3cl0
+ 3ho2
+ 3vw7
+ 6h37
+ 4rlw
+ 6dak
+ 3p3t
+ 3m11
+ 6fdp
+ 4c73
+ 5ksx
+ 6bcy
+ 1okl
+ 4gqq
+ 3sji
+ 2x2i
+ 1hkj
+ 5hu9
+ 5vom
+ 5fdo
+ 1uu7
+ 5iay
+ 5nu5
+ 6ghh
+ 4q06
+ 3apc
+ 1re8
+ 5uey
+ 2ha6
+ 4aaw
+ 1tcx
+ 3lpj
+ 3w0l
+ 3s45
+ 3n5j
+ 1rhj
+ 5d0r
+ 2qp6
+ 2vqj
+ 2zaz
+ 2zir
+ 4mq1
+ 5alt
+ 5he5
+ 3atp
+ 5w5j
+ 3ekw
+ 4xas
+ 5mwy
+ 4mwy
+ 2wzf
+ 2z1w
+ 1d4t
+ 4yk0
+ 3hnb
+ 4h3j
+ 6g36
+ 1rhr
+ 2r3f
+ 6bcr
+ 5owh
+ 5idp
+ 1odi
+ 3vw1
+ 4n6z
+ 3qcx
+ 4z89
+ 4pkb
+ 2ewp
+ 3d9z
+ 4c5d
+ 1xoz
+ 4meq
+ 3so6
+ 3tiw
+ 6h7u
+ 3u0d
+ 1ur9
+ 4lar
+ 4css
+ 2a3c
+ 1h3a
+ 4aqc
+ 5l2s
+ 5vi6
+ 6cmr
+ 2wf4
+ 3chs
+ 5uc4
+ 5tca
+ 2x6e
+ 4gne
+ 5ey9
+ 4ih5
+ 1mto
+ 4qwi
+ 5nai
+ 4ym4
+ 3prz
+ 2nnp
+ 1vij
+ 5bvd
+ 5swf
+ 3pn3
+ 2pql
+ 3ekv
+ 3nq9
+ 5c7n
+ 1els
+ 4k5m
+ 6bq0
+ 1o2k
+ 2aux
+ 4x69
+ 5mts
+ 1e9h
+ 4ykj
+ 1mkd
+ 6i3s
+ 1zpb
+ 5mo8
+ 2q92
+ 4u70
+ 4wph
+ 4xy9
+ 4ith
+ 5aph
+ 5cy3
+ 3fat
+ 4ly1
+ 2vti
+ 1f0q
+ 4wsy
+ 3ej1
+ 3ine
+ 3rux
+ 2vvu
+ 5a3t
+ 4qtd
+ 5a5s
+ 3t7g
+ 1xon
+ 4gah
+ 5flq
+ 2hw2
+ 3aig
+ 4hl5
+ 5wzr
+ 2imd
+ 3oxz
+ 1ndv
+ 5yji
+ 2hxm
+ 3l59
+ 4jpx
+ 1ws5
+ 4uix
+ 2iv9
+ 4dwk
+ 4qvp
+ 2c1b
+ 5cw8
+ 2r6n
+ 4cws
+ 2d3z
+ 2oc9
+ 4ehz
+ 1ett
+ 1fdq
+ 2avo
+ 4r0a
+ 1zzz
+ 3moe
+ 4mmp
+ 5muc
+ 5zh5
+ 3zqi
+ 6f08
+ 6elo
+ 6dif
+ 3mf5
+ 5qas
+ 3fu5
+ 3rv9
+ 4jne
+ 5urj
+ 5oxk
+ 4y3y
+ 5hki
+ 4tyb
+ 2glm
+ 2xnb
+ 3tt0
+ 6gmq
+ 4i5p
+ 1f92
+ 3psd
+ 5hcy
+ 1q6n
+ 4tmn
+ 1tmn
+ 2gg0
+ 3zep
+ 1c88
+ 4fea
+ 5nwb
+ 1ppc
+ 5myx
+ 4ci3
+ 1z6d
+ 4kww
+ 4ixu
+ 1leg
+ 3a5y
+ 4bo3
+ 5cyi
+ 5uzj
+ 4mw0
+ 5j18
+ 5ml3
+ 2bmk
+ 1e66
+ 2brc
+ 2fwy
+ 5oxm
+ 1bn1
+ 4qlv
+ 4qip
+ 3ms2
+ 5wxf
+ 5yib
+ 3ig7
+ 4pgb
+ 4p73
+ 2y4s
+ 5zh4
+ 4ch2
+ 5tex
+ 3b4f
+ 3poa
+ 4zyx
+ 4m0y
+ 2ot1
+ 5e91
+ 1yc5
+ 3iqi
+ 2xpc
+ 1o2s
+ 1pmu
+ 4l58
+ 4mx1
+ 5am4
+ 2hs2
+ 4yne
+ 1ctt
+ 3l5c
+ 2zm1
+ 1lxf
+ 2f1b
+ 4zcs
+ 4zw5
+ 2oh0
+ 5n2z
+ 3f19
+ 2obf
+ 6f9t
+ 2f01
+ 6euc
+ 5vee
+ 4oyo
+ 5moq
+ 3ft4
+ 6cbg
+ 5wuu
+ 5f5b
+ 4xs2
+ 1ype
+ 4rwl
+ 4hpi
+ 4e34
+ 1yc1
+ 5jy0
+ 2fxr
+ 5e1m
+ 5oq6
+ 1nhz
+ 3pjt
+ 5jzi
+ 4elf
+ 4fxy
+ 6dpz
+ 5a6a
+ 2waj
+ 2fqt
+ 3mxy
+ 1y91
+ 6biv
+ 4j1k
+ 3zmi
+ 5xjm
+ 2fv5
+ 5db3
+ 3ryj
+ 3b95
+ 3i5r
+ 2ajl
+ 6c99
+ 3bx5
+ 5ave
+ 3muz
+ 2ye9
+ 2rf2
+ 2j2i
+ 5g0q
+ 4yuw
+ 2r3g
+ 4cwf
+ 3t3h
+ 5ajz
+ 2bgr
+ 4l9i
+ 4igk
+ 6cz4
+ 4qfp
+ 5ty8
+ 4odm
+ 3hs8
+ 4ele
+ 5zia
+ 2pvj
+ 5w5k
+ 4m5l
+ 3wz8
+ 4egh
+ 5w7j
+ 4jfw
+ 6eda
+ 5tpb
+ 5m5d
+ 4o71
+ 3kag
+ 4u93
+ 6eed
+ 4zam
+ 2jbp
+ 4z3v
+ 2pu0
+ 4zzd
+ 4f14
+ 4h4d
+ 1pxh
+ 2hwp
+ 1u9e
+ 4wko
+ 5kqx
+ 1tog
+ 4yqu
+ 2yi0
+ 2xf0
+ 4uof
+ 4io7
+ 4ds1
+ 5diq
+ 3ti5
+ 4x8u
+ 2lp8
+ 3avm
+ 3kqm
+ 4zph
+ 2wou
+ 4jyu
+ 3ip8
+ 5ti3
+ 4cku
+ 3h78
+ 3vdb
+ 3chd
+ 4i5h
+ 5i2r
+ 3tyv
+ 4uwk
+ 5j6a
+ 3lk0
+ 5k13
+ 2hj4
+ 3bfu
+ 4ajk
+ 3n45
+ 3te5
+ 3ujb
+ 5v9p
+ 2bzz
+ 3a6t
+ 2y6o
+ 3b28
+ 1hvl
+ 5a2i
+ 4mx9
+ 4g8y
+ 4dko
+ 3kdb
+ 1lf9
+ 4qmp
+ 5q10
+ 3hqr
+ 4d2d
+ 3w32
+ 4ksp
+ 6f8b
+ 3h6z
+ 5k0h
+ 3h2n
+ 5ke0
+ 5w4w
+ 5mxr
+ 5igl
+ 2h9p
+ 4iuu
+ 4d8a
+ 3dvp
+ 1lox
+ 3ao4
+ 4h38
+ 4bdb
+ 4j1p
+ 1o4b
+ 4mro
+ 5eel
+ 2ay3
+ 3e8n
+ 6g8m
+ 2pyy
+ 4m0z
+ 2mas
+ 4ks3
+ 4p72
+ 6ccx
+ 3ttn
+ 4jsr
+ 6aqq
+ 6em6
+ 5o3r
+ 4rqz
+ 4hkk
+ 4qll
+ 2q6f
+ 5he1
+ 4fe9
+ 2wu6
+ 2pj7
+ 1bty
+ 1ule
+ 6dry
+ 5ezg
+ 5h17
+ 3s7b
+ 4x48
+ 3atw
+ 1kug
+ 2hdu
+ 3odu
+ 3l9m
+ 1d1p
+ 2ihj
+ 5mgk
+ 2n1g
+ 6hk6
+ 1mq5
+ 4kp5
+ 1yyr
+ 5al4
+ 1pzj
+ 6axl
+ 2z4b
+ 5mym
+ 4gv8
+ 2cni
+ 4xub
+ 6cjh
+ 3ig1
+ 4m2r
+ 1tt1
+ 5ime
+ 5oei
+ 6d6u
+ 1utj
+ 1dfo
+ 3ixj
+ 5anu
+ 2ogz
+ 5bwb
+ 5vad
+ 2x9e
+ 6epz
+ 4yv2
+ 2jdh
+ 1w13
+ 4eos
+ 4x1f
+ 4gao
+ 5c8k
+ 4k5n
+ 3pj1
+ 5hz5
+ 1x7a
+ 1h9z
+ 6gjm
+ 3ipe
+ 3fuj
+ 4m3d
+ 5g11
+ 4asj
+ 5xig
+ 4ybs
+ 5gmi
+ 3d04
+ 3dv1
+ 2fie
+ 4twt
+ 3ary
+ 4anb
+ 6hmg
+ 4n9c
+ 2lsk
+ 2ksb
+ 5mwd
+ 1gfw
+ 5gja
+ 3u3z
+ 2f7o
+ 3mzc
+ 5w9g
+ 5yz2
+ 4nmq
+ 2y2k
+ 5lz5
+ 4qz7
+ 1bug
+ 4zmf
+ 1o8b
+ 5jf3
+ 2qft
+ 5jop
+ 4jaj
+ 3s9i
+ 6cvy
+ 3oaf
+ 4hlw
+ 5ta6
+ 5mae
+ 2yg2
+ 5m2v
+ 4zxy
+ 1g9r
+ 3iqu
+ 5i9y
+ 3pch
+ 5kkt
+ 2fqo
+ 5mn1
+ 3e3b
+ 4jlj
+ 4tq3
+ 4zga
+ 4jlg
+ 1pf7
+ 5ewd
+ 4dhf
+ 4ywa
+ 5fxr
+ 4e1k
+ 2zmd
+ 4jyv
+ 1fkg
+ 2lwi
+ 6iik
+ 3zxe
+ 6dgx
+ 2vx1
+ 3hav
+ 3pkn
+ 1trd
+ 3u10
+ 2a0c
+ 3ly2
+ 2xn6
+ 1irs
+ 2ntf
+ 3rum
+ 6iin
+ 5hzx
+ 1urw
+ 2xb7
+ 4umr
+ 5apr
+ 5ngt
+ 3q71
+ 3og7
+ 4oeg
+ 4tte
+ 2ow6
+ 3sff
+ 3eu7
+ 2zju
+ 5tx5
+ 3v8t
+ 1nqc
+ 3t2c
+ 4afh
+ 3vhu
+ 2d1x
+ 2h2h
+ 5nvz
+ 4qy8
+ 3w2s
+ 4w9h
+ 4r6v
+ 5esq
+ 5v5d
+ 3rbu
+ 3d51
+ 4q07
+ 4jrv
+ 2in6
+ 3bki
+ 5ov9
+ 6ayd
+ 4zyc
+ 4ybm
+ 4xip
+ 3ibu
+ 5kde
+ 1br5
+ 1lan
+ 1gwq
+ 3ggv
+ 3cke
+ 4av0
+ 4qpd
+ 4urk
+ 4o04
+ 3d4z
+ 4uct
+ 5uwj
+ 4b8y
+ 5nvc
+ 6f23
+ 5w0e
+ 4m84
+ 2hdq
+ 5c6p
+ 2amt
+ 2rgu
+ 4ax9
+ 6g9n
+ 6hu2
+ 6cho
+ 3v7d
+ 5iz9
+ 2vev
+ 3dej
+ 2hxq
+ 4mvx
+ 2c3l
+ 5c29
+ 1we2
+ 6f9u
+ 1m6p
+ 2f80
+ 2ooz
+ 4py2
+ 4zcw
+ 5kx7
+ 4m13
+ 4dhp
+ 4kz6
+ 5ypw
+ 2zdn
+ 1qf5
+ 2i4w
+ 2xu3
+ 1wbn
+ 5lt9
+ 3zmv
+ 4efs
+ 7cpa
+ 2i72
+ 4rpo
+ 2h13
+ 4obo
+ 3f88
+ 6g47
+ 2y6d
+ 3o57
+ 5a0c
+ 2fl6
+ 5mja
+ 5jal
+ 3aqa
+ 3g9l
+ 5g22
+ 5znp
+ 1hi4
+ 1y2a
+ 6ay5
+ 3cf8
+ 3f5k
+ 1a0q
+ 3et7
+ 4eky
+ 6bvb
+ 5u94
+ 1m1b
+ 3ryv
+ 2peh
+ 3vw2
+ 5e1e
+ 3i51
+ 5fb0
+ 1rtl
+ 2i4p
+ 1a9m
+ 6au5
+ 4jyt
+ 4k3k
+ 2c5o
+ 2qu6
+ 4hwr
+ 6bqk
+ 4bqh
+ 5cs6
+ 4dea
+ 4x3r
+ 4iid
+ 6gzm
+ 2y2n
+ 3fuk
+ 2ypi
+ 1b3l
+ 4zsq
+ 4o44
+ 2e1w
+ 3rdo
+ 4zme
+ 4zz2
+ 4ucs
+ 3sl0
+ 6ey9
+ 5arg
+ 3p3r
+ 6an1
+ 3eor
+ 2uue
+ 1n4h
+ 3gk2
+ 3oot
+ 5i5z
+ 3hv3
+ 2hzi
+ 5yc2
+ 1g48
+ 3bxg
+ 5wbq
+ 4a1w
+ 5q0q
+ 4axm
+ 6g91
+ 4q0l
+ 1d8e
+ 1bb0
+ 1lpg
+ 1gmy
+ 6gin
+ 5yy4
+ 2pvv
+ 6ht8
+ 5j82
+ 2x5o
+ 3orn
+ 2nmb
+ 1pk0
+ 3rik
+ 4lkd
+ 1v2n
+ 4ufl
+ 6ahi
+ 5bsk
+ 3uza
+ 2w8f
+ 4nka
+ 6bef
+ 5abw
+ 6bkh
+ 5l30
+ 2clx
+ 4an1
+ 4ufz
+ 5nk6
+ 1syi
+ 3zsy
+ 5ul5
+ 3irx
+ 5ux4
+ 4r5n
+ 4bdc
+ 5fut
+ 3e2m
+ 3lgp
+ 2ivz
+ 1akv
+ 2ydi
+ 4i11
+ 3zxv
+ 4m3e
+ 4kwg
+ 4g2w
+ 3o95
+ 6gu4
+ 2doo
+ 5yun
+ 3n5e
+ 3mqf
+ 2y5k
+ 2vnm
+ 3g0g
+ 1qfs
+ 4o7b
+ 3ty0
+ 4uil
+ 3st5
+ 3f36
+ 6fqo
+ 4pkw
+ 5j6m
+ 4xbo
+ 1hxk
+ 6ghj
+ 1h8y
+ 2pq9
+ 5em3
+ 1rlp
+ 1yvf
+ 4cqg
+ 3srb
+ 6fmk
+ 4o6e
+ 5xmv
+ 4agq
+ 2j3q
+ 1ke9
+ 1mfd
+ 1a5h
+ 4b1j
+ 4i0f
+ 3up2
+ 2ay5
+ 5mqx
+ 3lpu
+ 3bmq
+ 5wfw
+ 4j5b
+ 5alp
+ 5ee8
+ 4qj0
+ 1ka7
+ 3k3h
+ 4np2
+ 3bwj
+ 5osl
+ 5all
+ 4jze
+ 3b3x
+ 3sm0
+ 2xa4
+ 3t3v
+ 3t1n
+ 4yp8
+ 4bc5
+ 5f08
+ 4qw6
+ 5ofv
+ 2bdl
+ 4y83
+ 4y4j
+ 2prj
+ 6cfc
+ 5eth
+ 1oky
+ 2ban
+ 4blb
+ 3f38
+ 2uy3
+ 6fdq
+ 4qmx
+ 4bo5
+ 5wh5
+ 6cvv
+ 2oph
+ 3owb
+ 1xlx
+ 2cf9
+ 4av4
+ 2wxv
+ 5v8p
+ 4eqj
+ 4aoc
+ 5jk3
+ 4xtx
+ 2aoj
+ 1v7a
+ 3lzv
+ 3bum
+ 3fxw
+ 4qhc
+ 1b6j
+ 3f80
+ 4or0
+ 6dq5
+ 5lsx
+ 4bdd
+ 4x8v
+ 4od9
+ 3vye
+ 2yix
+ 1i7i
+ 4eoy
+ 2k4i
+ 2bv4
+ 4z0f
+ 5dp7
+ 4xiq
+ 3d20
+ 5llo
+ 2ymd
+ 4hn2
+ 5n31
+ 4ffs
+ 4b5b
+ 5jf1
+ 4i73
+ 2pqb
+ 2i19
+ 1cbx
+ 1gsz
+ 5ws3
+ 4h3q
+ 5a5q
+ 4uvy
+ 4guj
+ 1i7g
+ 3c8a
+ 4ll3
+ 4pp0
+ 5hk1
+ 4rux
+ 3coj
+ 3d6p
+ 1ndj
+ 1o79
+ 3nsh
+ 5hl9
+ 2e27
+ 5iui
+ 4og5
+ 3wdd
+ 1lnm
+ 3igb
+ 4x24
+ 4tsx
+ 2gmk
+ 5ox5
+ 1bqn
+ 4e8w
+ 5xyf
+ 4uco
+ 1eas
+ 2b7f
+ 4m5u
+ 5evk
+ 3ipu
+ 4p5d
+ 2fsa
+ 4ay5
+ 4bcp
+ 5thj
+ 3tam
+ 4jsc
+ 3ug2
+ 5e7n
+ 1ofz
+ 3uo4
+ 5clm
+ 3gr2
+ 2m0u
+ 6f7q
+ 3sdg
+ 1sme
+ 3g2i
+ 1oyt
+ 3h3c
+ 1xff
+ 4oc6
+ 3b3c
+ 1xa5
+ 1fe3
+ 1g32
+ 5olb
+ 5a85
+ 2oz2
+ 4d8i
+ 5swh
+ 1y2d
+ 1inc
+ 4obz
+ 1c7f
+ 2clh
+ 1qkn
+ 2etm
+ 4i5c
+ 3vd7
+ 1p28
+ 5j7s
+ 1y2j
+ 1aht
+ 4iq6
+ 2pe2
+ 4f3c
+ 4ih7
+ 1hkk
+ 4puj
+ 3o1g
+ 4w9i
+ 3qrj
+ 1qj6
+ 4azg
+ 4joo
+ 6dxx
+ 4sga
+ 1gja
+ 5k4j
+ 4yef
+ 1sbg
+ 3s00
+ 5fcw
+ 4txe
+ 4mr5
+ 1bxo
+ 5xxf
+ 5jic
+ 4abh
+ 3nzx
+ 5edl
+ 3pcf
+ 3k98
+ 3qqu
+ 1hgj
+ 2qch
+ 2yim
+ 4dtt
+ 4l09
+ 3f07
+ 4hki
+ 4cl6
+ 2z5o
+ 4e7r
+ 5amd
+ 1c1v
+ 1ym1
+ 1s64
+ 2w6t
+ 3tl5
+ 1w1y
+ 4eoh
+ 5i58
+ 4z5w
+ 1u2y
+ 6hu1
+ 3wav
+ 1nxy
+ 3n4c
+ 5c91
+ 2kzu
+ 5kre
+ 2jkk
+ 4hkp
+ 4y2y
+ 4w54
+ 4bnu
+ 3lp7
+ 3bla
+ 1gni
+ 2idw
+ 2xy9
+ 2q6b
+ 3uxd
+ 5o9p
+ 3ao1
+ 3v2o
+ 1i00
+ 5ewj
+ 2rgp
+ 2xln
+ 6ma5
+ 4d2w
+ 6eww
+ 4lkq
+ 4o7a
+ 5m4k
+ 5g3n
+ 1wm1
+ 4fzj
+ 2nyr
+ 2mc1
+ 2qky
+ 6eh2
+ 5v1y
+ 4glw
+ 3arb
+ 4gq4
+ 2vin
+ 4rss
+ 3q7j
+ 2h4g
+ 2x2c
+ 1gt4
+ 1fo0
+ 1ftk
+ 4kby
+ 1ch8
+ 5fnu
+ 3sv7
+ 2vvo
+ 5uah
+ 3k97
+ 2wpa
+ 1a4h
+ 5lhg
+ 6eeh
+ 4mse
+ 3nnv
+ 6eq4
+ 5f1l
+ 3q0z
+ 3elm
+ 2uzd
+ 3p2e
+ 1r0p
+ 1j16
+ 2qhz
+ 5h0b
+ 4nyt
+ 5ij7
+ 4l2l
+ 1afl
+ 5vd0
+ 4i10
+ 1opi
+ 2ves
+ 5tyn
+ 3gf2
+ 3nuo
+ 5w2p
+ 4uv9
+ 3sut
+ 2f3f
+ 1dmb
+ 3wd2
+ 4xm8
+ 4kp4
+ 1hy7
+ 4xbd
+ 5m6h
+ 1g05
+ 4zh3
+ 5d0c
+ 2e7l
+ 3kba
+ 5nhh
+ 2pax
+ 1p0y
+ 2l7u
+ 3wns
+ 5u4d
+ 3axk
+ 4qpl
+ 5jt2
+ 2rk7
+ 5lss
+ 3sni
+ 5aaa
+ 5wfm
+ 4pb1
+ 3oe4
+ 5m63
+ 5xg5
+ 3lle
+ 2yac
+ 4odk
+ 2x95
+ 2ig0
+ 2zdl
+ 4ay6
+ 2c1a
+ 1nny
+ 1nw4
+ 2fj0
+ 3kah
+ 4db7
+ 1bxq
+ 3ifl
+ 5f9e
+ 4p2t
+ 4exh
+ 6d2o
+ 5t2b
+ 6eee
+ 2oi0
+ 3lpl
+ 4tk3
+ 1o6i
+ 6eux
+ 2ojj
+ 3pyy
+ 4edz
+ 3jy0
+ 3r5t
+ 4xdo
+ 3qo9
+ 4u2w
+ 5nme
+ 6hdn
+ 4mre
+ 4ryg
+ 5ez0
+ 3c14
+ 5kzi
+ 6bix
+ 3uug
+ 5ldm
+ 1o2v
+ 4llx
+ 3lw0
+ 2op9
+ 4nhc
+ 5wqj
+ 5wi1
+ 4e20
+ 6fjf
+ 2x2m
+ 5l98
+ 3qsd
+ 5lcj
+ 5t23
+ 1adl
+ 2wa8
+ 4q9y
+ 2rkg
+ 4gid
+ 3f70
+ 2xk6
+ 5t8e
+ 4lxb
+ 2xp2
+ 3juk
+ 5uf0
+ 3l1s
+ 6evr
+ 1lag
+ 1awi
+ 6ej4
+ 1pmx
+ 4kln
+ 3wgg
+ 5g61
+ 5n69
+ 4o9v
+ 3kqc
+ 1him
+ 3cd0
+ 2kbs
+ 1moq
+ 2f10
+ 2zb0
+ 2d41
+ 4bdh
+ 1ebz
+ 5lcf
+ 5u13
+ 4qwk
+ 4cd8
+ 4d85
+ 1tvr
+ 4l6s
+ 3pd4
+ 1hq5
+ 5tiu
+ 1o44
+ 6g22
+ 2ckm
+ 4btk
+ 2cli
+ 6cb5
+ 3o0u
+ 5u4x
+ 4lp6
+ 5afk
+ 2o3p
+ 1zpc
+ 3qo3
+ 3d1z
+ 2z3h
+ 1hms
+ 5ox6
+ 5fpp
+ 3f3d
+ 5t78
+ 5hg5
+ 3mxe
+ 4i7l
+ 1tuf
+ 5lmk
+ 3nu9
+ 5om9
+ 2qi3
+ 3l5b
+ 4v0i
+ 2nv7
+ 2wkt
+ 4ndu
+ 1aq1
+ 3ddq
+ 1vyj
+ 5ods
+ 1b8n
+ 4xhl
+ 4jft
+ 3n7o
+ 4og6
+ 5j9z
+ 5ufr
+ 4qjm
+ 3g4g
+ 5jox
+ 2std
+ 4uvx
+ 2yb9
+ 3qaa
+ 5mwo
+ 2onz
+ 2hiw
+ 1xnz
+ 2agv
+ 4qgg
+ 5x28
+ 2dw7
+ 2ohk
+ 4oue
+ 1o4a
+ 6ds0
+ 3t5u
+ 3ij1
+ 4klv
+ 3lgl
+ 2wey
+ 1qk3
+ 4i0d
+ 1rti
+ 3clp
+ 4f8h
+ 4qwj
+ 1bmb
+ 5jar
+ 5a5z
+ 5ofu
+ 2yiu
+ 2z5t
+ 5emm
+ 4ibm
+ 5lzh
+ 4xh2
+ 3t09
+ 2p95
+ 1fmb
+ 1gj5
+ 3s72
+ 5ia5
+ 4kp6
+ 1s9t
+ 2fhy
+ 4del
+ 3ao5
+ 4xii
+ 5cp9
+ 3vb4
+ 3sha
+ 5el2
+ 2uup
+ 6bfe
+ 4jv6
+ 3e8u
+ 3ubd
+ 1tys
+ 4p10
+ 5ekj
+ 1d5j
+ 4bpi
+ 3n23
+ 3amb
+ 4gue
+ 4n8q
+ 1o0d
+ 4d1j
+ 1a9q
+ 2vtq
+ 1h1d
+ 5uw5
+ 6bfp
+ 3tfu
+ 5xyz
+ 2q1l
+ 4m7j
+ 3m8u
+ 5kbf
+ 3btl
+ 4ctj
+ 1koj
+ 2ajb
+ 3unz
+ 2xgs
+ 4pd9
+ 5g1c
+ 5w19
+ 5vqw
+ 5yc8
+ 4ufd
+ 1w83
+ 4qdk
+ 1tyn
+ 6buv
+ 4amw
+ 3dnd
+ 1d7j
+ 1nzv
+ 4ryl
+ 2f6j
+ 6bbx
+ 5lgp
+ 6ay3
+ 1fwv
+ 2qg0
+ 3mg4
+ 3qmk
+ 4dma
+ 2r3c
+ 1d6s
+ 5dsx
+ 1tg5
+ 6do3
+ 5n7v
+ 1g5f
+ 1c5q
+ 1lb6
+ 4pv7
+ 3ii5
+ 4rab
+ 4eke
+ 2w10
+ 5i86
+ 1uho
+ 5vsf
+ 3g08
+ 2zxg
+ 4btl
+ 3hb4
+ 5j4y
+ 5lgt
+ 6boy
+ 5kva
+ 5mka
+ 5wa8
+ 2i0e
+ 3ptg
+ 5vp9
+ 4mw2
+ 3djf
+ 6b7d
+ 5t1a
+ 5hrw
+ 5e2o
+ 4xuc
+ 3vg1
+ 6c4g
+ 2csn
+ 3q32
+ 4q9m
+ 3vry
+ 3fv7
+ 5lhh
+ 3fh5
+ 2qry
+ 4f9g
+ 4c8r
+ 4m6p
+ 4riu
+ 3dp3
+ 2r3i
+ 3bwk
+ 6ew3
+ 4bs5
+ 4g3e
+ 2wvz
+ 3wc5
+ 4nb6
+ 5xmr
+ 4ps5
+ 4dvi
+ 3uvm
+ 5os7
+ 2xix
+ 5alw
+ 3iaw
+ 2xnp
+ 6dvo
+ 1cgl
+ 4xv3
+ 5dya
+ 3c79
+ 2e9a
+ 6dil
+ 4caf
+ 4tpw
+ 5fov
+ 5n3w
+ 5ttv
+ 6f3b
+ 5cas
+ 5aqg
+ 3dz2
+ 1xhy
+ 3t01
+ 5th4
+ 4i80
+ 4q3t
+ 2ceq
+ 4yff
+ 3bti
+ 5vll
+ 4hy4
+ 6g3y
+ 4ykn
+ 5lu2
+ 5wg7
+ 3fnu
+ 6g84
+ 6ap6
+ 6hpg
+ 1c3x
+ 5if4
+ 5wgp
+ 3d6q
+ 2wer
+ 2qrq
+ 4cfl
+ 3uib
+ 6fgf
+ 5tx3
+ 3plu
+ 1zzl
+ 6db4
+ 5tpg
+ 3k8o
+ 2fxv
+ 3c2u
+ 1w8l
+ 1di8
+ 3udh
+ 4cu1
+ 5gvl
+ 5nvx
+ 1bt6
+ 6d9x
+ 1vjb
+ 6b31
+ 2qo1
+ 3daz
+ 1v0l
+ 5nuu
+ 2qtb
+ 5ewy
+ 4i9u
+ 4afj
+ 5hzn
+ 2ohq
+ 4eqc
+ 3p8h
+ 5y53
+ 1kat
+ 4kwo
+ 2hiz
+ 2cmf
+ 4bgx
+ 4hys
+ 4lww
+ 3k39
+ 3rdq
+ 4ega
+ 5fsl
+ 3ehw
+ 5gnk
+ 5n8b
+ 6h29
+ 1dy4
+ 3nu6
+ 5dxu
+ 5jf8
+ 2qd9
+ 5zaj
+ 1oif
+ 2v8w
+ 1ywi
+ 5uiq
+ 1xr8
+ 4yvz
+ 2y82
+ 1pkx
+ 2qbw
+ 1ebg
+ 3a4p
+ 3ful
+ 4d62
+ 5qb2
+ 6bgx
+ 1ela
+ 5j59
+ 2q63
+ 6cmj
+ 2p9a
+ 5nwk
+ 3w54
+ 4rrg
+ 2l1r
+ 2gsu
+ 2wva
+ 2vpn
+ 1qji
+ 4gts
+ 6gnw
+ 4afg
+ 5ty9
+ 4ps7
+ 3r2f
+ 3qak
+ 4dgg
+ 5f67
+ 1kak
+ 4mc9
+ 4dpu
+ 4u5o
+ 3h06
+ 2b4m
+ 4q99
+ 4dgb
+ 4hw2
+ 5uvc
+ 1yxd
+ 4cix
+ 4ehm
+ 5his
+ 2ihq
+ 4kmd
+ 5cin
+ 5v7i
+ 3itz
+ 3il5
+ 3brn
+ 1h4w
+ 2ilp
+ 6bnk
+ 1me3
+ 5fh7
+ 1rgk
+ 3iub
+ 3p4q
+ 1njt
+ 1ya4
+ 2x6i
+ 3e81
+ 4yl0
+ 1at6
+ 3pr0
+ 5wf3
+ 4uzh
+ 5f2k
+ 4j93
+ 4dwg
+ 5ti2
+ 4pcs
+ 3qcj
+ 4qjo
+ 4dpf
+ 4qer
+ 2kp8
+ 4o2p
+ 4pd6
+ 2op3
+ 6got
+ 2weq
+ 4lh7
+ 5l97
+ 6bed
+ 5o1e
+ 4ezt
+ 5y8w
+ 1err
+ 4rj7
+ 2yol
+ 5t68
+ 2ltx
+ 5g17
+ 1mxl
+ 4c7t
+ 3uvu
+ 5a3r
+ 4yuz
+ 3nf9
+ 2fix
+ 3pgu
+ 4kn1
+ 3mi3
+ 5dpw
+ 3e5a
+ 1y1z
+ 5j8i
+ 4e5d
+ 3cd8
+ 6ccl
+ 4x9r
+ 3rtx
+ 2zmj
+ 1xp6
+ 6cks
+ 3fw3
+ 4znx
+ 4nyj
+ 1q1g
+ 2ews
+ 5o5m
+ 4w4v
+ 2w1d
+ 5d1j
+ 5fbe
+ 3ipq
+ 2qk5
+ 2ydj
+ 3gv6
+ 1mxo
+ 5l9h
+ 3odk
+ 6cze
+ 6c98
+ 2wmx
+ 3qa2
+ 5oax
+ 6gnm
+ 3q6z
+ 2oqv
+ 5exn
+ 3s53
+ 5lzg
+ 3mo5
+ 6drz
+ 5nn5
+ 4x1q
+ 2mow
+ 3ow3
+ 4ebw
+ 4a4c
+ 6ema
+ 2h6q
+ 2fjm
+ 5uez
+ 6et8
+ 6ep9
+ 1avd
+ 2q80
+ 4nj3
+ 4mdr
+ 5ely
+ 5dri
+ 1v2r
+ 1hk4
+ 5xfj
+ 4z0q
+ 4heg
+ 5h3q
+ 2buv
+ 4l7d
+ 3nb5
+ 1g9b
+ 3oay
+ 4aj4
+ 1bxr
+ 2w9h
+ 2iws
+ 3oy1
+ 3qpo
+ 2hog
+ 3rl8
+ 1zgb
+ 1a4w
+ 3vvz
+ 5ia4
+ 6f1n
+ 1x70
+ 3q5u
+ 4do4
+ 2pcp
+ 5m77
+ 4a50
+ 1elb
+ 1rri
+ 2ydm
+ 3ldp
+ 4k9g
+ 2ptz
+ 3uo6
+ 2qwd
+ 5fdz
+ 6fiv
+ 1hqh
+ 5q12
+ 5anv
+ 5k5e
+ 5aer
+ 1jys
+ 2zu3
+ 2oiq
+ 5lwn
+ 1i9o
+ 1npv
+ 4piq
+ 2w0j
+ 6c5f
+ 1w8m
+ 5gjd
+ 5yql
+ 1m83
+ 4djv
+ 3ns9
+ 3zn1
+ 4eg7
+ 3u4r
+ 6hu0
+ 4awq
+ 4l6q
+ 1ad8
+ 3d9k
+ 5ucj
+ 3piz
+ 3ekt
+ 1g9a
+ 3m2w
+ 5q1c
+ 1mv0
+ 6emh
+ 1ttv
+ 5u12
+ 1lhf
+ 1w1p
+ 2i4t
+ 4z68
+ 4gqr
+ 3uqp
+ 1rhq
+ 3oxc
+ 4mc6
+ 3adt
+ 3ivi
+ 3r8u
+ 1udt
+ 1o36
+ 4ou3
+ 6apz
+ 5gmp
+ 2l84
+ 2gg9
+ 3o64
+ 3uo9
+ 4qht
+ 6f6r
+ 3hy7
+ 1nox
+ 5u6v
+ 3ans
+ 1mdl
+ 3d2e
+ 2fsv
+ 5t2m
+ 3p79
+ 4je7
+ 3gta
+ 4q9z
+ 6f55
+ 1gu1
+ 5y6k
+ 3ert
+ 3ant
+ 5a3h
+ 3zdv
+ 3o1d
+ 6ar4
+ 2ym4
+ 3s3v
+ 3fk1
+ 3pbb
+ 3r6c
+ 1xh8
+ 5lh4
+ 5q0w
+ 2psu
+ 5gty
+ 2qi0
+ 2nn7
+ 5vb6
+ 4b8o
+ 1zp8
+ 5uqv
+ 4pci
+ 5f5z
+ 4rj4
+ 6dh4
+ 3lce
+ 4ea2
+ 4y6m
+ 5d3p
+ 1gsf
+ 4l7l
+ 1tkt
+ 3a29
+ 5ljj
+ 4gpk
+ 3ayc
+ 3qkv
+ 3ern
+ 3zhf
+ 1f4y
+ 6bgg
+ 5uz0
+ 2znt
+ 5du4
+ 1utn
+ 5ayy
+ 1lzo
+ 1gym
+ 5k8s
+ 3kb3
+ 3ijy
+ 5oa2
+ 5wio
+ 1yei
+ 4hlh
+ 2eg8
+ 5tkd
+ 1kpm
+ 3tpr
+ 3mt9
+ 3tcy
+ 4qxr
+ 3uyt
+ 3o9l
+ 5ejv
+ 6ekn
+ 2fky
+ 5vp0
+ 4rxa
+ 2xpa
+ 1kav
+ 3ecn
+ 5vlh
+ 4lko
+ 2g83
+ 4m5r
+ 5iql
+ 5g53
+ 3fhe
+ 2xj7
+ 3u78
+ 3zt3
+ 4hkn
+ 4h58
+ 4f1l
+ 5q0t
+ 2jkh
+ 5w13
+ 3r88
+ 1yvz
+ 4qmq
+ 5aag
+ 1b6m
+ 4i47
+ 6d5e
+ 6c91
+ 1z95
+ 4ca6
+ 4ysl
+ 4gbz
+ 1aj7
+ 1wcq
+ 3m96
+ 5t92
+ 6gjl
+ 4yxo
+ 5l2m
+ 4wov
+ 3l2y
+ 5v5y
+ 3a3y
+ 2gmx
+ 2fo4
+ 2h5j
+ 2r05
+ 2qtt
+ 4jn4
+ 5npf
+ 1gt5
+ 4jfd
+ 2qhr
+ 2sfp
+ 3rx5
+ 4e4l
+ 4mao
+ 1zkl
+ 3syr
+ 3jzj
+ 5h9q
+ 5one
+ 4io6
+ 3lp2
+ 2i4j
+ 3rlp
+ 5eta
+ 4yth
+ 3r24
+ 3gsg
+ 2of4
+ 5inh
+ 3ed0
+ 3zlr
+ 5llp
+ 6esj
+ 5c8m
+ 3dkf
+ 4z6h
+ 4x6o
+ 2j47
+ 1usk
+ 5w8j
+ 4x1p
+ 1xb7
+ 1lqe
+ 4apr
+ 5khm
+ 2ga2
+ 2ly0
+ 4zro
+ 4e26
+ 4g16
+ 4uvw
+ 1f5l
+ 4ipn
+ 4hwt
+ 4g90
+ 1kds
+ 4bck
+ 5k6s
+ 5eqp
+ 1vwn
+ 4uye
+ 4gtq
+ 6fe1
+ 6gjj
+ 3hb8
+ 2zdm
+ 5alz
+ 3eyg
+ 4lkk
+ 4os2
+ 1j4q
+ 4o36
+ 5dtm
+ 2wgi
+ 6faf
+ 1mx1
+ 3lfn
+ 2i1r
+ 3hzy
+ 1a4k
+ 4zl4
+ 1dub
+ 3qi3
+ 3i90
+ 3qgy
+ 6gfy
+ 1o2u
+ 4xyn
+ 3rk7
+ 6atv
+ 2wu7
+ 4tnw
+ 2uwo
+ 1z6p
+ 3p8o
+ 3my1
+ 3s3q
+ 5w0l
+ 4tw8
+ 6b8u
+ 5jg1
+ 2azm
+ 4qw3
+ 5ntk
+ 5jq8
+ 3gcq
+ 2zcs
+ 2pvw
+ 1o0n
+ 1ph0
+ 3nu5
+ 5loh
+ 1t08
+ 4m7b
+ 1qpl
+ 4ge5
+ 4w9s
+ 5fat
+ 5l2w
+ 5nx2
+ 2gst
+ 2pix
+ 5myn
+ 3ump
+ 2xoi
+ 4r7m
+ 5ful
+ 1kf6
+ 2on3
+ 5wa9
+ 5tw3
+ 1q4k
+ 6fu4
+ 5umx
+ 2bdy
+ 4ezy
+ 4crj
+ 4pyx
+ 5t97
+ 2cet
+ 1fao
+ 1n46
+ 2ooh
+ 3dgo
+ 4io4
+ 1xjd
+ 1q72
+ 3mag
+ 6euv
+ 4g5f
+ 4dgm
+ 5o1c
+ 3ken
+ 4d2r
+ 5fnq
+ 4z7f
+ 2o3z
+ 4xgz
+ 4i2z
+ 6g15
+ 5ypp
+ 2x09
+ 5lt6
+ 3k5g
+ 2xej
+ 1uyh
+ 4tln
+ 4mrw
+ 3c10
+ 3sdi
+ 4tw6
+ 3pa4
+ 3q4k
+ 5l3a
+ 3b9g
+ 4ohp
+ 3nq3
+ 5vfj
+ 4rqi
+ 4x7k
+ 2q8s
+ 1ca8
+ 5a3x
+ 4wf6
+ 5oh4
+ 2qu3
+ 5dq8
+ 6ffs
+ 4loq
+ 4prb
+ 5vwk
+ 2yhw
+ 5eob
+ 5dgj
+ 2i4d
+ 5wlo
+ 5lo6
+ 5aln
+ 3p5l
+ 4q1y
+ 6c4u
+ 1gi7
+ 2jkt
+ 4yv1
+ 3ctj
+ 4qfr
+ 4wmy
+ 5c84
+ 3ie3
+ 5mnn
+ 3l8v
+ 6eij
+ 1o2n
+ 4nmv
+ 5ok6
+ 4yab
+ 2vnf
+ 3r0w
+ 2uyq
+ 5j19
+ 1y2c
+ 3nth
+ 5ni0
+ 4tpk
+ 1vyq
+ 4txs
+ 2wgs
+ 5gv2
+ 1b3g
+ 3kad
+ 6hgz
+ 3dng
+ 4u7q
+ 5fnd
+ 2c5x
+ 5ho6
+ 2aqu
+ 1ke6
+ 5g5f
+ 3wff
+ 3m36
+ 4bb4
+ 3ai8
+ 3ee2
+ 5u14
+ 4xe1
+ 4ggz
+ 4av5
+ 3bgc
+ 5i4v
+ 3g0d
+ 4bkj
+ 2aei
+ 4hup
+ 4yv9
+ 4uwf
+ 2ovy
+ 2oo8
+ 4dlj
+ 2v83
+ 1ct8
+ 5qan
+ 4nwc
+ 4msc
+ 4iti
+ 3guz
+ 6i8l
+ 2yof
+ 1oyn
+ 3qti
+ 5nf6
+ 5n7x
+ 3t0t
+ 3oyw
+ 4ab9
+ 3aw0
+ 2x2l
+ 2lkk
+ 3oyq
+ 6c28
+ 2ofv
+ 1b58
+ 4cpt
+ 4em7
+ 1t5a
+ 4xyf
+ 3duy
+ 4xh6
+ 1j15
+ 3b7j
+ 2isw
+ 3eqs
+ 3tkz
+ 3dgl
+ 3qw5
+ 6df4
+ 2jjb
+ 5auy
+ 2rcu
+ 5mqy
+ 2aoe
+ 1d8f
+ 1kl5
+ 4zsg
+ 3e0p
+ 3su6
+ 5tz3
+ 4hco
+ 3i6c
+ 2ow1
+ 2yjw
+ 4nue
+ 1uyd
+ 4hod
+ 3oik
+ 2viv
+ 4wtu
+ 4gih
+ 1b9s
+ 5i40
+ 4zb6
+ 5fns
+ 4x6h
+ 5hg9
+ 5naw
+ 1h37
+ 1uj0
+ 3e9i
+ 3i5z
+ 1v2v
+ 5wb3
+ 3lpt
+ 3k4d
+ 3nhi
+ 1cbr
+ 5uis
+ 3f68
+ 5nk3
+ 3okv
+ 3krr
+ 4gnf
+ 3zyf
+ 3ivg
+ 5d3n
+ 4rs0
+ 5oxn
+ 1owd
+ 1w7h
+ 5o4s
+ 3hrf
+ 2r3h
+ 1yp9
+ 5oyd
+ 4iif
+ 4uvb
+ 1b9j
+ 2bgd
+ 6giu
+ 5eek
+ 3tnh
+ 3gp0
+ 3bxs
+ 2ay1
+ 5cap
+ 3ta0
+ 3mkf
+ 4uyf
+ 4zx6
+ 5a3n
+ 3hek
+ 6b5r
+ 1fbm
+ 5wou
+ 3g76
+ 2rkf
+ 5j86
+ 5lav
+ 2rkn
+ 3jzo
+ 3kr5
+ 2vo4
+ 4lzr
+ 2srt
+ 4wbg
+ 1vyz
+ 3ebi
+ 2qbx
+ 3gbb
+ 3vnt
+ 3erd
+ 1o34
+ 1lbk
+ 6czi
+ 4d1c
+ 1k1p
+ 2bmv
+ 1jlr
+ 5hx6
+ 2igw
+ 4o0r
+ 2isv
+ 3kr8
+ 4k3m
+ 5xiw
+ 2z5s
+ 2w97
+ 4u03
+ 4p4d
+ 1pwp
+ 5j27
+ 3w5e
+ 5a3s
+ 1zz3
+ 4y5i
+ 4rx9
+ 4ty7
+ 6g4z
+ 4j3e
+ 1s3k
+ 6ajh
+ 2wei
+ 5v5o
+ 4p45
+ 4nbl
+ 4rxd
+ 1cp6
+ 1r5h
+ 4nnw
+ 4cga
+ 3kb7
+ 4phw
+ 3zpq
+ 4b3c
+ 1wv7
+ 5hja
+ 1mfi
+ 3jzk
+ 1hsh
+ 6aro
+ 3vyf
+ 2v7d
+ 4k66
+ 5eeq
+ 5mos
+ 3ska
+ 4bg1
+ 4ql8
+ 4lgg
+ 4anw
+ 6gr7
+ 2x7s
+ 4ie4
+ 1p05
+ 4zbi
+ 5c6v
+ 3qzv
+ 5etq
+ 1nhw
+ 2xx4
+ 1uys
+ 3zsq
+ 4h75
+ 4d0w
+ 4pml
+ 3gyn
+ 2fm5
+ 4r93
+ 5usq
+ 4e3b
+ 2hwi
+ 3kl8
+ 2vr4
+ 3o0e
+ 3dab
+ 5h8g
+ 4ynb
+ 1kr3
+ 4w9d
+ 4msl
+ 5k8v
+ 5ix1
+ 5zaf
+ 6cve
+ 4h85
+ 3d5m
+ 3v78
+ 5uox
+ 5d1u
+ 4fcm
+ 2zz1
+ 4i1r
+ 5hkm
+ 4y8c
+ 1fkn
+ 5he2
+ 3n3g
+ 3g5v
+ 2evl
+ 3k3j
+ 1utr
+ 3ixg
+ 4u44
+ 4ahs
+ 1o2y
+ 3fcf
+ 5dde
+ 3bgm
+ 2nxd
+ 4yhq
+ 4k19
+ 5y7w
+ 3v3m
+ 4ph4
+ 1nja
+ 5i23
+ 3rsv
+ 5n17
+ 4bt5
+ 2ojg
+ 5x72
+ 6eq7
+ 3ehx
+ 1a5v
+ 6clv
+ 3qcs
+ 5cy9
+ 3bgp
+ 3nf7
+ 4dmn
+ 2dxs
+ 4msk
+ 2wmu
+ 4hws
+ 5c2a
+ 3lco
+ 4x3u
+ 5c7a
+ 1pa9
+ 5he7
+ 3m67
+ 1j7z
+ 3s5y
+ 3oe0
+ 4kql
+ 5l8y
+ 5cks
+ 4cc5
+ 2rcb
+ 3bi1
+ 3jzc
+ 2qzx
+ 4u73
+ 2an5
+ 2uvm
+ 6g2n
+ 1dzj
+ 6b97
+ 5y6d
+ 1qvu
+ 4ymx
+ 1yej
+ 6f3d
+ 5tg7
+ 2w1i
+ 3dv5
+ 1vyw
+ 3sw9
+ 4qz3
+ 8a3h
+ 3o3j
+ 5gn9
+ 4b7j
+ 2i4u
+ 3m8q
+ 2fu8
+ 4hfp
+ 1swi
+ 2vd0
+ 3pz1
+ 2jo9
+ 3q4c
+ 4bzo
+ 4tyl
+ 6fuj
+ 2x7c
+ 3hg1
+ 3pty
+ 5f3i
+ 4q9o
+ 3e1r
+ 4j78
+ 4ucv
+ 6h38
+ 3wtk
+ 4lno
+ 2h65
+ 3s4q
+ 3lp0
+ 4ajo
+ 5maj
+ 3d7d
+ 4xe0
+ 6hpw
+ 4p0b
+ 5fsb
+ 4f6u
+ 5h0g
+ 2zdk
+ 6hoy
+ 6bib
+ 3rpv
+ 1kel
+ 4v24
+ 5k1i
+ 4j4v
+ 3kai
+ 3c9e
+ 2v59
+ 5cs5
+ 5jqb
+ 6as8
+ 5dw2
+ 1mq1
+ 4gw1
+ 4gj9
+ 6e2n
+ 4qlu
+ 5jog
+ 4iu1
+ 3tyq
+ 3r2b
+ 5alr
+ 5c4s
+ 3pa8
+ 2qq7
+ 4qn9
+ 2uwd
+ 1yye
+ 1oir
+ 1gpy
+ 6ert
+ 3d7z
+ 6dq8
+ 4lsj
+ 2wxm
+ 3pj2
+ 3nwb
+ 3qxc
+ 4hwo
+ 3afk
+ 2rk8
+ 6acb
+ 6g85
+ 1kna
+ 4pvx
+ 5hex
+ 3jqa
+ 5xms
+ 4an2
+ 3lcu
+ 3rdh
+ 2r3y
+ 4w7t
+ 6few
+ 4cd4
+ 2gg2
+ 4ga3
+ 6anl
+ 1yyy
+ 3r16
+ 4al4
+ 4uru
10409
+ 2ovv
10410
+ 4ya8
10411
+ 4re3
10412
+ 3bgq
10413
+ 5lpr
10414
+ 5fap
10415
+ 5cei
10416
+ 3v5q
10417
+ 1maw
10418
+ 4p4f
10419
+ 4jx9
10420
+ 4xqa
10421
+ 3ghe
10422
+ 3fci
10423
+ 1zfp
10424
+ 2l12
10425
+ 4lil
10426
+ 6eq2
10427
+ 5vdv
10428
+ 6b0v
10429
+ 4l0i
10430
+ 4zsl
10431
+ 5yu9
10432
+ 1p04
10433
+ 1rnt
10434
+ 2chm
10435
+ 1nhx
10436
+ 3ckb
10437
+ 2vot
10438
+ 5n1p
10439
+ 2jke
10440
+ 5ztn
10441
+ 4j82
10442
+ 3hxb
10443
+ 3qvu
10444
+ 1hk5
10445
+ 4pyo
10446
+ 3atl
10447
+ 3jzi
10448
+ 4ad3
10449
+ 4b6r
10450
+ 3g8o
10451
+ 4kc2
10452
+ 7upj
10453
+ 5eyz
10454
+ 6do5
10455
+ 5wfj
10456
+ 2j4a
10457
+ 3kmx
10458
+ 5oy3
10459
+ 4o55
10460
+ 2qi5
10461
+ 4jfs
10462
+ 4h3g
10463
+ 2y68
10464
+ 3fv2
10465
+ 3ioc
10466
+ 5jyp
10467
+ 2gde
10468
+ 4mm8
10469
+ 4jjm
10470
+ 5ccl
10471
+ 5umy
10472
+ 4n7e
10473
+ 4f64
10474
+ 3p9h
10475
+ 4hz5
10476
+ 4kin
10477
+ 6cqf
10478
+ 3feg
10479
+ 3aaq
10480
+ 3sl1
10481
+ 1uxb
10482
+ 4oee
10483
+ 4i7d
10484
+ 5ll5
10485
+ 4qlk
10486
+ 3s9y
10487
+ 3t3g
10488
+ 2p3b
10489
+ 2jbj
10490
+ 2x8i
10491
+ 4ui3
10492
+ 3po1
10493
+ 5ji6
10494
+ 3u90
10495
+ 4lts
10496
+ 4xrq
10497
+ 4jzr
10498
+ 5nkb
10499
+ 1km3
10500
+ 2qnx
10501
+ 3myg
10502
+ 2va7
10503
+ 4ymh
10504
+ 4nms
10505
+ 5ar7
10506
+ 2h3e
10507
+ 4lp0
10508
+ 2ay9
10509
+ 5trs
10510
+ 4fse
10511
+ 6cdg
10512
+ 6ful
10513
+ 4x8n
10514
+ 4w4x
10515
+ 5m29
10516
+ 4ocx
10517
+ 3sov
10518
+ 4fxp
10519
+ 3ex2
10520
+ 5uwl
10521
+ 4wpn
10522
+ 3bi0
10523
+ 5nwh
10524
+ 1vik
10525
+ 4uj9
10526
+ 5tg4
10527
+ 5hrx
10528
+ 3wz6
10529
+ 1mm7
10530
+ 5v37
10531
+ 5eol
10532
+ 4egi
10533
+ 5n21
10534
+ 3vhe
10535
+ 3cj3
10536
+ 4w9q
10537
+ 2qtg
10538
+ 4z93
10539
+ 3jqb
10540
+ 5f3c
10541
+ 5ll9
10542
+ 4zts
10543
+ 5aei
10544
+ 1o9d
10545
+ 2iw9
10546
+ 5b5g
10547
+ 1g54
10548
+ 3v5p
10549
+ 2yk9
10550
+ 1ez9
10551
+ 4c6x
10552
+ 2g24
10553
+ 1m9n
10554
+ 5f60
10555
+ 1vzq
10556
+ 1rt9
10557
+ 5jq5
10558
+ 3ha6
10559
+ 2yj9
10560
+ 4nra
10561
+ 2h9t
10562
+ 6afi
10563
+ 3el8
10564
+ 4kni
10565
+ 5v79
10566
+ 4fxf
10567
+ 3ncg
10568
+ 4x3s
10569
+ 1dif
10570
+ 4aa7
10571
+ 4d1b
10572
+ 5av0
10573
+ 2clm
10574
+ 4l10
10575
+ 1exv
10576
+ 2ltv
10577
+ 6g1v
10578
+ 3rwh
10579
+ 5yfs
10580
+ 5xkm
10581
+ 5mjb
10582
+ 2i03
10583
+ 2v0z
10584
+ 3udm
10585
+ 3i3b
10586
+ 1h1b
10587
+ 6esa
10588
+ 4ai8
10589
+ 5ak3
10590
+ 5j20
10591
+ 5jjm
10592
+ 3wto
10593
+ 1std
10594
+ 1jjt
10595
+ 4kxb
10596
+ 1jdj
10597
+ 2chw
10598
+ 3ll8
10599
+ 5j79
10600
+ 5adq
10601
+ 4r74
10602
+ 5n53
10603
+ 3ljg
10604
+ 2lsr
10605
+ 3rk9
10606
+ 1add
10607
+ 5ja0
10608
+ 5x02
10609
+ 5cc2
10610
+ 3u9c
10611
+ 5d12
10612
+ 1o6q
10613
+ 1xn0
10614
+ 5f20
10615
+ 4lgh
10616
+ 5etr
10617
+ 3mj5
10618
+ 4km2
10619
+ 4f9u
10620
+ 6el5
10621
+ 1hmr
10622
+ 5n9k
10623
+ 3are
10624
+ 1lv8
10625
+ 5n99
10626
+ 4gly
10627
+ 1sh9
10628
+ 4uns
10629
+ 4cpu
10630
+ 2or4
10631
+ 2rkd
10632
+ 4psq
10633
+ 5wcm
10634
+ 5vsc
10635
+ 4nl1
10636
+ 4um9
10637
+ 4wa9
10638
+ 3fxv
10639
+ 2obj
10640
+ 2ohm
10641
+ 4zw3
10642
+ 5jao
10643
+ 6htz
10644
+ 3n2p
10645
+ 5dda
10646
+ 2vwy
10647
+ 4ge2
10648
+ 5d7e
10649
+ 4wx4
10650
+ 3sgv
10651
+ 5w12
10652
+ 4gmc
10653
+ 4dij
10654
+ 1r5g
10655
+ 4r3w
10656
+ 5hn8
10657
+ 4fk6
10658
+ 5ggl
10659
+ 2ks9
10660
+ 1tq4
10661
+ 2qu2
10662
+ 4oyk
10663
+ 1my8
10664
+ 6mv3
10665
+ 3kqs
10666
+ 3gz9
10667
+ 3eta
10668
+ 1uv5
10669
+ 2bok
10670
+ 1b42
10671
+ 5mlw
10672
+ 5d29
10673
+ 4qr5
10674
+ 1r6n
10675
+ 6f6u
10676
+ 4pio
10677
+ 6cee
10678
+ 3mp1
10679
+ 4pz8
10680
+ 6fgy
10681
+ 5moe
10682
+ 5yfz
10683
+ 5zwh
10684
+ 4kb9
10685
+ 2wmw
10686
+ 1nax
10687
+ 1o4n
10688
+ 2znu
10689
+ 5gvp
10690
+ 3ma3
10691
+ 3k15
10692
+ 5qae
10693
+ 1v48
10694
+ 2psx
10695
+ 4d8e
10696
+ 1m5d
10697
+ 2v2c
10698
+ 2ql5
10699
+ 3h0y
10700
+ 3wpn
10701
+ 2qa8
10702
+ 1rs4
10703
+ 5tgy
10704
+ 2b7d
10705
+ 4pl6
10706
+ 2w8j
10707
+ 3tdz
10708
+ 2wxj
10709
+ 5g2b
10710
+ 4nrk
10711
+ 3gwv
10712
+ 1kne
10713
+ 3kqe
10714
+ 5khg
10715
+ 4uiu
10716
+ 4kyh
10717
+ 4e93
10718
+ 2qg2
10719
+ 1a4m
10720
+ 4byi
10721
+ 5ngz
10722
+ 4c1c
10723
+ 6fam
10724
+ 5lgq
10725
+ 2hb3
10726
+ 4c1h
10727
+ 3ms9
10728
+ 2pnx
10729
+ 4qmw
10730
+ 1lzq
10731
+ 3aid
10732
+ 3psb
10733
+ 5w6r
10734
+ 4c4e
10735
+ 4jxw
10736
+ 5ngr
10737
+ 1utz
10738
+ 3r7o
10739
+ 3umq
10740
+ 3hw1
10741
+ 1tkc
10742
+ 1abt
10743
+ 3mbp
10744
+ 3i9g
10745
+ 1rwq
10746
+ 4iz0
10747
+ 6dh2
10748
+ 4rx7
10749
+ 4kwf
10750
+ 2cn8
10751
+ 3bar
10752
+ 1cny
10753
+ 1npz
10754
+ 2bys
10755
+ 4y6p
10756
+ 4y6s
10757
+ 5q0s
10758
+ 2uxx
10759
+ 6eaa
10760
+ 5cs2
10761
+ 4c1e
10762
+ 4mgv
10763
+ 2wxf
10764
+ 5q1e
10765
+ 5o83
10766
+ 3r4o
10767
+ 6fyk
10768
+ 1x8s
10769
+ 5b25
10770
+ 4f39
10771
+ 4tk5
10772
+ 5nhp
10773
+ 4lov
10774
+ 1kui
10775
+ 3art
10776
+ 4d2v
10777
+ 1bnv
10778
+ 4kov
10779
+ 6cyc
10780
+ 4fkk
10781
+ 6hvj
10782
+ 2j4z
10783
+ 2p3c
10784
+ 1f0s
10785
+ 3dy6
10786
+ 5lz7
10787
+ 3mww
10788
+ 1hvk
10789
+ 1x76
10790
+ 2aez
10791
+ 1bio
10792
+ 1lhe
10793
+ 3k48
10794
+ 2krd
10795
+ 4bbg
10796
+ 5otc
10797
+ 3kqp
10798
+ 4esi
10799
+ 2rku
10800
+ 2kpl
10801
+ 1erb
10802
+ 2xx5
10803
+ 2fl2
10804
+ 4a4v
10805
+ 1lqd
10806
+ 1lfo
10807
+ 3hf6
10808
+ 3bm8
10809
+ 1a99
10810
+ 3zly
10811
+ 6cwn
10812
+ 3d8y
10813
+ 5icz
10814
+ 1hgi
10815
+ 4qgf
10816
+ 6fug
10817
+ 3tn8
10818
+ 6f3g
10819
+ 5kq5
10820
+ 3tz2
10821
+ 1xws
10822
+ 5uov
10823
+ 2o9j
10824
+ 3coh
10825
+ 3h98
10826
+ 1mes
10827
+ 3ot3
10828
+ 6f1x
10829
+ 5gs9
10830
+ 2hha
10831
+ 1pfy
10832
+ 4j3j
10833
+ 4rn1
10834
+ 4ze6
10835
+ 3zpt
10836
+ 4f9y
10837
+ 4lbu
10838
+ 2nno
10839
+ 5v42
10840
+ 2fyv
10841
+ 4j8m
10842
+ 6hjk
10843
+ 4ha5
10844
+ 3tu7
10845
+ 1izh
10846
+ 3o9d
10847
+ 6g4m
10848
+ 2r2b
10849
+ 2xc0
10850
+ 1g30
10851
+ 3anq
10852
+ 5uv5
10853
+ 4ty8
10854
+ 2vew
10855
+ 3br9
10856
+ 1df8
10857
+ 3fgc
10858
+ 1utt
10859
+ 6cd8
10860
+ 6f4w
10861
+ 3l8x
10862
+ 4fe6
10863
+ 5adt
10864
+ 5nzq
10865
+ 6htt
10866
+ 5k0t
10867
+ 3pn1
10868
+ 4ryc
10869
+ 4nga
10870
+ 4crc
10871
+ 4xcu
10872
+ 3f3c
10873
+ 3obx
10874
+ 6agg
10875
+ 6cq5
10876
+ 3w2r
10877
+ 3dsu
10878
+ 3zs0
10879
+ 4wkv
10880
+ 3w07
10881
+ 1ntk
10882
+ 4dk8
10883
+ 3kgq
10884
+ 4anv
10885
+ 4ery
10886
+ 4jzd
10887
+ 3tjh
10888
+ 4jwk
10889
+ 4hyi
10890
+ 4kne
10891
+ 3u9q
10892
+ 5ih2
10893
+ 2h02
10894
+ 4c6u
10895
+ 1i8z
10896
+ 4bid
10897
+ 3lmp
10898
+ 5ohj
10899
+ 4j81
10900
+ 5dey
10901
+ 5k8n
10902
+ 1pz5
10903
+ 1shd
10904
+ 3hhm
10905
+ 5n0f
10906
+ 5kh3
10907
+ 3nba
10908
+ 4u5s
10909
+ 2ndo
10910
+ 3shc
10911
+ 3vid
10912
+ 1ik4
10913
+ 1al7
10914
+ 1fq7
10915
+ 2x4r
10916
+ 4kz3
10917
+ 6cen
10918
+ 3s8l
10919
+ 2aoc
10920
+ 2xh5
10921
+ 2p93
10922
+ 4qfg
10923
+ 5huy
10924
+ 3ebo
10925
+ 1agm
10926
+ 1fv9
10927
+ 4jnj
10928
+ 4c72
10929
+ 1vfn
10930
+ 4xg3
10931
+ 3hl5
10932
+ 2xah
10933
+ 5n58
10934
+ 3mof
10935
+ 3r22
10936
+ 5o45
10937
+ 1utm
10938
+ 3p8e
10939
+ 1kl3
10940
+ 5o5h
10941
+ 5lpk
10942
+ 2zdx
10943
+ 6q74
10944
+ 3miy
10945
+ 5bpe
10946
+ 2vta
10947
+ 1bnq
10948
+ 6f90
10949
+ 4myh
10950
+ 5llm
10951
+ 4poj
10952
+ 1q6s
10953
+ 1jws
10954
+ 5xmp
10955
+ 4ahv
10956
+ 4e4n
10957
+ 4qgi
10958
+ 4o75
10959
+ 3ogp
10960
+ 3dk1
10961
+ 4bdf
10962
+ 2ybs
10963
+ 4ght
10964
+ 1e5j
10965
+ 1veb
10966
+ 4wrq
10967
+ 5cqu
10968
+ 1ow7
10969
+ 5nsp
10970
+ 2wtx
10971
+ 2rqu
10972
+ 5kpl
10973
+ 3g19
10974
+ 3gy7
10975
+ 4bhi
10976
+ 3rjm
10977
+ 3f3v
10978
+ 4h42
10979
+ 4y16
10980
+ 1o3g
10981
+ 1utc
10982
+ 3wzu
10983
+ 6b5q
10984
+ 1u2r
10985
+ 3sx9
10986
+ 1nju
10987
+ 3drp
10988
+ 6azk
10989
+ 3r9d
10990
+ 4awi
10991
+ 1ce5
10992
+ 5vd2
10993
+ 5kzq
10994
+ 2bal
10995
+ 1y2f
10996
+ 2xxt
10997
+ 5hlp
10998
+ 2y8c
10999
+ 5auu
11000
+ 1bwa
11001
+ 6gbe
11002
+ 2zv2
11003
+ 3f7b
11004
+ 1e4h
11005
+ 6arn
11006
+ 3ryw
11007
+ 6fr0
11008
+ 5wbz
11009
+ 1c8v
11010
+ 5kau
11011
+ 1v41
11012
+ 4oew
11013
+ 5fbo
11014
+ 3l9h
11015
+ 5hi7
11016
+ 3igg
11017
+ 4hy0
11018
+ 5enf
11019
+ 1cwc
11020
+ 5yjo
11021
+ 2e92
11022
+ 186l
11023
+ 6epy
11024
+ 3qk0
11025
+ 6cq4
11026
+ 2jkq
11027
+ 3gb2
11028
+ 5xmt
11029
+ 2ybu
11030
+ 3arz
11031
+ 2o5k
11032
+ 5c13
11033
+ 2amv
11034
+ 4std
11035
+ 4gfm
11036
+ 2fm2
11037
+ 4pv0
11038
+ 3qx9
11039
+ 1hnn
11040
+ 3bl2
11041
+ 5dhp
11042
+ 6c7i
11043
+ 5pzn
11044
+ 5ur9
11045
+ 1pye
11046
+ 3upf
11047
+ 3ooz
11048
+ 3zcl
11049
+ 2vkm
11050
+ 2jdl
11051
+ 2uyi
11052
+ 1usi
11053
+ 1ezq
11054
+ 4qz0
11055
+ 3g5d
11056
+ 3ry8
11057
+ 2vnt
11058
+ 5ehy
11059
+ 1mqj
11060
+ 5gs4
11061
+ 5xg4
11062
+ 3h59
11063
+ 1bv7
11064
+ 1br6
11065
+ 1mns
11066
+ 2ybk
11067
+ 1bcj
11068
+ 3ktr
11069
+ 1e02
11070
+ 1ykr
11071
+ 5vja
11072
+ 3r5j
11073
+ 5edu
11074
+ 3kga
11075
+ 6b5t
11076
+ 5f74
11077
+ 5qaq
11078
+ 1jpl
11079
+ 5hmi
11080
+ 3mjl
11081
+ 1eei
11082
+ 4n8e
11083
+ 1hrn
11084
+ 2o4s
11085
+ 1agw
11086
+ 4yz5
11087
+ 4wn1
11088
+ 3tc5
11089
+ 2pjb
11090
+ 6ea1
11091
+ 5epy
11092
+ 3vc4
11093
+ 2i0g
11094
+ 3ah8
11095
+ 1ogg
11096
+ 1onh
11097
+ 1tmb
11098
+ 6bau
11099
+ 4ad6
11100
+ 1a3e
11101
+ 4drn
11102
+ 1aw1
11103
+ 5fl1
11104
+ 2h2j
11105
+ 4prg
11106
+ 4ibe
11107
+ 2yem
11108
+ 3fql
11109
+ 5zwf
11110
+ 1x0n
11111
+ 3pi5
11112
+ 6ehh
11113
+ 1cps
11114
+ 5akl
11115
+ 3g2t
11116
+ 3vsx
11117
+ 4dhl
11118
+ 1hsg
11119
+ 3vhv
11120
+ 3ro4
11121
+ 5myl
11122
+ 4lyw
11123
+ 3sl5
11124
+ 3e92
11125
+ 4o91
11126
+ 3ipa
11127
+ 1imx
11128
+ 1u71
11129
+ 5dx3
11130
+ 4hvi
11131
+ 1h5v
11132
+ 3p7b
11133
+ 5v8o
11134
+ 2rg5
11135
+ 5nx9
11136
+ 4i8x
11137
+ 4rcf
11138
+ 4yrd
11139
+ 4bcn
11140
+ 4d09
11141
+ 5f8y
11142
+ 5mwp
11143
+ 3n76
11144
+ 3gba
11145
+ 4lyn
11146
+ 1unl
11147
+ 3e6v
11148
+ 4dce
11149
+ 3rsr
11150
+ 1nlp
11151
+ 5dxg
11152
+ 3rcd
11153
+ 2cji
11154
+ 5zwi
11155
+ 3o9a
11156
+ 2x2k
11157
+ 4n5d
11158
+ 5l4f
11159
+ 5gx7
11160
+ 5yqo
11161
+ 4gzt
11162
+ 3tki
11163
+ 5dd0
11164
+ 5ivt
11165
+ 5mnr
11166
+ 3asl
11167
+ 2oc0
11168
+ 3l3n
11169
+ 5izf
11170
+ 1i6v
11171
+ 2y56
11172
+ 4x0f
11173
+ 2h4k
11174
+ 3ivh
11175
+ 4yy6
11176
+ 1drj
11177
+ 3hku
11178
+ 3kfn
11179
+ 6gmd
11180
+ 2c3k
11181
+ 1w5v
11182
+ 5oha
11183
+ 4myq
11184
+ 3new
11185
+ 1o5c
11186
+ 4k5y
11187
+ 5kbi
11188
+ 5of0
11189
+ 5dhs
11190
+ 1y19
11191
+ 3hky
11192
+ 5nka
11193
+ 1wax
11194
+ 6h9b
11195
+ 3i7i
11196
+ 4b6c
11197
+ 1h3h
11198
+ 5xw6
11199
+ 4jql
11200
+ 4cra
11201
+ 3d4l
11202
+ 2nwn
11203
+ 2zyb
11204
+ 3rm9
11205
+ 3q6s
11206
+ 3g4i
11207
+ 3zqt
11208
+ 4bbe
11209
+ 3olf
11210
+ 6gxa
11211
+ 5h4j
11212
+ 4ljh
11213
+ 4a4f
11214
+ 4nw5
11215
+ 2vi5
11216
+ 3eqy
11217
+ 4tpp
11218
+ 1pi5
11219
+ 4bt3
11220
+ 3kiv
11221
+ 1pb8
11222
+ 4xyc
11223
+ 1zd2
11224
+ 2vur
11225
+ 2k2r
11226
+ 5j1x
11227
+ 3pfp
11228
+ 6g37
11229
+ 3nsq
11230
+ 6eq8
11231
+ 2wi6
11232
+ 1bxl
11233
+ 5eci
11234
+ 5jrq
11235
+ 5ehr
11236
+ 4qz1
11237
+ 1nmk
11238
+ 5zkb
11239
+ 4f65
11240
+ 5a7b
11241
+ 5sym
11242
+ 3wti
11243
+ 1j1a
11244
+ 4nwk
11245
+ 5l8t
11246
+ 5k0f
11247
+ 1mqh
11248
+ 1h3b
11249
+ 1wxz
11250
+ 3tt4
11251
+ 5k0x
11252
+ 2ez7
11253
+ 5wbp
11254
+ 4ddl
11255
+ 2ygu
11256
+ 3e7a
11257
+ 1aqj
11258
+ 5m7s
11259
+ 2odd
11260
+ 4eon
11261
+ 3fq7
11262
+ 3vf9
11263
+ 1pxm
11264
+ 4umt
11265
+ 4rlt
11266
+ 4f8j
11267
+ 5dp9
11268
+ 1ung
11269
+ 4r5w
11270
+ 5ais
11271
+ 4btu
11272
+ 4hyb
11273
+ 3nyx
11274
+ 3nif
11275
+ 5hmk
11276
+ 2qyk
11277
+ 3tz4
11278
+ 3qxm
11279
+ 6isd
11280
+ 6g97
11281
+ 5ccm
11282
+ 5u0z
11283
+ 2wbd
11284
+ 3aav
11285
+ 5vzu
11286
+ 2hai
11287
+ 6evn
11288
+ 2yga
11289
+ 1p17
11290
+ 5fus
11291
+ 5elw
11292
+ 4azt
11293
+ 6ea2
11294
+ 1xbo
11295
+ 2yhy
11296
+ 4u0m
11297
+ 6b3e
11298
+ 5hff
11299
+ 3iu8
11300
+ 1dqx
11301
+ 4o0v
11302
+ 5t1u
11303
+ 4gs6
11304
+ 2xei
11305
+ 6gn1
11306
+ 2pze
11307
+ 2j87
11308
+ 4hy1
11309
+ 3udn
11310
+ 5nmg
11311
+ 2pgz
11312
+ 1wdq
11313
+ 4er1
11314
+ 2l6j
11315
+ 4cmo
11316
+ 4gbd
11317
+ 3itu
11318
+ 5a7j
11319
+ 3su4
11320
+ 3r6g
11321
+ 5ea7
11322
+ 6eiq
11323
+ 5qb1
11324
+ 4o61
11325
+ 3grj
11326
+ 4xjs
11327
+ 2x85
11328
+ 3b66
11329
+ 3le6
11330
+ 5t19
11331
+ 3itc
11332
+ 5ett
11333
+ 2esm
11334
+ 3dc2
11335
+ 3p2k
11336
+ 3u7s
11337
+ 3c6w
11338
+ 4fcr
11339
+ 2clo
11340
+ 3d0e
11341
+ 2jqi
11342
+ 1ekb
11343
+ 4ovz
11344
+ 3soq
11345
+ 1hef
11346
+ 5os0
11347
+ 4htp
11348
+ 4tk2
11349
+ 4j22
11350
+ 4etz
11351
+ 3gus
11352
+ 3s76
11353
+ 4pyv
11354
+ 3c49
11355
+ 1ktt
11356
+ 2yz3
11357
+ 1xnx
11358
+ 1elc
11359
+ 5i24
11360
+ 6eif
11361
+ 5t9z
11362
+ 2qt5
11363
+ 5h1e
11364
+ 5ekh
11365
+ 5vdw
11366
+ 3fbr
11367
+ 2za0
11368
+ 5wej
11369
+ 2yel
11370
+ 4anx
11371
+ 1rtf
11372
+ 1r5y
11373
+ 1pzo
11374
+ 2ydw
11375
+ 5kam
11376
+ 5i13
11377
+ 5exw
11378
+ 2xyr
11379
+ 3qs1
11380
+ 2x9f
11381
+ 2np8
11382
+ 5ci7
11383
+ 1m7y
11384
+ 1fcx
11385
+ 5ok3
11386
+ 1hp0
11387
+ 3l79
11388
+ 4jq8
11389
+ 4bhn
11390
+ 1i9p
11391
+ 4hyh
11392
+ 1jil
11393
+ 5f2s
11394
+ 1bwn
11395
+ 1xh7
11396
+ 5kbg
11397
+ 4bg6
11398
+ 3qfy
11399
+ 4ycu
11400
+ 3lxk
11401
+ 4w57
11402
+ 4kxl
11403
+ 5mk9
11404
+ 6er4
11405
+ 2rcw
11406
+ 4jpc
11407
+ 6h1u
11408
+ 3iae
11409
+ 3fnm
11410
+ 3mxr
11411
+ 3c2o
11412
+ 5eva
11413
+ 4kbi
11414
+ 3ogx
11415
+ 5owa
11416
+ 6bmr
11417
+ 4mgb
11418
+ 5ka9
11419
+ 2bza
11420
+ 3sad
11421
+ 4wj5
11422
+ 4o1d
11423
+ 4pnc
11424
+ 4mzf
11425
+ 1f9e
11426
+ 4whl
11427
+ 2imb
11428
+ 4llk
11429
+ 4ih6
11430
+ 1vjd
11431
+ 3stj
11432
+ 3usn
11433
+ 5tnt
11434
+ 4i4e
11435
+ 4hg7
11436
+ 5dxe
11437
+ 1hc9
11438
+ 3dcc
11439
+ 3d27
11440
+ 5azf
11441
+ 5c5h
11442
+ 4m7x
11443
+ 1ysg
11444
+ 5nve
11445
+ 4dcy
11446
+ 2qrh
11447
+ 1r9l
11448
+ 3l0k
11449
+ 4iuv
11450
+ 5j31
11451
+ 5ulg
11452
+ 2wc3
11453
+ 2jxr
11454
+ 4qwx
11455
+ 4joe
11456
+ 4djh
11457
+ 3sm1
11458
+ 4prd
11459
+ 5etn
11460
+ 1ihy
11461
+ 3ut5
11462
+ 4gjc
11463
+ 5c1x
11464
+ 2oxy
11465
+ 5can
11466
+ 5hz9
11467
+ 4uxq
11468
+ 3nzi
11469
+ 4hfz
11470
+ 4mn3
11471
+ 1jif
11472
+ 1svh
11473
+ 4zsa
11474
+ 5y24
11475
+ 5f9b
11476
+ 4mka
11477
+ 5wf7
11478
+ 4o0y
11479
+ 5w8h
11480
+ 1t1s
11481
+ 5w7i
11482
+ 4o2e
11483
+ 3g58
11484
+ 4jmu
11485
+ 3dg8
11486
+ 5u7o
11487
+ 5owt
11488
+ 2oic
11489
+ 3mt7
11490
+ 5ka3
11491
+ 2wgj
11492
+ 1orw
11493
+ 1vea
11494
+ 1j01
11495
+ 4yec
11496
+ 6b7c
11497
+ 5wg9
11498
+ 2fv9
11499
+ 1e55
11500
+ 3cfs
11501
+ 5nhy
11502
+ 4x6s
11503
+ 5ak0
11504
+ 4q1w
11505
+ 4o5b
11506
+ 1b32
11507
+ 2fzk
11508
+ 2p3g
11509
+ 1h24
11510
+ 3qfd
11511
+ 4xu0
11512
+ 3pe2
11513
+ 5ow1
11514
+ 4uyg
11515
+ 3du8
11516
+ 4cby
11517
+ 1mtr
11518
+ 3bug
11519
+ 4uj1
11520
+ 4omj
11521
+ 2y58
11522
+ 1onz
11523
+ 3esj
11524
+ 1y2b
11525
+ 1zyj
11526
+ 1fig
11527
+ 3cd7
11528
+ 2hf8
11529
+ 2v3u
11530
+ 1o4f
11531
+ 5w0q
11532
+ 1ql7
11533
+ 3kv2
11534
+ 2wez
11535
+ 2loz
11536
+ 4zg6
11537
+ 5f4l
11538
+ 4mo4
11539
+ 3mam
11540
+ 2h44
11541
+ 3que
11542
+ 4y32
11543
+ 5cwa
11544
+ 5hyr
11545
+ 4mnp
11546
+ 3wix
11547
+ 5tw5
11548
+ 4dem
11549
+ 5di1
11550
+ 3zlq
11551
+ 6en5
11552
+ 4ogv
11553
+ 2r6y
11554
+ 4k8o
11555
+ 6c7w
11556
+ 6eqv
11557
+ 3dx4
11558
+ 5qa7
11559
+ 2qlm
11560
+ 4tn4
11561
+ 5vm6
11562
+ 4x5r
11563
+ 3k23
11564
+ 2q54
11565
+ 2pe1
11566
+ 4k2y
11567
+ 4rfc
11568
+ 6hqy
11569
+ 4ie3
11570
+ 2qbq
11571
+ 5o07
11572
+ 7std
11573
+ 4mm7
11574
+ 3axz
11575
+ 1a85
11576
+ 5etb
11577
+ 4ylj
11578
+ 2vpe
11579
+ 2qln
11580
+ 5zfi
11581
+ 5wbk
11582
+ 4bty
11583
+ 1nvs
11584
+ 5ftq
11585
+ 3dyo
11586
+ 2c9t
11587
+ 2h6b
11588
+ 5nz4
11589
+ 5ar4
11590
+ 1xfv
11591
+ 6g6z
11592
+ 4e1n
11593
+ 2q2y
11594
+ 2zwz
11595
+ 2ozr
11596
+ 3fei
11597
+ 2xe4
11598
+ 3nin
11599
+ 4py1
11600
+ 4j52
11601
+ 4mrf
11602
+ 1ngw
11603
+ 1m7q
11604
+ 4m8t
11605
+ 4qhp
11606
+ 1my3
11607
+ 5fgk
11608
+ 2ff2
11609
+ 5qat
11610
+ 3chf
11611
+ 4kox
11612
+ 2nsx
11613
+ 5v1b
11614
+ 1det
11615
+ 4cq0
11616
+ 2iwx
11617
+ 4isi
11618
+ 4bcg
11619
+ 1pro
11620
+ 4ebv
11621
+ 2jh5
11622
+ 4mm6
11623
+ 4b82
11624
+ 4m5m
11625
+ 4pp3
11626
+ 3v31
11627
+ 4yt7
11628
+ 3vp4
11629
+ 3imy
11630
+ 1rbo
11631
+ 4b1c
11632
+ 2w7y
11633
+ 1k9q
11634
+ 4erq
11635
+ 5tzh
11636
+ 2a3x
11637
+ 5f61
11638
+ 5orj
11639
+ 3efr
11640
+ 5g4o
11641
+ 3el1
11642
+ 5lkr
11643
+ 1mn9
11644
+ 5wdw
11645
+ 4oym
11646
+ 2iko
11647
+ 1h39
11648
+ 5ddd
11649
+ 5nu1
11650
+ 4qz4
11651
+ 1o5p
11652
+ 4i9c
11653
+ 3wmc
11654
+ 4ajl
11655
+ 4zud
11656
+ 3kig
11657
+ 3wde
11658
+ 6fdu
11659
+ 4psb
11660
+ 4bw2
11661
+ 1hk1
11662
+ 1pph
11663
+ 3wut
11664
+ 2ate
11665
+ 3nw9
11666
+ 5ceo
11667
+ 1g2m
11668
+ 1o3i
11669
+ 5ald
11670
+ 2hjb
11671
+ 2cvd
11672
+ 5za1
11673
+ 4cnh
11674
+ 4pnw
11675
+ 4uwl
11676
+ 5c1w
11677
+ 1m0n
11678
+ 2nsj
11679
+ 4imz
11680
+ 4x3k
11681
+ 4zgk
11682
+ 3pvu
11683
+ 2fah
11684
+ 6ex0
11685
+ 6gz9
11686
+ 1ke8
11687
+ 3tkw
11688
+ 2xfj
11689
+ 4bo9
11690
+ 4cqf
11691
+ 5sy2
11692
+ 3fh7
11693
+ 5u4f
11694
+ 1k27
11695
+ 1msn
11696
+ 5ow8
11697
+ 5ekx
11698
+ 4re2
11699
+ 1dar
11700
+ 6iil
11701
+ 1bmm
11702
+ 3tsk
11703
+ 3tzm
11704
+ 3ttz
11705
+ 4ibd
11706
+ 4zx5
11707
+ 2c4w
11708
+ 1qf4
11709
+ 2xgo
11710
+ 3bm6
11711
+ 2zfp
11712
+ 2uyw
11713
+ 3h8b
11714
+ 4acx
11715
+ 3ouh
11716
+ 4zy1
11717
+ 1c1u
11718
+ 3btc
11719
+ 1azl
11720
+ 4ivd
11721
+ 1o4i
11722
+ 5evb
11723
+ 5dpx
11724
+ 4k9y
11725
+ 3wtm
11726
+ 6mxe
11727
+ 5ygf
11728
+ 4mpe
11729
+ 1o2q
11730
+ 3ru1
11731
+ 4qtb
11732
+ 3qw6
11733
+ 4yb5
11734
+ 2hzl
11735
+ 1b5i
11736
+ 1dbb
11737
+ 4gj3
11738
+ 6bfw
11739
+ 5y5n
11740
+ 5jdi
11741
+ 1kv5
11742
+ 4der
11743
+ 2xc4
11744
+ 3s3r
11745
+ 5lso
11746
+ 5cu4
11747
+ 1ysi
11748
+ 1n2v
11749
+ 4jje
11750
+ 5akw
11751
+ 5f3z
11752
+ 6dl9
11753
+ 5wgd
11754
+ 3muk
11755
+ 3uw9
11756
+ 1o32
11757
+ 1jq3
11758
+ 2zpk
11759
+ 3hk1
11760
+ 2f1a
11761
+ 2haw
11762
+ 4psx
11763
+ 4i54
11764
+ 5d3s
11765
+ 5m56
11766
+ 2bkz
11767
+ 5dxt
11768
+ 1qw7
11769
+ 4abu
11770
+ 2ggb
11771
+ 5fjw
11772
+ 4k5p
11773
+ 5als
11774
+ 2xjg
11775
+ 5eqy
11776
+ 4g2j
11777
+ 2a4g
11778
+ 3hf8
11779
+ 5em9
11780
+ 4ztl
11781
+ 2g1q
11782
+ 4phv
11783
+ 2ylq
11784
+ 5tdb
11785
+ 6dh7
11786
+ 5kz0
11787
+ 3rz3
11788
+ 2vww
11789
+ 5t4b
11790
+ 5xs8
11791
+ 5j7g
11792
+ 1f4f
11793
+ 5mev
11794
+ 5cgd
11795
+ 3vrw
11796
+ 5t8j
11797
+ 2o48
11798
+ 4cgj
11799
+ 4cts
11800
+ 4cbt
11801
+ 5ofw
11802
+ 3q2m
11803
+ 4ce2
11804
+ 2qqs
11805
+ 2j75
11806
+ 4yw7
11807
+ 5wip
11808
+ 6afl
11809
+ 6b16
11810
+ 2cgu
11811
+ 6exi
11812
+ 3mt8
11813
+ 4o24
11814
+ 1z34
11815
+ 3fvg
11816
+ 1w6j
11817
+ 3py0
11818
+ 5nkc
11819
+ 1os5
11820
+ 3t70
11821
+ 2nwl
11822
+ 6dq4
11823
+ 1n3z
11824
+ 4rme
11825
+ 5z95
11826
+ 1tx7
11827
+ 4yc8
11828
+ 3t0m
11829
+ 4e5w
11830
+ 4f1s
11831
+ 1hvr
11832
+ 1xzx
11833
+ 3fty
11834
+ 6au9
11835
+ 4eu0
11836
+ 4d1a
11837
+ 2p7a
11838
+ 4z2k
11839
+ 5ap7
11840
+ 1sqb
11841
+ 3wt5
11842
+ 3eqb
11843
+ 4o3c
11844
+ 1rgl
11845
+ 3r7q
11846
+ 2ay8
11847
+ 5enm
11848
+ 2ha7
11849
+ 5ak5
11850
+ 6dug
11851
+ 3wgw
11852
+ 6bfn
11853
+ 2fx8
11854
+ 4rxc
11855
+ 6bny
11856
+ 6h36
11857
+ 5ugm
11858
+ 5trj
11859
+ 5xpo
11860
+ 1jmf
11861
+ 3ugc
11862
+ 4jhz
11863
+ 3p44
11864
+ 1zp5
11865
+ 1v2o
11866
+ 5tbe
11867
+ 1o2r
11868
+ 4wsk
11869
+ 5vdk
11870
+ 3rni
11871
+ 3kc3
11872
+ 4mi3
11873
+ 4pb2
11874
+ 5euk
11875
+ 5ukk
11876
+ 2k2g
11877
+ 4a4q
11878
+ 3m6q
11879
+ 3btj
11880
+ 4gtv
11881
+ 5u0g
11882
+ 4zh4
11883
+ 3v2p
11884
+ 3kqt
11885
+ 4im0
11886
+ 1ni1
11887
+ 3wp0
11888
+ 3vi7
11889
+ 4tkn
11890
+ 6bg3
11891
+ 1o1s
11892
+ 5d3l
11893
+ 2qc6
11894
+ 5lp6
11895
+ 2br6
11896
+ 5ku6
11897
+ 5d3c
11898
+ 5ny3
11899
+ 5hjb
11900
+ 4txc
11901
+ 3waw
11902
+ 5b1s
11903
+ 4trz
11904
+ 2pwc
11905
+ 3nu4
11906
+ 4x3i
11907
+ 3djk
11908
+ 3qw7
11909
+ 3ivx
11910
+ 6b8j
11911
+ 1y0x
11912
+ 1ros
11913
+ 4rwj
11914
+ 6bto
11915
+ 3ztc
11916
+ 1jlx
11917
+ 5h13
11918
+ 3l0v
11919
+ 4ycn
11920
+ 5qam
11921
+ 4ke1
11922
+ 3wv1
11923
+ 4mep
11924
+ 5tzc
11925
+ 1bil
11926
+ 4fl2
11927
+ 4u6r
11928
+ 4k6i
11929
+ 4g2r
11930
+ 4pg9
11931
+ 2hzn
11932
+ 3r8v
11933
+ 5d9k
11934
+ 4gg7
11935
+ 3d9n
11936
+ 6cct
11937
+ 2c93
11938
+ 1uxa
11939
+ 3nfk
11940
+ 5f27
11941
+ 2w6n
11942
+ 3k8d
11943
+ 4pl4
11944
+ 4b9z
11945
+ 4uiy
11946
+ 5fl4
11947
+ 2bbb
11948
+ 3b50
11949
+ 1qon
11950
+ 6ce8
11951
+ 1ydk
11952
+ 4jv8
11953
+ 2bw7
11954
+ 1li2
11955
+ 4qag
11956
+ 3hyg
11957
+ 2qtr
11958
+ 2cen
11959
+ 4k3n
11960
+ 2fes
11961
+ 5kat
11962
+ 1dzp
11963
+ 3gcu
11964
+ 4nbn
11965
+ 4ctk
11966
+ 1okx
11967
+ 1nzq
11968
+ 5fb1
11969
+ 4r5v
11970
+ 4b7n
11971
+ 4det
11972
+ 2xm2
11973
+ 3exe
11974
+ 1jje
11975
+ 4wup
11976
+ 3sjt
11977
+ 6h8s
11978
+ 6bx6
11979
+ 5tqf
11980
+ 3lpk
11981
+ 3h1x
11982
+ 4ps1
11983
+ 3c72
11984
+ 2dua
11985
+ 4w97
11986
+ 5diu
11987
+ 2cbs
11988
+ 4zy4
11989
+ 4alx
11990
+ 1y6b
11991
+ 1tnj
11992
+ 2vo7
11993
+ 4qye
11994
+ 4no9
11995
+ 5ct1
11996
+ 4elb
11997
+ 5sz2
11998
+ 4u0x
11999
+ 4ztq
12000
+ 2nnd
12001
+ 1xom
12002
+ 4u0a
12003
+ 4abj
12004
+ 3uh4
12005
+ 1pop
12006
+ 3hv5
12007
+ 3zll
12008
+ 4rak
12009
+ 5u7m
12010
+ 5tuo
12011
+ 5ur1
12012
+ 4iqu
12013
+ 3kac
12014
+ 1t7r
12015
+ 4lqg
12016
+ 4um1
12017
+ 5mwa
12018
+ 3djp
12019
+ 4zeb
12020
+ 3r91
12021
+ 4o77
12022
+ 4r5t
12023
+ 4ijq
12024
+ 4mxa
12025
+ 3d0b
12026
+ 5ld8
12027
+ 3hdz
12028
+ 4bxn
12029
+ 3vb5
12030
+ 5q0j
12031
+ 3rx7
12032
+ 1k9s
12033
+ 6cjy
12034
+ 4zjj
12035
+ 5i3y
12036
+ 2wos
12037
+ 1bbz
12038
+ 4rx5
12039
+ 4nzm
12040
+ 1m5c
12041
+ 4djs
12042
+ 5ect
12043
+ 5ejw
12044
+ 4fvr
12045
+ 5vtb
12046
+ 6f20
12047
+ 1u9x
12048
+ 1zea
12049
+ 2zzu
12050
+ 3rxp
12051
+ 2z3z
12052
+ 1oeb
12053
+ 1gux
12054
+ 4g8r
12055
+ 1hk2
12056
+ 1g4k
12057
+ 5dfp
12058
+ 5vc4
12059
+ 4g55
12060
+ 4o7d
12061
+ 1nc3
12062
+ 6cdp
12063
+ 4gu9
12064
+ 5gvn
12065
+ 3l8s
12066
+ 4bis
12067
+ 4lwi
12068
+ 2baj
12069
+ 5wik
12070
+ 5a7i
12071
+ 4pnq
12072
+ 2xde
12073
+ 6e4u
12074
+ 1b38
12075
+ 3wzn
12076
+ 5nn0
12077
+ 4fl1
12078
+ 3q6k
12079
+ 6eab
12080
+ 3s68
12081
+ 2dq7
12082
+ 1yw7
12083
+ 4x8p
12084
+ 5vgo
12085
+ 2o9k
12086
+ 4yo6
12087
+ 4lv1
12088
+ 5ose
12089
+ 4ogt
12090
+ 5e8a
12091
+ 4k76
12092
+ 2pr9
12093
+ 1z3j
12094
+ 2g72
12095
+ 2l1b
12096
+ 4gdy
12097
+ 1lq2
12098
+ 6g0q
12099
+ 6cv8
12100
+ 5tri
12101
+ 1g7q
12102
+ 1o4g
12103
+ 4cg8
12104
+ 2ohl
12105
+ 1v2u
12106
+ 4b83
12107
+ 3wyj
12108
+ 4hrd
12109
+ 4kfq
12110
+ 1no6
12111
+ 5lax
12112
+ 3egk
12113
+ 3f7g
12114
+ 4deh
12115
+ 5tmp
12116
+ 4rad
12117
+ 3d78
12118
+ 5l72
12119
+ 3avj
12120
+ 4rg0
12121
+ 4qev
12122
+ 5nt4
12123
+ 5svk
12124
+ 2w08
12125
+ 3lxo
12126
+ 3ljj
12127
+ 4kif
12128
+ 2jdo
12129
+ 1v0o
12130
+ 1k3n
12131
+ 1rgj
12132
+ 4zt8
12133
+ 6dub
12134
+ 1zdp
12135
+ 187l
12136
+ 6dkb
12137
+ 3n6k
12138
+ 1tnk
12139
+ 2h2g
12140
+ 4mjq
12141
+ 4a9i
12142
+ 4pre
12143
+ 2gni
12144
+ 3bgb
12145
+ 4ddm
12146
+ 2p99
12147
+ 1mwt
12148
+ 5nhl
12149
+ 6b4w
12150
+ 3sou
12151
+ 3sm2
12152
+ 5qik
12153
+ 1loq
12154
+ 4yqh
12155
+ 5mi7
12156
+ 4ynl
12157
+ 2f9v
12158
+ 1qbn
12159
+ 2r2m
12160
+ 3uod
12161
+ 2w0z
12162
+ 4ewo
12163
+ 1fsy
12164
+ 1tmm
12165
+ 3va4
12166
+ 6b0y
12167
+ 2adu
12168
+ 5dtj
12169
+ 3k3a
12170
+ 1sm2
12171
+ 3ozs
12172
+ 3o8g
12173
+ 5g42
12174
+ 4h3a
12175
+ 2fwp
12176
+ 6fbw
12177
+ 5tzw
12178
+ 1xkk
12179
+ 3tdu
12180
+ 3efj
12181
+ 3as2
12182
+ 2ity
12183
+ 4zwy
12184
+ 2nmy
12185
+ 4amy
12186
+ 5kad
12187
+ 3qtv
12188
+ 3ncq
12189
+ 3p58
12190
+ 6h1i
12191
+ 5x13
12192
+ 4d1d
12193
+ 3zev
12194
+ 6aum
12195
+ 1ftm
12196
+ 4dsu
12197
+ 4hla
12198
+ 2qoe
12199
+ 5em5
12200
+ 3u0p
12201
+ 3dx2
12202
+ 4kn2
12203
+ 4ce1
12204
+ 3np9
12205
+ 3t2w
12206
+ 4b7r
12207
+ 5o9q
12208
+ 1rth
12209
+ 3eq7
12210
+ 3i0s
12211
+ 2pgl
12212
+ 2yj8
12213
+ 2z6w
12214
+ 3v9b
12215
+ 6fs0
12216
+ 1dhi
12217
+ 4hdp
12218
+ 5svz
12219
+ 4mz5
12220
+ 1a1c
12221
+ 4p0n
12222
+ 3n7s
12223
+ 2aow
12224
+ 1j19
12225
+ 4k3o
12226
+ 1m7i
12227
+ 3s9z
12228
+ 1igb
12229
+ 4o9s
12230
+ 5xyx
12231
+ 4mex
12232
+ 5jy3
12233
+ 4jht
12234
+ 3hxc
12235
+ 2c3i
12236
+ 4itp
12237
+ 2fl5
12238
+ 4b12
12239
+ 3o6m
12240
+ 2vd4
12241
+ 3tib
12242
+ 5yj8
12243
+ 1aze
12244
+ 4yad
12245
+ 3w37
12246
+ 5e8z
12247
+ 3pz4
12248
+ 2p4y
12249
+ 2xda
12250
+ 3n4l
12251
+ 1i1e
12252
+ 1tbz
12253
+ 2bxu
12254
+ 1g6r
12255
+ 4h5e
12256
+ 3fuz
12257
+ 1f2p
12258
+ 1b51
12259
+ 5lm6
12260
+ 5gx6
12261
+ 4wn0
12262
+ 3h21
12263
+ 3vd9
12264
+ 1x39
12265
+ 4bjc
12266
+ 6d5g
12267
+ 3nnu
12268
+ 3d32
12269
+ 3l9l
12270
+ 5i3w
12271
+ 4kqo
12272
+ 5w38
12273
+ 2p2i
12274
+ 4ohk
12275
+ 5l2i
12276
+ 4jlh
12277
+ 4whh
12278
+ 3u6i
12279
+ 2khh
12280
+ 4gu6
12281
+ 1snk
12282
+ 1ogu
12283
+ 2p0d
12284
+ 1zeo
12285
+ 1iq1
12286
+ 4knb
12287
+ 3lrh
12288
+ 5a5r
12289
+ 5iok
12290
+ 5t6p
12291
+ 4dx9
12292
+ 2a0t
12293
+ 1c5x
12294
+ 1qxw
12295
+ 3d83
12296
+ 3pwh
12297
+ 5g46
12298
+ 4wy6
12299
+ 1yw2
12300
+ 1bm6
12301
+ 4yhm
12302
+ 1tka
12303
+ 3qpp
12304
+ 4g19
12305
+ 1pdz
12306
+ 4fnz
12307
+ 5mby
12308
+ 3zt1
12309
+ 1y3v
12310
+ 3tv4
12311
+ 2qmd
12312
+ 2phb
12313
+ 1bzf
12314
+ 1m2r
12315
+ 6g46
12316
+ 3hvj
12317
+ 3mhl
12318
+ 4w9c
12319
+ 4imq
12320
+ 5k4i
12321
+ 3abt
12322
+ 3vuc
12323
+ 4oyt
12324
+ 5qb3
12325
+ 5yv5
12326
+ 4n1b
12327
+ 1pip
12328
+ 5iu8
12329
+ 3zke
12330
+ 5f1z
12331
+ 4qvx
12332
+ 4zlz
12333
+ 4qfs
12334
+ 5ai0
12335
+ 4op2
12336
+ 3tfn
12337
+ 5cnm
12338
+ 6dge
12339
+ 1htg
12340
+ 3rey
12341
+ 1o6r
12342
+ 3w5n
12343
+ 4l52
12344
+ 4b76
12345
+ 5qar
12346
+ 4nj9
12347
+ 2kaw
12348
+ 4ezz
12349
+ 3hkt
12350
+ 5e1b
12351
+ 3ufa
12352
+ 4btx
12353
+ 5mgj
12354
+ 2ha2
12355
+ 5cst
12356
+ 5ea5
12357
+ 5kgn
12358
+ 3jq7
12359
+ 1qy2
12360
+ 4fpk
12361
+ 3hq5
12362
+ 1at5
12363
+ 1hge
12364
+ 5ty1
12365
+ 4b3d
12366
+ 1lyb
12367
+ 5mrd
12368
+ 4zt4
12369
+ 4ovg
12370
+ 4pp7
12371
+ 2w5i
12372
+ 6ced
12373
+ 4elg
12374
+ 5j71
12375
+ 3i6o
12376
+ 4mx5
12377
+ 4pnm
12378
+ 2g0g
12379
+ 1x81
12380
+ 4wkp
12381
+ 4kxm
12382
+ 1wht
12383
+ 6q3q
12384
+ 4gee
12385
+ 2f70
12386
+ 2wd1
12387
+ 6aah
12388
+ 4rfm
12389
+ 2wvt
12390
+ 3dx3
12391
+ 5t4v
12392
+ 1owh
12393
+ 4k7o
12394
+ 4z7n
12395
+ 5lla
12396
+ 5ehw
12397
+ 3jsi
12398
+ 1ohr
12399
+ 4k3h
12400
+ 5y8y
12401
+ 1pu8
12402
+ 2mg5
12403
+ 3l5f
12404
+ 3tfk
12405
+ 1t31
12406
+ 3t08
12407
+ 4iho
12408
+ 3i91
12409
+ 5fdg
12410
+ 5ntq
12411
+ 5wa4
12412
+ 2iqg
12413
+ 5tol
12414
+ 4tz2
12415
+ 2d1o
12416
+ 5ene
12417
+ 4mc2
12418
+ 6ax4
12419
+ 4aoi
12420
+ 1u3r
12421
+ 4ngq
12422
+ 3src
12423
+ 5zzw
12424
+ 1sw2
12425
+ 6g2m
12426
+ 5fwa
12427
+ 2q7m
12428
+ 1xn2
12429
+ 5g6u
12430
+ 1puq
12431
+ 5ha9
12432
+ 3s56
12433
+ 3ckt
12434
+ 2jaj
12435
+ 2qlb
12436
+ 1l6y
12437
+ 4tzq
12438
+ 5u9d
12439
+ 4nzb
12440
+ 3ccc
12441
+ 3isj
12442
+ 5kbr
12443
+ 3u92
12444
+ 5duw
12445
+ 3ijh
12446
+ 1uti
12447
+ 5tw2
12448
+ 2hd1
12449
+ 4wuy
12450
+ 1riw
12451
+ 3u8n
12452
+ 6flg
12453
+ 2qzk
12454
+ 5w6o
12455
+ 2v2q
12456
+ 6e86
12457
+ 4e0w
12458
+ 5jf4
12459
+ 5a8y
12460
+ 2nq6
12461
+ 1tsy
12462
+ 1rek
12463
+ 2xht
12464
+ 2gtk
12465
+ 4kom
12466
+ 6en6
12467
+ 3zmz
12468
+ 3efs
12469
+ 4whr
12470
+ 5j1v
12471
+ 1ld8
12472
+ 4o37
12473
+ 5qa5
12474
+ 4c9w
12475
+ 2zdz
12476
+ 1l5q
12477
+ 3kek
12478
+ 2olb
12479
+ 4dru
12480
+ 2h42
12481
+ 5jnn
12482
+ 1w0x
12483
+ 6chh
12484
+ 3av9
12485
+ 3uzc
12486
+ 1l83
12487
+ 2osm
12488
+ 5i89
12489
+ 5wb6
12490
+ 3vdc
12491
+ 1b4h
12492
+ 5e0a
12493
+ 1nyy
12494
+ 4bqg
12495
+ 5otr
12496
+ 5axp
12497
+ 2w1f
12498
+ 4z1q
12499
+ 4q4q
12500
+ 4o07
12501
+ 3g0f
12502
+ 4alv
12503
+ 4jfx
12504
+ 5kjn
12505
+ 2jal
12506
+ 4u0w
12507
+ 2k0g
12508
+ 3uwk
12509
+ 3k26
12510
+ 5iq6
12511
+ 6hjj
12512
+ 3o0g
12513
+ 3iu9
12514
+ 4xg8
12515
+ 4xya
12516
+ 1zfk
12517
+ 3jvk
12518
+ 5wxh
12519
+ 3t0w
12520
+ 5yg3
12521
+ 1xpz
12522
+ 4b60
12523
+ 4j2t
12524
+ 3ds0
12525
+ 2l0i
12526
+ 5y1u
12527
+ 1pxk
12528
+ 4da5
12529
+ 1z6e
12530
+ 6fi5
12531
+ 4pry
12532
+ 8lpr
12533
+ 2ya7
12534
+ 3zv7
12535
+ 4bnz
12536
+ 6bh5
12537
+ 4x2s
12538
+ 2vth
12539
+ 3tvc
12540
+ 5fky
12541
+ 1i43
12542
+ 5jau
12543
+ 4qsw
12544
+ 1m21
12545
+ 4xsy
12546
+ 2pvn
12547
+ 5htl
12548
+ 5o7i
12549
+ 3skh
12550
+ 5wic
12551
+ 1fvt
12552
+ 4rio
12553
+ 3si3
12554
+ 6dnp
12555
+ 4c4n
12556
+ 4ufu
12557
+ 2ork
12558
+ 5dkr
12559
+ 3m59
12560
+ 3tmk
12561
+ 3btr
12562
+ 5nps
12563
+ 2oah
12564
+ 5ywx
12565
+ 4nxs
12566
+ 3r8i
12567
+ 1z1h
12568
+ 4jik
12569
+ 3oui
12570
+ 4pzx
12571
+ 5o1g
12572
+ 5auw
12573
+ 6cjv
12574
+ 1t46
12575
+ 1uwb
12576
+ 4lgu
12577
+ 5xvu
12578
+ 4x3t
12579
+ 5dtk
12580
+ 3qkl
12581
+ 3meu
12582
+ 3ove
12583
+ 5mw3
12584
+ 2zcq
12585
+ 4n7y
12586
+ 4e4a
12587
+ 2avs
12588
+ 4e3l
12589
+ 1hbv
12590
+ 3tl0
12591
+ 3g15
12592
+ 6c5t
12593
+ 4ayu
12594
+ 4aba
12595
+ 5ddb
12596
+ 5mmg
12597
+ 5k09
12598
+ 3fj7
12599
+ 4y3j
12600
+ 3dxg
12601
+ 5a6n
12602
+ 5lvd
12603
+ 3c2f
12604
+ 1w51
12605
+ 4m3m
12606
+ 4x6k
12607
+ 6h7b
12608
+ 5gow
12609
+ 5n6s
12610
+ 2f18
12611
+ 4kvm
12612
+ 6d5j
12613
+ 2qhn
12614
+ 1ppm
12615
+ 5y2f
12616
+ 3fi2
12617
+ 3vzv
12618
+ 4r73
12619
+ 5h5q
12620
+ 5gic
12621
+ 1g6g
12622
+ 5j47
12623
+ 4tmr
12624
+ 4inb
12625
+ 1w6h
12626
+ 1inf
12627
+ 5alk
12628
+ 3u8l
12629
+ 2bvr
12630
+ 5bnj
12631
+ 5nk4
12632
+ 2c6g
12633
+ 5tbn
12634
+ 5ug8
12635
+ 2wbg
12636
+ 6bgv
12637
+ 3fzc
12638
+ 5mgf
12639
+ 4ngp
12640
+ 4akn
12641
+ 3k27
12642
+ 4gfn
12643
+ 1vjc
12644
+ 6ckc
12645
+ 3bqc
12646
+ 2k0x
12647
+ 4o0a
12648
+ 4m1d
12649
+ 3cfn
12650
+ 2ds1
12651
+ 3wnt
12652
+ 2ogy
12653
+ 6d28
12654
+ 6ffi
12655
+ 6eu6
12656
+ 2it4
12657
+ 6h78
12658
+ 3s0e
12659
+ 3u1y
12660
+ 3r04
12661
+ 2nsl
12662
+ 4knj
12663
+ 5q14
12664
+ 1e1x
12665
+ 5cs3
12666
+ 5hfc
12667
+ 5zob
12668
+ 4erz
12669
+ 5ant
12670
+ 6ft4
12671
+ 3th0
12672
+ 1ijr
12673
+ 4lnw
12674
+ 4pyq
12675
+ 2r3t
12676
+ 3ik3
12677
+ 5mkr
12678
+ 2gz7
12679
+ 1joj
12680
+ 1j17
12681
+ 1c5o
12682
+ 3avf
12683
+ 3u0t
12684
+ 1c85
12685
+ 5w99
12686
+ 6d4o
12687
+ 2qic
12688
+ 2tmn
12689
+ 4cc6
12690
+ 5q1h
12691
+ 5k5n
12692
+ 5n55
12693
+ 3rxj
12694
+ 3njy
12695
+ 3srg
12696
+ 4bgm
12697
+ 4m0r
12698
+ 5nxv
12699
+ 5j1w
12700
+ 4bbf
12701
+ 4mot
12702
+ 1tl7
12703
+ 6gf9
12704
+ 4ivs
12705
+ 1bmn
12706
+ 1bju
12707
+ 1w4o
12708
+ 2pmn
12709
+ 1k6p
12710
+ 5akz
12711
+ 4ca7
12712
+ 4joa
12713
+ 1mfa
12714
+ 4qqc
12715
+ 5e2q
12716
+ 1dgm
12717
+ 2fxu
12718
+ 3ejp
12719
+ 1a1b
12720
+ 5k0j
12721
+ 3h5u
12722
+ 5ha1
12723
+ 2rg6
12724
+ 4b77
12725
+ 4hzt
12726
+ 5fe7
12727
+ 4pgd
12728
+ 5uwm
12729
+ 4rro
12730
+ 4rr6
12731
+ 4o70
12732
+ 5cfa
12733
+ 4ke0
12734
+ 4ju3
12735
+ 2wtw
12736
+ 2qhm
12737
+ 3m3z
12738
+ 2vqt
12739
+ 5q0d
12740
+ 3gcv
12741
+ 4fmn
12742
+ 3s7l
12743
+ 5kgx
12744
+ 2n9e
12745
+ 2vmf
12746
+ 4x63
12747
+ 3fc1
12748
+ 1ok7
12749
+ 4pzv
12750
+ 5dd9
12751
+ 4nru
12752
+ 1udu
12753
+ 6fmp
12754
+ 5fxs
12755
+ 4dk7
12756
+ 4e5f
12757
+ 5ml0
12758
+ 3v6s
12759
+ 4eeh
12760
+ 5cjf
12761
+ 5e1o
12762
+ 6bw8
12763
+ 3h2f
12764
+ 1yly
12765
+ 4avs
12766
+ 1sri
12767
+ 1avn
12768
+ 3i25
12769
+ 4fm8
12770
+ 4jg7
12771
+ 3fu6
12772
+ 1g7f
12773
+ 4l1u
12774
+ 4dr9
12775
+ 3ocg
12776
+ 5wal
12777
+ 4dq2
12778
+ 1uz8
12779
+ 4qjp
12780
+ 2onb
12781
+ 4hgc
12782
+ 2lyw
12783
+ 5xgl
12784
+ 1ydr
12785
+ 2r0h
12786
+ 4q4s
12787
+ 5j64
12788
+ 4riv
12789
+ 4cp7
12790
+ 5d9p
12791
+ 1il4
12792
+ 2qaf
12793
+ 1idb
12794
+ 3ft8
12795
+ 1hos
12796
+ 2xnn
12797
+ 5myv
12798
+ 5ti4
12799
+ 3gi5
12800
+ 2wxh
12801
+ 4eo6
12802
+ 3spk
12803
+ 1g3f
12804
+ 5iy4
12805
+ 5h1v
12806
+ 1mem
12807
+ 1rdt
12808
+ 2c92
12809
+ 1n7m
12810
+ 5dif
12811
+ 2py4
12812
+ 4k9h
12813
+ 4ehr
12814
+ 1nfx
12815
+ 4cst
12816
+ 3h0v
12817
+ 5z7j
12818
+ 5ta2
12819
+ 5nu3
12820
+ 3k3i
12821
+ 3gpe
12822
+ 3zq9
12823
+ 5cqj
12824
+ 6axk
12825
+ 4len
12826
+ 4nmp
12827
+ 3b25
12828
+ 3ch9
12829
+ 5djv
12830
+ 4hxw
12831
+ 4euc
12832
+ 1hvy
12833
+ 2jk9
12834
+ 3ncz
12835
+ 3bu1
12836
+ 4bb2
12837
+ 3wqh
12838
+ 6alz
12839
+ 3qch
12840
+ 4jv7
12841
+ 2jmj
12842
+ 3nan
12843
+ 5vds
12844
+ 1oth
12845
+ 2aie
12846
+ 3dln
12847
+ 3mv5
12848
+ 5egu
12849
+ 6dki
12850
+ 5am0
12851
+ 3vrt
12852
+ 6dpt
12853
+ 1ew9
12854
+ 2w6u
12855
+ 2ggu
12856
+ 5jsj
12857
+ 2xwd
12858
+ 3zya
12859
+ 5uwp
12860
+ 2exc
12861
+ 5e28
12862
+ 5k32
12863
+ 3g4f
12864
+ 2xng
12865
+ 4x5y
12866
+ 6esm
12867
+ 1gi4
12868
+ 3d2t
12869
+ 5ttw
12870
+ 3ekx
12871
+ 2uwp
12872
+ 1klg
12873
+ 4y2x
12874
+ 2dwx
12875
+ 5lrq
12876
+ 1od8
12877
+ 1ceb
12878
+ 4ci2
12879
+ 4m5i
12880
+ 1o9e
12881
+ 1nq7
12882
+ 3ejr
12883
+ 4wrs
12884
+ 4eor
12885
+ 1g1d
12886
+ 5ofi
12887
+ 2kce
12888
+ 5eko
12889
+ 4mhy
12890
+ 1ndy
12891
+ 2cc7
12892
+ 2nnk
12893
+ 5ybe
12894
+ 1svg
12895
+ 5aes
12896
+ 4pkr
12897
+ 5j5x
12898
+ 1cka
12899
+ 3n7h
12900
+ 3pxe
12901
+ 4kz7
12902
+ 1tcw
12903
+ 3v5j
12904
+ 2xp8
12905
+ 4bv2
12906
+ 1gyx
12907
+ 4i6b
12908
+ 4oyb
12909
+ 1ox9
12910
+ 2wf2
12911
+ 3zvv
12912
+ 5f90
12913
+ 1rdj
12914
+ 4azb
12915
+ 3o5n
12916
+ 4uub
12917
+ 3d1e
12918
+ 1n5z
12919
+ 6b1y
12920
+ 2o7e
12921
+ 3o9v
12922
+ 4hf4
12923
+ 6ety
12924
+ 2qi6
12925
+ 5jas
12926
+ 3gss
12927
+ 1c5c
12928
+ 5l9l
12929
+ 3ifp
12930
+ 1czk
12931
+ 4fi9
12932
+ 1cs4
12933
+ 5e88
12934
+ 1mui
12935
+ 5vp1
12936
+ 3vbt
12937
+ 3sh0
12938
+ 4avw
12939
+ 2asu
12940
+ 1qf1
12941
+ 3hjo
12942
+ 1iiq
12943
+ 2r2w
12944
+ 1ztq
12945
+ 2c8x
12946
+ 3wtj
12947
+ 4lys
12948
+ 4lv2
12949
+ 1u9v
12950
+ 1dx6
12951
+ 3mtd
12952
+ 5ii1
12953
+ 6d5h
12954
+ 4nku
12955
+ 1w1d
12956
+ 1e37
12957
+ 4pte
12958
+ 3c7p
12959
+ 4glx
12960
+ 1hvi
12961
+ 5f4n
12962
+ 5f6u
12963
+ 1y8p
12964
+ 3kwb
12965
+ 5ggj
12966
+ 3gx0
12967
+ 3ztx
12968
+ 4uiz
12969
+ 3ueu
12970
+ 2vl4
12971
+ 2wnl
12972
+ 4cl9
12973
+ 5aqn
12974
+ 3ipx
12975
+ 1i72
12976
+ 3dsz
12977
+ 3mhi
12978
+ 4ap7
12979
+ 2xcg
12980
+ 7prc
12981
+ 4e3o
12982
+ 4int
12983
+ 1zz1
12984
+ 5bjt
12985
+ 2xas
12986
+ 4ny3
12987
+ 5fi7
12988
+ 1owi
12989
+ 3qd4
12990
+ 1ew8
12991
+ 4hdc
12992
+ 4ac3
12993
+ 3h52
12994
+ 5nzp
12995
+ 1lbf
12996
+ 1b6p
12997
+ 2ylp
12998
+ 3qxt
12999
+ 3kyr
13000
+ 1ksn
13001
+ 3twp
13002
+ 3ti8
13003
+ 6ewe
13004
+ 3t19
13005
+ 3eu5
13006
+ 4dvf
13007
+ 2flr
13008
+ 1oe7
13009
+ 2or9
13010
+ 1v2m
13011
+ 5fjx
13012
+ 1m2x
13013
+ 2c80
13014
+ 3nm6
13015
+ 4nak
13016
+ 5y12
13017
+ 2vwn
13018
+ 5chk
13019
+ 4dv9
13020
+ 5akg
13021
+ 5x9h
13022
+ 5m7u
13023
+ 6axp
13024
+ 1upk
13025
+ 1j36
13026
+ 1uv6
13027
+ 3f2a
13028
+ 1yet
13029
+ 1ov3
13030
+ 4kwp
13031
+ 1eoj
13032
+ 1yms
13033
+ 4o4g
13034
+ 3p0g
13035
+ 3tsz
13036
+ 2hrm
13037
+ 4tz8
13038
+ 5k4x
13039
+ 3hzk
13040
+ 5w5s
13041
+ 4ekg
13042
+ 6gh9
13043
+ 5l26
13044
+ 2lsv
13045
+ 6eqa
13046
+ 5fp0
13047
+ 2zc9
13048
+ 2jk7
13049
+ 3h03
13050
+ 3wb4
13051
+ 1xow
13052
+ 4hra
13053
+ 4mk1
13054
+ 1y20
13055
+ 5yc1
13056
+ 5lx6
13057
+ 2er0
13058
+ 5x4m
13059
+ 4yih
13060
+ 3mbl
13061
+ 4a7i
13062
+ 5koq
13063
+ 3cx9
13064
+ 4zom
13065
+ 5sz6
13066
+ 2vqm
13067
+ 4yyn
13068
+ 1syh
13069
+ 1kwr
13070
+ 6cyb
13071
+ 3pm1
13072
+ 2zym
13073
+ 5fv7
13074
+ 3shz
13075
+ 4dmw
13076
+ 6b5j
13077
+ 1wn6
13078
+ 5vd3
13079
+ 1njc
13080
+ 3ew2
13081
+ 5ia3
13082
+ 4a4e
13083
+ 4a9n
13084
+ 4iue
13085
+ 2ewa
13086
+ 1akq
13087
+ 5ew9
13088
+ 5n8e
13089
+ 5a54
13090
+ 4kow
13091
+ 5w6i
13092
+ 5mgg
13093
+ 5oh3
13094
+ 4hmh
13095
+ 4f3i
13096
+ 3kd7
13097
+ 1w9u
13098
+ 5da3
13099
+ 4n6y
13100
+ 5jid
13101
+ 1juy
13102
+ 1f0r
13103
+ 5iw0
13104
+ 4ui4
13105
+ 3tkh
13106
+ 2gc8
13107
+ 5qc4
13108
+ 3wzj
13109
+ 5l7h
13110
+ 2kdh
13111
+ 1uyk
13112
+ 3sv9
13113
+ 3uex
13114
+ 5os8
13115
+ 4b7q
13116
+ 5a6i
13117
+ 5om2
13118
+ 5lgu
13119
+ 5uwk
13120
+ 5u1q
13121
+ 5unh
13122
+ 5nw1
13123
+ 4ck3
13124
+ 1g42
13125
+ 2e9d
13126
+ 5k4z
13127
+ 4l7b
13128
+ 1so2
13129
+ 6esn
13130
+ 4pzw
13131
+ 1ida
13132
+ 3jzb
13133
+ 1o2h
13134
+ 5qby
13135
+ 1nde
13136
+ 6dne
13137
+ 1f8d
13138
+ 1nvr
13139
+ 5ult
13140
+ 1db5
13141
+ 4fr3
13142
+ 1g27
13143
+ 5alo
13144
+ 4o15
13145
+ 4ryd
13146
+ 4oar
13147
+ 4zy0
13148
+ 4pd7
13149
+ 2aq7
13150
+ 6etj
13151
+ 2zga
13152
+ 5luu
13153
+ 2l8j
13154
+ 2wky
13155
+ 4yrg
13156
+ 2v22
13157
+ 2ndf
13158
+ 1jsv
13159
+ 4u6z
13160
+ 5wdl
13161
+ 5nxi
13162
+ 6i8y
13163
+ 4u6x
13164
+ 4mw5
13165
+ 4ufj
13166
+ 3ith
13167
+ 1wb0
13168
+ 3arq
13169
+ 3dp2
13170
+ 1lst
13171
+ 3fqh
13172
+ 2ivu
13173
+ 1s5q
13174
+ 6ct7
13175
+ 3pma
13176
+ 2ay6
13177
+ 5e90
13178
+ 1re1
13179
+ 4l32
13180
+ 3g5k
13181
+ 4bw4
13182
+ 1pig
13183
+ 2xne
13184
+ 4qme
13185
+ 5ugh
13186
+ 1cpi
13187
+ 3wd9
13188
+ 1o5m
13189
+ 5wfc
13190
+ 4tun
13191
+ 2y77
13192
+ 5laq
13193
+ 1ax2
13194
+ 1b39
13195
+ 3hrb
13196
+ 5dah
13197
+ 5iza
13198
+ 3thb
13199
+ 4bi1
13200
+ 3arf
13201
+ 4gwi
13202
+ 6h1h
13203
+ 5yzc
13204
+ 5tg6
13205
+ 3vap
13206
+ 4lkh
13207
+ 2q2a
13208
+ 2fle
13209
+ 2vwf
13210
+ 1g2k
13211
+ 2zz2
13212
+ 5xhz
13213
+ 4zim
13214
+ 5fwr
13215
+ 6g9i
13216
+ 6bqa
13217
+ 1g45
13218
+ 2ao6
13219
+ 3is9
13220
+ 2qmf
13221
+ 3wt7
13222
+ 5vlr
13223
+ 5fnr
13224
+ 6cf6
13225
+ 5z9e
13226
+ 5t1m
13227
+ 3uda
13228
+ 3ur9
13229
+ 3dp4
13230
+ 6c7g
13231
+ 5kmf
13232
+ 4hev
13233
+ 6d8e
13234
+ 5acx
13235
+ 5ouh
13236
+ 2fde
13237
+ 6c0n
13238
+ 4wt2
13239
+ 6cgp
13240
+ 5zun
13241
+ 5dyt
13242
+ 4cwb
13243
+ 2xcs
13244
+ 4z1k
13245
+ 4b11
13246
+ 2i2c
13247
+ 5ko5
13248
+ 2ccc
13249
+ 1xgj
13250
+ 6ayt
13251
+ 1ao8
13252
+ 1c5z
13253
+ 1b9v
13254
+ 5toe
13255
+ 4bup
13256
+ 4pox
13257
+ 1gnn
13258
+ 3a4o
13259
+ 5kqg
13260
+ 5am1
13261
+ 1i37
13262
+ 5uln
13263
+ 3gpo
13264
+ 2g01
13265
+ 3fx6
13266
+ 5nba
13267
+ 1ke7
13268
+ 3my5
13269
+ 4inr
13270
+ 1jp5
13271
+ 5eqe
13272
+ 4muf
13273
+ 5xhr
13274
+ 2wmr
13275
+ 3ebb
13276
+ 4mwe
13277
+ 5ap4
13278
+ 5nwg
13279
+ 4bde
13280
+ 4x8t
13281
+ 5tbp
13282
+ 1nl6
13283
+ 4oz1
13284
+ 3hvc
13285
+ 1flr
13286
+ 3pz3
13287
+ 4pkv
13288
+ 4drp
13289
+ 4awk
13290
+ 2ww2
13291
+ 4tw9
13292
+ 5b4k
13293
+ 4o3t
13294
+ 4fev
13295
+ 1o45
13296
+ 4bcj
13297
+ 3upz
13298
+ 1qkb
13299
+ 5nho
13300
+ 2x8d
13301
+ 1aq7
13302
+ 5y21
13303
+ 5i12
13304
+ 3bsc
13305
+ 4wki
13306
+ 1ouy
13307
+ 4jv9
13308
+ 5o9h
13309
+ 4ez3
13310
+ 3dd8
13311
+ 5h7g
13312
+ 1sqt
13313
+ 3pn4
13314
+ 5aab
13315
+ 2ha4
13316
+ 4enx
13317
+ 1bap
13318
+ 6enm
13319
+ 5za7
13320
+ 3dbs
13321
+ 4mvw
13322
+ 3vrv
13323
+ 2azc
13324
+ 3mss
13325
+ 3i60
13326
+ 5hld
13327
+ 1zkn
13328
+ 2wxq
13329
+ 4y87
13330
+ 4m8x
13331
+ 5f6v
13332
+ 4fhi
13333
+ 5auv
13334
+ 4l7c
13335
+ 3wcl
13336
+ 3gqo
13337
+ 5dhh
13338
+ 5u7d
13339
+ 6grp
13340
+ 1xm6
13341
+ 1lpz
13342
+ 2wf3
13343
+ 5cvd
13344
+ 3zmj
13345
+ 1g6s
13346
+ 5eud
13347
+ 3wyl
13348
+ 4wh9
13349
+ 2oq6
13350
+ 2who
13351
+ 2nmz
13352
+ 2xiz
13353
+ 4hbx
13354
+ 6bee
13355
+ 3ryx
13356
+ 5ans
13357
+ 5zvw
13358
+ 4zwz
13359
+ 3q96
13360
+ 4v05
13361
+ 2pyi
13362
+ 5jf2
13363
+ 1wbv
13364
+ 6c7x
13365
+ 4qsx
13366
+ 5m6u
13367
+ 1m0b
13368
+ 3qg6
13369
+ 3nyn
13370
+ 1rm8
13371
+ 3lhj
13372
+ 3g86
13373
+ 1ny0
13374
+ 3zj8
13375
+ 3o6l
13376
+ 2vb8
13377
+ 3p9l
13378
+ 4bnx
13379
+ 5uch
13380
+ 3sv6
13381
+ 4ynk
13382
+ 223l
13383
+ 5orr
13384
+ 4o0j
13385
+ 3rxm
13386
+ 3rw9
13387
+ 4po0
13388
+ 4z84
13389
+ 4j21
13390
+ 5yjm
13391
+ 5n25
13392
+ 4bky
13393
+ 5svy
13394
+ 5gmn
13395
+ 5u7l
13396
+ 1f1j
13397
+ 4aji
13398
+ 2oj9
13399
+ 3oli
13400
+ 4r4o
13401
+ 6ayq
13402
+ 4ql1
13403
+ 3f7h
13404
+ 1skj
13405
+ 2wfj
13406
+ 1wc1
13407
+ 2r3o
13408
+ 3m5a
13409
+ 4lwc
13410
+ 4j5p
13411
+ 3n46
13412
+ 3ttp
13413
+ 1jjk
13414
+ 5vb7
13415
+ 2pj0
13416
+ 3dog
13417
+ 2bve
13418
+ 5vfm
13419
+ 4l2x
13420
+ 3uvp
13421
+ 4wno
13422
+ 4y38
13423
+ 2c69
13424
+ 6eol
13425
+ 2zx9
13426
+ 4q9s
13427
+ 2alv
13428
+ 3ip6
13429
+ 3sio
13430
+ 4xoe
13431
+ 3sn8
13432
+ 5y48
13433
+ 5u7j
13434
+ 2g9u
13435
+ 5f3g
13436
+ 1xh4
13437
+ 4j79
13438
+ 5ghv
13439
+ 2i2b
13440
+ 1gah
13441
+ 3kr1
13442
+ 5npd
13443
+ 5mks
13444
+ 1jwu
13445
+ 4inh
13446
+ 1tsl
13447
+ 2wq5
13448
+ 6hkj
13449
+ 2yek
13450
+ 1hpo
13451
+ 2e9u
13452
+ 3mwe
13453
+ 6aff
13454
+ 6e99
13455
+ 5vlp
13456
+ 2uxu
13457
+ 4u5t
13458
+ 2hwo
13459
+ 4ury
13460
+ 5byi
13461
+ 4v04
13462
+ 5e2s
13463
+ 3lj3
13464
+ 4bt9
13465
+ 4jt9
13466
+ 4rce
13467
+ 6b5a
13468
+ 4mk2
13469
+ 4idv
13470
+ 4bah
13471
+ 5lj1
13472
+ 3nal
13473
+ 6d1m
13474
+ 5kpm
13475
+ 5sxn
13476
+ 5doh
13477
+ 3m6f
13478
+ 3u81
13479
+ 6bsk
13480
+ 5nfa
13481
+ 1gui
13482
+ 5i1q
13483
+ 4m4q
13484
+ 4ow0
13485
+ 5ur6
13486
+ 5iu7
13487
+ 6d55
13488
+ 1bma
13489
+ 5uxn
13490
+ 1oiu
13491
+ 4ln7
13492
+ 2a5s
13493
+ 5u5k
13494
+ 5e4w
13495
+ 6afg
13496
+ 1oyq
13497
+ 4xtv
13498
+ 6eji
13499
+ 2xdl
13500
+ 3at1
13501
+ 1njb
13502
+ 4u0g
13503
+ 1tou
13504
+ 2pj5
13505
+ 4q1a
13506
+ 3avk
13507
+ 6fmj
13508
+ 2lto
13509
+ 6czc
13510
+ 3u8m
13511
+ 2y5l
13512
+ 3fjg
13513
+ 5uwi
13514
+ 6m9c
13515
+ 5m3b
13516
+ 3jxw
13517
+ 4zqt
13518
+ 5vc6
13519
+ 4bj8
13520
+ 5knx
13521
+ 3pix
13522
+ 3e6k
13523
+ 4f7j
13524
+ 3koo
13525
+ 5n3y
13526
+ 3jdw
13527
+ 4pl3
13528
+ 4nmo
13529
+ 1ody
13530
+ 5tpc
13531
+ 6bsm
13532
+ 2cm8
13533
+ 5gn7
13534
+ 5hu0
13535
+ 1yq7
13536
+ 2gga
13537
+ 4wy7
13538
+ 2brn
13539
+ 2fmb
13540
+ 5h14
13541
+ 4zsp
13542
+ 3nus
13543
+ 5n8t
13544
+ 5x5g
13545
+ 5yls
13546
+ 5u3b
13547
+ 4x14
13548
+ 4puk
13549
+ 5wa6
13550
+ 2y2h
13551
+ 4nmt
13552
+ 3uol
13553
+ 4jzf
13554
+ 4um3
13555
+ 2wi2
13556
+ 5k7h
13557
+ 2ccu
13558
+ 5x74
13559
+ 3ddb
13560
+ 1xd0
13561
+ 4mm9
13562
+ 1ulg
13563
+ 3coz
13564
+ 5epl
13565
+ 4ear
13566
+ 3vzg
13567
+ 4qac
13568
+ 4l5j
13569
+ 1ai6
13570
+ 3d8w
13571
+ 4mi9
13572
+ 2wtv
13573
+ 5iee
13574
+ 6ge0
13575
+ 4q3r
13576
+ 4qb3
13577
+ 5cdh
13578
+ 2vip
13579
+ 3pz2
13580
+ 3f34
13581
+ 3rin
13582
+ 5n2t
13583
+ 5oqw
13584
+ 3p4w
13585
+ 4n5g
13586
+ 2pvl
13587
+ 1b0f
13588
+ 5a3u
13589
+ 4p6x
13590
+ 3rsx
13591
+ 3rcj
13592
+ 5i9z
13593
+ 5ai6
13594
+ 3c4f
13595
+ 1o4o
13596
+ 1sje
13597
+ 3alt
13598
+ 5f41
13599
+ 5yid
13600
+ 3bgl
13601
+ 4rdn
13602
+ 4hso
13603
+ 4ttv
13604
+ 3ok9
13605
+ 5y0g
13606
+ 5twx
13607
+ 1wug
13608
+ 4a6c
13609
+ 3gxt
13610
+ 4lzs
13611
+ 2zx7
13612
+ 3jvr
13613
+ 4as9
13614
+ 3f1a
13615
+ 4cd6
13616
+ 4r06
13617
+ 5h5s
13618
+ 7gpb
13619
+ 5w0i
13620
+ 4wp7
13621
+ 5e3g
13622
+ 2ydt
13623
+ 3hec
13624
+ 4abb
13625
+ 5ji8
13626
+ 4olc
13627
+ 5ieo
13628
+ 5i2z
13629
+ 4zy5
13630
+ 4qga
13631
+ 3t82
13632
+ 4dff
13633
+ 2wr8
13634
+ 3i7b
13635
+ 3v4v
13636
+ 4gw8
13637
+ 2hxl
13638
+ 4bkt
13639
+ 2wj2
13640
+ 4xwk
13641
+ 1xk9
13642
+ 2fgh
13643
+ 3tv7
13644
+ 1ajp
13645
+ 1tps
13646
+ 3ag9
13647
+ 3ls4
13648
+ 5wg3
13649
+ 1cwb
13650
+ 3mea
13651
+ 1bjr
13652
+ 5auz
13653
+ 2y67
13654
+ 1q9m
13655
+ 2i3i
13656
+ 5ivj
13657
+ 2vd1
13658
+ 1p2g
13659
+ 4l7n
13660
+ 5efj
13661
+ 4jzi
13662
+ 5mt0
13663
+ 5vlk
13664
+ 3rt4
13665
+ 3zmm
13666
+ 2wn9
13667
+ 2a25
13668
+ 4hmq
13669
+ 5jim
13670
+ 4zyy
13671
+ 4bi7
13672
+ 1f3j
13673
+ 2b52
13674
+ 2xkd
13675
+ 1yys
13676
+ 3vtr
13677
+ 3zrl
13678
+ 4kao
13679
+ 5fnc
13680
+ 1ke5
13681
+ 6hwz
13682
+ 4d0x
13683
+ 4mzs
13684
+ 5mi5
13685
+ 4a6l
13686
+ 4eg5
13687
+ 1sgu
13688
+ 1w11
13689
+ 3dkg
13690
+ 1s19
13691
+ 4wyo
13692
+ 3ppp
13693
+ 4rlu
13694
+ 2jjk
13695
+ 5ovp
13696
+ 6cd9
13697
+ 3qio
13698
+ 4wrb
13699
+ 5h9p
13700
+ 5na0
13701
+ 1wbt
13702
+ 4ael
13703
+ 5eu1
13704
+ 3nw6
13705
+ 1c5s
13706
+ 1bl4
13707
+ 4ual
13708
+ 2vsl
13709
+ 5vil
13710
+ 4o7f
13711
+ 4dhr
13712
+ 1pw6
13713
+ 1b5j
13714
+ 3iad
13715
+ 3rwg
13716
+ 5wev
13717
+ 2pjc
13718
+ 3vi2
13719
+ 4xum
13720
+ 2zmm
13721
+ 3fv3
13722
+ 3ukr
13723
+ 5myo
13724
+ 3lbl
13725
+ 4ppb
13726
+ 1q0b
13727
+ 4abi
13728
+ 6bfx
13729
+ 3kmy
13730
+ 1q6k
13731
+ 5ygd
13732
+ 4z90
13733
+ 4omk
13734
+ 5bqh
13735
+ 5lrj
13736
+ 1ta6
13737
+ 5jzn
13738
+ 3ad8
13739
+ 5izu
13740
+ 5ief
13741
+ 4frj
13742
+ 2pre
13743
+ 4ty6
13744
+ 5nut
13745
+ 3zps
13746
+ 5e2v
13747
+ 4k43
13748
+ 5e7r
13749
+ 6ftz
13750
+ 3rjc
13751
+ 4uvc
13752
+ 3uix
13753
+ 5up3
13754
+ 4i31
13755
+ 2p15
13756
+ 1ag9
13757
+ 1gu3
13758
+ 6cw8
13759
+ 5ei8
13760
+ 4psh
13761
+ 4i71
13762
+ 6f9v
13763
+ 4yoz
13764
+ 6hu3
13765
+ 5e2m
13766
+ 5ap5
13767
+ 4fob
13768
+ 1uod
13769
+ 3e63
13770
+ 3mke
13771
+ 3io7
13772
+ 4whz
13773
+ 4mt9
13774
+ 4x21
13775
+ 1ppi
13776
+ 3t2p
13777
+ 5q0g
13778
+ 5jjs
13779
+ 5icp
13780
+ 4g1f
13781
+ 1csh
13782
+ 4mwr
13783
+ 5al1
13784
+ 4qxq
13785
+ 3o9c
13786
+ 2wxo
13787
+ 4xmo
13788
+ 4ii9
13789
+ 5mlj
13790
+ 5hm3
13791
+ 4wr7
13792
+ 3iqv
13793
+ 4xjr
13794
+ 4w9w
13795
+ 5fhm
13796
+ 5o2d
13797
+ 5osk
13798
+ 3bv9
13799
+ 3i7e
13800
+ 4kz4
13801
+ 2oz7
13802
+ 2vzr
13803
+ 4d4d
13804
+ 6eog
13805
+ 1sfi
13806
+ 5ehq
13807
+ 5uy8
13808
+ 4u0d
13809
+ 2l11
13810
+ 5v2p
13811
+ 2vtm
13812
+ 5ef8
13813
+ 1zgv
13814
+ 4b2l
13815
+ 3g32
13816
+ 4q4e
13817
+ 4x50
13818
+ 2oax
13819
+ 4r8y
13820
+ 4c61
13821
+ 1w2g
13822
+ 5er2
13823
+ 6b2q
13824
+ 3up7
13825
+ 3r01
13826
+ 5izq
13827
+ 3l6f
13828
+ 3p9m
13829
+ 6ey8
13830
+ 4b74
13831
+ 6fyz
13832
+ 5g1z
13833
+ 3t4h
13834
+ 4wkc
13835
+ 2o4j
13836
+ 4bdj
13837
+ 5mek
13838
+ 4gvd
13839
+ 5qil
13840
+ 5go2
13841
+ 4m3f
13842
+ 2e2b
13843
+ 4bi2
13844
+ 5l0h
13845
+ 5ugb
13846
+ 2ynd
13847
+ 2wzz
13848
+ 5wyz
13849
+ 5may
13850
+ 4eev
13851
+ 4kiw
13852
+ 2y7i
13853
+ 3tcg
13854
+ 3s71
13855
+ 2qcd
13856
+ 1vru
13857
+ 1h00
13858
+ 4uat
13859
+ 3nyd
13860
+ 3bkk
13861
+ 4n9d
13862
+ 1bim
13863
+ 5zt1
13864
+ 5ml2
13865
+ 3fpd
13866
+ 6h7o
13867
+ 3p8z
13868
+ 4ewh
13869
+ 4uva
13870
+ 5a3q
13871
+ 4dai
13872
+ 5o55
13873
+ 3f69
13874
+ 4puz
13875
+ 6f05
13876
+ 3g35
13877
+ 4qij
13878
+ 4dxg
13879
+ 3n1c
13880
+ 4dt2
13881
+ 5u2e
13882
+ 6c7j
13883
+ 4cj4
13884
+ 2w7x
13885
+ 6ccq
13886
+ 4qsv
13887
+ 5upz
13888
+ 5iu4
13889
+ 4x0u
13890
+ 3ffg
13891
+ 1y6q
13892
+ 5yba
13893
+ 2f35
13894
+ 4gsc
13895
+ 1jt1
13896
+ 4qvl
13897
+ 1mwn
13898
+ 1jeu
13899
+ 1c3b
13900
+ 3ual
13901
+ 1rdn
13902
+ 6dh1
13903
+ 5iub
13904
+ 5mav
13905
+ 3t83
13906
+ 3iof
13907
+ 6cwf
13908
+ 2fpz
13909
+ 5ml4
13910
+ 5f2f
13911
+ 1fpc
13912
+ 7kme
13913
+ 1erq
13914
+ 5t9u
13915
+ 1lhv
13916
+ 2qlf
13917
+ 5jmp
13918
+ 4s3f
13919
+ 6hm6
13920
+ 2qt9
13921
+ 4p4i
13922
+ 5vih
13923
+ 3mnu
13924
+ 6djc
13925
+ 5db0
13926
+ 4ca5
13927
+ 5mar
13928
+ 2w77
13929
+ 2avq
13930
+ 5eqq
13931
+ 1tsi
13932
+ 2vw2
13933
+ 2ggd
13934
+ 6f6d
13935
+ 1f4g
13936
+ 3ndm
13937
+ 3l17
13938
+ 5fni
13939
+ 1c3r
13940
+ 5trf
13941
+ 1pau
13942
+ 3vf3
13943
+ 6ewk
13944
+ 4re9
13945
+ 4li8
13946
+ 1n4k
13947
+ 4l7o
13948
+ 5kbh
13949
+ 2gj5
13950
+ 2oh4
13951
+ 4wht
13952
+ 1joc
13953
+ 1qxl
13954
+ 6elp
13955
+ 4qge
13956
+ 2yfe
13957
+ 3g8e
13958
+ 1cea
13959
+ 6czu
13960
+ 2fs8
13961
+ 4mk5
13962
+ 4i60
13963
+ 5h5f
13964
+ 5tcc
13965
+ 4dpy
13966
+ 3fh8
13967
+ 4h1m
13968
+ 2anm
13969
+ 1uom
13970
+ 3oim
13971
+ 6ezi
13972
+ 3da9
13973
+ 3r17
13974
+ 1l6s
13975
+ 2uzv
13976
+ 4e1e
13977
+ 3d28
13978
+ 4bcd
13979
+ 5kez
13980
+ 1bn4
13981
+ 6biz
13982
+ 4lwt
13983
+ 3jpv
13984
+ 5fsm
13985
+ 4mds
13986
+ 4e6c
13987
+ 3o9p
13988
+ 2ylo
13989
+ 5fyq
13990
+ 4hxq
13991
+ 3mj1
13992
+ 1zm7
13993
+ 1s63
13994
+ 4nk9
13995
+ 4qmm
13996
+ 3g7l
13997
+ 4zyw
13998
+ 2q8i
13999
+ 3gfe
14000
+ 5ih6
14001
+ 4owo
14002
+ 5o5f
14003
+ 4x7q
14004
+ 4wy1
14005
+ 1qbq
14006
+ 5du6
14007
+ 1oe0
14008
+ 1g2a
14009
+ 4iie
14010
+ 4x5p
14011
+ 1fhr
14012
+ 1ntv
14013
+ 1o5f
14014
+ 3os3
14015
+ 2cbz
14016
+ 4arb
14017
+ 3lxs
14018
+ 3lxg
14019
+ 2g94
14020
+ 4gv1
14021
+ 5n87
14022
+ 1gz9
14023
+ 1b6l
14024
+ 5kcx
14025
+ 2xni
14026
+ 3ig6
14027
+ 1p4u
14028
+ 3r9h
14029
+ 5n2x
14030
+ 6aud
14031
+ 5nee
14032
+ 1bnw
14033
+ 6bo6
14034
+ 3kaf
14035
+ 2fvd
14036
+ 2xaj
14037
+ 4a95
14038
+ 5iv2
14039
+ 3n2u
14040
+ 4q6e
14041
+ 1i2s
14042
+ 2f34
14043
+ 6evo
14044
+ 5ksv
14045
+ 2e9n
14046
+ 5cgc
14047
+ 1m51
14048
+ 1d4h
14049
+ 5ncy
14050
+ 5kls
14051
+ 3s2p
14052
+ 4o09
14053
+ 1zaj
14054
+ 1f0u
14055
+ 4k3l
14056
+ 4tww
14057
+ 5f88
14058
+ 3fcb
14059
+ 4arw
14060
+ 4o9w
14061
+ 2ay7
14062
+ 5dcz
14063
+ 5dqf
14064
+ 3ttj
14065
+ 3lc3
14066
+ 2xg3
14067
+ 1uml
14068
+ 3qt6
14069
+ 4oty
14070
+ 4wzv
14071
+ 1hxb
14072
+ 5l87
14073
+ 4ah9
14074
+ 5ma7
14075
+ 3npa
14076
+ 4hs8
14077
+ 4qgd
14078
+ 5zkc
14079
+ 4d2t
14080
+ 5elz
14081
+ 1jk7
14082
+ 5cuh
14083
+ 3jqg
14084
+ 5w4s
14085
+ 4n7m
14086
+ 3tct
14087
+ 5ie1
14088
+ 4n1t
14089
+ 5aqh
14090
+ 5oku
14091
+ 5nhf
14092
+ 2yge
14093
+ 4gs9
14094
+ 6bu3
14095
+ 5vqr
14096
+ 2am4
14097
+ 5wr7
14098
+ 4j58
14099
+ 4zsr
14100
+ 1yon
14101
+ 2bkt
14102
+ 4buq
14103
+ 3u7m
14104
+ 3u51
14105
+ 3w55
14106
+ 2gfa
14107
+ 3fum
14108
+ 4hyu
14109
+ 4pra
14110
+ 4kyk
14111
+ 4gzp
14112
+ 3ekn
14113
+ 3v2w
14114
+ 2pog
14115
+ 2bb7
14116
+ 3qj9
14117
+ 5d3t
14118
+ 2w67
14119
+ 5fs5
14120
+ 5qaf
14121
+ 4crb
14122
+ 6fe0
14123
+ 3zmq
14124
+ 5t1t
14125
+ 2xm1
14126
+ 4oaz
14127
+ 6e8k
14128
+ 2fys
14129
+ 5egs
14130
+ 4aac
14131
+ 1akr
14132
+ 2wi1
14133
+ 2qbs
14134
+ 5h2u
14135
+ 1lke
14136
+ 3smq
14137
+ 5eds
14138
+ 3wmb
14139
+ 4azy
14140
+ 3cz1
14141
+ 1s39
14142
+ 1n94
14143
+ 5lc0
14144
+ 5wg5
14145
+ 5ttg
14146
+ 1ndz
14147
+ 6gmx
14148
+ 4cfe
14149
+ 3fsj
14150
+ 1cim
14151
+ 3kry
14152
+ 2f2h
14153
+ 2g9x
14154
+ 1o47
14155
+ 6aqf
14156
+ 4knn
14157
+ 1t29
14158
+ 4k5z
14159
+ 2ycf
14160
+ 5ijr
14161
+ 5kks
14162
+ 5tg2
14163
+ 4yxu
14164
+ 4i7c
14165
+ 3ftv
14166
+ 1ke3
14167
+ 4bib
14168
+ 3coy
14169
+ 5orw
14170
+ 5bns
14171
+ 4bs0
14172
+ 3igp
14173
+ 2uw4
14174
+ 4avj
14175
+ 3qcy
14176
+ 1yqj
14177
+ 3a73
14178
+ 4uu8
14179
+ 4h3f
14180
+ 4rwk
14181
+ 5j5r
14182
+ 2rkm
14183
+ 1w14
14184
+ 4edu
14185
+ 3e73
14186
+ 5o1s
14187
+ 4ngn
14188
+ 1mnc
14189
+ 2ypp
14190
+ 3wq5
14191
+ 4j74
14192
+ 3rz5
14193
+ 5upe
14194
+ 4eh6
14195
+ 1c84
14196
+ 4fcf
14197
+ 1sc8
14198
+ 3agm
14199
+ 5w10
14200
+ 1gzg
14201
+ 4qvv
14202
+ 4qp6
14203
+ 4why
14204
+ 3ip9
14205
+ 5he0
14206
+ 4yv5
14207
+ 3ril
14208
+ 4uyd
14209
+ 1nu3
14210
+ 3ozr
14211
+ 5qin
14212
+ 4utv
14213
+ 5x9o
14214
+ 1okv
14215
+ 5m4c
14216
+ 4xhv
14217
+ 5ll7
14218
+ 3cyx
14219
+ 4gm3
14220
+ 2w78
14221
+ 5q1d
14222
+ 3bym
14223
+ 5aqo
14224
+ 1ga8
14225
+ 1p5e
14226
+ 4c4f
14227
+ 2mps
14228
+ 4cxw
14229
+ 4w9f
14230
+ 3lmk
14231
+ 3sz1
14232
+ 4tya
14233
+ 4qjx
14234
+ 2v85
14235
+ 5bw4
14236
+ 2ael
14237
+ 4pv5
14238
+ 5osy
14239
+ 4m8h
14240
+ 3i02
14241
+ 6fs1
14242
+ 3twr
14243
+ 2vt3
14244
+ 4rrn
14245
+ 1h5u
14246
+ 2qoa
14247
+ 4lk6
14248
+ 3qtu
14249
+ 3k5d
14250
+ 2qhy
14251
+ 5jmw
14252
+ 3bun
14253
+ 1ghy
14254
+ 4l70
14255
+ 6cd4
14256
+ 4ymb
14257
+ 6bh2
14258
+ 5h0h
14259
+ 1ppx
14260
+ 3r4p
14261
+ 6gnr
14262
+ 2x6k
14263
+ 3f9n
14264
+ 4oys
14265
+ 3rf5
14266
+ 3hub
14267
+ 4fcq
14268
+ 5o1i
14269
+ 3ohh
14270
+ 4mqu
14271
+ 4gw5
14272
+ 5m4f
14273
+ 2r9w
14274
+ 5yea
14275
+ 1xp1
14276
+ 5lm4
14277
+ 2csm
14278
+ 5t2g
14279
+ 3n1w
14280
+ 3t8s
14281
+ 5nhz
14282
+ 4zk5
14283
+ 1e6q
14284
+ 4cg9
14285
+ 4bf1
14286
+ 2ycr
14287
+ 1c50
14288
+ 3hd3
14289
+ 4ehe
14290
+ 5tku
14291
+ 5wle
14292
+ 2r64
14293
+ 5l3j
14294
+ 6ft8
14295
+ 5ti5
14296
+ 4tyt
14297
+ 5lma
14298
+ 3oct
14299
+ 3zt2
14300
+ 5ai4
14301
+ 3zh8
14302
+ 3fl8
14303
+ 3old
14304
+ 3fqs
14305
+ 4xct
14306
+ 4mjp
14307
+ 3g2y
14308
+ 6g8n
14309
+ 5kkr
14310
+ 1fjs
14311
+ 6dxg
14312
+ 4cxy
14313
+ 3che
14314
+ 3qfv
14315
+ 1r5v
14316
+ 1pr1
14317
+ 3nsn
14318
+ 4p1u
14319
+ 3szb
14320
+ 5u11
14321
+ 4o0t
14322
+ 3ux0
14323
+ 4q4i
14324
+ 2nww
14325
+ 2qp8
14326
+ 4rsp
14327
+ 2p0x
14328
+ 1fsg
14329
+ 1u0g
14330
+ 4djx
14331
+ 6cco
14332
+ 3opp
14333
+ 4lg6
14334
+ 2clv
14335
+ 1hps
14336
+ 6bhe
14337
+ 6bir
14338
+ 5h09
14339
+ 3kwj
14340
+ 2bed
14341
+ 3e51
14342
+ 3msk
14343
+ 6axj
14344
+ 3udd
14345
+ 1zvx
14346
+ 5u6d
14347
+ 4qnb
14348
+ 4eox
14349
+ 2rly
14350
+ 3ob2
14351
+ 3osi
14352
+ 5m7t
14353
+ 5vqy
14354
+ 5ei6
14355
+ 3mbz
14356
+ 4f7n
14357
+ 3ipb
14358
+ 1w6r
14359
+ 4zx9
14360
+ 6gy5
14361
+ 2hh5
14362
+ 3nmq
14363
+ 1vjy
14364
+ 4hy5
14365
+ 4ts1
14366
+ 2qcg
14367
+ 5lwd
14368
+ 1kjr
14369
+ 3oyn
14370
+ 3rbq
14371
+ 3ujc
14372
+ 3ekp
14373
+ 3p7c
14374
+ 4urv
14375
+ 3gws
14376
+ 3fw4
14377
+ 1ow8
14378
+ 4pov
14379
+ 4k4j
14380
+ 4m0f
14381
+ 1ow4
14382
+ 3qaq
14383
+ 4ovf
14384
+ 1ps3
14385
+ 4gw6
14386
+ 5a4c
14387
+ 2ovx
14388
+ 5q19
14389
+ 1gwm
14390
+ 2kwn
14391
+ 4uce
14392
+ 4y2p
14393
+ 2q2c
14394
+ 5lgn
14395
+ 5oxg
14396
+ 3i4b
14397
+ 4qmo
14398
+ 2wyg
14399
+ 5eay
14400
+ 2b4l
14401
+ 2xd9
14402
+ 3sus
14403
+ 5zu0
14404
+ 3gjt
14405
+ 2b1v
14406
+ 3ctq
14407
+ 5tzz
14408
+ 1h28
14409
+ 4b13
14410
+ 2q15
14411
+ 2aac
14412
+ 3kw9
14413
+ 3qox
14414
+ 4wku
14415
+ 5wqk
14416
+ 2fjp
14417
+ 5eij
14418
+ 4mga
14419
+ 5ak2
14420
+ 5dp4
14421
+ 6f8u
14422
+ 4inu
14423
+ 4nrc
14424
+ 5drt
14425
+ 1tng
14426
+ 3vhc
14427
+ 1xh3
14428
+ 1toi
14429
+ 2lya
14430
+ 3mpm
14431
+ 4k55
14432
+ 1qb9
14433
+ 5hlw
14434
+ 6ej2
14435
+ 4i33
14436
+ 4cwp
14437
+ 2fvc
14438
+ 3r02
14439
+ 6ezh
14440
+ 4muw
14441
+ 3d91
14442
+ 3img
14443
+ 4op1
14444
+ 4s3e
14445
+ 3qxp
14446
+ 4f5y
14447
+ 4qmv
14448
+ 5u62
14449
+ 4qtc
14450
+ 6tim
14451
+ 4h2j
14452
+ 1x7e
14453
+ 1b6h
14454
+ 3opr
14455
+ 4gjb
14456
+ 5u5h
14457
+ 5v3o
14458
+ 5l44
14459
+ 2c6i
14460
+ 4ezr
14461
+ 5u4e
14462
+ 1fta
14463
+ 4z2j
14464
+ 3n3j
14465
+ 5eg4
14466
+ 2yiq
14467
+ 4jmx
14468
+ 4q2k
14469
+ 2x2r
14470
+ 2fzg
14471
+ 1ajx
14472
+ 2yki
14473
+ 3ccw
14474
+ 5u0e
14475
+ 5j32
14476
+ 1y2e
14477
+ 6gwr
14478
+ 6gu7
14479
+ 5dxb
14480
+ 1hyo
14481
+ 3lc5
14482
+ 3arw
14483
+ 3rz1
14484
+ 5kr8
14485
+ 5jv2
14486
+ 4xpj
14487
+ 4l2f
14488
+ 3vi5
14489
+ 5etj
14490
+ 2a3w
14491
+ 3bra
14492
+ 1h79
14493
+ 6ed6
14494
+ 5w85
14495
+ 6mub
14496
+ 5od5
14497
+ 5ejl
14498
+ 3ubx
14499
+ 2c6k
14500
+ 4rn0
14501
+ 3pdq
14502
+ 4unp
14503
+ 5mng
14504
+ 4w5j
14505
+ 5afn
14506
+ 6h0b
14507
+ 5yc4
14508
+ 1ahx
14509
+ 2jst
14510
+ 1o3h
14511
+ 3rz8
14512
+ 4na9
14513
+ 1f0t
14514
+ 4eym
14515
+ 5vew
14516
+ 4k3p
14517
+ 4zs9
14518
+ 3b9s
14519
+ 5wcl
14520
+ 4mmm
14521
+ 2l65
14522
+ 4lph
14523
+ 3frz
14524
+ 3rz9
14525
+ 4kba
14526
+ 5a8x
14527
+ 5eps
14528
+ 2wor
14529
+ 5f1j
14530
+ 2oxd
14531
+ 3ldq
14532
+ 4g0k
14533
+ 1u32
14534
+ 5gr9
14535
+ 4fl3
14536
+ 3uxl
14537
+ 3fud
14538
+ 4x68
14539
+ 3k2f
14540
+ 5lsg
14541
+ 1h23
14542
+ 3zrm
14543
+ 5v6u
14544
+ 4lke
14545
+ 1o4r
14546
+ 5mw4
14547
+ 4fcb
14548
+ 4qp8
14549
+ 1htf
14550
+ 4q0k
14551
+ 4yc0
14552
+ 3ebp
14553
+ 3q4b
14554
+ 5dh4
14555
+ 1bqo
14556
+ 3sn7
14557
+ 4waf
14558
+ 3gst
14559
+ 5ecv
14560
+ 1hvj
14561
+ 3m3c
14562
+ 4ocv
14563
+ 2ewb
14564
+ 5h19
14565
+ 3ryz
14566
+ 3a2o
14567
+ 4ea3
14568
+ 4wx6
14569
+ 1i32
14570
+ 4lge
14571
+ 3cwe
14572
+ 2hmu
14573
+ 1h0r
14574
+ 5m9w
14575
+ 3jwr
14576
+ 2q70
14577
+ 4iu0
14578
+ 1tl3
14579
+ 2gqn
14580
+ 3bc3
14581
+ 4gk7
14582
+ 2wd3
14583
+ 2x8z
14584
+ 4ybt
14585
+ 5dms
14586
+ 1nhg
14587
+ 4mw1
14588
+ 3cgo
14589
+ 5v4q
14590
+ 4x6n
14591
+ 3bh8
14592
+ 3f6h
14593
+ 4au7
14594
+ 5i3a
14595
+ 3l6x
14596
+ 5d0j
14597
+ 4c1u
14598
+ 6fzx
14599
+ 3ook
14600
+ 5mt4
14601
+ 4hzx
14602
+ 3udp
14603
+ 4g8o
14604
+ 4mrz
14605
+ 4tyo
14606
+ 4umj
14607
+ 4rfz
14608
+ 1rxp
14609
+ 2zyn
14610
+ 5t54
14611
+ 6bh3
14612
+ 4cd5
14613
+ 4hlk
14614
+ 3t0x
14615
+ 2az5
14616
+ 5dy5
14617
+ 1ft4
14618
+ 5org
14619
+ 5to8
14620
+ 6eqx
14621
+ 3b2q
14622
+ 6ez9
14623
+ 6aox
14624
+ 2y5f
14625
+ 4x7o
14626
+ 4y4v
14627
+ 5oae
14628
+ 1h25
14629
+ 5d2r
14630
+ 2xch
14631
+ 5ndd
14632
+ 2cf8
14633
+ 2yi7
14634
+ 4nzo
14635
+ 3mct
14636
+ 5dia
14637
+ 1iih
14638
+ 3ime
14639
+ 3tll
14640
+ 2jle
14641
+ 1h26
14642
+ 3l7b
14643
+ 5v88
14644
+ 1o2o
14645
+ 3e90
14646
+ 4j46
14647
+ 3dp9
14648
+ 5ngb
14649
+ 5jh6
14650
+ 3rpy
14651
+ 2vfz
14652
+ 5xn3
14653
+ 1e6s
14654
+ 4xx4
14655
+ 3ppo
14656
+ 4qfo
14657
+ 1o3d
14658
+ 3s43
14659
+ 6ar2
14660
+ 3cyu
14661
+ 1qf0
14662
+ 1ow6
14663
+ 5eml
14664
+ 4amz
14665
+ 5ye9
14666
+ 4mti
14667
+ 6f29
14668
+ 5mk1
14669
+ 1qsc
14670
+ 1uvr
14671
+ 2off
14672
+ 4whq
14673
+ 4d08
14674
+ 6gu3
14675
+ 4b6q
14676
+ 4agc
14677
+ 4uu5
14678
+ 3blt
14679
+ 2zb1
14680
+ 1g37
14681
+ 3krd
14682
+ 3lm1
14683
+ 1noj
14684
+ 1t32
14685
+ 3i6z
14686
+ 5kbq
14687
+ 1gwr
14688
+ 4k6v
14689
+ 2vba
14690
+ 3zrc
14691
+ 4nmx
14692
+ 4mbc
14693
+ 1ie9
14694
+ 3bgz
14695
+ 2k62
14696
+ 1mqg
14697
+ 1qy1
14698
+ 1mau
14699
+ 2x7u
14700
+ 3iqq
14701
+ 5h8e
14702
+ 2aig
14703
+ 5n9l
14704
+ 2g5p
14705
+ 4fri
14706
+ 4q3u
14707
+ 5flo
14708
+ 4bo1
14709
+ 5tyr
14710
+ 5x26
14711
+ 2drc
14712
+ 4bbh
14713
+ 3gkz
14714
+ 5e6o
14715
+ 4qy3
14716
+ 1nq0
14717
+ 5e8w
14718
+ 2gu8
14719
+ 1w9v
14720
+ 5uci
14721
+ 2veu
14722
+ 5hvy
14723
+ 3ewu
14724
+ 1jm4
14725
+ 5yas
14726
+ 4ojr
14727
+ 1h0w
14728
+ 5eou
14729
+ 3as0
14730
+ 4e3g
14731
+ 4euv
14732
+ 2wly
14733
+ 3lir
14734
+ 3znr
14735
+ 3q3t
14736
+ 6ap8
14737
+ 3g3n
14738
+ 3c52
14739
+ 3g2w
14740
+ 4iku
14741
+ 5bry
14742
+ 4wvu
14743
+ 3neo
14744
+ 1abf
14745
+ 5db2
14746
+ 1sjh
14747
+ 3hc8
14748
+ 2liq
14749
+ 5dtw
14750
+ 5dhu
14751
+ 1qhr
14752
+ 2pl0
14753
+ 4wcu
14754
+ 3exo
14755
+ 1jvp
14756
+ 4mrd
14757
+ 5jxn
14758
+ 2yjc
14759
+ 2o9r
14760
+ 5en3
14761
+ 5hpm
14762
+ 4qf8
14763
+ 3k05
14764
+ 3h0q
14765
+ 2a4l
14766
+ 4xue
14767
+ 3ewc
14768
+ 1dtt
14769
+ 4yzm
14770
+ 4q4p
14771
+ 3d9o
14772
+ 3s8x
14773
+ 4mq2
14774
+ 6ek3
14775
+ 5tgc
14776
+ 2hds
14777
+ 5fwj
14778
+ 3kme
14779
+ 1zog
14780
+ 6ce6
14781
+ 4c6v
14782
+ 1xdd
14783
+ 5bvf
14784
+ 1dud
14785
+ 3lzu
14786
+ 4c1d
14787
+ 2er6
14788
+ 3f48
14789
+ 2xuz
14790
+ 5fto
14791
+ 4zei
14792
+ 3c39
14793
+ 6fqu
14794
+ 4fc0
14795
+ 1bzc
14796
+ 3pj8
14797
+ 2auc
14798
+ 4kp0
14799
+ 2y7p
14800
+ 4ono
14801
+ 3nc9
14802
+ 4bjb
14803
+ 4qo4
14804
+ 3mz3
14805
+ 5edc
14806
+ 2xzg
14807
+ 6bqj
14808
+ 1ga9
14809
+ 4oq6
14810
+ 4b80
14811
+ 5ee7
14812
+ 1bcu
14813
+ 4y8z
14814
+ 5ka1
14815
+ 5lpd
14816
+ 3qi4
14817
+ 5vii
14818
+ 5ele
14819
+ 2ew6
14820
+ 1s9v
14821
+ 4bqw
14822
+ 2x6w
14823
+ 5mnc
14824
+ 5eni
14825
+ 2xi7
14826
+ 1wqv
14827
+ 2b8v
14828
+ 6f92
14829
+ 4jve
14830
+ 3oy0
14831
+ 6h41
14832
+ 4zx4
14833
+ 4q15
14834
+ 6czv
14835
+ 2wti
14836
+ 5j9l
14837
+ 1o5r
14838
+ 5n1v
14839
+ 2bq7
14840
+ 4fic
14841
+ 2ole
14842
+ 3umw
14843
+ 1a1e
14844
+ 1ax0
14845
+ 4ux9
14846
+ 6bhi
14847
+ 3pj3
14848
+ 5m55
14849
+ 2uw6
14850
+ 2bqv
14851
+ 2ea2
14852
+ 5dl1
14853
+ 6g7f
14854
+ 5o8t
14855
+ 5nze
14856
+ 5fkj
14857
+ 5t1l
14858
+ 4wj7
14859
+ 3ibi
14860
+ 5h22
14861
+ 4bgh
14862
+ 2j9h
14863
+ 2iyf
14864
+ 5m28
14865
+ 2v0c
14866
+ 6ez6
14867
+ 5b6g
14868
+ 2q3z
14869
+ 4des
14870
+ 3pcj
14871
+ 5y0z
14872
+ 3u6h
14873
+ 3dtc
14874
+ 3cr4
14875
+ 2igv
14876
+ 4qaa
14877
+ 1yhm
14878
+ 3n35
14879
+ 5ylj
14880
+ 5dls
14881
+ 5h1u
14882
+ 4qw4
14883
+ 4hv3
14884
+ 4jfi
14885
+ 2go4
14886
+ 2m0o
14887
+ 5vzy
14888
+ 2h6t
14889
+ 2qnn
14890
+ 2aqb
14891
+ 1zd5
14892
+ 3pxf
14893
+ 1p1n
14894
+ 1bnt
14895
+ 2hm1
14896
+ 4z46
14897
+ 5e2n
14898
+ 2p8s
14899
+ 1h2k
14900
+ 3uh2
14901
+ 1b2m
14902
+ 5eiw
14903
+ 4bfy
14904
+ 5csx
14905
+ 2z92
14906
+ 2rjp
14907
+ 5o7n
14908
+ 3d3p
14909
+ 4ban
14910
+ 4l0l
14911
+ 3zo2
14912
+ 3zhx
14913
+ 2hah
14914
+ 3dx1
14915
+ 4dkt
14916
+ 3sz9
14917
+ 3n0n
14918
+ 4l4m
14919
+ 6asz
14920
+ 3hv4
14921
+ 3imc
14922
+ 3eju
14923
+ 5t6z
14924
+ 4jaz
14925
+ 1gjb
14926
+ 5fl0
14927
+ 3r9n
14928
+ 3sw8
14929
+ 5ggp
14930
+ 1btn
14931
+ 5j8u
14932
+ 1x8t
14933
+ 3bva
14934
+ 2wtc
14935
+ 5lom
14936
+ 2vwc
14937
+ 6bw2
14938
+ 6m9f
14939
+ 4n1z
14940
+ 3lzb
14941
+ 1fwe
14942
+ 2w16
14943
+ 2i6b
14944
+ 5ytu
14945
+ 4j8r
14946
+ 4je8
14947
+ 3t60
14948
+ 2n9x
14949
+ 6f8t
14950
+ 4lwu
14951
+ 6ebe
14952
+ 6ajj
14953
+ 5wap
14954
+ 5a6k
14955
+ 2xp7
14956
+ 3ql9
14957
+ 1q7a
14958
+ 5dkn
14959
+ 2gnl
14960
+ 3rwc
14961
+ 5npr
14962
+ 2e9c
14963
+ 1ajv
14964
+ 6e4f
14965
+ 1o7o
14966
+ 5alf
14967
+ 2xys
14968
+ 6h5w
14969
+ 2jfz
14970
+ 2xdx
14971
+ 4bda
14972
+ 3rxd
14973
+ 1f57
14974
+ 9icd
14975
+ 3zpr
14976
+ 4ciw
14977
+ 2zcr
14978
+ 2w9r
14979
+ 3hnz
14980
+ 4r1y
14981
+ 3olg
14982
+ 5lce
14983
+ 1kwq
14984
+ 5lrg
14985
+ 5f0c
14986
+ 3u8j
14987
+ 1bl6
14988
+ 3amv
14989
+ 1ej4
14990
+ 4ht0
14991
+ 3u6w
14992
+ 4q09
14993
+ 5jss
14994
+ 3qlc
14995
+ 2b1i
14996
+ 5cls
14997
+ 3eyf
14998
+ 6eln
14999
+ 5mrh
15000
+ 3nox
15001
+ 2qe2
15002
+ 3u6j
15003
+ 1qka
15004
+ 3k5u
15005
+ 1ws1
15006
+ 5ew0
15007
+ 6ft9
15008
+ 3kr4
15009
+ 5iug
15010
+ 1ajn
15011
+ 3zn0
15012
+ 2i3z
15013
+ 1y3x
15014
+ 2r9s
15015
+ 5vpm
15016
+ 4occ
15017
+ 4nym
15018
+ 3osw
15019
+ 3hy5
15020
+ 4j86
15021
+ 4dfg
15022
+ 5f1v
15023
+ 5l4e
15024
+ 2qve
15025
+ 4erf
15026
+ 1wss
15027
+ 1ajq
15028
+ 5h5r
15029
+ 1ym2
15030
+ 1hee
15031
+ 3rq7
15032
+ 1dxp
15033
+ 1vgc
15034
+ 3huc
15035
+ 5fqb
15036
+ 4mue
15037
+ 1qbs
15038
+ 1ujk
15039
+ 6fa3
15040
+ 4xo8
15041
+ 4ww8
15042
+ 4qxt
15043
+ 3in3
15044
+ 2wxp
15045
+ 5j3s
15046
+ 2h1h
15047
+ 5d7a
15048
+ 4lhm
15049
+ 2rjs
15050
+ 5nvf
15051
+ 3l0n
15052
+ 5b5f
15053
+ 4oow
15054
+ 3ej5
15055
+ 5fum
15056
+ 1li6
15057
+ 5swg
15058
+ 6eum
15059
+ 3fas
15060
+ 5klr
15061
+ 2vv9
15062
+ 3v5t
15063
+ 2byr
15064
+ 5nr7
15065
+ 3suf
15066
+ 2xb9
15067
+ 1f4x
15068
+ 5v86
15069
+ 3kl6
15070
+ 1qng
15071
+ 2a8g
15072
+ 2j62
15073
+ 3zi0
15074
+ 1h6e
15075
+ 3ap7
15076
+ 3zj6
15077
+ 4hp0
15078
+ 1bmk
15079
+ 1nj1
15080
+ 5xp7
15081
+ 4iwd
15082
+ 1if7
15083
+ 4ia0
15084
+ 3ms7
15085
+ 5yjk
15086
+ 5j7f
15087
+ 5aba
15088
+ 5ake
15089
+ 1h2t
15090
+ 4mzk
15091
+ 4ezj
15092
+ 4an3
15093
+ 5jfp
15094
+ 5v24
15095
+ 2xmy
15096
+ 1yqy
15097
+ 3aox
15098
+ 4yjn
15099
+ 2i0h
15100
+ 4lwe
15101
+ 4frs
15102
+ 2hwg
15103
+ 3o4l
15104
+ 3ddg
15105
+ 4cu8
15106
+ 2zfs
15107
+ 5wfd
15108
+ 4lnb
15109
+ 4j17
15110
+ 4qyy
15111
+ 2r3j
15112
+ 4eoi
15113
+ 3pdh
15114
+ 1y3p
15115
+ 4g5y
15116
+ 3v4t
15117
+ 4okp
15118
+ 5meh
15119
+ 1xpc
15120
+ 3avl
15121
+ 6gzl
15122
+ 2pqc
15123
+ 4g0y
15124
+ 3wab
15125
+ 5iop
15126
+ 2ax9
15127
+ 3bex
15128
+ 3cdb
15129
+ 6afc
15130
+ 1v0m
15131
+ 3ksl
15132
+ 3fvl
15133
+ 6c7d
15134
+ 1t2v
15135
+ 5bot
15136
+ 5yto
15137
+ 2wed
15138
+ 1jn2
15139
+ 4k4e
15140
+ 1a94
15141
+ 3ozj
15142
+ 1j81
15143
+ 3e93
15144
+ 1q9d
15145
+ 4idt
15146
+ 4k2f
15147
+ 2whp
15148
+ 3snc
15149
+ 4otg
15150
+ 2lty
15151
+ 5bnr
15152
+ 6c7r
15153
+ 3as3
15154
+ 4ge9
15155
+ 4xg7
15156
+ 5dw1
15157
+ 3ivq
15158
+ 2qrg
15159
+ 2xdk
15160
+ 1gno
15161
+ 2gg5
15162
+ 1d4y
15163
+ 5dqe
15164
+ 3rwi
15165
+ 4x8o
15166
+ 3el9
15167
+ 4kup
15168
+ 3eks
15169
+ 2kff
15170
+ 1w5y
15171
+ 2flh
15172
+ 2vwv
15173
+ 3rvg
15174
+ 4oeu
15175
+ 3u4o
15176
+ 2pv3
15177
+ 4i0t
15178
+ 3aru
15179
+ 4yv0
15180
+ 5ura
15181
+ 5uv1
15182
+ 3vjc
15183
+ 4bam
15184
+ 5vc5
15185
+ 2ito
15186
+ 1l5s
15187
+ 3adv
15188
+ 3oe6
15189
+ 5mnb
15190
+ 5gvm
15191
+ 1r1j
15192
+ 2qe5
15193
+ 2rjr
15194
+ 3wiz
15195
+ 4cfm
15196
+ 1gz4
15197
+ 3c1n
15198
+ 6ma3
15199
+ 1z9h
15200
+ 6g98
15201
+ 1qm4
15202
+ 4j24
15203
+ 3pju
15204
+ 5cqx
15205
+ 1wvj
15206
+ 2yde
15207
+ 4mqp
15208
+ 2np9
15209
+ 4zyu
15210
+ 3ua9
15211
+ 3fc2
15212
+ 2xp5
15213
+ 1umw
15214
+ 5jer
15215
+ 3dcv
15216
+ 4aft
15217
+ 4rhu
15218
+ 3ahn
15219
+ 4ytf
15220
+ 5os2
15221
+ 5el9
15222
+ 3gjw
15223
+ 4oth
15224
+ 1lkl
15225
+ 6ela
15226
+ 5vkf
15227
+ 2oz6
15228
+ 4q1f
15229
+ 5usz
15230
+ 2xfk
15231
+ 3n51
15232
+ 4fxq
15233
+ 4oc3
15234
+ 3ggu
15235
+ 1hiv
15236
+ 4d2s
15237
+ 2fkf
15238
+ 4oz3
15239
+ 6fkz
15240
+ 3lhg
15241
+ 1jyq
15242
+ 3uvw
15243
+ 4f6s
15244
+ 2qk8
15245
+ 5jat
15246
+ 1lvu
15247
+ 11gs
15248
+ 4cwt
15249
+ 4gz3
15250
+ 5vkc
15251
+ 1wu1
15252
+ 5m17
15253
+ 1rne
15254
+ 1l2z
15255
+ 3s0d
15256
+ 4nxq
15257
+ 5iu2
15258
+ 1x78
15259
+ 6bc9
15260
+ 6drx
15261
+ 5juz
15262
+ 4qvn
15263
+ 5i7x
15264
+ 5o7e
15265
+ 4ktc
15266
+ 1fl3
15267
+ 1enu
15268
+ 2clf
15269
+ 4zs0
15270
+ 4llp
15271
+ 4djy
15272
+ 5nrf
15273
+ 4z0k
15274
+ 4k8a
15275
+ 6cdc
15276
+ 6bin
15277
+ 4e1z
15278
+ 1a61
15279
+ 2q64
15280
+ 3s0o
15281
+ 4z0u
15282
+ 1ii5
15283
+ 4lk7
15284
+ 5ubt
15285
+ 5m1z
15286
+ 1t4j
15287
+ 1g7v
15288
+ 4q1n
15289
+ 3l08
15290
+ 5d7r
15291
+ 2emt
15292
+ 4y2b
15293
+ 4j0t
15294
+ 2zof
15295
+ 6ecz
15296
+ 4gb9
15297
+ 4jfl
15298
+ 2ow3
15299
+ 1ibg
15300
+ 3fyz
15301
+ 5g5z
15302
+ 7lpr
15303
+ 4g8n
15304
+ 4bie
15305
+ 2yir
15306
+ 4yps
15307
+ 5jdc
15308
+ 3p4v
15309
+ 2fum
15310
+ 5c4o
15311
+ 1kcs
15312
+ 4r3s
15313
+ 1cj1
15314
+ 6f09
15315
+ 4ce3
15316
+ 4ft2
15317
+ 2pcu
15318
+ 4mo8
15319
+ 2g2r
15320
+ 4lxk
15321
+ 4utn
15322
+ 1stc
15323
+ 5nxp
15324
+ 5t52
15325
+ 3iod
15326
+ 1h01
15327
+ 4deb
15328
+ 4jjs
15329
+ 5ts0
15330
+ 3su0
15331
+ 4yhf
15332
+ 1y4z
15333
+ 3heg
15334
+ 2g70
15335
+ 3p2h
15336
+ 1sqq
15337
+ 1swk
15338
+ 2f4b
15339
+ 1igj
15340
+ 3fcq
15341
+ 2pja
15342
+ 4ase
15343
+ 2o63
15344
+ 2xyf
15345
+ 4j0v
15346
+ 5ka7
15347
+ 5kzp
15348
+ 3oob
15349
+ 5w6t
15350
+ 1h2u
15351
+ 5a00
15352
+ 4zt3
15353
+ 2cer
15354
+ 1w96
15355
+ 2pzi
15356
+ 4qf7
15357
+ 3c88
15358
+ 4y67
15359
+ 4nrl
15360
+ 4du8
15361
+ 4qtn
15362
+ 3sue
15363
+ 6fhu
15364
+ 1qs4
15365
+ 5u6b
15366
+ 5edb
15367
+ 2p2h
15368
+ 5gtr
15369
+ 4hbp
15370
+ 3tia
15371
+ 3cbp
15372
+ 2cgw
15373
+ 4bio
15374
+ 3gwt
15375
+ 5t8r
15376
+ 6fo7
15377
+ 1g9t
15378
+ 3kr0
15379
+ 1gt3
15380
+ 5c83
15381
+ 2r9x
15382
+ 3f8c
15383
+ 5nk7
15384
+ 4cfu
15385
+ 5e2r
15386
+ 2wf1
15387
+ 3uil
15388
+ 5fl6
15389
+ 1ms7
15390
+ 4csj
15391
+ 4yat
15392
+ 4f1q
15393
+ 4lm1
15394
+ 4oc0
15395
+ 3hj0
15396
+ 2xwe
15397
+ 3ms4
15398
+ 2jh0
15399
+ 3oad
15400
+ 2y2j
15401
+ 4bd3
15402
+ 6h7n
15403
+ 5exm
15404
+ 1ctu
15405
+ 5l3f
15406
+ 2ajd
15407
+ 2c94
15408
+ 3g3r
15409
+ 2zo3
15410
+ 4dro
15411
+ 1uou
15412
+ 6bsl
15413
+ 1uy7
15414
+ 3zyb
15415
+ 3t0b
15416
+ 4o43
15417
+ 5q0l
15418
+ 3wch
15419
+ 5orb
15420
+ 3kgp
15421
+ 3zeb
15422
+ 4pyy
15423
+ 4b0j
15424
+ 1jq8
15425
+ 3u4w
15426
+ 2xyt
15427
+ 5jhd
15428
+ 3hmp
15429
+ 4dxj
15430
+ 5hg7
15431
+ 3zlx
15432
+ 1zkk
15433
+ 1gny
15434
+ 4ju6
15435
+ 1il3
15436
+ 4daf
15437
+ 4o0x
15438
+ 5ygi
15439
+ 6gla
15440
+ 3mtw
15441
+ 3mg0
15442
+ 3d9l
15443
+ 2vcj
15444
+ 5edq
15445
+ 4bci
15446
+ 4e4x
15447
+ 2qdt
15448
+ 3wuu
15449
+ 3udv
15450
+ 4dkq
15451
+ 4bfp
15452
+ 5i2e
15453
+ 3q1x
15454
+ 1qaq
15455
+ 3rm8
15456
+ 4ahu
15457
+ 1gx4
15458
+ 3wi2
15459
+ 4de2
15460
+ 5g2g
15461
+ 2wuu
15462
+ 5nud
15463
+ 4hcu
15464
+ 3k37
15465
+ 6prc
15466
+ 2e7f
15467
+ 4bb9
15468
+ 1amk
15469
+ 4l1a
15470
+ 4lqy
15471
+ 4a16
15472
+ 5wkf
15473
+ 4qsm
15474
+ 3sxu
15475
+ 3uqg
15476
+ 5lif
15477
+ 4azf
15478
+ 5ntt
15479
+ 3vsw
15480
+ 5fos
15481
+ 1qin
15482
+ 3e7b
15483
+ 1ll4
15484
+ 3vjm
15485
+ 3ny3
15486
+ 6aqs
15487
+ 1k6c
15488
+ 4ngr
15489
+ 6fc6
15490
+ 2gnj
15491
+ 4zy6
15492
+ 3u8d
15493
+ 2aou
15494
+ 4b0b
15495
+ 5t2p
15496
+ 5efb
15497
+ 5u0d
15498
+ 5vjp
15499
+ 5bui
15500
+ 5e13
15501
+ 5f5i
15502
+ 4ivk
15503
+ 7hvp
15504
+ 3d7k
15505
+ 4vgc
15506
+ 5g1p
15507
+ 6b1h
15508
+ 2br8
15509
+ 1cqp
15510
+ 4w50
15511
+ 4ehv
15512
+ 5kqf
15513
+ 2yfa
15514
+ 3ws9
15515
+ 4xkb
15516
+ 5mrp
15517
+ 3str
15518
+ 5nkd
15519
+ 5mpk
15520
+ 5kqy
15521
+ 4lw1
15522
+ 1avp
15523
+ 3qdd
15524
+ 5xxk
15525
+ 4oks
15526
+ 4hxr
15527
+ 3ur0
15528
+ 4pns
15529
+ 4i5m
15530
+ 4hgl
15531
+ 2vtd
15532
+ 2gph
15533
+ 3rme
15534
+ 5fqp
15535
+ 5hfu
15536
+ 2iok
15537
+ 4yux
15538
+ 6g1u
15539
+ 2hvx
15540
+ 1n0s
15541
+ 4l4v
15542
+ 3s1y
15543
+ 1g7p
15544
+ 2rr4
15545
+ 2x00
15546
+ 2b55
15547
+ 4wmv
15548
+ 4kc1
15549
+ 2x39
15550
+ 5joh
15551
+ 2w0p
15552
+ 4az6
15553
+ 1o0f
15554
+ 3haw
15555
+ 5w44
15556
+ 2xhr
15557
+ 1h8s
15558
+ 4ge1
15559
+ 3phe
15560
+ 1evh
15561
+ 6b30
15562
+ 1g35
15563
+ 2qfo
15564
+ 5gjf
15565
+ 6bt6
15566
+ 2gg3
15567
+ 5trr
15568
+ 5ly3
15569
+ 5eh0
15570
+ 5ioy
15571
+ 1xt8
15572
+ 3c3o
15573
+ 5e74
15574
+ 3acl
15575
+ 5abh
15576
+ 5fol
15577
+ 5ddf
15578
+ 3qxv
15579
+ 3iwy
15580
+ 3ttm
15581
+ 5n18
15582
+ 6beb
15583
+ 5nxw
15584
+ 5lxb
15585
+ 4ra1
15586
+ 4muk
15587
+ 4mm4
15588
+ 4hs6
15589
+ 5ncq
15590
+ 3zmg
15591
+ 4hcv
15592
+ 4i72
15593
+ 4pee
15594
+ 1qtn
15595
+ 4awg
15596
+ 4rfy
15597
+ 5dyy
15598
+ 3ud8
15599
+ 5itf
15600
+ 5cuq
15601
+ 5f1c
15602
+ 5hey
15603
+ 2ko7
15604
+ 3oil
15605
+ 5tks
15606
+ 3t3u
15607
+ 5o3q
15608
+ 4k72
15609
+ 4oma
15610
+ 3hv6
15611
+ 6df7
15612
+ 5er4
15613
+ 4no6
15614
+ 5ugd
15615
+ 1ciz
15616
+ 1rev
15617
+ 6hdq
15618
+ 3c43
15619
+ 1a7t
15620
+ 6fky
15621
+ 5n1r
15622
+ 3rr4
15623
+ 5bqg
15624
+ 3b8r
15625
+ 2pfy
15626
+ 5c7f
15627
+ 5l6j
15628
+ 1etz
15629
+ 3drf
15630
+ 3k7f
15631
+ 4b6e
15632
+ 6asu
15633
+ 1fd0
15634
+ 4zjc
15635
+ 1y3g
15636
+ 4joh
15637
+ 4c38
15638
+ 4h5c
15639
+ 5kej
15640
+ 6aji
15641
+ 4mvy
15642
+ 3lpi
15643
+ 4pms
15644
+ 6fx1
15645
+ 3ioi
15646
+ 2xbj
15647
+ 1bnm
15648
+ 4tuh
15649
+ 4fgt
15650
+ 5mhp
15651
+ 4hwb
15652
+ 6eo9
15653
+ 4aje
15654
+ 1v2t
15655
+ 5h85
15656
+ 1x8d
15657
+ 3uoj
15658
+ 2aog
15659
+ 3o5x
15660
+ 5vr8
15661
+ 2o4p
15662
+ 1fh8
15663
+ 4jz1
15664
+ 5oq4
15665
+ 3zdh
15666
+ 5otz
15667
+ 3lnz
15668
+ 1n9m
15669
+ 3rak
15670
+ 6ba7
15671
+ 6czb
15672
+ 2wyj
15673
+ 4qyo
15674
+ 3mxd
15675
+ 2ri9
15676
+ 2zq1
15677
+ 3ljo
15678
+ 1kvo
15679
+ 6g2a
15680
+ 1gj6
15681
+ 5oug
15682
+ 1g4o
15683
+ 3mvm
15684
+ 1iyl
15685
+ 2cgx
15686
+ 3v2x
15687
+ 5iv4
15688
+ 4m2u
15689
+ 4q1b
15690
+ 3ggc
15691
+ 2c68
15692
+ 3ind
15693
+ 1laf
15694
+ 4kxn
15695
+ 2ada
15696
+ 1om1
15697
+ 5eh7
15698
+ 4jk5
15699
+ 4q93
15700
+ 1b8y
15701
+ 1x11
15702
+ 4mr6
15703
+ 4c4j
15704
+ 4yik
15705
+ 1osv
15706
+ 4y7r
15707
+ 4zow
15708
+ 2hdx
15709
+ 4ui6
15710
+ 4r6x
15711
+ 1pxi
15712
+ 5ni5
15713
+ 5v4b
15714
+ 1h0a
15715
+ 4pf5
15716
+ 5ggz
15717
+ 1o4m
15718
+ 1urc
15719
+ 5mqt
15720
+ 3ads
15721
+ 5gn5
15722
+ 4mrg
15723
+ 5ylt
15724
+ 5k6a
15725
+ 4nld
15726
+ 2xl3
15727
+ 4auy
15728
+ 1zoe
15729
+ 4pum
15730
+ 5lxd
15731
+ 5csh
15732
+ 3gbq
15733
+ 5xs2
15734
+ 4ggl
15735
+ 5a14
15736
+ 4au8
15737
+ 5n49
15738
+ 5x8i
15739
+ 6c7f
15740
+ 3fvk
15741
+ 4kqp
15742
+ 5y97
15743
+ 6gub
15744
+ 5mxx
15745
+ 3lox
15746
+ 2oc7
15747
+ 3g1m
15748
+ 6hsz
15749
+ 1ilq
15750
+ 3twj
15751
+ 5vcx
15752
+ 3svv
15753
+ 4na4
15754
+ 3v5g
15755
+ 4p7e
15756
+ 2vwl
15757
+ 4umu
15758
+ 3v4j
15759
+ 6gfz
15760
+ 6duf
15761
+ 5aqf
15762
+ 5ezh
15763
+ 5nar
15764
+ 4yti
15765
+ 4zyq
15766
+ 4lop
15767
+ 4nus
15768
+ 2a3a
15769
+ 2o9i
15770
+ 3ia6
15771
+ 5nfh
15772
+ 1e00
15773
+ 5u8a
15774
+ 3n8n
15775
+ 4axd
15776
+ 3kxz
15777
+ 3owd
15778
+ 4fxj
15779
+ 1gx0
15780
+ 4u90
15781
+ 1d7x
15782
+ 3eq8
15783
+ 3q44
15784
+ 4ago
15785
+ 5e2p
15786
+ 1pxj
15787
+ 4p6c
15788
+ 1zoh
15789
+ 4i7j
15790
+ 4hlg
15791
+ 5j6n
15792
+ 3s2o
15793
+ 4ec0
15794
+ 3wiy
15795
+ 4k77
15796
+ 5yf1
15797
+ 1hsl
15798
+ 4tjz
15799
+ 6cbh
15800
+ 6g38
15801
+ 4efu
15802
+ 5wep
15803
+ 1jwm
15804
+ 3wfg
15805
+ 5lvl
15806
+ 5kqd
15807
+ 1fq5
15808
+ 5nn6
15809
+ 3b5r
15810
+ 5xag
15811
+ 3w69
15812
+ 3udr
15813
+ 4c4h
15814
+ 5mgi
15815
+ 5ad1
15816
+ 5zh3
15817
+ 4i7b
15818
+ 6cwi
15819
+ 1tkb
15820
+ 3unj
15821
+ 4u54
15822
+ 2x0y
15823
+ 3isw
15824
+ 2xbw
15825
+ 6fzu
15826
+ 5wgq
15827
+ 4kzc
15828
+ 1yhs
15829
+ 5btr
15830
+ 1juq
15831
+ 2idk
15832
+ 2v25
15833
+ 4prh
15834
+ 5o5a
15835
+ 3r0h
15836
+ 1sle
15837
+ 2ieh
15838
+ 5ous
15839
+ 2fqy
15840
+ 1p7m
15841
+ 3fuf
15842
+ 1w10
15843
+ 1vot
15844
+ 1guw
15845
+ 1gbq
15846
+ 5thn
15847
+ 6b41
15848
+ 4xoc
15849
+ 5ugc
15850
+ 4h71
15851
+ 5i43
15852
+ 4oc1
15853
+ 3t0d
15854
+ 4kw6
15855
+ 4jrg
15856
+ 4ra5
15857
+ 4azp
15858
+ 5sz3
15859
+ 5hbj
15860
+ 1npa
15861
+ 4wi1
15862
+ 4b7p
15863
+ 3biz
15864
+ 4bpj
15865
+ 3lxe
15866
+ 5qav
15867
+ 4ge6
15868
+ 4k1e
15869
+ 3vje
15870
+ 4mic
15871
+ 4qp9
15872
+ 2xg5
15873
+ 1w5x
15874
+ 2rin
15875
+ 3omc
15876
+ 5ii2
15877
+ 3pax
15878
+ 5elq
15879
+ 3bpc
15880
+ 1ec0
15881
+ 4x8g
15882
+ 3cft
15883
+ 4qd6
15884
+ 5n1x
15885
+ 2v96
15886
+ 4ucc
15887
+ 5ocj
15888
+ 4mul
15889
+ 3lok
15890
+ 2e5y
15891
+ 3eql
15892
+ 5kyj
15893
+ 1n5r
15894
+ 3hfz
15895
+ 2pg2
15896
+ 3g3m
15897
+ 4fs3
15898
+ 1nnk
15899
+ 4n9a
15900
+ 4jp9
15901
+ 2j77
15902
+ 6cvd
15903
+ 4tkb
15904
+ 3tfp
15905
+ 5a5o
15906
+ 5nsx
15907
+ 6bbv
15908
+ 5myg
15909
+ 3ws8
15910
+ 6ex1
15911
+ 2jkr
15912
+ 5kcv
15913
+ 2vw5
15914
+ 4ezl
15915
+ 3dp0
15916
+ 5hyq
15917
+ 5iyy
15918
+ 2oc4
15919
+ 5div
15920
+ 1gag
15921
+ 2r38
15922
+ 3chp
15923
+ 3ot8
15924
+ 2wqb
15925
+ 6bqh
15926
+ 3gxl
15927
+ 2pgj
15928
+ 6gfx
15929
+ 2i3v
15930
+ 5yqw
15931
+ 5n9t
15932
+ 6cc9
15933
+ 3p3s
15934
+ 3oe9
15935
+ 2y1o
15936
+ 2o4r
15937
+ 1dzm
15938
+ 3vp2
15939
+ 3djo
15940
+ 4oz2
15941
+ 1p6d
15942
+ 5mih
15943
+ 4xt2
15944
+ 3tf7
15945
+ 5v19
15946
+ 6g9k
15947
+ 4ipf
15948
+ 3bim
15949
+ 4nwm
15950
+ 2aeb
15951
+ 3blu
15952
+ 3vtc
15953
+ 3nww
15954
+ 3mp6
15955
+ 1nu8
15956
+ 3wzk
15957
+ 4zec
15958
+ 3gvb
15959
+ 4bt4
15960
+ 2f6v
15961
+ 5xwr
15962
+ 5m53
15963
+ 4yvc
15964
+ 1ai4
15965
+ 1z1r
15966
+ 1ryh
15967
+ 1rhu
15968
+ 5d1s
15969
+ 3g0i
15970
+ 1gpk
15971
+ 4clb
15972
+ 5cte
15973
+ 4kmu
15974
+ 6f9r
15975
+ 3pjg
15976
+ 1uz1
15977
+ 3tww
15978
+ 2lbv
15979
+ 5w1w
15980
+ 1s26
15981
+ 3w9r
15982
+ 4yes
15983
+ 3sl8
15984
+ 5km0
15985
+ 2j79
15986
+ 2h03
15987
+ 1qnh
15988
+ 4cpx
15989
+ 5enk
15990
+ 4mnv
15991
+ 3r5m
15992
+ 4tk4
15993
+ 5wg6
15994
+ 5uwf
15995
+ 4cjp
15996
+ 3uu1
15997
+ 4bxu
15998
+ 3h30
15999
+ 4qwr
16000
+ 2gyi
16001
+ 1cnw
16002
+ 4fzc
16003
+ 6gdg
16004
+ 4e5i
16005
+ 3oq5
16006
+ 3wig
16007
+ 5my8
16008
+ 4aj1
16009
+ 3b65
16010
+ 2e95
16011
+ 4xcb
16012
+ 3wf5
16013
+ 5h15
16014
+ 5htb
16015
+ 3t3e
16016
+ 3h5b
16017
+ 1ugp
16018
+ 5anq
16019
+ 6g4n
16020
+ 1apb
16021
+ 2ai7
16022
+ 4xxs
16023
+ 1lrh
16024
+ 1ove
16025
+ 3v7t
16026
+ 1xk5
16027
+ 4fiv
16028
+ 4mcv
16029
+ 1mrw
16030
+ 4b14
16031
+ 5isl
16032
+ 5cj6
16033
+ 5un9
16034
+ 3nil
16035
+ 1x07
16036
+ 6fni
16037
+ 4u2y
16038
+ 4hbv
16039
+ 6h2t
16040
+ 4q6d
16041
+ 3b8z
16042
+ 2cnf
16043
+ 4isu
16044
+ 5u2f
16045
+ 2cng
16046
+ 4q90
16047
+ 4l0b
16048
+ 1z6s
16049
+ 6g3q
16050
+ 4ylu
16051
+ 3w5t
16052
+ 4dei
16053
+ 2zjv
16054
+ 2x6y
16055
+ 3ikg
16056
+ 5j7p
16057
+ 4mcc
16058
+ 3vqs
16059
+ 3u9n
16060
+ 2g5t
16061
+ 3owj
16062
+ 5bnm
16063
+ 3hkn
16064
+ 3qtx
16065
+ 3qzt
16066
+ 5bpa
16067
+ 3p17
16068
+ 6fag
16069
+ 5nfb
16070
+ 1o43
16071
+ 1nok
16072
+ 3vby
16073
+ 6as6
16074
+ 5vjn
16075
+ 3skc
16076
+ 3bg8
16077
+ 1tc1
16078
+ 5orh
16079
+ 5bq0
16080
+ 5qac
16081
+ 3l16
16082
+ 5kw2
16083
+ 3uj9
16084
+ 6byk
16085
+ 5kgw
16086
+ 4wvt
16087
+ 1bv9
16088
+ 4ruz
16089
+ 1fzq
16090
+ 1mu8
16091
+ 1srg
16092
+ 4y73
16093
+ 3upv
16094
+ 4jj8
16095
+ 5xvf
16096
+ 1xka
16097
+ 4ewn
16098
+ 4ewr
16099
+ 4a7b
16100
+ 2xab
16101
+ 6g9b
16102
+ 4u69
16103
+ 1qb6
16104
+ 3c6t
16105
+ 3qar
16106
+ 4k5o
16107
+ 2gnf
16108
+ 1u3q
16109
+ 3g31
16110
+ 3oev
16111
+ 4oc2
16112
+ 1eby
16113
+ 3uef
16114
+ 4k0o
16115
+ 5ehi
16116
+ 5h63
16117
+ 3wcg
16118
+ 6hvi
16119
+ 1hii
16120
+ 4jhq
16121
+ 3sgt
16122
+ 4aua
16123
+ 4hnc
16124
+ 3bxh
16125
+ 5g5v
16126
+ 4j08
16127
+ 4anu
16128
+ 3hac
16129
+ 2vcq
16130
+ 2f7p
16131
+ 4ycl
16132
+ 3s22
16133
+ 3obq
16134
+ 1o9k
16135
+ 4xy2
16136
+ 3k9x
16137
+ 1xog
16138
+ 5b0x
16139
+ 4m5g
16140
+ 4czs
16141
+ 5eyr
16142
+ 4lm5
16143
+ 5xup
16144
+ 4rao
16145
+ 3qcl
16146
+ 3lkj
16147
+ 2xno
16148
+ 1hfs
16149
+ 1gjd
16150
+ 4bzs
16151
+ 4j7d
16152
+ 1fw0
16153
+ 1jfh
16154
+ 2cgr
16155
+ 4pd8
16156
+ 3mz6
16157
+ 5obj
16158
+ 3gm0
16159
+ 4j3u
16160
+ 1uvt
16161
+ 5ml5
16162
+ 4fut
16163
+ 4r3c
16164
+ 4kfp
16165
+ 4jck
16166
+ 5xhs
16167
+ 5mwz
16168
+ 3n86
16169
+ 5qab
16170
+ 5zeq
16171
+ 1fo3
16172
+ 4eki
16173
+ 5n8w
16174
+ 2exm
16175
+ 4klb
16176
+ 5ikb
16177
+ 2qhc
16178
+ 5aqr
16179
+ 4qn7
16180
+ 5nbw
16181
+ 4k2g
16182
+ 3qck
16183
+ 3zo1
16184
+ 4knx
16185
+ 3m6r
16186
+ 6dh6
16187
+ 3p8n
16188
+ 5f3e
16189
+ 2vie
16190
+ 4uyh
16191
+ 5uoy
16192
+ 4gr3
16193
+ 6cyd
16194
+ 2f14
16195
+ 5v2q
16196
+ 3h2m
16197
+ 3ao2
16198
+ 1k1n
16199
+ 3gfw
16200
+ 4q19
16201
+ 2b1g
16202
+ 4pdk
16203
+ 1oxn
16204
+ 2pj1
16205
+ 4o3a
16206
+ 2y6s
16207
+ 1inh
16208
+ 4u5l
16209
+ 4esg
16210
+ 4mg6
16211
+ 4de5
16212
+ 3wke
16213
+ 6dgy
16214
+ 4mp7
16215
+ 4de1
16216
+ 4z6i
16217
+ 2r4b
16218
+ 5q0z
16219
+ 4b33
16220
+ 2koh
16221
+ 4qyh
16222
+ 5y94
16223
+ 6fo8
16224
+ 3p4r
16225
+ 4mny
16226
+ 2vjx
16227
+ 5lj0
16228
+ 5yqx
16229
+ 5c26
16230
+ 4eb8
16231
+ 3lk1
16232
+ 3mg6
16233
+ 4oru
16234
+ 2wpb
16235
+ 4k7i
16236
+ 5fsy
16237
+ 3ava
16238
+ 2z7r
16239
+ 3ex6
16240
+ 4b8p
16241
+ 3l13
16242
+ 5fbi
16243
+ 3s3k
16244
+ 3chc
16245
+ 2wf0
16246
+ 4hgt
16247
+ 6b5i
16248
+ 4ayw
16249
+ 3gk4
16250
+ 2lcs
16251
+ 5fas
16252
+ 1lyx
16253
+ 6bvh
16254
+ 6edr
16255
+ 1ghz
16256
+ 1dg9
16257
+ 1egh
16258
+ 2x6f
16259
+ 5o48
16260
+ 4avg
16261
+ 3znc
16262
+ 1czq
16263
+ 5f39
16264
+ 2bpy
16265
+ 2vc9
16266
+ 5op5
16267
+ 4ory
16268
+ 3hu3
16269
+ 1xuc
16270
+ 3hmo
16271
+ 2ybt
16272
+ 1v1k
16273
+ 5o22
16274
+ 3qt7
16275
+ 4eo8
16276
+ 4bco
16277
+ 2p3i
16278
+ 3hii
16279
+ 2xdm
16280
+ 6axq
16281
+ 2xhs
16282
+ 5cyv
16283
+ 1sre
16284
+ 2y8o
16285
+ 9lpr
16286
+ 4l3o
16287
+ 1dzk
16288
+ 3lnj
16289
+ 3fsm
16290
+ 4ghi
16291
+ 2nqi
16292
+ 1i48
16293
+ 5mo2
16294
+ 6fr2
16295
+ 1bnn
16296
+ 2jbl
16297
+ 1r17
16298
+ 4rrf
16299
+ 5jah
16300
+ 6apw
16301
+ 4trc
16302
+ 1lgt
16303
+ 2wl0
16304
+ 4i0z
16305
+ 3oe5
16306
+ 3fjz
16307
+ 2uyn
16308
+ 3hxd
16309
+ 6g3o
16310
+ 2nqg
16311
+ 3fc8
16312
+ 2zvj
16313
+ 3mw1
16314
+ 2c5n
16315
+ 3poz
16316
+ 5ax9
16317
+ 3mhw
16318
+ 5tdw
16319
+ 5n7b
16320
+ 4kij
16321
+ 5axq
16322
+ 3pxy
16323
+ 2q96
16324
+ 3kjd
16325
+ 5cf5
16326
+ 5id0
16327
+ 5e0j
16328
+ 4kjv
16329
+ 4e1m
16330
+ 3gy4
16331
+ 5m34
16332
+ 1xqc
16333
+ 2hzy
16334
+ 5brn
16335
+ 2d0k
16336
+ 3s8n
16337
+ 2j7f
16338
+ 5zc5
16339
+ 3p5o
16340
+ 1xh5
16341
+ 2vj7
16342
+ 4zt2
16343
+ 2b7a
16344
+ 1thl
16345
+ 1jmq
16346
+ 5h7h
16347
+ 5mz3
16348
+ 3dxj
16349
+ 5q1g
16350
+ 6ays
16351
+ 3w2t
16352
+ 1bkm
16353
+ 3pb3
16354
+ 3vjl
16355
+ 2a8h
16356
+ 4pax
16357
+ 5tt8
16358
+ 2y2p
16359
+ 5lyx
16360
+ 1zaf
16361
+ 3mb6
16362
+ 2fuu
16363
+ 4mw6
16364
+ 2j6m
16365
+ 4rll
16366
+ 3qai
16367
+ 4kzq
16368
+ 2uym
16369
+ 5v84
16370
+ 3bm9
16371
+ 4egk
16372
+ 2bqw
16373
+ 2q8y
16374
+ 4pji
16375
+ 6ary
16376
+ 4daw
16377
+ 4i06
16378
+ 2wxk
16379
+ 1r5n
data/splits/timesplit_no_lig_overlap_val ADDED
@@ -0,0 +1,968 @@
1
+ 4lp9
2
+ 1me7
3
+ 2zv9
4
+ 2qo8
5
+ 1cw2
6
+ 3k5c
7
+ 2o65
8
+ 4kqq
9
+ 3rdv
10
+ 1d4w
11
+ 1q4l
12
+ 4b5w
13
+ 4bgg
14
+ 4mm5
15
+ 3iej
16
+ 3ftu
17
+ 830c
18
+ 2xye
19
+ 1olu
20
+ 2wk2
21
+ 4pxf
22
+ 5o0j
23
+ 1my2
24
+ 5czm
25
+ 4jit
26
+ 5mb1
27
+ 1sqp
28
+ 3zlw
29
+ 4xqu
30
+ 3hkq
31
+ 6fns
32
+ 5e0l
33
+ 2p8o
34
+ 4gzw
35
+ 3n87
36
+ 1lhc
37
+ 4itj
38
+ 4m7c
39
+ 4olh
40
+ 4q1e
41
+ 5l7e
42
+ 3faa
43
+ 5vqx
44
+ 3pka
45
+ 5x54
46
+ 5a9u
47
+ 4n9e
48
+ 4est
49
+ 1il9
50
+ 4igr
51
+ 3t2t
52
+ 6dar
53
+ 3gol
54
+ 3vbg
55
+ 2ydk
56
+ 4zpf
57
+ 5zo7
58
+ 4xnw
59
+ 1fpy
60
+ 2r1y
61
+ 6m8w
62
+ 2jds
63
+ 5icx
64
+ 1hwr
65
+ 6bj2
66
+ 4b4m
67
+ 1zsb
68
+ 4do3
69
+ 3t3i
70
+ 1f8a
71
+ 2ke1
72
+ 5ezx
73
+ 3p78
74
+ 4rvm
75
+ 3ovn
76
+ 5wzv
77
+ 4udb
78
+ 1okz
79
+ 1mpl
80
+ 5npc
81
+ 5ff6
82
+ 1hlf
83
+ 1nvq
84
+ 4bhf
85
+ 4y4g
86
+ 5mkz
87
+ 2o0u
88
+ 3bcs
89
+ 1wvc
90
+ 4fsl
91
+ 3oz1
92
+ 6dgt
93
+ 1me8
94
+ 2puy
95
+ 4odp
96
+ 1hpx
97
+ 4nrq
98
+ 1z2b
99
+ 3uik
100
+ 3mfv
101
+ 3vqh
102
+ 4w9g
103
+ 4xek
104
+ 4jok
105
+ 2wap
106
+ 1g50
107
+ 4j0p
108
+ 2o9a
109
+ 3m94
110
+ 4i1c
111
+ 5a82
112
+ 4i9h
113
+ 1k1i
114
+ 4uro
115
+ 2f7i
116
+ 5fpk
117
+ 2lgf
118
+ 4l7f
119
+ 1g3d
120
+ 4ir5
121
+ 3mta
122
+ 3jzg
123
+ 5f94
124
+ 4nrt
125
+ 4yax
126
+ 5nhv
127
+ 2xtk
128
+ 4qh7
129
+ 1tok
130
+ 4b6p
131
+ 3rg2
132
+ 3q8d
133
+ 3obu
134
+ 4awj
135
+ 3daj
136
+ 2j50
137
+ 5l2z
138
+ 5bml
139
+ 2bba
140
+ 5n34
141
+ 2xvn
142
+ 1dpu
143
+ 5fnt
144
+ 1jyc
145
+ 4zz1
146
+ 6hm7
147
+ 4rrv
148
+ 4rww
149
+ 5orv
150
+ 3qo2
151
+ 3uii
152
+ 6d1x
153
+ 3juq
154
+ 4qk4
155
+ 6mr5
156
+ 5hjc
157
+ 2p4s
158
+ 2hnc
159
+ 1k4g
160
+ 4g0c
161
+ 2y5g
162
+ 4u3f
163
+ 3tv5
164
+ 1i3z
165
+ 4mw7
166
+ 3n2c
167
+ 6cvw
168
+ 3v66
169
+ 3wzp
170
+ 3s7m
171
+ 5ujv
172
+ 1p06
173
+ 3ipy
174
+ 4wkt
175
+ 4ie0
176
+ 5fot
177
+ 5i59
178
+ 5za9
179
+ 4gii
180
+ 4h2o
181
+ 4yrs
182
+ 5a6h
183
+ 2xo8
184
+ 4e3n
185
+ 4m5k
186
+ 3dga
187
+ 6fse
188
+ 6ck6
189
+ 1sqc
190
+ 4x1r
191
+ 3dnj
192
+ 3rvi
193
+ 2a58
194
+ 4bf6
195
+ 3zlk
196
+ 4mbj
197
+ 4tpm
198
+ 4d8c
199
+ 1ejn
200
+ 4yt6
201
+ 2x7x
202
+ 4qp1
203
+ 4de3
204
+ 5yg4
205
+ 1x7b
206
+ 5n9s
207
+ 2fme
208
+ 1ydt
209
+ 2bdf
210
+ 6baw
211
+ 6fsd
212
+ 2xn3
213
+ 4tk0
214
+ 3q4j
215
+ 1u9l
216
+ 1oqp
217
+ 5htz
218
+ 4glr
219
+ 5kj0
220
+ 5ukl
221
+ 3fun
222
+ 4wk2
223
+ 4ht6
224
+ 5hv1
225
+ 1uze
226
+ 4bcc
227
+ 3ff6
228
+ 5if6
229
+ 1tsm
230
+ 2r59
231
+ 3iqh
232
+ 2v7a
233
+ 5d10
234
+ 5nvh
235
+ 3eqr
236
+ 1jq9
237
+ 1u1b
238
+ 6cer
239
+ 5uq9
240
+ 1u3s
241
+ 5icy
242
+ 3exh
243
+ 2oqs
244
+ 1pzp
245
+ 1d4i
246
+ 4x6p
247
+ 4mb9
248
+ 5emk
249
+ 1iky
250
+ 6b7f
251
+ 3chq
252
+ 3h5s
253
+ 5zmq
254
+ 4ib5
255
+ 2wej
256
+ 6fjm
257
+ 5ewa
258
+ 2igx
259
+ 2z78
260
+ 5lpm
261
+ 4wet
262
+ 3lxl
263
+ 2xba
264
+ 5wbl
265
+ 5zla
266
+ 2x6x
267
+ 4mw9
268
+ 5t2d
269
+ 4j3m
270
+ 4aqh
271
+ 3lbk
272
+ 4djp
273
+ 4odl
274
+ 4x6j
275
+ 1ero
276
+ 5f3t
277
+ 4k3q
278
+ 5ta4
279
+ 1caq
280
+ 2eg7
281
+ 1f73
282
+ 3rxg
283
+ 6ezq
284
+ 1qkt
285
+ 5l3e
286
+ 5c28
287
+ 4pp9
288
+ 4bgk
289
+ 3iaf
290
+ 5vrp
291
+ 5zz4
292
+ 5ur5
293
+ 3ft2
294
+ 5ech
295
+ 4jjq
296
+ 5iz6
297
+ 5dhr
298
+ 4l2g
299
+ 4r17
300
+ 3wk6
301
+ 4h1e
302
+ 2aq9
303
+ 5g1n
304
+ 3zm9
305
+ 5c4l
306
+ 5mfs
307
+ 1fzj
308
+ 2ltw
309
+ 4x7i
310
+ 4c94
311
+ 2cfg
312
+ 2va5
313
+ 3vb6
314
+ 2hob
315
+ 5ah2
316
+ 5syn
317
+ 3g6g
318
+ 3rwj
319
+ 5sz4
320
+ 4f9v
321
+ 5n2d
322
+ 3n9r
323
+ 5ldo
324
+ 3vb7
325
+ 1sqo
326
+ 3drg
327
+ 5j9y
328
+ 6b96
329
+ 4yz9
330
+ 1vcj
331
+ 5epr
332
+ 4tx6
333
+ 3dz6
334
+ 3czv
335
+ 5v49
336
+ 1ahy
337
+ 3wzq
338
+ 1bq4
339
+ 5u8c
340
+ 6bj3
341
+ 2qnb
342
+ 4a9m
343
+ 3d4f
344
+ 5oui
345
+ 5wmg
346
+ 6ma4
347
+ 4x5q
348
+ 5cbr
349
+ 6msy
350
+ 5avi
351
+ 1g3b
352
+ 2wi4
353
+ 3kjn
354
+ 4dhn
355
+ 4o7e
356
+ 5kit
357
+ 5y5t
358
+ 3hfj
359
+ 2qd8
360
+ 5vsj
361
+ 2y2i
362
+ 5m0m
363
+ 3tcp
364
+ 4bhz
365
+ 1jd6
366
+ 5idn
367
+ 4zzx
368
+ 4kn4
369
+ 2a5c
370
+ 6hly
371
+ 1au2
372
+ 4jbo
373
+ 5cgj
374
+ 3ske
375
+ 3lq2
376
+ 4pxm
377
+ 2wxg
378
+ 5tb6
379
+ 2vc7
380
+ 3iw4
381
+ 5hct
382
+ 3skf
383
+ 5lyy
384
+ 3fmz
385
+ 4p5z
386
+ 5ktw
387
+ 6e4w
388
+ 1cx9
389
+ 6em7
390
+ 4mjr
391
+ 4u7t
392
+ 3rde
393
+ 4ux4
394
+ 4i6f
395
+ 3l3x
396
+ 4ie6
397
+ 4j70
398
+ 1jd0
399
+ 4iaw
400
+ 1szm
401
+ 2afw
402
+ 3ess
403
+ 3sap
404
+ 1olx
405
+ 1bzh
406
+ 5hfb
407
+ 4x3h
408
+ 5we9
409
+ 3zsw
410
+ 5ny6
411
+ 1hn2
412
+ 3l3z
413
+ 4qp2
414
+ 1d4p
415
+ 4xkc
416
+ 2is0
417
+ 6c7e
418
+ 5zku
419
+ 4fai
420
+ 6g9a
421
+ 4xu3
422
+ 5dry
423
+ 4d8z
424
+ 3zcz
425
+ 3kbz
426
+ 2y59
427
+ 4nal
428
+ 4rpv
429
+ 4yje
430
+ 3vf8
431
+ 4bqx
432
+ 4z9l
433
+ 4ep2
434
+ 4ylk
435
+ 5mme
436
+ 4dht
437
+ 2uy4
438
+ 6mu3
439
+ 3kx1
440
+ 5o0s
441
+ 4bch
442
+ 5c4k
443
+ 2br1
444
+ 4ddh
445
+ 2f9k
446
+ 2w2i
447
+ 4ogn
448
+ 4up5
449
+ 5o4y
450
+ 5hjd
451
+ 2qw1
452
+ 5y8z
453
+ 4kqr
454
+ 1o2t
455
+ 6e05
456
+ 3u7l
457
+ 2mip
458
+ 3hvg
459
+ 2p59
460
+ 4d3h
461
+ 4pl5
462
+ 3tzd
463
+ 2vnp
464
+ 4e3m
465
+ 3vgc
466
+ 5bqi
467
+ 1b7h
468
+ 1lhu
469
+ 3rlr
470
+ 3h22
471
+ 2wnc
472
+ 2wot
473
+ 5d1t
474
+ 3mo0
475
+ 4wn5
476
+ 3p3u
477
+ 1nfs
478
+ 4e90
479
+ 5aqu
480
+ 1bmq
481
+ 3kwz
482
+ 6f6n
483
+ 4rj5
484
+ 4omd
485
+ 6min
486
+ 1ujj
487
+ 4ppa
488
+ 4uxl
489
+ 5y3n
490
+ 6df2
491
+ 4wvl
492
+ 1xt3
493
+ 5oaj
494
+ 4a9r
495
+ 5mli
496
+ 4p4e
497
+ 3juo
498
+ 1z9g
499
+ 2ykc
500
+ 5a0e
501
+ 3g0w
502
+ 5t9w
503
+ 1sqa
504
+ 3wci
505
+ 1fkw
506
+ 5u4g
507
+ 4mfe
508
+ 4kpx
509
+ 3nti
510
+ 3azb
511
+ 2xog
512
+ 3c3r
513
+ 2buc
514
+ 1hyz
515
+ 4dcd
516
+ 6azl
517
+ 3t3d
518
+ 3q4l
519
+ 4few
520
+ 1q95
521
+ 4u0b
522
+ 3b7u
523
+ 4bo4
524
+ 4o10
525
+ 5wmt
526
+ 5v9t
527
+ 5aok
528
+ 1jtq
529
+ 5uit
530
+ 2vgc
531
+ 2gfd
532
+ 3mna
533
+ 1aqc
534
+ 4xtt
535
+ 4z0d
536
+ 4ty9
537
+ 2yiv
538
+ 2hrp
539
+ 4zh2
540
+ 2z4o
541
+ 1qku
542
+ 2xdw
543
+ 4n7j
544
+ 4yp1
545
+ 3exf
546
+ 4c6z
547
+ 6ccu
548
+ 2wxn
549
+ 1bwb
550
+ 2gvf
551
+ 1hiy
552
+ 5c4t
553
+ 2za5
554
+ 2xkf
555
+ 4q18
556
+ 1o2p
557
+ 5th2
558
+ 4dj7
559
+ 3eyd
560
+ 4j0r
561
+ 2m3o
562
+ 2b53
563
+ 4m3b
564
+ 2izl
565
+ 2vtr
566
+ 2x6d
567
+ 2i0a
568
+ 5ehg
569
+ 6cw4
570
+ 4c37
571
+ 3cwj
572
+ 1azm
573
+ 2qci
574
+ 5sz0
575
+ 2gkl
576
+ 2z4z
577
+ 6awo
578
+ 1v11
579
+ 4l53
580
+ 3p55
581
+ 2ynn
582
+ 2vu3
583
+ 4dli
584
+ 2bcd
585
+ 4l0s
586
+ 4uda
587
+ 3m37
588
+ 5j5t
589
+ 2p16
590
+ 4gh6
591
+ 1mfg
592
+ 3s3i
593
+ 4j73
594
+ 2v5x
595
+ 2h4n
596
+ 4jsz
597
+ 4wk1
598
+ 4igt
599
+ 4k63
600
+ 3qqk
601
+ 16pk
602
+ 5aom
603
+ 1hyv
604
+ 5a3w
605
+ 3veh
606
+ 3g4l
607
+ 2ph8
608
+ 5mkx
609
+ 5c4u
610
+ 4gto
611
+ 3cj5
612
+ 4prj
613
+ 2vd7
614
+ 5duc
615
+ 3odi
616
+ 6bg5
617
+ 1qwu
618
+ 5jn8
619
+ 1v1m
620
+ 1qpe
621
+ 5v3r
622
+ 2wc4
623
+ 2vte
624
+ 1a52
625
+ 4dhq
626
+ 2qta
627
+ 6ccy
628
+ 4jog
629
+ 4bgy
630
+ 5u9i
631
+ 3az9
632
+ 1gt1
633
+ 2jew
634
+ 3pdc
635
+ 1n3i
636
+ 5fyx
637
+ 4f49
638
+ 4nzn
639
+ 6hm2
640
+ 4a4l
641
+ 5xij
642
+ 5vk0
643
+ 4xsx
644
+ 2aj8
645
+ 4odq
646
+ 2n7b
647
+ 4ygf
648
+ 2a4q
649
+ 2jc0
650
+ 4jsa
651
+ 1inq
652
+ 3dc3
653
+ 5tob
654
+ 4urn
655
+ 6bik
656
+ 4ju4
657
+ 5nya
658
+ 5oh2
659
+ 5znr
660
+ 5ct2
661
+ 3u4u
662
+ 4x7h
663
+ 3max
664
+ 3rbm
665
+ 3krj
666
+ 1aj6
667
+ 1pmv
668
+ 5n0e
669
+ 4nhy
670
+ 4oem
671
+ 6fi4
672
+ 4e3j
673
+ 1fq4
674
+ 5myr
675
+ 2hkf
676
+ 1os0
677
+ 3rqg
678
+ 4ivc
679
+ 5c7b
680
+ 3lq4
681
+ 1u6q
682
+ 1qxz
683
+ 1l5r
684
+ 4xxh
685
+ 3m40
686
+ 5or9
687
+ 4okg
688
+ 4d89
689
+ 2gm9
690
+ 5x33
691
+ 4de0
692
+ 4gr8
693
+ 5lz8
694
+ 1p93
695
+ 2brp
696
+ 2gg8
697
+ 6fdt
698
+ 5cxh
699
+ 1jvu
700
+ 3wp1
701
+ 1fzm
702
+ 5cxa
703
+ 2gbg
704
+ 2g78
705
+ 5aml
706
+ 2y34
707
+ 2qnp
708
+ 1v16
709
+ 1njj
710
+ 2a5u
711
+ 4z88
712
+ 4wmx
713
+ 5vo2
714
+ 4fod
715
+ 2pou
716
+ 3jsw
717
+ 2ow2
718
+ 5g3m
719
+ 3odl
720
+ 3o9e
721
+ 3eyh
722
+ 4ej2
723
+ 3c4e
724
+ 4b6f
725
+ 1pl0
726
+ 3pb8
727
+ 6fap
728
+ 4iax
729
+ 2bua
730
+ 6fgg
731
+ 2o4h
732
+ 4uwh
733
+ 5wbf
734
+ 2yxj
735
+ 1ff1
736
+ 2giu
737
+ 1qbt
738
+ 2ovq
739
+ 4bak
740
+ 2y3p
741
+ 2iwu
742
+ 3hvi
743
+ 2w0x
744
+ 3fcl
745
+ 1zpa
746
+ 5czb
747
+ 3t1l
748
+ 2cfd
749
+ 3k3g
750
+ 4cfw
751
+ 2e91
752
+ 5op8
753
+ 3hig
754
+ 6h7y
755
+ 3mtb
756
+ 4eb9
757
+ 4lkg
758
+ 5ehv
759
+ 5ier
760
+ 4ode
761
+ 1xoq
762
+ 5d6p
763
+ 3kwa
764
+ 5np8
765
+ 5v82
766
+ 6ma1
767
+ 3bz3
768
+ 3myq
769
+ 4j0s
770
+ 4f4p
771
+ 4lh6
772
+ 1uef
773
+ 4j3d
774
+ 4yx4
775
+ 4amx
776
+ 4ptg
777
+ 2c97
778
+ 4ec4
779
+ 4r1v
780
+ 1zc9
781
+ 4nuf
782
+ 3g2u
783
+ 6hlx
784
+ 5vij
785
+ 2x4o
786
+ 6hlz
787
+ 4lkj
788
+ 3s75
789
+ 2gz8
790
+ 1gvk
791
+ 2yhd
792
+ 3hqz
793
+ 3pb7
794
+ 1thr
795
+ 4ris
796
+ 5twh
797
+ 4gql
798
+ 3n3l
799
+ 3acx
800
+ 5yvx
801
+ 3gy2
802
+ 1xmu
803
+ 5l6p
804
+ 5l8n
805
+ 4msn
806
+ 4rz1
807
+ 3f66
808
+ 3ucj
809
+ 5hcl
810
+ 1t1r
811
+ 3kce
812
+ 3u15
813
+ 1wbg
814
+ 5khi
815
+ 3er5
816
+ 4qew
817
+ 5mft
818
+ 6eqp
819
+ 5gsw
820
+ 2qd7
821
+ 4cli
822
+ 3f9w
823
+ 3msc
824
+ 1jgl
825
+ 3kid
826
+ 1ymx
827
+ 1ui0
828
+ 3d1f
829
+ 1pxl
830
+ 5kos
831
+ 3vzd
832
+ 5fcz
833
+ 3ara
834
+ 4li6
835
+ 5ks7
836
+ 4wym
837
+ 5j7q
838
+ 4qsh
839
+ 2ce9
840
+ 5vqz
841
+ 3o2m
842
+ 4bcm
843
+ 5orx
844
+ 1i41
845
+ 3c5u
846
+ 4kai
847
+ 6gjy
848
+ 4tsz
849
+ 5o0e
850
+ 6drt
851
+ 1y57
852
+ 3kqb
853
+ 3jup
854
+ 5ork
855
+ 3ikc
856
+ 3gwu
857
+ 4wke
858
+ 4x7l
859
+ 3lp1
860
+ 5ivy
861
+ 3f16
862
+ 4c36
863
+ 1w2x
864
+ 2d06
865
+ 1hbj
866
+ 1ols
867
+ 1iup
868
+ 5aix
869
+ 1ydd
870
+ 5w4r
871
+ 3h23
872
+ 3rj7
873
+ 4ish
874
+ 1ebw
875
+ 1fcy
876
+ 1d09
877
+ 5hdv
878
+ 4x1n
879
+ 5boj
880
+ 2xn7
881
+ 4b6s
882
+ 3f82
883
+ 4clj
884
+ 4zzz
885
+ 5j5d
886
+ 2vts
887
+ 1k08
888
+ 3u3f
889
+ 4jk6
890
+ 4csy
891
+ 6hth
892
+ 2mnz
893
+ 2vpg
894
+ 2qd6
895
+ 4jkw
896
+ 3ml5
897
+ 1ih0
898
+ 4at5
899
+ 5dgu
900
+ 4g31
901
+ 5n0d
902
+ 5aa9
903
+ 4u4s
904
+ 5oa6
905
+ 2wzm
906
+ 4b4q
907
+ 6fi1
908
+ 6chn
909
+ 1z4u
910
+ 5aa8
911
+ 1lpk
912
+ 3cib
913
+ 5d75
914
+ 5x4o
915
+ 1ydb
916
+ 5dhq
917
+ 5t28
918
+ 4zz0
919
+ 3evf
920
+ 5vyy
921
+ 6eip
922
+ 1q63
923
+ 3ldw
924
+ 5tq4
925
+ 5uxf
926
+ 2j7x
927
+ 4kil
928
+ 1yda
929
+ 3bc4
930
+ 2ew5
931
+ 6ee3
932
+ 4yrr
933
+ 3wax
934
+ 3bzf
935
+ 5ody
936
+ 1k06
937
+ 4j84
938
+ 5l6h
939
+ 5eok
940
+ 5nne
941
+ 5m6m
942
+ 2a4r
943
+ 3p1d
944
+ 2ayp
945
+ 3iux
946
+ 4b0g
947
+ 1jr1
948
+ 4qo9
949
+ 4bh4
950
+ 4xt9
951
+ 2ok1
952
+ 2r7g
953
+ 4uib
954
+ 5mmn
955
+ 5akj
956
+ 3hs4
957
+ 5wpb
958
+ 6e5x
959
+ 5vnd
960
+ 5evd
961
+ 5wlg
962
+ 5l4m
963
+ 4kiu
964
+ 4own
965
+ 5oh9
966
+ 6arv
967
+ 1xr9
968
+ 4hv7
data/splits/timesplit_test ADDED
@@ -0,0 +1,363 @@
1
+ 6qqw
2
+ 6d08
3
+ 6jap
4
+ 6np2
5
+ 6uvp
6
+ 6oxq
7
+ 6jsn
8
+ 6hzb
9
+ 6qrc
10
+ 6oio
11
+ 6jag
12
+ 6moa
13
+ 6hld
14
+ 6i9a
15
+ 6e4c
16
+ 6g24
17
+ 6jb4
18
+ 6s55
19
+ 6seo
20
+ 6dyz
21
+ 5zk5
22
+ 6jid
23
+ 5ze6
24
+ 6qlu
25
+ 6a6k
26
+ 6qgf
27
+ 6e3z
28
+ 6te6
29
+ 6pka
30
+ 6g2o
31
+ 6jsf
32
+ 5zxk
33
+ 6qxd
34
+ 6n97
35
+ 6jt3
36
+ 6qtr
37
+ 6oy1
38
+ 6n96
39
+ 6qzh
40
+ 6qqz
41
+ 6qmt
42
+ 6ibx
43
+ 6hmt
44
+ 5zk7
45
+ 6k3l
46
+ 6cjs
47
+ 6n9l
48
+ 6ibz
49
+ 6ott
50
+ 6gge
51
+ 6hot
52
+ 6e3p
53
+ 6md6
54
+ 6hlb
55
+ 6fe5
56
+ 6uwp
57
+ 6npp
58
+ 6g2f
59
+ 6mo7
60
+ 6bqd
61
+ 6nsv
62
+ 6i76
63
+ 6n53
64
+ 6g2c
65
+ 6eeb
66
+ 6n0m
67
+ 6uvy
68
+ 6ovz
69
+ 6olx
70
+ 6v5l
71
+ 6hhg
72
+ 5zcu
73
+ 6dz2
74
+ 6mjq
75
+ 6efk
76
+ 6s9w
77
+ 6gdy
78
+ 6kqi
79
+ 6ueg
80
+ 6oxt
81
+ 6oy0
82
+ 6qr7
83
+ 6i41
84
+ 6cyg
85
+ 6qmr
86
+ 6g27
87
+ 6ggb
88
+ 6g3c
89
+ 6n4e
90
+ 6fcj
91
+ 6quv
92
+ 6iql
93
+ 6i74
94
+ 6qr4
95
+ 6rnu
96
+ 6jib
97
+ 6izq
98
+ 6qw8
99
+ 6qto
100
+ 6qrd
101
+ 6hza
102
+ 6e5s
103
+ 6dz3
104
+ 6e6w
105
+ 6cyh
106
+ 5zlf
107
+ 6om4
108
+ 6gga
109
+ 6pgp
110
+ 6qqv
111
+ 6qtq
112
+ 6gj6
113
+ 6os5
114
+ 6s07
115
+ 6i77
116
+ 6hhj
117
+ 6ahs
118
+ 6oxx
119
+ 6mjj
120
+ 6hor
121
+ 6jb0
122
+ 6i68
123
+ 6pz4
124
+ 6mhb
125
+ 6uim
126
+ 6jsg
127
+ 6i78
128
+ 6oxy
129
+ 6gbw
130
+ 6mo0
131
+ 6ggf
132
+ 6qge
133
+ 6cjr
134
+ 6oxp
135
+ 6d07
136
+ 6i63
137
+ 6ten
138
+ 6uii
139
+ 6qlr
140
+ 6sen
141
+ 6oxv
142
+ 6g2b
143
+ 5zr3
144
+ 6kjf
145
+ 6qr9
146
+ 6g9f
147
+ 6e6v
148
+ 5zk9
149
+ 6pnn
150
+ 6nri
151
+ 6uwv
152
+ 6ooz
153
+ 6npi
154
+ 6oip
155
+ 6miv
156
+ 6s57
157
+ 6p8x
158
+ 6hoq
159
+ 6qts
160
+ 6ggd
161
+ 6pnm
162
+ 6oy2
163
+ 6oi8
164
+ 6mhd
165
+ 6agt
166
+ 6i5p
167
+ 6hhr
168
+ 6p8z
169
+ 6c85
170
+ 6g5u
171
+ 6j06
172
+ 6qsz
173
+ 6jbb
174
+ 6hhp
175
+ 6np5
176
+ 6nlj
177
+ 6qlp
178
+ 6n94
179
+ 6e13
180
+ 6qls
181
+ 6uil
182
+ 6st3
183
+ 6n92
184
+ 6s56
185
+ 6hzd
186
+ 6uhv
187
+ 6k05
188
+ 6q36
189
+ 6ic0
190
+ 6hhi
191
+ 6e3m
192
+ 6qtx
193
+ 6jse
194
+ 5zjy
195
+ 6o3y
196
+ 6rpg
197
+ 6rr0
198
+ 6gzy
199
+ 6qlt
200
+ 6ufo
201
+ 6o0h
202
+ 6o3x
203
+ 5zjz
204
+ 6i8t
205
+ 6ooy
206
+ 6oiq
207
+ 6od6
208
+ 6nrh
209
+ 6qra
210
+ 6hhh
211
+ 6m7h
212
+ 6ufn
213
+ 6qr0
214
+ 6o5u
215
+ 6h14
216
+ 6jwa
217
+ 6ny0
218
+ 6jan
219
+ 6ftf
220
+ 6oxw
221
+ 6jon
222
+ 6cf7
223
+ 6rtn
224
+ 6jsz
225
+ 6o9c
226
+ 6mo8
227
+ 6qln
228
+ 6qqu
229
+ 6i66
230
+ 6mja
231
+ 6gwe
232
+ 6d3z
233
+ 6oxr
234
+ 6r4k
235
+ 6hle
236
+ 6h9v
237
+ 6hou
238
+ 6nv9
239
+ 6py0
240
+ 6qlq
241
+ 6nv7
242
+ 6n4b
243
+ 6jaq
244
+ 6i8m
245
+ 6dz0
246
+ 6oxs
247
+ 6k2n
248
+ 6cjj
249
+ 6ffg
250
+ 6a73
251
+ 6qqt
252
+ 6a1c
253
+ 6oxu
254
+ 6qre
255
+ 6qtw
256
+ 6np4
257
+ 6hv2
258
+ 6n55
259
+ 6e3o
260
+ 6kjd
261
+ 6sfc
262
+ 6qi7
263
+ 6hzc
264
+ 6k04
265
+ 6op0
266
+ 6q38
267
+ 6n8x
268
+ 6np3
269
+ 6uvv
270
+ 6pgo
271
+ 6jbe
272
+ 6i75
273
+ 6qqq
274
+ 6i62
275
+ 6j9y
276
+ 6g29
277
+ 6h7d
278
+ 6mo9
279
+ 6jao
280
+ 6jmf
281
+ 6hmy
282
+ 6qfe
283
+ 5zml
284
+ 6i65
285
+ 6e7m
286
+ 6i61
287
+ 6rz6
288
+ 6qtm
289
+ 6qlo
290
+ 6oie
291
+ 6miy
292
+ 6nrf
293
+ 6gj5
294
+ 6jad
295
+ 6mj4
296
+ 6h12
297
+ 6d3y
298
+ 6qr2
299
+ 6qxa
300
+ 6o9b
301
+ 6ckl
302
+ 6oir
303
+ 6d40
304
+ 6e6j
305
+ 6i7a
306
+ 6g25
307
+ 6oin
308
+ 6jam
309
+ 6oxz
310
+ 6hop
311
+ 6rot
312
+ 6uhu
313
+ 6mji
314
+ 6nrj
315
+ 6nt2
316
+ 6op9
317
+ 6pno
318
+ 6e4v
319
+ 6k1s
320
+ 6a87
321
+ 6oim
322
+ 6cjp
323
+ 6pyb
324
+ 6h13
325
+ 6qrf
326
+ 6mhc
327
+ 6j9w
328
+ 6nrg
329
+ 6fff
330
+ 6n93
331
+ 6jut
332
+ 6g2e
333
+ 6nd3
334
+ 6os6
335
+ 6dql
336
+ 6inz
337
+ 6i67
338
+ 6quw
339
+ 6qwi
340
+ 6npm
341
+ 6i64
342
+ 6e3n
343
+ 6qrg
344
+ 6nxz
345
+ 6iby
346
+ 6gj7
347
+ 6qr3
348
+ 6qr1
349
+ 6s9x
350
+ 6q4q
351
+ 6hbn
352
+ 6nw3
353
+ 6tel
354
+ 6p8y
355
+ 6d5w
356
+ 6t6a
357
+ 6o5g
358
+ 6r7d
359
+ 6pya
360
+ 6ffe
361
+ 6d3x
362
+ 6gj8
363
+ 6mo2
data/splits/timesplit_test_no_rec_overlap ADDED
@@ -0,0 +1,144 @@
1
+ 6qqw
2
+ 6jap
3
+ 6np2
4
+ 6qrc
5
+ 6oio
6
+ 6jag
7
+ 6i9a
8
+ 6jb4
9
+ 6seo
10
+ 6jid
11
+ 5ze6
12
+ 6pka
13
+ 6n97
14
+ 6qtr
15
+ 6n96
16
+ 6qzh
17
+ 6qqz
18
+ 6k3l
19
+ 6cjs
20
+ 6n9l
21
+ 6ott
22
+ 6npp
23
+ 6nsv
24
+ 6n53
25
+ 6eeb
26
+ 6n0m
27
+ 6ovz
28
+ 5zcu
29
+ 6mjq
30
+ 6efk
31
+ 6gdy
32
+ 6kqi
33
+ 6ueg
34
+ 6qr7
35
+ 6g3c
36
+ 6iql
37
+ 6qr4
38
+ 6jib
39
+ 6qto
40
+ 6qrd
41
+ 6e5s
42
+ 5zlf
43
+ 6om4
44
+ 6qqv
45
+ 6qtq
46
+ 6os5
47
+ 6s07
48
+ 6mjj
49
+ 6jb0
50
+ 6uim
51
+ 6mo0
52
+ 6cjr
53
+ 6uii
54
+ 6sen
55
+ 6kjf
56
+ 6qr9
57
+ 6g9f
58
+ 6npi
59
+ 6oip
60
+ 6miv
61
+ 6qts
62
+ 6oi8
63
+ 6c85
64
+ 6qsz
65
+ 6jbb
66
+ 6np5
67
+ 6nlj
68
+ 6n94
69
+ 6e13
70
+ 6uil
71
+ 6n92
72
+ 6uhv
73
+ 6q36
74
+ 6qtx
75
+ 6rr0
76
+ 6ufo
77
+ 6oiq
78
+ 6qra
79
+ 6m7h
80
+ 6ufn
81
+ 6qr0
82
+ 6o5u
83
+ 6ny0
84
+ 6jan
85
+ 6ftf
86
+ 6jon
87
+ 6cf7
88
+ 6o9c
89
+ 6qqu
90
+ 6mja
91
+ 6r4k
92
+ 6h9v
93
+ 6py0
94
+ 6jaq
95
+ 6k2n
96
+ 6cjj
97
+ 6a73
98
+ 6qqt
99
+ 6qre
100
+ 6qtw
101
+ 6np4
102
+ 6n55
103
+ 6kjd
104
+ 6np3
105
+ 6jbe
106
+ 6qqq
107
+ 6j9y
108
+ 6h7d
109
+ 6jao
110
+ 6e7m
111
+ 6rz6
112
+ 6qtm
113
+ 6miy
114
+ 6jad
115
+ 6mj4
116
+ 6qr2
117
+ 6qxa
118
+ 6o9b
119
+ 6ckl
120
+ 6oir
121
+ 6oin
122
+ 6jam
123
+ 6uhu
124
+ 6mji
125
+ 6nt2
126
+ 6op9
127
+ 6e4v
128
+ 6a87
129
+ 6cjp
130
+ 6qrf
131
+ 6j9w
132
+ 6n93
133
+ 6nd3
134
+ 6os6
135
+ 6dql
136
+ 6qwi
137
+ 6npm
138
+ 6qrg
139
+ 6nxz
140
+ 6qr3
141
+ 6qr1
142
+ 6o5g
143
+ 6r7d
144
+ 6mo2
datasets/__init__.py ADDED
File without changes
datasets/conformer_matching.py ADDED
@@ -0,0 +1,196 @@
1
+ import copy, time
2
+ import numpy as np
3
+ from collections import defaultdict
4
+ from rdkit import Chem, RDLogger
5
+ from rdkit.Chem import AllChem, rdMolTransforms
6
+ from rdkit import Geometry
7
+ import networkx as nx
8
+ from scipy.optimize import differential_evolution
9
+
10
+ RDLogger.DisableLog('rdApp.*')
11
+
12
+ """
13
+ Conformer matching routines from Torsional Diffusion
14
+ """
15
+
16
+ def GetDihedral(conf, atom_idx):
17
+ return rdMolTransforms.GetDihedralRad(conf, atom_idx[0], atom_idx[1], atom_idx[2], atom_idx[3])
18
+
19
+
20
+ def SetDihedral(conf, atom_idx, new_value):
21
+ rdMolTransforms.SetDihedralRad(conf, atom_idx[0], atom_idx[1], atom_idx[2], atom_idx[3], new_value)
22
+
23
+
24
+ def apply_changes(mol, values, rotable_bonds, conf_id):
25
+ opt_mol = copy.copy(mol)
26
+ [SetDihedral(opt_mol.GetConformer(conf_id), rotable_bonds[r], values[r]) for r in range(len(rotable_bonds))]
27
+ return opt_mol
28
+
29
+
30
+ def optimize_rotatable_bonds(mol, true_mol, rotable_bonds, probe_id=-1, ref_id=-1, seed=0, popsize=15, maxiter=500,
31
+ mutation=(0.5, 1), recombination=0.8):
32
+ opt = OptimizeConformer(mol, true_mol, rotable_bonds, seed=seed, probe_id=probe_id, ref_id=ref_id)
33
+ max_bound = [np.pi] * len(opt.rotable_bonds)
34
+ min_bound = [-np.pi] * len(opt.rotable_bonds)
35
+ bounds = (min_bound, max_bound)
36
+ bounds = list(zip(bounds[0], bounds[1]))
37
+
38
+ # Optimize conformations
39
+ result = differential_evolution(opt.score_conformation, bounds,
40
+ maxiter=maxiter, popsize=popsize,
41
+ mutation=mutation, recombination=recombination, disp=False, seed=seed)
42
+ opt_mol = apply_changes(opt.mol, result['x'], opt.rotable_bonds, conf_id=probe_id)
43
+
44
+ return opt_mol
45
+
46
+
47
+ class OptimizeConformer:
48
+ def __init__(self, mol, true_mol, rotable_bonds, probe_id=-1, ref_id=-1, seed=None):
49
+ super(OptimizeConformer, self).__init__()
50
+ if seed is not None:
51
+ np.random.seed(seed)
52
+ self.rotable_bonds = rotable_bonds
53
+ self.mol = mol
54
+ self.true_mol = true_mol
55
+ self.probe_id = probe_id
56
+ self.ref_id = ref_id
57
+
58
+ def score_conformation(self, values):
59
+ for i, r in enumerate(self.rotable_bonds):
60
+ SetDihedral(self.mol.GetConformer(self.probe_id), r, values[i])
61
+ return RMSD(self.mol, self.true_mol, self.probe_id, self.ref_id)
62
+
63
+
64
+ def get_torsion_angles(mol):
65
+ torsions_list = []
66
+ G = nx.Graph()
67
+ for i, atom in enumerate(mol.GetAtoms()):
68
+ G.add_node(i)
69
+ nodes = set(G.nodes())
70
+ for bond in mol.GetBonds():
71
+ start, end = bond.GetBeginAtomIdx(), bond.GetEndAtomIdx()
72
+ G.add_edge(start, end)
73
+ for e in G.edges():
74
+ G2 = copy.deepcopy(G)
75
+ G2.remove_edge(*e)
76
+ if nx.is_connected(G2): continue
77
+ l = list(sorted(nx.connected_components(G2), key=len)[0])
78
+ if len(l) < 2: continue
79
+ n0 = list(G2.neighbors(e[0]))
80
+ n1 = list(G2.neighbors(e[1]))
81
+ torsions_list.append(
82
+ (n0[0], e[0], e[1], n1[0])
83
+ )
84
+ return torsions_list
85
+
86
+
87
+ # GeoMol
88
+ def get_torsions(mol_list):
89
+ print('USING GEOMOL GET TORSIONS FUNCTION')
90
+ atom_counter = 0
91
+ torsionList = []
92
+ for m in mol_list:
93
+ torsionSmarts = '[!$(*#*)&!D1]-&!@[!$(*#*)&!D1]'
94
+ torsionQuery = Chem.MolFromSmarts(torsionSmarts)
95
+ matches = m.GetSubstructMatches(torsionQuery)
96
+ for match in matches:
97
+ idx2 = match[0]
98
+ idx3 = match[1]
99
+ bond = m.GetBondBetweenAtoms(idx2, idx3)
100
+ jAtom = m.GetAtomWithIdx(idx2)
101
+ kAtom = m.GetAtomWithIdx(idx3)
102
+ for b1 in jAtom.GetBonds():
103
+ if (b1.GetIdx() == bond.GetIdx()):
104
+ continue
105
+ idx1 = b1.GetOtherAtomIdx(idx2)
106
+ for b2 in kAtom.GetBonds():
107
+ if ((b2.GetIdx() == bond.GetIdx())
108
+ or (b2.GetIdx() == b1.GetIdx())):
109
+ continue
110
+ idx4 = b2.GetOtherAtomIdx(idx3)
111
+ # skip 3-membered rings
112
+ if (idx4 == idx1):
113
+ continue
114
+ if m.GetAtomWithIdx(idx4).IsInRing():
115
+ torsionList.append(
116
+ (idx4 + atom_counter, idx3 + atom_counter, idx2 + atom_counter, idx1 + atom_counter))
117
+ break
118
+ else:
119
+ torsionList.append(
120
+ (idx1 + atom_counter, idx2 + atom_counter, idx3 + atom_counter, idx4 + atom_counter))
121
+ break
122
+ break
123
+
124
+ atom_counter += m.GetNumAtoms()
125
+ return torsionList
126
+
127
+
128
+ def A_transpose_matrix(alpha):
129
+ return np.array([[np.cos(alpha), np.sin(alpha)], [-np.sin(alpha), np.cos(alpha)]], dtype=np.double)
130
+
131
+
132
+ def S_vec(alpha):
133
+ return np.array([[np.cos(alpha)], [np.sin(alpha)]], dtype=np.double)
134
+
135
+
136
+ def GetDihedralFromPointCloud(Z, atom_idx):
137
+ p = Z[list(atom_idx)]
138
+ b = p[:-1] - p[1:]
139
+ b[0] *= -1
140
+ v = np.array([v - (v.dot(b[1]) / b[1].dot(b[1])) * b[1] for v in [b[0], b[2]]])
141
+ # Normalize vectors
142
+ v /= np.sqrt(np.einsum('...i,...i', v, v)).reshape(-1, 1)
143
+ b1 = b[1] / np.linalg.norm(b[1])
144
+ x = np.dot(v[0], v[1])
145
+ m = np.cross(v[0], b1)
146
+ y = np.dot(m, v[1])
147
+ return np.arctan2(y, x)
148
+
149
+
150
+ def get_dihedral_vonMises(mol, conf, atom_idx, Z):
151
+ Z = np.array(Z)
152
+ v = np.zeros((2, 1))
153
+ iAtom = mol.GetAtomWithIdx(atom_idx[1])
154
+ jAtom = mol.GetAtomWithIdx(atom_idx[2])
155
+ k_0 = atom_idx[0]
156
+ i = atom_idx[1]
157
+ j = atom_idx[2]
158
+ l_0 = atom_idx[3]
159
+ for b1 in iAtom.GetBonds():
160
+ k = b1.GetOtherAtomIdx(i)
161
+ if k == j:
162
+ continue
163
+ for b2 in jAtom.GetBonds():
164
+ l = b2.GetOtherAtomIdx(j)
165
+ if l == i:
166
+ continue
167
+ assert k != l
168
+ s_star = S_vec(GetDihedralFromPointCloud(Z, (k, i, j, l)))
169
+ a_mat = A_transpose_matrix(GetDihedral(conf, (k, i, j, k_0)) + GetDihedral(conf, (l_0, i, j, l)))
170
+ v = v + np.matmul(a_mat, s_star)
171
+ v = v / np.linalg.norm(v)
172
+ v = v.reshape(-1)
173
+ return np.arctan2(v[1], v[0])
174
+
175
+
176
+ def get_von_mises_rms(mol, mol_rdkit, rotable_bonds, conf_id):
177
+ new_dihedrals = np.zeros(len(rotable_bonds))
178
+ for idx, r in enumerate(rotable_bonds):
179
+ new_dihedrals[idx] = get_dihedral_vonMises(mol_rdkit,
180
+ mol_rdkit.GetConformer(conf_id), r,
181
+ mol.GetConformer().GetPositions())
182
+ mol_rdkit = apply_changes(mol_rdkit, new_dihedrals, rotable_bonds, conf_id)
183
+ return RMSD(mol_rdkit, mol, conf_id)
184
+
185
+
186
+ def mmff_func(mol):
187
+ mol_mmff = copy.deepcopy(mol)
188
+ AllChem.MMFFOptimizeMoleculeConfs(mol_mmff, mmffVariant='MMFF94s')
189
+ for i in range(mol.GetNumConformers()):
190
+ coords = mol_mmff.GetConformers()[i].GetPositions()
191
+ for j in range(coords.shape[0]):
192
+ mol.GetConformer(i).SetAtomPosition(j,
193
+ Geometry.Point3D(*coords[j]))
194
+
195
+
196
+ RMSD = AllChem.AlignMol
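
A minimal usage sketch of the conformer-matching utilities above (illustrative only; the SMILES string and optimizer settings are arbitrary choices, not values taken from this repository). It embeds two independent conformers of the same molecule, enumerates the rotatable torsions, and lets optimize_rotatable_bonds rotate the probe's dihedrals to minimize RMSD against the reference:

    from rdkit import Chem
    from rdkit.Chem import AllChem
    from datasets.conformer_matching import get_torsion_angles, optimize_rotatable_bonds, RMSD

    # Build a reference and a probe conformer of the same (arbitrary) molecule.
    smiles = "CCCC(=O)NC1CCCCC1"
    true_mol = Chem.AddHs(Chem.MolFromSmiles(smiles))
    probe = Chem.AddHs(Chem.MolFromSmiles(smiles))
    AllChem.EmbedMolecule(true_mol, randomSeed=0)
    AllChem.EmbedMolecule(probe, randomSeed=1)

    # Enumerate freely rotatable (non-ring) torsions, then match them.
    rotable_bonds = get_torsion_angles(probe)
    matched = optimize_rotatable_bonds(probe, true_mol, rotable_bonds,
                                       popsize=15, maxiter=100, seed=0)
    print("RMSD after torsion matching:", RMSD(matched, true_mol))
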
datasets/esm_embedding_preparation.py ADDED
@@ -0,0 +1,88 @@
1
+ import os
2
+ from argparse import FileType, ArgumentParser
3
+
4
+ import numpy as np
5
+ import pandas as pd
6
+ from Bio.PDB import PDBParser
7
+ from Bio.Seq import Seq
8
+ from Bio.SeqRecord import SeqRecord
9
+ from tqdm import tqdm
10
+ from Bio import SeqIO
11
+
12
+
13
+
14
+ def esm_embedding_prep(out_file, protein_path):
15
+ biopython_parser = PDBParser()
16
+
17
+ three_to_one = {
18
+ "ALA": "A",
19
+ "ARG": "R",
20
+ "ASN": "N",
21
+ "ASP": "D",
22
+ "CYS": "C",
23
+ "GLN": "Q",
24
+ "GLU": "E",
25
+ "GLY": "G",
26
+ "HIS": "H",
27
+ "ILE": "I",
28
+ "LEU": "L",
29
+ "LYS": "K",
30
+ "MET": "M",
31
+ "MSE": "M", # MSE is essentially the same AA as MET; the sulfur is just replaced by selenium
32
+ "PHE": "F",
33
+ "PRO": "P",
34
+ "PYL": "O",
35
+ "SER": "S",
36
+ "SEC": "U",
37
+ "THR": "T",
38
+ "TRP": "W",
39
+ "TYR": "Y",
40
+ "VAL": "V",
41
+ "ASX": "B",
42
+ "GLX": "Z",
43
+ "XAA": "X",
44
+ "XLE": "J",
45
+ }
46
+
47
+ file_paths = [protein_path]
48
+ sequences = []
49
+ ids = []
50
+ for file_path in tqdm(file_paths):
51
+ structure = biopython_parser.get_structure("random_id", file_path)
52
+ structure = structure[0]
53
+ for i, chain in enumerate(structure):
54
+ seq = ""
55
+ for res_idx, residue in enumerate(chain):
56
+ if residue.get_resname() == "HOH":
57
+ continue
58
+ residue_coords = []
59
+ c_alpha, n, c = None, None, None
60
+ for atom in residue:
61
+ if atom.name == "CA":
62
+ c_alpha = list(atom.get_vector())
63
+ if atom.name == "N":
64
+ n = list(atom.get_vector())
65
+ if atom.name == "C":
66
+ c = list(atom.get_vector())
67
+ if (
68
+ c_alpha is not None and n is not None and c is not None
69
+ ): # only append residue if it is an amino acid
70
+ try:
71
+ seq += three_to_one[residue.get_resname()]
72
+ except Exception as e:
73
+ seq += "-"
74
+ print(
75
+ "encountered unknown AA: ",
76
+ residue.get_resname(),
77
+ " in the complex ",
78
+ file_path,
79
+ ". Replacing it with a dash '-'.",
80
+ )
81
+ sequences.append(seq)
82
+ ids.append(f"{os.path.basename(file_path)}_chain_{i}")
83
+ records = []
84
+ for (index, seq) in zip(ids, sequences):
85
+ record = SeqRecord(Seq(seq), str(index))
86
+ record.description = ""
87
+ records.append(record)
88
+ SeqIO.write(records, out_file, "fasta")
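
For orientation, a hedged example call for the helper above (both paths are placeholders, not repository defaults). It writes one FASTA record per chain, with IDs of the form <pdb_basename>_chain_<i> as constructed in the loop above:

    from datasets.esm_embedding_preparation import esm_embedding_prep

    # Placeholder paths for illustration only.
    esm_embedding_prep("data/prepared_for_esm.fasta", "data/example_protein.pdb")
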
datasets/esm_embeddings_to_pt.py ADDED
@@ -0,0 +1,17 @@
1
+
2
+ import os
3
+ from argparse import ArgumentParser
4
+
5
+ import torch
6
+ from tqdm import tqdm
7
+
8
+
9
+ parser = ArgumentParser()
10
+ parser.add_argument('--esm_embeddings_path', type=str, default='data/embeddings_output', help='')
11
+ parser.add_argument('--output_path', type=str, default='data/esm2_3billion_embeddings.pt', help='')
12
+ args = parser.parse_args()
13
+
14
+ embeddings = {}
15
+ for filename in tqdm(os.listdir(args.esm_embeddings_path)):
16
+ embeddings[filename.split('.')[0]] = torch.load(os.path.join(args.esm_embeddings_path, filename))['representations'][33]
17
+ torch.save(embeddings, args.output_path)
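
As a quick sanity check of the aggregation above, the resulting file loads back as an ordinary dict of per-chain tensors (the path below is simply the script's own default --output_path):

    import torch

    embeddings = torch.load("data/esm2_3billion_embeddings.pt")
    print(len(embeddings), "chains loaded")
    example = next(iter(embeddings.values()))
    print("example embedding shape:", tuple(example.shape))
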
datasets/pdbbind.py ADDED
@@ -0,0 +1,705 @@
+ import binascii
+ import glob
+ import hashlib
+ import os
+ import pickle
+ from collections import defaultdict
+ from multiprocessing import Pool
+ import random
+ import copy
+
+ import numpy as np
+ import torch
+ from rdkit.Chem import MolToSmiles, MolFromSmiles, AddHs
+ from torch_geometric.data import Dataset, HeteroData
+ from torch_geometric.loader import DataLoader, DataListLoader
+ from torch_geometric.transforms import BaseTransform
+ from tqdm import tqdm
+
+ from datasets.process_mols import (read_molecule, get_rec_graph, generate_conformer,
+                                    get_lig_graph_with_matching, extract_receptor_structure,
+                                    parse_receptor, parse_pdb_from_path)
+ from utils.diffusion_utils import modify_conformer, set_time
+ from utils.utils import read_strings_from_txt
+ from utils import so3, torus
+
+
+ class NoiseTransform(BaseTransform):
+     def __init__(self, t_to_sigma, no_torsion, all_atom):
+         self.t_to_sigma = t_to_sigma
+         self.no_torsion = no_torsion
+         self.all_atom = all_atom
+
+     def __call__(self, data):
+         t = np.random.uniform()
+         t_tr, t_rot, t_tor = t, t, t
+         return self.apply_noise(data, t_tr, t_rot, t_tor)
+
+     def apply_noise(self, data, t_tr, t_rot, t_tor, tr_update=None, rot_update=None, torsion_updates=None):
+         if not torch.is_tensor(data["ligand"].pos):
+             data["ligand"].pos = random.choice(data["ligand"].pos)
+
+         tr_sigma, rot_sigma, tor_sigma = self.t_to_sigma(t_tr, t_rot, t_tor)
+         set_time(data, t_tr, t_rot, t_tor, 1, self.all_atom, device=None)
+
+         tr_update = torch.normal(mean=0, std=tr_sigma, size=(1, 3)) if tr_update is None else tr_update
+         rot_update = so3.sample_vec(eps=rot_sigma) if rot_update is None else rot_update
+         torsion_updates = (
+             np.random.normal(loc=0.0, scale=tor_sigma, size=data["ligand"].edge_mask.sum())
+             if torsion_updates is None
+             else torsion_updates
+         )
+         torsion_updates = None if self.no_torsion else torsion_updates
+         modify_conformer(data, tr_update, torch.from_numpy(rot_update).float(), torsion_updates)
+
+         data.tr_score = -tr_update / tr_sigma**2
+         data.rot_score = torch.from_numpy(so3.score_vec(vec=rot_update, eps=rot_sigma)).float().unsqueeze(0)
+         data.tor_score = None if self.no_torsion else torch.from_numpy(torus.score(torsion_updates, tor_sigma)).float()
+         data.tor_sigma_edge = None if self.no_torsion else np.ones(data["ligand"].edge_mask.sum()) * tor_sigma
+         return data
+
+
+ class PDBBind(Dataset):
+     def __init__(self, root, transform=None, cache_path="data/cache", split_path="data/",
+                  limit_complexes=0, receptor_radius=30, num_workers=1, c_alpha_max_neighbors=None,
+                  popsize=15, maxiter=15, matching=True, keep_original=False, max_lig_size=None,
+                  remove_hs=False, num_conformers=1, all_atoms=False, atom_radius=5,
+                  atom_max_neighbors=None, esm_embeddings_path=None, require_ligand=False,
+                  ligands_list=None, protein_path_list=None, ligand_descriptions=None,
+                  keep_local_structures=False):
+         super(PDBBind, self).__init__(root, transform)
+         self.pdbbind_dir = root
+         self.max_lig_size = max_lig_size
+         self.split_path = split_path
+         self.limit_complexes = limit_complexes
+         self.receptor_radius = receptor_radius
+         self.num_workers = num_workers
+         self.c_alpha_max_neighbors = c_alpha_max_neighbors
+         self.remove_hs = remove_hs
+         self.esm_embeddings_path = esm_embeddings_path
+         self.require_ligand = require_ligand
+         self.protein_path_list = protein_path_list
+         self.ligand_descriptions = ligand_descriptions
+         self.keep_local_structures = keep_local_structures
+         if matching or (protein_path_list is not None and ligand_descriptions is not None):
+             cache_path += "_torsion"
+         if all_atoms:
+             cache_path += "_allatoms"
+         self.full_cache_path = os.path.join(
+             cache_path,
+             f"limit{self.limit_complexes}"
+             f"_INDEX{os.path.splitext(os.path.basename(self.split_path))[0]}"
+             f"_maxLigSize{self.max_lig_size}_H{int(not self.remove_hs)}"
+             f"_recRad{self.receptor_radius}_recMax{self.c_alpha_max_neighbors}"
+             + ("" if not all_atoms else f"_atomRad{atom_radius}_atomMax{atom_max_neighbors}")
+             + ("" if not matching or num_conformers == 1 else f"_confs{num_conformers}")
+             + ("" if self.esm_embeddings_path is None else "_esmEmbeddings")
+             + ("" if not keep_local_structures else "_keptLocalStruct")
+             + ("" if protein_path_list is None or ligand_descriptions is None
+                else str(binascii.crc32("".join(ligand_descriptions + protein_path_list).encode()))),
+         )
+         self.popsize, self.maxiter = popsize, maxiter
+         self.matching, self.keep_original = matching, keep_original
+         self.num_conformers = num_conformers
+         self.all_atoms = all_atoms
+         self.atom_radius, self.atom_max_neighbors = atom_radius, atom_max_neighbors
+         if not os.path.exists(os.path.join(self.full_cache_path, "heterographs.pkl")) or (
+             require_ligand and not os.path.exists(os.path.join(self.full_cache_path, "rdkit_ligands.pkl"))
+         ):
+             os.makedirs(self.full_cache_path, exist_ok=True)
+             if protein_path_list is None or ligand_descriptions is None:
+                 self.preprocessing()
+             else:
+                 self.inference_preprocessing()
+
+         print("loading data from memory: ", os.path.join(self.full_cache_path, "heterographs.pkl"))
+         with open(os.path.join(self.full_cache_path, "heterographs.pkl"), "rb") as f:
+             self.complex_graphs = pickle.load(f)
+         if require_ligand:
+             with open(os.path.join(self.full_cache_path, "rdkit_ligands.pkl"), "rb") as f:
+                 self.rdkit_ligands = pickle.load(f)
+
+         print_statistics(self.complex_graphs)
+
+     def len(self):
+         return len(self.complex_graphs)
+
+     def get(self, idx):
+         if self.require_ligand:
+             complex_graph = copy.deepcopy(self.complex_graphs[idx])
+             complex_graph.mol = copy.deepcopy(self.rdkit_ligands[idx])
+             return complex_graph
+         else:
+             return copy.deepcopy(self.complex_graphs[idx])
+
+     def preprocessing(self):
+         print(f"Processing complexes from [{self.split_path}] and saving them to [{self.full_cache_path}]")
+
+         complex_names_all = read_strings_from_txt(self.split_path)
+         if self.limit_complexes is not None and self.limit_complexes != 0:
+             complex_names_all = complex_names_all[: self.limit_complexes]
+         print(f"Loading {len(complex_names_all)} complexes.")
+
+         if self.esm_embeddings_path is not None:
+             # embedding keys look like '<pdbid>_chain_<i>'; regroup the per-chain tensors by pdbid
+             id_to_embeddings = torch.load(self.esm_embeddings_path)
+             chain_embeddings_dictlist = defaultdict(list)
+             for key, embedding in id_to_embeddings.items():
+                 key_name = key.split("_")[0]
+                 if key_name in complex_names_all:
+                     chain_embeddings_dictlist[key_name].append(embedding)
+             lm_embeddings_chains_all = []
+             for name in complex_names_all:
+                 lm_embeddings_chains_all.append(chain_embeddings_dictlist[name])
+         else:
+             lm_embeddings_chains_all = [None] * len(complex_names_all)
+
+         if self.num_workers > 1:
+             # running preprocessing in parallel on multiple workers and saving the progress every 1000 complexes
+             for i in range(len(complex_names_all) // 1000 + 1):
+                 if os.path.exists(os.path.join(self.full_cache_path, f"heterographs{i}.pkl")):
+                     continue
+                 complex_names = complex_names_all[1000 * i : 1000 * (i + 1)]
+                 lm_embeddings_chains = lm_embeddings_chains_all[1000 * i : 1000 * (i + 1)]
+                 complex_graphs, rdkit_ligands = [], []
+                 if self.num_workers > 1:
+                     p = Pool(self.num_workers, maxtasksperchild=1)
+                     p.__enter__()
+                 with tqdm(total=len(complex_names), desc=f"loading complexes {i}/{len(complex_names_all)//1000+1}") as pbar:
+                     map_fn = p.imap_unordered if self.num_workers > 1 else map
+                     for t in map_fn(self.get_complex, zip(complex_names, lm_embeddings_chains,
+                                                           [None] * len(complex_names), [None] * len(complex_names))):
+                         complex_graphs.extend(t[0])
+                         rdkit_ligands.extend(t[1])
+                         pbar.update()
+                 if self.num_workers > 1:
+                     p.__exit__(None, None, None)
+
+                 with open(os.path.join(self.full_cache_path, f"heterographs{i}.pkl"), "wb") as f:
+                     pickle.dump(complex_graphs, f)
+                 with open(os.path.join(self.full_cache_path, f"rdkit_ligands{i}.pkl"), "wb") as f:
+                     pickle.dump(rdkit_ligands, f)
+
+             complex_graphs_all = []
+             for i in range(len(complex_names_all) // 1000 + 1):
+                 with open(os.path.join(self.full_cache_path, f"heterographs{i}.pkl"), "rb") as f:
+                     complex_graphs_all.extend(pickle.load(f))
+             with open(os.path.join(self.full_cache_path, "heterographs.pkl"), "wb") as f:
+                 pickle.dump(complex_graphs_all, f)
+
+             rdkit_ligands_all = []
+             for i in range(len(complex_names_all) // 1000 + 1):
+                 with open(os.path.join(self.full_cache_path, f"rdkit_ligands{i}.pkl"), "rb") as f:
+                     rdkit_ligands_all.extend(pickle.load(f))
+             with open(os.path.join(self.full_cache_path, "rdkit_ligands.pkl"), "wb") as f:
+                 pickle.dump(rdkit_ligands_all, f)
+         else:
+             complex_graphs, rdkit_ligands = [], []
+             with tqdm(total=len(complex_names_all), desc="loading complexes") as pbar:
+                 for t in map(self.get_complex, zip(complex_names_all, lm_embeddings_chains_all,
+                                                    [None] * len(complex_names_all), [None] * len(complex_names_all))):
+                     complex_graphs.extend(t[0])
+                     rdkit_ligands.extend(t[1])
+                     pbar.update()
+             with open(os.path.join(self.full_cache_path, "heterographs.pkl"), "wb") as f:
+                 pickle.dump(complex_graphs, f)
+             with open(os.path.join(self.full_cache_path, "rdkit_ligands.pkl"), "wb") as f:
+                 pickle.dump(rdkit_ligands, f)
+
+     def inference_preprocessing(self):
+         ligands_list = []
+         print("Reading molecules and generating local structures with RDKit")
+         for ligand_description in tqdm(self.ligand_descriptions):
+             mol = MolFromSmiles(ligand_description)  # check whether it is a SMILES string or a file path
+             if mol is not None:
+                 mol = AddHs(mol)
+                 generate_conformer(mol)
+                 ligands_list.append(mol)
+             else:
+                 mol = read_molecule(ligand_description, remove_hs=False, sanitize=True)
+                 if not self.keep_local_structures:
+                     mol.RemoveAllConformers()
+                     mol = AddHs(mol)
+                     generate_conformer(mol)
+                 ligands_list.append(mol)
+
+         if self.esm_embeddings_path is not None:
+             print("Reading language model embeddings.")
+             lm_embeddings_chains_all = []
+             if not os.path.exists(self.esm_embeddings_path):
+                 raise Exception("ESM embeddings path does not exist: ", self.esm_embeddings_path)
+             for protein_path in self.protein_path_list:
+                 embeddings_paths = sorted(glob.glob(os.path.join(self.esm_embeddings_path, os.path.basename(protein_path)) + "*"))
+                 lm_embeddings_chains = []
+                 for embeddings_path in embeddings_paths:
+                     lm_embeddings_chains.append(torch.load(embeddings_path)["representations"][33])
+                 lm_embeddings_chains_all.append(lm_embeddings_chains)
+         else:
+             lm_embeddings_chains_all = [None] * len(self.protein_path_list)
+
+         print("Generating graphs for ligands and proteins")
+         if self.num_workers > 1:
+             # running preprocessing in parallel on multiple workers and saving the progress every 1000 complexes
+             for i in range(len(self.protein_path_list) // 1000 + 1):
+                 if os.path.exists(os.path.join(self.full_cache_path, f"heterographs{i}.pkl")):
+                     continue
+                 protein_paths_chunk = self.protein_path_list[1000 * i : 1000 * (i + 1)]
+                 ligand_description_chunk = self.ligand_descriptions[1000 * i : 1000 * (i + 1)]
+                 ligands_chunk = ligands_list[1000 * i : 1000 * (i + 1)]
+                 lm_embeddings_chains = lm_embeddings_chains_all[1000 * i : 1000 * (i + 1)]
+                 complex_graphs, rdkit_ligands = [], []
+                 if self.num_workers > 1:
+                     p = Pool(self.num_workers, maxtasksperchild=1)
+                     p.__enter__()
+                 with tqdm(total=len(protein_paths_chunk), desc=f"loading complexes {i}/{len(self.protein_path_list)//1000+1}") as pbar:
+                     map_fn = p.imap_unordered if self.num_workers > 1 else map
+                     for t in map_fn(self.get_complex, zip(protein_paths_chunk, lm_embeddings_chains,
+                                                           ligands_chunk, ligand_description_chunk)):
+                         complex_graphs.extend(t[0])
+                         rdkit_ligands.extend(t[1])
+                         pbar.update()
+                 if self.num_workers > 1:
+                     p.__exit__(None, None, None)
+
+                 with open(os.path.join(self.full_cache_path, f"heterographs{i}.pkl"), "wb") as f:
+                     pickle.dump(complex_graphs, f)
+                 with open(os.path.join(self.full_cache_path, f"rdkit_ligands{i}.pkl"), "wb") as f:
+                     pickle.dump(rdkit_ligands, f)
+
+             complex_graphs_all = []
+             for i in range(len(self.protein_path_list) // 1000 + 1):
+                 with open(os.path.join(self.full_cache_path, f"heterographs{i}.pkl"), "rb") as f:
+                     complex_graphs_all.extend(pickle.load(f))
+             with open(os.path.join(self.full_cache_path, "heterographs.pkl"), "wb") as f:
+                 pickle.dump(complex_graphs_all, f)
+
+             rdkit_ligands_all = []
+             for i in range(len(self.protein_path_list) // 1000 + 1):
+                 with open(os.path.join(self.full_cache_path, f"rdkit_ligands{i}.pkl"), "rb") as f:
+                     rdkit_ligands_all.extend(pickle.load(f))
+             with open(os.path.join(self.full_cache_path, "rdkit_ligands.pkl"), "wb") as f:
+                 pickle.dump(rdkit_ligands_all, f)
+         else:
+             complex_graphs, rdkit_ligands = [], []
+             with tqdm(total=len(self.protein_path_list), desc="loading complexes") as pbar:
+                 for t in map(self.get_complex, zip(self.protein_path_list, lm_embeddings_chains_all,
+                                                    ligands_list, self.ligand_descriptions)):
+                     complex_graphs.extend(t[0])
+                     rdkit_ligands.extend(t[1])
+                     pbar.update()
+             with open(os.path.join(self.full_cache_path, "heterographs.pkl"), "wb") as f:
+                 pickle.dump(complex_graphs, f)
+             with open(os.path.join(self.full_cache_path, "rdkit_ligands.pkl"), "wb") as f:
+                 pickle.dump(rdkit_ligands, f)
+
+     def get_complex(self, par):
+         name, lm_embedding_chains, ligand, ligand_description = par
+         if not os.path.exists(os.path.join(self.pdbbind_dir, name)) and ligand is None:
+             print("Folder not found", name)
+             return [], []
+
+         if ligand is not None:
+             rec_model = parse_pdb_from_path(name)
+             name = f"{name}____{ligand_description}"
+             ligs = [ligand]
+         else:
+             try:
+                 rec_model = parse_receptor(name, self.pdbbind_dir)
+             except Exception as e:
+                 print(f"Skipping {name} because of the error:")
+                 print(e)
+                 return [], []
+
+             ligs = read_mols(self.pdbbind_dir, name, remove_hs=False)
+         complex_graphs = []
+         for i, lig in enumerate(ligs):
+             if self.max_lig_size is not None and lig.GetNumHeavyAtoms() > self.max_lig_size:
+                 print(f"Ligand with {lig.GetNumHeavyAtoms()} heavy atoms is larger than max_lig_size {self.max_lig_size}. Not including {name} in preprocessed data.")
+                 continue
+             complex_graph = HeteroData()
+             complex_graph["name"] = name
+             try:
+                 get_lig_graph_with_matching(lig, complex_graph, self.popsize, self.maxiter, self.matching,
+                                             self.keep_original, self.num_conformers, remove_hs=self.remove_hs)
+                 rec, rec_coords, c_alpha_coords, n_coords, c_coords, lm_embeddings = extract_receptor_structure(
+                     copy.deepcopy(rec_model), lig, lm_embedding_chains=lm_embedding_chains)
+                 if lm_embeddings is not None and len(c_alpha_coords) != len(lm_embeddings):
+                     print(f"LM embeddings for complex {name} did not have the right length for the protein. Skipping {name}.")
+                     continue
+
+                 get_rec_graph(rec, rec_coords, c_alpha_coords, n_coords, c_coords, complex_graph,
+                               rec_radius=self.receptor_radius, c_alpha_max_neighbors=self.c_alpha_max_neighbors,
+                               all_atoms=self.all_atoms, atom_radius=self.atom_radius,
+                               atom_max_neighbors=self.atom_max_neighbors, remove_hs=self.remove_hs,
+                               lm_embeddings=lm_embeddings)
+
+             except Exception as e:
+                 print(f"Skipping {name} because of the error:")
+                 print(e)
+                 continue
+
+             protein_center = torch.mean(complex_graph["receptor"].pos, dim=0, keepdim=True)
+             complex_graph["receptor"].pos -= protein_center
+             if self.all_atoms:
+                 complex_graph["atom"].pos -= protein_center
+
+             if (not self.matching) or self.num_conformers == 1:
+                 complex_graph["ligand"].pos -= protein_center
+             else:
+                 for p in complex_graph["ligand"].pos:
+                     p -= protein_center
+
+             complex_graph.original_center = protein_center
+             complex_graphs.append(complex_graph)
+         return complex_graphs, ligs
+
+
+ def print_statistics(complex_graphs):
+     statistics = ([], [], [], [])
+
+     for complex_graph in complex_graphs:
+         lig_pos = complex_graph["ligand"].pos if torch.is_tensor(complex_graph["ligand"].pos) else complex_graph["ligand"].pos[0]
+         radius_protein = torch.max(torch.linalg.vector_norm(complex_graph["receptor"].pos, dim=1))
+         molecule_center = torch.mean(lig_pos, dim=0)
+         radius_molecule = torch.max(torch.linalg.vector_norm(lig_pos - molecule_center.unsqueeze(0), dim=1))
+         distance_center = torch.linalg.vector_norm(molecule_center)
+         statistics[0].append(radius_protein)
+         statistics[1].append(radius_molecule)
+         statistics[2].append(distance_center)
+         if "rmsd_matching" in complex_graph:
+             statistics[3].append(complex_graph.rmsd_matching)
+         else:
+             statistics[3].append(0)
+
+     name = ["radius protein", "radius molecule", "distance protein-mol", "rmsd matching"]
+     print("Number of complexes: ", len(complex_graphs))
+     for i in range(4):
+         array = np.asarray(statistics[i])
+         print(f"{name[i]}: mean {np.mean(array)}, std {np.std(array)}, max {np.max(array)}")
+
+
+ def construct_loader(args, t_to_sigma):
+     transform = NoiseTransform(t_to_sigma=t_to_sigma, no_torsion=args.no_torsion, all_atom=args.all_atoms)
+
+     common_args = {
+         "transform": transform,
+         "root": args.data_dir,
+         "limit_complexes": args.limit_complexes,
+         "receptor_radius": args.receptor_radius,
+         "c_alpha_max_neighbors": args.c_alpha_max_neighbors,
+         "remove_hs": args.remove_hs,
+         "max_lig_size": args.max_lig_size,
+         "matching": not args.no_torsion,
+         "popsize": args.matching_popsize,
+         "maxiter": args.matching_maxiter,
+         "num_workers": args.num_workers,
+         "all_atoms": args.all_atoms,
+         "atom_radius": args.atom_radius,
+         "atom_max_neighbors": args.atom_max_neighbors,
+         "esm_embeddings_path": args.esm_embeddings_path,
+     }
+
+     train_dataset = PDBBind(cache_path=args.cache_path, split_path=args.split_train, keep_original=True,
+                             num_conformers=args.num_conformers, **common_args)
+     val_dataset = PDBBind(cache_path=args.cache_path, split_path=args.split_val, keep_original=True, **common_args)
+
+     loader_class = DataListLoader if torch.cuda.is_available() else DataLoader
+     train_loader = loader_class(dataset=train_dataset, batch_size=args.batch_size,
+                                 num_workers=args.num_dataloader_workers, shuffle=True, pin_memory=args.pin_memory)
+     val_loader = loader_class(dataset=val_dataset, batch_size=args.batch_size,
+                               num_workers=args.num_dataloader_workers, shuffle=True, pin_memory=args.pin_memory)
+
+     return train_loader, val_loader
+
+
+ def read_mol(pdbbind_dir, name, remove_hs=False):
+     lig = read_molecule(os.path.join(pdbbind_dir, name, f"{name}_ligand.sdf"), remove_hs=remove_hs, sanitize=True)
+     if lig is None:  # read the mol2 file if the sdf file cannot be sanitized
+         lig = read_molecule(os.path.join(pdbbind_dir, name, f"{name}_ligand.mol2"), remove_hs=remove_hs, sanitize=True)
+     return lig
+
+
+ def read_mols(pdbbind_dir, name, remove_hs=False):
+     ligs = []
+     for file in os.listdir(os.path.join(pdbbind_dir, name)):
+         if file.endswith(".sdf") and "rdkit" not in file:
+             lig = read_molecule(os.path.join(pdbbind_dir, name, file), remove_hs=remove_hs, sanitize=True)
+             if lig is None and os.path.exists(os.path.join(pdbbind_dir, name, file[:-4] + ".mol2")):
+                 # read the mol2 file if the sdf file cannot be sanitized
+                 print("Using the .sdf file failed. We found a .mol2 file instead and are trying to use that.")
+                 lig = read_molecule(os.path.join(pdbbind_dir, name, file[:-4] + ".mol2"), remove_hs=remove_hs, sanitize=True)
+             if lig is not None:
+                 ligs.append(lig)
+     return ligs
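For orientation, a minimal usage sketch of the inference path of this dataset (hypothetical receptor path and SMILES; all keyword arguments appear in the constructor above). Passing `protein_path_list` and `ligand_descriptions` routes preprocessing through `inference_preprocessing` instead of the PDBBind split files:

from datasets.pdbbind import PDBBind

dataset = PDBBind(
    root='',                                        # PDBBind directory; unused when explicit protein paths are given
    cache_path='data/cache',
    protein_path_list=['data/my_protein.pdb'],      # hypothetical receptor file
    ligand_descriptions=['CC(=O)Oc1ccccc1C(=O)O'],  # a SMILES string or a ligand file path
    require_ligand=True,
)
complex_graph = dataset.get(0)  # HeteroData with 'ligand' and 'receptor' node types and the RDKit mol attached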
datasets/pdbbind_lm_embedding_preparation.py ADDED
@@ -0,0 +1,94 @@
+ import os
+ from argparse import FileType, ArgumentParser
+
+ import numpy as np
+ from Bio import SeqIO
+ from Bio.PDB import PDBParser
+ from Bio.Seq import Seq
+ from Bio.SeqRecord import SeqRecord
+ from tqdm import tqdm
+
+ parser = ArgumentParser()
+ parser.add_argument('--data_dir', type=str, default='data/PDBBind_processed', help='')
+ parser.add_argument('--chain_cutoff', type=int, default=10, help='')
+ parser.add_argument('--out_file', type=str, default="data/pdbbind_sequences.fasta")
+ args = parser.parse_args()
+
+ cutoff = args.chain_cutoff
+ data_dir = args.data_dir
+ names = os.listdir(data_dir)
+
+ biopython_parser = PDBParser()
+
+ three_to_one = {'ALA': 'A',
+                 'ARG': 'R',
+                 'ASN': 'N',
+                 'ASP': 'D',
+                 'CYS': 'C',
+                 'GLN': 'Q',
+                 'GLU': 'E',
+                 'GLY': 'G',
+                 'HIS': 'H',
+                 'ILE': 'I',
+                 'LEU': 'L',
+                 'LYS': 'K',
+                 'MET': 'M',
+                 'MSE': 'M',  # almost the same amino acid as MET; the sulfur is just replaced by selenium
+                 'PHE': 'F',
+                 'PRO': 'P',
+                 'PYL': 'O',
+                 'SER': 'S',
+                 'SEC': 'U',
+                 'THR': 'T',
+                 'TRP': 'W',
+                 'TYR': 'Y',
+                 'VAL': 'V',
+                 'ASX': 'B',
+                 'GLX': 'Z',
+                 'XAA': 'X',
+                 'XLE': 'J'}
+
+ sequences = []
+ ids = []
+ for name in tqdm(names):
+     if name == '.DS_Store': continue
+     if os.path.exists(os.path.join(data_dir, name, f'{name}_protein_processed.pdb')):
+         rec_path = os.path.join(data_dir, name, f'{name}_protein_processed.pdb')
+     else:
+         rec_path = os.path.join(data_dir, name, f'{name}_protein.pdb')
+     if cutoff > 10:
+         rec_path = os.path.join(data_dir, name, f'{name}_protein_obabel_reduce.pdb')
+         if not os.path.exists(rec_path):
+             rec_path = os.path.join(data_dir, name, f'{name}_protein.pdb')
+     structure = biopython_parser.get_structure('random_id', rec_path)
+     structure = structure[0]
+     for i, chain in enumerate(structure):
+         seq = ''
+         for res_idx, residue in enumerate(chain):
+             if residue.get_resname() == 'HOH':
+                 continue
+             c_alpha, n, c = None, None, None
+             for atom in residue:
+                 if atom.name == 'CA':
+                     c_alpha = list(atom.get_vector())
+                 if atom.name == 'N':
+                     n = list(atom.get_vector())
+                 if atom.name == 'C':
+                     c = list(atom.get_vector())
+             if c_alpha is not None and n is not None and c is not None:
+                 # only count the residue if it is an amino acid and not some weird molecule that is part of the complex
+                 try:
+                     seq += three_to_one[residue.get_resname()]
+                 except KeyError:
+                     seq += '-'
+                     print("encountered unknown AA: ", residue.get_resname(), ' in the complex ', name, '. Replacing it with a dash - .')
+         sequences.append(seq)
+         ids.append(f'{name}_chain_{i}')
+ records = []
+ for (index, seq) in zip(ids, sequences):
+     record = SeqRecord(Seq(seq), str(index))
+     record.description = ''
+     records.append(record)
+ SeqIO.write(records, args.out_file, "fasta")
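The record ids written here ('<name>_chain_<i>') matter downstream: the bundled ESM extraction script saves one '<id>.pt' file per FASTA record, and `datasets/esm_embeddings_to_pt.py` recovers the id again with `filename.split('.')[0]`. A sketch of that round trip (hypothetical pdb id):

name, i = '1abc', 0
record_id = f'{name}_chain_{i}'          # id written to the FASTA by this script
embedding_file = record_id + '.pt'       # file name the extraction step would produce
assert embedding_file.split('.')[0] == record_id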
datasets/process_mols.py ADDED
@@ -0,0 +1,550 @@
+ import copy
+ import os
+ import warnings
+
+ import numpy as np
+ import scipy.spatial as spa
+ import torch
+ from Bio.PDB import PDBParser
+ from Bio.PDB.PDBExceptions import PDBConstructionWarning
+ from rdkit import Chem
+ from rdkit.Chem.rdchem import BondType as BT
+ from rdkit.Chem import AllChem, GetPeriodicTable, RemoveHs
+ from rdkit.Geometry import Point3D
+ from scipy import spatial
+ from scipy.special import softmax
+ from torch_cluster import radius_graph
+
+ import torch.nn.functional as F
+
+ from datasets.conformer_matching import get_torsion_angles, optimize_rotatable_bonds
+ from utils.torsion import get_transformation_mask
+
+
+ biopython_parser = PDBParser()
+ periodic_table = GetPeriodicTable()
+ allowable_features = {
+     'possible_atomic_num_list': list(range(1, 119)) + ['misc'],
+     'possible_chirality_list': [
+         'CHI_UNSPECIFIED',
+         'CHI_TETRAHEDRAL_CW',
+         'CHI_TETRAHEDRAL_CCW',
+         'CHI_OTHER'
+     ],
+     'possible_degree_list': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 'misc'],
+     'possible_numring_list': [0, 1, 2, 3, 4, 5, 6, 'misc'],
+     'possible_implicit_valence_list': [0, 1, 2, 3, 4, 5, 6, 'misc'],
+     'possible_formal_charge_list': [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 'misc'],
+     'possible_numH_list': [0, 1, 2, 3, 4, 5, 6, 7, 8, 'misc'],
+     'possible_number_radical_e_list': [0, 1, 2, 3, 4, 'misc'],
+     'possible_hybridization_list': ['SP', 'SP2', 'SP3', 'SP3D', 'SP3D2', 'misc'],
+     'possible_is_aromatic_list': [False, True],
+     'possible_is_in_ring3_list': [False, True],
+     'possible_is_in_ring4_list': [False, True],
+     'possible_is_in_ring5_list': [False, True],
+     'possible_is_in_ring6_list': [False, True],
+     'possible_is_in_ring7_list': [False, True],
+     'possible_is_in_ring8_list': [False, True],
+     'possible_amino_acids': ['ALA', 'ARG', 'ASN', 'ASP', 'CYS', 'GLN', 'GLU', 'GLY', 'HIS', 'ILE', 'LEU', 'LYS', 'MET',
+                              'PHE', 'PRO', 'SER', 'THR', 'TRP', 'TYR', 'VAL', 'HIP', 'HIE', 'TPO', 'HID', 'LEV', 'MEU',
+                              'PTR', 'GLV', 'CYT', 'SEP', 'HIZ', 'CYM', 'GLM', 'ASQ', 'TYS', 'CYX', 'GLZ', 'misc'],
+     'possible_atom_type_2': ['C*', 'CA', 'CB', 'CD', 'CE', 'CG', 'CH', 'CZ', 'N*', 'ND', 'NE', 'NH', 'NZ', 'O*', 'OD',
+                              'OE', 'OG', 'OH', 'OX', 'S*', 'SD', 'SG', 'misc'],
+     'possible_atom_type_3': ['C', 'CA', 'CB', 'CD', 'CD1', 'CD2', 'CE', 'CE1', 'CE2', 'CE3', 'CG', 'CG1', 'CG2', 'CH2',
+                              'CZ', 'CZ2', 'CZ3', 'N', 'ND1', 'ND2', 'NE', 'NE1', 'NE2', 'NH1', 'NH2', 'NZ', 'O', 'OD1',
+                              'OD2', 'OE1', 'OE2', 'OG', 'OG1', 'OH', 'OXT', 'SD', 'SG', 'misc'],
+ }
+ bonds = {BT.SINGLE: 0, BT.DOUBLE: 1, BT.TRIPLE: 2, BT.AROMATIC: 3}
+
+ lig_feature_dims = (list(map(len, [
+     allowable_features['possible_atomic_num_list'],
+     allowable_features['possible_chirality_list'],
+     allowable_features['possible_degree_list'],
+     allowable_features['possible_formal_charge_list'],
+     allowable_features['possible_implicit_valence_list'],
+     allowable_features['possible_numH_list'],
+     allowable_features['possible_number_radical_e_list'],
+     allowable_features['possible_hybridization_list'],
+     allowable_features['possible_is_aromatic_list'],
+     allowable_features['possible_numring_list'],
+     allowable_features['possible_is_in_ring3_list'],
+     allowable_features['possible_is_in_ring4_list'],
+     allowable_features['possible_is_in_ring5_list'],
+     allowable_features['possible_is_in_ring6_list'],
+     allowable_features['possible_is_in_ring7_list'],
+     allowable_features['possible_is_in_ring8_list'],
+ ])), 0)  # number of scalar features
+
+ rec_atom_feature_dims = (list(map(len, [
+     allowable_features['possible_amino_acids'],
+     allowable_features['possible_atomic_num_list'],
+     allowable_features['possible_atom_type_2'],
+     allowable_features['possible_atom_type_3'],
+ ])), 0)
+
+ rec_residue_feature_dims = (list(map(len, [
+     allowable_features['possible_amino_acids']
+ ])), 0)
+
+
+ def lig_atom_featurizer(mol):
+     ringinfo = mol.GetRingInfo()
+     atom_features_list = []
+     for idx, atom in enumerate(mol.GetAtoms()):
+         atom_features_list.append([
+             safe_index(allowable_features['possible_atomic_num_list'], atom.GetAtomicNum()),
+             allowable_features['possible_chirality_list'].index(str(atom.GetChiralTag())),
+             safe_index(allowable_features['possible_degree_list'], atom.GetTotalDegree()),
+             safe_index(allowable_features['possible_formal_charge_list'], atom.GetFormalCharge()),
+             safe_index(allowable_features['possible_implicit_valence_list'], atom.GetImplicitValence()),
+             safe_index(allowable_features['possible_numH_list'], atom.GetTotalNumHs()),
+             safe_index(allowable_features['possible_number_radical_e_list'], atom.GetNumRadicalElectrons()),
+             safe_index(allowable_features['possible_hybridization_list'], str(atom.GetHybridization())),
+             allowable_features['possible_is_aromatic_list'].index(atom.GetIsAromatic()),
+             safe_index(allowable_features['possible_numring_list'], ringinfo.NumAtomRings(idx)),
+             allowable_features['possible_is_in_ring3_list'].index(ringinfo.IsAtomInRingOfSize(idx, 3)),
+             allowable_features['possible_is_in_ring4_list'].index(ringinfo.IsAtomInRingOfSize(idx, 4)),
+             allowable_features['possible_is_in_ring5_list'].index(ringinfo.IsAtomInRingOfSize(idx, 5)),
+             allowable_features['possible_is_in_ring6_list'].index(ringinfo.IsAtomInRingOfSize(idx, 6)),
+             allowable_features['possible_is_in_ring7_list'].index(ringinfo.IsAtomInRingOfSize(idx, 7)),
+             allowable_features['possible_is_in_ring8_list'].index(ringinfo.IsAtomInRingOfSize(idx, 8)),
+         ])
+
+     return torch.tensor(atom_features_list)
+
+
+ def rec_residue_featurizer(rec):
+     feature_list = []
+     for residue in rec.get_residues():
+         feature_list.append([safe_index(allowable_features['possible_amino_acids'], residue.get_resname())])
+     return torch.tensor(feature_list, dtype=torch.float32)  # (N_res, 1)
+
+
+ def safe_index(l, e):
+     """Return the index of element e in list l. If e is not present, return the last index."""
+     try:
+         return l.index(e)
+     except ValueError:
+         return len(l) - 1
+
+
+ def parse_receptor(pdbid, pdbbind_dir):
+     rec = parsePDB(pdbid, pdbbind_dir)
+     return rec
+
+
+ def parsePDB(pdbid, pdbbind_dir):
+     rec_path = os.path.join(pdbbind_dir, pdbid, f'{pdbid}_protein_processed.pdb')
+     return parse_pdb_from_path(rec_path)
+
+
+ def parse_pdb_from_path(path):
+     with warnings.catch_warnings():
+         warnings.filterwarnings("ignore", category=PDBConstructionWarning)
+         structure = biopython_parser.get_structure('random_id', path)
+         rec = structure[0]
+     return rec
+
+
+ def extract_receptor_structure(rec, lig, lm_embedding_chains=None):
+     conf = lig.GetConformer()
+     lig_coords = conf.GetPositions()
+     min_distances = []
+     coords = []
+     c_alpha_coords = []
+     n_coords = []
+     c_coords = []
+     valid_chain_ids = []
+     lengths = []
+     for i, chain in enumerate(rec):
+         chain_coords = []  # num_residues, num_atoms, 3
+         chain_c_alpha_coords = []
+         chain_n_coords = []
+         chain_c_coords = []
+         count = 0
+         invalid_res_ids = []
+         for res_idx, residue in enumerate(chain):
+             if residue.get_resname() == 'HOH':
+                 invalid_res_ids.append(residue.get_id())
+                 continue
+             residue_coords = []
+             c_alpha, n, c = None, None, None
+             for atom in residue:
+                 if atom.name == 'CA':
+                     c_alpha = list(atom.get_vector())
+                 if atom.name == 'N':
+                     n = list(atom.get_vector())
+                 if atom.name == 'C':
+                     c = list(atom.get_vector())
+                 residue_coords.append(list(atom.get_vector()))
+
+             if c_alpha is not None and n is not None and c is not None:
+                 # only append the residue if it is an amino acid and not some weird molecule that is part of the complex
+                 chain_c_alpha_coords.append(c_alpha)
+                 chain_n_coords.append(n)
+                 chain_c_coords.append(c)
+                 chain_coords.append(np.array(residue_coords))
+                 count += 1
+             else:
+                 invalid_res_ids.append(residue.get_id())
+         for res_id in invalid_res_ids:
+             chain.detach_child(res_id)
+         if len(chain_coords) > 0:
+             all_chain_coords = np.concatenate(chain_coords, axis=0)
+             distances = spatial.distance.cdist(lig_coords, all_chain_coords)
+             min_distance = distances.min()
+         else:
+             min_distance = np.inf
+
+         min_distances.append(min_distance)
+         lengths.append(count)
+         coords.append(chain_coords)
+         c_alpha_coords.append(np.array(chain_c_alpha_coords))
+         n_coords.append(np.array(chain_n_coords))
+         c_coords.append(np.array(chain_c_coords))
+         if count != 0: valid_chain_ids.append(chain.get_id())
+
+     min_distances = np.array(min_distances)
+     if len(valid_chain_ids) == 0:
+         valid_chain_ids.append(np.argmin(min_distances))
+     valid_coords = []
+     valid_c_alpha_coords = []
+     valid_n_coords = []
+     valid_c_coords = []
+     valid_lengths = []
+     invalid_chain_ids = []
+     valid_lm_embeddings = []
+     for i, chain in enumerate(rec):
+         if chain.get_id() in valid_chain_ids:
+             valid_coords.append(coords[i])
+             valid_c_alpha_coords.append(c_alpha_coords[i])
+             if lm_embedding_chains is not None:
+                 if i >= len(lm_embedding_chains):
+                     raise ValueError('Encountered valid chain id that was not present in the LM embeddings')
+                 valid_lm_embeddings.append(lm_embedding_chains[i])
+             valid_n_coords.append(n_coords[i])
+             valid_c_coords.append(c_coords[i])
+             valid_lengths.append(lengths[i])
+         else:
+             invalid_chain_ids.append(chain.get_id())
+     coords = [item for sublist in valid_coords for item in sublist]  # list with n_residues arrays: [n_atoms, 3]
+
+     c_alpha_coords = np.concatenate(valid_c_alpha_coords, axis=0)  # [n_residues, 3]
+     n_coords = np.concatenate(valid_n_coords, axis=0)  # [n_residues, 3]
+     c_coords = np.concatenate(valid_c_coords, axis=0)  # [n_residues, 3]
+     lm_embeddings = np.concatenate(valid_lm_embeddings, axis=0) if lm_embedding_chains is not None else None
+     for invalid_id in invalid_chain_ids:
+         rec.detach_child(invalid_id)
+
+     assert len(c_alpha_coords) == len(n_coords)
+     assert len(c_alpha_coords) == len(c_coords)
+     assert sum(valid_lengths) == len(c_alpha_coords)
+     return rec, coords, c_alpha_coords, n_coords, c_coords, lm_embeddings
+
+
+ def get_lig_graph(mol, complex_graph):
+     lig_coords = torch.from_numpy(mol.GetConformer().GetPositions()).float()
+     atom_feats = lig_atom_featurizer(mol)
+
+     row, col, edge_type = [], [], []
+     for bond in mol.GetBonds():
+         start, end = bond.GetBeginAtomIdx(), bond.GetEndAtomIdx()
+         row += [start, end]
+         col += [end, start]
+         edge_type += 2 * [bonds[bond.GetBondType()]] if bond.GetBondType() != BT.UNSPECIFIED else [0, 0]
+
+     edge_index = torch.tensor([row, col], dtype=torch.long)
+     edge_type = torch.tensor(edge_type, dtype=torch.long)
+     edge_attr = F.one_hot(edge_type, num_classes=len(bonds)).to(torch.float)
+
+     complex_graph['ligand'].x = atom_feats
+     complex_graph['ligand'].pos = lig_coords
+     complex_graph['ligand', 'lig_bond', 'ligand'].edge_index = edge_index
+     complex_graph['ligand', 'lig_bond', 'ligand'].edge_attr = edge_attr
+     return
+
+
+ def generate_conformer(mol):
+     ps = AllChem.ETKDGv2()
+     id = AllChem.EmbedMolecule(mol, ps)
+     if id == -1:
+         print('rdkit coords could not be generated without using random coords. using random coords now.')
+         ps.useRandomCoords = True
+         AllChem.EmbedMolecule(mol, ps)
+         AllChem.MMFFOptimizeMolecule(mol, confId=0)
+     # else:
+     #     AllChem.MMFFOptimizeMolecule(mol_rdkit, confId=0)
+
+
+ def get_lig_graph_with_matching(mol_, complex_graph, popsize, maxiter, matching, keep_original, num_conformers, remove_hs):
+     if matching:
+         mol_maybe_noh = copy.deepcopy(mol_)
+         if remove_hs:
+             mol_maybe_noh = RemoveHs(mol_maybe_noh, sanitize=True)
+         if keep_original:
+             complex_graph['ligand'].orig_pos = mol_maybe_noh.GetConformer().GetPositions()
+
+         rotable_bonds = get_torsion_angles(mol_maybe_noh)
+         if not rotable_bonds: print("no_rotable_bonds but still using it")
+
+         for i in range(num_conformers):
+             mol_rdkit = copy.deepcopy(mol_)
+
+             mol_rdkit.RemoveAllConformers()
+             mol_rdkit = AllChem.AddHs(mol_rdkit)
+             generate_conformer(mol_rdkit)
+             if remove_hs:
+                 mol_rdkit = RemoveHs(mol_rdkit, sanitize=True)
+             mol = copy.deepcopy(mol_maybe_noh)
+             if rotable_bonds:
+                 optimize_rotatable_bonds(mol_rdkit, mol, rotable_bonds, popsize=popsize, maxiter=maxiter)
+             mol.AddConformer(mol_rdkit.GetConformer())
+             rms_list = []
+             AllChem.AlignMolConformers(mol, RMSlist=rms_list)
+             mol_rdkit.RemoveAllConformers()
+             mol_rdkit.AddConformer(mol.GetConformers()[1])
+
+             if i == 0:
+                 complex_graph.rmsd_matching = rms_list[0]
+                 get_lig_graph(mol_rdkit, complex_graph)
+             else:
+                 if torch.is_tensor(complex_graph['ligand'].pos):
+                     complex_graph['ligand'].pos = [complex_graph['ligand'].pos]
+                 complex_graph['ligand'].pos.append(torch.from_numpy(mol_rdkit.GetConformer().GetPositions()).float())
+
+     else:  # no matching
+         complex_graph.rmsd_matching = 0
+         if remove_hs: mol_ = RemoveHs(mol_)
+         get_lig_graph(mol_, complex_graph)
+
+     edge_mask, mask_rotate = get_transformation_mask(complex_graph)
+     complex_graph['ligand'].edge_mask = torch.tensor(edge_mask)
+     complex_graph['ligand'].mask_rotate = mask_rotate
+
+     return
+
+
+ def get_calpha_graph(rec, c_alpha_coords, n_coords, c_coords, complex_graph, cutoff=20, max_neighbor=None, lm_embeddings=None):
+     n_rel_pos = n_coords - c_alpha_coords
+     c_rel_pos = c_coords - c_alpha_coords
+     num_residues = len(c_alpha_coords)
+     if num_residues <= 1:
+         raise ValueError("rec contains only 1 residue!")
+
+     # Build the k-NN graph
+     distances = spa.distance.cdist(c_alpha_coords, c_alpha_coords)
+     src_list = []
+     dst_list = []
+     mean_norm_list = []
+     for i in range(num_residues):
+         dst = list(np.where(distances[i, :] < cutoff)[0])
+         dst.remove(i)
+         if max_neighbor is not None and len(dst) > max_neighbor:
+             dst = list(np.argsort(distances[i, :]))[1: max_neighbor + 1]
+         if len(dst) == 0:
+             dst = list(np.argsort(distances[i, :]))[1:2]  # choose the second closest because the first is i itself
+             print(f'The c_alpha_cutoff {cutoff} was too small for one c_alpha such that it had no neighbors. '
+                   f'So we connected it to the closest other c_alpha')
+         assert i not in dst
+         src = [i] * len(dst)
+         src_list.extend(src)
+         dst_list.extend(dst)
+         valid_dist_np = distances[i, dst]
+         sigma = np.array([1., 2., 5., 10., 30.]).reshape((-1, 1))
+         weights = softmax(- valid_dist_np.reshape((1, -1)) ** 2 / sigma, axis=1)  # (sigma_num, neigh_num)
+         assert 1 - 1e-2 < weights[0].sum() < 1.01
+         diff_vecs = c_alpha_coords[src, :] - c_alpha_coords[dst, :]  # (neigh_num, 3)
+         mean_vec = weights.dot(diff_vecs)  # (sigma_num, 3)
+         denominator = weights.dot(np.linalg.norm(diff_vecs, axis=1))  # (sigma_num,)
+         mean_vec_ratio_norm = np.linalg.norm(mean_vec, axis=1) / denominator  # (sigma_num,)
+         mean_norm_list.append(mean_vec_ratio_norm)
+     assert len(src_list) == len(dst_list)
+
+     node_feat = rec_residue_featurizer(rec)
+     mu_r_norm = torch.from_numpy(np.array(mean_norm_list).astype(np.float32))
+     side_chain_vecs = torch.from_numpy(
+         np.concatenate([np.expand_dims(n_rel_pos, axis=1), np.expand_dims(c_rel_pos, axis=1)], axis=1))
+
+     complex_graph['receptor'].x = torch.cat([node_feat, torch.tensor(lm_embeddings)], axis=1) if lm_embeddings is not None else node_feat
+     complex_graph['receptor'].pos = torch.from_numpy(c_alpha_coords).float()
+     complex_graph['receptor'].mu_r_norm = mu_r_norm
+     complex_graph['receptor'].side_chain_vecs = side_chain_vecs.float()
+     complex_graph['receptor', 'rec_contact', 'receptor'].edge_index = torch.from_numpy(np.asarray([src_list, dst_list]))
+
+     return
+
+
+ def rec_atom_featurizer(rec):
+     atom_feats = []
+     for i, atom in enumerate(rec.get_atoms()):
+         atom_name, element = atom.name, atom.element
+         if element == 'CD':
+             element = 'C'
+         assert element != ''
+         try:
+             atomic_num = periodic_table.GetAtomicNumber(element)
+         except Exception:
+             atomic_num = -1
+         atom_feat = [safe_index(allowable_features['possible_amino_acids'], atom.get_parent().get_resname()),
+                      safe_index(allowable_features['possible_atomic_num_list'], atomic_num),
+                      safe_index(allowable_features['possible_atom_type_2'], (atom_name + '*')[:2]),
+                      safe_index(allowable_features['possible_atom_type_3'], atom_name)]
+         atom_feats.append(atom_feat)
+
+     return atom_feats
+
+
+ def get_rec_graph(rec, rec_coords, c_alpha_coords, n_coords, c_coords, complex_graph, rec_radius, c_alpha_max_neighbors=None, all_atoms=False,
+                   atom_radius=5, atom_max_neighbors=None, remove_hs=False, lm_embeddings=None):
+     if all_atoms:
+         return get_fullrec_graph(rec, rec_coords, c_alpha_coords, n_coords, c_coords, complex_graph,
+                                  c_alpha_cutoff=rec_radius, c_alpha_max_neighbors=c_alpha_max_neighbors,
+                                  atom_cutoff=atom_radius, atom_max_neighbors=atom_max_neighbors, remove_hs=remove_hs, lm_embeddings=lm_embeddings)
+     else:
+         return get_calpha_graph(rec, c_alpha_coords, n_coords, c_coords, complex_graph, rec_radius, c_alpha_max_neighbors, lm_embeddings=lm_embeddings)
+
+
+ def get_fullrec_graph(rec, rec_coords, c_alpha_coords, n_coords, c_coords, complex_graph, c_alpha_cutoff=20,
+                       c_alpha_max_neighbors=None, atom_cutoff=5, atom_max_neighbors=None, remove_hs=False, lm_embeddings=None):
+     # builds the receptor graph with both residues and atoms
+
+     n_rel_pos = n_coords - c_alpha_coords
+     c_rel_pos = c_coords - c_alpha_coords
+     num_residues = len(c_alpha_coords)
+     if num_residues <= 1:
+         raise ValueError("rec contains only 1 residue!")
+
+     # Build the k-NN graph of residues
+     distances = spa.distance.cdist(c_alpha_coords, c_alpha_coords)
+     src_list = []
+     dst_list = []
+     mean_norm_list = []
+     for i in range(num_residues):
+         dst = list(np.where(distances[i, :] < c_alpha_cutoff)[0])
+         dst.remove(i)
+         if c_alpha_max_neighbors is not None and len(dst) > c_alpha_max_neighbors:
+             dst = list(np.argsort(distances[i, :]))[1: c_alpha_max_neighbors + 1]
+         if len(dst) == 0:
+             dst = list(np.argsort(distances[i, :]))[1:2]  # choose the second closest because the first is i itself
+             print(f'The c_alpha_cutoff {c_alpha_cutoff} was too small for one c_alpha such that it had no neighbors. '
+                   f'So we connected it to the closest other c_alpha')
+         assert i not in dst
+         src = [i] * len(dst)
+         src_list.extend(src)
+         dst_list.extend(dst)
+         valid_dist_np = distances[i, dst]
+         sigma = np.array([1., 2., 5., 10., 30.]).reshape((-1, 1))
+         weights = softmax(- valid_dist_np.reshape((1, -1)) ** 2 / sigma, axis=1)  # (sigma_num, neigh_num)
+         assert 1 - 1e-2 < weights[0].sum() < 1.01
+         diff_vecs = c_alpha_coords[src, :] - c_alpha_coords[dst, :]  # (neigh_num, 3)
+         mean_vec = weights.dot(diff_vecs)  # (sigma_num, 3)
+         denominator = weights.dot(np.linalg.norm(diff_vecs, axis=1))  # (sigma_num,)
+         mean_vec_ratio_norm = np.linalg.norm(mean_vec, axis=1) / denominator  # (sigma_num,)
+         mean_norm_list.append(mean_vec_ratio_norm)
+     assert len(src_list) == len(dst_list)
+
+     node_feat = rec_residue_featurizer(rec)
+     mu_r_norm = torch.from_numpy(np.array(mean_norm_list).astype(np.float32))
+     side_chain_vecs = torch.from_numpy(
+         np.concatenate([np.expand_dims(n_rel_pos, axis=1), np.expand_dims(c_rel_pos, axis=1)], axis=1))
+
+     complex_graph['receptor'].x = torch.cat([node_feat, torch.tensor(lm_embeddings)], axis=1) if lm_embeddings is not None else node_feat
+     complex_graph['receptor'].pos = torch.from_numpy(c_alpha_coords).float()
+     complex_graph['receptor'].mu_r_norm = mu_r_norm
+     complex_graph['receptor'].side_chain_vecs = side_chain_vecs.float()
+     complex_graph['receptor', 'rec_contact', 'receptor'].edge_index = torch.from_numpy(np.asarray([src_list, dst_list]))
+
+     src_c_alpha_idx = np.concatenate([np.asarray([i] * len(l)) for i, l in enumerate(rec_coords)])
+     atom_feat = torch.from_numpy(np.asarray(rec_atom_featurizer(rec)))
+     atom_coords = torch.from_numpy(np.concatenate(rec_coords, axis=0)).float()
+
+     if remove_hs:
+         not_hs = (atom_feat[:, 1] != 0)
+         src_c_alpha_idx = src_c_alpha_idx[not_hs]
+         atom_feat = atom_feat[not_hs]
+         atom_coords = atom_coords[not_hs]
+
+     atoms_edge_index = radius_graph(atom_coords, atom_cutoff, max_num_neighbors=atom_max_neighbors if atom_max_neighbors else 1000)
+     atom_res_edge_index = torch.from_numpy(np.asarray([np.arange(len(atom_feat)), src_c_alpha_idx])).long()
+
+     complex_graph['atom'].x = atom_feat
+     complex_graph['atom'].pos = atom_coords
+     complex_graph['atom', 'atom_contact', 'atom'].edge_index = atoms_edge_index
+     complex_graph['atom', 'atom_rec_contact', 'receptor'].edge_index = atom_res_edge_index
+
+     return
+
+
+ def write_mol_with_coords(mol, new_coords, path):
+     w = Chem.SDWriter(path)
+     conf = mol.GetConformer()
+     for i in range(mol.GetNumAtoms()):
+         x, y, z = new_coords.astype(np.double)[i]
+         conf.SetAtomPosition(i, Point3D(x, y, z))
+     w.write(mol)
+     w.close()
+
+
+ def read_molecule(molecule_file, sanitize=False, calc_charges=False, remove_hs=False):
+     if molecule_file.endswith('.mol2'):
+         mol = Chem.MolFromMol2File(molecule_file, sanitize=False, removeHs=False)
+     elif molecule_file.endswith('.sdf'):
+         supplier = Chem.SDMolSupplier(molecule_file, sanitize=False, removeHs=False)
+         mol = supplier[0]
+     elif molecule_file.endswith('.pdbqt'):
+         with open(molecule_file) as file:
+             pdbqt_data = file.readlines()
+         pdb_block = ''
+         for line in pdbqt_data:
+             pdb_block += '{}\n'.format(line[:66])
+         mol = Chem.MolFromPDBBlock(pdb_block, sanitize=False, removeHs=False)
+     elif molecule_file.endswith('.pdb'):
+         mol = Chem.MolFromPDBFile(molecule_file, sanitize=False, removeHs=False)
+     else:
+         raise ValueError('Expect the format of the molecule_file to be '
+                          'one of .mol2, .sdf, .pdbqt and .pdb, got {}'.format(molecule_file))
+
+     try:
+         if sanitize or calc_charges:
+             Chem.SanitizeMol(mol)
+
+         if calc_charges:
+             # Compute Gasteiger charges on the molecule.
+             try:
+                 AllChem.ComputeGasteigerCharges(mol)
+             except Exception:
+                 warnings.warn('Unable to compute charges for the molecule.')
+
+         if remove_hs:
+             mol = Chem.RemoveHs(mol, sanitize=sanitize)
+     except Exception as e:
+         print(e)
+         return None
+
+     return mol
+
+
+ def read_sdf_or_mol2(sdf_fileName, mol2_fileName):
+
+     mol = Chem.MolFromMolFile(sdf_fileName, sanitize=False)
+     problem = False
+     try:
+         Chem.SanitizeMol(mol)
+         mol = Chem.RemoveHs(mol)
+     except Exception:
+         problem = True
+     if problem:
+         mol = Chem.MolFromMol2File(mol2_fileName, sanitize=False)
+         try:
+             Chem.SanitizeMol(mol)
+             mol = Chem.RemoveHs(mol)
+             problem = False
+         except Exception:
+             problem = True
+
+     return mol, problem
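A minimal sketch of the reader fallback implemented above (hypothetical paths): `read_sdf_or_mol2` tries the .sdf file first and only falls back to the .mol2 file when sanitization fails, returning the molecule together with a `problem` flag:

mol, problem = read_sdf_or_mol2('data/1abc/1abc_ligand.sdf', 'data/1abc/1abc_ligand.mol2')
if not problem:
    print(mol.GetNumAtoms())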
environment.yml ADDED
@@ -0,0 +1,102 @@
+ name: diffdock
+ channels:
+   - pytorch
+   - defaults
+ dependencies:
+   - blas=1.0
+   - brotlipy=0.7.0
+   - bzip2=1.0.8
+   - ca-certificates=2022.07.19
+   - certifi=2022.9.14
+   - cffi=1.15.1
+   - charset-normalizer=2.0.4
+   - cryptography=37.0.1
+   - ffmpeg=4.3
+   - freetype=2.11.0
+   - gettext=0.21.0
+   - giflib=5.2.1
+   - gmp=6.2.1
+   - gnutls=3.6.15
+   - icu=58.2
+   - idna=3.3
+   - intel-openmp=2021.4.0
+   - jpeg=9e
+   - lame=3.100
+   - lcms2=2.12
+   - lerc=3.0
+   - libcxx=14.0.6
+   - libdeflate=1.8
+   - libffi=3.3
+   - libiconv=1.16
+   - libidn2=2.3.2
+   - libpng=1.6.37
+   - libtasn1=4.16.0
+   - libtiff=4.4.0
+   - libunistring=0.9.10
+   - libwebp=1.2.2
+   - libwebp-base=1.2.2
+   - libxml2=2.9.14
+   - llvm-openmp=14.0.6
+   - lz4-c=1.9.3
+   - mkl=2021.4.0
+   - mkl-service=2.4.0
+   - mkl_fft=1.3.1
+   - mkl_random=1.2.2
+   - ncurses=6.3
+   - nettle=3.7.3
+   - numpy=1.23.1
+   - numpy-base=1.23.1
+   - openh264=2.1.1
+   - openssl=1.1.1q
+   - pillow=9.2.0
+   - pip=22.2.2
+   - pycparser=2.21
+   - pyopenssl=22.0.0
+   - pysocks=1.7.1
+   - python=3.9.13
+   - pytorch=1.12.1
+   - readline=8.1.2
+   - requests=2.28.1
+   - setuptools=63.4.1
+   - six=1.16.0
+   - sqlite=3.39.3
+   - tk=8.6.12
+   - torchaudio=0.12.1
+   - torchvision=0.13.1
+   - typing_extensions=4.3.0
+   - tzdata=2022c
+   - urllib3=1.26.11
+   - wheel=0.37.1
+   - xz=5.2.6
+   - zlib=1.2.12
+   - zstd=1.5.2
+   - pip:
+     - biopandas==0.4.1
+     - biopython==1.79
+     - e3nn==0.5.0
+     - jinja2==3.1.2
+     - joblib==1.2.0
+     - markupsafe==2.1.1
+     - mpmath==1.2.1
+     - networkx==2.8.7
+     - opt-einsum==3.3.0
+     - opt-einsum-fx==0.1.4
+     - packaging==21.3
+     - pandas==1.5.0
+     - pyaml==21.10.1
+     - pyparsing==3.0.9
+     - python-dateutil==2.8.2
+     - pytz==2022.4
+     - pyyaml==6.0
+     - rdkit-pypi==2022.3.5
+     - scikit-learn==1.1.2
+     - scipy==1.9.1
+     - spyrmsd==0.5.2
+     - sympy==1.11.1
+     - threadpoolctl==3.1.0
+     - torch-cluster==1.6.0
+     - torch-geometric==2.1.0.post1
+     - torch-scatter==2.0.9
+     - torch-sparse==0.6.15
+     - torch-spline-conv==1.2.1
+     - tqdm==4.64.1
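This pinned environment is presumably instantiated with `conda env create -f environment.yml`; note that the PyTorch Geometric companion packages in the pip section (torch-cluster, torch-scatter, torch-sparse, torch-spline-conv) must match the pytorch=1.12.1 build pinned above.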
esm/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) Facebook, Inc. and its affiliates.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
esm/esm/__init__.py ADDED
@@ -0,0 +1,12 @@
+ # Copyright (c) Facebook, Inc. and its affiliates.
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ from .version import version as __version__  # noqa
+
+ from .data import Alphabet, BatchConverter, FastaBatchedDataset  # noqa
+ from .model.esm1 import ProteinBertModel  # noqa
+ from .model.esm2 import ESM2  # noqa
+ from .model.msa_transformer import MSATransformer  # noqa
+ from . import pretrained  # noqa
esm/esm/axial_attention.py ADDED
@@ -0,0 +1,239 @@
+ # Copyright (c) Facebook, Inc. and its affiliates.
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ import math
+ import torch
+ import torch.nn as nn
+
+
+ class RowSelfAttention(nn.Module):
+     """Compute self-attention over rows of a 2D input."""
+
+     def __init__(
+         self,
+         embed_dim,
+         num_heads,
+         dropout=0.0,
+         max_tokens_per_msa: int = 2 ** 16,
+     ):
+         super().__init__()
+         self.num_heads = num_heads
+         self.dropout = dropout
+         self.head_dim = embed_dim // num_heads
+         self.scaling = self.head_dim ** -0.5
+         self.max_tokens_per_msa = max_tokens_per_msa
+         self.attn_shape = "hnij"
+
+         self.k_proj = nn.Linear(embed_dim, embed_dim)
+         self.v_proj = nn.Linear(embed_dim, embed_dim)
+         self.q_proj = nn.Linear(embed_dim, embed_dim)
+
+         self.out_proj = nn.Linear(embed_dim, embed_dim)
+         self.dropout_module = nn.Dropout(dropout)
+
+     def align_scaling(self, q):
+         num_rows = q.size(0)
+         return self.scaling / math.sqrt(num_rows)
+
+     def _batched_forward(
+         self,
+         x,
+         self_attn_mask=None,
+         self_attn_padding_mask=None,
+     ):
+         num_rows, num_cols, batch_size, embed_dim = x.size()
+         max_rows = max(1, self.max_tokens_per_msa // num_cols)
+         attns = 0
+         scaling = self.align_scaling(x)
+         for start in range(0, num_rows, max_rows):
+             attn_weights = self.compute_attention_weights(
+                 x[start : start + max_rows],
+                 scaling,
+                 self_attn_mask=self_attn_mask,
+                 self_attn_padding_mask=self_attn_padding_mask[:, start : start + max_rows]
+                 if self_attn_padding_mask is not None
+                 else None,
+             )
+             attns += attn_weights
+         attn_probs = attns.softmax(-1)
+         attn_probs = self.dropout_module(attn_probs)
+
+         outputs = []
+         for start in range(0, num_rows, max_rows):
+             output = self.compute_attention_update(x[start : start + max_rows], attn_probs)
+             outputs.append(output)
+
+         output = torch.cat(outputs, 0)
+         return output, attn_probs
+
+     def compute_attention_weights(
+         self,
+         x,
+         scaling: float,
+         self_attn_mask=None,
+         self_attn_padding_mask=None,
+     ):
+         num_rows, num_cols, batch_size, embed_dim = x.size()
+         q = self.q_proj(x).view(num_rows, num_cols, batch_size, self.num_heads, self.head_dim)
+         k = self.k_proj(x).view(num_rows, num_cols, batch_size, self.num_heads, self.head_dim)
+         q *= scaling
+         if self_attn_padding_mask is not None:
+             # Zero out any padded aligned positions - this is important since
+             # we take a sum across the alignment axis.
+             q *= 1 - self_attn_padding_mask.permute(1, 2, 0).unsqueeze(3).unsqueeze(4).to(q)
+
+         attn_weights = torch.einsum(f"rinhd,rjnhd->{self.attn_shape}", q, k)
+
+         if self_attn_mask is not None:
+             raise NotImplementedError
+             # Mask Size: [B x R x C], Weights Size: [H x B x C x C]
+
+         if self_attn_padding_mask is not None:
+             attn_weights = attn_weights.masked_fill(
+                 self_attn_padding_mask[:, 0].unsqueeze(0).unsqueeze(2),
+                 -10000,
+             )
+
+         return attn_weights
+
+     def compute_attention_update(
+         self,
+         x,
+         attn_probs,
+     ):
+         num_rows, num_cols, batch_size, embed_dim = x.size()
+         v = self.v_proj(x).view(num_rows, num_cols, batch_size, self.num_heads, self.head_dim)
+         context = torch.einsum(f"{self.attn_shape},rjnhd->rinhd", attn_probs, v)
+         context = context.contiguous().view(num_rows, num_cols, batch_size, embed_dim)
+         output = self.out_proj(context)
+         return output
+
+     def forward(
+         self,
+         x,
+         self_attn_mask=None,
+         self_attn_padding_mask=None,
+     ):
+         num_rows, num_cols, batch_size, embed_dim = x.size()
+         if (num_rows * num_cols > self.max_tokens_per_msa) and not torch.is_grad_enabled():
+             return self._batched_forward(x, self_attn_mask, self_attn_padding_mask)
+         else:
+             scaling = self.align_scaling(x)
+             attn_weights = self.compute_attention_weights(
+                 x, scaling, self_attn_mask, self_attn_padding_mask
+             )
+             attn_probs = attn_weights.softmax(-1)
+             attn_probs = self.dropout_module(attn_probs)
+             output = self.compute_attention_update(x, attn_probs)
+             return output, attn_probs
+
+
+ class ColumnSelfAttention(nn.Module):
+     """Compute self-attention over columns of a 2D input."""
+
+     def __init__(
+         self,
+         embed_dim,
+         num_heads,
+         dropout=0.0,
+         max_tokens_per_msa: int = 2 ** 16,
+     ):
+         super().__init__()
+
+         self.num_heads = num_heads
+         self.dropout = dropout
+         self.head_dim = embed_dim // num_heads
+         self.scaling = self.head_dim ** -0.5
+         self.max_tokens_per_msa = max_tokens_per_msa
+
+         self.k_proj = nn.Linear(embed_dim, embed_dim)
+         self.v_proj = nn.Linear(embed_dim, embed_dim)
+         self.q_proj = nn.Linear(embed_dim, embed_dim)
+
+         self.out_proj = nn.Linear(embed_dim, embed_dim)
+         self.dropout_module = nn.Dropout(dropout)
+
+     def _batched_forward(
+         self,
+         x,
+         self_attn_mask=None,
+         self_attn_padding_mask=None,
+     ):
+         num_rows, num_cols, batch_size, embed_dim = x.size()
+         max_cols = max(1, self.max_tokens_per_msa // num_rows)
+         outputs = []
+         attns = []
+         for start in range(0, num_cols, max_cols):
+             output, attn = self(
+                 x[:, start : start + max_cols],
+                 self_attn_mask=self_attn_mask,
+                 self_attn_padding_mask=self_attn_padding_mask[:, :, start : start + max_cols]
+                 if self_attn_padding_mask is not None
+                 else None,
+             )
+             outputs.append(output)
+             attns.append(attn)
+         output = torch.cat(outputs, 1)
+         attns = torch.cat(attns, 1)
+         return output, attns
+
+     def compute_attention_update(
+         self,
+         x,
+         self_attn_mask=None,
+         self_attn_padding_mask=None,
+     ):
+         num_rows, num_cols, batch_size, embed_dim = x.size()
+         if num_rows == 1:
+             # if there is only 1 position, this is equivalent and doesn't break with padding
+             attn_probs = torch.ones(
+                 self.num_heads,
193
+ num_cols,
194
+ batch_size,
195
+ num_rows,
196
+ num_rows,
197
+ device=x.device,
198
+ dtype=x.dtype,
199
+ )
200
+ output = self.out_proj(self.v_proj(x))
201
+ else:
202
+ q = self.q_proj(x).view(num_rows, num_cols, batch_size, self.num_heads, self.head_dim)
203
+ k = self.k_proj(x).view(num_rows, num_cols, batch_size, self.num_heads, self.head_dim)
204
+ v = self.v_proj(x).view(num_rows, num_cols, batch_size, self.num_heads, self.head_dim)
205
+ q *= self.scaling
206
+
207
+ attn_weights = torch.einsum("icnhd,jcnhd->hcnij", q, k)
208
+
209
+ if self_attn_mask is not None:
210
+ raise NotImplementedError
211
+ if self_attn_padding_mask is not None:
212
+ attn_weights = attn_weights.masked_fill(
213
+ self_attn_padding_mask.permute(2, 0, 1).unsqueeze(0).unsqueeze(3),
214
+ -10000,
215
+ )
216
+
217
+ attn_probs = attn_weights.softmax(-1)
218
+ attn_probs = self.dropout_module(attn_probs)
219
+ context = torch.einsum("hcnij,jcnhd->icnhd", attn_probs, v)
220
+ context = context.contiguous().view(num_rows, num_cols, batch_size, embed_dim)
221
+ output = self.out_proj(context)
222
+ return output, attn_probs
223
+
224
+ def forward(
225
+ self,
226
+ x,
227
+ self_attn_mask=None,
228
+ self_attn_padding_mask=None,
229
+ ):
230
+ num_rows, num_cols, batch_size, embed_dim = x.size()
231
+ # if False and num_rows * num_cols > 2 ** 14 and not torch.is_grad_enabled():
232
+ if (num_rows * num_cols) > self.max_tokens_per_msa and not torch.is_grad_enabled():
233
+ return self._batched_forward(
234
+ x,
235
+ self_attn_mask,
236
+ self_attn_padding_mask,
237
+ )
238
+ else:
239
+ return self.compute_attention_update(x, self_attn_mask, self_attn_padding_mask)
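A minimal smoke test for the two axial-attention modules above (a sketch, assuming the vendored `esm` package is on the import path; all sizes are arbitrary):

```python
import torch
from esm.axial_attention import RowSelfAttention, ColumnSelfAttention

# Inputs follow the (num_rows, num_cols, batch_size, embed_dim)
# layout used throughout this file.
x = torch.randn(8, 16, 2, 64)

row_attn = RowSelfAttention(embed_dim=64, num_heads=4)
col_attn = ColumnSelfAttention(embed_dim=64, num_heads=4)

out, row_probs = row_attn(x)    # tied attention across MSA rows
out, col_probs = col_attn(out)  # per-column attention
print(out.shape)                # torch.Size([8, 16, 2, 64])
```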
esm/esm/constants.py ADDED
@@ -0,0 +1,10 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ # fmt: off
7
+ proteinseq_toks = {
8
+ 'toks': ['L', 'A', 'G', 'V', 'S', 'E', 'R', 'T', 'I', 'D', 'P', 'K', 'Q', 'N', 'F', 'Y', 'M', 'H', 'W', 'C', 'X', 'B', 'U', 'Z', 'O', '.', '-']
9
+ }
10
+ # fmt: on
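As a quick sanity check, the inventory above covers the 20 standard amino acids, the ambiguity/rare codes X, B, U, Z, O, and the alignment characters '.' and '-' (assuming the vendored `esm` package is importable):

```python
from esm.constants import proteinseq_toks

assert len(proteinseq_toks["toks"]) == 27  # 20 standard + 5 special + '.' and '-'
```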
esm/esm/data.py ADDED
@@ -0,0 +1,493 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import itertools
7
+ import os
8
+ from typing import Sequence, Tuple, List, Union
9
+ import pickle
10
+ import re
11
+ import shutil
12
+ import torch
13
+ from pathlib import Path
14
+ from esm.constants import proteinseq_toks
15
+
16
+ RawMSA = Sequence[Tuple[str, str]]
17
+
18
+
19
+ class FastaBatchedDataset(object):
20
+ def __init__(self, sequence_labels, sequence_strs):
21
+ self.sequence_labels = list(sequence_labels)
22
+ self.sequence_strs = list(sequence_strs)
23
+
24
+ @classmethod
25
+ def from_file(cls, fasta_file):
26
+ sequence_labels, sequence_strs = [], []
27
+ cur_seq_label = None
28
+ buf = []
29
+
30
+ def _flush_current_seq():
31
+ nonlocal cur_seq_label, buf
32
+ if cur_seq_label is None:
33
+ return
34
+ sequence_labels.append(cur_seq_label)
35
+ sequence_strs.append("".join(buf))
36
+ cur_seq_label = None
37
+ buf = []
38
+
39
+ with open(fasta_file, "r") as infile:
40
+ for line_idx, line in enumerate(infile):
41
+ if line.startswith(">"): # label line
42
+ _flush_current_seq()
43
+ line = line[1:].strip()
44
+ if len(line) > 0:
45
+ cur_seq_label = line
46
+ else:
47
+ cur_seq_label = f"seqnum{line_idx:09d}"
48
+ else: # sequence line
49
+ buf.append(line.strip())
50
+
51
+ _flush_current_seq()
52
+
53
+ assert len(set(sequence_labels)) == len(
54
+ sequence_labels
55
+ ), "Found duplicate sequence labels"
56
+
57
+ return cls(sequence_labels, sequence_strs)
58
+
59
+ def __len__(self):
60
+ return len(self.sequence_labels)
61
+
62
+ def __getitem__(self, idx):
63
+ return self.sequence_labels[idx], self.sequence_strs[idx]
64
+
65
+ def get_batch_indices(self, toks_per_batch, extra_toks_per_seq=0):
66
+ sizes = [(len(s), i) for i, s in enumerate(self.sequence_strs)]
67
+ sizes.sort()
68
+ batches = []
69
+ buf = []
70
+ max_len = 0
71
+
72
+ def _flush_current_buf():
73
+ nonlocal max_len, buf
74
+ if len(buf) == 0:
75
+ return
76
+ batches.append(buf)
77
+ buf = []
78
+ max_len = 0
79
+
80
+ for sz, i in sizes:
81
+ sz += extra_toks_per_seq
82
+ if max(sz, max_len) * (len(buf) + 1) > toks_per_batch:
83
+ _flush_current_buf()
84
+ max_len = max(max_len, sz)
85
+ buf.append(i)
86
+
87
+ _flush_current_buf()
88
+ return batches
89
+
90
+
91
+ class Alphabet(object):
92
+ def __init__(
93
+ self,
94
+ standard_toks: Sequence[str],
95
+ prepend_toks: Sequence[str] = ("<null_0>", "<pad>", "<eos>", "<unk>"),
96
+ append_toks: Sequence[str] = ("<cls>", "<mask>", "<sep>"),
97
+ prepend_bos: bool = True,
98
+ append_eos: bool = False,
99
+ use_msa: bool = False,
100
+ ):
101
+ self.standard_toks = list(standard_toks)
102
+ self.prepend_toks = list(prepend_toks)
103
+ self.append_toks = list(append_toks)
104
+ self.prepend_bos = prepend_bos
105
+ self.append_eos = append_eos
106
+ self.use_msa = use_msa
107
+
108
+ self.all_toks = list(self.prepend_toks)
109
+ self.all_toks.extend(self.standard_toks)
110
+ for i in range((8 - (len(self.all_toks) % 8)) % 8):
111
+ self.all_toks.append(f"<null_{i + 1}>")
112
+ self.all_toks.extend(self.append_toks)
113
+
114
+ self.tok_to_idx = {tok: i for i, tok in enumerate(self.all_toks)}
115
+
116
+ self.unk_idx = self.tok_to_idx["<unk>"]
117
+ self.padding_idx = self.get_idx("<pad>")
118
+ self.cls_idx = self.get_idx("<cls>")
119
+ self.mask_idx = self.get_idx("<mask>")
120
+ self.eos_idx = self.get_idx("<eos>")
121
+ self.all_special_tokens = ['<eos>', '<unk>', '<pad>', '<cls>', '<mask>']
122
+ self.unique_no_split_tokens = self.all_toks
123
+
124
+ def __len__(self):
125
+ return len(self.all_toks)
126
+
127
+ def get_idx(self, tok):
128
+ return self.tok_to_idx.get(tok, self.unk_idx)
129
+
130
+ def get_tok(self, ind):
131
+ return self.all_toks[ind]
132
+
133
+ def to_dict(self):
134
+ return self.tok_to_idx.copy()
135
+
136
+ def get_batch_converter(self, truncation_seq_length: int = None):
137
+ if self.use_msa:
138
+ return MSABatchConverter(self, truncation_seq_length)
139
+ else:
140
+ return BatchConverter(self, truncation_seq_length)
141
+
142
+ @classmethod
143
+ def from_architecture(cls, name: str) -> "Alphabet":
144
+ if name in ("ESM-1", "protein_bert_base"):
145
+ standard_toks = proteinseq_toks["toks"]
146
+ prepend_toks: Tuple[str, ...] = ("<null_0>", "<pad>", "<eos>", "<unk>")
147
+ append_toks: Tuple[str, ...] = ("<cls>", "<mask>", "<sep>")
148
+ prepend_bos = True
149
+ append_eos = False
150
+ use_msa = False
151
+ elif name in ("ESM-1b", "roberta_large"):
152
+ standard_toks = proteinseq_toks["toks"]
153
+ prepend_toks = ("<cls>", "<pad>", "<eos>", "<unk>")
154
+ append_toks = ("<mask>",)
155
+ prepend_bos = True
156
+ append_eos = True
157
+ use_msa = False
158
+ elif name in ("MSA Transformer", "msa_transformer"):
159
+ standard_toks = proteinseq_toks["toks"]
160
+ prepend_toks = ("<cls>", "<pad>", "<eos>", "<unk>")
161
+ append_toks = ("<mask>",)
162
+ prepend_bos = True
163
+ append_eos = False
164
+ use_msa = True
165
+ elif "invariant_gvp" in name.lower():
166
+ standard_toks = proteinseq_toks["toks"]
167
+ prepend_toks = ("<null_0>", "<pad>", "<eos>", "<unk>")
168
+ append_toks = ("<mask>", "<cath>", "<af2>")
169
+ prepend_bos = True
170
+ append_eos = False
171
+ use_msa = False
172
+ else:
173
+ raise ValueError("Unknown architecture selected")
174
+ return cls(standard_toks, prepend_toks, append_toks, prepend_bos, append_eos, use_msa)
175
+
176
+ def _tokenize(self, text) -> List[str]:
177
+ return text.split()
178
+
179
+ def tokenize(self, text, **kwargs) -> List[str]:
180
+ """
181
+ Inspired by https://github.com/huggingface/transformers/blob/master/src/transformers/tokenization_utils.py
182
+ Converts a string into a sequence of tokens, using the tokenizer.
183
+
184
+ Args:
185
+ text (:obj:`str`):
186
+ The sequence to be encoded.
187
+
188
+ Returns:
189
+ :obj:`List[str]`: The list of tokens.
190
+ """
191
+
192
+ def split_on_token(tok, text):
193
+ result = []
194
+ split_text = text.split(tok)
195
+ for i, sub_text in enumerate(split_text):
196
+ # AddedToken can control whitespace stripping around them.
197
+ # We use them for GPT2 and Roberta to have different behavior depending on the special token
198
+ # Cf. https://github.com/huggingface/transformers/pull/2778
199
+ # and https://github.com/huggingface/transformers/issues/3788
200
+ # We strip left and right by default
201
+ if i < len(split_text) - 1:
202
+ sub_text = sub_text.rstrip()
203
+ if i > 0:
204
+ sub_text = sub_text.lstrip()
205
+
206
+ if i == 0 and not sub_text:
207
+ result.append(tok)
208
+ elif i == len(split_text) - 1:
209
+ if sub_text:
210
+ result.append(sub_text)
211
+ else:
212
+ pass
213
+ else:
214
+ if sub_text:
215
+ result.append(sub_text)
216
+ result.append(tok)
217
+ return result
218
+
219
+ def split_on_tokens(tok_list, text):
220
+ if not text.strip():
221
+ return []
222
+
223
+ tokenized_text = []
224
+ text_list = [text]
225
+ for tok in tok_list:
226
+ tokenized_text = []
227
+ for sub_text in text_list:
228
+ if sub_text not in self.unique_no_split_tokens:
229
+ tokenized_text.extend(split_on_token(tok, sub_text))
230
+ else:
231
+ tokenized_text.append(sub_text)
232
+ text_list = tokenized_text
233
+
234
+ return list(
235
+ itertools.chain.from_iterable(
236
+ (
237
+ self._tokenize(token)
238
+ if token not in self.unique_no_split_tokens
239
+ else [token]
240
+ for token in tokenized_text
241
+ )
242
+ )
243
+ )
244
+
245
+ no_split_token = self.unique_no_split_tokens
246
+ tokenized_text = split_on_tokens(no_split_token, text)
247
+ return tokenized_text
248
+
249
+ def encode(self, text):
250
+ return [self.tok_to_idx[tok] for tok in self.tokenize(text)]
251
+
252
+
253
+ class BatchConverter(object):
254
+ """Callable to convert an unprocessed (labels + strings) batch to a
255
+ processed (labels + tensor) batch.
256
+ """
257
+
258
+ def __init__(self, alphabet, truncation_seq_length: int = None):
259
+ self.alphabet = alphabet
260
+ self.truncation_seq_length = truncation_seq_length
261
+
262
+ def __call__(self, raw_batch: Sequence[Tuple[str, str]]):
263
+ # RoBERTa uses an eos token, while ESM-1 does not.
264
+ batch_size = len(raw_batch)
265
+ batch_labels, seq_str_list = zip(*raw_batch)
266
+ seq_encoded_list = [self.alphabet.encode(seq_str) for seq_str in seq_str_list]
267
+ if self.truncation_seq_length:
268
+ seq_encoded_list = [seq_str[:self.truncation_seq_length] for seq_str in seq_encoded_list]
269
+ max_len = max(len(seq_encoded) for seq_encoded in seq_encoded_list)
270
+ tokens = torch.empty(
271
+ (
272
+ batch_size,
273
+ max_len + int(self.alphabet.prepend_bos) + int(self.alphabet.append_eos),
274
+ ),
275
+ dtype=torch.int64,
276
+ )
277
+ tokens.fill_(self.alphabet.padding_idx)
278
+ labels = []
279
+ strs = []
280
+
281
+ for i, (label, seq_str, seq_encoded) in enumerate(
282
+ zip(batch_labels, seq_str_list, seq_encoded_list)
283
+ ):
284
+ labels.append(label)
285
+ strs.append(seq_str)
286
+ if self.alphabet.prepend_bos:
287
+ tokens[i, 0] = self.alphabet.cls_idx
288
+ seq = torch.tensor(seq_encoded, dtype=torch.int64)
289
+ tokens[
290
+ i,
291
+ int(self.alphabet.prepend_bos) : len(seq_encoded)
292
+ + int(self.alphabet.prepend_bos),
293
+ ] = seq
294
+ if self.alphabet.append_eos:
295
+ tokens[i, len(seq_encoded) + int(self.alphabet.prepend_bos)] = self.alphabet.eos_idx
296
+
297
+ return labels, strs, tokens
298
+
299
+
300
+ class MSABatchConverter(BatchConverter):
301
+ def __call__(self, inputs: Union[Sequence[RawMSA], RawMSA]):
302
+ if isinstance(inputs[0][0], str):
303
+ # Input is a single MSA
304
+ raw_batch: Sequence[RawMSA] = [inputs] # type: ignore
305
+ else:
306
+ raw_batch = inputs # type: ignore
307
+
308
+ batch_size = len(raw_batch)
309
+ max_alignments = max(len(msa) for msa in raw_batch)
310
+ max_seqlen = max(len(msa[0][1]) for msa in raw_batch)
311
+
312
+ tokens = torch.empty(
313
+ (
314
+ batch_size,
315
+ max_alignments,
316
+ max_seqlen + int(self.alphabet.prepend_bos) + int(self.alphabet.append_eos),
317
+ ),
318
+ dtype=torch.int64,
319
+ )
320
+ tokens.fill_(self.alphabet.padding_idx)
321
+ labels = []
322
+ strs = []
323
+
324
+ for i, msa in enumerate(raw_batch):
325
+ msa_seqlens = set(len(seq) for _, seq in msa)
326
+ if not len(msa_seqlens) == 1:
327
+ raise RuntimeError(
328
+ "Received unaligned sequences for input to MSA, all sequence "
329
+ "lengths must be equal."
330
+ )
331
+ msa_labels, msa_strs, msa_tokens = super().__call__(msa)
332
+ labels.append(msa_labels)
333
+ strs.append(msa_strs)
334
+ tokens[i, : msa_tokens.size(0), : msa_tokens.size(1)] = msa_tokens
335
+
336
+ return labels, strs, tokens
337
+
338
+
339
+ def read_fasta(
340
+ path,
341
+ keep_gaps=True,
342
+ keep_insertions=True,
343
+ to_upper=False,
344
+ ):
345
+ with open(path, "r") as f:
346
+ for result in read_alignment_lines(
347
+ f, keep_gaps=keep_gaps, keep_insertions=keep_insertions, to_upper=to_upper
348
+ ):
349
+ yield result
350
+
351
+
352
+ def read_alignment_lines(
353
+ lines,
354
+ keep_gaps=True,
355
+ keep_insertions=True,
356
+ to_upper=False,
357
+ ):
358
+ seq = desc = None
359
+
360
+ def parse(s):
361
+ if not keep_gaps:
362
+ s = re.sub("-", "", s)
363
+ if not keep_insertions:
364
+ s = re.sub("[a-z]", "", s)
365
+ return s.upper() if to_upper else s
366
+
367
+ for line in lines:
368
+ # Line may be empty if seq % file_line_width == 0
369
+ if len(line) > 0 and line[0] == ">":
370
+ if seq is not None:
371
+ yield desc, parse(seq)
372
+ desc = line.strip()
373
+ seq = ""
374
+ else:
375
+ assert isinstance(seq, str)
376
+ seq += line.strip()
377
+ assert isinstance(seq, str) and isinstance(desc, str)
378
+ yield desc, parse(seq)
379
+
380
+
381
+ class ESMStructuralSplitDataset(torch.utils.data.Dataset):
382
+ """
383
+ Structural Split Dataset as described in section A.10 of the supplement of our paper.
384
+ https://doi.org/10.1101/622803
385
+
386
+ We use the full version of SCOPe 2.07, clustered at 90% sequence identity,
387
+ generated on January 23, 2020.
388
+
389
+ For each SCOPe domain:
390
+ - We extract the sequence from the corresponding PDB file
391
+ - We extract the 3D coordinates of the Carbon beta atoms, aligning them
392
+ to the sequence. We put NaN where Cb atoms are missing.
393
+ - From the 3D coordinates, we calculate a pairwise distance map, based
394
+ on L2 distance
395
+ - We use DSSP to generate secondary structure labels for the corresponding
396
+ PDB file. This is also aligned to the sequence. We put - where SSP
397
+ labels are missing.
398
+
399
+ For each SCOPe classification level of family/superfamily/fold (in order of difficulty),
400
+ we have split the data into 5 partitions for cross validation. These are provided
401
+ in a downloaded splits folder, in the format:
402
+ splits/{split_level}/{cv_partition}/{train|valid}.txt
403
+ where train is the partition and valid is the concatenation of the remaining 4.
404
+
405
+ For each SCOPe domain, we provide a pkl dump that contains:
406
+ - seq : The domain sequence, stored as an L-length string
407
+ - ssp : The secondary structure labels, stored as an L-length string
408
+ - dist : The distance map, stored as an LxL numpy array
409
+ - coords : The 3D coordinates, stored as an Lx3 numpy array
410
+
411
+ """
412
+
413
+ base_folder = "structural-data"
414
+ file_list = [
415
+ # url tar filename filename MD5 Hash
416
+ (
417
+ "https://dl.fbaipublicfiles.com/fair-esm/structural-data/splits.tar.gz",
418
+ "splits.tar.gz",
419
+ "splits",
420
+ "456fe1c7f22c9d3d8dfe9735da52411d",
421
+ ),
422
+ (
423
+ "https://dl.fbaipublicfiles.com/fair-esm/structural-data/pkl.tar.gz",
424
+ "pkl.tar.gz",
425
+ "pkl",
426
+ "644ea91e56066c750cd50101d390f5db",
427
+ ),
428
+ ]
429
+
430
+ def __init__(
431
+ self,
432
+ split_level,
433
+ cv_partition,
434
+ split,
435
+ root_path=os.path.expanduser("~/.cache/torch/data/esm"),
436
+ download=False,
437
+ ):
438
+ super().__init__()
439
+ assert split in [
440
+ "train",
441
+ "valid",
442
+ ], "train_valid must be 'train' or 'valid'"
443
+ self.root_path = root_path
444
+ self.base_path = os.path.join(self.root_path, self.base_folder)
445
+
446
+ # check if root path has what you need or else download it
447
+ if download:
448
+ self.download()
449
+
450
+ self.split_file = os.path.join(
451
+ self.base_path, "splits", split_level, cv_partition, f"{split}.txt"
452
+ )
453
+ self.pkl_dir = os.path.join(self.base_path, "pkl")
454
+ self.names = []
455
+ with open(self.split_file) as f:
456
+ self.names = f.read().splitlines()
457
+
458
+ def __len__(self):
459
+ return len(self.names)
460
+
461
+ def _check_exists(self) -> bool:
462
+ for (_, _, filename, _) in self.file_list:
463
+ fpath = os.path.join(self.base_path, filename)
464
+ if not os.path.exists(fpath) or not os.path.isdir(fpath):
465
+ return False
466
+ return True
467
+
468
+ def download(self):
469
+
470
+ if self._check_exists():
471
+ print("Files already downloaded and verified")
472
+ return
473
+
474
+ from torchvision.datasets.utils import download_url
475
+
476
+ for url, tar_filename, filename, md5_hash in self.file_list:
477
+ download_path = os.path.join(self.base_path, tar_filename)
478
+ download_url(url=url, root=self.base_path, filename=tar_filename, md5=md5_hash)
479
+ shutil.unpack_archive(download_path, self.base_path)
480
+
481
+ def __getitem__(self, idx):
482
+ """
483
+ Returns a dict with the following entries
484
+ - seq : Str (domain sequence)
485
+ - ssp : Str (SSP labels)
486
+ - dist : np.array (distance map)
487
+ - coords : np.array (3D coordinates)
488
+ """
489
+ name = self.names[idx]
490
+ pkl_fname = os.path.join(self.pkl_dir, name[1:3], f"{name}.pkl")
491
+ with open(pkl_fname, "rb") as f:
492
+ obj = pickle.load(f)
493
+ return obj
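Typical round trip through the Alphabet / BatchConverter pair defined above (a sketch; the labels and sequences are invented for illustration):

```python
from esm.data import Alphabet

alphabet = Alphabet.from_architecture("ESM-1b")
batch_converter = alphabet.get_batch_converter()

data = [("protein1", "MKTVRQ"), ("protein2", "KALTARQS")]
labels, strs, tokens = batch_converter(data)

# ESM-1b prepends <cls> and appends <eos>, so tokens has shape
# (batch, longest_sequence + 2), padded with alphabet.padding_idx.
print(tokens.shape)  # torch.Size([2, 10])
```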
esm/esm/inverse_folding/__init__.py ADDED
@@ -0,0 +1,8 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ from . import gvp_transformer
7
+ from . import util
8
+ from . import multichain_util
esm/esm/inverse_folding/features.py ADDED
@@ -0,0 +1,352 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+ #
6
+ # Portions of this file were adapted from the open source code for the following
7
+ # two papers:
8
+ #
9
+ # Ingraham, J., Garg, V., Barzilay, R., & Jaakkola, T. (2019). Generative
10
+ # models for graph-based protein design. Advances in Neural Information
11
+ # Processing Systems, 32.
12
+ #
13
+ # Jing, B., Eismann, S., Suriana, P., Townshend, R. J. L., & Dror, R. (2020).
14
+ # Learning from Protein Structure with Geometric Vector Perceptrons. In
15
+ # International Conference on Learning Representations.
16
+ #
17
+ # MIT License
18
+ #
19
+ # Copyright (c) 2020 Bowen Jing, Stephan Eismann, Patricia Suriana, Raphael Townshend, Ron Dror
20
+ #
21
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
22
+ # of this software and associated documentation files (the "Software"), to deal
23
+ # in the Software without restriction, including without limitation the rights
24
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
25
+ # copies of the Software, and to permit persons to whom the Software is
26
+ # furnished to do so, subject to the following conditions:
27
+ #
28
+ # The above copyright notice and this permission notice shall be included in all
29
+ # copies or substantial portions of the Software.
30
+ #
31
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
32
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
33
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
34
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
35
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
36
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
37
+ # SOFTWARE.
38
+ #
39
+ # ================================================================
40
+ # The below license applies to the portions of the code (parts of
41
+ # src/datasets.py and src/models.py) adapted from Ingraham, et al.
42
+ # ================================================================
43
+ #
44
+ # MIT License
45
+ #
46
+ # Copyright (c) 2019 John Ingraham, Vikas Garg, Regina Barzilay, Tommi Jaakkola
47
+ #
48
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
49
+ # of this software and associated documentation files (the "Software"), to deal
50
+ # in the Software without restriction, including without limitation the rights
51
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
52
+ # copies of the Software, and to permit persons to whom the Software is
53
+ # furnished to do so, subject to the following conditions:
54
+ #
55
+ # The above copyright notice and this permission notice shall be included in all
56
+ # copies or substantial portions of the Software.
57
+ #
58
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
59
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
60
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
61
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
62
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
63
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
64
+ # SOFTWARE.
65
+
66
+ import math
67
+ import numpy as np
68
+ import torch
69
+ import torch.nn as nn
70
+ import torch.nn.functional as F
71
+
72
+ from .gvp_utils import flatten_graph
73
+ from .gvp_modules import GVP, LayerNorm
74
+ from .util import normalize, norm, nan_to_num, rbf
75
+
76
+
77
+ class GVPInputFeaturizer(nn.Module):
78
+
79
+ @staticmethod
80
+ def get_node_features(coords, coord_mask, with_coord_mask=True):
81
+ # scalar features
82
+ node_scalar_features = GVPInputFeaturizer._dihedrals(coords)
83
+ if with_coord_mask:
84
+ node_scalar_features = torch.cat([
85
+ node_scalar_features,
86
+ coord_mask.float().unsqueeze(-1)
87
+ ], dim=-1)
88
+ # vector features
89
+ X_ca = coords[:, :, 1]
90
+ orientations = GVPInputFeaturizer._orientations(X_ca)
91
+ sidechains = GVPInputFeaturizer._sidechains(coords)
92
+ node_vector_features = torch.cat([orientations, sidechains.unsqueeze(-2)], dim=-2)
93
+ return node_scalar_features, node_vector_features
94
+
95
+ @staticmethod
96
+ def _orientations(X):
97
+ forward = normalize(X[:, 1:] - X[:, :-1])
98
+ backward = normalize(X[:, :-1] - X[:, 1:])
99
+ forward = F.pad(forward, [0, 0, 0, 1])
100
+ backward = F.pad(backward, [0, 0, 1, 0])
101
+ return torch.cat([forward.unsqueeze(-2), backward.unsqueeze(-2)], -2)
102
+
103
+ @staticmethod
104
+ def _sidechains(X):
105
+ n, origin, c = X[:, :, 0], X[:, :, 1], X[:, :, 2]
106
+ c, n = normalize(c - origin), normalize(n - origin)
107
+ bisector = normalize(c + n)
108
+ perp = normalize(torch.cross(c, n, dim=-1))
109
+ vec = -bisector * math.sqrt(1 / 3) - perp * math.sqrt(2 / 3)
110
+ return vec
111
+
112
+ @staticmethod
113
+ def _dihedrals(X, eps=1e-7):
114
+ X = torch.flatten(X[:, :, :3], 1, 2)
115
+ bsz = X.shape[0]
116
+ dX = X[:, 1:] - X[:, :-1]
117
+ U = normalize(dX, dim=-1)
118
+ u_2 = U[:, :-2]
119
+ u_1 = U[:, 1:-1]
120
+ u_0 = U[:, 2:]
121
+
122
+ # Backbone normals
123
+ n_2 = normalize(torch.cross(u_2, u_1, dim=-1), dim=-1)
124
+ n_1 = normalize(torch.cross(u_1, u_0, dim=-1), dim=-1)
125
+
126
+ # Angle between normals
127
+ cosD = torch.sum(n_2 * n_1, -1)
128
+ cosD = torch.clamp(cosD, -1 + eps, 1 - eps)
129
+ D = torch.sign(torch.sum(u_2 * n_1, -1)) * torch.acos(cosD)
130
+
131
+ # This scheme will remove phi[0], psi[-1], omega[-1]
132
+ D = F.pad(D, [1, 2])
133
+ D = torch.reshape(D, [bsz, -1, 3])
134
+ # Lift angle representations to the circle
135
+ D_features = torch.cat([torch.cos(D), torch.sin(D)], -1)
136
+ return D_features
137
+
138
+ @staticmethod
139
+ def _positional_embeddings(edge_index,
140
+ num_embeddings=None,
141
+ num_positional_embeddings=16,
142
+ period_range=[2, 1000]):
143
+ # From https://github.com/jingraham/neurips19-graph-protein-design
144
+ num_embeddings = num_embeddings or num_positional_embeddings
145
+ d = edge_index[0] - edge_index[1]
146
+
147
+ frequency = torch.exp(
148
+ torch.arange(0, num_embeddings, 2, dtype=torch.float32,
149
+ device=edge_index.device)
150
+ * -(np.log(10000.0) / num_embeddings)
151
+ )
152
+ angles = d.unsqueeze(-1) * frequency
153
+ E = torch.cat((torch.cos(angles), torch.sin(angles)), -1)
154
+ return E
155
+
156
+ @staticmethod
157
+ def _dist(X, coord_mask, padding_mask, top_k_neighbors, eps=1e-8):
158
+ """ Pairwise euclidean distances """
159
+ bsz, maxlen = X.size(0), X.size(1)
160
+ coord_mask_2D = torch.unsqueeze(coord_mask,1) * torch.unsqueeze(coord_mask,2)
161
+ residue_mask = ~padding_mask
162
+ residue_mask_2D = torch.unsqueeze(residue_mask,1) * torch.unsqueeze(residue_mask,2)
163
+ dX = torch.unsqueeze(X,1) - torch.unsqueeze(X,2)
164
+ D = coord_mask_2D * norm(dX, dim=-1)
165
+
166
+ # sorting preference: first those with coords, then among the residues that
167
+ # exist but are masked use distance in sequence as tie breaker, and then the
168
+ # residues that came from padding are last
169
+ seqpos = torch.arange(maxlen, device=X.device)
170
+ Dseq = torch.abs(seqpos.unsqueeze(1) - seqpos.unsqueeze(0)).repeat(bsz, 1, 1)
171
+ D_adjust = nan_to_num(D) + (~coord_mask_2D) * (1e8 + Dseq*1e6) + (
172
+ ~residue_mask_2D) * (1e10)
173
+
174
+ if top_k_neighbors == -1:
175
+ D_neighbors = D_adjust
176
+ E_idx = seqpos.repeat(
177
+ *D_neighbors.shape[:-1], 1)
178
+ else:
179
+ # Identify k nearest neighbors (including self)
180
+ k = min(top_k_neighbors, X.size(1))
181
+ D_neighbors, E_idx = torch.topk(D_adjust, k, dim=-1, largest=False)
182
+
183
+ coord_mask_neighbors = (D_neighbors < 5e7)
184
+ residue_mask_neighbors = (D_neighbors < 5e9)
185
+ return D_neighbors, E_idx, coord_mask_neighbors, residue_mask_neighbors
186
+
187
+
188
+ class Normalize(nn.Module):
189
+ def __init__(self, features, epsilon=1e-6):
190
+ super(Normalize, self).__init__()
191
+ self.gain = nn.Parameter(torch.ones(features))
192
+ self.bias = nn.Parameter(torch.zeros(features))
193
+ self.epsilon = epsilon
194
+
195
+ def forward(self, x, dim=-1):
196
+ mu = x.mean(dim, keepdim=True)
197
+ sigma = torch.sqrt(x.var(dim, keepdim=True) + self.epsilon)
198
+ gain = self.gain
199
+ bias = self.bias
200
+ # Reshape
201
+ if dim != -1:
202
+ shape = [1] * len(mu.size())
203
+ shape[dim] = self.gain.size()[0]
204
+ gain = gain.view(shape)
205
+ bias = bias.view(shape)
206
+ return gain * (x - mu) / (sigma + self.epsilon) + bias
207
+
208
+
209
+ class DihedralFeatures(nn.Module):
210
+ def __init__(self, node_embed_dim):
211
+ """ Embed dihedral angle features. """
212
+ super(DihedralFeatures, self).__init__()
213
+ # 3 dihedral angles; sin and cos of each angle
214
+ node_in = 6
215
+ # Normalization and embedding
216
+ self.node_embedding = nn.Linear(node_in, node_embed_dim, bias=True)
217
+ self.norm_nodes = Normalize(node_embed_dim)
218
+
219
+ def forward(self, X):
220
+ """ Featurize coordinates as an attributed graph """
221
+ V = self._dihedrals(X)
222
+ V = self.node_embedding(V)
223
+ V = self.norm_nodes(V)
224
+ return V
225
+
226
+ @staticmethod
227
+ def _dihedrals(X, eps=1e-7, return_angles=False):
228
+ # First 3 coordinates are N, CA, C
229
+ X = X[:,:,:3,:].reshape(X.shape[0], 3*X.shape[1], 3)
230
+
231
+ # Shifted slices of unit vectors
232
+ dX = X[:,1:,:] - X[:,:-1,:]
233
+ U = F.normalize(dX, dim=-1)
234
+ u_2 = U[:,:-2,:]
235
+ u_1 = U[:,1:-1,:]
236
+ u_0 = U[:,2:,:]
237
+ # Backbone normals
238
+ n_2 = F.normalize(torch.cross(u_2, u_1, dim=-1), dim=-1)
239
+ n_1 = F.normalize(torch.cross(u_1, u_0, dim=-1), dim=-1)
240
+
241
+ # Angle between normals
242
+ cosD = (n_2 * n_1).sum(-1)
243
+ cosD = torch.clamp(cosD, -1+eps, 1-eps)
244
+ D = torch.sign((u_2 * n_1).sum(-1)) * torch.acos(cosD)
245
+
246
+ # This scheme will remove phi[0], psi[-1], omega[-1]
247
+ D = F.pad(D, (1,2), 'constant', 0)
248
+ D = D.view((D.size(0), int(D.size(1)/3), 3))
249
+ phi, psi, omega = torch.unbind(D,-1)
250
+
251
+ if return_angles:
252
+ return phi, psi, omega
253
+
254
+ # Lift angle representations to the circle
255
+ D_features = torch.cat((torch.cos(D), torch.sin(D)), 2)
256
+ return D_features
257
+
258
+
259
+ class GVPGraphEmbedding(GVPInputFeaturizer):
260
+
261
+ def __init__(self, args):
262
+ super().__init__()
263
+ self.top_k_neighbors = args.top_k_neighbors
264
+ self.num_positional_embeddings = 16
265
+ self.remove_edges_without_coords = True
266
+ node_input_dim = (7, 3)
267
+ edge_input_dim = (34, 1)
268
+ node_hidden_dim = (args.node_hidden_dim_scalar,
269
+ args.node_hidden_dim_vector)
270
+ edge_hidden_dim = (args.edge_hidden_dim_scalar,
271
+ args.edge_hidden_dim_vector)
272
+ self.embed_node = nn.Sequential(
273
+ GVP(node_input_dim, node_hidden_dim, activations=(None, None)),
274
+ LayerNorm(node_hidden_dim, eps=1e-4)
275
+ )
276
+ self.embed_edge = nn.Sequential(
277
+ GVP(edge_input_dim, edge_hidden_dim, activations=(None, None)),
278
+ LayerNorm(edge_hidden_dim, eps=1e-4)
279
+ )
280
+ self.embed_confidence = nn.Linear(16, args.node_hidden_dim_scalar)
281
+
282
+ def forward(self, coords, coord_mask, padding_mask, confidence):
283
+ with torch.no_grad():
284
+ node_features = self.get_node_features(coords, coord_mask)
285
+ edge_features, edge_index = self.get_edge_features(
286
+ coords, coord_mask, padding_mask)
287
+ node_embeddings_scalar, node_embeddings_vector = self.embed_node(node_features)
288
+ edge_embeddings = self.embed_edge(edge_features)
289
+
290
+ rbf_rep = rbf(confidence, 0., 1.)
291
+ node_embeddings = (
292
+ node_embeddings_scalar + self.embed_confidence(rbf_rep),
293
+ node_embeddings_vector
294
+ )
295
+
296
+ node_embeddings, edge_embeddings, edge_index = flatten_graph(
297
+ node_embeddings, edge_embeddings, edge_index)
298
+ return node_embeddings, edge_embeddings, edge_index
299
+
300
+ def get_edge_features(self, coords, coord_mask, padding_mask):
301
+ X_ca = coords[:, :, 1]
302
+ # Get distances to the top k neighbors
303
+ E_dist, E_idx, E_coord_mask, E_residue_mask = GVPInputFeaturizer._dist(
304
+ X_ca, coord_mask, padding_mask, self.top_k_neighbors)
305
+ # Flatten the graph to be batch size 1 for torch_geometric package
306
+ dest = E_idx
307
+ B, L, k = E_idx.shape[:3]
308
+ src = torch.arange(L, device=E_idx.device).view([1, L, 1]).expand(B, L, k)
309
+ # After flattening, [2, B, E]
310
+ edge_index = torch.stack([src, dest], dim=0).flatten(2, 3)
311
+ # After flattening, [B, E]
312
+ E_dist = E_dist.flatten(1, 2)
313
+ E_coord_mask = E_coord_mask.flatten(1, 2).unsqueeze(-1)
314
+ E_residue_mask = E_residue_mask.flatten(1, 2)
315
+ # Calculate relative positional embeddings and distance RBF
316
+ pos_embeddings = GVPInputFeaturizer._positional_embeddings(
317
+ edge_index,
318
+ num_positional_embeddings=self.num_positional_embeddings,
319
+ )
320
+ D_rbf = rbf(E_dist, 0., 20.)
321
+ # Calculate relative orientation
322
+ X_src = X_ca.unsqueeze(2).expand(-1, -1, k, -1).flatten(1, 2)
323
+ X_dest = torch.gather(
324
+ X_ca,
325
+ 1,
326
+ edge_index[1, :, :].unsqueeze(-1).expand([B, L*k, 3])
327
+ )
328
+ coord_mask_src = coord_mask.unsqueeze(2).expand(-1, -1, k).flatten(1, 2)
329
+ coord_mask_dest = torch.gather(
330
+ coord_mask,
331
+ 1,
332
+ edge_index[1, :, :].expand([B, L*k])
333
+ )
334
+ E_vectors = X_src - X_dest
335
+ # For the ones without coordinates, substitute in the average vector
336
+ E_vector_mean = torch.sum(E_vectors * E_coord_mask, dim=1,
337
+ keepdims=True) / torch.sum(E_coord_mask, dim=1, keepdims=True)
338
+ E_vectors = E_vectors * E_coord_mask + E_vector_mean * ~(E_coord_mask)
339
+ # Normalize and remove nans
340
+ edge_s = torch.cat([D_rbf, pos_embeddings], dim=-1)
341
+ edge_v = normalize(E_vectors).unsqueeze(-2)
342
+ edge_s, edge_v = map(nan_to_num, (edge_s, edge_v))
343
+ # Also add indications of whether the coordinates are present
344
+ edge_s = torch.cat([
345
+ edge_s,
346
+ (~coord_mask_src).float().unsqueeze(-1),
347
+ (~coord_mask_dest).float().unsqueeze(-1),
348
+ ], dim=-1)
349
+ edge_index[:, ~E_residue_mask] = -1
350
+ if self.remove_edges_without_coords:
351
+ edge_index[:, ~E_coord_mask.squeeze(-1)] = -1
352
+ return (edge_s, edge_v), edge_index.transpose(0, 1)
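A shape sketch for the node featurizer above (illustrative only; coordinates follow the (batch, length, [N, CA, C], xyz) layout this file assumes):

```python
import torch
from esm.inverse_folding.features import GVPInputFeaturizer

coords = torch.randn(2, 50, 3, 3)                 # (batch, seq_len, [N, CA, C], xyz)
coord_mask = torch.ones(2, 50, dtype=torch.bool)  # True where coordinates exist

scalars, vectors = GVPInputFeaturizer.get_node_features(coords, coord_mask)
print(scalars.shape)  # (2, 50, 7): sin/cos of 3 dihedrals + coord-mask flag
print(vectors.shape)  # (2, 50, 3, 3): forward/backward orientations + sidechain
```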
esm/esm/inverse_folding/gvp_encoder.py ADDED
@@ -0,0 +1,56 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ from argparse import Namespace
7
+
8
+ import torch
9
+ import torch.nn as nn
10
+ import torch.nn.functional as F
11
+
12
+ from .features import GVPGraphEmbedding
13
+ from .gvp_modules import GVPConvLayer, LayerNorm
14
+ from .gvp_utils import unflatten_graph
15
+
16
+
17
+
18
+ class GVPEncoder(nn.Module):
19
+
20
+ def __init__(self, args):
21
+ super().__init__()
22
+ self.args = args
23
+ self.embed_graph = GVPGraphEmbedding(args)
24
+
25
+ node_hidden_dim = (args.node_hidden_dim_scalar,
26
+ args.node_hidden_dim_vector)
27
+ edge_hidden_dim = (args.edge_hidden_dim_scalar,
28
+ args.edge_hidden_dim_vector)
29
+
30
+ conv_activations = (F.relu, torch.sigmoid)
31
+ self.encoder_layers = nn.ModuleList(
32
+ GVPConvLayer(
33
+ node_hidden_dim,
34
+ edge_hidden_dim,
35
+ drop_rate=args.dropout,
36
+ vector_gate=True,
37
+ attention_heads=0,
38
+ n_message=3,
39
+ conv_activations=conv_activations,
40
+ n_edge_gvps=0,
41
+ eps=1e-4,
42
+ layernorm=True,
43
+ )
44
+ for i in range(args.num_encoder_layers)
45
+ )
46
+
47
+ def forward(self, coords, coord_mask, padding_mask, confidence):
48
+ node_embeddings, edge_embeddings, edge_index = self.embed_graph(
49
+ coords, coord_mask, padding_mask, confidence)
50
+
51
+ for i, layer in enumerate(self.encoder_layers):
52
+ node_embeddings, edge_embeddings = layer(node_embeddings,
53
+ edge_index, edge_embeddings)
54
+
55
+ node_embeddings = unflatten_graph(node_embeddings, coords.shape[0])
56
+ return node_embeddings
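Hypothetical construction of the encoder above. The `Namespace` fields mirror the attributes read from `args` in `GVPGraphEmbedding` and `GVPEncoder`; the values are placeholders, and `torch_geometric`/`torch_scatter` must be installed:

```python
from argparse import Namespace
from esm.inverse_folding.gvp_encoder import GVPEncoder

args = Namespace(
    top_k_neighbors=30,
    node_hidden_dim_scalar=100,
    node_hidden_dim_vector=16,
    edge_hidden_dim_scalar=32,
    edge_hidden_dim_vector=1,
    dropout=0.1,
    num_encoder_layers=3,
)
encoder = GVPEncoder(args)  # forward(coords, coord_mask, padding_mask, confidence)
```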
esm/esm/inverse_folding/gvp_modules.py ADDED
@@ -0,0 +1,473 @@
1
+ # Contents of this file are from the open source code for
2
+ #
3
+ # Jing, B., Eismann, S., Suriana, P., Townshend, R. J. L., & Dror, R. (2020).
4
+ # Learning from Protein Structure with Geometric Vector Perceptrons. In
5
+ # International Conference on Learning Representations.
6
+ #
7
+ # MIT License
8
+ #
9
+ # Copyright (c) 2020 Bowen Jing, Stephan Eismann, Patricia Suriana, Raphael Townshend, Ron Dror
10
+ #
11
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
12
+ # of this software and associated documentation files (the "Software"), to deal
13
+ # in the Software without restriction, including without limitation the rights
14
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
15
+ # copies of the Software, and to permit persons to whom the Software is
16
+ # furnished to do so, subject to the following conditions:
17
+ #
18
+ # The above copyright notice and this permission notice shall be included in all
19
+ # copies or substantial portions of the Software.
20
+ #
21
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
22
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
23
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
24
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
25
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
26
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
27
+ # SOFTWARE.
28
+
29
+ import typing as T
30
+ import torch
31
+ from torch import nn
32
+ import torch.nn.functional as F
33
+ from torch_geometric.nn import MessagePassing
34
+ from torch_scatter import scatter_add, scatter
35
+
36
+ def tuple_size(tp):
37
+ return tuple([0 if a is None else a.size() for a in tp])
38
+
39
+ def tuple_sum(tp1, tp2):
40
+ s1, v1 = tp1
41
+ s2, v2 = tp2
42
+ if v1 is None and v2 is None:
43
+ return (s1 + s2, None)
44
+ return (s1 + s2, v1 + v2)
45
+
46
+ def tuple_cat(*args, dim=-1):
47
+ '''
48
+ Concatenates any number of tuples (s, V) elementwise.
49
+
50
+ :param dim: dimension along which to concatenate when viewed
51
+ as the `dim` index for the scalar-channel tensors.
52
+ This means that `dim=-1` will be applied as
53
+ `dim=-2` for the vector-channel tensors.
54
+ '''
55
+ dim %= len(args[0][0].shape)
56
+ s_args, v_args = list(zip(*args))
57
+ return torch.cat(s_args, dim=dim), torch.cat(v_args, dim=dim)
58
+
59
+ def tuple_index(x, idx):
60
+ '''
61
+ Indexes into a tuple (s, V) along the first dimension.
62
+
63
+ :param idx: any object which can be used to index into a `torch.Tensor`
64
+ '''
65
+ return x[0][idx], x[1][idx]
66
+
67
+ def randn(n, dims, device="cpu"):
68
+ '''
69
+ Returns random tuples (s, V) drawn elementwise from a normal distribution.
70
+
71
+ :param n: number of data points
72
+ :param dims: tuple of dimensions (n_scalar, n_vector)
73
+
74
+ :return: (s, V) with s.shape = (n, n_scalar) and
75
+ V.shape = (n, n_vector, 3)
76
+ '''
77
+ return torch.randn(n, dims[0], device=device), \
78
+ torch.randn(n, dims[1], 3, device=device)
79
+
80
+ def _norm_no_nan(x, axis=-1, keepdims=False, eps=1e-8, sqrt=True):
81
+ '''
82
+ L2 norm of tensor clamped above a minimum value `eps`.
83
+
84
+ :param sqrt: if `False`, returns the square of the L2 norm
85
+ '''
86
+ # clamp is slow
87
+ # out = torch.clamp(torch.sum(torch.square(x), axis, keepdims), min=eps)
88
+ out = torch.sum(torch.square(x), axis, keepdims) + eps
89
+ return torch.sqrt(out) if sqrt else out
90
+
91
+ def _split(x, nv):
92
+ '''
93
+ Splits a merged representation of (s, V) back into a tuple.
94
+ Should be used only with `_merge(s, V)` and only if the tuple
95
+ representation cannot be used.
96
+
97
+ :param x: the `torch.Tensor` returned from `_merge`
98
+ :param nv: the number of vector channels in the input to `_merge`
99
+ '''
100
+ v = torch.reshape(x[..., -3*nv:], x.shape[:-1] + (nv, 3))
101
+ s = x[..., :-3*nv]
102
+ return s, v
103
+
104
+ def _merge(s, v):
105
+ '''
106
+ Merges a tuple (s, V) into a single `torch.Tensor`, where the
107
+ vector channels are flattened and appended to the scalar channels.
108
+ Should be used only if the tuple representation cannot be used.
109
+ Use `_split(x, nv)` to reverse.
110
+ '''
111
+ v = torch.reshape(v, v.shape[:-2] + (3*v.shape[-2],))
112
+ return torch.cat([s, v], -1)
113
+
114
+ class GVP(nn.Module):
115
+ '''
116
+ Geometric Vector Perceptron. See manuscript and README.md
117
+ for more details.
118
+
119
+ :param in_dims: tuple (n_scalar, n_vector)
120
+ :param out_dims: tuple (n_scalar, n_vector)
121
+ :param h_dim: intermediate number of vector channels, optional
122
+ :param activations: tuple of functions (scalar_act, vector_act)
123
+ :param tuple_io: whether to keep accepting tuple inputs and outputs when vi
124
+ or vo = 0
125
+ '''
126
+ def __init__(self, in_dims, out_dims, h_dim=None, vector_gate=False,
127
+ activations=(F.relu, torch.sigmoid), tuple_io=True,
128
+ eps=1e-8):
129
+ super(GVP, self).__init__()
130
+ self.si, self.vi = in_dims
131
+ self.so, self.vo = out_dims
132
+ self.tuple_io = tuple_io
133
+ if self.vi:
134
+ self.h_dim = h_dim or max(self.vi, self.vo)
135
+ self.wh = nn.Linear(self.vi, self.h_dim, bias=False)
136
+ self.ws = nn.Linear(self.h_dim + self.si, self.so)
137
+ if self.vo:
138
+ self.wv = nn.Linear(self.h_dim, self.vo, bias=False)
139
+ if vector_gate:
140
+ self.wg = nn.Linear(self.so, self.vo)
141
+ else:
142
+ self.ws = nn.Linear(self.si, self.so)
143
+
144
+ self.vector_gate = vector_gate
145
+ self.scalar_act, self.vector_act = activations
146
+ self.eps = eps
147
+
148
+ def forward(self, x):
149
+ '''
150
+ :param x: tuple (s, V) of `torch.Tensor`,
151
+ or (if vectors_in is 0), a single `torch.Tensor`
152
+ :return: tuple (s, V) of `torch.Tensor`,
153
+ or (if vectors_out is 0), a single `torch.Tensor`
154
+ '''
155
+ if self.vi:
156
+ s, v = x
157
+ v = torch.transpose(v, -1, -2)
158
+ vh = self.wh(v)
159
+ vn = _norm_no_nan(vh, axis=-2, eps=self.eps)
160
+ s = self.ws(torch.cat([s, vn], -1))
161
+ if self.scalar_act:
162
+ s = self.scalar_act(s)
163
+ if self.vo:
164
+ v = self.wv(vh)
165
+ v = torch.transpose(v, -1, -2)
166
+ if self.vector_gate:
167
+ g = self.wg(s).unsqueeze(-1)
168
+ else:
169
+ g = _norm_no_nan(v, axis=-1, keepdims=True, eps=self.eps)
170
+ if self.vector_act:
171
+ g = self.vector_act(g)
172
+ v = v * g
173
+ else:
174
+ if self.tuple_io:
175
+ assert x[1] is None
176
+ x = x[0]
177
+ s = self.ws(x)
178
+ if self.scalar_act:
179
+ s = self.scalar_act(s)
180
+ if self.vo:
181
+ v = torch.zeros(list(s.shape)[:-1] + [self.vo, 3],
182
+ device=s.device)
183
+
184
+ if self.vo:
185
+ return (s, v)
186
+ elif self.tuple_io:
187
+ return (s, None)
188
+ else:
189
+ return s
190
+
191
+
192
+ class _VDropout(nn.Module):
193
+ '''
194
+ Vector channel dropout where the elements of each
195
+ vector channel are dropped together.
196
+ '''
197
+ def __init__(self, drop_rate):
198
+ super(_VDropout, self).__init__()
199
+ self.drop_rate = drop_rate
200
+
201
+ def forward(self, x):
202
+ '''
203
+ :param x: `torch.Tensor` corresponding to vector channels
204
+ '''
205
+ if x is None:
206
+ return None
207
+ device = x.device
208
+ if not self.training:
209
+ return x
210
+ mask = torch.bernoulli(
211
+ (1 - self.drop_rate) * torch.ones(x.shape[:-1], device=device)
212
+ ).unsqueeze(-1)
213
+ x = mask * x / (1 - self.drop_rate)
214
+ return x
215
+
216
+ class Dropout(nn.Module):
217
+ '''
218
+ Combined dropout for tuples (s, V).
219
+ Takes tuples (s, V) as input and as output.
220
+ '''
221
+ def __init__(self, drop_rate):
222
+ super(Dropout, self).__init__()
223
+ self.sdropout = nn.Dropout(drop_rate)
224
+ self.vdropout = _VDropout(drop_rate)
225
+
226
+ def forward(self, x):
227
+ '''
228
+ :param x: tuple (s, V) of `torch.Tensor`,
229
+ or single `torch.Tensor`
230
+ (will be assumed to be scalar channels)
231
+ '''
232
+ if type(x) is torch.Tensor:
233
+ return self.sdropout(x)
234
+ s, v = x
235
+ return self.sdropout(s), self.vdropout(v)
236
+
237
+ class LayerNorm(nn.Module):
238
+ '''
239
+ Combined LayerNorm for tuples (s, V).
240
+ Takes tuples (s, V) as input and as output.
241
+ '''
242
+ def __init__(self, dims, tuple_io=True, eps=1e-8):
243
+ super(LayerNorm, self).__init__()
244
+ self.tuple_io = tuple_io
245
+ self.s, self.v = dims
246
+ self.scalar_norm = nn.LayerNorm(self.s)
247
+ self.eps = eps
248
+
249
+ def forward(self, x):
250
+ '''
251
+ :param x: tuple (s, V) of `torch.Tensor`,
252
+ or single `torch.Tensor`
253
+ (will be assumed to be scalar channels)
254
+ '''
255
+ if not self.v:
256
+ if self.tuple_io:
257
+ return self.scalar_norm(x[0]), None
258
+ return self.scalar_norm(x)
259
+ s, v = x
260
+ vn = _norm_no_nan(v, axis=-1, keepdims=True, sqrt=False, eps=self.eps)
261
+ nonzero_mask = (vn > 2 * self.eps)
262
+ vn = torch.sum(vn * nonzero_mask, dim=-2, keepdim=True
263
+ ) / (self.eps + torch.sum(nonzero_mask, dim=-2, keepdim=True))
264
+ vn = torch.sqrt(vn + self.eps)
265
+ v = nonzero_mask * (v / vn)
266
+ return self.scalar_norm(s), v
267
+
268
+ class GVPConv(MessagePassing):
269
+ '''
270
+ Graph convolution / message passing with Geometric Vector Perceptrons.
271
+ Takes in a graph with node and edge embeddings,
272
+ and returns new node embeddings.
273
+
274
+ This does NOT do residual updates and pointwise feedforward layers
275
+ ---see `GVPConvLayer`.
276
+
277
+ :param in_dims: input node embedding dimensions (n_scalar, n_vector)
278
+ :param out_dims: output node embedding dimensions (n_scalar, n_vector)
279
+ :param edge_dims: input edge embedding dimensions (n_scalar, n_vector)
280
+ :param n_layers: number of GVPs in the message function
281
+ :param module_list: preconstructed message function, overrides n_layers
282
+ :param aggr: should be "add" if some incoming edges are masked, as in
283
+ a masked autoregressive decoder architecture
284
+ '''
285
+ def __init__(self, in_dims, out_dims, edge_dims, n_layers=3,
286
+ vector_gate=False, module_list=None, aggr="mean", eps=1e-8,
287
+ activations=(F.relu, torch.sigmoid)):
288
+ super(GVPConv, self).__init__(aggr=aggr)
289
+ self.eps = eps
290
+ self.si, self.vi = in_dims
291
+ self.so, self.vo = out_dims
292
+ self.se, self.ve = edge_dims
293
+
294
+ module_list = module_list or []
295
+ if not module_list:
296
+ if n_layers == 1:
297
+ module_list.append(
298
+ GVP((2*self.si + self.se, 2*self.vi + self.ve),
299
+ (self.so, self.vo), activations=(None, None)))
300
+ else:
301
+ module_list.append(
302
+ GVP((2*self.si + self.se, 2*self.vi + self.ve), out_dims,
303
+ vector_gate=vector_gate, activations=activations)
304
+ )
305
+ for i in range(n_layers - 2):
306
+ module_list.append(GVP(out_dims, out_dims,
307
+ vector_gate=vector_gate))
308
+ module_list.append(GVP(out_dims, out_dims,
309
+ activations=(None, None)))
310
+ self.message_func = nn.Sequential(*module_list)
311
+
312
+ def forward(self, x, edge_index, edge_attr):
313
+ '''
314
+ :param x: tuple (s, V) of `torch.Tensor`
315
+ :param edge_index: array of shape [2, n_edges]
316
+ :param edge_attr: tuple (s, V) of `torch.Tensor`
317
+ '''
318
+ x_s, x_v = x
319
+ message = self.propagate(edge_index,
320
+ s=x_s, v=x_v.reshape(x_v.shape[0], 3*x_v.shape[1]),
321
+ edge_attr=edge_attr)
322
+ return _split(message, self.vo)
323
+
324
+ def message(self, s_i, v_i, s_j, v_j, edge_attr):
325
+ v_j = v_j.view(v_j.shape[0], v_j.shape[1]//3, 3)
326
+ v_i = v_i.view(v_i.shape[0], v_i.shape[1]//3, 3)
327
+ message = tuple_cat((s_j, v_j), edge_attr, (s_i, v_i))
328
+ message = self.message_func(message)
329
+ return _merge(*message)
330
+
331
+
332
+ class GVPConvLayer(nn.Module):
333
+ '''
334
+ Full graph convolution / message passing layer with
335
+ Geometric Vector Perceptrons. Residually updates node embeddings with
336
+ aggregated incoming messages, applies a pointwise feedforward
337
+ network to node embeddings, and returns updated node embeddings.
338
+
339
+ To only compute the aggregated messages, see `GVPConv`.
340
+
341
+ :param node_dims: node embedding dimensions (n_scalar, n_vector)
342
+ :param edge_dims: input edge embedding dimensions (n_scalar, n_vector)
343
+ :param n_message: number of GVPs to use in message function
344
+ :param n_feedforward: number of GVPs to use in feedforward function
345
+ :param drop_rate: drop probability in all dropout layers
346
+ :param autoregressive: if `True`, this `GVPConvLayer` will be used
347
+ with a different set of input node embeddings for messages
348
+ where src >= dst
349
+ '''
350
+ def __init__(self, node_dims, edge_dims, vector_gate=False,
351
+ n_message=3, n_feedforward=2, drop_rate=.1,
352
+ autoregressive=False, attention_heads=0,
353
+ conv_activations=(F.relu, torch.sigmoid),
354
+ n_edge_gvps=0, layernorm=True, eps=1e-8):
355
+
356
+ super(GVPConvLayer, self).__init__()
357
+ if attention_heads == 0:
358
+ self.conv = GVPConv(
359
+ node_dims, node_dims, edge_dims, n_layers=n_message,
360
+ vector_gate=vector_gate,
361
+ aggr="add" if autoregressive else "mean",
362
+ activations=conv_activations,
363
+ eps=eps,
364
+ )
365
+ else:
366
+ raise NotImplementedError
367
+ if layernorm:
368
+ self.norm = nn.ModuleList([LayerNorm(node_dims, eps=eps) for _ in range(2)])
369
+ else:
370
+ self.norm = nn.ModuleList([nn.Identity() for _ in range(2)])
371
+ self.dropout = nn.ModuleList([Dropout(drop_rate) for _ in range(2)])
372
+
373
+ ff_func = []
374
+ if n_feedforward == 1:
375
+ ff_func.append(GVP(node_dims, node_dims, activations=(None, None)))
376
+ else:
377
+ hid_dims = 4*node_dims[0], 2*node_dims[1]
378
+ ff_func.append(GVP(node_dims, hid_dims, vector_gate=vector_gate))
379
+ for i in range(n_feedforward-2):
380
+ ff_func.append(GVP(hid_dims, hid_dims, vector_gate=vector_gate))
381
+ ff_func.append(GVP(hid_dims, node_dims, activations=(None, None)))
382
+ self.ff_func = nn.Sequential(*ff_func)
383
+
384
+ self.edge_message_func = None
385
+ if n_edge_gvps > 0:
386
+ si, vi = node_dims
387
+ se, ve = edge_dims
388
+ module_list = [
389
+ GVP((2*si + se, 2*vi + ve), edge_dims, vector_gate=vector_gate)
390
+ ]
391
+ for i in range(n_edge_gvps - 2):
392
+ module_list.append(GVP(edge_dims, edge_dims,
393
+ vector_gate=vector_gate))
394
+ if n_edge_gvps > 1:
395
+ module_list.append(GVP(edge_dims, edge_dims,
396
+ activations=(None, None)))
397
+ self.edge_message_func = nn.Sequential(*module_list)
398
+ if layernorm:
399
+ self.edge_norm = LayerNorm(edge_dims, eps=eps)
400
+ else:
401
+ self.edge_norm = nn.Identity()
402
+ self.edge_dropout = Dropout(drop_rate)
403
+
404
+ def forward(self, x, edge_index, edge_attr,
405
+ autoregressive_x=None, node_mask=None):
406
+ '''
407
+ :param x: tuple (s, V) of `torch.Tensor`
408
+ :param edge_index: array of shape [2, n_edges]
409
+ :param edge_attr: tuple (s, V) of `torch.Tensor`
410
+ :param autoregressive_x: tuple (s, V) of `torch.Tensor`.
411
+ If not `None`, will be used as src node embeddings
412
+ for forming messages where src >= dst. The current node
413
+ embeddings `x` will still be the base of the update and the
414
+ pointwise feedforward.
415
+ :param node_mask: array of type `bool` to index into the first
416
+ dim of node embeddings (s, V). If not `None`, only
417
+ these nodes will be updated.
418
+ '''
419
+ if self.edge_message_func:
420
+ src, dst = edge_index
421
+ if autoregressive_x is None:
422
+ x_src = x[0][src], x[1][src]
423
+ else:
424
+ mask = (src < dst).unsqueeze(-1)
425
+ x_src = (
426
+ torch.where(mask, x[0][src], autoregressive_x[0][src]),
427
+ torch.where(mask.unsqueeze(-1), x[1][src],
428
+ autoregressive_x[1][src])
429
+ )
430
+ x_dst = x[0][dst], x[1][dst]
431
+ x_edge = (
432
+ torch.cat([x_src[0], edge_attr[0], x_dst[0]], dim=-1),
433
+ torch.cat([x_src[1], edge_attr[1], x_dst[1]], dim=-2)
434
+ )
435
+ edge_attr_dh = self.edge_message_func(x_edge)
436
+ edge_attr = self.edge_norm(tuple_sum(edge_attr,
437
+ self.edge_dropout(edge_attr_dh)))
438
+
439
+ if autoregressive_x is not None:
440
+ src, dst = edge_index
441
+ mask = src < dst
442
+ edge_index_forward = edge_index[:, mask]
443
+ edge_index_backward = edge_index[:, ~mask]
444
+ edge_attr_forward = tuple_index(edge_attr, mask)
445
+ edge_attr_backward = tuple_index(edge_attr, ~mask)
446
+
447
+ dh = tuple_sum(
448
+ self.conv(x, edge_index_forward, edge_attr_forward),
449
+ self.conv(autoregressive_x, edge_index_backward, edge_attr_backward)
450
+ )
451
+
452
+ count = scatter_add(torch.ones_like(dst), dst,
453
+ dim_size=dh[0].size(0)).clamp(min=1).unsqueeze(-1)
454
+
455
+ dh = dh[0] / count, dh[1] / count.unsqueeze(-1)
456
+
457
+ else:
458
+ dh = self.conv(x, edge_index, edge_attr)
459
+
460
+ if node_mask is not None:
461
+ x_ = x
462
+ x, dh = tuple_index(x, node_mask), tuple_index(dh, node_mask)
463
+
464
+ x = self.norm[0](tuple_sum(x, self.dropout[0](dh)))
465
+
466
+ dh = self.ff_func(x)
467
+ x = self.norm[1](tuple_sum(x, self.dropout[1](dh)))
468
+
469
+ if node_mask is not None:
470
+ x_[0][node_mask], x_[1][node_mask] = x[0], x[1]
471
+ x = x_
472
+
473
+ return x, edge_attr
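
A minimal usage sketch for the layer above (not part of the diff; dimensions are illustrative and assume `GVPConvLayer` and its dependencies from this file are in scope):

import torch

node_dims, edge_dims = (16, 4), (8, 2)   # (n_scalar, n_vector)
layer = GVPConvLayer(node_dims, edge_dims)

n_nodes, n_edges = 10, 30
x = (torch.randn(n_nodes, 16), torch.randn(n_nodes, 4, 3))         # (s, V) node embeddings
edge_index = torch.randint(0, n_nodes, (2, n_edges))
edge_attr = (torch.randn(n_edges, 8), torch.randn(n_edges, 2, 3))  # (s, V) edge embeddings

# One message-passing step; node dims are preserved, and edges pass through
# unchanged because n_edge_gvps defaults to 0.
x_out, edge_attr_out = layer(x, edge_index, edge_attr)
assert x_out[0].shape == (n_nodes, 16) and x_out[1].shape == (n_nodes, 4, 3)
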
esm/esm/inverse_folding/gvp_transformer.py ADDED
@@ -0,0 +1,137 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import argparse
7
+ from typing import Any, Dict, List, Optional, Tuple, NamedTuple
8
+ import torch
9
+ from torch import nn
10
+ from torch import Tensor
11
+ import torch.nn.functional as F
12
+ from scipy.spatial import transform
13
+
14
+ from esm.data import Alphabet
15
+
16
+ from .features import DihedralFeatures
17
+ from .gvp_encoder import GVPEncoder
18
+ from .gvp_utils import unflatten_graph
19
+ from .gvp_transformer_encoder import GVPTransformerEncoder
20
+ from .transformer_decoder import TransformerDecoder
21
+ from .util import rotate, CoordBatchConverter
22
+
23
+
24
+ class GVPTransformerModel(nn.Module):
25
+ """
26
+ GVP-Transformer inverse folding model.
27
+
28
+ Architecture: Geometric GVP-GNN as initial layers, followed by
29
+ sequence-to-sequence Transformer encoder and decoder.
30
+ """
31
+
32
+ def __init__(self, args, alphabet):
33
+ super().__init__()
34
+ encoder_embed_tokens = self.build_embedding(
35
+ args, alphabet, args.encoder_embed_dim,
36
+ )
37
+ decoder_embed_tokens = self.build_embedding(
38
+ args, alphabet, args.decoder_embed_dim,
39
+ )
40
+ encoder = self.build_encoder(args, alphabet, encoder_embed_tokens)
41
+ decoder = self.build_decoder(args, alphabet, decoder_embed_tokens)
42
+ self.args = args
43
+ self.encoder = encoder
44
+ self.decoder = decoder
45
+
46
+ @classmethod
47
+ def build_encoder(cls, args, src_dict, embed_tokens):
48
+ encoder = GVPTransformerEncoder(args, src_dict, embed_tokens)
49
+ return encoder
50
+
51
+ @classmethod
52
+ def build_decoder(cls, args, tgt_dict, embed_tokens):
53
+ decoder = TransformerDecoder(
54
+ args,
55
+ tgt_dict,
56
+ embed_tokens,
57
+ )
58
+ return decoder
59
+
60
+ @classmethod
61
+ def build_embedding(cls, args, dictionary, embed_dim):
62
+ num_embeddings = len(dictionary)
63
+ padding_idx = dictionary.padding_idx
64
+ emb = nn.Embedding(num_embeddings, embed_dim, padding_idx)
65
+ nn.init.normal_(emb.weight, mean=0, std=embed_dim ** -0.5)
66
+ nn.init.constant_(emb.weight[padding_idx], 0)
67
+ return emb
68
+
69
+ def forward(
70
+ self,
71
+ coords,
72
+ padding_mask,
73
+ confidence,
74
+ prev_output_tokens,
75
+ return_all_hiddens: bool = False,
76
+ features_only: bool = False,
77
+ ):
78
+ encoder_out = self.encoder(coords, padding_mask, confidence,
79
+ return_all_hiddens=return_all_hiddens)
80
+ logits, extra = self.decoder(
81
+ prev_output_tokens,
82
+ encoder_out=encoder_out,
83
+ features_only=features_only,
84
+ return_all_hiddens=return_all_hiddens,
85
+ )
86
+ return logits, extra
87
+
88
+ def sample(self, coords, partial_seq=None, temperature=1.0, confidence=None):
89
+ """
90
+ Samples sequences based on multinomial sampling (no beam search).
91
+
92
+ Args:
93
+ coords: L x 3 x 3 list representing one backbone
94
+ partial_seq: Optional, partial sequence with mask tokens if part of
95
+ the sequence is known
96
+ temperature: sampling temperature, use low temperature for higher
97
+ sequence recovery and high temperature for higher diversity
98
+ confidence: optional length L list of confidence scores for coordinates
99
+ """
100
+ L = len(coords)
101
+ # Convert to batch format
102
+ batch_converter = CoordBatchConverter(self.decoder.dictionary)
103
+ batch_coords, confidence, _, _, padding_mask = (
104
+ batch_converter([(coords, confidence, None)])
105
+ )
106
+
107
+ # Start with prepend token
108
+ mask_idx = self.decoder.dictionary.get_idx('<mask>')
109
+ sampled_tokens = torch.full((1, 1+L), mask_idx, dtype=int)
110
+ sampled_tokens[0, 0] = self.decoder.dictionary.get_idx('<cath>')
111
+ if partial_seq is not None:
112
+ for i, c in enumerate(partial_seq):
113
+ sampled_tokens[0, i+1] = self.decoder.dictionary.get_idx(c)
114
+
115
+ # Save incremental states for faster sampling
116
+ incremental_state = dict()
117
+
118
+ # Run encoder only once
119
+ encoder_out = self.encoder(batch_coords, padding_mask, confidence)
120
+
121
+ # Decode one token at a time
122
+ for i in range(1, L+1):
123
+ if sampled_tokens[0, i] != mask_idx:
124
+ continue
125
+ logits, _ = self.decoder(
126
+ sampled_tokens[:, :i],
127
+ encoder_out,
128
+ incremental_state=incremental_state,
129
+ )
130
+ logits = logits[0].transpose(0, 1)
131
+ logits /= temperature
132
+ probs = F.softmax(logits, dim=-1)
133
+ sampled_tokens[:, i] = torch.multinomial(probs, 1).squeeze(-1)
134
+ sampled_seq = sampled_tokens[0, 1:]
135
+
136
+ # Convert back to string via lookup
137
+ return ''.join([self.decoder.dictionary.get_tok(a) for a in sampled_seq])
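
A hedged usage sketch for `GVPTransformerModel.sample` (file path and chain id are illustrative; assumes the standard pretrained ESM-IF1 loader in `esm.pretrained` is available):

import esm
import esm.inverse_folding

model, alphabet = esm.pretrained.esm_if1_gvp4_t16_142M_UR50()
model = model.eval()

# coords: L x 3 x 3 backbone (N, CA, C) for a single chain
coords, native_seq = esm.inverse_folding.util.load_coords("example.pdb", "A")

# Low temperature favors sequence recovery, high temperature favors diversity
sampled_seq = model.sample(coords, temperature=0.1)
print(native_seq)
print(sampled_seq)
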
esm/esm/inverse_folding/gvp_transformer_encoder.py ADDED
@@ -0,0 +1,184 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # Contents of this file were adapted from the open source fairseq repository.
4
+ #
5
+ # This source code is licensed under the MIT license found in the
6
+ # LICENSE file in the root directory of this source tree.
7
+
8
+ import argparse
9
+ import math
10
+ from typing import Dict, List, Optional
11
+
12
+ import torch
13
+ import torch.nn as nn
14
+ from torch import Tensor
15
+
16
+ from esm.modules import SinusoidalPositionalEmbedding
17
+ from .features import GVPInputFeaturizer, DihedralFeatures
18
+ from .gvp_encoder import GVPEncoder
19
+ from .transformer_layer import TransformerEncoderLayer
20
+ from .util import nan_to_num, get_rotation_frames, rotate, rbf
21
+
22
+
23
+ class GVPTransformerEncoder(nn.Module):
24
+ """
25
+ Transformer encoder consisting of *args.encoder.layers* layers. Each layer
26
+ is a :class:`TransformerEncoderLayer`.
27
+
28
+ Args:
29
+ args (argparse.Namespace): parsed command-line arguments
30
+ dictionary (~fairseq.data.Dictionary): encoding dictionary
31
+ embed_tokens (torch.nn.Embedding): input embedding
32
+ """
33
+
34
+ def __init__(self, args, dictionary, embed_tokens):
35
+ super().__init__()
36
+ self.args = args
37
+ self.dictionary = dictionary
38
+
39
+ self.dropout_module = nn.Dropout(args.dropout)
40
+
41
+ embed_dim = embed_tokens.embedding_dim
42
+ self.padding_idx = embed_tokens.padding_idx
43
+
44
+ self.embed_tokens = embed_tokens
45
+ self.embed_scale = math.sqrt(embed_dim)
46
+ self.embed_positions = SinusoidalPositionalEmbedding(
47
+ embed_dim,
48
+ self.padding_idx,
49
+ )
50
+ self.embed_gvp_input_features = nn.Linear(15, embed_dim)
51
+ self.embed_confidence = nn.Linear(16, embed_dim)
52
+ self.embed_dihedrals = DihedralFeatures(embed_dim)
53
+
54
+ gvp_args = argparse.Namespace()
55
+ for k, v in vars(args).items():
56
+ if k.startswith("gvp_"):
57
+ setattr(gvp_args, k[4:], v)
58
+ self.gvp_encoder = GVPEncoder(gvp_args)
59
+ gvp_out_dim = gvp_args.node_hidden_dim_scalar + (3 *
60
+ gvp_args.node_hidden_dim_vector)
61
+ self.embed_gvp_output = nn.Linear(gvp_out_dim, embed_dim)
62
+
63
+ self.layers = nn.ModuleList([])
64
+ self.layers.extend(
65
+ [self.build_encoder_layer(args) for i in range(args.encoder_layers)]
66
+ )
67
+ self.num_layers = len(self.layers)
68
+ self.layer_norm = nn.LayerNorm(embed_dim)
69
+
70
+ def build_encoder_layer(self, args):
71
+ return TransformerEncoderLayer(args)
72
+
73
+ def forward_embedding(self, coords, padding_mask, confidence):
74
+ """
75
+ Args:
76
+ coords: N, CA, C backbone coordinates, shape batch_size x length x 3 (atoms) x 3
+ padding_mask: boolean Tensor (true for padding), shape batch_size x length
+ confidence: confidence scores between 0 and 1, shape batch_size x length
79
+ """
80
+ components = dict()
81
+ coord_mask = torch.all(torch.all(torch.isfinite(coords), dim=-1), dim=-1)
82
+ coords = nan_to_num(coords)
83
+ mask_tokens = (
84
+ padding_mask * self.dictionary.padding_idx +
85
+ ~padding_mask * self.dictionary.get_idx("<mask>")
86
+ )
87
+ components["tokens"] = self.embed_tokens(mask_tokens) * self.embed_scale
88
+ components["diherals"] = self.embed_dihedrals(coords)
89
+
90
+ # GVP encoder
91
+ gvp_out_scalars, gvp_out_vectors = self.gvp_encoder(coords,
92
+ coord_mask, padding_mask, confidence)
93
+ R = get_rotation_frames(coords)
94
+ # Rotate to local rotation frame for rotation-invariance
95
+ gvp_out_features = torch.cat([
96
+ gvp_out_scalars,
97
+ rotate(gvp_out_vectors, R.transpose(-2, -1)).flatten(-2, -1),
98
+ ], dim=-1)
99
+ components["gvp_out"] = self.embed_gvp_output(gvp_out_features)
100
+
101
+ components["confidence"] = self.embed_confidence(
102
+ rbf(confidence, 0., 1.))
103
+
104
+ # In addition to GVP encoder outputs, also directly embed GVP input node
105
+ # features to the Transformer
106
+ scalar_features, vector_features = GVPInputFeaturizer.get_node_features(
107
+ coords, coord_mask, with_coord_mask=False)
108
+ features = torch.cat([
109
+ scalar_features,
110
+ rotate(vector_features, R.transpose(-2, -1)).flatten(-2, -1),
111
+ ], dim=-1)
112
+ components["gvp_input_features"] = self.embed_gvp_input_features(features)
113
+
114
+ embed = sum(components.values())
115
+ # for k, v in components.items():
116
+ # print(k, torch.mean(v, dim=(0,1)), torch.std(v, dim=(0,1)))
117
+
118
+ x = embed
119
+ x = x + self.embed_positions(mask_tokens)
120
+ x = self.dropout_module(x)
121
+ return x, components
122
+
123
+ def forward(
124
+ self,
125
+ coords,
126
+ encoder_padding_mask,
127
+ confidence,
128
+ return_all_hiddens: bool = False,
129
+ ):
130
+ """
131
+ Args:
132
+ coords (Tensor): backbone coordinates
133
+ shape batch_size x num_residues x num_atoms (3 for N, CA, C) x 3
134
+ encoder_padding_mask (ByteTensor): the positions of
135
+ padding elements of shape `(batch_size x num_residues)`
136
+ confidence (Tensor): the confidence score of shape (batch_size x
137
+ num_residues). The value is between 0. and 1. for each residue
138
+ coordinate, or -1. if no coordinate is given
139
+ return_all_hiddens (bool, optional): also return all of the
140
+ intermediate hidden states (default: False).
141
+
142
+ Returns:
143
+ dict:
144
+ - **encoder_out** (Tensor): the last encoder layer's output of
145
+ shape `(num_residues, batch_size, embed_dim)`
146
+ - **encoder_padding_mask** (ByteTensor): the positions of
147
+ padding elements of shape `(batch_size, num_residues)`
148
+ - **encoder_embedding** (Tensor): the (scaled) embedding lookup
149
+ of shape `(batch_size, num_residues, embed_dim)`
150
+ - **encoder_states** (List[Tensor]): all intermediate
151
+ hidden states of shape `(num_residues, batch_size, embed_dim)`.
152
+ Only populated if *return_all_hiddens* is True.
153
+ """
154
+ x, encoder_embedding = self.forward_embedding(coords,
155
+ encoder_padding_mask, confidence)
156
+ # account for padding while computing the representation
157
+ x = x * (1 - encoder_padding_mask.unsqueeze(-1).type_as(x))
158
+
159
+ # B x T x C -> T x B x C
160
+ x = x.transpose(0, 1)
161
+
162
+ encoder_states = []
163
+
164
+ if return_all_hiddens:
165
+ encoder_states.append(x)
166
+
167
+ # encoder layers
168
+ for layer in self.layers:
169
+ x = layer(
170
+ x, encoder_padding_mask=encoder_padding_mask
171
+ )
172
+ if return_all_hiddens:
173
+ assert encoder_states is not None
174
+ encoder_states.append(x)
175
+
176
+ if self.layer_norm is not None:
177
+ x = self.layer_norm(x)
178
+
179
+ return {
180
+ "encoder_out": [x], # T x B x C
181
+ "encoder_padding_mask": [encoder_padding_mask], # B x T
182
+ "encoder_embedding": [encoder_embedding], # dictionary
183
+ "encoder_states": encoder_states, # List[T x B x C]
184
+ }
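
A small self-contained sketch (argument names illustrative) of the prefix-stripping loop in `__init__` above, which routes every `gvp_*` flag from the shared argument namespace into the GVP sub-encoder's own namespace:

import argparse

args = argparse.Namespace(
    gvp_node_hidden_dim_scalar=1024,   # becomes node_hidden_dim_scalar for the GVP encoder
    gvp_node_hidden_dim_vector=256,    # becomes node_hidden_dim_vector
    encoder_layers=8,                  # consumed by the Transformer stack, not the GVP
)
gvp_args = argparse.Namespace()
for k, v in vars(args).items():
    if k.startswith("gvp_"):
        setattr(gvp_args, k[4:], v)    # strip the "gvp_" prefix

assert gvp_args.node_hidden_dim_scalar == 1024
assert not hasattr(gvp_args, "encoder_layers")
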
esm/esm/inverse_folding/gvp_utils.py ADDED
@@ -0,0 +1,68 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import torch
7
+
8
+
9
+ def flatten_graph(node_embeddings, edge_embeddings, edge_index):
10
+ """
11
+ Flattens the graph into a batch size one (with disconnected subgraphs for
12
+ each example) to be compatible with pytorch-geometric package.
13
+ Args:
14
+ node_embeddings: node embeddings in tuple form (scalar, vector)
15
+ - scalar: shape batch size x nodes x node_embed_dim
16
+ - vector: shape batch size x nodes x node_embed_dim x 3
17
+ edge_embeddings: edge embeddings in tuple form (scalar, vector)
18
+ - scalar: shape batch size x edges x edge_embed_dim
19
+ - vector: shape batch size x edges x edge_embed_dim x 3
20
+ edge_index: shape batch_size x 2 (source node and target node) x edges
21
+ Returns:
22
+ node_embeddings: node embeddings in tuple form (scalar, vector)
23
+ - scalar: shape total_nodes x node_embed_dim
+ - vector: shape total_nodes x node_embed_dim x 3
+ edge_embeddings: edge embeddings in tuple form (scalar, vector)
+ - scalar: shape total_edges x edge_embed_dim
+ - vector: shape total_edges x edge_embed_dim x 3
28
+ edge_index: shape 2 x total_edges
29
+ """
30
+ x_s, x_v = node_embeddings
31
+ e_s, e_v = edge_embeddings
32
+ batch_size, N = x_s.shape[0], x_s.shape[1]
33
+ node_embeddings = (torch.flatten(x_s, 0, 1), torch.flatten(x_v, 0, 1))
34
+ edge_embeddings = (torch.flatten(e_s, 0, 1), torch.flatten(e_v, 0, 1))
35
+
36
+ edge_mask = torch.any(edge_index != -1, dim=1)
37
+ # Re-number the nodes by adding batch_idx * N to each batch
38
+ edge_index = edge_index + (torch.arange(batch_size, device=edge_index.device) *
39
+ N).unsqueeze(-1).unsqueeze(-1)
40
+ edge_index = edge_index.permute(1, 0, 2).flatten(1, 2)
41
+ edge_mask = edge_mask.flatten()
42
+ edge_index = edge_index[:, edge_mask]
43
+ edge_embeddings = (
44
+ edge_embeddings[0][edge_mask, :],
45
+ edge_embeddings[1][edge_mask, :]
46
+ )
47
+ return node_embeddings, edge_embeddings, edge_index
48
+
49
+
50
+ def unflatten_graph(node_embeddings, batch_size):
51
+ """
52
+ Unflattens node embeddings.
53
+ Args:
54
+ node_embeddings: node embeddings in tuple form (scalar, vector)
55
+ - scalar: shape total_nodes x node_embed_dim
+ - vector: shape total_nodes x node_embed_dim x 3
57
+ batch_size: int
58
+ Returns:
59
+ node_embeddings: node embeddings in tuple form (scalar, vector)
60
+ - scalar: shape batch size x nodes x node_embed_dim
61
+ - vector: shape batch size x nodes x node_embed_dim x 3
62
+ """
63
+ x_s, x_v = node_embeddings
64
+ x_s = x_s.reshape(batch_size, -1, x_s.shape[1])
65
+ x_v = x_v.reshape(batch_size, -1, x_v.shape[1], x_v.shape[2])
66
+ return (x_s, x_v)
67
+
68
+
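
A hedged round-trip sketch for `flatten_graph`/`unflatten_graph` (shapes illustrative; assumes both functions above are in scope). Edge slots where both source and target are -1 are treated as padding and dropped:

import torch

B, N, E = 2, 5, 7
nodes = (torch.randn(B, N, 6), torch.randn(B, N, 3, 3))
edges = (torch.randn(B, E, 4), torch.randn(B, E, 1, 3))
edge_index = torch.randint(0, N, (B, 2, E))
edge_index[0, :, -1] = -1                 # mark one padded edge in the first example

flat_nodes, flat_edges, flat_index = flatten_graph(nodes, edges, edge_index)
assert flat_nodes[0].shape == (B * N, 6)  # disconnected subgraphs, batch size one
assert flat_index.shape[1] == B * E - 1   # the padded edge was removed

restored = unflatten_graph(flat_nodes, batch_size=B)
assert torch.allclose(restored[0], nodes[0])
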
esm/esm/inverse_folding/multichain_util.py ADDED
@@ -0,0 +1,151 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import biotite.structure
7
+ import numpy as np
8
+ import torch
9
+ from typing import Sequence, Tuple, List
10
+
11
+ from esm.inverse_folding.util import (
12
+ load_structure,
13
+ extract_coords_from_structure,
14
+ load_coords,
15
+ get_sequence_loss,
16
+ get_encoder_output,
17
+ )
18
+
19
+
20
+ def extract_coords_from_complex(structure: biotite.structure.AtomArray):
21
+ """
22
+ Args:
23
+ structure: biotite AtomArray
24
+ Returns:
25
+ Tuple (coords, seqs)
26
+ - coords: Dictionary mapping chain ids to L x 3 x 3 array for N, CA, C
27
+ coordinates representing the backbone of each chain
28
+ - seqs: Dictionary mapping chain ids to native sequences of each chain
29
+ """
30
+ coords = {}
31
+ seqs = {}
32
+ all_chains = biotite.structure.get_chains(structure)
33
+ for chain_id in all_chains:
34
+ chain = structure[structure.chain_id == chain_id]
35
+ coords[chain_id], seqs[chain_id] = extract_coords_from_structure(chain)
36
+ return coords, seqs
37
+
38
+
39
+ def load_complex_coords(fpath, chains):
40
+ """
41
+ Args:
42
+ fpath: filepath to either pdb or cif file
43
+ chains: the chain ids (the order matters for autoregressive model)
44
+ Returns:
45
+ Tuple (coords, seqs)
46
+ - coords: Dictionary mapping chain ids to L x 3 x 3 array for N, CA, C
47
+ coordinates representing the backbone of each chain
48
+ - seqs: Dictionary mapping chain ids to native sequences of each chain
49
+ """
50
+ structure = load_structure(fpath, chains)
51
+ return extract_coords_from_complex(structure)
52
+
53
+
54
+ def _concatenate_coords(coords, target_chain_id, padding_length=10):
55
+ """
56
+ Args:
57
+ coords: Dictionary mapping chain ids to L x 3 x 3 array for N, CA, C
58
+ coordinates representing the backbone of each chain
59
+ target_chain_id: The chain id to place first in the concatenation
+ padding_length: Length of padding between concatenated chains
+ Returns:
+ coords_concatenated: an L x 3 x 3 array for N, CA, C coordinates; the
+ target chain first, then the remaining chains, with NaN padding in between
67
+ """
68
+ pad_coords = np.full((padding_length, 3, 3), np.nan, dtype=np.float32)
69
+ # For best performance, put the target chain first in concatenation.
70
+ coords_list = [coords[target_chain_id]]
71
+ for chain_id in coords:
72
+ if chain_id == target_chain_id:
73
+ continue
74
+ coords_list.append(pad_coords)
75
+ coords_list.append(coords[chain_id])
76
+ coords_concatenated = np.concatenate(coords_list, axis=0)
77
+ return coords_concatenated
78
+
79
+
80
+ def sample_sequence_in_complex(model, coords, target_chain_id, temperature=1.,
81
+ padding_length=10):
82
+ """
83
+ Samples sequence for one chain in a complex.
84
+ Args:
85
+ model: An instance of the GVPTransformer model
86
+ coords: Dictionary mapping chain ids to L x 3 x 3 array for N, CA, C
87
+ coordinates representing the backbone of each chain
88
+ target_chain_id: The chain id to sample sequences for
89
+ padding_length: padding length in between chains
90
+ Returns:
91
+ Sampled sequence for the target chain
92
+ """
93
+ target_chain_len = coords[target_chain_id].shape[0]
94
+ all_coords = _concatenate_coords(coords, target_chain_id, padding_length)
95
+
96
+ # Supply padding tokens for the other chains so sampling skips them (for speed)
97
+ padding_pattern = ['<pad>'] * all_coords.shape[0]
98
+ for i in range(target_chain_len):
99
+ padding_pattern[i] = '<mask>'
100
+ sampled = model.sample(all_coords, partial_seq=padding_pattern,
101
+ temperature=temperature)
102
+ sampled = sampled[:target_chain_len]
103
+ return sampled
104
+
105
+
106
+ def score_sequence_in_complex(model, alphabet, coords, target_chain_id,
107
+ target_seq, padding_length=10):
108
+ """
109
+ Scores sequence for one chain in a complex.
110
+ Args:
111
+ model: An instance of the GVPTransformer model
112
+ alphabet: Alphabet for the model
113
+ coords: Dictionary mapping chain ids to L x 3 x 3 array for N, CA, C
114
+ coordinates representing the backbone of each chain
115
+ target_chain_id: The chain id of the chain to score
116
+ target_seq: Target sequence for the target chain for scoring.
117
+ padding_length: padding length in between chains
118
+ Returns:
119
+ Tuple (ll_fullseq, ll_withcoord)
120
+ - ll_fullseq: Average log-likelihood over the full target chain
121
+ - ll_withcoord: Average log-likelihood in target chain excluding those
122
+ residues without coordinates
123
+ """
124
+ all_coords = _concatenate_coords(coords, target_chain_id, padding_length)
125
+
126
+ loss, target_padding_mask = get_sequence_loss(model, alphabet, all_coords,
127
+ target_seq)
128
+ ll_fullseq = -np.sum(loss * ~target_padding_mask) / np.sum(
129
+ ~target_padding_mask)
130
+
131
+ # Also calculate average when excluding masked portions
132
+ coord_mask = np.all(np.isfinite(coords[target_chain_id]), axis=(-1, -2))
133
+ ll_withcoord = -np.sum(loss * coord_mask) / np.sum(coord_mask)
134
+ return ll_fullseq, ll_withcoord
135
+
136
+
137
+ def get_encoder_output_for_complex(model, alphabet, coords, target_chain_id):
138
+ """
139
+ Args:
140
+ model: An instance of the GVPTransformer model
141
+ alphabet: Alphabet for the model
142
+ coords: Dictionary mapping chain ids to L x 3 x 3 array for N, CA, C
143
+ coordinates representing the backbone of each chain
144
+ target_chain_id: The chain id to return encoder output for
145
+ Returns:
146
+ Dictionary mapping chain id to encoder output for each chain
147
+ """
148
+ all_coords = _concatenate_coords(coords, target_chain_id)
149
+ all_rep = get_encoder_output(model, alphabet, all_coords)
150
+ target_chain_len = coords[target_chain_id].shape[0]
151
+ return all_rep[:target_chain_len]
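
A hedged end-to-end sketch for the helpers above (paths and chain ids are illustrative; assumes the standard pretrained ESM-IF1 loader): redesign chain B of a two-chain complex while keeping the other chain fixed as structural context, then score the design.

import esm

model, alphabet = esm.pretrained.esm_if1_gvp4_t16_142M_UR50()
model = model.eval()

coords, native_seqs = load_complex_coords("complex.pdb", chains=["A", "B"])
designed = sample_sequence_in_complex(model, coords, target_chain_id="B",
                                      temperature=0.2)
ll_fullseq, ll_withcoord = score_sequence_in_complex(model, alphabet, coords,
                                                     "B", designed)
print(designed, ll_fullseq)
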
esm/esm/inverse_folding/transformer_decoder.py ADDED
@@ -0,0 +1,228 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # Contents of this file were adapted from the open source fairseq repository.
4
+ #
5
+ # This source code is licensed under the MIT license found in the
6
+ # LICENSE file in the root directory of this source tree.
7
+
8
+ import math
9
+ from typing import Any, Dict, List, Optional
10
+
11
+ import torch
12
+ import torch.nn as nn
13
+ from torch import Tensor
14
+
15
+ from esm.modules import SinusoidalPositionalEmbedding
16
+ from .transformer_layer import TransformerDecoderLayer
17
+
18
+
19
+ def fill_with_neg_inf(t):
20
+ """FP16-compatible function that fills a tensor with -inf."""
21
+ return t.float().fill_(float("-inf")).type_as(t)
22
+
23
+
24
+ class TransformerDecoder(nn.Module):
25
+ """
26
+ Transformer decoder consisting of *args.decoder.layers* layers. Each layer
27
+ is a :class:`TransformerDecoderLayer`.
28
+
29
+ Args:
30
+ args (argparse.Namespace): parsed command-line arguments
31
+ dictionary (~fairseq.data.Dictionary): decoding dictionary
32
+ embed_tokens (torch.nn.Embedding): output embedding
33
+ no_encoder_attn (bool, optional): whether to attend to encoder outputs
34
+ (default: False).
35
+ """
36
+
37
+ def __init__(
38
+ self,
39
+ args,
40
+ dictionary,
41
+ embed_tokens,
42
+ ):
43
+ super().__init__()
44
+ self.args = args
45
+ self.dictionary = dictionary
46
+ self._future_mask = torch.empty(0)
47
+
48
+ self.dropout_module = nn.Dropout(args.dropout)
49
+
50
+ input_embed_dim = embed_tokens.embedding_dim
51
+ embed_dim = args.decoder_embed_dim
52
+ self.embed_dim = embed_dim
53
+
54
+ self.padding_idx = embed_tokens.padding_idx
55
+
56
+ self.embed_tokens = embed_tokens
57
+ self.embed_scale = math.sqrt(embed_dim)
58
+
59
+ self.project_in_dim = (
60
+ nn.Linear(input_embed_dim, embed_dim, bias=False)
61
+ if embed_dim != input_embed_dim
62
+ else None
63
+ )
64
+ self.embed_positions = SinusoidalPositionalEmbedding(
65
+ embed_dim,
66
+ self.padding_idx,
67
+ )
68
+
69
+ self.layers = nn.ModuleList([])
70
+ self.layers.extend(
71
+ [
72
+ self.build_decoder_layer(args)
73
+ for _ in range(args.decoder_layers)
74
+ ]
75
+ )
76
+ self.num_layers = len(self.layers)
77
+ self.layer_norm = nn.LayerNorm(embed_dim)
78
+
79
+ self.build_output_projection(args, dictionary)
80
+
81
+ def build_output_projection(self, args, dictionary):
82
+ self.output_projection = nn.Linear(
83
+ args.decoder_embed_dim, len(dictionary), bias=False
84
+ )
85
+ nn.init.normal_(
86
+ self.output_projection.weight, mean=0, std=args.decoder_embed_dim ** -0.5
87
+ )
88
+
89
+ def build_decoder_layer(self, args):
90
+ return TransformerDecoderLayer(args)
91
+
92
+ def forward(
93
+ self,
94
+ prev_output_tokens,
95
+ encoder_out: Optional[Dict[str, List[Tensor]]] = None,
96
+ incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
97
+ features_only: bool = False,
98
+ return_all_hiddens: bool = False,
99
+ ):
100
+ """
101
+ Args:
102
+ prev_output_tokens (LongTensor): previous decoder outputs of shape
103
+ `(batch, tgt_len)`, for teacher forcing
104
+ encoder_out (optional): output from the encoder, used for
105
+ encoder-side attention, should be of size T x B x C
106
+ incremental_state (dict): dictionary used for storing state during
107
+ :ref:`Incremental decoding`
108
+ features_only (bool, optional): only return features without
109
+ applying output layer (default: False).
110
+
111
+ Returns:
112
+ tuple:
113
+ - the decoder's output of shape `(batch, vocab, tgt_len)`
114
+ - a dictionary with any model-specific outputs
115
+ """
116
+
117
+ x, extra = self.extract_features(
118
+ prev_output_tokens,
119
+ encoder_out=encoder_out,
120
+ incremental_state=incremental_state,
121
+ )
122
+
123
+ if not features_only:
124
+ x = self.output_layer(x)
125
+ x = x.transpose(1, 2) # B x T x C -> B x C x T
126
+ return x, extra
127
+
128
+ def extract_features(
129
+ self,
130
+ prev_output_tokens,
131
+ encoder_out: Optional[Dict[str, List[Tensor]]],
132
+ incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
133
+ ):
134
+ """
135
+ Similar to *forward* but only return features.
136
+
137
+ Includes several features from "Jointly Learning to Align and
138
+ Translate with Transformer Models" (Garg et al., EMNLP 2019).
139
+
140
+ Returns:
141
+ tuple:
142
+ - the decoder's features of shape `(batch, tgt_len, embed_dim)`
143
+ - a dictionary with any model-specific outputs
144
+ """
145
+ bs, slen = prev_output_tokens.size()
146
+
147
+ enc: Optional[Tensor] = None
148
+ padding_mask: Optional[Tensor] = None
149
+ if encoder_out is not None and len(encoder_out["encoder_out"]) > 0:
150
+ enc = encoder_out["encoder_out"][0]
151
+ assert (
152
+ enc.size()[1] == bs
153
+ ), f"Expected enc.shape == (t, {bs}, c) got {enc.shape}"
154
+ if encoder_out is not None and len(encoder_out["encoder_padding_mask"]) > 0:
155
+ padding_mask = encoder_out["encoder_padding_mask"][0]
156
+
157
+ # embed positions
158
+ positions = self.embed_positions(
159
+ prev_output_tokens
160
+ )
161
+
162
+ if incremental_state is not None:
163
+ prev_output_tokens = prev_output_tokens[:, -1:]
164
+ positions = positions[:, -1:]
165
+
166
+ # embed tokens and positions
167
+ x = self.embed_scale * self.embed_tokens(prev_output_tokens)
168
+
169
+ if self.project_in_dim is not None:
170
+ x = self.project_in_dim(x)
171
+
172
+ x += positions
173
+
174
+ x = self.dropout_module(x)
175
+
176
+ # B x T x C -> T x B x C
177
+ x = x.transpose(0, 1)
178
+
179
+ self_attn_padding_mask: Optional[Tensor] = None
180
+ if prev_output_tokens.eq(self.padding_idx).any():
181
+ self_attn_padding_mask = prev_output_tokens.eq(self.padding_idx)
182
+
183
+ # decoder layers
184
+ attn: Optional[Tensor] = None
185
+ inner_states: List[Optional[Tensor]] = [x]
186
+ for idx, layer in enumerate(self.layers):
187
+ if incremental_state is None:
188
+ self_attn_mask = self.buffered_future_mask(x)
189
+ else:
190
+ self_attn_mask = None
191
+
192
+ x, layer_attn, _ = layer(
193
+ x,
194
+ enc,
195
+ padding_mask,
196
+ incremental_state,
197
+ self_attn_mask=self_attn_mask,
198
+ self_attn_padding_mask=self_attn_padding_mask,
199
+ need_attn=False,
200
+ need_head_weights=False,
201
+ )
202
+ inner_states.append(x)
203
+
204
+ if self.layer_norm is not None:
205
+ x = self.layer_norm(x)
206
+
207
+ # T x B x C -> B x T x C
208
+ x = x.transpose(0, 1)
209
+
210
+ return x, {"inner_states": inner_states}
211
+
212
+ def output_layer(self, features):
213
+ """Project features to the vocabulary size."""
214
+ return self.output_projection(features)
215
+
216
+ def buffered_future_mask(self, tensor):
217
+ dim = tensor.size(0)
218
+ # self._future_mask.device != tensor.device is not working in TorchScript. This is a workaround.
219
+ if (
220
+ self._future_mask.size(0) == 0
221
+ or (not self._future_mask.device == tensor.device)
222
+ or self._future_mask.size(0) < dim
223
+ ):
224
+ self._future_mask = torch.triu(
225
+ fill_with_neg_inf(torch.zeros([dim, dim])), 1
226
+ )
227
+ self._future_mask = self._future_mask.to(tensor)
228
+ return self._future_mask[:dim, :dim]
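
An illustration of the causal mask `buffered_future_mask` caches (using the `fill_with_neg_inf` helper above): -inf strictly above the diagonal blocks attention to future positions, while zeros elsewhere leave current and past positions visible.

import torch

dim = 4
mask = torch.triu(fill_with_neg_inf(torch.zeros(dim, dim)), 1)
# tensor([[0., -inf, -inf, -inf],
#         [0.,   0., -inf, -inf],
#         [0.,   0.,   0., -inf],
#         [0.,   0.,   0.,   0.]])
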
esm/esm/inverse_folding/transformer_layer.py ADDED
@@ -0,0 +1,304 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # Contents of this file were adapted from the open source fairseq repository.
4
+ #
5
+ # This source code is licensed under the MIT license found in the
6
+ # LICENSE file in the root directory of this source tree.
7
+
8
+ from typing import Dict, List, Optional
9
+
10
+ import torch
11
+ import torch.nn as nn
12
+ import torch.nn.functional as F
13
+ from esm.multihead_attention import MultiheadAttention
14
+ from torch import Tensor
15
+
16
+
17
+ class TransformerEncoderLayer(nn.Module):
18
+ """Encoder layer block.
19
+ `layernorm -> dropout -> add residual`
20
+
21
+ Args:
22
+ args (argparse.Namespace): parsed command-line arguments
23
+ """
24
+
25
+ def __init__(self, args):
26
+ super().__init__()
27
+ self.args = args
28
+ self.embed_dim = args.encoder_embed_dim
29
+ self.self_attn = self.build_self_attention(self.embed_dim, args)
30
+ self.self_attn_layer_norm = torch.nn.LayerNorm(self.embed_dim)
31
+ self.dropout_module = nn.Dropout(args.dropout)
32
+ self.activation_fn = F.relu
33
+ self.fc1 = self.build_fc1(
34
+ self.embed_dim,
35
+ args.encoder_ffn_embed_dim,
36
+ )
37
+ self.fc2 = self.build_fc2(
38
+ args.encoder_ffn_embed_dim,
39
+ self.embed_dim,
40
+ )
41
+
42
+ self.final_layer_norm = nn.LayerNorm(self.embed_dim)
43
+
44
+ def build_fc1(self, input_dim, output_dim):
45
+ return nn.Linear(input_dim, output_dim)
46
+
47
+ def build_fc2(self, input_dim, output_dim):
48
+ return nn.Linear(input_dim, output_dim)
49
+
50
+ def build_self_attention(self, embed_dim, args):
51
+ return MultiheadAttention(
52
+ embed_dim,
53
+ args.encoder_attention_heads,
54
+ dropout=args.attention_dropout,
55
+ self_attention=True,
56
+ )
57
+
58
+ def residual_connection(self, x, residual):
59
+ return residual + x
60
+
61
+ def forward(
62
+ self,
63
+ x,
64
+ encoder_padding_mask: Optional[Tensor],
65
+ attn_mask: Optional[Tensor] = None,
66
+ ):
67
+ """
68
+ Args:
69
+ x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
70
+ encoder_padding_mask (ByteTensor): binary ByteTensor of shape
71
+ `(batch, seq_len)` where padding elements are indicated by ``1``.
72
+ attn_mask (ByteTensor): binary tensor of shape `(tgt_len, src_len)`,
73
+ where `tgt_len` is the length of output and `src_len` is the
74
+ length of input, though here both are equal to `seq_len`.
75
+ `attn_mask[tgt_i, src_j] = 1` means that when calculating the
76
+ embedding for `tgt_i`, we exclude (mask out) `src_j`. This is
77
+ useful for strided self-attention.
78
+
79
+ Returns:
80
+ encoded output of shape `(seq_len, batch, embed_dim)`
81
+ """
82
+ # anything in original attn_mask = 1, becomes -1e8
83
+ # anything in original attn_mask = 0, becomes 0
84
+ # Note that we cannot use -inf here, because at some edge cases,
85
+ # the attention weight (before softmax) for some padded element in query
86
+ # will become -inf, which results in NaN in model parameters
87
+ if attn_mask is not None:
88
+ attn_mask = attn_mask.masked_fill(
89
+ attn_mask.to(torch.bool), -1e8 if x.dtype == torch.float32 else -1e4
90
+ )
91
+
92
+ residual = x
93
+ x = self.self_attn_layer_norm(x)
94
+ x, _ = self.self_attn(
95
+ query=x,
96
+ key=x,
97
+ value=x,
98
+ key_padding_mask=encoder_padding_mask,
99
+ need_weights=False,
100
+ attn_mask=attn_mask,
101
+ )
102
+ x = self.dropout_module(x)
103
+ x = self.residual_connection(x, residual)
104
+
105
+ residual = x
106
+ x = self.final_layer_norm(x)
107
+ x = self.activation_fn(self.fc1(x))
108
+ x = self.fc2(x)
109
+ x = self.dropout_module(x)
110
+ x = self.residual_connection(x, residual)
111
+ return x
112
+
113
+
114
+ class TransformerDecoderLayer(nn.Module):
115
+ """Decoder layer block.
116
+ `layernorm -> dropout -> add residual`
117
+
118
+ Args:
119
+ args (argparse.Namespace): parsed command-line arguments
120
+ no_encoder_attn (bool, optional): whether to attend to encoder outputs
121
+ (default: False).
122
+ """
123
+
124
+ def __init__(
125
+ self, args, no_encoder_attn=False, add_bias_kv=False, add_zero_attn=False
126
+ ):
127
+ super().__init__()
128
+ self.embed_dim = args.decoder_embed_dim
129
+ self.dropout_module = nn.Dropout(args.dropout)
130
+
131
+ self.self_attn = self.build_self_attention(
132
+ self.embed_dim,
133
+ args,
134
+ add_bias_kv=add_bias_kv,
135
+ add_zero_attn=add_zero_attn,
136
+ )
137
+ self.nh = self.self_attn.num_heads
138
+ self.head_dim = self.self_attn.head_dim
139
+
140
+ self.activation_fn = F.relu
141
+
142
+ self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)
143
+
144
+ if no_encoder_attn:
145
+ self.encoder_attn = None
146
+ self.encoder_attn_layer_norm = None
147
+ else:
148
+ self.encoder_attn = self.build_encoder_attention(self.embed_dim, args)
149
+ self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim)
150
+
151
+ self.ffn_layernorm = (
152
+ nn.LayerNorm(args.decoder_ffn_embed_dim)
153
+ if getattr(args, "scale_fc", False)
154
+ else None
155
+ )
156
+ self.w_resid = (
157
+ nn.Parameter(
158
+ torch.ones(
159
+ self.embed_dim,
160
+ ),
161
+ requires_grad=True,
162
+ )
163
+ if getattr(args, "scale_resids", False)
164
+ else None
165
+ )
166
+
167
+ self.fc1 = self.build_fc1(
168
+ self.embed_dim,
169
+ args.decoder_ffn_embed_dim,
170
+ )
171
+ self.fc2 = self.build_fc2(
172
+ args.decoder_ffn_embed_dim,
173
+ self.embed_dim,
174
+ )
175
+
176
+ self.final_layer_norm = nn.LayerNorm(self.embed_dim)
177
+ self.need_attn = True
178
+
179
+ def build_fc1(self, input_dim, output_dim):
180
+ return nn.Linear(input_dim, output_dim)
181
+
182
+ def build_fc2(self, input_dim, output_dim):
183
+ return nn.Linear(input_dim, output_dim)
184
+
185
+ def build_self_attention(
186
+ self, embed_dim, args, add_bias_kv=False, add_zero_attn=False
187
+ ):
188
+ return MultiheadAttention(
189
+ embed_dim,
190
+ args.decoder_attention_heads,
191
+ dropout=args.attention_dropout,
192
+ add_bias_kv=add_bias_kv,
193
+ add_zero_attn=add_zero_attn,
194
+ self_attention=True,
195
+ )
196
+
197
+ def build_encoder_attention(self, embed_dim, args):
198
+ return MultiheadAttention(
199
+ embed_dim,
200
+ args.decoder_attention_heads,
201
+ kdim=args.encoder_embed_dim,
202
+ vdim=args.encoder_embed_dim,
203
+ dropout=args.attention_dropout,
204
+ encoder_decoder_attention=True,
205
+ )
206
+
207
+ def residual_connection(self, x, residual):
208
+ return residual + x
209
+
210
+ def forward(
211
+ self,
212
+ x,
213
+ encoder_out: Optional[torch.Tensor] = None,
214
+ encoder_padding_mask: Optional[torch.Tensor] = None,
215
+ incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
216
+ prev_self_attn_state: Optional[List[torch.Tensor]] = None,
217
+ prev_attn_state: Optional[List[torch.Tensor]] = None,
218
+ self_attn_mask: Optional[torch.Tensor] = None,
219
+ self_attn_padding_mask: Optional[torch.Tensor] = None,
220
+ need_attn: bool = False,
221
+ need_head_weights: bool = False,
222
+ ):
223
+ """
224
+ Args:
225
+ x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
226
+ encoder_padding_mask (ByteTensor, optional): binary
227
+ ByteTensor of shape `(batch, src_len)` where padding
228
+ elements are indicated by ``1``.
229
+ need_attn (bool, optional): return attention weights
230
+ need_head_weights (bool, optional): return attention weights
231
+ for each head (default: return average over heads).
232
+
233
+ Returns:
234
+ encoded output of shape `(seq_len, batch, embed_dim)`
235
+ """
236
+ if need_head_weights:
237
+ need_attn = True
238
+
239
+ residual = x
240
+ x = self.self_attn_layer_norm(x)
241
+ if prev_self_attn_state is not None:
242
+ prev_key, prev_value = prev_self_attn_state[:2]
243
+ saved_state: Dict[str, Optional[Tensor]] = {
244
+ "prev_key": prev_key,
245
+ "prev_value": prev_value,
246
+ }
247
+ if len(prev_self_attn_state) >= 3:
248
+ saved_state["prev_key_padding_mask"] = prev_self_attn_state[2]
249
+ assert incremental_state is not None
250
+ self.self_attn._set_input_buffer(incremental_state, saved_state)
251
+ _self_attn_input_buffer = self.self_attn._get_input_buffer(incremental_state)
252
+ y = x
253
+
254
+ x, attn = self.self_attn(
255
+ query=x,
256
+ key=y,
257
+ value=y,
258
+ key_padding_mask=self_attn_padding_mask,
259
+ incremental_state=incremental_state,
260
+ need_weights=False,
261
+ attn_mask=self_attn_mask,
262
+ )
263
+ x = self.dropout_module(x)
264
+ x = self.residual_connection(x, residual)
265
+
266
+ if self.encoder_attn is not None and encoder_out is not None:
267
+ residual = x
268
+ x = self.encoder_attn_layer_norm(x)
269
+ if prev_attn_state is not None:
270
+ prev_key, prev_value = prev_attn_state[:2]
271
+ saved_state: Dict[str, Optional[Tensor]] = {
272
+ "prev_key": prev_key,
273
+ "prev_value": prev_value,
274
+ }
275
+ if len(prev_attn_state) >= 3:
276
+ saved_state["prev_key_padding_mask"] = prev_attn_state[2]
277
+ assert incremental_state is not None
278
+ self.encoder_attn._set_input_buffer(incremental_state, saved_state)
279
+
280
+ x, attn = self.encoder_attn(
281
+ query=x,
282
+ key=encoder_out,
283
+ value=encoder_out,
284
+ key_padding_mask=encoder_padding_mask,
285
+ incremental_state=incremental_state,
286
+ static_kv=True,
287
+ need_weights=need_attn or (not self.training and self.need_attn),
288
+ need_head_weights=need_head_weights,
289
+ )
290
+ x = self.dropout_module(x)
291
+ x = self.residual_connection(x, residual)
292
+
293
+ residual = x
294
+ x = self.final_layer_norm(x)
295
+
296
+ x = self.activation_fn(self.fc1(x))
297
+ if self.ffn_layernorm is not None:
298
+ x = self.ffn_layernorm(x)
299
+ x = self.fc2(x)
300
+ x = self.dropout_module(x)
301
+ if self.w_resid is not None:
302
+ residual = torch.mul(self.w_resid, residual)
303
+ x = self.residual_connection(x, residual)
304
+ return x, attn, None
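
Both layer classes above follow the same pre-LayerNorm residual pattern. A minimal sketch with a dummy sublayer standing in for attention or the FFN:

import torch
import torch.nn as nn

embed_dim = 8
norm = nn.LayerNorm(embed_dim)
dropout = nn.Dropout(0.1)
sublayer = nn.Linear(embed_dim, embed_dim)  # stand-in for self-attention / FFN

x = torch.randn(5, 2, embed_dim)            # (seq_len, batch, embed_dim)
residual = x
x = norm(x)                                  # normalize first (pre-LN)
x = dropout(sublayer(x))
x = residual + x                             # add residual last
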
esm/esm/inverse_folding/util.py ADDED
@@ -0,0 +1,320 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import json
7
+ import math
8
+
9
+ import biotite.structure
10
+ from biotite.structure.io import pdbx, pdb
11
+ from biotite.structure.residues import get_residues
12
+ from biotite.structure import filter_backbone
13
+ from biotite.structure import get_chains
14
+ from biotite.sequence import ProteinSequence
15
+ import numpy as np
16
+ from scipy.spatial import transform
17
+ from scipy.stats import special_ortho_group
18
+ import torch
19
+ import torch.nn as nn
20
+ import torch.nn.functional as F
21
+ import torch.utils.data as data
22
+ from typing import Sequence, Tuple, List
23
+
24
+ from esm.data import BatchConverter
25
+
26
+
27
+ def load_structure(fpath, chain=None):
28
+ """
29
+ Args:
30
+ fpath: filepath to either pdb or cif file
31
+ chain: the chain id or list of chain ids to load
32
+ Returns:
33
+ biotite.structure.AtomArray
34
+ """
35
+ if fpath.endswith('cif'):
36
+ with open(fpath) as fin:
37
+ pdbxf = pdbx.PDBxFile.read(fin)
38
+ structure = pdbx.get_structure(pdbxf, model=1)
39
+ elif fpath.endswith('pdb'):
40
+ with open(fpath) as fin:
41
+ pdbf = pdb.PDBFile.read(fin)
42
+ structure = pdb.get_structure(pdbf, model=1)
43
+ bbmask = filter_backbone(structure)
44
+ structure = structure[bbmask]
45
+ all_chains = get_chains(structure)
46
+ if len(all_chains) == 0:
47
+ raise ValueError('No chains found in the input file.')
48
+ if chain is None:
49
+ chain_ids = all_chains
50
+ elif isinstance(chain, list):
51
+ chain_ids = chain
52
+ else:
53
+ chain_ids = [chain]
54
+ for chain in chain_ids:
55
+ if chain not in all_chains:
56
+ raise ValueError(f'Chain {chain} not found in input file')
57
+ chain_filter = [a.chain_id in chain_ids for a in structure]
58
+ structure = structure[chain_filter]
59
+ return structure
60
+
61
+
62
+ def extract_coords_from_structure(structure: biotite.structure.AtomArray):
63
+ """
64
+ Args:
65
+ structure: An instance of biotite AtomArray
66
+ Returns:
67
+ Tuple (coords, seq)
68
+ - coords is an L x 3 x 3 array for N, CA, C coordinates
69
+ - seq is the extracted sequence
70
+ """
71
+ coords = get_atom_coords_residuewise(["N", "CA", "C"], structure)
72
+ residue_identities = get_residues(structure)[1]
73
+ seq = ''.join([ProteinSequence.convert_letter_3to1(r) for r in residue_identities])
74
+ return coords, seq
75
+
76
+
77
+ def load_coords(fpath, chain):
78
+ """
79
+ Args:
80
+ fpath: filepath to either pdb or cif file
81
+ chain: the chain id
82
+ Returns:
83
+ Tuple (coords, seq)
84
+ - coords is an L x 3 x 3 array for N, CA, C coordinates
85
+ - seq is the extracted sequence
86
+ """
87
+ structure = load_structure(fpath, chain)
88
+ return extract_coords_from_structure(structure)
89
+
90
+
91
+ def get_atom_coords_residuewise(atoms: List[str], struct: biotite.structure.AtomArray):
92
+ """
93
+ Example for atoms argument: ["N", "CA", "C"]
94
+ """
95
+ def filterfn(s, axis=None):
96
+ filters = np.stack([s.atom_name == name for name in atoms], axis=1)
97
+ sum = filters.sum(0)
98
+ if not np.all(sum <= np.ones(filters.shape[1])):
99
+ raise RuntimeError("structure has multiple atoms with same name")
100
+ index = filters.argmax(0)
101
+ coords = s[index].coord
102
+ coords[sum == 0] = float("nan")
103
+ return coords
104
+
105
+ return biotite.structure.apply_residue_wise(struct, struct, filterfn)
106
+
107
+
108
+ def get_sequence_loss(model, alphabet, coords, seq):
109
+ batch_converter = CoordBatchConverter(alphabet)
110
+ batch = [(coords, None, seq)]
111
+ coords, confidence, strs, tokens, padding_mask = batch_converter(batch)
112
+
113
+ prev_output_tokens = tokens[:, :-1]
114
+ target = tokens[:, 1:]
115
+ target_padding_mask = (target == alphabet.padding_idx)
116
+ logits, _ = model.forward(coords, padding_mask, confidence, prev_output_tokens)
117
+ loss = F.cross_entropy(logits, target, reduction='none')
118
+ loss = loss[0].detach().numpy()
119
+ target_padding_mask = target_padding_mask[0].numpy()
120
+ return loss, target_padding_mask
121
+
122
+
123
+ def score_sequence(model, alphabet, coords, seq):
124
+ loss, target_padding_mask = get_sequence_loss(model, alphabet, coords, seq)
125
+ ll_fullseq = -np.sum(loss * ~target_padding_mask) / np.sum(~target_padding_mask)
126
+ # Also calculate average when excluding masked portions
127
+ coord_mask = np.all(np.isfinite(coords), axis=(-1, -2))
128
+ ll_withcoord = -np.sum(loss * coord_mask) / np.sum(coord_mask)
129
+ return ll_fullseq, ll_withcoord
130
+
131
+
132
+ def get_encoder_output(model, alphabet, coords):
133
+ batch_converter = CoordBatchConverter(alphabet)
134
+ # the batch_converter is essential for forming the correct input format
135
+ batch = [(coords, None, None)]
136
+ coords, confidence, _, _, padding_mask = batch_converter(batch)
137
+ encoder_out = model.encoder.forward(coords, padding_mask, confidence,
138
+ return_all_hiddens=False)
139
+ # remove beginning and end (bos and eos tokens)
140
+ return encoder_out['encoder_out'][0][1:-1, 0]
141
+
142
+
143
+ def rotate(v, R):
144
+ """
145
+ Rotates a vector by a rotation matrix.
146
+
147
+ Args:
148
+ v: 3D vector, tensor of shape (length x batch_size x channels x 3)
149
+ R: rotation matrix, tensor of shape (length x batch_size x 3 x 3)
150
+
151
+ Returns:
152
+ Rotated version of v by rotation matrix R.
153
+ """
154
+ R = R.unsqueeze(-3)
155
+ v = v.unsqueeze(-1)
156
+ return torch.sum(v * R, dim=-2)
157
+
158
+
159
+ def get_rotation_frames(coords):
160
+ """
161
+ Returns a local rotation frame defined by N, CA, C positions.
162
+
163
+ Args:
164
+ coords: coordinates, tensor of shape (batch_size x length x 3 x 3)
165
+ where the third dimension is in order of N, CA, C
166
+
167
+ Returns:
168
+ Local relative rotation frames in shape (batch_size x length x 3 x 3)
169
+ """
170
+ v1 = coords[:, :, 2] - coords[:, :, 1]
171
+ v2 = coords[:, :, 0] - coords[:, :, 1]
172
+ e1 = normalize(v1, dim=-1)
173
+ u2 = v2 - e1 * torch.sum(e1 * v2, dim=-1, keepdim=True)
174
+ e2 = normalize(u2, dim=-1)
175
+ e3 = torch.cross(e1, e2, dim=-1)
176
+ R = torch.stack([e1, e2, e3], dim=-2)
177
+ return R
178
+
179
+
180
+ def nan_to_num(ts, val=0.0):
181
+ """
182
+ Replaces nans in tensor with a fixed value.
183
+ """
184
+ val = torch.tensor(val, dtype=ts.dtype, device=ts.device)
185
+ return torch.where(~torch.isfinite(ts), val, ts)
186
+
187
+
188
+ def rbf(values, v_min, v_max, n_bins=16):
189
+ """
190
+ Returns RBF encodings in a new dimension at the end.
191
+ """
192
+ rbf_centers = torch.linspace(v_min, v_max, n_bins, device=values.device)
193
+ rbf_centers = rbf_centers.view([1] * len(values.shape) + [-1])
194
+ rbf_std = (v_max - v_min) / n_bins
195
+ v_expand = torch.unsqueeze(values, -1)
196
+ z = (values.unsqueeze(-1) - rbf_centers) / rbf_std
197
+ return torch.exp(-z ** 2)
198
+
199
+
200
+ def norm(tensor, dim, eps=1e-8, keepdim=False):
201
+ """
202
+ Returns L2 norm along a dimension.
203
+ """
204
+ return torch.sqrt(
205
+ torch.sum(torch.square(tensor), dim=dim, keepdim=keepdim) + eps)
206
+
207
+
208
+ def normalize(tensor, dim=-1):
209
+ """
210
+ Normalizes a tensor along a dimension after removing nans.
211
+ """
212
+ return nan_to_num(
213
+ torch.div(tensor, norm(tensor, dim=dim, keepdim=True))
214
+ )
215
+
216
+
217
+ class CoordBatchConverter(BatchConverter):
218
+ def __call__(self, raw_batch: Sequence[Tuple[Sequence, str]], device=None):
219
+ """
220
+ Args:
221
+ raw_batch: List of tuples (coords, confidence, seq)
222
+ In each tuple,
223
+ coords: list of floats, shape L x 3 x 3
224
+ confidence: list of floats, shape L; or scalar float; or None
225
+ seq: string of length L
226
+ Returns:
227
+ coords: Tensor of shape batch_size x L x 3 x 3
228
+ confidence: Tensor of shape batch_size x L
229
+ strs: list of strings
230
+ tokens: LongTensor of shape batch_size x L
231
+ padding_mask: ByteTensor of shape batch_size x L
232
+ """
233
+ self.alphabet.cls_idx = self.alphabet.get_idx("<cath>")
234
+ batch = []
235
+ for coords, confidence, seq in raw_batch:
236
+ if confidence is None:
237
+ confidence = 1.
238
+ if isinstance(confidence, float) or isinstance(confidence, int):
239
+ confidence = [float(confidence)] * len(coords)
240
+ if seq is None:
241
+ seq = 'X' * len(coords)
242
+ batch.append(((coords, confidence), seq))
243
+
244
+ coords_and_confidence, strs, tokens = super().__call__(batch)
245
+
246
+ # pad beginning and end of each protein due to legacy reasons
247
+ coords = [
248
+ F.pad(torch.tensor(cd), (0, 0, 0, 0, 1, 1), value=np.inf)
249
+ for cd, _ in coords_and_confidence
250
+ ]
251
+ confidence = [
252
+ F.pad(torch.tensor(cf), (1, 1), value=-1.)
253
+ for _, cf in coords_and_confidence
254
+ ]
255
+ coords = self.collate_dense_tensors(coords, pad_v=np.nan)
256
+ confidence = self.collate_dense_tensors(confidence, pad_v=-1.)
257
+ if device is not None:
258
+ coords = coords.to(device)
259
+ confidence = confidence.to(device)
260
+ tokens = tokens.to(device)
261
+ padding_mask = torch.isnan(coords[:,:,0,0])
262
+ coord_mask = torch.isfinite(coords.sum(-2).sum(-1))
263
+ confidence = confidence * coord_mask + (-1.) * padding_mask
264
+ return coords, confidence, strs, tokens, padding_mask
265
+
266
+ def from_lists(self, coords_list, confidence_list=None, seq_list=None, device=None):
267
+ """
268
+ Args:
269
+ coords_list: list of length batch_size, each item is a list of
270
+ floats in shape L x 3 x 3 to describe a backbone
271
+ confidence_list: one of
272
+ - None, default to highest confidence
273
+ - list of length batch_size, each item is a scalar
274
+ - list of length batch_size, each item is a list of floats of
275
+ length L to describe the confidence scores for the backbone
276
+ with values between 0. and 1.
277
+ seq_list: either None or a list of strings
278
+ Returns:
279
+ coords: Tensor of shape batch_size x L x 3 x 3
280
+ confidence: Tensor of shape batch_size x L
281
+ strs: list of strings
282
+ tokens: LongTensor of shape batch_size x L
283
+ padding_mask: ByteTensor of shape batch_size x L
284
+ """
285
+ batch_size = len(coords_list)
286
+ if confidence_list is None:
287
+ confidence_list = [None] * batch_size
288
+ if seq_list is None:
289
+ seq_list = [None] * batch_size
290
+ raw_batch = zip(coords_list, confidence_list, seq_list)
291
+ return self.__call__(raw_batch, device)
292
+
293
+ @staticmethod
294
+ def collate_dense_tensors(samples, pad_v):
295
+ """
296
+ Takes a list of tensors with the following dimensions:
297
+ [(d_11, ..., d_1K),
298
+ (d_21, ..., d_2K),
299
+ ...,
300
+ (d_N1, ..., d_NK)]
301
+ and stack + pads them into a single tensor of:
302
+ (N, max_i=1,N { d_i1 }, ..., max_i=1,N {diK})
303
+ """
304
+ if len(samples) == 0:
305
+ return torch.Tensor()
306
+ if len(set(x.dim() for x in samples)) != 1:
307
+ raise RuntimeError(
308
+ f"Samples has varying dimensions: {[x.dim() for x in samples]}"
309
+ )
310
+ (device,) = tuple(set(x.device for x in samples)) # assumes all on same device
311
+ max_shape = [max(lst) for lst in zip(*[x.shape for x in samples])]
312
+ result = torch.empty(
313
+ len(samples), *max_shape, dtype=samples[0].dtype, device=device
314
+ )
315
+ result.fill_(pad_v)
316
+ for i in range(len(samples)):
317
+ result_i = result[i]
318
+ t = samples[i]
319
+ result_i[tuple(slice(0, k) for k in t.shape)] = t
320
+ return result
esm/esm/model/esm1.py ADDED
@@ -0,0 +1,200 @@
 
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import math
7
+
8
+ import torch
9
+ import torch.nn as nn
10
+ import torch.nn.functional as F
11
+
12
+ from ..modules import (
13
+ TransformerLayer,
14
+ LearnedPositionalEmbedding,
15
+ SinusoidalPositionalEmbedding,
16
+ RobertaLMHead,
17
+ ESM1bLayerNorm,
18
+ ContactPredictionHead,
19
+ )
20
+
21
+
22
+ class ProteinBertModel(nn.Module):
23
+ @classmethod
24
+ def add_args(cls, parser):
25
+ parser.add_argument(
26
+ "--num_layers", default=36, type=int, metavar="N", help="number of layers"
27
+ )
28
+ parser.add_argument(
29
+ "--embed_dim", default=1280, type=int, metavar="N", help="embedding dimension"
30
+ )
31
+ parser.add_argument(
32
+ "--logit_bias", action="store_true", help="whether to apply bias to logits"
33
+ )
34
+ parser.add_argument(
35
+ "--ffn_embed_dim",
36
+ default=5120,
37
+ type=int,
38
+ metavar="N",
39
+ help="embedding dimension for FFN",
40
+ )
41
+ parser.add_argument(
42
+ "--attention_heads",
43
+ default=20,
44
+ type=int,
45
+ metavar="N",
46
+ help="number of attention heads",
47
+ )
48
+
49
+ def __init__(self, args, alphabet):
50
+ super().__init__()
51
+ self.args = args
52
+ self.alphabet_size = len(alphabet)
53
+ self.padding_idx = alphabet.padding_idx
54
+ self.mask_idx = alphabet.mask_idx
55
+ self.cls_idx = alphabet.cls_idx
56
+ self.eos_idx = alphabet.eos_idx
57
+ self.prepend_bos = alphabet.prepend_bos
58
+ self.append_eos = alphabet.append_eos
59
+ self.emb_layer_norm_before = getattr(self.args, "emb_layer_norm_before", False)
60
+ if self.args.arch == "roberta_large":
61
+ self.model_version = "ESM-1b"
62
+ self._init_submodules_esm1b()
63
+ else:
64
+ self.model_version = "ESM-1"
65
+ self._init_submodules_esm1()
66
+
67
+ def _init_submodules_common(self):
68
+ self.embed_tokens = nn.Embedding(
69
+ self.alphabet_size, self.args.embed_dim, padding_idx=self.padding_idx
70
+ )
71
+ self.layers = nn.ModuleList(
72
+ [
73
+ TransformerLayer(
74
+ self.args.embed_dim,
75
+ self.args.ffn_embed_dim,
76
+ self.args.attention_heads,
77
+ add_bias_kv=(self.model_version != "ESM-1b"),
78
+ use_esm1b_layer_norm=(self.model_version == "ESM-1b"),
79
+ )
80
+ for _ in range(self.args.layers)
81
+ ]
82
+ )
83
+
84
+ self.contact_head = ContactPredictionHead(
85
+ self.args.layers * self.args.attention_heads,
86
+ self.prepend_bos,
87
+ self.append_eos,
88
+ eos_idx=self.eos_idx,
89
+ )
90
+
91
+ def _init_submodules_esm1b(self):
92
+ self._init_submodules_common()
93
+ self.embed_scale = 1
94
+ self.embed_positions = LearnedPositionalEmbedding(
95
+ self.args.max_positions, self.args.embed_dim, self.padding_idx
96
+ )
97
+ self.emb_layer_norm_before = (
98
+ ESM1bLayerNorm(self.args.embed_dim) if self.emb_layer_norm_before else None
99
+ )
100
+ self.emb_layer_norm_after = ESM1bLayerNorm(self.args.embed_dim)
101
+ self.lm_head = RobertaLMHead(
102
+ embed_dim=self.args.embed_dim,
103
+ output_dim=self.alphabet_size,
104
+ weight=self.embed_tokens.weight,
105
+ )
106
+
107
+ def _init_submodules_esm1(self):
108
+ self._init_submodules_common()
109
+ self.embed_scale = math.sqrt(self.args.embed_dim)
110
+ self.embed_positions = SinusoidalPositionalEmbedding(self.args.embed_dim, self.padding_idx)
111
+ self.embed_out = nn.Parameter(torch.zeros((self.alphabet_size, self.args.embed_dim)))
112
+ self.embed_out_bias = None
113
+ if self.args.final_bias:
114
+ self.embed_out_bias = nn.Parameter(torch.zeros(self.alphabet_size))
115
+
116
+ def forward(self, tokens, repr_layers=[], need_head_weights=False, return_contacts=False):
117
+ if return_contacts:
118
+ need_head_weights = True
119
+
120
+ assert tokens.ndim == 2
121
+ padding_mask = tokens.eq(self.padding_idx) # B, T
122
+
123
+ x = self.embed_scale * self.embed_tokens(tokens)
124
+
125
+ if getattr(self.args, "token_dropout", False):
126
+ x.masked_fill_((tokens == self.mask_idx).unsqueeze(-1), 0.0)
127
+ # x: B x T x C
128
+ mask_ratio_train = 0.15 * 0.8
129
+ src_lengths = (~padding_mask).sum(-1)
130
+ mask_ratio_observed = (tokens == self.mask_idx).sum(-1).float() / src_lengths
131
+ x = x * (1 - mask_ratio_train) / (1 - mask_ratio_observed)[:, None, None]
132
+
133
+ x = x + self.embed_positions(tokens)
134
+
135
+ if self.model_version == "ESM-1b":
136
+ if self.emb_layer_norm_before:
137
+ x = self.emb_layer_norm_before(x)
138
+ if padding_mask is not None:
139
+ x = x * (1 - padding_mask.unsqueeze(-1).type_as(x))
140
+
141
+ repr_layers = set(repr_layers)
142
+ hidden_representations = {}
143
+ if 0 in repr_layers:
144
+ hidden_representations[0] = x
145
+
146
+ if need_head_weights:
147
+ attn_weights = []
148
+
149
+ # (B, T, E) => (T, B, E)
150
+ x = x.transpose(0, 1)
151
+
152
+ if not padding_mask.any():
153
+ padding_mask = None
154
+
155
+ for layer_idx, layer in enumerate(self.layers):
156
+ x, attn = layer(
157
+ x, self_attn_padding_mask=padding_mask, need_head_weights=need_head_weights
158
+ )
159
+ if (layer_idx + 1) in repr_layers:
160
+ hidden_representations[layer_idx + 1] = x.transpose(0, 1)
161
+ if need_head_weights:
162
+ # (H, B, T, T) => (B, H, T, T)
163
+ attn_weights.append(attn.transpose(1, 0))
164
+
165
+ if self.model_version == "ESM-1b":
166
+ x = self.emb_layer_norm_after(x)
167
+ x = x.transpose(0, 1) # (T, B, E) => (B, T, E)
168
+
169
+ # last hidden representation should have layer norm applied
170
+ if (layer_idx + 1) in repr_layers:
171
+ hidden_representations[layer_idx + 1] = x
172
+ x = self.lm_head(x)
173
+ else:
174
+ x = F.linear(x, self.embed_out, bias=self.embed_out_bias)
175
+ x = x.transpose(0, 1) # (T, B, E) => (B, T, E)
176
+
177
+ result = {"logits": x, "representations": hidden_representations}
178
+ if need_head_weights:
179
+ # attentions: B x L x H x T x T
180
+ attentions = torch.stack(attn_weights, 1)
181
+ if self.model_version == "ESM-1":
182
+ # ESM-1 models have an additional null-token for attention, which we remove
183
+ attentions = attentions[..., :-1]
184
+ if padding_mask is not None:
185
+ attention_mask = 1 - padding_mask.type_as(attentions)
186
+ attention_mask = attention_mask.unsqueeze(1) * attention_mask.unsqueeze(2)
187
+ attentions = attentions * attention_mask[:, None, None, :, :]
188
+ result["attentions"] = attentions
189
+ if return_contacts:
190
+ contacts = self.contact_head(tokens, attentions)
191
+ result["contacts"] = contacts
192
+
193
+ return result
194
+
195
+ def predict_contacts(self, tokens):
196
+ return self(tokens, return_contacts=True)["contacts"]
197
+
198
+ @property
199
+ def num_layers(self):
200
+ return self.args.layers
esm/esm/model/esm2.py ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ from typing import Union
7
+ import torch
8
+ import torch.nn as nn
9
+
10
+ import esm
11
+ from esm.modules import ContactPredictionHead, ESM1bLayerNorm, RobertaLMHead, TransformerLayer
12
+
13
+
14
+ class ESM2(nn.Module):
15
+ def __init__(
16
+ self,
17
+ num_layers: int = 33,
18
+ embed_dim: int = 1280,
19
+ attention_heads: int = 20,
20
+ alphabet: Union[esm.data.Alphabet, str] = "ESM-1b",
21
+ token_dropout: bool = True,
22
+ ):
23
+ super().__init__()
24
+ self.num_layers = num_layers
25
+ self.embed_dim = embed_dim
26
+ self.attention_heads = attention_heads
27
+ if not isinstance(alphabet, esm.data.Alphabet):
28
+ alphabet = esm.data.Alphabet.from_architecture(alphabet)
29
+ self.alphabet = alphabet
30
+ self.alphabet_size = len(alphabet)
31
+ self.padding_idx = alphabet.padding_idx
32
+ self.mask_idx = alphabet.mask_idx
33
+ self.cls_idx = alphabet.cls_idx
34
+ self.eos_idx = alphabet.eos_idx
35
+ self.prepend_bos = alphabet.prepend_bos
36
+ self.append_eos = alphabet.append_eos
37
+ self.token_dropout = token_dropout
38
+
39
+ self._init_submodules()
40
+
41
+ def _init_submodules(self):
42
+ self.embed_scale = 1
43
+ self.embed_tokens = nn.Embedding(
44
+ self.alphabet_size,
45
+ self.embed_dim,
46
+ padding_idx=self.padding_idx,
47
+ )
48
+
49
+ self.layers = nn.ModuleList(
50
+ [
51
+ TransformerLayer(
52
+ self.embed_dim,
53
+ 4 * self.embed_dim,
54
+ self.attention_heads,
55
+ add_bias_kv=False,
56
+ use_esm1b_layer_norm=True,
57
+ use_rotary_embeddings=True,
58
+ )
59
+ for _ in range(self.num_layers)
60
+ ]
61
+ )
62
+
63
+ self.contact_head = ContactPredictionHead(
64
+ self.num_layers * self.attention_heads,
65
+ self.prepend_bos,
66
+ self.append_eos,
67
+ eos_idx=self.eos_idx,
68
+ )
69
+ self.emb_layer_norm_after = ESM1bLayerNorm(self.embed_dim)
70
+
71
+ self.lm_head = RobertaLMHead(
72
+ embed_dim=self.embed_dim,
73
+ output_dim=self.alphabet_size,
74
+ weight=self.embed_tokens.weight,
75
+ )
76
+
77
+ def forward(self, tokens, repr_layers=[], need_head_weights=False, return_contacts=False):
78
+ if return_contacts:
79
+ need_head_weights = True
80
+
81
+ assert tokens.ndim == 2
82
+ padding_mask = tokens.eq(self.padding_idx) # B, T
83
+
84
+ x = self.embed_scale * self.embed_tokens(tokens)
85
+
86
+ if self.token_dropout:
87
+ x.masked_fill_((tokens == self.mask_idx).unsqueeze(-1), 0.0)
88
+ # x: B x T x C
89
+ mask_ratio_train = 0.15 * 0.8
90
+ src_lengths = (~padding_mask).sum(-1)
91
+ mask_ratio_observed = (tokens == self.mask_idx).sum(-1).to(x.dtype) / src_lengths
92
+ x = x * (1 - mask_ratio_train) / (1 - mask_ratio_observed)[:, None, None]
93
+
94
+ if padding_mask is not None:
95
+ x = x * (1 - padding_mask.unsqueeze(-1).type_as(x))
96
+
97
+ repr_layers = set(repr_layers)
98
+ hidden_representations = {}
99
+ if 0 in repr_layers:
100
+ hidden_representations[0] = x
101
+
102
+ if need_head_weights:
103
+ attn_weights = []
104
+
105
+ # (B, T, E) => (T, B, E)
106
+ x = x.transpose(0, 1)
107
+
108
+ if not padding_mask.any():
109
+ padding_mask = None
110
+
111
+ for layer_idx, layer in enumerate(self.layers):
112
+ x, attn = layer(
113
+ x,
114
+ self_attn_padding_mask=padding_mask,
115
+ need_head_weights=need_head_weights,
116
+ )
117
+ if (layer_idx + 1) in repr_layers:
118
+ hidden_representations[layer_idx + 1] = x.transpose(0, 1)
119
+ if need_head_weights:
120
+ # (H, B, T, T) => (B, H, T, T)
121
+ attn_weights.append(attn.transpose(1, 0))
122
+
123
+ x = self.emb_layer_norm_after(x)
124
+ x = x.transpose(0, 1) # (T, B, E) => (B, T, E)
125
+
126
+ # last hidden representation should have layer norm applied
127
+ if (layer_idx + 1) in repr_layers:
128
+ hidden_representations[layer_idx + 1] = x
129
+ x = self.lm_head(x)
130
+
131
+ result = {"logits": x, "representations": hidden_representations}
132
+ if need_head_weights:
133
+ # attentions: B x L x H x T x T
134
+ attentions = torch.stack(attn_weights, 1)
135
+ if padding_mask is not None:
136
+ attention_mask = 1 - padding_mask.type_as(attentions)
137
+ attention_mask = attention_mask.unsqueeze(1) * attention_mask.unsqueeze(2)
138
+ attentions = attentions * attention_mask[:, None, None, :, :]
139
+ result["attentions"] = attentions
140
+ if return_contacts:
141
+ contacts = self.contact_head(tokens, attentions)
142
+ result["contacts"] = contacts
143
+
144
+ return result
145
+
146
+ def predict_contacts(self, tokens):
147
+ return self(tokens, return_contacts=True)["contacts"]
esm/esm/model/msa_transformer.py ADDED
@@ -0,0 +1,238 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import torch
7
+ import torch.nn as nn
8
+
9
+ from ..modules import (
10
+ AxialTransformerLayer,
11
+ LearnedPositionalEmbedding,
12
+ RobertaLMHead,
13
+ ESM1bLayerNorm,
14
+ ContactPredictionHead,
15
+ )
16
+
17
+ from ..axial_attention import RowSelfAttention, ColumnSelfAttention
18
+
19
+
20
+
21
+ class MSATransformer(nn.Module):
22
+ @classmethod
23
+ def add_args(cls, parser):
24
+ # fmt: off
25
+ parser.add_argument(
26
+ "--num_layers",
27
+ default=12,
28
+ type=int,
29
+ metavar="N",
30
+ help="number of layers"
31
+ )
32
+ parser.add_argument(
33
+ "--embed_dim",
34
+ default=768,
35
+ type=int,
36
+ metavar="N",
37
+ help="embedding dimension"
38
+ )
39
+ parser.add_argument(
40
+ "--logit_bias",
41
+ action="store_true",
42
+ help="whether to apply bias to logits"
43
+ )
44
+ parser.add_argument(
45
+ "--ffn_embed_dim",
46
+ default=3072,
47
+ type=int,
48
+ metavar="N",
49
+ help="embedding dimension for FFN",
50
+ )
51
+ parser.add_argument(
52
+ "--attention_heads",
53
+ default=12,
54
+ type=int,
55
+ metavar="N",
56
+ help="number of attention heads",
57
+ )
58
+ parser.add_argument(
59
+ "--dropout",
60
+ default=0.1,
61
+ type=float,
62
+ help="Dropout to apply."
63
+ )
64
+ parser.add_argument(
65
+ "--attention_dropout",
66
+ default=0.1,
67
+ type=float,
68
+ help="Dropout to apply."
69
+ )
70
+ parser.add_argument(
71
+ "--activation_dropout",
72
+ default=0.1,
73
+ type=float,
74
+ help="Dropout to apply."
75
+ )
76
+ parser.add_argument(
77
+ "--max_tokens_per_msa",
78
+ default=2 ** 14,
79
+ type=int,
80
+ help=(
81
+ "Used during inference to batch attention computations in a single "
82
+ "forward pass. This allows increased input sizes with less memory."
83
+ ),
84
+ )
85
+ # fmt: on
86
+
87
+ def __init__(self, args, alphabet):
88
+ super().__init__()
89
+ self.args = args
90
+ self.alphabet_size = len(alphabet)
91
+ self.padding_idx = alphabet.padding_idx
92
+ self.mask_idx = alphabet.mask_idx
93
+ self.cls_idx = alphabet.cls_idx
94
+ self.eos_idx = alphabet.eos_idx
95
+ self.prepend_bos = alphabet.prepend_bos
96
+ self.append_eos = alphabet.append_eos
97
+
98
+ self.embed_tokens = nn.Embedding(
99
+ self.alphabet_size, self.args.embed_dim, padding_idx=self.padding_idx
100
+ )
101
+
102
+ if getattr(self.args, "embed_positions_msa", False):
103
+ emb_dim = getattr(self.args, "embed_positions_msa_dim", self.args.embed_dim)
104
+ self.msa_position_embedding = nn.Parameter(
105
+ 0.01 * torch.randn(1, 1024, 1, emb_dim),
106
+ requires_grad=True,
107
+ )
108
+ else:
109
+ self.register_parameter("msa_position_embedding", None)
110
+
111
+ self.dropout_module = nn.Dropout(self.args.dropout)
112
+ self.layers = nn.ModuleList(
113
+ [
114
+ AxialTransformerLayer(
115
+ self.args.embed_dim,
116
+ self.args.ffn_embed_dim,
117
+ self.args.attention_heads,
118
+ self.args.dropout,
119
+ self.args.attention_dropout,
120
+ self.args.activation_dropout,
121
+ getattr(self.args, "max_tokens_per_msa", self.args.max_tokens),
122
+ )
123
+ for _ in range(self.args.layers)
124
+ ]
125
+ )
126
+
127
+ self.contact_head = ContactPredictionHead(
128
+ self.args.layers * self.args.attention_heads,
129
+ self.prepend_bos,
130
+ self.append_eos,
131
+ eos_idx=self.eos_idx,
132
+ )
133
+ self.embed_positions = LearnedPositionalEmbedding(
134
+ self.args.max_positions,
135
+ self.args.embed_dim,
136
+ self.padding_idx,
137
+ )
138
+ self.emb_layer_norm_before = ESM1bLayerNorm(self.args.embed_dim)
139
+ self.emb_layer_norm_after = ESM1bLayerNorm(self.args.embed_dim)
140
+ self.lm_head = RobertaLMHead(
141
+ embed_dim=self.args.embed_dim,
142
+ output_dim=self.alphabet_size,
143
+ weight=self.embed_tokens.weight,
144
+ )
145
+
146
+ def forward(self, tokens, repr_layers=[], need_head_weights=False, return_contacts=False):
147
+ if return_contacts:
148
+ need_head_weights = True
149
+
150
+ assert tokens.ndim == 3
151
+ batch_size, num_alignments, seqlen = tokens.size()
152
+ padding_mask = tokens.eq(self.padding_idx) # B, R, C
153
+ if not padding_mask.any():
154
+ padding_mask = None
155
+
156
+ x = self.embed_tokens(tokens)
157
+ x += self.embed_positions(tokens.view(batch_size * num_alignments, seqlen)).view(x.size())
158
+ if self.msa_position_embedding is not None:
159
+ if x.size(1) > 1024:
160
+ raise RuntimeError(
161
+ "Using model with MSA position embedding trained on maximum MSA "
162
+ f"depth of 1024, but received {x.size(1)} alignments."
163
+ )
164
+ x += self.msa_position_embedding[:, :num_alignments]
165
+
166
+ x = self.emb_layer_norm_before(x)
167
+
168
+ x = self.dropout_module(x)
169
+
170
+ if padding_mask is not None:
171
+ x = x * (1 - padding_mask.unsqueeze(-1).type_as(x))
172
+
173
+ repr_layers = set(repr_layers)
174
+ hidden_representations = {}
175
+ if 0 in repr_layers:
176
+ hidden_representations[0] = x
177
+
178
+ if need_head_weights:
179
+ row_attn_weights = []
180
+ col_attn_weights = []
181
+
182
+ # B x R x C x D -> R x C x B x D
183
+ x = x.permute(1, 2, 0, 3)
184
+
185
+ for layer_idx, layer in enumerate(self.layers):
186
+ x = layer(
187
+ x,
188
+ self_attn_padding_mask=padding_mask,
189
+ need_head_weights=need_head_weights,
190
+ )
191
+ if need_head_weights:
192
+ x, col_attn, row_attn = x
193
+ # H x C x B x R x R -> B x H x C x R x R
194
+ col_attn_weights.append(col_attn.permute(2, 0, 1, 3, 4))
195
+ # H x B x C x C -> B x H x C x C
196
+ row_attn_weights.append(row_attn.permute(1, 0, 2, 3))
197
+ if (layer_idx + 1) in repr_layers:
198
+ hidden_representations[layer_idx + 1] = x.permute(2, 0, 1, 3)
199
+
200
+ x = self.emb_layer_norm_after(x)
201
+ x = x.permute(2, 0, 1, 3) # R x C x B x D -> B x R x C x D
202
+
203
+ # last hidden representation should have layer norm applied
204
+ if (layer_idx + 1) in repr_layers:
205
+ hidden_representations[layer_idx + 1] = x
206
+ x = self.lm_head(x)
207
+
208
+ result = {"logits": x, "representations": hidden_representations}
209
+ if need_head_weights:
210
+ # col_attentions: B x L x H x C x R x R
211
+ col_attentions = torch.stack(col_attn_weights, 1)
212
+ # row_attentions: B x L x H x C x C
213
+ row_attentions = torch.stack(row_attn_weights, 1)
214
+ result["col_attentions"] = col_attentions
215
+ result["row_attentions"] = row_attentions
216
+ if return_contacts:
217
+ contacts = self.contact_head(tokens, row_attentions)
218
+ result["contacts"] = contacts
219
+
220
+ return result
221
+
222
+ def predict_contacts(self, tokens):
223
+ return self(tokens, return_contacts=True)["contacts"]
224
+
225
+ @property
226
+ def num_layers(self):
227
+ return self.args.layers
228
+
229
+ def max_tokens_per_msa_(self, value: int) -> None:
230
+ """The MSA Transformer automatically batches attention computations when
231
+ gradients are disabled to allow you to pass in larger MSAs at test time than
232
+ you can fit in GPU memory. By default this occurs when more than 2^14 tokens
233
+ are passed in the input MSA. You can set this value to infinity to disable
234
+ this behavior.
235
+ """
236
+ for module in self.modules():
237
+ if isinstance(module, (RowSelfAttention, ColumnSelfAttention)):
238
+ module.max_tokens_per_msa = value
esm/esm/modules.py ADDED
@@ -0,0 +1,418 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import math
7
+ from typing import Optional
8
+
9
+ import torch
10
+ import torch.nn as nn
11
+ import torch.nn.functional as F
12
+
13
+ from .multihead_attention import MultiheadAttention # noqa
14
+ from .axial_attention import ColumnSelfAttention, RowSelfAttention
15
+
16
+
17
+ def gelu(x):
18
+ """Implementation of the gelu activation function.
19
+
20
+ For information: OpenAI GPT's gelu is slightly different
21
+ (and gives slightly different results):
22
+ 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
23
+ """
24
+ return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
25
+
26
+
27
+ def symmetrize(x):
28
+ "Make layer symmetric in final two dimensions, used for contact prediction."
29
+ return x + x.transpose(-1, -2)
30
+
31
+
32
+ def apc(x):
33
+ "Perform average product correct, used for contact prediction."
34
+ a1 = x.sum(-1, keepdims=True)
35
+ a2 = x.sum(-2, keepdims=True)
36
+ a12 = x.sum((-1, -2), keepdims=True)
37
+
38
+ avg = a1 * a2
39
+ avg.div_(a12) # in-place to reduce memory
40
+ normalized = x - avg
41
+ return normalized
42
+
43
+
44
+ class ESM1LayerNorm(nn.Module):
45
+ def __init__(self, hidden_size, eps=1e-12, affine=True):
46
+ """Construct a layernorm layer in the TF style (eps inside the sqrt)."""
47
+ super().__init__()
48
+ self.hidden_size = (hidden_size,) if isinstance(hidden_size, int) else tuple(hidden_size)
49
+ self.eps = eps
50
+ self.affine = bool(affine)
51
+ if self.affine:
52
+ self.weight = nn.Parameter(torch.ones(hidden_size))
53
+ self.bias = nn.Parameter(torch.zeros(hidden_size))
54
+ else:
55
+ self.weight, self.bias = None, None
56
+
57
+ def forward(self, x):
58
+ dims = tuple(-(i + 1) for i in range(len(self.hidden_size)))
59
+ means = x.mean(dims, keepdim=True)
60
+ x_zeromean = x - means
61
+ variances = x_zeromean.pow(2).mean(dims, keepdim=True)
62
+ x = x_zeromean / torch.sqrt(variances + self.eps)
63
+ if self.affine:
64
+ x = (self.weight * x) + self.bias
65
+ return x
66
+
67
+
68
+ try:
69
+ from apex.normalization import FusedLayerNorm as _FusedLayerNorm
70
+
71
+ class ESM1bLayerNorm(_FusedLayerNorm):
72
+ @torch.jit.unused
73
+ def forward(self, x):
74
+ if not x.is_cuda:
75
+ return super().forward(x)
76
+ else:
77
+ with torch.cuda.device(x.device):
78
+ return super().forward(x)
79
+
80
+ except ImportError:
81
+ from torch.nn import LayerNorm as ESM1bLayerNorm
82
+
83
+
84
+ class TransformerLayer(nn.Module):
85
+ """Transformer layer block."""
86
+
87
+ def __init__(
88
+ self,
89
+ embed_dim,
90
+ ffn_embed_dim,
91
+ attention_heads,
92
+ add_bias_kv=True,
93
+ use_esm1b_layer_norm=False,
94
+ use_rotary_embeddings: bool = False,
95
+ ):
96
+ super().__init__()
97
+ self.embed_dim = embed_dim
98
+ self.ffn_embed_dim = ffn_embed_dim
99
+ self.attention_heads = attention_heads
100
+ self.use_rotary_embeddings = use_rotary_embeddings
101
+ self._init_submodules(add_bias_kv, use_esm1b_layer_norm)
102
+
103
+ def _init_submodules(self, add_bias_kv, use_esm1b_layer_norm):
104
+ BertLayerNorm = ESM1bLayerNorm if use_esm1b_layer_norm else ESM1LayerNorm
105
+
106
+ self.self_attn = MultiheadAttention(
107
+ self.embed_dim,
108
+ self.attention_heads,
109
+ add_bias_kv=add_bias_kv,
110
+ add_zero_attn=False,
111
+ use_rotary_embeddings=self.use_rotary_embeddings,
112
+ )
113
+ self.self_attn_layer_norm = BertLayerNorm(self.embed_dim)
114
+
115
+ self.fc1 = nn.Linear(self.embed_dim, self.ffn_embed_dim)
116
+ self.fc2 = nn.Linear(self.ffn_embed_dim, self.embed_dim)
117
+
118
+ self.final_layer_norm = BertLayerNorm(self.embed_dim)
119
+
120
+ def forward(
121
+ self, x, self_attn_mask=None, self_attn_padding_mask=None, need_head_weights=False
122
+ ):
123
+ residual = x
124
+ x = self.self_attn_layer_norm(x)
125
+ x, attn = self.self_attn(
126
+ query=x,
127
+ key=x,
128
+ value=x,
129
+ key_padding_mask=self_attn_padding_mask,
130
+ need_weights=True,
131
+ need_head_weights=need_head_weights,
132
+ attn_mask=self_attn_mask,
133
+ )
134
+ x = residual + x
135
+
136
+ residual = x
137
+ x = self.final_layer_norm(x)
138
+ x = gelu(self.fc1(x))
139
+ x = self.fc2(x)
140
+ x = residual + x
141
+
142
+ return x, attn
143
+
144
+
145
+ class AxialTransformerLayer(nn.Module):
146
+ """Implements an Axial MSA Transformer block."""
147
+
148
+ def __init__(
149
+ self,
150
+ embedding_dim: int = 768,
151
+ ffn_embedding_dim: int = 3072,
152
+ num_attention_heads: int = 8,
153
+ dropout: float = 0.1,
154
+ attention_dropout: float = 0.1,
155
+ activation_dropout: float = 0.1,
156
+ max_tokens_per_msa: int = 2**14,
157
+ ) -> None:
158
+ super().__init__()
159
+
160
+ # Initialize parameters
161
+ self.embedding_dim = embedding_dim
162
+ self.dropout_prob = dropout
163
+
164
+ row_self_attention = RowSelfAttention(
165
+ embedding_dim,
166
+ num_attention_heads,
167
+ dropout=dropout,
168
+ max_tokens_per_msa=max_tokens_per_msa,
169
+ )
170
+
171
+ column_self_attention = ColumnSelfAttention(
172
+ embedding_dim,
173
+ num_attention_heads,
174
+ dropout=dropout,
175
+ max_tokens_per_msa=max_tokens_per_msa,
176
+ )
177
+
178
+ feed_forward_layer = FeedForwardNetwork(
179
+ embedding_dim,
180
+ ffn_embedding_dim,
181
+ activation_dropout=activation_dropout,
182
+ max_tokens_per_msa=max_tokens_per_msa,
183
+ )
184
+
185
+ self.row_self_attention = self.build_residual(row_self_attention)
186
+ self.column_self_attention = self.build_residual(column_self_attention)
187
+ self.feed_forward_layer = self.build_residual(feed_forward_layer)
188
+
189
+ def build_residual(self, layer: nn.Module):
190
+ return NormalizedResidualBlock(
191
+ layer,
192
+ self.embedding_dim,
193
+ self.dropout_prob,
194
+ )
195
+
196
+ def forward(
197
+ self,
198
+ x: torch.Tensor,
199
+ self_attn_mask: Optional[torch.Tensor] = None,
200
+ self_attn_padding_mask: Optional[torch.Tensor] = None,
201
+ need_head_weights: bool = False,
202
+ ):
203
+ """
204
+ LayerNorm is applied either before or after the self-attention/ffn
205
+ modules similar to the original Transformer implementation.
206
+ """
207
+ x, row_attn = self.row_self_attention(
208
+ x,
209
+ self_attn_mask=self_attn_mask,
210
+ self_attn_padding_mask=self_attn_padding_mask,
211
+ )
212
+ x, column_attn = self.column_self_attention(
213
+ x,
214
+ self_attn_mask=self_attn_mask,
215
+ self_attn_padding_mask=self_attn_padding_mask,
216
+ )
217
+ x = self.feed_forward_layer(x)
218
+ if need_head_weights:
219
+ return x, column_attn, row_attn
220
+ else:
221
+ return x
222
+
223
+
224
+ class LearnedPositionalEmbedding(nn.Embedding):
225
+ """
226
+ This module learns positional embeddings up to a fixed maximum size.
227
+ Padding ids are ignored by either offsetting based on padding_idx
228
+ or by setting padding_idx to None and ensuring that the appropriate
229
+ position ids are passed to the forward function.
230
+ """
231
+
232
+ def __init__(self, num_embeddings: int, embedding_dim: int, padding_idx: int):
233
+ if padding_idx is not None:
234
+ num_embeddings_ = num_embeddings + padding_idx + 1
235
+ else:
236
+ num_embeddings_ = num_embeddings
237
+ super().__init__(num_embeddings_, embedding_dim, padding_idx)
238
+ self.max_positions = num_embeddings
239
+
240
+ def forward(self, input: torch.Tensor):
241
+ """Input is expected to be of size [bsz x seqlen]."""
242
+ if input.size(1) > self.max_positions:
243
+ raise ValueError(
244
+ f"Sequence length {input.size(1)} above maximum "
245
+ f" sequence length of {self.max_positions}"
246
+ )
247
+ mask = input.ne(self.padding_idx).int()
248
+ positions = (torch.cumsum(mask, dim=1).type_as(mask) * mask).long() + self.padding_idx
249
+ return F.embedding(
250
+ positions,
251
+ self.weight,
252
+ self.padding_idx,
253
+ self.max_norm,
254
+ self.norm_type,
255
+ self.scale_grad_by_freq,
256
+ self.sparse,
257
+ )
258
+
259
+
260
+ class SinusoidalPositionalEmbedding(nn.Module):
261
+ def __init__(self, embed_dim, padding_idx, learned=False):
262
+ super().__init__()
263
+ self.embed_dim = embed_dim
264
+ self.padding_idx = padding_idx
265
+ self.register_buffer("_float_tensor", torch.FloatTensor(1))
266
+ self.weights = None
267
+
268
+ def forward(self, x):
269
+ bsz, seq_len = x.shape
270
+ max_pos = self.padding_idx + 1 + seq_len
271
+ if self.weights is None or max_pos > self.weights.size(0):
272
+ self.weights = self.get_embedding(max_pos)
273
+ self.weights = self.weights.type_as(self._float_tensor)
274
+
275
+ positions = self.make_positions(x)
276
+ return self.weights.index_select(0, positions.view(-1)).view(bsz, seq_len, -1).detach()
277
+
278
+ def make_positions(self, x):
279
+ mask = x.ne(self.padding_idx)
280
+ range_buf = torch.arange(x.size(1), device=x.device).expand_as(x) + self.padding_idx + 1
281
+ positions = range_buf.expand_as(x)
282
+ return positions * mask.long() + self.padding_idx * (1 - mask.long())
283
+
284
+ def get_embedding(self, num_embeddings):
285
+ half_dim = self.embed_dim // 2
286
+ emb = math.log(10000) / (half_dim - 1)
287
+ emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)
288
+ emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(1) * emb.unsqueeze(0)
289
+ emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(num_embeddings, -1)
290
+ if self.embed_dim % 2 == 1:
291
+ # zero pad
292
+ emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)
293
+ if self.padding_idx is not None:
294
+ emb[self.padding_idx, :] = 0
295
+ return emb
296
+
297
+
298
+ class RobertaLMHead(nn.Module):
299
+ """Head for masked language modeling."""
300
+
301
+ def __init__(self, embed_dim, output_dim, weight):
302
+ super().__init__()
303
+ self.dense = nn.Linear(embed_dim, embed_dim)
304
+ self.layer_norm = ESM1bLayerNorm(embed_dim)
305
+ self.weight = weight
306
+ self.bias = nn.Parameter(torch.zeros(output_dim))
307
+
308
+ def forward(self, features):
309
+ x = self.dense(features)
310
+ x = gelu(x)
311
+ x = self.layer_norm(x)
312
+ # project back to size of vocabulary with bias
313
+ x = F.linear(x, self.weight) + self.bias
314
+ return x
315
+
316
+
317
+ class ContactPredictionHead(nn.Module):
318
+ """Performs symmetrization, apc, and computes a logistic regression on the output features"""
319
+
320
+ def __init__(
321
+ self,
322
+ in_features: int,
323
+ prepend_bos: bool,
324
+ append_eos: bool,
325
+ bias=True,
326
+ eos_idx: Optional[int] = None,
327
+ ):
328
+ super().__init__()
329
+ self.in_features = in_features
330
+ self.prepend_bos = prepend_bos
331
+ self.append_eos = append_eos
332
+ if append_eos and eos_idx is None:
333
+ raise ValueError("Using an alphabet with eos token, but no eos token was passed in.")
334
+ self.eos_idx = eos_idx
335
+ self.regression = nn.Linear(in_features, 1, bias)
336
+ self.activation = nn.Sigmoid()
337
+
338
+ def forward(self, tokens, attentions):
339
+ # remove eos token attentions
340
+ if self.append_eos:
341
+ eos_mask = tokens.ne(self.eos_idx).to(attentions)
342
+ eos_mask = eos_mask.unsqueeze(1) * eos_mask.unsqueeze(2)
343
+ attentions = attentions * eos_mask[:, None, None, :, :]
344
+ attentions = attentions[..., :-1, :-1]
345
+ # remove cls token attentions
346
+ if self.prepend_bos:
347
+ attentions = attentions[..., 1:, 1:]
348
+ batch_size, layers, heads, seqlen, _ = attentions.size()
349
+ attentions = attentions.view(batch_size, layers * heads, seqlen, seqlen)
350
+
351
+ # features: B x C x T x T
352
+ attentions = attentions.to(
353
+ self.regression.weight.device
354
+ ) # attentions always float32, may need to convert to float16
355
+ attentions = apc(symmetrize(attentions))
356
+ attentions = attentions.permute(0, 2, 3, 1)
357
+ return self.activation(self.regression(attentions).squeeze(3))
358
+
359
+
360
+ class NormalizedResidualBlock(nn.Module):
361
+ def __init__(
362
+ self,
363
+ layer: nn.Module,
364
+ embedding_dim: int,
365
+ dropout: float = 0.1,
366
+ ):
367
+ super().__init__()
368
+ self.embedding_dim = embedding_dim
369
+
370
+ self.layer = layer
371
+ self.dropout_module = nn.Dropout(
372
+ dropout,
373
+ )
374
+ self.layer_norm = ESM1bLayerNorm(self.embedding_dim)
375
+
376
+ def forward(self, x, *args, **kwargs):
377
+ residual = x
378
+ x = self.layer_norm(x)
379
+ outputs = self.layer(x, *args, **kwargs)
380
+ if isinstance(outputs, tuple):
381
+ x, *out = outputs
382
+ else:
383
+ x = outputs
384
+ out = None
385
+
386
+ x = self.dropout_module(x)
387
+ x = residual + x
388
+
389
+ if out is not None:
390
+ return (x,) + tuple(out)
391
+ else:
392
+ return x
393
+
394
+
395
+ class FeedForwardNetwork(nn.Module):
396
+ def __init__(
397
+ self,
398
+ embedding_dim: int,
399
+ ffn_embedding_dim: int,
400
+ activation_dropout: float = 0.1,
401
+ max_tokens_per_msa: int = 2**14,
402
+ ):
403
+ super().__init__()
404
+ self.embedding_dim = embedding_dim
405
+ self.ffn_embedding_dim = ffn_embedding_dim
406
+ self.max_tokens_per_msa = max_tokens_per_msa
407
+ self.activation_fn = nn.GELU()
408
+ self.activation_dropout_module = nn.Dropout(
409
+ activation_dropout,
410
+ )
411
+ self.fc1 = nn.Linear(embedding_dim, ffn_embedding_dim)
412
+ self.fc2 = nn.Linear(ffn_embedding_dim, embedding_dim)
413
+
414
+ def forward(self, x):
415
+ x = self.activation_fn(self.fc1(x))
416
+ x = self.activation_dropout_module(x)
417
+ x = self.fc2(x)
418
+ return x
esm/esm/multihead_attention.py ADDED
@@ -0,0 +1,508 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import math
7
+ from typing import Dict, Optional, Tuple
8
+
9
+ import torch
10
+ import torch.nn.functional as F
11
+ from torch import Tensor, nn
12
+ from torch.nn import Parameter
13
+ from esm.rotary_embedding import RotaryEmbedding
14
+
15
+ import uuid
16
+
17
+
18
+ def utils_softmax(x, dim: int, onnx_trace: bool = False):
19
+ if onnx_trace:
20
+ return F.softmax(x.float(), dim=dim)
21
+ else:
22
+ return F.softmax(x, dim=dim, dtype=torch.float32)
23
+
24
+
25
+ class FairseqIncrementalState(object):
26
+ def __init__(self, *args, **kwargs):
27
+ super().__init__(*args, **kwargs)
28
+ self.init_incremental_state()
29
+
30
+ def init_incremental_state(self):
31
+ self._incremental_state_id = str(uuid.uuid4())
32
+
33
+ def _get_full_incremental_state_key(self, key: str) -> str:
34
+ return "{}.{}".format(self._incremental_state_id, key)
35
+
36
+ def get_incremental_state(
37
+ self,
38
+ incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]],
39
+ key: str,
40
+ ) -> Optional[Dict[str, Optional[Tensor]]]:
41
+ """Helper for getting incremental state for an nn.Module."""
42
+ full_key = self._get_full_incremental_state_key(key)
43
+ if incremental_state is None or full_key not in incremental_state:
44
+ return None
45
+ return incremental_state[full_key]
46
+
47
+ def set_incremental_state(
48
+ self,
49
+ incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]],
50
+ key: str,
51
+ value: Dict[str, Optional[Tensor]],
52
+ ) -> Optional[Dict[str, Dict[str, Optional[Tensor]]]]:
53
+ """Helper for setting incremental state for an nn.Module."""
54
+ if incremental_state is not None:
55
+ full_key = self._get_full_incremental_state_key(key)
56
+ incremental_state[full_key] = value
57
+ return incremental_state
58
+
59
+
60
+ def with_incremental_state(cls):
61
+ cls.__bases__ = (FairseqIncrementalState,) + tuple(
62
+ b for b in cls.__bases__ if b != FairseqIncrementalState
63
+ )
64
+ return cls
65
+
66
+
67
+ @with_incremental_state
68
+ class MultiheadAttention(nn.Module):
69
+ """Multi-headed attention.
70
+
71
+ See "Attention Is All You Need" for more details.
72
+ """
73
+
74
+ def __init__(
75
+ self,
76
+ embed_dim,
77
+ num_heads,
78
+ kdim=None,
79
+ vdim=None,
80
+ dropout=0.0,
81
+ bias=True,
82
+ add_bias_kv: bool = False,
83
+ add_zero_attn: bool = False,
84
+ self_attention: bool = False,
85
+ encoder_decoder_attention: bool = False,
86
+ use_rotary_embeddings: bool = False,
87
+ ):
88
+ super().__init__()
89
+ self.embed_dim = embed_dim
90
+ self.kdim = kdim if kdim is not None else embed_dim
91
+ self.vdim = vdim if vdim is not None else embed_dim
92
+ self.qkv_same_dim = self.kdim == embed_dim and self.vdim == embed_dim
93
+
94
+ self.num_heads = num_heads
95
+ self.dropout = dropout
96
+ self.head_dim = embed_dim // num_heads
97
+ assert (
98
+ self.head_dim * num_heads == self.embed_dim
99
+ ), "embed_dim must be divisible by num_heads"
100
+ self.scaling = self.head_dim**-0.5
101
+
102
+ self.self_attention = self_attention
103
+ self.encoder_decoder_attention = encoder_decoder_attention
104
+
105
+ assert not self.self_attention or self.qkv_same_dim, (
106
+ "Self-attention requires query, key and " "value to be of the same size"
107
+ )
108
+
109
+ self.k_proj = nn.Linear(self.kdim, embed_dim, bias=bias)
110
+ self.v_proj = nn.Linear(self.vdim, embed_dim, bias=bias)
111
+ self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
112
+
113
+ self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
114
+
115
+ if add_bias_kv:
116
+ self.bias_k = Parameter(torch.Tensor(1, 1, embed_dim))
117
+ self.bias_v = Parameter(torch.Tensor(1, 1, embed_dim))
118
+ else:
119
+ self.bias_k = self.bias_v = None
120
+
121
+ self.add_zero_attn = add_zero_attn
122
+
123
+ self.reset_parameters()
124
+
125
+ self.onnx_trace = False
126
+ self.rot_emb = None
127
+ if use_rotary_embeddings:
128
+ self.rot_emb = RotaryEmbedding(dim=self.head_dim)
129
+
130
+ self.enable_torch_version = False
131
+ if hasattr(F, "multi_head_attention_forward"):
132
+ self.enable_torch_version = True
133
+ else:
134
+ self.enable_torch_version = False
135
+
136
+ def prepare_for_onnx_export_(self):
137
+ self.onnx_trace = True
138
+
139
+ def reset_parameters(self):
140
+ if self.qkv_same_dim:
141
+ # Empirically observed the convergence to be much better with
142
+ # the scaled initialization
143
+ nn.init.xavier_uniform_(self.k_proj.weight, gain=1 / math.sqrt(2))
144
+ nn.init.xavier_uniform_(self.v_proj.weight, gain=1 / math.sqrt(2))
145
+ nn.init.xavier_uniform_(self.q_proj.weight, gain=1 / math.sqrt(2))
146
+ else:
147
+ nn.init.xavier_uniform_(self.k_proj.weight)
148
+ nn.init.xavier_uniform_(self.v_proj.weight)
149
+ nn.init.xavier_uniform_(self.q_proj.weight)
150
+
151
+ nn.init.xavier_uniform_(self.out_proj.weight)
152
+ if self.out_proj.bias is not None:
153
+ nn.init.constant_(self.out_proj.bias, 0.0)
154
+ if self.bias_k is not None:
155
+ nn.init.xavier_normal_(self.bias_k)
156
+ if self.bias_v is not None:
157
+ nn.init.xavier_normal_(self.bias_v)
158
+
159
+ def forward(
160
+ self,
161
+ query,
162
+ key: Optional[Tensor],
163
+ value: Optional[Tensor],
164
+ key_padding_mask: Optional[Tensor] = None,
165
+ incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
166
+ need_weights: bool = True,
167
+ static_kv: bool = False,
168
+ attn_mask: Optional[Tensor] = None,
169
+ before_softmax: bool = False,
170
+ need_head_weights: bool = False,
171
+ ) -> Tuple[Tensor, Optional[Tensor]]:
172
+ """Input shape: Time x Batch x Channel
173
+
174
+ Args:
175
+ key_padding_mask (ByteTensor, optional): mask to exclude
176
+ keys that are pads, of shape `(batch, src_len)`, where
177
+ padding elements are indicated by 1s.
178
+ need_weights (bool, optional): return the attention weights,
179
+ averaged over heads (default: False).
180
+ attn_mask (ByteTensor, optional): typically used to
181
+ implement causal attention, where the mask prevents the
182
+ attention from looking forward in time (default: None).
183
+ before_softmax (bool, optional): return the raw attention
184
+ weights and values before the attention softmax.
185
+ need_head_weights (bool, optional): return the attention
186
+ weights for each head. Implies *need_weights*. Default:
187
+ return the average attention weights over all heads.
188
+ """
189
+ if need_head_weights:
190
+ need_weights = True
191
+
192
+ tgt_len, bsz, embed_dim = query.size()
193
+ assert embed_dim == self.embed_dim
194
+ assert list(query.size()) == [tgt_len, bsz, embed_dim]
195
+
196
+ if (
197
+ not self.rot_emb
198
+ and self.enable_torch_version
199
+ and not self.onnx_trace
200
+ and incremental_state is None
201
+ and not static_kv
202
+ # A workaround for quantization to work. Otherwise JIT compilation
203
+ # treats bias in linear module as method.
204
+ and not torch.jit.is_scripting()
205
+ and not need_head_weights
206
+ ):
207
+ assert key is not None and value is not None
208
+ return F.multi_head_attention_forward(
209
+ query,
210
+ key,
211
+ value,
212
+ self.embed_dim,
213
+ self.num_heads,
214
+ torch.empty([0]),
215
+ torch.cat((self.q_proj.bias, self.k_proj.bias, self.v_proj.bias)),
216
+ self.bias_k,
217
+ self.bias_v,
218
+ self.add_zero_attn,
219
+ self.dropout,
220
+ self.out_proj.weight,
221
+ self.out_proj.bias,
222
+ self.training,
223
+ key_padding_mask,
224
+ need_weights,
225
+ attn_mask,
226
+ use_separate_proj_weight=True,
227
+ q_proj_weight=self.q_proj.weight,
228
+ k_proj_weight=self.k_proj.weight,
229
+ v_proj_weight=self.v_proj.weight,
230
+ )
231
+ if incremental_state is not None:
232
+ saved_state = self._get_input_buffer(incremental_state)
233
+ if saved_state is not None and "prev_key" in saved_state:
234
+ # previous time steps are cached - no need to recompute
235
+ # key and value if they are static
236
+ if static_kv:
237
+ assert self.encoder_decoder_attention and not self.self_attention
238
+ key = value = None
239
+ else:
240
+ saved_state = None
241
+
242
+ if self.self_attention:
243
+ q = self.q_proj(query)
244
+ k = self.k_proj(query)
245
+ v = self.v_proj(query)
246
+ elif self.encoder_decoder_attention:
247
+ # encoder-decoder attention
248
+ q = self.q_proj(query)
249
+ if key is None:
250
+ assert value is None
251
+ k = v = None
252
+ else:
253
+ k = self.k_proj(key)
254
+ v = self.v_proj(key)
255
+
256
+ else:
257
+ assert key is not None and value is not None
258
+ q = self.q_proj(query)
259
+ k = self.k_proj(key)
260
+ v = self.v_proj(value)
261
+ q *= self.scaling
262
+
263
+ if self.bias_k is not None:
264
+ assert self.bias_v is not None
265
+ k = torch.cat([k, self.bias_k.repeat(1, bsz, 1)])
266
+ v = torch.cat([v, self.bias_v.repeat(1, bsz, 1)])
267
+ if attn_mask is not None:
268
+ attn_mask = torch.cat(
269
+ [attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1
270
+ )
271
+ if key_padding_mask is not None:
272
+ key_padding_mask = torch.cat(
273
+ [
274
+ key_padding_mask,
275
+ key_padding_mask.new_zeros(key_padding_mask.size(0), 1),
276
+ ],
277
+ dim=1,
278
+ )
279
+
280
+ q = q.contiguous().view(tgt_len, bsz * self.num_heads, self.head_dim).transpose(0, 1)
281
+ if k is not None:
282
+ k = k.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1)
283
+ if v is not None:
284
+ v = v.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1)
285
+
286
+ if saved_state is not None:
287
+ # saved states are stored with shape (bsz, num_heads, seq_len, head_dim)
288
+ if "prev_key" in saved_state:
289
+ _prev_key = saved_state["prev_key"]
290
+ assert _prev_key is not None
291
+ prev_key = _prev_key.view(bsz * self.num_heads, -1, self.head_dim)
292
+ if static_kv:
293
+ k = prev_key
294
+ else:
295
+ assert k is not None
296
+ k = torch.cat([prev_key, k], dim=1)
297
+ if "prev_value" in saved_state:
298
+ _prev_value = saved_state["prev_value"]
299
+ assert _prev_value is not None
300
+ prev_value = _prev_value.view(bsz * self.num_heads, -1, self.head_dim)
301
+ if static_kv:
302
+ v = prev_value
303
+ else:
304
+ assert v is not None
305
+ v = torch.cat([prev_value, v], dim=1)
306
+ prev_key_padding_mask: Optional[Tensor] = None
307
+ if "prev_key_padding_mask" in saved_state:
308
+ prev_key_padding_mask = saved_state["prev_key_padding_mask"]
309
+ assert k is not None and v is not None
310
+ key_padding_mask = MultiheadAttention._append_prev_key_padding_mask(
311
+ key_padding_mask=key_padding_mask,
312
+ prev_key_padding_mask=prev_key_padding_mask,
313
+ batch_size=bsz,
314
+ src_len=k.size(1),
315
+ static_kv=static_kv,
316
+ )
317
+
318
+ saved_state["prev_key"] = k.view(bsz, self.num_heads, -1, self.head_dim)
319
+ saved_state["prev_value"] = v.view(bsz, self.num_heads, -1, self.head_dim)
320
+ saved_state["prev_key_padding_mask"] = key_padding_mask
321
+ # In this branch incremental_state is never None
322
+ assert incremental_state is not None
323
+ incremental_state = self._set_input_buffer(incremental_state, saved_state)
324
+ assert k is not None
325
+ src_len = k.size(1)
326
+
327
+ # This is part of a workaround to get around fork/join parallelism
328
+ # not supporting Optional types.
329
+ if key_padding_mask is not None and key_padding_mask.dim() == 0:
330
+ key_padding_mask = None
331
+
332
+ if key_padding_mask is not None:
333
+ assert key_padding_mask.size(0) == bsz
334
+ assert key_padding_mask.size(1) == src_len
335
+
336
+ if self.add_zero_attn:
337
+ assert v is not None
338
+ src_len += 1
339
+ k = torch.cat([k, k.new_zeros((k.size(0), 1) + k.size()[2:])], dim=1)
340
+ v = torch.cat([v, v.new_zeros((v.size(0), 1) + v.size()[2:])], dim=1)
341
+ if attn_mask is not None:
342
+ attn_mask = torch.cat(
343
+ [attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1
344
+ )
345
+ if key_padding_mask is not None:
346
+ key_padding_mask = torch.cat(
347
+ [
348
+ key_padding_mask,
349
+ torch.zeros(key_padding_mask.size(0), 1).type_as(key_padding_mask),
350
+ ],
351
+ dim=1,
352
+ )
353
+
354
+ if self.rot_emb:
355
+ q, k = self.rot_emb(q, k)
356
+
357
+ attn_weights = torch.bmm(q, k.transpose(1, 2))
358
+ attn_weights = MultiheadAttention.apply_sparse_mask(attn_weights, tgt_len, src_len, bsz)
359
+
360
+ assert list(attn_weights.size()) == [bsz * self.num_heads, tgt_len, src_len]
361
+
362
+ if attn_mask is not None:
363
+ attn_mask = attn_mask.unsqueeze(0)
364
+ if self.onnx_trace:
365
+ attn_mask = attn_mask.repeat(attn_weights.size(0), 1, 1)
366
+ attn_weights += attn_mask
367
+
368
+ if key_padding_mask is not None:
369
+ # don't attend to padding symbols
370
+ attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
371
+ attn_weights = attn_weights.masked_fill(
372
+ key_padding_mask.unsqueeze(1).unsqueeze(2).to(torch.bool), float("-inf")
373
+ )
374
+ attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
375
+
376
+ if before_softmax:
377
+ return attn_weights, v
378
+
379
+ attn_weights_float = utils_softmax(attn_weights, dim=-1, onnx_trace=self.onnx_trace)
380
+ attn_weights = attn_weights_float.type_as(attn_weights)
381
+ attn_probs = F.dropout(
382
+ attn_weights_float.type_as(attn_weights),
383
+ p=self.dropout,
384
+ training=self.training,
385
+ )
386
+ assert v is not None
387
+ attn = torch.bmm(attn_probs, v)
388
+ assert list(attn.size()) == [bsz * self.num_heads, tgt_len, self.head_dim]
389
+ if self.onnx_trace and attn.size(1) == 1:
390
+ # when ONNX tracing a single decoder step (sequence length == 1)
391
+ # the transpose is a no-op copy before view, thus unnecessary
392
+ attn = attn.contiguous().view(tgt_len, bsz, embed_dim)
393
+ else:
394
+ attn = attn.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim)
395
+ attn = self.out_proj(attn)
396
+ attn_weights: Optional[Tensor] = None
397
+ if need_weights:
398
+ attn_weights = attn_weights_float.view(
399
+ bsz, self.num_heads, tgt_len, src_len
400
+ ).type_as(attn).transpose(1, 0)
401
+ if not need_head_weights:
402
+ # average attention weights over heads
403
+ attn_weights = attn_weights.mean(dim=0)
404
+
405
+ return attn, attn_weights
406
+
407
+ @staticmethod
408
+ def _append_prev_key_padding_mask(
409
+ key_padding_mask: Optional[Tensor],
410
+ prev_key_padding_mask: Optional[Tensor],
411
+ batch_size: int,
412
+ src_len: int,
413
+ static_kv: bool,
414
+ ) -> Optional[Tensor]:
415
+ # saved key padding masks have shape (bsz, seq_len)
416
+ if prev_key_padding_mask is not None and static_kv:
417
+ new_key_padding_mask = prev_key_padding_mask
418
+ elif prev_key_padding_mask is not None and key_padding_mask is not None:
419
+ new_key_padding_mask = torch.cat(
420
+ [prev_key_padding_mask.float(), key_padding_mask.float()], dim=1
421
+ )
422
+ # During incremental decoding, as the padding token enters and
423
+ # leaves the frame, there will be a time when prev or current
424
+ # is None
425
+ elif prev_key_padding_mask is not None:
426
+ filler = torch.zeros(
427
+ (batch_size, src_len - prev_key_padding_mask.size(1)),
428
+ device=prev_key_padding_mask.device,
429
+ )
430
+ new_key_padding_mask = torch.cat(
431
+ [prev_key_padding_mask.float(), filler.float()], dim=1
432
+ )
433
+ elif key_padding_mask is not None:
434
+ filler = torch.zeros(
435
+ (batch_size, src_len - key_padding_mask.size(1)),
436
+ device=key_padding_mask.device,
437
+ )
438
+ new_key_padding_mask = torch.cat([filler.float(), key_padding_mask.float()], dim=1)
439
+ else:
440
+ new_key_padding_mask = prev_key_padding_mask
441
+ return new_key_padding_mask
442
+
443
+ @torch.jit.export
444
+ def reorder_incremental_state(
445
+ self, incremental_state: Dict[str, Dict[str, Optional[Tensor]]], new_order: Tensor
446
+ ):
447
+ """Reorder buffered internal state (for incremental generation)."""
448
+ input_buffer = self._get_input_buffer(incremental_state)
449
+ if input_buffer is not None:
450
+ for k in input_buffer.keys():
451
+ input_buffer_k = input_buffer[k]
452
+ if input_buffer_k is not None:
453
+ if self.encoder_decoder_attention and input_buffer_k.size(0) == new_order.size(
454
+ 0
455
+ ):
456
+ break
457
+ input_buffer[k] = input_buffer_k.index_select(0, new_order)
458
+ incremental_state = self._set_input_buffer(incremental_state, input_buffer)
459
+ return incremental_state
460
+
461
+ def _get_input_buffer(
462
+ self, incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]]
463
+ ) -> Dict[str, Optional[Tensor]]:
464
+ result = self.get_incremental_state(incremental_state, "attn_state")
465
+ if result is not None:
466
+ return result
467
+ else:
468
+ empty_result: Dict[str, Optional[Tensor]] = {}
469
+ return empty_result
470
+
471
+ def _set_input_buffer(
472
+ self,
473
+ incremental_state: Dict[str, Dict[str, Optional[Tensor]]],
474
+ buffer: Dict[str, Optional[Tensor]],
475
+ ):
476
+ return self.set_incremental_state(incremental_state, "attn_state", buffer)
477
+
478
+ def apply_sparse_mask(attn_weights, tgt_len: int, src_len: int, bsz: int):
479
+ return attn_weights
480
+
481
+ def upgrade_state_dict_named(self, state_dict, name):
482
+ prefix = name + "." if name != "" else ""
483
+ items_to_add = {}
484
+ keys_to_remove = []
485
+ for k in state_dict.keys():
486
+ if k.endswith(prefix + "in_proj_weight"):
487
+ # in_proj_weight used to be q + k + v with same dimensions
488
+ dim = int(state_dict[k].shape[0] / 3)
489
+ items_to_add[prefix + "q_proj.weight"] = state_dict[k][:dim]
490
+ items_to_add[prefix + "k_proj.weight"] = state_dict[k][dim : 2 * dim]
491
+ items_to_add[prefix + "v_proj.weight"] = state_dict[k][2 * dim :]
492
+
493
+ keys_to_remove.append(k)
494
+
495
+ k_bias = prefix + "in_proj_bias"
496
+ if k_bias in state_dict.keys():
497
+ dim = int(state_dict[k].shape[0] / 3)
498
+ items_to_add[prefix + "q_proj.bias"] = state_dict[k_bias][:dim]
499
+ items_to_add[prefix + "k_proj.bias"] = state_dict[k_bias][dim : 2 * dim]
500
+ items_to_add[prefix + "v_proj.bias"] = state_dict[k_bias][2 * dim :]
501
+
502
+ keys_to_remove.append(prefix + "in_proj_bias")
503
+
504
+ for k in keys_to_remove:
505
+ del state_dict[k]
506
+
507
+ for key, value in items_to_add.items():
508
+ state_dict[key] = value
esm/esm/pretrained.py ADDED
@@ -0,0 +1,397 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import re
7
+ import urllib
8
+ import warnings
9
+ from argparse import Namespace
10
+ from pathlib import Path
11
+
12
+ import torch
13
+
14
+ import esm
15
+ from esm.model.esm2 import ESM2
16
+
17
+
18
+ def _has_regression_weights(model_name):
19
+ """Return whether we expect / require regression weights;
20
+ Right now that is all models except ESM-1v and ESM-IF"""
21
+ return not ("esm1v" in model_name or "esm_if" in model_name)
22
+
23
+
24
+ def load_model_and_alphabet(model_name):
25
+ if model_name.endswith(".pt"): # treat as filepath
26
+ return load_model_and_alphabet_local(model_name)
27
+ else:
28
+ return load_model_and_alphabet_hub(model_name)
29
+
30
+
31
+ def load_hub_workaround(url):
32
+ try:
33
+ data = torch.hub.load_state_dict_from_url(url, progress=False, map_location="cpu")
34
+ except RuntimeError:
35
+ # Pytorch version issue - see https://github.com/pytorch/pytorch/issues/43106
36
+ fn = Path(url).name
37
+ data = torch.load(
38
+ f"{torch.hub.get_dir()}/checkpoints/{fn}",
39
+ map_location="cpu",
40
+ )
41
+ except urllib.error.HTTPError as e:
42
+ raise Exception(f"Could not load {url}, check if you specified a correct model name?")
43
+ return data
44
+
45
+
46
+ def load_regression_hub(model_name):
47
+ url = f"https://dl.fbaipublicfiles.com/fair-esm/regression/{model_name}-contact-regression.pt"
48
+ regression_data = load_hub_workaround(url)
49
+ return regression_data
50
+
51
+
52
+ def _download_model_and_regression_data(model_name):
53
+ url = f"https://dl.fbaipublicfiles.com/fair-esm/models/{model_name}.pt"
54
+ model_data = load_hub_workaround(url)
55
+ if _has_regression_weights(model_name):
56
+ regression_data = load_regression_hub(model_name)
57
+ else:
58
+ regression_data = None
59
+ return model_data, regression_data
60
+
61
+
62
+ def load_model_and_alphabet_hub(model_name):
63
+ model_data, regression_data = _download_model_and_regression_data(model_name)
64
+ return load_model_and_alphabet_core(model_name, model_data, regression_data)
65
+
66
+
67
+ def load_model_and_alphabet_local(model_location):
68
+ """Load from local path. The regression weights need to be co-located"""
69
+ model_location = Path(model_location)
70
+ model_data = torch.load(str(model_location), map_location="cpu")
71
+ model_name = model_location.stem
72
+ if _has_regression_weights(model_name):
73
+ regression_location = str(model_location.with_suffix("")) + "-contact-regression.pt"
74
+ regression_data = torch.load(regression_location, map_location="cpu")
75
+ else:
76
+ regression_data = None
77
+ return load_model_and_alphabet_core(model_name, model_data, regression_data)
78
+
79
+
+ def has_emb_layer_norm_before(model_state):
+     """Determine whether layer norm needs to be applied before the encoder"""
+     return any(k.startswith("emb_layer_norm_before") for k, param in model_state.items())
+
+
+ def _load_model_and_alphabet_core_v1(model_data):
+     import esm  # since esm.inverse_folding is imported below, you actually have to re-import esm here
+
+     alphabet = esm.Alphabet.from_architecture(model_data["args"].arch)
+
+     if model_data["args"].arch == "roberta_large":
+         # upgrade state dict
+         pra = lambda s: "".join(s.split("encoder_")[1:] if "encoder" in s else s)
+         prs1 = lambda s: "".join(s.split("encoder.")[1:] if "encoder" in s else s)
+         prs2 = lambda s: "".join(
+             s.split("sentence_encoder.")[1:] if "sentence_encoder" in s else s
+         )
+         model_args = {pra(arg[0]): arg[1] for arg in vars(model_data["args"]).items()}
+         model_state = {prs1(prs2(arg[0])): arg[1] for arg in model_data["model"].items()}
+         model_state["embed_tokens.weight"][alphabet.mask_idx].zero_()  # For token drop
+         model_args["emb_layer_norm_before"] = has_emb_layer_norm_before(model_state)
+         model_type = esm.ProteinBertModel
+
+     elif model_data["args"].arch == "protein_bert_base":
+
+         # upgrade state dict
+         pra = lambda s: "".join(s.split("decoder_")[1:] if "decoder" in s else s)
+         prs = lambda s: "".join(s.split("decoder.")[1:] if "decoder" in s else s)
+         model_args = {pra(arg[0]): arg[1] for arg in vars(model_data["args"]).items()}
+         model_state = {prs(arg[0]): arg[1] for arg in model_data["model"].items()}
+         model_type = esm.ProteinBertModel
+     elif model_data["args"].arch == "msa_transformer":
+
+         # upgrade state dict
+         pra = lambda s: "".join(s.split("encoder_")[1:] if "encoder" in s else s)
+         prs1 = lambda s: "".join(s.split("encoder.")[1:] if "encoder" in s else s)
+         prs2 = lambda s: "".join(
+             s.split("sentence_encoder.")[1:] if "sentence_encoder" in s else s
+         )
+         prs3 = lambda s: s.replace("row", "column") if "row" in s else s.replace("column", "row")
+         model_args = {pra(arg[0]): arg[1] for arg in vars(model_data["args"]).items()}
+         model_state = {prs1(prs2(prs3(arg[0]))): arg[1] for arg in model_data["model"].items()}
+         if model_args.get("embed_positions_msa", False):
+             emb_dim = model_state["msa_position_embedding"].size(-1)
+             model_args["embed_positions_msa_dim"] = emb_dim  # initial release, bug: emb_dim==1
+
+         model_type = esm.MSATransformer
+
+     elif "invariant_gvp" in model_data["args"].arch:
+         import esm.inverse_folding
+
+         model_type = esm.inverse_folding.gvp_transformer.GVPTransformerModel
+         model_args = vars(model_data["args"])  # convert Namespace -> dict
+
+         def update_name(s):
+             # Map the module names in checkpoints trained with internal code to
+             # the updated module names in open-source code
+             s = s.replace("W_v", "embed_graph.embed_node")
+             s = s.replace("W_e", "embed_graph.embed_edge")
+             s = s.replace("embed_scores.0", "embed_confidence")
+             s = s.replace("embed_score.", "embed_graph.embed_confidence.")
+             s = s.replace("seq_logits_projection.", "")
+             s = s.replace("embed_ingraham_features", "embed_dihedrals")
+             s = s.replace("embed_gvp_in_local_frame.0", "embed_gvp_output")
+             s = s.replace("embed_features_in_local_frame.0", "embed_gvp_input_features")
+             return s
+
+         model_state = {
+             update_name(sname): svalue
+             for sname, svalue in model_data["model"].items()
+             if "version" not in sname
+         }
+
+     else:
+         raise ValueError("Unknown architecture selected")
+
+     model = model_type(
+         Namespace(**model_args),
+         alphabet,
+     )
+
+     return model, alphabet, model_state
+
+
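The "upgrade state dict" lambdas above are terse; a quick standalone sketch of what `prs1` does, using made-up key names:

    prs1 = lambda s: "".join(s.split("encoder.")[1:] if "encoder" in s else s)
    print(prs1("encoder.embed_tokens.weight"))  # -> "embed_tokens.weight"
    print(prs1("embed_tokens.weight"))  # no "encoder" substring, so unchanged
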
+ def _load_model_and_alphabet_core_v2(model_data):
+     def upgrade_state_dict(state_dict):
+         """Removes prefixes 'model.encoder.sentence_encoder.' and 'model.encoder.'."""
+         prefixes = ["encoder.sentence_encoder.", "encoder."]
+         pattern = re.compile("^" + "|".join(prefixes))
+         state_dict = {pattern.sub("", name): param for name, param in state_dict.items()}
+         return state_dict
+
+     cfg = model_data["cfg"]["model"]
+     state_dict = model_data["model"]
+     state_dict = upgrade_state_dict(state_dict)
+     alphabet = esm.data.Alphabet.from_architecture("ESM-1b")
+     model = ESM2(
+         num_layers=cfg.encoder_layers,
+         embed_dim=cfg.encoder_embed_dim,
+         attention_heads=cfg.encoder_attention_heads,
+         alphabet=alphabet,
+         token_dropout=cfg.token_dropout,
+     )
+     return model, alphabet, state_dict
+
+
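For orientation, a standalone sketch (with made-up key names) of how the `upgrade_state_dict` helper above rewrites v2 checkpoint keys:

    import re

    prefixes = ["encoder.sentence_encoder.", "encoder."]
    pattern = re.compile("^" + "|".join(prefixes))
    print(pattern.sub("", "encoder.sentence_encoder.layers.0.self_attn.k_proj.weight"))
    # -> "layers.0.self_attn.k_proj.weight"
    print(pattern.sub("", "encoder.lm_head.dense.weight"))
    # -> "lm_head.dense.weight"
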
+ def load_model_and_alphabet_core(model_name, model_data, regression_data=None):
+     if regression_data is not None:
+         model_data["model"].update(regression_data["model"])
+
+     if model_name.startswith("esm2"):
+         model, alphabet, model_state = _load_model_and_alphabet_core_v2(model_data)
+     else:
+         model, alphabet, model_state = _load_model_and_alphabet_core_v1(model_data)
+
+     expected_keys = set(model.state_dict().keys())
+     found_keys = set(model_state.keys())
+
+     if regression_data is None:
+         expected_missing = {"contact_head.regression.weight", "contact_head.regression.bias"}
+         error_msgs = []
+         missing = (expected_keys - found_keys) - expected_missing
+         if missing:
+             error_msgs.append(f"Missing key(s) in state_dict: {missing}.")
+         unexpected = found_keys - expected_keys
+         if unexpected:
+             error_msgs.append(f"Unexpected key(s) in state_dict: {unexpected}.")
+
+         if error_msgs:
+             raise RuntimeError(
+                 "Error(s) in loading state_dict for {}:\n\t{}".format(
+                     model.__class__.__name__, "\n\t".join(error_msgs)
+                 )
+             )
+         if expected_missing - found_keys:
+             warnings.warn(
+                 "Regression weights not found, predicting contacts will not produce correct results."
+             )
+
+     model.load_state_dict(model_state, strict=regression_data is not None)
+
+     return model, alphabet
+
+
+ def esm1_t34_670M_UR50S():
+     """34 layer transformer model with 670M params, trained on Uniref50 Sparse.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1_t34_670M_UR50S")
+
+
+ def esm1_t34_670M_UR50D():
+     """34 layer transformer model with 670M params, trained on Uniref50 Dense.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1_t34_670M_UR50D")
+
+
+ def esm1_t34_670M_UR100():
+     """34 layer transformer model with 670M params, trained on Uniref100.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1_t34_670M_UR100")
+
+
+ def esm1_t12_85M_UR50S():
+     """12 layer transformer model with 85M params, trained on Uniref50 Sparse.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1_t12_85M_UR50S")
+
+
+ def esm1_t6_43M_UR50S():
+     """6 layer transformer model with 43M params, trained on Uniref50 Sparse.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1_t6_43M_UR50S")
+
+
+ def esm1b_t33_650M_UR50S():
+     """33 layer transformer model with 650M params, trained on Uniref50 Sparse.
+     This is our best performing model, which will be described in a future publication.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1b_t33_650M_UR50S")
+
+
+ def esm_msa1_t12_100M_UR50S():
+     warnings.warn(
+         "This model had a minor bug in the positional embeddings, "
+         "please use ESM-MSA-1b: esm.pretrained.esm_msa1b_t12_100M_UR50S()",
+     )
+     return load_model_and_alphabet_hub("esm_msa1_t12_100M_UR50S")
+
+
+ def esm_msa1b_t12_100M_UR50S():
+     return load_model_and_alphabet_hub("esm_msa1b_t12_100M_UR50S")
+
+
+ def esm1v_t33_650M_UR90S():
+     """33 layer transformer model with 650M params, trained on Uniref90.
+     This is model 1 of a 5 model ensemble.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1v_t33_650M_UR90S_1")
+
+
+ def esm1v_t33_650M_UR90S_1():
+     """33 layer transformer model with 650M params, trained on Uniref90.
+     This is model 1 of a 5 model ensemble.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1v_t33_650M_UR90S_1")
+
+
+ def esm1v_t33_650M_UR90S_2():
+     """33 layer transformer model with 650M params, trained on Uniref90.
+     This is model 2 of a 5 model ensemble.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1v_t33_650M_UR90S_2")
+
+
+ def esm1v_t33_650M_UR90S_3():
+     """33 layer transformer model with 650M params, trained on Uniref90.
+     This is model 3 of a 5 model ensemble.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1v_t33_650M_UR90S_3")
+
+
+ def esm1v_t33_650M_UR90S_4():
+     """33 layer transformer model with 650M params, trained on Uniref90.
+     This is model 4 of a 5 model ensemble.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1v_t33_650M_UR90S_4")
+
+
+ def esm1v_t33_650M_UR90S_5():
+     """33 layer transformer model with 650M params, trained on Uniref90.
+     This is model 5 of a 5 model ensemble.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm1v_t33_650M_UR90S_5")
+
+
+ def esm_if1_gvp4_t16_142M_UR50():
+     """Inverse folding model with 142M params, with 4 GVP-GNN layers, 8
+     Transformer encoder layers, and 8 Transformer decoder layers, trained on
+     CATH structures and 12 million AlphaFold2-predicted structures from UniRef50
+     sequences.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm_if1_gvp4_t16_142M_UR50")
+
+
+ def esm2_t6_8M_UR50D():
+     """6 layer ESM-2 model with 8M params, trained on UniRef50.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm2_t6_8M_UR50D")
+
+
+ def esm2_t12_35M_UR50D():
+     """12 layer ESM-2 model with 35M params, trained on UniRef50.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm2_t12_35M_UR50D")
+
+
+ def esm2_t30_150M_UR50D():
+     """30 layer ESM-2 model with 150M params, trained on UniRef50.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm2_t30_150M_UR50D")
+
+
+ def esm2_t33_650M_UR50D():
+     """33 layer ESM-2 model with 650M params, trained on UniRef50.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm2_t33_650M_UR50D")
+
+
+ def esm2_t36_3B_UR50D():
+     """36 layer ESM-2 model with 3B params, trained on UniRef50.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm2_t36_3B_UR50D")
+
+
+ def esm2_t48_15B_UR50D():
+     """48 layer ESM-2 model with 15B params, trained on UniRef50.
+     If you run out of memory while loading this model, please refer to the README
+     for how to employ FSDP and ZeRO CPU offloading.
+
+     Returns a tuple of (Model, Alphabet).
+     """
+     return load_model_and_alphabet_hub("esm2_t48_15B_UR50D")
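These named constructors are the public entry points. A hedged end-to-end sketch follows; the sequence is arbitrary, and layer 33 is assumed to be the final layer of this particular model, with embedding width 1280:

    import torch
    import esm

    # Download (or load from cache) a pretrained ESM-2 model plus its alphabet.
    model, alphabet = esm.pretrained.esm2_t33_650M_UR50D()
    batch_converter = alphabet.get_batch_converter()
    model.eval()

    labels, strs, toks = batch_converter([("protein1", "MKTVRQERLKSIVRILERSKEPVSGAQ")])
    with torch.no_grad():
        out = model(toks, repr_layers=[33], return_contacts=True)
    per_token = out["representations"][33]  # (batch, tokens incl. BOS/EOS, 1280)
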
esm/esm/rotary_embedding.py ADDED
@@ -0,0 +1,69 @@
+ # Copyright (c) Facebook, Inc. and its affiliates.
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ from typing import Tuple
+
+ import torch
+
+
+ def rotate_half(x):
+     x1, x2 = x.chunk(2, dim=-1)
+     return torch.cat((-x2, x1), dim=-1)
+
+
+ def apply_rotary_pos_emb(x, cos, sin):
+     cos = cos[:, : x.shape[-2], :]
+     sin = sin[:, : x.shape[-2], :]
+
+     return (x * cos) + (rotate_half(x) * sin)
+
+
+ class RotaryEmbedding(torch.nn.Module):
+     """
+     The rotary position embeddings from RoFormer_ (Su et al.).
+     A crucial insight from the method is that the queries and keys are
+     transformed by rotation matrices which depend on their relative positions.
+     Other implementations are available in the Rotary Transformer repo_ and in
+     GPT-NeoX_; GPT-NeoX was an inspiration.
+     .. _RoFormer: https://arxiv.org/abs/2104.09864
+     .. _repo: https://github.com/ZhuiyiTechnology/roformer
+     .. _GPT-NeoX: https://github.com/EleutherAI/gpt-neox
+     .. warning: Please note that this embedding is not registered on purpose, as it is transformative
+         (it does not create the embedding dimension) and will likely be picked up (imported) on an ad-hoc basis
+     """
+
+     def __init__(self, dim: int, *_, **__):
+         super().__init__()
+         # Generate and save the inverse frequency buffer (non-trainable)
+         inv_freq = 1.0 / (10000 ** (torch.arange(0, dim, 2).float() / dim))
+         self.register_buffer("inv_freq", inv_freq)
+
+         self._seq_len_cached = None
+         self._cos_cached = None
+         self._sin_cached = None
+
+     def _update_cos_sin_tables(self, x, seq_dimension=1):
+         seq_len = x.shape[seq_dimension]
+
+         # Reset the tables if the sequence length has changed,
+         # or if we're on a new device (possibly due to tracing, for instance)
+         if seq_len != self._seq_len_cached or self._cos_cached.device != x.device:
+             self._seq_len_cached = seq_len
+             t = torch.arange(x.shape[seq_dimension], device=x.device).type_as(self.inv_freq)
+             freqs = torch.einsum("i,j->ij", t, self.inv_freq)
+             emb = torch.cat((freqs, freqs), dim=-1).to(x.device)
+
+             self._cos_cached = emb.cos()[None, :, :]
+             self._sin_cached = emb.sin()[None, :, :]
+
+         return self._cos_cached, self._sin_cached
+
+     def forward(self, q: torch.Tensor, k: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
+         self._cos_cached, self._sin_cached = self._update_cos_sin_tables(k, seq_dimension=-2)
+
+         return (
+             apply_rotary_pos_emb(q, self._cos_cached, self._sin_cached),
+             apply_rotary_pos_emb(k, self._cos_cached, self._sin_cached),
+         )
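A hedged shape sketch for the module above. The (batch * heads, seq_len, head_dim) layout is an assumption consistent with the `seq_dimension=-2` lookup in `forward`, and `dim` must be even so `rotate_half` can split the last dimension:

    import torch

    dim = 64
    rope = RotaryEmbedding(dim)
    q = torch.randn(16, 128, dim)  # (batch * heads, seq_len, head_dim), an assumed layout
    k = torch.randn(16, 128, dim)
    q_rot, k_rot = rope(q, k)  # same shapes; positions are encoded by rotation
    assert q_rot.shape == q.shape and k_rot.shape == k.shape
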
esm/esm/version.py ADDED
@@ -0,0 +1,6 @@
+ # Copyright (c) Facebook, Inc. and its affiliates.
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ version = "1.0.2"
esm/scripts/extract.py ADDED
@@ -0,0 +1,142 @@
+ #!/usr/bin/env python3 -u
+ # Copyright (c) Facebook, Inc. and its affiliates.
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ import argparse
+ import pathlib
+ import sys
+ print("using", sys.executable)
+
+ sys.path.insert(0, "/home/user/.local/lib/python3.8/site-packages")
+ sys.path.insert(0, "/home/user/app/esm/")
+ import os
+
+ import torch
+
+ from esm import Alphabet, FastaBatchedDataset, ProteinBertModel, pretrained, MSATransformer
+
+
+ def create_parser():
+     parser = argparse.ArgumentParser(
+         description="Extract per-token representations and model outputs for sequences in a FASTA file"  # noqa
+     )
+
+     parser.add_argument(
+         "model_location",
+         type=str,
+         help="PyTorch model file OR name of pretrained model to download (see README for models)",
+     )
+     parser.add_argument(
+         "fasta_file",
+         type=pathlib.Path,
+         help="FASTA file on which to extract representations",
+     )
+     parser.add_argument(
+         "output_dir",
+         type=pathlib.Path,
+         help="output directory for extracted representations",
+     )
+
+     parser.add_argument("--toks_per_batch", type=int, default=4096, help="maximum batch size")
+     parser.add_argument(
+         "--repr_layers",
+         type=int,
+         default=[-1],
+         nargs="+",
+         help="layer indices from which to extract representations (0 to num_layers, inclusive)",
+     )
+     parser.add_argument(
+         "--include",
+         type=str,
+         nargs="+",
+         choices=["mean", "per_tok", "bos", "contacts"],
+         help="specify which representations to return",
+         required=True,
+     )
+     parser.add_argument(
+         "--truncation_seq_length",
+         type=int,
+         default=1022,
+         help="truncate sequences longer than the given value",
+     )
+
+     parser.add_argument("--nogpu", action="store_true", help="Do not use GPU even if available")
+     return parser
+
+
+ def main(args):
+     model, alphabet = pretrained.load_model_and_alphabet(args.model_location)
+     model.eval()
+     if isinstance(model, MSATransformer):
+         raise ValueError(
+             "This script currently does not handle models with MSA input (MSA Transformer)."
+         )
+     if torch.cuda.is_available() and not args.nogpu:
+         model = model.cuda()
+         print("Transferred model to GPU")
+
+     dataset = FastaBatchedDataset.from_file(args.fasta_file)
+     batches = dataset.get_batch_indices(args.toks_per_batch, extra_toks_per_seq=1)
+     data_loader = torch.utils.data.DataLoader(
+         dataset, collate_fn=alphabet.get_batch_converter(args.truncation_seq_length), batch_sampler=batches
+     )
+     print(f"Read {args.fasta_file} with {len(dataset)} sequences")
+
+     args.output_dir.mkdir(parents=True, exist_ok=True)
+     return_contacts = "contacts" in args.include
+
+     assert all(-(model.num_layers + 1) <= i <= model.num_layers for i in args.repr_layers)
+     # Map negative layer indices (e.g. -1 for the final layer) to absolute layer numbers
+     repr_layers = [(i + model.num_layers + 1) % (model.num_layers + 1) for i in args.repr_layers]
+
+     with torch.no_grad():
+         for batch_idx, (labels, strs, toks) in enumerate(data_loader):
+             print(
+                 f"Processing {batch_idx + 1} of {len(batches)} batches ({toks.size(0)} sequences)"
+             )
+             if torch.cuda.is_available() and not args.nogpu:
+                 toks = toks.to(device="cuda", non_blocking=True)
+
+             out = model(toks, repr_layers=repr_layers, return_contacts=return_contacts)
+
+             logits = out["logits"].to(device="cpu")
+             representations = {
+                 layer: t.to(device="cpu") for layer, t in out["representations"].items()
+             }
+             if return_contacts:
+                 contacts = out["contacts"].to(device="cpu")
+
+             for i, label in enumerate(labels):
+                 args.output_file = args.output_dir / f"{label}.pt"
+                 args.output_file.parent.mkdir(parents=True, exist_ok=True)
+                 result = {"label": label}
+                 # Call clone on tensors to ensure tensors are not views into a larger representation
+                 # See https://github.com/pytorch/pytorch/issues/1995
+                 if "per_tok" in args.include:
+                     result["representations"] = {
+                         layer: t[i, 1 : len(strs[i]) + 1].clone()
+                         for layer, t in representations.items()
+                     }
+                 if "mean" in args.include:
+                     result["mean_representations"] = {
+                         layer: t[i, 1 : len(strs[i]) + 1].mean(0).clone()
+                         for layer, t in representations.items()
+                     }
+                 if "bos" in args.include:
+                     result["bos_representations"] = {
+                         layer: t[i, 0].clone() for layer, t in representations.items()
+                     }
+                 if return_contacts:
+                     result["contacts"] = contacts[i, : len(strs[i]), : len(strs[i])].clone()
+
+                 torch.save(
+                     result,
+                     args.output_file,
+                 )
+
+
+ if __name__ == "__main__":
+     parser = create_parser()
+     args = parser.parse_args()
+     main(args)
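The script is invoked from the command line, e.g. `python esm/scripts/extract.py esm2_t33_650M_UR50D proteins.fasta esm2_output --include mean per_tok` (paths illustrative). A hedged sketch of reading one resulting file back; one .pt file is written per FASTA record, named after its label:

    import torch

    result = torch.load("esm2_output/protein1.pt")  # "protein1" is an assumed FASTA label
    print(result["label"])
    # Present when "--include mean" was passed; key 33 assumes the 33-layer model above
    # (the default --repr_layers of [-1] maps to the final layer).
    print(result["mean_representations"][33].shape)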