{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "semigreedy_refinement_4_models.ipynb",
      "provenance": [],
      "collapsed_sections": [],
      "authorship_tag": "ABX9TyN4wuQkRswgF3n+yu1fsFUx",
      "include_colab_link": true
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "view-in-github",
        "colab_type": "text"
      },
      "source": [
        "<a href=\"https://colab.research.google.com/github/sokrypton/af_backprop/blob/main/examples/sc_hall/semigreedy_refinement_4_models.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "5IOr3jQEvoe6",
        "outputId": "63354ed1-48d3-4b35-e91c-20a0aade9e02"
      },
      "source": [
        "%%bash\n",
        "if [ ! -d af_backprop ]; then\n",
        "  git clone https://github.com/sokrypton/af_backprop.git\n",
        "  pip -q install dm-haiku py3Dmol biopython ml_collections\n",
        "fi\n",
        "if [ ! -d params ]; then\n",
        "  mkdir params\n",
        "  curl -fsSL https://storage.googleapis.com/alphafold/alphafold_params_2021-07-14.tar | tar x -C params\n",
        "fi"
      ],
      "execution_count": 1,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "Cloning into 'af_backprop'...\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "3Ym3Vie7v1Yb"
      },
      "source": [
        "import os\n",
        "import sys\n",
        "sys.path.append('af_backprop')\n",
        "\n",
        "import numpy as np\n",
        "import matplotlib.pyplot as plt\n",
        "import py3Dmol\n",
        "\n",
        "import jax\n",
        "import jax.numpy as jnp\n",
        "\n",
        "from jax.experimental.optimizers import adam\n",
        "\n",
        "from alphafold.common import protein\n",
        "from alphafold.data import pipeline\n",
        "from alphafold.model import data, config, model, modules\n",
        "from alphafold.common import residue_constants\n",
        "\n",
        "from alphafold.model import all_atom\n",
        "from alphafold.model import folding\n",
        "\n",
        "# custom functions\n",
        "from alphafold.data import prep_inputs\n",
        "from utils import *"
      ],
      "execution_count": 2,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "shh_V1eswjrH"
      },
      "source": [
        "# setup which model params to use\n",
        "model_name = \"model_3_ptm\"\n",
        "model_config = config.model_config(model_name)\n",
        "\n",
        "# enable checkpointing\n",
        "model_config.model.global_config.use_remat = True\n",
        "\n",
        "# number of recycles\n",
        "model_config.model.num_recycle = 3\n",
        "model_config.data.common.num_recycle = 3\n",
        "\n",
        "# backprop through recycles\n",
        "model_config.model.backprop_recycle = False\n",
        "model_config.model.embeddings_and_evoformer.backprop_dgram = False\n",
        "\n",
        "# custom relative features (needed for insertion/deletion)\n",
        "INDELS = False\n",
        "model_config.model.embeddings_and_evoformer.custom_relative_features = INDELS\n",
        "\n",
        "# number of sequences\n",
        "N = 1\n",
        "model_config.data.eval.max_msa_clusters = N\n",
        "model_config.data.common.max_extra_msa = 1\n",
        "model_config.data.eval.masked_msa_replace_fraction = 0\n",
        "\n",
        "# dropout\n",
        "model_config = set_dropout(model_config, 0.0)\n",
        "\n",
        "# setup model\n",
        "model_params = [data.get_model_haiku_params(model_name=model_name, data_dir=\".\")]\n",
        "model_runner = model.RunModel(model_config, model_params[0], is_training=True)\n",
        "\n",
        "# load the other models to sample during design.\n",
        "for model_name in [\"model_1_ptm\",\"model_2_ptm\",\"model_5_ptm\",\"model_4_ptm\"]:\n",
        "  params = data.get_model_haiku_params(model_name, '.')\n",
        "  model_params.append({k: params[k] for k in model_runner.params.keys()})"
      ],
      "execution_count": 3,
      "outputs": []
    },
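    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "All five `*_ptm` parameter sets are run through the single `model_3_ptm` config, so the dict comprehension above keeps only the keys shared with `model_runner.params` (models 1 and 2 also carry template-stack weights, which this config presumably never uses). A minimal sanity check, assuming every loaded dict should expose the same keys:\n",
        "```python\n",
        "for i, p in enumerate(model_params):\n",
        "  assert set(p.keys()) == set(model_runner.params.keys()), f\"model {i} key mismatch\"\n",
        "```"
      ]
    },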
    {
      "cell_type": "code",
      "metadata": {
        "id": "XOB0t6dslBAD"
      },
      "source": [
        "#################\n",
        "# USER INPUT\n",
        "#################\n",
        "# native structure you want to pull active site from\n",
        "pos_idx_ref = [13,37,98] # note: zero indexed\n",
        "PDB_REF = \"af_backprop/examples/sc_hall/1QJG.pdb\"\n",
        "\n",
        "# starting structure (for random starting sequence, set PDB=None and LEN to desired length)\n",
        "pos_idx = [74+5,32+5,7+5]\n",
        "MODE = \"af_backprop/examples/sc_hall/1QJS_starting\"\n",
        "PDB = f\"{MODE}.pdb\"\n",
        "LEN = 105"
      ],
      "execution_count": 4,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "yYiNDSxNwmVw"
      },
      "source": [
        "# prep reference (native) features\n",
        "OBJ_REF = protein.from_pdb_string(pdb_to_string(PDB_REF), chain_id=\"A\")\n",
        "SEQ_REF = jax.nn.one_hot(OBJ_REF.aatype,20)\n",
        "START_SEQ_REF = \"\".join([order_restype[a] for a in OBJ_REF.aatype])\n",
        "\n",
        "batch_ref = {'aatype': OBJ_REF.aatype,\n",
        "             'all_atom_positions': OBJ_REF.atom_positions,\n",
        "             'all_atom_mask': OBJ_REF.atom_mask}\n",
        "batch_ref.update(all_atom.atom37_to_frames(**batch_ref))\n",
        "batch_ref.update(prep_inputs.make_atom14_positions(batch_ref))\n",
        "batch_ref[\"idx\"] = pos_idx_ref\n",
        "\n",
        "# prep starting (design) features\n",
        "if PDB is not None:\n",
        "  OBJ = protein.from_pdb_string(pdb_to_string(PDB), chain_id=\"A\")\n",
        "  SEQ = jax.nn.one_hot(OBJ.aatype,20)\n",
        "  START_SEQ = \"\".join([order_restype[a] for a in OBJ.aatype])\n",
        "\n",
        "  batch = {'aatype': OBJ.aatype,\n",
        "          'all_atom_positions': OBJ.atom_positions,\n",
        "          'all_atom_mask': OBJ.atom_mask}\n",
        "  batch.update(all_atom.atom37_to_frames(**batch))\n",
        "  batch.update(prep_inputs.make_atom14_positions(batch))\n",
        "else:\n",
        "  SEQ = jnp.zeros(LEN).at[jnp.asarray(pos_idx)].set([OBJ_REF.aatype[i] for i in pos_idx_ref])\n",
        "  START_SEQ = \"\".join([order_restype[a] for a in SEQ])\n",
        "  SEQ = jax.nn.one_hot(SEQ,20)\n",
        "\n",
        "# prep input features\n",
        "feature_dict = {\n",
        "    **pipeline.make_sequence_features(sequence=START_SEQ,description=\"none\",num_res=len(START_SEQ)),\n",
        "    **pipeline.make_msa_features(msas=[N*[START_SEQ]], deletion_matrices=[N*[[0]*len(START_SEQ)]]),\n",
        "}\n",
        "inputs = model_runner.process_features(feature_dict, random_seed=0)\n",
        "\n",
        "if N > 1:\n",
        "  inputs[\"msa_row_mask\"] = jnp.ones_like(inputs[\"msa_row_mask\"])\n",
        "  inputs[\"msa_mask\"] = jnp.ones_like(inputs[\"msa_mask\"])"
      ],
      "execution_count": 5,
      "outputs": []
    },
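    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "`atom37_to_frames` adds rigid-group frames and `make_atom14_positions` adds the compact 14-atom coordinate set used by the all-atom losses downstream. A quick shape check (key names as in the AlphaFold data pipeline; `num_res` is the chain length):\n",
        "```python\n",
        "print(batch_ref[\"all_atom_positions\"].shape)   # (num_res, 37, 3)\n",
        "print(batch_ref[\"atom14_gt_positions\"].shape)  # (num_res, 14, 3)\n",
        "```"
      ]
    },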
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "ADmZt232wr8O",
        "outputId": "67056d1f-d0b4-4825-8ad7-13a799b4d905"
      },
      "source": [
        "print([START_SEQ[i] for i in pos_idx])\n",
        "print([START_SEQ_REF[i] for i in pos_idx_ref])"
      ],
      "execution_count": 6,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "['Y', 'N', 'D']\n",
            "['Y', 'N', 'D']\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "GfdnXo9ywwVg"
      },
      "source": [
        "def get_grad_fn(model_runner, inputs, pos_idx_ref, inc_backbone=False):\n",
        "  \n",
        "  def mod(params, key, model_params, opt):\n",
        "    pos_idx = opt[\"pos_idx\"]\n",
        "    pos_idx_ref = batch_ref[\"idx\"]\n",
        "    ############################\n",
        "    # set amino acid sequence\n",
        "    ############################\n",
        "    seq_logits = jax.random.permutation(key, params[\"msa\"])\n",
        "    seq_soft = jax.nn.softmax(seq_logits)\n",
        "    seq = jax.lax.stop_gradient(jax.nn.one_hot(seq_soft.argmax(-1),20) - seq_soft) + seq_soft\n",
        "    seq = seq.at[:,pos_idx,:].set(SEQ_REF[pos_idx_ref,:])\n",
        "\n",
        "    oh_mask = opt[\"oh_mask\"][:,None]\n",
        "    pseudo_seq = oh_mask * seq + (1-oh_mask) * seq_logits\n",
        "\n",
        "    inputs_mod = inputs.copy()\n",
        "    update_seq(pseudo_seq, inputs_mod, msa_input=(\"msa\" in params))\n",
        "\n",
        "    if \"msa_mask\" in opt:\n",
        "      inputs_mod[\"msa_mask\"] = inputs_mod[\"msa_mask\"] * opt[\"msa_mask\"][None,:,None]\n",
        "      inputs_mod[\"msa_row_mask\"] = inputs_mod[\"msa_row_mask\"] * opt[\"msa_mask\"][None,:]\n",
        "    \n",
        "    ####################\n",
        "    # set sidechains identity\n",
        "    ####################\n",
        "    B,L = inputs_mod[\"aatype\"].shape[:2]\n",
        "    ALA = jax.nn.one_hot(residue_constants.restype_order[\"A\"],21)\n",
        "\n",
        "    aatype = jnp.zeros((B,L,21)).at[...,:20].set(seq[0])\n",
        "    ala_mask = opt[\"ala_mask\"][:,None]\n",
        "    aatype_ala = jnp.zeros((B,L,21)).at[:].set(ALA)\n",
        "    aatype_ala = aatype_ala.at[:,pos_idx,:20].set(SEQ_REF[pos_idx_ref,:])\n",
        "    aatype_pseudo = ala_mask * aatype + (1-ala_mask) * aatype_ala\n",
        "    update_aatype(aatype_pseudo, inputs_mod)\n",
        "\n",
        "    ############################################################\n",
        "    if model_runner.config.model.embeddings_and_evoformer.custom_relative_features:\n",
        "      # set positions\n",
        "      active_pos = jax.nn.sigmoid(params[\"active_pos\"])\n",
        "      active_pos = active_pos.at[jnp.asarray(pos_idx)].set(1.0)\n",
        "\n",
        "      # hard constraint\n",
        "      active_pos = jax.lax.stop_gradient((active_pos > 0.5).astype(jnp.float32) - active_pos) + active_pos\n",
        "      \n",
        "      residue_idx = jax.lax.scan(lambda x,y:(x+y,x), 0, active_pos)[1]\n",
        "      offset = residue_idx[:, None] - residue_idx[None, :]\n",
        "      rel_pos = jax.nn.softmax(-jnp.square(offset[...,None] - jnp.arange(-32,33,dtype=jnp.float32)))\n",
        "\n",
        "      inputs_mod[\"rel_pos\"] = jnp.tile(rel_pos[None],[B,1,1,1])\n",
        "      inputs_mod[\"seq_mask\"] = jnp.zeros_like(inputs_mod[\"seq_mask\"]).at[...,:].set(active_pos)\n",
        "      inputs_mod[\"msa_mask\"] = jnp.zeros_like(inputs_mod[\"msa_mask\"]).at[...,:].set(active_pos)\n",
        "\n",
        "      inputs_mod[\"atom14_atom_exists\"] *= active_pos[None,:,None]\n",
        "      inputs_mod[\"atom37_atom_exists\"] *= active_pos[None,:,None]\n",
        "      inputs_mod[\"residx_atom14_to_atom37\"] *= active_pos[None,:,None,None]\n",
        "      inputs_mod[\"residx_atom37_to_atom14\"] *= active_pos[None,:,None,None]\n",
        "\n",
        "    ############################################################\n",
        "    \n",
        "    # get output\n",
        "    outputs = model_runner.apply(model_params, key, inputs_mod)\n",
        "\n",
        "    ###################\n",
        "    # structure loss\n",
        "    ###################\n",
        "    fape_loss = get_fape_loss_idx(batch_ref, outputs, pos_idx, model_config, backbone=inc_backbone, sidechain=True)\n",
        "    rmsd_loss = get_sidechain_rmsd_idx(batch_ref, outputs, pos_idx, model_config)\n",
        "    dgram_loss = get_dgram_loss_idx(batch_ref, outputs, pos_idx, model_config)\n",
        "\n",
        "    losses = {\"fape\":fape_loss,\n",
        "              \"rmsd\":rmsd_loss,\n",
        "              \"dgram\":dgram_loss}\n",
        "\n",
        "    if \"sc_weight_fape\" in opt: fape_loss *= opt[\"sc_weight_fape\"]\n",
        "    if \"sc_weight_rmsd\" in opt: rmsd_loss *= opt[\"sc_weight_rmsd\"]\n",
        "    if \"sc_weight_dgram\" in opt: dgram_loss *= opt[\"sc_weight_dgram\"]\n",
        "\n",
        "    loss = (rmsd_loss + fape_loss + dgram_loss) * opt[\"sc_weight\"]\n",
        "  \n",
        "    ################### \n",
        "    # background loss\n",
        "    ###################\n",
        "    if \"conf_weight\" in opt:\n",
        "      pae = jax.nn.softmax(outputs[\"predicted_aligned_error\"][\"logits\"])\n",
        "      plddt = jax.nn.softmax(outputs['predicted_lddt']['logits'])\n",
        "      pae_loss = (pae * jnp.arange(pae.shape[-1])).sum(-1)\n",
        "      plddt_loss = (plddt * jnp.arange(plddt.shape[-1])[::-1]).sum(-1)\n",
        "\n",
        "      if model_runner.config.model.embeddings_and_evoformer.custom_relative_features:\n",
        "        active_pos_mask = active_pos[:,None] * active_pos[None,:]\n",
        "        pae_loss = (pae_loss * active_pos_mask).sum() / (1e-8 + active_pos_mask.sum())\n",
        "        plddt_loss = (plddt_loss * active_pos).sum() / (1e-8 + active_pos.sum())\n",
        "      else:\n",
        "        pae_loss = pae_loss.mean()\n",
        "        plddt_loss = plddt_loss.mean()\n",
        "\n",
        "      loss = loss + (pae_loss + plddt_loss) * opt[\"conf_weight\"]\n",
        "      losses[\"pae\"] = pae_loss\n",
        "      losses[\"plddt\"] = plddt_loss\n",
        "\n",
        "    if \"rg_weight\" in opt:\n",
        "      ca_coords = outputs[\"structure_module\"][\"final_atom_positions\"][:,1,:]\n",
        "      rg_loss = jnp.sqrt(jnp.square(ca_coords - ca_coords.mean(0)).sum(-1).mean() + 1e-8)\n",
        "      loss = loss + rg_loss * opt[\"rg_weight\"]\n",
        "      losses[\"rg\"] = rg_loss\n",
        "          \n",
        "    if \"msa\" in params and \"ent_weight\" in opt:\n",
        "      seq_prf = seq.mean(0)\n",
        "      ent_loss = -(seq_prf * jnp.log(seq_prf + 1e-8)).sum(-1).mean()\n",
        "      loss = loss + ent_loss * opt[\"ent_weight\"]\n",
        "      losses[\"ent\"] = ent_loss\n",
        "    else:\n",
        "      ent_loss = 0\n",
        "\n",
        "    outs = {\"final_atom_positions\":outputs[\"structure_module\"][\"final_atom_positions\"],\n",
        "            \"final_atom_mask\":outputs[\"structure_module\"][\"final_atom_mask\"]}\n",
        "\n",
        "    if model_runner.config.model.embeddings_and_evoformer.custom_relative_features:\n",
        "      outs[\"residue_idx\"] = residue_idx\n",
        "\n",
        "    seq_ = seq[0] if \"msa\" in params else seq\n",
        "\n",
        "    return loss, ({\"losses\":losses, \"outputs\":outs, \"seq\":seq_})\n",
        "  loss_fn = mod\n",
        "  grad_fn = jax.value_and_grad(mod, has_aux=True, argnums=0)\n",
        "  return loss_fn, grad_fn"
      ],
      "execution_count": 7,
      "outputs": []
    },
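    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "The `stop_gradient` line in `mod` is a straight-through estimator: the forward pass sees a hard one-hot `argmax`, while the backward pass differentiates through the softmax. A self-contained sketch of the trick on toy logits (nothing here comes from the notebook's state):\n",
        "```python\n",
        "import jax\n",
        "import jax.numpy as jnp\n",
        "\n",
        "def straight_through(logits):\n",
        "  soft = jax.nn.softmax(logits)\n",
        "  hard = jax.nn.one_hot(soft.argmax(-1), logits.shape[-1])\n",
        "  # forward: hard; backward: gradient of soft\n",
        "  return jax.lax.stop_gradient(hard - soft) + soft\n",
        "\n",
        "g = jax.grad(lambda l: straight_through(l)[0])(jnp.array([1.0, 2.0, 0.5]))\n",
        "print(g)  # nonzero: gradients flow through the softmax\n",
        "```"
      ]
    },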
    {
      "cell_type": "code",
      "metadata": {
        "id": "vavxyvYJwyPC"
      },
      "source": [
        "# gradient function (note for greedy search we won't be using grad_fn, only loss_fn)\n",
        "loss_fn, grad_fn = get_grad_fn(model_runner, inputs, pos_idx_ref=pos_idx_ref)\n",
        "loss_fn = jax.jit(loss_fn)\n",
        "\n",
        "# stack model params (we exclude the last model: model_4_ptm for validation)\n",
        "model_params_multi = jax.tree_multimap(lambda *values: jnp.stack(values, axis=0), *model_params[:-1])\n",
        "loss_fn_multi = jax.jit(jax.vmap(loss_fn,(None,None,0,None)))"
      ],
      "execution_count": 8,
      "outputs": []
    },
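    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "`in_axes=(None,None,0,None)` maps only over the stacked parameter pytree, so one jitted call scores the same sequence under all four models. The same pattern on a toy function (all names below are illustrative):\n",
        "```python\n",
        "import jax\n",
        "import jax.numpy as jnp\n",
        "\n",
        "params_list = [{\"w\": jnp.ones(3) * i} for i in range(4)]\n",
        "stacked = jax.tree_map(lambda *xs: jnp.stack(xs), *params_list)\n",
        "\n",
        "def f(x, p):\n",
        "  return (x * p[\"w\"]).sum()\n",
        "\n",
        "# map over the leading axis of the params pytree only\n",
        "ys = jax.vmap(f, in_axes=(None, 0))(jnp.arange(3.0), stacked)\n",
        "print(ys)  # one loss-like value per stacked \"model\"\n",
        "```"
      ]
    },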
    {
      "cell_type": "code",
      "metadata": {
        "id": "lsLk1lLCQNcw"
      },
      "source": [
        "key = jax.random.PRNGKey(0)\n",
        "L,A = len(START_SEQ),20\n",
        "\n",
        "pos_idx_ = jnp.asarray(pos_idx)\n",
        "pos_idx_ref_ = jnp.asarray(pos_idx_ref)\n",
        "\n",
        "msa = SEQ[None]\n",
        "params = {\"msa\":msa, \"active_pos\":jnp.ones(L)}"
      ],
      "execution_count": 9,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "J7ZL2zc4t9I0"
      },
      "source": [
        "def mut(params, indel=False):\n",
        "  L,A = params[\"msa\"].shape[-2:]\n",
        "  while True:\n",
        "    i = np.random.randint(L)\n",
        "    a = np.random.randint(A)\n",
        "    if i not in pos_idx and params[\"msa\"][0,i,a] == 0 and (params[\"active_pos\"][i] == 1 or indel):\n",
        "      break\n",
        "\n",
        "  params_ = params.copy()\n",
        "  params_[\"msa\"] = params[\"msa\"].at[:,i,:].set(jnp.eye(A)[a])\n",
        "\n",
        "  if indel:\n",
        "    state = -1 if params[\"active_pos\"][i] == 1 else 1\n",
        "    params_[\"active_pos\"] = params[\"active_pos\"].at[i].set(state)\n",
        "  return params_"
      ],
      "execution_count": 10,
      "outputs": []
    },
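    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "A hypothetical one-off check of the mutation operator (the position diff below is illustrative, not part of the pipeline):\n",
        "```python\n",
        "p_new = mut(params)\n",
        "diff = jnp.abs(p_new[\"msa\"] - params[\"msa\"]).sum((0, 2))  # per-position change\n",
        "print(\"mutated position:\", int(diff.argmax()))\n",
        "```"
      ]
    },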
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "wWlPaQGB4Nq8"
      },
      "source": [
        "multi-model refinement"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "dqM44WHW3DMw",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "562d6eae-9592-44a4-e1db-b65df3931fa9"
      },
      "source": [
        "oh_mask = jnp.ones((L,))\n",
        "ala_mask = jnp.ones((L,))\n",
        "msa_mask = jnp.ones((N,))\n",
        "opt={\"oh_mask\":oh_mask,\n",
        "     \"msa_mask\":msa_mask,\n",
        "     \"ala_mask\":ala_mask,\n",
        "     \"sc_weight\":1.0,\n",
        "     \"sc_weight_rmsd\":1.0,\n",
        "     \"sc_weight_fape\":1.0,\n",
        "     \"sc_weight_dgram\":0.0,\n",
        "     \"conf_weight\":0.01,\n",
        "     \"pos_idx\":pos_idx_}\n",
        "loss, outs = loss_fn_multi(params, key, model_params_multi, opt)\n",
        "print(np.mean(loss),\n",
        "      np.mean(outs[\"losses\"][\"rmsd\"]),\n",
        "      np.mean(outs[\"losses\"][\"fape\"]))\n",
        "\n",
        "print(outs[\"losses\"][\"rmsd\"])"
      ],
      "execution_count": 12,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "5.0831347 4.0395207 0.5736508\n",
            "[0.32047477 6.103425   3.9624836  5.7717004 ]\n"
          ]
        }
      ]
    },
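    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "With these weights the per-model objective reduces to `loss = (rmsd + fape) + 0.01 * (pae + plddt)`, and `loss_fn_multi` returns one such value per stacked model. The printed per-model sidechain RMSDs (in Å) show the first model already near 0.32 while the other three start far off."
      ]
    },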
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "VAPNQZ4232bN",
        "outputId": "5e2fefc9-ade5-406c-f860-f33bd296cf4a"
      },
      "source": [
        "LOSS = np.mean(loss)\n",
        "OVERALL_RMSD = np.mean(outs[\"losses\"][\"rmsd\"])\n",
        "OVERALL_FAPE = np.mean(outs[\"losses\"][\"fape\"])\n",
        "OVERALL_LOSS = LOSS\n",
        "key = jax.random.PRNGKey(0)\n",
        "for n in range(10):\n",
        "  params_ = params.copy()\n",
        "  buff_p,buff_l,buff_o = [],[],[]\n",
        "  for m in range(20):\n",
        "    key,subkey = jax.random.split(key)\n",
        "    do_indel = False #np.random.uniform() < 0.25\n",
        "    p = mut(params, indel=do_indel)\n",
        "    l,o = loss_fn_multi(p, subkey, model_params_multi, opt)\n",
        "    print(\"-----------\", m, np.mean(o[\"losses\"][\"rmsd\"]), list(o[\"losses\"][\"rmsd\"]))\n",
        "    buff_p.append(p); buff_l.append(l); buff_o.append(o)\n",
        "  best = np.argmin(np.asarray(buff_l).mean(-1))\n",
        "  params, LOSS, outs = buff_p[best], buff_l[best], buff_o[best]\n",
        "  LOSS = np.mean(LOSS)\n",
        "  RMSD = np.mean(outs[\"losses\"][\"rmsd\"])\n",
        "  FAPE = np.mean(outs[\"losses\"][\"fape\"])\n",
        "\n",
        "  outs = jax.tree_map(lambda x: x[0], outs)\n",
        "  if RMSD < OVERALL_RMSD:\n",
        "    OVERALL_RMSD = RMSD\n",
        "    save_pdb(outs,f\"{MODE}_best_rmsd.pdb\")\n",
        "  if FAPE < OVERALL_FAPE:\n",
        "    OVERALL_FAPE = FAPE\n",
        "    save_pdb(outs,f\"{MODE}_best_fape.pdb\")\n",
        "  if LOSS < OVERALL_LOSS:\n",
        "    OVERALL_LOSS = LOSS\n",
        "    save_pdb(outs,f\"{MODE}_best_loss.pdb\")\n",
        "  print(n, LOSS, RMSD, FAPE, (params[\"active_pos\"] > 0).sum(), len(buff_l))"
      ],
      "execution_count": 13,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "----------- 0 10.582149 [12.104791, 11.438794, 6.8327885, 11.95222]\n",
            "----------- 1 5.7060966 [0.34404072, 7.8016953, 6.8321495, 7.8465023]\n",
            "----------- 2 4.9283195 [0.38857314, 7.33674, 3.9765234, 8.011441]\n",
            "----------- 3 4.596478 [1.2857033, 8.185179, 3.6270323, 5.2879977]\n",
            "----------- 4 4.33639 [0.34355134, 7.6549, 3.8751266, 5.471982]\n",
            "----------- 5 6.8581476 [7.102624, 8.735716, 4.742427, 6.851823]\n",
            "----------- 6 3.9228725 [0.37756792, 6.210618, 3.7853143, 5.317991]\n",
            "----------- 7 4.8257957 [0.3333986, 8.438012, 5.8835196, 4.648253]\n",
            "----------- 8 4.437786 [0.32170418, 8.433742, 4.022462, 4.9732375]\n",
            "----------- 9 4.3738766 [0.3662424, 8.010605, 3.854046, 5.264613]\n",
            "----------- 10 5.4476604 [0.3221591, 7.0530643, 5.5048957, 8.910523]\n",
            "----------- 11 4.311565 [0.33136475, 6.864394, 4.1079164, 5.9425855]\n",
            "----------- 12 8.771259 [7.6971173, 9.20276, 8.589025, 9.596133]\n",
            "----------- 13 4.458911 [0.32208133, 8.985687, 4.7378616, 3.7900143]\n",
            "----------- 14 4.6013637 [0.35995653, 6.008712, 4.1167917, 7.919994]\n",
            "----------- 15 4.414083 [0.323973, 8.743649, 3.9161198, 4.672591]\n",
            "----------- 16 5.3490214 [1.0300151, 11.366785, 4.0967994, 4.902487]\n",
            "----------- 17 4.038536 [0.33273467, 6.64519, 4.211386, 4.9648337]\n",
            "----------- 18 4.9283195 [0.38857314, 7.33674, 3.9765234, 8.011441]\n",
            "----------- 19 3.6465042 [0.344874, 5.7528996, 3.7142928, 4.7739506]\n",
            "0 4.665577 3.6465042 0.57121277 105 20\n",
            "----------- 0 4.8786836 [0.389769, 4.1099725, 6.0193267, 8.9956665]\n",
            "----------- 1 3.5066729 [0.34728014, 6.085302, 3.764575, 3.8295348]\n",
            "----------- 2 3.6940656 [0.3797003, 6.742977, 3.6415355, 4.01205]\n",
            "----------- 3 3.7695255 [0.36818364, 5.561354, 4.4751167, 4.6734476]\n",
            "----------- 4 3.6419036 [0.3596261, 5.3361835, 3.9025295, 4.9692755]\n",
            "----------- 5 3.8179073 [0.3272733, 5.84101, 4.1292744, 4.9740715]\n",
            "----------- 6 4.6439056 [0.345108, 6.285658, 8.125, 3.8198566]\n",
            "----------- 7 3.8682199 [1.9818479, 6.9291024, 2.6291928, 3.9327354]\n",
            "----------- 8 4.152326 [0.35060146, 5.0279503, 5.719198, 5.5115542]\n",
            "----------- 9 8.809828 [12.948995, 5.485416, 7.6189466, 9.185956]\n",
            "----------- 10 4.5837007 [2.7283385, 6.2634826, 4.454889, 4.8880925]\n",
            "----------- 11 4.010665 [0.42540643, 8.533676, 4.0781097, 3.0054672]\n",
            "----------- 12 3.7557948 [0.3431681, 4.354364, 5.114262, 5.211385]\n",
            "----------- 13 3.7458181 [0.368222, 5.6166067, 4.855059, 4.1433854]\n",
            "----------- 14 3.7996607 [0.3551456, 5.5467305, 4.9242435, 4.372523]\n",
            "----------- 15 3.6419036 [0.3596261, 5.3361835, 3.9025295, 4.9692755]\n",
            "----------- 16 3.7063732 [0.3709019, 5.440703, 4.383684, 4.6302032]\n",
            "----------- 17 4.4927187 [2.5542479, 5.6653757, 3.2255049, 6.5257473]\n",
            "----------- 18 2.923112 [0.4098379, 4.310831, 6.3609443, 0.6108337]\n",
            "----------- 19 4.8385973 [1.1260694, 7.660815, 4.391685, 6.1758204]\n",
            "1 3.8471584 2.923112 0.47667003 105 20\n",
            "----------- 0 2.4071047 [0.40594643, 4.268774, 4.386216, 0.567483]\n",
            "----------- 1 3.5637014 [3.4049003, 5.7517843, 4.466902, 0.631219]\n",
            "----------- 2 6.385169 [0.42317274, 7.9488277, 10.441073, 6.7276015]\n",
            "----------- 3 7.003173 [1.0159206, 8.392572, 10.478087, 8.12611]\n",
            "----------- 4 4.4137754 [0.44963905, 4.5486503, 5.6105924, 7.0462213]\n",
            "----------- 5 4.247386 [0.46088308, 5.267075, 4.02893, 7.2326555]\n",
            "----------- 6 6.511106 [0.62076414, 10.929417, 7.5565777, 6.937666]\n",
            "----------- 7 6.4648147 [0.94779783, 8.850026, 9.21556, 6.8458743]\n",
            "----------- 8 2.9776573 [0.4029673, 5.3560576, 5.5093575, 0.64224577]\n",
            "----------- 9 3.6655078 [0.37931573, 3.7486086, 5.3787656, 5.155341]\n",
            "----------- 10 3.8851724 [0.4140052, 4.2403994, 7.3400993, 3.5461855]\n",
            "----------- 11 3.404581 [1.050032, 7.233591, 4.872441, 0.46226132]\n",
            "----------- 12 2.5674534 [0.4273828, 3.6238961, 5.5727496, 0.64578414]\n",
            "----------- 13 4.3858285 [0.46913218, 6.663731, 5.7499824, 4.6604686]\n",
            "----------- 14 4.559425 [0.4063991, 4.701339, 8.109741, 5.0202203]\n",
            "----------- 15 3.7580328 [0.3951192, 5.44158, 8.7475815, 0.44785148]\n",
            "----------- 16 2.678008 [0.4227554, 4.3767376, 5.2928696, 0.6196703]\n",
            "----------- 17 3.5162039 [0.45585072, 5.0549345, 7.816131, 0.7378993]\n",
            "----------- 18 2.1885293 [0.4061838, 3.8336825, 3.8553498, 0.65890104]\n",
            "----------- 19 2.6114652 [0.46717995, 3.6555567, 5.7019725, 0.6211518]\n",
            "2 3.067666 2.1885293 0.46251404 105 20\n",
            "----------- 0 2.3537211 [0.4172569, 4.672165, 3.645054, 0.68040824]\n",
            "----------- 1 6.565819 [0.46917838, 9.24697, 13.639761, 2.9073648]\n",
            "----------- 2 2.5486968 [0.42084527, 5.1198244, 4.0136366, 0.64048123]\n",
            "----------- 3 2.265966 [0.44895777, 4.072321, 3.9046526, 0.63793194]\n",
            "----------- 4 6.415695 [5.689722, 7.4149528, 6.209097, 6.349009]\n",
            "----------- 5 6.701787 [6.1952324, 4.0250883, 9.226227, 7.360599]\n",
            "----------- 6 4.2596745 [0.4309035, 6.793351, 5.283859, 4.530585]\n",
            "----------- 7 4.2896414 [0.4131705, 6.1410913, 5.336551, 5.267752]\n",
            "----------- 8 4.533273 [0.41316566, 3.5603185, 8.388473, 5.7711363]\n",
            "----------- 9 2.173087 [0.48648486, 3.431612, 4.1570277, 0.61722285]\n",
            "----------- 10 2.343117 [0.41405722, 4.579, 3.7360349, 0.6433763]\n",
            "----------- 11 2.2203813 [0.408508, 4.2504196, 3.6253512, 0.5972458]\n",
            "----------- 12 2.8376088 [0.40539894, 3.930316, 3.6116252, 3.4030957]\n",
            "----------- 13 2.9735963 [0.45622826, 6.1875687, 4.7524834, 0.4981052]\n",
            "----------- 14 3.9415567 [0.48253584, 5.8193073, 6.4667287, 2.9976552]\n",
            "----------- 15 2.789737 [0.39956462, 7.160851, 2.9988146, 0.5997168]\n",
            "----------- 16 5.3915973 [0.41455936, 7.5174804, 8.774411, 4.859938]\n",
            "----------- 17 3.1227882 [0.40277773, 4.0853357, 4.670654, 3.3323848]\n",
            "----------- 18 3.9933128 [0.5662083, 6.5397005, 5.448346, 3.418997]\n",
            "----------- 19 8.413696 [5.8865013, 10.086903, 11.466493, 6.2148895]\n",
            "3 3.0549202 2.173087 0.46643674 105 20\n",
            "----------- 0 3.0471392 [0.47183362, 5.662226, 5.3606963, 0.6938004]\n",
            "----------- 1 4.0131655 [0.5177209, 4.983051, 6.9292545, 3.6226368]\n",
            "----------- 2 2.17895 [0.50002897, 3.2442615, 4.345606, 0.6259041]\n",
            "----------- 3 2.374694 [0.4852461, 5.912502, 2.445409, 0.65562004]\n",
            "----------- 4 2.3062282 [0.44955295, 3.5542312, 4.5708838, 0.65024453]\n",
            "----------- 5 6.3752103 [0.5182901, 10.698274, 7.579367, 6.70491]\n",
            "----------- 6 4.6237783 [0.5973701, 5.4147644, 3.7537475, 8.729232]\n",
            "----------- 7 3.6459582 [0.6986359, 6.070299, 3.8660624, 3.9488356]\n",
            "----------- 8 4.2973356 [0.48809233, 2.9792452, 10.25743, 3.464575]\n",
            "----------- 9 3.4267287 [0.47858143, 8.201494, 4.4400992, 0.5867403]\n",
            "----------- 10 6.5054493 [0.4609542, 8.021693, 7.273257, 10.265895]\n",
            "----------- 11 6.4646187 [0.45118427, 11.614124, 8.662091, 5.131075]\n",
            "----------- 12 3.44925 [0.5102441, 6.75787, 5.6903915, 0.83849436]\n",
            "----------- 13 1.5510874 [0.46765023, 3.1209612, 1.937722, 0.67801625]\n",
            "----------- 14 4.1438828 [0.5995776, 8.073297, 4.360333, 3.5423234]\n",
            "----------- 15 3.0242546 [0.4761192, 6.872156, 4.1430902, 0.6056532]\n",
            "----------- 16 3.354655 [0.5910091, 7.9251766, 4.2511578, 0.65127695]\n",
            "----------- 17 4.737821 [0.52790046, 12.361472, 4.349435, 1.7124759]\n",
            "----------- 18 3.3077118 [0.48165753, 2.8231888, 5.756186, 4.169815]\n",
            "----------- 19 3.858794 [0.46834785, 6.2006445, 5.3235, 3.442683]\n",
            "4 2.3836753 1.5510874 0.40621892 105 20\n",
            "----------- 0 2.408711 [0.59259933, 2.994746, 5.3717113, 0.6757873]\n",
            "----------- 1 3.583911 [0.59484565, 3.0236213, 6.1144195, 4.602757]\n",
            "----------- 2 3.7031112 [1.9154848, 4.1394815, 3.6726053, 5.084873]\n",
            "----------- 3 4.259982 [0.6372889, 6.2120976, 6.1206055, 4.0699363]\n",
            "----------- 4 6.560093 [0.4791305, 12.672071, 6.238699, 6.850472]\n",
            "----------- 5 4.8324957 [0.47789723, 7.418234, 6.5990267, 4.8348246]\n",
            "----------- 6 4.253159 [0.6549075, 7.017164, 7.2769494, 2.0636148]\n",
            "----------- 7 3.104456 [0.45488828, 3.344442, 4.431807, 4.186686]\n",
            "----------- 8 5.6592755 [3.4889212, 6.648672, 9.099052, 3.4004571]\n",
            "----------- 9 6.041602 [2.0915234, 7.53962, 5.7716804, 8.763584]\n",
            "----------- 10 2.513018 [0.986735, 3.3085477, 4.969964, 0.7868239]\n",
            "----------- 11 2.6217945 [0.45438075, 3.1789834, 6.181294, 0.6725199]\n",
            "----------- 12 3.941772 [0.6494507, 3.9241767, 5.559955, 5.6335053]\n",
            "----------- 13 6.465238 [7.7704196, 9.203311, 5.048996, 3.8382263]\n",
            "----------- 14 9.766651 [9.576719, 12.081644, 6.587141, 10.821101]\n",
            "----------- 15 4.38824 [0.86634266, 5.365814, 6.151191, 5.1696105]\n",
            "----------- 16 4.7873554 [1.5247656, 7.3221726, 5.155653, 5.146831]\n",
            "----------- 17 3.2191634 [0.45180723, 6.635691, 5.1962996, 0.5928558]\n",
            "----------- 18 1.0649618 [0.4786889, 2.5209634, 0.5535031, 0.70669174]\n",
            "----------- 19 2.9940672 [0.46470752, 3.1425028, 4.3368435, 4.0322146]\n",
            "5 1.7999133 1.0649618 0.35217547 105 20\n",
            "----------- 0 2.9137554 [0.5797037, 3.3810425, 2.3903174, 5.3039575]\n",
            "----------- 1 1.3706349 [0.60660183, 3.5293875, 0.5786312, 0.76791924]\n",
            "----------- 2 3.2712543 [0.6478559, 6.8537245, 5.073406, 0.51003104]\n",
            "----------- 3 2.3383627 [0.5436503, 2.9409494, 3.0435867, 2.8252642]\n",
            "----------- 4 2.665322 [0.47297964, 4.5350113, 5.0160937, 0.63720375]\n",
            "----------- 5 1.1749109 [0.49457368, 3.0391412, 0.5314649, 0.6344639]\n",
            "----------- 6 4.515865 [3.1955972, 7.380846, 4.317868, 3.1691482]\n",
            "----------- 7 3.0060768 [0.48280758, 6.405368, 0.59157217, 4.544559]\n",
            "----------- 8 2.8049726 [0.47285232, 2.9449687, 0.5356863, 7.266383]\n",
            "----------- 9 1.2798908 [0.741102, 2.8399186, 0.77493376, 0.7636087]\n",
            "----------- 10 1.2820382 [0.69970554, 3.0904496, 0.63325167, 0.70474607]\n",
            "----------- 11 2.962519 [0.4278536, 4.5380526, 6.266224, 0.61794597]\n",
            "----------- 12 3.182825 [0.4819531, 8.55946, 0.8070526, 2.8828354]\n",
            "----------- 13 3.605903 [0.4913268, 6.5359335, 4.999935, 2.396417]\n",
            "----------- 14 2.740058 [0.54361457, 2.5836465, 5.5235553, 2.3094163]\n",
            "----------- 15 6.772599 [6.7603326, 8.049665, 5.5924, 6.6879997]\n",
            "----------- 16 1.4681029 [1.3279539, 3.2857857, 0.6039699, 0.6547022]\n",
            "----------- 17 5.723872 [5.6197824, 6.501586, 7.962329, 2.8117895]\n",
            "----------- 18 8.821382 [6.989175, 9.054461, 11.158386, 8.083503]\n",
            "----------- 19 4.487301 [0.47633642, 7.56953, 7.7259836, 2.1773546]\n",
            "6 1.9241389 1.1749109 0.35637453 105 20\n",
            "----------- 0 5.563302 [0.5416731, 8.03251, 5.338444, 8.340583]\n",
            "----------- 1 5.9103003 [0.49172208, 7.6001053, 7.972028, 7.577347]\n",
            "----------- 2 2.5772943 [0.5054023, 6.534628, 2.565927, 0.7032203]\n",
            "----------- 3 3.149089 [2.9514372, 2.9636126, 4.853099, 1.8282076]\n",
            "----------- 4 5.3910804 [0.5104481, 7.3227873, 8.840729, 4.8903584]\n",
            "----------- 5 2.8454852 [0.519192, 6.0013995, 2.2649152, 2.5964339]\n",
            "----------- 6 5.9070616 [0.47018862, 10.039518, 5.3783193, 7.7402215]\n",
            "----------- 7 3.656711 [0.4781471, 2.7352362, 6.1736174, 5.239844]\n",
            "----------- 8 8.381494 [8.087517, 7.3926554, 10.748937, 7.2968645]\n",
            "----------- 9 5.4170775 [3.1999342, 7.563527, 2.8614478, 8.043402]\n",
            "----------- 10 2.9442503 [0.77243835, 6.1304655, 3.8895233, 0.98457426]\n",
            "----------- 11 5.116531 [0.5584655, 6.541425, 9.796945, 3.5692887]\n",
            "----------- 12 3.4091916 [0.5013325, 4.0858235, 3.5191205, 5.53049]\n",
            "----------- 13 3.9140825 [0.5068759, 7.5968504, 2.8186543, 4.73395]\n",
            "----------- 14 4.0137873 [0.5880374, 6.1081376, 4.78733, 4.5716434]\n",
            "----------- 15 3.6466699 [0.48415077, 7.088579, 3.2573853, 3.7565641]\n",
            "----------- 16 2.868372 [0.4403102, 3.0626845, 4.2559824, 3.7145107]\n",
            "----------- 17 6.698191 [2.7048624, 8.386596, 7.608087, 8.09322]\n",
            "----------- 18 2.0177314 [0.49044654, 6.0936966, 0.6455321, 0.84125084]\n",
            "----------- 19 4.445796 [0.49084687, 9.170093, 2.4752064, 5.647039]\n",
            "7 2.854764 2.0177314 0.37624437 105 20\n",
            "----------- 0 1.0931559 [0.49172053, 2.6824176, 0.5764521, 0.62203354]\n",
            "----------- 1 4.773432 [1.517017, 10.158834, 5.3810177, 2.0368586]\n",
            "----------- 2 1.613029 [0.9292759, 3.1221318, 1.5021861, 0.89852214]\n",
            "----------- 3 2.6226964 [1.8922995, 2.640792, 3.1363952, 2.8212986]\n",
            "----------- 4 3.0730336 [0.49155927, 7.6547313, 2.3609092, 1.7849343]\n",
            "----------- 5 6.935523 [7.507216, 7.1102643, 6.5306544, 6.5939574]\n",
            "----------- 6 4.0209823 [0.46951586, 7.1055694, 3.2235124, 5.2853317]\n",
            "----------- 7 5.5829983 [7.1345463, 3.6441324, 5.4739, 6.0794134]\n",
            "----------- 8 3.455373 [0.48668343, 3.0806482, 2.7548976, 7.499263]\n",
            "----------- 9 4.1937394 [2.2798746, 7.402249, 6.465774, 0.62705946]\n",
            "----------- 10 6.605816 [9.141533, 5.757394, 6.2058573, 5.31848]\n",
            "----------- 11 3.533581 [0.4979934, 4.1526065, 5.30537, 4.1783543]\n",
            "----------- 12 3.5613365 [0.49206477, 6.739398, 2.5934587, 4.4204245]\n",
            "----------- 13 3.6620011 [3.5900655, 3.119967, 7.2964883, 0.6414829]\n",
            "----------- 14 2.321041 [0.5420785, 2.5902116, 0.5915193, 5.5603547]\n",
            "----------- 15 6.608162 [0.69685817, 11.084266, 11.961578, 2.6899447]\n",
            "----------- 16 6.5043054 [3.85887, 7.975657, 8.475497, 5.707197]\n",
            "----------- 17 5.5829983 [7.1345463, 3.6441324, 5.4739, 6.0794134]\n",
            "----------- 18 4.681854 [4.05151, 2.8608103, 5.5413437, 6.273751]\n",
            "----------- 19 3.0863926 [0.4749892, 7.594475, 2.212759, 2.0633478]\n",
            "8 1.8283346 1.0931559 0.35270125 105 20\n",
            "----------- 0 7.171954 [6.307287, 8.116448, 8.80696, 5.4571204]\n",
            "----------- 1 2.5624762 [0.51132023, 3.0274832, 2.454945, 4.256156]\n",
            "----------- 2 3.8772728 [0.48861453, 7.6562514, 5.951251, 1.4129744]\n",
            "----------- 3 3.6076546 [0.47928056, 7.586958, 0.70100075, 5.663379]\n",
            "----------- 4 6.3786244 [5.0353436, 8.022774, 5.9298387, 6.5265427]\n",
            "----------- 5 2.8925982 [1.2213205, 2.6918755, 1.9224159, 5.7347803]\n",
            "----------- 6 3.9868257 [1.578602, 6.206632, 0.95245385, 7.2096148]\n",
            "----------- 7 2.8703656 [0.52009696, 2.3494925, 7.980505, 0.6313675]\n",
            "----------- 8 2.6793237 [0.49006915, 8.2979965, 0.5241353, 1.4050932]\n",
            "----------- 9 1.0168215 [0.46446496, 2.638379, 0.547157, 0.4172848]\n",
            "----------- 10 3.8984096 [0.48657328, 7.467347, 6.977629, 0.66208917]\n",
            "----------- 11 1.1310002 [0.49976665, 2.7513857, 0.5728694, 0.69997895]\n",
            "----------- 12 1.6584501 [0.4734316, 2.7602808, 2.7501695, 0.6499184]\n",
            "----------- 13 7.321125 [0.61161333, 10.463595, 6.7591476, 11.450143]\n",
            "----------- 14 6.9232044 [4.7297215, 9.2294235, 7.205658, 6.5280156]\n",
            "----------- 15 3.6137753 [0.48802245, 2.9645853, 5.6860676, 5.3164253]\n",
            "----------- 16 4.172943 [0.49637416, 8.188139, 2.3736038, 5.633655]\n",
            "----------- 17 2.35454 [0.4830108, 7.8507514, 0.53617835, 0.54821944]\n",
            "----------- 18 1.6584501 [0.4734316, 2.7602808, 2.7501695, 0.6499184]\n",
            "----------- 19 2.228596 [0.53493524, 3.9120338, 0.61403877, 3.8533762]\n",
            "9 1.7148367 1.0168215 0.3432635 105 20\n"
          ]
        }
      ]
    },
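    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "Longer semigreedy run: the inner loop now breaks as soon as a proposal beats the incumbent loss (first-improvement), which usually needs far fewer than 20 AlphaFold calls per step, and each accepted design is additionally scored with the held-out `model_4_ptm` (`model_params[-1]`) as a validation signal that never influences selection."
      ]
    },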
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "nMcwzOU7M2tg",
        "outputId": "db49d356-7c0c-4541-e6b8-ff66cd05e7b9"
      },
      "source": [
        "for n in range(300):\n",
        "  params_ = params.copy()\n",
        "  buff_p,buff_l,buff_o = [],[],[]\n",
        "  for _ in range(20):\n",
        "    key,subkey = jax.random.split(key)\n",
        "    do_indel = (INDELS and np.random.uniform() < 0.25)\n",
        "    p = mut(params, indel=do_indel)\n",
        "    l,o = loss_fn_multi(p, subkey, model_params_multi, opt)\n",
        "    buff_p.append(p); buff_l.append(l); buff_o.append(o)\n",
        "    if np.mean(l) < LOSS: break\n",
        "  best = np.argmin(np.asarray(buff_l).mean(-1))\n",
        "  params, LOSS, outs = buff_p[best], buff_l[best], buff_o[best]\n",
        "  LOSS = np.mean(LOSS)\n",
        "  RMSD = np.mean(outs[\"losses\"][\"rmsd\"])\n",
        "  FAPE = np.mean(outs[\"losses\"][\"fape\"])\n",
        "\n",
        "  outs = jax.tree_map(lambda x: x[0], outs)\n",
        "  if RMSD < OVERALL_RMSD:\n",
        "    OVERALL_RMSD = RMSD\n",
        "    save_pdb(outs,f\"{MODE}_best_rmsd.pdb\")\n",
        "  if FAPE < OVERALL_FAPE:\n",
        "    OVERALL_FAPE = FAPE\n",
        "    save_pdb(outs,f\"{MODE}_best_fape.pdb\")\n",
        "  if LOSS < OVERALL_LOSS:\n",
        "    OVERALL_LOSS = LOSS\n",
        "    save_pdb(outs,f\"{MODE}_best_loss.pdb\")\n",
        "  l4,o4 = loss_fn(params, subkey, model_params[-1], opt)\n",
        "  print(n, LOSS, RMSD, FAPE, (params[\"active_pos\"] > 0).sum(), len(buff_l), o4[\"losses\"][\"rmsd\"])"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "0 1.6684058 0.97566986 0.33917558 105 13 6.570461\n",
            "1 1.7213771 1.0266101 0.34318787 105 20 0.5521997\n",
            "2 1.7309557 1.0141158 0.35105625 105 20 2.0901546\n",
            "3 1.6896502 0.9780132 0.34791833 105 2 3.075305\n",
            "4 1.2836819 0.63118124 0.2795934 105 9 3.0850897\n",
            "5 1.0221133 0.47783357 0.26446223 105 2 0.58060026\n",
            "6 1.0106819 0.471611 0.2663944 105 17 0.579923\n",
            "7 0.9707656 0.44111815 0.26466346 105 8 0.5299034\n",
            "8 0.95611095 0.46036988 0.26778838 105 2 0.4459814\n",
            "9 0.94456077 0.44426697 0.2588649 105 7 0.51522875\n",
            "10 0.92737645 0.41471896 0.26362437 105 11 0.4391624\n",
            "11 0.9707828 0.43690652 0.2637179 105 20 3.0559409\n",
            "12 1.0268421 0.48600835 0.27266476 105 20 1.6863861\n",
            "13 1.000179 0.45579082 0.26858282 105 9 1.6001016\n",
            "14 1.0144405 0.46453598 0.2691497 105 20 3.1118512\n",
            "15 0.9921475 0.45157805 0.26792496 105 6 3.04773\n",
            "16 1.0790019 0.51705253 0.27363735 105 20 1.599278\n",
            "17 0.9422333 0.44149858 0.26067227 105 5 0.49108532\n",
            "18 0.95633763 0.4589308 0.26509196 105 20 0.46267003\n",
            "19 0.91339755 0.4367942 0.26139438 105 2 0.4516135\n",
            "20 0.9490597 0.44927266 0.25715116 105 20 1.9399705\n",
            "21 0.92148614 0.44103473 0.25952393 105 10 0.5380588\n",
            "22 0.9095851 0.44128704 0.26291054 105 5 0.52902484\n",
            "23 0.9001045 0.43394685 0.26216513 105 11 0.507325\n",
            "24 0.935823 0.44728646 0.26444185 105 20 0.5153226\n",
            "25 0.92956334 0.4358675 0.25304818 105 1 0.50120604\n",
            "26 0.93669957 0.44484594 0.25761187 105 20 0.5072669\n",
            "27 0.92630804 0.4272276 0.25512138 105 17 0.50545853\n",
            "28 0.90342486 0.40582314 0.25182638 105 7 0.42194045\n",
            "29 0.9023535 0.3999255 0.2542632 105 18 0.5808631\n",
            "30 0.8987059 0.39197487 0.25395167 105 4 0.698982\n",
            "31 0.89763 0.398355 0.2523234 105 8 0.7569108\n",
            "32 0.8851971 0.39835936 0.2517715 105 10 0.40553972\n",
            "33 0.87716496 0.39584976 0.2536103 105 18 0.43690002\n",
            "34 0.8672372 0.39135563 0.2522359 105 19 0.46205065\n",
            "35 0.86573654 0.40179157 0.24927694 105 5 0.43356445\n",
            "36 0.86034834 0.39331198 0.24897593 105 5 0.41553757\n",
            "37 0.85704434 0.3940274 0.24887043 105 15 0.42463255\n",
            "38 0.85812235 0.39109135 0.24861696 105 20 1.5544825\n",
            "39 0.8563244 0.38625145 0.2548745 105 2 0.46328327\n",
            "40 0.85393006 0.38619673 0.25590825 105 1 0.65370554\n",
            "41 0.8434426 0.37833232 0.25496525 105 3 0.6442375\n",
            "42 0.84568226 0.3767385 0.25646347 105 20 0.45805952\n",
            "43 0.8283565 0.36935914 0.25602132 105 16 0.43450886\n",
            "44 0.8259764 0.3721248 0.2545429 105 9 0.44600332\n",
            "45 0.8283949 0.37862396 0.25495532 105 20 0.4076944\n",
            "46 0.8460327 0.39139068 0.2576192 105 20 0.41672665\n",
            "47 0.85488296 0.38974053 0.25892863 105 20 0.4298103\n",
            "48 0.88770354 0.41136163 0.26714876 105 20 0.5820464\n",
            "49 0.850071 0.38693905 0.2618707 105 19 0.45211282\n",
            "50 0.87110484 0.40780997 0.25945115 105 20 0.61626506\n",
            "51 0.85944444 0.4052241 0.26254568 105 2 0.58453935\n",
            "52 0.8510729 0.39752704 0.25937673 105 17 0.42348662\n",
            "53 0.85884845 0.39929175 0.26002827 105 20 0.42013463\n",
            "54 0.8492986 0.39085233 0.2594124 105 20 0.4298053\n",
            "55 0.8502816 0.39487204 0.26038072 105 20 0.45310912\n",
            "56 0.8514839 0.396228 0.25912333 105 20 0.43081018\n",
            "57 0.84856915 0.3966666 0.25921145 105 1 0.42670247\n",
            "58 0.8441243 0.39192587 0.2584624 105 1 0.43249077\n",
            "59 0.8388046 0.39230713 0.25792277 105 9 0.4469592\n",
            "60 0.8518801 0.4112948 0.26125896 105 20 0.42879182\n",
            "61 0.8495097 0.40249667 0.2577355 105 5 0.41180924\n",
            "62 0.85121775 0.40240282 0.25863898 105 20 0.4375655\n",
            "63 0.84820503 0.39923126 0.26354426 105 4 0.4226636\n",
            "64 0.8493221 0.39893606 0.26149994 105 20 0.55794966\n",
            "65 0.8680916 0.41174316 0.26457256 105 20 0.52679855\n",
            "66 0.8690653 0.4129597 0.26212114 105 20 0.40440193\n",
            "67 0.8550012 0.3974012 0.25566924 105 2 0.61957014\n",
            "68 0.8269218 0.3811624 0.25568238 105 2 0.5771368\n",
            "69 0.82573533 0.38210166 0.2538772 105 3 0.3971571\n",
            "70 0.8257129 0.3830118 0.2558058 105 1 0.37792858\n",
            "71 0.82546926 0.38395628 0.2544651 105 12 0.6366667\n",
            "72 0.843058 0.39773583 0.25683063 105 20 0.47362173\n",
            "73 0.82866305 0.3778933 0.25027376 105 14 0.80487704\n",
            "74 0.82513636 0.3715942 0.24893484 105 11 0.62339985\n",
            "75 0.7955326 0.36016932 0.24934904 105 14 0.4288529\n",
            "76 0.7816151 0.35555938 0.2501549 105 4 0.37923443\n",
            "77 0.79697263 0.37449193 0.2569723 105 20 0.38451034\n",
            "78 0.79643744 0.37195808 0.2569681 105 8 0.3864982\n",
            "79 0.79316473 0.37074983 0.25291863 105 5 0.3956389\n",
            "80 0.79527825 0.37851173 0.255054 105 20 0.41184312\n",
            "81 0.80367917 0.38413125 0.2587978 105 20 0.38481775\n",
            "82 0.80195075 0.38171855 0.25619864 105 19 0.38471878\n",
            "83 0.79585415 0.37876022 0.25607145 105 3 0.38093916\n",
            "84 0.79297435 0.36952016 0.25707933 105 13 0.38803926\n",
            "85 0.7860149 0.36533368 0.25712195 105 5 0.39711148\n",
            "86 0.78051245 0.36108324 0.25746024 105 15 0.4006365\n",
            "87 0.7777182 0.3615132 0.2557983 105 2 0.4549559\n",
            "88 0.7739917 0.3581917 0.25417912 105 10 0.37290683\n",
            "89 0.774044 0.3581766 0.2542063 105 20 0.39311743\n",
            "90 0.7884384 0.37267008 0.2572489 105 20 0.3724221\n",
            "91 0.7724658 0.36318746 0.253304 105 15 0.46365902\n",
            "92 0.7829318 0.36188036 0.25197 105 20 0.73234975\n",
            "93 0.7881139 0.3734092 0.25267625 105 20 0.4020775\n",
            "94 0.7744282 0.36122805 0.25204524 105 14 0.83379424\n",
            "95 0.7931042 0.36954057 0.2545379 105 20 0.37395406\n",
            "96 0.8209839 0.38647577 0.25685614 105 20 0.37840688\n",
            "97 0.84438217 0.38822842 0.2526749 105 20 7.4663424\n",
            "98 0.84945333 0.39228576 0.25577798 105 20 2.3648822\n",
            "99 0.8253926 0.3976271 0.2493584 105 3 1.4438024\n",
            "100 0.81372565 0.38538033 0.2464183 105 20 1.6098862\n",
            "101 0.8090967 0.3809868 0.25952134 105 10 0.5458334\n",
            "102 0.81226456 0.38264075 0.26079932 105 20 1.8315157\n",
            "103 0.80271786 0.38448375 0.2629453 105 5 1.6406913\n",
            "104 0.7911093 0.37916207 0.26077878 105 11 1.5000534\n",
            "105 0.7949822 0.3803891 0.25980854 105 20 2.5178695\n",
            "106 0.79229176 0.3745945 0.26065707 105 14 1.8893733\n",
            "107 0.78998435 0.3735939 0.26069322 105 7 0.53828716\n",
            "108 0.79045296 0.3755261 0.25611162 105 20 0.49430197\n",
            "109 0.79229045 0.37573683 0.256774 105 20 0.5186832\n",
            "110 0.7838174 0.3678024 0.256809 105 19 0.45888138\n",
            "111 0.78609425 0.3710606 0.25731885 105 20 0.44479498\n",
            "112 0.7958678 0.3732134 0.25617442 105 20 0.45889676\n",
            "113 0.78859544 0.37344506 0.25625542 105 14 0.42856166\n",
            "114 0.7868535 0.37011522 0.25506067 105 8 0.5194302\n",
            "115 0.79009044 0.3758374 0.25532395 105 20 0.48774284\n",
            "116 0.7797909 0.36544997 0.25534868 105 1 0.4631959\n",
            "117 0.784542 0.36659348 0.25564831 105 20 0.44635403\n",
            "118 0.7873805 0.3621328 0.25646555 105 20 0.56861454\n",
            "119 0.7807896 0.36947665 0.25755095 105 11 0.5583043\n",
            "120 0.78442085 0.37247407 0.2567057 105 20 0.5963035\n",
            "121 0.7767699 0.37258375 0.25616622 105 16 0.62414336\n",
            "122 0.7766647 0.36842233 0.25467855 105 4 0.5682627\n",
            "123 0.7774848 0.36421263 0.25515088 105 20 0.62787694\n",
            "124 0.77390444 0.36387503 0.25549126 105 12 0.6466514\n",
            "125 0.7843654 0.3667697 0.25571302 105 20 0.66524696\n",
            "126 0.77834857 0.3649481 0.25825095 105 12 0.5677394\n",
            "127 0.76237154 0.3616419 0.2528265 105 7 0.6747014\n",
            "128 0.762183 0.361956 0.25155586 105 19 0.67311424\n",
            "129 0.77545625 0.3703637 0.25180376 105 20 0.5765867\n",
            "130 0.77570856 0.36720365 0.25119123 105 20 0.5863473\n",
            "131 0.7715299 0.3634779 0.24999247 105 12 0.5599069\n",
            "132 0.7726184 0.3661586 0.25005862 105 20 0.57318836\n",
            "133 0.77815336 0.3716631 0.25104138 105 20 0.37932914\n",
            "134 0.7815921 0.36497754 0.25097612 105 20 0.4990302\n",
            "135 0.77946424 0.3639908 0.25069007 105 1 0.41056108\n",
            "136 0.7800069 0.36707234 0.25195867 105 20 0.39035815\n",
            "137 0.7680088 0.35747153 0.25032467 105 4 0.5359447\n",
            "138 0.7680603 0.3517444 0.24826044 105 20 0.53611857\n",
            "139 0.7804315 0.3633203 0.24894942 105 20 0.48859638\n",
            "140 0.77528805 0.3598613 0.24950016 105 5 0.5153854\n",
            "141 0.7719278 0.3596449 0.25640878 105 6 0.63598144\n",
            "142 0.7580904 0.35209888 0.2516064 105 9 0.6285438\n",
            "143 0.7646548 0.35849902 0.25306997 105 20 0.6293292\n"
          ]
        }
      ]
    }
  ]
}