diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..726ca77a7db89cc31aafd394ce4581743b927974
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,32 @@
+ARG UV_VERSION="0.6.14"
+
+FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv-fetcher
+
+FROM python:3.11-slim AS runtime
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget build-essential git && \
+ rm -rf /var/lib/apt/lists/*
+
+COPY --from=uv-fetcher /uv /uvx /usr/local/bin/
+
+ENV VIRTUAL_ENV=/modules/.venv
+RUN uv venv "$VIRTUAL_ENV" && . "$VIRTUAL_ENV/bin/activate"
+ENV PATH="$VIRTUAL_ENV/bin:$PATH"
+
+# Set the working directory to the application directory
+WORKDIR /app
+
+COPY pyproject.toml uv.lock /app/
+
+RUN uv sync --frozen --active --directory /app --inexact
+
+COPY folding-studio /app/folding-studio
+RUN cd /app/folding-studio && uv pip install -e .
+
+COPY app.py /app/app.py
+
+EXPOSE 7860
+ENV GRADIO_SERVER_NAME="0.0.0.0"
+
+CMD ["python3", "app.py"]
diff --git a/README.md b/README.md
index 86e1b08eeb8b16e7d04f7402bb0d78fe87fdc8e1..f8723a5291d76a8f3d3358e978d1705472899e34 100644
--- a/README.md
+++ b/README.md
@@ -3,8 +3,7 @@ title: Fs
emoji: 🚀
colorFrom: indigo
colorTo: red
-sdk: gradio
-sdk_version: 5.30.0
+sdk: docker
app_file: app.py
pinned: false
short_description: folding studio test
diff --git a/folding-studio/CONTRIBUTING.md b/folding-studio/CONTRIBUTING.md
new file mode 100644
index 0000000000000000000000000000000000000000..3e0032a511858fa0218a89da780f4391ee0b5868
--- /dev/null
+++ b/folding-studio/CONTRIBUTING.md
@@ -0,0 +1,8 @@
+# Contributing to the Documentation
+
+## Serve the Documentation Locally
+```bash
+poetry install --with docs
+poetry run mkdocs serve
+```
+This will start a local server at http://127.0.0.1:8000.
diff --git a/folding-studio/README.md b/folding-studio/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/folding-studio/docs/app.yaml b/folding-studio/docs/app.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..6b7c41544e385852f4af64d28d734ad0dc78af92
--- /dev/null
+++ b/folding-studio/docs/app.yaml
@@ -0,0 +1,24 @@
+# This is the configuration file for the Google App Engine instance.
+# GAE is capable of hosting any sort of web app,
+# however, here we focus on the static website hosting capabilities.
+# https://cloud.google.com/appengine/docs/standard/hosting-a-static-website
+runtime: python39
+
+handlers:
+ # static files with a URL ending with a file extension
+ # (e.g. favicon.ico, manifest.json, jylade.png)
+ - url: /(.*\..+)$
+ static_files: site/\1
+ upload: site/(.*\..+)$
+ # index page
+ - url: /
+ static_files: site/index.html
+ upload: site/index.html
+ # anything that ends with a slash (e.g. /docs/)
+ - url: /(.*)/$
+ static_files: site/\1/index.html
+ upload: site/(.*)
+ # anything else (e.g. /docs)
+ - url: /(.*)
+ static_files: site/\1/index.html
+ upload: site/(.*)
diff --git a/folding-studio/docs/docs/css/main.css b/folding-studio/docs/docs/css/main.css
new file mode 100644
index 0000000000000000000000000000000000000000..186ed07fd4df763746942b25964ff6fd69e94510
--- /dev/null
+++ b/folding-studio/docs/docs/css/main.css
@@ -0,0 +1,8 @@
+.md-sidebar__inner .md-nav__title {
+ display: none !important;
+}
+
+/* Hide the copy button for .no-copy blocks */
+.no-copy .md-clipboard {
+ display: none;
+}
diff --git a/folding-studio/docs/docs/explanation/advanced_algorithms.md b/folding-studio/docs/docs/explanation/advanced_algorithms.md
new file mode 100644
index 0000000000000000000000000000000000000000..87b1b4f09c43fbbe85d5c2c1e086d768f0a4ba3a
--- /dev/null
+++ b/folding-studio/docs/docs/explanation/advanced_algorithms.md
@@ -0,0 +1,100 @@
+## Gap trick
+
+Gap Trick allows folding multimer complexes using the AlphaFold2/OpenFold **monomer**
+models.
+
+It is only available when using custom templates. Additionally, the provided
+templates **MUST** exclusively include chains that precisely correspond to the
+query sequences, no more, no less, and in the same order.
+
+## Initial guess
+
+The initial guess algorithm allows using a pre-defined structure in the first
+recycling stage of the AlphaFold2 forward pass. The original algorithm is
+described in [Bennett et al.](https://www.nature.com/articles/s41467-023-38328-5)
+and can be found in their [GitHub repo](https://github.com/nrbennet/dl_binder_design/blob/main/af2_initial_guess/).
+
+## MSA subsampling
+
+MSA subsampling allows changing the default number of MSA representations to
+give as input to the model.
+
+This feature is only supported for monomers.
+
+The impact of MSA subsampling has been studied by [D. Del Alamo et al.](https://elifesciences.org/articles/75751).
+
+!!! quote
+ "Whereas models of most proteins generated using the default AF2
+ pipeline are conformationally homogeneous and nearly identical to one
+ another, reducing the depth of the input multiple sequence alignments by
+ stochastic subsampling led to the generation of accurate models in multiple
+ conformations." - D. Del Alamo et al.
+
+## Template masking
+
+Template Masking enables the creation of template features by masking regions
+of the structures considered less important for resolving the multimer
+interface.
+
+This method is exclusively available in Gap Trick mode. Therefore
+the same constraints on the input template structures apply, i.e. they
+**MUST** exclusively include chains that precisely correspond to the query
+sequences, no more, no less, and in the same order.
+
+Consider an input `FASTA` file containing 3 chains of a multimer complex, for
+example an antigen chain and two antibody chains:
+
+```
+>Antigen
+VRFPNITNLCPFHEVFNATTFASVYAWNRKRISNCVADYSVIYNFAPFFAFKCYGVSPTKLNDLCFTNVYADSFVIRGNEVSQIAPGQTGNIADYNYKLPDDFTGCVIAWNSNKLDSKPSGNYNYLYRLFRKSKLKPFERDISTEIYQAGNKPCNGVAGPNCYSPLQSYGFRPTYGVGHQPYRVVVLSFELLHAPATVCGPK
+>Antibody | Chain 1
+DIQMTQSPSSLSASVGDRVTITCRASQSISSYLNWYQQKPGKAPKLLIYAASSLQSGVPSRFSGSGSGTDFTLTISSLQPEDFATYYCQQSYSTPGVTFGPGTKVDIK
+>Antibody | Chain 2
+QVQLVESGGGVVQPGRSLRLSCAASGFTFSSYDMHWVRQAPGKGLEWVAVISYDGSSKFYAESVKGRFTISRDNSKNTLYLQMNSLRAEETAVYYCVKDGEQLVPLFDYWGQGTLVTVSS
+```
+
+The user aims to resolve the binding interface between the antigen and antibody
+chains, which is a common scenario for applying the template masking algorithm.
+In this case, a single template file is used, and the masking pattern
+alternately masks the binding partners and the binding interface. To activate
+the template masking algorithm, the user must provide a template mask file that
+defines the masking pattern:
+
+```json
+{
+ "templates_masks": [
+ {
+ "template_name": "7si2_chothia_CGF.cif",
+ "masks": [
+ "----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
+ ]
+ },
+ {
+ "template_name": "7si2_chothia_CGF.cif",
+ "masks": [
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "------------------------------------------------------------------------------------------------------------",
+ "------------------------------------------------------------------------------------------------------------------------"
+ ]
+ },
+ {
+ "template_name": "7si2_chothia_CGF.cif",
+ "masks": [
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX--XXXXXX----------XX--XXXXXXXXXXXXXXXXXXXXX-XXXXXXXXXXXXXX---XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-----XXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXXXXXXXXXXXX----XX--X-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-XX----XXXXXXXXXXXXXXX"
+ ]
+ }
+ ]
+}
+```
+
+The `masks` fields must adhere to the following constraints:
+
+- Contain as many mask lines as chains in the input `FASTA` file,
+- Each mask line must have as many characters as the corresponding chain in the
+ input `FASTA` file,
+- Only contain `X` (residue is masked) or `-` (residue is not masked)
+ characters,
diff --git a/folding-studio/docs/docs/explanation/index.md b/folding-studio/docs/docs/explanation/index.md
new file mode 100644
index 0000000000000000000000000000000000000000..b3d256ccab7f3984fcba0f4a824e37209feeac6b
--- /dev/null
+++ b/folding-studio/docs/docs/explanation/index.md
@@ -0,0 +1,4 @@
+# Explanation
+This **Explanation section** provides deeper insights into the models and advanced algorithms supported in our custom folding CLI. Unlike the **How-to guides**, which focus on practical steps, these explanations cover the underlying principles that influence model selection and algorithm performance.
+
+You will find an overview of [each model](./supported_models.md), its strengths, and its limitations, as well as details on [advanced features](./advanced_algorithms.md) like MSA subsampling and template masking. The goal is to help you understand how these components fit together so you can make the right choices for your predictions.
diff --git a/folding-studio/docs/docs/explanation/supported_models.md b/folding-studio/docs/docs/explanation/supported_models.md
new file mode 100644
index 0000000000000000000000000000000000000000..1b51cb164928c83e570471884eeb180e2c13cc01
--- /dev/null
+++ b/folding-studio/docs/docs/explanation/supported_models.md
@@ -0,0 +1,125 @@
+This documentation provides an overview of the supported protein folding models
+in Folding Studio. Please refer to the official documentation page for more details on a model's features, how it was trained and how it is claimed to perform with respect to similar models. Users are encouraged to run their own benchmarking tests tailored to their use-cases.
+Folding Studio models can be organized into two categories: AlphaFold2-like and AlphaFold3-like architectures.
+
+- AlphaFold2-like models: This group includes **AlphaFold2**, **OpenFold**, and **SoloSeq**.
+- AlphaFold3-like models: This group includes **Boltz-1**, **Chai-1**, and **Protenix**.
+
+AlphaFold3 is the new state-of-the-art method for protein-antibody interface prediction, improving upon AlphaFold2. Key advancements include:
+
+- The replacement of AlphaFold2's Structure module with a Diffusion module, enhancing predictions without additional constraints.
+- An expanded vocabulary to support RNA, DNA, and other molecules like ligands.
+- A smaller MSA module and the removal of the MSA from the Pairformer module, which replaces AlphaFold2's Evoformer module.
+- Internal changes in the Pairformer module, including information flow from pairs to single representations.
+
+These modifications improve complex predictions and integrate recent ML advancements.
+
+!!! Data
+ The key difference between AlphaFold2-like models and AlphaFold3-like models input data is that while AlphaFold 2 primarily requires only a protein sequence as input, AlphaFold3 can accept a wider range of molecular information, including not just protein sequences but also details about other molecules like DNA, RNA, and ligands, allowing it to predict the structures of protein complexes involving these molecules.
+
+Please note that the MSA search process (applicable to all models with MSA support) runs on servers hosted by InstaDeep. This ensures complete confidentiality of all user inputs.
+
+## AlphaFold2
+
+[](https://github.com/google-deepmind/alphafold)
+[](https://www.nature.com/articles/s41586-021-03819-2)
+
+**Overview:** AlphaFold2 (AF2) is a deep learning model developed by DeepMind
+for high-accuracy protein structure prediction. It leverages evolutionary
+relationships and deep learning techniques to generate atomic-level protein
+structures.
+
+**Key Features:**
+
+- Uses multiple sequence alignments (MSAs) for improved accuracy.
+- Evoformer module for deep sequence and pairwise residue learning.
+- Structure module with iterative 3D refinement.
+- Suitable for high-precision structural biology and drug discovery.
+
+___
+
+## OpenFold
+
+[](https://github.com/aqlaboratory/openfold)
+[](https://www.nature.com/articles/s41592-024-02272-z)
+[](https://openfold.readthedocs.io/en/latest/index.html)
+
+**Overview:** OpenFold is an open-source reimplementation of AlphaFold2 in
+PyTorch, designed for efficiency and accessibility.
+
+**Key Features:**
+
+- Optimizations in the implementation which allow for faster inference time (at least for proteins of a few hundred residues) and lower RAM consumption.
+
+___
+
+## SoloSeq
+
+[](https://openfold.readthedocs.io/en/latest/Single_Sequence_Inference.html)
+
+**Overview:** SoloSeq is a modified version of OpenFold which replaces MSA features by ESM-1b sequence embeddings.
+
+**Key Features:**
+
+- MSA-free Prediction: Uses ESM-1b model embeddings to predict structure without multiple sequence alignments (MSA).
+- Flexible Optimization: Offers flags for controlling relaxation, output saving, and MMCIF file generation.
+
+___
+
+## Chai-1
+
+[](https://github.com/chaidiscovery/chai-lab)
+[](https://www.biorxiv.org/content/10.1101/2024.10.10.615955v1.full.pdf)
+
+**Overview:** Chai-1 is a multi-modal protein structure prediction model that
+supports proteins, RNA, DNA, and small molecules.
+
+**Key Features:**
+
+- Multi-modal capabilities for protein-protein and biomolecule interactions.
+- Transformer-based architecture with deep representation learning.
+- Best for users looking for an MSA-free alternative with high versatility.
+
+___
+
+## Boltz-1
+
+[](https://github.com/jwohlwend/boltz?)
+[](https://www.biorxiv.org/content/10.1101/2024.11.19.624167v1.full)
+
+**Overview:** Boltz-1 is a protein folding model developed by MIT Jameel Clinic,
+optimized for computational efficiency.
+
+**Key Features:**
+
+- Hybrid attention mechanisms improve residue interaction modeling.
+- Optimized inference for fast predictions.
+- Advanced multi-chain folding for complex biomolecular structures.
+- Ideal for researchers needing high-speed, scalable structure prediction.
+
+**Supported data format**
+
+| Feature | Fasta | YAML |
+| -------- |--------------------| ------- |
+| Polymers | :white_check_mark: | :white_check_mark: |
+| Smiles | :white_check_mark: | :white_check_mark: |
+| CCD code | :white_check_mark: | :white_check_mark: |
+| Custom MSA | :white_check_mark: | :white_check_mark: |
+| Modified Residues | :x: | :white_check_mark: |
+| Covalent bonds | :x: | :white_check_mark: |
+| Pocket conditioning | :x: | :white_check_mark: |
+
+
+___
+
+## Protenix
+
+[](https://github.com/bytedance/Protenix)
+[](https://www.biorxiv.org/content/10.1101/2025.01.08.631967v1.full-text)
+
+**Overview:** Protenix is an open-source alternative to AlphaFold3,
+developed for scalability and custom applications.
+
+**Key Features:**
+
+- Protenix only supports MSA search with MMSeqs2 and does not support template features.
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/gap_trick_job.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/gap_trick_job.md
new file mode 100644
index 0000000000000000000000000000000000000000..3e7b6029412fd2e7fb4fe5620dc906e704f9e4d6
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/gap_trick_job.md
@@ -0,0 +1,21 @@
+## Goal
+The Gap Trick enables the folding of multimer complexes using AlphaFold2/OpenFold **monomer** models.
+
+## Application
+
+It is only available when using custom templates (i.e. `"template_mode"` must be set to `"custom"`). Additionally, the provided templates **MUST** exclusively include chains that precisely correspond to the query sequences, no more, no less, and in the same order.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/file.fasta --project-code "your-project-code" --gap-trick --template_mode custom --custom_template /path/to/template_1.cif
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/file.fasta"), project_code="your-project-code", gap_trick=True, template_mode="custom", custom_template=[Path("/path/to/template_1.cif")], num_recycle=3, random_seed=0)
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/initial_guess_af2.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/initial_guess_af2.md
new file mode 100644
index 0000000000000000000000000000000000000000..ed032aebfeda3da82f06d1b7ee0039e6a8e9e706
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/initial_guess_af2.md
@@ -0,0 +1,150 @@
+## Goal
+
+The initial guess algorithm allows using a pre-defined structure in the first recycling stage of the AlphaFold2 forward pass. The original algorithm is described in [Bennett et al.](https://www.nature.com/articles/s41467-023-38328-5) and in the [GitHub repo](https://github.com/nrbennet/dl_binder_design/blob/main/af2_initial_guess/).
+
+!!! warning
+ Unlike the other features in the how-to guides, this algorithm is only available with the AlphaFold2 model.
+
+## Application
+
+The initial guess structure file **must be** in `.cif` format. This algorithm is only available when disabling MSA and templates (i.e. `"msa_mode"` and `"template_mode"` must both be set to `"none"`). Additionally, the provided initial guess file **MUST** exclusively include chains that precisely correspond to the query sequences, no more, no less.
+
+=== ":octicons-command-palette-16: CLI single job"
+
+ ```bash
+ folding predict af2 3bve.fasta --project-code "your-project-code" --initial-guess-file 3bve_dimer.cif --msa-mode none --template-mode none
+ ```
+
+=== ":octicons-command-palette-16: CLI batch job"
+
+ ```bash
+ folding predict af2 batch_jobs.csv --project-code "your-project-code"
+ ```
+
+ === ":material-table: CSV example"
+
+ ```csv
+ complex_id,description,fasta_sequence,num_recycle,random_seed,msa_mode,template_mode,initial_guess_file
+ 3bve-ig,chain A,PMLSKDIIKLLNEQVNKEMNSSNLYMSMSSWCYTHSLDGAGLFLFDHAAEEYEHAKKLIIFLNENNVPVQLTSISAPEHKFEGLTQIFQKAYEHEQHISESINNIVDHAIKSKDHATFNFLQWYVAEQHEEEVLFKDILDKIELIGNENHGLYLADQYVKGIAKSRK,3,145,none,none,initial_guess/3bve_dimer.cif
+ 3bve-ig,chain B,PMLSKDIIKLLNEQVNKEMNSSNLYMSMSSWCYTHSLDGAGLFLFDHAAEEYEHAKKLIIFLNENNVPVQLTSISAPEHKFEGLTQIFQKAYEHEQHISESINNIVDHAIKSKDHATFNFLQWYVAEQHEEEVLFKDILDKIELIGNENHGLYLADQYVKGIAKSRK,3,145,none,none,initial_guess/3bve_dimer.cif
+ 1hqk-ig,chain 1,MQIYEGKLTAEGLRFGIVASRFNHALVDRLVEGAIDCIVRHGGREEDITLVRVPGSWEIPVAAGELARKEDIDAVIAIGVLIRGATPHFDYIASEVSKGLANLSLELRKPITFGVITADTLEQAIERAGTKHGNKGWEAALSAIEMANLFKSLR,3,145,none,none,initial_guess/1hqk_pentamer.cif
+ 1hqk-ig,chain 2,MQIYEGKLTAEGLRFGIVASRFNHALVDRLVEGAIDCIVRHGGREEDITLVRVPGSWEIPVAAGELARKEDIDAVIAIGVLIRGATPHFDYIASEVSKGLANLSLELRKPITFGVITADTLEQAIERAGTKHGNKGWEAALSAIEMANLFKSLR,3,145,none,none,initial_guess/1hqk_pentamer.cif
+ 1hqk-ig,chain 3,MQIYEGKLTAEGLRFGIVASRFNHALVDRLVEGAIDCIVRHGGREEDITLVRVPGSWEIPVAAGELARKEDIDAVIAIGVLIRGATPHFDYIASEVSKGLANLSLELRKPITFGVITADTLEQAIERAGTKHGNKGWEAALSAIEMANLFKSLR,3,145,none,none,initial_guess/1hqk_pentamer.cif
+ 1hqk-ig,chain 4,MQIYEGKLTAEGLRFGIVASRFNHALVDRLVEGAIDCIVRHGGREEDITLVRVPGSWEIPVAAGELARKEDIDAVIAIGVLIRGATPHFDYIASEVSKGLANLSLELRKPITFGVITADTLEQAIERAGTKHGNKGWEAALSAIEMANLFKSLR,3,145,none,none,initial_guess/1hqk_pentamer.cif
+ 1hqk-ig,chain 5,MQIYEGKLTAEGLRFGIVASRFNHALVDRLVEGAIDCIVRHGGREEDITLVRVPGSWEIPVAAGELARKEDIDAVIAIGVLIRGATPHFDYIASEVSKGLANLSLELRKPITFGVITADTLEQAIERAGTKHGNKGWEAALSAIEMANLFKSLR,3,145,none,none,initial_guess/1hqk_pentamer.cif
+ ```
+
+
+=== ":material-language-python: script for single job"
+
+ To submit jobs with custom files programmatically, we use helper functions made available in the [`folding_studio` package](../../../tutorials/installation.md#cli-and-folding_studio-library) library.
+
+ ```python
+ import json
+ from pathlib import Path
+
+ from folding_studio import get_id_token
+ from folding_studio import single_job_prediction
+ from folding_studio_data_models import AF2Parameters, FeatureMode
+
+ parameters = AF2Parameters(
+ initial_guess_file="3bve_dimer.cif",
+ template_mode=FeatureMode.NONE,
+ msa_mode=FeatureMode.NONE,
+ random_seed=0,
+ num_recycle=3,
+ )
+
+ # Obtain the identity token from gcloud auth
+ identity_token = get_id_token()
+
+ try:
+ response = single_job_prediction(
+ identity_token=identity_token,
+ fasta_file=Path("3bve.fasta"),
+ parameters=parameters,
+ )
+ json.dump(response, open("submission_initial_guess.json", "w"))
+ except Exception as err:
+ print("Error during submission.")
+ print(err)
+ ```
+
+=== ":material-language-python: script for batch job"
+
+ To submit jobs with custom files programmatically, we use helper functions made available in the [`folding_studio` package](../../../tutorials/installation.md#cli-and-folding_studio-library) library.
+
+ ```python
+ import json
+ from pathlib import Path
+
+ from folding_studio import batch_prediction_from_file
+ from folding_studio import get_id_token
+ from folding_studio_data_models import (
+ AF2Parameters,
+ AF2Request,
+ BatchRequest,
+ FeatureMode,
+ Sequence,
+ )
+
+ requests = [
+ AF2Request(
+ complex_id="3bve-ig",
+ sequences=[
+ Sequence(
+ description="chain A",
+ fasta_sequence="PMLSKDIIKLLNEQVNKEMNSSNLYMSMSSWCYTHSLDGAGLFLFDHAAEEYEHAKKLIIFLNENNVPVQLTSISAPEHKFEGLTQIFQKAYEHEQHISESINNIVDHAIKSKDHATFNFLQWYVAEQHEEEVLFKDILDKIELIGNENHGLYLADQYVKGIAKSRK",
+ ),
+ Sequence(
+ description="chain B",
+ fasta_sequence="PMLSKDIIKLLNEQVNKEMNSSNLYMSMSSWCYTHSLDGAGLFLFDHAAEEYEHAKKLIIFLNENNVPVQLTSISAPEHKFEGLTQIFQKAYEHEQHISESINNIVDHAIKSKDHATFNFLQWYVAEQHEEEVLFKDILDKIELIGNENHGLYLADQYVKGIAKSRK",
+ ),
+ ],
+ parameters= AF2Parameters(
+ num_recycle=3,
+ random_seed=145,
+ msa_mode=FeatureMode.NONE,
+ template_mode=FeatureMode.NONE,
+ initial_guess_file="3bve_dimer.cif",
+ ),
+ ),
+ AF2Request(
+ complex_id="1hqk-ig",
+ sequences=[
+ Sequence(
+ description=f"chain {i + 1}",
+ fasta_sequence="MQIYEGKLTAEGLRFGIVASRFNHALVDRLVEGAIDCIVRHGGREEDITLVRVPGSWEIPVAAGELARKEDIDAVIAIGVLIRGATPHFDYIASEVSKGLANLSLELRKPITFGVITADTLEQAIERAGTKHGNKGWEAALSAIEMANLFKSLR",
+ )
+ for i in range(5)
+ ],
+ parameters= AF2Parameters(
+ num_recycle=3,
+ random_seed=145,
+ msa_mode=FeatureMode.NONE,
+ template_mode=FeatureMode.NONE,
+ initial_guess_file="1hqk_pentamer.cif",
+ ),
+ ),
+ ]
+
+ # Build and validate the request
+ batch_request = BatchRequest(requests=requests)
+
+ # Prepare the batch request file for submission
+ json_data = batch_request.model_dump_json()
+ batch_file = Path("batch_request.json")
+ batch_file.write_text(json_data)
+
+ # Obtain the identity token from gcloud auth
+ identity_token = get_id_token()
+
+ try:
+ response = batch_prediction_from_file(
+ identity_token=identity_token, file=batch_file
+ )
+ json.dump(response, open("submission_batch.json", "w"))
+ except Exception as err:
+ print("Error during batch submission.")
+ print(err)
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/msa_subsampling_job.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/msa_subsampling_job.md
new file mode 100644
index 0000000000000000000000000000000000000000..e722c24c30f192159766e6ec2472b9b4345bacec
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/msa_subsampling_job.md
@@ -0,0 +1,37 @@
+## Goal
+MSA subsampling allows changing the default number of MSA representations to give as input to the model.
+
+This feature is only supported for monomers.
+
+The impact of MSA subsampling has been studied by [D. Del Alamo et al.](https://elifesciences.org/articles/75751).
+
+!!! quote
+ "Whereas models of most proteins generated using the default AF2 pipeline are conformationally homogeneous and nearly identical to one another, reducing the depth of the input multiple sequence alignments by stochastic subsampling led to the generation of accurate models in multiple conformations."
+ - D. Del Alamo et al.
+
+## Application
+To enable MSA subsampling, you can modify the `max_extra_msa` and `max_msa_cluster` parameters.
+
+- `max_extra_msa` : maximum number of non-clustered MSA representation to use as input
+- `max_msa_cluster` : maximum number of clustered MSA representation to use as input
+
+By default `max_msa_cluster` will be half the value of `max_extra_msa`, up to 512.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/file.fasta --max-extra-msa 124 --max-msa-cluster 32
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/file.fasta"), max_extra_msa=124, max_msa_cluster=32)
+ ```
+
+
+!!! warning
+ The specified `max_extra_msa` or `max_msa_cluster` values are applied to all AlphaFold2/OpenFold models.
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/template_masking_job.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/template_masking_job.md
new file mode 100644
index 0000000000000000000000000000000000000000..b51a3693fe896f41af72921bb6572e1914a3d85a
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/advanced_algorithms/template_masking_job.md
@@ -0,0 +1,549 @@
+## Goal
+Template Masking enables the creation of template features by masking regions
+of the structures considered less important for resolving the multimer
+interface.
+
+## Application
+This method is exclusively available in Gap Trick mode. Therefore
+the same constraints on the input template structures apply, i.e. they
+**MUST** exclusively include chains that precisely correspond to the query
+sequences, no more, no less, and in the same order.
+
+Consider an input `FASTA` file containing 3 chains of a multimer complex, for example an antigen chain and two antibody chains:
+
+!!! example
+ ```title="antibody_antigen.fasta"
+ >Antigen
+ VRFPNITNLCPFHEVFNATTFASVYAWNRKRISNCVADYSVIYNFAPFFAFKCYGVSPTKLNDLCFTNVYADSFVIRGNEVSQIAPGQTGNIADYNYKLPDDFTGCVIAWNSNKLDSKPSGNYNYLYRLFRKSKLKPFERDISTEIYQAGNKPCNGVAGPNCYSPLQSYGFRPTYGVGHQPYRVVVLSFELLHAPATVCGPK
+ >Antibody | Chain 1
+ DIQMTQSPSSLSASVGDRVTITCRASQSISSYLNWYQQKPGKAPKLLIYAASSLQSGVPSRFSGSGSGTDFTLTISSLQPEDFATYYCQQSYSTPGVTFGPGTKVDIK
+ >Antibody | Chain 2
+ QVQLVESGGGVVQPGRSLRLSCAASGFTFSSYDMHWVRQAPGKGLEWVAVISYDGSSKFYAESVKGRFTISRDNSKNTLYLQMNSLRAEETAVYYCVKDGEQLVPLFDYWGQGTLVTVSS
+ ```
+
+The user aims to resolve the binding interface between the antigen and antibody
+chains, which is a common scenario for applying the template masking algorithm.
+In this case, a single template file is used, and the masking pattern
+alternately masks the binding partners and the binding interface. To
+activate the template masking algorithm, the user must provide a template mask
+file that defines the masking pattern:
+
+!!! example
+
+ ```json title="masks.json"
+ {
+ "templates_masks": [
+ {
+ "template_name": "7si2_chothia_CGF.cif",
+ "masks": [
+ "----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
+ ]
+ },
+ {
+ "template_name": "7si2_chothia_CGF.cif",
+ "masks": [
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "------------------------------------------------------------------------------------------------------------",
+ "------------------------------------------------------------------------------------------------------------------------"
+ ]
+ },
+ {
+ "template_name": "7si2_chothia_CGF.cif",
+ "masks": [
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX--XXXXXX----------XX--XXXXXXXXXXXXXXXXXXXXX-XXXXXXXXXXXXXX---XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-----XXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXXXXXXXXXXXX----XX--X-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-XX----XXXXXXXXXXXXXXX"
+ ]
+ }
+ ]
+ }
+ ```
+The `masks` fields must adhere to the following constraints:
+
+- Contain as many mask lines as chains in the input `FASTA` file,
+- Each mask line must have as many characters as the corresponding chain in the input `FASTA` file,
+- Only contain `X` (residue is masked) or `-` (residue is not masked) characters.
+
+## Submission
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 antibody_antigen.fasta --gap-trick --template-mode custom --custom-template 7si2_chothia_CGF.cif --templates-masks-file masks.json
+ ```
+
+=== ":octicons-command-palette-16: CLI batch job"
+
+ ```bash
+ folding predict af2 batch_jobs.csv
+ ```
+ === ":material-table: CSV example"
+
+ ```csv
+ complex_id,description,fasta_sequence,num_recycle,random_seed,gap_trick,msa_mode,template_mode,custom_templates,templates_masks_file
+ Antibody + Antigen,Antigen,VRFPNITNLCPFHEVFNATTFASVYAWNRKRISNCVADYSVIYNFAPFFAFKCYGVSPTKLNDLCFTNVYADSFVIRGNEVSQIAPGQTGNIADYNYKLPDDFTGCVIAWNSNKLDSKPSGNYNYLYRLFRKSKLKPFERDISTEIYQAGNKPCNGVAGPNCYSPLQSYGFRPTYGVGHQPYRVVVLSFELLHAPATVCGPK,3,145,1,none,custom,['templates/7si2_chothia_CGF.cif'],templates_masks/ab_ag_mask_0.json
+    Antibody + Antigen,Antibody | Chain 1,DIQMTQSPSSLSASVGDRVTITCRASQSISSYLNWYQQKPGKAPKLLIYAASSLQSGVPSRFSGSGSGTDFTLTISSLQPEDFATYYCQQSYSTPGVTFGPGTKVDIK,3,145,1,none,custom,['templates/7si2_chothia_CGF.cif'],templates_masks/ab_ag_mask_0.json
+ Antibody + Antigen,Antibody | Chain 2,QVQLVESGGGVVQPGRSLRLSCAASGFTFSSYDMHWVRQAPGKGLEWVAVISYDGSSKFYAESVKGRFTISRDNSKNTLYLQMNSLRAEETAVYYCVKDGEQLVPLFDYWGQGTLVTVSS,3,145,1,none,custom,['templates/7si2_chothia_CGF.cif'],templates_masks/ab_ag_mask_0.json
+ ```
+
+=== ":material-language-python: script for single job"
+
+ To submit jobs with custom files programmatically, we use helper functions made available in the [`folding_studio` package](../../../tutorials/installation.md#cli-and-folding_studio-library) library.
+
+ ```python
+ import json
+ from pathlib import Path
+
+ from folding_studio import get_id_token
+ from folding_studio import single_job_prediction
+ from folding_studio_data_models import AF2Parameters, FeatureMode
+
+ parameters = AF2Parameters(
+ custom_templates=["7si2_chothia_CGF.cif"],
+ template_mode=FeatureMode.CUSTOM,
+ gap_trick=True,
+ templates_masks_file="ab_ag_mask_0.json",
+ random_seed=0,
+ num_recycle=3,
+ )
+
+ # Obtain the identity token from gcloud auth
+ identity_token = get_id_token()
+
+ try:
+ response = single_job_prediction(
+ identity_token=identity_token,
+ fasta_file=Path("ab_ag.fasta"),
+ parameters=parameters,
+ ignore_cache=True,
+ )
+ json.dump(response, open("submission_tm.json", "w"))
+ except Exception as err:
+ print("Error during submission.")
+ print(err)
+ ```
+
+=== ":material-language-python: script for batch job"
+
+ To submit jobs with custom files programmatically, we use helper functions made available in the [`folding_studio` package](../../../tutorials/installation.md#cli-and-folding_studio-library) library.
+
+ ```python
+ import json
+ from pathlib import Path
+
+ from folding_studio import batch_prediction_from_file
+ from folding_studio import get_id_token
+ from folding_studio_data_models import (
+ AF2Parameters,
+ AF2Request,
+ BatchRequest,
+ FeatureMode,
+ Sequence,
+ )
+
+ requests = [
+ AF2Request(
+ complex_id="Antibody + Antigen",
+ sequences=[
+ Sequence(
+ description="Antigen",
+ fasta_sequence="VRFPNITNLCPFHEVFNATTFASVYAWNRKRISNCVADYSVIYNFAPFFAFKCYGVSPTKLNDLCFTNVYADSFVIRGNEVSQIAPGQTGNIADYNYKLPDDFTGCVIAWNSNKLDSKPSGNYNYLYRLFRKSKLKPFERDISTEIYQAGNKPCNGVAGPNCYSPLQSYGFRPTYGVGHQPYRVVVLSFELLHAPATVCGPK",
+ ),
+ Sequence(
+ description="Antibody | Chain 1",
+ fasta_sequence="DIQMTQSPSSLSASVGDRVTITCRASQSISSYLNWYQQKPGKAPKLLIYAASSLQSGVPSRFSGSGSGTDFTLTISSLQPEDFATYYCQQSYSTPGVTFGPGTKVDIK",
+ ),
+ Sequence(
+ description="Antibody | Chain 2",
+ fasta_sequence="QVQLVESGGGVVQPGRSLRLSCAASGFTFSSYDMHWVRQAPGKGLEWVAVISYDGSSKFYAESVKGRFTISRDNSKNTLYLQMNSLRAEETAVYYCVKDGEQLVPLFDYWGQGTLVTVSS",
+ ),
+ ],
+ parameters= AF2Parameters(
+ template_mode=FeatureMode.CUSTOM,
+ gap_trick=True,
+ custom_templates=["templates/7si2_chothia_CGF.cif"],
+ templates_masks_file="templates_masks/ab_ag_mask_0.json",
+ ),
+ ),
+ AF2Request(
+ complex_id="Mutated Antibody + Antigen",
+ sequences=[
+ Sequence(
+ description="Antigen",
+ fasta_sequence="VRFPNITNLCPFHEVFNATTFASVYAWNRKRISNCVADYSVIYNFAPFFAFKCYGVSPTKLNDLCFTNVYADSFVIRGNEVSQIAPGQTGNIADYNYKLPDDFTGCVIAWNSNKLDSKPSGNYNYLYRLFRKSKLKPFERDISTEIYQAGNKPCNGVAGPNCYSPLQSYGFRPTYGVGHQPYRVVVLSFELLHAPATVCGPK",
+ ),
+ Sequence(
+ description="Antibody | Chain 1",
+ fasta_sequence="DIQMTQSPSSLSASVGDRVTITCRASQSISSYLNWYQQKPGKAPKLLIYAASSLQSGVPSRFSGSGSGTDFTLTISSLQPEDFATYYCQQSYSTPGVTFGPGTKVDIM",
+ ),
+ Sequence(
+ description="Antibody | Chain 2",
+ fasta_sequence="QVQLVESGGGVVQPGRSLRLSCAASGFTFSSYDMHWVRQAPGKGLEWVAVISYDGSSKFYAESVKGRFTISRDNSKNTLYLQMNSLRAEETAVYYCVKDGEQLVPLFDYWGQGTLVTVSM",
+ ),
+ ],
+ parameters= AF2Parameters(
+ template_mode=FeatureMode.CUSTOM,
+ gap_trick=True,
+ custom_templates=["templates/7si2_chothia_CGF.cif"],
+ templates_masks_file="templates_masks/ab_ag_mask_0.json",
+ ),
+ ),
+ ]
+ # Build and validate the request
+ batch_request = BatchRequest(requests=requests)
+
+ # Prepare the batch request file for submission
+ json_data = batch_request.model_dump_json()
+ batch_file = Path("batch_request.json")
+ batch_file.write_text(json_data)
+
+ # Obtain the identity token from gcloud auth
+ identity_token = get_id_token()
+
+ try:
+ response = batch_prediction_from_file(
+ identity_token=identity_token, file=batch_file
+ )
+ json.dump(response, open("submission_batch.json", "w"))
+ except Exception as err:
+ print("Error during batch submission.")
+ print(err)
+ ```
+
+## Generate mask
+
+We provide the following guide for generating the mask file for binding
+partners: A (antigen chain) and B and C (antibody chains). This process
+requires the `bioblocks` package. Please refer to this
+[page](https://instadeep.gitlab.io/bioai-group/BioBlocks/setup/setup.html) for
+installation instructions.
+
+ ```python
+
+ from __future__ import annotations
+ import numpy as np
+ from pathlib import Path
+ import warnings
+ import json
+
+ from bioblocks.blocks import Model, Residue, Chain
+ from bioblocks.io import read_model
+ from bioblocks.sequence import get_alignment_score, align_sequences
+
+
+ THRESHOLD: float = 10.0
+ MASK_TOKEN: str = "X"
+ KEEP_TOKEN: str = "-"
+
+
+ def get_ca_atom_coord(residue: Residue) -> np.ndarray:
+ """Get CA atoms or None if not present.
+
+ Args:
+ residue (Residue): Residue entity.
+
+ Returns:
+ np.ndarray: Array of coordinates
+ """
+ try:
+ coords = np.array(residue["CA"].coord)
+ except:
+ chain_id = residue.parent.id
+ model_id = residue.parent.parent.id
+ warnings.warn(
+ f"{residue.id}, chain {chain_id}, model {model_id} misses CA atom"
+ )
+ coords = np.array([None, None, None])
+ return coords
+
+
+ def fix_nan_ca_coords(ca_atoms: np.ndarray) -> np.ndarray:
+ """Interpolate missing CA atom coordinates.
+
+ Args:
+ ca_atoms (np.ndarray): CA atom coordinates.
+
+ Returns:
+ np.ndarray: Updated atom coordinates without missing entries.
+ """
+ nan_entries = np.unique(np.where(ca_atoms == None)[0])
+ if nan_entries.size == 0:
+ return ca_atoms
+ ca_atoms_out = ca_atoms.copy()
+ for idx in nan_entries:
+ if idx == 0:
+ ca_atoms_out[idx, :] = ca_atoms_out[idx + 1, :]
+ else:
+ ca_atoms_out[idx, :] = ca_atoms_out[idx - 1, :]
+ ca_atoms_out = ca_atoms_out.astype(np.float32)
+ return ca_atoms_out
+
+
+ def compute_chain_contact_mask(
+ chain: Chain, docking_partner_ca_atoms: np.ndarray, threshold: float = THRESHOLD
+ ) -> str:
+ """Compute contact mask for a chain.
+
+ Args:
+ chain (Chain): Chain entity of a loaded structure.
+ docking_partner_ca_atoms (np.ndarray): Selected atoms of the docking partner counter to
+ the current chain.
+ threshold (float): Threshold on CA-CA distance.
+
+ Returns:
+ str: String defining contact residues for the chain.
+ """
+ chain_atoms = np.array([get_ca_atom_coord(r) for r in chain])
+ chain_atoms = fix_nan_ca_coords(chain_atoms)
+
+ distances = np.linalg.norm(
+ chain_atoms[:, None, :] - docking_partner_ca_atoms[None, :, :], axis=-1
+ )
+ distance_mask = (distances < threshold).any(axis=-1)
+ mask = "".join(
+ KEEP_TOKEN if distance_mask_i else MASK_TOKEN
+ for distance_mask_i in distance_mask
+ )
+ return mask
+
+
+ def generate_contact_masks_from_template(
+ model: Model, docking_partners: str, threshold: float = THRESHOLD
+ ) -> dict[str, np.ndarray]:
+ """Generate contact masks from a template.
+
+ Args:
+ model (Model): Model entity of a loaded structure.
+ docking_partners (str): Docking partners split by "_", e.g. A_BC.
+ threshold (float): Threshold to compute contact residues based on CA distances.
+
+ Returns:
+            dict[str, np.ndarray]: Contact map for each chain.
+ """
+ left_partners, right_partners = docking_partners.split("_")
+ left_partner_atoms = np.array(
+ [get_ca_atom_coord(r) for chain_id in left_partners for r in model[chain_id]]
+ )
+ right_partner_atoms = np.array(
+ [get_ca_atom_coord(r) for chain_id in right_partners for r in model[chain_id]]
+ )
+ left_partner_atoms = fix_nan_ca_coords(left_partner_atoms)
+ right_partner_atoms = fix_nan_ca_coords(right_partner_atoms)
+ mask_left = {
+ chain_id: compute_chain_contact_mask(
+ model[chain_id], right_partner_atoms, threshold
+ )
+ for chain_id in left_partners
+ }
+ mask_right = {
+ chain_id: compute_chain_contact_mask(
+ model[chain_id], left_partner_atoms, threshold
+ )
+ for chain_id in right_partners
+ }
+ return {**mask_left, **mask_right}
+
+
+ def parse_fasta_as_dict(fasta_string: str) -> dict[str, str]:
+ """Parses FASTA string and returns dictionary of description and sequence.
+
+ Args:
+ fasta_string (str): The string contents of a FASTA file.
+
+ Returns:
+ dict[str, str]: Mapping between description and sequences of the FASTA string.
+ """
+ sequences = []
+ descriptions = []
+ index = -1
+ for line in fasta_string.splitlines():
+ line = line.strip()
+ if line.startswith(">"):
+ index += 1
+ descriptions.append(line[1:]) # Remove the '>' at the beginning.
+ sequences.append("")
+ continue
+ elif not line:
+ continue # Skip blank lines.
+ sequences[index] += line
+
+ return {d: s for d, s in zip(descriptions, sequences)}
+
+
+ def fasta_to_template_chains(fasta_map: dict[str, str], model: Model) -> dict[str, str]:
+ """Map fasta sequences to template model chain labels based on sequence similarity.
+
+ Note:
+ Template structure must have at least the same number of chains as a number of query fasta sequences.
+
+ Args:
+ fasta_map (dict[str, str]): Mapping between description and sequences.
+ model (Model): Model entity of a loaded structure.
+
+ Returns:
+ dict[str, str | None]: Mapping between model descriptions and structure chain labels.
+ """
+ chain_dict = {
+ chain.id: chain.sequence.replace("X", "") for chain in model.get_chains()
+ }
+ output_mapping = {desc_i: None for desc_i in fasta_map}
+ for desc_i, seq_i in fasta_map.items():
+ if chain_dict:
+ scores = [(k, get_alignment_score(v, seq_i)) for k, v in chain_dict.items()]
+ scores.sort(key=lambda x: x[1])
+ output_mapping[desc_i] = scores[-1][0]
+ del chain_dict[scores[-1][0]]
+ return output_mapping
+
+
+ def get_alignment_index_mapping(
+ seq_1: str, seq_2: str, aligned: bool = True
+ ) -> dict[int, int]:
+ """Mapping of aligned residue indices between two sequences.
+
+ Args:
+ seq_1 (str): First sequence.
+ seq_2 (str): Second sequence.
+ aligned (bool): Flag denoting whether the sequences are aligned or not.
+
+ Returns:
+ dict[int, int]: Dictionary containing the mapping between ``seq_1`` and ``seq_2``
+ indices. The keys correspond to the residue indices in the original target sequence,
+ the values correspond to the residue indices in the original template sequence.
+ """
+ index_mapping: dict[int, int] = {}
+ res_idx_1, res_idx_2 = 0, 0
+ for res_1, res_2 in (
+ zip(seq_1, seq_2) if aligned else zip(align_sequences(seq_1, seq_2))
+ ):
+ # If the chains do not contain the same residues, ignore
+ if res_1 == "-" or res_2 == "-":
+ res_idx_1 += res_1 != "-"
+ res_idx_2 += res_2 != "-"
+ continue
+
+ index_mapping[res_idx_1] = res_idx_2
+ res_idx_1 += 1
+ res_idx_2 += 1
+ return index_mapping
+
+
+ def get_sequence_contact_mask(
+ fasta_map: dict[str, str],
+ model: Model,
+ docking_partners: str,
+ threshold: float = THRESHOLD,
+ not_mapped_fill_token: str = MASK_TOKEN,
+ ) -> dict[str, str]:
+ """Generate contact mask for each sequence in the fasta file corresponding to a template.
+
+ Args:
+ fasta_map (dict[str, str]): Mapping between description and sequences.
+ model (Model): Model entity of a loaded structure.
+ docking_partners (str): Docking partners split by "_", e.g. A_BC.
+ threshold (float): Threshold to compute contact residues based on CA distances.
+ not_mapped_fill_token (str): Fill token for sequences that were not mapped to template.
+
+ Returns:
+ dict[str,str]: Mapping between fasta descriptions and sequence contact maps.
+ """
+ fasta2template = fasta_to_template_chains(fasta_map, model)
+ template_contact_masks = generate_contact_masks_from_template(
+ model, docking_partners, threshold
+ )
+ contact_masks: dict[str, str] = {}
+ for desc_i, chain_id in fasta2template.items():
+ fasta_seq = fasta_map[desc_i]
+ if chain_id:
+ template_seq = model[chain_id].sequence.replace("X", "")
+ aligned_sequences = align_sequences(fasta_seq, template_seq)
+ chain_mask = template_contact_masks[chain_id]
+ idx_mapping = get_alignment_index_mapping(*aligned_sequences)
+ fasta_seq_mask = "".join(
+ not_mapped_fill_token
+ if i not in idx_mapping
+ else chain_mask[idx_mapping[i]]
+ for i in range(len(fasta_seq))
+ )
+ else:
+ fasta_seq_mask = not_mapped_fill_token * len(fasta_seq)
+ contact_masks[desc_i] = fasta_seq_mask
+ return contact_masks
+
+
+ def get_partners_masks(
+ fasta_map: dict[str, str],
+ model: Model,
+ docking_partners: str,
+ not_mapped_fill_token: str = MASK_TOKEN,
+ ) -> list[dict[str, str]]:
+ """Generate masks for chains of each docking partner.
+
+ Args:
+ fasta_map (dict[str, str]): Mapping between description and sequences.
+ model (Model): Model entity of a loaded structure.
+ docking_partners (str): Docking partners split by "_", e.g. A_BC.
+ not_mapped_fill_token (str): Fill token for sequences that were not mapped to template.
+
+ Returns:
+ list[dict[str,str]]: List of mappings for each docking partner.
+ """
+ fasta2template = fasta_to_template_chains(fasta_map, model)
+ fasta2template_inv = {v: k for k, v in fasta2template.items() if v}
+ docking_partners_split = docking_partners.split("_")
+ output_masks: list[dict[str, str]] = []
+ for dp_i in docking_partners_split:
+ output_masks_i: dict[str, str] = {
+ k: not_mapped_fill_token * len(fasta_map[k]) for k in fasta2template
+ }
+ for chain_i in fasta2template_inv:
+ desc = fasta2template_inv[chain_i]
+ if chain_i in dp_i:
+ output_masks_i[desc] = KEEP_TOKEN * len(fasta_map[desc])
+ else:
+ output_masks_i[desc] = MASK_TOKEN * len(fasta_map[desc])
+ output_masks.append(output_masks_i)
+ return output_masks
+
+
+ if __name__ == "__main__":
+ model_path = Path("7si2_chothia_CGF.cif")
+ docking_partners = "A_BC"
+
+ model = read_model(model_path)
+
+ fasta = (
+ ">Antigen\n"
+ "VRFPNITNLCPFHEVFNATTFASVYAWNRKRISNCVADYSVIYNFAPFFAFKCYGVSPTKLNDLCFTNVYADSFVIRGNEVSQIAPGQTGNIADYNYKLPDDFTGCVIAWNSNKLDSKPSGNYNYLYRLFRKSKLKPFERDISTEIYQAGNKPCNGVAGPNCYSPLQSYGFRPTYGVGHQPYRVVVLSFELLHAPATVCGPK\n"
+ ">Antibody | Chain 1\n"
+ "DIQMTQSPSSLSASVGDRVTITCRASQSISSYLNWYQQKPGKAPKLLIYAASSLQSGVPSRFSGSGSGTDFTLTISSLQPEDFATYYCQQSYSTPGVTFGPGTKVDIK\n"
+ ">Antibody | Chain 2\n"
+ "QVQLVESGGGVVQPGRSLRLSCAASGFTFSSYDMHWVRQAPGKGLEWVAVISYDGSSKFYAESVKGRFTISRDNSKNTLYLQMNSLRAEETAVYYCVKDGEQLVPLFDYWGQGTLVTVSS"
+ )
+
+ fasta_map = parse_fasta_as_dict(fasta_string=fasta)
+
+ contact_masks = get_sequence_contact_mask(fasta_map, model, docking_partners)
+ partners_masks = get_partners_masks(fasta_map, model, docking_partners)
+
+ output_dict = {"templates_masks": []}
+ for partner_mask_i in partners_masks:
+ output_dict["templates_masks"].append(
+ {
+ "template_name": model_path.name,
+ "masks": list(partner_mask_i.values()),
+ }
+ )
+ output_dict["templates_masks"].append(
+ {
+ "template_name": model_path.name,
+ "masks": list(contact_masks.values()),
+ }
+ )
+
+ mask_file = "masks.json"
+ with open(mask_file, "w") as f:
+ json.dump(output_dict, f, indent=2)
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/batch_job_from_configuration_file.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/batch_job_from_configuration_file.md
new file mode 100644
index 0000000000000000000000000000000000000000..bf1d14ba8eda98c9dc5fc965328b926076e213b7
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/batch_job_from_configuration_file.md
@@ -0,0 +1,204 @@
+## Goal
+
+A batch job allows you to submit multiple jobs at once. This avoids making too many API calls.
+To submit a batch job through the folding CLI, you can pass a configuration file in `json` or `csv` format.
+
+
+## Defining a batch job configuration file
+A common way of submitting a batch job is via a configuration file in `json` or `csv` format.
+When submitting a batch job through a configuration file, all the options passed to the predict command will be ignored.
+
+### JSON
+
+For a `json` format configuration file, its structure needs to be the same as a [`BatchRequest`](https://folding-studio-data-models-dot-int-bio-foldingstudio-gcp.nw.r.appspot.com/request/#folding_studio_data_models.request.BatchRequest) object.
+
+=== "json"
+
+ ```json
+ {
+ "requests": [
+ {
+ "complex_id": "mono_2LIS_auto_msa_custom_template",
+ "sequences": [
+ {
+ "description": ">mono_2LIS_auto_msa_custom_template|2LIS_1|Chain A|SPERM LYSIN|Haliotis rufescens (6454)",
+ "fasta_sequence": "RSWHYVEPKFLNKAFEVALKVQIIAGFDRGLVKWLRVHGRTLSTVQKKALYFVNRRYMQTHWANYMLWINKKIDALGRTPVVGDYTRLGAEIGRRIDMAYFYDFLKDKNMIPKYLPYMEEINRMRPADVPVKYMGK"
+ }
+ ],
+ "folding_model": "af2",
+ "parameters": {
+ "num_recycle": 3,
+ "random_seed": null,
+ "custom_templates": ["5II8"],
+ "gap_trick": false,
+ "msa_mode": "search",
+ "template_mode": "custom"
+ }
+ },
+ {
+ "complex_id": "multi_6M0J_standard",
+ "sequences": [
+ {
+ "description": ">multi_6M0J_standard|6M0J_1|Chain A|Angiotensin-converting enzyme 2|Homo sapiens (9606)",
+ "fasta_sequence": "STIEEQAKTFLDKFNHEAEDLFYQSSLASWNYNTNITEENVQNMNNAGDKWSAFLKEQSTLAQMYPLQEIQNLTVKLQLQALQQNGSSVLSEDKSKRLNTILNTMSTIYSTGKVCNPDNPQECLLLEPGLNEIMANSLDYNERLWAWESWRSEVGKQLRPLYEEYVVLKNEMARANHYEDYGDYWRGDYEVNGVDGYDYSRGQLIEDVEHTFEEIKPLYEHLHAYVRAKLMNAYPSYISPIGCLPAHLLGDMWGRFWTNLYSLTVPFGQKPNIDVTDAMVDQAWDAQRIFKEAEKFFVSVGLPNMTQGFWENSMLTDPGNVQKAVCHPTAWDLGKGDFRILMCTKVTMDDFLTAHHEMGHIQYDMAYAAQPFLLRNGANEGFHEAVGEIMSLSAATPKHLKSIGLLSPDFQEDNETEINFLLKQALTIVGTLPFTYMLEKWRWMVFKGEIPKDQWMKKWWEMKREIVGVVEPVPHDETYCDPASLFHVSNDYSFIRYYTRTLYQFQFQEALCQAAKHEGPLHKCDISNSTEAGQKLFNMLRLGKSEPWTLALENVVGAKNMNVRPLLNYFEPLFTWLKDQNKNSFVGWSTDWSPYADHHHHHH"
+ },
+ {
+ "description": ">multi_6M0J_standard|6M0J_2|Chain B[auth E]|Spike protein S1|Severe acute respiratory syndrome coronavirus 2 (2697049)",
+ "fasta_sequence": "RVQPTESIVRFPNITNLCPFGEVFNATRFASVYAWNRKRISNCVADYSVLYNSASFSTFKCYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGKIADYNYKLPDDFTGCVIAWNSNNLDSKVGGNYNYLYRLFRKSNLKPFERDISTEIYQAGSTPCNGVEGFNCYFPLQSYGFQPTNGVGYQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNFHHHHHH"
+ }
+ ],
+ "folding_model": "openfold",
+ "parameters": {
+ "num_recycle": 2,
+ "random_seed": 5,
+ "gap_trick": false,
+ "msa_mode": "search",
+ "template_mode": "search"
+ }
+ }
+ ]
+ }
+ ```
+
+### CSV
+
+In a `CSV` format configuration file, each row represents a different request. The columns `complex_id`, `description` and `fasta_sequence` describe the proteins, while the others define the options passed to the folding process.
+
+Multimer proteins are specified by listing each chain on separate lines and assigning them the same `complex_id`. Parameters can be repeated or left empty after the first sequence, as the API will only keep the parameters defined for the first sequence of the multimer.
+
+!!! Note
+ The `CSV` format might be a bit tricky to set up in particular when using custom templates and MSAs. In that case, it might be more convenient to use the [`JSON` format](#json).
+
+=== "csv"
+
+ ```csv
+ complex_id,description,fasta_sequence,folding_model,custom_templates,num_recycle,random_seed,msa_mode,template_mode,gap_trick,custom_msas
+ mono_2LIS_auto_msa_custom_template,>2LIS_1|Chain A|SPERM LYSIN|Haliotis rufescens (6454),RSWHYVEPKFLNKAFEVALKVQIIAGFDRGLVKWLRVHGRTLSTVQKKALYFVNRRYMQTHWANYMLWINKKIDALGRTPVVGDYTRLGAEIGRRIDMAYFYDFLKDKNMIPKYLPYMEEINRMRPADVPVKYMGK,af2,"['1agw.cif','1agz.cif']",3,0,search,custom,0,"['1agb_A.sto','1agb_B.sto']"
+ multi_6M0J_standard,>6M0J_1|Chain A|Angiotensin-converting enzyme 2|Homo sapiens (9606),STIEEQAKTFLDKFNHEAEDLFYQSSLASWNYNTNITEENVQNMNNAGDKWSAFLKEQSTLAQMYPLQEIQNLTVKLQLQALQQNGSSVLSEDKSKRLNTILNTMSTIYSTGKVCNPDNPQECLLLEPGLNEIMANSLDYNERLWAWESWRSEVGKQLRPLYEEYVVLKNEMARANHYEDYGDYWRGDYEVNGVDGYDYSRGQLIEDVEHTFEEIKPLYEHLHAYVRAKLMNAYPSYISPIGCLPAHLLGDMWGRFWTNLYSLTVPFGQKPNIDVTDAMVDQAWDAQRIFKEAEKFFVSVGLPNMTQGFWENSMLTDPGNVQKAVCHPTAWDLGKGDFRILMCTKVTMDDFLTAHHEMGHIQYDMAYAAQPFLLRNGANEGFHEAVGEIMSLSAATPKHLKSIGLLSPDFQEDNETEINFLLKQALTIVGTLPFTYMLEKWRWMVFKGEIPKDQWMKKWWEMKREIVGVVEPVPHDETYCDPASLFHVSNDYSFIRYYTRTLYQFQFQEALCQAAKHEGPLHKCDISNSTEAGQKLFNMLRLGKSEPWTLALENVVGAKNMNVRPLLNYFEPLFTWLKDQNKNSFVGWSTDWSPYADHHHHHH,openfold,,2,5,search,search,0,
+ multi_6M0J_standard,>6M0J_2|Chain B[auth E]|Spike protein S1|Severe acute respiratory syndrome coronavirus 2 (2697049),RVQPTESIVRFPNITNLCPFGEVFNATRFASVYAWNRKRISNCVADYSVLYNSASFSTFKCYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGKIADYNYKLPDDFTGCVIAWNSNNLDSKVGGNYNYLYRLFRKSNLKPFERDISTEIYQAGSTPCNGVEGFNCYFPLQSYGFQPTNGVGYQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNFHHHHHH,,,2,5,search,search,0,
+ ```
+
+## Application
+
+### Using the CLI
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 batch_job.csv # or batch_job.json
+ ```
+
+### Using Python scripts
+
+To submit a batch job with scripts, we advise using helper functions made
+available in the [`folding_studio` package](../../tutorials/installation.md#cli-and-folding_studio-library) library. This helper
+function will automatically upload the custom files (MSA, templates, initial guess, templates
+masks) you specified in your configuration file.
+You can use a configuration file (see [Defining a batch job configuration file](#defining-a-batch-job-configuration-file)) or define a `JSON` object.
+
+#### From a json object
+
+To submit a batch job from a `JSON` object defined programmatically, we must
+build a
+[`BatchRequest`](https://folding-studio-data-models-dot-int-bio-foldingstudio-gcp.nw.r.appspot.com/request/#folding_studio_data_models.request.BatchRequest)
+object. Once the `BatchRequest` object is built, it is written to a `JSON` file
+and submitted with the `batch_prediction_from_file` helper function from the
+[`folding_studio` package](../../tutorials/installation.md#cli-and-folding_studio-library) library. This helper function will
+automatically upload the custom files (MSA, templates, initial guess, templates
+masks) you specified in your configuration file.
+
+```python
+import json
+from pathlib import Path
+
+from folding_studio import batch_prediction_from_file
+from folding_studio import get_id_token
+from folding_studio_data_models import (
+ AF2Parameters,
+ AF2Request,
+ BatchRequest,
+ FeatureMode,
+ OpenFoldParameters,
+ OpenFoldRequest,
+ Sequence,
+)
+
+folding_requests = [
+ # Monomer job with default AF2 parameters
+ AF2Request(
+ complex_id="Monomer Construct 0001",
+ sequences=[
+ Sequence(description="Wild Type + mutation X", fasta_sequence="MVFKLLLP")
+ ],
+ parameters= AF2Parameters(),
+ ),
+ # Monomer job with default OpenFold parameters
+ OpenFoldRequest(
+ complex_id="Monomer Construct 0001",
+ sequences=[
+ Sequence(description="Wild Type + mutation X", fasta_sequence="MVFKLLLP")
+ ],
+ parameters=OpenFoldParameters(),
+ ),
+ # Multimer job with deactivated template and 5 recycles.
+ AF2Request(
+ complex_id="Multimer Construct id 0001",
+ sequences=[
+ Sequence(description="Wild Type + mutation X", fasta_sequence="MVFKLLLP"),
+ Sequence(description="Antibody S203 Heavy Chain", fasta_sequence="MPAAFFF"),
+ Sequence(description="Antibody S203 Light Chain", fasta_sequence="MPAKK"),
+ ],
+ parameters= AF2Parameters(
+ msa_mode=FeatureMode.SEARCH,
+ template_mode=FeatureMode.NONE,
+ random_seed=0,
+ num_recycle=5,
+ ),
+ ),
+]
+
+# Build and validate the request
+batch_request = BatchRequest(requests=folding_requests)
+
+# Prepare the batch request file for submission
+json_data = batch_request.model_dump_json()
+batch_file = Path("batch_request.json")
+batch_file.write_text(json_data)
+
+# Obtain the identity token from gcloud auth
+identity_token = get_id_token()
+
+try:
+ response = batch_prediction_from_file(
+ identity_token=identity_token, file=batch_file
+ )
+ json.dump(response, open("submission_batch.json", "w"))
+except Exception as err:
+ print("Error during batch submission.")
+ print(err)
+```
+
+#### From a configuration file
+
+To submit a batch job from a configuration file, simply pass it to the `batch_prediction_from_file` helper function from the [`folding_studio` package](../../tutorials/installation.md#cli-and-folding_studio-library) library. This helper function will automatically upload the custom files (MSA, templates, initial guess, templates masks) you specified in your configuration file.
+
+```python
+import json
+from pathlib import Path
+
+from folding_studio import batch_prediction_from_file
+from folding_studio import get_id_token
+
+# Obtain the identity token from gcloud auth
+identity_token = get_id_token()
+
+batch_file = Path("my-batch-file.csv")
+try:
+ response = batch_prediction_from_file(
+ identity_token=identity_token, file=batch_file
+ )
+ json.dump(response, open("submission_batch.json", "w"))
+except Exception as err:
+ print("Error during batch submission.")
+ print(err)
+```
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/batch_job_from_directory.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/batch_job_from_directory.md
new file mode 100644
index 0000000000000000000000000000000000000000..7b3f6082402bdd04f8b7298966746484c32b582d
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/batch_job_from_directory.md
@@ -0,0 +1,45 @@
+## Goal
+
+A batch job allows you to submit multiple jobs at once. This avoids making too many API calls.
+To submit a batch job through the folding CLI, you can simply pass a directory of FASTA files.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/fasta/directory --num-recycle 3 --random-seed 0
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/fasta/directory"), num_recycle=3, random_seed=0)
+ ```
+
+Using the CLI, you will get the following information if the job was successfully submitted.
+
+``` { .shell .no-copy }
+Batch prediction job metadata written to batch_prediction_20250305172626.json
+This file contains your experiments ids.
+Batch prediction job submitted successfully !
+The following experiments have been submitted (see batch_prediction_20250305172626.json for the full list):
+['dfdddbc4e2969e327863260ba50f5a3cc1c62992']
+For example, you can query an experiment status with the command:
+
+ folding experiment status dfdddbc4e2969e327863260ba50f5a3cc1c62992
+
+The results of the following experiments were found in the cache (see batch_prediction_20250305172626.json for the full list):
+['a13e8c9003695773b2623179fd0eafdf6296602d']
+Use the `folding experiment results id` command to download the prediction results. For example:
+
+ folding experiment results a13e8c9003695773b2623179fd0eafdf6296602d
+
+```
+
+!!! warning
+ If you submit a batch job using a directory of `FASTA` files, the options passed to the `predict` command will be applied to **ALL** the proteins.
+    If you want to pass protein-specific options, you need to submit your batch job through a [configuration file](./batch_job_from_configuration_file.md).
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/cancel_experiment.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/cancel_experiment.md
new file mode 100644
index 0000000000000000000000000000000000000000..835d51739dccf48711fde66d35b66467933ad6bf
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/cancel_experiment.md
@@ -0,0 +1,31 @@
+## Goal
+
+You can cancel one or more ongoing submissions from their `experiment_id`.
+
+For a batch job submission, cancelling one submission won't interfere with the other jobs in the batch.
+
+If you want to cancel a whole batch submission, you have to cancel all the submission experiment ids.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding experiment cancel experiment_id_1
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.commands.experiment import cancel
+
+ cancel(exp_id=["experiment_id_1"])
+ ```
+
+!!! Note
+ Requests cannot be cancelled with keyboard interruption
+
+ Cancelling a running process of the CLI or a Python script with a keyboard
+ interruption (`ctrl+c`) **will most likely not** cancel the job submission process.
+ This is because once the `POST` request has reached the server, there is no way
+ to send an interruption signal to the running process on the server.
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/download_logs.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/download_logs.md
new file mode 100644
index 0000000000000000000000000000000000000000..53bf19533ff908f83b3bbc2308dc383bc11faba5
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/download_logs.md
@@ -0,0 +1,19 @@
+## Goal
+This How-to guide explains how to download the logs of an experiment.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding experiment logs b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9 --output ./logs_exp_b21b09.zip
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.experiment import logs
+
+ logs(exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9", output=Path("./logs_exp_b21b09.zip"))
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/download_prediction_results.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/download_prediction_results.md
new file mode 100644
index 0000000000000000000000000000000000000000..b1456c218202157fd66d56835148e35d831f6fc6
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/download_prediction_results.md
@@ -0,0 +1,55 @@
+## Goal
+
+AlphaFold2/OpenFold models generate not only predictions but also confidence metrics. Once the prediction process of an experiment has finished, all of them are saved into a zip file.
+
+The zip file contains :
+
+- The confidence metrics of the models in `.json` format.
+- The relaxed predictions per models in `.pdb` format.
+- The raw predictions per models in `.pkl` format.
+
+Here is an example of the zip file structure :
+
+``` { .shell .no-copy }
+results
+├── metrics_per_model.json
+├── msa_coverage.json
+├── relaxed_predictions
+│ ├── model_1_ptm.pdb
+│ ├── model_2_ptm.pdb
+│ ├── model_3_ptm.pdb
+│ ├── model_4_ptm.pdb
+│ └── model_5_ptm.pdb
+├── rmsd_per_model.json
+└── unrelaxed_predictions
+ ├── model_1_ptm.pdb
+ ├── model_2_ptm.pdb
+ ├── model_3_ptm.pdb
+ ├── model_4_ptm.pdb
+ └── model_5_ptm.pdb
+```
+
+## Application
+
+Once the experiment has finished, you can download the results zip file.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding experiment results b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9 --output ./result_exp_b21b09.zip
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.experiment import results
+
+ results(exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9", output=Path("./result_exp_b21b09.zip"))
+ ```
+
+You will get the message:
+
+``` { .shell .no-copy }
+File downloaded successfully to result_exp_b21b09.zip.
+```
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/fetch_folding_job_status.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/fetch_folding_job_status.md
new file mode 100644
index 0000000000000000000000000000000000000000..5d1c6ee63eb0bed0d7b7377a91e72a57b9b6e7b6
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/fetch_folding_job_status.md
@@ -0,0 +1,74 @@
+## Goal
+This how-to guide explains how to manage folding jobs using the `experiment_id`. Each submission creates a unique experiment, enabling caching and avoiding redundant computations. You can track the status of individual or batch jobs with the `experiment_id`.
+
+## Application
+
+### Fetch a job `experiment_id`
+Submitting a folding job creates an experiment. This allows caching and avoids
+useless re-computation of previously submitted folding jobs.
+This also applies to batch jobs: if you submit a batch of `n` jobs, `n`
+experiments will be created.
+
+Each experiment is associated with a unique `experiment_id`. Its generation is
+deterministic, created from the submitted FASTA sequence (without taking into
+account the description) and the job parameters.
+
+Once your folding job has been submitted, and thus the experiment created, you
+can get various information from the `experiment_id`.
+
+You can get the list of your experiment ids that succeeded or are still pending
+using :
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding experiment list
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.commands.experiment import list
+
+ list()
+ ```
+
+You will get a table with the different experiments launched:
+
+``` { .shell .no-copy }
+Done and pending experiments list written to None
+ Done and pending experiments
+┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┓
+┃ Experiment ID ┃ Status ┃
+┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━┩
+│ a13e8c9003695773b2623179fd0eafdf6296602d │ Done │
+│ 37d816fd1ad0461dd4291963ec10ca5c631058db │Pending │
+└──────────────────────────────────────────┴────────┘
+```
+
+### Retrieve a job status
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding experiment status b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.commands.experiment import status
+
+ status(exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9")
+ ```
+
+The experiment status is the current state of the experiment.
+
+| VALUE | DESCRIPTION |
+| ----------- | ------------------------------------------------------------------------------- |
+| `Done` | The experiment is done and its features and results are available for download. |
+| `Pending` | The experiment is still ongoing. |
+| `Failed` | The experiment has failed. |
+| `Cancelled` | The experiment was cancelled. |
+
+Once you have submitted a folding job, you can get its status at any time.
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/get_experiment_features.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/get_experiment_features.md
new file mode 100644
index 0000000000000000000000000000000000000000..233322342e5152f6bc738571a8d2401f86791cf2
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/get_experiment_features.md
@@ -0,0 +1,60 @@
+## Goal
+Before making a folding prediction, a feature generation process will build the two main features needed by the AlphaFold2 model :
+
+- The [Multiple Sequence Alignment](https://en.wikipedia.org/wiki/Multiple_sequence_alignment) (MSA) search results. By default, the Folding Studio pipeline will trigger an MSA search on Uniref90, small BFD, Mgnify and Uniprot (multimer jobs only) using the `jackhmmer` algorithm.
+
+- The protein [template](https://en.wikipedia.org/wiki/Homology_modeling) search results. By default, the Folding Studio pipeline will trigger a template search on the PDB70 structure databases using the `hhsearch` algorithm (`hhblits` for multimer jobs).
+
+Once the feature generation process of an experiment has finished, all the generated features are saved into a zip file.
+
+The zip file contains :
+
+- The full pickled features in `.pkl` format.
+- The output of the MSA search: the MSA search results on multiple databases in `.a3m` format.
+- The output of the template search : the four best matching templates in `.cif` format.
+
+Here is an example of the zip file structure for a monomer :
+
+``` { .shell .no-copy }
+extracted_experiment_features_zip
+├── features.pkl
+├── msas
+│ ├── mgnify_hits.a3m
+│ ├── pdb_hits.hhr
+│ ├── small_bfd_hits.a3m
+│ └── uniref90_hits.a3m
+└── templates
+ ├── 5kwb.cif
+ ├── 6m0j.cif
+ ├── 6vsj.cif
+ ├── 6vw1.cif
+ └── selected_templates.json
+```
+
+For multimer, the structure is similar except that there is a dedicated subdirectory for each protein in the `msas` and `templates` directory.
+
+You can download this zip file to check the generated features and use its content for specifying [custom MSA feature](./../af2_openfold/set_af_folding_parameters.md#custom-msas) or [custom template features](../af2_openfold/set_af_folding_parameters.md#custom-templates) in further experiments.
+
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding experiment features b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9 --output ./features_exp_b21b09.zip
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.experiment import features
+
+ features(exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9", output=Path("./features_exp_b21b09.zip"))
+ ```
+
+Once the features are downloaded, you will get the following message:
+
+``` { .shell .no-copy }
+File downloaded successfully to features_exp_b21b09.zip.
+```
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/provide_input_data.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/provide_input_data.md
new file mode 100644
index 0000000000000000000000000000000000000000..69b19841b4c0a7de8d90e2963c2c007dcc4b94f6
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/provide_input_data.md
@@ -0,0 +1,32 @@
+## Goal
+This guide will help you determine whether a given input sequence is compatible with the Alphafold2 and OpenFold models.
+
+## Supported inputs
+
+To submit a folding job with Alphafold2 and OpenFold, you need the sequence input file in
+[`FASTA`](https://en.wikipedia.org/wiki/FASTA_format) format containing your
+protein sequence.
+
+It can be a monomer or a multimer sequence.
+
+=== "monomer"
+
+ ```text
+ >SARS-CoV-2|RBD|Omicron variant
+ RVQPTESIVRFPNITNLCPFDEVFNATRFASVYAWNRKRISNCVADYSVLYNLAPFFTFK
+ CYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGNIADYNYKLPDDFTGCVIAWNS
+ NKLDSKVSGNYNYLYRLFRKSNLKPFERDISTEIYQAGNKPCNGVAGFNCYFPLRSYSFR
+ PTYGVGHQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNF
+ ```
+
+=== "multimer"
+
+ ```text
+ >SARS-CoV-2|RBD|Omicron variant
+ RVQPTESIVRFPNITNLCPFDEVFNATRFASVYAWNRKRISNCVADYSVLYNLAPFFTFK
+ CYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGNIADYNYKLPDDFTGCVIAWNS
+ NKLDSKVSGNYNYLYRLFRKSNLKPFERDISTEIYQAGNKPCNGVAGFNCYFPLRSYSFR
+ PTYGVGHQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNF
+ >Processed angiotensin-converting enzyme 2|Homo sapiens (9606)
+ STIEEQAKTFLDKFNHEAEDLFYQSSLASWNYNTNITEENVQNMNNAGDKWSAFLKEQSTLAQMYPLQEIQNLTVKLQLQALQQNGSSVLSEDKSKRLNTILNTMSTIYSTGKVCNPDNPQECLLLEPGLNEIMANSLDYNERLWAWESWRSEVGKQLRPLYEEYVVLKNEMARANHYEDYGDYWRGDYEVNGVDGYDYSRGQLIEDVEHTFEEIKPLYEHLHAYVRAKLMNAYPSYISPIGCLPAHLLGDMWGRFWTNLYSLTVPFGQKPNIDVTDAMVDQAWDAQRIFKEAEKFFVSVGLPNMTQGFWENSMLTDPGNVQKAVCHPTAWDLGKGDFRILMCTKVTMDDFLTAHHEMGHIQYDMAYAAQPFLLRNGANEGFHEAVGEIMSLSAATPKHLKSIGLLSPDFQEDNETEINFLLKQALTIVGTLPFTYMLEKWRWMVFKGEIPKDQWMKKWWEMKREIVGVVEPVPHDETYCDPASLFHVSNDYSFIRYYTRTLYQFQFQEALCQAAKHEGPLHKCDISNSTEAGQKLFNMLRLGKSEPWTLALENVVGAKNMNVRPLLNYFEPLFTWLKDQNKNSFVGWSTDWSPYADRHHHHHH
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/set_af_folding_parameters.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/set_af_folding_parameters.md
new file mode 100644
index 0000000000000000000000000000000000000000..dad28b8b5eab194c4ee75e33b2ecf1a0bec61f4c
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/set_af_folding_parameters.md
@@ -0,0 +1,407 @@
+## Goal
+
+The different folding parameters are detailed in the [reference section](../../reference/cli.md#predict) but this how-to guide gives some examples of how to use them.
+
+## Application
+
+### Number of recycle steps
+
+You can choose the number of recycling steps that the input undergoes.
+Recycling steps refer to the iterative refinement process where the input data
+is repeatedly processed through the entire network. During each recycling step,
+the network uses the output from the previous cycle as a new input, allowing it
+to progressively refine and improve its predictions.
+
+This iterative approach can enhance the accuracy of the final output,
+especially for complex structures or cases where more nuanced adjustments are
+needed. By adjusting the number of recycling steps, you can control the balance
+between computational time and the desired level of refinement for your
+predictions.
+
+By default, it is set to 3.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/monomer.fasta --num-recycle 5
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/monomer.fasta"), num_recycle=5)
+ ```
+
+### Random seed
+
+To generate different results from the same input sequence, you can change the random seed used during the forward pass.
+
+By default, it is set to 0.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/monomer.fasta --random-seed 42
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/monomer.fasta"), random_seed=42)
+ ```
+
+If you would like to submit a random seed scan job with the CLI, you can use the `--num-seed` option which specifies the number of random seed values to submit.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/monomer.fasta --num-seed 10
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/monomer.fasta"), num_seed=10)
+ ```
+
+### Ignore cached experiments
+
+By default, if you submit a job that has already been submitted, it will not run, and the cached results will be returned.
+This is determined by the job experiment id, see [Fetch a job experiment_id](../../how-to-guides/af2_openfold/fetch_folding_job_status.md#fetch-a-job-experiment_id) for more details.
+
+However, you can override this behavior and force the job to run, even if it was submitted earlier.
+
+!!! warning
+ This will overwrite the previous experiment results, replacing them with the most recent ones.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/monomer.fasta --no-cache
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/monomer.fasta"), cache=False)
+ ```
+
+### Use specific AlphaFold2/OpenFold models
+
+By default, predictions are generated using all five AlphaFold2/OpenFold models. They
+each have slight variations in how they predict protein structures, providing a
+range of potential outcomes.
+Leveraging all five models increases the robustness of the predictions, as it
+allows for a more comprehensive exploration of possible protein conformations.
+
+However, if you wish to narrow down the prediction process to specific models,
+you can do so by specifying the IDs of the models you want to use.
+It allows you to focus on particular models that may be better suited for your
+specific use case or to reduce computational time by excluding models that are
+less relevant for your needs.
+
+Find more details about the difference between the model training procedures
+and inputs
+[here](https://static-content.springer.com/esm/art%3A10.1038%2Fs41586-021-03819-2/MediaObjects/41586_2021_3819_MOESM1_ESM.pdf#page=47).
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/monomer.fasta --model-subset 1 --model-subset 2 --model-subset 3
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+    from pathlib import Path
+    from folding_studio.commands.predict import af2 as af2_predict
+
+    af2_predict(source=Path("path/to/my/monomer.fasta"), model_subset=[1, 2, 3])
+ ```
+
+### Features generation mode
+
+=== "MSA"
+
+ | Value | Description |
+ | -------------------- | -------------------------------------------------------------------------------------------- |
+    | `"search"` (default) | automated MSA search of sequence databases using `JackHMMer` (Uniref90, small_bfd and mgnify) |
+ | `"mmseqs"` | automated search of Uniref30 and Colabfold_env_db using the **self-hosted** `MMSeqs2` server |
+    | `"none"`             | deactivate MSA features                                                                      |
+ | `"custom"` | use user provided MSA input (`.sto` or `.a3m` format) |
+
+ !!! note
+        `MMSeqs2` produces fairly different MSA results compared to JackHMMer; they use different datasets (Uniref30 and colabfold_env_db) and different search algorithms. However the MSA produced by `MMSeqs2` is generally more diverse and can be leveraged to predict structures with higher accuracy (see this [publication](https://www.nature.com/articles/s41592-023-02130-4)). For more information about MMSeqs2, please refer to the corresponding [paper](https://www.biorxiv.org/content/10.1101/079681v5).
+
+=== "Templates"
+
+ | Value | Description |
+ | -------------------- | -------------------------------------------------------------------------------------------- |
+ | `"search"` (default) | automated search of PDB70 structure databases using `hhsearch` (`hhblits` for multimer jobs) |
+ | `"mmseqs"` | automated search of pdb100 structure database using the **self-hosted** `mmseqs2` server |
+ | `"none"` | deactivate template features |
+ | `"custom"` | use user provided template structures (`.cif` format) or PDB code. |
+
+#### Custom features
+
+Instead of using the default experiment feature generation process, you might want to specify your own MSA or template features, or even remove them altogether.
+
+We see two main use cases where you might want to override the default feature generation process :
+
+- You want to utilize custom features specifically tailored for your folding job. For example, you have obtained a protein structure from a crystallography experiment and want to use it as a custom template.
+
+- You already submitted a folding job for the same protein and you want to use its features to speed up the new jobs.
+
+##### Custom MSAs
+
+To modify the default MSA feature computation behavior, you need to set the MSA feature computation mode with `msa_mode="custom"`. If you pass custom MSA features but didn't set the MSA feature mode to `custom`, they won't be taken into account and the MSA feature mode will be the default (`search`).
+
+We support specifying custom MSA features as files in `.sto` or `.a3m` format:
+
+- You need to provide **at least as many** `.sto` or `.a3m` files as chains in the FASTA file. You may provide several MSA files for a chain.
+
+- To assign a custom MSA to a specific chain, append the chain identifier as a suffix to the file name.
+
+!!! example
+ For instance, in a complex with two chains A and B (as listed in the FASTA file), name your custom MSA files `my_custom_msa_A.sto` and `my_custom_msa_B.sto`.
+
+!!! warning
+ If you use `.a3m` files downloaded from a
+ [ColabFold](https://colab.research.google.com/github/sokrypton/ColabFold/blob/main/AlphaFold2.ipynb)
+ notebook run, make sure to remove lines starting with a `#` before submitting
+ the job. These are artifacts added by the ColabFold pipeline after the MMSeqs2
+ run which are not relevant to Folding Studio but **will cause a crash**.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ # monomer folding job submission with custom msa features
+ folding predict af2 path/to/my/monomer.fasta --msa-mode custom --custom-msa /path/to/monomer_custom_msa_A.sto
+
+ # multimer folding job submission with custom msa features
+ folding predict af2 path/to/my/multimer.fasta --msa-mode custom --custom-msa path/to/custom_msa_1_A.sto --custom-msa path/to/custom_msa_2_A.sto --custom-msa path/to/custom_msa_B.sto
+
+ # monomer folding job submission with mmseqs for msa and templates
+ folding predict af2 path/to/my/monomer.fasta --msa-mode mmseqs --template-mode mmseqs
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ # monomer folding job submission with custom msa features
+ af2_predict(source=Path("path/to/my/monomer.fasta"), msa_mode="custom", custom_msa=[Path("/path/to/monomer_custom_msa_A.sto")])
+
+ # multimer folding job submission with custom msa features
+ af2_predict(source=Path("path/to/my/multimer.fasta"), msa_mode="custom", custom_msa=[Path("/path/to/custom_msa_1_A.sto"), Path("/path/to/custom_msa_2_A.sto"), Path("/path/to/custom_msa_B.sto")])
+
+ # monomer folding job submission with mmseqs for msa and templates
+ af2_predict(source=Path("path/to/my/monomer.fasta"), msa_mode="mmseqs", template_mode="mmseqs")
+ ```
+
+##### Custom templates
+
+To modify the default template feature computation behavior, you need to specify the template feature computation mode using the `template_mode` parameter :
+
+We support specifying custom templates as files in `.cif` format or as PDB codes of crystal structures. Both options can be used at the same time for the same job.
+
+You are free to provide as many templates as you wish. However the AlphaFold2 pipeline will only keep the best 4 matching templates.
+
+Note that by design, AlphaFold2 **monomer** models 3, 4, and 5 do not incorporate template features. As a result, modifying the template feature calculations will not affect their predictions.
+
+!!! note
+ If `"template_mode": "search"` is used with `"msa_mode": "none"`, an automated MSA search on Uniref90 will still be run in order to obtain an MSA necessary for `hhsearch/hhblits` to complete the template search.
+ However these MSA search results will not be included in the features.
+
+!!! warning
+ If you pass custom template features but didn't set the template feature mode to `custom`, they won't be taken into account and the template feature mode will be the default (`search`).
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+    folding predict af2 path/to/my/monomer.fasta --template-mode custom --custom-template-id 5ii8 --custom-template-id 6m0j --custom-template /path/to/template_1.cif --custom-template /path/to/template_2.cif --custom-template /path/to/template_3.cif
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/monomer.fasta"), template_mode="custom", custom_template_id=["5ii8", "6m0j"], custom_template=[Path("/path/to/template_1.cif"), Path("/path/to/template_2.cif"), Path("/path/to/template_3.cif")])
+ ```
+
+### Specification for batch jobs
+
+ To submit jobs with custom files programmatically, we use helper functions made available in the `folding_studio` [package](../../tutorials/installation.md#cli-and-folding_studio-library).
+
+=== "Custom templates"
+
+ ```python
+ import json
+ from pathlib import Path
+
+ from folding_studio import batch_prediction_from_file
+ from folding_studio import get_id_token
+ from folding_studio_data_models import (
+ AF2Parameters,
+ AF2Request,
+ BatchRequest,
+ FeatureMode,
+ OpenFoldParameters,
+ OpenFoldRequest,
+ Sequence,
+ )
+
+ # Define local templates path
+ template_A_local_path = "/path/to/custom_template_A.cif"
+ template_B_local_path = "/path/to/custom_template_B.cif"
+ template_C_local_path = "/path/to/custom_template_C.cif"
+
+ # Build the batch request
+ requests = [
+ AF2Request(
+ complex_id="Monomer Construct 0001",
+ sequences=[
+ Sequence(description="Wild Type + mutation X", fasta_sequence="MVFKLLLP")
+ ],
+ parameters= AF2Parameters(
+ template_mode=FeatureMode.CUSTOM, custom_templates=[template_A_local_path]
+ ),
+ ),
+ OpenFoldRequest(
+ complex_id="Monomer Construct 0001 with OpenFold",
+ sequences=[
+ Sequence(description="Wild Type + mutation X", fasta_sequence="MVFKLLLP")
+ ],
+ parameters=OpenFoldParameters(
+ template_mode=FeatureMode.CUSTOM, custom_templates=[template_A_local_path]
+ ),
+ ),
+ AF2Request(
+ complex_id="Multimer Construct id 0001",
+ sequences=[
+ Sequence(description="Wild Type + mutation X", fasta_sequence="MVFKLLLP"),
+ Sequence(description="Antibody S203 Heavy Chain", fasta_sequence="MPAAFFF"),
+ Sequence(description="Antibody S203 Light Chain", fasta_sequence="MPAKK"),
+ ],
+ parameters= AF2Parameters(
+ template_mode=FeatureMode.CUSTOM,
+ custom_templates=[
+ template_A_local_path,
+ template_B_local_path,
+ template_C_local_path,
+ ],
+ ),
+ ),
+ ]
+ # Build and validate the request
+ batch_request = BatchRequest(requests=requests)
+
+ # Prepare the batch request file for submission
+ json_data = batch_request.model_dump_json()
+ batch_file = Path("batch_request.json")
+ batch_file.write_text(json_data)
+
+ # Obtain the identity token from gcloud auth
+ identity_token = get_id_token()
+
+ try:
+ response = batch_prediction_from_file(
+ identity_token=identity_token, file=batch_file
+ )
+ json.dump(response, open("submission_batch.json", "w"))
+ except Exception as err:
+ print("Error during batch submission.")
+ print(err)
+ ```
+
+=== "Custom MSAs"
+
+ ```python
+ import json
+ from pathlib import Path
+
+ from folding_studio import batch_prediction_from_file
+ from folding_studio import get_id_token
+ from folding_studio_data_models import (
+ AF2Parameters,
+ AF2Request,
+ BatchRequest,
+ FeatureMode,
+ OpenFoldParameters,
+ OpenFoldRequest,
+ Sequence,
+ )
+
+ # Define local MSA path
+ msa_A_local_path = "/path/to/custom_msa_A.sto"
+ msa_B_local_path = "/path/to/custom_msa_B.sto"
+ msa_C_local_path = "/path/to/custom_msa_C.sto"
+
+ # Build the batch request
+ requests = [
+ AF2Request(
+ complex_id="Monomer Construct 0001",
+ sequences=[
+ Sequence(description="Wild Type + mutation X", fasta_sequence="MVFKLLLP")
+ ],
+ parameters= AF2Parameters(
+ msa_mode=FeatureMode.CUSTOM, custom_msas=[msa_A_local_path]
+ ),
+ ),
+ OpenFoldRequest(
+ complex_id="Monomer Construct 0001 with OpenFold",
+ sequences=[
+ Sequence(description="Wild Type + mutation X", fasta_sequence="MVFKLLLP")
+ ],
+ parameters=OpenFoldParameters(
+ msa_mode=FeatureMode.CUSTOM, custom_msas=[msa_A_local_path]
+ ),
+ ),
+ AF2Request(
+ complex_id="Multimer Construct id 0001",
+ sequences=[
+ Sequence(description="Wild Type + mutation X", fasta_sequence="MVFKLLLP"),
+ Sequence(description="Antibody S203 Heavy Chain", fasta_sequence="MPAAFFF"),
+ Sequence(description="Antibody S203 Light Chain", fasta_sequence="MPAKK"),
+ ],
+ parameters= AF2Parameters(
+ msa_mode=FeatureMode.CUSTOM,
+ custom_msas=[msa_A_local_path, msa_B_local_path, msa_C_local_path],
+ ),
+ ),
+ ]
+ # Build and validate the request
+ batch_request = BatchRequest(requests=requests)
+
+ # Prepare the batch request file for submission
+ json_data = batch_request.model_dump_json()
+ batch_file = Path("batch_request.json")
+ batch_file.write_text(json_data)
+
+ # Obtain the identity token from gcloud auth
+ identity_token = get_id_token()
+
+ try:
+ response = batch_prediction_from_file(
+ identity_token=identity_token, file=batch_file
+ )
+ json.dump(response, open("submission_batch.json", "w"))
+ except Exception as err:
+ print("Error during batch submission.")
+ print(err)
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/single_af2_job.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/single_af2_job.md
new file mode 100644
index 0000000000000000000000000000000000000000..84fe014b93252c8b4108faa85a2165f01c73fe40
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/single_af2_job.md
@@ -0,0 +1,27 @@
+## Goal
+This how-to guide shows how to run a folding job using AlphaFold2.
+
+!!! Note
+ All the other how-to guides in **AlphaFold2/OpenFold section** of **How-to guides** apply to Alphafold2.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/file.fasta --num-recycle 3 --random-seed 0
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/file.fasta"), num_recycle=3, random_seed=0)
+ ```
+
+!!! Warning
+    If you consider submitting >10 folding jobs, it is **strongly** advised to
+ use batch job submission from a [directory](./batch_job_from_directory.md) or from a [configuration file](./batch_job_from_configuration_file.md). A batch job will mutualize the feature generation steps, speeding up significantly the jobs processing if
+ they use similar features.
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/single_openfold_job.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/single_openfold_job.md
new file mode 100644
index 0000000000000000000000000000000000000000..64dec52657a53ba2fbb26e341c53c32733a6aea6
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/single_openfold_job.md
@@ -0,0 +1,27 @@
+## Goal
+This how-to guide shows how to run a folding job using OpenFold, an alternative option to Alphafold2 in Folding Studio.
+
+!!! Note
+ All the other how-to guides in **AlphaFold2/OpenFold section** of **How-to guides** apply to OpenFold.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict openfold path/to/my/file.fasta --num-recycle 3 --random-seed 0
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import openfold as openfold_predict
+
+ openfold_predict(source=Path("path/to/my/file.fasta"), num_recycle=3, random_seed=0)
+ ```
+
+!!! Warning
+    If you consider submitting >10 folding jobs, it is **strongly** advised to
+ use batch job submission from a [directory](./batch_job_from_directory.md) or from a [configuration file](./batch_job_from_configuration_file.md). A batch job will mutualize the feature generation steps, speeding up significantly the jobs processing if
+ they use similar features.
diff --git a/folding-studio/docs/docs/how-to-guides/af2_openfold/soloseq_job.md b/folding-studio/docs/docs/how-to-guides/af2_openfold/soloseq_job.md
new file mode 100644
index 0000000000000000000000000000000000000000..b390b40eaf1d97ccca71a17040a90dea3ae05c76
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af2_openfold/soloseq_job.md
@@ -0,0 +1,104 @@
+## Goal
+
+This tutorial explains how to run structure predictions using SoloSeq, an OpenFold-like model.
+
+!!! Warning
+ SoloSeq is a synchronous model and uses a different feature engineering process compared to AlphaFold2/OpenFold. Therefore, the process of submitting SoloSeq jobs and retrieving output differs significantly from the processes described for AlphaFold2/OpenFold, and the other how-to guides in the **AlphaFold2/OpenFold** folder do not apply to SoloSeq.
+
+## Application
+
+### Launch a job from a FASTA file
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict soloseq path/to/my/fasta/file.fasta --project-code "your-project-code" --output-dir ./
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.soloseq import SoloSeqQuery
+
+ inference_parameters = {"project_code": "your-project-code",
+ "seed":42}
+
+ file_path = "path/to/my/fasta/file.fasta"
+
+ # Create client
+ client = Client.from_jwt()
+
+ # Define query
+ query = SoloSeqQuery.from_file(path=file_path, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+ output_path = "./output.zip"
+ response.download_results(output_path, force=True, unzip=True)
+ ```
+
+### Launch a job from a directory of FASTA files
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict soloseq path/to/my/fasta/directory --project-code "your-project-code" --output-dir ./
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.soloseq import SoloSeqQuery
+
+ inference_parameters = {"project_code": "your-project-code",
+ "seed":42}
+
+ directory_path = "path/to/my/fasta/directory"
+
+ # Create client
+ client = Client.from_jwt()
+
+ # Define query
+ query = SoloSeqQuery.from_directory(path=directory_path, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+ output_path = "./output.zip"
+ response.download_results(output_path, force=True, unzip=True)
+ ```
+
+### Launch a job from a protein sequence directly
+
+`SoloSeq` allows passing the protein sequence directly as input.
+This makes the prediction job easier to integrate with your code.
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.soloseq import SoloSeqQuery
+
+ inference_parameters = {"project_code": "your-project-code",
+ "seed":42}
+
+    sequence = ">A|protein\nQLEDSEVEAVAKGLEEMYANGVTEDNFKNYVKNNFAQQEISSVEEELNVNIS"
+
+ # Create client
+ client = Client.from_jwt()
+
+ # Define query
+ query = SoloSeqQuery.from_protein_sequence(sequence=sequence, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+ output_path = "./output.zip"
+ response.download_results(output_path, force=True, unzip=True)
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/af3/batch_job_from_directory.md b/folding-studio/docs/docs/how-to-guides/af3/batch_job_from_directory.md
new file mode 100644
index 0000000000000000000000000000000000000000..d8d9d7778d8144519ec731d4d2fa834e6baa7cc6
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af3/batch_job_from_directory.md
@@ -0,0 +1,40 @@
+## Goal
+
+A batch job allows submitting multiple jobs at once. This avoids making too many API calls.
+To submit a batch job through the folding CLI, you can simply pass a directory of FASTA files (or both FASTA and YAML files if you are using **Boltz-1** model)
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict boltz path/to/my/fasta/directory --num-recycle 3 --random-seed 0 --output ./ --unzip
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.boltz import BoltzQuery
+
+ inference_parameters = {"project_code": "your-project-code",
+ "seed":42}
+
+ directory_path = "path/to/my/fasta/directory"
+
+ # Create client
+ client = Client.authenticate()
+
+ # Define query
+ query = BoltzQuery.from_directory(path=directory_path, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+ output_path = "./output.zip"
+ response.download_results(output_path, force=True, unzip=True)
+ ```
+
+!!! Note
+ Depending on the model you want to use, you can replace `predict boltz` with `predict chai` or `predict protenix`.
diff --git a/folding-studio/docs/docs/how-to-guides/af3/boltz_single_yaml_job.md b/folding-studio/docs/docs/how-to-guides/af3/boltz_single_yaml_job.md
new file mode 100644
index 0000000000000000000000000000000000000000..4a83743fe42a32b06feb684ebab0154a9690ff20
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af3/boltz_single_yaml_job.md
@@ -0,0 +1,38 @@
+## Goal
+**Boltz-1** model allows passing a YAML file (or a directory containing YAML files, see [batch job from directory section](./batch_job_from_directory.md) for more details about launching jobs from a directory) as input. The YAML format is more flexible and allows for more complex inputs, particularly around covalent bonds.
+This YAML has to follow the Boltz-1 format below.
+
+See [Boltz-1 documentation about prediction](https://github.com/jwohlwend/boltz/blob/main/docs/prediction.md) for details about YAML input format.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict boltz path/to/my/file.yaml --project-code "your-project-code" --output ./
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.boltz import BoltzQuery
+
+ inference_parameters = {"project_code": "your-project-code",
+ "seed":42}
+
+ file_path = "path/to/my/file.yaml"
+
+ # Create client
+ client = Client.authenticate()
+
+ # Define query
+ query = BoltzQuery.from_file(path=file_path, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+ output_path = "./output.zip"
+ response.download_results(output_path, force=True)
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/af3/provide_input_data.md b/folding-studio/docs/docs/how-to-guides/af3/provide_input_data.md
new file mode 100644
index 0000000000000000000000000000000000000000..da11a87be17b2589c0c27f5b9691d2235d27cff0
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af3/provide_input_data.md
@@ -0,0 +1,39 @@
+## Goal
+This guide will help you determine whether a given input sequence is compatible with the Alphafold3-like models.
+
+## Application
+
+### Alphafold3-like models supported inputs
+To submit a folding job with Alphafold3-like models, you need the sequence input file in
+[`FASTA`](https://en.wikipedia.org/wiki/FASTA_format) format containing your sequence.
+
+AlphaFold3-like models support various molecular types for structure prediction, and these can be provided as input in different formats. Below is a summary of the supported molecular types:
+
+- **Proteins**: Protein sequences are widely supported and can be provided for structure prediction tasks.
+ - monomer
+ - multimer
+- **DNA** and **RNA**: Both DNA and RNA sequences are supported for structure prediction.
+- **Ligands**: Ligands can be specified in two ways:
+ - SMILES: A textual format describing the chemical structure of molecules.
+ - CCD Code: A standard identifier for chemical compounds, defined in the Chemical Component Dictionary.
+
+**You can find detailed explanations of the input data formats for each model at the links below.**
+
+### Boltz-1
+Here is an explanation of the different input data formats that can be used for Boltz-1 prediction: [Boltz-1 documentation](https://github.com/jwohlwend/boltz/blob/main/docs/prediction.md). You can also find some input [examples](https://github.com/jwohlwend/boltz/tree/main/examples).
+
+### Chai-1
+Here is an explanation of the different input data formats that can be used for Chai-1 with restraints: [Chai-1 documentation](https://github.com/chaidiscovery/chai-lab/blob/main/examples/restraints/README.md). You can also find some input [examples](https://github.com/chaidiscovery/chai-lab/tree/main/examples).
+
+### Protenix
+Here is an explanation of the different input data formats that can be used for Protenix: [Protenix documentation](https://github.com/bytedance/Protenix/blob/main/docs/infer_json_format.md).
+
+!!! Warning
+ The Protenix endpoint currently does not support JSON format. It is a work in progress.
+
+!!! note
+ The Protenix endpoint also accepts RCSB FASTA format following this structure. The number of chains will automatically be derived from the description (e.g. 2 chains in the example below):
+ ``` { .shell .no-copy }
+ >1HSG_1|Chains A, B|HIV-1 PROTEASE|Human immunodeficiency virus 1 (11676)
+ PQITLWQRPLVTIKIGGQLKEALLDTGADDTVLEEMSLPGRWKPKMIGGIGGFIKVRQYDQILIEICGHKAIGTVLVGPTPVNIIGRNLLTQIGCTLNF
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/af3/single_job_boltz.md b/folding-studio/docs/docs/how-to-guides/af3/single_job_boltz.md
new file mode 100644
index 0000000000000000000000000000000000000000..6a7c5f8cd36b604fc242742c37502a8d6d7a3a03
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af3/single_job_boltz.md
@@ -0,0 +1,72 @@
+## Goal
+
+**Boltz-1** is one of the AlphaFold3-like models supported by Folding Studio. Here is a how-to guide to learn how to launch a simple single folding job on a FASTA file using **Boltz-1**.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict boltz path/to/my/file.fasta --output ./ --unzip --project-code "your-project-code"
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.boltz import BoltzQuery
+
+ inference_parameters = {"project_code": "your-project-code"}
+
+ file_path = "path/to/my/file.fasta"
+
+ # Create client
+ client = Client.authenticate()
+
+ # Define query
+ query = BoltzQuery.from_file(path=file_path, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+ output_path = "./output.zip"
+ response.download_results(output_path, force=True, unzip=True)
+ ```
+
+!!! note
+ Do not forget that Boltz-1 accepts FASTA format following this structure
+ ``` { .shell .no-copy }
+ >CHAIN_ID|ENTITY_TYPE|MSA_PATH
+ SEQUENCE
+ ```
+ `MSA_PATH` is ignored if `--use-msa-server` is used.
+
+ For further information on YAML format, check [documentation](https://github.com/jwohlwend/boltz/blob/main/docs/prediction.md).
+
+Using the CLI, you will get the following information if the job was successfully submitted.
+
+``` { .shell .no-copy }
+╭───────────────────────────────╮
+│ 🧬 Boltz1 Folding submission │
+╰───────────────────────────────╯
+🔑 Authenticating client ✅
+📦 Generating query ✅
+Generated query: {
+ "fasta_files": {
+ "file": ">A|protein|\nQLEDSEVEAVAKGLEEMYANGVTEDNFKNYVKNNFAQQEISSVEEELNVNISDSCVANKIKDEFFAMISISAIVKAAQKKAWKELAVTVLRFAKANGLKTNAIIVAGQLALWAVQCG"
+ },
+ "yaml_files": {},
+ "parameters": {
+ "seed": 42,
+ "recycling_steps": 3,
+ "sampling_steps": 200,
+ "diffusion_samples": 1,
+ "step_scale": 1.638,
+ "msa_pairing_strategy": "greedy",
+ "write_full_pae": false,
+ "write_full_pde": false
+ }
+}
+🧠 Processing folding job ✅
+```
diff --git a/folding-studio/docs/docs/how-to-guides/af3/single_job_chai.md b/folding-studio/docs/docs/how-to-guides/af3/single_job_chai.md
new file mode 100644
index 0000000000000000000000000000000000000000..cd8a41b4aeb4af2b62a362f9a06b713e3296e572
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af3/single_job_chai.md
@@ -0,0 +1,66 @@
+## Goal
+
+**Chai-1** is one of the AlphaFold3-like models supported by Folding Studio. Here is a how-to guide to learn how to launch a simple single folding job on a FASTA file using **Chai-1**.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict chai path/to/my/file.fasta --output ./ --unzip --project-code "your-project-code"
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.chai import ChaiQuery
+
+ inference_parameters = {"project_code": "your-project-code"}
+
+ file_path = "path/to/my/file.fasta"
+
+ # Create client
+ client = Client.authenticate()
+
+ # Define query
+ query = ChaiQuery.from_file(path=file_path, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+ output_path = "./output.zip"
+ response.download_results(output_path, force=True, unzip=True)
+ ```
+
+!!! note
+ Do not forget that Chai-1 accepts FASTA format following this structure
+ ``` { .shell .no-copy }
+ >ENTITY_TYPE|STRUCTURE_ID
+ SEQUENCE
+ ```
+
+Using the CLI, you will get the following information if the job was successfully submitted.
+
+``` { .shell .no-copy }
+╭───────────────────────────────╮
+│ 🧬 Chai-1 Folding submission │
+╰───────────────────────────────╯
+🔑 Authenticating client ✅
+📦 Generating query ✅
+Generated query: {
+ "fasta_files": {
+ "file": ">A|protein|\nQLEDSEVEAVAKGLEEMYANGVTEDNFKNYVKNNFAQQEISSVEEELNVNISDSCVANKIKDEFFAMISISAIVKAAQKKAWKELAVTVLRFAKANGLKTNAIIVAGQLALWAVQCG\n"
+ },
+ "use_msa_server": false,
+ "use_templates_server": false,
+ "num_trunk_recycles": 3,
+ "seed": 0,
+ "num_diffn_timesteps": 200,
+ "restraints": null,
+ "recycle_msa_subsample": 0,
+ "num_trunk_samples": 1
+}
+🧠 Processing folding job ✅
+```
diff --git a/folding-studio/docs/docs/how-to-guides/af3/single_job_from_protein_sequence.md b/folding-studio/docs/docs/how-to-guides/af3/single_job_from_protein_sequence.md
new file mode 100644
index 0000000000000000000000000000000000000000..29424e702642ee2e5b2a9b15f245f18ae4a29c74
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af3/single_job_from_protein_sequence.md
@@ -0,0 +1,32 @@
+## Goal
+**Boltz-1**, **Chai-1** and **Protenix** models allow passing the protein sequence directly as input.
+This makes the prediction jobs easier to integrate with your code.
+
+## Application
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.boltz import BoltzQuery
+
+ inference_parameters = {"project_code": "your-project-code",
+ "seed":42}
+
+ sequence = ">A|protein\nQLEDSEVEAVAKGLEEMYANGVTEDNFKNYVKNNFAQQEISSVEEELNVNIS"
+
+ # Create client
+ client = Client.authenticate()
+
+ # Define query
+ query = BoltzQuery.from_protein_sequence(sequence=sequence, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+    output_path = "./output.zip"
+ response.download_results(output_path, force=True, unzip=True)
+ ```
+
+!!! Note
+ Depending on the model you want to use, you can replace `BoltzQuery` with `ChaiQuery` or `ProtenixQuery`.
diff --git a/folding-studio/docs/docs/how-to-guides/af3/single_job_protenix.md b/folding-studio/docs/docs/how-to-guides/af3/single_job_protenix.md
new file mode 100644
index 0000000000000000000000000000000000000000..62f289436a5a1535cdb74a7fda0886b89ab79871
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/af3/single_job_protenix.md
@@ -0,0 +1,65 @@
+## Goal
+
+**Protenix** is one of the AlphaFold3-like models supported by Folding Studio. Here is a how-to guide to learn how to launch a simple single folding job on a FASTA file using **Protenix**.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict protenix path/to/my/file.fasta --output ./ --unzip --project-code "your-project-code"
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.protenix import ProtenixQuery
+
+ inference_parameters = {"project_code": "your-project-code"}
+
+ file_path = "path/to/my/file.fasta"
+
+ # Create client
+ client = Client.authenticate()
+
+ # Define query
+ query = ProtenixQuery.from_file(path=file_path, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+ output_path = "./output.zip"
+ response.download_results(output_path, force=True, unzip=True)
+ ```
+
+!!! note
+ Protenix also accepts RCSB FASTA format following this structure. The number of chains will automatically be derived from the description (e.g. 2 chains in the example below):
+ ``` { .shell .no-copy }
+ >1HSG_1|Chains A, B|HIV-1 PROTEASE|Human immunodeficiency virus 1 (11676)
+ PQITLWQRPLVTIKIGGQLKEALLDTGADDTVLEEMSLPGRWKPKMIGGIGGFIKVRQYDQILIEICGHKAIGTVLVGPTPVNIIGRNLLTQIGCTLNF
+ ```
+
+!!! warning
+    In this preview version, Protenix is only compatible with MSA search mode (`--use-msa-server`), which is enabled by default.
+
+Using the CLI, you will get the following information if the job was successfully submitted.
+
+``` { .shell .no-copy }
+╭─────────────────────────────────╮
+│ 🧬 Protenix Folding submission │
+╰─────────────────────────────────╯
+🔑 Authenticating client ✅
+📦 Generating query ✅
+Generated query: {
+ "fasta_files": {
+ "file":
+">A|protein\nMASWSHPQFEKGGTHVAETSAPTRSEPDTRVLTLPGTASAPEFRLIDIDGLLNNRATTDV\nRDLGSGRLNAWGNSFPAAELPAPGSLITVAGIPFTWANAHAR>GDNIRCEGQVVDIPPGQY\nDWIYLLAASERRSEDTIWAHYDDGHADPLRVGISDFLDGTPAFGELSAFRTSR
+MHYPHHV\nQEGLPTTMWLTRVGMPRHGVARSLRLPRSVAMHVFALTLRTAAAVRLAEGATT\n"
+ },
+ "use_msa_server": true,
+ "seeds": "0"
+}
+🧠 Processing folding job ✅
+```
diff --git a/folding-studio/docs/docs/how-to-guides/index.md b/folding-studio/docs/docs/how-to-guides/index.md
new file mode 100644
index 0000000000000000000000000000000000000000..2e055111c1587f9488ad2c9d14ab9c2475c39ec5
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/index.md
@@ -0,0 +1,54 @@
+# How-to guides
+
+The **How-to guides section** provides step-by-step instructions for using Folding Studio efficiently. You will find step-by-step guides for various workflows, from setting up and submitting folding jobs to analyzing the results.
+
+## AlphaFold2/OpenFold
+
+Learn how to work with **AlphaFold2/OpenFold** models in Folding Studio:
+
+- [Provide Input Data](./af2_openfold/provide_input_data.md): Understand what data you can use for folding tasks.
+- [Launch a Folding Job using AlphaFold2](./af2_openfold/single_af2_job.md): Instructions to launch a folding job with the AlphaFold2 model.
+- [Launch a Folding Job using OpenFold](./af2_openfold/single_openfold_job.md): Instructions to launch a folding job with the OpenFold model.
+- [Set Folding Parameters](./af2_openfold/set_af_folding_parameters.md): Discover how to configure custom folding parameters for your jobs.
+- Launch a Batch Folding Job: Learn how to submit batch jobs [from configuration files](./af2_openfold/batch_job_from_configuration_file.md) or [directories](./af2_openfold/batch_job_from_directory.md) containing multiple fasta files.
+- [Check Job Status](./af2_openfold/fetch_folding_job_status.md): Instructions on how to fetch the status of your folding job.
+- [Download Job Logs](./af2_openfold/download_logs.md): Instructions on how to download the logs of your folding job.
+- [Cancel a Job Submission](./af2_openfold/cancel_experiment.md): Learn how to cancel a folding job that is in progress.
+- [Retrieve Features](./af2_openfold/get_experiment_features.md): Discover how to extract features generated by your folding jobs, such as structural metrics.
+- [Download Results](./af2_openfold/download_prediction_results.md): Learn how to download the results from completed folding jobs.
+- Launch Jobs using advanced algorithms:
+ - [Launch a Folding Job using MSA subsampling](./af2_openfold/advanced_algorithms/msa_subsampling_job.md): Learn how to use MSA subsampling to optimize sequence alignments for structure prediction.
+ - [Launch a Folding Job using the Gap Trick for Folding Multimer Complexes](./af2_openfold/advanced_algorithms/gap_trick_job.md): Discover how to fold multimer complexes using monomer models with the Gap Trick approach.
+ - [Launch a Folding Job using an Initial Guess Structure in AlphaFold2](./af2_openfold/advanced_algorithms/initial_guess_af2.md): Learn how to provide an initial structure guess to guide the AlphaFold2 folding process.
+ - [Launch a Folding Job applying Template Masking in Gap Trick Mode](./af2_openfold/advanced_algorithms/template_masking_job.md): Learn how to mask template regions to refine multimer folding while using Gap Trick mode.
+- [Launch a Folding Job using SoloSeq](./af2_openfold/soloseq_job.md): Discover how to run a folding job with the **SoloSeq** model, an OpenFold-like model.
+
+
+## Preview - AlphaFold3-like Models
+
+For advanced folding tasks, Folding Studio supports models similar to **AlphaFold3**:
+
+- [Provide Input Data](./af3/provide_input_data.md): Understand what data you can use for AlphaFold3-like jobs.
+- [Launch a Single Job using Boltz-1](./af3/single_job_boltz.md): Instructions for running a single folding job from a FASTA file using Boltz-1 model.
+- [Launch a Single Job using Chai-1](./af3/single_job_chai.md): Instructions for running a single folding job from a FASTA file using Chai-1 model.
+- [Launch a Single Job using Protenix](./af3/single_job_protenix.md): Instructions for running a single folding job from a FASTA file using Protenix model.
+- [Launch a Single Job from a YAML File using Boltz-1](./af3/boltz_single_yaml_job.md): Instructions for running a single folding job from a YAML file using Boltz-1 model.
+- [Launch a Batch Job from a Directory](./af3/batch_job_from_directory.md): Learn how to submit a batch job by organizing your fasta files in a directory.
+- [Launch a Job from a Protein Sequence](./af3/single_job_from_protein_sequence.md): Step-by-step guide to running a job directly from a protein sequence.
+
+## Post-processing
+
+Once your folding or alignment jobs are complete, you can perform post-processing tasks:
+
+- [Calculate Interface pLDDT and pAE](./other/pLDDT_pAE_calculation.md): Learn how to calculate pLDDT and pAE to assess protein model quality.
+
+## Multiple Sequence Alignment (MSA)
+
+Folding Studio also supports performing MSA searches:
+
+- [Provide Input Data for MSA](./msa_search/provide_input_data.md): Understand what data you can use for MSA search tasks.
+- [Launch an MSA Search with MMSeqs2](./msa_search/msa_search_mmseqs2.md): Step-by-step guide to running an MSA search.
+- [Launch an MSA Search ignoring cache](./msa_search/msa_no_cache.md): Learn how to run an MSA search while bypassing previously cached results.
+- [Check MSA Job Status](./msa_search/fetch_msa_job_status.md): Discover how to check the status of your MSA job.
+- [Download MSA Job Logs](./msa_search/download_msa_logs.md): Instructions on how to download the logs of your MSA job.
+- [Download MSA Results](./msa_search/download_msa_search_results.md): Learn how to download the results of an MSA search.
diff --git a/folding-studio/docs/docs/how-to-guides/msa_search/download_msa_logs.md b/folding-studio/docs/docs/how-to-guides/msa_search/download_msa_logs.md
new file mode 100644
index 0000000000000000000000000000000000000000..75748ad7b0a6c145fb97018ad46a39fc0e8d3758
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/msa_search/download_msa_logs.md
@@ -0,0 +1,19 @@
+## Goal
+This How-to guide explains how to download the logs of an MSA experiment.
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa experiment logs b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9 --output ./msa_logs_exp_b21b09.zip
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.msa import logs
+
+ logs(msa_exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9", output=Path("./msa_logs_exp_b21b09.zip"))
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/msa_search/download_msa_search_results.md b/folding-studio/docs/docs/how-to-guides/msa_search/download_msa_search_results.md
new file mode 100644
index 0000000000000000000000000000000000000000..9f8f5b084c5aefac44b5efc4863d349f07340162
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/msa_search/download_msa_search_results.md
@@ -0,0 +1,53 @@
+## Goal
+
+The MSA search results will comprise:
+
+- The [Multiple Sequence Alignment](https://en.wikipedia.org/wiki/Multiple_sequence_alignment) (MSA) search results. By default, the Folding Studio pipeline will trigger an MSA search on Uniref90, small BFD, MGnify and Uniprot (multimer jobs only) using the `jackhmmer` algorithm.
+- The msa coverage file.
+
+Once the MSA search job has finished, all the generated features are saved into a zip file.
+
+The zip file contains:
+
+- The output of the MSA search: the MSA search results on multiple databases in `.a3m` format.
+- The output of the template search: the four best matching templates in `.cif` format.
+
+Here is an example of the zip file structure for a monomer :
+
+``` { .shell .no-copy }
+extracted_experiment_features_zip
+├── msas
+│ ├── mgnify_hits.a3m
+│ ├── pdb_hits.hhr
+│ ├── small_bfd_hits.a3m
+│ └── uniref90_hits.a3m
+└── msa_coverage.json
+└── logs.txt
+
+```
+
+For multimer, the structure is similar except that there is a dedicated subdirectory for each protein.
+
+## Application
+
+You can download the zip file above to check the generated features by running this command.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa experiment features b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9 --output ./msa_features_exp_b21b09.zip
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.msa import features
+
+ features(msa_exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9", output=Path("./msa_features_exp_b21b09.zip"))
+ ```
+Once the MSA features are downloaded, you will get the following message:
+
+``` { .shell .no-copy }
+File downloaded successfully to msa_features_exp_b21b09.zip.
+```
diff --git a/folding-studio/docs/docs/how-to-guides/msa_search/fetch_msa_job_status.md b/folding-studio/docs/docs/how-to-guides/msa_search/fetch_msa_job_status.md
new file mode 100644
index 0000000000000000000000000000000000000000..9b4ff7c40cc487b55bc7304b3eabeb889773d439
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/msa_search/fetch_msa_job_status.md
@@ -0,0 +1,60 @@
+## Goal
+This how-to guide explains how to manage MSA search jobs using the `msa_experiment_id`. Each submission creates a unique experiment, enabling caching and avoiding redundant computations.
+
+## Application
+
+### Fetch a search job `msa_experiment_id`
+Submitting an MSA search job creates an experiment. This allows caching and avoids
+useless re-computation of previously submitted MSA search jobs.
+
+Each experiment is associated with a unique `msa_experiment_id`. Its generation is
+deterministic, created from the submitted FASTA sequence (without taking into
+account the description) and the job parameters.
+
+By default, if you resubmit an MSA experiment with the same sequence and parameters, it will not be triggered and the response will inform you of the status of the original MSA experiment and its results, if any available. This can be overridden in the options (see [Launch an MSA search ignoring cache](./msa_no_cache.md)).
+
+Once your MSA job has been submitted, and thus the msa experiment created, you
+can get various information from the `msa_experiment_id`.
+
+You can get the list of your msa experiment ids that succeeded or are still pending
+using :
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa experiment list
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.commands.msa import list
+
+ list()
+ ```
+
+### Retrieve a search job status
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa experiment status b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.msa import status
+
+ status(msa_exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9")
+ ```
+
+The experiment status is the current state of the experiment.
+
+| VALUE | DESCRIPTION |
+| ----------- | ------------------------------------------------------------------------------- |
+| `Done` | The experiment is done and its features and results are available for download. |
+| `Pending` | The experiment is still ongoing. |
+| `Failed` | The experiment has failed. |
+| `Cancelled` | The experiment was cancelled.                                                   |
diff --git a/folding-studio/docs/docs/how-to-guides/msa_search/msa_no_cache.md b/folding-studio/docs/docs/how-to-guides/msa_search/msa_no_cache.md
new file mode 100644
index 0000000000000000000000000000000000000000..7c79e5ce57bbc5cdeedd8d5b2c98bdd34fbaf5d5
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/msa_search/msa_no_cache.md
@@ -0,0 +1,19 @@
+## Goal
+This tutorial guides you through running an MSA (Multiple Sequence Alignment) search ...
+
+## Application
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa search path/to/my/file.fasta --no-cache
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.msa import search
+
+ search(source=Path("path/to/my/file.fasta"), cache=False)
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/msa_search/msa_search_mmseqs2.md b/folding-studio/docs/docs/how-to-guides/msa_search/msa_search_mmseqs2.md
new file mode 100644
index 0000000000000000000000000000000000000000..c16ce310f88ebcbbc498d77bf181a24c7969814b
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/msa_search/msa_search_mmseqs2.md
@@ -0,0 +1,29 @@
+## Goal
+This tutorial guides you through running an MSA (Multiple Sequence Alignment) search using MMseqs2 instead of JackHMMer, which is the default method when no specific option is provided in the command.
+
+## Application
+You can choose between two MSA search modes:
+
+| Value | Description |
+| -------------------- | -------------------------------------------------------------------------------------------- |
+| `"search"` (default) | automated search of Uniref90, small_bfd and MGnify databases using `JackHMMer`               |
+| `"mmseqs"` | automated search of Uniref30 and Colabfold_env_db using the **self-hosted** `MMSeqs2` server |
+
+
+!!! note
+    `MMSeqs2` produces fairly different MSA results compared to JackHMMer, as they use different datasets (Uniref30 and colabfold_env_db) and different search algorithms. However, the MSA produced by `MMSeqs2` is generally more diverse and can be leveraged to predict structures with higher accuracy (see this [publication](https://www.nature.com/articles/s41592-023-02130-4)). For more information about MMSeqs2, please refer to the corresponding [paper](https://www.biorxiv.org/content/10.1101/079681v5).
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa search path/to/my/file.fasta --msa-mode mmseqs
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.msa import search
+
+ search(source=Path("path/to/my/file.fasta"), msa_mode="mmseqs")
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/msa_search/provide_input_data.md b/folding-studio/docs/docs/how-to-guides/msa_search/provide_input_data.md
new file mode 100644
index 0000000000000000000000000000000000000000..b70db1bc0e2552384fe342db015936fe722087be
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/msa_search/provide_input_data.md
@@ -0,0 +1,32 @@
+## Goal
+This guide will help you determine whether a given input sequence is compatible for launching an MSA search.
+
+## Supported inputs
+
+To submit an MSA search job, you need the sequence input file in
+[`FASTA`](https://en.wikipedia.org/wiki/FASTA_format) format containing your
+protein sequence.
+
+It can be a monomer or a multimer sequence.
+
+=== "monomer"
+
+ ```text
+ >SARS-CoV-2|RBD|Omicron variant
+ RVQPTESIVRFPNITNLCPFDEVFNATRFASVYAWNRKRISNCVADYSVLYNLAPFFTFK
+ CYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGNIADYNYKLPDDFTGCVIAWNS
+ NKLDSKVSGNYNYLYRLFRKSNLKPFERDISTEIYQAGNKPCNGVAGFNCYFPLRSYSFR
+ PTYGVGHQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNF
+ ```
+
+=== "multimer"
+
+ ```text
+ >SARS-CoV-2|RBD|Omicron variant
+ RVQPTESIVRFPNITNLCPFDEVFNATRFASVYAWNRKRISNCVADYSVLYNLAPFFTFK
+ CYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGNIADYNYKLPDDFTGCVIAWNS
+ NKLDSKVSGNYNYLYRLFRKSNLKPFERDISTEIYQAGNKPCNGVAGFNCYFPLRSYSFR
+ PTYGVGHQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNF
+ >Processed angiotensin-converting enzyme 2|Homo sapiens (9606)
+ STIEEQAKTFLDKFNHEAEDLFYQSSLASWNYNTNITEENVQNMNNAGDKWSAFLKEQSTLAQMYPLQEIQNLTVKLQLQALQQNGSSVLSEDKSKRLNTILNTMSTIYSTGKVCNPDNPQECLLLEPGLNEIMANSLDYNERLWAWESWRSEVGKQLRPLYEEYVVLKNEMARANHYEDYGDYWRGDYEVNGVDGYDYSRGQLIEDVEHTFEEIKPLYEHLHAYVRAKLMNAYPSYISPIGCLPAHLLGDMWGRFWTNLYSLTVPFGQKPNIDVTDAMVDQAWDAQRIFKEAEKFFVSVGLPNMTQGFWENSMLTDPGNVQKAVCHPTAWDLGKGDFRILMCTKVTMDDFLTAHHEMGHIQYDMAYAAQPFLLRNGANEGFHEAVGEIMSLSAATPKHLKSIGLLSPDFQEDNETEINFLLKQALTIVGTLPFTYMLEKWRWMVFKGEIPKDQWMKKWWEMKREIVGVVEPVPHDETYCDPASLFHVSNDYSFIRYYTRTLYQFQFQEALCQAAKHEGPLHKCDISNSTEAGQKLFNMLRLGKSEPWTLALENVVGAKNMNVRPLLNYFEPLFTWLKDQNKNSFVGWSTDWSPYADRHHHHHH
+ ```
diff --git a/folding-studio/docs/docs/how-to-guides/other/pLDDT_pAE_calculation.md b/folding-studio/docs/docs/how-to-guides/other/pLDDT_pAE_calculation.md
new file mode 100644
index 0000000000000000000000000000000000000000..e53fda5fd5062e62dc6e6a716a64e730f585ee0b
--- /dev/null
+++ b/folding-studio/docs/docs/how-to-guides/other/pLDDT_pAE_calculation.md
@@ -0,0 +1,178 @@
+Here we propose recipes to compute interface pLDDT (ipLDDT) and interface PAE (iPAE). The value of these metrics depends on the **chosen** definition of interface residues.
+
+These scripts will use `bioblocks`, you can follow this [link](https://instadeep.gitlab.io/bioai-group/BioBlocks/setup/setup.html) to get detailed steps for installation, you will also need this specific [dependency](https://instadeep.gitlab.io/bioai-group/BioBlocks/setup/setup.html#manual-msms-installation). We use the `get_interface_residues` function to define the interface residues. Please refer to the [`bioblocks` documentation page](https://instadeep.gitlab.io/bioai-group/BioBlocks/api/generated/bioblocks.geometry.get_interface_residues.html#bioblocks-geometry-get-interface-residues) for more details on the input parameters.
+
+### ipLDDT
+
+To calculate the ipLDDT, make sure you downloaded the results of the experiment in question. You will need the metrics file path as well as the prediction path.
+
+1. Download your desired experiment results by running:
+
+```bash
+folding experiment results 6fdb36d380c3f9ba49137af47cf2eed5a6774cab --output result_exp_6fdb3.zip
+unzip result_exp_6fdb3.zip
+```
+
+2. Download the code snippet below into a Python file.
+3. Select a model prediction and edit in the file both `model_name` and `prediction_path`.
+4. Edit the `metrics_file_path` with the metrics `metrics_per_model.json` file path.
+5. Run your Python file.
+
+```python
+import numpy as np
+import json
+from bioblocks.io import read_model
+from bioblocks.geometry import get_interface_residues
+
+def calculate_mean_plddt_all_interfaces(
+ prediction_path: str, model_name: str, metrics_path: str
+) -> float:
+ """
+ Calculate the mean pLDDT for the interface residues between all chain pairs in the model.
+
+ Args:
+ prediction_path: Path to the prediction PDB file.
+ model_name: Name of the model.
+ metrics_path: Path to the JSON file containing pLDDT scores.
+
+ Returns:
+ float: The mean pLDDT score for the interface residues across all chain pairs.
+ """
+ pred = read_model(prediction_path)
+ chains = list(pred.get_chains())
+
+ with open(metrics_path, "r") as f:
+ metrics = json.load(f)
+
+ plddt = np.array(metrics[model_name]["plddt"])
+
+ # Create the offset list
+ offset = [0]
+ for i in range(len(chains) - 1):
+ offset.append(offset[-1] + len(list(chains[i].get_residues())))
+
+ all_interface_indices = []
+
+ # Iterate over all pairs of chains to calculate the interface residues
+ for i in range(len(chains)):
+ chain_a = chains[i]
+ for j in range(i + 1, len(chains)):
+ chain_b = chains[j]
+ residues = get_interface_residues(chain_a, chain_b)
+ interface_indexes_a = [
+ res.id.residue_index - 1 + offset[i] for res in residues[0]
+ ]
+ interface_indexes_b = [
+ res.id.residue_index - 1 + offset[j] for res in residues[1]
+ ]
+ all_interface_indices.extend(interface_indexes_a + interface_indexes_b)
+
+ all_interface_indices = np.unique(np.array(all_interface_indices))
+ mean_plddt = np.mean(plddt[all_interface_indices])
+
+ return mean_plddt
+
+if __name__ == "__main__":
+
+ prediction_path = "/PATH/TO/YOUR/PREDICTION/PDB/FILE"
+ metrics_file_path = "/PATH/TO/YOUR/PREDICTION/METRICS/JSON/FILE"
+ model_name = "model_1_multimer_v3"
+
+ iplddt = calculate_mean_plddt_all_interfaces(prediction_path, model_name, metrics_file_path)
+ print(f"Mean Interface PLDDT across all chain pairs: {iplddt}")
+
+```
+
+### ipAE
+
+To calculate the ipAE, make sure you downloaded the results of the experiment in question. You will need the metrics file path as well as the prediction path.
+
+1. Download your desired experiment results by running:
+
+```bash
+folding experiment results 6fdb36d380c3f9ba49137af47cf2eed5a6774cab --output result_exp_6fdb3.zip
+unzip result_exp_6fdb3.zip
+```
+
+2. Download the code snippet below into a Python file.
+3. Select a model prediction and edit in the file both `model_name` and `prediction_path`.
+4. Edit the `metrics_file_path` with the metrics `metrics_per_model.json` file path.
+5. Run your Python file.
+
+```python
+
+import numpy as np
+import json
+from bioblocks.io import read_model
+from bioblocks.geometry import get_interface_residues
+
+
+def calculate_interface_pae_all_chains(
+ prediction_path: str, model_name: str, metrics_path: str
+) -> float:
+ """
+ Calculate the Interface PAE by averaging the PAE of cross-interface residue pairs between all chain pairs.
+
+ Args:
+ prediction_path: Path to the prediction PDB file.
+ model_name: Name of the model.
+ metrics_path: Path to the JSON file containing PAE matrix.
+
+ Returns:
+ float: The average Interface PAE score across all chain pairs.
+ """
+ pred = read_model(prediction_path)
+
+ chains = list(pred.get_chains())
+
+ with open(metrics_path, "r") as f:
+ metrics = json.load(f)
+
+ pae_matrix = np.array(metrics[model_name]["pae"])
+
+ # Create the offset list
+ offset = [0]
+ for i in range(len(chains) - 1):
+ offset.append(offset[-1] + len(list(chains[i].get_residues())))
+
+ interface_pae_values = []
+
+ for i in range(len(chains)):
+ chain_a = chains[i]
+ for j in range(i + 1, len(chains)):
+ chain_b = chains[j]
+
+ residues = get_interface_residues(chain_a, chain_b)
+ interface_indexes_a = [
+ res.id.residue_index - 1 + offset[i] for res in residues[0]
+ ]
+ interface_indexes_b = [
+ res.id.residue_index - 1 + offset[j] for res in residues[1]
+ ]
+
+ # Calculate PAE for cross-interface residue pairs
+ for idx_a in interface_indexes_a:
+ for idx_b in interface_indexes_b:
+ interface_pae_values.append(pae_matrix[idx_a, idx_b])
+ interface_pae_values.append(
+ pae_matrix[idx_b, idx_a]
+ ) # Pae is not symmetrical
+
+ # Calculate the average PAE for the cross-interface residue pairs
+ if interface_pae_values:
+ mean_pae = np.mean(interface_pae_values)
+ else:
+ mean_pae = 0.0
+
+ return mean_pae
+
+if __name__ == "__main__":
+
+ prediction_path = "/PATH/TO/YOUR/PREDICTION/PDB/FILE"
+ metrics_file_path = "/PATH/TO/YOUR/PREDICTION/METRICS/JSON/FILE"
+ model_name = "model_1_multimer_v3"
+
+ ipae = calculate_interface_pae_all_chains(prediction_path, model_name, metrics_file_path)
+ print(f"Mean Interface PAE across all chain pairs: {ipae}")
+
+```
diff --git a/folding-studio/docs/docs/index.md b/folding-studio/docs/docs/index.md
new file mode 100644
index 0000000000000000000000000000000000000000..8c79d8d9a2edf659cc7a95ad11f16dc008aae72d
--- /dev/null
+++ b/folding-studio/docs/docs/index.md
@@ -0,0 +1,82 @@
+---
+title: "Folding Studio"
+---
+
+## Welcome to Folding Studio
+
+Folding Studio enables you to perform protein structure predictions within the Google Cloud Platform (GCP) environment. By leveraging GCP's robust and scalable cloud infrastructure, it allows for fast and high-throughput protein folding predictions.
+
+This documentation will guide you through interacting programmatically with the Folding Studio CLI and python library, helping you efficiently utilize our tools for protein structure prediction. After reading through this documentation, you will be equipped to set up and submit single and batch jobs by defining input and model parameters, monitor the status of your jobs and retrieve job results, including structure predictions and confidence metrics.
+
+Some of the key features of Folding Studio include:
+
+- Flexible input options: Support for a variety of input formats, including FASTA sequences, YAML files, and more.
+- Batch job submission: Submit multiple jobs simultaneously, streamlining the prediction process for large datasets.
+- Confidence metrics: Retrieve structure predictions along with confidence metrics (e.g., pLDDT, pAE) to assess model quality.
+- Generated features: Access important features like multiple sequence alignments (MSA), templates, and more.
+- Real-time job monitoring: Track job progress, status, and results with ease using the CLI and the python library.
+
+To install Folding Studio, simply follow the [Installation Tutorial](tutorials/installation.md).
+
+Folding Studio supports a range of structure prediction models, categorized into AlphaFold2-like and AlphaFold3-like architectures.
+
+- AlphaFold2-like models: This category includes **AlphaFold2**, **OpenFold**, and **SoloSeq**. AlphaFold2 and OpenFold operate asynchronously, allowing users to submit predictions and retrieve results later. SoloSeq follows a similar modeling approach but runs synchronously.
+
+- AlphaFold3-like models: This category includes **Boltz-1**, **Chai-1**, and **Protenix**. Unlike AlphaFold2 and OpenFold, all models in this category operate synchronously, returning results upon query completion. Users of the `folding-studio` Python library will notice a different interaction pattern when working with these models, as they do not require polling for job status or retrieving results in separate steps.
+
+See [supported models section](./explanation/supported_models.md) for more details about this subject.
+
+## Documentation overview
+
+This documentation is organized as follows:
+
+- **Tutorials**: Step-by-step guides to get you started with Folding Studio, including installation, job submissions, and MSA searches.
+- **How-to Guides**: Detailed instructions for using specific features, such as setting folding parameters or calculating pLDDT and pAE.
+- **Reference**: Comprehensive details on CLI commands, input flags, and the Python library.
+- **Explanation**: In-depth explanation of supported models and advanced algorithms.
+
+---
+
+
+
+- :material-clock-fast:{ .lg .middle } **Tutorials**
+
+ ---
+
+ [:octicons-arrow-right-24: Install Folding Studio](./tutorials/installation.md)
+
+ [:octicons-arrow-right-24: Run AlphaFold2 on a protein sequence](./tutorials/single_folding_job_af2.md)
+
+ [:octicons-arrow-right-24: Perform a Multiple Sequence Alignment (MSA) search](./tutorials/msa_search.md)
+
+ [:octicons-arrow-right-24: Preview - Run folding jobs with AlphaFold3-like models](./tutorials/single_folding_job_af3.md)
+
+- :octicons-rocket-16:{ .lg .middle } **How-to Guides**
+
+ ---
+
+ [:octicons-arrow-right-24: Alphafold2 / Openfold guides](./how-to-guides/af2_openfold/provide_input_data.md)
+
+ [:octicons-arrow-right-24: Multiple Sequence Alignment Search (MSA) guides](./how-to-guides/msa_search/provide_input_data.md)
+
+ [:octicons-arrow-right-24: Preview - Alphafold3-like models guides](./how-to-guides/af3/provide_input_data.md)
+
+ [:octicons-arrow-right-24: Post-processing recipes](./how-to-guides/other/pLDDT_pAE_calculation.md)
+
+- :material-dna:{ .lg .middle } **Explanation**
+
+ ---
+
+ [:octicons-arrow-right-24: Supported models](./explanation/supported_models.md)
+
+ [:octicons-arrow-right-24: Advanced algorithms](./explanation/advanced_algorithms.md)
+
+- :octicons-book-16:{ .lg .middle } **Reference**
+
+ ---
+
+ [:octicons-arrow-right-24: CLI](./reference/cli.md)
+
+ [:octicons-arrow-right-24: Python Library](./reference/python_lib_docs.md)
+
+
diff --git a/folding-studio/docs/docs/reference/cli.md b/folding-studio/docs/docs/reference/cli.md
new file mode 100644
index 0000000000000000000000000000000000000000..44711908a3cd3762603d5aa69d83a978a9c80a83
--- /dev/null
+++ b/folding-studio/docs/docs/reference/cli.md
@@ -0,0 +1,435 @@
+## `experiment`
+### `experiment status`
+
+Get an experiment status.
+
+**Usage**:
+
+```console
+folding experiment status EXP_ID
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| EXP_ID | ID of the experiment. | str |
+
+### `experiment list`
+
+Get all your done and pending experiment ids. The IDs are provided in the order of submission, starting with the most recent.
+
+**Usage**:
+
+```console
+folding experiment list [OPTIONS]
+```
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --limit | Max number of experiments to display in the terminal. | int | 100 |
+| --output | Path to the file where the job metadata returned by the server are written. | Path | No default |
+
+### `experiment features`
+
+Get an experiment features.
+
+**Usage**:
+
+```console
+folding experiment features [OPTIONS] EXP_ID
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| EXP_ID | ID of the experiment. | str |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --output | Local path to download the zip to. | Path | <exp_id>_features.zip |
+| --force / --no-force | Forces the download to overwrite any existing file with the same name in the specified location. | bool | --no-force |
+| --unzip / --no-unzip | Automatically unzip the file after its download. | bool | --no-unzip |
+
+### `experiment results`
+
+Get an experiment results.
+
+**Usage**:
+
+```console
+folding experiment results [OPTIONS] EXP_ID
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| EXP_ID | ID of the experiment. | str |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --output | Local path to download the zip to. | Path | <exp_id>_results.zip |
+| --force / --no-force | Forces the download to overwrite any existing file with the same name in the specified location. | bool | --no-force |
+| --unzip / --no-unzip | Automatically unzip the file after its download. | bool | --no-unzip |
+
+### `experiment cancel`
+
+Cancel experiment job executions. You can pass one or more experiment ids.
+
+**Usage**:
+
+```console
+folding experiment cancel EXP_ID
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| EXP_ID | ID of the experiment. | List[str] |
+
+### `experiment logs`
+
+Get an experiment logs.
+
+**Usage**:
+
+```console
+folding experiment logs [OPTIONS] EXP_ID
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| EXP_ID | ID of the experiment. | str |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --output | Local path to download the logs to. | Path | <exp_id>_logs.txt |
+| --force / --no-force | Forces the download to overwrite any existing file with the same name in the specified location. | bool | --no-force |
+
+## `msa`
+### `msa search`
+
+Run an MSA tool.
+
+**Usage**:
+
+```console
+folding msa search [OPTIONS] SOURCE
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| SOURCE | Path to the input fasta file. | Path |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --project-code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| --cache / --no-cache | Use cached experiment results if any. | bool | --cache |
+| --msa-mode | Mode of the MSA features generation. | FeatureMode | search |
+| --metadata-file | Path to the file where the job metadata returned by the server are written. | Path | No default |
+
+### `msa experiment`
+#### `msa experiment status`
+
+Get an MSA experiment status.
+
+**Usage**:
+
+```console
+folding msa experiment status MSA_EXP_ID
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| MSA_EXP_ID | ID of the MSA experiment. | str |
+
+#### `msa experiment features`
+
+Get an experiment features.
+
+**Usage**:
+
+```console
+folding msa experiment features [OPTIONS] MSA_EXP_ID
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| MSA_EXP_ID | ID of the MSA experiment. | str |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --output | Local path to download the zip to. | Path | <msa_exp_id>_features.zip |
+| --force / --no-force | Forces the download to overwrite any existing file with the same name in the specified location. | bool | --no-force |
+| --unzip / --no-unzip | Automatically unzip the file after its download. | bool | --no-unzip |
+
+#### `msa experiment logs`
+
+Get an experiment logs.
+
+**Usage**:
+
+```console
+folding msa experiment logs [OPTIONS] MSA_EXP_ID
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| MSA_EXP_ID | ID of the MSA experiment. | str |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --output | Local path to download the logs to. | Path | <exp_id>_logs.txt |
+| --force / --no-force | Forces the download to overwrite any existing file with the same name in the specified location. | bool | --no-force |
+
+#### `msa experiment list`
+
+Get all your done and pending experiment ids. The IDs are provided in the order of submission, starting with the most recent.
+
+**Usage**:
+
+```console
+folding msa experiment list [OPTIONS]
+```
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --limit | Max number of experiments to display in the terminal. | int | 100 |
+| --output | Path to the file where the job metadata returned by the server are written. | Path | No default |
+
+## `predict`
+### `predict af2`
+
+Asynchronous AF2 folding submission. If the source is a CSV or JSON file describing a batch prediction request, all the other options will be overlooked.
+
+**Usage**:
+
+```console
+folding predict af2 [OPTIONS] SOURCE
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| SOURCE | Path to the data source. Either a fasta file, a directory of fasta files or a csv/json file describing a batch prediction request. | Path |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --project-code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| --cache / --no-cache | Use cached experiment results if any. | bool | --cache |
+| --template-mode | Mode of the template features generation. | FeatureMode | search |
+| --custom-template | Path to a custom template or a directory of custom templates. To pass multiple inputs, simply repeat the flag (e.g. `--custom_template template_1.cif --custom_template template_2.cif`). | List[Path] | [] |
+| --custom-template-id | ID of a custom template. To pass multiple inputs, simply repeat the flag (e.g. `--custom_template_id template_ID_1 --custom_template_id template_ID_2`). | List[str] | [] |
+| --initial-guess-file | Path to an initial guess file. | Path | No default |
+| --templates-masks-file | Path to a templates masks file. | Path | No default |
+| --msa-mode | Mode of the MSA features generation. | FeatureMode | search |
+| --custom-msa | Path to a custom msa or a directory of custom msas. To pass multiple inputs, simply repeat the flag (e.g. `--custom_msa msa_1.sto --custom_msa msa_2.sto`). | List[Path] | [] |
+| --max-msa-clusters | Max number of MSA clusters to search. | int | -1 |
+| --max-extra-msa | Max extra non-clustered MSA representation to use as source. | int | -1 |
+| --gap-trick / --no-gap-trick | Activate gap trick, allowing to model complexes with monomer models. | bool | --no-gap-trick |
+| --num-recycle | Number of refinement iterations of the predicted structures. | int | 3 |
+| --model-subset | Subset of AF2 model ids to use, between 1 and 5 included. | List[int] | [] |
+| --random-seed | Random seed used during the MSA sampling. Different random seed values will introduce variations in the predictions. | int | 0 |
+| --num-seed | Number of random seeds to use. Creates a batch prediction. | int | No default |
+| --metadata-file | Path to the file where the job metadata returned by the server are written. | Path | No default |
+
+### `predict openfold`
+
+Asynchronous OpenFold folding submission. If the source is a CSV or JSON file describing a batch prediction request, all the other options will be overlooked.
+
+**Usage**:
+
+```console
+folding predict openfold [OPTIONS] SOURCE
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| SOURCE | Path to the data source. Either a fasta file, a directory of fasta files or a csv/json file describing a batch prediction request. | Path |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --project-code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| --cache / --no-cache | Use cached experiment results if any. | bool | --cache |
+| --template-mode | Mode of the template features generation. | FeatureMode | search |
+| --custom-template | Path to a custom template or a directory of custom templates. To pass multiple inputs, simply repeat the flag (e.g. `--custom_template template_1.cif --custom_template template_2.cif`). | List[Path] | [] |
+| --custom-template-id | ID of a custom template. To pass multiple inputs, simply repeat the flag (e.g. `--custom_template_id template_ID_1 --custom_template_id template_ID_2`). | List[str] | [] |
+| --templates-masks-file | Path to a templates masks file. | Path | No default |
+| --msa-mode | Mode of the MSA features generation. | FeatureMode | search |
+| --custom-msa | Path to a custom msa or a directory of custom msas. To pass multiple inputs, simply repeat the flag (e.g. `--custom_msa msa_1.sto --custom_msa msa_2.sto`). | List[Path] | [] |
+| --max-msa-clusters | Max number of MSA clusters to search. | int | -1 |
+| --max-extra-msa | Max extra non-clustered MSA representation to use as source. | int | -1 |
+| --gap-trick / --no-gap-trick | Activate gap trick, allowing to model complexes with monomer models. | bool | --no-gap-trick |
+| --num-recycle | Number of refinement iterations of the predicted structures. | int | 3 |
+| --model-subset | Subset of AF2 model ids to use, between 1 and 5 included. | List[int] | [] |
+| --random-seed | Random seed used during the MSA sampling. Different random seed values will introduce variations in the predictions. | int | 0 |
+| --num-seed | Number of random seeds to use. Creates a batch prediction. | int | No default |
+| --metadata-file | Path to the file where the job metadata returned by the server are written. | Path | No default |
+
+### `predict boltz`
+
+Synchronous Boltz-1 folding submission.
+
+**Usage**:
+
+```console
+folding predict boltz [OPTIONS] SOURCE
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| SOURCE | Path to the data source. Either a FASTA file, a YAML file, or a directory containing FASTA and YAML files. | Path |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --project-code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| --parameters-json | Path to JSON file containing Boltz inference parameters. | Path | No default |
+| --recycling-steps | Number of recycling steps for prediction. | int | 3 |
+| --sampling-steps | Number of sampling steps for prediction. | int | 200 |
+| --diffusion-samples | Number of diffusion samples for prediction. | int | 1 |
+| --step-scale | Step size related to the temperature at which the diffusion process samples the distribution. | float | 1.638 |
+| --msa-pairing-strategy | Pairing strategy for MSA generation. | str | greedy |
+| --write-full-pae / --no-write-full-pae | Whether to save the full PAE matrix as a file. | bool | --no-write-full-pae |
+| --write-full-pde / --no-write-full-pde | Whether to save the full PDE matrix as a file. | bool | --no-write-full-pde |
+| --use-msa-server / --no-use-msa-server | Flag to use the MSA server for inference. | bool | --use-msa-server |
+| --msa-path | Path to the custom MSAs. It can be a .a3m or .aligned.pqt file, or a directory containing these files. | str | No default |
+| --seed | Seed for random number generation. | int | 0 |
+| --output | Local path to download the result zip and query parameters to. Default to 'boltz_results'. | Path | boltz_results |
+| --force / --no-force | Forces the download to overwrite any existing file with the same name in the specified location. | bool | --no-force |
+| --unzip / --no-unzip | Unzip the file after its download. | bool | --no-unzip |
+| --spinner / --no-spinner | Use live spinner in log output. | bool | --spinner |
+
+### `predict chai`
+
+Synchronous Chai-1 folding submission.
+
+**Usage**:
+
+```console
+folding predict chai [OPTIONS] SOURCE
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| SOURCE | Path to the data source. Either a fasta file, a directory of fasta files or a csv/json file describing a batch prediction request. | Path |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --project-code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| --use-msa-server / --no-use-msa-server | Flag to enable MSA features. MSA search is performed by InstaDeep's MMseqs2 server. | bool | --use-msa-server |
+| --use-templates-server / --no-use-templates-server | Flag to enable templates. Templates search is performed by InstaDeep's MMseqs2 server. | bool | --no-use-templates-server |
+| --num-trunk-recycles | Number of trunk recycles during inference. | int | 3 |
+| --seed | Random seed for inference. | int | 0 |
+| --num-diffn-timesteps | Number of diffusion timesteps to run. | int | 200 |
+| --restraints | Restraints information. | str | No default |
+| --recycle-msa-subsample | Subsample parameter for recycling MSA during inference. | int | 0 |
+| --num-trunk-samples | Number of trunk samples to generate during inference. | int | 1 |
+| --msa-path | Path to the custom MSAs. It can be a .a3m or .aligned.pqt file, or a directory containing these files. | str | No default |
+| --output | Local path to download the result zip and query parameters to. Default to 'chai_results'. | Path | chai_results |
+| --force / --no-force | Forces the download to overwrite any existing file with the same name in the specified location. | bool | --no-force |
+| --unzip / --no-unzip | Unzip the file after its download. | bool | --no-unzip |
+| --spinner / --no-spinner | Use live spinner in log output. | bool | --spinner |
+
+### `predict protenix`
+
+Synchronous Protenix folding submission.
+
+**Usage**:
+
+```console
+folding predict protenix [OPTIONS] SOURCE
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| SOURCE | Path to the data source. Either a fasta file or a directory of fasta files describing a batch prediction request. | Path |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --project-code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| --use-msa-server / --no-use-msa-server | Flag to use the MSA server for inference. Forced to True. | bool | --use-msa-server |
+| --seed | Random seed. | int | 0 |
+| --cycle | Pairformer cycle number. | int | 10 |
+| --step | Number of steps for the diffusion process. | int | 200 |
+| --sample | Number of samples in each seed. | int | 5 |
+| --output | Local path to download the result zip and query parameters to. Default to 'protenix_results'. | Path | protenix_results |
+| --force / --no-force | Forces the download to overwrite any existing file with the same name in the specified location. | bool | --no-force |
+| --unzip / --no-unzip | Unzip the file after its download. | bool | --no-unzip |
+| --spinner / --no-spinner | Use live spinner in log output. | bool | --spinner |
+
+### `predict soloseq`
+
+Synchronous SoloSeq folding submission.
+
+**Usage**:
+
+```console
+folding predict soloseq [OPTIONS] SOURCE
+```
+
+**Arguments**:
+
+| ARGUMENT | DESCRIPTION | VALUE TYPE |
+| -------- | ----------- | ----------- |
+| SOURCE | Path to the data source. Either a fasta file or a directory of fasta files. | Path |
+
+**Options**:
+
+| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| --project-code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| --seed | Random seed. | int | 0 |
+| --skip-relaxation / --no-skip-relaxation | Skip the relaxation process. | bool | --no-skip-relaxation |
+| --subtract-plddt / --no-subtract-plddt | Output (100 - pLDDT) instead of the pLDDT itself. | bool | --no-subtract-plddt |
+| --output | Local path to download the result zip and query parameters to. Default to 'soloseq_results'. | Path | soloseq_results |
+| --force / --no-force | Forces the download to overwrite any existing file with the same name in the specified location. | bool | --no-force |
+| --unzip / --no-unzip | Unzip the file after its download. | bool | --no-unzip |
+| --spinner / --no-spinner | Use live spinner in log output. | bool | --spinner |
diff --git a/folding-studio/docs/docs/reference/python_lib_docs.md b/folding-studio/docs/docs/reference/python_lib_docs.md
new file mode 100644
index 0000000000000000000000000000000000000000..c0a63f120263e3bc923debee562f9c5a6a91dc93
--- /dev/null
+++ b/folding-studio/docs/docs/reference/python_lib_docs.md
@@ -0,0 +1,719 @@
+# `folding-studio` Python Library
+
+This document provides an overview of the available functions and classes in `folding_studio`.
+
+## `get_id_token`
+
+**Signature:**
+```{ .python .no-copy }
+get_id_token() -> str
+```
+
+**Description:**
+
+Get the user's gcp token id.
+
+---
+
+## `single_job_prediction`
+
+**Signature:**
+```{ .python .no-copy }
+single_job_prediction(identity_token: str, fasta_file: pathlib.Path,
+parameters: folding_studio_data_models.parameters.alphafold.AF2Parameters | folding_studio_data_models.parameters.openfold.OpenFoldParameters | None = None,
+project_code: str | None = None, *, ignore_cache: bool = False, **kwargs) -> dict
+```
+
+**Description:**
+
+Make a single job prediction from folding parameters and a FASTA file.
+
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| identity_token | GCP identity token | str | No default |
+| fasta_file | Input FASTA file | Path | No default |
+| parameters | Job parameters | AF2Parameters or OpenFoldParameters or None | None |
+| project_code | Project code under which the jobs are billed. If None, value is attempted to be read from environment. | str | None |
+| ignore_cache | Force the job submission or not | bool | False |
+
+---
+
+## `batch_prediction_from_file`
+
+**Signature:**
+```{ .python .no-copy }
+batch_prediction_from_file(identity_token: str, file: pathlib.Path, project_code: str | None = None) -> dict
+```
+
+**Description:**
+
+Make a batch prediction from a configuration file.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| identity_token | GCP identity token | str | No default |
+| file | Configuration file path | Path | No default |
+| project_code | Project code under which the jobs are billed. If None, value is attempted to be read from environment. | str | None |
+
+---
+
+## `af2`
+
+**Signature:**
+```{ .python .no-copy }
+af2(
+ source: Path,
+ project_code: str,
+ cache: bool = True,
+ template_mode: FeatureMode = FeatureMode.SEARCH,
+ custom_template: List[Path] = [],
+ custom_template_id: List[str] = [],
+ initial_guess_file: Optional[Path] = None,
+ templates_masks_file: Optional[Path] = None,
+ msa_mode: FeatureMode = FeatureMode.SEARCH,
+ custom_msa: List[Path] = [],
+ max_msa_clusters: int = -1,
+ max_extra_msa: int = -1,
+ gap_trick: bool = False,
+ num_recycle: int = 3,
+ model_subset: List[int] = [],
+ random_seed: int = 0,
+ num_seed: Optional[int] = None,
+ metadata_file: Optional[Path] = None,
+)
+```
+
+**Description:**
+
+Asynchronous AF2 folding submission. This command is used to submit a folding job to the AlphaFold2 model for protein structure prediction.
+If the `source` is a CSV or JSON file describing a batch prediction request, all the other options will be overlooked.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| source | Path to the data source. Either a fasta file, a directory of fasta files or a csv/json file describing a batch prediction request. | Path | No default |
+| project_code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| cache | Use cached experiment results if any. | bool | True |
+| template_mode | Mode of the template features generation. | FeatureMode | FeatureMode.SEARCH |
+| custom_template | Path to a custom template or a directory of custom templates. | List[Path] | [] |
+| custom_template_id | ID of a custom template. | List[str] | [] |
+| initial_guess_file | Path to an initial guess file. | Path | No default |
+| templates_masks_file | Path to a templates masks file. | Path | No default |
+| msa_mode | Mode of the MSA features generation. | FeatureMode | FeatureMode.SEARCH |
+| custom_msa | Path to a custom msa or a directory of custom msas. | List[Path] | [] |
+| max_msa_clusters | Max number of MSA clusters to search. | int | -1 |
+| max_extra_msa | Max extra non-clustered MSA representation to use as source. | int | -1 |
+| gap_trick | Activate gap trick, allowing to model complexes with monomer models. | bool | False |
+| num_recycle | Number of refinement iterations of the predicted structures. | int | 3 |
+| model_subset | Subset of AF2 model ids to use, between 1 and 5 included. | List[int] | [] |
+| random_seed | Random seed used during the MSA sampling. Different random seed values will introduce variations in the predictions. | int | 0 |
+| num_seed | Number of random seeds to use. Creates a batch prediction. | int | No default |
+| metadata_file | Path to the file where the job metadata returned by the server are written. | Path | No default |
+
+---
+
+## `openfold`
+
+**Signature:**
+```{ .python .no-copy }
+openfold(
+ source: Path,
+ project_code: str,
+ cache: bool = True,
+ template_mode: FeatureMode = FeatureMode.SEARCH,
+ custom_template: List[Path] = [],
+ custom_template_id: List[str] = [],
+ templates_masks_file: Optional[Path] = None,
+ msa_mode: FeatureMode = FeatureMode.SEARCH,
+ custom_msa: List[Path] = [],
+ max_msa_clusters: int = -1,
+ max_extra_msa: int = -1,
+ gap_trick: bool = False,
+ num_recycle: int = 3,
+ model_subset: List[int] = [],
+ random_seed: int = 0,
+ num_seed: Optional[int] = None,
+ metadata_file: Optional[Path] = None,
+)
+```
+
+**Description:**
+
+Asynchronous OpenFold folding submission. This command is used to submit a folding job to the OpenFold model for protein structure prediction.
+
+If the source is a CSV or JSON file describing a batch prediction request, all the other options will be overlooked.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| source | Path to the data source. Either a fasta file, a directory of fasta files or a csv/json file describing a batch prediction request. | Path | No default |
+| project_code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| cache | Use cached experiment results if any. | bool | True |
+| template_mode | Mode of the template features generation. | FeatureMode | FeatureMode.SEARCH |
+| custom_template | Path to a custom template or a directory of custom templates. | List[Path] | [] |
+| custom_template_id | ID of a custom template. | List[str] | [] |
+| templates_masks_file | Path to a templates masks file. | Path | No default |
+| msa_mode | Mode of the MSA features generation. | FeatureMode | FeatureMode.SEARCH |
+| custom_msa | Path to a custom msa or a directory of custom msas.| List[Path] | [] |
+| max_msa_clusters | Max number of MSA clusters to search. | int | -1 |
+| max_extra_msa | Max extra non-clustered MSA representation to use as source. | int | -1 |
+| gap_trick | Activate gap trick, allowing to model complexes with monomer models. | bool | False |
+| num_recycle | Number of refinement iterations of the predicted structures. | int | 3 |
+| model_subset | Subset of AF2 model ids to use, between 1 and 5 included. | List[int] | [] |
+| random_seed | Random seed used during the MSA sampling. Different random seed values will introduce variations in the predictions. | int | 0 |
+| num_seed | Number of random seeds to use. Creates a batch prediction. | int | No default |
+| metadata_file | Path to the file where the job metadata returned by the server are written. | Path | No default |
+
+---
+
+## `list` (experiment)
+
+**Signature:**
+```{ .python .no-copy }
+list()
+```
+
+**Description:**
+
+Fetches a list of all completed and pending experiments.
+
+---
+
+## `status` (experiment)
+
+**Signature:**
+```{ .python .no-copy }
+status(
+ exp_id: str
+)
+```
+
+**Description:**
+
+Fetches the status of a specific experiment using its ID. The function makes a GET request to the server to retrieve the status of the experiment.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE |
+| ------- | ----------- | ---------- |
+| exp_id | The experiment ID for which the status needs to be fetched | str |
+
+---
+
+## `cancel` (experiment)
+
+**Signature:**
+```{ .python .no-copy }
+cancel(
+ exp_id: List[str]
+)
+```
+
+**Description:**
+
+Cancels the execution of one or more experiment jobs by their IDs.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE |
+| ------- | ----------- | ---------- |
+| exp_id | A list of experiment IDs to cancel | List[str] |
+
+---
+
+## `results` (experiment)
+
+**Signature:**
+```{ .python .no-copy }
+results(
+ exp_id: str,
+ output: Optional[Path] = None,
+ force: bool = False,
+ unzip: bool = False
+)
+```
+
+**Description:**
+
+Downloads the results of a specified experiment, given its experiment ID.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| exp_id | The experiment ID of the results to retrieve. | str | No default |
+| output | The local path where the zip file will be downloaded. | Optional[Path] | None |
+| force | Whether to overwrite an existing file at the specified location. | bool | False |
+| unzip | Whether to automatically unzip the downloaded file after the download completes. | bool | False |
+
+---
+
+## `features` (experiment)
+
+**Signature:**
+```{ .python .no-copy }
+features(
+ exp_id: str,
+ output: Optional[Path] = None,
+ force: bool = False,
+ unzip: bool = False
+)
+```
+
+**Description:**
+
+Downloads the features of a specified experiment, given its experiment ID.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| exp_id | The experiment ID of the results to retrieve. | str | No default |
+| output | The local path where the zip file will be downloaded. | Optional[Path] | None |
+| force | Whether to overwrite an existing file at the specified location. | bool | False |
+| unzip | Whether to automatically unzip the downloaded file after the download completes. | bool | False |
+
+
+---
+
+## `search` (multiple sequences alignment - msa)
+
+**Signature:**
+```{ .python .no-copy }
+search(
+ source: Path,
+ project_code: str,
+ cache: bool = True,
+ msa_mode: FeatureMode = FeatureMode.SEARCH,
+)
+
+```
+
+**Description:**
+
+Runs an MSA (Multiple Sequence Alignment).
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| source | Path to the data source. Either a fasta file, a directory of fasta files or a csv/json file describing a batch prediction request. | Path | No default |
+| project_code | Project code. If unknown, contact your PM or the Folding Studio team. | str | No default |
+| cache | Use cached experiment results if any. | bool | True |
+| msa_mode | Mode of the MSA features generation. | FeatureMode | FeatureMode.SEARCH |
+
+
+---
+
+## `list` (multiple sequences alignment - msa)
+
+**Signature:**
+```{ .python .no-copy }
+list()
+```
+
+**Description:**
+
+Fetches a list of all completed and pending experiments.
+
+---
+
+## `status` (multiple sequences alignment - msa)
+
+**Signature:**
+```{ .python .no-copy }
+status(
+ msa_exp_id: str
+)
+```
+
+**Description:**
+
+Fetches the status of a specific MSA experiment using its ID.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE |
+| ------- | ----------- | ---------- |
+| msa_exp_id | The MSA experiment ID for which the status needs to be fetched | str |
+
+
+---
+
+## `features` (multiple sequences alignment - msa)
+
+**Signature:**
+```{ .python .no-copy }
+features(
+ msa_exp_id: str,
+ output: Optional[Path] = None,
+ force: bool = False,
+ unzip: bool = False
+)
+```
+
+**Description:**
+
+Fetches the features of a given MSA (Multiple Sequence Alignment) experiment.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| ------- | ----------- | ---------- | ------------- |
+| msa_exp_id | The MSA experiment ID of the results to retrieve. | str | No default |
+| output | The local path where the zip file will be downloaded. | Optional[Path] | None |
+| force | Whether to overwrite an existing file at the specified location. | bool | False |
+| unzip | Whether to automatically unzip the downloaded file after the download completes. | bool | False |
+
+
+---
+
+## `Client`
+
+**Signature:**
+```{ .python .no-copy }
+Client(api_key: 'str | None' = None, token_manager: 'TokenManager | None' = None) -> 'None'
+```
+
+**Description:**
+
+The `Client` class is used to send requests to a prediction endpoint. It supports authentication via an API key or a Google Cloud JWT token, and it handles sending queries, managing errors, and receiving responses.
+
+### Parameters:
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| api_key | API key for authentication | str or None | None |
+| token_manager | JWT token manager | TokenManager or None | None |
+
+### Class methods:
+
+#### `from_api_key`
+Creates a Client instance using an API key.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| api_key | API key for authentication | str or None | None |
+
+#### `from_jwt`
+Creates a Client instance using a Google Cloud JWT token.
+
+
+### Instance methods:
+
+#### `send_request`
+Sends a prediction request to the server.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| query | A `Query` object containing the prediction data to send. | Query | No default |
+
+---
+
+## `SoloSeqQuery`
+
+**Description:**
+
+The `SoloSeqQuery` class is used to query the SoloSeq prediction API with FASTA data.
+
+### Class methods:
+
+#### `from_protein_sequence`
+Creates a `SoloSeqQuery` instance from a protein sequence given as a string.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| sequence | Input sequence to make prediction on. | str | |
+| parameters | Prediction parameters.| SoloSeqParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class SoloSeqParameters(BaseModel):
+ """SoloSeq inference parameters."""
+
+ data_random_seed: int = Field(alias="seed", default=0)
+ skip_relaxation: bool = False
+ subtract_plddt: bool = False
+```
+
+#### `from_file`
+Creates a `SoloSeqQuery` instance from a FASTA file located at the given path.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| path | Path to the FASTA file. | str or Path | |
+| parameters | Prediction parameters.| SoloSeqParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class SoloSeqParameters(BaseModel):
+ """SoloSeq inference parameters."""
+
+ data_random_seed: int = Field(alias="seed", default=0)
+ skip_relaxation: bool = False
+ subtract_plddt: bool = False
+```
+
+#### `from_directory`
+Creates a `SoloSeqQuery` instance from a directory containing multiple FASTA files.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| path | Path to the directory containing FASTA files.| str or Path| |
+| parameters | Prediction parameters.| SoloSeqParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class SoloSeqParameters(BaseModel):
+ """SoloSeq inference parameters."""
+
+ data_random_seed: int = Field(alias="seed", default=0)
+ skip_relaxation: bool = False
+ subtract_plddt: bool = False
+```
+
+---
+
+## `BoltzQuery`
+
+**Description:**
+
+The `BoltzQuery` class is used to query the Boltz-1 prediction API with FASTA data.
+
+### Class methods:
+
+#### `from_protein_sequence`
+Creates a `BoltzQuery` instance from a protein sequence given as a string.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| sequence | Input sequence to make prediction on. | str | |
+| parameters | Prediction parameters.| BoltzParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class BoltzParameters(BaseModel):
+ """Boltz inference parameters."""
+
+ seed: int = 0
+ recycling_steps: int = 3
+ sampling_steps: int = 200
+ diffusion_samples: int = 1
+ step_scale: float = 1.638
+ msa_pairing_strategy: str = "greedy"
+ write_full_pae: bool = False
+ write_full_pde: bool = False
+ use_msa_server: bool = True
+```
+
+#### `from_file`
+Creates a `BoltzQuery` instance from a FASTA file or a YAML file located at the given path.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| path | Path to the FASTA or YAML file. | str or Path | |
+| parameters | Prediction parameters.| BoltzParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class BoltzParameters(BaseModel):
+ """Boltz inference parameters."""
+
+ seed: int = 0
+ recycling_steps: int = 3
+ sampling_steps: int = 200
+ diffusion_samples: int = 1
+ step_scale: float = 1.638
+ msa_pairing_strategy: str = "greedy"
+ write_full_pae: bool = False
+ write_full_pde: bool = False
+ use_msa_server: bool = True
+```
+
+#### `from_directory`
+Creates a `BoltzQuery` instance from a directory containing multiple FASTA and/or YAML files.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| path | Path to the directory containing FASTA and/or YAML files.| str or Path| |
+| parameters | Prediction parameters.| BoltzParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class BoltzParameters(BaseModel):
+ """Boltz inference parameters."""
+
+ seed: int = 0
+ recycling_steps: int = 3
+ sampling_steps: int = 200
+ diffusion_samples: int = 1
+ step_scale: float = 1.638
+ msa_pairing_strategy: str = "greedy"
+ write_full_pae: bool = False
+ write_full_pde: bool = False
+ use_msa_server: bool = True
+```
+
+---
+
+## `ChaiQuery`
+
+**Description:**
+
+The `ChaiQuery` class is used to query the Chai-1 prediction API with FASTA data.
+
+### Class methods:
+
+#### `from_protein_sequence`
+Creates a `ChaiQuery` instance from a protein sequence given as a string.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| sequence | Input sequence to make prediction on. | str | |
+| parameters | Prediction parameters.| ChaiParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class ChaiParameters(BaseModel):
+ """Chai inference parameters."""
+
+ seed: int = 0
+ num_trunk_recycles: int = 3
+ num_diffn_timesteps: int = 200
+ recycle_msa_subsample: int = 0
+ num_trunk_samples: int = 1
+ restraints: str | None = None
+ use_msa_server: bool = False
+ use_templates_server: bool = False
+ custom_msa_paths: dict[str, str] | None = None
+```
+
+#### `from_file`
+Creates a `ChaiQuery` instance from a FASTA file located at the given path.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| path | Path to the FASTA file. | str or Path | |
+| parameters | Prediction parameters.| ChaiParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class ChaiParameters(BaseModel):
+ """Chai inference parameters."""
+
+ seed: int = 0
+ num_trunk_recycles: int = 3
+ num_diffn_timesteps: int = 200
+ recycle_msa_subsample: int = 0
+ num_trunk_samples: int = 1
+ restraints: str | None = None
+ use_msa_server: bool = False
+ use_templates_server: bool = False
+ custom_msa_paths: dict[str, str] | None = None
+```
+
+#### `from_directory`
+Creates a `ChaiQuery` instance from a directory containing multiple FASTA files.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| path | Path to the directory containing FASTA files.| str or Path| |
+| parameters | Prediction parameters.| ChaiParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class ChaiParameters(BaseModel):
+ """Chai inference parameters."""
+
+ seed: int = 0
+ num_trunk_recycles: int = 3
+ num_diffn_timesteps: int = 200
+ recycle_msa_subsample: int = 0
+ num_trunk_samples: int = 1
+ restraints: str | None = None
+ use_msa_server: bool = False
+ use_templates_server: bool = False
+ custom_msa_paths: dict[str, str] | None = None
+```
+
+---
+
+## `ProtenixQuery`
+
+**Description:**
+
+The `ProtenixQuery` class is used to query the Protenix prediction API with FASTA data.
+
+### Class methods:
+
+#### `from_protein_sequence`
+Creates a `ProtenixQuery` instance from a protein sequence given as a string.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| sequence | Input sequence to make prediction on. | str | |
+| parameters | Prediction parameters.| ProtenixParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class ProtenixParameters(BaseModel):
+ """Protenix inference parameters."""
+
+ seeds: str = Field(alias="seed", default="0", coerce_numbers_to_str=True)
+ use_msa_server: bool = True
+```
+
+#### `from_file`
+Creates a `ProtenixQuery` instance from a FASTA file located at the given path.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| path | Path to the FASTA file. | str or Path | |
+| parameters | Prediction parameters.| ProtenixParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class ProtenixParameters(BaseModel):
+ """Protenix inference parameters."""
+
+ seeds: str = Field(alias="seed", default="0", coerce_numbers_to_str=True)
+ use_msa_server: bool = True
+```
+
+#### `from_directory`
+Creates a `ProtenixQuery` instance from a directory containing multiple FASTA files.
+
+| PARAMETER | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |
+| --------- | ----------- | ---------- | ------------- |
+| path | Path to the directory containing FASTA files.| str or Path| |
+| parameters | Prediction parameters.| ProtenixParameters | Default values below. |
+
+with
+
+```{ .python .no-copy }
+class ProtenixParameters(BaseModel):
+ """Protenix inference parameters."""
+
+ seeds: str = Field(alias="seed", default="0", coerce_numbers_to_str=True)
+ use_msa_server: bool = True
+```
+
+---
diff --git a/folding-studio/docs/docs/tutorials/index.md b/folding-studio/docs/docs/tutorials/index.md
new file mode 100644
index 0000000000000000000000000000000000000000..d6268a62360714f2a1f0966744d4bfcaabf5e0f7
--- /dev/null
+++ b/folding-studio/docs/docs/tutorials/index.md
@@ -0,0 +1,9 @@
+# Tutorials
+The Tutorials section of the documentation provides step-by-step guides on installing and using Folding Studio for protein structure prediction.
+
+It is structured into the following subsections:
+
+1. [Install Folding Studio](./installation.md)
+1. [Make a single prediction using AlphaFold2](./single_folding_job_af2.md)
+1. [Perform a Multiple Sequence Alignment (MSA) search](./msa_search.md)
+1. [Preview - Run folding jobs with AlphaFold3-like models](./single_folding_job_af3.md)
diff --git a/folding-studio/docs/docs/tutorials/installation.md b/folding-studio/docs/docs/tutorials/installation.md
new file mode 100644
index 0000000000000000000000000000000000000000..eb419e8a641c55adc3fa05a2a5392c5b1ae9094e
--- /dev/null
+++ b/folding-studio/docs/docs/tutorials/installation.md
@@ -0,0 +1,61 @@
+## Prerequisites
+
+1. Install the `gcloud` CLI following the official Google Cloud [instructions page](https://cloud.google.com/sdk/docs/install).
+1. Create and activate a virtual environment, for example `folding-pipeline`, using your favourite tool (conda, pyenv, venv, ...). **Only Python version `3.11` is supported**.
+1. It is recommended to make `gcloud` use the same Python interpreter as your virtual environment:
+
+ ```bash
+ # Use a Python you have installed in a special location
+ export CLOUDSDK_PYTHON=
+ ```
+
+1. Authenticate to `gcloud`:
+
+ ```bash
+ gcloud auth application-default login
+ ```
+
+1. To simplify the `gcloud` authentication process when you install a Python package from InstaDeep's Google Artifact Registry, install the `keyring.google-artifactregistry-auth` package in your virtual environment:
+
+ ```bash
+ pip install keyrings.google-artifactregistry-auth
+ ```
+
+ It automatically searches for credentials from the environment and authenticates to Artifact Registry. Otherwise, you will have to specify your GCP credentials during the package installation.
+
+1. Provide a project code
+To submit folding jobs, you need to provide a project code. Ensure that you use the project code that corresponds to your project or Statement of Work (SOW).
+
+ - Projects/SOW codes are communicated to PMs.
+ - The project code can be defined as an environment variable or passed as an argument
+ to the CLI
+
+ ```bash
+ export FOLDING_PROJECT_CODE=
+ ```
+
+1. If you do not possess an InstaDeep account or you intend to use Folding Studio on a server or with a service account, you are able to authenticate using an API key.
+
+ - API keys are generated on demand, please contact the Folding Studio team.
+ - The API key must be defined as an environment variable.
+
+
+ ```bash
+ export FOLDING_API_KEY=
+ ```
+
+## CLI and `folding_studio` library
+
+To install the CLI and `folding-studio` library of helper functions, simply use pip in your virtual environment:
+
+```bash
+pip install --extra-index-url https://europe-west4-python.pkg.dev/instadeep/folding-studio/simple folding-studio
+```
+
+This package will install both the CLI, available under the `folding` command, and the `folding_studio` suite of helper functions available in Python.
+
+## Troubleshooting
+
+### While installing the CLI I get prompted `User for http://europe-west4-python.pkg.dev/:`
+
+This message means that the `gcloud` credentials were not found. Please refer to steps 4 and 5 of [Prerequisites](#prerequisites).
diff --git a/folding-studio/docs/docs/tutorials/msa_search.md b/folding-studio/docs/docs/tutorials/msa_search.md
new file mode 100644
index 0000000000000000000000000000000000000000..8de2eedb5ade51ef3956f53f5ac33d152a6b0998
--- /dev/null
+++ b/folding-studio/docs/docs/tutorials/msa_search.md
@@ -0,0 +1,115 @@
+In this tutorial, we will guide you through the process of submitting an MSA (Multiple Sequence Alignment) search job using the command-line interface (CLI). You will learn how to prepare your sequence input file and submit the job to retrieve the aligned sequences.
+
+An MSA search helps in finding similar sequences in biological databases, allowing you to analyze protein structures and relationships more effectively. We support two types of sequence submissions: monomer and multimer. The process automatically detects the sequence type, so you can focus on your analysis.
+
+## Input data
+To submit an MSA search job, you need a sequence input file in
+[`FASTA`](https://en.wikipedia.org/wiki/FASTA_format) format containing your
+protein sequence. You can use the following monomer sequence as an example.
+
+=== "monomer"
+
+ ```text
+ >SARS-CoV-2|RBD|Omicron variant
+ RVQPTESIVRFPNITNLCPFDEVFNATRFASVYAWNRKRISNCVADYSVLYNLAPFFTFK
+ CYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGNIADYNYKLPDDFTGCVIAWNS
+ NKLDSKVSGNYNYLYRLFRKSNLKPFERDISTEIYQAGNKPCNGVAGFNCYFPLRSYSFR
+ PTYGVGHQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNF
+ ```
+
+## Submit an MSA search job
+You simply use the `msa` command of the CLI to submit your job.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa search path/to/my/file.fasta --project-code "your-project-code"
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.commands.msa import search
+
+ search(source="path/to/my/file.fasta", project_code="your-project-code")
+ ```
+
+## Identify the `experiment_id` of your search job
+
+Your experiment is associated with a unique `experiment_id`.
+
+You get the list of your experiment ids that succeeded or are still pending
+using:
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa experiment list
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.commands.msa import list
+
+ list()
+ ```
+
+
+Once you have submitted a folding job, you can get its status at any time.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa experiment status b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.commands.msa import status
+
+ status(msa_exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9")
+ ```
+
+The experiment status is the current state of the experiment.
+
+| VALUE | DESCRIPTION |
+| ----------- | ------------------------------------------------------------------------------- |
+| `Done` | The experiment is done and its features and results are available for download. |
+| `Pending` | The experiment is still ongoing. |
+| `Failed` | The experiment has failed. |
+| `Cancelled` | The experiment was cancelled. |
+
+## Download Results
+
+You download the search job results by running the following command.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding msa experiment features b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9 --output ./features_exp_b21b09.zip
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.msa import features
+
+ features(msa_exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9", output=Path("./features_exp_b21b09.zip"))
+ ```
+
+Here is an example of the zip file structure for a monomer:
+
+``` { .shell .no-copy }
+extracted_experiment_features_zip
+├── msas
+│ ├── mgnify_hits.a3m
+│ ├── pdb_hits.hhr
+│ ├── small_bfd_hits.a3m
+│ └── uniref90_hits.a3m
+├── msa_coverage.json
+└── logs.txt
+
+```
diff --git a/folding-studio/docs/docs/tutorials/single_folding_job_af2.md b/folding-studio/docs/docs/tutorials/single_folding_job_af2.md
new file mode 100644
index 0000000000000000000000000000000000000000..f5fd9335403c18995bace9100548f6f21686b506
--- /dev/null
+++ b/folding-studio/docs/docs/tutorials/single_folding_job_af2.md
@@ -0,0 +1,154 @@
+
+In this tutorial, you will learn how to run predictions using the AlphaFold2 model.
+Once the job is submitted, you can track its status, retrieve results, and access various metrics from the generated predictions. To gain a deeper understanding of the different inference parameters and how to fine-tune them for your needs, we recommend reviewing the [How-to guides in the AlphaFold2 and OpenFold section](./../how-to-guides/af2_openfold/provide_input_data.md) for detailed examples and usage tips.
+
+## Input data
+
+To submit a folding job, you need a sequence input file in
+[`FASTA`](https://en.wikipedia.org/wiki/FASTA_format) format containing your
+protein sequence. You can use the following monomer.
+
+=== "monomer"
+
+ ```text
+ >SARS-CoV-2|RBD|Omicron variant
+ RVQPTESIVRFPNITNLCPFDEVFNATRFASVYAWNRKRISNCVADYSVLYNLAPFFTFK
+ CYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGNIADYNYKLPDDFTGCVIAWNS
+ NKLDSKVSGNYNYLYRLFRKSNLKPFERDISTEIYQAGNKPCNGVAGFNCYFPLRSYSFR
+ PTYGVGHQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNF
+ ```
+
+## Submit job
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict af2 path/to/my/file.fasta --project-code "your-project-code"
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.predict import af2 as af2_predict
+
+ af2_predict(source=Path("path/to/my/file.fasta"), project_code="your-project-code")
+ ```
+
+Using the CLI, you will get the following information once the job has been successfully submitted.
+
+``` { .shell .no-copy }
+Single job successfully submitted.
+Experiment submitted successfully !
+The experiment id is b938c1adaec932e8a6ba765c80144492b6a3f1e6
+Prediction job metadata written to simple_prediction_20250305172707.json
+You can query your experiment status with the command:
+
+ folding experiment status b938c1adaec932e8a6ba765c80144492b6a3f1e6
+```
+
+For details about the inference parameters / flags for each model check the [reference section](../reference/cli.md#predict).
+
+## Identify the `experiment_id` of your job
+
+Your experiment is associated with a unique `experiment_id`.
+
+You get the list of the experiment ids that succeeded or are still pending
+using:
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding experiment list
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.commands.experiment import list
+
+ list()
+ ```
+
+You will get a table with the status of the job you just launched:
+
+``` { .shell .no-copy }
+Done and pending experiments list written to None
+ Done and pending experiments
+┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┓
+┃ Experiment ID ┃ Status ┃
+┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━┩
+│ b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9 │Pending │
+└──────────────────────────────────────────┴────────┘
+```
+
+## Get your job status
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding experiment status b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.commands.experiment import status
+
+ status(exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9")
+ ```
+
+The experiment status is the current state of the experiment.
+
+| VALUE | DESCRIPTION |
+| ----------- | ------------------------------------------------------------------------------- |
+| `Done` | The experiment is done and its features and results are available for download. |
+| `Pending` | The experiment is still ongoing. |
+| `Failed` | The experiment has failed. |
+| `Cancelled` | The experiment was cancelled. |
+
+## Download results
+
+After your experiment has finished, you download the results zip file.
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding experiment results b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9 --output ./result_exp_b21b09.zip
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from pathlib import Path
+ from folding_studio.commands.experiment import results
+
+ results(exp_id="b21b09a6a43dcfb282bdc00ec79bd7ae06de97b9", output=Path("./result_exp_b21b09.zip"))
+ ```
+
+You will get the message:
+
+```bash
+File downloaded successfully to result_exp_b21b09.zip.
+```
+
+Here is an example of the zip file structure :
+
+``` { .shell .no-copy }
+results
+├── metrics_per_model.json
+├── msa_coverage.json
+├── relaxed_predictions
+│ ├── model_1_ptm.pdb
+│ ├── model_2_ptm.pdb
+│ ├── model_3_ptm.pdb
+│ ├── model_4_ptm.pdb
+│ └── model_5_ptm.pdb
+├── rmsd_per_model.json
+└── unrelaxed_predictions
+ ├── model_1_ptm.pdb
+ ├── model_2_ptm.pdb
+ ├── model_3_ptm.pdb
+ ├── model_4_ptm.pdb
+ └── model_5_ptm.pdb
+```
diff --git a/folding-studio/docs/docs/tutorials/single_folding_job_af3.md b/folding-studio/docs/docs/tutorials/single_folding_job_af3.md
new file mode 100644
index 0000000000000000000000000000000000000000..c7ad2a2d80ded960761af8b0b47c40d4e0ab0207
--- /dev/null
+++ b/folding-studio/docs/docs/tutorials/single_folding_job_af3.md
@@ -0,0 +1,111 @@
+In this tutorial, you will learn how to run the **Boltz-1**, **Chai-1** and **Protenix** models, which are all AlphaFold3-like models. These models' features differ slightly from those of the AlphaFold2 and OpenFold models.
+To help you get started, we recommend exploring the other How-to guides in the AlphaFold3-like models section for more detailed examples and usage instructions tailored to each model.
+
+This tutorial focuses on using the Boltz-1 model.
+
+## Input data
+
+To submit a folding job, you need a sequence input file in
+[`FASTA`](https://en.wikipedia.org/wiki/FASTA_format) format containing your
+protein sequence. You can use the following monomer.
+
+=== "monomer"
+
+ ```text
+ >SARS-CoV-2|RBD|Omicron variant
+ RVQPTESIVRFPNITNLCPFDEVFNATRFASVYAWNRKRISNCVADYSVLYNLAPFFTFK
+ CYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGNIADYNYKLPDDFTGCVIAWNS
+ NKLDSKVSGNYNYLYRLFRKSNLKPFERDISTEIYQAGNKPCNGVAGFNCYFPLRSYSFR
+ PTYGVGHQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNF
+ ```
+
+## Submit job and get results
+
+You use the `predict boltz` command of the CLI. You can then add the different inference parameters as flags. In particular, the `--output-path` flag allows you to specify the path of the zip file that will contain the results.
+
+For details about the inference parameters / flags for each model check the [reference section](../reference/cli.md#predict).
+
+=== ":octicons-command-palette-16: CLI"
+
+ ```bash
+ folding predict boltz path/to/my/file.fasta --project-code "your-project-code" --output-path ./output.zip --seed 42
+ ```
+
+=== ":material-language-python: Python"
+
+ ```python
+ from folding_studio.client import Client
+ from folding_studio.query.boltz import BoltzQuery
+
+ inference_parameters = {"project_code": "your-project-code",
+ "seed":42}
+
+ file_path = "path/to/my/file.fasta"
+
+ # Create client
+ client = Client.from_jwt()
+
+ # Define query
+ query = BoltzQuery.from_file(path=file_path, parameters=inference_parameters)
+
+ # Send request
+ response = client.send_request(query)
+
+ # Download results
+ output_path = "./output.zip"
+ response.download_results(output_path, force=True, unzip=True)
+ ```
+
+Using the CLI, you will get the following information if the job was successfully submitted.
+
+``` { .shell .no-copy }
+╭───────────────────────────────╮
+│ 🧬 Boltz1 Folding submission │
+╰───────────────────────────────╯
+🔑 Authenticating client ✅
+📦 Generating query ✅
+Generated query: {
+ "fasta_files": {
+ "file": ">A|protein|\nQLEDSEVEAVAKGLEEMYANGVTEDNFKNYVKNNFAQQEISSVEEELNVNISDSCVANKIKDEFFAMISISAIVKAAQKKAWKELAVTVLRFAKANGLKTNAIIVAGQLALWAVQCG"
+ },
+ "yaml_files": {},
+ "parameters": {
+ "seed": 42,
+ "recycling_steps": 3,
+ "sampling_steps": 200,
+ "diffusion_samples": 1,
+ "step_scale": 1.638,
+ "msa_pairing_strategy": "greedy",
+ "write_full_pae": false,
+ "write_full_pde": false
+ }
+}
+🧠 Processing folding job ✅
+```
+
+And you will get the following information once the job is completed and the results are downloaded.
+
+``` { .shell .no-copy }
+Confidence data: {
+ "prot_73bcabf6-54e5-4762-8745-97e6de0f9c22": {
+ "chains_ptm": {
+ "0": 0.7689766883850098
+ },
+ "complex_ipde": 0,
+ "complex_iplddt": 0,
+ "complex_pde": 0.8002958297729492,
+ "complex_plddt": 0.8243614435195923,
+ "confidence_score": 0.8132845163345337,
+ "iptm": 0,
+ "ligand_iptm": 0,
+ "pair_chains_iptm": {
+ "0": {
+ "0": 0.7689766883850098
+ }
+ },
+ "protein_iptm": 0,
+ "ptm": 0.7689766883850098
+ }
+}
+💾 Downloading results to `boltz_results` ✅
+```
diff --git a/folding-studio/docs/generate_cli_docs.py b/folding-studio/docs/generate_cli_docs.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc55fddb8e8409fdb0b06b7d927eb8e93920679f
--- /dev/null
+++ b/folding-studio/docs/generate_cli_docs.py
@@ -0,0 +1,209 @@
+import re
+import types
+from typing import Union, get_args, get_origin, get_type_hints
+
+import typer
+from folding_studio.cli import app
+from typer.models import TyperInfo
+from typer.utils import get_params_from_function
+
+
+def extract_base_type(annotation):
+ """Extract annotation type.
+
+ Handles List[X], Optional[X] and Union[X, Y] cases.
+ """
+ origin = get_origin(annotation)
+
+ if origin is list:
+ inner_type = get_args(annotation)[0]
+ return (
+ f"List[{inner_type.__name__}]"
+ if hasattr(inner_type, "__name__")
+ else "List[Unknown]"
+ )
+
+ if origin is Union or isinstance(origin, types.UnionType):
+ inner_types = [t.__name__ for t in get_args(annotation) if t is not type(None)]
+ return " | ".join(inner_types) if inner_types else "Unknown"
+
+ if isinstance(annotation, types.UnionType):
+ inner_types = [t.__name__ for t in get_args(annotation) if t is not type(None)]
+ return " | ".join(inner_types) if inner_types else "Unknown"
+
+ return annotation.__name__ if hasattr(annotation, "__name__") else "Unknown"
+
+
def update_params_with_default(param_info: dict[str, str]) -> dict[str, str]:
    """
    Update the 'default' value in the params dictionary based on its description.

    Some commands document their default only inside the help text as
    "Default to '<value>'.". When the recorded default is the "No default"
    placeholder, extract the value from the description, strip that sentence
    from the description, and HTML-escape angle brackets so the value
    renders safely in a markdown table.

    Args:
        param_info (dict): A dictionary describing a parameter.

    Returns:
        dict: The updated param_info dictionary (mutated in place).
    """
    if param_info.get("default") != "No default":
        # A real default is already recorded; nothing to recover.
        return param_info
    description = param_info.get("description", "")
    # Regular expression to find a sentence starting with 'Default to ' and ending with a period
    match = re.search(r"Default to '([^']+)'", description)
    if not match:
        return param_info
    # Extract the default value
    default_value = match.group(1)
    # Update the 'description' by removing the 'Default to ' phrase
    param_info["description"] = re.sub(
        r"Default to '[^']+'\.\s*", "", description
    ).strip()
    # Ensure the description ends with a single period
    if not param_info["description"].endswith("."):
        param_info["description"] += "."
    # Escape angle brackets (e.g. "<stdin>") so markdown/HTML rendering does
    # not swallow the value as a tag. The original no-op replace("<", "<")
    # calls were mangled HTML entities.
    param_info["default"] = default_value
    param_info["default"] = param_info["default"].replace("<", "&lt;")
    param_info["default"] = param_info["default"].replace(">", "&gt;")
    return param_info
+
+
def extract_command_info(command: typer.models.CommandInfo):
    """Collect docstring, argument and option metadata for a Typer command.

    Args:
        command (typer.models.CommandInfo): A registered Typer command.

    Returns:
        dict: Keys ``docstring``, ``name``, ``params`` (positional
        arguments) and ``options`` (flag-style parameters); each parameter
        is described by name/type/description, options also carry a default.
    """
    func = command.callback
    parameters = get_params_from_function(func)

    hints = get_type_hints(func)
    docstring = func.__doc__ or "No docstring provided."
    # Collapse paragraph breaks before single newlines; in the original
    # order the "\n\n" replacement was dead code (no "\n" survived).
    docstring = docstring.replace("\n\n", " ").replace("\n", " ")
    # Wrap bare URLs so markdown renders them as autolinks.
    docstring = re.sub(r"(https?://\S+)", r"<\1>", docstring)
    command_info = {
        "docstring": docstring,
        "name": command.name if command.name else func.__name__,
        "params": [],
        "options": [],
    }

    for name, param in parameters.items():
        raw_type = hints.get(name, None)
        base_type = extract_base_type(raw_type)
        # Not every default object carries a `help` attribute; fall back to
        # None instead of raising AttributeError.
        description = str(getattr(param.default, "help", None))
        description = description.replace("\n\n", " ").replace("\n", " ")
        param_info = {
            "name": name,
            "type": base_type,
            "description": description,
        }

        if isinstance(param.default, typer.models.OptionInfo):
            # CLI options use kebab-case; booleans get a --no- variant.
            name = name.replace("_", "-")
            if base_type == "bool":
                name = name + f" / --no-{name}"
            name = f"--{name}"
            param_info["name"] = name
            default_value = param.default.default
            if param_info["type"] == "bool":
                # Show which of the two flag forms is the default.
                values = param_info["name"].split(" / ")
                default_value = values[0] if param.default.default else values[1]
            if default_value is Ellipsis:
                # Ellipsis marks a required option in Typer.
                default_value = "No default"
            param_info["default"] = default_value

            # if No default, check if default value is in description
            param_info = update_params_with_default(param_info)

            command_info["options"].append(param_info)
        else:
            command_info["params"].append(param_info)

    return command_info
+
+
def generate_markdown_level_docs(
    f, group: TyperInfo, cli_name: str, level=1, base_name=None
):
    """Recursively write markdown reference docs for a Typer command group.

    Writes a heading for ``group``, then one section per registered
    subcommand (docstring, usage line, arguments table, options table),
    and finally recurses into nested groups one heading level deeper.

    Args:
        f: Open writable text file the markdown is appended to.
        group (TyperInfo): Typer group whose commands are documented.
        cli_name (str): CLI executable name shown in usage lines.
        level (int): Markdown heading level ('#' count) for the group.
        base_name: Space-joined chain of parent group names, or None at
            the top level.
    """
    base_level = "#" * level
    f.write(f"{base_level} `{base_name + ' ' if base_name else ''}{group.name}`\n")

    for subcommand in group.typer_instance.registered_commands:
        subcommand_info = extract_command_info(subcommand)
        # Fall back to the callback's function name when the command was
        # registered without an explicit name.
        subcommand_name = (
            subcommand.name
            if subcommand.name is not None
            else subcommand.callback.__name__
        )
        command_name = (
            f"{base_name + ' ' if base_name else ''}{group.name} {subcommand_name}"
        )
        # One '#' more than the group heading: subcommands nest one level down.
        f.write(f"{base_level}# `{command_name}`\n\n")

        f.write(f"{subcommand_info['docstring']}\n\n")

        # Usage line, e.g. "folding predict af2 [OPTIONS] SOURCE".
        usage = "**Usage**:\n\n"
        usage += "```console\n"
        usage += f"{cli_name} {command_name}{' [OPTIONS]' if subcommand_info['options'] else ''}"
        if subcommand_info["params"]:
            usage += f" {' '.join(param['name'].upper() for param in subcommand_info['params'])}"
        usage += "\n```\n\n"
        f.write(usage)

        if subcommand_info["params"]:
            # Arguments
            f.write("**Arguments**:\n\n")
            f.write("| ARGUMENT | DESCRIPTION | VALUE TYPE |\n")
            f.write("| -------- | ----------- | ----------- |\n")
            for param in subcommand_info["params"]:
                # Backfill placeholders so the table never has empty cells.
                param["description"] = (
                    param["description"]
                    if param.get("description")
                    else "No description"
                )
                param["type"] = param["type"] if param["type"] else "No type"

                f.write(
                    f"| {param['name'].upper()} | {param['description']} | {param['type']} |\n"
                )

            f.write("\n")

        # Options
        if subcommand_info["options"]:
            f.write("**Options**:\n\n")
            f.write("| OPTIONS | DESCRIPTION | VALUE TYPE | DEFAULT VALUE |\n")
            f.write("| ------- | ----------- | ---------- | ------------- |\n")
            for param in subcommand_info["options"]:
                # Backfill placeholders for missing metadata.
                param["description"] = (
                    param["description"]
                    if param.get("description")
                    else "No description"
                )
                param["type"] = param["type"] if param["type"] else "No type"
                param["default"] = (
                    param["default"] if param["default"] is not None else "No default"
                )
                # if No default, check if default value is in description
                param = update_params_with_default(param)
                f.write(
                    f"| {param['name']} | {param['description']} | {param['type']} | {param['default']} |\n"
                )

            f.write("\n")
    for subgroup in group.typer_instance.registered_groups:
        generate_markdown_level_docs(f, subgroup, cli_name, level + 1, group.name)
+
+
def generate_markdown_docs() -> None:
    """
    Generate markdown documentation for all registered commands and subcommands in the application.
    The documentation will include descriptions, arguments, and options.

    The generated markdown is saved in the 'docs/reference/cli.md' file.
    """
    # Explicit UTF-8: the generated docs may contain non-ASCII characters
    # and the platform default encoding is not guaranteed to handle them.
    with open("docs/reference/cli.md", "w", encoding="utf-8") as f:
        # Iterate through each group in app.registered_groups
        for group in app.registered_groups:
            # The "key" group is deliberately excluded from the public
            # reference.
            if group.name == "key":
                continue
            generate_markdown_level_docs(f, group, "folding", 2)
+
+
+if __name__ == "__main__":
+ generate_markdown_docs()
diff --git a/folding-studio/docs/mkdocs.yml b/folding-studio/docs/mkdocs.yml
new file mode 100644
index 0000000000000000000000000000000000000000..e0c02f0408ff51c5c0dad693faee0b577864e7e9
--- /dev/null
+++ b/folding-studio/docs/mkdocs.yml
@@ -0,0 +1,116 @@
+extra_css:
+ - css/main.css
+markdown_extensions:
+ - admonition
+ - pymdownx.details
+ - pymdownx.superfences:
+ custom_fences:
+ - class: mermaid
+ format: !!python/name:pymdownx.superfences.fence_code_format ''
+ name: mermaid
+ - pymdownx.tabbed:
+ alternate_style: true
+ - attr_list
+ - md_in_html
+ - pymdownx.emoji:
+ emoji_generator: !!python/name:material.extensions.emoji.to_svg ''
+ emoji_index: !!python/name:material.extensions.emoji.twemoji ''
+nav:
+ - index.md
+ - Tutorials:
+ - tutorials/index.md
+ - Install Folding Studio: tutorials/installation.md
+ - Run AlphaFold2 on a protein sequence: tutorials/single_folding_job_af2.md
+ - Perform a Multiple Sequence Alignment (MSA) search: tutorials/msa_search.md
+ - Preview - Run folding jobs with AlphaFold3-like models: tutorials/single_folding_job_af3.md
+ - How-to guides:
+ - how-to-guides/index.md
+ - AlphaFold2/OpenFold:
+ - Provide Input Data: how-to-guides/af2_openfold/provide_input_data.md
+ - Launch a Folding Job using AlphaFold2: how-to-guides/af2_openfold/single_af2_job.md
+ - Launch a Folding Job using OpenFold: how-to-guides/af2_openfold/single_openfold_job.md
+ - Launch a Folding Job with custom parameters: how-to-guides/af2_openfold/set_af_folding_parameters.md
+ - Launch a batch Folding Job from a configuration file: how-to-guides/af2_openfold/batch_job_from_configuration_file.md
+ - Launch a batch Folding Job from a directory of fasta files: how-to-guides/af2_openfold/batch_job_from_directory.md
+ - Check Job Status: how-to-guides/af2_openfold/fetch_folding_job_status.md
+ - Download Job Logs: how-to-guides/af2_openfold/download_logs.md
+ - Cancel a Folding Job submission: how-to-guides/af2_openfold/cancel_experiment.md
+ - Retrieve Features from a Folding Job: how-to-guides/af2_openfold/get_experiment_features.md
+ - Download results of a folding job: how-to-guides/af2_openfold/download_prediction_results.md
+ - Advanced Algorithms:
+ - Launch a Folding Job using MSA subsampling: how-to-guides/af2_openfold/advanced_algorithms/msa_subsampling_job.md
+ - Launch a Folding Job using the Gap Trick for Folding Multimer Complexes: how-to-guides/af2_openfold/advanced_algorithms/gap_trick_job.md
+ - Launch a Folding Job using an Initial Guess Structure in AlphaFold2: how-to-guides/af2_openfold/advanced_algorithms/initial_guess_af2.md
+ - Launch a Folding Job applying Template Masking in Gap Trick Mode: how-to-guides/af2_openfold/advanced_algorithms/template_masking_job.md
+ - Preview - Launch a folding job using SoloSeq model: how-to-guides/af2_openfold/soloseq_job.md
+ - Preview - AlphaFold3-like:
+ - Provide Input Data: how-to-guides/af3/provide_input_data.md
+ - Launch a Single Job using Boltz-1: how-to-guides/af3/single_job_boltz.md
+ - Launch a Single Job using Chai-1: how-to-guides/af3/single_job_chai.md
+ - Launch a Single Job using Protenix: how-to-guides/af3/single_job_protenix.md
+ - Launch a Single Job from a YAML file using Boltz-1: how-to-guides/af3/boltz_single_yaml_job.md
+ - Launch a Batch Job from a directory: how-to-guides/af3/batch_job_from_directory.md
+ - Launch a Single Job from a Protein Sequence: how-to-guides/af3/single_job_from_protein_sequence.md
+ - Post-processing recipes:
+ - Calculate Interface pLDDT and pAE: how-to-guides/other/pLDDT_pAE_calculation.md
+ - Multiple Sequence Alignment Search:
+ - Provide Input Data for MSA: how-to-guides/msa_search/provide_input_data.md
+ - Launch an MSA Search with MMSeqs2: how-to-guides/msa_search/msa_search_mmseqs2.md
+ - Launch an MSA Search ignoring cache: how-to-guides/msa_search/msa_no_cache.md
+ - Check an MSA Job Status: how-to-guides/msa_search/fetch_msa_job_status.md
+ - Download MSA Job Logs: how-to-guides/msa_search/download_msa_logs.md
+ - Download Results of an MSA Search: how-to-guides/msa_search/download_msa_search_results.md
+ - Explanation:
+ - explanation/index.md
+ - Supported models: explanation/supported_models.md
+ - Advanced algorithms: explanation/advanced_algorithms.md
+ - Reference:
+ - CLI: reference/cli.md
+ - Python Library: reference/python_lib_docs.md
+plugins:
+ - swagger-ui-tag
+ - search
+site_name: Folding Studio
+site_url: https://int-bio-foldingstudio-gcp.nw.r.appspot.com
+theme:
+ colormode: auto
+ features:
+ - content.code.select
+ - content.code.copy
+ - navigation.indexes
+ - navigation.tracking
+ - navigation.sections
+ - navigation.top
+ highlightjs: true
+ hljs_languages:
+ - python
+ - bash
+ icon:
+ admonition:
+ abstract: octicons/checklist-16
+ bug: octicons/bug-16
+ danger: octicons/zap-16
+ example: octicons/beaker-16
+ failure: octicons/x-circle-16
+ info: octicons/info-16
+ note: octicons/tag-16
+ question: octicons/question-16
+ quote: octicons/quote-16
+ success: octicons/check-16
+ tip: octicons/squirrel-16
+ warning: octicons/alert-16
+ logo: material/dna
+ name: material
+ nav_style: primary
+ palette:
+ - media: '(prefers-color-scheme: light)'
+ scheme: default
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ - media: '(prefers-color-scheme: dark)'
+ scheme: slate
+ toggle:
+ icon: material/brightness-4
+ name: Switch to light mode
+ user_color_mode_toggle: true
diff --git a/folding-studio/folding_studio/__init__.py b/folding-studio/folding_studio/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d023983aba13d3cf83d78bde22e6c8fabf988b99
--- /dev/null
+++ b/folding-studio/folding_studio/__init__.py
@@ -0,0 +1,15 @@
+"""Expose useful functions and classes"""
+
+from .api_call.predict.batch_predict import (
+ batch_prediction_from_file,
+)
+from .api_call.predict.simple_predict import single_job_prediction
+from .api_call.upload_custom_files import upload_custom_files
+from .utils.gcp import get_id_token
+
+__all__ = [
+ "single_job_prediction",
+ "batch_prediction_from_file",
+ "get_id_token",
+ "upload_custom_files",
+]
diff --git a/folding-studio/folding_studio/api_call/__init__.py b/folding-studio/folding_studio/api_call/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/folding-studio/folding_studio/api_call/msa.py b/folding-studio/folding_studio/api_call/msa.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb70913157aa8e6b860659d977357c8fba10c0c5
--- /dev/null
+++ b/folding-studio/folding_studio/api_call/msa.py
@@ -0,0 +1,51 @@
+"""API simple MSA search call wrappers."""
+
+from contextlib import ExitStack
+from pathlib import Path
+
+import requests
+import typer
+from rich import print # pylint:disable=redefined-builtin
+
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+from folding_studio.utils.data_model import MSARequestParams
+from folding_studio.utils.headers import get_auth_headers
+from folding_studio.utils.project_validation import define_project_code_or_raise
+
+
def simple_msa(
    file: Path,
    params: MSARequestParams,
    project_code: str | None = None,
) -> dict:
    """Submit a simple MSA search for a FASTA file and return the response.

    Args:
        file (Path): Data source file path.
        params (MSARequestParams): API request parameters.
        project_code (str|None): Project code under which the jobs are billed.

    Returns:
        dict: JSON payload of the API response.

    Raises:
        typer.Exit: If an error occurs during the API call.
    """
    project_code = define_project_code_or_raise(project_code=project_code)
    endpoint = API_URL + "searchMSA"

    # Keep the file handle open only for the duration of the upload.
    with file.open("rb") as fasta_handle:
        response = requests.post(
            endpoint,
            data=params.model_dump(mode="json"),
            headers=get_auth_headers(),
            files=[("fasta_file", fasta_handle)],
            timeout=REQUEST_TIMEOUT,
            params={"project_code": project_code},
        )

    if not response.ok:
        print(f"An error occurred: {response.content.decode()}")
        raise typer.Exit(code=1)

    return response.json()
diff --git a/folding-studio/folding_studio/api_call/predict/__init__.py b/folding-studio/folding_studio/api_call/predict/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ef6ecc2b9891d4e7b26e06421ae6d11d1a4a74b
--- /dev/null
+++ b/folding-studio/folding_studio/api_call/predict/__init__.py
@@ -0,0 +1,10 @@
+"""Import predict methods."""
+
+from .batch_predict import batch_prediction, batch_prediction_from_file
+from .simple_predict import simple_prediction
+
+__all__ = [
+ "batch_prediction",
+ "batch_prediction_from_file",
+ "simple_prediction",
+]
diff --git a/folding-studio/folding_studio/api_call/predict/batch_predict.py b/folding-studio/folding_studio/api_call/predict/batch_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa1e5d113c0c5c62f5df1173d8b413ebca1d5089
--- /dev/null
+++ b/folding-studio/folding_studio/api_call/predict/batch_predict.py
@@ -0,0 +1,197 @@
+"""API batch prediction call wrappers."""
+
+from io import StringIO
+from pathlib import Path
+
+import requests
+import typer
+from Bio import SeqIO
+from folding_studio_data_models import (
+ AF2Request,
+ BatchRequest,
+ FoldingModel,
+ OpenFoldRequest,
+ Sequence,
+)
+from rich import print # pylint:disable=redefined-builtin
+
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+from folding_studio.utils.data_model import (
+ PredictRequestCustomFiles,
+ PredictRequestParams,
+)
+from folding_studio.utils.headers import get_auth_headers
+from folding_studio.utils.project_validation import define_project_code_or_raise
+
+
def _extract_sequences_from_file(file: Path) -> list[Sequence]:
    """Parse a FASTA file into a list of Sequence records."""
    parsed_records = SeqIO.parse(StringIO(file.read_text()), "fasta")
    return [
        Sequence(description=str(record.description), fasta_sequence=str(record.seq))
        for record in parsed_records
    ]
+
+
def _build_request_from_fasta(
    file: Path,
    folding_model: FoldingModel,
    params: PredictRequestParams,
    custom_files: PredictRequestCustomFiles,
) -> AF2Request | OpenFoldRequest:
    """Build an AF2Request or OpenFoldRequest from a fasta file path and request parameters.

    Args:
        file (Path): Path to a file describing the protein.
        folding_model (FoldingModel): Folding model to run the inference with.
        params (PredictRequestParams): API request parameters.
        custom_files (PredictRequestCustomFiles): API request custom files.

    Returns:
        AF2Request | OpenFoldRequest: Request object.
    """
    # Shared parameter payload for both request types. Remote template ids
    # and locally supplied template files are merged into one list;
    # file paths are stringified for serialization.
    parameters = dict(
        num_recycle=params.num_recycle,
        random_seed=params.random_seed,
        custom_templates=params.custom_template_ids
        + [str(f) for f in custom_files.templates],
        custom_msas=[str(f) for f in custom_files.msas],
        gap_trick=params.gap_trick,
        msa_mode=params.msa_mode,
        max_msa_clusters=params.max_msa_clusters,
        max_extra_msa=params.max_extra_msa,
        template_mode=params.template_mode,
        model_subset=params.model_subset,
        initial_guess_file=custom_files.initial_guess_files,
        templates_masks_file=custom_files.templates_masks_files,
    )
    # The file stem doubles as the complex identifier in both branches.
    if folding_model == FoldingModel.AF2:
        return AF2Request(
            complex_id=file.stem,
            sequences=_extract_sequences_from_file(file),
            parameters=parameters,
            ignore_cache=params.ignore_cache,
        )
    # Any non-AF2 model falls through to an OpenFold request here.
    return OpenFoldRequest(
        complex_id=file.stem,
        sequences=_extract_sequences_from_file(file),
        parameters=parameters,
        ignore_cache=params.ignore_cache,
    )
+
+
def batch_prediction(
    files: list[Path],
    folding_model: FoldingModel,
    params: PredictRequestParams,
    custom_files: PredictRequestCustomFiles,
    project_code: str | None = None,
    num_seed: int | None = None,
) -> dict:
    """Make a batch prediction from a list of files.

    Args:
        files (list[Path]): List of data source file paths.
        folding_model (FoldingModel): Folding model to run the inference with.
        params (PredictRequestParams): API request parameters.
        custom_files (PredictRequestCustomFiles): API request custom files.
        project_code (str|None): Project code under which the jobs are billed.
        num_seed (int | None, optional): Number of random seeds. When given,
            every file is submitted once per seed in ``range(num_seed)``.
            Defaults to None.

    Returns:
        dict: JSON payload of the API response.

    Raises:
        typer.Exit: If an error occurs during the API call.
    """
    project_code = define_project_code_or_raise(project_code=project_code)
    # upload custom files if any
    custom_files.upload()

    # With explicit seeds we submit one request per (seed, file) pair;
    # otherwise a single pass uses whatever seed `params` already carries.
    # (The two branches previously duplicated the request-building loop.)
    seeds = range(num_seed) if num_seed is not None else [None]
    folding_requests = []
    for seed in seeds:
        if seed is not None:
            params.random_seed = seed
        folding_requests.extend(
            _build_request_from_fasta(
                file=file,
                folding_model=folding_model,
                params=params,
                custom_files=custom_files,
            )
            for file in files
        )
    batch_request = BatchRequest(requests=folding_requests)
    url = API_URL + "batchPredict"

    response = requests.post(
        url,
        data={"batch_jobs_request": batch_request.model_dump_json()},
        params={"project_code": project_code},
        headers=get_auth_headers(),
        timeout=REQUEST_TIMEOUT,
    )

    if not response.ok:
        print(f"An error occurred: {response.content.decode()}")
        raise typer.Exit(code=1)

    response_json = response.json()
    return response_json
+
+
def batch_prediction_from_file(
    file: Path,
    project_code: str | None = None,
) -> dict:
    """Make a batch prediction from a configuration file.

    Local custom files referenced by the configuration are uploaded first;
    when any were uploaded, a temporary copy of the configuration with the
    local paths rewritten to their uploaded locations is submitted instead.

    Args:
        file (Path): Configuration file path.
        project_code (str|None): Project code under which the jobs are billed.

    Returns:
        dict: JSON payload of the API response.

    Raises:
        typer.Exit: If an error occurs during the API call.
    """
    project_code = define_project_code_or_raise(project_code=project_code)
    url = API_URL + "batchPredictFromFile"

    custom_files = PredictRequestCustomFiles.from_batch_jobs_file(batch_jobs_file=file)
    local_to_uploaded = custom_files.upload()

    if local_to_uploaded:
        # Rewrite local paths to their uploaded counterparts in a temporary
        # copy placed in the current working directory.
        content = file.read_text()
        for local, uploaded in local_to_uploaded.items():
            content = content.replace(local, uploaded)
        tmp_file = Path("tmp_batch_job" + file.suffix)
        tmp_file.write_text(content)
        file_to_upload = tmp_file
    else:
        tmp_file = None
        file_to_upload = file

    try:
        with file_to_upload.open("rb") as input_file:
            response = requests.post(
                url,
                headers=get_auth_headers(),
                files=[("batch_jobs_file", input_file)],
                params={"project_code": project_code},
                timeout=REQUEST_TIMEOUT,
            )
    finally:
        # Remove the temporary file even when the request raises
        # (previously it leaked on connection/timeout errors).
        if tmp_file and tmp_file.exists():
            tmp_file.unlink()

    if not response.ok:
        print(f"An error occurred: {response.content.decode()}")
        raise typer.Exit(code=1)

    return response.json()
diff --git a/folding-studio/folding_studio/api_call/predict/simple_predict.py b/folding-studio/folding_studio/api_call/predict/simple_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3b1d8c40df63361f2e75aac16b6973242d687b3
--- /dev/null
+++ b/folding-studio/folding_studio/api_call/predict/simple_predict.py
@@ -0,0 +1,175 @@
+"""API simple prediction call wrappers."""
+
+import logging
+import warnings
+from pathlib import Path
+
+import requests
+import typer
+from folding_studio_data_models import AF2Parameters, OpenFoldParameters
+from folding_studio_data_models.request.folding import FoldingModel
+
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+from folding_studio.utils.data_model import (
+ PredictRequestCustomFiles,
+ PredictRequestParams,
+)
+from folding_studio.utils.file_helpers import partition_template_pdb_from_file
+from folding_studio.utils.headers import get_auth_headers
+from folding_studio.utils.project_validation import define_project_code_or_raise
+
+
def single_job_prediction(
    fasta_file: Path,
    parameters: AF2Parameters | OpenFoldParameters | None = None,
    project_code: str | None = None,
    *,
    ignore_cache: bool = False,
    **kwargs,
) -> dict:
    """Make a single job prediction from folding parameters and a FASTA file.

    This is a helper function to be called in users scripts.

    Args:
        fasta_file (Path): Input FASTA file
        parameters (AF2Parameters | OpenFoldParameters | None, optional): Job parameters.
            For backward compatibility, can be aliased with `af2_parameters`. Defaults to None.
        project_code (str | None, optional): Project code under which the jobs are billed.
            If None, value is attempted to be read from environment. Defaults to None.
        ignore_cache (bool, optional): Force the job submission or not. Defaults to False.

    Raises:
        ValueError: If neither `parameters` nor the deprecated `af2_parameters`
            alias is provided, or if both are provided.
        requests.HTTPError: If the API responds with a 4xx/5xx status.

    Returns:
        dict: API response.
    """

    # Backward-compatible handling of the deprecated `af2_parameters` alias.
    old_parameters = kwargs.get("af2_parameters")
    if parameters is None:
        if old_parameters is None:
            msg = "Argument `parameters` must be specified if deprecated alias `af2_parameters` is not. "
            raise ValueError(msg)
        else:
            warnings.warn(
                "Argument 'af2_parameters' is deprecated and will be removed in future release; use 'parameters' instead.",
                DeprecationWarning,
                stacklevel=2,
            )
            parameters = old_parameters
    elif old_parameters is not None:
        raise ValueError("Use either 'parameters' or 'af2_parameters', not both.")

    project_code = define_project_code_or_raise(project_code=project_code)

    custom_files = PredictRequestCustomFiles(
        templates=parameters.custom_templates,
        msas=parameters.custom_msas,
        initial_guess_files=[parameters.initial_guess_file]
        if parameters.initial_guess_file
        else None,
        templates_masks_files=[parameters.templates_masks_file]
        if parameters.templates_masks_file
        else None,
    )
    _ = custom_files.upload()

    params = parameters.model_dump(mode="json")
    # Split custom templates into PDB ids vs. local files: only the ids are
    # forwarded in the payload, the local files were uploaded just above.
    pdb_ids, _ = partition_template_pdb_from_file(
        custom_templates=parameters.custom_templates
    )

    folding_model = (
        FoldingModel.OPENFOLD
        if isinstance(parameters, OpenFoldParameters)
        else FoldingModel.AF2
    )

    params.update(
        {
            "folding_model": folding_model.value,
            "custom_msa_files": custom_files.msas,
            "custom_template_ids": list(pdb_ids),
            "custom_template_files": custom_files.templates,
            "initial_guess_file": custom_files.initial_guess_files[0]
            if custom_files.initial_guess_files
            else None,
            "templates_masks_file": custom_files.templates_masks_files[0]
            if custom_files.templates_masks_files
            else None,
            "ignore_cache": ignore_cache,
        }
    )

    url = API_URL + "predict"
    # Bug fix: context-manage the FASTA stream — the original passed
    # `fasta_file.open("rb")` inline and leaked the file handle.
    with fasta_file.open("rb") as fasta_stream:
        response = requests.post(
            url,
            data=params,
            headers=get_auth_headers(),
            files=[("fasta_file", fasta_stream)],
            params={"project_code": project_code},
            timeout=REQUEST_TIMEOUT,
        )
    response.raise_for_status()

    logging.info("Single job successfully submitted.")
    return response.json()
+
+
def simple_prediction(
    file: Path,
    folding_model: FoldingModel,
    params: PredictRequestParams,
    custom_files: PredictRequestCustomFiles,
    project_code: str | None = None,
) -> dict:
    """Make a simple prediction from a file.

    Args:
        file (Path): Data source file path.
        folding_model (FoldingModel): Folding model the job targets.
        params (PredictRequestParams): API request parameters.
        custom_files (PredictRequestCustomFiles): API request custom files.
        project_code (str|None): Project code under which the jobs are billed.

    Raises:
        typer.Exit: If an error occurs during the API call.

    Returns:
        dict: API response payload.
    """
    project_code = define_project_code_or_raise(project_code=project_code)

    url = API_URL + "predict"

    _ = custom_files.upload()

    payload = params.model_dump(mode="json")
    payload.update(
        {
            "folding_model": folding_model.value,
            "custom_msa_files": custom_files.msas,
            "custom_template_files": custom_files.templates,
            "initial_guess_file": custom_files.initial_guess_files[0]
            if custom_files.initial_guess_files
            else None,
            "templates_masks_file": custom_files.templates_masks_files[0]
            if custom_files.templates_masks_files
            else None,
        }
    )
    # Bug fix: context-manage the upload stream — the original opened the
    # file inline in the `files=` argument and leaked the handle.
    with file.open("rb") as input_file:
        response = requests.post(
            url,
            data=payload,
            headers=get_auth_headers(),
            files=[("fasta_file", input_file)],
            params={"project_code": project_code},
            timeout=REQUEST_TIMEOUT,
        )

    if not response.ok:
        print(f"An error occurred: {response.content.decode()}")
        raise typer.Exit(code=1)

    print("Single job successfully submitted.")
    return response.json()
diff --git a/folding-studio/folding_studio/api_call/upload_custom_files.py b/folding-studio/folding_studio/api_call/upload_custom_files.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c64c7f00adac36dd065e9ae81657bcb81a322ee
--- /dev/null
+++ b/folding-studio/folding_studio/api_call/upload_custom_files.py
@@ -0,0 +1,204 @@
+"""API custom file upload call wrappers."""
+
+import hashlib
+import os
+import shutil
+import tempfile
+from copy import copy
+from datetime import datetime
+from pathlib import Path
+from zipfile import ZIP_DEFLATED, ZipFile
+
+import requests
+import typer
+from folding_studio_data_models import CustomFileType
+from rich import (
+ print, # pylint:disable=redefined-builtin
+)
+from tqdm import tqdm
+from tqdm.utils import CallbackIOWrapper
+
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+
+
def _upload_file_to_signed_url(
    signed_url: str,
    src: str | Path,
    headers: dict[str, str],
) -> requests.Response:
    """Upload a local file to a GCS bucket using a signed URL.

    Use a PUT request.

    Args:
        signed_url (str): the signed URL corresponding to the GCS path.
        src (str | Path): the local file path.
        headers (dict[str, str]): HTTP request headers.
    Raises:
        requests.exceptions.HTTPError: if something went wrong during the uploading.
    Returns:
        A response to the PUT request.
    """

    put_headers = copy(headers)
    put_headers["Content-type"] = "application/octet-stream"
    file_size = os.path.getsize(src)
    with open(src, "rb") as fd:
        with tqdm(
            desc=f"Uploading {src}",
            total=file_size,
            unit="B",
            unit_scale=True,
            unit_divisor=1024,
        ) as t:
            # Wrap reads so tqdm's bar advances as requests streams the file.
            reader_wrapper = CallbackIOWrapper(t.update, fd, "read")
            response = requests.put(
                url=signed_url,
                data=reader_wrapper,
                headers=put_headers,
                # Bug fix: the original PUT had no timeout and could hang the
                # CLI forever on a stalled connection. `timeout` bounds each
                # socket read, not the whole transfer, so large uploads are
                # unaffected.
                timeout=REQUEST_TIMEOUT,
            )
            response.raise_for_status()
    return response
+
+
+def _get_blob_name_from_file_content(src: str | Path) -> str:
+ """Get a unique file name based on its content.
+
+ This file name is used as blob name when uploading the file to a bucket.
+
+ Args:
+ src (str | Path): Path to local file.
+
+ Returns:
+ The unique blob name.
+ """
+ src = Path(src)
+ file_hash = hashlib.md5()
+ with src.open("rb") as fd:
+ fd.seek(0)
+ while chunk := fd.read(8192):
+ file_hash.update(chunk)
+
+ hexcode = file_hash.hexdigest()[:8]
+ # Take file name from src first as maybe a path with directories
+ # Then only extract the stem. There maybe more than 1 extension
+ # example: data/templates/custom_msa.a3m.pqt
+ file_stem = src.name.split(".")[0]
+ suffix = "".join(src.suffixes)
+ return f"{file_stem}_{hexcode}{suffix}"
+
+
def _copy_and_zip_files(
    file_list: list[Path],
    temp_dir: str | os.PathLike,
    zip_name: str = "files.zip",
) -> str:
    """
    Copies a list of files to a temporary directory and zips them into one
    archive with a high compression level.

    Args:
        file_list (list): List of file paths to be copied and zipped.
        temp_dir (str | os.PathLike): Path to the temporary directory.
            (Fix: the original annotation said `tempfile.TemporaryDirectory`,
            but callers pass the directory *path* yielded by its context
            manager.)
        zip_name (str): Name of the resulting zip file.

    Returns:
        str: Path to the created zip file.
    """
    to_zip = []
    for file_path in file_list:
        if file_path.is_file():
            # Content-addressed name: identical files get identical blob names.
            blob_name = _get_blob_name_from_file_content(src=file_path)
            dest_file = os.path.join(temp_dir, blob_name)
            shutil.copy(file_path, dest_file)
            to_zip.append(dest_file)
        else:
            print(f"Warning: {file_path} does not exist or is not a file.")

    zip_path = os.path.join(temp_dir, zip_name)
    # compresslevel=7 trades a little CPU for near-maximal DEFLATE compression.
    with ZipFile(zip_path, "w", compression=ZIP_DEFLATED, compresslevel=7) as zipf:
        for file_name in to_zip:
            zipf.write(file_name, arcname=Path(file_name).name)
    return zip_path
+
+
+def _get_blob_zip_name(file_type: str):
+ timestamp = datetime.now().strftime("%Y%m%d%H%M%S%f")
+ return f"{file_type}_files_{timestamp}.zip"
+
+
def upload_custom_files(
    headers: dict[str, str],
    paths: list[Path],
    file_type: CustomFileType,
) -> dict[str, str]:
    """Upload custom files to the GCS bucket.

    Flow: the files are zipped locally (deduplicated via a set), the archive
    is PUT to a server-issued signed URL, and the server is then asked to
    unzip it in the destination bucket.

    Args:
        headers (dict[str, str]): HTTP request headers.
        paths (list[Path]): List of custom template files path.
        file_type (CustomFileType): Type of file to upload.
    Raises:
        typer.Exit: If an error occurs during the API call.
    Returns:
        dict[str, str]: Mapping of local filenames to GCS paths.
    """

    url = API_URL + "getUploadSignedURL"

    # Deduplicate inputs. NOTE: `blobs` below is built by iterating this same
    # set, so `zip(paths, blobs)` at the end stays aligned.
    paths = set(paths)
    print(f"Uploading {len(paths)}: {tuple(str(p) for p in paths)}.")

    # Content-addressed blob names; used to build the returned mapping.
    blobs = [_get_blob_name_from_file_content(src=file) for file in paths]

    # Zip files and upload archive
    blob_zip = _get_blob_zip_name(file_type.value)
    with tempfile.TemporaryDirectory() as temp_dir:
        zip_path = _copy_and_zip_files(
            file_list=paths,
            temp_dir=temp_dir,
            zip_name=blob_zip,
        )

        # Ask the server for a signed URL matching the archive name.
        url_response = requests.get(
            url,
            params={
                "blob_name": blob_zip,
                "file_type": file_type.value,
            },
            headers=headers,
            timeout=REQUEST_TIMEOUT,
        )

        if not url_response.ok:
            print(f"Error while generating signed URL: {url_response.content.decode()}")
            raise typer.Exit(code=1)

        json_response = url_response.json()
        signed_url = json_response["signed_url"]

        # NOTE(review): the helper already calls raise_for_status(), so this
        # `.ok` check is effectively unreachable — harmless belt-and-braces.
        upload_response = _upload_file_to_signed_url(
            signed_url=signed_url, src=zip_path, headers=headers
        )
        if not upload_response.ok:
            print(f"Error while uploading {zip_path}.")
            raise typer.Exit(code=1)

    # Unzip in dest bucket
    unzip_response = requests.post(
        API_URL + "unzipFileInBucket",
        params={
            "zip_file_path": json_response["destination_file"],
        },
        headers=headers,
        timeout=REQUEST_TIMEOUT,
    )
    if not unzip_response.ok:
        print(f"Error while unzip custom files: {unzip_response.content.decode()}")
        raise typer.Exit(code=1)

    local_to_gcs = {
        str(file): f"{json_response['destination_bucket']}/{blob_name}"
        for file, blob_name in zip(paths, blobs)
    }
    print("Custom files successfully uploaded.")
    return local_to_gcs
diff --git a/folding-studio/folding_studio/cli.py b/folding-studio/folding_studio/cli.py
new file mode 100644
index 0000000000000000000000000000000000000000..b66924033d40e9c8215c603b1c9693e02e0bd9af
--- /dev/null
+++ b/folding-studio/folding_studio/cli.py
@@ -0,0 +1,36 @@
+"""Folding studio CLI."""
+
+import typer
+
+from folding_studio.commands.experiment import app as experiment_app
+from folding_studio.commands.key import app as key_app
+from folding_studio.commands.msa import app as msa_app
+from folding_studio.commands.predict import (
+ af2,
+ boltz,
+ chai,
+ openfold,
+ protenix,
+ soloseq,
+)
+
# Root CLI application. `no_args_is_help` prints the help screen when the CLI
# is invoked without a sub-command; `-h` is accepted as a help alias.
app = typer.Typer(
    no_args_is_help=True, context_settings={"help_option_names": ["-h", "--help"]}
)
# Sub-command groups implemented in their own modules.
app.add_typer(experiment_app, name="experiment")
app.add_typer(msa_app, name="msa")
app.add_typer(key_app, name="key")

# One `predict` sub-command per supported folding model.
predict_app = typer.Typer(no_args_is_help=True, help="Submit folding jobs")
predict_app.command()(af2)
predict_app.command()(openfold)
predict_app.command()(boltz)
predict_app.command()(chai)
predict_app.command()(protenix)
predict_app.command()(soloseq)


app.add_typer(predict_app, name="predict")

if __name__ == "__main__":
    app()
diff --git a/folding-studio/folding_studio/client.py b/folding-studio/folding_studio/client.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a0b520283122e8ff279755fea1d0963b1f519c3
--- /dev/null
+++ b/folding-studio/folding_studio/client.py
@@ -0,0 +1,122 @@
+"""Common interfaces for handling queries to prediction endpoints."""
+
+from __future__ import annotations
+
+import logging
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Dict
+
+import requests
+
+from folding_studio.config import API_URL, FOLDING_API_KEY, REQUEST_TIMEOUT
+from folding_studio.query import Query
+from folding_studio.utils.gcp import TokenManager, download_file_from_signed_url
+
# Endpoint that forwards folding queries to Vertex AI prediction endpoints.
# NOTE(review): "vertexi" looks like a typo for "vertex"; renaming would be a
# module-level interface change, so it is left as-is.
vertexi_ai_forwarding_url = API_URL + "predictWithVertexEndpoints"
+
+
class Client:
    """Prediction client to send the request with."""

    def __init__(
        self, api_key: str | None = None, token_manager: TokenManager | None = None
    ) -> None:
        """Initialize the client.

        Args:
            api_key (str | None): API key used for the `X-API-Key` header, if any.
            token_manager (TokenManager | None): Token provider used for JWT
                bearer authentication, if any.
        """
        self.api_key = api_key
        self.token_manager = token_manager

    @classmethod
    def authenticate(cls) -> Client:
        """Instantiates an authenticated client.

        Prefers the API key from the environment (FOLDING_API_KEY) and falls
        back to Google Cloud JWT authentication otherwise.
        """
        if FOLDING_API_KEY:
            return cls.from_api_key(api_key=FOLDING_API_KEY)
        else:
            return cls.from_jwt()

    @classmethod
    def from_api_key(cls, api_key: str) -> Client:
        """Instantiates a Client object using an API key."""
        return cls(api_key=api_key, token_manager=None)

    @classmethod
    def from_jwt(cls) -> Client:
        """Instantiates a Client object using a Google Cloud JWT token."""
        token_manager = TokenManager()
        return cls(token_manager=token_manager)

    def send_request(
        self, query: Query, project_code: str, timeout: int = REQUEST_TIMEOUT
    ) -> Response:
        """Sends a request to the endpoint, handling authentication and errors

        Args:
            query (Query): Folding query to send.
            project_code (str): Project code to associate to the query.
            timeout (int, optional): Request timeout. Defaults to REQUEST_TIMEOUT.

        Raises:
            requests.exceptions.RequestException: If the HTTP call fails or
                the server returns a 4xx/5xx status.

        Returns:
            Response: Prediction endpoint response.
        """
        headers = {}
        if self.api_key:
            headers["X-API-Key"] = self.api_key
        elif self.token_manager:
            headers["Authorization"] = f"Bearer {self.token_manager.get_token()}"
        params = {"project_code": project_code, "model": query.MODEL}

        try:
            response = requests.post(
                url=vertexi_ai_forwarding_url,
                params=params,
                json=query.payload,
                headers=headers,
                timeout=timeout,
            )
            response.raise_for_status()  # Raise HTTPError for bad responses (4xx, 5xx)
            json_response = response.json()

            return Response(
                output_signed_url=json_response["signed_url"],
                confidence_data=json_response["confidence_data"],
            )
        except requests.exceptions.RequestException as e:
            # Fix: lazy %-formatting avoids building the message when logging
            # is disabled, and a bare `raise` preserves the original traceback
            # (the original used `raise e`).
            logging.error("Error sending request: %s", e)
            raise
+
+
class Response:
    """Class to handle the endpoints JSON responses."""

    def __init__(self, output_signed_url: str, confidence_data: Dict[str, Any]) -> None:
        # Signed URL pointing at the zipped prediction results.
        self.output_signed_url = output_signed_url
        # Raw confidence payload returned by the endpoint; exposed read-only
        # through the `confidence_data` property below.
        self._confidence_data = confidence_data
        # NOTE(review): set but never read within this class — presumably
        # populated elsewhere after unzipping; confirm before removing.
        self.unzip_folder_name = ""

    def download_results(
        self, output_dir: Path, *, force: bool = False, unzip: bool = False
    ) -> None:
        """Downloads and optionally unzips the result file.

        The archive is saved as `results_<timestamp>.zip` inside `output_dir`.

        Args:
            output_dir (Path): Path where the file will be saved.
            force (bool): Overwrite existing file if True.
            unzip (bool): Extract contents if the file is a zip.

        Raises:
            Whatever `download_file_from_signed_url` raises on failure or when
            the file already exists without `force`. (The original docstring
            claimed `typer.Exit`, but this method itself raises nothing —
            TODO confirm the helper's error behavior.)
        """
        output_path = (
            output_dir / f"results_{datetime.now().strftime('%Y%m%d%H%M%S')}.zip"
        )
        download_file_from_signed_url(
            self.output_signed_url,
            output_path,
            force=force,
            unzip=unzip,
            unzip_dir=output_dir,
        )

    @property
    def confidence_data(self) -> Dict[str, Any]:
        """Prediction confidence data."""
        return self._confidence_data
diff --git a/folding-studio/folding_studio/commands/__init__.py b/folding-studio/folding_studio/commands/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/folding-studio/folding_studio/commands/experiment.py b/folding-studio/folding_studio/commands/experiment.py
new file mode 100644
index 0000000000000000000000000000000000000000..e491a37bb2a98e653f73c8d4ec325763231ec216
--- /dev/null
+++ b/folding-studio/folding_studio/commands/experiment.py
@@ -0,0 +1,313 @@
+"""CLI experiment command and sub-commands."""
+
+import json
+import os
+import shutil
+import zipfile
+from pathlib import Path
+from typing import List, Optional
+
+import requests
+import typer
+from rich import print # pylint:disable=redefined-builtin
+from rich.console import Console
+from rich.table import Table
+from typing_extensions import Annotated
+
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+from folding_studio.utils.headers import get_auth_headers
+
# Experiment command group: shows help when invoked without a sub-command.
app = typer.Typer(
    no_args_is_help=True,
    help=(
        "Get experiment information and metadata, like its status, "
        "results or the generated features (msa, templates, etc.).\n"
        "Read more at https://int-bio-foldingstudio-gcp.nw.r.appspot.com/how-to-guides/af2_openfold/fetch_folding_job_status/."
    ),
)

# Shared positional argument definition reused by every sub-command below.
experiment_ID_argument = typer.Argument(help="ID of the experiment.")
+
+
def _download_file_from_signed_url(
    exp_id: str,
    endpoint: str,
    output: Path,
    force: bool,
    unzip: bool = False,
) -> None:
    """Download a zip file from an experiment id.

    Args:
        exp_id (str): Experiment id.
        endpoint (str): API endpoint to call.
        output (Path): Output file path.
        force (bool): Force file writing if it already exists.
        unzip (bool): Unzip the zip file after downloading.

    Raises:
        typer.Exit: If output file path exists but force set to false.
        typer.Exit: If unzip set to true but the directory already exists and force set to false.
        typer.Exit: If an error occurred during the API call or the download itself.
    """
    if output.exists() and not force:
        print(
            f"Warning: The file '{output}' already exists. Use the --force flag to overwrite it."
        )
        raise typer.Exit(code=1)

    dir_path = None
    if unzip:
        if not output.suffix == ".zip":
            print(
                "Error: The downloaded file is not a .zip file. Please ensure the correct file format."
            )
            raise typer.Exit(code=1)

        dir_path = output.with_suffix("")
        if dir_path.exists() and not force:
            print(
                f"Warning: The --unzip flag is raised but the directory '{dir_path}' "
                "already exists. Use the --force flag to overwrite it."
            )
            raise typer.Exit(code=1)

    headers = get_auth_headers()
    url = API_URL + endpoint

    # First call: exchange the experiment id for a short-lived signed URL.
    response = requests.get(
        url,
        params={"experiment_id": exp_id},
        headers=headers,
        timeout=REQUEST_TIMEOUT,
    )
    if not response.ok:
        print(f"Failed to download the file: {response.content.decode()}.")
        raise typer.Exit(code=1)

    # Second call: stream the actual file from the signed URL.
    file_response = requests.get(
        response.json()["signed_url"],
        stream=True,
        timeout=REQUEST_TIMEOUT,
    )
    # Bug fix: the original wrote the body unconditionally, so a failed
    # signed-URL fetch silently produced a corrupt output file.
    if not file_response.ok:
        print(f"Failed to download the file: {file_response.content.decode()}.")
        raise typer.Exit(code=1)

    with output.open("wb") as f:
        file_response.raw.decode_content = True
        shutil.copyfileobj(file_response.raw, f)
    print(f"File downloaded successfully to {output}.")

    if unzip:
        dir_path.mkdir(parents=True, exist_ok=True)
        with zipfile.ZipFile(output, "r") as zip_ref:
            zip_ref.extractall(dir_path)
        print(f"Extracted all files to {dir_path}.")
+
+
@app.command()
def status(
    exp_id: Annotated[str, experiment_ID_argument],
):
    """Get an experiment status."""
    # Single GET against the status endpoint; the experiment id travels as a
    # query parameter.
    response = requests.get(
        API_URL + "getExperimentStatus",
        params={"experiment_id": exp_id},
        headers=get_auth_headers(),
        timeout=REQUEST_TIMEOUT,
    )

    if not response.ok:
        print(f"An error occurred : {response.content.decode()}")
        raise typer.Exit(code=1)

    print(response.json()["status"])
+
+
@app.command()
def list(
    limit: Annotated[
        int,
        typer.Option(
            help=("Max number of experiment to display in the terminal."),
        ),
    ] = 100,
    output: Annotated[
        Optional[Path],
        typer.Option(
            "--output",
            "-o",
            help=(
                "Path to the file where the job metadata returned by the server are written."
            ),
        ),
    ] = None,
):  # pylint:disable=redefined-builtin
    """Get all your done and pending experiment ids. The IDs are provided in the order of submission, starting with the most recent."""
    headers = get_auth_headers()
    url = API_URL + "getDoneAndPendingExperiments"
    response = requests.get(
        url,
        headers=headers,
        timeout=REQUEST_TIMEOUT,
    )

    if not response.ok:
        print(f"An error occurred : {response.content.decode()}")
        raise typer.Exit(code=1)

    response_json = response.json()
    # Optionally persist the full server payload before rendering the table.
    if output:
        with open(output, "w") as f:
            json.dump(response_json, f, indent=4)

        print(f"Done and pending experiments list written to [bold]{output}[/bold]")

    table = Table(title="Done and pending experiments")

    table.add_column("Experiment ID", justify="right", style="cyan", no_wrap=True)
    table.add_column("Status", style="magenta")

    # NOTE(review): truncation is coarse — every experiment of the current
    # status group is added before the limit check, and the loop only breaks
    # between groups, so the table can exceed `limit` rows. Confirm whether
    # per-row truncation was intended.
    total_exp_nb = 0
    for status, exp_list in response_json.items():
        total_exp_nb += len(exp_list)
        for exp in exp_list:
            table.add_row(exp, status)
        if limit < total_exp_nb:
            print(
                f"The table below is truncated to the last [bold]{limit}[/bold] submitted experiments. Increase '--limit' to see more."
            )
            if not output:
                print("Use '--output' to get the full list in file format.")
            else:
                print(f"See the full list in file format at [bold]{output}[/bold]")
            break

    console = Console()
    console.print(table)
+
+
@app.command()
def features(
    exp_id: Annotated[str, experiment_ID_argument],
    output: Annotated[
        Optional[Path],
        typer.Option(
            help="Local path to download the zip to. Default to '_features.zip'."
        ),
    ] = None,
    force: Annotated[
        bool,
        typer.Option(
            help=(
                "Forces the download to overwrite any existing file "
                "with the same name in the specified location."
            )
        ),
    ] = False,
    unzip: Annotated[
        bool, typer.Option(help="Automatically unzip the file after its download.")
    ] = False,
):
    """Get an experiment features."""
    # Default destination is derived from the experiment id.
    destination = output if output is not None else Path(f"{exp_id}_features.zip")

    _download_file_from_signed_url(
        exp_id=exp_id,
        endpoint="getZippedExperimentFeatures",
        output=destination,
        force=force,
        unzip=unzip,
    )
+
+
@app.command()
def results(
    exp_id: Annotated[str, experiment_ID_argument],
    output: Annotated[
        Optional[Path],
        typer.Option(
            help="Local path to download the zip to. Default to '_results.zip'."
        ),
    ] = None,
    force: Annotated[
        bool,
        typer.Option(
            help=(
                "Forces the download to overwrite any existing file "
                "with the same name in the specified location."
            )
        ),
    ] = False,
    unzip: Annotated[
        bool, typer.Option(help="Automatically unzip the file after its download.")
    ] = False,
):
    """Get an experiment results."""

    # Default destination is derived from the experiment id.
    destination = output if output is not None else Path(f"{exp_id}_results.zip")

    _download_file_from_signed_url(
        exp_id=exp_id,
        endpoint="getZippedExperimentResults",
        output=destination,
        force=force,
        unzip=unzip,
    )
+
+
@app.command()
def cancel(
    exp_id: Annotated[List[str], experiment_ID_argument],
):
    """Cancel experiments job executions.

    You can pass one or more experiment id
    """
    # One POST carries the whole batch of experiment ids.
    response = requests.post(
        API_URL + "cancelJob",
        data={"experiment_ids": exp_id},
        headers=get_auth_headers(),
        timeout=REQUEST_TIMEOUT,
    )

    if not response.ok:
        print(f"An error occurred : {response.content.decode()}")
        raise typer.Exit(code=1)
    print(response.json())
+
+
@app.command()
def logs(
    exp_id: Annotated[str, experiment_ID_argument],
    output: Annotated[
        Optional[Path],
        typer.Option(
            help="Local path to download the logs to. Default to '_logs.txt'."
        ),
    ] = None,
    force: Annotated[
        bool,
        typer.Option(
            help=(
                "Forces the download to overwrite any existing file "
                "with the same name in the specified location."
            )
        ),
    ] = False,
):
    """Get an experiment logs."""
    # Default destination is derived from the experiment id.
    destination = output if output is not None else Path(f"{exp_id}_logs.txt")

    _download_file_from_signed_url(
        exp_id=exp_id,
        endpoint="getExperimentLogs",
        output=destination,
        force=force,
    )
diff --git a/folding-studio/folding_studio/commands/key.py b/folding-studio/folding_studio/commands/key.py
new file mode 100644
index 0000000000000000000000000000000000000000..368b21eb438bfd3b5e2d8cccf72e19ad56c68d08
--- /dev/null
+++ b/folding-studio/folding_studio/commands/key.py
@@ -0,0 +1,110 @@
+"""CLI Key command and sub-commands."""
+
+import os
+from typing import Annotated
+
+import requests
+import typer
+
# Backend base URL; overridable via the BACKEND_API_URL environment variable
# (defaults to the dev deployment).
BACKEND_API_URL = os.getenv(
    "BACKEND_API_URL", "https://dev-webapi-service-560808695349.europe-west4.run.app"
)

# `key` command group and the positional argument shared by its commands.
app = typer.Typer(help="Handle API key.")
api_key_argument = typer.Argument(help="API Key")
+
+
@app.command(name="generate")
def generate(
    user_id: Annotated[str, typer.Argument(help="User email.")],
    master_key: Annotated[str, typer.Argument(help="API Master Key.")],
):
    """
    Retrieves an API key for the specified user from a specified API endpoint.

    Args:
        user_id: The ID of the user.
        master_key: API Master Key.
    """

    url = f"{BACKEND_API_URL}/key"

    headers = {"x-api-master-key": f"{master_key}"}

    data = {"userId": user_id}

    try:
        # Bug fix: add a timeout — without one a stalled connection hangs the
        # CLI forever. A Timeout is caught below like any other request error.
        response = requests.post(url, json=data, headers=headers, timeout=60)
        response.raise_for_status()

        api_key = response.text

        if api_key:
            typer.echo(f"API Key for user [{user_id}] : {api_key}")
        else:
            typer.echo(f"Unable to generate an API Key for user {user_id}")

    except requests.exceptions.RequestException as e:
        typer.echo(f"Error: {e}")
+
+
@app.command(name="owner")
def owner(
    api_key: Annotated[str, api_key_argument],
):
    """
    Get the API key owner.

    Args:
        api_key: The API Key.
    """

    url = f"{BACKEND_API_URL}/key/owner"

    headers = {"x-api-key": f"{api_key}"}

    try:
        # Bug fix: add a timeout so a stalled connection cannot hang the CLI;
        # a Timeout is caught below like any other request error.
        response = requests.get(url, headers=headers, timeout=60)
        response.raise_for_status()

        data = response.json()
        # Bug fix: the original tested `api_key` (a required CLI argument,
        # always truthy) instead of the response payload, so the failure
        # branch was unreachable.
        if data:
            typer.echo(f"API Key owner : {data}")
        else:
            typer.echo(f"Unable to get owner for {api_key}")

    except requests.exceptions.RequestException as e:
        typer.echo(f"Error: {e}")
+
+
@app.command(name="decode")
def decode(
    api_key: Annotated[str, api_key_argument],
):
    """
    Decodes an API key.

    Args:
        api_key: The API Key.
    """

    url = f"{BACKEND_API_URL}/key/decode"

    headers = {"x-api-key": f"{api_key}"}

    try:
        # Bug fix: add a timeout so a stalled connection cannot hang the CLI;
        # a Timeout is caught below like any other request error.
        response = requests.get(url, headers=headers, timeout=60)
        response.raise_for_status()

        data = response.text
        # Bug fix: the original tested `api_key` (a required CLI argument,
        # always truthy) instead of the response body, so the failure branch
        # was unreachable.
        if data:
            typer.echo(f"API Key decoded : \n{data}")
        else:
            typer.echo(f"Unable to decode {api_key}")

    except requests.exceptions.RequestException as e:
        typer.echo(f"Error: {e}")
+
+
+if __name__ == "__main__":
+ app()
diff --git a/folding-studio/folding_studio/commands/msa.py b/folding-studio/folding_studio/commands/msa.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7c373a2541ef6afc31b38658bca700cac8e0dba
--- /dev/null
+++ b/folding-studio/folding_studio/commands/msa.py
@@ -0,0 +1,381 @@
+"""CLI MSA search command and sub-commands."""
+
+import json
+import shutil
+import zipfile
+from datetime import datetime
+from pathlib import Path
+from typing import Optional
+
+import requests
+import typer
+from folding_studio_data_models import (
+ FeatureMode,
+ MessageStatus,
+ MSAPublication,
+)
+from rich import print # pylint:disable=redefined-builtin
+from rich.console import Console
+from rich.markdown import Markdown
+from rich.table import Table
+from typing_extensions import Annotated
+
+from folding_studio.api_call.msa import simple_msa
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+from folding_studio.utils.data_model import (
+ MSARequestParams,
+ SimpleInputFile,
+)
+from folding_studio.utils.headers import get_auth_headers
+
# MSA command group, with an `experiment` sub-group for MSA experiment queries.
app = typer.Typer(help="Handle MSA operation")
msa_experiment_app = typer.Typer(help="Commands related to MSA experiments.")
app.add_typer(msa_experiment_app, name="experiment")

# Shared positional argument reused by the msa experiment sub-commands.
msa_experiment_ID_argument = typer.Argument(help="ID of the MSA experiment.")
+
+
def _validate_source_path(path: Path) -> Path:
    """Validate the msa job input source path.

    Args:
        path (Path): Source path.

    Raises:
        typer.BadParameter: If the source is an unsupported file.

    Returns:
        Path: The source.
    """
    # Allowed extensions come from the SimpleInputFile enum values.
    supported_simple_msa = tuple(item.value for item in SimpleInputFile)

    if path.suffix not in supported_simple_msa:
        message = (
            f"The source file '{path}' is not supported. "
            f"Only {supported_simple_msa} files are supported."
        )
        raise typer.BadParameter(message)
    return path
+
+
def _print_instructions_simple(response_json: dict, metadata_file: Path | None) -> None:
    """Print pretty instructions after a successful call to the simple msa endpoint.

    Args:
        response_json (dict): Server json response
        metadata_file: (Path | None): File path where job submission metadata are written.

    Raises:
        ValueError: If the publication status is not a known `MessageStatus`.
    """
    pub = MSAPublication.model_validate(response_json)
    msa_experiment_id = pub.message.msa_experiment_id

    console = Console()
    if pub.status == MessageStatus.NOT_PUBLISHED_DONE:
        # Cached result: nothing was submitted, point the user at the download.
        print(
            f"The results of your msa_experiment {msa_experiment_id} were found in the cache."
        )
        print("Use the following command to download the msa results:")
        # NOTE(review): this markdown snippet opens a ```shell fence without
        # closing it (same below); rich appears to render it anyway — confirm
        # before "fixing" the runtime string.
        md = f"""```shell
        folding msa experiment features {msa_experiment_id}
        """
        console.print(Markdown(md))
    elif pub.status == MessageStatus.NOT_PUBLISHED_PENDING:
        # An equivalent experiment is already running: just report its id.
        print(
            f"Your msa_experiment [bold]{msa_experiment_id}[/bold] is [bold green]still running.[/bold green]"
        )
        print("Use the following command to check on its status at a later time.")
        md = f"""```shell
        folding msa experiment status {msa_experiment_id}
        """
        console.print(Markdown(md))
    elif pub.status == MessageStatus.PUBLISHED:
        # Fresh submission: persist the server metadata and show follow-ups.
        print("[bold green]Experiment submitted successfully ![/bold green]")
        print(f"The msa_experiment_id is [bold]{msa_experiment_id}[/bold]")

        if not metadata_file:
            # NOTE(review): assigns a str to a Path-typed variable — works with
            # open(), but the annotation is inexact.
            timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
            metadata_file = f"simple_prediction_{timestamp}.json"
        with open(metadata_file, "w") as f:
            json.dump(response_json, f, indent=4)

        print(f"Prediction job metadata written to [bold]{metadata_file}[/bold]")
        print("You can query your experiment status with the command:")
        md = f"""```shell
        folding msa experiment status {msa_experiment_id}
        """
        console.print(Markdown(md))
    else:
        raise ValueError(f"Unknown publication status: {pub.status}")
+
+
@app.command()
def search(  # pylint: disable=dangerous-default-value, too-many-arguments, too-many-locals
    source: Annotated[
        Path,
        typer.Argument(
            help=("Path to the input fasta file."),
            callback=_validate_source_path,
            exists=True,
        ),
    ],
    project_code: Annotated[
        str,
        typer.Option(
            help=(
                "Project code. If unknown, contact your PM or the Folding Studio team."
            ),
            exists=True,
            envvar="FOLDING_PROJECT_CODE",
        ),
    ],
    cache: Annotated[
        bool,
        typer.Option(help="Use cached experiment results if any."),
    ] = True,
    msa_mode: Annotated[
        FeatureMode,
        typer.Option(help="Mode of the MSA features generation."),
    ] = FeatureMode.SEARCH,
    metadata_file: Annotated[
        Optional[Path],
        typer.Option(
            help=(
                "Path to the file where the job metadata returned by the server are written."
            ),
        ),
    ] = None,
):
    """Run an MSA tool. Read more at https://int-bio-foldingstudio-gcp.nw.r.appspot.com/tutorials/msa_search/."""

    # The CLI exposes `--cache/--no-cache`; the API expects the inverse flag.
    params = MSARequestParams(
        ignore_cache=not cache,
        msa_mode=msa_mode,
    )
    response = simple_msa(
        file=source,
        params=params,
        project_code=project_code,
    )

    # Pretty-print follow-up instructions based on the publication status.
    _print_instructions_simple(response_json=response, metadata_file=metadata_file)
+
+
def _download_file_from_signed_url(
    msa_exp_id: str,
    endpoint: str,
    output: Path,
    force: bool,
    unzip: bool = False,
) -> None:
    """Download a zip file from an experiment id.

    Args:
        msa_exp_id (str): MSA Experiment id.
        endpoint (str): API endpoint to call.
        output (Path): Output file path.
        force (bool): Force file writing if it already exists.
        unzip (bool): Unzip the zip file after downloading.

    Raises:
        typer.Exit: If output file path exists but force set to false.
        typer.Exit: If unzip set to true but the directory already exists and force set to false.
        typer.Exit: If an error occurred during the API call or the download itself.
    """
    if output.exists() and not force:
        print(
            f"Warning: The file '{output}' already exists. Use the --force flag to overwrite it."
        )
        raise typer.Exit(code=1)

    dir_path = None
    if unzip:
        if not output.suffix == ".zip":
            print(
                "Error: The downloaded file is not a .zip file. Please ensure the correct file format."
            )
            raise typer.Exit(code=1)

        dir_path = output.with_suffix("")
        if dir_path.exists() and not force:
            print(
                f"Warning: The --unzip flag is raised but the directory '{dir_path}' "
                "already exists. Use the --force flag to overwrite it."
            )
            raise typer.Exit(code=1)

    url = API_URL + endpoint

    headers = get_auth_headers()
    # First call: exchange the MSA experiment id for a short-lived signed URL.
    response = requests.get(
        url,
        params={"msa_experiment_id": msa_exp_id},
        headers=headers,
        timeout=REQUEST_TIMEOUT,
    )
    if not response.ok:
        print(f"Failed to download the file: {response.content.decode()}.")
        raise typer.Exit(code=1)

    # Second call: stream the actual file from the signed URL.
    file_response = requests.get(
        response.json()["signed_url"],
        stream=True,
        timeout=REQUEST_TIMEOUT,
    )
    # Bug fix: the original wrote the body unconditionally, so a failed
    # signed-URL fetch silently produced a corrupt output file.
    if not file_response.ok:
        print(f"Failed to download the file: {file_response.content.decode()}.")
        raise typer.Exit(code=1)

    with output.open("wb") as f:
        file_response.raw.decode_content = True
        shutil.copyfileobj(file_response.raw, f)
    print(f"File downloaded successfully to {output}.")

    if unzip:
        dir_path.mkdir(parents=True, exist_ok=True)
        with zipfile.ZipFile(output, "r") as zip_ref:
            zip_ref.extractall(dir_path)
        print(f"Extracted all files to {dir_path}.")
+
+
+@msa_experiment_app.command()
+def status(
+ msa_exp_id: Annotated[str, msa_experiment_ID_argument],
+):
+ """Get an MSA experiment status."""
+ url = API_URL + "getMSAExperimentStatus"
+ headers = get_auth_headers()
+ response = requests.get(
+ url,
+ params={"msa_experiment_id": msa_exp_id},
+ headers=headers,
+ timeout=REQUEST_TIMEOUT,
+ )
+
+ if not response.ok:
+ print(f"An error occurred : {response.content.decode()}")
+ raise typer.Exit(code=1)
+
+ message = response.json()
+ print(message["status"])
+
+
+@msa_experiment_app.command()
+def features(
+ msa_exp_id: Annotated[str, msa_experiment_ID_argument],
+ output: Annotated[
+ Optional[Path],
+ typer.Option(
+ help="Local path to download the zip to. Default to '_features.zip'."
+ ),
+ ] = None,
+ force: Annotated[
+ bool,
+ typer.Option(
+ help=(
+ "Forces the download to overwrite any existing file "
+ "with the same name in the specified location."
+ )
+ ),
+ ] = False,
+ unzip: Annotated[
+ bool, typer.Option(help="Automatically unzip the file after its download.")
+ ] = False,
+):
+ """Get an experiment features."""
+ if output is None:
+ output = Path(f"{msa_exp_id}_features.zip")
+
+ _download_file_from_signed_url(
+ msa_exp_id=msa_exp_id,
+ endpoint="getZippedMSAExperimentFeatures",
+ output=output,
+ force=force,
+ unzip=unzip,
+ )
+
+
+@msa_experiment_app.command()
+def logs(
+ msa_exp_id: Annotated[str, msa_experiment_ID_argument],
+ output: Annotated[
+ Optional[Path],
+ typer.Option(
+ help="Local path to download the logs to. Default to '_logs.txt'."
+ ),
+ ] = None,
+ force: Annotated[
+ bool,
+ typer.Option(
+ help=(
+ "Forces the download to overwrite any existing file "
+ "with the same name in the specified location."
+ )
+ ),
+ ] = False,
+):
+ """Get an experiment logs."""
+ if output is None:
+ output = Path(f"{msa_exp_id}_logs.txt")
+
+ _download_file_from_signed_url(
+ msa_exp_id=msa_exp_id,
+ endpoint="getExperimentLogs",
+ output=output,
+ force=force,
+ )
+
+
+@msa_experiment_app.command()
+def list(
+ limit: Annotated[
+ int,
+ typer.Option(
+ help=("Max number of experiment to display in the terminal."),
+ ),
+ ] = 100,
+ output: Annotated[
+ Optional[Path],
+ typer.Option(
+ "--output",
+ "-o",
+ help=(
+ "Path to the file where the job metadata returned by the server are written."
+ ),
+ ),
+ ] = None,
+): # pylint:disable=redefined-builtin
+ """Get all your done and pending experiment ids. The IDs are provided in the order of submission, starting with the most recent."""
+ headers = get_auth_headers()
+ url = API_URL + "getDoneAndPendingMSAExperiments"
+ response = requests.get(
+ url,
+ headers=headers,
+ timeout=REQUEST_TIMEOUT,
+ )
+
+ if not response.ok:
+ print(f"An error occurred : {response.content.decode()}")
+ raise typer.Exit(code=1)
+
+ response_json = response.json()
+ if output:
+ with open(output, "w") as f:
+ json.dump(response_json, f, indent=4)
+
+ print(f"Done and pending MSA experiments list written to [bold]{output}[/bold]")
+
+ table = Table(title="Done and pending MSA experiments")
+
+ table.add_column("MSA Experiment ID", justify="right", style="cyan", no_wrap=True)
+ table.add_column("Status", style="magenta")
+
+ total_exp_nb = 0
+ for status, exp_list in response_json.items():
+ total_exp_nb += len(exp_list)
+ for exp in exp_list:
+ table.add_row(exp, status)
+ if limit < total_exp_nb:
+ print(
+ f"The table below is truncated to the last [bold]{limit}[/bold] submitted MSA experiments. Increase '--limit' to see more."
+ )
+ if not output:
+ print("Use '--output' to get the full list in file format.")
+ else:
+ print(f"See the full list in file format at [bold]{output}[/bold]")
+ break
+
+ console = Console()
+ console.print(table)
diff --git a/folding-studio/folding_studio/commands/predict/__init__.py b/folding-studio/folding_studio/commands/predict/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..213ea619715a37c097bfb09eefe9e707e41349f0
--- /dev/null
+++ b/folding-studio/folding_studio/commands/predict/__init__.py
@@ -0,0 +1,10 @@
+"""Predict commands module."""
+
+from .af2_predict import af2
+from .boltz_predict import boltz
+from .chai_predict import chai
+from .openfold_predict import openfold
+from .protenix_predict import protenix
+from .soloseq_predict import soloseq
+
+__all__ = ["af2", "boltz", "chai", "openfold", "protenix", "soloseq"]
diff --git a/folding-studio/folding_studio/commands/predict/af2_predict.py b/folding-studio/folding_studio/commands/predict/af2_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb459e79e35ce3a139c4c6771794b70fa30df7da
--- /dev/null
+++ b/folding-studio/folding_studio/commands/predict/af2_predict.py
@@ -0,0 +1,250 @@
+"""AF2 folding submission command."""
+
+from pathlib import Path
+from typing import List, Optional
+
+import typer
+from folding_studio_data_models import (
+ FeatureMode,
+)
+from folding_studio_data_models.request.folding import FoldingModel
+from typing_extensions import Annotated
+
+from folding_studio.api_call.predict import (
+ batch_prediction,
+ batch_prediction_from_file,
+ simple_prediction,
+)
+from folding_studio.commands.predict.utils import (
+ print_instructions_batch,
+ print_instructions_simple,
+ validate_model_subset,
+ validate_source_path,
+)
+from folding_studio.config import FOLDING_API_KEY
+from folding_studio.console import console
+from folding_studio.utils.data_model import (
+ BatchInputFile,
+ PredictRequestCustomFiles,
+ PredictRequestParams,
+)
+from folding_studio.utils.input_validation import (
+ extract_and_validate_custom_msas,
+ extract_and_validate_custom_templates,
+ validate_initial_guess,
+)
+
+
+def af2( # pylint: disable=dangerous-default-value, too-many-arguments, too-many-locals
+    source: Annotated[
+        Path,
+        typer.Argument(
+            help=(
+                "Path to the data source. Either a fasta file, a directory of fasta files "
+                "or a csv/json file describing a batch prediction request."
+            ),
+            callback=validate_source_path,
+            exists=True,
+        ),
+    ],
+    project_code: Annotated[
+        str,
+        typer.Option(
+            help=(
+                "Project code. If unknown, contact your PM or the Folding Studio team."
+            ),
+            # NOTE(review): `exists` is a path-only validation flag; it looks
+            # like a no-op on a str option -- confirm intent.
+            exists=True,
+            envvar="FOLDING_PROJECT_CODE",
+        ),
+    ],
+    cache: Annotated[
+        bool,
+        typer.Option(help="Use cached experiment results if any."),
+    ] = True,
+    template_mode: Annotated[
+        FeatureMode,
+        typer.Option(help="Mode of the template features generation."),
+    ] = FeatureMode.SEARCH,
+    custom_template: Annotated[
+        List[Path],
+        typer.Option(
+            help=(
+                "Path to a custom template or a directory of custom templates. "
+                "To pass multiple inputs, simply repeat the flag "
+                "(e.g. `--custom_template template_1.cif --custom_template template_2.cif`)."
+            ),
+            callback=extract_and_validate_custom_templates,
+            exists=True,
+        ),
+    ] = [],
+    custom_template_id: Annotated[
+        List[str],
+        typer.Option(
+            help=(
+                "ID of a custom template. "
+                "To pass multiple inputs, simply repeat the flag "
+                "(e.g. `--custom_template_id template_ID_1 --custom_template_id template_ID_2`)."
+            )
+        ),
+    ] = [],
+    initial_guess_file: Annotated[
+        Path | None,
+        typer.Option(
+            help=("Path to an initial guess file."),
+            callback=validate_initial_guess,
+            exists=True,
+        ),
+    ] = None,
+    templates_masks_file: Annotated[
+        Path | None,
+        typer.Option(
+            help=("Path to a templates masks file."),
+            exists=True,
+        ),
+    ] = None,
+    msa_mode: Annotated[
+        FeatureMode,
+        typer.Option(help="Mode of the MSA features generation."),
+    ] = FeatureMode.SEARCH,
+    custom_msa: Annotated[
+        List[Path],
+        typer.Option(
+            help=(
+                "Path to a custom msa or a directory of custom msas. "
+                "To pass multiple inputs, simply repeat the flag "
+                "(e.g. `--custom_msa msa_1.sto --custom_msa msa_2.sto`)."
+            ),
+            callback=extract_and_validate_custom_msas,
+            exists=True,
+        ),
+    ] = [],
+    max_msa_clusters: Annotated[
+        int,
+        typer.Option(help="Max number of MSA clusters to search."),
+    ] = -1,
+    max_extra_msa: Annotated[
+        int,
+        typer.Option(
+            help="Max extra non-clustered MSA representation to use as source."
+        ),
+    ] = -1,
+    gap_trick: Annotated[
+        bool,
+        typer.Option(
+            help="Activate gap trick, allowing to model complexes with monomer models."
+        ),
+    ] = False,
+    num_recycle: Annotated[
+        int,
+        typer.Option(
+            help="Number of refinement iterations of the predicted structures."
+        ),
+    ] = 3,
+    model_subset: Annotated[
+        list[int],
+        typer.Option(
+            help="Subset of AF2 model ids to use, between 1 and 5 included.",
+            callback=validate_model_subset,
+        ),
+    ] = [],
+    random_seed: Annotated[
+        int,
+        typer.Option(
+            help=(
+                "Random seed used during the MSA sampling. "
+                "Different random seed values will introduce variations in the predictions."
+            )
+        ),
+    ] = 0,
+    num_seed: Annotated[
+        Optional[int],
+        typer.Option(
+            help="Number of random seeds to use. Creates a batch prediction.", min=2
+        ),
+    ] = None,
+    metadata_file: Annotated[
+        Optional[Path],
+        typer.Option(
+            help=(
+                "Path to the file where the job metadata returned by the server are written."
+            ),
+        ),
+    ] = None,
+):
+    """Asynchronous AF2 folding submission.
+
+    Read more at https://int-bio-foldingstudio-gcp.nw.r.appspot.com/how-to-guides/af2_openfold/single_af2_job/.
+
+    If the source is a CSV or JSON file describing a batch prediction request, all the other
+    options will be overlooked.
+    """
+
+    # Report which authentication mechanism will be used for this submission.
+    if FOLDING_API_KEY:
+        console.print(":key: Using detected API key for authentication.")
+    else:
+        console.print(":yellow_circle: Using JWT for authentication.")
+
+    # Batch mode is triggered by a directory source, a batch-description file,
+    # or a multi-seed request.
+    # NOTE(review): the suffix test assumes BatchInputFile is a str-valued enum
+    # whose members compare equal to their suffix values -- confirm.
+    is_batch = source.is_dir() or source.suffix in BatchInputFile.__members__.values()
+    is_multi_seed = num_seed is not None
+    is_batch = is_batch or is_multi_seed
+
+    # Collect the scalar prediction options; `cache` is inverted into the API's
+    # `ignore_cache` flag.
+    params = PredictRequestParams(
+        ignore_cache=not cache,
+        template_mode=template_mode,
+        custom_template_ids=custom_template_id,
+        msa_mode=msa_mode,
+        max_msa_clusters=max_msa_clusters,
+        max_extra_msa=max_extra_msa,
+        gap_trick=gap_trick,
+        num_recycle=num_recycle,
+        random_seed=random_seed,
+        model_subset=model_subset,
+    )
+
+    # Bundle the user-supplied files; optional singletons are wrapped in lists
+    # or passed as None when absent.
+    custom_files = PredictRequestCustomFiles(
+        templates=custom_template,
+        msas=custom_msa,
+        initial_guess_files=[initial_guess_file] if initial_guess_file else None,
+        templates_masks_files=[templates_masks_file] if templates_masks_file else None,
+    )
+
+    if is_batch:
+        if is_multi_seed:
+            # Multi-seed: a single source file fanned out over `num_seed` seeds.
+            response = batch_prediction(
+                files=[source],
+                folding_model=FoldingModel.AF2,
+                params=params,
+                custom_files=custom_files,
+                num_seed=num_seed,
+                project_code=project_code,
+            )
+        elif source.is_file():
+            # Batch-description file (csv/json): CLI options are ignored in
+            # favor of the file's content.
+            console.print(
+                f"Submitting batch jobs configuration file [bold]{source}[/bold]"
+            )
+            console.print(
+                "Input options are [bold yellow]ignored[/bold yellow] in favor of the configuration file content."
+            )
+            response = batch_prediction_from_file(
+                file=source,
+                project_code=project_code,
+            )
+        elif source.is_dir():
+            # Directory source: submit every regular file it contains.
+            response = batch_prediction(
+                files=list(f for f in source.iterdir() if f.is_file()),
+                folding_model=FoldingModel.AF2,
+                params=params,
+                custom_files=custom_files,
+                num_seed=num_seed,
+                project_code=project_code,
+            )
+        print_instructions_batch(response_json=response, metadata_file=metadata_file)
+    else:
+        # Single fasta file, single seed: simple prediction.
+        response = simple_prediction(
+            file=source,
+            folding_model=FoldingModel.AF2,
+            params=params,
+            custom_files=custom_files,
+            project_code=project_code,
+        )
+        print_instructions_simple(response_json=response, metadata_file=metadata_file)
diff --git a/folding-studio/folding_studio/commands/predict/boltz_predict.py b/folding-studio/folding_studio/commands/predict/boltz_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..295e7093367ca2d33dafaa4243096de6a5f20142
--- /dev/null
+++ b/folding-studio/folding_studio/commands/predict/boltz_predict.py
@@ -0,0 +1,175 @@
+"""Boltz-1 folding submission command."""
+
+import json
+from datetime import datetime
+from pathlib import Path
+from typing import Annotated, Any, Optional
+
+import typer
+from rich.json import JSON
+from rich.panel import Panel
+
+from folding_studio.client import Client
+from folding_studio.commands.utils import (
+ success_fail_catch_print,
+ success_fail_catch_spinner,
+)
+from folding_studio.config import FOLDING_API_KEY
+from folding_studio.console import console
+from folding_studio.query.boltz import BoltzQuery
+
+
+def boltz(
+ source: Annotated[
+ Path,
+ typer.Argument(
+ help=(
+ "Path to the data source. Either a FASTA file, a YAML file, "
+ "or a directory containing FASTA and YAML files."
+ ),
+ exists=True,
+ ),
+ ],
+ project_code: Annotated[
+ str,
+ typer.Option(
+ help="Project code. If unknown, contact your PM or the Folding Studio team.",
+ envvar="FOLDING_PROJECT_CODE",
+ # exists=True,
+ ),
+ ],
+ parameters_json: Annotated[
+ Path | None,
+ typer.Option(help="Path to JSON file containing Boltz inference parameters."),
+ ] = None,
+ recycling_steps: Annotated[
+ int, typer.Option(help="Number of recycling steps for prediction.")
+ ] = 3,
+ sampling_steps: Annotated[
+ int, typer.Option(help="Number of sampling steps for prediction.")
+ ] = 200,
+ diffusion_samples: Annotated[
+ int, typer.Option(help="Number of diffusion samples for prediction.")
+ ] = 1,
+ step_scale: Annotated[
+ float,
+ typer.Option(
+ help="Step size related to the temperature at which the diffusion process samples the distribution."
+ ),
+ ] = 1.638,
+ msa_pairing_strategy: Annotated[
+ str, typer.Option(help="Pairing strategy for MSA generation.")
+ ] = "greedy",
+ write_full_pae: Annotated[
+ bool, typer.Option(help="Whether to save the full PAE matrix as a file.")
+ ] = False,
+ write_full_pde: Annotated[
+ bool, typer.Option(help="Whether to save the full PDE matrix as a file.")
+ ] = False,
+ use_msa_server: Annotated[
+ bool,
+ typer.Option(help="Flag to use the MSA server for inference.", is_flag=True),
+ ] = True,
+ msa_path: Annotated[
+ Optional[str],
+ typer.Option(
+ help="Path to the custom MSAs. It can be a .a3m or .aligned.pqt file, or a directory containing these files."
+ ),
+ ] = None,
+ seed: Annotated[
+ int | None, typer.Option(help="Seed for random number generation.")
+ ] = 0,
+ output: Annotated[
+ Path,
+ typer.Option(
+ help="Local path to download the result zip and query parameters to. "
+ "Default to 'boltz_results'."
+ ),
+ ] = "boltz_results",
+ force: Annotated[
+ bool,
+ typer.Option(
+ help=(
+ "Forces the download to overwrite any existing file "
+ "with the same name in the specified location."
+ )
+ ),
+ ] = False,
+ unzip: Annotated[
+ bool, typer.Option(help="Unzip the file after its download.")
+ ] = False,
+ spinner: Annotated[
+ bool, typer.Option(help="Use live spinner in log output.")
+ ] = True,
+):
+ """Synchronous Boltz-1 folding submission."""
+
+ success_fail_catch = (
+ success_fail_catch_spinner if spinner else success_fail_catch_print
+ )
+
+ # If a custom MSA path is provided, disable automated MSA search.
+ if msa_path is not None:
+ console.print(
+ "\n[yellow]:warning: Custom MSA path provided. Disabling automated MSA search.[/yellow]"
+ )
+ use_msa_server = False
+
+ console.print(
+ Panel("[bold cyan]:dna: Boltz1 Folding submission [/bold cyan]", expand=False)
+ )
+ output_dir = output / f"submission_{datetime.now().strftime('%Y%m%d%H%M%S')}"
+
+ # Initialize parameters with CLI-provided values
+ parameters = {
+ "recycling_steps": recycling_steps,
+ "sampling_steps": sampling_steps,
+ "diffusion_samples": diffusion_samples,
+ "step_scale": step_scale,
+ "msa_pairing_strategy": msa_pairing_strategy,
+ "write_full_pae": write_full_pae,
+ "write_full_pde": write_full_pde,
+ "use_msa_server": use_msa_server,
+ "seed": seed,
+ "custom_msa_paths": msa_path,
+ }
+
+ if parameters_json:
+ try:
+ with open(parameters_json, "r") as f:
+ json_parameters: dict[str, Any] = json.load(f)
+ except Exception as e:
+ raise ValueError(f"Error reading JSON file: {e}")
+ console.print(
+ ":warning: Parameters specified in the configuration file will "
+ "take precedence over the CLI options."
+ )
+ parameters.update(json_parameters)
+
+ # Create a client using API key or JWT
+ with success_fail_catch(":key: Authenticating client"):
+ client = Client.authenticate()
+
+ # Define query
+ with success_fail_catch(":package: Generating query"):
+ query_builder = (
+ BoltzQuery.from_file if source.is_file() else BoltzQuery.from_directory
+ )
+ query: BoltzQuery = query_builder(source, **parameters)
+ query.save_parameters(output_dir)
+
+ console.print("[blue]Generated query: [/blue]", end="")
+ console.print(JSON.from_data(query.payload), style="blue")
+
+ # Send a request
+ with success_fail_catch(":brain: Processing folding job"):
+ response = client.send_request(query, project_code)
+
+ # Access confidence data
+ console.print("[blue]Confidence data:[/blue]", end=" ")
+ console.print(JSON.from_data(response.confidence_data), style="blue")
+
+ with success_fail_catch(
+ f":floppy_disk: Downloading results to `[green]{output_dir}[/green]`"
+ ):
+ response.download_results(output_dir=output_dir, force=force, unzip=unzip)
diff --git a/folding-studio/folding_studio/commands/predict/chai_predict.py b/folding-studio/folding_studio/commands/predict/chai_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..29d4f4e9360a6fc4597f08b6a6a4eddcf7eb5981
--- /dev/null
+++ b/folding-studio/folding_studio/commands/predict/chai_predict.py
@@ -0,0 +1,156 @@
+"""Chai-1 folding submission command."""
+
+from datetime import datetime
+from pathlib import Path
+from typing import Annotated, Optional
+
+import typer
+from rich.json import JSON
+from rich.panel import Panel
+
+from folding_studio.client import Client
+from folding_studio.commands.utils import (
+ success_fail_catch_print,
+ success_fail_catch_spinner,
+)
+from folding_studio.console import console
+from folding_studio.query.chai import ChaiQuery
+
+
+def chai(
+ source: Annotated[
+ Path,
+ typer.Argument(
+ help=(
+ "Path to the data source. Either a fasta file, a directory of fasta files "
+ "or a csv/json file describing a batch prediction request."
+ ),
+ exists=True,
+ ),
+ ],
+ project_code: Annotated[
+ str,
+ typer.Option(
+ help="Project code. If unknown, contact your PM or the Folding Studio team.",
+ envvar="FOLDING_PROJECT_CODE",
+ exists=True,
+ ),
+ ],
+ use_msa_server: Annotated[
+ bool,
+ typer.Option(
+ help="Flag to enable MSA features. MSA search is performed by InstaDeep's MMseqs2 server.",
+ is_flag=True,
+ ),
+ ] = True,
+ use_templates_server: Annotated[
+ bool,
+ typer.Option(
+ help="Flag to enable templates. Templates search is performed by InstaDeep's MMseqs2 server.",
+ is_flag=True,
+ ),
+ ] = False,
+ num_trunk_recycles: Annotated[
+ int, typer.Option(help="Number of trunk recycles during inference.")
+ ] = 3,
+ seed: Annotated[int, typer.Option(help="Random seed for inference.")] = 0,
+ num_diffn_timesteps: Annotated[
+ int, typer.Option(help="Number of diffusion timesteps to run.")
+ ] = 200,
+ restraints: Annotated[
+ Optional[str],
+ typer.Option(help="Restraints information."),
+ ] = None,
+ recycle_msa_subsample: Annotated[
+ int,
+ typer.Option(help="Subsample parameter for recycling MSA during inference."),
+ ] = 0,
+ num_trunk_samples: Annotated[
+ int, typer.Option(help="Number of trunk samples to generate during inference.")
+ ] = 1,
+ msa_path: Annotated[
+ Optional[str],
+ typer.Option(
+ help="Path to the custom MSAs. It can be a .a3m or .aligned.pqt file, or a directory containing these files."
+ ),
+ ] = None,
+ output: Annotated[
+ Path,
+ typer.Option(
+ help="Local path to download the result zip and query parameters to. "
+ "Default to 'chai_results'."
+ ),
+ ] = "chai_results",
+ force: Annotated[
+ bool,
+ typer.Option(
+ help=(
+ "Forces the download to overwrite any existing file "
+ "with the same name in the specified location."
+ )
+ ),
+ ] = False,
+ unzip: Annotated[
+ bool, typer.Option(help="Unzip the file after its download.")
+ ] = False,
+ spinner: Annotated[
+ bool, typer.Option(help="Use live spinner in log output.")
+ ] = True,
+):
+ """Synchronous Chai-1 folding submission."""
+ # If a custom MSA path is provided, disable automated MSA search.
+ if msa_path is not None:
+ console.print(
+ "\n[yellow]:warning: Custom MSA path provided. Disabling automated MSA search.[/yellow]"
+ )
+ use_msa_server = False
+
+ console.print(
+ Panel("[bold cyan]:dna: Chai-1 Folding submission [/bold cyan]", expand=False)
+ )
+
+ success_fail_catch = (
+ success_fail_catch_spinner if spinner else success_fail_catch_print
+ )
+
+ # Create a client using API key or JWT
+ with success_fail_catch(":key: Authenticating client"):
+ client = Client.authenticate()
+
+ output_dir = output / f"submission_{datetime.now().strftime('%Y%m%d%H%M%S')}"
+ output_dir.mkdir(parents=True, exist_ok=True)
+ # Define a query
+ with success_fail_catch(":package: Generating query"):
+ query_builder = (
+ ChaiQuery.from_file if source.is_file() else ChaiQuery.from_directory
+ )
+ query: ChaiQuery = query_builder(
+ source,
+ restraints=restraints,
+ use_msa_server=use_msa_server,
+ use_templates_server=use_templates_server,
+ num_trunk_recycles=num_trunk_recycles,
+ seed=seed,
+ num_diffn_timesteps=num_diffn_timesteps,
+ recycle_msa_subsample=recycle_msa_subsample,
+ num_trunk_samples=num_trunk_samples,
+ custom_msa_paths=msa_path,
+ )
+ query.save_parameters(output_dir)
+
+ console.print("[blue]Generated query: [/blue]", end="")
+ console.print(JSON.from_data(query.payload), style="blue")
+
+ # Send a request
+ with success_fail_catch(":brain: Processing folding job"):
+ response = client.send_request(query, project_code)
+
+ # Access confidence data
+ console.print("[blue]Confidence Data:[/blue]", end=" ")
+ console.print(JSON.from_data(response.confidence_data), style="blue")
+
+ # Download results
+ with success_fail_catch(
+ f":floppy_disk: Downloading results to `[green]{output_dir}[/green]`"
+ ):
+ response.download_results(output_dir=output_dir, force=force, unzip=unzip)
diff --git a/folding-studio/folding_studio/commands/predict/openfold_predict.py b/folding-studio/folding_studio/commands/predict/openfold_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ddf3133a42cf3e216fd5c403ca11c644caaf207
--- /dev/null
+++ b/folding-studio/folding_studio/commands/predict/openfold_predict.py
@@ -0,0 +1,240 @@
+"""OpenFold folding submission command."""
+
+from pathlib import Path
+from typing import List, Optional
+
+import typer
+from folding_studio_data_models import (
+ FeatureMode,
+)
+from folding_studio_data_models.request.folding import FoldingModel
+from typing_extensions import Annotated
+
+from folding_studio.api_call.predict import (
+ batch_prediction,
+ batch_prediction_from_file,
+ simple_prediction,
+)
+from folding_studio.commands.predict.utils import (
+ print_instructions_batch,
+ print_instructions_simple,
+ validate_model_subset,
+ validate_source_path,
+)
+from folding_studio.config import FOLDING_API_KEY
+from folding_studio.console import console
+from folding_studio.utils.data_model import (
+ BatchInputFile,
+ PredictRequestCustomFiles,
+ PredictRequestParams,
+)
+from folding_studio.utils.input_validation import (
+ extract_and_validate_custom_msas,
+ extract_and_validate_custom_templates,
+)
+
+
+def openfold( # pylint: disable=dangerous-default-value, too-many-arguments, too-many-locals
+    source: Annotated[
+        Path,
+        typer.Argument(
+            help=(
+                "Path to the data source. Either a fasta file, a directory of fasta files "
+                "or a csv/json file describing a batch prediction request."
+            ),
+            callback=validate_source_path,
+            exists=True,
+        ),
+    ],
+    project_code: Annotated[
+        str,
+        typer.Option(
+            help=(
+                "Project code. If unknown, contact your PM or the Folding Studio team."
+            ),
+            # NOTE(review): `exists` is a path-only validation flag; it looks
+            # like a no-op on a str option -- confirm intent.
+            exists=True,
+            envvar="FOLDING_PROJECT_CODE",
+        ),
+    ],
+    cache: Annotated[
+        bool,
+        typer.Option(help="Use cached experiment results if any."),
+    ] = True,
+    template_mode: Annotated[
+        FeatureMode,
+        typer.Option(help="Mode of the template features generation."),
+    ] = FeatureMode.SEARCH,
+    custom_template: Annotated[
+        List[Path],
+        typer.Option(
+            help=(
+                "Path to a custom template or a directory of custom templates. "
+                "To pass multiple inputs, simply repeat the flag "
+                "(e.g. `--custom_template template_1.cif --custom_template template_2.cif`)."
+            ),
+            callback=extract_and_validate_custom_templates,
+            exists=True,
+        ),
+    ] = [],
+    custom_template_id: Annotated[
+        List[str],
+        typer.Option(
+            help=(
+                "ID of a custom template. "
+                "To pass multiple inputs, simply repeat the flag "
+                "(e.g. `--custom_template_id template_ID_1 --custom_template_id template_ID_2`)."
+            )
+        ),
+    ] = [],
+    templates_masks_file: Annotated[
+        Path | None,
+        typer.Option(
+            help=("Path to a templates masks file."),
+            exists=True,
+        ),
+    ] = None,
+    msa_mode: Annotated[
+        FeatureMode,
+        typer.Option(help="Mode of the MSA features generation."),
+    ] = FeatureMode.SEARCH,
+    custom_msa: Annotated[
+        List[Path],
+        typer.Option(
+            help=(
+                "Path to a custom msa or a directory of custom msas. "
+                "To pass multiple inputs, simply repeat the flag "
+                "(e.g. `--custom_msa msa_1.sto --custom_msa msa_2.sto`)."
+            ),
+            callback=extract_and_validate_custom_msas,
+            exists=True,
+        ),
+    ] = [],
+    max_msa_clusters: Annotated[
+        int,
+        typer.Option(help="Max number of MSA clusters to search."),
+    ] = -1,
+    max_extra_msa: Annotated[
+        int,
+        typer.Option(
+            help="Max extra non-clustered MSA representation to use as source."
+        ),
+    ] = -1,
+    gap_trick: Annotated[
+        bool,
+        typer.Option(
+            help="Activate gap trick, allowing to model complexes with monomer models."
+        ),
+    ] = False,
+    num_recycle: Annotated[
+        int,
+        typer.Option(
+            help="Number of refinement iterations of the predicted structures."
+        ),
+    ] = 3,
+    model_subset: Annotated[
+        list[int],
+        typer.Option(
+            help="Subset of AF2 model ids to use, between 1 and 5 included.",
+            callback=validate_model_subset,
+        ),
+    ] = [],
+    random_seed: Annotated[
+        int,
+        typer.Option(
+            help=(
+                "Random seed used during the MSA sampling. "
+                "Different random seed values will introduce variations in the predictions."
+            )
+        ),
+    ] = 0,
+    num_seed: Annotated[
+        Optional[int],
+        typer.Option(
+            help="Number of random seeds to use. Creates a batch prediction.", min=2
+        ),
+    ] = None,
+    metadata_file: Annotated[
+        Optional[Path],
+        typer.Option(
+            help=(
+                "Path to the file where the job metadata returned by the server are written."
+            ),
+        ),
+    ] = None,
+):
+    """Asynchronous OpenFold folding submission.
+
+    Read more at https://int-bio-foldingstudio-gcp.nw.r.appspot.com/how-to-guides/af2_openfold/single_openfold_job/.
+
+    If the source is a CSV or JSON file describing a batch prediction request, all the other
+    options will be overlooked.
+    """
+
+    # Report which authentication mechanism will be used for this submission.
+    if FOLDING_API_KEY:
+        console.print(":key: Using detected API key for authentication.")
+    else:
+        console.print(":yellow_circle: Using JWT for authentication.")
+
+    # Batch mode is triggered by a directory source, a batch-description file,
+    # or a multi-seed request.
+    # NOTE(review): the suffix test assumes BatchInputFile is a str-valued enum
+    # whose members compare equal to their suffix values -- confirm.
+    is_batch = source.is_dir() or source.suffix in BatchInputFile.__members__.values()
+    is_multi_seed = num_seed is not None
+    is_batch = is_batch or is_multi_seed
+
+    # Collect the scalar prediction options; `cache` is inverted into the API's
+    # `ignore_cache` flag.
+    params = PredictRequestParams(
+        ignore_cache=not cache,
+        template_mode=template_mode,
+        custom_template_ids=custom_template_id,
+        msa_mode=msa_mode,
+        max_msa_clusters=max_msa_clusters,
+        max_extra_msa=max_extra_msa,
+        gap_trick=gap_trick,
+        num_recycle=num_recycle,
+        random_seed=random_seed,
+        model_subset=model_subset,
+    )
+
+    # Bundle the user-supplied files; the optional masks file is wrapped in a
+    # list or passed as None when absent. Unlike `af2`, OpenFold takes no
+    # initial guess file.
+    custom_files = PredictRequestCustomFiles(
+        templates=custom_template,
+        msas=custom_msa,
+        templates_masks_files=[templates_masks_file] if templates_masks_file else None,
+    )
+
+    if is_batch:
+        if is_multi_seed:
+            # Multi-seed: a single source file fanned out over `num_seed` seeds.
+            response = batch_prediction(
+                files=[source],
+                folding_model=FoldingModel.OPENFOLD,
+                params=params,
+                custom_files=custom_files,
+                num_seed=num_seed,
+                project_code=project_code,
+            )
+        elif source.is_file():
+            # Batch-description file (csv/json): CLI options are ignored in
+            # favor of the file's content.
+            console.print(
+                f"Submitting batch jobs configuration file [bold]{source}[/bold]"
+            )
+            console.print(
+                "Input options are [bold yellow]ignored[/bold yellow] in favor of the configuration file content."
+            )
+            response = batch_prediction_from_file(
+                file=source,
+                project_code=project_code,
+            )
+        elif source.is_dir():
+            # Directory source: submit every regular file it contains.
+            response = batch_prediction(
+                files=list(f for f in source.iterdir() if f.is_file()),
+                folding_model=FoldingModel.OPENFOLD,
+                params=params,
+                custom_files=custom_files,
+                num_seed=num_seed,
+                project_code=project_code,
+            )
+        print_instructions_batch(response_json=response, metadata_file=metadata_file)
+    else:
+        # Single fasta file, single seed: simple prediction.
+        response = simple_prediction(
+            file=source,
+            folding_model=FoldingModel.OPENFOLD,
+            params=params,
+            custom_files=custom_files,
+            project_code=project_code,
+        )
+        print_instructions_simple(response_json=response, metadata_file=metadata_file)
diff --git a/folding-studio/folding_studio/commands/predict/protenix_predict.py b/folding-studio/folding_studio/commands/predict/protenix_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..427502f1a254a46eef3919a4764a09804131f86d
--- /dev/null
+++ b/folding-studio/folding_studio/commands/predict/protenix_predict.py
@@ -0,0 +1,125 @@
+"""Protenix folding submission command."""
+
+from datetime import datetime
+from pathlib import Path
+from typing import Annotated
+
+import typer
+from rich.json import JSON
+from rich.panel import Panel
+
+from folding_studio.client import Client
+from folding_studio.commands.utils import (
+ success_fail_catch_print,
+ success_fail_catch_spinner,
+)
+from folding_studio.config import FOLDING_API_KEY
+from folding_studio.console import console
+from folding_studio.query import ProtenixQuery
+
+
+def protenix(
+ source: Annotated[
+ Path,
+ typer.Argument(
+ help=(
+ "Path to the data source. Either a fasta file, a directory of fasta files"
+ "describing a batch prediction request."
+ ),
+ # callback=_validate_source_path,
+ exists=True,
+ ),
+ ],
+ project_code: Annotated[ # noqa: ANN001
+ str,
+ typer.Option(
+ help=(
+ "Project code. If unknown, contact your PM or the Folding Studio team."
+ ),
+ exists=True,
+ envvar="FOLDING_PROJECT_CODE",
+ ),
+ ],
+ use_msa_server: Annotated[ # pylint: disable=unused-argument
+ bool,
+ typer.Option(
+ help="Flag to use the MSA server for inference. Forced to True.",
+ is_flag=True,
+ ),
+ ] = True,
+ seed: Annotated[int, typer.Option(help="Random seed.")] = 0,
+ cycle: Annotated[int, typer.Option(help="Pairformer cycle number.")] = 10,
+ step: Annotated[
+ int, typer.Option(help="Number of steps for the diffusion process.")
+ ] = 200,
+ sample: Annotated[int, typer.Option(help="Number of samples in each seed.")] = 5,
+ output: Annotated[
+ Path,
+ typer.Option(
+ help="Local path to download the result zip and query parameters to. "
+ "Default to 'protenix_results'."
+ ),
+ ] = "protenix_results",
+ force: Annotated[
+ bool,
+ typer.Option(
+ help=(
+ "Forces the download to overwrite any existing file "
+ "with the same name in the specified location."
+ )
+ ),
+ ] = False,
+ unzip: Annotated[
+ bool, typer.Option(help="Unzip the file after its download.")
+ ] = False,
+ spinner: Annotated[
+ bool, typer.Option(help="Use live spinner in log output.")
+ ] = True,
+):
+ """Synchronous Protenix folding submission."""
+
+ success_fail_catch = (
+ success_fail_catch_spinner if spinner else success_fail_catch_print
+ )
+
+ console.print(
+ Panel("[bold cyan]:dna: Protenix Folding submission [/bold cyan]", expand=False)
+ )
+ output_dir = output / f"submission_{datetime.now().strftime('%Y%m%d%H%M%S')}"
+
+ # Create a client using API key or JWT
+ with success_fail_catch(":key: Authenticating client"):
+ client = Client.authenticate()
+
+ # Define a query
+ with success_fail_catch(":package: Generating query"):
+ query_builder = (
+ ProtenixQuery.from_file
+ if source.is_file()
+ else ProtenixQuery.from_directory
+ )
+ query: ProtenixQuery = query_builder(
+ source,
+ use_msa_server=True,
+ seed=seed,
+ cycle=cycle,
+ step=step,
+ sample=sample,
+ )
+ query.save_parameters(output_dir)
+
+ console.print("[blue]Generated query: [/blue]", end="")
+ console.print(JSON.from_data(query.payload), style="blue")
+
+ # Send a request
+ with success_fail_catch(":brain: Processing folding job"):
+ response = client.send_request(query, project_code)
+
+ # Access confidence data
+ console.print("[blue]Confidence Data:[/blue]", end=" ")
+ console.print(JSON.from_data(response.confidence_data), style="blue")
+
+ with success_fail_catch(
+ f":floppy_disk: Downloading results to `[green]{output_dir}[/green]`"
+ ):
+ response.download_results(output_dir=output_dir, force=force, unzip=unzip)
diff --git a/folding-studio/folding_studio/commands/predict/soloseq_predict.py b/folding-studio/folding_studio/commands/predict/soloseq_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d543fd1f25759d62b85eb857b434004d0101582
--- /dev/null
+++ b/folding-studio/folding_studio/commands/predict/soloseq_predict.py
@@ -0,0 +1,110 @@
+"""SoloSeq folding submission command."""
+
+from datetime import datetime
+from pathlib import Path
+from typing import Annotated
+
+import typer
+from rich.panel import Panel
+
+from folding_studio.client import Client
+from folding_studio.commands.utils import (
+ success_fail_catch_print,
+ success_fail_catch_spinner,
+)
+from folding_studio.config import FOLDING_API_KEY
+from folding_studio.console import console
+from folding_studio.query.soloseq import SoloSeqQuery
+
+
+def soloseq(
+ source: Annotated[
+ Path,
+ typer.Argument(
+ help=(
+ "Path to the data source. Either a fasta file or a directory of fasta files."
+ ),
+ exists=True,
+ ),
+ ],
+ project_code: Annotated[
+ str,
+ typer.Option(
+ help="Project code. If unknown, contact your PM or the Folding Studio team.",
+ envvar="FOLDING_PROJECT_CODE",
+ exists=True,
+ ),
+ ],
+ seed: Annotated[int, typer.Option(help="Random seed.")] = 0,
+ skip_relaxation: Annotated[
+ bool,
+ typer.Option(help="Skip the relaxation process."),
+ ] = False,
+ subtract_plddt: Annotated[
+ bool,
+ typer.Option(help="Output (100 - pLDDT) instead of the pLDDT itself."),
+ ] = False,
+ output: Annotated[
+ Path,
+ typer.Option(
+ help="Local path to download the result zip and query parameters to. "
+ "Default to 'soloseq_results'."
+ ),
+ ] = "soloseq_results",
+ force: Annotated[
+ bool,
+ typer.Option(
+ help=(
+ "Forces the download to overwrite any existing file "
+ "with the same name in the specified location."
+ )
+ ),
+ ] = False,
+ unzip: Annotated[
+ bool, typer.Option(help="Unzip the file after its download.")
+ ] = False,
+ spinner: Annotated[
+ bool, typer.Option(help="Use live spinner in log output.")
+ ] = True,
+):
+ """Synchronous SoloSeq folding submission"""
+ success_fail_catch = (
+ success_fail_catch_spinner if spinner else success_fail_catch_print
+ )
+ console.print(
+ Panel("[bold cyan]:dna: SoloSeq Folding submission [/bold cyan]", expand=False)
+ )
+ output_dir = output / f"submission_{datetime.now().strftime('%Y%m%d%H%M%S')}"
+
+ # Create a client using API key or JWT
+ with success_fail_catch(":key: Authenticating client"):
+ client = Client.authenticate()
+
+ # Define a query
+ with success_fail_catch(":package: Generating query"):
+ query_builder = (
+ SoloSeqQuery.from_file if source.is_file() else SoloSeqQuery.from_directory
+ )
+ query: SoloSeqQuery = query_builder(
+ source,
+ seed=seed,
+ skip_relaxation=skip_relaxation,
+ subtract_plddt=subtract_plddt,
+ )
+ query.save_parameters(output_dir)
+
+ console.print("[blue]Generated query: [/blue]")
+ console.print_json(data=query.payload)
+
+ # Send a request
+ with success_fail_catch(":brain: Processing folding job"):
+ response = client.send_request(query, project_code)
+
+ # Access confidence data
+ console.print("[blue]Confidence Data:[/blue]", end=" ")
+ console.print_json(data=response.confidence_data)
+
+ with success_fail_catch(
+ f":floppy_disk: Downloading results to `[green]{output_dir}[/green]`"
+ ):
+ response.download_results(output_dir=output_dir, force=force, unzip=unzip)
diff --git a/folding-studio/folding_studio/commands/predict/utils.py b/folding-studio/folding_studio/commands/predict/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..39876645bf22b6bf7004f96bf290205f03b79b43
--- /dev/null
+++ b/folding-studio/folding_studio/commands/predict/utils.py
@@ -0,0 +1,199 @@
+"""Shared utils for the predict command."""
+
+import json
+from datetime import datetime
+from pathlib import Path
+
+import typer
+from folding_studio_data_models import BatchPublication, MessageStatus, Publication
+from rich.markdown import Markdown
+
+from folding_studio.console import console
+from folding_studio.utils.data_model import BatchInputFile, SimpleInputFile
+
+
+def validate_source_path(path: Path) -> Path:
+ """Validate the prediction source path.
+
+ Args:
+ path (Path): Source path.
+
+ Raises:
+ typer.BadParameter: If the source is an empty directory.
+ typer.BadParameter: If the source is a directory containing unsupported files.
+ typer.BadParameter: If the source is an unsupported file.
+
+ Returns:
+ Path: The source.
+ """
+ supported_simple_prediction = tuple(item.value for item in SimpleInputFile)
+ supported_batch_prediction = tuple(item.value for item in BatchInputFile)
+
+ if path.is_dir():
+ if not any(path.iterdir()):
+ raise typer.BadParameter(f"The source directory `{path}` is empty.")
+
+ for file in path.iterdir():
+ if file.is_file():
+ if file.suffix not in supported_simple_prediction:
+ raise typer.BadParameter(
+ f"The source directory '{path}' contains unsupported files. "
+ f"Only {supported_simple_prediction} files are supported."
+ )
+
+ elif path.suffix not in supported_simple_prediction + supported_batch_prediction:
+ raise typer.BadParameter(
+ f"The source file '{path}' is not supported. "
+ f"Only {supported_simple_prediction + supported_batch_prediction} files are supported."
+ )
+ return path
+
+
+def validate_model_subset(model_subset: list[int]) -> list[int]:
+ """Validate the model_subset argument.
+
+ Args:
+ model_subset (list[int]): List of model subset requested.
+
+ Raises:
+ typer.BadParameter: If more than 5 model ids are specified.
+ typer.BadParameter: If model ids not between 1 and 5 (included).
+
+ Returns:
+ list[int]: List of model subset requested.
+ """
+ if len(model_subset) == 0:
+ return model_subset
+ elif len(model_subset) > 5:
+ raise typer.BadParameter(
+ f"--model_subset accept 5 model ids at most but `{len(model_subset)}` were specified."
+ )
+ elif min(model_subset) < 1 or max(model_subset) > 5:
+ raise typer.BadParameter(
+ "Model subset id out of supported range. --model_subset accepts ids between 1 and 5 (included)."
+ )
+ return model_subset
+
+
+def print_instructions_simple(response_json: dict, metadata_file: Path | None) -> None:
+ """Print pretty instructions after successful call to predict endpoint.
+
+ Args:
+ response_json (dict): Server json response
+ metadata_file: (Path | None): File path where job submission metadata are written.
+ """
+ pub = Publication.model_validate(response_json)
+ experiment_id = pub.message.experiment_id
+
+ if pub.status == MessageStatus.NOT_PUBLISHED_DONE:
+ console.print(
+ f"The results of your experiment {experiment_id} were found in the cache."
+ )
+ console.print("Use the following command to download the prediction results:")
+ md = f"""```shell
+ folding experiment results {experiment_id}
+ """
+ console.print(Markdown(md))
+ elif pub.status == MessageStatus.NOT_PUBLISHED_PENDING:
+ console.print(
+ f"Your experiment [bold]{experiment_id}[/bold] is [bold green]still running.[/bold green]"
+ )
+ console.print(
+ "Use the following command to check on its status at a later time."
+ )
+ md = f"""```shell
+ folding experiment status {experiment_id}
+ """
+ console.print(Markdown(md))
+ elif pub.status == MessageStatus.PUBLISHED:
+ console.print("[bold green]Experiment submitted successfully ![/bold green]")
+ console.print(f"The experiment id is [bold]{experiment_id}[/bold]")
+
+ if not metadata_file:
+ timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
+ metadata_file = f"simple_prediction_{timestamp}.json"
+ with open(metadata_file, "w") as f:
+ json.dump(response_json, f, indent=4)
+
+ console.print(
+ f"Prediction job metadata written to [bold]{metadata_file}[/bold]"
+ )
+ console.print("You can query your experiment status with the command:")
+ md = f"""```shell
+ folding experiment status {experiment_id}
+ """
+ console.print(Markdown(md))
+ else:
+ raise ValueError(f"Unknown publication status: {pub.status}")
+
+
+def print_instructions_batch(response_json: dict, metadata_file: Path | None) -> None:
+ """Print pretty instructions after successful call to batch predict endpoint.
+
+ Args:
+ response_json (dict): Server json response
+ metadata_file: (Path | None): File path where job submission metadata are written.
+ """
+ pub = BatchPublication.model_validate(response_json)
+ non_cached_exps = [
+ non_cached_pub.message.experiment_id for non_cached_pub in pub.publications
+ ]
+ cached_exps = [
+ cached_pub.message.experiment_id for cached_pub in pub.cached_publications
+ ]
+ done_exps = [
+ cached_pub.message.experiment_id
+ for cached_pub in pub.cached_publications
+ if cached_pub.status == MessageStatus.NOT_PUBLISHED_DONE
+ ]
+
+ if not metadata_file:
+ timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
+ metadata_file = f"batch_prediction_{timestamp}.json"
+ with open(metadata_file, "w") as f:
+ json.dump(response_json, f, indent=4)
+ console.print(f"Batch prediction job metadata written to {metadata_file}")
+ console.print("This file contains your experiments ids.")
+
+ if pub.cached:
+ console.print(
+ "The results of your experiments were [bold]all found in the cache.[/bold]"
+ )
+ console.print("The experiment ids are:")
+ console.print(f"{cached_exps}")
+ console.print(
+ "Use the `folding experiment status id` command to check on their status. For example:"
+ )
+ md = f"""```shell
+ folding experiment status {cached_exps[0]}
+ """
+ console.print(Markdown(md))
+ else:
+ console.print(
+ "[bold green]Batch prediction job submitted successfully ![/bold green]"
+ )
+
+ console.print(
+ f"The following experiments have been [bold]submitted[/bold] (see [bold]{metadata_file}[/bold] for the full list):"
+ )
+ console.print(non_cached_exps)
+ console.print(
+ "For example, you can query an experiment status with the command:"
+ )
+ md = f"""```shell
+ folding experiment status {non_cached_exps[0]}
+ """
+ console.print(Markdown(md))
+
+ if done_exps:
+ console.print(
+ f"The results of the following experiments [bold]were found in the cache[/bold] (see [bold]{metadata_file}[/bold] for the full list):"
+ )
+ console.print(done_exps)
+ console.print(
+ "Use the `folding experiment results id` command to download the prediction results. For example:"
+ )
+ md = f"""```shell
+ folding experiment results {done_exps[0]}
+ """
+ console.print(Markdown(md))
diff --git a/folding-studio/folding_studio/commands/utils.py b/folding-studio/folding_studio/commands/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c32a269beb23f3a5e5638ab73f8e82db4177186
--- /dev/null
+++ b/folding-studio/folding_studio/commands/utils.py
@@ -0,0 +1,193 @@
+"""Utilitaries methods for the commands module."""
+import hashlib
+from contextlib import contextmanager
+from pathlib import Path
+import pandas as pd
+from rich.progress import Progress, SpinnerColumn, TextColumn
+
+from folding_studio.api_call.upload_custom_files import (
+ CustomFileType,
+ upload_custom_files,
+)
+from folding_studio.console import console
+
+
+@contextmanager
+def success_fail_catch_spinner(message: str, spinner_name: str = "dots"):
+ """Wrapper around a rich progress spinner that adapt its state icon.
+
+ Args:
+ message (str): message to show supporting rich format.
+ spinner_name (str, optional): rich SpinnerColumn spinner_name attribute. Defaults to "dots".
+
+ Examples:
+ ```
+ with success_fail_catch_spinner("Running Task"):
+ ...
+
+ >>> Running Task ⠼ # spins as long as the context manager is running
+ # if no error raised then transforms into
+ >>> Running Task ✅
+ # otherwise transforms into
+ >>> Running Task ❌
+ An error occurred:
+ ...
+ ```
+ """
+ err = None
+ with Progress(
+ TextColumn("{task.description}"),
+ SpinnerColumn(spinner_name, finished_text=""),
+ console=console,
+ ) as progress:
+ task_id = progress.add_task(message, total=1)
+ # response = client.send_request(query)
+ try:
+ yield
+ progress.update(
+ task_id, completed=1, description=f"{message} :white_check_mark:"
+ )
+ except Exception as e:
+ progress.update(task_id, completed=1, description=f"{message} :x:")
+ err = e
+ # for coherent message order the print has to be made outside the Progress context manager
+ if err is not None:
+ console.print(f"An error occurred: {err}")
+ raise err
+
+
+@contextmanager
+def success_fail_catch_print(*args, **kwargs):
+ """Wrapper around rich `print` that adapts its state icon.
+
+ Examples:
+ ```
+ with success_fail_catch_print("Running Task..."):
+ ...
+
+ >>> Running Task...
+ # if no error raised then transforms into
+ >>> Running Task... ✅
+ # otherwise transforms into
+ >>> Running Task... ❌
+ An error occurred:
+ ...
+ ```
+ """
+ console.print(*args, **kwargs, end=" ")
+ try:
+ yield
+ console.print(":white_check_mark:")
+ except Exception as e:
+ console.print(":x:")
+ console.print(f"An error occurred: {e}")
+ raise e
+
+
+def a3m_to_aligned_pqt(directory: str) -> str:
+ """
+ Finds .a3m files in a directory and merges them into a single aligned Parquet file.
+
+ Args:
+ directory (str): Path to the directory containing .a3m files.
+
+ Returns:
+ str: The path to the saved Parquet file.
+
+ Raises:
+ ValueError: If the directory is invalid, if no records are found in a file,
+ or if query sequences differ among files.
+ """
+ dir_path = Path(directory)
+ if not dir_path.is_dir():
+ raise ValueError(f"{directory} is not a valid directory.")
+
+ mapped_files = {}
+ for file in dir_path.glob("*.a3m"):
+ dbname = file.stem.replace("_hits", "").replace("hits_", "")
+ source = dbname.lower() if dbname else "uniref90"
+ mapped_files[file] = source
+
+ def parse_a3m(file_path: Path, source: str) -> pd.DataFrame:
+ """
+ Parses a simple FASTA file.
+ The first record is flagged with source "query"; subsequent records use the provided source.
+ Uses the header both as a comment and (if desired) as a pairing key.
+ """
+ with open(file_path, "r") as f:
+ lines = f.read().splitlines()
+
+ records = []
+ header = None
+ seq_lines = []
+ for line in lines:
+ if line.startswith(">"):
+ if header is not None:
+ seq = "".join(seq_lines).strip()
+ record_source = "query" if not records else source
+ records.append(
+ {
+ "sequence": seq,
+ "source_database": record_source,
+ "pairing_key": header,
+ "comment": header,
+ }
+ )
+ header = line[1:].strip()
+ seq_lines = []
+ else:
+ seq_lines.append(line.strip())
+ if header is not None:
+ seq = "".join(seq_lines).strip()
+ record_source = "query" if not records else source
+ records.append(
+ {
+ "sequence": seq,
+ "source_database": record_source,
+ "pairing_key": header,
+ "comment": header,
+ }
+ )
+ if not records:
+ raise ValueError(f"No records found in {file_path}")
+ return pd.DataFrame.from_records(records)
+
+ dfs = {}
+ for file, source in mapped_files.items():
+ dfs[file] = parse_a3m(file, source)
+
+ query_set = {df.iloc[0]["sequence"] for df in dfs.values()}
+ if len(query_set) != 1:
+ raise ValueError("Query sequences differ among files.")
+
+ merged_df = None
+ for df in dfs.values():
+ if merged_df is None:
+ merged_df = df.iloc[0:1].copy()
+ merged_df = pd.concat([merged_df, df.iloc[1:]], ignore_index=True)
+
+ query_seq = merged_df.iloc[0]["sequence"]
+
+ def hash_sequence(seq: str) -> str:
+ return hashlib.sha256(seq.upper().encode()).hexdigest()
+
+ output_filename = f"{hash_sequence(query_seq)}.aligned.pqt"
+
+ dir_path.mkdir(exist_ok=True, parents=True)
+ out_path = dir_path / output_filename
+
+ merged_df.to_parquet(out_path, index=False)
+ return str(out_path)
+
+
+def process_uploaded_msas(msa_files, headers):
+ """
+ Uploads the given MSA files and returns a dictionary mapping file names to their uploaded values.
+ """
+ uploaded = upload_custom_files(
+ headers=headers, paths=msa_files, file_type=CustomFileType.MSA
+ )
+ msa_paths = {}
+ for f in msa_files:
+ msa_paths[f.name] = uploaded.get(str(f)) or uploaded.get(f.name)
+ return msa_paths
diff --git a/folding-studio/folding_studio/config.py b/folding-studio/folding_studio/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d5f363028a795d7a03f7ab35cd71fd5d410fe09
--- /dev/null
+++ b/folding-studio/folding_studio/config.py
@@ -0,0 +1,17 @@
+"""CLI configuration."""
+
+import os
+
+from dotenv import find_dotenv, load_dotenv
+
+load_dotenv(find_dotenv())
+
+FOLDING_API_KEY = os.environ.get("FOLDING_API_KEY", None)
+
+API_URL = os.environ.get(
+ "API_URL",
+ "https://production-fastapi-service-560808695349.europe-west4.run.app/",
+)
+
+REQUEST_TIMEOUT = int(os.environ.get("FOLDING_REQUEST_TIMEOUT", 3001))
+FOLDING_PROJECT_CODE = os.environ.get("FOLDING_PROJECT_CODE", None)
diff --git a/folding-studio/folding_studio/console.py b/folding-studio/folding_studio/console.py
new file mode 100644
index 0000000000000000000000000000000000000000..44e0986745971acac94e792e582aae64b7836d5b
--- /dev/null
+++ b/folding-studio/folding_studio/console.py
@@ -0,0 +1,5 @@
+"""Rich console module."""
+
+from rich.console import Console
+
+console = Console()
diff --git a/folding-studio/folding_studio/query/__init__.py b/folding-studio/folding_studio/query/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2bfdbb08014605d7b721fd2afd71c0b24604106
--- /dev/null
+++ b/folding-studio/folding_studio/query/__init__.py
@@ -0,0 +1,15 @@
+"""Query module."""
+
+from .base import Query
+from .boltz import BoltzQuery
+from .chai import ChaiQuery
+from .protenix import ProtenixQuery
+from .soloseq import SoloSeqQuery
+
+__all__ = [
+ "Query",
+ "BoltzQuery",
+ "ProtenixQuery",
+ "ChaiQuery",
+ "SoloSeqQuery",
+]
diff --git a/folding-studio/folding_studio/query/base.py b/folding-studio/folding_studio/query/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..470f70b566df21c5dfca1b0fe658e9a68ec34def
--- /dev/null
+++ b/folding-studio/folding_studio/query/base.py
@@ -0,0 +1,60 @@
+"""Base module for model prediction endpoint query."""
+
+from __future__ import annotations
+
+import json
+import logging
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import Any
+
+from folding_studio_data_models import FoldingModel
+from pydantic import BaseModel
+
+
+class Query(ABC):
+ """Interface to define folding job queries."""
+
+ MODEL: FoldingModel | None = None
+
+ @classmethod
+ @abstractmethod
+ def from_file(cls, path: str | Path, **kwargs) -> Query:
+ """Instantiates a Query object from a file."""
+ ...
+
+ @classmethod
+ @abstractmethod
+ def from_directory(cls, path: str | Path, **kwargs) -> Query:
+ """Instantiates a Query object from a directory."""
+ ...
+
+ @classmethod
+ @abstractmethod
+ def from_protein_sequence(cls, protein: str, **kwargs) -> Query:
+ """Instantiates a Query object from string representation of a protein."""
+ ...
+
+ @property
+ @abstractmethod
+ def payload(self) -> dict[str, Any]:
+ """Returns the payload to be sent in the POST request."""
+ ...
+
+ @property
+ @abstractmethod
+ def parameters(self) -> BaseModel:
+ """Parameters of the query."""
+ ...
+
+ def save_parameters(self, output_dir: Path) -> None:
+ """Writes the input parameters to a JSON file inside the output directory.
+
+ Args:
+ output_dir (Path): The directory where the inference parameters JSON file will be saved.
+ """
+ inference_parameters_path = output_dir / "query_parameters.json"
+ output_dir.mkdir(parents=True, exist_ok=True)
+ with inference_parameters_path.open("w", encoding="utf-8") as f:
+ json.dump(self.parameters.model_dump(mode="json"), f, indent=4)
+ logging.info(f"Input parameters written to {inference_parameters_path}")
diff --git a/folding-studio/folding_studio/query/boltz.py b/folding-studio/folding_studio/query/boltz.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b4074690565bb6fcf138a727b56bd96f0ed4c68
--- /dev/null
+++ b/folding-studio/folding_studio/query/boltz.py
@@ -0,0 +1,233 @@
+"""Boltz-1 query to prediction endpoint."""
+
+from __future__ import annotations
+
+from io import StringIO
+from pathlib import Path
+from typing import Any
+
+import yaml
+from folding_studio_data_models import FoldingModel
+from pydantic import BaseModel
+
+from folding_studio.commands.utils import process_uploaded_msas
+from folding_studio.query import Query
+from folding_studio.utils.fasta import validate_fasta
+from folding_studio.utils.headers import get_auth_headers
+from folding_studio.utils.path_helpers import validate_path
+
+
+class BoltzParameters(BaseModel):
+ """Boltz inference parameters."""
+
+ seed: int = 0
+ recycling_steps: int = 3
+ sampling_steps: int = 200
+ diffusion_samples: int = 1
+ step_scale: float = 1.638
+ msa_pairing_strategy: str = "greedy"
+ write_full_pae: bool = False
+ write_full_pde: bool = False
+ use_msa_server: bool = True
+ custom_msa_paths: dict[str, str] | None = None
+
+
+class BoltzQuery(Query):
+ """Boltz1 model query."""
+
+ MODEL = FoldingModel.BOLTZ
+
+ def __init__(
+ self,
+ fasta_dict: dict[str, str],
+ yaml_dict: dict[str, str],
+ query_name: str,
+ parameters: BoltzParameters = BoltzParameters(),
+ ):
+ self.fasta_dict = fasta_dict
+ self.yaml_dict = yaml_dict
+ self.query_name = query_name
+ self._parameters = parameters
+
+ @staticmethod
+ def _process_file(file_path: Path) -> tuple[dict[str, str], dict[str, str]]:
+ """Processes a single file and extracts its contents.
+
+ Args:
+ file_path (Path): Path to the file.
+
+ Returns:
+ tuple[dict[str, str], dict[str, str]]: A tuple containing FASTA and YAML dictionaries.
+
+ Raises:
+ ValueError: If the file format is unsupported.
+ """
+ fasta_dict = {}
+ yaml_dict = {}
+ if file_path.suffix in (".fasta", ".fa"):
+ fasta_content = validate_fasta(file_path, str_output=True)
+ fasta_dict = {file_path.stem: fasta_content}
+ elif file_path.suffix in (".yaml", ".yml"):
+ with file_path.open("r", encoding="utf-8") as f:
+ yaml_dict = {file_path.stem: yaml.safe_load(f)}
+ else:
+ raise ValueError(f"Unsupported format: {file_path.suffix}")
+ return fasta_dict, yaml_dict
+
+ @classmethod
+ def from_protein_sequence(
+ cls: BoltzQuery, sequence: str, query_name: str | None = None, **kwargs
+ ) -> BoltzQuery:
+ """Initialize a BoltzQuery from a str protein sequence.
+
+ Args:
+ sequence (str): The protein sequence in string format.
+ query_name (str | None, optional): User-defined query name. Defaults to None.
+ **kwargs: Additional parameters for the query.
+
+ Returns:
+ BoltzQuery
+ """
+ record = validate_fasta(StringIO(sequence))
+
+ custom_msa_paths = kwargs.pop("custom_msa_paths", None)
+ if custom_msa_paths:
+ kwargs["custom_msa_paths"] = cls._upload_custom_msa_files(custom_msa_paths)
+
+ query_name = (
+ query_name
+ if query_name is not None
+ else record.description.split("|", maxsplit=1)[0] # first tag
+ )
+ return cls(
+ fasta_dict={query_name: sequence}, yaml_dict={},
+ query_name=query_name,
+ parameters=BoltzParameters(**kwargs),
+ )
+
+ @classmethod
+ def from_file(
+ cls: BoltzQuery, path: str | Path, query_name: str | None = None, **kwargs
+ ) -> BoltzQuery:
+ """Initialize a BoltzQuery instance from a file.
+
+ Supported file format are:
+ - FASTA
+ - YAML
+
+ Args:
+ path (str | Path): Path to the file.
+ **kwargs: Additional parameters for the query.
+
+ Returns:
+ BoltzQuery: An instance of BoltzQuery.
+ """
+ path = validate_path(
+ path, is_file=True, file_suffix=(".fasta", ".fa", ".yaml", ".yml")
+ )
+ fasta_dict, yaml_dict = cls._process_file(path)
+ query_name = query_name or path.stem
+ custom_msa_paths = kwargs.pop("custom_msa_paths", None)
+ if custom_msa_paths:
+ kwargs["custom_msa_paths"] = cls._upload_custom_msa_files(custom_msa_paths)
+ return cls(
+ fasta_dict=fasta_dict,
+ yaml_dict=yaml_dict,
+ query_name=query_name,
+ parameters=BoltzParameters(**kwargs),
+ )
+
+ @classmethod
+ def from_directory(
+ cls: BoltzQuery, path: str | Path, query_name: str | None = None, **kwargs: Any
+ ) -> BoltzQuery:
+ """Initialize a BoltzQuery instance from a directory.
+
+ Supported file format in directory are:
+ - FASTA
+ - YAML
+
+ Args:
+ directory_path (Path): Path to the directory.
+ **kwargs: Additional parameters for the query.
+
+ Returns:
+ BoltzQuery: An instance of BoltzQuery.
+ """
+ custom_msa_paths = kwargs.pop("custom_msa_paths", None)
+ if custom_msa_paths:
+ kwargs["custom_msa_paths"] = cls._upload_custom_msa_files(custom_msa_paths)
+ path = validate_path(path, is_dir=True)
+ fasta_dict = {}
+ yaml_dict = {}
+ for file in (f for f in path.iterdir() if f.is_file()):
+ file_fasta_dict, file_yaml_dict = cls._process_file(file)
+ fasta_dict.update(file_fasta_dict)
+ yaml_dict.update(file_yaml_dict)
+
+ if not (fasta_dict or yaml_dict):
+ raise ValueError(f"No FASTA or YAML files found in directory '{path}'.")
+
+ query_name = query_name or path.name
+ return cls(
+ fasta_dict=fasta_dict,
+ yaml_dict=yaml_dict,
+ query_name=query_name,
+ parameters=BoltzParameters(**kwargs),
+ )
+
+ @property
+ def payload(self) -> dict[str, Any]:
+ """Payload to send to the prediction API endpoint."""
+ return {
+ "fasta_files": self.fasta_dict,
+ "yaml_files": self.yaml_dict,
+ "parameters": self.parameters.model_dump(mode="json"),
+ }
+
+ @property
+ def parameters(self) -> BoltzParameters:
+ """Parameters of the query."""
+ return self._parameters
+
+ @staticmethod
+ def _upload_custom_msa_files(
+ source: str, headers: str | None = None
+ ) -> dict[str, str]:
+ """Reads MSA files from a file or directory and uploads them to GCS.
+
+ Args:
+ source (str): Path to an .a3m or .csv file, or a directory containing such files.
+ headers (str | None, optional): GCP authentication headers. Defaults to None.
+
+ Raises:
+ ValueError: If the file has an unsupported extension.
+ ValueError: If a directory contains no .a3m or .csv files.
+
+ Returns:
+ dict[str, str]: A mapping of uploaded file names to their GCS URLs.
+ """
+ headers = headers or get_auth_headers()
+ source_path = validate_path(source)
+ valid_extensions = {".a3m", ".csv"} # Allow both a3m and csv files
+
+ # Process if source is a file
+ if source_path.is_file():
+ if source_path.suffix not in valid_extensions:
+ raise ValueError(
+ f"Invalid file type: {source_path.suffix}. Expected one of {valid_extensions}."
+ )
+ return process_uploaded_msas([source_path], headers)
+
+ # Process if source is a directory
+ elif source_path.is_dir():
+ valid_files = [
+ file
+ for file in source_path.iterdir()
+ if file.suffix in valid_extensions
+ ]
+ if not valid_files:
+ raise ValueError(
+ f"Directory '{source}' contains no valid files with extensions {valid_extensions}."
+ )
+ return process_uploaded_msas(valid_files, headers)
diff --git a/folding-studio/folding_studio/query/chai.py b/folding-studio/folding_studio/query/chai.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcd4afb5c5fd98fd5f599bc664318464beecd05b
--- /dev/null
+++ b/folding-studio/folding_studio/query/chai.py
@@ -0,0 +1,243 @@
+"""Query module for Chai prediction endpoint."""
+
+from __future__ import annotations
+
+import shutil
+import tempfile
+from io import StringIO
+from itertools import chain
+from pathlib import Path
+from typing import Any
+
+from folding_studio_data_models import FoldingModel
+from pydantic import BaseModel, field_validator
+
+from folding_studio.commands.utils import (
+ a3m_to_aligned_pqt,
+ process_uploaded_msas,
+)
+from folding_studio.query import Query
+from folding_studio.utils.fasta import validate_fasta
+from folding_studio.utils.headers import get_auth_headers
+from folding_studio.utils.path_helpers import validate_path
+
+
+class ChaiParameters(BaseModel):
+    """Chai1 inference parameters."""
+
+    seed: int = 0
+    num_trunk_recycles: int = 3
+    num_diffn_timesteps: int = 200
+    recycle_msa_subsample: int = 0
+    num_trunk_samples: int = 1
+    restraints: str | None = None
+    use_msa_server: bool = False
+    use_templates_server: bool = False
+    custom_msa_paths: dict[str, str] | None = None
+
+    @field_validator("restraints", mode="before")
+    def read_restraints(
+        cls: ChaiParameters, restraints: str | Path | None
+    ) -> str | None:
+        """Reads restraints from a CSV file and returns its content as a string.
+
+        Accepts None (no restraints), or a path to a .csv file whose stripped
+        text content replaces the path value.
+        """
+        if restraints is None:
+            return None
+        # (".csv") is just a parenthesized string, not a tuple; make the
+        # single-element tuple explicit so the suffix check is unambiguous.
+        path = validate_path(restraints, is_file=True, file_suffix=(".csv",))
+        with path.open(newline="", encoding="utf-8") as csvfile:
+            return csvfile.read().strip()
+
+
+class ChaiQuery(Query):
+    """Chai1 model query."""
+
+    MODEL = FoldingModel.CHAI
+
+    def __init__(
+        self,
+        fasta_files: dict[str, str],
+        query_name: str,
+        parameters: ChaiParameters = ChaiParameters(),
+    ):
+        """Initializes a ChaiQuery instance.
+
+        Args:
+            fasta_files (dict[str, str]): Mapping of FASTA name to FASTA content.
+            query_name (str): User-defined query name.
+            parameters (ChaiParameters, optional): Inference parameters.
+                Defaults to ChaiParameters().
+                NOTE(review): the default is a single shared instance evaluated
+                at class-definition time — confirm it is never mutated in place.
+
+        Raises:
+            ValueError: If `fasta_files` is empty.
+        """
+        if not fasta_files:
+            raise ValueError("FASTA files dictionary cannot be empty.")
+
+        self.fasta_files = fasta_files
+        self.query_name = query_name
+        self._parameters = parameters
+
+    @classmethod
+    def from_protein_sequence(
+        cls: ChaiQuery, sequence: str, query_name: str | None = None, **kwargs
+    ) -> ChaiQuery:
+        """Build a ChaiQuery from a str protein sequence.
+
+        Args:
+            sequence (str): Protein amino-acid sequence
+            query_name (str | None, optional): User-defined query name. Defaults to None.
+
+        Raises:
+            NotAMonomer: If the sequence is not a monomer complex.
+
+        Returns:
+            ChaiQuery
+        """
+        record = validate_fasta(StringIO(sequence))
+
+        # Upload any user-supplied MSAs and swap the local path for the
+        # uploaded locations before building the parameters.
+        msa_source = kwargs.pop("custom_msa_paths", None)
+        if msa_source:
+            kwargs["custom_msa_paths"] = cls._upload_custom_msa_files(msa_source)
+
+        if query_name is None:
+            # Fall back to the first "|"-separated tag of the record description.
+            query_name = record.description.split("|", maxsplit=1)[0]
+        return cls(
+            fasta_files={query_name: sequence},
+            query_name=query_name,
+            parameters=ChaiParameters(**kwargs),
+        )
+
+ @classmethod
+ def from_file(
+ cls: ChaiQuery, path: str | Path, query_name: str | None = None, **kwargs
+ ) -> ChaiQuery:
+ """Initialize a ChaiQuery instance from a file.
+
+ Supported file format are:
+ - FASTA
+
+ Args:
+ path (str | Path): Path of the FASTA file.
+ query_name (str | None, optional): User-defined query name. Defaults to None.
+
+
+ Returns:
+ ChaiQuery
+ """
+ path = validate_path(path, is_file=True, file_suffix=(".fasta", ".fa"))
+
+ custom_msa_paths = kwargs.pop("custom_msa_paths", None)
+ if custom_msa_paths:
+ kwargs["custom_msa_paths"] = cls._upload_custom_msa_files(custom_msa_paths)
+
+ query_name = query_name or path.stem
+ return cls(
+ fasta_files={path.stem: validate_fasta(path, str_output=True)},
+ query_name=query_name,
+ parameters=ChaiParameters(**kwargs),
+ )
+
+    @classmethod
+    def from_directory(
+        cls: ChaiQuery, path: str | Path, query_name: str | None = None, **kwargs
+    ) -> ChaiQuery:
+        """Initialize a ChaiQuery instance from a directory.
+
+        Supported file format in directory are:
+        - FASTA
+
+        Args:
+            path (str | Path): Path to a directory of FASTA files.
+            query_name (str | None, optional): User-defined query name. Defaults to None.
+
+        Raises:
+            ValueError: If no FASTA file are present in the directory.
+
+        Returns:
+            ChaiQuery
+        """
+        path = validate_path(path, is_dir=True)
+        custom_msa_paths = kwargs.pop("custom_msa_paths", None)
+        if custom_msa_paths:
+            # Leftover debug print of the uploaded paths removed.
+            kwargs["custom_msa_paths"] = cls._upload_custom_msa_files(custom_msa_paths)
+        fasta_files = {
+            file.stem: validate_fasta(file, str_output=True)
+            for file in chain(path.glob("*.fasta"), path.glob("*.fa"))
+        }
+        if not fasta_files:
+            raise ValueError(f"No FASTA files found in directory '{path}'.")
+        query_name = query_name or path.name
+        return cls(
+            fasta_files=fasta_files,
+            query_name=query_name,
+            parameters=ChaiParameters(**kwargs),
+        )
+
+ @property
+ def payload(self) -> dict[str, Any]:
+ """Payload to send to the prediction API endpoint."""
+ return {
+ "fasta_files": self.fasta_files,
+ "use_msa_server": self.parameters.use_msa_server,
+ "use_templates_server": self.parameters.use_templates_server,
+ "num_trunk_recycles": self.parameters.num_trunk_recycles,
+ "seed": self.parameters.seed,
+ "num_diffn_timesteps": self.parameters.num_diffn_timesteps,
+ "restraints": self.parameters.restraints,
+ "recycle_msa_subsample": self.parameters.recycle_msa_subsample,
+ "num_trunk_samples": self.parameters.num_trunk_samples,
+ "custom_msa_paths": self.parameters.custom_msa_paths,
+ }
+
+ @property
+ def parameters(self) -> ChaiParameters:
+ """Parameters of the query."""
+ return self._parameters
+
+    @staticmethod
+    def _upload_custom_msa_files(
+        source: str, headers: dict[str, str] | None = None
+    ) -> dict[str, str]:
+        """Read .a3m or .aligned.pqt MSA files from a file or directory and upload them to GCS.
+
+        Plain .a3m inputs are first converted to the .aligned.pqt format via
+        `a3m_to_aligned_pqt`, staged in a temporary directory.
+
+        Args:
+            source (str): Path to an .a3m or .aligned.pqt file or a directory containing .a3m or .aligned.pqt files
+            headers (dict[str, str] | None, optional): Authentication headers for
+                the upload. Defaults to None, in which case `get_auth_headers()`
+                is used.
+
+        Raises:
+            ValueError: If file has unsupported extension.
+            ValueError: If directory has no supported file.
+
+        Returns:
+            dict[str, str]: A mapping of uploaded file names to their GCS URLs.
+        """
+
+        headers = headers or get_auth_headers()
+        source_path = validate_path(source)
+
+        # Process if source is a file.
+        if source_path.is_file():
+            if source_path.suffix == ".a3m":
+                # Convert the single .a3m to .aligned.pqt in a scratch directory
+                # before uploading.
+                with tempfile.TemporaryDirectory() as tmpdir:
+                    tmp_path = Path(tmpdir)
+                    shutil.copy(source_path, tmp_path / source_path.name)
+                    pqt_file = a3m_to_aligned_pqt(str(tmp_path))
+                    return process_uploaded_msas([Path(pqt_file)], headers)
+            elif source_path.name.endswith(".aligned.pqt"):
+                return process_uploaded_msas([source_path], headers)
+            else:
+                raise ValueError(
+                    f"Invalid file type: {source_path.suffix}. Expected '.a3m' or a file ending with '.aligned.pqt'."
+                )
+
+        # Process if source is a directory.
+        elif source_path.is_dir():
+            # Prefer already-converted .aligned.pqt files when present;
+            # NOTE(review): if both kinds exist, .a3m files are ignored — confirm intended.
+            pqt_files = list(source_path.glob("*.aligned.pqt"))
+            if pqt_files:
+                return process_uploaded_msas(pqt_files, headers)
+
+            a3m_files = list(source_path.glob("*.a3m"))
+            if not a3m_files:
+                raise ValueError(
+                    f"Directory '{source}' contains no files ending with '.aligned.pqt' or '.a3m'."
+                )
+            with tempfile.TemporaryDirectory() as tmpdir:
+                tmp_path = Path(tmpdir)
+                for file in a3m_files:
+                    shutil.copy(file, tmp_path / file.name)
+                pqt_file = a3m_to_aligned_pqt(str(tmp_path))
+                return process_uploaded_msas([Path(pqt_file)], headers)
diff --git a/folding-studio/folding_studio/query/protenix.py b/folding-studio/folding_studio/query/protenix.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ccf7b014bc76a8cbe5e4a52e55c19a9a4adf8f0
--- /dev/null
+++ b/folding-studio/folding_studio/query/protenix.py
@@ -0,0 +1,143 @@
+"""Query module for Protenix prediction endpoint."""
+
+from __future__ import annotations
+
+from io import StringIO
+from itertools import chain
+from pathlib import Path
+from typing import Any
+
+from folding_studio_data_models import FoldingModel
+from pydantic import BaseModel, Field
+
+from folding_studio.query import Query
+from folding_studio.utils.fasta import validate_fasta
+from folding_studio.utils.path_helpers import validate_path
+
+
+class ProtenixParameters(BaseModel):
+    """Protenix inference parameters."""
+
+    # Exposed to callers under the alias "seed"; stored as a string (numeric
+    # inputs are coerced via coerce_numbers_to_str) and sent as "seeds".
+    seeds: str = Field(alias="seed", default="0", coerce_numbers_to_str=True)
+    use_msa_server: bool = True
+
+
+class ProtenixQuery(Query):
+ """Protenix model query."""
+
+ MODEL = FoldingModel.PROTENIX
+
+ def __init__(
+ self,
+ fasta_files: dict[str, Any],
+ query_name: str,
+ parameters: ProtenixParameters = ProtenixParameters(),
+ ):
+ if not fasta_files:
+ raise ValueError("FASTA files dictionary cannot be empty.")
+
+ self.fasta_files = fasta_files
+ self.query_name = query_name
+ self._parameters = parameters
+
+ @classmethod
+ def from_protein_sequence(
+ cls, sequence: str, query_name: str | None = None, **kwargs
+ ) -> ProtenixQuery:
+ """Initialize a ProtenixQuery from a str protein sequence.
+
+ Args:
+ sequence (str): The protein sequence in string format.
+ query_name (str | None, optional): User-defined query name. Defaults to None.
+ seed (int, optional): Random seed. Defaults to 0.
+ use_msa_server (bool, optional): Use the MSA server for inference. Defaults to False.
+
+ Returns:
+ ProtenixQuery: An instance of ProtenixQuery with the sequence stored as a FASTA file.
+ """
+ record = validate_fasta(StringIO(sequence))
+
+ query_name = (
+ query_name
+ if query_name is not None
+ else record.description.split("|", maxsplit=1)[0] # first tag
+ )
+ return cls(
+ fasta_files={query_name: sequence},
+ query_name=query_name,
+ parameters=ProtenixParameters(**kwargs),
+ )
+
+ @classmethod
+ def from_file(
+ cls: ProtenixQuery, path: str | Path, query_name: str | None = None, **kwargs
+ ) -> ProtenixQuery:
+ """Initialize a ProtenixQuery instance from a file.
+
+ Supported file format are:
+ - FASTA
+
+ Args:
+ path (str | Path): Path of the FASTA file.
+ query_name (str | None, optional): User-defined query name. Defaults to None.
+ seed (int, optional): Random seed. Defaults to 0.
+ use_msa_server (bool, optional): Use the MSA server for inference. Defaults to False.
+
+ Returns:
+ ProtenixQuery
+ """
+ path = validate_path(path, is_file=True, file_suffix=(".fasta", ".fa"))
+ query_name = query_name or path.stem
+ return cls(
+ fasta_files={path.stem: validate_fasta(path, str_output=True)},
+ query_name=query_name,
+ parameters=ProtenixParameters(**kwargs),
+ )
+
+    @classmethod
+    def from_directory(
+        cls: ProtenixQuery, path: str | Path, query_name: str | None = None, **kwargs
+    ) -> ProtenixQuery:
+        """Initialize a ProtenixQuery instance from a directory.
+
+        Supported file format in directory are:
+        - FASTA
+
+        Args:
+            path (str | Path): Path to a directory of FASTA files.
+            query_name (str | None, optional): User-defined query name. Defaults to None.
+            seed (int, optional): Random seed. Defaults to 0.
+            use_msa_server (bool, optional): Use the MSA server for inference. Defaults to False.
+
+        Raises:
+            ValueError: If no FASTA file are present in the directory.
+
+        Returns:
+            ProtenixQuery
+        """
+        directory = validate_path(path, is_dir=True)
+        # Collect every *.fasta / *.fa file, keyed by its stem.
+        fasta_files = {
+            fasta.stem: validate_fasta(fasta, str_output=True)
+            for fasta in chain(directory.glob("*.fasta"), directory.glob("*.fa"))
+        }
+        if not fasta_files:
+            raise ValueError(f"No FASTA files found in directory '{directory}'.")
+        return cls(
+            fasta_files=fasta_files,
+            query_name=query_name or directory.name,
+            parameters=ProtenixParameters(**kwargs),
+        )
+
+    @property
+    def payload(self) -> dict[str, Any]:
+        """Payload to send to the prediction API endpoint."""
+        return {
+            "fasta_files": self.fasta_files,
+            "use_msa_server": self.parameters.use_msa_server,
+            # The API field is plural ("seeds") even though the CLI-facing
+            # parameter alias is "seed"; the value is a string.
+            "seeds": self.parameters.seeds,
+        }
+
+ @property
+ def parameters(self) -> ProtenixParameters:
+ """Parameters of the query."""
+ return self._parameters
diff --git a/folding-studio/folding_studio/query/soloseq.py b/folding-studio/folding_studio/query/soloseq.py
new file mode 100644
index 0000000000000000000000000000000000000000..c72100389207c4d6890aab017a48e89c2cfd580e
--- /dev/null
+++ b/folding-studio/folding_studio/query/soloseq.py
@@ -0,0 +1,174 @@
+"""Query module for SoloSeq prediction endpoint."""
+
+from __future__ import annotations
+
+from io import StringIO
+from pathlib import Path
+from typing import Any
+
+from folding_studio_data_models import FoldingModel
+from pydantic import BaseModel, Field
+
+from folding_studio.query import Query
+from folding_studio.utils.fasta import validate_fasta
+from folding_studio.utils.path_helpers import validate_path
+
+MAX_AA_LENGTH = 1024
+
+
+class SoloSeqParameters(BaseModel):
+ """SoloSeq inference parameters."""
+
+ data_random_seed: int = Field(alias="seed", default=0)
+ skip_relaxation: bool = False
+ subtract_plddt: bool = False
+
+
+class SoloSeqQuery(Query):
+ """SoloSeq model query."""
+
+ MODEL = FoldingModel.SOLOSEQ
+
+ def __init__(
+ self,
+ fasta_files: dict[str, str],
+ query_name: str,
+ parameters: SoloSeqParameters = SoloSeqParameters(),
+ ):
+ if not fasta_files:
+ raise ValueError("FASTA files dictionary cannot be empty.")
+
+ self.fasta_files = fasta_files
+ self.query_name = query_name
+ self._parameters = parameters
+
+    def __eq__(self, value):
+        """Structural equality on FASTA contents, query name and parameters."""
+        if not isinstance(value, SoloSeqQuery):
+            # Defer to the other operand's __eq__ instead of answering False
+            # outright (standard rich-comparison protocol).
+            return NotImplemented
+        return (
+            self.fasta_files == value.fasta_files
+            and self.query_name == value.query_name
+            and self.parameters == value.parameters
+        )
+
+ @classmethod
+ def from_protein_sequence(
+ cls: SoloSeqQuery, sequence: str, query_name: str | None = None, **kwargs
+ ) -> SoloSeqQuery:
+ """Initialize a SoloSeqQuery instance from a str protein sequence.
+
+ Args:
+ sequence (str): Protein amino-acid sequence
+ query_name (str | None, optional): User-defined query name. Defaults to None.
+ seed (int, optional): Random seed. Defaults to 0.
+ skip_relaxation (bool, optional): Run the skip_relaxation process.
+ Defaults to False.
+ subtract_plddt (bool, optional): Output (100 - pLDDT) instead
+ of the pLDDT itself. Defaults to False.
+
+ Raises:
+ NotAMonomer: If the sequence is not a monomer complex.
+
+ Returns:
+ SoloSeqQuery
+ """
+ record = validate_fasta(
+ StringIO(sequence), allow_multimer=False, max_aa_length=MAX_AA_LENGTH
+ )
+ query_name = (
+ query_name
+ if query_name is not None
+ else record.description.split("|", maxsplit=1)[0] # first tag
+ )
+ return cls(
+ fasta_files={query_name: sequence},
+ query_name=query_name,
+ parameters=SoloSeqParameters(**kwargs),
+ )
+
+ @classmethod
+ def from_file(
+ cls: SoloSeqQuery, path: str | Path, query_name: str | None = None, **kwargs
+ ) -> SoloSeqQuery:
+ """Initialize a SoloSeqQuery instance from a file.
+
+ Supported file format are:
+ - FASTA
+
+ Args:
+ path (str | Path): Path of the FASTA file.
+ query_name (str | None, optional): User-defined query name. Defaults to None.
+ seed (int, optional): Random seed. Defaults to 0.
+ skip_relaxation (bool, optional): Run the skip_relaxation process.
+ Defaults to False.
+ subtract_plddt (bool, optional): Output (100 - pLDDT) instead
+ of the pLDDT itself. Defaults to False.
+
+ Raises:
+ NotAMonomer: If the FASTA file contains non-monomer complex.
+
+ Returns:
+ SoloSeqQuery
+ """
+ path = validate_path(path, is_file=True, file_suffix=(".fasta", ".fa"))
+ record = validate_fasta(path, allow_multimer=False, max_aa_length=MAX_AA_LENGTH)
+ query_name = query_name or path.stem
+ return cls(
+ fasta_files={path.stem: record.format("fasta").strip()},
+ query_name=query_name,
+ parameters=SoloSeqParameters(**kwargs),
+ )
+
+    @classmethod
+    def from_directory(
+        cls: SoloSeqQuery, path: str | Path, query_name: str | None = None, **kwargs
+    ) -> SoloSeqQuery:
+        """Initialize a SoloSeqQuery instance from a directory.
+
+        Supported file format in directory are:
+        - FASTA
+
+        Args:
+            path (str | Path): Path to a directory of FASTA files.
+            query_name (str | None, optional): User-defined query name. Defaults to None.
+            seed (int, optional): Random seed. Defaults to 0.
+            skip_relaxation (bool, optional): Run the skip_relaxation process.
+                Defaults to False.
+            subtract_plddt (bool, optional): Output (100 - pLDDT) instead
+                of the pLDDT itself. Defaults to False.
+
+        Raises:
+            ValueError: If no FASTA file are present in the directory.
+            NotAMonomer: If a FASTA file in the directory contains non monomer complex.
+
+        Returns:
+            SoloSeqQuery
+        """
+        directory = validate_path(path, is_dir=True)
+        fasta_files = {}
+        for entry in directory.iterdir():
+            if entry.suffix not in (".fasta", ".fa"):
+                continue
+            # Each file must hold a single monomer sequence within the
+            # supported length limit.
+            record = validate_fasta(
+                entry, allow_multimer=False, max_aa_length=MAX_AA_LENGTH
+            )
+            fasta_files[entry.stem] = record.format("fasta").strip()
+        if not fasta_files:
+            raise ValueError(f"No FASTA files found in directory '{directory}'.")
+        return cls(
+            fasta_files=fasta_files,
+            query_name=query_name or directory.name,
+            parameters=SoloSeqParameters(**kwargs),
+        )
+
+ @property
+ def payload(self) -> dict[str, Any]:
+ """Payload to send to the prediction API endpoint."""
+ return {
+ "fasta_files": self.fasta_files,
+ "parameters": self.parameters.model_dump(mode="json"),
+ }
+
+ @property
+ def parameters(self) -> SoloSeqParameters:
+ """Parameters of the query."""
+ return self._parameters
diff --git a/folding-studio/folding_studio/utils/__init__.py b/folding-studio/folding_studio/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/folding-studio/folding_studio/utils/data_model.py b/folding-studio/folding_studio/utils/data_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..89384bc6b6c8f73d347b1d206cbe022f43b5a874
--- /dev/null
+++ b/folding-studio/folding_studio/utils/data_model.py
@@ -0,0 +1,296 @@
+"""API entries data model."""
+
+from __future__ import annotations
+
+import csv
+import json
+import tempfile
+from ast import literal_eval
+from enum import Enum
+from pathlib import Path
+from typing import List
+
+import cloudpathlib
+from folding_studio_data_models import CustomFileType, FeatureMode
+from folding_studio_data_models.content import TemplateMaskCollection
+from folding_studio_data_models.exceptions import (
+ TemplatesMasksSettingsError,
+)
+from pydantic import BaseModel, ConfigDict, model_validator
+from rich import print # pylint:disable=redefined-builtin
+from typing_extensions import Self
+
+from folding_studio.api_call.upload_custom_files import upload_custom_files
+from folding_studio.utils.file_helpers import (
+ partition_template_pdb_from_file,
+)
+from folding_studio.utils.headers import get_auth_headers
+
+
+class SimpleInputFile(str, Enum):
+ """Supported simple prediction file source extensions."""
+
+ FASTA = ".fasta"
+
+
+class BatchInputFile(str, Enum):
+ """Supported batch prediction file source extensions."""
+
+ CSV = ".csv"
+ JSON = ".json"
+
+
+class PredictRequestParams(BaseModel):
+ """Prediction parameters model."""
+
+ ignore_cache: bool
+ template_mode: FeatureMode
+ custom_template_ids: List[str]
+ msa_mode: FeatureMode
+ max_msa_clusters: int
+ max_extra_msa: int
+ gap_trick: bool
+ num_recycle: int
+ random_seed: int
+ model_subset: set[int]
+
+ model_config = ConfigDict(protected_namespaces=())
+
+
+class MSARequestParams(BaseModel):
+ """MSA parameters model."""
+
+ ignore_cache: bool
+ msa_mode: FeatureMode
+
+
+class PredictRequestCustomFiles(BaseModel):
+ """Prediction custom files model."""
+
+ templates: List[Path | str]
+ msas: List[Path]
+ initial_guess_files: List[Path] | None = None
+ templates_masks_files: List[Path] | None = None
+ uploaded: bool = False
+ _local_to_uploaded: dict | None = None
+
+ @model_validator(mode="after")
+ def _check_templates_and_masks_content(self) -> Self:
+ """Checks if templates used by mask are being uploaded."""
+ if not self.templates_masks_files:
+ return self
+
+ custom_templates_names = [Path(m).name for m in self.templates]
+ for tm_file in self.templates_masks_files:
+ tm_collection = TemplateMaskCollection.model_validate_json(
+ tm_file.read_text()
+ )
+ if not (
+ all(
+ tm.template_name in custom_templates_names
+ for tm in tm_collection.templates_masks
+ )
+ ):
+ err = "Templates files are missing. Check your input command."
+ raise TemplatesMasksSettingsError(err)
+ return self
+
+ @classmethod
+ def _from_json_batch_file(cls, batch_jobs_file: Path) -> PredictRequestCustomFiles:
+ """
+ Reads a JSON batch jobs file and extracts custom templates and MSAs.
+
+ Args:
+ batch_jobs_file (Path): The path to the batch jobs file in JSON format.
+
+ Returns:
+ An instance of PredictRequestCustomFiles.
+ """
+ custom_templates = []
+ custom_msas = []
+ initial_guess_files = []
+ templates_masks_files = []
+
+ jobs = json.loads(batch_jobs_file.read_text())
+ for req in jobs["requests"]:
+ tmpl = req["parameters"].get("custom_templates", [])
+ custom_templates.extend(tmpl)
+
+ msa = req["parameters"].get("custom_msas", [])
+ custom_msas.extend(msa)
+
+ ig = req["parameters"].get("initial_guess_file")
+ if ig:
+ initial_guess_files.append(ig)
+
+ tm = req["parameters"].get("templates_masks_file")
+ if tm:
+ templates_masks_files.append(tm)
+
+ return cls(
+ templates=custom_templates,
+ msas=custom_msas,
+ initial_guess_files=initial_guess_files,
+ templates_masks_files=templates_masks_files,
+ )
+
+    @classmethod
+    def _from_csv_batch_file(cls, batch_jobs_file: Path) -> PredictRequestCustomFiles:
+        """
+        Reads a CSV batch jobs file and extracts custom templates and MSAs.
+
+        Args:
+            batch_jobs_file (Path): The path to the batch jobs file in CSV format.
+
+        Returns:
+            An instance of PredictRequestCustomFiles.
+        """
+        custom_templates = []
+        custom_msas = []
+        initial_guess_files = []
+        templates_masks_files = []
+
+        with batch_jobs_file.open("r") as file:
+            jobs_reader = csv.DictReader(
+                file,
+                quotechar='"',
+                delimiter=",",
+                quoting=csv.QUOTE_ALL,
+            )
+            for row in jobs_reader:
+                # List-valued columns are stored as Python literals
+                # (e.g. "['a.cif', 'b.cif']"); parse them safely.
+                tmpl = row.get("custom_templates")
+                if tmpl:
+                    custom_templates.extend(literal_eval(tmpl))
+
+                msa = row.get("custom_msas")
+                if msa:
+                    custom_msas.extend(literal_eval(msa))
+
+                # Scalar columns: append the single value (was extend([x])).
+                ig = row.get("initial_guess_file")
+                if ig:
+                    initial_guess_files.append(ig)
+
+                tm = row.get("templates_masks_file")
+                if tm:
+                    templates_masks_files.append(tm)
+        return cls(
+            templates=custom_templates,
+            msas=custom_msas,
+            initial_guess_files=initial_guess_files,
+            templates_masks_files=templates_masks_files,
+        )
+
+ @classmethod
+ def from_batch_jobs_file(cls, batch_jobs_file: Path) -> PredictRequestCustomFiles:
+ """Creates a PredictRequestCustomFiles instance from a batch jobs file (CSV or JSON).
+
+ This function reads a batch jobs file, resolves file paths for custom templates and MSAs,
+ and returns a PredictRequestCustomFiles object.
+
+ Args:
+ batch_jobs_file (Path): The path to the batch jobs file. Must be a CSV or JSON file.
+
+ Returns:
+ PredictRequestCustomFiles: An instance containing the custom templates and MSAs.
+
+ Raises:
+ ValueError: If the file is not a CSV or JSON file.
+ """
+ if batch_jobs_file.suffix == BatchInputFile.CSV:
+ return cls._from_csv_batch_file(batch_jobs_file)
+ elif batch_jobs_file.suffix == BatchInputFile.JSON:
+ return cls._from_json_batch_file(batch_jobs_file)
+ else:
+ raise ValueError(
+ f"Unsupported file type {batch_jobs_file.suffix}: {batch_jobs_file}"
+ )
+
+    def upload(self) -> dict:
+        """Upload local custom paths to GCP through an API request.
+
+        Idempotent: a second call skips the upload and returns the mapping
+        produced by the first call.
+
+        Returns:
+            A dict mapping local to uploaded files path.
+        """
+        if self.uploaded:
+            print("Custom files already uploaded, skipping upload.")
+            return self._local_to_uploaded
+
+        local_to_uploaded = {}
+
+        headers = get_auth_headers()
+        if len(self.templates) > 0:
+            # 4-char PDB codes are resolved server-side; only real files
+            # are uploaded.
+            _, templates_to_upload = partition_template_pdb_from_file(
+                custom_templates=self.templates
+            )
+            filename_to_gcs_path = upload_custom_files(
+                headers=headers,
+                paths=[Path(t) for t in templates_to_upload],
+                file_type=CustomFileType.TEMPLATE,
+            )
+            self.templates = list(filename_to_gcs_path.values())
+            local_to_uploaded.update(filename_to_gcs_path)
+
+        if len(self.msas) > 0:
+            filename_to_gcs_path = upload_custom_files(
+                headers=headers,
+                paths=[Path(m) for m in self.msas],
+                file_type=CustomFileType.MSA,
+            )
+            self.msas = list(filename_to_gcs_path.values())
+            local_to_uploaded.update(filename_to_gcs_path)
+
+        if self.initial_guess_files:
+            # (The previous inline "if ... else ..." here was dead code: this
+            # branch only runs when initial_guess_files is truthy.)
+            filename_to_gcs_path = upload_custom_files(
+                headers=headers,
+                paths=[Path(ig) for ig in self.initial_guess_files],
+                file_type=CustomFileType.INITIAL_GUESS,
+            )
+            self.initial_guess_files = list(filename_to_gcs_path.values())
+            local_to_uploaded.update(filename_to_gcs_path)
+
+        if self.templates_masks_files:
+            # Replace content of tm files to match the uploaded template file
+            new_tm_files = _replace_tm_file_template_content(
+                templates_masks_files=self.templates_masks_files,
+                local_to_uploaded=local_to_uploaded,
+            )
+            filename_to_gcs_path = upload_custom_files(
+                headers=headers,
+                paths=new_tm_files.values(),
+                file_type=CustomFileType.TEMPLATE_MASK,
+            )
+            for local_name, tmp_path in new_tm_files.items():
+                new_tm_files[local_name] = filename_to_gcs_path[str(tmp_path)]
+            self.templates_masks_files = list(new_tm_files.values())
+            local_to_uploaded.update(new_tm_files)
+
+        self.uploaded = True
+        self._local_to_uploaded = local_to_uploaded
+        return local_to_uploaded
+
+
+def _replace_tm_file_template_content(
+ templates_masks_files: List[Path], local_to_uploaded: dict
+):
+ """Helper function to replace the template name in TM files."""
+ new_tm_files = {}
+ for tm in templates_masks_files:
+ mask_content = tm.read_text()
+ for (
+ template,
+ uploaded_file,
+ ) in local_to_uploaded.items():
+ mask_content = mask_content.replace(
+ template.split("/")[-1],
+ cloudpathlib.CloudPath(uploaded_file).name,
+ )
+ # Get the default temporary directory
+ # and write a new tm file which contains the uploaded template file name
+ temp_dir = tempfile.gettempdir()
+ temp_file_path = Path(temp_dir) / tm.name
+ temp_file_path.write_text(mask_content)
+ new_tm_files[str(tm)] = temp_file_path
+ return new_tm_files
diff --git a/folding-studio/folding_studio/utils/exceptions.py b/folding-studio/folding_studio/utils/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..0df4d3c073b4c073d31de8988c62869d74557341
--- /dev/null
+++ b/folding-studio/folding_studio/utils/exceptions.py
@@ -0,0 +1,5 @@
+"""Custom exceptions raised by CLI and helper functions."""
+
+
+class ProjectCodeNotFound(Exception):
+ """Raised when no project code could be found."""
diff --git a/folding-studio/folding_studio/utils/fasta.py b/folding-studio/folding_studio/utils/fasta.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa6d9d0d3737b45beac8a61b6e8ac795cf96168e
--- /dev/null
+++ b/folding-studio/folding_studio/utils/fasta.py
@@ -0,0 +1,58 @@
+"""Helpers for handling fasta files."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from Bio import SeqIO
+from Bio.SeqRecord import SeqRecord
+from folding_studio_data_models.exceptions import FastaValidationError
+
+
+def validate_fasta(
+    fasta_input: str | Path,
+    allow_multimer: bool = True,
+    max_aa_length: int | None = None,
+    str_output: bool = False,
+) -> SeqRecord | list[SeqRecord] | str:
+    """Validate a fasta content.
+
+    Args:
+        fasta_input (str | Path): Input fasta.
+        allow_multimer (bool, optional): Allow multimer in the fasta representation. Defaults to True.
+        max_aa_length (int | None, optional): Max AA length supported. Defaults to None.
+        str_output (bool, optional): Return the validated records re-serialized
+            as a FASTA-formatted string. Defaults to False.
+
+    Raises:
+        FastaValidationError: If no sequence found in the FASTA content.
+        FastaValidationError: If one or more sequence has 0 residue.
+        FastaValidationError: If only monomers are supported but a multimer was given.
+        FastaValidationError: If unsupported sequence length in FASTA content.
+
+    Returns:
+        SeqRecord | list[SeqRecord] | str: A FASTA string if `str_output` is
+        True; otherwise a unique SeqRecord if `allow_multimer` is set to False,
+        or a list of SeqRecord.
+    """
+
+    records: list[SeqRecord] = list(SeqIO.parse(fasta_input, "fasta"))
+    if not records:
+        raise FastaValidationError("No sequence found in the FASTA content.")
+    elif any(len(record.seq) == 0 for record in records):
+        raise FastaValidationError("One or more sequence has 0 residue.")
+    elif not allow_multimer and len(records) > 1:
+        raise FastaValidationError(
+            "Only monomers are supported but a multimer was given."
+        )
+    elif max_aa_length is not None and any(
+        len(record.seq) > max_aa_length for record in records
+    ):
+        # Error message typos fixed ("Unuspported", "lenght").
+        raise FastaValidationError(
+            "Unsupported sequence length in FASTA content. "
+            f"Max supported sequence length is {max_aa_length}AA."
+        )
+    if str_output:
+        return "\n".join(
+            f">{record.description}\n{str(record.seq)}" for record in records
+        )
+    elif allow_multimer:
+        return records
+    return records[0]
diff --git a/folding-studio/folding_studio/utils/file_helpers.py b/folding-studio/folding_studio/utils/file_helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..35341018a5f01afdfb1ff58d4f28184cc28ef9d3
--- /dev/null
+++ b/folding-studio/folding_studio/utils/file_helpers.py
@@ -0,0 +1,29 @@
+"""Helper methods for reading input files."""
+
+import re
+from itertools import filterfalse, tee
+from pathlib import Path
+
+
+def partition_template_pdb_from_file(
+    custom_templates: list[str | Path],
+) -> tuple[list[str], list[Path]]:
+    """Partitions custom templates entries into PDB codes and template files.
+
+    Inspired from `partition` function in itertools cookbook:
+    https://docs.python.org/dev/library/itertools.html#itertools-recipes
+
+    Args:
+        custom_templates (list[str | Path]): List of custom templates, each
+            either a 4-character PDB code or a path to a template file.
+
+    Returns:
+        The list of PDB codes and the list of custom files.
+    """
+
+    def is_pdb_code(entry):
+        # A PDB ID is exactly four alphanumeric characters.
+        return re.fullmatch(r"[a-zA-Z0-9]{4}", str(entry)) is not None
+
+    # Materialize as lists so the result matches the annotated return type
+    # and can be iterated more than once (the original returned one-shot
+    # filter iterators).
+    pdb_codes = [entry for entry in custom_templates if is_pdb_code(entry)]
+    custom_files = [entry for entry in custom_templates if not is_pdb_code(entry)]
+    return (pdb_codes, custom_files)
diff --git a/folding-studio/folding_studio/utils/gcp.py b/folding-studio/folding_studio/utils/gcp.py
new file mode 100644
index 0000000000000000000000000000000000000000..1cacd4776d44569f3eadb6b83c299efdae598cb8
--- /dev/null
+++ b/folding-studio/folding_studio/utils/gcp.py
@@ -0,0 +1,113 @@
+"""Helper methods for GCP."""
+
+import logging
+import shutil
+import subprocess
+import time
+import zipfile
+from pathlib import Path
+
+import requests
+
+from folding_studio.config import REQUEST_TIMEOUT
+
+TOKEN_EXPIRY_SECONDS = 15 * 60 # 15 Minutes
+
+
+class TokenManager:
+    """Caches a GCP identity token and refreshes it when it expires."""
+
+    def __init__(self) -> None:
+        """Initialize TokenManager class."""
+        # No token yet; timestamp 0 forces a fetch on first use.
+        self.access_token = None
+        self.token_generation_time = 0
+
+    def get_token(self) -> str:
+        """Get the token (self updating every 15 mins).
+
+        Return:
+            The updated token
+        """
+        current_time = time.time()
+        # Check if the token is expired
+        if (
+            self.access_token is None
+            or current_time - self.token_generation_time >= TOKEN_EXPIRY_SECONDS
+        ):
+            self.access_token = get_id_token()
+            # Record the refresh time. The original never updated this, so the
+            # expiry check always passed and a new token was fetched on every
+            # call, defeating the cache.
+            self.token_generation_time = current_time
+
+        return self.access_token
+
+
+def get_id_token() -> str:
+    """Get the user's gcp token id.
+
+    Returns:
+        str: The user's gcp token id.
+    """
+    # NOTE(review): check=False means a failed gcloud invocation is not raised
+    # here; the function then returns an empty string — confirm that callers
+    # handle an empty token.
+    cmd_output = subprocess.run(
+        ["gcloud", "auth", "print-identity-token"],
+        capture_output=True,
+        text=True,
+        check=False,
+    )
+    return cmd_output.stdout.strip()
+
+
+def download_file_from_signed_url(
+    signed_url: str,
+    output_path: Path,
+    force: bool = False,
+    unzip: bool = False,
+    unzip_dir: str | None = None,
+) -> None:
+    """Download a file from a signed url.
+
+    Args:
+        signed_url (str): GCP signed url.
+        output_path (Path): Output file path.
+        force (bool, optional): Force file writing if it already exists. Defaults to False.
+        unzip (bool, optional): Unzip the zip file after downloading. Defaults to False.
+        unzip_dir (str | None, optional): Directory where to extract all members of the archive.
+            Defaults to None, in which case the archive is extracted into a
+            directory named after `output_path` without its suffix.
+
+    Raises:
+        ValueError: If output file path exists but force set to false.
+        ValueError: If unzip but the output path is not a zip file.
+        Exception: If an error occurs during the download.
+        ValueError: If unzip but the downloaded file is not a valid zip archive.
+    """
+    if output_path.exists() and not force:
+        msg = f"The file '{output_path}' already exists. Use the --force flag to overwrite it."
+        raise ValueError(msg)
+
+    if unzip and not output_path.suffix == ".zip":
+        msg = "The output path must be a zip file."
+        raise ValueError(msg)
+
+    # Normalize to Path: callers may pass `unzip_dir` as a plain string
+    # (per the annotation), and `str` has no `mkdir` method.
+    unzip_dir = Path(unzip_dir) if unzip_dir else output_path.with_suffix("")
+
+    try:
+        response = requests.get(signed_url, stream=True, timeout=REQUEST_TIMEOUT)
+        response.raise_for_status()
+        output_path.parent.mkdir(parents=True, exist_ok=True)
+        # Stream the body straight to disk to avoid holding it in memory.
+        with output_path.open("wb") as f:
+            shutil.copyfileobj(response.raw, f)
+    except Exception as e:
+        msg = f"Error downloading from signed url: {e}"
+        raise Exception(msg) from e
+
+    if unzip:
+        unzip_dir.mkdir(parents=True, exist_ok=True)
+        try:
+            with zipfile.ZipFile(output_path, "r") as zip_ref:
+                zip_ref.extractall(unzip_dir)
+        except zipfile.BadZipFile as e:
+            msg = f"File {output_path} is not a valid zip archive."
+            raise ValueError(msg) from e
+
+        # Only log the extraction message when an extraction happened.
+        logging.info(f"Extracted all files of {output_path} to {unzip_dir}.")
diff --git a/folding-studio/folding_studio/utils/headers.py b/folding-studio/folding_studio/utils/headers.py
new file mode 100644
index 0000000000000000000000000000000000000000..777fcc8e972ef5246203fb71285ef7bdd817d0e5
--- /dev/null
+++ b/folding-studio/folding_studio/utils/headers.py
@@ -0,0 +1,21 @@
+"""Headers for HTTP requests."""
+
+from folding_studio.config import FOLDING_API_KEY
+from folding_studio.utils.gcp import get_id_token
+
+
+def get_auth_headers() -> dict[str, str]:
+ """
+ Create authentication headers based on available credentials.
+
+ API key is the default authentication.
+ If none is provided, we fallback to JWT from Google Cloud.
+
+ Returns:
+ dict: Authentication headers for API requests.
+ """
+ if FOLDING_API_KEY:
+ return {"X-API-Key": FOLDING_API_KEY}
+
+ identity_token = get_id_token()
+ return {"Authorization": f"Bearer {identity_token}"}
diff --git a/folding-studio/folding_studio/utils/input_validation.py b/folding-studio/folding_studio/utils/input_validation.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9cd2ee465af208ee0aaf7475fc882cd431ee304
--- /dev/null
+++ b/folding-studio/folding_studio/utils/input_validation.py
@@ -0,0 +1,173 @@
+"""Helper methods for validating cli entries."""
+
+from enum import Enum
+from pathlib import Path
+
+import typer
+
+from folding_studio.utils.path_helpers import extract_files
+
+
+class SupportedCustomTemplateFile(str, Enum):
+ """Supported custom template file extensions."""
+
+ CIF = ".cif"
+
+
+class SupportedCustomMSAFile(str, Enum):
+ """Supported custom msa file extensions."""
+
+ STO = ".sto"
+ A3M = ".a3m"
+
+
+class SupportedTemplateMaskFile(str, Enum):
+ """Supported custom template file extensions."""
+
+ JSON = ".json"
+
+
+def extract_and_validate_custom_templates(
+ paths: list[Path],
+) -> list[Path]:
+ """Extract and validate a list of path to use as custom templates.
+
+ Before checking the files, if a directory path is in the list, its files are extracted.
+
+ Args:
+ paths (list[Path]): List of paths.
+
+ Raises:
+ typer.BadParameter: If a file is not supported.
+
+ Returns:
+ list[Path]: Extracted and validated custom template files path.
+ """
+ extracted_paths = extract_files(paths)
+ supported_files = [item.value for item in SupportedCustomTemplateFile]
+ for path in extracted_paths:
+ if path.suffix not in supported_files:
+ raise typer.BadParameter(
+ f"The file '{path}' is not supported. "
+ f"Only {supported_files} are supported for custom templates."
+ )
+ return extracted_paths
+
+
+def extract_and_validate_custom_msas(paths: list[Path]) -> list[Path]:
+ """Extract and validate a list of path to use as custom msas.
+
+ Before checking the files, if a directory path is in the list, its files are extracted.
+
+ Args:
+ paths (list[Path]): List of paths.
+
+ Raises:
+ typer.BadParameter: If a file is not supported.
+
+ Returns:
+ list[Path]: Extracted and validated custom msas files path.
+ """
+ extracted_paths = extract_files(paths)
+ supported_files = [item.value for item in SupportedCustomMSAFile]
+ for path in extracted_paths:
+ if path.suffix not in supported_files:
+ raise typer.BadParameter(
+ f"The file '{path}' is not supported. "
+ f"Only {supported_files} are supported for custom msas."
+ )
+ return extracted_paths
+
+
+def validate_initial_guess(
+ initial_guess_file: Path | None,
+) -> Path:
+ """Extract and validate the file used for initial guess.
+
+ Args:
+ initial_guess_file (Path | None): Initial guess file parameter.
+
+ Raises:
+ typer.BadParameter: If a file is not supported.
+
+ Returns:
+ Path | None: Validated initial guess file path or None.
+ """
+ if not initial_guess_file:
+ return None
+ supported_files = [
+ item.value for item in SupportedCustomTemplateFile
+ ] # initial guess files are equivalent to template files
+ if initial_guess_file.suffix not in supported_files:
+ raise typer.BadParameter(
+ f"The file '{initial_guess_file}' is not supported. "
+ f"Only {supported_files} are supported for initial guess."
+ )
+ return initial_guess_file
+
+
+def validate_template_mask(
+ template_mask_file: Path | None,
+) -> Path:
+ """Extract and validate the file used for template mask.
+
+ Args:
+ template_mask_file (Path | None): Template mask file parameter.
+
+ Raises:
+ typer.BadParameter: If a file is not supported.
+
+ Returns:
+ Path | None: Validated template mask file path or None.
+ """
+ if not template_mask_file:
+ return None
+ supported_files = [
+ item.value for item in SupportedTemplateMaskFile
+ ] # template mask files are equivalent to template files
+ if template_mask_file.suffix not in supported_files:
+ raise typer.BadParameter(
+ f"The file '{template_mask_file}' is not supported. "
+ f"Only {supported_files} are supported for template masks.g."
+ )
+ return template_mask_file
+
+
+def validate_list_initial_guess(ig_files: list[Path]) -> list[Path]:
+ """Extract and validate a list of initial guess files.
+
+ Args:
+ ig_files (list[Path]): List of initial guess files path.
+
+ Returns:
+ list[Path]: Extracted and validated initial guess files path.
+
+ """
+ extracted_paths = extract_files(ig_files)
+ initial_guess_files = []
+ for path in extracted_paths:
+ validated_ig_path = validate_initial_guess(initial_guess_file=path)
+ if validated_ig_path:
+ initial_guess_files.append(validated_ig_path)
+
+ return initial_guess_files
+
+
+def validate_list_template_mask(tm_files: list[Path]) -> list[Path]:
+ """Extract and validate a list of template mask files.
+
+ Args:
+ tm_files (list[Path]): List of template mask files path.
+
+ Returns:
+ list[Path]: Extracted and validated template mask files path.
+
+ """
+ extracted_paths = extract_files(tm_files)
+ template_mask_files = []
+ for path in extracted_paths:
+ validated_tm_path = validate_template_mask(template_mask_file=path)
+ if validated_tm_path:
+ template_mask_files.append(validated_tm_path)
+
+ return template_mask_files
diff --git a/folding-studio/folding_studio/utils/path_helpers.py b/folding-studio/folding_studio/utils/path_helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..4843486d7621afb54a5570336c4d8e8c964f6c53
--- /dev/null
+++ b/folding-studio/folding_studio/utils/path_helpers.py
@@ -0,0 +1,63 @@
+"""Helper methods for handling paths."""
+
+from pathlib import Path
+from typing import Iterable
+
+
+def extract_files(paths: list[Path]) -> list[Path]:
+ """Extract all the files paths from a list of file or directory paths.
+
+ If a directory path is in the list, all its files are extracted.
+
+ Args:
+ paths (list[Path]): List of custom file path or directory containing custom files.
+
+ Returns:
+ list[Path]: List of extracted path.
+ """
+ extracted_paths: list[Path] = []
+ for path in paths:
+ if path.is_dir():
+ extracted_paths += [file for file in path.iterdir() if file.is_file()]
+ else:
+ extracted_paths.append(path)
+ return extracted_paths
+
+
+def validate_path(
+ path: str | Path,
+ is_file: bool = False,
+ is_dir: bool = False,
+ file_suffix: Iterable[str] | None = None,
+) -> Path:
+ """Helper method to check a path existence and other constraints.
+
+ By default, a path existence check will be made and the path must point to either a file or a directory.
+
+ Args:
+ path (str | Path): Path to check.
+ is_file (bool, optional): Validate path leads to a file. Defaults to False.
+ is_dir (bool, optional): Validate path leads to a directory. Defaults to False.
+ file_suffix (Iterable[str] | None, optional): Validate file suffix must be in specified list. Defaults to None.
+ """
+ path = Path(path)
+
+ if not path.exists():
+ raise FileNotFoundError(f"The path {path} does not exist.")
+
+ if not (path.is_file() or path.is_dir()):
+ raise ValueError(f"The path {path} is neither a file or a directory.")
+
+ if is_file:
+ if not path.is_file():
+ raise FileNotFoundError(f"The path {path} is not a file.")
+ if file_suffix is not None and not (
+ (path.suffix in file_suffix) or ("".join(path.suffixes) in file_suffix)
+ ):
+ raise ValueError(
+ f"Unsupported suffix '{''.join(path.suffixes)}' for path {path}. "
+ f"Supported are {tuple(file_suffix)}."
+ )
+ elif is_dir and not path.is_dir():
+ raise NotADirectoryError(f"The path {path} is not a directory.")
+ return path
diff --git a/folding-studio/folding_studio/utils/project_validation.py b/folding-studio/folding_studio/utils/project_validation.py
new file mode 100644
index 0000000000000000000000000000000000000000..9846414b0d8a93ecb433d43a988b897315f16b82
--- /dev/null
+++ b/folding-studio/folding_studio/utils/project_validation.py
@@ -0,0 +1,23 @@
+"""Validation of project code."""
+
+from folding_studio.config import FOLDING_PROJECT_CODE
+from folding_studio.utils.exceptions import ProjectCodeNotFound
+
+
+def define_project_code_or_raise(project_code: str | None) -> str:
+ """
+ Args:
+ project_code (str|None): Project code under which the jobs are billed.
+
+ Return:
+ The project code.
+ """
+ # Look into env var if None was passed
+ project_code = project_code or FOLDING_PROJECT_CODE
+ if project_code is None:
+ msg = (
+ "Project code is not set. Please either:\n"
+ "set the env var FOLDING_PROJECT_CODE."
+ )
+ raise ProjectCodeNotFound(msg)
+ return project_code
diff --git a/folding-studio/offline-packages/folding_studio_data_models-0.13.3-py3-none-any.whl b/folding-studio/offline-packages/folding_studio_data_models-0.13.3-py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..5cf47deff94dd8b6a1b2496de6e3949b44519df9
Binary files /dev/null and b/folding-studio/offline-packages/folding_studio_data_models-0.13.3-py3-none-any.whl differ
diff --git a/folding-studio/pyproject.toml b/folding-studio/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..a5a3185e4a3485909a46e0eff53b15150176fba1
--- /dev/null
+++ b/folding-studio/pyproject.toml
@@ -0,0 +1,47 @@
+[tool.poetry]
+name = "folding-studio"
+version = "3.3.1"
+description = ""
+authors = ["AchilleSoulieID "]
+readme = "README.md"
+
+[tool.poetry.scripts]
+folding = "folding_studio.cli:app"
+
+[tool.poetry.dependencies]
+python = ">=3.10"
+typer = "^0.15.4"
+requests = "^2.32.3"
+pydantic = "^2.11.4"
+folding-studio-data-models = { path = "offline-packages/folding_studio_data_models-0.13.3-py3-none-any.whl" }
+tqdm = "^4.67.0"
+pyyaml = "^6.0.2"
+python-dotenv = "^1.0.1"
+
+
+[tool.poetry.group.dev.dependencies]
+pylint = "^3.2.6"
+pytest = "^8.3.2"
+pytest-env = "^1.1.3"
+requests-mock = "^1.12.1"
+
+
+[tool.poetry.group.docs.dependencies]
+mkdocs = "^1.6.1"
+mkdocs-material = "^9.6.5"
+mkdocs-swagger-ui-tag = "^0.6.11"
+mkdocstrings = "^0.24.0"
+
+[[tool.poetry.source]]
+name = "gcp"
+url = "https://europe-west4-python.pkg.dev/instadeep/folding-studio/simple"
+priority = "explicit"
+
+[[tool.poetry.source]]
+name = "staging"
+url = "https://europe-west4-python.pkg.dev/instadeep/folding-studio/simple"
+priority = "explicit"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/folding-studio/pytest.ini b/folding-studio/pytest.ini
new file mode 100644
index 0000000000000000000000000000000000000000..9523e32d59e8e9f5c764f2bb10b14312e64bbdfe
--- /dev/null
+++ b/folding-studio/pytest.ini
@@ -0,0 +1,5 @@
+[pytest]
+markers =
+ apikeytest: marks tests that operate with the API key rather than JWT.
+env =
+ FOLDING_API_KEY = MY_KEY
diff --git a/folding-studio/tests/__init__.py b/folding-studio/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/folding-studio/tests/api_call/data/batch.csv b/folding-studio/tests/api_call/data/batch.csv
new file mode 100644
index 0000000000000000000000000000000000000000..558ca187580f22e262f7ffa88ff7016fdce82a88
--- /dev/null
+++ b/folding-studio/tests/api_call/data/batch.csv
@@ -0,0 +1,4 @@
+complex_id,description,fasta_sequence,custom_templates,num_recycle,random_seed,msa_mode,template_mode,gap_trick,custom_msas
+mono_2LIS_auto_msa_custom_template,>2LIS_1|Chain A|SPERM LYSIN|Haliotis rufescens (6454),RSWHYVEPKFLNKAFEVALKVQIIAGFDRGLVKWLRVHGRTLSTVQKKALYFVNRRYMQTHWANYMLWINKKIDALGRTPVVGDYTRLGAEIGRRIDMAYFYDFLKDKNMIPKYLPYMEEINRMRPADVPVKYMGK,"['1agw.cif','1agz.cif']",3,0,search,custom,0,"['1agb_A.sto','1agb_B.sto']"
+multi_6M0J_standard,>6M0J_1|Chain A|Angiotensin-converting enzyme 2|Homo sapiens (9606),STIEEQAKTFLDKFNHEAEDLFYQSSLASWNYNTNITEENVQNMNNAGDKWSAFLKEQSTLAQMYPLQEIQNLTVKLQLQALQQNGSSVLSEDKSKRLNTILNTMSTIYSTGKVCNPDNPQECLLLEPGLNEIMANSLDYNERLWAWESWRSEVGKQLRPLYEEYVVLKNEMARANHYEDYGDYWRGDYEVNGVDGYDYSRGQLIEDVEHTFEEIKPLYEHLHAYVRAKLMNAYPSYISPIGCLPAHLLGDMWGRFWTNLYSLTVPFGQKPNIDVTDAMVDQAWDAQRIFKEAEKFFVSVGLPNMTQGFWENSMLTDPGNVQKAVCHPTAWDLGKGDFRILMCTKVTMDDFLTAHHEMGHIQYDMAYAAQPFLLRNGANEGFHEAVGEIMSLSAATPKHLKSIGLLSPDFQEDNETEINFLLKQALTIVGTLPFTYMLEKWRWMVFKGEIPKDQWMKKWWEMKREIVGVVEPVPHDETYCDPASLFHVSNDYSFIRYYTRTLYQFQFQEALCQAAKHEGPLHKCDISNSTEAGQKLFNMLRLGKSEPWTLALENVVGAKNMNVRPLLNYFEPLFTWLKDQNKNSFVGWSTDWSPYADHHHHHH,,2,5,search,search,0,"['6m0j_A.sto','6m0j_B.sto']"
+multi_6M0J_standard,>6M0J_2|Chain B[auth E]|Spike protein S1|Severe acute respiratory syndrome coronavirus 2 (2697049),RVQPTESIVRFPNITNLCPFGEVFNATRFASVYAWNRKRISNCVADYSVLYNSASFSTFKCYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGKIADYNYKLPDDFTGCVIAWNSNNLDSKVGGNYNYLYRLFRKSNLKPFERDISTEIYQAGSTPCNGVEGFNCYFPLQSYGFQPTNGVGYQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNFHHHHHH,,2,5,search,search,0,
diff --git a/folding-studio/tests/api_call/data/batch.json b/folding-studio/tests/api_call/data/batch.json
new file mode 100644
index 0000000000000000000000000000000000000000..beac1b962700b0fee923a345d2ac43a1398e4d53
--- /dev/null
+++ b/folding-studio/tests/api_call/data/batch.json
@@ -0,0 +1,80 @@
+{
+ "requests": [
+ {
+ "complex_id": "multi_1agb_custom_msa_gap_trick",
+ "sequences": [
+ {
+ "description": ">multi_1agb_custom_msa_gap_trick|pdb:A",
+ "fasta_sequence": "GSHSMRYFDTAMSRPGRGEPRFISVGYVDDTQFVRFDSDAASPREEPRAPWIEQEGPEYWDRNTQIFKTNTQTDRESLRNLRGYYNQSEAGSHTLQSMYGCDVGPDGRLLRGHNQYAYDGKDYIALNEDLRSWTAADTAAQITQRKWEAARVAEQDRAYLEGTCVEWLRRYLENGKDTLERADPPKTHVTHHPISDHEATLRCWALGFYPAEITLTWQRDGEDQTQDTELVETRPAGDRTFQKWAAVVVPSGEEQRYTCHVQHEGLPKPLTLRWEG"
+ },
+ {
+ "description": ">multi_1agb_custom_msa_gap_trick|pdb:B",
+ "fasta_sequence": "GGRKKYKL"
+ }
+ ],
+ "ignore_cache": true,
+ "parameters": {
+ "num_recycle": 5,
+ "random_seed": 129,
+ "custom_templates": [
+ "5II8",
+ "1agw.cif",
+ "1agz.cif"
+ ],
+ "custom_msas": [
+ "1agb_A.sto",
+ "1agb_B.sto"
+ ],
+ "gap_trick": true,
+ "msa_mode": "custom",
+ "template_mode": "custom"
+ }
+ },
+ {
+ "complex_id": "multi_6M0J_custom_msa_no_template",
+ "sequences": [
+ {
+ "description": ">multi_6M0J_custom_msa_no_template|6M0J_1|Chain A|Angiotensin-converting enzyme 2|Homo sapiens (9606)",
+ "fasta_sequence": "STIEEQAKTFLDKFNHEAEDLFYQSSLASWNYNTNITEENVQNMNNAGDKWSAFLKEQSTLAQMYPLQEIQNLTVKLQLQALQQNGSSVLSEDKSKRLNTILNTMSTIYSTGKVCNPDNPQECLLLEPGLNEIMANSLDYNERLWAWESWRSEVGKQLRPLYEEYVVLKNEMARANHYEDYGDYWRGDYEVNGVDGYDYSRGQLIEDVEHTFEEIKPLYEHLHAYVRAKLMNAYPSYISPIGCLPAHLLGDMWGRFWTNLYSLTVPFGQKPNIDVTDAMVDQAWDAQRIFKEAEKFFVSVGLPNMTQGFWENSMLTDPGNVQKAVCHPTAWDLGKGDFRILMCTKVTMDDFLTAHHEMGHIQYDMAYAAQPFLLRNGANEGFHEAVGEIMSLSAATPKHLKSIGLLSPDFQEDNETEINFLLKQALTIVGTLPFTYMLEKWRWMVFKGEIPKDQWMKKWWEMKREIVGVVEPVPHDETYCDPASLFHVSNDYSFIRYYTRTLYQFQFQEALCQAAKHEGPLHKCDISNSTEAGQKLFNMLRLGKSEPWTLALENVVGAKNMNVRPLLNYFEPLFTWLKDQNKNSFVGWSTDWSPYADHHHHHP"
+ },
+ {
+ "description": ">multi_6M0J_custom_msa_no_template|6M0J_2|Chain B[auth E]|Spike protein S1|Severe acute respiratory syndrome coronavirus 2 (2697049)",
+ "fasta_sequence": "RVQPTESIVRFPNITNLCPFGEVFNATRFASVYAWNRKRISNCVADYSVLYNSASFSTFKCYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGKIADYNYKLPDDFTGCVIAWNSNNLDSKVGGNYNYLYRLFRKSNLKPFERDISTEIYQAGSTPCNGVEGFNCYFPLQSYGFQPTNGVGYQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNFHHHHHP"
+ }
+ ],
+ "ignore_cache": true,
+ "parameters": {
+ "num_recycle": 3,
+ "random_seed": 129,
+ "custom_msas": [
+ "6m0j_A.sto",
+ "6m0j_B.sto"
+ ],
+ "gap_trick": false,
+ "msa_mode": "custom",
+ "template_mode": "none"
+ }
+ },
+ {
+ "complex_id": "multi_6M0J_custom_no_no",
+ "sequences": [
+ {
+ "description": ">multi_6M0J_custom_msa_no_template|6M0J_1|Chain A|Angiotensin-converting enzyme 2|Homo sapiens (9606)",
+ "fasta_sequence": "STIEEQAKTFLDKFNHEAEDLFYQSSLASWNYNTNITEENVQNMNNAGDKWSAFLKEQSTLAQMYPLQEIQNLTVKLQLQALQQNGSSVLSEDKSKRLNTILNTMSTIYSTGKVCNPDNPQECLLLEPGLNEIMANSLDYNERLWAWESWRSEVGKQLRPLYEEYVVLKNEMARANHYEDYGDYWRGDYEVNGVDGYDYSRGQLIEDVEHTFEEIKPLYEHLHAYVRAKLMNAYPSYISPIGCLPAHLLGDMWGRFWTNLYSLTVPFGQKPNIDVTDAMVDQAWDAQRIFKEAEKFFVSVGLPNMTQGFWENSMLTDPGNVQKAVCHPTAWDLGKGDFRILMCTKVTMDDFLTAHHEMGHIQYDMAYAAQPFLLRNGANEGFHEAVGEIMSLSAATPKHLKSIGLLSPDFQEDNETEINFLLKQALTIVGTLPFTYMLEKWRWMVFKGEIPKDQWMKKWWEMKREIVGVVEPVPHDETYCDPASLFHVSNDYSFIRYYTRTLYQFQFQEALCQAAKHEGPLHKCDISNSTEAGQKLFNMLRLGKSEPWTLALENVVGAKNMNVRPLLNYFEPLFTWLKDQNKNSFVGWSTDWSPYADHHHHHP"
+ },
+ {
+ "description": ">multi_6M0J_custom_msa_no_template|6M0J_2|Chain B[auth E]|Spike protein S1|Severe acute respiratory syndrome coronavirus 2 (2697049)",
+ "fasta_sequence": "RVQPTESIVRFPNITNLCPFGEVFNATRFASVYAWNRKRISNCVADYSVLYNSASFSTFKCYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGKIADYNYKLPDDFTGCVIAWNSNNLDSKVGGNYNYLYRLFRKSNLKPFERDISTEIYQAGSTPCNGVEGFNCYFPLQSYGFQPTNGVGYQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNFHHHHHP"
+ }
+ ],
+ "ignore_cache": true,
+ "parameters": {
+ "num_recycle": 3,
+ "random_seed": 129,
+ "gap_trick": false,
+ "msa_mode": "none",
+ "template_mode": "none"
+ }
+ }
+ ]
+}
diff --git a/folding-studio/tests/api_call/data/protein.fasta b/folding-studio/tests/api_call/data/protein.fasta
new file mode 100644
index 0000000000000000000000000000000000000000..f68b797b82a7feb760a7382396cce8fe10fbb7b8
--- /dev/null
+++ b/folding-studio/tests/api_call/data/protein.fasta
@@ -0,0 +1,9 @@
+>SARS-CoV-2|RBD|Omicron variant
+RVQPTESIVRFPNITNLCPFDEVFNATRFA
+SVYAWNRKRISNCVADYSVLYNLAPFFTFK
+CYGVSPTKLNDLCFTNVYADSFVIRGDEVR
+QIAPGQTGNIADYNYKLPDDFTGCVIAWNS
+NKLDSKVSGNYNYLYRLFRKSNLKPFERDI
+STEIYQAGNKPCNGVAGFNCYFPLRSYSFR
+PTYGVGHQPYRVVVLSFELLHAPATVCGPK
+KSTNLVKNKCVNF
diff --git a/folding-studio/tests/api_call/test_batch_predict.py b/folding-studio/tests/api_call/test_batch_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..a85247922d7937f5b2da19e7ddfc2482fe42a190
--- /dev/null
+++ b/folding-studio/tests/api_call/test_batch_predict.py
@@ -0,0 +1,273 @@
+"""Test batch prediction."""
+
+from pathlib import Path
+from unittest import mock
+
+import pytest
+from folding_studio.api_call.predict.batch_predict import (
+ _build_request_from_fasta,
+ _extract_sequences_from_file,
+ batch_prediction,
+ batch_prediction_from_file,
+)
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+from folding_studio.utils.data_model import (
+ PredictRequestCustomFiles,
+ PredictRequestParams,
+)
+from folding_studio_data_models import (
+ AF2Request,
+ BatchRequest,
+ FeatureMode,
+ FoldingModel,
+ OpenFoldRequest,
+)
+from folding_studio_data_models.exceptions import DuplicatedRequest
+
+current_workdir = Path(__file__).parent.resolve()
+data_dir = Path(current_workdir / "data")
+
+@pytest.fixture()
+def mock_post():
+ post_mock = mock.Mock()
+ mock_response = mock.MagicMock()
+ mock_response.ok = True
+ post_mock.return_value = mock_response
+ with mock.patch("requests.post", post_mock):
+ yield post_mock
+
+@pytest.fixture(autouse=True)
+def mock_get_auth_headers():
+ with mock.patch(
+ "folding_studio.api_call.predict.batch_predict.get_auth_headers", return_value={'Authorization': 'Bearer identity_token'}
+ ) as m:
+ yield m
+
+def test_build_request_from_fasta_pass(folding_model: FoldingModel):
+ file = Path(data_dir, "protein.fasta")
+
+ params = PredictRequestParams(
+ ignore_cache=False,
+ template_mode=FeatureMode.SEARCH,
+ custom_template_ids=["AB12"],
+ msa_mode=FeatureMode.SEARCH,
+ max_msa_clusters=-1,
+ max_extra_msa=-1,
+ gap_trick=False,
+ num_recycle=3,
+ random_seed=0,
+ model_subset=[1, 3, 4],
+ )
+ custom_files = PredictRequestCustomFiles(
+ templates=[Path("gs://custom_template.cif")],
+ msas=[Path("gs://custom_msa.sto")],
+ uploaded=True,
+ )
+ request = _build_request_from_fasta(
+ file,
+ folding_model=folding_model,
+ params=params,
+ custom_files=custom_files,
+ )
+
+ parameters = dict(
+ num_recycle=params.num_recycle,
+ random_seed=params.random_seed,
+ custom_templates=params.custom_template_ids + ["gs://custom_template.cif"],
+ custom_msas=["gs://custom_msa.sto"],
+ gap_trick=params.gap_trick,
+ msa_mode=params.msa_mode,
+ max_msa_clusters=params.max_msa_clusters,
+ max_extra_msa=params.max_extra_msa,
+ template_mode=params.template_mode,
+ model_subset=params.model_subset,
+ )
+
+ if folding_model == FoldingModel.AF2:
+ expected_request = AF2Request(
+ complex_id="protein",
+ sequences=_extract_sequences_from_file(file),
+ parameters=parameters,
+ )
+ else:
+ expected_request = OpenFoldRequest(
+ complex_id="protein",
+ sequences=_extract_sequences_from_file(file),
+ parameters=parameters,
+ )
+ assert request == expected_request
+
+
+@pytest.mark.parametrize(
+ "fasta_files, file_contents",
+ [
+ (
+ ["fasta_file_1.fasta", "fasta_file_1_duplicate.fasta"],
+ [">A\nA\n", ">A\nA\n"],
+ ), # Duplicate content, different files
+ ],
+)
+def test_batch_prediction_fail_duplicate(
+ tmp_path: Path, mock_post: mock.Mock, fasta_files, file_contents, folding_model, headers,
+):
+ """Test batch prediction for duplicates and unique files."""
+
+ fasta_paths = [tmp_path / file_name for file_name in fasta_files]
+ for f, content in zip(fasta_paths, file_contents):
+ with open(f, "w") as fasta_file:
+ fasta_file.write(content)
+
+ params = PredictRequestParams(
+ ignore_cache=False,
+ template_mode=FeatureMode.SEARCH,
+ custom_template_ids=[],
+ msa_mode=FeatureMode.SEARCH,
+ max_msa_clusters=-1,
+ max_extra_msa=-1,
+ gap_trick=False,
+ num_recycle=3,
+ random_seed=0,
+ model_subset=[],
+ )
+ custom_files = PredictRequestCustomFiles(templates=[], msas=[], uploaded=True)
+
+ with pytest.raises(DuplicatedRequest):
+ expected_request = BatchRequest(
+ requests=[
+ _build_request_from_fasta(
+ file,
+ folding_model=folding_model,
+ params=params,
+ custom_files=custom_files,
+ )
+ for file in fasta_paths
+ ]
+ )
+
+ mocked_local_to_uploaded = {}
+ with mock.patch(
+ "folding_studio.api_call.predict.batch_predict.PredictRequestCustomFiles.upload",
+ return_value=mocked_local_to_uploaded,
+ ):
+ with pytest.raises(DuplicatedRequest):
+ batch_prediction(
+ fasta_paths,
+ folding_model,
+ params,
+ custom_files,
+ num_seed=None,
+ project_code="FOLDING_DEV",
+ )
+ mock_post.assert_called_once_with(
+ API_URL + "batchPredict",
+ data={"batch_jobs_request": expected_request.model_dump_json()},
+ headers=headers,
+ timeout=REQUEST_TIMEOUT,
+ params={"project_code": "FOLDING_DEV"},
+ )
+
+
+@pytest.mark.parametrize(
+ "fasta_files, file_contents",
+ [
+ (["fasta_file_1.fasta", "fasta_file_1_unique.fasta"], [">A\nA\n", ">B\nb\n"]),
+ ],
+)
+def test_batch_prediction_pass(
+ tmp_path: Path, mock_post: mock.Mock, fasta_files, file_contents, folding_model, headers,
+):
+ """Test batch prediction for duplicates and unique files."""
+
+ fasta_paths = [tmp_path / file_name for file_name in fasta_files]
+ for f, content in zip(fasta_paths, file_contents):
+ with open(f, "w") as fasta_file:
+ fasta_file.write(content)
+
+ params = PredictRequestParams(
+ ignore_cache=False,
+ template_mode=FeatureMode.SEARCH,
+ custom_template_ids=[],
+ msa_mode=FeatureMode.SEARCH,
+ max_msa_clusters=-1,
+ max_extra_msa=-1,
+ gap_trick=False,
+ num_recycle=3,
+ random_seed=0,
+ model_subset=[],
+ )
+ custom_files = PredictRequestCustomFiles(templates=[], msas=[], uploaded=True)
+
+ expected_request = BatchRequest(
+ requests=[
+ _build_request_from_fasta(
+ file,
+ folding_model=folding_model,
+ params=params,
+ custom_files=custom_files,
+ )
+ for file in fasta_paths
+ ]
+ )
+
+ mocked_local_to_uploaded = {}
+ with mock.patch(
+ "folding_studio.api_call.predict.batch_predict.PredictRequestCustomFiles.upload",
+ return_value=mocked_local_to_uploaded,
+ ):
+ batch_prediction(
+ fasta_paths,
+ folding_model,
+ params,
+ custom_files,
+ num_seed=None,
+ project_code="FOLDING_DEV",
+ )
+ mock_post.assert_called_once_with(
+ API_URL + "batchPredict",
+ data={"batch_jobs_request": expected_request.model_dump_json()},
+ headers=headers,
+ timeout=REQUEST_TIMEOUT,
+ params={"project_code": "FOLDING_DEV"},
+ )
+
+
+@pytest.mark.parametrize("batch_file", ["batch.json", "batch.csv"])
+def test_batch_prediction_from_file_pass(mock_post: mock.Mock, batch_file: str, headers: dict[str, str]):
+ """Test batch prediction pass."""
+
+
+ batch_jobs_file = Path(__file__).parent / f"data/{batch_file}"
+ custom_files = [
+ batch_jobs_file.parent / "1agw.cif",
+ batch_jobs_file.parent / "1agz.cif",
+ batch_jobs_file.parent / "1agb_A.sto",
+ batch_jobs_file.parent / "1agb_B.sto",
+ batch_jobs_file.parent / "6m0j_A.sto",
+ batch_jobs_file.parent / "6m0j_B.sto",
+ ]
+ for f in custom_files:
+ f.touch()
+
+ mocked_local_to_uploaded = {}
+
+ for local in custom_files:
+ mocked_local_to_uploaded[str(local.name)] = f"gs://bucket/{local.name}"
+ with mock.patch(
+ "folding_studio.api_call.predict.batch_predict.PredictRequestCustomFiles.upload",
+ return_value=mocked_local_to_uploaded,
+ ):
+ batch_prediction_from_file(batch_jobs_file, project_code="FOLDING_DEV")
+
+ try:
+ mock_post.assert_called_once_with(
+ API_URL + "batchPredictFromFile",
+ headers=headers,
+ files=[
+ ("batch_jobs_file", mock.ANY),
+ ],
+ timeout=REQUEST_TIMEOUT,
+ params={"project_code": "FOLDING_DEV"},
+ )
+ finally:
+ for f in custom_files:
+ f.unlink()
diff --git a/folding-studio/tests/api_call/test_simple_msa.py b/folding-studio/tests/api_call/test_simple_msa.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c8edbabc434f1453bf622b6f70024936460ba19
--- /dev/null
+++ b/folding-studio/tests/api_call/test_simple_msa.py
@@ -0,0 +1,57 @@
+"""Test simple msa."""
+
+from pathlib import Path
+from unittest import mock
+
+import pytest
+from folding_studio.api_call.msa import simple_msa
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+from folding_studio.utils.data_model import MSARequestParams
+
+
+@pytest.fixture(autouse=True)
+def mock_post():
+ post_mock = mock.Mock()
+ mock_response = mock.MagicMock()
+ mock_response.ok = True
+ post_mock.return_value = mock_response
+ with mock.patch("requests.post", post_mock):
+ yield post_mock
+
+
+@pytest.fixture(autouse=True)
+def mock_get_auth_headers():
+ with mock.patch(
+ "folding_studio.api_call.msa.get_auth_headers",
+ return_value={"Authorization": "Bearer identity_token"},
+ ) as m:
+ yield m
+
+
+def test_simple_msa_pass(
+ tmp_path: Path, mock_post: pytest.FixtureRequest, headers: dict[str, str]
+):
+ """Test simple msa pass."""
+
+ file = tmp_path / "fasta_file.fasta"
+ file.touch()
+
+ params = MSARequestParams(
+ ignore_cache=False,
+ msa_mode="search",
+ )
+
+ simple_msa(
+ file,
+ params,
+ project_code="FOLDING_DEV",
+ )
+
+ mock_post.assert_called_once_with(
+ API_URL + "searchMSA",
+ data=params.model_dump(mode="json"),
+ headers=headers,
+ files=mock.ANY,
+ timeout=REQUEST_TIMEOUT,
+ params={"project_code": "FOLDING_DEV"},
+ )
diff --git a/folding-studio/tests/api_call/test_simple_predict.py b/folding-studio/tests/api_call/test_simple_predict.py
new file mode 100644
index 0000000000000000000000000000000000000000..4cc6873b22d1adda460294e727cf9b67bdb4080b
--- /dev/null
+++ b/folding-studio/tests/api_call/test_simple_predict.py
@@ -0,0 +1,186 @@
+"""Test simple prediction."""
+
+import os
+from pathlib import Path
+from unittest import mock
+
+
+import pytest
+from folding_studio.api_call.predict.simple_predict import (
+ simple_prediction,
+ single_job_prediction,
+)
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+from folding_studio.utils.data_model import (
+ PredictRequestCustomFiles,
+ PredictRequestParams,
+)
+from folding_studio.utils.exceptions import ProjectCodeNotFound
+from folding_studio_data_models import AF2Parameters, OpenFoldParameters
+from folding_studio_data_models.request.folding import FoldingModel
+
+
+@pytest.fixture(autouse=True)
+def mock_post():
+ post_mock = mock.Mock()
+ mock_response = mock.MagicMock()
+ mock_response.ok = True
+ post_mock.return_value = mock_response
+ with mock.patch("requests.post", post_mock):
+ yield post_mock
+
+@pytest.fixture(autouse=True)
+def mock_get_auth_headers():
+ with mock.patch(
+ "folding_studio.api_call.predict.simple_predict.get_auth_headers", return_value={'Authorization': 'Bearer identity_token'}
+ ) as m:
+ yield m
+
+def test_simple_prediction_pass(
+ tmp_path: Path, mock_post: pytest.FixtureRequest, folding_model: FoldingModel, headers: dict[str, str]
+):
+ """Test simple prediction pass."""
+
+ file = tmp_path / "fasta_file.fasta"
+ file.touch()
+
+ params = PredictRequestParams(
+ ignore_cache=False,
+ template_mode="search",
+ custom_template_ids=[],
+ msa_mode="search",
+ max_msa_clusters=-1,
+ max_extra_msa=-1,
+ gap_trick=False,
+ num_recycle=3,
+ random_seed=0,
+ model_subset=[1, 3, 4],
+ )
+ custom_files = PredictRequestCustomFiles(templates=[], msas=[])
+
+ simple_prediction(
+ file,
+ folding_model,
+ params,
+ custom_files,
+ project_code="FOLDING_DEV",
+ )
+
+ params = params.model_dump(mode="json")
+ params.update(
+ {
+ "folding_model": folding_model,
+ "custom_msa_files": [],
+ "custom_template_files": [],
+ "initial_guess_file": None,
+ "templates_masks_file": None,
+ }
+ )
+ mock_post.assert_called_once_with(
+ API_URL + "predict",
+ data=params,
+ headers=headers,
+ files=mock.ANY,
+ timeout=REQUEST_TIMEOUT,
+ params={"project_code": "FOLDING_DEV"},
+ )
+
+
+def test_simple_prediction_fail_because_no_project_code(
+ remove_project_code_from_env_var, tmp_path: Path
+):
+ """Test simple prediction fails due to unset project code."""
+
+ file = tmp_path / "fasta_file.fasta"
+ file.touch()
+
+ assert os.environ.get("FOLDING_PROJECT_CODE") is None
+ params = PredictRequestParams(
+ ignore_cache=False,
+ template_mode="search",
+ custom_template_ids=[],
+ msa_mode="search",
+ max_msa_clusters=-1,
+ max_extra_msa=-1,
+ gap_trick=False,
+ num_recycle=3,
+ random_seed=0,
+ model_subset=[1, 3, 4],
+ )
+ custom_files = PredictRequestCustomFiles(templates=[], msas=[])
+
+ with pytest.raises(ProjectCodeNotFound):
+ simple_prediction(
+ file,
+ FoldingModel.AF2,
+ params,
+ custom_files,
+ )
+
+
+def test_single_job_prediction_pass(
+ tmp_path: Path, mock_post: pytest.FixtureRequest, folding_model: FoldingModel, headers: dict[str, str]
+):
+ """Test single job prediction pass."""
+
+ file = tmp_path / "fasta_file.fasta"
+ file.touch()
+
+ parameters = (
+ OpenFoldParameters()
+ if folding_model == FoldingModel.OPENFOLD
+ else AF2Parameters()
+ )
+
+ single_job_prediction(
+ fasta_file=file,
+ parameters=parameters,
+ project_code="FOLDING_DEV",
+ )
+
+ params = parameters.model_dump(mode="json")
+ params.update(
+ {
+ "folding_model": folding_model.value,
+ "custom_msa_files": [],
+ "custom_template_ids": [],
+ "custom_template_files": [],
+ "initial_guess_file": None,
+ "templates_masks_file": None,
+ "ignore_cache": False,
+ }
+ )
+ mock_post.assert_called_once_with(
+ API_URL + "predict",
+ data=params,
+ headers=headers,
+ files=mock.ANY,
+ timeout=REQUEST_TIMEOUT,
+ params={"project_code": "FOLDING_DEV"},
+ )
+
+
+def test_single_job_prediction_handle_deprecated_af2_parameters(
+ tmp_path: Path, mock_post: pytest.FixtureRequest, headers: dict[str, str]
+):
+ """Test handling of the deprecated af2_parameters argument."""
+
+ file = tmp_path / "fasta_file.fasta"
+ file.touch()
+
+ parameters = AF2Parameters()
+
+ with pytest.deprecated_call():
+ single_job_prediction(
+ fasta_file=file,
+ af2_parameters=parameters,
+ project_code="FOLDING_DEV",
+ )
+
+ with pytest.raises(ValueError):
+ single_job_prediction(
+ fasta_file=file,
+ af2_parameters=parameters,
+ parameters=parameters,
+ project_code="FOLDING_DEV",
+ )
diff --git a/folding-studio/tests/api_call/test_upload_custom_files.py b/folding-studio/tests/api_call/test_upload_custom_files.py
new file mode 100644
index 0000000000000000000000000000000000000000..14867c1e118e5e0515558a363059c6aa922c793c
--- /dev/null
+++ b/folding-studio/tests/api_call/test_upload_custom_files.py
@@ -0,0 +1,106 @@
+"""Test upload custom files."""
+
+import datetime
+from unittest import mock
+from unittest.mock import patch
+import pytest
+from pathlib import Path
+import requests_mock
+from folding_studio_data_models import CustomFileType
+from folding_studio.api_call.upload_custom_files import upload_custom_files
+from folding_studio.config import API_URL
+
# Frozen timestamp (and its blob-name formatting) so time-dependent names
# produced during upload tests are deterministic.
fixed_date = datetime.datetime(2024, 11, 18, 12, 34, 56, 123456)
formatted_fixed_date = fixed_date.strftime("%Y%m%d%H%M%S%f")
+
+
+# Helper to mock the signed URL request
def mock_signed_url_request(
    mocker, filename: str, signed_url: str, file_type: str, bucket: str
):
    """Register a mocked signed-URL API response for `filename` on `mocker`.

    Returns the matcher registered on the requests_mock mocker.
    """
    # rsplit keeps multi-dot names ("file_1.aligned.pqt") intact instead of
    # raising on unpack: only the final suffix is treated as the extension.
    file_base, ext = str(filename).rsplit(".", 1)
    # The signed-URL endpoint is keyed by blob name and file type.
    url = f"{API_URL}getUploadSignedURL?blob_name={filename}&file_type={file_type}"
    return mocker.get(
        url=url,
        json={
            "signed_url": signed_url,
            "destination_file": f"gs://{bucket}/{file_base}_d41d8cd9.{ext}",
            "destination_bucket": f"gs://{bucket}",
        },
    )
+
+
+# Parameterized test to handle different file types and buckets
@pytest.mark.parametrize(
    "filenames, bucket, file_type_enum",
    [
        (
            ["file_1.cif", "file_2.cif", "file_3.cif"],
            "custom_templates",
            CustomFileType.TEMPLATE,
        ),
        (
            ["file_1.sto", "file_2.a3m"],
            "custom_msas",
            CustomFileType.MSA,
        ),
        (
            ["file_1.cif", "file_2.cif"],
            "initial_guess",
            CustomFileType.INITIAL_GUESS,
        ),
        (
            ["file_1.json", "file_2.json"],
            "templates_masks",
            CustomFileType.TEMPLATE_MASK,
        ),
        (
            ["file_1.aligned.pqt", "file_2.aligned.pqt"],
            "custom_msas",
            CustomFileType.MSA
        ),
    ],
)
def test_upload_custom_files_pass(
    tmp_path: Path,
    filenames: list,
    bucket: str,
    file_type_enum: CustomFileType,
    headers: dict[str, str]
):
    """Test upload custom files pass for different file types and buckets."""

    # NOTE(review): `token` appears unused in this test — candidate for removal.
    token = "identity_token"
    file_paths = [tmp_path / filename for filename in filenames]

    # Create the files
    for file in file_paths:
        file.touch()

    signed_url = "https://storage.googleapis.com/some_url"

    with patch(
        "folding_studio.api_call.upload_custom_files._get_blob_zip_name"
    ) as mock_blob_name, requests_mock.Mocker() as m:
        # Mock the signed URL requests for each file
        # NOTE(review): every iteration registers the same blob name / URL,
        # so a single registration would suffice — kept per-file for clarity.
        for _ in filenames:
            mock_blob_name.return_value = "mock_file.zip"
            mock_signed_url_request(
                mocker=m,
                filename="mock_file.zip",
                signed_url=signed_url,
                file_type=file_type_enum.value,
                bucket=bucket,
            )

        # Mock the PUT request to upload the file
        m.put(signed_url, status_code=200)
        m.post(API_URL + "unzipFileInBucket", status_code=200)

        # Call the function being tested
        output = upload_custom_files(headers, file_paths, file_type=file_type_enum)

        # Assertions
        # Each local path maps to a gs:// destination inside the expected bucket.
        assert set(output.keys()) == {str(f) for f in file_paths}
        assert all(f"gs://{bucket}/" in val for val in output.values())
diff --git a/folding-studio/tests/commands/__init__.py b/folding-studio/tests/commands/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/folding-studio/tests/commands/conftest.py b/folding-studio/tests/commands/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..610d05e9552754836f7febd58334829cf42d3271
--- /dev/null
+++ b/folding-studio/tests/commands/conftest.py
@@ -0,0 +1,33 @@
+from pathlib import Path
+
+import pytest
+
+
@pytest.fixture(scope="module")
def tmp_files(tmp_directory: Path, tmp_files: dict):
    """Extend the outer `tmp_files` fixture with files used by command tests.

    Shadows the same-named fixture from a higher-level conftest: the parent
    mapping is updated in place with empty placeholder files and yielded back.
    """
    files = {
        "invalid_template": tmp_directory / "template.txt",
        "valid_template": tmp_directory / "template.cif",
        "valid_template_2": tmp_directory / "template_2.cif",
        "valid_template_3": tmp_directory / "7si2_chothia_CGF.cif",
        "valid_template_4": tmp_directory / "7xd2_chothia_CGF.cif",
        "invalid_msa": tmp_directory / "msa.txt",
        "valid_msa_sto": tmp_directory / "msa.sto",
        "valid_msa_a3m": tmp_directory / "msa.a3m",
        "invalid_initial_guess": tmp_directory / "ig.txt",
        "valid_initial_guess": tmp_directory / "ig.cif",
        "invalid_templates_masks": tmp_directory / "ab_ag_masks_nok.txt",
        "valid_batch_file_csv": tmp_directory / "batch_config.csv",
        "valid_batch_file_json": tmp_directory / "batch_config.json",
        "metadata_file": tmp_directory / "submission_metadata.json",
    }

    # Materialize every placeholder as an empty file on disk.
    for path in files.values():
        path.touch()
    tmp_files.update(files)
    yield tmp_files
+
+
@pytest.fixture()
def valid_templates_masks():
    """Path to a known-good templates-masks JSON checked into the test data dir."""
    data_dir = Path(__file__).parent / "data"
    return data_dir / "ab_ag_masks_ok.json"
diff --git a/folding-studio/tests/commands/data/ab_ag_masks_ok.json b/folding-studio/tests/commands/data/ab_ag_masks_ok.json
new file mode 100644
index 0000000000000000000000000000000000000000..cb2d35967726d3c751ef814fcaa7e21a70caa495
--- /dev/null
+++ b/folding-studio/tests/commands/data/ab_ag_masks_ok.json
@@ -0,0 +1,28 @@
+{
+ "templates_masks": [
+ {
+ "template_name": "7si2_chothia_CGF.cif",
+ "masks": [
+ "----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
+ ]
+ },
+ {
+ "template_name": "7si2_chothia_CGF.cif",
+ "masks": [
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "------------------------------------------------------------------------------------------------------------",
+ "------------------------------------------------------------------------------------------------------------------------"
+ ]
+ },
+ {
+ "template_name": "7xd2_chothia_CGF.cif",
+ "masks": [
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX--XXXXXX----------XX--XXXXXXXXXXXXXXXXXXXXX-XXXXXXXXXXXXXX---XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-----XXXXXXXXXXXX",
+ "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXXXXXXXXXXXX----XX--X-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-XX----XXXXXXXXXXXXXXX"
+ ]
+ }
+ ]
+}
diff --git a/folding-studio/tests/commands/predict/conftest.py b/folding-studio/tests/commands/predict/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..e7b08b969cca4d123a5ff71b58e591afc7aae583
--- /dev/null
+++ b/folding-studio/tests/commands/predict/conftest.py
@@ -0,0 +1,23 @@
+from unittest import mock
+
+import pytest
+from folding_studio.client import Response
+
+
@pytest.fixture()
def mock_send_request():
    """Patch Client.send_request to return a canned successful Response."""
    with mock.patch(
        "folding_studio.client.Client.send_request",
        return_value=Response(
            output_signed_url="url", confidence_data={"metric": "value"}
        ),
    ) as m:
        yield m
+
+
@pytest.fixture()
def mock_download_results():
    """Patch Response.download_results so no real file download occurs."""
    with mock.patch(
        "folding_studio.client.Response.download_results",
    ) as m:
        yield m
diff --git a/folding-studio/tests/commands/predict/test_af2.py b/folding-studio/tests/commands/predict/test_af2.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e2dd6edda9bcd830438f02f24b3179f47274b2a
--- /dev/null
+++ b/folding-studio/tests/commands/predict/test_af2.py
@@ -0,0 +1,568 @@
+import os
+from unittest import mock
+
+import pytest
+from folding_studio.cli import app
+from folding_studio.utils.data_model import (
+ PredictRequestCustomFiles,
+ PredictRequestParams,
+)
+from folding_studio.utils.headers import get_auth_headers
+from folding_studio_data_models import (
+ AF2Parameters,
+ BatchMessageStatus,
+ BatchPublication,
+ FeatureMode,
+ FoldingModel,
+ Message,
+ MessageStatus,
+ Publication,
+)
+from typer.testing import CliRunner
+
runner = CliRunner()  # Shared Typer CLI runner for all tests in this module.
+
+
@pytest.fixture(autouse=True)
def mock_get_auth_headers(request):
    """Patch get_auth_headers inside simple_predict with canned credentials.

    Tests marked `apikeytest` run with FOLDING_API_KEY set; the variable is
    removed again on teardown so it cannot leak into later tests.
    """
    uses_api_key = "apikeytest" in request.keywords
    if uses_api_key:
        os.environ["FOLDING_API_KEY"] = "MY_KEY"
        return_value = get_auth_headers()
    else:
        return_value = {"Authorization": "Bearer identity_token"}
    try:
        with mock.patch(
            "folding_studio.api_call.predict.simple_predict.get_auth_headers",
            return_value=return_value,
        ) as m:
            yield m
    finally:
        # Clean up the env var set above; without this the key bleeds into
        # tests that assume it is unset.
        if uses_api_key:
            os.environ.pop("FOLDING_API_KEY", None)
+
+
@pytest.fixture(autouse=True)
def mock_batch_prediction_from_file():
    """Patch af2_predict.batch_prediction_from_file with a published 3-item batch."""
    batch_pub = BatchPublication(
        publications=[
            Publication(
                folding_model=FoldingModel.AF2,
                message=Message(
                    pipeline_name="alphafold_inference_pipeline",
                    user_id="default-user",
                    project_code="default-project",
                    parameters=AF2Parameters(),
                    experiment_id="dummy-experiment",
                    model_preset="monomer",
                    fasta_file_name=f"monomer_{idx}.fasta",
                    ignore_cache=False,
                ),
                status=MessageStatus.PUBLISHED,
            )
            for idx in range(3)
        ],
        batch_id="batch_id",
        cached_publications=[],
        status=BatchMessageStatus.PUBLISHED,
        cached=False,
    )
    with mock.patch(
        "folding_studio.commands.predict.af2_predict.batch_prediction_from_file",
        return_value=batch_pub.model_dump(mode="json"),
    ) as m:
        yield m


@pytest.fixture()
def mock_batch_prediction():
    """Patch af2_predict.batch_prediction with a published 3-item batch.

    NOTE(review): payload is identical to mock_batch_prediction_from_file —
    a shared builder helper would remove the duplication.
    """
    batch_pub = BatchPublication(
        publications=[
            Publication(
                folding_model=FoldingModel.AF2,
                message=Message(
                    pipeline_name="alphafold_inference_pipeline",
                    user_id="default-user",
                    project_code="default-project",
                    parameters=AF2Parameters(),
                    experiment_id="dummy-experiment",
                    model_preset="monomer",
                    fasta_file_name=f"monomer_{idx}.fasta",
                    ignore_cache=False,
                ),
                status=MessageStatus.PUBLISHED,
            )
            for idx in range(3)
        ],
        batch_id="batch_id",
        cached_publications=[],
        status=BatchMessageStatus.PUBLISHED,
        cached=False,
    )

    with mock.patch(
        "folding_studio.commands.predict.af2_predict.batch_prediction",
        return_value=batch_pub.model_dump(mode="json"),
    ) as m:
        yield m
+
+
@pytest.fixture()
def mock_simple_prediction(request):
    """Patch af2_predict.simple_prediction with a canned Publication.

    Supports indirect parametrization: several tests parametrize this fixture
    with a MessageStatus via `indirect=True`; that status is now honored
    (previously the parameter was silently ignored and every run used
    PUBLISHED). Non-parametrized uses default to PUBLISHED.
    """
    status = getattr(request, "param", MessageStatus.PUBLISHED)
    pub = Publication(
        folding_model=FoldingModel.AF2,
        message=Message(
            pipeline_name="alphafold_inference_pipeline",
            user_id="default-user",
            project_code="default-project",
            parameters=AF2Parameters(),
            experiment_id="dummy-experiment",
            model_preset="monomer",
            fasta_file_name="monomer.fasta",
            ignore_cache=False,
        ),
        status=status,
    )

    with mock.patch(
        "folding_studio.commands.predict.af2_predict.simple_prediction",
        return_value=pub.model_dump(mode="json"),
    ) as m:
        yield m
+
+
@pytest.fixture()
def default_params():
    """PredictRequestParams matching the CLI defaults when only --project-code is given."""
    yield PredictRequestParams(
        ignore_cache=False,
        template_mode=FeatureMode.SEARCH,
        custom_template_ids=[],
        msa_mode=FeatureMode.SEARCH,
        max_msa_clusters=-1,  # -1 presumably means "no cap" — TODO confirm
        max_extra_msa=-1,
        gap_trick=False,
        num_recycle=3,
        random_seed=0,
        model_subset=[],
        project_code="FOLDING_DEV",
    )
+
+
def test_predict_with_unsupported_file_fails(tmp_files):
    """A source file with an unsupported extension is rejected by CLI validation."""
    cli_args = ["predict", "af2", str(tmp_files["invalid_source"])]
    result = runner.invoke(app, cli_args)
    assert result.exit_code == 2
    assert "Invalid value for 'SOURCE'" in result.stdout
+
+
def test_predict_with_directory_containing_unsupported_file_fails(tmp_files):
    """A directory holding any unsupported file is rejected by CLI validation."""
    cli_args = ["predict", "af2", str(tmp_files["invalid_dir"])]
    result = runner.invoke(app, cli_args)
    assert result.exit_code == 2
    assert "Invalid value for 'SOURCE'" in result.stdout
+
+
def test_predict_with_empty_directory_fails(tmp_files):
    """An empty source directory is rejected by CLI validation."""
    cli_args = ["predict", "af2", str(tmp_files["empty_dir"])]
    result = runner.invoke(app, cli_args)
    assert result.exit_code == 2
    assert "Invalid value for 'SOURCE'" in result.stdout
+
+
def test_predict_with_unsupported_custom_template_file_fails(tmp_files):
    """A --custom-template with a bad extension is rejected by CLI validation."""
    cli_args = [
        "predict",
        "af2",
        str(tmp_files["monomer_fasta"]),
        "--custom-template",
        str(tmp_files["invalid_template"]),
    ]
    result = runner.invoke(app, cli_args)
    assert result.exit_code == 2
    assert "Invalid value for '--custom-template'" in result.stdout
+
+
def test_predict_with_unsupported_custom_msa_file_fails(tmp_files):
    """A --custom-msa with a bad extension is rejected by CLI validation."""
    cli_args = [
        "predict",
        "af2",
        str(tmp_files["monomer_fasta"]),
        "--custom-msa",
        str(tmp_files["invalid_msa"]),
    ]
    result = runner.invoke(app, cli_args)
    assert result.exit_code == 2
    assert "Invalid value for '--custom-msa'" in result.stdout
+
+
@pytest.mark.parametrize(
    "mock_simple_prediction",
    (
        MessageStatus.PUBLISHED,
        MessageStatus.NOT_PUBLISHED_DONE,
        MessageStatus.NOT_PUBLISHED_PENDING,
    ),
    indirect=True,
)
def test_predict_with_fasta_file_pass(
    mock_simple_prediction: mock.Mock,
    tmp_files,
    default_params,
):
    """A plain FASTA prediction forwards default params to simple_prediction."""
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files["monomer_fasta"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )
    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(templates=[], msas=[])

    mock_simple_prediction.assert_called_once_with(
        file=tmp_files["monomer_fasta"],
        folding_model=FoldingModel.AF2,
        params=default_params,
        custom_files=custom_files,
        project_code="FOLDING_DEV",
    )


@pytest.mark.parametrize(
    "mock_simple_prediction",
    (
        MessageStatus.PUBLISHED,
        MessageStatus.NOT_PUBLISHED_DONE,
        MessageStatus.NOT_PUBLISHED_PENDING,
    ),
    indirect=True,
)
def test_predict_with_fasta_file_pass_with_project_code_from_env_var(
    mock_simple_prediction: mock.Mock, tmp_files, default_params
):
    """Omitting --project-code falls back to the FOLDING_PROJECT_CODE env var."""
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files["monomer_fasta"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            # Here we deliberately leave out the
            # "--project-code" option to check that
            # env variable is correctly used
        ],
    )
    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(templates=[], msas=[])

    mock_simple_prediction.assert_called_once_with(
        file=tmp_files["monomer_fasta"],
        params=default_params,
        custom_files=custom_files,
        folding_model=FoldingModel.AF2,
        project_code="FOLDING_DEV",
    )


@pytest.mark.parametrize(
    "mock_simple_prediction",
    (
        MessageStatus.PUBLISHED,
        MessageStatus.NOT_PUBLISHED_DONE,
        MessageStatus.NOT_PUBLISHED_PENDING,
    ),
    indirect=True,
)
@pytest.mark.apikeytest
def test_predict_with_fasta_file_pass_with_jwt(
    mock_simple_prediction: mock.Mock,
    tmp_files,
    default_params,
    remove_api_key_from_env_var,
):
    """With no API key in the env, prediction still succeeds via JWT auth."""
    assert os.environ.get("FOLDING_API_KEY") is None
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files["monomer_fasta"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
        ],
    )
    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(templates=[], msas=[])

    mock_simple_prediction.assert_called_once_with(
        file=tmp_files["monomer_fasta"],
        params=default_params,
        custom_files=custom_files,
        folding_model=FoldingModel.AF2,
        project_code="FOLDING_DEV",
    )
+
+
def test_predict_unset_project_code_fails(
    tmp_files,
    remove_project_code_from_env_var,
):
    """Without --project-code and without the env var, the command fails validation."""
    assert os.environ.get("FOLDING_PROJECT_CODE") is None
    cli_args = ["predict", "af2", str(tmp_files["monomer_fasta"])]
    result = runner.invoke(app, cli_args)
    assert result.exit_code == 2, result.output
+
+
@pytest.mark.parametrize(
    "mock_simple_prediction",
    (
        MessageStatus.PUBLISHED,
        MessageStatus.NOT_PUBLISHED_DONE,
        MessageStatus.NOT_PUBLISHED_PENDING,
    ),
    indirect=True,
)
def test_predict_with_fasta_file_with_initial_guess_pass(
    mock_simple_prediction: mock.Mock, tmp_files, default_params
):
    """--initial-guess-file is forwarded through PredictRequestCustomFiles."""
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files["monomer_fasta"]),
            "--initial-guess-file",
            str(tmp_files["valid_initial_guess"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )
    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(
        templates=[], msas=[], initial_guess_files=[tmp_files["valid_initial_guess"]]
    )

    mock_simple_prediction.assert_called_once_with(
        file=tmp_files["monomer_fasta"],
        folding_model=FoldingModel.AF2,
        params=default_params,
        custom_files=custom_files,
        project_code="FOLDING_DEV",
    )


@pytest.mark.parametrize(
    "mock_simple_prediction",
    (
        MessageStatus.PUBLISHED,
        MessageStatus.NOT_PUBLISHED_DONE,
        MessageStatus.NOT_PUBLISHED_PENDING,
    ),
    indirect=True,
)
def test_predict_with_fasta_file_with_templates_masks_pass(
    mock_simple_prediction: mock.Mock, tmp_files, default_params, valid_templates_masks
):
    """Masks file plus the templates it references submit successfully."""
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files["monomer_fasta"]),
            "--templates-masks-file",
            str(valid_templates_masks),
            "--custom-template",
            str(tmp_files["valid_template_3"]),
            "--custom-template",
            str(tmp_files["valid_template_4"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )
    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(
        templates=[tmp_files["valid_template_3"], tmp_files["valid_template_4"]],
        msas=[],
        templates_masks_files=[valid_templates_masks],
    )

    mock_simple_prediction.assert_called_once_with(
        file=tmp_files["monomer_fasta"],
        folding_model=FoldingModel.AF2,
        params=default_params,
        custom_files=custom_files,
        project_code="FOLDING_DEV",
    )


def test_predict_with_fasta_file_with_templates_masks_fail(
    tmp_files, valid_templates_masks
):
    """A masks file referencing a template not supplied on the CLI must fail."""
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files["monomer_fasta"]),
            "--templates-masks-file",
            str(valid_templates_masks),
            "--custom-template",
            str(tmp_files["valid_template_3"]),
            "--custom-template",
            # template_2 is not referenced by the masks file — mismatch expected
            str(tmp_files["valid_template_2"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )
    assert result.exit_code == 1, result.output
    assert "Check your input command." in str(result.exception)
+
+
def test_predict_with_fasta_file_with_multi_seed_pass(
    mock_batch_prediction: mock.Mock,
    tmp_files,
    default_params,
):
    """--num-seed routes a single FASTA through the batch-prediction path."""
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files["monomer_fasta"]),
            "--num-seed",
            5,
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )

    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(templates=[], msas=[])

    mock_batch_prediction.assert_called_once_with(
        files=[tmp_files["monomer_fasta"]],
        folding_model=FoldingModel.AF2,
        params=default_params,
        custom_files=custom_files,
        num_seed=5,
        project_code="FOLDING_DEV",
    )


def test_predict_with_fasta_file_with_initial_guess_with_multi_seed_pass(
    mock_batch_prediction: mock.Mock, tmp_files, default_params
):
    """--num-seed together with --initial-guess-file batches with the guess attached."""
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files["monomer_fasta"]),
            "--num-seed",
            5,
            "--initial-guess-file",
            str(tmp_files["valid_initial_guess"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )

    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(
        templates=[], msas=[], initial_guess_files=[tmp_files["valid_initial_guess"]]
    )

    mock_batch_prediction.assert_called_once_with(
        files=[tmp_files["monomer_fasta"]],
        folding_model=FoldingModel.AF2,
        params=default_params,
        custom_files=custom_files,
        num_seed=5,
        project_code="FOLDING_DEV",
    )


def test_predict_with_directory_pass(
    mock_batch_prediction: mock.Mock,
    tmp_files,
    default_params,
):
    """A directory source batches every contained file (order-insensitive)."""
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files["valid_dir"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )

    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(templates=[], msas=[])

    mock_batch_prediction.assert_called_once_with(
        files=mock.ANY,
        params=default_params,
        folding_model=FoldingModel.AF2,
        custom_files=custom_files,
        num_seed=None,
        project_code="FOLDING_DEV",
    )
    # Directory iteration order is unspecified, so compare sorted lists.
    _, kwargs = mock_batch_prediction.call_args
    assert sorted(kwargs["files"]) == sorted(
        list(tmp_files["valid_dir"].iterdir()),
    )
+
+
@pytest.mark.parametrize("file", ["valid_batch_file_json", "valid_batch_file_csv"])
def test_predict_with_json_batch_file_pass(
    file, mock_batch_prediction_from_file: mock.Mock, tmp_files
):
    """A JSON or CSV batch-config source dispatches to batch_prediction_from_file."""
    result = runner.invoke(
        app,
        [
            "predict",
            "af2",
            str(tmp_files[file]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )

    assert result.exit_code == 0, result.output

    mock_batch_prediction_from_file.assert_called_once_with(
        file=tmp_files[file],
        project_code="FOLDING_DEV",
    )
diff --git a/folding-studio/tests/commands/predict/test_boltz.py b/folding-studio/tests/commands/predict/test_boltz.py
new file mode 100644
index 0000000000000000000000000000000000000000..5fac575693cb3deb875ca0453c936393a0bc6cfb
--- /dev/null
+++ b/folding-studio/tests/commands/predict/test_boltz.py
@@ -0,0 +1,131 @@
+from unittest import mock
+
+import pytest
+from folding_studio.cli import app
+from folding_studio.query import BoltzQuery
+from typer.testing import CliRunner
+
runner = CliRunner()  # Shared Typer CLI runner for all tests in this module.
+
+
@pytest.fixture
def inference_parameters():
    """Provides the inference parameters dictionary to tests.

    NOTE(review): these mirror the Boltz CLI defaults — keep in sync with the
    command's option defaults.
    """
    return {
        "recycling_steps": 3,
        "sampling_steps": 200,
        "diffusion_samples": 1,
        "step_scale": 1.638,
        "output_format": "mmcif",
        "num_workers": 2,
        "msa_pairing_strategy": "greedy",
        "write_full_pae": False,
        "write_full_pde": False,
        "seed": 0,
    }
+
+
@pytest.fixture()
def mock_save_inference_parameters():
    """Patch Query.save_parameters so no inference-parameters file is written."""
    with mock.patch("folding_studio.query.Query.save_parameters") as m:
        yield m
+
+
def test_boltz_fold_with_valid_fasta_file(
    mock_send_request,
    mock_download_results,
    mock_save_inference_parameters,
    tmp_files,
    inference_parameters,
):
    """
    Running the command with a valid FASTA file should
    process successfully and generate the correct payload.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "boltz",
            str(tmp_files["monomer_fasta"]),
            "--project-code",
            "test_project",
            "--output",
            tmp_files["output_dir"],
        ],
    )
    assert result.exit_code == 0, result.output

    # Create the expected query using the same parameters as defaults.
    expected_query = BoltzQuery.from_file(
        path=str(tmp_files["monomer_fasta"]), **inference_parameters
    )
    mock_send_request.assert_called_once()
    # The query object is the first positional argument of send_request.
    actual_query = mock_send_request.call_args[0][0]
    # Compare payload keys
    payload = actual_query.payload
    expected = expected_query.payload
    for key in ["fasta_files", "yaml_files", "parameters"]:
        assert payload[key] == expected[key]

    mock_save_inference_parameters.assert_called_once()
    mock_download_results.assert_called_once()


def test_boltz_fold_with_valid_fasta_directory(
    mock_send_request,
    mock_download_results,
    mock_save_inference_parameters,
    tmp_files,
    inference_parameters,
):
    """
    Running the command with a directory containing valid FASTA files should
    process successfully and combine all FASTA files into the payload.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "boltz",
            str(tmp_files["valid_dir"]),
            "--project-code",
            "test_project",
            "--output",
            tmp_files["output_dir"],
        ],
    )
    assert result.exit_code == 0, result.output

    expected_query = BoltzQuery.from_directory(
        path=tmp_files["valid_dir"], **inference_parameters
    )
    mock_send_request.assert_called_once()
    actual_query = mock_send_request.call_args[0][0]
    payload = actual_query.payload
    expected = expected_query.payload
    # Only the combined FASTA content is compared here; the other payload
    # fields are covered by the single-file test above.
    assert payload["fasta_files"] == expected["fasta_files"]

    mock_save_inference_parameters.assert_called_once()
    mock_download_results.assert_called_once()
+
+
def test_boltz_fold_with_empty_directory(tmp_files):
    """An empty source directory must fail: there are no FASTA files to fold."""
    cli_args = [
        "predict",
        "boltz",
        str(tmp_files["empty_dir"]),
        "--project-code",
        "test_project",
        "--output",
        tmp_files["output_dir"],
    ]
    result = runner.invoke(app, cli_args)
    assert result.exit_code != 0, result.output
diff --git a/folding-studio/tests/commands/predict/test_chai.py b/folding-studio/tests/commands/predict/test_chai.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ec32397d6af69394e25195fd2f8e0c5062a2be5
--- /dev/null
+++ b/folding-studio/tests/commands/predict/test_chai.py
@@ -0,0 +1,201 @@
+from pathlib import Path
+
+import pytest
+from folding_studio.cli import app
+from folding_studio.query import ChaiQuery
+from typer.testing import CliRunner
+
runner = CliRunner()  # Shared Typer CLI runner for all tests in this module.
+
+
# Two example Chai contact restraints (heavy and light chain) in CSV form.
# NOTE(review): the literal starts with a newline; the tests compare against
# .read_text().strip(), so the leading blank line is tolerated — confirm the
# CLI itself accepts it too.
RESTRAINTS_CSV_CONTENT = """
chainA,res_idxA,chainB,res_idxB,connection_type,confidence,min_distance_angstrom,max_distance_angstrom,comment,restraint_id
A,C387,B,Y101,contact,1.0,0.0,5.5,protein-heavy,restraint_1
C,I32,A,S483,contact,1.0,0.0,5.5,protein-light,restraint_2
"""
+
+
@pytest.fixture(scope="module")
def tmp_files(tmp_directory: Path, tmp_files: dict):
    """Extend the shared `tmp_files` fixture with a valid restraints CSV.

    Shadows the outer fixture of the same name and yields the updated mapping.
    """

    # A valid restraints CSV file
    valid_restraints = tmp_directory / "example_restraints.csv"
    valid_restraints.write_text(RESTRAINTS_CSV_CONTENT)

    tmp_files["valid_restraints"] = valid_restraints

    yield tmp_files
+
+
def test_chai_fold_with_invalid_source(tmp_files):
    """An unsupported source file type must make the command fail."""
    cli_args = [
        "predict",
        "chai",
        str(tmp_files["invalid_source"]),
        "--project-code",
        "TEST_PROJECT",
    ]
    result = runner.invoke(app, cli_args)
    assert result.exit_code != 0
+
+
def test_chai_fold_with_valid_fasta_file(
    mock_send_request, mock_download_results, tmp_files
):
    """
    Running the command with a valid FASTA file (without restraints) should
    process successfully and generate the correct payload.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "chai",
            str(tmp_files["monomer_fasta"]),
            "--project-code",
            "TEST_PROJECT",
            "--output",
            tmp_files["output_dir"],
        ],
    )
    assert result.exit_code == 0, result.output

    # Expected query built with the CLI's default Chai inference parameters.
    expected_query = ChaiQuery.from_file(
        path=str(tmp_files["monomer_fasta"]),
        use_msa_server=True,
        use_templates_server=False,
        num_trunk_recycles=3,
        seed=0,
        num_diffn_timesteps=200,
        restraints=None,
        recycle_msa_subsample=0,
        num_trunk_samples=1,
    )
    mock_send_request.assert_called_once()
    # The query object is the first positional argument of send_request.
    actual_query = mock_send_request.call_args[0][0]
    payload = actual_query.payload
    expected = expected_query.payload
    assert payload == expected

    mock_download_results.assert_called_once()


def test_chai_fold_with_valid_fasta_and_restraints(
    mock_send_request, mock_download_results, tmp_files
):
    """
    Running the command with a valid FASTA file and a valid restraints CSV file
    should include the restraints content in the payload.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "chai",
            str(tmp_files["monomer_fasta"]),
            "--project-code",
            "TEST_PROJECT",
            "--restraints",
            str(tmp_files["valid_restraints"]),
            "--output",
            tmp_files["output_dir"],
        ],
    )
    assert result.exit_code == 0, result.output

    # The payload carries the stripped file content, not the path.
    restraints_content = tmp_files["valid_restraints"].read_text().strip()

    expected_query = ChaiQuery.from_file(
        path=str(tmp_files["monomer_fasta"]),
        use_msa_server=True,
        use_templates_server=False,
        num_trunk_recycles=3,
        seed=0,
        num_diffn_timesteps=200,
        restraints=str(tmp_files["valid_restraints"]),
        recycle_msa_subsample=0,
        num_trunk_samples=1,
    )
    mock_send_request.assert_called_once()
    actual_query = mock_send_request.call_args[0][0]
    payload = actual_query.payload
    expected = expected_query.payload
    assert payload == expected
    assert payload["restraints"] == restraints_content

    mock_download_results.assert_called_once()


def test_chai_fold_with_valid_fasta_directory(
    mock_send_request, mock_download_results, tmp_files
):
    """
    Running the command with a directory containing valid FASTA files should
    process successfully and submit a single request for the directory.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "chai",
            str(tmp_files["valid_dir"]),
            "--project-code",
            "TEST_PROJECT",
            "--output",
            tmp_files["output_dir"],
        ],
    )
    assert result.exit_code == 0, result.output

    expected_query = ChaiQuery.from_directory(
        path=str(tmp_files["valid_dir"]),
        use_msa_server=True,
        use_templates_server=False,
        num_trunk_recycles=3,
        seed=0,
        num_diffn_timesteps=200,
        restraints=None,
        recycle_msa_subsample=0,
        num_trunk_samples=1,
    )

    expected_fasta_files = expected_query.payload["fasta_files"]

    # Gather the fasta_files of every submitted request (one per call).
    actual_fasta_files = [
        call_args[0][0].payload["fasta_files"]
        for call_args in mock_send_request.call_args_list
    ]

    # Check that the expected FASTA files match one of the actual request payloads
    assert expected_fasta_files in actual_fasta_files, (
        f"FASTA file {expected_fasta_files} was expected but not found in actual requests."
    )

    mock_download_results.assert_called()
+
+
+
def test_chai_fold_with_empty_directory(tmp_files):
    """An empty source directory must fail: there are no FASTA files to fold."""
    cli_args = [
        "predict",
        "chai",
        str(tmp_files["empty_dir"]),
        "--project-code",
        "TEST_PROJECT",
        "--output",
        tmp_files["output_dir"],
    ]
    result = runner.invoke(app, cli_args)
    assert result.exit_code != 0, result.output
diff --git a/folding-studio/tests/commands/predict/test_openfold.py b/folding-studio/tests/commands/predict/test_openfold.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf3e521c50fc140b2482ecb0242983bab785eb23
--- /dev/null
+++ b/folding-studio/tests/commands/predict/test_openfold.py
@@ -0,0 +1,449 @@
+import os
+from unittest import mock
+
+import pytest
+from folding_studio.cli import app
+from folding_studio.utils.data_model import (
+ PredictRequestCustomFiles,
+ PredictRequestParams,
+)
+from folding_studio.utils.headers import get_auth_headers
+from folding_studio_data_models import (
+ BatchMessageStatus,
+ BatchPublication,
+ FeatureMode,
+ FoldingModel,
+ Message,
+ MessageStatus,
+ OpenFoldParameters,
+ Publication,
+)
+from typer.testing import CliRunner
+
+runner = CliRunner()
+
+
@pytest.fixture(autouse=True)
def mock_get_auth_headers(request):
    """Patch auth-header retrieval for every test in this module.

    Tests marked with the `apikeytest` keyword exercise the real
    `get_auth_headers` path via the FOLDING_API_KEY env var; all other tests
    get canned bearer-token headers.
    """
    if "apikeytest" in request.keywords:
        # NOTE(review): the env var is set without cleanup; the session-scoped
        # `set_env` fixture in conftest.py sets the same value anyway — confirm.
        os.environ["FOLDING_API_KEY"] = "MY_KEY"
        return_value = get_auth_headers()
    else:
        return_value = {"Authorization": "Bearer identity_token"}
    with mock.patch(
        "folding_studio.api_call.predict.simple_predict.get_auth_headers",
        return_value=return_value,
    ) as m:
        yield m
+
+
@pytest.fixture(autouse=True)
def mock_batch_prediction_from_file():
    """Patch `batch_prediction_from_file` to return a canned 3-publication batch.

    The mock yields the JSON-serialized `BatchPublication` so the CLI code
    under test consumes the same shape as a real API response.
    """
    batch_pub = BatchPublication(
        publications=[
            Publication(
                folding_model=FoldingModel.OPENFOLD,
                message=Message(
                    pipeline_name="alphafold_inference_pipeline",
                    user_id="default-user",
                    project_code="default-project",
                    parameters=OpenFoldParameters(),
                    experiment_id="dummy-experiment",
                    model_preset="monomer",
                    fasta_file_name=f"monomer_{idx}.fasta",
                    ignore_cache=False,
                ),
                status=MessageStatus.PUBLISHED,
            )
            for idx in range(3)
        ],
        batch_id="batch_id",
        cached_publications=[],
        status=BatchMessageStatus.PUBLISHED,
        cached=False,
    )
    with mock.patch(
        "folding_studio.commands.predict.openfold_predict.batch_prediction_from_file",
        return_value=batch_pub.model_dump(mode="json"),
    ) as m:
        yield m
+
+
@pytest.fixture()
def mock_batch_prediction():
    """Patch `batch_prediction` to return a canned 3-publication batch.

    Same payload shape as `mock_batch_prediction_from_file`, but opt-in
    (not autouse) so only directory/multi-seed tests request it.
    """
    batch_pub = BatchPublication(
        publications=[
            Publication(
                folding_model=FoldingModel.OPENFOLD,
                message=Message(
                    pipeline_name="alphafold_inference_pipeline",
                    user_id="default-user",
                    project_code="default-project",
                    parameters=OpenFoldParameters(),
                    experiment_id="dummy-experiment",
                    model_preset="monomer",
                    fasta_file_name=f"monomer_{idx}.fasta",
                    ignore_cache=False,
                ),
                status=MessageStatus.PUBLISHED,
            )
            for idx in range(3)
        ],
        batch_id="batch_id",
        cached_publications=[],
        status=BatchMessageStatus.PUBLISHED,
        cached=False,
    )

    with mock.patch(
        "folding_studio.commands.predict.openfold_predict.batch_prediction",
        return_value=batch_pub.model_dump(mode="json"),
    ) as m:
        yield m
+
+
@pytest.fixture()
def mock_simple_prediction(request):
    """Patch `simple_prediction` to return a canned `Publication`.

    The publication status is taken from `request.param` so tests can drive
    it through `@pytest.mark.parametrize(..., indirect=True)` — mirroring the
    `mock_simple_msa` fixture in test_msa.py. The previous version ignored
    the indirect parameter, which makes pytest reject indirect
    parametrization of this fixture.
    """
    pub = Publication(
        folding_model=FoldingModel.OPENFOLD,
        message=Message(
            pipeline_name="alphafold_inference_pipeline",
            user_id="default-user",
            project_code="default-project",
            parameters=OpenFoldParameters(),
            experiment_id="dummy-experiment",
            model_preset="monomer",
            fasta_file_name="monomer.fasta",
            ignore_cache=False,
        ),
        # Status supplied by the indirect parametrization of each test.
        status=request.param,
    )

    with mock.patch(
        "folding_studio.commands.predict.openfold_predict.simple_prediction",
        return_value=pub.model_dump(mode="json"),
    ) as m:
        yield m
+
+
@pytest.fixture()
def default_params():
    """The `PredictRequestParams` the CLI builds when no option is given.

    Used to assert that the command forwards the expected defaults to the
    prediction helpers.
    """
    yield PredictRequestParams(
        ignore_cache=False,
        template_mode=FeatureMode.SEARCH,
        custom_template_ids=[],
        msa_mode=FeatureMode.SEARCH,
        max_msa_clusters=-1,
        max_extra_msa=-1,
        gap_trick=False,
        num_recycle=3,
        random_seed=0,
        model_subset=[],
        project_code="FOLDING_DEV",
    )
+
+
def test_predict_with_unsupported_file_fails(tmp_files):
    """An unsupported SOURCE file type must be rejected by CLI validation."""
    source = str(tmp_files["invalid_source"])
    outcome = runner.invoke(app, ["predict", "openfold", source])
    assert outcome.exit_code == 2
    assert "Invalid value for 'SOURCE'" in outcome.stdout
+
+
def test_predict_with_directory_containing_unsupported_file_fails(tmp_files):
    """A directory holding only non-FASTA files is rejected as SOURCE."""
    result = runner.invoke(app, ["predict", "openfold", str(tmp_files["invalid_dir"])])
    assert result.exit_code == 2
    assert "Invalid value for 'SOURCE'" in result.stdout
+
+
def test_predict_with_empty_directory_fails(tmp_files):
    """An empty directory is rejected as SOURCE (no inputs to fold)."""
    result = runner.invoke(app, ["predict", "openfold", str(tmp_files["empty_dir"])])
    assert result.exit_code == 2
    assert "Invalid value for 'SOURCE'" in result.stdout
+
+
def test_predict_with_unsupported_custom_template_file_fails(tmp_files):
    """An unsupported --custom-template file is rejected by CLI validation."""
    # NOTE(review): "invalid_template" is not defined in the shared conftest
    # visible here — presumably provided by a predict-level conftest; confirm.
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files["monomer_fasta"]),
            "--custom-template",
            tmp_files["invalid_template"],
        ],
    )
    assert result.exit_code == 2
    assert "Invalid value for '--custom-template'" in result.stdout
+
+
def test_predict_with_unsupported_custom_msa_file_fails(tmp_files):
    """An unsupported --custom-msa file is rejected by CLI validation."""
    # NOTE(review): "invalid_msa" is not defined in the shared conftest
    # visible here — presumably provided by a predict-level conftest; confirm.
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files["monomer_fasta"]),
            "--custom-msa",
            tmp_files["invalid_msa"],
        ],
    )
    assert result.exit_code == 2
    assert "Invalid value for '--custom-msa'" in result.stdout
+
+
@pytest.mark.parametrize(
    "mock_simple_prediction",
    (
        MessageStatus.PUBLISHED,
        MessageStatus.NOT_PUBLISHED_DONE,
        MessageStatus.NOT_PUBLISHED_PENDING,
    ),
    indirect=True,
)
def test_predict_with_fasta_file_pass(
    mock_simple_prediction: mock.Mock,
    tmp_files,
    default_params,
):
    """A single FASTA file triggers one `simple_prediction` call with defaults.

    The publication status is varied through indirect parametrization of the
    `mock_simple_prediction` fixture; the CLI must succeed for all of them.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files["monomer_fasta"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )
    assert result.exit_code == 0, result.output

    # No custom templates/MSAs were passed on the command line.
    custom_files = PredictRequestCustomFiles(templates=[], msas=[])

    mock_simple_prediction.assert_called_once_with(
        file=tmp_files["monomer_fasta"],
        folding_model=FoldingModel.OPENFOLD,
        params=default_params,
        custom_files=custom_files,
        project_code="FOLDING_DEV",
    )
+
+
@pytest.mark.parametrize(
    "mock_simple_prediction",
    (
        MessageStatus.PUBLISHED,
        MessageStatus.NOT_PUBLISHED_DONE,
        MessageStatus.NOT_PUBLISHED_PENDING,
    ),
    indirect=True,
)
def test_predict_with_fasta_file_pass_with_project_code_from_env_var(
    mock_simple_prediction: mock.Mock, tmp_files, default_params
):
    """Without --project-code, the FOLDING_PROJECT_CODE env var is used.

    The session-scoped `set_env` fixture sets FOLDING_PROJECT_CODE to
    "FOLDING_DEV", which must appear in the forwarded call.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files["monomer_fasta"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            # Here we deliberately leave out the
            # "--project-code" option to check that
            # env variable is correctly used
        ],
    )
    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(templates=[], msas=[])

    mock_simple_prediction.assert_called_once_with(
        file=tmp_files["monomer_fasta"],
        params=default_params,
        custom_files=custom_files,
        folding_model=FoldingModel.OPENFOLD,
        project_code="FOLDING_DEV",
    )
+
+
def test_predict_unset_project_code_fails(tmp_files, remove_project_code_from_env_var):
    """With no --project-code and no env var, the command must fail (exit 2)."""
    # `remove_project_code_from_env_var` unsets FOLDING_PROJECT_CODE for this test.
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files["monomer_fasta"]),
        ],
    )
    assert result.exit_code == 2, result.output
+
+
@pytest.mark.parametrize(
    "mock_simple_prediction",
    (
        MessageStatus.PUBLISHED,
        MessageStatus.NOT_PUBLISHED_DONE,
        MessageStatus.NOT_PUBLISHED_PENDING,
    ),
    indirect=True,
)
def test_predict_with_fasta_file_with_templates_masks_pass(
    mock_simple_prediction: mock.Mock, tmp_files, default_params, valid_templates_masks
):
    """Custom templates plus a matching templates-masks file are forwarded.

    `valid_templates_masks` and the `valid_template_*` entries come from a
    predict-level fixture set; templates 3 and 4 are the ones the masks file
    matches (see the failing counterpart test below).
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files["monomer_fasta"]),
            "--templates-masks-file",
            str(valid_templates_masks),
            "--custom-template",
            str(tmp_files["valid_template_3"]),
            "--custom-template",
            str(tmp_files["valid_template_4"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )
    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(
        templates=[tmp_files["valid_template_3"], tmp_files["valid_template_4"]],
        msas=[],
        templates_masks_files=[valid_templates_masks],
    )

    mock_simple_prediction.assert_called_once_with(
        file=tmp_files["monomer_fasta"],
        folding_model=FoldingModel.OPENFOLD,
        params=default_params,
        custom_files=custom_files,
        project_code="FOLDING_DEV",
    )
+
+
def test_predict_with_fasta_file_with_templates_masks_fail(
    tmp_files, valid_templates_masks
):
    """A templates-masks file that does not match the given templates fails.

    Template 2 is deliberately inconsistent with `valid_templates_masks`,
    so the CLI should abort with an exception mentioning the input command.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files["monomer_fasta"]),
            "--templates-masks-file",
            str(valid_templates_masks),
            "--custom-template",
            str(tmp_files["valid_template_3"]),
            "--custom-template",
            str(tmp_files["valid_template_2"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )
    assert result.exit_code == 1, result.output
    assert "Check your input command." in str(result.exception)
+
+
def test_predict_with_fasta_file_with_multi_seed_pass(
    mock_batch_prediction: mock.Mock,
    tmp_files,
    default_params,
):
    """--num-seed routes a single FASTA through the batch prediction path."""
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files["monomer_fasta"]),
            "--num-seed",
            5,
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )

    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(templates=[], msas=[])

    # Even a single file becomes a one-element batch when num_seed is given.
    mock_batch_prediction.assert_called_once_with(
        files=[tmp_files["monomer_fasta"]],
        folding_model=FoldingModel.OPENFOLD,
        params=default_params,
        custom_files=custom_files,
        num_seed=5,
        project_code="FOLDING_DEV",
    )
+
+
def test_predict_with_directory_pass(
    mock_batch_prediction: mock.Mock,
    tmp_files,
    default_params,
):
    """A valid directory fans out into one batch prediction over its files."""
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files["valid_dir"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )

    assert result.exit_code == 0, result.output

    custom_files = PredictRequestCustomFiles(templates=[], msas=[])

    # Directory traversal order is not guaranteed, so `files` is matched with
    # mock.ANY here and compared order-insensitively below.
    mock_batch_prediction.assert_called_once_with(
        files=mock.ANY,
        params=default_params,
        folding_model=FoldingModel.OPENFOLD,
        custom_files=custom_files,
        num_seed=None,
        project_code="FOLDING_DEV",
    )
    _, kwargs = mock_batch_prediction.call_args
    assert sorted(kwargs["files"]) == sorted(
        list(tmp_files["valid_dir"].iterdir()),
    )
+
+
@pytest.mark.parametrize("file", ["valid_batch_file_json", "valid_batch_file_csv"])
def test_predict_with_json_batch_file_pass(
    file, mock_batch_prediction_from_file: mock.Mock, tmp_files
):
    """A batch description file (JSON or CSV) goes through the file-batch path.

    Despite the "json" in the test name, both JSON and CSV batch files are
    covered via parametrization.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "openfold",
            str(tmp_files[file]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
            "--project-code",
            "FOLDING_DEV",
        ],
    )

    assert result.exit_code == 0, result.output

    mock_batch_prediction_from_file.assert_called_once_with(
        file=tmp_files[file],
        project_code="FOLDING_DEV",
    )
diff --git a/folding-studio/tests/commands/predict/test_protenix.py b/folding-studio/tests/commands/predict/test_protenix.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d3239093d5c869ec4586fbfe0fa4d486af62ad8
--- /dev/null
+++ b/folding-studio/tests/commands/predict/test_protenix.py
@@ -0,0 +1,111 @@
+from folding_studio.cli import app
+from folding_studio.query import ProtenixQuery
+from typer.testing import CliRunner
+
+runner = CliRunner()
+
+
def test_protenix_predict_with_invalid_file(tmp_files):
    """Running the command with an unsupported file type should return an error."""
    cli_args = [
        "predict",
        "protenix",
        str(tmp_files["invalid_source"]),
        "--project-code",
        "TEST_PROJECT",
        "--output",
        tmp_files["output_dir"],
    ]
    outcome = runner.invoke(app, cli_args)
    assert outcome.exit_code != 0
+
+
def test_protenix_predict_with_valid_fasta_file(
    mock_send_request, mock_download_results, tmp_files
):
    """
    Running the command with a valid FASTA file should
    process successfully and generate the correct payload.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "protenix",
            str(tmp_files["monomer_fasta"]),
            "--project-code",
            "TEST_PROJECT",
            "--output",
            tmp_files["output_dir"],
            "--seed",
            42,
        ],
    )
    assert result.exit_code == 0, result.output

    # Create the expected query using the same parameters as defaults.
    expected_query = ProtenixQuery.from_file(tmp_files["monomer_fasta"], seed=42)
    mock_send_request.assert_called_once()
    # The query is passed to send_request as the first positional argument.
    actual_query = mock_send_request.call_args[0][0]
    # Compare payload keys
    payload = actual_query.payload
    expected = expected_query.payload
    assert payload["fasta_files"] == expected["fasta_files"]
    assert payload["use_msa_server"] == expected["use_msa_server"]
    assert payload["seeds"] == expected["seeds"]

    mock_download_results.assert_called_once()
+
+
def test_protenix_predict_with_valid_fasta_directory(
    mock_send_request, mock_download_results, tmp_files
):
    """
    Running the command with a directory containing valid FASTA files should
    process successfully and combine all FASTA files into the payload.
    """
    result = runner.invoke(
        app,
        [
            "predict",
            "protenix",
            str(tmp_files["valid_dir"]),
            "--project-code",
            "TEST_PROJECT",
            "--output",
            tmp_files["output_dir"],
        ],
    )
    assert result.exit_code == 0, result.output

    expected_query = ProtenixQuery.from_directory(tmp_files["valid_dir"])
    mock_send_request.assert_called_once()
    # The query is passed to send_request as the first positional argument.
    actual_query = mock_send_request.call_args[0][0]
    payload = actual_query.payload
    expected = expected_query.payload
    # For directories, the fasta_files dictionary should contain an entry for each FASTA file.
    assert payload["fasta_files"] == expected["fasta_files"]

    mock_download_results.assert_called_once()
+
+
def test_protenix_predict_with_empty_directory(tmp_files):
    """
    Running the command with an empty directory should return an error,
    since no FASTA files are available.
    """
    # Fix: sub-command order is "predict protenix <source>" like every other
    # test in this module; the original "protenix predict" invocation failed
    # for the wrong reason, so the empty-directory behavior was never tested.
    # (Also fixed the "proteinx" typo in the test name.)
    result = runner.invoke(
        app,
        [
            "predict",
            "protenix",
            str(tmp_files["empty_dir"]),
            "--project-code",
            "TEST_PROJECT",
            "--output",
            tmp_files["output_dir"],
        ],
    )
    assert result.exit_code != 0, result.output
diff --git a/folding-studio/tests/commands/predict/test_soloseq.py b/folding-studio/tests/commands/predict/test_soloseq.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a6a57e218ce0d9c63fa43d115e30d565f0348cc
--- /dev/null
+++ b/folding-studio/tests/commands/predict/test_soloseq.py
@@ -0,0 +1,67 @@
+from pathlib import Path
+from unittest import mock
+
+from folding_studio.cli import app
+from folding_studio.query import SoloSeqQuery
+from typer.testing import CliRunner
+
+current_workdir = Path(__file__).parent.resolve()
+data_dir = Path(current_workdir / "data")
+
+runner = CliRunner()
+
+
def test_predict_with_fasta_file_pass(
    mock_send_request: mock.Mock,
    mock_download_results: mock.Mock,
    tmp_files,
):
    """A single FASTA file builds one SoloSeqQuery and downloads its results."""
    result = runner.invoke(
        app,
        [
            "predict",
            "soloseq",
            str(tmp_files["monomer_fasta"]),
            "--output",
            tmp_files["output_dir"],
            "--project-code",
            "my_project_code",
        ],
    )
    assert result.exit_code == 0, result.output

    # The expected content matches what conftest writes into monomer.fasta.
    expected_query = SoloSeqQuery(
        fasta_files={"monomer": ">tag1|tag2\nABCDEGF"}, query_name="monomer"
    )
    mock_send_request.assert_called_once_with(expected_query, "my_project_code")
    mock_download_results.assert_called_once()
+
+
def test_predict_with_fasta_dir_pass(
    mock_send_request: mock.Mock,
    mock_download_results: mock.Mock,
    tmp_files,
):
    """A directory of FASTA files is combined into one SoloSeqQuery."""
    result = runner.invoke(
        app,
        [
            "predict",
            "soloseq",
            str(tmp_files["valid_dir"]),
            "--output",
            tmp_files["output_dir"],
            "--project-code",
            "my_project_code",
        ],
    )
    assert result.exit_code == 0, result.output

    # conftest's valid_dir holds two copies of the monomer FASTA; the query
    # name is derived from the directory name.
    expected_query = SoloSeqQuery(
        fasta_files={
            "monomer_1": ">tag1|tag2\nABCDEGF",
            "monomer_2": ">tag1|tag2\nABCDEGF",
        },
        query_name="valid_dir",
    )
    mock_send_request.assert_called_once_with(expected_query, "my_project_code")
    mock_download_results.assert_called_once()
diff --git a/folding-studio/tests/commands/test_experiment.py b/folding-studio/tests/commands/test_experiment.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc8c6116def2f2eb7106bf7acd8a5d2b0ca183b8
--- /dev/null
+++ b/folding-studio/tests/commands/test_experiment.py
@@ -0,0 +1,69 @@
+from pathlib import Path
+from unittest import mock
+
+import pytest
+from folding_studio.cli import app
+from folding_studio.config import API_URL, REQUEST_TIMEOUT
+from typer.testing import CliRunner
+from folding_studio.utils.headers import get_auth_headers
+
+current_workdir = Path(__file__).parent.resolve()
+data_dir = Path(current_workdir / "data")
+
+runner = CliRunner()
+
+
@pytest.fixture(autouse=True)
def mock_get_auth_headers(request):
    """Patch auth-header retrieval for the experiment commands.

    Tests marked `apikeytest` exercise the real `get_auth_headers` with an
    explicit API key; all other tests get canned bearer-token headers.
    """
    if "apikeytest" in request.keywords:
        return_value = get_auth_headers(api_key="MY_KEY")
    else:
        return_value = {"Authorization": "Bearer identity_token"}
    with mock.patch(
        "folding_studio.commands.experiment.get_auth_headers", return_value=return_value
    ) as m:
        yield m
+
+
+
@pytest.fixture()
def mock_request_get():
    """Patch `requests.get` and hand the mock to the test."""
    with mock.patch("requests.get") as mocked_get:
        yield mocked_get
+
+
def test_experiment_status_pass(mock_request_get: mock.Mock, headers):
    """`experiment status <id>` calls the status endpoint and prints the status."""
    mock_response = mock.MagicMock()
    mock_response.ok = True
    mock_response.json.return_value = {"status": "Done."}
    mock_request_get.return_value = mock_response

    result = runner.invoke(app, ["experiment", "status", "ID"])
    assert result.exit_code == 0
    assert "Done." in result.stdout
    mock_request_get.assert_called_once_with(
        API_URL + "getExperimentStatus",
        params={"experiment_id": "ID"},
        headers=headers,
        timeout=REQUEST_TIMEOUT,
    )
+
+
def test_experiment_list_pass(mock_request_get: mock.Mock, tmp_files, headers):
    """`experiment list` queries done/pending experiments with auth headers."""
    mock_response = mock.MagicMock()
    mock_response.ok = True
    mock_response.json.return_value = {
        "Done": ["exp_id_1", "exp_id_2"],
        "Pending": ["exp_id_3"],
    }
    mock_request_get.return_value = mock_response

    # The --output file only receives the listing; its content is not asserted.
    result = runner.invoke(
        app, ["experiment", "list", "--output", str(tmp_files["metadata_file"])]
    )
    assert result.exit_code == 0
    mock_request_get.assert_called_once_with(
        API_URL + "getDoneAndPendingExperiments",
        headers=headers,
        timeout=REQUEST_TIMEOUT,
    )
diff --git a/folding-studio/tests/commands/test_msa.py b/folding-studio/tests/commands/test_msa.py
new file mode 100644
index 0000000000000000000000000000000000000000..290ba40840cf5528747ced1401a9a8a68251de80
--- /dev/null
+++ b/folding-studio/tests/commands/test_msa.py
@@ -0,0 +1,96 @@
+from pathlib import Path
+from unittest import mock
+
+import pytest
+from folding_studio.cli import app
+from folding_studio.utils.data_model import MSARequestParams
+from folding_studio_data_models import (
+ FeatureMode,
+ MessageStatus,
+ MSAMessage,
+ MSAPublication,
+)
+from typer.testing import CliRunner
+
+current_workdir = Path(__file__).parent.resolve()
+data_dir = Path(current_workdir / "data")
+
+runner = CliRunner()
+
+
@pytest.fixture(autouse=True)
def mock_get_auth_headers():
    """Patch auth-header retrieval so MSA commands need no real credentials."""
    fake_headers = {"Authorization": "Bearer identity_token"}
    patch_target = "folding_studio.commands.msa.get_auth_headers"
    with mock.patch(patch_target, return_value=fake_headers) as patched:
        yield patched
+
+
@pytest.fixture()
def mock_simple_msa(request):
    """Patch `simple_msa` to return a canned `MSAPublication`.

    The publication status comes from `request.param`, set by tests through
    `@pytest.mark.parametrize(..., indirect=True)`.
    """
    pub = MSAPublication(
        message=MSAMessage(
            pipeline_name="msa",
            user_id="default-user",
            project_code="default-project",
            msa_experiment_id="dummy-experiment",
            model_preset="monomer",
            fasta_file_name="monomer.fasta",
            ignore_cache=False,
        ),
        status=request.param,
    )

    with mock.patch(
        "folding_studio.commands.msa.simple_msa",
        return_value=pub.model_dump(mode="json"),
    ) as m:
        yield m
+
+
@pytest.fixture()
def default_params():
    """The `MSARequestParams` the CLI builds when no option is given."""
    yield MSARequestParams(
        ignore_cache=False,
        msa_mode=FeatureMode.SEARCH,
    )
+
+
def test_search_with_unsupported_file_fails(tmp_files):
    """An unsupported SOURCE file type is rejected by `msa search` validation."""
    result = runner.invoke(app, ["msa", "search", str(tmp_files["invalid_source"])])
    assert result.exit_code == 2
    assert "Invalid value for 'SOURCE'" in result.stdout
+
+
@pytest.mark.parametrize(
    "mock_simple_msa",
    (
        MessageStatus.PUBLISHED,
        MessageStatus.NOT_PUBLISHED_DONE,
        MessageStatus.NOT_PUBLISHED_PENDING,
    ),
    indirect=True,
)
def test_search_with_fasta_file_pass(
    mock_simple_msa: mock.Mock,
    tmp_files,
    default_params,
    headers,
):
    """`msa search` on a FASTA file forwards default params to `simple_msa`.

    NOTE(review): the `headers` fixture is requested but unused here — confirm
    whether it can be dropped.
    """
    result = runner.invoke(
        app,
        [
            "msa",
            "search",
            str(tmp_files["monomer_fasta"]),
            "--metadata-file",
            str(tmp_files["metadata_file"]),
        ],
    )
    assert result.exit_code == 0

    # Project code falls back to the FOLDING_PROJECT_CODE env var from conftest.
    mock_simple_msa.assert_called_once_with(
        file=tmp_files["monomer_fasta"],
        params=default_params,
        project_code="FOLDING_DEV"
    )
diff --git a/folding-studio/tests/conftest.py b/folding-studio/tests/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..aaad0a9e3335abbfa88e973fd1fd4d6a49d2e250
--- /dev/null
+++ b/folding-studio/tests/conftest.py
@@ -0,0 +1,126 @@
+import os
+import shutil
+from pathlib import Path
+from unittest import mock
+
+
+import pytest
+import yaml
+from folding_studio_data_models.request.folding import FoldingModel
+
+
@pytest.fixture(scope="session", autouse=True)
def set_env():
    """
    We set the env var here instead of pytest.ini, because
    we want to override the env vars in some tests.
    I could not find an easy way to override the content of pytest.ini.
    """
    # Session-wide defaults used by the CLI: API endpoint, project code, API key.
    os.environ["API_URL"] = "https://test_api_url/"
    os.environ["FOLDING_PROJECT_CODE"] = "FOLDING_DEV"
    os.environ["FOLDING_API_KEY"] = "MY_KEY"
+
+
@pytest.fixture
def remove_project_code_from_env_var(monkeypatch):
    """Temporarily unset FOLDING_PROJECT_CODE for the requesting test.

    mock.patch.dict snapshots os.environ and restores it on exit; monkeypatch
    also undoes the delenv — the combination is belt-and-braces.
    """
    with mock.patch.dict(os.environ):
        monkeypatch.delenv("FOLDING_PROJECT_CODE")
        yield
+
@pytest.fixture
def remove_api_key_from_env_var(monkeypatch):
    """Temporarily unset FOLDING_API_KEY for the requesting test."""
    with mock.patch.dict(os.environ):
        monkeypatch.delenv("FOLDING_API_KEY")
        yield
+
@pytest.fixture(params=[FoldingModel.AF2, FoldingModel.OPENFOLD])
def folding_model(request):
    """Parametrized fixture running the requesting test for AF2 and OpenFold."""
    return request.param
+
@pytest.fixture
def headers():
    """The canned auth headers the mocked `get_auth_headers` returns."""
    return {"Authorization": "Bearer identity_token"}
+
+
@pytest.fixture(scope="module")
def tmp_directory(tmp_path_factory):
    """Module-scoped scratch directory, removed explicitly after the module.

    pytest eventually prunes tmp_path_factory dirs itself; the rmtree just
    frees space immediately.
    """
    tmp_dir_path = tmp_path_factory.mktemp("data")
    yield tmp_dir_path
    shutil.rmtree(tmp_dir_path)
+
+
@pytest.fixture(scope="module")
def tmp_files(tmp_directory: Path):
    """Generate temporary files.

    Builds a dictionary of named FASTA/YAML files and directories covering the
    valid and invalid inputs exercised by the CLI tests.
    """

    # generate fasta files
    empty_fasta = tmp_directory / "empty.fasta"
    empty_fasta.touch()

    monomer_fasta = tmp_directory / "monomer.fasta"
    with monomer_fasta.open("w") as f:
        f.write(">tag1|tag2\nABCDEGF")

    multimer_fasta = tmp_directory / "multimer.fasta"
    with multimer_fasta.open("w") as f:
        f.write(">tag1\nABCDEGF\n>tag2\nABCDEGF")

    # generate yaml file
    yaml_content = {
        "version": 1,
        "sequences": [{"protein": {"id": "A", "sequence": "QLEDSEVEAVAKGLEE"}}],
    }
    yaml_file_path = tmp_directory / "protein.yaml"
    with yaml_file_path.open("w") as f:
        yaml.safe_dump(yaml_content, f, default_flow_style=False)

    # invalid source
    invalid_source = tmp_directory / "protein.txt"
    invalid_source.touch()

    # batch directory with invalid source files
    dir_with_multimer = tmp_directory / "dir_with_multimer"
    dir_with_multimer.mkdir(parents=True, exist_ok=True)
    shutil.copy(multimer_fasta, dir_with_multimer / "multimer.fasta")
    shutil.copy(monomer_fasta, dir_with_multimer / "monomer.fasta")

    invalid_dir = tmp_directory / "invalid_dir"
    invalid_dir.mkdir(parents=True, exist_ok=True)
    (invalid_dir / "protein_A.txt").touch()
    (invalid_dir / "protein_B.txt").touch()

    mixed_fasta_txt_dir = tmp_directory / "mixed_fasta_txt_dir"
    mixed_fasta_txt_dir.mkdir(parents=True, exist_ok=True)
    (mixed_fasta_txt_dir / "protein_A.fasta").touch()
    (mixed_fasta_txt_dir / "protein_B.txt").touch()

    mixed_fasta_yaml_dir = tmp_directory / "mixed_fasta_yaml_dir"
    mixed_fasta_yaml_dir.mkdir(parents=True, exist_ok=True)
    shutil.copy(monomer_fasta, mixed_fasta_yaml_dir / "protein_A.fasta")
    shutil.copy(yaml_file_path, mixed_fasta_yaml_dir / "protein_B.yaml")

    empty_dir = tmp_directory / "empty_dir"
    empty_dir.mkdir(parents=True, exist_ok=True)

    # valid batch directory
    valid_dir = tmp_directory / "valid_dir"
    valid_dir.mkdir(parents=True, exist_ok=True)
    shutil.copy(monomer_fasta, valid_dir / "monomer_1.fasta")
    shutil.copy(monomer_fasta, valid_dir / "monomer_2.fasta")

    # NOTE(review): "output_dir" is returned but never created on disk —
    # presumably the CLI creates it; confirm.
    files = {
        "invalid_source": invalid_source,
        "empty_fasta": empty_fasta,
        "monomer_fasta": monomer_fasta,
        "multimer_fasta": multimer_fasta,
        "yaml_file_path": yaml_file_path,
        "valid_dir": valid_dir,
        "invalid_dir": invalid_dir,
        "dir_with_multimer": dir_with_multimer,
        "mixed_dir": mixed_fasta_txt_dir,
        "empty_dir": empty_dir,
        "output_dir": tmp_directory / "output",
        "mixed_fasta_yaml_dir": mixed_fasta_yaml_dir,
    }

    yield files
diff --git a/folding-studio/tests/query/__init__.py b/folding-studio/tests/query/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/folding-studio/tests/query/test_boltz.py b/folding-studio/tests/query/test_boltz.py
new file mode 100644
index 0000000000000000000000000000000000000000..965cb2ea2391e52678a71bf223a93130facfecc1
--- /dev/null
+++ b/folding-studio/tests/query/test_boltz.py
@@ -0,0 +1,92 @@
+import pytest
+from folding_studio.query import BoltzQuery
+
+
@pytest.fixture
def inference_parameters():
    """
    Provides the inference parameters dictionary to tests.

    The dictionary is splatted as **kwargs into the BoltzQuery factory
    methods (`from_file` / `from_directory`).
    """
    return {
        "recycling_steps": 3,
        "sampling_steps": 200,
        "diffusion_samples": 1,
        "step_scale": 1.638,
        "output_format": "mmcif",
        "num_workers": 2,
        "msa_pairing_strategy": "greedy",
        "write_full_pae": False,
        "write_full_pde": False,
        "seed": 0,
    }
+
+
def test_process_fasta_file(tmp_files):
    """Test processing a valid FASTA file."""
    fasta_path = tmp_files["monomer_fasta"]
    fasta_dict, yaml_dict = BoltzQuery._process_file(fasta_path)
    # FASTA content is keyed by the file stem; no YAML entries are produced.
    assert fasta_dict == {fasta_path.stem: ">tag1|tag2\nABCDEGF"}
    assert yaml_dict == {}
+
+
def test_process_yaml_file(tmp_files):
    """Test processing a valid YAML file."""
    file_path = tmp_files["yaml_file_path"]
    fasta_dict, yaml_dict = BoltzQuery._process_file(file_path)

    # Must match the yaml_content written by the conftest tmp_files fixture.
    expected_yaml = {
        "version": 1,
        "sequences": [{"protein": {"id": "A", "sequence": "QLEDSEVEAVAKGLEE"}}],
    }
    expected_yaml_dict = {file_path.stem: expected_yaml}

    assert fasta_dict == {}
    assert yaml_dict == expected_yaml_dict
+
+
def test_process_file_invalid_extension(tmp_files):
    """Test processing a file with an invalid extension."""
    invalid_source = tmp_files["invalid_source"]
    # The error message is expected to embed the offending suffix (".txt").
    with pytest.raises(
        ValueError, match=f"Unsupported format: {invalid_source.suffix}"
    ):
        BoltzQuery._process_file(invalid_source)
+
+
def test_from_file_fasta(tmp_files, inference_parameters):
    """Test creating BoltzQuery from a FASTA file."""
    file_path = tmp_files["monomer_fasta"]
    query = BoltzQuery.from_file(file_path, **inference_parameters)
    # A FASTA input populates fasta_dict only.
    assert file_path.stem in query.fasta_dict
    assert not query.yaml_dict
+
+
def test_from_file_yaml(tmp_files, inference_parameters):
    """Test creating BoltzQuery from a YAML file."""
    file_path = tmp_files["yaml_file_path"]
    query = BoltzQuery.from_file(file_path, **inference_parameters)
    # A YAML input populates yaml_dict only.
    assert not query.fasta_dict
    assert file_path.stem in query.yaml_dict
+
+
def test_from_directory(tmp_files, inference_parameters):
    """Test creating BoltzQuery from a directory containing both FASTA and YAML files."""
    query = BoltzQuery.from_directory(
        tmp_files["mixed_fasta_yaml_dir"], **inference_parameters
    )
    # protein_A.fasta -> fasta_dict, protein_B.yaml -> yaml_dict (see conftest).
    assert "protein_A" in query.fasta_dict
    assert "protein_B" in query.yaml_dict
+
+
def test_from_source_file(tmp_files, inference_parameters):
    """Test creating BoltzQuery from a single file."""
    file_path = tmp_files["monomer_fasta"]
    # Passing the path as a string (not Path) must also be accepted.
    query = BoltzQuery.from_file(str(file_path), **inference_parameters)
    assert "monomer" in query.fasta_dict
+
+
def test_from_source_directory(tmp_files, inference_parameters):
    """Test creating BoltzQuery from a directory."""
    # Fix: the original test had no body beyond the docstring, so it passed
    # vacuously. Mirror test_from_directory's expectations.
    query = BoltzQuery.from_directory(
        tmp_files["mixed_fasta_yaml_dir"], **inference_parameters
    )
    assert "protein_A" in query.fasta_dict
    assert "protein_B" in query.yaml_dict
diff --git a/folding-studio/tests/query/test_chai.py b/folding-studio/tests/query/test_chai.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b41cb6f3735ae74490daadba959918c98c2b5a2
--- /dev/null
+++ b/folding-studio/tests/query/test_chai.py
@@ -0,0 +1,226 @@
+from pathlib import Path
+from unittest import mock
+
+import pytest
+from folding_studio.query import ChaiQuery
+from folding_studio.query.chai import ChaiParameters
+
+RESTRAINTS_CSV_CONTENT = """
+chainA,res_idxA,chainB,res_idxB,connection_type,confidence,min_distance_angstrom,max_distance_angstrom,comment,restraint_id
+A,C387,B,Y101,contact,1.0,0.0,5.5,protein-heavy,restraint_1
+C,I32,A,S483,contact,1.0,0.0,5.5,protein-light,restraint_2
+"""
+
+A3M_CONTENT = """\
+>101
+RVQPTESIVRFPNITNLCPFGEVFNATRFASVYAWNRKRISNCVADYSVLYNSASFSTFKCYGVSPTKLNDLCFTNVYADSFVIRGDEVRQIAPGQTGKIADYNYKLPDDFTGCVIAWNSNNLDSKVGGNYNYLYRLFRKSNLKPFERDISTEIYQAGSTPCNGVEGFNCYFPLQSYGFQPTNGVGYQPYRVVVLSFELLHAPATVCGPKKSTNLVKNKCVNFHHHHHP
+>UniRef100_UPI00024DB110 356 0.741 7.627E-108 0 227 229 0 226 228
+RVVPSGDVVRFPNITNLCPFGEVFNATKFPSVYAWERKKISNCVADYSVLYNSTFFSTFKCYGVSATKLNDLCFSNVYADSFVVKGDDVRQIAPGQTGVIADYNYKLPDDFMGCVLAWNTRNIDATSTGNYNYKYRFLRHGKLRPFERDISNVPFSPDGKPCT-PPAFNCYWPLNDYGFYTTTGIGYQPYRVVVLSFELLNAPATVCGPKLSTDLIKNQCVNFHHHHH-
+"""
+
+
@pytest.fixture(autouse=True)
def mock_get_auth_headers():
    """Patch auth-header retrieval so query construction needs no credentials."""
    with mock.patch(
        "folding_studio.utils.headers.get_auth_headers", return_value="headers"
    ) as m:
        yield m
+
+
@pytest.fixture(autouse=True)
def mock_upload_custom_files():
    """Patch custom-file uploads to map every given path to a dummy URL."""

    def side_effect(paths: list[Path], **kwargs):
        # Fake upload: one "new_url" per input path, keyed by its string path.
        return {str(p): "new_url" for p in paths}

    upload_custom_files_mock = mock.Mock(side_effect=side_effect)
    with mock.patch(
        "folding_studio.commands.utils.upload_custom_files",
        upload_custom_files_mock,
    ):
        yield upload_custom_files_mock
+
+
@pytest.fixture(scope="module")
def tmp_files(tmp_directory: Path, tmp_files: dict):
    """Generate temporary files.

    Extends the shared conftest `tmp_files` dict with Chai-specific inputs:
    a restraints CSV, a directory of A3M alignments, and an aligned.pqt file.
    """

    # A valid restraints CSV file
    valid_restraints = tmp_directory / "example_restraints.csv"
    valid_restraints.write_text(RESTRAINTS_CSV_CONTENT)

    tmp_files["valid_restraints"] = valid_restraints

    # Directory for A3M files
    valid_a3m_dir = tmp_directory / "valid_a3m_dir"
    valid_a3m_dir.mkdir()
    alignment_1 = valid_a3m_dir / "alignment1.a3m"
    alignment_1.write_text(A3M_CONTENT)
    # Fix: the second alignment previously overwrote "alignment1.a3m",
    # leaving only one file in the directory instead of two.
    alignment_2 = valid_a3m_dir / "alignment2.a3m"
    alignment_2.write_text(A3M_CONTENT)
    tmp_files["valid_a3m_dir"] = valid_a3m_dir

    # An aligned.pqt file
    custom_msa = tmp_directory / "custom_msa.aligned.pqt"
    custom_msa.write_text("")
    tmp_files["aligned_pqt"] = custom_msa

    yield tmp_files
+
+
def test_from_fasta_file(tmp_files):
    """Test _from_fasta_file with a valid FASTA file."""
    fasta_path = tmp_files["monomer_fasta"]
    # custom_msa_paths is a single aligned.pqt path here — presumably a
    # single path is accepted as well as a directory; confirm in ChaiQuery.
    query = ChaiQuery.from_file(
        fasta_path,
        use_msa_server=True,
        use_templates_server=False,
        num_trunk_recycles=3,
        seed=42,
        num_diffn_timesteps=100,
        custom_msa_paths=tmp_files["aligned_pqt"],
    )
    payload = query.payload
    assert fasta_path.stem in payload["fasta_files"]
    assert payload["use_msa_server"] is True
    assert payload["use_templates_server"] is False
    assert payload["num_trunk_recycles"] == 3
    assert payload["seed"] == 42
    assert payload["num_diffn_timesteps"] == 100
    # Defaults not passed explicitly:
    assert payload["recycle_msa_subsample"] == 0
    assert payload["num_trunk_samples"] == 1
+
+
def test_from_fasta_file_invalid_extension(tmp_files):
    """Test _from_fasta_file with an invalid file extension."""
    # The raised message is expected to embed the offending suffix (".txt").
    with pytest.raises(
        ValueError,
        match=f"Unsupported suffix '{tmp_files['invalid_source'].suffix}'",
    ):
        ChaiQuery.from_file(
            tmp_files["invalid_source"],
            use_msa_server=True,
            use_templates_server=False,
            num_trunk_recycles=3,
            seed=42,
            num_diffn_timesteps=100,
            custom_msa_paths=tmp_files["aligned_pqt"],
        )
+
+
def test_from_fasta_directory(tmp_files):
    """Test _from_fasta_directory with valid FASTA files."""
    # Both path arguments are strings here to exercise str-path handling.
    query = ChaiQuery.from_directory(
        path=str(tmp_files["valid_dir"]),
        use_msa_server=False,
        use_templates_server=True,
        num_trunk_recycles=2,
        seed=123,
        num_diffn_timesteps=50,
        custom_msa_paths=str(tmp_files["valid_a3m_dir"]),
    )
    payload = query.payload
    fasta_files = payload["fasta_files"]

    # Verify that both expected FASTA files are included in the payload.
    assert "monomer_1" in fasta_files, "Expected 'monomer_1' in FASTA files."
    assert "monomer_2" in fasta_files, "Expected 'monomer_2' in FASTA files."

    # Verify other parameters are correctly set.
    assert payload["use_msa_server"] is False
    assert payload["use_templates_server"] is True
    assert payload["num_trunk_recycles"] == 2
    assert payload["seed"] == 123
    assert payload["num_diffn_timesteps"] == 50
    assert payload["recycle_msa_subsample"] == 0
    assert payload["num_trunk_samples"] == 1
+
+
+
+
def test_from_empty_fasta_directory(tmp_files):
    """Test _from_fasta_directory with an empty directory."""
    with pytest.raises(ValueError, match="No FASTA files found in directory"):
        ChaiQuery.from_directory(
            tmp_files["empty_dir"],
            use_msa_server=False,
            use_templates_server=False,
            num_trunk_recycles=2,
            seed=123,
            num_diffn_timesteps=50,
            custom_msa_paths=tmp_files["valid_a3m_dir"],
        )
+
+
def test_from_fasta_directory_with_invalid_files(tmp_files):
    """Test _from_fasta_directory ignores invalid file extensions."""
    # NOTE(review): near-duplicate of test_from_fasta_directory_with_invalid_sources
    # below (only difference: custom_msa_paths) — consider consolidating.
    with pytest.raises(ValueError, match="No FASTA files found in directory"):
        ChaiQuery.from_directory(
            tmp_files["invalid_dir"],
            use_msa_server=False,
            num_trunk_recycles=2,
            seed=123,
            num_diffn_timesteps=50,
            custom_msa_paths=tmp_files["valid_a3m_dir"],
        )
+
+
def test_from_file_with_restraints(tmp_files):
    """Test from_file with a FASTA file and restraints."""
    query = ChaiQuery.from_file(
        tmp_files["monomer_fasta"],
        use_msa_server=True,
        use_templates_server=True,
        num_trunk_recycles=4,
        seed=10,
        num_diffn_timesteps=200,
        restraints=tmp_files["valid_restraints"],
    )
    payload = query.payload
    assert tmp_files["monomer_fasta"].stem in payload["fasta_files"]
    # The CSV file content is read in and stripped of surrounding whitespace.
    assert payload["restraints"] == RESTRAINTS_CSV_CONTENT.strip()
    assert payload["use_msa_server"] is True
    assert payload["use_templates_server"] is True
    assert payload["num_trunk_recycles"] == 4
    assert payload["seed"] == 10
    assert payload["num_diffn_timesteps"] == 200
    assert payload["recycle_msa_subsample"] == 0
    assert payload["num_trunk_samples"] == 1
+
+
def test_from_fasta_directory_with_invalid_sources(tmp_files):
    """Test _from_fasta_directory ignores invalid file extensions."""
    # Same scenario as test_from_fasta_directory_with_invalid_files, but
    # without custom_msa_paths.
    with pytest.raises(ValueError, match="No FASTA files found in directory"):
        ChaiQuery.from_directory(
            tmp_files["invalid_dir"],
            use_msa_server=False,
            num_trunk_recycles=2,
            seed=123,
            num_diffn_timesteps=50,
        )
+
+
def test_ChaiParameters_read_restraints(tmp_files):
    """Test _read_restraints with a valid CSV file."""
    # ChaiParameters accepts a Path and stores the stripped CSV text.
    parameters = ChaiParameters(restraints=tmp_files["valid_restraints"])
    assert parameters.restraints == RESTRAINTS_CSV_CONTENT.strip()
+
+
def test_read_restraints_invalid_extension(tmp_files):
    """Test _read_restraints with a non-CSV file."""
    # The error message is expected to embed the offending suffix (".txt").
    with pytest.raises(
        ValueError,
        match=f"Unsupported suffix '{tmp_files['invalid_source'].suffix}'",
    ):
        ChaiParameters(restraints=tmp_files["invalid_source"])
+
+
def test_from_nonexistent_file():
    """Test from_file with a nonexistent file."""
    with pytest.raises(FileNotFoundError):
        ChaiQuery.from_file(
            "nonexistent.fasta",
            use_msa_server=True,
            num_trunk_recycles=3,
            seed=42,
            num_diffn_timesteps=100,
        )
diff --git a/folding-studio/tests/query/test_protenix.py b/folding-studio/tests/query/test_protenix.py
new file mode 100644
index 0000000000000000000000000000000000000000..b959303a93a4b0fe53eb128c886dd21f4b2b8979
--- /dev/null
+++ b/folding-studio/tests/query/test_protenix.py
@@ -0,0 +1,64 @@
+import pytest
+from folding_studio.query import ProtenixQuery
+
+
+def test_from_file(tmp_files):
+ """Test from_file with a valid FASTA file."""
+ fasta_path = tmp_files["monomer_fasta"]
+ query = ProtenixQuery.from_file(fasta_path, use_msa_server=True, seed=0)
+ payload = query.payload
+ assert fasta_path.stem in payload["fasta_files"]
+ assert payload["use_msa_server"] is True
+ assert payload["seeds"] == "0"
+
+
+def test_from_file_invalid_extension(tmp_files):
+ """Test from_file with an invalid file extension."""
+ with pytest.raises(
+ ValueError,
+ match=f"Unsupported suffix '{tmp_files['invalid_source'].suffix}'",
+ ):
+ ProtenixQuery.from_file(
+ tmp_files["invalid_source"], use_msa_server=True, seed=0
+ )
+
+
+def test_from_directory(tmp_files):
+ """Test from_directory with valid FASTA files."""
+ query = ProtenixQuery.from_directory(
+ tmp_files["valid_dir"], use_msa_server=False, seed=0
+ )
+ payload = query.payload
+ assert "monomer_1" in payload["fasta_files"]
+ assert "monomer_2" in payload["fasta_files"]
+ assert payload["use_msa_server"] is False
+ assert payload["seeds"] == "0"
+
+
+def test_from_empty_fasta_directory(tmp_files):
+ """Test from_directory with an empty directory."""
+ with pytest.raises(ValueError, match="No FASTA files found in directory"):
+ ProtenixQuery.from_directory(
+ tmp_files["empty_dir"], use_msa_server=False, seed=0
+ )
+
+
+def test_from_directory_with_invalid_sources(tmp_files):
+ """Test from_directory ignores invalid file extensions."""
+ with pytest.raises(ValueError, match="No FASTA files found in directory"):
+ ProtenixQuery.from_directory(
+ tmp_files["invalid_dir"], use_msa_server=False, seed=0
+ )
+
+
+def test_from_file_with_msa_server(tmp_files):
+ """Test from_file with a FASTA file."""
+ query = ProtenixQuery.from_file(
+ str(tmp_files["monomer_fasta"]),
+ use_msa_server=True,
+ seed=10,
+ )
+ payload = query.payload
+ assert tmp_files["monomer_fasta"].stem in payload["fasta_files"]
+ assert payload["use_msa_server"] is True
+ assert payload["seeds"] == "10"
diff --git a/folding-studio/tests/query/test_soloseq.py b/folding-studio/tests/query/test_soloseq.py
new file mode 100644
index 0000000000000000000000000000000000000000..3de14d290f5109f66b57027f97a6858bea5211ae
--- /dev/null
+++ b/folding-studio/tests/query/test_soloseq.py
@@ -0,0 +1,74 @@
+import pytest
+from folding_studio.query.soloseq import SoloSeqParameters, SoloSeqQuery
+from folding_studio_data_models.exceptions import FastaValidationError
+
+
+def test_soloseqquery_from_protein_sequence():
+ sequence = ">tag1|tag2\nABCDEGF"
+ query = SoloSeqQuery.from_protein_sequence(sequence)
+ assert query.fasta_files == {"tag1": ">tag1|tag2\nABCDEGF"}
+ assert query.query_name == "tag1"
+ assert query.parameters == SoloSeqParameters()
+
+ parameters = {"data_random_seed": 42, "skip_relaxation": False, "unsupported": 123}
+ query = SoloSeqQuery.from_protein_sequence(sequence, **parameters)
+ assert query.fasta_files == {"tag1": ">tag1|tag2\nABCDEGF"}
+ assert query.query_name == "tag1"
+ assert query.parameters == SoloSeqParameters(
+ data_random_seed=42, skip_relaxation=False
+ )
+
+ with pytest.raises(FastaValidationError):
+ multimer_sequence = ">tag1|tag2\nABCDEGF\n>tag3\nABCDEGF"
+ SoloSeqQuery.from_protein_sequence(multimer_sequence)
+
+
+def test_soloseqquery_from_fasta_file(tmp_files):
+ with pytest.raises(ValueError, match="Unsupported suffix"):
+ SoloSeqQuery.from_file(tmp_files["invalid_source"])
+
+ with pytest.raises(FastaValidationError):
+ SoloSeqQuery.from_file(tmp_files["empty_fasta"])
+
+ with pytest.raises(FastaValidationError):
+ SoloSeqQuery.from_file(tmp_files["multimer_fasta"])
+
+ query = SoloSeqQuery.from_file(tmp_files["monomer_fasta"])
+ assert query.fasta_files == {"monomer": ">tag1|tag2\nABCDEGF"}
+ assert query.query_name == "monomer"
+ assert query.parameters == SoloSeqParameters()
+
+ parameters = {"data_random_seed": 42, "skip_relaxation": False, "unsupported": 123}
+ query = SoloSeqQuery.from_file(tmp_files["monomer_fasta"], **parameters)
+ assert query.fasta_files == {"monomer": ">tag1|tag2\nABCDEGF"}
+ assert query.query_name == "monomer"
+ assert query.parameters == SoloSeqParameters(
+ data_random_seed=42, skip_relaxation=False
+ )
+
+
+def test_soloseqquery_from_directory(tmp_files):
+ with pytest.raises(ValueError, match="No FASTA files found in directory"):
+ SoloSeqQuery.from_directory(tmp_files["empty_dir"])
+
+ with pytest.raises(FastaValidationError):
+ SoloSeqQuery.from_directory(tmp_files["dir_with_multimer"])
+
+ query = SoloSeqQuery.from_directory(tmp_files["valid_dir"])
+ assert query.fasta_files == {
+ "monomer_1": ">tag1|tag2\nABCDEGF",
+ "monomer_2": ">tag1|tag2\nABCDEGF",
+ }
+ assert query.query_name == "valid_dir"
+ assert query.parameters == SoloSeqParameters()
+
+ parameters = {"data_random_seed": 42, "skip_relaxation": False, "unsupported": 123}
+ query = SoloSeqQuery.from_directory(tmp_files["valid_dir"], **parameters)
+ assert query.fasta_files == {
+ "monomer_1": ">tag1|tag2\nABCDEGF",
+ "monomer_2": ">tag1|tag2\nABCDEGF",
+ }
+ assert query.query_name == "valid_dir"
+ assert query.parameters == SoloSeqParameters(
+ data_random_seed=42, skip_relaxation=False
+ )
diff --git a/folding-studio/tests/utils/test_headers.py b/folding-studio/tests/utils/test_headers.py
new file mode 100644
index 0000000000000000000000000000000000000000..77561f497e2798e0ed204c20c0b66d6349e3a110
--- /dev/null
+++ b/folding-studio/tests/utils/test_headers.py
@@ -0,0 +1,25 @@
+"""Test headers"""
+import pytest
+from unittest import mock
+from folding_studio.utils.headers import get_auth_headers
+
+
+def test_get_auth_headers_api_key():
+ result = get_auth_headers()
+ assert result == {"X-API-Key": "MY_KEY"}
+
+@pytest.fixture()
+def mock_get_auth_headers():
+ with mock.patch(
+ "folding_studio.utils.headers.get_id_token", return_value="identity_token"
+ ) as m:
+ yield m
+
+
+@mock.patch("folding_studio.utils.headers.FOLDING_API_KEY", None)
+def test_get_auth_headers_token(mock_get_auth_headers):
+ result = get_auth_headers()
+ assert result == {"Authorization": "Bearer identity_token"}
+
+
+
diff --git a/folding-studio/tests/utils/test_input_validation.py b/folding-studio/tests/utils/test_input_validation.py
new file mode 100644
index 0000000000000000000000000000000000000000..339b3979217097cd827d9fdbbe7b906179106c4a
--- /dev/null
+++ b/folding-studio/tests/utils/test_input_validation.py
@@ -0,0 +1,107 @@
+"""Test path helpers."""
+
+from pathlib import Path
+
+import pytest
+import typer
+from folding_studio.utils.input_validation import (
+ extract_and_validate_custom_msas,
+ extract_and_validate_custom_templates,
+ validate_initial_guess,
+)
+
+
+def test_extract_and_validate_custom_templates_pass(tmp_path: Path):
+ """Test extract and validate custom templates pass."""
+
+ test_str_paths = [
+ tmp_path / "template.cif",
+ tmp_path / "dir_1/template_1.cif",
+ tmp_path / "dir_1/template_2.cif",
+ tmp_path / "dir_1/dir_11/template.cif",
+ tmp_path / "dir_2/template_1.cif",
+ tmp_path / "dir_2/template_2.cif",
+ tmp_path / "dir_3/template_1.cif",
+ tmp_path / "dir_3/template_2.cif",
+ ]
+ for path in test_str_paths:
+ path.parent.mkdir(exist_ok=True, parents=True)
+ path.touch()
+
+ test_paths = [
+ tmp_path / "template.cif",
+ tmp_path / "dir_1",
+ tmp_path / "dir_2",
+ tmp_path / "dir_3/template_1.cif",
+ ]
+
+ extracted_paths = extract_and_validate_custom_templates(test_paths)
+
+ assert len(extracted_paths) == 6
+
+
+def test_extract_and_validate_custom_templates_fails_if_unsupported(tmp_path: Path):
+ """Test extract and validate custom templates fails if an unsupported file is passed."""
+ file = tmp_path / "template.txt"
+ file.touch()
+ test_paths = [Path(file)]
+ with pytest.raises(
+ typer.BadParameter, match=f"The file '{file}' is not supported."
+ ):
+ extract_and_validate_custom_templates(test_paths)
+
+
+def test_extract_and_validate_custom_msas_pass(tmp_path: Path):
+ """Test extract and validate custom msas pass."""
+
+ test_str_paths = [
+ tmp_path / "msa.sto",
+ tmp_path / "dir_1/msa_1.sto",
+ tmp_path / "dir_1/msa_2.a3m",
+ tmp_path / "dir_1/dir_11/msa.sto",
+ tmp_path / "dir_2/msa_1.sto",
+ tmp_path / "dir_2/msa_2.a3m",
+ tmp_path / "dir_3/msa_1.sto",
+ tmp_path / "dir_3/msa_2.a3m",
+ ]
+ for path in test_str_paths:
+ path.parent.mkdir(exist_ok=True, parents=True)
+ path.touch()
+
+ test_paths = [
+ tmp_path / "msa.sto",
+ tmp_path / "dir_1",
+ tmp_path / "dir_2",
+ tmp_path / "dir_3/msa_1.sto",
+ ]
+
+ extracted_paths = extract_and_validate_custom_msas(test_paths)
+
+ assert len(extracted_paths) == 6
+
+
+def test_extract_and_validate_custom_msas_fails_if_unsupported(tmp_path: Path):
+ """Test extract and validate custom msas fails if an unsupported file is passed."""
+ file = tmp_path / "msa.txt"
+ file.touch()
+ test_paths = [Path(file)]
+ with pytest.raises(
+ typer.BadParameter, match=f"The file '{file}' is not supported."
+ ):
+ extract_and_validate_custom_msas(test_paths)
+
+
+@pytest.mark.parametrize(
+ ("file", "expected_path"),
+ [
+ (Path("initial_guess.cif"), Path("initial_guess.cif")),
+ (Path("dir_1/initial_guess_1.cif"), Path("dir_1/initial_guess_1.cif")),
+ (None, None),
+ ],
+)
+def test_extract_and_validate_initial_guess_path(file: str, expected_path):
+ """Test extract and validate initial guess file path."""
+
+ validated_path = validate_initial_guess(file)
+
+ assert validated_path == expected_path
diff --git a/folding-studio/tests/utils/test_path_helpers.py b/folding-studio/tests/utils/test_path_helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..b344a84a0e978f98f45bede21f0951b1a99ef299
--- /dev/null
+++ b/folding-studio/tests/utils/test_path_helpers.py
@@ -0,0 +1,63 @@
+"""Test path helpers."""
+
+from pathlib import Path
+
+import pytest
+from folding_studio.utils.path_helpers import extract_files, validate_path
+
+
+def test_extract_files(tmp_path: Path):
+ """Test extract files."""
+
+ test_str_paths = [
+ tmp_path / "file.txt",
+ tmp_path / "dir_1/file.txt",
+ tmp_path / "dir_1/file_2.txt",
+ tmp_path / "dir_1/dir_11/file.txt",
+ tmp_path / "dir_2/file.txt",
+ tmp_path / "dir_2/file_2.txt",
+ ]
+ for path in test_str_paths:
+ path.parent.mkdir(exist_ok=True, parents=True)
+ path.touch()
+
+ test_paths = [
+ tmp_path / "file.txt",
+ tmp_path / "dir_1",
+ tmp_path / "dir_2/file.txt",
+ ]
+
+ extracted_paths = extract_files(test_paths)
+
+ assert len(extracted_paths) == 4
+
+
+def test_validate_path(tmp_directory: Path):
+ with pytest.raises(FileNotFoundError):
+ validate_path("unknown_path")
+
+ file_path = tmp_directory / "file.txt"
+ file_path.touch()
+
+ file_multi_suffix_path = tmp_directory / "file.tar.gz"
+ file_multi_suffix_path.touch()
+
+ dir_path = tmp_directory / "directory"
+ dir_path.mkdir()
+
+ with pytest.raises(FileNotFoundError):
+ validate_path(dir_path, is_file=True)
+
+ with pytest.raises(NotADirectoryError):
+ validate_path(file_path, is_dir=True)
+
+ with pytest.raises(ValueError):
+ validate_path(file_path, is_file=True, file_suffix=[".csv"])
+
+ validate_path(file_path, is_file=True, file_suffix=[".txt"])
+
+ with pytest.raises(ValueError):
+ validate_path(file_multi_suffix_path, is_file=True, file_suffix=[".tar"])
+
+ validate_path(file_multi_suffix_path, is_file=True, file_suffix=[".gz"])
+ validate_path(file_multi_suffix_path, is_file=True, file_suffix=[".tar.gz"])
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..70195955db74d30e7ab1d87288ffff800e447735
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,9 @@
+[project]
+name = "fs"
+version = "0.1.0"
+description = "Add your description here"
+readme = "README.md"
+requires-python = ">=3.11"
+dependencies = [
+ "gradio==5.30.0",
+]
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000000000000000000000000000000000000..9860b7c44fbef621aa0367be56a612a60c226ffe
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,1006 @@
+version = 1
+revision = 1
+requires-python = ">=3.11"
+resolution-markers = [
+ "python_full_version >= '3.13'",
+ "python_full_version == '3.12.*'",
+ "python_full_version < '3.12'",
+]
+
+[[package]]
+name = "aiofiles"
+version = "24.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896 },
+]
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
+]
+
+[[package]]
+name = "anyio"
+version = "4.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "idna" },
+ { name = "sniffio" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 },
+]
+
+[[package]]
+name = "audioop-lts"
+version = "0.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/dd/3b/69ff8a885e4c1c42014c2765275c4bd91fe7bc9847e9d8543dbcbb09f820/audioop_lts-0.2.1.tar.gz", hash = "sha256:e81268da0baa880431b68b1308ab7257eb33f356e57a5f9b1f915dfb13dd1387", size = 30204 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/01/91/a219253cc6e92db2ebeaf5cf8197f71d995df6f6b16091d1f3ce62cb169d/audioop_lts-0.2.1-cp313-abi3-macosx_10_13_universal2.whl", hash = "sha256:fd1345ae99e17e6910f47ce7d52673c6a1a70820d78b67de1b7abb3af29c426a", size = 46252 },
+ { url = "https://files.pythonhosted.org/packages/ec/f6/3cb21e0accd9e112d27cee3b1477cd04dafe88675c54ad8b0d56226c1e0b/audioop_lts-0.2.1-cp313-abi3-macosx_10_13_x86_64.whl", hash = "sha256:e175350da05d2087e12cea8e72a70a1a8b14a17e92ed2022952a4419689ede5e", size = 27183 },
+ { url = "https://files.pythonhosted.org/packages/ea/7e/f94c8a6a8b2571694375b4cf94d3e5e0f529e8e6ba280fad4d8c70621f27/audioop_lts-0.2.1-cp313-abi3-macosx_11_0_arm64.whl", hash = "sha256:4a8dd6a81770f6ecf019c4b6d659e000dc26571b273953cef7cd1d5ce2ff3ae6", size = 26726 },
+ { url = "https://files.pythonhosted.org/packages/ef/f8/a0e8e7a033b03fae2b16bc5aa48100b461c4f3a8a38af56d5ad579924a3a/audioop_lts-0.2.1-cp313-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cd3c0b6f2ca25c7d2b1c3adeecbe23e65689839ba73331ebc7d893fcda7ffe", size = 80718 },
+ { url = "https://files.pythonhosted.org/packages/8f/ea/a98ebd4ed631c93b8b8f2368862cd8084d75c77a697248c24437c36a6f7e/audioop_lts-0.2.1-cp313-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff3f97b3372c97782e9c6d3d7fdbe83bce8f70de719605bd7ee1839cd1ab360a", size = 88326 },
+ { url = "https://files.pythonhosted.org/packages/33/79/e97a9f9daac0982aa92db1199339bd393594d9a4196ad95ae088635a105f/audioop_lts-0.2.1-cp313-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a351af79edefc2a1bd2234bfd8b339935f389209943043913a919df4b0f13300", size = 80539 },
+ { url = "https://files.pythonhosted.org/packages/b2/d3/1051d80e6f2d6f4773f90c07e73743a1e19fcd31af58ff4e8ef0375d3a80/audioop_lts-0.2.1-cp313-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aeb6f96f7f6da80354330470b9134d81b4cf544cdd1c549f2f45fe964d28059", size = 78577 },
+ { url = "https://files.pythonhosted.org/packages/7a/1d/54f4c58bae8dc8c64a75071c7e98e105ddaca35449376fcb0180f6e3c9df/audioop_lts-0.2.1-cp313-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c589f06407e8340e81962575fcffbba1e92671879a221186c3d4662de9fe804e", size = 82074 },
+ { url = "https://files.pythonhosted.org/packages/36/89/2e78daa7cebbea57e72c0e1927413be4db675548a537cfba6a19040d52fa/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fbae5d6925d7c26e712f0beda5ed69ebb40e14212c185d129b8dfbfcc335eb48", size = 84210 },
+ { url = "https://files.pythonhosted.org/packages/a5/57/3ff8a74df2ec2fa6d2ae06ac86e4a27d6412dbb7d0e0d41024222744c7e0/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_i686.whl", hash = "sha256:d2d5434717f33117f29b5691fbdf142d36573d751716249a288fbb96ba26a281", size = 85664 },
+ { url = "https://files.pythonhosted.org/packages/16/01/21cc4e5878f6edbc8e54be4c108d7cb9cb6202313cfe98e4ece6064580dd/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_ppc64le.whl", hash = "sha256:f626a01c0a186b08f7ff61431c01c055961ee28769591efa8800beadd27a2959", size = 93255 },
+ { url = "https://files.pythonhosted.org/packages/3e/28/7f7418c362a899ac3b0bf13b1fde2d4ffccfdeb6a859abd26f2d142a1d58/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_s390x.whl", hash = "sha256:05da64e73837f88ee5c6217d732d2584cf638003ac72df124740460531e95e47", size = 87760 },
+ { url = "https://files.pythonhosted.org/packages/6d/d8/577a8be87dc7dd2ba568895045cee7d32e81d85a7e44a29000fe02c4d9d4/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:56b7a0a4dba8e353436f31a932f3045d108a67b5943b30f85a5563f4d8488d77", size = 84992 },
+ { url = "https://files.pythonhosted.org/packages/ef/9a/4699b0c4fcf89936d2bfb5425f55f1a8b86dff4237cfcc104946c9cd9858/audioop_lts-0.2.1-cp313-abi3-win32.whl", hash = "sha256:6e899eb8874dc2413b11926b5fb3857ec0ab55222840e38016a6ba2ea9b7d5e3", size = 26059 },
+ { url = "https://files.pythonhosted.org/packages/3a/1c/1f88e9c5dd4785a547ce5fd1eb83fff832c00cc0e15c04c1119b02582d06/audioop_lts-0.2.1-cp313-abi3-win_amd64.whl", hash = "sha256:64562c5c771fb0a8b6262829b9b4f37a7b886c01b4d3ecdbae1d629717db08b4", size = 30412 },
+ { url = "https://files.pythonhosted.org/packages/c4/e9/c123fd29d89a6402ad261516f848437472ccc602abb59bba522af45e281b/audioop_lts-0.2.1-cp313-abi3-win_arm64.whl", hash = "sha256:c45317debeb64002e980077642afbd977773a25fa3dfd7ed0c84dccfc1fafcb0", size = 23578 },
+ { url = "https://files.pythonhosted.org/packages/7a/99/bb664a99561fd4266687e5cb8965e6ec31ba4ff7002c3fce3dc5ef2709db/audioop_lts-0.2.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:3827e3fce6fee4d69d96a3d00cd2ab07f3c0d844cb1e44e26f719b34a5b15455", size = 46827 },
+ { url = "https://files.pythonhosted.org/packages/c4/e3/f664171e867e0768ab982715e744430cf323f1282eb2e11ebfb6ee4c4551/audioop_lts-0.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:161249db9343b3c9780ca92c0be0d1ccbfecdbccac6844f3d0d44b9c4a00a17f", size = 27479 },
+ { url = "https://files.pythonhosted.org/packages/a6/0d/2a79231ff54eb20e83b47e7610462ad6a2bea4e113fae5aa91c6547e7764/audioop_lts-0.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5b7b4ff9de7a44e0ad2618afdc2ac920b91f4a6d3509520ee65339d4acde5abf", size = 27056 },
+ { url = "https://files.pythonhosted.org/packages/86/46/342471398283bb0634f5a6df947806a423ba74b2e29e250c7ec0e3720e4f/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72e37f416adb43b0ced93419de0122b42753ee74e87070777b53c5d2241e7fab", size = 87802 },
+ { url = "https://files.pythonhosted.org/packages/56/44/7a85b08d4ed55517634ff19ddfbd0af05bf8bfd39a204e4445cd0e6f0cc9/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534ce808e6bab6adb65548723c8cbe189a3379245db89b9d555c4210b4aaa9b6", size = 95016 },
+ { url = "https://files.pythonhosted.org/packages/a8/2a/45edbca97ea9ee9e6bbbdb8d25613a36e16a4d1e14ae01557392f15cc8d3/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2de9b6fb8b1cf9f03990b299a9112bfdf8b86b6987003ca9e8a6c4f56d39543", size = 87394 },
+ { url = "https://files.pythonhosted.org/packages/14/ae/832bcbbef2c510629593bf46739374174606e25ac7d106b08d396b74c964/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f24865991b5ed4b038add5edbf424639d1358144f4e2a3e7a84bc6ba23e35074", size = 84874 },
+ { url = "https://files.pythonhosted.org/packages/26/1c/8023c3490798ed2f90dfe58ec3b26d7520a243ae9c0fc751ed3c9d8dbb69/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bdb3b7912ccd57ea53197943f1bbc67262dcf29802c4a6df79ec1c715d45a78", size = 88698 },
+ { url = "https://files.pythonhosted.org/packages/2c/db/5379d953d4918278b1f04a5a64b2c112bd7aae8f81021009da0dcb77173c/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:120678b208cca1158f0a12d667af592e067f7a50df9adc4dc8f6ad8d065a93fb", size = 90401 },
+ { url = "https://files.pythonhosted.org/packages/99/6e/3c45d316705ab1aec2e69543a5b5e458d0d112a93d08994347fafef03d50/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:54cd4520fc830b23c7d223693ed3e1b4d464997dd3abc7c15dce9a1f9bd76ab2", size = 91864 },
+ { url = "https://files.pythonhosted.org/packages/08/58/6a371d8fed4f34debdb532c0b00942a84ebf3e7ad368e5edc26931d0e251/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:d6bd20c7a10abcb0fb3d8aaa7508c0bf3d40dfad7515c572014da4b979d3310a", size = 98796 },
+ { url = "https://files.pythonhosted.org/packages/ee/77/d637aa35497e0034ff846fd3330d1db26bc6fd9dd79c406e1341188b06a2/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f0ed1ad9bd862539ea875fb339ecb18fcc4148f8d9908f4502df28f94d23491a", size = 94116 },
+ { url = "https://files.pythonhosted.org/packages/1a/60/7afc2abf46bbcf525a6ebc0305d85ab08dc2d1e2da72c48dbb35eee5b62c/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e1af3ff32b8c38a7d900382646e91f2fc515fd19dea37e9392275a5cbfdbff63", size = 91520 },
+ { url = "https://files.pythonhosted.org/packages/65/6d/42d40da100be1afb661fd77c2b1c0dfab08af1540df57533621aea3db52a/audioop_lts-0.2.1-cp313-cp313t-win32.whl", hash = "sha256:f51bb55122a89f7a0817d7ac2319744b4640b5b446c4c3efcea5764ea99ae509", size = 26482 },
+ { url = "https://files.pythonhosted.org/packages/01/09/f08494dca79f65212f5b273aecc5a2f96691bf3307cac29acfcf84300c01/audioop_lts-0.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f0f2f336aa2aee2bce0b0dcc32bbba9178995454c7b979cf6ce086a8801e14c7", size = 30780 },
+ { url = "https://files.pythonhosted.org/packages/5d/35/be73b6015511aa0173ec595fc579133b797ad532996f2998fd6b8d1bbe6b/audioop_lts-0.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:78bfb3703388c780edf900be66e07de5a3d4105ca8e8720c5c4d67927e0b15d0", size = 23918 },
+]
+
+[[package]]
+name = "certifi"
+version = "2025.4.26"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618 },
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794 },
+ { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846 },
+ { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350 },
+ { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657 },
+ { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260 },
+ { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164 },
+ { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571 },
+ { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952 },
+ { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959 },
+ { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030 },
+ { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015 },
+ { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106 },
+ { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402 },
+ { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936 },
+ { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790 },
+ { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924 },
+ { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626 },
+ { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567 },
+ { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957 },
+ { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408 },
+ { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399 },
+ { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815 },
+ { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537 },
+ { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565 },
+ { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357 },
+ { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776 },
+ { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622 },
+ { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435 },
+ { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653 },
+ { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231 },
+ { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243 },
+ { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442 },
+ { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147 },
+ { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057 },
+ { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454 },
+ { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174 },
+ { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166 },
+ { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064 },
+ { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641 },
+ { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626 },
+]
+
+[[package]]
+name = "click"
+version = "8.1.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
+]
+
+[[package]]
+name = "fastapi"
+version = "0.115.12"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "starlette" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164 },
+]
+
+[[package]]
+name = "ffmpy"
+version = "0.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4d/66/5697a7421c418ccbfae87b7e6503b480070f7cb16c25c77201afc6246348/ffmpy-0.5.0.tar.gz", hash = "sha256:277e131f246d18e9dcfee9bb514c50749031c43582ce5ef82c57b51e3d3955c3", size = 5523 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/53/5d/65f40bd333463b3230b3a72d93873caaf49b0cbb5228598fafb75fcc5357/ffmpy-0.5.0-py3-none-any.whl", hash = "sha256:df3799cf5816daa56d4959a023630ee53c6768b66009dae6d131519ba4b80233", size = 6008 },
+]
+
+[[package]]
+name = "filelock"
+version = "3.18.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215 },
+]
+
+[[package]]
+name = "fs"
+version = "0.1.0"
+source = { virtual = "." }
+dependencies = [
+ { name = "gradio" },
+]
+
+[package.metadata]
+requires-dist = [{ name = "gradio", specifier = "==5.30.0" }]
+
+[[package]]
+name = "fsspec"
+version = "2025.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f2/77/deb99b97981e2e191913454da82d406702405178631c31cd623caebaf1b1/fsspec-2025.5.0.tar.gz", hash = "sha256:e4f4623bb6221f7407fd695cc535d1f857a077eb247580f4ada34f5dc25fd5c8", size = 300989 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/a9/a7022f58e081149ec0184c31ea81dcee605e1d46380b48122e1ef94ac24e/fsspec-2025.5.0-py3-none-any.whl", hash = "sha256:0ca253eca6b5333d8a2b8bd98c7326fe821f1f0fdbd34e1b445bddde8e804c95", size = 196164 },
+]
+
+[[package]]
+name = "gradio"
+version = "5.30.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiofiles" },
+ { name = "anyio" },
+ { name = "audioop-lts", marker = "python_full_version >= '3.13'" },
+ { name = "fastapi" },
+ { name = "ffmpy" },
+ { name = "gradio-client" },
+ { name = "groovy" },
+ { name = "httpx" },
+ { name = "huggingface-hub" },
+ { name = "jinja2" },
+ { name = "markupsafe" },
+ { name = "numpy" },
+ { name = "orjson" },
+ { name = "packaging" },
+ { name = "pandas" },
+ { name = "pillow" },
+ { name = "pydantic" },
+ { name = "pydub" },
+ { name = "python-multipart" },
+ { name = "pyyaml" },
+ { name = "ruff", marker = "sys_platform != 'emscripten'" },
+ { name = "safehttpx" },
+ { name = "semantic-version" },
+ { name = "starlette", marker = "sys_platform != 'emscripten'" },
+ { name = "tomlkit" },
+ { name = "typer", marker = "sys_platform != 'emscripten'" },
+ { name = "typing-extensions" },
+ { name = "urllib3", marker = "sys_platform == 'emscripten'" },
+ { name = "uvicorn", marker = "sys_platform != 'emscripten'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/19/f7/c47bd11f6e1e9e1692ee0c76b1fbf85df576541aa01f5aa253ba58727ddf/gradio-5.30.0.tar.gz", hash = "sha256:83d2dfaf5ecb2911e1aa61ccbee4aa62b7bde112396fb107237d5d8134b6131b", size = 64740431 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/58/9e/39b1e0a0c0eaa8bd7ef28274e8ac639575cd3a7290500e51af68534f2f6f/gradio-5.30.0-py3-none-any.whl", hash = "sha256:47508c4a69789f9433a1d95c5ffb5893570573431d05754394e2ffeaa6fbc2d3", size = 54161002 },
+]
+
+[[package]]
+name = "gradio-client"
+version = "1.10.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "fsspec" },
+ { name = "httpx" },
+ { name = "huggingface-hub" },
+ { name = "packaging" },
+ { name = "typing-extensions" },
+ { name = "websockets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b9/5e/f0e513041613aacc916f7d19eb98f6d209adf278921fd967750b0803afb8/gradio_client-1.10.1.tar.gz", hash = "sha256:550662eae8dc0d06d44cb8d42be74f214db1e793ad4d789d7b7ecb42e82ca045", size = 321147 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/55/6f/03eb8e0e0ec80eced5ed35a63376dabfc7391b1538502f8e85e9dc5bab02/gradio_client-1.10.1-py3-none-any.whl", hash = "sha256:fcff53f6aad3dfa9dd082adedb94256172d6b20666b1ef66480d82023e1907db", size = 323141 },
+]
+
+[[package]]
+name = "groovy"
+version = "0.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/52/36/bbdede67400277bef33d3ec0e6a31750da972c469f75966b4930c753218f/groovy-0.1.2.tar.gz", hash = "sha256:25c1dc09b3f9d7e292458aa762c6beb96ea037071bf5e917fc81fb78d2231083", size = 17325 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/28/27/3d6dcadc8a3214d8522c1e7f6a19554e33659be44546d44a2f7572ac7d2a/groovy-0.1.2-py3-none-any.whl", hash = "sha256:7f7975bab18c729a257a8b1ae9dcd70b7cafb1720481beae47719af57c35fa64", size = 14090 },
+]
+
+[[package]]
+name = "h11"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 },
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 },
+]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "certifi" },
+ { name = "httpcore" },
+ { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 },
+]
+
+[[package]]
+name = "huggingface-hub"
+version = "0.31.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "filelock" },
+ { name = "fsspec" },
+ { name = "packaging" },
+ { name = "pyyaml" },
+ { name = "requests" },
+ { name = "tqdm" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/05/a0/7445e07427a917399db619e3c7383de3cd723c20d3b3a8a527a096c49a44/huggingface_hub-0.31.4.tar.gz", hash = "sha256:5a7bc710b9f9c028aee5b1476867b4ec5c1b92f043cb364d5fdc54354757e4ce", size = 407736 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/33/c7/852d4473788cfd7d79b73951244b87a6d75fdac296c90aeb5e85dbb2fb5e/huggingface_hub-0.31.4-py3-none-any.whl", hash = "sha256:4f70704760296cc69b612916056e9845f5490a33782b924fc531767967acc15d", size = 489319 },
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 },
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mdurl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 },
+ { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 },
+ { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 },
+ { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 },
+ { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 },
+ { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 },
+ { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 },
+ { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 },
+ { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 },
+ { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 },
+ { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 },
+ { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 },
+ { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 },
+ { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 },
+ { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 },
+ { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 },
+ { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 },
+ { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 },
+ { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 },
+ { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 },
+ { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 },
+ { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 },
+ { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 },
+ { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 },
+ { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 },
+ { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 },
+ { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 },
+ { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 },
+ { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 },
+ { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 },
+ { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 },
+ { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 },
+ { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 },
+ { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 },
+ { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 },
+ { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 },
+ { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 },
+ { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 },
+ { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 },
+ { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 },
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
+]
+
+[[package]]
+name = "numpy"
+version = "2.2.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd", size = 20276440 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae", size = 21176963 },
+ { url = "https://files.pythonhosted.org/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a", size = 14406743 },
+ { url = "https://files.pythonhosted.org/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42", size = 5352616 },
+ { url = "https://files.pythonhosted.org/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491", size = 6889579 },
+ { url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a", size = 14312005 },
+ { url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf", size = 16821570 },
+ { url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1", size = 15818548 },
+ { url = "https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab", size = 18620521 },
+ { url = "https://files.pythonhosted.org/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47", size = 6525866 },
+ { url = "https://files.pythonhosted.org/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303", size = 12907455 },
+ { url = "https://files.pythonhosted.org/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff", size = 20875348 },
+ { url = "https://files.pythonhosted.org/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c", size = 14119362 },
+ { url = "https://files.pythonhosted.org/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3", size = 5084103 },
+ { url = "https://files.pythonhosted.org/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282", size = 6625382 },
+ { url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87", size = 14018462 },
+ { url = "https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249", size = 16527618 },
+ { url = "https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49", size = 15505511 },
+ { url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de", size = 18313783 },
+ { url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4", size = 6246506 },
+ { url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2", size = 12614190 },
+ { url = "https://files.pythonhosted.org/packages/f9/5c/6657823f4f594f72b5471f1db1ab12e26e890bb2e41897522d134d2a3e81/numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84", size = 20867828 },
+ { url = "https://files.pythonhosted.org/packages/dc/9e/14520dc3dadf3c803473bd07e9b2bd1b69bc583cb2497b47000fed2fa92f/numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b", size = 14143006 },
+ { url = "https://files.pythonhosted.org/packages/4f/06/7e96c57d90bebdce9918412087fc22ca9851cceaf5567a45c1f404480e9e/numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d", size = 5076765 },
+ { url = "https://files.pythonhosted.org/packages/73/ed/63d920c23b4289fdac96ddbdd6132e9427790977d5457cd132f18e76eae0/numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566", size = 6617736 },
+ { url = "https://files.pythonhosted.org/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f", size = 14010719 },
+ { url = "https://files.pythonhosted.org/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f", size = 16526072 },
+ { url = "https://files.pythonhosted.org/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868", size = 15503213 },
+ { url = "https://files.pythonhosted.org/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d", size = 18316632 },
+ { url = "https://files.pythonhosted.org/packages/f0/3b/5cba2b1d88760ef86596ad0f3d484b1cbff7c115ae2429678465057c5155/numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd", size = 6244532 },
+ { url = "https://files.pythonhosted.org/packages/cb/3b/d58c12eafcb298d4e6d0d40216866ab15f59e55d148a5658bb3132311fcf/numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c", size = 12610885 },
+ { url = "https://files.pythonhosted.org/packages/6b/9e/4bf918b818e516322db999ac25d00c75788ddfd2d2ade4fa66f1f38097e1/numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6", size = 20963467 },
+ { url = "https://files.pythonhosted.org/packages/61/66/d2de6b291507517ff2e438e13ff7b1e2cdbdb7cb40b3ed475377aece69f9/numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda", size = 14225144 },
+ { url = "https://files.pythonhosted.org/packages/e4/25/480387655407ead912e28ba3a820bc69af9adf13bcbe40b299d454ec011f/numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40", size = 5200217 },
+ { url = "https://files.pythonhosted.org/packages/aa/4a/6e313b5108f53dcbf3aca0c0f3e9c92f4c10ce57a0a721851f9785872895/numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8", size = 6712014 },
+ { url = "https://files.pythonhosted.org/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f", size = 14077935 },
+ { url = "https://files.pythonhosted.org/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa", size = 16600122 },
+ { url = "https://files.pythonhosted.org/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571", size = 15586143 },
+ { url = "https://files.pythonhosted.org/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1", size = 18385260 },
+ { url = "https://files.pythonhosted.org/packages/09/04/f2f83279d287407cf36a7a8053a5abe7be3622a4363337338f2585e4afda/numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff", size = 6377225 },
+ { url = "https://files.pythonhosted.org/packages/67/0e/35082d13c09c02c011cf21570543d202ad929d961c02a147493cb0c2bdf5/numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06", size = 12771374 },
+]
+
+[[package]]
+name = "orjson"
+version = "3.10.18"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/81/0b/fea456a3ffe74e70ba30e01ec183a9b26bec4d497f61dcfce1b601059c60/orjson-3.10.18.tar.gz", hash = "sha256:e8da3947d92123eda795b68228cafe2724815621fe35e8e320a9e9593a4bcd53", size = 5422810 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/97/c7/c54a948ce9a4278794f669a353551ce7db4ffb656c69a6e1f2264d563e50/orjson-3.10.18-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e0a183ac3b8e40471e8d843105da6fbe7c070faab023be3b08188ee3f85719b8", size = 248929 },
+ { url = "https://files.pythonhosted.org/packages/9e/60/a9c674ef1dd8ab22b5b10f9300e7e70444d4e3cda4b8258d6c2488c32143/orjson-3.10.18-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5ef7c164d9174362f85238d0cd4afdeeb89d9e523e4651add6a5d458d6f7d42d", size = 133364 },
+ { url = "https://files.pythonhosted.org/packages/c1/4e/f7d1bdd983082216e414e6d7ef897b0c2957f99c545826c06f371d52337e/orjson-3.10.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd14c5d99cdc7bf93f22b12ec3b294931518aa019e2a147e8aa2f31fd3240f7", size = 136995 },
+ { url = "https://files.pythonhosted.org/packages/17/89/46b9181ba0ea251c9243b0c8ce29ff7c9796fa943806a9c8b02592fce8ea/orjson-3.10.18-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b672502323b6cd133c4af6b79e3bea36bad2d16bca6c1f645903fce83909a7a", size = 132894 },
+ { url = "https://files.pythonhosted.org/packages/ca/dd/7bce6fcc5b8c21aef59ba3c67f2166f0a1a9b0317dcca4a9d5bd7934ecfd/orjson-3.10.18-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51f8c63be6e070ec894c629186b1c0fe798662b8687f3d9fdfa5e401c6bd7679", size = 137016 },
+ { url = "https://files.pythonhosted.org/packages/1c/4a/b8aea1c83af805dcd31c1f03c95aabb3e19a016b2a4645dd822c5686e94d/orjson-3.10.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9478ade5313d724e0495d167083c6f3be0dd2f1c9c8a38db9a9e912cdaf947", size = 138290 },
+ { url = "https://files.pythonhosted.org/packages/36/d6/7eb05c85d987b688707f45dcf83c91abc2251e0dd9fb4f7be96514f838b1/orjson-3.10.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187aefa562300a9d382b4b4eb9694806e5848b0cedf52037bb5c228c61bb66d4", size = 142829 },
+ { url = "https://files.pythonhosted.org/packages/d2/78/ddd3ee7873f2b5f90f016bc04062713d567435c53ecc8783aab3a4d34915/orjson-3.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da552683bc9da222379c7a01779bddd0ad39dd699dd6300abaf43eadee38334", size = 132805 },
+ { url = "https://files.pythonhosted.org/packages/8c/09/c8e047f73d2c5d21ead9c180203e111cddeffc0848d5f0f974e346e21c8e/orjson-3.10.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e450885f7b47a0231979d9c49b567ed1c4e9f69240804621be87c40bc9d3cf17", size = 135008 },
+ { url = "https://files.pythonhosted.org/packages/0c/4b/dccbf5055ef8fb6eda542ab271955fc1f9bf0b941a058490293f8811122b/orjson-3.10.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5e3c9cc2ba324187cd06287ca24f65528f16dfc80add48dc99fa6c836bb3137e", size = 413419 },
+ { url = "https://files.pythonhosted.org/packages/8a/f3/1eac0c5e2d6d6790bd2025ebfbefcbd37f0d097103d76f9b3f9302af5a17/orjson-3.10.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:50ce016233ac4bfd843ac5471e232b865271d7d9d44cf9d33773bcd883ce442b", size = 153292 },
+ { url = "https://files.pythonhosted.org/packages/1f/b4/ef0abf64c8f1fabf98791819ab502c2c8c1dc48b786646533a93637d8999/orjson-3.10.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b3ceff74a8f7ffde0b2785ca749fc4e80e4315c0fd887561144059fb1c138aa7", size = 137182 },
+ { url = "https://files.pythonhosted.org/packages/a9/a3/6ea878e7b4a0dc5c888d0370d7752dcb23f402747d10e2257478d69b5e63/orjson-3.10.18-cp311-cp311-win32.whl", hash = "sha256:fdba703c722bd868c04702cac4cb8c6b8ff137af2623bc0ddb3b3e6a2c8996c1", size = 142695 },
+ { url = "https://files.pythonhosted.org/packages/79/2a/4048700a3233d562f0e90d5572a849baa18ae4e5ce4c3ba6247e4ece57b0/orjson-3.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:c28082933c71ff4bc6ccc82a454a2bffcef6e1d7379756ca567c772e4fb3278a", size = 134603 },
+ { url = "https://files.pythonhosted.org/packages/03/45/10d934535a4993d27e1c84f1810e79ccf8b1b7418cef12151a22fe9bb1e1/orjson-3.10.18-cp311-cp311-win_arm64.whl", hash = "sha256:a6c7c391beaedd3fa63206e5c2b7b554196f14debf1ec9deb54b5d279b1b46f5", size = 131400 },
+ { url = "https://files.pythonhosted.org/packages/21/1a/67236da0916c1a192d5f4ccbe10ec495367a726996ceb7614eaa687112f2/orjson-3.10.18-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:50c15557afb7f6d63bc6d6348e0337a880a04eaa9cd7c9d569bcb4e760a24753", size = 249184 },
+ { url = "https://files.pythonhosted.org/packages/b3/bc/c7f1db3b1d094dc0c6c83ed16b161a16c214aaa77f311118a93f647b32dc/orjson-3.10.18-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:356b076f1662c9813d5fa56db7d63ccceef4c271b1fb3dd522aca291375fcf17", size = 133279 },
+ { url = "https://files.pythonhosted.org/packages/af/84/664657cd14cc11f0d81e80e64766c7ba5c9b7fc1ec304117878cc1b4659c/orjson-3.10.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:559eb40a70a7494cd5beab2d73657262a74a2c59aff2068fdba8f0424ec5b39d", size = 136799 },
+ { url = "https://files.pythonhosted.org/packages/9a/bb/f50039c5bb05a7ab024ed43ba25d0319e8722a0ac3babb0807e543349978/orjson-3.10.18-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f3c29eb9a81e2fbc6fd7ddcfba3e101ba92eaff455b8d602bf7511088bbc0eae", size = 132791 },
+ { url = "https://files.pythonhosted.org/packages/93/8c/ee74709fc072c3ee219784173ddfe46f699598a1723d9d49cbc78d66df65/orjson-3.10.18-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6612787e5b0756a171c7d81ba245ef63a3533a637c335aa7fcb8e665f4a0966f", size = 137059 },
+ { url = "https://files.pythonhosted.org/packages/6a/37/e6d3109ee004296c80426b5a62b47bcadd96a3deab7443e56507823588c5/orjson-3.10.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ac6bd7be0dcab5b702c9d43d25e70eb456dfd2e119d512447468f6405b4a69c", size = 138359 },
+ { url = "https://files.pythonhosted.org/packages/4f/5d/387dafae0e4691857c62bd02839a3bf3fa648eebd26185adfac58d09f207/orjson-3.10.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f72f100cee8dde70100406d5c1abba515a7df926d4ed81e20a9730c062fe9ad", size = 142853 },
+ { url = "https://files.pythonhosted.org/packages/27/6f/875e8e282105350b9a5341c0222a13419758545ae32ad6e0fcf5f64d76aa/orjson-3.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dca85398d6d093dd41dc0983cbf54ab8e6afd1c547b6b8a311643917fbf4e0c", size = 133131 },
+ { url = "https://files.pythonhosted.org/packages/48/b2/73a1f0b4790dcb1e5a45f058f4f5dcadc8a85d90137b50d6bbc6afd0ae50/orjson-3.10.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22748de2a07fcc8781a70edb887abf801bb6142e6236123ff93d12d92db3d406", size = 134834 },
+ { url = "https://files.pythonhosted.org/packages/56/f5/7ed133a5525add9c14dbdf17d011dd82206ca6840811d32ac52a35935d19/orjson-3.10.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3a83c9954a4107b9acd10291b7f12a6b29e35e8d43a414799906ea10e75438e6", size = 413368 },
+ { url = "https://files.pythonhosted.org/packages/11/7c/439654221ed9c3324bbac7bdf94cf06a971206b7b62327f11a52544e4982/orjson-3.10.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:303565c67a6c7b1f194c94632a4a39918e067bd6176a48bec697393865ce4f06", size = 153359 },
+ { url = "https://files.pythonhosted.org/packages/48/e7/d58074fa0cc9dd29a8fa2a6c8d5deebdfd82c6cfef72b0e4277c4017563a/orjson-3.10.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:86314fdb5053a2f5a5d881f03fca0219bfdf832912aa88d18676a5175c6916b5", size = 137466 },
+ { url = "https://files.pythonhosted.org/packages/57/4d/fe17581cf81fb70dfcef44e966aa4003360e4194d15a3f38cbffe873333a/orjson-3.10.18-cp312-cp312-win32.whl", hash = "sha256:187ec33bbec58c76dbd4066340067d9ece6e10067bb0cc074a21ae3300caa84e", size = 142683 },
+ { url = "https://files.pythonhosted.org/packages/e6/22/469f62d25ab5f0f3aee256ea732e72dc3aab6d73bac777bd6277955bceef/orjson-3.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:f9f94cf6d3f9cd720d641f8399e390e7411487e493962213390d1ae45c7814fc", size = 134754 },
+ { url = "https://files.pythonhosted.org/packages/10/b0/1040c447fac5b91bc1e9c004b69ee50abb0c1ffd0d24406e1350c58a7fcb/orjson-3.10.18-cp312-cp312-win_arm64.whl", hash = "sha256:3d600be83fe4514944500fa8c2a0a77099025ec6482e8087d7659e891f23058a", size = 131218 },
+ { url = "https://files.pythonhosted.org/packages/04/f0/8aedb6574b68096f3be8f74c0b56d36fd94bcf47e6c7ed47a7bd1474aaa8/orjson-3.10.18-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:69c34b9441b863175cc6a01f2935de994025e773f814412030f269da4f7be147", size = 249087 },
+ { url = "https://files.pythonhosted.org/packages/bc/f7/7118f965541aeac6844fcb18d6988e111ac0d349c9b80cda53583e758908/orjson-3.10.18-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:1ebeda919725f9dbdb269f59bc94f861afbe2a27dce5608cdba2d92772364d1c", size = 133273 },
+ { url = "https://files.pythonhosted.org/packages/fb/d9/839637cc06eaf528dd8127b36004247bf56e064501f68df9ee6fd56a88ee/orjson-3.10.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5adf5f4eed520a4959d29ea80192fa626ab9a20b2ea13f8f6dc58644f6927103", size = 136779 },
+ { url = "https://files.pythonhosted.org/packages/2b/6d/f226ecfef31a1f0e7d6bf9a31a0bbaf384c7cbe3fce49cc9c2acc51f902a/orjson-3.10.18-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7592bb48a214e18cd670974f289520f12b7aed1fa0b2e2616b8ed9e069e08595", size = 132811 },
+ { url = "https://files.pythonhosted.org/packages/73/2d/371513d04143c85b681cf8f3bce743656eb5b640cb1f461dad750ac4b4d4/orjson-3.10.18-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f872bef9f042734110642b7a11937440797ace8c87527de25e0c53558b579ccc", size = 137018 },
+ { url = "https://files.pythonhosted.org/packages/69/cb/a4d37a30507b7a59bdc484e4a3253c8141bf756d4e13fcc1da760a0b00cb/orjson-3.10.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0315317601149c244cb3ecef246ef5861a64824ccbcb8018d32c66a60a84ffbc", size = 138368 },
+ { url = "https://files.pythonhosted.org/packages/1e/ae/cd10883c48d912d216d541eb3db8b2433415fde67f620afe6f311f5cd2ca/orjson-3.10.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0da26957e77e9e55a6c2ce2e7182a36a6f6b180ab7189315cb0995ec362e049", size = 142840 },
+ { url = "https://files.pythonhosted.org/packages/6d/4c/2bda09855c6b5f2c055034c9eda1529967b042ff8d81a05005115c4e6772/orjson-3.10.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb70d489bc79b7519e5803e2cc4c72343c9dc1154258adf2f8925d0b60da7c58", size = 133135 },
+ { url = "https://files.pythonhosted.org/packages/13/4a/35971fd809a8896731930a80dfff0b8ff48eeb5d8b57bb4d0d525160017f/orjson-3.10.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e9e86a6af31b92299b00736c89caf63816f70a4001e750bda179e15564d7a034", size = 134810 },
+ { url = "https://files.pythonhosted.org/packages/99/70/0fa9e6310cda98365629182486ff37a1c6578e34c33992df271a476ea1cd/orjson-3.10.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:c382a5c0b5931a5fc5405053d36c1ce3fd561694738626c77ae0b1dfc0242ca1", size = 413491 },
+ { url = "https://files.pythonhosted.org/packages/32/cb/990a0e88498babddb74fb97855ae4fbd22a82960e9b06eab5775cac435da/orjson-3.10.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8e4b2ae732431127171b875cb2668f883e1234711d3c147ffd69fe5be51a8012", size = 153277 },
+ { url = "https://files.pythonhosted.org/packages/92/44/473248c3305bf782a384ed50dd8bc2d3cde1543d107138fd99b707480ca1/orjson-3.10.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d808e34ddb24fc29a4d4041dcfafbae13e129c93509b847b14432717d94b44f", size = 137367 },
+ { url = "https://files.pythonhosted.org/packages/ad/fd/7f1d3edd4ffcd944a6a40e9f88af2197b619c931ac4d3cfba4798d4d3815/orjson-3.10.18-cp313-cp313-win32.whl", hash = "sha256:ad8eacbb5d904d5591f27dee4031e2c1db43d559edb8f91778efd642d70e6bea", size = 142687 },
+ { url = "https://files.pythonhosted.org/packages/4b/03/c75c6ad46be41c16f4cfe0352a2d1450546f3c09ad2c9d341110cd87b025/orjson-3.10.18-cp313-cp313-win_amd64.whl", hash = "sha256:aed411bcb68bf62e85588f2a7e03a6082cc42e5a2796e06e72a962d7c6310b52", size = 134794 },
+ { url = "https://files.pythonhosted.org/packages/c2/28/f53038a5a72cc4fd0b56c1eafb4ef64aec9685460d5ac34de98ca78b6e29/orjson-3.10.18-cp313-cp313-win_arm64.whl", hash = "sha256:f54c1385a0e6aba2f15a40d703b858bedad36ded0491e55d35d905b2c34a4cc3", size = 131186 },
+]
+
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 },
+]
+
+[[package]]
+name = "pandas"
+version = "2.2.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+ { name = "python-dateutil" },
+ { name = "pytz" },
+ { name = "tzdata" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222 },
+ { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274 },
+ { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", size = 15579836 },
+ { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505 },
+ { url = "https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420 },
+ { url = "https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457 },
+ { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166 },
+ { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 },
+ { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 },
+ { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 },
+ { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 },
+ { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 },
+ { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 },
+ { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 },
+ { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 },
+ { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 },
+ { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 },
+ { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 },
+ { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 },
+ { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 },
+ { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 },
+ { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 },
+ { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 },
+ { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 },
+ { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 },
+ { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 },
+ { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 },
+]
+
+[[package]]
+name = "pillow"
+version = "11.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/af/cb/bb5c01fcd2a69335b86c22142b2bccfc3464087efb7fd382eee5ffc7fdf7/pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6", size = 47026707 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/68/08/3fbf4b98924c73037a8e8b4c2c774784805e0fb4ebca6c5bb60795c40125/pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70", size = 3198450 },
+ { url = "https://files.pythonhosted.org/packages/84/92/6505b1af3d2849d5e714fc75ba9e69b7255c05ee42383a35a4d58f576b16/pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf", size = 3030550 },
+ { url = "https://files.pythonhosted.org/packages/3c/8c/ac2f99d2a70ff966bc7eb13dacacfaab57c0549b2ffb351b6537c7840b12/pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7", size = 4415018 },
+ { url = "https://files.pythonhosted.org/packages/1f/e3/0a58b5d838687f40891fff9cbaf8669f90c96b64dc8f91f87894413856c6/pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8", size = 4498006 },
+ { url = "https://files.pythonhosted.org/packages/21/f5/6ba14718135f08fbfa33308efe027dd02b781d3f1d5c471444a395933aac/pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600", size = 4517773 },
+ { url = "https://files.pythonhosted.org/packages/20/f2/805ad600fc59ebe4f1ba6129cd3a75fb0da126975c8579b8f57abeb61e80/pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788", size = 4607069 },
+ { url = "https://files.pythonhosted.org/packages/71/6b/4ef8a288b4bb2e0180cba13ca0a519fa27aa982875882392b65131401099/pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e", size = 4583460 },
+ { url = "https://files.pythonhosted.org/packages/62/ae/f29c705a09cbc9e2a456590816e5c234382ae5d32584f451c3eb41a62062/pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e", size = 4661304 },
+ { url = "https://files.pythonhosted.org/packages/6e/1a/c8217b6f2f73794a5e219fbad087701f412337ae6dbb956db37d69a9bc43/pillow-11.2.1-cp311-cp311-win32.whl", hash = "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6", size = 2331809 },
+ { url = "https://files.pythonhosted.org/packages/e2/72/25a8f40170dc262e86e90f37cb72cb3de5e307f75bf4b02535a61afcd519/pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193", size = 2676338 },
+ { url = "https://files.pythonhosted.org/packages/06/9e/76825e39efee61efea258b479391ca77d64dbd9e5804e4ad0fa453b4ba55/pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7", size = 2414918 },
+ { url = "https://files.pythonhosted.org/packages/c7/40/052610b15a1b8961f52537cc8326ca6a881408bc2bdad0d852edeb6ed33b/pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f", size = 3190185 },
+ { url = "https://files.pythonhosted.org/packages/e5/7e/b86dbd35a5f938632093dc40d1682874c33dcfe832558fc80ca56bfcb774/pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b", size = 3030306 },
+ { url = "https://files.pythonhosted.org/packages/a4/5c/467a161f9ed53e5eab51a42923c33051bf8d1a2af4626ac04f5166e58e0c/pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d", size = 4416121 },
+ { url = "https://files.pythonhosted.org/packages/62/73/972b7742e38ae0e2ac76ab137ca6005dcf877480da0d9d61d93b613065b4/pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4", size = 4501707 },
+ { url = "https://files.pythonhosted.org/packages/e4/3a/427e4cb0b9e177efbc1a84798ed20498c4f233abde003c06d2650a6d60cb/pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d", size = 4522921 },
+ { url = "https://files.pythonhosted.org/packages/fe/7c/d8b1330458e4d2f3f45d9508796d7caf0c0d3764c00c823d10f6f1a3b76d/pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4", size = 4612523 },
+ { url = "https://files.pythonhosted.org/packages/b3/2f/65738384e0b1acf451de5a573d8153fe84103772d139e1e0bdf1596be2ea/pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443", size = 4587836 },
+ { url = "https://files.pythonhosted.org/packages/6a/c5/e795c9f2ddf3debb2dedd0df889f2fe4b053308bb59a3cc02a0cd144d641/pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c", size = 4669390 },
+ { url = "https://files.pythonhosted.org/packages/96/ae/ca0099a3995976a9fce2f423166f7bff9b12244afdc7520f6ed38911539a/pillow-11.2.1-cp312-cp312-win32.whl", hash = "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3", size = 2332309 },
+ { url = "https://files.pythonhosted.org/packages/7c/18/24bff2ad716257fc03da964c5e8f05d9790a779a8895d6566e493ccf0189/pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941", size = 2676768 },
+ { url = "https://files.pythonhosted.org/packages/da/bb/e8d656c9543276517ee40184aaa39dcb41e683bca121022f9323ae11b39d/pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb", size = 2415087 },
+ { url = "https://files.pythonhosted.org/packages/36/9c/447528ee3776e7ab8897fe33697a7ff3f0475bb490c5ac1456a03dc57956/pillow-11.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fdec757fea0b793056419bca3e9932eb2b0ceec90ef4813ea4c1e072c389eb28", size = 3190098 },
+ { url = "https://files.pythonhosted.org/packages/b5/09/29d5cd052f7566a63e5b506fac9c60526e9ecc553825551333e1e18a4858/pillow-11.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0e130705d568e2f43a17bcbe74d90958e8a16263868a12c3e0d9c8162690830", size = 3030166 },
+ { url = "https://files.pythonhosted.org/packages/71/5d/446ee132ad35e7600652133f9c2840b4799bbd8e4adba881284860da0a36/pillow-11.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bdb5e09068332578214cadd9c05e3d64d99e0e87591be22a324bdbc18925be0", size = 4408674 },
+ { url = "https://files.pythonhosted.org/packages/69/5f/cbe509c0ddf91cc3a03bbacf40e5c2339c4912d16458fcb797bb47bcb269/pillow-11.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d189ba1bebfbc0c0e529159631ec72bb9e9bc041f01ec6d3233d6d82eb823bc1", size = 4496005 },
+ { url = "https://files.pythonhosted.org/packages/f9/b3/dd4338d8fb8a5f312021f2977fb8198a1184893f9b00b02b75d565c33b51/pillow-11.2.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:191955c55d8a712fab8934a42bfefbf99dd0b5875078240943f913bb66d46d9f", size = 4518707 },
+ { url = "https://files.pythonhosted.org/packages/13/eb/2552ecebc0b887f539111c2cd241f538b8ff5891b8903dfe672e997529be/pillow-11.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:ad275964d52e2243430472fc5d2c2334b4fc3ff9c16cb0a19254e25efa03a155", size = 4610008 },
+ { url = "https://files.pythonhosted.org/packages/72/d1/924ce51bea494cb6e7959522d69d7b1c7e74f6821d84c63c3dc430cbbf3b/pillow-11.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:750f96efe0597382660d8b53e90dd1dd44568a8edb51cb7f9d5d918b80d4de14", size = 4585420 },
+ { url = "https://files.pythonhosted.org/packages/43/ab/8f81312d255d713b99ca37479a4cb4b0f48195e530cdc1611990eb8fd04b/pillow-11.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fe15238d3798788d00716637b3d4e7bb6bde18b26e5d08335a96e88564a36b6b", size = 4667655 },
+ { url = "https://files.pythonhosted.org/packages/94/86/8f2e9d2dc3d308dfd137a07fe1cc478df0a23d42a6c4093b087e738e4827/pillow-11.2.1-cp313-cp313-win32.whl", hash = "sha256:3fe735ced9a607fee4f481423a9c36701a39719252a9bb251679635f99d0f7d2", size = 2332329 },
+ { url = "https://files.pythonhosted.org/packages/6d/ec/1179083b8d6067a613e4d595359b5fdea65d0a3b7ad623fee906e1b3c4d2/pillow-11.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:74ee3d7ecb3f3c05459ba95eed5efa28d6092d751ce9bf20e3e253a4e497e691", size = 2676388 },
+ { url = "https://files.pythonhosted.org/packages/23/f1/2fc1e1e294de897df39fa8622d829b8828ddad938b0eaea256d65b84dd72/pillow-11.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:5119225c622403afb4b44bad4c1ca6c1f98eed79db8d3bc6e4e160fc6339d66c", size = 2414950 },
+ { url = "https://files.pythonhosted.org/packages/c4/3e/c328c48b3f0ead7bab765a84b4977acb29f101d10e4ef57a5e3400447c03/pillow-11.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8ce2e8411c7aaef53e6bb29fe98f28cd4fbd9a1d9be2eeea434331aac0536b22", size = 3192759 },
+ { url = "https://files.pythonhosted.org/packages/18/0e/1c68532d833fc8b9f404d3a642991441d9058eccd5606eab31617f29b6d4/pillow-11.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9ee66787e095127116d91dea2143db65c7bb1e232f617aa5957c0d9d2a3f23a7", size = 3033284 },
+ { url = "https://files.pythonhosted.org/packages/b7/cb/6faf3fb1e7705fd2db74e070f3bf6f88693601b0ed8e81049a8266de4754/pillow-11.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9622e3b6c1d8b551b6e6f21873bdcc55762b4b2126633014cea1803368a9aa16", size = 4445826 },
+ { url = "https://files.pythonhosted.org/packages/07/94/8be03d50b70ca47fb434a358919d6a8d6580f282bbb7af7e4aa40103461d/pillow-11.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63b5dff3a68f371ea06025a1a6966c9a1e1ee452fc8020c2cd0ea41b83e9037b", size = 4527329 },
+ { url = "https://files.pythonhosted.org/packages/fd/a4/bfe78777076dc405e3bd2080bc32da5ab3945b5a25dc5d8acaa9de64a162/pillow-11.2.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:31df6e2d3d8fc99f993fd253e97fae451a8db2e7207acf97859732273e108406", size = 4549049 },
+ { url = "https://files.pythonhosted.org/packages/65/4d/eaf9068dc687c24979e977ce5677e253624bd8b616b286f543f0c1b91662/pillow-11.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:062b7a42d672c45a70fa1f8b43d1d38ff76b63421cbbe7f88146b39e8a558d91", size = 4635408 },
+ { url = "https://files.pythonhosted.org/packages/1d/26/0fd443365d9c63bc79feb219f97d935cd4b93af28353cba78d8e77b61719/pillow-11.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4eb92eca2711ef8be42fd3f67533765d9fd043b8c80db204f16c8ea62ee1a751", size = 4614863 },
+ { url = "https://files.pythonhosted.org/packages/49/65/dca4d2506be482c2c6641cacdba5c602bc76d8ceb618fd37de855653a419/pillow-11.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f91ebf30830a48c825590aede79376cb40f110b387c17ee9bd59932c961044f9", size = 4692938 },
+ { url = "https://files.pythonhosted.org/packages/b3/92/1ca0c3f09233bd7decf8f7105a1c4e3162fb9142128c74adad0fb361b7eb/pillow-11.2.1-cp313-cp313t-win32.whl", hash = "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd", size = 2335774 },
+ { url = "https://files.pythonhosted.org/packages/a5/ac/77525347cb43b83ae905ffe257bbe2cc6fd23acb9796639a1f56aa59d191/pillow-11.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e", size = 2681895 },
+ { url = "https://files.pythonhosted.org/packages/67/32/32dc030cfa91ca0fc52baebbba2e009bb001122a1daa8b6a79ad830b38d3/pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681", size = 2417234 },
+ { url = "https://files.pythonhosted.org/packages/a4/ad/2613c04633c7257d9481ab21d6b5364b59fc5d75faafd7cb8693523945a3/pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed", size = 3181734 },
+ { url = "https://files.pythonhosted.org/packages/a4/fd/dcdda4471ed667de57bb5405bb42d751e6cfdd4011a12c248b455c778e03/pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c", size = 2999841 },
+ { url = "https://files.pythonhosted.org/packages/ac/89/8a2536e95e77432833f0db6fd72a8d310c8e4272a04461fb833eb021bf94/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd", size = 3437470 },
+ { url = "https://files.pythonhosted.org/packages/9d/8f/abd47b73c60712f88e9eda32baced7bfc3e9bd6a7619bb64b93acff28c3e/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076", size = 3460013 },
+ { url = "https://files.pythonhosted.org/packages/f6/20/5c0a0aa83b213b7a07ec01e71a3d6ea2cf4ad1d2c686cc0168173b6089e7/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b", size = 3527165 },
+ { url = "https://files.pythonhosted.org/packages/58/0e/2abab98a72202d91146abc839e10c14f7cf36166f12838ea0c4db3ca6ecb/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f", size = 3571586 },
+ { url = "https://files.pythonhosted.org/packages/21/2c/5e05f58658cf49b6667762cca03d6e7d85cededde2caf2ab37b81f80e574/pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044", size = 2674751 },
+]
+
+[[package]]
+name = "pydantic"
+version = "2.9.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a9/b7/d9e3f12af310e1120c21603644a1cd86f59060e040ec5c3a80b8f05fae30/pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f", size = 769917 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/df/e4/ba44652d562cbf0bf320e0f3810206149c8a4e99cdbf66da82e97ab53a15/pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12", size = 434928 },
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.23.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e2/aa/6b6a9b9f8537b872f552ddd46dd3da230367754b6f707b8e1e963f515ea3/pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863", size = 402156 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5d/30/890a583cd3f2be27ecf32b479d5d615710bb926d92da03e3f7838ff3e58b/pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8", size = 1865160 },
+ { url = "https://files.pythonhosted.org/packages/1d/9a/b634442e1253bc6889c87afe8bb59447f106ee042140bd57680b3b113ec7/pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d", size = 1776777 },
+ { url = "https://files.pythonhosted.org/packages/75/9a/7816295124a6b08c24c96f9ce73085032d8bcbaf7e5a781cd41aa910c891/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e", size = 1799244 },
+ { url = "https://files.pythonhosted.org/packages/a9/8f/89c1405176903e567c5f99ec53387449e62f1121894aa9fc2c4fdc51a59b/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607", size = 1805307 },
+ { url = "https://files.pythonhosted.org/packages/d5/a5/1a194447d0da1ef492e3470680c66048fef56fc1f1a25cafbea4bc1d1c48/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd", size = 2000663 },
+ { url = "https://files.pythonhosted.org/packages/13/a5/1df8541651de4455e7d587cf556201b4f7997191e110bca3b589218745a5/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea", size = 2655941 },
+ { url = "https://files.pythonhosted.org/packages/44/31/a3899b5ce02c4316865e390107f145089876dff7e1dfc770a231d836aed8/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e", size = 2052105 },
+ { url = "https://files.pythonhosted.org/packages/1b/aa/98e190f8745d5ec831f6d5449344c48c0627ac5fed4e5340a44b74878f8e/pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b", size = 1919967 },
+ { url = "https://files.pythonhosted.org/packages/ae/35/b6e00b6abb2acfee3e8f85558c02a0822e9a8b2f2d812ea8b9079b118ba0/pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0", size = 1964291 },
+ { url = "https://files.pythonhosted.org/packages/13/46/7bee6d32b69191cd649bbbd2361af79c472d72cb29bb2024f0b6e350ba06/pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64", size = 2109666 },
+ { url = "https://files.pythonhosted.org/packages/39/ef/7b34f1b122a81b68ed0a7d0e564da9ccdc9a2924c8d6c6b5b11fa3a56970/pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f", size = 1732940 },
+ { url = "https://files.pythonhosted.org/packages/2f/76/37b7e76c645843ff46c1d73e046207311ef298d3f7b2f7d8f6ac60113071/pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3", size = 1916804 },
+ { url = "https://files.pythonhosted.org/packages/74/7b/8e315f80666194b354966ec84b7d567da77ad927ed6323db4006cf915f3f/pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231", size = 1856459 },
+ { url = "https://files.pythonhosted.org/packages/14/de/866bdce10ed808323d437612aca1ec9971b981e1c52e5e42ad9b8e17a6f6/pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee", size = 1770007 },
+ { url = "https://files.pythonhosted.org/packages/dc/69/8edd5c3cd48bb833a3f7ef9b81d7666ccddd3c9a635225214e044b6e8281/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87", size = 1790245 },
+ { url = "https://files.pythonhosted.org/packages/80/33/9c24334e3af796ce80d2274940aae38dd4e5676298b4398eff103a79e02d/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8", size = 1801260 },
+ { url = "https://files.pythonhosted.org/packages/a5/6f/e9567fd90104b79b101ca9d120219644d3314962caa7948dd8b965e9f83e/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327", size = 1996872 },
+ { url = "https://files.pythonhosted.org/packages/2d/ad/b5f0fe9e6cfee915dd144edbd10b6e9c9c9c9d7a56b69256d124b8ac682e/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2", size = 2661617 },
+ { url = "https://files.pythonhosted.org/packages/06/c8/7d4b708f8d05a5cbfda3243aad468052c6e99de7d0937c9146c24d9f12e9/pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36", size = 2071831 },
+ { url = "https://files.pythonhosted.org/packages/89/4d/3079d00c47f22c9a9a8220db088b309ad6e600a73d7a69473e3a8e5e3ea3/pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126", size = 1917453 },
+ { url = "https://files.pythonhosted.org/packages/e9/88/9df5b7ce880a4703fcc2d76c8c2d8eb9f861f79d0c56f4b8f5f2607ccec8/pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e", size = 1968793 },
+ { url = "https://files.pythonhosted.org/packages/e3/b9/41f7efe80f6ce2ed3ee3c2dcfe10ab7adc1172f778cc9659509a79518c43/pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24", size = 2116872 },
+ { url = "https://files.pythonhosted.org/packages/63/08/b59b7a92e03dd25554b0436554bf23e7c29abae7cce4b1c459cd92746811/pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84", size = 1738535 },
+ { url = "https://files.pythonhosted.org/packages/88/8d/479293e4d39ab409747926eec4329de5b7129beaedc3786eca070605d07f/pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9", size = 1917992 },
+ { url = "https://files.pythonhosted.org/packages/ad/ef/16ee2df472bf0e419b6bc68c05bf0145c49247a1095e85cee1463c6a44a1/pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc", size = 1856143 },
+ { url = "https://files.pythonhosted.org/packages/da/fa/bc3dbb83605669a34a93308e297ab22be82dfb9dcf88c6cf4b4f264e0a42/pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd", size = 1770063 },
+ { url = "https://files.pythonhosted.org/packages/4e/48/e813f3bbd257a712303ebdf55c8dc46f9589ec74b384c9f652597df3288d/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05", size = 1790013 },
+ { url = "https://files.pythonhosted.org/packages/b4/e0/56eda3a37929a1d297fcab1966db8c339023bcca0b64c5a84896db3fcc5c/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d", size = 1801077 },
+ { url = "https://files.pythonhosted.org/packages/04/be/5e49376769bfbf82486da6c5c1683b891809365c20d7c7e52792ce4c71f3/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510", size = 1996782 },
+ { url = "https://files.pythonhosted.org/packages/bc/24/e3ee6c04f1d58cc15f37bcc62f32c7478ff55142b7b3e6d42ea374ea427c/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6", size = 2661375 },
+ { url = "https://files.pythonhosted.org/packages/c1/f8/11a9006de4e89d016b8de74ebb1db727dc100608bb1e6bbe9d56a3cbbcce/pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b", size = 2071635 },
+ { url = "https://files.pythonhosted.org/packages/7c/45/bdce5779b59f468bdf262a5bc9eecbae87f271c51aef628d8c073b4b4b4c/pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327", size = 1916994 },
+ { url = "https://files.pythonhosted.org/packages/d8/fa/c648308fe711ee1f88192cad6026ab4f925396d1293e8356de7e55be89b5/pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6", size = 1968877 },
+ { url = "https://files.pythonhosted.org/packages/16/16/b805c74b35607d24d37103007f899abc4880923b04929547ae68d478b7f4/pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f", size = 2116814 },
+ { url = "https://files.pythonhosted.org/packages/d1/58/5305e723d9fcdf1c5a655e6a4cc2a07128bf644ff4b1d98daf7a9dbf57da/pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769", size = 1738360 },
+ { url = "https://files.pythonhosted.org/packages/a5/ae/e14b0ff8b3f48e02394d8acd911376b7b66e164535687ef7dc24ea03072f/pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5", size = 1919411 },
+]
+
+[[package]]
+name = "pydub"
+version = "0.25.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/9a/e6bca0eed82db26562c73b5076539a4a08d3cffd19c3cc5913a3e61145fd/pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f", size = 38326 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a6/53/d78dc063216e62fc55f6b2eebb447f6a4b0a59f55c8406376f76bf959b08/pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6", size = 32327 },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 },
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
+]
+
+[[package]]
+name = "python-multipart"
+version = "0.0.20"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 },
+]
+
+[[package]]
+name = "pytz"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 },
+ { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 },
+ { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 },
+ { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 },
+ { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 },
+ { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 },
+ { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 },
+ { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 },
+ { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 },
+ { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 },
+ { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 },
+ { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 },
+ { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 },
+ { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 },
+ { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 },
+ { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 },
+ { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 },
+ { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 },
+ { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 },
+ { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 },
+ { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 },
+ { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 },
+ { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 },
+ { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 },
+ { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 },
+ { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 },
+ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 },
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "charset-normalizer" },
+ { name = "idna" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 },
+]
+
+[[package]]
+name = "rich"
+version = "14.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown-it-py" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229 },
+]
+
+[[package]]
+name = "ruff"
+version = "0.11.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e8/4c/4a3c5a97faaae6b428b336dcca81d03ad04779f8072c267ad2bd860126bf/ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6", size = 4165632 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2f/9f/596c628f8824a2ce4cd12b0f0b4c0629a62dfffc5d0f742c19a1d71be108/ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58", size = 10316243 },
+ { url = "https://files.pythonhosted.org/packages/3c/38/c1e0b77ab58b426f8c332c1d1d3432d9fc9a9ea622806e208220cb133c9e/ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed", size = 11083636 },
+ { url = "https://files.pythonhosted.org/packages/23/41/b75e15961d6047d7fe1b13886e56e8413be8467a4e1be0a07f3b303cd65a/ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca", size = 10441624 },
+ { url = "https://files.pythonhosted.org/packages/b6/2c/e396b6703f131406db1811ea3d746f29d91b41bbd43ad572fea30da1435d/ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2", size = 10624358 },
+ { url = "https://files.pythonhosted.org/packages/bd/8c/ee6cca8bdaf0f9a3704796022851a33cd37d1340bceaf4f6e991eb164e2e/ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5", size = 10176850 },
+ { url = "https://files.pythonhosted.org/packages/e9/ce/4e27e131a434321b3b7c66512c3ee7505b446eb1c8a80777c023f7e876e6/ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641", size = 11759787 },
+ { url = "https://files.pythonhosted.org/packages/58/de/1e2e77fc72adc7cf5b5123fd04a59ed329651d3eab9825674a9e640b100b/ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947", size = 12430479 },
+ { url = "https://files.pythonhosted.org/packages/07/ed/af0f2340f33b70d50121628ef175523cc4c37619e98d98748c85764c8d88/ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4", size = 11919760 },
+ { url = "https://files.pythonhosted.org/packages/24/09/d7b3d3226d535cb89234390f418d10e00a157b6c4a06dfbe723e9322cb7d/ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f", size = 14041747 },
+ { url = "https://files.pythonhosted.org/packages/62/b3/a63b4e91850e3f47f78795e6630ee9266cb6963de8f0191600289c2bb8f4/ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b", size = 11550657 },
+ { url = "https://files.pythonhosted.org/packages/46/63/a4f95c241d79402ccdbdb1d823d156c89fbb36ebfc4289dce092e6c0aa8f/ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2", size = 10489671 },
+ { url = "https://files.pythonhosted.org/packages/6a/9b/c2238bfebf1e473495659c523d50b1685258b6345d5ab0b418ca3f010cd7/ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523", size = 10160135 },
+ { url = "https://files.pythonhosted.org/packages/ba/ef/ba7251dd15206688dbfba7d413c0312e94df3b31b08f5d695580b755a899/ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125", size = 11170179 },
+ { url = "https://files.pythonhosted.org/packages/73/9f/5c336717293203ba275dbfa2ea16e49b29a9fd9a0ea8b6febfc17e133577/ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad", size = 11626021 },
+ { url = "https://files.pythonhosted.org/packages/d9/2b/162fa86d2639076667c9aa59196c020dc6d7023ac8f342416c2f5ec4bda0/ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19", size = 10494958 },
+ { url = "https://files.pythonhosted.org/packages/24/f3/66643d8f32f50a4b0d09a4832b7d919145ee2b944d43e604fbd7c144d175/ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224", size = 11650285 },
+ { url = "https://files.pythonhosted.org/packages/95/3a/2e8704d19f376c799748ff9cb041225c1d59f3e7711bc5596c8cfdc24925/ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1", size = 10765278 },
+]
+
+[[package]]
+name = "safehttpx"
+version = "0.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "httpx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/67/4c/19db75e6405692b2a96af8f06d1258f8aa7290bdc35ac966f03e207f6d7f/safehttpx-0.1.6.tar.gz", hash = "sha256:b356bfc82cee3a24c395b94a2dbeabbed60aff1aa5fa3b5fe97c4f2456ebce42", size = 9987 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4d/c0/1108ad9f01567f66b3154063605b350b69c3c9366732e09e45f9fd0d1deb/safehttpx-0.1.6-py3-none-any.whl", hash = "sha256:407cff0b410b071623087c63dd2080c3b44dc076888d8c5823c00d1e58cb381c", size = 8692 },
+]
+
+[[package]]
+name = "semantic-version"
+version = "2.10.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7d/31/f2289ce78b9b473d582568c234e104d2a342fd658cc288a7553d83bb8595/semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c", size = 52289 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6a/23/8146aad7d88f4fcb3a6218f41a60f6c2d4e3a72de72da1825dc7c8f7877c/semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177", size = 15552 },
+]
+
+[[package]]
+name = "shellingham"
+version = "1.5.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 },
+]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 },
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
+]
+
+[[package]]
+name = "starlette"
+version = "0.46.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037 },
+]
+
+[[package]]
+name = "tomlkit"
+version = "0.13.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b1/09/a439bec5888f00a54b8b9f05fa94d7f901d6735ef4e55dcec9bc37b5d8fa/tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79", size = 192885 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955 },
+]
+
+[[package]]
+name = "tqdm"
+version = "4.67.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 },
+]
+
+[[package]]
+name = "typer"
+version = "0.12.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "rich" },
+ { name = "shellingham" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c5/58/a79003b91ac2c6890fc5d90145c662fd5771c6f11447f116b63300436bc9/typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722", size = 98953 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a8/2b/886d13e742e514f704c33c4caa7df0f3b89e5a25ef8db02aa9ca3d9535d5/typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b", size = 47288 },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.13.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 },
+]
+
+[[package]]
+name = "tzdata"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 },
+]
+
+[[package]]
+name = "urllib3"
+version = "2.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680 },
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.34.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b1/4b/4cef6ce21a2aaca9d852a6e84ef4f135d99fcd74fa75105e2fc0c8308acd/uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403", size = 62483 },
+]
+
+[[package]]
+name = "websockets"
+version = "15.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423 },
+ { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082 },
+ { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330 },
+ { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878 },
+ { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883 },
+ { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252 },
+ { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521 },
+ { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958 },
+ { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918 },
+ { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388 },
+ { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828 },
+ { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437 },
+ { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096 },
+ { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332 },
+ { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152 },
+ { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096 },
+ { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523 },
+ { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790 },
+ { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165 },
+ { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160 },
+ { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395 },
+ { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841 },
+ { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440 },
+ { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098 },
+ { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329 },
+ { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111 },
+ { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054 },
+ { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496 },
+ { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829 },
+ { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217 },
+ { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195 },
+ { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393 },
+ { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837 },
+ { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 },
+]