# MegaScale / src / 03.1_upload_data.py
# Provenance: Hugging Face repo "maom/MegaScale", commit 6605441
# ("add predicted structures set"), author: maom
# install huggingface_hub from the command line:
#
# pip install huggingface_hub
# pip install datasets
#
# Log into huggingface hub (this only needs to be done once per project, and then it is cached)
#
# huggingface-cli login
#
# This will ask you for an access token
import datasets
# dataset1
# dataset2
# dataset3
# dataset3_single
# dataset3_single_cv
##### dataset1 #######
# Load the single-split dataset1 parquet file and publish it to the
# "dataset1" configuration of the maom/MegaScale Hub repository.
ds1 = datasets.load_dataset(
    "parquet",
    name="dataset1",
    data_dir="./intermediate",
    data_files={"train": "dataset1.parquet"},
    cache_dir="/scratch/maom_root/maom0/maom",
    keep_in_memory=True,
)
ds1.push_to_hub(
    repo_id="maom/MegaScale",
    config_name="dataset1",
    data_dir="dataset1/data",
    commit_message="Upload dataset1",
)
##### dataset2 #######
# Load the single-split dataset2 parquet file and publish it as the
# "dataset2" configuration of maom/MegaScale.
ds2 = datasets.load_dataset(
    "parquet",
    name="dataset2",
    data_dir="./intermediate",
    data_files={"train": "dataset2.parquet"},
    cache_dir="/scratch/maom_root/maom0/maom",
    keep_in_memory=True,
)
ds2.push_to_hub(
    repo_id="maom/MegaScale",
    config_name="dataset2",
    data_dir="dataset2/data",
    commit_message="Upload dataset2",
)
##### dataset3 #######
# Load the single-split dataset3 parquet file and publish it as the
# "dataset3" configuration of maom/MegaScale.
ds3 = datasets.load_dataset(
    "parquet",
    name="dataset3",
    data_dir="./intermediate",
    data_files={"train": "dataset3.parquet"},
    cache_dir="/scratch/maom_root/maom0/maom",
    keep_in_memory=True,
)
ds3.push_to_hub(
    repo_id="maom/MegaScale",
    config_name="dataset3",
    data_dir="dataset3/data",
    commit_message="Upload dataset3",
)
##### dataset3_single #######
# dataset3 restricted to single mutants, pre-split into train/val/test
# parquet files; published as the "dataset3_single" configuration.
single_split_files = {
    split: f"dataset3_single_{split}.parquet"
    for split in ("train", "val", "test")
}
ds3_single = datasets.load_dataset(
    "parquet",
    name="dataset3_single",
    data_dir="./intermediate",
    data_files=single_split_files,
    cache_dir="/scratch/maom_root/maom0/maom",
    keep_in_memory=True,
)
ds3_single.push_to_hub(
    repo_id="maom/MegaScale",
    config_name="dataset3_single",
    data_dir="dataset3_single/data",
    commit_message="Upload dataset3_single",
)
##### dataset3_single_cv #######
# 5-fold cross-validation version of dataset3_single: one train/val/test
# parquet file per fold (folds 0-4). The comprehension yields the same
# split names and ordering (train_0..4, val_0..4, test_0..4) as listing
# the 15 files by hand, without risk of a copy-paste slip.
cv_split_files = {
    f"{split}_{fold}": f"dataset3_single_cv_{split}_{fold}.parquet"
    for split in ("train", "val", "test")
    for fold in range(5)
}
dataset = datasets.load_dataset(
    "parquet",
    name="dataset3_single_cv",
    data_dir="./intermediate",
    data_files=cv_split_files,
    cache_dir="/scratch/maom_root/maom0/maom",
    keep_in_memory=True)
dataset.push_to_hub(
    repo_id="maom/MegaScale",
    config_name="dataset3_single_cv",
    # Bug fix: was "datase3_single_cv/data" (missing 't'), which uploaded
    # the data under a misspelled Hub directory inconsistent with the
    # config name and with every other section of this script.
    data_dir="dataset3_single_cv/data",
    commit_message="Upload dataset3_single_cv")
##### AlphaFold_model_PDBs ####
# Predicted-structure PDB models packed into a single-split parquet file;
# published as the "AlphaFold_model_PDBs" configuration of maom/MegaScale.
af_models = datasets.load_dataset(
    "parquet",
    name="AlphaFold_model_PDBs",
    data_dir="./intermediate",
    data_files={"train": "AlphaFold_model_PDBs.parquet"},
    cache_dir="/scratch/maom_root/maom0/maom",
    keep_in_memory=True,
)
af_models.push_to_hub(
    repo_id="maom/MegaScale",
    config_name="AlphaFold_model_PDBs",
    data_dir="AlphaFold_model_PDBs/data",
    commit_message="Upload AlphaFold_model_PDBs",
)