LittletreeZou committed on
Commit
a0d9889
·
1 Parent(s): cb952d3

initial upload

Browse files
config.yaml ADDED
@@ -0,0 +1,214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # lightning.pytorch==2.4.0
2
+ seed_everything: 42
3
+ trainer:
4
+ accelerator: auto
5
+ strategy:
6
+ class_path: lightning.pytorch.strategies.DDPStrategy
7
+ init_args:
8
+ accelerator: null
9
+ parallel_devices: null
10
+ cluster_environment: null
11
+ checkpoint_io: null
12
+ precision_plugin: null
13
+ ddp_comm_state: null
14
+ ddp_comm_hook: null
15
+ ddp_comm_wrapper: null
16
+ model_averaging_period: null
17
+ process_group_backend: null
18
+ timeout: 0:30:00
19
+ start_method: popen
20
+ output_device: null
21
+ dim: 0
22
+ broadcast_buffers: true
23
+ process_group: null
24
+ bucket_cap_mb: 25
25
+ find_unused_parameters: false
26
+ check_reduction: false
27
+ gradient_as_bucket_view: false
28
+ static_graph: false
29
+ delay_all_reduce_named_params: null
30
+ param_to_hook_all_reduce: null
31
+ mixed_precision: null
32
+ device_mesh: null
33
+ devices: auto
34
+ num_nodes: 1
35
+ precision: 32
36
+ logger:
37
+ class_path: lightning.pytorch.loggers.WandbLogger
38
+ init_args:
39
+ name: pa_hsapiens_rnafm_cds_1.6B_fold0
40
+ save_dir: genbio_finetune/logs
41
+ version: null
42
+ offline: false
43
+ dir: null
44
+ id: null
45
+ anonymous: null
46
+ project: rna_tasks
47
+ log_model: false
48
+ experiment: null
49
+ prefix: ''
50
+ checkpoint_name: null
51
+ job_type: null
52
+ config: null
53
+ entity: null
54
+ reinit: null
55
+ tags: null
56
+ group: null
57
+ notes: null
58
+ magic: null
59
+ config_exclude_keys: null
60
+ config_include_keys: null
61
+ mode: null
62
+ allow_val_change: null
63
+ resume: null
64
+ force: null
65
+ tensorboard: null
66
+ sync_tensorboard: null
67
+ monitor_gym: null
68
+ save_code: true
69
+ settings: null
70
+ callbacks:
71
+ - class_path: lightning.pytorch.callbacks.LearningRateMonitor
72
+ init_args:
73
+ logging_interval: step
74
+ log_momentum: false
75
+ log_weight_decay: false
76
+ - class_path: lightning.pytorch.callbacks.ModelCheckpoint
77
+ init_args:
78
+ dirpath: genbio_finetune/logs/rna_tasks/pa_hsapiens_rnafm_cds_1.6B_fold0
79
+ filename: best_val:{epoch}-{val_pearson:.3f}
80
+ monitor: val_pearson
81
+ verbose: false
82
+ save_last: null
83
+ save_top_k: 1
84
+ save_weights_only: false
85
+ mode: max
86
+ auto_insert_metric_name: true
87
+ every_n_train_steps: null
88
+ train_time_interval: null
89
+ every_n_epochs: 1
90
+ save_on_train_epoch_end: null
91
+ enable_version_counter: true
92
+ fast_dev_run: false
93
+ max_epochs: 15
94
+ min_epochs: null
95
+ max_steps: -1
96
+ min_steps: null
97
+ max_time: null
98
+ limit_train_batches: null
99
+ limit_val_batches: null
100
+ limit_test_batches: null
101
+ limit_predict_batches: null
102
+ overfit_batches: 0.0
103
+ val_check_interval: null
104
+ check_val_every_n_epoch: 1
105
+ num_sanity_val_steps: null
106
+ log_every_n_steps: 50
107
+ enable_checkpointing: null
108
+ enable_progress_bar: null
109
+ enable_model_summary: null
110
+ accumulate_grad_batches: 1
111
+ gradient_clip_val: 1
112
+ gradient_clip_algorithm: null
113
+ deterministic: null
114
+ benchmark: null
115
+ inference_mode: true
116
+ use_distributed_sampler: true
117
+ profiler:
118
+ class_path: lightning.pytorch.profilers.PyTorchProfiler
119
+ init_args:
120
+ dirpath: null
121
+ filename: null
122
+ group_by_input_shapes: false
123
+ emit_nvtx: false
124
+ export_to_chrome: true
125
+ row_limit: 20
126
+ sort_by_key: null
127
+ record_module_names: true
128
+ table_kwargs: null
129
+ record_shapes: false
130
+ dict_kwargs:
131
+ profile_memory: true
132
+ detect_anomaly: false
133
+ barebones: false
134
+ plugins: null
135
+ sync_batchnorm: false
136
+ reload_dataloaders_every_n_epochs: 0
137
+ default_root_dir: genbio_finetune/logs
138
+ model:
139
+ class_path: genbio_finetune.tasks.SequenceRegression
140
+ init_args:
141
+ backbone:
142
+ class_path: genbio_finetune.models.rnafm_cds
143
+ init_args:
144
+ from_scratch: false
145
+ max_length: 1024
146
+ use_peft: true
147
+ save_peft_only: true
148
+ lora_r: 32
149
+ lora_alpha: 64
150
+ lora_dropout: 0.1
151
+ lora_target_modules:
152
+ - query
153
+ - value
154
+ config_overwrites:
155
+ hidden_dropout_prob: 0.1
156
+ attention_probs_dropout_prob: 0.1
157
+ model_init_args: null
158
+ adapter:
159
+ class_path: genbio_finetune.models.MLPPoolAdapter
160
+ init_args:
161
+ pooling: mean_pooling
162
+ hidden_sizes:
163
+ - 512
164
+ bias: true
165
+ dropout: 0.1
166
+ dropout_in_middle: false
167
+ num_outputs: 1
168
+ optimizer:
169
+ class_path: torch.optim.AdamW
170
+ init_args:
171
+ lr: 0.0003
172
+ betas:
173
+ - 0.9
174
+ - 0.999
175
+ eps: 1.0e-08
176
+ weight_decay: 0.01
177
+ amsgrad: false
178
+ maximize: false
179
+ foreach: null
180
+ capturable: false
181
+ differentiable: false
182
+ fused: null
183
+ lr_scheduler:
184
+ class_path: genbio_finetune.lr_schedulers.CosineWithWarmup
185
+ init_args:
186
+ warmup_ratio: 0.01
187
+ num_warmup_steps: null
188
+ last_epoch: -1
189
+ verbose: deprecated
190
+ use_legacy_adapter: false
191
+ strict_loading: true
192
+ reset_optimizer_states: false
193
+ data:
194
+ class_path: genbio_finetune.data.ProteinAbundance
195
+ init_args:
196
+ path: genbio-ai/rna-downstream-tasks
197
+ config_name: protein_abundance_hsapiens
198
+ normalize: true
199
+ train_split_name: train
200
+ train_split_files: null
201
+ test_split_files: null
202
+ valid_split_files: null
203
+ random_seed: 42
204
+ batch_size: 4 # global_batch_size = 16
205
+ shuffle: true
206
+ sampler: null
207
+ num_workers: 0
208
+ pin_memory: true
209
+ persistent_workers: false
210
+ cv_num_folds: 5
211
+ cv_test_fold_id: 0
212
+ cv_enable_val_fold: true
213
+ cv_fold_id_col: fold_id
214
+ ckpt_path: null
fold0/model.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:93cbd2a0975afe99f78562541991bf07d772c2da12c986bab42f595dbf58daaf
3
+ size 113543952
fold1/model.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:32523a91a19be888eafe98e6348eaa593855d9aed6ddc1b76b64b0ff5fc01f9f
3
+ size 113543952
fold2/model.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0ac99255d6c85891db12186dc42cb7ad938a014cc1d974a28f724f3598e149c2
3
+ size 113543952
fold3/model.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:151fdc632d862b4c99fc228047d0cba1123cc8d4aeb567f3a6ec507e601f710b
3
+ size 113543952
fold4/model.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:101b20a8b4d9f61aa89a0c8b1ae5067e107dd0e739db089db36e7da0c6bff9c4
3
+ size 113543952