Hack90 committed on
Commit
4b3c600
·
verified ·
1 Parent(s): 231ac8a

Upload config.yaml

Browse files
Files changed (1) hide show
  1. config.yaml +83 -0
config.yaml ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ defaults:
2
+ - _self_
3
+ - override hydra/launcher: submitit_slurm
4
+
5
+ compute:
6
+ ngpus: 1
7
+ nodes: 1
8
+
9
+ logging:
10
+ log_freq: 100
11
+ log_lr_every: ${logging.log_freq}
12
+ log_file_name: stdout.log
13
+ enable_wandb: True
14
+ entity: flows
15
+ project: flow_matching
16
+ group: null
17
+
18
+ data:
19
+ train: DNA-LLM/experiment_one_viral_genomes_train_set_v2
20
+ valid: DNA-LLM/experiment_one_viral_genomes_val_set_v2
21
+ cache_dir: /huggingface/
22
+ num_workers: 8
23
+
24
+ training:
25
+ batch_size: 64
26
+ snapshot: 2000 # 2000
27
+ eval_freq: 20000 # 20000
28
+ perplexity_freq: 200000 # 200000
29
+ seed: 42
30
+
31
+ eval:
32
+ batch_size: 64
33
+ sample_batch_size: 16
34
+ perplexity: True
35
+ perplexity_batch_size: 16
36
+
37
+ optim:
38
+ weight_decay: 0.03
39
+ optimizer: AdamW
40
+ lr: 3e-4
41
+ beta1: 0.9
42
+ beta2: 0.95
43
+ eps: 1e-8
44
+ warmup: 2500
45
+ grad_clip: 1.
46
+ eta_min_ratio: 0.1
47
+ fused: false
48
+ n_iters: 1000000
49
+ log_lr_every: ${logging.log_lr_every}
50
+
51
+ flow:
52
+ source_distribution: uniform # [uniform, mask]
53
+ loss_function: cross_entropy # [cross_entropy, generalized_kl]
54
+ exponent: 1.
55
+ scheduler_type: polynomial
56
+ sampling_steps: 2048
57
+
58
+ model:
59
+ hidden_size: 768
60
+ cond_dim: 128
61
+ length: 2048
62
+ n_blocks: 12
63
+ n_heads: 12
64
+ dropout: 0.1
65
+ compile: true
66
+
67
+ hydra_dir: /user/hassanahmed.hassan/u12592/.project/dir.lustre-grete/learning-nucleoTIDEs/flow_matching-main/model_runs
68
+
69
+ hydra:
70
+ run:
71
+ dir: ${hydra_dir}/${now:%Y.%m.%d}/${now:%H%M%S}
72
+ sweep:
73
+ dir: ${hydra_dir}/${now:%Y.%m.%d}/${now:%H%M%S}
74
+ subdir: ${hydra.job.num}
75
+ launcher:
76
+ max_num_timeout: 100000
77
+ timeout_min: 4320
78
+ partition: learn
79
+ qos: # TODO: change it to your own qos
80
+ gpus_per_node: ${compute.ngpus}
81
+ mem_gb: 1760
82
+ cpus_per_task: 32
83
+ nodes: ${compute.nodes}