kiyoonkim committed on
Commit
af3a377
1 Parent(s): f17d4f1

Upload config.yaml with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.yaml +4 -3
config.yaml CHANGED
@@ -26,7 +26,7 @@ dataloader:
26
  worker_init_fn: null
27
  dataset:
28
  _target_: mlproject.datasets.config.build_gulp_dataset
29
- data_dir: ???
30
  dataset_name: hmdb51-gulprgb
31
  sets_to_include: null
32
  download_checkpoint_with_name: null
@@ -65,7 +65,6 @@ model:
65
  pretrained: kinetics400
66
  num_workers: 4
67
  optimizer:
68
- _partial_: true
69
  _target_: torch.optim.adamw.AdamW
70
  amsgrad: false
71
  betas:
@@ -78,6 +77,7 @@ optimizer:
78
  fused: null
79
  lr: 1.0e-05
80
  maximize: false
 
81
  weight_decay: 1.0e-05
82
  print_config: false
83
  repo_path: kiyoonkim/aaf
@@ -85,7 +85,6 @@ resume: false
85
  resume_from_checkpoint: null
86
  root_experiment_dir: /disk/scratch2/experiments
87
  scheduler:
88
- _partial_: true
89
  _target_: timm.scheduler.cosine_lr.CosineLRScheduler
90
  cycle_decay: 1.0
91
  cycle_limit: 1
@@ -97,7 +96,9 @@ scheduler:
97
  noise_range_t: null
98
  noise_seed: 42
99
  noise_std: 1.0
 
100
  t_in_epochs: true
 
101
  warmup_lr_init: 0
102
  warmup_prefix: false
103
  warmup_t: 0
 
26
  worker_init_fn: null
27
  dataset:
28
  _target_: mlproject.datasets.config.build_gulp_dataset
29
+ data_dir: /disk/scratch_fast1/datasets
30
  dataset_name: hmdb51-gulprgb
31
  sets_to_include: null
32
  download_checkpoint_with_name: null
 
65
  pretrained: kinetics400
66
  num_workers: 4
67
  optimizer:
 
68
  _target_: torch.optim.adamw.AdamW
69
  amsgrad: false
70
  betas:
 
77
  fused: null
78
  lr: 1.0e-05
79
  maximize: false
80
+ params: ???
81
  weight_decay: 1.0e-05
82
  print_config: false
83
  repo_path: kiyoonkim/aaf
 
85
  resume_from_checkpoint: null
86
  root_experiment_dir: /disk/scratch2/experiments
87
  scheduler:
 
88
  _target_: timm.scheduler.cosine_lr.CosineLRScheduler
89
  cycle_decay: 1.0
90
  cycle_limit: 1
 
96
  noise_range_t: null
97
  noise_seed: 42
98
  noise_std: 1.0
99
+ optimizer: ???
100
  t_in_epochs: true
101
+ t_initial: ???
102
  warmup_lr_init: 0
103
  warmup_prefix: false
104
  warmup_t: 0