# pytorch_lightning==1.8.0
seed_everything: 123
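# Two loggers run side by side: TensorBoard (save_dir lightning_logs, name tmp)
# and Weights & Biases (project tmp, offline disabled).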
trainer:
logger:
- class_path: pytorch_lightning.loggers.TensorBoardLogger
init_args:
save_dir: lightning_logs
name: tmp
version: null
log_graph: false
default_hp_metric: true
prefix: ''
sub_dir: null
comment: ''
purge_step: null
max_queue: 10
flush_secs: 120
filename_suffix: ''
- class_path: pytorch_lightning.loggers.WandbLogger
init_args:
name: null
save_dir: .
version: null
offline: false
dir: null
id: null
anonymous: null
project: tmp
log_model: false
experiment: null
prefix: ''
job_type: null
config: null
entity: null
reinit: null
tags: null
group: null
notes: null
magic: null
config_exclude_keys: null
config_include_keys: null
mode: null
allow_val_change: null
resume: null
force: null
tensorboard: null
sync_tensorboard: null
monitor_gym: null
save_code: null
settings: null
enable_checkpointing: true
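# Checkpointing strategy: one "best" checkpoint (save_top_k: 1, mode: min) per monitored
# validation metric (total loss, EER on projections, invariance loss, mean order evaluation
# on projections), plus an unconditional snapshot every 50 epochs (save_top_k: -1).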
callbacks:
- class_path: pytorch_lightning.callbacks.ModelCheckpoint
init_args:
dirpath: null
filename: best-val-loss-{epoch}-{step}
monitor: loss/loss/val
verbose: false
save_last: null
save_top_k: 1
save_weights_only: false
mode: min
auto_insert_metric_name: true
every_n_train_steps: null
train_time_interval: null
every_n_epochs: null
save_on_train_epoch_end: null
- class_path: pytorch_lightning.callbacks.ModelCheckpoint
init_args:
dirpath: null
filename: best-eer-loss-{epoch}-{step}
monitor: EER evaluation proj/val
verbose: false
save_last: null
save_top_k: 1
save_weights_only: false
mode: min
auto_insert_metric_name: true
every_n_train_steps: null
train_time_interval: null
every_n_epochs: null
save_on_train_epoch_end: null
- class_path: pytorch_lightning.callbacks.ModelCheckpoint
init_args:
dirpath: null
filename: best-invar-val-{epoch}-{step}
monitor: loss/invariance/val
verbose: false
save_last: null
save_top_k: 1
save_weights_only: false
mode: min
auto_insert_metric_name: true
every_n_train_steps: null
train_time_interval: null
every_n_epochs: null
save_on_train_epoch_end: null
- class_path: pytorch_lightning.callbacks.ModelCheckpoint
init_args:
dirpath: null
filename: best-order-val-{epoch}-{step}
monitor: Order evaluation mean proj/val
verbose: false
save_last: null
save_top_k: 1
save_weights_only: false
mode: min
auto_insert_metric_name: true
every_n_train_steps: null
train_time_interval: null
every_n_epochs: null
save_on_train_epoch_end: null
- class_path: pytorch_lightning.callbacks.ModelCheckpoint
init_args:
dirpath: null
filename: cptk-{epoch}-{step}
monitor: null
verbose: false
save_last: null
save_top_k: -1
save_weights_only: false
mode: min
auto_insert_metric_name: true
every_n_train_steps: null
train_time_interval: null
every_n_epochs: 50
save_on_train_epoch_end: null
- class_path: pytorch_lightning.callbacks.RichProgressBar
init_args:
refresh_rate: 1
leave: false
theme:
description: white
progress_bar: '#6206E0'
progress_bar_finished: '#6206E0'
progress_bar_pulse: '#6206E0'
batch_progress: white
time: grey54
processing_speed: grey70
metrics: white
console_kwargs: null
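# Project-specific evaluation callbacks: hypersphere evaluation on the raw representations
# and again on the projections, a learning-rate logger, and order/EER evaluation every
# 5 epochs computed on the projection space.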
- class_path: callbacks.hypersphere.HypersphereEvaluation
init_args:
normalize: true
use_projections: false
- class_path: callbacks.hypersphere.HypersphereEvaluation
init_args:
normalize: true
use_projections: true
- class_path: callbacks.lr_logger.LearningRateLogger
- class_path: bernardo.callbacks.evaluation.OrderEvaluation
init_args:
log_n_epochs: 5
on_train: true
use_projection: true
- class_path: bernardo.callbacks.evaluation.EEREvaluation
init_args:
use_more_neg: false
log_n_epochs: 5
on_train: false
use_projection: true
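# Runtime: single GPU (device index 1), fp32 precision, up to 500 epochs,
# validation every epoch, metrics logged every 50 steps.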
default_root_dir: null
gradient_clip_val: null
gradient_clip_algorithm: null
num_nodes: 1
num_processes: null
devices:
- 1
gpus: null
auto_select_gpus: false
tpu_cores: null
ipus: null
enable_progress_bar: true
overfit_batches: 0.0
track_grad_norm: -1
check_val_every_n_epoch: 1
fast_dev_run: false
accumulate_grad_batches: null
max_epochs: 500
min_epochs: null
max_steps: -1
min_steps: null
max_time: null
limit_train_batches: null
limit_val_batches: null
limit_test_batches: null
limit_predict_batches: null
val_check_interval: null
log_every_n_steps: 50
accelerator: gpu
strategy: null
sync_batchnorm: false
precision: 32
enable_model_summary: true
num_sanity_val_steps: 2
resume_from_checkpoint: null
profiler: null
benchmark: null
deterministic: null
reload_dataloaders_every_n_epochs: 0
auto_lr_find: false
replace_sampler_ddp: true
detect_anomaly: false
auto_scale_batch_size: false
plugins: null
amp_backend: native
amp_level: null
move_metrics_to_cpu: false
multiple_trainloader_mode: max_size_cycle
inference_mode: true
ckpt_path: null
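# BYOL setup: a SiameseArm whose encoder is an EfficientNet-B0 over mel spectrograms
# (n_fft 2048, hop 512) producing 1000-d features, followed by a 1000->128 projection head
# (L2-normalized) and a 128-1024-128 MLP predictor. The target branch is updated by an
# exponential moving average starting at tau = 0.99; the loss is MSE between normalized
# projections. Optimization uses Adam (lr 3e-5, weight decay 1.5e-6) with a 10-epoch linear
# warmup into cosine annealing. Note that the EMA and LR schedules assume max_epochs: 1000
# while the trainer stops at 500, so tau annealing and the cosine decay only run through
# their first half, presumably intentional.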
model:
class_path: models.byol.BYOL
init_args:
module:
class_path: networks.my_siamese_arm.SiameseArm
init_args:
encoder:
class_path: bernardo.models.model.Encoder
init_args:
backbone: efficientnet_b0
feature_extractor:
class_path: bernardo.models.model.FeatureExtractor
init_args:
spec_layer: melspectogram
n_fft: 2048
hop_length: 512
embedding_dim: 1000
pretrained: true
progress: true
stochastic_depth_prob: 0.2
norm_layer: null
projector:
class_path: bernardo.models.model.Projection
init_args:
input_dim: 1000
output_dim: 128
nonlinearity: null
is_identity: false
l2_normalize: true
predictor:
class_path: networks.mlp.MLP
init_args:
dims:
- 128
- 1024
- 128
activation: true
use_batchnorm: true
batchnorm_fn: null
last_layer: null
bias: null
layer_init: null
normalize_representations: false
normalize_projections: true
loss_fn:
class_path: torch.nn.MSELoss
init_args:
size_average: null
reduce: null
reduction: mean
weight_callback:
class_path: callbacks.ma_updates.MAWeightUpdate
init_args:
initial_tau: 0.99
max_epochs: 1000
should_update: true
optimizer:
class_path: utils.optim.Adam
init_args:
lr: 3.0e-05
betas:
- 0.9
- 0.999
eps: 1.0e-08
weight_decay: 1.5e-06
amsgrad: false
scheduler:
class_path: utils.optim.LinearWarmupCosineAnnealing
init_args:
warmup_epochs: 10
max_epochs: 1000
warmup_start_lr: 0.0
eta_min: 0.0
last_epoch: -1
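# Data: VocalsDataModule over three vocal datasets; positive pairs are drawn from the same
# clip, excerpts are 176000 samples (about 4 s at 44.1 kHz), batches of 120, audio normalized.
# Augmentations on the training views: gaussian noise, gain, time masking, and parselmouth
# pitch shifting (factor 1 to 1.4, p = 0.5); the negative-branch augmentations (augs_neg)
# are disabled.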
data:
class_path: bernardo.data.vocals.VocalsDataModule
init_args:
augs_neg:
enable: false
gaussian_noise: 0.5
pitch_shift_naive: 0
time_stretch: 0
gain: 0.5
shift: 0
parametric_eq: 0
tanh_distortion: 0
time_mask: 0
formant_shift_parselmouth: 0
pitch_shift_parselmouth: 0
pitch_range_parselmouth: 0
pitch_shift_parselmouth_prob: 0
positive_examples: same_clip
dataset_dirs:
- tencys_vocals
- ghero_vocals_3
- ghero_vocals_4
batch_size: 120
batch_size_val: 120
nr_samples: 176000
normalize: true
num_workers: 40
sr: 44100
batch_sampling_mode: sample_clips
eval_frac: 0.105
group_name_is_folder: true
group_by_artist: true
augs:
enable: true
gaussian_noise: 0.5
pitch_shift_naive: 0
time_stretch: 0
gain: 0.5
shift: 0
parametric_eq: 0
tanh_distortion: 0
time_mask: 0.5
formant_shift_parselmouth: 0
pitch_shift_parselmouth:
- 1
- 1.4
pitch_range_parselmouth: 1.5
pitch_shift_parselmouth_prob: 0.5
transform_override: false
verbose: true
use_random_loader: false
max_groups: -1
multi_epoch: 1
classification: false
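# This file appears to follow the layout emitted by LightningCLI's --print_config.
# Assuming the project exposes a LightningCLI entry point (a hypothetical main.py),
# a run would look like:
#   python main.py fit --config config.yaml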