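# Training schedule: 30k optimization steps with gradient-norm clipping at 1
# and gradient accumulation over 4 batches; log, evaluate (on the dev split),
# and checkpoint every 500 steps, keeping only the most recent checkpoint.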
runner:
  total_steps: 30000
  gradient_clipping: 1
  gradient_accumulate_steps: 4
  log_step: 500
  eval_step: 500
  save_step: 500
  max_keep: 1
  eval_dataloaders:
    - dev

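# Optimizer: the TorchOptim wrapper dispatches to the torch.optim class named
# by torch_optim_name, so training uses Adam with a learning rate of 1e-4.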
optimizer:
  name: TorchOptim
  torch_optim_name: Adam
  lr: 1.0e-4

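# Diarization data settings: recordings are cut into chunks of 2000 feature
# frames, with no label subsampling and no label delay, assuming 2 speakers
# and 16 kHz audio; frame_shift left as null presumably lets the frame shift
# be derived from the upstream features.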
downstream_expert:
  datarc:
    chunk_size: 2000
    subsampling: 1
    label_delay: 0
    num_speakers: 2
    rate: 16000
    frame_shift: null

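  # Dataloader settings: 8 worker processes, batch size 8 for training and 1
  # for evaluation; the *_dir paths point at the prepared train/dev/test data
  # directories.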
  loaderrc:
    num_workers: 8
    train_batchsize: 8
    eval_batchsize: 1
    train_dir: ./downstream/diarization/data/train
    dev_dir: ./downstream/diarization/data/dev
    test_dir: ./downstream/diarization/data/test

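  # Scoring settings: save the per-frame predictions produced at evaluation
  # time so they can be scored (e.g. for DER) afterwards.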
  scorerc:
    save_predictions: True

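  # Downstream model: a recurrent network with one layer and hidden size 512,
  # applied on top of the upstream representations.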
  modelrc:
    rnn_layers: 1
    hidden_size: 512