mtasic85 committed on
Commit
e66e4fa
1 Parent(s): b579b9b

pretrain model

Files changed (1)
scripts/pretrain-model.yaml +1 -1
scripts/pretrain-model.yaml CHANGED
@@ -68,7 +68,7 @@ train:
  global_batch_size: 512

  # Number of samples per data-parallel rank (type: int, default: 4)
- micro_batch_size: 2
+ micro_batch_size: 1

  # Number of iterations with learning rate warmup active (type: int, default: 2000)
  lr_warmup_steps: 2000
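
The only change is `micro_batch_size: 2` → `1` while `global_batch_size` stays at 512, so each optimizer step processes the same number of samples but spreads them over more accumulation iterations. A minimal sketch of that relationship is below, assuming the usual litgpt-style convention that accumulation steps = global batch / (micro batch × devices); the `devices` count and the helper function name are illustrative assumptions, not part of this config.

```python
def gradient_accumulation_iters(global_batch_size: int,
                                micro_batch_size: int,
                                devices: int = 1) -> int:
    """Forward/backward passes accumulated before each optimizer step
    (sketch of the common convention, not taken from this repo)."""
    samples_per_iter = micro_batch_size * devices
    assert global_batch_size % samples_per_iter == 0, "global batch must divide evenly"
    return global_batch_size // samples_per_iter

# Before this commit: 512 / (2 * 1 device) = 256 accumulation iterations per step
print(gradient_accumulation_iters(512, micro_batch_size=2))  # 256
# After this commit:  512 / (1 * 1 device) = 512 accumulation iterations per step
print(gradient_accumulation_iters(512, micro_batch_size=1))  # 512
```

Halving the micro batch roughly halves peak activation memory per iteration at the cost of more, smaller iterations; the effective batch size seen by the optimizer is unchanged.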