|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "global_step": 14684,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.14,
      "learning_rate": 2.264792841487469e-05,
      "loss": 2.4357,
      "step": 500
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.18522291767544e-05,
      "loss": 1.6579,
      "step": 1000
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.1054935350982572e-05,
      "loss": 1.4934,
      "step": 1500
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.0257641525210737e-05,
      "loss": 1.4199,
      "step": 2000
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.9461942287090446e-05,
      "loss": 1.3474,
      "step": 2500
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.8666243048970158e-05,
      "loss": 1.2855,
      "step": 3000
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.7868949223198323e-05,
      "loss": 1.2739,
      "step": 3500
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.7071655397426488e-05,
      "loss": 1.0821,
      "step": 4000
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.6275956159306204e-05,
      "loss": 1.0334,
      "step": 4500
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.547866233353437e-05,
      "loss": 0.9984,
      "step": 5000
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.4681368507762535e-05,
      "loss": 1.0293,
      "step": 5500
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.38840746819907e-05,
      "loss": 0.9678,
      "step": 6000
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.308678085621887e-05,
      "loss": 0.9749,
      "step": 6500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.2289487030447036e-05,
      "loss": 0.9688,
      "step": 7000
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.1492193204675202e-05,
      "loss": 0.9228,
      "step": 7500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.0696493966554914e-05,
      "loss": 0.7258,
      "step": 8000
    },
    {
      "epoch": 2.32,
      "learning_rate": 9.900794728434625e-06,
      "loss": 0.7486,
      "step": 8500
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.10350090266279e-06,
      "loss": 0.7574,
      "step": 9000
    },
    {
      "epoch": 2.59,
      "learning_rate": 8.306207076890958e-06,
      "loss": 0.699,
      "step": 9500
    },
    {
      "epoch": 2.72,
      "learning_rate": 7.508913251119124e-06,
      "loss": 0.724,
      "step": 10000
    },
    {
      "epoch": 2.86,
      "learning_rate": 6.711619425347291e-06,
      "loss": 0.7335,
      "step": 10500
    },
    {
      "epoch": 3.0,
      "learning_rate": 5.9159201872270015e-06,
      "loss": 0.7182,
      "step": 11000
    },
    {
      "epoch": 3.13,
      "learning_rate": 5.120220949106712e-06,
      "loss": 0.5679,
      "step": 11500
    },
    {
      "epoch": 3.27,
      "learning_rate": 4.3229271233348795e-06,
      "loss": 0.5544,
      "step": 12000
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.5256332975630462e-06,
      "loss": 0.5581,
      "step": 12500
    },
    {
      "epoch": 3.54,
      "learning_rate": 2.728339471791213e-06,
      "loss": 0.5367,
      "step": 13000
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.9310456460193797e-06,
      "loss": 0.5468,
      "step": 13500
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.1353464078990904e-06,
      "loss": 0.532,
      "step": 14000
    },
    {
      "epoch": 3.95,
      "learning_rate": 3.3805258212725726e-07,
      "loss": 0.5323,
      "step": 14500
    },
    {
      "epoch": 4.0,
      "step": 14684,
      "total_flos": 81896991162368.0,
      "train_loss": 0.9538487096925224,
      "train_runtime": 2153.9453,
      "train_samples_per_second": 163.575,
      "train_steps_per_second": 6.817
    }
  ],
  "max_steps": 14684,
  "num_train_epochs": 4,
  "total_flos": 81896991162368.0,
  "trial_name": null,
  "trial_params": null
}
|
|