{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 23.076923076923077,
  "eval_steps": 100,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 47.11796188354492,
      "learning_rate": 9.990900000000001e-06,
      "loss": 3.6644,
      "step": 100
    },
    {
      "epoch": 1.5384615384615383,
      "eval_loss": 2.4919605255126953,
      "eval_runtime": 12.5517,
      "eval_samples_per_second": 10.437,
      "eval_steps_per_second": 1.354,
      "step": 100
    },
    {
      "epoch": 3.076923076923077,
      "grad_norm": 31.28130531311035,
      "learning_rate": 9.980900000000001e-06,
      "loss": 2.2347,
      "step": 200
    },
    {
      "epoch": 3.076923076923077,
      "eval_loss": 2.156316041946411,
      "eval_runtime": 11.1792,
      "eval_samples_per_second": 11.718,
      "eval_steps_per_second": 1.521,
      "step": 200
    },
    {
      "epoch": 4.615384615384615,
      "grad_norm": 39.616390228271484,
      "learning_rate": 9.970900000000001e-06,
      "loss": 2.0254,
      "step": 300
    },
    {
      "epoch": 4.615384615384615,
      "eval_loss": 2.024153709411621,
      "eval_runtime": 11.1934,
      "eval_samples_per_second": 11.703,
      "eval_steps_per_second": 1.519,
      "step": 300
    },
    {
      "epoch": 6.153846153846154,
      "grad_norm": 28.285825729370117,
      "learning_rate": 9.960900000000001e-06,
      "loss": 1.9361,
      "step": 400
    },
    {
      "epoch": 6.153846153846154,
      "eval_loss": 1.9094743728637695,
      "eval_runtime": 11.3855,
      "eval_samples_per_second": 11.506,
      "eval_steps_per_second": 1.493,
      "step": 400
    },
    {
      "epoch": 7.6923076923076925,
      "grad_norm": 34.14302062988281,
      "learning_rate": 9.950900000000002e-06,
      "loss": 1.8531,
      "step": 500
    },
    {
      "epoch": 7.6923076923076925,
      "eval_loss": 1.8729331493377686,
      "eval_runtime": 11.2935,
      "eval_samples_per_second": 11.6,
      "eval_steps_per_second": 1.505,
      "step": 500
    },
    {
      "epoch": 9.23076923076923,
      "grad_norm": 39.09531784057617,
      "learning_rate": 9.940900000000002e-06,
      "loss": 1.7669,
      "step": 600
    },
    {
      "epoch": 9.23076923076923,
      "eval_loss": 1.831756830215454,
      "eval_runtime": 11.0535,
      "eval_samples_per_second": 11.851,
      "eval_steps_per_second": 1.538,
      "step": 600
    },
    {
      "epoch": 10.76923076923077,
      "grad_norm": 93.24444580078125,
      "learning_rate": 9.930900000000002e-06,
      "loss": 1.7518,
      "step": 700
    },
    {
      "epoch": 10.76923076923077,
      "eval_loss": 1.7832175493240356,
      "eval_runtime": 11.1684,
      "eval_samples_per_second": 11.729,
      "eval_steps_per_second": 1.522,
      "step": 700
    },
    {
      "epoch": 12.307692307692308,
      "grad_norm": 32.21013641357422,
      "learning_rate": 9.920900000000002e-06,
      "loss": 1.7149,
      "step": 800
    },
    {
      "epoch": 12.307692307692308,
      "eval_loss": 1.7581098079681396,
      "eval_runtime": 11.101,
      "eval_samples_per_second": 11.801,
      "eval_steps_per_second": 1.531,
      "step": 800
    },
    {
      "epoch": 13.846153846153847,
      "grad_norm": 59.90657043457031,
      "learning_rate": 9.9109e-06,
      "loss": 1.6734,
      "step": 900
    },
    {
      "epoch": 13.846153846153847,
      "eval_loss": 1.7163844108581543,
      "eval_runtime": 11.1167,
      "eval_samples_per_second": 11.784,
      "eval_steps_per_second": 1.529,
      "step": 900
    },
    {
      "epoch": 15.384615384615385,
      "grad_norm": 20.61592674255371,
      "learning_rate": 9.9009e-06,
      "loss": 1.6612,
      "step": 1000
    },
    {
      "epoch": 15.384615384615385,
      "eval_loss": 1.6949567794799805,
      "eval_runtime": 11.0663,
      "eval_samples_per_second": 11.838,
      "eval_steps_per_second": 1.536,
      "step": 1000
    },
    {
      "epoch": 16.923076923076923,
      "grad_norm": 17.60099220275879,
      "learning_rate": 9.8909e-06,
      "loss": 1.6199,
      "step": 1100
    },
    {
      "epoch": 16.923076923076923,
      "eval_loss": 1.6769332885742188,
      "eval_runtime": 11.0531,
      "eval_samples_per_second": 11.852,
      "eval_steps_per_second": 1.538,
      "step": 1100
    },
    {
      "epoch": 18.46153846153846,
      "grad_norm": 20.802692413330078,
      "learning_rate": 9.8809e-06,
      "loss": 1.6008,
      "step": 1200
    },
    {
      "epoch": 18.46153846153846,
      "eval_loss": 1.6524990797042847,
      "eval_runtime": 11.0831,
      "eval_samples_per_second": 11.82,
      "eval_steps_per_second": 1.534,
      "step": 1200
    },
    {
      "epoch": 20.0,
      "grad_norm": 21.809823989868164,
      "learning_rate": 9.8709e-06,
      "loss": 1.5812,
      "step": 1300
    },
    {
      "epoch": 20.0,
      "eval_loss": 1.6428295373916626,
      "eval_runtime": 11.1093,
      "eval_samples_per_second": 11.792,
      "eval_steps_per_second": 1.53,
      "step": 1300
    },
    {
      "epoch": 21.53846153846154,
      "grad_norm": 46.8908576965332,
      "learning_rate": 9.8609e-06,
      "loss": 1.5419,
      "step": 1400
    },
    {
      "epoch": 21.53846153846154,
      "eval_loss": 1.6006404161453247,
      "eval_runtime": 11.2393,
      "eval_samples_per_second": 11.655,
      "eval_steps_per_second": 1.513,
      "step": 1400
    },
    {
      "epoch": 23.076923076923077,
      "grad_norm": 27.15238380432129,
      "learning_rate": 9.8509e-06,
      "loss": 1.5374,
      "step": 1500
    },
    {
      "epoch": 23.076923076923077,
      "eval_loss": 1.5862094163894653,
      "eval_runtime": 11.1815,
      "eval_samples_per_second": 11.716,
      "eval_steps_per_second": 1.52,
      "step": 1500
    }
  ],
  "logging_steps": 100,
  "max_steps": 100000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1539,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.73364749312e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}