{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9971803864933636,
  "eval_steps": 500,
  "global_step": 14500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 4.828072347156317e-05,
      "loss": 1.3416,
      "step": 500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.656144694312634e-05,
      "loss": 1.1277,
      "step": 1000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.4842170414689496e-05,
      "loss": 1.0744,
      "step": 1500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.312289388625267e-05,
      "loss": 1.034,
      "step": 2000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.140361735781583e-05,
      "loss": 1.013,
      "step": 2500
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.9684340829379003e-05,
      "loss": 0.9948,
      "step": 3000
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.796506430094217e-05,
      "loss": 0.9951,
      "step": 3500
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.624578777250533e-05,
      "loss": 0.9735,
      "step": 4000
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.45265112440685e-05,
      "loss": 0.9796,
      "step": 4500
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.280723471563166e-05,
      "loss": 0.9688,
      "step": 5000
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.108795818719483e-05,
      "loss": 0.9511,
      "step": 5500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9368681658757994e-05,
      "loss": 0.9531,
      "step": 6000
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.764940513032116e-05,
      "loss": 0.9337,
      "step": 6500
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.593012860188433e-05,
      "loss": 0.9208,
      "step": 7000
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.4210852073447495e-05,
      "loss": 0.9241,
      "step": 7500
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.249157554501066e-05,
      "loss": 0.9049,
      "step": 8000
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.0772299016573828e-05,
      "loss": 0.9192,
      "step": 8500
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9053022488136992e-05,
      "loss": 0.9219,
      "step": 9000
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.733374595970016e-05,
      "loss": 0.9034,
      "step": 9500
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5614469431263325e-05,
      "loss": 0.9074,
      "step": 10000
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.3895192902826493e-05,
      "loss": 0.9088,
      "step": 10500
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2175916374389657e-05,
      "loss": 0.9119,
      "step": 11000
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0456639845952824e-05,
      "loss": 0.8996,
      "step": 11500
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.73736331751599e-06,
      "loss": 0.8881,
      "step": 12000
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.018086789079156e-06,
      "loss": 0.9013,
      "step": 12500
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.298810260642322e-06,
      "loss": 0.8916,
      "step": 13000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.5795337322054882e-06,
      "loss": 0.8953,
      "step": 13500
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.8602572037686543e-06,
      "loss": 0.8992,
      "step": 14000
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.4098067533182036e-07,
      "loss": 0.8785,
      "step": 14500
    }
  ],
  "logging_steps": 500,
  "max_steps": 14541,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 3788734464000000.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}