{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9987368421052631,
"eval_steps": 100,
"global_step": 593,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.17,
"grad_norm": 0.4642287790775299,
"learning_rate": 1.972335510674149e-05,
"loss": 1.7213,
"step": 100
},
{
"epoch": 0.17,
"eval_loss": 1.06580650806427,
"eval_runtime": 102.5963,
"eval_samples_per_second": 4.873,
"eval_steps_per_second": 1.218,
"step": 100
},
{
"epoch": 0.34,
"grad_norm": 0.873336136341095,
"learning_rate": 1.678421935868953e-05,
"loss": 1.0726,
"step": 200
},
{
"epoch": 0.34,
"eval_loss": 1.022551417350769,
"eval_runtime": 102.2722,
"eval_samples_per_second": 4.889,
"eval_steps_per_second": 1.222,
"step": 200
},
{
"epoch": 0.51,
"grad_norm": 0.7002689838409424,
"learning_rate": 1.1555611635532276e-05,
"loss": 1.0456,
"step": 300
},
{
"epoch": 0.51,
"eval_loss": 1.0069515705108643,
"eval_runtime": 102.5086,
"eval_samples_per_second": 4.878,
"eval_steps_per_second": 1.219,
"step": 300
},
{
"epoch": 0.67,
"grad_norm": 0.7483100295066833,
"learning_rate": 5.8020313170922526e-06,
"loss": 1.0417,
"step": 400
},
{
"epoch": 0.67,
"eval_loss": 0.9990952014923096,
"eval_runtime": 102.3159,
"eval_samples_per_second": 4.887,
"eval_steps_per_second": 1.222,
"step": 400
},
{
"epoch": 0.84,
"grad_norm": 0.563865602016449,
"learning_rate": 1.46514039852793e-06,
"loss": 1.0169,
"step": 500
},
{
"epoch": 0.84,
"eval_loss": 0.9966371059417725,
"eval_runtime": 102.2788,
"eval_samples_per_second": 4.889,
"eval_steps_per_second": 1.222,
"step": 500
}
],
"logging_steps": 100,
"max_steps": 593,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"total_flos": 1.001225674624598e+18,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}