{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.1770682148040637,
"eval_steps": 500,
"global_step": 3000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.36,
"grad_norm": 0.018075617030262947,
"learning_rate": 4.3952588292210934e-05,
"loss": 0.1598,
"step": 500
},
{
"epoch": 0.73,
"grad_norm": 0.014405773021280766,
"learning_rate": 3.790517658442187e-05,
"loss": 0.0001,
"step": 1000
},
{
"epoch": 1.09,
"grad_norm": 0.0018536851275712252,
"learning_rate": 3.18577648766328e-05,
"loss": 0.0001,
"step": 1500
},
{
"epoch": 1.45,
"grad_norm": 0.0023344324436038733,
"learning_rate": 2.5810353168843737e-05,
"loss": 0.0,
"step": 2000
},
{
"epoch": 1.81,
"grad_norm": 0.0010399112943559885,
"learning_rate": 1.9762941461054672e-05,
"loss": 0.0,
"step": 2500
},
{
"epoch": 2.18,
"grad_norm": 0.001136175007559359,
"learning_rate": 1.3715529753265602e-05,
"loss": 0.0013,
"step": 3000
}
],
"logging_steps": 500,
"max_steps": 4134,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 15184878501888.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}