{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.49419322955275513,
"eval_steps": 500,
"global_step": 500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"grad_norm": 1.339092493057251,
"learning_rate": 5.6012058970266934e-05,
"loss": 1.6822,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 10,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.02,
"grad_norm": 1.4976561069488525,
"learning_rate": 7.287336883921704e-05,
"loss": 1.3895,
"max_memory_allocated (GB)": 91.9,
"memory_allocated (GB)": 14.99,
"step": 20,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.03,
"grad_norm": 0.5813677310943604,
"learning_rate": 8.273660282559241e-05,
"loss": 1.2399,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 30,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.04,
"grad_norm": 0.3463669717311859,
"learning_rate": 8.973467870816715e-05,
"loss": 1.2044,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 40,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.05,
"grad_norm": 0.3429594039916992,
"learning_rate": 9.516280807158375e-05,
"loss": 1.1798,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 50,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.06,
"grad_norm": 0.3272635042667389,
"learning_rate": 9.959791269454252e-05,
"loss": 1.153,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 60,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.07,
"grad_norm": 0.37938210368156433,
"learning_rate": 9.959204487506375e-05,
"loss": 1.1266,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 70,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.08,
"grad_norm": 0.4513859152793884,
"learning_rate": 9.908210096889343e-05,
"loss": 1.1218,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 80,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.09,
"grad_norm": 0.5865214467048645,
"learning_rate": 9.85721570627231e-05,
"loss": 1.1048,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 90,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.1,
"grad_norm": 0.6466606855392456,
"learning_rate": 9.806221315655279e-05,
"loss": 1.1064,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 100,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.11,
"grad_norm": 0.5907604098320007,
"learning_rate": 9.755226925038246e-05,
"loss": 1.0716,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 110,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.12,
"grad_norm": 0.7884001135826111,
"learning_rate": 9.704232534421214e-05,
"loss": 1.0656,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 120,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.13,
"grad_norm": 0.7752586007118225,
"learning_rate": 9.653238143804181e-05,
"loss": 1.065,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 130,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.14,
"grad_norm": 0.5188919901847839,
"learning_rate": 9.60224375318715e-05,
"loss": 1.0606,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 140,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.15,
"grad_norm": 0.5289068818092346,
"learning_rate": 9.551249362570118e-05,
"loss": 1.0537,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 150,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.16,
"grad_norm": 1.0006146430969238,
"learning_rate": 9.500254971953085e-05,
"loss": 1.0528,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 160,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.17,
"grad_norm": 0.5318694710731506,
"learning_rate": 9.449260581336054e-05,
"loss": 1.0357,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 170,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.18,
"grad_norm": 0.5409672260284424,
"learning_rate": 9.398266190719021e-05,
"loss": 1.0264,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 180,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.19,
"grad_norm": 0.5338054299354553,
"learning_rate": 9.347271800101989e-05,
"loss": 1.0319,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 190,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.2,
"grad_norm": 0.5304291844367981,
"learning_rate": 9.296277409484956e-05,
"loss": 1.0301,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 200,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.21,
"grad_norm": 0.5799819231033325,
"learning_rate": 9.245283018867925e-05,
"loss": 1.0179,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 210,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.22,
"grad_norm": 0.4919432997703552,
"learning_rate": 9.194288628250894e-05,
"loss": 1.0174,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 220,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.23,
"grad_norm": 0.5090098977088928,
"learning_rate": 9.14329423763386e-05,
"loss": 1.0261,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 230,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.24,
"grad_norm": 0.5532674193382263,
"learning_rate": 9.092299847016829e-05,
"loss": 1.0239,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 240,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.25,
"grad_norm": 0.5546780824661255,
"learning_rate": 9.041305456399796e-05,
"loss": 1.0072,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 250,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.26,
"grad_norm": 0.5483475923538208,
"learning_rate": 8.990311065782764e-05,
"loss": 1.0121,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 260,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.27,
"grad_norm": 0.4962722063064575,
"learning_rate": 8.939316675165733e-05,
"loss": 1.0097,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 270,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.28,
"grad_norm": 0.5032678842544556,
"learning_rate": 8.8883222845487e-05,
"loss": 1.0085,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 280,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.29,
"grad_norm": 0.48048779368400574,
"learning_rate": 8.837327893931669e-05,
"loss": 1.006,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 290,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.3,
"grad_norm": 0.493956595659256,
"learning_rate": 8.786333503314635e-05,
"loss": 0.9991,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 300,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.31,
"grad_norm": 0.4832962155342102,
"learning_rate": 8.735339112697604e-05,
"loss": 0.9994,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 310,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.32,
"grad_norm": 0.44359833002090454,
"learning_rate": 8.684344722080571e-05,
"loss": 0.9949,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 320,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.33,
"grad_norm": 0.432824045419693,
"learning_rate": 8.633350331463539e-05,
"loss": 0.9945,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 330,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.34,
"grad_norm": 0.5194958448410034,
"learning_rate": 8.582355940846507e-05,
"loss": 1.0005,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 340,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.35,
"grad_norm": 0.4381203353404999,
"learning_rate": 8.531361550229475e-05,
"loss": 0.9971,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 350,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.36,
"grad_norm": 0.4479101300239563,
"learning_rate": 8.480367159612444e-05,
"loss": 0.9834,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 360,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.37,
"grad_norm": 0.44543156027793884,
"learning_rate": 8.42937276899541e-05,
"loss": 0.9811,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 370,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.38,
"grad_norm": 0.46895870566368103,
"learning_rate": 8.378378378378379e-05,
"loss": 0.9969,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 380,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.39,
"grad_norm": 0.42161303758621216,
"learning_rate": 8.327383987761347e-05,
"loss": 0.9852,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 390,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.4,
"grad_norm": 0.4941897690296173,
"learning_rate": 8.276389597144315e-05,
"loss": 0.9878,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 400,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.41,
"grad_norm": 0.4448719918727875,
"learning_rate": 8.225395206527282e-05,
"loss": 0.9956,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 410,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.42,
"grad_norm": 0.4166922867298126,
"learning_rate": 8.17440081591025e-05,
"loss": 0.9899,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 420,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.43,
"grad_norm": 0.40304499864578247,
"learning_rate": 8.123406425293219e-05,
"loss": 0.9908,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 430,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.43,
"grad_norm": 0.43452388048171997,
"learning_rate": 8.072412034676186e-05,
"loss": 0.9705,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 440,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.44,
"grad_norm": 0.4060077965259552,
"learning_rate": 8.021417644059154e-05,
"loss": 0.9825,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 450,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.45,
"grad_norm": 0.4520680904388428,
"learning_rate": 7.970423253442122e-05,
"loss": 0.9782,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 460,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.46,
"grad_norm": 0.4969607889652252,
"learning_rate": 7.91942886282509e-05,
"loss": 0.9798,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 470,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.47,
"grad_norm": 0.48629072308540344,
"learning_rate": 7.868434472208057e-05,
"loss": 0.9795,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 480,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.48,
"grad_norm": 0.4386264979839325,
"learning_rate": 7.817440081591025e-05,
"loss": 0.9767,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 490,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.49,
"grad_norm": 0.44945308566093445,
"learning_rate": 7.766445690973994e-05,
"loss": 0.9673,
"max_memory_allocated (GB)": 91.96,
"memory_allocated (GB)": 14.99,
"step": 500,
"total_memory_available (GB)": 94.62
}
],
"logging_steps": 10,
"max_steps": 2022,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"total_flos": 5.597410974551245e+18,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}