{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.996835443037975,
  "eval_steps": 500,
  "global_step": 210,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "grad_norm": 10.745909104013029,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 3.7358,
      "step": 1
    },
    {
      "epoch": 0.06,
      "grad_norm": 11.845140833151277,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 3.6856,
      "step": 2
    },
    {
      "epoch": 0.09,
      "grad_norm": 11.845140833151277,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 3.2154,
      "step": 3
    },
    {
      "epoch": 0.11,
      "grad_norm": 11.845140833151277,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 3.5768,
      "step": 4
    },
    {
      "epoch": 0.14,
      "grad_norm": 11.845140833151277,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 3.7658,
      "step": 5
    },
    {
      "epoch": 0.17,
      "grad_norm": 8.676633993597468,
      "learning_rate": 8.571428571428571e-06,
      "loss": 3.0183,
      "step": 6
    },
    {
      "epoch": 0.2,
      "grad_norm": 8.614508991145888,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 3.4849,
      "step": 7
    },
    {
      "epoch": 0.23,
      "grad_norm": 8.614508991145888,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 3.6352,
      "step": 8
    },
    {
      "epoch": 0.26,
      "grad_norm": 6.822334238607632,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 3.4481,
      "step": 9
    },
    {
      "epoch": 0.28,
      "grad_norm": 9.922253600804103,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 3.2409,
      "step": 10
    },
    {
      "epoch": 0.31,
      "grad_norm": 15.131671312747258,
      "learning_rate": 2e-05,
      "loss": 4.014,
      "step": 11
    },
    {
      "epoch": 0.34,
      "grad_norm": 10.832518268660342,
      "learning_rate": 1.9998802517966852e-05,
      "loss": 3.43,
      "step": 12
    },
    {
      "epoch": 0.37,
      "grad_norm": 10.832518268660342,
      "learning_rate": 1.9998802517966852e-05,
      "loss": 3.8237,
      "step": 13
    },
    {
      "epoch": 0.4,
      "grad_norm": 10.832518268660342,
      "learning_rate": 1.9998802517966852e-05,
      "loss": 3.3954,
      "step": 14
    },
    {
      "epoch": 0.43,
      "grad_norm": 10.832518268660342,
      "learning_rate": 1.9998802517966852e-05,
      "loss": 3.8053,
      "step": 15
    },
    {
      "epoch": 0.46,
      "grad_norm": 10.832518268660342,
      "learning_rate": 1.9998802517966852e-05,
      "loss": 2.9549,
      "step": 16
    },
    {
      "epoch": 0.48,
      "grad_norm": 10.832518268660342,
      "learning_rate": 1.9998802517966852e-05,
      "loss": 3.3261,
      "step": 17
    },
    {
      "epoch": 0.51,
      "grad_norm": 10.832518268660342,
      "learning_rate": 1.9998802517966852e-05,
      "loss": 3.3888,
      "step": 18
    },
    {
      "epoch": 0.54,
      "grad_norm": 10.832518268660342,
      "learning_rate": 1.9998802517966852e-05,
      "loss": 3.1681,
      "step": 19
    },
    {
      "epoch": 0.57,
      "grad_norm": 9.859756972439765,
      "learning_rate": 1.9995210358660037e-05,
      "loss": 3.3455,
      "step": 20
    },
    {
      "epoch": 0.6,
      "grad_norm": 8.150942345154002,
      "learning_rate": 1.9989224382388813e-05,
      "loss": 3.4818,
      "step": 21
    },
    {
      "epoch": 0.63,
      "grad_norm": 8.547022254412862,
      "learning_rate": 1.9980846022772978e-05,
      "loss": 3.6079,
      "step": 22
    },
    {
      "epoch": 0.66,
      "grad_norm": 8.547022254412862,
      "learning_rate": 1.9980846022772978e-05,
      "loss": 3.0004,
      "step": 23
    },
    {
      "epoch": 0.68,
      "grad_norm": 7.504048709301798,
      "learning_rate": 1.997007728639956e-05,
      "loss": 3.3215,
      "step": 24
    },
    {
      "epoch": 0.71,
      "grad_norm": 6.539987611168741,
      "learning_rate": 1.9956920752342226e-05,
      "loss": 3.0279,
      "step": 25
    },
    {
      "epoch": 0.74,
      "grad_norm": 6.539987611168741,
      "learning_rate": 1.9956920752342226e-05,
      "loss": 3.4405,
      "step": 26
    },
    {
      "epoch": 0.77,
      "grad_norm": 6.539987611168741,
      "learning_rate": 1.9956920752342226e-05,
      "loss": 3.0967,
      "step": 27
    },
    {
      "epoch": 0.8,
      "grad_norm": 6.539987611168741,
      "learning_rate": 1.9956920752342226e-05,
      "loss": 3.088,
      "step": 28
    },
    {
      "epoch": 0.83,
      "grad_norm": 6.539987611168741,
      "learning_rate": 1.9956920752342226e-05,
      "loss": 2.7919,
      "step": 29
    },
    {
      "epoch": 0.85,
      "grad_norm": 5.442478841503217,
      "learning_rate": 1.9941379571543597e-05,
      "loss": 3.4199,
      "step": 30
    },
    {
      "epoch": 0.88,
      "grad_norm": 5.442478841503217,
      "learning_rate": 1.9941379571543597e-05,
      "loss": 3.1446,
      "step": 31
    },
    {
      "epoch": 0.91,
      "grad_norm": 4.683918200190512,
      "learning_rate": 1.9923457466060637e-05,
      "loss": 3.2384,
      "step": 32
    },
    {
      "epoch": 0.94,
      "grad_norm": 4.829342444256826,
      "learning_rate": 1.9903158728173206e-05,
      "loss": 3.1535,
      "step": 33
    },
    {
      "epoch": 0.97,
      "grad_norm": 4.829342444256826,
      "learning_rate": 1.9903158728173206e-05,
      "loss": 2.8555,
      "step": 34
    },
    {
      "epoch": 1.0,
      "grad_norm": 4.829342444256826,
      "learning_rate": 1.9903158728173206e-05,
      "loss": 2.7903,
      "step": 35
    },
    {
      "epoch": 1.02,
      "grad_norm": 9.6288495968318,
      "learning_rate": 1.9880488219356086e-05,
      "loss": 4.6632,
      "step": 36
    },
    {
      "epoch": 1.05,
      "grad_norm": 4.404098574989548,
      "learning_rate": 1.9855451369114677e-05,
      "loss": 2.9321,
      "step": 37
    },
    {
      "epoch": 1.08,
      "grad_norm": 4.404098574989548,
      "learning_rate": 1.9855451369114677e-05,
      "loss": 2.4582,
      "step": 38
    },
    {
      "epoch": 1.11,
      "grad_norm": 4.404098574989548,
      "learning_rate": 1.9855451369114677e-05,
      "loss": 3.016,
      "step": 39
    },
    {
      "epoch": 1.14,
      "grad_norm": 4.404098574989548,
      "learning_rate": 1.9855451369114677e-05,
      "loss": 3.0601,
      "step": 40
    },
    {
      "epoch": 1.16,
      "grad_norm": 4.404098574989548,
      "learning_rate": 1.9855451369114677e-05,
      "loss": 2.3929,
      "step": 41
    },
    {
      "epoch": 1.19,
      "grad_norm": 4.266672410944561,
      "learning_rate": 1.9828054173684646e-05,
      "loss": 2.3491,
      "step": 42
    },
    {
      "epoch": 1.22,
      "grad_norm": 4.266672410944561,
      "learning_rate": 1.9828054173684646e-05,
      "loss": 3.1275,
      "step": 43
    },
    {
      "epoch": 1.25,
      "grad_norm": 4.534933083803799,
      "learning_rate": 1.9798303194595846e-05,
      "loss": 2.7681,
      "step": 44
    },
    {
      "epoch": 1.28,
      "grad_norm": 4.3832140222345215,
      "learning_rate": 1.976620555710087e-05,
      "loss": 2.569,
      "step": 45
    },
    {
      "epoch": 1.31,
      "grad_norm": 5.567358591451238,
      "learning_rate": 1.973176894846855e-05,
      "loss": 2.6533,
      "step": 46
    },
    {
      "epoch": 1.34,
      "grad_norm": 4.972447642952155,
      "learning_rate": 1.9695001616142916e-05,
      "loss": 2.4035,
      "step": 47
    },
    {
      "epoch": 1.36,
      "grad_norm": 4.972447642952155,
      "learning_rate": 1.9695001616142916e-05,
      "loss": 3.6331,
      "step": 48
    },
    {
      "epoch": 1.39,
      "grad_norm": 4.972447642952155,
      "learning_rate": 1.9695001616142916e-05,
      "loss": 3.4085,
      "step": 49
    },
    {
      "epoch": 1.42,
      "grad_norm": 4.972447642952155,
      "learning_rate": 1.9695001616142916e-05,
      "loss": 3.5906,
      "step": 50
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.0,
      "learning_rate": 1.9695001616142916e-05,
      "loss": 2.8645,
      "step": 51
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.0,
      "learning_rate": 1.9695001616142916e-05,
      "loss": 3.2456,
      "step": 52
    },
    {
      "epoch": 1.51,
      "grad_norm": 0.0,
      "learning_rate": 1.9695001616142916e-05,
      "loss": 3.3121,
      "step": 53
    },
    {
      "epoch": 1.54,
      "grad_norm": 0.0,
      "learning_rate": 1.9695001616142916e-05,
      "loss": 3.0871,
      "step": 54
    },
    {
      "epoch": 1.57,
      "grad_norm": 4.577929884798453,
      "learning_rate": 1.965591236576794e-05,
      "loss": 2.4911,
      "step": 55
    },
    {
      "epoch": 1.6,
      "grad_norm": 5.8893577161525945,
      "learning_rate": 1.9614510559078626e-05,
      "loss": 2.4741,
      "step": 56
    },
    {
      "epoch": 1.63,
      "grad_norm": 5.738612095999555,
      "learning_rate": 1.95708061116589e-05,
      "loss": 2.448,
      "step": 57
    },
    {
      "epoch": 1.66,
      "grad_norm": 5.738612095999555,
      "learning_rate": 1.95708061116589e-05,
      "loss": 2.9737,
      "step": 58
    },
    {
      "epoch": 1.68,
      "grad_norm": 5.060793895344798,
      "learning_rate": 1.9524809490566878e-05,
      "loss": 2.3368,
      "step": 59
    },
    {
      "epoch": 1.71,
      "grad_norm": 4.455225187190013,
      "learning_rate": 1.9476531711828027e-05,
      "loss": 2.2524,
      "step": 60
    },
    {
      "epoch": 1.74,
      "grad_norm": 4.455225187190013,
      "learning_rate": 1.9476531711828027e-05,
      "loss": 3.4379,
      "step": 61
    },
    {
      "epoch": 1.77,
      "grad_norm": 4.455225187190013,
      "learning_rate": 1.9476531711828027e-05,
      "loss": 3.1004,
      "step": 62
    },
    {
      "epoch": 1.8,
      "grad_norm": 4.455225187190013,
      "learning_rate": 1.9476531711828027e-05,
      "loss": 3.0873,
      "step": 63
    },
    {
      "epoch": 1.83,
      "grad_norm": 4.455225187190013,
      "learning_rate": 1.9476531711828027e-05,
      "loss": 2.7873,
      "step": 64
    },
    {
      "epoch": 1.85,
      "grad_norm": 6.133721749895579,
      "learning_rate": 1.942598433779687e-05,
      "loss": 2.322,
      "step": 65
    },
    {
      "epoch": 1.88,
      "grad_norm": 6.133721749895579,
      "learning_rate": 1.942598433779687e-05,
      "loss": 3.181,
      "step": 66
    },
    {
      "epoch": 1.91,
      "grad_norm": 5.44690388478305,
      "learning_rate": 1.9373179474387858e-05,
      "loss": 2.3634,
      "step": 67
    },
    {
      "epoch": 1.94,
      "grad_norm": 5.399452825068645,
      "learning_rate": 1.9318129768176033e-05,
      "loss": 2.3324,
      "step": 68
    },
    {
      "epoch": 1.97,
      "grad_norm": 5.399452825068645,
      "learning_rate": 1.9318129768176033e-05,
      "loss": 2.9248,
      "step": 69
    },
    {
      "epoch": 2.0,
      "grad_norm": 5.399452825068645,
      "learning_rate": 1.9318129768176033e-05,
      "loss": 2.8532,
      "step": 70
    },
    {
      "epoch": 2.03,
      "grad_norm": 5.246839436820852,
      "learning_rate": 1.926084840336821e-05,
      "loss": 2.8837,
      "step": 71
    },
    {
      "epoch": 2.05,
      "grad_norm": 6.320135133123584,
      "learning_rate": 1.9201349098645433e-05,
      "loss": 3.3816,
      "step": 72
    },
    {
      "epoch": 2.08,
      "grad_norm": 6.320135133123584,
      "learning_rate": 1.9201349098645433e-05,
      "loss": 2.9535,
      "step": 73
    },
    {
      "epoch": 2.11,
      "grad_norm": 4.3276729104087295,
      "learning_rate": 1.9139646103877378e-05,
      "loss": 2.8788,
      "step": 74
    },
    {
      "epoch": 2.14,
      "grad_norm": 5.437918020961627,
      "learning_rate": 1.9075754196709574e-05,
      "loss": 2.8748,
      "step": 75
    },
    {
      "epoch": 2.17,
      "grad_norm": 5.437918020961627,
      "learning_rate": 1.9075754196709574e-05,
      "loss": 2.9727,
      "step": 76
    },
    {
      "epoch": 2.2,
      "grad_norm": 5.437918020961627,
      "learning_rate": 1.9075754196709574e-05,
      "loss": 3.2969,
      "step": 77
    },
    {
      "epoch": 2.22,
      "grad_norm": 5.437918020961627,
      "learning_rate": 1.9075754196709574e-05,
      "loss": 3.0428,
      "step": 78
    },
    {
      "epoch": 2.25,
      "grad_norm": 5.437918020961627,
      "learning_rate": 1.9075754196709574e-05,
      "loss": 2.8944,
      "step": 79
    },
    {
      "epoch": 2.28,
      "grad_norm": 4.920021016187099,
      "learning_rate": 1.900968867902419e-05,
      "loss": 3.2227,
      "step": 80
    },
    {
      "epoch": 2.31,
      "grad_norm": 4.711625687761842,
      "learning_rate": 1.894146537327533e-05,
      "loss": 3.3434,
      "step": 81
    },
    {
      "epoch": 2.34,
      "grad_norm": 3.9759783965517874,
      "learning_rate": 1.8871100618699553e-05,
      "loss": 2.8305,
      "step": 82
    },
    {
      "epoch": 2.37,
      "grad_norm": 6.647061826435325,
      "learning_rate": 1.8798611267402745e-05,
      "loss": 3.6359,
      "step": 83
    },
    {
      "epoch": 2.4,
      "grad_norm": 6.647061826435325,
      "learning_rate": 1.8798611267402745e-05,
      "loss": 2.7353,
      "step": 84
    },
    {
      "epoch": 2.42,
      "grad_norm": 6.647061826435325,
      "learning_rate": 1.8798611267402745e-05,
      "loss": 3.1311,
      "step": 85
    },
    {
      "epoch": 2.02,
      "grad_norm": 6.461044629826689,
      "learning_rate": 1.872401468032406e-05,
      "loss": 3.6868,
      "step": 86
    },
    {
      "epoch": 2.05,
      "grad_norm": 4.5526001477701765,
      "learning_rate": 1.864732872307804e-05,
      "loss": 2.2986,
      "step": 87
    },
    {
      "epoch": 2.08,
      "grad_norm": 4.5526001477701765,
      "learning_rate": 1.864732872307804e-05,
      "loss": 2.41,
      "step": 88
    },
    {
      "epoch": 2.11,
      "grad_norm": 4.5526001477701765,
      "learning_rate": 1.864732872307804e-05,
      "loss": 2.9752,
      "step": 89
    },
    {
      "epoch": 2.14,
      "grad_norm": 4.5526001477701765,
      "learning_rate": 1.864732872307804e-05,
      "loss": 2.9911,
      "step": 90
    },
    {
      "epoch": 2.16,
      "grad_norm": 4.5526001477701765,
      "learning_rate": 1.864732872307804e-05,
      "loss": 2.342,
      "step": 91
    },
    {
      "epoch": 2.19,
      "grad_norm": 4.066251715555047,
      "learning_rate": 1.8568571761675893e-05,
      "loss": 1.8423,
      "step": 92
    },
    {
      "epoch": 2.22,
      "grad_norm": 4.066251715555047,
      "learning_rate": 1.8568571761675893e-05,
      "loss": 3.0791,
      "step": 93
    },
    {
      "epoch": 2.25,
      "grad_norm": 4.068156270977161,
      "learning_rate": 1.8487762658126872e-05,
      "loss": 2.1676,
      "step": 94
    },
    {
      "epoch": 2.28,
      "grad_norm": 3.8689084112155943,
      "learning_rate": 1.8404920765920898e-05,
      "loss": 1.9859,
      "step": 95
    },
    {
      "epoch": 2.31,
      "grad_norm": 4.7150189322065374,
      "learning_rate": 1.8320065925393468e-05,
      "loss": 1.885,
      "step": 96
    },
    {
      "epoch": 2.34,
      "grad_norm": 4.354015459893575,
      "learning_rate": 1.8233218458973984e-05,
      "loss": 1.605,
      "step": 97
    },
    {
      "epoch": 2.36,
      "grad_norm": 4.354015459893575,
      "learning_rate": 1.8233218458973984e-05,
      "loss": 3.7732,
      "step": 98
    },
    {
      "epoch": 2.39,
      "grad_norm": 4.354015459893575,
      "learning_rate": 1.8233218458973984e-05,
      "loss": 3.4874,
      "step": 99
    },
    {
      "epoch": 2.42,
      "grad_norm": 4.354015459893575,
      "learning_rate": 1.8233218458973984e-05,
      "loss": 3.7074,
      "step": 100
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.0,
      "learning_rate": 1.8233218458973984e-05,
      "loss": 3.2998,
      "step": 101
    },
    {
      "epoch": 2.91,
      "grad_norm": 4.626614627051075,
      "learning_rate": 1.814439916631857e-05,
      "loss": 1.5941,
      "step": 102
    },
    {
      "epoch": 2.94,
      "grad_norm": 4.983632390206286,
      "learning_rate": 1.8053629319328662e-05,
      "loss": 1.4982,
      "step": 103
    },
    {
      "epoch": 2.97,
      "grad_norm": 4.983632390206286,
      "learning_rate": 1.8053629319328662e-05,
      "loss": 3.1327,
      "step": 104
    },
    {
      "epoch": 3.0,
      "grad_norm": 4.983632390206286,
      "learning_rate": 1.8053629319328662e-05,
      "loss": 3.0876,
      "step": 105
    },
    {
      "epoch": 3.03,
      "grad_norm": 4.7246349324630454,
      "learning_rate": 1.796093065705644e-05,
      "loss": 1.7214,
      "step": 106
    },
    {
      "epoch": 3.05,
      "grad_norm": 6.478189895530414,
      "learning_rate": 1.786632538049842e-05,
      "loss": 2.2154,
      "step": 107
    },
    {
      "epoch": 3.08,
      "grad_norm": 6.478189895530414,
      "learning_rate": 1.786632538049842e-05,
      "loss": 3.3134,
      "step": 108
    },
    {
      "epoch": 3.11,
      "grad_norm": 5.5921238659005725,
      "learning_rate": 1.776983614727838e-05,
      "loss": 2.2517,
      "step": 109
    },
    {
      "epoch": 3.14,
      "grad_norm": 5.534943060953176,
      "learning_rate": 1.7671486066220965e-05,
      "loss": 2.082,
      "step": 110
    },
    {
      "epoch": 3.17,
      "grad_norm": 5.534943060953176,
      "learning_rate": 1.7671486066220965e-05,
      "loss": 3.1982,
      "step": 111
    },
    {
      "epoch": 3.2,
      "grad_norm": 5.534943060953176,
      "learning_rate": 1.7671486066220965e-05,
      "loss": 3.5452,
      "step": 112
    },
    {
      "epoch": 3.22,
      "grad_norm": 5.534943060953176,
      "learning_rate": 1.7671486066220965e-05,
      "loss": 3.2572,
      "step": 113
    },
    {
      "epoch": 3.25,
      "grad_norm": 5.534943060953176,
      "learning_rate": 1.7671486066220965e-05,
      "loss": 3.093,
      "step": 114
    },
    {
      "epoch": 3.28,
      "grad_norm": 6.028567841224762,
      "learning_rate": 1.757129869181718e-05,
      "loss": 2.4001,
      "step": 115
    },
    {
      "epoch": 3.31,
      "grad_norm": 6.3075138811264475,
      "learning_rate": 1.746929801858317e-05,
      "loss": 2.547,
      "step": 116
    },
    {
      "epoch": 3.34,
      "grad_norm": 5.078979153195962,
      "learning_rate": 1.736550847531366e-05,
      "loss": 2.2615,
      "step": 117
    },
    {
      "epoch": 3.37,
      "grad_norm": 6.8686015465430925,
      "learning_rate": 1.725995491923131e-05,
      "loss": 2.5536,
      "step": 118
    },
    {
      "epoch": 3.4,
      "grad_norm": 6.8686015465430925,
      "learning_rate": 1.725995491923131e-05,
      "loss": 2.9249,
      "step": 119
    },
    {
      "epoch": 3.42,
      "grad_norm": 6.8686015465430925,
      "learning_rate": 1.725995491923131e-05,
      "loss": 3.3196,
      "step": 120
    },
    {
      "epoch": 3.45,
      "grad_norm": 6.790172880419949,
      "learning_rate": 1.7152662630033506e-05,
      "loss": 2.9955,
      "step": 121
    },
    {
      "epoch": 3.48,
      "grad_norm": 6.536740333558469,
      "learning_rate": 1.7043657303837965e-05,
      "loss": 3.3799,
      "step": 122
    },
    {
      "epoch": 3.51,
      "grad_norm": 6.536740333558469,
      "learning_rate": 1.7043657303837965e-05,
      "loss": 3.1293,
      "step": 123
    },
    {
      "epoch": 3.54,
      "grad_norm": 7.076899389694803,
      "learning_rate": 1.693296504702862e-05,
      "loss": 3.7154,
      "step": 124
    },
    {
      "epoch": 3.57,
      "grad_norm": 7.076899389694803,
      "learning_rate": 1.693296504702862e-05,
      "loss": 3.01,
      "step": 125
    },
    {
      "epoch": 3.59,
      "grad_norm": 7.076899389694803,
      "learning_rate": 1.693296504702862e-05,
      "loss": 3.0141,
      "step": 126
    },
    {
      "epoch": 3.62,
      "grad_norm": 7.076899389694803,
      "learning_rate": 1.693296504702862e-05,
      "loss": 3.4354,
      "step": 127
    },
    {
      "epoch": 3.65,
      "grad_norm": 7.076899389694803,
      "learning_rate": 1.693296504702862e-05,
      "loss": 2.937,
      "step": 128
    },
    {
      "epoch": 3.68,
      "grad_norm": 7.31807712363732,
      "learning_rate": 1.682061237000322e-05,
      "loss": 3.1611,
      "step": 129
    },
    {
      "epoch": 3.71,
      "grad_norm": 7.31807712363732,
      "learning_rate": 1.682061237000322e-05,
      "loss": 3.0004,
      "step": 130
    },
    {
      "epoch": 3.74,
      "grad_norm": 7.31807712363732,
      "learning_rate": 1.682061237000322e-05,
      "loss": 3.0685,
      "step": 131
    },
    {
      "epoch": 3.77,
      "grad_norm": 6.751859406216805,
      "learning_rate": 1.6706626180824185e-05,
      "loss": 3.1218,
      "step": 132
    },
    {
      "epoch": 3.79,
      "grad_norm": 6.085383457658417,
      "learning_rate": 1.659103377877423e-05,
      "loss": 3.0829,
      "step": 133
    },
    {
      "epoch": 3.82,
      "grad_norm": 6.085383457658417,
      "learning_rate": 1.659103377877423e-05,
      "loss": 3.1848,
      "step": 134
    },
    {
      "epoch": 3.85,
      "grad_norm": 6.085383457658417,
      "learning_rate": 1.659103377877423e-05,
      "loss": 3.0089,
      "step": 135
    },
    {
      "epoch": 3.02,
      "grad_norm": 9.365062981714178,
      "learning_rate": 1.647386284781828e-05,
      "loss": 3.6731,
      "step": 136
    },
    {
      "epoch": 3.05,
      "grad_norm": 6.610030487412542,
      "learning_rate": 1.6355141449973254e-05,
      "loss": 1.9927,
      "step": 137
    },
    {
      "epoch": 3.08,
      "grad_norm": 6.610030487412542,
      "learning_rate": 1.6355141449973254e-05,
      "loss": 2.4353,
      "step": 138
    },
    {
      "epoch": 3.11,
      "grad_norm": 6.610030487412542,
      "learning_rate": 1.6355141449973254e-05,
      "loss": 3.0121,
      "step": 139
    },
    {
      "epoch": 3.14,
      "grad_norm": 6.610030487412542,
      "learning_rate": 1.6355141449973254e-05,
      "loss": 3.0222,
      "step": 140
    },
    {
      "epoch": 3.16,
      "grad_norm": 6.610030487412542,
      "learning_rate": 1.6355141449973254e-05,
      "loss": 2.3583,
      "step": 141
    },
    {
      "epoch": 3.19,
      "grad_norm": 5.5450540859525095,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 1.5504,
      "step": 142
    },
    {
      "epoch": 3.22,
      "grad_norm": 5.5450540859525095,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 3.111,
      "step": 143
    },
    {
      "epoch": 3.25,
      "grad_norm": 5.733431452998195,
      "learning_rate": 1.611316135153026e-05,
      "loss": 1.7954,
      "step": 144
    },
    {
      "epoch": 3.28,
      "grad_norm": 5.034610729523613,
      "learning_rate": 1.598996060429634e-05,
      "loss": 1.6,
      "step": 145
    },
    {
      "epoch": 3.31,
      "grad_norm": 4.914809599403493,
      "learning_rate": 1.586532528302183e-05,
      "loss": 1.4157,
      "step": 146
    },
    {
      "epoch": 3.34,
      "grad_norm": 4.744024069441604,
      "learning_rate": 1.5739285237418323e-05,
      "loss": 1.1664,
      "step": 147
    },
    {
      "epoch": 3.36,
      "grad_norm": 4.744024069441604,
      "learning_rate": 1.5739285237418323e-05,
      "loss": 3.9526,
      "step": 148
    },
    {
      "epoch": 3.39,
      "grad_norm": 4.744024069441604,
      "learning_rate": 1.5739285237418323e-05,
      "loss": 3.6555,
      "step": 149
    },
    {
      "epoch": 3.42,
      "grad_norm": 4.744024069441604,
      "learning_rate": 1.5739285237418323e-05,
      "loss": 3.8614,
      "step": 150
    },
    {
      "epoch": 4.31,
      "grad_norm": 4.1768497310203125,
      "learning_rate": 1.5611870653623826e-05,
      "loss": 1.1103,
      "step": 151
    },
    {
      "epoch": 4.34,
      "grad_norm": 3.910064558844921,
      "learning_rate": 1.548311204697331e-05,
      "loss": 1.4397,
      "step": 152
    },
    {
      "epoch": 4.37,
      "grad_norm": 3.910064558844921,
      "learning_rate": 1.548311204697331e-05,
      "loss": 4.2716,
      "step": 153
    },
    {
      "epoch": 4.4,
      "grad_norm": 3.910064558844921,
      "learning_rate": 1.548311204697331e-05,
      "loss": 3.7013,
      "step": 154
    },
    {
      "epoch": 4.43,
      "grad_norm": 3.910064558844921,
      "learning_rate": 1.548311204697331e-05,
      "loss": 4.1961,
      "step": 155
    },
    {
      "epoch": 4.46,
      "grad_norm": 3.910064558844921,
      "learning_rate": 1.548311204697331e-05,
      "loss": 3.2478,
      "step": 156
    },
    {
      "epoch": 4.48,
      "grad_norm": 3.910064558844921,
      "learning_rate": 1.548311204697331e-05,
      "loss": 3.6599,
      "step": 157
    },
    {
      "epoch": 4.51,
      "grad_norm": 3.910064558844921,
      "learning_rate": 1.548311204697331e-05,
      "loss": 3.7179,
      "step": 158
    },
    {
      "epoch": 4.54,
      "grad_norm": 3.910064558844921,
      "learning_rate": 1.548311204697331e-05,
      "loss": 3.5381,
      "step": 159
    },
    {
      "epoch": 4.57,
      "grad_norm": 5.202243594230388,
      "learning_rate": 1.5353040254690396e-05,
      "loss": 1.7474,
      "step": 160
    },
    {
      "epoch": 4.6,
      "grad_norm": 5.234307741068539,
      "learning_rate": 1.5221686428501929e-05,
      "loss": 1.5038,
      "step": 161
    },
    {
      "epoch": 4.63,
      "grad_norm": 5.454826834012641,
      "learning_rate": 1.5089082027177291e-05,
      "loss": 1.3784,
      "step": 162
    },
    {
      "epoch": 4.66,
      "grad_norm": 5.454826834012641,
      "learning_rate": 1.5089082027177291e-05,
      "loss": 3.3226,
      "step": 163
    },
    {
      "epoch": 4.68,
      "grad_norm": 5.377130386285983,
      "learning_rate": 1.4955258808994096e-05,
      "loss": 1.4163,
      "step": 164
    },
    {
      "epoch": 4.71,
      "grad_norm": 5.630263152807122,
      "learning_rate": 1.4820248824132221e-05,
      "loss": 1.4215,
      "step": 165
    },
    {
      "epoch": 4.74,
      "grad_norm": 5.630263152807122,
      "learning_rate": 1.4820248824132221e-05,
      "loss": 3.822,
      "step": 166
    },
    {
      "epoch": 4.77,
      "grad_norm": 5.630263152807122,
      "learning_rate": 1.4820248824132221e-05,
      "loss": 3.4931,
      "step": 167
    },
    {
      "epoch": 4.8,
      "grad_norm": 5.630263152807122,
      "learning_rate": 1.4820248824132221e-05,
      "loss": 3.5133,
      "step": 168
    },
    {
      "epoch": 4.83,
      "grad_norm": 5.630263152807122,
      "learning_rate": 1.4820248824132221e-05,
      "loss": 3.1357,
      "step": 169
    },
    {
      "epoch": 4.85,
      "grad_norm": 5.788425426880448,
      "learning_rate": 1.4684084406997903e-05,
      "loss": 1.2746,
      "step": 170
    },
    {
      "epoch": 4.88,
      "grad_norm": 5.788425426880448,
      "learning_rate": 1.4684084406997903e-05,
      "loss": 3.5869,
      "step": 171
    },
    {
      "epoch": 4.91,
      "grad_norm": 4.874795766816558,
      "learning_rate": 1.4546798168479756e-05,
      "loss": 0.8619,
      "step": 172
    },
    {
      "epoch": 4.94,
      "grad_norm": 4.729179130081543,
      "learning_rate": 1.4408422988138585e-05,
      "loss": 0.765,
      "step": 173
    },
    {
      "epoch": 4.97,
      "grad_norm": 4.729179130081543,
      "learning_rate": 1.4408422988138585e-05,
      "loss": 3.3551,
      "step": 174
    },
    {
      "epoch": 5.0,
      "grad_norm": 4.729179130081543,
      "learning_rate": 1.4408422988138585e-05,
      "loss": 3.3105,
      "step": 175
    },
    {
      "epoch": 5.03,
      "grad_norm": 5.290997290947021,
      "learning_rate": 1.4268992006332847e-05,
      "loss": 0.826,
      "step": 176
    },
    {
      "epoch": 5.05,
      "grad_norm": 5.646691617523086,
      "learning_rate": 1.412853861628166e-05,
      "loss": 1.0013,
      "step": 177
    },
    {
      "epoch": 5.08,
      "grad_norm": 5.646691617523086,
      "learning_rate": 1.412853861628166e-05,
      "loss": 3.592,
      "step": 178
    },
    {
      "epoch": 5.11,
      "grad_norm": 5.293182617266683,
      "learning_rate": 1.3987096456067236e-05,
      "loss": 1.0919,
      "step": 179
    },
    {
      "epoch": 5.14,
      "grad_norm": 5.709228089784577,
      "learning_rate": 1.3844699400578696e-05,
      "loss": 0.8435,
      "step": 180
    },
    {
      "epoch": 5.17,
      "grad_norm": 5.709228089784577,
      "learning_rate": 1.3844699400578696e-05,
      "loss": 3.6846,
      "step": 181
    },
    {
      "epoch": 5.2,
      "grad_norm": 5.709228089784577,
      "learning_rate": 1.3844699400578696e-05,
      "loss": 4.0876,
      "step": 182
    },
    {
      "epoch": 5.22,
      "grad_norm": 5.709228089784577,
      "learning_rate": 1.3844699400578696e-05,
      "loss": 3.7682,
      "step": 183
    },
    {
      "epoch": 5.25,
      "grad_norm": 5.709228089784577,
      "learning_rate": 1.3844699400578696e-05,
      "loss": 3.5682,
      "step": 184
    },
    {
      "epoch": 5.28,
      "grad_norm": 6.993362529212867,
      "learning_rate": 1.3701381553399147e-05,
      "loss": 1.1277,
      "step": 185
    },
    {
      "epoch": 5.02,
      "grad_norm": 10.857085356276176,
      "learning_rate": 1.3557177238637987e-05,
      "loss": 2.0418,
      "step": 186
    },
    {
      "epoch": 5.05,
      "grad_norm": 7.182316855738663,
      "learning_rate": 1.3412120992710425e-05,
      "loss": 1.5863,
      "step": 187
    },
    {
      "epoch": 5.08,
      "grad_norm": 7.182316855738663,
      "learning_rate": 1.3412120992710425e-05,
      "loss": 2.8401,
      "step": 188
    },
    {
      "epoch": 5.11,
      "grad_norm": 7.182316855738663,
      "learning_rate": 1.3412120992710425e-05,
      "loss": 3.5382,
      "step": 189
    },
    {
      "epoch": 5.14,
      "grad_norm": 7.182316855738663,
      "learning_rate": 1.3412120992710425e-05,
      "loss": 3.5492,
      "step": 190
    },
    {
      "epoch": 5.16,
      "grad_norm": 7.182316855738663,
      "learning_rate": 1.3412120992710425e-05,
      "loss": 2.7031,
      "step": 191
    },
    {
      "epoch": 5.19,
      "grad_norm": 5.454569997331726,
      "learning_rate": 1.3266247556066122e-05,
      "loss": 1.2064,
      "step": 192
    },
    {
      "epoch": 5.22,
      "grad_norm": 5.454569997331726,
      "learning_rate": 1.3266247556066122e-05,
      "loss": 3.4975,
      "step": 193
    },
    {
      "epoch": 5.25,
      "grad_norm": 6.05413267530256,
      "learning_rate": 1.3119591864868979e-05,
      "loss": 1.3958,
      "step": 194
    },
    {
      "epoch": 5.28,
      "grad_norm": 4.86763555215196,
      "learning_rate": 1.2972189042630044e-05,
      "loss": 1.1746,
      "step": 195
    },
    {
      "epoch": 5.31,
      "grad_norm": 4.818024421862567,
      "learning_rate": 1.2824074391795571e-05,
      "loss": 0.7361,
      "step": 196
    },
    {
      "epoch": 5.34,
      "grad_norm": 3.9313752517714686,
      "learning_rate": 1.2675283385292212e-05,
      "loss": 0.5405,
      "step": 197
    },
    {
      "epoch": 5.36,
      "grad_norm": 3.9313752517714686,
      "learning_rate": 1.2675283385292212e-05,
      "loss": 3.8222,
      "step": 198
    },
    {
      "epoch": 5.39,
      "grad_norm": 3.9313752517714686,
      "learning_rate": 1.2675283385292212e-05,
      "loss": 3.8457,
      "step": 199
    },
    {
      "epoch": 5.42,
      "grad_norm": 3.9313752517714686,
      "learning_rate": 1.2675283385292212e-05,
      "loss": 4.0919,
      "step": 200
    },
    {
      "epoch": 5.74,
      "grad_norm": 0.0,
      "learning_rate": 1.2675283385292212e-05,
      "loss": 3.8764,
      "step": 201
    },
    {
      "epoch": 5.77,
      "grad_norm": 0.0,
      "learning_rate": 1.2675283385292212e-05,
      "loss": 3.5465,
      "step": 202
    },
    {
      "epoch": 5.8,
      "grad_norm": 0.0,
      "learning_rate": 1.2675283385292212e-05,
      "loss": 3.5141,
      "step": 203
    },
    {
      "epoch": 5.83,
      "grad_norm": 0.0,
      "learning_rate": 1.2675283385292212e-05,
      "loss": 3.1509,
      "step": 204
    },
    {
      "epoch": 5.85,
      "grad_norm": 5.547510343712912,
      "learning_rate": 1.252585165803135e-05,
      "loss": 0.7488,
      "step": 205
    },
    {
      "epoch": 5.88,
      "grad_norm": 5.547510343712912,
      "learning_rate": 1.252585165803135e-05,
      "loss": 3.6736,
      "step": 206
    },
    {
      "epoch": 5.91,
      "grad_norm": 4.401046390597762,
      "learning_rate": 1.2375814998374714e-05,
      "loss": 0.5788,
      "step": 207
    },
    {
      "epoch": 5.94,
      "grad_norm": 3.5202118867023735,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 0.5075,
      "step": 208
    },
    {
      "epoch": 5.97,
      "grad_norm": 3.5202118867023735,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 3.4362,
      "step": 209
    },
    {
      "epoch": 6.0,
      "grad_norm": 3.5202118867023735,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 3.3736,
      "step": 210
    },
    {
      "epoch": 6.0,
      "step": 210,
      "total_flos": 0.0,
      "train_loss": 0.12574415859721955,
      "train_runtime": 2485.1499,
      "train_samples_per_second": 171.382,
      "train_steps_per_second": 0.085
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 210,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 25,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}