{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.0, |
|
"eval_steps": 500, |
|
"global_step": 154, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.012987012987012988, |
|
"grad_norm": 0.7649294288089501, |
|
"learning_rate": 5e-05, |
|
"loss": 1.0642, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.025974025974025976, |
|
"grad_norm": 0.16744224046876113, |
|
"learning_rate": 4.999472998758978e-05, |
|
"loss": 1.0811, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.03896103896103896, |
|
"grad_norm": 0.1506244473047185, |
|
"learning_rate": 4.99789221722016e-05, |
|
"loss": 1.0819, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.05194805194805195, |
|
"grad_norm": 0.15366192883175545, |
|
"learning_rate": 4.995258321842611e-05, |
|
"loss": 1.1197, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.06493506493506493, |
|
"grad_norm": 0.11707148712561326, |
|
"learning_rate": 4.991572423079236e-05, |
|
"loss": 1.0702, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.07792207792207792, |
|
"grad_norm": 0.13073876345749866, |
|
"learning_rate": 4.986836074908616e-05, |
|
"loss": 1.0454, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.09090909090909091, |
|
"grad_norm": 0.14345020845358708, |
|
"learning_rate": 4.98105127417984e-05, |
|
"loss": 1.0145, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.1038961038961039, |
|
"grad_norm": 0.12284158867400687, |
|
"learning_rate": 4.974220459770639e-05, |
|
"loss": 0.9749, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.11688311688311688, |
|
"grad_norm": 0.1316122136803943, |
|
"learning_rate": 4.966346511559149e-05, |
|
"loss": 0.9844, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.12987012987012986, |
|
"grad_norm": 0.12129084788561034, |
|
"learning_rate": 4.957432749209755e-05, |
|
"loss": 0.937, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.14285714285714285, |
|
"grad_norm": 0.11402446255258117, |
|
"learning_rate": 4.9474829307735115e-05, |
|
"loss": 0.9119, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.15584415584415584, |
|
"grad_norm": 0.13670373476126882, |
|
"learning_rate": 4.9365012511037514e-05, |
|
"loss": 0.9048, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.16883116883116883, |
|
"grad_norm": 0.17167936568998776, |
|
"learning_rate": 4.9244923400875245e-05, |
|
"loss": 0.8975, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 0.13651826129682004, |
|
"learning_rate": 4.911461260693638e-05, |
|
"loss": 0.815, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.19480519480519481, |
|
"grad_norm": 0.1973535264967678, |
|
"learning_rate": 4.8974135068381036e-05, |
|
"loss": 0.7728, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.2077922077922078, |
|
"grad_norm": 0.17362622068648217, |
|
"learning_rate": 4.882355001067892e-05, |
|
"loss": 0.7792, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.22077922077922077, |
|
"grad_norm": 0.1972301833866661, |
|
"learning_rate": 4.8662920920639866e-05, |
|
"loss": 0.7213, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.23376623376623376, |
|
"grad_norm": 0.20220023509712592, |
|
"learning_rate": 4.849231551964771e-05, |
|
"loss": 0.6796, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.24675324675324675, |
|
"grad_norm": 0.21311631984189247, |
|
"learning_rate": 4.8311805735108894e-05, |
|
"loss": 0.6554, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.2597402597402597, |
|
"grad_norm": 0.2369381772825553, |
|
"learning_rate": 4.81214676701278e-05, |
|
"loss": 0.563, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2727272727272727, |
|
"grad_norm": 0.26507656791198936, |
|
"learning_rate": 4.792138157142158e-05, |
|
"loss": 0.5178, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2857142857142857, |
|
"grad_norm": 0.26498752467037984, |
|
"learning_rate": 4.7711631795488096e-05, |
|
"loss": 0.4661, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.2987012987012987, |
|
"grad_norm": 0.24893170518457297, |
|
"learning_rate": 4.749230677304114e-05, |
|
"loss": 0.4262, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.3116883116883117, |
|
"grad_norm": 0.22992415120842585, |
|
"learning_rate": 4.726349897172791e-05, |
|
"loss": 0.3719, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.3246753246753247, |
|
"grad_norm": 0.17800562793366614, |
|
"learning_rate": 4.702530485714461e-05, |
|
"loss": 0.3395, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.33766233766233766, |
|
"grad_norm": 0.1678374692636547, |
|
"learning_rate": 4.677782485216644e-05, |
|
"loss": 0.3205, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.35064935064935066, |
|
"grad_norm": 0.14760981845430549, |
|
"learning_rate": 4.6521163294609196e-05, |
|
"loss": 0.297, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 0.12851418569866138, |
|
"learning_rate": 4.625542839324036e-05, |
|
"loss": 0.3212, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.37662337662337664, |
|
"grad_norm": 0.12667316817519966, |
|
"learning_rate": 4.598073218215817e-05, |
|
"loss": 0.3153, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.38961038961038963, |
|
"grad_norm": 0.11051907979123522, |
|
"learning_rate": 4.5697190473557946e-05, |
|
"loss": 0.2752, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.4025974025974026, |
|
"grad_norm": 0.1268943489369776, |
|
"learning_rate": 4.540492280890555e-05, |
|
"loss": 0.2866, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.4155844155844156, |
|
"grad_norm": 0.12871191005606444, |
|
"learning_rate": 4.510405240853854e-05, |
|
"loss": 0.2705, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.42857142857142855, |
|
"grad_norm": 0.13732367872755494, |
|
"learning_rate": 4.4794706119716455e-05, |
|
"loss": 0.2961, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.44155844155844154, |
|
"grad_norm": 0.12777955862078405, |
|
"learning_rate": 4.447701436314176e-05, |
|
"loss": 0.2868, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.45454545454545453, |
|
"grad_norm": 0.12098600481328763, |
|
"learning_rate": 4.415111107797445e-05, |
|
"loss": 0.256, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.4675324675324675, |
|
"grad_norm": 0.1284047348249079, |
|
"learning_rate": 4.381713366536311e-05, |
|
"loss": 0.2679, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.4805194805194805, |
|
"grad_norm": 0.11163107591942552, |
|
"learning_rate": 4.347522293051648e-05, |
|
"loss": 0.2457, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.4935064935064935, |
|
"grad_norm": 0.14312777584685432, |
|
"learning_rate": 4.312552302333982e-05, |
|
"loss": 0.2615, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.5064935064935064, |
|
"grad_norm": 0.14239613661086223, |
|
"learning_rate": 4.276818137766118e-05, |
|
"loss": 0.2722, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.5194805194805194, |
|
"grad_norm": 0.12803795478640206, |
|
"learning_rate": 4.2403348649073174e-05, |
|
"loss": 0.2662, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5324675324675324, |
|
"grad_norm": 0.10402277357673333, |
|
"learning_rate": 4.203117865141635e-05, |
|
"loss": 0.2454, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.5454545454545454, |
|
"grad_norm": 0.10586149604773042, |
|
"learning_rate": 4.1651828291931264e-05, |
|
"loss": 0.2684, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.5584415584415584, |
|
"grad_norm": 0.1212655510313763, |
|
"learning_rate": 4.126545750510605e-05, |
|
"loss": 0.258, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.5714285714285714, |
|
"grad_norm": 0.11476596613325958, |
|
"learning_rate": 4.0872229185248075e-05, |
|
"loss": 0.2716, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.5844155844155844, |
|
"grad_norm": 0.11875414394829936, |
|
"learning_rate": 4.047230911780737e-05, |
|
"loss": 0.2255, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.5974025974025974, |
|
"grad_norm": 0.11819788647354397, |
|
"learning_rate": 4.0065865909481417e-05, |
|
"loss": 0.2492, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.6103896103896104, |
|
"grad_norm": 0.13638858604883952, |
|
"learning_rate": 3.965307091713037e-05, |
|
"loss": 0.2494, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.6233766233766234, |
|
"grad_norm": 0.115798492930415, |
|
"learning_rate": 3.923409817553284e-05, |
|
"loss": 0.2326, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.6363636363636364, |
|
"grad_norm": 0.10245941653264991, |
|
"learning_rate": 3.880912432401265e-05, |
|
"loss": 0.235, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.6493506493506493, |
|
"grad_norm": 0.09819274391733306, |
|
"learning_rate": 3.837832853196751e-05, |
|
"loss": 0.246, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6623376623376623, |
|
"grad_norm": 0.0979091297539171, |
|
"learning_rate": 3.794189242333106e-05, |
|
"loss": 0.2588, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.6753246753246753, |
|
"grad_norm": 0.11797293731222826, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.2333, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6883116883116883, |
|
"grad_norm": 0.10853973446787894, |
|
"learning_rate": 3.705283756425872e-05, |
|
"loss": 0.2564, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.7012987012987013, |
|
"grad_norm": 0.09739725780688968, |
|
"learning_rate": 3.6600593640234086e-05, |
|
"loss": 0.2467, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.7142857142857143, |
|
"grad_norm": 0.13136414642559058, |
|
"learning_rate": 3.6143458894413465e-05, |
|
"loss": 0.2489, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 0.10623115343843, |
|
"learning_rate": 3.568162605525953e-05, |
|
"loss": 0.2445, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.7402597402597403, |
|
"grad_norm": 0.1023205505796002, |
|
"learning_rate": 3.5215289831955786e-05, |
|
"loss": 0.2499, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.7532467532467533, |
|
"grad_norm": 0.1002792505451181, |
|
"learning_rate": 3.474464683231698e-05, |
|
"loss": 0.2261, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7662337662337663, |
|
"grad_norm": 0.11002829565848689, |
|
"learning_rate": 3.426989547989902e-05, |
|
"loss": 0.2513, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.7792207792207793, |
|
"grad_norm": 0.10861164319015883, |
|
"learning_rate": 3.379123593034342e-05, |
|
"loss": 0.2252, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.7922077922077922, |
|
"grad_norm": 0.09758507903096522, |
|
"learning_rate": 3.330886998699149e-05, |
|
"loss": 0.2272, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.8051948051948052, |
|
"grad_norm": 0.10548706725662228, |
|
"learning_rate": 3.282300101580386e-05, |
|
"loss": 0.2393, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.8181818181818182, |
|
"grad_norm": 0.09385065956952136, |
|
"learning_rate": 3.2333833859621153e-05, |
|
"loss": 0.2179, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.8311688311688312, |
|
"grad_norm": 0.1608126010863833, |
|
"learning_rate": 3.1841574751802076e-05, |
|
"loss": 0.2341, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.8441558441558441, |
|
"grad_norm": 0.10253273432410331, |
|
"learning_rate": 3.13464312292752e-05, |
|
"loss": 0.2258, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.8571428571428571, |
|
"grad_norm": 0.11329982372255963, |
|
"learning_rate": 3.084861204504122e-05, |
|
"loss": 0.2223, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8701298701298701, |
|
"grad_norm": 0.13350806545341848, |
|
"learning_rate": 3.0348327080162435e-05, |
|
"loss": 0.2456, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.8831168831168831, |
|
"grad_norm": 0.10995445961760825, |
|
"learning_rate": 2.9845787255276753e-05, |
|
"loss": 0.2321, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.8961038961038961, |
|
"grad_norm": 0.1037165714945774, |
|
"learning_rate": 2.9341204441673266e-05, |
|
"loss": 0.2171, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 0.10919814710978967, |
|
"learning_rate": 2.8834791371967142e-05, |
|
"loss": 0.2472, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.922077922077922, |
|
"grad_norm": 0.09681778664572314, |
|
"learning_rate": 2.8326761550411345e-05, |
|
"loss": 0.2364, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.935064935064935, |
|
"grad_norm": 0.1074259579148838, |
|
"learning_rate": 2.781732916288303e-05, |
|
"loss": 0.2391, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.948051948051948, |
|
"grad_norm": 0.1054775490154967, |
|
"learning_rate": 2.7306708986582553e-05, |
|
"loss": 0.2595, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.961038961038961, |
|
"grad_norm": 0.09815954447405159, |
|
"learning_rate": 2.679511629948319e-05, |
|
"loss": 0.2426, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.974025974025974, |
|
"grad_norm": 0.09306732537511961, |
|
"learning_rate": 2.628276678956974e-05, |
|
"loss": 0.2423, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.987012987012987, |
|
"grad_norm": 0.11169931049945912, |
|
"learning_rate": 2.5769876463904265e-05, |
|
"loss": 0.2348, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.10138472477596566, |
|
"learning_rate": 2.5256661557557247e-05, |
|
"loss": 0.2387, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 1.0129870129870129, |
|
"grad_norm": 0.09555843194904001, |
|
"learning_rate": 2.4743338442442755e-05, |
|
"loss": 0.2323, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.025974025974026, |
|
"grad_norm": 0.1181816997966792, |
|
"learning_rate": 2.4230123536095748e-05, |
|
"loss": 0.2436, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 1.0389610389610389, |
|
"grad_norm": 0.08998379085998691, |
|
"learning_rate": 2.3717233210430256e-05, |
|
"loss": 0.2208, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.051948051948052, |
|
"grad_norm": 0.09999438916245779, |
|
"learning_rate": 2.3204883700516812e-05, |
|
"loss": 0.236, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 1.0649350649350648, |
|
"grad_norm": 0.10658908314699578, |
|
"learning_rate": 2.2693291013417453e-05, |
|
"loss": 0.2267, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.077922077922078, |
|
"grad_norm": 0.1017206414672713, |
|
"learning_rate": 2.2182670837116975e-05, |
|
"loss": 0.2476, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 0.10154042905245364, |
|
"learning_rate": 2.1673238449588668e-05, |
|
"loss": 0.2455, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.103896103896104, |
|
"grad_norm": 0.09488446331830326, |
|
"learning_rate": 2.116520862803286e-05, |
|
"loss": 0.2066, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 1.1168831168831168, |
|
"grad_norm": 0.10202897851829487, |
|
"learning_rate": 2.0658795558326743e-05, |
|
"loss": 0.2007, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.12987012987013, |
|
"grad_norm": 0.1183943667827613, |
|
"learning_rate": 2.015421274472325e-05, |
|
"loss": 0.2026, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 1.1428571428571428, |
|
"grad_norm": 0.09921431477749063, |
|
"learning_rate": 1.965167291983757e-05, |
|
"loss": 0.2104, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.155844155844156, |
|
"grad_norm": 0.10093734214597283, |
|
"learning_rate": 1.9151387954958794e-05, |
|
"loss": 0.2046, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 1.1688311688311688, |
|
"grad_norm": 0.09360429992532161, |
|
"learning_rate": 1.8653568770724806e-05, |
|
"loss": 0.2131, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1818181818181819, |
|
"grad_norm": 0.12020649129202392, |
|
"learning_rate": 1.815842524819793e-05, |
|
"loss": 0.1989, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 1.1948051948051948, |
|
"grad_norm": 0.10680381676149939, |
|
"learning_rate": 1.7666166140378852e-05, |
|
"loss": 0.2078, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.2077922077922079, |
|
"grad_norm": 0.09568299370975282, |
|
"learning_rate": 1.7176998984196146e-05, |
|
"loss": 0.2101, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 1.2207792207792207, |
|
"grad_norm": 0.12879980775489425, |
|
"learning_rate": 1.6691130013008514e-05, |
|
"loss": 0.2166, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.2337662337662338, |
|
"grad_norm": 0.09751174219709864, |
|
"learning_rate": 1.620876406965658e-05, |
|
"loss": 0.216, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.2467532467532467, |
|
"grad_norm": 0.12307588066370435, |
|
"learning_rate": 1.5730104520100982e-05, |
|
"loss": 0.2355, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.2597402597402598, |
|
"grad_norm": 0.0984790807665978, |
|
"learning_rate": 1.5255353167683017e-05, |
|
"loss": 0.2283, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 1.2727272727272727, |
|
"grad_norm": 0.11200751310649058, |
|
"learning_rate": 1.4784710168044213e-05, |
|
"loss": 0.2401, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2857142857142856, |
|
"grad_norm": 0.10465475693157676, |
|
"learning_rate": 1.4318373944740484e-05, |
|
"loss": 0.1995, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.2987012987012987, |
|
"grad_norm": 0.10184278002913967, |
|
"learning_rate": 1.3856541105586545e-05, |
|
"loss": 0.2068, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.3116883116883118, |
|
"grad_norm": 0.11277983230555819, |
|
"learning_rate": 1.339940635976592e-05, |
|
"loss": 0.1994, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.3246753246753247, |
|
"grad_norm": 0.10951913478829725, |
|
"learning_rate": 1.2947162435741278e-05, |
|
"loss": 0.2446, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.3376623376623376, |
|
"grad_norm": 0.10294119354543721, |
|
"learning_rate": 1.2500000000000006e-05, |
|
"loss": 0.2223, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.3506493506493507, |
|
"grad_norm": 0.10683807554925215, |
|
"learning_rate": 1.205810757666894e-05, |
|
"loss": 0.2225, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.3636363636363638, |
|
"grad_norm": 0.10650031782405078, |
|
"learning_rate": 1.1621671468032493e-05, |
|
"loss": 0.2026, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.3766233766233766, |
|
"grad_norm": 0.10799384532554564, |
|
"learning_rate": 1.1190875675987356e-05, |
|
"loss": 0.2395, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3896103896103895, |
|
"grad_norm": 0.1099680907946163, |
|
"learning_rate": 1.0765901824467167e-05, |
|
"loss": 0.2266, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.4025974025974026, |
|
"grad_norm": 0.10063986485693889, |
|
"learning_rate": 1.0346929082869641e-05, |
|
"loss": 0.2244, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.4155844155844157, |
|
"grad_norm": 0.09898088073892639, |
|
"learning_rate": 9.934134090518593e-06, |
|
"loss": 0.1933, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.4285714285714286, |
|
"grad_norm": 0.10525578859634234, |
|
"learning_rate": 9.527690882192636e-06, |
|
"loss": 0.2202, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4415584415584415, |
|
"grad_norm": 0.1087751424503037, |
|
"learning_rate": 9.127770814751933e-06, |
|
"loss": 0.2164, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.4545454545454546, |
|
"grad_norm": 0.10675193255605053, |
|
"learning_rate": 8.734542494893955e-06, |
|
"loss": 0.2557, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.4675324675324675, |
|
"grad_norm": 0.09433236678416125, |
|
"learning_rate": 8.348171708068747e-06, |
|
"loss": 0.2047, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.4805194805194806, |
|
"grad_norm": 0.10552231126115215, |
|
"learning_rate": 7.968821348583644e-06, |
|
"loss": 0.2354, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.4935064935064934, |
|
"grad_norm": 0.1090179336847755, |
|
"learning_rate": 7.5966513509268365e-06, |
|
"loss": 0.1903, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.5064935064935066, |
|
"grad_norm": 0.10671951695163927, |
|
"learning_rate": 7.231818622338823e-06, |
|
"loss": 0.2296, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.5194805194805194, |
|
"grad_norm": 0.10006542534798159, |
|
"learning_rate": 6.8744769766601854e-06, |
|
"loss": 0.2204, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.5324675324675323, |
|
"grad_norm": 0.10963040390997872, |
|
"learning_rate": 6.524777069483526e-06, |
|
"loss": 0.2086, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.5454545454545454, |
|
"grad_norm": 0.09907281406279463, |
|
"learning_rate": 6.182866334636889e-06, |
|
"loss": 0.1897, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.5584415584415585, |
|
"grad_norm": 0.10508976076834439, |
|
"learning_rate": 5.848888922025553e-06, |
|
"loss": 0.2282, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5714285714285714, |
|
"grad_norm": 0.134246328671402, |
|
"learning_rate": 5.522985636858239e-06, |
|
"loss": 0.2264, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.5844155844155843, |
|
"grad_norm": 0.1059879782400766, |
|
"learning_rate": 5.205293880283552e-06, |
|
"loss": 0.1896, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.5974025974025974, |
|
"grad_norm": 0.10587695869132589, |
|
"learning_rate": 4.8959475914614554e-06, |
|
"loss": 0.2229, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.6103896103896105, |
|
"grad_norm": 0.12969941386231867, |
|
"learning_rate": 4.5950771910944605e-06, |
|
"loss": 0.2168, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.6233766233766234, |
|
"grad_norm": 0.23291844260878844, |
|
"learning_rate": 4.3028095264420535e-06, |
|
"loss": 0.2216, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.6363636363636362, |
|
"grad_norm": 0.12515572322774973, |
|
"learning_rate": 4.019267817841835e-06, |
|
"loss": 0.2287, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.6493506493506493, |
|
"grad_norm": 0.10857583181073842, |
|
"learning_rate": 3.7445716067596503e-06, |
|
"loss": 0.1996, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.6623376623376624, |
|
"grad_norm": 0.11253155054511829, |
|
"learning_rate": 3.478836705390809e-06, |
|
"loss": 0.2334, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.6753246753246753, |
|
"grad_norm": 0.10767090625172462, |
|
"learning_rate": 3.222175147833556e-06, |
|
"loss": 0.2188, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.6883116883116882, |
|
"grad_norm": 0.11985791222342175, |
|
"learning_rate": 2.974695142855388e-06, |
|
"loss": 0.2386, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.7012987012987013, |
|
"grad_norm": 0.10876307711132034, |
|
"learning_rate": 2.7365010282720952e-06, |
|
"loss": 0.2153, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.7142857142857144, |
|
"grad_norm": 0.12451466520756925, |
|
"learning_rate": 2.507693226958871e-06, |
|
"loss": 0.2031, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.7272727272727273, |
|
"grad_norm": 0.100099707537048, |
|
"learning_rate": 2.2883682045119063e-06, |
|
"loss": 0.1956, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.7402597402597402, |
|
"grad_norm": 0.11691453452769905, |
|
"learning_rate": 2.0786184285784297e-06, |
|
"loss": 0.2099, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.7532467532467533, |
|
"grad_norm": 0.13343643028694882, |
|
"learning_rate": 1.8785323298722097e-06, |
|
"loss": 0.2442, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.7662337662337664, |
|
"grad_norm": 0.10550388815035454, |
|
"learning_rate": 1.6881942648911076e-06, |
|
"loss": 0.2089, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.7792207792207793, |
|
"grad_norm": 0.09864757317858987, |
|
"learning_rate": 1.5076844803522922e-06, |
|
"loss": 0.2116, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.7922077922077921, |
|
"grad_norm": 0.11851794361322249, |
|
"learning_rate": 1.3370790793601373e-06, |
|
"loss": 0.2316, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.8051948051948052, |
|
"grad_norm": 0.13304376665401985, |
|
"learning_rate": 1.1764499893210878e-06, |
|
"loss": 0.1851, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 0.11193724349100125, |
|
"learning_rate": 1.0258649316189722e-06, |
|
"loss": 0.1954, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.8311688311688312, |
|
"grad_norm": 0.09758738574949906, |
|
"learning_rate": 8.85387393063622e-07, |
|
"loss": 0.2079, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.844155844155844, |
|
"grad_norm": 0.14859065932159254, |
|
"learning_rate": 7.550765991247654e-07, |
|
"loss": 0.2141, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.8571428571428572, |
|
"grad_norm": 0.11555838452156621, |
|
"learning_rate": 6.349874889624962e-07, |
|
"loss": 0.2076, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.87012987012987, |
|
"grad_norm": 0.09704689285498966, |
|
"learning_rate": 5.25170692264887e-07, |
|
"loss": 0.2014, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.883116883116883, |
|
"grad_norm": 0.13020262018312262, |
|
"learning_rate": 4.256725079024554e-07, |
|
"loss": 0.2265, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.896103896103896, |
|
"grad_norm": 0.12798337534645113, |
|
"learning_rate": 3.3653488440851255e-07, |
|
"loss": 0.224, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.9090909090909092, |
|
"grad_norm": 0.10348239456828633, |
|
"learning_rate": 2.5779540229361745e-07, |
|
"loss": 0.21, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.922077922077922, |
|
"grad_norm": 0.0993975437154903, |
|
"learning_rate": 1.8948725820160662e-07, |
|
"loss": 0.1995, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.935064935064935, |
|
"grad_norm": 0.11597426567128678, |
|
"learning_rate": 1.3163925091384533e-07, |
|
"loss": 0.2236, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.948051948051948, |
|
"grad_norm": 0.10428192963009006, |
|
"learning_rate": 8.427576920763958e-08, |
|
"loss": 0.1893, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9610389610389611, |
|
"grad_norm": 0.1077579100092834, |
|
"learning_rate": 4.741678157389739e-08, |
|
"loss": 0.1985, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.974025974025974, |
|
"grad_norm": 0.1058358938118017, |
|
"learning_rate": 2.1077827798404726e-08, |
|
"loss": 0.2055, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.987012987012987, |
|
"grad_norm": 0.1523430022468915, |
|
"learning_rate": 5.270012410216185e-09, |
|
"loss": 0.1858, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.11217948034066874, |
|
"learning_rate": 0.0, |
|
"loss": 0.1922, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 154, |
|
"total_flos": 98079354388480.0, |
|
"train_loss": 0.32445608553561295, |
|
"train_runtime": 1604.0651, |
|
"train_samples_per_second": 0.382, |
|
"train_steps_per_second": 0.096 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 154, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 98079354388480.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |