|
{
  "best_metric": 1.4315598011016846,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.07202016564638099,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00036010082823190496,
      "grad_norm": 0.7863615155220032,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.2713,
      "step": 1
    },
    {
      "epoch": 0.00036010082823190496,
      "eval_loss": 1.800669550895691,
      "eval_runtime": 350.5123,
      "eval_samples_per_second": 13.343,
      "eval_steps_per_second": 3.338,
      "step": 1
    },
    {
      "epoch": 0.0007202016564638099,
      "grad_norm": 0.915640115737915,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.2562,
      "step": 2
    },
    {
      "epoch": 0.0010803024846957148,
      "grad_norm": 0.8614652156829834,
      "learning_rate": 2.4e-05,
      "loss": 1.1652,
      "step": 3
    },
    {
      "epoch": 0.0014404033129276198,
      "grad_norm": 0.8120066523551941,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.1665,
      "step": 4
    },
    {
      "epoch": 0.0018005041411595247,
      "grad_norm": 0.8058522939682007,
      "learning_rate": 4e-05,
      "loss": 1.1652,
      "step": 5
    },
    {
      "epoch": 0.0021606049693914295,
      "grad_norm": 0.7672634124755859,
      "learning_rate": 4.8e-05,
      "loss": 1.1551,
      "step": 6
    },
    {
      "epoch": 0.0025207057976233344,
      "grad_norm": 0.6031802296638489,
      "learning_rate": 5.6e-05,
      "loss": 1.0732,
      "step": 7
    },
    {
      "epoch": 0.0028808066258552397,
      "grad_norm": 0.4752897322177887,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.1206,
      "step": 8
    },
    {
      "epoch": 0.0032409074540871445,
      "grad_norm": 0.39508315920829773,
      "learning_rate": 7.2e-05,
      "loss": 1.0632,
      "step": 9
    },
    {
      "epoch": 0.0036010082823190494,
      "grad_norm": 0.38076189160346985,
      "learning_rate": 8e-05,
      "loss": 1.2369,
      "step": 10
    },
    {
      "epoch": 0.003961109110550955,
      "grad_norm": 0.40446725487709045,
      "learning_rate": 7.999453219969877e-05,
      "loss": 1.1194,
      "step": 11
    },
    {
      "epoch": 0.004321209938782859,
      "grad_norm": 0.4369296729564667,
      "learning_rate": 7.997813029363704e-05,
      "loss": 1.0702,
      "step": 12
    },
    {
      "epoch": 0.004681310767014764,
      "grad_norm": 0.41360488533973694,
      "learning_rate": 7.99507987659322e-05,
      "loss": 1.0601,
      "step": 13
    },
    {
      "epoch": 0.005041411595246669,
      "grad_norm": 0.38158196210861206,
      "learning_rate": 7.991254508875098e-05,
      "loss": 1.0773,
      "step": 14
    },
    {
      "epoch": 0.005401512423478574,
      "grad_norm": 0.4101528525352478,
      "learning_rate": 7.98633797202668e-05,
      "loss": 1.0604,
      "step": 15
    },
    {
      "epoch": 0.005761613251710479,
      "grad_norm": 0.4122559130191803,
      "learning_rate": 7.980331610180046e-05,
      "loss": 1.2719,
      "step": 16
    },
    {
      "epoch": 0.006121714079942384,
      "grad_norm": 0.34931090474128723,
      "learning_rate": 7.973237065414553e-05,
      "loss": 1.2838,
      "step": 17
    },
    {
      "epoch": 0.006481814908174289,
      "grad_norm": 0.38246577978134155,
      "learning_rate": 7.965056277307902e-05,
      "loss": 1.1784,
      "step": 18
    },
    {
      "epoch": 0.0068419157364061935,
      "grad_norm": 0.3690750300884247,
      "learning_rate": 7.955791482405875e-05,
      "loss": 1.2598,
      "step": 19
    },
    {
      "epoch": 0.007202016564638099,
      "grad_norm": 0.4142758548259735,
      "learning_rate": 7.94544521361089e-05,
      "loss": 1.3088,
      "step": 20
    },
    {
      "epoch": 0.007562117392870004,
      "grad_norm": 0.43574273586273193,
      "learning_rate": 7.93402029948953e-05,
      "loss": 1.5369,
      "step": 21
    },
    {
      "epoch": 0.00792221822110191,
      "grad_norm": 0.37755557894706726,
      "learning_rate": 7.921519863499239e-05,
      "loss": 1.2371,
      "step": 22
    },
    {
      "epoch": 0.008282319049333813,
      "grad_norm": 0.361796110868454,
      "learning_rate": 7.907947323134398e-05,
      "loss": 1.2302,
      "step": 23
    },
    {
      "epoch": 0.008642419877565718,
      "grad_norm": 0.416102796792984,
      "learning_rate": 7.893306388992023e-05,
      "loss": 1.412,
      "step": 24
    },
    {
      "epoch": 0.009002520705797623,
      "grad_norm": 0.4576001763343811,
      "learning_rate": 7.877601063757323e-05,
      "loss": 1.4526,
      "step": 25
    },
    {
      "epoch": 0.009362621534029529,
      "grad_norm": 0.4764253497123718,
      "learning_rate": 7.860835641109395e-05,
      "loss": 1.5504,
      "step": 26
    },
    {
      "epoch": 0.009722722362261434,
      "grad_norm": 0.45336440205574036,
      "learning_rate": 7.843014704547393e-05,
      "loss": 1.5157,
      "step": 27
    },
    {
      "epoch": 0.010082823190493338,
      "grad_norm": 0.4694438576698303,
      "learning_rate": 7.824143126137431e-05,
      "loss": 1.4271,
      "step": 28
    },
    {
      "epoch": 0.010442924018725243,
      "grad_norm": 0.4710056185722351,
      "learning_rate": 7.804226065180615e-05,
      "loss": 1.596,
      "step": 29
    },
    {
      "epoch": 0.010803024846957148,
      "grad_norm": 0.4954136610031128,
      "learning_rate": 7.783268966802539e-05,
      "loss": 1.6747,
      "step": 30
    },
    {
      "epoch": 0.011163125675189053,
      "grad_norm": 0.5075744390487671,
      "learning_rate": 7.761277560464645e-05,
      "loss": 1.6001,
      "step": 31
    },
    {
      "epoch": 0.011523226503420959,
      "grad_norm": 0.5597767233848572,
      "learning_rate": 7.738257858397844e-05,
      "loss": 1.6792,
      "step": 32
    },
    {
      "epoch": 0.011883327331652862,
      "grad_norm": 0.5473968982696533,
      "learning_rate": 7.71421615395883e-05,
      "loss": 1.5957,
      "step": 33
    },
    {
      "epoch": 0.012243428159884768,
      "grad_norm": 0.628345787525177,
      "learning_rate": 7.68915901990954e-05,
      "loss": 1.8923,
      "step": 34
    },
    {
      "epoch": 0.012603528988116673,
      "grad_norm": 0.6227551698684692,
      "learning_rate": 7.663093306620231e-05,
      "loss": 1.724,
      "step": 35
    },
    {
      "epoch": 0.012963629816348578,
      "grad_norm": 0.6094886064529419,
      "learning_rate": 7.636026140196651e-05,
      "loss": 1.888,
      "step": 36
    },
    {
      "epoch": 0.013323730644580483,
      "grad_norm": 0.657168984413147,
      "learning_rate": 7.607964920531837e-05,
      "loss": 1.6704,
      "step": 37
    },
    {
      "epoch": 0.013683831472812387,
      "grad_norm": 0.6255415678024292,
      "learning_rate": 7.578917319283055e-05,
      "loss": 1.8029,
      "step": 38
    },
    {
      "epoch": 0.014043932301044292,
      "grad_norm": 0.7949183583259583,
      "learning_rate": 7.548891277774448e-05,
      "loss": 1.85,
      "step": 39
    },
    {
      "epoch": 0.014404033129276198,
      "grad_norm": 0.7664933800697327,
      "learning_rate": 7.517895004825956e-05,
      "loss": 1.8477,
      "step": 40
    },
    {
      "epoch": 0.014764133957508103,
      "grad_norm": 0.7252883315086365,
      "learning_rate": 7.48593697450911e-05,
      "loss": 1.9001,
      "step": 41
    },
    {
      "epoch": 0.015124234785740008,
      "grad_norm": 0.9363314509391785,
      "learning_rate": 7.453025923830296e-05,
      "loss": 2.0366,
      "step": 42
    },
    {
      "epoch": 0.015484335613971912,
      "grad_norm": 0.9252744317054749,
      "learning_rate": 7.419170850342156e-05,
      "loss": 2.055,
      "step": 43
    },
    {
      "epoch": 0.01584443644220382,
      "grad_norm": 1.0645928382873535,
      "learning_rate": 7.384381009683742e-05,
      "loss": 2.0096,
      "step": 44
    },
    {
      "epoch": 0.01620453727043572,
      "grad_norm": 1.0589935779571533,
      "learning_rate": 7.348665913050115e-05,
      "loss": 2.1188,
      "step": 45
    },
    {
      "epoch": 0.016564638098667626,
      "grad_norm": 1.3916031122207642,
      "learning_rate": 7.312035324592081e-05,
      "loss": 2.1085,
      "step": 46
    },
    {
      "epoch": 0.01692473892689953,
      "grad_norm": 1.385343313217163,
      "learning_rate": 7.274499258746771e-05,
      "loss": 1.9352,
      "step": 47
    },
    {
      "epoch": 0.017284839755131436,
      "grad_norm": 1.647487998008728,
      "learning_rate": 7.236067977499791e-05,
      "loss": 2.1542,
      "step": 48
    },
    {
      "epoch": 0.01764494058336334,
      "grad_norm": 2.208531379699707,
      "learning_rate": 7.196751987579699e-05,
      "loss": 2.1791,
      "step": 49
    },
    {
      "epoch": 0.018005041411595247,
      "grad_norm": 3.1761276721954346,
      "learning_rate": 7.156562037585576e-05,
      "loss": 2.4123,
      "step": 50
    },
    {
      "epoch": 0.018005041411595247,
      "eval_loss": 1.5850434303283691,
      "eval_runtime": 352.3186,
      "eval_samples_per_second": 13.275,
      "eval_steps_per_second": 3.321,
      "step": 50
    },
    {
      "epoch": 0.018365142239827152,
      "grad_norm": 0.5396647453308105,
      "learning_rate": 7.11550911504845e-05,
      "loss": 1.3308,
      "step": 51
    },
    {
      "epoch": 0.018725243068059057,
      "grad_norm": 0.5886169672012329,
      "learning_rate": 7.073604443427437e-05,
      "loss": 1.1507,
      "step": 52
    },
    {
      "epoch": 0.019085343896290963,
      "grad_norm": 0.6277225613594055,
      "learning_rate": 7.03085947904134e-05,
      "loss": 1.2221,
      "step": 53
    },
    {
      "epoch": 0.019445444724522868,
      "grad_norm": 0.552457869052887,
      "learning_rate": 6.987285907936617e-05,
      "loss": 1.0,
      "step": 54
    },
    {
      "epoch": 0.01980554555275477,
      "grad_norm": 0.47208383679389954,
      "learning_rate": 6.942895642692527e-05,
      "loss": 1.1913,
      "step": 55
    },
    {
      "epoch": 0.020165646380986675,
      "grad_norm": 0.40564197301864624,
      "learning_rate": 6.897700819164357e-05,
      "loss": 1.1485,
      "step": 56
    },
    {
      "epoch": 0.02052574720921858,
      "grad_norm": 0.34190523624420166,
      "learning_rate": 6.851713793165589e-05,
      "loss": 1.0862,
      "step": 57
    },
    {
      "epoch": 0.020885848037450486,
      "grad_norm": 0.31114333868026733,
      "learning_rate": 6.804947137089955e-05,
      "loss": 0.9792,
      "step": 58
    },
    {
      "epoch": 0.02124594886568239,
      "grad_norm": 0.2748708128929138,
      "learning_rate": 6.757413636474263e-05,
      "loss": 1.1398,
      "step": 59
    },
    {
      "epoch": 0.021606049693914296,
      "grad_norm": 0.28262221813201904,
      "learning_rate": 6.709126286502965e-05,
      "loss": 1.0137,
      "step": 60
    },
    {
      "epoch": 0.0219661505221462,
      "grad_norm": 0.3110496401786804,
      "learning_rate": 6.660098288455393e-05,
      "loss": 1.1065,
      "step": 61
    },
    {
      "epoch": 0.022326251350378107,
      "grad_norm": 0.28911370038986206,
      "learning_rate": 6.610343046096674e-05,
      "loss": 1.0633,
      "step": 62
    },
    {
      "epoch": 0.022686352178610012,
      "grad_norm": 0.30962398648262024,
      "learning_rate": 6.559874162013267e-05,
      "loss": 1.2423,
      "step": 63
    },
    {
      "epoch": 0.023046453006841917,
      "grad_norm": 0.30083462595939636,
      "learning_rate": 6.508705433894149e-05,
      "loss": 1.1315,
      "step": 64
    },
    {
      "epoch": 0.02340655383507382,
      "grad_norm": 0.3204285800457001,
      "learning_rate": 6.456850850758673e-05,
      "loss": 1.1024,
      "step": 65
    },
    {
      "epoch": 0.023766654663305724,
      "grad_norm": 0.32156258821487427,
      "learning_rate": 6.404324589132101e-05,
      "loss": 1.1587,
      "step": 66
    },
    {
      "epoch": 0.02412675549153763,
      "grad_norm": 0.3117810785770416,
      "learning_rate": 6.351141009169893e-05,
      "loss": 1.0853,
      "step": 67
    },
    {
      "epoch": 0.024486856319769535,
      "grad_norm": 0.32040199637413025,
      "learning_rate": 6.297314650731775e-05,
      "loss": 1.1984,
      "step": 68
    },
    {
      "epoch": 0.02484695714800144,
      "grad_norm": 0.3659551441669464,
      "learning_rate": 6.242860229406692e-05,
      "loss": 1.117,
      "step": 69
    },
    {
      "epoch": 0.025207057976233346,
      "grad_norm": 0.3537808060646057,
      "learning_rate": 6.18779263248971e-05,
      "loss": 1.2022,
      "step": 70
    },
    {
      "epoch": 0.02556715880446525,
      "grad_norm": 0.33273327350616455,
      "learning_rate": 6.132126914911976e-05,
      "loss": 1.2369,
      "step": 71
    },
    {
      "epoch": 0.025927259632697156,
      "grad_norm": 0.3848983943462372,
      "learning_rate": 6.075878295124861e-05,
      "loss": 1.6059,
      "step": 72
    },
    {
      "epoch": 0.02628736046092906,
      "grad_norm": 0.3831624686717987,
      "learning_rate": 6.019062150939376e-05,
      "loss": 1.381,
      "step": 73
    },
    {
      "epoch": 0.026647461289160967,
      "grad_norm": 0.3995639979839325,
      "learning_rate": 5.9616940153220336e-05,
      "loss": 1.3304,
      "step": 74
    },
    {
      "epoch": 0.02700756211739287,
      "grad_norm": 0.439020037651062,
      "learning_rate": 5.903789572148295e-05,
      "loss": 1.4991,
      "step": 75
    },
    {
      "epoch": 0.027367662945624774,
      "grad_norm": 0.38694366812705994,
      "learning_rate": 5.845364651914752e-05,
      "loss": 1.3432,
      "step": 76
    },
    {
      "epoch": 0.02772776377385668,
      "grad_norm": 0.4259042739868164,
      "learning_rate": 5.786435227411227e-05,
      "loss": 1.4677,
      "step": 77
    },
    {
      "epoch": 0.028087864602088584,
      "grad_norm": 0.42080992460250854,
      "learning_rate": 5.727017409353971e-05,
      "loss": 1.5115,
      "step": 78
    },
    {
      "epoch": 0.02844796543032049,
      "grad_norm": 0.4072776734828949,
      "learning_rate": 5.667127441981162e-05,
      "loss": 1.4107,
      "step": 79
    },
    {
      "epoch": 0.028808066258552395,
      "grad_norm": 0.43466779589653015,
      "learning_rate": 5.606781698611879e-05,
      "loss": 1.5465,
      "step": 80
    },
    {
      "epoch": 0.0291681670867843,
      "grad_norm": 0.4168336093425751,
      "learning_rate": 5.5459966771698096e-05,
      "loss": 1.6694,
      "step": 81
    },
    {
      "epoch": 0.029528267915016206,
      "grad_norm": 0.5250816345214844,
      "learning_rate": 5.4847889956728834e-05,
      "loss": 1.796,
      "step": 82
    },
    {
      "epoch": 0.02988836874324811,
      "grad_norm": 0.5515179634094238,
      "learning_rate": 5.423175387690067e-05,
      "loss": 1.6945,
      "step": 83
    },
    {
      "epoch": 0.030248469571480016,
      "grad_norm": 0.5818299651145935,
      "learning_rate": 5.361172697766573e-05,
      "loss": 1.634,
      "step": 84
    },
    {
      "epoch": 0.030608570399711918,
      "grad_norm": 0.5926039814949036,
      "learning_rate": 5.298797876818735e-05,
      "loss": 1.7657,
      "step": 85
    },
    {
      "epoch": 0.030968671227943823,
      "grad_norm": 0.6447036862373352,
      "learning_rate": 5.23606797749979e-05,
      "loss": 1.8414,
      "step": 86
    },
    {
      "epoch": 0.03132877205617573,
      "grad_norm": 0.574548602104187,
      "learning_rate": 5.17300014953786e-05,
      "loss": 1.8444,
      "step": 87
    },
    {
      "epoch": 0.03168887288440764,
      "grad_norm": 0.645301103591919,
      "learning_rate": 5.109611635047379e-05,
      "loss": 1.7747,
      "step": 88
    },
    {
      "epoch": 0.03204897371263954,
      "grad_norm": 0.6297435760498047,
      "learning_rate": 5.04591976381528e-05,
      "loss": 1.9292,
      "step": 89
    },
    {
      "epoch": 0.03240907454087144,
      "grad_norm": 0.6943683624267578,
      "learning_rate": 4.981941948563197e-05,
      "loss": 1.8509,
      "step": 90
    },
    {
      "epoch": 0.03276917536910335,
      "grad_norm": 0.7677454352378845,
      "learning_rate": 4.9176956801870065e-05,
      "loss": 1.7289,
      "step": 91
    },
    {
      "epoch": 0.03312927619733525,
      "grad_norm": 0.846775233745575,
      "learning_rate": 4.853198522974988e-05,
      "loss": 1.8011,
      "step": 92
    },
    {
      "epoch": 0.03348937702556716,
      "grad_norm": 0.9709502458572388,
      "learning_rate": 4.788468109805921e-05,
      "loss": 2.0327,
      "step": 93
    },
    {
      "epoch": 0.03384947785379906,
      "grad_norm": 0.9475632309913635,
      "learning_rate": 4.7235221373284407e-05,
      "loss": 1.8737,
      "step": 94
    },
    {
      "epoch": 0.03420957868203097,
      "grad_norm": 0.979775607585907,
      "learning_rate": 4.658378361122936e-05,
      "loss": 1.7808,
      "step": 95
    },
    {
      "epoch": 0.03456967951026287,
      "grad_norm": 1.1734482049942017,
      "learning_rate": 4.593054590847368e-05,
      "loss": 1.9409,
      "step": 96
    },
    {
      "epoch": 0.03492978033849478,
      "grad_norm": 1.2772958278656006,
      "learning_rate": 4.5275686853682765e-05,
      "loss": 1.9487,
      "step": 97
    },
    {
      "epoch": 0.03528988116672668,
      "grad_norm": 1.4188159704208374,
      "learning_rate": 4.4619385478783456e-05,
      "loss": 1.9205,
      "step": 98
    },
    {
      "epoch": 0.035649981994958585,
      "grad_norm": 1.8587150573730469,
      "learning_rate": 4.396182121001852e-05,
      "loss": 2.1834,
      "step": 99
    },
    {
      "epoch": 0.036010082823190494,
      "grad_norm": 3.5931663513183594,
      "learning_rate": 4.33031738188933e-05,
      "loss": 2.2956,
      "step": 100
    },
    {
      "epoch": 0.036010082823190494,
      "eval_loss": 1.4850772619247437,
      "eval_runtime": 353.549,
      "eval_samples_per_second": 13.229,
      "eval_steps_per_second": 3.309,
      "step": 100
    },
    {
      "epoch": 0.036370183651422396,
      "grad_norm": 0.31662315130233765,
      "learning_rate": 4.264362337302798e-05,
      "loss": 1.0832,
      "step": 101
    },
    {
      "epoch": 0.036730284479654304,
      "grad_norm": 0.35080909729003906,
      "learning_rate": 4.1983350186928894e-05,
      "loss": 0.964,
      "step": 102
    },
    {
      "epoch": 0.037090385307886206,
      "grad_norm": 0.36373916268348694,
      "learning_rate": 4.132253477269233e-05,
      "loss": 0.9958,
      "step": 103
    },
    {
      "epoch": 0.037450486136118115,
      "grad_norm": 0.4141411781311035,
      "learning_rate": 4.0661357790654345e-05,
      "loss": 1.0403,
      "step": 104
    },
    {
      "epoch": 0.03781058696435002,
      "grad_norm": 0.42372411489486694,
      "learning_rate": 4e-05,
      "loss": 1.0866,
      "step": 105
    },
    {
      "epoch": 0.038170687792581925,
      "grad_norm": 0.48200201988220215,
      "learning_rate": 3.933864220934566e-05,
      "loss": 1.0786,
      "step": 106
    },
    {
      "epoch": 0.03853078862081383,
      "grad_norm": 0.4316590130329132,
      "learning_rate": 3.8677465227307676e-05,
      "loss": 0.9489,
      "step": 107
    },
    {
      "epoch": 0.038890889449045736,
      "grad_norm": 0.40134397149086,
      "learning_rate": 3.8016649813071106e-05,
      "loss": 1.0985,
      "step": 108
    },
    {
      "epoch": 0.03925099027727764,
      "grad_norm": 0.43675699830055237,
      "learning_rate": 3.735637662697203e-05,
      "loss": 1.0101,
      "step": 109
    },
    {
      "epoch": 0.03961109110550954,
      "grad_norm": 0.38096266984939575,
      "learning_rate": 3.669682618110671e-05,
      "loss": 1.0738,
      "step": 110
    },
    {
      "epoch": 0.03997119193374145,
      "grad_norm": 0.31821751594543457,
      "learning_rate": 3.6038178789981494e-05,
      "loss": 1.0365,
      "step": 111
    },
    {
      "epoch": 0.04033129276197335,
      "grad_norm": 0.35773828625679016,
      "learning_rate": 3.538061452121656e-05,
      "loss": 0.9939,
      "step": 112
    },
    {
      "epoch": 0.04069139359020526,
      "grad_norm": 0.33209124207496643,
      "learning_rate": 3.472431314631724e-05,
      "loss": 0.979,
      "step": 113
    },
    {
      "epoch": 0.04105149441843716,
      "grad_norm": 0.3005915582180023,
      "learning_rate": 3.406945409152632e-05,
      "loss": 1.0878,
      "step": 114
    },
    {
      "epoch": 0.04141159524666907,
      "grad_norm": 0.2932342290878296,
      "learning_rate": 3.341621638877064e-05,
      "loss": 1.0154,
      "step": 115
    },
    {
      "epoch": 0.04177169607490097,
      "grad_norm": 0.3328295052051544,
      "learning_rate": 3.276477862671562e-05,
      "loss": 1.156,
      "step": 116
    },
    {
      "epoch": 0.04213179690313288,
      "grad_norm": 0.34108060598373413,
      "learning_rate": 3.21153189019408e-05,
      "loss": 1.3088,
      "step": 117
    },
    {
      "epoch": 0.04249189773136478,
      "grad_norm": 0.319013774394989,
      "learning_rate": 3.146801477025013e-05,
      "loss": 1.3014,
      "step": 118
    },
    {
      "epoch": 0.042851998559596684,
      "grad_norm": 0.3462431728839874,
      "learning_rate": 3.082304319812994e-05,
      "loss": 1.3673,
      "step": 119
    },
    {
      "epoch": 0.04321209938782859,
      "grad_norm": 0.34266746044158936,
      "learning_rate": 3.0180580514368037e-05,
      "loss": 1.4107,
      "step": 120
    },
    {
      "epoch": 0.043572200216060494,
      "grad_norm": 0.31460511684417725,
      "learning_rate": 2.9540802361847212e-05,
      "loss": 1.1764,
      "step": 121
    },
    {
      "epoch": 0.0439323010442924,
      "grad_norm": 0.31650468707084656,
      "learning_rate": 2.890388364952623e-05,
      "loss": 1.1812,
      "step": 122
    },
    {
      "epoch": 0.044292401872524305,
      "grad_norm": 0.34145647287368774,
      "learning_rate": 2.8269998504621416e-05,
      "loss": 1.368,
      "step": 123
    },
    {
      "epoch": 0.044652502700756214,
      "grad_norm": 0.3476177155971527,
      "learning_rate": 2.7639320225002108e-05,
      "loss": 1.188,
      "step": 124
    },
    {
      "epoch": 0.045012603528988115,
      "grad_norm": 0.4023088812828064,
      "learning_rate": 2.7012021231812666e-05,
      "loss": 1.5084,
      "step": 125
    },
    {
      "epoch": 0.045372704357220024,
      "grad_norm": 0.39794808626174927,
      "learning_rate": 2.638827302233428e-05,
      "loss": 1.5975,
      "step": 126
    },
    {
      "epoch": 0.045732805185451926,
      "grad_norm": 0.46040594577789307,
      "learning_rate": 2.576824612309934e-05,
      "loss": 1.5714,
      "step": 127
    },
    {
      "epoch": 0.046092906013683835,
      "grad_norm": 0.4382641017436981,
      "learning_rate": 2.5152110043271166e-05,
      "loss": 1.575,
      "step": 128
    },
    {
      "epoch": 0.04645300684191574,
      "grad_norm": 0.4103611409664154,
      "learning_rate": 2.454003322830192e-05,
      "loss": 1.5312,
      "step": 129
    },
    {
      "epoch": 0.04681310767014764,
      "grad_norm": 0.443014919757843,
      "learning_rate": 2.393218301388123e-05,
      "loss": 1.3191,
      "step": 130
    },
    {
      "epoch": 0.04717320849837955,
      "grad_norm": 0.4959178864955902,
      "learning_rate": 2.3328725580188395e-05,
      "loss": 1.5888,
      "step": 131
    },
    {
      "epoch": 0.04753330932661145,
      "grad_norm": 0.5022261738777161,
      "learning_rate": 2.272982590646029e-05,
      "loss": 1.5632,
      "step": 132
    },
    {
      "epoch": 0.04789341015484336,
      "grad_norm": 0.45459896326065063,
      "learning_rate": 2.2135647725887744e-05,
      "loss": 1.5,
      "step": 133
    },
    {
      "epoch": 0.04825351098307526,
      "grad_norm": 0.504411518573761,
      "learning_rate": 2.1546353480852495e-05,
      "loss": 1.6626,
      "step": 134
    },
    {
      "epoch": 0.04861361181130717,
      "grad_norm": 0.5704218149185181,
      "learning_rate": 2.096210427851706e-05,
      "loss": 1.9689,
      "step": 135
    },
    {
      "epoch": 0.04897371263953907,
      "grad_norm": 0.5692024230957031,
      "learning_rate": 2.038305984677969e-05,
      "loss": 1.5937,
      "step": 136
    },
    {
      "epoch": 0.04933381346777098,
      "grad_norm": 0.560541033744812,
      "learning_rate": 1.9809378490606264e-05,
      "loss": 1.7188,
      "step": 137
    },
    {
      "epoch": 0.04969391429600288,
      "grad_norm": 0.5904568433761597,
      "learning_rate": 1.9241217048751406e-05,
      "loss": 1.6569,
      "step": 138
    },
    {
      "epoch": 0.05005401512423478,
      "grad_norm": 0.6273323893547058,
      "learning_rate": 1.867873085088026e-05,
      "loss": 1.9395,
      "step": 139
    },
    {
      "epoch": 0.05041411595246669,
      "grad_norm": 0.7930026650428772,
      "learning_rate": 1.8122073675102935e-05,
      "loss": 1.866,
      "step": 140
    },
    {
      "epoch": 0.05077421678069859,
      "grad_norm": 0.7339215278625488,
      "learning_rate": 1.75713977059331e-05,
      "loss": 1.7738,
      "step": 141
    },
    {
      "epoch": 0.0511343176089305,
      "grad_norm": 0.7646173238754272,
      "learning_rate": 1.702685349268226e-05,
      "loss": 1.8332,
      "step": 142
    },
    {
      "epoch": 0.051494418437162404,
      "grad_norm": 0.8946234583854675,
      "learning_rate": 1.648858990830108e-05,
      "loss": 1.9375,
      "step": 143
    },
    {
      "epoch": 0.05185451926539431,
      "grad_norm": 1.015981912612915,
      "learning_rate": 1.5956754108678996e-05,
      "loss": 2.0351,
      "step": 144
    },
    {
      "epoch": 0.052214620093626214,
      "grad_norm": 1.0599825382232666,
      "learning_rate": 1.5431491492413288e-05,
      "loss": 1.959,
      "step": 145
    },
    {
      "epoch": 0.05257472092185812,
      "grad_norm": 1.3140443563461304,
      "learning_rate": 1.491294566105852e-05,
      "loss": 1.9238,
      "step": 146
    },
    {
      "epoch": 0.052934821750090025,
      "grad_norm": 1.3724371194839478,
      "learning_rate": 1.4401258379867335e-05,
      "loss": 1.8381,
      "step": 147
    },
    {
      "epoch": 0.053294922578321934,
      "grad_norm": 1.6155120134353638,
      "learning_rate": 1.3896569539033253e-05,
      "loss": 1.9153,
      "step": 148
    },
    {
      "epoch": 0.053655023406553835,
      "grad_norm": 1.8847979307174683,
      "learning_rate": 1.3399017115446067e-05,
      "loss": 2.0038,
      "step": 149
    },
    {
      "epoch": 0.05401512423478574,
      "grad_norm": 2.680107831954956,
      "learning_rate": 1.2908737134970367e-05,
      "loss": 2.086,
      "step": 150
    },
    {
      "epoch": 0.05401512423478574,
      "eval_loss": 1.4379560947418213,
      "eval_runtime": 352.6306,
      "eval_samples_per_second": 13.263,
      "eval_steps_per_second": 3.318,
      "step": 150
    },
    {
      "epoch": 0.054375225063017646,
      "grad_norm": 0.17845895886421204,
      "learning_rate": 1.242586363525737e-05,
      "loss": 1.2068,
      "step": 151
    },
    {
      "epoch": 0.05473532589124955,
      "grad_norm": 0.182268425822258,
      "learning_rate": 1.1950528629100457e-05,
      "loss": 0.9337,
      "step": 152
    },
    {
      "epoch": 0.055095426719481456,
      "grad_norm": 0.2000552862882614,
      "learning_rate": 1.1482862068344121e-05,
      "loss": 1.063,
      "step": 153
    },
    {
      "epoch": 0.05545552754771336,
      "grad_norm": 0.24774210155010223,
      "learning_rate": 1.1022991808356442e-05,
      "loss": 0.9987,
      "step": 154
    },
    {
      "epoch": 0.05581562837594527,
      "grad_norm": 0.24467074871063232,
      "learning_rate": 1.0571043573074737e-05,
      "loss": 1.0237,
      "step": 155
    },
    {
      "epoch": 0.05617572920417717,
      "grad_norm": 0.26388347148895264,
      "learning_rate": 1.0127140920633857e-05,
      "loss": 1.094,
      "step": 156
    },
    {
      "epoch": 0.05653583003240908,
      "grad_norm": 0.26566648483276367,
      "learning_rate": 9.69140520958662e-06,
      "loss": 0.9359,
      "step": 157
    },
    {
      "epoch": 0.05689593086064098,
      "grad_norm": 0.2803097665309906,
      "learning_rate": 9.263955565725648e-06,
      "loss": 0.9668,
      "step": 158
    },
    {
      "epoch": 0.05725603168887288,
      "grad_norm": 0.3015349805355072,
      "learning_rate": 8.844908849515509e-06,
      "loss": 0.9699,
      "step": 159
    },
    {
      "epoch": 0.05761613251710479,
      "grad_norm": 0.32080352306365967,
      "learning_rate": 8.434379624144261e-06,
      "loss": 1.0425,
      "step": 160
    },
    {
      "epoch": 0.05797623334533669,
      "grad_norm": 0.3093397617340088,
      "learning_rate": 8.032480124203013e-06,
      "loss": 1.0277,
      "step": 161
    },
    {
      "epoch": 0.0583363341735686,
      "grad_norm": 0.35267412662506104,
      "learning_rate": 7.639320225002106e-06,
      "loss": 1.0362,
      "step": 162
    },
    {
      "epoch": 0.0586964350018005,
      "grad_norm": 0.3147299885749817,
      "learning_rate": 7.255007412532307e-06,
      "loss": 1.2137,
      "step": 163
    },
    {
      "epoch": 0.05905653583003241,
      "grad_norm": 0.3240807354450226,
      "learning_rate": 6.8796467540791986e-06,
      "loss": 1.1423,
      "step": 164
    },
    {
      "epoch": 0.05941663665826431,
      "grad_norm": 0.34494078159332275,
      "learning_rate": 6.513340869498859e-06,
      "loss": 1.0648,
      "step": 165
    },
    {
      "epoch": 0.05977673748649622,
      "grad_norm": 0.344433456659317,
      "learning_rate": 6.1561899031625794e-06,
      "loss": 1.1728,
      "step": 166
    },
    {
      "epoch": 0.060136838314728124,
      "grad_norm": 0.3598881959915161,
      "learning_rate": 5.808291496578435e-06,
      "loss": 1.243,
      "step": 167
    },
    {
      "epoch": 0.06049693914296003,
      "grad_norm": 0.32712891697883606,
      "learning_rate": 5.469740761697044e-06,
      "loss": 1.1395,
      "step": 168
    },
    {
      "epoch": 0.060857039971191934,
      "grad_norm": 0.3846420347690582,
      "learning_rate": 5.140630254908905e-06,
      "loss": 1.228,
      "step": 169
    },
    {
      "epoch": 0.061217140799423836,
      "grad_norm": 0.38621753454208374,
      "learning_rate": 4.821049951740442e-06,
      "loss": 1.2447,
      "step": 170
    },
    {
      "epoch": 0.061577241627655745,
      "grad_norm": 0.3678813874721527,
      "learning_rate": 4.511087222255528e-06,
      "loss": 1.4654,
      "step": 171
    },
    {
      "epoch": 0.061937342455887646,
      "grad_norm": 0.42819997668266296,
      "learning_rate": 4.2108268071694616e-06,
      "loss": 1.346,
      "step": 172
    },
    {
      "epoch": 0.062297443284119555,
      "grad_norm": 0.4565679430961609,
      "learning_rate": 3.9203507946816445e-06,
      "loss": 1.5151,
      "step": 173
    },
    {
      "epoch": 0.06265754411235146,
      "grad_norm": 0.45869430899620056,
      "learning_rate": 3.6397385980335e-06,
      "loss": 1.5592,
      "step": 174
    },
    {
      "epoch": 0.06301764494058336,
      "grad_norm": 0.4956825375556946,
      "learning_rate": 3.3690669337977e-06,
      "loss": 1.492,
      "step": 175
    },
    {
      "epoch": 0.06337774576881527,
      "grad_norm": 0.485346257686615,
      "learning_rate": 3.1084098009046106e-06,
      "loss": 1.3435,
      "step": 176
    },
    {
      "epoch": 0.06373784659704718,
      "grad_norm": 0.48392120003700256,
      "learning_rate": 2.8578384604117217e-06,
      "loss": 1.2057,
      "step": 177
    },
    {
      "epoch": 0.06409794742527908,
      "grad_norm": 0.549443781375885,
      "learning_rate": 2.6174214160215704e-06,
      "loss": 1.5785,
      "step": 178
    },
    {
      "epoch": 0.06445804825351098,
      "grad_norm": 0.6267411112785339,
      "learning_rate": 2.3872243953535535e-06,
      "loss": 1.4255,
      "step": 179
    },
    {
      "epoch": 0.06481814908174288,
      "grad_norm": 0.6748881936073303,
      "learning_rate": 2.1673103319746146e-06,
      "loss": 1.5805,
      "step": 180
    },
    {
      "epoch": 0.0651782499099748,
      "grad_norm": 0.5772015452384949,
      "learning_rate": 1.957739348193859e-06,
      "loss": 1.5119,
      "step": 181
    },
    {
      "epoch": 0.0655383507382067,
      "grad_norm": 0.6203552484512329,
      "learning_rate": 1.7585687386256944e-06,
      "loss": 1.8784,
      "step": 182
    },
    {
      "epoch": 0.0658984515664386,
      "grad_norm": 0.6191961169242859,
      "learning_rate": 1.5698529545260744e-06,
      "loss": 1.6006,
      "step": 183
    },
    {
      "epoch": 0.0662585523946705,
      "grad_norm": 0.6806838512420654,
      "learning_rate": 1.3916435889060575e-06,
      "loss": 1.8019,
      "step": 184
    },
    {
      "epoch": 0.06661865322290242,
      "grad_norm": 0.6513646841049194,
      "learning_rate": 1.2239893624267852e-06,
      "loss": 1.7764,
      "step": 185
    },
    {
      "epoch": 0.06697875405113432,
      "grad_norm": 0.7780475616455078,
      "learning_rate": 1.0669361100797704e-06,
      "loss": 1.6974,
      "step": 186
    },
    {
      "epoch": 0.06733885487936622,
      "grad_norm": 0.7961820363998413,
      "learning_rate": 9.205267686560293e-07,
      "loss": 1.7828,
      "step": 187
    },
    {
      "epoch": 0.06769895570759812,
      "grad_norm": 0.8191195726394653,
      "learning_rate": 7.848013650076258e-07,
      "loss": 1.6961,
      "step": 188
    },
    {
      "epoch": 0.06805905653583003,
      "grad_norm": 0.751133143901825,
      "learning_rate": 6.597970051047053e-07,
      "loss": 1.7882,
      "step": 189
    },
    {
      "epoch": 0.06841915736406194,
      "grad_norm": 1.0630866289138794,
      "learning_rate": 5.455478638911071e-07,
      "loss": 1.9139,
      "step": 190
    },
    {
      "epoch": 0.06877925819229384,
      "grad_norm": 0.9345506429672241,
      "learning_rate": 4.420851759412603e-07,
      "loss": 2.0675,
      "step": 191
    },
    {
      "epoch": 0.06913935902052575,
      "grad_norm": 1.0234652757644653,
      "learning_rate": 3.4943722692099224e-07,
      "loss": 1.7824,
      "step": 192
    },
    {
      "epoch": 0.06949945984875765,
      "grad_norm": 1.1317789554595947,
      "learning_rate": 2.676293458544743e-07,
      "loss": 2.1633,
      "step": 193
    },
    {
      "epoch": 0.06985956067698956,
      "grad_norm": 1.1043999195098877,
      "learning_rate": 1.9668389819954338e-07,
      "loss": 1.6105,
      "step": 194
    },
    {
      "epoch": 0.07021966150522146,
      "grad_norm": 0.9714563488960266,
      "learning_rate": 1.3662027973320614e-07,
      "loss": 1.9675,
      "step": 195
    },
    {
      "epoch": 0.07057976233345337,
      "grad_norm": 1.1052969694137573,
      "learning_rate": 8.745491124901861e-08,
      "loss": 1.7856,
      "step": 196
    },
    {
      "epoch": 0.07093986316168527,
      "grad_norm": 1.3346896171569824,
      "learning_rate": 4.920123406781052e-08,
      "loss": 2.0093,
      "step": 197
    },
    {
      "epoch": 0.07129996398991717,
      "grad_norm": 1.2368483543395996,
      "learning_rate": 2.1869706362958044e-08,
      "loss": 1.7745,
      "step": 198
    },
    {
      "epoch": 0.07166006481814909,
      "grad_norm": 1.3380067348480225,
      "learning_rate": 5.467800301239834e-09,
      "loss": 1.7581,
      "step": 199
    },
    {
      "epoch": 0.07202016564638099,
      "grad_norm": 1.7450910806655884,
      "learning_rate": 0.0,
      "loss": 1.7731,
      "step": 200
    },
    {
      "epoch": 0.07202016564638099,
      "eval_loss": 1.4315598011016846,
      "eval_runtime": 352.6531,
      "eval_samples_per_second": 13.262,
      "eval_steps_per_second": 3.318,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.7075605394200986e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|