{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.01655073207492277,
  "eval_steps": 500,
  "global_step": 256,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 6.465129716766708e-05,
      "grad_norm": 52.735591888427734,
      "learning_rate": 6.451612903225807e-08,
      "loss": 1.9276,
      "step": 1
    },
    {
      "epoch": 0.00012930259433533415,
      "grad_norm": 54.205406188964844,
      "learning_rate": 1.2903225806451614e-07,
      "loss": 1.8498,
      "step": 2
    },
    {
      "epoch": 0.00019395389150300124,
      "grad_norm": 55.422950744628906,
      "learning_rate": 1.935483870967742e-07,
      "loss": 2.0204,
      "step": 3
    },
    {
      "epoch": 0.0002586051886706683,
      "grad_norm": 52.57908248901367,
      "learning_rate": 2.580645161290323e-07,
      "loss": 1.9851,
      "step": 4
    },
    {
      "epoch": 0.0003232564858383354,
      "grad_norm": 53.102073669433594,
      "learning_rate": 3.2258064516129035e-07,
      "loss": 1.9575,
      "step": 5
    },
    {
      "epoch": 0.0003879077830060025,
      "grad_norm": 39.96001052856445,
      "learning_rate": 3.870967741935484e-07,
      "loss": 1.9575,
      "step": 6
    },
    {
      "epoch": 0.00045255908017366957,
      "grad_norm": 32.277435302734375,
      "learning_rate": 4.5161290322580644e-07,
      "loss": 1.8748,
      "step": 7
    },
    {
      "epoch": 0.0005172103773413366,
      "grad_norm": 35.69306945800781,
      "learning_rate": 5.161290322580646e-07,
      "loss": 1.8319,
      "step": 8
    },
    {
      "epoch": 0.0005818616745090037,
      "grad_norm": 32.267822265625,
      "learning_rate": 5.806451612903227e-07,
      "loss": 1.8492,
      "step": 9
    },
    {
      "epoch": 0.0006465129716766708,
      "grad_norm": 15.390063285827637,
      "learning_rate": 6.451612903225807e-07,
      "loss": 1.8046,
      "step": 10
    },
    {
      "epoch": 0.0007111642688443379,
      "grad_norm": 25.763904571533203,
      "learning_rate": 7.096774193548388e-07,
      "loss": 1.7649,
      "step": 11
    },
    {
      "epoch": 0.000775815566012005,
      "grad_norm": 29.1771297454834,
      "learning_rate": 7.741935483870968e-07,
      "loss": 1.7684,
      "step": 12
    },
    {
      "epoch": 0.000840466863179672,
      "grad_norm": 19.140987396240234,
      "learning_rate": 8.38709677419355e-07,
      "loss": 1.7782,
      "step": 13
    },
    {
      "epoch": 0.0009051181603473391,
      "grad_norm": 13.840346336364746,
      "learning_rate": 9.032258064516129e-07,
      "loss": 1.8303,
      "step": 14
    },
    {
      "epoch": 0.0009697694575150061,
      "grad_norm": 26.080778121948242,
      "learning_rate": 9.67741935483871e-07,
      "loss": 1.776,
      "step": 15
    },
    {
      "epoch": 0.0010344207546826732,
      "grad_norm": 34.16382598876953,
      "learning_rate": 1.0322580645161291e-06,
      "loss": 1.6359,
      "step": 16
    },
    {
      "epoch": 0.0010990720518503404,
      "grad_norm": 25.75543785095215,
      "learning_rate": 1.0967741935483872e-06,
      "loss": 1.6014,
      "step": 17
    },
    {
      "epoch": 0.0011637233490180074,
      "grad_norm": 11.885198593139648,
      "learning_rate": 1.1612903225806454e-06,
      "loss": 1.6831,
      "step": 18
    },
    {
      "epoch": 0.0012283746461856746,
      "grad_norm": 14.924345016479492,
      "learning_rate": 1.2258064516129033e-06,
      "loss": 1.7223,
      "step": 19
    },
    {
      "epoch": 0.0012930259433533416,
      "grad_norm": 18.01887321472168,
      "learning_rate": 1.2903225806451614e-06,
      "loss": 1.6811,
      "step": 20
    },
    {
      "epoch": 0.0013576772405210086,
      "grad_norm": 12.34964656829834,
      "learning_rate": 1.3548387096774195e-06,
      "loss": 1.6169,
      "step": 21
    },
    {
      "epoch": 0.0014223285376886757,
      "grad_norm": 11.162965774536133,
      "learning_rate": 1.4193548387096776e-06,
      "loss": 1.6814,
      "step": 22
    },
    {
      "epoch": 0.0014869798348563427,
      "grad_norm": 8.694445610046387,
      "learning_rate": 1.4838709677419356e-06,
      "loss": 1.5999,
      "step": 23
    },
    {
      "epoch": 0.00155163113202401,
      "grad_norm": 9.510068893432617,
      "learning_rate": 1.5483870967741937e-06,
      "loss": 1.6327,
      "step": 24
    },
    {
      "epoch": 0.001616282429191677,
      "grad_norm": 9.788911819458008,
      "learning_rate": 1.6129032258064516e-06,
      "loss": 1.6985,
      "step": 25
    },
    {
      "epoch": 0.001680933726359344,
      "grad_norm": 8.04429817199707,
      "learning_rate": 1.67741935483871e-06,
      "loss": 1.6618,
      "step": 26
    },
    {
      "epoch": 0.001745585023527011,
      "grad_norm": 7.545624256134033,
      "learning_rate": 1.7419354838709678e-06,
      "loss": 1.5472,
      "step": 27
    },
    {
      "epoch": 0.0018102363206946783,
      "grad_norm": 7.069305419921875,
      "learning_rate": 1.8064516129032258e-06,
      "loss": 1.643,
      "step": 28
    },
    {
      "epoch": 0.0018748876178623453,
      "grad_norm": 9.195314407348633,
      "learning_rate": 1.870967741935484e-06,
      "loss": 1.5956,
      "step": 29
    },
    {
      "epoch": 0.0019395389150300122,
      "grad_norm": 6.884894371032715,
      "learning_rate": 1.935483870967742e-06,
      "loss": 1.5752,
      "step": 30
    },
    {
      "epoch": 0.0020041902121976794,
      "grad_norm": 7.165380477905273,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.5812,
      "step": 31
    },
    {
      "epoch": 0.0020688415093653464,
      "grad_norm": 6.563286781311035,
      "learning_rate": 2.0645161290322582e-06,
      "loss": 1.5938,
      "step": 32
    },
    {
      "epoch": 0.0021334928065330134,
      "grad_norm": 5.998967170715332,
      "learning_rate": 2.129032258064516e-06,
      "loss": 1.5716,
      "step": 33
    },
    {
      "epoch": 0.002198144103700681,
      "grad_norm": 6.407791614532471,
      "learning_rate": 2.1935483870967745e-06,
      "loss": 1.5848,
      "step": 34
    },
    {
      "epoch": 0.002262795400868348,
      "grad_norm": 6.110191822052002,
      "learning_rate": 2.2580645161290324e-06,
      "loss": 1.643,
      "step": 35
    },
    {
      "epoch": 0.0023274466980360148,
      "grad_norm": 6.058048248291016,
      "learning_rate": 2.3225806451612907e-06,
      "loss": 1.5242,
      "step": 36
    },
    {
      "epoch": 0.0023920979952036818,
      "grad_norm": 7.348267078399658,
      "learning_rate": 2.3870967741935486e-06,
      "loss": 1.5112,
      "step": 37
    },
    {
      "epoch": 0.002456749292371349,
      "grad_norm": 6.676770210266113,
      "learning_rate": 2.4516129032258066e-06,
      "loss": 1.5184,
      "step": 38
    },
    {
      "epoch": 0.002521400589539016,
      "grad_norm": 9.655611038208008,
      "learning_rate": 2.5161290322580645e-06,
      "loss": 1.4867,
      "step": 39
    },
    {
      "epoch": 0.002586051886706683,
      "grad_norm": 10.357527732849121,
      "learning_rate": 2.580645161290323e-06,
      "loss": 1.5255,
      "step": 40
    },
    {
      "epoch": 0.00265070318387435,
      "grad_norm": 6.523240566253662,
      "learning_rate": 2.645161290322581e-06,
      "loss": 1.5326,
      "step": 41
    },
    {
      "epoch": 0.002715354481042017,
      "grad_norm": 5.7459282875061035,
      "learning_rate": 2.709677419354839e-06,
      "loss": 1.5533,
      "step": 42
    },
    {
      "epoch": 0.0027800057782096845,
      "grad_norm": 5.776258945465088,
      "learning_rate": 2.774193548387097e-06,
      "loss": 1.5261,
      "step": 43
    },
    {
      "epoch": 0.0028446570753773515,
      "grad_norm": 7.176516056060791,
      "learning_rate": 2.8387096774193553e-06,
      "loss": 1.4434,
      "step": 44
    },
    {
      "epoch": 0.0029093083725450185,
      "grad_norm": 6.083931922912598,
      "learning_rate": 2.903225806451613e-06,
      "loss": 1.5127,
      "step": 45
    },
    {
      "epoch": 0.0029739596697126855,
      "grad_norm": 8.212278366088867,
      "learning_rate": 2.967741935483871e-06,
      "loss": 1.569,
      "step": 46
    },
    {
      "epoch": 0.003038610966880353,
      "grad_norm": 6.138173580169678,
      "learning_rate": 3.0322580645161295e-06,
      "loss": 1.4985,
      "step": 47
    },
    {
      "epoch": 0.00310326226404802,
      "grad_norm": 9.2051362991333,
      "learning_rate": 3.0967741935483874e-06,
      "loss": 1.4528,
      "step": 48
    },
    {
      "epoch": 0.003167913561215687,
      "grad_norm": 9.6658353805542,
      "learning_rate": 3.1612903225806453e-06,
      "loss": 1.4432,
      "step": 49
    },
    {
      "epoch": 0.003232564858383354,
      "grad_norm": 6.775389671325684,
      "learning_rate": 3.225806451612903e-06,
      "loss": 1.4649,
      "step": 50
    },
    {
      "epoch": 0.003297216155551021,
      "grad_norm": 11.512835502624512,
      "learning_rate": 3.2903225806451615e-06,
      "loss": 1.4106,
      "step": 51
    },
    {
      "epoch": 0.003361867452718688,
      "grad_norm": 5.44810676574707,
      "learning_rate": 3.35483870967742e-06,
      "loss": 1.4714,
      "step": 52
    },
    {
      "epoch": 0.003426518749886355,
      "grad_norm": 6.4585185050964355,
      "learning_rate": 3.4193548387096773e-06,
      "loss": 1.5064,
      "step": 53
    },
    {
      "epoch": 0.003491170047054022,
      "grad_norm": 8.523478507995605,
      "learning_rate": 3.4838709677419357e-06,
      "loss": 1.3392,
      "step": 54
    },
    {
      "epoch": 0.003555821344221689,
      "grad_norm": 5.496897220611572,
      "learning_rate": 3.548387096774194e-06,
      "loss": 1.5616,
      "step": 55
    },
    {
      "epoch": 0.0036204726413893566,
      "grad_norm": 8.096810340881348,
      "learning_rate": 3.6129032258064515e-06,
      "loss": 1.4941,
      "step": 56
    },
    {
      "epoch": 0.0036851239385570236,
      "grad_norm": 6.6727705001831055,
      "learning_rate": 3.67741935483871e-06,
      "loss": 1.5466,
      "step": 57
    },
    {
      "epoch": 0.0037497752357246905,
      "grad_norm": 7.022820949554443,
      "learning_rate": 3.741935483870968e-06,
      "loss": 1.4893,
      "step": 58
    },
    {
      "epoch": 0.0038144265328923575,
      "grad_norm": 6.383000373840332,
      "learning_rate": 3.8064516129032257e-06,
      "loss": 1.4271,
      "step": 59
    },
    {
      "epoch": 0.0038790778300600245,
      "grad_norm": 5.412594318389893,
      "learning_rate": 3.870967741935484e-06,
      "loss": 1.5204,
      "step": 60
    },
    {
      "epoch": 0.003943729127227692,
      "grad_norm": 6.230757713317871,
      "learning_rate": 3.935483870967742e-06,
      "loss": 1.4508,
      "step": 61
    },
    {
      "epoch": 0.004008380424395359,
      "grad_norm": 7.456300258636475,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.4439,
      "step": 62
    },
    {
      "epoch": 0.004073031721563026,
      "grad_norm": 5.5062432289123535,
      "learning_rate": 4.064516129032259e-06,
      "loss": 1.4589,
      "step": 63
    },
    {
      "epoch": 0.004137683018730693,
      "grad_norm": 6.719184398651123,
      "learning_rate": 4.1290322580645165e-06,
      "loss": 1.4779,
      "step": 64
    },
    {
      "epoch": 0.00420233431589836,
      "grad_norm": 5.637788772583008,
      "learning_rate": 4.193548387096774e-06,
      "loss": 1.6323,
      "step": 65
    },
    {
      "epoch": 0.004266985613066027,
      "grad_norm": 5.063558578491211,
      "learning_rate": 4.258064516129032e-06,
      "loss": 1.5131,
      "step": 66
    },
    {
      "epoch": 0.004331636910233695,
      "grad_norm": 6.299156188964844,
      "learning_rate": 4.32258064516129e-06,
      "loss": 1.4999,
      "step": 67
    },
    {
      "epoch": 0.004396288207401362,
      "grad_norm": 5.770033836364746,
      "learning_rate": 4.387096774193549e-06,
      "loss": 1.3813,
      "step": 68
    },
    {
      "epoch": 0.004460939504569029,
      "grad_norm": 7.366373062133789,
      "learning_rate": 4.451612903225807e-06,
      "loss": 1.5073,
      "step": 69
    },
    {
      "epoch": 0.004525590801736696,
      "grad_norm": 5.583267688751221,
      "learning_rate": 4.516129032258065e-06,
      "loss": 1.4769,
      "step": 70
    },
    {
      "epoch": 0.004590242098904363,
      "grad_norm": 9.454716682434082,
      "learning_rate": 4.580645161290323e-06,
      "loss": 1.5203,
      "step": 71
    },
    {
      "epoch": 0.0046548933960720296,
      "grad_norm": 6.360778331756592,
      "learning_rate": 4.6451612903225815e-06,
      "loss": 1.4848,
      "step": 72
    },
    {
      "epoch": 0.0047195446932396965,
      "grad_norm": 4.729072093963623,
      "learning_rate": 4.7096774193548385e-06,
      "loss": 1.4475,
      "step": 73
    },
    {
      "epoch": 0.0047841959904073635,
      "grad_norm": 6.975382328033447,
      "learning_rate": 4.774193548387097e-06,
      "loss": 1.5431,
      "step": 74
    },
    {
      "epoch": 0.0048488472875750305,
      "grad_norm": 6.034017562866211,
      "learning_rate": 4.838709677419355e-06,
      "loss": 1.4028,
      "step": 75
    },
    {
      "epoch": 0.004913498584742698,
      "grad_norm": 5.913821697235107,
      "learning_rate": 4.903225806451613e-06,
      "loss": 1.4364,
      "step": 76
    },
    {
      "epoch": 0.004978149881910365,
      "grad_norm": 9.405481338500977,
      "learning_rate": 4.967741935483871e-06,
      "loss": 1.5207,
      "step": 77
    },
    {
      "epoch": 0.005042801179078032,
      "grad_norm": 6.983378887176514,
      "learning_rate": 5.032258064516129e-06,
      "loss": 1.4487,
      "step": 78
    },
    {
      "epoch": 0.005107452476245699,
      "grad_norm": 5.04365348815918,
      "learning_rate": 5.096774193548387e-06,
      "loss": 1.4282,
      "step": 79
    },
    {
      "epoch": 0.005172103773413366,
      "grad_norm": 7.815653324127197,
      "learning_rate": 5.161290322580646e-06,
      "loss": 1.3231,
      "step": 80
    },
    {
      "epoch": 0.005236755070581033,
      "grad_norm": 6.0287041664123535,
      "learning_rate": 5.2258064516129035e-06,
      "loss": 1.4106,
      "step": 81
    },
    {
      "epoch": 0.0053014063677487,
      "grad_norm": 5.727312088012695,
      "learning_rate": 5.290322580645162e-06,
      "loss": 1.52,
      "step": 82
    },
    {
      "epoch": 0.005366057664916367,
      "grad_norm": 4.75112771987915,
      "learning_rate": 5.35483870967742e-06,
      "loss": 1.434,
      "step": 83
    },
    {
      "epoch": 0.005430708962084034,
      "grad_norm": 5.614027500152588,
      "learning_rate": 5.419354838709678e-06,
      "loss": 1.4501,
      "step": 84
    },
    {
      "epoch": 0.005495360259251702,
      "grad_norm": 6.246868133544922,
      "learning_rate": 5.483870967741935e-06,
      "loss": 1.4065,
      "step": 85
    },
    {
      "epoch": 0.005560011556419369,
      "grad_norm": 4.8930559158325195,
      "learning_rate": 5.548387096774194e-06,
      "loss": 1.4059,
      "step": 86
    },
    {
      "epoch": 0.005624662853587036,
      "grad_norm": 9.081551551818848,
      "learning_rate": 5.612903225806452e-06,
      "loss": 1.4045,
      "step": 87
    },
    {
      "epoch": 0.005689314150754703,
      "grad_norm": 6.593941688537598,
      "learning_rate": 5.677419354838711e-06,
      "loss": 1.4229,
      "step": 88
    },
    {
      "epoch": 0.00575396544792237,
      "grad_norm": 4.863624095916748,
      "learning_rate": 5.7419354838709685e-06,
      "loss": 1.4073,
      "step": 89
    },
    {
      "epoch": 0.005818616745090037,
      "grad_norm": 5.167389392852783,
      "learning_rate": 5.806451612903226e-06,
      "loss": 1.5046,
      "step": 90
    },
    {
      "epoch": 0.005883268042257704,
      "grad_norm": 4.816722869873047,
      "learning_rate": 5.8709677419354835e-06,
      "loss": 1.4358,
      "step": 91
    },
    {
      "epoch": 0.005947919339425371,
      "grad_norm": 5.505555629730225,
      "learning_rate": 5.935483870967742e-06,
      "loss": 1.5727,
      "step": 92
    },
    {
      "epoch": 0.006012570636593038,
      "grad_norm": 5.764698505401611,
      "learning_rate": 6e-06,
      "loss": 1.4799,
      "step": 93
    },
    {
      "epoch": 0.006077221933760706,
      "grad_norm": 5.312406063079834,
      "learning_rate": 6.064516129032259e-06,
      "loss": 1.5233,
      "step": 94
    },
    {
      "epoch": 0.006141873230928373,
      "grad_norm": 6.0215253829956055,
      "learning_rate": 6.129032258064517e-06,
      "loss": 1.3764,
      "step": 95
    },
    {
      "epoch": 0.00620652452809604,
      "grad_norm": 6.582176208496094,
      "learning_rate": 6.193548387096775e-06,
      "loss": 1.3687,
      "step": 96
    },
    {
      "epoch": 0.006271175825263707,
      "grad_norm": 4.946031093597412,
      "learning_rate": 6.2580645161290335e-06,
      "loss": 1.4276,
      "step": 97
    },
    {
      "epoch": 0.006335827122431374,
      "grad_norm": 5.586654186248779,
      "learning_rate": 6.3225806451612906e-06,
      "loss": 1.4366,
      "step": 98
    },
    {
      "epoch": 0.006400478419599041,
      "grad_norm": 6.844956874847412,
      "learning_rate": 6.3870967741935485e-06,
      "loss": 1.3953,
      "step": 99
    },
    {
      "epoch": 0.006465129716766708,
      "grad_norm": 5.709033012390137,
      "learning_rate": 6.451612903225806e-06,
      "loss": 1.441,
      "step": 100
    },
    {
      "epoch": 0.006529781013934375,
      "grad_norm": 5.266170501708984,
      "learning_rate": 6.516129032258065e-06,
      "loss": 1.5336,
      "step": 101
    },
    {
      "epoch": 0.006594432311102042,
      "grad_norm": 5.025949954986572,
      "learning_rate": 6.580645161290323e-06,
      "loss": 1.3602,
      "step": 102
    },
    {
      "epoch": 0.0066590836082697094,
      "grad_norm": 6.4935126304626465,
      "learning_rate": 6.645161290322582e-06,
      "loss": 1.4612,
      "step": 103
    },
    {
      "epoch": 0.006723734905437376,
      "grad_norm": 5.3272881507873535,
      "learning_rate": 6.70967741935484e-06,
      "loss": 1.4056,
      "step": 104
    },
    {
      "epoch": 0.006788386202605043,
      "grad_norm": 5.2499213218688965,
      "learning_rate": 6.774193548387097e-06,
      "loss": 1.4908,
      "step": 105
    },
    {
      "epoch": 0.00685303749977271,
      "grad_norm": 5.568053722381592,
      "learning_rate": 6.838709677419355e-06,
      "loss": 1.3711,
      "step": 106
    },
    {
      "epoch": 0.006917688796940377,
      "grad_norm": 5.129876613616943,
      "learning_rate": 6.9032258064516135e-06,
      "loss": 1.3819,
      "step": 107
    },
    {
      "epoch": 0.006982340094108044,
      "grad_norm": 6.102237224578857,
      "learning_rate": 6.967741935483871e-06,
      "loss": 1.4534,
      "step": 108
    },
    {
      "epoch": 0.007046991391275711,
      "grad_norm": 6.690826892852783,
      "learning_rate": 7.03225806451613e-06,
      "loss": 1.3944,
      "step": 109
    },
    {
      "epoch": 0.007111642688443378,
      "grad_norm": 4.548076629638672,
      "learning_rate": 7.096774193548388e-06,
      "loss": 1.5244,
      "step": 110
    },
    {
      "epoch": 0.007176293985611045,
      "grad_norm": 4.901518821716309,
      "learning_rate": 7.161290322580646e-06,
      "loss": 1.4677,
      "step": 111
    },
    {
      "epoch": 0.007240945282778713,
      "grad_norm": 5.2007155418396,
      "learning_rate": 7.225806451612903e-06,
      "loss": 1.4194,
      "step": 112
    },
    {
      "epoch": 0.00730559657994638,
      "grad_norm": 5.50141716003418,
      "learning_rate": 7.290322580645162e-06,
      "loss": 1.4837,
      "step": 113
    },
    {
      "epoch": 0.007370247877114047,
      "grad_norm": 5.740527153015137,
      "learning_rate": 7.35483870967742e-06,
      "loss": 1.5181,
      "step": 114
    },
    {
      "epoch": 0.007434899174281714,
      "grad_norm": 6.510746002197266,
      "learning_rate": 7.4193548387096784e-06,
      "loss": 1.3614,
      "step": 115
    },
    {
      "epoch": 0.007499550471449381,
      "grad_norm": 6.607003211975098,
      "learning_rate": 7.483870967741936e-06,
      "loss": 1.4396,
      "step": 116
    },
    {
      "epoch": 0.007564201768617048,
      "grad_norm": 6.828821182250977,
      "learning_rate": 7.548387096774194e-06,
      "loss": 1.4888,
      "step": 117
    },
    {
      "epoch": 0.007628853065784715,
      "grad_norm": 5.51243782043457,
      "learning_rate": 7.612903225806451e-06,
      "loss": 1.4666,
      "step": 118
    },
    {
      "epoch": 0.007693504362952382,
      "grad_norm": 5.797337532043457,
      "learning_rate": 7.67741935483871e-06,
      "loss": 1.3761,
      "step": 119
    },
    {
      "epoch": 0.007758155660120049,
      "grad_norm": 5.460038185119629,
      "learning_rate": 7.741935483870968e-06,
      "loss": 1.4361,
      "step": 120
    },
    {
      "epoch": 0.007822806957287716,
      "grad_norm": 5.366038799285889,
      "learning_rate": 7.806451612903227e-06,
      "loss": 1.4087,
      "step": 121
    },
    {
      "epoch": 0.007887458254455384,
      "grad_norm": 6.333535194396973,
      "learning_rate": 7.870967741935484e-06,
      "loss": 1.4527,
      "step": 122
    },
    {
      "epoch": 0.00795210955162305,
      "grad_norm": 5.908946514129639,
      "learning_rate": 7.935483870967743e-06,
      "loss": 1.4128,
      "step": 123
    },
    {
      "epoch": 0.008016760848790718,
      "grad_norm": 5.050029754638672,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.4311,
      "step": 124
    },
    {
      "epoch": 0.008081412145958386,
      "grad_norm": 6.2919816970825195,
      "learning_rate": 8.064516129032258e-06,
      "loss": 1.3892,
      "step": 125
    },
    {
      "epoch": 0.008146063443126052,
      "grad_norm": 5.789970874786377,
      "learning_rate": 8.129032258064517e-06,
      "loss": 1.4024,
      "step": 126
    },
    {
      "epoch": 0.00821071474029372,
      "grad_norm": 5.259674549102783,
      "learning_rate": 8.193548387096774e-06,
      "loss": 1.3323,
      "step": 127
    },
    {
      "epoch": 0.008275366037461386,
      "grad_norm": 5.545688152313232,
      "learning_rate": 8.258064516129033e-06,
      "loss": 1.332,
      "step": 128
    },
    {
      "epoch": 0.008340017334629054,
      "grad_norm": 6.5449676513671875,
      "learning_rate": 8.322580645161292e-06,
      "loss": 1.3696,
      "step": 129
    },
    {
      "epoch": 0.00840466863179672,
      "grad_norm": 4.6496806144714355,
      "learning_rate": 8.387096774193549e-06,
      "loss": 1.4208,
      "step": 130
    },
    {
      "epoch": 0.008469319928964388,
      "grad_norm": 5.263716220855713,
      "learning_rate": 8.451612903225808e-06,
      "loss": 1.3825,
      "step": 131
    },
    {
      "epoch": 0.008533971226132054,
      "grad_norm": 5.719542980194092,
      "learning_rate": 8.516129032258065e-06,
      "loss": 1.445,
      "step": 132
    },
    {
      "epoch": 0.008598622523299721,
      "grad_norm": 4.548699855804443,
      "learning_rate": 8.580645161290323e-06,
      "loss": 1.3844,
      "step": 133
    },
    {
      "epoch": 0.00866327382046739,
      "grad_norm": 4.696458339691162,
      "learning_rate": 8.64516129032258e-06,
      "loss": 1.3721,
      "step": 134
    },
    {
      "epoch": 0.008727925117635055,
      "grad_norm": 5.142691612243652,
      "learning_rate": 8.70967741935484e-06,
      "loss": 1.3952,
      "step": 135
    },
    {
      "epoch": 0.008792576414802723,
      "grad_norm": 4.870302677154541,
      "learning_rate": 8.774193548387098e-06,
      "loss": 1.462,
      "step": 136
    },
    {
      "epoch": 0.00885722771197039,
      "grad_norm": 5.207381248474121,
      "learning_rate": 8.838709677419357e-06,
      "loss": 1.4555,
      "step": 137
    },
    {
      "epoch": 0.008921879009138057,
      "grad_norm": 4.8738226890563965,
      "learning_rate": 8.903225806451614e-06,
      "loss": 1.3776,
      "step": 138
    },
    {
      "epoch": 0.008986530306305723,
      "grad_norm": 5.064065456390381,
      "learning_rate": 8.967741935483871e-06,
      "loss": 1.4316,
      "step": 139
    },
    {
      "epoch": 0.009051181603473391,
      "grad_norm": 5.246875286102295,
      "learning_rate": 9.03225806451613e-06,
      "loss": 1.5043,
      "step": 140
    },
    {
      "epoch": 0.009115832900641057,
      "grad_norm": 5.119837284088135,
      "learning_rate": 9.096774193548388e-06,
      "loss": 1.4734,
      "step": 141
    },
    {
      "epoch": 0.009180484197808725,
      "grad_norm": 6.316291332244873,
      "learning_rate": 9.161290322580645e-06,
      "loss": 1.4854,
      "step": 142
    },
    {
      "epoch": 0.009245135494976393,
      "grad_norm": 4.688175201416016,
      "learning_rate": 9.225806451612904e-06,
      "loss": 1.4162,
      "step": 143
    },
    {
      "epoch": 0.009309786792144059,
      "grad_norm": 5.010703086853027,
      "learning_rate": 9.290322580645163e-06,
      "loss": 1.3963,
      "step": 144
    },
    {
      "epoch": 0.009374438089311727,
      "grad_norm": 4.959914207458496,
      "learning_rate": 9.35483870967742e-06,
      "loss": 1.3585,
      "step": 145
    },
    {
      "epoch": 0.009439089386479393,
      "grad_norm": 4.275010585784912,
      "learning_rate": 9.419354838709677e-06,
      "loss": 1.4689,
      "step": 146
    },
    {
      "epoch": 0.009503740683647061,
      "grad_norm": 4.852663040161133,
      "learning_rate": 9.483870967741936e-06,
      "loss": 1.4583,
      "step": 147
    },
    {
      "epoch": 0.009568391980814727,
      "grad_norm": 4.935046195983887,
      "learning_rate": 9.548387096774195e-06,
      "loss": 1.4283,
      "step": 148
    },
    {
      "epoch": 0.009633043277982395,
      "grad_norm": 4.662842273712158,
      "learning_rate": 9.612903225806453e-06,
      "loss": 1.4022,
      "step": 149
    },
    {
      "epoch": 0.009697694575150061,
      "grad_norm": 6.32338285446167,
      "learning_rate": 9.67741935483871e-06,
      "loss": 1.4224,
      "step": 150
    },
    {
      "epoch": 0.009762345872317729,
      "grad_norm": 4.785338401794434,
      "learning_rate": 9.74193548387097e-06,
      "loss": 1.4596,
      "step": 151
    },
    {
      "epoch": 0.009826997169485397,
      "grad_norm": 4.482601642608643,
      "learning_rate": 9.806451612903226e-06,
      "loss": 1.4124,
      "step": 152
    },
    {
      "epoch": 0.009891648466653063,
      "grad_norm": 4.771188259124756,
      "learning_rate": 9.870967741935485e-06,
      "loss": 1.4576,
      "step": 153
    },
    {
      "epoch": 0.00995629976382073,
      "grad_norm": 4.497616291046143,
      "learning_rate": 9.935483870967742e-06,
      "loss": 1.4631,
      "step": 154
    },
    {
      "epoch": 0.010020951060988397,
      "grad_norm": 4.109068870544434,
      "learning_rate": 1e-05,
      "loss": 1.403,
      "step": 155
    },
    {
      "epoch": 0.010085602358156065,
      "grad_norm": 4.779379367828369,
      "learning_rate": 9.999999894761194e-06,
      "loss": 1.3295,
      "step": 156
    },
    {
      "epoch": 0.01015025365532373,
      "grad_norm": 4.87537145614624,
      "learning_rate": 9.999999579044782e-06,
      "loss": 1.3468,
      "step": 157
    },
    {
      "epoch": 0.010214904952491399,
      "grad_norm": 4.218890190124512,
      "learning_rate": 9.999999052850775e-06,
      "loss": 1.3863,
      "step": 158
    },
    {
      "epoch": 0.010279556249659065,
      "grad_norm": 5.191406726837158,
      "learning_rate": 9.999998316179195e-06,
      "loss": 1.4638,
      "step": 159
    },
    {
      "epoch": 0.010344207546826733,
      "grad_norm": 4.713414669036865,
      "learning_rate": 9.999997369030074e-06,
      "loss": 1.3629,
      "step": 160
    },
    {
      "epoch": 0.0104088588439944,
      "grad_norm": 4.200235366821289,
      "learning_rate": 9.999996211403454e-06,
      "loss": 1.4429,
      "step": 161
    },
    {
      "epoch": 0.010473510141162067,
      "grad_norm": 4.418542385101318,
      "learning_rate": 9.999994843299381e-06,
      "loss": 1.3971,
      "step": 162
    },
    {
      "epoch": 0.010538161438329734,
      "grad_norm": 4.672099590301514,
      "learning_rate": 9.999993264717911e-06,
      "loss": 1.3352,
      "step": 163
    },
    {
      "epoch": 0.0106028127354974,
      "grad_norm": 4.304332256317139,
      "learning_rate": 9.999991475659115e-06,
      "loss": 1.3116,
      "step": 164
    },
    {
      "epoch": 0.010667464032665068,
      "grad_norm": 4.34376335144043,
      "learning_rate": 9.999989476123067e-06,
      "loss": 1.3822,
      "step": 165
    },
    {
      "epoch": 0.010732115329832734,
      "grad_norm": 4.60739278793335,
      "learning_rate": 9.999987266109848e-06,
      "loss": 1.3655,
      "step": 166
    },
    {
      "epoch": 0.010796766627000402,
      "grad_norm": 6.895850658416748,
      "learning_rate": 9.999984845619553e-06,
      "loss": 1.3699,
      "step": 167
    },
    {
      "epoch": 0.010861417924168068,
      "grad_norm": 3.9377450942993164,
      "learning_rate": 9.999982214652286e-06,
      "loss": 1.4356,
      "step": 168
    },
    {
      "epoch": 0.010926069221335736,
      "grad_norm": 4.687614440917969,
      "learning_rate": 9.999979373208155e-06,
      "loss": 1.3916,
      "step": 169
    },
    {
      "epoch": 0.010990720518503404,
      "grad_norm": 4.686786651611328,
      "learning_rate": 9.99997632128728e-06,
      "loss": 1.3448,
      "step": 170
    },
    {
      "epoch": 0.01105537181567107,
      "grad_norm": 4.827507495880127,
      "learning_rate": 9.999973058889791e-06,
      "loss": 1.3514,
      "step": 171
    },
    {
      "epoch": 0.011120023112838738,
      "grad_norm": 4.3625617027282715,
      "learning_rate": 9.999969586015824e-06,
      "loss": 1.4232,
      "step": 172
    },
    {
      "epoch": 0.011184674410006404,
      "grad_norm": 4.434966087341309,
      "learning_rate": 9.999965902665524e-06,
      "loss": 1.4217,
      "step": 173
    },
    {
      "epoch": 0.011249325707174072,
      "grad_norm": 5.053067207336426,
      "learning_rate": 9.99996200883905e-06,
      "loss": 1.4709,
      "step": 174
    },
    {
      "epoch": 0.011313977004341738,
      "grad_norm": 4.309473514556885,
      "learning_rate": 9.999957904536562e-06,
      "loss": 1.3668,
      "step": 175
    },
    {
      "epoch": 0.011378628301509406,
      "grad_norm": 4.460648536682129,
      "learning_rate": 9.999953589758235e-06,
      "loss": 1.4512,
      "step": 176
    },
    {
      "epoch": 0.011443279598677072,
      "grad_norm": 4.450823783874512,
      "learning_rate": 9.99994906450425e-06,
      "loss": 1.2986,
      "step": 177
    },
    {
      "epoch": 0.01150793089584474,
      "grad_norm": 4.192991256713867,
      "learning_rate": 9.999944328774796e-06,
      "loss": 1.413,
      "step": 178
    },
    {
      "epoch": 0.011572582193012408,
      "grad_norm": 4.157929420471191,
      "learning_rate": 9.999939382570075e-06,
      "loss": 1.4509,
      "step": 179
    },
    {
      "epoch": 0.011637233490180074,
      "grad_norm": 3.6802518367767334,
      "learning_rate": 9.999934225890294e-06,
      "loss": 1.3918,
      "step": 180
    },
    {
      "epoch": 0.011701884787347742,
      "grad_norm": 5.291732311248779,
      "learning_rate": 9.999928858735668e-06,
      "loss": 1.4045,
      "step": 181
    },
    {
      "epoch": 0.011766536084515408,
      "grad_norm": 4.054734230041504,
      "learning_rate": 9.999923281106427e-06,
      "loss": 1.3101,
      "step": 182
    },
    {
      "epoch": 0.011831187381683076,
      "grad_norm": 4.11862325668335,
      "learning_rate": 9.999917493002803e-06,
      "loss": 1.4398,
      "step": 183
    },
    {
      "epoch": 0.011895838678850742,
      "grad_norm": 4.4882307052612305,
      "learning_rate": 9.999911494425041e-06,
      "loss": 1.3585,
      "step": 184
    },
    {
      "epoch": 0.01196048997601841,
      "grad_norm": 4.65657377243042,
      "learning_rate": 9.999905285373392e-06,
      "loss": 1.3673,
      "step": 185
    },
    {
      "epoch": 0.012025141273186076,
      "grad_norm": 3.629178047180176,
      "learning_rate": 9.999898865848119e-06,
      "loss": 1.4998,
      "step": 186
    },
    {
      "epoch": 0.012089792570353744,
      "grad_norm": 4.499670028686523,
      "learning_rate": 9.99989223584949e-06,
      "loss": 1.4083,
      "step": 187
    },
    {
      "epoch": 0.012154443867521411,
      "grad_norm": 4.342660427093506,
      "learning_rate": 9.999885395377788e-06,
      "loss": 1.3362,
      "step": 188
    },
    {
      "epoch": 0.012219095164689078,
      "grad_norm": 4.276759147644043,
      "learning_rate": 9.999878344433297e-06,
      "loss": 1.3912,
      "step": 189
    },
    {
      "epoch": 0.012283746461856745,
      "grad_norm": 3.7648189067840576,
      "learning_rate": 9.999871083016316e-06,
      "loss": 1.3583,
      "step": 190
    },
    {
      "epoch": 0.012348397759024412,
      "grad_norm": 4.836668491363525,
      "learning_rate": 9.999863611127149e-06,
      "loss": 1.3465,
      "step": 191
    },
    {
      "epoch": 0.01241304905619208,
      "grad_norm": 4.41436767578125,
      "learning_rate": 9.999855928766113e-06,
      "loss": 1.4314,
      "step": 192
    },
    {
      "epoch": 0.012477700353359746,
      "grad_norm": 3.9474074840545654,
      "learning_rate": 9.99984803593353e-06,
      "loss": 1.4435,
      "step": 193
    },
    {
      "epoch": 0.012542351650527413,
      "grad_norm": 4.373626232147217,
      "learning_rate": 9.999839932629732e-06,
      "loss": 1.3644,
      "step": 194
    },
    {
      "epoch": 0.01260700294769508,
      "grad_norm": 4.185675621032715,
      "learning_rate": 9.999831618855058e-06,
      "loss": 1.3399,
      "step": 195
    },
    {
      "epoch": 0.012671654244862747,
      "grad_norm": 4.8992109298706055,
      "learning_rate": 9.999823094609862e-06,
      "loss": 1.2623,
      "step": 196
    },
    {
      "epoch": 0.012736305542030415,
      "grad_norm": 4.317060470581055,
      "learning_rate": 9.999814359894501e-06,
      "loss": 1.5297,
      "step": 197
    },
    {
      "epoch": 0.012800956839198081,
      "grad_norm": 4.501911640167236,
      "learning_rate": 9.999805414709344e-06,
      "loss": 1.4305,
      "step": 198
    },
    {
      "epoch": 0.01286560813636575,
      "grad_norm": 4.288606643676758,
      "learning_rate": 9.999796259054765e-06,
      "loss": 1.4358,
      "step": 199
    },
    {
      "epoch": 0.012930259433533415,
      "grad_norm": 4.692774772644043,
      "learning_rate": 9.99978689293115e-06,
      "loss": 1.4448,
      "step": 200
    },
    {
      "epoch": 0.012994910730701083,
      "grad_norm": 4.893410682678223,
      "learning_rate": 9.999777316338897e-06,
      "loss": 1.4313,
      "step": 201
    },
    {
      "epoch": 0.01305956202786875,
      "grad_norm": 4.01968240737915,
      "learning_rate": 9.999767529278403e-06,
      "loss": 1.3831,
      "step": 202
    },
    {
      "epoch": 0.013124213325036417,
      "grad_norm": 4.3122076988220215,
      "learning_rate": 9.999757531750086e-06,
      "loss": 1.3605,
      "step": 203
    },
    {
      "epoch": 0.013188864622204083,
      "grad_norm": 3.9625604152679443,
      "learning_rate": 9.999747323754363e-06,
      "loss": 1.2944,
      "step": 204
    },
    {
      "epoch": 0.013253515919371751,
      "grad_norm": 4.135870456695557,
      "learning_rate": 9.999736905291664e-06,
      "loss": 1.3465,
      "step": 205
    },
    {
      "epoch": 0.013318167216539419,
      "grad_norm": 3.485560655593872,
      "learning_rate": 9.999726276362429e-06,
      "loss": 1.4901,
      "step": 206
    },
    {
      "epoch": 0.013382818513707085,
      "grad_norm": 4.223531246185303,
      "learning_rate": 9.999715436967104e-06,
      "loss": 1.4342,
      "step": 207
    },
    {
      "epoch": 0.013447469810874753,
      "grad_norm": 4.688872814178467,
      "learning_rate": 9.999704387106147e-06,
      "loss": 1.3735,
      "step": 208
    },
    {
      "epoch": 0.013512121108042419,
      "grad_norm": 4.388930320739746,
      "learning_rate": 9.999693126780022e-06,
      "loss": 1.4288,
      "step": 209
    },
    {
      "epoch": 0.013576772405210087,
      "grad_norm": 3.7054548263549805,
      "learning_rate": 9.999681655989203e-06,
      "loss": 1.3322,
      "step": 210
    },
    {
      "epoch": 0.013641423702377753,
      "grad_norm": 4.013354778289795,
      "learning_rate": 9.999669974734172e-06,
      "loss": 1.4192,
      "step": 211
    },
    {
      "epoch": 0.01370607499954542,
      "grad_norm": 4.022690773010254,
      "learning_rate": 9.999658083015423e-06,
      "loss": 1.3474,
      "step": 212
    },
    {
      "epoch": 0.013770726296713087,
      "grad_norm": 3.8308322429656982,
      "learning_rate": 9.999645980833454e-06,
      "loss": 1.3902,
      "step": 213
    },
    {
      "epoch": 0.013835377593880755,
      "grad_norm": 4.453736305236816,
      "learning_rate": 9.999633668188778e-06,
      "loss": 1.4876,
      "step": 214
    },
    {
      "epoch": 0.013900028891048423,
      "grad_norm": 4.379161834716797,
      "learning_rate": 9.99962114508191e-06,
      "loss": 1.369,
      "step": 215
    },
    {
      "epoch": 0.013964680188216089,
      "grad_norm": 4.23476505279541,
      "learning_rate": 9.999608411513378e-06,
      "loss": 1.371,
      "step": 216
    },
    {
      "epoch": 0.014029331485383757,
      "grad_norm": 3.8091630935668945,
      "learning_rate": 9.999595467483719e-06,
      "loss": 1.3317,
      "step": 217
    },
    {
      "epoch": 0.014093982782551423,
      "grad_norm": 4.4740118980407715,
      "learning_rate": 9.999582312993476e-06,
      "loss": 1.3864,
      "step": 218
    },
    {
      "epoch": 0.01415863407971909,
      "grad_norm": 3.8283002376556396,
      "learning_rate": 9.999568948043206e-06,
      "loss": 1.3924,
      "step": 219
    },
    {
      "epoch": 0.014223285376886757,
      "grad_norm": 3.9413399696350098,
      "learning_rate": 9.99955537263347e-06,
      "loss": 1.365,
      "step": 220
    },
    {
      "epoch": 0.014287936674054424,
      "grad_norm": 3.7700750827789307,
      "learning_rate": 9.999541586764836e-06,
      "loss": 1.3265,
      "step": 221
    },
    {
      "epoch": 0.01435258797122209,
      "grad_norm": 4.468739986419678,
      "learning_rate": 9.999527590437889e-06,
      "loss": 1.4056,
      "step": 222
    },
    {
      "epoch": 0.014417239268389758,
      "grad_norm": 3.847881555557251,
      "learning_rate": 9.999513383653216e-06,
      "loss": 1.3369,
      "step": 223
    },
    {
      "epoch": 0.014481890565557426,
      "grad_norm": 4.515076637268066,
      "learning_rate": 9.999498966411415e-06,
      "loss": 1.3715,
      "step": 224
    },
    {
      "epoch": 0.014546541862725092,
      "grad_norm": 4.114704608917236,
      "learning_rate": 9.999484338713096e-06,
      "loss": 1.376,
      "step": 225
    },
    {
      "epoch": 0.01461119315989276,
      "grad_norm": 4.159117221832275,
      "learning_rate": 9.999469500558872e-06,
      "loss": 1.3388,
      "step": 226
    },
    {
      "epoch": 0.014675844457060426,
      "grad_norm": 4.336222171783447,
      "learning_rate": 9.999454451949364e-06,
      "loss": 1.3121,
      "step": 227
    },
    {
      "epoch": 0.014740495754228094,
      "grad_norm": 3.4951186180114746,
      "learning_rate": 9.999439192885212e-06,
      "loss": 1.3861,
      "step": 228
    },
    {
      "epoch": 0.01480514705139576,
      "grad_norm": 4.519493579864502,
      "learning_rate": 9.999423723367056e-06,
      "loss": 1.3151,
      "step": 229
    },
    {
      "epoch": 0.014869798348563428,
      "grad_norm": 3.895230770111084,
      "learning_rate": 9.999408043395546e-06,
      "loss": 1.3877,
      "step": 230
    },
    {
      "epoch": 0.014934449645731094,
      "grad_norm": 3.6650257110595703,
      "learning_rate": 9.999392152971344e-06,
      "loss": 1.3744,
      "step": 231
    },
    {
      "epoch": 0.014999100942898762,
      "grad_norm": 4.416625022888184,
      "learning_rate": 9.999376052095117e-06,
      "loss": 1.2943,
      "step": 232
    },
    {
      "epoch": 0.01506375224006643,
      "grad_norm": 4.903157711029053,
      "learning_rate": 9.999359740767545e-06,
      "loss": 1.3302,
      "step": 233
    },
    {
      "epoch": 0.015128403537234096,
      "grad_norm": 4.176599502563477,
      "learning_rate": 9.999343218989313e-06,
      "loss": 1.3421,
      "step": 234
    },
    {
      "epoch": 0.015193054834401764,
      "grad_norm": 4.2415876388549805,
      "learning_rate": 9.999326486761114e-06,
      "loss": 1.3693,
      "step": 235
    },
    {
      "epoch": 0.01525770613156943,
      "grad_norm": 4.100305080413818,
      "learning_rate": 9.999309544083657e-06,
      "loss": 1.3251,
      "step": 236
    },
    {
      "epoch": 0.015322357428737098,
      "grad_norm": 3.9971888065338135,
      "learning_rate": 9.999292390957653e-06,
      "loss": 1.4118,
      "step": 237
    },
    {
      "epoch": 0.015387008725904764,
      "grad_norm": 4.218728065490723,
      "learning_rate": 9.999275027383826e-06,
      "loss": 1.371,
      "step": 238
    },
    {
      "epoch": 0.015451660023072432,
      "grad_norm": 5.075481414794922,
      "learning_rate": 9.999257453362903e-06,
      "loss": 1.3753,
      "step": 239
    },
    {
      "epoch": 0.015516311320240098,
      "grad_norm": 4.296051025390625,
      "learning_rate": 9.999239668895627e-06,
      "loss": 1.4116,
      "step": 240
    },
    {
      "epoch": 0.015580962617407766,
      "grad_norm": 4.260445594787598,
      "learning_rate": 9.999221673982747e-06,
      "loss": 1.3179,
      "step": 241
    },
    {
      "epoch": 0.015645613914575432,
      "grad_norm": 4.778342247009277,
      "learning_rate": 9.999203468625017e-06,
      "loss": 1.3185,
      "step": 242
    },
    {
      "epoch": 0.0157102652117431,
      "grad_norm": 3.723858594894409,
      "learning_rate": 9.999185052823207e-06,
      "loss": 1.3,
      "step": 243
    },
    {
      "epoch": 0.015774916508910768,
      "grad_norm": 3.748918294906616,
      "learning_rate": 9.99916642657809e-06,
      "loss": 1.455,
      "step": 244
    },
    {
      "epoch": 0.015839567806078435,
      "grad_norm": 4.436662197113037,
      "learning_rate": 9.999147589890452e-06,
      "loss": 1.3895,
      "step": 245
    },
    {
      "epoch": 0.0159042191032461,
      "grad_norm": 4.519418716430664,
      "learning_rate": 9.999128542761085e-06,
      "loss": 1.2948,
      "step": 246
    },
    {
      "epoch": 0.015968870400413768,
      "grad_norm": 4.407564640045166,
      "learning_rate": 9.99910928519079e-06,
      "loss": 1.5275,
      "step": 247
    },
    {
      "epoch": 0.016033521697581436,
      "grad_norm": 4.254813194274902,
      "learning_rate": 9.999089817180378e-06,
      "loss": 1.3428,
      "step": 248
    },
    {
      "epoch": 0.016098172994749103,
      "grad_norm": 4.610138893127441,
      "learning_rate": 9.999070138730668e-06,
      "loss": 1.3733,
      "step": 249
    },
    {
      "epoch": 0.01616282429191677,
      "grad_norm": 3.6939423084259033,
      "learning_rate": 9.99905024984249e-06,
      "loss": 1.3943,
      "step": 250
    },
    {
      "epoch": 0.016227475589084436,
      "grad_norm": 3.755028247833252,
      "learning_rate": 9.999030150516681e-06,
      "loss": 1.4256,
      "step": 251
    },
    {
      "epoch": 0.016292126886252103,
      "grad_norm": 4.2649149894714355,
      "learning_rate": 9.999009840754085e-06,
      "loss": 1.4257,
      "step": 252
    },
    {
      "epoch": 0.01635677818341977,
      "grad_norm": 3.718479633331299,
      "learning_rate": 9.998989320555562e-06,
      "loss": 1.3312,
      "step": 253
    },
    {
      "epoch": 0.01642142948058744,
      "grad_norm": 3.7253224849700928,
      "learning_rate": 9.998968589921969e-06,
      "loss": 1.37,
      "step": 254
    },
    {
      "epoch": 0.016486080777755104,
      "grad_norm": 3.8125829696655273,
      "learning_rate": 9.998947648854182e-06,
      "loss": 1.3721,
      "step": 255
    },
    {
      "epoch": 0.01655073207492277,
      "grad_norm": 4.105193138122559,
      "learning_rate": 9.998926497353084e-06,
      "loss": 1.3238,
      "step": 256
    }
  ],
  "logging_steps": 1,
  "max_steps": 15467,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 16,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.637374085936087e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}