{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 20.0,
  "eval_steps": 500,
  "global_step": 1400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 2e-05, "loss": 2.2919, "step": 1 },
    { "epoch": 0.03, "learning_rate": 4e-05, "loss": 2.3664, "step": 2 },
    { "epoch": 0.04, "learning_rate": 6e-05, "loss": 2.5039, "step": 3 },
    { "epoch": 0.06, "learning_rate": 8e-05, "loss": 2.3538, "step": 4 },
    { "epoch": 0.07, "learning_rate": 0.0001, "loss": 2.3211, "step": 5 },
    { "epoch": 0.09, "learning_rate": 9.992831541218638e-05, "loss": 2.1066, "step": 6 },
    { "epoch": 0.1, "learning_rate": 9.985663082437276e-05, "loss": 2.1203, "step": 7 },
    { "epoch": 0.11, "learning_rate": 9.978494623655914e-05, "loss": 2.1782, "step": 8 },
    { "epoch": 0.13, "learning_rate": 9.971326164874553e-05, "loss": 1.9107, "step": 9 },
    { "epoch": 0.14, "learning_rate": 9.96415770609319e-05, "loss": 2.0875, "step": 10 },
    { "epoch": 0.16, "learning_rate": 9.956989247311828e-05, "loss": 1.8469, "step": 11 },
    { "epoch": 0.17, "learning_rate": 9.949820788530466e-05, "loss": 1.9198, "step": 12 },
    { "epoch": 0.19, "learning_rate": 9.942652329749105e-05, "loss": 1.8764, "step": 13 },
    { "epoch": 0.2, "learning_rate": 9.935483870967742e-05, "loss": 1.5738, "step": 14 },
    { "epoch": 0.21, "learning_rate": 9.928315412186381e-05, "loss": 1.7708, "step": 15 },
    { "epoch": 0.23, "learning_rate": 9.921146953405019e-05, "loss": 1.6138, "step": 16 },
    { "epoch": 0.24, "learning_rate": 9.913978494623656e-05, "loss": 1.4795, "step": 17 },
    { "epoch": 0.26, "learning_rate": 9.906810035842294e-05, "loss": 1.7306, "step": 18 },
    { "epoch": 0.27, "learning_rate": 9.899641577060932e-05, "loss": 1.7529, "step": 19 },
    { "epoch": 0.29, "learning_rate": 9.892473118279571e-05, "loss": 1.6125, "step": 20 },
    { "epoch": 0.3, "learning_rate": 9.885304659498207e-05, "loss": 1.4683, "step": 21 },
    { "epoch": 0.31, "learning_rate": 9.878136200716847e-05, "loss": 1.5695, "step": 22 },
    { "epoch": 0.33, "learning_rate": 9.870967741935484e-05, "loss": 1.6194, "step": 23 },
    { "epoch": 0.34, "learning_rate": 9.863799283154122e-05, "loss": 1.4536, "step": 24 },
    { "epoch": 0.36, "learning_rate": 9.85663082437276e-05, "loss": 1.6215, "step": 25 },
    { "epoch": 0.37, "learning_rate": 9.849462365591399e-05, "loss": 1.4687, "step": 26 },
    { "epoch": 0.39, "learning_rate": 9.842293906810037e-05, "loss": 1.5447, "step": 27 },
    { "epoch": 0.4, "learning_rate": 9.835125448028674e-05, "loss": 1.5991, "step": 28 },
    { "epoch": 0.41, "learning_rate": 9.827956989247312e-05, "loss": 1.3734, "step": 29 },
    { "epoch": 0.43, "learning_rate": 9.820788530465951e-05, "loss": 1.5202, "step": 30 },
    { "epoch": 0.44, "learning_rate": 9.813620071684588e-05, "loss": 1.3936, "step": 31 },
    { "epoch": 0.46, "learning_rate": 9.806451612903226e-05, "loss": 1.6183, "step": 32 },
    { "epoch": 0.47, "learning_rate": 9.799283154121865e-05, "loss": 1.7974, "step": 33 },
    { "epoch": 0.49, "learning_rate": 9.792114695340502e-05, "loss": 1.4831, "step": 34 },
    { "epoch": 0.5, "learning_rate": 9.78494623655914e-05, "loss": 1.3791, "step": 35 },
    { "epoch": 0.51, "learning_rate": 9.777777777777778e-05, "loss": 1.5898, "step": 36 },
    { "epoch": 0.53, "learning_rate": 9.770609318996417e-05, "loss": 1.4183, "step": 37 },
    { "epoch": 0.54, "learning_rate": 9.763440860215054e-05, "loss": 1.4216, "step": 38 },
    { "epoch": 0.56, "learning_rate": 9.756272401433693e-05, "loss": 1.5615, "step": 39 },
    { "epoch": 0.57, "learning_rate": 9.74910394265233e-05, "loss": 1.4294, "step": 40 },
    { "epoch": 0.59, "learning_rate": 9.741935483870968e-05, "loss": 1.3912, "step": 41 },
    { "epoch": 0.6, "learning_rate": 9.734767025089606e-05, "loss": 1.7205, "step": 42 },
    { "epoch": 0.61, "learning_rate": 9.727598566308245e-05, "loss": 1.2721, "step": 43 },
    { "epoch": 0.63, "learning_rate": 9.720430107526883e-05, "loss": 1.1908, "step": 44 },
    { "epoch": 0.64, "learning_rate": 9.713261648745519e-05, "loss": 1.2543, "step": 45 },
    { "epoch": 0.66, "learning_rate": 9.706093189964158e-05, "loss": 1.2523, "step": 46 },
    { "epoch": 0.67, "learning_rate": 9.698924731182796e-05, "loss": 1.2512, "step": 47 },
    { "epoch": 0.69, "learning_rate": 9.691756272401434e-05, "loss": 1.0955, "step": 48 },
    { "epoch": 0.7, "learning_rate": 9.684587813620072e-05, "loss": 1.6172, "step": 49 },
    { "epoch": 0.71, "learning_rate": 9.677419354838711e-05, "loss": 1.345, "step": 50 },
    { "epoch": 0.73, "learning_rate": 9.670250896057349e-05, "loss": 1.4891, "step": 51 },
    { "epoch": 0.74, "learning_rate": 9.663082437275986e-05, "loss": 1.5186, "step": 52 },
    { "epoch": 0.76, "learning_rate": 9.655913978494624e-05, "loss": 1.4853, "step": 53 },
    { "epoch": 0.77, "learning_rate": 9.648745519713263e-05, "loss": 1.3203, "step": 54 },
    { "epoch": 0.79, "learning_rate": 9.6415770609319e-05, "loss": 1.4817, "step": 55 },
    { "epoch": 0.8, "learning_rate": 9.634408602150539e-05, "loss": 1.4074, "step": 56 },
    { "epoch": 0.81, "learning_rate": 9.627240143369176e-05, "loss": 1.0041, "step": 57 },
    { "epoch": 0.83, "learning_rate": 9.620071684587814e-05, "loss": 0.9736, "step": 58 },
    { "epoch": 0.84, "learning_rate": 9.612903225806452e-05, "loss": 1.1534, "step": 59 },
    { "epoch": 0.86, "learning_rate": 9.60573476702509e-05, "loss": 1.1337, "step": 60 },
    { "epoch": 0.87, "learning_rate": 9.598566308243729e-05, "loss": 1.4201, "step": 61 },
    { "epoch": 0.89, "learning_rate": 9.591397849462365e-05, "loss": 1.4095, "step": 62 },
    { "epoch": 0.9, "learning_rate": 9.584229390681004e-05, "loss": 1.0914, "step": 63 },
    { "epoch": 0.91, "learning_rate": 9.577060931899642e-05, "loss": 0.9977, "step": 64 },
    { "epoch": 0.93, "learning_rate": 9.56989247311828e-05, "loss": 1.1515, "step": 65 },
    { "epoch": 0.94, "learning_rate": 9.562724014336918e-05, "loss": 1.3172, "step": 66 },
    { "epoch": 0.96, "learning_rate": 9.555555555555557e-05, "loss": 0.8516, "step": 67 },
    { "epoch": 0.97, "learning_rate": 9.548387096774195e-05, "loss": 1.1091, "step": 68 },
    { "epoch": 0.99, "learning_rate": 9.541218637992831e-05, "loss": 0.9911, "step": 69 },
    { "epoch": 1.0, "learning_rate": 9.53405017921147e-05, "loss": 0.9427, "step": 70 },
    { "epoch": 1.0, "eval_loss": 1.9063950777053833, "eval_runtime": 29.2016, "eval_samples_per_second": 4.109, "eval_steps_per_second": 0.514, "step": 70 },
    { "epoch": 1.01, "learning_rate": 9.526881720430108e-05, "loss": 0.7439, "step": 71 },
    { "epoch": 1.03, "learning_rate": 9.519713261648746e-05, "loss": 0.6516, "step": 72 },
    { "epoch": 1.04, "learning_rate": 9.512544802867383e-05, "loss": 0.8568, "step": 73 },
    { "epoch": 1.06, "learning_rate": 9.505376344086023e-05, "loss": 0.7717, "step": 74 },
    { "epoch": 1.07, "learning_rate": 9.49820788530466e-05, "loss": 0.7564, "step": 75 },
    { "epoch": 1.09, "learning_rate": 9.491039426523298e-05, "loss": 0.593, "step": 76 },
    { "epoch": 1.1, "learning_rate": 9.483870967741936e-05, "loss": 0.8772, "step": 77 },
    { "epoch": 1.11, "learning_rate": 9.476702508960575e-05, "loss": 0.5562, "step": 78 },
    { "epoch": 1.13, "learning_rate": 9.469534050179211e-05, "loss": 0.7107, "step": 79 },
    { "epoch": 1.14, "learning_rate": 9.46236559139785e-05, "loss": 0.6734, "step": 80 },
    { "epoch": 1.16, "learning_rate": 9.455197132616488e-05, "loss": 0.8054, "step": 81 },
    { "epoch": 1.17, "learning_rate": 9.448028673835126e-05, "loss": 0.4643, "step": 82 },
    { "epoch": 1.19, "learning_rate": 9.440860215053764e-05, "loss": 0.5161, "step": 83 },
    { "epoch": 1.2, "learning_rate": 9.433691756272402e-05, "loss": 0.3869, "step": 84 },
    { "epoch": 1.21, "learning_rate": 9.42652329749104e-05, "loss": 0.5715, "step": 85 },
    { "epoch": 1.23, "learning_rate": 9.419354838709677e-05, "loss": 0.3854, "step": 86 },
    { "epoch": 1.24, "learning_rate": 9.412186379928316e-05, "loss": 0.4301, "step": 87 },
    { "epoch": 1.26, "learning_rate": 9.405017921146954e-05, "loss": 0.5839, "step": 88 },
    { "epoch": 1.27, "learning_rate": 9.397849462365592e-05, "loss": 0.3165, "step": 89 },
    { "epoch": 1.29, "learning_rate": 9.39068100358423e-05, "loss": 0.4854, "step": 90 },
    { "epoch": 1.3, "learning_rate": 9.383512544802869e-05, "loss": 0.7052, "step": 91 },
    { "epoch": 1.31, "learning_rate": 9.376344086021506e-05, "loss": 0.6839, "step": 92 },
    { "epoch": 1.33, "learning_rate": 9.369175627240144e-05, "loss": 0.6856, "step": 93 },
    { "epoch": 1.34, "learning_rate": 9.362007168458782e-05, "loss": 0.4064, "step": 94 },
    { "epoch": 1.36, "learning_rate": 9.35483870967742e-05, "loss": 0.3277, "step": 95 },
    { "epoch": 1.37, "learning_rate": 9.347670250896057e-05, "loss": 0.4991, "step": 96 },
    { "epoch": 1.39, "learning_rate": 9.340501792114695e-05, "loss": 0.5538, "step": 97 },
    { "epoch": 1.4, "learning_rate": 9.333333333333334e-05, "loss": 0.3563, "step": 98 },
    { "epoch": 1.41, "learning_rate": 9.326164874551971e-05, "loss": 0.5073, "step": 99 },
    { "epoch": 1.43, "learning_rate": 9.31899641577061e-05, "loss": 0.4787, "step": 100 },
    { "epoch": 1.44, "learning_rate": 9.311827956989248e-05, "loss": 0.4723, "step": 101 },
    { "epoch": 1.46, "learning_rate": 9.304659498207887e-05, "loss": 0.4211, "step": 102 },
    { "epoch": 1.47, "learning_rate": 9.297491039426523e-05, "loss": 0.5097, "step": 103 },
    { "epoch": 1.49, "learning_rate": 9.290322580645162e-05, "loss": 0.4332, "step": 104 },
    { "epoch": 1.5, "learning_rate": 9.2831541218638e-05, "loss": 0.3212, "step": 105 },
    { "epoch": 1.51, "learning_rate": 9.275985663082438e-05, "loss": 0.5159, "step": 106 },
    { "epoch": 1.53, "learning_rate": 9.268817204301076e-05, "loss": 0.4215, "step": 107 },
    { "epoch": 1.54, "learning_rate": 9.261648745519713e-05, "loss": 0.2474, "step": 108 },
    { "epoch": 1.56, "learning_rate": 9.254480286738351e-05, "loss": 0.7533, "step": 109 },
    { "epoch": 1.57, "learning_rate": 9.247311827956989e-05, "loss": 0.409, "step": 110 },
    { "epoch": 1.59, "learning_rate": 9.240143369175628e-05, "loss": 0.2315, "step": 111 },
    { "epoch": 1.6, "learning_rate": 9.232974910394266e-05, "loss": 0.3415, "step": 112 },
    { "epoch": 1.61, "learning_rate": 9.225806451612904e-05, "loss": 0.4312, "step": 113 },
    { "epoch": 1.63, "learning_rate": 9.218637992831541e-05, "loss": 0.2231, "step": 114 },
    { "epoch": 1.64, "learning_rate": 9.21146953405018e-05, "loss": 0.2538, "step": 115 },
    { "epoch": 1.66, "learning_rate": 9.204301075268817e-05, "loss": 0.2535, "step": 116 },
    { "epoch": 1.67, "learning_rate": 9.197132616487456e-05, "loss": 0.2562, "step": 117 },
    { "epoch": 1.69, "learning_rate": 9.189964157706094e-05, "loss": 0.2448, "step": 118 },
    { "epoch": 1.7, "learning_rate": 9.182795698924731e-05, "loss": 0.2333, "step": 119 },
    { "epoch": 1.71, "learning_rate": 9.175627240143369e-05, "loss": 0.2554, "step": 120 },
    { "epoch": 1.73, "learning_rate": 9.168458781362007e-05, "loss": 0.458, "step": 121 },
    { "epoch": 1.74, "learning_rate": 9.161290322580646e-05, "loss": 0.1911, "step": 122 },
    { "epoch": 1.76, "learning_rate": 9.154121863799283e-05, "loss": 0.3257, "step": 123 },
    { "epoch": 1.77, "learning_rate": 9.146953405017922e-05, "loss": 0.368, "step": 124 },
    { "epoch": 1.79, "learning_rate": 9.13978494623656e-05, "loss": 0.2023, "step": 125 },
    { "epoch": 1.8, "learning_rate": 9.132616487455197e-05, "loss": 0.2151, "step": 126 },
    { "epoch": 1.81, "learning_rate": 9.125448028673835e-05, "loss": 0.2101, "step": 127 },
    { "epoch": 1.83, "learning_rate": 9.118279569892474e-05, "loss": 0.2763, "step": 128 },
    { "epoch": 1.84, "learning_rate": 9.111111111111112e-05, "loss": 0.2098, "step": 129 },
    { "epoch": 1.86, "learning_rate": 9.10394265232975e-05, "loss": 0.303, "step": 130 },
    { "epoch": 1.87, "learning_rate": 9.096774193548387e-05, "loss": 0.5747, "step": 131 },
    { "epoch": 1.89, "learning_rate": 9.089605734767026e-05, "loss": 0.2052, "step": 132 },
    { "epoch": 1.9, "learning_rate": 9.082437275985663e-05, "loss": 0.4412, "step": 133 },
    { "epoch": 1.91, "learning_rate": 9.0752688172043e-05, "loss": 0.2414, "step": 134 },
    { "epoch": 1.93, "learning_rate": 9.06810035842294e-05, "loss": 0.2269, "step": 135 },
    { "epoch": 1.94, "learning_rate": 9.060931899641578e-05, "loss": 0.2451, "step": 136 },
    { "epoch": 1.96, "learning_rate": 9.053763440860215e-05, "loss": 0.1992, "step": 137 },
    { "epoch": 1.97, "learning_rate": 9.046594982078853e-05, "loss": 0.2655, "step": 138 },
    { "epoch": 1.99, "learning_rate": 9.039426523297492e-05, "loss": 0.306, "step": 139 },
    { "epoch": 2.0, "learning_rate": 9.032258064516129e-05, "loss": 0.2001, "step": 140 },
    { "epoch": 2.0, "eval_loss": 2.630777597427368, "eval_runtime": 29.2109, "eval_samples_per_second": 4.108, "eval_steps_per_second": 0.514, "step": 140 },
    { "epoch": 2.01, "learning_rate": 9.025089605734768e-05, "loss": 0.1669, "step": 141 },
    { "epoch": 2.03, "learning_rate": 9.017921146953405e-05, "loss": 0.1358, "step": 142 },
    { "epoch": 2.04, "learning_rate": 9.010752688172043e-05, "loss": 0.2005, "step": 143 },
    { "epoch": 2.06, "learning_rate": 9.003584229390681e-05, "loss": 0.1524, "step": 144 },
    { "epoch": 2.07, "learning_rate": 8.99641577060932e-05, "loss": 0.1667, "step": 145 },
    { "epoch": 2.09, "learning_rate": 8.989247311827958e-05, "loss": 0.1812, "step": 146 },
    { "epoch": 2.1, "learning_rate": 8.982078853046594e-05, "loss": 0.1708, "step": 147 },
    { "epoch": 2.11, "learning_rate": 8.974910394265233e-05, "loss": 0.1979, "step": 148 },
    { "epoch": 2.13, "learning_rate": 8.967741935483871e-05, "loss": 0.1459, "step": 149 },
    { "epoch": 2.14, "learning_rate": 8.960573476702509e-05, "loss": 0.1768, "step": 150 },
    { "epoch": 2.16, "learning_rate": 8.953405017921147e-05, "loss": 0.1606, "step": 151 },
    { "epoch": 2.17, "learning_rate": 8.946236559139786e-05, "loss": 0.1625, "step": 152 },
    { "epoch": 2.19, "learning_rate": 8.939068100358424e-05, "loss": 0.1414, "step": 153 },
    { "epoch": 2.2, "learning_rate": 8.931899641577061e-05, "loss": 0.1508, "step": 154 },
    { "epoch": 2.21, "learning_rate": 8.924731182795699e-05, "loss": 0.1811, "step": 155 },
    { "epoch": 2.23, "learning_rate": 8.917562724014338e-05, "loss": 0.1333, "step": 156 },
    { "epoch": 2.24, "learning_rate": 8.910394265232975e-05, "loss": 0.2154, "step": 157 },
    { "epoch": 2.26, "learning_rate": 8.903225806451614e-05, "loss": 0.1688, "step": 158 },
    { "epoch": 2.27, "learning_rate": 8.896057347670252e-05, "loss": 0.1293, "step": 159 },
    { "epoch": 2.29, "learning_rate": 8.888888888888889e-05, "loss": 0.1286, "step": 160 },
    { "epoch": 2.3, "learning_rate": 8.881720430107527e-05, "loss": 0.1673, "step": 161 },
    { "epoch": 2.31, "learning_rate": 8.874551971326165e-05, "loss": 0.1949, "step": 162 },
    { "epoch": 2.33, "learning_rate": 8.867383512544804e-05, "loss": 0.1398, "step": 163 },
    { "epoch": 2.34, "learning_rate": 8.86021505376344e-05, "loss": 0.1165, "step": 164 },
    { "epoch": 2.36, "learning_rate": 8.85304659498208e-05, "loss": 0.1639, "step": 165 },
    { "epoch": 2.37, "learning_rate": 8.845878136200717e-05, "loss": 0.2318, "step": 166 },
    { "epoch": 2.39, "learning_rate": 8.838709677419355e-05, "loss": 0.1378, "step": 167 },
    { "epoch": 2.4, "learning_rate": 8.831541218637993e-05, "loss": 0.1299, "step": 168 },
    { "epoch": 2.41, "learning_rate": 8.824372759856632e-05, "loss": 0.1556, "step": 169 },
    { "epoch": 2.43, "learning_rate": 8.81720430107527e-05, "loss": 0.1871, "step": 170 },
    { "epoch": 2.44, "learning_rate": 8.810035842293907e-05, "loss": 0.1254, "step": 171 },
    { "epoch": 2.46, "learning_rate": 8.802867383512545e-05, "loss": 0.1314, "step": 172 },
    { "epoch": 2.47, "learning_rate": 8.795698924731183e-05, "loss": 0.1352, "step": 173 },
    { "epoch": 2.49, "learning_rate": 8.788530465949821e-05, "loss": 0.1462, "step": 174 },
    { "epoch": 2.5, "learning_rate": 8.781362007168459e-05, "loss": 0.1643, "step": 175 },
    { "epoch": 2.51, "learning_rate": 8.774193548387098e-05, "loss": 0.1427, "step": 176 },
    { "epoch": 2.53, "learning_rate": 8.767025089605735e-05, "loss": 0.1304, "step": 177 },
    { "epoch": 2.54, "learning_rate": 8.759856630824373e-05, "loss": 0.1738, "step": 178 },
    { "epoch": 2.56, "learning_rate": 8.752688172043011e-05, "loss": 0.1636, "step": 179 },
    { "epoch": 2.57, "learning_rate": 8.74551971326165e-05, "loss": 0.147, "step": 180 },
    { "epoch": 2.59, "learning_rate": 8.738351254480286e-05, "loss": 0.162, "step": 181 },
    { "epoch": 2.6, "learning_rate": 8.731182795698926e-05, "loss": 0.1702, "step": 182 },
    { "epoch": 2.61, "learning_rate": 8.724014336917563e-05, "loss": 0.1243, "step": 183 },
    { "epoch": 2.63, "learning_rate": 8.716845878136201e-05, "loss": 0.2052, "step": 184 },
    { "epoch": 2.64, "learning_rate": 8.709677419354839e-05, "loss": 0.1606, "step": 185 },
    { "epoch": 2.66, "learning_rate": 8.702508960573477e-05, "loss": 0.1722, "step": 186 },
    { "epoch": 2.67, "learning_rate": 8.695340501792116e-05, "loss": 0.1113, "step": 187 },
    { "epoch": 2.69, "learning_rate": 8.688172043010752e-05, "loss": 0.1679, "step": 188 },
    { "epoch": 2.7, "learning_rate": 8.681003584229391e-05, "loss": 0.1209, "step": 189 },
    { "epoch": 2.71, "learning_rate": 8.673835125448029e-05, "loss": 0.1685, "step": 190 },
    { "epoch": 2.73, "learning_rate": 8.666666666666667e-05, "loss": 0.2003, "step": 191 },
    { "epoch": 2.74, "learning_rate": 8.659498207885305e-05, "loss": 0.1669, "step": 192 },
    { "epoch": 2.76, "learning_rate": 8.652329749103944e-05, "loss": 0.1431, "step": 193 },
    { "epoch": 2.77, "learning_rate": 8.645161290322581e-05, "loss": 0.1572, "step": 194 },
    { "epoch": 2.79, "learning_rate": 8.637992831541219e-05, "loss": 0.1185, "step": 195 },
    { "epoch": 2.8, "learning_rate": 8.630824372759857e-05, "loss": 0.1722, "step": 196 },
    { "epoch": 2.81, "learning_rate": 8.623655913978495e-05, "loss": 0.1286, "step": 197 },
    { "epoch": 2.83, "learning_rate": 8.616487455197133e-05, "loss": 0.1147, "step": 198 },
    { "epoch": 2.84, "learning_rate": 8.60931899641577e-05, "loss": 0.1638, "step": 199 },
    { "epoch": 2.86, "learning_rate": 8.60215053763441e-05, "loss": 0.1337, "step": 200 },
    { "epoch": 2.87, "learning_rate": 8.594982078853047e-05, "loss": 0.1632, "step": 201 },
    { "epoch": 2.89, "learning_rate": 8.587813620071685e-05, "loss": 0.1508, "step": 202 },
    { "epoch": 2.9, "learning_rate": 8.580645161290323e-05, "loss": 0.1468, "step": 203 },
    { "epoch": 2.91, "learning_rate": 8.573476702508962e-05, "loss": 0.178, "step": 204 },
    { "epoch": 2.93, "learning_rate": 8.566308243727598e-05, "loss": 0.1672, "step": 205 },
    { "epoch": 2.94, "learning_rate": 8.559139784946237e-05, "loss": 0.153, "step": 206 },
    { "epoch": 2.96, "learning_rate": 8.551971326164875e-05, "loss": 0.1692, "step": 207 },
    { "epoch": 2.97, "learning_rate": 8.544802867383513e-05, "loss": 0.1739, "step": 208 },
    { "epoch": 2.99, "learning_rate": 8.53763440860215e-05, "loss": 0.1266, "step": 209 },
    { "epoch": 3.0, "learning_rate": 8.530465949820788e-05, "loss": 0.1129, "step": 210 },
    { "epoch": 3.0, "eval_loss": 2.900357484817505, "eval_runtime": 29.2063, "eval_samples_per_second": 4.109, "eval_steps_per_second": 0.514, "step": 210 },
    { "epoch": 3.01, "learning_rate": 8.523297491039428e-05, "loss": 0.097, "step": 211 },
    { "epoch": 3.03, "learning_rate": 8.516129032258064e-05, "loss": 0.0781, "step": 212 },
    { "epoch": 3.04, "learning_rate": 8.508960573476703e-05, "loss": 0.0905, "step": 213 },
    { "epoch": 3.06, "learning_rate": 8.501792114695341e-05, "loss": 0.0957, "step": 214 },
    { "epoch": 3.07, "learning_rate": 8.494623655913979e-05, "loss": 0.1099, "step": 215 },
    { "epoch": 3.09, "learning_rate": 8.487455197132616e-05, "loss": 0.1027, "step": 216 },
    { "epoch": 3.1, "learning_rate": 8.480286738351255e-05, "loss": 0.1097, "step": 217 },
    { "epoch": 3.11, "learning_rate": 8.473118279569893e-05, "loss": 0.1255, "step": 218 },
    { "epoch": 3.13, "learning_rate": 8.465949820788531e-05, "loss": 0.0926, "step": 219 },
    { "epoch": 3.14, "learning_rate": 8.458781362007169e-05, "loss": 0.101, "step": 220 },
    { "epoch": 3.16, "learning_rate": 8.451612903225808e-05, "loss": 0.0964, "step": 221 },
    { "epoch": 3.17, "learning_rate": 8.444444444444444e-05, "loss": 0.0875, "step": 222 },
    { "epoch": 3.19, "learning_rate": 8.437275985663082e-05, "loss": 0.1222, "step": 223 },
    { "epoch": 3.2, "learning_rate": 8.430107526881721e-05, "loss": 0.0802, "step": 224 },
    { "epoch": 3.21, "learning_rate": 8.422939068100359e-05, "loss": 0.1002, "step": 225 },
    { "epoch": 3.23, "learning_rate": 8.415770609318997e-05, "loss": 0.1212, "step": 226 },
    { "epoch": 3.24, "learning_rate": 8.408602150537634e-05, "loss": 0.075, "step": 227 },
    { "epoch": 3.26, "learning_rate": 8.401433691756274e-05, "loss": 0.1154, "step": 228 },
    { "epoch": 3.27, "learning_rate": 8.39426523297491e-05, "loss": 0.0961, "step": 229 },
    { "epoch": 3.29, "learning_rate": 8.387096774193549e-05, "loss": 0.1074, "step": 230 },
    { "epoch": 3.3, "learning_rate": 8.379928315412187e-05, "loss": 0.1138, "step": 231 },
    { "epoch": 3.31, "learning_rate": 8.372759856630825e-05, "loss": 0.0955, "step": 232 },
    { "epoch": 3.33, "learning_rate": 8.365591397849462e-05, "loss": 0.1033, "step": 233 },
    { "epoch": 3.34, "learning_rate": 8.358422939068102e-05, "loss": 0.0716, "step": 234 },
    { "epoch": 3.36, "learning_rate": 8.351254480286739e-05, "loss": 0.0964, "step": 235 },
    { "epoch": 3.37, "learning_rate": 8.344086021505376e-05, "loss": 0.0918, "step": 236 },
    { "epoch": 3.39, "learning_rate": 8.336917562724015e-05, "loss": 0.1311, "step": 237 },
    { "epoch": 3.4, "learning_rate": 8.329749103942653e-05, "loss": 0.0905, "step": 238 },
    { "epoch": 3.41, "learning_rate": 8.32258064516129e-05, "loss": 0.0874, "step": 239 },
    { "epoch": 3.43, "learning_rate": 8.315412186379928e-05, "loss": 0.0739, "step": 240 },
    { "epoch": 3.44, "learning_rate": 8.308243727598567e-05, "loss": 0.1231, "step": 241 },
    { "epoch": 3.46, "learning_rate": 8.301075268817205e-05, "loss": 0.0936, "step": 242 },
    { "epoch": 3.47, "learning_rate": 8.293906810035843e-05, "loss": 0.0927, "step": 243 },
    { "epoch": 3.49, "learning_rate": 8.28673835125448e-05, "loss": 0.0757, "step": 244 },
    { "epoch": 3.5, "learning_rate": 8.27956989247312e-05, "loss": 0.1012, "step": 245 },
    { "epoch": 3.51, "learning_rate": 8.272401433691756e-05, "loss": 0.0965, "step": 246 },
    { "epoch": 3.53, "learning_rate": 8.265232974910395e-05, "loss": 0.1043, "step": 247 },
    { "epoch": 3.54, "learning_rate": 8.258064516129033e-05, "loss": 0.094, "step": 248 },
    { "epoch": 3.56, "learning_rate": 8.250896057347671e-05, "loss": 0.0806, "step": 249 },
    { "epoch": 3.57, "learning_rate": 8.243727598566309e-05, "loss": 0.0944, "step": 250 },
    { "epoch": 3.59, "learning_rate": 8.236559139784946e-05, "loss": 0.1048, "step": 251 },
    { "epoch": 3.6, "learning_rate": 8.229390681003585e-05, "loss": 0.1076, "step": 252 },
    { "epoch": 3.61, "learning_rate": 8.222222222222222e-05, "loss": 0.1038, "step": 253 },
    { "epoch": 3.63, "learning_rate": 8.215053763440861e-05, "loss": 0.0992, "step": 254 },
    { "epoch": 3.64, "learning_rate": 8.207885304659499e-05, "loss": 0.097, "step": 255 },
    { "epoch": 3.66, "learning_rate": 8.200716845878136e-05, "loss": 0.1338, "step": 256 },
    { "epoch": 3.67, "learning_rate": 8.193548387096774e-05, "loss": 0.1194, "step": 257 },
    { "epoch": 3.69, "learning_rate": 8.186379928315413e-05, "loss": 0.1079, "step": 258 },
    { "epoch": 3.7, "learning_rate": 8.179211469534051e-05, "loss": 0.0796, "step": 259 },
    { "epoch": 3.71, "learning_rate": 8.172043010752689e-05, "loss": 0.1322, "step": 260 },
    { "epoch": 3.73, "learning_rate": 8.164874551971327e-05, "loss": 0.1085, "step": 261 },
    { "epoch": 3.74, "learning_rate": 8.157706093189964e-05, "loss": 0.0989, "step": 262 },
    { "epoch": 3.76, "learning_rate": 8.150537634408602e-05, "loss": 0.0964, "step": 263 },
    { "epoch": 3.77, "learning_rate": 8.14336917562724e-05, "loss": 0.1235, "step": 264 },
    { "epoch": 3.79, "learning_rate": 8.136200716845879e-05, "loss": 0.1125, "step": 265 },
    { "epoch": 3.8, "learning_rate": 8.129032258064517e-05, "loss": 0.0965, "step": 266 },
    { "epoch": 3.81, "learning_rate": 8.121863799283155e-05, "loss": 0.1099, "step": 267 },
    { "epoch": 3.83, "learning_rate": 8.114695340501792e-05, "loss": 0.1224, "step": 268 },
    { "epoch": 3.84, "learning_rate": 8.107526881720431e-05, "loss": 0.0729, "step": 269 },
    { "epoch": 3.86, "learning_rate": 8.100358422939068e-05, "loss": 0.1326, "step": 270 },
    { "epoch": 3.87, "learning_rate": 8.093189964157707e-05, "loss": 0.1136, "step": 271 },
    { "epoch": 3.89, "learning_rate": 8.086021505376345e-05, "loss": 0.0966, "step": 272 },
    { "epoch": 3.9, "learning_rate": 8.078853046594983e-05, "loss": 0.1128, "step": 273 },
    { "epoch": 3.91, "learning_rate": 8.07168458781362e-05, "loss": 0.0898, "step": 274 },
    { "epoch": 3.93, "learning_rate": 8.064516129032258e-05, "loss": 0.139, "step": 275 },
    { "epoch": 3.94, "learning_rate": 8.057347670250897e-05, "loss": 0.097, "step": 276 },
    { "epoch": 3.96, "learning_rate": 8.050179211469534e-05, "loss": 0.1424, "step": 277 },
    { "epoch": 3.97, "learning_rate": 8.043010752688173e-05, "loss": 0.1348, "step": 278 },
    { "epoch": 3.99, "learning_rate": 8.03584229390681e-05, "loss": 0.0942, "step": 279 },
    { "epoch": 4.0, "learning_rate": 8.028673835125448e-05, "loss": 0.0803, "step": 280 },
    { "epoch": 4.0, "eval_loss": 3.033568859100342, "eval_runtime": 29.2135, "eval_samples_per_second": 4.108, "eval_steps_per_second": 0.513, "step": 280 },
    { "epoch": 4.01, "learning_rate": 8.021505376344086e-05, "loss": 0.0755, "step": 281 },
    { "epoch": 4.03, "learning_rate": 8.014336917562725e-05, "loss": 0.056, "step": 282 },
    { "epoch": 4.04, "learning_rate": 8.007168458781363e-05, "loss": 0.081, "step": 283 },
    { "epoch": 4.06, "learning_rate": 8e-05, "loss": 0.0863, "step": 284 },
    { "epoch": 4.07, "learning_rate": 7.992831541218638e-05, "loss": 0.0525, "step": 285 },
    { "epoch": 4.09, "learning_rate": 7.985663082437278e-05, "loss": 0.056, "step": 286 },
    { "epoch": 4.1, "learning_rate": 7.978494623655914e-05, "loss": 0.0897, "step": 287 },
    { "epoch": 4.11, "learning_rate": 7.971326164874552e-05, "loss": 0.0475, "step": 288 },
    { "epoch": 4.13, "learning_rate": 7.964157706093191e-05, "loss": 0.0704, "step": 289 },
    { "epoch": 4.14, "learning_rate": 7.956989247311829e-05, "loss": 0.0713, "step": 290 },
    { "epoch": 4.16, "learning_rate": 7.949820788530466e-05, "loss": 0.0592, "step": 291 },
    { "epoch": 4.17, "learning_rate": 7.942652329749104e-05, "loss": 0.0809, "step": 292 },
    { "epoch": 4.19, "learning_rate": 7.935483870967743e-05, "loss": 0.0689, "step": 293 },
    { "epoch": 4.2, "learning_rate": 7.92831541218638e-05, "loss": 0.0628, "step": 294 },
    { "epoch": 4.21, "learning_rate": 7.921146953405019e-05, "loss": 0.0584, "step": 295 },
    { "epoch": 4.23, "learning_rate": 7.913978494623657e-05, "loss": 0.0833, "step": 296 },
    { "epoch": 4.24, "learning_rate": 7.906810035842294e-05, "loss": 0.063, "step": 297 },
    { "epoch": 4.26, "learning_rate": 7.899641577060932e-05, "loss": 0.0608, "step": 298 },
    { "epoch": 4.27, "learning_rate": 7.892473118279571e-05, "loss": 0.0614, "step": 299 },
    { "epoch": 4.29, "learning_rate": 7.885304659498209e-05, "loss": 0.0634, "step": 300 },
    { "epoch": 4.3, "learning_rate": 7.878136200716845e-05, "loss": 0.0734, "step": 301 },
    { "epoch": 4.31, "learning_rate": 7.870967741935484e-05, "loss": 0.0673, "step": 302 },
    { "epoch": 4.33, "learning_rate": 7.863799283154122e-05, "loss": 0.0539, "step": 303 },
    { "epoch": 4.34, "learning_rate": 7.85663082437276e-05, "loss": 0.0743, "step": 304 },
    { "epoch": 4.36, "learning_rate": 7.849462365591398e-05, "loss": 0.0752, "step": 305 },
    { "epoch": 4.37, "learning_rate": 7.842293906810037e-05, "loss": 0.0601, "step": 306 },
    { "epoch": 4.39, "learning_rate": 7.835125448028673e-05, "loss": 0.0667, "step": 307 },
    { "epoch": 4.4, "learning_rate": 7.827956989247312e-05, "loss": 0.0802, "step": 308 },
    { "epoch": 4.41, "learning_rate": 7.82078853046595e-05, "loss": 0.0829, "step": 309 },
    { "epoch": 4.43, "learning_rate": 7.81362007168459e-05, "loss": 0.0643, "step": 310 },
    { "epoch": 4.44, "learning_rate": 7.806451612903226e-05, "loss": 0.0659, "step": 311 },
    { "epoch": 4.46, "learning_rate": 7.799283154121865e-05, "loss": 0.0587, "step": 312 },
    { "epoch": 4.47, "learning_rate": 7.792114695340503e-05, "loss": 0.0464, "step": 313 },
    { "epoch": 4.49, "learning_rate": 7.784946236559139e-05, "loss": 0.0794, "step": 314 },
    { "epoch": 4.5, "learning_rate": 7.777777777777778e-05, "loss": 0.057, "step": 315 },
    { "epoch": 4.51, "learning_rate": 7.770609318996416e-05, "loss": 0.0612, "step": 316 },
    { "epoch": 4.53, "learning_rate": 7.763440860215054e-05, "loss": 0.0619, "step": 317 },
    { "epoch": 4.54, "learning_rate": 7.756272401433691e-05, "loss": 0.0646, "step": 318 },
    { "epoch": 4.56, "learning_rate": 7.74910394265233e-05, "loss": 0.0694, "step": 319 },
    { "epoch": 4.57, "learning_rate": 7.741935483870968e-05, "loss": 0.075, "step": 320 },
    { "epoch": 4.59, "learning_rate": 7.734767025089606e-05, "loss": 0.0767, "step": 321 },
    { "epoch": 4.6, "learning_rate": 7.727598566308244e-05, "loss": 0.062, "step": 322 },
    { "epoch": 4.61, "learning_rate": 7.720430107526883e-05, "loss": 0.0654, "step": 323 },
    { "epoch": 4.63, "learning_rate": 7.71326164874552e-05, "loss": 0.0712, "step": 324 },
    { "epoch": 4.64, "learning_rate": 7.706093189964157e-05, "loss": 0.0545, "step": 325 },
    { "epoch": 4.66, "learning_rate": 7.698924731182796e-05, "loss": 0.06, "step": 326 },
    { "epoch": 4.67, "learning_rate": 7.691756272401434e-05, "loss": 0.058, "step": 327 },
    { "epoch": 4.69, "learning_rate": 7.684587813620072e-05, "loss": 0.0797, "step": 328 },
    { "epoch": 4.7, "learning_rate": 7.67741935483871e-05, "loss": 0.0648, "step": 329 },
    { "epoch": 4.71, "learning_rate": 7.670250896057349e-05, "loss": 0.0687, "step": 330 },
    { "epoch": 4.73, "learning_rate": 7.663082437275985e-05, "loss": 0.0714, "step": 331 },
    { "epoch": 4.74, "learning_rate": 7.655913978494624e-05, "loss": 0.0738, "step": 332 },
    { "epoch": 4.76, "learning_rate": 7.648745519713262e-05, "loss": 0.0757, "step": 333 },
    { "epoch": 4.77, "learning_rate": 7.6415770609319e-05, "loss": 0.074, "step": 334 },
    { "epoch": 4.79, "learning_rate": 7.634408602150538e-05, "loss": 0.0899, "step": 335 },
    { "epoch": 4.8, "learning_rate": 7.627240143369177e-05, "loss": 0.0913, "step": 336 },
    { "epoch": 4.81, "learning_rate": 7.620071684587814e-05, "loss": 0.0754, "step": 337 },
    { "epoch": 4.83, "learning_rate": 7.612903225806451e-05, "loss": 0.0738, "step": 338 },
    { "epoch": 4.84, "learning_rate": 7.60573476702509e-05, "loss": 0.093, "step": 339 },
    { "epoch": 4.86, "learning_rate": 7.598566308243728e-05, "loss": 0.0849, "step": 340 },
    { "epoch": 4.87, "learning_rate": 7.591397849462365e-05, "loss": 0.0708, "step": 341 },
    { "epoch": 4.89, "learning_rate": 7.584229390681003e-05, "loss": 0.0894, "step": 342 },
    { "epoch": 4.9, "learning_rate": 7.577060931899642e-05, "loss": 0.0949, "step": 343 },
    { "epoch": 4.91, "learning_rate": 7.56989247311828e-05, "loss": 0.0684, "step": 344 },
    { "epoch": 4.93, "learning_rate": 7.562724014336918e-05, "loss": 0.0901, "step": 345 },
    { "epoch": 4.94, "learning_rate": 7.555555555555556e-05, "loss": 0.0637, "step": 346 },
    { "epoch": 4.96, "learning_rate": 7.548387096774195e-05, "loss": 0.0783, "step": 347 },
    { "epoch": 4.97, "learning_rate": 7.541218637992831e-05, "loss": 0.0674, "step": 348 },
    { "epoch": 4.99, "learning_rate": 7.53405017921147e-05, "loss": 0.0866, "step": 349 },
    { "epoch": 5.0, "learning_rate": 7.526881720430108e-05, "loss": 0.0665, "step": 350 },
    { "epoch": 5.0, "eval_loss": 3.0398199558258057, "eval_runtime": 29.2059, "eval_samples_per_second": 4.109, "eval_steps_per_second": 0.514, "step": 350 },
    { "epoch": 5.01, "learning_rate": 7.519713261648746e-05, "loss": 0.0533, "step": 351 },
    { "epoch": 5.03, "learning_rate": 7.512544802867384e-05, "loss": 0.0481, "step": 352 },
    { "epoch": 5.04, "learning_rate": 7.505376344086021e-05, "loss": 0.0618, "step": 353 },
    { "epoch": 5.06, "learning_rate": 7.49820788530466e-05, "loss": 0.0384, "step": 354 },
    { "epoch": 5.07, "learning_rate": 7.491039426523297e-05, "loss": 0.0684, "step": 355 },
    { "epoch": 5.09, "learning_rate": 7.483870967741936e-05, "loss": 0.0479, "step": 356 },
    { "epoch": 5.1, "learning_rate": 7.476702508960574e-05, "loss": 0.0481, "step": 357 },
    { "epoch": 5.11, "learning_rate": 7.469534050179212e-05, "loss": 0.0457, "step": 358 },
    { "epoch": 5.13, "learning_rate": 7.462365591397849e-05, "loss": 0.0407, "step": 359 },
    { "epoch": 5.14, "learning_rate": 7.455197132616488e-05, "loss": 0.0442, "step": 360 },
    { "epoch": 5.16, "learning_rate": 7.448028673835126e-05, "loss": 0.065, "step": 361 },
    { "epoch": 5.17, "learning_rate": 7.440860215053764e-05, "loss": 0.0533, "step": 362 },
    { "epoch": 5.19, "learning_rate": 7.433691756272402e-05, "loss": 0.0559, "step": 363 },
    { "epoch": 5.2, "learning_rate": 7.42652329749104e-05, "loss": 0.045, "step": 364 },
    { "epoch": 5.21, "learning_rate": 7.419354838709677e-05, "loss": 0.0498, "step": 365 },
    { "epoch": 5.23, "learning_rate": 7.412186379928315e-05, "loss": 0.0445, "step": 366 },
    { "epoch": 5.24, "learning_rate": 7.405017921146954e-05, "loss": 0.0536, "step": 367 },
    { "epoch": 5.26, "learning_rate": 7.397849462365592e-05, "loss": 0.0326, "step": 368 },
    { "epoch": 5.27, "learning_rate": 7.39068100358423e-05, "loss": 0.0532, "step": 369 },
    { "epoch": 5.29, "learning_rate": 7.383512544802867e-05, "loss": 0.0389, "step": 370 },
    { "epoch": 5.3, "learning_rate": 7.376344086021507e-05, "loss": 0.0578, "step": 371 },
    { "epoch": 5.31, "learning_rate": 7.369175627240143e-05, "loss": 0.0425, "step": 372 },
    { "epoch": 5.33, "learning_rate": 7.362007168458782e-05, "loss": 0.0514, "step": 373 },
    { "epoch": 5.34, "learning_rate": 7.35483870967742e-05, "loss": 0.0473, "step": 374 },
    { "epoch": 5.36, "learning_rate": 7.347670250896058e-05, "loss": 0.0527, "step": 375 },
    { "epoch": 5.37, "learning_rate": 7.340501792114695e-05, "loss": 0.0627, "step": 376 },
    { "epoch": 5.39, "learning_rate": 7.333333333333333e-05, "loss": 0.0424, "step": 377 },
    { "epoch": 5.4, "learning_rate": 7.326164874551972e-05, "loss": 0.064, "step": 378 },
    { "epoch": 5.41, "learning_rate": 7.318996415770609e-05, "loss": 0.0523, "step": 379 },
    { "epoch": 5.43, "learning_rate": 7.311827956989248e-05, "loss": 0.056, "step": 380 },
    { "epoch": 5.44, "learning_rate": 7.304659498207886e-05, "loss": 0.0489, "step": 381 },
    { "epoch": 5.46, "learning_rate": 7.297491039426523e-05, "loss": 0.0569, "step": 382 },
    { "epoch": 5.47, "learning_rate": 7.290322580645161e-05, "loss": 0.07, "step": 383 },
    { "epoch": 5.49, "learning_rate": 7.2831541218638e-05, "loss": 0.0555, "step": 384 },
    { "epoch": 5.5, "learning_rate": 7.275985663082438e-05, "loss": 0.0479, "step": 385 },
    { "epoch": 5.51, "learning_rate": 7.268817204301076e-05, "loss": 0.0493, "step": 386 },
    { "epoch": 5.53, "learning_rate": 7.261648745519714e-05, "loss": 0.0703, "step": 387 },
    { "epoch": 5.54, "learning_rate": 7.254480286738353e-05, "loss": 0.0446, "step": 388 },
    { "epoch": 5.56, "learning_rate": 7.247311827956989e-05, "loss": 0.0477, "step": 389 },
    { "epoch": 5.57, "learning_rate": 7.240143369175627e-05, "loss": 0.0435, "step": 390 },
    { "epoch": 5.59, "learning_rate": 7.232974910394266e-05, "loss": 0.0527, "step": 391 },
    { "epoch": 5.6, "learning_rate": 7.225806451612904e-05, "loss": 0.0674, "step": 392 },
    { "epoch": 5.61, "learning_rate": 7.218637992831541e-05, "loss": 0.0377, "step": 393 },
    { "epoch": 5.63, "learning_rate": 7.211469534050179e-05, "loss": 0.0392, "step": 394 },
    { "epoch": 5.64, "learning_rate": 7.204301075268818e-05, "loss": 0.0566, "step": 395 },
    { "epoch": 5.66, "learning_rate": 7.197132616487455e-05, "loss": 0.0457, "step": 396 },
    { "epoch": 5.67, "learning_rate": 7.189964157706094e-05, "loss": 0.0562, "step": 397 },
    { "epoch": 5.69, "learning_rate": 7.182795698924732e-05, "loss": 0.0365, "step": 398 },
    { "epoch": 5.7, "learning_rate": 7.17562724014337e-05, "loss": 0.0502, "step": 399 },
    { "epoch": 5.71, "learning_rate": 7.168458781362007e-05, "loss": 0.0587, "step": 400 },
    { "epoch": 5.73, "learning_rate": 7.161290322580646e-05, "loss": 0.042, "step": 401 },
    { "epoch": 5.74, "learning_rate": 7.154121863799284e-05, "loss": 0.0493, "step": 402 },
    { "epoch": 5.76, "learning_rate": 7.14695340501792e-05, "loss": 0.0517, "step": 403 },
    { "epoch": 5.77, "learning_rate": 7.13978494623656e-05, "loss": 0.0534, "step": 404 },
    { "epoch": 5.79, "learning_rate": 7.132616487455197e-05, "loss": 0.0429, "step": 405 },
    { "epoch": 5.8, "learning_rate": 7.125448028673835e-05, "loss": 0.0468, "step": 406 },
    { "epoch": 5.81, "learning_rate": 7.118279569892473e-05, "loss": 0.0498, "step": 407 },
    { "epoch": 5.83, "learning_rate": 7.111111111111112e-05, "loss": 0.0539, "step": 408 },
    { "epoch": 5.84, "learning_rate": 7.10394265232975e-05, "loss": 0.0503, "step": 409 },
    { "epoch": 5.86, "learning_rate": 7.096774193548388e-05, "loss": 0.0576, "step": 410 },
    { "epoch": 5.87, "learning_rate": 7.089605734767025e-05, "loss": 0.0692, "step": 411 },
    { "epoch": 5.89, "learning_rate": 7.082437275985664e-05, "loss": 0.0446, "step": 412 },
    { "epoch": 5.9, "learning_rate": 7.075268817204301e-05, "loss": 0.0464, "step": 413 },
    { "epoch": 5.91, "learning_rate": 7.06810035842294e-05, "loss": 0.0496, "step": 414 },
    { "epoch": 5.93, "learning_rate": 7.060931899641578e-05, "loss": 0.0622, "step": 415 },
    { "epoch": 5.94, "learning_rate": 7.053763440860215e-05, "loss": 0.0487, "step": 416 },
    { "epoch": 5.96, "learning_rate": 7.046594982078853e-05, "loss": 0.0819, "step": 417 },
    { "epoch": 5.97, "learning_rate": 7.039426523297491e-05, "loss": 0.0515, "step": 418 },
    { "epoch": 5.99, "learning_rate": 7.03225806451613e-05, "loss": 0.0506, "step": 419 },
    { "epoch": 6.0, "learning_rate": 7.025089605734767e-05, "loss": 0.0396, "step": 420 },
    { "epoch": 6.0, "eval_loss": 3.0768535137176514, "eval_runtime": 29.2391, "eval_samples_per_second": 4.104, "eval_steps_per_second": 0.513, "step": 420 },
    { "epoch": 6.01, "learning_rate": 7.017921146953406e-05, "loss": 0.0488, "step": 421 },
    { "epoch": 6.03, "learning_rate": 7.010752688172043e-05, "loss": 0.0502, "step": 422 },
    { "epoch": 6.04, "learning_rate": 7.003584229390681e-05, "loss": 0.0448, "step": 423 },
    { "epoch": 6.06, "learning_rate": 6.996415770609319e-05, "loss": 0.0394, "step": 424 },
    { "epoch": 6.07, "learning_rate": 6.989247311827958e-05, "loss": 0.0339, "step": 425 },
    { "epoch": 6.09, "learning_rate": 6.982078853046596e-05, "loss": 0.0545, "step": 426 },
    { "epoch": 6.1, "learning_rate": 6.974910394265234e-05, "loss": 0.0426, "step": 427 },
    { "epoch": 6.11, "learning_rate": 6.967741935483871e-05, "loss": 0.0497, "step": 428 },
    { "epoch": 6.13, "learning_rate": 6.960573476702509e-05, "loss": 0.0474, "step": 429 },
    { "epoch": 6.14, "learning_rate": 6.953405017921147e-05, "loss": 0.0363, "step": 430 },
    { "epoch": 6.16, "learning_rate": 6.946236559139785e-05, "loss": 0.0407, "step": 431 },
    { "epoch": 6.17, "learning_rate": 6.939068100358424e-05, "loss": 0.0302, "step": 432 },
    { "epoch": 6.19, "learning_rate": 6.931899641577062e-05, "loss": 0.0422, "step": 433 },
    { "epoch": 6.2, "learning_rate": 6.924731182795699e-05, "loss": 0.0396, "step": 434 },
    { "epoch": 6.21, "learning_rate": 6.917562724014337e-05, "loss": 0.0476, "step": 435 },
    { "epoch": 6.23, "learning_rate": 6.910394265232976e-05, "loss": 0.0305, "step": 436 },
    { "epoch": 6.24, "learning_rate": 6.903225806451613e-05, "loss": 0.0492, "step": 437 },
    { "epoch": 6.26, "learning_rate": 6.896057347670252e-05, "loss": 0.0454, "step": 438 },
    { "epoch": 6.27, "learning_rate": 6.88888888888889e-05, "loss": 0.0423, "step": 439 },
    { "epoch": 6.29, "learning_rate": 6.881720430107527e-05, "loss": 0.0493, "step": 440 },
    { "epoch": 6.3, "learning_rate": 6.874551971326165e-05, "loss": 0.0362, "step": 441 },
    { "epoch": 6.31, "learning_rate": 6.867383512544803e-05, "loss": 0.0508, "step": 442 },
    { "epoch": 6.33, "learning_rate": 6.860215053763442e-05, "loss": 0.0374, "step": 443 },
    { "epoch": 6.34, "learning_rate": 6.853046594982078e-05, "loss": 0.0447, "step": 444 },
    { "epoch": 6.36, "learning_rate": 6.845878136200717e-05, "loss": 0.0343, "step": 445 },
    { "epoch": 6.37, "learning_rate": 6.838709677419355e-05, "loss": 0.049, "step": 446 },
    { "epoch": 6.39, "learning_rate": 6.831541218637993e-05, "loss": 0.0429, "step": 447 },
    { "epoch": 6.4, "learning_rate": 6.824372759856631e-05, "loss": 0.038, "step": 448 },
    { "epoch": 6.41, "learning_rate": 6.81720430107527e-05, "loss": 0.0396, "step": 449 },
    { "epoch": 6.43, "learning_rate": 6.810035842293908e-05, "loss": 0.0355, "step": 450 },
    { "epoch": 6.44, "learning_rate": 6.802867383512545e-05, "loss": 0.0349, "step": 451 },
    { "epoch": 6.46, "learning_rate": 6.795698924731183e-05, "loss": 0.0478, "step": 452 },
    { "epoch": 6.47, "learning_rate": 6.788530465949821e-05, "loss": 0.0359, "step": 453 },
    { "epoch": 6.49, "learning_rate": 6.781362007168459e-05, "loss": 0.0338, "step": 454 },
    { "epoch": 6.5, "learning_rate": 6.774193548387096e-05, "loss": 0.0393, "step": 455 },
    { "epoch": 6.51, "learning_rate": 6.767025089605736e-05, "loss": 0.0331, "step": 456 },
    { "epoch": 6.53, "learning_rate": 6.759856630824373e-05, "loss": 0.0368, "step": 457 },
    { "epoch": 6.54, "learning_rate": 6.752688172043011e-05, "loss": 0.0353, "step": 458 },
    { "epoch": 6.56, "learning_rate": 6.745519713261649e-05, "loss": 0.0534, "step": 459 },
    { "epoch": 6.57, "learning_rate": 6.738351254480288e-05, "loss": 0.0422, "step": 460 },
    { "epoch": 6.59, "learning_rate": 6.731182795698924e-05, "loss": 0.0528, "step": 461 },
    { "epoch": 6.6, "learning_rate": 6.724014336917564e-05, "loss": 0.0408, "step": 462 },
    { "epoch": 6.61, "learning_rate": 6.716845878136201e-05, "loss": 0.0351, "step": 463 },
    { "epoch": 6.63, "learning_rate": 6.709677419354839e-05, "loss": 0.0353, "step": 464 },
    { "epoch": 6.64, "learning_rate": 6.702508960573477e-05, "loss": 0.0283, "step": 465 },
    { "epoch": 6.66, "learning_rate": 6.695340501792115e-05, "loss": 0.0374, "step": 466 },
    { "epoch": 6.67, "learning_rate": 6.688172043010754e-05, "loss": 0.0542, "step": 467 },
    { "epoch": 6.69, "learning_rate": 6.68100358422939e-05, "loss": 0.0364, "step": 468 },
    { "epoch": 6.7, "learning_rate": 6.673835125448029e-05, "loss": 0.0391, "step": 469 },
    { "epoch": 6.71, "learning_rate": 6.666666666666667e-05, "loss": 0.0888, "step": 470 },
    { "epoch": 6.73, "learning_rate": 6.659498207885305e-05, "loss": 0.0317, "step": 471 },
    { "epoch": 6.74, "learning_rate": 6.652329749103943e-05, "loss": 0.0484, "step": 472 },
    { "epoch": 6.76, "learning_rate": 6.645161290322582e-05, "loss": 0.0424, "step": 473 },
    { "epoch": 6.77, "learning_rate": 6.63799283154122e-05, "loss": 0.0398, "step": 474 },
    { "epoch": 6.79, "learning_rate": 6.630824372759857e-05, "loss": 0.0367, "step": 475 },
    { "epoch": 6.8, "learning_rate": 6.623655913978495e-05, "loss": 0.0311, "step": 476 },
    { "epoch": 6.81, "learning_rate": 6.616487455197134e-05, "loss": 0.0375, "step": 477 },
    { "epoch": 6.83, "learning_rate": 6.60931899641577e-05, "loss": 0.0507, "step": 478 },
    { "epoch": 6.84, "learning_rate": 6.602150537634408e-05, "loss": 0.0462, "step": 479 },
    { "epoch": 6.86, "learning_rate": 6.594982078853047e-05, "loss": 0.0378, "step": 480 },
    { "epoch": 6.87, "learning_rate": 6.587813620071685e-05, "loss": 0.0475, "step": 481 },
    { "epoch": 6.89, "learning_rate": 6.580645161290323e-05, "loss": 0.033, "step": 482 },
    { "epoch": 6.9, "learning_rate": 6.57347670250896e-05, "loss": 0.0366, "step": 483 },
    { "epoch": 6.91, "learning_rate": 6.5663082437276e-05, "loss": 0.04, "step": 484 },
    { "epoch": 6.93, "learning_rate": 6.559139784946236e-05, "loss": 0.0431, "step": 485 },
    { "epoch": 6.94, "learning_rate": 6.551971326164875e-05, "loss": 0.0531, "step": 486 },
    { "epoch": 6.96, "learning_rate": 6.544802867383513e-05, "loss": 0.0307, "step": 487 },
    { "epoch": 6.97, "learning_rate": 6.537634408602151e-05, "loss": 0.0458, "step": 488 },
    { "epoch": 6.99, "learning_rate": 6.530465949820789e-05, "loss": 0.0374, "step": 489 },
    { "epoch": 7.0, "learning_rate": 6.523297491039428e-05, "loss": 0.0429, "step": 490 },
    { "epoch": 7.0, "eval_loss": 3.150350332260132, "eval_runtime": 29.1939, "eval_samples_per_second": 4.11, "eval_steps_per_second": 0.514, "step": 490 },
    { "epoch": 7.01, "learning_rate": 6.516129032258065e-05, "loss": 0.0268, "step": 491 },
    { "epoch": 7.03, "learning_rate": 6.508960573476702e-05, "loss": 0.0377, "step": 492 },
    { "epoch": 7.04, "learning_rate": 6.501792114695341e-05, "loss": 0.0309, "step": 493 },
    { "epoch": 7.06, "learning_rate": 6.494623655913979e-05, "loss": 0.0334, "step": 494 },
    { "epoch": 7.07, "learning_rate": 6.487455197132617e-05, "loss": 0.0266, "step": 495 },
    { "epoch": 7.09, "learning_rate": 6.480286738351254e-05, "loss": 0.0326, "step": 496 },
    { "epoch": 7.1, "learning_rate": 6.473118279569893e-05, "loss": 0.0319, "step": 497 },
    { "epoch": 7.11, "learning_rate": 6.465949820788531e-05, "loss": 0.0269, "step": 498 },
    { "epoch": 7.13, "learning_rate": 6.458781362007169e-05, "loss": 0.0335, "step": 499 },
    { "epoch": 7.14, "learning_rate": 6.451612903225807e-05, "loss": 0.0333, "step": 500 },
    { "epoch": 7.16, "learning_rate": 6.444444444444446e-05, "loss": 0.0297, "step": 501 },
    { "epoch": 7.17, "learning_rate": 6.437275985663082e-05, "loss": 0.0399, "step": 502 },
    { "epoch": 7.19, "learning_rate": 6.430107526881721e-05, "loss": 0.0341, "step": 503 },
    { "epoch": 7.2, "learning_rate": 6.422939068100359e-05, "loss": 0.0243, "step": 504 },
    { "epoch": 7.21, "learning_rate": 6.415770609318996e-05, "loss": 0.0423, "step": 505 },
    { "epoch": 7.23, "learning_rate": 6.408602150537635e-05, "loss": 0.0373, "step": 506 },
    { "epoch": 7.24, "learning_rate": 6.401433691756272e-05, "loss": 0.0416, "step": 507 },
    { "epoch": 7.26, "learning_rate": 6.39426523297491e-05, "loss": 0.0397, "step": 508 },
    { "epoch": 7.27, "learning_rate": 6.387096774193548e-05, "loss": 0.0279, "step": 509 },
    { "epoch": 7.29, "learning_rate": 6.379928315412187e-05, "loss": 0.0331, "step": 510 },
    { "epoch": 7.3, "learning_rate": 6.372759856630825e-05, "loss": 0.0296, "step": 511 },
    { "epoch": 7.31, "learning_rate": 6.365591397849463e-05, "loss": 0.0356, "step": 512 },
    { "epoch": 7.33, "learning_rate": 6.3584229390681e-05, "loss": 0.0301, "step": 513 },
    { "epoch": 7.34, "learning_rate": 6.35125448028674e-05, "loss": 0.0236, "step": 514 },
    { "epoch": 7.36, "learning_rate": 6.344086021505376e-05, "loss": 0.0299, "step": 515 },
    { "epoch": 7.37, "learning_rate": 6.336917562724015e-05, "loss": 0.0494, "step": 516 },
    { "epoch": 7.39, "learning_rate": 6.329749103942653e-05, "loss": 0.0372, "step": 517 },
    { "epoch": 7.4, "learning_rate": 6.32258064516129e-05, "loss": 0.0275, "step": 518 },
    { "epoch": 7.41, "learning_rate": 6.315412186379928e-05, "loss": 0.0298, "step": 519 },
    { "epoch": 7.43, "learning_rate": 6.308243727598566e-05, "loss": 0.0359, "step": 520 },
    { "epoch": 7.44, "learning_rate": 6.301075268817205e-05, "loss": 0.0318, "step": 521 },
    { "epoch": 7.46, "learning_rate": 6.293906810035842e-05, "loss": 0.0294, "step": 522 },
    { "epoch": 7.47, "learning_rate": 6.286738351254481e-05, "loss": 0.0369, "step": 523 },
    { "epoch": 7.49, "learning_rate": 6.279569892473119e-05, "loss": 0.0305, "step": 524 },
    { "epoch": 7.5, "learning_rate": 6.272401433691756e-05, "loss": 0.0315, "step": 525 },
    { "epoch": 7.51, "learning_rate": 6.265232974910394e-05, "loss": 0.033, "step": 526 },
    { "epoch": 7.53, "learning_rate": 6.258064516129033e-05, "loss": 0.0396, "step": 527 },
    { "epoch": 7.54, "learning_rate": 6.250896057347671e-05, "loss": 0.0416, "step": 528 },
    { "epoch": 7.56, "learning_rate": 6.243727598566309e-05, "loss": 0.0376, "step": 529 },
    { "epoch": 7.57, "learning_rate": 6.236559139784946e-05,
|
"loss": 0.0428, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 6.229390681003584e-05, |
|
"loss": 0.0367, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 6.222222222222222e-05, |
|
"loss": 0.0307, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 6.21505376344086e-05, |
|
"loss": 0.0283, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 6.207885304659499e-05, |
|
"loss": 0.0329, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 6.200716845878137e-05, |
|
"loss": 0.0372, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 6.193548387096774e-05, |
|
"loss": 0.0331, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 6.186379928315412e-05, |
|
"loss": 0.0283, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 6.179211469534051e-05, |
|
"loss": 0.0331, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 6.172043010752688e-05, |
|
"loss": 0.0415, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 6.164874551971327e-05, |
|
"loss": 0.0349, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 6.157706093189965e-05, |
|
"loss": 0.0315, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 6.150537634408602e-05, |
|
"loss": 0.0346, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 6.14336917562724e-05, |
|
"loss": 0.0349, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 6.136200716845878e-05, |
|
"loss": 0.0268, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 6.129032258064517e-05, |
|
"loss": 0.0258, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 6.121863799283153e-05, |
|
"loss": 0.0347, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 6.114695340501793e-05, |
|
"loss": 0.0314, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 6.10752688172043e-05, |
|
"loss": 0.0303, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 6.100358422939069e-05, |
|
"loss": 0.0392, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 6.093189964157706e-05, |
|
"loss": 0.0289, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 6.086021505376345e-05, |
|
"loss": 0.0389, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 6.078853046594982e-05, |
|
"loss": 0.0365, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 6.071684587813621e-05, |
|
"loss": 0.0386, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 6.064516129032258e-05, |
|
"loss": 0.0323, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 6.057347670250897e-05, |
|
"loss": 0.0402, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 6.0501792114695344e-05, |
|
"loss": 0.0351, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 6.0430107526881715e-05, |
|
"loss": 0.0323, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 6.0358422939068106e-05, |
|
"loss": 0.043, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 6.028673835125448e-05, |
|
"loss": 0.0244, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 6.021505376344086e-05, |
|
"loss": 0.0318, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 3.2689783573150635, |
|
"eval_runtime": 29.2298, |
|
"eval_samples_per_second": 4.105, |
|
"eval_steps_per_second": 0.513, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 6.014336917562724e-05, |
|
"loss": 0.0246, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 6.0071684587813624e-05, |
|
"loss": 0.0281, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 6e-05, |
|
"loss": 0.025, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 5.9928315412186386e-05, |
|
"loss": 0.0283, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 5.9856630824372764e-05, |
|
"loss": 0.026, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 5.978494623655915e-05, |
|
"loss": 0.037, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 5.971326164874552e-05, |
|
"loss": 0.0284, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 5.964157706093191e-05, |
|
"loss": 0.0316, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 5.956989247311828e-05, |
|
"loss": 0.027, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 5.949820788530466e-05, |
|
"loss": 0.0242, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 5.942652329749104e-05, |
|
"loss": 0.023, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 5.935483870967742e-05, |
|
"loss": 0.0243, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 5.9283154121863805e-05, |
|
"loss": 0.034, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 5.9211469534050176e-05, |
|
"loss": 0.027, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 5.913978494623657e-05, |
|
"loss": 0.0265, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 5.906810035842294e-05, |
|
"loss": 0.0269, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 5.899641577060932e-05, |
|
"loss": 0.0239, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 5.89247311827957e-05, |
|
"loss": 0.0331, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 5.8853046594982085e-05, |
|
"loss": 0.0244, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 5.878136200716846e-05, |
|
"loss": 0.0257, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 5.870967741935483e-05, |
|
"loss": 0.0279, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 5.8637992831541224e-05, |
|
"loss": 0.0235, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 5.8566308243727595e-05, |
|
"loss": 0.0265, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 5.849462365591398e-05, |
|
"loss": 0.0206, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 5.842293906810036e-05, |
|
"loss": 0.0278, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 5.835125448028674e-05, |
|
"loss": 0.0287, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 5.827956989247312e-05, |
|
"loss": 0.0283, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 5.8207885304659504e-05, |
|
"loss": 0.0315, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 5.813620071684588e-05, |
|
"loss": 0.0289, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 5.8064516129032266e-05, |
|
"loss": 0.0273, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 5.799283154121864e-05, |
|
"loss": 0.0328, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 5.792114695340503e-05, |
|
"loss": 0.0279, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 5.78494623655914e-05, |
|
"loss": 0.0273, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 5.7777777777777776e-05, |
|
"loss": 0.0355, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 5.770609318996416e-05, |
|
"loss": 0.0267, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 5.763440860215054e-05, |
|
"loss": 0.0256, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 5.756272401433692e-05, |
|
"loss": 0.0289, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 5.7491039426523294e-05, |
|
"loss": 0.0296, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 5.7419354838709685e-05, |
|
"loss": 0.0245, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 5.7347670250896056e-05, |
|
"loss": 0.0348, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 5.727598566308244e-05, |
|
"loss": 0.025, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 5.720430107526882e-05, |
|
"loss": 0.0278, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 5.71326164874552e-05, |
|
"loss": 0.034, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 5.706093189964158e-05, |
|
"loss": 0.0251, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"learning_rate": 5.6989247311827965e-05, |
|
"loss": 0.0274, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 5.691756272401434e-05, |
|
"loss": 0.0292, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 5.684587813620071e-05, |
|
"loss": 0.0299, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 5.67741935483871e-05, |
|
"loss": 0.0336, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 5.6702508960573475e-05, |
|
"loss": 0.0332, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 5.663082437275986e-05, |
|
"loss": 0.0334, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 5.655913978494624e-05, |
|
"loss": 0.0332, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 5.648745519713262e-05, |
|
"loss": 0.0252, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 5.6415770609319e-05, |
|
"loss": 0.027, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 5.6344086021505384e-05, |
|
"loss": 0.0356, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 5.6272401433691755e-05, |
|
"loss": 0.0308, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 5.6200716845878146e-05, |
|
"loss": 0.0293, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"learning_rate": 5.612903225806452e-05, |
|
"loss": 0.0315, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 5.60573476702509e-05, |
|
"loss": 0.0319, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 5.598566308243728e-05, |
|
"loss": 0.0246, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 5.5913978494623656e-05, |
|
"loss": 0.0263, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 5.584229390681004e-05, |
|
"loss": 0.0241, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 5.577060931899641e-05, |
|
"loss": 0.0236, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 5.56989247311828e-05, |
|
"loss": 0.0257, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 5.5627240143369174e-05, |
|
"loss": 0.0389, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 5.555555555555556e-05, |
|
"loss": 0.0272, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 5.5483870967741936e-05, |
|
"loss": 0.0351, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 5.541218637992832e-05, |
|
"loss": 0.0288, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 5.53405017921147e-05, |
|
"loss": 0.0295, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 5.526881720430108e-05, |
|
"loss": 0.0295, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 5.519713261648746e-05, |
|
"loss": 0.03, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_loss": 3.481818675994873, |
|
"eval_runtime": 29.2267, |
|
"eval_samples_per_second": 4.106, |
|
"eval_steps_per_second": 0.513, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 5.5125448028673844e-05, |
|
"loss": 0.0323, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 5.5053763440860215e-05, |
|
"loss": 0.03, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 5.498207885304659e-05, |
|
"loss": 0.0231, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 5.491039426523298e-05, |
|
"loss": 0.0247, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 5.4838709677419355e-05, |
|
"loss": 0.0241, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 5.476702508960574e-05, |
|
"loss": 0.0375, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 5.469534050179212e-05, |
|
"loss": 0.0218, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 5.46236559139785e-05, |
|
"loss": 0.0222, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 5.455197132616487e-05, |
|
"loss": 0.0236, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"learning_rate": 5.4480286738351264e-05, |
|
"loss": 0.0324, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 5.4408602150537635e-05, |
|
"loss": 0.0234, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 5.433691756272402e-05, |
|
"loss": 0.0196, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 5.4265232974910397e-05, |
|
"loss": 0.0235, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 5.419354838709678e-05, |
|
"loss": 0.0196, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 5.412186379928316e-05, |
|
"loss": 0.0287, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 5.405017921146953e-05, |
|
"loss": 0.0237, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 5.397849462365592e-05, |
|
"loss": 0.0278, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 5.390681003584229e-05, |
|
"loss": 0.0234, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 5.3835125448028676e-05, |
|
"loss": 0.0285, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 5.3763440860215054e-05, |
|
"loss": 0.0292, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"learning_rate": 5.369175627240144e-05, |
|
"loss": 0.0205, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 5.3620071684587816e-05, |
|
"loss": 0.0285, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"learning_rate": 5.35483870967742e-05, |
|
"loss": 0.0353, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 5.347670250896057e-05, |
|
"loss": 0.0289, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 5.340501792114696e-05, |
|
"loss": 0.0231, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 0.0329, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 5.3261648745519724e-05, |
|
"loss": 0.0257, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 5.3189964157706095e-05, |
|
"loss": 0.0275, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 5.311827956989247e-05, |
|
"loss": 0.0275, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 5.304659498207886e-05, |
|
"loss": 0.0304, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 5.297491039426523e-05, |
|
"loss": 0.0233, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 5.290322580645162e-05, |
|
"loss": 0.0253, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 5.283154121863799e-05, |
|
"loss": 0.0296, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 5.2759856630824375e-05, |
|
"loss": 0.0239, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 5.268817204301075e-05, |
|
"loss": 0.0315, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 5.261648745519714e-05, |
|
"loss": 0.0297, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 5.2544802867383514e-05, |
|
"loss": 0.026, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 5.24731182795699e-05, |
|
"loss": 0.0235, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"learning_rate": 5.2401433691756277e-05, |
|
"loss": 0.0279, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 5.232974910394266e-05, |
|
"loss": 0.0223, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 5.225806451612903e-05, |
|
"loss": 0.0229, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 5.218637992831541e-05, |
|
"loss": 0.0259, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 5.2114695340501794e-05, |
|
"loss": 0.0262, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 5.204301075268817e-05, |
|
"loss": 0.0266, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 5.1971326164874556e-05, |
|
"loss": 0.0217, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 5.1899641577060934e-05, |
|
"loss": 0.0243, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 5.182795698924732e-05, |
|
"loss": 0.0318, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 5.175627240143369e-05, |
|
"loss": 0.0239, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 5.168458781362008e-05, |
|
"loss": 0.0289, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 5.161290322580645e-05, |
|
"loss": 0.0282, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 5.1541218637992835e-05, |
|
"loss": 0.0288, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 5.146953405017921e-05, |
|
"loss": 0.0269, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 5.13978494623656e-05, |
|
"loss": 0.0259, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 5.1326164874551975e-05, |
|
"loss": 0.0306, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 5.1254480286738346e-05, |
|
"loss": 0.0246, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 5.118279569892474e-05, |
|
"loss": 0.0243, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 5.111111111111111e-05, |
|
"loss": 0.0231, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 5.103942652329749e-05, |
|
"loss": 0.0317, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 5.096774193548387e-05, |
|
"loss": 0.0276, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 5.0896057347670255e-05, |
|
"loss": 0.0375, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 5.082437275985663e-05, |
|
"loss": 0.033, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 5.075268817204302e-05, |
|
"loss": 0.0272, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 5.0681003584229394e-05, |
|
"loss": 0.0262, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 5.060931899641578e-05, |
|
"loss": 0.0224, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 5.053763440860215e-05, |
|
"loss": 0.0313, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 5.046594982078854e-05, |
|
"loss": 0.0303, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 5.039426523297491e-05, |
|
"loss": 0.0337, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 5.032258064516129e-05, |
|
"loss": 0.0342, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"learning_rate": 5.0250896057347674e-05, |
|
"loss": 0.0314, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 5.017921146953405e-05, |
|
"loss": 0.0258, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_loss": 3.6010735034942627, |
|
"eval_runtime": 29.22, |
|
"eval_samples_per_second": 4.107, |
|
"eval_steps_per_second": 0.513, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"learning_rate": 5.0107526881720436e-05, |
|
"loss": 0.0205, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 5.003584229390681e-05, |
|
"loss": 0.0217, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"learning_rate": 4.996415770609319e-05, |
|
"loss": 0.0279, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"learning_rate": 4.989247311827957e-05, |
|
"loss": 0.0252, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"learning_rate": 4.982078853046595e-05, |
|
"loss": 0.0241, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"learning_rate": 4.974910394265233e-05, |
|
"loss": 0.0243, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 4.967741935483871e-05, |
|
"loss": 0.0208, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"learning_rate": 4.960573476702509e-05, |
|
"loss": 0.0209, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 4.953405017921147e-05, |
|
"loss": 0.0275, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 4.9462365591397855e-05, |
|
"loss": 0.0237, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 10.16, |
|
"learning_rate": 4.939068100358423e-05, |
|
"loss": 0.0246, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"learning_rate": 4.931899641577061e-05, |
|
"loss": 0.0242, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"learning_rate": 4.9247311827956995e-05, |
|
"loss": 0.033, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 4.917562724014337e-05, |
|
"loss": 0.0309, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"learning_rate": 4.910394265232976e-05, |
|
"loss": 0.0185, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 4.903225806451613e-05, |
|
"loss": 0.0205, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 10.24, |
|
"learning_rate": 4.896057347670251e-05, |
|
"loss": 0.0265, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"learning_rate": 4.888888888888889e-05, |
|
"loss": 0.0241, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"learning_rate": 4.881720430107527e-05, |
|
"loss": 0.0248, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"learning_rate": 4.874551971326165e-05, |
|
"loss": 0.0247, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 10.3, |
|
"learning_rate": 4.867383512544803e-05, |
|
"loss": 0.027, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"learning_rate": 4.8602150537634414e-05, |
|
"loss": 0.0267, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"learning_rate": 4.853046594982079e-05, |
|
"loss": 0.0281, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 10.34, |
|
"learning_rate": 4.845878136200717e-05, |
|
"loss": 0.0234, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"learning_rate": 4.8387096774193554e-05, |
|
"loss": 0.0264, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 4.831541218637993e-05, |
|
"loss": 0.0274, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 10.39, |
|
"learning_rate": 4.8243727598566316e-05, |
|
"loss": 0.021, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 4.8172043010752693e-05, |
|
"loss": 0.0252, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"learning_rate": 4.810035842293907e-05, |
|
"loss": 0.0262, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 4.802867383512545e-05, |
|
"loss": 0.0269, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"learning_rate": 4.7956989247311826e-05, |
|
"loss": 0.0263, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 10.46, |
|
"learning_rate": 4.788530465949821e-05, |
|
"loss": 0.0317, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"learning_rate": 4.781362007168459e-05, |
|
"loss": 0.0264, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 10.49, |
|
"learning_rate": 4.774193548387097e-05, |
|
"loss": 0.033, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 4.767025089605735e-05, |
|
"loss": 0.0234, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 10.51, |
|
"learning_rate": 4.759856630824373e-05, |
|
"loss": 0.026, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 10.53, |
|
"learning_rate": 4.752688172043011e-05, |
|
"loss": 0.0251, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 10.54, |
|
"learning_rate": 4.745519713261649e-05, |
|
"loss": 0.0251, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 10.56, |
|
"learning_rate": 4.7383512544802875e-05, |
|
"loss": 0.0246, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 4.731182795698925e-05, |
|
"loss": 0.0263, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"learning_rate": 4.724014336917563e-05, |
|
"loss": 0.0304, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"learning_rate": 4.716845878136201e-05, |
|
"loss": 0.029, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 4.7096774193548385e-05, |
|
"loss": 0.0192, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"learning_rate": 4.702508960573477e-05, |
|
"loss": 0.0267, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 4.695340501792115e-05, |
|
"loss": 0.0286, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"learning_rate": 4.688172043010753e-05, |
|
"loss": 0.0222, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 10.67, |
|
"learning_rate": 4.681003584229391e-05, |
|
"loss": 0.026, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"learning_rate": 4.673835125448029e-05, |
|
"loss": 0.0248, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 0.0209, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 4.659498207885305e-05, |
|
"loss": 0.0291, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 4.6523297491039434e-05, |
|
"loss": 0.0245, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 10.74, |
|
"learning_rate": 4.645161290322581e-05, |
|
"loss": 0.0255, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 4.637992831541219e-05, |
|
"loss": 0.0254, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 10.77, |
|
"learning_rate": 4.630824372759857e-05, |
|
"loss": 0.0247, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 10.79, |
|
"learning_rate": 4.6236559139784944e-05, |
|
"loss": 0.0236, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"learning_rate": 4.616487455197133e-05, |
|
"loss": 0.023, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 10.81, |
|
"learning_rate": 4.6093189964157706e-05, |
|
"loss": 0.023, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 4.6021505376344084e-05, |
|
"loss": 0.0262, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 10.84, |
|
"learning_rate": 4.594982078853047e-05, |
|
"loss": 0.0207, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 10.86, |
|
"learning_rate": 4.5878136200716846e-05, |
|
"loss": 0.0248, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"learning_rate": 4.580645161290323e-05, |
|
"loss": 0.0246, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 4.573476702508961e-05, |
|
"loss": 0.0307, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"learning_rate": 4.5663082437275986e-05, |
|
"loss": 0.0215, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 10.91, |
|
"learning_rate": 4.559139784946237e-05, |
|
"loss": 0.0266, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 10.93, |
|
"learning_rate": 4.551971326164875e-05, |
|
"loss": 0.029, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 4.544802867383513e-05, |
|
"loss": 0.0233, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 10.96, |
|
"learning_rate": 4.53763440860215e-05, |
|
"loss": 0.0236, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"learning_rate": 4.530465949820789e-05, |
|
"loss": 0.0293, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 4.5232974910394265e-05, |
|
"loss": 0.031, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"learning_rate": 4.516129032258064e-05, |
|
"loss": 0.0247, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_loss": 3.75777530670166, |
|
"eval_runtime": 29.2261, |
|
"eval_samples_per_second": 4.106, |
|
"eval_steps_per_second": 0.513, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 4.508960573476703e-05, |
|
"loss": 0.0254, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 4.5017921146953405e-05, |
|
"loss": 0.0222, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"learning_rate": 4.494623655913979e-05, |
|
"loss": 0.0321, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 11.06, |
|
"learning_rate": 4.487455197132617e-05, |
|
"loss": 0.0387, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"learning_rate": 4.4802867383512545e-05, |
|
"loss": 0.0288, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 11.09, |
|
"learning_rate": 4.473118279569893e-05, |
|
"loss": 0.0198, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"learning_rate": 4.465949820788531e-05, |
|
"loss": 0.0196, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"learning_rate": 4.458781362007169e-05, |
|
"loss": 0.0276, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 4.451612903225807e-05, |
|
"loss": 0.0229, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 11.14, |
|
"learning_rate": 4.4444444444444447e-05, |
|
"loss": 0.022, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"learning_rate": 4.4372759856630824e-05, |
|
"loss": 0.0204, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 11.17, |
|
"learning_rate": 4.43010752688172e-05, |
|
"loss": 0.0238, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 11.19, |
|
"learning_rate": 4.4229390681003586e-05, |
|
"loss": 0.0269, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"learning_rate": 4.4157706093189964e-05, |
|
"loss": 0.032, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 4.408602150537635e-05, |
|
"loss": 0.0229, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 11.23, |
|
"learning_rate": 4.4014336917562726e-05, |
|
"loss": 0.0224, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 11.24, |
|
"learning_rate": 4.3942652329749104e-05, |
|
"loss": 0.0188, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 11.26, |
|
"learning_rate": 4.387096774193549e-05, |
|
"loss": 0.026, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 11.27, |
|
"learning_rate": 4.3799283154121866e-05, |
|
"loss": 0.0189, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 11.29, |
|
"learning_rate": 4.372759856630825e-05, |
|
"loss": 0.0224, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"learning_rate": 4.365591397849463e-05, |
|
"loss": 0.0209, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 11.31, |
|
"learning_rate": 4.3584229390681005e-05, |
|
"loss": 0.0228, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 4.351254480286738e-05, |
|
"loss": 0.0236, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 11.34, |
|
"learning_rate": 4.344086021505376e-05, |
|
"loss": 0.0172, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 11.36, |
|
"learning_rate": 4.3369175627240145e-05, |
|
"loss": 0.0227, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"learning_rate": 4.329749103942652e-05, |
|
"loss": 0.0258, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 4.322580645161291e-05, |
|
"loss": 0.0242, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 11.4, |
|
"learning_rate": 4.3154121863799285e-05, |
|
"loss": 0.0268, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"learning_rate": 4.308243727598566e-05, |
|
"loss": 0.0251, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 11.43, |
|
"learning_rate": 4.301075268817205e-05, |
|
"loss": 0.027, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 11.44, |
|
"learning_rate": 4.2939068100358425e-05, |
|
"loss": 0.0239, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"learning_rate": 4.286738351254481e-05, |
|
"loss": 0.0202, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 11.47, |
|
"learning_rate": 4.279569892473119e-05, |
|
"loss": 0.0277, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"learning_rate": 4.2724014336917564e-05, |
|
"loss": 0.0277, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 11.5, |
|
"learning_rate": 4.265232974910394e-05, |
|
"loss": 0.0268, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 11.51, |
|
"learning_rate": 4.258064516129032e-05, |
|
"loss": 0.0223, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"learning_rate": 4.2508960573476704e-05, |
|
"loss": 0.0273, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 11.54, |
|
"learning_rate": 4.243727598566308e-05, |
|
"loss": 0.0194, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"learning_rate": 4.2365591397849466e-05, |
|
"loss": 0.0228, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 11.57, |
|
"learning_rate": 4.2293906810035844e-05, |
|
"loss": 0.031, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 11.59, |
|
"learning_rate": 4.222222222222222e-05, |
|
"loss": 0.027, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 11.6, |
|
"learning_rate": 4.2150537634408606e-05, |
|
"loss": 0.0231, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 4.2078853046594984e-05, |
|
"loss": 0.0246, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 11.63, |
|
"learning_rate": 4.200716845878137e-05, |
|
"loss": 0.0249, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 11.64, |
|
"learning_rate": 4.1935483870967746e-05, |
|
"loss": 0.0217, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 11.66, |
|
"learning_rate": 4.186379928315412e-05, |
|
"loss": 0.0289, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 4.179211469534051e-05, |
|
"loss": 0.023, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 11.69, |
|
"learning_rate": 4.172043010752688e-05, |
|
"loss": 0.026, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"learning_rate": 4.164874551971326e-05, |
|
"loss": 0.0175, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"learning_rate": 4.157706093189964e-05, |
|
"loss": 0.0232, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"learning_rate": 4.1505376344086025e-05, |
|
"loss": 0.0215, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"learning_rate": 4.14336917562724e-05, |
|
"loss": 0.0237, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"learning_rate": 4.136200716845878e-05, |
|
"loss": 0.0231, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"learning_rate": 4.1290322580645165e-05, |
|
"loss": 0.0256, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"learning_rate": 4.121863799283154e-05, |
|
"loss": 0.0221, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"learning_rate": 4.114695340501793e-05, |
|
"loss": 0.0232, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"learning_rate": 4.1075268817204305e-05, |
|
"loss": 0.0246, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"learning_rate": 4.100358422939068e-05, |
|
"loss": 0.0292, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 11.84, |
|
"learning_rate": 4.093189964157707e-05, |
|
"loss": 0.0301, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"learning_rate": 4.0860215053763444e-05, |
|
"loss": 0.0242, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"learning_rate": 4.078853046594982e-05, |
|
"loss": 0.0223, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"learning_rate": 4.07168458781362e-05, |
|
"loss": 0.0222, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 11.9, |
|
"learning_rate": 4.0645161290322584e-05, |
|
"loss": 0.0275, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"learning_rate": 4.057347670250896e-05, |
|
"loss": 0.0253, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"learning_rate": 4.050179211469534e-05, |
|
"loss": 0.0231, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 11.94, |
|
"learning_rate": 4.0430107526881724e-05, |
|
"loss": 0.0274, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"learning_rate": 4.03584229390681e-05, |
|
"loss": 0.0239, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 11.97, |
|
"learning_rate": 4.0286738351254486e-05, |
|
"loss": 0.0269, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 11.99, |
|
"learning_rate": 4.0215053763440864e-05, |
|
"loss": 0.027, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"learning_rate": 4.014336917562724e-05, |
|
"loss": 0.0287, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_loss": 3.883384943008423, |
|
"eval_runtime": 29.2074, |
|
"eval_samples_per_second": 4.109, |
|
"eval_steps_per_second": 0.514, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 12.01, |
|
"learning_rate": 4.0071684587813626e-05, |
|
"loss": 0.0214, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 4e-05, |
|
"loss": 0.03, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"learning_rate": 3.992831541218639e-05, |
|
"loss": 0.0271, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"learning_rate": 3.985663082437276e-05, |
|
"loss": 0.0249, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"learning_rate": 3.978494623655914e-05, |
|
"loss": 0.022, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 12.09, |
|
"learning_rate": 3.971326164874552e-05, |
|
"loss": 0.0244, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"learning_rate": 3.96415770609319e-05, |
|
"loss": 0.02, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 12.11, |
|
"learning_rate": 3.956989247311828e-05, |
|
"loss": 0.0193, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"learning_rate": 3.949820788530466e-05, |
|
"loss": 0.0205, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 12.14, |
|
"learning_rate": 3.9426523297491045e-05, |
|
"loss": 0.0246, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 12.16, |
|
"learning_rate": 3.935483870967742e-05, |
|
"loss": 0.0173, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"learning_rate": 3.92831541218638e-05, |
|
"loss": 0.0189, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"learning_rate": 3.9211469534050185e-05, |
|
"loss": 0.0186, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 12.2, |
|
"learning_rate": 3.913978494623656e-05, |
|
"loss": 0.0177, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 3.906810035842295e-05, |
|
"loss": 0.0209, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 12.23, |
|
"learning_rate": 3.8996415770609324e-05, |
|
"loss": 0.0245, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 3.8924731182795695e-05, |
|
"loss": 0.0221, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 12.26, |
|
"learning_rate": 3.885304659498208e-05, |
|
"loss": 0.0211, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 12.27, |
|
"learning_rate": 3.878136200716846e-05, |
|
"loss": 0.0284, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"learning_rate": 3.870967741935484e-05, |
|
"loss": 0.0191, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 12.3, |
|
"learning_rate": 3.863799283154122e-05, |
|
"loss": 0.0254, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"learning_rate": 3.85663082437276e-05, |
|
"loss": 0.0228, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"learning_rate": 3.849462365591398e-05, |
|
"loss": 0.0194, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"learning_rate": 3.842293906810036e-05, |
|
"loss": 0.0218, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 12.36, |
|
"learning_rate": 3.8351254480286743e-05, |
|
"loss": 0.026, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"learning_rate": 3.827956989247312e-05, |
|
"loss": 0.0201, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 12.39, |
|
"learning_rate": 3.82078853046595e-05, |
|
"loss": 0.0272, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"learning_rate": 3.813620071684588e-05, |
|
"loss": 0.0207, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 12.41, |
|
"learning_rate": 3.8064516129032254e-05, |
|
"loss": 0.0268, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 12.43, |
|
"learning_rate": 3.799283154121864e-05, |
|
"loss": 0.0189, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 12.44, |
|
"learning_rate": 3.7921146953405016e-05, |
|
"loss": 0.0213, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"learning_rate": 3.78494623655914e-05, |
|
"loss": 0.0227, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 12.47, |
|
"learning_rate": 3.777777777777778e-05, |
|
"loss": 0.0241, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"learning_rate": 3.7706093189964156e-05, |
|
"loss": 0.0228, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"learning_rate": 3.763440860215054e-05, |
|
"loss": 0.0257, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 12.51, |
|
"learning_rate": 3.756272401433692e-05, |
|
"loss": 0.0192, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 12.53, |
|
"learning_rate": 3.74910394265233e-05, |
|
"loss": 0.0232, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"learning_rate": 3.741935483870968e-05, |
|
"loss": 0.026, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 12.56, |
|
"learning_rate": 3.734767025089606e-05, |
|
"loss": 0.0213, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 12.57, |
|
"learning_rate": 3.727598566308244e-05, |
|
"loss": 0.0345, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"learning_rate": 3.720430107526882e-05, |
|
"loss": 0.0209, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 12.6, |
|
"learning_rate": 3.71326164874552e-05, |
|
"loss": 0.0221, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 3.7060931899641575e-05, |
|
"loss": 0.0305, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"learning_rate": 3.698924731182796e-05, |
|
"loss": 0.035, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"learning_rate": 3.691756272401434e-05, |
|
"loss": 0.0365, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"learning_rate": 3.6845878136200715e-05, |
|
"loss": 0.0228, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"learning_rate": 3.67741935483871e-05, |
|
"loss": 0.0245, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"learning_rate": 3.670250896057348e-05, |
|
"loss": 0.0265, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"learning_rate": 3.663082437275986e-05, |
|
"loss": 0.0263, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"learning_rate": 3.655913978494624e-05, |
|
"loss": 0.0278, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 12.73, |
|
"learning_rate": 3.648745519713262e-05, |
|
"loss": 0.0219, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 12.74, |
|
"learning_rate": 3.6415770609319e-05, |
|
"loss": 0.0247, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"learning_rate": 3.634408602150538e-05, |
|
"loss": 0.0207, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"learning_rate": 3.627240143369176e-05, |
|
"loss": 0.0322, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 3.6200716845878134e-05, |
|
"loss": 0.0242, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 12.8, |
|
"learning_rate": 3.612903225806452e-05, |
|
"loss": 0.026, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 3.6057347670250896e-05, |
|
"loss": 0.0233, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 12.83, |
|
"learning_rate": 3.5985663082437274e-05, |
|
"loss": 0.0288, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 3.591397849462366e-05, |
|
"loss": 0.0265, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 12.86, |
|
"learning_rate": 3.5842293906810036e-05, |
|
"loss": 0.0251, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 12.87, |
|
"learning_rate": 3.577060931899642e-05, |
|
"loss": 0.0247, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"learning_rate": 3.56989247311828e-05, |
|
"loss": 0.022, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 12.9, |
|
"learning_rate": 3.5627240143369176e-05, |
|
"loss": 0.024, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 3.555555555555556e-05, |
|
"loss": 0.0294, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"learning_rate": 3.548387096774194e-05, |
|
"loss": 0.0293, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 12.94, |
|
"learning_rate": 3.541218637992832e-05, |
|
"loss": 0.0234, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"learning_rate": 3.53405017921147e-05, |
|
"loss": 0.0248, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 3.526881720430108e-05, |
|
"loss": 0.0242, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"learning_rate": 3.5197132616487455e-05, |
|
"loss": 0.0228, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 3.512544802867383e-05, |
|
"loss": 0.0257, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_loss": 3.9491729736328125, |
|
"eval_runtime": 29.1968, |
|
"eval_samples_per_second": 4.11, |
|
"eval_steps_per_second": 0.514, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 13.01, |
|
"learning_rate": 3.505376344086022e-05, |
|
"loss": 0.0185, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 13.03, |
|
"learning_rate": 3.4982078853046595e-05, |
|
"loss": 0.0244, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 13.04, |
|
"learning_rate": 3.491039426523298e-05, |
|
"loss": 0.0264, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 13.06, |
|
"learning_rate": 3.483870967741936e-05, |
|
"loss": 0.0218, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 13.07, |
|
"learning_rate": 3.4767025089605734e-05, |
|
"loss": 0.0186, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"learning_rate": 3.469534050179212e-05, |
|
"loss": 0.0277, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 13.1, |
|
"learning_rate": 3.4623655913978497e-05, |
|
"loss": 0.0202, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 13.11, |
|
"learning_rate": 3.455197132616488e-05, |
|
"loss": 0.0188, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 13.13, |
|
"learning_rate": 3.448028673835126e-05, |
|
"loss": 0.0208, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 13.14, |
|
"learning_rate": 3.4408602150537636e-05, |
|
"loss": 0.0218, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 13.16, |
|
"learning_rate": 3.4336917562724014e-05, |
|
"loss": 0.0199, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 13.17, |
|
"learning_rate": 3.426523297491039e-05, |
|
"loss": 0.0175, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 13.19, |
|
"learning_rate": 3.4193548387096776e-05, |
|
"loss": 0.0226, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 13.2, |
|
"learning_rate": 3.4121863799283154e-05, |
|
"loss": 0.0253, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 13.21, |
|
"learning_rate": 3.405017921146954e-05, |
|
"loss": 0.0178, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 13.23, |
|
"learning_rate": 3.3978494623655916e-05, |
|
"loss": 0.0253, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"learning_rate": 3.3906810035842293e-05, |
|
"loss": 0.0248, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"learning_rate": 3.383512544802868e-05, |
|
"loss": 0.0276, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"learning_rate": 3.3763440860215055e-05, |
|
"loss": 0.0308, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 13.29, |
|
"learning_rate": 3.369175627240144e-05, |
|
"loss": 0.0235, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"learning_rate": 3.362007168458782e-05, |
|
"loss": 0.0256, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 13.31, |
|
"learning_rate": 3.3548387096774195e-05, |
|
"loss": 0.0201, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"learning_rate": 3.347670250896057e-05, |
|
"loss": 0.0206, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"learning_rate": 3.340501792114695e-05, |
|
"loss": 0.0213, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 13.36, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 0.0246, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 13.37, |
|
"learning_rate": 3.326164874551971e-05, |
|
"loss": 0.0258, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"learning_rate": 3.31899641577061e-05, |
|
"loss": 0.0239, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 13.4, |
|
"learning_rate": 3.3118279569892475e-05, |
|
"loss": 0.0242, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 13.41, |
|
"learning_rate": 3.304659498207885e-05, |
|
"loss": 0.0177, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 13.43, |
|
"learning_rate": 3.297491039426524e-05, |
|
"loss": 0.0227, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 13.44, |
|
"learning_rate": 3.2903225806451614e-05, |
|
"loss": 0.0209, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 13.46, |
|
"learning_rate": 3.2831541218638e-05, |
|
"loss": 0.0226, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"learning_rate": 3.2759856630824376e-05, |
|
"loss": 0.0266, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"learning_rate": 3.2688172043010754e-05, |
|
"loss": 0.0289, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 13.5, |
|
"learning_rate": 3.261648745519714e-05, |
|
"loss": 0.023, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 13.51, |
|
"learning_rate": 3.254480286738351e-05, |
|
"loss": 0.0303, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 13.53, |
|
"learning_rate": 3.2473118279569894e-05, |
|
"loss": 0.0294, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"learning_rate": 3.240143369175627e-05, |
|
"loss": 0.0265, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 13.56, |
|
"learning_rate": 3.2329749103942656e-05, |
|
"loss": 0.0215, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 13.57, |
|
"learning_rate": 3.2258064516129034e-05, |
|
"loss": 0.0228, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 13.59, |
|
"learning_rate": 3.218637992831541e-05, |
|
"loss": 0.0252, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"learning_rate": 3.2114695340501796e-05, |
|
"loss": 0.0209, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 13.61, |
|
"learning_rate": 3.204301075268817e-05, |
|
"loss": 0.0252, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"learning_rate": 3.197132616487455e-05, |
|
"loss": 0.0262, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 13.64, |
|
"learning_rate": 3.1899641577060935e-05, |
|
"loss": 0.0246, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 13.66, |
|
"learning_rate": 3.182795698924731e-05, |
|
"loss": 0.0241, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 13.67, |
|
"learning_rate": 3.17562724014337e-05, |
|
"loss": 0.0251, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 13.69, |
|
"learning_rate": 3.1684587813620075e-05, |
|
"loss": 0.0265, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"learning_rate": 3.161290322580645e-05, |
|
"loss": 0.022, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 13.71, |
|
"learning_rate": 3.154121863799283e-05, |
|
"loss": 0.0217, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 13.73, |
|
"learning_rate": 3.146953405017921e-05, |
|
"loss": 0.0233, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 13.74, |
|
"learning_rate": 3.139784946236559e-05, |
|
"loss": 0.0267, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 13.76, |
|
"learning_rate": 3.132616487455197e-05, |
|
"loss": 0.0226, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"learning_rate": 3.1254480286738355e-05, |
|
"loss": 0.0257, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"learning_rate": 3.118279569892473e-05, |
|
"loss": 0.028, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 13.8, |
|
"learning_rate": 3.111111111111111e-05, |
|
"loss": 0.0256, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"learning_rate": 3.1039426523297494e-05, |
|
"loss": 0.0186, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 13.83, |
|
"learning_rate": 3.096774193548387e-05, |
|
"loss": 0.0236, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"learning_rate": 3.0896057347670256e-05, |
|
"loss": 0.021, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 13.86, |
|
"learning_rate": 3.0824372759856634e-05, |
|
"loss": 0.0242, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 13.87, |
|
"learning_rate": 3.075268817204301e-05, |
|
"loss": 0.0291, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 13.89, |
|
"learning_rate": 3.068100358422939e-05, |
|
"loss": 0.0224, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"learning_rate": 3.060931899641577e-05, |
|
"loss": 0.0296, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 13.91, |
|
"learning_rate": 3.053763440860215e-05, |
|
"loss": 0.0279, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"learning_rate": 3.046594982078853e-05, |
|
"loss": 0.03, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 13.94, |
|
"learning_rate": 3.039426523297491e-05, |
|
"loss": 0.0199, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"learning_rate": 3.032258064516129e-05, |
|
"loss": 0.02, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"learning_rate": 3.0250896057347672e-05, |
|
"loss": 0.0217, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 13.99, |
|
"learning_rate": 3.0179211469534053e-05, |
|
"loss": 0.0229, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"learning_rate": 3.010752688172043e-05, |
|
"loss": 0.0267, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_loss": 3.9646317958831787, |
|
"eval_runtime": 29.2015, |
|
"eval_samples_per_second": 4.109, |
|
"eval_steps_per_second": 0.514, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 14.01, |
|
"learning_rate": 3.0035842293906812e-05, |
|
"loss": 0.0197, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"learning_rate": 2.9964157706093193e-05, |
|
"loss": 0.0187, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 14.04, |
|
"learning_rate": 2.9892473118279574e-05, |
|
"loss": 0.0256, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 14.06, |
|
"learning_rate": 2.9820788530465955e-05, |
|
"loss": 0.0205, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 14.07, |
|
"learning_rate": 2.974910394265233e-05, |
|
"loss": 0.0239, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 14.09, |
|
"learning_rate": 2.967741935483871e-05, |
|
"loss": 0.0204, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 14.1, |
|
"learning_rate": 2.9605734767025088e-05, |
|
"loss": 0.0238, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"learning_rate": 2.953405017921147e-05, |
|
"loss": 0.0219, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 14.13, |
|
"learning_rate": 2.946236559139785e-05, |
|
"loss": 0.0346, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 14.14, |
|
"learning_rate": 2.939068100358423e-05, |
|
"loss": 0.0228, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 14.16, |
|
"learning_rate": 2.9318996415770612e-05, |
|
"loss": 0.029, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"learning_rate": 2.924731182795699e-05, |
|
"loss": 0.0274, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 14.19, |
|
"learning_rate": 2.917562724014337e-05, |
|
"loss": 0.024, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 14.2, |
|
"learning_rate": 2.9103942652329752e-05, |
|
"loss": 0.0231, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 14.21, |
|
"learning_rate": 2.9032258064516133e-05, |
|
"loss": 0.0188, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 14.23, |
|
"learning_rate": 2.8960573476702514e-05, |
|
"loss": 0.022, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 14.24, |
|
"learning_rate": 2.8888888888888888e-05, |
|
"loss": 0.0241, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"learning_rate": 2.881720430107527e-05, |
|
"loss": 0.0232, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 14.27, |
|
"learning_rate": 2.8745519713261647e-05, |
|
"loss": 0.0242, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"learning_rate": 2.8673835125448028e-05, |
|
"loss": 0.0216, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 14.3, |
|
"learning_rate": 2.860215053763441e-05, |
|
"loss": 0.0173, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 14.31, |
|
"learning_rate": 2.853046594982079e-05, |
|
"loss": 0.0224, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"learning_rate": 2.845878136200717e-05, |
|
"loss": 0.0207, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 14.34, |
|
"learning_rate": 2.838709677419355e-05, |
|
"loss": 0.0214, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 14.36, |
|
"learning_rate": 2.831541218637993e-05, |
|
"loss": 0.0184, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 14.37, |
|
"learning_rate": 2.824372759856631e-05, |
|
"loss": 0.0232, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 14.39, |
|
"learning_rate": 2.8172043010752692e-05, |
|
"loss": 0.022, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 14.4, |
|
"learning_rate": 2.8100358422939073e-05, |
|
"loss": 0.027, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 14.41, |
|
"learning_rate": 2.802867383512545e-05, |
|
"loss": 0.029, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 14.43, |
|
"learning_rate": 2.7956989247311828e-05, |
|
"loss": 0.0272, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"learning_rate": 2.7885304659498206e-05, |
|
"loss": 0.0286, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 14.46, |
|
"learning_rate": 2.7813620071684587e-05, |
|
"loss": 0.0241, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"learning_rate": 2.7741935483870968e-05, |
|
"loss": 0.0269, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 14.49, |
|
"learning_rate": 2.767025089605735e-05, |
|
"loss": 0.0236, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"learning_rate": 2.759856630824373e-05, |
|
"loss": 0.0291, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 14.51, |
|
"learning_rate": 2.7526881720430108e-05, |
|
"loss": 0.0242, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 14.53, |
|
"learning_rate": 2.745519713261649e-05, |
|
"loss": 0.0209, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 14.54, |
|
"learning_rate": 2.738351254480287e-05, |
|
"loss": 0.0228, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 14.56, |
|
"learning_rate": 2.731182795698925e-05, |
|
"loss": 0.0248, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 14.57, |
|
"learning_rate": 2.7240143369175632e-05, |
|
"loss": 0.0249, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 14.59, |
|
"learning_rate": 2.716845878136201e-05, |
|
"loss": 0.0212, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 14.6, |
|
"learning_rate": 2.709677419354839e-05, |
|
"loss": 0.0209, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"learning_rate": 2.7025089605734765e-05, |
|
"loss": 0.0229, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 14.63, |
|
"learning_rate": 2.6953405017921146e-05, |
|
"loss": 0.0245, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 14.64, |
|
"learning_rate": 2.6881720430107527e-05, |
|
"loss": 0.021, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 14.66, |
|
"learning_rate": 2.6810035842293908e-05, |
|
"loss": 0.0206, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 14.67, |
|
"learning_rate": 2.6738351254480286e-05, |
|
"loss": 0.0258, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.0259, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 14.7, |
|
"learning_rate": 2.6594982078853048e-05, |
|
"loss": 0.0242, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 14.71, |
|
"learning_rate": 2.652329749103943e-05, |
|
"loss": 0.0204, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 14.73, |
|
"learning_rate": 2.645161290322581e-05, |
|
"loss": 0.0208, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 14.74, |
|
"learning_rate": 2.6379928315412187e-05, |
|
"loss": 0.0259, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 14.76, |
|
"learning_rate": 2.630824372759857e-05, |
|
"loss": 0.0203, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 14.77, |
|
"learning_rate": 2.623655913978495e-05, |
|
"loss": 0.0226, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 14.79, |
|
"learning_rate": 2.616487455197133e-05, |
|
"loss": 0.0219, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"learning_rate": 2.6093189964157705e-05, |
|
"loss": 0.0272, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 14.81, |
|
"learning_rate": 2.6021505376344086e-05, |
|
"loss": 0.0217, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 14.83, |
|
"learning_rate": 2.5949820788530467e-05, |
|
"loss": 0.0215, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 14.84, |
|
"learning_rate": 2.5878136200716844e-05, |
|
"loss": 0.0203, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"learning_rate": 2.5806451612903226e-05, |
|
"loss": 0.0223, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 14.87, |
|
"learning_rate": 2.5734767025089607e-05, |
|
"loss": 0.0236, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"learning_rate": 2.5663082437275988e-05, |
|
"loss": 0.0243, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"learning_rate": 2.559139784946237e-05, |
|
"loss": 0.0326, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 14.91, |
|
"learning_rate": 2.5519713261648746e-05, |
|
"loss": 0.0222, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 14.93, |
|
"learning_rate": 2.5448028673835127e-05, |
|
"loss": 0.0265, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 14.94, |
|
"learning_rate": 2.537634408602151e-05, |
|
"loss": 0.0247, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 14.96, |
|
"learning_rate": 2.530465949820789e-05, |
|
"loss": 0.0229, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 14.97, |
|
"learning_rate": 2.523297491039427e-05, |
|
"loss": 0.0224, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 14.99, |
|
"learning_rate": 2.5161290322580645e-05, |
|
"loss": 0.0192, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"learning_rate": 2.5089605734767026e-05, |
|
"loss": 0.0205, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_loss": 4.015718936920166, |
|
"eval_runtime": 29.1779, |
|
"eval_samples_per_second": 4.113, |
|
"eval_steps_per_second": 0.514, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 15.01, |
|
"learning_rate": 2.5017921146953403e-05, |
|
"loss": 0.0174, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 15.03, |
|
"learning_rate": 2.4946236559139784e-05, |
|
"loss": 0.0207, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 15.04, |
|
"learning_rate": 2.4874551971326165e-05, |
|
"loss": 0.0196, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 15.06, |
|
"learning_rate": 2.4802867383512547e-05, |
|
"loss": 0.0262, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 15.07, |
|
"learning_rate": 2.4731182795698928e-05, |
|
"loss": 0.0237, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 15.09, |
|
"learning_rate": 2.4659498207885305e-05, |
|
"loss": 0.0225, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 15.1, |
|
"learning_rate": 2.4587813620071686e-05, |
|
"loss": 0.0202, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"learning_rate": 2.4516129032258064e-05, |
|
"loss": 0.0212, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 15.13, |
|
"learning_rate": 2.4444444444444445e-05, |
|
"loss": 0.0243, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 15.14, |
|
"learning_rate": 2.4372759856630826e-05, |
|
"loss": 0.0205, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 15.16, |
|
"learning_rate": 2.4301075268817207e-05, |
|
"loss": 0.0197, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 15.17, |
|
"learning_rate": 2.4229390681003585e-05, |
|
"loss": 0.0202, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 15.19, |
|
"learning_rate": 2.4157706093189966e-05, |
|
"loss": 0.0211, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"learning_rate": 2.4086021505376347e-05, |
|
"loss": 0.0234, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 15.21, |
|
"learning_rate": 2.4014336917562724e-05, |
|
"loss": 0.0237, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 15.23, |
|
"learning_rate": 2.3942652329749105e-05, |
|
"loss": 0.0213, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 15.24, |
|
"learning_rate": 2.3870967741935486e-05, |
|
"loss": 0.0202, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 15.26, |
|
"learning_rate": 2.3799283154121864e-05, |
|
"loss": 0.0219, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 15.27, |
|
"learning_rate": 2.3727598566308245e-05, |
|
"loss": 0.0197, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 15.29, |
|
"learning_rate": 2.3655913978494626e-05, |
|
"loss": 0.0247, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 15.3, |
|
"learning_rate": 2.3584229390681004e-05, |
|
"loss": 0.0218, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 15.31, |
|
"learning_rate": 2.3512544802867385e-05, |
|
"loss": 0.0211, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 15.33, |
|
"learning_rate": 2.3440860215053766e-05, |
|
"loss": 0.0244, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 15.34, |
|
"learning_rate": 2.3369175627240144e-05, |
|
"loss": 0.0237, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 15.36, |
|
"learning_rate": 2.3297491039426525e-05, |
|
"loss": 0.0208, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 15.37, |
|
"learning_rate": 2.3225806451612906e-05, |
|
"loss": 0.021, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 15.39, |
|
"learning_rate": 2.3154121863799283e-05, |
|
"loss": 0.0191, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 15.4, |
|
"learning_rate": 2.3082437275985664e-05, |
|
"loss": 0.0242, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 15.41, |
|
"learning_rate": 2.3010752688172042e-05, |
|
"loss": 0.0199, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 15.43, |
|
"learning_rate": 2.2939068100358423e-05, |
|
"loss": 0.0269, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 15.44, |
|
"learning_rate": 2.2867383512544804e-05, |
|
"loss": 0.025, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 15.46, |
|
"learning_rate": 2.2795698924731185e-05, |
|
"loss": 0.0234, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 15.47, |
|
"learning_rate": 2.2724014336917566e-05, |
|
"loss": 0.0202, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 15.49, |
|
"learning_rate": 2.2652329749103944e-05, |
|
"loss": 0.0227, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"learning_rate": 2.258064516129032e-05, |
|
"loss": 0.0223, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 15.51, |
|
"learning_rate": 2.2508960573476703e-05, |
|
"loss": 0.0231, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 15.53, |
|
"learning_rate": 2.2437275985663084e-05, |
|
"loss": 0.0224, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 15.54, |
|
"learning_rate": 2.2365591397849465e-05, |
|
"loss": 0.0226, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 15.56, |
|
"learning_rate": 2.2293906810035846e-05, |
|
"loss": 0.0202, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 15.57, |
|
"learning_rate": 2.2222222222222223e-05, |
|
"loss": 0.0242, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 15.59, |
|
"learning_rate": 2.21505376344086e-05, |
|
"loss": 0.0263, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 15.6, |
|
"learning_rate": 2.2078853046594982e-05, |
|
"loss": 0.0265, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 15.61, |
|
"learning_rate": 2.2007168458781363e-05, |
|
"loss": 0.0245, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 15.63, |
|
"learning_rate": 2.1935483870967744e-05, |
|
"loss": 0.0243, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 15.64, |
|
"learning_rate": 2.1863799283154125e-05, |
|
"loss": 0.0192, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 15.66, |
|
"learning_rate": 2.1792114695340503e-05, |
|
"loss": 0.0306, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 15.67, |
|
"learning_rate": 2.172043010752688e-05, |
|
"loss": 0.0348, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 15.69, |
|
"learning_rate": 2.164874551971326e-05, |
|
"loss": 0.0283, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 15.7, |
|
"learning_rate": 2.1577060931899642e-05, |
|
"loss": 0.0177, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 15.71, |
|
"learning_rate": 2.1505376344086024e-05, |
|
"loss": 0.0235, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 15.73, |
|
"learning_rate": 2.1433691756272405e-05, |
|
"loss": 0.0215, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"learning_rate": 2.1362007168458782e-05, |
|
"loss": 0.0228, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 15.76, |
|
"learning_rate": 2.129032258064516e-05, |
|
"loss": 0.0295, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 15.77, |
|
"learning_rate": 2.121863799283154e-05, |
|
"loss": 0.0243, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 15.79, |
|
"learning_rate": 2.1146953405017922e-05, |
|
"loss": 0.0182, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 15.8, |
|
"learning_rate": 2.1075268817204303e-05, |
|
"loss": 0.0284, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 15.81, |
|
"learning_rate": 2.1003584229390684e-05, |
|
"loss": 0.021, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"learning_rate": 2.093189964157706e-05, |
|
"loss": 0.0302, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 15.84, |
|
"learning_rate": 2.086021505376344e-05, |
|
"loss": 0.0208, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 15.86, |
|
"learning_rate": 2.078853046594982e-05, |
|
"loss": 0.0218, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 15.87, |
|
"learning_rate": 2.07168458781362e-05, |
|
"loss": 0.0219, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"learning_rate": 2.0645161290322582e-05, |
|
"loss": 0.0221, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 15.9, |
|
"learning_rate": 2.0573476702508963e-05, |
|
"loss": 0.0277, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 15.91, |
|
"learning_rate": 2.050179211469534e-05, |
|
"loss": 0.0267, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 15.93, |
|
"learning_rate": 2.0430107526881722e-05, |
|
"loss": 0.0264, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 15.94, |
|
"learning_rate": 2.03584229390681e-05, |
|
"loss": 0.0312, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 15.96, |
|
"learning_rate": 2.028673835125448e-05, |
|
"loss": 0.0271, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 15.97, |
|
"learning_rate": 2.0215053763440862e-05, |
|
"loss": 0.0241, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 15.99, |
|
"learning_rate": 2.0143369175627243e-05, |
|
"loss": 0.0193, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"learning_rate": 2.007168458781362e-05, |
|
"loss": 0.0202, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_loss": 4.051829814910889, |
|
"eval_runtime": 29.1684, |
|
"eval_samples_per_second": 4.114, |
|
"eval_steps_per_second": 0.514, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 16.01, |
|
"learning_rate": 2e-05, |
|
"loss": 0.0217, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 16.03, |
|
"learning_rate": 1.992831541218638e-05, |
|
"loss": 0.0193, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 16.04, |
|
"learning_rate": 1.985663082437276e-05, |
|
"loss": 0.0151, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 16.06, |
|
"learning_rate": 1.978494623655914e-05, |
|
"loss": 0.0256, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 16.07, |
|
"learning_rate": 1.9713261648745522e-05, |
|
"loss": 0.0297, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 16.09, |
|
"learning_rate": 1.96415770609319e-05, |
|
"loss": 0.0225, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 16.1, |
|
"learning_rate": 1.956989247311828e-05, |
|
"loss": 0.0208, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 16.11, |
|
"learning_rate": 1.9498207885304662e-05, |
|
"loss": 0.0203, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 16.13, |
|
"learning_rate": 1.942652329749104e-05, |
|
"loss": 0.0221, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 16.14, |
|
"learning_rate": 1.935483870967742e-05, |
|
"loss": 0.0217, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 16.16, |
|
"learning_rate": 1.92831541218638e-05, |
|
"loss": 0.0207, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 16.17, |
|
"learning_rate": 1.921146953405018e-05, |
|
"loss": 0.0193, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 16.19, |
|
"learning_rate": 1.913978494623656e-05, |
|
"loss": 0.0211, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 16.2, |
|
"learning_rate": 1.906810035842294e-05, |
|
"loss": 0.0286, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 16.21, |
|
"learning_rate": 1.899641577060932e-05, |
|
"loss": 0.0227, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 16.23, |
|
"learning_rate": 1.89247311827957e-05, |
|
"loss": 0.0237, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 16.24, |
|
"learning_rate": 1.8853046594982078e-05, |
|
"loss": 0.0241, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 16.26, |
|
"learning_rate": 1.878136200716846e-05, |
|
"loss": 0.0214, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 16.27, |
|
"learning_rate": 1.870967741935484e-05, |
|
"loss": 0.0212, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 16.29, |
|
"learning_rate": 1.863799283154122e-05, |
|
"loss": 0.0238, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 16.3, |
|
"learning_rate": 1.85663082437276e-05, |
|
"loss": 0.0233, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 16.31, |
|
"learning_rate": 1.849462365591398e-05, |
|
"loss": 0.0238, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"learning_rate": 1.8422939068100357e-05, |
|
"loss": 0.0246, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 16.34, |
|
"learning_rate": 1.835125448028674e-05, |
|
"loss": 0.0235, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 16.36, |
|
"learning_rate": 1.827956989247312e-05, |
|
"loss": 0.0243, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 16.37, |
|
"learning_rate": 1.82078853046595e-05, |
|
"loss": 0.019, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 16.39, |
|
"learning_rate": 1.813620071684588e-05, |
|
"loss": 0.0197, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 16.4, |
|
"learning_rate": 1.806451612903226e-05, |
|
"loss": 0.0234, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 16.41, |
|
"learning_rate": 1.7992831541218637e-05, |
|
"loss": 0.0229, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 16.43, |
|
"learning_rate": 1.7921146953405018e-05, |
|
"loss": 0.0212, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 16.44, |
|
"learning_rate": 1.78494623655914e-05, |
|
"loss": 0.0237, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 16.46, |
|
"learning_rate": 1.777777777777778e-05, |
|
"loss": 0.0236, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 16.47, |
|
"learning_rate": 1.770609318996416e-05, |
|
"loss": 0.0213, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 16.49, |
|
"learning_rate": 1.763440860215054e-05, |
|
"loss": 0.0249, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"learning_rate": 1.7562724014336916e-05, |
|
"loss": 0.027, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 16.51, |
|
"learning_rate": 1.7491039426523297e-05, |
|
"loss": 0.0217, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 16.53, |
|
"learning_rate": 1.741935483870968e-05, |
|
"loss": 0.0216, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 16.54, |
|
"learning_rate": 1.734767025089606e-05, |
|
"loss": 0.0221, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 16.56, |
|
"learning_rate": 1.727598566308244e-05, |
|
"loss": 0.0238, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 16.57, |
|
"learning_rate": 1.7204301075268818e-05, |
|
"loss": 0.0239, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 16.59, |
|
"learning_rate": 1.7132616487455196e-05, |
|
"loss": 0.0234, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 16.6, |
|
"learning_rate": 1.7060931899641577e-05, |
|
"loss": 0.0211, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 16.61, |
|
"learning_rate": 1.6989247311827958e-05, |
|
"loss": 0.0228, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 16.63, |
|
"learning_rate": 1.691756272401434e-05, |
|
"loss": 0.0185, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 16.64, |
|
"learning_rate": 1.684587813620072e-05, |
|
"loss": 0.0236, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 16.66, |
|
"learning_rate": 1.6774193548387098e-05, |
|
"loss": 0.0215, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"learning_rate": 1.6702508960573475e-05, |
|
"loss": 0.0209, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 16.69, |
|
"learning_rate": 1.6630824372759856e-05, |
|
"loss": 0.0209, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 16.7, |
|
"learning_rate": 1.6559139784946237e-05, |
|
"loss": 0.0205, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 16.71, |
|
"learning_rate": 1.648745519713262e-05, |
|
"loss": 0.0202, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 16.73, |
|
"learning_rate": 1.6415770609319e-05, |
|
"loss": 0.0262, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 16.74, |
|
"learning_rate": 1.6344086021505377e-05, |
|
"loss": 0.0203, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 16.76, |
|
"learning_rate": 1.6272401433691755e-05, |
|
"loss": 0.0243, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 16.77, |
|
"learning_rate": 1.6200716845878136e-05, |
|
"loss": 0.0182, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 16.79, |
|
"learning_rate": 1.6129032258064517e-05, |
|
"loss": 0.0247, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 16.8, |
|
"learning_rate": 1.6057347670250898e-05, |
|
"loss": 0.0269, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 16.81, |
|
"learning_rate": 1.5985663082437275e-05, |
|
"loss": 0.0274, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 16.83, |
|
"learning_rate": 1.5913978494623657e-05, |
|
"loss": 0.0243, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"learning_rate": 1.5842293906810038e-05, |
|
"loss": 0.0298, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 16.86, |
|
"learning_rate": 1.5770609318996415e-05, |
|
"loss": 0.022, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 16.87, |
|
"learning_rate": 1.5698924731182796e-05, |
|
"loss": 0.0222, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 16.89, |
|
"learning_rate": 1.5627240143369177e-05, |
|
"loss": 0.0233, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 16.9, |
|
"learning_rate": 1.5555555555555555e-05, |
|
"loss": 0.0226, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 16.91, |
|
"learning_rate": 1.5483870967741936e-05, |
|
"loss": 0.0255, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 16.93, |
|
"learning_rate": 1.5412186379928317e-05, |
|
"loss": 0.0213, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 16.94, |
|
"learning_rate": 1.5340501792114695e-05, |
|
"loss": 0.0302, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 16.96, |
|
"learning_rate": 1.5268817204301076e-05, |
|
"loss": 0.0275, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 16.97, |
|
"learning_rate": 1.5197132616487455e-05, |
|
"loss": 0.0241, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 16.99, |
|
"learning_rate": 1.5125448028673836e-05, |
|
"loss": 0.0186, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"learning_rate": 1.5053763440860215e-05, |
|
"loss": 0.0222, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_loss": 4.085420608520508, |
|
"eval_runtime": 29.2514, |
|
"eval_samples_per_second": 4.102, |
|
"eval_steps_per_second": 0.513, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 17.01, |
|
"learning_rate": 1.4982078853046596e-05, |
|
"loss": 0.02, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 17.03, |
|
"learning_rate": 1.4910394265232978e-05, |
|
"loss": 0.0217, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 17.04, |
|
"learning_rate": 1.4838709677419355e-05, |
|
"loss": 0.0275, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 17.06, |
|
"learning_rate": 1.4767025089605735e-05, |
|
"loss": 0.0238, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 17.07, |
|
"learning_rate": 1.4695340501792116e-05, |
|
"loss": 0.0218, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"learning_rate": 1.4623655913978495e-05, |
|
"loss": 0.0191, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 17.1, |
|
"learning_rate": 1.4551971326164876e-05, |
|
"loss": 0.0213, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 17.11, |
|
"learning_rate": 1.4480286738351257e-05, |
|
"loss": 0.0236, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 17.13, |
|
"learning_rate": 1.4408602150537635e-05, |
|
"loss": 0.0191, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 17.14, |
|
"learning_rate": 1.4336917562724014e-05, |
|
"loss": 0.019, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 17.16, |
|
"learning_rate": 1.4265232974910395e-05, |
|
"loss": 0.0238, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 17.17, |
|
"learning_rate": 1.4193548387096774e-05, |
|
"loss": 0.0246, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 17.19, |
|
"learning_rate": 1.4121863799283155e-05, |
|
"loss": 0.0197, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 17.2, |
|
"learning_rate": 1.4050179211469536e-05, |
|
"loss": 0.0196, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 17.21, |
|
"learning_rate": 1.3978494623655914e-05, |
|
"loss": 0.0233, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 17.23, |
|
"learning_rate": 1.3906810035842293e-05, |
|
"loss": 0.0241, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 17.24, |
|
"learning_rate": 1.3835125448028674e-05, |
|
"loss": 0.0253, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 17.26, |
|
"learning_rate": 1.3763440860215054e-05, |
|
"loss": 0.0215, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 17.27, |
|
"learning_rate": 1.3691756272401435e-05, |
|
"loss": 0.0183, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 17.29, |
|
"learning_rate": 1.3620071684587816e-05, |
|
"loss": 0.0224, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"learning_rate": 1.3548387096774195e-05, |
|
"loss": 0.0232, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 17.31, |
|
"learning_rate": 1.3476702508960573e-05, |
|
"loss": 0.0216, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 17.33, |
|
"learning_rate": 1.3405017921146954e-05, |
|
"loss": 0.0225, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.0193, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 17.36, |
|
"learning_rate": 1.3261648745519714e-05, |
|
"loss": 0.0233, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 17.37, |
|
"learning_rate": 1.3189964157706094e-05, |
|
"loss": 0.0214, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 17.39, |
|
"learning_rate": 1.3118279569892475e-05, |
|
"loss": 0.0228, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 17.4, |
|
"learning_rate": 1.3046594982078852e-05, |
|
"loss": 0.0233, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 17.41, |
|
"learning_rate": 1.2974910394265233e-05, |
|
"loss": 0.025, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 17.43, |
|
"learning_rate": 1.2903225806451613e-05, |
|
"loss": 0.0193, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 17.44, |
|
"learning_rate": 1.2831541218637994e-05, |
|
"loss": 0.0203, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 17.46, |
|
"learning_rate": 1.2759856630824373e-05, |
|
"loss": 0.026, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 17.47, |
|
"learning_rate": 1.2688172043010754e-05, |
|
"loss": 0.0223, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 17.49, |
|
"learning_rate": 1.2616487455197135e-05, |
|
"loss": 0.0232, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"learning_rate": 1.2544802867383513e-05, |
|
"loss": 0.0229, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 17.51, |
|
"learning_rate": 1.2473118279569892e-05, |
|
"loss": 0.0218, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 17.53, |
|
"learning_rate": 1.2401433691756273e-05, |
|
"loss": 0.0208, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 17.54, |
|
"learning_rate": 1.2329749103942653e-05, |
|
"loss": 0.0195, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 17.56, |
|
"learning_rate": 1.2258064516129032e-05, |
|
"loss": 0.02, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 17.57, |
|
"learning_rate": 1.2186379928315413e-05, |
|
"loss": 0.0199, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 17.59, |
|
"learning_rate": 1.2114695340501792e-05, |
|
"loss": 0.0246, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"learning_rate": 1.2043010752688173e-05, |
|
"loss": 0.0174, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 17.61, |
|
"learning_rate": 1.1971326164874553e-05, |
|
"loss": 0.0212, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 17.63, |
|
"learning_rate": 1.1899641577060932e-05, |
|
"loss": 0.0204, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 17.64, |
|
"learning_rate": 1.1827956989247313e-05, |
|
"loss": 0.0255, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 17.66, |
|
"learning_rate": 1.1756272401433692e-05, |
|
"loss": 0.0221, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 17.67, |
|
"learning_rate": 1.1684587813620072e-05, |
|
"loss": 0.0249, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 17.69, |
|
"learning_rate": 1.1612903225806453e-05, |
|
"loss": 0.0242, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 17.7, |
|
"learning_rate": 1.1541218637992832e-05, |
|
"loss": 0.0228, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 17.71, |
|
"learning_rate": 1.1469534050179212e-05, |
|
"loss": 0.0231, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 17.73, |
|
"learning_rate": 1.1397849462365593e-05, |
|
"loss": 0.0216, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 17.74, |
|
"learning_rate": 1.1326164874551972e-05, |
|
"loss": 0.0205, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 17.76, |
|
"learning_rate": 1.1254480286738351e-05, |
|
"loss": 0.0248, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 17.77, |
|
"learning_rate": 1.1182795698924732e-05, |
|
"loss": 0.0214, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 17.79, |
|
"learning_rate": 1.1111111111111112e-05, |
|
"loss": 0.0198, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 17.8, |
|
"learning_rate": 1.1039426523297491e-05, |
|
"loss": 0.0271, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"learning_rate": 1.0967741935483872e-05, |
|
"loss": 0.0212, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 17.83, |
|
"learning_rate": 1.0896057347670251e-05, |
|
"loss": 0.0226, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 17.84, |
|
"learning_rate": 1.082437275985663e-05, |
|
"loss": 0.0271, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 17.86, |
|
"learning_rate": 1.0752688172043012e-05, |
|
"loss": 0.0249, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 17.87, |
|
"learning_rate": 1.0681003584229391e-05, |
|
"loss": 0.0229, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 17.89, |
|
"learning_rate": 1.060931899641577e-05, |
|
"loss": 0.0239, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 17.9, |
|
"learning_rate": 1.0537634408602151e-05, |
|
"loss": 0.027, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 17.91, |
|
"learning_rate": 1.046594982078853e-05, |
|
"loss": 0.0223, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 17.93, |
|
"learning_rate": 1.039426523297491e-05, |
|
"loss": 0.0228, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 17.94, |
|
"learning_rate": 1.0322580645161291e-05, |
|
"loss": 0.0254, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 17.96, |
|
"learning_rate": 1.025089605734767e-05, |
|
"loss": 0.0223, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 17.97, |
|
"learning_rate": 1.017921146953405e-05, |
|
"loss": 0.0225, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 17.99, |
|
"learning_rate": 1.0107526881720431e-05, |
|
"loss": 0.0228, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"learning_rate": 1.003584229390681e-05, |
|
"loss": 0.0203, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_loss": 4.122922897338867, |
|
"eval_runtime": 29.2501, |
|
"eval_samples_per_second": 4.103, |
|
"eval_steps_per_second": 0.513, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 18.01, |
|
"learning_rate": 9.96415770609319e-06, |
|
"loss": 0.0217, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 18.03, |
|
"learning_rate": 9.89247311827957e-06, |
|
"loss": 0.0199, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 18.04, |
|
"learning_rate": 9.82078853046595e-06, |
|
"loss": 0.0254, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 18.06, |
|
"learning_rate": 9.749103942652331e-06, |
|
"loss": 0.0219, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 18.07, |
|
"learning_rate": 9.67741935483871e-06, |
|
"loss": 0.0207, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 18.09, |
|
"learning_rate": 9.60573476702509e-06, |
|
"loss": 0.0295, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 18.1, |
|
"learning_rate": 9.53405017921147e-06, |
|
"loss": 0.023, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 18.11, |
|
"learning_rate": 9.46236559139785e-06, |
|
"loss": 0.0276, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 18.13, |
|
"learning_rate": 9.39068100358423e-06, |
|
"loss": 0.023, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 18.14, |
|
"learning_rate": 9.31899641577061e-06, |
|
"loss": 0.0226, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 18.16, |
|
"learning_rate": 9.24731182795699e-06, |
|
"loss": 0.0203, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 18.17, |
|
"learning_rate": 9.17562724014337e-06, |
|
"loss": 0.022, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 18.19, |
|
"learning_rate": 9.10394265232975e-06, |
|
"loss": 0.0208, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 18.2, |
|
"learning_rate": 9.03225806451613e-06, |
|
"loss": 0.0152, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 18.21, |
|
"learning_rate": 8.960573476702509e-06, |
|
"loss": 0.023, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"learning_rate": 8.88888888888889e-06, |
|
"loss": 0.0236, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 18.24, |
|
"learning_rate": 8.81720430107527e-06, |
|
"loss": 0.0197, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 18.26, |
|
"learning_rate": 8.745519713261649e-06, |
|
"loss": 0.0206, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 18.27, |
|
"learning_rate": 8.67383512544803e-06, |
|
"loss": 0.0212, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 18.29, |
|
"learning_rate": 8.602150537634409e-06, |
|
"loss": 0.0288, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 18.3, |
|
"learning_rate": 8.530465949820788e-06, |
|
"loss": 0.019, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 18.31, |
|
"learning_rate": 8.45878136200717e-06, |
|
"loss": 0.0224, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 18.33, |
|
"learning_rate": 8.387096774193549e-06, |
|
"loss": 0.0208, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 18.34, |
|
"learning_rate": 8.315412186379928e-06, |
|
"loss": 0.0157, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 18.36, |
|
"learning_rate": 8.24372759856631e-06, |
|
"loss": 0.0226, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"learning_rate": 8.172043010752689e-06, |
|
"loss": 0.0204, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 18.39, |
|
"learning_rate": 8.100358422939068e-06, |
|
"loss": 0.0228, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 18.4, |
|
"learning_rate": 8.028673835125449e-06, |
|
"loss": 0.0237, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 18.41, |
|
"learning_rate": 7.956989247311828e-06, |
|
"loss": 0.021, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 18.43, |
|
"learning_rate": 7.885304659498208e-06, |
|
"loss": 0.0264, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"learning_rate": 7.813620071684589e-06, |
|
"loss": 0.0205, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 18.46, |
|
"learning_rate": 7.741935483870968e-06, |
|
"loss": 0.0275, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 18.47, |
|
"learning_rate": 7.670250896057347e-06, |
|
"loss": 0.0228, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 18.49, |
|
"learning_rate": 7.5985663082437275e-06, |
|
"loss": 0.0186, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"learning_rate": 7.526881720430108e-06, |
|
"loss": 0.0225, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 18.51, |
|
"learning_rate": 7.455197132616489e-06, |
|
"loss": 0.0272, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 18.53, |
|
"learning_rate": 7.383512544802867e-06, |
|
"loss": 0.0216, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 18.54, |
|
"learning_rate": 7.3118279569892475e-06, |
|
"loss": 0.0273, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"learning_rate": 7.2401433691756285e-06, |
|
"loss": 0.0247, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 18.57, |
|
"learning_rate": 7.168458781362007e-06, |
|
"loss": 0.021, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 18.59, |
|
"learning_rate": 7.096774193548387e-06, |
|
"loss": 0.0183, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 18.6, |
|
"learning_rate": 7.025089605734768e-06, |
|
"loss": 0.0201, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 18.61, |
|
"learning_rate": 6.953405017921147e-06, |
|
"loss": 0.0223, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 18.63, |
|
"learning_rate": 6.881720430107527e-06, |
|
"loss": 0.0193, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 18.64, |
|
"learning_rate": 6.810035842293908e-06, |
|
"loss": 0.0254, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 18.66, |
|
"learning_rate": 6.7383512544802865e-06, |
|
"loss": 0.0206, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 18.67, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.0187, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 18.69, |
|
"learning_rate": 6.594982078853047e-06, |
|
"loss": 0.0189, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 18.7, |
|
"learning_rate": 6.523297491039426e-06, |
|
"loss": 0.0242, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 18.71, |
|
"learning_rate": 6.451612903225806e-06, |
|
"loss": 0.0198, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 18.73, |
|
"learning_rate": 6.3799283154121866e-06, |
|
"loss": 0.0202, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 18.74, |
|
"learning_rate": 6.308243727598568e-06, |
|
"loss": 0.0237, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 18.76, |
|
"learning_rate": 6.236559139784946e-06, |
|
"loss": 0.021, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 18.77, |
|
"learning_rate": 6.164874551971326e-06, |
|
"loss": 0.0231, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 18.79, |
|
"learning_rate": 6.0931899641577065e-06, |
|
"loss": 0.0173, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 18.8, |
|
"learning_rate": 6.021505376344087e-06, |
|
"loss": 0.0229, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 18.81, |
|
"learning_rate": 5.949820788530466e-06, |
|
"loss": 0.0312, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 18.83, |
|
"learning_rate": 5.878136200716846e-06, |
|
"loss": 0.022, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 18.84, |
|
"learning_rate": 5.806451612903226e-06, |
|
"loss": 0.0205, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 18.86, |
|
"learning_rate": 5.734767025089606e-06, |
|
"loss": 0.0194, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 18.87, |
|
"learning_rate": 5.663082437275986e-06, |
|
"loss": 0.0238, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 18.89, |
|
"learning_rate": 5.591397849462366e-06, |
|
"loss": 0.0314, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 18.9, |
|
"learning_rate": 5.5197132616487455e-06, |
|
"loss": 0.0232, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 18.91, |
|
"learning_rate": 5.448028673835126e-06, |
|
"loss": 0.0187, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 18.93, |
|
"learning_rate": 5.376344086021506e-06, |
|
"loss": 0.0252, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 18.94, |
|
"learning_rate": 5.304659498207885e-06, |
|
"loss": 0.0247, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 18.96, |
|
"learning_rate": 5.232974910394265e-06, |
|
"loss": 0.0204, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 18.97, |
|
"learning_rate": 5.161290322580646e-06, |
|
"loss": 0.0202, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 18.99, |
|
"learning_rate": 5.089605734767025e-06, |
|
"loss": 0.0261, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"learning_rate": 5.017921146953405e-06, |
|
"loss": 0.0231, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"eval_loss": 4.1433234214782715, |
|
"eval_runtime": 29.1935, |
|
"eval_samples_per_second": 4.111, |
|
"eval_steps_per_second": 0.514, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 19.01, |
|
"learning_rate": 4.946236559139785e-06, |
|
"loss": 0.0252, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 19.03, |
|
"learning_rate": 4.8745519713261655e-06, |
|
"loss": 0.0313, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 19.04, |
|
"learning_rate": 4.802867383512545e-06, |
|
"loss": 0.0241, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 19.06, |
|
"learning_rate": 4.731182795698925e-06, |
|
"loss": 0.0218, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 19.07, |
|
"learning_rate": 4.659498207885305e-06, |
|
"loss": 0.0273, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 19.09, |
|
"learning_rate": 4.587813620071685e-06, |
|
"loss": 0.0222, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 19.1, |
|
"learning_rate": 4.516129032258065e-06, |
|
"loss": 0.0215, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"learning_rate": 4.444444444444445e-06, |
|
"loss": 0.0226, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"learning_rate": 4.372759856630824e-06, |
|
"loss": 0.022, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 19.14, |
|
"learning_rate": 4.3010752688172045e-06, |
|
"loss": 0.0229, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 19.16, |
|
"learning_rate": 4.229390681003585e-06, |
|
"loss": 0.019, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 19.17, |
|
"learning_rate": 4.157706093189964e-06, |
|
"loss": 0.023, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 19.19, |
|
"learning_rate": 4.086021505376344e-06, |
|
"loss": 0.0214, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"learning_rate": 4.0143369175627245e-06, |
|
"loss": 0.0263, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 19.21, |
|
"learning_rate": 3.942652329749104e-06, |
|
"loss": 0.0236, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 19.23, |
|
"learning_rate": 3.870967741935484e-06, |
|
"loss": 0.0184, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 19.24, |
|
"learning_rate": 3.7992831541218638e-06, |
|
"loss": 0.0179, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 19.26, |
|
"learning_rate": 3.7275985663082444e-06, |
|
"loss": 0.027, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 19.27, |
|
"learning_rate": 3.6559139784946237e-06, |
|
"loss": 0.0243, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 19.29, |
|
"learning_rate": 3.5842293906810035e-06, |
|
"loss": 0.0233, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 19.3, |
|
"learning_rate": 3.512544802867384e-06, |
|
"loss": 0.0179, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 19.31, |
|
"learning_rate": 3.4408602150537635e-06, |
|
"loss": 0.0214, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 19.33, |
|
"learning_rate": 3.3691756272401432e-06, |
|
"loss": 0.0195, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 19.34, |
|
"learning_rate": 3.2974910394265234e-06, |
|
"loss": 0.0214, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 19.36, |
|
"learning_rate": 3.225806451612903e-06, |
|
"loss": 0.0219, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"learning_rate": 3.154121863799284e-06, |
|
"loss": 0.0197, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 19.39, |
|
"learning_rate": 3.082437275985663e-06, |
|
"loss": 0.0223, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 19.4, |
|
"learning_rate": 3.0107526881720433e-06, |
|
"loss": 0.0189, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 19.41, |
|
"learning_rate": 2.939068100358423e-06, |
|
"loss": 0.0179, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 19.43, |
|
"learning_rate": 2.867383512544803e-06, |
|
"loss": 0.0248, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 19.44, |
|
"learning_rate": 2.795698924731183e-06, |
|
"loss": 0.0196, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 19.46, |
|
"learning_rate": 2.724014336917563e-06, |
|
"loss": 0.0238, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 19.47, |
|
"learning_rate": 2.6523297491039426e-06, |
|
"loss": 0.0173, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 19.49, |
|
"learning_rate": 2.580645161290323e-06, |
|
"loss": 0.0264, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"learning_rate": 2.5089605734767026e-06, |
|
"loss": 0.0185, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 19.51, |
|
"learning_rate": 2.4372759856630828e-06, |
|
"loss": 0.0222, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 19.53, |
|
"learning_rate": 2.3655913978494625e-06, |
|
"loss": 0.0241, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 19.54, |
|
"learning_rate": 2.2939068100358423e-06, |
|
"loss": 0.0193, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 19.56, |
|
"learning_rate": 2.2222222222222225e-06, |
|
"loss": 0.021, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 19.57, |
|
"learning_rate": 2.1505376344086023e-06, |
|
"loss": 0.018, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 19.59, |
|
"learning_rate": 2.078853046594982e-06, |
|
"loss": 0.0182, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 19.6, |
|
"learning_rate": 2.0071684587813622e-06, |
|
"loss": 0.0199, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 19.61, |
|
"learning_rate": 1.935483870967742e-06, |
|
"loss": 0.0212, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 19.63, |
|
"learning_rate": 1.8637992831541222e-06, |
|
"loss": 0.0283, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 19.64, |
|
"learning_rate": 1.7921146953405017e-06, |
|
"loss": 0.0252, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 19.66, |
|
"learning_rate": 1.7204301075268817e-06, |
|
"loss": 0.024, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 19.67, |
|
"learning_rate": 1.6487455197132617e-06, |
|
"loss": 0.0192, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 19.69, |
|
"learning_rate": 1.577060931899642e-06, |
|
"loss": 0.0191, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 19.7, |
|
"learning_rate": 1.5053763440860217e-06, |
|
"loss": 0.0201, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 19.71, |
|
"learning_rate": 1.4336917562724014e-06, |
|
"loss": 0.0226, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 19.73, |
|
"learning_rate": 1.3620071684587814e-06, |
|
"loss": 0.0275, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 19.74, |
|
"learning_rate": 1.2903225806451614e-06, |
|
"loss": 0.0246, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 19.76, |
|
"learning_rate": 1.2186379928315414e-06, |
|
"loss": 0.018, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 19.77, |
|
"learning_rate": 1.1469534050179212e-06, |
|
"loss": 0.0239, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"learning_rate": 1.0752688172043011e-06, |
|
"loss": 0.0203, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 19.8, |
|
"learning_rate": 1.0035842293906811e-06, |
|
"loss": 0.0229, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 19.81, |
|
"learning_rate": 9.318996415770611e-07, |
|
"loss": 0.0251, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 19.83, |
|
"learning_rate": 8.602150537634409e-07, |
|
"loss": 0.0278, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 19.84, |
|
"learning_rate": 7.88530465949821e-07, |
|
"loss": 0.0207, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 19.86, |
|
"learning_rate": 7.168458781362007e-07, |
|
"loss": 0.0178, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 19.87, |
|
"learning_rate": 6.451612903225807e-07, |
|
"loss": 0.0185, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 19.89, |
|
"learning_rate": 5.734767025089606e-07, |
|
"loss": 0.0197, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 19.9, |
|
"learning_rate": 5.017921146953406e-07, |
|
"loss": 0.0221, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 19.91, |
|
"learning_rate": 4.3010752688172043e-07, |
|
"loss": 0.0211, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 19.93, |
|
"learning_rate": 3.5842293906810036e-07, |
|
"loss": 0.0198, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 19.94, |
|
"learning_rate": 2.867383512544803e-07, |
|
"loss": 0.0197, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 19.96, |
|
"learning_rate": 2.1505376344086022e-07, |
|
"loss": 0.0206, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 19.97, |
|
"learning_rate": 1.4336917562724014e-07, |
|
"loss": 0.0219, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 19.99, |
|
"learning_rate": 7.168458781362007e-08, |
|
"loss": 0.0211, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.0199, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_loss": 4.1544904708862305, |
|
"eval_runtime": 29.2342, |
|
"eval_samples_per_second": 4.105, |
|
"eval_steps_per_second": 0.513, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"step": 1400, |
|
"total_flos": 1.342333327835136e+17, |
|
"train_loss": 0.135591789316386, |
|
"train_runtime": 4699.1287, |
|
"train_samples_per_second": 1.192, |
|
"train_steps_per_second": 0.298 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1400, |
|
"num_train_epochs": 20, |
|
"save_steps": 500, |
|
"total_flos": 1.342333327835136e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|