{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9990358348063637,
  "global_step": 4665,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 5.000000000000001e-07, "loss": 2.8558, "step": 1 },
    { "epoch": 0.01, "learning_rate": 5e-06, "loss": 2.897, "step": 10 },
    { "epoch": 0.01, "learning_rate": 1e-05, "loss": 2.8778, "step": 20 },
    { "epoch": 0.02, "learning_rate": 1.5e-05, "loss": 2.9056, "step": 30 },
    { "epoch": 0.03, "learning_rate": 2e-05, "loss": 2.8865, "step": 40 },
    { "epoch": 0.03, "learning_rate": 2.5e-05, "loss": 2.8852, "step": 50 },
    { "epoch": 0.04, "learning_rate": 3e-05, "loss": 2.9159, "step": 60 },
    { "epoch": 0.04, "learning_rate": 3.5e-05, "loss": 2.8774, "step": 70 },
    { "epoch": 0.05, "learning_rate": 4e-05, "loss": 2.8732, "step": 80 },
    { "epoch": 0.06, "learning_rate": 4.5e-05, "loss": 2.8753, "step": 90 },
    { "epoch": 0.06, "learning_rate": 5e-05, "loss": 2.8977, "step": 100 },
    { "epoch": 0.07, "learning_rate": 4.9890470974808325e-05, "loss": 2.9238, "step": 110 },
    { "epoch": 0.08, "learning_rate": 4.978094194961665e-05, "loss": 2.8907, "step": 120 },
    { "epoch": 0.08, "learning_rate": 4.967141292442498e-05, "loss": 2.8952, "step": 130 },
    { "epoch": 0.09, "learning_rate": 4.9561883899233295e-05, "loss": 2.8704, "step": 140 },
    { "epoch": 0.1, "learning_rate": 4.9452354874041625e-05, "loss": 2.9006, "step": 150 },
    { "epoch": 0.1, "learning_rate": 4.934282584884995e-05, "loss": 2.8912, "step": 160 },
    { "epoch": 0.11, "learning_rate": 4.923329682365827e-05, "loss": 2.8691, "step": 170 },
    { "epoch": 0.12, "learning_rate": 4.9123767798466594e-05, "loss": 2.8859, "step": 180 },
    { "epoch": 0.12, "learning_rate": 4.901423877327492e-05, "loss": 2.8636, "step": 190 },
    { "epoch": 0.13, "learning_rate": 4.890470974808325e-05, "loss": 2.8825, "step": 200 },
    { "epoch": 0.13, "learning_rate": 4.879518072289157e-05, "loss": 2.8866, "step": 210 },
    { "epoch": 0.14, "learning_rate": 4.868565169769989e-05, "loss": 2.9145, "step": 220 },
    { "epoch": 0.15, "learning_rate": 4.8576122672508216e-05, "loss": 2.8909, "step": 230 },
    { "epoch": 0.15, "learning_rate": 4.8466593647316546e-05, "loss": 2.8754, "step": 240 },
    { "epoch": 0.16, "learning_rate": 4.835706462212486e-05, "loss": 2.8581, "step": 250 },
    { "epoch": 0.17, "learning_rate": 4.824753559693319e-05, "loss": 2.8592, "step": 260 },
    { "epoch": 0.17, "learning_rate": 4.813800657174151e-05, "loss": 2.9211, "step": 270 },
    { "epoch": 0.18, "learning_rate": 4.802847754654984e-05, "loss": 2.851, "step": 280 },
    { "epoch": 0.19, "learning_rate": 4.791894852135816e-05, "loss": 2.8774, "step": 290 },
    { "epoch": 0.19, "learning_rate": 4.7809419496166485e-05, "loss": 2.8707, "step": 300 },
    { "epoch": 0.2, "learning_rate": 4.7699890470974815e-05, "loss": 2.8243, "step": 310 },
    { "epoch": 0.21, "learning_rate": 4.759036144578313e-05, "loss": 2.8519, "step": 320 },
    { "epoch": 0.21, "learning_rate": 4.748083242059146e-05, "loss": 2.8624, "step": 330 },
    { "epoch": 0.22, "learning_rate": 4.7371303395399784e-05, "loss": 2.8551, "step": 340 },
    { "epoch": 0.22, "learning_rate": 4.726177437020811e-05, "loss": 2.8593, "step": 350 },
    { "epoch": 0.23, "learning_rate": 4.715224534501643e-05, "loss": 2.9018, "step": 360 },
    { "epoch": 0.24, "learning_rate": 4.704271631982475e-05, "loss": 2.8707, "step": 370 },
    { "epoch": 0.24, "learning_rate": 4.6933187294633076e-05, "loss": 2.8595, "step": 380 },
    { "epoch": 0.25, "learning_rate": 4.6823658269441406e-05, "loss": 2.8713, "step": 390 },
    { "epoch": 0.26, "learning_rate": 4.671412924424973e-05, "loss": 2.8765, "step": 400 },
    { "epoch": 0.26, "learning_rate": 4.660460021905805e-05, "loss": 2.8672, "step": 410 },
    { "epoch": 0.27, "learning_rate": 4.6495071193866376e-05, "loss": 2.8757, "step": 420 },
    { "epoch": 0.28, "learning_rate": 4.63855421686747e-05, "loss": 2.8763, "step": 430 },
    { "epoch": 0.28, "learning_rate": 4.627601314348303e-05, "loss": 2.8511, "step": 440 },
    { "epoch": 0.29, "learning_rate": 4.6166484118291345e-05, "loss": 2.8628, "step": 450 },
    { "epoch": 0.3, "learning_rate": 4.6056955093099675e-05, "loss": 2.8471, "step": 460 },
    { "epoch": 0.3, "learning_rate": 4.5947426067908e-05, "loss": 2.8648, "step": 470 },
    { "epoch": 0.31, "learning_rate": 4.583789704271632e-05, "loss": 2.8346, "step": 480 },
    { "epoch": 0.31, "learning_rate": 4.572836801752465e-05, "loss": 2.8836, "step": 490 },
    { "epoch": 0.32, "learning_rate": 4.561883899233297e-05, "loss": 2.8472, "step": 500 },
    { "epoch": 0.33, "learning_rate": 4.55093099671413e-05, "loss": 2.8497, "step": 510 },
    { "epoch": 0.33, "learning_rate": 4.539978094194962e-05, "loss": 2.8357, "step": 520 },
    { "epoch": 0.34, "learning_rate": 4.529025191675794e-05, "loss": 2.8375, "step": 530 },
    { "epoch": 0.35, "learning_rate": 4.5180722891566266e-05, "loss": 2.8139, "step": 540 },
    { "epoch": 0.35, "learning_rate": 4.507119386637459e-05, "loss": 2.8254, "step": 550 },
    { "epoch": 0.36, "learning_rate": 4.496166484118291e-05, "loss": 2.8252, "step": 560 },
    { "epoch": 0.37, "learning_rate": 4.485213581599124e-05, "loss": 2.8282, "step": 570 },
    { "epoch": 0.37, "learning_rate": 4.4742606790799566e-05, "loss": 2.8091, "step": 580 },
    { "epoch": 0.38, "learning_rate": 4.463307776560789e-05, "loss": 2.831, "step": 590 },
    { "epoch": 0.39, "learning_rate": 4.452354874041621e-05, "loss": 2.8862, "step": 600 },
    { "epoch": 0.39, "learning_rate": 4.4414019715224535e-05, "loss": 2.8276, "step": 610 },
    { "epoch": 0.4, "learning_rate": 4.4304490690032865e-05, "loss": 2.8614, "step": 620 },
    { "epoch": 0.4, "learning_rate": 4.419496166484118e-05, "loss": 2.7905, "step": 630 },
    { "epoch": 0.41, "learning_rate": 4.408543263964951e-05, "loss": 2.8561, "step": 640 },
    { "epoch": 0.42, "learning_rate": 4.3975903614457834e-05, "loss": 2.837, "step": 650 },
    { "epoch": 0.42, "learning_rate": 4.386637458926616e-05, "loss": 2.8084, "step": 660 },
    { "epoch": 0.43, "learning_rate": 4.375684556407448e-05, "loss": 2.7978, "step": 670 },
    { "epoch": 0.44, "learning_rate": 4.36473165388828e-05, "loss": 2.8264, "step": 680 },
    { "epoch": 0.44, "learning_rate": 4.353778751369113e-05, "loss": 2.7847, "step": 690 },
    { "epoch": 0.45, "learning_rate": 4.3428258488499456e-05, "loss": 2.8349, "step": 700 },
    { "epoch": 0.46, "learning_rate": 4.331872946330778e-05, "loss": 2.8281, "step": 710 },
    { "epoch": 0.46, "learning_rate": 4.32092004381161e-05, "loss": 2.8142, "step": 720 },
    { "epoch": 0.47, "learning_rate": 4.3099671412924426e-05, "loss": 2.8364, "step": 730 },
    { "epoch": 0.48, "learning_rate": 4.299014238773275e-05, "loss": 2.8077, "step": 740 },
    { "epoch": 0.48, "learning_rate": 4.288061336254108e-05, "loss": 2.7992, "step": 750 },
    { "epoch": 0.49, "learning_rate": 4.27710843373494e-05, "loss": 2.8266, "step": 760 },
    { "epoch": 0.49, "learning_rate": 4.2661555312157725e-05, "loss": 2.8064, "step": 770 },
    { "epoch": 0.5, "learning_rate": 4.255202628696605e-05, "loss": 2.8159, "step": 780 },
    { "epoch": 0.51, "learning_rate": 4.244249726177437e-05, "loss": 2.8324, "step": 790 },
    { "epoch": 0.51, "learning_rate": 4.23329682365827e-05, "loss": 2.8898, "step": 800 },
    { "epoch": 0.52, "learning_rate": 4.222343921139102e-05, "loss": 2.8212, "step": 810 },
    { "epoch": 0.53, "learning_rate": 4.211391018619935e-05, "loss": 2.8234, "step": 820 },
    { "epoch": 0.53, "learning_rate": 4.2004381161007663e-05, "loss": 2.8306, "step": 830 },
    { "epoch": 0.54, "learning_rate": 4.189485213581599e-05, "loss": 2.8276, "step": 840 },
    { "epoch": 0.55, "learning_rate": 4.1785323110624316e-05, "loss": 2.8373, "step": 850 },
    { "epoch": 0.55, "learning_rate": 4.167579408543264e-05, "loss": 2.8032, "step": 860 },
    { "epoch": 0.56, "learning_rate": 4.156626506024097e-05, "loss": 2.8054, "step": 870 },
    { "epoch": 0.57, "learning_rate": 4.145673603504929e-05, "loss": 2.806, "step": 880 },
    { "epoch": 0.57, "learning_rate": 4.1347207009857616e-05, "loss": 2.822, "step": 890 },
    { "epoch": 0.58, "learning_rate": 4.123767798466594e-05, "loss": 2.8198, "step": 900 },
    { "epoch": 0.58, "learning_rate": 4.112814895947426e-05, "loss": 2.8682, "step": 910 },
    { "epoch": 0.59, "learning_rate": 4.1018619934282585e-05, "loss": 2.8474, "step": 920 },
    { "epoch": 0.6, "learning_rate": 4.0909090909090915e-05, "loss": 2.8419, "step": 930 },
    { "epoch": 0.6, "learning_rate": 4.079956188389923e-05, "loss": 2.8299, "step": 940 },
    { "epoch": 0.61, "learning_rate": 4.069003285870756e-05, "loss": 2.8078, "step": 950 },
    { "epoch": 0.62, "learning_rate": 4.0580503833515884e-05, "loss": 2.8012, "step": 960 },
    { "epoch": 0.62, "learning_rate": 4.047097480832421e-05, "loss": 2.793, "step": 970 },
    { "epoch": 0.63, "learning_rate": 4.036144578313254e-05, "loss": 2.8289, "step": 980 },
    { "epoch": 0.64, "learning_rate": 4.025191675794085e-05, "loss": 2.8005, "step": 990 },
    { "epoch": 0.64, "learning_rate": 4.014238773274918e-05, "loss": 2.8124, "step": 1000 },
    { "epoch": 0.65, "learning_rate": 4.00328587075575e-05, "loss": 2.7928, "step": 1010 },
    { "epoch": 0.66, "learning_rate": 3.992332968236583e-05, "loss": 2.8334, "step": 1020 },
    { "epoch": 0.66, "learning_rate": 3.981380065717415e-05, "loss": 2.8234, "step": 1030 },
    { "epoch": 0.67, "learning_rate": 3.9704271631982476e-05, "loss": 2.8509, "step": 1040 },
    { "epoch": 0.67, "learning_rate": 3.9594742606790806e-05, "loss": 2.7912, "step": 1050 },
    { "epoch": 0.68, "learning_rate": 3.948521358159912e-05, "loss": 2.8191, "step": 1060 },
    { "epoch": 0.69, "learning_rate": 3.937568455640745e-05, "loss": 2.7925, "step": 1070 },
    { "epoch": 0.69, "learning_rate": 3.9266155531215775e-05, "loss": 2.8202, "step": 1080 },
    { "epoch": 0.7, "learning_rate": 3.91566265060241e-05, "loss": 2.8411, "step": 1090 },
    { "epoch": 0.71, "learning_rate": 3.904709748083242e-05, "loss": 2.8074, "step": 1100 },
    { "epoch": 0.71, "learning_rate": 3.893756845564075e-05, "loss": 2.8486, "step": 1110 },
    { "epoch": 0.72, "learning_rate": 3.882803943044907e-05, "loss": 2.8377, "step": 1120 },
    { "epoch": 0.73, "learning_rate": 3.87185104052574e-05, "loss": 2.7922, "step": 1130 },
    { "epoch": 0.73, "learning_rate": 3.860898138006572e-05, "loss": 2.7544, "step": 1140 },
    { "epoch": 0.74, "learning_rate": 3.849945235487404e-05, "loss": 2.8116, "step": 1150 },
    { "epoch": 0.75, "learning_rate": 3.838992332968237e-05, "loss": 2.8353, "step": 1160 },
    { "epoch": 0.75, "learning_rate": 3.828039430449069e-05, "loss": 2.7945, "step": 1170 },
    { "epoch": 0.76, "learning_rate": 3.817086527929902e-05, "loss": 2.8528, "step": 1180 },
    { "epoch": 0.76, "learning_rate": 3.8061336254107336e-05, "loss": 2.7954, "step": 1190 },
    { "epoch": 0.77, "learning_rate": 3.7951807228915666e-05, "loss": 2.8015, "step": 1200 },
    { "epoch": 0.78, "learning_rate": 3.784227820372399e-05, "loss": 2.7983, "step": 1210 },
    { "epoch": 0.78, "learning_rate": 3.773274917853231e-05, "loss": 2.8139, "step": 1220 },
    { "epoch": 0.79, "learning_rate": 3.7623220153340635e-05, "loss": 2.8045, "step": 1230 },
    { "epoch": 0.8, "learning_rate": 3.751369112814896e-05, "loss": 2.8147, "step": 1240 },
    { "epoch": 0.8, "learning_rate": 3.740416210295729e-05, "loss": 2.8169, "step": 1250 },
    { "epoch": 0.81, "learning_rate": 3.729463307776561e-05, "loss": 2.8122, "step": 1260 },
    { "epoch": 0.82, "learning_rate": 3.7185104052573934e-05, "loss": 2.8188, "step": 1270 },
    { "epoch": 0.82, "learning_rate": 3.707557502738226e-05, "loss": 2.8409, "step": 1280 },
    { "epoch": 0.83, "learning_rate": 3.696604600219058e-05, "loss": 2.7812, "step": 1290 },
    { "epoch": 0.84, "learning_rate": 3.6856516976998903e-05, "loss": 2.8083, "step": 1300 },
    { "epoch": 0.84, "learning_rate": 3.674698795180723e-05, "loss": 2.7675, "step": 1310 },
    { "epoch": 0.85, "learning_rate": 3.6637458926615556e-05, "loss": 2.8239, "step": 1320 },
    { "epoch": 0.85, "learning_rate": 3.652792990142388e-05, "loss": 2.8222, "step": 1330 },
    { "epoch": 0.86, "learning_rate": 3.64184008762322e-05, "loss": 2.7656, "step": 1340 },
    { "epoch": 0.87, "learning_rate": 3.6308871851040526e-05, "loss": 2.8149, "step": 1350 },
    { "epoch": 0.87, "learning_rate": 3.6199342825848856e-05, "loss": 2.7705, "step": 1360 },
    { "epoch": 0.88, "learning_rate": 3.608981380065717e-05, "loss": 2.7853, "step": 1370 },
    { "epoch": 0.89, "learning_rate": 3.59802847754655e-05, "loss": 2.7719, "step": 1380 },
    { "epoch": 0.89, "learning_rate": 3.5870755750273825e-05, "loss": 2.755, "step": 1390 },
    { "epoch": 0.9, "learning_rate": 3.576122672508215e-05, "loss": 2.8628, "step": 1400 },
    { "epoch": 0.91, "learning_rate": 3.565169769989047e-05, "loss": 2.7986, "step": 1410 },
    { "epoch": 0.91, "learning_rate": 3.5542168674698794e-05, "loss": 2.8426, "step": 1420 },
    { "epoch": 0.92, "learning_rate": 3.5432639649507124e-05, "loss": 2.7956, "step": 1430 },
    { "epoch": 0.93, "learning_rate": 3.532311062431545e-05, "loss": 2.7683, "step": 1440 },
    { "epoch": 0.93, "learning_rate": 3.521358159912377e-05, "loss": 2.7692, "step": 1450 },
    { "epoch": 0.94, "learning_rate": 3.510405257393209e-05, "loss": 2.8034, "step": 1460 },
    { "epoch": 0.94, "learning_rate": 3.4994523548740416e-05, "loss": 2.7739, "step": 1470 },
    { "epoch": 0.95, "learning_rate": 3.488499452354874e-05, "loss": 2.7754, "step": 1480 },
    { "epoch": 0.96, "learning_rate": 3.477546549835707e-05, "loss": 2.7881, "step": 1490 },
    { "epoch": 0.96, "learning_rate": 3.4665936473165386e-05, "loss": 2.8022, "step": 1500 },
    { "epoch": 0.97, "learning_rate": 3.4556407447973716e-05, "loss": 2.8127, "step": 1510 },
    { "epoch": 0.98, "learning_rate": 3.444687842278204e-05, "loss": 2.7977, "step": 1520 },
    { "epoch": 0.98, "learning_rate": 3.433734939759036e-05, "loss": 2.8056, "step": 1530 },
    { "epoch": 0.99, "learning_rate": 3.422782037239869e-05, "loss": 2.7901, "step": 1540 },
    { "epoch": 1.0, "learning_rate": 3.411829134720701e-05, "loss": 2.8467, "step": 1550 },
    { "epoch": 1.0, "learning_rate": 3.400876232201534e-05, "loss": 2.7446, "step": 1560 },
    { "epoch": 1.01, "learning_rate": 3.389923329682366e-05, "loss": 2.7269, "step": 1570 },
    { "epoch": 1.02, "learning_rate": 3.3789704271631984e-05, "loss": 2.7249, "step": 1580 },
    { "epoch": 1.02, "learning_rate": 3.368017524644031e-05, "loss": 2.7381, "step": 1590 },
    { "epoch": 1.03, "learning_rate": 3.357064622124863e-05, "loss": 2.7462, "step": 1600 },
    { "epoch": 1.03, "learning_rate": 3.346111719605696e-05, "loss": 2.761, "step": 1610 },
    { "epoch": 1.04, "learning_rate": 3.335158817086528e-05, "loss": 2.7193, "step": 1620 },
    { "epoch": 1.05, "learning_rate": 3.3242059145673606e-05, "loss": 2.7272, "step": 1630 },
    { "epoch": 1.05, "learning_rate": 3.313253012048193e-05, "loss": 2.7436, "step": 1640 },
    { "epoch": 1.06, "learning_rate": 3.302300109529025e-05, "loss": 2.738, "step": 1650 },
    { "epoch": 1.07, "learning_rate": 3.2913472070098576e-05, "loss": 2.7364, "step": 1660 },
    { "epoch": 1.07, "learning_rate": 3.2803943044906906e-05, "loss": 2.7036, "step": 1670 },
    { "epoch": 1.08, "learning_rate": 3.269441401971522e-05, "loss": 2.7632, "step": 1680 },
    { "epoch": 1.09, "learning_rate": 3.258488499452355e-05, "loss": 2.749, "step": 1690 },
    { "epoch": 1.09, "learning_rate": 3.2475355969331875e-05, "loss": 2.7418, "step": 1700 },
    { "epoch": 1.1, "learning_rate": 3.23658269441402e-05, "loss": 2.718, "step": 1710 },
    { "epoch": 1.11, "learning_rate": 3.225629791894853e-05, "loss": 2.6969, "step": 1720 },
    { "epoch": 1.11, "learning_rate": 3.2146768893756844e-05, "loss": 2.7334, "step": 1730 },
    { "epoch": 1.12, "learning_rate": 3.2037239868565174e-05, "loss": 2.7445, "step": 1740 },
    { "epoch": 1.12, "learning_rate": 3.192771084337349e-05, "loss": 2.7153, "step": 1750 },
    { "epoch": 1.13, "learning_rate": 3.181818181818182e-05, "loss": 2.7625, "step": 1760 },
    { "epoch": 1.14, "learning_rate": 3.1708652792990143e-05, "loss": 2.7302, "step": 1770 },
    { "epoch": 1.14, "learning_rate": 3.1599123767798467e-05, "loss": 2.7519, "step": 1780 },
    { "epoch": 1.15, "learning_rate": 3.148959474260679e-05, "loss": 2.7566, "step": 1790 },
    { "epoch": 1.16, "learning_rate": 3.138006571741512e-05, "loss": 2.7113, "step": 1800 },
    { "epoch": 1.16, "learning_rate": 3.127053669222344e-05, "loss": 2.761, "step": 1810 },
    { "epoch": 1.17, "learning_rate": 3.1161007667031766e-05, "loss": 2.7201, "step": 1820 },
    { "epoch": 1.18, "learning_rate": 3.105147864184009e-05, "loss": 2.7285, "step": 1830 },
    { "epoch": 1.18, "learning_rate": 3.094194961664841e-05, "loss": 2.7033, "step": 1840 },
    { "epoch": 1.19, "learning_rate": 3.083242059145674e-05, "loss": 2.697, "step": 1850 },
    { "epoch": 1.2, "learning_rate": 3.072289156626506e-05, "loss": 2.7614, "step": 1860 },
    { "epoch": 1.2, "learning_rate": 3.061336254107339e-05, "loss": 2.7281, "step": 1870 },
    { "epoch": 1.21, "learning_rate": 3.0503833515881708e-05, "loss": 2.7169, "step": 1880 },
    { "epoch": 1.21, "learning_rate": 3.0394304490690034e-05, "loss": 2.7, "step": 1890 },
    { "epoch": 1.22, "learning_rate": 3.028477546549836e-05, "loss": 2.7139, "step": 1900 },
    { "epoch": 1.23, "learning_rate": 3.017524644030668e-05, "loss": 2.6886, "step": 1910 },
    { "epoch": 1.23, "learning_rate": 3.0065717415115007e-05, "loss": 2.7662, "step": 1920 },
    { "epoch": 1.24, "learning_rate": 2.995618838992333e-05, "loss": 2.7315, "step": 1930 },
    { "epoch": 1.25, "learning_rate": 2.9846659364731656e-05, "loss": 2.7221, "step": 1940 },
    { "epoch": 1.25, "learning_rate": 2.9737130339539983e-05, "loss": 2.6996, "step": 1950 },
    { "epoch": 1.26, "learning_rate": 2.9627601314348303e-05, "loss": 2.7292, "step": 1960 },
    { "epoch": 1.27, "learning_rate": 2.951807228915663e-05, "loss": 2.7306, "step": 1970 },
    { "epoch": 1.27, "learning_rate": 2.940854326396495e-05, "loss": 2.7258, "step": 1980 },
    { "epoch": 1.28, "learning_rate": 2.9299014238773275e-05, "loss": 2.7372, "step": 1990 },
    { "epoch": 1.29, "learning_rate": 2.9189485213581602e-05, "loss": 2.7249, "step": 2000 },
    { "epoch": 1.29, "learning_rate": 2.907995618838992e-05, "loss": 2.7432, "step": 2010 },
    { "epoch": 1.3, "learning_rate": 2.8970427163198248e-05, "loss": 2.716, "step": 2020 },
    { "epoch": 1.3, "learning_rate": 2.8860898138006575e-05, "loss": 2.7356, "step": 2030 },
    { "epoch": 1.31, "learning_rate": 2.8751369112814898e-05, "loss": 2.7392, "step": 2040 },
    { "epoch": 1.32, "learning_rate": 2.8641840087623224e-05, "loss": 2.7202, "step": 2050 },
    { "epoch": 1.32, "learning_rate": 2.8532311062431544e-05, "loss": 2.751, "step": 2060 },
    { "epoch": 1.33, "learning_rate": 2.842278203723987e-05, "loss": 2.7147, "step": 2070 },
    { "epoch": 1.34, "learning_rate": 2.8313253012048197e-05, "loss": 2.766, "step": 2080 },
    { "epoch": 1.34, "learning_rate": 2.8203723986856517e-05, "loss": 2.7035, "step": 2090 },
    { "epoch": 1.35, "learning_rate": 2.8094194961664843e-05, "loss": 2.7446, "step": 2100 },
    { "epoch": 1.36, "learning_rate": 2.7984665936473166e-05, "loss": 2.7258, "step": 2110 },
    { "epoch": 1.36, "learning_rate": 2.787513691128149e-05, "loss": 2.7014, "step": 2120 },
    { "epoch": 1.37, "learning_rate": 2.7765607886089816e-05, "loss": 2.7351, "step": 2130 },
    { "epoch": 1.38, "learning_rate": 2.765607886089814e-05, "loss": 2.714, "step": 2140 },
    { "epoch": 1.38, "learning_rate": 2.7546549835706465e-05, "loss": 2.7449, "step": 2150 },
    { "epoch": 1.39, "learning_rate": 2.7437020810514785e-05, "loss": 2.7405, "step": 2160 },
    { "epoch": 1.39, "learning_rate": 2.732749178532311e-05, "loss": 2.7395, "step": 2170 },
    { "epoch": 1.4, "learning_rate": 2.7217962760131438e-05, "loss": 2.748, "step": 2180 },
    { "epoch": 1.41, "learning_rate": 2.7108433734939758e-05, "loss": 2.7138, "step": 2190 },
    { "epoch": 1.41, "learning_rate": 2.6998904709748084e-05, "loss": 2.7807, "step": 2200 },
    { "epoch": 1.42, "learning_rate": 2.6889375684556407e-05, "loss": 2.759, "step": 2210 },
    { "epoch": 1.43, "learning_rate": 2.6779846659364734e-05, "loss": 2.7023, "step": 2220 },
    { "epoch": 1.43, "learning_rate": 2.667031763417306e-05, "loss": 2.7701, "step": 2230 },
    { "epoch": 1.44, "learning_rate": 2.656078860898138e-05, "loss": 2.7268, "step": 2240 },
    { "epoch": 1.45, "learning_rate": 2.6451259583789707e-05, "loss": 2.7427, "step": 2250 },
    { "epoch": 1.45, "learning_rate": 2.6341730558598033e-05, "loss": 2.6808, "step": 2260 },
    { "epoch": 1.46, "learning_rate": 2.6232201533406353e-05, "loss": 2.7167, "step": 2270 },
    { "epoch": 1.47, "learning_rate": 2.612267250821468e-05, "loss": 2.71, "step": 2280 },
    { "epoch": 1.47, "learning_rate": 2.6013143483023e-05, "loss": 2.7486, "step": 2290 },
    { "epoch": 1.48, "learning_rate": 2.5903614457831325e-05, "loss": 2.7368, "step": 2300 },
    { "epoch": 1.48, "learning_rate": 2.5794085432639652e-05, "loss": 2.7369, "step": 2310 },
    { "epoch": 1.49, "learning_rate": 2.5684556407447975e-05, "loss": 2.7162, "step": 2320 },
    { "epoch": 1.5, "learning_rate": 2.55750273822563e-05, "loss": 2.7446, "step": 2330 },
    { "epoch": 1.5, "learning_rate": 2.546549835706462e-05, "loss": 2.7379, "step": 2340 },
    { "epoch": 1.51, "learning_rate": 2.5355969331872948e-05, "loss": 2.7436, "step": 2350 },
    { "epoch": 1.52, "learning_rate": 2.5246440306681274e-05, "loss": 2.6969, "step": 2360 },
    { "epoch": 1.52, "learning_rate": 2.5136911281489594e-05, "loss": 2.7595, "step": 2370 },
    { "epoch": 1.53, "learning_rate": 2.502738225629792e-05, "loss": 2.7417, "step": 2380 },
    { "epoch": 1.54, "learning_rate": 2.4917853231106244e-05, "loss": 2.6993, "step": 2390 },
    { "epoch": 1.54, "learning_rate": 2.4808324205914567e-05, "loss": 2.6811, "step": 2400 },
    { "epoch": 1.55, "learning_rate": 2.4698795180722893e-05, "loss": 2.7363, "step": 2410 },
    { "epoch": 1.56, "learning_rate": 2.458926615553122e-05, "loss": 2.7304, "step": 2420 },
    { "epoch": 1.56, "learning_rate": 2.4479737130339543e-05, "loss": 2.706, "step": 2430 },
    { "epoch": 1.57, "learning_rate": 2.4370208105147866e-05, "loss": 2.7094, "step": 2440 },
    { "epoch": 1.57, "learning_rate": 2.426067907995619e-05, "loss": 2.7501, "step": 2450 },
    { "epoch": 1.58, "learning_rate": 2.4151150054764512e-05, "loss": 2.7244, "step": 2460 },
    { "epoch": 1.59, "learning_rate": 2.404162102957284e-05, "loss": 2.7131, "step": 2470 },
    { "epoch": 1.59, "learning_rate": 2.393209200438116e-05, "loss": 2.7198, "step": 2480 },
    { "epoch": 1.6, "learning_rate": 2.3822562979189485e-05, "loss": 2.7474, "step": 2490 },
    { "epoch": 1.61, "learning_rate": 2.371303395399781e-05, "loss": 2.7216, "step": 2500 },
    { "epoch": 1.61, "learning_rate": 2.3603504928806138e-05, "loss": 2.7487, "step": 2510 },
    { "epoch": 1.62, "learning_rate": 2.349397590361446e-05, "loss": 2.7063, "step": 2520 },
    { "epoch": 1.63, "learning_rate": 2.3384446878422784e-05, "loss": 2.7108, "step": 2530 },
    { "epoch": 1.63, "learning_rate": 2.3274917853231107e-05, "loss": 2.7471, "step": 2540 },
    { "epoch": 1.64, "learning_rate": 2.316538882803943e-05, "loss": 2.6941, "step": 2550 },
    { "epoch": 1.65, "learning_rate": 2.3055859802847757e-05, "loss": 2.7476, "step": 2560 },
    { "epoch": 1.65, "learning_rate": 2.294633077765608e-05, "loss": 2.6889, "step": 2570 },
    { "epoch": 1.66, "learning_rate": 2.2836801752464403e-05, "loss": 2.7739, "step": 2580 },
    { "epoch": 1.66, "learning_rate": 2.272727272727273e-05, "loss": 2.7491, "step": 2590 },
    { "epoch": 1.67, "learning_rate": 2.2617743702081052e-05, "loss": 2.725, "step": 2600 },
    { "epoch": 1.68, "learning_rate": 2.250821467688938e-05, "loss": 2.7143, "step": 2610 },
    { "epoch": 1.68, "learning_rate": 2.2398685651697702e-05, "loss": 2.7077, "step": 2620 },
    { "epoch": 1.69, "learning_rate": 2.2289156626506025e-05, "loss": 2.7286, "step": 2630 },
    { "epoch": 1.7, "learning_rate": 2.2179627601314348e-05, "loss": 2.7379, "step": 2640 },
    { "epoch": 1.7, "learning_rate": 2.2070098576122675e-05, "loss": 2.7116, "step": 2650 },
    { "epoch": 1.71, "learning_rate": 2.1960569550930998e-05, "loss": 2.7165, "step": 2660 },
    { "epoch": 1.72, "learning_rate": 2.185104052573932e-05, "loss": 2.7406, "step": 2670 },
    { "epoch": 1.72, "learning_rate": 2.1741511500547644e-05, "loss": 2.7019, "step": 2680 },
    { "epoch": 1.73, "learning_rate": 2.163198247535597e-05, "loss": 2.738, "step": 2690 },
    { "epoch": 1.74, "learning_rate": 2.1522453450164297e-05, "loss": 2.688, "step": 2700 },
    { "epoch": 1.74, "learning_rate": 2.141292442497262e-05, "loss": 2.7302, "step": 2710 },
    { "epoch": 1.75, "learning_rate": 2.1303395399780943e-05, "loss": 2.7449, "step": 2720 },
    { "epoch": 1.75, "learning_rate": 2.1193866374589266e-05, "loss": 2.7179, "step": 2730 },
    { "epoch": 1.76, "learning_rate": 2.1084337349397593e-05, "loss": 2.6978, "step": 2740 },
    { "epoch": 1.77, "learning_rate": 2.0974808324205916e-05, "loss": 2.718, "step": 2750 },
    { "epoch": 1.77, "learning_rate": 2.086527929901424e-05, "loss": 2.7595, "step": 2760 },
    { "epoch": 1.78, "learning_rate": 2.0755750273822562e-05, "loss": 2.7445, "step": 2770 },
    { "epoch": 1.79, "learning_rate": 2.064622124863089e-05, "loss": 2.6969, "step": 2780 },
    { "epoch": 1.79, "learning_rate": 2.0536692223439215e-05, "loss": 2.7149, "step": 2790 },
    { "epoch": 1.8, "learning_rate": 2.0427163198247538e-05, "loss": 2.6722, "step": 2800 },
    { "epoch": 1.81, "learning_rate": 2.031763417305586e-05, "loss": 2.7336, "step": 2810 },
    { "epoch": 1.81, "learning_rate": 2.0208105147864184e-05, "loss": 2.7387, "step": 2820 },
    { "epoch": 1.82, "learning_rate": 2.009857612267251e-05, "loss": 2.753, "step": 2830 },
    { "epoch": 1.83, "learning_rate": 1.9989047097480834e-05, "loss": 2.7185, "step": 2840 },
    { "epoch": 1.83, "learning_rate": 1.9879518072289157e-05, "loss": 2.7218, "step": 2850 },
    { "epoch": 1.84, "learning_rate": 1.976998904709748e-05, "loss": 2.7595, "step": 2860 },
    { "epoch": 1.85, "learning_rate": 1.9660460021905803e-05, "loss": 2.7301, "step": 2870 },
    { "epoch": 1.85, "learning_rate": 1.955093099671413e-05, "loss": 2.7108, "step": 2880 },
    { "epoch": 1.86, "learning_rate": 1.9441401971522456e-05, "loss": 2.7146, "step": 2890 },
    { "epoch": 1.86, "learning_rate": 1.933187294633078e-05, "loss": 2.7083, "step": 2900 },
    { "epoch": 1.87, "learning_rate": 1.9222343921139102e-05, "loss": 2.6983, "step": 2910 },
    { "epoch": 1.88, "learning_rate": 1.9112814895947426e-05, "loss": 2.6832, "step": 2920 },
    { "epoch": 1.88, "learning_rate": 1.9003285870755752e-05, "loss": 2.7189, "step": 2930 },
    { "epoch": 1.89, "learning_rate": 1.8893756845564075e-05, "loss": 2.7567, "step": 2940 },
    { "epoch": 1.9, "learning_rate": 1.8784227820372398e-05, "loss": 2.6915, "step": 2950 },
    { "epoch": 1.9, "learning_rate": 1.867469879518072e-05, "loss": 2.7368, "step": 2960 },
    { "epoch": 1.91, "learning_rate": 1.8565169769989048e-05, "loss": 2.7478, "step": 2970 },
    { "epoch": 1.92, "learning_rate": 1.8455640744797374e-05, "loss": 2.71, "step": 2980 },
    { "epoch": 1.92, "learning_rate": 1.8346111719605697e-05, "loss": 2.7042, "step": 2990 },
    { "epoch": 1.93, "learning_rate": 1.823658269441402e-05, "loss": 2.7431, "step": 3000 },
    { "epoch": 1.94, "learning_rate": 1.8127053669222344e-05, "loss": 2.7226, "step": 3010 },
    { "epoch": 1.94, "learning_rate": 1.801752464403067e-05, "loss": 2.7644, "step": 3020 },
    { "epoch": 1.95, "learning_rate": 1.7907995618838993e-05, "loss": 2.7904, "step": 3030 },
    { "epoch": 1.95, "learning_rate": 1.7798466593647316e-05, "loss": 2.686, "step": 3040 },
    { "epoch": 1.96, "learning_rate": 1.768893756845564e-05, "loss": 2.6975, "step": 3050 },
    { "epoch": 1.97, "learning_rate": 1.7579408543263966e-05, "loss": 2.7183, "step": 3060 },
    { "epoch": 1.97, "learning_rate": 1.7469879518072292e-05, "loss": 2.7052, "step": 3070 },
    { "epoch": 1.98, "learning_rate": 1.7360350492880616e-05, "loss": 2.7286, "step": 3080 },
    { "epoch": 1.99, "learning_rate": 1.725082146768894e-05, "loss": 2.7089, "step": 3090 },
    { "epoch": 1.99, "learning_rate": 1.7141292442497262e-05, "loss": 2.7264, "step": 3100 },
    { "epoch": 2.0, "learning_rate": 1.7031763417305588e-05, "loss": 2.7051, "step": 3110 },
    { "epoch": 2.01, "learning_rate": 1.692223439211391e-05, "loss": 2.6688, "step": 3120 },
    { "epoch": 2.01, "learning_rate": 1.6812705366922234e-05, "loss": 2.6887, "step": 3130 },
    { "epoch": 2.02, "learning_rate": 1.6703176341730558e-05, "loss": 2.6779, "step": 3140 },
    { "epoch": 2.03, "learning_rate": 1.659364731653888e-05, "loss": 2.6838, "step": 3150 },
    { "epoch": 2.03, "learning_rate": 1.6484118291347207e-05, "loss": 2.6666, "step": 3160 },
    { "epoch": 2.04, "learning_rate": 1.6374589266155534e-05, "loss": 2.6558, "step": 3170 },
    { "epoch": 2.04, "learning_rate": 1.6265060240963857e-05, "loss": 2.6693, "step": 3180 },
    { "epoch": 2.05, "learning_rate": 1.615553121577218e-05, "loss": 2.6539, "step": 3190 },
    { "epoch": 2.06, "learning_rate": 1.6046002190580506e-05, "loss": 2.6365, "step": 3200 },
    { "epoch": 2.06, "learning_rate": 1.593647316538883e-05, "loss": 2.6712, "step": 3210 },
    { "epoch": 2.07, "learning_rate": 1.5826944140197152e-05, "loss": 2.6668, "step": 3220 },
    { "epoch": 2.08, "learning_rate": 1.5717415115005476e-05, "loss": 2.7002, "step": 3230 },
    { "epoch": 2.08, "learning_rate": 1.56078860898138e-05, "loss": 2.641, "step": 3240 },
    { "epoch": 2.09, "learning_rate": 1.5498357064622125e-05, "loss": 2.6985, "step": 3250 },
    { "epoch": 2.1, "learning_rate": 1.538882803943045e-05, "loss": 2.6769, "step": 3260 },
    { "epoch": 2.1, "learning_rate": 1.5279299014238775e-05, "loss": 2.6481, "step": 3270 },
    { "epoch": 2.11, "learning_rate": 1.5169769989047098e-05, "loss": 2.6592, "step": 3280 },
    { "epoch": 2.12, "learning_rate": 1.5060240963855424e-05, "loss": 2.6225, "step": 3290 },
    { "epoch": 2.12, "learning_rate": 1.4950711938663747e-05, "loss": 2.661, "step": 3300 },
    { "epoch": 2.13, "learning_rate": 1.484118291347207e-05, "loss": 2.6897, "step": 3310 },
    { "epoch": 2.13, "learning_rate": 1.4731653888280394e-05, "loss": 2.6965, "step": 3320 },
    { "epoch": 2.14, "learning_rate": 1.4622124863088718e-05, "loss": 2.6726, "step": 3330 },
    { "epoch": 2.15, "learning_rate": 1.4512595837897045e-05, "loss": 2.6131, "step": 3340 },
    { "epoch": 2.15, "learning_rate": 1.4403066812705368e-05, "loss": 2.6998, "step": 3350 },
    { "epoch": 2.16, "learning_rate": 1.4293537787513691e-05, "loss": 2.6998, "step": 3360 },
    { "epoch": 2.17, "learning_rate": 1.4184008762322016e-05, "loss": 2.6854, "step": 3370 },
    { "epoch": 2.17, "learning_rate": 1.4074479737130339e-05, "loss": 2.681, "step": 3380 },
    { "epoch": 2.18, "learning_rate": 1.3964950711938666e-05, "loss": 2.6531, "step": 3390 },
    { "epoch": 2.19, "learning_rate": 1.3855421686746989e-05, "loss": 2.7184, "step": 3400 },
    { "epoch": 2.19, "learning_rate": 1.3745892661555312e-05, "loss": 2.7018, "step": 3410 },
    { "epoch": 2.2, "learning_rate": 1.3636363636363637e-05, "loss": 2.6654, "step": 3420 },
    { "epoch": 2.21, "learning_rate": 1.3526834611171963e-05, "loss": 2.6593, "step": 3430 },
    { "epoch": 2.21, "learning_rate": 1.3417305585980286e-05, "loss": 2.6994, "step": 3440 },
    { "epoch": 2.22, "learning_rate": 1.330777656078861e-05, "loss": 2.653, "step": 3450 },
    { "epoch": 2.22, "learning_rate": 1.3198247535596932e-05, "loss": 2.6511, "step": 3460 },
    { "epoch": 2.23, "learning_rate": 1.3088718510405257e-05, "loss": 2.6701, "step": 3470 },
    { "epoch": 2.24, "learning_rate": 1.2979189485213584e-05, "loss": 2.6723, "step": 3480 },
    { "epoch": 2.24, "learning_rate": 1.2869660460021907e-05, "loss": 2.6456, "step": 3490 },
    { "epoch": 2.25, "learning_rate": 1.276013143483023e-05, "loss": 2.6542, "step": 3500 },
    { "epoch": 2.26, "learning_rate": 1.2650602409638555e-05, "loss": 2.6936, "step": 3510 },
    { "epoch": 2.26, "learning_rate": 1.2541073384446881e-05, "loss": 2.6799, "step": 3520 },
    { "epoch": 2.27, "learning_rate": 1.2431544359255204e-05, "loss": 2.6849, "step": 3530 },
    { "epoch": 2.28, "learning_rate": 1.2322015334063527e-05, "loss": 2.692, "step": 3540 },
    { "epoch": 2.28, "learning_rate": 1.221248630887185e-05, "loss": 2.6228, "step": 3550 },
    { "epoch": 2.29, "learning_rate": 1.2102957283680175e-05, "loss": 2.7349, "step": 3560 },
    { "epoch": 2.3, "learning_rate": 1.19934282584885e-05, "loss": 2.6931, "step": 3570 },
    { "epoch": 2.3, "learning_rate": 1.1883899233296825e-05, "loss": 2.6742, "step": 3580 },
    { "epoch": 2.31, "learning_rate": 1.1774370208105148e-05, "loss": 2.685, "step": 3590 },
    { "epoch": 2.31, "learning_rate": 1.1664841182913473e-05, "loss": 2.7104, "step": 3600 },
    { "epoch": 2.32, "learning_rate": 1.1555312157721798e-05, "loss": 2.6751, "step": 3610 },
    { "epoch": 2.33, "learning_rate": 1.144578313253012e-05, "loss": 2.6685, "step": 3620 },
    { "epoch": 2.33, "learning_rate": 1.1336254107338445e-05, "loss": 2.6653, "step": 3630 },
    { "epoch": 2.34, "learning_rate": 1.1226725082146769e-05, "loss": 2.7115, "step": 3640 },
    { "epoch": 2.35, "learning_rate": 1.1117196056955093e-05, "loss": 2.702, "step": 3650 },
    { "epoch": 2.35, "learning_rate": 1.1007667031763418e-05, "loss": 2.6685, "step": 3660 },
    { "epoch": 2.36, "learning_rate": 1.0898138006571743e-05, "loss": 2.6765, "step": 3670 },
    { "epoch": 2.37, "learning_rate": 1.0788608981380066e-05, "loss": 2.6778, "step": 3680 },
    { "epoch": 2.37, "learning_rate": 1.0679079956188389e-05, "loss": 2.6618, "step": 3690 },
    { "epoch": 2.38, "learning_rate": 1.0569550930996714e-05, "loss": 2.6208, "step": 3700 },
    { "epoch": 2.39, "learning_rate": 1.0460021905805039e-05, "loss": 2.6446, "step": 3710 },
    { "epoch": 2.39, "learning_rate": 1.0350492880613364e-05, "loss": 2.7144, "step": 3720 },
    { "epoch": 2.4, "learning_rate": 1.0240963855421687e-05, "loss": 2.6631, "step": 3730 },
    { "epoch": 2.4, "learning_rate": 1.0131434830230011e-05, "loss": 2.6769, "step": 3740 },
    { "epoch": 2.41, "learning_rate": 1.0021905805038336e-05, "loss": 2.7273, "step": 3750 },
    { "epoch": 2.42, "learning_rate": 9.912376779846661e-06, "loss": 2.6302, "step": 3760 },
    { "epoch": 2.42, "learning_rate": 9.802847754654984e-06, "loss": 2.6877, "step": 3770 },
    { "epoch": 2.43, "learning_rate": 9.693318729463307e-06, "loss": 2.658, "step": 3780 },
    { "epoch": 2.44, "learning_rate": 9.583789704271632e-06, "loss": 2.7129, "step": 3790 },
    { "epoch": 2.44, "learning_rate": 9.474260679079957e-06, "loss": 2.7103, "step": 3800 },
    { "epoch": 2.45, "learning_rate": 9.364731653888282e-06, "loss": 2.6904, "step": 3810 },
    { "epoch": 2.46, "learning_rate": 9.255202628696605e-06, "loss": 2.6706, "step": 3820 },
    { "epoch": 2.46, "learning_rate": 9.14567360350493e-06, "loss": 2.6615, "step": 3830 },
    { "epoch": 2.47, "learning_rate": 9.036144578313253e-06, "loss": 2.7039, "step": 3840 },
    { "epoch": 2.48, "learning_rate": 8.926615553121577e-06, "loss": 2.7451, "step": 3850 },
    { "epoch": 2.48, "learning_rate": 8.817086527929902e-06, "loss": 2.6681, "step": 3860 },
    { "epoch": 2.49, "learning_rate": 8.707557502738225e-06, "loss": 2.6745, "step": 3870 },
    { "epoch": 2.49, "learning_rate": 8.59802847754655e-06, "loss": 2.707, "step": 3880 },
    { "epoch": 2.5, "learning_rate": 8.488499452354875e-06, "loss": 2.672, "step": 3890 },
    { "epoch": 2.51, "learning_rate": 8.3789704271632e-06, "loss": 2.647, "step": 3900 },
    { "epoch": 2.51, "learning_rate": 8.269441401971523e-06, "loss": 2.6512, "step": 3910 },
    { "epoch": 2.52, "learning_rate": 8.159912376779848e-06, "loss": 2.6808, "step": 3920 },
    { "epoch": 2.53, "learning_rate": 8.05038335158817e-06, "loss": 2.6612, "step": 3930 },
    { "epoch": 2.53, "learning_rate": 7.940854326396495e-06, "loss": 2.6702, "step": 3940 },
    { "epoch": 2.54, "learning_rate": 7.83132530120482e-06, "loss": 2.661, "step": 3950 },
    { "epoch": 2.55, "learning_rate": 7.721796276013143e-06, "loss": 2.716, "step": 3960 },
    { "epoch": 2.55, "learning_rate": 7.612267250821468e-06, "loss": 2.6917, "step": 3970 },
    { "epoch": 2.56, "learning_rate": 7.502738225629792e-06, "loss": 2.6875, "step": 3980 },
    { "epoch": 2.57, "learning_rate": 7.393209200438117e-06, "loss": 2.704, "step": 3990 },
    { "epoch": 2.57, "learning_rate": 7.283680175246441e-06, "loss": 2.6584, "step": 4000 },
    { "epoch": 2.58, "learning_rate": 7.174151150054764e-06, "loss": 2.6978, "step": 4010 },
    { "epoch": 2.58, "learning_rate": 7.06462212486309e-06, "loss": 2.6987, "step": 4020 },
    { "epoch": 2.59, "learning_rate": 6.955093099671413e-06, "loss": 2.6952, "step": 4030 },
    { "epoch": 2.6, "learning_rate": 6.8455640744797375e-06, "loss": 2.6737, "step": 4040 },
    { "epoch": 2.6, "learning_rate": 6.7360350492880615e-06, "loss": 2.6625, "step": 4050 },
    { "epoch": 2.61, "learning_rate": 6.626506024096386e-06, "loss": 2.6267, "step": 4060 },
    { "epoch": 2.62, "learning_rate": 6.51697699890471e-06, "loss": 2.6574, "step": 4070 },
    { "epoch": 2.62, "learning_rate": 6.407447973713033e-06, "loss": 2.6545, "step": 4080 },
    { "epoch": 2.63, "learning_rate": 6.297918948521359e-06, "loss": 2.687, "step": 4090 },
    { "epoch": 2.64, "learning_rate": 6.188389923329683e-06, "loss": 2.7007, "step": 4100 },
    { "epoch": 2.64, "learning_rate": 6.078860898138007e-06, "loss": 2.6227, "step": 4110 },
    { "epoch": 2.65, "learning_rate": 5.969331872946331e-06, "loss": 2.6677, "step": 4120 },
    { "epoch": 2.66, "learning_rate": 5.859802847754655e-06, "loss": 2.6416, "step": 4130 },
    { "epoch": 2.66, "learning_rate": 5.7502738225629795e-06, "loss": 2.681, "step": 4140 },
    { "epoch": 2.67, "learning_rate": 5.6407447973713035e-06, "loss": 2.6312, "step": 4150 },
    { "epoch": 2.67, "learning_rate": 5.531215772179628e-06, "loss": 2.6801, "step": 4160 },
    { "epoch": 2.68, "learning_rate": 5.421686746987952e-06, "loss": 2.6593, "step": 4170 },
    { "epoch": 2.69, "learning_rate": 5.312157721796276e-06, "loss": 2.6815, "step": 4180 },
    { "epoch": 2.69, "learning_rate": 5.202628696604601e-06, "loss": 2.618, "step": 4190 },
    { "epoch": 2.7, "learning_rate": 5.093099671412924e-06, "loss": 2.6588, "step": 4200 },
    { "epoch": 2.71, "learning_rate": 4.983570646221249e-06, "loss": 2.6882, "step": 4210 },
    { "epoch": 2.71, "learning_rate": 4.874041621029573e-06, "loss": 2.6454, "step": 4220 },
    { "epoch": 2.72, "learning_rate": 4.764512595837898e-06, "loss": 2.6749, "step": 4230 },
    { "epoch": 2.73, "learning_rate": 4.6549835706462216e-06, "loss": 2.6714, "step": 4240 },
    { "epoch": 2.73, "learning_rate": 4.5454545454545455e-06, "loss": 2.6335, "step": 4250 },
    { "epoch": 2.74, "learning_rate": 4.43592552026287e-06, "loss": 2.6734, "step": 4260 },
    { "epoch": 2.75, "learning_rate": 4.326396495071194e-06, "loss": 2.6764, "step": 4270 },
    { "epoch": 2.75, "learning_rate": 4.216867469879518e-06, "loss": 2.6899, "step": 4280 },
    { "epoch": 2.76, "learning_rate": 4.107338444687842e-06, "loss": 2.7273, "step": 4290 },
    { "epoch": 2.76, "learning_rate": 3.997809419496167e-06, "loss": 2.6337, "step": 4300 },
    { "epoch": 2.77, "learning_rate": 3.888280394304491e-06, "loss": 2.6679, "step": 4310 },
    { "epoch": 2.78, "learning_rate": 3.7787513691128153e-06, "loss": 2.6505, "step": 4320 },
    { "epoch": 2.78, "learning_rate": 3.6692223439211392e-06, "loss": 2.6732, "step": 4330 },
    { "epoch": 2.79, "learning_rate": 3.5596933187294636e-06, "loss": 2.6806, "step": 4340 },
    { "epoch": 2.8, "learning_rate": 3.450164293537788e-06, "loss": 2.6498, "step": 4350 },
    { "epoch": 2.8, "learning_rate": 3.3406352683461115e-06, "loss": 2.6626, "step": 4360 },
    { "epoch": 2.81, "learning_rate": 3.231106243154436e-06, "loss": 2.6762, "step": 4370 },
    { "epoch": 2.82, "learning_rate": 3.1215772179627602e-06, "loss": 2.6767, "step": 4380 },
    { "epoch": 2.82, "learning_rate": 3.0120481927710846e-06, "loss": 2.6712, "step": 4390 },
    { "epoch": 2.83, "learning_rate": 2.9025191675794086e-06, "loss": 2.6342, "step": 4400 },
    { "epoch": 2.84, "learning_rate": 2.792990142387733e-06, "loss": 2.7124, "step": 4410 },
    { "epoch": 2.84, "learning_rate": 2.683461117196057e-06, "loss": 2.6639, "step": 4420 },
    { "epoch": 2.85, "learning_rate": 2.5739320920043813e-06, "loss": 2.6494, "step": 4430 },
    { "epoch": 2.85, "learning_rate": 2.4644030668127056e-06, "loss": 2.6704, "step": 4440 },
    { "epoch": 2.86, "learning_rate": 2.35487404162103e-06, "loss": 2.67, "step": 4450 },
    { "epoch": 2.87, "learning_rate": 2.245345016429354e-06, "loss": 2.66, "step": 4460 },
    { "epoch": 2.87, "learning_rate": 2.135815991237678e-06, "loss": 2.6743, "step": 4470 },
    { "epoch": 2.88, "learning_rate": 2.0262869660460023e-06, "loss": 2.6514, "step": 4480 },
    { "epoch": 2.89, "learning_rate": 1.9167579408543262e-06, "loss": 2.6848, "step": 4490 },
    { "epoch": 2.89, "learning_rate": 1.8072289156626506e-06, "loss": 2.7078, "step": 4500 },
    { "epoch": 2.9, "learning_rate": 1.697699890470975e-06, "loss": 2.6466, "step": 4510 },
    { "epoch": 2.91, "learning_rate": 1.5881708652792991e-06, "loss": 2.6073, "step": 4520 },
    { "epoch": 2.91, "learning_rate": 1.4786418400876233e-06, "loss": 2.7111, "step": 4530 },
    { "epoch": 2.92, "learning_rate": 1.3691128148959475e-06, "loss": 2.688, "step": 4540 },
    { "epoch": 2.93, "learning_rate": 1.2595837897042716e-06, "loss": 2.6719, "step": 4550 },
    { "epoch": 2.93, "learning_rate": 1.1500547645125958e-06, "loss": 2.638, "step": 4560 },
    { "epoch": 2.94, "learning_rate": 1.0405257393209202e-06, "loss": 2.6686, "step": 4570 },
    { "epoch": 2.94, "learning_rate": 9.309967141292442e-07, "loss": 2.7157, "step": 4580 },
    { "epoch": 2.95, "learning_rate": 8.214676889375685e-07, "loss": 2.6681, "step": 4590 },
    { "epoch": 2.96, "learning_rate": 7.119386637458927e-07, "loss": 2.6724, "step": 4600 },
    { "epoch": 2.96, "learning_rate": 6.024096385542169e-07, "loss": 2.6502, "step": 4610 },
    { "epoch": 2.97, "learning_rate": 4.928806133625412e-07, "loss": 2.6625, "step": 4620 },
    { "epoch": 2.98, "learning_rate": 3.833515881708653e-07, "loss": 2.6823, "step": 4630 },
    { "epoch": 2.98, "learning_rate": 2.738225629791895e-07, "loss": 2.6877, "step": 4640 },
    { "epoch": 2.99, "learning_rate": 1.642935377875137e-07, "loss": 2.6878, "step": 4650 },
    { "epoch": 3.0, "learning_rate": 5.47645125958379e-08, "loss": 2.7003, "step": 4660 },
    { "epoch": 3.0, "step": 4665, "total_flos": 2.0794722991315354e+17, "train_loss": 1.1010990204151803, "train_runtime": 40118.9781, "train_samples_per_second": 1.396, "train_steps_per_second": 0.116 }
  ],
  "max_steps": 4665,
  "num_train_epochs": 3,
  "total_flos": 2.0794722991315354e+17,
  "trial_name": null,
  "trial_params": null
}