{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 296598,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.991571082744995e-05,
      "loss": 3.2049,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9831421654899904e-05,
      "loss": 3.1398,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.974713248234985e-05,
      "loss": 3.1086,
      "step": 1500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.96628433097998e-05,
      "loss": 3.0911,
      "step": 2000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.957855413724975e-05,
      "loss": 3.0871,
      "step": 2500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.94942649646997e-05,
      "loss": 3.0705,
      "step": 3000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.940997579214965e-05,
      "loss": 3.0688,
      "step": 3500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.932568661959959e-05,
      "loss": 3.0577,
      "step": 4000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.924139744704954e-05,
      "loss": 3.0453,
      "step": 4500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.915710827449949e-05,
      "loss": 3.0344,
      "step": 5000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.907281910194944e-05,
      "loss": 3.0257,
      "step": 5500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.898852992939939e-05,
      "loss": 3.025,
      "step": 6000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.890424075684934e-05,
      "loss": 3.0335,
      "step": 6500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.881995158429929e-05,
      "loss": 3.0088,
      "step": 7000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.873566241174924e-05,
      "loss": 3.0059,
      "step": 7500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.865137323919919e-05,
      "loss": 3.0019,
      "step": 8000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.856708406664914e-05,
      "loss": 3.0,
      "step": 8500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8482794894099085e-05,
      "loss": 2.9878,
      "step": 9000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8398505721549036e-05,
      "loss": 2.9937,
      "step": 9500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8314216548998987e-05,
      "loss": 2.9842,
      "step": 10000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.822992737644894e-05,
      "loss": 2.9703,
      "step": 10500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.814563820389888e-05,
      "loss": 2.9832,
      "step": 11000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.8061349031348826e-05,
      "loss": 2.9667,
      "step": 11500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.797705985879878e-05,
      "loss": 2.974,
      "step": 12000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.789277068624873e-05,
      "loss": 2.9646,
      "step": 12500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.780848151369868e-05,
      "loss": 2.963,
      "step": 13000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.772419234114863e-05,
      "loss": 2.9695,
      "step": 13500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7639903168598573e-05,
      "loss": 2.9626,
      "step": 14000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.7555613996048524e-05,
      "loss": 2.9545,
      "step": 14500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.7471324823498475e-05,
      "loss": 2.9565,
      "step": 15000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7387035650948426e-05,
      "loss": 2.9575,
      "step": 15500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.730274647839838e-05,
      "loss": 2.9429,
      "step": 16000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.721845730584832e-05,
      "loss": 2.9444,
      "step": 16500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.713416813329827e-05,
      "loss": 2.9417,
      "step": 17000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.704987896074822e-05,
      "loss": 2.9429,
      "step": 17500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.6965589788198174e-05,
      "loss": 2.9307,
      "step": 18000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.688130061564812e-05,
      "loss": 2.9382,
      "step": 18500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.679701144309806e-05,
      "loss": 2.937,
      "step": 19000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.671272227054801e-05,
      "loss": 2.9272,
      "step": 19500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.6628433097997964e-05,
      "loss": 2.9366,
      "step": 20000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6544143925447915e-05,
      "loss": 2.9221,
      "step": 20500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6459854752897866e-05,
      "loss": 2.9125,
      "step": 21000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.637556558034781e-05,
      "loss": 2.9132,
      "step": 21500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.629127640779776e-05,
      "loss": 2.9154,
      "step": 22000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.620698723524771e-05,
      "loss": 2.9117,
      "step": 22500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.612269806269766e-05,
      "loss": 2.9072,
      "step": 23000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.6038408890147614e-05,
      "loss": 2.9119,
      "step": 23500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.595411971759756e-05,
      "loss": 2.9128,
      "step": 24000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.586983054504751e-05,
      "loss": 2.9075,
      "step": 24500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.578554137249746e-05,
      "loss": 2.9084,
      "step": 25000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5701252199947404e-05,
      "loss": 2.9053,
      "step": 25500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5616963027397354e-05,
      "loss": 2.9007,
      "step": 26000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.55326738548473e-05,
      "loss": 2.8959,
      "step": 26500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.544838468229725e-05,
      "loss": 2.8986,
      "step": 27000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.53640955097472e-05,
      "loss": 2.8892,
      "step": 27500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.527980633719715e-05,
      "loss": 2.9073,
      "step": 28000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.51955171646471e-05,
      "loss": 2.8856,
      "step": 28500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.5111227992097046e-05,
      "loss": 2.9008,
      "step": 29000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.5026938819547e-05,
      "loss": 2.8927,
      "step": 29500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.494264964699695e-05,
      "loss": 2.8888,
      "step": 30000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.48583604744469e-05,
      "loss": 2.8921,
      "step": 30500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.477407130189685e-05,
      "loss": 2.8846,
      "step": 31000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4689782129346794e-05,
      "loss": 2.8751,
      "step": 31500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4605492956796745e-05,
      "loss": 2.8746,
      "step": 32000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4521203784246696e-05,
      "loss": 2.8942,
      "step": 32500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.443691461169664e-05,
      "loss": 2.8707,
      "step": 33000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.435262543914659e-05,
      "loss": 2.8806,
      "step": 33500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.426833626659654e-05,
      "loss": 2.8747,
      "step": 34000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4184047094046486e-05,
      "loss": 2.8662,
      "step": 34500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.409975792149644e-05,
      "loss": 2.8774,
      "step": 35000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.401546874894639e-05,
      "loss": 2.8689,
      "step": 35500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.393117957639634e-05,
      "loss": 2.8776,
      "step": 36000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.384689040384628e-05,
      "loss": 2.8637,
      "step": 36500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3762601231296234e-05,
      "loss": 2.8669,
      "step": 37000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3678312058746185e-05,
      "loss": 2.8762,
      "step": 37500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3594022886196135e-05,
      "loss": 2.8616,
      "step": 38000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3509733713646086e-05,
      "loss": 2.8536,
      "step": 38500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.342544454109603e-05,
      "loss": 2.8629,
      "step": 39000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.334115536854598e-05,
      "loss": 2.8569,
      "step": 39500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.3256866195995926e-05,
      "loss": 2.8668,
      "step": 40000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.3172577023445876e-05,
      "loss": 2.8663,
      "step": 40500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.308828785089583e-05,
      "loss": 2.8561,
      "step": 41000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.300399867834578e-05,
      "loss": 2.8532,
      "step": 41500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.291970950579572e-05,
      "loss": 2.8613,
      "step": 42000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.283542033324567e-05,
      "loss": 2.8415,
      "step": 42500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.2751131160695624e-05,
      "loss": 2.8532,
      "step": 43000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2666841988145575e-05,
      "loss": 2.8374,
      "step": 43500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.258255281559552e-05,
      "loss": 2.8397,
      "step": 44000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.249826364304547e-05,
      "loss": 2.8543,
      "step": 44500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.241397447049542e-05,
      "loss": 2.8493,
      "step": 45000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.232968529794537e-05,
      "loss": 2.855,
      "step": 45500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.224539612539532e-05,
      "loss": 2.8461,
      "step": 46000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.216110695284527e-05,
      "loss": 2.8354,
      "step": 46500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.207681778029522e-05,
      "loss": 2.8242,
      "step": 47000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.199252860774516e-05,
      "loss": 2.8476,
      "step": 47500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.190823943519511e-05,
      "loss": 2.8374,
      "step": 48000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1823950262645064e-05,
      "loss": 2.8434,
      "step": 48500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1739661090095015e-05,
      "loss": 2.8252,
      "step": 49000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.165537191754496e-05,
      "loss": 2.8262,
      "step": 49500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.157108274499491e-05,
      "loss": 2.8298,
      "step": 50000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.148679357244486e-05,
      "loss": 2.8274,
      "step": 50500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.140250439989481e-05,
      "loss": 2.8396,
      "step": 51000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.1318215227344756e-05,
      "loss": 2.8272,
      "step": 51500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.1233926054794707e-05,
      "loss": 2.8236,
      "step": 52000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.114963688224466e-05,
      "loss": 2.8257,
      "step": 52500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.106534770969461e-05,
      "loss": 2.8229,
      "step": 53000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.098105853714456e-05,
      "loss": 2.8268,
      "step": 53500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.08967693645945e-05,
      "loss": 2.8362,
      "step": 54000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.081248019204445e-05,
      "loss": 2.8156,
      "step": 54500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.07281910194944e-05,
      "loss": 2.8108,
      "step": 55000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.064390184694435e-05,
      "loss": 2.8118,
      "step": 55500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.05596126743943e-05,
      "loss": 2.8157,
      "step": 56000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.047532350184425e-05,
      "loss": 2.8124,
      "step": 56500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0391034329294195e-05,
      "loss": 2.8243,
      "step": 57000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0306745156744146e-05,
      "loss": 2.8224,
      "step": 57500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.02224559841941e-05,
      "loss": 2.8236,
      "step": 58000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.013816681164405e-05,
      "loss": 2.8103,
      "step": 58500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.005387763909399e-05,
      "loss": 2.8157,
      "step": 59000
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.996958846654394e-05,
      "loss": 2.8117,
      "step": 59500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9885299293993894e-05,
      "loss": 2.8158,
      "step": 60000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9801010121443845e-05,
      "loss": 2.8058,
      "step": 60500
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9716720948893796e-05,
      "loss": 2.8253,
      "step": 61000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.963243177634374e-05,
      "loss": 2.8229,
      "step": 61500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9548142603793684e-05,
      "loss": 2.801,
      "step": 62000
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9463853431243635e-05,
      "loss": 2.8053,
      "step": 62500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9379564258693586e-05,
      "loss": 2.8019,
      "step": 63000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.929527508614354e-05,
      "loss": 2.8091,
      "step": 63500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.921098591359349e-05,
      "loss": 2.8076,
      "step": 64000
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.912669674104343e-05,
      "loss": 2.8127,
      "step": 64500
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.904240756849338e-05,
      "loss": 2.8014,
      "step": 65000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.8958118395943333e-05,
      "loss": 2.7887,
      "step": 65500
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8873829223393284e-05,
      "loss": 2.8,
      "step": 66000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.878954005084323e-05,
      "loss": 2.7967,
      "step": 66500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.870525087829318e-05,
      "loss": 2.7931,
      "step": 67000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.862096170574313e-05,
      "loss": 2.8072,
      "step": 67500
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.853667253319308e-05,
      "loss": 2.8006,
      "step": 68000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.845238336064303e-05,
      "loss": 2.8016,
      "step": 68500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.8368094188092976e-05,
      "loss": 2.8004,
      "step": 69000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.828380501554292e-05,
      "loss": 2.7946,
      "step": 69500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.819951584299287e-05,
      "loss": 2.7983,
      "step": 70000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.811522667044282e-05,
      "loss": 2.7897,
      "step": 70500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.803093749789277e-05,
      "loss": 2.7887,
      "step": 71000
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.7946648325342724e-05,
      "loss": 2.7874,
      "step": 71500
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.786235915279267e-05,
      "loss": 2.7945,
      "step": 72000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.777806998024262e-05,
      "loss": 2.7927,
      "step": 72500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.769378080769257e-05,
      "loss": 2.7905,
      "step": 73000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.760949163514252e-05,
      "loss": 2.7932,
      "step": 73500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7525202462592465e-05,
      "loss": 2.7897,
      "step": 74000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7440913290042416e-05,
      "loss": 2.7847,
      "step": 74500
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.735662411749237e-05,
      "loss": 2.7833,
      "step": 75000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.727233494494232e-05,
      "loss": 2.7842,
      "step": 75500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.718804577239227e-05,
      "loss": 2.7852,
      "step": 76000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.710375659984221e-05,
      "loss": 2.79,
      "step": 76500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.701946742729216e-05,
      "loss": 2.7874,
      "step": 77000
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.693517825474211e-05,
      "loss": 2.7764,
      "step": 77500
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.685088908219206e-05,
      "loss": 2.7826,
      "step": 78000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.676659990964201e-05,
      "loss": 2.7794,
      "step": 78500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.668231073709196e-05,
      "loss": 2.7807,
      "step": 79000
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.6598021564541905e-05,
      "loss": 2.775,
      "step": 79500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6513732391991855e-05,
      "loss": 2.7683,
      "step": 80000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6429443219441806e-05,
      "loss": 2.7781,
      "step": 80500
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.634515404689176e-05,
      "loss": 2.7757,
      "step": 81000
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.62608648743417e-05,
      "loss": 2.7852,
      "step": 81500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.617657570179165e-05,
      "loss": 2.7624,
      "step": 82000
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.60922865292416e-05,
      "loss": 2.7703,
      "step": 82500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.6007997356691554e-05,
      "loss": 2.7605,
      "step": 83000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.5923708184141505e-05,
      "loss": 2.7586,
      "step": 83500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.583941901159145e-05,
      "loss": 2.7741,
      "step": 84000
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.575512983904139e-05,
      "loss": 2.7726,
      "step": 84500
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5670840666491344e-05,
      "loss": 2.777,
      "step": 85000
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5586551493941295e-05,
      "loss": 2.7612,
      "step": 85500
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.5502262321391246e-05,
      "loss": 2.7697,
      "step": 86000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.54179731488412e-05,
      "loss": 2.7688,
      "step": 86500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.533368397629114e-05,
      "loss": 2.7717,
      "step": 87000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.524939480374109e-05,
      "loss": 2.7681,
      "step": 87500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.516510563119104e-05,
      "loss": 2.7544,
      "step": 88000
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.5080816458640994e-05,
      "loss": 2.7579,
      "step": 88500
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.499652728609094e-05,
      "loss": 2.766,
      "step": 89000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.491223811354089e-05,
      "loss": 2.7545,
      "step": 89500
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.482794894099084e-05,
      "loss": 2.7641,
      "step": 90000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.474365976844079e-05,
      "loss": 2.7608,
      "step": 90500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.4659370595890735e-05,
      "loss": 2.7578,
      "step": 91000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4575081423340686e-05,
      "loss": 2.7582,
      "step": 91500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.449079225079063e-05,
      "loss": 2.7615,
      "step": 92000
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.440650307824058e-05,
      "loss": 2.764,
      "step": 92500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.432221390569053e-05,
      "loss": 2.7639,
      "step": 93000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.423792473314048e-05,
      "loss": 2.7492,
      "step": 93500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.415363556059043e-05,
      "loss": 2.7489,
      "step": 94000
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.406934638804038e-05,
      "loss": 2.7606,
      "step": 94500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.398505721549033e-05,
      "loss": 2.7515,
      "step": 95000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.390076804294028e-05,
      "loss": 2.7501,
      "step": 95500
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.381647887039023e-05,
      "loss": 2.7507,
      "step": 96000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3732189697840174e-05,
      "loss": 2.7472,
      "step": 96500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3647900525290125e-05,
      "loss": 2.7443,
      "step": 97000
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.3563611352740076e-05,
      "loss": 2.7465,
      "step": 97500
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.347932218019003e-05,
      "loss": 2.7585,
      "step": 98000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.339503300763997e-05,
      "loss": 2.746,
      "step": 98500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.331074383508992e-05,
      "loss": 2.7441,
      "step": 99000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3226454662539866e-05,
      "loss": 2.7499,
      "step": 99500
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.314216548998982e-05,
      "loss": 2.7432,
      "step": 100000
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.305787631743977e-05,
      "loss": 2.7424,
      "step": 100500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.297358714488972e-05,
      "loss": 2.7472,
      "step": 101000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.288929797233967e-05,
      "loss": 2.7356,
      "step": 101500
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.2805008799789614e-05,
      "loss": 2.743,
      "step": 102000
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.2720719627239565e-05,
      "loss": 2.7381,
      "step": 102500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.2636430454689516e-05,
      "loss": 2.7316,
      "step": 103000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.2552141282139467e-05,
      "loss": 2.7328,
      "step": 103500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.246785210958941e-05,
      "loss": 2.7237,
      "step": 104000
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.238356293703936e-05,
      "loss": 2.7305,
      "step": 104500
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.229927376448931e-05,
      "loss": 2.732,
      "step": 105000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.221498459193926e-05,
      "loss": 2.7413,
      "step": 105500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.213069541938921e-05,
      "loss": 2.7324,
      "step": 106000
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.204640624683916e-05,
      "loss": 2.7378,
      "step": 106500
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.19621170742891e-05,
      "loss": 2.7392,
      "step": 107000
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.1877827901739053e-05,
      "loss": 2.7269,
      "step": 107500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.1793538729189004e-05,
      "loss": 2.722,
      "step": 108000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.1709249556638955e-05,
      "loss": 2.7302,
      "step": 108500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.1624960384088906e-05,
      "loss": 2.7289,
      "step": 109000
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.154067121153885e-05,
      "loss": 2.724,
      "step": 109500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.14563820389888e-05,
      "loss": 2.7316,
      "step": 110000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.137209286643875e-05,
      "loss": 2.7203,
      "step": 110500
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.12878036938887e-05,
      "loss": 2.7259,
      "step": 111000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.120351452133865e-05,
      "loss": 2.7283,
      "step": 111500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.11192253487886e-05,
      "loss": 2.7148,
      "step": 112000
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.103493617623855e-05,
      "loss": 2.7135,
      "step": 112500
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.095064700368849e-05,
      "loss": 2.7176,
      "step": 113000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.0866357831138444e-05,
      "loss": 2.7331,
      "step": 113500
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.0782068658588395e-05,
      "loss": 2.7176,
      "step": 114000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.069777948603834e-05,
      "loss": 2.7286,
      "step": 114500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.061349031348829e-05,
      "loss": 2.7187,
      "step": 115000
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.052920114093824e-05,
      "loss": 2.7229,
      "step": 115500
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0444911968388192e-05,
      "loss": 2.713,
      "step": 116000
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.036062279583814e-05,
      "loss": 2.7242,
      "step": 116500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.027633362328809e-05,
      "loss": 2.7252,
      "step": 117000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0192044450738038e-05,
      "loss": 2.7172,
      "step": 117500
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.010775527818799e-05,
      "loss": 2.7168,
      "step": 118000
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.0023466105637936e-05,
      "loss": 2.7048,
      "step": 118500
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9939176933087887e-05,
      "loss": 2.7179,
      "step": 119000
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9854887760537838e-05,
      "loss": 2.7169,
      "step": 119500
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.977059858798778e-05,
      "loss": 2.7137,
      "step": 120000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.968630941543773e-05,
      "loss": 2.7089,
      "step": 120500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.960202024288768e-05,
      "loss": 2.7173,
      "step": 121000
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9517731070337628e-05,
      "loss": 2.71,
      "step": 121500
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.943344189778758e-05,
      "loss": 2.7175,
      "step": 122000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9349152725237526e-05,
      "loss": 2.7116,
      "step": 122500
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9264863552687477e-05,
      "loss": 2.707,
      "step": 123000
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9180574380137428e-05,
      "loss": 2.7079,
      "step": 123500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9096285207587376e-05,
      "loss": 2.7086,
      "step": 124000
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.9011996035037327e-05,
      "loss": 2.7133,
      "step": 124500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.8927706862487274e-05,
      "loss": 2.7133,
      "step": 125000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8843417689937225e-05,
      "loss": 2.7194,
      "step": 125500
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8759128517387172e-05,
      "loss": 2.7042,
      "step": 126000
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8674839344837123e-05,
      "loss": 2.7169,
      "step": 126500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8590550172287074e-05,
      "loss": 2.7127,
      "step": 127000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8506260999737015e-05,
      "loss": 2.7083,
      "step": 127500
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8421971827186966e-05,
      "loss": 2.6979,
      "step": 128000
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8337682654636917e-05,
      "loss": 2.7132,
      "step": 128500
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8253393482086864e-05,
      "loss": 2.7092,
      "step": 129000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8169104309536815e-05,
      "loss": 2.7059,
      "step": 129500
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8084815136986763e-05,
      "loss": 2.7132,
      "step": 130000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8000525964436714e-05,
      "loss": 2.7022,
      "step": 130500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7916236791886665e-05,
      "loss": 2.6979,
      "step": 131000
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7831947619336612e-05,
      "loss": 2.7117,
      "step": 131500
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7747658446786563e-05,
      "loss": 2.708,
      "step": 132000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.766336927423651e-05,
      "loss": 2.6955,
      "step": 132500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.757908010168646e-05,
      "loss": 2.6978,
      "step": 133000
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.749479092913641e-05,
      "loss": 2.6984,
      "step": 133500
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.741050175658636e-05,
      "loss": 2.7051,
      "step": 134000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7326212584036304e-05,
      "loss": 2.703,
      "step": 134500
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7241923411486255e-05,
      "loss": 2.6906,
      "step": 135000
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7157634238936202e-05,
      "loss": 2.6879,
      "step": 135500
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.7073345066386153e-05,
      "loss": 2.7085,
      "step": 136000
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.69890558938361e-05,
      "loss": 2.7025,
      "step": 136500
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.690476672128605e-05,
      "loss": 2.6959,
      "step": 137000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6820477548736e-05,
      "loss": 2.6965,
      "step": 137500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.673618837618595e-05,
      "loss": 2.6864,
      "step": 138000
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.66518992036359e-05,
      "loss": 2.6942,
      "step": 138500
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.656761003108585e-05,
      "loss": 2.6922,
      "step": 139000
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.64833208585358e-05,
      "loss": 2.6821,
      "step": 139500
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6399031685985747e-05,
      "loss": 2.6894,
      "step": 140000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6314742513435698e-05,
      "loss": 2.7,
      "step": 140500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6230453340885645e-05,
      "loss": 2.684,
      "step": 141000
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6146164168335596e-05,
      "loss": 2.6796,
      "step": 141500
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.606187499578554e-05,
      "loss": 2.6908,
      "step": 142000
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.597758582323549e-05,
      "loss": 2.6923,
      "step": 142500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.589329665068544e-05,
      "loss": 2.6849,
      "step": 143000
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.580900747813539e-05,
      "loss": 2.6806,
      "step": 143500
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5724718305585337e-05,
      "loss": 2.6912,
      "step": 144000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5640429133035288e-05,
      "loss": 2.6886,
      "step": 144500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5556139960485236e-05,
      "loss": 2.6743,
      "step": 145000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5471850787935187e-05,
      "loss": 2.6913,
      "step": 145500
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5387561615385137e-05,
      "loss": 2.6743,
      "step": 146000
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5303272442835085e-05,
      "loss": 2.669,
      "step": 146500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5218983270285036e-05,
      "loss": 2.6743,
      "step": 147000
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5134694097734983e-05,
      "loss": 2.6862,
      "step": 147500
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.5050404925184934e-05,
      "loss": 2.6805,
      "step": 148000
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.4966115752634882e-05,
      "loss": 2.678,
      "step": 148500
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.488182658008483e-05,
      "loss": 2.6831,
      "step": 149000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.479753740753478e-05,
      "loss": 2.6879,
      "step": 149500
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4713248234984728e-05,
      "loss": 2.6804,
      "step": 150000
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.462895906243468e-05,
      "loss": 2.6884,
      "step": 150500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4544669889884626e-05,
      "loss": 2.6821,
      "step": 151000
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4460380717334574e-05,
      "loss": 2.6737,
      "step": 151500
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4376091544784525e-05,
      "loss": 2.6864,
      "step": 152000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4291802372234472e-05,
      "loss": 2.6789,
      "step": 152500
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4207513199684423e-05,
      "loss": 2.687,
      "step": 153000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4123224027134374e-05,
      "loss": 2.6736,
      "step": 153500
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.403893485458432e-05,
      "loss": 2.681,
      "step": 154000
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.395464568203427e-05,
      "loss": 2.6775,
      "step": 154500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3870356509484216e-05,
      "loss": 2.6682,
      "step": 155000
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3786067336934167e-05,
      "loss": 2.6805,
      "step": 155500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3701778164384118e-05,
      "loss": 2.6673,
      "step": 156000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3617488991834066e-05,
      "loss": 2.6671,
      "step": 156500
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3533199819284017e-05,
      "loss": 2.6681,
      "step": 157000
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3448910646733964e-05,
      "loss": 2.6715,
      "step": 157500
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.336462147418391e-05,
      "loss": 2.6756,
      "step": 158000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3280332301633863e-05,
      "loss": 2.6682,
      "step": 158500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.319604312908381e-05,
      "loss": 2.6782,
      "step": 159000
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.311175395653376e-05,
      "loss": 2.6712,
      "step": 159500
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.302746478398371e-05,
      "loss": 2.6817,
      "step": 160000
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.294317561143366e-05,
      "loss": 2.6757,
      "step": 160500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.285888643888361e-05,
      "loss": 2.6733,
      "step": 161000
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2774597266333558e-05,
      "loss": 2.6607,
      "step": 161500
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2690308093783505e-05,
      "loss": 2.6658,
      "step": 162000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2606018921233453e-05,
      "loss": 2.6802,
      "step": 162500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2521729748683404e-05,
      "loss": 2.6647,
      "step": 163000
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2437440576133355e-05,
      "loss": 2.6761,
      "step": 163500
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2353151403583302e-05,
      "loss": 2.6698,
      "step": 164000
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2268862231033253e-05,
      "loss": 2.6661,
      "step": 164500
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.21845730584832e-05,
      "loss": 2.672,
      "step": 165000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2100283885933148e-05,
      "loss": 2.6659,
      "step": 165500
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.20159947133831e-05,
      "loss": 2.6549,
      "step": 166000
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.1931705540833047e-05,
      "loss": 2.6637,
      "step": 166500
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1847416368282997e-05,
      "loss": 2.6652,
      "step": 167000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1763127195732945e-05,
      "loss": 2.6758,
      "step": 167500
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1678838023182896e-05,
      "loss": 2.6623,
      "step": 168000
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1594548850632847e-05,
      "loss": 2.6694,
      "step": 168500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.151025967808279e-05,
      "loss": 2.6673,
      "step": 169000
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1425970505532742e-05,
      "loss": 2.6625,
      "step": 169500
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.134168133298269e-05,
      "loss": 2.6634,
      "step": 170000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.125739216043264e-05,
      "loss": 2.6714,
      "step": 170500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.117310298788259e-05,
      "loss": 2.6588,
      "step": 171000
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.108881381533254e-05,
      "loss": 2.6633,
      "step": 171500
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.100452464278249e-05,
      "loss": 2.6737,
      "step": 172000
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.0920235470232437e-05,
      "loss": 2.6581,
      "step": 172500
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.0835946297682385e-05,
      "loss": 2.6577,
      "step": 173000
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.0751657125132335e-05,
      "loss": 2.6471,
      "step": 173500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0667367952582283e-05,
      "loss": 2.6634,
      "step": 174000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0583078780032234e-05,
      "loss": 2.6643,
      "step": 174500
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.049878960748218e-05,
      "loss": 2.6565,
      "step": 175000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0414500434932132e-05,
      "loss": 2.6571,
      "step": 175500
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0330211262382083e-05,
      "loss": 2.6525,
      "step": 176000
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0245922089832027e-05,
      "loss": 2.6625,
      "step": 176500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0161632917281978e-05,
      "loss": 2.6666,
      "step": 177000
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.0077343744731926e-05,
      "loss": 2.6516,
      "step": 177500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.9993054572181877e-05,
      "loss": 2.6542,
      "step": 178000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9908765399631828e-05,
      "loss": 2.6476,
      "step": 178500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9824476227081775e-05,
      "loss": 2.6557,
      "step": 179000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9740187054531726e-05,
      "loss": 2.6586,
      "step": 179500
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9655897881981673e-05,
      "loss": 2.6577,
      "step": 180000
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.957160870943162e-05,
      "loss": 2.6679,
      "step": 180500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9487319536881572e-05,
      "loss": 2.648,
      "step": 181000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.940303036433152e-05,
      "loss": 2.6476,
      "step": 181500
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.931874119178147e-05,
      "loss": 2.6495,
      "step": 182000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9234452019231418e-05,
      "loss": 2.6612,
      "step": 182500
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.915016284668137e-05,
      "loss": 2.6535,
      "step": 183000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.9065873674131316e-05,
      "loss": 2.6491,
      "step": 183500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.8981584501581264e-05,
      "loss": 2.6428,
      "step": 184000
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8897295329031215e-05,
      "loss": 2.6447,
      "step": 184500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8813006156481162e-05,
      "loss": 2.6513,
      "step": 185000
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8728716983931113e-05,
      "loss": 2.6495,
      "step": 185500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8644427811381064e-05,
      "loss": 2.6485,
      "step": 186000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.856013863883101e-05,
      "loss": 2.6468,
      "step": 186500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.847584946628096e-05,
      "loss": 2.6464,
      "step": 187000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.839156029373091e-05,
      "loss": 2.6365,
      "step": 187500
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8307271121180857e-05,
      "loss": 2.6435,
      "step": 188000
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.822298194863081e-05,
      "loss": 2.6421,
      "step": 188500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8138692776080756e-05,
      "loss": 2.6409,
      "step": 189000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.8054403603530707e-05,
      "loss": 2.646,
      "step": 189500
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.7970114430980654e-05,
      "loss": 2.6346,
      "step": 190000
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7885825258430605e-05,
      "loss": 2.6401,
      "step": 190500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7801536085880553e-05,
      "loss": 2.6436,
      "step": 191000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.77172469133305e-05,
      "loss": 2.6305,
      "step": 191500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.763295774078045e-05,
      "loss": 2.6384,
      "step": 192000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.75486685682304e-05,
      "loss": 2.6544,
      "step": 192500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.746437939568035e-05,
      "loss": 2.6471,
      "step": 193000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.73800902231303e-05,
      "loss": 2.6434,
      "step": 193500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7295801050580248e-05,
      "loss": 2.6536,
      "step": 194000
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7211511878030195e-05,
      "loss": 2.6454,
      "step": 194500
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7127222705480146e-05,
      "loss": 2.6486,
      "step": 195000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7042933532930094e-05,
      "loss": 2.6388,
      "step": 195500
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6958644360380045e-05,
      "loss": 2.6336,
      "step": 196000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6874355187829992e-05,
      "loss": 2.6323,
      "step": 196500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6790066015279943e-05,
      "loss": 2.6373,
      "step": 197000
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.670577684272989e-05,
      "loss": 2.6447,
      "step": 197500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6621487670179838e-05,
      "loss": 2.6284,
      "step": 198000
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.653719849762979e-05,
      "loss": 2.6412,
      "step": 198500
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6452909325079737e-05,
      "loss": 2.6405,
      "step": 199000
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6368620152529688e-05,
      "loss": 2.6313,
      "step": 199500
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.628433097997964e-05,
      "loss": 2.6166,
      "step": 200000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6200041807429586e-05,
      "loss": 2.6377,
      "step": 200500
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6115752634879537e-05,
      "loss": 2.6241,
      "step": 201000
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.6031463462329484e-05,
      "loss": 2.6264,
      "step": 201500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.5947174289779432e-05,
      "loss": 2.643,
      "step": 202000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5862885117229383e-05,
      "loss": 2.625,
      "step": 202500
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.577859594467933e-05,
      "loss": 2.6252,
      "step": 203000
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.569430677212928e-05,
      "loss": 2.6346,
      "step": 203500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.561001759957923e-05,
      "loss": 2.6386,
      "step": 204000
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.552572842702918e-05,
      "loss": 2.6184,
      "step": 204500
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5441439254479127e-05,
      "loss": 2.618,
      "step": 205000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5357150081929075e-05,
      "loss": 2.6353,
      "step": 205500
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5272860909379026e-05,
      "loss": 2.6235,
      "step": 206000
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5188571736828975e-05,
      "loss": 2.6273,
      "step": 206500
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5104282564278924e-05,
      "loss": 2.6355,
      "step": 207000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5019993391728873e-05,
      "loss": 2.6263,
      "step": 207500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.4935704219178822e-05,
      "loss": 2.6198,
      "step": 208000
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4851415046628772e-05,
      "loss": 2.6261,
      "step": 208500
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4767125874078719e-05,
      "loss": 2.6248,
      "step": 209000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4682836701528668e-05,
      "loss": 2.6223,
      "step": 209500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4598547528978618e-05,
      "loss": 2.6145,
      "step": 210000
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4514258356428567e-05,
      "loss": 2.6052,
      "step": 210500
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4429969183878516e-05,
      "loss": 2.6333,
      "step": 211000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4345680011328467e-05,
      "loss": 2.6264,
      "step": 211500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4261390838778416e-05,
      "loss": 2.6179,
      "step": 212000
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4177101666228362e-05,
      "loss": 2.6244,
      "step": 212500
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4092812493678311e-05,
      "loss": 2.6188,
      "step": 213000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4008523321128262e-05,
      "loss": 2.6219,
      "step": 213500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.3924234148578211e-05,
      "loss": 2.6194,
      "step": 214000
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.383994497602816e-05,
      "loss": 2.6162,
      "step": 214500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.375565580347811e-05,
      "loss": 2.6168,
      "step": 215000
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3671366630928059e-05,
      "loss": 2.6193,
      "step": 215500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3587077458378008e-05,
      "loss": 2.6176,
      "step": 216000
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3502788285827956e-05,
      "loss": 2.6134,
      "step": 216500
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3418499113277905e-05,
      "loss": 2.6201,
      "step": 217000
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3334209940727854e-05,
      "loss": 2.6193,
      "step": 217500
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3249920768177803e-05,
      "loss": 2.6234,
      "step": 218000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3165631595627752e-05,
      "loss": 2.6301,
      "step": 218500
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3081342423077703e-05,
      "loss": 2.6179,
      "step": 219000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.2997053250527652e-05,
      "loss": 2.6159,
      "step": 219500
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2912764077977598e-05,
      "loss": 2.6185,
      "step": 220000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2828474905427548e-05,
      "loss": 2.6332,
      "step": 220500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2744185732877498e-05,
      "loss": 2.6102,
      "step": 221000
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2659896560327448e-05,
      "loss": 2.6275,
      "step": 221500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2575607387777397e-05,
      "loss": 2.6165,
      "step": 222000
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2491318215227344e-05,
      "loss": 2.6281,
      "step": 222500
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2407029042677294e-05,
      "loss": 2.6035,
      "step": 223000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2322739870127244e-05,
      "loss": 2.6089,
      "step": 223500
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2238450697577194e-05,
      "loss": 2.6036,
      "step": 224000
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2154161525027141e-05,
      "loss": 2.6155,
      "step": 224500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.206987235247709e-05,
      "loss": 2.6175,
      "step": 225000
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.198558317992704e-05,
      "loss": 2.6189,
      "step": 225500
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1901294007376989e-05,
      "loss": 2.6122,
      "step": 226000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1817004834826938e-05,
      "loss": 2.6106,
      "step": 226500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1732715662276887e-05,
      "loss": 2.6046,
      "step": 227000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1648426489726836e-05,
      "loss": 2.6153,
      "step": 227500
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1564137317176784e-05,
      "loss": 2.6077,
      "step": 228000
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1479848144626735e-05,
      "loss": 2.6124,
      "step": 228500
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1395558972076684e-05,
      "loss": 2.6124,
      "step": 229000
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1311269799526633e-05,
      "loss": 2.6148,
      "step": 229500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.122698062697658e-05,
      "loss": 2.6068,
      "step": 230000
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.114269145442653e-05,
      "loss": 2.6067,
      "step": 230500
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1058402281876481e-05,
      "loss": 2.6046,
      "step": 231000
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.0974113109326428e-05,
      "loss": 2.6109,
      "step": 231500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0889823936776378e-05,
      "loss": 2.6105,
      "step": 232000
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0805534764226327e-05,
      "loss": 2.6085,
      "step": 232500
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0721245591676276e-05,
      "loss": 2.6026,
      "step": 233000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0636956419126225e-05,
      "loss": 2.6069,
      "step": 233500
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0552667246576174e-05,
      "loss": 2.614,
      "step": 234000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0468378074026124e-05,
      "loss": 2.6024,
      "step": 234500
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0384088901476073e-05,
      "loss": 2.6143,
      "step": 235000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.029979972892602e-05,
      "loss": 2.6064,
      "step": 235500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0215510556375971e-05,
      "loss": 2.6094,
      "step": 236000
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.013122138382592e-05,
      "loss": 2.5996,
      "step": 236500
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.0046932211275868e-05,
      "loss": 2.5943,
      "step": 237000
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.962643038725817e-06,
      "loss": 2.6049,
      "step": 237500
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.878353866175766e-06,
      "loss": 2.6066,
      "step": 238000
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.794064693625717e-06,
      "loss": 2.5907,
      "step": 238500
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.709775521075665e-06,
      "loss": 2.6141,
      "step": 239000
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.625486348525614e-06,
      "loss": 2.6055,
      "step": 239500
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.541197175975563e-06,
      "loss": 2.6001,
      "step": 240000
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.456908003425512e-06,
      "loss": 2.6049,
      "step": 240500
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.372618830875462e-06,
      "loss": 2.5989,
      "step": 241000
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.288329658325411e-06,
      "loss": 2.6159,
      "step": 241500
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.20404048577536e-06,
      "loss": 2.6031,
      "step": 242000
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.119751313225308e-06,
      "loss": 2.6079,
      "step": 242500
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.035462140675257e-06,
      "loss": 2.6025,
      "step": 243000
    },
    {
      "epoch": 2.46,
      "learning_rate": 8.951172968125208e-06,
      "loss": 2.5971,
      "step": 243500
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.866883795575157e-06,
      "loss": 2.6006,
      "step": 244000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.782594623025104e-06,
      "loss": 2.5976,
      "step": 244500
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.698305450475054e-06,
      "loss": 2.6043,
      "step": 245000
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.614016277925003e-06,
      "loss": 2.5983,
      "step": 245500
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.529727105374952e-06,
      "loss": 2.5852,
      "step": 246000
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.445437932824901e-06,
      "loss": 2.5987,
      "step": 246500
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.36114876027485e-06,
      "loss": 2.5991,
      "step": 247000
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.2768595877248e-06,
      "loss": 2.5982,
      "step": 247500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.192570415174749e-06,
      "loss": 2.5968,
      "step": 248000
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.108281242624698e-06,
      "loss": 2.5988,
      "step": 248500
    },
    {
      "epoch": 2.52,
      "learning_rate": 8.023992070074647e-06,
      "loss": 2.6092,
      "step": 249000
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.939702897524597e-06,
      "loss": 2.6021,
      "step": 249500
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.855413724974544e-06,
      "loss": 2.5998,
      "step": 250000
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.771124552424495e-06,
      "loss": 2.5953,
      "step": 250500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.686835379874444e-06,
      "loss": 2.5998,
      "step": 251000
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.602546207324392e-06,
      "loss": 2.6013,
      "step": 251500
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.518257034774341e-06,
      "loss": 2.5959,
      "step": 252000
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.433967862224291e-06,
      "loss": 2.596,
      "step": 252500
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.34967868967424e-06,
      "loss": 2.5912,
      "step": 253000
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.2653895171241885e-06,
      "loss": 2.6033,
      "step": 253500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.181100344574138e-06,
      "loss": 2.6009,
      "step": 254000
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.096811172024087e-06,
      "loss": 2.6039,
      "step": 254500
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.012521999474036e-06,
      "loss": 2.5874,
      "step": 255000
    },
    {
      "epoch": 2.58,
      "learning_rate": 6.9282328269239845e-06,
      "loss": 2.6138,
      "step": 255500
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.843943654373934e-06,
      "loss": 2.5908,
      "step": 256000
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.759654481823884e-06,
      "loss": 2.5949,
      "step": 256500
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.675365309273831e-06,
      "loss": 2.5909,
      "step": 257000
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.591076136723781e-06,
      "loss": 2.5896,
      "step": 257500
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.5067869641737305e-06,
      "loss": 2.5954,
      "step": 258000
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.42249779162368e-06,
      "loss": 2.5958,
      "step": 258500
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.338208619073628e-06,
      "loss": 2.5959,
      "step": 259000
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.253919446523577e-06,
      "loss": 2.6011,
      "step": 259500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.169630273973527e-06,
      "loss": 2.5919,
      "step": 260000
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.085341101423476e-06,
      "loss": 2.593,
      "step": 260500
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.001051928873425e-06,
      "loss": 2.5867,
      "step": 261000
    },
    {
      "epoch": 2.64,
      "learning_rate": 5.916762756323374e-06,
      "loss": 2.5942,
      "step": 261500
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.8324735837733225e-06,
      "loss": 2.6071,
      "step": 262000
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.7481844112232726e-06,
|
"loss": 2.5872, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.663895238673221e-06, |
|
"loss": 2.5915, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.57960606612317e-06, |
|
"loss": 2.5836, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.495316893573119e-06, |
|
"loss": 2.5838, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.4110277210230685e-06, |
|
"loss": 2.5989, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.326738548473018e-06, |
|
"loss": 2.5965, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.242449375922967e-06, |
|
"loss": 2.5949, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.158160203372916e-06, |
|
"loss": 2.5905, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.0738710308228645e-06, |
|
"loss": 2.5857, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.989581858272814e-06, |
|
"loss": 2.592, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.905292685722763e-06, |
|
"loss": 2.5899, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.821003513172712e-06, |
|
"loss": 2.5816, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.736714340622661e-06, |
|
"loss": 2.5801, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.6524251680726106e-06, |
|
"loss": 2.5802, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.568135995522559e-06, |
|
"loss": 2.586, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.483846822972509e-06, |
|
"loss": 2.5848, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.399557650422457e-06, |
|
"loss": 2.5755, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.3152684778724066e-06, |
|
"loss": 2.5991, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.230979305322356e-06, |
|
"loss": 2.5985, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.146690132772305e-06, |
|
"loss": 2.5851, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.062400960222254e-06, |
|
"loss": 2.5937, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.9781117876722025e-06, |
|
"loss": 2.5802, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.893822615122153e-06, |
|
"loss": 2.5929, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.809533442572101e-06, |
|
"loss": 2.5922, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.7252442700220506e-06, |
|
"loss": 2.587, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6409550974719994e-06, |
|
"loss": 2.5863, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.556665924921948e-06, |
|
"loss": 2.575, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.4723767523718974e-06, |
|
"loss": 2.575, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.388087579821846e-06, |
|
"loss": 2.5856, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.303798407271796e-06, |
|
"loss": 2.5876, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.2195092347217446e-06, |
|
"loss": 2.5759, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1352200621716942e-06, |
|
"loss": 2.5928, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.0509308896216426e-06, |
|
"loss": 2.5861, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9666417170715918e-06, |
|
"loss": 2.5819, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.882352544521541e-06, |
|
"loss": 2.5835, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.79806337197149e-06, |
|
"loss": 2.5848, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.7137741994214394e-06, |
|
"loss": 2.5865, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.629485026871388e-06, |
|
"loss": 2.5817, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.5451958543213374e-06, |
|
"loss": 2.5815, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.4609066817712866e-06, |
|
"loss": 2.5861, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.376617509221236e-06, |
|
"loss": 2.5848, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.2923283366711846e-06, |
|
"loss": 2.5898, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.208039164121134e-06, |
|
"loss": 2.5826, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1237499915710826e-06, |
|
"loss": 2.5775, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.039460819021032e-06, |
|
"loss": 2.5835, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.955171646470981e-06, |
|
"loss": 2.5801, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8708824739209302e-06, |
|
"loss": 2.5827, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.7865933013708792e-06, |
|
"loss": 2.5859, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.7023041288208284e-06, |
|
"loss": 2.59, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6180149562707774e-06, |
|
"loss": 2.5833, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.5337257837207264e-06, |
|
"loss": 2.5859, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4494366111706756e-06, |
|
"loss": 2.5802, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3651474386206246e-06, |
|
"loss": 2.581, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.2808582660705736e-06, |
|
"loss": 2.5807, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1965690935205228e-06, |
|
"loss": 2.5815, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1122799209704718e-06, |
|
"loss": 2.5805, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.027990748420421e-06, |
|
"loss": 2.5773, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.437015758703701e-07, |
|
"loss": 2.5879, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.59412403320319e-07, |
|
"loss": 2.5698, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 7.751232307702682e-07, |
|
"loss": 2.5811, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.908340582202172e-07, |
|
"loss": 2.581, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.065448856701664e-07, |
|
"loss": 2.5752, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.222557131201154e-07, |
|
"loss": 2.5815, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.3796654057006455e-07, |
|
"loss": 2.5742, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.5367736802001365e-07, |
|
"loss": 2.5797, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.693881954699627e-07, |
|
"loss": 2.5838, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.850990229199118e-07, |
|
"loss": 2.5824, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.0080985036986089e-07, |
|
"loss": 2.5832, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.652067781980998e-08, |
|
"loss": 2.5764, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 296598, |
|
"total_flos": 3.610936727382461e+18, |
|
"train_loss": 2.7145460446965, |
|
"train_runtime": 366319.2711, |
|
"train_samples_per_second": 207.274, |
|
"train_steps_per_second": 0.81 |
|
} |
|
], |
|
"max_steps": 296598, |
|
"num_train_epochs": 3, |
|
"total_flos": 3.610936727382461e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|