{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 261390,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.990435747350702e-05, "loss": 3.0142, "step": 500 },
    { "epoch": 0.01, "learning_rate": 4.980871494701404e-05, "loss": 2.0464, "step": 1000 },
    { "epoch": 0.02, "learning_rate": 4.971307242052106e-05, "loss": 1.7157, "step": 1500 },
    { "epoch": 0.02, "learning_rate": 4.9617429894028086e-05, "loss": 1.5985, "step": 2000 },
    { "epoch": 0.03, "learning_rate": 4.9521787367535106e-05, "loss": 1.5032, "step": 2500 },
    { "epoch": 0.03, "learning_rate": 4.942614484104212e-05, "loss": 1.4132, "step": 3000 },
    { "epoch": 0.04, "learning_rate": 4.9330502314549144e-05, "loss": 1.3507, "step": 3500 },
    { "epoch": 0.05, "learning_rate": 4.9234859788056164e-05, "loss": 1.3238, "step": 4000 },
    { "epoch": 0.05, "learning_rate": 4.913921726156318e-05, "loss": 1.2726, "step": 4500 },
    { "epoch": 0.06, "learning_rate": 4.90435747350702e-05, "loss": 1.2343, "step": 5000 },
    { "epoch": 0.06, "learning_rate": 4.894793220857722e-05, "loss": 1.2175, "step": 5500 },
    { "epoch": 0.07, "learning_rate": 4.885228968208425e-05, "loss": 1.1914, "step": 6000 },
    { "epoch": 0.07, "learning_rate": 4.875664715559126e-05, "loss": 1.1483, "step": 6500 },
    { "epoch": 0.08, "learning_rate": 4.8661004629098287e-05, "loss": 1.138, "step": 7000 },
    { "epoch": 0.09, "learning_rate": 4.8565362102605306e-05, "loss": 1.124, "step": 7500 },
    { "epoch": 0.09, "learning_rate": 4.8469719576112325e-05, "loss": 1.1216, "step": 8000 },
    { "epoch": 0.1, "learning_rate": 4.8374077049619345e-05, "loss": 1.0871, "step": 8500 },
    { "epoch": 0.1, "learning_rate": 4.8278434523126364e-05, "loss": 1.1015, "step": 9000 },
    { "epoch": 0.11, "learning_rate": 4.818279199663338e-05, "loss": 1.0644, "step": 9500 },
    { "epoch": 0.11, "learning_rate": 4.808714947014041e-05, "loss": 1.0549, "step": 10000 },
    { "epoch": 0.12, "learning_rate": 4.799150694364742e-05, "loss": 1.0561, "step": 10500 },
    { "epoch": 0.13, "learning_rate": 4.789586441715445e-05, "loss": 1.0315, "step": 11000 },
    { "epoch": 0.13, "learning_rate": 4.780022189066147e-05, "loss": 1.0248, "step": 11500 },
    { "epoch": 0.14, "learning_rate": 4.7704579364168487e-05, "loss": 1.0313, "step": 12000 },
    { "epoch": 0.14, "learning_rate": 4.7608936837675506e-05, "loss": 1.0072, "step": 12500 },
    { "epoch": 0.15, "learning_rate": 4.7513294311182525e-05, "loss": 1.0038, "step": 13000 },
    { "epoch": 0.15, "learning_rate": 4.741765178468955e-05, "loss": 1.0004, "step": 13500 },
    { "epoch": 0.16, "learning_rate": 4.7322009258196564e-05, "loss": 1.001, "step": 14000 },
    { "epoch": 0.17, "learning_rate": 4.722636673170358e-05, "loss": 0.9823, "step": 14500 },
    { "epoch": 0.17, "learning_rate": 4.713072420521061e-05, "loss": 0.9753, "step": 15000 },
    { "epoch": 0.18, "learning_rate": 4.703508167871763e-05, "loss": 0.978, "step": 15500 },
    { "epoch": 0.18, "learning_rate": 4.693943915222465e-05, "loss": 0.9699, "step": 16000 },
    { "epoch": 0.19, "learning_rate": 4.684379662573167e-05, "loss": 0.9432, "step": 16500 },
    { "epoch": 0.2, "learning_rate": 4.674815409923869e-05, "loss": 0.9581, "step": 17000 },
    { "epoch": 0.2, "learning_rate": 4.6652511572745706e-05, "loss": 0.9526, "step": 17500 },
    { "epoch": 0.21, "learning_rate": 4.6556869046252725e-05, "loss": 0.9385, "step": 18000 },
    { "epoch": 0.21, "learning_rate": 4.646122651975975e-05, "loss": 0.9398, "step": 18500 },
    { "epoch": 0.22, "learning_rate": 4.636558399326677e-05, "loss": 0.9368, "step": 19000 },
    { "epoch": 0.22, "learning_rate": 4.626994146677378e-05, "loss": 0.9213, "step": 19500 },
    { "epoch": 0.23, "learning_rate": 4.617429894028081e-05, "loss": 0.9399, "step": 20000 },
    { "epoch": 0.24, "learning_rate": 4.607865641378783e-05, "loss": 0.9326, "step": 20500 },
    { "epoch": 0.24, "learning_rate": 4.598301388729485e-05, "loss": 0.9334, "step": 21000 },
    { "epoch": 0.25, "learning_rate": 4.588737136080187e-05, "loss": 0.9183, "step": 21500 },
    { "epoch": 0.25, "learning_rate": 4.579172883430889e-05, "loss": 0.9175, "step": 22000 },
    { "epoch": 0.26, "learning_rate": 4.569608630781591e-05, "loss": 0.9083, "step": 22500 },
    { "epoch": 0.26, "learning_rate": 4.560044378132293e-05, "loss": 0.9111, "step": 23000 },
    { "epoch": 0.27, "learning_rate": 4.550480125482995e-05, "loss": 0.8906, "step": 23500 },
    { "epoch": 0.28, "learning_rate": 4.540915872833697e-05, "loss": 0.8879, "step": 24000 },
    { "epoch": 0.28, "learning_rate": 4.531351620184399e-05, "loss": 0.8914, "step": 24500 },
    { "epoch": 0.29, "learning_rate": 4.521787367535101e-05, "loss": 0.8783, "step": 25000 },
    { "epoch": 0.29, "learning_rate": 4.512223114885803e-05, "loss": 0.879, "step": 25500 },
    { "epoch": 0.3, "learning_rate": 4.502658862236505e-05, "loss": 0.8882, "step": 26000 },
    { "epoch": 0.3, "learning_rate": 4.4930946095872074e-05, "loss": 0.8955, "step": 26500 },
    { "epoch": 0.31, "learning_rate": 4.483530356937909e-05, "loss": 0.8973, "step": 27000 },
    { "epoch": 0.32, "learning_rate": 4.473966104288611e-05, "loss": 0.8785, "step": 27500 },
    { "epoch": 0.32, "learning_rate": 4.464401851639313e-05, "loss": 0.87, "step": 28000 },
    { "epoch": 0.33, "learning_rate": 4.454837598990015e-05, "loss": 0.8668, "step": 28500 },
    { "epoch": 0.33, "learning_rate": 4.445273346340717e-05, "loss": 0.8644, "step": 29000 },
    { "epoch": 0.34, "learning_rate": 4.435709093691419e-05, "loss": 0.8532, "step": 29500 },
    { "epoch": 0.34, "learning_rate": 4.4261448410421216e-05, "loss": 0.8529, "step": 30000 },
    { "epoch": 0.35, "learning_rate": 4.416580588392823e-05, "loss": 0.8454, "step": 30500 },
    { "epoch": 0.36, "learning_rate": 4.407016335743525e-05, "loss": 0.8585, "step": 31000 },
    { "epoch": 0.36, "learning_rate": 4.3974520830942274e-05, "loss": 0.8393, "step": 31500 },
    { "epoch": 0.37, "learning_rate": 4.3878878304449294e-05, "loss": 0.8612, "step": 32000 },
    { "epoch": 0.37, "learning_rate": 4.378323577795631e-05, "loss": 0.8448, "step": 32500 },
    { "epoch": 0.38, "learning_rate": 4.368759325146333e-05, "loss": 0.839, "step": 33000 },
    { "epoch": 0.38, "learning_rate": 4.359195072497035e-05, "loss": 0.8311, "step": 33500 },
    { "epoch": 0.39, "learning_rate": 4.349630819847738e-05, "loss": 0.826, "step": 34000 },
    { "epoch": 0.4, "learning_rate": 4.340066567198439e-05, "loss": 0.834, "step": 34500 },
    { "epoch": 0.4, "learning_rate": 4.3305023145491416e-05, "loss": 0.8266, "step": 35000 },
    { "epoch": 0.41, "learning_rate": 4.3209380618998436e-05, "loss": 0.8284, "step": 35500 },
    { "epoch": 0.41, "learning_rate": 4.311373809250545e-05, "loss": 0.8308, "step": 36000 },
    { "epoch": 0.42, "learning_rate": 4.3018095566012474e-05, "loss": 0.8156, "step": 36500 },
    { "epoch": 0.42, "learning_rate": 4.2922453039519494e-05, "loss": 0.8217, "step": 37000 },
    { "epoch": 0.43, "learning_rate": 4.282681051302651e-05, "loss": 0.8274, "step": 37500 },
    { "epoch": 0.44, "learning_rate": 4.273116798653353e-05, "loss": 0.825, "step": 38000 },
    { "epoch": 0.44, "learning_rate": 4.263552546004055e-05, "loss": 0.8207, "step": 38500 },
    { "epoch": 0.45, "learning_rate": 4.253988293354758e-05, "loss": 0.8264, "step": 39000 },
    { "epoch": 0.45, "learning_rate": 4.24442404070546e-05, "loss": 0.8192, "step": 39500 },
    { "epoch": 0.46, "learning_rate": 4.2348597880561616e-05, "loss": 0.8085, "step": 40000 },
    { "epoch": 0.46, "learning_rate": 4.2252955354068636e-05, "loss": 0.8223, "step": 40500 },
    { "epoch": 0.47, "learning_rate": 4.2157312827575655e-05, "loss": 0.8004, "step": 41000 },
    { "epoch": 0.48, "learning_rate": 4.2061670301082674e-05, "loss": 0.8074, "step": 41500 },
    { "epoch": 0.48, "learning_rate": 4.1966027774589694e-05, "loss": 0.813, "step": 42000 },
    { "epoch": 0.49, "learning_rate": 4.187038524809671e-05, "loss": 0.7991, "step": 42500 },
    { "epoch": 0.49, "learning_rate": 4.177474272160374e-05, "loss": 0.7942, "step": 43000 },
    { "epoch": 0.5, "learning_rate": 4.167910019511075e-05, "loss": 0.8021, "step": 43500 },
    { "epoch": 0.5, "learning_rate": 4.158345766861778e-05, "loss": 0.8121, "step": 44000 },
    { "epoch": 0.51, "learning_rate": 4.14878151421248e-05, "loss": 0.802, "step": 44500 },
    { "epoch": 0.52, "learning_rate": 4.1392172615631816e-05, "loss": 0.7872, "step": 45000 },
    { "epoch": 0.52, "learning_rate": 4.1296530089138836e-05, "loss": 0.8041, "step": 45500 },
    { "epoch": 0.53, "learning_rate": 4.1200887562645855e-05, "loss": 0.7901, "step": 46000 },
    { "epoch": 0.53, "learning_rate": 4.110524503615288e-05, "loss": 0.7762, "step": 46500 },
    { "epoch": 0.54, "learning_rate": 4.1009602509659894e-05, "loss": 0.7955, "step": 47000 },
    { "epoch": 0.55, "learning_rate": 4.091395998316691e-05, "loss": 0.7851, "step": 47500 },
    { "epoch": 0.55, "learning_rate": 4.081831745667394e-05, "loss": 0.7947, "step": 48000 },
    { "epoch": 0.56, "learning_rate": 4.072267493018096e-05, "loss": 0.7956, "step": 48500 },
    { "epoch": 0.56, "learning_rate": 4.062703240368798e-05, "loss": 0.7767, "step": 49000 },
    { "epoch": 0.57, "learning_rate": 4.0531389877195e-05, "loss": 0.7884, "step": 49500 },
    { "epoch": 0.57, "learning_rate": 4.0435747350702016e-05, "loss": 0.78, "step": 50000 },
    { "epoch": 0.58, "learning_rate": 4.034010482420904e-05, "loss": 0.782, "step": 50500 },
    { "epoch": 0.59, "learning_rate": 4.0244462297716055e-05, "loss": 0.783, "step": 51000 },
    { "epoch": 0.59, "learning_rate": 4.014881977122308e-05, "loss": 0.769, "step": 51500 },
    { "epoch": 0.6, "learning_rate": 4.00531772447301e-05, "loss": 0.7708, "step": 52000 },
    { "epoch": 0.6, "learning_rate": 3.995753471823711e-05, "loss": 0.7611, "step": 52500 },
    { "epoch": 0.61, "learning_rate": 3.986189219174414e-05, "loss": 0.7738, "step": 53000 },
    { "epoch": 0.61, "learning_rate": 3.976624966525116e-05, "loss": 0.7758, "step": 53500 },
    { "epoch": 0.62, "learning_rate": 3.967060713875818e-05, "loss": 0.7804, "step": 54000 },
    { "epoch": 0.63, "learning_rate": 3.95749646122652e-05, "loss": 0.7703, "step": 54500 },
    { "epoch": 0.63, "learning_rate": 3.9479322085772217e-05, "loss": 0.7743, "step": 55000 },
    { "epoch": 0.64, "learning_rate": 3.938367955927924e-05, "loss": 0.7666, "step": 55500 },
    { "epoch": 0.64, "learning_rate": 3.928803703278626e-05, "loss": 0.7633, "step": 56000 },
    { "epoch": 0.65, "learning_rate": 3.919239450629328e-05, "loss": 0.7707, "step": 56500 },
    { "epoch": 0.65, "learning_rate": 3.90967519798003e-05, "loss": 0.7553, "step": 57000 },
    { "epoch": 0.66, "learning_rate": 3.900110945330732e-05, "loss": 0.7537, "step": 57500 },
    { "epoch": 0.67, "learning_rate": 3.890546692681434e-05, "loss": 0.754, "step": 58000 },
    { "epoch": 0.67, "learning_rate": 3.880982440032136e-05, "loss": 0.7448, "step": 58500 },
    { "epoch": 0.68, "learning_rate": 3.871418187382838e-05, "loss": 0.747, "step": 59000 },
    { "epoch": 0.68, "learning_rate": 3.8618539347335404e-05, "loss": 0.7593, "step": 59500 },
    { "epoch": 0.69, "learning_rate": 3.8522896820842417e-05, "loss": 0.7612, "step": 60000 },
    { "epoch": 0.69, "learning_rate": 3.842725429434944e-05, "loss": 0.7512, "step": 60500 },
    { "epoch": 0.7, "learning_rate": 3.833161176785646e-05, "loss": 0.7438, "step": 61000 },
    { "epoch": 0.71, "learning_rate": 3.823596924136348e-05, "loss": 0.7528, "step": 61500 },
    { "epoch": 0.71, "learning_rate": 3.81403267148705e-05, "loss": 0.7306, "step": 62000 },
    { "epoch": 0.72, "learning_rate": 3.804468418837752e-05, "loss": 0.7493, "step": 62500 },
    { "epoch": 0.72, "learning_rate": 3.7949041661884546e-05, "loss": 0.7472, "step": 63000 },
    { "epoch": 0.73, "learning_rate": 3.785339913539156e-05, "loss": 0.7412, "step": 63500 },
    { "epoch": 0.73, "learning_rate": 3.775775660889858e-05, "loss": 0.7538, "step": 64000 },
    { "epoch": 0.74, "learning_rate": 3.7662114082405604e-05, "loss": 0.7316, "step": 64500 },
    { "epoch": 0.75, "learning_rate": 3.7566471555912623e-05, "loss": 0.754, "step": 65000 },
    { "epoch": 0.75, "learning_rate": 3.747082902941964e-05, "loss": 0.7274, "step": 65500 },
    { "epoch": 0.76, "learning_rate": 3.737518650292666e-05, "loss": 0.7258, "step": 66000 },
    { "epoch": 0.76, "learning_rate": 3.727954397643368e-05, "loss": 0.7351, "step": 66500 },
    { "epoch": 0.77, "learning_rate": 3.718390144994071e-05, "loss": 0.7626, "step": 67000 },
    { "epoch": 0.77, "learning_rate": 3.708825892344772e-05, "loss": 0.7364, "step": 67500 },
    { "epoch": 0.78, "learning_rate": 3.6992616396954746e-05, "loss": 0.7374, "step": 68000 },
    { "epoch": 0.79, "learning_rate": 3.6896973870461766e-05, "loss": 0.7237, "step": 68500 },
    { "epoch": 0.79, "learning_rate": 3.6801331343968785e-05, "loss": 0.7444, "step": 69000 },
    { "epoch": 0.8, "learning_rate": 3.6705688817475804e-05, "loss": 0.7336, "step": 69500 },
    { "epoch": 0.8, "learning_rate": 3.6610046290982823e-05, "loss": 0.7209, "step": 70000 },
    { "epoch": 0.81, "learning_rate": 3.651440376448985e-05, "loss": 0.7277, "step": 70500 },
    { "epoch": 0.81, "learning_rate": 3.641876123799686e-05, "loss": 0.7329, "step": 71000 },
    { "epoch": 0.82, "learning_rate": 3.632311871150388e-05, "loss": 0.7197, "step": 71500 },
    { "epoch": 0.83, "learning_rate": 3.622747618501091e-05, "loss": 0.729, "step": 72000 },
    { "epoch": 0.83, "learning_rate": 3.613183365851793e-05, "loss": 0.7243, "step": 72500 },
    { "epoch": 0.84, "learning_rate": 3.6036191132024946e-05, "loss": 0.7297, "step": 73000 },
    { "epoch": 0.84, "learning_rate": 3.5940548605531966e-05, "loss": 0.7192, "step": 73500 },
    { "epoch": 0.85, "learning_rate": 3.5844906079038985e-05, "loss": 0.716, "step": 74000 },
    { "epoch": 0.86, "learning_rate": 3.574926355254601e-05, "loss": 0.7183, "step": 74500 },
    { "epoch": 0.86, "learning_rate": 3.5653621026053024e-05, "loss": 0.7101, "step": 75000 },
    { "epoch": 0.87, "learning_rate": 3.555797849956004e-05, "loss": 0.7158, "step": 75500 },
    { "epoch": 0.87, "learning_rate": 3.546233597306707e-05, "loss": 0.709, "step": 76000 },
    { "epoch": 0.88, "learning_rate": 3.536669344657408e-05, "loss": 0.7308, "step": 76500 },
    { "epoch": 0.88, "learning_rate": 3.527105092008111e-05, "loss": 0.7171, "step": 77000 },
    { "epoch": 0.89, "learning_rate": 3.517540839358813e-05, "loss": 0.7185, "step": 77500 },
    { "epoch": 0.9, "learning_rate": 3.5079765867095146e-05, "loss": 0.7236, "step": 78000 },
    { "epoch": 0.9, "learning_rate": 3.4984123340602166e-05, "loss": 0.7006, "step": 78500 },
    { "epoch": 0.91, "learning_rate": 3.4888480814109185e-05, "loss": 0.7201, "step": 79000 },
    { "epoch": 0.91, "learning_rate": 3.479283828761621e-05, "loss": 0.7258, "step": 79500 },
    { "epoch": 0.92, "learning_rate": 3.469719576112323e-05, "loss": 0.7117, "step": 80000 },
    { "epoch": 0.92, "learning_rate": 3.460155323463024e-05, "loss": 0.7279, "step": 80500 },
    { "epoch": 0.93, "learning_rate": 3.450591070813727e-05, "loss": 0.7085, "step": 81000 },
    { "epoch": 0.94, "learning_rate": 3.441026818164429e-05, "loss": 0.7076, "step": 81500 },
    { "epoch": 0.94, "learning_rate": 3.431462565515131e-05, "loss": 0.7101, "step": 82000 },
    { "epoch": 0.95, "learning_rate": 3.421898312865833e-05, "loss": 0.7102, "step": 82500 },
    { "epoch": 0.95, "learning_rate": 3.4123340602165346e-05, "loss": 0.7012, "step": 83000 },
    { "epoch": 0.96, "learning_rate": 3.402769807567237e-05, "loss": 0.7119, "step": 83500 },
    { "epoch": 0.96, "learning_rate": 3.3932055549179385e-05, "loss": 0.7079, "step": 84000 },
    { "epoch": 0.97, "learning_rate": 3.383641302268641e-05, "loss": 0.7056, "step": 84500 },
    { "epoch": 0.98, "learning_rate": 3.374077049619343e-05, "loss": 0.7021, "step": 85000 },
    { "epoch": 0.98, "learning_rate": 3.364512796970045e-05, "loss": 0.7188, "step": 85500 },
    { "epoch": 0.99, "learning_rate": 3.354948544320747e-05, "loss": 0.6963, "step": 86000 },
    { "epoch": 0.99, "learning_rate": 3.345384291671449e-05, "loss": 0.6971, "step": 86500 },
    { "epoch": 1.0, "learning_rate": 3.3358200390221515e-05, "loss": 0.6967, "step": 87000 },
    { "epoch": 1.0, "learning_rate": 3.326255786372853e-05, "loss": 0.6435, "step": 87500 },
    { "epoch": 1.01, "learning_rate": 3.3166915337235546e-05, "loss": 0.6471, "step": 88000 },
    { "epoch": 1.02, "learning_rate": 3.307127281074257e-05, "loss": 0.6381, "step": 88500 },
    { "epoch": 1.02, "learning_rate": 3.297563028424959e-05, "loss": 0.6394, "step": 89000 },
    { "epoch": 1.03, "learning_rate": 3.287998775775661e-05, "loss": 0.6357, "step": 89500 },
    { "epoch": 1.03, "learning_rate": 3.278434523126363e-05, "loss": 0.6419, "step": 90000 },
    { "epoch": 1.04, "learning_rate": 3.268870270477065e-05, "loss": 0.642, "step": 90500 },
    { "epoch": 1.04, "learning_rate": 3.2593060178277676e-05, "loss": 0.6415, "step": 91000 },
    { "epoch": 1.05, "learning_rate": 3.249741765178469e-05, "loss": 0.6439, "step": 91500 },
    { "epoch": 1.06, "learning_rate": 3.2401775125291715e-05, "loss": 0.6374, "step": 92000 },
    { "epoch": 1.06, "learning_rate": 3.2306132598798734e-05, "loss": 0.6303, "step": 92500 },
    { "epoch": 1.07, "learning_rate": 3.2210490072305746e-05, "loss": 0.643, "step": 93000 },
    { "epoch": 1.07, "learning_rate": 3.211484754581277e-05, "loss": 0.6355, "step": 93500 },
    { "epoch": 1.08, "learning_rate": 3.201920501931979e-05, "loss": 0.6401, "step": 94000 },
    { "epoch": 1.08, "learning_rate": 3.192356249282681e-05, "loss": 0.6384, "step": 94500 },
    { "epoch": 1.09, "learning_rate": 3.182791996633383e-05, "loss": 0.6194, "step": 95000 },
    { "epoch": 1.1, "learning_rate": 3.173227743984085e-05, "loss": 0.6294, "step": 95500 },
    { "epoch": 1.1, "learning_rate": 3.1636634913347876e-05, "loss": 0.6356, "step": 96000 },
    { "epoch": 1.11, "learning_rate": 3.1540992386854895e-05, "loss": 0.6409, "step": 96500 },
    { "epoch": 1.11, "learning_rate": 3.144534986036191e-05, "loss": 0.63, "step": 97000 },
    { "epoch": 1.12, "learning_rate": 3.1349707333868934e-05, "loss": 0.6353, "step": 97500 },
    { "epoch": 1.12, "learning_rate": 3.125406480737595e-05, "loss": 0.6359, "step": 98000 },
    { "epoch": 1.13, "learning_rate": 3.115842228088297e-05, "loss": 0.6345, "step": 98500 },
    { "epoch": 1.14, "learning_rate": 3.106277975438999e-05, "loss": 0.6362, "step": 99000 },
    { "epoch": 1.14, "learning_rate": 3.096713722789701e-05, "loss": 0.636, "step": 99500 },
    { "epoch": 1.15, "learning_rate": 3.087149470140404e-05, "loss": 0.6427, "step": 100000 },
    { "epoch": 1.15, "learning_rate": 3.077585217491105e-05, "loss": 0.6348, "step": 100500 },
    { "epoch": 1.16, "learning_rate": 3.0680209648418076e-05, "loss": 0.6407, "step": 101000 },
    { "epoch": 1.16, "learning_rate": 3.0584567121925095e-05, "loss": 0.6257, "step": 101500 },
    { "epoch": 1.17, "learning_rate": 3.0488924595432118e-05, "loss": 0.6382, "step": 102000 },
    { "epoch": 1.18, "learning_rate": 3.0393282068939134e-05, "loss": 0.6317, "step": 102500 },
    { "epoch": 1.18, "learning_rate": 3.0297639542446153e-05, "loss": 0.642, "step": 103000 },
    { "epoch": 1.19, "learning_rate": 3.0201997015953176e-05, "loss": 0.6272, "step": 103500 },
    { "epoch": 1.19, "learning_rate": 3.0106354489460192e-05, "loss": 0.6395, "step": 104000 },
    { "epoch": 1.2, "learning_rate": 3.0010711962967215e-05, "loss": 0.6204, "step": 104500 },
    { "epoch": 1.21, "learning_rate": 2.9915069436474234e-05, "loss": 0.6241, "step": 105000 },
    { "epoch": 1.21, "learning_rate": 2.9819426909981257e-05, "loss": 0.6422, "step": 105500 },
    { "epoch": 1.22, "learning_rate": 2.9723784383488273e-05, "loss": 0.632, "step": 106000 },
    { "epoch": 1.22, "learning_rate": 2.9628141856995295e-05, "loss": 0.6326, "step": 106500 },
    { "epoch": 1.23, "learning_rate": 2.9532499330502318e-05, "loss": 0.631, "step": 107000 },
    { "epoch": 1.23, "learning_rate": 2.9436856804009337e-05, "loss": 0.6369, "step": 107500 },
    { "epoch": 1.24, "learning_rate": 2.9341214277516353e-05, "loss": 0.6192, "step": 108000 },
    { "epoch": 1.25, "learning_rate": 2.9245571751023376e-05, "loss": 0.6422, "step": 108500 },
    { "epoch": 1.25, "learning_rate": 2.91499292245304e-05, "loss": 0.6365, "step": 109000 },
    { "epoch": 1.26, "learning_rate": 2.9054286698037415e-05, "loss": 0.6395, "step": 109500 },
    { "epoch": 1.26, "learning_rate": 2.8958644171544434e-05, "loss": 0.6395, "step": 110000 },
    { "epoch": 1.27, "learning_rate": 2.8863001645051457e-05, "loss": 0.6305, "step": 110500 },
    { "epoch": 1.27, "learning_rate": 2.876735911855848e-05, "loss": 0.6224, "step": 111000 },
    { "epoch": 1.28, "learning_rate": 2.8671716592065495e-05, "loss": 0.6189, "step": 111500 },
    { "epoch": 1.29, "learning_rate": 2.8576074065572518e-05, "loss": 0.6183, "step": 112000 },
    { "epoch": 1.29, "learning_rate": 2.8480431539079538e-05, "loss": 0.6451, "step": 112500 },
    { "epoch": 1.3, "learning_rate": 2.838478901258656e-05, "loss": 0.6311, "step": 113000 },
    { "epoch": 1.3, "learning_rate": 2.8289146486093576e-05, "loss": 0.631, "step": 113500 },
    { "epoch": 1.31, "learning_rate": 2.81935039596006e-05, "loss": 0.6239, "step": 114000 },
    { "epoch": 1.31, "learning_rate": 2.8097861433107618e-05, "loss": 0.6369, "step": 114500 },
    { "epoch": 1.32, "learning_rate": 2.800221890661464e-05, "loss": 0.6278, "step": 115000 },
    { "epoch": 1.33, "learning_rate": 2.7906576380121657e-05, "loss": 0.6256, "step": 115500 },
    { "epoch": 1.33, "learning_rate": 2.781093385362868e-05, "loss": 0.621, "step": 116000 },
    { "epoch": 1.34, "learning_rate": 2.7715291327135702e-05, "loss": 0.627, "step": 116500 },
    { "epoch": 1.34, "learning_rate": 2.7619648800642718e-05, "loss": 0.6212, "step": 117000 },
    { "epoch": 1.35, "learning_rate": 2.7524006274149738e-05, "loss": 0.6291, "step": 117500 },
    { "epoch": 1.35, "learning_rate": 2.742836374765676e-05, "loss": 0.6246, "step": 118000 },
    { "epoch": 1.36, "learning_rate": 2.7332721221163783e-05, "loss": 0.6194, "step": 118500 },
    { "epoch": 1.37, "learning_rate": 2.72370786946708e-05, "loss": 0.6166, "step": 119000 },
    { "epoch": 1.37, "learning_rate": 2.7141436168177818e-05, "loss": 0.6181, "step": 119500 },
    { "epoch": 1.38, "learning_rate": 2.704579364168484e-05, "loss": 0.6226, "step": 120000 },
    { "epoch": 1.38, "learning_rate": 2.6950151115191864e-05, "loss": 0.6198, "step": 120500 },
    { "epoch": 1.39, "learning_rate": 2.685450858869888e-05, "loss": 0.6252, "step": 121000 },
    { "epoch": 1.39, "learning_rate": 2.67588660622059e-05, "loss": 0.624, "step": 121500 },
    { "epoch": 1.4, "learning_rate": 2.666322353571292e-05, "loss": 0.6279, "step": 122000 },
    { "epoch": 1.41, "learning_rate": 2.6567581009219938e-05, "loss": 0.6156, "step": 122500 },
    { "epoch": 1.41, "learning_rate": 2.647193848272696e-05, "loss": 0.6257, "step": 123000 },
    { "epoch": 1.42, "learning_rate": 2.6376295956233983e-05, "loss": 0.6091, "step": 123500 },
    { "epoch": 1.42, "learning_rate": 2.6280653429741002e-05, "loss": 0.6215, "step": 124000 },
    { "epoch": 1.43, "learning_rate": 2.618501090324802e-05, "loss": 0.619, "step": 124500 },
    { "epoch": 1.43, "learning_rate": 2.608936837675504e-05, "loss": 0.6097, "step": 125000 },
    { "epoch": 1.44, "learning_rate": 2.5993725850262064e-05, "loss": 0.6267, "step": 125500 },
    { "epoch": 1.45, "learning_rate": 2.5898083323769083e-05, "loss": 0.618, "step": 126000 },
    { "epoch": 1.45, "learning_rate": 2.58024407972761e-05, "loss": 0.6072, "step": 126500 },
    { "epoch": 1.46, "learning_rate": 2.5706798270783122e-05, "loss": 0.6112, "step": 127000 },
    { "epoch": 1.46, "learning_rate": 2.5611155744290144e-05, "loss": 0.6135, "step": 127500 },
    { "epoch": 1.47, "learning_rate": 2.551551321779716e-05, "loss": 0.6109, "step": 128000 },
    { "epoch": 1.47, "learning_rate": 2.5419870691304183e-05, "loss": 0.6177, "step": 128500 },
    { "epoch": 1.48, "learning_rate": 2.5324228164811202e-05, "loss": 0.6009, "step": 129000 },
    { "epoch": 1.49, "learning_rate": 2.5228585638318225e-05, "loss": 0.6052, "step": 129500 },
    { "epoch": 1.49, "learning_rate": 2.513294311182524e-05, "loss": 0.6166, "step": 130000 },
    { "epoch": 1.5, "learning_rate": 2.5037300585332264e-05, "loss": 0.6126, "step": 130500 },
    { "epoch": 1.5, "learning_rate": 2.4941658058839283e-05, "loss": 0.6214, "step": 131000 },
    { "epoch": 1.51, "learning_rate": 2.4846015532346302e-05, "loss": 0.6171, "step": 131500 },
    { "epoch": 1.51, "learning_rate": 2.4750373005853325e-05, "loss": 0.6037, "step": 132000 },
    { "epoch": 1.52, "learning_rate": 2.4654730479360344e-05, "loss": 0.6085, "step": 132500 },
    { "epoch": 1.53, "learning_rate": 2.4559087952867364e-05, "loss": 0.614, "step": 133000 },
    { "epoch": 1.53, "learning_rate": 2.4463445426374383e-05, "loss": 0.6233, "step": 133500 },
    { "epoch": 1.54, "learning_rate": 2.4367802899881402e-05, "loss": 0.6103, "step": 134000 },
    { "epoch": 1.54, "learning_rate": 2.4272160373388425e-05, "loss": 0.6175, "step": 134500 },
    { "epoch": 1.55, "learning_rate": 2.4176517846895445e-05, "loss": 0.6139, "step": 135000 },
    { "epoch": 1.56, "learning_rate": 2.4080875320402467e-05, "loss": 0.6109, "step": 135500 },
    { "epoch": 1.56, "learning_rate": 2.3985232793909483e-05, "loss": 0.6091, "step": 136000 },
    { "epoch": 1.57, "learning_rate": 2.3889590267416506e-05, "loss": 0.6034, "step": 136500 },
    { "epoch": 1.57, "learning_rate": 2.3793947740923525e-05, "loss": 0.6161, "step": 137000 },
    { "epoch": 1.58, "learning_rate": 2.3698305214430548e-05, "loss": 0.6114, "step": 137500 },
    { "epoch": 1.58, "learning_rate": 2.3602662687937567e-05, "loss": 0.6006, "step": 138000 },
    { "epoch": 1.59, "learning_rate": 2.3507020161444583e-05, "loss": 0.5987, "step": 138500 },
    { "epoch": 1.6, "learning_rate": 2.3411377634951606e-05, "loss": 0.6084, "step": 139000 },
    { "epoch": 1.6, "learning_rate": 2.3315735108458625e-05, "loss": 0.6022, "step": 139500 },
    { "epoch": 1.61, "learning_rate": 2.3220092581965648e-05, "loss": 0.6033, "step": 140000 },
    { "epoch": 1.61, "learning_rate": 2.3124450055472667e-05, "loss": 0.608, "step": 140500 },
    { "epoch": 1.62, "learning_rate": 2.3028807528979687e-05, "loss": 0.6088, "step": 141000 },
    { "epoch": 1.62, "learning_rate": 2.2933165002486706e-05, "loss": 0.613, "step": 141500 },
    { "epoch": 1.63, "learning_rate": 2.283752247599373e-05, "loss": 0.6048, "step": 142000 },
    { "epoch": 1.64, "learning_rate": 2.2741879949500748e-05, "loss": 0.6156, "step": 142500 },
    { "epoch": 1.64, "learning_rate": 2.2646237423007767e-05, "loss": 0.5922, "step": 143000 },
    { "epoch": 1.65, "learning_rate": 2.2550594896514787e-05, "loss": 0.5913, "step": 143500 },
    { "epoch": 1.65, "learning_rate": 2.245495237002181e-05, "loss": 0.6193, "step": 144000 },
    { "epoch": 1.66, "learning_rate": 2.235930984352883e-05, "loss": 0.5952, "step": 144500 },
    { "epoch": 1.66, "learning_rate": 2.2263667317035848e-05, "loss": 0.5864, "step": 145000 },
    { "epoch": 1.67, "learning_rate": 2.2168024790542867e-05, "loss": 0.5968, "step": 145500 },
    { "epoch": 1.68, "learning_rate": 2.2072382264049887e-05, "loss": 0.593, "step": 146000 },
    { "epoch": 1.68, "learning_rate": 2.197673973755691e-05, "loss": 0.5931, "step": 146500 },
    { "epoch": 1.69, "learning_rate": 2.188109721106393e-05, "loss": 0.6066, "step": 147000 },
    { "epoch": 1.69, "learning_rate": 2.1785454684570948e-05, "loss": 0.598, "step": 147500 },
    { "epoch": 1.7, "learning_rate": 2.1689812158077967e-05, "loss": 0.5971, "step": 148000 },
    { "epoch": 1.7, "learning_rate": 2.159416963158499e-05, "loss": 0.6013, "step": 148500 },
    { "epoch": 1.71, "learning_rate": 2.149852710509201e-05, "loss": 0.5954, "step": 149000 },
    { "epoch": 1.72, "learning_rate": 2.1402884578599032e-05, "loss": 0.5965, "step": 149500 },
    { "epoch": 1.72, "learning_rate": 2.1307242052106048e-05, "loss": 0.5851, "step": 150000 },
    { "epoch": 1.73, "learning_rate": 2.1211599525613067e-05, "loss": 0.5922, "step": 150500 },
    { "epoch": 1.73, "learning_rate": 2.111595699912009e-05, "loss": 0.5861, "step": 151000 },
    { "epoch": 1.74, "learning_rate": 2.102031447262711e-05, "loss": 0.5923, "step": 151500 },
    { "epoch": 1.74, "learning_rate": 2.0924671946134132e-05, "loss": 0.591, "step": 152000 },
    { "epoch": 1.75, "learning_rate": 2.0829029419641148e-05, "loss": 0.5974, "step": 152500 },
    { "epoch": 1.76, "learning_rate": 2.073338689314817e-05, "loss": 0.6003, "step": 153000 },
    { "epoch": 1.76, "learning_rate": 2.063774436665519e-05, "loss": 0.6029, "step": 153500 },
    { "epoch": 1.77, "learning_rate": 2.0542101840162213e-05, "loss": 0.5909, "step": 154000 },
    { "epoch": 1.77, "learning_rate": 2.0446459313669232e-05, "loss": 0.5972, "step": 154500 },
    { "epoch": 1.78, "learning_rate": 2.035081678717625e-05, "loss": 0.6074, "step": 155000 },
    { "epoch": 1.78, "learning_rate": 2.025517426068327e-05, "loss": 0.5821, "step": 155500 },
    { "epoch": 1.79, "learning_rate": 2.015953173419029e-05, "loss": 0.5863, "step": 156000 },
    { "epoch": 1.8, "learning_rate": 2.0063889207697313e-05, "loss": 0.5801, "step": 156500 },
    { "epoch": 1.8, "learning_rate": 1.9968246681204332e-05, "loss": 0.5914, "step": 157000 },
    { "epoch": 1.81, "learning_rate": 1.987260415471135e-05, "loss": 0.6046, "step": 157500 },
    { "epoch": 1.81, "learning_rate": 1.977696162821837e-05, "loss": 0.5955, "step": 158000 },
    { "epoch": 1.82, "learning_rate": 1.9681319101725394e-05, "loss": 0.591, "step": 158500 },
    { "epoch": 1.82, "learning_rate": 1.9585676575232413e-05, "loss": 0.588, "step": 159000 },
    { "epoch": 1.83, "learning_rate": 1.9490034048739432e-05, "loss": 0.5932, "step": 159500 },
    { "epoch": 1.84, "learning_rate": 1.939439152224645e-05, "loss": 0.5826, "step": 160000 },
    { "epoch": 1.84, "learning_rate": 1.9298748995753474e-05, "loss": 0.5823, "step": 160500 },
    { "epoch": 1.85, "learning_rate": 1.9203106469260494e-05, "loss": 0.5978, "step": 161000 },
    { "epoch": 1.85, "learning_rate": 1.9107463942767513e-05, "loss": 0.5848, "step": 161500 },
    { "epoch": 1.86, "learning_rate": 1.9011821416274532e-05, "loss": 0.5881, "step": 162000 },
    { "epoch": 1.87, "learning_rate": 1.891617888978155e-05, "loss": 0.5879, "step": 162500 },
    { "epoch": 1.87, "learning_rate": 1.8820536363288574e-05, "loss": 0.5926, "step": 163000 },
    { "epoch": 1.88, "learning_rate": 1.8724893836795594e-05, "loss": 0.5869, "step": 163500 },
    { "epoch": 1.88, "learning_rate": 1.8629251310302613e-05, "loss": 0.5831, "step": 164000 },
    { "epoch": 1.89, "learning_rate": 1.8533608783809632e-05, "loss": 0.579, "step": 164500 },
    { "epoch": 1.89, "learning_rate": 1.8437966257316655e-05, "loss": 0.5863, "step": 165000 },
    { "epoch": 1.9, "learning_rate": 1.8342323730823674e-05, "loss": 0.5817, "step": 165500 },
    { "epoch": 1.91, "learning_rate": 1.8246681204330697e-05, "loss": 0.5901, "step": 166000 },
    { "epoch": 1.91, "learning_rate": 1.8151038677837713e-05, "loss": 0.5863, "step": 166500 },
    { "epoch": 1.92, "learning_rate": 1.8055396151344736e-05, "loss": 0.5831, "step": 167000 },
    { "epoch": 1.92, "learning_rate": 1.7959753624851755e-05, "loss": 0.5853, "step": 167500 },
    { "epoch": 1.93, "learning_rate": 1.7864111098358774e-05, "loss": 0.5858, "step": 168000 },
    { "epoch": 1.93, "learning_rate": 1.7768468571865797e-05, "loss": 0.5876, "step": 168500 },
    { "epoch": 1.94, "learning_rate": 1.7672826045372813e-05, "loss": 0.5796, "step": 169000 },
    { "epoch": 1.95, "learning_rate": 1.7577183518879836e-05, "loss": 0.5896, "step": 169500 },
    { "epoch": 1.95, "learning_rate": 1.7481540992386855e-05, "loss": 0.5882, "step": 170000 },
    { "epoch": 1.96, "learning_rate": 1.7385898465893878e-05, "loss": 0.5791, "step": 170500 },
    { "epoch": 1.96, "learning_rate": 1.7290255939400897e-05, "loss": 0.5875, "step": 171000 },
    { "epoch": 1.97, "learning_rate": 1.7194613412907916e-05, "loss": 0.5855, "step": 171500 },
    { "epoch": 1.97, "learning_rate": 1.7098970886414936e-05, "loss": 0.5885, "step": 172000 },
    { "epoch": 1.98, "learning_rate": 1.700332835992196e-05, "loss": 0.571, "step": 172500 },
    { "epoch": 1.99, "learning_rate": 1.6907685833428978e-05, "loss": 0.5803, "step": 173000 },
    { "epoch": 1.99, "learning_rate": 1.6812043306935997e-05, "loss": 0.5805, "step": 173500 },
    { "epoch": 2.0, "learning_rate": 1.6716400780443016e-05, "loss": 0.5853, "step": 174000 },
    { "epoch": 2.0, "learning_rate": 1.6620758253950036e-05, "loss": 0.5407, "step": 174500 },
    { "epoch": 2.01, "learning_rate": 1.652511572745706e-05, "loss": 0.5119, "step": 175000 },
    { "epoch": 2.01, "learning_rate": 1.6429473200964078e-05, "loss": 0.5168, "step": 175500 },
    { "epoch": 2.02, "learning_rate": 1.6333830674471097e-05, "loss": 0.5199, "step": 176000 },
    { "epoch": 2.03, "learning_rate": 1.6238188147978116e-05, "loss": 0.5095, "step": 176500 },
    { "epoch": 2.03, "learning_rate": 1.614254562148514e-05, "loss": 0.5177, "step": 177000 },
    { "epoch": 2.04, "learning_rate": 1.604690309499216e-05, "loss": 0.5131, "step": 177500 },
    { "epoch": 2.04, "learning_rate": 1.5951260568499178e-05, "loss": 0.5133, "step": 178000 },
    { "epoch": 2.05, "learning_rate": 1.5855618042006197e-05, "loss": 0.516, "step": 178500 },
    { "epoch": 2.05, "learning_rate": 1.5759975515513217e-05, "loss": 0.5193, "step": 179000 },
    { "epoch": 2.06, "learning_rate": 1.566433298902024e-05, "loss": 0.5109, "step": 179500 },
    { "epoch": 2.07, "learning_rate": 1.556869046252726e-05, "loss": 0.5071, "step": 180000 },
    { "epoch": 2.07, "learning_rate": 1.5473047936034278e-05, "loss": 0.5152, "step": 180500 },
    { "epoch": 2.08, "learning_rate": 1.5377405409541297e-05, "loss": 0.5164, "step": 181000 },
    { "epoch": 2.08, "learning_rate": 1.528176288304832e-05, "loss": 0.5195, "step": 181500 },
    { "epoch": 2.09, "learning_rate": 1.518612035655534e-05, "loss": 0.5251, "step": 182000 },
    { "epoch": 2.09, "learning_rate": 1.509047783006236e-05, "loss": 0.5204, "step": 182500 },
    { "epoch": 2.1, "learning_rate": 1.499483530356938e-05, "loss": 0.5234, "step": 183000 },
    { "epoch": 2.11, "learning_rate": 1.48991927770764e-05, "loss": 0.5029, "step": 183500 },
    { "epoch": 2.11, "learning_rate": 1.480355025058342e-05, "loss": 0.5111, "step": 184000 },
    { "epoch": 2.12, "learning_rate": 1.470790772409044e-05, "loss": 0.5099, "step": 184500 },
    { "epoch": 2.12, "learning_rate": 1.461226519759746e-05, "loss": 0.519, "step": 185000 },
    { "epoch": 2.13, "learning_rate": 1.451662267110448e-05, "loss": 0.5072, "step": 185500 },
    { "epoch": 2.13, "learning_rate": 1.44209801446115e-05, "loss": 0.5064, "step": 186000 },
    { "epoch": 2.14, "learning_rate": 1.432533761811852e-05, "loss": 0.511, "step": 186500 },
    { "epoch": 2.15, "learning_rate": 1.4229695091625541e-05, "loss": 0.514, "step": 187000 },
    { "epoch": 2.15, "learning_rate": 1.413405256513256e-05, "loss": 0.516, "step": 187500 },
    { "epoch": 2.16, "learning_rate": 1.4038410038639583e-05, "loss": 0.5226, "step": 188000 },
    { "epoch": 2.16, "learning_rate": 1.39427675121466e-05, "loss": 0.505, "step": 188500 },
    { "epoch": 2.17, "learning_rate": 1.3847124985653623e-05, "loss": 0.5158, "step": 189000 },
    { "epoch": 2.17, "learning_rate": 1.3751482459160641e-05, "loss": 0.5163, "step": 189500 },
    { "epoch": 2.18, "learning_rate": 1.3655839932667664e-05, "loss": 0.5156, "step": 190000 },
    { "epoch": 2.19, "learning_rate": 1.3560197406174683e-05, "loss": 0.5174, "step": 190500 },
    { "epoch": 2.19, "learning_rate": 1.34645548796817e-05, "loss": 0.5163, "step": 191000 },
    { "epoch": 2.2, "learning_rate": 1.3368912353188723e-05, "loss": 0.5079, "step": 191500 },
    { "epoch": 2.2, "learning_rate": 1.3273269826695741e-05, "loss": 0.5082, "step": 192000 },
    { "epoch": 2.21, "learning_rate": 1.3177627300202764e-05, "loss": 0.5132, "step": 192500 },
    { "epoch": 2.22, "learning_rate": 1.3081984773709781e-05, "loss": 0.5174, "step": 193000 },
    { "epoch": 2.22, "learning_rate": 1.2986342247216804e-05, "loss": 0.5105, "step": 193500 },
    { "epoch": 2.23, "learning_rate": 1.2890699720723823e-05, "loss": 0.4967, "step": 194000 },
    { "epoch": 2.23, "learning_rate": 1.2795057194230844e-05, "loss": 0.5078, "step": 194500 },
    { "epoch": 2.24, "learning_rate": 1.2699414667737864e-05, "loss": 0.5116, "step": 195000 },
    { "epoch": 2.24, "learning_rate": 1.2603772141244885e-05, "loss": 0.5141, "step": 195500 },
    { "epoch": 2.25, "learning_rate": 1.2508129614751904e-05, "loss": 0.5174, "step": 196000 },
    { "epoch": 2.26, "learning_rate": 1.2412487088258923e-05, "loss": 0.5151, "step": 196500 },
    { "epoch": 2.26, "learning_rate": 1.2316844561765945e-05, "loss": 0.5078, "step": 197000 },
    { "epoch": 2.27, "learning_rate": 1.2221202035272966e-05, "loss": 0.5187, "step": 197500 },
    { "epoch": 2.27, "learning_rate": 1.2125559508779985e-05, "loss": 0.5154, "step": 198000 },
    { "epoch": 2.28, "learning_rate": 1.2029916982287006e-05, "loss": 0.5165, "step": 198500 },
    { "epoch": 2.28, "learning_rate": 1.1934274455794024e-05, "loss": 0.5042, "step": 199000 },
    { "epoch": 2.29, "learning_rate": 1.1838631929301045e-05, "loss": 0.5042, "step": 199500 },
    { "epoch": 2.3, "learning_rate": 1.1742989402808066e-05, "loss": 0.5185, "step": 200000 },
    { "epoch": 2.3, "learning_rate": 1.1647346876315085e-05, "loss": 0.5047, "step": 200500 },
    { "epoch": 2.31, "learning_rate": 1.1551704349822106e-05, "loss": 0.5055, "step": 201000 },
    { "epoch": 2.31, "learning_rate": 1.1456061823329125e-05, "loss": 0.5035, "step": 201500 },
    { "epoch": 2.32, "learning_rate": 1.1360419296836146e-05, "loss": 0.5121, "step": 202000 },
    { "epoch": 2.32, "learning_rate": 1.1264776770343166e-05, "loss": 0.5112, "step": 202500 },
    { "epoch": 2.33, "learning_rate": 1.1169134243850187e-05, "loss": 0.5035, "step": 203000 },
    { "epoch": 2.34, "learning_rate": 1.1073491717357206e-05, "loss": 0.5009, "step": 203500 },
    { "epoch": 2.34, "learning_rate": 1.0977849190864227e-05, "loss": 0.5067, "step": 204000 },
    { "epoch": 2.35, "learning_rate": 1.0882206664371246e-05, "loss": 0.5055, "step": 204500 },
    { "epoch": 2.35, "learning_rate": 1.0786564137878266e-05, "loss": 0.5025, "step": 205000 },
    { "epoch": 2.36, "learning_rate": 1.0690921611385287e-05, "loss": 0.5044, "step": 205500 },
    { "epoch": 2.36, "learning_rate": 1.0595279084892306e-05, "loss": 0.5091, "step": 206000 },
    { "epoch": 2.37, "learning_rate": 1.0499636558399327e-05, "loss": 0.5074, "step": 206500 },
    { "epoch": 2.38, "learning_rate": 1.0403994031906348e-05, "loss": 0.5142, "step": 207000 },
    { "epoch": 2.38, "learning_rate": 1.0308351505413367e-05, "loss": 0.4958, "step": 207500 },
    { "epoch": 2.39, "learning_rate": 1.0212708978920388e-05, "loss": 0.5208, "step": 208000 },
    { "epoch": 2.39, "learning_rate": 1.0117066452427408e-05, "loss": 0.5013, "step": 208500 },
    { "epoch": 2.4, "learning_rate": 1.0021423925934429e-05, "loss": 0.5009, "step": 209000 },
    { "epoch": 2.4, "learning_rate": 9.925781399441448e-06, "loss": 0.5109, "step": 209500 },
    { "epoch": 2.41, "learning_rate": 9.830138872948469e-06, "loss": 0.4979, "step": 210000 },
    { "epoch": 2.42, "learning_rate": 9.734496346455488e-06, "loss": 0.5099, "step": 210500 },
    { "epoch": 2.42, "learning_rate": 9.638853819962508e-06, "loss": 0.5016, "step": 211000 },
    { "epoch": 2.43, "learning_rate": 9.543211293469529e-06, "loss": 0.5104, "step": 211500 },
    { "epoch": 2.43, "learning_rate": 9.447568766976548e-06, "loss": 0.4964, "step": 212000 },
    { "epoch": 2.44, "learning_rate": 9.351926240483569e-06, "loss": 0.5036, "step": 212500 },
    { "epoch": 2.44, "learning_rate": 9.256283713990588e-06, "loss": 0.5175, "step": 213000 },
    { "epoch": 2.45, "learning_rate": 9.16064118749761e-06, "loss": 0.5002, "step": 213500 },
    { "epoch": 2.46, "learning_rate": 9.06499866100463e-06, "loss": 0.5012, "step": 214000 },
    { "epoch": 2.46, "learning_rate": 8.96935613451165e-06, "loss": 0.5001, "step": 214500 },
    { "epoch": 2.47, "learning_rate": 8.87371360801867e-06, "loss": 0.5012, "step": 215000 },
    { "epoch": 2.47, "learning_rate": 8.77807108152569e-06, "loss": 0.5074, "step": 215500 },
    { "epoch": 2.48, "learning_rate": 8.682428555032711e-06, "loss": 0.506, "step": 216000 },
    { "epoch": 2.48, "learning_rate": 8.58678602853973e-06, "loss": 0.5022, "step": 216500 },
    { "epoch": 2.49, "learning_rate": 8.49114350204675e-06, "loss": 0.5072, "step": 217000 },
    { "epoch": 2.5, "learning_rate": 8.39550097555377e-06, "loss": 0.5027, "step": 217500 },
    { "epoch": 2.5, "learning_rate": 8.29985844906079e-06, "loss": 0.5044, "step": 218000 },
    { "epoch": 2.51, "learning_rate": 8.204215922567811e-06, "loss": 0.496, "step": 218500 },
    { "epoch": 2.51, "learning_rate": 8.10857339607483e-06, "loss": 0.4946, "step": 219000 },
    { "epoch": 2.52, "learning_rate": 8.012930869581852e-06, "loss": 0.5032, "step": 219500 },
    { "epoch": 2.52, "learning_rate": 7.917288343088871e-06, "loss": 0.4993, "step": 220000 },
    { "epoch": 2.53, "learning_rate": 7.821645816595892e-06, "loss": 0.5091, "step": 220500 },
    { "epoch": 2.54, "learning_rate": 7.726003290102913e-06, "loss": 0.4929, "step": 221000 },
    { "epoch": 2.54, "learning_rate": 7.630360763609932e-06, "loss": 0.4995, "step": 221500 },
    { "epoch": 2.55, "learning_rate": 7.5347182371169516e-06, "loss": 0.504, "step": 222000 },
    { "epoch": 2.55, "learning_rate": 7.439075710623972e-06, "loss": 0.5026, "step": 222500 },
    { "epoch": 2.56, "learning_rate": 7.343433184130992e-06, "loss": 0.5056, "step": 223000 },
    { "epoch": 2.57, "learning_rate": 7.247790657638012e-06, "loss": 0.4925, "step": 223500 },
    { "epoch": 2.57, "learning_rate": 7.152148131145032e-06, "loss": 0.4944, "step": 224000 },
    { "epoch": 2.58, "learning_rate": 7.0565056046520524e-06, "loss": 0.5029, "step": 224500 },
    { "epoch": 2.58, "learning_rate": 6.9608630781590735e-06, "loss": 0.5002, "step": 225000 },
    { "epoch": 2.59, "learning_rate": 6.865220551666094e-06, "loss": 0.5027, "step": 225500 },
    { "epoch": 2.59, "learning_rate": 6.769578025173114e-06, "loss": 0.5052, "step": 226000 },
    { "epoch": 2.6, "learning_rate": 6.673935498680134e-06, "loss": 0.5096, "step": 226500 },
    { "epoch": 2.61, "learning_rate": 6.578292972187154e-06, "loss": 0.5041, "step": 227000 },
    { "epoch": 2.61, "learning_rate": 6.482650445694174e-06, "loss": 0.4935, "step": 227500 },
    { "epoch": 2.62, "learning_rate": 6.387007919201194e-06, "loss": 0.5016, "step": 228000 },
    { "epoch": 2.62, "learning_rate": 6.291365392708214e-06, "loss": 0.4932, "step": 228500 },
    { "epoch": 2.63, "learning_rate": 6.195722866215234e-06, "loss": 0.4973, "step": 229000 },
    { "epoch": 2.63, "learning_rate": 6.100080339722254e-06, "loss": 0.4995, "step": 229500 },
    { "epoch": 2.64, "learning_rate": 6.004437813229274e-06, "loss": 0.4871, "step": 230000 },
    { "epoch": 2.65, "learning_rate": 5.9087952867362945e-06, "loss": 0.4925, "step": 230500 },
    { "epoch": 2.65, "learning_rate": 5.813152760243315e-06, "loss": 0.4979, "step": 231000 },
    { "epoch": 2.66, "learning_rate": 5.717510233750335e-06, "loss": 0.4907, "step": 231500 },
    { "epoch": 2.66, "learning_rate": 5.621867707257355e-06, "loss": 0.5001, "step": 232000 },
    { "epoch": 2.67, "learning_rate": 5.526225180764375e-06, "loss": 0.4919, "step": 232500 },
    { "epoch": 2.67, "learning_rate": 5.430582654271395e-06, "loss": 0.5017, "step": 233000 },
    { "epoch": 2.68, "learning_rate": 5.334940127778416e-06, "loss": 0.4891, "step": 233500 },
    { "epoch": 2.69, "learning_rate": 5.239297601285436e-06, "loss": 0.4921, "step": 234000 },
    { "epoch": 2.69, "learning_rate": 5.143655074792456e-06, "loss": 0.492, "step": 234500 },
    { "epoch": 2.7, "learning_rate": 5.048012548299476e-06, "loss": 0.509, "step": 235000 },
    { "epoch": 2.7, "learning_rate": 4.952370021806496e-06, "loss": 0.5025, "step": 235500 },
    { "epoch": 2.71, "learning_rate": 4.8567274953135165e-06, "loss": 0.4915, "step": 236000 },
    { "epoch": 2.71, "learning_rate": 4.761084968820537e-06, "loss": 0.4895, "step": 236500 },
    { "epoch": 2.72, "learning_rate": 4.665442442327557e-06, "loss": 0.4965, "step": 237000 },
    { "epoch": 2.73, "learning_rate": 4.569799915834577e-06, "loss": 0.4998, "step": 237500 },
    { "epoch": 2.73, "learning_rate": 4.474157389341597e-06, "loss": 0.5009, "step": 238000 },
    { "epoch": 2.74, "learning_rate": 4.378514862848617e-06, "loss": 0.4882, "step": 238500 },
    { "epoch": 2.74, "learning_rate": 4.2828723363556375e-06, "loss": 0.4896, "step": 239000 },
    { "epoch": 2.75, "learning_rate": 4.187229809862658e-06, "loss": 0.5006, "step": 239500 },
    { "epoch": 2.75, "learning_rate": 4.091587283369678e-06, "loss": 0.4912, "step": 240000 },
    { "epoch": 2.76, "learning_rate": 3.995944756876697e-06, "loss": 0.4915, "step": 240500 },
    { "epoch": 2.77, "learning_rate": 3.900302230383718e-06, "loss": 0.4993, "step": 241000 },
    { "epoch": 2.77, "learning_rate": 3.804659703890738e-06, "loss": 0.4902, "step": 241500 },
    { "epoch": 2.78, "learning_rate": 3.709017177397758e-06, "loss": 0.5008, "step": 242000 },
    { "epoch": 2.78, "learning_rate": 3.6133746509047787e-06, "loss": 0.4951, "step": 242500 },
    { "epoch": 2.79, "learning_rate": 3.517732124411799e-06, "loss": 0.5028, "step": 243000 },
    { "epoch": 2.79, "learning_rate": 3.4220895979188187e-06, "loss": 0.4866, "step": 243500 },
    { "epoch": 2.8, "learning_rate": 3.326447071425839e-06, "loss": 0.4928, "step": 244000 },
    { "epoch": 2.81, "learning_rate": 3.230804544932859e-06, "loss": 0.4833, "step": 244500 },
    { "epoch": 2.81, "learning_rate": 3.135162018439879e-06, "loss": 0.4892, "step": 245000 },
    { "epoch": 2.82, "learning_rate": 3.0395194919468998e-06, "loss": 0.4934, "step": 245500 },
    { "epoch": 2.82, "learning_rate": 2.9438769654539195e-06, "loss": 0.4917, "step": 246000 },
    { "epoch": 2.83, "learning_rate": 2.8482344389609397e-06, "loss": 0.501, "step": 246500 },
    { "epoch": 2.83, "learning_rate": 2.75259191246796e-06, "loss": 0.4844, "step": 247000 },
    { "epoch": 2.84, "learning_rate": 2.65694938597498e-06, "loss": 0.4945, "step": 247500 },
    { "epoch": 2.85, "learning_rate": 2.5613068594820002e-06, "loss": 0.4906, "step": 248000 },
    { "epoch": 2.85, "learning_rate": 2.4656643329890204e-06, "loss": 0.4933, "step": 248500 },
    { "epoch": 2.86, "learning_rate": 2.3700218064960406e-06, "loss": 0.4926, "step": 249000 },
    { "epoch": 2.86, "learning_rate": 2.2743792800030608e-06, "loss": 0.4892, "step": 249500 },
    { "epoch": 2.87, "learning_rate": 2.1787367535100805e-06, "loss": 0.4965, "step": 250000 },
    { "epoch": 2.88, "learning_rate": 2.083094227017101e-06, "loss": 0.4979, "step": 250500 },
    { "epoch": 2.88, "learning_rate": 1.9874517005241213e-06, "loss": 0.4997, "step": 251000 },
    { "epoch": 2.89, "learning_rate": 1.8918091740311412e-06, "loss": 0.4931, "step": 251500 },
    { "epoch": 2.89, "learning_rate": 1.7961666475381614e-06, "loss": 0.4998, "step": 252000 },
    { "epoch": 2.9, "learning_rate": 1.7005241210451818e-06, "loss": 0.4904, "step": 252500 },
    { "epoch": 2.9, "learning_rate": 1.6048815945522018e-06, "loss": 0.4939, "step": 253000 },
    { "epoch": 2.91, "learning_rate": 1.509239068059222e-06, "loss": 0.4875, "step": 253500 },
    { "epoch": 2.92, "learning_rate": 1.4135965415662421e-06, "loss": 0.4834, "step": 254000 },
    { "epoch": 2.92, "learning_rate": 1.3179540150732623e-06, "loss": 0.4835, "step": 254500 },
    { "epoch": 2.93, "learning_rate": 1.2223114885802825e-06, "loss": 0.4854, "step": 255000 },
    { "epoch": 2.93, "learning_rate": 1.1266689620873024e-06, "loss": 0.4905, "step": 255500 },
    { "epoch": 2.94, "learning_rate": 1.0310264355943226e-06, "loss": 0.4839, "step": 256000 },
    { "epoch": 2.94, "learning_rate": 9.353839091013429e-07, "loss": 0.4851, "step": 256500 },
    { "epoch": 2.95, "learning_rate": 8.397413826083631e-07, "loss": 0.4846, "step": 257000 },
    { "epoch": 2.96, "learning_rate": 7.440988561153832e-07, "loss": 0.4837, "step": 257500 },
    { "epoch": 2.96, "learning_rate": 6.484563296224033e-07, "loss": 0.4912, "step": 258000 },
    { "epoch": 2.97, "learning_rate": 5.528138031294235e-07, "loss": 0.4962, "step": 258500 },
    { "epoch": 2.97, "learning_rate": 4.571712766364437e-07, "loss": 0.4901, "step": 259000 },
    { "epoch": 2.98, "learning_rate": 3.6152875014346377e-07, "loss": 0.495, "step": 259500 },
    { "epoch": 2.98, "learning_rate": 2.65886223650484e-07, "loss": 0.4968, "step": 260000 },
    { "epoch": 2.99, "learning_rate": 1.7024369715750412e-07, "loss": 0.4846, "step": 260500 },
    { "epoch": 3.0, "learning_rate": 7.460117066452428e-08, "loss": 0.4792, "step": 261000 },
    { "epoch": 3.0, "step": 261390, "total_flos": 2.999406650108805e+17, "train_loss": 0.6627257539563876, "train_runtime": 78487.0606, "train_samples_per_second": 33.304, "train_steps_per_second": 3.33 }
  ],
  "max_steps": 261390,
  "num_train_epochs": 3,
  "total_flos": 2.999406650108805e+17,
  "trial_name": null,
  "trial_params": null
}