|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 28.999985039122695, |
|
"eval_steps": 500, |
|
"global_step": 969194, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.5e-05, |
|
"loss": 30.4144, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00015, |
|
"loss": 5.067, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.000225, |
|
"loss": 4.7223, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0003, |
|
"loss": 3.2436, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00029985008994603234, |
|
"loss": 2.6616, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00029970017989206475, |
|
"loss": 2.3832, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0002995502698380971, |
|
"loss": 2.206, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0002994003597841295, |
|
"loss": 2.1107, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0002992504497301619, |
|
"loss": 2.0405, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00029910053967619426, |
|
"loss": 2.0127, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0002989506296222266, |
|
"loss": 1.9404, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00029880071956825903, |
|
"loss": 1.9067, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0002986508095142914, |
|
"loss": 1.9101, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0002985008994603238, |
|
"loss": 1.8644, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0002983509894063562, |
|
"loss": 1.8294, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00029820107935238854, |
|
"loss": 1.811, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0002980511692984209, |
|
"loss": 1.8067, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0002979012592444533, |
|
"loss": 1.7794, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0002977513491904857, |
|
"loss": 1.7366, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00029760143913651804, |
|
"loss": 1.7349, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00029745152908255046, |
|
"loss": 1.7221, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0002973016190285828, |
|
"loss": 1.7171, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00029715170897461524, |
|
"loss": 1.7112, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0002970017989206476, |
|
"loss": 1.7109, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00029685188886667996, |
|
"loss": 1.695, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0002967019788127124, |
|
"loss": 1.6686, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00029655206875874474, |
|
"loss": 1.6822, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0002964021587047771, |
|
"loss": 1.686, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00029625224865080946, |
|
"loss": 1.6344, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0002961023385968419, |
|
"loss": 1.6272, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00029595242854287424, |
|
"loss": 1.6296, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.0002958025184889066, |
|
"loss": 1.6365, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.000295652608434939, |
|
"loss": 1.6434, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0002955026983809714, |
|
"loss": 1.6387, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.0002953527883270038, |
|
"loss": 1.6126, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00029520287827303616, |
|
"loss": 1.5874, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0002950529682190685, |
|
"loss": 1.6019, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00029490305816510094, |
|
"loss": 1.5916, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.0002947531481111333, |
|
"loss": 1.607, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00029460323805716567, |
|
"loss": 1.5834, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00029445332800319803, |
|
"loss": 1.5662, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00029430341794923045, |
|
"loss": 1.5921, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.0002941535078952628, |
|
"loss": 1.5616, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00029400359784129517, |
|
"loss": 1.5467, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.0002938536877873276, |
|
"loss": 1.5701, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00029370377773335995, |
|
"loss": 1.5428, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00029355386767939237, |
|
"loss": 1.5373, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00029340395762542473, |
|
"loss": 1.5422, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0002932540475714571, |
|
"loss": 1.5579, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0002931041375174895, |
|
"loss": 1.5478, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00029295422746352187, |
|
"loss": 1.5409, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00029280431740955423, |
|
"loss": 1.5254, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.0002926544073555866, |
|
"loss": 1.5163, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.000292504497301619, |
|
"loss": 1.5214, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00029235458724765137, |
|
"loss": 1.5224, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00029220467719368373, |
|
"loss": 1.52, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.00029205476713971615, |
|
"loss": 1.5289, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.0002919048570857485, |
|
"loss": 1.4892, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 0.00029175494703178093, |
|
"loss": 1.5054, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.0002916050369778133, |
|
"loss": 1.5075, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.0002914551269238457, |
|
"loss": 1.5178, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.00029130521686987807, |
|
"loss": 1.4731, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 0.00029115530681591043, |
|
"loss": 1.4926, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.0002910053967619428, |
|
"loss": 1.5006, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.0002908554867079752, |
|
"loss": 1.4902, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.0002907055766540076, |
|
"loss": 1.479, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.00029055566660003994, |
|
"loss": 1.4597, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.0002904057565460723, |
|
"loss": 1.4347, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.0002902558464921047, |
|
"loss": 1.4184, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.0002901059364381371, |
|
"loss": 1.427, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 0.0002899560263841695, |
|
"loss": 1.416, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 0.00028980611633020186, |
|
"loss": 1.3993, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.00028965620627623427, |
|
"loss": 1.4331, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.00028950629622226663, |
|
"loss": 1.4232, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 0.000289356386168299, |
|
"loss": 1.4256, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.00028920647611433136, |
|
"loss": 1.4239, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.0002890565660603638, |
|
"loss": 1.4312, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.00028890665600639614, |
|
"loss": 1.438, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.0002887567459524285, |
|
"loss": 1.4126, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.00028860683589846086, |
|
"loss": 1.4097, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.0002884569258444933, |
|
"loss": 1.4009, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.00028830701579052564, |
|
"loss": 1.4133, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00028815710573655806, |
|
"loss": 1.3991, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0002880071956825904, |
|
"loss": 1.4189, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00028785728562862284, |
|
"loss": 1.3955, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.0002877073755746552, |
|
"loss": 1.4144, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00028755746552068756, |
|
"loss": 1.4202, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.0002874075554667199, |
|
"loss": 1.3879, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00028725764541275234, |
|
"loss": 1.3883, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.0002871077353587847, |
|
"loss": 1.3962, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00028695782530481706, |
|
"loss": 1.4029, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00028680791525084943, |
|
"loss": 1.3723, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00028665800519688184, |
|
"loss": 1.3893, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.0002865080951429142, |
|
"loss": 1.375, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.0002863581850889466, |
|
"loss": 1.3956, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.000286208275034979, |
|
"loss": 1.3975, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.0002860583649810114, |
|
"loss": 1.4012, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00028590845492704376, |
|
"loss": 1.3918, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.0002857585448730761, |
|
"loss": 1.3892, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.0002856086348191085, |
|
"loss": 1.4061, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.0002854587247651409, |
|
"loss": 1.4248, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00028530881471117327, |
|
"loss": 1.4064, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00028515890465720563, |
|
"loss": 1.4046, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.000285008994603238, |
|
"loss": 1.3945, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.0002848590845492704, |
|
"loss": 1.3688, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00028470917449530277, |
|
"loss": 1.413, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.0002845592644413352, |
|
"loss": 1.3868, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00028440935438736755, |
|
"loss": 1.3916, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00028425944433339997, |
|
"loss": 1.3999, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 0.00028410953427943233, |
|
"loss": 1.3678, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 0.0002839596242254647, |
|
"loss": 1.3921, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 0.0002838097141714971, |
|
"loss": 1.3744, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 0.00028365980411752947, |
|
"loss": 1.39, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 0.00028350989406356183, |
|
"loss": 1.3837, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 0.0002833599840095942, |
|
"loss": 1.3628, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 0.0002832100739556266, |
|
"loss": 1.3798, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 0.00028306016390165897, |
|
"loss": 1.3719, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 0.00028291025384769133, |
|
"loss": 1.3689, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 0.00028276034379372375, |
|
"loss": 1.393, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 0.0002826104337397561, |
|
"loss": 1.3628, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 0.00028246052368578853, |
|
"loss": 1.3739, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 0.0002823106136318209, |
|
"loss": 1.3624, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 0.00028216070357785325, |
|
"loss": 1.3475, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 0.00028201079352388567, |
|
"loss": 1.3492, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 0.00028186088346991803, |
|
"loss": 1.3637, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 0.0002817109734159504, |
|
"loss": 1.3515, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 0.00028156106336198276, |
|
"loss": 1.3759, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 0.0002814111533080152, |
|
"loss": 1.368, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 0.00028126124325404754, |
|
"loss": 1.3684, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 0.0002811113332000799, |
|
"loss": 1.3591, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 0.0002809614231461123, |
|
"loss": 1.3438, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 0.0002808115130921447, |
|
"loss": 1.3392, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 0.0002806616030381771, |
|
"loss": 1.3517, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 0.00028051169298420946, |
|
"loss": 1.3496, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 0.0002803617829302418, |
|
"loss": 1.2833, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 0.00028021187287627424, |
|
"loss": 1.3091, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 0.0002800619628223066, |
|
"loss": 1.2997, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 0.00027991205276833896, |
|
"loss": 1.2987, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 0.0002797621427143713, |
|
"loss": 1.2982, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 0.00027961223266040374, |
|
"loss": 1.2895, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 0.0002794623226064361, |
|
"loss": 1.3113, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 0.00027931241255246846, |
|
"loss": 1.3079, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 0.0002791625024985009, |
|
"loss": 1.3029, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 0.00027901259244453324, |
|
"loss": 1.3064, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 0.00027886268239056566, |
|
"loss": 1.2945, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 0.000278712772336598, |
|
"loss": 1.3019, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 0.0002785628622826304, |
|
"loss": 1.3151, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 0.0002784129522286628, |
|
"loss": 1.3004, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 0.00027826304217469516, |
|
"loss": 1.2726, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 0.0002781131321207275, |
|
"loss": 1.3012, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 0.0002779632220667599, |
|
"loss": 1.2726, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 0.0002778133120127923, |
|
"loss": 1.2873, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 0.00027766340195882467, |
|
"loss": 1.3153, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 0.0002775134919048571, |
|
"loss": 1.3091, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 0.00027736358185088944, |
|
"loss": 1.3058, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 0.0002772136717969218, |
|
"loss": 1.2905, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 0.0002770637617429542, |
|
"loss": 1.2976, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 0.0002769138516889866, |
|
"loss": 1.3113, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 0.00027676394163501895, |
|
"loss": 1.3111, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 0.00027661403158105136, |
|
"loss": 1.27, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 0.0002764641215270837, |
|
"loss": 1.2939, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 0.0002763142114731161, |
|
"loss": 1.3067, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 0.00027616430141914845, |
|
"loss": 1.2816, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 0.00027601439136518087, |
|
"loss": 1.2998, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 0.00027586448131121323, |
|
"loss": 1.2766, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 0.00027571457125724565, |
|
"loss": 1.2851, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 0.000275564661203278, |
|
"loss": 1.2945, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 0.00027541475114931037, |
|
"loss": 1.2823, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 0.0002752648410953428, |
|
"loss": 1.2745, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 0.00027511493104137515, |
|
"loss": 1.2625, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 0.00027496502098740757, |
|
"loss": 1.2775, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 0.00027481511093343993, |
|
"loss": 1.2713, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 0.0002746652008794723, |
|
"loss": 1.2873, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 0.00027451529082550465, |
|
"loss": 1.2731, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 0.00027436538077153707, |
|
"loss": 1.2847, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 0.00027421547071756943, |
|
"loss": 1.2885, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 0.0002740655606636018, |
|
"loss": 1.2725, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 0.0002739156506096342, |
|
"loss": 1.2936, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 0.00027376574055566657, |
|
"loss": 1.2674, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 0.000273615830501699, |
|
"loss": 1.2706, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 0.00027346592044773135, |
|
"loss": 1.2623, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 0.0002733160103937637, |
|
"loss": 1.2824, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 0.00027316610033979613, |
|
"loss": 1.2823, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 0.0002730161902858285, |
|
"loss": 1.2593, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 0.00027286628023186085, |
|
"loss": 1.2816, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 0.0002727163701778932, |
|
"loss": 1.2774, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 0.00027256646012392563, |
|
"loss": 1.2799, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 0.000272416550069958, |
|
"loss": 1.2415, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 0.00027226664001599036, |
|
"loss": 1.265, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 0.0002721167299620228, |
|
"loss": 1.2706, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 0.00027196681990805514, |
|
"loss": 1.2417, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 0.00027181690985408755, |
|
"loss": 1.271, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 0.0002716669998001199, |
|
"loss": 1.248, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 0.0002715170897461523, |
|
"loss": 1.2573, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 0.0002713671796921847, |
|
"loss": 1.2717, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 0.00027121726963821706, |
|
"loss": 1.2769, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 0.0002710673595842494, |
|
"loss": 1.2758, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 0.0002709174495302818, |
|
"loss": 1.2814, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 0.0002707675394763142, |
|
"loss": 1.262, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 0.00027061762942234656, |
|
"loss": 1.2618, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 0.0002704677193683789, |
|
"loss": 1.2202, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 0.00027031780931441134, |
|
"loss": 1.2024, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 0.0002701678992604437, |
|
"loss": 1.1845, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 0.0002700179892064761, |
|
"loss": 1.219, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 0.0002698680791525085, |
|
"loss": 1.1941, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 0.00026971816909854084, |
|
"loss": 1.2169, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 0.00026956825904457326, |
|
"loss": 1.2369, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 0.0002694183489906056, |
|
"loss": 1.2122, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 0.000269268438936638, |
|
"loss": 1.2156, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 0.00026911852888267035, |
|
"loss": 1.209, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 0.00026896861882870276, |
|
"loss": 1.2089, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 0.0002688187087747351, |
|
"loss": 1.2065, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 0.0002686687987207675, |
|
"loss": 1.2393, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 0.0002685188886667999, |
|
"loss": 1.212, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 0.00026836897861283227, |
|
"loss": 1.2137, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 0.0002682190685588647, |
|
"loss": 1.2214, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 0.00026806915850489704, |
|
"loss": 1.2086, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 0.0002679192484509294, |
|
"loss": 1.2069, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 0.0002677693383969618, |
|
"loss": 1.2314, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 0.0002676194283429942, |
|
"loss": 1.211, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 0.00026746951828902655, |
|
"loss": 1.257, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 0.00026731960823505896, |
|
"loss": 1.1988, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 0.0002671696981810913, |
|
"loss": 1.2205, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 0.0002670197881271237, |
|
"loss": 1.2193, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 0.00026686987807315605, |
|
"loss": 1.2047, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 0.00026671996801918847, |
|
"loss": 1.2171, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 0.00026657005796522083, |
|
"loss": 1.2197, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 0.00026642014791125325, |
|
"loss": 1.244, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 0.0002662702378572856, |
|
"loss": 1.2117, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 0.000266120327803318, |
|
"loss": 1.2069, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 0.0002659704177493504, |
|
"loss": 1.2264, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 0.00026582050769538275, |
|
"loss": 1.2018, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 0.0002656705976414151, |
|
"loss": 1.2016, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 0.00026552068758744753, |
|
"loss": 1.2417, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 0.0002653707775334799, |
|
"loss": 1.2086, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 0.00026522086747951225, |
|
"loss": 1.2082, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 0.0002650709574255446, |
|
"loss": 1.2135, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 0.00026492104737157703, |
|
"loss": 1.2326, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 0.0002647711373176094, |
|
"loss": 1.1959, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 0.0002646212272636418, |
|
"loss": 1.2416, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 0.0002644713172096742, |
|
"loss": 1.23, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 0.0002643214071557066, |
|
"loss": 1.2089, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 0.00026417149710173895, |
|
"loss": 1.2097, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 0.0002640215870477713, |
|
"loss": 1.1949, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 0.0002638716769938037, |
|
"loss": 1.2055, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 0.0002637217669398361, |
|
"loss": 1.2159, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 0.00026357185688586846, |
|
"loss": 1.2248, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 0.0002634219468319008, |
|
"loss": 1.2073, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 0.0002632720367779332, |
|
"loss": 1.1995, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 0.0002631221267239656, |
|
"loss": 1.2039, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 0.00026297221666999796, |
|
"loss": 1.1936, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 0.0002628223066160304, |
|
"loss": 1.2106, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 0.00026267239656206274, |
|
"loss": 1.2228, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 0.00026252248650809515, |
|
"loss": 1.219, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 0.0002623725764541275, |
|
"loss": 1.2075, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 0.0002622226664001599, |
|
"loss": 1.1894, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 0.00026207275634619224, |
|
"loss": 1.2125, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 0.00026192284629222466, |
|
"loss": 1.1979, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 0.000261772936238257, |
|
"loss": 1.1828, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 0.0002616230261842894, |
|
"loss": 1.1993, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 0.00026147311613032174, |
|
"loss": 1.2015, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 0.00026132320607635416, |
|
"loss": 1.1925, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 0.0002611732960223865, |
|
"loss": 1.2037, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 0.00026102338596841894, |
|
"loss": 1.2205, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 0.0002608734759144513, |
|
"loss": 1.1913, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 0.0002607235658604837, |
|
"loss": 1.2173, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 0.0002605736558065161, |
|
"loss": 1.1957, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 0.00026042374575254844, |
|
"loss": 1.1741, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 0.0002602738356985808, |
|
"loss": 1.1411, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 0.0002601239256446132, |
|
"loss": 1.1493, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 0.0002599740155906456, |
|
"loss": 1.133, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 0.00025982410553667795, |
|
"loss": 1.1409, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 0.00025967419548271036, |
|
"loss": 1.139, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 0.0002595242854287427, |
|
"loss": 1.1655, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 0.0002593743753747751, |
|
"loss": 1.1608, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 0.0002592244653208075, |
|
"loss": 1.1603, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 0.00025907455526683987, |
|
"loss": 1.1399, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 0.0002589246452128723, |
|
"loss": 1.1543, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 0.00025877473515890464, |
|
"loss": 1.144, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 0.000258624825104937, |
|
"loss": 1.1363, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 0.0002584749150509694, |
|
"loss": 1.1544, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 0.0002583250049970018, |
|
"loss": 1.169, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 0.00025817509494303415, |
|
"loss": 1.1481, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 0.0002580251848890665, |
|
"loss": 1.1531, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 0.0002578752748350989, |
|
"loss": 1.1625, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 0.0002577253647811313, |
|
"loss": 1.1779, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 0.00025757545472716365, |
|
"loss": 1.1435, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 0.00025742554467319607, |
|
"loss": 1.151, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 0.00025727563461922843, |
|
"loss": 1.1468, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 0.00025712572456526085, |
|
"loss": 1.1577, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 0.0002569758145112932, |
|
"loss": 1.1787, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 0.00025682590445732557, |
|
"loss": 1.1418, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 0.000256675994403358, |
|
"loss": 1.1805, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 0.00025652608434939035, |
|
"loss": 1.1471, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 0.0002563761742954227, |
|
"loss": 1.1518, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 0.0002562262642414551, |
|
"loss": 1.1567, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 0.0002560763541874875, |
|
"loss": 1.1673, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 0.00025592644413351985, |
|
"loss": 1.1741, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 0.0002557765340795522, |
|
"loss": 1.1622, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 0.00025562662402558463, |
|
"loss": 1.1753, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 0.000255476713971617, |
|
"loss": 1.1571, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 0.0002553268039176494, |
|
"loss": 1.1766, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 0.0002551768938636818, |
|
"loss": 1.1542, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 0.00025502698380971414, |
|
"loss": 1.1719, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 0.00025487707375574655, |
|
"loss": 1.1891, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 0.0002547271637017789, |
|
"loss": 1.1458, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 0.0002545772536478113, |
|
"loss": 1.1779, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 0.00025442734359384364, |
|
"loss": 1.1506, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 0.00025427743353987606, |
|
"loss": 1.1356, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 0.0002541275234859084, |
|
"loss": 1.1545, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 0.00025397761343194083, |
|
"loss": 1.1592, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 0.0002538277033779732, |
|
"loss": 1.1687, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 0.00025367779332400556, |
|
"loss": 1.165, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 0.000253527883270038, |
|
"loss": 1.1673, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 0.00025337797321607034, |
|
"loss": 1.1652, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 0.0002532280631621027, |
|
"loss": 1.1822, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 0.0002530781531081351, |
|
"loss": 1.1549, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 0.0002529282430541675, |
|
"loss": 1.1391, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 0.00025277833300019984, |
|
"loss": 1.1788, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 0.0002526284229462322, |
|
"loss": 1.1718, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 0.0002524785128922646, |
|
"loss": 1.1428, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 0.000252328602838297, |
|
"loss": 1.1427, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 0.0002521786927843294, |
|
"loss": 1.1297, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 0.00025202878273036176, |
|
"loss": 1.1884, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 0.0002518788726763941, |
|
"loss": 1.1633, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 0.00025172896262242654, |
|
"loss": 1.167, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 0.0002515790525684589, |
|
"loss": 1.1662, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 0.0002514291425144913, |
|
"loss": 1.167, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 0.0002512792324605237, |
|
"loss": 1.1626, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 0.00025112932240655604, |
|
"loss": 1.1482, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 0.0002509794123525884, |
|
"loss": 1.1431, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 0.0002508295022986208, |
|
"loss": 1.1669, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 0.0002506795922446532, |
|
"loss": 1.1443, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 0.00025052968219068555, |
|
"loss": 1.1483, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 0.00025037977213671796, |
|
"loss": 1.1247, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 0.0002502298620827503, |
|
"loss": 1.1053, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 0.00025007995202878274, |
|
"loss": 1.0971, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 0.0002499300419748151, |
|
"loss": 1.0936, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 0.00024978013192084747, |
|
"loss": 1.1065, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 0.0002496302218668799, |
|
"loss": 1.1023, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 0.00024948031181291225, |
|
"loss": 1.0882, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 0.0002493304017589446, |
|
"loss": 1.105, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 0.00024918049170497697, |
|
"loss": 1.0959, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 0.0002490305816510094, |
|
"loss": 1.1116, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 0.00024888067159704175, |
|
"loss": 1.1049, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 0.0002487307615430741, |
|
"loss": 1.085, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 0.00024858085148910653, |
|
"loss": 1.093, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 0.0002484309414351389, |
|
"loss": 1.1143, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 0.0002482810313811713, |
|
"loss": 1.101, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 0.00024813112132720367, |
|
"loss": 1.115, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 0.00024798121127323603, |
|
"loss": 1.096, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 0.00024783130121926845, |
|
"loss": 1.0904, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 0.0002476813911653008, |
|
"loss": 1.1095, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 0.00024753148111133317, |
|
"loss": 1.137, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 0.00024738157105736553, |
|
"loss": 1.1363, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 0.00024723166100339795, |
|
"loss": 1.1347, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 0.0002470817509494303, |
|
"loss": 1.0815, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 0.0002469318408954627, |
|
"loss": 1.1037, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 0.0002467819308414951, |
|
"loss": 1.1074, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 0.00024663202078752745, |
|
"loss": 1.1297, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 0.00024648211073355987, |
|
"loss": 1.1063, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 0.00024633220067959223, |
|
"loss": 1.1097, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 0.0002461822906256246, |
|
"loss": 1.1157, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 0.000246032380571657, |
|
"loss": 1.1281, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 0.0002458824705176894, |
|
"loss": 1.098, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 0.00024573256046372174, |
|
"loss": 1.1329, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 0.0002455826504097541, |
|
"loss": 1.1089, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 0.0002454327403557865, |
|
"loss": 1.105, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 0.0002452828303018189, |
|
"loss": 1.1242, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 0.00024513292024785124, |
|
"loss": 1.0894, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 0.00024498301019388366, |
|
"loss": 1.1279, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 0.000244833100139916, |
|
"loss": 1.1087, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 0.00024468319008594843, |
|
"loss": 1.1328, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 0.0002445332800319808, |
|
"loss": 1.1211, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 0.00024438336997801316, |
|
"loss": 1.1238, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 0.0002442334599240456, |
|
"loss": 1.1188, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 0.00024408354987007794, |
|
"loss": 1.1226, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 0.0002439336398161103, |
|
"loss": 1.0867, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 0.0002437837297621427, |
|
"loss": 1.1203, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 0.00024363381970817508, |
|
"loss": 1.1332, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 0.00024348390965420744, |
|
"loss": 1.1316, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 0.00024333399960023983, |
|
"loss": 1.1005, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 0.00024318408954627222, |
|
"loss": 1.1159, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 0.0002430341794923046, |
|
"loss": 1.1314, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 0.00024288426943833697, |
|
"loss": 1.1039, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 0.00024273435938436936, |
|
"loss": 1.1146, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 0.00024258444933040175, |
|
"loss": 1.124, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 0.0002424345392764341, |
|
"loss": 1.0976, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 0.0002422846292224665, |
|
"loss": 1.1254, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 0.00024213471916849886, |
|
"loss": 1.1296, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 0.00024198480911453128, |
|
"loss": 1.1354, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 0.00024183489906056364, |
|
"loss": 1.1201, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 0.00024168498900659603, |
|
"loss": 1.1199, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 0.0002415350789526284, |
|
"loss": 1.1305, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 0.00024138516889866078, |
|
"loss": 1.1289, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 0.00024123525884469317, |
|
"loss": 1.1193, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 0.00024108534879072554, |
|
"loss": 1.127, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 0.00024093543873675793, |
|
"loss": 1.1204, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 0.00024078552868279031, |
|
"loss": 1.1143, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 0.00024063561862882268, |
|
"loss": 1.1198, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 0.00024048570857485507, |
|
"loss": 1.1097, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 0.00024033579852088743, |
|
"loss": 1.0679, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 0.00024018588846691985, |
|
"loss": 1.0696, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 0.0002400359784129522, |
|
"loss": 1.0768, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 0.0002398860683589846, |
|
"loss": 1.0765, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 0.00023973615830501696, |
|
"loss": 1.0638, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 0.00023958624825104935, |
|
"loss": 1.0645, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 0.00023943633819708174, |
|
"loss": 1.077, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 0.0002392864281431141, |
|
"loss": 1.0588, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 0.0002391365180891465, |
|
"loss": 1.0927, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 0.00023898660803517888, |
|
"loss": 1.0748, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 0.00023883669798121127, |
|
"loss": 1.0775, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 0.00023868678792724363, |
|
"loss": 1.0952, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 0.000238536877873276, |
|
"loss": 1.0825, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 0.0002383869678193084, |
|
"loss": 1.0919, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 0.00023823705776534077, |
|
"loss": 1.08, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 0.00023808714771137316, |
|
"loss": 1.0646, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 0.00023793723765740552, |
|
"loss": 1.0872, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 0.00023778732760343794, |
|
"loss": 1.071, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 0.0002376374175494703, |
|
"loss": 1.0879, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 0.00023748750749550266, |
|
"loss": 1.1079, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 0.00023733759744153505, |
|
"loss": 1.069, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 0.00023718768738756744, |
|
"loss": 1.0817, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 0.00023703777733359983, |
|
"loss": 1.1054, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 0.0002368878672796322, |
|
"loss": 1.0796, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 0.00023673795722566456, |
|
"loss": 1.0909, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 0.00023658804717169697, |
|
"loss": 1.0552, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 0.00023643813711772934, |
|
"loss": 1.0737, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 0.00023628822706376173, |
|
"loss": 1.0872, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 0.0002361383170097941, |
|
"loss": 1.0796, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 0.0002359884069558265, |
|
"loss": 1.0838, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 0.00023583849690185887, |
|
"loss": 1.0849, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 0.00023568858684789123, |
|
"loss": 1.0451, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 0.00023553867679392362, |
|
"loss": 1.1126, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 0.000235388766739956, |
|
"loss": 1.0708, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 0.0002352388566859884, |
|
"loss": 1.0912, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 0.00023508894663202076, |
|
"loss": 1.0902, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 0.00023493903657805318, |
|
"loss": 1.0682, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 0.00023478912652408554, |
|
"loss": 1.0921, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 0.0002346392164701179, |
|
"loss": 1.0653, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 0.0002344893064161503, |
|
"loss": 1.1109, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 0.00023433939636218268, |
|
"loss": 1.0818, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 0.00023418948630821507, |
|
"loss": 1.0729, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 0.00023403957625424743, |
|
"loss": 1.08, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 0.0002338896662002798, |
|
"loss": 1.0888, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 0.0002337397561463122, |
|
"loss": 1.1045, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 0.00023358984609234457, |
|
"loss": 1.0762, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 0.00023343993603837696, |
|
"loss": 1.0806, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 0.00023329002598440932, |
|
"loss": 1.1107, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 0.00023314011593044174, |
|
"loss": 1.0842, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 0.0002329902058764741, |
|
"loss": 1.0825, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 0.00023284029582250646, |
|
"loss": 1.1027, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 0.00023269038576853885, |
|
"loss": 1.0718, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 0.00023254047571457124, |
|
"loss": 1.1151, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 0.00023239056566060363, |
|
"loss": 1.0906, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 0.000232240655606636, |
|
"loss": 1.1104, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 0.00023209074555266836, |
|
"loss": 1.0796, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 0.00023194083549870077, |
|
"loss": 1.0795, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 0.00023179092544473314, |
|
"loss": 1.0719, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 0.00023164101539076553, |
|
"loss": 1.073, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 0.0002314911053367979, |
|
"loss": 1.065, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 0.0002313411952828303, |
|
"loss": 1.1047, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 0.00023119128522886267, |
|
"loss": 1.0833, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 0.00023104137517489503, |
|
"loss": 1.0887, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 0.00023089146512092742, |
|
"loss": 1.0704, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 0.0002307415550669598, |
|
"loss": 1.0901, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 0.0002305916450129922, |
|
"loss": 1.074, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 0.00023044173495902456, |
|
"loss": 1.068, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 0.00023029182490505692, |
|
"loss": 1.0309, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 0.00023014191485108934, |
|
"loss": 1.0351, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 0.0002299920047971217, |
|
"loss": 1.0033, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 0.0002298420947431541, |
|
"loss": 1.0328, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 0.00022969218468918645, |
|
"loss": 1.0369, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 0.00022954227463521887, |
|
"loss": 1.0205, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 0.00022939236458125123, |
|
"loss": 1.0258, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 0.0002292424545272836, |
|
"loss": 1.042, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 0.00022909254447331598, |
|
"loss": 1.0626, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 0.00022894263441934837, |
|
"loss": 1.0357, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 0.00022879272436538076, |
|
"loss": 1.0716, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 0.00022864281431141312, |
|
"loss": 1.0499, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 0.00022849290425744549, |
|
"loss": 1.0401, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 0.0002283429942034779, |
|
"loss": 1.0554, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 0.00022819308414951027, |
|
"loss": 1.0459, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 0.00022804317409554265, |
|
"loss": 1.0593, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 0.00022789326404157502, |
|
"loss": 1.0419, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 0.00022774335398760743, |
|
"loss": 1.0745, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 0.0002275934439336398, |
|
"loss": 1.0466, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 0.00022744353387967216, |
|
"loss": 1.0444, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 0.00022729362382570457, |
|
"loss": 1.0562, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 0.00022714371377173694, |
|
"loss": 1.0787, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 0.00022699380371776933, |
|
"loss": 1.047, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 0.0002268438936638017, |
|
"loss": 1.0636, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 0.0002266939836098341, |
|
"loss": 1.0331, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 0.00022654407355586647, |
|
"loss": 1.055, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 0.00022639416350189883, |
|
"loss": 1.0523, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 0.00022624425344793122, |
|
"loss": 1.0567, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 0.0002260943433939636, |
|
"loss": 1.0808, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 0.000225944433339996, |
|
"loss": 1.0701, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 0.00022579452328602836, |
|
"loss": 1.0627, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 0.00022564461323206072, |
|
"loss": 1.0374, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 0.00022549470317809314, |
|
"loss": 1.052, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 0.0002253447931241255, |
|
"loss": 1.0602, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 0.0002251948830701579, |
|
"loss": 1.0607, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 0.00022504497301619025, |
|
"loss": 1.04, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 0.00022489506296222267, |
|
"loss": 1.0593, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 0.00022474515290825503, |
|
"loss": 1.0523, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 0.0002245952428542874, |
|
"loss": 1.039, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 0.00022444533280031978, |
|
"loss": 1.0631, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 0.00022429542274635217, |
|
"loss": 1.0646, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 0.00022414551269238456, |
|
"loss": 1.0412, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 0.00022399560263841692, |
|
"loss": 1.0458, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 0.0002238456925844493, |
|
"loss": 1.0551, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 0.0002236957825304817, |
|
"loss": 1.0807, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 0.00022354587247651407, |
|
"loss": 1.0642, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 0.00022339596242254645, |
|
"loss": 1.0652, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 0.00022324605236857882, |
|
"loss": 1.0507, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 0.00022309614231461123, |
|
"loss": 1.059, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 0.0002229462322606436, |
|
"loss": 1.0689, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 0.00022279632220667596, |
|
"loss": 1.0624, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 0.00022264641215270835, |
|
"loss": 1.0385, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 0.00022249650209874074, |
|
"loss": 1.0617, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 0.00022234659204477313, |
|
"loss": 1.0405, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 0.0002221966819908055, |
|
"loss": 1.0557, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 0.00022204677193683785, |
|
"loss": 1.0661, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 0.00022189686188287027, |
|
"loss": 1.0588, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 0.00022174695182890263, |
|
"loss": 1.0534, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 0.00022159704177493502, |
|
"loss": 1.0431, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 0.00022144713172096738, |
|
"loss": 1.0428, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 0.0002212972216669998, |
|
"loss": 1.0493, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 0.00022114731161303216, |
|
"loss": 1.0231, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 0.00022099740155906452, |
|
"loss": 1.0692, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 0.0002208474915050969, |
|
"loss": 1.0527, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 0.0002206975814511293, |
|
"loss": 1.0426, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 0.0002205476713971617, |
|
"loss": 1.0317, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 0.00022039776134319405, |
|
"loss": 1.0274, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 0.00022024785128922644, |
|
"loss": 1.0115, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 0.00022009794123525883, |
|
"loss": 1.0309, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 0.0002199480311812912, |
|
"loss": 0.9979, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 0.00021979812112732358, |
|
"loss": 1.0112, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 0.00021964821107335595, |
|
"loss": 0.9851, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 0.00021949830101938836, |
|
"loss": 1.0091, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 0.00021934839096542072, |
|
"loss": 1.0126, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 0.00021919848091145311, |
|
"loss": 0.9994, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 0.0002190485708574855, |
|
"loss": 1.0088, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 0.00021889866080351787, |
|
"loss": 1.0244, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 0.00021874875074955025, |
|
"loss": 1.0268, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 0.00021859884069558262, |
|
"loss": 1.0217, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 0.00021844893064161503, |
|
"loss": 1.0068, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 0.0002182990205876474, |
|
"loss": 1.0047, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 0.00021814911053367979, |
|
"loss": 0.9962, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 0.00021799920047971215, |
|
"loss": 1.0165, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 0.00021784929042574454, |
|
"loss": 1.028, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 0.00021769938037177693, |
|
"loss": 1.0038, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 0.0002175494703178093, |
|
"loss": 1.0408, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 0.00021739956026384168, |
|
"loss": 1.0173, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 0.00021724965020987407, |
|
"loss": 1.0205, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 0.00021709974015590643, |
|
"loss": 1.0113, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 0.00021694983010193882, |
|
"loss": 0.9955, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 0.00021679992004797118, |
|
"loss": 1.0178, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 0.0002166500099940036, |
|
"loss": 1.0103, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 0.00021650009994003596, |
|
"loss": 1.0114, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 0.00021635018988606835, |
|
"loss": 1.0205, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 0.0002162002798321007, |
|
"loss": 1.0345, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 0.0002160503697781331, |
|
"loss": 1.0241, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 0.0002159004597241655, |
|
"loss": 1.0025, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 0.00021575054967019785, |
|
"loss": 1.0134, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 0.00021560063961623024, |
|
"loss": 1.0285, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 0.00021545072956226263, |
|
"loss": 1.0104, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 0.00021530081950829502, |
|
"loss": 1.0321, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 0.00021515090945432738, |
|
"loss": 1.0247, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 0.00021500099940035975, |
|
"loss": 1.0604, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 0.00021485108934639216, |
|
"loss": 1.0477, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 0.00021470117929242452, |
|
"loss": 1.0212, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 0.00021455126923845691, |
|
"loss": 0.9966, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 0.00021440135918448928, |
|
"loss": 1.0401, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 0.0002142514491305217, |
|
"loss": 1.0141, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 0.00021410153907655405, |
|
"loss": 1.0218, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 0.00021395162902258642, |
|
"loss": 1.0181, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 0.0002138017189686188, |
|
"loss": 1.0322, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 0.0002136518089146512, |
|
"loss": 1.0203, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 0.00021350189886068359, |
|
"loss": 1.0237, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 0.00021335198880671595, |
|
"loss": 1.0193, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 0.0002132020787527483, |
|
"loss": 1.0192, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 0.00021305216869878073, |
|
"loss": 1.0402, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 0.0002129022586448131, |
|
"loss": 1.0278, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 0.00021275234859084548, |
|
"loss": 1.0245, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 0.00021260243853687784, |
|
"loss": 1.0114, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 0.00021245252848291026, |
|
"loss": 1.0531, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"learning_rate": 0.00021230261842894262, |
|
"loss": 1.0417, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 0.00021215270837497498, |
|
"loss": 1.044, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 0.00021200279832100737, |
|
"loss": 1.042, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 0.00021185288826703976, |
|
"loss": 1.023, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 0.00021170297821307215, |
|
"loss": 1.0228, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 0.0002115530681591045, |
|
"loss": 1.0452, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 0.00021140315810513687, |
|
"loss": 1.0476, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 0.0002112532480511693, |
|
"loss": 1.0348, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 0.00021110333799720165, |
|
"loss": 1.0083, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 0.00021095342794323404, |
|
"loss": 1.0312, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 0.00021080351788926643, |
|
"loss": 1.0221, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"learning_rate": 0.00021065360783529882, |
|
"loss": 1.0182, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 0.00021050369778133118, |
|
"loss": 1.0241, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 0.00021035378772736355, |
|
"loss": 0.9948, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 0.00021020387767339596, |
|
"loss": 0.9737, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 0.00021005396761942832, |
|
"loss": 0.974, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 0.00020990405756546071, |
|
"loss": 0.9561, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 0.00020975414751149308, |
|
"loss": 0.9664, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 0.0002096042374575255, |
|
"loss": 0.9937, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 0.00020945432740355786, |
|
"loss": 0.9755, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 0.00020930441734959022, |
|
"loss": 0.9804, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 0.0002091545072956226, |
|
"loss": 0.9867, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"learning_rate": 0.000209004597241655, |
|
"loss": 0.9938, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 0.00020885468718768739, |
|
"loss": 0.9943, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 0.00020870477713371975, |
|
"loss": 0.9824, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 0.0002085548670797521, |
|
"loss": 0.9998, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 0.00020840495702578453, |
|
"loss": 0.9841, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 0.0002082550469718169, |
|
"loss": 0.9928, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 0.00020810513691784928, |
|
"loss": 0.9668, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 0.00020795522686388164, |
|
"loss": 0.9988, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 0.00020780531680991406, |
|
"loss": 0.9962, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 0.00020765540675594642, |
|
"loss": 0.9967, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 0.00020750549670197878, |
|
"loss": 0.9969, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 0.00020735558664801117, |
|
"loss": 0.9816, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 0.00020720567659404356, |
|
"loss": 0.9854, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 0.00020705576654007595, |
|
"loss": 0.9915, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 0.0002069058564861083, |
|
"loss": 1.0018, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 0.00020675594643214067, |
|
"loss": 0.9815, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 0.0002066060363781731, |
|
"loss": 0.9979, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 0.00020645612632420545, |
|
"loss": 1.0003, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 0.00020630621627023784, |
|
"loss": 1.012, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 0.0002061563062162702, |
|
"loss": 0.9768, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 0.00020600639616230262, |
|
"loss": 1.025, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 0.00020585648610833498, |
|
"loss": 0.9873, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 0.00020570657605436735, |
|
"loss": 1.0034, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 0.00020555666600039974, |
|
"loss": 1.0018, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 0.00020540675594643212, |
|
"loss": 0.9662, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"learning_rate": 0.00020525684589246451, |
|
"loss": 0.9955, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 0.00020510693583849688, |
|
"loss": 0.9955, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 0.00020495702578452924, |
|
"loss": 0.9984, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"learning_rate": 0.00020480711573056166, |
|
"loss": 0.9734, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 0.00020465720567659402, |
|
"loss": 0.9852, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 0.0002045072956226264, |
|
"loss": 1.0189, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 0.00020435738556865877, |
|
"loss": 1.0255, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 0.00020420747551469119, |
|
"loss": 0.9947, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 0.00020405756546072355, |
|
"loss": 0.9939, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 0.0002039076554067559, |
|
"loss": 0.9875, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 0.0002037577453527883, |
|
"loss": 1.0007, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 0.0002036078352988207, |
|
"loss": 1.0034, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 0.00020345792524485308, |
|
"loss": 1.0097, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 0.00020330801519088544, |
|
"loss": 0.9848, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 0.00020315810513691786, |
|
"loss": 1.0256, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 0.00020300819508295022, |
|
"loss": 0.9808, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 0.00020285828502898258, |
|
"loss": 1.0022, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 0.00020270837497501497, |
|
"loss": 1.0201, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 0.00020255846492104736, |
|
"loss": 0.959, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 0.00020240855486707975, |
|
"loss": 0.9806, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 0.0002022586448131121, |
|
"loss": 1.0115, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 0.00020210873475914447, |
|
"loss": 0.9884, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 0.0002019588247051769, |
|
"loss": 1.0163, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 0.00020180891465120925, |
|
"loss": 1.011, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 0.00020165900459724164, |
|
"loss": 1.0079, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 0.000201509094543274, |
|
"loss": 1.0157, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 0.00020135918448930642, |
|
"loss": 1.0011, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 0.00020120927443533878, |
|
"loss": 0.9829, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 0.00020105936438137115, |
|
"loss": 1.0082, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 0.00020090945432740354, |
|
"loss": 0.9941, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 0.00020075954427343593, |
|
"loss": 0.9964, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 0.00020060963421946831, |
|
"loss": 1.0093, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"learning_rate": 0.00020045972416550068, |
|
"loss": 1.0038, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"learning_rate": 0.00020030981411153304, |
|
"loss": 0.959, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 10.02, |
|
"learning_rate": 0.00020015990405756546, |
|
"loss": 0.9419, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"learning_rate": 0.00020000999400359782, |
|
"loss": 0.9299, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"learning_rate": 0.0001998600839496302, |
|
"loss": 0.9677, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"learning_rate": 0.00019971017389566257, |
|
"loss": 0.9707, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"learning_rate": 0.00019956026384169499, |
|
"loss": 0.9857, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 0.00019941035378772735, |
|
"loss": 0.9555, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"learning_rate": 0.0001992604437337597, |
|
"loss": 0.9549, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 0.0001991105336797921, |
|
"loss": 0.9652, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 0.0001989606236258245, |
|
"loss": 0.9826, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 10.16, |
|
"learning_rate": 0.00019881071357185688, |
|
"loss": 0.969, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"learning_rate": 0.00019866080351788924, |
|
"loss": 0.9575, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"learning_rate": 0.0001985108934639216, |
|
"loss": 0.9538, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 0.00019836098340995402, |
|
"loss": 0.93, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 10.22, |
|
"learning_rate": 0.00019821107335598638, |
|
"loss": 0.959, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 0.00019806116330201877, |
|
"loss": 0.9662, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 0.00019791125324805113, |
|
"loss": 0.9498, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"learning_rate": 0.00019776134319408355, |
|
"loss": 0.9753, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 10.28, |
|
"learning_rate": 0.0001976114331401159, |
|
"loss": 0.9582, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"learning_rate": 0.00019746152308614827, |
|
"loss": 0.9584, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"learning_rate": 0.00019731161303218066, |
|
"loss": 0.9476, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 10.32, |
|
"learning_rate": 0.00019716170297821305, |
|
"loss": 0.9644, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 10.34, |
|
"learning_rate": 0.00019701179292424544, |
|
"loss": 0.9705, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 10.35, |
|
"learning_rate": 0.0001968618828702778, |
|
"loss": 0.9617, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 0.0001967119728163102, |
|
"loss": 0.9613, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"learning_rate": 0.00019656206276234258, |
|
"loss": 0.9387, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 0.00019641215270837495, |
|
"loss": 0.9691, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"learning_rate": 0.00019626224265440734, |
|
"loss": 0.9984, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 0.0001961123326004397, |
|
"loss": 0.9879, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"learning_rate": 0.00019596242254647211, |
|
"loss": 0.9735, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 10.46, |
|
"learning_rate": 0.00019581251249250448, |
|
"loss": 0.9573, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"learning_rate": 0.00019566260243853687, |
|
"loss": 0.9824, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 10.49, |
|
"learning_rate": 0.00019551269238456923, |
|
"loss": 0.9736, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 0.00019536278233060162, |
|
"loss": 0.9701, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"learning_rate": 0.000195212872276634, |
|
"loss": 0.9647, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 10.53, |
|
"learning_rate": 0.00019506296222266637, |
|
"loss": 0.9814, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 0.00019491305216869879, |
|
"loss": 0.9543, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 10.56, |
|
"learning_rate": 0.00019476314211473115, |
|
"loss": 0.9871, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 10.58, |
|
"learning_rate": 0.00019461323206076354, |
|
"loss": 0.9823, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"learning_rate": 0.0001944633220067959, |
|
"loss": 0.9663, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 0.0001943134119528283, |
|
"loss": 1.0079, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 10.62, |
|
"learning_rate": 0.00019416350189886068, |
|
"loss": 0.9627, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 0.00019401359184489304, |
|
"loss": 0.9991, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 10.65, |
|
"learning_rate": 0.00019386368179092543, |
|
"loss": 0.9705, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 10.67, |
|
"learning_rate": 0.00019371377173695782, |
|
"loss": 0.9432, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 10.68, |
|
"learning_rate": 0.00019356386168299018, |
|
"loss": 0.9685, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"learning_rate": 0.00019341395162902257, |
|
"loss": 0.9643, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 0.00019326404157505493, |
|
"loss": 0.9625, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 0.00019311413152108735, |
|
"loss": 0.9732, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 10.74, |
|
"learning_rate": 0.0001929642214671197, |
|
"loss": 0.9731, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 0.0001928143114131521, |
|
"loss": 0.9769, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 10.77, |
|
"learning_rate": 0.00019266440135918446, |
|
"loss": 0.9679, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 10.79, |
|
"learning_rate": 0.00019251449130521685, |
|
"loss": 0.9545, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"learning_rate": 0.00019236458125124924, |
|
"loss": 0.9519, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"learning_rate": 0.0001922146711972816, |
|
"loss": 0.9708, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 0.000192064761143314, |
|
"loss": 0.9907, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 0.00019191485108934638, |
|
"loss": 0.9822, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 10.86, |
|
"learning_rate": 0.00019176494103537877, |
|
"loss": 0.9835, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 10.88, |
|
"learning_rate": 0.00019161503098141114, |
|
"loss": 0.9853, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 0.0001914651209274435, |
|
"loss": 0.9657, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 10.91, |
|
"learning_rate": 0.00019131521087347591, |
|
"loss": 0.9775, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"learning_rate": 0.00019116530081950828, |
|
"loss": 0.9758, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 0.00019101539076554067, |
|
"loss": 0.9763, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"learning_rate": 0.00019086548071157303, |
|
"loss": 0.977, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"learning_rate": 0.00019071557065760545, |
|
"loss": 0.9699, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 10.98, |
|
"learning_rate": 0.0001905656606036378, |
|
"loss": 0.9689, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"learning_rate": 0.00019041575054967017, |
|
"loss": 0.9726, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 0.00019026584049570256, |
|
"loss": 0.9607, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 0.00019011593044173495, |
|
"loss": 0.9214, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"learning_rate": 0.00018996602038776734, |
|
"loss": 0.9327, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 11.06, |
|
"learning_rate": 0.0001898161103337997, |
|
"loss": 0.9466, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"learning_rate": 0.00018966620027983206, |
|
"loss": 0.9361, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 11.09, |
|
"learning_rate": 0.00018951629022586448, |
|
"loss": 0.9178, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"learning_rate": 0.00018936638017189684, |
|
"loss": 0.9375, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 11.12, |
|
"learning_rate": 0.00018921647011792923, |
|
"loss": 0.9297, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 0.0001890665600639616, |
|
"loss": 0.9306, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 11.15, |
|
"learning_rate": 0.000188916650009994, |
|
"loss": 0.9331, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"learning_rate": 0.00018876673995602637, |
|
"loss": 0.921, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 11.18, |
|
"learning_rate": 0.00018861682990205873, |
|
"loss": 0.9353, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 11.19, |
|
"learning_rate": 0.00018846691984809112, |
|
"loss": 0.9464, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 0.0001883170097941235, |
|
"loss": 0.931, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 11.22, |
|
"learning_rate": 0.0001881670997401559, |
|
"loss": 0.9276, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 11.24, |
|
"learning_rate": 0.00018801718968618826, |
|
"loss": 0.9448, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 11.25, |
|
"learning_rate": 0.00018786727963222063, |
|
"loss": 0.9229, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 11.27, |
|
"learning_rate": 0.00018771736957825304, |
|
"loss": 0.9417, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"learning_rate": 0.0001875674595242854, |
|
"loss": 0.9312, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"learning_rate": 0.0001874175494703178, |
|
"loss": 0.945, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 11.31, |
|
"learning_rate": 0.00018726763941635016, |
|
"loss": 0.9455, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 0.00018711772936238257, |
|
"loss": 0.9265, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 11.34, |
|
"learning_rate": 0.00018696781930841494, |
|
"loss": 0.9421, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 11.36, |
|
"learning_rate": 0.0001868179092544473, |
|
"loss": 0.9346, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"learning_rate": 0.00018666799920047971, |
|
"loss": 0.9236, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 0.00018651808914651208, |
|
"loss": 0.9323, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 11.4, |
|
"learning_rate": 0.00018636817909254447, |
|
"loss": 0.9259, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 11.42, |
|
"learning_rate": 0.00018621826903857683, |
|
"loss": 0.9277, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 11.43, |
|
"learning_rate": 0.00018606835898460925, |
|
"loss": 0.9224, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 11.45, |
|
"learning_rate": 0.0001859184489306416, |
|
"loss": 0.9492, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"learning_rate": 0.00018576853887667397, |
|
"loss": 0.9672, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 11.47, |
|
"learning_rate": 0.00018561862882270636, |
|
"loss": 0.9576, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"learning_rate": 0.00018546871876873875, |
|
"loss": 0.9401, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 11.5, |
|
"learning_rate": 0.00018531880871477114, |
|
"loss": 0.9426, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 11.52, |
|
"learning_rate": 0.0001851688986608035, |
|
"loss": 0.9591, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"learning_rate": 0.00018501898860683586, |
|
"loss": 0.9455, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 11.55, |
|
"learning_rate": 0.00018486907855286828, |
|
"loss": 0.9456, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"learning_rate": 0.00018471916849890064, |
|
"loss": 0.9433, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"learning_rate": 0.00018456925844493303, |
|
"loss": 0.9589, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 11.59, |
|
"learning_rate": 0.0001844193483909654, |
|
"loss": 0.9247, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 0.0001842694383369978, |
|
"loss": 0.9239, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 11.62, |
|
"learning_rate": 0.00018411952828303017, |
|
"loss": 0.9506, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 11.64, |
|
"learning_rate": 0.00018396961822906253, |
|
"loss": 0.9336, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 11.65, |
|
"learning_rate": 0.00018381970817509492, |
|
"loss": 0.9592, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 0.0001836697981211273, |
|
"loss": 0.9645, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 11.68, |
|
"learning_rate": 0.0001835198880671597, |
|
"loss": 0.9602, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"learning_rate": 0.00018336997801319206, |
|
"loss": 0.9246, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"learning_rate": 0.00018322006795922443, |
|
"loss": 0.9641, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"learning_rate": 0.00018307015790525684, |
|
"loss": 0.9349, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"learning_rate": 0.0001829202478512892, |
|
"loss": 0.9634, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"learning_rate": 0.0001827703377973216, |
|
"loss": 0.9641, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"learning_rate": 0.00018262042774335396, |
|
"loss": 0.9404, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"learning_rate": 0.00018247051768938637, |
|
"loss": 0.9647, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"learning_rate": 0.00018232060763541874, |
|
"loss": 0.9416, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 11.82, |
|
"learning_rate": 0.0001821706975814511, |
|
"loss": 0.9555, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"learning_rate": 0.0001820207875274835, |
|
"loss": 0.9487, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 11.85, |
|
"learning_rate": 0.00018187087747351588, |
|
"loss": 0.9609, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"learning_rate": 0.00018172096741954827, |
|
"loss": 0.9481, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 11.88, |
|
"learning_rate": 0.00018157105736558063, |
|
"loss": 0.9449, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"learning_rate": 0.000181421147311613, |
|
"loss": 0.9381, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"learning_rate": 0.0001812712372576454, |
|
"loss": 0.9726, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 11.92, |
|
"learning_rate": 0.00018112132720367777, |
|
"loss": 0.9615, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 11.94, |
|
"learning_rate": 0.00018097141714971016, |
|
"loss": 0.9523, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"learning_rate": 0.00018082150709574252, |
|
"loss": 0.9286, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 11.97, |
|
"learning_rate": 0.00018067159704177494, |
|
"loss": 0.961, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"learning_rate": 0.0001805216869878073, |
|
"loss": 0.947, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"learning_rate": 0.00018037177693383966, |
|
"loss": 0.9594, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 12.01, |
|
"learning_rate": 0.00018022186687987205, |
|
"loss": 0.9237, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 0.00018007195682590444, |
|
"loss": 0.9109, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"learning_rate": 0.00017992204677193683, |
|
"loss": 0.9255, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"learning_rate": 0.0001797721367179692, |
|
"loss": 0.9117, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"learning_rate": 0.00017962222666400156, |
|
"loss": 0.9218, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 12.09, |
|
"learning_rate": 0.00017947231661003397, |
|
"loss": 0.9003, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"learning_rate": 0.00017932240655606633, |
|
"loss": 0.9156, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 12.12, |
|
"learning_rate": 0.00017917249650209872, |
|
"loss": 0.9, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"learning_rate": 0.0001790225864481311, |
|
"loss": 0.9066, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 12.15, |
|
"learning_rate": 0.0001788726763941635, |
|
"loss": 0.895, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 12.16, |
|
"learning_rate": 0.00017872276634019586, |
|
"loss": 0.9223, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 12.18, |
|
"learning_rate": 0.00017857285628622823, |
|
"loss": 0.9268, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"learning_rate": 0.00017842294623226064, |
|
"loss": 0.9282, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 0.000178273036178293, |
|
"loss": 0.9169, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 12.22, |
|
"learning_rate": 0.0001781231261243254, |
|
"loss": 0.9232, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 0.00017797321607035776, |
|
"loss": 0.9058, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 12.25, |
|
"learning_rate": 0.00017782330601639017, |
|
"loss": 0.905, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 12.27, |
|
"learning_rate": 0.00017767339596242254, |
|
"loss": 0.9204, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 12.28, |
|
"learning_rate": 0.0001775234859084549, |
|
"loss": 0.9418, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 12.3, |
|
"learning_rate": 0.0001773735758544873, |
|
"loss": 0.8982, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"learning_rate": 0.00017722366580051968, |
|
"loss": 0.9224, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"learning_rate": 0.00017707375574655207, |
|
"loss": 0.9563, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"learning_rate": 0.00017692384569258443, |
|
"loss": 0.8992, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 12.36, |
|
"learning_rate": 0.0001767739356386168, |
|
"loss": 0.9165, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"learning_rate": 0.0001766240255846492, |
|
"loss": 0.9241, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 12.39, |
|
"learning_rate": 0.00017647411553068157, |
|
"loss": 0.9174, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"learning_rate": 0.00017632420547671396, |
|
"loss": 0.9213, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"learning_rate": 0.00017617429542274632, |
|
"loss": 0.9066, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 12.43, |
|
"learning_rate": 0.00017602438536877874, |
|
"loss": 0.9126, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 0.0001758744753148111, |
|
"loss": 0.891, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"learning_rate": 0.00017572456526084346, |
|
"loss": 0.9023, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 12.48, |
|
"learning_rate": 0.00017557465520687585, |
|
"loss": 0.9307, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"learning_rate": 0.00017542474515290824, |
|
"loss": 0.9265, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 12.51, |
|
"learning_rate": 0.00017527483509894063, |
|
"loss": 0.8943, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 12.52, |
|
"learning_rate": 0.000175124925044973, |
|
"loss": 0.9179, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"learning_rate": 0.00017497501499100536, |
|
"loss": 0.9252, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"learning_rate": 0.00017482510493703777, |
|
"loss": 0.9232, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 12.57, |
|
"learning_rate": 0.00017467519488307013, |
|
"loss": 0.9389, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 12.58, |
|
"learning_rate": 0.00017452528482910252, |
|
"loss": 0.898, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 12.6, |
|
"learning_rate": 0.00017437537477513489, |
|
"loss": 0.9148, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 0.0001742254647211673, |
|
"loss": 0.9147, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"learning_rate": 0.00017407555466719967, |
|
"loss": 0.8992, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"learning_rate": 0.00017392564461323203, |
|
"loss": 0.9361, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"learning_rate": 0.00017377573455926442, |
|
"loss": 0.9256, |
|
"step": 423000 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"learning_rate": 0.0001736258245052968, |
|
"loss": 0.9341, |
|
"step": 423500 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"learning_rate": 0.0001734759144513292, |
|
"loss": 0.8993, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"learning_rate": 0.00017332600439736156, |
|
"loss": 0.9187, |
|
"step": 424500 |
|
}, |
|
{ |
|
"epoch": 12.72, |
|
"learning_rate": 0.00017317609434339395, |
|
"loss": 0.9244, |
|
"step": 425000 |
|
}, |
|
{ |
|
"epoch": 12.73, |
|
"learning_rate": 0.00017302618428942634, |
|
"loss": 0.9375, |
|
"step": 425500 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"learning_rate": 0.0001728762742354587, |
|
"loss": 0.9166, |
|
"step": 426000 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"learning_rate": 0.0001727263641814911, |
|
"loss": 0.9181, |
|
"step": 426500 |
|
}, |
|
{ |
|
"epoch": 12.78, |
|
"learning_rate": 0.00017257645412752345, |
|
"loss": 0.9124, |
|
"step": 427000 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 0.00017242654407355587, |
|
"loss": 0.9271, |
|
"step": 427500 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 0.00017227663401958823, |
|
"loss": 0.9301, |
|
"step": 428000 |
|
}, |
|
{ |
|
"epoch": 12.82, |
|
"learning_rate": 0.00017212672396562062, |
|
"loss": 0.9357, |
|
"step": 428500 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 0.00017197681391165298, |
|
"loss": 0.9518, |
|
"step": 429000 |
|
}, |
|
{ |
|
"epoch": 12.85, |
|
"learning_rate": 0.00017182690385768537, |
|
"loss": 0.9481, |
|
"step": 429500 |
|
}, |
|
{ |
|
"epoch": 12.87, |
|
"learning_rate": 0.00017167699380371776, |
|
"loss": 0.9176, |
|
"step": 430000 |
|
}, |
|
{ |
|
"epoch": 12.88, |
|
"learning_rate": 0.00017152708374975012, |
|
"loss": 0.9442, |
|
"step": 430500 |
|
}, |
|
{ |
|
"epoch": 12.9, |
|
"learning_rate": 0.0001713771736957825, |
|
"loss": 0.901, |
|
"step": 431000 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 0.0001712272636418149, |
|
"loss": 0.9295, |
|
"step": 431500 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"learning_rate": 0.00017107735358784726, |
|
"loss": 0.9319, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 12.94, |
|
"learning_rate": 0.00017092744353387965, |
|
"loss": 0.9313, |
|
"step": 432500 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"learning_rate": 0.00017077753347991204, |
|
"loss": 0.9186, |
|
"step": 433000 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 0.00017062762342594443, |
|
"loss": 0.9097, |
|
"step": 433500 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"learning_rate": 0.0001704777133719768, |
|
"loss": 0.9129, |
|
"step": 434000 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 0.00017032780331800918, |
|
"loss": 0.9398, |
|
"step": 434500 |
|
}, |
|
{ |
|
"epoch": 13.02, |
|
"learning_rate": 0.00017017789326404157, |
|
"loss": 0.8845, |
|
"step": 435000 |
|
}, |
|
{ |
|
"epoch": 13.03, |
|
"learning_rate": 0.00017002798321007393, |
|
"loss": 0.8695, |
|
"step": 435500 |
|
}, |
|
{ |
|
"epoch": 13.05, |
|
"learning_rate": 0.00016987807315610632, |
|
"loss": 0.8749, |
|
"step": 436000 |
|
}, |
|
{ |
|
"epoch": 13.06, |
|
"learning_rate": 0.00016972816310213869, |
|
"loss": 0.8873, |
|
"step": 436500 |
|
}, |
|
{ |
|
"epoch": 13.08, |
|
"learning_rate": 0.0001695782530481711, |
|
"loss": 0.8687, |
|
"step": 437000 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"learning_rate": 0.00016942834299420347, |
|
"loss": 0.8894, |
|
"step": 437500 |
|
}, |
|
{ |
|
"epoch": 13.11, |
|
"learning_rate": 0.00016927843294023585, |
|
"loss": 0.8918, |
|
"step": 438000 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"learning_rate": 0.00016912852288626822, |
|
"loss": 0.8928, |
|
"step": 438500 |
|
}, |
|
{ |
|
"epoch": 13.14, |
|
"learning_rate": 0.0001689786128323006, |
|
"loss": 0.8914, |
|
"step": 439000 |
|
}, |
|
{ |
|
"epoch": 13.15, |
|
"learning_rate": 0.000168828702778333, |
|
"loss": 0.8893, |
|
"step": 439500 |
|
}, |
|
{ |
|
"epoch": 13.17, |
|
"learning_rate": 0.00016867879272436536, |
|
"loss": 0.8915, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"learning_rate": 0.00016852888267039775, |
|
"loss": 0.9007, |
|
"step": 440500 |
|
}, |
|
{ |
|
"epoch": 13.2, |
|
"learning_rate": 0.00016837897261643014, |
|
"loss": 0.9053, |
|
"step": 441000 |
|
}, |
|
{ |
|
"epoch": 13.21, |
|
"learning_rate": 0.00016822906256246253, |
|
"loss": 0.8975, |
|
"step": 441500 |
|
}, |
|
{ |
|
"epoch": 13.23, |
|
"learning_rate": 0.0001680791525084949, |
|
"loss": 0.9001, |
|
"step": 442000 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"learning_rate": 0.00016792924245452725, |
|
"loss": 0.897, |
|
"step": 442500 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"learning_rate": 0.00016777933240055967, |
|
"loss": 0.8888, |
|
"step": 443000 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"learning_rate": 0.00016762942234659203, |
|
"loss": 0.8957, |
|
"step": 443500 |
|
}, |
|
{ |
|
"epoch": 13.29, |
|
"learning_rate": 0.00016747951229262442, |
|
"loss": 0.9098, |
|
"step": 444000 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"learning_rate": 0.00016732960223865678, |
|
"loss": 0.9032, |
|
"step": 444500 |
|
}, |
|
{ |
|
"epoch": 13.32, |
|
"learning_rate": 0.0001671796921846892, |
|
"loss": 0.9009, |
|
"step": 445000 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"learning_rate": 0.00016702978213072156, |
|
"loss": 0.8848, |
|
"step": 445500 |
|
}, |
|
{ |
|
"epoch": 13.35, |
|
"learning_rate": 0.00016687987207675392, |
|
"loss": 0.8927, |
|
"step": 446000 |
|
}, |
|
{ |
|
"epoch": 13.36, |
|
"learning_rate": 0.0001667299620227863, |
|
"loss": 0.9166, |
|
"step": 446500 |
|
}, |
|
{ |
|
"epoch": 13.38, |
|
"learning_rate": 0.0001665800519688187, |
|
"loss": 0.8934, |
|
"step": 447000 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"learning_rate": 0.0001664301419148511, |
|
"loss": 0.8561, |
|
"step": 447500 |
|
}, |
|
{ |
|
"epoch": 13.4, |
|
"learning_rate": 0.00016628023186088345, |
|
"loss": 0.8928, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"learning_rate": 0.00016613032180691582, |
|
"loss": 0.8949, |
|
"step": 448500 |
|
}, |
|
{ |
|
"epoch": 13.43, |
|
"learning_rate": 0.00016598041175294823, |
|
"loss": 0.9076, |
|
"step": 449000 |
|
}, |
|
{ |
|
"epoch": 13.45, |
|
"learning_rate": 0.0001658305016989806, |
|
"loss": 0.8815, |
|
"step": 449500 |
|
}, |
|
{ |
|
"epoch": 13.46, |
|
"learning_rate": 0.00016568059164501298, |
|
"loss": 0.9033, |
|
"step": 450000 |
|
}, |
|
{ |
|
"epoch": 13.48, |
|
"learning_rate": 0.00016553068159104535, |
|
"loss": 0.9035, |
|
"step": 450500 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"learning_rate": 0.00016538077153707776, |
|
"loss": 0.9178, |
|
"step": 451000 |
|
}, |
|
{ |
|
"epoch": 13.51, |
|
"learning_rate": 0.00016523086148311012, |
|
"loss": 0.8993, |
|
"step": 451500 |
|
}, |
|
{ |
|
"epoch": 13.52, |
|
"learning_rate": 0.0001650809514291425, |
|
"loss": 0.9015, |
|
"step": 452000 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"learning_rate": 0.00016493104137517488, |
|
"loss": 0.9005, |
|
"step": 452500 |
|
}, |
|
{ |
|
"epoch": 13.55, |
|
"learning_rate": 0.00016478113132120727, |
|
"loss": 0.8866, |
|
"step": 453000 |
|
}, |
|
{ |
|
"epoch": 13.57, |
|
"learning_rate": 0.00016463122126723965, |
|
"loss": 0.8973, |
|
"step": 453500 |
|
}, |
|
{ |
|
"epoch": 13.58, |
|
"learning_rate": 0.00016448131121327202, |
|
"loss": 0.8787, |
|
"step": 454000 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"learning_rate": 0.00016433140115930438, |
|
"loss": 0.8802, |
|
"step": 454500 |
|
}, |
|
{ |
|
"epoch": 13.61, |
|
"learning_rate": 0.0001641814911053368, |
|
"loss": 0.9065, |
|
"step": 455000 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"learning_rate": 0.00016403158105136916, |
|
"loss": 0.9177, |
|
"step": 455500 |
|
}, |
|
{ |
|
"epoch": 13.64, |
|
"learning_rate": 0.00016388167099740155, |
|
"loss": 0.9184, |
|
"step": 456000 |
|
}, |
|
{ |
|
"epoch": 13.66, |
|
"learning_rate": 0.0001637317609434339, |
|
"loss": 0.9112, |
|
"step": 456500 |
|
}, |
|
{ |
|
"epoch": 13.67, |
|
"learning_rate": 0.00016358185088946633, |
|
"loss": 0.9059, |
|
"step": 457000 |
|
}, |
|
{ |
|
"epoch": 13.69, |
|
"learning_rate": 0.0001634319408354987, |
|
"loss": 0.9168, |
|
"step": 457500 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"learning_rate": 0.00016328203078153105, |
|
"loss": 0.89, |
|
"step": 458000 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"learning_rate": 0.00016313212072756344, |
|
"loss": 0.9066, |
|
"step": 458500 |
|
}, |
|
{ |
|
"epoch": 13.73, |
|
"learning_rate": 0.00016298221067359583, |
|
"loss": 0.9041, |
|
"step": 459000 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"learning_rate": 0.00016283230061962822, |
|
"loss": 0.8919, |
|
"step": 459500 |
|
}, |
|
{ |
|
"epoch": 13.76, |
|
"learning_rate": 0.00016268239056566058, |
|
"loss": 0.8856, |
|
"step": 460000 |
|
}, |
|
{ |
|
"epoch": 13.78, |
|
"learning_rate": 0.000162532480511693, |
|
"loss": 0.8989, |
|
"step": 460500 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"learning_rate": 0.00016238257045772536, |
|
"loss": 0.8832, |
|
"step": 461000 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"learning_rate": 0.00016223266040375772, |
|
"loss": 0.8902, |
|
"step": 461500 |
|
}, |
|
{ |
|
"epoch": 13.82, |
|
"learning_rate": 0.0001620827503497901, |
|
"loss": 0.9124, |
|
"step": 462000 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"learning_rate": 0.0001619328402958225, |
|
"loss": 0.9204, |
|
"step": 462500 |
|
}, |
|
{ |
|
"epoch": 13.85, |
|
"learning_rate": 0.0001617829302418549, |
|
"loss": 0.8828, |
|
"step": 463000 |
|
}, |
|
{ |
|
"epoch": 13.87, |
|
"learning_rate": 0.00016163302018788725, |
|
"loss": 0.909, |
|
"step": 463500 |
|
}, |
|
{ |
|
"epoch": 13.88, |
|
"learning_rate": 0.00016148311013391962, |
|
"loss": 0.8917, |
|
"step": 464000 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"learning_rate": 0.00016133320007995203, |
|
"loss": 0.9148, |
|
"step": 464500 |
|
}, |
|
{ |
|
"epoch": 13.91, |
|
"learning_rate": 0.0001611832900259844, |
|
"loss": 0.8905, |
|
"step": 465000 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"learning_rate": 0.00016103337997201678, |
|
"loss": 0.9057, |
|
"step": 465500 |
|
}, |
|
{ |
|
"epoch": 13.94, |
|
"learning_rate": 0.00016088346991804915, |
|
"loss": 0.9198, |
|
"step": 466000 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"learning_rate": 0.00016073355986408156, |
|
"loss": 0.9132, |
|
"step": 466500 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"learning_rate": 0.00016058364981011392, |
|
"loss": 0.9184, |
|
"step": 467000 |
|
}, |
|
{ |
|
"epoch": 13.99, |
|
"learning_rate": 0.0001604337397561463, |
|
"loss": 0.8966, |
|
"step": 467500 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"learning_rate": 0.00016028382970217868, |
|
"loss": 0.8882, |
|
"step": 468000 |
|
}, |
|
{ |
|
"epoch": 14.02, |
|
"learning_rate": 0.00016013391964821107, |
|
"loss": 0.8713, |
|
"step": 468500 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"learning_rate": 0.00015998400959424345, |
|
"loss": 0.8713, |
|
"step": 469000 |
|
}, |
|
{ |
|
"epoch": 14.05, |
|
"learning_rate": 0.00015983409954027582, |
|
"loss": 0.8868, |
|
"step": 469500 |
|
}, |
|
{ |
|
"epoch": 14.06, |
|
"learning_rate": 0.00015968418948630818, |
|
"loss": 0.8559, |
|
"step": 470000 |
|
}, |
|
{ |
|
"epoch": 14.08, |
|
"learning_rate": 0.0001595342794323406, |
|
"loss": 0.8652, |
|
"step": 470500 |
|
}, |
|
{ |
|
"epoch": 14.09, |
|
"learning_rate": 0.00015938436937837296, |
|
"loss": 0.8722, |
|
"step": 471000 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"learning_rate": 0.00015923445932440535, |
|
"loss": 0.8768, |
|
"step": 471500 |
|
}, |
|
{ |
|
"epoch": 14.12, |
|
"learning_rate": 0.0001590845492704377, |
|
"loss": 0.8812, |
|
"step": 472000 |
|
}, |
|
{ |
|
"epoch": 14.14, |
|
"learning_rate": 0.00015893463921647013, |
|
"loss": 0.8712, |
|
"step": 472500 |
|
}, |
|
{ |
|
"epoch": 14.15, |
|
"learning_rate": 0.0001587847291625025, |
|
"loss": 0.8546, |
|
"step": 473000 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"learning_rate": 0.00015863481910853485, |
|
"loss": 0.8618, |
|
"step": 473500 |
|
}, |
|
{ |
|
"epoch": 14.18, |
|
"learning_rate": 0.00015848490905456724, |
|
"loss": 0.8755, |
|
"step": 474000 |
|
}, |
|
{ |
|
"epoch": 14.2, |
|
"learning_rate": 0.00015833499900059963, |
|
"loss": 0.8632, |
|
"step": 474500 |
|
}, |
|
{ |
|
"epoch": 14.21, |
|
"learning_rate": 0.00015818508894663202, |
|
"loss": 0.8687, |
|
"step": 475000 |
|
}, |
|
{ |
|
"epoch": 14.23, |
|
"learning_rate": 0.00015803517889266438, |
|
"loss": 0.8771, |
|
"step": 475500 |
|
}, |
|
{ |
|
"epoch": 14.24, |
|
"learning_rate": 0.00015788526883869674, |
|
"loss": 0.8504, |
|
"step": 476000 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"learning_rate": 0.00015773535878472916, |
|
"loss": 0.8639, |
|
"step": 476500 |
|
}, |
|
{ |
|
"epoch": 14.27, |
|
"learning_rate": 0.00015758544873076152, |
|
"loss": 0.8786, |
|
"step": 477000 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"learning_rate": 0.0001574355386767939, |
|
"loss": 0.8752, |
|
"step": 477500 |
|
}, |
|
{ |
|
"epoch": 14.3, |
|
"learning_rate": 0.00015728562862282627, |
|
"loss": 0.8673, |
|
"step": 478000 |
|
}, |
|
{ |
|
"epoch": 14.32, |
|
"learning_rate": 0.0001571357185688587, |
|
"loss": 0.867, |
|
"step": 478500 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"learning_rate": 0.00015698580851489105, |
|
"loss": 0.8565, |
|
"step": 479000 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"learning_rate": 0.00015683589846092342, |
|
"loss": 0.8564, |
|
"step": 479500 |
|
}, |
|
{ |
|
"epoch": 14.36, |
|
"learning_rate": 0.0001566859884069558, |
|
"loss": 0.8573, |
|
"step": 480000 |
|
}, |
|
{ |
|
"epoch": 14.38, |
|
"learning_rate": 0.0001565360783529882, |
|
"loss": 0.8612, |
|
"step": 480500 |
|
}, |
|
{ |
|
"epoch": 14.39, |
|
"learning_rate": 0.00015638616829902058, |
|
"loss": 0.8686, |
|
"step": 481000 |
|
}, |
|
{ |
|
"epoch": 14.41, |
|
"learning_rate": 0.00015623625824505295, |
|
"loss": 0.8795, |
|
"step": 481500 |
|
}, |
|
{ |
|
"epoch": 14.42, |
|
"learning_rate": 0.0001560863481910853, |
|
"loss": 0.8617, |
|
"step": 482000 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"learning_rate": 0.00015593643813711772, |
|
"loss": 0.8723, |
|
"step": 482500 |
|
}, |
|
{ |
|
"epoch": 14.45, |
|
"learning_rate": 0.0001557865280831501, |
|
"loss": 0.8706, |
|
"step": 483000 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"learning_rate": 0.00015563661802918248, |
|
"loss": 0.8566, |
|
"step": 483500 |
|
}, |
|
{ |
|
"epoch": 14.48, |
|
"learning_rate": 0.00015548670797521484, |
|
"loss": 0.8658, |
|
"step": 484000 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"learning_rate": 0.00015533679792124726, |
|
"loss": 0.8949, |
|
"step": 484500 |
|
}, |
|
{ |
|
"epoch": 14.51, |
|
"learning_rate": 0.00015518688786727962, |
|
"loss": 0.8854, |
|
"step": 485000 |
|
}, |
|
{ |
|
"epoch": 14.53, |
|
"learning_rate": 0.00015503697781331198, |
|
"loss": 0.8628, |
|
"step": 485500 |
|
}, |
|
{ |
|
"epoch": 14.54, |
|
"learning_rate": 0.00015488706775934437, |
|
"loss": 0.869, |
|
"step": 486000 |
|
}, |
|
{ |
|
"epoch": 14.56, |
|
"learning_rate": 0.00015473715770537676, |
|
"loss": 0.8877, |
|
"step": 486500 |
|
}, |
|
{ |
|
"epoch": 14.57, |
|
"learning_rate": 0.00015458724765140915, |
|
"loss": 0.886, |
|
"step": 487000 |
|
}, |
|
{ |
|
"epoch": 14.59, |
|
"learning_rate": 0.0001544373375974415, |
|
"loss": 0.8834, |
|
"step": 487500 |
|
}, |
|
{ |
|
"epoch": 14.6, |
|
"learning_rate": 0.00015428742754347393, |
|
"loss": 0.8778, |
|
"step": 488000 |
|
}, |
|
{ |
|
"epoch": 14.62, |
|
"learning_rate": 0.0001541375174895063, |
|
"loss": 0.8602, |
|
"step": 488500 |
|
}, |
|
{ |
|
"epoch": 14.63, |
|
"learning_rate": 0.00015398760743553865, |
|
"loss": 0.8778, |
|
"step": 489000 |
|
}, |
|
{ |
|
"epoch": 14.65, |
|
"learning_rate": 0.00015383769738157104, |
|
"loss": 0.8854, |
|
"step": 489500 |
|
}, |
|
{ |
|
"epoch": 14.66, |
|
"learning_rate": 0.00015368778732760343, |
|
"loss": 0.8619, |
|
"step": 490000 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"learning_rate": 0.00015353787727363582, |
|
"loss": 0.8669, |
|
"step": 490500 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"learning_rate": 0.00015338796721966818, |
|
"loss": 0.8851, |
|
"step": 491000 |
|
}, |
|
{ |
|
"epoch": 14.71, |
|
"learning_rate": 0.00015323805716570054, |
|
"loss": 0.8724, |
|
"step": 491500 |
|
}, |
|
{ |
|
"epoch": 14.72, |
|
"learning_rate": 0.00015308814711173296, |
|
"loss": 0.8658, |
|
"step": 492000 |
|
}, |
|
{ |
|
"epoch": 14.74, |
|
"learning_rate": 0.00015293823705776532, |
|
"loss": 0.8839, |
|
"step": 492500 |
|
}, |
|
{ |
|
"epoch": 14.75, |
|
"learning_rate": 0.0001527883270037977, |
|
"loss": 0.8859, |
|
"step": 493000 |
|
}, |
|
{ |
|
"epoch": 14.77, |
|
"learning_rate": 0.00015263841694983007, |
|
"loss": 0.8813, |
|
"step": 493500 |
|
}, |
|
{ |
|
"epoch": 14.78, |
|
"learning_rate": 0.0001524885068958625, |
|
"loss": 0.8656, |
|
"step": 494000 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"learning_rate": 0.00015233859684189485, |
|
"loss": 0.8635, |
|
"step": 494500 |
|
}, |
|
{ |
|
"epoch": 14.81, |
|
"learning_rate": 0.00015218868678792722, |
|
"loss": 0.8685, |
|
"step": 495000 |
|
}, |
|
{ |
|
"epoch": 14.83, |
|
"learning_rate": 0.0001520387767339596, |
|
"loss": 0.8987, |
|
"step": 495500 |
|
}, |
|
{ |
|
"epoch": 14.84, |
|
"learning_rate": 0.000151888866679992, |
|
"loss": 0.9011, |
|
"step": 496000 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"learning_rate": 0.00015173895662602438, |
|
"loss": 0.8785, |
|
"step": 496500 |
|
}, |
|
{ |
|
"epoch": 14.87, |
|
"learning_rate": 0.00015158904657205675, |
|
"loss": 0.8568, |
|
"step": 497000 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"learning_rate": 0.0001514391365180891, |
|
"loss": 0.8688, |
|
"step": 497500 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"learning_rate": 0.00015128922646412152, |
|
"loss": 0.8804, |
|
"step": 498000 |
|
}, |
|
{ |
|
"epoch": 14.92, |
|
"learning_rate": 0.0001511393164101539, |
|
"loss": 0.8795, |
|
"step": 498500 |
|
}, |
|
{ |
|
"epoch": 14.93, |
|
"learning_rate": 0.00015098940635618628, |
|
"loss": 0.8786, |
|
"step": 499000 |
|
}, |
|
{ |
|
"epoch": 14.95, |
|
"learning_rate": 0.00015083949630221864, |
|
"loss": 0.8608, |
|
"step": 499500 |
|
}, |
|
{ |
|
"epoch": 14.96, |
|
"learning_rate": 0.00015068958624825106, |
|
"loss": 0.9066, |
|
"step": 500000 |
|
}, |
|
{ |
|
"epoch": 14.98, |
|
"learning_rate": 0.00015053967619428342, |
|
"loss": 0.8672, |
|
"step": 500500 |
|
}, |
|
{ |
|
"epoch": 14.99, |
|
"learning_rate": 0.00015038976614031578, |
|
"loss": 0.8646, |
|
"step": 501000 |
|
}, |
|
{ |
|
"epoch": 15.01, |
|
"learning_rate": 0.00015023985608634817, |
|
"loss": 0.8523, |
|
"step": 501500 |
|
}, |
|
{
"epoch": 15.02,
"learning_rate": 0.00015008994603238056,
"loss": 0.8298,
"step": 502000
},
{
"epoch": 15.04,
"learning_rate": 0.00014994003597841295,
"loss": 0.8402,
"step": 502500
},
{
"epoch": 15.05,
"learning_rate": 0.0001497901259244453,
"loss": 0.8216,
"step": 503000
},
{
"epoch": 15.07,
"learning_rate": 0.0001496402158704777,
"loss": 0.8306,
"step": 503500
},
{
"epoch": 15.08,
"learning_rate": 0.00014949030581651006,
"loss": 0.8117,
"step": 504000
},
{
"epoch": 15.1,
"learning_rate": 0.00014934039576254245,
"loss": 0.8351,
"step": 504500
},
{
"epoch": 15.11,
"learning_rate": 0.00014919048570857484,
"loss": 0.8559,
"step": 505000
},
{
"epoch": 15.13,
"learning_rate": 0.00014904057565460723,
"loss": 0.8573,
"step": 505500
},
{
"epoch": 15.14,
"learning_rate": 0.0001488906656006396,
"loss": 0.8333,
"step": 506000
},
{
"epoch": 15.16,
"learning_rate": 0.00014874075554667198,
"loss": 0.8369,
"step": 506500
},
{
"epoch": 15.17,
"learning_rate": 0.00014859084549270437,
"loss": 0.8515,
"step": 507000
},
{
"epoch": 15.19,
"learning_rate": 0.00014844093543873673,
"loss": 0.8362,
"step": 507500
},
{
"epoch": 15.2,
"learning_rate": 0.00014829102538476912,
"loss": 0.854,
"step": 508000
},
{
"epoch": 15.22,
"learning_rate": 0.0001481411153308015,
"loss": 0.8529,
"step": 508500
},
{
"epoch": 15.23,
"learning_rate": 0.00014799120527683387,
"loss": 0.8332,
"step": 509000
},
{
"epoch": 15.25,
"learning_rate": 0.00014784129522286626,
"loss": 0.8574,
"step": 509500
},
{
"epoch": 15.26,
"learning_rate": 0.00014769138516889865,
"loss": 0.8549,
"step": 510000
},
{
"epoch": 15.28,
"learning_rate": 0.00014754147511493102,
"loss": 0.8634,
"step": 510500
},
{
"epoch": 15.29,
"learning_rate": 0.0001473915650609634,
"loss": 0.8262,
"step": 511000
},
{
"epoch": 15.3,
"learning_rate": 0.0001472416550069958,
"loss": 0.8455,
"step": 511500
},
{
"epoch": 15.32,
"learning_rate": 0.00014709174495302816,
"loss": 0.8336,
"step": 512000
},
{
"epoch": 15.33,
"learning_rate": 0.00014694183489906055,
"loss": 0.8493,
"step": 512500
},
{
"epoch": 15.35,
"learning_rate": 0.00014679192484509294,
"loss": 0.8368,
"step": 513000
},
{
"epoch": 15.36,
"learning_rate": 0.00014664201479112532,
"loss": 0.8441,
"step": 513500
},
{
"epoch": 15.38,
"learning_rate": 0.0001464921047371577,
"loss": 0.8549,
"step": 514000
},
{
"epoch": 15.39,
"learning_rate": 0.00014634219468319008,
"loss": 0.8619,
"step": 514500
},
{
"epoch": 15.41,
"learning_rate": 0.00014619228462922247,
"loss": 0.8299,
"step": 515000
},
{
"epoch": 15.42,
"learning_rate": 0.00014604237457525483,
"loss": 0.8297,
"step": 515500
},
{
"epoch": 15.44,
"learning_rate": 0.00014589246452128722,
"loss": 0.8415,
"step": 516000
},
{
"epoch": 15.45,
"learning_rate": 0.0001457425544673196,
"loss": 0.8484,
"step": 516500
},
{
"epoch": 15.47,
"learning_rate": 0.000145592644413352,
"loss": 0.8505,
"step": 517000
},
{
"epoch": 15.48,
"learning_rate": 0.00014544273435938436,
"loss": 0.858,
"step": 517500
},
{
"epoch": 15.5,
"learning_rate": 0.00014529282430541675,
"loss": 0.8429,
"step": 518000
},
{
"epoch": 15.51,
"learning_rate": 0.0001451429142514491,
"loss": 0.8437,
"step": 518500
},
{
"epoch": 15.53,
"learning_rate": 0.0001449930041974815,
"loss": 0.8411,
"step": 519000
},
{
"epoch": 15.54,
"learning_rate": 0.0001448430941435139,
"loss": 0.8565,
"step": 519500
},
{
"epoch": 15.56,
"learning_rate": 0.00014469318408954628,
"loss": 0.8379,
"step": 520000
},
{
"epoch": 15.57,
"learning_rate": 0.00014454327403557864,
"loss": 0.8672,
"step": 520500
},
{
"epoch": 15.59,
"learning_rate": 0.00014439336398161103,
"loss": 0.8565,
"step": 521000
},
{
"epoch": 15.6,
"learning_rate": 0.0001442434539276434,
"loss": 0.8351,
"step": 521500
},
{
"epoch": 15.62,
"learning_rate": 0.00014409354387367578,
"loss": 0.8663,
"step": 522000
},
{
"epoch": 15.63,
"learning_rate": 0.00014394363381970817,
"loss": 0.8497,
"step": 522500
},
{
"epoch": 15.65,
"learning_rate": 0.00014379372376574056,
"loss": 0.8472,
"step": 523000
},
{
"epoch": 15.66,
"learning_rate": 0.00014364381371177292,
"loss": 0.8612,
"step": 523500
},
{
"epoch": 15.68,
"learning_rate": 0.0001434939036578053,
"loss": 0.8223,
"step": 524000
},
{
"epoch": 15.69,
"learning_rate": 0.00014334399360383767,
"loss": 0.8514,
"step": 524500
},
{
"epoch": 15.71,
"learning_rate": 0.00014319408354987006,
"loss": 0.8906,
"step": 525000
},
{
"epoch": 15.72,
"learning_rate": 0.00014304417349590245,
"loss": 0.8522,
"step": 525500
},
{
"epoch": 15.74,
"learning_rate": 0.00014289426344193484,
"loss": 0.8439,
"step": 526000
},
{
"epoch": 15.75,
"learning_rate": 0.0001427443533879672,
"loss": 0.8482,
"step": 526500
},
{
"epoch": 15.77,
"learning_rate": 0.0001425944433339996,
"loss": 0.8656,
"step": 527000
},
{
"epoch": 15.78,
"learning_rate": 0.00014244453328003196,
"loss": 0.8668,
"step": 527500
},
{
"epoch": 15.8,
"learning_rate": 0.00014229462322606435,
"loss": 0.8606,
"step": 528000
},
{
"epoch": 15.81,
"learning_rate": 0.00014214471317209674,
"loss": 0.8567,
"step": 528500
},
{
"epoch": 15.83,
"learning_rate": 0.00014199480311812913,
"loss": 0.8632,
"step": 529000
},
{
"epoch": 15.84,
"learning_rate": 0.0001418448930641615,
"loss": 0.8368,
"step": 529500
},
{
"epoch": 15.86,
"learning_rate": 0.00014169498301019388,
"loss": 0.868,
"step": 530000
},
{
"epoch": 15.87,
"learning_rate": 0.00014154507295622624,
"loss": 0.8833,
"step": 530500
},
{
"epoch": 15.89,
"learning_rate": 0.00014139516290225863,
"loss": 0.8784,
"step": 531000
},
{
"epoch": 15.9,
"learning_rate": 0.00014124525284829102,
"loss": 0.8749,
"step": 531500
},
{
"epoch": 15.92,
"learning_rate": 0.0001410953427943234,
"loss": 0.8728,
"step": 532000
},
{
"epoch": 15.93,
"learning_rate": 0.00014094543274035577,
"loss": 0.8622,
"step": 532500
},
{
"epoch": 15.95,
"learning_rate": 0.00014079552268638816,
"loss": 0.8663,
"step": 533000
},
{
"epoch": 15.96,
"learning_rate": 0.00014064561263242052,
"loss": 0.8635,
"step": 533500
},
{
"epoch": 15.98,
"learning_rate": 0.0001404957025784529,
"loss": 0.8524,
"step": 534000
},
{
"epoch": 15.99,
"learning_rate": 0.0001403457925244853,
"loss": 0.8512,
"step": 534500
},
{
"epoch": 16.01,
"learning_rate": 0.0001401958824705177,
"loss": 0.8336,
"step": 535000
},
{
"epoch": 16.02,
"learning_rate": 0.00014004597241655005,
"loss": 0.8164,
"step": 535500
},
{
"epoch": 16.04,
"learning_rate": 0.00013989606236258244,
"loss": 0.8124,
"step": 536000
},
{
"epoch": 16.05,
"learning_rate": 0.0001397461523086148,
"loss": 0.8113,
"step": 536500
},
{
"epoch": 16.07,
"learning_rate": 0.0001395962422546472,
"loss": 0.8323,
"step": 537000
},
{
"epoch": 16.08,
"learning_rate": 0.00013944633220067958,
"loss": 0.8541,
"step": 537500
},
{
"epoch": 16.1,
"learning_rate": 0.00013929642214671197,
"loss": 0.8327,
"step": 538000
},
{
"epoch": 16.11,
"learning_rate": 0.00013914651209274433,
"loss": 0.8329,
"step": 538500
},
{
"epoch": 16.13,
"learning_rate": 0.00013899660203877672,
"loss": 0.8303,
"step": 539000
},
{
"epoch": 16.14,
"learning_rate": 0.00013884669198480909,
"loss": 0.8462,
"step": 539500
},
{
"epoch": 16.16,
"learning_rate": 0.00013869678193084147,
"loss": 0.8202,
"step": 540000
},
{
"epoch": 16.17,
"learning_rate": 0.00013854687187687386,
"loss": 0.8453,
"step": 540500
},
{
"epoch": 16.19,
"learning_rate": 0.00013839696182290625,
"loss": 0.8205,
"step": 541000
},
{
"epoch": 16.2,
"learning_rate": 0.00013824705176893864,
"loss": 0.8206,
"step": 541500
},
{
"epoch": 16.22,
"learning_rate": 0.000138097141714971,
"loss": 0.8158,
"step": 542000
},
{
"epoch": 16.23,
"learning_rate": 0.0001379472316610034,
"loss": 0.847,
"step": 542500
},
{
"epoch": 16.25,
"learning_rate": 0.00013779732160703576,
"loss": 0.8279,
"step": 543000
},
{
"epoch": 16.26,
"learning_rate": 0.00013764741155306815,
"loss": 0.8234,
"step": 543500
},
{
"epoch": 16.28,
"learning_rate": 0.00013749750149910054,
"loss": 0.8109,
"step": 544000
},
{
"epoch": 16.29,
"learning_rate": 0.00013734759144513293,
"loss": 0.8215,
"step": 544500
},
{
"epoch": 16.31,
"learning_rate": 0.0001371976813911653,
"loss": 0.8124,
"step": 545000
},
{
"epoch": 16.32,
"learning_rate": 0.00013704777133719768,
"loss": 0.8266,
"step": 545500
},
{
"epoch": 16.34,
"learning_rate": 0.00013689786128323004,
"loss": 0.8288,
"step": 546000
},
{
"epoch": 16.35,
"learning_rate": 0.00013674795122926243,
"loss": 0.8218,
"step": 546500
},
{
"epoch": 16.37,
"learning_rate": 0.00013659804117529482,
"loss": 0.8223,
"step": 547000
},
{
"epoch": 16.38,
"learning_rate": 0.0001364481311213272,
"loss": 0.8328,
"step": 547500
},
{
"epoch": 16.4,
"learning_rate": 0.00013629822106735957,
"loss": 0.8132,
"step": 548000
},
{
"epoch": 16.41,
"learning_rate": 0.00013614831101339196,
"loss": 0.8351,
"step": 548500
},
{
"epoch": 16.43,
"learning_rate": 0.00013599840095942432,
"loss": 0.8316,
"step": 549000
},
{
"epoch": 16.44,
"learning_rate": 0.0001358484909054567,
"loss": 0.8163,
"step": 549500
},
{
"epoch": 16.46,
"learning_rate": 0.0001356985808514891,
"loss": 0.8258,
"step": 550000
},
{
"epoch": 16.47,
"learning_rate": 0.0001355486707975215,
"loss": 0.8321,
"step": 550500
},
{
"epoch": 16.49,
"learning_rate": 0.00013539876074355385,
"loss": 0.8229,
"step": 551000
},
{
"epoch": 16.5,
"learning_rate": 0.00013524885068958624,
"loss": 0.8357,
"step": 551500
},
{
"epoch": 16.52,
"learning_rate": 0.0001350989406356186,
"loss": 0.8641,
"step": 552000
},
{
"epoch": 16.53,
"learning_rate": 0.000134949030581651,
"loss": 0.8429,
"step": 552500
},
{
"epoch": 16.55,
"learning_rate": 0.00013479912052768338,
"loss": 0.807,
"step": 553000
},
{
"epoch": 16.56,
"learning_rate": 0.00013464921047371577,
"loss": 0.8377,
"step": 553500
},
{
"epoch": 16.58,
"learning_rate": 0.00013449930041974813,
"loss": 0.8206,
"step": 554000
},
{
"epoch": 16.59,
"learning_rate": 0.00013434939036578052,
"loss": 0.8149,
"step": 554500
},
{
"epoch": 16.61,
"learning_rate": 0.00013419948031181289,
"loss": 0.8204,
"step": 555000
},
{
"epoch": 16.62,
"learning_rate": 0.00013404957025784528,
"loss": 0.8339,
"step": 555500
},
{
"epoch": 16.64,
"learning_rate": 0.00013389966020387766,
"loss": 0.8129,
"step": 556000
},
{
"epoch": 16.65,
"learning_rate": 0.00013374975014991005,
"loss": 0.8141,
"step": 556500
},
{
"epoch": 16.67,
"learning_rate": 0.00013359984009594242,
"loss": 0.8182,
"step": 557000
},
{
"epoch": 16.68,
"learning_rate": 0.0001334499300419748,
"loss": 0.8326,
"step": 557500
},
{
"epoch": 16.7,
"learning_rate": 0.00013330001998800717,
"loss": 0.815,
"step": 558000
},
{
"epoch": 16.71,
"learning_rate": 0.00013315010993403956,
"loss": 0.8301,
"step": 558500
},
{
"epoch": 16.73,
"learning_rate": 0.00013300019988007195,
"loss": 0.8261,
"step": 559000
},
{
"epoch": 16.74,
"learning_rate": 0.00013285028982610434,
"loss": 0.8431,
"step": 559500
},
{
"epoch": 16.76,
"learning_rate": 0.0001327003797721367,
"loss": 0.8281,
"step": 560000
},
{
"epoch": 16.77,
"learning_rate": 0.0001325504697181691,
"loss": 0.8453,
"step": 560500
},
{
"epoch": 16.79,
"learning_rate": 0.00013240055966420145,
"loss": 0.8377,
"step": 561000
},
{
"epoch": 16.8,
"learning_rate": 0.00013225064961023384,
"loss": 0.8218,
"step": 561500
},
{
"epoch": 16.82,
"learning_rate": 0.00013210073955626623,
"loss": 0.83,
"step": 562000
},
{
"epoch": 16.83,
"learning_rate": 0.00013195082950229862,
"loss": 0.8409,
"step": 562500
},
{
"epoch": 16.85,
"learning_rate": 0.00013180091944833098,
"loss": 0.8288,
"step": 563000
},
{
"epoch": 16.86,
"learning_rate": 0.00013165100939436337,
"loss": 0.8411,
"step": 563500
},
{
"epoch": 16.88,
"learning_rate": 0.00013150109934039573,
"loss": 0.8228,
"step": 564000
},
{
"epoch": 16.89,
"learning_rate": 0.00013135118928642812,
"loss": 0.8482,
"step": 564500
},
{
"epoch": 16.91,
"learning_rate": 0.0001312012792324605,
"loss": 0.8383,
"step": 565000
},
{
"epoch": 16.92,
"learning_rate": 0.0001310513691784929,
"loss": 0.8329,
"step": 565500
},
{
"epoch": 16.94,
"learning_rate": 0.00013090145912452526,
"loss": 0.8434,
"step": 566000
},
{
"epoch": 16.95,
"learning_rate": 0.00013075154907055765,
"loss": 0.8338,
"step": 566500
},
{
"epoch": 16.97,
"learning_rate": 0.00013060163901659004,
"loss": 0.8355,
"step": 567000
},
{
"epoch": 16.98,
"learning_rate": 0.0001304517289626224,
"loss": 0.8423,
"step": 567500
},
{
"epoch": 17.0,
"learning_rate": 0.0001303018189086548,
"loss": 0.8477,
"step": 568000
},
{
"epoch": 17.01,
"learning_rate": 0.00013015190885468718,
"loss": 0.791,
"step": 568500
},
{
"epoch": 17.03,
"learning_rate": 0.00013000199880071957,
"loss": 0.7844,
"step": 569000
},
{
"epoch": 17.04,
"learning_rate": 0.00012985208874675193,
"loss": 0.837,
"step": 569500
},
{
"epoch": 17.06,
"learning_rate": 0.00012970217869278432,
"loss": 0.7895,
"step": 570000
},
{
"epoch": 17.07,
"learning_rate": 0.00012955226863881669,
"loss": 0.8245,
"step": 570500
},
{
"epoch": 17.09,
"learning_rate": 0.00012940235858484908,
"loss": 0.8173,
"step": 571000
},
{
"epoch": 17.1,
"learning_rate": 0.00012925244853088146,
"loss": 0.8003,
"step": 571500
},
{
"epoch": 17.12,
"learning_rate": 0.00012910253847691385,
"loss": 0.8038,
"step": 572000
},
{
"epoch": 17.13,
"learning_rate": 0.00012895262842294622,
"loss": 0.8083,
"step": 572500
},
{
"epoch": 17.15,
"learning_rate": 0.0001288027183689786,
"loss": 0.8155,
"step": 573000
},
{
"epoch": 17.16,
"learning_rate": 0.00012865280831501097,
"loss": 0.807,
"step": 573500
},
{
"epoch": 17.18,
"learning_rate": 0.00012850289826104336,
"loss": 0.795,
"step": 574000
},
{
"epoch": 17.19,
"learning_rate": 0.00012835298820707575,
"loss": 0.7878,
"step": 574500
},
{
"epoch": 17.21,
"learning_rate": 0.00012820307815310814,
"loss": 0.8038,
"step": 575000
},
{
"epoch": 17.22,
"learning_rate": 0.0001280531680991405,
"loss": 0.8051,
"step": 575500
},
{
"epoch": 17.23,
"learning_rate": 0.0001279032580451729,
"loss": 0.8151,
"step": 576000
},
{
"epoch": 17.25,
"learning_rate": 0.00012775334799120525,
"loss": 0.8015,
"step": 576500
},
{
"epoch": 17.26,
"learning_rate": 0.00012760343793723764,
"loss": 0.8142,
"step": 577000
},
{
"epoch": 17.28,
"learning_rate": 0.00012745352788327003,
"loss": 0.813,
"step": 577500
},
{
"epoch": 17.29,
"learning_rate": 0.00012730361782930242,
"loss": 0.815,
"step": 578000
},
{
"epoch": 17.31,
"learning_rate": 0.00012715370777533478,
"loss": 0.8185,
"step": 578500
},
{
"epoch": 17.32,
"learning_rate": 0.00012700379772136717,
"loss": 0.8199,
"step": 579000
},
{
"epoch": 17.34,
"learning_rate": 0.00012685388766739953,
"loss": 0.7992,
"step": 579500
},
{
"epoch": 17.35,
"learning_rate": 0.00012670397761343192,
"loss": 0.819,
"step": 580000
},
{
"epoch": 17.37,
"learning_rate": 0.0001265540675594643,
"loss": 0.8076,
"step": 580500
},
{
"epoch": 17.38,
"learning_rate": 0.0001264041575054967,
"loss": 0.8015,
"step": 581000
},
{
"epoch": 17.4,
"learning_rate": 0.00012625424745152906,
"loss": 0.7965,
"step": 581500
},
{
"epoch": 17.41,
"learning_rate": 0.00012610433739756145,
"loss": 0.8002,
"step": 582000
},
{
"epoch": 17.43,
"learning_rate": 0.00012595442734359381,
"loss": 0.8406,
"step": 582500
},
{
"epoch": 17.44,
"learning_rate": 0.0001258045172896262,
"loss": 0.7868,
"step": 583000
},
{
"epoch": 17.46,
"learning_rate": 0.0001256546072356586,
"loss": 0.8061,
"step": 583500
},
{
"epoch": 17.47,
"learning_rate": 0.00012550469718169098,
"loss": 0.7948,
"step": 584000
},
{
"epoch": 17.49,
"learning_rate": 0.00012535478712772334,
"loss": 0.8087,
"step": 584500
},
{
"epoch": 17.5,
"learning_rate": 0.00012520487707375573,
"loss": 0.7958,
"step": 585000
},
{
"epoch": 17.52,
"learning_rate": 0.00012505496701978812,
"loss": 0.8001,
"step": 585500
},
{
"epoch": 17.53,
"learning_rate": 0.00012490505696582049,
"loss": 0.8074,
"step": 586000
},
{
"epoch": 17.55,
"learning_rate": 0.00012475514691185288,
"loss": 0.7942,
"step": 586500
},
{
"epoch": 17.56,
"learning_rate": 0.00012460523685788526,
"loss": 0.8019,
"step": 587000
},
{
"epoch": 17.58,
"learning_rate": 0.00012445532680391763,
"loss": 0.8053,
"step": 587500
},
{
"epoch": 17.59,
"learning_rate": 0.00012430541674995002,
"loss": 0.7859,
"step": 588000
},
{
"epoch": 17.61,
"learning_rate": 0.0001241555066959824,
"loss": 0.8108,
"step": 588500
},
{
"epoch": 17.62,
"learning_rate": 0.00012400559664201477,
"loss": 0.7906,
"step": 589000
},
{
"epoch": 17.64,
"learning_rate": 0.00012385568658804716,
"loss": 0.796,
"step": 589500
},
{
"epoch": 17.65,
"learning_rate": 0.00012370577653407955,
"loss": 0.804,
"step": 590000
},
{
"epoch": 17.67,
"learning_rate": 0.0001235558664801119,
"loss": 0.8052,
"step": 590500
},
{
"epoch": 17.68,
"learning_rate": 0.0001234059564261443,
"loss": 0.8186,
"step": 591000
},
{
"epoch": 17.7,
"learning_rate": 0.0001232560463721767,
"loss": 0.7831,
"step": 591500
},
{
"epoch": 17.71,
"learning_rate": 0.00012310613631820908,
"loss": 0.8121,
"step": 592000
},
{
"epoch": 17.73,
"learning_rate": 0.00012295622626424144,
"loss": 0.818,
"step": 592500
},
{
"epoch": 17.74,
"learning_rate": 0.00012280631621027383,
"loss": 0.837,
"step": 593000
},
{
"epoch": 17.76,
"learning_rate": 0.0001226564061563062,
"loss": 0.801,
"step": 593500
},
{
"epoch": 17.77,
"learning_rate": 0.00012250649610233858,
"loss": 0.823,
"step": 594000
},
{
"epoch": 17.79,
"learning_rate": 0.00012235658604837097,
"loss": 0.8078,
"step": 594500
},
{
"epoch": 17.8,
"learning_rate": 0.00012220667599440336,
"loss": 0.8081,
"step": 595000
},
{
"epoch": 17.82,
"learning_rate": 0.00012205676594043574,
"loss": 0.821,
"step": 595500
},
{
"epoch": 17.83,
"learning_rate": 0.00012190685588646811,
"loss": 0.8113,
"step": 596000
},
{
"epoch": 17.85,
"learning_rate": 0.0001217569458325005,
"loss": 0.8105,
"step": 596500
},
{
"epoch": 17.86,
"learning_rate": 0.00012160703577853286,
"loss": 0.7872,
"step": 597000
},
{
"epoch": 17.88,
"learning_rate": 0.00012145712572456525,
"loss": 0.823,
"step": 597500
},
{
"epoch": 17.89,
"learning_rate": 0.00012130721567059763,
"loss": 0.8174,
"step": 598000
},
{
"epoch": 17.91,
"learning_rate": 0.00012115730561663002,
"loss": 0.8317,
"step": 598500
},
{
"epoch": 17.92,
"learning_rate": 0.0001210073955626624,
"loss": 0.812,
"step": 599000
},
{
"epoch": 17.94,
"learning_rate": 0.00012085748550869478,
"loss": 0.8176,
"step": 599500
},
{
"epoch": 17.95,
"learning_rate": 0.00012070757545472715,
"loss": 0.8172,
"step": 600000
},
{
"epoch": 17.97,
"learning_rate": 0.00012055766540075953,
"loss": 0.8251,
"step": 600500
},
{
"epoch": 17.98,
"learning_rate": 0.00012040775534679191,
"loss": 0.8163,
"step": 601000
},
{
"epoch": 18.0,
"learning_rate": 0.0001202578452928243,
"loss": 0.8245,
"step": 601500
},
{
"epoch": 18.01,
"learning_rate": 0.00012010793523885668,
"loss": 0.7855,
"step": 602000
},
{
"epoch": 18.03,
"learning_rate": 0.00011995802518488907,
"loss": 0.7866,
"step": 602500
},
{
"epoch": 18.04,
"learning_rate": 0.00011980811513092143,
"loss": 0.7867,
"step": 603000
},
{
"epoch": 18.06,
"learning_rate": 0.00011965820507695382,
"loss": 0.774,
"step": 603500
},
{
"epoch": 18.07,
"learning_rate": 0.00011950829502298619,
"loss": 0.7726,
"step": 604000
},
{
"epoch": 18.09,
"learning_rate": 0.00011935838496901858,
"loss": 0.7776,
"step": 604500
},
{
"epoch": 18.1,
"learning_rate": 0.00011920847491505096,
"loss": 0.7951,
"step": 605000
},
{
"epoch": 18.12,
"learning_rate": 0.00011905856486108335,
"loss": 0.7891,
"step": 605500
},
{
"epoch": 18.13,
"learning_rate": 0.00011890865480711572,
"loss": 0.7854,
"step": 606000
},
{
"epoch": 18.15,
"learning_rate": 0.0001187587447531481,
"loss": 0.7797,
"step": 606500
},
{
"epoch": 18.16,
"learning_rate": 0.00011860883469918047,
"loss": 0.7791,
"step": 607000
},
{
"epoch": 18.18,
"learning_rate": 0.00011845892464521286,
"loss": 0.7851,
"step": 607500
},
{
"epoch": 18.19,
"learning_rate": 0.00011830901459124524,
"loss": 0.7862,
"step": 608000
},
{
"epoch": 18.21,
"learning_rate": 0.00011815910453727763,
"loss": 0.7797,
"step": 608500
},
{
"epoch": 18.22,
"learning_rate": 0.00011800919448331,
"loss": 0.7657,
"step": 609000
},
{
"epoch": 18.24,
"learning_rate": 0.00011785928442934238,
"loss": 0.7805,
"step": 609500
},
{
"epoch": 18.25,
"learning_rate": 0.00011770937437537476,
"loss": 0.769,
"step": 610000
},
{
"epoch": 18.27,
"learning_rate": 0.00011755946432140715,
"loss": 0.7749,
"step": 610500
},
{
"epoch": 18.28,
"learning_rate": 0.00011740955426743952,
"loss": 0.7916,
"step": 611000
},
{
"epoch": 18.3,
"learning_rate": 0.00011725964421347191,
"loss": 0.7865,
"step": 611500
},
{
"epoch": 18.31,
"learning_rate": 0.00011710973415950429,
"loss": 0.7756,
"step": 612000
},
{
"epoch": 18.33,
"learning_rate": 0.00011695982410553668,
"loss": 0.7845,
"step": 612500
},
{
"epoch": 18.34,
"learning_rate": 0.00011680991405156904,
"loss": 0.7873,
"step": 613000
},
{
"epoch": 18.36,
"learning_rate": 0.00011666000399760143,
"loss": 0.7715,
"step": 613500
},
{
"epoch": 18.37,
"learning_rate": 0.0001165100939436338,
"loss": 0.8001,
"step": 614000
},
{
"epoch": 18.39,
"learning_rate": 0.0001163601838896662,
"loss": 0.7767,
"step": 614500
},
{
"epoch": 18.4,
"learning_rate": 0.00011621027383569857,
"loss": 0.77,
"step": 615000
},
{
"epoch": 18.42,
"learning_rate": 0.00011606036378173096,
"loss": 0.7755,
"step": 615500
},
{
"epoch": 18.43,
"learning_rate": 0.00011591045372776332,
"loss": 0.7775,
"step": 616000
},
{
"epoch": 18.45,
"learning_rate": 0.00011576054367379571,
"loss": 0.782,
"step": 616500
},
{
"epoch": 18.46,
"learning_rate": 0.00011561063361982809,
"loss": 0.7852,
"step": 617000
},
{
"epoch": 18.48,
"learning_rate": 0.00011546072356586048,
"loss": 0.7937,
"step": 617500
},
{
"epoch": 18.49,
"learning_rate": 0.00011531081351189285,
"loss": 0.7965,
"step": 618000
},
{
"epoch": 18.51,
"learning_rate": 0.00011516090345792524,
"loss": 0.7754,
"step": 618500
},
{
"epoch": 18.52,
"learning_rate": 0.0001150109934039576,
"loss": 0.7859,
"step": 619000
},
{
"epoch": 18.54,
"learning_rate": 0.00011486108334998999,
"loss": 0.7706,
"step": 619500
},
{
"epoch": 18.55,
"learning_rate": 0.00011471117329602237,
"loss": 0.7759,
"step": 620000
},
{
"epoch": 18.57,
"learning_rate": 0.00011456126324205476,
"loss": 0.7903,
"step": 620500
},
{
"epoch": 18.58,
"learning_rate": 0.00011441135318808715,
"loss": 0.7997,
"step": 621000
},
{
"epoch": 18.6,
"learning_rate": 0.00011426144313411952,
"loss": 0.7801,
"step": 621500
},
{
"epoch": 18.61,
"learning_rate": 0.00011411153308015191,
"loss": 0.7991,
"step": 622000
},
{
"epoch": 18.63,
"learning_rate": 0.00011396162302618427,
"loss": 0.7887,
"step": 622500
},
{
"epoch": 18.64,
"learning_rate": 0.00011381171297221666,
"loss": 0.7778,
"step": 623000
},
{
"epoch": 18.66,
"learning_rate": 0.00011366180291824904,
"loss": 0.7711,
"step": 623500
},
{
"epoch": 18.67,
"learning_rate": 0.00011351189286428143,
"loss": 0.7817,
"step": 624000
},
{
"epoch": 18.69,
"learning_rate": 0.0001133619828103138,
"loss": 0.7875,
"step": 624500
},
{
"epoch": 18.7,
"learning_rate": 0.0001132120727563462,
"loss": 0.7986,
"step": 625000
},
{
"epoch": 18.72,
"learning_rate": 0.00011306216270237856,
"loss": 0.7796,
"step": 625500
},
{
"epoch": 18.73,
"learning_rate": 0.00011291225264841095,
"loss": 0.7672,
"step": 626000
},
{
"epoch": 18.75,
"learning_rate": 0.00011276234259444332,
"loss": 0.7805,
"step": 626500
},
{
"epoch": 18.76,
"learning_rate": 0.00011261243254047571,
"loss": 0.793,
"step": 627000
},
{
"epoch": 18.78,
"learning_rate": 0.00011246252248650809,
"loss": 0.7845,
"step": 627500
},
{
"epoch": 18.79,
"learning_rate": 0.00011231261243254048,
"loss": 0.7806,
"step": 628000
},
{
"epoch": 18.81,
"learning_rate": 0.00011216270237857284,
"loss": 0.7976,
"step": 628500
},
{
"epoch": 18.82,
"learning_rate": 0.00011201279232460523,
"loss": 0.7803,
"step": 629000
},
{
"epoch": 18.84,
"learning_rate": 0.0001118628822706376,
"loss": 0.7834,
"step": 629500
},
{
"epoch": 18.85,
"learning_rate": 0.00011171297221667,
"loss": 0.813,
"step": 630000
},
{
"epoch": 18.87,
"learning_rate": 0.00011156306216270237,
"loss": 0.7723,
"step": 630500
},
{
"epoch": 18.88,
"learning_rate": 0.00011141315210873476,
"loss": 0.802,
"step": 631000
},
{
"epoch": 18.9,
"learning_rate": 0.00011126324205476712,
"loss": 0.7794,
"step": 631500
},
{
"epoch": 18.91,
"learning_rate": 0.00011111333200079951,
"loss": 0.7882,
"step": 632000
},
{
"epoch": 18.93,
"learning_rate": 0.00011096342194683189,
"loss": 0.786,
"step": 632500
},
{
"epoch": 18.94,
"learning_rate": 0.00011081351189286428,
"loss": 0.7925,
"step": 633000
},
{
"epoch": 18.96,
"learning_rate": 0.00011066360183889665,
"loss": 0.7908,
"step": 633500
},
{
"epoch": 18.97,
"learning_rate": 0.00011051369178492904,
"loss": 0.8137,
"step": 634000
},
{
"epoch": 18.99,
"learning_rate": 0.0001103637817309614,
"loss": 0.7613,
"step": 634500
},
{
"epoch": 19.0,
"learning_rate": 0.00011021387167699379,
"loss": 0.7876,
"step": 635000
},
{
"epoch": 19.02,
"learning_rate": 0.00011006396162302617,
"loss": 0.7526,
"step": 635500
},
{
"epoch": 19.03,
"learning_rate": 0.00010991405156905856,
"loss": 0.7465,
"step": 636000
},
{
"epoch": 19.05,
"learning_rate": 0.00010976414151509093,
"loss": 0.7527,
"step": 636500
},
{
"epoch": 19.06,
"learning_rate": 0.00010961423146112332,
"loss": 0.7546,
"step": 637000
},
{
"epoch": 19.08,
"learning_rate": 0.00010946432140715569,
"loss": 0.7488,
"step": 637500
},
{
"epoch": 19.09,
"learning_rate": 0.00010931441135318808,
"loss": 0.7402,
"step": 638000
},
{
"epoch": 19.11,
"learning_rate": 0.00010916450129922045,
"loss": 0.7522,
"step": 638500
},
{
"epoch": 19.12,
"learning_rate": 0.00010901459124525284,
"loss": 0.7469,
"step": 639000
},
{
"epoch": 19.13,
"learning_rate": 0.00010886468119128522,
"loss": 0.7611,
"step": 639500
},
{
"epoch": 19.15,
"learning_rate": 0.0001087147711373176,
"loss": 0.7638,
"step": 640000
},
{
"epoch": 19.16,
"learning_rate": 0.00010856486108334997,
"loss": 0.7631,
"step": 640500
},
{
"epoch": 19.18,
"learning_rate": 0.00010841495102938236,
"loss": 0.7469,
"step": 641000
},
{
"epoch": 19.19,
"learning_rate": 0.00010826504097541473,
"loss": 0.7798,
"step": 641500
},
{
"epoch": 19.21,
"learning_rate": 0.00010811513092144712,
"loss": 0.7436,
"step": 642000
},
{
"epoch": 19.22,
"learning_rate": 0.0001079652208674795,
"loss": 0.7482,
"step": 642500
},
{
"epoch": 19.24,
"learning_rate": 0.00010781531081351189,
"loss": 0.7438,
"step": 643000
},
{
"epoch": 19.25,
"learning_rate": 0.00010766540075954426,
"loss": 0.7723,
"step": 643500
},
{
"epoch": 19.27,
"learning_rate": 0.00010751549070557664,
"loss": 0.7569,
"step": 644000
},
{
"epoch": 19.28,
"learning_rate": 0.00010736558065160902,
"loss": 0.7418,
"step": 644500
},
{
"epoch": 19.3,
"learning_rate": 0.0001072156705976414,
"loss": 0.777,
"step": 645000
},
{
"epoch": 19.31,
"learning_rate": 0.00010706576054367378,
"loss": 0.777,
"step": 645500
},
{
"epoch": 19.33,
"learning_rate": 0.00010691585048970617,
"loss": 0.7607,
"step": 646000
},
{
"epoch": 19.34,
"learning_rate": 0.00010676594043573855,
"loss": 0.7646,
"step": 646500
},
{
"epoch": 19.36,
"learning_rate": 0.00010661603038177092,
"loss": 0.7722,
"step": 647000
},
{
"epoch": 19.37,
"learning_rate": 0.0001064661203278033,
"loss": 0.7728,
"step": 647500
},
{
"epoch": 19.39,
"learning_rate": 0.00010631621027383569,
"loss": 0.7713,
"step": 648000
},
{
"epoch": 19.4,
"learning_rate": 0.00010616630021986808,
"loss": 0.769,
"step": 648500
},
{
"epoch": 19.42,
"learning_rate": 0.00010601639016590045,
"loss": 0.7701,
"step": 649000
},
{
"epoch": 19.43,
"learning_rate": 0.00010586648011193284,
"loss": 0.7586,
"step": 649500
},
{
"epoch": 19.45,
"learning_rate": 0.00010571657005796522,
"loss": 0.7718,
"step": 650000
},
{
"epoch": 19.46,
"learning_rate": 0.00010556666000399759,
"loss": 0.7559,
"step": 650500
},
{
"epoch": 19.48,
"learning_rate": 0.00010541674995002997,
"loss": 0.7584,
"step": 651000
},
{
"epoch": 19.49,
"learning_rate": 0.00010526683989606236,
"loss": 0.7553,
"step": 651500
},
{
"epoch": 19.51,
"learning_rate": 0.00010511692984209473,
"loss": 0.7665,
"step": 652000
},
{
"epoch": 19.52,
"learning_rate": 0.00010496701978812712,
"loss": 0.7818,
"step": 652500
},
{
"epoch": 19.54,
"learning_rate": 0.0001048171097341595,
"loss": 0.769,
"step": 653000
},
{
"epoch": 19.55,
"learning_rate": 0.00010466719968019189,
"loss": 0.78,
"step": 653500
},
{
"epoch": 19.57,
"learning_rate": 0.00010451728962622425,
"loss": 0.7654,
"step": 654000
},
{
"epoch": 19.58,
"learning_rate": 0.00010436737957225664,
"loss": 0.7674,
"step": 654500
},
{
"epoch": 19.6,
"learning_rate": 0.00010421746951828902,
"loss": 0.7649,
"step": 655000
},
{
"epoch": 19.61,
"learning_rate": 0.0001040675594643214,
"loss": 0.7472,
"step": 655500
},
{
"epoch": 19.63,
"learning_rate": 0.00010391764941035378,
"loss": 0.7706,
"step": 656000
},
{
"epoch": 19.64,
"learning_rate": 0.00010376773935638617,
"loss": 0.7545,
"step": 656500
},
{
"epoch": 19.66,
"learning_rate": 0.00010361782930241853,
"loss": 0.7544,
"step": 657000
},
{
"epoch": 19.67,
"learning_rate": 0.00010346791924845092,
"loss": 0.7495,
"step": 657500
},
{
"epoch": 19.69,
"learning_rate": 0.0001033180091944833,
"loss": 0.7769,
"step": 658000
},
{
"epoch": 19.7,
"learning_rate": 0.00010316809914051569,
"loss": 0.7843,
"step": 658500
},
{
"epoch": 19.72,
"learning_rate": 0.00010301818908654806,
"loss": 0.7679,
"step": 659000
},
{
"epoch": 19.73,
"learning_rate": 0.00010286827903258045,
"loss": 0.7687,
"step": 659500
},
{
"epoch": 19.75,
"learning_rate": 0.00010271836897861282,
"loss": 0.7832,
"step": 660000
},
{
"epoch": 19.76,
"learning_rate": 0.0001025684589246452,
"loss": 0.778,
"step": 660500
},
{
"epoch": 19.78,
"learning_rate": 0.00010241854887067758,
"loss": 0.77,
"step": 661000
},
{
"epoch": 19.79,
"learning_rate": 0.00010226863881670997,
"loss": 0.7664,
"step": 661500
},
{
"epoch": 19.81,
"learning_rate": 0.00010211872876274235,
"loss": 0.7876,
"step": 662000
},
{
"epoch": 19.82,
"learning_rate": 0.00010196881870877474,
"loss": 0.7612,
"step": 662500
},
{
"epoch": 19.84,
"learning_rate": 0.0001018189086548071,
"loss": 0.7588,
"step": 663000
},
{
"epoch": 19.85,
"learning_rate": 0.00010166899860083949,
"loss": 0.7595,
"step": 663500
},
{
"epoch": 19.87,
"learning_rate": 0.00010151908854687186,
"loss": 0.7599,
"step": 664000
},
{
"epoch": 19.88,
"learning_rate": 0.00010136917849290425,
"loss": 0.7689,
"step": 664500
},
{
"epoch": 19.9,
"learning_rate": 0.00010121926843893663,
"loss": 0.764,
"step": 665000
},
{
"epoch": 19.91,
"learning_rate": 0.00010106935838496902,
"loss": 0.7808,
"step": 665500
},
{
"epoch": 19.93,
"learning_rate": 0.00010091944833100138,
"loss": 0.7665,
"step": 666000
},
{
"epoch": 19.94,
"learning_rate": 0.00010076953827703377,
"loss": 0.7719,
"step": 666500
},
{
"epoch": 19.96,
"learning_rate": 0.00010061962822306614,
"loss": 0.7652,
"step": 667000
},
{
"epoch": 19.97,
"learning_rate": 0.00010046971816909853,
"loss": 0.7793,
"step": 667500
},
{
"epoch": 19.99,
"learning_rate": 0.00010031980811513091,
"loss": 0.7464,
"step": 668000
},
{
"epoch": 20.0,
"learning_rate": 0.0001001698980611633,
"loss": 0.7796,
"step": 668500
},
{
"epoch": 20.02,
"learning_rate": 0.00010001998800719566,
"loss": 0.724,
"step": 669000
},
{
"epoch": 20.03,
"learning_rate": 9.987007795322805e-05,
"loss": 0.7415,
"step": 669500
},
{
"epoch": 20.05,
"learning_rate": 9.972016789926043e-05,
"loss": 0.7282,
"step": 670000
},
{
"epoch": 20.06,
"learning_rate": 9.957025784529282e-05,
"loss": 0.7349,
"step": 670500
},
{
"epoch": 20.08,
"learning_rate": 9.942034779132519e-05,
"loss": 0.7546,
"step": 671000
},
{
"epoch": 20.09,
"learning_rate": 9.927043773735758e-05,
"loss": 0.7421,
"step": 671500
},
{
"epoch": 20.11,
"learning_rate": 9.912052768338994e-05,
"loss": 0.726,
"step": 672000
},
{
"epoch": 20.12,
"learning_rate": 9.897061762942233e-05,
"loss": 0.725,
"step": 672500
},
{
"epoch": 20.14,
"learning_rate": 9.882070757545471e-05,
"loss": 0.7467,
"step": 673000
},
{
"epoch": 20.15,
"learning_rate": 9.86707975214871e-05,
"loss": 0.7317,
"step": 673500
},
{
"epoch": 20.17,
"learning_rate": 9.852088746751947e-05,
"loss": 0.7282,
"step": 674000
},
{
"epoch": 20.18,
"learning_rate": 9.837097741355186e-05,
"loss": 0.7455,
"step": 674500
},
{
"epoch": 20.2,
"learning_rate": 9.822106735958425e-05,
"loss": 0.7315,
"step": 675000
},
{
"epoch": 20.21,
"learning_rate": 9.807115730561662e-05,
"loss": 0.7273,
"step": 675500
},
{
"epoch": 20.23,
"learning_rate": 9.7921247251649e-05,
"loss": 0.7523,
"step": 676000
},
{
"epoch": 20.24,
"learning_rate": 9.777133719768138e-05,
"loss": 0.7213,
"step": 676500
},
{
"epoch": 20.26,
"learning_rate": 9.762142714371377e-05,
"loss": 0.7507,
"step": 677000
},
{
"epoch": 20.27,
"learning_rate": 9.747151708974615e-05,
"loss": 0.733,
"step": 677500
},
{
"epoch": 20.29,
"learning_rate": 9.732160703577854e-05,
"loss": 0.7152,
"step": 678000
},
{
"epoch": 20.3,
"learning_rate": 9.71716969818109e-05,
"loss": 0.7536,
"step": 678500
},
{
"epoch": 20.32,
"learning_rate": 9.702178692784329e-05,
"loss": 0.7405,
"step": 679000
},
{
"epoch": 20.33,
"learning_rate": 9.687187687387566e-05,
"loss": 0.7512,
"step": 679500
},
{
"epoch": 20.35,
"learning_rate": 9.672196681990805e-05,
"loss": 0.7611,
"step": 680000
},
{
"epoch": 20.36,
"learning_rate": 9.657205676594043e-05,
"loss": 0.7535,
"step": 680500
},
{
"epoch": 20.38,
"learning_rate": 9.642214671197282e-05,
"loss": 0.7368,
"step": 681000
},
{
"epoch": 20.39,
"learning_rate": 9.627223665800518e-05,
"loss": 0.7505,
"step": 681500
},
{
"epoch": 20.41,
"learning_rate": 9.612232660403757e-05,
"loss": 0.7415,
"step": 682000
},
{
"epoch": 20.42,
"learning_rate": 9.597241655006995e-05,
"loss": 0.7752,
"step": 682500
},
{
"epoch": 20.44,
"learning_rate": 9.582250649610233e-05,
"loss": 0.7458,
"step": 683000
},
{
"epoch": 20.45,
"learning_rate": 9.567259644213471e-05,
"loss": 0.7421,
"step": 683500
},
{
"epoch": 20.47,
"learning_rate": 9.55226863881671e-05,
"loss": 0.7389,
"step": 684000
},
{
"epoch": 20.48,
"learning_rate": 9.537277633419948e-05,
"loss": 0.7445,
"step": 684500
},
{
"epoch": 20.5,
"learning_rate": 9.522286628023185e-05,
"loss": 0.741,
"step": 685000
},
{
"epoch": 20.51,
"learning_rate": 9.507295622626423e-05,
"loss": 0.7429,
"step": 685500
},
{
"epoch": 20.53,
"learning_rate": 9.492304617229662e-05,
"loss": 0.7493,
"step": 686000
},
{
"epoch": 20.54,
"learning_rate": 9.477313611832899e-05,
"loss": 0.7443,
"step": 686500
},
{
"epoch": 20.56,
"learning_rate": 9.462322606436138e-05,
"loss": 0.7411,
"step": 687000
},
{
"epoch": 20.57,
"learning_rate": 9.447331601039376e-05,
"loss": 0.7465,
"step": 687500
},
{
"epoch": 20.59,
"learning_rate": 9.432340595642613e-05,
"loss": 0.751,
"step": 688000
},
{
"epoch": 20.6,
"learning_rate": 9.417349590245851e-05,
"loss": 0.7332,
"step": 688500
},
{
"epoch": 20.62,
"learning_rate": 9.40235858484909e-05,
"loss": 0.7249,
"step": 689000
},
{
"epoch": 20.63,
"learning_rate": 9.387367579452327e-05,
"loss": 0.7333,
"step": 689500
},
{
"epoch": 20.65,
"learning_rate": 9.372376574055566e-05,
"loss": 0.75,
"step": 690000
},
{
"epoch": 20.66,
"learning_rate": 9.357385568658804e-05,
"loss": 0.7328,
"step": 690500
},
{
"epoch": 20.68,
"learning_rate": 9.342394563262043e-05,
"loss": 0.7474,
"step": 691000
},
{
"epoch": 20.69,
"learning_rate": 9.327403557865279e-05,
"loss": 0.7118,
"step": 691500
},
{
"epoch": 20.71,
"learning_rate": 9.312412552468518e-05,
"loss": 0.761,
"step": 692000
},
{
"epoch": 20.72,
"learning_rate": 9.297421547071756e-05,
"loss": 0.7404,
"step": 692500
},
{
"epoch": 20.74,
"learning_rate": 9.282430541674995e-05,
"loss": 0.7349,
"step": 693000
},
{
"epoch": 20.75,
"learning_rate": 9.267439536278232e-05,
"loss": 0.7429,
"step": 693500
},
{
"epoch": 20.77,
"learning_rate": 9.252448530881471e-05,
"loss": 0.7511,
"step": 694000
},
{
"epoch": 20.78,
"learning_rate": 9.237457525484707e-05,
"loss": 0.7384,
"step": 694500
},
{
"epoch": 20.8,
"learning_rate": 9.222466520087946e-05,
"loss": 0.7365,
"step": 695000
},
{
"epoch": 20.81,
"learning_rate": 9.207475514691184e-05,
"loss": 0.7331,
"step": 695500
},
{
"epoch": 20.83,
"learning_rate": 9.192484509294423e-05,
"loss": 0.7343,
"step": 696000
},
{
"epoch": 20.84,
"learning_rate": 9.17749350389766e-05,
"loss": 0.7515,
"step": 696500
},
{
"epoch": 20.86,
"learning_rate": 9.1625024985009e-05,
"loss": 0.7536,
"step": 697000
},
{
"epoch": 20.87,
"learning_rate": 9.147511493104136e-05,
"loss": 0.7434,
"step": 697500
},
{
"epoch": 20.89,
"learning_rate": 9.132520487707375e-05,
"loss": 0.742,
"step": 698000
},
{
"epoch": 20.9,
"learning_rate": 9.117529482310612e-05,
"loss": 0.7389,
"step": 698500
},
{
"epoch": 20.92,
"learning_rate": 9.102538476913851e-05,
"loss": 0.7434,
"step": 699000
},
{
"epoch": 20.93,
"learning_rate": 9.087547471517089e-05,
"loss": 0.74,
"step": 699500
},
{
"epoch": 20.95,
"learning_rate": 9.072556466120328e-05,
"loss": 0.7416,
"step": 700000
},
{
"epoch": 20.96,
"learning_rate": 9.057565460723564e-05,
"loss": 0.7508,
"step": 700500
},
{
"epoch": 20.98,
"learning_rate": 9.042574455326803e-05,
"loss": 0.7466,
"step": 701000
},
{
"epoch": 20.99,
"learning_rate": 9.027583449930042e-05,
"loss": 0.7605,
"step": 701500
},
{
"epoch": 21.01,
"learning_rate": 9.012592444533279e-05,
"loss": 0.7463,
"step": 702000
},
{
"epoch": 21.02,
"learning_rate": 8.997601439136518e-05,
"loss": 0.6942,
"step": 702500
},
{
"epoch": 21.03,
"learning_rate": 8.982610433739756e-05,
"loss": 0.7358,
"step": 703000
},
{
"epoch": 21.05,
"learning_rate": 8.967619428342995e-05,
"loss": 0.7123,
"step": 703500
},
{
"epoch": 21.06,
"learning_rate": 8.952628422946231e-05,
"loss": 0.7118,
"step": 704000
},
{
"epoch": 21.08,
"learning_rate": 8.93763741754947e-05,
"loss": 0.7199,
"step": 704500
},
{
"epoch": 21.09,
"learning_rate": 8.922646412152707e-05,
"loss": 0.7228,
"step": 705000
},
{
"epoch": 21.11,
"learning_rate": 8.907655406755946e-05,
"loss": 0.722,
"step": 705500
},
{
"epoch": 21.12,
"learning_rate": 8.892664401359184e-05,
"loss": 0.7148,
"step": 706000
},
{
"epoch": 21.14,
"learning_rate": 8.877673395962423e-05,
"loss": 0.7344,
"step": 706500
},
{
"epoch": 21.15,
"learning_rate": 8.862682390565659e-05,
"loss": 0.7272,
"step": 707000
},
{
"epoch": 21.17,
"learning_rate": 8.847691385168898e-05,
"loss": 0.7139,
"step": 707500
},
{
"epoch": 21.18,
"learning_rate": 8.832700379772136e-05,
"loss": 0.7126,
"step": 708000
},
{
"epoch": 21.2,
"learning_rate": 8.817709374375375e-05,
"loss": 0.7043,
"step": 708500
},
{
"epoch": 21.21,
"learning_rate": 8.802718368978612e-05,
"loss": 0.7254,
"step": 709000
},
{
"epoch": 21.23,
"learning_rate": 8.787727363581851e-05,
"loss": 0.7072,
"step": 709500
},
{
"epoch": 21.24,
"learning_rate": 8.772736358185087e-05,
"loss": 0.7152,
"step": 710000
},
{
"epoch": 21.26,
"learning_rate": 8.757745352788326e-05,
"loss": 0.7134,
"step": 710500
},
{
"epoch": 21.27,
"learning_rate": 8.742754347391564e-05,
"loss": 0.7019,
"step": 711000
},
{
"epoch": 21.29,
"learning_rate": 8.727763341994803e-05,
"loss": 0.7126,
"step": 711500
},
{
"epoch": 21.3,
"learning_rate": 8.71277233659804e-05,
"loss": 0.7193,
"step": 712000
},
{
"epoch": 21.32,
"learning_rate": 8.69778133120128e-05,
"loss": 0.7195,
"step": 712500
},
{
"epoch": 21.33,
"learning_rate": 8.682790325804516e-05,
"loss": 0.7284,
"step": 713000
},
{
"epoch": 21.35,
"learning_rate": 8.667799320407755e-05,
"loss": 0.7042,
"step": 713500
},
{
"epoch": 21.36,
"learning_rate": 8.652808315010992e-05,
"loss": 0.7415,
"step": 714000
},
{
"epoch": 21.38,
"learning_rate": 8.637817309614231e-05,
"loss": 0.7127,
"step": 714500
},
{
"epoch": 21.39,
"learning_rate": 8.622826304217469e-05,
"loss": 0.7264,
"step": 715000
},
{
"epoch": 21.41,
"learning_rate": 8.607835298820708e-05,
"loss": 0.7006,
"step": 715500
},
{
"epoch": 21.42,
"learning_rate": 8.592844293423944e-05,
"loss": 0.7225,
"step": 716000
},
{
"epoch": 21.44,
"learning_rate": 8.577853288027183e-05,
"loss": 0.7225,
"step": 716500
},
{
"epoch": 21.45,
"learning_rate": 8.56286228263042e-05,
"loss": 0.7278,
"step": 717000
},
{
"epoch": 21.47,
"learning_rate": 8.547871277233659e-05,
"loss": 0.7053,
"step": 717500
},
{
"epoch": 21.48,
"learning_rate": 8.532880271836897e-05,
"loss": 0.718,
"step": 718000
},
{
"epoch": 21.5,
"learning_rate": 8.517889266440136e-05,
"loss": 0.7299,
"step": 718500
},
{
"epoch": 21.51,
"learning_rate": 8.502898261043372e-05,
"loss": 0.7361,
"step": 719000
},
{
"epoch": 21.53,
"learning_rate": 8.487907255646611e-05,
"loss": 0.7269,
"step": 719500
},
{
"epoch": 21.54,
"learning_rate": 8.472916250249849e-05,
"loss": 0.7084,
"step": 720000
},
{
"epoch": 21.56,
"learning_rate": 8.457925244853087e-05,
"loss": 0.703,
"step": 720500
},
{
"epoch": 21.57,
"learning_rate": 8.442934239456325e-05,
"loss": 0.7262,
"step": 721000
},
{
"epoch": 21.59,
"learning_rate": 8.427943234059564e-05,
"loss": 0.7207,
"step": 721500
},
{
"epoch": 21.6,
"learning_rate": 8.412952228662802e-05,
"loss": 0.7139,
"step": 722000
},
{
"epoch": 21.62,
"learning_rate": 8.397961223266039e-05,
"loss": 0.7218,
"step": 722500
},
{
"epoch": 21.63,
"learning_rate": 8.382970217869277e-05,
"loss": 0.7413,
"step": 723000
},
{
"epoch": 21.65,
"learning_rate": 8.367979212472516e-05,
"loss": 0.7296,
"step": 723500
},
{
"epoch": 21.66,
"learning_rate": 8.352988207075753e-05,
"loss": 0.7139,
"step": 724000
},
{
"epoch": 21.68,
"learning_rate": 8.337997201678992e-05,
"loss": 0.7162,
"step": 724500
},
{
"epoch": 21.69,
"learning_rate": 8.32300619628223e-05,
"loss": 0.7115,
"step": 725000
},
{
"epoch": 21.71,
"learning_rate": 8.308015190885467e-05,
"loss": 0.7271,
"step": 725500
},
{
"epoch": 21.72,
"learning_rate": 8.293024185488705e-05,
"loss": 0.7199,
"step": 726000
},
{
"epoch": 21.74,
"learning_rate": 8.278033180091944e-05,
"loss": 0.7108,
"step": 726500
},
{
"epoch": 21.75,
"learning_rate": 8.263042174695182e-05,
"loss": 0.7193,
"step": 727000
},
{
"epoch": 21.77,
"learning_rate": 8.24805116929842e-05,
"loss": 0.737,
"step": 727500
},
{
"epoch": 21.78,
"learning_rate": 8.233060163901658e-05,
"loss": 0.7248,
"step": 728000
},
{
"epoch": 21.8,
"learning_rate": 8.218069158504897e-05,
"loss": 0.723,
"step": 728500
},
{
"epoch": 21.81,
"learning_rate": 8.203078153108135e-05,
"loss": 0.7185,
"step": 729000
},
{
"epoch": 21.83,
"learning_rate": 8.188087147711372e-05,
"loss": 0.7156,
"step": 729500
},
{
"epoch": 21.84,
"learning_rate": 8.173096142314611e-05,
"loss": 0.7256,
"step": 730000
},
{
"epoch": 21.86,
"learning_rate": 8.158105136917849e-05,
"loss": 0.7303,
"step": 730500
},
{
"epoch": 21.87,
"learning_rate": 8.143114131521088e-05,
"loss": 0.7176,
"step": 731000
},
{
"epoch": 21.89,
"learning_rate": 8.128123126124325e-05,
"loss": 0.7116,
"step": 731500
},
{
"epoch": 21.9,
"learning_rate": 8.113132120727564e-05,
"loss": 0.7327,
"step": 732000
},
{
"epoch": 21.92,
"learning_rate": 8.0981411153308e-05,
"loss": 0.7312,
"step": 732500
},
{
"epoch": 21.93,
"learning_rate": 8.083150109934039e-05,
"loss": 0.7383,
"step": 733000
},
{
"epoch": 21.95,
"learning_rate": 8.068159104537277e-05,
"loss": 0.7232,
"step": 733500
},
{
"epoch": 21.96,
"learning_rate": 8.053168099140516e-05,
"loss": 0.7309,
"step": 734000
},
{
"epoch": 21.98,
"learning_rate": 8.038177093743753e-05,
"loss": 0.7291,
"step": 734500
},
{
"epoch": 21.99,
"learning_rate": 8.023186088346992e-05,
"loss": 0.7378,
"step": 735000
},
{
"epoch": 22.01,
"learning_rate": 8.008195082950229e-05,
"loss": 0.7132,
"step": 735500
},
{
"epoch": 22.02,
"learning_rate": 7.993204077553468e-05,
"loss": 0.6955,
"step": 736000
},
{
"epoch": 22.04,
"learning_rate": 7.978213072156705e-05,
"loss": 0.6798,
"step": 736500
},
{
"epoch": 22.05,
"learning_rate": 7.963222066759944e-05,
"loss": 0.7013,
"step": 737000
},
{
"epoch": 22.07,
"learning_rate": 7.948231061363182e-05,
"loss": 0.6835,
"step": 737500
},
{
"epoch": 22.08,
"learning_rate": 7.93324005596642e-05,
"loss": 0.7122,
"step": 738000
},
{
"epoch": 22.1,
"learning_rate": 7.918249050569657e-05,
"loss": 0.6835,
"step": 738500
},
{
"epoch": 22.11,
"learning_rate": 7.903258045172896e-05,
"loss": 0.6997,
"step": 739000
},
{
"epoch": 22.13,
"learning_rate": 7.888267039776133e-05,
"loss": 0.7069,
"step": 739500
},
{
"epoch": 22.14,
"learning_rate": 7.873276034379372e-05,
"loss": 0.6875,
"step": 740000
},
{
"epoch": 22.16,
"learning_rate": 7.85828502898261e-05,
"loss": 0.7001,
"step": 740500
},
{
"epoch": 22.17,
"learning_rate": 7.843294023585849e-05,
"loss": 0.706,
"step": 741000
},
{
"epoch": 22.19,
"learning_rate": 7.828303018189085e-05,
"loss": 0.6986,
"step": 741500
},
{
"epoch": 22.2,
"learning_rate": 7.813312012792324e-05,
"loss": 0.6979,
"step": 742000
},
{
"epoch": 22.22,
"learning_rate": 7.798321007395562e-05,
"loss": 0.7109,
"step": 742500
},
{
"epoch": 22.23,
"learning_rate": 7.7833300019988e-05,
"loss": 0.6914,
"step": 743000
},
{
"epoch": 22.25,
"learning_rate": 7.768338996602038e-05,
"loss": 0.7009,
"step": 743500
},
{
"epoch": 22.26,
"learning_rate": 7.753347991205277e-05,
"loss": 0.6839,
"step": 744000
},
{
"epoch": 22.28,
"learning_rate": 7.738356985808513e-05,
"loss": 0.7103,
"step": 744500
},
{
"epoch": 22.29,
"learning_rate": 7.723365980411752e-05,
"loss": 0.6977,
"step": 745000
},
{
"epoch": 22.31,
"learning_rate": 7.70837497501499e-05,
"loss": 0.7084,
"step": 745500
},
{
"epoch": 22.32,
"learning_rate": 7.693383969618229e-05,
"loss": 0.7139,
"step": 746000
},
{
"epoch": 22.34,
"learning_rate": 7.678392964221466e-05,
"loss": 0.7109,
"step": 746500
},
{
"epoch": 22.35,
"learning_rate": 7.663401958824705e-05,
"loss": 0.6968,
"step": 747000
},
{
"epoch": 22.37,
"learning_rate": 7.648410953427941e-05,
"loss": 0.6996,
"step": 747500
},
{
"epoch": 22.38,
"learning_rate": 7.63341994803118e-05,
"loss": 0.7147,
"step": 748000
},
{
"epoch": 22.4,
"learning_rate": 7.618428942634418e-05,
"loss": 0.7082,
"step": 748500
},
{
"epoch": 22.41,
"learning_rate": 7.603437937237657e-05,
"loss": 0.7011,
"step": 749000
},
{
"epoch": 22.43,
"learning_rate": 7.588446931840894e-05,
"loss": 0.7058,
"step": 749500
},
{
"epoch": 22.44,
"learning_rate": 7.573455926444133e-05,
"loss": 0.7101,
"step": 750000
},
{
"epoch": 22.46,
"learning_rate": 7.55846492104737e-05,
"loss": 0.7117,
"step": 750500
},
{
"epoch": 22.47,
"learning_rate": 7.543473915650609e-05,
"loss": 0.6966,
"step": 751000
},
{
"epoch": 22.49,
"learning_rate": 7.528482910253846e-05,
"loss": 0.7064,
"step": 751500
},
{
"epoch": 22.5,
"learning_rate": 7.513491904857085e-05
|
"loss": 0.701, |
|
"step": 752000 |
|
}, |
|
{ |
|
"epoch": 22.52, |
|
"learning_rate": 7.498500899460323e-05, |
|
"loss": 0.7059, |
|
"step": 752500 |
|
}, |
|
{ |
|
"epoch": 22.53, |
|
"learning_rate": 7.483509894063562e-05, |
|
"loss": 0.6802, |
|
"step": 753000 |
|
}, |
|
{ |
|
"epoch": 22.55, |
|
"learning_rate": 7.468518888666799e-05, |
|
"loss": 0.699, |
|
"step": 753500 |
|
}, |
|
{ |
|
"epoch": 22.56, |
|
"learning_rate": 7.453527883270037e-05, |
|
"loss": 0.7006, |
|
"step": 754000 |
|
}, |
|
{ |
|
"epoch": 22.58, |
|
"learning_rate": 7.438536877873276e-05, |
|
"loss": 0.7014, |
|
"step": 754500 |
|
}, |
|
{ |
|
"epoch": 22.59, |
|
"learning_rate": 7.423545872476513e-05, |
|
"loss": 0.6971, |
|
"step": 755000 |
|
}, |
|
{ |
|
"epoch": 22.61, |
|
"learning_rate": 7.408554867079751e-05, |
|
"loss": 0.6937, |
|
"step": 755500 |
|
}, |
|
{ |
|
"epoch": 22.62, |
|
"learning_rate": 7.39356386168299e-05, |
|
"loss": 0.7018, |
|
"step": 756000 |
|
}, |
|
{ |
|
"epoch": 22.64, |
|
"learning_rate": 7.378572856286227e-05, |
|
"loss": 0.7045, |
|
"step": 756500 |
|
}, |
|
{ |
|
"epoch": 22.65, |
|
"learning_rate": 7.363581850889465e-05, |
|
"loss": 0.7015, |
|
"step": 757000 |
|
}, |
|
{ |
|
"epoch": 22.67, |
|
"learning_rate": 7.348590845492704e-05, |
|
"loss": 0.7073, |
|
"step": 757500 |
|
}, |
|
{ |
|
"epoch": 22.68, |
|
"learning_rate": 7.333599840095942e-05, |
|
"loss": 0.7053, |
|
"step": 758000 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"learning_rate": 7.318608834699179e-05, |
|
"loss": 0.7054, |
|
"step": 758500 |
|
}, |
|
{ |
|
"epoch": 22.71, |
|
"learning_rate": 7.303617829302418e-05, |
|
"loss": 0.7045, |
|
"step": 759000 |
|
}, |
|
{ |
|
"epoch": 22.73, |
|
"learning_rate": 7.288626823905656e-05, |
|
"loss": 0.72, |
|
"step": 759500 |
|
}, |
|
{ |
|
"epoch": 22.74, |
|
"learning_rate": 7.273635818508893e-05, |
|
"loss": 0.712, |
|
"step": 760000 |
|
}, |
|
{ |
|
"epoch": 22.76, |
|
"learning_rate": 7.258644813112132e-05, |
|
"loss": 0.6888, |
|
"step": 760500 |
|
}, |
|
{ |
|
"epoch": 22.77, |
|
"learning_rate": 7.24365380771537e-05, |
|
"loss": 0.6955, |
|
"step": 761000 |
|
}, |
|
{ |
|
"epoch": 22.79, |
|
"learning_rate": 7.228662802318607e-05, |
|
"loss": 0.7007, |
|
"step": 761500 |
|
}, |
|
{ |
|
"epoch": 22.8, |
|
"learning_rate": 7.213671796921846e-05, |
|
"loss": 0.7146, |
|
"step": 762000 |
|
}, |
|
{ |
|
"epoch": 22.82, |
|
"learning_rate": 7.198680791525084e-05, |
|
"loss": 0.7208, |
|
"step": 762500 |
|
}, |
|
{ |
|
"epoch": 22.83, |
|
"learning_rate": 7.183689786128323e-05, |
|
"loss": 0.685, |
|
"step": 763000 |
|
}, |
|
{ |
|
"epoch": 22.85, |
|
"learning_rate": 7.16869878073156e-05, |
|
"loss": 0.7014, |
|
"step": 763500 |
|
}, |
|
{ |
|
"epoch": 22.86, |
|
"learning_rate": 7.153707775334798e-05, |
|
"loss": 0.6998, |
|
"step": 764000 |
|
}, |
|
{ |
|
"epoch": 22.88, |
|
"learning_rate": 7.138716769938037e-05, |
|
"loss": 0.7147, |
|
"step": 764500 |
|
}, |
|
{ |
|
"epoch": 22.89, |
|
"learning_rate": 7.123725764541274e-05, |
|
"loss": 0.6832, |
|
"step": 765000 |
|
}, |
|
{ |
|
"epoch": 22.91, |
|
"learning_rate": 7.108734759144512e-05, |
|
"loss": 0.704, |
|
"step": 765500 |
|
}, |
|
{ |
|
"epoch": 22.92, |
|
"learning_rate": 7.093743753747751e-05, |
|
"loss": 0.6902, |
|
"step": 766000 |
|
}, |
|
{ |
|
"epoch": 22.94, |
|
"learning_rate": 7.078752748350989e-05, |
|
"loss": 0.6992, |
|
"step": 766500 |
|
}, |
|
{ |
|
"epoch": 22.95, |
|
"learning_rate": 7.063761742954226e-05, |
|
"loss": 0.6888, |
|
"step": 767000 |
|
}, |
|
{ |
|
"epoch": 22.96, |
|
"learning_rate": 7.048770737557465e-05, |
|
"loss": 0.7033, |
|
"step": 767500 |
|
}, |
|
{ |
|
"epoch": 22.98, |
|
"learning_rate": 7.033779732160703e-05, |
|
"loss": 0.7144, |
|
"step": 768000 |
|
}, |
|
{ |
|
"epoch": 22.99, |
|
"learning_rate": 7.01878872676394e-05, |
|
"loss": 0.7113, |
|
"step": 768500 |
|
}, |
|
{ |
|
"epoch": 23.01, |
|
"learning_rate": 7.003797721367179e-05, |
|
"loss": 0.6798, |
|
"step": 769000 |
|
}, |
|
{ |
|
"epoch": 23.02, |
|
"learning_rate": 6.988806715970418e-05, |
|
"loss": 0.6728, |
|
"step": 769500 |
|
}, |
|
{ |
|
"epoch": 23.04, |
|
"learning_rate": 6.973815710573656e-05, |
|
"loss": 0.6779, |
|
"step": 770000 |
|
}, |
|
{ |
|
"epoch": 23.05, |
|
"learning_rate": 6.958824705176893e-05, |
|
"loss": 0.6711, |
|
"step": 770500 |
|
}, |
|
{ |
|
"epoch": 23.07, |
|
"learning_rate": 6.943833699780132e-05, |
|
"loss": 0.6738, |
|
"step": 771000 |
|
}, |
|
{ |
|
"epoch": 23.08, |
|
"learning_rate": 6.92884269438337e-05, |
|
"loss": 0.6686, |
|
"step": 771500 |
|
}, |
|
{ |
|
"epoch": 23.1, |
|
"learning_rate": 6.913851688986607e-05, |
|
"loss": 0.6869, |
|
"step": 772000 |
|
}, |
|
{ |
|
"epoch": 23.11, |
|
"learning_rate": 6.898860683589846e-05, |
|
"loss": 0.6738, |
|
"step": 772500 |
|
}, |
|
{ |
|
"epoch": 23.13, |
|
"learning_rate": 6.883869678193084e-05, |
|
"loss": 0.6937, |
|
"step": 773000 |
|
}, |
|
{ |
|
"epoch": 23.14, |
|
"learning_rate": 6.868878672796322e-05, |
|
"loss": 0.6813, |
|
"step": 773500 |
|
}, |
|
{ |
|
"epoch": 23.16, |
|
"learning_rate": 6.85388766739956e-05, |
|
"loss": 0.6874, |
|
"step": 774000 |
|
}, |
|
{ |
|
"epoch": 23.17, |
|
"learning_rate": 6.838896662002798e-05, |
|
"loss": 0.6621, |
|
"step": 774500 |
|
}, |
|
{ |
|
"epoch": 23.19, |
|
"learning_rate": 6.823905656606036e-05, |
|
"loss": 0.683, |
|
"step": 775000 |
|
}, |
|
{ |
|
"epoch": 23.2, |
|
"learning_rate": 6.808914651209275e-05, |
|
"loss": 0.6892, |
|
"step": 775500 |
|
}, |
|
{ |
|
"epoch": 23.22, |
|
"learning_rate": 6.793923645812512e-05, |
|
"loss": 0.672, |
|
"step": 776000 |
|
}, |
|
{ |
|
"epoch": 23.23, |
|
"learning_rate": 6.77893264041575e-05, |
|
"loss": 0.6959, |
|
"step": 776500 |
|
}, |
|
{ |
|
"epoch": 23.25, |
|
"learning_rate": 6.763941635018989e-05, |
|
"loss": 0.6863, |
|
"step": 777000 |
|
}, |
|
{ |
|
"epoch": 23.26, |
|
"learning_rate": 6.748950629622226e-05, |
|
"loss": 0.6691, |
|
"step": 777500 |
|
}, |
|
{ |
|
"epoch": 23.28, |
|
"learning_rate": 6.733959624225464e-05, |
|
"loss": 0.685, |
|
"step": 778000 |
|
}, |
|
{ |
|
"epoch": 23.29, |
|
"learning_rate": 6.718968618828703e-05, |
|
"loss": 0.6724, |
|
"step": 778500 |
|
}, |
|
{ |
|
"epoch": 23.31, |
|
"learning_rate": 6.70397761343194e-05, |
|
"loss": 0.6846, |
|
"step": 779000 |
|
}, |
|
{ |
|
"epoch": 23.32, |
|
"learning_rate": 6.688986608035178e-05, |
|
"loss": 0.6773, |
|
"step": 779500 |
|
}, |
|
{ |
|
"epoch": 23.34, |
|
"learning_rate": 6.673995602638417e-05, |
|
"loss": 0.6831, |
|
"step": 780000 |
|
}, |
|
{ |
|
"epoch": 23.35, |
|
"learning_rate": 6.659004597241655e-05, |
|
"loss": 0.6989, |
|
"step": 780500 |
|
}, |
|
{ |
|
"epoch": 23.37, |
|
"learning_rate": 6.644013591844892e-05, |
|
"loss": 0.681, |
|
"step": 781000 |
|
}, |
|
{ |
|
"epoch": 23.38, |
|
"learning_rate": 6.629022586448131e-05, |
|
"loss": 0.6751, |
|
"step": 781500 |
|
}, |
|
{ |
|
"epoch": 23.4, |
|
"learning_rate": 6.614031581051369e-05, |
|
"loss": 0.6723, |
|
"step": 782000 |
|
}, |
|
{ |
|
"epoch": 23.41, |
|
"learning_rate": 6.599040575654606e-05, |
|
"loss": 0.6995, |
|
"step": 782500 |
|
}, |
|
{ |
|
"epoch": 23.43, |
|
"learning_rate": 6.584049570257845e-05, |
|
"loss": 0.6783, |
|
"step": 783000 |
|
}, |
|
{ |
|
"epoch": 23.44, |
|
"learning_rate": 6.569058564861083e-05, |
|
"loss": 0.6738, |
|
"step": 783500 |
|
}, |
|
{ |
|
"epoch": 23.46, |
|
"learning_rate": 6.55406755946432e-05, |
|
"loss": 0.6933, |
|
"step": 784000 |
|
}, |
|
{ |
|
"epoch": 23.47, |
|
"learning_rate": 6.539076554067559e-05, |
|
"loss": 0.706, |
|
"step": 784500 |
|
}, |
|
{ |
|
"epoch": 23.49, |
|
"learning_rate": 6.524085548670797e-05, |
|
"loss": 0.6589, |
|
"step": 785000 |
|
}, |
|
{ |
|
"epoch": 23.5, |
|
"learning_rate": 6.509094543274034e-05, |
|
"loss": 0.68, |
|
"step": 785500 |
|
}, |
|
{ |
|
"epoch": 23.52, |
|
"learning_rate": 6.494103537877273e-05, |
|
"loss": 0.6614, |
|
"step": 786000 |
|
}, |
|
{ |
|
"epoch": 23.53, |
|
"learning_rate": 6.479112532480511e-05, |
|
"loss": 0.6856, |
|
"step": 786500 |
|
}, |
|
{ |
|
"epoch": 23.55, |
|
"learning_rate": 6.464121527083749e-05, |
|
"loss": 0.6812, |
|
"step": 787000 |
|
}, |
|
{ |
|
"epoch": 23.56, |
|
"learning_rate": 6.449130521686987e-05, |
|
"loss": 0.6689, |
|
"step": 787500 |
|
}, |
|
{ |
|
"epoch": 23.58, |
|
"learning_rate": 6.434139516290225e-05, |
|
"loss": 0.6791, |
|
"step": 788000 |
|
}, |
|
{ |
|
"epoch": 23.59, |
|
"learning_rate": 6.419148510893463e-05, |
|
"loss": 0.69, |
|
"step": 788500 |
|
}, |
|
{ |
|
"epoch": 23.61, |
|
"learning_rate": 6.404157505496702e-05, |
|
"loss": 0.6721, |
|
"step": 789000 |
|
}, |
|
{ |
|
"epoch": 23.62, |
|
"learning_rate": 6.389166500099939e-05, |
|
"loss": 0.6702, |
|
"step": 789500 |
|
}, |
|
{ |
|
"epoch": 23.64, |
|
"learning_rate": 6.374175494703177e-05, |
|
"loss": 0.6939, |
|
"step": 790000 |
|
}, |
|
{ |
|
"epoch": 23.65, |
|
"learning_rate": 6.359184489306416e-05, |
|
"loss": 0.6791, |
|
"step": 790500 |
|
}, |
|
{ |
|
"epoch": 23.67, |
|
"learning_rate": 6.344193483909653e-05, |
|
"loss": 0.6826, |
|
"step": 791000 |
|
}, |
|
{ |
|
"epoch": 23.68, |
|
"learning_rate": 6.329202478512891e-05, |
|
"loss": 0.6859, |
|
"step": 791500 |
|
}, |
|
{ |
|
"epoch": 23.7, |
|
"learning_rate": 6.31421147311613e-05, |
|
"loss": 0.6778, |
|
"step": 792000 |
|
}, |
|
{ |
|
"epoch": 23.71, |
|
"learning_rate": 6.299220467719367e-05, |
|
"loss": 0.6745, |
|
"step": 792500 |
|
}, |
|
{ |
|
"epoch": 23.73, |
|
"learning_rate": 6.284229462322605e-05, |
|
"loss": 0.7018, |
|
"step": 793000 |
|
}, |
|
{ |
|
"epoch": 23.74, |
|
"learning_rate": 6.269238456925844e-05, |
|
"loss": 0.6882, |
|
"step": 793500 |
|
}, |
|
{ |
|
"epoch": 23.76, |
|
"learning_rate": 6.254247451529081e-05, |
|
"loss": 0.6737, |
|
"step": 794000 |
|
}, |
|
{ |
|
"epoch": 23.77, |
|
"learning_rate": 6.239256446132319e-05, |
|
"loss": 0.681, |
|
"step": 794500 |
|
}, |
|
{ |
|
"epoch": 23.79, |
|
"learning_rate": 6.224265440735558e-05, |
|
"loss": 0.6998, |
|
"step": 795000 |
|
}, |
|
{ |
|
"epoch": 23.8, |
|
"learning_rate": 6.209274435338797e-05, |
|
"loss": 0.6691, |
|
"step": 795500 |
|
}, |
|
{ |
|
"epoch": 23.82, |
|
"learning_rate": 6.194283429942035e-05, |
|
"loss": 0.668, |
|
"step": 796000 |
|
}, |
|
{ |
|
"epoch": 23.83, |
|
"learning_rate": 6.179292424545272e-05, |
|
"loss": 0.6894, |
|
"step": 796500 |
|
}, |
|
{ |
|
"epoch": 23.85, |
|
"learning_rate": 6.164301419148511e-05, |
|
"loss": 0.674, |
|
"step": 797000 |
|
}, |
|
{ |
|
"epoch": 23.86, |
|
"learning_rate": 6.149310413751749e-05, |
|
"loss": 0.675, |
|
"step": 797500 |
|
}, |
|
{ |
|
"epoch": 23.88, |
|
"learning_rate": 6.134319408354986e-05, |
|
"loss": 0.6843, |
|
"step": 798000 |
|
}, |
|
{ |
|
"epoch": 23.89, |
|
"learning_rate": 6.119328402958225e-05, |
|
"loss": 0.6909, |
|
"step": 798500 |
|
}, |
|
{ |
|
"epoch": 23.91, |
|
"learning_rate": 6.104337397561463e-05, |
|
"loss": 0.6875, |
|
"step": 799000 |
|
}, |
|
{ |
|
"epoch": 23.92, |
|
"learning_rate": 6.089346392164701e-05, |
|
"loss": 0.6623, |
|
"step": 799500 |
|
}, |
|
{ |
|
"epoch": 23.94, |
|
"learning_rate": 6.074355386767939e-05, |
|
"loss": 0.6976, |
|
"step": 800000 |
|
}, |
|
{ |
|
"epoch": 23.95, |
|
"learning_rate": 6.059364381371177e-05, |
|
"loss": 0.6769, |
|
"step": 800500 |
|
}, |
|
{ |
|
"epoch": 23.97, |
|
"learning_rate": 6.044373375974415e-05, |
|
"loss": 0.6848, |
|
"step": 801000 |
|
}, |
|
{ |
|
"epoch": 23.98, |
|
"learning_rate": 6.0293823705776534e-05, |
|
"loss": 0.677, |
|
"step": 801500 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"learning_rate": 6.014391365180891e-05, |
|
"loss": 0.6994, |
|
"step": 802000 |
|
}, |
|
{ |
|
"epoch": 24.01, |
|
"learning_rate": 5.999400359784129e-05, |
|
"loss": 0.6587, |
|
"step": 802500 |
|
}, |
|
{ |
|
"epoch": 24.03, |
|
"learning_rate": 5.9844093543873675e-05, |
|
"loss": 0.6472, |
|
"step": 803000 |
|
}, |
|
{ |
|
"epoch": 24.04, |
|
"learning_rate": 5.969418348990605e-05, |
|
"loss": 0.6612, |
|
"step": 803500 |
|
}, |
|
{ |
|
"epoch": 24.06, |
|
"learning_rate": 5.954427343593843e-05, |
|
"loss": 0.666, |
|
"step": 804000 |
|
}, |
|
{ |
|
"epoch": 24.07, |
|
"learning_rate": 5.9394363381970816e-05, |
|
"loss": 0.6726, |
|
"step": 804500 |
|
}, |
|
{ |
|
"epoch": 24.09, |
|
"learning_rate": 5.924445332800319e-05, |
|
"loss": 0.6788, |
|
"step": 805000 |
|
}, |
|
{ |
|
"epoch": 24.1, |
|
"learning_rate": 5.9094543274035574e-05, |
|
"loss": 0.6556, |
|
"step": 805500 |
|
}, |
|
{ |
|
"epoch": 24.12, |
|
"learning_rate": 5.894463322006796e-05, |
|
"loss": 0.6668, |
|
"step": 806000 |
|
}, |
|
{ |
|
"epoch": 24.13, |
|
"learning_rate": 5.879472316610033e-05, |
|
"loss": 0.6642, |
|
"step": 806500 |
|
}, |
|
{ |
|
"epoch": 24.15, |
|
"learning_rate": 5.8644813112132715e-05, |
|
"loss": 0.6391, |
|
"step": 807000 |
|
}, |
|
{ |
|
"epoch": 24.16, |
|
"learning_rate": 5.84949030581651e-05, |
|
"loss": 0.6513, |
|
"step": 807500 |
|
}, |
|
{ |
|
"epoch": 24.18, |
|
"learning_rate": 5.8344993004197474e-05, |
|
"loss": 0.6436, |
|
"step": 808000 |
|
}, |
|
{ |
|
"epoch": 24.19, |
|
"learning_rate": 5.8195082950229857e-05, |
|
"loss": 0.666, |
|
"step": 808500 |
|
}, |
|
{ |
|
"epoch": 24.21, |
|
"learning_rate": 5.804517289626224e-05, |
|
"loss": 0.6687, |
|
"step": 809000 |
|
}, |
|
{ |
|
"epoch": 24.22, |
|
"learning_rate": 5.7895262842294615e-05, |
|
"loss": 0.6581, |
|
"step": 809500 |
|
}, |
|
{ |
|
"epoch": 24.24, |
|
"learning_rate": 5.7745352788327e-05, |
|
"loss": 0.6699, |
|
"step": 810000 |
|
}, |
|
{ |
|
"epoch": 24.25, |
|
"learning_rate": 5.759544273435938e-05, |
|
"loss": 0.6666, |
|
"step": 810500 |
|
}, |
|
{ |
|
"epoch": 24.27, |
|
"learning_rate": 5.7445532680391756e-05, |
|
"loss": 0.6646, |
|
"step": 811000 |
|
}, |
|
{ |
|
"epoch": 24.28, |
|
"learning_rate": 5.729562262642414e-05, |
|
"loss": 0.6724, |
|
"step": 811500 |
|
}, |
|
{ |
|
"epoch": 24.3, |
|
"learning_rate": 5.714571257245652e-05, |
|
"loss": 0.6653, |
|
"step": 812000 |
|
}, |
|
{ |
|
"epoch": 24.31, |
|
"learning_rate": 5.6995802518488904e-05, |
|
"loss": 0.6727, |
|
"step": 812500 |
|
}, |
|
{ |
|
"epoch": 24.33, |
|
"learning_rate": 5.684589246452128e-05, |
|
"loss": 0.667, |
|
"step": 813000 |
|
}, |
|
{ |
|
"epoch": 24.34, |
|
"learning_rate": 5.669598241055366e-05, |
|
"loss": 0.6709, |
|
"step": 813500 |
|
}, |
|
{ |
|
"epoch": 24.36, |
|
"learning_rate": 5.6546072356586045e-05, |
|
"loss": 0.6627, |
|
"step": 814000 |
|
}, |
|
{ |
|
"epoch": 24.37, |
|
"learning_rate": 5.639616230261842e-05, |
|
"loss": 0.6675, |
|
"step": 814500 |
|
}, |
|
{ |
|
"epoch": 24.39, |
|
"learning_rate": 5.6246252248650803e-05, |
|
"loss": 0.672, |
|
"step": 815000 |
|
}, |
|
{ |
|
"epoch": 24.4, |
|
"learning_rate": 5.6096342194683186e-05, |
|
"loss": 0.6638, |
|
"step": 815500 |
|
}, |
|
{ |
|
"epoch": 24.42, |
|
"learning_rate": 5.594643214071556e-05, |
|
"loss": 0.6593, |
|
"step": 816000 |
|
}, |
|
{ |
|
"epoch": 24.43, |
|
"learning_rate": 5.5796522086747945e-05, |
|
"loss": 0.6783, |
|
"step": 816500 |
|
}, |
|
{ |
|
"epoch": 24.45, |
|
"learning_rate": 5.564661203278033e-05, |
|
"loss": 0.6722, |
|
"step": 817000 |
|
}, |
|
{ |
|
"epoch": 24.46, |
|
"learning_rate": 5.54967019788127e-05, |
|
"loss": 0.6607, |
|
"step": 817500 |
|
}, |
|
{ |
|
"epoch": 24.48, |
|
"learning_rate": 5.5346791924845086e-05, |
|
"loss": 0.6755, |
|
"step": 818000 |
|
}, |
|
{ |
|
"epoch": 24.49, |
|
"learning_rate": 5.519688187087747e-05, |
|
"loss": 0.6711, |
|
"step": 818500 |
|
}, |
|
{ |
|
"epoch": 24.51, |
|
"learning_rate": 5.5046971816909844e-05, |
|
"loss": 0.6612, |
|
"step": 819000 |
|
}, |
|
{ |
|
"epoch": 24.52, |
|
"learning_rate": 5.489706176294223e-05, |
|
"loss": 0.6583, |
|
"step": 819500 |
|
}, |
|
{ |
|
"epoch": 24.54, |
|
"learning_rate": 5.474715170897461e-05, |
|
"loss": 0.65, |
|
"step": 820000 |
|
}, |
|
{ |
|
"epoch": 24.55, |
|
"learning_rate": 5.4597241655006985e-05, |
|
"loss": 0.6659, |
|
"step": 820500 |
|
}, |
|
{ |
|
"epoch": 24.57, |
|
"learning_rate": 5.444733160103937e-05, |
|
"loss": 0.6639, |
|
"step": 821000 |
|
}, |
|
{ |
|
"epoch": 24.58, |
|
"learning_rate": 5.429742154707175e-05, |
|
"loss": 0.6719, |
|
"step": 821500 |
|
}, |
|
{ |
|
"epoch": 24.6, |
|
"learning_rate": 5.4147511493104126e-05, |
|
"loss": 0.6724, |
|
"step": 822000 |
|
}, |
|
{ |
|
"epoch": 24.61, |
|
"learning_rate": 5.3997601439136516e-05, |
|
"loss": 0.6769, |
|
"step": 822500 |
|
}, |
|
{ |
|
"epoch": 24.63, |
|
"learning_rate": 5.38476913851689e-05, |
|
"loss": 0.6643, |
|
"step": 823000 |
|
}, |
|
{ |
|
"epoch": 24.64, |
|
"learning_rate": 5.369778133120128e-05, |
|
"loss": 0.6632, |
|
"step": 823500 |
|
}, |
|
{ |
|
"epoch": 24.66, |
|
"learning_rate": 5.354787127723366e-05, |
|
"loss": 0.6651, |
|
"step": 824000 |
|
}, |
|
{ |
|
"epoch": 24.67, |
|
"learning_rate": 5.339796122326604e-05, |
|
"loss": 0.6684, |
|
"step": 824500 |
|
}, |
|
{ |
|
"epoch": 24.69, |
|
"learning_rate": 5.324805116929842e-05, |
|
"loss": 0.6965, |
|
"step": 825000 |
|
}, |
|
{ |
|
"epoch": 24.7, |
|
"learning_rate": 5.30981411153308e-05, |
|
"loss": 0.6577, |
|
"step": 825500 |
|
}, |
|
{ |
|
"epoch": 24.72, |
|
"learning_rate": 5.294823106136318e-05, |
|
"loss": 0.6623, |
|
"step": 826000 |
|
}, |
|
{ |
|
"epoch": 24.73, |
|
"learning_rate": 5.279832100739556e-05, |
|
"loss": 0.6553, |
|
"step": 826500 |
|
}, |
|
{ |
|
"epoch": 24.75, |
|
"learning_rate": 5.264841095342794e-05, |
|
"loss": 0.6655, |
|
"step": 827000 |
|
}, |
|
{ |
|
"epoch": 24.76, |
|
"learning_rate": 5.249850089946032e-05, |
|
"loss": 0.6778, |
|
"step": 827500 |
|
}, |
|
{ |
|
"epoch": 24.78, |
|
"learning_rate": 5.2348590845492704e-05, |
|
"loss": 0.6642, |
|
"step": 828000 |
|
}, |
|
{ |
|
"epoch": 24.79, |
|
"learning_rate": 5.219868079152508e-05, |
|
"loss": 0.651, |
|
"step": 828500 |
|
}, |
|
{ |
|
"epoch": 24.81, |
|
"learning_rate": 5.204877073755746e-05, |
|
"loss": 0.6645, |
|
"step": 829000 |
|
}, |
|
{ |
|
"epoch": 24.82, |
|
"learning_rate": 5.1898860683589845e-05, |
|
"loss": 0.6534, |
|
"step": 829500 |
|
}, |
|
{ |
|
"epoch": 24.84, |
|
"learning_rate": 5.174895062962222e-05, |
|
"loss": 0.6581, |
|
"step": 830000 |
|
}, |
|
{ |
|
"epoch": 24.85, |
|
"learning_rate": 5.1599040575654604e-05, |
|
"loss": 0.6703, |
|
"step": 830500 |
|
}, |
|
{ |
|
"epoch": 24.86, |
|
"learning_rate": 5.1449130521686986e-05, |
|
"loss": 0.6563, |
|
"step": 831000 |
|
}, |
|
{ |
|
"epoch": 24.88, |
|
"learning_rate": 5.129922046771936e-05, |
|
"loss": 0.6623, |
|
"step": 831500 |
|
}, |
|
{ |
|
"epoch": 24.89, |
|
"learning_rate": 5.1149310413751745e-05, |
|
"loss": 0.6609, |
|
"step": 832000 |
|
}, |
|
{ |
|
"epoch": 24.91, |
|
"learning_rate": 5.099940035978413e-05, |
|
"loss": 0.6681, |
|
"step": 832500 |
|
}, |
|
{ |
|
"epoch": 24.92, |
|
"learning_rate": 5.08494903058165e-05, |
|
"loss": 0.6611, |
|
"step": 833000 |
|
}, |
|
{ |
|
"epoch": 24.94, |
|
"learning_rate": 5.0699580251848886e-05, |
|
"loss": 0.6411, |
|
"step": 833500 |
|
}, |
|
{ |
|
"epoch": 24.95, |
|
"learning_rate": 5.054967019788127e-05, |
|
"loss": 0.6546, |
|
"step": 834000 |
|
}, |
|
{ |
|
"epoch": 24.97, |
|
"learning_rate": 5.039976014391365e-05, |
|
"loss": 0.6809, |
|
"step": 834500 |
|
}, |
|
{ |
|
"epoch": 24.98, |
|
"learning_rate": 5.024985008994603e-05, |
|
"loss": 0.6626, |
|
"step": 835000 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"learning_rate": 5.009994003597841e-05, |
|
"loss": 0.6552, |
|
"step": 835500 |
|
}, |
|
{ |
|
"epoch": 25.01, |
|
"learning_rate": 4.995002998201079e-05, |
|
"loss": 0.6281, |
|
"step": 836000 |
|
}, |
|
{ |
|
"epoch": 25.03, |
|
"learning_rate": 4.980011992804317e-05, |
|
"loss": 0.6428, |
|
"step": 836500 |
|
}, |
|
{ |
|
"epoch": 25.04, |
|
"learning_rate": 4.965020987407555e-05, |
|
"loss": 0.6264, |
|
"step": 837000 |
|
}, |
|
{ |
|
"epoch": 25.06, |
|
"learning_rate": 4.950029982010793e-05, |
|
"loss": 0.6402, |
|
"step": 837500 |
|
}, |
|
{ |
|
"epoch": 25.07, |
|
"learning_rate": 4.935038976614031e-05, |
|
"loss": 0.6562, |
|
"step": 838000 |
|
}, |
|
{ |
|
"epoch": 25.09, |
|
"learning_rate": 4.920047971217269e-05, |
|
"loss": 0.6611, |
|
"step": 838500 |
|
}, |
|
{ |
|
"epoch": 25.1, |
|
"learning_rate": 4.9050569658205074e-05, |
|
"loss": 0.6615, |
|
"step": 839000 |
|
}, |
|
{ |
|
"epoch": 25.12, |
|
"learning_rate": 4.890065960423745e-05, |
|
"loss": 0.6239, |
|
"step": 839500 |
|
}, |
|
{ |
|
"epoch": 25.13, |
|
"learning_rate": 4.875074955026983e-05, |
|
"loss": 0.6465, |
|
"step": 840000 |
|
}, |
|
{ |
|
"epoch": 25.15, |
|
"learning_rate": 4.8600839496302215e-05, |
|
"loss": 0.6687, |
|
"step": 840500 |
|
}, |
|
{ |
|
"epoch": 25.16, |
|
"learning_rate": 4.845092944233459e-05, |
|
"loss": 0.6259, |
|
"step": 841000 |
|
}, |
|
{ |
|
"epoch": 25.18, |
|
"learning_rate": 4.8301019388366974e-05, |
|
"loss": 0.6472, |
|
"step": 841500 |
|
}, |
|
{ |
|
"epoch": 25.19, |
|
"learning_rate": 4.8151109334399356e-05, |
|
"loss": 0.6368, |
|
"step": 842000 |
|
}, |
|
{ |
|
"epoch": 25.21, |
|
"learning_rate": 4.800119928043173e-05, |
|
"loss": 0.6379, |
|
"step": 842500 |
|
}, |
|
{ |
|
"epoch": 25.22, |
|
"learning_rate": 4.7851289226464115e-05, |
|
"loss": 0.6472, |
|
"step": 843000 |
|
}, |
|
{ |
|
"epoch": 25.24, |
|
"learning_rate": 4.77013791724965e-05, |
|
"loss": 0.6604, |
|
"step": 843500 |
|
}, |
|
{ |
|
"epoch": 25.25, |
|
"learning_rate": 4.755146911852887e-05, |
|
"loss": 0.6564, |
|
"step": 844000 |
|
}, |
|
{ |
|
"epoch": 25.27, |
|
"learning_rate": 4.7401559064561256e-05, |
|
"loss": 0.6387, |
|
"step": 844500 |
|
}, |
|
{ |
|
"epoch": 25.28, |
|
"learning_rate": 4.725164901059364e-05, |
|
"loss": 0.6429, |
|
"step": 845000 |
|
}, |
|
{ |
|
"epoch": 25.3, |
|
"learning_rate": 4.7101738956626014e-05, |
|
"loss": 0.643, |
|
"step": 845500 |
|
}, |
|
{ |
|
"epoch": 25.31, |
|
"learning_rate": 4.69518289026584e-05, |
|
"loss": 0.6486, |
|
"step": 846000 |
|
}, |
|
{ |
|
"epoch": 25.33, |
|
"learning_rate": 4.680191884869078e-05, |
|
"loss": 0.6445, |
|
"step": 846500 |
|
}, |
|
{ |
|
"epoch": 25.34, |
|
"learning_rate": 4.6652008794723155e-05, |
|
"loss": 0.6459, |
|
"step": 847000 |
|
}, |
|
{ |
|
"epoch": 25.36, |
|
"learning_rate": 4.650209874075554e-05, |
|
"loss": 0.6522, |
|
"step": 847500 |
|
}, |
|
{ |
|
"epoch": 25.37, |
|
"learning_rate": 4.635218868678792e-05, |
|
"loss": 0.6538, |
|
"step": 848000 |
|
}, |
|
{ |
|
"epoch": 25.39, |
|
"learning_rate": 4.6202278632820297e-05, |
|
"loss": 0.6454, |
|
"step": 848500 |
|
}, |
|
{ |
|
"epoch": 25.4, |
|
"learning_rate": 4.605236857885268e-05, |
|
"loss": 0.639, |
|
"step": 849000 |
|
}, |
|
{ |
|
"epoch": 25.42, |
|
"learning_rate": 4.590245852488507e-05, |
|
"loss": 0.6469, |
|
"step": 849500 |
|
}, |
|
{ |
|
"epoch": 25.43, |
|
"learning_rate": 4.575254847091745e-05, |
|
"loss": 0.6471, |
|
"step": 850000 |
|
}, |
|
{ |
|
"epoch": 25.45, |
|
"learning_rate": 4.560263841694983e-05, |
|
"loss": 0.6511, |
|
"step": 850500 |
|
}, |
|
{ |
|
"epoch": 25.46, |
|
"learning_rate": 4.545272836298221e-05, |
|
"loss": 0.6218, |
|
"step": 851000 |
|
}, |
|
{ |
|
"epoch": 25.48, |
|
"learning_rate": 4.530281830901459e-05, |
|
"loss": 0.6448, |
|
"step": 851500 |
|
}, |
|
{ |
|
"epoch": 25.49, |
|
"learning_rate": 4.515290825504697e-05, |
|
"loss": 0.6532, |
|
"step": 852000 |
|
}, |
|
{ |
|
"epoch": 25.51, |
|
"learning_rate": 4.500299820107935e-05, |
|
"loss": 0.6322, |
|
"step": 852500 |
|
}, |
|
{ |
|
"epoch": 25.52, |
|
"learning_rate": 4.485308814711173e-05, |
|
"loss": 0.6316, |
|
"step": 853000 |
|
}, |
|
{ |
|
"epoch": 25.54, |
|
"learning_rate": 4.470317809314411e-05, |
|
"loss": 0.6508, |
|
"step": 853500 |
|
}, |
|
{ |
|
"epoch": 25.55, |
|
"learning_rate": 4.455326803917649e-05, |
|
"loss": 0.6638, |
|
"step": 854000 |
|
}, |
|
{ |
|
"epoch": 25.57, |
|
"learning_rate": 4.4403357985208874e-05, |
|
"loss": 0.6425, |
|
"step": 854500 |
|
}, |
|
{ |
|
"epoch": 25.58, |
|
"learning_rate": 4.425344793124126e-05, |
|
"loss": 0.6351, |
|
"step": 855000 |
|
}, |
|
{ |
|
"epoch": 25.6, |
|
"learning_rate": 4.410353787727363e-05, |
|
"loss": 0.6314, |
|
"step": 855500 |
|
}, |
|
{ |
|
"epoch": 25.61, |
|
"learning_rate": 4.3953627823306015e-05, |
|
"loss": 0.6572, |
|
"step": 856000 |
|
}, |
|
{ |
|
"epoch": 25.63, |
|
"learning_rate": 4.38037177693384e-05, |
|
"loss": 0.66, |
|
"step": 856500 |
|
}, |
|
{ |
|
"epoch": 25.64, |
|
"learning_rate": 4.3653807715370774e-05, |
|
"loss": 0.663, |
|
"step": 857000 |
|
}, |
|
{ |
|
"epoch": 25.66, |
|
"learning_rate": 4.3503897661403157e-05, |
|
"loss": 0.6635, |
|
"step": 857500 |
|
}, |
|
{ |
|
"epoch": 25.67, |
|
"learning_rate": 4.335398760743554e-05, |
|
"loss": 0.6706, |
|
"step": 858000 |
|
}, |
|
{ |
|
"epoch": 25.69, |
|
"learning_rate": 4.3204077553467915e-05, |
|
"loss": 0.6471, |
|
"step": 858500 |
|
}, |
|
{ |
|
"epoch": 25.7, |
|
"learning_rate": 4.30541674995003e-05, |
|
"loss": 0.6481, |
|
"step": 859000 |
|
}, |
|
{ |
|
"epoch": 25.72, |
|
"learning_rate": 4.290425744553268e-05, |
|
"loss": 0.6528, |
|
"step": 859500 |
|
}, |
|
{ |
|
"epoch": 25.73, |
|
"learning_rate": 4.2754347391565056e-05, |
|
"loss": 0.6387, |
|
"step": 860000 |
|
}, |
|
{ |
|
"epoch": 25.75, |
|
"learning_rate": 4.260443733759744e-05, |
|
"loss": 0.6513, |
|
"step": 860500 |
|
}, |
|
{ |
|
"epoch": 25.76, |
|
"learning_rate": 4.245452728362982e-05, |
|
"loss": 0.6378, |
|
"step": 861000 |
|
}, |
|
{ |
|
"epoch": 25.78, |
|
"learning_rate": 4.23046172296622e-05, |
|
"loss": 0.6323, |
|
"step": 861500 |
|
}, |
|
{ |
|
"epoch": 25.79, |
|
"learning_rate": 4.215470717569458e-05, |
|
"loss": 0.6511, |
|
"step": 862000 |
|
}, |
|
{ |
|
"epoch": 25.81, |
|
"learning_rate": 4.200479712172696e-05, |
|
"loss": 0.6434, |
|
"step": 862500 |
|
}, |
|
{ |
|
"epoch": 25.82, |
|
"learning_rate": 4.185488706775934e-05, |
|
"loss": 0.651, |
|
"step": 863000 |
|
}, |
|
{ |
|
"epoch": 25.84, |
|
"learning_rate": 4.170497701379172e-05, |
|
"loss": 0.6527, |
|
"step": 863500 |
|
}, |
|
{ |
|
"epoch": 25.85, |
|
"learning_rate": 4.1555066959824103e-05, |
|
"loss": 0.647, |
|
"step": 864000 |
|
}, |
|
{ |
|
"epoch": 25.87, |
|
"learning_rate": 4.140515690585648e-05, |
|
"loss": 0.6418, |
|
"step": 864500 |
|
}, |
|
{ |
|
"epoch": 25.88, |
|
"learning_rate": 4.125524685188886e-05, |
|
"loss": 0.6563, |
|
"step": 865000 |
|
}, |
|
{ |
|
"epoch": 25.9, |
|
"learning_rate": 4.1105336797921245e-05, |
|
"loss": 0.6521, |
|
"step": 865500 |
|
}, |
|
{ |
|
"epoch": 25.91, |
|
"learning_rate": 4.095542674395362e-05, |
|
"loss": 0.6432, |
|
"step": 866000 |
|
}, |
|
{ |
|
"epoch": 25.93, |
|
"learning_rate": 4.0805516689986e-05, |
|
"loss": 0.6474, |
|
"step": 866500 |
|
}, |
|
{ |
|
"epoch": 25.94, |
|
"learning_rate": 4.0655606636018386e-05, |
|
"loss": 0.6422, |
|
"step": 867000 |
|
}, |
|
{ |
|
"epoch": 25.96, |
|
"learning_rate": 4.050569658205076e-05, |
|
"loss": 0.6438, |
|
"step": 867500 |
|
}, |
|
{ |
|
"epoch": 25.97, |
|
"learning_rate": 4.0355786528083144e-05, |
|
"loss": 0.6535, |
|
"step": 868000 |
|
}, |
|
{ |
|
"epoch": 25.99, |
|
"learning_rate": 4.020587647411553e-05, |
|
"loss": 0.6365, |
|
"step": 868500 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"learning_rate": 4.00559664201479e-05, |
|
"loss": 0.6319, |
|
"step": 869000 |
|
}, |
|
{ |
|
"epoch": 26.02, |
|
"learning_rate": 3.9906056366180285e-05, |
|
"loss": 0.63, |
|
"step": 869500 |
|
}, |
|
{ |
|
"epoch": 26.03, |
|
"learning_rate": 3.975614631221267e-05, |
|
"loss": 0.6262, |
|
"step": 870000 |
|
}, |
|
{ |
|
"epoch": 26.05, |
|
"learning_rate": 3.960623625824505e-05, |
|
"loss": 0.6163, |
|
"step": 870500 |
|
}, |
|
{ |
|
"epoch": 26.06, |
|
"learning_rate": 3.9456326204277426e-05, |
|
"loss": 0.6219, |
|
"step": 871000 |
|
}, |
|
{ |
|
"epoch": 26.08, |
|
"learning_rate": 3.930641615030981e-05, |
|
"loss": 0.6415, |
|
"step": 871500 |
|
}, |
|
{ |
|
"epoch": 26.09, |
|
"learning_rate": 3.915650609634219e-05, |
|
"loss": 0.6316, |
|
"step": 872000 |
|
}, |
|
{ |
|
"epoch": 26.11, |
|
"learning_rate": 3.900659604237457e-05, |
|
"loss": 0.6261, |
|
"step": 872500 |
|
}, |
|
{ |
|
"epoch": 26.12, |
|
"learning_rate": 3.885668598840695e-05, |
|
"loss": 0.6339, |
|
"step": 873000 |
|
}, |
|
{ |
|
"epoch": 26.14, |
|
"learning_rate": 3.870677593443933e-05, |
|
"loss": 0.6443, |
|
"step": 873500 |
|
}, |
|
{ |
|
"epoch": 26.15, |
|
"learning_rate": 3.855686588047171e-05, |
|
"loss": 0.63, |
|
"step": 874000 |
|
}, |
|
{ |
|
"epoch": 26.17, |
|
"learning_rate": 3.840695582650409e-05, |
|
"loss": 0.6502, |
|
"step": 874500 |
|
}, |
|
{ |
|
"epoch": 26.18, |
|
"learning_rate": 3.8257045772536474e-05, |
|
"loss": 0.6242, |
|
"step": 875000 |
|
}, |
|
{ |
|
"epoch": 26.2, |
|
"learning_rate": 3.810713571856885e-05, |
|
"loss": 0.6333, |
|
"step": 875500 |
|
}, |
|
{ |
|
"epoch": 26.21, |
|
"learning_rate": 3.795722566460123e-05, |
|
"loss": 0.6432, |
|
"step": 876000 |
|
}, |
|
{ |
|
"epoch": 26.23, |
|
"learning_rate": 3.780731561063362e-05, |
|
"loss": 0.6342, |
|
"step": 876500 |
|
}, |
|
{ |
|
"epoch": 26.24, |
|
"learning_rate": 3.7657405556666004e-05, |
|
"loss": 0.6199, |
|
"step": 877000 |
|
}, |
|
{ |
|
"epoch": 26.26, |
|
"learning_rate": 3.750749550269838e-05, |
|
"loss": 0.6216, |
|
"step": 877500 |
|
}, |
|
{ |
|
"epoch": 26.27, |
|
"learning_rate": 3.7357585448730756e-05, |
|
"loss": 0.6354, |
|
"step": 878000 |
|
}, |
|
{ |
|
"epoch": 26.29, |
|
"learning_rate": 3.720767539476314e-05, |
|
"loss": 0.6477, |
|
"step": 878500 |
|
}, |
|
{ |
|
"epoch": 26.3, |
|
"learning_rate": 3.705776534079552e-05, |
|
"loss": 0.629, |
|
"step": 879000 |
|
}, |
|
{ |
|
"epoch": 26.32, |
|
"learning_rate": 3.69078552868279e-05, |
|
"loss": 0.6368, |
|
"step": 879500 |
|
}, |
|
{ |
|
"epoch": 26.33, |
|
"learning_rate": 3.675794523286028e-05, |
|
"loss": 0.6343, |
|
"step": 880000 |
|
}, |
|
{ |
|
"epoch": 26.35, |
|
"learning_rate": 3.660803517889266e-05, |
|
"loss": 0.6169, |
|
"step": 880500 |
|
}, |
|
{ |
|
"epoch": 26.36, |
|
"learning_rate": 3.645812512492504e-05, |
|
"loss": 0.6238, |
|
"step": 881000 |
|
}, |
|
{ |
|
"epoch": 26.38, |
|
"learning_rate": 3.630821507095742e-05, |
|
"loss": 0.6187, |
|
"step": 881500 |
|
}, |
|
{ |
|
"epoch": 26.39, |
|
"learning_rate": 3.61583050169898e-05, |
|
"loss": 0.6278, |
|
"step": 882000 |
|
}, |
|
{ |
|
"epoch": 26.41, |
|
"learning_rate": 3.600839496302218e-05, |
|
"loss": 0.6354, |
|
"step": 882500 |
|
}, |
|
{ |
|
"epoch": 26.42, |
|
"learning_rate": 3.585848490905457e-05, |
|
"loss": 0.6208, |
|
"step": 883000 |
|
}, |
|
{ |
|
"epoch": 26.44, |
|
"learning_rate": 3.5708574855086944e-05, |
|
"loss": 0.619, |
|
"step": 883500 |
|
}, |
|
{ |
|
"epoch": 26.45, |
|
"learning_rate": 3.555866480111933e-05, |
|
"loss": 0.6245, |
|
"step": 884000 |
|
}, |
|
{ |
|
"epoch": 26.47, |
|
"learning_rate": 3.540875474715171e-05, |
|
"loss": 0.6249, |
|
"step": 884500 |
|
}, |
|
{ |
|
"epoch": 26.48, |
|
"learning_rate": 3.5258844693184085e-05, |
|
"loss": 0.6148, |
|
"step": 885000 |
|
}, |
|
{ |
|
"epoch": 26.5, |
|
"learning_rate": 3.510893463921647e-05, |
|
"loss": 0.6341, |
|
"step": 885500 |
|
}, |
|
{ |
|
"epoch": 26.51, |
|
"learning_rate": 3.495902458524885e-05, |
|
"loss": 0.6235, |
|
"step": 886000 |
|
}, |
|
{ |
|
"epoch": 26.53, |
|
"learning_rate": 3.4809114531281226e-05, |
|
"loss": 0.6234, |
|
"step": 886500 |
|
}, |
|
{ |
|
"epoch": 26.54, |
|
"learning_rate": 3.465920447731361e-05, |
|
"loss": 0.6301, |
|
"step": 887000 |
|
}, |
|
{ |
|
"epoch": 26.56, |
|
"learning_rate": 3.450929442334599e-05, |
|
"loss": 0.6289, |
|
"step": 887500 |
|
}, |
|
{ |
|
"epoch": 26.57, |
|
"learning_rate": 3.435938436937837e-05, |
|
"loss": 0.6293, |
|
"step": 888000 |
|
}, |
|
{ |
|
"epoch": 26.59, |
|
"learning_rate": 3.420947431541075e-05, |
|
"loss": 0.638, |
|
"step": 888500 |
|
}, |
|
{ |
|
"epoch": 26.6, |
|
"learning_rate": 3.405956426144313e-05, |
|
"loss": 0.63, |
|
"step": 889000 |
|
}, |
|
{ |
|
"epoch": 26.62, |
|
"learning_rate": 3.390965420747551e-05, |
|
"loss": 0.629, |
|
"step": 889500 |
|
}, |
|
{ |
|
"epoch": 26.63, |
|
"learning_rate": 3.375974415350789e-05, |
|
"loss": 0.6204, |
|
"step": 890000 |
|
}, |
|
{ |
|
"epoch": 26.65, |
|
"learning_rate": 3.3609834099540274e-05, |
|
"loss": 0.6449, |
|
"step": 890500 |
|
}, |
|
{ |
|
"epoch": 26.66, |
|
"learning_rate": 3.3459924045572656e-05, |
|
"loss": 0.6446, |
|
"step": 891000 |
|
}, |
|
{ |
|
"epoch": 26.68, |
|
"learning_rate": 3.331001399160503e-05, |
|
"loss": 0.6259, |
|
"step": 891500 |
|
}, |
|
{ |
|
"epoch": 26.69, |
|
"learning_rate": 3.3160103937637415e-05, |
|
"loss": 0.6269, |
|
"step": 892000 |
|
}, |
|
{ |
|
"epoch": 26.71, |
|
"learning_rate": 3.30101938836698e-05, |
|
"loss": 0.6279, |
|
"step": 892500 |
|
}, |
|
{ |
|
"epoch": 26.72, |
|
"learning_rate": 3.286028382970217e-05, |
|
"loss": 0.6399, |
|
"step": 893000 |
|
}, |
|
{ |
|
"epoch": 26.74, |
|
"learning_rate": 3.2710373775734556e-05, |
|
"loss": 0.6228, |
|
"step": 893500 |
|
}, |
|
{ |
|
"epoch": 26.75, |
|
"learning_rate": 3.256046372176694e-05, |
|
"loss": 0.6184, |
|
"step": 894000 |
|
}, |
|
{ |
|
"epoch": 26.77, |
|
"learning_rate": 3.2410553667799314e-05, |
|
"loss": 0.6319, |
|
"step": 894500 |
|
}, |
|
{ |
|
"epoch": 26.78, |
|
"learning_rate": 3.22606436138317e-05, |
|
"loss": 0.6289, |
|
"step": 895000 |
|
}, |
|
{ |
|
"epoch": 26.79, |
|
"learning_rate": 3.211073355986408e-05, |
|
"loss": 0.6381, |
|
"step": 895500 |
|
}, |
|
{ |
|
"epoch": 26.81, |
|
"learning_rate": 3.1960823505896455e-05, |
|
"loss": 0.6255, |
|
"step": 896000 |
|
}, |
|
{ |
|
"epoch": 26.82, |
|
"learning_rate": 3.1810913451928845e-05, |
|
"loss": 0.6276, |
|
"step": 896500 |
|
}, |
|
{ |
|
"epoch": 26.84, |
|
"learning_rate": 3.166100339796122e-05, |
|
"loss": 0.6345, |
|
"step": 897000 |
|
}, |
|
{ |
|
"epoch": 26.85, |
|
"learning_rate": 3.15110933439936e-05, |
|
"loss": 0.6359, |
|
"step": 897500 |
|
}, |
|
{ |
|
"epoch": 26.87, |
|
"learning_rate": 3.1361183290025986e-05, |
|
"loss": 0.6383, |
|
"step": 898000 |
|
}, |
|
{ |
|
"epoch": 26.88, |
|
"learning_rate": 3.121127323605836e-05, |
|
"loss": 0.6459, |
|
"step": 898500 |
|
}, |
|
{ |
|
"epoch": 26.9, |
|
"learning_rate": 3.1061363182090744e-05, |
|
"loss": 0.6469, |
|
"step": 899000 |
|
}, |
|
{ |
|
"epoch": 26.91, |
|
"learning_rate": 3.091145312812313e-05, |
|
"loss": 0.6389, |
|
"step": 899500 |
|
}, |
|
{ |
|
"epoch": 26.93, |
|
"learning_rate": 3.07615430741555e-05, |
|
"loss": 0.6246, |
|
"step": 900000 |
|
}, |
|
{ |
|
"epoch": 26.94, |
|
"learning_rate": 3.0611633020187886e-05, |
|
"loss": 0.6405, |
|
"step": 900500 |
|
}, |
|
{ |
|
"epoch": 26.96, |
|
"learning_rate": 3.0461722966220265e-05, |
|
"loss": 0.6212, |
|
"step": 901000 |
|
}, |
|
{ |
|
"epoch": 26.97, |
|
"learning_rate": 3.0311812912252647e-05, |
|
"loss": 0.6167, |
|
"step": 901500 |
|
}, |
|
{ |
|
"epoch": 26.99, |
|
"learning_rate": 3.0161902858285027e-05, |
|
"loss": 0.6309, |
|
"step": 902000 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"learning_rate": 3.0011992804317406e-05, |
|
"loss": 0.6253, |
|
"step": 902500 |
|
}, |
|
{ |
|
"epoch": 27.02, |
|
"learning_rate": 2.986208275034979e-05, |
|
"loss": 0.611, |
|
"step": 903000 |
|
}, |
|
{ |
|
"epoch": 27.03, |
|
"learning_rate": 2.9712172696382168e-05, |
|
"loss": 0.6216, |
|
"step": 903500 |
|
}, |
|
{ |
|
"epoch": 27.05, |
|
"learning_rate": 2.9562262642414547e-05, |
|
"loss": 0.6245, |
|
"step": 904000 |
|
}, |
|
{ |
|
"epoch": 27.06, |
|
"learning_rate": 2.941235258844693e-05, |
|
"loss": 0.623, |
|
"step": 904500 |
|
}, |
|
{ |
|
"epoch": 27.08, |
|
"learning_rate": 2.926244253447931e-05, |
|
"loss": 0.6232, |
|
"step": 905000 |
|
}, |
|
{ |
|
"epoch": 27.09, |
|
"learning_rate": 2.911253248051169e-05, |
|
"loss": 0.6223, |
|
"step": 905500 |
|
}, |
|
{ |
|
"epoch": 27.11, |
|
"learning_rate": 2.896262242654407e-05, |
|
"loss": 0.6055, |
|
"step": 906000 |
|
}, |
|
{ |
|
"epoch": 27.12, |
|
"learning_rate": 2.881271237257645e-05, |
|
"loss": 0.6126, |
|
"step": 906500 |
|
}, |
|
{ |
|
"epoch": 27.14, |
|
"learning_rate": 2.8662802318608832e-05, |
|
"loss": 0.6177, |
|
"step": 907000 |
|
}, |
|
{ |
|
"epoch": 27.15, |
|
"learning_rate": 2.851289226464121e-05, |
|
"loss": 0.6049, |
|
"step": 907500 |
|
}, |
|
{ |
|
"epoch": 27.17, |
|
"learning_rate": 2.836298221067359e-05, |
|
"loss": 0.6093, |
|
"step": 908000 |
|
}, |
|
{ |
|
"epoch": 27.18, |
|
"learning_rate": 2.8213072156705974e-05, |
|
"loss": 0.6124, |
|
"step": 908500 |
|
}, |
|
{ |
|
"epoch": 27.2, |
|
"learning_rate": 2.8063162102738353e-05, |
|
"loss": 0.6187, |
|
"step": 909000 |
|
}, |
|
{ |
|
"epoch": 27.21, |
|
"learning_rate": 2.7913252048770732e-05, |
|
"loss": 0.6217, |
|
"step": 909500 |
|
}, |
|
{ |
|
"epoch": 27.23, |
|
"learning_rate": 2.7763341994803118e-05, |
|
"loss": 0.6142, |
|
"step": 910000 |
|
}, |
|
{ |
|
"epoch": 27.24, |
|
"learning_rate": 2.7613431940835497e-05, |
|
"loss": 0.6215, |
|
"step": 910500 |
|
}, |
|
{ |
|
"epoch": 27.26, |
|
"learning_rate": 2.746352188686788e-05, |
|
"loss": 0.6221, |
|
"step": 911000 |
|
}, |
|
{ |
|
"epoch": 27.27, |
|
"learning_rate": 2.731361183290026e-05, |
|
"loss": 0.6259, |
|
"step": 911500 |
|
}, |
|
{ |
|
"epoch": 27.29, |
|
"learning_rate": 2.7163701778932638e-05, |
|
"loss": 0.6171, |
|
"step": 912000 |
|
}, |
|
{ |
|
"epoch": 27.3, |
|
"learning_rate": 2.701379172496502e-05, |
|
"loss": 0.6168, |
|
"step": 912500 |
|
}, |
|
{ |
|
"epoch": 27.32, |
|
"learning_rate": 2.68638816709974e-05, |
|
"loss": 0.6197, |
|
"step": 913000 |
|
}, |
|
{ |
|
"epoch": 27.33, |
|
"learning_rate": 2.671397161702978e-05, |
|
"loss": 0.6158, |
|
"step": 913500 |
|
}, |
|
{ |
|
"epoch": 27.35, |
|
"learning_rate": 2.6564061563062162e-05, |
|
"loss": 0.6203, |
|
"step": 914000 |
|
}, |
|
{ |
|
"epoch": 27.36, |
|
"learning_rate": 2.641415150909454e-05, |
|
"loss": 0.5991, |
|
"step": 914500 |
|
}, |
|
{ |
|
"epoch": 27.38, |
|
"learning_rate": 2.626424145512692e-05, |
|
"loss": 0.6053, |
|
"step": 915000 |
|
}, |
|
{ |
|
"epoch": 27.39, |
|
"learning_rate": 2.6114331401159303e-05, |
|
"loss": 0.63, |
|
"step": 915500 |
|
}, |
|
{ |
|
"epoch": 27.41, |
|
"learning_rate": 2.5964421347191682e-05, |
|
"loss": 0.5957, |
|
"step": 916000 |
|
}, |
|
{ |
|
"epoch": 27.42, |
|
"learning_rate": 2.5814511293224065e-05, |
|
"loss": 0.627, |
|
"step": 916500 |
|
}, |
|
{ |
|
"epoch": 27.44, |
|
"learning_rate": 2.5664601239256444e-05, |
|
"loss": 0.6242, |
|
"step": 917000 |
|
}, |
|
{ |
|
"epoch": 27.45, |
|
"learning_rate": 2.5514691185288823e-05, |
|
"loss": 0.6362, |
|
"step": 917500 |
|
}, |
|
{ |
|
"epoch": 27.47, |
|
"learning_rate": 2.5364781131321206e-05, |
|
"loss": 0.6132, |
|
"step": 918000 |
|
}, |
|
{ |
|
"epoch": 27.48, |
|
"learning_rate": 2.5214871077353585e-05, |
|
"loss": 0.6239, |
|
"step": 918500 |
|
}, |
|
{ |
|
"epoch": 27.5, |
|
"learning_rate": 2.5064961023385964e-05, |
|
"loss": 0.6159, |
|
"step": 919000 |
|
}, |
|
{ |
|
"epoch": 27.51, |
|
"learning_rate": 2.4915050969418347e-05, |
|
"loss": 0.6065, |
|
"step": 919500 |
|
}, |
|
{ |
|
"epoch": 27.53, |
|
"learning_rate": 2.4765140915450726e-05, |
|
"loss": 0.6253, |
|
"step": 920000 |
|
}, |
|
{ |
|
"epoch": 27.54, |
|
"learning_rate": 2.4615230861483106e-05, |
|
"loss": 0.6141, |
|
"step": 920500 |
|
}, |
|
{ |
|
"epoch": 27.56, |
|
"learning_rate": 2.4465320807515488e-05, |
|
"loss": 0.6172, |
|
"step": 921000 |
|
}, |
|
{ |
|
"epoch": 27.57, |
|
"learning_rate": 2.4315410753547867e-05, |
|
"loss": 0.6115, |
|
"step": 921500 |
|
}, |
|
{ |
|
"epoch": 27.59, |
|
"learning_rate": 2.4165500699580247e-05, |
|
"loss": 0.6192, |
|
"step": 922000 |
|
}, |
|
{ |
|
"epoch": 27.6, |
|
"learning_rate": 2.401559064561263e-05, |
|
"loss": 0.6169, |
|
"step": 922500 |
|
}, |
|
{ |
|
"epoch": 27.62, |
|
"learning_rate": 2.386568059164501e-05, |
|
"loss": 0.6115, |
|
"step": 923000 |
|
}, |
|
{ |
|
"epoch": 27.63, |
|
"learning_rate": 2.3715770537677394e-05, |
|
"loss": 0.6232, |
|
"step": 923500 |
|
}, |
|
{ |
|
"epoch": 27.65, |
|
"learning_rate": 2.3565860483709774e-05, |
|
"loss": 0.6213, |
|
"step": 924000 |
|
}, |
|
{ |
|
"epoch": 27.66, |
|
"learning_rate": 2.3415950429742153e-05, |
|
"loss": 0.6113, |
|
"step": 924500 |
|
}, |
|
{ |
|
"epoch": 27.68, |
|
"learning_rate": 2.3266040375774536e-05, |
|
"loss": 0.6111, |
|
"step": 925000 |
|
}, |
|
{ |
|
"epoch": 27.69, |
|
"learning_rate": 2.3116130321806915e-05, |
|
"loss": 0.6208, |
|
"step": 925500 |
|
}, |
|
{ |
|
"epoch": 27.71, |
|
"learning_rate": 2.2966220267839294e-05, |
|
"loss": 0.6097, |
|
"step": 926000 |
|
}, |
|
{ |
|
"epoch": 27.72, |
|
"learning_rate": 2.2816310213871677e-05, |
|
"loss": 0.6122, |
|
"step": 926500 |
|
}, |
|
{ |
|
"epoch": 27.74, |
|
"learning_rate": 2.2666400159904056e-05, |
|
"loss": 0.5979, |
|
"step": 927000 |
|
}, |
|
{ |
|
"epoch": 27.75, |
|
"learning_rate": 2.251649010593644e-05, |
|
"loss": 0.6052, |
|
"step": 927500 |
|
}, |
|
{ |
|
"epoch": 27.77, |
|
"learning_rate": 2.2366580051968818e-05, |
|
"loss": 0.5997, |
|
"step": 928000 |
|
}, |
|
{ |
|
"epoch": 27.78, |
|
"learning_rate": 2.2216669998001197e-05, |
|
"loss": 0.6175, |
|
"step": 928500 |
|
}, |
|
{ |
|
"epoch": 27.8, |
|
"learning_rate": 2.206675994403358e-05, |
|
"loss": 0.62, |
|
"step": 929000 |
|
}, |
|
{ |
|
"epoch": 27.81, |
|
"learning_rate": 2.191684989006596e-05, |
|
"loss": 0.6212, |
|
"step": 929500 |
|
}, |
|
{ |
|
"epoch": 27.83, |
|
"learning_rate": 2.1766939836098338e-05, |
|
"loss": 0.6213, |
|
"step": 930000 |
|
}, |
|
{ |
|
"epoch": 27.84, |
|
"learning_rate": 2.161702978213072e-05, |
|
"loss": 0.6364, |
|
"step": 930500 |
|
}, |
|
{ |
|
"epoch": 27.86, |
|
"learning_rate": 2.14671197281631e-05, |
|
"loss": 0.6241, |
|
"step": 931000 |
|
}, |
|
{ |
|
"epoch": 27.87, |
|
"learning_rate": 2.131720967419548e-05, |
|
"loss": 0.6028, |
|
"step": 931500 |
|
}, |
|
{ |
|
"epoch": 27.89, |
|
"learning_rate": 2.116729962022786e-05, |
|
"loss": 0.6253, |
|
"step": 932000 |
|
}, |
|
{ |
|
"epoch": 27.9, |
|
"learning_rate": 2.101738956626024e-05, |
|
"loss": 0.6185, |
|
"step": 932500 |
|
}, |
|
{ |
|
"epoch": 27.92, |
|
"learning_rate": 2.086747951229262e-05, |
|
"loss": 0.6058, |
|
"step": 933000 |
|
}, |
|
{ |
|
"epoch": 27.93, |
|
"learning_rate": 2.0717569458325003e-05, |
|
"loss": 0.6167, |
|
"step": 933500 |
|
}, |
|
{ |
|
"epoch": 27.95, |
|
"learning_rate": 2.0567659404357382e-05, |
|
"loss": 0.624, |
|
"step": 934000 |
|
}, |
|
{ |
|
"epoch": 27.96, |
|
"learning_rate": 2.0417749350389765e-05, |
|
"loss": 0.6288, |
|
"step": 934500 |
|
}, |
|
{ |
|
"epoch": 27.98, |
|
"learning_rate": 2.0267839296422144e-05, |
|
"loss": 0.624, |
|
"step": 935000 |
|
}, |
|
{ |
|
"epoch": 27.99, |
|
"learning_rate": 2.0117929242454523e-05, |
|
"loss": 0.6234, |
|
"step": 935500 |
|
}, |
|
{ |
|
"epoch": 28.01, |
|
"learning_rate": 1.9968019188486906e-05, |
|
"loss": 0.6059, |
|
"step": 936000 |
|
}, |
|
{ |
|
"epoch": 28.02, |
|
"learning_rate": 1.9818109134519285e-05, |
|
"loss": 0.6089, |
|
"step": 936500 |
|
}, |
|
{ |
|
"epoch": 28.04, |
|
"learning_rate": 1.966819908055167e-05, |
|
"loss": 0.6003, |
|
"step": 937000 |
|
}, |
|
{ |
|
"epoch": 28.05, |
|
"learning_rate": 1.951828902658405e-05, |
|
"loss": 0.5908, |
|
"step": 937500 |
|
}, |
|
{ |
|
"epoch": 28.07, |
|
"learning_rate": 1.936837897261643e-05, |
|
"loss": 0.6169, |
|
"step": 938000 |
|
}, |
|
{ |
|
"epoch": 28.08, |
|
"learning_rate": 1.9218468918648812e-05, |
|
"loss": 0.5899, |
|
"step": 938500 |
|
}, |
|
{ |
|
"epoch": 28.1, |
|
"learning_rate": 1.906855886468119e-05, |
|
"loss": 0.606, |
|
"step": 939000 |
|
}, |
|
{ |
|
"epoch": 28.11, |
|
"learning_rate": 1.891864881071357e-05, |
|
"loss": 0.6065, |
|
"step": 939500 |
|
}, |
|
{ |
|
"epoch": 28.13, |
|
"learning_rate": 1.8768738756745953e-05, |
|
"loss": 0.6151, |
|
"step": 940000 |
|
}, |
|
{ |
|
"epoch": 28.14, |
|
"learning_rate": 1.8618828702778332e-05, |
|
"loss": 0.6042, |
|
"step": 940500 |
|
}, |
|
{ |
|
"epoch": 28.16, |
|
"learning_rate": 1.846891864881071e-05, |
|
"loss": 0.5988, |
|
"step": 941000 |
|
}, |
|
{ |
|
"epoch": 28.17, |
|
"learning_rate": 1.8319008594843094e-05, |
|
"loss": 0.6099, |
|
"step": 941500 |
|
}, |
|
{ |
|
"epoch": 28.19, |
|
"learning_rate": 1.8169098540875473e-05, |
|
"loss": 0.6023, |
|
"step": 942000 |
|
}, |
|
{ |
|
"epoch": 28.2, |
|
"learning_rate": 1.8019188486907853e-05, |
|
"loss": 0.6139, |
|
"step": 942500 |
|
}, |
|
{ |
|
"epoch": 28.22, |
|
"learning_rate": 1.7869278432940235e-05, |
|
"loss": 0.6062, |
|
"step": 943000 |
|
}, |
|
{ |
|
"epoch": 28.23, |
|
"learning_rate": 1.7719368378972614e-05, |
|
"loss": 0.6136, |
|
"step": 943500 |
|
}, |
|
{ |
|
"epoch": 28.25, |
|
"learning_rate": 1.7569458325004994e-05, |
|
"loss": 0.6063, |
|
"step": 944000 |
|
}, |
|
{ |
|
"epoch": 28.26, |
|
"learning_rate": 1.7419548271037376e-05, |
|
"loss": 0.6142, |
|
"step": 944500 |
|
}, |
|
{ |
|
"epoch": 28.28, |
|
"learning_rate": 1.7269638217069756e-05, |
|
"loss": 0.6106, |
|
"step": 945000 |
|
}, |
|
{ |
|
"epoch": 28.29, |
|
"learning_rate": 1.7119728163102138e-05, |
|
"loss": 0.5985, |
|
"step": 945500 |
|
}, |
|
{ |
|
"epoch": 28.31, |
|
"learning_rate": 1.6969818109134517e-05, |
|
"loss": 0.6193, |
|
"step": 946000 |
|
}, |
|
{ |
|
"epoch": 28.32, |
|
"learning_rate": 1.6819908055166897e-05, |
|
"loss": 0.6159, |
|
"step": 946500 |
|
}, |
|
{ |
|
"epoch": 28.34, |
|
"learning_rate": 1.666999800119928e-05, |
|
"loss": 0.6019, |
|
"step": 947000 |
|
}, |
|
{ |
|
"epoch": 28.35, |
|
"learning_rate": 1.6520087947231662e-05, |
|
"loss": 0.5911, |
|
"step": 947500 |
|
}, |
|
{ |
|
"epoch": 28.37, |
|
"learning_rate": 1.637017789326404e-05, |
|
"loss": 0.6013, |
|
"step": 948000 |
|
}, |
|
{ |
|
"epoch": 28.38, |
|
"learning_rate": 1.622026783929642e-05, |
|
"loss": 0.6036, |
|
"step": 948500 |
|
}, |
|
{ |
|
"epoch": 28.4, |
|
"learning_rate": 1.6070357785328803e-05, |
|
"loss": 0.6076, |
|
"step": 949000 |
|
}, |
|
{ |
|
"epoch": 28.41, |
|
"learning_rate": 1.5920447731361182e-05, |
|
"loss": 0.6173, |
|
"step": 949500 |
|
}, |
|
{ |
|
"epoch": 28.43, |
|
"learning_rate": 1.577053767739356e-05, |
|
"loss": 0.601, |
|
"step": 950000 |
|
}, |
|
{ |
|
"epoch": 28.44, |
|
"learning_rate": 1.5620627623425944e-05, |
|
"loss": 0.578, |
|
"step": 950500 |
|
}, |
|
{ |
|
"epoch": 28.46, |
|
"learning_rate": 1.5470717569458323e-05, |
|
"loss": 0.6151, |
|
"step": 951000 |
|
}, |
|
{ |
|
"epoch": 28.47, |
|
"learning_rate": 1.5320807515490702e-05, |
|
"loss": 0.609, |
|
"step": 951500 |
|
}, |
|
{ |
|
"epoch": 28.49, |
|
"learning_rate": 1.5170897461523085e-05, |
|
"loss": 0.6098, |
|
"step": 952000 |
|
}, |
|
{ |
|
"epoch": 28.5, |
|
"learning_rate": 1.5020987407555464e-05, |
|
"loss": 0.6045, |
|
"step": 952500 |
|
}, |
|
{ |
|
"epoch": 28.52, |
|
"learning_rate": 1.4871077353587845e-05, |
|
"loss": 0.6059, |
|
"step": 953000 |
|
}, |
|
{ |
|
"epoch": 28.53, |
|
"learning_rate": 1.4721167299620226e-05, |
|
"loss": 0.6018, |
|
"step": 953500 |
|
}, |
|
{ |
|
"epoch": 28.55, |
|
"learning_rate": 1.4571257245652609e-05, |
|
"loss": 0.611, |
|
"step": 954000 |
|
}, |
|
{ |
|
"epoch": 28.56, |
|
"learning_rate": 1.4421347191684988e-05, |
|
"loss": 0.5968, |
|
"step": 954500 |
|
}, |
|
{ |
|
"epoch": 28.58, |
|
"learning_rate": 1.4271437137717369e-05, |
|
"loss": 0.6085, |
|
"step": 955000 |
|
}, |
|
{ |
|
"epoch": 28.59, |
|
"learning_rate": 1.412152708374975e-05, |
|
"loss": 0.5988, |
|
"step": 955500 |
|
}, |
|
{ |
|
"epoch": 28.61, |
|
"learning_rate": 1.3971617029782129e-05, |
|
"loss": 0.5933, |
|
"step": 956000 |
|
}, |
|
{ |
|
"epoch": 28.62, |
|
"learning_rate": 1.382170697581451e-05, |
|
"loss": 0.5988, |
|
"step": 956500 |
|
}, |
|
{ |
|
"epoch": 28.64, |
|
"learning_rate": 1.3671796921846891e-05, |
|
"loss": 0.6041, |
|
"step": 957000 |
|
}, |
|
{ |
|
"epoch": 28.65, |
|
"learning_rate": 1.3521886867879272e-05, |
|
"loss": 0.6055, |
|
"step": 957500 |
|
}, |
|
{ |
|
"epoch": 28.67, |
|
"learning_rate": 1.3371976813911651e-05, |
|
"loss": 0.6016, |
|
"step": 958000 |
|
}, |
|
{ |
|
"epoch": 28.68, |
|
"learning_rate": 1.3222066759944032e-05, |
|
"loss": 0.5995, |
|
"step": 958500 |
|
}, |
|
{ |
|
"epoch": 28.69, |
|
"learning_rate": 1.3072156705976413e-05, |
|
"loss": 0.5996, |
|
"step": 959000 |
|
}, |
|
{ |
|
"epoch": 28.71, |
|
"learning_rate": 1.2922246652008792e-05, |
|
"loss": 0.6126, |
|
"step": 959500 |
|
}, |
|
{ |
|
"epoch": 28.72, |
|
"learning_rate": 1.2772336598041173e-05, |
|
"loss": 0.5835, |
|
"step": 960000 |
|
}, |
|
{ |
|
"epoch": 28.74, |
|
"learning_rate": 1.2622426544073556e-05, |
|
"loss": 0.5804, |
|
"step": 960500 |
|
}, |
|
{ |
|
"epoch": 28.75, |
|
"learning_rate": 1.2472516490105937e-05, |
|
"loss": 0.6171, |
|
"step": 961000 |
|
}, |
|
{ |
|
"epoch": 28.77, |
|
"learning_rate": 1.2322606436138316e-05, |
|
"loss": 0.5997, |
|
"step": 961500 |
|
}, |
|
{ |
|
"epoch": 28.78, |
|
"learning_rate": 1.2172696382170697e-05, |
|
"loss": 0.6153, |
|
"step": 962000 |
|
}, |
|
{ |
|
"epoch": 28.8, |
|
"learning_rate": 1.2022786328203078e-05, |
|
"loss": 0.5941, |
|
"step": 962500 |
|
}, |
|
{ |
|
"epoch": 28.81, |
|
"learning_rate": 1.1872876274235459e-05, |
|
"loss": 0.6022, |
|
"step": 963000 |
|
}, |
|
{ |
|
"epoch": 28.83, |
|
"learning_rate": 1.1722966220267838e-05, |
|
"loss": 0.6248, |
|
"step": 963500 |
|
}, |
|
{ |
|
"epoch": 28.84, |
|
"learning_rate": 1.1573056166300219e-05, |
|
"loss": 0.6033, |
|
"step": 964000 |
|
}, |
|
{ |
|
"epoch": 28.86, |
|
"learning_rate": 1.14231461123326e-05, |
|
"loss": 0.6059, |
|
"step": 964500 |
|
}, |
|
{ |
|
"epoch": 28.87, |
|
"learning_rate": 1.1273236058364979e-05, |
|
"loss": 0.6137, |
|
"step": 965000 |
|
}, |
|
{ |
|
"epoch": 28.89, |
|
"learning_rate": 1.112332600439736e-05, |
|
"loss": 0.6019, |
|
"step": 965500 |
|
}, |
|
{ |
|
"epoch": 28.9, |
|
"learning_rate": 1.097341595042974e-05, |
|
"loss": 0.6003, |
|
"step": 966000 |
|
}, |
|
{ |
|
"epoch": 28.92, |
|
"learning_rate": 1.0823505896462122e-05, |
|
"loss": 0.619, |
|
"step": 966500 |
|
}, |
|
{ |
|
"epoch": 28.93, |
|
"learning_rate": 1.0673595842494503e-05, |
|
"loss": 0.6056, |
|
"step": 967000 |
|
}, |
|
{ |
|
"epoch": 28.95, |
|
"learning_rate": 1.0523685788526884e-05, |
|
"loss": 0.5954, |
|
"step": 967500 |
|
}, |
|
{ |
|
"epoch": 28.96, |
|
"learning_rate": 1.0373775734559264e-05, |
|
"loss": 0.6115, |
|
"step": 968000 |
|
}, |
|
{ |
|
"epoch": 28.98, |
|
"learning_rate": 1.0223865680591645e-05, |
|
"loss": 0.6067, |
|
"step": 968500 |
|
}, |
|
{ |
|
"epoch": 28.99, |
|
"learning_rate": 1.0073955626624025e-05, |
|
"loss": 0.5987, |
|
"step": 969000 |
|
} |
|
], |
|
"logging_steps": 500, |
|
"max_steps": 1002600, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 30, |
|
"save_steps": 500, |
|
"total_flos": 2.445394000189791e+21, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |