|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 15.0, |
|
"eval_steps": 500, |
|
"global_step": 6030, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0024875621890547263, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 3.316749585406302e-07, |
|
"loss": 2.7785, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.012437810945273632, |
|
"grad_norm": 2.125, |
|
"learning_rate": 1.6583747927031512e-06, |
|
"loss": 2.7761, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.024875621890547265, |
|
"grad_norm": 3.359375, |
|
"learning_rate": 3.3167495854063024e-06, |
|
"loss": 2.7915, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03731343283582089, |
|
"grad_norm": 5.4375, |
|
"learning_rate": 4.975124378109453e-06, |
|
"loss": 2.8213, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04975124378109453, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 6.633499170812605e-06, |
|
"loss": 2.7988, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.06218905472636816, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 8.291873963515755e-06, |
|
"loss": 2.7963, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07462686567164178, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 9.950248756218906e-06, |
|
"loss": 2.8083, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08706467661691543, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 1.1608623548922057e-05, |
|
"loss": 2.7398, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.09950248756218906, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.326699834162521e-05, |
|
"loss": 2.7001, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.11194029850746269, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.4925373134328357e-05, |
|
"loss": 2.6374, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.12437810945273632, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 1.658374792703151e-05, |
|
"loss": 2.6203, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.13681592039800994, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 1.8242122719734662e-05, |
|
"loss": 2.5173, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.14925373134328357, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.990049751243781e-05, |
|
"loss": 2.4443, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.16169154228855723, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.155887230514096e-05, |
|
"loss": 2.3755, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.17412935323383086, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 2.3217247097844114e-05, |
|
"loss": 2.3139, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.1865671641791045, |
|
"grad_norm": 1.953125, |
|
"learning_rate": 2.4875621890547266e-05, |
|
"loss": 2.2244, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.19900497512437812, |
|
"grad_norm": 26.25, |
|
"learning_rate": 2.653399668325042e-05, |
|
"loss": 2.1543, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.21144278606965175, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 2.8192371475953565e-05, |
|
"loss": 2.095, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.22388059701492538, |
|
"grad_norm": 2.0, |
|
"learning_rate": 2.9850746268656714e-05, |
|
"loss": 2.0589, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.236318407960199, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 3.150912106135987e-05, |
|
"loss": 1.9888, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.24875621890547264, |
|
"grad_norm": 11.875, |
|
"learning_rate": 3.316749585406302e-05, |
|
"loss": 1.9585, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.26119402985074625, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 3.4825870646766175e-05, |
|
"loss": 1.8874, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.2736318407960199, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 3.6484245439469325e-05, |
|
"loss": 1.8321, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2860696517412935, |
|
"grad_norm": 0.875, |
|
"learning_rate": 3.8142620232172474e-05, |
|
"loss": 1.8167, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.29850746268656714, |
|
"grad_norm": 0.828125, |
|
"learning_rate": 3.980099502487562e-05, |
|
"loss": 1.7428, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.31094527363184077, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 4.145936981757877e-05, |
|
"loss": 1.6836, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.32338308457711445, |
|
"grad_norm": 6.5625, |
|
"learning_rate": 4.311774461028192e-05, |
|
"loss": 1.651, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.3358208955223881, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 4.477611940298508e-05, |
|
"loss": 1.6164, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.3482587064676617, |
|
"grad_norm": 1.25, |
|
"learning_rate": 4.643449419568823e-05, |
|
"loss": 1.5637, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.36069651741293535, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 4.8092868988391376e-05, |
|
"loss": 1.5586, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.373134328358209, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 4.975124378109453e-05, |
|
"loss": 1.4954, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.3855721393034826, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.140961857379768e-05, |
|
"loss": 1.467, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.39800995024875624, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 5.306799336650084e-05, |
|
"loss": 1.4484, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.41044776119402987, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 5.472636815920398e-05, |
|
"loss": 1.4312, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.4228855721393035, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 5.638474295190713e-05, |
|
"loss": 1.3851, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.43532338308457713, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 5.8043117744610286e-05, |
|
"loss": 1.3694, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.44776119402985076, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 5.970149253731343e-05, |
|
"loss": 1.3625, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.4601990049751244, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 6.135986733001658e-05, |
|
"loss": 1.3397, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.472636815920398, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 6.301824212271974e-05, |
|
"loss": 1.3142, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.48507462686567165, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 6.46766169154229e-05, |
|
"loss": 1.3054, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.4975124378109453, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 6.633499170812604e-05, |
|
"loss": 1.2999, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.5099502487562189, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 6.79933665008292e-05, |
|
"loss": 1.2675, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.5223880597014925, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 6.965174129353235e-05, |
|
"loss": 1.27, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.5348258706467661, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 7.13101160862355e-05, |
|
"loss": 1.2485, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.5472636815920398, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 7.296849087893865e-05, |
|
"loss": 1.2376, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.5597014925373134, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 7.46268656716418e-05, |
|
"loss": 1.2355, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.572139303482587, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 7.628524046434495e-05, |
|
"loss": 1.2192, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.5845771144278606, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 7.794361525704809e-05, |
|
"loss": 1.2053, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.5970149253731343, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 7.960199004975125e-05, |
|
"loss": 1.2101, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.6094527363184079, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 8.126036484245439e-05, |
|
"loss": 1.1958, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.6218905472636815, |
|
"grad_norm": 0.375, |
|
"learning_rate": 8.291873963515754e-05, |
|
"loss": 1.1965, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.6343283582089553, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 8.45771144278607e-05, |
|
"loss": 1.1869, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.6467661691542289, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 8.623548922056384e-05, |
|
"loss": 1.189, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.6592039800995025, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 8.7893864013267e-05, |
|
"loss": 1.1812, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.6716417910447762, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 8.955223880597016e-05, |
|
"loss": 1.1753, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.6840796019900498, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 9.12106135986733e-05, |
|
"loss": 1.1753, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.6965174129353234, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 9.286898839137645e-05, |
|
"loss": 1.1671, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.7089552238805971, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 9.452736318407961e-05, |
|
"loss": 1.1669, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.7213930348258707, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 9.618573797678275e-05, |
|
"loss": 1.1556, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.7338308457711443, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 9.784411276948591e-05, |
|
"loss": 1.1538, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.746268656716418, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 9.950248756218906e-05, |
|
"loss": 1.1497, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.7587064676616916, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00010116086235489222, |
|
"loss": 1.137, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.7711442786069652, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.00010281923714759536, |
|
"loss": 1.1431, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.7835820895522388, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.0001044776119402985, |
|
"loss": 1.1302, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.7960199004975125, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.00010613598673300168, |
|
"loss": 1.1457, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.8084577114427861, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00010779436152570482, |
|
"loss": 1.1432, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.8208955223880597, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00010945273631840796, |
|
"loss": 1.1239, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.8333333333333334, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00011111111111111112, |
|
"loss": 1.1418, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.845771144278607, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.00011276948590381426, |
|
"loss": 1.1212, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.8582089552238806, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00011442786069651741, |
|
"loss": 1.1255, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.8706467661691543, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.00011608623548922057, |
|
"loss": 1.1217, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.8830845771144279, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00011774461028192371, |
|
"loss": 1.1276, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.8955223880597015, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00011940298507462686, |
|
"loss": 1.118, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.9079601990049752, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00012106135986733003, |
|
"loss": 1.123, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.9203980099502488, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00012271973466003317, |
|
"loss": 1.1072, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.9328358208955224, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.0001243781094527363, |
|
"loss": 1.0974, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.945273631840796, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00012603648424543948, |
|
"loss": 1.1078, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.9577114427860697, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00012769485903814262, |
|
"loss": 1.1133, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.9701492537313433, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 0.0001293532338308458, |
|
"loss": 1.11, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.9825870646766169, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.00013101160862354893, |
|
"loss": 1.1074, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.9950248756218906, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.00013266998341625208, |
|
"loss": 1.097, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 2.4839065074920654, |
|
"eval_runtime": 0.5413, |
|
"eval_samples_per_second": 18.473, |
|
"eval_steps_per_second": 1.847, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.007462686567164, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.00013432835820895525, |
|
"loss": 1.0948, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.0199004975124377, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0001359867330016584, |
|
"loss": 1.1004, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.0323383084577114, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.00013764510779436153, |
|
"loss": 1.081, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.044776119402985, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.0001393034825870647, |
|
"loss": 1.074, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.0572139303482586, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 0.00014096185737976784, |
|
"loss": 1.0773, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.0696517412935322, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.000142620232172471, |
|
"loss": 1.085, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.0820895522388059, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 0.00014427860696517416, |
|
"loss": 1.0969, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.0945273631840795, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0001459369817578773, |
|
"loss": 1.0822, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.1069651741293531, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.00014759535655058044, |
|
"loss": 1.0884, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.1194029850746268, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 0.0001492537313432836, |
|
"loss": 1.0735, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.1318407960199006, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 0.00015091210613598675, |
|
"loss": 1.0884, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.144278606965174, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 0.0001525704809286899, |
|
"loss": 1.0784, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.1567164179104479, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 0.00015422885572139304, |
|
"loss": 1.0732, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.1691542288557213, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.00015588723051409618, |
|
"loss": 1.074, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.1815920398009951, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 0.00015754560530679935, |
|
"loss": 1.0803, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.1940298507462686, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.0001592039800995025, |
|
"loss": 1.0588, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.2064676616915424, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00016086235489220564, |
|
"loss": 1.0512, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.2189054726368158, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00016252072968490878, |
|
"loss": 1.0672, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.2313432835820897, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00016417910447761195, |
|
"loss": 1.0776, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.243781094527363, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.0001658374792703151, |
|
"loss": 1.0782, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.256218905472637, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00016749585406301823, |
|
"loss": 1.0584, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.2686567164179103, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.0001691542288557214, |
|
"loss": 1.0686, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.2810945273631842, |
|
"grad_norm": 0.80078125, |
|
"learning_rate": 0.00017081260364842454, |
|
"loss": 1.0715, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.2935323383084576, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.0001724709784411277, |
|
"loss": 1.0738, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.3059701492537314, |
|
"grad_norm": 0.65625, |
|
"learning_rate": 0.00017412935323383086, |
|
"loss": 1.0601, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.3184079601990049, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.000175787728026534, |
|
"loss": 1.0628, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.3308457711442787, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00017744610281923714, |
|
"loss": 1.0583, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.3432835820895521, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 0.0001791044776119403, |
|
"loss": 1.0668, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.355721393034826, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00018076285240464345, |
|
"loss": 1.0683, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.3681592039800994, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.0001824212271973466, |
|
"loss": 1.0443, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.3805970149253732, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00018407960199004977, |
|
"loss": 1.0493, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.3930348258706466, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.0001857379767827529, |
|
"loss": 1.0462, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.4054726368159205, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 0.00018739635157545605, |
|
"loss": 1.0486, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.417910447761194, |
|
"grad_norm": 0.76953125, |
|
"learning_rate": 0.00018905472636815922, |
|
"loss": 1.0599, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.4303482587064678, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 0.00019071310116086236, |
|
"loss": 1.0311, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.4427860696517412, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 0.0001923714759535655, |
|
"loss": 1.045, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.455223880597015, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 0.00019402985074626867, |
|
"loss": 1.051, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.4676616915422884, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 0.00019568822553897182, |
|
"loss": 1.0535, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.4800995024875623, |
|
"grad_norm": 0.64453125, |
|
"learning_rate": 0.00019734660033167496, |
|
"loss": 1.054, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.4925373134328357, |
|
"grad_norm": 0.875, |
|
"learning_rate": 0.00019900497512437813, |
|
"loss": 1.0383, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.5049751243781095, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 0.00019999993297913182, |
|
"loss": 1.0563, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.517412935323383, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.0001999991789953964, |
|
"loss": 1.0399, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.5298507462686568, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.000199997587258178, |
|
"loss": 1.0369, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.5422885572139302, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00019999515778081154, |
|
"loss": 1.0362, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.554726368159204, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00019999189058365023, |
|
"loss": 1.0356, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.5671641791044775, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.0001999877856940653, |
|
"loss": 1.0452, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.5796019900497513, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.00019998284314644597, |
|
"loss": 1.0375, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.5920398009950247, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00019997706298219888, |
|
"loss": 1.0387, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.6044776119402986, |
|
"grad_norm": 0.64453125, |
|
"learning_rate": 0.00019997044524974799, |
|
"loss": 1.0344, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.616915422885572, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.000199962990004534, |
|
"loss": 1.0329, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.6293532338308458, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 0.00019995469730901407, |
|
"loss": 1.0369, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.6417910447761193, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00019994556723266103, |
|
"loss": 1.0334, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.654228855721393, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0001999355998519631, |
|
"loss": 1.0294, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00019992479525042303, |
|
"loss": 1.0203, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.6791044776119404, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 0.00019991315351855748, |
|
"loss": 1.03, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.6915422885572138, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.00019990067475389626, |
|
"loss": 1.034, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.7039800995024876, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00019988735906098154, |
|
"loss": 1.0294, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.716417910447761, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00019987320655136693, |
|
"loss": 1.0287, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.728855721393035, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00019985821734361646, |
|
"loss": 1.0259, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.7412935323383083, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.00019984239156330388, |
|
"loss": 1.0199, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.7537313432835822, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00019982572934301122, |
|
"loss": 1.0248, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.7661691542288556, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.000199808230822328, |
|
"loss": 1.0321, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.7786069651741294, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 0.00019978989614784988, |
|
"loss": 1.0268, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.7910447761194028, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 0.0001997707254731775, |
|
"loss": 1.0317, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.8034825870646767, |
|
"grad_norm": 0.6875, |
|
"learning_rate": 0.00019975071895891514, |
|
"loss": 1.037, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.81592039800995, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0001997298767726695, |
|
"loss": 1.0209, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.828358208955224, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00019970819908904814, |
|
"loss": 1.0206, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.8407960199004973, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00019968568608965808, |
|
"loss": 1.031, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.8532338308457712, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0001996623379631043, |
|
"loss": 1.0212, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.8656716417910446, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00019963815490498817, |
|
"loss": 1.0206, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.8781094527363185, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0001996131371179058, |
|
"loss": 1.0303, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.890547263681592, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 0.00019958728481144622, |
|
"loss": 1.0131, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.9029850746268657, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00019956059820218982, |
|
"loss": 1.0247, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.9154228855721394, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.00019953307751370647, |
|
"loss": 1.0152, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.927860696517413, |
|
"grad_norm": 0.625, |
|
"learning_rate": 0.00019950472297655355, |
|
"loss": 1.0262, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.9402985074626866, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00019947553482827418, |
|
"loss": 1.0148, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.9527363184079602, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.0001994455133133951, |
|
"loss": 1.0253, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.9651741293532339, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0001994146586834246, |
|
"loss": 1.005, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.9776119402985075, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00019938297119685054, |
|
"loss": 1.0135, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.9900497512437811, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.0001993504511191382, |
|
"loss": 1.0176, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 2.453425884246826, |
|
"eval_runtime": 0.5408, |
|
"eval_samples_per_second": 18.492, |
|
"eval_steps_per_second": 1.849, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.0024875621890548, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00019931709872272784, |
|
"loss": 1.016, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.014925373134328, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 0.00019928291428703262, |
|
"loss": 1.0027, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.027363184079602, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.0001992478980984362, |
|
"loss": 0.9947, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.0398009950248754, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00019921205045029036, |
|
"loss": 1.001, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.0522388059701493, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00019917537164291244, |
|
"loss": 1.0045, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.0646766169154227, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00019913786198358298, |
|
"loss": 0.9802, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.0771144278606966, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.000199099521786543, |
|
"loss": 1.0032, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.08955223880597, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.0001990603513729915, |
|
"loss": 0.9995, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.101990049751244, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.0001990203510710827, |
|
"loss": 1.0095, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.1144278606965172, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00019897952121592324, |
|
"loss": 1.0142, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.126865671641791, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 0.00019893786214956945, |
|
"loss": 0.9994, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.1393034825870645, |
|
"grad_norm": 0.65625, |
|
"learning_rate": 0.0001988953742210245, |
|
"loss": 0.9955, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.1517412935323383, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 0.00019885205778623545, |
|
"loss": 1.0022, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.1641791044776117, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.0001988079132080901, |
|
"loss": 0.9958, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.1766169154228856, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00019876294085641435, |
|
"loss": 0.9976, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.189054726368159, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 0.00019871714110796874, |
|
"loss": 0.9886, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.201492537313433, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 0.0001986705143464453, |
|
"loss": 1.0021, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.2139303482587063, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 0.00019862306096246464, |
|
"loss": 0.9989, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.22636815920398, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00019857478135357234, |
|
"loss": 0.9939, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.2388059701492535, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0001985256759242359, |
|
"loss": 0.989, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.2512437810945274, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0001984757450858411, |
|
"loss": 0.9938, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.2636815920398012, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 0.00019842498925668871, |
|
"loss": 1.0024, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.2761194029850746, |
|
"grad_norm": 0.6875, |
|
"learning_rate": 0.00019837340886199096, |
|
"loss": 0.9956, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.288557213930348, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00019832100433386795, |
|
"loss": 0.9936, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.300995024875622, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00019826777611134402, |
|
"loss": 0.9908, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.3134328358208958, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.00019821372464034416, |
|
"loss": 0.9868, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.325870646766169, |
|
"grad_norm": 0.625, |
|
"learning_rate": 0.00019815885037369015, |
|
"loss": 1.0012, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.3383084577114426, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0001981031537710968, |
|
"loss": 0.9867, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.3507462686567164, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.00019804663529916826, |
|
"loss": 0.9865, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.3631840796019903, |
|
"grad_norm": 0.7109375, |
|
"learning_rate": 0.00019798929543139382, |
|
"loss": 1.0003, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.3756218905472637, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00019793113464814416, |
|
"loss": 1.0018, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.388059701492537, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00019787215343666732, |
|
"loss": 0.9893, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.400497512437811, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00019781235229108447, |
|
"loss": 1.0048, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.412935323383085, |
|
"grad_norm": 0.66015625, |
|
"learning_rate": 0.00019775173171238588, |
|
"loss": 0.9935, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.425373134328358, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00019769029220842677, |
|
"loss": 0.9947, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.4378109452736316, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 0.00019762803429392297, |
|
"loss": 0.9814, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.4502487562189055, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.00019756495849044653, |
|
"loss": 0.9852, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.4626865671641793, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.0001975010653264216, |
|
"loss": 0.9856, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.4751243781094527, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00019743635533711978, |
|
"loss": 0.9778, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.487562189054726, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00019737082906465565, |
|
"loss": 0.9916, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00019730448705798239, |
|
"loss": 0.9959, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.512437810945274, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00019723732987288704, |
|
"loss": 0.9839, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.5248756218905473, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00019716935807198588, |
|
"loss": 0.9926, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.5373134328358207, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00019710057222471967, |
|
"loss": 0.9867, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.5497512437810945, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00019703097290734903, |
|
"loss": 0.9857, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.5621890547263684, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00019696056070294944, |
|
"loss": 0.972, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.574626865671642, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00019688933620140637, |
|
"loss": 0.988, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.587064676616915, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.00019681729999941058, |
|
"loss": 0.981, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.599502487562189, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.0001967444527004527, |
|
"loss": 0.9884, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.611940298507463, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0001966707949148186, |
|
"loss": 0.9943, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.6243781094527363, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00019659632725958396, |
|
"loss": 0.9818, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.6368159203980097, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00019652105035860933, |
|
"loss": 0.9825, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.6492537313432836, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 0.00019644496484253474, |
|
"loss": 0.9854, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.6616915422885574, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00019636807134877442, |
|
"loss": 0.9861, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.674129353233831, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 0.00019629037052151164, |
|
"loss": 0.9851, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.6865671641791042, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.00019621186301169315, |
|
"loss": 0.9746, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.699004975124378, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00019613254947702368, |
|
"loss": 0.9793, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.711442786069652, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00019605243058196063, |
|
"loss": 0.9817, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.7238805970149254, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00019597150699770835, |
|
"loss": 0.9851, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.7363184079601988, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.00019588977940221248, |
|
"loss": 0.9811, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.7487562189054726, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 0.00019580724848015452, |
|
"loss": 0.9793, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.7611940298507465, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0001957239149229458, |
|
"loss": 0.9726, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.77363184079602, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.0001956397794287218, |
|
"loss": 0.9852, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.7860696517412933, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.0001955548427023363, |
|
"loss": 0.9828, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.798507462686567, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00019546910545535558, |
|
"loss": 0.9775, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.810945273631841, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001953825684060523, |
|
"loss": 0.9735, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.8233830845771144, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00019529523227939958, |
|
"loss": 0.9848, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.835820895522388, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00019520709780706486, |
|
"loss": 0.9798, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.8482587064676617, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 0.00019511816572740376, |
|
"loss": 0.9729, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.8606965174129355, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00019502843678545405, |
|
"loss": 0.9775, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.873134328358209, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00019493791173292923, |
|
"loss": 0.9693, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.8855721393034823, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.0001948465913282123, |
|
"loss": 0.9836, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.898009950248756, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.0001947544763363494, |
|
"loss": 0.9861, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.91044776119403, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00019466156752904343, |
|
"loss": 0.9711, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.9228855721393034, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00019456786568464756, |
|
"loss": 0.9761, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.935323383084577, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00019447337158815865, |
|
"loss": 0.9883, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.9477611940298507, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00019437808603121087, |
|
"loss": 0.9788, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.9601990049751246, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00019428200981206877, |
|
"loss": 0.974, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.972636815920398, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00019418514373562086, |
|
"loss": 0.9842, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.9850746268656714, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00019408748861337273, |
|
"loss": 0.9771, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.9975124378109452, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0001939890452634403, |
|
"loss": 0.9746, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 2.4625403881073, |
|
"eval_runtime": 0.5385, |
|
"eval_samples_per_second": 18.57, |
|
"eval_steps_per_second": 1.857, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 3.009950248756219, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.0001938898145105429, |
|
"loss": 0.9575, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 3.0223880597014925, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.00019378979718599645, |
|
"loss": 0.9523, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 3.0348258706467663, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00019368899412770653, |
|
"loss": 0.9495, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 3.0472636815920398, |
|
"grad_norm": 0.68359375, |
|
"learning_rate": 0.00019358740618016107, |
|
"loss": 0.9658, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 3.0597014925373136, |
|
"grad_norm": 0.7734375, |
|
"learning_rate": 0.0001934850341944237, |
|
"loss": 0.9561, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 3.072139303482587, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00019338187902812634, |
|
"loss": 0.9604, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 3.084577114427861, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.0001932779415454621, |
|
"loss": 0.9641, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 3.0970149253731343, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.00019317322261717794, |
|
"loss": 0.9729, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 3.109452736318408, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00019306772312056758, |
|
"loss": 0.974, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 3.1218905472636815, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.000192961443939464, |
|
"loss": 0.9546, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 3.1343283582089554, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 0.00019285438596423204, |
|
"loss": 0.9575, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 3.146766169154229, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 0.00019274655009176095, |
|
"loss": 0.964, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 3.1592039800995027, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00019263793722545694, |
|
"loss": 0.9619, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 3.171641791044776, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00019252854827523557, |
|
"loss": 0.9581, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 3.18407960199005, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.0001924183841575141, |
|
"loss": 0.9488, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 3.1965174129353233, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.0001923074457952038, |
|
"loss": 0.9487, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 3.208955223880597, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00019219573411770235, |
|
"loss": 0.9543, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 3.2213930348258706, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00019208325006088588, |
|
"loss": 0.9569, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 3.2338308457711444, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00019196999456710118, |
|
"loss": 0.9652, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 3.246268656716418, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.000191855968585158, |
|
"loss": 0.9526, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 3.2587064676616917, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.0001917411730703207, |
|
"loss": 0.9433, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 3.271144278606965, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00019162560898430066, |
|
"loss": 0.9718, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 3.283582089552239, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 0.000191509277295248, |
|
"loss": 0.9569, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 3.2960199004975124, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00019139217897774343, |
|
"loss": 0.9624, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 3.308457711442786, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00019127431501279033, |
|
"loss": 0.9543, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 3.3208955223880596, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00019115568638780622, |
|
"loss": 0.946, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 3.3333333333333335, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.0001910362940966147, |
|
"loss": 0.9531, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 3.345771144278607, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00019091613913943705, |
|
"loss": 0.9544, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 3.3582089552238807, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.00019079522252288386, |
|
"loss": 0.9701, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 3.370646766169154, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.00019067354525994669, |
|
"loss": 0.9647, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 3.383084577114428, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 0.00019055110836998935, |
|
"loss": 0.9561, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 3.3955223880597014, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00019042791287873957, |
|
"loss": 0.9601, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 3.4079601990049753, |
|
"grad_norm": 0.7734375, |
|
"learning_rate": 0.0001903039598182804, |
|
"loss": 0.9657, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 3.4203980099502487, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00019017925022704138, |
|
"loss": 0.943, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 3.4328358208955225, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00019005378514979008, |
|
"loss": 0.9732, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 3.445273631840796, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00018992756563762314, |
|
"loss": 0.9498, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 3.45771144278607, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00018980059274795768, |
|
"loss": 0.9501, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 3.470149253731343, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00018967286754452214, |
|
"loss": 0.9519, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 3.482587064676617, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.0001895443910973477, |
|
"loss": 0.9582, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.4950248756218905, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00018941516448275908, |
|
"loss": 0.9556, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 3.5074626865671643, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.0001892851887833657, |
|
"loss": 0.9629, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 3.5199004975124377, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00018915446508805235, |
|
"loss": 0.9486, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 3.5323383084577116, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00018902299449197042, |
|
"loss": 0.9488, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 3.544776119402985, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.0001888907780965284, |
|
"loss": 0.9424, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 3.557213930348259, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00018875781700938285, |
|
"loss": 0.9472, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 3.5696517412935322, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00018862411234442904, |
|
"loss": 0.9673, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 3.582089552238806, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00018848966522179168, |
|
"loss": 0.9684, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 3.5945273631840795, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00018835447676781545, |
|
"loss": 0.9586, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 3.6069651741293534, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.00018821854811505565, |
|
"loss": 0.9458, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 3.6194029850746268, |
|
"grad_norm": 0.75390625, |
|
"learning_rate": 0.00018808188040226868, |
|
"loss": 0.9554, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 3.6318407960199006, |
|
"grad_norm": 0.77734375, |
|
"learning_rate": 0.0001879444747744024, |
|
"loss": 0.9404, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 3.644278606965174, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.0001878063323825867, |
|
"loss": 0.9525, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 3.656716417910448, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00018766745438412384, |
|
"loss": 0.9562, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 3.6691542288557213, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.0001875278419424786, |
|
"loss": 0.9405, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 3.681592039800995, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00018738749622726863, |
|
"loss": 0.9479, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 3.6940298507462686, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00018724641841425478, |
|
"loss": 0.9535, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 3.7064676616915424, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00018710460968533103, |
|
"loss": 0.9572, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 3.718905472636816, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00018696207122851467, |
|
"loss": 0.9583, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 3.7313432835820897, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.00018681880423793642, |
|
"loss": 0.9566, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.743781094527363, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.00018667480991383032, |
|
"loss": 0.9539, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 3.756218905472637, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00018653008946252372, |
|
"loss": 0.9479, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 3.7686567164179103, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.00018638464409642723, |
|
"loss": 0.9516, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 3.781094527363184, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.00018623847503402446, |
|
"loss": 0.9467, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 3.7935323383084576, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 0.0001860915834998619, |
|
"loss": 0.9551, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 3.8059701492537314, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 0.00018594397072453856, |
|
"loss": 0.9456, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 3.818407960199005, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00018579563794469573, |
|
"loss": 0.9467, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 3.8308457711442787, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00018564658640300672, |
|
"loss": 0.9462, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.843283582089552, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00018549681734816623, |
|
"loss": 0.9539, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 3.855721393034826, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00018534633203488006, |
|
"loss": 0.9501, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 3.8681592039800994, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00018519513172385446, |
|
"loss": 0.9354, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 3.8805970149253732, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0001850432176817857, |
|
"loss": 0.9468, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 3.8930348258706466, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00018489059118134948, |
|
"loss": 0.9561, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 3.9054726368159205, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00018473725350119004, |
|
"loss": 0.9508, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 3.917910447761194, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00018458320592590975, |
|
"loss": 0.9344, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 3.9303482587064678, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0001844284497460581, |
|
"loss": 0.9512, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 3.942786069651741, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.000184272986258121, |
|
"loss": 0.9561, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 3.955223880597015, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00018411681676450999, |
|
"loss": 0.9522, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 3.9676616915422884, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00018395994257355112, |
|
"loss": 0.9399, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 3.9800995024875623, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.0001838023649994742, |
|
"loss": 0.9556, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.9925373134328357, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0001836440853624017, |
|
"loss": 0.9525, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_loss": 2.458648204803467, |
|
"eval_runtime": 0.5393, |
|
"eval_samples_per_second": 18.544, |
|
"eval_steps_per_second": 1.854, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 4.0049751243781095, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00018348510498833765, |
|
"loss": 0.949, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 4.017412935323383, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.00018332542520915665, |
|
"loss": 0.9323, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 4.029850746268656, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00018316504736259255, |
|
"loss": 0.9345, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 4.04228855721393, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00018300397279222738, |
|
"loss": 0.9221, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 4.054726368159204, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00018284220284748006, |
|
"loss": 0.93, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 4.067164179104478, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00018267973888359509, |
|
"loss": 0.9341, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 4.079601990049751, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.0001825165822616311, |
|
"loss": 0.9411, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 4.092039800995025, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00018235273434844964, |
|
"loss": 0.9375, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 4.104477611940299, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00018218819651670356, |
|
"loss": 0.93, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 4.116915422885572, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.00018202297014482558, |
|
"loss": 0.9351, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 4.129353233830845, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.0001818570566170168, |
|
"loss": 0.9414, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 4.141791044776119, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00018169045732323492, |
|
"loss": 0.934, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 4.154228855721393, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00018152317365918282, |
|
"loss": 0.9395, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 4.166666666666667, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00018135520702629675, |
|
"loss": 0.9315, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 4.17910447761194, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 0.00018118655883173456, |
|
"loss": 0.9372, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 4.191542288557214, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 0.000181017230488364, |
|
"loss": 0.924, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 4.203980099502488, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00018084722341475074, |
|
"loss": 0.9386, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 4.2164179104477615, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.0001806765390351467, |
|
"loss": 0.9386, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 4.2288557213930345, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00018050517877947798, |
|
"loss": 0.9375, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 4.241293532338308, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00018033314408333283, |
|
"loss": 0.9309, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 4.253731343283582, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00018016043638794974, |
|
"loss": 0.9309, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 4.266169154228856, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00017998705714020535, |
|
"loss": 0.9344, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 4.278606965174129, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.0001798130077926023, |
|
"loss": 0.9196, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 4.291044776119403, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00017963828980325697, |
|
"loss": 0.9361, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 4.303482587064677, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00017946290463588746, |
|
"loss": 0.9301, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 4.3159203980099505, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00017928685375980116, |
|
"loss": 0.93, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 4.3283582089552235, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00017911013864988252, |
|
"loss": 0.9163, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 4.340796019900497, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.00017893276078658073, |
|
"loss": 0.9297, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 4.353233830845771, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00017875472165589714, |
|
"loss": 0.932, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 4.365671641791045, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00017857602274937308, |
|
"loss": 0.9259, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 4.378109452736318, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00017839666556407712, |
|
"loss": 0.9235, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 4.390547263681592, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0001782166516025927, |
|
"loss": 0.9233, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 4.402985074626866, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00017803598237300537, |
|
"loss": 0.9393, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 4.41542288557214, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00017785465938889038, |
|
"loss": 0.9312, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 4.4278606965174125, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0001776726841692998, |
|
"loss": 0.9398, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 4.440298507462686, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.00017749005823874988, |
|
"loss": 0.9349, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 4.45273631840796, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001773067831272083, |
|
"loss": 0.9349, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 4.465174129353234, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.00017712286037008124, |
|
"loss": 0.9343, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 4.477611940298507, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00017693829150820068, |
|
"loss": 0.9227, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 4.490049751243781, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00017675307808781145, |
|
"loss": 0.9329, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 4.502487562189055, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00017656722166055805, |
|
"loss": 0.935, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 4.514925373134329, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00017638072378347203, |
|
"loss": 0.9296, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 4.5273631840796025, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.00017619358601895867, |
|
"loss": 0.9235, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 4.539800995024875, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.000176005809934784, |
|
"loss": 0.9298, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 4.552238805970149, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0001758173971040616, |
|
"loss": 0.9221, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 4.564676616915423, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 0.00017562834910523942, |
|
"loss": 0.9345, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 4.577114427860696, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00017543866752208674, |
|
"loss": 0.9253, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 4.58955223880597, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 0.00017524835394368065, |
|
"loss": 0.9236, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 4.601990049751244, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.0001750574099643929, |
|
"loss": 0.931, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 4.614427860696518, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.0001748658371838764, |
|
"loss": 0.9365, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 4.6268656716417915, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00017467363720705204, |
|
"loss": 0.9256, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 4.6393034825870645, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00017448081164409497, |
|
"loss": 0.9338, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 4.651741293532338, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0001742873621104214, |
|
"loss": 0.9381, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 4.664179104477612, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.0001740932902266747, |
|
"loss": 0.9349, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 4.676616915422885, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.00017389859761871238, |
|
"loss": 0.9322, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 4.689054726368159, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00017370328591759178, |
|
"loss": 0.9306, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 4.701492537313433, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00017350735675955697, |
|
"loss": 0.929, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 4.713930348258707, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 0.0001733108117860248, |
|
"loss": 0.9285, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 4.726368159203981, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 0.0001731136526435711, |
|
"loss": 0.9312, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 4.7388059701492535, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 0.000172915880983917, |
|
"loss": 0.9299, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 4.751243781094527, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00017271749846391513, |
|
"loss": 0.935, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 4.763681592039801, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.0001725185067455356, |
|
"loss": 0.9409, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 4.776119402985074, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.0001723189074958521, |
|
"loss": 0.9251, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 4.788557213930348, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00017211870238702806, |
|
"loss": 0.9373, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 4.800995024875622, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00017191789309630253, |
|
"loss": 0.9237, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 4.813432835820896, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00017171648130597612, |
|
"loss": 0.9326, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 4.82587064676617, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00017151446870339697, |
|
"loss": 0.9258, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 4.838308457711443, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.0001713118569809466, |
|
"loss": 0.9383, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 4.850746268656716, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.0001711086478360257, |
|
"loss": 0.9292, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 4.86318407960199, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00017090484297104, |
|
"loss": 0.9356, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 4.875621890547263, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.0001707004440933858, |
|
"loss": 0.9281, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 4.888059701492537, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.0001704954529154359, |
|
"loss": 0.9259, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 4.900497512437811, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.0001702898711545251, |
|
"loss": 0.9224, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 4.912935323383085, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.00017008370053293596, |
|
"loss": 0.9311, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 4.925373134328359, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 0.00016987694277788417, |
|
"loss": 0.9251, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 4.937810945273632, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00016966959962150428, |
|
"loss": 0.9303, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 4.9502487562189055, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00016946167280083505, |
|
"loss": 0.917, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 4.962686567164179, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.000169253164057805, |
|
"loss": 0.9191, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 4.975124378109452, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.00016904407513921766, |
|
"loss": 0.9245, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.987562189054726, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.00016883440779673716, |
|
"loss": 0.9232, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.0001686241637868734, |
|
"loss": 0.9361, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_loss": 2.4668655395507812, |
|
"eval_runtime": 0.5364, |
|
"eval_samples_per_second": 18.641, |
|
"eval_steps_per_second": 1.864, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 5.012437810945274, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00016841334487096728, |
|
"loss": 0.9077, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 5.024875621890548, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001682019528151762, |
|
"loss": 0.9195, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 5.037313432835821, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.00016798998939045895, |
|
"loss": 0.9167, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 5.0497512437810945, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00016777745637256106, |
|
"loss": 0.915, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 5.062189054726368, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00016756435554199999, |
|
"loss": 0.9011, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 5.074626865671641, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00016735068868404998, |
|
"loss": 0.9136, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 5.087064676616915, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00016713645758872727, |
|
"loss": 0.9169, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 5.099502487562189, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00016692166405077508, |
|
"loss": 0.9051, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 5.111940298507463, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0001667063098696485, |
|
"loss": 0.92, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 5.124378109452737, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0001664903968494995, |
|
"loss": 0.9046, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 5.13681592039801, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00016627392679916184, |
|
"loss": 0.9215, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 5.149253731343284, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.0001660569015321357, |
|
"loss": 0.9242, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 5.161691542288557, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.0001658393228665728, |
|
"loss": 0.9135, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 5.174129353233831, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00016562119262526092, |
|
"loss": 0.9108, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 5.186567164179104, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 0.00016540251263560878, |
|
"loss": 0.9117, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 5.199004975124378, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 0.00016518328472963064, |
|
"loss": 0.9177, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 5.211442786069652, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.000164963510743931, |
|
"loss": 0.9218, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 5.223880597014926, |
|
"grad_norm": 0.69140625, |
|
"learning_rate": 0.00016474319251968923, |
|
"loss": 0.9235, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 5.236318407960199, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 0.00016452233190264402, |
|
"loss": 0.9105, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 5.248756218905473, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.00016430093074307808, |
|
"loss": 0.9108, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 5.2611940298507465, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 0.00016407899089580262, |
|
"loss": 0.9148, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 5.273631840796019, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 0.00016385651422014166, |
|
"loss": 0.9158, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 5.286069651741293, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00016363350257991662, |
|
"loss": 0.9088, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 5.298507462686567, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.0001634099578434306, |
|
"loss": 0.925, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 5.310945273631841, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.0001631858818834528, |
|
"loss": 0.9164, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 5.323383084577115, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.0001629612765772028, |
|
"loss": 0.912, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 5.335820895522388, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00016273614380633484, |
|
"loss": 0.9225, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 5.348258706467662, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.000162510485456922, |
|
"loss": 0.9205, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 5.3606965174129355, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0001622843034194405, |
|
"loss": 0.9132, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 5.373134328358209, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0001620575995887538, |
|
"loss": 0.905, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 5.385572139303482, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0001618303758640967, |
|
"loss": 0.9231, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 5.398009950248756, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00016160263414905957, |
|
"loss": 0.9116, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 5.41044776119403, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00016137437635157213, |
|
"loss": 0.9223, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 5.422885572139304, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.0001611456043838878, |
|
"loss": 0.9155, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 5.435323383084577, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.00016091632016256737, |
|
"loss": 0.9137, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 5.447761194029851, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00016068652560846327, |
|
"loss": 0.913, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 5.460199004975125, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00016045622264670306, |
|
"loss": 0.8992, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 5.472636815920398, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00016022541320667374, |
|
"loss": 0.9124, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 5.485074626865671, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.0001599940992220053, |
|
"loss": 0.9121, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 5.497512437810945, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00015976228263055463, |
|
"loss": 0.9138, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 5.509950248756219, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.00015952996537438918, |
|
"loss": 0.9153, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 5.522388059701493, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.0001592971493997709, |
|
"loss": 0.9112, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 5.534825870646766, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00015906383665713968, |
|
"loss": 0.903, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 5.54726368159204, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00015883002910109726, |
|
"loss": 0.9198, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 5.559701492537314, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.00015859572869039064, |
|
"loss": 0.9182, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 5.572139303482587, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00015836093738789576, |
|
"loss": 0.9098, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 5.58457711442786, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0001581256571606011, |
|
"loss": 0.9084, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 5.597014925373134, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00015788988997959114, |
|
"loss": 0.9134, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 5.609452736318408, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00015765363782002992, |
|
"loss": 0.9103, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 5.621890547263682, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.00015741690266114428, |
|
"loss": 0.9083, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 5.634328358208955, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00015717968648620764, |
|
"loss": 0.9152, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 5.646766169154229, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.000156941991282523, |
|
"loss": 0.9141, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 5.659203980099503, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00015670381904140664, |
|
"loss": 0.9036, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 5.6716417910447765, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00015646517175817114, |
|
"loss": 0.9097, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 5.6840796019900495, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.00015622605143210882, |
|
"loss": 0.9137, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 5.696517412935323, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00015598646006647506, |
|
"loss": 0.9238, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 5.708955223880597, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00015574639966847126, |
|
"loss": 0.9135, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 5.721393034825871, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00015550587224922834, |
|
"loss": 0.9141, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 5.733830845771144, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00015526487982378968, |
|
"loss": 0.9197, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 5.746268656716418, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00015502342441109422, |
|
"loss": 0.9142, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 5.758706467661692, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0001547815080339598, |
|
"loss": 0.9066, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 5.7711442786069655, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00015453913271906586, |
|
"loss": 0.919, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 5.7835820895522385, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00015429630049693674, |
|
"loss": 0.9074, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 5.796019900497512, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.00015405301340192456, |
|
"loss": 0.9093, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 5.808457711442786, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.0001538092734721921, |
|
"loss": 0.9005, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 5.82089552238806, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00015356508274969594, |
|
"loss": 0.8999, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 5.833333333333333, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00015332044328016914, |
|
"loss": 0.9058, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 5.845771144278607, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0001530753571131042, |
|
"loss": 0.9086, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 5.858208955223881, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00015282982630173585, |
|
"loss": 0.9069, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 5.870646766169155, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00015258385290302395, |
|
"loss": 0.9122, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 5.883084577114428, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00015233743897763612, |
|
"loss": 0.9226, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 5.895522388059701, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00015209058658993056, |
|
"loss": 0.9119, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 5.907960199004975, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.00015184329780793866, |
|
"loss": 0.915, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 5.920398009950249, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.0001515955747033479, |
|
"loss": 0.9011, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 5.932835820895522, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.0001513474193514842, |
|
"loss": 0.9054, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 5.945273631840796, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00015109883383129476, |
|
"loss": 0.9111, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 5.95771144278607, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00015084982022533052, |
|
"loss": 0.9057, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 5.970149253731344, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00015060038061972874, |
|
"loss": 0.9202, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 5.982587064676617, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.00015035051710419564, |
|
"loss": 0.9188, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 5.9950248756218905, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.0001501002317719886, |
|
"loss": 0.9077, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_loss": 2.4844307899475098, |
|
"eval_runtime": 0.5391, |
|
"eval_samples_per_second": 18.548, |
|
"eval_steps_per_second": 1.855, |
|
"step": 2412 |
|
}, |
|
{ |
|
"epoch": 6.007462686567164, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.000149849526719899, |
|
"loss": 0.9001, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 6.019900497512438, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00014959840404823435, |
|
"loss": 0.8907, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 6.032338308457711, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00014934686586080085, |
|
"loss": 0.8994, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 6.044776119402985, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.00014909491426488578, |
|
"loss": 0.9013, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 6.057213930348259, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.0001488425513712397, |
|
"loss": 0.9031, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 6.069651741293533, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00014858977929405894, |
|
"loss": 0.8951, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 6.082089552238806, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00014833660015096766, |
|
"loss": 0.9004, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 6.0945273631840795, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0001480830160630005, |
|
"loss": 0.9013, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 6.106965174129353, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.0001478290291545843, |
|
"loss": 0.8982, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 6.119402985074627, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.00014757464155352082, |
|
"loss": 0.8949, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 6.1318407960199, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00014731985539096843, |
|
"loss": 0.8985, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 6.144278606965174, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00014706467280142473, |
|
"loss": 0.9057, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 6.156716417910448, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.0001468090959227082, |
|
"loss": 0.8972, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 6.169154228855722, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.0001465531268959407, |
|
"loss": 0.8892, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 6.181592039800995, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.0001462967678655292, |
|
"loss": 0.8964, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 6.1940298507462686, |
|
"grad_norm": 0.625, |
|
"learning_rate": 0.00014604002097914806, |
|
"loss": 0.9005, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 6.206467661691542, |
|
"grad_norm": 0.66015625, |
|
"learning_rate": 0.00014578288838772095, |
|
"loss": 0.9052, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 6.218905472636816, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.0001455253722454028, |
|
"loss": 0.9027, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 6.231343283582089, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00014526747470956176, |
|
"loss": 0.8838, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 6.243781094527363, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.00014500919794076112, |
|
"loss": 0.8898, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 6.256218905472637, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00014475054410274132, |
|
"loss": 0.9002, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 6.268656716417911, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.00014449151536240166, |
|
"loss": 0.8977, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 6.281094527363184, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.0001442321138897823, |
|
"loss": 0.896, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 6.293532338308458, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00014397234185804586, |
|
"loss": 0.8907, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 6.3059701492537314, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.00014371220144345954, |
|
"loss": 0.8975, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 6.318407960199005, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.00014345169482537658, |
|
"loss": 0.8997, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 6.330845771144278, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00014319082418621815, |
|
"loss": 0.8967, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 6.343283582089552, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.0001429295917114551, |
|
"loss": 0.8958, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 6.355721393034826, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00014266799958958948, |
|
"loss": 0.8975, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 6.3681592039801, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.00014240605001213647, |
|
"loss": 0.9025, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 6.380597014925373, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.00014214374517360575, |
|
"loss": 0.9016, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 6.393034825870647, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.00014188108727148334, |
|
"loss": 0.8996, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 6.4054726368159205, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.000141618078506213, |
|
"loss": 0.8908, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 6.417910447761194, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00014135472108117787, |
|
"loss": 0.8912, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 6.430348258706467, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.0001410910172026821, |
|
"loss": 0.8922, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 6.442786069651741, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00014082696907993225, |
|
"loss": 0.908, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 6.455223880597015, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.00014056257892501885, |
|
"loss": 0.892, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 6.467661691542289, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00014029784895289776, |
|
"loss": 0.8944, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 6.480099502487562, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.0001400327813813718, |
|
"loss": 0.8892, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 6.492537313432836, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.00013976737843107202, |
|
"loss": 0.9018, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 6.5049751243781095, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00013950164232543909, |
|
"loss": 0.896, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 6.517412935323383, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00013923557529070478, |
|
"loss": 0.8957, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 6.529850746268656, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00013896917955587328, |
|
"loss": 0.8936, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 6.54228855721393, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.0001387024573527024, |
|
"loss": 0.9142, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 6.554726368159204, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.0001384354109156851, |
|
"loss": 0.886, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 6.567164179104478, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00013816804248203052, |
|
"loss": 0.8986, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 6.579601990049751, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.00013790035429164544, |
|
"loss": 0.9073, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 6.592039800995025, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00013763234858711542, |
|
"loss": 0.8992, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 6.604477611940299, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00013736402761368598, |
|
"loss": 0.8998, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 6.616915422885572, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00013709539361924392, |
|
"loss": 0.893, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 6.629353233830845, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00013682644885429831, |
|
"loss": 0.9035, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 6.641791044776119, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00013655719557196185, |
|
"loss": 0.9081, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 6.654228855721393, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00013628763602793176, |
|
"loss": 0.8858, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 6.666666666666667, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00013601777248047105, |
|
"loss": 0.8897, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 6.67910447761194, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0001357476071903896, |
|
"loss": 0.8972, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 6.691542288557214, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00013547714242102504, |
|
"loss": 0.9062, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 6.703980099502488, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00013520638043822405, |
|
"loss": 0.8963, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 6.7164179104477615, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.0001349353235103232, |
|
"loss": 0.8991, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 6.7288557213930345, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00013466397390812996, |
|
"loss": 0.8994, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 6.741293532338308, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.00013439233390490378, |
|
"loss": 0.8902, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 6.753731343283582, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00013412040577633687, |
|
"loss": 0.9047, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 6.766169154228856, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00013384819180053536, |
|
"loss": 0.9047, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 6.778606965174129, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00013357569425799997, |
|
"loss": 0.8986, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 6.791044776119403, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.0001333029154316072, |
|
"loss": 0.8988, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 6.803482587064677, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.0001330298576065898, |
|
"loss": 0.9017, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 6.8159203980099505, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00013275652307051815, |
|
"loss": 0.8984, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 6.8283582089552235, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.00013248291411328047, |
|
"loss": 0.9124, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 6.840796019900497, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.00013220903302706426, |
|
"loss": 0.9063, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 6.853233830845771, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00013193488210633668, |
|
"loss": 0.9011, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 6.865671641791045, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.00013166046364782545, |
|
"loss": 0.9073, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 6.878109452736318, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00013138577995049964, |
|
"loss": 0.9044, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 6.890547263681592, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.00013111083331555042, |
|
"loss": 0.8886, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 6.902985074626866, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.0001308356260463717, |
|
"loss": 0.9045, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 6.91542288557214, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00013056016044854087, |
|
"loss": 0.9019, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 6.927860696517413, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.0001302844388297995, |
|
"loss": 0.889, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 6.940298507462686, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0001300084635000341, |
|
"loss": 0.9024, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 6.95273631840796, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.0001297322367712565, |
|
"loss": 0.8985, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 6.965174129353234, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 0.00012945576095758476, |
|
"loss": 0.9121, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 6.977611940298507, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.0001291790383752237, |
|
"loss": 0.919, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 6.990049751243781, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.00012890207134244537, |
|
"loss": 0.896, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_loss": 2.494671583175659, |
|
"eval_runtime": 0.5388, |
|
"eval_samples_per_second": 18.561, |
|
"eval_steps_per_second": 1.856, |
|
"step": 2814 |
|
}, |
|
{ |
|
"epoch": 7.002487562189055, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00012862486217956982, |
|
"loss": 0.895, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 7.014925373134329, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.00012834741320894553, |
|
"loss": 0.8798, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 7.027363184079602, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.00012806972675492997, |
|
"loss": 0.8921, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 7.039800995024875, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00012779180514387023, |
|
"loss": 0.8885, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 7.052238805970149, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00012751365070408333, |
|
"loss": 0.8869, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 7.064676616915423, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00012723526576583703, |
|
"loss": 0.8953, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 7.077114427860696, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00012695665266132983, |
|
"loss": 0.8742, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 7.08955223880597, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00012667781372467202, |
|
"loss": 0.8902, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 7.101990049751244, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.0001263987512918656, |
|
"loss": 0.8798, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 7.114427860696518, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00012611946770078506, |
|
"loss": 0.8936, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 7.126865671641791, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00012583996529115762, |
|
"loss": 0.8848, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 7.1393034825870645, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00012556024640454362, |
|
"loss": 0.8828, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 7.151741293532338, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.0001252803133843171, |
|
"loss": 0.8885, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 7.164179104477612, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00012500016857564585, |
|
"loss": 0.8956, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 7.176616915422885, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.0001247198143254721, |
|
"loss": 0.8895, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 7.189054726368159, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.0001244392529824926, |
|
"loss": 0.8808, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 7.201492537313433, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00012415848689713903, |
|
"loss": 0.8806, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 7.213930348258707, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.00012387751842155847, |
|
"loss": 0.8993, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 7.22636815920398, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00012359634990959334, |
|
"loss": 0.9013, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 7.2388059701492535, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00012331498371676204, |
|
"loss": 0.88, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 7.251243781094527, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00012303342220023895, |
|
"loss": 0.8755, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 7.263681592039801, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0001227516677188349, |
|
"loss": 0.8868, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 7.276119402985074, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.0001224697226329772, |
|
"loss": 0.8876, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 7.288557213930348, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00012218758930469004, |
|
"loss": 0.886, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 7.300995024875622, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.0001219052700975746, |
|
"loss": 0.8792, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 7.313432835820896, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00012162276737678933, |
|
"loss": 0.8864, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 7.32587064676617, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00012134008350902999, |
|
"loss": 0.8973, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 7.338308457711443, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00012105722086251, |
|
"loss": 0.8828, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 7.350746268656716, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.0001207741818069405, |
|
"loss": 0.8784, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 7.36318407960199, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00012049096871351047, |
|
"loss": 0.8832, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 7.375621890547263, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00012020758395486697, |
|
"loss": 0.8817, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 7.388059701492537, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.00011992402990509515, |
|
"loss": 0.8918, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 7.400497512437811, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00011964030893969849, |
|
"loss": 0.8882, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 7.412935323383085, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00011935642343557871, |
|
"loss": 0.9038, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 7.425373134328359, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.00011907237577101611, |
|
"loss": 0.8879, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 7.437810945273632, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.00011878816832564939, |
|
"loss": 0.893, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 7.4502487562189055, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.00011850380348045585, |
|
"loss": 0.8898, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 7.462686567164179, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00011821928361773147, |
|
"loss": 0.8896, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 7.475124378109452, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00011793461112107085, |
|
"loss": 0.884, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 7.487562189054726, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.00011764978837534737, |
|
"loss": 0.8682, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00011736481776669306, |
|
"loss": 0.8928, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 7.512437810945274, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.0001170797016824787, |
|
"loss": 0.8914, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 7.524875621890548, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00011679444251129392, |
|
"loss": 0.883, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 7.537313432835821, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00011650904264292687, |
|
"loss": 0.8966, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 7.5497512437810945, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0001162235044683446, |
|
"loss": 0.8856, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 7.562189054726368, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00011593783037967272, |
|
"loss": 0.8855, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 7.574626865671641, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00011565202277017551, |
|
"loss": 0.8858, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 7.587064676616915, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.0001153660840342358, |
|
"loss": 0.8922, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 7.599502487562189, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.00011508001656733503, |
|
"loss": 0.8943, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 7.611940298507463, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.000114793822766033, |
|
"loss": 0.8926, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 7.624378109452737, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00011450750502794796, |
|
"loss": 0.8792, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 7.63681592039801, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.00011422106575173642, |
|
"loss": 0.8999, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 7.649253731343284, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00011393450733707309, |
|
"loss": 0.8827, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 7.661691542288557, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.00011364783218463078, |
|
"loss": 0.897, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 7.67412935323383, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00011336104269606027, |
|
"loss": 0.8866, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 7.686567164179104, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00011307414127397027, |
|
"loss": 0.889, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 7.699004975124378, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00011278713032190709, |
|
"loss": 0.8844, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 7.711442786069652, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00011250001224433477, |
|
"loss": 0.8747, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 7.723880597014926, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00011221278944661473, |
|
"loss": 0.8895, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 7.736318407960199, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00011192546433498574, |
|
"loss": 0.8956, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 7.748756218905473, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00011163803931654366, |
|
"loss": 0.8796, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 7.7611940298507465, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00011135051679922141, |
|
"loss": 0.8838, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 7.773631840796019, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00011106289919176867, |
|
"loss": 0.8958, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 7.786069651741293, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00011077518890373167, |
|
"loss": 0.8827, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 7.798507462686567, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00011048738834543319, |
|
"loss": 0.8829, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 7.810945273631841, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00011019949992795226, |
|
"loss": 0.8846, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 7.823383084577115, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 0.00010991152606310392, |
|
"loss": 0.8878, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 7.835820895522388, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00010962346916341903, |
|
"loss": 0.8964, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 7.848258706467662, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00010933533164212415, |
|
"loss": 0.8907, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 7.8606965174129355, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001090471159131212, |
|
"loss": 0.8823, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 7.8731343283582085, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.00010875882439096729, |
|
"loss": 0.8919, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 7.885572139303482, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.00010847045949085454, |
|
"loss": 0.8881, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 7.898009950248756, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.0001081820236285898, |
|
"loss": 0.8916, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 7.91044776119403, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00010789351922057435, |
|
"loss": 0.887, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 7.922885572139304, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00010760494868378377, |
|
"loss": 0.8791, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 7.935323383084577, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00010731631443574768, |
|
"loss": 0.8859, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 7.947761194029851, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.0001070276188945293, |
|
"loss": 0.8859, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 7.960199004975125, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.00010673886447870553, |
|
"loss": 0.8793, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 7.9726368159203975, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.00010645005360734637, |
|
"loss": 0.902, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 7.985074626865671, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00010616118869999483, |
|
"loss": 0.884, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 7.997512437810945, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00010587227217664654, |
|
"loss": 0.8858, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 2.505563259124756, |
|
"eval_runtime": 0.5386, |
|
"eval_samples_per_second": 18.568, |
|
"eval_steps_per_second": 1.857, |
|
"step": 3216 |
|
}, |
|
{ |
|
"epoch": 8.009950248756219, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00010558330645772971, |
|
"loss": 0.8806, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 8.022388059701493, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.00010529429396408452, |
|
"loss": 0.8683, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 8.034825870646767, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00010500523711694312, |
|
"loss": 0.8826, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 8.04726368159204, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00010471613833790917, |
|
"loss": 0.8795, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 8.059701492537313, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00010442700004893764, |
|
"loss": 0.8767, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 8.072139303482587, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00010413782467231455, |
|
"loss": 0.8699, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 8.08457711442786, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00010384861463063649, |
|
"loss": 0.8738, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 8.097014925373134, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00010355937234679065, |
|
"loss": 0.8765, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 8.109452736318408, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00010327010024393417, |
|
"loss": 0.8866, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 8.121890547263682, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0001029808007454741, |
|
"loss": 0.8811, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 8.134328358208956, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.00010269147627504692, |
|
"loss": 0.8713, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 8.14676616915423, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00010240212925649837, |
|
"loss": 0.8869, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 8.159203980099502, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00010211276211386312, |
|
"loss": 0.8789, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 8.171641791044776, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.0001018233772713443, |
|
"loss": 0.8786, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 8.18407960199005, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00010153397715329353, |
|
"loss": 0.8803, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 8.196517412935323, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00010124456418419019, |
|
"loss": 0.8752, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 8.208955223880597, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00010095514078862147, |
|
"loss": 0.8733, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 8.221393034825871, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00010066570939126184, |
|
"loss": 0.8791, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 8.233830845771145, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00010037627241685276, |
|
"loss": 0.8731, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 8.246268656716419, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00010008683229018256, |
|
"loss": 0.8692, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 8.25870646766169, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 9.97973914360658e-05, |
|
"loss": 0.8794, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 8.271144278606965, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 9.950795227932333e-05, |
|
"loss": 0.8775, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 8.283582089552239, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 9.92185172447616e-05, |
|
"loss": 0.8664, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 8.296019900497512, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 9.892908875715264e-05, |
|
"loss": 0.8732, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 8.308457711442786, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 9.863966924121358e-05, |
|
"loss": 0.8806, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 8.32089552238806, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 9.835026112158637e-05, |
|
"loss": 0.8781, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 8.333333333333334, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 9.806086682281758e-05, |
|
"loss": 0.8791, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 8.345771144278608, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 9.777148876933787e-05, |
|
"loss": 0.8838, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 8.35820895522388, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 9.74821293854419e-05, |
|
"loss": 0.8739, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 8.370646766169154, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 9.719279109526785e-05, |
|
"loss": 0.8733, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 8.383084577114428, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 9.69034763227773e-05, |
|
"loss": 0.8805, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 8.395522388059701, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 9.661418749173467e-05, |
|
"loss": 0.8899, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 8.407960199004975, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 9.632492702568714e-05, |
|
"loss": 0.8794, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 8.42039800995025, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 9.603569734794421e-05, |
|
"loss": 0.8786, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 8.432835820895523, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 9.574650088155752e-05, |
|
"loss": 0.8825, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 8.445273631840797, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 9.545734004930037e-05, |
|
"loss": 0.8801, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 8.457711442786069, |
|
"grad_norm": 0.5, |
|
"learning_rate": 9.516821727364765e-05, |
|
"loss": 0.8735, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 8.470149253731343, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 9.487913497675536e-05, |
|
"loss": 0.8822, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 8.482587064676617, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 9.459009558044033e-05, |
|
"loss": 0.8802, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 8.49502487562189, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 9.430110150616014e-05, |
|
"loss": 0.8705, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 8.507462686567164, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 9.40121551749925e-05, |
|
"loss": 0.8792, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 8.519900497512438, |
|
"grad_norm": 0.5, |
|
"learning_rate": 9.372325900761533e-05, |
|
"loss": 0.8809, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 8.532338308457712, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 9.343441542428614e-05, |
|
"loss": 0.8762, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 8.544776119402986, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 9.314562684482202e-05, |
|
"loss": 0.8716, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 8.557213930348258, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 9.285689568857917e-05, |
|
"loss": 0.8884, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 8.569651741293532, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 9.25682243744328e-05, |
|
"loss": 0.8725, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 8.582089552238806, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 9.227961532075671e-05, |
|
"loss": 0.8822, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 8.59452736318408, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 9.199107094540319e-05, |
|
"loss": 0.8784, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 8.606965174129353, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 9.170259366568266e-05, |
|
"loss": 0.8799, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 8.619402985074627, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 9.141418589834339e-05, |
|
"loss": 0.8825, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 8.631840796019901, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 9.112585005955136e-05, |
|
"loss": 0.8922, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 8.644278606965175, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 9.08375885648699e-05, |
|
"loss": 0.8935, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 8.656716417910447, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 9.054940382923953e-05, |
|
"loss": 0.8796, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 8.66915422885572, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 9.02612982669578e-05, |
|
"loss": 0.8869, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 8.681592039800995, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 8.997327429165883e-05, |
|
"loss": 0.8722, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 8.694029850746269, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 8.96853343162934e-05, |
|
"loss": 0.8769, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 8.706467661691542, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 8.939748075310841e-05, |
|
"loss": 0.8802, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 8.718905472636816, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 8.910971601362696e-05, |
|
"loss": 0.891, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 8.73134328358209, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 8.882204250862796e-05, |
|
"loss": 0.8905, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 8.743781094527364, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 8.853446264812599e-05, |
|
"loss": 0.8675, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 8.756218905472636, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 8.824697884135112e-05, |
|
"loss": 0.8834, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 8.76865671641791, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 8.795959349672878e-05, |
|
"loss": 0.8771, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 8.781094527363184, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 8.76723090218594e-05, |
|
"loss": 0.876, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 8.793532338308458, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 8.73851278234985e-05, |
|
"loss": 0.8808, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 8.805970149253731, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 8.709805230753627e-05, |
|
"loss": 0.8843, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 8.818407960199005, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 8.68110848789776e-05, |
|
"loss": 0.8688, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 8.83084577114428, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 8.65242279419219e-05, |
|
"loss": 0.87, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 8.843283582089553, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 8.623748389954283e-05, |
|
"loss": 0.8777, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 8.855721393034825, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 8.595085515406834e-05, |
|
"loss": 0.8747, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 8.868159203980099, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 8.566434410676042e-05, |
|
"loss": 0.8809, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 8.880597014925373, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 8.537795315789509e-05, |
|
"loss": 0.8664, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 8.893034825870647, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 8.509168470674215e-05, |
|
"loss": 0.8791, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 8.90547263681592, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 8.480554115154529e-05, |
|
"loss": 0.8901, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 8.917910447761194, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 8.451952488950166e-05, |
|
"loss": 0.8687, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 8.930348258706468, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 8.423363831674225e-05, |
|
"loss": 0.8844, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 8.942786069651742, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 8.394788382831143e-05, |
|
"loss": 0.8754, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 8.955223880597014, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 8.366226381814697e-05, |
|
"loss": 0.8763, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 8.967661691542288, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 8.337678067906016e-05, |
|
"loss": 0.8876, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 8.980099502487562, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 8.309143680271553e-05, |
|
"loss": 0.8748, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 8.992537313432836, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 8.280623457961102e-05, |
|
"loss": 0.8811, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_loss": 2.5127768516540527, |
|
"eval_runtime": 0.543, |
|
"eval_samples_per_second": 18.416, |
|
"eval_steps_per_second": 1.842, |
|
"step": 3618 |
|
}, |
|
{ |
|
"epoch": 9.00497512437811, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 8.252117639905777e-05, |
|
"loss": 0.8707, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 9.017412935323383, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 8.223626464916022e-05, |
|
"loss": 0.8672, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 9.029850746268657, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 8.195150171679608e-05, |
|
"loss": 0.8752, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 9.042288557213931, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 8.16668899875963e-05, |
|
"loss": 0.8641, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 9.054726368159203, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 8.13824318459252e-05, |
|
"loss": 0.8679, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 9.067164179104477, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 8.109812967486025e-05, |
|
"loss": 0.8711, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 9.07960199004975, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 8.08139858561724e-05, |
|
"loss": 0.8657, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 9.092039800995025, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 8.053000277030589e-05, |
|
"loss": 0.8748, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 9.104477611940299, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 8.02461827963585e-05, |
|
"loss": 0.8715, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 9.116915422885572, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 7.996252831206141e-05, |
|
"loss": 0.8654, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 9.129353233830846, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 7.967904169375951e-05, |
|
"loss": 0.8798, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 9.14179104477612, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 7.939572531639128e-05, |
|
"loss": 0.8697, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 9.154228855721392, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 7.911258155346901e-05, |
|
"loss": 0.8774, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 9.166666666666666, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 7.882961277705895e-05, |
|
"loss": 0.8773, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 9.17910447761194, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 7.854682135776131e-05, |
|
"loss": 0.8745, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 9.191542288557214, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 7.826420966469055e-05, |
|
"loss": 0.8786, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 9.203980099502488, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 7.798178006545534e-05, |
|
"loss": 0.8715, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 9.216417910447761, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 7.769953492613899e-05, |
|
"loss": 0.8696, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 9.228855721393035, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 7.741747661127934e-05, |
|
"loss": 0.8716, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 9.24129353233831, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 7.713560748384914e-05, |
|
"loss": 0.8713, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 9.253731343283581, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 7.685392990523626e-05, |
|
"loss": 0.868, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 9.266169154228855, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 7.657244623522378e-05, |
|
"loss": 0.8756, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 9.278606965174129, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 7.629115883197033e-05, |
|
"loss": 0.8671, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 9.291044776119403, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 7.601007005199021e-05, |
|
"loss": 0.8758, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 9.303482587064677, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 7.572918225013387e-05, |
|
"loss": 0.876, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 9.31592039800995, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 7.544849777956792e-05, |
|
"loss": 0.8762, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 9.328358208955224, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 7.516801899175565e-05, |
|
"loss": 0.8699, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 9.340796019900498, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 7.488774823643711e-05, |
|
"loss": 0.8617, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 9.35323383084577, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 7.46076878616096e-05, |
|
"loss": 0.8788, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 9.365671641791044, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 7.432784021350796e-05, |
|
"loss": 0.8798, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 9.378109452736318, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 7.404820763658483e-05, |
|
"loss": 0.8656, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 9.390547263681592, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 7.376879247349111e-05, |
|
"loss": 0.8803, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 9.402985074626866, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 7.348959706505626e-05, |
|
"loss": 0.8653, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 9.41542288557214, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 7.321062375026879e-05, |
|
"loss": 0.8681, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 9.427860696517413, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 7.293187486625646e-05, |
|
"loss": 0.8736, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 9.440298507462687, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 7.265335274826704e-05, |
|
"loss": 0.8726, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 9.45273631840796, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 7.237505972964832e-05, |
|
"loss": 0.8847, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 9.465174129353233, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 7.209699814182899e-05, |
|
"loss": 0.8604, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 9.477611940298507, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 7.181917031429874e-05, |
|
"loss": 0.8757, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 9.490049751243781, |
|
"grad_norm": 0.5, |
|
"learning_rate": 7.154157857458903e-05, |
|
"loss": 0.8663, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 9.502487562189055, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 7.126422524825343e-05, |
|
"loss": 0.874, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 9.514925373134329, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 7.09871126588481e-05, |
|
"loss": 0.8847, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 9.527363184079602, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 7.071024312791253e-05, |
|
"loss": 0.8607, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 9.539800995024876, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 7.043361897494988e-05, |
|
"loss": 0.877, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 9.552238805970148, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 7.015724251740766e-05, |
|
"loss": 0.8644, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 9.564676616915422, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 6.988111607065826e-05, |
|
"loss": 0.8687, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 9.577114427860696, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 6.960524194797959e-05, |
|
"loss": 0.8678, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 9.58955223880597, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 6.932962246053577e-05, |
|
"loss": 0.8669, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 9.601990049751244, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 6.905425991735753e-05, |
|
"loss": 0.8611, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 9.614427860696518, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 6.877915662532317e-05, |
|
"loss": 0.8596, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 9.626865671641792, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 6.850431488913895e-05, |
|
"loss": 0.8703, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 9.639303482587065, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 6.822973701132007e-05, |
|
"loss": 0.8714, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 9.65174129353234, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 6.795542529217109e-05, |
|
"loss": 0.8874, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 9.664179104477611, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 6.76813820297669e-05, |
|
"loss": 0.8675, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 9.676616915422885, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 6.74076095199333e-05, |
|
"loss": 0.8866, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 9.689054726368159, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 6.713411005622786e-05, |
|
"loss": 0.8822, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 9.701492537313433, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 6.686088592992067e-05, |
|
"loss": 0.8782, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 9.713930348258707, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 6.658793942997515e-05, |
|
"loss": 0.8819, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 9.72636815920398, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 6.631527284302892e-05, |
|
"loss": 0.8637, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 9.738805970149254, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 6.604288845337453e-05, |
|
"loss": 0.8692, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 9.751243781094526, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 6.577078854294046e-05, |
|
"loss": 0.8633, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 9.7636815920398, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 6.549897539127185e-05, |
|
"loss": 0.8685, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 9.776119402985074, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 6.522745127551158e-05, |
|
"loss": 0.8737, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 9.788557213930348, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 6.495621847038105e-05, |
|
"loss": 0.8753, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 9.800995024875622, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 6.46852792481612e-05, |
|
"loss": 0.8623, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 9.813432835820896, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 6.44146358786734e-05, |
|
"loss": 0.8542, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 9.82587064676617, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 6.414429062926057e-05, |
|
"loss": 0.8775, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 9.838308457711443, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 6.387424576476804e-05, |
|
"loss": 0.8814, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 9.850746268656717, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 6.360450354752458e-05, |
|
"loss": 0.8681, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 9.86318407960199, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 6.333506623732365e-05, |
|
"loss": 0.8821, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 9.875621890547263, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 6.306593609140416e-05, |
|
"loss": 0.875, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 9.888059701492537, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 6.279711536443185e-05, |
|
"loss": 0.8672, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 9.900497512437811, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 6.252860630848014e-05, |
|
"loss": 0.8776, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 9.912935323383085, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 6.226041117301154e-05, |
|
"loss": 0.8825, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 9.925373134328359, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 6.199253220485856e-05, |
|
"loss": 0.8658, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 9.937810945273633, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 6.1724971648205e-05, |
|
"loss": 0.87, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 9.950248756218905, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 6.145773174456717e-05, |
|
"loss": 0.871, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 9.962686567164178, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 6.119081473277501e-05, |
|
"loss": 0.8748, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 9.975124378109452, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 6.092422284895352e-05, |
|
"loss": 0.8615, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 9.987562189054726, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 6.0657958326503716e-05, |
|
"loss": 0.8756, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 6.039202339608432e-05, |
|
"loss": 0.8634, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_loss": 2.5166468620300293, |
|
"eval_runtime": 0.5392, |
|
"eval_samples_per_second": 18.545, |
|
"eval_steps_per_second": 1.854, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 10.012437810945274, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 6.012642028559272e-05, |
|
"loss": 0.8711, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 10.024875621890548, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 5.9861151220146494e-05, |
|
"loss": 0.8676, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 10.037313432835822, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 5.959621842206474e-05, |
|
"loss": 0.859, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 10.049751243781095, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 5.9331624110849405e-05, |
|
"loss": 0.8715, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 10.062189054726367, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 5.9067370503166764e-05, |
|
"loss": 0.8667, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 10.074626865671641, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 5.880345981282876e-05, |
|
"loss": 0.8705, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 10.087064676616915, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 5.8539894250774596e-05, |
|
"loss": 0.8655, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 10.099502487562189, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 5.8276676025052055e-05, |
|
"loss": 0.8762, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 10.111940298507463, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 5.801380734079907e-05, |
|
"loss": 0.8635, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 10.124378109452737, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 5.7751290400225287e-05, |
|
"loss": 0.8663, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 10.13681592039801, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 5.74891274025936e-05, |
|
"loss": 0.8669, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 10.149253731343283, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 5.722732054420172e-05, |
|
"loss": 0.87, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 10.161691542288557, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 5.696587201836363e-05, |
|
"loss": 0.8608, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 10.17412935323383, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 5.67047840153915e-05, |
|
"loss": 0.8637, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 10.186567164179104, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 5.6444058722577165e-05, |
|
"loss": 0.8745, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 10.199004975124378, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 5.6183698324173695e-05, |
|
"loss": 0.8648, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 10.211442786069652, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 5.592370500137734e-05, |
|
"loss": 0.8691, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 10.223880597014926, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 5.566408093230911e-05, |
|
"loss": 0.8686, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 10.2363184079602, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 5.5404828291996535e-05, |
|
"loss": 0.8663, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 10.248756218905474, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 5.514594925235548e-05, |
|
"loss": 0.8729, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 10.261194029850746, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 5.4887445982171906e-05, |
|
"loss": 0.8683, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 10.27363184079602, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 5.4629320647083804e-05, |
|
"loss": 0.86, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 10.286069651741293, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 5.437157540956282e-05, |
|
"loss": 0.881, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 10.298507462686567, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 5.4114212428896424e-05, |
|
"loss": 0.8569, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 10.310945273631841, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 5.385723386116966e-05, |
|
"loss": 0.8605, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 10.323383084577115, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 5.360064185924714e-05, |
|
"loss": 0.8779, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 10.335820895522389, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 5.334443857275487e-05, |
|
"loss": 0.8614, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 10.348258706467663, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 5.3088626148062474e-05, |
|
"loss": 0.8687, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 10.360696517412935, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 5.283320672826506e-05, |
|
"loss": 0.8555, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 10.373134328358208, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 5.257818245316522e-05, |
|
"loss": 0.8609, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 10.385572139303482, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 5.232355545925529e-05, |
|
"loss": 0.8583, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 10.398009950248756, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 5.206932787969927e-05, |
|
"loss": 0.8674, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 10.41044776119403, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 5.1815501844315105e-05, |
|
"loss": 0.8679, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 10.422885572139304, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 5.156207947955662e-05, |
|
"loss": 0.8748, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 10.435323383084578, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 5.1309062908495985e-05, |
|
"loss": 0.8628, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 10.447761194029852, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 5.105645425080572e-05, |
|
"loss": 0.876, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 10.460199004975124, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 5.0804255622741084e-05, |
|
"loss": 0.8633, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 10.472636815920398, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 5.055246913712213e-05, |
|
"loss": 0.866, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 10.485074626865671, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 5.030109690331625e-05, |
|
"loss": 0.8763, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 10.497512437810945, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 5.0050141027220396e-05, |
|
"loss": 0.8677, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 10.509950248756219, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 4.979960361124346e-05, |
|
"loss": 0.8751, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 10.522388059701493, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 4.954948675428853e-05, |
|
"loss": 0.8689, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 10.534825870646767, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 4.929979255173553e-05, |
|
"loss": 0.8655, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 10.547263681592039, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 4.905052309542352e-05, |
|
"loss": 0.8619, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 10.559701492537313, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 4.880168047363312e-05, |
|
"loss": 0.871, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 10.572139303482587, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 4.855326677106926e-05, |
|
"loss": 0.8714, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 10.58457711442786, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 4.830528406884351e-05, |
|
"loss": 0.8599, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 10.597014925373134, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 4.8057734444456536e-05, |
|
"loss": 0.8757, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 10.609452736318408, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 4.781061997178104e-05, |
|
"loss": 0.8704, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 10.621890547263682, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 4.7563942721044076e-05, |
|
"loss": 0.8665, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 10.634328358208956, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 4.7317704758809946e-05, |
|
"loss": 0.8689, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 10.64676616915423, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 4.707190814796261e-05, |
|
"loss": 0.8459, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 10.659203980099502, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 4.682655494768866e-05, |
|
"loss": 0.8679, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 10.671641791044776, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 4.658164721345998e-05, |
|
"loss": 0.8686, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 10.68407960199005, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 4.6337186997016505e-05, |
|
"loss": 0.8696, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 10.696517412935323, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 4.6093176346348955e-05, |
|
"loss": 0.8639, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 10.708955223880597, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 4.584961730568188e-05, |
|
"loss": 0.8643, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 10.721393034825871, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 4.5606511915456406e-05, |
|
"loss": 0.8694, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 10.733830845771145, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 4.536386221231308e-05, |
|
"loss": 0.8693, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 10.746268656716419, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 4.512167022907494e-05, |
|
"loss": 0.8674, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 10.75870646766169, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 4.487993799473044e-05, |
|
"loss": 0.8683, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 10.771144278606965, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 4.463866753441644e-05, |
|
"loss": 0.8544, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 10.783582089552239, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 4.439786086940115e-05, |
|
"loss": 0.8694, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 10.796019900497512, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 4.41575200170674e-05, |
|
"loss": 0.8652, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 10.808457711442786, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 4.3917646990895564e-05, |
|
"loss": 0.8703, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 10.82089552238806, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 4.3678243800446835e-05, |
|
"loss": 0.8633, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 10.833333333333334, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 4.343931245134616e-05, |
|
"loss": 0.8609, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 10.845771144278608, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 4.32008549452657e-05, |
|
"loss": 0.8697, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 10.85820895522388, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 4.296287327990797e-05, |
|
"loss": 0.8723, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 10.870646766169154, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 4.272536944898895e-05, |
|
"loss": 0.8625, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 10.883084577114428, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 4.2488345442221625e-05, |
|
"loss": 0.8754, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 10.895522388059701, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 4.225180324529917e-05, |
|
"loss": 0.8596, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 10.907960199004975, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 4.201574483987836e-05, |
|
"loss": 0.8637, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 10.92039800995025, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 4.1780172203562916e-05, |
|
"loss": 0.8638, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 10.932835820895523, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 4.1545087309887045e-05, |
|
"loss": 0.8671, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 10.945273631840797, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 4.131049212829881e-05, |
|
"loss": 0.8745, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 10.957711442786069, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 4.107638862414358e-05, |
|
"loss": 0.8609, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 10.970149253731343, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 4.084277875864776e-05, |
|
"loss": 0.8658, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 10.982587064676617, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 4.06096644889022e-05, |
|
"loss": 0.8561, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 10.99502487562189, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 4.03770477678459e-05, |
|
"loss": 0.8758, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_loss": 2.5237340927124023, |
|
"eval_runtime": 0.5398, |
|
"eval_samples_per_second": 18.525, |
|
"eval_steps_per_second": 1.852, |
|
"step": 4422 |
|
}, |
|
{ |
|
"epoch": 11.007462686567164, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 4.014493054424944e-05, |
|
"loss": 0.8648, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 11.019900497512438, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 3.9913314762698974e-05, |
|
"loss": 0.865, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 11.032338308457712, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 3.9682202363579694e-05, |
|
"loss": 0.8569, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 11.044776119402986, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 3.945159528305971e-05, |
|
"loss": 0.8611, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 11.057213930348258, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 3.922149545307365e-05, |
|
"loss": 0.8578, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 11.069651741293532, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 3.899190480130675e-05, |
|
"loss": 0.8681, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 11.082089552238806, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 3.876282525117847e-05, |
|
"loss": 0.8712, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 11.09452736318408, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 3.853425872182644e-05, |
|
"loss": 0.865, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 11.106965174129353, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 3.8306207128090465e-05, |
|
"loss": 0.8679, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 11.119402985074627, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 3.807867238049642e-05, |
|
"loss": 0.8625, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 11.131840796019901, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 3.785165638524022e-05, |
|
"loss": 0.8588, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 11.144278606965175, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 3.7625161044171854e-05, |
|
"loss": 0.8517, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 11.156716417910447, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 3.739918825477953e-05, |
|
"loss": 0.8687, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 11.16915422885572, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 3.717373991017371e-05, |
|
"loss": 0.8554, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 11.181592039800995, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 3.6948817899071285e-05, |
|
"loss": 0.8613, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 11.194029850746269, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 3.672442410577965e-05, |
|
"loss": 0.8663, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 11.206467661691542, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 3.650056041018108e-05, |
|
"loss": 0.8575, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 11.218905472636816, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 3.6277228687716894e-05, |
|
"loss": 0.8682, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 11.23134328358209, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 3.605443080937172e-05, |
|
"loss": 0.8637, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 11.243781094527364, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 3.583216864165788e-05, |
|
"loss": 0.8592, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 11.256218905472636, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 3.561044404659974e-05, |
|
"loss": 0.8603, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 11.26865671641791, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 3.5389258881718e-05, |
|
"loss": 0.863, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 11.281094527363184, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 3.516861500001435e-05, |
|
"loss": 0.8673, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 11.293532338308458, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 3.4948514249955734e-05, |
|
"loss": 0.8641, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 11.305970149253731, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 3.472895847545905e-05, |
|
"loss": 0.866, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 11.318407960199005, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 3.450994951587547e-05, |
|
"loss": 0.8616, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 11.33084577114428, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 3.429148920597529e-05, |
|
"loss": 0.876, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 11.343283582089553, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 3.407357937593237e-05, |
|
"loss": 0.8626, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 11.355721393034825, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 3.3856221851308946e-05, |
|
"loss": 0.8632, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 11.368159203980099, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 3.3639418453040116e-05, |
|
"loss": 0.8606, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 11.380597014925373, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 3.342317099741886e-05, |
|
"loss": 0.8715, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 11.393034825870647, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 3.320748129608067e-05, |
|
"loss": 0.8559, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 11.40547263681592, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 3.2992351155988285e-05, |
|
"loss": 0.8631, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 11.417910447761194, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 3.27777823794168e-05, |
|
"loss": 0.8732, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 11.430348258706468, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 3.256377676393836e-05, |
|
"loss": 0.8718, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 11.442786069651742, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 3.235033610240721e-05, |
|
"loss": 0.8667, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 11.455223880597014, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 3.213746218294455e-05, |
|
"loss": 0.8648, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 11.467661691542288, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 3.192515678892374e-05, |
|
"loss": 0.8638, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 11.480099502487562, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 3.1713421698955194e-05, |
|
"loss": 0.8558, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 11.492537313432836, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 3.150225868687161e-05, |
|
"loss": 0.8654, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 11.50497512437811, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 3.129166952171293e-05, |
|
"loss": 0.8687, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 11.517412935323383, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 3.1081655967711745e-05, |
|
"loss": 0.8616, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 11.529850746268657, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 3.0872219784278354e-05, |
|
"loss": 0.864, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 11.542288557213931, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 3.066336272598612e-05, |
|
"loss": 0.87, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 11.554726368159203, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 3.0455086542556588e-05, |
|
"loss": 0.8672, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 11.567164179104477, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 3.02473929788452e-05, |
|
"loss": 0.8551, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 11.57960199004975, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 3.0040283774826215e-05, |
|
"loss": 0.8658, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 11.592039800995025, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 2.983376066557846e-05, |
|
"loss": 0.8574, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 11.604477611940299, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 2.96278253812707e-05, |
|
"loss": 0.8598, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 11.616915422885572, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 2.942247964714714e-05, |
|
"loss": 0.8635, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 11.629353233830846, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 2.9217725183512868e-05, |
|
"loss": 0.8666, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 11.64179104477612, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 2.901356370571967e-05, |
|
"loss": 0.8631, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 11.654228855721392, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 2.880999692415147e-05, |
|
"loss": 0.8581, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 11.666666666666666, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 2.8607026544210114e-05, |
|
"loss": 0.8679, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 11.67910447761194, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 2.840465426630091e-05, |
|
"loss": 0.8675, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 11.691542288557214, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 2.8202881785818624e-05, |
|
"loss": 0.868, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 11.703980099502488, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 2.8001710793133118e-05, |
|
"loss": 0.8575, |
|
"step": 4705 |
|
}, |
|
{ |
|
"epoch": 11.716417910447761, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 2.7801142973575243e-05, |
|
"loss": 0.8638, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 11.728855721393035, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 2.7601180007422656e-05, |
|
"loss": 0.86, |
|
"step": 4715 |
|
}, |
|
{ |
|
"epoch": 11.74129353233831, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 2.7401823569885832e-05, |
|
"loss": 0.8646, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 11.753731343283581, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 2.7203075331094017e-05, |
|
"loss": 0.8615, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 11.766169154228855, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 2.700493695608113e-05, |
|
"loss": 0.867, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 11.778606965174129, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.6807410104771935e-05, |
|
"loss": 0.8605, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 11.791044776119403, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 2.6610496431968125e-05, |
|
"loss": 0.8723, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 11.803482587064677, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 2.64141975873344e-05, |
|
"loss": 0.8519, |
|
"step": 4745 |
|
}, |
|
{ |
|
"epoch": 11.81592039800995, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 2.6218515215384633e-05, |
|
"loss": 0.8754, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 11.828358208955224, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 2.6023450955468176e-05, |
|
"loss": 0.8616, |
|
"step": 4755 |
|
}, |
|
{ |
|
"epoch": 11.840796019900498, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 2.582900644175611e-05, |
|
"loss": 0.8626, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 11.85323383084577, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 2.5635183303227493e-05, |
|
"loss": 0.862, |
|
"step": 4765 |
|
}, |
|
{ |
|
"epoch": 11.865671641791044, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 2.54419831636557e-05, |
|
"loss": 0.8718, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 11.878109452736318, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 2.5249407641594937e-05, |
|
"loss": 0.8684, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 11.890547263681592, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 2.5057458350366648e-05, |
|
"loss": 0.8745, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 11.902985074626866, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 2.4866136898045843e-05, |
|
"loss": 0.8642, |
|
"step": 4785 |
|
}, |
|
{ |
|
"epoch": 11.91542288557214, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 2.467544488744784e-05, |
|
"loss": 0.8619, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 11.927860696517413, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 2.4485383916114747e-05, |
|
"loss": 0.8642, |
|
"step": 4795 |
|
}, |
|
{ |
|
"epoch": 11.940298507462687, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 2.4295955576301965e-05, |
|
"loss": 0.8637, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 11.952736318407961, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 2.4107161454965088e-05, |
|
"loss": 0.8556, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 11.965174129353233, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 2.3919003133746422e-05, |
|
"loss": 0.8644, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 11.977611940298507, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 2.3731482188961818e-05, |
|
"loss": 0.8622, |
|
"step": 4815 |
|
}, |
|
{ |
|
"epoch": 11.990049751243781, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 2.3544600191587375e-05, |
|
"loss": 0.8644, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_loss": 2.526445150375366, |
|
"eval_runtime": 0.5383, |
|
"eval_samples_per_second": 18.576, |
|
"eval_steps_per_second": 1.858, |
|
"step": 4824 |
|
}, |
|
{ |
|
"epoch": 12.002487562189055, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 2.3358358707246407e-05, |
|
"loss": 0.854, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 12.014925373134329, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 2.317275929619627e-05, |
|
"loss": 0.8554, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 12.027363184079602, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 2.2987803513315253e-05, |
|
"loss": 0.8547, |
|
"step": 4835 |
|
}, |
|
{ |
|
"epoch": 12.039800995024876, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 2.2803492908089553e-05, |
|
"loss": 0.8564, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 12.052238805970148, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 2.261982902460039e-05, |
|
"loss": 0.8588, |
|
"step": 4845 |
|
}, |
|
{ |
|
"epoch": 12.064676616915422, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 2.2436813401510982e-05, |
|
"loss": 0.8769, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 12.077114427860696, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 2.2254447572053693e-05, |
|
"loss": 0.8691, |
|
"step": 4855 |
|
}, |
|
{ |
|
"epoch": 12.08955223880597, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 2.2072733064017103e-05, |
|
"loss": 0.8645, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 12.101990049751244, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.189167139973335e-05, |
|
"loss": 0.8752, |
|
"step": 4865 |
|
}, |
|
{ |
|
"epoch": 12.114427860696518, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 2.171126409606532e-05, |
|
"loss": 0.8634, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 12.126865671641792, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 2.1531512664393838e-05, |
|
"loss": 0.8621, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 12.139303482587065, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 2.1352418610605186e-05, |
|
"loss": 0.8669, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 12.151741293532337, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 2.1173983435078325e-05, |
|
"loss": 0.8682, |
|
"step": 4885 |
|
}, |
|
{ |
|
"epoch": 12.164179104477611, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 2.0996208632672475e-05, |
|
"loss": 0.8648, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 12.176616915422885, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 2.081909569271442e-05, |
|
"loss": 0.8674, |
|
"step": 4895 |
|
}, |
|
{ |
|
"epoch": 12.189054726368159, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 2.064264609898616e-05, |
|
"loss": 0.8654, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 12.201492537313433, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 2.0466861329712473e-05, |
|
"loss": 0.8603, |
|
"step": 4905 |
|
}, |
|
{ |
|
"epoch": 12.213930348258707, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 2.0291742857548457e-05, |
|
"loss": 0.8548, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 12.22636815920398, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.0117292149567278e-05, |
|
"loss": 0.8561, |
|
"step": 4915 |
|
}, |
|
{ |
|
"epoch": 12.238805970149254, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 1.9943510667247813e-05, |
|
"loss": 0.8615, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 12.251243781094526, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 1.977039986646244e-05, |
|
"loss": 0.8554, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 12.2636815920398, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 1.9597961197464808e-05, |
|
"loss": 0.8663, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 12.276119402985074, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 1.9426196104877735e-05, |
|
"loss": 0.8647, |
|
"step": 4935 |
|
}, |
|
{ |
|
"epoch": 12.288557213930348, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.9255106027681126e-05, |
|
"loss": 0.8546, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 12.300995024875622, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 1.9084692399199755e-05, |
|
"loss": 0.8576, |
|
"step": 4945 |
|
}, |
|
{ |
|
"epoch": 12.313432835820896, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 1.89149566470915e-05, |
|
"loss": 0.8595, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 12.32587064676617, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.87459001933352e-05, |
|
"loss": 0.8592, |
|
"step": 4955 |
|
}, |
|
{ |
|
"epoch": 12.338308457711443, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 1.857752445421883e-05, |
|
"loss": 0.866, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 12.350746268656717, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.8409830840327546e-05, |
|
"loss": 0.859, |
|
"step": 4965 |
|
}, |
|
{ |
|
"epoch": 12.36318407960199, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 1.824282075653201e-05, |
|
"loss": 0.8759, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 12.375621890547263, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.8076495601976485e-05, |
|
"loss": 0.8667, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 12.388059701492537, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 1.791085677006722e-05, |
|
"loss": 0.8646, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 12.400497512437811, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 1.7745905648460638e-05, |
|
"loss": 0.8623, |
|
"step": 4985 |
|
}, |
|
{ |
|
"epoch": 12.412935323383085, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 1.758164361905188e-05, |
|
"loss": 0.8624, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 12.425373134328359, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.741807205796314e-05, |
|
"loss": 0.8515, |
|
"step": 4995 |
|
}, |
|
{ |
|
"epoch": 12.437810945273633, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 1.7255192335532077e-05, |
|
"loss": 0.8647, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 12.450248756218905, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 1.7093005816300445e-05, |
|
"loss": 0.8607, |
|
"step": 5005 |
|
}, |
|
{ |
|
"epoch": 12.462686567164178, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 1.6931513859002635e-05, |
|
"loss": 0.8665, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 12.475124378109452, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 1.677071781655426e-05, |
|
"loss": 0.8609, |
|
"step": 5015 |
|
}, |
|
{ |
|
"epoch": 12.487562189054726, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 1.6610619036040796e-05, |
|
"loss": 0.865, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 1.6451218858706374e-05, |
|
"loss": 0.8593, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 12.512437810945274, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 1.629251861994249e-05, |
|
"loss": 0.8633, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 12.524875621890548, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 1.613451964927688e-05, |
|
"loss": 0.8699, |
|
"step": 5035 |
|
}, |
|
{ |
|
"epoch": 12.537313432835822, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 1.5977223270362196e-05, |
|
"loss": 0.8753, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 12.549751243781095, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 1.5820630800965252e-05, |
|
"loss": 0.8605, |
|
"step": 5045 |
|
}, |
|
{ |
|
"epoch": 12.562189054726367, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 1.566474355295565e-05, |
|
"loss": 0.8612, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 12.574626865671641, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 1.5509562832294944e-05, |
|
"loss": 0.8664, |
|
"step": 5055 |
|
}, |
|
{ |
|
"epoch": 12.587064676616915, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 1.5355089939025714e-05, |
|
"loss": 0.8596, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 12.599502487562189, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.5201326167260644e-05, |
|
"loss": 0.869, |
|
"step": 5065 |
|
}, |
|
{ |
|
"epoch": 12.611940298507463, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 1.5048272805171615e-05, |
|
"loss": 0.863, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 12.624378109452737, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.4895931134979068e-05, |
|
"loss": 0.8637, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 12.63681592039801, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 1.4744302432941104e-05, |
|
"loss": 0.8727, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 12.649253731343283, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.459338796934293e-05, |
|
"loss": 0.8652, |
|
"step": 5085 |
|
}, |
|
{ |
|
"epoch": 12.661691542288557, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.4443189008486046e-05, |
|
"loss": 0.869, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 12.67412935323383, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.4293706808677832e-05, |
|
"loss": 0.8548, |
|
"step": 5095 |
|
}, |
|
{ |
|
"epoch": 12.686567164179104, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.4144942622220902e-05, |
|
"loss": 0.8591, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 12.699004975124378, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 1.3996897695402677e-05, |
|
"loss": 0.846, |
|
"step": 5105 |
|
}, |
|
{ |
|
"epoch": 12.711442786069652, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 1.3849573268484806e-05, |
|
"loss": 0.8737, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 12.723880597014926, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 1.3702970575692975e-05, |
|
"loss": 0.8637, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 12.7363184079602, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.3557090845206421e-05, |
|
"loss": 0.8468, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 12.748756218905474, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 1.3411935299147737e-05, |
|
"loss": 0.8556, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 12.761194029850746, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 1.3267505153572501e-05, |
|
"loss": 0.8563, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 12.77363184079602, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 1.3123801618459242e-05, |
|
"loss": 0.8725, |
|
"step": 5135 |
|
}, |
|
{ |
|
"epoch": 12.786069651741293, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 1.2980825897699234e-05, |
|
"loss": 0.8682, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 12.798507462686567, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 1.2838579189086353e-05, |
|
"loss": 0.8563, |
|
"step": 5145 |
|
}, |
|
{ |
|
"epoch": 12.810945273631841, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 1.269706268430716e-05, |
|
"loss": 0.8644, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 12.823383084577115, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 1.255627756893083e-05, |
|
"loss": 0.8618, |
|
"step": 5155 |
|
}, |
|
{ |
|
"epoch": 12.835820895522389, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 1.2416225022399286e-05, |
|
"loss": 0.8569, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 12.84825870646766, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 1.2276906218017193e-05, |
|
"loss": 0.8609, |
|
"step": 5165 |
|
}, |
|
{ |
|
"epoch": 12.860696517412935, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 1.2138322322942286e-05, |
|
"loss": 0.864, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 12.873134328358208, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 1.2000474498175552e-05, |
|
"loss": 0.8628, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 12.885572139303482, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.186336389855136e-05, |
|
"loss": 0.8593, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 12.898009950248756, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 1.1726991672728005e-05, |
|
"loss": 0.8576, |
|
"step": 5185 |
|
}, |
|
{ |
|
"epoch": 12.91044776119403, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.1591358963177923e-05, |
|
"loss": 0.8574, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 12.922885572139304, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.1456466906178208e-05, |
|
"loss": 0.8568, |
|
"step": 5195 |
|
}, |
|
{ |
|
"epoch": 12.935323383084578, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 1.1322316631801022e-05, |
|
"loss": 0.851, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 12.947761194029852, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 1.118890926390419e-05, |
|
"loss": 0.8714, |
|
"step": 5205 |
|
}, |
|
{ |
|
"epoch": 12.960199004975124, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 1.1056245920121788e-05, |
|
"loss": 0.8633, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 12.972636815920398, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.0924327711854687e-05, |
|
"loss": 0.8635, |
|
"step": 5215 |
|
}, |
|
{ |
|
"epoch": 12.985074626865671, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 1.0793155744261351e-05, |
|
"loss": 0.8568, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 12.997512437810945, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 1.0662731116248537e-05, |
|
"loss": 0.8641, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_loss": 2.5267555713653564, |
|
"eval_runtime": 0.541, |
|
"eval_samples_per_second": 18.484, |
|
"eval_steps_per_second": 1.848, |
|
"step": 5226 |
|
}, |
|
{ |
|
"epoch": 13.009950248756219, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 1.0533054920462105e-05, |
|
"loss": 0.8623, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 13.022388059701493, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 1.0404128243277777e-05, |
|
"loss": 0.8567, |
|
"step": 5235 |
|
}, |
|
{ |
|
"epoch": 13.034825870646767, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 1.0275952164792169e-05, |
|
"loss": 0.8527, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 13.04726368159204, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 1.0148527758813665e-05, |
|
"loss": 0.8612, |
|
"step": 5245 |
|
}, |
|
{ |
|
"epoch": 13.059701492537313, |
|
"grad_norm": 0.625, |
|
"learning_rate": 1.0021856092853432e-05, |
|
"loss": 0.8578, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 13.072139303482587, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 9.895938228116452e-06, |
|
"loss": 0.8562, |
|
"step": 5255 |
|
}, |
|
{ |
|
"epoch": 13.08457711442786, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 9.770775219492689e-06, |
|
"loss": 0.8576, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 13.097014925373134, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 9.646368115548232e-06, |
|
"loss": 0.8617, |
|
"step": 5265 |
|
}, |
|
{ |
|
"epoch": 13.109452736318408, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 9.522717958516492e-06, |
|
"loss": 0.8491, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 13.121890547263682, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 9.39982578428944e-06, |
|
"loss": 0.8518, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 13.134328358208956, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 9.277692622409018e-06, |
|
"loss": 0.8697, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 13.14676616915423, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 9.156319496058452e-06, |
|
"loss": 0.8639, |
|
"step": 5285 |
|
}, |
|
{ |
|
"epoch": 13.159203980099502, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 9.035707422053663e-06, |
|
"loss": 0.8516, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 13.171641791044776, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 8.915857410834794e-06, |
|
"loss": 0.8524, |
|
"step": 5295 |
|
}, |
|
{ |
|
"epoch": 13.18407960199005, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 8.796770466457749e-06, |
|
"loss": 0.8805, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 13.196517412935323, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 8.678447586585735e-06, |
|
"loss": 0.8565, |
|
"step": 5305 |
|
}, |
|
{ |
|
"epoch": 13.208955223880597, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 8.56088976248095e-06, |
|
"loss": 0.8575, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 13.221393034825871, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 8.444097978996235e-06, |
|
"loss": 0.8625, |
|
"step": 5315 |
|
}, |
|
{ |
|
"epoch": 13.233830845771145, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 8.328073214566868e-06, |
|
"loss": 0.8568, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 13.246268656716419, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 8.212816441202309e-06, |
|
"loss": 0.8539, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 13.25870646766169, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 8.098328624478135e-06, |
|
"loss": 0.8549, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 13.271144278606965, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 7.984610723527897e-06, |
|
"loss": 0.8628, |
|
"step": 5335 |
|
}, |
|
{ |
|
"epoch": 13.283582089552239, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 7.871663691035103e-06, |
|
"loss": 0.8614, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 13.296019900497512, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 7.759488473225196e-06, |
|
"loss": 0.8574, |
|
"step": 5345 |
|
}, |
|
{ |
|
"epoch": 13.308457711442786, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 7.64808600985768e-06, |
|
"loss": 0.8573, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 13.32089552238806, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 7.53745723421827e-06, |
|
"loss": 0.8611, |
|
"step": 5355 |
|
}, |
|
{ |
|
"epoch": 13.333333333333334, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 7.427603073110967e-06, |
|
"loss": 0.8709, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 13.345771144278608, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 7.318524446850394e-06, |
|
"loss": 0.862, |
|
"step": 5365 |
|
}, |
|
{ |
|
"epoch": 13.35820895522388, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 7.2102222692540415e-06, |
|
"loss": 0.8657, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 13.370646766169154, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 7.102697447634643e-06, |
|
"loss": 0.8644, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 13.383084577114428, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 6.995950882792513e-06, |
|
"loss": 0.8632, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 13.395522388059701, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 6.889983469008055e-06, |
|
"loss": 0.8744, |
|
"step": 5385 |
|
}, |
|
{ |
|
"epoch": 13.407960199004975, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 6.784796094034263e-06, |
|
"loss": 0.8677, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 13.42039800995025, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 6.680389639089291e-06, |
|
"loss": 0.8677, |
|
"step": 5395 |
|
}, |
|
{ |
|
"epoch": 13.432835820895523, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 6.576764978849004e-06, |
|
"loss": 0.8686, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 13.445273631840797, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 6.473922981439728e-06, |
|
"loss": 0.8794, |
|
"step": 5405 |
|
}, |
|
{ |
|
"epoch": 13.457711442786069, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 6.371864508430991e-06, |
|
"loss": 0.8551, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 13.470149253731343, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 6.27059041482817e-06, |
|
"loss": 0.8577, |
|
"step": 5415 |
|
}, |
|
{ |
|
"epoch": 13.482587064676617, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 6.170101549065521e-06, |
|
"loss": 0.869, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 13.49502487562189, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 6.070398752998896e-06, |
|
"loss": 0.861, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 13.507462686567164, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 5.971482861898836e-06, |
|
"loss": 0.8626, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 13.519900497512438, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 5.873354704443423e-06, |
|
"loss": 0.8613, |
|
"step": 5435 |
|
}, |
|
{ |
|
"epoch": 13.532338308457712, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 5.776015102711496e-06, |
|
"loss": 0.8706, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 13.544776119402986, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 5.679464872175666e-06, |
|
"loss": 0.85, |
|
"step": 5445 |
|
}, |
|
{ |
|
"epoch": 13.557213930348258, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 5.583704821695445e-06, |
|
"loss": 0.8716, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 13.569651741293532, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 5.488735753510621e-06, |
|
"loss": 0.8632, |
|
"step": 5455 |
|
}, |
|
{ |
|
"epoch": 13.582089552238806, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 5.394558463234378e-06, |
|
"loss": 0.8563, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 13.59452736318408, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 5.301173739846743e-06, |
|
"loss": 0.868, |
|
"step": 5465 |
|
}, |
|
{ |
|
"epoch": 13.606965174129353, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 5.208582365687886e-06, |
|
"loss": 0.8608, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 13.619402985074627, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 5.116785116451661e-06, |
|
"loss": 0.8625, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 13.631840796019901, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 5.025782761179032e-06, |
|
"loss": 0.8538, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 13.644278606965175, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 4.9355760622516344e-06, |
|
"loss": 0.8609, |
|
"step": 5485 |
|
}, |
|
{ |
|
"epoch": 13.656716417910447, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 4.846165775385459e-06, |
|
"loss": 0.8742, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 13.66915422885572, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 4.757552649624442e-06, |
|
"loss": 0.8628, |
|
"step": 5495 |
|
}, |
|
{ |
|
"epoch": 13.681592039800995, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 4.669737427334242e-06, |
|
"loss": 0.8632, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 13.694029850746269, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 4.5827208441959424e-06, |
|
"loss": 0.8565, |
|
"step": 5505 |
|
}, |
|
{ |
|
"epoch": 13.706467661691542, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 4.496503629200022e-06, |
|
"loss": 0.8601, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 13.718905472636816, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 4.4110865046401055e-06, |
|
"loss": 0.8632, |
|
"step": 5515 |
|
}, |
|
{ |
|
"epoch": 13.73134328358209, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 4.3264701861070345e-06, |
|
"loss": 0.8667, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 13.743781094527364, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 4.242655382482752e-06, |
|
"loss": 0.8646, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 13.756218905472636, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 4.159642795934471e-06, |
|
"loss": 0.8618, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 13.76865671641791, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 4.077433121908747e-06, |
|
"loss": 0.8679, |
|
"step": 5535 |
|
}, |
|
{ |
|
"epoch": 13.781094527363184, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 3.996027049125639e-06, |
|
"loss": 0.866, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 13.793532338308458, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 3.915425259572947e-06, |
|
"loss": 0.8683, |
|
"step": 5545 |
|
}, |
|
{ |
|
"epoch": 13.805970149253731, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 3.835628428500515e-06, |
|
"loss": 0.8706, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 13.818407960199005, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 3.756637224414572e-06, |
|
"loss": 0.8635, |
|
"step": 5555 |
|
}, |
|
{ |
|
"epoch": 13.83084577114428, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 3.6784523090721114e-06, |
|
"loss": 0.862, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 13.843283582089553, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 3.601074337475352e-06, |
|
"loss": 0.8628, |
|
"step": 5565 |
|
}, |
|
{ |
|
"epoch": 13.855721393034825, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 3.5245039578662764e-06, |
|
"loss": 0.8565, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 13.868159203980099, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 3.448741811721179e-06, |
|
"loss": 0.8708, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 13.880597014925373, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 3.3737885337452814e-06, |
|
"loss": 0.854, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 13.893034825870647, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 3.2996447518674256e-06, |
|
"loss": 0.8561, |
|
"step": 5585 |
|
}, |
|
{ |
|
"epoch": 13.90547263681592, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 3.226311087234868e-06, |
|
"loss": 0.8579, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 13.917910447761194, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 3.153788154207926e-06, |
|
"loss": 0.8538, |
|
"step": 5595 |
|
}, |
|
{ |
|
"epoch": 13.930348258706468, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 3.0820765603550184e-06, |
|
"loss": 0.8616, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 13.942786069651742, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 3.011176906447444e-06, |
|
"loss": 0.862, |
|
"step": 5605 |
|
}, |
|
{ |
|
"epoch": 13.955223880597014, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 2.9410897864544206e-06, |
|
"loss": 0.8629, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 13.967661691542288, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 2.8718157875380234e-06, |
|
"loss": 0.8473, |
|
"step": 5615 |
|
}, |
|
{ |
|
"epoch": 13.980099502487562, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 2.803355490048365e-06, |
|
"loss": 0.8591, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 13.992537313432836, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 2.735709467518699e-06, |
|
"loss": 0.8607, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_loss": 2.5270578861236572, |
|
"eval_runtime": 0.5412, |
|
"eval_samples_per_second": 18.478, |
|
"eval_steps_per_second": 1.848, |
|
"step": 5628 |
|
}, |
|
{ |
|
"epoch": 14.00497512437811, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 2.66887828666057e-06, |
|
"loss": 0.8563, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 14.017412935323383, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 2.6028625073591273e-06, |
|
"loss": 0.8643, |
|
"step": 5635 |
|
}, |
|
{ |
|
"epoch": 14.029850746268657, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 2.5376626826683956e-06, |
|
"loss": 0.8715, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 14.042288557213931, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 2.4732793588066794e-06, |
|
"loss": 0.8645, |
|
"step": 5645 |
|
}, |
|
{ |
|
"epoch": 14.054726368159203, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.4097130751519205e-06, |
|
"loss": 0.8701, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 14.067164179104477, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 2.3469643642372586e-06, |
|
"loss": 0.8607, |
|
"step": 5655 |
|
}, |
|
{ |
|
"epoch": 14.07960199004975, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 2.2850337517465124e-06, |
|
"loss": 0.8576, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 14.092039800995025, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.223921756509828e-06, |
|
"loss": 0.8513, |
|
"step": 5665 |
|
}, |
|
{ |
|
"epoch": 14.104477611940299, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.1636288904992585e-06, |
|
"loss": 0.8489, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 14.116915422885572, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 2.1041556588245357e-06, |
|
"loss": 0.8546, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 14.129353233830846, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 2.045502559728818e-06, |
|
"loss": 0.8715, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 14.14179104477612, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 1.9876700845845475e-06, |
|
"loss": 0.8585, |
|
"step": 5685 |
|
}, |
|
{ |
|
"epoch": 14.154228855721392, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 1.930658717889233e-06, |
|
"loss": 0.8595, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 14.166666666666666, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 1.874468937261531e-06, |
|
"loss": 0.8627, |
|
"step": 5695 |
|
}, |
|
{ |
|
"epoch": 14.17910447761194, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.8191012134371577e-06, |
|
"loss": 0.8577, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 14.191542288557214, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 1.7645560102649395e-06, |
|
"loss": 0.8715, |
|
"step": 5705 |
|
}, |
|
{ |
|
"epoch": 14.203980099502488, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 1.71083378470297e-06, |
|
"loss": 0.8643, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 14.216417910447761, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.6579349868147687e-06, |
|
"loss": 0.8727, |
|
"step": 5715 |
|
}, |
|
{ |
|
"epoch": 14.228855721393035, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.605860059765496e-06, |
|
"loss": 0.8588, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 14.24129353233831, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 1.5546094398182331e-06, |
|
"loss": 0.856, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 14.253731343283581, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 1.5041835563303742e-06, |
|
"loss": 0.8671, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 14.266169154228855, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 1.4545828317499842e-06, |
|
"loss": 0.8567, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 14.278606965174129, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 1.4058076816122589e-06, |
|
"loss": 0.8658, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 14.291044776119403, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 1.3578585145360812e-06, |
|
"loss": 0.8646, |
|
"step": 5745 |
|
}, |
|
{ |
|
"epoch": 14.303482587064677, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 1.3107357322205693e-06, |
|
"loss": 0.8602, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 14.31592039800995, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 1.2644397294417132e-06, |
|
"loss": 0.8763, |
|
"step": 5755 |
|
}, |
|
{ |
|
"epoch": 14.328358208955224, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 1.2189708940490652e-06, |
|
"loss": 0.8715, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 14.340796019900498, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 1.1743296069624987e-06, |
|
"loss": 0.8692, |
|
"step": 5765 |
|
}, |
|
{ |
|
"epoch": 14.35323383084577, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.1305162421690441e-06, |
|
"loss": 0.865, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 14.365671641791044, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 1.0875311667196908e-06, |
|
"loss": 0.8671, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 14.378109452736318, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 1.045374740726357e-06, |
|
"loss": 0.8588, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 14.390547263681592, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 1.0040473173588805e-06, |
|
"loss": 0.8607, |
|
"step": 5785 |
|
}, |
|
{ |
|
"epoch": 14.402985074626866, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 9.635492428420434e-07, |
|
"loss": 0.8547, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 14.41542288557214, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 9.238808564526302e-07, |
|
"loss": 0.8644, |
|
"step": 5795 |
|
}, |
|
{ |
|
"epoch": 14.427860696517413, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 8.850424905166743e-07, |
|
"loss": 0.8673, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 14.440298507462687, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 8.470344704066046e-07, |
|
"loss": 0.8577, |
|
"step": 5805 |
|
}, |
|
{ |
|
"epoch": 14.45273631840796, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 8.098571145385592e-07, |
|
"loss": 0.8646, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 14.465174129353233, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 7.735107343696867e-07, |
|
"loss": 0.8545, |
|
"step": 5815 |
|
}, |
|
{ |
|
"epoch": 14.477611940298507, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 7.379956343955386e-07, |
|
"loss": 0.8613, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 14.490049751243781, |
|
"grad_norm": 0.8125, |
|
"learning_rate": 7.033121121475694e-07, |
|
"loss": 0.8637, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 14.502487562189055, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 6.694604581905517e-07, |
|
"loss": 0.8659, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 14.514925373134329, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 6.364409561202323e-07, |
|
"loss": 0.8711, |
|
"step": 5835 |
|
}, |
|
{ |
|
"epoch": 14.527363184079602, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 6.042538825609345e-07, |
|
"loss": 0.8632, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 14.539800995024876, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 5.728995071631715e-07, |
|
"loss": 0.8573, |
|
"step": 5845 |
|
}, |
|
{ |
|
"epoch": 14.552238805970148, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 5.42378092601481e-07, |
|
"loss": 0.86, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 14.564676616915422, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 5.126898945721826e-07, |
|
"loss": 0.859, |
|
"step": 5855 |
|
}, |
|
{ |
|
"epoch": 14.577114427860696, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 4.838351617912351e-07, |
|
"loss": 0.8643, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 14.58955223880597, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 4.558141359921386e-07, |
|
"loss": 0.8546, |
|
"step": 5865 |
|
}, |
|
{ |
|
"epoch": 14.601990049751244, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 4.286270519239466e-07, |
|
"loss": 0.8519, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 14.614427860696518, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 4.022741373492678e-07, |
|
"loss": 0.8497, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 14.626865671641792, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 3.7675561304238994e-07, |
|
"loss": 0.8511, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 14.639303482587065, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 3.520716927873813e-07, |
|
"loss": 0.8585, |
|
"step": 5885 |
|
}, |
|
{ |
|
"epoch": 14.65174129353234, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 3.282225833763364e-07, |
|
"loss": 0.8584, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 14.664179104477611, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 3.0520848460765527e-07, |
|
"loss": 0.8628, |
|
"step": 5895 |
|
}, |
|
{ |
|
"epoch": 14.676616915422885, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 2.8302958928431154e-07, |
|
"loss": 0.8681, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 14.689054726368159, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 2.6168608321233135e-07, |
|
"loss": 0.864, |
|
"step": 5905 |
|
}, |
|
{ |
|
"epoch": 14.701492537313433, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.4117814519911684e-07, |
|
"loss": 0.8568, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 14.713930348258707, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.2150594705206973e-07, |
|
"loss": 0.8636, |
|
"step": 5915 |
|
}, |
|
{ |
|
"epoch": 14.72636815920398, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 2.0266965357704783e-07, |
|
"loss": 0.8656, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 14.738805970149254, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 1.846694225770551e-07, |
|
"loss": 0.8568, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 14.751243781094526, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 1.675054048509095e-07, |
|
"loss": 0.8546, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 14.7636815920398, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 1.5117774419193264e-07, |
|
"loss": 0.8538, |
|
"step": 5935 |
|
}, |
|
{ |
|
"epoch": 14.776119402985074, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 1.3568657738678435e-07, |
|
"loss": 0.8566, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 14.788557213930348, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 1.2103203421434117e-07, |
|
"loss": 0.86, |
|
"step": 5945 |
|
}, |
|
{ |
|
"epoch": 14.800995024875622, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 1.0721423744454173e-07, |
|
"loss": 0.8696, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 14.813432835820896, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 9.423330283742093e-08, |
|
"loss": 0.863, |
|
"step": 5955 |
|
}, |
|
{ |
|
"epoch": 14.82587064676617, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 8.208933914208849e-08, |
|
"loss": 0.8672, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 14.838308457711443, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 7.078244809587409e-08, |
|
"loss": 0.8605, |
|
"step": 5965 |
|
}, |
|
{ |
|
"epoch": 14.850746268656717, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 6.031272442341696e-08, |
|
"loss": 0.8627, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 14.86318407960199, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 5.0680255835922065e-08, |
|
"loss": 0.8615, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 14.875621890547263, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 4.188512303038294e-08, |
|
"loss": 0.864, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 14.888059701492537, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 3.392739968894887e-08, |
|
"loss": 0.8555, |
|
"step": 5985 |
|
}, |
|
{ |
|
"epoch": 14.900497512437811, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 2.6807152478258713e-08, |
|
"loss": 0.875, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 14.912935323383085, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 2.052444104891915e-08, |
|
"loss": 0.8681, |
|
"step": 5995 |
|
}, |
|
{ |
|
"epoch": 14.925373134328359, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 1.5079318035016164e-08, |
|
"loss": 0.8703, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 14.937810945273633, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 1.0471829053615435e-08, |
|
"loss": 0.8529, |
|
"step": 6005 |
|
}, |
|
{ |
|
"epoch": 14.950248756218905, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 6.702012704440374e-09, |
|
"loss": 0.8457, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 14.962686567164178, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 3.769900569505769e-09, |
|
"loss": 0.861, |
|
"step": 6015 |
|
}, |
|
{ |
|
"epoch": 14.975124378109452, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 1.6755172128957163e-09, |
|
"loss": 0.8559, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 14.987562189054726, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 4.1888018053048983e-10, |
|
"loss": 0.8553, |
|
"step": 6025 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.0, |
|
"loss": 0.8609, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_loss": 2.5269672870635986, |
|
"eval_runtime": 0.537, |
|
"eval_samples_per_second": 18.622, |
|
"eval_steps_per_second": 1.862, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"step": 6030, |
|
"total_flos": 3.5418612320488653e+18, |
|
"train_loss": 0.9510796088682083, |
|
"train_runtime": 20707.2957, |
|
"train_samples_per_second": 13.97, |
|
"train_steps_per_second": 0.291 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 6030, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 15, |
|
"save_steps": 100, |
|
"total_flos": 3.5418612320488653e+18, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |