|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9995577178239717, |
|
"eval_steps": 142, |
|
"global_step": 565, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 5.333217144012451, |
|
"learning_rate": 3e-06, |
|
"loss": 3.4697, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"eval_loss": 3.547457695007324, |
|
"eval_runtime": 14.4761, |
|
"eval_samples_per_second": 32.882, |
|
"eval_steps_per_second": 8.22, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 5.45876932144165, |
|
"learning_rate": 6e-06, |
|
"loss": 3.4361, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.8387346267700195, |
|
"learning_rate": 9e-06, |
|
"loss": 3.6111, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.629228591918945, |
|
"learning_rate": 1.2e-05, |
|
"loss": 3.468, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.132396697998047, |
|
"learning_rate": 1.5e-05, |
|
"loss": 3.4794, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.605806350708008, |
|
"learning_rate": 1.8e-05, |
|
"loss": 3.4557, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.7556891441345215, |
|
"learning_rate": 2.1e-05, |
|
"loss": 3.4051, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 4.8776655197143555, |
|
"learning_rate": 2.4e-05, |
|
"loss": 3.1824, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.992830753326416, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 3.0846, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.38725471496582, |
|
"learning_rate": 3e-05, |
|
"loss": 2.8616, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.4103684425354, |
|
"learning_rate": 2.9999973928796923e-05, |
|
"loss": 2.5679, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.2000298500061035, |
|
"learning_rate": 2.999989571527831e-05, |
|
"loss": 2.2378, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 3.7659687995910645, |
|
"learning_rate": 2.9999765359716046e-05, |
|
"loss": 1.8411, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 3.1735832691192627, |
|
"learning_rate": 2.999958286256327e-05, |
|
"loss": 1.519, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.7938477993011475, |
|
"learning_rate": 2.9999348224454367e-05, |
|
"loss": 1.2343, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.5651497840881348, |
|
"learning_rate": 2.9999061446204985e-05, |
|
"loss": 1.0183, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.3572280406951904, |
|
"learning_rate": 2.9998722528812e-05, |
|
"loss": 0.7906, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.2292940616607666, |
|
"learning_rate": 2.9998331473453557e-05, |
|
"loss": 0.6625, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.9572314023971558, |
|
"learning_rate": 2.9997888281489015e-05, |
|
"loss": 0.5271, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.7180088758468628, |
|
"learning_rate": 2.9997392954458985e-05, |
|
"loss": 0.4319, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.5089068412780762, |
|
"learning_rate": 2.9996845494085306e-05, |
|
"loss": 0.351, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.2355144023895264, |
|
"learning_rate": 2.999624590227103e-05, |
|
"loss": 0.2994, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.7736936211585999, |
|
"learning_rate": 2.9995594181100443e-05, |
|
"loss": 0.1704, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.5642102956771851, |
|
"learning_rate": 2.9994890332839027e-05, |
|
"loss": 0.1217, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.34946027398109436, |
|
"learning_rate": 2.9994134359933476e-05, |
|
"loss": 0.0992, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.24627894163131714, |
|
"learning_rate": 2.999332626501167e-05, |
|
"loss": 0.0828, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.28634899854660034, |
|
"learning_rate": 2.9992466050882673e-05, |
|
"loss": 0.0945, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.1457609236240387, |
|
"learning_rate": 2.9991553720536733e-05, |
|
"loss": 0.0924, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3852764964103699, |
|
"learning_rate": 2.9990589277145254e-05, |
|
"loss": 0.0868, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.28345659375190735, |
|
"learning_rate": 2.9989572724060797e-05, |
|
"loss": 0.0845, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.24844343960285187, |
|
"learning_rate": 2.998850406481707e-05, |
|
"loss": 0.0756, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.2215878665447235, |
|
"learning_rate": 2.9987383303128887e-05, |
|
"loss": 0.0799, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.20273327827453613, |
|
"learning_rate": 2.9986210442892215e-05, |
|
"loss": 0.099, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.23849628865718842, |
|
"learning_rate": 2.9984985488184086e-05, |
|
"loss": 0.0709, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.28950855135917664, |
|
"learning_rate": 2.9983708443262656e-05, |
|
"loss": 0.0736, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.4729451537132263, |
|
"learning_rate": 2.9982379312567126e-05, |
|
"loss": 0.0898, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.26402196288108826, |
|
"learning_rate": 2.998099810071777e-05, |
|
"loss": 0.0989, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.21811941266059875, |
|
"learning_rate": 2.9979564812515906e-05, |
|
"loss": 0.0816, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.4151253402233124, |
|
"learning_rate": 2.9978079452943875e-05, |
|
"loss": 0.0646, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.5114066004753113, |
|
"learning_rate": 2.9976542027165016e-05, |
|
"loss": 0.0986, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.23638691008090973, |
|
"learning_rate": 2.9974952540523676e-05, |
|
"loss": 0.0929, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.18284691870212555, |
|
"learning_rate": 2.997331099854516e-05, |
|
"loss": 0.0707, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.415445476770401, |
|
"learning_rate": 2.9971617406935735e-05, |
|
"loss": 0.0875, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.35435256361961365, |
|
"learning_rate": 2.9969871771582596e-05, |
|
"loss": 0.0917, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.5223746299743652, |
|
"learning_rate": 2.996807409855385e-05, |
|
"loss": 0.0839, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.2852335274219513, |
|
"learning_rate": 2.99662243940985e-05, |
|
"loss": 0.0581, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.19847406446933746, |
|
"learning_rate": 2.9964322664646412e-05, |
|
"loss": 0.0581, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.42156773805618286, |
|
"learning_rate": 2.9962368916808308e-05, |
|
"loss": 0.0949, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.1932952105998993, |
|
"learning_rate": 2.9960363157375724e-05, |
|
"loss": 0.0936, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.28940048813819885, |
|
"learning_rate": 2.9958305393321e-05, |
|
"loss": 0.1074, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.4381609857082367, |
|
"learning_rate": 2.995619563179726e-05, |
|
"loss": 0.1053, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.16727806627750397, |
|
"learning_rate": 2.9954033880138368e-05, |
|
"loss": 0.0825, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3134240210056305, |
|
"learning_rate": 2.995182014585892e-05, |
|
"loss": 0.0565, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.16883538663387299, |
|
"learning_rate": 2.9949554436654215e-05, |
|
"loss": 0.0805, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.4369119107723236, |
|
"learning_rate": 2.994723676040022e-05, |
|
"loss": 0.079, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.22722230851650238, |
|
"learning_rate": 2.9944867125153548e-05, |
|
"loss": 0.0707, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.2767599821090698, |
|
"learning_rate": 2.9942445539151432e-05, |
|
"loss": 0.0877, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.2087458074092865, |
|
"learning_rate": 2.9939972010811693e-05, |
|
"loss": 0.0746, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.2058565467596054, |
|
"learning_rate": 2.993744654873272e-05, |
|
"loss": 0.0783, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.2181982547044754, |
|
"learning_rate": 2.993486916169341e-05, |
|
"loss": 0.0764, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.18020497262477875, |
|
"learning_rate": 2.9932239858653183e-05, |
|
"loss": 0.0675, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.2599628269672394, |
|
"learning_rate": 2.992955864875192e-05, |
|
"loss": 0.0957, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.2814478576183319, |
|
"learning_rate": 2.9926825541309928e-05, |
|
"loss": 0.0926, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.21382613480091095, |
|
"learning_rate": 2.9924040545827936e-05, |
|
"loss": 0.0944, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.16025784611701965, |
|
"learning_rate": 2.9921203671987025e-05, |
|
"loss": 0.0727, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.22130753099918365, |
|
"learning_rate": 2.9918314929648637e-05, |
|
"loss": 0.0906, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.16255636513233185, |
|
"learning_rate": 2.991537432885449e-05, |
|
"loss": 0.057, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3057488203048706, |
|
"learning_rate": 2.991238187982659e-05, |
|
"loss": 0.0866, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.24336600303649902, |
|
"learning_rate": 2.9909337592967176e-05, |
|
"loss": 0.0509, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.25192490220069885, |
|
"learning_rate": 2.9906241478858666e-05, |
|
"loss": 0.0677, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.6552413105964661, |
|
"learning_rate": 2.990309354826366e-05, |
|
"loss": 0.1014, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.22106248140335083, |
|
"learning_rate": 2.9899893812124862e-05, |
|
"loss": 0.0779, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.2502892017364502, |
|
"learning_rate": 2.989664228156507e-05, |
|
"loss": 0.0759, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.23127524554729462, |
|
"learning_rate": 2.9893338967887128e-05, |
|
"loss": 0.0735, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.22794046998023987, |
|
"learning_rate": 2.988998388257388e-05, |
|
"loss": 0.048, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.2867235541343689, |
|
"learning_rate": 2.988657703728815e-05, |
|
"loss": 0.0449, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.28723156452178955, |
|
"learning_rate": 2.9883118443872662e-05, |
|
"loss": 0.0525, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.21423418819904327, |
|
"learning_rate": 2.9879608114350064e-05, |
|
"loss": 0.0684, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.22717736661434174, |
|
"learning_rate": 2.9876046060922803e-05, |
|
"loss": 0.0496, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.52623051404953, |
|
"learning_rate": 2.987243229597316e-05, |
|
"loss": 0.1125, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.41985878348350525, |
|
"learning_rate": 2.9868766832063156e-05, |
|
"loss": 0.1115, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.5244089365005493, |
|
"learning_rate": 2.986504968193454e-05, |
|
"loss": 0.1185, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.4969002604484558, |
|
"learning_rate": 2.9861280858508712e-05, |
|
"loss": 0.1123, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.31229168176651, |
|
"learning_rate": 2.9857460374886717e-05, |
|
"loss": 0.082, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.34874776005744934, |
|
"learning_rate": 2.985358824434916e-05, |
|
"loss": 0.0732, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.33042728900909424, |
|
"learning_rate": 2.984966448035619e-05, |
|
"loss": 0.0719, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.9238921403884888, |
|
"learning_rate": 2.9845689096547442e-05, |
|
"loss": 0.1171, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.24976597726345062, |
|
"learning_rate": 2.9841662106741986e-05, |
|
"loss": 0.0875, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.23768329620361328, |
|
"learning_rate": 2.983758352493829e-05, |
|
"loss": 0.0821, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.1419605314731598, |
|
"learning_rate": 2.983345336531415e-05, |
|
"loss": 0.0696, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.15112988650798798, |
|
"learning_rate": 2.9829271642226665e-05, |
|
"loss": 0.0958, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.11599217355251312, |
|
"learning_rate": 2.9825038370212183e-05, |
|
"loss": 0.0838, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.34909361600875854, |
|
"learning_rate": 2.982075356398623e-05, |
|
"loss": 0.0922, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.3156541585922241, |
|
"learning_rate": 2.9816417238443482e-05, |
|
"loss": 0.106, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.21198609471321106, |
|
"learning_rate": 2.9812029408657698e-05, |
|
"loss": 0.0832, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.17507284879684448, |
|
"learning_rate": 2.9807590089881687e-05, |
|
"loss": 0.0736, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.09285402297973633, |
|
"learning_rate": 2.980309929754722e-05, |
|
"loss": 0.0816, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.21642349660396576, |
|
"learning_rate": 2.9798557047265023e-05, |
|
"loss": 0.0806, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.20919114351272583, |
|
"learning_rate": 2.979396335482469e-05, |
|
"loss": 0.0852, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.1738620400428772, |
|
"learning_rate": 2.9789318236194618e-05, |
|
"loss": 0.079, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.19599463045597076, |
|
"learning_rate": 2.9784621707521993e-05, |
|
"loss": 0.0635, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.18762755393981934, |
|
"learning_rate": 2.97798737851327e-05, |
|
"loss": 0.0863, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.15935586392879486, |
|
"learning_rate": 2.977507448553128e-05, |
|
"loss": 0.0692, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.25695595145225525, |
|
"learning_rate": 2.9770223825400872e-05, |
|
"loss": 0.0914, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.17666970193386078, |
|
"learning_rate": 2.9765321821603143e-05, |
|
"loss": 0.0617, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.1571740359067917, |
|
"learning_rate": 2.9760368491178244e-05, |
|
"loss": 0.0797, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.13504642248153687, |
|
"learning_rate": 2.9755363851344753e-05, |
|
"loss": 0.0647, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.12887655198574066, |
|
"learning_rate": 2.9750307919499595e-05, |
|
"loss": 0.0662, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.22609621286392212, |
|
"learning_rate": 2.9745200713218002e-05, |
|
"loss": 0.0809, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.2685830295085907, |
|
"learning_rate": 2.9740042250253443e-05, |
|
"loss": 0.084, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.18346565961837769, |
|
"learning_rate": 2.973483254853756e-05, |
|
"loss": 0.0568, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.4906081259250641, |
|
"learning_rate": 2.9729571626180116e-05, |
|
"loss": 0.0911, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.28486496210098267, |
|
"learning_rate": 2.972425950146891e-05, |
|
"loss": 0.0671, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.35737311840057373, |
|
"learning_rate": 2.9718896192869758e-05, |
|
"loss": 0.1005, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.27113574743270874, |
|
"learning_rate": 2.9713481719026368e-05, |
|
"loss": 0.0655, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.17216627299785614, |
|
"learning_rate": 2.970801609876032e-05, |
|
"loss": 0.0463, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.1476421356201172, |
|
"learning_rate": 2.9702499351070992e-05, |
|
"loss": 0.0534, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.3753887116909027, |
|
"learning_rate": 2.969693149513548e-05, |
|
"loss": 0.0763, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.22127705812454224, |
|
"learning_rate": 2.969131255030855e-05, |
|
"loss": 0.0548, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.18454331159591675, |
|
"learning_rate": 2.9685642536122545e-05, |
|
"loss": 0.0494, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.24991334974765778, |
|
"learning_rate": 2.9679921472287358e-05, |
|
"loss": 0.089, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.17822043597698212, |
|
"learning_rate": 2.967414937869031e-05, |
|
"loss": 0.0337, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.19183677434921265, |
|
"learning_rate": 2.9668326275396133e-05, |
|
"loss": 0.0627, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.1919647753238678, |
|
"learning_rate": 2.966245218264687e-05, |
|
"loss": 0.0549, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3507068455219269, |
|
"learning_rate": 2.96565271208618e-05, |
|
"loss": 0.0802, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3020438253879547, |
|
"learning_rate": 2.9650551110637397e-05, |
|
"loss": 0.0803, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.951285183429718, |
|
"learning_rate": 2.964452417274723e-05, |
|
"loss": 0.1049, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.6102455258369446, |
|
"learning_rate": 2.96384463281419e-05, |
|
"loss": 0.0711, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.3394455313682556, |
|
"learning_rate": 2.9632317597948968e-05, |
|
"loss": 0.0704, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.296131432056427, |
|
"learning_rate": 2.9626138003472884e-05, |
|
"loss": 0.0726, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.37837737798690796, |
|
"learning_rate": 2.9619907566194915e-05, |
|
"loss": 0.092, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.7398345470428467, |
|
"learning_rate": 2.9613626307773055e-05, |
|
"loss": 0.1055, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.18485528230667114, |
|
"learning_rate": 2.9607294250041965e-05, |
|
"loss": 0.0615, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.4245338439941406, |
|
"learning_rate": 2.96009114150129e-05, |
|
"loss": 0.0938, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.25269758701324463, |
|
"learning_rate": 2.959447782487361e-05, |
|
"loss": 0.0686, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.1531674861907959, |
|
"learning_rate": 2.9587993501988292e-05, |
|
"loss": 0.0515, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.1748104691505432, |
|
"learning_rate": 2.958145846889749e-05, |
|
"loss": 0.0841, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.16989848017692566, |
|
"learning_rate": 2.957487274831803e-05, |
|
"loss": 0.0822, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.16533434391021729, |
|
"learning_rate": 2.9568236363142927e-05, |
|
"loss": 0.0676, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.12993450462818146, |
|
"learning_rate": 2.9561549336441333e-05, |
|
"loss": 0.0837, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.23538929224014282, |
|
"learning_rate": 2.955481169145841e-05, |
|
"loss": 0.0784, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.1557569056749344, |
|
"learning_rate": 2.95480234516153e-05, |
|
"loss": 0.0788, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"eval_loss": 0.08073563128709793, |
|
"eval_runtime": 14.7663, |
|
"eval_samples_per_second": 32.236, |
|
"eval_steps_per_second": 8.059, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.217232346534729, |
|
"learning_rate": 2.9541184640509015e-05, |
|
"loss": 0.0598, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.2527540326118469, |
|
"learning_rate": 2.953429528191236e-05, |
|
"loss": 0.0604, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.1690036803483963, |
|
"learning_rate": 2.9527355399773847e-05, |
|
"loss": 0.0815, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.15807238221168518, |
|
"learning_rate": 2.9520365018217622e-05, |
|
"loss": 0.0695, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.12181195616722107, |
|
"learning_rate": 2.951332416154337e-05, |
|
"loss": 0.0869, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.27408546209335327, |
|
"learning_rate": 2.9506232854226242e-05, |
|
"loss": 0.0725, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.1542072892189026, |
|
"learning_rate": 2.9499091120916757e-05, |
|
"loss": 0.0616, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.31103819608688354, |
|
"learning_rate": 2.9491898986440726e-05, |
|
"loss": 0.0791, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.15201374888420105, |
|
"learning_rate": 2.9484656475799164e-05, |
|
"loss": 0.0681, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.33369091153144836, |
|
"learning_rate": 2.9477363614168197e-05, |
|
"loss": 0.0673, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.1663801521062851, |
|
"learning_rate": 2.9470020426898983e-05, |
|
"loss": 0.0514, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.3342381715774536, |
|
"learning_rate": 2.946262693951762e-05, |
|
"loss": 0.0824, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.15362265706062317, |
|
"learning_rate": 2.9455183177725055e-05, |
|
"loss": 0.0801, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.1435208022594452, |
|
"learning_rate": 2.9447689167397e-05, |
|
"loss": 0.0625, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.23910465836524963, |
|
"learning_rate": 2.944014493458383e-05, |
|
"loss": 0.0977, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.15610359609127045, |
|
"learning_rate": 2.9432550505510516e-05, |
|
"loss": 0.0329, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.23283672332763672, |
|
"learning_rate": 2.942490590657651e-05, |
|
"loss": 0.1056, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.21941441297531128, |
|
"learning_rate": 2.9417211164355668e-05, |
|
"loss": 0.0765, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.16233329474925995, |
|
"learning_rate": 2.9409466305596135e-05, |
|
"loss": 0.056, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.22437363862991333, |
|
"learning_rate": 2.9401671357220297e-05, |
|
"loss": 0.0845, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.24630329012870789, |
|
"learning_rate": 2.9393826346324634e-05, |
|
"loss": 0.0588, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.17304874956607819, |
|
"learning_rate": 2.9385931300179675e-05, |
|
"loss": 0.0582, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.35216909646987915, |
|
"learning_rate": 2.9377986246229853e-05, |
|
"loss": 0.1085, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.13712701201438904, |
|
"learning_rate": 2.9369991212093462e-05, |
|
"loss": 0.0451, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.15788517892360687, |
|
"learning_rate": 2.9361946225562516e-05, |
|
"loss": 0.0572, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.15666936337947845, |
|
"learning_rate": 2.9353851314602676e-05, |
|
"loss": 0.0528, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.17414148151874542, |
|
"learning_rate": 2.9345706507353158e-05, |
|
"loss": 0.0548, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.2488938271999359, |
|
"learning_rate": 2.9337511832126616e-05, |
|
"loss": 0.0904, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.2884049415588379, |
|
"learning_rate": 2.9329267317409053e-05, |
|
"loss": 0.0721, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.3515964150428772, |
|
"learning_rate": 2.9320972991859728e-05, |
|
"loss": 0.1108, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.49220752716064453, |
|
"learning_rate": 2.9312628884311048e-05, |
|
"loss": 0.1056, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.1283823549747467, |
|
"learning_rate": 2.9304235023768465e-05, |
|
"loss": 0.062, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.20535992085933685, |
|
"learning_rate": 2.9295791439410387e-05, |
|
"loss": 0.0591, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.548753559589386, |
|
"learning_rate": 2.9287298160588073e-05, |
|
"loss": 0.1252, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.17065873742103577, |
|
"learning_rate": 2.927875521682551e-05, |
|
"loss": 0.0717, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.15028174221515656, |
|
"learning_rate": 2.9270162637819352e-05, |
|
"loss": 0.059, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.3822297751903534, |
|
"learning_rate": 2.9261520453438775e-05, |
|
"loss": 0.0752, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.17701248824596405, |
|
"learning_rate": 2.9252828693725404e-05, |
|
"loss": 0.0872, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.1596059501171112, |
|
"learning_rate": 2.9244087388893187e-05, |
|
"loss": 0.051, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.13807597756385803, |
|
"learning_rate": 2.9235296569328303e-05, |
|
"loss": 0.0518, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.14610590040683746, |
|
"learning_rate": 2.922645626558905e-05, |
|
"loss": 0.08, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.13961593806743622, |
|
"learning_rate": 2.921756650840574e-05, |
|
"loss": 0.0496, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.12945222854614258, |
|
"learning_rate": 2.92086273286806e-05, |
|
"loss": 0.0725, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.16739709675312042, |
|
"learning_rate": 2.9199638757487648e-05, |
|
"loss": 0.0514, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.1626630425453186, |
|
"learning_rate": 2.9190600826072603e-05, |
|
"loss": 0.0506, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.17024777829647064, |
|
"learning_rate": 2.918151356585276e-05, |
|
"loss": 0.0646, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.12996181845664978, |
|
"learning_rate": 2.9172377008416898e-05, |
|
"loss": 0.0421, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.1544611155986786, |
|
"learning_rate": 2.916319118552515e-05, |
|
"loss": 0.0665, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.37734368443489075, |
|
"learning_rate": 2.9153956129108918e-05, |
|
"loss": 0.0851, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.28739050030708313, |
|
"learning_rate": 2.9144671871270734e-05, |
|
"loss": 0.0786, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.2730344533920288, |
|
"learning_rate": 2.913533844428417e-05, |
|
"loss": 0.0923, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.26277613639831543, |
|
"learning_rate": 2.912595588059371e-05, |
|
"loss": 0.0737, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.15028347074985504, |
|
"learning_rate": 2.9116524212814653e-05, |
|
"loss": 0.0528, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.5112478733062744, |
|
"learning_rate": 2.9107043473733e-05, |
|
"loss": 0.0744, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.1651376336812973, |
|
"learning_rate": 2.9097513696305304e-05, |
|
"loss": 0.0421, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.18412701785564423, |
|
"learning_rate": 2.908793491365861e-05, |
|
"loss": 0.0602, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.26233047246932983, |
|
"learning_rate": 2.90783071590903e-05, |
|
"loss": 0.086, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.17222779989242554, |
|
"learning_rate": 2.9068630466067997e-05, |
|
"loss": 0.0634, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.4317842125892639, |
|
"learning_rate": 2.905890486822943e-05, |
|
"loss": 0.0904, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.1886034905910492, |
|
"learning_rate": 2.9049130399382345e-05, |
|
"loss": 0.0699, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.21989533305168152, |
|
"learning_rate": 2.903930709350436e-05, |
|
"loss": 0.0623, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.19172674417495728, |
|
"learning_rate": 2.9029434984742866e-05, |
|
"loss": 0.0956, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.20230571925640106, |
|
"learning_rate": 2.9019514107414888e-05, |
|
"loss": 0.0865, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.258619487285614, |
|
"learning_rate": 2.9009544496006998e-05, |
|
"loss": 0.077, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.37927359342575073, |
|
"learning_rate": 2.8999526185175155e-05, |
|
"loss": 0.0955, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.1734340786933899, |
|
"learning_rate": 2.898945920974462e-05, |
|
"loss": 0.0698, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.2214702069759369, |
|
"learning_rate": 2.8979343604709818e-05, |
|
"loss": 0.0656, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.13421665132045746, |
|
"learning_rate": 2.8969179405234202e-05, |
|
"loss": 0.0801, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.1412036120891571, |
|
"learning_rate": 2.8958966646650172e-05, |
|
"loss": 0.0413, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.14668437838554382, |
|
"learning_rate": 2.894870536445891e-05, |
|
"loss": 0.067, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.17726927995681763, |
|
"learning_rate": 2.893839559433028e-05, |
|
"loss": 0.0572, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.1916431039571762, |
|
"learning_rate": 2.8928037372102698e-05, |
|
"loss": 0.0761, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.16705051064491272, |
|
"learning_rate": 2.8917630733783004e-05, |
|
"loss": 0.0604, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.17496813833713531, |
|
"learning_rate": 2.8907175715546337e-05, |
|
"loss": 0.0705, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.15736040472984314, |
|
"learning_rate": 2.889667235373603e-05, |
|
"loss": 0.0305, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.274248868227005, |
|
"learning_rate": 2.888612068486344e-05, |
|
"loss": 0.0783, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.24461400508880615, |
|
"learning_rate": 2.887552074560787e-05, |
|
"loss": 0.0643, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.20980973541736603, |
|
"learning_rate": 2.8864872572816407e-05, |
|
"loss": 0.0625, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.20946985483169556, |
|
"learning_rate": 2.885417620350381e-05, |
|
"loss": 0.0834, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.16951480507850647, |
|
"learning_rate": 2.8843431674852366e-05, |
|
"loss": 0.0528, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.1708109974861145, |
|
"learning_rate": 2.883263902421179e-05, |
|
"loss": 0.0728, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.14461833238601685, |
|
"learning_rate": 2.8821798289099054e-05, |
|
"loss": 0.0421, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.24623633921146393, |
|
"learning_rate": 2.881090950719831e-05, |
|
"loss": 0.0854, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.21715879440307617, |
|
"learning_rate": 2.87999727163607e-05, |
|
"loss": 0.0384, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4943262040615082, |
|
"learning_rate": 2.878898795460426e-05, |
|
"loss": 0.0727, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.21625903248786926, |
|
"learning_rate": 2.8777955260113794e-05, |
|
"loss": 0.0764, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.49219760298728943, |
|
"learning_rate": 2.8766874671240708e-05, |
|
"loss": 0.0645, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.48366281390190125, |
|
"learning_rate": 2.8755746226502914e-05, |
|
"loss": 0.0812, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.23804642260074615, |
|
"learning_rate": 2.874456996458467e-05, |
|
"loss": 0.0412, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.15954728424549103, |
|
"learning_rate": 2.8733345924336448e-05, |
|
"loss": 0.0319, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.1840825229883194, |
|
"learning_rate": 2.872207414477482e-05, |
|
"loss": 0.0391, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.34154170751571655, |
|
"learning_rate": 2.8710754665082295e-05, |
|
"loss": 0.0871, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.28852567076683044, |
|
"learning_rate": 2.8699387524607206e-05, |
|
"loss": 0.0743, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.3130442798137665, |
|
"learning_rate": 2.868797276286355e-05, |
|
"loss": 0.0596, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.15511904656887054, |
|
"learning_rate": 2.8676510419530875e-05, |
|
"loss": 0.0345, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.3511506915092468, |
|
"learning_rate": 2.866500053445412e-05, |
|
"loss": 0.0795, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.21553099155426025, |
|
"learning_rate": 2.86534431476435e-05, |
|
"loss": 0.0685, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.2557514011859894, |
|
"learning_rate": 2.8641838299274336e-05, |
|
"loss": 0.0386, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.22085203230381012, |
|
"learning_rate": 2.863018602968695e-05, |
|
"loss": 0.0741, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.3036896884441376, |
|
"learning_rate": 2.8618486379386496e-05, |
|
"loss": 0.0553, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.24089917540550232, |
|
"learning_rate": 2.8606739389042838e-05, |
|
"loss": 0.0699, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.3548223376274109, |
|
"learning_rate": 2.8594945099490395e-05, |
|
"loss": 0.0573, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.288339763879776, |
|
"learning_rate": 2.8583103551728008e-05, |
|
"loss": 0.0592, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.3822185695171356, |
|
"learning_rate": 2.857121478691881e-05, |
|
"loss": 0.0788, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.3023355305194855, |
|
"learning_rate": 2.855927884639004e-05, |
|
"loss": 0.08, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.15000642836093903, |
|
"learning_rate": 2.854729577163294e-05, |
|
"loss": 0.053, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.38528746366500854, |
|
"learning_rate": 2.8535265604302614e-05, |
|
"loss": 0.132, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.3799440562725067, |
|
"learning_rate": 2.852318838621784e-05, |
|
"loss": 0.0824, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.1958758383989334, |
|
"learning_rate": 2.851106415936098e-05, |
|
"loss": 0.0501, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.47345441579818726, |
|
"learning_rate": 2.8498892965877776e-05, |
|
"loss": 0.076, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4306192398071289, |
|
"learning_rate": 2.848667484807726e-05, |
|
"loss": 0.0907, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.15393877029418945, |
|
"learning_rate": 2.8474409848431562e-05, |
|
"loss": 0.0471, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.1548716276884079, |
|
"learning_rate": 2.8462098009575793e-05, |
|
"loss": 0.0545, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.2719079256057739, |
|
"learning_rate": 2.8449739374307877e-05, |
|
"loss": 0.0712, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.14566250145435333, |
|
"learning_rate": 2.8437333985588418e-05, |
|
"loss": 0.0491, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.17003220319747925, |
|
"learning_rate": 2.8424881886540527e-05, |
|
"loss": 0.078, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.16778719425201416, |
|
"learning_rate": 2.8412383120449707e-05, |
|
"loss": 0.0688, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2298566699028015, |
|
"learning_rate": 2.839983773076367e-05, |
|
"loss": 0.0654, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.14038778841495514, |
|
"learning_rate": 2.8387245761092203e-05, |
|
"loss": 0.0656, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2299400120973587, |
|
"learning_rate": 2.8374607255207012e-05, |
|
"loss": 0.0563, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.12228001654148102, |
|
"learning_rate": 2.8361922257041577e-05, |
|
"loss": 0.0426, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.2170531004667282, |
|
"learning_rate": 2.8349190810690977e-05, |
|
"loss": 0.0739, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.25972139835357666, |
|
"learning_rate": 2.8336412960411765e-05, |
|
"loss": 0.0449, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.16110770404338837, |
|
"learning_rate": 2.8323588750621802e-05, |
|
"loss": 0.0429, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.36092159152030945, |
|
"learning_rate": 2.8310718225900095e-05, |
|
"loss": 0.0532, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.13183802366256714, |
|
"learning_rate": 2.8297801430986652e-05, |
|
"loss": 0.0306, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1969520002603531, |
|
"learning_rate": 2.8284838410782327e-05, |
|
"loss": 0.0729, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.3288170397281647, |
|
"learning_rate": 2.8271829210348657e-05, |
|
"loss": 0.0657, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.17383714020252228, |
|
"learning_rate": 2.82587738749077e-05, |
|
"loss": 0.0296, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.3440845310688019, |
|
"learning_rate": 2.824567244984192e-05, |
|
"loss": 0.055, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5535409450531006, |
|
"learning_rate": 2.8232524980693947e-05, |
|
"loss": 0.0845, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.3418692946434021, |
|
"learning_rate": 2.8219331513166503e-05, |
|
"loss": 0.0501, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.24258548021316528, |
|
"learning_rate": 2.8206092093122195e-05, |
|
"loss": 0.0572, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.2323545515537262, |
|
"learning_rate": 2.8192806766583373e-05, |
|
"loss": 0.036, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.18079668283462524, |
|
"learning_rate": 2.8179475579731967e-05, |
|
"loss": 0.039, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.6126664280891418, |
|
"learning_rate": 2.8166098578909315e-05, |
|
"loss": 0.0964, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.29580938816070557, |
|
"learning_rate": 2.815267581061602e-05, |
|
"loss": 0.0682, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.25424325466156006, |
|
"learning_rate": 2.8139207321511778e-05, |
|
"loss": 0.0682, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.20733873546123505, |
|
"learning_rate": 2.8125693158415217e-05, |
|
"loss": 0.0369, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.28711849451065063, |
|
"learning_rate": 2.8112133368303737e-05, |
|
"loss": 0.0734, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.18627013266086578, |
|
"learning_rate": 2.809852799831334e-05, |
|
"loss": 0.052, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.3224278688430786, |
|
"learning_rate": 2.8084877095738477e-05, |
|
"loss": 0.0973, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"eval_loss": 0.06570792198181152, |
|
"eval_runtime": 14.7272, |
|
"eval_samples_per_second": 32.321, |
|
"eval_steps_per_second": 8.08, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.38838616013526917, |
|
"learning_rate": 2.8071180708031874e-05, |
|
"loss": 0.0916, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.24502405524253845, |
|
"learning_rate": 2.8057438882804372e-05, |
|
"loss": 0.0816, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.4185783863067627, |
|
"learning_rate": 2.8043651667824767e-05, |
|
"loss": 0.0776, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2056495100259781, |
|
"learning_rate": 2.8029819111019618e-05, |
|
"loss": 0.0266, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.19617842137813568, |
|
"learning_rate": 2.8015941260473117e-05, |
|
"loss": 0.0456, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.3548401892185211, |
|
"learning_rate": 2.8002018164426896e-05, |
|
"loss": 0.0507, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2668496370315552, |
|
"learning_rate": 2.798804987127988e-05, |
|
"loss": 0.0539, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.2864347994327545, |
|
"learning_rate": 2.7974036429588082e-05, |
|
"loss": 0.076, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.26988208293914795, |
|
"learning_rate": 2.7959977888064484e-05, |
|
"loss": 0.0599, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.451043039560318, |
|
"learning_rate": 2.7945874295578827e-05, |
|
"loss": 0.0676, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.2092967927455902, |
|
"learning_rate": 2.7931725701157462e-05, |
|
"loss": 0.0653, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.25097522139549255, |
|
"learning_rate": 2.7917532153983176e-05, |
|
"loss": 0.086, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1981886476278305, |
|
"learning_rate": 2.790329370339501e-05, |
|
"loss": 0.0616, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.20687969028949738, |
|
"learning_rate": 2.7889010398888104e-05, |
|
"loss": 0.0479, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.16657814383506775, |
|
"learning_rate": 2.7874682290113514e-05, |
|
"loss": 0.0432, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1697748750448227, |
|
"learning_rate": 2.786030942687805e-05, |
|
"loss": 0.0624, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.17153337597846985, |
|
"learning_rate": 2.7845891859144088e-05, |
|
"loss": 0.035, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.20956017076969147, |
|
"learning_rate": 2.7831429637029402e-05, |
|
"loss": 0.0468, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.1893073171377182, |
|
"learning_rate": 2.7816922810807e-05, |
|
"loss": 0.0783, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.24053029716014862, |
|
"learning_rate": 2.7802371430904936e-05, |
|
"loss": 0.0534, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.15913282334804535, |
|
"learning_rate": 2.7787775547906142e-05, |
|
"loss": 0.0478, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.3303963840007782, |
|
"learning_rate": 2.7773135212548247e-05, |
|
"loss": 0.0698, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.2255176603794098, |
|
"learning_rate": 2.7758450475723405e-05, |
|
"loss": 0.0345, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.20534120500087738, |
|
"learning_rate": 2.774372138847812e-05, |
|
"loss": 0.0668, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.25963813066482544, |
|
"learning_rate": 2.7728948002013054e-05, |
|
"loss": 0.0741, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.2108840048313141, |
|
"learning_rate": 2.771413036768288e-05, |
|
"loss": 0.0691, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.18770939111709595, |
|
"learning_rate": 2.769926853699606e-05, |
|
"loss": 0.0701, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.1566968858242035, |
|
"learning_rate": 2.7684362561614714e-05, |
|
"loss": 0.0196, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.3223302364349365, |
|
"learning_rate": 2.766941249335439e-05, |
|
"loss": 0.0752, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.21899370849132538, |
|
"learning_rate": 2.765441838418393e-05, |
|
"loss": 0.0703, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.21244989335536957, |
|
"learning_rate": 2.7639380286225264e-05, |
|
"loss": 0.0672, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.19558708369731903, |
|
"learning_rate": 2.7624298251753232e-05, |
|
"loss": 0.042, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.2814396917819977, |
|
"learning_rate": 2.7609172333195406e-05, |
|
"loss": 0.0434, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.2828751504421234, |
|
"learning_rate": 2.75940025831319e-05, |
|
"loss": 0.047, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.18190258741378784, |
|
"learning_rate": 2.7578789054295202e-05, |
|
"loss": 0.0619, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.16573399305343628, |
|
"learning_rate": 2.7563531799569984e-05, |
|
"loss": 0.0681, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.23267892003059387, |
|
"learning_rate": 2.75482308719929e-05, |
|
"loss": 0.0518, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.21713323891162872, |
|
"learning_rate": 2.753288632475244e-05, |
|
"loss": 0.0437, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.26459431648254395, |
|
"learning_rate": 2.75174982111887e-05, |
|
"loss": 0.0616, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.5927810668945312, |
|
"learning_rate": 2.7502066584793243e-05, |
|
"loss": 0.0931, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.32426661252975464, |
|
"learning_rate": 2.7486591499208867e-05, |
|
"loss": 0.08, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.18653956055641174, |
|
"learning_rate": 2.7471073008229462e-05, |
|
"loss": 0.0679, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.3730272948741913, |
|
"learning_rate": 2.7455511165799783e-05, |
|
"loss": 0.0399, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.2434634417295456, |
|
"learning_rate": 2.7439906026015297e-05, |
|
"loss": 0.0608, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.27506130933761597, |
|
"learning_rate": 2.742425764312197e-05, |
|
"loss": 0.0586, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.2162889540195465, |
|
"learning_rate": 2.7408566071516087e-05, |
|
"loss": 0.0587, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.2727173864841461, |
|
"learning_rate": 2.7392831365744074e-05, |
|
"loss": 0.0774, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.2565726339817047, |
|
"learning_rate": 2.7377053580502298e-05, |
|
"loss": 0.0961, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.2756386995315552, |
|
"learning_rate": 2.7361232770636856e-05, |
|
"loss": 0.0465, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.30874213576316833, |
|
"learning_rate": 2.7345368991143433e-05, |
|
"loss": 0.0908, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.21297310292720795, |
|
"learning_rate": 2.732946229716707e-05, |
|
"loss": 0.0822, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.21286709606647491, |
|
"learning_rate": 2.7313512744001982e-05, |
|
"loss": 0.0916, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.18058332800865173, |
|
"learning_rate": 2.7297520387091376e-05, |
|
"loss": 0.0459, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.1574922353029251, |
|
"learning_rate": 2.7281485282027252e-05, |
|
"loss": 0.048, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.1783793717622757, |
|
"learning_rate": 2.7265407484550206e-05, |
|
"loss": 0.0733, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.17018386721611023, |
|
"learning_rate": 2.724928705054924e-05, |
|
"loss": 0.064, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.12130982428789139, |
|
"learning_rate": 2.7233124036061575e-05, |
|
"loss": 0.0603, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.19916939735412598, |
|
"learning_rate": 2.7216918497272426e-05, |
|
"loss": 0.0885, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.23629699647426605, |
|
"learning_rate": 2.7200670490514865e-05, |
|
"loss": 0.0696, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.1870480626821518, |
|
"learning_rate": 2.7184380072269558e-05, |
|
"loss": 0.0797, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.22087708115577698, |
|
"learning_rate": 2.7168047299164614e-05, |
|
"loss": 0.0755, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.17880262434482574, |
|
"learning_rate": 2.7151672227975377e-05, |
|
"loss": 0.0582, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.15626394748687744, |
|
"learning_rate": 2.7135254915624213e-05, |
|
"loss": 0.072, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.11785972118377686, |
|
"learning_rate": 2.711879541918034e-05, |
|
"loss": 0.0515, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.13664838671684265, |
|
"learning_rate": 2.71022937958596e-05, |
|
"loss": 0.0542, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.22781230509281158, |
|
"learning_rate": 2.7085750103024296e-05, |
|
"loss": 0.1037, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.1862461417913437, |
|
"learning_rate": 2.7069164398182948e-05, |
|
"loss": 0.0617, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.315283864736557, |
|
"learning_rate": 2.7052536738990125e-05, |
|
"loss": 0.0756, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.22472089529037476, |
|
"learning_rate": 2.7035867183246247e-05, |
|
"loss": 0.0792, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.1294025033712387, |
|
"learning_rate": 2.7019155788897357e-05, |
|
"loss": 0.0738, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.2340669184923172, |
|
"learning_rate": 2.700240261403494e-05, |
|
"loss": 0.0925, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.27781906723976135, |
|
"learning_rate": 2.6985607716895727e-05, |
|
"loss": 0.0793, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.18572118878364563, |
|
"learning_rate": 2.6968771155861464e-05, |
|
"loss": 0.065, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.2246127426624298, |
|
"learning_rate": 2.695189298945875e-05, |
|
"loss": 0.0825, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.23202070593833923, |
|
"learning_rate": 2.6934973276358792e-05, |
|
"loss": 0.0598, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.16124321520328522, |
|
"learning_rate": 2.6918012075377226e-05, |
|
"loss": 0.0825, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.15348608791828156, |
|
"learning_rate": 2.6901009445473912e-05, |
|
"loss": 0.0442, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.18639832735061646, |
|
"learning_rate": 2.6883965445752714e-05, |
|
"loss": 0.0699, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.2920565605163574, |
|
"learning_rate": 2.6866880135461314e-05, |
|
"loss": 0.0798, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.16075679659843445, |
|
"learning_rate": 2.684975357399099e-05, |
|
"loss": 0.0691, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.17678283154964447, |
|
"learning_rate": 2.683258582087641e-05, |
|
"loss": 0.0386, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.4795132875442505, |
|
"learning_rate": 2.681537693579545e-05, |
|
"loss": 0.0988, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.18269479274749756, |
|
"learning_rate": 2.6798126978568942e-05, |
|
"loss": 0.074, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.2764766812324524, |
|
"learning_rate": 2.6780836009160514e-05, |
|
"loss": 0.0565, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.19089283049106598, |
|
"learning_rate": 2.6763504087676346e-05, |
|
"loss": 0.0389, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.15806300938129425, |
|
"learning_rate": 2.674613127436498e-05, |
|
"loss": 0.0482, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.2809430658817291, |
|
"learning_rate": 2.6728717629617093e-05, |
|
"loss": 0.0429, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.12930545210838318, |
|
"learning_rate": 2.671126321396532e-05, |
|
"loss": 0.0255, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.17408722639083862, |
|
"learning_rate": 2.6693768088083994e-05, |
|
"loss": 0.0703, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.3387722671031952, |
|
"learning_rate": 2.6676232312788998e-05, |
|
"loss": 0.0569, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.25352975726127625, |
|
"learning_rate": 2.6658655949037482e-05, |
|
"loss": 0.0914, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.3007981777191162, |
|
"learning_rate": 2.6641039057927724e-05, |
|
"loss": 0.0502, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.20817789435386658, |
|
"learning_rate": 2.662338170069885e-05, |
|
"loss": 0.0408, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.15569907426834106, |
|
"learning_rate": 2.6605683938730666e-05, |
|
"loss": 0.029, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.25259971618652344, |
|
"learning_rate": 2.6587945833543432e-05, |
|
"loss": 0.0336, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.13589264452457428, |
|
"learning_rate": 2.6570167446797657e-05, |
|
"loss": 0.0218, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.34168577194213867, |
|
"learning_rate": 2.6552348840293856e-05, |
|
"loss": 0.0882, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.26590269804000854, |
|
"learning_rate": 2.6534490075972368e-05, |
|
"loss": 0.0576, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.2801959812641144, |
|
"learning_rate": 2.6516591215913118e-05, |
|
"loss": 0.0586, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.3427751660346985, |
|
"learning_rate": 2.6498652322335416e-05, |
|
"loss": 0.0575, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.5352123975753784, |
|
"learning_rate": 2.6480673457597737e-05, |
|
"loss": 0.0915, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.45933809876441956, |
|
"learning_rate": 2.646265468419749e-05, |
|
"loss": 0.0865, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.14740170538425446, |
|
"learning_rate": 2.6444596064770837e-05, |
|
"loss": 0.0447, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.18046963214874268, |
|
"learning_rate": 2.6426497662092424e-05, |
|
"loss": 0.0302, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2271748036146164, |
|
"learning_rate": 2.6408359539075204e-05, |
|
"loss": 0.067, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.345716267824173, |
|
"learning_rate": 2.6390181758770208e-05, |
|
"loss": 0.0461, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2197301983833313, |
|
"learning_rate": 2.6371964384366305e-05, |
|
"loss": 0.0252, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2628111243247986, |
|
"learning_rate": 2.6353707479190022e-05, |
|
"loss": 0.0616, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.41908586025238037, |
|
"learning_rate": 2.6335411106705283e-05, |
|
"loss": 0.0599, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.13220524787902832, |
|
"learning_rate": 2.6317075330513212e-05, |
|
"loss": 0.0116, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.21840068697929382, |
|
"learning_rate": 2.6298700214351922e-05, |
|
"loss": 0.0413, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.3715525269508362, |
|
"learning_rate": 2.628028582209625e-05, |
|
"loss": 0.0643, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.21229352056980133, |
|
"learning_rate": 2.626183221775758e-05, |
|
"loss": 0.0369, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.3171239495277405, |
|
"learning_rate": 2.6243339465483605e-05, |
|
"loss": 0.0712, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.33259809017181396, |
|
"learning_rate": 2.6224807629558094e-05, |
|
"loss": 0.1066, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.14465388655662537, |
|
"learning_rate": 2.6206236774400684e-05, |
|
"loss": 0.034, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5533297061920166, |
|
"learning_rate": 2.6187626964566644e-05, |
|
"loss": 0.1046, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.17661869525909424, |
|
"learning_rate": 2.6168978264746663e-05, |
|
"loss": 0.0322, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.19023488461971283, |
|
"learning_rate": 2.615029073976661e-05, |
|
"loss": 0.0493, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.1495324969291687, |
|
"learning_rate": 2.6131564454587316e-05, |
|
"loss": 0.0404, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.20712687075138092, |
|
"learning_rate": 2.611279947430436e-05, |
|
"loss": 0.0472, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.233810156583786, |
|
"learning_rate": 2.609399586414782e-05, |
|
"loss": 0.0424, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.14824284613132477, |
|
"learning_rate": 2.607515368948206e-05, |
|
"loss": 0.0318, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.3361953794956207, |
|
"learning_rate": 2.60562730158055e-05, |
|
"loss": 0.0763, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.2170048952102661, |
|
"learning_rate": 2.6037353908750394e-05, |
|
"loss": 0.0643, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.18919886648654938, |
|
"learning_rate": 2.601839643408259e-05, |
|
"loss": 0.0583, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.42277276515960693, |
|
"learning_rate": 2.5999400657701314e-05, |
|
"loss": 0.0698, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.25986483693122864, |
|
"learning_rate": 2.598036664563893e-05, |
|
"loss": 0.041, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.2869338095188141, |
|
"learning_rate": 2.596129446406072e-05, |
|
"loss": 0.0503, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.41842973232269287, |
|
"learning_rate": 2.594218417926464e-05, |
|
"loss": 0.0786, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.4030296504497528, |
|
"learning_rate": 2.592303585768111e-05, |
|
"loss": 0.067, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.31492769718170166, |
|
"learning_rate": 2.590384956587277e-05, |
|
"loss": 0.0479, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5827561020851135, |
|
"learning_rate": 2.5884625370534242e-05, |
|
"loss": 0.1315, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.30939584970474243, |
|
"learning_rate": 2.5865363338491916e-05, |
|
"loss": 0.0759, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.20597557723522186, |
|
"learning_rate": 2.5846063536703706e-05, |
|
"loss": 0.0739, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.31797170639038086, |
|
"learning_rate": 2.582672603225882e-05, |
|
"loss": 0.0892, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.4155197739601135, |
|
"learning_rate": 2.5807350892377517e-05, |
|
"loss": 0.13, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.14263832569122314, |
|
"learning_rate": 2.5787938184410902e-05, |
|
"loss": 0.0489, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.1489766389131546, |
|
"learning_rate": 2.5768487975840655e-05, |
|
"loss": 0.0638, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.30038881301879883, |
|
"learning_rate": 2.5749000334278825e-05, |
|
"loss": 0.0455, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.2359144389629364, |
|
"learning_rate": 2.572947532746758e-05, |
|
"loss": 0.0822, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.16958381235599518, |
|
"learning_rate": 2.570991302327897e-05, |
|
"loss": 0.0454, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"eval_loss": 0.0626850351691246, |
|
"eval_runtime": 14.7469, |
|
"eval_samples_per_second": 32.278, |
|
"eval_steps_per_second": 8.07, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.40241333842277527, |
|
"learning_rate": 2.569031348971471e-05, |
|
"loss": 0.0926, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.2126995176076889, |
|
"learning_rate": 2.5670676794905915e-05, |
|
"loss": 0.085, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1960797905921936, |
|
"learning_rate": 2.5651003007112892e-05, |
|
"loss": 0.0853, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.13784633576869965, |
|
"learning_rate": 2.5631292194724883e-05, |
|
"loss": 0.0453, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.14578142762184143, |
|
"learning_rate": 2.561154442625983e-05, |
|
"loss": 0.0765, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.14827878773212433, |
|
"learning_rate": 2.559175977036415e-05, |
|
"loss": 0.0594, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.19840463995933533, |
|
"learning_rate": 2.5571938295812476e-05, |
|
"loss": 0.0692, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.11538412421941757, |
|
"learning_rate": 2.555208007150743e-05, |
|
"loss": 0.067, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.3484029173851013, |
|
"learning_rate": 2.553218516647939e-05, |
|
"loss": 0.0799, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.20653250813484192, |
|
"learning_rate": 2.5512253649886237e-05, |
|
"loss": 0.0741, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.18202582001686096, |
|
"learning_rate": 2.5492285591013118e-05, |
|
"loss": 0.0636, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.17838889360427856, |
|
"learning_rate": 2.5472281059272213e-05, |
|
"loss": 0.067, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.2804567217826843, |
|
"learning_rate": 2.5452240124202477e-05, |
|
"loss": 0.0795, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1692652553319931, |
|
"learning_rate": 2.5432162855469422e-05, |
|
"loss": 0.0722, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1247120052576065, |
|
"learning_rate": 2.5412049322864847e-05, |
|
"loss": 0.0332, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1898394376039505, |
|
"learning_rate": 2.539189959630662e-05, |
|
"loss": 0.055, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.20923753082752228, |
|
"learning_rate": 2.537171374583843e-05, |
|
"loss": 0.0728, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.1551828235387802, |
|
"learning_rate": 2.535149184162952e-05, |
|
"loss": 0.0629, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.1762448400259018, |
|
"learning_rate": 2.533123395397448e-05, |
|
"loss": 0.0673, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.12544862926006317, |
|
"learning_rate": 2.5310940153292978e-05, |
|
"loss": 0.0311, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.15880614519119263, |
|
"learning_rate": 2.5290610510129518e-05, |
|
"loss": 0.0402, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.17256905138492584, |
|
"learning_rate": 2.5270245095153198e-05, |
|
"loss": 0.0633, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.1660386472940445, |
|
"learning_rate": 2.524984397915747e-05, |
|
"loss": 0.077, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.13268114626407623, |
|
"learning_rate": 2.5229407233059887e-05, |
|
"loss": 0.0426, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.2337772697210312, |
|
"learning_rate": 2.5208934927901857e-05, |
|
"loss": 0.0836, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.13446028530597687, |
|
"learning_rate": 2.5188427134848395e-05, |
|
"loss": 0.0382, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.18553531169891357, |
|
"learning_rate": 2.5167883925187878e-05, |
|
"loss": 0.0548, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.25128498673439026, |
|
"learning_rate": 2.51473053703318e-05, |
|
"loss": 0.0766, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1686338186264038, |
|
"learning_rate": 2.5126691541814518e-05, |
|
"loss": 0.0434, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.18022535741329193, |
|
"learning_rate": 2.510604251129301e-05, |
|
"loss": 0.0688, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.17681004106998444, |
|
"learning_rate": 2.5085358350546612e-05, |
|
"loss": 0.0533, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.2677585482597351, |
|
"learning_rate": 2.506463913147679e-05, |
|
"loss": 0.0818, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5509142875671387, |
|
"learning_rate": 2.5043884926106873e-05, |
|
"loss": 0.1005, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.14625947177410126, |
|
"learning_rate": 2.5023095806581802e-05, |
|
"loss": 0.0426, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.3046765923500061, |
|
"learning_rate": 2.5002271845167896e-05, |
|
"loss": 0.0515, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.16383983194828033, |
|
"learning_rate": 2.4981413114252588e-05, |
|
"loss": 0.0455, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.3099363148212433, |
|
"learning_rate": 2.4960519686344168e-05, |
|
"loss": 0.0924, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.25622445344924927, |
|
"learning_rate": 2.4939591634071544e-05, |
|
"loss": 0.0737, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.21525193750858307, |
|
"learning_rate": 2.491862903018398e-05, |
|
"loss": 0.0816, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.18544378876686096, |
|
"learning_rate": 2.4897631947550857e-05, |
|
"loss": 0.0453, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.15406207740306854, |
|
"learning_rate": 2.4876600459161397e-05, |
|
"loss": 0.0477, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.21517229080200195, |
|
"learning_rate": 2.4855534638124427e-05, |
|
"loss": 0.0467, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.27017998695373535, |
|
"learning_rate": 2.4834434557668126e-05, |
|
"loss": 0.0593, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.24762241542339325, |
|
"learning_rate": 2.4813300291139754e-05, |
|
"loss": 0.0587, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.29878756403923035, |
|
"learning_rate": 2.4792131912005407e-05, |
|
"loss": 0.0635, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.18735036253929138, |
|
"learning_rate": 2.4770929493849773e-05, |
|
"loss": 0.0312, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.27927571535110474, |
|
"learning_rate": 2.4749693110375856e-05, |
|
"loss": 0.0668, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.19749711453914642, |
|
"learning_rate": 2.4728422835404735e-05, |
|
"loss": 0.0321, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.12730471789836884, |
|
"learning_rate": 2.4707118742875293e-05, |
|
"loss": 0.0183, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.2711328864097595, |
|
"learning_rate": 2.4685780906843975e-05, |
|
"loss": 0.0677, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.26742562651634216, |
|
"learning_rate": 2.4664409401484522e-05, |
|
"loss": 0.0617, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5431042313575745, |
|
"learning_rate": 2.4643004301087716e-05, |
|
"loss": 0.1233, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.13498014211654663, |
|
"learning_rate": 2.462156568006112e-05, |
|
"loss": 0.0268, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.22139033675193787, |
|
"learning_rate": 2.4600093612928812e-05, |
|
"loss": 0.0493, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.15270984172821045, |
|
"learning_rate": 2.457858817433115e-05, |
|
"loss": 0.0255, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.158059224486351, |
|
"learning_rate": 2.4557049439024488e-05, |
|
"loss": 0.0574, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.17642827332019806, |
|
"learning_rate": 2.4535477481880923e-05, |
|
"loss": 0.0351, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.1525094211101532, |
|
"learning_rate": 2.451387237788804e-05, |
|
"loss": 0.0242, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.21704228222370148, |
|
"learning_rate": 2.4492234202148643e-05, |
|
"loss": 0.0582, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.2657334804534912, |
|
"learning_rate": 2.44705630298805e-05, |
|
"loss": 0.0458, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.23600801825523376, |
|
"learning_rate": 2.4448858936416093e-05, |
|
"loss": 0.0447, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.20092691481113434, |
|
"learning_rate": 2.442712199720232e-05, |
|
"loss": 0.0462, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.23618610203266144, |
|
"learning_rate": 2.4405352287800268e-05, |
|
"loss": 0.0383, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.18307435512542725, |
|
"learning_rate": 2.4383549883884954e-05, |
|
"loss": 0.0312, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.20053623616695404, |
|
"learning_rate": 2.4361714861245017e-05, |
|
"loss": 0.0352, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.23183472454547882, |
|
"learning_rate": 2.4339847295782508e-05, |
|
"loss": 0.0445, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.554999589920044, |
|
"learning_rate": 2.431794726351258e-05, |
|
"loss": 0.1143, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.3061026334762573, |
|
"learning_rate": 2.4296014840563266e-05, |
|
"loss": 0.0488, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.23472747206687927, |
|
"learning_rate": 2.4274050103175195e-05, |
|
"loss": 0.0549, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.36875736713409424, |
|
"learning_rate": 2.42520531277013e-05, |
|
"loss": 0.0472, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.3055475950241089, |
|
"learning_rate": 2.423002399060661e-05, |
|
"loss": 0.0485, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.33722659945487976, |
|
"learning_rate": 2.420796276846793e-05, |
|
"loss": 0.1004, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.3110818862915039, |
|
"learning_rate": 2.4185869537973613e-05, |
|
"loss": 0.0893, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.5243550539016724, |
|
"learning_rate": 2.4163744375923272e-05, |
|
"loss": 0.0742, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.18637774884700775, |
|
"learning_rate": 2.4141587359227514e-05, |
|
"loss": 0.0453, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.11755359172821045, |
|
"learning_rate": 2.4119398564907685e-05, |
|
"loss": 0.0236, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.145331472158432, |
|
"learning_rate": 2.4097178070095602e-05, |
|
"loss": 0.0432, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.1340416669845581, |
|
"learning_rate": 2.4074925952033263e-05, |
|
"loss": 0.0339, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.2009749859571457, |
|
"learning_rate": 2.4052642288072594e-05, |
|
"loss": 0.033, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.25946885347366333, |
|
"learning_rate": 2.4030327155675192e-05, |
|
"loss": 0.0618, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.338084876537323, |
|
"learning_rate": 2.4007980632412034e-05, |
|
"loss": 0.076, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.16105656325817108, |
|
"learning_rate": 2.398560279596323e-05, |
|
"loss": 0.055, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.2381068766117096, |
|
"learning_rate": 2.3963193724117715e-05, |
|
"loss": 0.0847, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.148463174700737, |
|
"learning_rate": 2.394075349477302e-05, |
|
"loss": 0.0404, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.359811007976532, |
|
"learning_rate": 2.3918282185934986e-05, |
|
"loss": 0.0508, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.2325233817100525, |
|
"learning_rate": 2.3895779875717486e-05, |
|
"loss": 0.0594, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.18048043549060822, |
|
"learning_rate": 2.3873246642342163e-05, |
|
"loss": 0.0267, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.14463412761688232, |
|
"learning_rate": 2.3850682564138145e-05, |
|
"loss": 0.0519, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.17916260659694672, |
|
"learning_rate": 2.3828087719541787e-05, |
|
"loss": 0.0535, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.2539409399032593, |
|
"learning_rate": 2.3805462187096402e-05, |
|
"loss": 0.0579, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.20096299052238464, |
|
"learning_rate": 2.3782806045451963e-05, |
|
"loss": 0.0581, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.15366317331790924, |
|
"learning_rate": 2.376011937336485e-05, |
|
"loss": 0.0392, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.28773725032806396, |
|
"learning_rate": 2.373740224969758e-05, |
|
"loss": 0.1054, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.18706980347633362, |
|
"learning_rate": 2.371465475341852e-05, |
|
"loss": 0.0422, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.263085275888443, |
|
"learning_rate": 2.369187696360161e-05, |
|
"loss": 0.0533, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.35978153347969055, |
|
"learning_rate": 2.3669068959426107e-05, |
|
"loss": 0.0871, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.3314432203769684, |
|
"learning_rate": 2.364623082017629e-05, |
|
"loss": 0.0481, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.19833628833293915, |
|
"learning_rate": 2.3623362625241193e-05, |
|
"loss": 0.0563, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.17452682554721832, |
|
"learning_rate": 2.3600464454114326e-05, |
|
"loss": 0.0464, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.24458369612693787, |
|
"learning_rate": 2.3577536386393416e-05, |
|
"loss": 0.0722, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.309350848197937, |
|
"learning_rate": 2.35545785017801e-05, |
|
"loss": 0.0794, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.42290210723876953, |
|
"learning_rate": 2.3531590880079663e-05, |
|
"loss": 0.0753, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.1993589699268341, |
|
"learning_rate": 2.3508573601200767e-05, |
|
"loss": 0.0589, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.1793728619813919, |
|
"learning_rate": 2.348552674515517e-05, |
|
"loss": 0.0587, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.1571635901927948, |
|
"learning_rate": 2.3462450392057437e-05, |
|
"loss": 0.0427, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.2197573184967041, |
|
"learning_rate": 2.343934462212467e-05, |
|
"loss": 0.0569, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.1468045711517334, |
|
"learning_rate": 2.341620951567624e-05, |
|
"loss": 0.0501, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.1460326462984085, |
|
"learning_rate": 2.339304515313348e-05, |
|
"loss": 0.0446, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.20360475778579712, |
|
"learning_rate": 2.3369851615019436e-05, |
|
"loss": 0.056, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.17234598100185394, |
|
"learning_rate": 2.3346628981958565e-05, |
|
"loss": 0.0493, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.21084345877170563, |
|
"learning_rate": 2.332337733467646e-05, |
|
"loss": 0.0781, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.15120622515678406, |
|
"learning_rate": 2.3300096753999585e-05, |
|
"loss": 0.0508, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.19097232818603516, |
|
"learning_rate": 2.3276787320854967e-05, |
|
"loss": 0.0558, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.14518237113952637, |
|
"learning_rate": 2.3253449116269937e-05, |
|
"loss": 0.0363, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.38005223870277405, |
|
"learning_rate": 2.3230082221371834e-05, |
|
"loss": 0.1022, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.15401460230350494, |
|
"learning_rate": 2.3206686717387742e-05, |
|
"loss": 0.0745, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.15603908896446228, |
|
"learning_rate": 2.3183262685644177e-05, |
|
"loss": 0.035, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.17525097727775574, |
|
"learning_rate": 2.3159810207566832e-05, |
|
"loss": 0.0584, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.17069534957408905, |
|
"learning_rate": 2.3136329364680288e-05, |
|
"loss": 0.0569, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.3221694231033325, |
|
"learning_rate": 2.3112820238607716e-05, |
|
"loss": 0.0878, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.2566680908203125, |
|
"learning_rate": 2.3089282911070613e-05, |
|
"loss": 0.0394, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.17850065231323242, |
|
"learning_rate": 2.3065717463888505e-05, |
|
"loss": 0.0351, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.25362351536750793, |
|
"learning_rate": 2.3042123978978665e-05, |
|
"loss": 0.0596, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.20955722033977509, |
|
"learning_rate": 2.3018502538355827e-05, |
|
"loss": 0.0534, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.19723302125930786, |
|
"learning_rate": 2.2994853224131915e-05, |
|
"loss": 0.0674, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.2044544219970703, |
|
"learning_rate": 2.2971176118515734e-05, |
|
"loss": 0.0505, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.2544882893562317, |
|
"learning_rate": 2.2947471303812708e-05, |
|
"loss": 0.0967, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.16680030524730682, |
|
"learning_rate": 2.2923738862424565e-05, |
|
"loss": 0.0395, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.27806153893470764, |
|
"learning_rate": 2.2899978876849084e-05, |
|
"loss": 0.0788, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.1830155849456787, |
|
"learning_rate": 2.287619142967979e-05, |
|
"loss": 0.0538, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.33155858516693115, |
|
"learning_rate": 2.285237660360566e-05, |
|
"loss": 0.0417, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2150736302137375, |
|
"learning_rate": 2.2828534481410847e-05, |
|
"loss": 0.0396, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2404957413673401, |
|
"learning_rate": 2.28046651459744e-05, |
|
"loss": 0.0554, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.22239123284816742, |
|
"learning_rate": 2.278076868026995e-05, |
|
"loss": 0.0592, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.24049235880374908, |
|
"learning_rate": 2.2756845167365452e-05, |
|
"loss": 0.0462, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.24881111085414886, |
|
"learning_rate": 2.273289469042287e-05, |
|
"loss": 0.0483, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.5261463522911072, |
|
"learning_rate": 2.2708917332697908e-05, |
|
"loss": 0.0748, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.3064204752445221, |
|
"learning_rate": 2.26849131775397e-05, |
|
"loss": 0.0532, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.6424417495727539, |
|
"learning_rate": 2.2660882308390547e-05, |
|
"loss": 0.1566, |
|
"step": 565 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1695, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 565, |
|
"total_flos": 5.277457624072192e+16, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |