{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.973856209150327, |
|
"eval_steps": 115, |
|
"global_step": 918, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.002178649237472767, |
|
"grad_norm": 0.4049851596355438, |
|
"learning_rate": 1e-05, |
|
"loss": 1.9628, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002178649237472767, |
|
"eval_loss": 1.915004849433899, |
|
"eval_runtime": 0.9396, |
|
"eval_samples_per_second": 181.996, |
|
"eval_steps_per_second": 13.836, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.004357298474945534, |
|
"grad_norm": 0.38593971729278564, |
|
"learning_rate": 2e-05, |
|
"loss": 2.0388, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.006535947712418301, |
|
"grad_norm": 0.38570573925971985, |
|
"learning_rate": 3e-05, |
|
"loss": 1.932, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.008714596949891068, |
|
"grad_norm": 0.35365748405456543, |
|
"learning_rate": 4e-05, |
|
"loss": 1.866, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.010893246187363835, |
|
"grad_norm": 0.4407881796360016, |
|
"learning_rate": 5e-05, |
|
"loss": 1.9959, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.013071895424836602, |
|
"grad_norm": 0.3517284095287323, |
|
"learning_rate": 6e-05, |
|
"loss": 1.8822, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.015250544662309368, |
|
"grad_norm": 0.45338165760040283, |
|
"learning_rate": 7e-05, |
|
"loss": 1.8933, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.017429193899782137, |
|
"grad_norm": 0.3475826680660248, |
|
"learning_rate": 8e-05, |
|
"loss": 1.8337, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0196078431372549, |
|
"grad_norm": 0.4348187744617462, |
|
"learning_rate": 9e-05, |
|
"loss": 1.9261, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02178649237472767, |
|
"grad_norm": 0.3879510164260864, |
|
"learning_rate": 0.0001, |
|
"loss": 1.7648, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.023965141612200435, |
|
"grad_norm": 0.3244905471801758, |
|
"learning_rate": 0.00011000000000000002, |
|
"loss": 1.7833, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.026143790849673203, |
|
"grad_norm": 0.3566845953464508, |
|
"learning_rate": 0.00012, |
|
"loss": 1.786, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02832244008714597, |
|
"grad_norm": 0.35482919216156006, |
|
"learning_rate": 0.00013000000000000002, |
|
"loss": 1.6593, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.030501089324618737, |
|
"grad_norm": 0.3647037744522095, |
|
"learning_rate": 0.00014, |
|
"loss": 1.696, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.032679738562091505, |
|
"grad_norm": 0.41642501950263977, |
|
"learning_rate": 0.00015000000000000001, |
|
"loss": 1.5964, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.034858387799564274, |
|
"grad_norm": 0.3616989254951477, |
|
"learning_rate": 0.00016, |
|
"loss": 1.7121, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.037037037037037035, |
|
"grad_norm": 0.39412033557891846, |
|
"learning_rate": 0.00017, |
|
"loss": 1.7392, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0392156862745098, |
|
"grad_norm": 0.3788229823112488, |
|
"learning_rate": 0.00018, |
|
"loss": 1.793, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.04139433551198257, |
|
"grad_norm": 0.3376384377479553, |
|
"learning_rate": 0.00019, |
|
"loss": 1.6145, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.04357298474945534, |
|
"grad_norm": 0.3674631714820862, |
|
"learning_rate": 0.0002, |
|
"loss": 1.6802, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0457516339869281, |
|
"grad_norm": 0.406034916639328, |
|
"learning_rate": 0.00019999985036335823, |
|
"loss": 1.7272, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04793028322440087, |
|
"grad_norm": 0.34544336795806885, |
|
"learning_rate": 0.00019999940145388063, |
|
"loss": 1.592, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.05010893246187364, |
|
"grad_norm": 0.4000962972640991, |
|
"learning_rate": 0.00019999865327291073, |
|
"loss": 1.6177, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.05228758169934641, |
|
"grad_norm": 0.36951783299446106, |
|
"learning_rate": 0.00019999760582268763, |
|
"loss": 1.6261, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.054466230936819175, |
|
"grad_norm": 0.3870888948440552, |
|
"learning_rate": 0.00019999625910634605, |
|
"loss": 1.544, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05664488017429194, |
|
"grad_norm": 0.4127906560897827, |
|
"learning_rate": 0.00019999461312791638, |
|
"loss": 1.5375, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.058823529411764705, |
|
"grad_norm": 0.43752938508987427, |
|
"learning_rate": 0.00019999266789232455, |
|
"loss": 1.4055, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.06100217864923747, |
|
"grad_norm": 0.44983282685279846, |
|
"learning_rate": 0.0001999904234053922, |
|
"loss": 1.5742, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.06318082788671024, |
|
"grad_norm": 0.4332844614982605, |
|
"learning_rate": 0.0001999878796738364, |
|
"loss": 1.5264, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.06535947712418301, |
|
"grad_norm": 0.4228737950325012, |
|
"learning_rate": 0.00019998503670526994, |
|
"loss": 1.4985, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06753812636165578, |
|
"grad_norm": 0.4225306808948517, |
|
"learning_rate": 0.000199981894508201, |
|
"loss": 1.447, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.06971677559912855, |
|
"grad_norm": 0.5055080056190491, |
|
"learning_rate": 0.00019997845309203334, |
|
"loss": 1.4575, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.0718954248366013, |
|
"grad_norm": 0.4757756292819977, |
|
"learning_rate": 0.0001999747124670662, |
|
"loss": 1.3472, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.07407407407407407, |
|
"grad_norm": 0.4340977966785431, |
|
"learning_rate": 0.00019997067264449433, |
|
"loss": 1.3273, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.07625272331154684, |
|
"grad_norm": 0.4556865692138672, |
|
"learning_rate": 0.0001999663336364078, |
|
"loss": 1.43, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.0784313725490196, |
|
"grad_norm": 0.5178071856498718, |
|
"learning_rate": 0.00019996169545579207, |
|
"loss": 1.3286, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.08061002178649238, |
|
"grad_norm": 0.5154844522476196, |
|
"learning_rate": 0.00019995675811652802, |
|
"loss": 1.2845, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.08278867102396514, |
|
"grad_norm": 0.5944285988807678, |
|
"learning_rate": 0.00019995152163339178, |
|
"loss": 1.4411, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.08496732026143791, |
|
"grad_norm": 0.5691947340965271, |
|
"learning_rate": 0.00019994598602205473, |
|
"loss": 1.3807, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.08714596949891068, |
|
"grad_norm": 0.575366199016571, |
|
"learning_rate": 0.00019994015129908346, |
|
"loss": 1.3347, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.08932461873638345, |
|
"grad_norm": 0.5233891010284424, |
|
"learning_rate": 0.00019993401748193978, |
|
"loss": 1.303, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.0915032679738562, |
|
"grad_norm": 0.5643051862716675, |
|
"learning_rate": 0.00019992758458898055, |
|
"loss": 1.2618, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.09368191721132897, |
|
"grad_norm": 0.6836549043655396, |
|
"learning_rate": 0.0001999208526394577, |
|
"loss": 1.3218, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.09586056644880174, |
|
"grad_norm": 0.6471132040023804, |
|
"learning_rate": 0.00019991382165351814, |
|
"loss": 1.1933, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.09803921568627451, |
|
"grad_norm": 0.5644765496253967, |
|
"learning_rate": 0.00019990649165220375, |
|
"loss": 1.1135, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.10021786492374728, |
|
"grad_norm": 0.7101904153823853, |
|
"learning_rate": 0.00019989886265745128, |
|
"loss": 1.1919, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.10239651416122005, |
|
"grad_norm": 0.706234872341156, |
|
"learning_rate": 0.00019989093469209224, |
|
"loss": 1.1607, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.10457516339869281, |
|
"grad_norm": 0.6854044795036316, |
|
"learning_rate": 0.00019988270777985292, |
|
"loss": 1.1441, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.10675381263616558, |
|
"grad_norm": 0.6608173251152039, |
|
"learning_rate": 0.00019987418194535427, |
|
"loss": 1.0626, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.10893246187363835, |
|
"grad_norm": 0.7540091276168823, |
|
"learning_rate": 0.00019986535721411186, |
|
"loss": 1.1346, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.1111111111111111, |
|
"grad_norm": 0.783423125743866, |
|
"learning_rate": 0.00019985623361253572, |
|
"loss": 1.2105, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.11328976034858387, |
|
"grad_norm": 0.7029076814651489, |
|
"learning_rate": 0.00019984681116793038, |
|
"loss": 0.9689, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.11546840958605664, |
|
"grad_norm": 0.8416129350662231, |
|
"learning_rate": 0.00019983708990849468, |
|
"loss": 1.1176, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.11764705882352941, |
|
"grad_norm": 0.7312731146812439, |
|
"learning_rate": 0.00019982706986332175, |
|
"loss": 1.1695, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.11982570806100218, |
|
"grad_norm": 0.8590166568756104, |
|
"learning_rate": 0.00019981675106239895, |
|
"loss": 1.016, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.12200435729847495, |
|
"grad_norm": 0.8634907603263855, |
|
"learning_rate": 0.00019980613353660763, |
|
"loss": 1.0777, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.12418300653594772, |
|
"grad_norm": 0.6608163714408875, |
|
"learning_rate": 0.00019979521731772323, |
|
"loss": 0.9661, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.12636165577342048, |
|
"grad_norm": 0.9486388564109802, |
|
"learning_rate": 0.00019978400243841508, |
|
"loss": 0.8715, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.12854030501089325, |
|
"grad_norm": 0.8431762456893921, |
|
"learning_rate": 0.00019977248893224636, |
|
"loss": 1.0458, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.13071895424836602, |
|
"grad_norm": 1.00847589969635, |
|
"learning_rate": 0.00019976067683367385, |
|
"loss": 0.9081, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1328976034858388, |
|
"grad_norm": 1.3647116422653198, |
|
"learning_rate": 0.00019974856617804807, |
|
"loss": 1.0181, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.13507625272331156, |
|
"grad_norm": 1.2597001791000366, |
|
"learning_rate": 0.0001997361570016129, |
|
"loss": 0.9373, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.13725490196078433, |
|
"grad_norm": 1.238145351409912, |
|
"learning_rate": 0.00019972344934150577, |
|
"loss": 0.9464, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.1394335511982571, |
|
"grad_norm": 1.114610195159912, |
|
"learning_rate": 0.00019971044323575728, |
|
"loss": 0.9163, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.14161220043572983, |
|
"grad_norm": 0.9760491847991943, |
|
"learning_rate": 0.0001996971387232912, |
|
"loss": 0.8424, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.1437908496732026, |
|
"grad_norm": 0.999609649181366, |
|
"learning_rate": 0.0001996835358439244, |
|
"loss": 0.8027, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.14596949891067537, |
|
"grad_norm": 0.9615645408630371, |
|
"learning_rate": 0.00019966963463836668, |
|
"loss": 0.9491, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.14814814814814814, |
|
"grad_norm": 0.9067331552505493, |
|
"learning_rate": 0.00019965543514822062, |
|
"loss": 0.9756, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.1503267973856209, |
|
"grad_norm": 1.0316940546035767, |
|
"learning_rate": 0.00019964093741598152, |
|
"loss": 0.7276, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.15250544662309368, |
|
"grad_norm": 0.7774396538734436, |
|
"learning_rate": 0.00019962614148503718, |
|
"loss": 0.8904, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.15468409586056645, |
|
"grad_norm": 1.0500309467315674, |
|
"learning_rate": 0.0001996110473996679, |
|
"loss": 0.8801, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.1568627450980392, |
|
"grad_norm": 0.8712791800498962, |
|
"learning_rate": 0.00019959565520504623, |
|
"loss": 0.992, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.15904139433551198, |
|
"grad_norm": 1.006437063217163, |
|
"learning_rate": 0.0001995799649472369, |
|
"loss": 0.6761, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.16122004357298475, |
|
"grad_norm": 1.0199809074401855, |
|
"learning_rate": 0.00019956397667319668, |
|
"loss": 0.7066, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.16339869281045752, |
|
"grad_norm": 1.2605611085891724, |
|
"learning_rate": 0.0001995476904307742, |
|
"loss": 0.6546, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.1655773420479303, |
|
"grad_norm": 0.9553707242012024, |
|
"learning_rate": 0.00019953110626870979, |
|
"loss": 0.9392, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.16775599128540306, |
|
"grad_norm": 0.909253716468811, |
|
"learning_rate": 0.00019951422423663547, |
|
"loss": 0.8757, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.16993464052287582, |
|
"grad_norm": 1.007814645767212, |
|
"learning_rate": 0.00019949704438507459, |
|
"loss": 0.877, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.1721132897603486, |
|
"grad_norm": 1.341426968574524, |
|
"learning_rate": 0.00019947956676544192, |
|
"loss": 0.8002, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.17429193899782136, |
|
"grad_norm": 1.153745174407959, |
|
"learning_rate": 0.00019946179143004325, |
|
"loss": 0.714, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.17647058823529413, |
|
"grad_norm": 1.0699673891067505, |
|
"learning_rate": 0.00019944371843207546, |
|
"loss": 0.9575, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1786492374727669, |
|
"grad_norm": 0.9054269194602966, |
|
"learning_rate": 0.0001994253478256262, |
|
"loss": 0.8967, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.18082788671023964, |
|
"grad_norm": 1.3790533542633057, |
|
"learning_rate": 0.0001994066796656737, |
|
"loss": 1.0535, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.1830065359477124, |
|
"grad_norm": 1.1256961822509766, |
|
"learning_rate": 0.0001993877140080869, |
|
"loss": 0.7872, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.18518518518518517, |
|
"grad_norm": 0.8870573043823242, |
|
"learning_rate": 0.0001993684509096249, |
|
"loss": 0.9137, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.18736383442265794, |
|
"grad_norm": 1.1747201681137085, |
|
"learning_rate": 0.000199348890427937, |
|
"loss": 0.779, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.1895424836601307, |
|
"grad_norm": 0.8280813694000244, |
|
"learning_rate": 0.00019932903262156245, |
|
"loss": 0.8289, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.19172113289760348, |
|
"grad_norm": 0.984609842300415, |
|
"learning_rate": 0.00019930887754993044, |
|
"loss": 0.8238, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.19389978213507625, |
|
"grad_norm": 1.030261516571045, |
|
"learning_rate": 0.00019928842527335968, |
|
"loss": 0.7061, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.19607843137254902, |
|
"grad_norm": 0.8822032809257507, |
|
"learning_rate": 0.00019926767585305835, |
|
"loss": 0.8622, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.19825708061002179, |
|
"grad_norm": 0.996427059173584, |
|
"learning_rate": 0.00019924662935112393, |
|
"loss": 0.5348, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.20043572984749455, |
|
"grad_norm": 1.0308480262756348, |
|
"learning_rate": 0.000199225285830543, |
|
"loss": 0.8325, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.20261437908496732, |
|
"grad_norm": 0.8959431648254395, |
|
"learning_rate": 0.000199203645355191, |
|
"loss": 0.6136, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.2047930283224401, |
|
"grad_norm": 0.8773916363716125, |
|
"learning_rate": 0.00019918170798983211, |
|
"loss": 0.577, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.20697167755991286, |
|
"grad_norm": 1.091194748878479, |
|
"learning_rate": 0.00019915947380011898, |
|
"loss": 0.7751, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.20915032679738563, |
|
"grad_norm": 0.8473864197731018, |
|
"learning_rate": 0.00019913694285259256, |
|
"loss": 0.5831, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.2113289760348584, |
|
"grad_norm": 0.801262378692627, |
|
"learning_rate": 0.00019911411521468205, |
|
"loss": 0.6089, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.21350762527233116, |
|
"grad_norm": 0.9437965154647827, |
|
"learning_rate": 0.00019909099095470444, |
|
"loss": 0.7343, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.21568627450980393, |
|
"grad_norm": 1.1255544424057007, |
|
"learning_rate": 0.00019906757014186442, |
|
"loss": 0.6728, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.2178649237472767, |
|
"grad_norm": 0.930216372013092, |
|
"learning_rate": 0.00019904385284625424, |
|
"loss": 0.5675, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.22004357298474944, |
|
"grad_norm": 0.8021939396858215, |
|
"learning_rate": 0.00019901983913885344, |
|
"loss": 0.4423, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 1.5028183460235596, |
|
"learning_rate": 0.00019899552909152866, |
|
"loss": 0.9797, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.22440087145969498, |
|
"grad_norm": 0.7115923762321472, |
|
"learning_rate": 0.00019897092277703333, |
|
"loss": 0.4128, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.22657952069716775, |
|
"grad_norm": 0.9592722058296204, |
|
"learning_rate": 0.00019894602026900758, |
|
"loss": 0.8714, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.22875816993464052, |
|
"grad_norm": 0.8745520114898682, |
|
"learning_rate": 0.000198920821641978, |
|
"loss": 0.5991, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.23093681917211328, |
|
"grad_norm": 0.7649117708206177, |
|
"learning_rate": 0.00019889532697135734, |
|
"loss": 0.5501, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.23311546840958605, |
|
"grad_norm": 1.1097913980484009, |
|
"learning_rate": 0.0001988695363334443, |
|
"loss": 0.5863, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.23529411764705882, |
|
"grad_norm": 0.9224969148635864, |
|
"learning_rate": 0.00019884344980542338, |
|
"loss": 0.5883, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.2374727668845316, |
|
"grad_norm": 0.7770025134086609, |
|
"learning_rate": 0.00019881706746536462, |
|
"loss": 0.8375, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.23965141612200436, |
|
"grad_norm": 0.8830885887145996, |
|
"learning_rate": 0.00019879038939222329, |
|
"loss": 0.6464, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.24183006535947713, |
|
"grad_norm": 0.8932918310165405, |
|
"learning_rate": 0.00019876341566583977, |
|
"loss": 0.4851, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.2440087145969499, |
|
"grad_norm": 0.8250621557235718, |
|
"learning_rate": 0.0001987361463669392, |
|
"loss": 0.5658, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.24618736383442266, |
|
"grad_norm": 0.9288647174835205, |
|
"learning_rate": 0.00019870858157713123, |
|
"loss": 0.5441, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.24836601307189543, |
|
"grad_norm": 0.8258922100067139, |
|
"learning_rate": 0.00019868072137891002, |
|
"loss": 0.764, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.25054466230936817, |
|
"grad_norm": 0.8087350726127625, |
|
"learning_rate": 0.00019865256585565363, |
|
"loss": 0.5816, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.25054466230936817, |
|
"eval_loss": 0.6156808733940125, |
|
"eval_runtime": 0.937, |
|
"eval_samples_per_second": 182.501, |
|
"eval_steps_per_second": 13.874, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.25272331154684097, |
|
"grad_norm": 1.0198040008544922, |
|
"learning_rate": 0.00019862411509162406, |
|
"loss": 0.9471, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.2549019607843137, |
|
"grad_norm": 0.8376523852348328, |
|
"learning_rate": 0.00019859536917196687, |
|
"loss": 0.6166, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.2570806100217865, |
|
"grad_norm": 0.8766109347343445, |
|
"learning_rate": 0.0001985663281827108, |
|
"loss": 0.6737, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.25925925925925924, |
|
"grad_norm": 1.072192668914795, |
|
"learning_rate": 0.00019853699221076792, |
|
"loss": 0.6403, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.26143790849673204, |
|
"grad_norm": 0.8205565214157104, |
|
"learning_rate": 0.00019850736134393286, |
|
"loss": 0.4247, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2636165577342048, |
|
"grad_norm": 1.0622146129608154, |
|
"learning_rate": 0.00019847743567088293, |
|
"loss": 0.6497, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.2657952069716776, |
|
"grad_norm": 0.8463292717933655, |
|
"learning_rate": 0.00019844721528117766, |
|
"loss": 0.6587, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.2679738562091503, |
|
"grad_norm": 0.9597845673561096, |
|
"learning_rate": 0.0001984167002652586, |
|
"loss": 0.4752, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.2701525054466231, |
|
"grad_norm": 0.8975586295127869, |
|
"learning_rate": 0.00019838589071444903, |
|
"loss": 0.7978, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.27233115468409586, |
|
"grad_norm": 0.8363540768623352, |
|
"learning_rate": 0.00019835478672095374, |
|
"loss": 0.7359, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.27450980392156865, |
|
"grad_norm": 1.0208615064620972, |
|
"learning_rate": 0.00019832338837785863, |
|
"loss": 0.518, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.2766884531590414, |
|
"grad_norm": 1.14145028591156, |
|
"learning_rate": 0.0001982916957791306, |
|
"loss": 0.7642, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.2788671023965142, |
|
"grad_norm": 0.9274200797080994, |
|
"learning_rate": 0.00019825970901961705, |
|
"loss": 0.5288, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.28104575163398693, |
|
"grad_norm": 0.8783562779426575, |
|
"learning_rate": 0.0001982274281950459, |
|
"loss": 0.5958, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.28322440087145967, |
|
"grad_norm": 0.9028067588806152, |
|
"learning_rate": 0.000198194853402025, |
|
"loss": 0.6075, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.28540305010893247, |
|
"grad_norm": 0.9846379160881042, |
|
"learning_rate": 0.00019816198473804198, |
|
"loss": 0.8254, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.2875816993464052, |
|
"grad_norm": 0.9409753680229187, |
|
"learning_rate": 0.00019812882230146398, |
|
"loss": 0.704, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.289760348583878, |
|
"grad_norm": 0.8969582915306091, |
|
"learning_rate": 0.00019809536619153732, |
|
"loss": 0.6107, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.29193899782135074, |
|
"grad_norm": 0.7812852263450623, |
|
"learning_rate": 0.00019806161650838723, |
|
"loss": 0.5671, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.29411764705882354, |
|
"grad_norm": 0.8860548734664917, |
|
"learning_rate": 0.00019802757335301741, |
|
"loss": 0.5248, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.2962962962962963, |
|
"grad_norm": 0.8217918276786804, |
|
"learning_rate": 0.00019799323682731, |
|
"loss": 0.4935, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.2984749455337691, |
|
"grad_norm": 0.7621735334396362, |
|
"learning_rate": 0.00019795860703402505, |
|
"loss": 0.5984, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.3006535947712418, |
|
"grad_norm": 0.9418565630912781, |
|
"learning_rate": 0.00019792368407680025, |
|
"loss": 0.558, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.3028322440087146, |
|
"grad_norm": 0.819114625453949, |
|
"learning_rate": 0.00019788846806015066, |
|
"loss": 0.3791, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.30501089324618735, |
|
"grad_norm": 0.9072156548500061, |
|
"learning_rate": 0.00019785295908946848, |
|
"loss": 0.4462, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.30718954248366015, |
|
"grad_norm": 0.8303220868110657, |
|
"learning_rate": 0.00019781715727102252, |
|
"loss": 0.4959, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.3093681917211329, |
|
"grad_norm": 0.8586477041244507, |
|
"learning_rate": 0.00019778106271195806, |
|
"loss": 0.4701, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.3115468409586057, |
|
"grad_norm": 0.7374873757362366, |
|
"learning_rate": 0.00019774467552029646, |
|
"loss": 0.407, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.3137254901960784, |
|
"grad_norm": 1.1180788278579712, |
|
"learning_rate": 0.00019770799580493494, |
|
"loss": 0.6304, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.3159041394335512, |
|
"grad_norm": 0.9823700189590454, |
|
"learning_rate": 0.000197671023675646, |
|
"loss": 0.5079, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.31808278867102396, |
|
"grad_norm": 0.8474340438842773, |
|
"learning_rate": 0.00019763375924307735, |
|
"loss": 0.5708, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.3202614379084967, |
|
"grad_norm": 0.9172300100326538, |
|
"learning_rate": 0.00019759620261875155, |
|
"loss": 0.418, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.3224400871459695, |
|
"grad_norm": 0.7413074374198914, |
|
"learning_rate": 0.0001975583539150655, |
|
"loss": 0.4531, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.32461873638344224, |
|
"grad_norm": 0.7417133450508118, |
|
"learning_rate": 0.00019752021324529023, |
|
"loss": 0.5158, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.32679738562091504, |
|
"grad_norm": 0.774067223072052, |
|
"learning_rate": 0.00019748178072357065, |
|
"loss": 0.5995, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.3289760348583878, |
|
"grad_norm": 0.9123216867446899, |
|
"learning_rate": 0.00019744305646492497, |
|
"loss": 0.6477, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.3311546840958606, |
|
"grad_norm": 0.8347046375274658, |
|
"learning_rate": 0.00019740404058524457, |
|
"loss": 0.6527, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 0.9968402981758118, |
|
"learning_rate": 0.00019736473320129352, |
|
"loss": 0.5282, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.3355119825708061, |
|
"grad_norm": 0.7431806921958923, |
|
"learning_rate": 0.00019732513443070836, |
|
"loss": 0.5553, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.33769063180827885, |
|
"grad_norm": 0.9280023574829102, |
|
"learning_rate": 0.0001972852443919976, |
|
"loss": 0.7504, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.33986928104575165, |
|
"grad_norm": 0.9563156366348267, |
|
"learning_rate": 0.00019724506320454153, |
|
"loss": 0.4566, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.3420479302832244, |
|
"grad_norm": 0.744659960269928, |
|
"learning_rate": 0.00019720459098859165, |
|
"loss": 0.5929, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.3442265795206972, |
|
"grad_norm": 0.8504654765129089, |
|
"learning_rate": 0.0001971638278652705, |
|
"loss": 0.5152, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.3464052287581699, |
|
"grad_norm": 0.9335165023803711, |
|
"learning_rate": 0.0001971227739565712, |
|
"loss": 0.5115, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.3485838779956427, |
|
"grad_norm": 1.0398534536361694, |
|
"learning_rate": 0.0001970814293853572, |
|
"loss": 0.4626, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.35076252723311546, |
|
"grad_norm": 0.7855281233787537, |
|
"learning_rate": 0.0001970397942753617, |
|
"loss": 0.4245, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.35294117647058826, |
|
"grad_norm": 0.7897714972496033, |
|
"learning_rate": 0.00019699786875118747, |
|
"loss": 0.5555, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.355119825708061, |
|
"grad_norm": 0.8648003935813904, |
|
"learning_rate": 0.0001969556529383064, |
|
"loss": 0.5558, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.3572984749455338, |
|
"grad_norm": 1.0440359115600586, |
|
"learning_rate": 0.00019691314696305913, |
|
"loss": 0.4879, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.35947712418300654, |
|
"grad_norm": 0.8991299867630005, |
|
"learning_rate": 0.00019687035095265475, |
|
"loss": 0.4131, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.3616557734204793, |
|
"grad_norm": 1.070555329322815, |
|
"learning_rate": 0.00019682726503517017, |
|
"loss": 0.4959, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3638344226579521, |
|
"grad_norm": 0.751699686050415, |
|
"learning_rate": 0.00019678388933955015, |
|
"loss": 0.4098, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3660130718954248, |
|
"grad_norm": 0.7820857763290405, |
|
"learning_rate": 0.00019674022399560648, |
|
"loss": 0.4611, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.3681917211328976, |
|
"grad_norm": 0.7827489376068115, |
|
"learning_rate": 0.00019669626913401792, |
|
"loss": 0.3593, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.37037037037037035, |
|
"grad_norm": 0.8705342411994934, |
|
"learning_rate": 0.00019665202488632956, |
|
"loss": 0.4037, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.37254901960784315, |
|
"grad_norm": 0.9181383848190308, |
|
"learning_rate": 0.00019660749138495268, |
|
"loss": 0.5621, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3747276688453159, |
|
"grad_norm": 0.7258014678955078, |
|
"learning_rate": 0.0001965626687631641, |
|
"loss": 0.3909, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.3769063180827887, |
|
"grad_norm": 0.7386276721954346, |
|
"learning_rate": 0.00019651755715510602, |
|
"loss": 0.5974, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.3790849673202614, |
|
"grad_norm": 0.7849751710891724, |
|
"learning_rate": 0.00019647215669578536, |
|
"loss": 0.4947, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.3812636165577342, |
|
"grad_norm": 0.7632936239242554, |
|
"learning_rate": 0.00019642646752107362, |
|
"loss": 0.4886, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.38344226579520696, |
|
"grad_norm": 0.8370786309242249, |
|
"learning_rate": 0.00019638048976770628, |
|
"loss": 0.4741, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.38562091503267976, |
|
"grad_norm": 0.8441713452339172, |
|
"learning_rate": 0.00019633422357328239, |
|
"loss": 0.4939, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3877995642701525, |
|
"grad_norm": 0.7680661082267761, |
|
"learning_rate": 0.00019628766907626446, |
|
"loss": 0.5976, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.3899782135076253, |
|
"grad_norm": 0.8030869364738464, |
|
"learning_rate": 0.00019624082641597754, |
|
"loss": 0.4914, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.39215686274509803, |
|
"grad_norm": 0.8066624402999878, |
|
"learning_rate": 0.00019619369573260924, |
|
"loss": 0.4982, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.39433551198257083, |
|
"grad_norm": 0.7550255060195923, |
|
"learning_rate": 0.00019614627716720912, |
|
"loss": 0.3796, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.39651416122004357, |
|
"grad_norm": 0.761080265045166, |
|
"learning_rate": 0.00019609857086168823, |
|
"loss": 0.4118, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.39869281045751637, |
|
"grad_norm": 1.061673641204834, |
|
"learning_rate": 0.00019605057695881885, |
|
"loss": 0.4461, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.4008714596949891, |
|
"grad_norm": 0.8266555070877075, |
|
"learning_rate": 0.00019600229560223388, |
|
"loss": 0.4915, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.40305010893246185, |
|
"grad_norm": 0.769981861114502, |
|
"learning_rate": 0.00019595372693642654, |
|
"loss": 0.3993, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.40522875816993464, |
|
"grad_norm": 0.8316985368728638, |
|
"learning_rate": 0.00019590487110674983, |
|
"loss": 0.598, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.4074074074074074, |
|
"grad_norm": 0.7869564890861511, |
|
"learning_rate": 0.00019585572825941627, |
|
"loss": 0.5088, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.4095860566448802, |
|
"grad_norm": 0.7254141569137573, |
|
"learning_rate": 0.0001958062985414972, |
|
"loss": 0.3948, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.4117647058823529, |
|
"grad_norm": 0.7505261898040771, |
|
"learning_rate": 0.00019575658210092259, |
|
"loss": 0.3883, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.4139433551198257, |
|
"grad_norm": 0.7498146891593933, |
|
"learning_rate": 0.00019570657908648048, |
|
"loss": 0.4072, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.41612200435729846, |
|
"grad_norm": 0.9777516722679138, |
|
"learning_rate": 0.00019565628964781647, |
|
"loss": 0.4711, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.41830065359477125, |
|
"grad_norm": 0.719313383102417, |
|
"learning_rate": 0.0001956057139354335, |
|
"loss": 0.2645, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.420479302832244, |
|
"grad_norm": 0.826934814453125, |
|
"learning_rate": 0.0001955548521006911, |
|
"loss": 0.4608, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.4226579520697168, |
|
"grad_norm": 0.7773908376693726, |
|
"learning_rate": 0.0001955037042958052, |
|
"loss": 0.6364, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.42483660130718953, |
|
"grad_norm": 0.8829189538955688, |
|
"learning_rate": 0.00019545227067384747, |
|
"loss": 0.5166, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.42701525054466233, |
|
"grad_norm": 0.7444214820861816, |
|
"learning_rate": 0.00019540055138874505, |
|
"loss": 0.4784, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.42919389978213507, |
|
"grad_norm": 0.7535512447357178, |
|
"learning_rate": 0.0001953485465952799, |
|
"loss": 0.328, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.43137254901960786, |
|
"grad_norm": 0.867964506149292, |
|
"learning_rate": 0.00019529625644908847, |
|
"loss": 0.4954, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.4335511982570806, |
|
"grad_norm": 0.8096396923065186, |
|
"learning_rate": 0.00019524368110666122, |
|
"loss": 0.409, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.4357298474945534, |
|
"grad_norm": 0.6851803064346313, |
|
"learning_rate": 0.0001951908207253421, |
|
"loss": 0.3642, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.43790849673202614, |
|
"grad_norm": 1.0261396169662476, |
|
"learning_rate": 0.00019513767546332813, |
|
"loss": 0.5437, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.4400871459694989, |
|
"grad_norm": 0.6751096248626709, |
|
"learning_rate": 0.00019508424547966884, |
|
"loss": 0.3054, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.4422657952069717, |
|
"grad_norm": 0.9070213437080383, |
|
"learning_rate": 0.00019503053093426593, |
|
"loss": 0.5467, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 0.7909525632858276, |
|
"learning_rate": 0.00019497653198787264, |
|
"loss": 0.4506, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.4466230936819172, |
|
"grad_norm": 0.8636406064033508, |
|
"learning_rate": 0.00019492224880209344, |
|
"loss": 0.4917, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.44880174291938996, |
|
"grad_norm": 0.8827958106994629, |
|
"learning_rate": 0.00019486768153938338, |
|
"loss": 0.6783, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.45098039215686275, |
|
"grad_norm": 0.8108576536178589, |
|
"learning_rate": 0.00019481283036304768, |
|
"loss": 0.5415, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.4531590413943355, |
|
"grad_norm": 0.700524091720581, |
|
"learning_rate": 0.0001947576954372413, |
|
"loss": 0.2578, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.4553376906318083, |
|
"grad_norm": 0.6894106268882751, |
|
"learning_rate": 0.00019470227692696833, |
|
"loss": 0.4008, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.45751633986928103, |
|
"grad_norm": 0.762399435043335, |
|
"learning_rate": 0.00019464657499808152, |
|
"loss": 0.448, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.4596949891067538, |
|
"grad_norm": 0.7530399560928345, |
|
"learning_rate": 0.00019459058981728192, |
|
"loss": 0.4793, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.46187363834422657, |
|
"grad_norm": 0.8698046803474426, |
|
"learning_rate": 0.0001945343215521182, |
|
"loss": 0.5627, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.46405228758169936, |
|
"grad_norm": 0.9562462568283081, |
|
"learning_rate": 0.00019447777037098622, |
|
"loss": 0.6461, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.4662309368191721, |
|
"grad_norm": 0.7317548990249634, |
|
"learning_rate": 0.0001944209364431286, |
|
"loss": 0.2913, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.4684095860566449, |
|
"grad_norm": 0.8016018271446228, |
|
"learning_rate": 0.00019436381993863405, |
|
"loss": 0.4218, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.47058823529411764, |
|
"grad_norm": 0.7906899452209473, |
|
"learning_rate": 0.00019430642102843707, |
|
"loss": 0.3951, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.47276688453159044, |
|
"grad_norm": 0.7488080859184265, |
|
"learning_rate": 0.0001942487398843172, |
|
"loss": 0.2522, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.4749455337690632, |
|
"grad_norm": 0.7537955641746521, |
|
"learning_rate": 0.00019419077667889872, |
|
"loss": 0.4182, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.477124183006536, |
|
"grad_norm": 1.090034008026123, |
|
"learning_rate": 0.00019413253158565006, |
|
"loss": 0.4396, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.4793028322440087, |
|
"grad_norm": 0.8850147724151611, |
|
"learning_rate": 0.00019407400477888315, |
|
"loss": 0.4225, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.48148148148148145, |
|
"grad_norm": 0.6652522683143616, |
|
"learning_rate": 0.00019401519643375315, |
|
"loss": 0.3252, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.48366013071895425, |
|
"grad_norm": 0.9398255348205566, |
|
"learning_rate": 0.00019395610672625767, |
|
"loss": 0.3825, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.485838779956427, |
|
"grad_norm": 0.7577664852142334, |
|
"learning_rate": 0.00019389673583323645, |
|
"loss": 0.4972, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.4880174291938998, |
|
"grad_norm": 0.7722988724708557, |
|
"learning_rate": 0.00019383708393237075, |
|
"loss": 0.3655, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.49019607843137253, |
|
"grad_norm": 0.9242033958435059, |
|
"learning_rate": 0.0001937771512021827, |
|
"loss": 0.5322, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.4923747276688453, |
|
"grad_norm": 0.7697513699531555, |
|
"learning_rate": 0.00019371693782203498, |
|
"loss": 0.3516, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.49455337690631807, |
|
"grad_norm": 1.0141658782958984, |
|
"learning_rate": 0.00019365644397213014, |
|
"loss": 0.4072, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.49673202614379086, |
|
"grad_norm": 0.8579118847846985, |
|
"learning_rate": 0.00019359566983351013, |
|
"loss": 0.5341, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.4989106753812636, |
|
"grad_norm": 0.9969218969345093, |
|
"learning_rate": 0.0001935346155880557, |
|
"loss": 0.5544, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.5010893246187363, |
|
"grad_norm": 0.7654063105583191, |
|
"learning_rate": 0.0001934732814184859, |
|
"loss": 0.3604, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.5010893246187363, |
|
"eval_loss": 0.430705189704895, |
|
"eval_runtime": 0.9369, |
|
"eval_samples_per_second": 182.508, |
|
"eval_steps_per_second": 13.875, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.5032679738562091, |
|
"grad_norm": 0.8155052661895752, |
|
"learning_rate": 0.00019341166750835748, |
|
"loss": 0.3841, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.5054466230936819, |
|
"grad_norm": 1.005814790725708, |
|
"learning_rate": 0.00019334977404206443, |
|
"loss": 0.4976, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.5076252723311547, |
|
"grad_norm": 0.795301079750061, |
|
"learning_rate": 0.00019328760120483743, |
|
"loss": 0.347, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.5098039215686274, |
|
"grad_norm": 0.8294497728347778, |
|
"learning_rate": 0.00019322514918274308, |
|
"loss": 0.3366, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.5119825708061002, |
|
"grad_norm": 1.0151102542877197, |
|
"learning_rate": 0.0001931624181626836, |
|
"loss": 0.3217, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.514161220043573, |
|
"grad_norm": 0.8590940833091736, |
|
"learning_rate": 0.00019309940833239626, |
|
"loss": 0.4399, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.5163398692810458, |
|
"grad_norm": 0.8852725028991699, |
|
"learning_rate": 0.00019303611988045257, |
|
"loss": 0.3737, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.5185185185185185, |
|
"grad_norm": 0.8230773210525513, |
|
"learning_rate": 0.00019297255299625797, |
|
"loss": 0.4262, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.5206971677559913, |
|
"grad_norm": 0.6675543785095215, |
|
"learning_rate": 0.00019290870787005114, |
|
"loss": 0.291, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.5228758169934641, |
|
"grad_norm": 0.867770791053772, |
|
"learning_rate": 0.00019284458469290354, |
|
"loss": 0.4791, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.5250544662309368, |
|
"grad_norm": 0.6569726467132568, |
|
"learning_rate": 0.0001927801836567187, |
|
"loss": 0.3331, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.5272331154684096, |
|
"grad_norm": 0.9207850098609924, |
|
"learning_rate": 0.00019271550495423168, |
|
"loss": 0.3867, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.5294117647058824, |
|
"grad_norm": 0.9937344789505005, |
|
"learning_rate": 0.00019265054877900858, |
|
"loss": 0.4788, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.5315904139433552, |
|
"grad_norm": 0.6370213627815247, |
|
"learning_rate": 0.00019258531532544585, |
|
"loss": 0.2628, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.5337690631808278, |
|
"grad_norm": 0.7503904104232788, |
|
"learning_rate": 0.00019251980478876985, |
|
"loss": 0.3214, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.5359477124183006, |
|
"grad_norm": 0.8075821399688721, |
|
"learning_rate": 0.00019245401736503608, |
|
"loss": 0.4626, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.5381263616557734, |
|
"grad_norm": 0.7386597990989685, |
|
"learning_rate": 0.0001923879532511287, |
|
"loss": 0.366, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.5403050108932462, |
|
"grad_norm": 0.7394566535949707, |
|
"learning_rate": 0.00019232161264475997, |
|
"loss": 0.387, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.5424836601307189, |
|
"grad_norm": 0.7640907168388367, |
|
"learning_rate": 0.0001922549957444696, |
|
"loss": 0.58, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.5446623093681917, |
|
"grad_norm": 0.6614570021629333, |
|
"learning_rate": 0.00019218810274962417, |
|
"loss": 0.3073, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.5468409586056645, |
|
"grad_norm": 0.966230034828186, |
|
"learning_rate": 0.0001921209338604166, |
|
"loss": 0.5173, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.5490196078431373, |
|
"grad_norm": 0.822700560092926, |
|
"learning_rate": 0.00019205348927786532, |
|
"loss": 0.4205, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.55119825708061, |
|
"grad_norm": 0.7053878307342529, |
|
"learning_rate": 0.00019198576920381405, |
|
"loss": 0.3872, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.5533769063180828, |
|
"grad_norm": 0.7365344166755676, |
|
"learning_rate": 0.00019191777384093081, |
|
"loss": 0.2189, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.5555555555555556, |
|
"grad_norm": 0.738994300365448, |
|
"learning_rate": 0.0001918495033927076, |
|
"loss": 0.4104, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.5577342047930284, |
|
"grad_norm": 0.6301658153533936, |
|
"learning_rate": 0.0001917809580634596, |
|
"loss": 0.217, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.5599128540305011, |
|
"grad_norm": 0.6912908554077148, |
|
"learning_rate": 0.0001917121380583247, |
|
"loss": 0.2477, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.5620915032679739, |
|
"grad_norm": 0.7470822930335999, |
|
"learning_rate": 0.00019164304358326275, |
|
"loss": 0.2822, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.5642701525054467, |
|
"grad_norm": 0.8313736915588379, |
|
"learning_rate": 0.0001915736748450551, |
|
"loss": 0.4136, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.5664488017429193, |
|
"grad_norm": 0.9498727321624756, |
|
"learning_rate": 0.00019150403205130383, |
|
"loss": 0.4656, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.5686274509803921, |
|
"grad_norm": 0.7486637830734253, |
|
"learning_rate": 0.0001914341154104312, |
|
"loss": 0.3612, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.5708061002178649, |
|
"grad_norm": 0.7852402925491333, |
|
"learning_rate": 0.00019136392513167903, |
|
"loss": 0.3849, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.5729847494553377, |
|
"grad_norm": 0.7787222862243652, |
|
"learning_rate": 0.00019129346142510812, |
|
"loss": 0.3202, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.5751633986928104, |
|
"grad_norm": 0.7108213305473328, |
|
"learning_rate": 0.00019122272450159745, |
|
"loss": 0.2964, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.5773420479302832, |
|
"grad_norm": 0.7679167985916138, |
|
"learning_rate": 0.00019115171457284382, |
|
"loss": 0.3068, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.579520697167756, |
|
"grad_norm": 0.9409091472625732, |
|
"learning_rate": 0.0001910804318513609, |
|
"loss": 0.3865, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.5816993464052288, |
|
"grad_norm": 0.7475356459617615, |
|
"learning_rate": 0.00019100887655047885, |
|
"loss": 0.3727, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.5838779956427015, |
|
"grad_norm": 0.8223069310188293, |
|
"learning_rate": 0.0001909370488843436, |
|
"loss": 0.3594, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.5860566448801743, |
|
"grad_norm": 0.8433902263641357, |
|
"learning_rate": 0.00019086494906791614, |
|
"loss": 0.3627, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.5882352941176471, |
|
"grad_norm": 0.7213712334632874, |
|
"learning_rate": 0.00019079257731697196, |
|
"loss": 0.3216, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5904139433551199, |
|
"grad_norm": 0.9422861337661743, |
|
"learning_rate": 0.00019071993384810036, |
|
"loss": 0.4973, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.5925925925925926, |
|
"grad_norm": 0.7964383363723755, |
|
"learning_rate": 0.0001906470188787039, |
|
"loss": 0.3558, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.5947712418300654, |
|
"grad_norm": 0.8275824785232544, |
|
"learning_rate": 0.0001905738326269975, |
|
"loss": 0.2985, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.5969498910675382, |
|
"grad_norm": 0.9769518971443176, |
|
"learning_rate": 0.00019050037531200814, |
|
"loss": 0.5299, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.599128540305011, |
|
"grad_norm": 0.6739898920059204, |
|
"learning_rate": 0.0001904266471535739, |
|
"loss": 0.2324, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.6013071895424836, |
|
"grad_norm": 0.78001868724823, |
|
"learning_rate": 0.00019035264837234347, |
|
"loss": 0.3311, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.6034858387799564, |
|
"grad_norm": 0.6510067582130432, |
|
"learning_rate": 0.00019027837918977544, |
|
"loss": 0.2284, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.6056644880174292, |
|
"grad_norm": 0.7791504859924316, |
|
"learning_rate": 0.00019020383982813765, |
|
"loss": 0.3247, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.6078431372549019, |
|
"grad_norm": 0.7968712449073792, |
|
"learning_rate": 0.00019012903051050643, |
|
"loss": 0.3379, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.6100217864923747, |
|
"grad_norm": 0.7802785038948059, |
|
"learning_rate": 0.00019005395146076616, |
|
"loss": 0.3113, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.6122004357298475, |
|
"grad_norm": 0.8003398180007935, |
|
"learning_rate": 0.00018997860290360832, |
|
"loss": 0.3467, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.6143790849673203, |
|
"grad_norm": 0.9283706545829773, |
|
"learning_rate": 0.00018990298506453104, |
|
"loss": 0.3301, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.616557734204793, |
|
"grad_norm": 0.95260089635849, |
|
"learning_rate": 0.00018982709816983828, |
|
"loss": 0.4446, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.6187363834422658, |
|
"grad_norm": 0.8746076226234436, |
|
"learning_rate": 0.0001897509424466393, |
|
"loss": 0.3264, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.6209150326797386, |
|
"grad_norm": 0.9784271717071533, |
|
"learning_rate": 0.00018967451812284777, |
|
"loss": 0.3103, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.6230936819172114, |
|
"grad_norm": 0.7417327165603638, |
|
"learning_rate": 0.00018959782542718128, |
|
"loss": 0.3489, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.6252723311546841, |
|
"grad_norm": 0.835861325263977, |
|
"learning_rate": 0.00018952086458916064, |
|
"loss": 0.4059, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.6274509803921569, |
|
"grad_norm": 0.7815294861793518, |
|
"learning_rate": 0.000189443635839109, |
|
"loss": 0.3252, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.6296296296296297, |
|
"grad_norm": 0.7762477993965149, |
|
"learning_rate": 0.00018936613940815145, |
|
"loss": 0.3905, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.6318082788671024, |
|
"grad_norm": 0.7671297192573547, |
|
"learning_rate": 0.00018928837552821404, |
|
"loss": 0.3945, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.6339869281045751, |
|
"grad_norm": 0.7193905711174011, |
|
"learning_rate": 0.00018921034443202333, |
|
"loss": 0.2897, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.6361655773420479, |
|
"grad_norm": 0.7806922793388367, |
|
"learning_rate": 0.0001891320463531055, |
|
"loss": 0.3675, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.6383442265795207, |
|
"grad_norm": 0.9675951600074768, |
|
"learning_rate": 0.0001890534815257858, |
|
"loss": 0.2949, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.6405228758169934, |
|
"grad_norm": 0.7414330840110779, |
|
"learning_rate": 0.00018897465018518782, |
|
"loss": 0.2468, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.6427015250544662, |
|
"grad_norm": 0.9123559594154358, |
|
"learning_rate": 0.00018889555256723262, |
|
"loss": 0.3618, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.644880174291939, |
|
"grad_norm": 0.8628408312797546, |
|
"learning_rate": 0.0001888161889086383, |
|
"loss": 0.4728, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.6470588235294118, |
|
"grad_norm": 0.9308063983917236, |
|
"learning_rate": 0.00018873655944691902, |
|
"loss": 0.3469, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.6492374727668845, |
|
"grad_norm": 0.7682824730873108, |
|
"learning_rate": 0.00018865666442038456, |
|
"loss": 0.4138, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.6514161220043573, |
|
"grad_norm": 0.8124529123306274, |
|
"learning_rate": 0.00018857650406813937, |
|
"loss": 0.3172, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.6535947712418301, |
|
"grad_norm": 1.0015543699264526, |
|
"learning_rate": 0.00018849607863008193, |
|
"loss": 0.3274, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6557734204793029, |
|
"grad_norm": 0.8008652329444885, |
|
"learning_rate": 0.0001884153883469041, |
|
"loss": 0.2765, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.6579520697167756, |
|
"grad_norm": 0.8472908139228821, |
|
"learning_rate": 0.0001883344334600904, |
|
"loss": 0.362, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.6601307189542484, |
|
"grad_norm": 0.953235387802124, |
|
"learning_rate": 0.0001882532142119171, |
|
"loss": 0.383, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.6623093681917211, |
|
"grad_norm": 0.9220999479293823, |
|
"learning_rate": 0.00018817173084545176, |
|
"loss": 0.4209, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.664488017429194, |
|
"grad_norm": 0.7949219942092896, |
|
"learning_rate": 0.00018808998360455233, |
|
"loss": 0.3869, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.851177990436554, |
|
"learning_rate": 0.0001880079727338664, |
|
"loss": 0.2691, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.6688453159041394, |
|
"grad_norm": 0.8438466787338257, |
|
"learning_rate": 0.00018792569847883068, |
|
"loss": 0.3314, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.6710239651416122, |
|
"grad_norm": 0.842267632484436, |
|
"learning_rate": 0.00018784316108566996, |
|
"loss": 0.3046, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.673202614379085, |
|
"grad_norm": 0.6835848689079285, |
|
"learning_rate": 0.00018776036080139666, |
|
"loss": 0.2026, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.6753812636165577, |
|
"grad_norm": 0.8685810565948486, |
|
"learning_rate": 0.00018767729787380985, |
|
"loss": 0.2611, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6775599128540305, |
|
"grad_norm": 0.8736341595649719, |
|
"learning_rate": 0.00018759397255149475, |
|
"loss": 0.3135, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.6797385620915033, |
|
"grad_norm": 0.6718863844871521, |
|
"learning_rate": 0.00018751038508382176, |
|
"loss": 0.2202, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.681917211328976, |
|
"grad_norm": 0.686114490032196, |
|
"learning_rate": 0.00018742653572094583, |
|
"loss": 0.2861, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.6840958605664488, |
|
"grad_norm": 0.7779257297515869, |
|
"learning_rate": 0.00018734242471380572, |
|
"loss": 0.2375, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.6862745098039216, |
|
"grad_norm": 0.7334826588630676, |
|
"learning_rate": 0.00018725805231412318, |
|
"loss": 0.2855, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.6884531590413944, |
|
"grad_norm": 0.8215782046318054, |
|
"learning_rate": 0.00018717341877440226, |
|
"loss": 0.2827, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.690631808278867, |
|
"grad_norm": 0.7793420553207397, |
|
"learning_rate": 0.00018708852434792857, |
|
"loss": 0.3593, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.6928104575163399, |
|
"grad_norm": 0.6807507872581482, |
|
"learning_rate": 0.0001870033692887684, |
|
"loss": 0.2489, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.6949891067538126, |
|
"grad_norm": 0.8848825097084045, |
|
"learning_rate": 0.00018691795385176815, |
|
"loss": 0.3622, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.6971677559912854, |
|
"grad_norm": 0.7745735049247742, |
|
"learning_rate": 0.00018683227829255334, |
|
"loss": 0.2808, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.6993464052287581, |
|
"grad_norm": 0.8624871969223022, |
|
"learning_rate": 0.00018674634286752805, |
|
"loss": 0.326, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.7015250544662309, |
|
"grad_norm": 0.7500248551368713, |
|
"learning_rate": 0.00018666014783387408, |
|
"loss": 0.3413, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.7037037037037037, |
|
"grad_norm": 0.7700647115707397, |
|
"learning_rate": 0.0001865736934495501, |
|
"loss": 0.2878, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.7058823529411765, |
|
"grad_norm": 0.7682783603668213, |
|
"learning_rate": 0.000186486979973291, |
|
"loss": 0.3626, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.7080610021786492, |
|
"grad_norm": 0.8913851976394653, |
|
"learning_rate": 0.00018640000766460704, |
|
"loss": 0.3684, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.710239651416122, |
|
"grad_norm": 0.6541398763656616, |
|
"learning_rate": 0.0001863127767837831, |
|
"loss": 0.3203, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.7124183006535948, |
|
"grad_norm": 0.9459714889526367, |
|
"learning_rate": 0.00018622528759187795, |
|
"loss": 0.3794, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.7145969498910676, |
|
"grad_norm": 0.8284517526626587, |
|
"learning_rate": 0.0001861375403507233, |
|
"loss": 0.2859, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.7167755991285403, |
|
"grad_norm": 0.7966834306716919, |
|
"learning_rate": 0.00018604953532292323, |
|
"loss": 0.3299, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.7189542483660131, |
|
"grad_norm": 0.9105852842330933, |
|
"learning_rate": 0.00018596127277185329, |
|
"loss": 0.3489, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.7211328976034859, |
|
"grad_norm": 0.9086281657218933, |
|
"learning_rate": 0.0001858727529616597, |
|
"loss": 0.4104, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.7233115468409586, |
|
"grad_norm": 0.8128787875175476, |
|
"learning_rate": 0.0001857839761572586, |
|
"loss": 0.3057, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.7254901960784313, |
|
"grad_norm": 0.7139798402786255, |
|
"learning_rate": 0.0001856949426243352, |
|
"loss": 0.2226, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.7276688453159041, |
|
"grad_norm": 0.8051466941833496, |
|
"learning_rate": 0.00018560565262934318, |
|
"loss": 0.2778, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.7298474945533769, |
|
"grad_norm": 0.7806089520454407, |
|
"learning_rate": 0.00018551610643950358, |
|
"loss": 0.3074, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.7320261437908496, |
|
"grad_norm": 0.8499903678894043, |
|
"learning_rate": 0.00018542630432280422, |
|
"loss": 0.2625, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.7342047930283224, |
|
"grad_norm": 0.9724310040473938, |
|
"learning_rate": 0.00018533624654799887, |
|
"loss": 0.3267, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.7363834422657952, |
|
"grad_norm": 0.845237135887146, |
|
"learning_rate": 0.00018524593338460635, |
|
"loss": 0.3624, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.738562091503268, |
|
"grad_norm": 0.8381814360618591, |
|
"learning_rate": 0.00018515536510290987, |
|
"loss": 0.2973, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 0.7939983606338501, |
|
"learning_rate": 0.00018506454197395606, |
|
"loss": 0.2893, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.7429193899782135, |
|
"grad_norm": 1.1554635763168335, |
|
"learning_rate": 0.00018497346426955434, |
|
"loss": 0.4355, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.7450980392156863, |
|
"grad_norm": 0.7291870713233948, |
|
"learning_rate": 0.00018488213226227588, |
|
"loss": 0.3172, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.7472766884531591, |
|
"grad_norm": 0.6162805557250977, |
|
"learning_rate": 0.00018479054622545302, |
|
"loss": 0.1914, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.7494553376906318, |
|
"grad_norm": 0.800554096698761, |
|
"learning_rate": 0.0001846987064331783, |
|
"loss": 0.3092, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.7516339869281046, |
|
"grad_norm": 0.8024484515190125, |
|
"learning_rate": 0.00018460661316030365, |
|
"loss": 0.2598, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.7516339869281046, |
|
"eval_loss": 0.35579976439476013, |
|
"eval_runtime": 0.9369, |
|
"eval_samples_per_second": 182.512, |
|
"eval_steps_per_second": 13.875, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.7538126361655774, |
|
"grad_norm": 0.6010091304779053, |
|
"learning_rate": 0.00018451426668243963, |
|
"loss": 0.1268, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.7559912854030502, |
|
"grad_norm": 0.909592866897583, |
|
"learning_rate": 0.0001844216672759546, |
|
"loss": 0.2808, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.7581699346405228, |
|
"grad_norm": 0.8788052201271057, |
|
"learning_rate": 0.0001843288152179739, |
|
"loss": 0.3516, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.7603485838779956, |
|
"grad_norm": 0.987259030342102, |
|
"learning_rate": 0.00018423571078637885, |
|
"loss": 0.4607, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.7625272331154684, |
|
"grad_norm": 1.0777586698532104, |
|
"learning_rate": 0.00018414235425980616, |
|
"loss": 0.3367, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.7647058823529411, |
|
"grad_norm": 0.8233873844146729, |
|
"learning_rate": 0.00018404874591764696, |
|
"loss": 0.2616, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.7668845315904139, |
|
"grad_norm": 0.6623446345329285, |
|
"learning_rate": 0.00018395488604004603, |
|
"loss": 0.154, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.7690631808278867, |
|
"grad_norm": 0.9828007817268372, |
|
"learning_rate": 0.0001838607749079009, |
|
"loss": 0.3084, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.7712418300653595, |
|
"grad_norm": 0.7043982148170471, |
|
"learning_rate": 0.00018376641280286107, |
|
"loss": 0.2568, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.7734204793028322, |
|
"grad_norm": 0.7597530484199524, |
|
"learning_rate": 0.00018367180000732706, |
|
"loss": 0.306, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.775599128540305, |
|
"grad_norm": 0.8099778890609741, |
|
"learning_rate": 0.00018357693680444976, |
|
"loss": 0.2389, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.7777777777777778, |
|
"grad_norm": 0.7014642953872681, |
|
"learning_rate": 0.00018348182347812931, |
|
"loss": 0.2601, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.7799564270152506, |
|
"grad_norm": 0.8545548915863037, |
|
"learning_rate": 0.00018338646031301458, |
|
"loss": 0.3604, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.7821350762527233, |
|
"grad_norm": 0.7516512870788574, |
|
"learning_rate": 0.00018329084759450192, |
|
"loss": 0.2489, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.7843137254901961, |
|
"grad_norm": 0.8581446409225464, |
|
"learning_rate": 0.00018319498560873476, |
|
"loss": 0.28, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.7864923747276689, |
|
"grad_norm": 0.8704679608345032, |
|
"learning_rate": 0.00018309887464260238, |
|
"loss": 0.3917, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.7886710239651417, |
|
"grad_norm": 0.832797110080719, |
|
"learning_rate": 0.00018300251498373923, |
|
"loss": 0.3356, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.7908496732026143, |
|
"grad_norm": 0.787756085395813, |
|
"learning_rate": 0.00018290590692052398, |
|
"loss": 0.3156, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.7930283224400871, |
|
"grad_norm": 1.014853596687317, |
|
"learning_rate": 0.00018280905074207884, |
|
"loss": 0.3131, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.7952069716775599, |
|
"grad_norm": 0.7135484218597412, |
|
"learning_rate": 0.00018271194673826838, |
|
"loss": 0.1935, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.7973856209150327, |
|
"grad_norm": 0.6172918677330017, |
|
"learning_rate": 0.000182614595199699, |
|
"loss": 0.1916, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.7995642701525054, |
|
"grad_norm": 0.689791738986969, |
|
"learning_rate": 0.00018251699641771784, |
|
"loss": 0.1738, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.8017429193899782, |
|
"grad_norm": 0.8571555614471436, |
|
"learning_rate": 0.00018241915068441196, |
|
"loss": 0.2738, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.803921568627451, |
|
"grad_norm": 0.7348290681838989, |
|
"learning_rate": 0.00018232105829260752, |
|
"loss": 0.1617, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.8061002178649237, |
|
"grad_norm": 0.72556471824646, |
|
"learning_rate": 0.00018222271953586883, |
|
"loss": 0.2201, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.8082788671023965, |
|
"grad_norm": 0.915581226348877, |
|
"learning_rate": 0.0001821241347084975, |
|
"loss": 0.3508, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.8104575163398693, |
|
"grad_norm": 0.9371058344841003, |
|
"learning_rate": 0.00018202530410553163, |
|
"loss": 0.3495, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.8126361655773421, |
|
"grad_norm": 0.696180522441864, |
|
"learning_rate": 0.00018192622802274476, |
|
"loss": 0.2297, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.8148148148148148, |
|
"grad_norm": 1.2849663496017456, |
|
"learning_rate": 0.00018182690675664514, |
|
"loss": 0.3872, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.8169934640522876, |
|
"grad_norm": 0.7671440243721008, |
|
"learning_rate": 0.00018172734060447482, |
|
"loss": 0.2522, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.8191721132897604, |
|
"grad_norm": 0.7630666494369507, |
|
"learning_rate": 0.00018162752986420868, |
|
"loss": 0.1857, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.8213507625272332, |
|
"grad_norm": 0.8664875626564026, |
|
"learning_rate": 0.00018152747483455358, |
|
"loss": 0.3085, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.8235294117647058, |
|
"grad_norm": 0.7418748140335083, |
|
"learning_rate": 0.0001814271758149475, |
|
"loss": 0.2537, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.8257080610021786, |
|
"grad_norm": 0.9873255491256714, |
|
"learning_rate": 0.0001813266331055586, |
|
"loss": 0.335, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.8278867102396514, |
|
"grad_norm": 0.8407981991767883, |
|
"learning_rate": 0.00018122584700728443, |
|
"loss": 0.2625, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.8300653594771242, |
|
"grad_norm": 0.8493285179138184, |
|
"learning_rate": 0.0001811248178217507, |
|
"loss": 0.2167, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.8322440087145969, |
|
"grad_norm": 0.7906709909439087, |
|
"learning_rate": 0.00018102354585131092, |
|
"loss": 0.2919, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.8344226579520697, |
|
"grad_norm": 0.8033335208892822, |
|
"learning_rate": 0.00018092203139904496, |
|
"loss": 0.2571, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.8366013071895425, |
|
"grad_norm": 0.616438627243042, |
|
"learning_rate": 0.00018082027476875847, |
|
"loss": 0.2102, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.8387799564270153, |
|
"grad_norm": 0.6517376899719238, |
|
"learning_rate": 0.00018071827626498185, |
|
"loss": 0.2242, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.840958605664488, |
|
"grad_norm": 0.7315549254417419, |
|
"learning_rate": 0.00018061603619296942, |
|
"loss": 0.2594, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.8431372549019608, |
|
"grad_norm": 0.6937726736068726, |
|
"learning_rate": 0.00018051355485869833, |
|
"loss": 0.1991, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.8453159041394336, |
|
"grad_norm": 0.7155842185020447, |
|
"learning_rate": 0.0001804108325688679, |
|
"loss": 0.2051, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.8474945533769063, |
|
"grad_norm": 1.1797689199447632, |
|
"learning_rate": 0.00018030786963089845, |
|
"loss": 0.3421, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.8496732026143791, |
|
"grad_norm": 0.7603274583816528, |
|
"learning_rate": 0.00018020466635293057, |
|
"loss": 0.1985, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8518518518518519, |
|
"grad_norm": 0.7939961552619934, |
|
"learning_rate": 0.0001801012230438241, |
|
"loss": 0.2753, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.8540305010893247, |
|
"grad_norm": 0.8357366323471069, |
|
"learning_rate": 0.0001799975400131572, |
|
"loss": 0.3453, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.8562091503267973, |
|
"grad_norm": 0.7716991305351257, |
|
"learning_rate": 0.00017989361757122553, |
|
"loss": 0.2441, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.8583877995642701, |
|
"grad_norm": 0.7844340205192566, |
|
"learning_rate": 0.00017978945602904116, |
|
"loss": 0.3635, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.8605664488017429, |
|
"grad_norm": 0.6692304611206055, |
|
"learning_rate": 0.00017968505569833173, |
|
"loss": 0.1895, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.8627450980392157, |
|
"grad_norm": 0.7669034004211426, |
|
"learning_rate": 0.0001795804168915396, |
|
"loss": 0.1833, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.8649237472766884, |
|
"grad_norm": 0.9231857657432556, |
|
"learning_rate": 0.00017947553992182075, |
|
"loss": 0.3172, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.8671023965141612, |
|
"grad_norm": 0.9847748875617981, |
|
"learning_rate": 0.00017937042510304392, |
|
"loss": 0.3479, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.869281045751634, |
|
"grad_norm": 0.719962477684021, |
|
"learning_rate": 0.00017926507274978963, |
|
"loss": 0.2653, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.8714596949891068, |
|
"grad_norm": 0.7490435242652893, |
|
"learning_rate": 0.00017915948317734942, |
|
"loss": 0.2387, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8736383442265795, |
|
"grad_norm": 0.744804322719574, |
|
"learning_rate": 0.00017905365670172458, |
|
"loss": 0.2368, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.8758169934640523, |
|
"grad_norm": 0.6886241436004639, |
|
"learning_rate": 0.00017894759363962554, |
|
"loss": 0.2125, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.8779956427015251, |
|
"grad_norm": 0.7730022072792053, |
|
"learning_rate": 0.0001788412943084707, |
|
"loss": 0.2253, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.8801742919389978, |
|
"grad_norm": 0.8058857321739197, |
|
"learning_rate": 0.00017873475902638553, |
|
"loss": 0.2845, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.8823529411764706, |
|
"grad_norm": 0.6579281091690063, |
|
"learning_rate": 0.0001786279881122017, |
|
"loss": 0.1922, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.8845315904139434, |
|
"grad_norm": 0.6769328713417053, |
|
"learning_rate": 0.00017852098188545602, |
|
"loss": 0.1609, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.8867102396514162, |
|
"grad_norm": 0.7010467648506165, |
|
"learning_rate": 0.0001784137406663895, |
|
"loss": 0.2387, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.7626838088035583, |
|
"learning_rate": 0.00017830626477594654, |
|
"loss": 0.237, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.8910675381263616, |
|
"grad_norm": 0.8782392144203186, |
|
"learning_rate": 0.0001781985545357737, |
|
"loss": 0.3278, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.8932461873638344, |
|
"grad_norm": 0.6178898215293884, |
|
"learning_rate": 0.00017809061026821896, |
|
"loss": 0.1584, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.8954248366013072, |
|
"grad_norm": 0.7101506590843201, |
|
"learning_rate": 0.00017798243229633068, |
|
"loss": 0.2384, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.8976034858387799, |
|
"grad_norm": 0.5807170867919922, |
|
"learning_rate": 0.00017787402094385666, |
|
"loss": 0.1256, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.8997821350762527, |
|
"grad_norm": 0.6202595829963684, |
|
"learning_rate": 0.00017776537653524307, |
|
"loss": 0.168, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.9019607843137255, |
|
"grad_norm": 0.5887298583984375, |
|
"learning_rate": 0.00017765649939563365, |
|
"loss": 0.1411, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.9041394335511983, |
|
"grad_norm": 0.745570957660675, |
|
"learning_rate": 0.0001775473898508685, |
|
"loss": 0.2842, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.906318082788671, |
|
"grad_norm": 0.8904745578765869, |
|
"learning_rate": 0.00017743804822748345, |
|
"loss": 0.3217, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.9084967320261438, |
|
"grad_norm": 0.8730976581573486, |
|
"learning_rate": 0.0001773284748527087, |
|
"loss": 0.3403, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.9106753812636166, |
|
"grad_norm": 0.7911598086357117, |
|
"learning_rate": 0.00017721867005446806, |
|
"loss": 0.2731, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.9128540305010894, |
|
"grad_norm": 0.6702750325202942, |
|
"learning_rate": 0.00017710863416137805, |
|
"loss": 0.2073, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.9150326797385621, |
|
"grad_norm": 0.5198416113853455, |
|
"learning_rate": 0.00017699836750274662, |
|
"loss": 0.129, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.9172113289760349, |
|
"grad_norm": 0.6400766372680664, |
|
"learning_rate": 0.00017688787040857245, |
|
"loss": 0.1682, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.9193899782135077, |
|
"grad_norm": 0.7851924896240234, |
|
"learning_rate": 0.00017677714320954378, |
|
"loss": 0.221, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.9215686274509803, |
|
"grad_norm": 0.7246972918510437, |
|
"learning_rate": 0.0001766661862370376, |
|
"loss": 0.2438, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.9237472766884531, |
|
"grad_norm": 0.7575098872184753, |
|
"learning_rate": 0.00017655499982311847, |
|
"loss": 0.2633, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.9259259259259259, |
|
"grad_norm": 0.902695894241333, |
|
"learning_rate": 0.0001764435843005376, |
|
"loss": 0.3567, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9281045751633987, |
|
"grad_norm": 0.7910832762718201, |
|
"learning_rate": 0.00017633194000273188, |
|
"loss": 0.2578, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.9302832244008714, |
|
"grad_norm": 0.7221683263778687, |
|
"learning_rate": 0.00017622006726382287, |
|
"loss": 0.2075, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.9324618736383442, |
|
"grad_norm": 0.7584638595581055, |
|
"learning_rate": 0.00017610796641861581, |
|
"loss": 0.2313, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.934640522875817, |
|
"grad_norm": 0.6272245645523071, |
|
"learning_rate": 0.00017599563780259858, |
|
"loss": 0.1876, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.9368191721132898, |
|
"grad_norm": 0.8224185705184937, |
|
"learning_rate": 0.0001758830817519407, |
|
"loss": 0.2558, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.9389978213507625, |
|
"grad_norm": 0.7095481157302856, |
|
"learning_rate": 0.00017577029860349233, |
|
"loss": 0.2256, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.9411764705882353, |
|
"grad_norm": 0.7715668678283691, |
|
"learning_rate": 0.00017565728869478337, |
|
"loss": 0.2483, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.9433551198257081, |
|
"grad_norm": 0.905522882938385, |
|
"learning_rate": 0.00017554405236402222, |
|
"loss": 0.2525, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.9455337690631809, |
|
"grad_norm": 0.6268091797828674, |
|
"learning_rate": 0.00017543058995009503, |
|
"loss": 0.1703, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.9477124183006536, |
|
"grad_norm": 0.730185329914093, |
|
"learning_rate": 0.0001753169017925644, |
|
"loss": 0.2187, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.9498910675381264, |
|
"grad_norm": 0.7279319763183594, |
|
"learning_rate": 0.00017520298823166873, |
|
"loss": 0.2584, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.9520697167755992, |
|
"grad_norm": 0.7050124406814575, |
|
"learning_rate": 0.00017508884960832076, |
|
"loss": 0.1723, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.954248366013072, |
|
"grad_norm": 0.7544010281562805, |
|
"learning_rate": 0.000174974486264107, |
|
"loss": 0.1899, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.9564270152505446, |
|
"grad_norm": 0.8714267015457153, |
|
"learning_rate": 0.00017485989854128627, |
|
"loss": 0.2518, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.9586056644880174, |
|
"grad_norm": 0.7128406167030334, |
|
"learning_rate": 0.00017474508678278915, |
|
"loss": 0.1832, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.9607843137254902, |
|
"grad_norm": 0.7446244955062866, |
|
"learning_rate": 0.00017463005133221645, |
|
"loss": 0.2089, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.9629629629629629, |
|
"grad_norm": 0.7556454539299011, |
|
"learning_rate": 0.00017451479253383857, |
|
"loss": 0.2327, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.9651416122004357, |
|
"grad_norm": 0.5932292938232422, |
|
"learning_rate": 0.00017439931073259427, |
|
"loss": 0.1094, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.9673202614379085, |
|
"grad_norm": 0.909900426864624, |
|
"learning_rate": 0.00017428360627408978, |
|
"loss": 0.2175, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.9694989106753813, |
|
"grad_norm": 0.7743591666221619, |
|
"learning_rate": 0.00017416767950459766, |
|
"loss": 0.1818, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.971677559912854, |
|
"grad_norm": 0.7306420207023621, |
|
"learning_rate": 0.0001740515307710557, |
|
"loss": 0.1957, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.9738562091503268, |
|
"grad_norm": 0.5480270981788635, |
|
"learning_rate": 0.00017393516042106603, |
|
"loss": 0.1477, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.9760348583877996, |
|
"grad_norm": 0.7661142945289612, |
|
"learning_rate": 0.000173818568802894, |
|
"loss": 0.2177, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.9782135076252724, |
|
"grad_norm": 0.8411223292350769, |
|
"learning_rate": 0.00017370175626546728, |
|
"loss": 0.2104, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.9803921568627451, |
|
"grad_norm": 0.816074013710022, |
|
"learning_rate": 0.00017358472315837447, |
|
"loss": 0.2349, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.9825708061002179, |
|
"grad_norm": 1.0009855031967163, |
|
"learning_rate": 0.00017346746983186442, |
|
"loss": 0.2917, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.9847494553376906, |
|
"grad_norm": 0.7690507769584656, |
|
"learning_rate": 0.00017334999663684504, |
|
"loss": 0.2216, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.9869281045751634, |
|
"grad_norm": 0.7864702939987183, |
|
"learning_rate": 0.00017323230392488222, |
|
"loss": 0.2601, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.9891067538126361, |
|
"grad_norm": 0.8309422731399536, |
|
"learning_rate": 0.00017311439204819874, |
|
"loss": 0.2257, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.9912854030501089, |
|
"grad_norm": 0.7395288944244385, |
|
"learning_rate": 0.00017299626135967343, |
|
"loss": 0.1859, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.9934640522875817, |
|
"grad_norm": 0.6524999737739563, |
|
"learning_rate": 0.00017287791221283984, |
|
"loss": 0.1523, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.9956427015250545, |
|
"grad_norm": 0.6195773482322693, |
|
"learning_rate": 0.00017275934496188534, |
|
"loss": 0.1307, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.9978213507625272, |
|
"grad_norm": 0.8236491084098816, |
|
"learning_rate": 0.00017264055996165007, |
|
"loss": 0.2214, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.5916262865066528, |
|
"learning_rate": 0.00017252155756762575, |
|
"loss": 0.1633, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.0021786492374727, |
|
"grad_norm": 1.0080763101577759, |
|
"learning_rate": 0.00017240233813595478, |
|
"loss": 0.2227, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.0021786492374727, |
|
"eval_loss": 0.3434114158153534, |
|
"eval_runtime": 0.9376, |
|
"eval_samples_per_second": 182.377, |
|
"eval_steps_per_second": 13.865, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.0043572984749456, |
|
"grad_norm": 0.8395419716835022, |
|
"learning_rate": 0.00017228290202342907, |
|
"loss": 0.1963, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.0065359477124183, |
|
"grad_norm": 0.8859379291534424, |
|
"learning_rate": 0.000172163249587489, |
|
"loss": 0.2992, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.008714596949891, |
|
"grad_norm": 0.8899995684623718, |
|
"learning_rate": 0.00017204338118622232, |
|
"loss": 0.2504, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.0108932461873639, |
|
"grad_norm": 0.7307553291320801, |
|
"learning_rate": 0.00017192329717836315, |
|
"loss": 0.166, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.0130718954248366, |
|
"grad_norm": 0.9266228079795837, |
|
"learning_rate": 0.00017180299792329086, |
|
"loss": 0.2422, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.0152505446623095, |
|
"grad_norm": 0.7402477264404297, |
|
"learning_rate": 0.00017168248378102892, |
|
"loss": 0.1896, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.0174291938997821, |
|
"grad_norm": 0.7960779070854187, |
|
"learning_rate": 0.00017156175511224403, |
|
"loss": 0.254, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.0196078431372548, |
|
"grad_norm": 0.6837732791900635, |
|
"learning_rate": 0.0001714408122782448, |
|
"loss": 0.1974, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.0217864923747277, |
|
"grad_norm": 0.7475525736808777, |
|
"learning_rate": 0.00017131965564098084, |
|
"loss": 0.1873, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.0239651416122004, |
|
"grad_norm": 0.7993423342704773, |
|
"learning_rate": 0.0001711982855630416, |
|
"loss": 0.1735, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.026143790849673, |
|
"grad_norm": 0.803156852722168, |
|
"learning_rate": 0.00017107670240765527, |
|
"loss": 0.2003, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.0021786492374727, |
|
"grad_norm": 0.7278410196304321, |
|
"learning_rate": 0.00017095490653868778, |
|
"loss": 0.1855, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.0043572984749456, |
|
"grad_norm": 0.6922372579574585, |
|
"learning_rate": 0.0001708328983206416, |
|
"loss": 0.1877, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.0065359477124183, |
|
"grad_norm": 0.7497193813323975, |
|
"learning_rate": 0.00017071067811865476, |
|
"loss": 0.1668, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.008714596949891, |
|
"grad_norm": 0.7669565081596375, |
|
"learning_rate": 0.00017058824629849966, |
|
"loss": 0.1425, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.0108932461873639, |
|
"grad_norm": 0.9330861568450928, |
|
"learning_rate": 0.000170465603226582, |
|
"loss": 0.2458, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.0130718954248366, |
|
"grad_norm": 0.6653205156326294, |
|
"learning_rate": 0.00017034274926993977, |
|
"loss": 0.1898, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.0152505446623095, |
|
"grad_norm": 0.6905311346054077, |
|
"learning_rate": 0.00017021968479624203, |
|
"loss": 0.1874, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.0174291938997821, |
|
"grad_norm": 0.5661349892616272, |
|
"learning_rate": 0.00017009641017378784, |
|
"loss": 0.109, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.0196078431372548, |
|
"grad_norm": 0.6994882822036743, |
|
"learning_rate": 0.00016997292577150528, |
|
"loss": 0.1615, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.0217864923747277, |
|
"grad_norm": 0.8208505511283875, |
|
"learning_rate": 0.00016984923195895011, |
|
"loss": 0.2553, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.0239651416122004, |
|
"grad_norm": 0.6243330240249634, |
|
"learning_rate": 0.0001697253291063049, |
|
"loss": 0.1638, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.026143790849673, |
|
"grad_norm": 0.7907949090003967, |
|
"learning_rate": 0.00016960121758437775, |
|
"loss": 0.2016, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.028322440087146, |
|
"grad_norm": 0.8235294222831726, |
|
"learning_rate": 0.0001694768977646013, |
|
"loss": 0.2052, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.0305010893246187, |
|
"grad_norm": 0.6623209714889526, |
|
"learning_rate": 0.00016935237001903158, |
|
"loss": 0.1608, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.0326797385620916, |
|
"grad_norm": 0.7501957416534424, |
|
"learning_rate": 0.00016922763472034685, |
|
"loss": 0.1528, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.0348583877995643, |
|
"grad_norm": 0.7507126331329346, |
|
"learning_rate": 0.00016910269224184655, |
|
"loss": 0.1717, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.037037037037037, |
|
"grad_norm": 0.6387655138969421, |
|
"learning_rate": 0.00016897754295745008, |
|
"loss": 0.1594, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.0392156862745099, |
|
"grad_norm": 0.7997871041297913, |
|
"learning_rate": 0.00016885218724169588, |
|
"loss": 0.2297, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.0413943355119826, |
|
"grad_norm": 0.7794216871261597, |
|
"learning_rate": 0.00016872662546974008, |
|
"loss": 0.2092, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.0435729847494553, |
|
"grad_norm": 0.7575411200523376, |
|
"learning_rate": 0.00016860085801735552, |
|
"loss": 0.1465, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.0457516339869282, |
|
"grad_norm": 0.876860499382019, |
|
"learning_rate": 0.0001684748852609306, |
|
"loss": 0.2041, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.0479302832244008, |
|
"grad_norm": 0.8270288109779358, |
|
"learning_rate": 0.00016834870757746813, |
|
"loss": 0.1615, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.0501089324618735, |
|
"grad_norm": 0.7617962956428528, |
|
"learning_rate": 0.00016822232534458416, |
|
"loss": 0.1708, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.0522875816993464, |
|
"grad_norm": 0.7531927227973938, |
|
"learning_rate": 0.00016809573894050703, |
|
"loss": 0.2026, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.0544662309368191, |
|
"grad_norm": 0.6922577619552612, |
|
"learning_rate": 0.00016796894874407595, |
|
"loss": 0.1573, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.056644880174292, |
|
"grad_norm": 0.75091153383255, |
|
"learning_rate": 0.00016784195513474013, |
|
"loss": 0.2269, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.0588235294117647, |
|
"grad_norm": 0.6911150813102722, |
|
"learning_rate": 0.00016771475849255754, |
|
"loss": 0.1471, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.0610021786492374, |
|
"grad_norm": 0.8379197120666504, |
|
"learning_rate": 0.0001675873591981937, |
|
"loss": 0.2183, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.0631808278867103, |
|
"grad_norm": 0.6960466504096985, |
|
"learning_rate": 0.0001674597576329207, |
|
"loss": 0.1918, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.065359477124183, |
|
"grad_norm": 0.7402621507644653, |
|
"learning_rate": 0.00016733195417861592, |
|
"loss": 0.1682, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.0675381263616557, |
|
"grad_norm": 0.7742673754692078, |
|
"learning_rate": 0.00016720394921776097, |
|
"loss": 0.1559, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.0697167755991286, |
|
"grad_norm": 0.864452064037323, |
|
"learning_rate": 0.00016707574313344048, |
|
"loss": 0.1899, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.0718954248366013, |
|
"grad_norm": 0.8253500461578369, |
|
"learning_rate": 0.000166947336309341, |
|
"loss": 0.1949, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.074074074074074, |
|
"grad_norm": 0.8021739721298218, |
|
"learning_rate": 0.00016681872912974988, |
|
"loss": 0.2002, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.0762527233115469, |
|
"grad_norm": 0.6518934965133667, |
|
"learning_rate": 0.00016668992197955398, |
|
"loss": 0.1708, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.0784313725490196, |
|
"grad_norm": 0.7878454923629761, |
|
"learning_rate": 0.0001665609152442388, |
|
"loss": 0.1642, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.0806100217864925, |
|
"grad_norm": 0.7697330713272095, |
|
"learning_rate": 0.00016643170930988698, |
|
"loss": 0.2059, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.0827886710239651, |
|
"grad_norm": 0.7072964310646057, |
|
"learning_rate": 0.0001663023045631773, |
|
"loss": 0.1654, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.0849673202614378, |
|
"grad_norm": 0.7548463940620422, |
|
"learning_rate": 0.00016617270139138371, |
|
"loss": 0.1562, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.0871459694989107, |
|
"grad_norm": 0.8657066226005554, |
|
"learning_rate": 0.00016604290018237377, |
|
"loss": 0.2446, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.0893246187363834, |
|
"grad_norm": 0.6990319490432739, |
|
"learning_rate": 0.0001659129013246079, |
|
"loss": 0.1481, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.091503267973856, |
|
"grad_norm": 0.7399086952209473, |
|
"learning_rate": 0.0001657827052071379, |
|
"loss": 0.1822, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.093681917211329, |
|
"grad_norm": 0.47281554341316223, |
|
"learning_rate": 0.000165652312219606, |
|
"loss": 0.077, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.0958605664488017, |
|
"grad_norm": 0.7572283744812012, |
|
"learning_rate": 0.00016552172275224357, |
|
"loss": 0.1543, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.0980392156862746, |
|
"grad_norm": 0.5454416275024414, |
|
"learning_rate": 0.00016539093719586994, |
|
"loss": 0.0874, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.1002178649237473, |
|
"grad_norm": 0.7739827036857605, |
|
"learning_rate": 0.00016525995594189144, |
|
"loss": 0.1887, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.10239651416122, |
|
"grad_norm": 0.9118095636367798, |
|
"learning_rate": 0.00016512877938229986, |
|
"loss": 0.2067, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.1045751633986929, |
|
"grad_norm": 0.6182532906532288, |
|
"learning_rate": 0.0001649974079096717, |
|
"loss": 0.0981, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.1067538126361656, |
|
"grad_norm": 0.8062636256217957, |
|
"learning_rate": 0.0001648658419171666, |
|
"loss": 0.1637, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.1089324618736383, |
|
"grad_norm": 0.5750370025634766, |
|
"learning_rate": 0.00016473408179852646, |
|
"loss": 0.091, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.1111111111111112, |
|
"grad_norm": 1.024340271949768, |
|
"learning_rate": 0.00016460212794807414, |
|
"loss": 0.206, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.1132897603485838, |
|
"grad_norm": 0.6996582746505737, |
|
"learning_rate": 0.00016446998076071224, |
|
"loss": 0.1382, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.1154684095860565, |
|
"grad_norm": 0.7497454881668091, |
|
"learning_rate": 0.00016433764063192194, |
|
"loss": 0.2302, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.1176470588235294, |
|
"grad_norm": 0.7948709726333618, |
|
"learning_rate": 0.00016420510795776196, |
|
"loss": 0.2015, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.1198257080610021, |
|
"grad_norm": 0.5821182131767273, |
|
"learning_rate": 0.00016407238313486712, |
|
"loss": 0.1278, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.122004357298475, |
|
"grad_norm": 0.773891031742096, |
|
"learning_rate": 0.00016393946656044744, |
|
"loss": 0.1889, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.1241830065359477, |
|
"grad_norm": 0.7371392846107483, |
|
"learning_rate": 0.0001638063586322866, |
|
"loss": 0.1965, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.1263616557734204, |
|
"grad_norm": 0.7355236411094666, |
|
"learning_rate": 0.0001636730597487412, |
|
"loss": 0.1587, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.1285403050108933, |
|
"grad_norm": 0.7013958096504211, |
|
"learning_rate": 0.0001635395703087391, |
|
"loss": 0.1596, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.130718954248366, |
|
"grad_norm": 0.707258403301239, |
|
"learning_rate": 0.00016340589071177854, |
|
"loss": 0.1792, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.132897603485839, |
|
"grad_norm": 0.672795295715332, |
|
"learning_rate": 0.00016327202135792685, |
|
"loss": 0.1562, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.1350762527233116, |
|
"grad_norm": 0.6543575525283813, |
|
"learning_rate": 0.00016313796264781925, |
|
"loss": 0.1348, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.1372549019607843, |
|
"grad_norm": 0.755180299282074, |
|
"learning_rate": 0.00016300371498265763, |
|
"loss": 0.1645, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.1394335511982572, |
|
"grad_norm": 0.7156417965888977, |
|
"learning_rate": 0.0001628692787642094, |
|
"loss": 0.1231, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.1416122004357299, |
|
"grad_norm": 0.7680862545967102, |
|
"learning_rate": 0.00016273465439480618, |
|
"loss": 0.1644, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.1437908496732025, |
|
"grad_norm": 0.6331315040588379, |
|
"learning_rate": 0.00016259984227734285, |
|
"loss": 0.1276, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.1459694989106755, |
|
"grad_norm": 0.8776397109031677, |
|
"learning_rate": 0.000162464842815276, |
|
"loss": 0.1976, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.1481481481481481, |
|
"grad_norm": 0.9892627000808716, |
|
"learning_rate": 0.00016232965641262297, |
|
"loss": 0.2732, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.1503267973856208, |
|
"grad_norm": 0.6039031147956848, |
|
"learning_rate": 0.00016219428347396053, |
|
"loss": 0.126, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.1525054466230937, |
|
"grad_norm": 0.757251501083374, |
|
"learning_rate": 0.0001620587244044237, |
|
"loss": 0.1848, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.1546840958605664, |
|
"grad_norm": 0.6978369355201721, |
|
"learning_rate": 0.0001619229796097046, |
|
"loss": 0.1364, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.156862745098039, |
|
"grad_norm": 0.727165937423706, |
|
"learning_rate": 0.00016178704949605113, |
|
"loss": 0.144, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.159041394335512, |
|
"grad_norm": 0.784420371055603, |
|
"learning_rate": 0.0001616509344702658, |
|
"loss": 0.1917, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.1612200435729847, |
|
"grad_norm": 0.8775203824043274, |
|
"learning_rate": 0.00016151463493970446, |
|
"loss": 0.1981, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.1633986928104576, |
|
"grad_norm": 0.8711812496185303, |
|
"learning_rate": 0.00016137815131227526, |
|
"loss": 0.1516, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.1655773420479303, |
|
"grad_norm": 0.5256686806678772, |
|
"learning_rate": 0.00016124148399643723, |
|
"loss": 0.09, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.167755991285403, |
|
"grad_norm": 0.7360031008720398, |
|
"learning_rate": 0.00016110463340119913, |
|
"loss": 0.1377, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.1699346405228759, |
|
"grad_norm": 0.7044847011566162, |
|
"learning_rate": 0.0001609675999361182, |
|
"loss": 0.117, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.1721132897603486, |
|
"grad_norm": 0.8387324810028076, |
|
"learning_rate": 0.000160830384011299, |
|
"loss": 0.2031, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.1742919389978215, |
|
"grad_norm": 0.8079413175582886, |
|
"learning_rate": 0.00016069298603739216, |
|
"loss": 0.1536, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.1764705882352942, |
|
"grad_norm": 0.8063020706176758, |
|
"learning_rate": 0.00016055540642559305, |
|
"loss": 0.1378, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.1786492374727668, |
|
"grad_norm": 0.6223093271255493, |
|
"learning_rate": 0.0001604176455876408, |
|
"loss": 0.1464, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.1808278867102397, |
|
"grad_norm": 0.6043615341186523, |
|
"learning_rate": 0.00016027970393581666, |
|
"loss": 0.1304, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.1830065359477124, |
|
"grad_norm": 0.6139847636222839, |
|
"learning_rate": 0.00016014158188294326, |
|
"loss": 0.1109, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.1851851851851851, |
|
"grad_norm": 0.790346622467041, |
|
"learning_rate": 0.00016000327984238292, |
|
"loss": 0.1916, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.187363834422658, |
|
"grad_norm": 0.5799733996391296, |
|
"learning_rate": 0.00015986479822803671, |
|
"loss": 0.1181, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.1895424836601307, |
|
"grad_norm": 0.8529340624809265, |
|
"learning_rate": 0.00015972613745434314, |
|
"loss": 0.1856, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.1917211328976034, |
|
"grad_norm": 0.7188248038291931, |
|
"learning_rate": 0.00015958729793627682, |
|
"loss": 0.1505, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.1938997821350763, |
|
"grad_norm": 1.0040041208267212, |
|
"learning_rate": 0.0001594482800893474, |
|
"loss": 0.2638, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.196078431372549, |
|
"grad_norm": 0.6917260885238647, |
|
"learning_rate": 0.00015930908432959808, |
|
"loss": 0.1185, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.1982570806100217, |
|
"grad_norm": 0.7081711292266846, |
|
"learning_rate": 0.00015916971107360461, |
|
"loss": 0.1343, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.2004357298474946, |
|
"grad_norm": 0.8055239319801331, |
|
"learning_rate": 0.0001590301607384739, |
|
"loss": 0.2216, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.2026143790849673, |
|
"grad_norm": 0.9081418514251709, |
|
"learning_rate": 0.00015889043374184286, |
|
"loss": 0.2004, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.2047930283224402, |
|
"grad_norm": 0.7224873304367065, |
|
"learning_rate": 0.00015875053050187706, |
|
"loss": 0.1824, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.2069716775599129, |
|
"grad_norm": 0.515967607498169, |
|
"learning_rate": 0.00015861045143726946, |
|
"loss": 0.0807, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.2091503267973855, |
|
"grad_norm": 0.6762127876281738, |
|
"learning_rate": 0.0001584701969672393, |
|
"loss": 0.1481, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.2113289760348585, |
|
"grad_norm": 0.8208984732627869, |
|
"learning_rate": 0.00015832976751153078, |
|
"loss": 0.1828, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.2135076252723311, |
|
"grad_norm": 0.6228875517845154, |
|
"learning_rate": 0.00015818916349041165, |
|
"loss": 0.1115, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.215686274509804, |
|
"grad_norm": 0.7444784641265869, |
|
"learning_rate": 0.0001580483853246723, |
|
"loss": 0.1559, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.2178649237472767, |
|
"grad_norm": 0.8666041493415833, |
|
"learning_rate": 0.00015790743343562408, |
|
"loss": 0.1908, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.2200435729847494, |
|
"grad_norm": 0.7155983448028564, |
|
"learning_rate": 0.0001577663082450984, |
|
"loss": 0.1807, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.2222222222222223, |
|
"grad_norm": 0.7652324438095093, |
|
"learning_rate": 0.0001576250101754452, |
|
"loss": 0.1689, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.224400871459695, |
|
"grad_norm": 0.5946928262710571, |
|
"learning_rate": 0.00015748353964953186, |
|
"loss": 0.0888, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.2265795206971677, |
|
"grad_norm": 0.6897603869438171, |
|
"learning_rate": 0.00015734189709074188, |
|
"loss": 0.1381, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.2265795206971677, |
|
"eval_loss": 0.3376106917858124, |
|
"eval_runtime": 0.9372, |
|
"eval_samples_per_second": 182.463, |
|
"eval_steps_per_second": 13.871, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.2287581699346406, |
|
"grad_norm": 0.700342059135437, |
|
"learning_rate": 0.00015720008292297364, |
|
"loss": 0.1898, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.2309368191721133, |
|
"grad_norm": 0.6738066077232361, |
|
"learning_rate": 0.00015705809757063897, |
|
"loss": 0.1543, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.233115468409586, |
|
"grad_norm": 0.5636610984802246, |
|
"learning_rate": 0.00015691594145866215, |
|
"loss": 0.1218, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.2352941176470589, |
|
"grad_norm": 0.7896304130554199, |
|
"learning_rate": 0.00015677361501247844, |
|
"loss": 0.1856, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.2374727668845316, |
|
"grad_norm": 0.7217385768890381, |
|
"learning_rate": 0.00015663111865803285, |
|
"loss": 0.1399, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.2396514161220042, |
|
"grad_norm": 0.7752529978752136, |
|
"learning_rate": 0.00015648845282177892, |
|
"loss": 0.1501, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.2418300653594772, |
|
"grad_norm": 0.9684514403343201, |
|
"learning_rate": 0.00015634561793067737, |
|
"loss": 0.246, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.2440087145969498, |
|
"grad_norm": 0.6930341124534607, |
|
"learning_rate": 0.0001562026144121949, |
|
"loss": 0.145, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.2461873638344227, |
|
"grad_norm": 0.701931357383728, |
|
"learning_rate": 0.00015605944269430277, |
|
"loss": 0.1354, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.2483660130718954, |
|
"grad_norm": 0.6491216421127319, |
|
"learning_rate": 0.00015591610320547574, |
|
"loss": 0.1297, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.2505446623093681, |
|
"grad_norm": 0.6969993710517883, |
|
"learning_rate": 0.00015577259637469058, |
|
"loss": 0.1387, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.252723311546841, |
|
"grad_norm": 0.915093183517456, |
|
"learning_rate": 0.00015562892263142487, |
|
"loss": 0.2019, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.2549019607843137, |
|
"grad_norm": 0.5827008485794067, |
|
"learning_rate": 0.00015548508240565583, |
|
"loss": 0.0996, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.2570806100217866, |
|
"grad_norm": 0.6576254963874817, |
|
"learning_rate": 0.00015534107612785874, |
|
"loss": 0.1374, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.2592592592592593, |
|
"grad_norm": 0.7204159498214722, |
|
"learning_rate": 0.00015519690422900593, |
|
"loss": 0.173, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.261437908496732, |
|
"grad_norm": 0.8468919396400452, |
|
"learning_rate": 0.00015505256714056536, |
|
"loss": 0.208, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.263616557734205, |
|
"grad_norm": 0.7702469229698181, |
|
"learning_rate": 0.00015490806529449945, |
|
"loss": 0.1471, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.2657952069716776, |
|
"grad_norm": 0.8383822441101074, |
|
"learning_rate": 0.00015476339912326356, |
|
"loss": 0.2016, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.2679738562091503, |
|
"grad_norm": 0.9662691354751587, |
|
"learning_rate": 0.0001546185690598049, |
|
"loss": 0.1543, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.2701525054466232, |
|
"grad_norm": 0.7026709914207458, |
|
"learning_rate": 0.00015447357553756115, |
|
"loss": 0.1429, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.2723311546840959, |
|
"grad_norm": 0.4856622815132141, |
|
"learning_rate": 0.0001543284189904592, |
|
"loss": 0.0799, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.2745098039215685, |
|
"grad_norm": 0.5788432955741882, |
|
"learning_rate": 0.0001541830998529138, |
|
"loss": 0.1108, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.2766884531590414, |
|
"grad_norm": 0.6814302802085876, |
|
"learning_rate": 0.00015403761855982631, |
|
"loss": 0.1247, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.2788671023965141, |
|
"grad_norm": 0.834804892539978, |
|
"learning_rate": 0.0001538919755465834, |
|
"loss": 0.1314, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.2810457516339868, |
|
"grad_norm": 0.6751343011856079, |
|
"learning_rate": 0.00015374617124905564, |
|
"loss": 0.0955, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.2832244008714597, |
|
"grad_norm": 0.6577438116073608, |
|
"learning_rate": 0.0001536002061035964, |
|
"loss": 0.1376, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.2854030501089324, |
|
"grad_norm": 0.7378783226013184, |
|
"learning_rate": 0.0001534540805470403, |
|
"loss": 0.1563, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.287581699346405, |
|
"grad_norm": 0.7493640780448914, |
|
"learning_rate": 0.00015330779501670217, |
|
"loss": 0.1554, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.289760348583878, |
|
"grad_norm": 0.6582645177841187, |
|
"learning_rate": 0.00015316134995037545, |
|
"loss": 0.1107, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.2919389978213507, |
|
"grad_norm": 0.7354329824447632, |
|
"learning_rate": 0.00015301474578633116, |
|
"loss": 0.1562, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.2941176470588236, |
|
"grad_norm": 0.6455869078636169, |
|
"learning_rate": 0.00015286798296331632, |
|
"loss": 0.1119, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.2962962962962963, |
|
"grad_norm": 0.7667369842529297, |
|
"learning_rate": 0.00015272106192055294, |
|
"loss": 0.1678, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.2984749455337692, |
|
"grad_norm": 0.7993078827857971, |
|
"learning_rate": 0.00015257398309773633, |
|
"loss": 0.179, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.3006535947712419, |
|
"grad_norm": 0.5525803565979004, |
|
"learning_rate": 0.00015242674693503424, |
|
"loss": 0.092, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.3028322440087146, |
|
"grad_norm": 0.6435709595680237, |
|
"learning_rate": 0.00015227935387308511, |
|
"loss": 0.1371, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.3050108932461875, |
|
"grad_norm": 0.8311496376991272, |
|
"learning_rate": 0.00015213180435299698, |
|
"loss": 0.1474, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.3071895424836601, |
|
"grad_norm": 0.8488547205924988, |
|
"learning_rate": 0.00015198409881634617, |
|
"loss": 0.1924, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.3093681917211328, |
|
"grad_norm": 0.6138650178909302, |
|
"learning_rate": 0.00015183623770517586, |
|
"loss": 0.1032, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.3115468409586057, |
|
"grad_norm": 0.720344066619873, |
|
"learning_rate": 0.0001516882214619949, |
|
"loss": 0.1372, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.3137254901960784, |
|
"grad_norm": 0.6820862293243408, |
|
"learning_rate": 0.00015154005052977633, |
|
"loss": 0.1296, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.3159041394335511, |
|
"grad_norm": 0.7137134671211243, |
|
"learning_rate": 0.00015139172535195617, |
|
"loss": 0.1669, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.318082788671024, |
|
"grad_norm": 0.8309161067008972, |
|
"learning_rate": 0.00015124324637243205, |
|
"loss": 0.1803, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.3202614379084967, |
|
"grad_norm": 0.6600125432014465, |
|
"learning_rate": 0.0001510946140355619, |
|
"loss": 0.1044, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.3224400871459694, |
|
"grad_norm": 0.5543960928916931, |
|
"learning_rate": 0.00015094582878616257, |
|
"loss": 0.0907, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.3246187363834423, |
|
"grad_norm": 0.7326435446739197, |
|
"learning_rate": 0.00015079689106950854, |
|
"loss": 0.1385, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.326797385620915, |
|
"grad_norm": 0.6444526314735413, |
|
"learning_rate": 0.00015064780133133067, |
|
"loss": 0.1075, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.3289760348583877, |
|
"grad_norm": 0.7079174518585205, |
|
"learning_rate": 0.0001504985600178147, |
|
"loss": 0.128, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.3311546840958606, |
|
"grad_norm": 0.7377613186836243, |
|
"learning_rate": 0.00015034916757559997, |
|
"loss": 0.1332, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 0.6091183423995972, |
|
"learning_rate": 0.00015019962445177819, |
|
"loss": 0.0767, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.3355119825708062, |
|
"grad_norm": 0.5283781290054321, |
|
"learning_rate": 0.00015004993109389193, |
|
"loss": 0.077, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.3376906318082789, |
|
"grad_norm": 0.7368810176849365, |
|
"learning_rate": 0.00014990008794993345, |
|
"loss": 0.1237, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.3398692810457518, |
|
"grad_norm": 0.6941757202148438, |
|
"learning_rate": 0.00014975009546834325, |
|
"loss": 0.1072, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.3420479302832244, |
|
"grad_norm": 0.6983378529548645, |
|
"learning_rate": 0.00014959995409800873, |
|
"loss": 0.1323, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.3442265795206971, |
|
"grad_norm": 0.7478280067443848, |
|
"learning_rate": 0.00014944966428826292, |
|
"loss": 0.1574, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.34640522875817, |
|
"grad_norm": 0.6382248997688293, |
|
"learning_rate": 0.00014929922648888308, |
|
"loss": 0.1042, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.3485838779956427, |
|
"grad_norm": 0.616068422794342, |
|
"learning_rate": 0.00014914864115008936, |
|
"loss": 0.1151, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.3507625272331154, |
|
"grad_norm": 0.6361529231071472, |
|
"learning_rate": 0.0001489979087225434, |
|
"loss": 0.1311, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.3529411764705883, |
|
"grad_norm": 0.5792030096054077, |
|
"learning_rate": 0.0001488470296573471, |
|
"loss": 0.1178, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.355119825708061, |
|
"grad_norm": 0.6275602579116821, |
|
"learning_rate": 0.00014869600440604118, |
|
"loss": 0.1048, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.3572984749455337, |
|
"grad_norm": 0.5165741443634033, |
|
"learning_rate": 0.00014854483342060393, |
|
"loss": 0.0938, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.3594771241830066, |
|
"grad_norm": 0.7131217122077942, |
|
"learning_rate": 0.00014839351715344968, |
|
"loss": 0.138, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.3616557734204793, |
|
"grad_norm": 0.7741950750350952, |
|
"learning_rate": 0.0001482420560574276, |
|
"loss": 0.139, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.363834422657952, |
|
"grad_norm": 0.5395483374595642, |
|
"learning_rate": 0.00014809045058582026, |
|
"loss": 0.0794, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.3660130718954249, |
|
"grad_norm": 0.5530499219894409, |
|
"learning_rate": 0.00014793870119234235, |
|
"loss": 0.0871, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.3681917211328976, |
|
"grad_norm": 0.5440542697906494, |
|
"learning_rate": 0.00014778680833113926, |
|
"loss": 0.0921, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.3703703703703702, |
|
"grad_norm": 0.8153759837150574, |
|
"learning_rate": 0.00014763477245678577, |
|
"loss": 0.1351, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.3725490196078431, |
|
"grad_norm": 0.7071852684020996, |
|
"learning_rate": 0.00014748259402428462, |
|
"loss": 0.119, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.3747276688453158, |
|
"grad_norm": 0.6104605793952942, |
|
"learning_rate": 0.00014733027348906518, |
|
"loss": 0.0895, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.3769063180827887, |
|
"grad_norm": 0.6992385983467102, |
|
"learning_rate": 0.00014717781130698212, |
|
"loss": 0.1122, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.3790849673202614, |
|
"grad_norm": 0.6249366402626038, |
|
"learning_rate": 0.00014702520793431404, |
|
"loss": 0.1021, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.3812636165577343, |
|
"grad_norm": 0.7198147773742676, |
|
"learning_rate": 0.00014687246382776205, |
|
"loss": 0.128, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.383442265795207, |
|
"grad_norm": 0.7576356530189514, |
|
"learning_rate": 0.00014671957944444847, |
|
"loss": 0.1337, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.3856209150326797, |
|
"grad_norm": 0.5735803246498108, |
|
"learning_rate": 0.00014656655524191537, |
|
"loss": 0.098, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.3877995642701526, |
|
"grad_norm": 0.723497748374939, |
|
"learning_rate": 0.0001464133916781234, |
|
"loss": 0.1192, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.3899782135076253, |
|
"grad_norm": 0.4777669310569763, |
|
"learning_rate": 0.0001462600892114501, |
|
"loss": 0.0759, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.392156862745098, |
|
"grad_norm": 0.6985266208648682, |
|
"learning_rate": 0.00014610664830068875, |
|
"loss": 0.1172, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.3943355119825709, |
|
"grad_norm": 0.6342020034790039, |
|
"learning_rate": 0.00014595306940504716, |
|
"loss": 0.1173, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.3965141612200436, |
|
"grad_norm": 0.7338960766792297, |
|
"learning_rate": 0.0001457993529841458, |
|
"loss": 0.1088, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.3986928104575163, |
|
"grad_norm": 0.5785338878631592, |
|
"learning_rate": 0.00014564549949801694, |
|
"loss": 0.1058, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.4008714596949892, |
|
"grad_norm": 0.7608123421669006, |
|
"learning_rate": 0.00014549150940710285, |
|
"loss": 0.1345, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.4030501089324618, |
|
"grad_norm": 0.6694321036338806, |
|
"learning_rate": 0.00014533738317225485, |
|
"loss": 0.1019, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.4052287581699345, |
|
"grad_norm": 0.7982690334320068, |
|
"learning_rate": 0.00014518312125473152, |
|
"loss": 0.1409, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.4074074074074074, |
|
"grad_norm": 0.5442335605621338, |
|
"learning_rate": 0.00014502872411619757, |
|
"loss": 0.0952, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.4095860566448801, |
|
"grad_norm": 0.6602448225021362, |
|
"learning_rate": 0.00014487419221872238, |
|
"loss": 0.0968, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.4117647058823528, |
|
"grad_norm": 0.6015776991844177, |
|
"learning_rate": 0.00014471952602477866, |
|
"loss": 0.0994, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.4139433551198257, |
|
"grad_norm": 0.7918272614479065, |
|
"learning_rate": 0.000144564725997241, |
|
"loss": 0.1718, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.4161220043572984, |
|
"grad_norm": 0.6220570802688599, |
|
"learning_rate": 0.0001444097925993845, |
|
"loss": 0.1123, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.4183006535947713, |
|
"grad_norm": 0.6618373394012451, |
|
"learning_rate": 0.0001442547262948835, |
|
"loss": 0.115, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.420479302832244, |
|
"grad_norm": 0.7879993915557861, |
|
"learning_rate": 0.0001440995275478099, |
|
"loss": 0.1448, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.422657952069717, |
|
"grad_norm": 0.6283368468284607, |
|
"learning_rate": 0.00014394419682263218, |
|
"loss": 0.108, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.4248366013071896, |
|
"grad_norm": 0.6305772662162781, |
|
"learning_rate": 0.0001437887345842137, |
|
"loss": 0.104, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.4270152505446623, |
|
"grad_norm": 0.6052739024162292, |
|
"learning_rate": 0.00014363314129781137, |
|
"loss": 0.0865, |
|
"step": 667 |
|
    },
    {
      "epoch": 1.4291938997821352,
      "grad_norm": 0.7560857534408569,
      "learning_rate": 0.00014347741742907433,
      "loss": 0.1383,
      "step": 668
    },
    {
      "epoch": 1.4313725490196079,
      "grad_norm": 0.6733012199401855,
      "learning_rate": 0.00014332156344404253,
      "loss": 0.1277,
      "step": 669
    },
    {
      "epoch": 1.4335511982570806,
      "grad_norm": 0.6026163101196289,
      "learning_rate": 0.00014316557980914528,
      "loss": 0.0738,
      "step": 670
    },
    {
      "epoch": 1.4357298474945535,
      "grad_norm": 0.5354239344596863,
      "learning_rate": 0.00014300946699119998,
      "loss": 0.0796,
      "step": 671
    },
    {
      "epoch": 1.4379084967320261,
      "grad_norm": 0.7443612813949585,
      "learning_rate": 0.00014285322545741052,
      "loss": 0.1208,
      "step": 672
    },
    {
      "epoch": 1.4400871459694988,
      "grad_norm": 0.5782439708709717,
      "learning_rate": 0.00014269685567536614,
      "loss": 0.0812,
      "step": 673
    },
    {
      "epoch": 1.4422657952069717,
      "grad_norm": 0.7431501746177673,
      "learning_rate": 0.0001425403581130398,
      "loss": 0.1537,
      "step": 674
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 0.6386312246322632,
      "learning_rate": 0.00014238373323878685,
      "loss": 0.0868,
      "step": 675
    },
    {
      "epoch": 1.446623093681917,
      "grad_norm": 0.6820403337478638,
      "learning_rate": 0.00014222698152134374,
      "loss": 0.0978,
      "step": 676
    },
    {
      "epoch": 1.44880174291939,
      "grad_norm": 0.5556746125221252,
      "learning_rate": 0.00014207010342982642,
      "loss": 0.0652,
      "step": 677
    },
    {
      "epoch": 1.4509803921568627,
      "grad_norm": 0.5212750434875488,
      "learning_rate": 0.0001419130994337292,
      "loss": 0.0734,
      "step": 678
    },
    {
      "epoch": 1.4531590413943354,
      "grad_norm": 0.5288644433021545,
      "learning_rate": 0.000141755970002923,
      "loss": 0.077,
      "step": 679
    },
    {
      "epoch": 1.4553376906318083,
      "grad_norm": 0.8352729082107544,
      "learning_rate": 0.00014159871560765432,
      "loss": 0.1627,
      "step": 680
    },
    {
      "epoch": 1.457516339869281,
      "grad_norm": 0.4913583993911743,
      "learning_rate": 0.00014144133671854347,
      "loss": 0.0621,
      "step": 681
    },
    {
      "epoch": 1.4596949891067539,
      "grad_norm": 0.7057034969329834,
      "learning_rate": 0.0001412838338065835,
      "loss": 0.1276,
      "step": 682
    },
    {
      "epoch": 1.4618736383442266,
      "grad_norm": 0.4989885985851288,
      "learning_rate": 0.00014112620734313847,
      "loss": 0.0577,
      "step": 683
    },
    {
      "epoch": 1.4640522875816995,
      "grad_norm": 0.5323114991188049,
      "learning_rate": 0.0001409684577999423,
      "loss": 0.0687,
      "step": 684
    },
    {
      "epoch": 1.4662309368191722,
      "grad_norm": 0.7068952322006226,
      "learning_rate": 0.00014081058564909723,
      "loss": 0.0867,
      "step": 685
    },
    {
      "epoch": 1.4684095860566448,
      "grad_norm": 0.48235902190208435,
      "learning_rate": 0.00014065259136307242,
      "loss": 0.0631,
      "step": 686
    },
    {
      "epoch": 1.4705882352941178,
      "grad_norm": 0.7386283874511719,
      "learning_rate": 0.0001404944754147026,
      "loss": 0.1186,
      "step": 687
    },
    {
      "epoch": 1.4727668845315904,
      "grad_norm": 0.8523921966552734,
      "learning_rate": 0.0001403362382771865,
      "loss": 0.1548,
      "step": 688
    },
    {
      "epoch": 1.4749455337690631,
      "grad_norm": 0.6278656125068665,
      "learning_rate": 0.00014017788042408564,
      "loss": 0.0734,
      "step": 689
    },
    {
      "epoch": 1.477124183006536,
      "grad_norm": 0.5892928838729858,
      "learning_rate": 0.0001400194023293228,
      "loss": 0.0718,
      "step": 690
    },
    {
      "epoch": 1.477124183006536,
      "eval_loss": 0.337152898311615,
      "eval_runtime": 0.9372,
      "eval_samples_per_second": 182.455,
      "eval_steps_per_second": 13.871,
      "step": 690
    },
    {
      "epoch": 1.4793028322440087,
      "grad_norm": 0.5319302082061768,
      "learning_rate": 0.00013986080446718043,
      "loss": 0.0951,
      "step": 691
    },
    {
      "epoch": 1.4814814814814814,
      "grad_norm": 0.43910735845565796,
      "learning_rate": 0.00013970208731229974,
      "loss": 0.0567,
      "step": 692
    },
    {
      "epoch": 1.4836601307189543,
      "grad_norm": 0.429105281829834,
      "learning_rate": 0.00013954325133967865,
      "loss": 0.0557,
      "step": 693
    },
    {
      "epoch": 1.485838779956427,
      "grad_norm": 0.7734906077384949,
      "learning_rate": 0.00013938429702467086,
      "loss": 0.1351,
      "step": 694
    },
    {
      "epoch": 1.4880174291938997,
      "grad_norm": 0.7108368277549744,
      "learning_rate": 0.00013922522484298414,
      "loss": 0.1096,
      "step": 695
    },
    {
      "epoch": 1.4901960784313726,
      "grad_norm": 0.5471106767654419,
      "learning_rate": 0.000139066035270679,
      "loss": 0.1027,
      "step": 696
    },
    {
      "epoch": 1.4923747276688453,
      "grad_norm": 0.875432014465332,
      "learning_rate": 0.00013890672878416737,
      "loss": 0.1344,
      "step": 697
    },
    {
      "epoch": 1.494553376906318,
      "grad_norm": 0.5589685440063477,
      "learning_rate": 0.00013874730586021093,
      "loss": 0.078,
      "step": 698
    },
    {
      "epoch": 1.4967320261437909,
      "grad_norm": 0.6980583667755127,
      "learning_rate": 0.00013858776697591997,
      "loss": 0.0895,
      "step": 699
    },
    {
      "epoch": 1.4989106753812635,
      "grad_norm": 0.7580942511558533,
      "learning_rate": 0.00013842811260875168,
      "loss": 0.1083,
      "step": 700
    },
    {
      "epoch": 1.5010893246187362,
      "grad_norm": 0.6347730159759521,
      "learning_rate": 0.000138268343236509,
      "loss": 0.0779,
      "step": 701
    },
    {
      "epoch": 1.5032679738562091,
      "grad_norm": 0.4675781726837158,
      "learning_rate": 0.0001381084593373389,
      "loss": 0.0651,
      "step": 702
    },
    {
      "epoch": 1.505446623093682,
      "grad_norm": 0.5469012260437012,
      "learning_rate": 0.00013794846138973123,
      "loss": 0.0888,
      "step": 703
    },
    {
      "epoch": 1.5076252723311547,
      "grad_norm": 0.5839260816574097,
      "learning_rate": 0.00013778834987251707,
      "loss": 0.0861,
      "step": 704
    },
    {
      "epoch": 1.5098039215686274,
      "grad_norm": 0.6217342615127563,
      "learning_rate": 0.00013762812526486743,
      "loss": 0.0938,
      "step": 705
    },
    {
      "epoch": 1.5119825708061003,
      "grad_norm": 0.7009667158126831,
      "learning_rate": 0.00013746778804629177,
      "loss": 0.0989,
      "step": 706
    },
    {
      "epoch": 1.514161220043573,
      "grad_norm": 0.7072852253913879,
      "learning_rate": 0.0001373073386966365,
      "loss": 0.1256,
      "step": 707
    },
    {
      "epoch": 1.5163398692810457,
      "grad_norm": 0.6454962491989136,
      "learning_rate": 0.0001371467776960837,
      "loss": 0.116,
      "step": 708
    },
    {
      "epoch": 1.5185185185185186,
      "grad_norm": 0.5956925749778748,
      "learning_rate": 0.00013698610552514956,
      "loss": 0.085,
      "step": 709
    },
    {
      "epoch": 1.5206971677559913,
      "grad_norm": 0.6272696852684021,
      "learning_rate": 0.0001368253226646829,
      "loss": 0.0776,
      "step": 710
    },
    {
      "epoch": 1.522875816993464,
      "grad_norm": 0.5974109172821045,
      "learning_rate": 0.00013666442959586395,
      "loss": 0.0803,
      "step": 711
    },
    {
      "epoch": 1.5250544662309369,
      "grad_norm": 0.6748714447021484,
      "learning_rate": 0.00013650342680020258,
      "loss": 0.081,
      "step": 712
    },
    {
      "epoch": 1.5272331154684096,
      "grad_norm": 0.6029950380325317,
      "learning_rate": 0.00013634231475953724,
      "loss": 0.0739,
      "step": 713
    },
    {
      "epoch": 1.5294117647058822,
      "grad_norm": 0.7018197178840637,
      "learning_rate": 0.00013618109395603317,
      "loss": 0.0993,
      "step": 714
    },
    {
      "epoch": 1.5315904139433552,
      "grad_norm": 0.7441207766532898,
      "learning_rate": 0.0001360197648721812,
      "loss": 0.1119,
      "step": 715
    },
    {
      "epoch": 1.5337690631808278,
      "grad_norm": 0.8434575796127319,
      "learning_rate": 0.0001358583279907961,
      "loss": 0.1392,
      "step": 716
    },
    {
      "epoch": 1.5359477124183005,
      "grad_norm": 0.5164886116981506,
      "learning_rate": 0.0001356967837950154,
      "loss": 0.0792,
      "step": 717
    },
    {
      "epoch": 1.5381263616557734,
      "grad_norm": 0.5095295310020447,
      "learning_rate": 0.0001355351327682977,
      "loss": 0.0672,
      "step": 718
    },
    {
      "epoch": 1.5403050108932463,
      "grad_norm": 0.65059894323349,
      "learning_rate": 0.00013537337539442132,
      "loss": 0.0802,
      "step": 719
    },
    {
      "epoch": 1.5424836601307188,
      "grad_norm": 0.6488040685653687,
      "learning_rate": 0.0001352115121574829,
      "loss": 0.0894,
      "step": 720
    },
    {
      "epoch": 1.5446623093681917,
      "grad_norm": 0.7139689326286316,
      "learning_rate": 0.00013504954354189583,
      "loss": 0.1185,
      "step": 721
    },
    {
      "epoch": 1.5468409586056646,
      "grad_norm": 0.5658460259437561,
      "learning_rate": 0.00013488747003238892,
      "loss": 0.0983,
      "step": 722
    },
    {
      "epoch": 1.5490196078431373,
      "grad_norm": 0.6340675354003906,
      "learning_rate": 0.00013472529211400484,
      "loss": 0.0945,
      "step": 723
    },
    {
      "epoch": 1.55119825708061,
      "grad_norm": 0.6365328431129456,
      "learning_rate": 0.00013456301027209882,
      "loss": 0.0753,
      "step": 724
    },
    {
      "epoch": 1.553376906318083,
      "grad_norm": 0.6497185230255127,
      "learning_rate": 0.00013440062499233709,
      "loss": 0.0841,
      "step": 725
    },
    {
      "epoch": 1.5555555555555556,
      "grad_norm": 0.6614805459976196,
      "learning_rate": 0.00013423813676069534,
      "loss": 0.0903,
      "step": 726
    },
    {
      "epoch": 1.5577342047930283,
      "grad_norm": 0.6074105501174927,
      "learning_rate": 0.00013407554606345747,
      "loss": 0.0918,
      "step": 727
    },
    {
      "epoch": 1.5599128540305012,
      "grad_norm": 0.6045775413513184,
      "learning_rate": 0.000133912853387214,
      "loss": 0.0918,
      "step": 728
    },
    {
      "epoch": 1.5620915032679739,
      "grad_norm": 0.6219077110290527,
      "learning_rate": 0.0001337500592188606,
      "loss": 0.0884,
      "step": 729
    },
    {
      "epoch": 1.5642701525054465,
      "grad_norm": 0.669049084186554,
      "learning_rate": 0.0001335871640455968,
      "loss": 0.1,
      "step": 730
    },
    {
      "epoch": 1.5664488017429194,
      "grad_norm": 0.5828138589859009,
      "learning_rate": 0.00013342416835492423,
      "loss": 0.0808,
      "step": 731
    },
    {
      "epoch": 1.5686274509803921,
      "grad_norm": 0.7363128066062927,
      "learning_rate": 0.00013326107263464558,
      "loss": 0.1117,
      "step": 732
    },
    {
      "epoch": 1.5708061002178648,
      "grad_norm": 0.6877434849739075,
      "learning_rate": 0.00013309787737286267,
      "loss": 0.115,
      "step": 733
    },
    {
      "epoch": 1.5729847494553377,
      "grad_norm": 0.5169875621795654,
      "learning_rate": 0.00013293458305797533,
      "loss": 0.0697,
      "step": 734
    },
    {
      "epoch": 1.5751633986928104,
      "grad_norm": 0.6289706826210022,
      "learning_rate": 0.00013277119017867983,
      "loss": 0.1014,
      "step": 735
    },
    {
      "epoch": 1.577342047930283,
      "grad_norm": 0.601397693157196,
      "learning_rate": 0.0001326076992239674,
      "loss": 0.0882,
      "step": 736
    },
    {
      "epoch": 1.579520697167756,
      "grad_norm": 0.8283075094223022,
      "learning_rate": 0.00013244411068312283,
      "loss": 0.1334,
      "step": 737
    },
    {
      "epoch": 1.581699346405229,
      "grad_norm": 0.6383387446403503,
      "learning_rate": 0.00013228042504572285,
      "loss": 0.0887,
      "step": 738
    },
    {
      "epoch": 1.5838779956427014,
      "grad_norm": 0.610774576663971,
      "learning_rate": 0.00013211664280163488,
      "loss": 0.1124,
      "step": 739
    },
    {
      "epoch": 1.5860566448801743,
      "grad_norm": 0.5463963150978088,
      "learning_rate": 0.00013195276444101547,
      "loss": 0.078,
      "step": 740
    },
    {
      "epoch": 1.5882352941176472,
      "grad_norm": 0.5919215679168701,
      "learning_rate": 0.00013178879045430862,
      "loss": 0.0797,
      "step": 741
    },
    {
      "epoch": 1.5904139433551199,
      "grad_norm": 0.4449659287929535,
      "learning_rate": 0.00013162472133224483,
      "loss": 0.0545,
      "step": 742
    },
    {
      "epoch": 1.5925925925925926,
      "grad_norm": 0.5289342999458313,
      "learning_rate": 0.00013146055756583906,
      "loss": 0.0668,
      "step": 743
    },
    {
      "epoch": 1.5947712418300655,
      "grad_norm": 0.6107696294784546,
      "learning_rate": 0.0001312962996463896,
      "loss": 0.1107,
      "step": 744
    },
    {
      "epoch": 1.5969498910675382,
      "grad_norm": 0.6633789539337158,
      "learning_rate": 0.00013113194806547656,
      "loss": 0.1044,
      "step": 745
    },
    {
      "epoch": 1.5991285403050108,
      "grad_norm": 0.4553978443145752,
      "learning_rate": 0.00013096750331496033,
      "loss": 0.0611,
      "step": 746
    },
    {
      "epoch": 1.6013071895424837,
      "grad_norm": 0.6370587944984436,
      "learning_rate": 0.00013080296588698006,
      "loss": 0.0876,
      "step": 747
    },
    {
      "epoch": 1.6034858387799564,
      "grad_norm": 0.6242510080337524,
      "learning_rate": 0.0001306383362739523,
      "loss": 0.0961,
      "step": 748
    },
    {
      "epoch": 1.6056644880174291,
      "grad_norm": 0.4671713709831238,
      "learning_rate": 0.00013047361496856957,
      "loss": 0.0653,
      "step": 749
    },
    {
      "epoch": 1.607843137254902,
      "grad_norm": 0.5274946689605713,
      "learning_rate": 0.00013030880246379866,
      "loss": 0.0747,
      "step": 750
    },
    {
      "epoch": 1.6100217864923747,
      "grad_norm": 0.4920006990432739,
      "learning_rate": 0.00013014389925287943,
      "loss": 0.0694,
      "step": 751
    },
    {
      "epoch": 1.6122004357298474,
      "grad_norm": 0.5613352060317993,
      "learning_rate": 0.00012997890582932303,
      "loss": 0.0777,
      "step": 752
    },
    {
      "epoch": 1.6143790849673203,
      "grad_norm": 0.5812129974365234,
      "learning_rate": 0.00012981382268691084,
      "loss": 0.0798,
      "step": 753
    },
    {
      "epoch": 1.616557734204793,
      "grad_norm": 0.42552122473716736,
      "learning_rate": 0.00012964865031969252,
      "loss": 0.0487,
      "step": 754
    },
    {
      "epoch": 1.6187363834422657,
      "grad_norm": 0.5462669134140015,
      "learning_rate": 0.0001294833892219848,
      "loss": 0.0721,
      "step": 755
    },
    {
      "epoch": 1.6209150326797386,
      "grad_norm": 0.6898488998413086,
      "learning_rate": 0.0001293180398883701,
      "loss": 0.0873,
      "step": 756
    },
    {
      "epoch": 1.6230936819172115,
      "grad_norm": 0.8160077929496765,
      "learning_rate": 0.0001291526028136947,
      "loss": 0.1475,
      "step": 757
    },
    {
      "epoch": 1.625272331154684,
      "grad_norm": 0.6779285073280334,
      "learning_rate": 0.00012898707849306763,
      "loss": 0.0983,
      "step": 758
    },
    {
      "epoch": 1.6274509803921569,
      "grad_norm": 0.5817242860794067,
      "learning_rate": 0.0001288214674218589,
      "loss": 0.0718,
      "step": 759
    },
    {
      "epoch": 1.6296296296296298,
      "grad_norm": 0.7141191959381104,
      "learning_rate": 0.00012865577009569824,
      "loss": 0.1006,
      "step": 760
    },
    {
      "epoch": 1.6318082788671024,
      "grad_norm": 0.7559797763824463,
      "learning_rate": 0.0001284899870104735,
      "loss": 0.1181,
      "step": 761
    },
    {
      "epoch": 1.6339869281045751,
      "grad_norm": 0.5632848739624023,
      "learning_rate": 0.0001283241186623291,
      "loss": 0.0782,
      "step": 762
    },
    {
      "epoch": 1.636165577342048,
      "grad_norm": 0.5656502842903137,
      "learning_rate": 0.00012815816554766476,
      "loss": 0.0706,
      "step": 763
    },
    {
      "epoch": 1.6383442265795207,
      "grad_norm": 0.7342005372047424,
      "learning_rate": 0.00012799212816313376,
      "loss": 0.1059,
      "step": 764
    },
    {
      "epoch": 1.6405228758169934,
      "grad_norm": 0.5315876603126526,
      "learning_rate": 0.00012782600700564166,
      "loss": 0.0735,
      "step": 765
    },
    {
      "epoch": 1.6427015250544663,
      "grad_norm": 0.527675986289978,
      "learning_rate": 0.00012765980257234473,
      "loss": 0.0638,
      "step": 766
    },
    {
      "epoch": 1.644880174291939,
      "grad_norm": 0.6750484108924866,
      "learning_rate": 0.00012749351536064834,
      "loss": 0.0896,
      "step": 767
    },
    {
      "epoch": 1.6470588235294117,
      "grad_norm": 0.5952948331832886,
      "learning_rate": 0.00012732714586820583,
      "loss": 0.0878,
      "step": 768
    },
    {
      "epoch": 1.6492374727668846,
      "grad_norm": 0.8387467861175537,
      "learning_rate": 0.00012716069459291652,
      "loss": 0.1307,
      "step": 769
    },
    {
      "epoch": 1.6514161220043573,
      "grad_norm": 0.6178788542747498,
      "learning_rate": 0.00012699416203292466,
      "loss": 0.0923,
      "step": 770
    },
    {
      "epoch": 1.65359477124183,
      "grad_norm": 0.5772682428359985,
      "learning_rate": 0.0001268275486866177,
      "loss": 0.077,
      "step": 771
    },
    {
      "epoch": 1.6557734204793029,
      "grad_norm": 0.5710946917533875,
      "learning_rate": 0.00012666085505262485,
      "loss": 0.0744,
      "step": 772
    },
    {
      "epoch": 1.6579520697167756,
      "grad_norm": 0.5186509490013123,
      "learning_rate": 0.0001264940816298157,
      "loss": 0.0666,
      "step": 773
    },
    {
      "epoch": 1.6601307189542482,
      "grad_norm": 0.44019749760627747,
      "learning_rate": 0.00012632722891729845,
      "loss": 0.0484,
      "step": 774
    },
    {
      "epoch": 1.6623093681917211,
      "grad_norm": 0.5319947600364685,
      "learning_rate": 0.00012616029741441877,
      "loss": 0.058,
      "step": 775
    },
    {
      "epoch": 1.664488017429194,
      "grad_norm": 0.682352602481842,
      "learning_rate": 0.000125993287620758,
      "loss": 0.0854,
      "step": 776
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.5932778716087341,
      "learning_rate": 0.0001258262000361319,
      "loss": 0.1003,
      "step": 777
    },
    {
      "epoch": 1.6688453159041394,
      "grad_norm": 0.5531163215637207,
      "learning_rate": 0.00012565903516058882,
      "loss": 0.0619,
      "step": 778
    },
    {
      "epoch": 1.6710239651416123,
      "grad_norm": 0.6834267377853394,
      "learning_rate": 0.00012549179349440875,
      "loss": 0.0971,
      "step": 779
    },
    {
      "epoch": 1.673202614379085,
      "grad_norm": 0.5356924533843994,
      "learning_rate": 0.00012532447553810126,
      "loss": 0.0673,
      "step": 780
    },
    {
      "epoch": 1.6753812636165577,
      "grad_norm": 0.7086807489395142,
      "learning_rate": 0.0001251570817924042,
      "loss": 0.0978,
      "step": 781
    },
    {
      "epoch": 1.6775599128540306,
      "grad_norm": 0.6605278253555298,
      "learning_rate": 0.00012498961275828247,
      "loss": 0.1036,
      "step": 782
    },
    {
      "epoch": 1.6797385620915033,
      "grad_norm": 0.47696250677108765,
      "learning_rate": 0.00012482206893692604,
      "loss": 0.0589,
      "step": 783
    },
    {
      "epoch": 1.681917211328976,
      "grad_norm": 0.6112656593322754,
      "learning_rate": 0.00012465445082974886,
      "loss": 0.0818,
      "step": 784
    },
    {
      "epoch": 1.6840958605664489,
      "grad_norm": 0.6264757513999939,
      "learning_rate": 0.0001244867589383871,
      "loss": 0.0841,
      "step": 785
    },
    {
      "epoch": 1.6862745098039216,
      "grad_norm": 0.5560308694839478,
      "learning_rate": 0.00012431899376469784,
      "loss": 0.0792,
      "step": 786
    },
    {
      "epoch": 1.6884531590413943,
      "grad_norm": 0.4631575345993042,
      "learning_rate": 0.00012415115581075741,
      "loss": 0.0589,
      "step": 787
    },
    {
      "epoch": 1.6906318082788672,
      "grad_norm": 0.7023651599884033,
      "learning_rate": 0.00012398324557885994,
      "loss": 0.1074,
      "step": 788
    },
    {
      "epoch": 1.6928104575163399,
      "grad_norm": 0.5364183187484741,
      "learning_rate": 0.00012381526357151592,
      "loss": 0.0685,
      "step": 789
    },
    {
      "epoch": 1.6949891067538125,
      "grad_norm": 0.5863189697265625,
      "learning_rate": 0.0001236472102914506,
      "loss": 0.0729,
      "step": 790
    },
    {
      "epoch": 1.6971677559912854,
      "grad_norm": 0.48796355724334717,
      "learning_rate": 0.00012347908624160258,
      "loss": 0.0545,
      "step": 791
    },
    {
      "epoch": 1.6993464052287581,
      "grad_norm": 0.6581359505653381,
      "learning_rate": 0.00012331089192512218,
      "loss": 0.1039,
      "step": 792
    },
    {
      "epoch": 1.7015250544662308,
      "grad_norm": 0.639834463596344,
      "learning_rate": 0.0001231426278453701,
      "loss": 0.0921,
      "step": 793
    },
    {
      "epoch": 1.7037037037037037,
      "grad_norm": 0.599344789981842,
      "learning_rate": 0.00012297429450591575,
      "loss": 0.0691,
      "step": 794
    },
    {
      "epoch": 1.7058823529411766,
      "grad_norm": 0.7103595733642578,
      "learning_rate": 0.00012280589241053585,
      "loss": 0.0927,
      "step": 795
    },
    {
      "epoch": 1.708061002178649,
      "grad_norm": 0.5287673473358154,
      "learning_rate": 0.00012263742206321287,
      "loss": 0.0721,
      "step": 796
    },
    {
      "epoch": 1.710239651416122,
      "grad_norm": 0.6704793572425842,
      "learning_rate": 0.00012246888396813356,
      "loss": 0.0881,
      "step": 797
    },
    {
      "epoch": 1.712418300653595,
      "grad_norm": 0.5632234215736389,
      "learning_rate": 0.00012230027862968743,
      "loss": 0.0758,
      "step": 798
    },
    {
      "epoch": 1.7145969498910676,
      "grad_norm": 0.61404949426651,
      "learning_rate": 0.00012213160655246517,
      "loss": 0.1015,
      "step": 799
    },
    {
      "epoch": 1.7167755991285403,
      "grad_norm": 0.6605061292648315,
      "learning_rate": 0.00012196286824125726,
      "loss": 0.0721,
      "step": 800
    },
    {
      "epoch": 1.7189542483660132,
      "grad_norm": 0.5778952240943909,
      "learning_rate": 0.0001217940642010524,
      "loss": 0.0619,
      "step": 801
    },
    {
      "epoch": 1.7211328976034859,
      "grad_norm": 0.4803905487060547,
      "learning_rate": 0.000121625194937036,
      "loss": 0.0623,
      "step": 802
    },
    {
      "epoch": 1.7233115468409586,
      "grad_norm": 0.5594003796577454,
      "learning_rate": 0.0001214562609545886,
      "loss": 0.0664,
      "step": 803
    },
    {
      "epoch": 1.7254901960784315,
      "grad_norm": 0.6021897792816162,
      "learning_rate": 0.0001212872627592845,
      "loss": 0.0747,
      "step": 804
    },
    {
      "epoch": 1.7276688453159041,
      "grad_norm": 0.5629754662513733,
      "learning_rate": 0.00012111820085689016,
      "loss": 0.0684,
      "step": 805
    },
    {
      "epoch": 1.7276688453159041,
      "eval_loss": 0.36081361770629883,
      "eval_runtime": 0.9368,
      "eval_samples_per_second": 182.546,
      "eval_steps_per_second": 13.878,
      "step": 805
    },
    {
      "epoch": 1.7298474945533768,
      "grad_norm": 0.7455347180366516,
      "learning_rate": 0.00012094907575336267,
      "loss": 0.0931,
      "step": 806
    },
    {
      "epoch": 1.7320261437908497,
      "grad_norm": 0.6510118246078491,
      "learning_rate": 0.00012077988795484831,
      "loss": 0.0888,
      "step": 807
    },
    {
      "epoch": 1.7342047930283224,
      "grad_norm": 0.4980412423610687,
      "learning_rate": 0.0001206106379676809,
      "loss": 0.0515,
      "step": 808
    },
    {
      "epoch": 1.736383442265795,
      "grad_norm": 0.6158175468444824,
      "learning_rate": 0.00012044132629838052,
      "loss": 0.0688,
      "step": 809
    },
    {
      "epoch": 1.738562091503268,
      "grad_norm": 0.6391656398773193,
      "learning_rate": 0.00012027195345365167,
      "loss": 0.0785,
      "step": 810
    },
    {
      "epoch": 1.7407407407407407,
      "grad_norm": 0.5952157378196716,
      "learning_rate": 0.00012010251994038211,
      "loss": 0.0642,
      "step": 811
    },
    {
      "epoch": 1.7429193899782134,
      "grad_norm": 0.5489711165428162,
      "learning_rate": 0.00011993302626564102,
      "loss": 0.0803,
      "step": 812
    },
    {
      "epoch": 1.7450980392156863,
      "grad_norm": 0.5591238737106323,
      "learning_rate": 0.00011976347293667769,
      "loss": 0.0786,
      "step": 813
    },
    {
      "epoch": 1.7472766884531592,
      "grad_norm": 0.5260274410247803,
      "learning_rate": 0.00011959386046091998,
      "loss": 0.0684,
      "step": 814
    },
    {
      "epoch": 1.7494553376906317,
      "grad_norm": 0.46458926796913147,
      "learning_rate": 0.00011942418934597266,
      "loss": 0.0628,
      "step": 815
    },
    {
      "epoch": 1.7516339869281046,
      "grad_norm": 0.6219300031661987,
      "learning_rate": 0.00011925446009961607,
      "loss": 0.0826,
      "step": 816
    },
    {
      "epoch": 1.7538126361655775,
      "grad_norm": 0.5895767211914062,
      "learning_rate": 0.0001190846732298045,
      "loss": 0.0757,
      "step": 817
    },
    {
      "epoch": 1.7559912854030502,
      "grad_norm": 0.45634791254997253,
      "learning_rate": 0.00011891482924466471,
      "loss": 0.0576,
      "step": 818
    },
    {
      "epoch": 1.7581699346405228,
      "grad_norm": 0.5139732956886292,
      "learning_rate": 0.00011874492865249434,
      "loss": 0.051,
      "step": 819
    },
    {
      "epoch": 1.7603485838779958,
      "grad_norm": 0.503699004650116,
      "learning_rate": 0.00011857497196176049,
      "loss": 0.0565,
      "step": 820
    },
    {
      "epoch": 1.7625272331154684,
      "grad_norm": 0.6864826083183289,
      "learning_rate": 0.0001184049596810981,
      "loss": 0.109,
      "step": 821
    },
    {
      "epoch": 1.7647058823529411,
      "grad_norm": 0.4041982889175415,
      "learning_rate": 0.00011823489231930854,
      "loss": 0.0531,
      "step": 822
    },
    {
      "epoch": 1.766884531590414,
      "grad_norm": 0.6555854678153992,
      "learning_rate": 0.00011806477038535799,
      "loss": 0.0923,
      "step": 823
    },
    {
      "epoch": 1.7690631808278867,
      "grad_norm": 0.48353853821754456,
      "learning_rate": 0.00011789459438837589,
      "loss": 0.0569,
      "step": 824
    },
    {
      "epoch": 1.7712418300653594,
      "grad_norm": 0.5575915575027466,
      "learning_rate": 0.00011772436483765363,
      "loss": 0.0815,
      "step": 825
    },
    {
      "epoch": 1.7734204793028323,
      "grad_norm": 0.5701248645782471,
      "learning_rate": 0.00011755408224264269,
      "loss": 0.0685,
      "step": 826
    },
    {
      "epoch": 1.775599128540305,
      "grad_norm": 0.5434954166412354,
      "learning_rate": 0.00011738374711295341,
      "loss": 0.0621,
      "step": 827
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 0.6199434399604797,
      "learning_rate": 0.00011721335995835336,
      "loss": 0.0657,
      "step": 828
    },
    {
      "epoch": 1.7799564270152506,
      "grad_norm": 0.7478623390197754,
      "learning_rate": 0.00011704292128876573,
      "loss": 0.1091,
      "step": 829
    },
    {
      "epoch": 1.7821350762527233,
      "grad_norm": 0.5505065321922302,
      "learning_rate": 0.00011687243161426793,
      "loss": 0.0773,
      "step": 830
    },
    {
      "epoch": 1.784313725490196,
      "grad_norm": 0.611953616142273,
      "learning_rate": 0.00011670189144509003,
      "loss": 0.0756,
      "step": 831
    },
    {
      "epoch": 1.7864923747276689,
      "grad_norm": 0.5525700449943542,
      "learning_rate": 0.00011653130129161316,
      "loss": 0.0643,
      "step": 832
    },
    {
      "epoch": 1.7886710239651418,
      "grad_norm": 0.4467664659023285,
      "learning_rate": 0.0001163606616643681,
      "loss": 0.0552,
      "step": 833
    },
    {
      "epoch": 1.7908496732026142,
      "grad_norm": 0.689069926738739,
      "learning_rate": 0.00011618997307403367,
      "loss": 0.0717,
      "step": 834
    },
    {
      "epoch": 1.7930283224400871,
      "grad_norm": 0.5094537138938904,
      "learning_rate": 0.00011601923603143519,
      "loss": 0.0654,
      "step": 835
    },
    {
      "epoch": 1.79520697167756,
      "grad_norm": 0.6739009618759155,
      "learning_rate": 0.00011584845104754304,
      "loss": 0.0843,
      "step": 836
    },
    {
      "epoch": 1.7973856209150327,
      "grad_norm": 0.49725601077079773,
      "learning_rate": 0.00011567761863347107,
      "loss": 0.0661,
      "step": 837
    },
    {
      "epoch": 1.7995642701525054,
      "grad_norm": 0.46219804883003235,
      "learning_rate": 0.00011550673930047498,
      "loss": 0.056,
      "step": 838
    },
    {
      "epoch": 1.8017429193899783,
      "grad_norm": 0.4114397466182709,
      "learning_rate": 0.00011533581355995102,
      "loss": 0.046,
      "step": 839
    },
    {
      "epoch": 1.803921568627451,
      "grad_norm": 0.6808457970619202,
      "learning_rate": 0.00011516484192343425,
      "loss": 0.1065,
      "step": 840
    },
    {
      "epoch": 1.8061002178649237,
      "grad_norm": 0.4873061776161194,
      "learning_rate": 0.00011499382490259709,
      "loss": 0.0501,
      "step": 841
    },
    {
      "epoch": 1.8082788671023966,
      "grad_norm": 0.5296838283538818,
      "learning_rate": 0.00011482276300924782,
      "loss": 0.0516,
      "step": 842
    },
    {
      "epoch": 1.8104575163398693,
      "grad_norm": 0.5966465473175049,
      "learning_rate": 0.00011465165675532898,
      "loss": 0.0785,
      "step": 843
    },
    {
      "epoch": 1.812636165577342,
      "grad_norm": 0.5794910192489624,
      "learning_rate": 0.00011448050665291587,
      "loss": 0.0704,
      "step": 844
    },
    {
      "epoch": 1.8148148148148149,
      "grad_norm": 0.37284550070762634,
      "learning_rate": 0.00011430931321421499,
      "loss": 0.0415,
      "step": 845
    },
    {
      "epoch": 1.8169934640522876,
      "grad_norm": 0.5628785490989685,
      "learning_rate": 0.00011413807695156262,
      "loss": 0.0711,
      "step": 846
    },
    {
      "epoch": 1.8191721132897603,
      "grad_norm": 0.3898080289363861,
      "learning_rate": 0.0001139667983774231,
      "loss": 0.0453,
      "step": 847
    },
    {
      "epoch": 1.8213507625272332,
      "grad_norm": 0.6457688212394714,
      "learning_rate": 0.00011379547800438747,
      "loss": 0.0814,
      "step": 848
    },
    {
      "epoch": 1.8235294117647058,
      "grad_norm": 0.5886730551719666,
      "learning_rate": 0.00011362411634517183,
      "loss": 0.0719,
      "step": 849
    },
    {
      "epoch": 1.8257080610021785,
      "grad_norm": 0.5808268785476685,
      "learning_rate": 0.00011345271391261584,
      "loss": 0.0669,
      "step": 850
    },
    {
      "epoch": 1.8278867102396514,
      "grad_norm": 0.6759737133979797,
      "learning_rate": 0.0001132812712196812,
      "loss": 0.0844,
      "step": 851
    },
    {
      "epoch": 1.8300653594771243,
      "grad_norm": 0.4220186173915863,
      "learning_rate": 0.00011310978877945007,
      "loss": 0.0509,
      "step": 852
    },
    {
      "epoch": 1.8322440087145968,
      "grad_norm": 0.4730389714241028,
      "learning_rate": 0.00011293826710512359,
      "loss": 0.0495,
      "step": 853
    },
    {
      "epoch": 1.8344226579520697,
      "grad_norm": 0.49538376927375793,
      "learning_rate": 0.00011276670671002028,
      "loss": 0.0561,
      "step": 854
    },
    {
      "epoch": 1.8366013071895426,
      "grad_norm": 0.5948292016983032,
      "learning_rate": 0.00011259510810757461,
      "loss": 0.0766,
      "step": 855
    },
    {
      "epoch": 1.8387799564270153,
      "grad_norm": 0.5298624038696289,
      "learning_rate": 0.00011242347181133533,
      "loss": 0.0794,
      "step": 856
    },
    {
      "epoch": 1.840958605664488,
      "grad_norm": 0.5494588017463684,
      "learning_rate": 0.00011225179833496402,
      "loss": 0.065,
      "step": 857
    },
    {
      "epoch": 1.843137254901961,
      "grad_norm": 0.4604671597480774,
      "learning_rate": 0.00011208008819223354,
      "loss": 0.0484,
      "step": 858
    },
    {
      "epoch": 1.8453159041394336,
      "grad_norm": 0.44082626700401306,
      "learning_rate": 0.00011190834189702646,
      "loss": 0.0493,
      "step": 859
    },
    {
      "epoch": 1.8474945533769063,
      "grad_norm": 0.5379127264022827,
      "learning_rate": 0.00011173655996333357,
      "loss": 0.0594,
      "step": 860
    },
    {
      "epoch": 1.8496732026143792,
      "grad_norm": 0.5449596047401428,
      "learning_rate": 0.00011156474290525227,
      "loss": 0.0623,
      "step": 861
    },
    {
      "epoch": 1.8518518518518519,
      "grad_norm": 0.5305776000022888,
      "learning_rate": 0.00011139289123698518,
      "loss": 0.0548,
      "step": 862
    },
    {
      "epoch": 1.8540305010893245,
      "grad_norm": 0.5683318376541138,
      "learning_rate": 0.00011122100547283834,
      "loss": 0.0625,
      "step": 863
    },
    {
      "epoch": 1.8562091503267975,
      "grad_norm": 0.5460466146469116,
      "learning_rate": 0.00011104908612722001,
      "loss": 0.0583,
      "step": 864
    },
    {
      "epoch": 1.8583877995642701,
      "grad_norm": 0.5440402030944824,
      "learning_rate": 0.00011087713371463881,
      "loss": 0.0594,
      "step": 865
    },
    {
      "epoch": 1.8605664488017428,
      "grad_norm": 0.6470115780830383,
      "learning_rate": 0.00011070514874970237,
      "loss": 0.0937,
      "step": 866
    },
    {
      "epoch": 1.8627450980392157,
      "grad_norm": 0.48046019673347473,
      "learning_rate": 0.00011053313174711575,
      "loss": 0.0543,
      "step": 867
    },
    {
      "epoch": 1.8649237472766884,
      "grad_norm": 0.42647895216941833,
      "learning_rate": 0.00011036108322167988,
      "loss": 0.0529,
      "step": 868
    },
    {
      "epoch": 1.867102396514161,
      "grad_norm": 0.5126241445541382,
      "learning_rate": 0.00011018900368829006,
      "loss": 0.06,
      "step": 869
    },
    {
      "epoch": 1.869281045751634,
      "grad_norm": 0.56479412317276,
      "learning_rate": 0.00011001689366193433,
      "loss": 0.0774,
      "step": 870
    },
    {
      "epoch": 1.871459694989107,
      "grad_norm": 0.6062801480293274,
      "learning_rate": 0.000109844753657692,
      "loss": 0.0688,
      "step": 871
    },
    {
      "epoch": 1.8736383442265794,
      "grad_norm": 0.4999787211418152,
      "learning_rate": 0.00010967258419073217,
      "loss": 0.0461,
      "step": 872
    },
    {
      "epoch": 1.8758169934640523,
      "grad_norm": 0.5203256607055664,
      "learning_rate": 0.00010950038577631198,
      "loss": 0.0575,
      "step": 873
    },
    {
      "epoch": 1.8779956427015252,
      "grad_norm": 0.4616885185241699,
      "learning_rate": 0.00010932815892977535,
      "loss": 0.0409,
      "step": 874
    },
    {
      "epoch": 1.8801742919389977,
      "grad_norm": 0.488798588514328,
      "learning_rate": 0.00010915590416655117,
      "loss": 0.0529,
      "step": 875
    },
    {
      "epoch": 1.8823529411764706,
      "grad_norm": 0.49445784091949463,
      "learning_rate": 0.00010898362200215197,
      "loss": 0.0613,
      "step": 876
    },
    {
      "epoch": 1.8845315904139435,
      "grad_norm": 0.5372164249420166,
      "learning_rate": 0.00010881131295217225,
      "loss": 0.0603,
      "step": 877
    },
    {
      "epoch": 1.8867102396514162,
      "grad_norm": 0.4770764708518982,
      "learning_rate": 0.00010863897753228687,
      "loss": 0.0518,
      "step": 878
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 0.616121232509613,
      "learning_rate": 0.00010846661625824978,
      "loss": 0.0892,
      "step": 879
    },
    {
      "epoch": 1.8910675381263617,
      "grad_norm": 0.5405257344245911,
      "learning_rate": 0.0001082942296458922,
      "loss": 0.0783,
      "step": 880
    },
    {
      "epoch": 1.8932461873638344,
      "grad_norm": 0.505785346031189,
      "learning_rate": 0.00010812181821112122,
      "loss": 0.0616,
      "step": 881
    },
    {
      "epoch": 1.8954248366013071,
      "grad_norm": 0.509480357170105,
      "learning_rate": 0.00010794938246991817,
      "loss": 0.07,
      "step": 882
    },
    {
      "epoch": 1.89760348583878,
      "grad_norm": 0.5401344299316406,
      "learning_rate": 0.00010777692293833718,
      "loss": 0.059,
      "step": 883
    },
    {
      "epoch": 1.8997821350762527,
      "grad_norm": 0.3899208903312683,
      "learning_rate": 0.0001076044401325036,
      "loss": 0.0435,
      "step": 884
    },
    {
      "epoch": 1.9019607843137254,
      "grad_norm": 0.5744214653968811,
      "learning_rate": 0.00010743193456861227,
      "loss": 0.0677,
      "step": 885
    },
    {
      "epoch": 1.9041394335511983,
      "grad_norm": 0.5708267092704773,
      "learning_rate": 0.00010725940676292636,
      "loss": 0.0669,
      "step": 886
    },
    {
      "epoch": 1.906318082788671,
      "grad_norm": 0.49386221170425415,
      "learning_rate": 0.00010708685723177543,
      "loss": 0.0623,
      "step": 887
    },
    {
      "epoch": 1.9084967320261437,
      "grad_norm": 0.47566959261894226,
      "learning_rate": 0.0001069142864915542,
      "loss": 0.0582,
      "step": 888
    },
    {
      "epoch": 1.9106753812636166,
      "grad_norm": 0.5212053656578064,
      "learning_rate": 0.00010674169505872072,
      "loss": 0.0549,
      "step": 889
    },
    {
      "epoch": 1.9128540305010895,
      "grad_norm": 0.5509231686592102,
      "learning_rate": 0.00010656908344979506,
      "loss": 0.0588,
      "step": 890
    },
    {
      "epoch": 1.915032679738562,
      "grad_norm": 0.48301365971565247,
      "learning_rate": 0.0001063964521813577,
      "loss": 0.0623,
      "step": 891
    },
    {
      "epoch": 1.9172113289760349,
      "grad_norm": 0.5671520829200745,
      "learning_rate": 0.0001062238017700478,
      "loss": 0.058,
      "step": 892
    },
    {
      "epoch": 1.9193899782135078,
      "grad_norm": 0.4387785792350769,
      "learning_rate": 0.00010605113273256205,
      "loss": 0.0448,
      "step": 893
    },
    {
      "epoch": 1.9215686274509802,
      "grad_norm": 0.48259374499320984,
      "learning_rate": 0.00010587844558565261,
      "loss": 0.0589,
      "step": 894
    },
    {
      "epoch": 1.9237472766884531,
      "grad_norm": 0.530150294303894,
      "learning_rate": 0.00010570574084612608,
      "loss": 0.0551,
      "step": 895
    },
    {
      "epoch": 1.925925925925926,
      "grad_norm": 0.42356571555137634,
      "learning_rate": 0.00010553301903084157,
      "loss": 0.0462,
      "step": 896
    },
    {
      "epoch": 1.9281045751633987,
      "grad_norm": 0.6400770545005798,
      "learning_rate": 0.00010536028065670929,
      "loss": 0.0794,
      "step": 897
    },
    {
      "epoch": 1.9302832244008714,
      "grad_norm": 0.46982961893081665,
      "learning_rate": 0.00010518752624068911,
      "loss": 0.0532,
      "step": 898
    },
    {
      "epoch": 1.9324618736383443,
      "grad_norm": 0.5801010131835938,
      "learning_rate": 0.00010501475629978878,
      "loss": 0.0647,
      "step": 899
    },
    {
      "epoch": 1.934640522875817,
      "grad_norm": 0.47689372301101685,
      "learning_rate": 0.00010484197135106263,
      "loss": 0.0524,
      "step": 900
    },
    {
      "epoch": 1.9368191721132897,
      "grad_norm": 0.695038914680481,
      "learning_rate": 0.00010466917191160981,
      "loss": 0.0859,
      "step": 901
    },
    {
      "epoch": 1.9389978213507626,
      "grad_norm": 0.46906334161758423,
      "learning_rate": 0.0001044963584985729,
      "loss": 0.0459,
      "step": 902
    },
    {
      "epoch": 1.9411764705882353,
      "grad_norm": 0.49414071440696716,
      "learning_rate": 0.0001043235316291363,
      "loss": 0.0534,
      "step": 903
    },
    {
      "epoch": 1.943355119825708,
      "grad_norm": 0.5636811852455139,
      "learning_rate": 0.0001041506918205246,
      "loss": 0.0695,
      "step": 904
    },
    {
      "epoch": 1.9455337690631809,
      "grad_norm": 0.6023524403572083,
      "learning_rate": 0.0001039778395900012,
      "loss": 0.0805,
      "step": 905
    },
    {
      "epoch": 1.9477124183006536,
      "grad_norm": 0.4797222912311554,
      "learning_rate": 0.00010380497545486663,
      "loss": 0.0592,
      "step": 906
    },
    {
      "epoch": 1.9498910675381262,
      "grad_norm": 0.366100013256073,
      "learning_rate": 0.00010363209993245708,
      "loss": 0.0343,
      "step": 907
    },
    {
      "epoch": 1.9520697167755992,
      "grad_norm": 0.5525081157684326,
      "learning_rate": 0.00010345921354014279,
      "loss": 0.0679,
      "step": 908
    },
    {
      "epoch": 1.954248366013072,
      "grad_norm": 0.3858024477958679,
      "learning_rate": 0.00010328631679532658,
      "loss": 0.0397,
      "step": 909
    },
    {
      "epoch": 1.9564270152505445,
      "grad_norm": 0.4831399619579315,
      "learning_rate": 0.00010311341021544218,
      "loss": 0.0618,
      "step": 910
    },
    {
      "epoch": 1.9586056644880174,
      "grad_norm": 0.487746924161911,
      "learning_rate": 0.00010294049431795278,
      "loss": 0.0522,
      "step": 911
    },
    {
      "epoch": 1.9607843137254903,
      "grad_norm": 0.4599170982837677,
      "learning_rate": 0.0001027675696203495,
      "loss": 0.0456,
      "step": 912
    },
    {
      "epoch": 1.9629629629629628,
      "grad_norm": 0.6550068855285645,
      "learning_rate": 0.00010259463664014972,
      "loss": 0.0781,
      "step": 913
    },
    {
      "epoch": 1.9651416122004357,
      "grad_norm": 0.5491052269935608,
      "learning_rate": 0.00010242169589489568,
      "loss": 0.055,
      "step": 914
    },
    {
      "epoch": 1.9673202614379086,
      "grad_norm": 0.5342620611190796,
      "learning_rate": 0.0001022487479021528,
      "loss": 0.051,
      "step": 915
    },
    {
      "epoch": 1.9694989106753813,
      "grad_norm": 0.5309066772460938,
      "learning_rate": 0.00010207579317950827,
      "loss": 0.0596,
      "step": 916
    },
    {
      "epoch": 1.971677559912854,
      "grad_norm": 0.4537419080734253,
      "learning_rate": 0.00010190283224456931,
      "loss": 0.0451,
      "step": 917
    },
    {
      "epoch": 1.973856209150327,
      "grad_norm": 0.573647677898407,
      "learning_rate": 0.0001017298656149618,
      "loss": 0.0712,
      "step": 918
    }
  ],
  "logging_steps": 1,
  "max_steps": 1836,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 459,
  "total_flos": 2.2188978972760474e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}