peft-unit-test-generation-experiments/prompt-tuning/Salesforce/codegen-350M-multi/trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9991668980838657,
  "eval_steps": 500,
  "global_step": 1350,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0022216051096917524,
      "grad_norm": 0.010440913029015064,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.8403,
      "step": 1
    },
    {
      "epoch": 0.004443210219383505,
      "grad_norm": 0.007241299841552973,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.6888,
      "step": 2
    },
    {
      "epoch": 0.006664815329075257,
      "grad_norm": 0.005549824796617031,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.966,
      "step": 3
    },
    {
      "epoch": 0.00888642043876701,
      "grad_norm": 0.008592728525400162,
      "learning_rate": 8.888888888888889e-05,
      "loss": 0.8665,
      "step": 4
    },
    {
      "epoch": 0.011108025548458762,
      "grad_norm": 0.007739076856523752,
      "learning_rate": 0.0001111111111111111,
      "loss": 0.7687,
      "step": 5
    },
    {
      "epoch": 0.013329630658150514,
      "grad_norm": 0.008957313373684883,
      "learning_rate": 0.00013333333333333334,
      "loss": 0.8916,
      "step": 6
    },
    {
      "epoch": 0.015551235767842266,
      "grad_norm": 0.009202866815030575,
      "learning_rate": 0.00015555555555555556,
      "loss": 0.8653,
      "step": 7
    },
    {
      "epoch": 0.01777284087753402,
      "grad_norm": 0.007248671259731054,
      "learning_rate": 0.00017777777777777779,
      "loss": 0.861,
      "step": 8
    },
    {
      "epoch": 0.01999444598722577,
      "grad_norm": 0.007237965241074562,
      "learning_rate": 0.0002,
      "loss": 0.946,
      "step": 9
    },
    {
      "epoch": 0.022216051096917523,
      "grad_norm": 0.006773823872208595,
      "learning_rate": 0.0002222222222222222,
      "loss": 0.8272,
      "step": 10
    },
    {
      "epoch": 0.024437656206609277,
      "grad_norm": 0.004907544236630201,
      "learning_rate": 0.0002444444444444445,
      "loss": 0.7749,
      "step": 11
    },
    {
      "epoch": 0.026659261316301027,
      "grad_norm": 0.006352128926664591,
      "learning_rate": 0.0002666666666666667,
      "loss": 0.8051,
      "step": 12
    },
    {
      "epoch": 0.02888086642599278,
      "grad_norm": 0.007070545107126236,
      "learning_rate": 0.0002888888888888889,
      "loss": 0.729,
      "step": 13
    },
    {
      "epoch": 0.03110247153568453,
      "grad_norm": 0.014602522365748882,
      "learning_rate": 0.0003111111111111111,
      "loss": 0.872,
      "step": 14
    },
    {
      "epoch": 0.03332407664537628,
      "grad_norm": 0.006523902527987957,
      "learning_rate": 0.0003333333333333333,
      "loss": 0.8664,
      "step": 15
    },
    {
      "epoch": 0.03554568175506804,
      "grad_norm": 0.005539918318390846,
      "learning_rate": 0.00035555555555555557,
      "loss": 0.8333,
      "step": 16
    },
    {
      "epoch": 0.03776728686475979,
      "grad_norm": 0.008334115147590637,
      "learning_rate": 0.00037777777777777777,
      "loss": 0.8471,
      "step": 17
    },
    {
      "epoch": 0.03998889197445154,
      "grad_norm": 0.009435688145458698,
      "learning_rate": 0.0004,
      "loss": 0.9297,
      "step": 18
    },
    {
      "epoch": 0.042210497084143296,
      "grad_norm": 0.010521095246076584,
      "learning_rate": 0.00042222222222222227,
      "loss": 0.9572,
      "step": 19
    },
    {
      "epoch": 0.044432102193835046,
      "grad_norm": 0.008772826753556728,
      "learning_rate": 0.0004444444444444444,
      "loss": 1.0062,
      "step": 20
    },
    {
      "epoch": 0.0466537073035268,
      "grad_norm": 0.006737489253282547,
      "learning_rate": 0.00046666666666666666,
      "loss": 0.699,
      "step": 21
    },
    {
      "epoch": 0.048875312413218554,
      "grad_norm": 0.003660662332549691,
      "learning_rate": 0.000488888888888889,
      "loss": 0.7278,
      "step": 22
    },
    {
      "epoch": 0.051096917522910304,
      "grad_norm": 0.007398749236017466,
      "learning_rate": 0.0005111111111111112,
      "loss": 0.8316,
      "step": 23
    },
    {
      "epoch": 0.053318522632602054,
      "grad_norm": 0.006491729989647865,
      "learning_rate": 0.0005333333333333334,
      "loss": 0.8999,
      "step": 24
    },
    {
      "epoch": 0.055540127742293804,
      "grad_norm": 0.007386940531432629,
      "learning_rate": 0.0005555555555555556,
      "loss": 0.8758,
      "step": 25
    },
    {
      "epoch": 0.05776173285198556,
      "grad_norm": 0.006012436002492905,
      "learning_rate": 0.0005777777777777778,
      "loss": 0.9802,
      "step": 26
    },
    {
      "epoch": 0.05998333796167731,
      "grad_norm": 0.007025507278740406,
      "learning_rate": 0.0006000000000000001,
      "loss": 0.7958,
      "step": 27
    },
    {
      "epoch": 0.06220494307136906,
      "grad_norm": 0.008443865925073624,
      "learning_rate": 0.0006222222222222223,
      "loss": 1.0211,
      "step": 28
    },
    {
      "epoch": 0.06442654818106082,
      "grad_norm": 0.007126843556761742,
      "learning_rate": 0.0006444444444444444,
      "loss": 0.9817,
      "step": 29
    },
    {
      "epoch": 0.06664815329075256,
      "grad_norm": 0.007003214210271835,
      "learning_rate": 0.0006666666666666666,
      "loss": 0.8581,
      "step": 30
    },
    {
      "epoch": 0.06886975840044432,
      "grad_norm": 0.00639823405072093,
      "learning_rate": 0.000688888888888889,
      "loss": 0.867,
      "step": 31
    },
    {
      "epoch": 0.07109136351013608,
      "grad_norm": 0.006634861696511507,
      "learning_rate": 0.0007111111111111111,
      "loss": 0.9614,
      "step": 32
    },
    {
      "epoch": 0.07331296861982782,
      "grad_norm": 0.006281793583184481,
      "learning_rate": 0.0007333333333333333,
      "loss": 0.8645,
      "step": 33
    },
    {
      "epoch": 0.07553457372951958,
      "grad_norm": 0.009016958996653557,
      "learning_rate": 0.0007555555555555555,
      "loss": 0.9221,
      "step": 34
    },
    {
      "epoch": 0.07775617883921133,
      "grad_norm": 0.007040904834866524,
      "learning_rate": 0.0007777777777777777,
      "loss": 0.9473,
      "step": 35
    },
    {
      "epoch": 0.07997778394890308,
      "grad_norm": 0.008552225306630135,
      "learning_rate": 0.0008,
      "loss": 0.7651,
      "step": 36
    },
    {
      "epoch": 0.08219938905859484,
      "grad_norm": 0.007091434672474861,
      "learning_rate": 0.0008222222222222222,
      "loss": 0.8234,
      "step": 37
    },
    {
      "epoch": 0.08442099416828659,
      "grad_norm": 0.009340236894786358,
      "learning_rate": 0.0008444444444444445,
      "loss": 0.7472,
      "step": 38
    },
    {
      "epoch": 0.08664259927797834,
      "grad_norm": 0.010021640919148922,
      "learning_rate": 0.0008666666666666666,
      "loss": 1.039,
      "step": 39
    },
    {
      "epoch": 0.08886420438767009,
      "grad_norm": 0.011557168327271938,
      "learning_rate": 0.0008888888888888888,
      "loss": 0.8556,
      "step": 40
    },
    {
      "epoch": 0.09108580949736185,
      "grad_norm": 0.006138915196061134,
      "learning_rate": 0.0009111111111111111,
      "loss": 0.8866,
      "step": 41
    },
    {
      "epoch": 0.0933074146070536,
      "grad_norm": 0.005869209300726652,
      "learning_rate": 0.0009333333333333333,
      "loss": 0.9876,
      "step": 42
    },
    {
      "epoch": 0.09552901971674535,
      "grad_norm": 0.006708982866257429,
      "learning_rate": 0.0009555555555555556,
      "loss": 1.0414,
      "step": 43
    },
    {
      "epoch": 0.09775062482643711,
      "grad_norm": 0.006898691412061453,
      "learning_rate": 0.000977777777777778,
      "loss": 0.826,
      "step": 44
    },
    {
      "epoch": 0.09997222993612885,
      "grad_norm": 0.007414904423058033,
      "learning_rate": 0.001,
      "loss": 0.7444,
      "step": 45
    },
    {
      "epoch": 0.10219383504582061,
      "grad_norm": 0.005232702940702438,
      "learning_rate": 0.0010222222222222223,
      "loss": 0.8942,
      "step": 46
    },
    {
      "epoch": 0.10441544015551235,
      "grad_norm": 0.006096681114286184,
      "learning_rate": 0.0010444444444444444,
      "loss": 1.0935,
      "step": 47
    },
    {
      "epoch": 0.10663704526520411,
      "grad_norm": 0.008140468038618565,
      "learning_rate": 0.0010666666666666667,
      "loss": 0.8532,
      "step": 48
    },
    {
      "epoch": 0.10885865037489587,
      "grad_norm": 0.007381773088127375,
      "learning_rate": 0.001088888888888889,
      "loss": 0.869,
      "step": 49
    },
    {
      "epoch": 0.11108025548458761,
      "grad_norm": 0.011090748943388462,
      "learning_rate": 0.0011111111111111111,
      "loss": 0.782,
      "step": 50
    },
    {
      "epoch": 0.11330186059427937,
      "grad_norm": 0.012807847931981087,
      "learning_rate": 0.0011333333333333334,
      "loss": 0.9066,
      "step": 51
    },
    {
      "epoch": 0.11552346570397112,
      "grad_norm": 0.0077123180963099,
      "learning_rate": 0.0011555555555555555,
      "loss": 0.7865,
      "step": 52
    },
    {
      "epoch": 0.11774507081366287,
      "grad_norm": 0.006746234837919474,
      "learning_rate": 0.0011777777777777778,
      "loss": 0.8238,
      "step": 53
    },
    {
      "epoch": 0.11996667592335462,
      "grad_norm": 0.00544540723785758,
      "learning_rate": 0.0012000000000000001,
      "loss": 0.8159,
      "step": 54
    },
    {
      "epoch": 0.12218828103304638,
      "grad_norm": 0.004280385095626116,
      "learning_rate": 0.0012222222222222222,
      "loss": 0.7477,
      "step": 55
    },
    {
      "epoch": 0.12440988614273812,
      "grad_norm": 0.006144772749394178,
      "learning_rate": 0.0012444444444444445,
      "loss": 0.7885,
      "step": 56
    },
    {
      "epoch": 0.12663149125242987,
      "grad_norm": 0.007963410578668118,
      "learning_rate": 0.0012666666666666666,
      "loss": 0.8448,
      "step": 57
    },
    {
      "epoch": 0.12885309636212164,
      "grad_norm": 0.005704255774617195,
      "learning_rate": 0.001288888888888889,
      "loss": 0.7554,
      "step": 58
    },
    {
      "epoch": 0.13107470147181338,
      "grad_norm": 0.0052896905690431595,
      "learning_rate": 0.0013111111111111112,
      "loss": 0.7285,
      "step": 59
    },
    {
      "epoch": 0.13329630658150513,
      "grad_norm": 0.005001865327358246,
      "learning_rate": 0.0013333333333333333,
      "loss": 0.9311,
      "step": 60
    },
    {
      "epoch": 0.1355179116911969,
      "grad_norm": 0.006869412958621979,
      "learning_rate": 0.0013555555555555556,
      "loss": 0.9407,
      "step": 61
    },
    {
      "epoch": 0.13773951680088864,
      "grad_norm": 0.009640970267355442,
      "learning_rate": 0.001377777777777778,
      "loss": 0.8013,
      "step": 62
    },
    {
      "epoch": 0.13996112191058038,
      "grad_norm": 0.009351574815809727,
      "learning_rate": 0.0014,
      "loss": 0.9175,
      "step": 63
    },
    {
      "epoch": 0.14218272702027215,
      "grad_norm": 0.006775525864213705,
      "learning_rate": 0.0014222222222222223,
      "loss": 1.021,
      "step": 64
    },
    {
      "epoch": 0.1444043321299639,
      "grad_norm": 0.005892643239349127,
      "learning_rate": 0.0014444444444444444,
      "loss": 0.7416,
      "step": 65
    },
    {
      "epoch": 0.14662593723965564,
      "grad_norm": 0.008734880946576595,
      "learning_rate": 0.0014666666666666667,
      "loss": 0.9335,
      "step": 66
    },
    {
      "epoch": 0.1488475423493474,
      "grad_norm": 0.005164718721061945,
      "learning_rate": 0.001488888888888889,
      "loss": 1.1011,
      "step": 67
    },
    {
      "epoch": 0.15106914745903915,
      "grad_norm": 0.004635809920728207,
      "learning_rate": 0.001511111111111111,
      "loss": 0.8691,
      "step": 68
    },
    {
      "epoch": 0.1532907525687309,
      "grad_norm": 0.006990849506109953,
      "learning_rate": 0.0015333333333333332,
      "loss": 0.9086,
      "step": 69
    },
    {
      "epoch": 0.15551235767842267,
      "grad_norm": 0.0074639637023210526,
      "learning_rate": 0.0015555555555555555,
      "loss": 0.6275,
      "step": 70
    },
    {
      "epoch": 0.1577339627881144,
      "grad_norm": 0.00890981312841177,
      "learning_rate": 0.0015777777777777778,
      "loss": 0.7847,
      "step": 71
    },
    {
      "epoch": 0.15995556789780616,
      "grad_norm": 0.006609317846596241,
      "learning_rate": 0.0016,
      "loss": 0.7674,
      "step": 72
    },
    {
      "epoch": 0.16217717300749793,
      "grad_norm": 0.012736441567540169,
      "learning_rate": 0.0016222222222222222,
      "loss": 0.8132,
      "step": 73
    },
    {
      "epoch": 0.16439877811718967,
      "grad_norm": 0.008025340735912323,
      "learning_rate": 0.0016444444444444445,
      "loss": 0.842,
      "step": 74
    },
    {
      "epoch": 0.1666203832268814,
      "grad_norm": 0.010593456216156483,
      "learning_rate": 0.0016666666666666668,
      "loss": 0.8742,
      "step": 75
    },
    {
      "epoch": 0.16884198833657318,
      "grad_norm": 0.007079805713146925,
      "learning_rate": 0.001688888888888889,
      "loss": 0.7933,
      "step": 76
    },
    {
      "epoch": 0.17106359344626493,
      "grad_norm": 0.006508518476039171,
      "learning_rate": 0.0017111111111111114,
      "loss": 0.7358,
      "step": 77
    },
    {
      "epoch": 0.17328519855595667,
      "grad_norm": 0.007266649045050144,
      "learning_rate": 0.0017333333333333333,
      "loss": 0.7264,
      "step": 78
    },
    {
      "epoch": 0.17550680366564844,
      "grad_norm": 0.014574556611478329,
      "learning_rate": 0.0017555555555555556,
      "loss": 0.9533,
      "step": 79
    },
    {
      "epoch": 0.17772840877534019,
      "grad_norm": 0.007840900681912899,
      "learning_rate": 0.0017777777777777776,
      "loss": 0.8228,
      "step": 80
    },
    {
      "epoch": 0.17995001388503193,
      "grad_norm": 0.005117777734994888,
      "learning_rate": 0.0018,
      "loss": 0.8011,
      "step": 81
    },
    {
      "epoch": 0.1821716189947237,
      "grad_norm": 0.012980911880731583,
      "learning_rate": 0.0018222222222222223,
      "loss": 0.7205,
      "step": 82
    },
    {
      "epoch": 0.18439322410441544,
      "grad_norm": 0.008068774826824665,
      "learning_rate": 0.0018444444444444446,
      "loss": 0.8121,
      "step": 83
    },
    {
      "epoch": 0.1866148292141072,
      "grad_norm": 0.007991770282387733,
      "learning_rate": 0.0018666666666666666,
      "loss": 0.8732,
      "step": 84
    },
    {
      "epoch": 0.18883643432379896,
      "grad_norm": 0.00741413002833724,
      "learning_rate": 0.001888888888888889,
      "loss": 0.7656,
      "step": 85
    },
    {
      "epoch": 0.1910580394334907,
      "grad_norm": 0.011837368831038475,
      "learning_rate": 0.0019111111111111113,
      "loss": 0.8856,
      "step": 86
    },
    {
      "epoch": 0.19327964454318244,
      "grad_norm": 0.006025332026183605,
      "learning_rate": 0.0019333333333333336,
      "loss": 0.7813,
      "step": 87
    },
    {
      "epoch": 0.19550124965287421,
      "grad_norm": 0.006810932885855436,
      "learning_rate": 0.001955555555555556,
      "loss": 0.7852,
      "step": 88
    },
    {
      "epoch": 0.19772285476256596,
      "grad_norm": 0.01033777091652155,
      "learning_rate": 0.0019777777777777775,
      "loss": 0.9799,
      "step": 89
    },
    {
      "epoch": 0.1999444598722577,
      "grad_norm": 0.007290220353752375,
      "learning_rate": 0.002,
      "loss": 0.8157,
      "step": 90
    },
    {
      "epoch": 0.20216606498194944,
      "grad_norm": 0.0066700279712677,
      "learning_rate": 0.002022222222222222,
      "loss": 0.8832,
      "step": 91
    },
    {
      "epoch": 0.20438767009164122,
      "grad_norm": 0.006774111185222864,
      "learning_rate": 0.0020444444444444447,
      "loss": 0.8649,
      "step": 92
    },
    {
      "epoch": 0.20660927520133296,
      "grad_norm": 0.014007099904119968,
      "learning_rate": 0.0020666666666666667,
      "loss": 0.7803,
      "step": 93
    },
    {
      "epoch": 0.2088308803110247,
      "grad_norm": 0.013977098278701305,
      "learning_rate": 0.002088888888888889,
      "loss": 0.8531,
      "step": 94
    },
    {
      "epoch": 0.21105248542071647,
      "grad_norm": 0.010178284719586372,
      "learning_rate": 0.0021111111111111113,
      "loss": 0.7527,
      "step": 95
    },
    {
      "epoch": 0.21327409053040822,
      "grad_norm": 0.014048648998141289,
      "learning_rate": 0.0021333333333333334,
      "loss": 0.8357,
      "step": 96
    },
    {
      "epoch": 0.21549569564009996,
      "grad_norm": 0.010518389753997326,
      "learning_rate": 0.0021555555555555555,
      "loss": 0.892,
      "step": 97
    },
    {
      "epoch": 0.21771730074979173,
      "grad_norm": 0.006465825717896223,
      "learning_rate": 0.002177777777777778,
      "loss": 0.7175,
      "step": 98
    },
    {
      "epoch": 0.21993890585948347,
      "grad_norm": 0.008858314715325832,
      "learning_rate": 0.0021999999999999997,
      "loss": 0.8786,
      "step": 99
    },
    {
      "epoch": 0.22216051096917522,
      "grad_norm": 0.022068988531827927,
      "learning_rate": 0.0022222222222222222,
      "loss": 0.9224,
      "step": 100
    },
    {
      "epoch": 0.224382116078867,
      "grad_norm": 0.00683838315308094,
      "learning_rate": 0.0022444444444444443,
      "loss": 0.7949,
      "step": 101
    },
    {
      "epoch": 0.22660372118855873,
      "grad_norm": 0.008592939004302025,
      "learning_rate": 0.002266666666666667,
      "loss": 0.6778,
      "step": 102
    },
    {
      "epoch": 0.22882532629825048,
      "grad_norm": 0.005525896325707436,
      "learning_rate": 0.002288888888888889,
      "loss": 0.6661,
      "step": 103
    },
    {
      "epoch": 0.23104693140794225,
      "grad_norm": 0.006523781456053257,
      "learning_rate": 0.002311111111111111,
      "loss": 0.852,
      "step": 104
    },
    {
      "epoch": 0.233268536517634,
      "grad_norm": 0.007678350433707237,
      "learning_rate": 0.0023333333333333335,
      "loss": 0.7352,
      "step": 105
    },
    {
      "epoch": 0.23549014162732573,
      "grad_norm": 0.005192214157432318,
      "learning_rate": 0.0023555555555555556,
      "loss": 0.8651,
      "step": 106
    },
    {
      "epoch": 0.2377117467370175,
      "grad_norm": 0.007413934450596571,
      "learning_rate": 0.002377777777777778,
      "loss": 0.8005,
      "step": 107
    },
    {
      "epoch": 0.23993335184670925,
      "grad_norm": 0.004861793946474791,
      "learning_rate": 0.0024000000000000002,
      "loss": 0.8363,
      "step": 108
    },
    {
      "epoch": 0.242154956956401,
      "grad_norm": 0.005079795140773058,
      "learning_rate": 0.0024222222222222223,
      "loss": 0.8443,
      "step": 109
    },
    {
      "epoch": 0.24437656206609276,
      "grad_norm": 0.0050173490308225155,
      "learning_rate": 0.0024444444444444444,
      "loss": 0.8945,
      "step": 110
    },
    {
      "epoch": 0.2465981671757845,
      "grad_norm": 0.005878186784684658,
      "learning_rate": 0.0024666666666666665,
      "loss": 0.7684,
      "step": 111
    },
    {
      "epoch": 0.24881977228547625,
      "grad_norm": 0.005863640923053026,
      "learning_rate": 0.002488888888888889,
      "loss": 0.8823,
      "step": 112
    },
    {
      "epoch": 0.251041377395168,
      "grad_norm": 0.008415856398642063,
      "learning_rate": 0.002511111111111111,
      "loss": 0.7852,
      "step": 113
    },
    {
      "epoch": 0.25326298250485973,
      "grad_norm": 0.006449985783547163,
      "learning_rate": 0.002533333333333333,
      "loss": 0.8724,
      "step": 114
    },
    {
      "epoch": 0.2554845876145515,
      "grad_norm": 0.006899280473589897,
      "learning_rate": 0.0025555555555555557,
      "loss": 0.7742,
      "step": 115
    },
    {
      "epoch": 0.2577061927242433,
      "grad_norm": 0.005553306546062231,
      "learning_rate": 0.002577777777777778,
      "loss": 0.9467,
      "step": 116
    },
    {
      "epoch": 0.259927797833935,
      "grad_norm": 0.004053581040352583,
      "learning_rate": 0.0026000000000000003,
      "loss": 0.7767,
      "step": 117
    },
    {
      "epoch": 0.26214940294362676,
      "grad_norm": 0.006906622089445591,
      "learning_rate": 0.0026222222222222224,
      "loss": 0.8731,
      "step": 118
    },
    {
      "epoch": 0.26437100805331853,
      "grad_norm": 0.004571325611323118,
      "learning_rate": 0.0026444444444444445,
      "loss": 0.7887,
      "step": 119
    },
    {
      "epoch": 0.26659261316301025,
      "grad_norm": 0.01691054366528988,
      "learning_rate": 0.0026666666666666666,
      "loss": 1.2046,
      "step": 120
    },
    {
      "epoch": 0.268814218272702,
      "grad_norm": 0.006552200298756361,
      "learning_rate": 0.0026888888888888887,
      "loss": 1.078,
      "step": 121
    },
    {
      "epoch": 0.2710358233823938,
      "grad_norm": 0.007155867293477058,
      "learning_rate": 0.002711111111111111,
      "loss": 0.8023,
      "step": 122
    },
    {
      "epoch": 0.2732574284920855,
      "grad_norm": 0.006113141775131226,
      "learning_rate": 0.0027333333333333333,
      "loss": 0.8907,
      "step": 123
    },
    {
      "epoch": 0.2754790336017773,
      "grad_norm": 0.004279575310647488,
      "learning_rate": 0.002755555555555556,
      "loss": 0.8393,
      "step": 124
    },
    {
      "epoch": 0.27770063871146905,
      "grad_norm": 0.007299073971807957,
      "learning_rate": 0.002777777777777778,
      "loss": 0.7685,
      "step": 125
    },
    {
      "epoch": 0.27992224382116077,
      "grad_norm": 0.005895877256989479,
      "learning_rate": 0.0028,
      "loss": 0.7787,
      "step": 126
    },
    {
      "epoch": 0.28214384893085254,
      "grad_norm": 0.005973066668957472,
      "learning_rate": 0.0028222222222222225,
      "loss": 0.873,
      "step": 127
    },
    {
      "epoch": 0.2843654540405443,
      "grad_norm": 0.006746681872755289,
      "learning_rate": 0.0028444444444444446,
      "loss": 0.8828,
      "step": 128
    },
    {
      "epoch": 0.286587059150236,
      "grad_norm": 0.0047051082365214825,
      "learning_rate": 0.0028666666666666667,
      "loss": 0.8526,
      "step": 129
    },
    {
      "epoch": 0.2888086642599278,
      "grad_norm": 0.0053818318992853165,
      "learning_rate": 0.0028888888888888888,
      "loss": 0.7842,
      "step": 130
    },
    {
      "epoch": 0.29103026936961957,
      "grad_norm": 0.00510416692122817,
      "learning_rate": 0.002911111111111111,
      "loss": 0.8911,
      "step": 131
    },
    {
      "epoch": 0.2932518744793113,
      "grad_norm": 0.00566230108961463,
      "learning_rate": 0.0029333333333333334,
      "loss": 0.8506,
      "step": 132
    },
    {
      "epoch": 0.29547347958900305,
      "grad_norm": 0.007671915460377932,
      "learning_rate": 0.0029555555555555555,
      "loss": 0.8499,
      "step": 133
    },
    {
      "epoch": 0.2976950846986948,
      "grad_norm": 0.005723770707845688,
      "learning_rate": 0.002977777777777778,
      "loss": 0.9035,
      "step": 134
    },
    {
      "epoch": 0.29991668980838654,
      "grad_norm": 0.008367186412215233,
      "learning_rate": 0.003,
      "loss": 1.0066,
      "step": 135
    },
    {
      "epoch": 0.3021382949180783,
      "grad_norm": 0.00792329665273428,
      "learning_rate": 0.0029975308641975312,
      "loss": 0.8272,
      "step": 136
    },
    {
      "epoch": 0.3043599000277701,
      "grad_norm": 0.00764816626906395,
      "learning_rate": 0.0029950617283950615,
      "loss": 0.8652,
      "step": 137
    },
    {
      "epoch": 0.3065815051374618,
      "grad_norm": 0.007859203033149242,
      "learning_rate": 0.0029925925925925927,
      "loss": 0.9529,
      "step": 138
    },
    {
      "epoch": 0.30880311024715357,
      "grad_norm": 0.006429871078580618,
      "learning_rate": 0.0029901234567901234,
      "loss": 1.022,
      "step": 139
    },
    {
      "epoch": 0.31102471535684534,
      "grad_norm": 0.007074374705553055,
      "learning_rate": 0.0029876543209876546,
      "loss": 0.8192,
      "step": 140
    },
    {
      "epoch": 0.31324632046653705,
      "grad_norm": 0.006051207892596722,
      "learning_rate": 0.002985185185185185,
      "loss": 0.6763,
      "step": 141
    },
    {
      "epoch": 0.3154679255762288,
      "grad_norm": 0.005065048113465309,
      "learning_rate": 0.002982716049382716,
      "loss": 0.8691,
      "step": 142
    },
    {
      "epoch": 0.3176895306859206,
      "grad_norm": 0.006148957181721926,
      "learning_rate": 0.0029802469135802472,
      "loss": 0.6874,
      "step": 143
    },
    {
      "epoch": 0.3199111357956123,
      "grad_norm": 0.007386965677142143,
      "learning_rate": 0.002977777777777778,
      "loss": 1.0042,
      "step": 144
    },
    {
      "epoch": 0.3221327409053041,
      "grad_norm": 0.006896166130900383,
      "learning_rate": 0.0029753086419753087,
      "loss": 0.8665,
      "step": 145
    },
    {
      "epoch": 0.32435434601499585,
      "grad_norm": 0.010677389800548553,
      "learning_rate": 0.0029728395061728394,
      "loss": 0.8079,
      "step": 146
    },
    {
      "epoch": 0.32657595112468757,
      "grad_norm": 0.0062642525881528854,
      "learning_rate": 0.0029703703703703706,
      "loss": 0.7643,
      "step": 147
    },
    {
      "epoch": 0.32879755623437934,
      "grad_norm": 0.013006279245018959,
      "learning_rate": 0.0029679012345679013,
      "loss": 0.839,
      "step": 148
    },
    {
      "epoch": 0.3310191613440711,
      "grad_norm": 0.011782053858041763,
      "learning_rate": 0.002965432098765432,
      "loss": 0.7516,
      "step": 149
    },
    {
      "epoch": 0.3332407664537628,
      "grad_norm": 0.0076681142672896385,
      "learning_rate": 0.002962962962962963,
      "loss": 0.9896,
      "step": 150
    },
    {
      "epoch": 0.3354623715634546,
      "grad_norm": 0.006526409648358822,
      "learning_rate": 0.002960493827160494,
      "loss": 0.8881,
      "step": 151
    },
    {
      "epoch": 0.33768397667314637,
      "grad_norm": 0.00649358332157135,
      "learning_rate": 0.0029580246913580247,
      "loss": 0.7183,
      "step": 152
    },
    {
      "epoch": 0.3399055817828381,
      "grad_norm": 0.005959691014140844,
      "learning_rate": 0.0029555555555555555,
      "loss": 0.8335,
      "step": 153
    },
    {
      "epoch": 0.34212718689252986,
      "grad_norm": 0.005547534208744764,
      "learning_rate": 0.0029530864197530866,
      "loss": 0.7954,
      "step": 154
    },
    {
      "epoch": 0.3443487920022216,
      "grad_norm": 0.005612339824438095,
      "learning_rate": 0.0029506172839506174,
      "loss": 0.7647,
      "step": 155
    },
    {
      "epoch": 0.34657039711191334,
      "grad_norm": 0.00677449582144618,
      "learning_rate": 0.002948148148148148,
      "loss": 0.9327,
      "step": 156
    },
    {
      "epoch": 0.3487920022216051,
      "grad_norm": 0.007437689695507288,
      "learning_rate": 0.002945679012345679,
      "loss": 0.8094,
      "step": 157
    },
    {
      "epoch": 0.3510136073312969,
      "grad_norm": 0.005581557285040617,
      "learning_rate": 0.00294320987654321,
      "loss": 0.9587,
      "step": 158
    },
    {
      "epoch": 0.3532352124409886,
      "grad_norm": 0.007026318926364183,
      "learning_rate": 0.0029407407407407407,
      "loss": 0.9297,
      "step": 159
    },
    {
      "epoch": 0.35545681755068037,
      "grad_norm": 0.007772298529744148,
      "learning_rate": 0.0029382716049382715,
      "loss": 0.6309,
      "step": 160
    },
    {
      "epoch": 0.35767842266037214,
      "grad_norm": 0.005535977426916361,
      "learning_rate": 0.0029358024691358026,
      "loss": 0.8712,
      "step": 161
    },
    {
      "epoch": 0.35990002777006386,
      "grad_norm": 0.0064979312010109425,
      "learning_rate": 0.0029333333333333334,
      "loss": 0.7258,
      "step": 162
    },
    {
      "epoch": 0.36212163287975563,
      "grad_norm": 0.009692924097180367,
      "learning_rate": 0.002930864197530864,
      "loss": 0.8411,
      "step": 163
    },
    {
      "epoch": 0.3643432379894474,
      "grad_norm": 0.0048782131634652615,
      "learning_rate": 0.002928395061728395,
      "loss": 0.8574,
      "step": 164
    },
    {
      "epoch": 0.3665648430991391,
      "grad_norm": 0.005760766100138426,
      "learning_rate": 0.002925925925925926,
      "loss": 0.771,
      "step": 165
    },
    {
      "epoch": 0.3687864482088309,
      "grad_norm": 0.0057001737877726555,
      "learning_rate": 0.002923456790123457,
      "loss": 0.8197,
      "step": 166
    },
    {
      "epoch": 0.37100805331852266,
      "grad_norm": 0.0068498472683131695,
      "learning_rate": 0.0029209876543209875,
      "loss": 0.8001,
      "step": 167
    },
    {
      "epoch": 0.3732296584282144,
      "grad_norm": 0.00659180385991931,
      "learning_rate": 0.0029185185185185186,
      "loss": 0.6573,
      "step": 168
    },
    {
      "epoch": 0.37545126353790614,
      "grad_norm": 0.005685858894139528,
      "learning_rate": 0.0029160493827160494,
      "loss": 0.8331,
      "step": 169
    },
    {
      "epoch": 0.3776728686475979,
      "grad_norm": 0.005140091758221388,
      "learning_rate": 0.0029135802469135805,
      "loss": 0.6738,
      "step": 170
    },
    {
      "epoch": 0.37989447375728963,
      "grad_norm": 0.006014992017298937,
      "learning_rate": 0.002911111111111111,
      "loss": 0.692,
      "step": 171
    },
    {
      "epoch": 0.3821160788669814,
      "grad_norm": 0.00642600329592824,
      "learning_rate": 0.002908641975308642,
      "loss": 0.6947,
      "step": 172
    },
    {
      "epoch": 0.3843376839766732,
      "grad_norm": 0.0054779211059212685,
      "learning_rate": 0.002906172839506173,
      "loss": 0.8936,
      "step": 173
    },
    {
      "epoch": 0.3865592890863649,
      "grad_norm": 0.006306678056716919,
      "learning_rate": 0.002903703703703704,
      "loss": 0.8784,
      "step": 174
    },
    {
      "epoch": 0.38878089419605666,
      "grad_norm": 0.006951971910893917,
      "learning_rate": 0.0029012345679012346,
      "loss": 0.8032,
      "step": 175
    },
    {
      "epoch": 0.39100249930574843,
      "grad_norm": 0.011171508580446243,
      "learning_rate": 0.0028987654320987654,
      "loss": 0.7842,
      "step": 176
    },
    {
      "epoch": 0.39322410441544015,
      "grad_norm": 0.008988376706838608,
      "learning_rate": 0.0028962962962962966,
      "loss": 0.9934,
      "step": 177
    },
    {
      "epoch": 0.3954457095251319,
      "grad_norm": 0.008020875044167042,
      "learning_rate": 0.0028938271604938273,
      "loss": 0.887,
      "step": 178
    },
    {
      "epoch": 0.3976673146348237,
      "grad_norm": 0.00882386602461338,
      "learning_rate": 0.002891358024691358,
      "loss": 0.8859,
      "step": 179
    },
    {
      "epoch": 0.3998889197445154,
      "grad_norm": 0.0066726855002343655,
      "learning_rate": 0.0028888888888888888,
      "loss": 0.7581,
      "step": 180
    },
    {
      "epoch": 0.4021105248542072,
      "grad_norm": 0.011035891249775887,
      "learning_rate": 0.00288641975308642,
      "loss": 0.9224,
      "step": 181
    },
    {
      "epoch": 0.4043321299638989,
      "grad_norm": 0.0045290132984519005,
      "learning_rate": 0.0028839506172839507,
      "loss": 0.6422,
      "step": 182
    },
    {
      "epoch": 0.40655373507359066,
      "grad_norm": 0.006385261192917824,
      "learning_rate": 0.0028814814814814814,
      "loss": 0.7726,
      "step": 183
    },
    {
      "epoch": 0.40877534018328243,
      "grad_norm": 0.006436359137296677,
      "learning_rate": 0.0028790123456790126,
      "loss": 0.7782,
      "step": 184
    },
    {
      "epoch": 0.41099694529297415,
      "grad_norm": 0.006331396754831076,
      "learning_rate": 0.0028765432098765433,
      "loss": 0.8402,
      "step": 185
    },
    {
      "epoch": 0.4132185504026659,
      "grad_norm": 0.009812875650823116,
      "learning_rate": 0.002874074074074074,
      "loss": 0.7303,
      "step": 186
    },
    {
      "epoch": 0.4154401555123577,
      "grad_norm": 0.008723249658942223,
      "learning_rate": 0.0028716049382716048,
      "loss": 0.8702,
      "step": 187
    },
    {
      "epoch": 0.4176617606220494,
      "grad_norm": 0.007981473580002785,
      "learning_rate": 0.002869135802469136,
      "loss": 0.7417,
      "step": 188
    },
    {
      "epoch": 0.4198833657317412,
      "grad_norm": 0.00769078591838479,
      "learning_rate": 0.0028666666666666667,
      "loss": 0.9278,
      "step": 189
    },
    {
      "epoch": 0.42210497084143295,
      "grad_norm": 0.006087169982492924,
      "learning_rate": 0.0028641975308641974,
      "loss": 0.7726,
      "step": 190
    },
    {
      "epoch": 0.42432657595112466,
      "grad_norm": 0.012171044945716858,
      "learning_rate": 0.0028617283950617286,
      "loss": 0.9587,
      "step": 191
    },
    {
      "epoch": 0.42654818106081643,
      "grad_norm": 0.0077499267645180225,
      "learning_rate": 0.0028592592592592593,
      "loss": 0.9549,
      "step": 192
    },
    {
      "epoch": 0.4287697861705082,
      "grad_norm": 0.012135319411754608,
      "learning_rate": 0.00285679012345679,
      "loss": 0.868,
      "step": 193
    },
    {
      "epoch": 0.4309913912801999,
      "grad_norm": 0.01298799179494381,
      "learning_rate": 0.0028543209876543208,
      "loss": 0.6943,
      "step": 194
    },
    {
      "epoch": 0.4332129963898917,
      "grad_norm": 0.009397965855896473,
      "learning_rate": 0.002851851851851852,
      "loss": 0.5948,
      "step": 195
    },
    {
      "epoch": 0.43543460149958346,
      "grad_norm": 0.008861496113240719,
      "learning_rate": 0.002849382716049383,
      "loss": 0.8092,
      "step": 196
    },
    {
      "epoch": 0.4376562066092752,
      "grad_norm": 0.011813944205641747,
      "learning_rate": 0.0028469135802469134,
      "loss": 0.8379,
      "step": 197
    },
    {
      "epoch": 0.43987781171896695,
      "grad_norm": 0.008746293373405933,
      "learning_rate": 0.0028444444444444446,
      "loss": 0.7222,
      "step": 198
    },
    {
      "epoch": 0.4420994168286587,
      "grad_norm": 0.006701031234115362,
      "learning_rate": 0.0028419753086419753,
      "loss": 0.7471,
      "step": 199
    },
    {
      "epoch": 0.44432102193835044,
      "grad_norm": 0.00861276499927044,
      "learning_rate": 0.0028395061728395065,
      "loss": 0.7922,
      "step": 200
    },
    {
      "epoch": 0.4465426270480422,
      "grad_norm": 0.009883517399430275,
      "learning_rate": 0.002837037037037037,
      "loss": 0.7278,
      "step": 201
    },
    {
      "epoch": 0.448764232157734,
      "grad_norm": 0.009246917441487312,
      "learning_rate": 0.002834567901234568,
      "loss": 0.7314,
      "step": 202
    },
    {
      "epoch": 0.4509858372674257,
      "grad_norm": 0.008673238568007946,
      "learning_rate": 0.002832098765432099,
      "loss": 0.9159,
      "step": 203
    },
    {
      "epoch": 0.45320744237711746,
      "grad_norm": 0.011417444795370102,
      "learning_rate": 0.00282962962962963,
      "loss": 0.7767,
      "step": 204
    },
    {
      "epoch": 0.45542904748680924,
      "grad_norm": 0.008244509808719158,
      "learning_rate": 0.0028271604938271606,
      "loss": 0.5515,
      "step": 205
    },
    {
      "epoch": 0.45765065259650095,
      "grad_norm": 0.007160137873142958,
      "learning_rate": 0.0028246913580246913,
      "loss": 0.8073,
      "step": 206
    },
    {
      "epoch": 0.4598722577061927,
      "grad_norm": 0.00974318664520979,
      "learning_rate": 0.0028222222222222225,
      "loss": 0.8644,
      "step": 207
    },
    {
      "epoch": 0.4620938628158845,
      "grad_norm": 0.012410882860422134,
      "learning_rate": 0.0028197530864197532,
      "loss": 0.8899,
      "step": 208
    },
    {
      "epoch": 0.4643154679255762,
      "grad_norm": 0.008747541345655918,
      "learning_rate": 0.002817283950617284,
      "loss": 0.7164,
      "step": 209
    },
    {
      "epoch": 0.466537073035268,
      "grad_norm": 0.006024535745382309,
      "learning_rate": 0.0028148148148148147,
      "loss": 0.8533,
      "step": 210
    },
    {
      "epoch": 0.46875867814495975,
      "grad_norm": 0.015827184543013573,
      "learning_rate": 0.002812345679012346,
      "loss": 0.8671,
      "step": 211
    },
    {
      "epoch": 0.47098028325465147,
      "grad_norm": 0.013850335963070393,
      "learning_rate": 0.0028098765432098766,
      "loss": 0.8823,
      "step": 212
    },
    {
      "epoch": 0.47320188836434324,
      "grad_norm": 0.0074915774166584015,
      "learning_rate": 0.0028074074074074073,
      "loss": 0.7789,
      "step": 213
    },
    {
      "epoch": 0.475423493474035,
      "grad_norm": 0.011638727970421314,
      "learning_rate": 0.0028049382716049385,
      "loss": 0.7609,
      "step": 214
    },
    {
      "epoch": 0.4776450985837267,
      "grad_norm": 0.007853872142732143,
      "learning_rate": 0.0028024691358024692,
      "loss": 0.8372,
      "step": 215
    },
    {
      "epoch": 0.4798667036934185,
      "grad_norm": 0.010911183431744576,
      "learning_rate": 0.0028,
      "loss": 0.8292,
      "step": 216
    },
    {
      "epoch": 0.48208830880311027,
      "grad_norm": 0.012936594896018505,
      "learning_rate": 0.0027975308641975307,
      "loss": 0.7752,
      "step": 217
    },
    {
      "epoch": 0.484309913912802,
      "grad_norm": 0.008519921451807022,
      "learning_rate": 0.002795061728395062,
      "loss": 0.8553,
      "step": 218
    },
    {
      "epoch": 0.48653151902249375,
      "grad_norm": 0.009515728801488876,
      "learning_rate": 0.0027925925925925926,
      "loss": 0.87,
      "step": 219
    },
    {
      "epoch": 0.4887531241321855,
      "grad_norm": 0.01014457643032074,
      "learning_rate": 0.0027901234567901233,
      "loss": 0.7561,
      "step": 220
    },
    {
      "epoch": 0.49097472924187724,
      "grad_norm": 0.007946905680000782,
      "learning_rate": 0.0027876543209876545,
      "loss": 0.7532,
      "step": 221
    },
    {
      "epoch": 0.493196334351569,
      "grad_norm": 0.0068723889999091625,
      "learning_rate": 0.0027851851851851852,
      "loss": 0.8536,
      "step": 222
    },
    {
      "epoch": 0.4954179394612608,
      "grad_norm": 0.006598350126296282,
      "learning_rate": 0.002782716049382716,
      "loss": 0.7341,
      "step": 223
    },
    {
      "epoch": 0.4976395445709525,
      "grad_norm": 0.00824499037116766,
      "learning_rate": 0.0027802469135802467,
      "loss": 0.7093,
      "step": 224
    },
    {
      "epoch": 0.49986114968064427,
      "grad_norm": 0.007247340399771929,
      "learning_rate": 0.002777777777777778,
      "loss": 0.9103,
      "step": 225
    },
    {
      "epoch": 0.502082754790336,
      "grad_norm": 0.008267593570053577,
      "learning_rate": 0.002775308641975309,
      "loss": 0.8218,
      "step": 226
    },
    {
      "epoch": 0.5043043599000278,
      "grad_norm": 0.00702422671020031,
      "learning_rate": 0.0027728395061728394,
      "loss": 0.8115,
      "step": 227
    },
    {
      "epoch": 0.5065259650097195,
      "grad_norm": 0.005334863904863596,
      "learning_rate": 0.0027703703703703705,
      "loss": 0.7843,
      "step": 228
    },
    {
      "epoch": 0.5087475701194113,
      "grad_norm": 0.009140919893980026,
      "learning_rate": 0.0027679012345679013,
      "loss": 0.8132,
      "step": 229
    },
    {
      "epoch": 0.510969175229103,
      "grad_norm": 0.007120530121028423,
      "learning_rate": 0.0027654320987654324,
      "loss": 0.7962,
      "step": 230
    },
    {
      "epoch": 0.5131907803387947,
      "grad_norm": 0.012158839032053947,
      "learning_rate": 0.0027629629629629627,
      "loss": 0.9541,
      "step": 231
    },
    {
      "epoch": 0.5154123854484866,
      "grad_norm": 0.015124938450753689,
      "learning_rate": 0.002760493827160494,
      "loss": 0.8044,
      "step": 232
    },
    {
      "epoch": 0.5176339905581783,
      "grad_norm": 0.007999619469046593,
      "learning_rate": 0.0027580246913580246,
      "loss": 0.8035,
      "step": 233
    },
    {
      "epoch": 0.51985559566787,
      "grad_norm": 0.02217845618724823,
      "learning_rate": 0.002755555555555556,
      "loss": 1.1012,
      "step": 234
    },
    {
      "epoch": 0.5220772007775618,
      "grad_norm": 0.010226511396467686,
      "learning_rate": 0.0027530864197530865,
      "loss": 0.6606,
      "step": 235
    },
    {
      "epoch": 0.5242988058872535,
      "grad_norm": 0.006293569225817919,
      "learning_rate": 0.0027506172839506173,
      "loss": 0.7629,
      "step": 236
    },
    {
      "epoch": 0.5265204109969452,
      "grad_norm": 0.012506984174251556,
      "learning_rate": 0.0027481481481481484,
      "loss": 0.9344,
      "step": 237
    },
    {
      "epoch": 0.5287420161066371,
      "grad_norm": 0.009668968617916107,
      "learning_rate": 0.002745679012345679,
      "loss": 0.9924,
      "step": 238
    },
    {
      "epoch": 0.5309636212163288,
      "grad_norm": 0.01322484202682972,
      "learning_rate": 0.00274320987654321,
      "loss": 0.904,
      "step": 239
    },
    {
      "epoch": 0.5331852263260205,
      "grad_norm": 0.006201568990945816,
      "learning_rate": 0.0027407407407407406,
      "loss": 0.8552,
      "step": 240
    },
    {
      "epoch": 0.5354068314357123,
      "grad_norm": 0.013609252870082855,
      "learning_rate": 0.002738271604938272,
      "loss": 0.7899,
      "step": 241
    },
    {
      "epoch": 0.537628436545404,
      "grad_norm": 0.007919220253825188,
      "learning_rate": 0.0027358024691358025,
      "loss": 0.7527,
      "step": 242
    },
    {
      "epoch": 0.5398500416550958,
      "grad_norm": 0.00696425000205636,
      "learning_rate": 0.0027333333333333333,
      "loss": 0.719,
      "step": 243
    },
    {
      "epoch": 0.5420716467647876,
      "grad_norm": 0.008950626477599144,
      "learning_rate": 0.0027308641975308644,
      "loss": 0.7014,
      "step": 244
    },
    {
      "epoch": 0.5442932518744793,
      "grad_norm": 0.012288985773921013,
      "learning_rate": 0.002728395061728395,
      "loss": 0.7899,
      "step": 245
    },
    {
      "epoch": 0.546514856984171,
      "grad_norm": 0.011310501024127007,
      "learning_rate": 0.002725925925925926,
      "loss": 0.9283,
      "step": 246
    },
    {
      "epoch": 0.5487364620938628,
      "grad_norm": 0.015484470874071121,
      "learning_rate": 0.0027234567901234566,
      "loss": 0.9007,
      "step": 247
    },
    {
      "epoch": 0.5509580672035546,
      "grad_norm": 0.008781633339822292,
      "learning_rate": 0.002720987654320988,
      "loss": 0.9231,
      "step": 248
    },
    {
      "epoch": 0.5531796723132463,
      "grad_norm": 0.009064835496246815,
      "learning_rate": 0.0027185185185185185,
      "loss": 0.7347,
      "step": 249
    },
    {
      "epoch": 0.5554012774229381,
      "grad_norm": 0.011060320772230625,
      "learning_rate": 0.0027160493827160493,
      "loss": 0.5795,
      "step": 250
    },
    {
      "epoch": 0.5576228825326298,
      "grad_norm": 0.00757238594815135,
      "learning_rate": 0.0027135802469135805,
      "loss": 0.8312,
      "step": 251
    },
    {
      "epoch": 0.5598444876423215,
      "grad_norm": 0.008306944742798805,
      "learning_rate": 0.002711111111111111,
      "loss": 0.9226,
      "step": 252
    },
    {
      "epoch": 0.5620660927520134,
      "grad_norm": 0.010423055849969387,
      "learning_rate": 0.002708641975308642,
      "loss": 0.8448,
      "step": 253
    },
    {
      "epoch": 0.5642876978617051,
      "grad_norm": 0.010584446601569653,
      "learning_rate": 0.0027061728395061727,
      "loss": 0.8116,
      "step": 254
    },
    {
      "epoch": 0.5665093029713968,
      "grad_norm": 0.00733089679852128,
      "learning_rate": 0.002703703703703704,
      "loss": 0.6264,
      "step": 255
    },
    {
      "epoch": 0.5687309080810886,
      "grad_norm": 0.007233286276459694,
      "learning_rate": 0.002701234567901235,
      "loss": 0.9293,
      "step": 256
    },
    {
      "epoch": 0.5709525131907803,
      "grad_norm": 0.005947368685156107,
      "learning_rate": 0.0026987654320987653,
      "loss": 0.8702,
      "step": 257
    },
    {
      "epoch": 0.573174118300472,
      "grad_norm": 0.007871137000620365,
      "learning_rate": 0.0026962962962962965,
      "loss": 0.7136,
      "step": 258
    },
    {
      "epoch": 0.5753957234101639,
      "grad_norm": 0.012100719846785069,
      "learning_rate": 0.002693827160493827,
      "loss": 1.0855,
      "step": 259
    },
    {
      "epoch": 0.5776173285198556,
      "grad_norm": 0.012036778964102268,
      "learning_rate": 0.0026913580246913584,
      "loss": 0.6768,
      "step": 260
    },
    {
      "epoch": 0.5798389336295473,
      "grad_norm": 0.015129570849239826,
      "learning_rate": 0.0026888888888888887,
      "loss": 0.8401,
      "step": 261
    },
    {
      "epoch": 0.5820605387392391,
      "grad_norm": 0.0090151596814394,
      "learning_rate": 0.00268641975308642,
      "loss": 0.729,
      "step": 262
    },
    {
      "epoch": 0.5842821438489308,
      "grad_norm": 0.01668744534254074,
      "learning_rate": 0.0026839506172839506,
      "loss": 0.8578,
      "step": 263
    },
    {
      "epoch": 0.5865037489586226,
      "grad_norm": 0.024113476276397705,
      "learning_rate": 0.0026814814814814817,
      "loss": 0.9802,
      "step": 264
    },
    {
      "epoch": 0.5887253540683144,
      "grad_norm": 0.006910145748406649,
      "learning_rate": 0.0026790123456790125,
      "loss": 0.8169,
      "step": 265
    },
    {
      "epoch": 0.5909469591780061,
      "grad_norm": 0.006911164615303278,
      "learning_rate": 0.002676543209876543,
      "loss": 0.8629,
      "step": 266
    },
    {
      "epoch": 0.5931685642876978,
      "grad_norm": 0.010877076536417007,
      "learning_rate": 0.0026740740740740744,
      "loss": 0.8885,
      "step": 267
    },
    {
      "epoch": 0.5953901693973896,
      "grad_norm": 0.01698371395468712,
      "learning_rate": 0.0026716049382716047,
      "loss": 0.8002,
      "step": 268
    },
    {
      "epoch": 0.5976117745070814,
      "grad_norm": 0.008402695879340172,
      "learning_rate": 0.002669135802469136,
      "loss": 0.7057,
      "step": 269
    },
    {
      "epoch": 0.5998333796167731,
      "grad_norm": 0.010379761457443237,
      "learning_rate": 0.0026666666666666666,
      "loss": 0.829,
      "step": 270
    },
    {
      "epoch": 0.6020549847264649,
      "grad_norm": 0.009576470591127872,
      "learning_rate": 0.0026641975308641977,
      "loss": 0.9202,
      "step": 271
    },
    {
      "epoch": 0.6042765898361566,
      "grad_norm": 0.008488965220749378,
      "learning_rate": 0.0026617283950617285,
      "loss": 0.7697,
      "step": 272
    },
    {
      "epoch": 0.6064981949458483,
      "grad_norm": 0.011320218443870544,
      "learning_rate": 0.002659259259259259,
      "loss": 0.756,
      "step": 273
    },
    {
      "epoch": 0.6087198000555402,
      "grad_norm": 0.02209623157978058,
      "learning_rate": 0.0026567901234567904,
      "loss": 1.0533,
      "step": 274
    },
    {
      "epoch": 0.6109414051652319,
      "grad_norm": 0.01179931778460741,
      "learning_rate": 0.002654320987654321,
      "loss": 0.8604,
      "step": 275
    },
    {
      "epoch": 0.6131630102749236,
      "grad_norm": 0.013143769465386868,
      "learning_rate": 0.002651851851851852,
      "loss": 0.8984,
      "step": 276
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.010282885283231735,
      "learning_rate": 0.0026493827160493826,
      "loss": 0.8802,
      "step": 277
    },
    {
      "epoch": 0.6176062204943071,
      "grad_norm": 0.007673004176467657,
      "learning_rate": 0.0026469135802469138,
      "loss": 0.8439,
      "step": 278
    },
    {
      "epoch": 0.6198278256039988,
      "grad_norm": 0.008318427950143814,
      "learning_rate": 0.0026444444444444445,
      "loss": 0.7776,
      "step": 279
    },
    {
      "epoch": 0.6220494307136907,
      "grad_norm": 0.012036367319524288,
      "learning_rate": 0.0026419753086419752,
      "loss": 0.7192,
      "step": 280
    },
    {
      "epoch": 0.6242710358233824,
      "grad_norm": 0.0068825045600533485,
      "learning_rate": 0.0026395061728395064,
      "loss": 0.7847,
      "step": 281
    },
    {
      "epoch": 0.6264926409330741,
      "grad_norm": 0.007046104408800602,
      "learning_rate": 0.002637037037037037,
      "loss": 0.7069,
      "step": 282
    },
    {
      "epoch": 0.6287142460427659,
      "grad_norm": 0.00804830901324749,
      "learning_rate": 0.002634567901234568,
      "loss": 0.9622,
      "step": 283
    },
    {
      "epoch": 0.6309358511524576,
      "grad_norm": 0.0072870319709181786,
      "learning_rate": 0.0026320987654320986,
      "loss": 0.6801,
      "step": 284
    },
    {
      "epoch": 0.6331574562621494,
      "grad_norm": 0.00952050369232893,
      "learning_rate": 0.0026296296296296298,
      "loss": 0.8667,
      "step": 285
    },
    {
      "epoch": 0.6353790613718412,
      "grad_norm": 0.006871790159493685,
      "learning_rate": 0.0026271604938271605,
      "loss": 0.7638,
      "step": 286
    },
    {
      "epoch": 0.6376006664815329,
      "grad_norm": 0.006217732559889555,
      "learning_rate": 0.0026246913580246912,
      "loss": 0.7272,
      "step": 287
    },
    {
      "epoch": 0.6398222715912246,
      "grad_norm": 0.006713525392115116,
      "learning_rate": 0.0026222222222222224,
      "loss": 0.7885,
      "step": 288
    },
    {
      "epoch": 0.6420438767009164,
      "grad_norm": 0.004922814667224884,
      "learning_rate": 0.002619753086419753,
      "loss": 0.6974,
      "step": 289
    },
    {
      "epoch": 0.6442654818106082,
      "grad_norm": 0.00727014010772109,
      "learning_rate": 0.002617283950617284,
      "loss": 0.9864,
      "step": 290
    },
    {
      "epoch": 0.6464870869202999,
      "grad_norm": 0.010591446422040462,
      "learning_rate": 0.0026148148148148146,
      "loss": 0.8506,
      "step": 291
    },
    {
      "epoch": 0.6487086920299917,
      "grad_norm": 0.009507875889539719,
      "learning_rate": 0.0026123456790123458,
      "loss": 0.754,
      "step": 292
    },
    {
      "epoch": 0.6509302971396834,
      "grad_norm": 0.00874133687466383,
      "learning_rate": 0.0026098765432098765,
      "loss": 0.9275,
      "step": 293
    },
    {
      "epoch": 0.6531519022493751,
      "grad_norm": 0.008199427276849747,
      "learning_rate": 0.0026074074074074072,
      "loss": 0.8286,
      "step": 294
    },
    {
      "epoch": 0.655373507359067,
      "grad_norm": 0.010877394117414951,
      "learning_rate": 0.0026049382716049384,
      "loss": 0.6721,
      "step": 295
    },
    {
      "epoch": 0.6575951124687587,
      "grad_norm": 0.007632778026163578,
      "learning_rate": 0.002602469135802469,
      "loss": 0.796,
      "step": 296
    },
    {
      "epoch": 0.6598167175784504,
      "grad_norm": 0.007029394619166851,
      "learning_rate": 0.0026000000000000003,
      "loss": 0.7297,
      "step": 297
    },
    {
      "epoch": 0.6620383226881422,
      "grad_norm": 0.008049899712204933,
      "learning_rate": 0.0025975308641975306,
      "loss": 0.7303,
      "step": 298
    },
    {
      "epoch": 0.6642599277978339,
      "grad_norm": 0.009279787540435791,
      "learning_rate": 0.002595061728395062,
      "loss": 0.7327,
      "step": 299
    },
    {
      "epoch": 0.6664815329075257,
      "grad_norm": 0.006137878634035587,
      "learning_rate": 0.0025925925925925925,
      "loss": 0.7083,
      "step": 300
    },
    {
      "epoch": 0.6687031380172175,
      "grad_norm": 0.008568949066102505,
      "learning_rate": 0.0025901234567901237,
      "loss": 0.9256,
      "step": 301
    },
    {
      "epoch": 0.6709247431269092,
      "grad_norm": 0.006636477075517178,
      "learning_rate": 0.0025876543209876544,
      "loss": 0.7854,
      "step": 302
    },
    {
      "epoch": 0.6731463482366009,
      "grad_norm": 0.008452818728983402,
      "learning_rate": 0.002585185185185185,
      "loss": 0.8342,
      "step": 303
    },
    {
      "epoch": 0.6753679533462927,
      "grad_norm": 0.0126169603317976,
      "learning_rate": 0.0025827160493827163,
      "loss": 0.7861,
      "step": 304
    },
    {
      "epoch": 0.6775895584559845,
      "grad_norm": 0.007217437960207462,
      "learning_rate": 0.002580246913580247,
      "loss": 0.7743,
      "step": 305
    },
    {
      "epoch": 0.6798111635656762,
      "grad_norm": 0.008002759888768196,
      "learning_rate": 0.002577777777777778,
      "loss": 0.6823,
      "step": 306
    },
    {
      "epoch": 0.682032768675368,
      "grad_norm": 0.013185406103730202,
      "learning_rate": 0.0025753086419753085,
      "loss": 0.8276,
      "step": 307
    },
    {
      "epoch": 0.6842543737850597,
      "grad_norm": 0.006368585396558046,
      "learning_rate": 0.0025728395061728397,
      "loss": 0.6713,
      "step": 308
    },
    {
      "epoch": 0.6864759788947514,
      "grad_norm": 0.007478953339159489,
      "learning_rate": 0.0025703703703703704,
      "loss": 0.7786,
      "step": 309
    },
    {
      "epoch": 0.6886975840044433,
      "grad_norm": 0.008412404917180538,
      "learning_rate": 0.002567901234567901,
      "loss": 0.6874,
      "step": 310
    },
    {
      "epoch": 0.690919189114135,
      "grad_norm": 0.0067495848052203655,
      "learning_rate": 0.0025654320987654323,
      "loss": 0.7412,
      "step": 311
    },
    {
      "epoch": 0.6931407942238267,
      "grad_norm": 0.007151488680392504,
      "learning_rate": 0.002562962962962963,
      "loss": 0.6853,
      "step": 312
    },
    {
      "epoch": 0.6953623993335185,
      "grad_norm": 0.009860935620963573,
      "learning_rate": 0.002560493827160494,
      "loss": 0.7972,
      "step": 313
    },
    {
      "epoch": 0.6975840044432102,
      "grad_norm": 0.007228904403746128,
      "learning_rate": 0.0025580246913580245,
      "loss": 0.816,
      "step": 314
    },
    {
      "epoch": 0.6998056095529019,
      "grad_norm": 0.008685276843607426,
      "learning_rate": 0.0025555555555555557,
      "loss": 0.9024,
      "step": 315
    },
    {
      "epoch": 0.7020272146625938,
      "grad_norm": 0.011443904601037502,
      "learning_rate": 0.0025530864197530864,
      "loss": 0.8276,
      "step": 316
    },
    {
      "epoch": 0.7042488197722855,
      "grad_norm": 0.00936659425497055,
      "learning_rate": 0.002550617283950617,
      "loss": 0.8549,
      "step": 317
    },
    {
      "epoch": 0.7064704248819772,
      "grad_norm": 0.007054576184600592,
      "learning_rate": 0.0025481481481481483,
      "loss": 0.7386,
      "step": 318
    },
    {
      "epoch": 0.708692029991669,
      "grad_norm": 0.008555485866963863,
      "learning_rate": 0.002545679012345679,
      "loss": 0.6551,
      "step": 319
    },
    {
      "epoch": 0.7109136351013607,
      "grad_norm": 0.02001245878636837,
      "learning_rate": 0.00254320987654321,
      "loss": 0.9046,
      "step": 320
    },
    {
      "epoch": 0.7131352402110525,
      "grad_norm": 0.018720177933573723,
      "learning_rate": 0.0025407407407407405,
      "loss": 0.8019,
      "step": 321
    },
    {
      "epoch": 0.7153568453207443,
      "grad_norm": 0.0072449627332389355,
      "learning_rate": 0.0025382716049382717,
      "loss": 0.8341,
      "step": 322
    },
    {
      "epoch": 0.717578450430436,
      "grad_norm": 0.010015754029154778,
      "learning_rate": 0.0025358024691358024,
      "loss": 0.8745,
      "step": 323
    },
    {
      "epoch": 0.7198000555401277,
      "grad_norm": 0.008088629692792892,
      "learning_rate": 0.002533333333333333,
      "loss": 0.8579,
      "step": 324
    },
    {
      "epoch": 0.7220216606498195,
      "grad_norm": 0.014426672831177711,
      "learning_rate": 0.0025308641975308644,
      "loss": 0.8284,
      "step": 325
    },
    {
      "epoch": 0.7242432657595113,
      "grad_norm": 0.006323180161416531,
      "learning_rate": 0.002528395061728395,
      "loss": 0.5411,
      "step": 326
    },
    {
      "epoch": 0.726464870869203,
      "grad_norm": 0.0061256056651473045,
      "learning_rate": 0.0025259259259259263,
      "loss": 0.7823,
      "step": 327
    },
    {
      "epoch": 0.7286864759788948,
      "grad_norm": 0.008604188449680805,
      "learning_rate": 0.0025234567901234566,
      "loss": 0.748,
      "step": 328
    },
    {
      "epoch": 0.7309080810885865,
      "grad_norm": 0.006540137343108654,
      "learning_rate": 0.0025209876543209877,
      "loss": 0.6014,
      "step": 329
    },
    {
      "epoch": 0.7331296861982782,
      "grad_norm": 0.007149749435484409,
      "learning_rate": 0.0025185185185185185,
      "loss": 0.7551,
      "step": 330
    },
    {
      "epoch": 0.7353512913079701,
      "grad_norm": 0.005437437444925308,
      "learning_rate": 0.0025160493827160496,
      "loss": 0.7338,
      "step": 331
    },
    {
      "epoch": 0.7375728964176618,
      "grad_norm": 0.010258332826197147,
      "learning_rate": 0.0025135802469135804,
      "loss": 0.7954,
      "step": 332
    },
    {
      "epoch": 0.7397945015273535,
      "grad_norm": 0.007653366308659315,
      "learning_rate": 0.002511111111111111,
      "loss": 0.9539,
      "step": 333
    },
    {
      "epoch": 0.7420161066370453,
      "grad_norm": 0.0065800040028989315,
      "learning_rate": 0.0025086419753086423,
      "loss": 0.7384,
      "step": 334
    },
    {
      "epoch": 0.744237711746737,
      "grad_norm": 0.0080635417252779,
      "learning_rate": 0.002506172839506173,
      "loss": 0.976,
      "step": 335
    },
    {
      "epoch": 0.7464593168564287,
      "grad_norm": 0.007618797477334738,
      "learning_rate": 0.0025037037037037037,
      "loss": 0.9015,
      "step": 336
    },
    {
      "epoch": 0.7486809219661206,
      "grad_norm": 0.006701778620481491,
      "learning_rate": 0.0025012345679012345,
      "loss": 0.6757,
      "step": 337
    },
    {
      "epoch": 0.7509025270758123,
      "grad_norm": 0.008706500753760338,
      "learning_rate": 0.0024987654320987656,
      "loss": 1.0567,
      "step": 338
    },
    {
      "epoch": 0.753124132185504,
      "grad_norm": 0.006723032798618078,
      "learning_rate": 0.0024962962962962964,
      "loss": 0.7917,
      "step": 339
    },
    {
      "epoch": 0.7553457372951958,
      "grad_norm": 0.004636780824512243,
      "learning_rate": 0.002493827160493827,
      "loss": 0.5242,
      "step": 340
    },
    {
      "epoch": 0.7575673424048875,
      "grad_norm": 0.006264273542910814,
      "learning_rate": 0.0024913580246913583,
      "loss": 1.0525,
      "step": 341
    },
    {
      "epoch": 0.7597889475145793,
      "grad_norm": 0.007951164618134499,
      "learning_rate": 0.002488888888888889,
      "loss": 0.7083,
      "step": 342
    },
    {
      "epoch": 0.7620105526242711,
      "grad_norm": 0.004694493021816015,
      "learning_rate": 0.0024864197530864197,
      "loss": 0.776,
      "step": 343
    },
    {
      "epoch": 0.7642321577339628,
      "grad_norm": 0.014506370760500431,
      "learning_rate": 0.0024839506172839505,
      "loss": 0.7931,
      "step": 344
    },
    {
      "epoch": 0.7664537628436545,
      "grad_norm": 0.008699407801032066,
      "learning_rate": 0.0024814814814814816,
      "loss": 0.8931,
      "step": 345
    },
    {
      "epoch": 0.7686753679533463,
      "grad_norm": 0.00661693187430501,
      "learning_rate": 0.0024790123456790124,
      "loss": 0.8803,
      "step": 346
    },
    {
      "epoch": 0.7708969730630381,
      "grad_norm": 0.009916028007864952,
      "learning_rate": 0.002476543209876543,
      "loss": 0.7263,
      "step": 347
    },
    {
      "epoch": 0.7731185781727298,
      "grad_norm": 0.009556926786899567,
      "learning_rate": 0.0024740740740740743,
      "loss": 0.8132,
      "step": 348
    },
    {
      "epoch": 0.7753401832824216,
      "grad_norm": 0.011330212466418743,
      "learning_rate": 0.002471604938271605,
      "loss": 0.7426,
      "step": 349
    },
    {
      "epoch": 0.7775617883921133,
      "grad_norm": 0.00721346540376544,
      "learning_rate": 0.0024691358024691358,
      "loss": 0.7328,
      "step": 350
    },
    {
      "epoch": 0.779783393501805,
      "grad_norm": 0.007094467990100384,
      "learning_rate": 0.0024666666666666665,
      "loss": 0.7336,
      "step": 351
    },
    {
      "epoch": 0.7820049986114969,
      "grad_norm": 0.0060173156671226025,
      "learning_rate": 0.0024641975308641977,
      "loss": 0.6998,
      "step": 352
    },
    {
      "epoch": 0.7842266037211886,
      "grad_norm": 0.006421815603971481,
      "learning_rate": 0.0024617283950617284,
      "loss": 0.8042,
      "step": 353
    },
    {
      "epoch": 0.7864482088308803,
      "grad_norm": 0.005537498742341995,
      "learning_rate": 0.002459259259259259,
      "loss": 0.6824,
      "step": 354
    },
    {
      "epoch": 0.7886698139405721,
      "grad_norm": 0.007721144240349531,
      "learning_rate": 0.0024567901234567903,
      "loss": 0.7393,
      "step": 355
    },
    {
      "epoch": 0.7908914190502638,
      "grad_norm": 0.006249431986361742,
      "learning_rate": 0.002454320987654321,
      "loss": 0.8343,
      "step": 356
    },
    {
      "epoch": 0.7931130241599555,
      "grad_norm": 0.007465182337909937,
      "learning_rate": 0.002451851851851852,
      "loss": 0.6554,
      "step": 357
    },
    {
      "epoch": 0.7953346292696474,
      "grad_norm": 0.007041784934699535,
      "learning_rate": 0.0024493827160493825,
      "loss": 0.7373,
      "step": 358
    },
    {
      "epoch": 0.7975562343793391,
      "grad_norm": 0.006842833943665028,
      "learning_rate": 0.0024469135802469137,
      "loss": 0.8867,
      "step": 359
    },
    {
      "epoch": 0.7997778394890308,
      "grad_norm": 0.009905919432640076,
      "learning_rate": 0.0024444444444444444,
      "loss": 0.7637,
      "step": 360
    },
    {
      "epoch": 0.8019994445987225,
      "grad_norm": 0.010288221761584282,
      "learning_rate": 0.0024419753086419756,
      "loss": 0.8162,
      "step": 361
    },
    {
      "epoch": 0.8042210497084143,
      "grad_norm": 0.010223974473774433,
      "learning_rate": 0.0024395061728395063,
      "loss": 0.8779,
      "step": 362
    },
    {
      "epoch": 0.8064426548181061,
      "grad_norm": 0.012591023929417133,
      "learning_rate": 0.002437037037037037,
      "loss": 0.8765,
      "step": 363
    },
    {
      "epoch": 0.8086642599277978,
      "grad_norm": 0.005684548057615757,
      "learning_rate": 0.002434567901234568,
      "loss": 0.7797,
      "step": 364
    },
    {
      "epoch": 0.8108858650374896,
      "grad_norm": 0.0055415453389286995,
      "learning_rate": 0.002432098765432099,
      "loss": 0.6947,
      "step": 365
    },
    {
      "epoch": 0.8131074701471813,
      "grad_norm": 0.006009736098349094,
      "learning_rate": 0.0024296296296296297,
      "loss": 0.9479,
      "step": 366
    },
    {
"epoch": 0.815329075256873, | |
"grad_norm": 0.009372019208967686, | |
"learning_rate": 0.0024271604938271604, | |
"loss": 0.8523, | |
"step": 367 | |
}, | |
{ | |
"epoch": 0.8175506803665649, | |
"grad_norm": 0.007912317290902138, | |
"learning_rate": 0.0024246913580246916, | |
"loss": 0.6513, | |
"step": 368 | |
}, | |
{ | |
"epoch": 0.8197722854762566, | |
"grad_norm": 0.011661085300147533, | |
"learning_rate": 0.0024222222222222223, | |
"loss": 0.8989, | |
"step": 369 | |
}, | |
{ | |
"epoch": 0.8219938905859483, | |
"grad_norm": 0.011349090375006199, | |
"learning_rate": 0.002419753086419753, | |
"loss": 0.9422, | |
"step": 370 | |
}, | |
{ | |
"epoch": 0.8242154956956401, | |
"grad_norm": 0.006582579109817743, | |
"learning_rate": 0.002417283950617284, | |
"loss": 0.7312, | |
"step": 371 | |
}, | |
{ | |
"epoch": 0.8264371008053318, | |
"grad_norm": 0.006679638754576445, | |
"learning_rate": 0.002414814814814815, | |
"loss": 0.889, | |
"step": 372 | |
}, | |
{ | |
"epoch": 0.8286587059150236, | |
"grad_norm": 0.006373196840286255, | |
"learning_rate": 0.0024123456790123457, | |
"loss": 0.6953, | |
"step": 373 | |
}, | |
{ | |
"epoch": 0.8308803110247154, | |
"grad_norm": 0.0081919701769948, | |
"learning_rate": 0.0024098765432098764, | |
"loss": 0.9879, | |
"step": 374 | |
}, | |
{ | |
"epoch": 0.8331019161344071, | |
"grad_norm": 0.010226764716207981, | |
"learning_rate": 0.0024074074074074076, | |
"loss": 1.0487, | |
"step": 375 | |
}, | |
{ | |
"epoch": 0.8353235212440988, | |
"grad_norm": 0.00779406912624836, | |
"learning_rate": 0.0024049382716049383, | |
"loss": 0.8523, | |
"step": 376 | |
}, | |
{ | |
"epoch": 0.8375451263537906, | |
"grad_norm": 0.007026501465588808, | |
"learning_rate": 0.002402469135802469, | |
"loss": 0.6678, | |
"step": 377 | |
}, | |
{ | |
"epoch": 0.8397667314634824, | |
"grad_norm": 0.014233388006687164, | |
"learning_rate": 0.0024000000000000002, | |
"loss": 1.0843, | |
"step": 378 | |
}, | |
{ | |
"epoch": 0.8419883365731741, | |
"grad_norm": 0.009694437496364117, | |
"learning_rate": 0.002397530864197531, | |
"loss": 0.7636, | |
"step": 379 | |
}, | |
{ | |
"epoch": 0.8442099416828659, | |
"grad_norm": 0.007724369410425425, | |
"learning_rate": 0.0023950617283950617, | |
"loss": 0.6192, | |
"step": 380 | |
}, | |
{ | |
"epoch": 0.8464315467925576, | |
"grad_norm": 0.009011819027364254, | |
"learning_rate": 0.0023925925925925924, | |
"loss": 0.8457, | |
"step": 381 | |
}, | |
{ | |
"epoch": 0.8486531519022493, | |
"grad_norm": 0.010344129055738449, | |
"learning_rate": 0.0023901234567901236, | |
"loss": 0.7277, | |
"step": 382 | |
}, | |
{ | |
"epoch": 0.8508747570119412, | |
"grad_norm": 0.005570253822952509, | |
"learning_rate": 0.0023876543209876543, | |
"loss": 0.8237, | |
"step": 383 | |
}, | |
{ | |
"epoch": 0.8530963621216329, | |
"grad_norm": 0.00868288055062294, | |
"learning_rate": 0.002385185185185185, | |
"loss": 0.7281, | |
"step": 384 | |
}, | |
{ | |
"epoch": 0.8553179672313246, | |
"grad_norm": 0.009439078159630299, | |
"learning_rate": 0.0023827160493827162, | |
"loss": 0.9108, | |
"step": 385 | |
}, | |
{ | |
"epoch": 0.8575395723410164, | |
"grad_norm": 0.01069068256765604, | |
"learning_rate": 0.002380246913580247, | |
"loss": 0.7893, | |
"step": 386 | |
}, | |
{ | |
"epoch": 0.8597611774507081, | |
"grad_norm": 0.006505821831524372, | |
"learning_rate": 0.002377777777777778, | |
"loss": 0.7375, | |
"step": 387 | |
}, | |
{ | |
"epoch": 0.8619827825603998, | |
"grad_norm": 0.006728131324052811, | |
"learning_rate": 0.0023753086419753084, | |
"loss": 0.883, | |
"step": 388 | |
}, | |
{ | |
"epoch": 0.8642043876700917, | |
"grad_norm": 0.008597608655691147, | |
"learning_rate": 0.0023728395061728396, | |
"loss": 0.7026, | |
"step": 389 | |
}, | |
{ | |
"epoch": 0.8664259927797834, | |
"grad_norm": 0.008809217251837254, | |
"learning_rate": 0.0023703703703703703, | |
"loss": 0.881, | |
"step": 390 | |
}, | |
{ | |
"epoch": 0.8686475978894751, | |
"grad_norm": 0.008603409864008427, | |
"learning_rate": 0.0023679012345679015, | |
"loss": 0.6806, | |
"step": 391 | |
}, | |
{ | |
"epoch": 0.8708692029991669, | |
"grad_norm": 0.011070380918681622, | |
"learning_rate": 0.0023654320987654322, | |
"loss": 0.8248, | |
"step": 392 | |
}, | |
{ | |
"epoch": 0.8730908081088586, | |
"grad_norm": 0.011338909156620502, | |
"learning_rate": 0.002362962962962963, | |
"loss": 0.7086, | |
"step": 393 | |
}, | |
{ | |
"epoch": 0.8753124132185504, | |
"grad_norm": 0.00947530660778284, | |
"learning_rate": 0.002360493827160494, | |
"loss": 1.0779, | |
"step": 394 | |
}, | |
{ | |
"epoch": 0.8775340183282422, | |
"grad_norm": 0.004716199357062578, | |
"learning_rate": 0.002358024691358025, | |
"loss": 0.7149, | |
"step": 395 | |
}, | |
{ | |
"epoch": 0.8797556234379339, | |
"grad_norm": 0.009315990842878819, | |
"learning_rate": 0.0023555555555555556, | |
"loss": 0.7395, | |
"step": 396 | |
}, | |
{ | |
"epoch": 0.8819772285476256, | |
"grad_norm": 0.007294144481420517, | |
"learning_rate": 0.0023530864197530863, | |
"loss": 0.7305, | |
"step": 397 | |
}, | |
{ | |
"epoch": 0.8841988336573174, | |
"grad_norm": 0.015177429653704166, | |
"learning_rate": 0.0023506172839506175, | |
"loss": 0.8675, | |
"step": 398 | |
}, | |
{ | |
"epoch": 0.8864204387670092, | |
"grad_norm": 0.007899096235632896, | |
"learning_rate": 0.002348148148148148, | |
"loss": 0.7365, | |
"step": 399 | |
}, | |
{ | |
"epoch": 0.8886420438767009, | |
"grad_norm": 0.007007989101111889, | |
"learning_rate": 0.002345679012345679, | |
"loss": 0.7847, | |
"step": 400 | |
}, | |
{ | |
"epoch": 0.8908636489863927, | |
"grad_norm": 0.006175495218485594, | |
"learning_rate": 0.00234320987654321, | |
"loss": 0.8364, | |
"step": 401 | |
}, | |
{ | |
"epoch": 0.8930852540960844, | |
"grad_norm": 0.00996660441160202, | |
"learning_rate": 0.002340740740740741, | |
"loss": 0.824, | |
"step": 402 | |
}, | |
{ | |
"epoch": 0.8953068592057761, | |
"grad_norm": 0.007697720546275377, | |
"learning_rate": 0.0023382716049382716, | |
"loss": 0.8092, | |
"step": 403 | |
}, | |
{ | |
"epoch": 0.897528464315468, | |
"grad_norm": 0.02166207879781723, | |
"learning_rate": 0.0023358024691358024, | |
"loss": 0.8932, | |
"step": 404 | |
}, | |
{ | |
"epoch": 0.8997500694251597, | |
"grad_norm": 0.011461895890533924, | |
"learning_rate": 0.0023333333333333335, | |
"loss": 0.7087, | |
"step": 405 | |
}, | |
{ | |
"epoch": 0.9019716745348514, | |
"grad_norm": 0.007513254880905151, | |
"learning_rate": 0.0023308641975308643, | |
"loss": 0.9315, | |
"step": 406 | |
}, | |
{ | |
"epoch": 0.9041932796445432, | |
"grad_norm": 0.005459482315927744, | |
"learning_rate": 0.002328395061728395, | |
"loss": 0.7973, | |
"step": 407 | |
}, | |
{ | |
"epoch": 0.9064148847542349, | |
"grad_norm": 0.01074253860861063, | |
"learning_rate": 0.002325925925925926, | |
"loss": 0.7004, | |
"step": 408 | |
}, | |
{ | |
"epoch": 0.9086364898639266, | |
"grad_norm": 0.011036701500415802, | |
"learning_rate": 0.002323456790123457, | |
"loss": 1.0297, | |
"step": 409 | |
}, | |
{ | |
"epoch": 0.9108580949736185, | |
"grad_norm": 0.0065698218531906605, | |
"learning_rate": 0.0023209876543209876, | |
"loss": 0.8523, | |
"step": 410 | |
}, | |
{ | |
"epoch": 0.9130797000833102, | |
"grad_norm": 0.008030000142753124, | |
"learning_rate": 0.0023185185185185184, | |
"loss": 0.6409, | |
"step": 411 | |
}, | |
{ | |
"epoch": 0.9153013051930019, | |
"grad_norm": 0.010795329697430134, | |
"learning_rate": 0.0023160493827160495, | |
"loss": 0.8743, | |
"step": 412 | |
}, | |
{ | |
"epoch": 0.9175229103026937, | |
"grad_norm": 0.00822600070387125, | |
"learning_rate": 0.0023135802469135803, | |
"loss": 0.8858, | |
"step": 413 | |
}, | |
{ | |
"epoch": 0.9197445154123854, | |
"grad_norm": 0.006053009070456028, | |
"learning_rate": 0.002311111111111111, | |
"loss": 0.8098, | |
"step": 414 | |
}, | |
{ | |
"epoch": 0.9219661205220772, | |
"grad_norm": 0.012354751117527485, | |
"learning_rate": 0.002308641975308642, | |
"loss": 0.8258, | |
"step": 415 | |
}, | |
{ | |
"epoch": 0.924187725631769, | |
"grad_norm": 0.007067359518259764, | |
"learning_rate": 0.002306172839506173, | |
"loss": 0.6538, | |
"step": 416 | |
}, | |
{ | |
"epoch": 0.9264093307414607, | |
"grad_norm": 0.005133168306201696, | |
"learning_rate": 0.0023037037037037036, | |
"loss": 0.7096, | |
"step": 417 | |
}, | |
{ | |
"epoch": 0.9286309358511524, | |
"grad_norm": 0.00972819048911333, | |
"learning_rate": 0.0023012345679012344, | |
"loss": 0.7409, | |
"step": 418 | |
}, | |
{ | |
"epoch": 0.9308525409608442, | |
"grad_norm": 0.009975786320865154, | |
"learning_rate": 0.0022987654320987655, | |
"loss": 0.6634, | |
"step": 419 | |
}, | |
{ | |
"epoch": 0.933074146070536, | |
"grad_norm": 0.007423476316034794, | |
"learning_rate": 0.0022962962962962963, | |
"loss": 0.893, | |
"step": 420 | |
}, | |
{ | |
"epoch": 0.9352957511802277, | |
"grad_norm": 0.011143303476274014, | |
"learning_rate": 0.002293827160493827, | |
"loss": 0.9115, | |
"step": 421 | |
}, | |
{ | |
"epoch": 0.9375173562899195, | |
"grad_norm": 0.011022884398698807, | |
"learning_rate": 0.002291358024691358, | |
"loss": 0.8602, | |
"step": 422 | |
}, | |
{ | |
"epoch": 0.9397389613996112, | |
"grad_norm": 0.012729224748909473, | |
"learning_rate": 0.002288888888888889, | |
"loss": 0.8307, | |
"step": 423 | |
}, | |
{ | |
"epoch": 0.9419605665093029, | |
"grad_norm": 0.011760844849050045, | |
"learning_rate": 0.00228641975308642, | |
"loss": 0.806, | |
"step": 424 | |
}, | |
{ | |
"epoch": 0.9441821716189948, | |
"grad_norm": 0.007908307015895844, | |
"learning_rate": 0.0022839506172839504, | |
"loss": 0.8382, | |
"step": 425 | |
}, | |
{ | |
"epoch": 0.9464037767286865, | |
"grad_norm": 0.008884398266673088, | |
"learning_rate": 0.0022814814814814816, | |
"loss": 0.9276, | |
"step": 426 | |
}, | |
{ | |
"epoch": 0.9486253818383782, | |
"grad_norm": 0.008932596072554588, | |
"learning_rate": 0.0022790123456790123, | |
"loss": 0.838, | |
"step": 427 | |
}, | |
{ | |
"epoch": 0.95084698694807, | |
"grad_norm": 0.008600243367254734, | |
"learning_rate": 0.0022765432098765435, | |
"loss": 0.726, | |
"step": 428 | |
}, | |
{ | |
"epoch": 0.9530685920577617, | |
"grad_norm": 0.006019454915076494, | |
"learning_rate": 0.0022740740740740738, | |
"loss": 0.6559, | |
"step": 429 | |
}, | |
{ | |
"epoch": 0.9552901971674534, | |
"grad_norm": 0.0076181404292583466, | |
"learning_rate": 0.002271604938271605, | |
"loss": 0.7481, | |
"step": 430 | |
}, | |
{ | |
"epoch": 0.9575118022771453, | |
"grad_norm": 0.012168084271252155, | |
"learning_rate": 0.002269135802469136, | |
"loss": 0.8652, | |
"step": 431 | |
}, | |
{ | |
"epoch": 0.959733407386837, | |
"grad_norm": 0.007883667014539242, | |
"learning_rate": 0.002266666666666667, | |
"loss": 0.8239, | |
"step": 432 | |
}, | |
{ | |
"epoch": 0.9619550124965287, | |
"grad_norm": 0.009150558151304722, | |
"learning_rate": 0.0022641975308641976, | |
"loss": 0.6909, | |
"step": 433 | |
}, | |
{ | |
"epoch": 0.9641766176062205, | |
"grad_norm": 0.010145551525056362, | |
"learning_rate": 0.0022617283950617283, | |
"loss": 0.6662, | |
"step": 434 | |
}, | |
{ | |
"epoch": 0.9663982227159122, | |
"grad_norm": 0.00680849514901638, | |
"learning_rate": 0.0022592592592592595, | |
"loss": 0.6533, | |
"step": 435 | |
}, | |
{ | |
"epoch": 0.968619827825604, | |
"grad_norm": 0.01036826428025961, | |
"learning_rate": 0.00225679012345679, | |
"loss": 0.8156, | |
"step": 436 | |
}, | |
{ | |
"epoch": 0.9708414329352958, | |
"grad_norm": 0.008677355945110321, | |
"learning_rate": 0.002254320987654321, | |
"loss": 0.8441, | |
"step": 437 | |
}, | |
{ | |
"epoch": 0.9730630380449875, | |
"grad_norm": 0.00549481064081192, | |
"learning_rate": 0.002251851851851852, | |
"loss": 0.7917, | |
"step": 438 | |
}, | |
{ | |
"epoch": 0.9752846431546792, | |
"grad_norm": 0.006068227346986532, | |
"learning_rate": 0.002249382716049383, | |
"loss": 0.8223, | |
"step": 439 | |
}, | |
{ | |
"epoch": 0.977506248264371, | |
"grad_norm": 0.011117229238152504, | |
"learning_rate": 0.0022469135802469136, | |
"loss": 0.8663, | |
"step": 440 | |
}, | |
{ | |
"epoch": 0.9797278533740628, | |
"grad_norm": 0.004921234678477049, | |
"learning_rate": 0.0022444444444444443, | |
"loss": 0.9496, | |
"step": 441 | |
}, | |
{ | |
"epoch": 0.9819494584837545, | |
"grad_norm": 0.007072269916534424, | |
"learning_rate": 0.0022419753086419755, | |
"loss": 0.8655, | |
"step": 442 | |
}, | |
{ | |
"epoch": 0.9841710635934463, | |
"grad_norm": 0.008353911340236664, | |
"learning_rate": 0.002239506172839506, | |
"loss": 0.6893, | |
"step": 443 | |
}, | |
{ | |
"epoch": 0.986392668703138, | |
"grad_norm": 0.021165434271097183, | |
"learning_rate": 0.002237037037037037, | |
"loss": 0.6942, | |
"step": 444 | |
}, | |
{ | |
"epoch": 0.9886142738128297, | |
"grad_norm": 0.008496430702507496, | |
"learning_rate": 0.002234567901234568, | |
"loss": 0.5909, | |
"step": 445 | |
}, | |
{ | |
"epoch": 0.9908358789225216, | |
"grad_norm": 0.008113761432468891, | |
"learning_rate": 0.002232098765432099, | |
"loss": 0.9439, | |
"step": 446 | |
}, | |
{ | |
"epoch": 0.9930574840322133, | |
"grad_norm": 0.013667792081832886, | |
"learning_rate": 0.0022296296296296296, | |
"loss": 0.961, | |
"step": 447 | |
}, | |
{ | |
"epoch": 0.995279089141905, | |
"grad_norm": 0.010663739405572414, | |
"learning_rate": 0.0022271604938271603, | |
"loss": 0.9028, | |
"step": 448 | |
}, | |
{ | |
"epoch": 0.9975006942515968, | |
"grad_norm": 0.008401602506637573, | |
"learning_rate": 0.0022246913580246915, | |
"loss": 0.9042, | |
"step": 449 | |
}, | |
{ | |
"epoch": 0.9997222993612885, | |
"grad_norm": 0.005140175111591816, | |
"learning_rate": 0.0022222222222222222, | |
"loss": 0.7971, | |
"step": 450 | |
}, | |
{ | |
"epoch": 1.0019439044709804, | |
"grad_norm": 0.007235647179186344, | |
"learning_rate": 0.002219753086419753, | |
"loss": 0.9074, | |
"step": 451 | |
}, | |
{ | |
"epoch": 1.004165509580672, | |
"grad_norm": 0.008099445141851902, | |
"learning_rate": 0.002217283950617284, | |
"loss": 0.7123, | |
"step": 452 | |
}, | |
{ | |
"epoch": 1.0063871146903638, | |
"grad_norm": 0.008628906682133675, | |
"learning_rate": 0.002214814814814815, | |
"loss": 0.7415, | |
"step": 453 | |
}, | |
{ | |
"epoch": 1.0086087198000555, | |
"grad_norm": 0.006053466349840164, | |
"learning_rate": 0.002212345679012346, | |
"loss": 0.7232, | |
"step": 454 | |
}, | |
{ | |
"epoch": 1.0108303249097472, | |
"grad_norm": 0.008410786278545856, | |
"learning_rate": 0.0022098765432098763, | |
"loss": 0.6775, | |
"step": 455 | |
}, | |
{ | |
"epoch": 1.013051930019439, | |
"grad_norm": 0.005019874777644873, | |
"learning_rate": 0.0022074074074074075, | |
"loss": 0.6729, | |
"step": 456 | |
}, | |
{ | |
"epoch": 1.0152735351291309, | |
"grad_norm": 0.00676839891821146, | |
"learning_rate": 0.0022049382716049382, | |
"loss": 0.7891, | |
"step": 457 | |
}, | |
{ | |
"epoch": 1.0174951402388226, | |
"grad_norm": 0.006478031165897846, | |
"learning_rate": 0.0022024691358024694, | |
"loss": 0.5783, | |
"step": 458 | |
}, | |
{ | |
"epoch": 1.0197167453485143, | |
"grad_norm": 0.010523135773837566, | |
"learning_rate": 0.0021999999999999997, | |
"loss": 0.7024, | |
"step": 459 | |
}, | |
{ | |
"epoch": 1.021938350458206, | |
"grad_norm": 0.012737292796373367, | |
"learning_rate": 0.002197530864197531, | |
"loss": 0.8262, | |
"step": 460 | |
}, | |
{ | |
"epoch": 1.0241599555678977, | |
"grad_norm": 0.0061058285646140575, | |
"learning_rate": 0.002195061728395062, | |
"loss": 0.8292, | |
"step": 461 | |
}, | |
{ | |
"epoch": 1.0263815606775895, | |
"grad_norm": 0.01105250883847475, | |
"learning_rate": 0.0021925925925925928, | |
"loss": 0.7996, | |
"step": 462 | |
}, | |
{ | |
"epoch": 1.0286031657872814, | |
"grad_norm": 0.008844085969030857, | |
"learning_rate": 0.0021901234567901235, | |
"loss": 0.7622, | |
"step": 463 | |
}, | |
{ | |
"epoch": 1.030824770896973, | |
"grad_norm": 0.005652655381709337, | |
"learning_rate": 0.0021876543209876542, | |
"loss": 0.8272, | |
"step": 464 | |
}, | |
{ | |
"epoch": 1.0330463760066648, | |
"grad_norm": 0.009466007351875305, | |
"learning_rate": 0.0021851851851851854, | |
"loss": 0.9157, | |
"step": 465 | |
}, | |
{ | |
"epoch": 1.0352679811163565, | |
"grad_norm": 0.007364574354141951, | |
"learning_rate": 0.002182716049382716, | |
"loss": 0.6422, | |
"step": 466 | |
}, | |
{ | |
"epoch": 1.0374895862260483, | |
"grad_norm": 0.008415921591222286, | |
"learning_rate": 0.002180246913580247, | |
"loss": 0.7811, | |
"step": 467 | |
}, | |
{ | |
"epoch": 1.03971119133574, | |
"grad_norm": 0.007536835968494415, | |
"learning_rate": 0.002177777777777778, | |
"loss": 0.6505, | |
"step": 468 | |
}, | |
{ | |
"epoch": 1.041932796445432, | |
"grad_norm": 0.006799593102186918, | |
"learning_rate": 0.0021753086419753088, | |
"loss": 0.7923, | |
"step": 469 | |
}, | |
{ | |
"epoch": 1.0441544015551236, | |
"grad_norm": 0.00823921337723732, | |
"learning_rate": 0.0021728395061728395, | |
"loss": 0.7098, | |
"step": 470 | |
}, | |
{ | |
"epoch": 1.0463760066648153, | |
"grad_norm": 0.008536463603377342, | |
"learning_rate": 0.0021703703703703702, | |
"loss": 0.7643, | |
"step": 471 | |
}, | |
{ | |
"epoch": 1.048597611774507, | |
"grad_norm": 0.008986718021333218, | |
"learning_rate": 0.0021679012345679014, | |
"loss": 0.8968, | |
"step": 472 | |
}, | |
{ | |
"epoch": 1.0508192168841988, | |
"grad_norm": 0.010182259604334831, | |
"learning_rate": 0.002165432098765432, | |
"loss": 0.7392, | |
"step": 473 | |
}, | |
{ | |
"epoch": 1.0530408219938905, | |
"grad_norm": 0.007251216098666191, | |
"learning_rate": 0.002162962962962963, | |
"loss": 0.7097, | |
"step": 474 | |
}, | |
{ | |
"epoch": 1.0552624271035824, | |
"grad_norm": 0.007560486439615488, | |
"learning_rate": 0.002160493827160494, | |
"loss": 0.7698, | |
"step": 475 | |
}, | |
{ | |
"epoch": 1.0574840322132741, | |
"grad_norm": 0.0077867708168923855, | |
"learning_rate": 0.002158024691358025, | |
"loss": 0.8747, | |
"step": 476 | |
}, | |
{ | |
"epoch": 1.0597056373229659, | |
"grad_norm": 0.013194149360060692, | |
"learning_rate": 0.0021555555555555555, | |
"loss": 0.8222, | |
"step": 477 | |
}, | |
{ | |
"epoch": 1.0619272424326576, | |
"grad_norm": 0.00997414905577898, | |
"learning_rate": 0.0021530864197530863, | |
"loss": 0.663, | |
"step": 478 | |
}, | |
{ | |
"epoch": 1.0641488475423493, | |
"grad_norm": 0.011852915398776531, | |
"learning_rate": 0.0021506172839506174, | |
"loss": 0.6943, | |
"step": 479 | |
}, | |
{ | |
"epoch": 1.066370452652041, | |
"grad_norm": 0.008246763609349728, | |
"learning_rate": 0.002148148148148148, | |
"loss": 0.6128, | |
"step": 480 | |
}, | |
{ | |
"epoch": 1.068592057761733, | |
"grad_norm": 0.007343011442571878, | |
"learning_rate": 0.002145679012345679, | |
"loss": 0.8747, | |
"step": 481 | |
}, | |
{ | |
"epoch": 1.0708136628714247, | |
"grad_norm": 0.007888510823249817, | |
"learning_rate": 0.0021432098765432096, | |
"loss": 0.8691, | |
"step": 482 | |
}, | |
{ | |
"epoch": 1.0730352679811164, | |
"grad_norm": 0.008748864755034447, | |
"learning_rate": 0.002140740740740741, | |
"loss": 0.8701, | |
"step": 483 | |
}, | |
{ | |
"epoch": 1.075256873090808, | |
"grad_norm": 0.006701997015625238, | |
"learning_rate": 0.002138271604938272, | |
"loss": 0.9312, | |
"step": 484 | |
}, | |
{ | |
"epoch": 1.0774784782004998, | |
"grad_norm": 0.006520512979477644, | |
"learning_rate": 0.0021358024691358023, | |
"loss": 0.9224, | |
"step": 485 | |
}, | |
{ | |
"epoch": 1.0797000833101915, | |
"grad_norm": 0.011394795030355453, | |
"learning_rate": 0.0021333333333333334, | |
"loss": 0.8161, | |
"step": 486 | |
}, | |
{ | |
"epoch": 1.0819216884198835, | |
"grad_norm": 0.012948388233780861, | |
"learning_rate": 0.002130864197530864, | |
"loss": 0.8619, | |
"step": 487 | |
}, | |
{ | |
"epoch": 1.0841432935295752, | |
"grad_norm": 0.005069487262517214, | |
"learning_rate": 0.0021283950617283953, | |
"loss": 0.8027, | |
"step": 488 | |
}, | |
{ | |
"epoch": 1.0863648986392669, | |
"grad_norm": 0.008600343950092793, | |
"learning_rate": 0.0021259259259259256, | |
"loss": 0.8324, | |
"step": 489 | |
}, | |
{ | |
"epoch": 1.0885865037489586, | |
"grad_norm": 0.00814684759825468, | |
"learning_rate": 0.002123456790123457, | |
"loss": 0.6381, | |
"step": 490 | |
}, | |
{ | |
"epoch": 1.0908081088586503, | |
"grad_norm": 0.008729801513254642, | |
"learning_rate": 0.002120987654320988, | |
"loss": 0.6205, | |
"step": 491 | |
}, | |
{ | |
"epoch": 1.093029713968342, | |
"grad_norm": 0.008048581890761852, | |
"learning_rate": 0.0021185185185185187, | |
"loss": 0.7394, | |
"step": 492 | |
}, | |
{ | |
"epoch": 1.095251319078034, | |
"grad_norm": 0.008639739826321602, | |
"learning_rate": 0.0021160493827160494, | |
"loss": 0.9498, | |
"step": 493 | |
}, | |
{ | |
"epoch": 1.0974729241877257, | |
"grad_norm": 0.006795950699597597, | |
"learning_rate": 0.00211358024691358, | |
"loss": 0.9062, | |
"step": 494 | |
}, | |
{ | |
"epoch": 1.0996945292974174, | |
"grad_norm": 0.007198169827461243, | |
"learning_rate": 0.0021111111111111113, | |
"loss": 0.8697, | |
"step": 495 | |
}, | |
{ | |
"epoch": 1.1019161344071091, | |
"grad_norm": 0.007898751646280289, | |
"learning_rate": 0.002108641975308642, | |
"loss": 0.7633, | |
"step": 496 | |
}, | |
{ | |
"epoch": 1.1041377395168008, | |
"grad_norm": 0.008064448833465576, | |
"learning_rate": 0.002106172839506173, | |
"loss": 0.7843, | |
"step": 497 | |
}, | |
{ | |
"epoch": 1.1063593446264925, | |
"grad_norm": 0.006913089193403721, | |
"learning_rate": 0.002103703703703704, | |
"loss": 0.7572, | |
"step": 498 | |
}, | |
{ | |
"epoch": 1.1085809497361845, | |
"grad_norm": 0.007993088103830814, | |
"learning_rate": 0.0021012345679012347, | |
"loss": 0.7287, | |
"step": 499 | |
}, | |
{ | |
"epoch": 1.1108025548458762, | |
"grad_norm": 0.007741628214716911, | |
"learning_rate": 0.0020987654320987655, | |
"loss": 0.7852, | |
"step": 500 | |
}, | |
{ | |
"epoch": 1.113024159955568, | |
"grad_norm": 0.009709767997264862, | |
"learning_rate": 0.002096296296296296, | |
"loss": 0.8637, | |
"step": 501 | |
}, | |
{ | |
"epoch": 1.1152457650652596, | |
"grad_norm": 0.010323646478354931, | |
"learning_rate": 0.0020938271604938274, | |
"loss": 0.8176, | |
"step": 502 | |
}, | |
{ | |
"epoch": 1.1174673701749513, | |
"grad_norm": 0.013374350033700466, | |
"learning_rate": 0.002091358024691358, | |
"loss": 0.8289, | |
"step": 503 | |
}, | |
{ | |
"epoch": 1.119688975284643, | |
"grad_norm": 0.010094034485518932, | |
"learning_rate": 0.002088888888888889, | |
"loss": 0.7587, | |
"step": 504 | |
}, | |
{ | |
"epoch": 1.121910580394335, | |
"grad_norm": 0.006756503600627184, | |
"learning_rate": 0.00208641975308642, | |
"loss": 0.8676, | |
"step": 505 | |
}, | |
{ | |
"epoch": 1.1241321855040267, | |
"grad_norm": 0.00773257901892066, | |
"learning_rate": 0.0020839506172839507, | |
"loss": 0.6533, | |
"step": 506 | |
}, | |
{ | |
"epoch": 1.1263537906137184, | |
"grad_norm": 0.0079817408695817, | |
"learning_rate": 0.0020814814814814815, | |
"loss": 0.8949, | |
"step": 507 | |
}, | |
{ | |
"epoch": 1.1285753957234101, | |
"grad_norm": 0.0096563920378685, | |
"learning_rate": 0.002079012345679012, | |
"loss": 0.8016, | |
"step": 508 | |
}, | |
{ | |
"epoch": 1.1307970008331019, | |
"grad_norm": 0.006322795990854502, | |
"learning_rate": 0.0020765432098765434, | |
"loss": 0.7104, | |
"step": 509 | |
}, | |
{ | |
"epoch": 1.1330186059427936, | |
"grad_norm": 0.008746332488954067, | |
"learning_rate": 0.002074074074074074, | |
"loss": 0.9368, | |
"step": 510 | |
}, | |
{ | |
"epoch": 1.1352402110524855, | |
"grad_norm": 0.012176689691841602, | |
"learning_rate": 0.002071604938271605, | |
"loss": 0.8829, | |
"step": 511 | |
}, | |
{ | |
"epoch": 1.1374618161621772, | |
"grad_norm": 0.00898228120058775, | |
"learning_rate": 0.0020691358024691356, | |
"loss": 0.7631, | |
"step": 512 | |
}, | |
{ | |
"epoch": 1.139683421271869, | |
"grad_norm": 0.00945940800011158, | |
"learning_rate": 0.0020666666666666667, | |
"loss": 0.606, | |
"step": 513 | |
}, | |
{ | |
"epoch": 1.1419050263815607, | |
"grad_norm": 0.00997233297675848, | |
"learning_rate": 0.002064197530864198, | |
"loss": 0.9461, | |
"step": 514 | |
}, | |
{ | |
"epoch": 1.1441266314912524, | |
"grad_norm": 0.0064164213836193085, | |
"learning_rate": 0.002061728395061728, | |
"loss": 0.742, | |
"step": 515 | |
}, | |
{ | |
"epoch": 1.146348236600944, | |
"grad_norm": 0.007206726353615522, | |
"learning_rate": 0.0020592592592592594, | |
"loss": 0.6104, | |
"step": 516 | |
}, | |
{ | |
"epoch": 1.148569841710636, | |
"grad_norm": 0.008765126578509808, | |
"learning_rate": 0.00205679012345679, | |
"loss": 0.8508, | |
"step": 517 | |
}, | |
{ | |
"epoch": 1.1507914468203277, | |
"grad_norm": 0.009035077877342701, | |
"learning_rate": 0.0020543209876543213, | |
"loss": 0.6652, | |
"step": 518 | |
}, | |
{ | |
"epoch": 1.1530130519300195, | |
"grad_norm": 0.007268165238201618, | |
"learning_rate": 0.0020518518518518516, | |
"loss": 0.7535, | |
"step": 519 | |
}, | |
{ | |
"epoch": 1.1552346570397112, | |
"grad_norm": 0.007550420239567757, | |
"learning_rate": 0.0020493827160493827, | |
"loss": 0.7689, | |
"step": 520 | |
}, | |
{ | |
"epoch": 1.157456262149403, | |
"grad_norm": 0.008769848383963108, | |
"learning_rate": 0.002046913580246914, | |
"loss": 0.9409, | |
"step": 521 | |
}, | |
{ | |
"epoch": 1.1596778672590946, | |
"grad_norm": 0.007353247608989477, | |
"learning_rate": 0.0020444444444444447, | |
"loss": 0.6995, | |
"step": 522 | |
}, | |
{ | |
"epoch": 1.1618994723687863, | |
"grad_norm": 0.010628338903188705, | |
"learning_rate": 0.0020419753086419754, | |
"loss": 0.9438, | |
"step": 523 | |
}, | |
{ | |
"epoch": 1.1641210774784783, | |
"grad_norm": 0.011336445808410645, | |
"learning_rate": 0.002039506172839506, | |
"loss": 0.6474, | |
"step": 524 | |
}, | |
{ | |
"epoch": 1.16634268258817, | |
"grad_norm": 0.008600319735705853, | |
"learning_rate": 0.0020370370370370373, | |
"loss": 0.676, | |
"step": 525 | |
}, | |
{ | |
"epoch": 1.1685642876978617, | |
"grad_norm": 0.007791724521666765, | |
"learning_rate": 0.002034567901234568, | |
"loss": 0.8491, | |
"step": 526 | |
}, | |
{ | |
"epoch": 1.1707858928075534, | |
"grad_norm": 0.008302255533635616, | |
"learning_rate": 0.0020320987654320988, | |
"loss": 0.5874, | |
"step": 527 | |
}, | |
{ | |
"epoch": 1.1730074979172451, | |
"grad_norm": 0.007213059347122908, | |
"learning_rate": 0.00202962962962963, | |
"loss": 0.6567, | |
"step": 528 | |
}, | |
{ | |
"epoch": 1.175229103026937, | |
"grad_norm": 0.006716989912092686, | |
"learning_rate": 0.0020271604938271607, | |
"loss": 0.7785, | |
"step": 529 | |
}, | |
{ | |
"epoch": 1.1774507081366288, | |
"grad_norm": 0.006225584540516138, | |
"learning_rate": 0.0020246913580246914, | |
"loss": 0.8877, | |
"step": 530 | |
}, | |
{ | |
"epoch": 1.1796723132463205, | |
"grad_norm": 0.009407371282577515, | |
"learning_rate": 0.002022222222222222, | |
"loss": 0.6993, | |
"step": 531 | |
}, | |
{ | |
"epoch": 1.1818939183560122, | |
"grad_norm": 0.004865545313805342, | |
"learning_rate": 0.0020197530864197533, | |
"loss": 0.7627, | |
"step": 532 | |
}, | |
{ | |
"epoch": 1.184115523465704, | |
"grad_norm": 0.010101933032274246, | |
"learning_rate": 0.002017283950617284, | |
"loss": 0.8246, | |
"step": 533 | |
}, | |
{ | |
"epoch": 1.1863371285753956, | |
"grad_norm": 0.011003025807440281, | |
"learning_rate": 0.0020148148148148148, | |
"loss": 0.6756, | |
"step": 534 | |
}, | |
{ | |
"epoch": 1.1885587336850874, | |
"grad_norm": 0.01154054794460535, | |
"learning_rate": 0.002012345679012346, | |
"loss": 0.6787, | |
"step": 535 | |
}, | |
{ | |
"epoch": 1.1907803387947793, | |
"grad_norm": 0.012816593050956726, | |
"learning_rate": 0.0020098765432098767, | |
"loss": 0.75, | |
"step": 536 | |
}, | |
{ | |
"epoch": 1.193001943904471, | |
"grad_norm": 0.00780332088470459, | |
"learning_rate": 0.0020074074074074074, | |
"loss": 0.8453, | |
"step": 537 | |
}, | |
{ | |
"epoch": 1.1952235490141627, | |
"grad_norm": 0.006366015411913395, | |
"learning_rate": 0.002004938271604938, | |
"loss": 0.6712, | |
"step": 538 | |
}, | |
{ | |
"epoch": 1.1974451541238544, | |
"grad_norm": 0.004924730863422155, | |
"learning_rate": 0.0020024691358024693, | |
"loss": 0.8163, | |
"step": 539 | |
}, | |
{ | |
"epoch": 1.1996667592335462, | |
"grad_norm": 0.005040746182203293, | |
"learning_rate": 0.002, | |
"loss": 0.7244, | |
"step": 540 | |
}, | |
{ | |
"epoch": 1.201888364343238, | |
"grad_norm": 0.0060347081162035465, | |
"learning_rate": 0.0019975308641975308, | |
"loss": 0.7421, | |
"step": 541 | |
}, | |
{ | |
"epoch": 1.2041099694529298, | |
"grad_norm": 0.00597890792414546, | |
"learning_rate": 0.0019950617283950615, | |
"loss": 0.7418, | |
"step": 542 | |
}, | |
{ | |
"epoch": 1.2063315745626215, | |
"grad_norm": 0.0050376360304653645, | |
"learning_rate": 0.0019925925925925927, | |
"loss": 0.7606, | |
"step": 543 | |
}, | |
{ | |
"epoch": 1.2085531796723132, | |
"grad_norm": 0.0056321881711483, | |
"learning_rate": 0.001990123456790124, | |
"loss": 0.8881, | |
"step": 544 | |
}, | |
{ | |
"epoch": 1.210774784782005, | |
"grad_norm": 0.006516937632113695, | |
"learning_rate": 0.001987654320987654, | |
"loss": 0.7048, | |
"step": 545 | |
}, | |
{ | |
"epoch": 1.2129963898916967, | |
"grad_norm": 0.004874023143202066, | |
"learning_rate": 0.0019851851851851853, | |
"loss": 0.8859, | |
"step": 546 | |
}, | |
{ | |
"epoch": 1.2152179950013884, | |
"grad_norm": 0.007384075317531824, | |
"learning_rate": 0.001982716049382716, | |
"loss": 0.6933, | |
"step": 547 | |
}, | |
{ | |
"epoch": 1.2174396001110803, | |
"grad_norm": 0.005381579045206308, | |
"learning_rate": 0.001980246913580247, | |
"loss": 0.7698, | |
"step": 548 | |
}, | |
{ | |
"epoch": 1.219661205220772, | |
"grad_norm": 0.008545870892703533, | |
"learning_rate": 0.0019777777777777775, | |
"loss": 0.6051, | |
"step": 549 | |
}, | |
{ | |
"epoch": 1.2218828103304638, | |
"grad_norm": 0.007204385939985514, | |
"learning_rate": 0.0019753086419753087, | |
"loss": 0.668, | |
"step": 550 | |
}, | |
{ | |
"epoch": 1.2241044154401555, | |
"grad_norm": 0.021104220300912857, | |
"learning_rate": 0.00197283950617284, | |
"loss": 0.7877, | |
"step": 551 | |
}, | |
{ | |
"epoch": 1.2263260205498472, | |
"grad_norm": 0.009946838952600956, | |
"learning_rate": 0.00197037037037037, | |
"loss": 0.6948, | |
"step": 552 | |
}, | |
{ | |
"epoch": 1.2285476256595391, | |
"grad_norm": 0.007187278009951115, | |
"learning_rate": 0.0019679012345679013, | |
"loss": 0.6236, | |
"step": 553 | |
}, | |
{ | |
"epoch": 1.2307692307692308, | |
"grad_norm": 0.009974513202905655, | |
"learning_rate": 0.001965432098765432, | |
"loss": 0.9304, | |
"step": 554 | |
}, | |
{ | |
"epoch": 1.2329908358789226, | |
"grad_norm": 0.007199294865131378, | |
"learning_rate": 0.0019629629629629632, | |
"loss": 0.8051, | |
"step": 555 | |
}, | |
{ | |
"epoch": 1.2352124409886143, | |
"grad_norm": 0.012498236261308193, | |
"learning_rate": 0.0019604938271604935, | |
"loss": 0.9383, | |
"step": 556 | |
}, | |
{ | |
"epoch": 1.237434046098306, | |
"grad_norm": 0.009159721434116364, | |
"learning_rate": 0.0019580246913580247, | |
"loss": 0.8871, | |
"step": 557 | |
}, | |
{ | |
"epoch": 1.2396556512079977, | |
"grad_norm": 0.006070270668715239, | |
"learning_rate": 0.001955555555555556, | |
"loss": 0.7775, | |
"step": 558 | |
}, | |
{ | |
"epoch": 1.2418772563176894, | |
"grad_norm": 0.009294399060308933, | |
"learning_rate": 0.0019530864197530864, | |
"loss": 0.8176, | |
"step": 559 | |
}, | |
{ | |
"epoch": 1.2440988614273814, | |
"grad_norm": 0.009688743390142918, | |
"learning_rate": 0.0019506172839506173, | |
"loss": 0.9653, | |
"step": 560 | |
}, | |
{ | |
"epoch": 1.246320466537073, | |
"grad_norm": 0.011101442389190197, | |
"learning_rate": 0.001948148148148148, | |
"loss": 1.107, | |
"step": 561 | |
}, | |
{ | |
"epoch": 1.2485420716467648, | |
"grad_norm": 0.006938232574611902, | |
"learning_rate": 0.001945679012345679, | |
"loss": 0.723, | |
"step": 562 | |
}, | |
{ | |
"epoch": 1.2507636767564565, | |
"grad_norm": 0.010605933144688606, | |
"learning_rate": 0.0019432098765432098, | |
"loss": 0.7292, | |
"step": 563 | |
}, | |
{ | |
"epoch": 1.2529852818661482, | |
"grad_norm": 0.008306719362735748, | |
"learning_rate": 0.0019407407407407407, | |
"loss": 0.7772, | |
"step": 564 | |
}, | |
{ | |
"epoch": 1.2552068869758402, | |
"grad_norm": 0.006135668605566025, | |
"learning_rate": 0.0019382716049382714, | |
"loss": 0.6693, | |
"step": 565 | |
}, | |
{ | |
"epoch": 1.2574284920855319, | |
"grad_norm": 0.01065669022500515, | |
"learning_rate": 0.0019358024691358024, | |
"loss": 0.7522, | |
"step": 566 | |
}, | |
{ | |
"epoch": 1.2596500971952236, | |
"grad_norm": 0.009806640446186066, | |
"learning_rate": 0.0019333333333333336, | |
"loss": 0.9129, | |
"step": 567 | |
}, | |
{ | |
"epoch": 1.2618717023049153, | |
"grad_norm": 0.014492296613752842, | |
"learning_rate": 0.001930864197530864, | |
"loss": 0.8762, | |
"step": 568 | |
}, | |
{ | |
"epoch": 1.264093307414607, | |
"grad_norm": 0.008053292520344257, | |
"learning_rate": 0.0019283950617283952, | |
"loss": 0.7453, | |
"step": 569 | |
}, | |
{ | |
"epoch": 1.2663149125242987, | |
"grad_norm": 0.011233272030949593, | |
"learning_rate": 0.0019259259259259258, | |
"loss": 0.7827, | |
"step": 570 | |
}, | |
{ | |
"epoch": 1.2685365176339904, | |
"grad_norm": 0.008643749170005322, | |
"learning_rate": 0.001923456790123457, | |
"loss": 0.8582, | |
"step": 571 | |
}, | |
{ | |
"epoch": 1.2707581227436824, | |
"grad_norm": 0.008974653668701649, | |
"learning_rate": 0.0019209876543209875, | |
"loss": 0.8184, | |
"step": 572 | |
}, | |
{ | |
"epoch": 1.272979727853374, | |
"grad_norm": 0.007029388565570116, | |
"learning_rate": 0.0019185185185185186, | |
"loss": 0.8485, | |
"step": 573 | |
}, | |
{ | |
"epoch": 1.2752013329630658, | |
"grad_norm": 0.008042087778449059, | |
"learning_rate": 0.0019160493827160496, | |
"loss": 0.8451, | |
"step": 574 | |
}, | |
{ | |
"epoch": 1.2774229380727575, | |
"grad_norm": 0.009854896925389767, | |
"learning_rate": 0.0019135802469135803, | |
"loss": 0.9475, | |
"step": 575 | |
}, | |
{ | |
"epoch": 1.2796445431824492, | |
"grad_norm": 0.011435446329414845, | |
"learning_rate": 0.0019111111111111113, | |
"loss": 0.8207, | |
"step": 576 | |
}, | |
{ | |
"epoch": 1.2818661482921412, | |
"grad_norm": 0.013869104906916618, | |
"learning_rate": 0.001908641975308642, | |
"loss": 0.647, | |
"step": 577 | |
}, | |
{ | |
"epoch": 1.284087753401833, | |
"grad_norm": 0.008760456927120686, | |
"learning_rate": 0.001906172839506173, | |
"loss": 0.8233, | |
"step": 578 | |
}, | |
{ | |
"epoch": 1.2863093585115246, | |
"grad_norm": 0.011544842272996902, | |
"learning_rate": 0.0019037037037037037, | |
"loss": 0.6646, | |
"step": 579 | |
}, | |
{ | |
"epoch": 1.2885309636212163, | |
"grad_norm": 0.010519598610699177, | |
"learning_rate": 0.0019012345679012346, | |
"loss": 0.6739, | |
"step": 580 | |
}, | |
{ | |
"epoch": 1.290752568730908, | |
"grad_norm": 0.008361653424799442, | |
"learning_rate": 0.0018987654320987656, | |
"loss": 0.7724, | |
"step": 581 | |
}, | |
{ | |
"epoch": 1.2929741738405998, | |
"grad_norm": 0.007549158297479153, | |
"learning_rate": 0.0018962962962962963, | |
"loss": 0.6881, | |
"step": 582 | |
}, | |
{ | |
"epoch": 1.2951957789502915, | |
"grad_norm": 0.0081331180408597, | |
"learning_rate": 0.0018938271604938273, | |
"loss": 0.754, | |
"step": 583 | |
}, | |
{ | |
"epoch": 1.2974173840599834, | |
"grad_norm": 0.008421644568443298, | |
"learning_rate": 0.001891358024691358, | |
"loss": 0.772, | |
"step": 584 | |
}, | |
{ | |
"epoch": 1.2996389891696751, | |
"grad_norm": 0.00717672286555171, | |
"learning_rate": 0.001888888888888889, | |
"loss": 0.79, | |
"step": 585 | |
}, | |
{ | |
"epoch": 1.3018605942793668, | |
"grad_norm": 0.00759734446182847, | |
"learning_rate": 0.0018864197530864197, | |
"loss": 0.9811, | |
"step": 586 | |
}, | |
{ | |
"epoch": 1.3040821993890586, | |
"grad_norm": 0.009103677235543728, | |
"learning_rate": 0.0018839506172839506, | |
"loss": 0.7995, | |
"step": 587 | |
}, | |
{ | |
"epoch": 1.3063038044987503, | |
"grad_norm": 0.009735452011227608, | |
"learning_rate": 0.0018814814814814816, | |
"loss": 0.8571, | |
"step": 588 | |
}, | |
{ | |
"epoch": 1.3085254096084422, | |
"grad_norm": 0.010691888630390167, | |
"learning_rate": 0.0018790123456790123, | |
"loss": 0.7097, | |
"step": 589 | |
}, | |
{ | |
"epoch": 1.310747014718134, | |
"grad_norm": 0.006676527205854654, | |
"learning_rate": 0.0018765432098765433, | |
"loss": 0.6608, | |
"step": 590 | |
}, | |
{ | |
"epoch": 1.3129686198278256, | |
"grad_norm": 0.008960582315921783, | |
"learning_rate": 0.001874074074074074, | |
"loss": 0.6465, | |
"step": 591 | |
}, | |
{ | |
"epoch": 1.3151902249375174, | |
"grad_norm": 0.0076140440069139, | |
"learning_rate": 0.001871604938271605, | |
"loss": 0.8781, | |
"step": 592 | |
}, | |
{ | |
"epoch": 1.317411830047209, | |
"grad_norm": 0.006687042769044638, | |
"learning_rate": 0.0018691358024691357, | |
"loss": 0.8268, | |
"step": 593 | |
}, | |
{ | |
"epoch": 1.3196334351569008, | |
"grad_norm": 0.011486892588436604, | |
"learning_rate": 0.0018666666666666666, | |
"loss": 0.8591, | |
"step": 594 | |
}, | |
{ | |
"epoch": 1.3218550402665925, | |
"grad_norm": 0.007649209350347519, | |
"learning_rate": 0.0018641975308641974, | |
"loss": 0.8245, | |
"step": 595 | |
}, | |
{ | |
"epoch": 1.3240766453762844, | |
"grad_norm": 0.010610325261950493, | |
"learning_rate": 0.0018617283950617283, | |
"loss": 0.8198, | |
"step": 596 | |
}, | |
{ | |
"epoch": 1.3262982504859762, | |
"grad_norm": 0.013199282810091972, | |
"learning_rate": 0.0018592592592592595, | |
"loss": 0.7064, | |
"step": 597 | |
}, | |
{ | |
"epoch": 1.3285198555956679, | |
"grad_norm": 0.006374049000442028, | |
"learning_rate": 0.00185679012345679, | |
"loss": 0.7449, | |
"step": 598 | |
}, | |
{ | |
"epoch": 1.3307414607053596, | |
"grad_norm": 0.012544116005301476, | |
"learning_rate": 0.0018543209876543212, | |
"loss": 0.5865, | |
"step": 599 | |
}, | |
{ | |
"epoch": 1.3329630658150513, | |
"grad_norm": 0.007943814620375633, | |
"learning_rate": 0.0018518518518518517, | |
"loss": 0.8927, | |
"step": 600 | |
}, | |
{ | |
"epoch": 1.3351846709247432, | |
"grad_norm": 0.007226669695228338, | |
"learning_rate": 0.0018493827160493829, | |
"loss": 0.9163, | |
"step": 601 | |
}, | |
{ | |
"epoch": 1.337406276034435, | |
"grad_norm": 0.006219184026122093, | |
"learning_rate": 0.0018469135802469134, | |
"loss": 0.8312, | |
"step": 602 | |
}, | |
{ | |
"epoch": 1.3396278811441267, | |
"grad_norm": 0.009386156685650349, | |
"learning_rate": 0.0018444444444444446, | |
"loss": 0.62, | |
"step": 603 | |
}, | |
{ | |
"epoch": 1.3418494862538184, | |
"grad_norm": 0.009648386389017105, | |
"learning_rate": 0.0018419753086419755, | |
"loss": 0.8502, | |
"step": 604 | |
}, | |
{ | |
"epoch": 1.34407109136351, | |
"grad_norm": 0.007513147313147783, | |
"learning_rate": 0.0018395061728395062, | |
"loss": 0.7968, | |
"step": 605 | |
}, | |
{ | |
"epoch": 1.3462926964732018, | |
"grad_norm": 0.007452718913555145, | |
"learning_rate": 0.0018370370370370372, | |
"loss": 0.8427, | |
"step": 606 | |
}, | |
{ | |
"epoch": 1.3485143015828935, | |
"grad_norm": 0.0075603355653584, | |
"learning_rate": 0.001834567901234568, | |
"loss": 0.7846, | |
"step": 607 | |
}, | |
{ | |
"epoch": 1.3507359066925855, | |
"grad_norm": 0.008041512221097946, | |
"learning_rate": 0.0018320987654320989, | |
"loss": 1.0172, | |
"step": 608 | |
}, | |
{ | |
"epoch": 1.3529575118022772, | |
"grad_norm": 0.009016762487590313, | |
"learning_rate": 0.0018296296296296296, | |
"loss": 0.7735, | |
"step": 609 | |
}, | |
{ | |
"epoch": 1.355179116911969, | |
"grad_norm": 0.005660157650709152, | |
"learning_rate": 0.0018271604938271606, | |
"loss": 0.704, | |
"step": 610 | |
}, | |
{ | |
"epoch": 1.3574007220216606, | |
"grad_norm": 0.02231772243976593, | |
"learning_rate": 0.0018246913580246915, | |
"loss": 0.9194, | |
"step": 611 | |
}, | |
{ | |
"epoch": 1.3596223271313523, | |
"grad_norm": 0.009612826630473137, | |
"learning_rate": 0.0018222222222222223, | |
"loss": 0.805, | |
"step": 612 | |
}, | |
{ | |
"epoch": 1.3618439322410443, | |
"grad_norm": 0.008082101121544838, | |
"learning_rate": 0.0018197530864197532, | |
"loss": 0.8137, | |
"step": 613 | |
}, | |
{ | |
"epoch": 1.364065537350736, | |
"grad_norm": 0.0074988240376114845, | |
"learning_rate": 0.001817283950617284, | |
"loss": 0.7579, | |
"step": 614 | |
}, | |
{ | |
"epoch": 1.3662871424604277, | |
"grad_norm": 0.008431846275925636, | |
"learning_rate": 0.001814814814814815, | |
"loss": 0.6704, | |
"step": 615 | |
}, | |
{ | |
"epoch": 1.3685087475701194, | |
"grad_norm": 0.00818427000194788, | |
"learning_rate": 0.0018123456790123456, | |
"loss": 0.7728, | |
"step": 616 | |
}, | |
{ | |
"epoch": 1.3707303526798111, | |
"grad_norm": 0.009126820601522923, | |
"learning_rate": 0.0018098765432098766, | |
"loss": 0.5695, | |
"step": 617 | |
}, | |
{ | |
"epoch": 1.3729519577895029, | |
"grad_norm": 0.010422170162200928, | |
"learning_rate": 0.0018074074074074075, | |
"loss": 0.7908, | |
"step": 618 | |
}, | |
{ | |
"epoch": 1.3751735628991946, | |
"grad_norm": 0.00857476331293583, | |
"learning_rate": 0.0018049382716049383, | |
"loss": 0.7064, | |
"step": 619 | |
}, | |
{ | |
"epoch": 1.3773951680088863, | |
"grad_norm": 0.014797719195485115, | |
"learning_rate": 0.0018024691358024692, | |
"loss": 0.9911, | |
"step": 620 | |
}, | |
{ | |
"epoch": 1.3796167731185782, | |
"grad_norm": 0.009074145928025246, | |
"learning_rate": 0.0018, | |
"loss": 0.9399, | |
"step": 621 | |
}, | |
{ | |
"epoch": 1.38183837822827, | |
"grad_norm": 0.00829361006617546, | |
"learning_rate": 0.001797530864197531, | |
"loss": 0.7555, | |
"step": 622 | |
}, | |
{ | |
"epoch": 1.3840599833379617, | |
"grad_norm": 0.008414024487137794, | |
"learning_rate": 0.0017950617283950616, | |
"loss": 0.7842, | |
"step": 623 | |
}, | |
{ | |
"epoch": 1.3862815884476534, | |
"grad_norm": 0.008322815410792828, | |
"learning_rate": 0.0017925925925925926, | |
"loss": 0.7748, | |
"step": 624 | |
}, | |
{ | |
"epoch": 1.3885031935573453, | |
"grad_norm": 0.010673588141798973, | |
"learning_rate": 0.0017901234567901233, | |
"loss": 0.7959, | |
"step": 625 | |
}, | |
{ | |
"epoch": 1.390724798667037, | |
"grad_norm": 0.016830846667289734, | |
"learning_rate": 0.0017876543209876543, | |
"loss": 0.6871, | |
"step": 626 | |
}, | |
{ | |
"epoch": 1.3929464037767287, | |
"grad_norm": 0.031158100813627243, | |
"learning_rate": 0.0017851851851851854, | |
"loss": 1.0221, | |
"step": 627 | |
}, | |
{ | |
"epoch": 1.3951680088864205, | |
"grad_norm": 0.0064673833549022675, | |
"learning_rate": 0.001782716049382716, | |
"loss": 0.8553, | |
"step": 628 | |
}, | |
{ | |
"epoch": 1.3973896139961122, | |
"grad_norm": 0.008676216006278992, | |
"learning_rate": 0.0017802469135802471, | |
"loss": 0.7436, | |
"step": 629 | |
}, | |
{ | |
"epoch": 1.3996112191058039, | |
"grad_norm": 0.006582185626029968, | |
"learning_rate": 0.0017777777777777776, | |
"loss": 0.7052, | |
"step": 630 | |
}, | |
{ | |
"epoch": 1.4018328242154956, | |
"grad_norm": 0.009261371567845345, | |
"learning_rate": 0.0017753086419753088, | |
"loss": 0.8944, | |
"step": 631 | |
}, | |
{ | |
"epoch": 1.4040544293251873, | |
"grad_norm": 0.005905612837523222, | |
"learning_rate": 0.0017728395061728393, | |
"loss": 0.8222, | |
"step": 632 | |
}, | |
{ | |
"epoch": 1.4062760344348793, | |
"grad_norm": 0.0069193001836538315, | |
"learning_rate": 0.0017703703703703705, | |
"loss": 0.7179, | |
"step": 633 | |
}, | |
{ | |
"epoch": 1.408497639544571, | |
"grad_norm": 0.01019218284636736, | |
"learning_rate": 0.0017679012345679015, | |
"loss": 0.8184, | |
"step": 634 | |
}, | |
{ | |
"epoch": 1.4107192446542627, | |
"grad_norm": 0.008972816169261932, | |
"learning_rate": 0.0017654320987654322, | |
"loss": 0.7132, | |
"step": 635 | |
}, | |
{ | |
"epoch": 1.4129408497639544, | |
"grad_norm": 0.011042306199669838, | |
"learning_rate": 0.0017629629629629631, | |
"loss": 0.7855, | |
"step": 636 | |
}, | |
{ | |
"epoch": 1.4151624548736463, | |
"grad_norm": 0.010043452493846416, | |
"learning_rate": 0.0017604938271604939, | |
"loss": 0.9929, | |
"step": 637 | |
}, | |
{ | |
"epoch": 1.417384059983338, | |
"grad_norm": 0.006690305657684803, | |
"learning_rate": 0.0017580246913580248, | |
"loss": 0.8618, | |
"step": 638 | |
}, | |
{ | |
"epoch": 1.4196056650930298, | |
"grad_norm": 0.0102543905377388, | |
"learning_rate": 0.0017555555555555556, | |
"loss": 0.7471, | |
"step": 639 | |
}, | |
{ | |
"epoch": 1.4218272702027215, | |
"grad_norm": 0.011543518863618374, | |
"learning_rate": 0.0017530864197530865, | |
"loss": 0.8827, | |
"step": 640 | |
}, | |
{ | |
"epoch": 1.4240488753124132, | |
"grad_norm": 0.0073388186283409595, | |
"learning_rate": 0.0017506172839506175, | |
"loss": 0.8361, | |
"step": 641 | |
}, | |
{ | |
"epoch": 1.426270480422105, | |
"grad_norm": 0.009625883772969246, | |
"learning_rate": 0.0017481481481481482, | |
"loss": 0.756, | |
"step": 642 | |
}, | |
{ | |
"epoch": 1.4284920855317966, | |
"grad_norm": 0.008923063054680824, | |
"learning_rate": 0.0017456790123456791, | |
"loss": 0.7119, | |
"step": 643 | |
}, | |
{ | |
"epoch": 1.4307136906414883, | |
"grad_norm": 0.008398482576012611, | |
"learning_rate": 0.0017432098765432099, | |
"loss": 0.5496, | |
"step": 644 | |
}, | |
{ | |
"epoch": 1.4329352957511803, | |
"grad_norm": 0.013783407397568226, | |
"learning_rate": 0.0017407407407407408, | |
"loss": 0.7134, | |
"step": 645 | |
}, | |
{ | |
"epoch": 1.435156900860872, | |
"grad_norm": 0.008702287450432777, | |
"learning_rate": 0.0017382716049382716, | |
"loss": 0.8801, | |
"step": 646 | |
}, | |
{ | |
"epoch": 1.4373785059705637, | |
"grad_norm": 0.007398973684757948, | |
"learning_rate": 0.0017358024691358025, | |
"loss": 0.7561, | |
"step": 647 | |
}, | |
{ | |
"epoch": 1.4396001110802554, | |
"grad_norm": 0.009800056926906109, | |
"learning_rate": 0.0017333333333333333, | |
"loss": 0.7184, | |
"step": 648 | |
}, | |
{ | |
"epoch": 1.4418217161899474, | |
"grad_norm": 0.006798363756388426, | |
"learning_rate": 0.0017308641975308642, | |
"loss": 0.8016, | |
"step": 649 | |
}, | |
{ | |
"epoch": 1.444043321299639, | |
"grad_norm": 0.005563552491366863, | |
"learning_rate": 0.0017283950617283952, | |
"loss": 0.727, | |
"step": 650 | |
}, | |
{ | |
"epoch": 1.4462649264093308, | |
"grad_norm": 0.032814882695674896, | |
"learning_rate": 0.0017259259259259259, | |
"loss": 0.733, | |
"step": 651 | |
}, | |
{ | |
"epoch": 1.4484865315190225, | |
"grad_norm": 0.007777430582791567, | |
"learning_rate": 0.0017234567901234568, | |
"loss": 0.8456, | |
"step": 652 | |
}, | |
{ | |
"epoch": 1.4507081366287142, | |
"grad_norm": 0.01004884298890829, | |
"learning_rate": 0.0017209876543209876, | |
"loss": 0.8893, | |
"step": 653 | |
}, | |
{ | |
"epoch": 1.452929741738406, | |
"grad_norm": 0.005339653231203556, | |
"learning_rate": 0.0017185185185185185, | |
"loss": 0.6644, | |
"step": 654 | |
}, | |
{ | |
"epoch": 1.4551513468480977, | |
"grad_norm": 0.009580488316714764, | |
"learning_rate": 0.0017160493827160493, | |
"loss": 0.78, | |
"step": 655 | |
}, | |
{ | |
"epoch": 1.4573729519577894, | |
"grad_norm": 0.008997698314487934, | |
"learning_rate": 0.0017135802469135802, | |
"loss": 0.7838, | |
"step": 656 | |
}, | |
{ | |
"epoch": 1.4595945570674813, | |
"grad_norm": 0.008436165750026703, | |
"learning_rate": 0.0017111111111111114, | |
"loss": 0.7232, | |
"step": 657 | |
}, | |
{ | |
"epoch": 1.461816162177173, | |
"grad_norm": 0.009556714445352554, | |
"learning_rate": 0.001708641975308642, | |
"loss": 0.815, | |
"step": 658 | |
}, | |
{ | |
"epoch": 1.4640377672868647, | |
"grad_norm": 0.01159976702183485, | |
"learning_rate": 0.001706172839506173, | |
"loss": 0.8061, | |
"step": 659 | |
}, | |
{ | |
"epoch": 1.4662593723965565, | |
"grad_norm": 0.0057432996109128, | |
"learning_rate": 0.0017037037037037036, | |
"loss": 0.8892, | |
"step": 660 | |
}, | |
{ | |
"epoch": 1.4684809775062484, | |
"grad_norm": 0.007714425213634968, | |
"learning_rate": 0.0017012345679012348, | |
"loss": 0.8771, | |
"step": 661 | |
}, | |
{ | |
"epoch": 1.4707025826159401, | |
"grad_norm": 0.00997792650014162, | |
"learning_rate": 0.0016987654320987653, | |
"loss": 0.7399, | |
"step": 662 | |
}, | |
{ | |
"epoch": 1.4729241877256318, | |
"grad_norm": 0.008112712763249874, | |
"learning_rate": 0.0016962962962962964, | |
"loss": 0.7373, | |
"step": 663 | |
}, | |
{ | |
"epoch": 1.4751457928353235, | |
"grad_norm": 0.009659236297011375, | |
"learning_rate": 0.0016938271604938274, | |
"loss": 0.823, | |
"step": 664 | |
}, | |
{ | |
"epoch": 1.4773673979450153, | |
"grad_norm": 0.021559547632932663, | |
"learning_rate": 0.0016913580246913581, | |
"loss": 0.8344, | |
"step": 665 | |
}, | |
{ | |
"epoch": 1.479589003054707, | |
"grad_norm": 0.007330529857426882, | |
"learning_rate": 0.001688888888888889, | |
"loss": 0.7308, | |
"step": 666 | |
}, | |
{ | |
"epoch": 1.4818106081643987, | |
"grad_norm": 0.009981613606214523, | |
"learning_rate": 0.0016864197530864198, | |
"loss": 0.7725, | |
"step": 667 | |
}, | |
{ | |
"epoch": 1.4840322132740904, | |
"grad_norm": 0.011183936148881912, | |
"learning_rate": 0.0016839506172839508, | |
"loss": 0.785, | |
"step": 668 | |
}, | |
{ | |
"epoch": 1.4862538183837823, | |
"grad_norm": 0.006825693417340517, | |
"learning_rate": 0.0016814814814814813, | |
"loss": 0.8749, | |
"step": 669 | |
}, | |
{ | |
"epoch": 1.488475423493474, | |
"grad_norm": 0.007616880349814892, | |
"learning_rate": 0.0016790123456790125, | |
"loss": 0.7206, | |
"step": 670 | |
}, | |
{ | |
"epoch": 1.4906970286031658, | |
"grad_norm": 0.008109161630272865, | |
"learning_rate": 0.0016765432098765434, | |
"loss": 0.8849, | |
"step": 671 | |
}, | |
{ | |
"epoch": 1.4929186337128575, | |
"grad_norm": 0.008072765544056892, | |
"learning_rate": 0.0016740740740740741, | |
"loss": 0.6808, | |
"step": 672 | |
}, | |
{ | |
"epoch": 1.4951402388225494, | |
"grad_norm": 0.008430154994130135, | |
"learning_rate": 0.001671604938271605, | |
"loss": 0.8143, | |
"step": 673 | |
}, | |
{ | |
"epoch": 1.4973618439322411, | |
"grad_norm": 0.006939015816897154, | |
"learning_rate": 0.0016691358024691358, | |
"loss": 0.7448, | |
"step": 674 | |
}, | |
{ | |
"epoch": 1.4995834490419329, | |
"grad_norm": 0.00827766116708517, | |
"learning_rate": 0.0016666666666666668, | |
"loss": 0.8428, | |
"step": 675 | |
}, | |
{ | |
"epoch": 1.5018050541516246, | |
"grad_norm": 0.009324070066213608, | |
"learning_rate": 0.0016641975308641975, | |
"loss": 0.8593, | |
"step": 676 | |
}, | |
{ | |
"epoch": 1.5040266592613163, | |
"grad_norm": 0.00814896821975708, | |
"learning_rate": 0.0016617283950617285, | |
"loss": 0.7604, | |
"step": 677 | |
}, | |
{ | |
"epoch": 1.506248264371008, | |
"grad_norm": 0.006904492620378733, | |
"learning_rate": 0.0016592592592592592, | |
"loss": 0.7365, | |
"step": 678 | |
}, | |
{ | |
"epoch": 1.5084698694806997, | |
"grad_norm": 0.011149480938911438, | |
"learning_rate": 0.0016567901234567901, | |
"loss": 0.6607, | |
"step": 679 | |
}, | |
{ | |
"epoch": 1.5106914745903914, | |
"grad_norm": 0.007484333124011755, | |
"learning_rate": 0.001654320987654321, | |
"loss": 0.6669, | |
"step": 680 | |
}, | |
{ | |
"epoch": 1.5129130797000832, | |
"grad_norm": 0.015959644690155983, | |
"learning_rate": 0.0016518518518518518, | |
"loss": 0.8162, | |
"step": 681 | |
}, | |
{ | |
"epoch": 1.515134684809775, | |
"grad_norm": 0.011469387449324131, | |
"learning_rate": 0.0016493827160493828, | |
"loss": 0.8015, | |
"step": 682 | |
}, | |
{ | |
"epoch": 1.5173562899194668, | |
"grad_norm": 0.009255544282495975, | |
"learning_rate": 0.0016469135802469135, | |
"loss": 0.8708, | |
"step": 683 | |
}, | |
{ | |
"epoch": 1.5195778950291585, | |
"grad_norm": 0.02047853358089924, | |
"learning_rate": 0.0016444444444444445, | |
"loss": 0.6925, | |
"step": 684 | |
}, | |
{ | |
"epoch": 1.5217995001388505, | |
"grad_norm": 0.006029823329299688, | |
"learning_rate": 0.0016419753086419752, | |
"loss": 0.6099, | |
"step": 685 | |
}, | |
{ | |
"epoch": 1.5240211052485422, | |
"grad_norm": 0.00622076541185379, | |
"learning_rate": 0.0016395061728395062, | |
"loss": 0.9379, | |
"step": 686 | |
}, | |
{ | |
"epoch": 1.526242710358234, | |
"grad_norm": 0.007114652544260025, | |
"learning_rate": 0.001637037037037037, | |
"loss": 0.8084, | |
"step": 687 | |
}, | |
{ | |
"epoch": 1.5284643154679256, | |
"grad_norm": 0.006938717793673277, | |
"learning_rate": 0.0016345679012345678, | |
"loss": 0.9378, | |
"step": 688 | |
}, | |
{ | |
"epoch": 1.5306859205776173, | |
"grad_norm": 0.006727306172251701, | |
"learning_rate": 0.0016320987654320988, | |
"loss": 0.7257, | |
"step": 689 | |
}, | |
{ | |
"epoch": 1.532907525687309, | |
"grad_norm": 0.006786365061998367, | |
"learning_rate": 0.0016296296296296295, | |
"loss": 0.8699, | |
"step": 690 | |
}, | |
{ | |
"epoch": 1.5351291307970008, | |
"grad_norm": 0.007230743300169706, | |
"learning_rate": 0.0016271604938271605, | |
"loss": 0.7269, | |
"step": 691 | |
}, | |
{ | |
"epoch": 1.5373507359066925, | |
"grad_norm": 0.0071078576147556305, | |
"learning_rate": 0.0016246913580246912, | |
"loss": 0.7598, | |
"step": 692 | |
}, | |
{ | |
"epoch": 1.5395723410163842, | |
"grad_norm": 0.00766537431627512, | |
"learning_rate": 0.0016222222222222222, | |
"loss": 0.7221, | |
"step": 693 | |
}, | |
{ | |
"epoch": 1.5417939461260761, | |
"grad_norm": 0.011207444593310356, | |
"learning_rate": 0.0016197530864197533, | |
"loss": 0.8521, | |
"step": 694 | |
}, | |
{ | |
"epoch": 1.5440155512357678, | |
"grad_norm": 0.009046681225299835, | |
"learning_rate": 0.0016172839506172839, | |
"loss": 0.7945, | |
"step": 695 | |
}, | |
{ | |
"epoch": 1.5462371563454596, | |
"grad_norm": 0.025657106190919876, | |
"learning_rate": 0.001614814814814815, | |
"loss": 0.7099, | |
"step": 696 | |
}, | |
{ | |
"epoch": 1.5484587614551515, | |
"grad_norm": 0.00540280481800437, | |
"learning_rate": 0.0016123456790123455, | |
"loss": 0.8597, | |
"step": 697 | |
}, | |
{ | |
"epoch": 1.5506803665648432, | |
"grad_norm": 0.0077238529920578, | |
"learning_rate": 0.0016098765432098767, | |
"loss": 0.7868, | |
"step": 698 | |
}, | |
{ | |
"epoch": 1.552901971674535, | |
"grad_norm": 0.011015200056135654, | |
"learning_rate": 0.0016074074074074072, | |
"loss": 0.4777, | |
"step": 699 | |
}, | |
{ | |
"epoch": 1.5551235767842266, | |
"grad_norm": 0.008766315877437592, | |
"learning_rate": 0.0016049382716049384, | |
"loss": 0.6334, | |
"step": 700 | |
}, | |
{ | |
"epoch": 1.5573451818939184, | |
"grad_norm": 0.005418060813099146, | |
"learning_rate": 0.0016024691358024693, | |
"loss": 0.7606, | |
"step": 701 | |
}, | |
{ | |
"epoch": 1.55956678700361, | |
"grad_norm": 0.010448934510350227, | |
"learning_rate": 0.0016, | |
"loss": 0.6833, | |
"step": 702 | |
}, | |
{ | |
"epoch": 1.5617883921133018, | |
"grad_norm": 0.007874859496951103, | |
"learning_rate": 0.001597530864197531, | |
"loss": 0.6212, | |
"step": 703 | |
}, | |
{ | |
"epoch": 1.5640099972229935, | |
"grad_norm": 0.01020901370793581, | |
"learning_rate": 0.0015950617283950618, | |
"loss": 0.9533, | |
"step": 704 | |
}, | |
{ | |
"epoch": 1.5662316023326852, | |
"grad_norm": 0.006319139152765274, | |
"learning_rate": 0.0015925925925925927, | |
"loss": 0.8269, | |
"step": 705 | |
}, | |
{ | |
"epoch": 1.5684532074423772, | |
"grad_norm": 0.017285294830799103, | |
"learning_rate": 0.0015901234567901234, | |
"loss": 1.0417, | |
"step": 706 | |
}, | |
{ | |
"epoch": 1.5706748125520689, | |
"grad_norm": 0.009961610659956932, | |
"learning_rate": 0.0015876543209876544, | |
"loss": 0.8643, | |
"step": 707 | |
}, | |
{ | |
"epoch": 1.5728964176617606, | |
"grad_norm": 0.008692831732332706, | |
"learning_rate": 0.0015851851851851851, | |
"loss": 0.7483, | |
"step": 708 | |
}, | |
{ | |
"epoch": 1.5751180227714525, | |
"grad_norm": 0.010625587776303291, | |
"learning_rate": 0.001582716049382716, | |
"loss": 0.7756, | |
"step": 709 | |
}, | |
{ | |
"epoch": 1.5773396278811442, | |
"grad_norm": 0.0054595437832176685, | |
"learning_rate": 0.001580246913580247, | |
"loss": 0.7034, | |
"step": 710 | |
}, | |
{ | |
"epoch": 1.579561232990836, | |
"grad_norm": 0.0060311974957585335, | |
"learning_rate": 0.0015777777777777778, | |
"loss": 0.8508, | |
"step": 711 | |
}, | |
{ | |
"epoch": 1.5817828381005277, | |
"grad_norm": 0.011815550737082958, | |
"learning_rate": 0.0015753086419753087, | |
"loss": 0.8377, | |
"step": 712 | |
}, | |
{ | |
"epoch": 1.5840044432102194, | |
"grad_norm": 0.006426041480153799, | |
"learning_rate": 0.0015728395061728395, | |
"loss": 0.7625, | |
"step": 713 | |
}, | |
{ | |
"epoch": 1.586226048319911, | |
"grad_norm": 0.005769714713096619, | |
"learning_rate": 0.0015703703703703704, | |
"loss": 0.8298, | |
"step": 714 | |
}, | |
{ | |
"epoch": 1.5884476534296028, | |
"grad_norm": 0.008317188359797001, | |
"learning_rate": 0.0015679012345679011, | |
"loss": 0.7285, | |
"step": 715 | |
}, | |
{ | |
"epoch": 1.5906692585392945, | |
"grad_norm": 0.005216104909777641, | |
"learning_rate": 0.001565432098765432, | |
"loss": 0.7872, | |
"step": 716 | |
}, | |
{ | |
"epoch": 1.5928908636489862, | |
"grad_norm": 0.006061375141143799, | |
"learning_rate": 0.001562962962962963, | |
"loss": 0.7549, | |
"step": 717 | |
}, | |
{ | |
"epoch": 1.5951124687586782, | |
"grad_norm": 0.008552168495953083, | |
"learning_rate": 0.0015604938271604938, | |
"loss": 0.9864, | |
"step": 718 | |
}, | |
{ | |
"epoch": 1.59733407386837, | |
"grad_norm": 0.0080868573859334, | |
"learning_rate": 0.0015580246913580247, | |
"loss": 0.7169, | |
"step": 719 | |
}, | |
{ | |
"epoch": 1.5995556789780616, | |
"grad_norm": 0.009083057753741741, | |
"learning_rate": 0.0015555555555555555, | |
"loss": 0.9025, | |
"step": 720 | |
}, | |
{ | |
"epoch": 1.6017772840877536, | |
"grad_norm": 0.015129972249269485, | |
"learning_rate": 0.0015530864197530864, | |
"loss": 0.7955, | |
"step": 721 | |
}, | |
{ | |
"epoch": 1.6039988891974453, | |
"grad_norm": 0.009628325700759888, | |
"learning_rate": 0.0015506172839506172, | |
"loss": 0.801, | |
"step": 722 | |
}, | |
{ | |
"epoch": 1.606220494307137, | |
"grad_norm": 0.008118744008243084, | |
"learning_rate": 0.001548148148148148, | |
"loss": 0.801, | |
"step": 723 | |
}, | |
{ | |
"epoch": 1.6084420994168287, | |
"grad_norm": 0.01229710690677166, | |
"learning_rate": 0.0015456790123456793, | |
"loss": 0.7778, | |
"step": 724 | |
}, | |
{ | |
"epoch": 1.6106637045265204, | |
"grad_norm": 0.00783099327236414, | |
"learning_rate": 0.0015432098765432098, | |
"loss": 0.7599, | |
"step": 725 | |
}, | |
{ | |
"epoch": 1.6128853096362121, | |
"grad_norm": 0.005414724349975586, | |
"learning_rate": 0.001540740740740741, | |
"loss": 0.6664, | |
"step": 726 | |
}, | |
{ | |
"epoch": 1.6151069147459038, | |
"grad_norm": 0.006641815882176161, | |
"learning_rate": 0.0015382716049382715, | |
"loss": 0.769, | |
"step": 727 | |
}, | |
{ | |
"epoch": 1.6173285198555956, | |
"grad_norm": 0.007328651379793882, | |
"learning_rate": 0.0015358024691358026, | |
"loss": 0.7119, | |
"step": 728 | |
}, | |
{ | |
"epoch": 1.6195501249652873, | |
"grad_norm": 0.010657723993062973, | |
"learning_rate": 0.0015333333333333332, | |
"loss": 0.7682, | |
"step": 729 | |
}, | |
{ | |
"epoch": 1.6217717300749792, | |
"grad_norm": 0.009739626199007034, | |
"learning_rate": 0.0015308641975308643, | |
"loss": 0.8293, | |
"step": 730 | |
}, | |
{ | |
"epoch": 1.623993335184671, | |
"grad_norm": 0.0063155414536595345, | |
"learning_rate": 0.0015283950617283948, | |
"loss": 0.6139, | |
"step": 731 | |
}, | |
{ | |
"epoch": 1.6262149402943626, | |
"grad_norm": 0.007235299795866013, | |
"learning_rate": 0.001525925925925926, | |
"loss": 0.8225, | |
"step": 732 | |
}, | |
{ | |
"epoch": 1.6284365454040546, | |
"grad_norm": 0.008075661025941372, | |
"learning_rate": 0.001523456790123457, | |
"loss": 0.9188, | |
"step": 733 | |
}, | |
{ | |
"epoch": 1.6306581505137463, | |
"grad_norm": 0.009136526845395565, | |
"learning_rate": 0.0015209876543209877, | |
"loss": 0.8515, | |
"step": 734 | |
}, | |
{ | |
"epoch": 1.632879755623438, | |
"grad_norm": 0.006905075628310442, | |
"learning_rate": 0.0015185185185185187, | |
"loss": 0.6947, | |
"step": 735 | |
}, | |
{ | |
"epoch": 1.6351013607331297, | |
"grad_norm": 0.006465461570769548, | |
"learning_rate": 0.0015160493827160494, | |
"loss": 0.7776, | |
"step": 736 | |
}, | |
{ | |
"epoch": 1.6373229658428214, | |
"grad_norm": 0.007885467261075974, | |
"learning_rate": 0.0015135802469135803, | |
"loss": 0.8716, | |
"step": 737 | |
}, | |
{ | |
"epoch": 1.6395445709525132, | |
"grad_norm": 0.008669420145452023, | |
"learning_rate": 0.001511111111111111, | |
"loss": 0.8524, | |
"step": 738 | |
}, | |
{ | |
"epoch": 1.6417661760622049, | |
"grad_norm": 0.008928552269935608, | |
"learning_rate": 0.001508641975308642, | |
"loss": 0.7355, | |
"step": 739 | |
}, | |
{ | |
"epoch": 1.6439877811718966, | |
"grad_norm": 0.007420363835990429, | |
"learning_rate": 0.001506172839506173, | |
"loss": 0.7055, | |
"step": 740 | |
}, | |
{ | |
"epoch": 1.6462093862815883, | |
"grad_norm": 0.011267797090113163, | |
"learning_rate": 0.0015037037037037037, | |
"loss": 0.8876, | |
"step": 741 | |
}, | |
{ | |
"epoch": 1.6484309913912802, | |
"grad_norm": 0.00850197859108448, | |
"learning_rate": 0.0015012345679012347, | |
"loss": 0.6682, | |
"step": 742 | |
}, | |
{ | |
"epoch": 1.650652596500972, | |
"grad_norm": 0.008527017198503017, | |
"learning_rate": 0.0014987654320987656, | |
"loss": 0.6904, | |
"step": 743 | |
}, | |
{ | |
"epoch": 1.6528742016106637, | |
"grad_norm": 0.005802887491881847, | |
"learning_rate": 0.0014962962962962963, | |
"loss": 0.862, | |
"step": 744 | |
}, | |
{ | |
"epoch": 1.6550958067203556, | |
"grad_norm": 0.007038978394120932, | |
"learning_rate": 0.0014938271604938273, | |
"loss": 0.7665, | |
"step": 745 | |
}, | |
{ | |
"epoch": 1.6573174118300473, | |
"grad_norm": 0.005823012441396713, | |
"learning_rate": 0.001491358024691358, | |
"loss": 0.6075, | |
"step": 746 | |
}, | |
{ | |
"epoch": 1.659539016939739, | |
"grad_norm": 0.008316169492900372, | |
"learning_rate": 0.001488888888888889, | |
"loss": 0.6311, | |
"step": 747 | |
}, | |
{ | |
"epoch": 1.6617606220494308, | |
"grad_norm": 0.006056457757949829, | |
"learning_rate": 0.0014864197530864197, | |
"loss": 0.7038, | |
"step": 748 | |
}, | |
{ | |
"epoch": 1.6639822271591225, | |
"grad_norm": 0.007399104069918394, | |
"learning_rate": 0.0014839506172839507, | |
"loss": 0.6376, | |
"step": 749 | |
}, | |
{ | |
"epoch": 1.6662038322688142, | |
"grad_norm": 0.011369763873517513, | |
"learning_rate": 0.0014814814814814814, | |
"loss": 0.699, | |
"step": 750 | |
}, | |
{ | |
"epoch": 1.668425437378506, | |
"grad_norm": 0.007361331954598427, | |
"learning_rate": 0.0014790123456790124, | |
"loss": 0.7455, | |
"step": 751 | |
}, | |
{ | |
"epoch": 1.6706470424881976, | |
"grad_norm": 0.007952176034450531, | |
"learning_rate": 0.0014765432098765433, | |
"loss": 0.7287, | |
"step": 752 | |
}, | |
{ | |
"epoch": 1.6728686475978893, | |
"grad_norm": 0.029937606304883957, | |
"learning_rate": 0.001474074074074074, | |
"loss": 1.1535, | |
"step": 753 | |
}, | |
{ | |
"epoch": 1.6750902527075813, | |
"grad_norm": 0.005133952479809523, | |
"learning_rate": 0.001471604938271605, | |
"loss": 0.9189, | |
"step": 754 | |
}, | |
{ | |
"epoch": 1.677311857817273, | |
"grad_norm": 0.008348564617335796, | |
"learning_rate": 0.0014691358024691357, | |
"loss": 0.6906, | |
"step": 755 | |
}, | |
{ | |
"epoch": 1.6795334629269647, | |
"grad_norm": 0.007027298677712679, | |
"learning_rate": 0.0014666666666666667, | |
"loss": 0.717, | |
"step": 756 | |
}, | |
{ | |
"epoch": 1.6817550680366566, | |
"grad_norm": 0.008435823023319244, | |
"learning_rate": 0.0014641975308641974, | |
"loss": 0.761, | |
"step": 757 | |
}, | |
{ | |
"epoch": 1.6839766731463484, | |
"grad_norm": 0.012493236921727657, | |
"learning_rate": 0.0014617283950617286, | |
"loss": 0.6764, | |
"step": 758 | |
}, | |
{ | |
"epoch": 1.68619827825604, | |
"grad_norm": 0.0075070820748806, | |
"learning_rate": 0.0014592592592592593, | |
"loss": 0.7438, | |
"step": 759 | |
}, | |
{ | |
"epoch": 1.6884198833657318, | |
"grad_norm": 0.006954481825232506, | |
"learning_rate": 0.0014567901234567903, | |
"loss": 0.8324, | |
"step": 760 | |
}, | |
{ | |
"epoch": 1.6906414884754235, | |
"grad_norm": 0.011813154444098473, | |
"learning_rate": 0.001454320987654321, | |
"loss": 0.7317, | |
"step": 761 | |
}, | |
{ | |
"epoch": 1.6928630935851152, | |
"grad_norm": 0.010434205643832684, | |
"learning_rate": 0.001451851851851852, | |
"loss": 0.7598, | |
"step": 762 | |
}, | |
{ | |
"epoch": 1.695084698694807, | |
"grad_norm": 0.007777587044984102, | |
"learning_rate": 0.0014493827160493827, | |
"loss": 0.7618, | |
"step": 763 | |
}, | |
{ | |
"epoch": 1.6973063038044987, | |
"grad_norm": 0.006949772592633963, | |
"learning_rate": 0.0014469135802469136, | |
"loss": 0.8828, | |
"step": 764 | |
}, | |
{ | |
"epoch": 1.6995279089141904, | |
"grad_norm": 0.009230799973011017, | |
"learning_rate": 0.0014444444444444444, | |
"loss": 0.7395, | |
"step": 765 | |
}, | |
{ | |
"epoch": 1.7017495140238823, | |
"grad_norm": 0.004457883071154356, | |
"learning_rate": 0.0014419753086419753, | |
"loss": 0.8258, | |
"step": 766 | |
}, | |
{ | |
"epoch": 1.703971119133574, | |
"grad_norm": 0.008095591329038143, | |
"learning_rate": 0.0014395061728395063, | |
"loss": 0.6411, | |
"step": 767 | |
}, | |
{ | |
"epoch": 1.7061927242432657, | |
"grad_norm": 0.008313817903399467, | |
"learning_rate": 0.001437037037037037, | |
"loss": 0.6612, | |
"step": 768 | |
}, | |
{ | |
"epoch": 1.7084143293529577, | |
"grad_norm": 0.015146925114095211, | |
"learning_rate": 0.001434567901234568, | |
"loss": 0.8697, | |
"step": 769 | |
}, | |
{ | |
"epoch": 1.7106359344626494, | |
"grad_norm": 0.00795427430421114, | |
"learning_rate": 0.0014320987654320987, | |
"loss": 0.6983, | |
"step": 770 | |
}, | |
{ | |
"epoch": 1.712857539572341, | |
"grad_norm": 0.010255630128085613, | |
"learning_rate": 0.0014296296296296297, | |
"loss": 0.715, | |
"step": 771 | |
}, | |
{ | |
"epoch": 1.7150791446820328, | |
"grad_norm": 0.008728211745619774, | |
"learning_rate": 0.0014271604938271604, | |
"loss": 0.8291, | |
"step": 772 | |
}, | |
{ | |
"epoch": 1.7173007497917245, | |
"grad_norm": 0.007851282134652138, | |
"learning_rate": 0.0014246913580246916, | |
"loss": 0.6991, | |
"step": 773 | |
}, | |
{ | |
"epoch": 1.7195223549014163, | |
"grad_norm": 0.010601981543004513, | |
"learning_rate": 0.0014222222222222223, | |
"loss": 0.8099, | |
"step": 774 | |
}, | |
{ | |
"epoch": 1.721743960011108, | |
"grad_norm": 0.009771551936864853, | |
"learning_rate": 0.0014197530864197532, | |
"loss": 0.7372, | |
"step": 775 | |
}, | |
{ | |
"epoch": 1.7239655651207997, | |
"grad_norm": 0.015610922127962112, | |
"learning_rate": 0.001417283950617284, | |
"loss": 0.6995, | |
"step": 776 | |
}, | |
{ | |
"epoch": 1.7261871702304914, | |
"grad_norm": 0.00798638816922903, | |
"learning_rate": 0.001414814814814815, | |
"loss": 0.7119, | |
"step": 777 | |
}, | |
{ | |
"epoch": 1.7284087753401833, | |
"grad_norm": 0.007400579750537872, | |
"learning_rate": 0.0014123456790123457, | |
"loss": 0.653, | |
"step": 778 | |
}, | |
{ | |
"epoch": 1.730630380449875, | |
"grad_norm": 0.00567126739770174, | |
"learning_rate": 0.0014098765432098766, | |
"loss": 0.6784, | |
"step": 779 | |
}, | |
{ | |
"epoch": 1.7328519855595668, | |
"grad_norm": 0.010270976461470127, | |
"learning_rate": 0.0014074074074074073, | |
"loss": 0.7221, | |
"step": 780 | |
}, | |
{ | |
"epoch": 1.7350735906692587, | |
"grad_norm": 0.006880102213472128, | |
"learning_rate": 0.0014049382716049383, | |
"loss": 0.6944, | |
"step": 781 | |
}, | |
{ | |
"epoch": 1.7372951957789504, | |
"grad_norm": 0.007795974612236023, | |
"learning_rate": 0.0014024691358024693, | |
"loss": 0.8301, | |
"step": 782 | |
}, | |
{ | |
"epoch": 1.7395168008886421, | |
"grad_norm": 0.009786085225641727, | |
"learning_rate": 0.0014, | |
"loss": 0.7403, | |
"step": 783 | |
}, | |
{ | |
"epoch": 1.7417384059983338, | |
"grad_norm": 0.009969739243388176, | |
"learning_rate": 0.001397530864197531, | |
"loss": 0.6, | |
"step": 784 | |
}, | |
{ | |
"epoch": 1.7439600111080256, | |
"grad_norm": 0.01001895871013403, | |
"learning_rate": 0.0013950617283950617, | |
"loss": 0.7644, | |
"step": 785 | |
}, | |
{ | |
"epoch": 1.7461816162177173, | |
"grad_norm": 0.00807132851332426, | |
"learning_rate": 0.0013925925925925926, | |
"loss": 0.7323, | |
"step": 786 | |
}, | |
{ | |
"epoch": 1.748403221327409, | |
"grad_norm": 0.010031183250248432, | |
"learning_rate": 0.0013901234567901234, | |
"loss": 0.7332, | |
"step": 787 | |
}, | |
{ | |
"epoch": 1.7506248264371007, | |
"grad_norm": 0.010263016447424889, | |
"learning_rate": 0.0013876543209876545, | |
"loss": 0.8214, | |
"step": 788 | |
}, | |
{ | |
"epoch": 1.7528464315467924, | |
"grad_norm": 0.008517063222825527, | |
"learning_rate": 0.0013851851851851853, | |
"loss": 0.8323, | |
"step": 789 | |
}, | |
{ | |
"epoch": 1.7550680366564844, | |
"grad_norm": 0.008784698322415352, | |
"learning_rate": 0.0013827160493827162, | |
"loss": 0.8068, | |
"step": 790 | |
}, | |
{ | |
"epoch": 1.757289641766176, | |
"grad_norm": 0.010176415555179119, | |
"learning_rate": 0.001380246913580247, | |
"loss": 0.9765, | |
"step": 791 | |
}, | |
{ | |
"epoch": 1.7595112468758678, | |
"grad_norm": 0.0070914411917328835, | |
"learning_rate": 0.001377777777777778, | |
"loss": 0.8225, | |
"step": 792 | |
}, | |
{ | |
"epoch": 1.7617328519855595, | |
"grad_norm": 0.011972385458648205, | |
"learning_rate": 0.0013753086419753086, | |
"loss": 0.8802, | |
"step": 793 | |
}, | |
{ | |
"epoch": 1.7639544570952514, | |
"grad_norm": 0.007360584102571011, | |
"learning_rate": 0.0013728395061728396, | |
"loss": 0.8901, | |
"step": 794 | |
}, | |
{ | |
"epoch": 1.7661760622049432, | |
"grad_norm": 0.015350119210779667, | |
"learning_rate": 0.0013703703703703703, | |
"loss": 0.9301, | |
"step": 795 | |
}, | |
{ | |
"epoch": 1.7683976673146349, | |
"grad_norm": 0.005848214030265808, | |
"learning_rate": 0.0013679012345679013, | |
"loss": 0.6417, | |
"step": 796 | |
}, | |
{ | |
"epoch": 1.7706192724243266, | |
"grad_norm": 0.013009141199290752, | |
"learning_rate": 0.0013654320987654322, | |
"loss": 0.8102, | |
"step": 797 | |
}, | |
{ | |
"epoch": 1.7728408775340183, | |
"grad_norm": 0.007184984628111124, | |
"learning_rate": 0.001362962962962963, | |
"loss": 0.8189, | |
"step": 798 | |
}, | |
{ | |
"epoch": 1.77506248264371, | |
"grad_norm": 0.008172586560249329, | |
"learning_rate": 0.001360493827160494, | |
"loss": 0.7398, | |
"step": 799 | |
}, | |
{ | |
"epoch": 1.7772840877534017, | |
"grad_norm": 0.0062285615131258965, | |
"learning_rate": 0.0013580246913580246, | |
"loss": 0.92, | |
"step": 800 | |
}, | |
{ | |
"epoch": 1.7795056928630935, | |
"grad_norm": 0.0068848151713609695, | |
"learning_rate": 0.0013555555555555556, | |
"loss": 0.772, | |
"step": 801 | |
}, | |
{ | |
"epoch": 1.7817272979727852, | |
"grad_norm": 0.009805290959775448, | |
"learning_rate": 0.0013530864197530863, | |
"loss": 0.5684, | |
"step": 802 | |
}, | |
{ | |
"epoch": 1.783948903082477, | |
"grad_norm": 0.013168488629162312, | |
"learning_rate": 0.0013506172839506175, | |
"loss": 0.9256, | |
"step": 803 | |
}, | |
{ | |
"epoch": 1.7861705081921688, | |
"grad_norm": 0.008684579282999039, | |
"learning_rate": 0.0013481481481481482, | |
"loss": 0.9528, | |
"step": 804 | |
}, | |
{ | |
"epoch": 1.7883921133018605, | |
"grad_norm": 0.012764825485646725, | |
"learning_rate": 0.0013456790123456792, | |
"loss": 0.6709, | |
"step": 805 | |
}, | |
{ | |
"epoch": 1.7906137184115525, | |
"grad_norm": 0.009115751832723618, | |
"learning_rate": 0.00134320987654321, | |
"loss": 0.9313, | |
"step": 806 | |
}, | |
{ | |
"epoch": 1.7928353235212442, | |
"grad_norm": 0.008431021124124527, | |
"learning_rate": 0.0013407407407407409, | |
"loss": 1.0696, | |
"step": 807 | |
}, | |
{ | |
"epoch": 1.795056928630936, | |
"grad_norm": 0.01002161018550396, | |
"learning_rate": 0.0013382716049382716, | |
"loss": 0.9138, | |
"step": 808 | |
}, | |
{ | |
"epoch": 1.7972785337406276, | |
"grad_norm": 0.007444395218044519, | |
"learning_rate": 0.0013358024691358023, | |
"loss": 0.8264, | |
"step": 809 | |
}, | |
{ | |
"epoch": 1.7995001388503193, | |
"grad_norm": 0.008543235249817371, | |
"learning_rate": 0.0013333333333333333, | |
"loss": 1.0303, | |
"step": 810 | |
}, | |
{ | |
"epoch": 1.801721743960011, | |
"grad_norm": 0.00885559618473053, | |
"learning_rate": 0.0013308641975308642, | |
"loss": 0.7287, | |
"step": 811 | |
}, | |
{ | |
"epoch": 1.8039433490697028, | |
"grad_norm": 0.01157060731202364, | |
"learning_rate": 0.0013283950617283952, | |
"loss": 0.8287, | |
"step": 812 | |
}, | |
{ | |
"epoch": 1.8061649541793945, | |
"grad_norm": 0.011350276879966259, | |
"learning_rate": 0.001325925925925926, | |
"loss": 0.8068, | |
"step": 813 | |
}, | |
{ | |
"epoch": 1.8083865592890862, | |
"grad_norm": 0.00892575178295374, | |
"learning_rate": 0.0013234567901234569, | |
"loss": 0.8147, | |
"step": 814 | |
}, | |
{ | |
"epoch": 1.8106081643987781, | |
"grad_norm": 0.009176945313811302, | |
"learning_rate": 0.0013209876543209876, | |
"loss": 0.673, | |
"step": 815 | |
}, | |
{ | |
"epoch": 1.8128297695084699, | |
"grad_norm": 0.00625951262190938, | |
"learning_rate": 0.0013185185185185186, | |
"loss": 0.7057, | |
"step": 816 | |
}, | |
{ | |
"epoch": 1.8150513746181616, | |
"grad_norm": 0.00856202095746994, | |
"learning_rate": 0.0013160493827160493, | |
"loss": 0.8703, | |
"step": 817 | |
}, | |
{ | |
"epoch": 1.8172729797278535, | |
"grad_norm": 0.006727920845150948, | |
"learning_rate": 0.0013135802469135802, | |
"loss": 0.6712, | |
"step": 818 | |
}, | |
{ | |
"epoch": 1.8194945848375452, | |
"grad_norm": 0.008964741602540016, | |
"learning_rate": 0.0013111111111111112, | |
"loss": 0.7545, | |
"step": 819 | |
}, | |
{ | |
"epoch": 1.821716189947237, | |
"grad_norm": 0.007025114260613918, | |
"learning_rate": 0.001308641975308642, | |
"loss": 0.7261, | |
"step": 820 | |
}, | |
{ | |
"epoch": 1.8239377950569287, | |
"grad_norm": 0.006972780916839838, | |
"learning_rate": 0.0013061728395061729, | |
"loss": 0.676, | |
"step": 821 | |
}, | |
{ | |
"epoch": 1.8261594001666204, | |
"grad_norm": 0.006812758278101683, | |
"learning_rate": 0.0013037037037037036, | |
"loss": 0.7227, | |
"step": 822 | |
}, | |
{ | |
"epoch": 1.828381005276312, | |
"grad_norm": 0.005802335683256388, | |
"learning_rate": 0.0013012345679012346, | |
"loss": 0.7135, | |
"step": 823 | |
}, | |
{ | |
"epoch": 1.8306026103860038, | |
"grad_norm": 0.01064333040267229, | |
"learning_rate": 0.0012987654320987653, | |
"loss": 1.083, | |
"step": 824 | |
}, | |
{ | |
"epoch": 1.8328242154956955, | |
"grad_norm": 0.009813620708882809, | |
"learning_rate": 0.0012962962962962963, | |
"loss": 0.7178, | |
"step": 825 | |
}, | |
{ | |
"epoch": 1.8350458206053872, | |
"grad_norm": 0.005859369412064552, | |
"learning_rate": 0.0012938271604938272, | |
"loss": 0.7861, | |
"step": 826 | |
}, | |
{ | |
"epoch": 1.8372674257150792, | |
"grad_norm": 0.007550476584583521, | |
"learning_rate": 0.0012913580246913582, | |
"loss": 0.9533, | |
"step": 827 | |
}, | |
{ | |
"epoch": 1.8394890308247709, | |
"grad_norm": 0.011494121514260769, | |
"learning_rate": 0.001288888888888889, | |
"loss": 0.6361, | |
"step": 828 | |
}, | |
{ | |
"epoch": 1.8417106359344626, | |
"grad_norm": 0.01333784218877554, | |
"learning_rate": 0.0012864197530864198, | |
"loss": 0.6448, | |
"step": 829 | |
}, | |
{ | |
"epoch": 1.8439322410441545, | |
"grad_norm": 0.0077012451365590096, | |
"learning_rate": 0.0012839506172839506, | |
"loss": 0.6669, | |
"step": 830 | |
}, | |
{ | |
"epoch": 1.8461538461538463, | |
"grad_norm": 0.008285445161163807, | |
"learning_rate": 0.0012814814814814815, | |
"loss": 0.7666, | |
"step": 831 | |
}, | |
{ | |
"epoch": 1.848375451263538, | |
"grad_norm": 0.010961475782096386, | |
"learning_rate": 0.0012790123456790123, | |
"loss": 0.7424, | |
"step": 832 | |
}, | |
{ | |
"epoch": 1.8505970563732297, | |
"grad_norm": 0.009611285291612148, | |
"learning_rate": 0.0012765432098765432, | |
"loss": 0.6858, | |
"step": 833 | |
}, | |
{ | |
"epoch": 1.8528186614829214, | |
"grad_norm": 0.0063394904136657715, | |
"learning_rate": 0.0012740740740740742, | |
"loss": 0.7984, | |
"step": 834 | |
}, | |
{ | |
"epoch": 1.8550402665926131, | |
"grad_norm": 0.007764410227537155, | |
"learning_rate": 0.001271604938271605, | |
"loss": 0.7263, | |
"step": 835 | |
}, | |
{ | |
"epoch": 1.8572618717023048, | |
"grad_norm": 0.008922139182686806, | |
"learning_rate": 0.0012691358024691359, | |
"loss": 0.759, | |
"step": 836 | |
}, | |
{ | |
"epoch": 1.8594834768119965, | |
"grad_norm": 0.008557251654565334, | |
"learning_rate": 0.0012666666666666666, | |
"loss": 0.6563, | |
"step": 837 | |
}, | |
{ | |
"epoch": 1.8617050819216883, | |
"grad_norm": 0.009035222232341766, | |
"learning_rate": 0.0012641975308641975, | |
"loss": 0.684, | |
"step": 838 | |
}, | |
{ | |
"epoch": 1.8639266870313802, | |
"grad_norm": 0.006750497501343489, | |
"learning_rate": 0.0012617283950617283, | |
"loss": 0.7025, | |
"step": 839 | |
}, | |
{ | |
"epoch": 1.866148292141072, | |
"grad_norm": 0.007769269403070211, | |
"learning_rate": 0.0012592592592592592, | |
"loss": 0.7128, | |
"step": 840 | |
}, | |
{ | |
"epoch": 1.8683698972507636, | |
"grad_norm": 0.005589787382632494, | |
"learning_rate": 0.0012567901234567902, | |
"loss": 0.7813, | |
"step": 841 | |
}, | |
{ | |
"epoch": 1.8705915023604556, | |
"grad_norm": 0.00998506136238575, | |
"learning_rate": 0.0012543209876543211, | |
"loss": 0.7923, | |
"step": 842 | |
}, | |
{ | |
"epoch": 1.8728131074701473, | |
"grad_norm": 0.005361242685467005, | |
"learning_rate": 0.0012518518518518519, | |
"loss": 0.5826, | |
"step": 843 | |
}, | |
{ | |
"epoch": 1.875034712579839, | |
"grad_norm": 0.010085574351251125, | |
"learning_rate": 0.0012493827160493828, | |
"loss": 0.7918, | |
"step": 844 | |
}, | |
{ | |
"epoch": 1.8772563176895307, | |
"grad_norm": 0.006444999016821384, | |
"learning_rate": 0.0012469135802469136, | |
"loss": 0.8084, | |
"step": 845 | |
}, | |
{ | |
"epoch": 1.8794779227992224, | |
"grad_norm": 0.008153421804308891, | |
"learning_rate": 0.0012444444444444445, | |
"loss": 0.7297, | |
"step": 846 | |
}, | |
{ | |
"epoch": 1.8816995279089141, | |
"grad_norm": 0.008666029199957848, | |
"learning_rate": 0.0012419753086419752, | |
"loss": 0.9508, | |
"step": 847 | |
}, | |
{ | |
"epoch": 1.8839211330186059, | |
"grad_norm": 0.006563934497535229, | |
"learning_rate": 0.0012395061728395062, | |
"loss": 0.7055, | |
"step": 848 | |
}, | |
{ | |
"epoch": 1.8861427381282976, | |
"grad_norm": 0.008314713835716248, | |
"learning_rate": 0.0012370370370370371, | |
"loss": 0.7659, | |
"step": 849 | |
}, | |
{ | |
"epoch": 1.8883643432379893, | |
"grad_norm": 0.0050984215922653675, | |
"learning_rate": 0.0012345679012345679, | |
"loss": 0.6926, | |
"step": 850 | |
}, | |
{ | |
"epoch": 1.8905859483476812, | |
"grad_norm": 0.007446047849953175, | |
"learning_rate": 0.0012320987654320988, | |
"loss": 0.9914, | |
"step": 851 | |
}, | |
{ | |
"epoch": 1.892807553457373, | |
"grad_norm": 0.007681590039283037, | |
"learning_rate": 0.0012296296296296296, | |
"loss": 0.5726, | |
"step": 852 | |
}, | |
{ | |
"epoch": 1.8950291585670647, | |
"grad_norm": 0.010494091548025608, | |
"learning_rate": 0.0012271604938271605, | |
"loss": 0.8289, | |
"step": 853 | |
}, | |
{ | |
"epoch": 1.8972507636767566, | |
"grad_norm": 0.01278096903115511, | |
"learning_rate": 0.0012246913580246912, | |
"loss": 0.9755, | |
"step": 854 | |
}, | |
{ | |
"epoch": 1.8994723687864483, | |
"grad_norm": 0.007008662447333336, | |
"learning_rate": 0.0012222222222222222, | |
"loss": 0.741, | |
"step": 855 | |
}, | |
{ | |
"epoch": 1.90169397389614, | |
"grad_norm": 0.008134209550917149, | |
"learning_rate": 0.0012197530864197532, | |
"loss": 0.7209, | |
"step": 856 | |
}, | |
{ | |
"epoch": 1.9039155790058317, | |
"grad_norm": 0.01279931515455246, | |
"learning_rate": 0.001217283950617284, | |
"loss": 0.9444, | |
"step": 857 | |
}, | |
{ | |
"epoch": 1.9061371841155235, | |
"grad_norm": 0.006521604955196381, | |
"learning_rate": 0.0012148148148148148, | |
"loss": 0.7992, | |
"step": 858 | |
}, | |
{ | |
"epoch": 1.9083587892252152, | |
"grad_norm": 0.00859599094837904, | |
"learning_rate": 0.0012123456790123458, | |
"loss": 0.6704, | |
"step": 859 | |
}, | |
{ | |
"epoch": 1.910580394334907, | |
"grad_norm": 0.013226643204689026, | |
"learning_rate": 0.0012098765432098765, | |
"loss": 0.7661, | |
"step": 860 | |
}, | |
{ | |
"epoch": 1.9128019994445986, | |
"grad_norm": 0.00894913263618946, | |
"learning_rate": 0.0012074074074074075, | |
"loss": 0.744, | |
"step": 861 | |
}, | |
{ | |
"epoch": 1.9150236045542903, | |
"grad_norm": 0.009207514114677906, | |
"learning_rate": 0.0012049382716049382, | |
"loss": 0.5921, | |
"step": 862 | |
}, | |
{ | |
"epoch": 1.9172452096639823, | |
"grad_norm": 0.010871309787034988, | |
"learning_rate": 0.0012024691358024692, | |
"loss": 0.7441, | |
"step": 863 | |
}, | |
{ | |
"epoch": 1.919466814773674, | |
"grad_norm": 0.007488877512514591, | |
"learning_rate": 0.0012000000000000001, | |
"loss": 0.7526, | |
"step": 864 | |
}, | |
{ | |
"epoch": 1.9216884198833657, | |
"grad_norm": 0.0074579548090696335, | |
"learning_rate": 0.0011975308641975308, | |
"loss": 0.7413, | |
"step": 865 | |
}, | |
{ | |
"epoch": 1.9239100249930576, | |
"grad_norm": 0.00982628669589758, | |
"learning_rate": 0.0011950617283950618, | |
"loss": 0.8372, | |
"step": 866 | |
}, | |
{ | |
"epoch": 1.9261316301027493, | |
"grad_norm": 0.0074563659727573395, | |
"learning_rate": 0.0011925925925925925, | |
"loss": 1.1102, | |
"step": 867 | |
}, | |
{ | |
"epoch": 1.928353235212441, | |
"grad_norm": 0.007274288684129715, | |
"learning_rate": 0.0011901234567901235, | |
"loss": 0.8402, | |
"step": 868 | |
}, | |
{ | |
"epoch": 1.9305748403221328, | |
"grad_norm": 0.008403385989367962, | |
"learning_rate": 0.0011876543209876542, | |
"loss": 1.1321, | |
"step": 869 | |
}, | |
{ | |
"epoch": 1.9327964454318245, | |
"grad_norm": 0.010185015387833118, | |
"learning_rate": 0.0011851851851851852, | |
"loss": 0.6981, | |
"step": 870 | |
}, | |
{ | |
"epoch": 1.9350180505415162, | |
"grad_norm": 0.009552250616252422, | |
"learning_rate": 0.0011827160493827161, | |
"loss": 0.6831, | |
"step": 871 | |
}, | |
{ | |
"epoch": 1.937239655651208, | |
"grad_norm": 0.00690442556515336, | |
"learning_rate": 0.001180246913580247, | |
"loss": 0.7935, | |
"step": 872 | |
}, | |
{ | |
"epoch": 1.9394612607608996, | |
"grad_norm": 0.007579765748232603, | |
"learning_rate": 0.0011777777777777778, | |
"loss": 0.6622, | |
"step": 873 | |
}, | |
{ | |
"epoch": 1.9416828658705914, | |
"grad_norm": 0.005846850108355284, | |
"learning_rate": 0.0011753086419753088, | |
"loss": 0.5541, | |
"step": 874 | |
}, | |
{ | |
"epoch": 1.9439044709802833, | |
"grad_norm": 0.018095355480909348, | |
"learning_rate": 0.0011728395061728395, | |
"loss": 0.9458, | |
"step": 875 | |
}, | |
{ | |
"epoch": 1.946126076089975, | |
"grad_norm": 0.00899551622569561, | |
"learning_rate": 0.0011703703703703704, | |
"loss": 1.0314, | |
"step": 876 | |
}, | |
{ | |
"epoch": 1.9483476811996667, | |
"grad_norm": 0.010987848974764347, | |
"learning_rate": 0.0011679012345679012, | |
"loss": 0.7561, | |
"step": 877 | |
}, | |
{ | |
"epoch": 1.9505692863093587, | |
"grad_norm": 0.01828991435468197, | |
"learning_rate": 0.0011654320987654321, | |
"loss": 0.7234, | |
"step": 878 | |
}, | |
{ | |
"epoch": 1.9527908914190504, | |
"grad_norm": 0.0066715930588543415, | |
"learning_rate": 0.001162962962962963, | |
"loss": 0.8119, | |
"step": 879 | |
}, | |
{ | |
"epoch": 1.955012496528742, | |
"grad_norm": 0.012585703283548355, | |
"learning_rate": 0.0011604938271604938, | |
"loss": 0.8063, | |
"step": 880 | |
}, | |
{ | |
"epoch": 1.9572341016384338, | |
"grad_norm": 0.007742542307823896, | |
"learning_rate": 0.0011580246913580248, | |
"loss": 0.9048, | |
"step": 881 | |
}, | |
{ | |
"epoch": 1.9594557067481255, | |
"grad_norm": 0.00921573955565691, | |
"learning_rate": 0.0011555555555555555, | |
"loss": 0.8855, | |
"step": 882 | |
}, | |
{ | |
"epoch": 1.9616773118578172, | |
"grad_norm": 0.010648581199347973, | |
"learning_rate": 0.0011530864197530865, | |
"loss": 1.0807, | |
"step": 883 | |
}, | |
{ | |
"epoch": 1.963898916967509, | |
"grad_norm": 0.010228232480585575, | |
"learning_rate": 0.0011506172839506172, | |
"loss": 0.8898, | |
"step": 884 | |
}, | |
{ | |
"epoch": 1.9661205220772007, | |
"grad_norm": 0.012444967404007912, | |
"learning_rate": 0.0011481481481481481, | |
"loss": 0.7, | |
"step": 885 | |
}, | |
{ | |
"epoch": 1.9683421271868924, | |
"grad_norm": 0.007236818317323923, | |
"learning_rate": 0.001145679012345679, | |
"loss": 0.7435, | |
"step": 886 | |
}, | |
{ | |
"epoch": 1.9705637322965843, | |
"grad_norm": 0.01137254387140274, | |
"learning_rate": 0.00114320987654321, | |
"loss": 0.7671, | |
"step": 887 | |
}, | |
{ | |
"epoch": 1.972785337406276, | |
"grad_norm": 0.008345142006874084, | |
"learning_rate": 0.0011407407407407408, | |
"loss": 0.7607, | |
"step": 888 | |
}, | |
{ | |
"epoch": 1.9750069425159678, | |
"grad_norm": 0.008709809742867947, | |
"learning_rate": 0.0011382716049382717, | |
"loss": 0.7433, | |
"step": 889 | |
}, | |
{ | |
"epoch": 1.9772285476256597, | |
"grad_norm": 0.007434303406625986, | |
"learning_rate": 0.0011358024691358025, | |
"loss": 0.7211, | |
"step": 890 | |
}, | |
{ | |
"epoch": 1.9794501527353514, | |
"grad_norm": 0.0068624927662312984, | |
"learning_rate": 0.0011333333333333334, | |
"loss": 0.6605, | |
"step": 891 | |
}, | |
{ | |
"epoch": 1.9816717578450431, | |
"grad_norm": 0.007105222903192043, | |
"learning_rate": 0.0011308641975308641, | |
"loss": 0.7847, | |
"step": 892 | |
}, | |
{ | |
"epoch": 1.9838933629547348, | |
"grad_norm": 0.006928863003849983, | |
"learning_rate": 0.001128395061728395, | |
"loss": 0.8906, | |
"step": 893 | |
}, | |
{ | |
"epoch": 1.9861149680644266, | |
"grad_norm": 0.007032196968793869, | |
"learning_rate": 0.001125925925925926, | |
"loss": 0.8525, | |
"step": 894 | |
}, | |
{ | |
"epoch": 1.9883365731741183, | |
"grad_norm": 0.006375422701239586, | |
"learning_rate": 0.0011234567901234568, | |
"loss": 0.9003, | |
"step": 895 | |
}, | |
{ | |
"epoch": 1.99055817828381, | |
"grad_norm": 0.005244637373834848, | |
"learning_rate": 0.0011209876543209877, | |
"loss": 0.7313, | |
"step": 896 | |
}, | |
{ | |
"epoch": 1.9927797833935017, | |
"grad_norm": 0.007450766395777464, | |
"learning_rate": 0.0011185185185185185, | |
"loss": 0.799, | |
"step": 897 | |
}, | |
{ | |
"epoch": 1.9950013885031934, | |
"grad_norm": 0.00844279583543539, | |
"learning_rate": 0.0011160493827160494, | |
"loss": 0.693, | |
"step": 898 | |
}, | |
{ | |
"epoch": 1.9972229936128854, | |
"grad_norm": 0.014142382889986038, | |
"learning_rate": 0.0011135802469135802, | |
"loss": 0.8434, | |
"step": 899 | |
}, | |
{ | |
"epoch": 1.999444598722577, | |
"grad_norm": 0.0095207579433918, | |
"learning_rate": 0.0011111111111111111, | |
"loss": 0.836, | |
"step": 900 | |
}, | |
{ | |
"epoch": 2.001666203832269, | |
"grad_norm": 0.00861104391515255, | |
"learning_rate": 0.001108641975308642, | |
"loss": 0.9819, | |
"step": 901 | |
}, | |
{ | |
"epoch": 2.0038878089419607, | |
"grad_norm": 0.01199290994554758, | |
"learning_rate": 0.001106172839506173, | |
"loss": 0.8446, | |
"step": 902 | |
}, | |
{ | |
"epoch": 2.0061094140516524, | |
"grad_norm": 0.009869737550616264, | |
"learning_rate": 0.0011037037037037037, | |
"loss": 0.7414, | |
"step": 903 | |
}, | |
{ | |
"epoch": 2.008331019161344, | |
"grad_norm": 0.006185215897858143, | |
"learning_rate": 0.0011012345679012347, | |
"loss": 0.8103, | |
"step": 904 | |
}, | |
{ | |
"epoch": 2.010552624271036, | |
"grad_norm": 0.0077135139144957066, | |
"learning_rate": 0.0010987654320987654, | |
"loss": 0.8215, | |
"step": 905 | |
}, | |
{ | |
"epoch": 2.0127742293807276, | |
"grad_norm": 0.007097164634615183, | |
"learning_rate": 0.0010962962962962964, | |
"loss": 0.9623, | |
"step": 906 | |
}, | |
{ | |
"epoch": 2.0149958344904193, | |
"grad_norm": 0.010442017577588558, | |
"learning_rate": 0.0010938271604938271, | |
"loss": 0.6859, | |
"step": 907 | |
}, | |
{ | |
"epoch": 2.017217439600111, | |
"grad_norm": 0.0054067992605268955, | |
"learning_rate": 0.001091358024691358, | |
"loss": 0.7051, | |
"step": 908 | |
}, | |
{ | |
"epoch": 2.0194390447098027, | |
"grad_norm": 0.012054132297635078, | |
"learning_rate": 0.001088888888888889, | |
"loss": 0.7256, | |
"step": 909 | |
}, | |
{ | |
"epoch": 2.0216606498194944, | |
"grad_norm": 0.0082894591614604, | |
"learning_rate": 0.0010864197530864198, | |
"loss": 0.7671, | |
"step": 910 | |
}, | |
{ | |
"epoch": 2.023882254929186, | |
"grad_norm": 0.00784540269523859, | |
"learning_rate": 0.0010839506172839507, | |
"loss": 0.9397, | |
"step": 911 | |
}, | |
{ | |
"epoch": 2.026103860038878, | |
"grad_norm": 0.007958034053444862, | |
"learning_rate": 0.0010814814814814814, | |
"loss": 0.8694, | |
"step": 912 | |
}, | |
{ | |
"epoch": 2.02832546514857, | |
"grad_norm": 0.012851761654019356, | |
"learning_rate": 0.0010790123456790124, | |
"loss": 0.9297, | |
"step": 913 | |
}, | |
{ | |
"epoch": 2.0305470702582618, | |
"grad_norm": 0.009936337359249592, | |
"learning_rate": 0.0010765432098765431, | |
"loss": 0.7811, | |
"step": 914 | |
}, | |
{ | |
"epoch": 2.0327686753679535, | |
"grad_norm": 0.008913993835449219, | |
"learning_rate": 0.001074074074074074, | |
"loss": 0.6929, | |
"step": 915 | |
}, | |
{ | |
"epoch": 2.034990280477645, | |
"grad_norm": 0.008779041469097137, | |
"learning_rate": 0.0010716049382716048, | |
"loss": 0.694, | |
"step": 916 | |
}, | |
{ | |
"epoch": 2.037211885587337, | |
"grad_norm": 0.02905157022178173, | |
"learning_rate": 0.001069135802469136, | |
"loss": 0.9935, | |
"step": 917 | |
}, | |
{ | |
"epoch": 2.0394334906970286, | |
"grad_norm": 0.004718302749097347, | |
"learning_rate": 0.0010666666666666667, | |
"loss": 0.6955, | |
"step": 918 | |
}, | |
{ | |
"epoch": 2.0416550958067203, | |
"grad_norm": 0.008632673881947994, | |
"learning_rate": 0.0010641975308641977, | |
"loss": 0.6824, | |
"step": 919 | |
}, | |
{ | |
"epoch": 2.043876700916412, | |
"grad_norm": 0.009967481717467308, | |
"learning_rate": 0.0010617283950617284, | |
"loss": 0.6765, | |
"step": 920 | |
}, | |
{ | |
"epoch": 2.0460983060261038, | |
"grad_norm": 0.013824276626110077, | |
"learning_rate": 0.0010592592592592594, | |
"loss": 0.8293, | |
"step": 921 | |
}, | |
{ | |
"epoch": 2.0483199111357955, | |
"grad_norm": 0.007447019685059786, | |
"learning_rate": 0.00105679012345679, | |
"loss": 0.6288, | |
"step": 922 | |
}, | |
{ | |
"epoch": 2.050541516245487, | |
"grad_norm": 0.01773306168615818, | |
"learning_rate": 0.001054320987654321, | |
"loss": 0.8114, | |
"step": 923 | |
}, | |
{ | |
"epoch": 2.052763121355179, | |
"grad_norm": 0.005917937494814396, | |
"learning_rate": 0.001051851851851852, | |
"loss": 0.9694, | |
"step": 924 | |
}, | |
{ | |
"epoch": 2.054984726464871, | |
"grad_norm": 0.007322025019675493, | |
"learning_rate": 0.0010493827160493827, | |
"loss": 0.692, | |
"step": 925 | |
}, | |
{ | |
"epoch": 2.057206331574563, | |
"grad_norm": 0.007124684751033783, | |
"learning_rate": 0.0010469135802469137, | |
"loss": 0.6237, | |
"step": 926 | |
}, | |
{ | |
"epoch": 2.0594279366842545, | |
"grad_norm": 0.007253418676555157, | |
"learning_rate": 0.0010444444444444444, | |
"loss": 0.6917, | |
"step": 927 | |
}, | |
{ | |
"epoch": 2.061649541793946, | |
"grad_norm": 0.011087391525506973, | |
"learning_rate": 0.0010419753086419754, | |
"loss": 0.7639, | |
"step": 928 | |
}, | |
{ | |
"epoch": 2.063871146903638, | |
"grad_norm": 0.005655494052916765, | |
"learning_rate": 0.001039506172839506, | |
"loss": 0.564, | |
"step": 929 | |
}, | |
{ | |
"epoch": 2.0660927520133296, | |
"grad_norm": 0.014756609685719013, | |
"learning_rate": 0.001037037037037037, | |
"loss": 0.971, | |
"step": 930 | |
}, | |
{ | |
"epoch": 2.0683143571230214, | |
"grad_norm": 0.010251183062791824, | |
"learning_rate": 0.0010345679012345678, | |
"loss": 0.8309, | |
"step": 931 | |
}, | |
{ | |
"epoch": 2.070535962232713, | |
"grad_norm": 0.007252897135913372, | |
"learning_rate": 0.001032098765432099, | |
"loss": 0.7928, | |
"step": 932 | |
}, | |
{ | |
"epoch": 2.072757567342405, | |
"grad_norm": 0.007240195758640766, | |
"learning_rate": 0.0010296296296296297, | |
"loss": 0.7123, | |
"step": 933 | |
}, | |
{ | |
"epoch": 2.0749791724520965, | |
"grad_norm": 0.0074341814033687115, | |
"learning_rate": 0.0010271604938271606, | |
"loss": 0.6665, | |
"step": 934 | |
}, | |
{ | |
"epoch": 2.0772007775617882, | |
"grad_norm": 0.008963247761130333, | |
"learning_rate": 0.0010246913580246914, | |
"loss": 0.9262, | |
"step": 935 | |
}, | |
{ | |
"epoch": 2.07942238267148, | |
"grad_norm": 0.009950084611773491, | |
"learning_rate": 0.0010222222222222223, | |
"loss": 0.6412, | |
"step": 936 | |
}, | |
{ | |
"epoch": 2.081643987781172, | |
"grad_norm": 0.009436005726456642, | |
"learning_rate": 0.001019753086419753, | |
"loss": 0.748, | |
"step": 937 | |
}, | |
{ | |
"epoch": 2.083865592890864, | |
"grad_norm": 0.008897105231881142, | |
"learning_rate": 0.001017283950617284, | |
"loss": 0.7382, | |
"step": 938 | |
}, | |
{ | |
"epoch": 2.0860871980005555, | |
"grad_norm": 0.008523879572749138, | |
"learning_rate": 0.001014814814814815, | |
"loss": 0.7756, | |
"step": 939 | |
}, | |
{ | |
"epoch": 2.0883088031102472, | |
"grad_norm": 0.00715121952816844, | |
"learning_rate": 0.0010123456790123457, | |
"loss": 0.7371, | |
"step": 940 | |
}, | |
{ | |
"epoch": 2.090530408219939, | |
"grad_norm": 0.007129182107746601, | |
"learning_rate": 0.0010098765432098766, | |
"loss": 0.8859, | |
"step": 941 | |
}, | |
{ | |
"epoch": 2.0927520133296307, | |
"grad_norm": 0.009402717463672161, | |
"learning_rate": 0.0010074074074074074, | |
"loss": 0.6308, | |
"step": 942 | |
}, | |
{ | |
"epoch": 2.0949736184393224, | |
"grad_norm": 0.016564862802624702, | |
"learning_rate": 0.0010049382716049383, | |
"loss": 0.6037, | |
"step": 943 | |
}, | |
{ | |
"epoch": 2.097195223549014, | |
"grad_norm": 0.009927218779921532, | |
"learning_rate": 0.001002469135802469, | |
"loss": 0.9427, | |
"step": 944 | |
}, | |
{ | |
"epoch": 2.099416828658706, | |
"grad_norm": 0.012924822978675365, | |
"learning_rate": 0.001, | |
"loss": 0.5845, | |
"step": 945 | |
}, | |
{ | |
"epoch": 2.1016384337683975, | |
"grad_norm": 0.0068692732602357864, | |
"learning_rate": 0.0009975308641975308, | |
"loss": 0.825, | |
"step": 946 | |
}, | |
{ | |
"epoch": 2.1038600388780893, | |
"grad_norm": 0.006965239066630602, | |
"learning_rate": 0.000995061728395062, | |
"loss": 0.7674, | |
"step": 947 | |
}, | |
{ | |
"epoch": 2.106081643987781, | |
"grad_norm": 0.012765421532094479, | |
"learning_rate": 0.0009925925925925927, | |
"loss": 0.7373, | |
"step": 948 | |
}, | |
{ | |
"epoch": 2.108303249097473, | |
"grad_norm": 0.00697924941778183, | |
"learning_rate": 0.0009901234567901234, | |
"loss": 0.6757, | |
"step": 949 | |
}, | |
{ | |
"epoch": 2.110524854207165, | |
"grad_norm": 0.008962864056229591, | |
"learning_rate": 0.0009876543209876543, | |
"loss": 0.7667, | |
"step": 950 | |
}, | |
{ | |
"epoch": 2.1127464593168566, | |
"grad_norm": 0.007010742090642452, | |
"learning_rate": 0.000985185185185185, | |
"loss": 0.7324, | |
"step": 951 | |
}, | |
{ | |
"epoch": 2.1149680644265483, | |
"grad_norm": 0.010677567683160305, | |
"learning_rate": 0.000982716049382716, | |
"loss": 0.9426, | |
"step": 952 | |
}, | |
{ | |
"epoch": 2.11718966953624, | |
"grad_norm": 0.006151643116027117, | |
"learning_rate": 0.0009802469135802468, | |
"loss": 0.8181, | |
"step": 953 | |
}, | |
{ | |
"epoch": 2.1194112746459317, | |
"grad_norm": 0.007978886365890503, | |
"learning_rate": 0.000977777777777778, | |
"loss": 0.5944, | |
"step": 954 | |
}, | |
{ | |
"epoch": 2.1216328797556234, | |
"grad_norm": 0.005650858860462904, | |
"learning_rate": 0.0009753086419753087, | |
"loss": 0.8202, | |
"step": 955 | |
}, | |
{ | |
"epoch": 2.123854484865315, | |
"grad_norm": 0.007249223534017801, | |
"learning_rate": 0.0009728395061728395, | |
"loss": 0.9062, | |
"step": 956 | |
}, | |
{ | |
"epoch": 2.126076089975007, | |
"grad_norm": 0.008957468904554844, | |
"learning_rate": 0.0009703703703703704, | |
"loss": 0.7101, | |
"step": 957 | |
}, | |
{ | |
"epoch": 2.1282976950846986, | |
"grad_norm": 0.005973477382212877, | |
"learning_rate": 0.0009679012345679012, | |
"loss": 0.8107, | |
"step": 958 | |
}, | |
{ | |
"epoch": 2.1305193001943903, | |
"grad_norm": 0.009565635584294796, | |
"learning_rate": 0.000965432098765432, | |
"loss": 0.8101, | |
"step": 959 | |
}, | |
{ | |
"epoch": 2.132740905304082, | |
"grad_norm": 0.010100849904119968, | |
"learning_rate": 0.0009629629629629629, | |
"loss": 0.6972, | |
"step": 960 | |
}, | |
{ | |
"epoch": 2.134962510413774, | |
"grad_norm": 0.006807432975620031, | |
"learning_rate": 0.0009604938271604937, | |
"loss": 0.5973, | |
"step": 961 | |
}, | |
{ | |
"epoch": 2.137184115523466, | |
"grad_norm": 0.006458532530814409, | |
"learning_rate": 0.0009580246913580248, | |
"loss": 0.7962, | |
"step": 962 | |
}, | |
{ | |
"epoch": 2.1394057206331576, | |
"grad_norm": 0.008561724796891212, | |
"learning_rate": 0.0009555555555555556, | |
"loss": 0.7268, | |
"step": 963 | |
}, | |
{ | |
"epoch": 2.1416273257428493, | |
"grad_norm": 0.006863057147711515, | |
"learning_rate": 0.0009530864197530865, | |
"loss": 0.7796, | |
"step": 964 | |
}, | |
{ | |
"epoch": 2.143848930852541, | |
"grad_norm": 0.007956072688102722, | |
"learning_rate": 0.0009506172839506173, | |
"loss": 0.7257, | |
"step": 965 | |
}, | |
{ | |
"epoch": 2.1460705359622327, | |
"grad_norm": 0.008310288190841675, | |
"learning_rate": 0.0009481481481481482, | |
"loss": 0.9291, | |
"step": 966 | |
}, | |
{ | |
"epoch": 2.1482921410719245, | |
"grad_norm": 0.010176541283726692, | |
"learning_rate": 0.000945679012345679, | |
"loss": 0.9228, | |
"step": 967 | |
}, | |
{ | |
"epoch": 2.150513746181616, | |
"grad_norm": 0.006355747580528259, | |
"learning_rate": 0.0009432098765432098, | |
"loss": 0.7367, | |
"step": 968 | |
}, | |
{ | |
"epoch": 2.152735351291308, | |
"grad_norm": 0.013814382255077362, | |
"learning_rate": 0.0009407407407407408, | |
"loss": 1.0439, | |
"step": 969 | |
}, | |
{ | |
"epoch": 2.1549569564009996, | |
"grad_norm": 0.007776509039103985, | |
"learning_rate": 0.0009382716049382716, | |
"loss": 0.7209, | |
"step": 970 | |
}, | |
{ | |
"epoch": 2.1571785615106913, | |
"grad_norm": 0.007975906133651733, | |
"learning_rate": 0.0009358024691358025, | |
"loss": 0.7493, | |
"step": 971 | |
}, | |
{ | |
"epoch": 2.159400166620383, | |
"grad_norm": 0.011750995181500912, | |
"learning_rate": 0.0009333333333333333, | |
"loss": 0.7508, | |
"step": 972 | |
}, | |
{ | |
"epoch": 2.1616217717300747, | |
"grad_norm": 0.006207957398146391, | |
"learning_rate": 0.0009308641975308642, | |
"loss": 0.7657, | |
"step": 973 | |
}, | |
{ | |
"epoch": 2.163843376839767, | |
"grad_norm": 0.008501831442117691, | |
"learning_rate": 0.000928395061728395, | |
"loss": 0.708, | |
"step": 974 | |
}, | |
{ | |
"epoch": 2.1660649819494586, | |
"grad_norm": 0.00984508078545332, | |
"learning_rate": 0.0009259259259259259, | |
"loss": 0.7141, | |
"step": 975 | |
}, | |
{ | |
"epoch": 2.1682865870591503, | |
"grad_norm": 0.009435421787202358, | |
"learning_rate": 0.0009234567901234567, | |
"loss": 0.7219, | |
"step": 976 | |
}, | |
{ | |
"epoch": 2.170508192168842, | |
"grad_norm": 0.008115294389426708, | |
"learning_rate": 0.0009209876543209878, | |
"loss": 0.8451, | |
"step": 977 | |
}, | |
{ | |
"epoch": 2.1727297972785338, | |
"grad_norm": 0.00791158340871334, | |
"learning_rate": 0.0009185185185185186, | |
"loss": 0.7585, | |
"step": 978 | |
}, | |
{ | |
"epoch": 2.1749514023882255, | |
"grad_norm": 0.009326069615781307, | |
"learning_rate": 0.0009160493827160494, | |
"loss": 0.8518, | |
"step": 979 | |
}, | |
{ | |
"epoch": 2.177173007497917, | |
"grad_norm": 0.009076416492462158, | |
"learning_rate": 0.0009135802469135803, | |
"loss": 0.822, | |
"step": 980 | |
}, | |
{ | |
"epoch": 2.179394612607609, | |
"grad_norm": 0.006390303373336792, | |
"learning_rate": 0.0009111111111111111, | |
"loss": 0.756, | |
"step": 981 | |
}, | |
{ | |
"epoch": 2.1816162177173006, | |
"grad_norm": 0.008008323609828949, | |
"learning_rate": 0.000908641975308642, | |
"loss": 0.7159, | |
"step": 982 | |
}, | |
{ | |
"epoch": 2.1838378228269923, | |
"grad_norm": 0.011111398227512836, | |
"learning_rate": 0.0009061728395061728, | |
"loss": 0.8722, | |
"step": 983 | |
}, | |
{ | |
"epoch": 2.186059427936684, | |
"grad_norm": 0.012215813621878624, | |
"learning_rate": 0.0009037037037037038, | |
"loss": 0.8533, | |
"step": 984 | |
}, | |
{ | |
"epoch": 2.1882810330463762, | |
"grad_norm": 0.005620912183076143, | |
"learning_rate": 0.0009012345679012346, | |
"loss": 0.6836, | |
"step": 985 | |
}, | |
{ | |
"epoch": 2.190502638156068, | |
"grad_norm": 0.011297494173049927, | |
"learning_rate": 0.0008987654320987655, | |
"loss": 0.7197, | |
"step": 986 | |
}, | |
{ | |
"epoch": 2.1927242432657597, | |
"grad_norm": 0.008543256670236588, | |
"learning_rate": 0.0008962962962962963, | |
"loss": 0.8217, | |
"step": 987 | |
}, | |
{ | |
"epoch": 2.1949458483754514, | |
"grad_norm": 0.008594110608100891, | |
"learning_rate": 0.0008938271604938271, | |
"loss": 0.7988, | |
"step": 988 | |
}, | |
{ | |
"epoch": 2.197167453485143, | |
"grad_norm": 0.007539823185652494, | |
"learning_rate": 0.000891358024691358, | |
"loss": 0.6496, | |
"step": 989 | |
}, | |
{ | |
"epoch": 2.199389058594835, | |
"grad_norm": 0.008808733895421028, | |
"learning_rate": 0.0008888888888888888, | |
"loss": 0.6669, | |
"step": 990 | |
}, | |
{ | |
"epoch": 2.2016106637045265, | |
"grad_norm": 0.00771710928529501, | |
"learning_rate": 0.0008864197530864197, | |
"loss": 0.798, | |
"step": 991 | |
}, | |
{ | |
"epoch": 2.2038322688142182, | |
"grad_norm": 0.01634322665631771, | |
"learning_rate": 0.0008839506172839507, | |
"loss": 0.7739, | |
"step": 992 | |
}, | |
{ | |
"epoch": 2.20605387392391, | |
"grad_norm": 0.0062980130314826965, | |
"learning_rate": 0.0008814814814814816, | |
"loss": 0.6469, | |
"step": 993 | |
}, | |
{ | |
"epoch": 2.2082754790336017, | |
"grad_norm": 0.011210683733224869, | |
"learning_rate": 0.0008790123456790124, | |
"loss": 0.736, | |
"step": 994 | |
}, | |
{ | |
"epoch": 2.2104970841432934, | |
"grad_norm": 0.009300244972109795, | |
"learning_rate": 0.0008765432098765433, | |
"loss": 0.7872, | |
"step": 995 | |
}, | |
{ | |
"epoch": 2.212718689252985, | |
"grad_norm": 0.004812668543308973, | |
"learning_rate": 0.0008740740740740741, | |
"loss": 0.7092, | |
"step": 996 | |
}, | |
{ | |
"epoch": 2.214940294362677, | |
"grad_norm": 0.00885671004652977, | |
"learning_rate": 0.0008716049382716049, | |
"loss": 0.5941, | |
"step": 997 | |
}, | |
{ | |
"epoch": 2.217161899472369, | |
"grad_norm": 0.011171343736350536, | |
"learning_rate": 0.0008691358024691358, | |
"loss": 0.7145, | |
"step": 998 | |
}, | |
{ | |
"epoch": 2.2193835045820607, | |
"grad_norm": 0.00593544589355588, | |
"learning_rate": 0.0008666666666666666, | |
"loss": 0.7192, | |
"step": 999 | |
}, | |
{ | |
"epoch": 2.2216051096917524, | |
"grad_norm": 0.007853595539927483, | |
"learning_rate": 0.0008641975308641976, | |
"loss": 0.8245, | |
"step": 1000 | |
}, | |
{ | |
"epoch": 2.223826714801444, | |
"grad_norm": 0.006899894680827856, | |
"learning_rate": 0.0008617283950617284, | |
"loss": 0.6957, | |
"step": 1001 | |
}, | |
{ | |
"epoch": 2.226048319911136, | |
"grad_norm": 0.007371754385530949, | |
"learning_rate": 0.0008592592592592593, | |
"loss": 0.8765, | |
"step": 1002 | |
}, | |
{ | |
"epoch": 2.2282699250208275, | |
"grad_norm": 0.007087046280503273, | |
"learning_rate": 0.0008567901234567901, | |
"loss": 0.9632, | |
"step": 1003 | |
}, | |
{ | |
"epoch": 2.2304915301305193, | |
"grad_norm": 0.008594956248998642, | |
"learning_rate": 0.000854320987654321, | |
"loss": 0.6357, | |
"step": 1004 | |
}, | |
{ | |
"epoch": 2.232713135240211, | |
"grad_norm": 0.00901806727051735, | |
"learning_rate": 0.0008518518518518518, | |
"loss": 0.8163, | |
"step": 1005 | |
}, | |
{ | |
"epoch": 2.2349347403499027, | |
"grad_norm": 0.010117881000041962, | |
"learning_rate": 0.0008493827160493826, | |
"loss": 0.7334, | |
"step": 1006 | |
}, | |
{ | |
"epoch": 2.2371563454595944, | |
"grad_norm": 0.006173667497932911, | |
"learning_rate": 0.0008469135802469137, | |
"loss": 0.7894, | |
"step": 1007 | |
}, | |
{ | |
"epoch": 2.239377950569286, | |
"grad_norm": 0.006634249817579985, | |
"learning_rate": 0.0008444444444444445, | |
"loss": 0.8352, | |
"step": 1008 | |
}, | |
{ | |
"epoch": 2.2415995556789783, | |
"grad_norm": 0.009885517880320549, | |
"learning_rate": 0.0008419753086419754, | |
"loss": 0.7064, | |
"step": 1009 | |
}, | |
{ | |
"epoch": 2.24382116078867, | |
"grad_norm": 0.006060949992388487, | |
"learning_rate": 0.0008395061728395062, | |
"loss": 0.6139, | |
"step": 1010 | |
}, | |
{ | |
"epoch": 2.2460427658983617, | |
"grad_norm": 0.010281536728143692, | |
"learning_rate": 0.0008370370370370371, | |
"loss": 0.7734, | |
"step": 1011 | |
}, | |
{ | |
"epoch": 2.2482643710080534, | |
"grad_norm": 0.009147711098194122, | |
"learning_rate": 0.0008345679012345679, | |
"loss": 0.7489, | |
"step": 1012 | |
}, | |
{ | |
"epoch": 2.250485976117745, | |
"grad_norm": 0.00839692261070013, | |
"learning_rate": 0.0008320987654320988, | |
"loss": 0.7873, | |
"step": 1013 | |
}, | |
{ | |
"epoch": 2.252707581227437, | |
"grad_norm": 0.011448378674685955, | |
"learning_rate": 0.0008296296296296296, | |
"loss": 0.8176, | |
"step": 1014 | |
}, | |
{ | |
"epoch": 2.2549291863371286, | |
"grad_norm": 0.009830589406192303, | |
"learning_rate": 0.0008271604938271605, | |
"loss": 0.7302, | |
"step": 1015 | |
}, | |
{ | |
"epoch": 2.2571507914468203, | |
"grad_norm": 0.00909352395683527, | |
"learning_rate": 0.0008246913580246914, | |
"loss": 0.6294, | |
"step": 1016 | |
}, | |
{ | |
"epoch": 2.259372396556512, | |
"grad_norm": 0.007446724455803633, | |
"learning_rate": 0.0008222222222222222, | |
"loss": 0.7363, | |
"step": 1017 | |
}, | |
{ | |
"epoch": 2.2615940016662037, | |
"grad_norm": 0.0077172317542135715, | |
"learning_rate": 0.0008197530864197531, | |
"loss": 0.8876, | |
"step": 1018 | |
}, | |
{ | |
"epoch": 2.2638156067758954, | |
"grad_norm": 0.00723393214866519, | |
"learning_rate": 0.0008172839506172839, | |
"loss": 0.8141, | |
"step": 1019 | |
}, | |
{ | |
"epoch": 2.266037211885587, | |
"grad_norm": 0.008728005923330784, | |
"learning_rate": 0.0008148148148148148, | |
"loss": 0.6813, | |
"step": 1020 | |
}, | |
{ | |
"epoch": 2.268258816995279, | |
"grad_norm": 0.006495292764157057, | |
"learning_rate": 0.0008123456790123456, | |
"loss": 0.6796, | |
"step": 1021 | |
}, | |
{ | |
"epoch": 2.270480422104971, | |
"grad_norm": 0.011490698903799057, | |
"learning_rate": 0.0008098765432098767, | |
"loss": 0.7595, | |
"step": 1022 | |
}, | |
{ | |
"epoch": 2.2727020272146627, | |
"grad_norm": 0.009576416574418545, | |
"learning_rate": 0.0008074074074074075, | |
"loss": 0.8657, | |
"step": 1023 | |
}, | |
{ | |
"epoch": 2.2749236323243545, | |
"grad_norm": 0.010087347589433193, | |
"learning_rate": 0.0008049382716049384, | |
"loss": 0.6814, | |
"step": 1024 | |
}, | |
{ | |
"epoch": 2.277145237434046, | |
"grad_norm": 0.0059828865341842175, | |
"learning_rate": 0.0008024691358024692, | |
"loss": 0.8354, | |
"step": 1025 | |
}, | |
{ | |
"epoch": 2.279366842543738, | |
"grad_norm": 0.007989997044205666, | |
"learning_rate": 0.0008, | |
"loss": 0.9329, | |
"step": 1026 | |
}, | |
{ | |
"epoch": 2.2815884476534296, | |
"grad_norm": 0.007406935561448336, | |
"learning_rate": 0.0007975308641975309, | |
"loss": 0.8708, | |
"step": 1027 | |
}, | |
{ | |
"epoch": 2.2838100527631213, | |
"grad_norm": 0.009634488262236118, | |
"learning_rate": 0.0007950617283950617, | |
"loss": 0.7884, | |
"step": 1028 | |
}, | |
{ | |
"epoch": 2.286031657872813, | |
"grad_norm": 0.012707880698144436, | |
"learning_rate": 0.0007925925925925926, | |
"loss": 0.8503, | |
"step": 1029 | |
}, | |
{ | |
"epoch": 2.2882532629825048, | |
"grad_norm": 0.006519515533000231, | |
"learning_rate": 0.0007901234567901235, | |
"loss": 0.6777, | |
"step": 1030 | |
}, | |
{ | |
"epoch": 2.2904748680921965, | |
"grad_norm": 0.007584378123283386, | |
"learning_rate": 0.0007876543209876544, | |
"loss": 0.9648, | |
"step": 1031 | |
}, | |
{ | |
"epoch": 2.292696473201888, | |
"grad_norm": 0.009531829506158829, | |
"learning_rate": 0.0007851851851851852, | |
"loss": 0.8318, | |
"step": 1032 | |
}, | |
{ | |
"epoch": 2.2949180783115803, | |
"grad_norm": 0.008382387459278107, | |
"learning_rate": 0.000782716049382716, | |
"loss": 0.7734, | |
"step": 1033 | |
}, | |
{ | |
"epoch": 2.297139683421272, | |
"grad_norm": 0.02264496311545372, | |
"learning_rate": 0.0007802469135802469, | |
"loss": 0.9009, | |
"step": 1034 | |
}, | |
{ | |
"epoch": 2.2993612885309638, | |
"grad_norm": 0.011763072572648525, | |
"learning_rate": 0.0007777777777777777, | |
"loss": 0.7595, | |
"step": 1035 | |
}, | |
{ | |
"epoch": 2.3015828936406555, | |
"grad_norm": 0.01221343595534563, | |
"learning_rate": 0.0007753086419753086, | |
"loss": 0.8407, | |
"step": 1036 | |
}, | |
{ | |
"epoch": 2.303804498750347, | |
"grad_norm": 0.00810176320374012, | |
"learning_rate": 0.0007728395061728396, | |
"loss": 0.5631, | |
"step": 1037 | |
}, | |
{ | |
"epoch": 2.306026103860039, | |
"grad_norm": 0.011265031062066555, | |
"learning_rate": 0.0007703703703703705, | |
"loss": 0.9348, | |
"step": 1038 | |
}, | |
{ | |
"epoch": 2.3082477089697306, | |
"grad_norm": 0.010535339824855328, | |
"learning_rate": 0.0007679012345679013, | |
"loss": 0.6664, | |
"step": 1039 | |
}, | |
{ | |
"epoch": 2.3104693140794224, | |
"grad_norm": 0.008275596424937248, | |
"learning_rate": 0.0007654320987654322, | |
"loss": 0.8809, | |
"step": 1040 | |
}, | |
{ | |
"epoch": 2.312690919189114, | |
"grad_norm": 0.009080667048692703, | |
"learning_rate": 0.000762962962962963, | |
"loss": 0.7974, | |
"step": 1041 | |
}, | |
{ | |
"epoch": 2.314912524298806, | |
"grad_norm": 0.006603560876101255, | |
"learning_rate": 0.0007604938271604939, | |
"loss": 0.6765, | |
"step": 1042 | |
}, | |
{ | |
"epoch": 2.3171341294084975, | |
"grad_norm": 0.009494476020336151, | |
"learning_rate": 0.0007580246913580247, | |
"loss": 0.6897, | |
"step": 1043 | |
}, | |
{ | |
"epoch": 2.319355734518189, | |
"grad_norm": 0.0070472098886966705, | |
"learning_rate": 0.0007555555555555555, | |
"loss": 0.9325, | |
"step": 1044 | |
}, | |
{ | |
"epoch": 2.321577339627881, | |
"grad_norm": 0.007084066979587078, | |
"learning_rate": 0.0007530864197530865, | |
"loss": 0.7297, | |
"step": 1045 | |
}, | |
{ | |
"epoch": 2.3237989447375726, | |
"grad_norm": 0.010063939727842808, | |
"learning_rate": 0.0007506172839506173, | |
"loss": 1.0324, | |
"step": 1046 | |
}, | |
{ | |
"epoch": 2.326020549847265, | |
"grad_norm": 0.009437466971576214, | |
"learning_rate": 0.0007481481481481482, | |
"loss": 0.845, | |
"step": 1047 | |
}, | |
{ | |
"epoch": 2.3282421549569565, | |
"grad_norm": 0.006999650038778782, | |
"learning_rate": 0.000745679012345679, | |
"loss": 0.7047, | |
"step": 1048 | |
}, | |
{ | |
"epoch": 2.3304637600666482, | |
"grad_norm": 0.008729071356356144, | |
"learning_rate": 0.0007432098765432099, | |
"loss": 0.7548, | |
"step": 1049 | |
}, | |
{ | |
"epoch": 2.33268536517634, | |
"grad_norm": 0.009390546008944511, | |
"learning_rate": 0.0007407407407407407, | |
"loss": 0.7937, | |
"step": 1050 | |
}, | |
{ | |
"epoch": 2.3349069702860317, | |
"grad_norm": 0.008248205296695232, | |
"learning_rate": 0.0007382716049382717, | |
"loss": 0.6691, | |
"step": 1051 | |
}, | |
{ | |
"epoch": 2.3371285753957234, | |
"grad_norm": 0.008162553422152996, | |
"learning_rate": 0.0007358024691358025, | |
"loss": 0.8213, | |
"step": 1052 | |
}, | |
{ | |
"epoch": 2.339350180505415, | |
"grad_norm": 0.005831165239214897, | |
"learning_rate": 0.0007333333333333333, | |
"loss": 0.8251, | |
"step": 1053 | |
}, | |
{ | |
"epoch": 2.341571785615107, | |
"grad_norm": 0.00917159765958786, | |
"learning_rate": 0.0007308641975308643, | |
"loss": 0.7395, | |
"step": 1054 | |
}, | |
{ | |
"epoch": 2.3437933907247985, | |
"grad_norm": 0.007398424670100212, | |
"learning_rate": 0.0007283950617283951, | |
"loss": 0.7261, | |
"step": 1055 | |
}, | |
{ | |
"epoch": 2.3460149958344902, | |
"grad_norm": 0.012491940520703793, | |
"learning_rate": 0.000725925925925926, | |
"loss": 0.84, | |
"step": 1056 | |
}, | |
{ | |
"epoch": 2.3482366009441824, | |
"grad_norm": 0.0047965748235583305, | |
"learning_rate": 0.0007234567901234568, | |
"loss": 0.5854, | |
"step": 1057 | |
}, | |
{ | |
"epoch": 2.350458206053874, | |
"grad_norm": 0.00818257499486208, | |
"learning_rate": 0.0007209876543209877, | |
"loss": 0.7076, | |
"step": 1058 | |
}, | |
{ | |
"epoch": 2.352679811163566, | |
"grad_norm": 0.013595851138234138, | |
"learning_rate": 0.0007185185185185185, | |
"loss": 0.822, | |
"step": 1059 | |
}, | |
{ | |
"epoch": 2.3549014162732576, | |
"grad_norm": 0.013108041137456894, | |
"learning_rate": 0.0007160493827160494, | |
"loss": 0.8705, | |
"step": 1060 | |
}, | |
{ | |
"epoch": 2.3571230213829493, | |
"grad_norm": 0.008615655824542046, | |
"learning_rate": 0.0007135802469135802, | |
"loss": 0.5893, | |
"step": 1061 | |
}, | |
{ | |
"epoch": 2.359344626492641, | |
"grad_norm": 0.0071796211414039135, | |
"learning_rate": 0.0007111111111111111, | |
"loss": 0.6746, | |
"step": 1062 | |
}, | |
{ | |
"epoch": 2.3615662316023327, | |
"grad_norm": 0.009801715612411499, | |
"learning_rate": 0.000708641975308642, | |
"loss": 0.7323, | |
"step": 1063 | |
}, | |
{ | |
"epoch": 2.3637878367120244, | |
"grad_norm": 0.006935875862836838, | |
"learning_rate": 0.0007061728395061728, | |
"loss": 0.6184, | |
"step": 1064 | |
}, | |
{ | |
"epoch": 2.366009441821716, | |
"grad_norm": 0.009273690171539783, | |
"learning_rate": 0.0007037037037037037, | |
"loss": 0.7003, | |
"step": 1065 | |
}, | |
{ | |
"epoch": 2.368231046931408, | |
"grad_norm": 0.008863688446581364, | |
"learning_rate": 0.0007012345679012346, | |
"loss": 0.906, | |
"step": 1066 | |
}, | |
{ | |
"epoch": 2.3704526520410996, | |
"grad_norm": 0.00721506355330348, | |
"learning_rate": 0.0006987654320987655, | |
"loss": 0.69, | |
"step": 1067 | |
}, | |
{ | |
"epoch": 2.3726742571507913, | |
"grad_norm": 0.010040674358606339, | |
"learning_rate": 0.0006962962962962963, | |
"loss": 0.7415, | |
"step": 1068 | |
}, | |
{ | |
"epoch": 2.374895862260483, | |
"grad_norm": 0.04709119722247124, | |
"learning_rate": 0.0006938271604938273, | |
"loss": 0.8354, | |
"step": 1069 | |
}, | |
{ | |
"epoch": 2.3771174673701747, | |
"grad_norm": 0.009449060074985027, | |
"learning_rate": 0.0006913580246913581, | |
"loss": 0.8285, | |
"step": 1070 | |
}, | |
{ | |
"epoch": 2.379339072479867, | |
"grad_norm": 0.009565912187099457, | |
"learning_rate": 0.000688888888888889, | |
"loss": 0.6402, | |
"step": 1071 | |
}, | |
{ | |
"epoch": 2.3815606775895586, | |
"grad_norm": 0.010059806518256664, | |
"learning_rate": 0.0006864197530864198, | |
"loss": 0.9116, | |
"step": 1072 | |
}, | |
{ | |
"epoch": 2.3837822826992503, | |
"grad_norm": 0.008675451390445232, | |
"learning_rate": 0.0006839506172839506, | |
"loss": 0.7108, | |
"step": 1073 | |
}, | |
{ | |
"epoch": 2.386003887808942, | |
"grad_norm": 0.011516880244016647, | |
"learning_rate": 0.0006814814814814815, | |
"loss": 0.7315, | |
"step": 1074 | |
}, | |
{ | |
"epoch": 2.3882254929186337, | |
"grad_norm": 0.0067232525907456875, | |
"learning_rate": 0.0006790123456790123, | |
"loss": 0.7973, | |
"step": 1075 | |
}, | |
{ | |
"epoch": 2.3904470980283254, | |
"grad_norm": 0.007842687889933586, | |
"learning_rate": 0.0006765432098765432, | |
"loss": 0.7277, | |
"step": 1076 | |
}, | |
{ | |
"epoch": 2.392668703138017, | |
"grad_norm": 0.01157988142222166, | |
"learning_rate": 0.0006740740740740741, | |
"loss": 0.7497, | |
"step": 1077 | |
}, | |
{ | |
"epoch": 2.394890308247709, | |
"grad_norm": 0.007398506626486778, | |
"learning_rate": 0.000671604938271605, | |
"loss": 0.827, | |
"step": 1078 | |
}, | |
{ | |
"epoch": 2.3971119133574006, | |
"grad_norm": 0.005851605907082558, | |
"learning_rate": 0.0006691358024691358, | |
"loss": 0.593, | |
"step": 1079 | |
}, | |
{ | |
"epoch": 2.3993335184670923, | |
"grad_norm": 0.006889878306537867, | |
"learning_rate": 0.0006666666666666666, | |
"loss": 0.6059, | |
"step": 1080 | |
}, | |
{ | |
"epoch": 2.4015551235767845, | |
"grad_norm": 0.011620272882282734, | |
"learning_rate": 0.0006641975308641976, | |
"loss": 0.8598, | |
"step": 1081 | |
}, | |
{ | |
"epoch": 2.403776728686476, | |
"grad_norm": 0.009120738133788109, | |
"learning_rate": 0.0006617283950617284, | |
"loss": 0.7034, | |
"step": 1082 | |
}, | |
{ | |
"epoch": 2.405998333796168, | |
"grad_norm": 0.012162731029093266, | |
"learning_rate": 0.0006592592592592593, | |
"loss": 0.6221, | |
"step": 1083 | |
}, | |
{ | |
"epoch": 2.4082199389058596, | |
"grad_norm": 0.00817908439785242, | |
"learning_rate": 0.0006567901234567901, | |
"loss": 0.8944, | |
"step": 1084 | |
}, | |
{ | |
"epoch": 2.4104415440155513, | |
"grad_norm": 0.008096316829323769, | |
"learning_rate": 0.000654320987654321, | |
"loss": 0.7351, | |
"step": 1085 | |
}, | |
{ | |
"epoch": 2.412663149125243, | |
"grad_norm": 0.011929173953831196, | |
"learning_rate": 0.0006518518518518518, | |
"loss": 0.6995, | |
"step": 1086 | |
}, | |
{ | |
"epoch": 2.4148847542349348, | |
"grad_norm": 0.009247666224837303, | |
"learning_rate": 0.0006493827160493827, | |
"loss": 0.7163, | |
"step": 1087 | |
}, | |
{ | |
"epoch": 2.4171063593446265, | |
"grad_norm": 0.00713922968134284, | |
"learning_rate": 0.0006469135802469136, | |
"loss": 0.8009, | |
"step": 1088 | |
}, | |
{ | |
"epoch": 2.419327964454318, | |
"grad_norm": 0.008218400180339813, | |
"learning_rate": 0.0006444444444444444, | |
"loss": 0.6624, | |
"step": 1089 | |
}, | |
{ | |
"epoch": 2.42154956956401, | |
"grad_norm": 0.014460783451795578, | |
"learning_rate": 0.0006419753086419753, | |
"loss": 0.7123, | |
"step": 1090 | |
}, | |
{ | |
"epoch": 2.4237711746737016, | |
"grad_norm": 0.010817009955644608, | |
"learning_rate": 0.0006395061728395061, | |
"loss": 0.7479, | |
"step": 1091 | |
}, | |
{ | |
"epoch": 2.4259927797833933, | |
"grad_norm": 0.005958953872323036, | |
"learning_rate": 0.0006370370370370371, | |
"loss": 0.7389, | |
"step": 1092 | |
}, | |
{ | |
"epoch": 2.428214384893085, | |
"grad_norm": 0.007965938188135624, | |
"learning_rate": 0.0006345679012345679, | |
"loss": 0.7421, | |
"step": 1093 | |
}, | |
{ | |
"epoch": 2.4304359900027768, | |
"grad_norm": 0.011952990666031837, | |
"learning_rate": 0.0006320987654320988, | |
"loss": 0.693, | |
"step": 1094 | |
}, | |
{ | |
"epoch": 2.432657595112469, | |
"grad_norm": 0.006782431621104479, | |
"learning_rate": 0.0006296296296296296, | |
"loss": 0.7593, | |
"step": 1095 | |
}, | |
{ | |
"epoch": 2.4348792002221606, | |
"grad_norm": 0.007503021042793989, | |
"learning_rate": 0.0006271604938271606, | |
"loss": 0.9018, | |
"step": 1096 | |
}, | |
{ | |
"epoch": 2.4371008053318524, | |
"grad_norm": 0.009170000441372395, | |
"learning_rate": 0.0006246913580246914, | |
"loss": 0.779, | |
"step": 1097 | |
}, | |
{ | |
"epoch": 2.439322410441544, | |
"grad_norm": 0.010611280798912048, | |
"learning_rate": 0.0006222222222222223, | |
"loss": 0.7442, | |
"step": 1098 | |
}, | |
{ | |
"epoch": 2.441544015551236, | |
"grad_norm": 0.007238518912345171, | |
"learning_rate": 0.0006197530864197531, | |
"loss": 0.8331, | |
"step": 1099 | |
}, | |
{ | |
"epoch": 2.4437656206609275, | |
"grad_norm": 0.007917318493127823, | |
"learning_rate": 0.0006172839506172839, | |
"loss": 0.6429, | |
"step": 1100 | |
}, | |
{ | |
"epoch": 2.4459872257706192, | |
"grad_norm": 0.007869457826018333, | |
"learning_rate": 0.0006148148148148148, | |
"loss": 0.7167, | |
"step": 1101 | |
}, | |
{ | |
"epoch": 2.448208830880311, | |
"grad_norm": 0.010885112918913364, | |
"learning_rate": 0.0006123456790123456, | |
"loss": 0.9405, | |
"step": 1102 | |
}, | |
{ | |
"epoch": 2.4504304359900027, | |
"grad_norm": 0.005701167043298483, | |
"learning_rate": 0.0006098765432098766, | |
"loss": 0.6204, | |
"step": 1103 | |
}, | |
{ | |
"epoch": 2.4526520410996944, | |
"grad_norm": 0.008726618252694607, | |
"learning_rate": 0.0006074074074074074, | |
"loss": 0.6938, | |
"step": 1104 | |
}, | |
{ | |
"epoch": 2.4548736462093865, | |
"grad_norm": 0.009071066044270992, | |
"learning_rate": 0.0006049382716049383, | |
"loss": 0.7146, | |
"step": 1105 | |
}, | |
{ | |
"epoch": 2.4570952513190782, | |
"grad_norm": 0.00839879922568798, | |
"learning_rate": 0.0006024691358024691, | |
"loss": 0.763, | |
"step": 1106 | |
}, | |
{ | |
"epoch": 2.45931685642877, | |
"grad_norm": 0.010271345265209675, | |
"learning_rate": 0.0006000000000000001, | |
"loss": 0.827, | |
"step": 1107 | |
}, | |
{ | |
"epoch": 2.4615384615384617, | |
"grad_norm": 0.005833013448864222, | |
"learning_rate": 0.0005975308641975309, | |
"loss": 0.8062, | |
"step": 1108 | |
}, | |
{ | |
"epoch": 2.4637600666481534, | |
"grad_norm": 0.01235662680119276, | |
"learning_rate": 0.0005950617283950617, | |
"loss": 0.6257, | |
"step": 1109 | |
}, | |
{ | |
"epoch": 2.465981671757845, | |
"grad_norm": 0.006829334422945976, | |
"learning_rate": 0.0005925925925925926, | |
"loss": 0.8064, | |
"step": 1110 | |
}, | |
{ | |
"epoch": 2.468203276867537, | |
"grad_norm": 0.010129845701158047, | |
"learning_rate": 0.0005901234567901235, | |
"loss": 0.665, | |
"step": 1111 | |
}, | |
{ | |
"epoch": 2.4704248819772285, | |
"grad_norm": 0.009496810846030712, | |
"learning_rate": 0.0005876543209876544, | |
"loss": 0.9311, | |
"step": 1112 | |
}, | |
{ | |
"epoch": 2.4726464870869203, | |
"grad_norm": 0.007677487563341856, | |
"learning_rate": 0.0005851851851851852, | |
"loss": 1.0702, | |
"step": 1113 | |
}, | |
{ | |
"epoch": 2.474868092196612, | |
"grad_norm": 0.006991180591285229, | |
"learning_rate": 0.0005827160493827161, | |
"loss": 1.0201, | |
"step": 1114 | |
}, | |
{ | |
"epoch": 2.4770896973063037, | |
"grad_norm": 0.008119228295981884, | |
"learning_rate": 0.0005802469135802469, | |
"loss": 0.8175, | |
"step": 1115 | |
}, | |
{ | |
"epoch": 2.4793113024159954, | |
"grad_norm": 0.006982423830777407, | |
"learning_rate": 0.0005777777777777778, | |
"loss": 0.6665, | |
"step": 1116 | |
}, | |
{ | |
"epoch": 2.481532907525687, | |
"grad_norm": 0.006513761822134256, | |
"learning_rate": 0.0005753086419753086, | |
"loss": 0.7288, | |
"step": 1117 | |
}, | |
{ | |
"epoch": 2.483754512635379, | |
"grad_norm": 0.009115753695368767, | |
"learning_rate": 0.0005728395061728395, | |
"loss": 0.8318, | |
"step": 1118 | |
}, | |
{ | |
"epoch": 2.485976117745071, | |
"grad_norm": 0.007586904335767031, | |
"learning_rate": 0.0005703703703703704, | |
"loss": 0.725, | |
"step": 1119 | |
}, | |
{ | |
"epoch": 2.4881977228547627, | |
"grad_norm": 0.006441600155085325, | |
"learning_rate": 0.0005679012345679012, | |
"loss": 0.7512, | |
"step": 1120 | |
}, | |
{ | |
"epoch": 2.4904193279644544, | |
"grad_norm": 0.008771124295890331, | |
"learning_rate": 0.0005654320987654321, | |
"loss": 0.7566, | |
"step": 1121 | |
}, | |
{ | |
"epoch": 2.492640933074146, | |
"grad_norm": 0.010517198592424393, | |
"learning_rate": 0.000562962962962963, | |
"loss": 0.7906, | |
"step": 1122 | |
}, | |
{ | |
"epoch": 2.494862538183838, | |
"grad_norm": 0.009617543779313564, | |
"learning_rate": 0.0005604938271604939, | |
"loss": 1.0293, | |
"step": 1123 | |
}, | |
{ | |
"epoch": 2.4970841432935296, | |
"grad_norm": 0.009210485965013504, | |
"learning_rate": 0.0005580246913580247, | |
"loss": 0.881, | |
"step": 1124 | |
}, | |
{ | |
"epoch": 2.4993057484032213, | |
"grad_norm": 0.005644843447953463, | |
"learning_rate": 0.0005555555555555556, | |
"loss": 0.8017, | |
"step": 1125 | |
}, | |
{ | |
"epoch": 2.501527353512913, | |
"grad_norm": 0.010391274467110634, | |
"learning_rate": 0.0005530864197530865, | |
"loss": 0.8173, | |
"step": 1126 | |
}, | |
{ | |
"epoch": 2.5037489586226047, | |
"grad_norm": 0.007817761972546577, | |
"learning_rate": 0.0005506172839506173, | |
"loss": 0.6681, | |
"step": 1127 | |
}, | |
{ | |
"epoch": 2.5059705637322964, | |
"grad_norm": 0.008231202140450478, | |
"learning_rate": 0.0005481481481481482, | |
"loss": 0.6618, | |
"step": 1128 | |
}, | |
{ | |
"epoch": 2.5081921688419886, | |
"grad_norm": 0.008874166756868362, | |
"learning_rate": 0.000545679012345679, | |
"loss": 0.6056, | |
"step": 1129 | |
}, | |
{ | |
"epoch": 2.5104137739516803, | |
"grad_norm": 0.01048712246119976, | |
"learning_rate": 0.0005432098765432099, | |
"loss": 0.7019, | |
"step": 1130 | |
}, | |
{ | |
"epoch": 2.512635379061372, | |
"grad_norm": 0.008290881291031837, | |
"learning_rate": 0.0005407407407407407, | |
"loss": 0.6494, | |
"step": 1131 | |
}, | |
{ | |
"epoch": 2.5148569841710637, | |
"grad_norm": 0.007482048589736223, | |
"learning_rate": 0.0005382716049382716, | |
"loss": 0.8287, | |
"step": 1132 | |
}, | |
{ | |
"epoch": 2.5170785892807555, | |
"grad_norm": 0.008649013936519623, | |
"learning_rate": 0.0005358024691358024, | |
"loss": 0.8579, | |
"step": 1133 | |
}, | |
{ | |
"epoch": 2.519300194390447, | |
"grad_norm": 0.008483688347041607, | |
"learning_rate": 0.0005333333333333334, | |
"loss": 0.6852, | |
"step": 1134 | |
}, | |
{ | |
"epoch": 2.521521799500139, | |
"grad_norm": 0.008373193442821503, | |
"learning_rate": 0.0005308641975308642, | |
"loss": 0.7122, | |
"step": 1135 | |
}, | |
{ | |
"epoch": 2.5237434046098306, | |
"grad_norm": 0.007378621492534876, | |
"learning_rate": 0.000528395061728395, | |
"loss": 0.8437, | |
"step": 1136 | |
}, | |
{ | |
"epoch": 2.5259650097195223, | |
"grad_norm": 0.008011176250874996, | |
"learning_rate": 0.000525925925925926, | |
"loss": 0.5955, | |
"step": 1137 | |
}, | |
{ | |
"epoch": 2.528186614829214, | |
"grad_norm": 0.009291508235037327, | |
"learning_rate": 0.0005234567901234568, | |
"loss": 0.8159, | |
"step": 1138 | |
}, | |
{ | |
"epoch": 2.5304082199389057, | |
"grad_norm": 0.008907923474907875, | |
"learning_rate": 0.0005209876543209877, | |
"loss": 0.8137, | |
"step": 1139 | |
}, | |
{ | |
"epoch": 2.5326298250485975, | |
"grad_norm": 0.008879524655640125, | |
"learning_rate": 0.0005185185185185185, | |
"loss": 0.678, | |
"step": 1140 | |
}, | |
{ | |
"epoch": 2.534851430158289, | |
"grad_norm": 0.00674105528742075, | |
"learning_rate": 0.0005160493827160495, | |
"loss": 0.8209, | |
"step": 1141 | |
}, | |
{ | |
"epoch": 2.537073035267981, | |
"grad_norm": 0.008492392487823963, | |
"learning_rate": 0.0005135802469135803, | |
"loss": 0.8017, | |
"step": 1142 | |
}, | |
{ | |
"epoch": 2.5392946403776726, | |
"grad_norm": 0.012028438970446587, | |
"learning_rate": 0.0005111111111111112, | |
"loss": 0.8741, | |
"step": 1143 | |
}, | |
{ | |
"epoch": 2.5415162454873648, | |
"grad_norm": 0.012991204857826233, | |
"learning_rate": 0.000508641975308642, | |
"loss": 0.6765, | |
"step": 1144 | |
}, | |
{ | |
"epoch": 2.5437378505970565, | |
"grad_norm": 0.007418293505907059, | |
"learning_rate": 0.0005061728395061728, | |
"loss": 0.8896, | |
"step": 1145 | |
}, | |
{ | |
"epoch": 2.545959455706748, | |
"grad_norm": 0.0103315319865942, | |
"learning_rate": 0.0005037037037037037, | |
"loss": 0.8652, | |
"step": 1146 | |
}, | |
{ | |
"epoch": 2.54818106081644, | |
"grad_norm": 0.008294169791042805, | |
"learning_rate": 0.0005012345679012345, | |
"loss": 0.7071, | |
"step": 1147 | |
}, | |
{ | |
"epoch": 2.5504026659261316, | |
"grad_norm": 0.00622114073485136, | |
"learning_rate": 0.0004987654320987654, | |
"loss": 0.8395, | |
"step": 1148 | |
}, | |
{ | |
"epoch": 2.5526242710358233, | |
"grad_norm": 0.006999352015554905, | |
"learning_rate": 0.0004962962962962963, | |
"loss": 0.6672, | |
"step": 1149 | |
}, | |
{ | |
"epoch": 2.554845876145515, | |
"grad_norm": 0.008257112465798855, | |
"learning_rate": 0.0004938271604938272, | |
"loss": 0.7719, | |
"step": 1150 | |
}, | |
{ | |
"epoch": 2.5570674812552068, | |
"grad_norm": 0.0099422512575984, | |
"learning_rate": 0.000491358024691358, | |
"loss": 0.8573, | |
"step": 1151 | |
}, | |
{ | |
"epoch": 2.5592890863648985, | |
"grad_norm": 0.013014703057706356, | |
"learning_rate": 0.000488888888888889, | |
"loss": 0.9367, | |
"step": 1152 | |
}, | |
{ | |
"epoch": 2.5615106914745907, | |
"grad_norm": 0.006244743708521128, | |
"learning_rate": 0.00048641975308641976, | |
"loss": 0.7401, | |
"step": 1153 | |
}, | |
{ | |
"epoch": 2.5637322965842824, | |
"grad_norm": 0.007896228693425655, | |
"learning_rate": 0.0004839506172839506, | |
"loss": 0.6035, | |
"step": 1154 | |
}, | |
{ | |
"epoch": 2.565953901693974, | |
"grad_norm": 0.012634181417524815, | |
"learning_rate": 0.00048148148148148144, | |
"loss": 0.7003, | |
"step": 1155 | |
}, | |
{ | |
"epoch": 2.568175506803666, | |
"grad_norm": 0.010985374450683594, | |
"learning_rate": 0.0004790123456790124, | |
"loss": 1.0121, | |
"step": 1156 | |
}, | |
{ | |
"epoch": 2.5703971119133575, | |
"grad_norm": 0.007849064655601978, | |
"learning_rate": 0.00047654320987654324, | |
"loss": 0.8866, | |
"step": 1157 | |
}, | |
{ | |
"epoch": 2.5726187170230492, | |
"grad_norm": 0.012782498262822628, | |
"learning_rate": 0.0004740740740740741, | |
"loss": 0.8004, | |
"step": 1158 | |
}, | |
{ | |
"epoch": 2.574840322132741, | |
"grad_norm": 0.01364060677587986, | |
"learning_rate": 0.0004716049382716049, | |
"loss": 0.8129, | |
"step": 1159 | |
}, | |
{ | |
"epoch": 2.5770619272424327, | |
"grad_norm": 0.0071340943686664104, | |
"learning_rate": 0.0004691358024691358, | |
"loss": 0.7682, | |
"step": 1160 | |
}, | |
{ | |
"epoch": 2.5792835323521244, | |
"grad_norm": 0.00822313129901886, | |
"learning_rate": 0.00046666666666666666, | |
"loss": 1.0239, | |
"step": 1161 | |
}, | |
{ | |
"epoch": 2.581505137461816, | |
"grad_norm": 0.009147636592388153, | |
"learning_rate": 0.0004641975308641975, | |
"loss": 0.7753, | |
"step": 1162 | |
}, | |
{ | |
"epoch": 2.583726742571508, | |
"grad_norm": 0.00680557219311595, | |
"learning_rate": 0.00046172839506172835, | |
"loss": 0.8775, | |
"step": 1163 | |
}, | |
{ | |
"epoch": 2.5859483476811995, | |
"grad_norm": 0.012209068052470684, | |
"learning_rate": 0.0004592592592592593, | |
"loss": 0.9347, | |
"step": 1164 | |
}, | |
{ | |
"epoch": 2.5881699527908912, | |
"grad_norm": 0.008567320182919502, | |
"learning_rate": 0.00045679012345679014, | |
"loss": 0.667, | |
"step": 1165 | |
}, | |
{ | |
"epoch": 2.590391557900583, | |
"grad_norm": 0.009676763787865639, | |
"learning_rate": 0.000454320987654321, | |
"loss": 0.8293, | |
"step": 1166 | |
}, | |
{ | |
"epoch": 2.5926131630102747, | |
"grad_norm": 0.0067940810695290565, | |
"learning_rate": 0.0004518518518518519, | |
"loss": 0.9675, | |
"step": 1167 | |
}, | |
{ | |
"epoch": 2.594834768119967, | |
"grad_norm": 0.006839894689619541, | |
"learning_rate": 0.0004493827160493827, | |
"loss": 0.8892, | |
"step": 1168 | |
}, | |
{ | |
"epoch": 2.5970563732296585, | |
"grad_norm": 0.006143914069980383, | |
"learning_rate": 0.00044691358024691357, | |
"loss": 0.8653, | |
"step": 1169 | |
}, | |
{ | |
"epoch": 2.5992779783393503, | |
"grad_norm": 0.0072947400622069836, | |
"learning_rate": 0.0004444444444444444, | |
"loss": 0.7919, | |
"step": 1170 | |
}, | |
{ | |
"epoch": 2.601499583449042, | |
"grad_norm": 0.006502422038465738, | |
"learning_rate": 0.00044197530864197536, | |
"loss": 0.7423, | |
"step": 1171 | |
}, | |
{ | |
"epoch": 2.6037211885587337, | |
"grad_norm": 0.006935762241482735, | |
"learning_rate": 0.0004395061728395062, | |
"loss": 0.9463, | |
"step": 1172 | |
}, | |
{ | |
"epoch": 2.6059427936684254, | |
"grad_norm": 0.010217388160526752, | |
"learning_rate": 0.00043703703703703705, | |
"loss": 0.7173, | |
"step": 1173 | |
}, | |
{ | |
"epoch": 2.608164398778117, | |
"grad_norm": 0.010664797388017178, | |
"learning_rate": 0.0004345679012345679, | |
"loss": 0.8849, | |
"step": 1174 | |
}, | |
{ | |
"epoch": 2.610386003887809, | |
"grad_norm": 0.00750716682523489, | |
"learning_rate": 0.0004320987654320988, | |
"loss": 0.7131, | |
"step": 1175 | |
}, | |
{ | |
"epoch": 2.6126076089975006, | |
"grad_norm": 0.009238378144800663, | |
"learning_rate": 0.00042962962962962963, | |
"loss": 0.6628, | |
"step": 1176 | |
}, | |
{ | |
"epoch": 2.6148292141071927, | |
"grad_norm": 0.019038479775190353, | |
"learning_rate": 0.0004271604938271605, | |
"loss": 0.8115, | |
"step": 1177 | |
}, | |
{ | |
"epoch": 2.6170508192168844, | |
"grad_norm": 0.008630185388028622, | |
"learning_rate": 0.0004246913580246913, | |
"loss": 0.6493, | |
"step": 1178 | |
}, | |
{ | |
"epoch": 2.619272424326576, | |
"grad_norm": 0.0108924126252532, | |
"learning_rate": 0.00042222222222222227, | |
"loss": 0.986, | |
"step": 1179 | |
}, | |
{ | |
"epoch": 2.621494029436268, | |
"grad_norm": 0.005341158714145422, | |
"learning_rate": 0.0004197530864197531, | |
"loss": 0.8286, | |
"step": 1180 | |
}, | |
{ | |
"epoch": 2.6237156345459596, | |
"grad_norm": 0.008509661071002483, | |
"learning_rate": 0.00041728395061728396, | |
"loss": 0.7908, | |
"step": 1181 | |
}, | |
{ | |
"epoch": 2.6259372396556513, | |
"grad_norm": 0.013711505569517612, | |
"learning_rate": 0.0004148148148148148, | |
"loss": 0.7929, | |
"step": 1182 | |
}, | |
{ | |
"epoch": 2.628158844765343, | |
"grad_norm": 0.008806861005723476, | |
"learning_rate": 0.0004123456790123457, | |
"loss": 0.9112, | |
"step": 1183 | |
}, | |
{ | |
"epoch": 2.6303804498750347, | |
"grad_norm": 0.01078883744776249, | |
"learning_rate": 0.00040987654320987654, | |
"loss": 0.5432, | |
"step": 1184 | |
}, | |
{ | |
"epoch": 2.6326020549847264, | |
"grad_norm": 0.007406941149383783, | |
"learning_rate": 0.0004074074074074074, | |
"loss": 0.8226, | |
"step": 1185 | |
}, | |
{ | |
"epoch": 2.634823660094418, | |
"grad_norm": 0.016257604584097862, | |
"learning_rate": 0.00040493827160493833, | |
"loss": 0.7704, | |
"step": 1186 | |
}, | |
{ | |
"epoch": 2.63704526520411, | |
"grad_norm": 0.00873923022300005, | |
"learning_rate": 0.0004024691358024692, | |
"loss": 0.7444, | |
"step": 1187 | |
}, | |
{ | |
"epoch": 2.6392668703138016, | |
"grad_norm": 0.010216313414275646, | |
"learning_rate": 0.0004, | |
"loss": 0.7796, | |
"step": 1188 | |
}, | |
{ | |
"epoch": 2.6414884754234933, | |
"grad_norm": 0.015559897758066654, | |
"learning_rate": 0.00039753086419753086, | |
"loss": 0.8496, | |
"step": 1189 | |
}, | |
{ | |
"epoch": 2.643710080533185, | |
"grad_norm": 0.007304896134883165, | |
"learning_rate": 0.00039506172839506176, | |
"loss": 0.88, | |
"step": 1190 | |
}, | |
{ | |
"epoch": 2.6459316856428767, | |
"grad_norm": 0.007256948854774237, | |
"learning_rate": 0.0003925925925925926, | |
"loss": 0.7941, | |
"step": 1191 | |
}, | |
{ | |
"epoch": 2.648153290752569, | |
"grad_norm": 0.0061131990514695644, | |
"learning_rate": 0.00039012345679012345, | |
"loss": 0.824, | |
"step": 1192 | |
}, | |
{ | |
"epoch": 2.6503748958622606, | |
"grad_norm": 0.009228897280991077, | |
"learning_rate": 0.0003876543209876543, | |
"loss": 0.9218, | |
"step": 1193 | |
}, | |
{ | |
"epoch": 2.6525965009719523, | |
"grad_norm": 0.007349466439336538, | |
"learning_rate": 0.00038518518518518524, | |
"loss": 0.674, | |
"step": 1194 | |
}, | |
{ | |
"epoch": 2.654818106081644, | |
"grad_norm": 0.0076787457801401615, | |
"learning_rate": 0.0003827160493827161, | |
"loss": 0.7444, | |
"step": 1195 | |
}, | |
{ | |
"epoch": 2.6570397111913358, | |
"grad_norm": 0.010441628284752369, | |
"learning_rate": 0.0003802469135802469, | |
"loss": 0.9451, | |
"step": 1196 | |
}, | |
{ | |
"epoch": 2.6592613163010275, | |
"grad_norm": 0.01100853830575943, | |
"learning_rate": 0.00037777777777777777, | |
"loss": 0.9497, | |
"step": 1197 | |
}, | |
{ | |
"epoch": 2.661482921410719, | |
"grad_norm": 0.008857887238264084, | |
"learning_rate": 0.00037530864197530867, | |
"loss": 0.812, | |
"step": 1198 | |
}, | |
{ | |
"epoch": 2.663704526520411, | |
"grad_norm": 0.0065878876484930515, | |
"learning_rate": 0.0003728395061728395, | |
"loss": 0.8649, | |
"step": 1199 | |
}, | |
{ | |
"epoch": 2.6659261316301026, | |
"grad_norm": 0.009736582636833191, | |
"learning_rate": 0.00037037037037037035, | |
"loss": 0.5912, | |
"step": 1200 | |
}, | |
{ | |
"epoch": 2.6681477367397948, | |
"grad_norm": 0.012062900699675083, | |
"learning_rate": 0.00036790123456790125, | |
"loss": 0.9503, | |
"step": 1201 | |
}, | |
{ | |
"epoch": 2.6703693418494865, | |
"grad_norm": 0.016475075855851173, | |
"learning_rate": 0.00036543209876543215, | |
"loss": 0.635, | |
"step": 1202 | |
}, | |
{ | |
"epoch": 2.672590946959178, | |
"grad_norm": 0.008248421363532543, | |
"learning_rate": 0.000362962962962963, | |
"loss": 0.741, | |
"step": 1203 | |
}, | |
{ | |
"epoch": 2.67481255206887, | |
"grad_norm": 0.006326109170913696, | |
"learning_rate": 0.00036049382716049383, | |
"loss": 0.6972, | |
"step": 1204 | |
}, | |
{ | |
"epoch": 2.6770341571785616, | |
"grad_norm": 0.007503593806177378, | |
"learning_rate": 0.0003580246913580247, | |
"loss": 0.7367, | |
"step": 1205 | |
}, | |
{ | |
"epoch": 2.6792557622882534, | |
"grad_norm": 0.0090335663408041, | |
"learning_rate": 0.00035555555555555557, | |
"loss": 0.8782, | |
"step": 1206 | |
}, | |
{ | |
"epoch": 2.681477367397945, | |
"grad_norm": 0.013283466920256615, | |
"learning_rate": 0.0003530864197530864, | |
"loss": 0.8175, | |
"step": 1207 | |
}, | |
{ | |
"epoch": 2.683698972507637, | |
"grad_norm": 0.007493285927921534, | |
"learning_rate": 0.0003506172839506173, | |
"loss": 0.7386, | |
"step": 1208 | |
}, | |
{ | |
"epoch": 2.6859205776173285, | |
"grad_norm": 0.011668765917420387, | |
"learning_rate": 0.00034814814814814816, | |
"loss": 1.0218, | |
"step": 1209 | |
}, | |
{ | |
"epoch": 2.68814218272702, | |
"grad_norm": 0.008925607427954674, | |
"learning_rate": 0.00034567901234567905, | |
"loss": 0.7263, | |
"step": 1210 | |
}, | |
{ | |
"epoch": 2.690363787836712, | |
"grad_norm": 0.009130970574915409, | |
"learning_rate": 0.0003432098765432099, | |
"loss": 0.7787, | |
"step": 1211 | |
}, | |
{ | |
"epoch": 2.6925853929464036, | |
"grad_norm": 0.006965592969208956, | |
"learning_rate": 0.00034074074074074074, | |
"loss": 0.7084, | |
"step": 1212 | |
}, | |
{ | |
"epoch": 2.6948069980560954, | |
"grad_norm": 0.03262852504849434, | |
"learning_rate": 0.0003382716049382716, | |
"loss": 0.8719, | |
"step": 1213 | |
}, | |
{ | |
"epoch": 2.697028603165787, | |
"grad_norm": 0.00794121716171503, | |
"learning_rate": 0.0003358024691358025, | |
"loss": 0.7461, | |
"step": 1214 | |
}, | |
{ | |
"epoch": 2.699250208275479, | |
"grad_norm": 0.00642223609611392, | |
"learning_rate": 0.0003333333333333333, | |
"loss": 0.8822, | |
"step": 1215 | |
}, | |
{ | |
"epoch": 2.701471813385171, | |
"grad_norm": 0.009445881471037865, | |
"learning_rate": 0.0003308641975308642, | |
"loss": 0.7372, | |
"step": 1216 | |
}, | |
{ | |
"epoch": 2.7036934184948627, | |
"grad_norm": 0.012715525925159454, | |
"learning_rate": 0.00032839506172839506, | |
"loss": 0.6933, | |
"step": 1217 | |
}, | |
{ | |
"epoch": 2.7059150236045544, | |
"grad_norm": 0.016722604632377625, | |
"learning_rate": 0.0003259259259259259, | |
"loss": 0.9278, | |
"step": 1218 | |
}, | |
{ | |
"epoch": 2.708136628714246, | |
"grad_norm": 0.010087000206112862, | |
"learning_rate": 0.0003234567901234568, | |
"loss": 0.862, | |
"step": 1219 | |
}, | |
{ | |
"epoch": 2.710358233823938, | |
"grad_norm": 0.007342969998717308, | |
"learning_rate": 0.00032098765432098765, | |
"loss": 0.8919, | |
"step": 1220 | |
}, | |
{ | |
"epoch": 2.7125798389336295, | |
"grad_norm": 0.006463713478296995, | |
"learning_rate": 0.00031851851851851854, | |
"loss": 0.7057, | |
"step": 1221 | |
}, | |
{ | |
"epoch": 2.7148014440433212, | |
"grad_norm": 0.008394156582653522, | |
"learning_rate": 0.0003160493827160494, | |
"loss": 0.7823, | |
"step": 1222 | |
}, | |
{ | |
"epoch": 2.717023049153013, | |
"grad_norm": 0.007742198649793863, | |
"learning_rate": 0.0003135802469135803, | |
"loss": 0.7605, | |
"step": 1223 | |
}, | |
{ | |
"epoch": 2.7192446542627047, | |
"grad_norm": 0.008673381991684437, | |
"learning_rate": 0.0003111111111111111, | |
"loss": 0.7759, | |
"step": 1224 | |
}, | |
{ | |
"epoch": 2.7214662593723964, | |
"grad_norm": 0.00807158648967743, | |
"learning_rate": 0.00030864197530864197, | |
"loss": 0.8065, | |
"step": 1225 | |
}, | |
{ | |
"epoch": 2.7236878644820885, | |
"grad_norm": 0.006811562459915876, | |
"learning_rate": 0.0003061728395061728, | |
"loss": 0.7656, | |
"step": 1226 | |
}, | |
{ | |
"epoch": 2.7259094695917803, | |
"grad_norm": 0.015771763399243355, | |
"learning_rate": 0.0003037037037037037, | |
"loss": 0.8297, | |
"step": 1227 | |
}, | |
{ | |
"epoch": 2.728131074701472, | |
"grad_norm": 0.011830280534923077, | |
"learning_rate": 0.00030123456790123455, | |
"loss": 0.7387, | |
"step": 1228 | |
}, | |
{ | |
"epoch": 2.7303526798111637, | |
"grad_norm": 0.008170777931809425, | |
"learning_rate": 0.00029876543209876545, | |
"loss": 0.6999, | |
"step": 1229 | |
}, | |
{ | |
"epoch": 2.7325742849208554, | |
"grad_norm": 0.028427088633179665, | |
"learning_rate": 0.0002962962962962963, | |
"loss": 0.7808, | |
"step": 1230 | |
}, | |
{ | |
"epoch": 2.734795890030547, | |
"grad_norm": 0.0069966730661690235, | |
"learning_rate": 0.0002938271604938272, | |
"loss": 0.8691, | |
"step": 1231 | |
}, | |
{ | |
"epoch": 2.737017495140239, | |
"grad_norm": 0.02192625217139721, | |
"learning_rate": 0.00029135802469135803, | |
"loss": 0.7229, | |
"step": 1232 | |
}, | |
{ | |
"epoch": 2.7392391002499306, | |
"grad_norm": 0.0061951447278261185, | |
"learning_rate": 0.0002888888888888889, | |
"loss": 0.5309, | |
"step": 1233 | |
}, | |
{ | |
"epoch": 2.7414607053596223, | |
"grad_norm": 0.005088411271572113, | |
"learning_rate": 0.00028641975308641977, | |
"loss": 0.8791, | |
"step": 1234 | |
}, | |
{ | |
"epoch": 2.743682310469314, | |
"grad_norm": 0.011514096520841122, | |
"learning_rate": 0.0002839506172839506, | |
"loss": 0.6298, | |
"step": 1235 | |
}, | |
{ | |
"epoch": 2.7459039155790057, | |
"grad_norm": 0.00723766814917326, | |
"learning_rate": 0.0002814814814814815, | |
"loss": 0.7606, | |
"step": 1236 | |
}, | |
{ | |
"epoch": 2.7481255206886974, | |
"grad_norm": 0.009152224287390709, | |
"learning_rate": 0.00027901234567901236, | |
"loss": 0.8691, | |
"step": 1237 | |
}, | |
{ | |
"epoch": 2.750347125798389, | |
"grad_norm": 0.010683218017220497, | |
"learning_rate": 0.00027654320987654325, | |
"loss": 0.7259, | |
"step": 1238 | |
}, | |
{ | |
"epoch": 2.752568730908081, | |
"grad_norm": 0.014296338893473148, | |
"learning_rate": 0.0002740740740740741, | |
"loss": 0.6487, | |
"step": 1239 | |
}, | |
{ | |
"epoch": 2.7547903360177726, | |
"grad_norm": 0.017391860485076904, | |
"learning_rate": 0.00027160493827160494, | |
"loss": 0.642, | |
"step": 1240 | |
}, | |
{ | |
"epoch": 2.7570119411274647, | |
"grad_norm": 0.007915521040558815, | |
"learning_rate": 0.0002691358024691358, | |
"loss": 0.9457, | |
"step": 1241 | |
}, | |
{ | |
"epoch": 2.7592335462371564, | |
"grad_norm": 0.01041811890900135, | |
"learning_rate": 0.0002666666666666667, | |
"loss": 0.7203, | |
"step": 1242 | |
}, | |
{ | |
"epoch": 2.761455151346848, | |
"grad_norm": 0.01122908852994442, | |
"learning_rate": 0.0002641975308641975, | |
"loss": 0.911, | |
"step": 1243 | |
}, | |
{ | |
"epoch": 2.76367675645654, | |
"grad_norm": 0.011805608868598938, | |
"learning_rate": 0.0002617283950617284, | |
"loss": 0.7091, | |
"step": 1244 | |
}, | |
{ | |
"epoch": 2.7658983615662316, | |
"grad_norm": 0.008046625182032585, | |
"learning_rate": 0.00025925925925925926, | |
"loss": 0.6665, | |
"step": 1245 | |
}, | |
{ | |
"epoch": 2.7681199666759233, | |
"grad_norm": 0.008950291201472282, | |
"learning_rate": 0.00025679012345679016, | |
"loss": 0.5604, | |
"step": 1246 | |
}, | |
{ | |
"epoch": 2.770341571785615, | |
"grad_norm": 0.009476016275584698, | |
"learning_rate": 0.000254320987654321, | |
"loss": 0.6785, | |
"step": 1247 | |
}, | |
{ | |
"epoch": 2.7725631768953067, | |
"grad_norm": 0.007629558444023132, | |
"learning_rate": 0.00025185185185185185, | |
"loss": 0.9354, | |
"step": 1248 | |
}, | |
{ | |
"epoch": 2.7747847820049985, | |
"grad_norm": 0.011383921839296818, | |
"learning_rate": 0.0002493827160493827, | |
"loss": 0.7957, | |
"step": 1249 | |
}, | |
{ | |
"epoch": 2.7770063871146906, | |
"grad_norm": 0.009743484668433666, | |
"learning_rate": 0.0002469135802469136, | |
"loss": 0.7978, | |
"step": 1250 | |
}, | |
{ | |
"epoch": 2.7792279922243823, | |
"grad_norm": 0.008847765624523163, | |
"learning_rate": 0.0002444444444444445, | |
"loss": 0.9675, | |
"step": 1251 | |
}, | |
{ | |
"epoch": 2.781449597334074, | |
"grad_norm": 0.007048389874398708, | |
"learning_rate": 0.0002419753086419753, | |
"loss": 0.8923, | |
"step": 1252 | |
}, | |
{ | |
"epoch": 2.7836712024437658, | |
"grad_norm": 0.011864888481795788, | |
"learning_rate": 0.0002395061728395062, | |
"loss": 0.8361, | |
"step": 1253 | |
}, | |
{ | |
"epoch": 2.7858928075534575, | |
"grad_norm": 0.007807235699146986, | |
"learning_rate": 0.00023703703703703704, | |
"loss": 0.8768, | |
"step": 1254 | |
}, | |
{ | |
"epoch": 2.788114412663149, | |
"grad_norm": 0.006801323965191841, | |
"learning_rate": 0.0002345679012345679, | |
"loss": 0.7097, | |
"step": 1255 | |
}, | |
{ | |
"epoch": 2.790336017772841, | |
"grad_norm": 0.005747576244175434, | |
"learning_rate": 0.00023209876543209875, | |
"loss": 0.6689, | |
"step": 1256 | |
}, | |
{ | |
"epoch": 2.7925576228825326, | |
"grad_norm": 0.007229214999824762, | |
"learning_rate": 0.00022962962962962965, | |
"loss": 0.7671, | |
"step": 1257 | |
}, | |
{ | |
"epoch": 2.7947792279922243, | |
"grad_norm": 0.006883475463837385, | |
"learning_rate": 0.0002271604938271605, | |
"loss": 0.7366, | |
"step": 1258 | |
}, | |
{ | |
"epoch": 2.797000833101916, | |
"grad_norm": 0.009386280551552773, | |
"learning_rate": 0.00022469135802469136, | |
"loss": 0.7915, | |
"step": 1259 | |
}, | |
{ | |
"epoch": 2.7992224382116078, | |
"grad_norm": 0.010489873588085175, | |
"learning_rate": 0.0002222222222222222, | |
"loss": 0.9666, | |
"step": 1260 | |
}, | |
{ | |
"epoch": 2.8014440433212995, | |
"grad_norm": 0.009199917316436768, | |
"learning_rate": 0.0002197530864197531, | |
"loss": 0.8122, | |
"step": 1261 | |
}, | |
{ | |
"epoch": 2.803665648430991, | |
"grad_norm": 0.00824709516018629, | |
"learning_rate": 0.00021728395061728395, | |
"loss": 0.9059, | |
"step": 1262 | |
}, | |
{ | |
"epoch": 2.805887253540683, | |
"grad_norm": 0.008696146309375763, | |
"learning_rate": 0.00021481481481481482, | |
"loss": 0.6446, | |
"step": 1263 | |
}, | |
{ | |
"epoch": 2.8081088586503746, | |
"grad_norm": 0.008835419081151485, | |
"learning_rate": 0.00021234567901234566, | |
"loss": 1.0159, | |
"step": 1264 | |
}, | |
{ | |
"epoch": 2.810330463760067, | |
"grad_norm": 0.010744646191596985, | |
"learning_rate": 0.00020987654320987656, | |
"loss": 0.8361, | |
"step": 1265 | |
}, | |
{ | |
"epoch": 2.8125520688697585, | |
"grad_norm": 0.015377935022115707, | |
"learning_rate": 0.0002074074074074074, | |
"loss": 0.8772, | |
"step": 1266 | |
}, | |
{ | |
"epoch": 2.81477367397945, | |
"grad_norm": 0.008018149994313717, | |
"learning_rate": 0.00020493827160493827, | |
"loss": 0.8452, | |
"step": 1267 | |
}, | |
{ | |
"epoch": 2.816995279089142, | |
"grad_norm": 0.008665296249091625, | |
"learning_rate": 0.00020246913580246917, | |
"loss": 0.7223, | |
"step": 1268 | |
}, | |
{ | |
"epoch": 2.8192168841988337, | |
"grad_norm": 0.011783248744904995, | |
"learning_rate": 0.0002, | |
"loss": 0.9633, | |
"step": 1269 | |
}, | |
{ | |
"epoch": 2.8214384893085254, | |
"grad_norm": 0.00910250935703516, | |
"learning_rate": 0.00019753086419753088, | |
"loss": 0.6806, | |
"step": 1270 | |
}, | |
{ | |
"epoch": 2.823660094418217, | |
"grad_norm": 0.006656398996710777, | |
"learning_rate": 0.00019506172839506172, | |
"loss": 0.837, | |
"step": 1271 | |
}, | |
{ | |
"epoch": 2.825881699527909, | |
"grad_norm": 0.010195860639214516, | |
"learning_rate": 0.00019259259259259262, | |
"loss": 0.6618, | |
"step": 1272 | |
}, | |
{ | |
"epoch": 2.8281033046376005, | |
"grad_norm": 0.007249714806675911, | |
"learning_rate": 0.00019012345679012346, | |
"loss": 0.7854, | |
"step": 1273 | |
}, | |
{ | |
"epoch": 2.8303249097472927, | |
"grad_norm": 0.010114341974258423, | |
"learning_rate": 0.00018765432098765433, | |
"loss": 0.5872, | |
"step": 1274 | |
}, | |
{ | |
"epoch": 2.8325465148569844, | |
"grad_norm": 0.006100023165345192, | |
"learning_rate": 0.00018518518518518518, | |
"loss": 0.7327, | |
"step": 1275 | |
}, | |
{ | |
"epoch": 2.834768119966676, | |
"grad_norm": 0.007089332211762667, | |
"learning_rate": 0.00018271604938271607, | |
"loss": 0.7358, | |
"step": 1276 | |
}, | |
{ | |
"epoch": 2.836989725076368, | |
"grad_norm": 0.007861738093197346, | |
"learning_rate": 0.00018024691358024692, | |
"loss": 0.7768, | |
"step": 1277 | |
}, | |
{ | |
"epoch": 2.8392113301860595, | |
"grad_norm": 0.007813625037670135, | |
"learning_rate": 0.00017777777777777779, | |
"loss": 0.8741, | |
"step": 1278 | |
}, | |
{ | |
"epoch": 2.8414329352957512, | |
"grad_norm": 0.007935810834169388, | |
"learning_rate": 0.00017530864197530866, | |
"loss": 0.6432, | |
"step": 1279 | |
}, | |
{ | |
"epoch": 2.843654540405443, | |
"grad_norm": 0.01766696572303772, | |
"learning_rate": 0.00017283950617283953, | |
"loss": 0.7577, | |
"step": 1280 | |
}, | |
{ | |
"epoch": 2.8458761455151347, | |
"grad_norm": 0.0071344939060509205, | |
"learning_rate": 0.00017037037037037037, | |
"loss": 0.8434, | |
"step": 1281 | |
}, | |
{ | |
"epoch": 2.8480977506248264, | |
"grad_norm": 0.007316224742680788, | |
"learning_rate": 0.00016790123456790124, | |
"loss": 0.6397, | |
"step": 1282 | |
}, | |
{ | |
"epoch": 2.850319355734518, | |
"grad_norm": 0.008054982870817184, | |
"learning_rate": 0.0001654320987654321, | |
"loss": 0.6832, | |
"step": 1283 | |
}, | |
{ | |
"epoch": 2.85254096084421, | |
"grad_norm": 0.01453061867505312, | |
"learning_rate": 0.00016296296296296295, | |
"loss": 0.773, | |
"step": 1284 | |
}, | |
{ | |
"epoch": 2.8547625659539015, | |
"grad_norm": 0.008884952403604984, | |
"learning_rate": 0.00016049382716049382, | |
"loss": 0.8363, | |
"step": 1285 | |
}, | |
{ | |
"epoch": 2.8569841710635933, | |
"grad_norm": 0.007511654868721962, | |
"learning_rate": 0.0001580246913580247, | |
"loss": 0.9314, | |
"step": 1286 | |
}, | |
{ | |
"epoch": 2.859205776173285, | |
"grad_norm": 0.008190833032131195, | |
"learning_rate": 0.00015555555555555556, | |
"loss": 0.7633, | |
"step": 1287 | |
}, | |
{ | |
"epoch": 2.8614273812829767, | |
"grad_norm": 0.006483402568846941, | |
"learning_rate": 0.0001530864197530864, | |
"loss": 0.9713, | |
"step": 1288 | |
}, | |
{ | |
"epoch": 2.863648986392669, | |
"grad_norm": 0.007926386781036854, | |
"learning_rate": 0.00015061728395061728, | |
"loss": 0.9167, | |
"step": 1289 | |
}, | |
{ | |
"epoch": 2.8658705915023606, | |
"grad_norm": 0.0071045164950191975, | |
"learning_rate": 0.00014814814814814815, | |
"loss": 0.7618, | |
"step": 1290 | |
}, | |
{ | |
"epoch": 2.8680921966120523, | |
"grad_norm": 0.0076851435005664825, | |
"learning_rate": 0.00014567901234567902, | |
"loss": 0.7558, | |
"step": 1291 | |
}, | |
{ | |
"epoch": 2.870313801721744, | |
"grad_norm": 0.006890943273901939, | |
"learning_rate": 0.00014320987654320989, | |
"loss": 0.883, | |
"step": 1292 | |
}, | |
{ | |
"epoch": 2.8725354068314357, | |
"grad_norm": 0.007618566509336233, | |
"learning_rate": 0.00014074074074074076, | |
"loss": 0.9588, | |
"step": 1293 | |
}, | |
{ | |
"epoch": 2.8747570119411274, | |
"grad_norm": 0.010207663290202618, | |
"learning_rate": 0.00013827160493827163, | |
"loss": 1.0954, | |
"step": 1294 | |
}, | |
{ | |
"epoch": 2.876978617050819, | |
"grad_norm": 0.01138080283999443, | |
"learning_rate": 0.00013580246913580247, | |
"loss": 0.7984, | |
"step": 1295 | |
}, | |
{ | |
"epoch": 2.879200222160511, | |
"grad_norm": 0.006852086633443832, | |
"learning_rate": 0.00013333333333333334, | |
"loss": 0.647, | |
"step": 1296 | |
}, | |
{ | |
"epoch": 2.8814218272702026, | |
"grad_norm": 0.006972672417759895, | |
"learning_rate": 0.0001308641975308642, | |
"loss": 0.7138, | |
"step": 1297 | |
}, | |
{ | |
"epoch": 2.8836434323798947, | |
"grad_norm": 0.008358401246368885, | |
"learning_rate": 0.00012839506172839508, | |
"loss": 0.6586, | |
"step": 1298 | |
}, | |
{ | |
"epoch": 2.8858650374895864, | |
"grad_norm": 0.010818328708410263, | |
"learning_rate": 0.00012592592592592592, | |
"loss": 0.729, | |
"step": 1299 | |
}, | |
{ | |
"epoch": 2.888086642599278, | |
"grad_norm": 0.007076184265315533, | |
"learning_rate": 0.0001234567901234568, | |
"loss": 0.6661, | |
"step": 1300 | |
}, | |
{ | |
"epoch": 2.89030824770897, | |
"grad_norm": 0.009897168725728989, | |
"learning_rate": 0.00012098765432098765, | |
"loss": 1.1523, | |
"step": 1301 | |
}, | |
{ | |
"epoch": 2.8925298528186616, | |
"grad_norm": 0.009953767992556095, | |
"learning_rate": 0.00011851851851851852, | |
"loss": 0.7484, | |
"step": 1302 | |
}, | |
{ | |
"epoch": 2.8947514579283533, | |
"grad_norm": 0.010332675650715828, | |
"learning_rate": 0.00011604938271604938, | |
"loss": 0.6528, | |
"step": 1303 | |
}, | |
{ | |
"epoch": 2.896973063038045, | |
"grad_norm": 0.006213752087205648, | |
"learning_rate": 0.00011358024691358025, | |
"loss": 0.5496, | |
"step": 1304 | |
}, | |
{ | |
"epoch": 2.8991946681477367, | |
"grad_norm": 0.005716019310057163, | |
"learning_rate": 0.0001111111111111111, | |
"loss": 0.7742, | |
"step": 1305 | |
}, | |
{ | |
"epoch": 2.9014162732574285, | |
"grad_norm": 0.007558442186564207, | |
"learning_rate": 0.00010864197530864197, | |
"loss": 0.7281, | |
"step": 1306 | |
}, | |
{ | |
"epoch": 2.90363787836712, | |
"grad_norm": 0.009284201078116894, | |
"learning_rate": 0.00010617283950617283, | |
"loss": 0.7379, | |
"step": 1307 | |
}, | |
{ | |
"epoch": 2.905859483476812, | |
"grad_norm": 0.00817166455090046, | |
"learning_rate": 0.0001037037037037037, | |
"loss": 0.8667, | |
"step": 1308 | |
}, | |
{ | |
"epoch": 2.9080810885865036, | |
"grad_norm": 0.011280300095677376, | |
"learning_rate": 0.00010123456790123458, | |
"loss": 0.8758, | |
"step": 1309 | |
}, | |
{ | |
"epoch": 2.9103026936961953, | |
"grad_norm": 0.006844306364655495, | |
"learning_rate": 9.876543209876544e-05, | |
"loss": 0.7103, | |
"step": 1310 | |
}, | |
{ | |
"epoch": 2.912524298805887, | |
"grad_norm": 0.005340827163308859, | |
"learning_rate": 9.629629629629631e-05, | |
"loss": 0.6803, | |
"step": 1311 | |
}, | |
{ | |
"epoch": 2.9147459039155788, | |
"grad_norm": 0.006157919764518738, | |
"learning_rate": 9.382716049382717e-05, | |
"loss": 0.8298, | |
"step": 1312 | |
}, | |
{ | |
"epoch": 2.916967509025271, | |
"grad_norm": 0.019693652167916298, | |
"learning_rate": 9.135802469135804e-05, | |
"loss": 0.8528, | |
"step": 1313 | |
}, | |
{ | |
"epoch": 2.9191891141349626, | |
"grad_norm": 0.008847893215715885, | |
"learning_rate": 8.888888888888889e-05, | |
"loss": 0.9522, | |
"step": 1314 | |
}, | |
{ | |
"epoch": 2.9214107192446543, | |
"grad_norm": 0.03038479946553707, | |
"learning_rate": 8.641975308641976e-05, | |
"loss": 0.7119, | |
"step": 1315 | |
}, | |
{ | |
"epoch": 2.923632324354346, | |
"grad_norm": 0.009875786490738392, | |
"learning_rate": 8.395061728395062e-05, | |
"loss": 0.5868, | |
"step": 1316 | |
}, | |
{ | |
"epoch": 2.9258539294640378, | |
"grad_norm": 0.007000662852078676, | |
"learning_rate": 8.148148148148148e-05, | |
"loss": 0.8366, | |
"step": 1317 | |
}, | |
{ | |
"epoch": 2.9280755345737295, | |
"grad_norm": 0.009017952717840672, | |
"learning_rate": 7.901234567901235e-05, | |
"loss": 0.856, | |
"step": 1318 | |
}, | |
{ | |
"epoch": 2.930297139683421, | |
"grad_norm": 0.009106653742492199, | |
"learning_rate": 7.65432098765432e-05, | |
"loss": 0.844, | |
"step": 1319 | |
}, | |
{ | |
"epoch": 2.932518744793113, | |
"grad_norm": 0.004958700388669968, | |
"learning_rate": 7.407407407407407e-05, | |
"loss": 0.7263, | |
"step": 1320 | |
}, | |
{ | |
"epoch": 2.9347403499028046, | |
"grad_norm": 0.014845774509012699, | |
"learning_rate": 7.160493827160494e-05, | |
"loss": 0.7577, | |
"step": 1321 | |
}, | |
{ | |
"epoch": 2.936961955012497, | |
"grad_norm": 0.008830921724438667, | |
"learning_rate": 6.913580246913581e-05, | |
"loss": 0.6669, | |
"step": 1322 | |
}, | |
{ | |
"epoch": 2.9391835601221885, | |
"grad_norm": 0.009541644714772701, | |
"learning_rate": 6.666666666666667e-05, | |
"loss": 0.7751, | |
"step": 1323 | |
}, | |
{ | |
"epoch": 2.9414051652318802, | |
"grad_norm": 0.008899768814444542, | |
"learning_rate": 6.419753086419754e-05, | |
"loss": 0.6798, | |
"step": 1324 | |
}, | |
{ | |
"epoch": 2.943626770341572, | |
"grad_norm": 0.012478734366595745, | |
"learning_rate": 6.17283950617284e-05, | |
"loss": 0.8341, | |
"step": 1325 | |
}, | |
{ | |
"epoch": 2.9458483754512637, | |
"grad_norm": 0.008129597641527653, | |
"learning_rate": 5.925925925925926e-05, | |
"loss": 0.8493, | |
"step": 1326 | |
}, | |
{ | |
"epoch": 2.9480699805609554, | |
"grad_norm": 0.01137120183557272, | |
"learning_rate": 5.679012345679012e-05, | |
"loss": 0.7003, | |
"step": 1327 | |
}, | |
{ | |
"epoch": 2.950291585670647, | |
"grad_norm": 0.0069569917395710945, | |
"learning_rate": 5.4320987654320986e-05, | |
"loss": 0.7322, | |
"step": 1328 | |
}, | |
{ | |
"epoch": 2.952513190780339, | |
"grad_norm": 0.005949252285063267, | |
"learning_rate": 5.185185185185185e-05, | |
"loss": 0.8372, | |
"step": 1329 | |
}, | |
{ | |
"epoch": 2.9547347958900305, | |
"grad_norm": 0.010746531188488007, | |
"learning_rate": 4.938271604938272e-05, | |
"loss": 0.6656, | |
"step": 1330 | |
}, | |
{ | |
"epoch": 2.9569564009997222, | |
"grad_norm": 0.007687109522521496, | |
"learning_rate": 4.691358024691358e-05, | |
"loss": 0.8294, | |
"step": 1331 | |
}, | |
{ | |
"epoch": 2.959178006109414, | |
"grad_norm": 0.008320269174873829, | |
"learning_rate": 4.4444444444444447e-05, | |
"loss": 0.8337, | |
"step": 1332 | |
}, | |
{ | |
"epoch": 2.9613996112191057, | |
"grad_norm": 0.009467615745961666, | |
"learning_rate": 4.197530864197531e-05, | |
"loss": 0.8912, | |
"step": 1333 | |
}, | |
{ | |
"epoch": 2.9636212163287974, | |
"grad_norm": 0.0069550564512610435, | |
"learning_rate": 3.950617283950617e-05, | |
"loss": 0.8289, | |
"step": 1334 | |
}, | |
{ | |
"epoch": 2.965842821438489, | |
"grad_norm": 0.0068250177428126335, | |
"learning_rate": 3.7037037037037037e-05, | |
"loss": 0.6434, | |
"step": 1335 | |
}, | |
{ | |
"epoch": 2.968064426548181, | |
"grad_norm": 0.005152587778866291, | |
"learning_rate": 3.456790123456791e-05, | |
"loss": 0.6858, | |
"step": 1336 | |
}, | |
{ | |
"epoch": 2.970286031657873, | |
"grad_norm": 0.009315388277173042, | |
"learning_rate": 3.209876543209877e-05, | |
"loss": 0.6759, | |
"step": 1337 | |
}, | |
{ | |
"epoch": 2.9725076367675647, | |
"grad_norm": 0.009328274056315422, | |
"learning_rate": 2.962962962962963e-05, | |
"loss": 0.7544, | |
"step": 1338 | |
}, | |
{ | |
"epoch": 2.9747292418772564, | |
"grad_norm": 0.00911303423345089, | |
"learning_rate": 2.7160493827160493e-05, | |
"loss": 0.8303, | |
"step": 1339 | |
}, | |
{ | |
"epoch": 2.976950846986948, | |
"grad_norm": 0.008570064790546894, | |
"learning_rate": 2.469135802469136e-05, | |
"loss": 0.7677, | |
"step": 1340 | |
}, | |
{ | |
"epoch": 2.97917245209664, | |
"grad_norm": 0.007688554469496012, | |
"learning_rate": 2.2222222222222223e-05, | |
"loss": 0.6908, | |
"step": 1341 | |
}, | |
{ | |
"epoch": 2.9813940572063315, | |
"grad_norm": 0.009369606152176857, | |
"learning_rate": 1.9753086419753087e-05, | |
"loss": 0.8807, | |
"step": 1342 | |
}, | |
{ | |
"epoch": 2.9836156623160233, | |
"grad_norm": 0.009632132947444916, | |
"learning_rate": 1.7283950617283953e-05, | |
"loss": 0.7748, | |
"step": 1343 | |
}, | |
{ | |
"epoch": 2.985837267425715, | |
"grad_norm": 0.008006836287677288, | |
"learning_rate": 1.4814814814814815e-05, | |
"loss": 0.8687, | |
"step": 1344 | |
}, | |
{ | |
"epoch": 2.9880588725354067, | |
"grad_norm": 0.0064094155095517635, | |
"learning_rate": 1.234567901234568e-05, | |
"loss": 0.7026, | |
"step": 1345 | |
}, | |
{ | |
"epoch": 2.990280477645099, | |
"grad_norm": 0.007208329625427723, | |
"learning_rate": 9.876543209876543e-06, | |
"loss": 0.7213, | |
"step": 1346 | |
}, | |
{ | |
"epoch": 2.9925020827547906, | |
"grad_norm": 0.006725262384861708, | |
"learning_rate": 7.4074074074074075e-06, | |
"loss": 0.6405, | |
"step": 1347 | |
}, | |
{ | |
"epoch": 2.9947236878644823, | |
"grad_norm": 0.009039103053510189, | |
"learning_rate": 4.938271604938272e-06, | |
"loss": 0.5949, | |
"step": 1348 | |
}, | |
{ | |
"epoch": 2.996945292974174, | |
"grad_norm": 0.007107519078999758, | |
"learning_rate": 2.469135802469136e-06, | |
"loss": 0.7717, | |
"step": 1349 | |
}, | |
{ | |
"epoch": 2.9991668980838657, | |
"grad_norm": 0.008029144257307053, | |
"learning_rate": 0.0, | |
"loss": 0.6898, | |
"step": 1350 | |
}, | |
{ | |
"epoch": 2.9991668980838657, | |
"step": 1350, | |
"total_flos": 2.317351723938611e+16, | |
"train_loss": 0.7935600814112911, | |
"train_runtime": 1657.45, | |
"train_samples_per_second": 13.036, | |
"train_steps_per_second": 0.815 | |
} | |
], | |
"logging_steps": 1.0, | |
"max_steps": 1350, | |
"num_input_tokens_seen": 0, | |
"num_train_epochs": 3, | |
"save_steps": 500, | |
"stateful_callbacks": { | |
"TrainerControl": { | |
"args": { | |
"should_epoch_stop": false, | |
"should_evaluate": false, | |
"should_log": false, | |
"should_save": true, | |
"should_training_stop": true | |
}, | |
"attributes": {} | |
} | |
}, | |
"total_flos": 2.317351723938611e+16, | |
"train_batch_size": 1, | |
"trial_name": null, | |
"trial_params": null | |
} | |